diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a1bbc8..b91d21b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,27 @@ Format follows [Keep a Changelog](https://keepachangelog.com/). Versions follow --- +## [0.2.0] - 2026-03-21 + +### Added + +- Cross-project memory scope with global scope support for sharing knowledge across projects. +- Global detection heuristic with `GLOBAL_KEYWORDS` array covering distributions, containers, orchestration, shells, databases, cloud, VCS, protocols, and package managers. +- `memory_scope_promote` tool: promote memories from project scope to global scope. +- `memory_scope_demote` tool: demote memories from global scope back to project scope. +- `memory_global_list` tool: list/search all global-scoped memories with optional unused filter. +- Usage statistics tracking: `lastRecalled`, `recallCount`, and `projectCount` fields on every memory record. +- Smart unused detection: identifies global memories not recalled within `unusedDaysThreshold` using actual recall events. +- New config options: `globalDetectionThreshold` (default: 2), `globalDiscountFactor` (default: 0.7), `unusedDaysThreshold` (default: 30). + +### Changed + +- Dual-scope recall: `memory_search` now queries both project and global scopes with global scores discounted by 0.7x. +- Auto-recall (system.transform) now includes global memories in context injection. +- `memory_global_list` output now includes usage statistics (stored date, last recalled, recall count, project count). + +--- + ## [0.1.6] - 2026-03-20 ### Fixed diff --git a/README.md b/README.md index 75ddea5..3b4e0c6 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,9 @@ If you already use other plugins, keep them and append `"lancedb-opencode-pro"`. 
"importanceWeight": 0.4 }, "includeGlobalScope": true, + "globalDetectionThreshold": 2, + "globalDiscountFactor": 0.7, + "unusedDaysThreshold": 30, "minCaptureChars": 80, "maxEntriesPerScope": 3000 } @@ -184,6 +187,9 @@ Create `~/.config/opencode/lancedb-opencode-pro.json`: "importanceWeight": 0.4 }, "includeGlobalScope": true, + "globalDetectionThreshold": 2, + "globalDiscountFactor": 0.7, + "unusedDaysThreshold": 30, "minCaptureChars": 80, "maxEntriesPerScope": 3000 } @@ -229,6 +235,9 @@ Supported environment variables: - `LANCEDB_OPENCODE_PRO_RECENCY_HALF_LIFE_HOURS` - `LANCEDB_OPENCODE_PRO_IMPORTANCE_WEIGHT` - `LANCEDB_OPENCODE_PRO_INCLUDE_GLOBAL_SCOPE` +- `LANCEDB_OPENCODE_PRO_GLOBAL_DETECTION_THRESHOLD` +- `LANCEDB_OPENCODE_PRO_GLOBAL_DISCOUNT_FACTOR` +- `LANCEDB_OPENCODE_PRO_UNUSED_DAYS_THRESHOLD` - `LANCEDB_OPENCODE_PRO_MIN_CAPTURE_CHARS` - `LANCEDB_OPENCODE_PRO_MAX_ENTRIES_PER_SCOPE` @@ -237,6 +246,7 @@ Supported environment variables: - Auto-capture of durable outcomes from completed assistant responses. - Hybrid retrieval (vector + lexical) for future context injection. - Project-scope memory isolation (`project:*` + optional `global`). +- Cross-project memory sharing via global scope with automatic detection. 
- Memory tools: - `memory_search` - `memory_delete` @@ -246,6 +256,9 @@ Supported environment variables: - `memory_feedback_wrong` - `memory_feedback_useful` - `memory_effectiveness` + - `memory_scope_promote` + - `memory_scope_demote` + - `memory_global_list` - `memory_port_plan` ## Memory Effectiveness Feedback @@ -371,6 +384,9 @@ Example sidecar: "importanceWeight": 0.4 }, "includeGlobalScope": true, + "globalDetectionThreshold": 2, + "globalDiscountFactor": 0.7, + "unusedDaysThreshold": 30, "minCaptureChars": 80, "maxEntriesPerScope": 3000 } diff --git a/openspec/specs/memory-dual-scope-recall/spec.md b/openspec/specs/memory-dual-scope-recall/spec.md new file mode 100644 index 0000000..1806992 --- /dev/null +++ b/openspec/specs/memory-dual-scope-recall/spec.md @@ -0,0 +1,51 @@ +# memory-dual-scope-recall Specification + +## Purpose + +When retrieving memories, automatically include both project-scoped and relevant global-scoped memories, with appropriate score weighting to prioritize project context. + +## Requirements + +### Requirement: Dual-scope parallel query + +The system MUST query both the active project scope and the global scope in parallel when executing `memory_search`. + +#### Scenario: Dual-scope search +- **WHEN** user executes `memory_search` with query "docker alpine" +- **THEN** the system queries memories in both `project:` and `global` scopes +- **AND** results are merged into a single ranked list + +### Requirement: Global score discount + +The system MUST apply a configurable discount factor (default: 0.7) to global scope scores during merge to prevent drowning out project-specific context. 
+ +#### Scenario: Score calculation +- **WHEN** a project memory scores 0.9 and a global memory scores 0.9 +- **THEN** the project memory retains 0.9 +- **AND** the global memory is discounted to 0.63 (0.9 × 0.7) + +### Requirement: Scope metadata in results + +The system MUST include scope information in recall results so users can identify the source of each memory. + +#### Scenario: Result metadata +- **WHEN** recall results are returned +- **THEN** each result includes `metadata.scope: "project"` or `metadata.scope: "global"` +- **AND** each result includes `metadata.source: "global"` for global memories (distinct from project source) + +### Requirement: Global scope inclusion toggle + +The system MUST respect a configuration option `includeGlobalScope` (default: `true`) to control whether global memories are included in recall. + +#### Scenario: Global inclusion disabled +- **WHEN** `includeGlobalScope` is set to `false` +- **THEN** `memory_search` only queries the project scope +- **AND** no global memories appear in results + +### Requirement: Dual-scope recall for auto-recall + +The system MUST also apply dual-scope recall during automatic system-transform recall, not just for manual `memory_search`. + +#### Scenario: Auto-recall includes global +- **WHEN** the system performs automatic context injection during system.transform +- **THEN** global memories relevant to the query are included with appropriate discounting diff --git a/openspec/specs/memory-global-detection/spec.md b/openspec/specs/memory-global-detection/spec.md new file mode 100644 index 0000000..f471a23 --- /dev/null +++ b/openspec/specs/memory-global-detection/spec.md @@ -0,0 +1,46 @@ +# memory-global-detection Specification + +## Purpose + +Automatically detect memory content that may be applicable across projects using heuristic keyword matching, and prompt the user to confirm promotion to global scope. 
+ +## Requirements + +### Requirement: Global keyword detection + +The system MUST analyze memory content against a predefined list of cross-project keywords and calculate a match score. + +#### Scenario: High keyword match triggers promotion prompt +- **WHEN** memory content matches 2 or more global keywords +- **THEN** the system presents a promotion prompt to the user + +#### Scenario: Low keyword match does not trigger prompt +- **WHEN** memory content matches fewer than 2 global keywords +- **THEN** no promotion prompt is shown and memory is stored as project-scoped + +### Requirement: Keyword list coverage + +The system MUST check for keywords from these categories: +- Linux distributions (alpine, debian, ubuntu, centos, fedora, arch) +- Containers (docker, dockerfile, docker-compose, containerd) +- Orchestration (kubernetes, k8s, helm, kubectl) +- Shells/Systems (bash, shell, linux, unix, posix, busybox) +- Web servers (nginx, apache, caddy) +- Databases (postgres, postgresql, mysql, redis, mongodb, sqlite) +- Cloud platforms (aws, gcp, azure, digitalocean) +- Version control (git, github, gitlab, bitbucket) +- Protocols (api, rest, graphql, grpc, http, https) +- Package managers (npm, yarn, pnpm, pip, cargo, make, cmake) + +### Requirement: Detection does not block storage + +The system MUST NOT block memory storage while awaiting promotion confirmation. + +#### Scenario: Memory stored while awaiting confirmation +- **WHEN** detection triggers promotion prompt +- **THEN** the memory is stored as project-scoped immediately +- **AND** the promotion prompt is presented asynchronously + +### Requirement: Keyword detection configurable + +The system MUST allow configuration of the global detection threshold via `globalDetectionThreshold` config (default: 2). 
diff --git a/openspec/specs/memory-global-list/spec.md b/openspec/specs/memory-global-list/spec.md new file mode 100644 index 0000000..79dd87b --- /dev/null +++ b/openspec/specs/memory-global-list/spec.md @@ -0,0 +1,42 @@ +# memory-global-list Specification + +## Purpose + +Provide a tool for users to view and search all global-scoped memories across projects. + +## Requirements + +### Requirement: List global memories tool + +The system MUST provide a `memory_global_list` tool that returns all memories with `scope: "global"`. + +#### Scenario: List all global memories +- **WHEN** user invokes `memory_global_list` +- **THEN** the system returns all global-scoped memories with their IDs, content, and timestamps + +#### Scenario: Search within global memories +- **WHEN** user invokes `memory_global_list` with a search query +- **THEN** the system returns global memories matching the query, ranked by relevance + +### Requirement: Global memory details + +The system MUST include usage statistics for each global memory. + +#### Scenario: Memory usage tracking +- **WHEN** global memories are returned +- **THEN** each entry includes: + - `lastRecalled`: timestamp of most recent recall + - `recallCount`: total number of times recalled + - `projectCount`: number of distinct projects that have recalled this memory + +### Requirement: Global memory filtering + +The system MUST support filtering global memories by usage status. 
+ +#### Scenario: Filter unused memories +- **WHEN** user invokes `memory_global_list` with `filter: "unused"` +- **THEN** only memories not recalled in the past 30 days are returned + +#### Scenario: Filter frequently used memories +- **WHEN** user invokes `memory_global_list` with `filter: "frequently_used"` +- **THEN** memories with high recall counts are prioritized in results diff --git a/openspec/specs/memory-management-commands/spec.md b/openspec/specs/memory-management-commands/spec.md index a3ac0ab..503f0f6 100644 --- a/openspec/specs/memory-management-commands/spec.md +++ b/openspec/specs/memory-management-commands/spec.md @@ -75,3 +75,41 @@ The system MUST provide a structured command for users to report whether a recal - **WHEN** a user submits usefulness feedback for a recalled memory result - **THEN** the system stores a helpfulness evaluation event that can be aggregated in recall-quality reporting +### Requirement: Scope promotion tool + +The system MUST provide a `memory_scope_promote` tool that accepts a memory ID and confirmation flag to promote memories from project to global scope. + +#### Scenario: User promotes a memory +- **WHEN** user invokes `memory_scope_promote` with a valid memory ID and `confirm: true` +- **THEN** the memory's scope is updated to `"global"` +- **AND** the tool returns confirmation with the updated memory details + +#### Scenario: Promotion without confirmation +- **WHEN** user invokes `memory_scope_promote` without confirmation +- **THEN** the tool returns guidance for safe execution + +### Requirement: Scope demotion tool + +The system MUST provide a `memory_scope_demote` tool that accepts a memory ID and confirmation flag to demote memories from global to project scope. 
+ +#### Scenario: User demotes a memory +- **WHEN** user invokes `memory_scope_demote` with a valid memory ID and `confirm: true` +- **THEN** the memory's scope is updated to `"project"` +- **AND** the tool returns confirmation with the updated memory details + +### Requirement: Global memory list tool + +The system MUST provide a `memory_global_list` tool that returns all memories with `scope: "global"` and supports optional search query and filtering. + +#### Scenario: List all global memories +- **WHEN** user invokes `memory_global_list` +- **THEN** the system returns all global-scoped memories with their IDs, content, timestamps, and usage statistics + +#### Scenario: Search within global memories +- **WHEN** user invokes `memory_global_list` with a search query +- **THEN** the system returns global memories matching the query, ranked by relevance + +#### Scenario: Filter unused global memories +- **WHEN** user invokes `memory_global_list` with `filter: "unused"` +- **THEN** only memories not recalled in the past 30 days are returned + diff --git a/openspec/specs/memory-scope-field/spec.md b/openspec/specs/memory-scope-field/spec.md new file mode 100644 index 0000000..aaf1a7e --- /dev/null +++ b/openspec/specs/memory-scope-field/spec.md @@ -0,0 +1,43 @@ +# memory-scope-field Specification + +## Purpose + +Add a `scope` metadata field to memory entries to distinguish between project-specific and globally shared knowledge. + +## Requirements + +### Requirement: Memory scope field + +The system MUST store a `scope` field on every memory entry with value `"project"` or `"global"`, defaulting to `"project"` when not specified. 
+ +#### Scenario: New memory entry inherits project scope +- **WHEN** a new memory is stored without explicit scope +- **THEN** the entry is stored with `scope: "project"` + +#### Scenario: Global memory promotion +- **WHEN** a memory is promoted to global scope +- **THEN** the entry's `scope` field is updated to `"global"` + +#### Scenario: Existing memories maintain project scope +- **WHEN** existing memories without explicit scope are queried +- **THEN** they are treated as `scope: "project"` for backward compatibility + +### Requirement: Scope field queryable + +The system MUST support filtering memories by scope field during storage and retrieval operations. + +#### Scenario: Query only project memories +- **WHEN** retrieval is constrained to project scope +- **THEN** memories with `scope: "global"` are excluded from results + +#### Scenario: Query only global memories +- **WHEN** retrieval requests global scope only +- **THEN** memories with `scope: "project"` are excluded from results + +### Requirement: Scope persisted + +The system MUST persist the scope field to LanceDB storage and include it in all memory entry responses. + +#### Scenario: Scope survives restart +- **WHEN** the system restarts after storing a global-scoped memory +- **THEN** the memory is still returned with `scope: "global"` diff --git a/openspec/specs/memory-scope-promotion/spec.md b/openspec/specs/memory-scope-promotion/spec.md new file mode 100644 index 0000000..1a09bda --- /dev/null +++ b/openspec/specs/memory-scope-promotion/spec.md @@ -0,0 +1,67 @@ +# memory-scope-promotion Specification + +## Purpose + +Provide tools for users to manually promote project-scoped memories to global scope and to demote unused global memories back to project scope. + +## Requirements + +### Requirement: Manual promotion tool + +The system MUST provide a `memory_scope_promote` tool that accepts a memory ID and promotes it from project to global scope. 
+ +#### Scenario: User promotes a memory +- **WHEN** user invokes `memory_scope_promote` with a valid memory ID +- **THEN** the memory's scope is updated to `"global"` +- **AND** the tool returns confirmation with the updated memory details + +#### Scenario: Promotion of non-existent memory +- **WHEN** user invokes `memory_scope_promote` with a non-existent memory ID +- **THEN** the tool returns an error with guidance + +### Requirement: Manual demotion tool + +The system MUST provide a `memory_scope_demote` tool that accepts a memory ID and demotes it from global to project scope. + +#### Scenario: User demotes a memory +- **WHEN** user invokes `memory_scope_demote` with a valid memory ID +- **THEN** the memory's scope is updated to `"project"` +- **AND** the tool returns confirmation with the updated memory details + +#### Scenario: Demotion of project-scoped memory +- **WHEN** user invokes `memory_scope_demote` on a project-scoped memory +- **THEN** the tool returns an error indicating scope is already project + +### Requirement: Confirmation required for promotion/demotion + +The system MUST require explicit confirmation signal before executing scope changes. + +#### Scenario: Promotion without confirmation +- **WHEN** user invokes `memory_scope_promote` without confirmation +- **THEN** the tool returns guidance for safe execution + +### Requirement: Promotion prompt from detection + +When the global detection heuristic triggers, the system MUST present a structured prompt offering the user choices. 
+ +#### Scenario: Detection prompt options +- **WHEN** global detection triggers during memory storage +- **THEN** the user is presented with options: + - "Promote to global scope" (stores the memory as global) + - "Keep as project scope" (keeps the memory as project-scoped) + - "Dismiss" (same as keep as project scope) + +### Requirement: Unused global detection + +The system MUST track when global memories are recalled and identify those not used within a configurable time window. + +#### Scenario: Unused global memory detected +- **WHEN** a global memory has not been recalled in the past 30 days (configurable via `unusedDaysThreshold`) +- **THEN** the system presents a demotion prompt listing unused global memories + +#### Scenario: Demotion prompt options for unused memories +- **WHEN** unused global memories are detected +- **THEN** the user is presented with options: + - "Demote all unused" (moves all to project scope) + - "Review individually" (allows per-memory demotion) + - "Keep all" (dismisses the prompt) diff --git a/openspec/specs/memory-usage-stats/spec.md b/openspec/specs/memory-usage-stats/spec.md new file mode 100644 index 0000000..5cd218e --- /dev/null +++ b/openspec/specs/memory-usage-stats/spec.md @@ -0,0 +1,61 @@ +# memory-usage-stats Specification + +## Purpose + +Track recall usage statistics for each memory entry to enable smart unused memory detection and provide usage insights. 
+ +## Requirements + +### Requirement: Usage statistics fields + +The system MUST store usage statistics on each memory record: +- `lastRecalled`: Unix timestamp of most recent recall (0 if never recalled) +- `recallCount`: Total number of times this memory was returned in recall results +- `projectCount`: Number of distinct project scopes that have recalled this memory + +#### Scenario: New memory has zero usage +- **WHEN** a new memory is stored +- **THEN** `lastRecalled` is 0, `recallCount` is 0, and `projectCount` is 0 + +#### Scenario: Usage fields are queryable +- **WHEN** memories are listed or searched +- **THEN** usage statistics are included in the response + +### Requirement: Usage tracking on recall + +The system MUST update usage statistics when a memory is returned in recall results. + +#### Scenario: Global memory recalled in search +- **WHEN** a global memory is returned in `memory_search` results +- **THEN** `recallCount` is incremented by 1 +- **AND** `lastRecalled` is updated to current timestamp +- **AND** `projectCount` is updated if the project scope is new + +#### Scenario: Global memory recalled in auto-inject +- **WHEN** a global memory is injected into system context +- **THEN** `recallCount` is incremented by 1 +- **AND** `lastRecalled` is updated to current timestamp + +### Requirement: Smart unused detection + +The system MUST use actual recall usage to identify unused global memories. + +#### Scenario: Memory not recalled in threshold period +- **WHEN** a global memory has `lastRecalled` older than `unusedDaysThreshold` +- **THEN** the memory is flagged as unused + +#### Scenario: Memory recalled recently +- **WHEN** a global memory has `lastRecalled` within `unusedDaysThreshold` +- **THEN** the memory is NOT flagged as unused, regardless of storage age + +### Requirement: Usage statistics in global list + +The system MUST include usage statistics in `memory_global_list` output. 
+ +#### Scenario: List global memories with usage +- **WHEN** user invokes `memory_global_list` +- **THEN** each entry includes `lastRecalled`, `recallCount`, and `projectCount` + +#### Scenario: Filter by unused +- **WHEN** user invokes `memory_global_list` with `filter: "unused"` +- **THEN** only memories with `lastRecalled` older than threshold are returned diff --git a/package.json b/package.json index 3770f48..ee834bd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "lancedb-opencode-pro", - "version": "0.1.6", + "version": "0.2.0", "description": "LanceDB-backed long-term memory provider for OpenCode", "type": "module", "main": "dist/index.js", diff --git a/src/config.ts b/src/config.ts index fffc51d..3b83df0 100644 --- a/src/config.ts +++ b/src/config.ts @@ -89,6 +89,19 @@ export function resolveMemoryConfig(config: Config | undefined, worktree?: strin importanceWeight, }, includeGlobalScope: toBoolean(process.env.LANCEDB_OPENCODE_PRO_INCLUDE_GLOBAL_SCOPE ?? raw.includeGlobalScope, true), + globalDetectionThreshold: Math.max( + 1, + Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_GLOBAL_DETECTION_THRESHOLD ?? raw.globalDetectionThreshold, 2)), + ), + globalDiscountFactor: clamp( + toNumber(process.env.LANCEDB_OPENCODE_PRO_GLOBAL_DISCOUNT_FACTOR ?? raw.globalDiscountFactor, 0.7), + 0, + 1, + ), + unusedDaysThreshold: Math.max( + 1, + Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_UNUSED_DAYS_THRESHOLD ?? raw.unusedDaysThreshold, 30)), + ), minCaptureChars: Math.max( 30, Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_MIN_CAPTURE_CHARS ?? 
raw.minCaptureChars, 80)), diff --git a/src/extract.ts b/src/extract.ts index bd1545b..c96c740 100644 --- a/src/extract.ts +++ b/src/extract.ts @@ -16,6 +16,69 @@ const DECISION_SIGNALS = ["decide", "decision", "tradeoff", "architecture", "採 const FACT_SIGNALS = ["because", "root cause", "原因", "由於"]; const PREF_SIGNALS = ["prefer", "preference", "偏好", "習慣"]; +const GLOBAL_KEYWORDS = [ + // Distributions + "alpine", + "debian", + "ubuntu", + "centos", + "fedora", + "arch", + // Containers + "docker", + "dockerfile", + "docker-compose", + "containerd", + // Orchestration + "kubernetes", + "k8s", + "helm", + "kubectl", + // Shells/Systems + "bash", + "shell", + "linux", + "unix", + "posix", + "busybox", + // Web servers + "nginx", + "apache", + "caddy", + // Databases + "postgres", + "postgresql", + "mysql", + "redis", + "mongodb", + "sqlite", + // Cloud + "aws", + "gcp", + "azure", + "digitalocean", + // VCS + "git", + "github", + "gitlab", + "bitbucket", + // Protocols + "api", + "rest", + "graphql", + "grpc", + "http", + "https", + // Package managers + "npm", + "yarn", + "pnpm", + "pip", + "cargo", + "make", + "cmake", +]; + export function extractCaptureCandidate(text: string, minChars: number): CaptureCandidateResult { const normalized = text.trim(); if (normalized.length < minChars) { @@ -50,3 +113,18 @@ function clipText(text: string, maxLen: number): string { if (text.length <= maxLen) return text; return `${text.slice(0, maxLen - 3)}...`; } + +export function detectGlobalWorthiness(content: string): number { + const lower = content.toLowerCase(); + let matches = 0; + for (const keyword of GLOBAL_KEYWORDS) { + if (lower.includes(keyword)) { + matches += 1; + } + } + return matches; +} + +export function isGlobalCandidate(content: string, threshold: number): boolean { + return detectGlobalWorthiness(content) >= threshold; +} diff --git a/src/index.ts b/src/index.ts index eca41f0..032da0c 100644 --- a/src/index.ts +++ b/src/index.ts @@ -4,7 +4,7 @@ import type 
{ Part, TextPart } from "@opencode-ai/sdk"; import { resolveMemoryConfig } from "./config.js"; import { createEmbedder } from "./embedder.js"; import type { Embedder } from "./embedder.js"; -import { extractCaptureCandidate } from "./extract.js"; +import { extractCaptureCandidate, isGlobalCandidate } from "./extract.js"; import { isTcpPortAvailable, parsePortReservations, planPorts, reservationKey } from "./ports.js"; import { buildScopeFilter, deriveProjectScope } from "./scope.js"; import { MemoryStore } from "./store.js"; @@ -67,6 +67,7 @@ const plugin: Plugin = async (input) => { recencyBoost: state.config.retrieval.recencyBoost, recencyHalfLifeHours: state.config.retrieval.recencyHalfLifeHours, importanceWeight: state.config.retrieval.importanceWeight, + globalDiscountFactor: state.config.globalDiscountFactor, }); await state.store.putEvent({ @@ -86,6 +87,10 @@ const plugin: Plugin = async (input) => { if (results.length === 0) return; + for (const result of results) { + state.store.updateMemoryUsage(result.record.id, activeScope, scopes).catch(() => {}); + } + const memoryBlock = [ "[Memory Recall - optional historical context]", ...results.map((item, index) => `${index + 1}. 
[${item.record.id}] (${item.record.scope}) ${item.record.text}`), @@ -127,6 +132,7 @@ const plugin: Plugin = async (input) => { recencyBoost: state.config.retrieval.recencyBoost, recencyHalfLifeHours: state.config.retrieval.recencyHalfLifeHours, importanceWeight: state.config.retrieval.importanceWeight, + globalDiscountFactor: state.config.globalDiscountFactor, }); await state.store.putEvent({ @@ -143,6 +149,10 @@ const plugin: Plugin = async (input) => { if (results.length === 0) return "No relevant memory found."; + for (const result of results) { + state.store.updateMemoryUsage(result.record.id, activeScope, scopes).catch(() => {}); + } + return results .map((item, idx) => { const percent = Math.round(item.score * 100); @@ -314,6 +324,107 @@ const plugin: Plugin = async (input) => { return JSON.stringify(summary, null, 2); }, }), + memory_scope_promote: tool({ + description: "Promote a memory from project scope to global scope for cross-project sharing", + args: { + id: tool.schema.string().min(6), + confirm: tool.schema.boolean().default(false), + }, + execute: async (args, context) => { + await state.ensureInitialized(); + if (!state.initialized) return unavailableMessage(state.config.embedding.provider); + if (!args.confirm) { + return "Rejected: memory_scope_promote requires confirm=true."; + } + const activeScope = deriveProjectScope(context.worktree); + const scopes = buildScopeFilter(activeScope, state.config.includeGlobalScope); + const exists = await state.store.hasMemory(args.id, scopes); + if (!exists) { + return `Memory ${args.id} not found in current scope.`; + } + const updated = await state.store.updateMemoryScope(args.id, "global", scopes); + if (!updated) { + return `Failed to promote memory ${args.id}.`; + } + return `Promoted memory ${args.id} to global scope.`; + }, + }), + memory_scope_demote: tool({ + description: "Demote a memory from global scope to project scope", + args: { + id: tool.schema.string().min(6), + confirm: 
tool.schema.boolean().default(false), + scope: tool.schema.string().optional(), + }, + execute: async (args, context) => { + await state.ensureInitialized(); + if (!state.initialized) return unavailableMessage(state.config.embedding.provider); + if (!args.confirm) { + return "Rejected: memory_scope_demote requires confirm=true."; + } + const projectScope = args.scope ?? deriveProjectScope(context.worktree); + const globalExists = await state.store.hasMemory(args.id, ["global"]); + if (!globalExists) { + return `Memory ${args.id} not found in global scope or is not a global memory.`; + } + const updated = await state.store.updateMemoryScope(args.id, projectScope, ["global"]); + if (!updated) { + return `Failed to demote memory ${args.id}.`; + } + return `Demoted memory ${args.id} from global to ${projectScope}.`; + }, + }), + memory_global_list: tool({ + description: "List all global-scoped memories, optionally filtered by search query or unused status", + args: { + query: tool.schema.string().optional(), + filter: tool.schema.string().optional(), + limit: tool.schema.number().int().min(1).max(100).default(20), + }, + execute: async (args) => { + await state.ensureInitialized(); + if (!state.initialized) return unavailableMessage(state.config.embedding.provider); + + let records: import("./types.js").MemoryRecord[]; + if (args.filter === "unused") { + records = await state.store.getUnusedGlobalMemories(state.config.unusedDaysThreshold, args.limit); + } else if (args.query) { + let queryVector: number[] = []; + try { + queryVector = await state.embedder.embed(args.query); + } catch { + queryVector = []; + } + records = await state.store.search({ + query: args.query, + queryVector, + scopes: ["global"], + limit: args.limit, + vectorWeight: 0.7, + bm25Weight: 0.3, + minScore: 0.2, + globalDiscountFactor: 1.0, + }).then((results) => results.map((r) => r.record)); + } else { + records = await state.store.readGlobalMemories(args.limit); + } + + if (records.length === 0) { 
+ return "No global memories found."; + } + + return records + .map((record, idx) => { + const date = new Date(record.timestamp).toISOString().split("T")[0]; + const lastRecalled = record.lastRecalled > 0 + ? new Date(record.lastRecalled).toISOString().split("T")[0] + : "never"; + return `${idx + 1}. [${record.id}] ${record.text.slice(0, 80)}... + Stored: ${date} | Recalled: ${lastRecalled} | Count: ${record.recallCount} | Projects: ${record.projectCount}`; + }) + .join("\n"); + }, + }), memory_port_plan: tool({ description: "Plan non-conflicting host ports for compose services and optionally persist reservations", args: { @@ -385,6 +496,9 @@ const plugin: Plugin = async (input) => { scope: "global", importance: 0.8, timestamp: Date.now(), + lastRecalled: 0, + recallCount: 0, + projectCount: 0, schemaVersion: SCHEMA_VERSION, embeddingModel: state.config.embedding.model, vectorDim: vector.length, @@ -559,6 +673,9 @@ async function flushAutoCapture( scope: activeScope, importance: result.candidate.importance, timestamp: Date.now(), + lastRecalled: 0, + recallCount: 0, + projectCount: 0, schemaVersion: SCHEMA_VERSION, embeddingModel: state.config.embedding.model, vectorDim: vector.length, diff --git a/src/store.ts b/src/store.ts index 3df3069..c5bc21c 100644 --- a/src/store.ts +++ b/src/store.ts @@ -67,6 +67,9 @@ export class MemoryStore { scope: "global", importance: 0, timestamp: 0, + lastRecalled: 0, + recallCount: 0, + projectCount: 0, schemaVersion: 1, embeddingModel: "bootstrap", vectorDim, @@ -149,6 +152,7 @@ export class MemoryStore { recencyBoost?: boolean; recencyHalfLifeHours?: number; importanceWeight?: number; + globalDiscountFactor?: number; }): Promise { const cached = await this.getCachedScopes(params.scopes); if (cached.records.length === 0) return []; @@ -165,6 +169,7 @@ export class MemoryStore { const recencyBoostEnabled = params.recencyBoost ?? true; const recencyHalfLifeHours = Math.max(1, params.recencyHalfLifeHours ?? 
72); const importanceWeight = clampImportanceWeight(params.importanceWeight ?? 0.4); + const globalDiscountFactor = params.globalDiscountFactor ?? 1.0; const candidates = cached.records .filter((record) => params.queryVector.length === 0 || record.vector.length === params.queryVector.length) @@ -172,7 +177,8 @@ export class MemoryStore { const recordNorm = cached.norms.get(record.id) ?? vecNorm(record.vector); const vectorScore = useVectorChannel ? fastCosine(params.queryVector, record.vector, queryNorm, recordNorm) : 0; const bm25Score = useBm25Channel ? bm25LikeScore(queryTokens, cached.tokenized[index], cached.idf) : 0; - return { record, vectorScore, bm25Score }; + const isGlobal = record.scope === "global"; + return { record, vectorScore, bm25Score, isGlobal }; }); if (candidates.length === 0) return []; @@ -197,7 +203,8 @@ export class MemoryStore { ? computeRecencyMultiplier(item.record.timestamp, recencyHalfLifeHours) : 1; const importanceFactor = 1 + importanceWeight * clampImportance(item.record.importance); - const score = rrfScore * recencyFactor * importanceFactor; + const scopeFactor = item.isGlobal ? 
globalDiscountFactor : 1.0; + const score = rrfScore * recencyFactor * importanceFactor * scopeFactor; return { record: item.record, score, @@ -221,6 +228,30 @@ export class MemoryStore { return true; } + async updateMemoryScope(id: string, newScope: string, scopes: string[]): Promise { + const rows = await this.readByScopes(scopes); + const match = rows.find((row) => row.id === id); + if (!match) return false; + + await this.requireTable().delete(`id = '${escapeSql(id)}'`); + this.invalidateScope(match.scope); + + await this.requireTable().add([{ ...match, scope: newScope }]); + this.invalidateScope(newScope); + return true; + } + + async readGlobalMemories(limit: number = 100): Promise { + const rows = await this.readByScopes(["global"]); + return rows.sort((a, b) => b.timestamp - a.timestamp).slice(0, limit); + } + + async getUnusedGlobalMemories(unusedDaysThreshold: number, limit: number = 100): Promise { + const cutoffTime = Date.now() - unusedDaysThreshold * 24 * 60 * 60 * 1000; + const rows = await this.readByScopes(["global"]); + return rows.filter((row) => row.lastRecalled > 0 && row.lastRecalled < cutoffTime).slice(0, limit); + } + async clearScope(scope: string): Promise { const rows = await this.readByScopes([scope]); if (rows.length === 0) return 0; @@ -255,6 +286,46 @@ export class MemoryStore { return rows.some((row) => row.id === id); } + async updateMemoryUsage(id: string, projectScope: string, scopes: string[]): Promise { + const rows = await this.readByScopes(scopes); + const match = rows.find((row) => row.id === id); + if (!match) return; + + const now = Date.now(); + const newRecallCount = match.recallCount + 1; + + let newProjectCount = match.projectCount; + let metadataJson = match.metadataJson; + + if (match.scope === "global" && projectScope) { + const projects = extractRecalledProjects(metadataJson); + if (!projects.has(projectScope)) { + projects.add(projectScope); + if (projects.size > 100) { + const arr = Array.from(projects); + 
arr.splice(0, arr.length - 100); + metadataJson = JSON.stringify({ recalledProjects: arr }); + } else { + metadataJson = JSON.stringify({ recalledProjects: Array.from(projects) }); + } + newProjectCount = projects.size; + } + } + + await this.requireTable().delete(`id = '${escapeSql(id)}'`); + this.invalidateScope(match.scope); + + await this.requireTable().add([{ + ...match, + lastRecalled: now, + recallCount: newRecallCount, + projectCount: newProjectCount, + metadataJson, + }]); + + this.invalidateScope(match.scope); + } + async listEvents(scopes: string[], limit: number): Promise { const rows = await this.readEventsByScopes(scopes); return rows.sort((a, b) => b.timestamp - a.timestamp).slice(0, limit); @@ -547,6 +618,9 @@ function normalizeRow(row: Record): MemoryRecord | null { scope: row.scope, importance: Number(row.importance ?? 0.5), timestamp: Number(row.timestamp ?? Date.now()), + lastRecalled: Number(row.lastRecalled ?? 0), + recallCount: Number(row.recallCount ?? 0), + projectCount: Number(row.projectCount ?? 0), schemaVersion: Number(row.schemaVersion ?? 1), embeddingModel: String(row.embeddingModel ?? "unknown"), vectorDim: Number(row.vectorDim ?? 
vector.length), @@ -712,3 +786,15 @@ function bm25LikeScore(query: string[], doc: string[], idf: Map) return 1 - Math.exp(-score); } + +function extractRecalledProjects(metadataJson: string): Set { + try { + const metadata = JSON.parse(metadataJson); + if (metadata && Array.isArray(metadata.recalledProjects)) { + return new Set(metadata.recalledProjects); + } + } catch { + // ignore parse errors + } + return new Set(); +} diff --git a/src/types.ts b/src/types.ts index 527859c..520d82b 100644 --- a/src/types.ts +++ b/src/types.ts @@ -18,6 +18,8 @@ export type FeedbackType = "missing" | "wrong" | "useful"; export type RecallSource = "system-transform" | "manual-search"; +export type MemoryScope = "project" | "global"; + export interface EmbeddingConfig { provider: EmbeddingProvider; model: string; @@ -43,6 +45,9 @@ export interface MemoryRuntimeConfig { embedding: EmbeddingConfig; retrieval: RetrievalConfig; includeGlobalScope: boolean; + globalDetectionThreshold: number; + globalDiscountFactor: number; + unusedDaysThreshold: number; minCaptureChars: number; maxEntriesPerScope: number; } @@ -55,6 +60,9 @@ export interface MemoryRecord { scope: string; importance: number; timestamp: number; + lastRecalled: number; + recallCount: number; + projectCount: number; schemaVersion: number; embeddingModel: string; vectorDim: number; diff --git a/test/setup.ts b/test/setup.ts index e9687cc..d1dcb02 100644 --- a/test/setup.ts +++ b/test/setup.ts @@ -63,6 +63,9 @@ export function createTestRecord(overrides: Partial = {}): MemoryR scope: overrides.scope ?? "project:test", importance: overrides.importance ?? 0.5, timestamp: overrides.timestamp ?? Date.now(), + lastRecalled: overrides.lastRecalled ?? 0, + recallCount: overrides.recallCount ?? 0, + projectCount: overrides.projectCount ?? 0, schemaVersion: overrides.schemaVersion ?? 1, embeddingModel: overrides.embeddingModel ?? DEFAULT_EMBEDDING_MODEL, vectorDim: overrides.vectorDim ?? 
vector.length, @@ -77,6 +80,9 @@ export function assertRecordsMatch(actual: MemoryRecord, expected: MemoryRecord) assert.equal(actual.scope, expected.scope); assert.equal(actual.importance, expected.importance); assert.equal(actual.timestamp, expected.timestamp); + assert.equal(actual.lastRecalled, expected.lastRecalled); + assert.equal(actual.recallCount, expected.recallCount); + assert.equal(actual.projectCount, expected.projectCount); assert.equal(actual.schemaVersion, expected.schemaVersion); assert.equal(actual.embeddingModel, expected.embeddingModel); assert.equal(actual.vectorDim, expected.vectorDim);