@launchapp-dev/ao-memory-mcp 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/migrate.ts ADDED
@@ -0,0 +1,250 @@
1
#!/usr/bin/env -S node --experimental-strip-types
// FIX: the shebang must use `env -S`. With a plain `#!/usr/bin/env node --experimental-strip-types`,
// Linux passes "node --experimental-strip-types" to env as ONE argument, so env
// tries to exec a program with that literal name and fails. `-S` tells env to
// split the string into separate arguments.
/**
 * Migration utility: imports existing .ao/memory/*.md files into the memory database.
 *
 * Usage:
 *   node --experimental-strip-types migrate.ts [--repos-dir <path>] [--db <path>]
 *
 * Defaults:
 *   --repos-dir: scans current directory and subdirectories for .ao/memory/
 *   --db: ~/.ao/memory.db
 */
import { readdirSync, readFileSync, existsSync, statSync } from "node:fs";
import { join, basename, dirname } from "node:path";
import { resolveDbPath, initDb, contentHash, now } from "./src/db.ts";

// --- CLI argument parsing ---
// Supported flags: --repos-dir <path> (root to scan; default ".")
//                  --db <path>        (SQLite file; default resolved by resolveDbPath)
// Unknown flags are silently ignored; flag values are consumed via ++i.
const argv = process.argv.slice(2);
let reposDir = ".";
let dbPath: string | undefined;
for (let i = 0; i < argv.length; i++) {
  if (argv[i] === "--repos-dir" && argv[i + 1]) reposDir = argv[++i];
  if (argv[i] === "--db" && argv[i + 1]) dbPath = argv[++i];
}

// Open (and initialize, if needed) the memory database.
const db = initDb(resolveDbPath(dbPath));
26
+
27
// Map a memory filename to the agent role it belongs to.
// (Keyed by filename; files not listed here are ignored by the importer.)
const roleToFile: Record<string, string> = {
  "planner.md": "planner",
  "product-owner.md": "product-owner",
  "reconciler.md": "reconciler",
  "reviewer.md": "reviewer",
  "qa-tester.md": "qa-tester",
};

// Map lowercased section-header keywords to the entry_type stored in the
// database. guessEntryType does substring matching in declaration order,
// so the first matching keyword wins.
const sectionToEntryType: Record<string, string> = {
  "tasks enqueued": "task_dispatch",
  "recently enqueued": "task_dispatch",
  "rework dispatched": "task_dispatch",
  "rebase dispatched": "task_dispatch",
  "tasks skipped": "observation",
  "capacity notes": "observation",
  "queue status": "observation",
  "pipeline health": "observation",
  "decisions": "decision",
  "tasks created": "decision",
  "features assessed": "observation",
  "gaps identified": "observation",
  "tasks unblocked": "action",
  "tasks marked done": "action",
  "queue cleaned": "action",
  "actions log": "action",
  "prs merged": "review",
  "prs with changes requested": "review",
  "prs closed": "review",
  "known patterns": "pattern",
  "log": "test_result",
  "test results": "test_result",
  "bugs filed": "test_result",
  "regressions": "test_result",
};
61
+
62
+ function guessEntryType(sectionHeader: string, agentRole: string): string {
63
+ const lower = sectionHeader.toLowerCase();
64
+ for (const [key, type] of Object.entries(sectionToEntryType)) {
65
+ if (lower.includes(key)) return type;
66
+ }
67
+ // Fallback by role
68
+ if (agentRole === "planner") return "task_dispatch";
69
+ if (agentRole === "product-owner") return "decision";
70
+ if (agentRole === "reconciler") return "action";
71
+ if (agentRole === "reviewer") return "review";
72
+ if (agentRole === "qa-tester") return "test_result";
73
+ return "observation";
74
+ }
75
+
76
/** One memory entry extracted from a markdown memory file by parseMemoryFile. */
interface ParsedEntry {
  date: string;      // YYYY-MM-DD pulled from a section header or "[date]" line
  title: string;     // first non-blank line of the block, truncated to 120 chars
  body: string;      // full trimmed block text
  entryType: string; // result of guessEntryType(section, role)
  taskId?: string;   // "TASK-<n>" if found anywhere in the body
  prNumber?: number; // <n> from "PR #<n>" / "#<n>" if found in the body
  runId?: string;    // "run <n>" if found in the body
}
85
+
86
/**
 * Parse one markdown memory file into discrete entries.
 *
 * Strategy: stream line-by-line, accumulating a "block" of lines. A block is
 * flushed (becomes a ParsedEntry) whenever a new "## section" header, a
 * "[YYYY-MM-DD]" dated line, or a "---" separator is hit, and once more at
 * EOF. Blocks seen before any date has been established are dropped.
 */
function parseMemoryFile(content: string, agentRole: string): ParsedEntry[] {
  const entries: ParsedEntry[] = [];
  const lines = content.split("\n");
  let currentSection = "";
  let currentDate = "";
  let currentBlock: string[] = [];

  // Turn the accumulated block into an entry; no-op when empty or undated.
  function flushBlock() {
    if (currentBlock.length === 0 || !currentDate) return;
    const body = currentBlock.join("\n").trim();
    if (!body) return;

    const entryType = guessEntryType(currentSection, agentRole);
    // Title = first non-blank line, truncated to 120 chars with "...".
    const firstLine = currentBlock.find(l => l.trim())?.trim() || "";
    const title = firstLine.length > 120 ? firstLine.slice(0, 117) + "..." : firstLine;

    // Opportunistically extract cross-references from the body text.
    const taskMatch = body.match(/TASK-\d+/);
    const prMatch = body.match(/(?:PR\s*#|#)(\d+)/);
    const runMatch = body.match(/run\s+(\d+)/i);

    entries.push({
      date: currentDate,
      title: title || `${agentRole} ${entryType} ${currentDate}`,
      body,
      entryType,
      taskId: taskMatch?.[0],
      prNumber: prMatch ? parseInt(prMatch[1]) : undefined,
      runId: runMatch ? `run ${runMatch[1]}` : undefined,
    });
  }

  for (const line of lines) {
    // "## Section" header: flush the previous block (under the OLD section),
    // then switch sections; the header may also carry the block date.
    const sectionMatch = line.match(/^##\s+(.+)/);
    if (sectionMatch) {
      flushBlock();
      currentBlock = [];
      currentSection = sectionMatch[1];

      // Check if section header contains a date
      const dateInHeader = currentSection.match(/(\d{4}-\d{2}-\d{2})/);
      if (dateInHeader) currentDate = dateInHeader[1];
      continue;
    }

    // "[YYYY-MM-DD]" line: starts a new dated block; the line itself is kept
    // as the block's first line (and thus usually becomes the title).
    const dateMatch = line.match(/\[(\d{4}-\d{2}-\d{2})\]/);
    if (dateMatch) {
      flushBlock();
      currentBlock = [];
      currentDate = dateMatch[1];
      currentBlock.push(line);
      continue;
    }

    // Horizontal rule — block boundary, flush without changing date/section.
    if (line.match(/^---\s*$/)) {
      flushBlock();
      currentBlock = [];
      continue;
    }

    currentBlock.push(line);
  }
  flushBlock();

  return entries;
}
155
+
156
+ function findMemoryDirs(rootDir: string): { project: string; memoryDir: string }[] {
157
+ const results: { project: string; memoryDir: string }[] = [];
158
+
159
+ // Check if rootDir itself has .ao/memory
160
+ const directMemory = join(rootDir, ".ao", "memory");
161
+ if (existsSync(directMemory) && statSync(directMemory).isDirectory()) {
162
+ results.push({ project: basename(rootDir), memoryDir: directMemory });
163
+ }
164
+
165
+ // Scan subdirectories
166
+ try {
167
+ for (const entry of readdirSync(rootDir, { withFileTypes: true })) {
168
+ if (!entry.isDirectory() || entry.name.startsWith(".")) continue;
169
+ const memDir = join(rootDir, entry.name, ".ao", "memory");
170
+ if (existsSync(memDir) && statSync(memDir).isDirectory()) {
171
+ results.push({ project: entry.name, memoryDir: memDir });
172
+ }
173
+ }
174
+ } catch {}
175
+
176
+ return results;
177
+ }
178
+
179
// --- Main migration ---
const ts = now();
const memoryDirs = findMemoryDirs(reposDir);
// summary[project][agentRole] = number of entries imported for that pair
const summary: Record<string, Record<string, number>> = {};
let totalImported = 0;
let totalSkipped = 0;

const insert = db.prepare(`
  INSERT INTO memory_entries (entry_type, agent_role, project, title, body, task_id, pr_number, run_id, status, tags, metadata, created_at, occurred_at, updated_at, content_hash)
  VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active', '[]', '{}', ?, ?, ?, ?)
`);

// Dedup lookup: entries are identified by content hash, so re-running the
// migration is idempotent.
const checkHash = db.prepare("SELECT id FROM memory_entries WHERE content_hash = ?");

// One transaction for the whole import (fast; all-or-nothing per run).
const importAll = db.transaction(() => {
  for (const { project, memoryDir } of memoryDirs) {
    summary[project] = {};
    try {
      for (const file of readdirSync(memoryDir)) {
        const agentRole = roleToFile[file];
        if (!agentRole) continue; // not a recognized per-role memory file

        const content = readFileSync(join(memoryDir, file), "utf-8");
        const entries = parseMemoryFile(content, agentRole);

        let count = 0;
        for (const entry of entries) {
          const hash = contentHash(entry.entryType, agentRole, project, entry.title, entry.body);
          if (checkHash.get(hash)) {
            totalSkipped++; // already present (e.g. from a previous run)
            continue;
          }

          insert.run(
            entry.entryType, agentRole, project,
            entry.title, entry.body,
            entry.taskId || null, entry.prNumber || null, entry.runId || null,
            ts, entry.date + "T00:00:00.000Z", ts, hash
          );
          count++;
          totalImported++;
        }
        summary[project][agentRole] = count;
      }
    } catch (err) {
      // Keep going with the remaining projects; report and continue.
      console.error(`Error processing ${project}: ${err}`);
    }
  }
});

importAll();

// --- Report: per-project / per-role import counts as an aligned table ---
console.log("\n=== Migration Complete ===\n");
console.log(`Scanned: ${memoryDirs.length} projects with .ao/memory/`);
console.log(`Imported: ${totalImported} entries`);
console.log(`Skipped (duplicates): ${totalSkipped}\n`);

// Table columns = roles that actually appeared, sorted.
const roles = [...new Set(Object.values(summary).flatMap(s => Object.keys(s)))].sort();
const header = ["Project", ...roles, "Total"].map(h => h.padEnd(16)).join(" | ");
console.log(header);
console.log("-".repeat(header.length));

for (const [project, counts] of Object.entries(summary).sort()) {
  const total = Object.values(counts).reduce((a, b) => a + b, 0);
  const row = [project, ...roles.map(r => String(counts[r] || 0)), String(total)]
    .map(v => v.padEnd(16))
    .join(" | ");
  console.log(row);
}

console.log(`\nDatabase: ${resolveDbPath(dbPath)}`);
package/package.json ADDED
@@ -0,0 +1,24 @@
1
+ {
2
+ "name": "@launchapp-dev/ao-memory-mcp",
3
+ "version": "1.0.0",
4
+ "description": "MCP server for agent memory management — structured recall, cross-project patterns, and lifecycle management for ao-cli agents",
5
+ "type": "module",
6
+ "bin": {
7
+ "ao-memory-mcp": "./src/server.ts"
8
+ },
9
+ "scripts": {
10
+ "start": "node --experimental-strip-types src/server.ts",
11
+ "migrate": "node --experimental-strip-types migrate.ts",
12
+ "build": "tsc --noEmit -p tsconfig.json"
13
+ },
14
+ "dependencies": {
15
+ "@modelcontextprotocol/sdk": "^1.0.0",
16
+ "better-sqlite3": "^11.0.0",
17
+ "zod": "^3.22.0"
18
+ },
19
+ "devDependencies": {
20
+ "@types/better-sqlite3": "^7.6.0",
21
+ "@types/node": "^22.0.0",
22
+ "typescript": "^5.0.0"
23
+ }
24
+ }
package/src/db.ts ADDED
@@ -0,0 +1,48 @@
1
+ import Database from "better-sqlite3";
2
+ import { createHash } from "node:crypto";
3
+ import { readFileSync } from "node:fs";
4
+ import { join, dirname } from "node:path";
5
+ import { mkdirSync } from "node:fs";
6
+ import { fileURLToPath } from "node:url";
7
+
8
+ const __dirname = dirname(fileURLToPath(import.meta.url));
9
+
10
+ export function resolveDbPath(cliDbPath?: string): string {
11
+ if (cliDbPath) return cliDbPath;
12
+ if (process.env.AO_MEMORY_DB) return process.env.AO_MEMORY_DB;
13
+ const home = process.env.HOME || process.env.USERPROFILE || ".";
14
+ const aoDir = join(home, ".ao");
15
+ mkdirSync(aoDir, { recursive: true });
16
+ return join(aoDir, "memory.db");
17
+ }
18
+
19
/**
 * Open (creating if absent) the SQLite database at dbPath and apply the
 * schema. schema.sql lives next to this module and uses IF NOT EXISTS
 * throughout, so applying it to an existing database is a no-op.
 */
export function initDb(dbPath: string): Database.Database {
  const db = new Database(dbPath);
  const schema = readFileSync(join(__dirname, "schema.sql"), "utf-8");
  db.exec(schema);
  return db;
}
25
+
26
+ export function contentHash(
27
+ entryType: string,
28
+ agentRole: string,
29
+ project: string,
30
+ title: string,
31
+ body: string
32
+ ): string {
33
+ return createHash("sha256")
34
+ .update(`${entryType}\0${agentRole}\0${project}\0${title}\0${body}`)
35
+ .digest("hex");
36
+ }
37
+
38
/** Current timestamp as an ISO-8601 UTC string (used for all *_at columns). */
export function now(): string {
  return new Date().toISOString();
}
41
+
42
+ export function jsonResult(data: unknown) {
43
+ return { content: [{ type: "text" as const, text: JSON.stringify(data, null, 2) }] };
44
+ }
45
+
46
+ export function errorResult(message: string) {
47
+ return { content: [{ type: "text" as const, text: JSON.stringify({ error: message }) }], isError: true };
48
+ }
package/src/schema.sql ADDED
@@ -0,0 +1,113 @@
1
-- Schema for the ao agent memory store (SQLite).
-- Every statement is guarded with IF NOT EXISTS, so initDb can safely
-- re-apply this file to an existing database.
PRAGMA journal_mode = WAL;
PRAGMA foreign_keys = ON;

-- Core log of agent memory entries.
-- tags / metadata hold JSON stored as TEXT. content_hash is the dedup key
-- used by the importer; it is looked up in application code, not UNIQUE here.
CREATE TABLE IF NOT EXISTS memory_entries (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  entry_type TEXT NOT NULL,
  agent_role TEXT NOT NULL,
  project TEXT NOT NULL,
  title TEXT NOT NULL,
  body TEXT NOT NULL,
  task_id TEXT,
  pr_number INTEGER,
  run_id TEXT,
  status TEXT NOT NULL DEFAULT 'active',
  tags TEXT NOT NULL DEFAULT '[]',
  metadata TEXT NOT NULL DEFAULT '{}',
  created_at TEXT NOT NULL,
  occurred_at TEXT NOT NULL,
  updated_at TEXT NOT NULL,
  content_hash TEXT NOT NULL
);

-- Rollups of many entries into one summary; entry_ids is a JSON array of
-- the summarized memory_entries ids.
CREATE TABLE IF NOT EXISTS memory_summaries (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  agent_role TEXT NOT NULL,
  project TEXT NOT NULL,
  entry_type TEXT,
  title TEXT NOT NULL,
  body TEXT NOT NULL,
  entry_count INTEGER NOT NULL,
  date_from TEXT NOT NULL,
  date_to TEXT NOT NULL,
  entry_ids TEXT NOT NULL,
  created_at TEXT NOT NULL
);

-- Cross-project recurring patterns; projects / agent_roles / entry_ids are
-- JSON arrays stored as TEXT (queried via json_each in the tools).
CREATE TABLE IF NOT EXISTS memory_patterns (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  pattern_type TEXT NOT NULL,
  title TEXT NOT NULL,
  description TEXT NOT NULL,
  projects TEXT NOT NULL DEFAULT '[]',
  agent_roles TEXT NOT NULL DEFAULT '[]',
  entry_ids TEXT NOT NULL DEFAULT '[]',
  occurrence_count INTEGER NOT NULL DEFAULT 1,
  status TEXT NOT NULL DEFAULT 'active',
  first_seen TEXT NOT NULL,
  last_seen TEXT NOT NULL,
  resolved_at TEXT,
  created_at TEXT NOT NULL,
  updated_at TEXT NOT NULL
);

-- Indexes for memory_entries
CREATE INDEX IF NOT EXISTS idx_me_entry_type ON memory_entries(entry_type);
CREATE INDEX IF NOT EXISTS idx_me_agent_role ON memory_entries(agent_role);
CREATE INDEX IF NOT EXISTS idx_me_project ON memory_entries(project);
CREATE INDEX IF NOT EXISTS idx_me_status ON memory_entries(status);
CREATE INDEX IF NOT EXISTS idx_me_task_id ON memory_entries(task_id);
CREATE INDEX IF NOT EXISTS idx_me_occurred_at ON memory_entries(occurred_at);
CREATE INDEX IF NOT EXISTS idx_me_content_hash ON memory_entries(content_hash);
CREATE INDEX IF NOT EXISTS idx_me_role_project ON memory_entries(agent_role, project);
CREATE INDEX IF NOT EXISTS idx_me_proj_type ON memory_entries(project, entry_type);
CREATE INDEX IF NOT EXISTS idx_me_proj_date ON memory_entries(project, occurred_at);

-- Indexes for memory_summaries
CREATE INDEX IF NOT EXISTS idx_ms_role_project ON memory_summaries(agent_role, project);

-- Indexes for memory_patterns
CREATE INDEX IF NOT EXISTS idx_mp_status ON memory_patterns(status);
CREATE INDEX IF NOT EXISTS idx_mp_pattern_type ON memory_patterns(pattern_type);

-- FTS5 external-content index over memory_entries(title, body).
-- With content= / content_rowid= the FTS table stores no copy of the text;
-- the triggers below keep it in sync. The INSERT ... VALUES('delete', ...)
-- form is the documented way to remove rows from an external-content table.
CREATE VIRTUAL TABLE IF NOT EXISTS memory_fts USING fts5(
  title,
  body,
  content=memory_entries,
  content_rowid=id
);

CREATE TRIGGER IF NOT EXISTS memory_fts_insert AFTER INSERT ON memory_entries BEGIN
  INSERT INTO memory_fts(rowid, title, body) VALUES (new.id, new.title, new.body);
END;

CREATE TRIGGER IF NOT EXISTS memory_fts_update AFTER UPDATE ON memory_entries BEGIN
  INSERT INTO memory_fts(memory_fts, rowid, title, body) VALUES ('delete', old.id, old.title, old.body);
  INSERT INTO memory_fts(rowid, title, body) VALUES (new.id, new.title, new.body);
END;

CREATE TRIGGER IF NOT EXISTS memory_fts_delete AFTER DELETE ON memory_entries BEGIN
  INSERT INTO memory_fts(memory_fts, rowid, title, body) VALUES ('delete', old.id, old.title, old.body);
END;

-- FTS5 for memory_summaries (same external-content pattern as above).
CREATE VIRTUAL TABLE IF NOT EXISTS memory_summaries_fts USING fts5(
  title,
  body,
  content=memory_summaries,
  content_rowid=id
);

CREATE TRIGGER IF NOT EXISTS ms_fts_insert AFTER INSERT ON memory_summaries BEGIN
  INSERT INTO memory_summaries_fts(rowid, title, body) VALUES (new.id, new.title, new.body);
END;

CREATE TRIGGER IF NOT EXISTS ms_fts_update AFTER UPDATE ON memory_summaries BEGIN
  INSERT INTO memory_summaries_fts(memory_summaries_fts, rowid, title, body) VALUES ('delete', old.id, old.title, old.body);
  INSERT INTO memory_summaries_fts(rowid, title, body) VALUES (new.id, new.title, new.body);
END;

CREATE TRIGGER IF NOT EXISTS ms_fts_delete AFTER DELETE ON memory_summaries BEGIN
  INSERT INTO memory_summaries_fts(memory_summaries_fts, rowid, title, body) VALUES ('delete', old.id, old.title, old.body);
END;
package/src/server.ts ADDED
@@ -0,0 +1,59 @@
1
+ #!/usr/bin/env node --experimental-strip-types
2
+ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
3
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
+ import {
5
+ ListToolsRequestSchema,
6
+ CallToolRequestSchema,
7
+ } from "@modelcontextprotocol/sdk/types.js";
8
+ import { resolveDbPath, initDb, errorResult } from "./db.ts";
9
+ import { storeTools, handleStore } from "./tools/store.ts";
10
+ import { recallTools, handleRecall } from "./tools/recall.ts";
11
+ import { statsTools, handleStats } from "./tools/stats.ts";
12
+ import { contextTools, handleContext } from "./tools/context.ts";
13
+ import { summarizeTools, handleSummarize } from "./tools/summarize.ts";
14
+ import { patternTools, handlePatterns } from "./tools/patterns.ts";
15
+
16
+ // Parse CLI args
17
+ const args = process.argv.slice(2);
18
+ let dbPath: string | undefined;
19
+ for (let i = 0; i < args.length; i++) {
20
+ if (args[i] === "--db" && args[i + 1]) {
21
+ dbPath = args[++i];
22
+ }
23
+ }
24
+
25
+ const db = initDb(resolveDbPath(dbPath));
26
+
27
+ const allTools = [
28
+ ...storeTools,
29
+ ...recallTools,
30
+ ...statsTools,
31
+ ...contextTools,
32
+ ...summarizeTools,
33
+ ...patternTools,
34
+ ];
35
+
36
+ const handlers = [handleStore, handleRecall, handleStats, handleContext, handleSummarize, handlePatterns];
37
+
38
+ const server = new Server(
39
+ { name: "ao-memory-mcp", version: "1.0.0" },
40
+ { capabilities: { tools: {} } }
41
+ );
42
+
43
+ server.setRequestHandler(ListToolsRequestSchema, async () => ({
44
+ tools: allTools,
45
+ }));
46
+
47
+ server.setRequestHandler(CallToolRequestSchema, async (req) => {
48
+ const { name, arguments: input } = req.params;
49
+
50
+ for (const handler of handlers) {
51
+ const result = handler(db, name, input || {});
52
+ if (result) return result;
53
+ }
54
+
55
+ return errorResult(`Unknown tool: ${name}`);
56
+ });
57
+
58
+ const transport = new StdioServerTransport();
59
+ await server.connect(transport);
@@ -0,0 +1,77 @@
1
+ import type Database from "better-sqlite3";
2
+ import { jsonResult } from "../db.ts";
3
+
4
// Tool manifest for the context module (a single agent-boot tool).
export const contextTools = [
  {
    name: "memory.context",
    description:
      "Agent boot tool — call at the start of each run to load relevant memory. Returns recent entries, active decisions, cross-project patterns, summaries, and a summarization_needed flag.",
    inputSchema: {
      type: "object" as const,
      properties: {
        agent_role: { type: "string", description: "Agent role requesting context" },
        project: { type: "string", description: "Project the agent is working on" },
        limit: { type: "number", description: "Max entries per section (default 10)" },
      },
      required: ["agent_role", "project"],
    },
  },
];
20
+
21
+ export function handleContext(db: Database.Database, name: string, args: any) {
22
+ if (name === "memory.context") return memoryContext(db, args);
23
+ return null;
24
+ }
25
+
26
/**
 * Build the "boot context" payload for an agent run: recent activity,
 * standing decisions, relevant patterns, summaries, plus a flag telling the
 * agent whether it should summarize old entries.
 */
function memoryContext(db: Database.Database, args: any) {
  const { agent_role, project } = args;
  const limit = args.limit || 10;

  // This role's most recent active entries in this project.
  const recentEntries = db.prepare(`
    SELECT * FROM memory_entries
    WHERE agent_role = ? AND project = ? AND status = 'active'
    ORDER BY occurred_at DESC LIMIT ?
  `).all(agent_role, project, limit);

  // Standing decisions for the project, regardless of role.
  const activeDecisions = db.prepare(`
    SELECT * FROM memory_entries
    WHERE project = ? AND entry_type = 'decision' AND status = 'active'
    ORDER BY occurred_at DESC LIMIT ?
  `).all(project, limit);

  // Active patterns whose JSON `projects` array contains this project.
  const activePatterns = db.prepare(`
    SELECT * FROM memory_patterns
    WHERE status = 'active'
      AND EXISTS (SELECT 1 FROM json_each(projects) WHERE json_each.value = ?)
    ORDER BY last_seen DESC LIMIT ?
  `).all(project, limit);

  // Latest 5 summaries for this role/project.
  const recentSummaries = db.prepare(`
    SELECT * FROM memory_summaries
    WHERE agent_role = ? AND project = ?
    ORDER BY created_at DESC LIMIT 5
  `).all(agent_role, project);

  // Frequently-recurring patterns across ALL projects (3+ occurrences).
  const crossProjectAlerts = db.prepare(`
    SELECT * FROM memory_patterns
    WHERE status = 'active' AND occurrence_count >= 3
    ORDER BY last_seen DESC LIMIT 5
  `).all();

  // Check if summarization is needed: 20+ active entries older than 3 days
  const threeDaysAgo = new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toISOString();
  const staleCount = (db.prepare(`
    SELECT COUNT(*) as count FROM memory_entries
    WHERE agent_role = ? AND project = ? AND status = 'active' AND occurred_at < ?
  `).get(agent_role, project, threeDaysAgo) as any).count;

  return jsonResult({
    recent_entries: recentEntries,
    active_decisions: activeDecisions,
    active_patterns: activePatterns,
    recent_summaries: recentSummaries,
    cross_project_alerts: crossProjectAlerts,
    summarization_needed: staleCount >= 20,
    stale_entry_count: staleCount,
  });
}
@@ -0,0 +1,165 @@
1
+ import type Database from "better-sqlite3";
2
+ import { now, jsonResult, errorResult } from "../db.ts";
3
+
4
// Tool manifests for the patterns module: detect (heuristic scan),
// record (create/update a confirmed pattern), list.
export const patternTools = [
  {
    name: "memory.patterns.detect",
    description:
      "Scan for recurring patterns across projects. Finds entries with similar titles or matching tags that appear in multiple projects.",
    inputSchema: {
      type: "object" as const,
      properties: {
        min_occurrences: { type: "number", description: "Minimum projects to count as a pattern (default 2)" },
        entry_type: { type: "string", description: "Restrict to entry type" },
        limit: { type: "number", description: "Max patterns to return (default 10)" },
      },
    },
  },
  {
    name: "memory.patterns.record",
    description:
      "Create or update a confirmed cross-project pattern.",
    inputSchema: {
      type: "object" as const,
      properties: {
        id: { type: "number", description: "Existing pattern ID to update (omit to create new)" },
        pattern_type: { type: "string", description: "Type (e.g. bug_pattern, process_pattern, architectural_pattern, anti_pattern)" },
        title: { type: "string", description: "Pattern name" },
        description: { type: "string", description: "Full description" },
        projects: { type: "array", items: { type: "string" }, description: "Projects where pattern appears" },
        agent_roles: { type: "array", items: { type: "string" }, description: "Roles that reported it" },
        entry_ids: { type: "array", items: { type: "number" }, description: "Memory entry IDs as evidence" },
        status: { type: "string", enum: ["active", "resolved", "archived"], description: "Pattern status" },
      },
      required: ["pattern_type", "title", "description"],
    },
  },
  {
    name: "memory.patterns.list",
    description: "List known cross-project patterns.",
    inputSchema: {
      type: "object" as const,
      properties: {
        status: { type: "string", enum: ["active", "resolved", "archived", "all"], description: "Filter by status (default: active)" },
        pattern_type: { type: "string", description: "Filter by pattern type" },
        project: { type: "string", description: "Filter patterns involving this project" },
        limit: { type: "number", description: "Max results (default 20)" },
      },
    },
  },
];
51
+
52
+ export function handlePatterns(db: Database.Database, name: string, args: any) {
53
+ if (name === "memory.patterns.detect") return patternsDetect(db, args);
54
+ if (name === "memory.patterns.record") return patternsRecord(db, args);
55
+ if (name === "memory.patterns.list") return patternsList(db, args);
56
+ return null;
57
+ }
58
+
59
+ function patternsDetect(db: Database.Database, args: any) {
60
+ const minOccurrences = args.min_occurrences ?? 2;
61
+ const limit = args.limit ?? 10;
62
+
63
+ // Find tags that appear across multiple projects
64
+ const tagCondition = args.entry_type ? "AND e.entry_type = ?" : "";
65
+ const tagVals = args.entry_type ? [args.entry_type] : [];
66
+
67
+ const tagPatterns = db.prepare(`
68
+ SELECT t.value as tag, COUNT(DISTINCT e.project) as project_count,
69
+ GROUP_CONCAT(DISTINCT e.project) as projects,
70
+ COUNT(*) as total_entries
71
+ FROM memory_entries e, json_each(e.tags) t
72
+ WHERE e.status = 'active' ${tagCondition}
73
+ GROUP BY t.value
74
+ HAVING COUNT(DISTINCT e.project) >= ?
75
+ ORDER BY project_count DESC
76
+ LIMIT ?
77
+ `).all(...tagVals, minOccurrences, limit);
78
+
79
+ // Find similar titles across projects using FTS5
80
+ const titlePatterns = db.prepare(`
81
+ SELECT e1.title, COUNT(DISTINCT e1.project) as project_count,
82
+ GROUP_CONCAT(DISTINCT e1.project) as projects,
83
+ COUNT(*) as total_entries
84
+ FROM memory_entries e1
85
+ WHERE e1.status = 'active' ${tagCondition}
86
+ GROUP BY e1.title
87
+ HAVING COUNT(DISTINCT e1.project) >= ?
88
+ ORDER BY project_count DESC
89
+ LIMIT ?
90
+ `).all(...tagVals, minOccurrences, limit);
91
+
92
+ return jsonResult({
93
+ tag_patterns: tagPatterns,
94
+ title_patterns: titlePatterns,
95
+ });
96
+ }
97
+
98
/**
 * Create a new pattern, or update an existing one when `id` is given.
 * Every update also bumps occurrence_count and refreshes last_seen — i.e.
 * calling record on an existing pattern counts as another sighting.
 */
function patternsRecord(db: Database.Database, args: any) {
  const ts = now();

  if (args.id) {
    const existing = db.prepare("SELECT * FROM memory_patterns WHERE id = ?").get(args.id) as any;
    if (!existing) return errorResult(`Pattern ${args.id} not found`);

    // Build a partial UPDATE from only the provided fields.
    const sets: string[] = [];
    const vals: any[] = [];

    if (args.pattern_type) { sets.push("pattern_type = ?"); vals.push(args.pattern_type); }
    if (args.title) { sets.push("title = ?"); vals.push(args.title); }
    if (args.description) { sets.push("description = ?"); vals.push(args.description); }
    if (args.projects) { sets.push("projects = ?"); vals.push(JSON.stringify(args.projects)); }
    if (args.agent_roles) { sets.push("agent_roles = ?"); vals.push(JSON.stringify(args.agent_roles)); }
    if (args.entry_ids) { sets.push("entry_ids = ?"); vals.push(JSON.stringify(args.entry_ids)); }
    if (args.status) {
      sets.push("status = ?"); vals.push(args.status);
      // Marking a pattern resolved also stamps resolved_at.
      if (args.status === "resolved") { sets.push("resolved_at = ?"); vals.push(ts); }
    }

    // Always applied on update:
    sets.push("last_seen = ?"); vals.push(ts);
    sets.push("updated_at = ?"); vals.push(ts);
    sets.push("occurrence_count = occurrence_count + 1");

    vals.push(args.id);
    db.prepare(`UPDATE memory_patterns SET ${sets.join(", ")} WHERE id = ?`).run(...vals);

    return jsonResult({ id: args.id, updated: true });
  }

  // No id: insert a fresh pattern (occurrence_count starts at 1).
  const result = db.prepare(`
    INSERT INTO memory_patterns (pattern_type, title, description, projects, agent_roles, entry_ids, occurrence_count, status, first_seen, last_seen, created_at, updated_at)
    VALUES (?, ?, ?, ?, ?, ?, 1, 'active', ?, ?, ?, ?)
  `).run(
    args.pattern_type,
    args.title,
    args.description,
    JSON.stringify(args.projects || []),
    JSON.stringify(args.agent_roles || []),
    JSON.stringify(args.entry_ids || []),
    ts, ts, ts, ts
  );

  return jsonResult({ id: result.lastInsertRowid, created: true });
}
144
+
145
+ function patternsList(db: Database.Database, args: any) {
146
+ const conditions: string[] = [];
147
+ const vals: any[] = [];
148
+ const status = args.status || "active";
149
+
150
+ if (status !== "all") { conditions.push("status = ?"); vals.push(status); }
151
+ if (args.pattern_type) { conditions.push("pattern_type = ?"); vals.push(args.pattern_type); }
152
+ if (args.project) {
153
+ conditions.push("EXISTS (SELECT 1 FROM json_each(projects) WHERE json_each.value = ?)");
154
+ vals.push(args.project);
155
+ }
156
+
157
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
158
+ const limit = args.limit || 20;
159
+
160
+ const rows = db.prepare(
161
+ `SELECT * FROM memory_patterns ${where} ORDER BY last_seen DESC LIMIT ?`
162
+ ).all(...vals, limit);
163
+
164
+ return jsonResult({ patterns: rows, count: rows.length });
165
+ }
@@ -0,0 +1,124 @@
1
+ import type Database from "better-sqlite3";
2
+ import { jsonResult, errorResult } from "../db.ts";
3
+
4
// Tool manifests for the recall module: structured recall, FTS search,
// and get-by-id.
export const recallTools = [
  {
    name: "memory.recall",
    description:
      "Query memory entries with structured filters. Returns entries sorted by date. Default: 50 most recent active entries.",
    inputSchema: {
      type: "object" as const,
      properties: {
        agent_role: { type: "string", description: "Filter by agent role" },
        project: { type: "string", description: "Filter by project" },
        entry_type: { type: "string", description: "Filter by entry type" },
        task_id: { type: "string", description: "Filter by task ID" },
        status: { type: "string", enum: ["active", "summarized", "archived"], description: "Filter by status (default: active)" },
        date_from: { type: "string", description: "Entries from this ISO date" },
        date_to: { type: "string", description: "Entries up to this ISO date" },
        tags: { type: "array", items: { type: "string" }, description: "Filter by tags (must have ALL specified)" },
        limit: { type: "number", description: "Max results (default 50)" },
        offset: { type: "number", description: "Pagination offset (default 0)" },
        order: { type: "string", enum: ["newest", "oldest"], description: "Sort order (default: newest)" },
      },
    },
  },
  {
    name: "memory.search",
    description:
      "Full-text search across all memory entries. Uses FTS5 for fast matching.",
    inputSchema: {
      type: "object" as const,
      properties: {
        query: { type: "string", description: "Search query (supports FTS5 syntax)" },
        agent_role: { type: "string", description: "Restrict to agent role" },
        project: { type: "string", description: "Restrict to project" },
        entry_type: { type: "string", description: "Restrict to entry type" },
        status: { type: "string", description: "Restrict to status" },
        limit: { type: "number", description: "Max results (default 20)" },
      },
      required: ["query"],
    },
  },
  {
    name: "memory.get",
    description: "Get a single memory entry by ID.",
    inputSchema: {
      type: "object" as const,
      properties: {
        id: { type: "number", description: "Memory entry ID" },
      },
      required: ["id"],
    },
  },
];
55
+
56
+ export function handleRecall(db: Database.Database, name: string, args: any) {
57
+ if (name === "memory.recall") return memoryRecall(db, args);
58
+ if (name === "memory.search") return memorySearch(db, args);
59
+ if (name === "memory.get") return memoryGet(db, args);
60
+ return null;
61
+ }
62
+
63
/**
 * Structured (non-FTS) query over memory entries. All provided filters are
 * ANDed together; status defaults to "active".
 */
function memoryRecall(db: Database.Database, args: any) {
  const conditions: string[] = [];
  const vals: any[] = [];
  const status = args.status || "active";

  conditions.push("status = ?"); vals.push(status);
  if (args.agent_role) { conditions.push("agent_role = ?"); vals.push(args.agent_role); }
  if (args.project) { conditions.push("project = ?"); vals.push(args.project); }
  if (args.entry_type) { conditions.push("entry_type = ?"); vals.push(args.entry_type); }
  if (args.task_id) { conditions.push("task_id = ?"); vals.push(args.task_id); }
  if (args.date_from) { conditions.push("occurred_at >= ?"); vals.push(args.date_from); }
  if (args.date_to) { conditions.push("occurred_at <= ?"); vals.push(args.date_to); }

  // Tag filter: the entry must contain EVERY requested tag (one EXISTS per tag
  // against the JSON `tags` array column).
  if (args.tags?.length) {
    for (const tag of args.tags) {
      conditions.push("EXISTS (SELECT 1 FROM json_each(tags) WHERE json_each.value = ?)");
      vals.push(tag);
    }
  }

  const order = args.order === "oldest" ? "ASC" : "DESC";
  const limit = args.limit || 50;
  const offset = args.offset || 0;

  // `conditions` always holds at least the status filter, so `where` is never empty.
  const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
  const rows = db.prepare(
    `SELECT * FROM memory_entries ${where} ORDER BY occurred_at ${order} LIMIT ? OFFSET ?`
  ).all(...vals, limit, offset);

  return jsonResult({ entries: rows, count: rows.length });
}
94
+
95
/**
 * FTS5 full-text search over title+body with optional structured filters.
 * NOTE(review): args.query is passed straight to MATCH, so invalid FTS5
 * syntax from the caller throws at query time — consistent with the tool
 * description ("supports FTS5 syntax"), but callers should be aware.
 */
function memorySearch(db: Database.Database, args: any) {
  const conditions: string[] = [];
  const vals: any[] = [];

  if (args.agent_role) { conditions.push("e.agent_role = ?"); vals.push(args.agent_role); }
  if (args.project) { conditions.push("e.project = ?"); vals.push(args.project); }
  if (args.entry_type) { conditions.push("e.entry_type = ?"); vals.push(args.entry_type); }
  if (args.status) { conditions.push("e.status = ?"); vals.push(args.status); }

  const extraWhere = conditions.length ? `AND ${conditions.join(" AND ")}` : "";
  const limit = args.limit || 20;

  // snippet() column 0 = title, column 1 = body; matches wrapped in <mark>.
  const rows = db.prepare(`
    SELECT e.*, snippet(memory_fts, 0, '<mark>', '</mark>', '...', 32) as title_snippet,
           snippet(memory_fts, 1, '<mark>', '</mark>', '...', 64) as body_snippet
    FROM memory_fts f
    JOIN memory_entries e ON e.id = f.rowid
    WHERE memory_fts MATCH ? ${extraWhere}
    ORDER BY rank
    LIMIT ?
  `).all(args.query, ...vals, limit);

  return jsonResult({ entries: rows, count: rows.length });
}
119
+
120
+ function memoryGet(db: Database.Database, args: any) {
121
+ const entry = db.prepare("SELECT * FROM memory_entries WHERE id = ?").get(args.id);
122
+ if (!entry) return errorResult(`Entry ${args.id} not found`);
123
+ return jsonResult(entry);
124
+ }
@@ -0,0 +1,74 @@
1
+ import type Database from "better-sqlite3";
2
+ import { jsonResult } from "../db.ts";
3
+
4
// MCP tool descriptor(s) for the read-only aggregate statistics tool.
// Both schema properties are optional scope filters; omitting them
// reports statistics across the whole database.
export const statsTools = [
  {
    name: "memory.stats",
    description:
      "Get aggregate statistics about memory entries. Optionally filter by project or agent role.",
    inputSchema: {
      type: "object" as const,
      properties: {
        project: { type: "string", description: "Filter by project" },
        agent_role: { type: "string", description: "Filter by agent role" },
      },
    },
  },
];
18
+
19
+ export function handleStats(db: Database.Database, name: string, args: any) {
20
+ if (name === "memory.stats") return memoryStats(db, args);
21
+ return null;
22
+ }
23
+
24
+ function memoryStats(db: Database.Database, args: any) {
25
+ const conditions: string[] = [];
26
+ const vals: any[] = [];
27
+
28
+ if (args.project) { conditions.push("project = ?"); vals.push(args.project); }
29
+ if (args.agent_role) { conditions.push("agent_role = ?"); vals.push(args.agent_role); }
30
+
31
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
32
+
33
+ const total = (db.prepare(`SELECT COUNT(*) as count FROM memory_entries ${where}`).get(...vals) as any).count;
34
+
35
+ const byType = db.prepare(
36
+ `SELECT entry_type, COUNT(*) as count FROM memory_entries ${where} GROUP BY entry_type ORDER BY count DESC`
37
+ ).all(...vals);
38
+
39
+ const byStatus = db.prepare(
40
+ `SELECT status, COUNT(*) as count FROM memory_entries ${where} GROUP BY status ORDER BY count DESC`
41
+ ).all(...vals);
42
+
43
+ const byRole = db.prepare(
44
+ `SELECT agent_role, COUNT(*) as count FROM memory_entries ${where} GROUP BY agent_role ORDER BY count DESC`
45
+ ).all(...vals);
46
+
47
+ const byProject = db.prepare(
48
+ `SELECT project, COUNT(*) as count FROM memory_entries ${where} GROUP BY project ORDER BY count DESC`
49
+ ).all(...vals);
50
+
51
+ const dateRange = db.prepare(
52
+ `SELECT MIN(occurred_at) as oldest, MAX(occurred_at) as newest FROM memory_entries ${where}`
53
+ ).get(...vals) as any;
54
+
55
+ const summaryCount = (db.prepare(
56
+ `SELECT COUNT(*) as count FROM memory_summaries ${where.replace("entry_type", "entry_type")}`
57
+ ).get(...vals) as any).count;
58
+
59
+ const patternCount = (db.prepare(
60
+ `SELECT COUNT(*) as count FROM memory_patterns WHERE status = 'active'`
61
+ ).get() as any).count;
62
+
63
+ return jsonResult({
64
+ total_entries: total,
65
+ total_summaries: summaryCount,
66
+ active_patterns: patternCount,
67
+ oldest_entry: dateRange?.oldest,
68
+ newest_entry: dateRange?.newest,
69
+ by_type: byType,
70
+ by_status: byStatus,
71
+ by_role: byRole,
72
+ by_project: byProject,
73
+ });
74
+ }
@@ -0,0 +1,160 @@
1
+ import type Database from "better-sqlite3";
2
+ import { contentHash, now, jsonResult, errorResult } from "../db.ts";
3
+
4
// MCP tool descriptors for the write-path operations on memory entries:
// memory.store (create with content-hash dedupe), memory.update (patch a
// single entry by ID), memory.archive (bulk status transition by filter).
export const storeTools = [
  {
    name: "memory.store",
    description:
      "Store a new memory entry. Deduplicates via content hash — returns existing entry if duplicate detected.",
    inputSchema: {
      type: "object" as const,
      properties: {
        entry_type: { type: "string", description: "Type of memory (e.g. decision, observation, task_dispatch, test_result, review, action)" },
        agent_role: { type: "string", description: "Agent role that produced this memory (e.g. planner, reviewer, qa-tester)" },
        project: { type: "string", description: "Project/repo name" },
        title: { type: "string", description: "Short summary line" },
        body: { type: "string", description: "Full markdown content" },
        task_id: { type: "string", description: "Task ID reference (e.g. TASK-051)" },
        pr_number: { type: "number", description: "PR number if applicable" },
        run_id: { type: "string", description: "Run identifier (e.g. run 51)" },
        tags: { type: "array", items: { type: "string" }, description: "Tags for categorization" },
        metadata: { type: "object", description: "Entry-type-specific metadata" },
        occurred_at: { type: "string", description: "ISO 8601 date when event occurred (defaults to now)" },
      },
      required: ["entry_type", "agent_role", "project", "title", "body"],
    },
  },
  {
    name: "memory.update",
    description: "Update an existing memory entry by ID.",
    inputSchema: {
      type: "object" as const,
      properties: {
        id: { type: "number", description: "Memory entry ID" },
        title: { type: "string", description: "New title" },
        body: { type: "string", description: "New body" },
        status: { type: "string", enum: ["active", "summarized", "archived"], description: "New status" },
        tags: { type: "array", items: { type: "string" }, description: "New tags" },
        metadata: { type: "object", description: "Metadata to merge" },
      },
      required: ["id"],
    },
  },
  {
    name: "memory.archive",
    description: "Bulk archive entries by filter. At least one filter required.",
    inputSchema: {
      type: "object" as const,
      properties: {
        ids: { type: "array", items: { type: "number" }, description: "Specific entry IDs to archive" },
        agent_role: { type: "string", description: "Archive all active entries for this role" },
        project: { type: "string", description: "Archive all active entries for this project" },
        before: { type: "string", description: "Archive entries with occurred_at before this ISO date" },
      },
    },
  },
];
57
+
58
+ export function handleStore(db: Database.Database, name: string, args: any) {
59
+ if (name === "memory.store") return memoryStore(db, args);
60
+ if (name === "memory.update") return memoryUpdate(db, args);
61
+ if (name === "memory.archive") return memoryArchive(db, args);
62
+ return null;
63
+ }
64
+
65
+ function memoryStore(db: Database.Database, args: any) {
66
+ const { entry_type, agent_role, project, title, body } = args;
67
+ const hash = contentHash(entry_type, agent_role, project, title, body);
68
+
69
+ const existing = db.prepare("SELECT id FROM memory_entries WHERE content_hash = ?").get(hash) as any;
70
+ if (existing) {
71
+ return jsonResult({ duplicate: true, existing_id: existing.id });
72
+ }
73
+
74
+ const ts = now();
75
+ const result = db.prepare(`
76
+ INSERT INTO memory_entries (entry_type, agent_role, project, title, body, task_id, pr_number, run_id, status, tags, metadata, created_at, occurred_at, updated_at, content_hash)
77
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, ?, ?, ?)
78
+ `).run(
79
+ entry_type,
80
+ agent_role,
81
+ project,
82
+ title,
83
+ body,
84
+ args.task_id || null,
85
+ args.pr_number || null,
86
+ args.run_id || null,
87
+ JSON.stringify(args.tags || []),
88
+ JSON.stringify(args.metadata || {}),
89
+ ts,
90
+ args.occurred_at || ts,
91
+ ts,
92
+ hash
93
+ );
94
+
95
+ return jsonResult({ id: result.lastInsertRowid, created: true });
96
+ }
97
+
98
+ function memoryUpdate(db: Database.Database, args: any) {
99
+ const { id, ...updates } = args;
100
+
101
+ const entry = db.prepare("SELECT * FROM memory_entries WHERE id = ?").get(id) as any;
102
+ if (!entry) return errorResult(`Entry ${id} not found`);
103
+
104
+ const sets: string[] = [];
105
+ const vals: any[] = [];
106
+
107
+ if (updates.title !== undefined) { sets.push("title = ?"); vals.push(updates.title); }
108
+ if (updates.body !== undefined) { sets.push("body = ?"); vals.push(updates.body); }
109
+ if (updates.status !== undefined) {
110
+ sets.push("status = ?"); vals.push(updates.status);
111
+ if (updates.status === "archived") { sets.push("archived_at = ?"); vals.push(now()); }
112
+ }
113
+ if (updates.tags !== undefined) { sets.push("tags = ?"); vals.push(JSON.stringify(updates.tags)); }
114
+ if (updates.metadata !== undefined) {
115
+ const merged = { ...JSON.parse(entry.metadata), ...updates.metadata };
116
+ sets.push("metadata = ?"); vals.push(JSON.stringify(merged));
117
+ }
118
+
119
+ if (sets.length === 0) return errorResult("No fields to update");
120
+
121
+ sets.push("updated_at = ?"); vals.push(now());
122
+
123
+ if (updates.title !== undefined || updates.body !== undefined) {
124
+ const newTitle = updates.title || entry.title;
125
+ const newBody = updates.body || entry.body;
126
+ const hash = contentHash(entry.entry_type, entry.agent_role, entry.project, newTitle, newBody);
127
+ sets.push("content_hash = ?"); vals.push(hash);
128
+ }
129
+
130
+ vals.push(id);
131
+ db.prepare(`UPDATE memory_entries SET ${sets.join(", ")} WHERE id = ?`).run(...vals);
132
+
133
+ return jsonResult({ id, updated: true });
134
+ }
135
+
136
+ function memoryArchive(db: Database.Database, args: any) {
137
+ const { ids, agent_role, project, before } = args;
138
+
139
+ if (!ids && !agent_role && !project && !before) {
140
+ return errorResult("At least one filter required");
141
+ }
142
+
143
+ const conditions: string[] = ["status = 'active'"];
144
+ const vals: any[] = [];
145
+
146
+ if (ids?.length) {
147
+ conditions.push(`id IN (${ids.map(() => "?").join(",")})`);
148
+ vals.push(...ids);
149
+ }
150
+ if (agent_role) { conditions.push("agent_role = ?"); vals.push(agent_role); }
151
+ if (project) { conditions.push("project = ?"); vals.push(project); }
152
+ if (before) { conditions.push("occurred_at < ?"); vals.push(before); }
153
+
154
+ const ts = now();
155
+ const result = db.prepare(
156
+ `UPDATE memory_entries SET status = 'archived', archived_at = ?, updated_at = ? WHERE ${conditions.join(" AND ")}`
157
+ ).run(ts, ts, ...vals);
158
+
159
+ return jsonResult({ archived_count: result.changes });
160
+ }
@@ -0,0 +1,140 @@
1
+ import type Database from "better-sqlite3";
2
+ import { now, jsonResult, errorResult } from "../db.ts";
3
+
4
// MCP tool descriptors for lifecycle maintenance: memory.summarize
// (agent supplies the summary text; server records it and transitions
// the covered entries) and memory.cleanup (report/act on stale entries).
export const summarizeTools = [
  {
    name: "memory.summarize",
    description:
      "Create a summary of memory entries. The calling agent provides the summary text. The server creates the summary record and transitions matching entries to 'summarized' status.",
    inputSchema: {
      type: "object" as const,
      properties: {
        agent_role: { type: "string", description: "Agent role being summarized" },
        project: { type: "string", description: "Project being summarized" },
        entry_type: { type: "string", description: "Entry type filter (omit for mixed)" },
        summary_title: { type: "string", description: "Title for the summary" },
        summary_body: { type: "string", description: "The summary text (markdown)" },
        before: { type: "string", description: "Summarize entries before this ISO date (default: 3 days ago)" },
        entry_ids: { type: "array", items: { type: "number" }, description: "Specific entry IDs to summarize (overrides date filter)" },
      },
      required: ["agent_role", "project", "summary_title", "summary_body"],
    },
  },
  {
    name: "memory.cleanup",
    description:
      "Identify entries needing summarization or archive old summarized entries. Use dry_run to preview without changes.",
    inputSchema: {
      type: "object" as const,
      properties: {
        older_than_days: { type: "number", description: "Entries older than N days (default 7)" },
        min_entries: { type: "number", description: "Minimum entries per role+project to trigger (default 10)" },
        dry_run: { type: "boolean", description: "If true, just report what would happen (default true)" },
      },
    },
  },
];
37
+
38
+ export function handleSummarize(db: Database.Database, name: string, args: any) {
39
+ if (name === "memory.summarize") return memorySummarize(db, args);
40
+ if (name === "memory.cleanup") return memoryCleanup(db, args);
41
+ return null;
42
+ }
43
+
44
/**
 * Persist an agent-authored summary over a set of memory entries and flip
 * those entries to 'summarized'. Runs inside a single transaction so the
 * summary row and the status transitions commit (or roll back) together.
 *
 * Entry selection: explicit `entry_ids` wins; otherwise all 'active'
 * entries for the role+project (optionally one entry_type) with
 * occurred_at before `before` (default: 3 days ago).
 *
 * Returns the new summary ID and coverage info, or an error result when
 * nothing matched.
 */
function memorySummarize(db: Database.Database, args: any) {
  const { agent_role, project, entry_type, summary_title, summary_body } = args;

  const summarize = db.transaction(() => {
    let entryIds: number[];

    if (args.entry_ids?.length) {
      // Caller pinned the exact set of entries.
      entryIds = args.entry_ids;
    } else {
      // Date-based selection: active entries older than the cutoff.
      const cutoff = args.before || new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toISOString();
      const conditions = ["agent_role = ?", "project = ?", "status = 'active'", "occurred_at < ?"];
      const vals = [agent_role, project, cutoff];
      if (entry_type) { conditions.push("entry_type = ?"); vals.push(entry_type); }

      const rows = db.prepare(
        `SELECT id FROM memory_entries WHERE ${conditions.join(" AND ")} ORDER BY occurred_at ASC`
      ).all(...vals) as any[];
      entryIds = rows.map(r => r.id);
    }

    if (entryIds.length === 0) {
      // Returned (not thrown) so the caller can surface it as errorResult;
      // no writes have happened yet, so there is nothing to roll back.
      return { error: "No entries to summarize" };
    }

    // Date range actually covered by the selected entries.
    const entries = db.prepare(
      `SELECT MIN(occurred_at) as date_from, MAX(occurred_at) as date_to FROM memory_entries WHERE id IN (${entryIds.map(() => "?").join(",")})`
    ).get(...entryIds) as any;

    const ts = now();
    const result = db.prepare(`
      INSERT INTO memory_summaries (agent_role, project, entry_type, title, body, entry_count, date_from, date_to, entry_ids, created_at)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(
      agent_role, project, entry_type || null,
      summary_title, summary_body,
      entryIds.length, entries.date_from, entries.date_to,
      JSON.stringify(entryIds), ts
    );

    // Transition covered entries out of 'active' so they are not
    // re-summarized or double-counted by cleanup.
    const placeholders = entryIds.map(() => "?").join(",");
    db.prepare(
      `UPDATE memory_entries SET status = 'summarized', updated_at = ? WHERE id IN (${placeholders})`
    ).run(ts, ...entryIds);

    return {
      summary_id: result.lastInsertRowid,
      entries_summarized: entryIds.length,
      date_from: entries.date_from,
      date_to: entries.date_to,
    };
  });

  const result = summarize();
  if ((result as any).error) return errorResult((result as any).error);
  return jsonResult(result);
}
100
+
101
+ function memoryCleanup(db: Database.Database, args: any) {
102
+ const olderThanDays = args.older_than_days ?? 7;
103
+ const minEntries = args.min_entries ?? 10;
104
+ const dryRun = args.dry_run ?? true;
105
+
106
+ const cutoff = new Date(Date.now() - olderThanDays * 24 * 60 * 60 * 1000).toISOString();
107
+
108
+ // Find scopes that need summarization
109
+ const needsSummarization = db.prepare(`
110
+ SELECT agent_role, project, COUNT(*) as entry_count,
111
+ MIN(occurred_at) as date_from, MAX(occurred_at) as date_to
112
+ FROM memory_entries
113
+ WHERE status = 'active' AND occurred_at < ?
114
+ GROUP BY agent_role, project
115
+ HAVING COUNT(*) >= ?
116
+ ORDER BY entry_count DESC
117
+ `).all(cutoff, minEntries);
118
+
119
+ // Find old summarized entries eligible for archival
120
+ const archivalCutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString();
121
+ const needsArchival = (db.prepare(`
122
+ SELECT COUNT(*) as count FROM memory_entries
123
+ WHERE status = 'summarized' AND updated_at < ?
124
+ `).get(archivalCutoff) as any).count;
125
+
126
+ if (!dryRun && needsArchival > 0) {
127
+ const ts = now();
128
+ db.prepare(`
129
+ UPDATE memory_entries SET status = 'archived', archived_at = ?, updated_at = ?
130
+ WHERE status = 'summarized' AND updated_at < ?
131
+ `).run(ts, ts, archivalCutoff);
132
+ }
133
+
134
+ return jsonResult({
135
+ needs_summarization: needsSummarization,
136
+ needs_archival: needsArchival,
137
+ archived: dryRun ? 0 : needsArchival,
138
+ dry_run: dryRun,
139
+ });
140
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,12 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "module": "NodeNext",
5
+ "moduleResolution": "NodeNext",
6
+ "noEmit": true,
7
+ "strict": false,
8
+ "skipLibCheck": true,
9
+ "types": ["node"]
10
+ },
11
+ "include": ["src/**/*.ts", "migrate.ts"]
12
+ }