@launchapp-dev/ao-memory-mcp 1.0.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,115 +1,122 @@
1
1
  import type Database from "better-sqlite3";
2
- import { contentHash, now, jsonResult, errorResult } from "../db.ts";
2
+ import { contentHash, now, jsonResult, errorResult, touchAccess } from "../db.ts";
3
+ import { embed, storeVector, deleteVector } from "../embeddings.ts";
3
4
 
4
5
  export const storeTools = [
5
6
  {
6
- name: "memory.store",
7
+ name: "memory.remember",
7
8
  description:
8
- "Store a new memory entry. Deduplicates via content hash; returns existing entry if duplicate detected.",
9
+ "Store a new memory. Automatically embeds for semantic search. Deduplicates via content hash. Supports semantic (facts/knowledge), episodic (events/history), and procedural (how-to/workflows) types.",
9
10
  inputSchema: {
10
11
  type: "object" as const,
11
12
  properties: {
12
- entry_type: { type: "string", description: "Type of memory (e.g. decision, observation, task_dispatch, test_result, review, action)" },
13
- agent_role: { type: "string", description: "Agent role that produced this memory (e.g. planner, reviewer, qa-tester)" },
14
- project: { type: "string", description: "Project/repo name" },
15
- title: { type: "string", description: "Short summary line" },
16
- body: { type: "string", description: "Full markdown content" },
17
- task_id: { type: "string", description: "Task ID reference (e.g. TASK-051)" },
18
- pr_number: { type: "number", description: "PR number if applicable" },
19
- run_id: { type: "string", description: "Run identifier (e.g. run 51)" },
20
- tags: { type: "array", items: { type: "string" }, description: "Tags for categorization" },
21
- metadata: { type: "object", description: "Entry-type-specific metadata" },
22
- occurred_at: { type: "string", description: "ISO 8601 date when event occurred (defaults to now)" },
13
+ memory_type: { type: "string", enum: ["semantic", "episodic", "procedural"], description: "Type: semantic (facts), episodic (events), procedural (how-to)" },
14
+ title: { type: "string", description: "Short summary" },
15
+ content: { type: "string", description: "Full content (markdown)" },
16
+ scope: { type: "string", enum: ["global", "user", "project", "session"], description: "Scope (default: project)" },
17
+ namespace: { type: "string", description: "Scope identifier — project name, user id, session id" },
18
+ agent_role: { type: "string", description: "Agent role storing this memory" },
19
+ task_id: { type: "string", description: "Task ID reference" },
20
+ pr_number: { type: "number", description: "PR number" },
21
+ run_id: { type: "string", description: "Run identifier" },
22
+ tags: { type: "array", items: { type: "string" }, description: "Tags" },
23
+ metadata: { type: "object", description: "Custom metadata" },
24
+ confidence: { type: "number", description: "Confidence score 0.0-1.0 (default 1.0)" },
25
+ occurred_at: { type: "string", description: "ISO 8601 date (default: now)" },
23
26
  },
24
- required: ["entry_type", "agent_role", "project", "title", "body"],
27
+ required: ["memory_type", "title", "content"],
25
28
  },
26
29
  },
27
30
  {
28
31
  name: "memory.update",
29
- description: "Update an existing memory entry by ID.",
32
+ description: "Update an existing memory by ID.",
30
33
  inputSchema: {
31
34
  type: "object" as const,
32
35
  properties: {
33
- id: { type: "number", description: "Memory entry ID" },
34
- title: { type: "string", description: "New title" },
35
- body: { type: "string", description: "New body" },
36
- status: { type: "string", enum: ["active", "summarized", "archived"], description: "New status" },
37
- tags: { type: "array", items: { type: "string" }, description: "New tags" },
38
- metadata: { type: "object", description: "Metadata to merge" },
36
+ id: { type: "number", description: "Memory ID" },
37
+ title: { type: "string" },
38
+ content: { type: "string" },
39
+ status: { type: "string", enum: ["active", "summarized", "archived"] },
40
+ confidence: { type: "number" },
41
+ tags: { type: "array", items: { type: "string" } },
42
+ metadata: { type: "object" },
43
+ superseded_by: { type: "number", description: "ID of memory that replaces this one" },
39
44
  },
40
45
  required: ["id"],
41
46
  },
42
47
  },
43
48
  {
44
- name: "memory.archive",
45
- description: "Bulk archive entries by filter. At least one filter required.",
49
+ name: "memory.forget",
50
+ description: "Archive memories. Soft delete — they remain queryable with status filter.",
46
51
  inputSchema: {
47
52
  type: "object" as const,
48
53
  properties: {
49
- ids: { type: "array", items: { type: "number" }, description: "Specific entry IDs to archive" },
50
- agent_role: { type: "string", description: "Archive all active entries for this role" },
51
- project: { type: "string", description: "Archive all active entries for this project" },
52
- before: { type: "string", description: "Archive entries with occurred_at before this ISO date" },
54
+ ids: { type: "array", items: { type: "number" }, description: "Specific IDs to archive" },
55
+ agent_role: { type: "string", description: "Archive all for this role" },
56
+ namespace: { type: "string", description: "Archive all in this namespace" },
57
+ before: { type: "string", description: "Archive entries before this ISO date" },
53
58
  },
54
59
  },
55
60
  },
56
61
  ];
57
62
 
58
63
  export function handleStore(db: Database.Database, name: string, args: any) {
59
- if (name === "memory.store") return memoryStore(db, args);
64
+ if (name === "memory.remember") return memoryRemember(db, args);
60
65
  if (name === "memory.update") return memoryUpdate(db, args);
61
- if (name === "memory.archive") return memoryArchive(db, args);
66
+ if (name === "memory.forget") return memoryForget(db, args);
62
67
  return null;
63
68
  }
64
69
 
65
- function memoryStore(db: Database.Database, args: any) {
66
- const { entry_type, agent_role, project, title, body } = args;
67
- const hash = contentHash(entry_type, agent_role, project, title, body);
70
+ async function memoryRemember(db: Database.Database, args: any) {
71
+ const { memory_type, title, content } = args;
72
+ const scope = args.scope || "project";
73
+ const hash = contentHash(memory_type, scope, args.namespace || "", title, content);
68
74
 
69
- const existing = db.prepare("SELECT id FROM memory_entries WHERE content_hash = ?").get(hash) as any;
75
+ const existing = db.prepare("SELECT id FROM memories WHERE content_hash = ?").get(hash) as any;
70
76
  if (existing) {
77
+ touchAccess(db, existing.id);
71
78
  return jsonResult({ duplicate: true, existing_id: existing.id });
72
79
  }
73
80
 
74
81
  const ts = now();
75
82
  const result = db.prepare(`
76
- INSERT INTO memory_entries (entry_type, agent_role, project, title, body, task_id, pr_number, run_id, status, tags, metadata, created_at, occurred_at, updated_at, content_hash)
77
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, ?, ?, ?)
83
+ INSERT INTO memories (memory_type, scope, namespace, agent_role, title, content, task_id, pr_number, run_id, status, confidence, tags, metadata, created_at, occurred_at, updated_at, content_hash)
84
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, ?, ?, ?, ?)
78
85
  `).run(
79
- entry_type,
80
- agent_role,
81
- project,
82
- title,
83
- body,
84
- args.task_id || null,
85
- args.pr_number || null,
86
- args.run_id || null,
87
- JSON.stringify(args.tags || []),
88
- JSON.stringify(args.metadata || {}),
89
- ts,
90
- args.occurred_at || ts,
91
- ts,
92
- hash
86
+ memory_type, scope, args.namespace || null, args.agent_role || null,
87
+ title, content,
88
+ args.task_id || null, args.pr_number || null, args.run_id || null,
89
+ args.confidence ?? 1.0,
90
+ JSON.stringify(args.tags || []), JSON.stringify(args.metadata || {}),
91
+ ts, args.occurred_at || ts, ts, hash
93
92
  );
94
93
 
95
- return jsonResult({ id: result.lastInsertRowid, created: true });
94
+ const id = Number(result.lastInsertRowid);
95
+
96
+ // Embed asynchronously
97
+ try {
98
+ const embedding = await embed(`${title}\n${content}`);
99
+ storeVector(db, "vec_memories", id, embedding);
100
+ } catch (e) {
101
+ console.error(`[ao-memory] Embedding failed for memory ${id}:`, e);
102
+ }
103
+
104
+ return jsonResult({ id, created: true });
96
105
  }
97
106
 
98
- function memoryUpdate(db: Database.Database, args: any) {
107
+ async function memoryUpdate(db: Database.Database, args: any) {
99
108
  const { id, ...updates } = args;
100
-
101
- const entry = db.prepare("SELECT * FROM memory_entries WHERE id = ?").get(id) as any;
102
- if (!entry) return errorResult(`Entry ${id} not found`);
109
+ const entry = db.prepare("SELECT * FROM memories WHERE id = ?").get(id) as any;
110
+ if (!entry) return errorResult(`Memory ${id} not found`);
103
111
 
104
112
  const sets: string[] = [];
105
113
  const vals: any[] = [];
106
114
 
107
115
  if (updates.title !== undefined) { sets.push("title = ?"); vals.push(updates.title); }
108
- if (updates.body !== undefined) { sets.push("body = ?"); vals.push(updates.body); }
109
- if (updates.status !== undefined) {
110
- sets.push("status = ?"); vals.push(updates.status);
111
- if (updates.status === "archived") { sets.push("archived_at = ?"); vals.push(now()); }
112
- }
116
+ if (updates.content !== undefined) { sets.push("content = ?"); vals.push(updates.content); }
117
+ if (updates.status !== undefined) { sets.push("status = ?"); vals.push(updates.status); }
118
+ if (updates.confidence !== undefined) { sets.push("confidence = ?"); vals.push(updates.confidence); }
119
+ if (updates.superseded_by !== undefined) { sets.push("superseded_by = ?"); vals.push(updates.superseded_by); }
113
120
  if (updates.tags !== undefined) { sets.push("tags = ?"); vals.push(JSON.stringify(updates.tags)); }
114
121
  if (updates.metadata !== undefined) {
115
122
  const merged = { ...JSON.parse(entry.metadata), ...updates.metadata };
@@ -117,44 +124,45 @@ function memoryUpdate(db: Database.Database, args: any) {
117
124
  }
118
125
 
119
126
  if (sets.length === 0) return errorResult("No fields to update");
120
-
121
127
  sets.push("updated_at = ?"); vals.push(now());
122
128
 
123
- if (updates.title !== undefined || updates.body !== undefined) {
129
+ if (updates.title !== undefined || updates.content !== undefined) {
124
130
  const newTitle = updates.title || entry.title;
125
- const newBody = updates.body || entry.body;
126
- const hash = contentHash(entry.entry_type, entry.agent_role, entry.project, newTitle, newBody);
131
+ const newContent = updates.content || entry.content;
132
+ const hash = contentHash(entry.memory_type, entry.scope, entry.namespace || "", newTitle, newContent);
127
133
  sets.push("content_hash = ?"); vals.push(hash);
134
+
135
+ // Re-embed
136
+ try {
137
+ const embedding = await embed(`${newTitle}\n${newContent}`);
138
+ storeVector(db, "vec_memories", id, embedding);
139
+ } catch {}
128
140
  }
129
141
 
130
142
  vals.push(id);
131
- db.prepare(`UPDATE memory_entries SET ${sets.join(", ")} WHERE id = ?`).run(...vals);
143
+ db.prepare(`UPDATE memories SET ${sets.join(", ")} WHERE id = ?`).run(...vals);
132
144
 
133
145
  return jsonResult({ id, updated: true });
134
146
  }
135
147
 
136
- function memoryArchive(db: Database.Database, args: any) {
137
- const { ids, agent_role, project, before } = args;
138
-
139
- if (!ids && !agent_role && !project && !before) {
148
+ function memoryForget(db: Database.Database, args: any) {
149
+ const { ids, agent_role, namespace, before } = args;
150
+ if (!ids && !agent_role && !namespace && !before) {
140
151
  return errorResult("At least one filter required");
141
152
  }
142
153
 
143
154
  const conditions: string[] = ["status = 'active'"];
144
155
  const vals: any[] = [];
145
156
 
146
- if (ids?.length) {
147
- conditions.push(`id IN (${ids.map(() => "?").join(",")})`);
148
- vals.push(...ids);
149
- }
157
+ if (ids?.length) { conditions.push(`id IN (${ids.map(() => "?").join(",")})`); vals.push(...ids); }
150
158
  if (agent_role) { conditions.push("agent_role = ?"); vals.push(agent_role); }
151
- if (project) { conditions.push("project = ?"); vals.push(project); }
159
+ if (namespace) { conditions.push("namespace = ?"); vals.push(namespace); }
152
160
  if (before) { conditions.push("occurred_at < ?"); vals.push(before); }
153
161
 
154
162
  const ts = now();
155
163
  const result = db.prepare(
156
- `UPDATE memory_entries SET status = 'archived', archived_at = ?, updated_at = ? WHERE ${conditions.join(" AND ")}`
157
- ).run(ts, ts, ...vals);
164
+ `UPDATE memories SET status = 'archived', updated_at = ? WHERE ${conditions.join(" AND ")}`
165
+ ).run(ts, ...vals);
158
166
 
159
167
  return jsonResult({ archived_count: result.changes });
160
168
  }
@@ -5,31 +5,29 @@ export const summarizeTools = [
5
5
  {
6
6
  name: "memory.summarize",
7
7
  description:
8
- "Create a summary of memory entries. The calling agent provides the summary text. The server creates the summary record and transitions matching entries to 'summarized' status.",
8
+ "Create a summary of memory entries. Agent provides the summary text. Server creates the summary record and transitions entries to 'summarized'.",
9
9
  inputSchema: {
10
10
  type: "object" as const,
11
11
  properties: {
12
- agent_role: { type: "string", description: "Agent role being summarized" },
13
- project: { type: "string", description: "Project being summarized" },
14
- entry_type: { type: "string", description: "Entry type filter (omit for mixed)" },
15
- summary_title: { type: "string", description: "Title for the summary" },
16
- summary_body: { type: "string", description: "The summary text (markdown)" },
17
- before: { type: "string", description: "Summarize entries before this ISO date (default: 3 days ago)" },
18
- entry_ids: { type: "array", items: { type: "number" }, description: "Specific entry IDs to summarize (overrides date filter)" },
12
+ namespace: { type: "string", description: "Namespace to summarize" },
13
+ agent_role: { type: "string", description: "Agent role" },
14
+ summary_title: { type: "string", description: "Summary title" },
15
+ summary_body: { type: "string", description: "Summary content (markdown)" },
16
+ before: { type: "string", description: "Summarize entries before this ISO date" },
17
+ entry_ids: { type: "array", items: { type: "number" }, description: "Specific IDs to summarize" },
19
18
  },
20
- required: ["agent_role", "project", "summary_title", "summary_body"],
19
+ required: ["namespace", "summary_title", "summary_body"],
21
20
  },
22
21
  },
23
22
  {
24
23
  name: "memory.cleanup",
25
- description:
26
- "Identify entries needing summarization or archive old summarized entries. Use dry_run to preview without changes.",
24
+ description: "Identify stale entries needing summarization or archive old summarized entries.",
27
25
  inputSchema: {
28
26
  type: "object" as const,
29
27
  properties: {
30
28
  older_than_days: { type: "number", description: "Entries older than N days (default 7)" },
31
- min_entries: { type: "number", description: "Minimum entries per role+project to trigger (default 10)" },
32
- dry_run: { type: "boolean", description: "If true, just report what would happen (default true)" },
29
+ min_entries: { type: "number", description: "Min entries per scope to trigger (default 10)" },
30
+ dry_run: { type: "boolean", description: "Preview only (default true)" },
33
31
  },
34
32
  },
35
33
  },
@@ -42,58 +40,44 @@ export function handleSummarize(db: Database.Database, name: string, args: any)
42
40
  }
43
41
 
44
42
  function memorySummarize(db: Database.Database, args: any) {
45
- const { agent_role, project, entry_type, summary_title, summary_body } = args;
43
+ const { namespace, agent_role, summary_title, summary_body } = args;
46
44
 
47
- const summarize = db.transaction(() => {
45
+ const result = db.transaction(() => {
48
46
  let entryIds: number[];
49
47
 
50
48
  if (args.entry_ids?.length) {
51
49
  entryIds = args.entry_ids;
52
50
  } else {
53
51
  const cutoff = args.before || new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toISOString();
54
- const conditions = ["agent_role = ?", "project = ?", "status = 'active'", "occurred_at < ?"];
55
- const vals = [agent_role, project, cutoff];
56
- if (entry_type) { conditions.push("entry_type = ?"); vals.push(entry_type); }
52
+ const conditions = ["namespace = ?", "status = 'active'", "occurred_at < ?"];
53
+ const vals = [namespace, cutoff];
54
+ if (agent_role) { conditions.push("agent_role = ?"); vals.push(agent_role); }
57
55
 
58
56
  const rows = db.prepare(
59
- `SELECT id FROM memory_entries WHERE ${conditions.join(" AND ")} ORDER BY occurred_at ASC`
57
+ `SELECT id FROM memories WHERE ${conditions.join(" AND ")}`
60
58
  ).all(...vals) as any[];
61
59
  entryIds = rows.map(r => r.id);
62
60
  }
63
61
 
64
- if (entryIds.length === 0) {
65
- return { error: "No entries to summarize" };
66
- }
62
+ if (entryIds.length === 0) return { error: "No entries to summarize" };
67
63
 
68
- const entries = db.prepare(
69
- `SELECT MIN(occurred_at) as date_from, MAX(occurred_at) as date_to FROM memory_entries WHERE id IN (${entryIds.map(() => "?").join(",")})`
64
+ const range = db.prepare(
65
+ `SELECT MIN(occurred_at) as date_from, MAX(occurred_at) as date_to FROM memories WHERE id IN (${entryIds.map(() => "?").join(",")})`
70
66
  ).get(...entryIds) as any;
71
67
 
72
68
  const ts = now();
73
- const result = db.prepare(`
74
- INSERT INTO memory_summaries (agent_role, project, entry_type, title, body, entry_count, date_from, date_to, entry_ids, created_at)
75
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
76
- `).run(
77
- agent_role, project, entry_type || null,
78
- summary_title, summary_body,
79
- entryIds.length, entries.date_from, entries.date_to,
80
- JSON.stringify(entryIds), ts
81
- );
82
-
83
- const placeholders = entryIds.map(() => "?").join(",");
69
+ const sumResult = db.prepare(`
70
+ INSERT INTO summaries (scope, namespace, agent_role, title, content, entry_count, date_from, date_to, entry_ids, created_at)
71
+ VALUES ('project', ?, ?, ?, ?, ?, ?, ?, ?, ?)
72
+ `).run(namespace, agent_role || null, summary_title, summary_body, entryIds.length, range.date_from, range.date_to, JSON.stringify(entryIds), ts);
73
+
84
74
  db.prepare(
85
- `UPDATE memory_entries SET status = 'summarized', updated_at = ? WHERE id IN (${placeholders})`
75
+ `UPDATE memories SET status = 'summarized', updated_at = ? WHERE id IN (${entryIds.map(() => "?").join(",")})`
86
76
  ).run(ts, ...entryIds);
87
77
 
88
- return {
89
- summary_id: result.lastInsertRowid,
90
- entries_summarized: entryIds.length,
91
- date_from: entries.date_from,
92
- date_to: entries.date_to,
93
- };
94
- });
78
+ return { summary_id: Number(sumResult.lastInsertRowid), entries_summarized: entryIds.length };
79
+ })();
95
80
 
96
- const result = summarize();
97
81
  if ((result as any).error) return errorResult((result as any).error);
98
82
  return jsonResult(result);
99
83
  }
@@ -105,36 +89,26 @@ function memoryCleanup(db: Database.Database, args: any) {
105
89
 
106
90
  const cutoff = new Date(Date.now() - olderThanDays * 24 * 60 * 60 * 1000).toISOString();
107
91
 
108
- // Find scopes that need summarization
109
92
  const needsSummarization = db.prepare(`
110
- SELECT agent_role, project, COUNT(*) as entry_count,
93
+ SELECT namespace, agent_role, COUNT(*) as entry_count,
111
94
  MIN(occurred_at) as date_from, MAX(occurred_at) as date_to
112
- FROM memory_entries
113
- WHERE status = 'active' AND occurred_at < ?
114
- GROUP BY agent_role, project
95
+ FROM memories WHERE status = 'active' AND occurred_at < ?
96
+ GROUP BY namespace, agent_role
115
97
  HAVING COUNT(*) >= ?
116
- ORDER BY entry_count DESC
117
98
  `).all(cutoff, minEntries);
118
99
 
119
- // Find old summarized entries eligible for archival
120
100
  const archivalCutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString();
121
- const needsArchival = (db.prepare(`
122
- SELECT COUNT(*) as count FROM memory_entries
123
- WHERE status = 'summarized' AND updated_at < ?
124
- `).get(archivalCutoff) as any).count;
101
+ const needsArchival = (db.prepare(
102
+ "SELECT COUNT(*) as c FROM memories WHERE status = 'summarized' AND updated_at < ?"
103
+ ).get(archivalCutoff) as any).c;
125
104
 
105
+ let archived = 0;
126
106
  if (!dryRun && needsArchival > 0) {
127
107
  const ts = now();
128
- db.prepare(`
129
- UPDATE memory_entries SET status = 'archived', archived_at = ?, updated_at = ?
130
- WHERE status = 'summarized' AND updated_at < ?
131
- `).run(ts, ts, archivalCutoff);
108
+ archived = db.prepare(
109
+ "UPDATE memories SET status = 'archived', updated_at = ? WHERE status = 'summarized' AND updated_at < ?"
110
+ ).run(ts, archivalCutoff).changes;
132
111
  }
133
112
 
134
- return jsonResult({
135
- needs_summarization: needsSummarization,
136
- needs_archival: needsArchival,
137
- archived: dryRun ? 0 : needsArchival,
138
- dry_run: dryRun,
139
- });
113
+ return jsonResult({ needs_summarization: needsSummarization, needs_archival: needsArchival, archived, dry_run: dryRun });
140
114
  }
@@ -1,165 +0,0 @@
1
- import type Database from "better-sqlite3";
2
- import { now, jsonResult, errorResult } from "../db.ts";
3
-
4
- export const patternTools = [
5
- {
6
- name: "memory.patterns.detect",
7
- description:
8
- "Scan for recurring patterns across projects. Finds entries with similar titles or matching tags that appear in multiple projects.",
9
- inputSchema: {
10
- type: "object" as const,
11
- properties: {
12
- min_occurrences: { type: "number", description: "Minimum projects to count as a pattern (default 2)" },
13
- entry_type: { type: "string", description: "Restrict to entry type" },
14
- limit: { type: "number", description: "Max patterns to return (default 10)" },
15
- },
16
- },
17
- },
18
- {
19
- name: "memory.patterns.record",
20
- description:
21
- "Create or update a confirmed cross-project pattern.",
22
- inputSchema: {
23
- type: "object" as const,
24
- properties: {
25
- id: { type: "number", description: "Existing pattern ID to update (omit to create new)" },
26
- pattern_type: { type: "string", description: "Type (e.g. bug_pattern, process_pattern, architectural_pattern, anti_pattern)" },
27
- title: { type: "string", description: "Pattern name" },
28
- description: { type: "string", description: "Full description" },
29
- projects: { type: "array", items: { type: "string" }, description: "Projects where pattern appears" },
30
- agent_roles: { type: "array", items: { type: "string" }, description: "Roles that reported it" },
31
- entry_ids: { type: "array", items: { type: "number" }, description: "Memory entry IDs as evidence" },
32
- status: { type: "string", enum: ["active", "resolved", "archived"], description: "Pattern status" },
33
- },
34
- required: ["pattern_type", "title", "description"],
35
- },
36
- },
37
- {
38
- name: "memory.patterns.list",
39
- description: "List known cross-project patterns.",
40
- inputSchema: {
41
- type: "object" as const,
42
- properties: {
43
- status: { type: "string", enum: ["active", "resolved", "archived", "all"], description: "Filter by status (default: active)" },
44
- pattern_type: { type: "string", description: "Filter by pattern type" },
45
- project: { type: "string", description: "Filter patterns involving this project" },
46
- limit: { type: "number", description: "Max results (default 20)" },
47
- },
48
- },
49
- },
50
- ];
51
-
52
- export function handlePatterns(db: Database.Database, name: string, args: any) {
53
- if (name === "memory.patterns.detect") return patternsDetect(db, args);
54
- if (name === "memory.patterns.record") return patternsRecord(db, args);
55
- if (name === "memory.patterns.list") return patternsList(db, args);
56
- return null;
57
- }
58
-
59
- function patternsDetect(db: Database.Database, args: any) {
60
- const minOccurrences = args.min_occurrences ?? 2;
61
- const limit = args.limit ?? 10;
62
-
63
- // Find tags that appear across multiple projects
64
- const tagCondition = args.entry_type ? "AND e.entry_type = ?" : "";
65
- const tagVals = args.entry_type ? [args.entry_type] : [];
66
-
67
- const tagPatterns = db.prepare(`
68
- SELECT t.value as tag, COUNT(DISTINCT e.project) as project_count,
69
- GROUP_CONCAT(DISTINCT e.project) as projects,
70
- COUNT(*) as total_entries
71
- FROM memory_entries e, json_each(e.tags) t
72
- WHERE e.status = 'active' ${tagCondition}
73
- GROUP BY t.value
74
- HAVING COUNT(DISTINCT e.project) >= ?
75
- ORDER BY project_count DESC
76
- LIMIT ?
77
- `).all(...tagVals, minOccurrences, limit);
78
-
79
- // Find similar titles across projects using FTS5
80
- const titlePatterns = db.prepare(`
81
- SELECT e1.title, COUNT(DISTINCT e1.project) as project_count,
82
- GROUP_CONCAT(DISTINCT e1.project) as projects,
83
- COUNT(*) as total_entries
84
- FROM memory_entries e1
85
- WHERE e1.status = 'active' ${tagCondition}
86
- GROUP BY e1.title
87
- HAVING COUNT(DISTINCT e1.project) >= ?
88
- ORDER BY project_count DESC
89
- LIMIT ?
90
- `).all(...tagVals, minOccurrences, limit);
91
-
92
- return jsonResult({
93
- tag_patterns: tagPatterns,
94
- title_patterns: titlePatterns,
95
- });
96
- }
97
-
98
- function patternsRecord(db: Database.Database, args: any) {
99
- const ts = now();
100
-
101
- if (args.id) {
102
- const existing = db.prepare("SELECT * FROM memory_patterns WHERE id = ?").get(args.id) as any;
103
- if (!existing) return errorResult(`Pattern ${args.id} not found`);
104
-
105
- const sets: string[] = [];
106
- const vals: any[] = [];
107
-
108
- if (args.pattern_type) { sets.push("pattern_type = ?"); vals.push(args.pattern_type); }
109
- if (args.title) { sets.push("title = ?"); vals.push(args.title); }
110
- if (args.description) { sets.push("description = ?"); vals.push(args.description); }
111
- if (args.projects) { sets.push("projects = ?"); vals.push(JSON.stringify(args.projects)); }
112
- if (args.agent_roles) { sets.push("agent_roles = ?"); vals.push(JSON.stringify(args.agent_roles)); }
113
- if (args.entry_ids) { sets.push("entry_ids = ?"); vals.push(JSON.stringify(args.entry_ids)); }
114
- if (args.status) {
115
- sets.push("status = ?"); vals.push(args.status);
116
- if (args.status === "resolved") { sets.push("resolved_at = ?"); vals.push(ts); }
117
- }
118
-
119
- sets.push("last_seen = ?"); vals.push(ts);
120
- sets.push("updated_at = ?"); vals.push(ts);
121
- sets.push("occurrence_count = occurrence_count + 1");
122
-
123
- vals.push(args.id);
124
- db.prepare(`UPDATE memory_patterns SET ${sets.join(", ")} WHERE id = ?`).run(...vals);
125
-
126
- return jsonResult({ id: args.id, updated: true });
127
- }
128
-
129
- const result = db.prepare(`
130
- INSERT INTO memory_patterns (pattern_type, title, description, projects, agent_roles, entry_ids, occurrence_count, status, first_seen, last_seen, created_at, updated_at)
131
- VALUES (?, ?, ?, ?, ?, ?, 1, 'active', ?, ?, ?, ?)
132
- `).run(
133
- args.pattern_type,
134
- args.title,
135
- args.description,
136
- JSON.stringify(args.projects || []),
137
- JSON.stringify(args.agent_roles || []),
138
- JSON.stringify(args.entry_ids || []),
139
- ts, ts, ts, ts
140
- );
141
-
142
- return jsonResult({ id: result.lastInsertRowid, created: true });
143
- }
144
-
145
- function patternsList(db: Database.Database, args: any) {
146
- const conditions: string[] = [];
147
- const vals: any[] = [];
148
- const status = args.status || "active";
149
-
150
- if (status !== "all") { conditions.push("status = ?"); vals.push(status); }
151
- if (args.pattern_type) { conditions.push("pattern_type = ?"); vals.push(args.pattern_type); }
152
- if (args.project) {
153
- conditions.push("EXISTS (SELECT 1 FROM json_each(projects) WHERE json_each.value = ?)");
154
- vals.push(args.project);
155
- }
156
-
157
- const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
158
- const limit = args.limit || 20;
159
-
160
- const rows = db.prepare(
161
- `SELECT * FROM memory_patterns ${where} ORDER BY last_seen DESC LIMIT ?`
162
- ).all(...vals, limit);
163
-
164
- return jsonResult({ patterns: rows, count: rows.length });
165
- }