@launchapp-dev/ao-memory-mcp 1.0.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,15 +5,15 @@ export const contextTools = [
5
5
  {
6
6
  name: "memory.context",
7
7
  description:
8
- "Agent boot tool — call at the start of each run to load relevant memory. Returns recent entries, active decisions, cross-project patterns, summaries, and a summarization_needed flag.",
8
+ "Agent boot tool — call at the start of each run to load all relevant memory. Returns recent memories, active decisions, related entities, episode summaries, and document count. Scoped by namespace and agent role.",
9
9
  inputSchema: {
10
10
  type: "object" as const,
11
11
  properties: {
12
+ namespace: { type: "string", description: "Project/scope to load context for" },
12
13
  agent_role: { type: "string", description: "Agent role requesting context" },
13
- project: { type: "string", description: "Project the agent is working on" },
14
14
  limit: { type: "number", description: "Max entries per section (default 10)" },
15
15
  },
16
- required: ["agent_role", "project"],
16
+ required: ["namespace"],
17
17
  },
18
18
  },
19
19
  ];
@@ -24,53 +24,82 @@ export function handleContext(db: Database.Database, name: string, args: any) {
24
24
  }
25
25
 
26
26
  function memoryContext(db: Database.Database, args: any) {
27
- const { agent_role, project } = args;
27
+ const { namespace, agent_role } = args;
28
28
  const limit = args.limit || 10;
29
29
 
30
- const recentEntries = db.prepare(`
31
- SELECT * FROM memory_entries
32
- WHERE agent_role = ? AND project = ? AND status = 'active'
30
+ // Recent memories for this agent+namespace
31
+ const recentMemories = db.prepare(`
32
+ SELECT * FROM memories
33
+ WHERE namespace = ? ${agent_role ? "AND agent_role = ?" : ""}
34
+ AND status = 'active'
33
35
  ORDER BY occurred_at DESC LIMIT ?
34
- `).all(agent_role, project, limit);
36
+ `).all(...(agent_role ? [namespace, agent_role, limit] : [namespace, limit]));
35
37
 
36
- const activeDecisions = db.prepare(`
37
- SELECT * FROM memory_entries
38
- WHERE project = ? AND entry_type = 'decision' AND status = 'active'
39
- ORDER BY occurred_at DESC LIMIT ?
40
- `).all(project, limit);
38
+ // Active semantic memories (facts/knowledge) for this namespace
39
+ const knowledge = db.prepare(`
40
+ SELECT * FROM memories
41
+ WHERE namespace = ? AND memory_type = 'semantic' AND status = 'active'
42
+ ORDER BY confidence DESC, access_count DESC LIMIT ?
43
+ `).all(namespace, limit);
44
+
45
+ // Active procedural memories (how-to) for this namespace
46
+ const procedures = db.prepare(`
47
+ SELECT * FROM memories
48
+ WHERE namespace = ? AND memory_type = 'procedural' AND status = 'active'
49
+ ORDER BY access_count DESC LIMIT ?
50
+ `).all(namespace, limit);
41
51
 
42
- const activePatterns = db.prepare(`
43
- SELECT * FROM memory_patterns
44
- WHERE status = 'active'
45
- AND EXISTS (SELECT 1 FROM json_each(projects) WHERE json_each.value = ?)
46
- ORDER BY last_seen DESC LIMIT ?
47
- `).all(project, limit);
52
+ // Related entities
53
+ const entities = db.prepare(`
54
+ SELECT e.*, (SELECT COUNT(*) FROM relations r WHERE r.source_entity_id = e.id OR r.target_entity_id = e.id) as relation_count
55
+ FROM entities e
56
+ WHERE e.namespace = ?
57
+ ORDER BY relation_count DESC LIMIT ?
58
+ `).all(namespace, limit);
48
59
 
49
- const recentSummaries = db.prepare(`
50
- SELECT * FROM memory_summaries
51
- WHERE agent_role = ? AND project = ?
52
- ORDER BY created_at DESC LIMIT 5
53
- `).all(agent_role, project);
60
+ // Recent episode summaries
61
+ const episodeSummaries = db.prepare(`
62
+ SELECT DISTINCT session_id, summary, MAX(created_at) as last_at
63
+ FROM episodes
64
+ WHERE namespace = ? AND summary IS NOT NULL
65
+ GROUP BY session_id
66
+ ORDER BY last_at DESC LIMIT 5
67
+ `).all(namespace);
54
68
 
55
- const crossProjectAlerts = db.prepare(`
56
- SELECT * FROM memory_patterns
57
- WHERE status = 'active' AND occurrence_count >= 3
58
- ORDER BY last_seen DESC LIMIT 5
69
+ // Document count
70
+ const docCount = (db.prepare(
71
+ "SELECT COUNT(*) as count FROM documents WHERE namespace = ?"
72
+ ).get(namespace) as any).count;
73
+
74
+ // Global memories (cross-project)
75
+ const globalMemories = db.prepare(`
76
+ SELECT * FROM memories
77
+ WHERE scope = 'global' AND status = 'active'
78
+ ORDER BY confidence DESC, occurred_at DESC LIMIT 5
59
79
  `).all();
60
80
 
61
- // Check if summarization is needed: 20+ active entries older than 3 days
81
+ // Check if summarization needed
62
82
  const threeDaysAgo = new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toISOString();
63
83
  const staleCount = (db.prepare(`
64
- SELECT COUNT(*) as count FROM memory_entries
65
- WHERE agent_role = ? AND project = ? AND status = 'active' AND occurred_at < ?
66
- `).get(agent_role, project, threeDaysAgo) as any).count;
84
+ SELECT COUNT(*) as count FROM memories
85
+ WHERE namespace = ? ${agent_role ? "AND agent_role = ?" : ""}
86
+ AND status = 'active' AND occurred_at < ?
87
+ `).get(...(agent_role ? [namespace, agent_role, threeDaysAgo] : [namespace, threeDaysAgo])) as any).count;
88
+
89
+ // Stats
90
+ const totalMemories = (db.prepare(
91
+ "SELECT COUNT(*) as count FROM memories WHERE namespace = ? AND status = 'active'"
92
+ ).get(namespace) as any).count;
67
93
 
68
94
  return jsonResult({
69
- recent_entries: recentEntries,
70
- active_decisions: activeDecisions,
71
- active_patterns: activePatterns,
72
- recent_summaries: recentSummaries,
73
- cross_project_alerts: crossProjectAlerts,
95
+ recent_memories: recentMemories,
96
+ knowledge,
97
+ procedures,
98
+ entities,
99
+ episode_summaries: episodeSummaries,
100
+ global_memories: globalMemories,
101
+ document_count: docCount,
102
+ total_active_memories: totalMemories,
74
103
  summarization_needed: staleCount >= 20,
75
104
  stale_entry_count: staleCount,
76
105
  });
@@ -0,0 +1,215 @@
1
+ import type Database from "better-sqlite3";
2
+ import { now, jsonResult, errorResult, chunkText } from "../db.ts";
3
+ import { embed, storeVector, deleteVector, hybridSearch } from "../embeddings.ts";
4
+
5
/**
 * MCP tool schemas for the document subsystem: ingest, hybrid search, list,
 * get, and delete. Each entry advertises a tool name, a human-readable
 * description, and a JSON-Schema `inputSchema` used by the MCP client to
 * validate arguments. Calls are routed to the implementations below by
 * `handleDocuments`.
 */
export const documentTools = [
  {
    name: "memory.doc.ingest",
    description:
      "Ingest a document into memory. Automatically chunks and embeds for semantic retrieval. Great for architecture docs, specs, READMEs, code files, or any reference material.",
    inputSchema: {
      type: "object" as const,
      properties: {
        title: { type: "string", description: "Document title" },
        content: { type: "string", description: "Full document content" },
        source: { type: "string", description: "File path, URL, or identifier" },
        namespace: { type: "string", description: "Project/scope for this document" },
        mime_type: { type: "string", description: "Content type (default: text/plain)" },
        chunk_size: { type: "number", description: "Max chars per chunk (default: 1000)" },
        chunk_overlap: { type: "number", description: "Overlap chars between chunks (default: 100)" },
        metadata: { type: "object", description: "Custom metadata" },
      },
      // source/namespace are optional; docs without a source are never
      // deduplicated on re-ingest (see docIngest).
      required: ["title", "content"],
    },
  },
  {
    name: "memory.doc.search",
    description:
      "Search across ingested documents using hybrid semantic + keyword search. Returns relevant chunks with document context.",
    inputSchema: {
      type: "object" as const,
      properties: {
        query: { type: "string", description: "Search query" },
        namespace: { type: "string", description: "Restrict to namespace" },
        limit: { type: "number", description: "Max chunks to return (default 5)" },
        alpha: { type: "number", description: "Semantic vs keyword weight (default 0.6)" },
      },
      required: ["query"],
    },
  },
  {
    name: "memory.doc.list",
    description: "List ingested documents.",
    inputSchema: {
      type: "object" as const,
      properties: {
        namespace: { type: "string", description: "Filter by namespace" },
        limit: { type: "number", description: "Max results (default 50)" },
      },
    },
  },
  {
    name: "memory.doc.get",
    description: "Get a full document by ID, including all its chunks.",
    inputSchema: {
      type: "object" as const,
      properties: {
        id: { type: "number", description: "Document ID" },
      },
      required: ["id"],
    },
  },
  {
    name: "memory.doc.delete",
    description: "Delete a document and all its chunks.",
    inputSchema: {
      type: "object" as const,
      properties: {
        id: { type: "number", description: "Document ID" },
      },
      required: ["id"],
    },
  },
];
74
+
75
+ export function handleDocuments(db: Database.Database, name: string, args: any) {
76
+ if (name === "memory.doc.ingest") return docIngest(db, args);
77
+ if (name === "memory.doc.search") return docSearch(db, args);
78
+ if (name === "memory.doc.list") return docList(db, args);
79
+ if (name === "memory.doc.get") return docGet(db, args);
80
+ if (name === "memory.doc.delete") return docDelete(db, args);
81
+ return null;
82
+ }
83
+
84
+ async function docIngest(db: Database.Database, args: any) {
85
+ const ts = now();
86
+ const chunkSize = args.chunk_size || 1000;
87
+ const chunkOverlap = args.chunk_overlap || 100;
88
+
89
+ // Check for existing doc with same source
90
+ if (args.source) {
91
+ const existing = db.prepare("SELECT id FROM documents WHERE source = ? AND namespace IS ?").get(args.source, args.namespace || null) as any;
92
+ if (existing) {
93
+ // Re-ingest: delete old chunks
94
+ const oldChunks = db.prepare("SELECT id FROM chunks WHERE document_id = ?").all(existing.id) as any[];
95
+ for (const c of oldChunks) deleteVector(db, "vec_chunks", c.id);
96
+ db.prepare("DELETE FROM chunks WHERE document_id = ?").run(existing.id);
97
+ db.prepare("DELETE FROM documents WHERE id = ?").run(existing.id);
98
+ }
99
+ }
100
+
101
+ const docResult = db.prepare(`
102
+ INSERT INTO documents (namespace, title, source, mime_type, content, metadata, created_at, updated_at)
103
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
104
+ `).run(
105
+ args.namespace || null, args.title, args.source || null,
106
+ args.mime_type || "text/plain", args.content,
107
+ JSON.stringify(args.metadata || {}), ts, ts
108
+ );
109
+
110
+ const docId = Number(docResult.lastInsertRowid);
111
+ const textChunks = chunkText(args.content, chunkSize, chunkOverlap);
112
+
113
+ const insertChunk = db.prepare(`
114
+ INSERT INTO chunks (document_id, chunk_index, content, char_offset, char_length, metadata, created_at)
115
+ VALUES (?, ?, ?, ?, ?, '{}', ?)
116
+ `);
117
+
118
+ const chunkIds: number[] = [];
119
+ for (let i = 0; i < textChunks.length; i++) {
120
+ const c = textChunks[i];
121
+ const result = insertChunk.run(docId, i, c.content, c.offset, c.content.length, ts);
122
+ chunkIds.push(Number(result.lastInsertRowid));
123
+ }
124
+
125
+ // Embed all chunks
126
+ let embedded = 0;
127
+ for (let i = 0; i < textChunks.length; i++) {
128
+ try {
129
+ const embedding = await embed(textChunks[i].content);
130
+ storeVector(db, "vec_chunks", chunkIds[i], embedding);
131
+ embedded++;
132
+ } catch (e) {
133
+ console.error(`[ao-memory] Chunk embed failed:`, e);
134
+ }
135
+ }
136
+
137
+ return jsonResult({ document_id: docId, chunks: textChunks.length, embedded });
138
+ }
139
+
140
+ async function docSearch(db: Database.Database, args: any) {
141
+ const limit = args.limit || 5;
142
+ const alpha = args.alpha ?? 0.6;
143
+
144
+ let queryEmbedding: Float32Array;
145
+ try {
146
+ queryEmbedding = await embed(args.query, true);
147
+ } catch {
148
+ // FTS-only fallback
149
+ const rows = db.prepare(`
150
+ SELECT c.*, d.title as doc_title, d.source as doc_source
151
+ FROM chunks_fts f
152
+ JOIN chunks c ON c.id = f.rowid
153
+ JOIN documents d ON d.id = c.document_id
154
+ ${args.namespace ? "WHERE d.namespace = ?" : ""}
155
+ ORDER BY rank LIMIT ?
156
+ `).all(...(args.namespace ? [args.namespace, limit] : [limit]));
157
+ return jsonResult({ chunks: rows, count: rows.length, mode: "keyword_only" });
158
+ }
159
+
160
+ const results = hybridSearch(db, "chunks_fts", "vec_chunks", args.query, queryEmbedding, limit * 2, alpha);
161
+ if (results.length === 0) return jsonResult({ chunks: [], count: 0 });
162
+
163
+ const ids = results.map(r => r.rowid);
164
+ const scoreMap = new Map(results.map(r => [r.rowid, r.score]));
165
+
166
+ const conditions = [`c.id IN (${ids.map(() => "?").join(",")})`];
167
+ const vals: any[] = [...ids];
168
+ if (args.namespace) { conditions.push("d.namespace = ?"); vals.push(args.namespace); }
169
+
170
+ const rows = db.prepare(`
171
+ SELECT c.*, d.title as doc_title, d.source as doc_source, d.namespace as doc_namespace
172
+ FROM chunks c
173
+ JOIN documents d ON d.id = c.document_id
174
+ WHERE ${conditions.join(" AND ")}
175
+ `).all(...vals) as any[];
176
+
177
+ const scored = rows
178
+ .map(r => ({ ...r, _score: scoreMap.get(r.id) || 0 }))
179
+ .sort((a, b) => b._score - a._score)
180
+ .slice(0, limit);
181
+
182
+ return jsonResult({ chunks: scored, count: scored.length });
183
+ }
184
+
185
+ function docList(db: Database.Database, args: any) {
186
+ const conditions: string[] = [];
187
+ const vals: any[] = [];
188
+ if (args.namespace) { conditions.push("namespace = ?"); vals.push(args.namespace); }
189
+
190
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
191
+ const limit = args.limit || 50;
192
+
193
+ const rows = db.prepare(`
194
+ SELECT d.*, (SELECT COUNT(*) FROM chunks c WHERE c.document_id = d.id) as chunk_count
195
+ FROM documents d ${where}
196
+ ORDER BY d.created_at DESC LIMIT ?
197
+ `).all(...vals, limit);
198
+
199
+ return jsonResult({ documents: rows, count: rows.length });
200
+ }
201
+
202
+ function docGet(db: Database.Database, args: any) {
203
+ const doc = db.prepare("SELECT * FROM documents WHERE id = ?").get(args.id) as any;
204
+ if (!doc) return errorResult(`Document ${args.id} not found`);
205
+ const chunks = db.prepare("SELECT * FROM chunks WHERE document_id = ? ORDER BY chunk_index").all(args.id);
206
+ return jsonResult({ ...doc, chunks });
207
+ }
208
+
209
+ function docDelete(db: Database.Database, args: any) {
210
+ const chunks = db.prepare("SELECT id FROM chunks WHERE document_id = ?").all(args.id) as any[];
211
+ for (const c of chunks) deleteVector(db, "vec_chunks", c.id);
212
+ db.prepare("DELETE FROM chunks WHERE document_id = ?").run(args.id);
213
+ db.prepare("DELETE FROM documents WHERE id = ?").run(args.id);
214
+ return jsonResult({ deleted: true, chunks_removed: chunks.length });
215
+ }
@@ -0,0 +1,112 @@
1
import type Database from "better-sqlite3";
import { createHash } from "node:crypto";
import { now, jsonResult, errorResult } from "../db.ts";
3
+
4
/**
 * MCP tool schemas for episodic memory: log a turn/event, list episodes,
 * and store a session summary. Each entry advertises a tool name,
 * description, and JSON-Schema `inputSchema`; calls are routed to the
 * implementations below by `handleEpisodes`.
 */
export const episodeTools = [
  {
    name: "memory.episode.log",
    description:
      "Log a conversation turn or run event to episodic memory. Use for tracking what happened during agent runs.",
    inputSchema: {
      type: "object" as const,
      properties: {
        session_id: { type: "string", description: "Session/run identifier" },
        namespace: { type: "string", description: "Project or scope" },
        agent_role: { type: "string", description: "Agent role" },
        role: { type: "string", enum: ["user", "assistant", "system"], description: "Message role" },
        content: { type: "string", description: "Message or event content" },
        summary: { type: "string", description: "Optional short summary" },
        metadata: { type: "object", description: "Custom metadata (e.g. tool calls, tokens used)" },
      },
      required: ["session_id", "role", "content"],
    },
  },
  {
    name: "memory.episode.list",
    description: "List episodes for a session or namespace.",
    inputSchema: {
      type: "object" as const,
      properties: {
        session_id: { type: "string", description: "Filter by session" },
        namespace: { type: "string", description: "Filter by namespace" },
        agent_role: { type: "string", description: "Filter by agent role" },
        limit: { type: "number", description: "Max results (default 50)" },
        order: { type: "string", enum: ["newest", "oldest"], description: "Default: oldest" },
      },
    },
  },
  {
    name: "memory.episode.summarize",
    description: "Store a summary for a session. The calling agent provides the summary text.",
    inputSchema: {
      type: "object" as const,
      properties: {
        session_id: { type: "string", description: "Session to summarize" },
        namespace: { type: "string", description: "Scope" },
        agent_role: { type: "string", description: "Agent role" },
        summary: { type: "string", description: "Summary text" },
      },
      required: ["session_id", "summary"],
    },
  },
];
+ ];
52
+
53
+ export function handleEpisodes(db: Database.Database, name: string, args: any) {
54
+ if (name === "memory.episode.log") return episodeLog(db, args);
55
+ if (name === "memory.episode.list") return episodeList(db, args);
56
+ if (name === "memory.episode.summarize") return episodeSummarize(db, args);
57
+ return null;
58
+ }
59
+
60
+ function episodeLog(db: Database.Database, args: any) {
61
+ const ts = now();
62
+ const result = db.prepare(`
63
+ INSERT INTO episodes (session_id, namespace, agent_role, role, content, summary, metadata, created_at)
64
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
65
+ `).run(
66
+ args.session_id, args.namespace || null, args.agent_role || null,
67
+ args.role, args.content, args.summary || null,
68
+ JSON.stringify(args.metadata || {}), ts
69
+ );
70
+ return jsonResult({ id: Number(result.lastInsertRowid), created: true });
71
+ }
72
+
73
+ function episodeList(db: Database.Database, args: any) {
74
+ const conditions: string[] = [];
75
+ const vals: any[] = [];
76
+ if (args.session_id) { conditions.push("session_id = ?"); vals.push(args.session_id); }
77
+ if (args.namespace) { conditions.push("namespace = ?"); vals.push(args.namespace); }
78
+ if (args.agent_role) { conditions.push("agent_role = ?"); vals.push(args.agent_role); }
79
+
80
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
81
+ const order = args.order === "newest" ? "DESC" : "ASC";
82
+ const limit = args.limit || 50;
83
+
84
+ const rows = db.prepare(
85
+ `SELECT * FROM episodes ${where} ORDER BY created_at ${order} LIMIT ?`
86
+ ).all(...vals, limit);
87
+
88
+ return jsonResult({ episodes: rows, count: rows.length });
89
+ }
90
+
91
+ function episodeSummarize(db: Database.Database, args: any) {
92
+ // Update all episodes in the session with the summary
93
+ const result = db.prepare(
94
+ `UPDATE episodes SET summary = ? WHERE session_id = ? AND summary IS NULL`
95
+ ).run(args.summary, args.session_id);
96
+
97
+ // Also store as a memory for cross-session recall
98
+ const ts = now();
99
+ db.prepare(`
100
+ INSERT INTO memories (memory_type, scope, namespace, agent_role, title, content, status, confidence, tags, metadata, created_at, occurred_at, updated_at, content_hash)
101
+ VALUES ('episodic', 'session', ?, ?, ?, ?, 'active', 1.0, '["session_summary"]', ?, ?, ?, ?, ?)
102
+ `).run(
103
+ args.namespace || null, args.agent_role || null,
104
+ `Session ${args.session_id} summary`,
105
+ args.summary,
106
+ JSON.stringify({ session_id: args.session_id }),
107
+ ts, ts, ts,
108
+ require("node:crypto").createHash("sha256").update(`episode\0${args.session_id}\0${args.summary}`).digest("hex")
109
+ );
110
+
111
+ return jsonResult({ episodes_updated: result.changes, session_id: args.session_id });
112
+ }