@launchapp-dev/ao-memory-mcp 2.0.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/db.d.ts +25 -0
- package/dist/db.js +90 -0
- package/dist/embeddings.d.ts +14 -0
- package/dist/embeddings.js +72 -0
- package/dist/schema.sql +194 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.js +56 -0
- package/dist/tools/context.d.ts +29 -0
- package/dist/tools/context.js +88 -0
- package/dist/tools/documents.d.ts +142 -0
- package/dist/tools/documents.js +201 -0
- package/dist/tools/episodes.d.ts +112 -0
- package/dist/tools/episodes.js +98 -0
- package/dist/tools/knowledge.d.ts +177 -0
- package/dist/tools/knowledge.js +235 -0
- package/dist/tools/recall.d.ts +153 -0
- package/dist/tools/recall.js +180 -0
- package/dist/tools/stats.d.ts +24 -0
- package/dist/tools/stats.js +50 -0
- package/dist/tools/store.d.ts +180 -0
- package/dist/tools/store.js +176 -0
- package/dist/tools/summarize.d.ts +74 -0
- package/dist/tools/summarize.js +92 -0
- package/package.json +9 -5
- package/migrate.ts +0 -250
- package/src/db.ts +0 -106
- package/src/embeddings.ts +0 -97
- package/src/server.ts +0 -70
- package/src/tools/context.ts +0 -106
- package/src/tools/documents.ts +0 -215
- package/src/tools/episodes.ts +0 -112
- package/src/tools/knowledge.ts +0 -248
- package/src/tools/recall.ts +0 -167
- package/src/tools/stats.ts +0 -51
- package/src/tools/store.ts +0 -168
- package/src/tools/summarize.ts +0 -114
- package/tsconfig.json +0 -12
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import { now, jsonResult, errorResult } from "../db.js";
|
|
2
|
+
// MCP tool declarations for the summarize family. Schemas only — the
// implementations live in handleSummarize/memorySummarize/memoryCleanup below.
export const summarizeTools = [
    {
        // Folds a batch of memory entries into a single summary record.
        name: "memory.summarize",
        description: "Create a summary of memory entries. Agent provides the summary text. Server creates the summary record and transitions entries to 'summarized'.",
        inputSchema: {
            type: "object",
            properties: {
                namespace: { type: "string", description: "Namespace to summarize" },
                agent_role: { type: "string", description: "Agent role" },
                summary_title: { type: "string", description: "Summary title" },
                summary_body: { type: "string", description: "Summary content (markdown)" },
                // `before` and `entry_ids` are alternative selectors; when
                // `entry_ids` is given it takes precedence (see memorySummarize).
                before: { type: "string", description: "Summarize entries before this ISO date" },
                entry_ids: { type: "array", items: { type: "number" }, description: "Specific IDs to summarize" },
            },
            required: ["namespace", "summary_title", "summary_body"],
        },
    },
    {
        // Maintenance tool: reports stale scopes and (optionally) archives
        // long-summarized entries. All parameters are optional.
        name: "memory.cleanup",
        description: "Identify stale entries needing summarization or archive old summarized entries.",
        inputSchema: {
            type: "object",
            properties: {
                older_than_days: { type: "number", description: "Entries older than N days (default 7)" },
                min_entries: { type: "number", description: "Min entries per scope to trigger (default 10)" },
                dry_run: { type: "boolean", description: "Preview only (default true)" },
            },
        },
    },
];
|
|
32
|
+
/**
 * Dispatch a summarize-family tool call to its implementation.
 *
 * Returns the tool result for "memory.summarize" / "memory.cleanup", or
 * null when `name` is not handled here so the server can try the next
 * handler in its chain.
 */
export function handleSummarize(db, name, args) {
    switch (name) {
        case "memory.summarize":
            return memorySummarize(db, args);
        case "memory.cleanup":
            return memoryCleanup(db, args);
        default:
            return null;
    }
}
|
|
39
|
+
/**
 * Implements the "memory.summarize" tool.
 *
 * Resolves the set of entry ids to summarize — either the ids passed
 * explicitly in `entry_ids`, or all 'active' entries in the namespace older
 * than `before` (default: 3 days ago) — then inserts one row into
 * `summaries` and flips the source entries to status 'summarized'. Both
 * writes happen inside a single transaction so they commit together.
 */
function memorySummarize(db, args) {
    const { namespace, agent_role, summary_title, summary_body } = args;
    // better-sqlite3 style: db.transaction(fn) returns a callable that runs
    // fn atomically; it is invoked immediately via the trailing ().
    const result = db.transaction(() => {
        let entryIds;
        if (args.entry_ids?.length) {
            // Caller chose the exact entries. NOTE(review): ids are not
            // validated against the namespace or 'active' status — confirm
            // callers are trusted to pass consistent ids.
            entryIds = args.entry_ids;
        }
        else {
            // Default selection window: entries before `before`, or a 3-day
            // cutoff. NOTE(review): differs from memory.cleanup's 7-day
            // default — confirm intentional.
            const cutoff = args.before || new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toISOString();
            const conditions = ["namespace = ?", "status = 'active'", "occurred_at < ?"];
            const vals = [namespace, cutoff];
            if (agent_role) {
                conditions.push("agent_role = ?");
                vals.push(agent_role);
            }
            const rows = db.prepare(`SELECT id FROM memories WHERE ${conditions.join(" AND ")}`).all(...vals);
            entryIds = rows.map(r => r.id);
        }
        if (entryIds.length === 0)
            return { error: "No entries to summarize" };
        // Date range covered by the selected entries; one "?" placeholder is
        // generated per id for the IN (...) list.
        const range = db.prepare(`SELECT MIN(occurred_at) as date_from, MAX(occurred_at) as date_to FROM memories WHERE id IN (${entryIds.map(() => "?").join(",")})`).get(...entryIds);
        const ts = now();
        const sumResult = db.prepare(`
      INSERT INTO summaries (scope, namespace, agent_role, title, content, entry_count, date_from, date_to, entry_ids, created_at)
      VALUES ('project', ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(namespace, agent_role || null, summary_title, summary_body, entryIds.length, range.date_from, range.date_to, JSON.stringify(entryIds), ts);
        // Mark the source entries summarized so they drop out of future
        // default selections.
        db.prepare(`UPDATE memories SET status = 'summarized', updated_at = ? WHERE id IN (${entryIds.map(() => "?").join(",")})`).run(ts, ...entryIds);
        return { summary_id: Number(sumResult.lastInsertRowid), entries_summarized: entryIds.length };
    })();
    if (result.error)
        return errorResult(result.error);
    return jsonResult(result);
}
|
|
72
|
+
/**
 * Implements the "memory.cleanup" tool.
 *
 * Two-part maintenance report:
 *  1. (namespace, agent_role) groups whose count of stale 'active' entries
 *     (older than `older_than_days`) meets `min_entries` — candidates for a
 *     memory.summarize call.
 *  2. 'summarized' entries not updated for 30 days — archival candidates.
 * Unless dry_run (default true), the archival candidates are actually
 * transitioned to 'archived'. The summarization report is never acted on
 * here; the agent is expected to follow up with memory.summarize.
 */
function memoryCleanup(db, args) {
    const olderThanDays = args.older_than_days ?? 7;
    const minEntries = args.min_entries ?? 10;
    const dryRun = args.dry_run ?? true;
    const cutoff = new Date(Date.now() - olderThanDays * 24 * 60 * 60 * 1000).toISOString();
    // Scopes with enough stale active entries to warrant a summary.
    const needsSummarization = db.prepare(`
    SELECT namespace, agent_role, COUNT(*) as entry_count,
           MIN(occurred_at) as date_from, MAX(occurred_at) as date_to
    FROM memories WHERE status = 'active' AND occurred_at < ?
    GROUP BY namespace, agent_role
    HAVING COUNT(*) >= ?
  `).all(cutoff, minEntries);
    // Archival window is a fixed 30 days, independent of older_than_days.
    const archivalCutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString();
    const needsArchival = db.prepare("SELECT COUNT(*) as c FROM memories WHERE status = 'summarized' AND updated_at < ?").get(archivalCutoff).c;
    let archived = 0;
    if (!dryRun && needsArchival > 0) {
        const ts = now();
        // .changes = number of rows the UPDATE touched (better-sqlite3).
        archived = db.prepare("UPDATE memories SET status = 'archived', updated_at = ? WHERE status = 'summarized' AND updated_at < ?").run(ts, archivalCutoff).changes;
    }
    return jsonResult({ needs_summarization: needsSummarization, needs_archival: needsArchival, archived, dry_run: dryRun });
}
|
package/package.json
CHANGED
|
@@ -1,15 +1,19 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@launchapp-dev/ao-memory-mcp",
|
|
3
|
-
"version": "2.0.
|
|
3
|
+
"version": "2.0.1",
|
|
4
4
|
"description": "Cognitive memory MCP server for AI agents — semantic search, document RAG, knowledge graph, episodic memory, and hybrid retrieval",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
|
7
|
-
"ao-memory-mcp": "./
|
|
7
|
+
"ao-memory-mcp": "./dist/server.js"
|
|
8
8
|
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist",
|
|
11
|
+
"src/schema.sql"
|
|
12
|
+
],
|
|
9
13
|
"scripts": {
|
|
10
|
-
"start": "node
|
|
11
|
-
"
|
|
12
|
-
"
|
|
14
|
+
"start": "node dist/server.js",
|
|
15
|
+
"build": "tsc && cp src/schema.sql dist/schema.sql",
|
|
16
|
+
"prepublishOnly": "npm run build"
|
|
13
17
|
},
|
|
14
18
|
"dependencies": {
|
|
15
19
|
"@huggingface/transformers": "^3.0.0",
|
package/migrate.ts
DELETED
|
@@ -1,250 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node --experimental-strip-types
|
|
2
|
-
/**
|
|
3
|
-
* Migration utility: imports existing .ao/memory/*.md files into the memory database.
|
|
4
|
-
*
|
|
5
|
-
* Usage:
|
|
6
|
-
* node --experimental-strip-types migrate.ts [--repos-dir <path>] [--db <path>]
|
|
7
|
-
*
|
|
8
|
-
* Defaults:
|
|
9
|
-
* --repos-dir: scans current directory and subdirectories for .ao/memory/
|
|
10
|
-
* --db: ~/.ao/memory.db
|
|
11
|
-
*/
|
|
12
|
-
import { readdirSync, readFileSync, existsSync, statSync } from "node:fs";
|
|
13
|
-
import { join, basename, dirname } from "node:path";
|
|
14
|
-
import { resolveDbPath, initDb, contentHash, now } from "./src/db.ts";
|
|
15
|
-
|
|
16
|
-
// Parse args
|
|
17
|
-
const argv = process.argv.slice(2);
|
|
18
|
-
let reposDir = ".";
|
|
19
|
-
let dbPath: string | undefined;
|
|
20
|
-
for (let i = 0; i < argv.length; i++) {
|
|
21
|
-
if (argv[i] === "--repos-dir" && argv[i + 1]) reposDir = argv[++i];
|
|
22
|
-
if (argv[i] === "--db" && argv[i + 1]) dbPath = argv[++i];
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
const db = initDb(resolveDbPath(dbPath));
|
|
26
|
-
|
|
27
|
-
const roleToFile: Record<string, string> = {
|
|
28
|
-
"planner.md": "planner",
|
|
29
|
-
"product-owner.md": "product-owner",
|
|
30
|
-
"reconciler.md": "reconciler",
|
|
31
|
-
"reviewer.md": "reviewer",
|
|
32
|
-
"qa-tester.md": "qa-tester",
|
|
33
|
-
};
|
|
34
|
-
|
|
35
|
-
const sectionToEntryType: Record<string, string> = {
|
|
36
|
-
"tasks enqueued": "task_dispatch",
|
|
37
|
-
"recently enqueued": "task_dispatch",
|
|
38
|
-
"rework dispatched": "task_dispatch",
|
|
39
|
-
"rebase dispatched": "task_dispatch",
|
|
40
|
-
"tasks skipped": "observation",
|
|
41
|
-
"capacity notes": "observation",
|
|
42
|
-
"queue status": "observation",
|
|
43
|
-
"pipeline health": "observation",
|
|
44
|
-
"decisions": "decision",
|
|
45
|
-
"tasks created": "decision",
|
|
46
|
-
"features assessed": "observation",
|
|
47
|
-
"gaps identified": "observation",
|
|
48
|
-
"tasks unblocked": "action",
|
|
49
|
-
"tasks marked done": "action",
|
|
50
|
-
"queue cleaned": "action",
|
|
51
|
-
"actions log": "action",
|
|
52
|
-
"prs merged": "review",
|
|
53
|
-
"prs with changes requested": "review",
|
|
54
|
-
"prs closed": "review",
|
|
55
|
-
"known patterns": "pattern",
|
|
56
|
-
"log": "test_result",
|
|
57
|
-
"test results": "test_result",
|
|
58
|
-
"bugs filed": "test_result",
|
|
59
|
-
"regressions": "test_result",
|
|
60
|
-
};
|
|
61
|
-
|
|
62
|
-
function guessEntryType(sectionHeader: string, agentRole: string): string {
|
|
63
|
-
const lower = sectionHeader.toLowerCase();
|
|
64
|
-
for (const [key, type] of Object.entries(sectionToEntryType)) {
|
|
65
|
-
if (lower.includes(key)) return type;
|
|
66
|
-
}
|
|
67
|
-
// Fallback by role
|
|
68
|
-
if (agentRole === "planner") return "task_dispatch";
|
|
69
|
-
if (agentRole === "product-owner") return "decision";
|
|
70
|
-
if (agentRole === "reconciler") return "action";
|
|
71
|
-
if (agentRole === "reviewer") return "review";
|
|
72
|
-
if (agentRole === "qa-tester") return "test_result";
|
|
73
|
-
return "observation";
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
interface ParsedEntry {
|
|
77
|
-
date: string;
|
|
78
|
-
title: string;
|
|
79
|
-
body: string;
|
|
80
|
-
entryType: string;
|
|
81
|
-
taskId?: string;
|
|
82
|
-
prNumber?: number;
|
|
83
|
-
runId?: string;
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
function parseMemoryFile(content: string, agentRole: string): ParsedEntry[] {
|
|
87
|
-
const entries: ParsedEntry[] = [];
|
|
88
|
-
const lines = content.split("\n");
|
|
89
|
-
let currentSection = "";
|
|
90
|
-
let currentDate = "";
|
|
91
|
-
let currentBlock: string[] = [];
|
|
92
|
-
|
|
93
|
-
function flushBlock() {
|
|
94
|
-
if (currentBlock.length === 0 || !currentDate) return;
|
|
95
|
-
const body = currentBlock.join("\n").trim();
|
|
96
|
-
if (!body) return;
|
|
97
|
-
|
|
98
|
-
const entryType = guessEntryType(currentSection, agentRole);
|
|
99
|
-
const firstLine = currentBlock.find(l => l.trim())?.trim() || "";
|
|
100
|
-
const title = firstLine.length > 120 ? firstLine.slice(0, 117) + "..." : firstLine;
|
|
101
|
-
|
|
102
|
-
// Extract task IDs
|
|
103
|
-
const taskMatch = body.match(/TASK-\d+/);
|
|
104
|
-
const prMatch = body.match(/(?:PR\s*#|#)(\d+)/);
|
|
105
|
-
const runMatch = body.match(/run\s+(\d+)/i);
|
|
106
|
-
|
|
107
|
-
entries.push({
|
|
108
|
-
date: currentDate,
|
|
109
|
-
title: title || `${agentRole} ${entryType} ${currentDate}`,
|
|
110
|
-
body,
|
|
111
|
-
entryType,
|
|
112
|
-
taskId: taskMatch?.[0],
|
|
113
|
-
prNumber: prMatch ? parseInt(prMatch[1]) : undefined,
|
|
114
|
-
runId: runMatch ? `run ${runMatch[1]}` : undefined,
|
|
115
|
-
});
|
|
116
|
-
}
|
|
117
|
-
|
|
118
|
-
for (const line of lines) {
|
|
119
|
-
// Section headers
|
|
120
|
-
const sectionMatch = line.match(/^##\s+(.+)/);
|
|
121
|
-
if (sectionMatch) {
|
|
122
|
-
flushBlock();
|
|
123
|
-
currentBlock = [];
|
|
124
|
-
currentSection = sectionMatch[1];
|
|
125
|
-
|
|
126
|
-
// Check if section header contains a date
|
|
127
|
-
const dateInHeader = currentSection.match(/(\d{4}-\d{2}-\d{2})/);
|
|
128
|
-
if (dateInHeader) currentDate = dateInHeader[1];
|
|
129
|
-
continue;
|
|
130
|
-
}
|
|
131
|
-
|
|
132
|
-
// Date patterns
|
|
133
|
-
const dateMatch = line.match(/\[(\d{4}-\d{2}-\d{2})\]/);
|
|
134
|
-
if (dateMatch) {
|
|
135
|
-
flushBlock();
|
|
136
|
-
currentBlock = [];
|
|
137
|
-
currentDate = dateMatch[1];
|
|
138
|
-
currentBlock.push(line);
|
|
139
|
-
continue;
|
|
140
|
-
}
|
|
141
|
-
|
|
142
|
-
// Separator — flush
|
|
143
|
-
if (line.match(/^---\s*$/)) {
|
|
144
|
-
flushBlock();
|
|
145
|
-
currentBlock = [];
|
|
146
|
-
continue;
|
|
147
|
-
}
|
|
148
|
-
|
|
149
|
-
currentBlock.push(line);
|
|
150
|
-
}
|
|
151
|
-
flushBlock();
|
|
152
|
-
|
|
153
|
-
return entries;
|
|
154
|
-
}
|
|
155
|
-
|
|
156
|
-
function findMemoryDirs(rootDir: string): { project: string; memoryDir: string }[] {
|
|
157
|
-
const results: { project: string; memoryDir: string }[] = [];
|
|
158
|
-
|
|
159
|
-
// Check if rootDir itself has .ao/memory
|
|
160
|
-
const directMemory = join(rootDir, ".ao", "memory");
|
|
161
|
-
if (existsSync(directMemory) && statSync(directMemory).isDirectory()) {
|
|
162
|
-
results.push({ project: basename(rootDir), memoryDir: directMemory });
|
|
163
|
-
}
|
|
164
|
-
|
|
165
|
-
// Scan subdirectories
|
|
166
|
-
try {
|
|
167
|
-
for (const entry of readdirSync(rootDir, { withFileTypes: true })) {
|
|
168
|
-
if (!entry.isDirectory() || entry.name.startsWith(".")) continue;
|
|
169
|
-
const memDir = join(rootDir, entry.name, ".ao", "memory");
|
|
170
|
-
if (existsSync(memDir) && statSync(memDir).isDirectory()) {
|
|
171
|
-
results.push({ project: entry.name, memoryDir: memDir });
|
|
172
|
-
}
|
|
173
|
-
}
|
|
174
|
-
} catch {}
|
|
175
|
-
|
|
176
|
-
return results;
|
|
177
|
-
}
|
|
178
|
-
|
|
179
|
-
// Main
|
|
180
|
-
const ts = now();
|
|
181
|
-
const memoryDirs = findMemoryDirs(reposDir);
|
|
182
|
-
const summary: Record<string, Record<string, number>> = {};
|
|
183
|
-
let totalImported = 0;
|
|
184
|
-
let totalSkipped = 0;
|
|
185
|
-
|
|
186
|
-
const insert = db.prepare(`
|
|
187
|
-
INSERT INTO memory_entries (entry_type, agent_role, project, title, body, task_id, pr_number, run_id, status, tags, metadata, created_at, occurred_at, updated_at, content_hash)
|
|
188
|
-
VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active', '[]', '{}', ?, ?, ?, ?)
|
|
189
|
-
`);
|
|
190
|
-
|
|
191
|
-
const checkHash = db.prepare("SELECT id FROM memory_entries WHERE content_hash = ?");
|
|
192
|
-
|
|
193
|
-
const importAll = db.transaction(() => {
|
|
194
|
-
for (const { project, memoryDir } of memoryDirs) {
|
|
195
|
-
summary[project] = {};
|
|
196
|
-
try {
|
|
197
|
-
for (const file of readdirSync(memoryDir)) {
|
|
198
|
-
const agentRole = roleToFile[file];
|
|
199
|
-
if (!agentRole) continue;
|
|
200
|
-
|
|
201
|
-
const content = readFileSync(join(memoryDir, file), "utf-8");
|
|
202
|
-
const entries = parseMemoryFile(content, agentRole);
|
|
203
|
-
|
|
204
|
-
let count = 0;
|
|
205
|
-
for (const entry of entries) {
|
|
206
|
-
const hash = contentHash(entry.entryType, agentRole, project, entry.title, entry.body);
|
|
207
|
-
if (checkHash.get(hash)) {
|
|
208
|
-
totalSkipped++;
|
|
209
|
-
continue;
|
|
210
|
-
}
|
|
211
|
-
|
|
212
|
-
insert.run(
|
|
213
|
-
entry.entryType, agentRole, project,
|
|
214
|
-
entry.title, entry.body,
|
|
215
|
-
entry.taskId || null, entry.prNumber || null, entry.runId || null,
|
|
216
|
-
ts, entry.date + "T00:00:00.000Z", ts, hash
|
|
217
|
-
);
|
|
218
|
-
count++;
|
|
219
|
-
totalImported++;
|
|
220
|
-
}
|
|
221
|
-
summary[project][agentRole] = count;
|
|
222
|
-
}
|
|
223
|
-
} catch (err) {
|
|
224
|
-
console.error(`Error processing ${project}: ${err}`);
|
|
225
|
-
}
|
|
226
|
-
}
|
|
227
|
-
});
|
|
228
|
-
|
|
229
|
-
importAll();
|
|
230
|
-
|
|
231
|
-
// Print results
|
|
232
|
-
console.log("\n=== Migration Complete ===\n");
|
|
233
|
-
console.log(`Scanned: ${memoryDirs.length} projects with .ao/memory/`);
|
|
234
|
-
console.log(`Imported: ${totalImported} entries`);
|
|
235
|
-
console.log(`Skipped (duplicates): ${totalSkipped}\n`);
|
|
236
|
-
|
|
237
|
-
const roles = [...new Set(Object.values(summary).flatMap(s => Object.keys(s)))].sort();
|
|
238
|
-
const header = ["Project", ...roles, "Total"].map(h => h.padEnd(16)).join(" | ");
|
|
239
|
-
console.log(header);
|
|
240
|
-
console.log("-".repeat(header.length));
|
|
241
|
-
|
|
242
|
-
for (const [project, counts] of Object.entries(summary).sort()) {
|
|
243
|
-
const total = Object.values(counts).reduce((a, b) => a + b, 0);
|
|
244
|
-
const row = [project, ...roles.map(r => String(counts[r] || 0)), String(total)]
|
|
245
|
-
.map(v => v.padEnd(16))
|
|
246
|
-
.join(" | ");
|
|
247
|
-
console.log(row);
|
|
248
|
-
}
|
|
249
|
-
|
|
250
|
-
console.log(`\nDatabase: ${resolveDbPath(dbPath)}`);
|
package/src/db.ts
DELETED
|
@@ -1,106 +0,0 @@
|
|
|
1
|
-
import Database from "better-sqlite3";
|
|
2
|
-
import { createHash } from "node:crypto";
|
|
3
|
-
import { readFileSync } from "node:fs";
|
|
4
|
-
import { join, dirname } from "node:path";
|
|
5
|
-
import { mkdirSync } from "node:fs";
|
|
6
|
-
import { fileURLToPath } from "node:url";
|
|
7
|
-
|
|
8
|
-
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
9
|
-
|
|
10
|
-
let vecLoaded = false;
|
|
11
|
-
|
|
12
|
-
export function resolveDbPath(cliDbPath?: string): string {
|
|
13
|
-
if (cliDbPath) return cliDbPath;
|
|
14
|
-
if (process.env.AO_MEMORY_DB) return process.env.AO_MEMORY_DB;
|
|
15
|
-
const home = process.env.HOME || process.env.USERPROFILE || ".";
|
|
16
|
-
const aoDir = join(home, ".ao");
|
|
17
|
-
mkdirSync(aoDir, { recursive: true });
|
|
18
|
-
return join(aoDir, "memory.db");
|
|
19
|
-
}
|
|
20
|
-
|
|
21
|
-
export function initDb(dbPath: string): Database.Database {
|
|
22
|
-
const db = new Database(dbPath);
|
|
23
|
-
const schema = readFileSync(join(__dirname, "schema.sql"), "utf-8");
|
|
24
|
-
db.exec(schema);
|
|
25
|
-
return db;
|
|
26
|
-
}
|
|
27
|
-
|
|
28
|
-
export function initVec(db: Database.Database, dimensions: number) {
|
|
29
|
-
if (vecLoaded) return;
|
|
30
|
-
try {
|
|
31
|
-
const sqliteVec = require("sqlite-vec");
|
|
32
|
-
sqliteVec.load(db);
|
|
33
|
-
vecLoaded = true;
|
|
34
|
-
} catch {
|
|
35
|
-
try {
|
|
36
|
-
// Fallback: the package might export differently
|
|
37
|
-
const mod = require("sqlite-vec");
|
|
38
|
-
if (mod.default?.load) mod.default.load(db);
|
|
39
|
-
else if (mod.load) mod.load(db);
|
|
40
|
-
vecLoaded = true;
|
|
41
|
-
} catch (e) {
|
|
42
|
-
console.error("[ao-memory] sqlite-vec not available, vector search disabled");
|
|
43
|
-
return;
|
|
44
|
-
}
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
db.exec(`
|
|
48
|
-
CREATE VIRTUAL TABLE IF NOT EXISTS vec_memories USING vec0(
|
|
49
|
-
embedding float[${dimensions}] distance_metric=cosine
|
|
50
|
-
);
|
|
51
|
-
CREATE VIRTUAL TABLE IF NOT EXISTS vec_chunks USING vec0(
|
|
52
|
-
embedding float[${dimensions}] distance_metric=cosine
|
|
53
|
-
);
|
|
54
|
-
`);
|
|
55
|
-
}
|
|
56
|
-
|
|
57
|
-
export function isVecAvailable(): boolean {
|
|
58
|
-
return vecLoaded;
|
|
59
|
-
}
|
|
60
|
-
|
|
61
|
-
export function contentHash(...parts: string[]): string {
|
|
62
|
-
return createHash("sha256").update(parts.join("\0")).digest("hex");
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
export function now(): string {
|
|
66
|
-
return new Date().toISOString();
|
|
67
|
-
}
|
|
68
|
-
|
|
69
|
-
export function jsonResult(data: unknown) {
|
|
70
|
-
return { content: [{ type: "text" as const, text: JSON.stringify(data, null, 2) }] };
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
export function errorResult(message: string) {
|
|
74
|
-
return { content: [{ type: "text" as const, text: JSON.stringify({ error: message }) }], isError: true };
|
|
75
|
-
}
|
|
76
|
-
|
|
77
|
-
export function touchAccess(db: Database.Database, id: number) {
|
|
78
|
-
db.prepare("UPDATE memories SET last_accessed_at = ?, access_count = access_count + 1 WHERE id = ?").run(now(), id);
|
|
79
|
-
}
|
|
80
|
-
|
|
81
|
-
export function chunkText(text: string, maxChars: number = 1000, overlap: number = 100): { content: string; offset: number }[] {
|
|
82
|
-
if (text.length <= maxChars) {
|
|
83
|
-
return [{ content: text, offset: 0 }];
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
const chunks: { content: string; offset: number }[] = [];
|
|
87
|
-
let offset = 0;
|
|
88
|
-
while (offset < text.length) {
|
|
89
|
-
let end = Math.min(offset + maxChars, text.length);
|
|
90
|
-
|
|
91
|
-
if (end < text.length) {
|
|
92
|
-
const paraBreak = text.lastIndexOf("\n\n", end);
|
|
93
|
-
if (paraBreak > offset + maxChars * 0.3) end = paraBreak + 2;
|
|
94
|
-
else {
|
|
95
|
-
const sentBreak = text.lastIndexOf(". ", end);
|
|
96
|
-
if (sentBreak > offset + maxChars * 0.3) end = sentBreak + 2;
|
|
97
|
-
}
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
chunks.push({ content: text.slice(offset, end).trim(), offset });
|
|
101
|
-
offset = end - overlap;
|
|
102
|
-
if (offset >= text.length) break;
|
|
103
|
-
}
|
|
104
|
-
|
|
105
|
-
return chunks.filter(c => c.content.length > 0);
|
|
106
|
-
}
|
package/src/embeddings.ts
DELETED
|
@@ -1,97 +0,0 @@
|
|
|
1
|
-
import type Database from "better-sqlite3";
|
|
2
|
-
import { isVecAvailable } from "./db.ts";
|
|
3
|
-
|
|
4
|
-
let extractor: any = null;
|
|
5
|
-
|
|
6
|
-
const DEFAULT_MODEL = "nomic-ai/nomic-embed-text-v1.5";
|
|
7
|
-
const NOMIC_DIMS = 768;
|
|
8
|
-
const MINILM_DIMS = 384;
|
|
9
|
-
|
|
10
|
-
export function getModelId(): string {
|
|
11
|
-
return process.env.AO_MEMORY_MODEL || DEFAULT_MODEL;
|
|
12
|
-
}
|
|
13
|
-
|
|
14
|
-
function isNomicModel(): boolean {
|
|
15
|
-
return getModelId().includes("nomic");
|
|
16
|
-
}
|
|
17
|
-
|
|
18
|
-
export function getDimensions(): number {
|
|
19
|
-
return isNomicModel() ? NOMIC_DIMS : MINILM_DIMS;
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
async function getExtractor() {
|
|
23
|
-
if (extractor) return extractor;
|
|
24
|
-
const { pipeline } = await import("@huggingface/transformers");
|
|
25
|
-
const model = getModelId();
|
|
26
|
-
console.error(`[ao-memory] Loading embedding model: ${model}`);
|
|
27
|
-
extractor = await pipeline("feature-extraction", model, { dtype: "q8" });
|
|
28
|
-
console.error(`[ao-memory] Model ready (${getDimensions()}d)`);
|
|
29
|
-
return extractor;
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
export async function embed(text: string, isQuery: boolean = false): Promise<Float32Array> {
|
|
33
|
-
const ext = await getExtractor();
|
|
34
|
-
const input = isNomicModel()
|
|
35
|
-
? (isQuery ? `search_query: ${text}` : `search_document: ${text}`)
|
|
36
|
-
: text;
|
|
37
|
-
const output = await ext(input, { pooling: "mean", normalize: true });
|
|
38
|
-
return new Float32Array(output.data);
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
export function storeVector(db: Database.Database, table: string, rowid: number, embedding: Float32Array) {
|
|
42
|
-
if (!isVecAvailable()) return;
|
|
43
|
-
db.prepare(`INSERT OR REPLACE INTO ${table}(rowid, embedding) VALUES (?, ?)`).run(
|
|
44
|
-
BigInt(rowid), Buffer.from(embedding.buffer)
|
|
45
|
-
);
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
export function deleteVector(db: Database.Database, table: string, rowid: number) {
|
|
49
|
-
if (!isVecAvailable()) return;
|
|
50
|
-
db.prepare(`DELETE FROM ${table} WHERE rowid = ?`).run(BigInt(rowid));
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
export function searchVectors(db: Database.Database, table: string, queryEmbedding: Float32Array, limit: number = 20): { rowid: number; distance: number }[] {
|
|
54
|
-
if (!isVecAvailable()) return [];
|
|
55
|
-
return db.prepare(
|
|
56
|
-
`SELECT rowid, distance FROM ${table} WHERE embedding MATCH ? ORDER BY distance LIMIT ?`
|
|
57
|
-
).all(Buffer.from(queryEmbedding.buffer), limit) as any[];
|
|
58
|
-
}
|
|
59
|
-
|
|
60
|
-
export function hybridSearch(
|
|
61
|
-
db: Database.Database,
|
|
62
|
-
ftsTable: string,
|
|
63
|
-
vecTable: string,
|
|
64
|
-
queryText: string,
|
|
65
|
-
queryEmbedding: Float32Array,
|
|
66
|
-
limit: number = 10,
|
|
67
|
-
alpha: number = 0.5
|
|
68
|
-
): { rowid: number; score: number }[] {
|
|
69
|
-
const RRF_K = 60;
|
|
70
|
-
const scores = new Map<number, number>();
|
|
71
|
-
|
|
72
|
-
// FTS5 keyword results
|
|
73
|
-
try {
|
|
74
|
-
const ftsResults = db.prepare(
|
|
75
|
-
`SELECT rowid FROM ${ftsTable} WHERE ${ftsTable} MATCH ? LIMIT 30`
|
|
76
|
-
).all(queryText) as any[];
|
|
77
|
-
|
|
78
|
-
ftsResults.forEach((r, i) => {
|
|
79
|
-
const id = Number(r.rowid);
|
|
80
|
-
scores.set(id, (scores.get(id) || 0) + (1 - alpha) * (1 / (RRF_K + i + 1)));
|
|
81
|
-
});
|
|
82
|
-
} catch {}
|
|
83
|
-
|
|
84
|
-
// Vector similarity results
|
|
85
|
-
if (isVecAvailable()) {
|
|
86
|
-
const vecResults = searchVectors(db, vecTable, queryEmbedding, 30);
|
|
87
|
-
vecResults.forEach((r, i) => {
|
|
88
|
-
const id = Number(r.rowid);
|
|
89
|
-
scores.set(id, (scores.get(id) || 0) + alpha * (1 / (RRF_K + i + 1)));
|
|
90
|
-
});
|
|
91
|
-
}
|
|
92
|
-
|
|
93
|
-
return [...scores.entries()]
|
|
94
|
-
.sort((a, b) => b[1] - a[1])
|
|
95
|
-
.slice(0, limit)
|
|
96
|
-
.map(([rowid, score]) => ({ rowid, score }));
|
|
97
|
-
}
|
package/src/server.ts
DELETED
|
@@ -1,70 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node --experimental-strip-types
|
|
2
|
-
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
3
|
-
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
4
|
-
import {
|
|
5
|
-
ListToolsRequestSchema,
|
|
6
|
-
CallToolRequestSchema,
|
|
7
|
-
} from "@modelcontextprotocol/sdk/types.js";
|
|
8
|
-
import { resolveDbPath, initDb, initVec, errorResult } from "./db.ts";
|
|
9
|
-
import { getDimensions } from "./embeddings.ts";
|
|
10
|
-
import { storeTools, handleStore } from "./tools/store.ts";
|
|
11
|
-
import { recallTools, handleRecall } from "./tools/recall.ts";
|
|
12
|
-
import { statsTools, handleStats } from "./tools/stats.ts";
|
|
13
|
-
import { contextTools, handleContext } from "./tools/context.ts";
|
|
14
|
-
import { summarizeTools, handleSummarize } from "./tools/summarize.ts";
|
|
15
|
-
import { documentTools, handleDocuments } from "./tools/documents.ts";
|
|
16
|
-
import { knowledgeTools, handleKnowledge } from "./tools/knowledge.ts";
|
|
17
|
-
import { episodeTools, handleEpisodes } from "./tools/episodes.ts";
|
|
18
|
-
|
|
19
|
-
// Parse CLI args
|
|
20
|
-
const args = process.argv.slice(2);
|
|
21
|
-
let dbPath: string | undefined;
|
|
22
|
-
for (let i = 0; i < args.length; i++) {
|
|
23
|
-
if (args[i] === "--db" && args[i + 1]) dbPath = args[++i];
|
|
24
|
-
}
|
|
25
|
-
|
|
26
|
-
const db = initDb(resolveDbPath(dbPath));
|
|
27
|
-
initVec(db, getDimensions());
|
|
28
|
-
|
|
29
|
-
const allTools = [
|
|
30
|
-
...storeTools,
|
|
31
|
-
...recallTools,
|
|
32
|
-
...documentTools,
|
|
33
|
-
...knowledgeTools,
|
|
34
|
-
...episodeTools,
|
|
35
|
-
...contextTools,
|
|
36
|
-
...summarizeTools,
|
|
37
|
-
...statsTools,
|
|
38
|
-
];
|
|
39
|
-
|
|
40
|
-
const handlers: Array<(db: any, name: string, args: any) => any> = [
|
|
41
|
-
handleStore, handleRecall, handleDocuments, handleKnowledge,
|
|
42
|
-
handleEpisodes, handleContext, handleSummarize, handleStats,
|
|
43
|
-
];
|
|
44
|
-
|
|
45
|
-
const server = new Server(
|
|
46
|
-
{ name: "ao-memory-mcp", version: "2.0.0" },
|
|
47
|
-
{ capabilities: { tools: {} } }
|
|
48
|
-
);
|
|
49
|
-
|
|
50
|
-
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
|
51
|
-
tools: allTools,
|
|
52
|
-
}));
|
|
53
|
-
|
|
54
|
-
server.setRequestHandler(CallToolRequestSchema, async (req) => {
|
|
55
|
-
const { name, arguments: input } = req.params;
|
|
56
|
-
|
|
57
|
-
for (const handler of handlers) {
|
|
58
|
-
const result = handler(db, name, input || {});
|
|
59
|
-
if (result !== null) {
|
|
60
|
-
// Handle async results (embed operations)
|
|
61
|
-
if (result instanceof Promise) return await result;
|
|
62
|
-
return result;
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
|
|
66
|
-
return errorResult(`Unknown tool: ${name}`);
|
|
67
|
-
});
|
|
68
|
-
|
|
69
|
-
const transport = new StdioServerTransport();
|
|
70
|
-
await server.connect(transport);
|