@launchapp-dev/ao-memory-mcp 2.0.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/db.d.ts +25 -0
- package/dist/db.js +90 -0
- package/dist/embeddings.d.ts +14 -0
- package/dist/embeddings.js +72 -0
- package/dist/schema.sql +194 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.js +56 -0
- package/dist/tools/context.d.ts +29 -0
- package/dist/tools/context.js +88 -0
- package/dist/tools/documents.d.ts +142 -0
- package/dist/tools/documents.js +201 -0
- package/dist/tools/episodes.d.ts +112 -0
- package/dist/tools/episodes.js +98 -0
- package/dist/tools/knowledge.d.ts +177 -0
- package/dist/tools/knowledge.js +235 -0
- package/dist/tools/recall.d.ts +153 -0
- package/dist/tools/recall.js +180 -0
- package/dist/tools/stats.d.ts +24 -0
- package/dist/tools/stats.js +50 -0
- package/dist/tools/store.d.ts +180 -0
- package/dist/tools/store.js +176 -0
- package/dist/tools/summarize.d.ts +74 -0
- package/dist/tools/summarize.js +92 -0
- package/package.json +9 -5
- package/migrate.ts +0 -250
- package/src/db.ts +0 -106
- package/src/embeddings.ts +0 -97
- package/src/server.ts +0 -70
- package/src/tools/context.ts +0 -106
- package/src/tools/documents.ts +0 -215
- package/src/tools/episodes.ts +0 -112
- package/src/tools/knowledge.ts +0 -248
- package/src/tools/recall.ts +0 -167
- package/src/tools/stats.ts +0 -51
- package/src/tools/store.ts +0 -168
- package/src/tools/summarize.ts +0 -114
- package/tsconfig.json +0 -12
package/dist/db.d.ts
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import Database from "better-sqlite3";
|
|
2
|
+
export declare function resolveDbPath(cliDbPath?: string): string;
|
|
3
|
+
export declare function initDb(dbPath: string): Database.Database;
|
|
4
|
+
export declare function initVec(db: Database.Database, dimensions: number): Promise<void>;
|
|
5
|
+
export declare function isVecAvailable(): boolean;
|
|
6
|
+
export declare function contentHash(...parts: string[]): string;
|
|
7
|
+
export declare function now(): string;
|
|
8
|
+
export declare function jsonResult(data: unknown): {
|
|
9
|
+
content: {
|
|
10
|
+
type: "text";
|
|
11
|
+
text: string;
|
|
12
|
+
}[];
|
|
13
|
+
};
|
|
14
|
+
export declare function errorResult(message: string): {
|
|
15
|
+
content: {
|
|
16
|
+
type: "text";
|
|
17
|
+
text: string;
|
|
18
|
+
}[];
|
|
19
|
+
isError: boolean;
|
|
20
|
+
};
|
|
21
|
+
export declare function touchAccess(db: Database.Database, id: number): void;
|
|
22
|
+
export declare function chunkText(text: string, maxChars?: number, overlap?: number): {
|
|
23
|
+
content: string;
|
|
24
|
+
offset: number;
|
|
25
|
+
}[];
|
package/dist/db.js
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import Database from "better-sqlite3";
|
|
2
|
+
import { createHash } from "node:crypto";
|
|
3
|
+
import { readFileSync } from "node:fs";
|
|
4
|
+
import { join, dirname } from "node:path";
|
|
5
|
+
import { mkdirSync } from "node:fs";
|
|
6
|
+
import { fileURLToPath } from "node:url";
|
|
7
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
8
|
+
let vecLoaded = false;
|
|
9
|
+
// Resolve the SQLite database file path.
// Precedence: explicit CLI argument > AO_MEMORY_DB env var > ~/.ao/memory.db.
// The ~/.ao directory is created on demand before the default path is returned.
export function resolveDbPath(cliDbPath) {
    const explicit = cliDbPath || process.env.AO_MEMORY_DB;
    if (explicit) {
        return explicit;
    }
    // "." covers platforms where neither HOME nor USERPROFILE is set.
    const home = process.env.HOME || process.env.USERPROFILE || ".";
    const defaultDir = join(home, ".ao");
    mkdirSync(defaultDir, { recursive: true });
    return join(defaultDir, "memory.db");
}
|
|
19
|
+
// Open (or create) the SQLite database at dbPath and apply schema.sql,
// which ships next to this module in dist/. Returns the live handle.
export function initDb(dbPath) {
    const handle = new Database(dbPath);
    const schemaSql = readFileSync(join(__dirname, "schema.sql"), "utf-8");
    handle.exec(schemaSql);
    return handle;
}
|
|
25
|
+
// Load the optional sqlite-vec extension and create the vec0 virtual tables
// used for vector similarity search. Idempotent: the module-level vecLoaded
// flag short-circuits repeat calls. If sqlite-vec is not installed, vector
// search is disabled (logged to stderr) and callers fall back to FTS-only.
export async function initVec(db, dimensions) {
    if (vecLoaded)
        return;
    try {
        // Lazy dynamic import so a missing optional package only disables
        // vectors instead of crashing server startup.
        const sqliteVec = await import("sqlite-vec");
        // Tolerate both named-export and default-export package shapes.
        const load = sqliteVec.load || sqliteVec.default?.load;
        if (load)
            load(db);
        vecLoaded = true;
    }
    catch (e) {
        console.error("[ao-memory] sqlite-vec not available, vector search disabled");
        return;
    }
    // One vec0 table per searchable corpus; `dimensions` must match the
    // embedding model's output size (see embeddings.js getDimensions).
    db.exec(`
    CREATE VIRTUAL TABLE IF NOT EXISTS vec_memories USING vec0(
      embedding float[${dimensions}] distance_metric=cosine
    );
    CREATE VIRTUAL TABLE IF NOT EXISTS vec_chunks USING vec0(
      embedding float[${dimensions}] distance_metric=cosine
    );
  `);
}
|
|
48
|
+
// True once initVec has successfully loaded the sqlite-vec extension.
// Callers use this to skip vector reads/writes when the extension is absent.
export function isVecAvailable() {
    return vecLoaded;
}
|
|
51
|
+
// Deterministic SHA-256 hex digest over the given parts, joined with a NUL
// separator so ("ab") and ("a","b") hash differently. Used for content
// deduplication (memories.content_hash).
export function contentHash(...parts) {
    const hasher = createHash("sha256");
    hasher.update(parts.join("\0"));
    return hasher.digest("hex");
}
|
|
54
|
+
// Current UTC timestamp in ISO-8601 form — the canonical time format used
// for every *_at column in the schema.
export function now() {
    const ts = new Date();
    return ts.toISOString();
}
|
|
57
|
+
// Wrap arbitrary data as an MCP text result (pretty-printed JSON payload).
export function jsonResult(data) {
    const text = JSON.stringify(data, null, 2);
    return { content: [{ type: "text", text }] };
}
|
|
60
|
+
// Wrap an error message as an MCP error result. The text payload is a JSON
// object so clients can parse it the same way as jsonResult payloads.
export function errorResult(message) {
    return {
        content: [{ type: "text", text: JSON.stringify({ error: message }) }],
        isError: true,
    };
}
|
|
63
|
+
// Record a read of memory row `id`: bump access_count and stamp
// last_accessed_at. Recall paths call this so frequently-used memories
// can be ranked higher later.
export function touchAccess(db, id) {
    const sql = "UPDATE memories SET last_accessed_at = ?, access_count = access_count + 1 WHERE id = ?";
    db.prepare(sql).run(now(), id);
}
|
|
66
|
+
// Split text into overlapping chunks of at most maxChars characters,
// preferring to cut at a paragraph break ("\n\n"), then a sentence break
// (". "), when one falls past ~30% of the window (avoids tiny fragments).
// Returns [{ content, offset }] where offset is the chunk's start index in
// the original text; short inputs come back as a single chunk.
//
// Fixes over the previous revision:
// - terminate once the final chunk reaches the end of the text (the old
//   `offset = end - overlap` step walked backwards after the last chunk and
//   re-emitted the tail forever — infinite loop for any text > maxChars);
// - force forward progress even if callers pass overlap >= maxChars.
export function chunkText(text, maxChars = 1000, overlap = 100) {
    if (text.length <= maxChars) {
        return [{ content: text, offset: 0 }];
    }
    const chunks = [];
    let offset = 0;
    while (offset < text.length) {
        let end = Math.min(offset + maxChars, text.length);
        if (end < text.length) {
            const paraBreak = text.lastIndexOf("\n\n", end);
            if (paraBreak > offset + maxChars * 0.3) {
                end = paraBreak + 2;
            }
            else {
                const sentBreak = text.lastIndexOf(". ", end);
                if (sentBreak > offset + maxChars * 0.3) {
                    end = sentBreak + 2;
                }
            }
        }
        chunks.push({ content: text.slice(offset, end).trim(), offset });
        if (end >= text.length) {
            break; // final chunk emitted; stepping back by `overlap` here looped forever
        }
        // Overlap the next chunk with this one's tail; Math.max guards against
        // overlap >= chunk size, which would stall the loop.
        offset = Math.max(end - overlap, offset + 1);
    }
    return chunks.filter(c => c.content.length > 0);
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type Database from "better-sqlite3";
|
|
2
|
+
export declare function getModelId(): string;
|
|
3
|
+
export declare function getDimensions(): number;
|
|
4
|
+
export declare function embed(text: string, isQuery?: boolean): Promise<Float32Array>;
|
|
5
|
+
export declare function storeVector(db: Database.Database, table: string, rowid: number, embedding: Float32Array): void;
|
|
6
|
+
export declare function deleteVector(db: Database.Database, table: string, rowid: number): void;
|
|
7
|
+
export declare function searchVectors(db: Database.Database, table: string, queryEmbedding: Float32Array, limit?: number): {
|
|
8
|
+
rowid: number;
|
|
9
|
+
distance: number;
|
|
10
|
+
}[];
|
|
11
|
+
export declare function hybridSearch(db: Database.Database, ftsTable: string, vecTable: string, queryText: string, queryEmbedding: Float32Array, limit?: number, alpha?: number): {
|
|
12
|
+
rowid: number;
|
|
13
|
+
score: number;
|
|
14
|
+
}[];
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { isVecAvailable } from "./db.js";
|
|
2
|
+
let extractor = null;
|
|
3
|
+
const DEFAULT_MODEL = "nomic-ai/nomic-embed-text-v1.5";
|
|
4
|
+
const NOMIC_DIMS = 768;
|
|
5
|
+
const MINILM_DIMS = 384;
|
|
6
|
+
// Embedding model id: AO_MEMORY_MODEL env override, else the nomic default.
export function getModelId() {
    const override = process.env.AO_MEMORY_MODEL;
    return override || "nomic-ai/nomic-embed-text-v1.5";
}
|
|
9
|
+
// True when the configured model id contains "nomic"; nomic models need
// task-prefixed inputs (see embed) and produce 768-dim vectors.
function isNomicModel() {
    return getModelId().indexOf("nomic") !== -1;
}
|
|
12
|
+
// Embedding dimensionality of the active model: 768 for nomic models,
// 384 otherwise (MiniLM-class models).
export function getDimensions() {
    if (isNomicModel()) {
        return NOMIC_DIMS;
    }
    return MINILM_DIMS;
}
|
|
15
|
+
// Lazily create and memoize the transformers.js feature-extraction pipeline
// for the configured model. The first call loads the model (progress goes to
// stderr so stdout stays clean for MCP stdio traffic); later calls return
// the cached pipeline.
async function getExtractor() {
    if (extractor)
        return extractor;
    const { pipeline } = await import("@huggingface/transformers");
    const model = getModelId();
    console.error(`[ao-memory] Loading embedding model: ${model}`);
    // dtype "q8": quantized weights — presumably smaller/faster at minor
    // accuracy cost; confirm against @huggingface/transformers docs.
    extractor = await pipeline("feature-extraction", model, { dtype: "q8" });
    console.error(`[ao-memory] Model ready (${getDimensions()}d)`);
    return extractor;
}
|
|
25
|
+
// Embed text into a mean-pooled, L2-normalized Float32Array.
// Nomic models require a task prefix distinguishing queries from documents;
// other models take the raw text unchanged.
export async function embed(text, isQuery = false) {
    const ext = await getExtractor();
    let input = text;
    if (isNomicModel()) {
        const prefix = isQuery ? "search_query" : "search_document";
        input = `${prefix}: ${text}`;
    }
    const output = await ext(input, { pooling: "mean", normalize: true });
    return new Float32Array(output.data);
}
|
|
33
|
+
// Upsert the embedding for `rowid` into a vec0 virtual table; no-op when
// sqlite-vec is unavailable. rowid is passed as BigInt since vec0 rowids
// are 64-bit. NOTE(review): Buffer.from(embedding.buffer) assumes the
// Float32Array spans its whole ArrayBuffer (true for vectors from embed());
// confirm if callers ever pass subarray views.
export function storeVector(db, table, rowid, embedding) {
    if (!isVecAvailable()) {
        return;
    }
    const sql = `INSERT OR REPLACE INTO ${table}(rowid, embedding) VALUES (?, ?)`;
    db.prepare(sql).run(BigInt(rowid), Buffer.from(embedding.buffer));
}
|
|
38
|
+
// Remove the embedding row for `rowid` from a vec0 table; no-op when
// sqlite-vec is unavailable.
export function deleteVector(db, table, rowid) {
    if (!isVecAvailable()) {
        return;
    }
    db.prepare(`DELETE FROM ${table} WHERE rowid = ?`).run(BigInt(rowid));
}
|
|
43
|
+
// K-nearest-neighbor search against a vec0 table. Returns up to `limit`
// { rowid, distance } rows in ascending cosine-distance order, or [] when
// sqlite-vec is unavailable.
export function searchVectors(db, table, queryEmbedding, limit = 20) {
    if (!isVecAvailable()) {
        return [];
    }
    const sql = `SELECT rowid, distance FROM ${table} WHERE embedding MATCH ? ORDER BY distance LIMIT ?`;
    return db.prepare(sql).all(Buffer.from(queryEmbedding.buffer), limit);
}
|
|
48
|
+
// Fuse FTS5 keyword search and vector similarity with Reciprocal Rank
// Fusion: each hit contributes 1/(RRF_K + rank), weighted `alpha` for the
// vector list and `1 - alpha` for the keyword list. Returns the top `limit`
// { rowid, score } pairs by fused score. Either leg may be empty (FTS
// syntax error, or sqlite-vec missing) and the other still ranks results.
export function hybridSearch(db, ftsTable, vecTable, queryText, queryEmbedding, limit = 10, alpha = 0.5) {
    const RRF_K = 60; // conventional RRF damping constant
    const scores = new Map();
    // FTS5 keyword results
    try {
        const ftsResults = db.prepare(`SELECT rowid FROM ${ftsTable} WHERE ${ftsTable} MATCH ? LIMIT 30`).all(queryText);
        ftsResults.forEach((r, i) => {
            const id = Number(r.rowid);
            scores.set(id, (scores.get(id) || 0) + (1 - alpha) * (1 / (RRF_K + i + 1)));
        });
    }
    // Deliberate best-effort: FTS MATCH throws on user query syntax it can't
    // parse; treat that as "no keyword hits" rather than failing the search.
    catch { }
    // Vector similarity results
    if (isVecAvailable()) {
        const vecResults = searchVectors(db, vecTable, queryEmbedding, 30);
        vecResults.forEach((r, i) => {
            const id = Number(r.rowid);
            scores.set(id, (scores.get(id) || 0) + alpha * (1 / (RRF_K + i + 1)));
        });
    }
    // Highest fused score first, truncated to `limit`.
    return [...scores.entries()]
        .sort((a, b) => b[1] - a[1])
        .slice(0, limit)
        .map(([rowid, score]) => ({ rowid, score }));
}
|
package/dist/schema.sql
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
PRAGMA journal_mode = WAL;
|
|
2
|
+
PRAGMA foreign_keys = ON;
|
|
3
|
+
|
|
4
|
+
-- ============================================================
|
|
5
|
+
-- MEMORIES — unified store for semantic, episodic, procedural
|
|
6
|
+
-- ============================================================
|
|
7
|
+
CREATE TABLE IF NOT EXISTS memories (
|
|
8
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
9
|
+
memory_type TEXT NOT NULL, -- semantic | episodic | procedural
|
|
10
|
+
scope TEXT NOT NULL DEFAULT 'project', -- global | user | project | session
|
|
11
|
+
namespace TEXT, -- project name, user id, session id, etc.
|
|
12
|
+
agent_role TEXT, -- planner, reviewer, qa-tester, or any custom role
|
|
13
|
+
title TEXT NOT NULL,
|
|
14
|
+
content TEXT NOT NULL,
|
|
15
|
+
-- references
|
|
16
|
+
task_id TEXT,
|
|
17
|
+
pr_number INTEGER,
|
|
18
|
+
run_id TEXT,
|
|
19
|
+
-- lifecycle
|
|
20
|
+
status TEXT NOT NULL DEFAULT 'active', -- active | summarized | archived
|
|
21
|
+
confidence REAL NOT NULL DEFAULT 1.0, -- 0.0-1.0, decays over time
|
|
22
|
+
superseded_by INTEGER REFERENCES memories(id),
|
|
23
|
+
-- temporal
|
|
24
|
+
tags TEXT NOT NULL DEFAULT '[]',
|
|
25
|
+
metadata TEXT NOT NULL DEFAULT '{}',
|
|
26
|
+
created_at TEXT NOT NULL,
|
|
27
|
+
occurred_at TEXT NOT NULL,
|
|
28
|
+
updated_at TEXT NOT NULL,
|
|
29
|
+
last_accessed_at TEXT,
|
|
30
|
+
access_count INTEGER NOT NULL DEFAULT 0,
|
|
31
|
+
content_hash TEXT NOT NULL
|
|
32
|
+
);
|
|
33
|
+
|
|
34
|
+
CREATE INDEX IF NOT EXISTS idx_mem_type ON memories(memory_type);
|
|
35
|
+
CREATE INDEX IF NOT EXISTS idx_mem_scope ON memories(scope, namespace);
|
|
36
|
+
CREATE INDEX IF NOT EXISTS idx_mem_role ON memories(agent_role);
|
|
37
|
+
CREATE INDEX IF NOT EXISTS idx_mem_status ON memories(status);
|
|
38
|
+
CREATE INDEX IF NOT EXISTS idx_mem_task ON memories(task_id);
|
|
39
|
+
CREATE INDEX IF NOT EXISTS idx_mem_occurred ON memories(occurred_at);
|
|
40
|
+
CREATE INDEX IF NOT EXISTS idx_mem_hash ON memories(content_hash);
|
|
41
|
+
CREATE INDEX IF NOT EXISTS idx_mem_ns_type ON memories(namespace, memory_type);
|
|
42
|
+
CREATE INDEX IF NOT EXISTS idx_mem_ns_role ON memories(namespace, agent_role);
|
|
43
|
+
CREATE INDEX IF NOT EXISTS idx_mem_confidence ON memories(confidence);
|
|
44
|
+
CREATE INDEX IF NOT EXISTS idx_mem_accessed ON memories(last_accessed_at);
|
|
45
|
+
|
|
46
|
+
-- FTS5 for memories
|
|
47
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
|
|
48
|
+
title, content,
|
|
49
|
+
content=memories, content_rowid=id
|
|
50
|
+
);
|
|
51
|
+
CREATE TRIGGER IF NOT EXISTS mem_fts_i AFTER INSERT ON memories BEGIN
|
|
52
|
+
INSERT INTO memories_fts(rowid, title, content) VALUES (new.id, new.title, new.content);
|
|
53
|
+
END;
|
|
54
|
+
CREATE TRIGGER IF NOT EXISTS mem_fts_u AFTER UPDATE ON memories BEGIN
|
|
55
|
+
INSERT INTO memories_fts(memories_fts, rowid, title, content) VALUES ('delete', old.id, old.title, old.content);
|
|
56
|
+
INSERT INTO memories_fts(rowid, title, content) VALUES (new.id, new.title, new.content);
|
|
57
|
+
END;
|
|
58
|
+
CREATE TRIGGER IF NOT EXISTS mem_fts_d AFTER DELETE ON memories BEGIN
|
|
59
|
+
INSERT INTO memories_fts(memories_fts, rowid, title, content) VALUES ('delete', old.id, old.title, old.content);
|
|
60
|
+
END;
|
|
61
|
+
|
|
62
|
+
-- ============================================================
|
|
63
|
+
-- DOCUMENTS — source documents ingested for RAG
|
|
64
|
+
-- ============================================================
|
|
65
|
+
CREATE TABLE IF NOT EXISTS documents (
|
|
66
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
67
|
+
namespace TEXT,
|
|
68
|
+
title TEXT NOT NULL,
|
|
69
|
+
source TEXT, -- file path, URL, or identifier
|
|
70
|
+
mime_type TEXT DEFAULT 'text/plain',
|
|
71
|
+
content TEXT NOT NULL, -- full original content
|
|
72
|
+
metadata TEXT NOT NULL DEFAULT '{}',
|
|
73
|
+
created_at TEXT NOT NULL,
|
|
74
|
+
updated_at TEXT NOT NULL
|
|
75
|
+
);
|
|
76
|
+
|
|
77
|
+
CREATE INDEX IF NOT EXISTS idx_doc_ns ON documents(namespace);
|
|
78
|
+
|
|
79
|
+
-- CHUNKS — document chunks with embeddings
|
|
80
|
+
CREATE TABLE IF NOT EXISTS chunks (
|
|
81
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
82
|
+
document_id INTEGER NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
|
|
83
|
+
chunk_index INTEGER NOT NULL,
|
|
84
|
+
content TEXT NOT NULL,
|
|
85
|
+
char_offset INTEGER NOT NULL DEFAULT 0,
|
|
86
|
+
char_length INTEGER NOT NULL DEFAULT 0,
|
|
87
|
+
metadata TEXT NOT NULL DEFAULT '{}',
|
|
88
|
+
created_at TEXT NOT NULL
|
|
89
|
+
);
|
|
90
|
+
|
|
91
|
+
CREATE INDEX IF NOT EXISTS idx_chunk_doc ON chunks(document_id);
|
|
92
|
+
|
|
93
|
+
-- FTS5 for chunks
|
|
94
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(
|
|
95
|
+
content,
|
|
96
|
+
content=chunks, content_rowid=id
|
|
97
|
+
);
|
|
98
|
+
CREATE TRIGGER IF NOT EXISTS chunk_fts_i AFTER INSERT ON chunks BEGIN
|
|
99
|
+
INSERT INTO chunks_fts(rowid, content) VALUES (new.id, new.content);
|
|
100
|
+
END;
|
|
101
|
+
CREATE TRIGGER IF NOT EXISTS chunk_fts_u AFTER UPDATE ON chunks BEGIN
|
|
102
|
+
INSERT INTO chunks_fts(chunks_fts, rowid, content) VALUES ('delete', old.id, old.content);
|
|
103
|
+
INSERT INTO chunks_fts(rowid, content) VALUES (new.id, new.content);
|
|
104
|
+
END;
|
|
105
|
+
CREATE TRIGGER IF NOT EXISTS chunk_fts_d AFTER DELETE ON chunks BEGIN
|
|
106
|
+
INSERT INTO chunks_fts(chunks_fts, rowid, content) VALUES ('delete', old.id, old.content);
|
|
107
|
+
END;
|
|
108
|
+
|
|
109
|
+
-- ============================================================
|
|
110
|
+
-- KNOWLEDGE GRAPH — entities and relations
|
|
111
|
+
-- ============================================================
|
|
112
|
+
CREATE TABLE IF NOT EXISTS entities (
|
|
113
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
114
|
+
name TEXT NOT NULL,
|
|
115
|
+
entity_type TEXT NOT NULL, -- project, person, technology, concept, file, etc.
|
|
116
|
+
namespace TEXT,
|
|
117
|
+
description TEXT,
|
|
118
|
+
metadata TEXT NOT NULL DEFAULT '{}',
|
|
119
|
+
created_at TEXT NOT NULL,
|
|
120
|
+
updated_at TEXT NOT NULL,
|
|
121
|
+
UNIQUE(name, entity_type, namespace)
|
|
122
|
+
);
|
|
123
|
+
|
|
124
|
+
CREATE INDEX IF NOT EXISTS idx_ent_type ON entities(entity_type);
|
|
125
|
+
CREATE INDEX IF NOT EXISTS idx_ent_ns ON entities(namespace);
|
|
126
|
+
|
|
127
|
+
CREATE TABLE IF NOT EXISTS relations (
|
|
128
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
129
|
+
source_entity_id INTEGER NOT NULL REFERENCES entities(id) ON DELETE CASCADE,
|
|
130
|
+
relation_type TEXT NOT NULL, -- uses, depends_on, created_by, part_of, related_to, etc.
|
|
131
|
+
target_entity_id INTEGER NOT NULL REFERENCES entities(id) ON DELETE CASCADE,
|
|
132
|
+
weight REAL NOT NULL DEFAULT 1.0,
|
|
133
|
+
memory_id INTEGER REFERENCES memories(id), -- evidence link
|
|
134
|
+
metadata TEXT NOT NULL DEFAULT '{}',
|
|
135
|
+
created_at TEXT NOT NULL,
|
|
136
|
+
UNIQUE(source_entity_id, relation_type, target_entity_id)
|
|
137
|
+
);
|
|
138
|
+
|
|
139
|
+
CREATE INDEX IF NOT EXISTS idx_rel_source ON relations(source_entity_id);
|
|
140
|
+
CREATE INDEX IF NOT EXISTS idx_rel_target ON relations(target_entity_id);
|
|
141
|
+
CREATE INDEX IF NOT EXISTS idx_rel_type ON relations(relation_type);
|
|
142
|
+
|
|
143
|
+
-- ============================================================
|
|
144
|
+
-- EPISODES — conversation/run history
|
|
145
|
+
-- ============================================================
|
|
146
|
+
CREATE TABLE IF NOT EXISTS episodes (
|
|
147
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
148
|
+
session_id TEXT NOT NULL,
|
|
149
|
+
namespace TEXT,
|
|
150
|
+
agent_role TEXT,
|
|
151
|
+
role TEXT NOT NULL, -- user | assistant | system
|
|
152
|
+
content TEXT NOT NULL,
|
|
153
|
+
summary TEXT,
|
|
154
|
+
metadata TEXT NOT NULL DEFAULT '{}',
|
|
155
|
+
created_at TEXT NOT NULL
|
|
156
|
+
);
|
|
157
|
+
|
|
158
|
+
CREATE INDEX IF NOT EXISTS idx_ep_session ON episodes(session_id);
|
|
159
|
+
CREATE INDEX IF NOT EXISTS idx_ep_ns ON episodes(namespace);
|
|
160
|
+
|
|
161
|
+
-- FTS5 for episodes
|
|
162
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS episodes_fts USING fts5(
|
|
163
|
+
content, summary,
|
|
164
|
+
content=episodes, content_rowid=id
|
|
165
|
+
);
|
|
166
|
+
CREATE TRIGGER IF NOT EXISTS ep_fts_i AFTER INSERT ON episodes BEGIN
|
|
167
|
+
INSERT INTO episodes_fts(rowid, content, summary) VALUES (new.id, new.content, new.summary);
|
|
168
|
+
END;
|
|
169
|
+
CREATE TRIGGER IF NOT EXISTS ep_fts_u AFTER UPDATE ON episodes BEGIN
|
|
170
|
+
INSERT INTO episodes_fts(episodes_fts, rowid, content, summary) VALUES ('delete', old.id, old.content, old.summary);
|
|
171
|
+
INSERT INTO episodes_fts(rowid, content, summary) VALUES (new.id, new.content, new.summary);
|
|
172
|
+
END;
|
|
173
|
+
-- Keep the external-content FTS index in sync on episode deletion.
-- FTS5's 'delete' command must be given the EXACT old column values;
-- the previous version passed old.content for the summary column, which
-- desynchronizes (corrupts) the index for any row whose summary differs
-- from its content. Fixed to (old.content, old.summary), matching the
-- column order declared in episodes_fts and the ep_fts_u trigger.
CREATE TRIGGER IF NOT EXISTS ep_fts_d AFTER DELETE ON episodes BEGIN
  INSERT INTO episodes_fts(episodes_fts, rowid, content, summary) VALUES ('delete', old.id, old.content, old.summary);
END;
|
|
176
|
+
|
|
177
|
+
-- ============================================================
|
|
178
|
+
-- SUMMARIES — rolled-up digests
|
|
179
|
+
-- ============================================================
|
|
180
|
+
CREATE TABLE IF NOT EXISTS summaries (
|
|
181
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
182
|
+
scope TEXT NOT NULL,
|
|
183
|
+
namespace TEXT,
|
|
184
|
+
agent_role TEXT,
|
|
185
|
+
title TEXT NOT NULL,
|
|
186
|
+
content TEXT NOT NULL,
|
|
187
|
+
entry_count INTEGER NOT NULL,
|
|
188
|
+
date_from TEXT NOT NULL,
|
|
189
|
+
date_to TEXT NOT NULL,
|
|
190
|
+
entry_ids TEXT NOT NULL, -- JSON array
|
|
191
|
+
created_at TEXT NOT NULL
|
|
192
|
+
);
|
|
193
|
+
|
|
194
|
+
CREATE INDEX IF NOT EXISTS idx_sum_ns ON summaries(namespace, agent_role);
|
package/dist/server.d.ts
ADDED
package/dist/server.js
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
#!/usr/bin/env node
// ao-memory MCP server entry point: opens the SQLite store, enables
// sqlite-vec when available, registers every tool group, and serves the
// MCP protocol over stdio.
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { ListToolsRequestSchema, CallToolRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
import { resolveDbPath, initDb, initVec, errorResult } from "./db.js";
import { getDimensions } from "./embeddings.js";
import { storeTools, handleStore } from "./tools/store.js";
import { recallTools, handleRecall } from "./tools/recall.js";
import { statsTools, handleStats } from "./tools/stats.js";
import { contextTools, handleContext } from "./tools/context.js";
import { summarizeTools, handleSummarize } from "./tools/summarize.js";
import { documentTools, handleDocuments } from "./tools/documents.js";
import { knowledgeTools, handleKnowledge } from "./tools/knowledge.js";
import { episodeTools, handleEpisodes } from "./tools/episodes.js";
// Parse CLI args (only `--db <path>` is recognized).
const args = process.argv.slice(2);
let dbPath;
for (let i = 0; i < args.length; i++) {
    if (args[i] === "--db" && args[i + 1])
        dbPath = args[++i];
}
const db = initDb(resolveDbPath(dbPath));
await initVec(db, getDimensions());
// All tool descriptors exposed via tools/list.
const allTools = [
    ...storeTools,
    ...recallTools,
    ...documentTools,
    ...knowledgeTools,
    ...episodeTools,
    ...contextTools,
    ...summarizeTools,
    ...statsTools,
];
// Handlers are tried in order; each returns null for tool names it does
// not own, so the first non-null result wins.
const handlers = [
    handleStore, handleRecall, handleDocuments, handleKnowledge,
    handleEpisodes, handleContext, handleSummarize, handleStats,
];
// Version bumped to 2.0.1 to match the published package (was stale "2.0.0").
const server = new Server({ name: "ao-memory-mcp", version: "2.0.1" }, { capabilities: { tools: {} } });
server.setRequestHandler(ListToolsRequestSchema, async () => ({
    tools: allTools,
}));
server.setRequestHandler(CallToolRequestSchema, async (req) => {
    const { name, arguments: input } = req.params;
    for (const handler of handlers) {
        const result = handler(db, name, input || {});
        if (result !== null) {
            // Handle async results (embed operations)
            if (result instanceof Promise)
                return await result;
            return result;
        }
    }
    return errorResult(`Unknown tool: ${name}`);
});
const transport = new StdioServerTransport();
await server.connect(transport);
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import type Database from "better-sqlite3";
|
|
2
|
+
export declare const contextTools: {
|
|
3
|
+
name: string;
|
|
4
|
+
description: string;
|
|
5
|
+
inputSchema: {
|
|
6
|
+
type: "object";
|
|
7
|
+
properties: {
|
|
8
|
+
namespace: {
|
|
9
|
+
type: string;
|
|
10
|
+
description: string;
|
|
11
|
+
};
|
|
12
|
+
agent_role: {
|
|
13
|
+
type: string;
|
|
14
|
+
description: string;
|
|
15
|
+
};
|
|
16
|
+
limit: {
|
|
17
|
+
type: string;
|
|
18
|
+
description: string;
|
|
19
|
+
};
|
|
20
|
+
};
|
|
21
|
+
required: string[];
|
|
22
|
+
};
|
|
23
|
+
}[];
|
|
24
|
+
export declare function handleContext(db: Database.Database, name: string, args: any): {
|
|
25
|
+
content: {
|
|
26
|
+
type: "text";
|
|
27
|
+
text: string;
|
|
28
|
+
}[];
|
|
29
|
+
};
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { jsonResult } from "../db.js";
|
|
2
|
+
// Tool descriptors for the context group — a single tool, memory.context,
// intended to be called once at the start of an agent run.
export const contextTools = [
    {
        name: "memory.context",
        description: "Agent boot tool — call at the start of each run to load all relevant memory. Returns recent memories, active decisions, related entities, episode summaries, and document count. Scoped by namespace and agent role.",
        inputSchema: {
            type: "object",
            properties: {
                namespace: { type: "string", description: "Project/scope to load context for" },
                agent_role: { type: "string", description: "Agent role requesting context" },
                limit: { type: "number", description: "Max entries per section (default 10)" },
            },
            required: ["namespace"],
        },
    },
];
|
|
17
|
+
// Dispatcher for the context tool group: returns the result for
// "memory.context", or null so the server tries the next handler group.
export function handleContext(db, name, args) {
    return name === "memory.context" ? memoryContext(db, args) : null;
}
|
|
22
|
+
// Build the full "agent boot" context payload for a namespace: recent
// memories, high-confidence semantic facts, procedural how-tos, the most
// connected entities, recent episode summaries, cross-project globals, and
// bookkeeping stats (including whether summarization is overdue).
// agent_role, when given, narrows only the recent-memories and staleness
// queries; the other sections are namespace-wide by design.
function memoryContext(db, args) {
    const { namespace, agent_role } = args;
    const limit = args.limit || 10;
    // Recent memories for this agent+namespace. The optional agent_role
    // predicate and its bound parameter are added together, keeping the
    // placeholder count and the argument list in sync.
    const recentMemories = db.prepare(`
    SELECT * FROM memories
    WHERE namespace = ? ${agent_role ? "AND agent_role = ?" : ""}
    AND status = 'active'
    ORDER BY occurred_at DESC LIMIT ?
  `).all(...(agent_role ? [namespace, agent_role, limit] : [namespace, limit]));
    // Active semantic memories (facts/knowledge), strongest and most-used first.
    const knowledge = db.prepare(`
    SELECT * FROM memories
    WHERE namespace = ? AND memory_type = 'semantic' AND status = 'active'
    ORDER BY confidence DESC, access_count DESC LIMIT ?
  `).all(namespace, limit);
    // Active procedural memories (how-to) for this namespace.
    const procedures = db.prepare(`
    SELECT * FROM memories
    WHERE namespace = ? AND memory_type = 'procedural' AND status = 'active'
    ORDER BY access_count DESC LIMIT ?
  `).all(namespace, limit);
    // Related entities, ranked by how many relations touch them (either end).
    const entities = db.prepare(`
    SELECT e.*, (SELECT COUNT(*) FROM relations r WHERE r.source_entity_id = e.id OR r.target_entity_id = e.id) as relation_count
    FROM entities e
    WHERE e.namespace = ?
    ORDER BY relation_count DESC LIMIT ?
  `).all(namespace, limit);
    // Latest summarized sessions (5 max). NOTE(review): bare `summary` under
    // GROUP BY with MAX(created_at) relies on SQLite returning the summary
    // from the row that achieved the MAX — documented SQLite behavior, but
    // worth confirming if this query is ever ported to another engine.
    const episodeSummaries = db.prepare(`
    SELECT DISTINCT session_id, summary, MAX(created_at) as last_at
    FROM episodes
    WHERE namespace = ? AND summary IS NOT NULL
    GROUP BY session_id
    ORDER BY last_at DESC LIMIT 5
  `).all(namespace);
    // Document count
    const docCount = db.prepare("SELECT COUNT(*) as count FROM documents WHERE namespace = ?").get(namespace).count;
    // Global memories (cross-project), capped at 5.
    const globalMemories = db.prepare(`
    SELECT * FROM memories
    WHERE scope = 'global' AND status = 'active'
    ORDER BY confidence DESC, occurred_at DESC LIMIT 5
  `).all();
    // Summarization heuristic: flag when 20+ active memories are older
    // than three days (same optional agent_role narrowing as above).
    const threeDaysAgo = new Date(Date.now() - 3 * 24 * 60 * 60 * 1000).toISOString();
    const staleCount = db.prepare(`
    SELECT COUNT(*) as count FROM memories
    WHERE namespace = ? ${agent_role ? "AND agent_role = ?" : ""}
    AND status = 'active' AND occurred_at < ?
  `).get(...(agent_role ? [namespace, agent_role, threeDaysAgo] : [namespace, threeDaysAgo])).count;
    // Stats
    const totalMemories = db.prepare("SELECT COUNT(*) as count FROM memories WHERE namespace = ? AND status = 'active'").get(namespace).count;
    return jsonResult({
        recent_memories: recentMemories,
        knowledge,
        procedures,
        entities,
        episode_summaries: episodeSummaries,
        global_memories: globalMemories,
        document_count: docCount,
        total_active_memories: totalMemories,
        summarization_needed: staleCount >= 20,
        stale_entry_count: staleCount,
    });
}
|