@mrtrinhvn/ag-kit 1.3.1 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -46
- package/bin/cli.js +79 -201
- package/package.json +1 -1
- package/template/.agent/knowledge/USER.md +4 -0
- package/template/.agent/knowledge/ag-kit-ecosystem.md +168 -0
- package/template/.agent/knowledge/ag-kit-elite.md +6 -3
- package/template/.agent/knowledge/context_isolation.md +16 -0
- package/template/.agent/scripts/_post-commit-hook +8 -0
- package/template/.agent/scripts/brain_builder.py +287 -0
- package/template/.agent/scripts/memory_mcp_server.py +423 -0
- package/template/.agent/scripts/memory_tool.py +367 -0
- package/template/.agent/scripts/receptionist_down.sh +5 -5
- package/template/.agent/scripts/receptionist_up.sh +13 -10
- package/template/.agent/scripts/refresh_brain.sh +21 -0
- package/template/.agent/scripts/repomap.py +32 -3
- package/template/.agent/skills/ag-kit-core/SKILL.md +88 -2
- package/template/.agent/skills/intelligent-routing/SKILL.md +20 -10
- package/template/.agent/skills/telegram-agentic-gateway/SKILL.md +3 -0
- package/template/.agent/skills/telegram-agentic-gateway/templates/start.sh.template +2 -2
|
@@ -0,0 +1,423 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
memory_mcp_server.py — Ag-Kit Memory MCP Server (Option B - Vector Ready)
|
|
4
|
+
|
|
5
|
+
Exposes the Two-Tier Memory (graph.db) as native MCP tools to Antigravity AI.
|
|
6
|
+
AI can call these tools directly without any shell commands or bootstrap.
|
|
7
|
+
It uses Ollama locally to automatically generate vector embeddings for semantic search.
|
|
8
|
+
|
|
9
|
+
Tools:
|
|
10
|
+
memory_save — Save a hot node immediately (auto embeds)
|
|
11
|
+
memory_search — Search nodes by keyword + cosine similarity
|
|
12
|
+
memory_link — Create edge between two nodes
|
|
13
|
+
memory_graph — Show node connections
|
|
14
|
+
memory_hot — List hot (working memory) nodes
|
|
15
|
+
memory_cold — List cold (consolidated) nodes
|
|
16
|
+
memory_consolidate — Merge old hot nodes into cold summaries
|
|
17
|
+
memory_status — DB stats (node counts, tiers)
|
|
18
|
+
|
|
19
|
+
Protocol: MCP stdio (JSON-RPC 2.0)
|
|
20
|
+
Run: python3 .agent/scripts/memory_mcp_server.py [--db PATH]
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
import argparse
import json
import math
import sqlite3
import struct
import sys
import urllib.request
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
|
|
32
|
+
|
|
33
|
+
# ─── Default DB path: resolve relative to this script ──────────────────────
# .agent/scripts/ -> .agent/memory/graph.db (created on demand by get_db()).
DEFAULT_DB = Path(__file__).parent.parent / "memory" / "graph.db"
# Closed set of node categories; tool_memory_save coerces anything else to "general".
VALID_CATEGORIES = {"user_pref", "decision", "error", "pattern", "context", "general"}
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# ─── Option B: Vector Embeddings (Ollama) ─────────────────────────────────────
|
|
39
|
+
|
|
40
|
+
def get_embedding(text: str) -> Optional[bytes]:
    """Fetch a vector embedding for *text* from a local Ollama server.

    Returns the embedding packed as native float32 bytes via struct,
    or None when Ollama is unreachable, times out, or returns no
    embedding. Callers treat None as "no vector available" — embeddings
    are an enhancement, never a hard failure.
    """
    try:
        req = urllib.request.Request(
            "http://localhost:11434/api/embed",
            data=json.dumps({"model": "nomic-embed-text", "input": text}).encode('utf-8'),
            # Be explicit: urllib's default Content-Type is
            # application/x-www-form-urlencoded, but we are posting JSON.
            headers={"Content-Type": "application/json"},
        )
        with urllib.request.urlopen(req, timeout=10) as res:
            data = json.loads(res.read().decode('utf-8'))
        floats = data.get("embeddings", [[]])[0]
        if not floats:
            return None
        return struct.pack(f"{len(floats)}f", *floats)
    except Exception:
        # Deliberate best-effort: any network/parse error degrades to
        # keyword-only search instead of crashing the MCP server.
        return None
|
|
53
|
+
|
|
54
|
+
def cosine_similarity(v1: bytes, v2: bytes) -> float:
    """Cosine similarity between two float32-packed vectors.

    Returns 0.0 for missing, empty, malformed, zero-norm, or
    differently-sized inputs, so callers never have to special-case a
    bad or absent embedding.
    """
    if not v1 or not v2:
        return 0.0
    if len(v1) != len(v2):
        # Vectors of different dimensionality (e.g. after a model switch)
        # are not comparable; previously zip() silently truncated the dot
        # product while the norms used the full vectors, giving a
        # misleading score.
        return 0.0
    try:
        f1 = struct.unpack(f"{len(v1)//4}f", v1)
        f2 = struct.unpack(f"{len(v2)//4}f", v2)
        dot = sum(a * b for a, b in zip(f1, f2))
        norm1 = math.sqrt(sum(a * a for a in f1))
        norm2 = math.sqrt(sum(b * b for b in f2))
        if norm1 == 0 or norm2 == 0:
            return 0.0
        return dot / (norm1 * norm2)
    except Exception:
        # struct errors (length not a multiple of 4, etc.) degrade to 0.0.
        return 0.0
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
# ─── DB ────────────────────────────────────────────────────────────────────
|
|
69
|
+
|
|
70
|
+
def get_db(db_path: Path) -> sqlite3.Connection:
    """Open (creating if necessary) the graph DB and guarantee its schema."""
    db_path.parent.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(str(db_path))
    connection.row_factory = sqlite3.Row
    # WAL mode lets concurrent readers coexist with the single writer.
    connection.execute("PRAGMA journal_mode=WAL")
    _ensure_schema(connection)
    return connection
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _ensure_schema(conn):
|
|
80
|
+
conn.executescript("""
|
|
81
|
+
CREATE TABLE IF NOT EXISTS nodes (
|
|
82
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
83
|
+
content TEXT NOT NULL,
|
|
84
|
+
category TEXT NOT NULL DEFAULT 'general',
|
|
85
|
+
energy INTEGER NOT NULL DEFAULT 100,
|
|
86
|
+
tier TEXT NOT NULL DEFAULT 'hot',
|
|
87
|
+
source_ids TEXT,
|
|
88
|
+
embedding BLOB,
|
|
89
|
+
created_at TEXT NOT NULL,
|
|
90
|
+
updated_at TEXT NOT NULL
|
|
91
|
+
);
|
|
92
|
+
CREATE TABLE IF NOT EXISTS edges (
|
|
93
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
94
|
+
from_node INTEGER NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
|
|
95
|
+
to_node INTEGER NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
|
|
96
|
+
relation TEXT NOT NULL DEFAULT 'related_to',
|
|
97
|
+
created_at TEXT NOT NULL
|
|
98
|
+
);
|
|
99
|
+
CREATE INDEX IF NOT EXISTS idx_nodes_tier ON nodes(tier);
|
|
100
|
+
CREATE INDEX IF NOT EXISTS idx_nodes_category ON nodes(category);
|
|
101
|
+
CREATE INDEX IF NOT EXISTS idx_nodes_energy ON nodes(energy);
|
|
102
|
+
CREATE INDEX IF NOT EXISTS idx_edges_from ON edges(from_node);
|
|
103
|
+
""")
|
|
104
|
+
cols = [r[1] for r in conn.execute("PRAGMA table_info(nodes)").fetchall()]
|
|
105
|
+
for col, typedef in [("tier", "TEXT DEFAULT 'hot'"), ("source_ids", "TEXT"), ("embedding", "BLOB")]:
|
|
106
|
+
if col not in cols:
|
|
107
|
+
conn.execute(f"ALTER TABLE nodes ADD COLUMN {col} {typedef}")
|
|
108
|
+
conn.commit()
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
# ─── Tool Implementations ──────────────────────────────────────────────────
|
|
112
|
+
|
|
113
|
+
def tool_memory_save(args: dict, db: Path) -> str:
    """Persist a new hot-tier node, embedding it via Ollama when possible.

    Unknown categories are coerced to 'general'; a missing/blank content
    argument is rejected with an error string.
    """
    content = args.get("content", "").strip()
    if not content:
        return "❌ content is required"
    category = args.get("category", "general")
    if category not in VALID_CATEGORIES:
        category = "general"

    timestamp = datetime.now().isoformat()
    embedding = get_embedding(content)

    conn = get_db(db)
    cursor = conn.execute(
        "INSERT INTO nodes (content, category, energy, tier, embedding, created_at, updated_at) VALUES (?, ?, 100, 'hot', ?, ?, ?)",
        (content, category, embedding, timestamp, timestamp),
    )
    conn.commit()
    conn.close()

    marker = "✨[Vector]" if embedding else "⚠️[No-Vector]"
    return f"✅ {marker} Saved hot node #{cursor.lastrowid} [{category}]: {content[:80]}"
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def tool_memory_search(args: dict, db: Path) -> str:
    """Hybrid search: exact LIKE matches first, then semantic neighbours.

    Exact matches score 1.0; semantic candidates must clear a 0.60
    cosine-similarity floor. Every returned node receives an energy
    reinforcement (+10, capped at 100) so recalled memories stay hot.
    """
    keyword = args.get("keyword", "").strip()
    if not keyword:
        return "❌ keyword is required"
    tier = args.get("tier")
    limit = int(args.get("limit", 8))
    # `tier` is checked against a fixed whitelist before interpolation,
    # so this f-string cannot inject arbitrary SQL.
    tier_filter = f"AND tier = '{tier}'" if tier in ("hot", "cold") else ""

    conn = get_db(db)

    # Pass 1: literal substring matches.
    matches = []
    seen_ids = set()
    for row in conn.execute(
        f"SELECT id, tier, category, energy, content, created_at FROM nodes WHERE content LIKE ? {tier_filter} ORDER BY energy DESC LIMIT ?",
        (f"%{keyword}%", limit),
    ).fetchall():
        seen_ids.add(row["id"])
        matches.append({"row": row, "score": 1.0, "match": "exact"})

    # Pass 2: cosine similarity over stored embeddings (skipped when
    # Ollama cannot embed the query).
    query_emb = get_embedding(keyword)
    if query_emb:
        candidates = conn.execute(
            f"SELECT id, tier, category, energy, content, created_at, embedding FROM nodes WHERE embedding IS NOT NULL {tier_filter}"
        ).fetchall()
        for row in candidates:
            if row["id"] in seen_ids:
                continue
            sim = cosine_similarity(query_emb, row["embedding"])
            if sim > 0.60:
                matches.append({"row": dict(row), "score": sim, "match": f"semantic {sim:.2f}"})

    matches.sort(key=lambda m: (m["score"], m["row"]["energy"]), reverse=True)
    matches = matches[:limit]

    if not matches:
        conn.close()
        return f"🔍 No results for '{keyword}'"

    # Reinforce every recalled node.
    hit_ids = [m["row"]["id"] for m in matches]
    if hit_ids:
        conn.execute(
            f"UPDATE nodes SET energy=MIN(100,energy+10), updated_at=? WHERE id IN ({','.join('?'*len(hit_ids))})",
            [datetime.now().isoformat()] + hit_ids,
        )
        conn.commit()
    conn.close()

    lines = [f"🔍 Found {len(matches)} result(s) for '{keyword}':\n"]
    for m in matches:
        row = m["row"]
        icon = "🔥" if row["tier"] == "hot" else "❄️"
        lines.append(f"{icon} #{row['id']} [{row['category']}] ⚡{row['energy']} ({m['match']}) — {row['content'][:150]}")
    return "\n".join(lines)
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def tool_memory_link(args: dict, db: Path) -> str:
    """Create a directed, labelled edge between two existing nodes.

    Both endpoints must exist; the relation defaults to 'related_to'.
    """
    from_id = args.get("from_id")
    to_id = args.get("to_id")
    relation = args.get("relation", "related_to")
    if not from_id or not to_id:
        return "❌ from_id and to_id are required"
    now = datetime.now().isoformat()

    conn = get_db(db)
    src = conn.execute("SELECT content FROM nodes WHERE id=?", (from_id,)).fetchone()
    dst = conn.execute("SELECT content FROM nodes WHERE id=?", (to_id,)).fetchone()
    if src is None or dst is None:
        conn.close()
        return f"❌ Node #{from_id} or #{to_id} not found"

    conn.execute(
        "INSERT INTO edges (from_node, to_node, relation, created_at) VALUES (?, ?, ?, ?)",
        (from_id, to_id, relation, now),
    )
    conn.commit()
    conn.close()
    return f"🔗 #{from_id} --[{relation}]--> #{to_id}\n From: {src['content'][:60]}\n To: {dst['content'][:60]}"
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def tool_memory_graph(args: dict, db: Path) -> str:
    """Render one node plus all of its outgoing and incoming edges as text."""
    node_id = args.get("node_id")
    if not node_id:
        return "❌ node_id is required"

    conn = get_db(db)
    node = conn.execute("SELECT * FROM nodes WHERE id=?", (node_id,)).fetchone()
    if node is None:
        conn.close()
        return f"❌ Node #{node_id} not found"

    report = [f"🕸️ Node #{node['id']} [{node['tier'].upper()}] [{node['category']}] ⚡{node['energy']}"]
    report.append(f" {node['content'][:200]}\n")

    outgoing = conn.execute("SELECT e.relation, e.to_node, n.content FROM edges e JOIN nodes n ON e.to_node=n.id WHERE e.from_node=?", (node_id,)).fetchall()
    if outgoing:
        report.append(f"→ OUTGOING ({len(outgoing)}):")
        for edge in outgoing:
            report.append(f" --[{edge['relation']}]--> #{edge['to_node']}: {edge['content'][:80]}")

    incoming = conn.execute("SELECT e.relation, e.from_node, n.content FROM edges e JOIN nodes n ON e.from_node=n.id WHERE e.to_node=?", (node_id,)).fetchall()
    if incoming:
        report.append(f"\n← INCOMING ({len(incoming)}):")
        for edge in incoming:
            report.append(f" #{edge['from_node']}: {edge['content'][:80]} --[{edge['relation']}]-->")

    conn.close()
    return "\n".join(report)
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def tool_memory_hot(args: dict, db: Path) -> str:
    """List working-memory (hot tier) nodes, highest energy first."""
    limit = int(args.get("limit", 10))
    conn = get_db(db)
    rows = conn.execute(
        "SELECT id, category, energy, content, embedding, created_at FROM nodes WHERE tier='hot' ORDER BY energy DESC, updated_at DESC LIMIT ?",
        (limit,),
    ).fetchall()
    conn.close()
    if not rows:
        return "🔥 No hot nodes yet."
    output = [f"🔥 HOT nodes ({len(rows)}):\n"]
    for row in rows:
        # ✨ marks nodes that carry a vector embedding.
        vector_mark = '✨' if row['embedding'] else ' '
        output.append(f" {vector_mark}#{row['id']} [{row['category']}] ⚡{row['energy']} 📅{row['created_at'][:10]}")
        output.append(f" {row['content'][:120]}")
    return "\n".join(output)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def tool_memory_cold(args: dict, db: Path) -> str:
    """List consolidated (cold tier) summary nodes, highest energy first."""
    limit = int(args.get("limit", 10))
    conn = get_db(db)
    rows = conn.execute(
        "SELECT id, category, energy, content, source_ids, updated_at FROM nodes WHERE tier='cold' ORDER BY energy DESC LIMIT ?",
        (limit,),
    ).fetchall()
    conn.close()
    if not rows:
        return "❄️ No cold nodes yet."
    output = [f"❄️ COLD nodes ({len(rows)} consolidated):\n"]
    for row in rows:
        # source_ids is a JSON array of the hot-node ids merged into this summary.
        source_count = len(json.loads(row["source_ids"] or "[]"))
        output.append(f" #{row['id']} [{row['category']}] ⚡{row['energy']} (from {source_count} hot nodes)")
        output.append(f" {row['content'][:200]}")
    return "\n".join(output)
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def tool_memory_consolidate(args: dict, db: Path) -> str:
    """Merge stale hot nodes (not updated within *days*) into per-category
    cold summaries.

    Each merged hot node is linked to its summary via a
    'consolidated_into' edge and drained of 40 energy (floored at 0);
    the summary node is inserted at energy 80 with a fresh embedding so
    it participates in semantic search.
    """
    days = int(args.get("days", 7))
    category = args.get("category")
    threshold = (datetime.now() - timedelta(days=days)).isoformat()

    conn = get_db(db)
    # Build the optional category filter with a bound parameter — the old
    # f-string interpolation (f"AND category='{category}'") was an SQL
    # injection vector for caller-supplied text.
    query = "SELECT id, content, category FROM nodes WHERE tier='hot' AND updated_at<?"
    params = [threshold]
    if category:
        query += " AND category=?"
        params.append(category)
    query += " ORDER BY category, energy DESC"
    hot_rows = conn.execute(query, params).fetchall()

    if not hot_rows:
        conn.close()
        return f"⚡ No hot nodes older than {days} days to consolidate."

    from collections import defaultdict
    groups = defaultdict(list)
    for r in hot_rows:
        groups[r["category"]].append(r)

    results = []
    now = datetime.now().isoformat()
    for cat, nodes in groups.items():
        node_ids = [r["id"] for r in nodes]
        # Cap the summary at 20 bullets (200 chars each) to bound its size.
        merged = "\n".join(f"• {r['content'][:200]}" for r in nodes[:20])
        summary = f"[Consolidated {len(nodes)} nodes — {cat}]\n{merged}"

        emb = get_embedding(summary)

        cur = conn.execute(
            "INSERT INTO nodes (content, category, energy, tier, source_ids, embedding, created_at, updated_at) VALUES (?, ?, 80, 'cold', ?, ?, ?, ?)",
            (summary, cat, json.dumps(node_ids), emb, now, now)
        )
        cold_id = cur.lastrowid
        for nid in node_ids:
            conn.execute("INSERT INTO edges (from_node, to_node, relation, created_at) VALUES (?, ?, 'consolidated_into', ?)", (nid, cold_id, now))
            conn.execute("UPDATE nodes SET energy=MAX(0, energy-40) WHERE id=?", (nid,))
        results.append(f"❄️ [{cat}] merged {len(nodes)} hot nodes → cold #{cold_id}")

    conn.commit()
    conn.close()
    return "\n".join(results) + f"\n\n✅ Done. Summary vectors calculated."
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
def tool_memory_status(args: dict, db: Path) -> str:
    """Summarise node/edge counts, tier split, embedding coverage, and
    per-category totals for the memory graph."""
    conn = get_db(db)

    def scalar(sql: str) -> int:
        # All status queries return a single COUNT(*) value.
        return conn.execute(sql).fetchone()[0]

    total = scalar("SELECT COUNT(*) FROM nodes")
    hot = scalar("SELECT COUNT(*) FROM nodes WHERE tier='hot'")
    cold = scalar("SELECT COUNT(*) FROM nodes WHERE tier='cold'")
    vecs = scalar("SELECT COUNT(*) FROM nodes WHERE embedding IS NOT NULL")
    edges = scalar("SELECT COUNT(*) FROM edges")
    cats = conn.execute("SELECT category, COUNT(*) n FROM nodes GROUP BY category ORDER BY n DESC").fetchall()
    conn.close()

    return "\n".join([
        f"🧠 Memory Graph Status",
        f" DB: {db}",
        f" Nodes: {total} total (🔥{hot} hot | ❄️{cold} cold | ✨{vecs} vector embeddings)",
        f" Edges: {edges}",
        f" Categories: " + ", ".join(f"{r['category']}({r['n']})" for r in cats)
    ])
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
# ─── MCP Protocol (stdio JSON-RPC 2.0) ────────────────────────────────────
|
|
330
|
+
|
|
331
|
+
# Tool metadata advertised via tools/list: tool name -> MCP "description"
# plus JSON-Schema "inputSchema" describing the arguments.
TOOLS = {
    "memory_save": {
        "description": "Save an important piece of knowledge as a hot memory node. Will automatically generate vector embeddings.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "content": {"type": "string", "description": "The knowledge to save"},
                # sorted() so the advertised enum order is deterministic
                # across runs (set iteration order varies with hash
                # randomization, which churned the schema seen by clients).
                "category": {"type": "string", "enum": sorted(VALID_CATEGORIES), "description": "Category of knowledge"}
            },
            "required": ["content"]
        }
    },
    "memory_search": {
        "description": "Search memory graph by keyword. Uses Semantic Cosine Similarity search under the hood.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "keyword": {"type": "string", "description": "Keyword to search"},
                "tier": {"type": "string", "enum": ["hot", "cold"], "description": "Search only in hot or cold tier"},
                "limit": {"type": "integer", "description": "Max results (default 8)"}
            },
            "required": ["keyword"]
        }
    },
    "memory_link": {
        "description": "Create a relationship (edge) between two memory nodes.",
        "inputSchema": {"type": "object", "properties": {"from_id": {"type": "integer"}, "to_id": {"type": "integer"}, "relation": {"type": "string"}}, "required": ["from_id", "to_id"]}
    },
    "memory_graph": {"description": "Show all edges (incoming and outgoing) for a node.", "inputSchema": {"type": "object", "properties": {"node_id": {"type": "integer"}}, "required": ["node_id"]}},
    "memory_hot": {"description": "List hot (working memory) nodes.", "inputSchema": {"type": "object", "properties": {"limit": {"type": "integer"}}}},
    "memory_cold": {"description": "List cold (consolidated long-term memory) nodes.", "inputSchema": {"type": "object", "properties": {"limit": {"type": "integer"}}}},
    "memory_consolidate": {"description": "Merge old hot nodes into cold consolidated summaries.", "inputSchema": {"type": "object", "properties": {"days": {"type": "integer"}, "category": {"type": "string"}}}},
    "memory_status": {"description": "Show memory graph statistics.", "inputSchema": {"type": "object", "properties": {}}}
}
|
|
365
|
+
|
|
366
|
+
# Dispatch table: tool name -> implementation. Looked up by handle() on
# every tools/call request; keys must stay in sync with TOOLS above.
TOOL_FNS = {
    "memory_save": tool_memory_save, "memory_search": tool_memory_search,
    "memory_link": tool_memory_link, "memory_graph": tool_memory_graph,
    "memory_hot": tool_memory_hot, "memory_cold": tool_memory_cold,
    "memory_consolidate": tool_memory_consolidate, "memory_status": tool_memory_status,
}
|
|
372
|
+
|
|
373
|
+
def send(obj: dict):
    """Emit one JSON-RPC message on stdout, newline-delimited and flushed
    immediately (the MCP stdio transport is line-oriented)."""
    payload = json.dumps(obj)
    sys.stdout.write(payload + "\n")
    sys.stdout.flush()
|
|
376
|
+
|
|
377
|
+
def handle(req: dict, db: Path):
    """Dispatch a single JSON-RPC 2.0 request per the MCP stdio protocol.

    Tool execution errors are reported as successful responses with
    isError=True (per MCP); unknown methods get a -32601 error — unless
    the request is a notification (no id), which is dropped silently as
    JSON-RPC requires.
    """
    method = req.get("method", "")
    req_id = req.get("id")

    if method == "initialize":
        send({"jsonrpc": "2.0", "id": req_id, "result": {"protocolVersion": "2024-11-05", "capabilities": {"tools": {}}, "serverInfo": {"name": "ag-kit-memory", "version": "1.0.1"}}})
        return
    if method == "tools/list":
        send({"jsonrpc": "2.0", "id": req_id, "result": {"tools": [{"name": name, **info} for name, info in TOOLS.items()]}})
        return
    if method == "tools/call":
        params = req.get("params", {})
        name = params.get("name", "")
        args = params.get("arguments", {})
        fn = TOOL_FNS.get(name)
        if fn is None:
            send({"jsonrpc": "2.0", "id": req_id, "error": {"code": -32601, "message": f"Tool not found: {name}"}})
            return
        try:
            result_text = fn(args, db)
        except Exception as e:
            send({"jsonrpc": "2.0", "id": req_id, "result": {"content": [{"type": "text", "text": f"❌ Error: {e}"}], "isError": True}})
        else:
            send({"jsonrpc": "2.0", "id": req_id, "result": {"content": [{"type": "text", "text": result_text}], "isError": False}})
        return
    if method == "notifications/initialized":
        return  # expected fire-and-forget notification; nothing to answer
    if req_id is not None:
        send({"jsonrpc": "2.0", "id": req_id, "error": {"code": -32601, "message": f"Method not found: {method}"}})
|
|
402
|
+
|
|
403
|
+
def main():
    """Parse CLI options, warm up the database, then serve newline-delimited
    JSON-RPC requests from stdin until EOF."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", default=str(DEFAULT_DB))
    opts = parser.parse_args()
    db = Path(opts.db)

    # Open the DB once up front so schema/permission errors surface
    # before we start answering requests.
    get_db(db).close()

    for raw_line in sys.stdin:
        raw_line = raw_line.strip()
        if not raw_line:
            continue
        try:
            handle(json.loads(raw_line), db)
        except json.JSONDecodeError:
            pass  # malformed frame: drop it, the peer gets no reply
        except Exception as e:
            sys.stderr.write(f"[memory_mcp] Error: {e}\n")


if __name__ == "__main__":
    main()
|