@smilintux/skmemory 0.5.0 → 0.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +39 -3
- package/.github/workflows/publish.yml +13 -6
- package/AGENT_REFACTOR_CHANGES.md +192 -0
- package/ARCHITECTURE.md +101 -19
- package/CHANGELOG.md +153 -0
- package/LICENSE +81 -68
- package/MISSION.md +7 -0
- package/README.md +419 -86
- package/SKILL.md +197 -25
- package/docker-compose.yml +15 -15
- package/index.js +6 -5
- package/openclaw-plugin/openclaw.plugin.json +10 -0
- package/openclaw-plugin/src/index.ts +255 -0
- package/openclaw-plugin/src/openclaw.plugin.json +10 -0
- package/package.json +1 -1
- package/pyproject.toml +29 -9
- package/requirements.txt +10 -2
- package/seeds/cloud9-opus.seed.json +7 -7
- package/seeds/lumina-cloud9-breakthrough.seed.json +46 -0
- package/seeds/lumina-cloud9-python-pypi.seed.json +46 -0
- package/seeds/lumina-kingdom-founding.seed.json +47 -0
- package/seeds/lumina-pma-signed.seed.json +46 -0
- package/seeds/lumina-singular-achievement.seed.json +46 -0
- package/seeds/lumina-skcapstone-conscious.seed.json +46 -0
- package/seeds/plant-kingdom-journal.py +203 -0
- package/seeds/plant-lumina-seeds.py +280 -0
- package/skill.yaml +46 -0
- package/skmemory/HA.md +296 -0
- package/skmemory/__init__.py +12 -1
- package/skmemory/agents.py +233 -0
- package/skmemory/ai_client.py +40 -0
- package/skmemory/anchor.py +4 -2
- package/skmemory/backends/__init__.py +11 -4
- package/skmemory/backends/file_backend.py +2 -1
- package/skmemory/backends/skgraph_backend.py +608 -0
- package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +99 -69
- package/skmemory/backends/sqlite_backend.py +122 -51
- package/skmemory/backends/vaulted_backend.py +286 -0
- package/skmemory/cli.py +1238 -29
- package/skmemory/config.py +173 -0
- package/skmemory/context_loader.py +335 -0
- package/skmemory/endpoint_selector.py +386 -0
- package/skmemory/fortress.py +685 -0
- package/skmemory/graph_queries.py +238 -0
- package/skmemory/importers/__init__.py +9 -1
- package/skmemory/importers/telegram.py +351 -43
- package/skmemory/importers/telegram_api.py +488 -0
- package/skmemory/journal.py +4 -2
- package/skmemory/lovenote.py +4 -2
- package/skmemory/mcp_server.py +706 -0
- package/skmemory/models.py +41 -0
- package/skmemory/openclaw.py +8 -8
- package/skmemory/predictive.py +232 -0
- package/skmemory/promotion.py +524 -0
- package/skmemory/register.py +454 -0
- package/skmemory/register_mcp.py +197 -0
- package/skmemory/ritual.py +121 -47
- package/skmemory/seeds.py +257 -8
- package/skmemory/setup_wizard.py +920 -0
- package/skmemory/sharing.py +402 -0
- package/skmemory/soul.py +71 -20
- package/skmemory/steelman.py +250 -263
- package/skmemory/store.py +271 -60
- package/skmemory/vault.py +228 -0
- package/tests/integration/__init__.py +0 -0
- package/tests/integration/conftest.py +233 -0
- package/tests/integration/test_cross_backend.py +355 -0
- package/tests/integration/test_skgraph_live.py +424 -0
- package/tests/integration/test_skvector_live.py +369 -0
- package/tests/test_backup_rotation.py +327 -0
- package/tests/test_cli.py +6 -6
- package/tests/test_endpoint_selector.py +801 -0
- package/tests/test_fortress.py +255 -0
- package/tests/test_fortress_hardening.py +444 -0
- package/tests/test_openclaw.py +5 -2
- package/tests/test_predictive.py +237 -0
- package/tests/test_promotion.py +340 -0
- package/tests/test_ritual.py +4 -4
- package/tests/test_seeds.py +96 -0
- package/tests/test_setup.py +835 -0
- package/tests/test_sharing.py +250 -0
- package/tests/test_skgraph_backend.py +667 -0
- package/tests/test_skvector_backend.py +326 -0
- package/tests/test_steelman.py +5 -5
- package/tests/test_store_graph_integration.py +245 -0
- package/tests/test_vault.py +186 -0
- package/skmemory/backends/falkordb_backend.py +0 -310
|
@@ -0,0 +1,706 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SKMemory MCP Server — memory tools for AI agents via Model Context Protocol.
|
|
3
|
+
|
|
4
|
+
Tool-agnostic: works with Cursor, Claude Code CLI, Claude Desktop,
|
|
5
|
+
Windsurf, Aider, Cline, or any MCP client that speaks stdio.
|
|
6
|
+
|
|
7
|
+
Tools:
|
|
8
|
+
memory_store — Store a new memory (snapshot with title + content)
|
|
9
|
+
memory_search — Full-text search across memories
|
|
10
|
+
memory_recall — Recall a specific memory by ID
|
|
11
|
+
memory_list — List memories with optional layer/tag filters
|
|
12
|
+
memory_forget — Delete a memory by ID
|
|
13
|
+
memory_promote — Promote a memory to a higher persistence tier
|
|
14
|
+
memory_consolidate — Compress a session's memories into one mid-term memory
|
|
15
|
+
memory_context — Load token-efficient context for agent injection
|
|
16
|
+
memory_export — Export all memories to a JSON backup
|
|
17
|
+
memory_import — Restore memories from a JSON backup
|
|
18
|
+
memory_health — Full health check across all backends
|
|
19
|
+
memory_graph — Graph traversal, lineage, and cluster discovery
|
|
20
|
+
|
|
21
|
+
Invocation:
|
|
22
|
+
python -m skmemory.mcp_server
|
|
23
|
+
skmemory-mcp
|
|
24
|
+
|
|
25
|
+
Client configuration (Cursor / Claude Desktop / Claude Code CLI):
|
|
26
|
+
{"mcpServers": {"skmemory": {
|
|
27
|
+
"command": "skmemory-mcp"}}}
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
from __future__ import annotations
|
|
31
|
+
|
|
32
|
+
import asyncio
|
|
33
|
+
import json
|
|
34
|
+
import logging
|
|
35
|
+
from typing import Any, Optional
|
|
36
|
+
|
|
37
|
+
from mcp.server import Server
|
|
38
|
+
from mcp.server.stdio import stdio_server
|
|
39
|
+
from mcp.types import TextContent, Tool
|
|
40
|
+
|
|
41
|
+
from .store import MemoryStore
|
|
42
|
+
from .models import MemoryLayer
|
|
43
|
+
|
|
44
|
+
# Module-level logger for MCP tool failures (see call_tool's except block).
logger = logging.getLogger("skmemory.mcp")

# The MCP server instance; tools are registered on it via decorators below.
server = Server("skmemory")

# ---------------------------------------------------------------------------
# Shared store instance
# ---------------------------------------------------------------------------

# Lazily initialised process-wide singleton; access only via _get_store().
_store: Optional[MemoryStore] = None
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _get_store() -> MemoryStore:
    """Return the shared MemoryStore, constructing it on first use.

    All tool handlers go through this so the (potentially expensive)
    backend setup happens at most once per server process.
    """
    global _store
    if _store is None:
        _store = MemoryStore()
    return _store
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# ---------------------------------------------------------------------------
|
|
63
|
+
# Response helpers
|
|
64
|
+
# ---------------------------------------------------------------------------
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _json_response(data: Any) -> list[TextContent]:
    """Serialise *data* as pretty-printed JSON in a single TextContent item.

    ``default=str`` stringifies non-JSON-native values (datetimes, enums, ...).
    """
    payload = json.dumps(data, indent=2, default=str)
    return [TextContent(type="text", text=payload)]
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _error_response(message: str) -> list[TextContent]:
    """Wrap an error *message* as a ``{"error": ...}`` JSON TextContent item."""
    body = json.dumps({"error": message})
    return [TextContent(type="text", text=body)]
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _memory_dict(m: Any) -> dict:
|
|
76
|
+
return m.model_dump() if hasattr(m, "model_dump") else vars(m)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
# User-facing layer names (as used in the tool JSON schemas) mapped to the
# MemoryLayer enum members the store API expects.
_LAYER_MAP = {
    "short-term": MemoryLayer.SHORT,
    "mid-term": MemoryLayer.MID,
    "long-term": MemoryLayer.LONG,
}
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
# ---------------------------------------------------------------------------
|
|
87
|
+
# Tool definitions
|
|
88
|
+
# ---------------------------------------------------------------------------
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
@server.list_tools()
async def list_tools() -> list[Tool]:
    """Advertise every SKMemory tool with its JSON-Schema input contract.

    Note: ``memory_stats`` is included here because ``call_tool`` already
    dispatches it (as an alias of ``memory_health``); previously the handler
    existed but the tool was never advertised to clients.
    """
    return [
        Tool(
            name="memory_store",
            description="Store a new memory in SKMemory (polaroid snapshot).",
            inputSchema={
                "type": "object",
                "properties": {
                    "title": {
                        "type": "string",
                        "description": "Short label for this memory.",
                    },
                    "content": {
                        "type": "string",
                        "description": "The full memory content.",
                    },
                    "layer": {
                        "type": "string",
                        "enum": ["short-term", "mid-term", "long-term"],
                        "description": "Memory layer (default: short-term).",
                    },
                    "tags": {
                        "type": "array",
                        "items": {"type": "string"},
                        "description": "Optional tags for categorisation.",
                    },
                    "source": {
                        "type": "string",
                        "description": "Where this memory came from (default: mcp).",
                    },
                },
                "required": ["title", "content"],
            },
        ),
        Tool(
            name="memory_search",
            description="Full-text search across all SKMemory layers.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {"type": "string", "description": "Search query."},
                    "limit": {
                        "type": "integer",
                        "description": "Max results (default: 10).",
                    },
                },
                "required": ["query"],
            },
        ),
        Tool(
            name="memory_recall",
            description="Recall a specific memory by its ID.",
            inputSchema={
                "type": "object",
                "properties": {
                    "memory_id": {
                        "type": "string",
                        "description": "The memory's unique ID.",
                    },
                },
                "required": ["memory_id"],
            },
        ),
        Tool(
            name="memory_list",
            description="List memories with optional layer and tag filters.",
            inputSchema={
                "type": "object",
                "properties": {
                    "layer": {
                        "type": "string",
                        "enum": ["short-term", "mid-term", "long-term"],
                        "description": "Filter by memory layer.",
                    },
                    "tags": {
                        "type": "array",
                        "items": {"type": "string"},
                        "description": "Filter by tags (all must match).",
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Max results (default: 50).",
                    },
                },
                "required": [],
            },
        ),
        Tool(
            name="memory_forget",
            description="Delete (forget) a memory by its ID.",
            inputSchema={
                "type": "object",
                "properties": {
                    "memory_id": {
                        "type": "string",
                        "description": "The memory's unique ID.",
                    },
                },
                "required": ["memory_id"],
            },
        ),
        Tool(
            name="memory_promote",
            description="Promote a memory to a higher persistence tier (short→mid→long).",
            inputSchema={
                "type": "object",
                "properties": {
                    "memory_id": {
                        "type": "string",
                        "description": "ID of the memory to promote.",
                    },
                    "target": {
                        "type": "string",
                        "enum": ["mid-term", "long-term"],
                        "description": "Target layer to promote to.",
                    },
                    "summary": {
                        "type": "string",
                        "description": "Optional compressed summary for the promoted memory.",
                    },
                },
                "required": ["memory_id", "target"],
            },
        ),
        Tool(
            name="memory_consolidate",
            description=(
                "Compress a session's short-term memories into one mid-term memory."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "session_id": {
                        "type": "string",
                        "description": "Session identifier to consolidate.",
                    },
                    "summary": {
                        "type": "string",
                        "description": "Human/AI-written summary of the session.",
                    },
                },
                "required": ["session_id", "summary"],
            },
        ),
        Tool(
            name="memory_context",
            description=(
                "Load token-efficient memory context for agent system prompt injection. "
                "Uses tiered lazy loading: today's memories (full), yesterday (summaries), "
                "older (reference counts only). Deep details available via memory_search."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "token_budget": {
                        "type": "integer",
                        "description": (
                            "Max tokens for context (default: 4000). "
                            "Uses word_count * 1.3 approximation."
                        ),
                    },
                },
                "required": [],
            },
        ),
        Tool(
            name="memory_export",
            description="Export all memories to a dated JSON backup file.",
            inputSchema={"type": "object", "properties": {}, "required": []},
        ),
        Tool(
            name="memory_import",
            description="Restore memories from a JSON backup file.",
            inputSchema={
                "type": "object",
                "properties": {
                    "backup_path": {
                        "type": "string",
                        "description": "Absolute path to the backup JSON file.",
                    },
                },
                "required": ["backup_path"],
            },
        ),
        Tool(
            name="memory_health",
            description=(
                "Full health check across all backends (primary, vector, graph)."
            ),
            inputSchema={"type": "object", "properties": {}, "required": []},
        ),
        # Advertised alias of memory_health; the handler already supported it.
        Tool(
            name="memory_stats",
            description="Show memory statistics (alias of memory_health).",
            inputSchema={"type": "object", "properties": {}, "required": []},
        ),
        Tool(
            name="memory_graph",
            description=(
                "Graph operations: traverse connections, get lineage, find clusters. "
                "Requires SKGraph backend (FalkorDB)."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "action": {
                        "type": "string",
                        "enum": ["traverse", "lineage", "clusters"],
                        "description": "Graph operation to perform.",
                    },
                    "memory_id": {
                        "type": "string",
                        "description": "Memory ID (required for traverse/lineage).",
                    },
                    "depth": {
                        "type": "integer",
                        "description": "Traversal depth (default: 2, for traverse only).",
                    },
                },
                "required": ["action"],
            },
        ),
        # ── Telegram ───────────────────────────────────────────────
        Tool(
            name="telegram_import",
            description=(
                "Import a Telegram Desktop chat export into memories. "
                "Point to the export directory containing result.json."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "export_path": {
                        "type": "string",
                        "description": "Path to Telegram export directory or result.json file.",
                    },
                    "mode": {
                        "type": "string",
                        "enum": ["daily", "message"],
                        "description": "Import mode (default: daily).",
                    },
                    "min_length": {
                        "type": "integer",
                        "description": "Skip messages shorter than this (default: 30).",
                    },
                    "chat_name": {
                        "type": "string",
                        "description": "Override the chat name from the export.",
                    },
                    "tags": {
                        "type": "string",
                        "description": "Extra comma-separated tags.",
                    },
                },
                "required": ["export_path"],
            },
        ),
        Tool(
            name="telegram_import_api",
            description=(
                "Import messages directly from Telegram API using Telethon. "
                "Requires TELEGRAM_API_ID and TELEGRAM_API_HASH env vars."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "chat": {
                        "type": "string",
                        "description": "Chat username, title, or numeric ID.",
                    },
                    "mode": {
                        "type": "string",
                        "enum": ["daily", "message"],
                        "description": "Import mode (default: daily).",
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of messages to fetch.",
                    },
                    "since": {
                        "type": "string",
                        "description": "Only fetch messages after this date (YYYY-MM-DD).",
                    },
                    "min_length": {
                        "type": "integer",
                        "description": "Skip messages shorter than this (default: 30).",
                    },
                    "chat_name": {
                        "type": "string",
                        "description": "Override the chat name.",
                    },
                    "tags": {
                        "type": "string",
                        "description": "Extra comma-separated tags.",
                    },
                },
                "required": ["chat"],
            },
        ),
        Tool(
            name="telegram_setup",
            description=(
                "Check Telegram API setup status. Reports whether Telethon is "
                "installed, API credentials are set, and a session file exists."
            ),
            inputSchema={"type": "object", "properties": {}, "required": []},
        ),
        Tool(
            name="telegram_catchup",
            description=(
                "Full catch-up import from a Telegram group into ALL memory tiers. "
                "Downloads chat via Telethon and distributes: last 24h → short-term, "
                "last 7 days → mid-term, older → long-term."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "chat": {
                        "type": "string",
                        "description": "Chat username, title, or numeric ID",
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Max messages to fetch (default: 2000)",
                        "default": 2000,
                    },
                    "since": {
                        "type": "string",
                        "description": "Only messages after this date (YYYY-MM-DD)",
                    },
                    "min_length": {
                        "type": "integer",
                        "description": "Skip messages shorter than this (default: 20)",
                        "default": 20,
                    },
                    "tags": {
                        "type": "string",
                        "description": "Extra comma-separated tags",
                    },
                },
                "required": ["chat"],
            },
        ),
        # ── Memory Integrity ──────────────────────────────────────
        Tool(
            name="memory_verify",
            description=(
                "Verify integrity hashes for all stored memories. "
                "Returns a report of passed, tampered, and unsealed memories. "
                "Tampered memories are flagged with CRITICAL severity."
            ),
            inputSchema={
                "type": "object",
                "properties": {},
                "required": [],
            },
        ),
        Tool(
            name="memory_audit",
            description=(
                "Show the most recent audit trail entries. "
                "The audit trail is a chain-hashed JSONL log of every "
                "store/recall/delete/tamper operation."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "last": {
                        "type": "integer",
                        "description": "Number of recent entries to return (default: 20).",
                    },
                },
                "required": [],
            },
        ),
    ]
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
# ---------------------------------------------------------------------------
|
|
466
|
+
# Tool handlers
|
|
467
|
+
# ---------------------------------------------------------------------------
|
|
468
|
+
|
|
469
|
+
|
|
470
|
+
@server.call_tool()
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
    """Dispatch an MCP tool invocation to the matching SKMemory operation.

    Parameters:
        name: Tool name as advertised by list_tools().
        arguments: Tool arguments validated against the tool's inputSchema.

    Returns a JSON TextContent payload on success; on any failure, an
    ``{"error": ...}`` payload — exceptions never propagate to the MCP
    transport.

    Bug fix vs. previous version: the ``telegram_catchup`` branch read from
    an undefined name ``args`` (the parameter is ``arguments``), raising
    NameError at runtime, and built a fresh MemoryStore instead of reusing
    the shared one like every other branch.
    """
    try:
        store = _get_store()

        if name == "memory_store":
            title = arguments["title"]
            content = arguments["content"]
            layer_str = arguments.get("layer", "short-term")
            tags = arguments.get("tags", [])
            source = arguments.get("source", "mcp")
            # Unknown layer strings silently fall back to short-term.
            layer = _LAYER_MAP.get(layer_str, MemoryLayer.SHORT)
            memory = store.snapshot(
                title=title,
                content=content,
                layer=layer,
                tags=tags,
                source=source,
            )
            return _json_response({"memory_id": memory.id, "stored": True})

        elif name == "memory_search":
            query = arguments["query"]
            limit = int(arguments.get("limit", 10))
            memories = store.search(query, limit=limit)
            return _json_response([_memory_dict(m) for m in memories])

        elif name == "memory_recall":
            memory_id = arguments["memory_id"]
            memory = store.recall(memory_id)
            if memory is None:
                return _error_response(f"Memory not found: {memory_id}")
            return _json_response(_memory_dict(memory))

        elif name == "memory_list":
            layer_str = arguments.get("layer")
            tags = arguments.get("tags")
            limit = int(arguments.get("limit", 50))
            layer = _LAYER_MAP.get(layer_str) if layer_str else None
            memories = store.list_memories(layer=layer, tags=tags, limit=limit)
            return _json_response([_memory_dict(m) for m in memories])

        elif name == "memory_forget":
            memory_id = arguments["memory_id"]
            deleted = store.forget(memory_id)
            return _json_response({"memory_id": memory_id, "deleted": deleted})

        elif name == "memory_promote":
            memory_id = arguments["memory_id"]
            target_str = arguments["target"]
            summary = arguments.get("summary", "")
            target = _LAYER_MAP.get(target_str)
            if target is None:
                return _error_response(f"Invalid target layer: {target_str}")
            promoted = store.promote(memory_id, target, summary=summary)
            if promoted is None:
                return _error_response(f"Memory not found: {memory_id}")
            return _json_response({
                "promoted_id": promoted.id,
                "source_id": memory_id,
                "target_layer": target_str,
            })

        elif name == "memory_consolidate":
            session_id = arguments["session_id"]
            summary = arguments["summary"]
            consolidated = store.consolidate_session(session_id, summary)
            return _json_response({
                "memory_id": consolidated.id,
                "session_id": session_id,
                "consolidated": True,
            })

        elif name == "memory_context":
            token_budget = int(arguments.get("token_budget", 4000))
            context = store.load_context(max_tokens=token_budget)
            return _json_response(context)

        elif name == "memory_export":
            path = store.export_backup()
            return _json_response({"exported": True, "path": path})

        elif name == "memory_import":
            backup_path = arguments["backup_path"]
            count = store.import_backup(backup_path)
            return _json_response({"imported": count, "path": backup_path})

        elif name == "memory_health":
            health = store.health()
            return _json_response(health)

        elif name == "memory_graph":
            action = arguments["action"]
            if store.graph is None:
                return _error_response(
                    "SKGraph backend not configured. "
                    "Install falkordb and configure the graph backend."
                )
            if action == "traverse":
                mid = arguments.get("memory_id")
                if not mid:
                    return _error_response("memory_id required for traverse")
                depth = int(arguments.get("depth", 2))
                results = store.graph.get_related(mid, depth=depth)
                return _json_response(results)
            elif action == "lineage":
                mid = arguments.get("memory_id")
                if not mid:
                    return _error_response("memory_id required for lineage")
                chain = store.graph.get_lineage(mid)
                return _json_response(chain)
            elif action == "clusters":
                clusters = store.graph.find_clusters()
                return _json_response(clusters)
            else:
                return _error_response(f"Unknown graph action: {action}")

        # Alias of memory_health, kept for backward compatibility.
        elif name == "memory_stats":
            health = store.health()
            return _json_response(health)

        elif name == "memory_verify":
            # Lazy imports: fortress/config are only needed for integrity tools.
            from .fortress import FortifiedMemoryStore
            from .config import SKMEMORY_HOME

            fortress = FortifiedMemoryStore(
                primary=store.primary,
                use_sqlite=False,
                audit_path=SKMEMORY_HOME / "audit.jsonl",
            )
            result = fortress.verify_all()
            return _json_response(result)

        elif name == "memory_audit":
            from .fortress import AuditLog
            from .config import SKMEMORY_HOME

            n = int(arguments.get("last", 20))
            audit = AuditLog(path=SKMEMORY_HOME / "audit.jsonl")
            records = audit.tail(n)
            return _json_response(records)

        # ── Telegram tools ────────────────────────────────────
        elif name == "telegram_import":
            from .importers.telegram import import_telegram

            export_path = arguments["export_path"]
            mode = arguments.get("mode", "daily")
            min_length = arguments.get("min_length", 30)
            chat_name = arguments.get("chat_name")
            tags_str = arguments.get("tags", "")
            tags = [t.strip() for t in tags_str.split(",") if t.strip()] if tags_str else None

            stats = import_telegram(
                store,
                export_path,
                mode=mode,
                min_message_length=min_length,
                chat_name=chat_name,
                tags=tags,
            )
            return _json_response(stats)

        elif name == "telegram_import_api":
            from .importers.telegram_api import import_telegram_api

            chat = arguments["chat"]
            mode = arguments.get("mode", "daily")
            limit = arguments.get("limit")
            since = arguments.get("since")
            min_length = arguments.get("min_length", 30)
            chat_name = arguments.get("chat_name")
            tags_str = arguments.get("tags", "")
            tags = [t.strip() for t in tags_str.split(",") if t.strip()] if tags_str else None

            stats = import_telegram_api(
                store,
                chat,
                mode=mode,
                limit=limit,
                since=since,
                min_message_length=min_length,
                chat_name=chat_name,
                tags=tags,
            )
            return _json_response(stats)

        elif name == "telegram_setup":
            from .importers.telegram_api import check_setup

            result = check_setup()
            return _json_response(result)

        elif name == "telegram_catchup":
            from .importers.telegram_api import import_telegram_api

            # FIX: previously read from undefined `args` → NameError.
            chat = arguments["chat"]
            limit = arguments.get("limit", 2000)
            since = arguments.get("since")
            min_length = arguments.get("min_length", 20)
            tags_str = arguments.get("tags", "")
            tags = [t.strip() for t in tags_str.split(",") if t.strip()] if tags_str else None

            # FIX: reuse the shared store instead of building a fresh one.
            stats = import_telegram_api(
                store, chat, mode="catchup", limit=limit, since=since,
                min_message_length=min_length, tags=tags,
            )
            return _json_response(stats)

        else:
            return _error_response(f"Unknown tool: {name}")

    except Exception as exc:
        # Boundary handler: report the failure to the client instead of
        # crashing the stdio transport; full traceback goes to the log.
        logger.exception("Tool %s failed", name)
        return _error_response(str(exc))
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
# ---------------------------------------------------------------------------
|
|
689
|
+
# Entry point
|
|
690
|
+
# ---------------------------------------------------------------------------
|
|
691
|
+
|
|
692
|
+
|
|
693
|
+
def main() -> None:
    """Run the SKMemory MCP server on stdio transport.

    Console-script entry point (``skmemory-mcp``). Logging is kept at
    WARNING so stdout/stderr noise does not interfere with the MCP
    stdio protocol.
    """
    logging.basicConfig(level=logging.WARNING, format="%(name)s: %(message)s")
    asyncio.run(_run_server())
|
|
697
|
+
|
|
698
|
+
|
|
699
|
+
async def _run_server() -> None:
    """Async entry point: serve MCP requests over the stdio transport until EOF."""
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, server.create_initialization_options())
|
|
703
|
+
|
|
704
|
+
|
|
705
|
+
# Support direct execution: python -m skmemory.mcp_server
if __name__ == "__main__":
    main()
|