claude-memory-agent 2.2.4 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +2 -2
- package/hooks/auto_capture.py +58 -1
- package/hooks/grounding-hook-v2.py +129 -0
- package/hooks/grounding-hook.py +95 -0
- package/hooks/session_end_hook.py +35 -0
- package/hooks/session_start.py +56 -0
- package/main.py +165 -0
- package/mcp_proxy.py +307 -0
- package/mcp_server_full.py +497 -0
- package/package.json +1 -1
- package/services/native_memory_sync.py +66 -310
|
@@ -0,0 +1,497 @@
|
|
|
1
|
+
"""MCP stdio server for Claude Memory.
|
|
2
|
+
|
|
3
|
+
Thin adapter that exposes the memory system's core skills as MCP tools,
|
|
4
|
+
allowing any MCP-compatible client (OpenClaw, Claude Code, etc.) to
|
|
5
|
+
store, search, and manage memories over stdio JSON-RPC.
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
python mcp_server.py # stdio mode (default)
|
|
9
|
+
mcp dev mcp_server.py # interactive inspector
|
|
10
|
+
|
|
11
|
+
Shares the same SQLite database as the HTTP server (main.py) via WAL mode.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
# ── CRITICAL: Suppress stdout noise before ANY library imports ──────────
# stdout is reserved exclusively for MCP JSON-RPC protocol messages.
# Any stray print/progress-bar on stdout will corrupt the protocol.

import os
import sys

# Silence tokenizer/transformers/tqdm/HF-hub chatter before those libraries
# are (transitively) imported, so nothing ever writes to stdout.
os.environ.setdefault("TOKENIZERS_PARALLELISM", "false")
os.environ.setdefault("TRANSFORMERS_NO_ADVISORY_WARNINGS", "1")
os.environ.setdefault("TQDM_DISABLE", "1")
os.environ.setdefault("HF_HUB_DISABLE_PROGRESS_BARS", "1")

import logging

# Route all log output to stderr — stdout must stay protocol-clean.
logging.basicConfig(
    stream=sys.stderr,
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
)
logger = logging.getLogger("mcp-claude-memory")

# Add memory-agent/ to sys.path so local imports (services.*, skills.*, config) resolve
AGENT_DIR = os.path.dirname(os.path.abspath(__file__))
if AGENT_DIR not in sys.path:
    sys.path.insert(0, AGENT_DIR)
|
39
|
+
|
|
40
|
+
# ── Imports ─────────────────────────────────────────────────────────────
|
|
41
|
+
|
|
42
|
+
import json
|
|
43
|
+
from collections.abc import AsyncIterator
|
|
44
|
+
from contextlib import asynccontextmanager
|
|
45
|
+
from dataclasses import dataclass
|
|
46
|
+
from typing import Optional, List, Dict, Any
|
|
47
|
+
|
|
48
|
+
# MCP SDK - support both v1 (FastMCP) and v2 (MCPServer) import paths
|
|
49
|
+
try:
|
|
50
|
+
from mcp.server.fastmcp import FastMCP, Context
|
|
51
|
+
except ImportError:
|
|
52
|
+
try:
|
|
53
|
+
from mcp.server.mcpserver import MCPServer as FastMCP, Context
|
|
54
|
+
except ImportError:
|
|
55
|
+
raise ImportError(
|
|
56
|
+
"MCP SDK not found. Install with: pip install 'mcp>=1.0.0'"
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
from services.database import DatabaseService
|
|
60
|
+
from services.embeddings import EmbeddingService
|
|
61
|
+
from config import config
|
|
62
|
+
|
|
63
|
+
# Direct skill imports - no HTTP, no FastAPI dependency
|
|
64
|
+
from skills.store import store_memory, store_project, store_pattern
|
|
65
|
+
from skills.search import semantic_search, search_patterns, get_project_context
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
# ── Lifespan: DB + Embeddings initialization ───────────────────────────
|
|
69
|
+
|
|
70
|
+
@dataclass
class AppContext:
    """Shared per-process services handed to every tool via the lifespan context."""

    # SQLite-backed storage service (shared with the HTTP server via WAL mode).
    db: DatabaseService
    # Embedding provider used for semantic store/search operations.
    embeddings: EmbeddingService
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
@asynccontextmanager
async def app_lifespan(server: Any) -> AsyncIterator[AppContext]:
    """Initialize database and embedding services on startup, clean up on shutdown.

    Args:
        server: The MCP server instance (unused; required by the lifespan protocol).

    Yields:
        AppContext holding a connected DatabaseService and a ready EmbeddingService.
    """
    logger.info("Initializing Claude Memory MCP server...")

    db = DatabaseService()
    await db.connect()
    try:
        # Everything after connect() runs under try/finally so the DB handle is
        # released even if schema init or embedding-model loading raises.
        # (Previously the finally only covered the yield, leaking the
        # connection on startup failure.)
        await db.initialize_schema()
        logger.info(f"Database connected: {db.db_path}")

        logger.info(
            f"Loading embedding model: {config.EMBEDDING_MODEL} "
            f"(provider: {config.EMBEDDING_PROVIDER}) - this may take a moment..."
        )
        embeddings = EmbeddingService(
            provider_type=config.EMBEDDING_PROVIDER,
            model=config.EMBEDDING_MODEL,
        )
        logger.info(f"Embeddings ready (dim={embeddings.get_dimension()})")

        yield AppContext(db=db, embeddings=embeddings)
    finally:
        if db.conn:
            db.conn.close()
        logger.info("Claude Memory MCP server shut down.")
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
# ── MCP Server ──────────────────────────────────────────────────────────

# Single module-level server instance; app_lifespan wires up DB + embeddings.
mcp_server = FastMCP("claude-memory", lifespan=app_lifespan)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _get_app(ctx: Context) -> AppContext:
|
|
110
|
+
"""Extract AppContext from the MCP request context."""
|
|
111
|
+
return ctx.request_context.lifespan_context
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
# ── Tools ───────────────────────────────────────────────────────────────
|
|
115
|
+
|
|
116
|
+
@mcp_server.tool()
async def memory_store(
    ctx: Context,
    content: str,
    memory_type: str = "chunk",
    tags: Optional[List[str]] = None,
    importance: int = 5,
    outcome: Optional[str] = None,
    success: Optional[bool] = None,
    project_path: Optional[str] = None,
    project_name: Optional[str] = None,
    project_type: Optional[str] = None,
    tech_stack: Optional[List[str]] = None,
    agent_type: Optional[str] = None,
) -> str:
    """Store a memory with semantic embedding.

    Args:
        content: Content to remember
        memory_type: Type: session, decision, code, chunk, error, preference
        tags: Classification tags
        importance: 1-10 importance scale (default 5)
        outcome: What happened
        success: Did it work?
        project_path: Project path
        project_name: Project name
        project_type: Project type (wordpress, react, etc.)
        tech_stack: Technologies used
        agent_type: Agent used (Explore, Plan, etc.)
    """
    app = _get_app(ctx)
    # Collect all tool arguments and forward them untouched; the skill layer
    # owns validation and embedding generation.
    fields = {
        "content": content,
        "memory_type": memory_type,
        "tags": tags,
        "importance": importance,
        "outcome": outcome,
        "success": success,
        "project_path": project_path,
        "project_name": project_name,
        "project_type": project_type,
        "tech_stack": tech_stack,
        "agent_type": agent_type,
    }
    stored = await store_memory(db=app.db, embeddings=app.embeddings, **fields)
    return json.dumps(stored, default=str)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
@mcp_server.tool()
async def memory_search(
    ctx: Context,
    query: str,
    limit: int = 10,
    memory_type: Optional[str] = None,
    project_path: Optional[str] = None,
    success_only: bool = False,
    threshold: float = 0.5,
) -> str:
    """Search memories using natural language. Returns similar content ranked by relevance.

    Args:
        query: Search query
        limit: Max results (default 10)
        memory_type: Filter: session, decision, code, chunk, error, preference
        project_path: Filter by project
        success_only: Only return successful memories
        threshold: Minimum similarity 0-1 (default 0.5)
    """
    app = _get_app(ctx)
    # Bundle filters and hand off to the semantic-search skill unchanged.
    filters = {
        "query": query,
        "limit": limit,
        "memory_type": memory_type,
        "project_path": project_path,
        "success_only": success_only,
        "threshold": threshold,
    }
    hits = await semantic_search(db=app.db, embeddings=app.embeddings, **filters)
    return json.dumps(hits, default=str)
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
@mcp_server.tool()
async def memory_search_patterns(
    ctx: Context,
    query: str,
    limit: int = 5,
    problem_type: Optional[str] = None,
    threshold: float = 0.5,
) -> str:
    """Search for reusable solution patterns, ranked by similarity and success rate.

    Args:
        query: Problem description
        limit: Max results (default 5)
        problem_type: Filter: bug_fix, feature, refactor, config, performance
        threshold: Minimum similarity 0-1 (default 0.5)
    """
    app = _get_app(ctx)
    # Delegate ranking (similarity x success rate) entirely to the skill layer.
    criteria = {
        "query": query,
        "limit": limit,
        "problem_type": problem_type,
        "threshold": threshold,
    }
    matches = await search_patterns(db=app.db, embeddings=app.embeddings, **criteria)
    return json.dumps(matches, default=str)
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
@mcp_server.tool()
async def memory_store_pattern(
    ctx: Context,
    name: str,
    solution: str,
    problem_type: Optional[str] = None,
    tech_context: Optional[List[str]] = None,
) -> str:
    """Store a reusable solution pattern for future reference.

    Args:
        name: Pattern name
        solution: The solution
        problem_type: Type: bug_fix, feature, refactor, config, performance
        tech_context: Technologies this applies to
    """
    app = _get_app(ctx)
    # Pass the pattern fields straight through; embedding happens downstream.
    pattern = {
        "name": name,
        "solution": solution,
        "problem_type": problem_type,
        "tech_context": tech_context,
    }
    outcome = await store_pattern(db=app.db, embeddings=app.embeddings, **pattern)
    return json.dumps(outcome, default=str)
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
@mcp_server.tool()
async def memory_store_project(
    ctx: Context,
    path: str,
    name: Optional[str] = None,
    project_type: Optional[str] = None,
    tech_stack: Optional[List[str]] = None,
    conventions: Optional[Dict[str, Any]] = None,
    preferences: Optional[Dict[str, Any]] = None,
) -> str:
    """Store project info (tech stack, conventions, preferences).

    Args:
        path: Project path
        name: Project name
        project_type: Project type
        tech_stack: Technologies used
        conventions: Coding conventions dict
        preferences: User preferences dict
    """
    app = _get_app(ctx)
    # Note: store_project takes no embeddings service — project records are
    # keyed by path, not semantically indexed.
    record = {
        "path": path,
        "name": name,
        "project_type": project_type,
        "tech_stack": tech_stack,
        "conventions": conventions,
        "preferences": preferences,
    }
    saved = await store_project(db=app.db, **record)
    return json.dumps(saved, default=str)
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
@mcp_server.tool()
async def memory_get_project(
    ctx: Context,
    project_path: str,
) -> str:
    """Get stored info about a project.

    Args:
        project_path: Project path to look up
    """
    app = _get_app(ctx)
    project = await app.db.get_project(project_path)
    # Report a miss explicitly rather than serializing null.
    if project is None:
        return json.dumps({"found": False, "project_path": project_path})
    return json.dumps(project, default=str)
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
@mcp_server.tool()
async def memory_context(
    ctx: Context,
    project_path: Optional[str] = None,
    query: Optional[str] = None,
    include_decisions: bool = True,
    include_errors: bool = True,
    include_patterns: bool = True,
) -> str:
    """Load relevant memories for the current session. Call at session start to get
    project info, recent decisions, patterns, and relevant past errors/solutions.

    Args:
        project_path: Project path (optional, filters results)
        query: Optional semantic query to find relevant memories
        include_decisions: Include recent decisions (default true)
        include_errors: Include recent errors (default true)
        include_patterns: Include solution patterns (default true)
    """
    app = _get_app(ctx)
    result: Dict[str, Any] = {}

    # Project context (includes decisions, code patterns, and query-relevant memories)
    if project_path:
        project_ctx = await get_project_context(
            db=app.db,
            embeddings=app.embeddings,
            project_path=project_path,
            query=query,
        )
        result["project"] = project_ctx

    # Solution patterns (when a query is provided)
    if include_patterns and query:
        patterns = await search_patterns(
            db=app.db,
            embeddings=app.embeddings,
            query=query,
            limit=5,
        )
        result["patterns"] = patterns

    # Recent decisions (when requested and query provided).
    # FIX: include_decisions was previously accepted and documented but never
    # consulted; mirror the error search below with memory_type="decision".
    if include_decisions and query:
        decisions = await semantic_search(
            db=app.db,
            embeddings=app.embeddings,
            query=query,
            limit=5,
            memory_type="decision",
            project_path=project_path,
        )
        result["recent_decisions"] = decisions

    # Recent errors (when requested and query provided)
    if include_errors and query:
        errors = await semantic_search(
            db=app.db,
            embeddings=app.embeddings,
            query=query,
            limit=5,
            memory_type="error",
            project_path=project_path,
        )
        result["recent_errors"] = errors

    # Stats are best-effort: report a failure inline rather than aborting
    # the whole context load.
    try:
        stats = await app.db.get_stats()
        result["stats"] = stats
    except Exception as e:
        result["stats_error"] = str(e)

    result["success"] = True
    return json.dumps(result, default=str)
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
@mcp_server.tool()
async def memory_stats(ctx: Context) -> str:
    """Get memory statistics including total memories, database size, and breakdown by type."""
    stats = await _get_app(ctx).db.get_stats()
    return json.dumps(stats, default=str)
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
@mcp_server.tool()
async def memory_dashboard(ctx: Context) -> str:
    """Open the Claude Memory real-time dashboard in the browser."""
    import webbrowser

    url = config.get_dashboard_url()
    try:
        webbrowser.open(url)
    except Exception as e:
        # Headless/sandboxed environments may have no browser to launch.
        return json.dumps({
            "success": False,
            "url": url,
            "error": str(e),
            "message": f"Could not auto-open browser. Visit {url} manually.",
        })
    return json.dumps({
        "success": True,
        "url": url,
        "message": f"Dashboard opened at {url}",
    })
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
@mcp_server.tool()
async def memory_sync_native(
    ctx: Context,
    project_path: Optional[str] = None,
    direction: str = "from_native",
) -> str:
    """Sync Claude's native MEMORY.md into the MCP vector DB.

    One-way sync: ingests MEMORY.md content into the MCP vector DB
    so it becomes searchable via semantic search. Claude Code owns
    MEMORY.md exclusively.

    Args:
        project_path: Project path to sync. If omitted, syncs current project.
        direction: Only 'from_native' is supported (MEMORY.md -> MCP DB)
    """
    # Imported lazily so the sync service is only loaded when the tool is used.
    from services.native_memory_sync import sync_native_to_mcp

    app = _get_app(ctx)

    # Despite the docstring's "current project" wording, a path is required here.
    if not project_path:
        return json.dumps({
            "success": False,
            "error": "project_path is required for native memory sync",
        })

    if direction != "from_native":
        # FIX: was an f-string with no placeholders (stray f prefix).
        return json.dumps({
            "success": False,
            "error": "Only direction='from_native' is supported. MCP no longer writes to MEMORY.md.",
        })

    try:
        result = await sync_native_to_mcp(app.db, app.embeddings, project_path)
        return json.dumps(result, default=str)
    except Exception as e:
        # Surface the failure to the caller as a structured payload.
        logger.error(f"memory_sync_native failed: {e}")
        return json.dumps({"success": False, "error": str(e)})
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
# ── Cross-Session Awareness Tools ────────────────────────────────────────
|
|
441
|
+
|
|
442
|
+
@mcp_server.tool()
async def memory_active_sessions(
    ctx: Context,
    project_path: str,
    exclude_session_id: Optional[str] = None,
) -> str:
    """List active parallel Claude Code sessions for a project.

    Use this to see what other sessions are currently working on,
    what files they've modified, and detect potential conflicts.

    Args:
        project_path: Project path to check
        exclude_session_id: Optional session ID to exclude from results
    """
    app = _get_app(ctx)
    active = await app.db.get_active_sessions(project_path, exclude_session_id)
    payload = {
        "success": True,
        "sessions": active,
        "count": len(active),
    }
    return json.dumps(payload, default=str)
|
|
464
|
+
|
|
465
|
+
|
|
466
|
+
@mcp_server.tool()
async def memory_session_catchup(
    ctx: Context,
    project_path: str,
    session_id: Optional[str] = None,
    since: Optional[str] = None,
) -> str:
    """Get a catch-up summary of what other sessions did.

    Returns recent cross-session activity grouped by session,
    including file changes, decisions, and goals.

    Args:
        project_path: Project path to check
        session_id: Current session ID (to exclude own events)
        since: ISO timestamp to get events after (optional)
    """
    # Imported lazily so the awareness service loads only on first use.
    from services.session_awareness import get_session_awareness

    app = _get_app(ctx)
    awareness = get_session_awareness(app.db)
    summary = await awareness.get_catchup(
        session_id=session_id or "",
        project_path=project_path,
        since=since,
    )
    return json.dumps(summary, default=str)
|
|
492
|
+
|
|
493
|
+
|
|
494
|
+
# ── Entry Point ─────────────────────────────────────────────────────────
|
|
495
|
+
|
|
496
|
+
if __name__ == "__main__":
    # stdio transport: JSON-RPC over stdin/stdout — stdout must stay protocol-clean.
    mcp_server.run(transport="stdio")
|