claude-memory-agent 2.2.4 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +2 -2
- package/hooks/auto_capture.py +58 -1
- package/hooks/grounding-hook-v2.py +129 -0
- package/hooks/grounding-hook.py +95 -0
- package/hooks/session_end_hook.py +35 -0
- package/hooks/session_start.py +56 -0
- package/main.py +165 -0
- package/mcp_proxy.py +307 -0
- package/mcp_server_full.py +497 -0
- package/package.json +1 -1
- package/services/native_memory_sync.py +66 -310
package/mcp_proxy.py
ADDED
|
@@ -0,0 +1,307 @@
|
|
|
1
|
+
"""Slim MCP proxy for Claude Memory.
|
|
2
|
+
|
|
3
|
+
Thin adapter that exposes 3 unified tools over stdio JSON-RPC,
|
|
4
|
+
forwarding all work to the HTTP backend (main.py on port 8102).
|
|
5
|
+
|
|
6
|
+
NO embedding model loaded. NO database connection. Just HTTP calls.
|
|
7
|
+
|
|
8
|
+
Tools:
|
|
9
|
+
memory_ask - Unified search (replaces memory_search, memory_search_patterns,
|
|
10
|
+
memory_context, memory_get_project, memory_active_sessions,
|
|
11
|
+
memory_session_catchup)
|
|
12
|
+
memory_store - Unified store (replaces memory_store, memory_store_pattern,
|
|
13
|
+
memory_store_project)
|
|
14
|
+
memory_status - Quick stats + project info (replaces memory_stats, memory_dashboard)
|
|
15
|
+
|
|
16
|
+
Usage:
|
|
17
|
+
python mcp_proxy.py # stdio mode (default)
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
# ── Suppress stdout noise before ANY library imports ─────────────────
|
|
21
|
+
import os
|
|
22
|
+
import sys
|
|
23
|
+
|
|
24
|
+
os.environ.setdefault("TOKENIZERS_PARALLELISM", "false")
|
|
25
|
+
os.environ.setdefault("TRANSFORMERS_NO_ADVISORY_WARNINGS", "1")
|
|
26
|
+
os.environ.setdefault("TQDM_DISABLE", "1")
|
|
27
|
+
|
|
28
|
+
import logging
|
|
29
|
+
|
|
30
|
+
logging.basicConfig(
|
|
31
|
+
stream=sys.stderr,
|
|
32
|
+
level=logging.INFO,
|
|
33
|
+
format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
|
|
34
|
+
)
|
|
35
|
+
logger = logging.getLogger("mcp-proxy")
|
|
36
|
+
|
|
37
|
+
# ── Imports ──────────────────────────────────────────────────────────
|
|
38
|
+
|
|
39
|
+
import json
|
|
40
|
+
import asyncio
|
|
41
|
+
from typing import Optional, List, Dict, Any
|
|
42
|
+
|
|
43
|
+
import httpx
|
|
44
|
+
|
|
45
|
+
try:
|
|
46
|
+
from mcp.server.fastmcp import FastMCP
|
|
47
|
+
except ImportError:
|
|
48
|
+
raise ImportError(
|
|
49
|
+
"MCP SDK not found. Install with: pip install 'mcp>=1.0.0'"
|
|
50
|
+
)
|
|
51
|
+
|
|
52
|
+
# ── Config ───────────────────────────────────────────────────────────
|
|
53
|
+
|
|
54
|
+
BACKEND_URL = os.environ.get("MEMORY_AGENT_URL", "http://localhost:8102")
|
|
55
|
+
TIMEOUT = 5.0 # seconds per HTTP call
|
|
56
|
+
|
|
57
|
+
# ── HTTP helpers ─────────────────────────────────────────────────────
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
async def _rest_get(path: str, params: Optional[dict] = None) -> Optional[dict]:
    """GET ``{BACKEND_URL}{path}`` and return the decoded JSON body.

    Args:
        path: Backend path beginning with ``/`` (e.g. ``/api/search``).
        params: Optional query-string parameters.

    Returns:
        The parsed JSON dict on HTTP 200, or ``None`` on any failure
        (connection error, timeout, non-200 status, bad JSON) — callers
        treat the backend as best-effort and degrade gracefully.
    """
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            resp = await client.get(f"{BACKEND_URL}{path}", params=params)
            if resp.status_code == 200:
                return resp.json()
            # Previously non-200 responses were dropped silently; log at
            # debug so misbehaving backends are at least diagnosable.
            logger.debug(f"REST GET {path} returned HTTP {resp.status_code}")
    except Exception as e:
        # Deliberately broad: any transport/JSON failure means "no data".
        logger.debug(f"REST GET {path} failed: {e}")
    return None
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
async def _rest_post(path: str, body: Optional[dict] = None) -> Optional[dict]:
    """POST JSON ``body`` to ``{BACKEND_URL}{path}`` and return the decoded reply.

    Args:
        path: Backend path beginning with ``/``.
        body: JSON payload; ``None`` is sent as an empty object.

    Returns:
        The parsed JSON dict on HTTP 200, or ``None`` on any failure —
        mirrors :func:`_rest_get` so callers can treat both uniformly.
    """
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            resp = await client.post(f"{BACKEND_URL}{path}", json=body or {})
            if resp.status_code == 200:
                return resp.json()
            # Log non-200 at debug instead of silently returning None.
            logger.debug(f"REST POST {path} returned HTTP {resp.status_code}")
    except Exception as e:
        # Deliberately broad: the proxy is best-effort by design.
        logger.debug(f"REST POST {path} failed: {e}")
    return None
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
async def _a2a_skill(skill_id: str, params: dict) -> Optional[dict]:
    """Call a backend skill via the A2A JSON-RPC protocol.

    Wraps ``skill_id``/``params`` in a ``tasks/send`` JSON-RPC envelope,
    POSTs it to ``{BACKEND_URL}/a2a``, and unwraps the first artifact's
    text part as JSON.

    Args:
        skill_id: Backend skill identifier (e.g. ``store_memory``).
        params: Skill parameters, forwarded in the request metadata.

    Returns:
        The artifact decoded as JSON; if the artifact cannot be
        extracted/decoded, the raw ``result`` object; ``None`` on any
        transport failure or non-200 status.
    """
    payload = {
        "jsonrpc": "2.0",
        "id": f"proxy-{skill_id}",
        "method": "tasks/send",
        # The message text is unused by the backend skill dispatch; the
        # skill id and params travel in metadata.
        "params": {
            "message": {"parts": [{"type": "text", "text": ""}]},
            "metadata": {"skill_id": skill_id, "params": params},
        },
    }
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            resp = await client.post(f"{BACKEND_URL}/a2a", json=payload)
            if resp.status_code == 200:
                data = resp.json()
                # Extract artifact text from A2A response.
                try:
                    text = data["result"]["artifacts"][0]["parts"][0]["text"]
                    return json.loads(text)
                # TypeError added: json.loads raises it when the extracted
                # part is not str/bytes (e.g. a structured part), which the
                # old (KeyError, IndexError, JSONDecodeError) tuple missed.
                except (KeyError, IndexError, TypeError, json.JSONDecodeError):
                    return data.get("result")
            logger.debug(f"A2A skill {skill_id} returned HTTP {resp.status_code}")
    except Exception as e:
        logger.debug(f"A2A skill {skill_id} failed: {e}")
    return None
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
# ── MCP Server ───────────────────────────────────────────────────────

# "claude-memory" is the server name advertised to MCP clients over stdio.
mcp_server = FastMCP("claude-memory")
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@mcp_server.tool()
async def memory_ask(
    query: str,
    project_path: Optional[str] = None,
    type_hint: Optional[str] = None,
    limit: int = 10,
) -> str:
    """Search memories using natural language. Returns similar content ranked by relevance.

    Unified search that combines semantic search and pattern matching.
    Use type_hint to focus: "pattern" for solutions, "error" for past bugs,
    "session" for session context, "decision" for architectural choices.

    Args:
        query: Search query (natural language)
        project_path: Filter by project path
        type_hint: Focus search: pattern, error, session, decision, project, context
        limit: Max results (default 10)
    """
    tasks = []
    results: Dict[str, Any] = {"query": query}

    # -- Semantic search (always) --
    search_params = {"query": query, "limit": limit}
    if project_path:
        search_params["project_path"] = project_path
    # Only these hints map onto backend memory_type filters; the others
    # ("pattern", "project", "context") are handled below instead.
    if type_hint in ("error", "decision", "session", "code", "preference"):
        search_params["memory_type"] = type_hint

    tasks.append(("search", _rest_get("/api/search", search_params)))

    # -- Pattern search (always, lightweight) --
    # Honor the caller's limit when patterns are the explicit focus;
    # otherwise keep it to a small side-channel of 5.
    pattern_limit = limit if type_hint == "pattern" else 5
    tasks.append(("patterns", _a2a_skill("search_patterns", {
        "query": query,
        "limit": pattern_limit,
    })))

    # -- Project context (if type_hint is project/context, fetch project metadata) --
    if type_hint in ("project", "context") and project_path:
        tasks.append(("project", _a2a_skill("get_project_context", {
            "project_path": project_path,
            "limit": 5,
        })))

    # -- Session context (if type_hint is session) --
    if type_hint == "session" and project_path:
        tasks.append(("sessions", _rest_get("/api/sessions/active", {
            "project_path": project_path,
        })))

    # Run all backend calls in parallel; failures surface as None or
    # exceptions and are simply skipped below (best-effort aggregation).
    gathered = await asyncio.gather(
        *[t[1] for t in tasks], return_exceptions=True
    )

    for (label, _), result in zip(tasks, gathered):
        if isinstance(result, Exception) or result is None:
            continue
        if label == "search" and result.get("results"):
            results["memories"] = result["results"][:limit]
        elif label == "patterns" and result.get("patterns"):
            # Fix: was hard-truncated to [:5], discarding results the
            # caller explicitly requested via type_hint="pattern".
            results["patterns"] = result["patterns"][:pattern_limit]
        elif label == "project" and result.get("project"):
            results["project_context"] = result["project"]
        elif label == "sessions":
            results["active_sessions"] = result.get("sessions", [])

    # Success means at least one substantive result channel has data.
    results["success"] = bool(results.get("memories") or results.get("patterns"))
    return json.dumps(results, default=str)
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
@mcp_server.tool()
async def memory_store(
    content: str,
    memory_type: str = "chunk",
    importance: int = 5,
    tags: Optional[List[str]] = None,
    project_path: Optional[str] = None,
    outcome: Optional[str] = None,
    success: Optional[bool] = None,
    # Pattern-specific fields
    pattern_name: Optional[str] = None,
    problem_type: Optional[str] = None,
    tech_context: Optional[List[str]] = None,
    # Project-specific fields
    project_name: Optional[str] = None,
    project_type: Optional[str] = None,
    tech_stack: Optional[List[str]] = None,
    conventions: Optional[Dict[str, Any]] = None,
    preferences: Optional[Dict[str, Any]] = None,
) -> str:
    """Store a memory, pattern, or project info. Routes automatically by type.

    For memories: set content, memory_type, importance, tags.
    For patterns: set content as solution, pattern_name, problem_type.
    For projects: set project_path, project_name, tech_stack, conventions.

    Args:
        content: Content to remember (or solution for patterns)
        memory_type: Type: session, decision, code, chunk, error, preference
        importance: 1-10 importance scale (default 5)
        tags: Classification tags
        project_path: Project path
        outcome: What happened
        success: Did it work?
        pattern_name: Pattern name (triggers pattern storage)
        problem_type: Pattern type: bug_fix, feature, refactor, config, performance
        tech_context: Technologies for pattern
        project_name: Project name (triggers project storage)
        project_type: Project type (wordpress, react, etc.)
        tech_stack: Technologies used
        conventions: Coding conventions dict
        preferences: User preferences dict
    """
    # Route to pattern storage (pattern_name is the documented trigger).
    if pattern_name:
        result = await _a2a_skill("store_pattern", {
            "name": pattern_name,
            "solution": content,
            "problem_type": problem_type,
            "tech_context": tech_context,
        })
        return json.dumps(result or {"error": "Memory agent unavailable"}, default=str)

    # Route to project storage. Fix: project_name is documented as a
    # trigger ("triggers project storage") but was missing from this
    # condition, so a call setting only project_name fell through to
    # plain memory storage.
    if project_name or project_type or tech_stack or conventions or preferences:
        result = await _a2a_skill("store_project", {
            "path": project_path or "",
            "name": project_name,
            "project_type": project_type,
            "tech_stack": tech_stack,
            "conventions": conventions,
            "preferences": preferences,
        })
        return json.dumps(result or {"error": "Memory agent unavailable"}, default=str)

    # Default: store a plain memory.
    result = await _a2a_skill("store_memory", {
        "content": content,
        "memory_type": memory_type,
        "importance": importance,
        "tags": tags,
        "project_path": project_path,
        "outcome": outcome,
        "success": success,
    })
    return json.dumps(result or {"error": "Memory agent unavailable"}, default=str)
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
@mcp_server.tool()
async def memory_status(
    project_path: Optional[str] = None,
) -> str:
    """Get memory system status: stats, project info, and health check.

    Args:
        project_path: Optional project path for project-specific info
    """
    # Collect (label, coroutine) pairs, then fan them out concurrently.
    calls = [("stats", _rest_get("/api/stats"))]
    if project_path:
        calls.append((
            "project",
            _a2a_skill("get_project_context", {
                "project_path": project_path,
                "limit": 3,
            }),
        ))

    responses = await asyncio.gather(
        *(pending for _, pending in calls), return_exceptions=True
    )

    status: Dict[str, Any] = {"success": True}

    # Fold each successful response under its label; failed or empty
    # responses are simply omitted.
    for (label, _), payload in zip(calls, responses):
        if payload is None or isinstance(payload, Exception):
            continue
        status[label] = payload

    # No usable stats means the backend itself is unreachable.
    if not status.get("stats"):
        status["success"] = False
        status["error"] = "Memory agent unavailable - is main.py running on port 8102?"

    return json.dumps(status, default=str)
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
# ── Entry Point ──────────────────────────────────────────────────────

if __name__ == "__main__":
    # The startup notice goes through the stderr-bound logger so stdout
    # stays reserved for the stdio JSON-RPC transport started below.
    logger.info(f"Starting slim MCP proxy -> {BACKEND_URL}")
    mcp_server.run(transport="stdio")
|