claude-memory-agent 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100) hide show
  1. package/.env.example +107 -0
  2. package/README.md +200 -0
  3. package/agent_card.py +512 -0
  4. package/bin/cli.js +181 -0
  5. package/bin/postinstall.js +216 -0
  6. package/config.py +104 -0
  7. package/dashboard.html +2689 -0
  8. package/hooks/README.md +196 -0
  9. package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
  10. package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
  11. package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
  12. package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
  13. package/hooks/auto-detect-response.py +348 -0
  14. package/hooks/auto_capture.py +255 -0
  15. package/hooks/detect-correction.py +173 -0
  16. package/hooks/grounding-hook.py +348 -0
  17. package/hooks/log-tool-use.py +234 -0
  18. package/hooks/log-user-request.py +208 -0
  19. package/hooks/pre-tool-decision.py +218 -0
  20. package/hooks/problem-detector.py +343 -0
  21. package/hooks/session_end.py +192 -0
  22. package/hooks/session_start.py +227 -0
  23. package/install.py +887 -0
  24. package/main.py +2859 -0
  25. package/manager.py +997 -0
  26. package/package.json +55 -0
  27. package/requirements.txt +8 -0
  28. package/run_server.py +136 -0
  29. package/services/__init__.py +50 -0
  30. package/services/__pycache__/__init__.cpython-312.pyc +0 -0
  31. package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
  32. package/services/__pycache__/auth.cpython-312.pyc +0 -0
  33. package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
  34. package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
  35. package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
  36. package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
  37. package/services/__pycache__/confidence.cpython-312.pyc +0 -0
  38. package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
  39. package/services/__pycache__/database.cpython-312.pyc +0 -0
  40. package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
  41. package/services/__pycache__/insights.cpython-312.pyc +0 -0
  42. package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
  43. package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
  44. package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
  45. package/services/__pycache__/timeline.cpython-312.pyc +0 -0
  46. package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
  47. package/services/__pycache__/websocket.cpython-312.pyc +0 -0
  48. package/services/agent_registry.py +753 -0
  49. package/services/auth.py +331 -0
  50. package/services/auto_inject.py +250 -0
  51. package/services/claude_md_sync.py +275 -0
  52. package/services/cleanup.py +667 -0
  53. package/services/compaction_flush.py +447 -0
  54. package/services/confidence.py +301 -0
  55. package/services/daily_log.py +333 -0
  56. package/services/database.py +2485 -0
  57. package/services/embeddings.py +358 -0
  58. package/services/insights.py +632 -0
  59. package/services/llm_analyzer.py +595 -0
  60. package/services/memory_md_sync.py +409 -0
  61. package/services/retry_queue.py +453 -0
  62. package/services/timeline.py +579 -0
  63. package/services/vector_index.py +398 -0
  64. package/services/websocket.py +257 -0
  65. package/skills/__init__.py +6 -0
  66. package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
  67. package/skills/__pycache__/admin.cpython-312.pyc +0 -0
  68. package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
  69. package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
  70. package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
  71. package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
  72. package/skills/__pycache__/insights.cpython-312.pyc +0 -0
  73. package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
  74. package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
  75. package/skills/__pycache__/search.cpython-312.pyc +0 -0
  76. package/skills/__pycache__/state.cpython-312.pyc +0 -0
  77. package/skills/__pycache__/store.cpython-312.pyc +0 -0
  78. package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
  79. package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
  80. package/skills/__pycache__/verification.cpython-312.pyc +0 -0
  81. package/skills/admin.py +469 -0
  82. package/skills/checkpoint.py +198 -0
  83. package/skills/claude_md.py +363 -0
  84. package/skills/cleanup.py +241 -0
  85. package/skills/grounding.py +801 -0
  86. package/skills/insights.py +231 -0
  87. package/skills/natural_language.py +277 -0
  88. package/skills/retrieve.py +67 -0
  89. package/skills/search.py +213 -0
  90. package/skills/state.py +182 -0
  91. package/skills/store.py +179 -0
  92. package/skills/summarize.py +588 -0
  93. package/skills/timeline.py +387 -0
  94. package/skills/verification.py +391 -0
  95. package/start_daemon.py +155 -0
  96. package/test_automation.py +221 -0
  97. package/test_complete.py +338 -0
  98. package/test_full.py +322 -0
  99. package/update_system.py +817 -0
  100. package/verify_db.py +134 -0
@@ -0,0 +1,398 @@
1
+ """FAISS-based vector indexing for fast similarity search.
2
+
3
+ Provides O(log n) search instead of O(n) for large memory collections.
4
+ Falls back to numpy cosine similarity if FAISS is unavailable.
5
+ """
6
+ import os
7
+ import json
8
+ import time
9
+ import numpy as np
10
+ from pathlib import Path
11
+ from typing import List, Dict, Any, Optional, Tuple
12
+ from threading import Lock
13
+ from dotenv import load_dotenv
14
+
15
+ load_dotenv()
16
+
17
+ # Try to import FAISS
18
+ FAISS_AVAILABLE = False
19
+ try:
20
+ import faiss
21
+ FAISS_AVAILABLE = True
22
+ except ImportError:
23
+ faiss = None
24
+
25
+ # Index configuration
26
+ INDEX_DIR = os.getenv("INDEX_DIR", str(Path(__file__).parent.parent / "indexes"))
27
+ EMBEDDING_DIM = int(os.getenv("EMBEDDING_DIM", "768"))
28
+ INDEX_TYPE = os.getenv("INDEX_TYPE", "flat") # flat, ivf, hnsw
29
+
30
+
31
class VectorIndex:
    """FAISS-based vector index with persistence and automatic rebuilding.

    Supports three index types:
    - flat: Exact search (IndexFlatIP) - best for < 10K vectors
    - ivf: Inverted file index (IndexIVFFlat) - good for 10K-1M vectors
    - hnsw: Hierarchical NSW (IndexHNSWFlat) - best for 1M+ vectors

    Falls back to numpy cosine-similarity search if FAISS is unavailable.

    Vectors are L2-normalized before insertion into FAISS so that
    inner-product scores equal cosine similarity.  FAISS flat indexes do
    not support removal or in-place updates, so updating a db_id appends
    a new copy of the vector; the stale copy is filtered out during
    search() (via _reverse_map) and is only reclaimed by rebuild().
    """

    def __init__(
        self,
        name: str,
        dimension: int = EMBEDDING_DIM,
        index_type: str = INDEX_TYPE,
        index_dir: str = INDEX_DIR
    ):
        """Create (or load from disk) the index named *name*.

        Args:
            name: Logical index name; determines the on-disk file names.
            dimension: Embedding dimensionality.
            index_type: "flat", "ivf" or "hnsw" (unknown values fall back
                to flat).
            index_dir: Directory where index files are persisted.
        """
        self.name = name
        self.dimension = dimension
        self.index_type = index_type
        self.index_dir = Path(index_dir)
        self.index_dir.mkdir(parents=True, exist_ok=True)

        self.index_path = self.index_dir / f"{name}.index"
        self.id_map_path = self.index_dir / f"{name}_ids.json"

        # FAISS index (None when FAISS is unavailable)
        self._index: Optional[Any] = None
        self._id_map: List[int] = []  # FAISS internal ID -> database ID
        self._reverse_map: Dict[int, int] = {}  # database ID -> latest FAISS internal ID

        # Guards all mutation and search.  threading.Lock is NOT
        # reentrant: public methods must not call other public methods
        # while holding it (see rebuild()).
        self._lock = Lock()

        # Stats
        self._last_rebuild: Optional[float] = None
        self._search_count = 0
        self._add_count = 0

        # In-memory storage for the numpy fallback path
        self._fallback_vectors: List[Tuple[int, np.ndarray]] = []

        # Initialize
        self._initialize_index()

    def _initialize_index(self):
        """Load a persisted FAISS index if present, else create a fresh one."""
        if not FAISS_AVAILABLE:
            return

        # Try to load existing index
        if self.index_path.exists() and self.id_map_path.exists():
            try:
                self._index = faiss.read_index(str(self.index_path))
                with open(self.id_map_path, 'r') as f:
                    self._id_map = json.load(f)
                # Later entries win: a db_id updated before the last save
                # maps to its newest internal slot.
                self._reverse_map = {db_id: idx for idx, db_id in enumerate(self._id_map)}
                return
            except Exception:
                pass  # Corrupt/unreadable files: fall through to a new index

        # Create new index based on type
        self._create_index()

    def _create_index(self):
        """Create a new, empty FAISS index matching self.index_type."""
        if not FAISS_AVAILABLE:
            return

        if self.index_type == "flat":
            # Exact search using inner product (for normalized vectors =
            # cosine similarity)
            self._index = faiss.IndexFlatIP(self.dimension)
        elif self.index_type == "ivf":
            # IVF needs training data; start flat, train later when we
            # have enough vectors.
            self._index = faiss.IndexFlatIP(self.dimension)
        elif self.index_type == "hnsw":
            # HNSW for very large collections
            self._index = faiss.IndexHNSWFlat(self.dimension, 32)  # 32 neighbors
            self._index.hnsw.efConstruction = 200
            self._index.hnsw.efSearch = 128
        else:
            # Unknown type: default to flat
            self._index = faiss.IndexFlatIP(self.dimension)

        self._id_map = []
        self._reverse_map = {}

    def add(self, db_id: int, embedding: List[float]) -> bool:
        """Add or update a single vector.

        Args:
            db_id: Database ID for this vector
            embedding: The embedding vector (L2-normalized before FAISS
                insertion)

        Returns:
            True if added successfully
        """
        with self._lock:
            vector = np.array([embedding], dtype=np.float32)

            if FAISS_AVAILABLE and self._index is not None:
                # Normalize so inner product == cosine similarity.
                faiss.normalize_L2(vector)
                # Flat indexes cannot remove or replace vectors, so an
                # update appends a second copy.  _reverse_map records the
                # latest internal slot per db_id; search() skips the
                # stale copies and rebuild() reclaims the space.
                self._index.add(vector)
                internal_id = len(self._id_map)
                self._id_map.append(db_id)
                self._reverse_map[db_id] = internal_id
            else:
                # Numpy fallback keeps raw vectors (_numpy_search
                # normalizes on the fly).  Drop any previous entry for
                # this db_id so an update does not yield duplicate hits.
                self._fallback_vectors = [
                    entry for entry in self._fallback_vectors
                    if entry[0] != db_id
                ]
                self._fallback_vectors.append((db_id, vector[0]))

            self._add_count += 1
            return True

    def add_batch(self, items: List[Tuple[int, List[float]]]) -> int:
        """Add multiple vectors to the index.

        Args:
            items: List of (db_id, embedding) tuples

        Returns:
            Number of vectors added
        """
        if not items:
            return 0

        with self._lock:
            vectors = np.array([item[1] for item in items], dtype=np.float32)

            if FAISS_AVAILABLE and self._index is not None:
                faiss.normalize_L2(vectors)
                self._index.add(vectors)
                for db_id, _ in items:
                    internal_id = len(self._id_map)
                    self._id_map.append(db_id)
                    self._reverse_map[db_id] = internal_id
            else:
                # Replace any existing fallback entries for the incoming
                # ids so re-adding does not duplicate search results.
                incoming = {db_id for db_id, _ in items}
                self._fallback_vectors = [
                    entry for entry in self._fallback_vectors
                    if entry[0] not in incoming
                ]
                for i, (db_id, _) in enumerate(items):
                    self._fallback_vectors.append((db_id, vectors[i]))

            self._add_count += len(items)
            return len(items)

    def search(
        self,
        query_embedding: List[float],
        k: int = 10,
        threshold: float = 0.0
    ) -> List[Tuple[int, float]]:
        """Search for similar vectors.

        Args:
            query_embedding: Query vector
            k: Number of results to return
            threshold: Minimum similarity threshold (0-1 for cosine)

        Returns:
            List of (db_id, similarity) tuples, sorted by similarity descending
        """
        with self._lock:
            self._search_count += 1

            query = np.array([query_embedding], dtype=np.float32)

            if FAISS_AVAILABLE and self._index is not None and self._index.ntotal > 0:
                # Normalize query so inner product == cosine similarity.
                faiss.normalize_L2(query)
                # Over-fetch (k * 2) to compensate for stale duplicates
                # filtered out below.
                distances, indices = self._index.search(query, min(k * 2, self._index.ntotal))

                results = []
                seen_ids = set()
                for dist, idx in zip(distances[0], indices[0]):
                    idx = int(idx)
                    if idx < 0 or idx >= len(self._id_map):
                        continue
                    db_id = self._id_map[idx]
                    # Only the latest internal slot per db_id is live;
                    # older copies left behind by updates are skipped so
                    # an updated vector's NEW embedding is what matches.
                    if self._reverse_map.get(db_id) != idx:
                        continue
                    if db_id in seen_ids:
                        continue
                    seen_ids.add(db_id)

                    # Inner product of normalized vectors = cosine similarity
                    similarity = float(dist)
                    if similarity >= threshold:
                        results.append((db_id, similarity))

                    if len(results) >= k:
                        break

                return results

            # Fallback to numpy
            return self._numpy_search(query[0], k, threshold)

    def _numpy_search(
        self,
        query: np.ndarray,
        k: int,
        threshold: float
    ) -> List[Tuple[int, float]]:
        """Fallback brute-force cosine-similarity search over the
        in-memory vector list."""
        if not self._fallback_vectors:
            return []

        query_norm = np.linalg.norm(query)
        if query_norm == 0:
            # A zero query has no direction; nothing meaningful to rank.
            return []

        results = []
        for db_id, vec in self._fallback_vectors:
            vec_norm = np.linalg.norm(vec)
            if vec_norm == 0:
                continue  # skip degenerate stored vectors
            similarity = float(np.dot(query, vec) / (query_norm * vec_norm))
            if similarity >= threshold:
                results.append((db_id, similarity))

        results.sort(key=lambda x: x[1], reverse=True)
        return results[:k]

    def save(self) -> bool:
        """Persist the index and ID map to disk.

        Returns:
            True on success; False if FAISS is unavailable or writing failed.
        """
        if not FAISS_AVAILABLE or self._index is None:
            return False

        with self._lock:
            try:
                faiss.write_index(self._index, str(self.index_path))
                with open(self.id_map_path, 'w') as f:
                    json.dump(self._id_map, f)
                return True
            except Exception:
                return False

    def load(self) -> bool:
        """Load the index and ID map from disk.

        Returns:
            True if both files existed and loaded cleanly.
        """
        if not FAISS_AVAILABLE:
            return False

        with self._lock:
            try:
                if self.index_path.exists() and self.id_map_path.exists():
                    self._index = faiss.read_index(str(self.index_path))
                    with open(self.id_map_path, 'r') as f:
                        self._id_map = json.load(f)
                    self._reverse_map = {db_id: idx for idx, db_id in enumerate(self._id_map)}
                    return True
            except Exception:
                pass  # treat unreadable files as "nothing to load"
            return False

    def rebuild(self, items: List[Tuple[int, List[float]]]) -> int:
        """Rebuild the entire index from scratch.

        Args:
            items: List of (db_id, embedding) tuples

        Returns:
            Number of vectors indexed
        """
        with self._lock:
            self._create_index()
            self._fallback_vectors = []
            self._last_rebuild = time.time()

        # Intentionally outside the lock: add_batch() re-acquires the
        # non-reentrant Lock and would deadlock if called while held.
        return self.add_batch(items)

    def clear(self):
        """Discard all vectors (both FAISS and fallback storage)."""
        with self._lock:
            self._create_index()
            self._fallback_vectors = []

    def remove(self, db_id: int) -> bool:
        """Removal is unsupported for flat FAISS indexes.

        Always returns False; call rebuild() without the unwanted id to
        actually drop a vector.
        """
        return False

    def size(self) -> int:
        """Return the number of stored vectors (FAISS counts include
        stale duplicates left by updates until the next rebuild)."""
        if FAISS_AVAILABLE and self._index is not None:
            return self._index.ntotal
        return len(self._fallback_vectors)

    def get_stats(self) -> Dict[str, Any]:
        """Return a snapshot of index configuration and usage counters."""
        return {
            "name": self.name,
            "dimension": self.dimension,
            "index_type": self.index_type,
            "faiss_available": FAISS_AVAILABLE,
            "size": self.size(),
            "search_count": self._search_count,
            "add_count": self._add_count,
            "last_rebuild": self._last_rebuild,
            "index_path": str(self.index_path),
            "id_map_size": len(self._id_map)
        }
345
+
346
+
347
class VectorIndexManager:
    """Registry of named ``VectorIndex`` instances sharing one directory."""

    def __init__(self, index_dir: str = INDEX_DIR):
        self.index_dir = index_dir
        self._indexes: Dict[str, VectorIndex] = {}
        self._lock = Lock()

    def get_index(self, name: str, dimension: int = EMBEDDING_DIM) -> VectorIndex:
        """Return the index registered under *name*, creating it lazily."""
        with self._lock:
            try:
                return self._indexes[name]
            except KeyError:
                index = VectorIndex(
                    name=name,
                    dimension=dimension,
                    index_dir=self.index_dir
                )
                self._indexes[name] = index
                return index

    def save_all(self) -> Dict[str, bool]:
        """Persist every managed index; maps index name to success flag."""
        with self._lock:
            return {name: index.save() for name, index in self._indexes.items()}

    def get_all_stats(self) -> Dict[str, Dict[str, Any]]:
        """Collect per-index stats plus a global FAISS availability flag."""
        with self._lock:
            stats = {name: index.get_stats() for name, index in self._indexes.items()}
        stats["faiss_available"] = FAISS_AVAILABLE
        return stats
382
+
383
+
384
# Lazily-created global manager instance
_manager: Optional[VectorIndexManager] = None


def get_index_manager() -> VectorIndexManager:
    """Return the global index manager, constructing it on first call."""
    global _manager
    manager = _manager
    if manager is None:
        manager = VectorIndexManager()
        _manager = manager
    return manager
394
+
395
+
396
def get_index(name: str, dimension: int = EMBEDDING_DIM) -> VectorIndex:
    """Shorthand for ``get_index_manager().get_index(name, dimension)``."""
    manager = get_index_manager()
    return manager.get_index(name, dimension)
@@ -0,0 +1,257 @@
1
+ """WebSocket service for real-time dashboard updates.
2
+
3
+ Provides:
4
+ - Connection management for multiple clients
5
+ - Event broadcasting to all connected clients
6
+ - Filtered subscriptions by event type and project
7
+ - Heartbeat for connection health
8
+ """
9
+ import asyncio
10
+ import json
11
+ import time
12
+ from typing import Dict, Any, Set, Optional, List
13
+ from dataclasses import dataclass, field
14
+ from fastapi import WebSocket, WebSocketDisconnect
15
+
16
+
17
@dataclass
class WebSocketClient:
    """Represents a connected WebSocket client.

    Bundles the underlying socket with per-client delivery filters and
    heartbeat bookkeeping.
    """
    # The accepted WebSocket connection used for all sends.
    websocket: WebSocket
    # Unix timestamp recorded when this client object was created.
    connected_at: float = field(default_factory=time.time)
    # Event types this client receives; the wildcard "*" means all events.
    subscriptions: Set[str] = field(default_factory=set)
    # When set, broadcasts carrying a different project_path are skipped
    # for this client (global events with no project are still delivered).
    project_filter: Optional[str] = None
    # Unix timestamp of the most recent "ping" message from this client.
    last_ping: float = field(default_factory=time.time)
25
+
26
+
27
class WebSocketManager:
    """Manages WebSocket connections and broadcasts.

    Features:
    - Multiple client connections
    - Event type filtering ("*" subscribes to everything)
    - Project-based filtering
    - Automatic cleanup of clients whose sends fail
    - Heartbeat monitoring via ping/pong
    """

    def __init__(self):
        # client_id -> WebSocketClient
        self.clients: Dict[str, WebSocketClient] = {}
        # Monotonic counter used to mint unique client ids.
        self._client_counter = 0
        # NOTE(review): queue and flag are never read or written by the
        # methods below; kept to avoid breaking any external references.
        self._broadcast_queue: asyncio.Queue = asyncio.Queue()
        self._running = False

    async def connect(self, websocket: WebSocket) -> str:
        """Accept a new WebSocket connection.

        Returns:
            Client ID for this connection
        """
        await websocket.accept()
        self._client_counter += 1
        client_id = f"client_{self._client_counter}"

        self.clients[client_id] = WebSocketClient(
            websocket=websocket,
            subscriptions={"*"}  # Subscribe to all events by default
        )

        # Send welcome message
        await self._send_to_client(client_id, {
            "type": "connected",
            "client_id": client_id,
            "message": "Connected to Claude Memory real-time feed",
            "timestamp": time.time()
        })

        return client_id

    async def disconnect(self, client_id: str):
        """Forget a client; safe to call for already-removed ids."""
        if client_id in self.clients:
            del self.clients[client_id]

    async def subscribe(self, client_id: str, event_types: List[str], project_path: Optional[str] = None):
        """Update client subscriptions.

        Args:
            client_id: Client to update
            event_types: List of event types to subscribe to (or ["*"] for all)
            project_path: Optional project filter
        """
        if client_id not in self.clients:
            return

        client = self.clients[client_id]
        client.subscriptions = set(event_types)
        client.project_filter = project_path

        await self._send_to_client(client_id, {
            "type": "subscribed",
            "event_types": event_types,
            "project_filter": project_path,
            "timestamp": time.time()
        })

    async def broadcast(self, event_type: str, data: Dict[str, Any], project_path: Optional[str] = None):
        """Broadcast an event to all subscribed clients.

        Args:
            event_type: Type of event (memory_stored, memory_searched, timeline_logged, etc.)
            data: Event data payload
            project_path: Project this event relates to (for filtering);
                None means global and is delivered regardless of filters
        """
        message = {
            "type": event_type,
            "data": data,
            "project_path": project_path,
            "timestamp": time.time()
        }

        # Debug logging
        print(f"[WS] Broadcasting {event_type} to {len(self.clients)} clients, project={project_path}")

        # Send to all matching clients
        disconnected = []
        sent_count = 0
        # Snapshot the client map: each awaited send yields to the event
        # loop, during which connect()/disconnect() may mutate
        # self.clients — iterating the live dict would then raise
        # "dictionary changed size during iteration".
        for client_id, client in list(self.clients.items()):
            # Check if client is subscribed to this event type
            if "*" not in client.subscriptions and event_type not in client.subscriptions:
                print(f"[WS] Skipping {client_id}: not subscribed to {event_type}")
                continue

            # Check project filter: only applied when BOTH the event and
            # the client specify a project.
            if project_path and client.project_filter and client.project_filter != project_path:
                print(f"[WS] Skipping {client_id}: project mismatch ({client.project_filter} != {project_path})")
                continue

            try:
                await client.websocket.send_json(message)
                sent_count += 1
                print(f"[WS] Sent {event_type} to {client_id}")
            except Exception as e:
                # A failed send means a dead connection; collect for
                # cleanup after the loop so iteration stays safe.
                print(f"[WS] Error sending to {client_id}: {e}")
                disconnected.append(client_id)

        print(f"[WS] Broadcast complete: sent to {sent_count}/{len(self.clients)} clients")

        # Clean up disconnected clients
        for client_id in disconnected:
            await self.disconnect(client_id)

    async def _send_to_client(self, client_id: str, message: Dict[str, Any]):
        """Send a message to one client; drop the client if the send fails."""
        if client_id not in self.clients:
            return

        try:
            await self.clients[client_id].websocket.send_json(message)
        except Exception:
            await self.disconnect(client_id)

    async def handle_message(self, client_id: str, message: Dict[str, Any]):
        """Handle incoming message from client.

        Supported message types:
        - subscribe: Update subscriptions
        - ping: Heartbeat
        - get_stats: Connection statistics
        Unknown types are silently ignored.
        """
        msg_type = message.get("type")

        if msg_type == "subscribe":
            await self.subscribe(
                client_id,
                message.get("event_types", ["*"]),
                message.get("project_path")
            )

        elif msg_type == "ping":
            if client_id in self.clients:
                self.clients[client_id].last_ping = time.time()
            await self._send_to_client(client_id, {
                "type": "pong",
                "timestamp": time.time()
            })

        elif msg_type == "get_stats":
            # Send current stats
            await self._send_to_client(client_id, {
                "type": "stats",
                "connected_clients": len(self.clients),
                "timestamp": time.time()
            })

    def get_stats(self) -> Dict[str, Any]:
        """Get WebSocket service statistics."""
        return {
            "connected_clients": len(self.clients),
            "clients": [
                {
                    "id": cid,
                    "connected_at": c.connected_at,
                    "subscriptions": list(c.subscriptions),
                    "project_filter": c.project_filter,
                    "last_ping": c.last_ping
                }
                for cid, c in self.clients.items()
            ]
        }
201
+
202
+
203
# Lazily-created global WebSocket manager instance
_ws_manager: Optional[WebSocketManager] = None


def get_websocket_manager() -> WebSocketManager:
    """Return the process-wide WebSocketManager, creating it on demand."""
    global _ws_manager
    manager = _ws_manager
    if manager is None:
        manager = WebSocketManager()
        _ws_manager = manager
    return manager
213
+
214
+
215
# Event types for memory system
class EventTypes:
    """Standard event types for broadcasting.

    String constants passed as the ``event_type`` argument of
    ``WebSocketManager.broadcast`` / ``broadcast_event``; clients filter
    on these names (or subscribe to "*" for everything).
    """
    # Memory events
    MEMORY_STORED = "memory_stored"
    MEMORY_SEARCHED = "memory_searched"
    MEMORY_DELETED = "memory_deleted"
    MEMORY_ARCHIVED = "memory_archived"
    MEMORY_RESTORED = "memory_restored"

    # Timeline events
    TIMELINE_LOGGED = "timeline_logged"
    CHECKPOINT_CREATED = "checkpoint_created"

    # Session events
    SESSION_STARTED = "session_started"
    SESSION_ENDED = "session_ended"
    SESSION_SUMMARIZED = "session_summarized"

    # Anchor events
    ANCHOR_MARKED = "anchor_marked"
    ANCHOR_CONFLICT = "anchor_conflict"
    CONFLICT_RESOLVED = "conflict_resolved"

    # Admin events
    CLEANUP_STARTED = "cleanup_started"
    CLEANUP_COMPLETED = "cleanup_completed"
    REINDEX_PROGRESS = "reindex_progress"
    REINDEX_COMPLETED = "reindex_completed"

    # System events
    HEALTH_CHECK = "health_check"
    ERROR = "error"
+
249
+
250
async def broadcast_event(
    event_type: str,
    data: Dict[str, Any],
    project_path: Optional[str] = None
):
    """Convenience wrapper: broadcast through the global WebSocket manager."""
    await get_websocket_manager().broadcast(event_type, data, project_path)
@@ -0,0 +1,6 @@
1
"""Package-level re-exports of the primary skill entry points."""
from .store import store_memory
from .retrieve import retrieve_memory
from .search import semantic_search
from .summarize import summarize_session

__all__ = ["store_memory", "retrieve_memory", "semantic_search", "summarize_session"]