claude-memory-agent 2.1.0 → 2.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. package/bin/cli.js +11 -1
  2. package/bin/lib/banner.js +39 -0
  3. package/bin/lib/environment.js +166 -0
  4. package/bin/lib/installer.js +291 -0
  5. package/bin/lib/models.js +95 -0
  6. package/bin/lib/steps/advanced.js +101 -0
  7. package/bin/lib/steps/confirm.js +87 -0
  8. package/bin/lib/steps/model.js +57 -0
  9. package/bin/lib/steps/provider.js +65 -0
  10. package/bin/lib/steps/scope.js +59 -0
  11. package/bin/lib/steps/server.js +74 -0
  12. package/bin/lib/ui.js +75 -0
  13. package/bin/onboarding.js +164 -0
  14. package/bin/postinstall.js +22 -257
  15. package/config.py +103 -4
  16. package/dashboard.html +697 -27
  17. package/hooks/extract_memories.py +439 -0
  18. package/hooks/pre_compact_hook.py +76 -0
  19. package/hooks/session_end_hook.py +149 -0
  20. package/hooks/stop_hook.py +372 -0
  21. package/install.py +91 -37
  22. package/main.py +1636 -892
  23. package/mcp_server.py +451 -0
  24. package/package.json +14 -3
  25. package/requirements.txt +12 -8
  26. package/services/adaptive_ranker.py +272 -0
  27. package/services/agent_catalog.json +153 -0
  28. package/services/agent_registry.py +245 -730
  29. package/services/claude_md_sync.py +320 -4
  30. package/services/consolidation.py +417 -0
  31. package/services/database.py +586 -105
  32. package/services/embedding_pipeline.py +262 -0
  33. package/services/embeddings.py +493 -85
  34. package/services/memory_decay.py +408 -0
  35. package/services/native_memory_paths.py +86 -0
  36. package/services/native_memory_sync.py +496 -0
  37. package/services/response_manager.py +183 -0
  38. package/services/terminal_ui.py +199 -0
  39. package/services/tier_manager.py +235 -0
  40. package/services/websocket.py +26 -6
  41. package/skills/search.py +136 -61
  42. package/skills/session_review.py +210 -23
  43. package/skills/store.py +125 -18
  44. package/terminal_dashboard.py +474 -0
  45. package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
  46. package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
  47. package/hooks/__pycache__/grounding-hook.cpython-312.pyc +0 -0
  48. package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
  49. package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
  50. package/services/__pycache__/__init__.cpython-312.pyc +0 -0
  51. package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
  52. package/services/__pycache__/auth.cpython-312.pyc +0 -0
  53. package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
  54. package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
  55. package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
  56. package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
  57. package/services/__pycache__/confidence.cpython-312.pyc +0 -0
  58. package/services/__pycache__/curator.cpython-312.pyc +0 -0
  59. package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
  60. package/services/__pycache__/database.cpython-312.pyc +0 -0
  61. package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
  62. package/services/__pycache__/insights.cpython-312.pyc +0 -0
  63. package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
  64. package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
  65. package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
  66. package/services/__pycache__/timeline.cpython-312.pyc +0 -0
  67. package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
  68. package/services/__pycache__/websocket.cpython-312.pyc +0 -0
  69. package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
  70. package/skills/__pycache__/admin.cpython-312.pyc +0 -0
  71. package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
  72. package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
  73. package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
  74. package/skills/__pycache__/confidence_tracker.cpython-312.pyc +0 -0
  75. package/skills/__pycache__/context.cpython-312.pyc +0 -0
  76. package/skills/__pycache__/curator.cpython-312.pyc +0 -0
  77. package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
  78. package/skills/__pycache__/insights.cpython-312.pyc +0 -0
  79. package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
  80. package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
  81. package/skills/__pycache__/search.cpython-312.pyc +0 -0
  82. package/skills/__pycache__/session_review.cpython-312.pyc +0 -0
  83. package/skills/__pycache__/state.cpython-312.pyc +0 -0
  84. package/skills/__pycache__/store.cpython-312.pyc +0 -0
  85. package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
  86. package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
  87. package/skills/__pycache__/verification.cpython-312.pyc +0 -0
  88. package/test_automation.py +0 -221
  89. package/test_complete.py +0 -338
  90. package/test_full.py +0 -322
  91. package/verify_db.py +0 -134
package/skills/store.py CHANGED
@@ -2,7 +2,7 @@
2
2
  import logging
3
3
  from typing import Dict, Any, Optional, List
4
4
  from services.database import DatabaseService
5
- from services.embeddings import EmbeddingService
5
+ from services.embeddings import EmbeddingService, EmbeddingError
6
6
 
7
7
  logger = logging.getLogger(__name__)
8
8
 
@@ -43,6 +43,14 @@ async def _auto_infer_relationships(
43
43
  relationships_created = []
44
44
  content_lower = content.lower()
45
45
 
46
+ # Generate embedding once for all similarity-based detections
47
+ cached_embedding = None
48
+ if embeddings:
49
+ try:
50
+ cached_embedding = await embeddings.generate_embedding(content)
51
+ except Exception as e:
52
+ logger.debug(f"Embedding generation for relationship inference failed: {e}")
53
+
46
54
  # 1. Fix Detection: If this is a successful decision/code after a recent error
47
55
  if outcome == 'success' and memory_type in ['decision', 'code']:
48
56
  if session_id:
@@ -65,11 +73,10 @@ async def _auto_infer_relationships(
65
73
  # 2. Causal Keyword Detection
66
74
  causal_keywords = ['because', 'due to', 'caused by', 'result of', 'since']
67
75
  if any(kw in content_lower for kw in causal_keywords):
68
- if embeddings:
76
+ if cached_embedding is not None:
69
77
  try:
70
- embedding = await embeddings.generate_embedding(content)
71
78
  similar = await db.search_similar(
72
- embedding, limit=3, threshold=0.7, project_path=project_path
79
+ cached_embedding, limit=3, threshold=0.7, project_path=project_path
73
80
  )
74
81
  for mem in similar:
75
82
  if mem['id'] != memory_id:
@@ -84,11 +91,10 @@ async def _auto_infer_relationships(
84
91
  # 3. Support Detection
85
92
  support_keywords = ['supports', 'evidence for', 'proves', 'confirms', 'validates']
86
93
  if any(kw in content_lower for kw in support_keywords):
87
- if embeddings:
94
+ if cached_embedding is not None:
88
95
  try:
89
- embedding = await embeddings.generate_embedding(content)
90
96
  similar = await db.search_similar(
91
- embedding, limit=2, threshold=0.75, project_path=project_path
97
+ cached_embedding, limit=2, threshold=0.75, project_path=project_path
92
98
  )
93
99
  for mem in similar:
94
100
  if mem['id'] != memory_id:
@@ -103,11 +109,10 @@ async def _auto_infer_relationships(
103
109
  # 4. Contradiction Detection
104
110
  contradiction_keywords = ['but actually', 'wrong', 'incorrect', 'not true', 'instead', 'actually']
105
111
  if any(kw in content_lower for kw in contradiction_keywords):
106
- if embeddings:
112
+ if cached_embedding is not None:
107
113
  try:
108
- embedding = await embeddings.generate_embedding(content)
109
114
  similar = await db.search_similar(
110
- embedding, limit=2, threshold=0.8, project_path=project_path
115
+ cached_embedding, limit=2, threshold=0.8, project_path=project_path
111
116
  )
112
117
  for mem in similar:
113
118
  if mem['id'] != memory_id:
@@ -122,9 +127,8 @@ async def _auto_infer_relationships(
122
127
  # 5. Temporal Proximity: Link to recent memories in same session
123
128
  if session_id:
124
129
  try:
125
- # Get recent memories from same session (any type)
126
130
  recent = await db.get_memories_by_type(
127
- memory_type=memory_type, # Same type for relevance
131
+ memory_type=memory_type,
128
132
  session_id=session_id,
129
133
  limit=3
130
134
  )
@@ -139,11 +143,10 @@ async def _auto_infer_relationships(
139
143
  logger.debug(f"Temporal proximity detection failed: {e}")
140
144
 
141
145
  # 6. High Semantic Similarity: Strong related link
142
- if embeddings:
146
+ if cached_embedding is not None:
143
147
  try:
144
- embedding = await embeddings.generate_embedding(content)
145
148
  very_similar = await db.search_similar(
146
- embedding, limit=2, threshold=0.85, project_path=project_path
149
+ cached_embedding, limit=2, threshold=0.85, project_path=project_path
147
150
  )
148
151
  for mem in very_similar:
149
152
  if mem['id'] != memory_id:
@@ -232,8 +235,89 @@ async def store_memory(
232
235
  Returns:
233
236
  Dict with stored memory ID and status
234
237
  """
235
- # Generate embedding for the content
236
- embedding = await embeddings.generate_embedding(content)
238
+ # Consolidate legacy outcome/success into outcome_status
239
+ # If caller uses legacy params, derive outcome_status from them
240
+ if outcome_status == 'pending':
241
+ if success is True:
242
+ outcome_status = 'success'
243
+ elif success is False:
244
+ outcome_status = 'failed'
245
+ elif outcome and isinstance(outcome, str):
246
+ # Map common outcome text values
247
+ outcome_lower = outcome.lower().strip()
248
+ if outcome_lower in ('success', 'worked', 'fixed', 'resolved'):
249
+ outcome_status = 'success'
250
+ elif outcome_lower in ('failed', 'broken', 'error'):
251
+ outcome_status = 'failed'
252
+ elif outcome_lower in ('partial', 'partially'):
253
+ outcome_status = 'partial'
254
+
255
+ # Generate embedding for the content with status tracking
256
+ embed_result = await embeddings.generate_embedding_with_status(content)
257
+ embedding = embed_result.embedding
258
+
259
+ if not embed_result.ok:
260
+ logger.warning(
261
+ f"Embedding failed ({embed_result.error.value}): {embed_result.error_message}. "
262
+ f"Memory will be stored without embedding (not semantically searchable)."
263
+ )
264
+
265
+ # === Dedup check: find near-duplicates before storing ===
266
+ link_to_id = None # Set if we find a near-duplicate to link after storing
267
+ if embedding:
268
+ try:
269
+ duplicates = await db.find_similar_for_dedup(
270
+ embedding=embedding,
271
+ project_path=project_path,
272
+ threshold=0.92,
273
+ limit=3
274
+ )
275
+ if duplicates:
276
+ best_match = duplicates[0]
277
+ if best_match['similarity'] >= 0.95:
278
+ # Very high similarity - merge into existing memory
279
+ # Keeps longer content, higher importance/confidence
280
+ updated_id = await db.merge_memory(
281
+ existing_id=best_match['id'],
282
+ new_content=content,
283
+ new_importance=importance,
284
+ new_confidence=confidence
285
+ )
286
+ logger.info(
287
+ f"Dedup: merged with memory #{best_match['id']} "
288
+ f"(similarity: {best_match['similarity']:.3f})"
289
+ )
290
+ return {
291
+ "success": True,
292
+ "memory_id": updated_id,
293
+ "action": "merged",
294
+ "merged_with": best_match['id'],
295
+ "similarity": best_match['similarity'],
296
+ "type": memory_type,
297
+ "importance": importance,
298
+ "confidence": confidence,
299
+ "outcome_status": outcome_status,
300
+ "project": project_path,
301
+ "relationships_created": [],
302
+ "message": (
303
+ f"Merged with existing memory #{best_match['id']} "
304
+ f"(similarity: {best_match['similarity']:.2f})"
305
+ )
306
+ }
307
+ elif best_match['similarity'] >= 0.92:
308
+ # High similarity but not identical - store new but link as related
309
+ # We'll create the relationship after storing below
310
+ link_to_id = best_match['id']
311
+ link_similarity = best_match['similarity']
312
+ logger.info(
313
+ f"Dedup: will link to memory #{link_to_id} "
314
+ f"(similarity: {link_similarity:.3f})"
315
+ )
316
+ except Exception as e:
317
+ # Dedup is best-effort; never block a store operation
318
+ logger.warning(f"Dedup check failed (non-fatal): {e}")
319
+ link_to_id = None
320
+ # === End dedup check ===
237
321
 
238
322
  # Store in database with full context
239
323
  memory_id = await db.store_memory(
@@ -280,7 +364,21 @@ async def store_memory(
280
364
  logger.warning(f"Failed to auto-infer relationships: {e}")
281
365
  # Don't fail the store operation if relationship inference fails
282
366
 
283
- return {
367
+ # If dedup found a near-duplicate (0.92-0.95 range), link as related
368
+ dedup_linked = False
369
+ if link_to_id is not None:
370
+ try:
371
+ result = await db.create_relationship(
372
+ memory_id, link_to_id, 'related', strength=0.9
373
+ )
374
+ if result.get('success'):
375
+ relationships_created.append(f"near-duplicate of #{link_to_id}")
376
+ dedup_linked = True
377
+ logger.info(f"Dedup: linked memory #{memory_id} to near-duplicate #{link_to_id}")
378
+ except Exception as e:
379
+ logger.warning(f"Failed to create dedup relationship: {e}")
380
+
381
+ response = {
284
382
  "success": True,
285
383
  "memory_id": memory_id,
286
384
  "type": memory_type,
@@ -289,8 +387,17 @@ async def store_memory(
289
387
  "outcome_status": outcome_status,
290
388
  "project": project_path,
291
389
  "relationships_created": relationships_created,
390
+ "has_embedding": embedding is not None,
292
391
  "message": f"Memory stored successfully with ID {memory_id}"
293
392
  }
393
+ if not embed_result.ok:
394
+ response["embedding_error"] = embed_result.error.value
395
+ response["embedding_error_detail"] = embed_result.error_message
396
+ if dedup_linked:
397
+ response["action"] = "stored_and_linked"
398
+ response["linked_to"] = link_to_id
399
+
400
+ return response
294
401
 
295
402
 
296
403
  async def store_project(
@@ -0,0 +1,474 @@
1
+ #!/usr/bin/env python3
2
+ """Live Terminal Dashboard for Claude Memory Agent.
3
+
4
+ A standalone real-time dashboard that polls the running server and displays
5
+ live statistics using Rich's Live display.
6
+
7
+ Usage:
8
+ python terminal_dashboard.py # Default: http://localhost:8102
9
+ python terminal_dashboard.py --port 8103 # Custom port
10
+ python terminal_dashboard.py --refresh 3 # Refresh every 3 seconds
11
+
12
+ Press Ctrl+C to exit.
13
+ """
14
+ import sys
15
+ import time
16
+ import argparse
17
+ import httpx
18
+ from datetime import datetime, timedelta
19
+ from typing import Dict, Any, Optional
20
+
21
+ from rich.console import Console
22
+ from rich.panel import Panel
23
+ from rich.table import Table
24
+ from rich.text import Text
25
+ from rich.layout import Layout
26
+ from rich.live import Live
27
+ from rich.align import Align
28
+ from rich.columns import Columns
29
+ from rich.progress_bar import ProgressBar
30
+ from rich.spinner import Spinner
31
+ from rich.theme import Theme
32
+
33
# Rich theme: named styles referenced by markup throughout the panels
# (e.g. "[hot]...[/hot]", "[muted]...[/muted]").
# NOTE(review): "error.style" is inconsistent with the other plain names
# ("info", "warning", ...) — confirm whether callers use [error] or
# [error.style] markup; nothing in this file references either.
THEME = Theme({
    "info": "cyan",
    "warning": "yellow",
    "error.style": "bold red",
    "success": "bold green",
    "hot": "bold red",        # hot-tier accent
    "warm": "bold yellow",    # warm-tier accent
    "cold": "bold blue",      # cold-tier accent
    "header": "bold magenta",
    "muted": "dim white",
    "accent": "bold cyan",
})

# Single shared console; all output goes through it so THEME styles apply.
console = Console(theme=THEME)
47
+
48
+
49
+
50
class DashboardState:
    """Mutable snapshot of everything the dashboard has fetched so far.

    `fetch_data` fills these fields in place on every poll; the panel
    builders only read them.
    """

    def __init__(self):
        # Raw JSON payloads, one per server endpoint (empty until fetched).
        self.health = {}
        self.stats = {}
        self.tier_stats = {}
        self.consolidation_stats = {}
        self.pipeline_stats = {}
        self.index_stats = {}
        self.decay_stats = {}
        self.recent_activity = []

        # Polling bookkeeping.
        self.last_update = None       # datetime of the last successful poll
        self.error = None             # last error message, None while healthy
        self.uptime_start = None      # first moment the server was reachable
        self.refresh_count = 0        # number of successful polls so far
        self.connection_errors = 0    # consecutive failed polls
67
+
68
+
69
def fetch_data(base_url: str, state: DashboardState):
    """Fetch all dashboard data from the server, updating *state* in place.

    The /health endpoint is mandatory: if it fails, ``state.error`` is set
    and ``state.connection_errors`` is incremented. The auxiliary stats
    endpoints are best-effort — each may fail independently, in which case
    the previously fetched (stale) data for that panel is kept.
    """
    # (endpoint path, DashboardState attribute) pairs for best-effort stats.
    optional_endpoints = (
        ("/api/stats", "stats"),
        ("/api/tiers/stats", "tier_stats"),
        ("/api/consolidation/stats", "consolidation_stats"),
        ("/api/embedding-pipeline/stats", "pipeline_stats"),
        ("/api/index-stats", "index_stats"),
        ("/api/decay/stats", "decay_stats"),
    )

    state.error = None
    try:
        # Context manager guarantees the client is closed on every path
        # (replaces the manual try/finally close).
        with httpx.Client(timeout=5.0) as client:
            # Health check — required; a failure aborts the whole poll.
            r = client.get(f"{base_url}/health")
            state.health = r.json()
            if state.uptime_start is None:
                state.uptime_start = datetime.now()

            # Best-effort stats: keep whatever we already had on failure.
            for path, attr in optional_endpoints:
                try:
                    r = client.get(f"{base_url}{path}")
                    setattr(state, attr, r.json())
                except Exception:
                    pass

        state.last_update = datetime.now()
        state.refresh_count += 1
        state.connection_errors = 0

    except httpx.ConnectError:
        state.error = "Cannot connect to server"
        state.connection_errors += 1
    except Exception as e:
        state.error = str(e)
        state.connection_errors += 1
135
+
136
+
137
def make_header(state: DashboardState, base_url: str) -> Panel:
    """Render the top banner: app name, connection badge, uptime, refresh count."""
    banner = Text()
    banner.append(" Claude", style="bold cyan")
    banner.append("Memory", style="bold white")
    banner.append(" ", style="")

    # Red DISCONNECTED badge while erroring, green LIVE badge otherwise.
    badge, badge_style = (
        (" DISCONNECTED ", "bold white on red")
        if state.error
        else (" LIVE ", "bold white on green")
    )
    banner.append(badge, style=badge_style)
    banner.append(f" {base_url}", style="muted")

    # Uptime since the server was first reachable.
    if state.uptime_start:
        elapsed = (datetime.now() - state.uptime_start).total_seconds()
        hours, mins = int(elapsed // 3600), int((elapsed % 3600) // 60)
        banner.append(f" | Uptime: {hours}h{mins:02d}m", style="muted")

    banner.append(f" | Refresh #{state.refresh_count}", style="muted")

    return Panel(Align.center(banner), style="cyan", height=3)
162
+
163
+
164
def make_memory_panel(state: DashboardState) -> Panel:
    """Render total memory count, per-type breakdown bars, and pattern/event tallies."""
    grid = Table(show_header=False, box=None, padding=(0, 1), expand=True)
    grid.add_column("label", style="dim cyan", width=16)
    grid.add_column("value", style="bold white")

    data = state.stats
    total = data.get('total_memories', 0)
    grid.add_row("Total Memories", f"[bold]{total}[/bold]")

    # One row per memory type, most frequent first, with a 20-cell bar
    # proportional to the type's share of the total.
    type_counts = data.get('type_counts', {})
    for mtype, count in sorted(type_counts.items(), key=lambda kv: -kv[1]):
        filled = min(int(count / max(total, 1) * 20), 20)
        bar = "[green]" + "=" * filled + "[/green]" + "[muted]" + "-" * (20 - filled) + "[/muted]"
        grid.add_row(f" {mtype}", f"{count:>4} {bar}")

    grid.add_row("", "")
    grid.add_row("Patterns", str(data.get('total_patterns', 0)))
    grid.add_row("Timeline Events", str(data.get('total_timeline_events', 0)))

    return Panel(grid, title="[bold]Memories[/bold]", border_style="green")
187
+
188
+
189
def make_tier_panel(state: DashboardState) -> Panel:
    """Create the tier distribution panel.

    One row per tier (hot/warm/cold) showing its memory count, average
    importance, and a 20-cell bar of its share of all memories.
    """
    table = Table(show_header=True, box=None, padding=(0, 1), expand=True)
    table.add_column("Tier", style="dim cyan", width=8)
    table.add_column("Count", style="bold white", width=8, justify="right")
    table.add_column("Avg Imp", style="dim white", width=8, justify="right")
    table.add_column("", width=20)

    tiers = state.tier_stats.get('tiers', {})
    # `or 1` guards against a reported total of 0 (avoids ZeroDivisionError).
    total = state.tier_stats.get('total_memories', 1) or 1

    # Maps tier name to its THEME style of the same name.
    tier_styles = {'hot': 'hot', 'warm': 'warm', 'cold': 'cold'}

    for tier_name in ['hot', 'warm', 'cold']:
        info = tiers.get(tier_name, {'count': 0, 'avg_importance': 0})
        count = info.get('count', 0)
        avg_imp = info.get('avg_importance', 0)
        pct = count / total * 100

        # 20-cell bar: one cell per 5 percentage points, clamped to 20.
        bar_width = min(int(pct / 5), 20)
        style = tier_styles.get(tier_name, 'muted')
        bar = f"[{style}]" + "|" * bar_width + f"[/{style}]" + " " * (20 - bar_width)

        # ASCII "heat" icon per tier, coloured with the tier's theme style.
        icon = {"hot": "[hot]***[/hot]", "warm": "[warm] ** [/warm]", "cold": "[cold] * [/cold]"}.get(tier_name, "")
        table.add_row(
            f"[{style}]{tier_name.upper()}[/{style}] {icon}",
            str(count),
            f"{avg_imp:.1f}",
            f"{bar} {pct:.0f}%"
        )

    return Panel(table, title="[bold]Memory Tiers[/bold]", border_style="yellow")
221
+
222
+
223
def make_health_panel(state: DashboardState) -> Panel:
    """Create the system health panel.

    Summarises the /health payload: overall server status plus the
    Ollama, database, and vector-index sub-checks. Rows are omitted when
    the payload lacks (or malforms) the corresponding section.
    """
    table = Table(show_header=False, box=None, padding=(0, 1), expand=True)
    table.add_column("key", style="dim cyan", width=16)
    table.add_column("value", style="bold white")

    health = state.health

    # Server status: green/yellow for the known states, red for anything else
    # (including the 'unknown' default when the key is missing).
    status = health.get('status', 'unknown')
    if status == 'healthy':
        table.add_row("Server", "[green]Healthy[/green]")
    elif status == 'degraded':
        table.add_row("Server", "[yellow]Degraded[/yellow]")
    else:
        table.add_row("Server", f"[red]{status}[/red]")

    # Ollama: the isinstance guard tolerates a non-dict payload (shown raw).
    ollama = health.get('ollama', {})
    if isinstance(ollama, dict):
        if ollama.get('healthy'):
            model = ollama.get('model', '?')
            table.add_row("Ollama", f"[green]OK[/green] ({model})")
        else:
            table.add_row("Ollama", "[red]Down[/red]")
    else:
        table.add_row("Ollama", str(ollama))

    # Database connectivity (row omitted entirely if payload isn't a dict).
    db_info = health.get('database', {})
    if isinstance(db_info, dict):
        if db_info.get('connected'):
            table.add_row("Database", "[green]Connected[/green]")
        else:
            table.add_row("Database", "[red]Disconnected[/red]")

    # Vector index backend: FAISS when available, NumPy fallback otherwise.
    index = health.get('vector_index', {})
    if isinstance(index, dict):
        faiss = "[green]FAISS[/green]" if index.get('faiss_available') else "[yellow]NumPy[/yellow]"
        table.add_row("Vector Index", faiss)

    return Panel(table, title="[bold]Health[/bold]", border_style="green")
266
+
267
+
268
def make_pipeline_panel(state: DashboardState) -> Panel:
    """Create the embedding pipeline panel.

    Shows embedding-cache occupancy and hit rate, whether the precompute
    task is running, and an Ollama-degraded warning row when reported.
    """
    table = Table(show_header=False, box=None, padding=(0, 1), expand=True)
    table.add_column("key", style="dim cyan", width=16)
    table.add_column("value", style="bold white")

    pipeline = state.pipeline_stats
    cache = pipeline.get('cache', {})

    if cache:
        size = cache.get('size', 0)
        max_size = cache.get('max_size', 0)
        hits = cache.get('hits', 0)
        misses = cache.get('misses', 0)
        hit_rate = cache.get('hit_rate', 0)
        mem_mb = cache.get('estimated_memory_mb', 0)

        # Cache fill bar: 20 cells, one per 5% of capacity, clamped.
        fill_pct = size / max(max_size, 1) * 100
        fill_bar_w = min(int(fill_pct / 5), 20)
        fill_bar = "[cyan]" + "|" * fill_bar_w + "[/cyan]" + "[muted]" + "-" * (20 - fill_bar_w) + "[/muted]"

        # Hoisted: colour computed once instead of repeating the same
        # conditional inside both the opening and closing markup tags.
        rate_color = 'green' if hit_rate > 0.5 else 'yellow'

        table.add_row("Cache Fill", f"{size}/{max_size} {fill_bar}")
        table.add_row("Hit Rate", f"[{rate_color}]{hit_rate:.1%}[/{rate_color}]")
        table.add_row("Hits / Misses", f"[green]{hits}[/green] / [yellow]{misses}[/yellow]")
        table.add_row("Memory", f"{mem_mb:.2f} MB")
    else:
        table.add_row("Status", "[muted]Not initialized[/muted]")

    # Precompute task status.
    if pipeline.get('precompute_running', False):
        table.add_row("Precompute", "[green]Running[/green]")
    else:
        table.add_row("Precompute", "[muted]Idle[/muted]")

    # Only surface the Ollama row when the service reports degradation.
    if pipeline.get('service_degraded', False):
        table.add_row("Ollama", "[red]Degraded[/red]")

    return Panel(table, title="[bold]Embedding Pipeline[/bold]", border_style="cyan")
308
+
309
+
310
def make_consolidation_panel(state: DashboardState) -> Panel:
    """Render memory-consolidation counters: merged, archived, group size, savings."""
    stats = state.consolidation_stats
    rows = [
        ("Consolidated", str(stats.get('consolidated_memories', 0))),
        ("Archived", str(stats.get('archived_originals', 0))),
        ("Avg Group Size", str(stats.get('avg_group_size', 0))),
        ("Space Saved", stats.get('space_savings_estimate', 'N/A')),
    ]

    body = Table(show_header=False, box=None, padding=(0, 1), expand=True)
    body.add_column("key", style="dim cyan", width=16)
    body.add_column("value", style="bold white")
    for label, value in rows:
        body.add_row(label, value)

    return Panel(body, title="[bold]Consolidation[/bold]", border_style="magenta")
328
+
329
+
330
def make_decay_panel(state: DashboardState) -> Panel:
    """Create the memory decay stats panel.

    Shows permanent vs decayable memory counts, a green/red health bar
    over the decayable set, and how many memories decay has archived.
    """
    table = Table(show_header=False, box=None, padding=(0, 1), expand=True)
    table.add_column("key", style="dim cyan", width=16)
    table.add_column("value", style="bold white")

    ds = state.decay_stats

    permanent = ds.get('permanent_count', 0)
    decayable = ds.get('decayable_count', 0)
    at_risk = ds.get('at_risk_count', 0)
    healthy = ds.get('healthy_count', 0)
    archived = ds.get('archived_by_decay', 0)

    table.add_row("Permanent", f"[green]{permanent}[/green]")
    table.add_row("Decayable", str(decayable))

    if decayable > 0:
        # 20-cell bar: red cells proportional to the at-risk share of the
        # decayable set, remainder green. The percentage shown is
        # healthy/decayable, which need not equal the green share of the
        # bar unless healthy + at_risk == decayable.
        health_pct = healthy / max(decayable, 1) * 100
        risk_bar_w = min(int(at_risk / max(decayable, 1) * 20), 20)
        health_bar_w = 20 - risk_bar_w
        bar = "[green]" + "|" * health_bar_w + "[/green]" + "[red]" + "|" * risk_bar_w + "[/red]"
        table.add_row("Health", f"{bar} {health_pct:.0f}%")

    # At-risk count is rendered red whenever non-zero.
    table.add_row("At Risk", f"[{'red' if at_risk > 0 else 'green'}]{at_risk}[/{'red' if at_risk > 0 else 'green'}]")
    table.add_row("Archived", str(archived))

    return Panel(table, title="[bold]Memory Decay[/bold]", border_style="yellow")
358
+
359
+
360
def make_index_panel(state: DashboardState) -> Panel:
    """Render vector-index backend plus per-index size/search counters."""
    info = state.index_stats

    grid = Table(show_header=False, box=None, padding=(0, 1), expand=True)
    grid.add_column("key", style="dim cyan", width=16)
    grid.add_column("value", style="bold white")

    backend = "[green]FAISS[/green]" if info.get('faiss_available', False) else "[yellow]NumPy[/yellow]"
    grid.add_row("Backend", backend)

    # One row per known index; indexes the server did not report are skipped.
    for name in ('memories', 'patterns', 'timeline'):
        entry = info.get(name, {})
        if entry:
            vectors = entry.get('size', 0)
            searches = entry.get('search_count', 0)
            grid.add_row(f" {name}", f"{vectors} vectors, {searches} searches")

    return Panel(grid, title="[bold]Vector Index[/bold]", border_style="blue")
379
+
380
+
381
def make_error_panel(state: DashboardState) -> Panel:
    """Render the full-body panel shown while the server is unreachable."""
    fragments = [
        ("\n Cannot connect to server\n\n", "bold red"),
        (f" {state.error}\n\n", "muted"),
        (f" Consecutive errors: {state.connection_errors}\n", "yellow"),
        (" Retrying...\n", "muted"),
    ]
    body = Text()
    for fragment, style in fragments:
        body.append(fragment, style=style)
    return Panel(body, title="[bold red]Connection Error[/bold red]", border_style="red")
389
+
390
+
391
def build_layout(state: DashboardState, base_url: str) -> Layout:
    """Build the full dashboard layout.

    Three vertical regions: a fixed-height header, a body (either the
    dedicated error panel after repeated connection failures, or two rows
    of stat panels), and a fixed-height footer with key hints and the
    last-update timestamp.
    """
    layout = Layout()

    layout.split_column(
        Layout(name="header", size=3),
        Layout(name="body"),
        Layout(name="footer", size=3),
    )

    # Header
    layout["header"].update(make_header(state, base_url))

    # Show the error panel only after >2 consecutive failures, so that a
    # single dropped poll doesn't blank out the stats.
    if state.error and state.connection_errors > 2:
        layout["body"].update(make_error_panel(state))
    else:
        # Body: two rows of panels
        layout["body"].split_column(
            Layout(name="top_row", size=14),
            Layout(name="bottom_row"),
        )

        layout["top_row"].split_row(
            Layout(make_memory_panel(state), name="memories"),
            Layout(make_tier_panel(state), name="tiers"),
            Layout(make_health_panel(state), name="health"),
        )

        layout["bottom_row"].split_row(
            Layout(make_pipeline_panel(state), name="pipeline"),
            Layout(make_consolidation_panel(state), name="consolidation"),
            Layout(make_decay_panel(state), name="decay"),
            Layout(make_index_panel(state), name="index"),
        )

    # Footer: exit hint plus last-successful-poll time (if any yet).
    footer_text = Text()
    footer_text.append(" Press ", style="muted")
    footer_text.append("Ctrl+C", style="bold")
    footer_text.append(" to exit", style="muted")
    if state.last_update:
        footer_text.append(f" | Last updated: {state.last_update.strftime('%H:%M:%S')}", style="muted")
    layout["footer"].update(Panel(footer_text, style="muted"))

    return layout
436
+
437
+
438
def main():
    """Parse CLI flags, then poll the server and redraw until Ctrl+C."""
    parser = argparse.ArgumentParser(description="Claude Memory Agent - Live Dashboard")
    parser.add_argument("--port", type=int, default=8102, help="Server port (default: 8102)")
    parser.add_argument("--host", type=str, default="localhost", help="Server host (default: localhost)")
    parser.add_argument("--refresh", type=float, default=2.0, help="Refresh interval in seconds (default: 2)")
    opts = parser.parse_args()

    base_url = f"http://{opts.host}:{opts.port}"
    state = DashboardState()

    console.print(f"\n[bold cyan]Connecting to {base_url}...[/bold cyan]\n")

    # Prime the state once so the first frame has data (or a warning).
    fetch_data(base_url, state)
    if state.error:
        console.print(f"[red]Warning: {state.error}[/red]")
        console.print("[muted]Dashboard will retry automatically...[/muted]\n")

    # Full-screen live view; redraw after every poll interval.
    try:
        live_options = dict(console=console, refresh_per_second=1, screen=True)
        with Live(build_layout(state, base_url), **live_options) as live_view:
            while True:
                time.sleep(opts.refresh)
                fetch_data(base_url, state)
                live_view.update(build_layout(state, base_url))
    except KeyboardInterrupt:
        console.print("\n[muted]Dashboard stopped.[/muted]")


if __name__ == "__main__":
    main()