claude-memory-agent 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/.env.example +107 -0
  2. package/README.md +200 -0
  3. package/agent_card.py +512 -0
  4. package/bin/cli.js +181 -0
  5. package/bin/postinstall.js +216 -0
  6. package/config.py +104 -0
  7. package/dashboard.html +2689 -0
  8. package/hooks/README.md +196 -0
  9. package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
  10. package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
  11. package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
  12. package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
  13. package/hooks/auto-detect-response.py +348 -0
  14. package/hooks/auto_capture.py +255 -0
  15. package/hooks/detect-correction.py +173 -0
  16. package/hooks/grounding-hook.py +348 -0
  17. package/hooks/log-tool-use.py +234 -0
  18. package/hooks/log-user-request.py +208 -0
  19. package/hooks/pre-tool-decision.py +218 -0
  20. package/hooks/problem-detector.py +343 -0
  21. package/hooks/session_end.py +192 -0
  22. package/hooks/session_start.py +227 -0
  23. package/install.py +887 -0
  24. package/main.py +2859 -0
  25. package/manager.py +997 -0
  26. package/package.json +55 -0
  27. package/requirements.txt +8 -0
  28. package/run_server.py +136 -0
  29. package/services/__init__.py +50 -0
  30. package/services/__pycache__/__init__.cpython-312.pyc +0 -0
  31. package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
  32. package/services/__pycache__/auth.cpython-312.pyc +0 -0
  33. package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
  34. package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
  35. package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
  36. package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
  37. package/services/__pycache__/confidence.cpython-312.pyc +0 -0
  38. package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
  39. package/services/__pycache__/database.cpython-312.pyc +0 -0
  40. package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
  41. package/services/__pycache__/insights.cpython-312.pyc +0 -0
  42. package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
  43. package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
  44. package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
  45. package/services/__pycache__/timeline.cpython-312.pyc +0 -0
  46. package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
  47. package/services/__pycache__/websocket.cpython-312.pyc +0 -0
  48. package/services/agent_registry.py +753 -0
  49. package/services/auth.py +331 -0
  50. package/services/auto_inject.py +250 -0
  51. package/services/claude_md_sync.py +275 -0
  52. package/services/cleanup.py +667 -0
  53. package/services/compaction_flush.py +447 -0
  54. package/services/confidence.py +301 -0
  55. package/services/daily_log.py +333 -0
  56. package/services/database.py +2485 -0
  57. package/services/embeddings.py +358 -0
  58. package/services/insights.py +632 -0
  59. package/services/llm_analyzer.py +595 -0
  60. package/services/memory_md_sync.py +409 -0
  61. package/services/retry_queue.py +453 -0
  62. package/services/timeline.py +579 -0
  63. package/services/vector_index.py +398 -0
  64. package/services/websocket.py +257 -0
  65. package/skills/__init__.py +6 -0
  66. package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
  67. package/skills/__pycache__/admin.cpython-312.pyc +0 -0
  68. package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
  69. package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
  70. package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
  71. package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
  72. package/skills/__pycache__/insights.cpython-312.pyc +0 -0
  73. package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
  74. package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
  75. package/skills/__pycache__/search.cpython-312.pyc +0 -0
  76. package/skills/__pycache__/state.cpython-312.pyc +0 -0
  77. package/skills/__pycache__/store.cpython-312.pyc +0 -0
  78. package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
  79. package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
  80. package/skills/__pycache__/verification.cpython-312.pyc +0 -0
  81. package/skills/admin.py +469 -0
  82. package/skills/checkpoint.py +198 -0
  83. package/skills/claude_md.py +363 -0
  84. package/skills/cleanup.py +241 -0
  85. package/skills/grounding.py +801 -0
  86. package/skills/insights.py +231 -0
  87. package/skills/natural_language.py +277 -0
  88. package/skills/retrieve.py +67 -0
  89. package/skills/search.py +213 -0
  90. package/skills/state.py +182 -0
  91. package/skills/store.py +179 -0
  92. package/skills/summarize.py +588 -0
  93. package/skills/timeline.py +387 -0
  94. package/skills/verification.py +391 -0
  95. package/start_daemon.py +155 -0
  96. package/test_automation.py +221 -0
  97. package/test_complete.py +338 -0
  98. package/test_full.py +322 -0
  99. package/update_system.py +817 -0
  100. package/verify_db.py +134 -0
package/main.py ADDED
@@ -0,0 +1,2859 @@
1
+ """
2
+ Claude Memory Agent - A2A Server with FastAPI.
3
+
4
+ Provides semantic memory storage and retrieval for Claude Code sessions.
5
+ Implements Google A2A protocol for agent-to-agent communication.
6
+ Enhanced with rich context support for cross-project memory management.
7
+ """
8
+ import os
9
+ import json
10
+ import uuid
11
+ import asyncio
12
+ import sqlite3
13
+ import logging
14
+ from contextlib import asynccontextmanager
15
+ from typing import Dict, Any, Optional
16
+ from datetime import datetime
17
+
18
# Configure module-level logging once. The handler guard prevents duplicate
# log lines when this module is imported more than once (e.g. by a reloader).
logger = logging.getLogger(__name__)
if not logger.handlers:
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    ))
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
27
+
28
+ from fastapi import FastAPI, HTTPException, Request, WebSocket, WebSocketDisconnect
29
+ from fastapi.responses import JSONResponse
30
+ from fastapi.middleware.cors import CORSMiddleware
31
+ from pydantic import BaseModel
32
+ from dotenv import load_dotenv
33
+
34
+ from agent_card import AGENT_CARD
35
+ from services.database import (
36
+ DatabaseService, normalize_path,
37
+ DatabaseError, ConnectionPoolError, QueryTimeoutError,
38
+ RetryExhaustedError, MigrationError
39
+ )
40
+ from services.embeddings import EmbeddingService
41
+ from services.auth import get_auth_service, AuthService
42
+
43
+ # Original memory skills
44
+ from skills.store import store_memory, store_project, store_pattern
45
+ from skills.retrieve import retrieve_memory
46
+ from skills.search import semantic_search, search_patterns, get_project_context
47
+ from skills.summarize import (
48
+ summarize_session, auto_summarize_session, get_session_handoff,
49
+ create_diary_entry, check_session_inactivity
50
+ )
51
+
52
+ # Timeline skills (Anti-Hallucination Layer)
53
+ from skills.timeline import timeline_log, timeline_log_batch, timeline_get, timeline_search, timeline_auto_detect, timeline_chain
54
+ from skills.state import state_get, state_update, state_init_session
55
+ from skills.checkpoint import checkpoint_create, checkpoint_load, checkpoint_list
56
+ from skills.grounding import (
57
+ context_refresh, check_contradictions, verify_entity, mark_anchor,
58
+ get_unresolved_conflicts, resolve_conflict, get_anchor_history, auto_resolve_conflicts
59
+ )
60
+
61
+ # CLAUDE.md management skills
62
+ from skills.claude_md import (
63
+ claude_md_read, claude_md_add_section, claude_md_update_section,
64
+ claude_md_add_instruction, claude_md_list_sections, claude_md_suggest_from_session
65
+ )
66
+
67
+ # Verification skills (Best-of-N, Quote Extraction)
68
+ from skills.verification import best_of_n_verify, extract_quotes, require_grounding
69
+
70
+ # Cross-session learning skills
71
+ from skills.insights import (
72
+ run_aggregation, get_insights, suggest_improvements,
73
+ record_insight_feedback, mark_insight_applied, get_project_insights
74
+ )
75
+
76
+ # Memory cleanup skills
77
+ from skills.cleanup import (
78
+ memory_cleanup, get_archived_memories, restore_memory,
79
+ get_cleanup_config, set_cleanup_config, get_cleanup_stats,
80
+ purge_expired_archives
81
+ )
82
+
83
+ # Admin skills (embedding model management, reindexing)
84
+ from skills.admin import (
85
+ get_embedding_status, switch_embedding_model, reindex_memories,
86
+ get_reindex_progress, cancel_reindex, get_model_info, get_system_stats
87
+ )
88
+
89
+ # Natural language memory interface
90
+ from skills.natural_language import process_natural_command
91
+
92
+ # WebSocket service for real-time updates
93
+ from services.websocket import get_websocket_manager, broadcast_event, EventTypes
94
+
95
+ # Auto-injection service for mid-task relevance
96
+ from services.auto_inject import get_auto_injector
97
+
98
+ # Confidence scoring service
99
+ from services.confidence import get_confidence_service
100
+
101
+ # CLAUDE.md sync service
102
+ from services.claude_md_sync import get_claude_md_sync
103
+
104
+ # Agent registry for dashboard
105
+ from services.agent_registry import (
106
+ AVAILABLE_AGENTS, AVAILABLE_MCPS, AVAILABLE_HOOKS,
107
+ AGENT_CATEGORIES, get_agents_by_category, get_agent_by_id
108
+ )
109
+
110
+ load_dotenv()
111
+
112
+ # Initialize services
113
+ db = DatabaseService()
114
+ embeddings = EmbeddingService()
115
+
116
+ # Retry queue (imported lazily to avoid circular imports)
117
+ retry_queue = None
118
+
119
+ # Task storage
120
+ tasks: Dict[str, Dict[str, Any]] = {}
121
+
122
+
123
async def process_queued_request(item: Dict[str, Any]) -> bool:
    """Replay a single queued HTTP request.

    Args:
        item: Queue entry with "endpoint", "method" ("POST" or "GET"),
            "payload" and "headers" (each either a dict or a JSON string,
            as stored by the retry queue).

    Returns:
        True when the request was delivered and the server answered with a
        non-error status (< 400); False on unsupported methods, malformed
        stored payloads, or any transport failure — the retry queue treats
        False as "try again later".
    """
    endpoint = item.get("endpoint", "")
    method = item.get("method", "POST").upper()
    payload = item.get("payload", {})
    headers = item.get("headers", {})

    # Queue entries may persist payload/headers serialized as JSON strings.
    # A corrupt entry is reported as a failure rather than raised, so one
    # bad item cannot kill the background processor loop.
    try:
        if isinstance(payload, str):
            payload = json.loads(payload)
        if isinstance(headers, str):
            headers = json.loads(headers) if headers else {}
    except (json.JSONDecodeError, TypeError) as exc:
        logging.getLogger(__name__).warning(
            "Malformed queued request for %s: %s", endpoint, exc
        )
        return False

    # Only POST and GET are replayable; reject other verbs before touching
    # the network (and before importing httpx at all).
    if method not in ("POST", "GET"):
        return False

    # Imported lazily so module import does not require httpx.
    import httpx

    try:
        async with httpx.AsyncClient(timeout=30.0) as client:
            if method == "POST":
                response = await client.post(endpoint, json=payload, headers=headers)
            else:
                response = await client.get(endpoint, headers=headers)
        return response.status_code < 400
    except Exception as exc:
        # Best-effort delivery: log instead of silently swallowing, then
        # signal failure so the item stays in the queue for retry.
        logging.getLogger(__name__).warning(
            "Queued request to %s failed: %s", endpoint, exc
        )
        return False
149
+
150
+
151
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager.

    Startup: connect the database, initialize the schema, and launch the
    background retry-queue processor. Shutdown (after ``yield``): stop the
    processor, cancel and await its task, close the queue, and disconnect
    the database.

    Uses the module logger instead of bare print() so startup messages go
    through the handler/format configured at the top of this module.
    """
    global retry_queue

    await db.connect()
    await db.initialize_schema()

    # Imported here (not at module top) to avoid a circular import.
    from services.retry_queue import get_queue
    retry_queue = get_queue()

    # Background task that drains queued requests in small batches.
    queue_task = asyncio.create_task(
        retry_queue.process_queue(
            processor=process_queued_request,
            batch_size=10,
            interval_seconds=5.0
        )
    )

    logger.info("Memory Agent v2.0 started on port %s", os.getenv("PORT", 8102))
    logger.info("Retry queue initialized (depth: %s)", retry_queue.get_queue_depth())

    # Report authentication status so operators can verify it at startup.
    auth_stats = auth_service.get_stats()
    if auth_stats["enabled"]:
        logger.info("Authentication: ENABLED (%s active keys)", auth_stats["active_keys"])
        logger.info("  Key file: %s", auth_stats["key_file"])
        if auth_stats["active_keys"] == 1:
            logger.info("  Note: Default key generated. Check key file for the key hash.")
    else:
        logger.info("Authentication: DISABLED (set AUTH_ENABLED=true to enable)")

    yield

    # Shutdown: stop the processor loop first so it exits cleanly, then
    # cancel the task and swallow only the expected CancelledError.
    retry_queue.stop_processing()
    queue_task.cancel()
    try:
        await queue_task
    except asyncio.CancelledError:
        pass
    retry_queue.close()
    await db.disconnect()
196
+
197
+
198
# FastAPI application; `lifespan` handles DB connect/disconnect and the
# retry-queue background task.
app = FastAPI(
    title="Claude Memory Agent",
    description="Persistent semantic memory for Claude Code sessions with cross-project support",
    version="2.0.0",
    lifespan=lifespan
)

# Add CORS middleware for dashboard
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# maximally permissive — confirm whether the dashboard origin can be pinned.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize auth service (used by auth_middleware below and by lifespan
# for the startup status report).
auth_service = get_auth_service()
216
+
217
+
218
# Authentication middleware
@app.middleware("http")
async def auth_middleware(request: Request, call_next):
    """Validate the ``X-Memory-Key`` API key for protected endpoints.

    Exempt paths — and every path when auth is globally disabled — pass
    straight through. A missing key yields 401, an invalid one 403, both as
    JSON-RPC-style error bodies. On success, the validated key metadata is
    attached to ``request.state.auth`` and a rate-limit header is added to
    the outgoing response.
    """
    request_path = request.url.path

    # Pass through when the path is exempt or auth is switched off.
    if auth_service.is_exempt(request_path) or not auth_service.enabled:
        return await call_next(request)

    supplied_key = request.headers.get("X-Memory-Key")
    valid, error, key_info = auth_service.validate_key(supplied_key)

    if not valid:
        key_missing = error == "Missing API key"
        return JSONResponse(
            status_code=401 if key_missing else 403,
            content={
                "error": error,
                "code": "AUTH_REQUIRED" if key_missing else "AUTH_FAILED"
            },
            headers={"WWW-Authenticate": "X-Memory-Key"}
        )

    # Expose validated key metadata to downstream handlers.
    request.state.auth = key_info

    response = await call_next(request)
    if key_info:
        response.headers["X-RateLimit-Remaining"] = str(key_info.get("rate_remaining", 0))

    return response
257
+
258
+
259
# ============= Pydantic Models =============

class A2AMessage(BaseModel):
    """A single A2A protocol message: a sender role plus content parts."""
    # role: sender role string; values are not constrained here
    role: str
    # parts: message content; handle_task_send accepts plain strings or
    # dicts of the form {"type": "text", "text": ...}
    parts: list
264
+
265
+
266
class A2ARequest(BaseModel):
    """JSON-RPC 2.0 request envelope accepted by the /a2a endpoint."""
    jsonrpc: str = "2.0"
    # id: request correlation id, echoed back in every response
    id: Any
    # method: "tasks/send", "tasks/get" or "tasks/cancel" (others -> -32601)
    method: str
    params: Optional[Dict[str, Any]] = None
271
+
272
+
273
# ============= A2A Endpoints =============

@app.get("/.well-known/agent.json")
async def get_agent_card():
    """Serve the static A2A agent card used for capability discovery."""
    card = AGENT_CARD
    return JSONResponse(content=card)
278
+
279
+
280
@app.post("/a2a")
async def a2a_endpoint(request: A2ARequest):
    """JSON-RPC dispatcher for the A2A protocol.

    Routes tasks/send, tasks/get and tasks/cancel to their handlers.
    Unknown methods produce a -32601 error; any handler exception is
    converted into a -32000 error response.
    """
    handlers = {
        "tasks/send": handle_task_send,
        "tasks/get": handle_task_get,
        "tasks/cancel": handle_task_cancel,
    }
    try:
        handler = handlers.get(request.method)
        if handler is None:
            return JSONResponse(content={
                "jsonrpc": "2.0",
                "id": request.id,
                "error": {"code": -32601, "message": f"Method not found: {request.method}"}
            })
        return await handler(request)
    except Exception as e:
        return JSONResponse(content={
            "jsonrpc": "2.0",
            "id": request.id,
            "error": {"code": -32000, "message": str(e)}
        })
301
+
302
+
303
async def handle_task_send(request: A2ARequest) -> JSONResponse:
    """Handle a JSON-RPC ``tasks/send``: execute a skill and record the task.

    The skill id and its parameters come from ``params.metadata``; free text
    is taken from the message parts (when several text parts are present,
    the last one wins) and used as the default query. The outcome — success
    or failure — is stored in the module-level ``tasks`` dict.
    """
    request_params = request.params or {}
    task_id = request_params.get("id") or str(uuid.uuid4())
    session_id = request_params.get("sessionId")
    metadata = request_params.get("metadata", {})

    skill_id = metadata.get("skill_id", "semantic_search")
    skill_params = metadata.get("params", {})

    # Extract free text; later parts overwrite earlier ones, matching the
    # original last-part-wins behavior.
    text_content = ""
    for part in request_params.get("message", {}).get("parts", []):
        if isinstance(part, str):
            text_content = part
        elif isinstance(part, dict) and part.get("type") == "text":
            text_content = part.get("text", "")

    try:
        result = await execute_skill(
            skill_id=skill_id,
            query=text_content,
            params=skill_params,
            session_id=session_id
        )

        tasks[task_id] = {
            "id": task_id,
            "status": "completed",
            "result": result,
            "created_at": datetime.now().isoformat()
        }

        return JSONResponse(content={
            "jsonrpc": "2.0",
            "id": request.id,
            "result": {
                "id": task_id,
                "status": {"state": "completed"},
                "artifacts": [{"parts": [{"type": "text", "text": json.dumps(result, indent=2)}]}]
            }
        })

    except Exception as e:
        # Record the failure so tasks/get can report it, then surface a
        # JSON-RPC error to the caller.
        tasks[task_id] = {
            "id": task_id,
            "status": "failed",
            "error": str(e),
            "created_at": datetime.now().isoformat()
        }
        return JSONResponse(content={
            "jsonrpc": "2.0",
            "id": request.id,
            "error": {"code": -32000, "message": str(e)}
        })
358
+
359
+
360
async def handle_task_get(request: A2ARequest) -> JSONResponse:
    """Handle ``tasks/get``: return the stored status/result of a task.

    Unknown (or missing) task ids produce a -32602 JSON-RPC error.
    """
    request_params = request.params or {}
    task_id = request_params.get("id")

    task = tasks.get(task_id) if task_id else None
    if task is None:
        return JSONResponse(content={
            "jsonrpc": "2.0",
            "id": request.id,
            "error": {"code": -32602, "message": f"Task not found: {task_id}"}
        })

    # Only completed tasks carry a result artifact; failed/cancelled ones
    # return an empty artifact list.
    artifacts = []
    if task.get("result"):
        artifacts = [{"parts": [{"type": "text", "text": json.dumps(task.get("result", {}), indent=2)}]}]

    return JSONResponse(content={
        "jsonrpc": "2.0",
        "id": request.id,
        "result": {
            "id": task_id,
            "status": {"state": task["status"]},
            "artifacts": artifacts
        }
    })
381
+
382
+
383
async def handle_task_cancel(request: A2ARequest) -> JSONResponse:
    """Handle ``tasks/cancel``: mark a stored task as cancelled.

    Fix: the previous version reported a successful ``cancelled`` state even
    when the task id was missing or unknown. Now an unknown id returns the
    same -32602 "Task not found" error that handle_task_get uses, so callers
    cannot mistake a no-op for a real cancellation.
    """
    params = request.params or {}
    task_id = params.get("id")

    if not task_id or task_id not in tasks:
        return JSONResponse(content={
            "jsonrpc": "2.0",
            "id": request.id,
            "error": {"code": -32602, "message": f"Task not found: {task_id}"}
        })

    tasks[task_id]["status"] = "cancelled"
    return JSONResponse(content={
        "jsonrpc": "2.0",
        "id": request.id,
        "result": {"id": task_id, "status": {"state": "cancelled"}}
    })
393
+
394
+
395
+ async def execute_skill(
396
+ skill_id: str,
397
+ query: str,
398
+ params: Dict[str, Any],
399
+ session_id: Optional[str] = None
400
+ ) -> Dict[str, Any]:
401
+ """Execute the specified skill with enhanced context support."""
402
+ # Debug to file
403
+ with open("c:/Users/moham/Desktop/Claude Memory/memory-agent/debug.log", "a") as f:
404
+ f.write(f"[SKILL DEBUG] execute_skill called with skill_id='{skill_id}'\n")
405
+ f.flush()
406
+
407
+ if skill_id == "store_memory":
408
+ result = await store_memory(
409
+ db=db,
410
+ embeddings=embeddings,
411
+ content=params.get("content", query),
412
+ memory_type=params.get("type", "chunk"),
413
+ metadata=params.get("metadata"),
414
+ session_id=session_id or params.get("session_id"),
415
+ # Project context
416
+ project_path=params.get("project_path"),
417
+ project_name=params.get("project_name"),
418
+ project_type=params.get("project_type"),
419
+ tech_stack=params.get("tech_stack"),
420
+ # Agent context
421
+ agent_type=params.get("agent_type"),
422
+ skill_used=params.get("skill_used"),
423
+ tools_used=params.get("tools_used"),
424
+ # Outcome
425
+ outcome=params.get("outcome"),
426
+ success=params.get("success"),
427
+ # Classification
428
+ tags=params.get("tags"),
429
+ importance=params.get("importance", 5)
430
+ )
431
+ # Broadcast real-time update
432
+ print(f"[DEBUG] About to broadcast memory_stored event for memory_id={result.get('memory_id')}")
433
+ try:
434
+ await broadcast_event(
435
+ EventTypes.MEMORY_STORED,
436
+ {"memory_id": result.get("memory_id"), "type": params.get("type", "chunk")},
437
+ params.get("project_path")
438
+ )
439
+ print(f"[DEBUG] Broadcast completed successfully")
440
+ except Exception as e:
441
+ print(f"[DEBUG] Broadcast error: {e}")
442
+ return result
443
+
444
+ elif skill_id == "store_project":
445
+ return await store_project(
446
+ db=db,
447
+ path=params.get("path"),
448
+ name=params.get("name"),
449
+ project_type=params.get("project_type"),
450
+ tech_stack=params.get("tech_stack"),
451
+ conventions=params.get("conventions"),
452
+ preferences=params.get("preferences")
453
+ )
454
+
455
+ elif skill_id == "store_pattern":
456
+ return await store_pattern(
457
+ db=db,
458
+ embeddings=embeddings,
459
+ name=params.get("name"),
460
+ solution=params.get("solution"),
461
+ problem_type=params.get("problem_type"),
462
+ tech_context=params.get("tech_context"),
463
+ metadata=params.get("metadata")
464
+ )
465
+
466
+ elif skill_id == "retrieve_memory":
467
+ return await retrieve_memory(
468
+ db=db,
469
+ memory_id=params.get("memory_id"),
470
+ memory_type=params.get("type"),
471
+ session_id=session_id or params.get("session_id"),
472
+ project_path=params.get("project_path"),
473
+ limit=params.get("limit", 10)
474
+ )
475
+
476
+ elif skill_id == "semantic_search":
477
+ return await semantic_search(
478
+ db=db,
479
+ embeddings=embeddings,
480
+ query=params.get("query", query),
481
+ limit=params.get("limit", 10),
482
+ memory_type=params.get("type"),
483
+ session_id=session_id or params.get("session_id"),
484
+ project_path=params.get("project_path"),
485
+ agent_type=params.get("agent_type"),
486
+ success_only=params.get("success_only", False),
487
+ threshold=params.get("threshold", 0.5)
488
+ )
489
+
490
+ elif skill_id == "search_patterns":
491
+ return await search_patterns(
492
+ db=db,
493
+ embeddings=embeddings,
494
+ query=params.get("query", query),
495
+ limit=params.get("limit", 5),
496
+ problem_type=params.get("problem_type"),
497
+ threshold=params.get("threshold", 0.5)
498
+ )
499
+
500
+ elif skill_id == "get_project_context":
501
+ return await get_project_context(
502
+ db=db,
503
+ embeddings=embeddings,
504
+ project_path=params.get("project_path"),
505
+ query=params.get("query"),
506
+ limit=params.get("limit", 10)
507
+ )
508
+
509
+ elif skill_id == "summarize_session":
510
+ return await summarize_session(
511
+ db=db,
512
+ embeddings=embeddings,
513
+ session_id=session_id or params.get("session_id", str(uuid.uuid4())),
514
+ summary=params.get("summary", query),
515
+ key_decisions=params.get("key_decisions"),
516
+ code_patterns=params.get("code_patterns"),
517
+ metadata=params.get("metadata"),
518
+ project_path=params.get("project_path")
519
+ )
520
+
521
+ elif skill_id == "auto_summarize_session":
522
+ return await auto_summarize_session(
523
+ db=db,
524
+ embeddings=embeddings,
525
+ session_id=session_id or params.get("session_id"),
526
+ project_path=params.get("project_path")
527
+ )
528
+
529
+ elif skill_id == "get_session_handoff":
530
+ return await get_session_handoff(
531
+ db=db,
532
+ embeddings=embeddings,
533
+ project_path=params.get("project_path"),
534
+ include_last_n_sessions=params.get("include_last_n_sessions", 3)
535
+ )
536
+
537
+ elif skill_id == "create_diary_entry":
538
+ return await create_diary_entry(
539
+ db=db,
540
+ embeddings=embeddings,
541
+ session_id=session_id or params.get("session_id"),
542
+ project_path=params.get("project_path"),
543
+ user_notes=params.get("user_notes")
544
+ )
545
+
546
+ elif skill_id == "check_session_inactivity":
547
+ return await check_session_inactivity(
548
+ db=db,
549
+ session_id=session_id or params.get("session_id"),
550
+ inactivity_threshold_hours=params.get("inactivity_threshold_hours", 4.0)
551
+ )
552
+
553
+ elif skill_id == "get_stats":
554
+ return await db.get_stats()
555
+
556
+ # ============================================================
557
+ # TIMELINE SKILLS
558
+ # ============================================================
559
+
560
+ elif skill_id == "timeline_log":
561
+ result = await timeline_log(
562
+ db=db,
563
+ embeddings=embeddings,
564
+ session_id=params.get("session_id") or session_id or str(uuid.uuid4()),
565
+ event_type=params.get("event_type", "observation"),
566
+ summary=params.get("summary", query),
567
+ details=params.get("details"),
568
+ project_path=params.get("project_path"),
569
+ parent_event_id=params.get("parent_event_id"),
570
+ root_event_id=params.get("root_event_id"),
571
+ entities=params.get("entities"),
572
+ status=params.get("status", "completed"),
573
+ outcome=params.get("outcome"),
574
+ confidence=params.get("confidence"),
575
+ is_anchor=params.get("is_anchor", False)
576
+ )
577
+ # Broadcast real-time update
578
+ await broadcast_event(
579
+ EventTypes.TIMELINE_LOGGED,
580
+ {"event_id": result.get("event_id"), "event_type": params.get("event_type", "observation")},
581
+ params.get("project_path")
582
+ )
583
+ return result
584
+
585
+ elif skill_id == "timeline_log_batch":
586
+ # Batch logging - more efficient than multiple timeline_log calls
587
+ result = await timeline_log_batch(
588
+ db=db,
589
+ embeddings=embeddings,
590
+ session_id=params.get("session_id") or session_id or str(uuid.uuid4()),
591
+ events=params.get("events", []),
592
+ project_path=params.get("project_path"),
593
+ parent_event_id=params.get("parent_event_id"),
594
+ root_event_id=params.get("root_event_id")
595
+ )
596
+ # Broadcast single update for the batch
597
+ if result.get("events_logged", 0) > 0:
598
+ await broadcast_event(
599
+ EventTypes.TIMELINE_LOGGED,
600
+ {
601
+ "event_ids": result.get("event_ids", []),
602
+ "batch_size": result.get("events_logged", 0),
603
+ "event_types": result.get("event_types", {})
604
+ },
605
+ params.get("project_path")
606
+ )
607
+ return result
608
+
609
+ elif skill_id == "timeline_get":
610
+ return await timeline_get(
611
+ db=db,
612
+ session_id=params.get("session_id") or session_id,
613
+ limit=params.get("limit", 20),
614
+ event_type=params.get("event_type"),
615
+ since_event_id=params.get("since_event_id"),
616
+ anchors_only=params.get("anchors_only", False),
617
+ include_state=params.get("include_state", True),
618
+ include_checkpoint=params.get("include_checkpoint", True)
619
+ )
620
+
621
+ elif skill_id == "timeline_search":
622
+ return await timeline_search(
623
+ db=db,
624
+ embeddings=embeddings,
625
+ query=params.get("query", query),
626
+ session_id=params.get("session_id") or session_id,
627
+ limit=params.get("limit", 10),
628
+ threshold=params.get("threshold", 0.5)
629
+ )
630
+
631
+ elif skill_id == "timeline_auto_detect":
632
+ return await timeline_auto_detect(
633
+ db=db,
634
+ embeddings=embeddings,
635
+ session_id=params.get("session_id") or session_id or str(uuid.uuid4()),
636
+ response_text=params.get("response_text", query),
637
+ project_path=params.get("project_path"),
638
+ parent_event_id=params.get("parent_event_id")
639
+ )
640
+
641
+ elif skill_id == "timeline_chain":
642
+ return await timeline_chain(
643
+ db=db,
644
+ session_id=params.get("session_id") or session_id,
645
+ root_event_id=params.get("root_event_id"),
646
+ include_details=params.get("include_details", False)
647
+ )
648
+
649
+ # ============================================================
650
+ # STATE SKILLS
651
+ # ============================================================
652
+
653
+ elif skill_id == "state_get":
654
+ return await state_get(
655
+ db=db,
656
+ session_id=params.get("session_id") or session_id,
657
+ project_path=params.get("project_path")
658
+ )
659
+
660
+ elif skill_id == "state_update":
661
+ return await state_update(
662
+ db=db,
663
+ session_id=params.get("session_id") or session_id,
664
+ current_goal=params.get("current_goal"),
665
+ pending_questions=params.get("pending_questions"),
666
+ add_question=params.get("add_question"),
667
+ remove_question=params.get("remove_question"),
668
+ register_entity=params.get("register_entity"),
669
+ entity_registry=params.get("entity_registry"),
670
+ add_decision=params.get("add_decision"),
671
+ decisions_summary=params.get("decisions_summary")
672
+ )
673
+
674
+ elif skill_id == "state_init_session":
675
+ return await state_init_session(
676
+ db=db,
677
+ embeddings=embeddings,
678
+ project_path=params.get("project_path")
679
+ )
680
+
681
+ # ============================================================
682
+ # CHECKPOINT SKILLS
683
+ # ============================================================
684
+
685
+ elif skill_id == "checkpoint_create":
686
+ return await checkpoint_create(
687
+ db=db,
688
+ embeddings=embeddings,
689
+ session_id=params.get("session_id") or session_id,
690
+ summary=params.get("summary"),
691
+ key_facts=params.get("key_facts"),
692
+ include_state=params.get("include_state", True)
693
+ )
694
+
695
+ elif skill_id == "checkpoint_load":
696
+ return await checkpoint_load(
697
+ db=db,
698
+ session_id=params.get("session_id") or session_id,
699
+ checkpoint_id=params.get("checkpoint_id"),
700
+ project_path=params.get("project_path")
701
+ )
702
+
703
+ elif skill_id == "checkpoint_list":
704
+ return await checkpoint_list(
705
+ db=db,
706
+ session_id=params.get("session_id") or session_id,
707
+ limit=params.get("limit", 10)
708
+ )
709
+
710
+ # ============================================================
711
+ # GROUNDING SKILLS (Anti-Hallucination)
712
+ # ============================================================
713
+
714
+ elif skill_id == "context_refresh":
715
+ return await context_refresh(
716
+ db=db,
717
+ embeddings=embeddings,
718
+ session_id=params.get("session_id") or session_id,
719
+ query=params.get("query", query) if query else None,
720
+ include_recent_events=params.get("include_recent_events", 10),
721
+ include_state=params.get("include_state", True),
722
+ include_checkpoint=params.get("include_checkpoint", True),
723
+ include_relevant_memories=params.get("include_relevant_memories", True),
724
+ check_contradictions=params.get("check_contradictions", True)
725
+ )
726
+
727
+ elif skill_id == "check_contradictions":
728
+ return await check_contradictions(
729
+ db=db,
730
+ embeddings=embeddings,
731
+ statement=params.get("statement", query),
732
+ session_id=params.get("session_id") or session_id,
733
+ scope=params.get("scope", "session")
734
+ )
735
+
736
+ elif skill_id == "verify_entity":
737
+ return await verify_entity(
738
+ db=db,
739
+ session_id=params.get("session_id") or session_id,
740
+ entity_key=params.get("entity_key"),
741
+ entity_type=params.get("entity_type")
742
+ )
743
+
744
+ elif skill_id == "mark_anchor":
745
+ result = await mark_anchor(
746
+ db=db,
747
+ embeddings=embeddings,
748
+ session_id=params.get("session_id") or session_id,
749
+ fact=params.get("fact", query),
750
+ details=params.get("details"),
751
+ project_path=params.get("project_path"),
752
+ force=params.get("force", False)
753
+ )
754
+ # Broadcast real-time update
755
+ event_type = EventTypes.ANCHOR_CONFLICT if result.get("conflict_detected") else EventTypes.ANCHOR_MARKED
756
+ await broadcast_event(
757
+ event_type,
758
+ {"anchor_id": result.get("anchor_id"), "fact": params.get("fact", query)[:100]},
759
+ params.get("project_path")
760
+ )
761
+ return result
762
+
763
+ elif skill_id == "get_unresolved_conflicts":
764
+ return await get_unresolved_conflicts(
765
+ db=db,
766
+ session_id=params.get("session_id") or session_id,
767
+ project_path=params.get("project_path"),
768
+ limit=params.get("limit", 20)
769
+ )
770
+
771
+ elif skill_id == "resolve_conflict":
772
+ return await resolve_conflict(
773
+ db=db,
774
+ embeddings=embeddings,
775
+ conflict_id=params.get("conflict_id"),
776
+ resolution=params.get("resolution"),
777
+ keep_anchor_id=params.get("keep_anchor_id"),
778
+ resolved_by=params.get("resolved_by", "user")
779
+ )
780
+
781
+ elif skill_id == "get_anchor_history":
782
+ return await get_anchor_history(
783
+ db=db,
784
+ anchor_id=params.get("anchor_id"),
785
+ session_id=params.get("session_id") or session_id,
786
+ limit=params.get("limit", 50)
787
+ )
788
+
789
+ elif skill_id == "auto_resolve_conflicts":
790
+ return await auto_resolve_conflicts(
791
+ db=db,
792
+ embeddings=embeddings,
793
+ session_id=params.get("session_id") or session_id
794
+ )
795
+
796
+ # ============================================================
797
+ # CLAUDE.MD MANAGEMENT SKILLS
798
+ # ============================================================
799
+
800
+ elif skill_id == "claude_md_read":
801
+ return await claude_md_read(
802
+ section=params.get("section")
803
+ )
804
+
805
+ elif skill_id == "claude_md_add_section":
806
+ return await claude_md_add_section(
807
+ section_name=params.get("section_name"),
808
+ content=params.get("content", query),
809
+ position=params.get("position", "end")
810
+ )
811
+
812
+ elif skill_id == "claude_md_update_section":
813
+ return await claude_md_update_section(
814
+ section_name=params.get("section_name"),
815
+ content=params.get("content", query),
816
+ mode=params.get("mode", "replace")
817
+ )
818
+
819
+ elif skill_id == "claude_md_add_instruction":
820
+ return await claude_md_add_instruction(
821
+ section_name=params.get("section_name"),
822
+ instruction=params.get("instruction", query),
823
+ bullet_style=params.get("bullet_style", "-")
824
+ )
825
+
826
+ elif skill_id == "claude_md_list_sections":
827
+ return await claude_md_list_sections()
828
+
829
+ elif skill_id == "claude_md_suggest":
830
+ return await claude_md_suggest_from_session(
831
+ db=db,
832
+ session_id=params.get("session_id") or session_id,
833
+ min_importance=params.get("min_importance", 7)
834
+ )
835
+
836
+ # ============================================================
837
+ # VERIFICATION SKILLS (Best-of-N, Quote Extraction)
838
+ # ============================================================
839
+
840
+ elif skill_id == "best_of_n_verify":
841
+ return await best_of_n_verify(
842
+ query=params.get("query", query),
843
+ n=params.get("n", 3),
844
+ context=params.get("context"),
845
+ threshold=params.get("threshold", 0.7)
846
+ )
847
+
848
+ elif skill_id == "extract_quotes":
849
+ return await extract_quotes(
850
+ document=params.get("document", ""),
851
+ query=params.get("query", query),
852
+ max_quotes=params.get("max_quotes", 5),
853
+ min_length=params.get("min_length", 20)
854
+ )
855
+
856
+ elif skill_id == "require_grounding":
857
+ return await require_grounding(
858
+ db=db,
859
+ session_id=params.get("session_id") or session_id,
860
+ statement=params.get("statement", query),
861
+ source_type=params.get("source_type", "any")
862
+ )
863
+
864
+ # ============================================================
865
+ # CROSS-SESSION LEARNING SKILLS
866
+ # ============================================================
867
+
868
+ elif skill_id == "run_aggregation":
869
+ return await run_aggregation(
870
+ db=db,
871
+ embeddings=embeddings,
872
+ days_back=params.get("days_back", 30)
873
+ )
874
+
875
+ elif skill_id == "get_insights":
876
+ return await get_insights(
877
+ db=db,
878
+ embeddings=embeddings,
879
+ insight_type=params.get("insight_type"),
880
+ project_path=params.get("project_path"),
881
+ min_confidence=params.get("min_confidence", 0.5),
882
+ limit=params.get("limit", 10)
883
+ )
884
+
885
+ elif skill_id == "suggest_improvements":
886
+ return await suggest_improvements(
887
+ db=db,
888
+ embeddings=embeddings,
889
+ min_confidence=params.get("min_confidence", 0.7)
890
+ )
891
+
892
+ elif skill_id == "record_insight_feedback":
893
+ return await record_insight_feedback(
894
+ db=db,
895
+ embeddings=embeddings,
896
+ insight_id=params.get("insight_id"),
897
+ helpful=params.get("helpful", True),
898
+ session_id=session_id or params.get("session_id"),
899
+ comment=params.get("comment")
900
+ )
901
+
902
+ elif skill_id == "mark_insight_applied":
903
+ return await mark_insight_applied(
904
+ db=db,
905
+ embeddings=embeddings,
906
+ insight_id=params.get("insight_id")
907
+ )
908
+
909
+ elif skill_id == "get_project_insights":
910
+ return await get_project_insights(
911
+ db=db,
912
+ embeddings=embeddings,
913
+ project_path=params.get("project_path"),
914
+ include_global=params.get("include_global", True),
915
+ limit=params.get("limit", 10)
916
+ )
917
+
918
+ # ============================================================
919
+ # MEMORY CLEANUP SKILLS
920
+ # ============================================================
921
+
922
+ elif skill_id == "memory_cleanup":
923
+ result = await memory_cleanup(
924
+ db=db,
925
+ embeddings=embeddings,
926
+ project_path=params.get("project_path"),
927
+ dry_run=params.get("dry_run", True)
928
+ )
929
+ # Broadcast real-time update (only for actual cleanup, not dry run)
930
+ if not params.get("dry_run", True):
931
+ await broadcast_event(
932
+ EventTypes.CLEANUP_COMPLETED,
933
+ {"archived": result.get("total_archived", 0), "deleted": result.get("total_deleted", 0)},
934
+ params.get("project_path")
935
+ )
936
+ return result
937
+
938
+ elif skill_id == "get_archived_memories":
939
+ return await get_archived_memories(
940
+ db=db,
941
+ embeddings=embeddings,
942
+ project_path=params.get("project_path"),
943
+ reason=params.get("reason"),
944
+ limit=params.get("limit", 50)
945
+ )
946
+
947
+ elif skill_id == "restore_memory":
948
+ return await restore_memory(
949
+ db=db,
950
+ embeddings=embeddings,
951
+ archive_id=params.get("archive_id")
952
+ )
953
+
954
+ elif skill_id == "get_cleanup_config":
955
+ return await get_cleanup_config(
956
+ db=db,
957
+ embeddings=embeddings,
958
+ project_path=params.get("project_path")
959
+ )
960
+
961
+ elif skill_id == "set_cleanup_config":
962
+ return await set_cleanup_config(
963
+ db=db,
964
+ embeddings=embeddings,
965
+ project_path=params.get("project_path"),
966
+ retention_days=params.get("retention_days"),
967
+ min_relevance_score=params.get("min_relevance_score"),
968
+ keep_high_importance=params.get("keep_high_importance"),
969
+ importance_threshold=params.get("importance_threshold"),
970
+ dedup_enabled=params.get("dedup_enabled"),
971
+ dedup_threshold=params.get("dedup_threshold"),
972
+ archive_before_delete=params.get("archive_before_delete"),
973
+ auto_cleanup_enabled=params.get("auto_cleanup_enabled")
974
+ )
975
+
976
+ elif skill_id == "get_cleanup_stats":
977
+ return await get_cleanup_stats(db=db, embeddings=embeddings)
978
+
979
+ elif skill_id == "purge_expired_archives":
980
+ return await purge_expired_archives(db=db, embeddings=embeddings)
981
+
982
+ # ============================================================
983
+ # ADMIN SKILLS (Embedding Model Management)
984
+ # ============================================================
985
+
986
+ elif skill_id == "get_embedding_status":
987
+ return await get_embedding_status(db=db, embeddings=embeddings)
988
+
989
+ elif skill_id == "switch_embedding_model":
990
+ return await switch_embedding_model(
991
+ db=db,
992
+ embeddings=embeddings,
993
+ model=params.get("model", "nomic-embed-text"),
994
+ reindex_existing=params.get("reindex_existing", False)
995
+ )
996
+
997
+ elif skill_id == "reindex_memories":
998
+ return await reindex_memories(
999
+ db=db,
1000
+ embeddings=embeddings,
1001
+ model=params.get("model"),
1002
+ project_path=params.get("project_path"),
1003
+ batch_size=params.get("batch_size", 10),
1004
+ dry_run=params.get("dry_run", False)
1005
+ )
1006
+
1007
+ elif skill_id == "get_reindex_progress":
1008
+ return await get_reindex_progress(db=db, embeddings=embeddings)
1009
+
1010
+ elif skill_id == "cancel_reindex":
1011
+ return await cancel_reindex(db=db, embeddings=embeddings)
1012
+
1013
+ elif skill_id == "get_model_info":
1014
+ return await get_model_info(
1015
+ db=db,
1016
+ embeddings=embeddings,
1017
+ model=params.get("model")
1018
+ )
1019
+
1020
+ elif skill_id == "get_system_stats":
1021
+ return await get_system_stats(db=db, embeddings=embeddings)
1022
+
1023
+ # ============================================================
1024
+ # MOLTBOT-INSPIRED SKILLS (Human-Readable Transparency)
1025
+ # ============================================================
1026
+
1027
+ elif skill_id == "daily_log_append":
1028
+ from services.daily_log import append_entry
1029
+ return await append_entry(
1030
+ project_path=params.get("project_path"),
1031
+ content=params.get("content", query),
1032
+ entry_type=params.get("entry_type", "note"),
1033
+ session_id=session_id or params.get("session_id")
1034
+ )
1035
+
1036
+ elif skill_id == "daily_log_append_session":
1037
+ from services.daily_log import append_session_summary
1038
+ return await append_session_summary(
1039
+ project_path=params.get("project_path"),
1040
+ session_id=session_id or params.get("session_id"),
1041
+ decisions=params.get("decisions"),
1042
+ accomplishments=params.get("accomplishments"),
1043
+ notes=params.get("notes"),
1044
+ errors_solved=params.get("errors_solved")
1045
+ )
1046
+
1047
+ elif skill_id == "daily_log_read":
1048
+ from services.daily_log import load_recent_logs
1049
+ return await load_recent_logs(
1050
+ project_path=params.get("project_path"),
1051
+ days=params.get("days", 2),
1052
+ max_chars=params.get("max_chars", 8000)
1053
+ )
1054
+
1055
+ elif skill_id == "daily_log_highlights":
1056
+ from services.daily_log import get_today_highlights
1057
+ return await get_today_highlights(
1058
+ project_path=params.get("project_path"),
1059
+ max_entries=params.get("max_entries", 10)
1060
+ )
1061
+
1062
+ elif skill_id == "daily_log_list":
1063
+ from services.daily_log import list_logs
1064
+ return await list_logs(
1065
+ project_path=params.get("project_path"),
1066
+ limit=params.get("limit", 30)
1067
+ )
1068
+
1069
+ elif skill_id == "sync_memory_md":
1070
+ from services.memory_md_sync import sync_to_memory_md
1071
+ return await sync_to_memory_md(
1072
+ db=db,
1073
+ project_path=params.get("project_path"),
1074
+ min_importance=params.get("min_importance", 7),
1075
+ min_pattern_success=params.get("min_pattern_success", 3)
1076
+ )
1077
+
1078
+ elif skill_id == "read_memory_md":
1079
+ from services.memory_md_sync import read_memory_md
1080
+ return await read_memory_md(
1081
+ project_path=params.get("project_path")
1082
+ )
1083
+
1084
+ elif skill_id == "get_memory_md_summary":
1085
+ from services.memory_md_sync import get_memory_md_summary
1086
+ return await get_memory_md_summary(
1087
+ project_path=params.get("project_path")
1088
+ )
1089
+
1090
+ elif skill_id == "add_memory_md_fact":
1091
+ from services.memory_md_sync import add_fact
1092
+ return await add_fact(
1093
+ project_path=params.get("project_path"),
1094
+ fact=params.get("fact", query),
1095
+ section=params.get("section", "anchors")
1096
+ )
1097
+
1098
+ elif skill_id == "check_flush_needed":
1099
+ from services.compaction_flush import check_flush_needed
1100
+ return await check_flush_needed(
1101
+ db=db,
1102
+ session_id=session_id or params.get("session_id"),
1103
+ event_threshold=params.get("event_threshold", 50),
1104
+ time_threshold_minutes=params.get("time_threshold_minutes", 30)
1105
+ )
1106
+
1107
+ elif skill_id == "pre_compaction_flush":
1108
+ from services.compaction_flush import execute_flush
1109
+ return await execute_flush(
1110
+ db=db,
1111
+ project_path=params.get("project_path"),
1112
+ session_id=session_id or params.get("session_id")
1113
+ )
1114
+
1115
+ elif skill_id == "list_flushes":
1116
+ from services.compaction_flush import list_flushes
1117
+ return await list_flushes(
1118
+ project_path=params.get("project_path"),
1119
+ limit=params.get("limit", 20)
1120
+ )
1121
+
1122
+ elif skill_id == "read_flush":
1123
+ from services.compaction_flush import read_flush
1124
+ return await read_flush(
1125
+ project_path=params.get("project_path"),
1126
+ filename=params.get("filename")
1127
+ )
1128
+
1129
+ else:
1130
+ raise ValueError(f"Unknown skill: {skill_id}")
1131
+
1132
+
1133
+ # ============= REST API Endpoints =============
1134
+
1135
@app.get("/api/stats")
async def api_get_stats():
    """Return aggregate database statistics plus a timeline-event count.

    Primary stats failures come back as a structured
    ``{"success": False, ...}`` payload; the timeline count is
    best-effort and degrades to 0 with an error note on failure.
    """
    try:
        stats = await db.get_stats()
    except DatabaseError as db_err:
        return {
            "success": False,
            "error_code": db_err.error_code,
            "error": str(db_err)
        }
    except Exception as db_err:
        return {
            "success": False,
            "error_code": "STATS_ERROR",
            "error": f"Failed to get stats: {str(db_err)}"
        }

    # Timeline count is supplementary: never let it fail the whole endpoint.
    try:
        rows = await db.execute_query(
            "SELECT COUNT(*) as count FROM timeline_events"
        )
        stats["total_timeline_events"] = rows[0]["count"] if rows else 0
    except (DatabaseError, sqlite3.Error) as tl_err:
        logger.warning(f"Failed to get timeline stats: {tl_err}")
        stats["total_timeline_events"] = 0
        stats["timeline_stats_error"] = str(tl_err)
    except Exception as tl_err:
        logger.warning(f"Unexpected error getting timeline stats: {tl_err}")
        stats["total_timeline_events"] = 0
        stats["timeline_stats_error"] = f"Unexpected error: {str(tl_err)}"

    stats["success"] = True
    return stats
1169
+
1170
+
1171
@app.get("/api/memories")
async def api_get_memories(
    project_path: Optional[str] = None,
    memory_type: Optional[str] = None,
    limit: int = 50,
    offset: int = 0
):
    """Get memories with optional filtering by project and type."""
    try:
        # Normalize project_path to match stored paths (handles
        # backslash/forward-slash mismatch).
        if project_path:
            project_path = normalize_path(project_path)

        # Build the filter clause once; the page query and the
        # total-count query share it.
        where_sql = ""
        where_params = []
        if project_path:
            # Use REPLACE to normalize stored paths for comparison
            where_sql += " AND REPLACE(project_path, '\\', '/') = ?"
            where_params.append(project_path)
        if memory_type and memory_type != "all":
            where_sql += " AND type = ?"
            where_params.append(memory_type)

        memories = await db.execute_query(
            "SELECT * FROM memories WHERE 1=1"
            + where_sql
            + " ORDER BY created_at DESC LIMIT ? OFFSET ?",
            where_params + [limit, offset],
        )

        count_rows = await db.execute_query(
            "SELECT COUNT(*) as count FROM memories WHERE 1=1" + where_sql,
            list(where_params),
        )
        total = count_rows[0]["count"] if count_rows else 0

        return {
            "success": True,
            "memories": memories,
            "total": total,
            "limit": limit,
            "offset": offset
        }
    except Exception as exc:
        logger.error(f"Failed to get memories: {exc}")
        return {"success": False, "error": str(exc), "memories": []}
1225
+
1226
+
1227
@app.get("/api/patterns")
async def api_get_patterns(
    project_path: Optional[str] = None,
    problem_type: Optional[str] = None,
    limit: int = 50
):
    """Get stored patterns with optional filtering.

    NOTE(review): ``project_path`` is accepted but never used in the
    query below — confirm whether the patterns table has a
    project_path column before wiring it into the filter.
    """
    try:
        sql = "SELECT * FROM patterns WHERE 1=1"
        args = []

        if problem_type and problem_type != "all":
            sql += " AND problem_type = ?"
            args.append(problem_type)

        sql += " ORDER BY success_count DESC, created_at DESC LIMIT ?"
        args.append(limit)

        rows = await db.execute_query(sql, args)

        return {
            "success": True,
            "patterns": rows or []
        }
    except Exception as exc:
        logger.error(f"Failed to get patterns: {exc}")
        return {"success": False, "error": str(exc), "patterns": []}
1254
+
1255
+
1256
@app.get("/api/search")
async def api_search_memories(
    query: str,
    project_path: Optional[str] = None,
    limit: int = 20
):
    """Semantic search across memories."""
    try:
        outcome = await semantic_search(
            db=db,
            embeddings=embeddings,
            query=query,
            project_path=project_path,
            limit=limit,
        )
        return {
            "success": True,
            "results": outcome.get("results", []),
            "query": query
        }
    except Exception as exc:
        logger.error(f"Search failed: {exc}")
        return {"success": False, "error": str(exc), "results": []}
1279
+
1280
+
1281
@app.get("/api/timeline")
async def api_get_timeline(
    project_path: Optional[str] = None,
    session_id: Optional[str] = None,
    event_type: Optional[str] = None,
    limit: int = 100
):
    """Get timeline events with optional filtering.

    Fix: ``project_path`` is now normalized and compared against
    slash-normalized stored paths (same technique as /api/memories),
    so Windows-style backslash paths match events stored with
    forward slashes instead of silently returning no rows.
    """
    try:
        # Normalize project_path to match stored paths (handles
        # backslash/forward-slash mismatch) — consistent with /api/memories.
        if project_path:
            project_path = normalize_path(project_path)

        query = "SELECT * FROM timeline_events WHERE 1=1"
        params = []

        if project_path:
            # Use REPLACE to normalize stored paths for comparison
            query += " AND REPLACE(project_path, '\\', '/') = ?"
            params.append(project_path)

        if session_id:
            query += " AND session_id = ?"
            params.append(session_id)

        if event_type and event_type != "all":
            query += " AND event_type = ?"
            params.append(event_type)

        query += " ORDER BY created_at DESC LIMIT ?"
        params.append(limit)

        events = await db.execute_query(query, params)

        return {
            "success": True,
            "events": events or [],
            "count": len(events) if events else 0
        }
    except Exception as e:
        logger.error(f"Failed to get timeline: {e}")
        return {"success": False, "error": str(e), "events": []}
1318
+
1319
+
1320
@app.get("/dashboard")
async def serve_dashboard():
    """Serve the monitoring dashboard."""
    from fastapi.responses import FileResponse
    import os

    # dashboard.html lives next to this module.
    html_file = os.path.join(os.path.dirname(__file__), "dashboard.html")
    return FileResponse(html_file, media_type="text/html")
1327
+
1328
+
1329
@app.get("/api/projects")
async def get_all_projects():
    """Get all projects that have sessions or memories.

    Merges distinct project paths from ``session_state`` and
    ``memories``, normalizing path separators so the same project
    stored under different separators appears once.

    Fix: when several stored paths normalize to the same project,
    memory counts are now accumulated (+=) like session counts
    already were, instead of the last row overwriting earlier ones.
    """
    try:
        # Get unique projects from session_state
        projects = await db.execute_query("""
            SELECT DISTINCT
                project_path,
                MAX(updated_at) as last_activity,
                COUNT(*) as session_count
            FROM session_state
            WHERE project_path IS NOT NULL
            GROUP BY project_path
            ORDER BY last_activity DESC
        """)

        # Also get from memories table
        memory_projects = await db.execute_query("""
            SELECT DISTINCT
                project_path,
                MAX(created_at) as last_activity,
                COUNT(*) as memory_count
            FROM memories
            WHERE project_path IS NOT NULL
            GROUP BY project_path
            ORDER BY last_activity DESC
        """)

        # Merge and deduplicate — normalize paths to prevent duplicates.
        all_projects = {}
        for row in (projects or []):
            raw = row.get('project_path')
            if not raw:
                continue
            key = normalize_path(raw)
            entry = all_projects.get(key)
            if entry:
                # Same project stored under a differently-separated path.
                entry['session_count'] += row.get('session_count', 0)
            else:
                all_projects[key] = {
                    'project_path': key,
                    'last_activity': row.get('last_activity'),
                    'session_count': row.get('session_count', 0),
                    'memory_count': 0
                }

        for row in (memory_projects or []):
            raw = row.get('project_path')
            if not raw:
                continue
            key = normalize_path(raw)
            entry = all_projects.get(key)
            if entry:
                # Accumulate, don't overwrite: multiple raw paths can
                # collapse onto the same normalized key. Entries created
                # from sessions start at memory_count=0, so += is safe.
                entry['memory_count'] += row.get('memory_count', 0)
            else:
                all_projects[key] = {
                    'project_path': key,
                    'last_activity': row.get('last_activity'),
                    'session_count': 0,
                    'memory_count': row.get('memory_count', 0)
                }

        return {
            'success': True,
            'projects': list(all_projects.values()),
            'count': len(all_projects)
        }
    except DatabaseError as e:
        logger.error(f"Database error getting projects: {e}")
        return {
            'success': False,
            'error_code': e.error_code,
            'error': str(e),
            'projects': []
        }
    except Exception as e:
        logger.error(f"Unexpected error getting projects: {e}")
        return {
            'success': False,
            'error_code': 'PROJECTS_FETCH_ERROR',
            'error': str(e),
            'projects': []
        }
1411
+
1412
+
1413
@app.get("/api/sessions/{project_path:path}")
async def get_project_sessions(project_path: str):
    """Get all sessions for a project."""
    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        rows = await db.execute_query("""
            SELECT
                session_id,
                current_goal,
                updated_at,
                created_at
            FROM session_state
            WHERE project_path = ?
            ORDER BY updated_at DESC
        """, (project_path,))
    except DatabaseError as exc:
        logger.error(f"Database error getting sessions for {project_path}: {exc}")
        return {
            'success': False,
            'error_code': exc.error_code,
            'error': str(exc),
            'sessions': []
        }
    except Exception as exc:
        logger.error(f"Unexpected error getting sessions for {project_path}: {exc}")
        return {
            'success': False,
            'error_code': 'SESSIONS_FETCH_ERROR',
            'error': str(exc),
            'sessions': []
        }

    found = rows or []
    return {
        'success': True,
        'sessions': found,
        'count': len(found)
    }
1451
+
1452
+
1453
@app.get("/health")
async def health_check():
    """Comprehensive health check with component status.

    Returns status of:
    - Agent (always healthy if responding)
    - Database (SQLite connection)
    - Ollama (embedding service)
    """
    # Database: a trivial query proves the connection is alive.
    db_healthy = False
    db_error = None
    try:
        probe = await db.execute_query("SELECT 1 as test")
        db_healthy = probe is not None
    except Exception as exc:
        db_error = str(exc)

    # Embedding service health comes from the embeddings client itself.
    ollama_health = await embeddings.check_health()
    ollama_ok = ollama_health.get("healthy", False)

    # Overall status: degraded means DB up but embeddings down
    # (keyword search still works without embeddings).
    if db_healthy and ollama_ok:
        status = "healthy"
    elif db_healthy:
        status = "degraded"
    else:
        status = "unhealthy"

    return {
        "status": status,
        "version": "2.0.0",
        "timestamp": datetime.now().isoformat(),
        "components": {
            "agent": {
                "healthy": True,
                "version": "2.0.0"
            },
            "database": {
                "healthy": db_healthy,
                "error": db_error
            },
            "ollama": ollama_health
        },
        "degraded_mode": embeddings.is_degraded(),
        "capabilities": {
            "semantic_search": ollama_ok,
            "keyword_search": db_healthy,  # Fallback always available if DB healthy
            "memory_storage": db_healthy and ollama_ok,
            "memory_storage_degraded": db_healthy  # Can store without embeddings
        }
    }
1504
+
1505
+
1506
@app.get("/ready")
async def readiness_check():
    """Readiness probe for startup checks.

    Returns 200 only when the service is fully ready to accept requests.
    Used by orchestrators to know when to route traffic.
    """
    # Only the line that can raise lives in the try.
    try:
        probe = await db.execute_query("SELECT 1 as test")
    except Exception as exc:
        return JSONResponse(
            status_code=503,
            content={"ready": False, "reason": f"Database error: {str(exc)}"}
        )

    if not probe:
        return JSONResponse(
            status_code=503,
            content={"ready": False, "reason": "Database not responding"}
        )

    # Note: We don't require Ollama for readiness — the service can run
    # in degraded mode without embeddings.
    return {
        "ready": True,
        "timestamp": datetime.now().isoformat(),
        "degraded_mode": embeddings.is_degraded()
    }
1533
+
1534
+
1535
@app.get("/health/live")
async def liveness_check():
    """Simple liveness probe.

    Returns 200 if the process is alive. Used for basic health monitoring.
    """
    return {
        "alive": True,
        "timestamp": datetime.now().isoformat(),
    }
1542
+
1543
+
1544
@app.get("/api/index-stats")
async def get_index_stats():
    """Get FAISS vector index statistics.

    Returns information about:
    - Whether FAISS is available
    - Index sizes for memories, patterns, and timeline
    - Search and add counts
    """
    try:
        index_stats = db.get_index_stats()
    except Exception as exc:
        return {
            "success": False,
            "error": str(exc),
            "timestamp": datetime.now().isoformat()
        }
    return {
        "success": True,
        "stats": index_stats,
        "timestamp": datetime.now().isoformat()
    }
1566
+
1567
+
1568
@app.post("/api/rebuild-indexes")
async def rebuild_indexes():
    """Rebuild all FAISS vector indexes from the database.

    Use this if indexes get out of sync with the database.
    """
    try:
        # Rebuild each index in turn; any failure aborts the whole call.
        await db._rebuild_memories_index()
        await db._rebuild_patterns_index()
        await db._rebuild_timeline_index()
    except Exception as exc:
        return {
            "success": False,
            "error": str(exc),
            "timestamp": datetime.now().isoformat()
        }
    return {
        "success": True,
        "message": "Indexes rebuilt successfully",
        "stats": db.get_index_stats(),
        "timestamp": datetime.now().isoformat()
    }
1591
+
1592
+
1593
+ # ============= Retry Queue API =============
1594
+
1595
@app.get("/api/queue/stats")
async def get_queue_stats():
    """Get retry queue statistics."""
    if retry_queue is None:
        return {"success": False, "error": "Queue not initialized"}

    payload = {
        "success": True,
        "stats": retry_queue.get_stats(),
        "timestamp": datetime.now().isoformat(),
    }
    return payload
1606
+
1607
+
1608
@app.get("/api/queue/pending")
async def get_pending_requests(limit: int = 20):
    """Get pending requests in the queue."""
    if retry_queue is None:
        return {"success": False, "error": "Queue not initialized"}

    pending = retry_queue.get_pending(limit=limit)
    return {
        "success": True,
        "items": pending,
        "count": len(pending),
        "timestamp": datetime.now().isoformat(),
    }
1621
+
1622
+
1623
@app.get("/api/queue/dead-letters")
async def get_dead_letters(limit: int = 50):
    """Get items from the dead letter queue."""
    if retry_queue is None:
        return {"success": False, "error": "Queue not initialized"}

    letters = retry_queue.get_dead_letters(limit=limit)
    return {
        "success": True,
        "items": letters,
        "count": len(letters),
        "timestamp": datetime.now().isoformat(),
    }
1636
+
1637
+
1638
@app.post("/api/queue/retry-dead-letter/{item_id}")
async def retry_dead_letter(item_id: int):
    """Move a dead letter back to the pending queue for retry."""
    if retry_queue is None:
        return {"success": False, "error": "Queue not initialized"}

    requeued = retry_queue.retry_dead_letter(item_id)
    message = "Item requeued" if requeued else "Item not found"
    return {
        "success": requeued,
        "message": message,
        "timestamp": datetime.now().isoformat(),
    }
1650
+
1651
+
1652
@app.post("/api/queue/enqueue")
async def enqueue_request(request: Request):
    """Manually enqueue a request for retry.

    Useful for testing or manual recovery.

    Fix: a missing or empty ``endpoint`` field now returns an explicit
    400 instead of enqueuing an item with ``endpoint=None`` (which
    could never be replayed). Mirrors create_api_key's missing-name
    validation style.
    """
    if retry_queue is None:
        return {"success": False, "error": "Queue not initialized"}

    try:
        body = await request.json()
        endpoint = body.get("endpoint")
        if not endpoint:
            return JSONResponse(
                status_code=400,
                content={"success": False, "error": "endpoint is required"}
            )
        item_id = retry_queue.enqueue(
            endpoint=endpoint,
            payload=body.get("payload", {}),
            method=body.get("method", "POST"),
            headers=body.get("headers")
        )
        return {
            "success": True,
            "item_id": item_id,
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        return {
            "success": False,
            "error": str(e),
            "timestamp": datetime.now().isoformat()
        }
1680
+
1681
+
1682
+ # ============= Authentication API =============
1683
+
1684
@app.get("/api/auth/stats")
async def get_auth_stats():
    """Get authentication service statistics."""
    stats = auth_service.get_stats()
    return {
        "success": True,
        "stats": stats,
        "timestamp": datetime.now().isoformat(),
    }
1692
+
1693
+
1694
@app.get("/api/auth/keys")
async def list_api_keys():
    """List all API keys (without the actual key values)."""
    keys = auth_service.list_keys()
    return {
        "success": True,
        "keys": keys,
        "timestamp": datetime.now().isoformat(),
    }
1702
+
1703
+
1704
@app.post("/api/auth/keys")
async def create_api_key(request: Request):
    """Generate a new API key.

    IMPORTANT: The key is only returned once! Store it securely.

    Fix: a malformed JSON body now yields an explicit 400 response
    (matching the missing-name case and the JSONDecodeError handling
    in api_resolve_conflict) instead of falling into the generic
    catch-all and returning HTTP 200 with success=False.
    """
    try:
        body = await request.json()
    except json.JSONDecodeError as e:
        return JSONResponse(
            status_code=400,
            content={"success": False, "error": f"Invalid JSON in request body: {str(e)}"}
        )

    try:
        name = body.get("name")
        description = body.get("description", "")
        rate_limit = body.get("rate_limit", 100)

        if not name:
            return JSONResponse(
                status_code=400,
                content={"success": False, "error": "Name is required"}
            )

        key = auth_service.generate_key(name, description, rate_limit)
        return {
            "success": True,
            "key": key,
            "name": name,
            "warning": "Store this key securely - it will not be shown again!",
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        return {"success": False, "error": str(e)}
1732
+
1733
+
1734
@app.post("/api/auth/keys/{name}/revoke")
async def revoke_api_key(name: str):
    """Revoke an API key by name."""
    revoked = auth_service.revoke_key(name)
    if revoked:
        message = f"Key '{name}' revoked"
    else:
        message = f"Key '{name}' not found"
    return {
        "success": revoked,
        "message": message,
        "timestamp": datetime.now().isoformat(),
    }
1743
+
1744
+
1745
@app.post("/api/auth/keys/{name}/rotate")
async def rotate_api_key(name: str):
    """Rotate an API key (revoke old, generate new with same name).

    IMPORTANT: The new key is only returned once! Store it securely.
    """
    new_key = auth_service.rotate_key(name)
    # Guard clause: nothing to rotate.
    if not new_key:
        return {
            "success": False,
            "error": f"Key '{name}' not found or already revoked",
            "timestamp": datetime.now().isoformat()
        }
    return {
        "success": True,
        "key": new_key,
        "name": name,
        "warning": "Store this key securely - it will not be shown again!",
        "timestamp": datetime.now().isoformat()
    }
1765
+
1766
+
1767
+ # ============= Anchor Conflict Resolution API =============
1768
+
1769
@app.get("/api/conflicts")
async def api_get_conflicts(
    session_id: Optional[str] = None,
    project_path: Optional[str] = None,
    limit: int = 20
):
    """Get unresolved anchor conflicts."""
    # Thin delegation to the shared skill implementation.
    conflicts = await get_unresolved_conflicts(
        db=db,
        session_id=session_id,
        project_path=project_path,
        limit=limit,
    )
    return conflicts
1782
+
1783
+
1784
@app.post("/api/conflicts/{conflict_id}/resolve")
async def api_resolve_conflict(conflict_id: int, request: Request):
    """Resolve an anchor conflict."""
    try:
        payload = await request.json()
        return await resolve_conflict(
            db=db,
            embeddings=embeddings,
            conflict_id=conflict_id,
            resolution=payload.get("resolution"),
            keep_anchor_id=payload.get("keep_anchor_id"),
            resolved_by=payload.get("resolved_by", "api")
        )
    except json.JSONDecodeError as exc:
        # Malformed request body — report without touching the DB.
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(exc)}"
        }
    except DatabaseError as exc:
        logger.error(f"Database error resolving conflict {conflict_id}: {exc}")
        return {
            "success": False,
            "error_code": exc.error_code,
            "error": str(exc)
        }
    except Exception as exc:
        logger.error(f"Unexpected error resolving conflict {conflict_id}: {exc}")
        return {
            "success": False,
            "error_code": "CONFLICT_RESOLVE_ERROR",
            "error": str(exc)
        }
1817
+
1818
+
1819
@app.post("/api/conflicts/auto-resolve")
async def api_auto_resolve(session_id: Optional[str] = None):
    """Try to resolve simple anchor conflicts automatically."""
    result = await auto_resolve_conflicts(
        db=db,
        embeddings=embeddings,
        session_id=session_id,
    )
    return result
1827
+
1828
+
1829
@app.get("/api/anchors/history")
async def api_anchor_history(
    anchor_id: Optional[int] = None,
    session_id: Optional[str] = None,
    limit: int = 50
):
    """Return anchor history so the evolution of a fact can be traced."""
    history = await get_anchor_history(
        db=db,
        anchor_id=anchor_id,
        session_id=session_id,
        limit=limit,
    )
    return history
1842
+
1843
+
1844
+ # ============= Memory Cleanup API =============
1845
+
1846
@app.post("/api/cleanup")
async def api_run_cleanup(
    project_path: Optional[str] = None,
    dry_run: bool = True
):
    """Run memory cleanup; defaults to a dry run so nothing is deleted."""
    outcome = await memory_cleanup(
        db=db,
        embeddings=embeddings,
        project_path=project_path,
        dry_run=dry_run,
    )
    return outcome
1858
+
1859
+
1860
@app.get("/api/cleanup/stats")
async def api_cleanup_stats():
    """Return statistics about past/possible cleanup runs."""
    stats = await get_cleanup_stats(db=db, embeddings=embeddings)
    return stats
1864
+
1865
+
1866
@app.get("/api/cleanup/config")
async def api_get_cleanup_config(project_path: Optional[str] = None):
    """Return the cleanup configuration (optionally scoped to a project)."""
    config = await get_cleanup_config(
        db=db, embeddings=embeddings, project_path=project_path
    )
    return config
1870
+
1871
+
1872
@app.post("/api/cleanup/config")
async def api_set_cleanup_config(request: Request):
    """Update cleanup configuration from a JSON body.

    Absent fields are forwarded as None, letting the skill keep its defaults.
    """
    try:
        payload = await request.json()
        # Forward the recognized option fields verbatim from the body.
        option_keys = (
            "project_path", "retention_days", "min_relevance_score",
            "keep_high_importance", "importance_threshold", "dedup_enabled",
            "dedup_threshold", "archive_before_delete", "auto_cleanup_enabled",
        )
        options = {key: payload.get(key) for key in option_keys}
        return await set_cleanup_config(db=db, embeddings=embeddings, **options)
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error setting cleanup config: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error setting cleanup config: {e}")
        return {"success": False, "error_code": "CLEANUP_CONFIG_ERROR", "error": str(e)}
1910
+
1911
+
1912
@app.get("/api/archives")
async def api_get_archives(
    project_path: Optional[str] = None,
    reason: Optional[str] = None,
    limit: int = 50
):
    """List archived memories, filterable by project and archive reason."""
    archives = await get_archived_memories(
        db=db,
        embeddings=embeddings,
        project_path=project_path,
        reason=reason,
        limit=limit,
    )
    return archives
1926
+
1927
+
1928
@app.post("/api/archives/{archive_id}/restore")
async def api_restore_memory(archive_id: int):
    """Restore a previously archived memory back into active storage."""
    result = await restore_memory(
        db=db, embeddings=embeddings, archive_id=archive_id
    )
    return result
1932
+
1933
+
1934
@app.post("/api/archives/purge")
async def api_purge_archives():
    """Permanently delete archives whose retention period has expired."""
    result = await purge_expired_archives(db=db, embeddings=embeddings)
    return result
1938
+
1939
+
1940
+ # ============= Admin API (Embedding Model Management) =============
1941
+
1942
@app.get("/api/admin/embeddings")
async def api_embedding_status():
    """Report embedding service status and the models that are available."""
    status = await get_embedding_status(db=db, embeddings=embeddings)
    return status
1946
+
1947
+
1948
@app.post("/api/admin/embeddings/switch")
async def api_switch_model(request: Request):
    """Switch the active embedding model, optionally reindexing existing data."""
    try:
        payload = await request.json()
        result = await switch_embedding_model(
            db=db,
            embeddings=embeddings,
            model=payload.get("model", "nomic-embed-text"),
            reindex_existing=payload.get("reindex_existing", False),
        )
        return result
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error switching embedding model: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error switching embedding model: {e}")
        return {"success": False, "error_code": "MODEL_SWITCH_ERROR", "error": str(e)}
1979
+
1980
+
1981
@app.get("/api/admin/embeddings/models/{model}")
async def api_model_info(model: str):
    """Return detailed information about one embedding model."""
    info = await get_model_info(db=db, embeddings=embeddings, model=model)
    return info
1985
+
1986
+
1987
@app.post("/api/admin/reindex")
async def api_start_reindex(request: Request):
    """Kick off background reindexing of stored memories."""
    try:
        payload = await request.json()
        result = await reindex_memories(
            db=db,
            embeddings=embeddings,
            model=payload.get("model"),
            project_path=payload.get("project_path"),
            batch_size=payload.get("batch_size", 10),
            dry_run=payload.get("dry_run", False),
        )
        return result
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error during reindex: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error during reindex: {e}")
        return {"success": False, "error_code": "REINDEX_ERROR", "error": str(e)}
2020
+
2021
+
2022
@app.get("/api/admin/reindex/progress")
async def api_reindex_progress():
    """Report progress of the current reindex operation."""
    progress = await get_reindex_progress(db=db, embeddings=embeddings)
    return progress
2026
+
2027
+
2028
@app.post("/api/admin/reindex/cancel")
async def api_cancel_reindex():
    """Cancel a reindex operation that is currently running."""
    result = await cancel_reindex(db=db, embeddings=embeddings)
    return result
2032
+
2033
+
2034
@app.get("/api/admin/stats")
async def api_system_stats():
    """Return comprehensive system-wide statistics."""
    stats = await get_system_stats(db=db, embeddings=embeddings)
    return stats
2038
+
2039
+
2040
+ # ============= Auto-Injection API =============
2041
+
2042
@app.post("/api/inject")
async def api_auto_inject(request: Request):
    """Get relevant context for current task (auto-injection).

    Send current query/task and get back relevant memories and patterns.
    """
    try:
        payload = await request.json()
        injector = get_auto_injector(db, embeddings)

        context = await injector.get_relevant_context(
            current_query=payload.get("query", ""),
            project_path=payload.get("project_path"),
            task_type=payload.get("task_type"),
            max_results=payload.get("max_results", 3),
        )

        # Also hand back a pre-formatted rendering for easy consumption.
        return {
            "success": True,
            "context": context,
            "formatted": injector.format_injection(context),
        }
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error during auto-inject: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error during auto-inject: {e}")
        return {"success": False, "error_code": "AUTO_INJECT_ERROR", "error": str(e)}
2087
+
2088
+
2089
@app.post("/api/inject/reset")
async def api_reset_injection():
    """Clear injection tracking so a new session starts fresh."""
    get_auto_injector(db, embeddings).reset_session()
    return {"success": True, "message": "Injection context reset"}
2095
+
2096
+
2097
+ # ============= Confidence Scoring API =============
2098
+
2099
@app.get("/api/memory/{memory_id}/confidence")
async def api_get_confidence(memory_id: int):
    """Compute and return the confidence score for one memory."""
    service = get_confidence_service(db, embeddings)
    return await service.calculate_confidence(memory_id)
2104
+
2105
+
2106
@app.post("/api/memory/{memory_id}/verify")
async def api_verify_memory(memory_id: int, request: Request):
    """Flag a memory as verified (or unverified) by a given actor."""
    try:
        payload = await request.json()
        service = get_confidence_service(db, embeddings)
        return await service.verify_memory(
            memory_id=memory_id,
            verified=payload.get("verified", True),
            verified_by=payload.get("verified_by", "user"),
        )
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error verifying memory {memory_id}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error verifying memory {memory_id}: {e}")
        return {"success": False, "error_code": "MEMORY_VERIFY_ERROR", "error": str(e)}
2137
+
2138
+
2139
@app.post("/api/memory/{memory_id}/outdated")
async def api_mark_outdated(memory_id: int, request: Request):
    """Flag a memory as outdated, recording the reason supplied in the body."""
    try:
        payload = await request.json()
        service = get_confidence_service(db, embeddings)
        return await service.mark_outdated(
            memory_id=memory_id,
            reason=payload.get("reason", "manually marked"),
        )
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error marking memory {memory_id} outdated: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error marking memory {memory_id} outdated: {e}")
        return {"success": False, "error_code": "MEMORY_OUTDATED_ERROR", "error": str(e)}
2169
+
2170
+
2171
@app.get("/api/memory/low-confidence")
async def api_low_confidence(
    project_path: Optional[str] = None,
    threshold: float = 0.5,
    limit: int = 20
):
    """List memories whose confidence falls below the threshold and need review."""
    service = get_confidence_service(db, embeddings)
    low = await service.get_low_confidence_memories(
        project_path=project_path,
        threshold=threshold,
        limit=limit,
    )
    return low
2184
+
2185
+
2186
+ # ============= CLAUDE.md Sync API =============
2187
+
2188
@app.get("/api/claude-md/suggestions")
async def api_claude_md_suggestions(project_path: Optional[str] = None):
    """Suggest updates that could be applied to CLAUDE.md."""
    service = get_claude_md_sync(db, embeddings)
    return await service.suggest_updates(project_path)
2193
+
2194
+
2195
@app.post("/api/claude-md/sync")
async def api_claude_md_sync(request: Request):
    """Sync accumulated learnings into CLAUDE.md (dry run by default)."""
    try:
        payload = await request.json()
        service = get_claude_md_sync(db, embeddings)
        return await service.sync_to_claude_md(
            project_path=payload.get("project_path"),
            dry_run=payload.get("dry_run", True),
        )
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error syncing CLAUDE.md: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error syncing CLAUDE.md: {e}")
        return {"success": False, "error_code": "CLAUDE_MD_SYNC_ERROR", "error": str(e)}
2225
+
2226
+
2227
+ # ============= Natural Language Interface =============
2228
+
2229
@app.post("/api/memory/natural")
async def api_natural_language(request: Request):
    """Process natural language memory commands.

    Examples:
    - "remember this: always use async/await for DB calls"
    - "what did I learn about authentication?"
    - "show me past errors"
    - "memory stats"
    """
    try:
        payload = await request.json()
        return await process_natural_command(
            db=db,
            embeddings=embeddings,
            command=payload.get("command", ""),
            project_path=payload.get("project_path"),
            session_id=payload.get("session_id"),
        )
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error processing natural command: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error processing natural command: {e}")
        return {"success": False, "error_code": "NATURAL_COMMAND_ERROR", "error": str(e)}
2269
+
2270
+
2271
+ # ============= Session Summarization API =============
2272
+
2273
@app.post("/api/sessions/{session_id}/auto-summarize")
async def api_auto_summarize(session_id: str, project_path: Optional[str] = None):
    """Produce an automatic summary of a session from its timeline."""
    summary = await auto_summarize_session(
        db=db,
        embeddings=embeddings,
        session_id=session_id,
        project_path=project_path,
    )
    return summary
2282
+
2283
+
2284
@app.get("/api/sessions/handoff")
async def api_session_handoff(
    project_path: Optional[str] = None,
    include_last_n: int = 3
):
    """Build a context handoff from the most recent previous sessions."""
    handoff = await get_session_handoff(
        db=db,
        embeddings=embeddings,
        project_path=project_path,
        include_last_n_sessions=include_last_n,
    )
    return handoff
2296
+
2297
+
2298
@app.post("/api/sessions/{session_id}/diary")
async def api_create_diary(
    session_id: str,
    request: Request
):
    """Write a detailed diary entry for the given session."""
    try:
        payload = await request.json()
        return await create_diary_entry(
            db=db,
            embeddings=embeddings,
            session_id=session_id,
            project_path=payload.get("project_path"),
            user_notes=payload.get("user_notes"),
        )
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error creating diary for session {session_id}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error creating diary for session {session_id}: {e}")
        return {"success": False, "error_code": "DIARY_CREATE_ERROR", "error": str(e)}
2333
+
2334
+
2335
@app.get("/api/sessions/{session_id}/inactivity")
async def api_check_inactivity(
    session_id: str,
    threshold_hours: float = 4.0
):
    """Check whether a session has been idle long enough to auto-summarize."""
    verdict = await check_session_inactivity(
        db=db,
        session_id=session_id,
        inactivity_threshold_hours=threshold_hours,
    )
    return verdict
2346
+
2347
+
2348
+ # ============= Insights API =============
2349
+
2350
@app.get("/api/insights")
async def api_get_insights(
    insight_type: Optional[str] = None,
    project_path: Optional[str] = None,
    min_confidence: float = 0.5,
    limit: int = 20
):
    """Return insights learned across sessions, filtered by type/confidence."""
    # Imported lazily to avoid paying the skills import cost at startup.
    from skills.insights import get_insights as get_insights_skill
    insights = await get_insights_skill(
        db=db,
        embeddings=embeddings,
        insight_type=insight_type,
        project_path=project_path,
        min_confidence=min_confidence,
        limit=limit,
    )
    return insights
2367
+
2368
+
2369
@app.post("/api/insights/aggregate")
async def api_run_aggregation(days_back: int = 30):
    """Aggregate learnings across sessions from the last ``days_back`` days."""
    from skills.insights import run_aggregation as run_agg
    result = await run_agg(db=db, embeddings=embeddings, days_back=days_back)
    return result
2374
+
2375
+
2376
@app.get("/api/insights/suggestions")
async def api_get_suggestions(min_confidence: float = 0.7):
    """Return suggested improvements for CLAUDE.md above a confidence floor."""
    from skills.insights import suggest_improvements as suggest
    suggestions = await suggest(
        db=db, embeddings=embeddings, min_confidence=min_confidence
    )
    return suggestions
2381
+
2382
+
2383
@app.post("/api/insights/{insight_id}/feedback")
async def api_insight_feedback(insight_id: int, request: Request):
    """Record user feedback (helpful or not) against an insight."""
    from skills.insights import record_insight_feedback as record_fb
    try:
        payload = await request.json()
        return await record_fb(
            db=db,
            embeddings=embeddings,
            insight_id=insight_id,
            helpful=payload.get("helpful", True),
            session_id=payload.get("session_id"),
            comment=payload.get("comment"),
        )
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error recording feedback for insight {insight_id}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error recording feedback for insight {insight_id}: {e}")
        return {"success": False, "error_code": "INSIGHT_FEEDBACK_ERROR", "error": str(e)}
2417
+
2418
+
2419
@app.post("/api/insights/{insight_id}/apply")
async def api_mark_insight_applied(insight_id: int):
    """Record that an insight has been applied to CLAUDE.md."""
    from skills.insights import mark_insight_applied as mark_applied
    result = await mark_applied(db=db, embeddings=embeddings, insight_id=insight_id)
    return result
2424
+
2425
+
2426
+ # ============= Agent Configuration API =============
2427
+
2428
@app.get("/api/agents")
async def get_all_agents():
    """List every available agent, with its category groupings."""
    response = {
        "success": True,
        "agents": AVAILABLE_AGENTS,
        "categories": AGENT_CATEGORIES,
        "by_category": get_agents_by_category(),
        "total": len(AVAILABLE_AGENTS),
    }
    return response
2438
+
2439
+
2440
@app.get("/api/mcps")
async def get_all_mcps():
    """List every available MCP server."""
    return {
        "success": True,
        "mcps": AVAILABLE_MCPS,
        "total": len(AVAILABLE_MCPS),
    }
2448
+
2449
+
2450
@app.get("/api/hooks")
async def get_all_hooks():
    """List every available hook."""
    return {
        "success": True,
        "hooks": AVAILABLE_HOOKS,
        "total": len(AVAILABLE_HOOKS),
    }
2458
+
2459
+
2460
@app.get("/api/project/{project_path:path}/config")
async def get_project_config(project_path: str):
    """Get the full configuration (agents, MCPs, hooks, preferences) for a project.

    Stored per-project rows override the defaults declared in
    AVAILABLE_AGENTS / AVAILABLE_MCPS / AVAILABLE_HOOKS. The three
    previously copy-pasted merge loops are factored into one helper.

    Returns a dict with per-component status maps plus enabled/total stats,
    or ``{"success": False, ...}`` with an error code on failure.
    """

    def _status_map(rows, id_column, catalog, with_priority):
        """Merge stored config rows with catalog defaults into {id: status}.

        Rows from the DB win; catalog entries fill in defaults for anything
        not configured yet. ``with_priority`` adds the agent-only field.
        """
        status = {}
        for row in (rows or []):
            entry = {
                'enabled': bool(row['enabled']),
                # settings column holds a JSON blob (or NULL / empty string)
                'settings': json.loads(row['settings']) if row['settings'] else {}
            }
            if with_priority:
                entry['priority'] = row['priority']
            status[row[id_column]] = entry
        for item in catalog:
            if item['id'] not in status:
                entry = {'enabled': item['default_enabled'], 'settings': {}}
                if with_priority:
                    entry['priority'] = item['priority']
                status[item['id']] = entry
        return status

    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        agent_rows = await db.execute_query(
            "SELECT * FROM project_agent_config WHERE project_path = ?",
            (project_path,)
        )
        mcp_rows = await db.execute_query(
            "SELECT * FROM project_mcp_config WHERE project_path = ?",
            (project_path,)
        )
        hook_rows = await db.execute_query(
            "SELECT * FROM project_hook_config WHERE project_path = ?",
            (project_path,)
        )
        prefs = await db.execute_query(
            "SELECT * FROM project_preferences WHERE project_path = ?",
            (project_path,)
        )

        agent_status = _status_map(agent_rows, 'agent_id', AVAILABLE_AGENTS, with_priority=True)
        mcp_status = _status_map(mcp_rows, 'mcp_id', AVAILABLE_MCPS, with_priority=False)
        hook_status = _status_map(hook_rows, 'hook_id', AVAILABLE_HOOKS, with_priority=False)

        return {
            "success": True,
            "project_path": project_path,
            "preferences": prefs[0] if prefs else None,
            "agents": agent_status,
            "mcps": mcp_status,
            "hooks": hook_status,
            "stats": {
                "enabled_agents": sum(1 for a in agent_status.values() if a['enabled']),
                "total_agents": len(AVAILABLE_AGENTS),
                "enabled_mcps": sum(1 for m in mcp_status.values() if m['enabled']),
                "total_mcps": len(AVAILABLE_MCPS),
                "enabled_hooks": sum(1 for h in hook_status.values() if h['enabled']),
                "total_hooks": len(AVAILABLE_HOOKS)
            }
        }
    except DatabaseError as e:
        logger.error(f"Database error getting project config for {project_path}: {e}")
        return {
            "success": False,
            "error_code": e.error_code,
            "error": str(e)
        }
    except Exception as e:
        logger.error(f"Unexpected error getting project config for {project_path}: {e}")
        return {
            "success": False,
            "error_code": "PROJECT_CONFIG_ERROR",
            "error": str(e)
        }
2568
+
2569
+
2570
@app.post("/api/project/{project_path:path}/agent/{agent_id}")
async def update_agent_config(project_path: str, agent_id: str, request: Request):
    """Upsert the per-project configuration for a single agent."""
    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        payload = await request.json()
        enabled = payload.get('enabled', True)
        priority = payload.get('priority', 5)
        settings_json = json.dumps(payload.get('settings', {}))

        await db.execute_query(
            """
            INSERT INTO project_agent_config (project_path, agent_id, enabled, priority, settings)
            VALUES (?, ?, ?, ?, ?)
            ON CONFLICT(project_path, agent_id) DO UPDATE SET
                enabled = excluded.enabled,
                priority = excluded.priority,
                settings = excluded.settings,
                updated_at = datetime('now')
            """,
            (project_path, agent_id, 1 if enabled else 0, priority, settings_json)
        )
        db.conn.commit()

        return {"success": True, "agent_id": agent_id, "enabled": enabled}
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error updating agent config {agent_id} for {project_path}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error updating agent config {agent_id} for {project_path}: {e}")
        return {"success": False, "error_code": "AGENT_CONFIG_UPDATE_ERROR", "error": str(e)}
2616
+
2617
+
2618
@app.post("/api/project/{project_path:path}/mcp/{mcp_id}")
async def update_mcp_config(project_path: str, mcp_id: str, request: Request):
    """Upsert the per-project configuration for a single MCP server."""
    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        payload = await request.json()
        enabled = payload.get('enabled', True)
        settings_json = json.dumps(payload.get('settings', {}))

        await db.execute_query(
            """
            INSERT INTO project_mcp_config (project_path, mcp_id, enabled, settings)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(project_path, mcp_id) DO UPDATE SET
                enabled = excluded.enabled,
                settings = excluded.settings,
                updated_at = datetime('now')
            """,
            (project_path, mcp_id, 1 if enabled else 0, settings_json)
        )
        db.conn.commit()

        return {"success": True, "mcp_id": mcp_id, "enabled": enabled}
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error updating MCP config {mcp_id} for {project_path}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error updating MCP config {mcp_id} for {project_path}: {e}")
        return {"success": False, "error_code": "MCP_CONFIG_UPDATE_ERROR", "error": str(e)}
2662
+
2663
+
2664
@app.post("/api/project/{project_path:path}/hook/{hook_id}")
async def update_hook_config(project_path: str, hook_id: str, request: Request):
    """Upsert the per-project configuration for a single hook."""
    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        payload = await request.json()
        enabled = payload.get('enabled', True)
        settings_json = json.dumps(payload.get('settings', {}))

        await db.execute_query(
            """
            INSERT INTO project_hook_config (project_path, hook_id, enabled, settings)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(project_path, hook_id) DO UPDATE SET
                enabled = excluded.enabled,
                settings = excluded.settings,
                updated_at = datetime('now')
            """,
            (project_path, hook_id, 1 if enabled else 0, settings_json)
        )
        db.conn.commit()

        return {"success": True, "hook_id": hook_id, "enabled": enabled}
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error updating hook config {hook_id} for {project_path}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error updating hook config {hook_id} for {project_path}: {e}")
        return {"success": False, "error_code": "HOOK_CONFIG_UPDATE_ERROR", "error": str(e)}
2708
+
2709
+
2710
@app.post("/api/project/{project_path:path}/preferences")
async def update_project_preferences(project_path: str, request: Request):
    """Upsert display and behavior preferences for a project."""
    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        payload = await request.json()

        # Assemble the row values in column order before the upsert.
        row = (
            project_path,
            payload.get('name'),
            payload.get('description'),
            payload.get('color', '#58a6ff'),
            payload.get('icon', 'folder'),
            payload.get('default_model', 'sonnet'),
            1 if payload.get('auto_memory', True) else 0,
            1 if payload.get('auto_checkpoint', True) else 0,
            json.dumps(payload.get('settings', {})),
        )
        await db.execute_query(
            """
            INSERT INTO project_preferences (project_path, name, description, color, icon, default_model, auto_memory, auto_checkpoint, settings)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(project_path) DO UPDATE SET
                name = excluded.name,
                description = excluded.description,
                color = excluded.color,
                icon = excluded.icon,
                default_model = excluded.default_model,
                auto_memory = excluded.auto_memory,
                auto_checkpoint = excluded.auto_checkpoint,
                settings = excluded.settings,
                updated_at = datetime('now')
            """,
            row
        )
        db.conn.commit()

        return {"success": True, "project_path": project_path}
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}",
        }
    except DatabaseError as e:
        logger.error(f"Database error updating preferences for {project_path}: {e}")
        return {"success": False, "error_code": e.error_code, "error": str(e)}
    except Exception as e:
        logger.error(f"Unexpected error updating preferences for {project_path}: {e}")
        return {"success": False, "error_code": "PREFERENCES_UPDATE_ERROR", "error": str(e)}
2768
+
2769
+
2770
@app.post("/api/project/{project_path:path}/agents/bulk")
async def bulk_update_agents(project_path: str, request: Request):
    """Bulk enable/disable agents for a project.

    Expects a JSON body of the form {"updates": {agent_id: enabled, ...}}.
    Each entry is upserted into project_agent_config; a single commit covers
    the whole batch. Returns {"success": True, "updated": N} or an
    "error_code"/"error" pair on failure.
    """
    # Normalize path to prevent duplicates from different separators
    project_path = normalize_path(project_path)
    try:
        body = await request.json()
        updates = body.get('updates', {})  # {agent_id: enabled}

        # Validate shape up front: a non-dict 'updates' (or non-dict body)
        # would otherwise raise AttributeError below and be misreported as a
        # generic BULK_UPDATE_ERROR.
        if not isinstance(updates, dict):
            return {
                "success": False,
                "error_code": "INVALID_JSON",
                "error": "'updates' must be an object mapping agent_id to an enabled flag"
            }

        for agent_id, enabled in updates.items():
            await db.execute_query(
                """
                INSERT INTO project_agent_config (project_path, agent_id, enabled)
                VALUES (?, ?, ?)
                ON CONFLICT(project_path, agent_id) DO UPDATE SET
                    enabled = excluded.enabled,
                    updated_at = datetime('now')
                """,
                (project_path, agent_id, 1 if enabled else 0)
            )

        # One commit for the whole batch keeps the update atomic.
        db.conn.commit()

        return {"success": True, "updated": len(updates)}
    except json.JSONDecodeError as e:
        return {
            "success": False,
            "error_code": "INVALID_JSON",
            "error": f"Invalid JSON in request body: {str(e)}"
        }
    except DatabaseError as e:
        logger.error(f"Database error in bulk agent update for {project_path}: {e}")
        return {
            "success": False,
            "error_code": e.error_code,
            "error": str(e)
        }
    except Exception as e:
        logger.error(f"Unexpected error in bulk agent update for {project_path}: {e}")
        return {
            "success": False,
            "error_code": "BULK_UPDATE_ERROR",
            "error": str(e)
        }
2814
+
2815
+
2816
+ # ============= WebSocket Endpoint for Real-time Updates =============
2817
+
2818
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for real-time dashboard updates.

    Clients can:
    - Receive broadcasts of all memory/timeline events
    - Subscribe to specific event types
    - Filter by project path
    """
    ws_manager = get_websocket_manager()
    client_id = await ws_manager.connect(websocket)

    try:
        while True:
            # Wait for messages from client
            data = await websocket.receive_json()
            await ws_manager.handle_message(client_id, data)

    except WebSocketDisconnect:
        # Normal client disconnect — just unregister.
        await ws_manager.disconnect(client_id)
    except Exception as e:
        # Fix: previously this branch swallowed the error silently; log it so
        # unexpected WebSocket failures are visible, matching the
        # logger.error convention used by the other handlers in this file.
        logger.error(f"Unexpected WebSocket error for client {client_id}: {e}")
        await ws_manager.disconnect(client_id)
2840
+
2841
+
2842
@app.get("/api/ws/stats")
async def websocket_stats():
    """Report current WebSocket connection statistics."""
    stats = get_websocket_manager().get_stats()
    return {"success": True, "stats": stats}
2850
+
2851
+
2852
if __name__ == "__main__":
    # Local import keeps uvicorn optional for consumers that only mount `app`.
    import uvicorn
    # Host/port come from the environment with fallbacks (default port 8102).
    # NOTE(review): reload=True is a development setting, and the "0.0.0.0"
    # default binds all interfaces — confirm this entry point is not used for
    # production serving (run_server.py may be the production path; verify).
    uvicorn.run(
        "main:app",
        host=os.getenv("HOST", "0.0.0.0"),
        port=int(os.getenv("PORT", 8102)),
        reload=True
    )