omni-cortex 1.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24) hide show
  1. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/.env.example +22 -0
  2. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +280 -0
  3. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/chat_service.py +315 -0
  4. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/database.py +1093 -0
  5. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/image_service.py +549 -0
  6. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/logging_config.py +122 -0
  7. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/main.py +1124 -0
  8. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/models.py +241 -0
  9. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/project_config.py +170 -0
  10. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/project_scanner.py +164 -0
  11. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/prompt_security.py +111 -0
  12. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/pyproject.toml +23 -0
  13. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/security.py +104 -0
  14. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/uv.lock +1110 -0
  15. omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/websocket_manager.py +104 -0
  16. omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/post_tool_use.py +335 -0
  17. omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/pre_tool_use.py +333 -0
  18. omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/stop.py +184 -0
  19. omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/subagent_stop.py +120 -0
  20. omni_cortex-1.6.0.dist-info/METADATA +319 -0
  21. omni_cortex-1.6.0.dist-info/RECORD +24 -0
  22. omni_cortex-1.6.0.dist-info/WHEEL +4 -0
  23. omni_cortex-1.6.0.dist-info/entry_points.txt +4 -0
  24. omni_cortex-1.6.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,1124 @@
1
+ """FastAPI backend for Omni-Cortex Web Dashboard."""
2
+ # Trigger reload for relationship graph column fix
3
+
4
+ import asyncio
5
+ import json
6
+ import os
7
+ import traceback
8
+ from contextlib import asynccontextmanager
9
+ from datetime import datetime
10
+ from pathlib import Path
11
+ from typing import Optional
12
+
13
+ import uvicorn
14
+ from fastapi import FastAPI, HTTPException, Query, WebSocket, WebSocketDisconnect, Request, Depends
15
+ from fastapi.middleware.cors import CORSMiddleware
16
+ from fastapi.staticfiles import StaticFiles
17
+ from fastapi.responses import FileResponse, Response
18
+ from starlette.middleware.base import BaseHTTPMiddleware
19
+ from watchdog.events import FileSystemEventHandler
20
+ from watchdog.observers import Observer
21
+
22
+ # Rate limiting imports (optional - graceful degradation if not installed)
23
+ try:
24
+ from slowapi import Limiter, _rate_limit_exceeded_handler
25
+ from slowapi.util import get_remote_address
26
+ from slowapi.errors import RateLimitExceeded
27
+ RATE_LIMITING_AVAILABLE = True
28
+ except ImportError:
29
+ RATE_LIMITING_AVAILABLE = False
30
+ Limiter = None
31
+
32
+ from database import (
33
+ bulk_update_memory_status,
34
+ delete_memory,
35
+ ensure_migrations,
36
+ get_activities,
37
+ get_activity_detail,
38
+ get_activity_heatmap,
39
+ get_all_tags,
40
+ get_command_usage,
41
+ get_mcp_usage,
42
+ get_memories,
43
+ get_memories_needing_review,
44
+ get_memory_by_id,
45
+ get_memory_growth,
46
+ get_memory_stats,
47
+ get_recent_sessions,
48
+ get_relationship_graph,
49
+ get_relationships,
50
+ get_sessions,
51
+ get_skill_usage,
52
+ get_timeline,
53
+ get_tool_usage,
54
+ get_type_distribution,
55
+ search_memories,
56
+ update_memory,
57
+ )
58
+ from logging_config import log_success, log_error
59
+ from models import (
60
+ ChatRequest,
61
+ ChatResponse,
62
+ ConversationSaveRequest,
63
+ ConversationSaveResponse,
64
+ FilterParams,
65
+ MemoryUpdate,
66
+ ProjectInfo,
67
+ ProjectRegistration,
68
+ BatchImageGenerationRequest,
69
+ BatchImageGenerationResponse,
70
+ ImageRefineRequest,
71
+ SingleImageRequestModel,
72
+ SingleImageResponseModel,
73
+ )
74
+ from project_config import (
75
+ load_config,
76
+ add_registered_project,
77
+ remove_registered_project,
78
+ toggle_favorite,
79
+ add_scan_directory,
80
+ remove_scan_directory,
81
+ )
82
+ from project_scanner import scan_projects
83
+ from websocket_manager import manager
84
+ import chat_service
85
+ from image_service import image_service, ImagePreset, SingleImageRequest
86
+ from security import PathValidator, get_cors_config, IS_PRODUCTION
87
+
88
+
89
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Stamp defensive HTTP headers onto every response the app returns."""

    async def dispatch(self, request: Request, call_next) -> Response:
        """Run the downstream handler, then attach the security headers."""
        response = await call_next(request)

        hardening = {
            # Prevent MIME type sniffing
            "X-Content-Type-Options": "nosniff",
            # Prevent clickjacking
            "X-Frame-Options": "DENY",
            # XSS protection (legacy browsers)
            "X-XSS-Protection": "1; mode=block",
            # Content Security Policy. Inline/eval script and inline style are
            # required by Vue and Tailwind; https: images allow AI-generated art.
            "Content-Security-Policy": (
                "default-src 'self'; "
                "script-src 'self' 'unsafe-inline' 'unsafe-eval'; "
                "style-src 'self' 'unsafe-inline'; "
                "img-src 'self' data: blob: https:; "
                "connect-src 'self' ws: wss: https://generativelanguage.googleapis.com; "
                "font-src 'self'; "
                "frame-ancestors 'none';"
            ),
        }
        for header_name, header_value in hardening.items():
            response.headers[header_name] = header_value

        # HSTS (only in production with HTTPS)
        if IS_PRODUCTION and os.getenv("SSL_CERTFILE"):
            response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"

        return response
120
+
121
+
122
def validate_project_path(project: str = Query(..., description="Path to the database file")) -> Path:
    """FastAPI dependency: validate a project database path or abort with 400."""
    try:
        validated = PathValidator.validate_project_path(project)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    return validated
128
+
129
+
130
class DatabaseChangeHandler(FileSystemEventHandler):
    """Handle database file changes for real-time updates.

    Callbacks run on watchdog's observer thread; coroutine work is handed
    to the server's asyncio loop via run_coroutine_threadsafe.
    """

    def __init__(self, ws_manager, loop):
        # WebSocket manager used to broadcast change events to clients.
        self.ws_manager = ws_manager
        # The server's running event loop (callbacks fire off-loop).
        self.loop = loop
        # NOTE(review): run_coroutine_threadsafe returns a
        # concurrent.futures.Future, not an asyncio.Task; .done() exists on
        # both, so the debounce check in on_modified still works as written.
        self._debounce_task: Optional[asyncio.Task] = None
        # Most recently modified DB path, consumed by _debounced_notify.
        self._last_path: Optional[str] = None
        # Per-path activity counters — currently unused, apparently reserved.
        self._last_activity_count: dict[str, int] = {}

    def on_modified(self, event):
        """Watchdog callback: schedule a debounced broadcast for cortex DBs."""
        if event.src_path.endswith("cortex.db") or event.src_path.endswith("global.db"):
            # Debounce rapid changes: only schedule a new notify when the
            # previous one has finished (or none exists yet).
            self._last_path = event.src_path
            if self._debounce_task is None or self._debounce_task.done():
                self._debounce_task = asyncio.run_coroutine_threadsafe(
                    self._debounced_notify(), self.loop
                )

    async def _debounced_notify(self):
        """After a short quiet period, broadcast the change plus fresh data."""
        await asyncio.sleep(0.3)  # Reduced from 0.5s for faster updates
        if self._last_path:
            db_path = self._last_path

            # Broadcast general database change
            await self.ws_manager.broadcast("database_changed", {"path": db_path})

            # Fetch and broadcast latest activities (IndyDevDan pattern)
            try:
                # Get recent activities
                recent = get_activities(db_path, limit=5, offset=0)
                if recent:
                    # Broadcast each new activity; rows may arrive as dicts or
                    # pydantic models depending on the database layer.
                    for activity in recent:
                        await self.ws_manager.broadcast_activity_logged(
                            db_path,
                            activity if isinstance(activity, dict) else activity.model_dump()
                        )

                # Also broadcast session update
                sessions = get_recent_sessions(db_path, limit=1)
                if sessions:
                    session = sessions[0]
                    await self.ws_manager.broadcast_session_updated(
                        db_path,
                        session if isinstance(session, dict) else dict(session)
                    )
            except Exception as e:
                # Best-effort: a failed broadcast must not kill the watcher.
                print(f"[WS] Error broadcasting activities: {e}")
179
+
180
+
181
# File watcher singleton: created/started by lifespan(), stopped on shutdown.
observer: Optional[Observer] = None
183
+
184
+
185
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage file watcher lifecycle.

    Starts a watchdog observer over known project directories on startup and
    guarantees it is stopped and joined on shutdown, even if shutdown raises.
    """
    global observer
    # get_running_loop() is the correct call inside a coroutine;
    # get_event_loop() is deprecated in this context on modern Python.
    loop = asyncio.get_running_loop()
    handler = DatabaseChangeHandler(manager, loop)
    observer = Observer()

    # Watch common project directories. Additional directories may be
    # supplied via OMNI_CORTEX_WATCH_PATHS (os.pathsep-separated), so the
    # hard-coded defaults below remain backward compatible.
    watch_paths = [
        Path.home() / ".omni-cortex",
        Path("D:/Projects"),
    ]
    extra_dirs = os.getenv("OMNI_CORTEX_WATCH_PATHS", "")
    watch_paths.extend(Path(p) for p in extra_dirs.split(os.pathsep) if p)

    for watch_path in watch_paths:
        if watch_path.exists():
            observer.schedule(handler, str(watch_path), recursive=True)
            print(f"[Watcher] Monitoring: {watch_path}")

    observer.start()
    print("[Server] File watcher started")

    try:
        yield
    finally:
        # Always stop the observer thread so the process can exit cleanly,
        # even when shutdown is triggered by an exception.
        observer.stop()
        observer.join()
        print("[Server] File watcher stopped")
212
+
213
+
214
# FastAPI app
# NOTE(review): version "0.1.0" lags the distributed package version (1.6.0)
# — confirm whether this is intentional.
app = FastAPI(
    title="Omni-Cortex Dashboard",
    description="Web dashboard for viewing and managing Omni-Cortex memories",
    version="0.1.0",
    lifespan=lifespan,
)

# Add security headers middleware (MUST come before CORS — Starlette runs
# middleware in reverse registration order, so CORS wraps this one).
app.add_middleware(SecurityHeadersMiddleware)

# Rate limiting (if available). slowapi is optional; when absent the app
# degrades gracefully and `limiter` stays None.
if RATE_LIMITING_AVAILABLE:
    limiter = Limiter(key_func=get_remote_address)
    app.state.limiter = limiter
    app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
else:
    limiter = None

# CORS configuration (environment-aware): origins/methods/headers come from
# security.get_cors_config(), which differs between dev and production.
cors_config = get_cors_config()
app.add_middleware(
    CORSMiddleware,
    allow_origins=cors_config["allow_origins"],
    allow_credentials=True,
    allow_methods=cors_config["allow_methods"],
    allow_headers=cors_config["allow_headers"],
)
242
+
243
# Static files for production build.
# DASHBOARD_DIR resolves to the dashboard root (this file lives in backend/).
DASHBOARD_DIR = Path(__file__).parent.parent
DIST_DIR = DASHBOARD_DIR / "frontend" / "dist"


def setup_static_files():
    """Mount static files if dist directory exists (production build).

    No-op in development where the frontend is served separately.
    """
    if DIST_DIR.exists():
        # Mount assets directory (hashed JS/CSS bundles emitted by the build).
        assets_dir = DIST_DIR / "assets"
        if assets_dir.exists():
            app.mount("/assets", StaticFiles(directory=str(assets_dir)), name="assets")
            print(f"[Static] Serving assets from: {assets_dir}")


# Call setup at module load so the mount exists before the server starts.
setup_static_files()
260
+
261
+
262
+ # --- REST Endpoints ---
263
+
264
+
265
@app.get("/api/projects", response_model=list[ProjectInfo])
async def list_projects():
    """Return every omni-cortex project database discovered on disk."""
    discovered = scan_projects()
    return discovered
269
+
270
+
271
+ # --- Project Management Endpoints ---
272
+
273
+
274
@app.get("/api/projects/config")
async def get_project_config():
    """Summarize the project configuration: scan dirs plus entry counts."""
    config = load_config()
    summary = {
        "scan_directories": config.scan_directories,
        "registered_count": len(config.registered_projects),
        "favorites_count": len(config.favorites),
    }
    return summary
283
+
284
+
285
@app.post("/api/projects/register")
async def register_project(body: ProjectRegistration):
    """Manually register a project by path; 400 on failure."""
    if not add_registered_project(body.path, body.display_name):
        raise HTTPException(400, "Invalid path or already registered")
    return {"success": True}
292
+
293
+
294
@app.delete("/api/projects/register")
async def unregister_project(path: str = Query(..., description="Project path to unregister")):
    """Remove a registered project; 404 when it is not known."""
    if not remove_registered_project(path):
        raise HTTPException(404, "Project not found")
    return {"success": True}
301
+
302
+
303
@app.post("/api/projects/favorite")
async def toggle_project_favorite(path: str = Query(..., description="Project path to toggle favorite")):
    """Flip a project's favorite flag and report the resulting state."""
    return {"is_favorite": toggle_favorite(path)}
308
+
309
+
310
@app.post("/api/projects/scan-directories")
async def add_scan_dir(directory: str = Query(..., description="Directory path to add")):
    """Add a directory to the auto-scan list; 400 on invalid/duplicate."""
    if not add_scan_directory(directory):
        raise HTTPException(400, "Invalid directory or already added")
    return {"success": True}
317
+
318
+
319
@app.delete("/api/projects/scan-directories")
async def remove_scan_dir(directory: str = Query(..., description="Directory path to remove")):
    """Remove a directory from the auto-scan list; 404 when unknown."""
    if not remove_scan_directory(directory):
        raise HTTPException(404, "Directory not found")
    return {"success": True}
326
+
327
+
328
@app.post("/api/projects/refresh")
async def refresh_projects():
    """Force a rescan of all project directories, returning the count found."""
    return {"count": len(scan_projects())}
333
+
334
+
335
@app.get("/api/memories")
async def list_memories(
    project: str = Query(..., description="Path to the database file"),
    memory_type: Optional[str] = Query(None, alias="type"),
    status: Optional[str] = None,
    tags: Optional[str] = None,
    search: Optional[str] = None,
    min_importance: Optional[int] = None,
    max_importance: Optional[int] = None,
    sort_by: str = "last_accessed",
    sort_order: str = "desc",
    limit: int = 50,
    offset: int = 0,
):
    """Get memories with filtering and pagination.

    Raises:
        HTTPException: 404 when the database file does not exist.
    """
    try:
        if not Path(project).exists():
            log_error("/api/memories", FileNotFoundError("Database not found"), project=project)
            raise HTTPException(status_code=404, detail="Database not found")

        filters = FilterParams(
            memory_type=memory_type,
            status=status,
            # Tags arrive as a single comma-separated query string.
            tags=tags.split(",") if tags else None,
            search=search,
            min_importance=min_importance,
            max_importance=max_importance,
            sort_by=sort_by,
            sort_order=sort_order,
            limit=limit,
            offset=offset,
        )

        memories = get_memories(project, filters)
        log_success("/api/memories", count=len(memories), offset=offset, filters=bool(search or memory_type))
        return memories
    except HTTPException:
        # Fix: re-raise HTTP errors untouched, consistent with the update/
        # delete/chat endpoints — previously the 404 above fell into the
        # generic handler and was logged twice.
        raise
    except Exception as e:
        log_error("/api/memories", e, project=project)
        raise
374
+
375
+
376
# NOTE: These routes MUST be defined before /api/memories/{memory_id} to avoid path conflicts
@app.get("/api/memories/needs-review")
async def get_memories_needing_review_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days_threshold: int = 30,
    limit: int = 50,
):
    """List memories whose freshness may warrant a human review."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_memories_needing_review(project, days_threshold, limit)
388
+
389
+
390
@app.post("/api/memories/bulk-update-status")
async def bulk_update_status_endpoint(
    project: str = Query(..., description="Path to the database file"),
    # NOTE(review): a bare list[str] parameter is read by FastAPI from the
    # JSON request body, not the query string — confirm callers send it there.
    # The mutable [] default is tolerable only because FastAPI copies
    # declared defaults per request; a plain Python call would share it.
    memory_ids: list[str] = [],
    status: str = "fresh",
):
    """Update status for multiple memories at once.

    Raises:
        HTTPException: 404 for a missing database, 400 for a bad status.
    """
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Whitelist of statuses accepted by the memories schema.
    valid_statuses = ["fresh", "needs_review", "outdated", "archived"]
    if status not in valid_statuses:
        raise HTTPException(status_code=400, detail=f"Invalid status. Must be one of: {valid_statuses}")

    count = bulk_update_memory_status(project, memory_ids, status)

    # Notify connected clients
    await manager.broadcast("memories_bulk_updated", {"count": count, "status": status})

    return {"updated_count": count, "status": status}
410
+
411
+
412
@app.get("/api/memories/{memory_id}")
async def get_memory(
    memory_id: str,
    project: str = Query(..., description="Path to the database file"),
):
    """Fetch one memory by ID, 404-ing when DB or memory is missing."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    memory = get_memory_by_id(project, memory_id)
    if memory:
        return memory
    raise HTTPException(status_code=404, detail="Memory not found")
425
+
426
+
427
@app.put("/api/memories/{memory_id}")
async def update_memory_endpoint(
    memory_id: str,
    updates: MemoryUpdate,
    project: str = Query(..., description="Path to the database file"),
):
    """Apply a partial update to one memory and broadcast the new state."""
    try:
        if not Path(project).exists():
            log_error("/api/memories/update", FileNotFoundError("Database not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Database not found")

        updated = update_memory(project, memory_id, updates)
        if not updated:
            log_error("/api/memories/update", ValueError("Memory not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Memory not found")

        # Push the refreshed memory to all connected dashboard clients.
        await manager.broadcast("memory_updated", updated.model_dump(by_alias=True))
        changed_fields = updates.model_dump(exclude_unset=True)
        log_success("/api/memories/update", memory_id=memory_id, fields_updated=len(changed_fields))
        return updated
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/memories/update", e, memory_id=memory_id)
        raise
453
+
454
+
455
@app.delete("/api/memories/{memory_id}")
async def delete_memory_endpoint(
    memory_id: str,
    project: str = Query(..., description="Path to the database file"),
):
    """Delete one memory and broadcast its removal to connected clients."""
    try:
        if not Path(project).exists():
            log_error("/api/memories/delete", FileNotFoundError("Database not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Database not found")

        if not delete_memory(project, memory_id):
            log_error("/api/memories/delete", ValueError("Memory not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Memory not found")

        # Tell every connected dashboard the memory is gone.
        await manager.broadcast("memory_deleted", {"id": memory_id})
        log_success("/api/memories/delete", memory_id=memory_id)
        return {"message": "Memory deleted", "id": memory_id}
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/memories/delete", e, memory_id=memory_id)
        raise
480
+
481
+
482
@app.get("/api/memories/stats/summary")
async def memory_stats(
    project: str = Query(..., description="Path to the database file"),
):
    """Return aggregate memory statistics for one project database."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_memory_stats(project)
491
+
492
+
493
@app.get("/api/search")
async def search(
    q: str = Query(..., min_length=1),
    project: str = Query(..., description="Path to the database file"),
    limit: int = 20,
):
    """Full-text search across memories in the given project."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return search_memories(project, q, limit)
504
+
505
+
506
@app.get("/api/activities")
async def list_activities(
    project: str = Query(..., description="Path to the database file"),
    event_type: Optional[str] = None,
    tool_name: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
):
    """Return paginated activity-log entries, optionally filtered."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Older databases may lack the summary columns; migrate before reading.
    ensure_migrations(project)

    return get_activities(project, event_type, tool_name, limit, offset)
522
+
523
+
524
@app.get("/api/timeline")
async def get_timeline_view(
    project: str = Query(..., description="Path to the database file"),
    hours: int = 24,
    include_memories: bool = True,
    include_activities: bool = True,
):
    """Return a merged timeline of recent memories and/or activities."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_timeline(project, hours, include_memories, include_activities)
536
+
537
+
538
@app.get("/api/tags")
async def list_tags(
    project: str = Query(..., description="Path to the database file"),
):
    """Return every tag in the project together with usage counts."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_all_tags(project)
547
+
548
+
549
@app.get("/api/types")
async def list_types(
    project: str = Query(..., description="Path to the database file"),
):
    """Return the distribution of memory types for one project."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_type_distribution(project)
558
+
559
+
560
@app.get("/api/sessions")
async def list_sessions(
    project: str = Query(..., description="Path to the database file"),
    limit: int = 20,
):
    """Return the most recent sessions recorded for one project."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_sessions(project, limit)
570
+
571
+
572
+ # --- Stats Endpoints for Charts ---
573
+
574
+
575
@app.get("/api/stats/activity-heatmap")
async def get_activity_heatmap_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days: int = 90,
):
    """Return per-day activity counts for heatmap rendering."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_activity_heatmap(project, days)
585
+
586
+
587
@app.get("/api/stats/tool-usage")
async def get_tool_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    limit: int = 10,
):
    """Return the most-used tools for one project."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_tool_usage(project, limit)
597
+
598
+
599
@app.get("/api/stats/memory-growth")
async def get_memory_growth_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days: int = 30,
):
    """Return memory-creation counts over the trailing window of days."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_memory_growth(project, days)
609
+
610
+
611
+ # --- Command Analytics Endpoints ---
612
+
613
+
614
@app.get("/api/stats/command-usage")
async def get_command_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    scope: Optional[str] = Query(None, description="Filter by scope: 'universal' or 'project'"),
    days: int = Query(30, ge=1, le=365),
):
    """Return slash-command usage statistics, optionally scoped."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_command_usage(project, scope, days)
625
+
626
+
627
@app.get("/api/stats/skill-usage")
async def get_skill_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    scope: Optional[str] = Query(None, description="Filter by scope: 'universal' or 'project'"),
    days: int = Query(30, ge=1, le=365),
):
    """Return skill usage statistics, optionally scoped."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_skill_usage(project, scope, days)
638
+
639
+
640
@app.get("/api/stats/mcp-usage")
async def get_mcp_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days: int = Query(30, ge=1, le=365),
):
    """Return MCP server usage statistics over the trailing window."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_mcp_usage(project, days)
650
+
651
+
652
@app.get("/api/activities/{activity_id}")
async def get_activity_detail_endpoint(
    activity_id: str,
    project: str = Query(..., description="Path to the database file"),
):
    """Return one activity with its complete input/output payloads."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Summary columns may be missing on older databases; migrate first.
    ensure_migrations(project)

    activity = get_activity_detail(project, activity_id)
    if activity:
        return activity
    raise HTTPException(status_code=404, detail="Activity not found")
669
+
670
+
671
@app.post("/api/activities/backfill-summaries")
async def backfill_activity_summaries_endpoint(
    project: str = Query(..., description="Path to the database file"),
):
    """Generate summaries for existing activities that don't have them.

    Raises:
        HTTPException: 404 for a missing database, 500 when the backfill fails.
    """
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    try:
        # Imported lazily so the heavier backfill machinery only loads on demand.
        from backfill_summaries import backfill_all
        results = backfill_all(project)
        return {
            "success": True,
            "summaries_updated": results["summaries"],
            "mcp_servers_updated": results["mcp_servers"],
        }
    except Exception as e:
        # Fix: chain with `from e` so the original traceback (the real cause)
        # is preserved in server logs instead of being discarded.
        raise HTTPException(status_code=500, detail=f"Backfill failed: {str(e)}") from e
689
+
690
+
691
+ # --- Session Context Endpoints ---
692
+
693
+
694
@app.get("/api/sessions/recent")
async def get_recent_sessions_endpoint(
    project: str = Query(..., description="Path to the database file"),
    limit: int = 5,
):
    """Return the latest sessions together with their summaries."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_recent_sessions(project, limit)
704
+
705
+
706
+ # --- Relationship Graph Endpoints ---
707
+
708
+
709
@app.get("/api/relationships")
async def get_relationships_endpoint(
    project: str = Query(..., description="Path to the database file"),
    memory_id: Optional[str] = None,
):
    """Return memory relationships for graph visualization."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_relationships(project, memory_id)
719
+
720
+
721
@app.get("/api/relationships/graph")
async def get_relationship_graph_endpoint(
    project: str = Query(..., description="Path to the database file"),
    center_id: Optional[str] = None,
    depth: int = 2,
):
    """Return graph data centered on one memory with configurable depth."""
    db_file = Path(project)
    if not db_file.exists():
        raise HTTPException(status_code=404, detail="Database not found")
    return get_relationship_graph(project, center_id, depth)
732
+
733
+
734
+ # --- Chat Endpoint ---
735
+
736
+
737
@app.get("/api/chat/status")
async def chat_status():
    """Check if chat service is available.

    Returns an availability flag plus a human-readable hint.
    """
    # Evaluate availability once instead of once per response field.
    available = chat_service.is_available()
    return {
        "available": available,
        "message": "Chat is available" if available else "Set GEMINI_API_KEY environment variable to enable chat",
    }
744
+
745
+
746
@app.post("/api/chat", response_model=ChatResponse)
async def chat_with_memories(
    request: ChatRequest,
    project: str = Query(..., description="Path to the database file"),
):
    """Answer a natural-language question grounded in the project's memories."""
    try:
        if not Path(project).exists():
            log_error("/api/chat", FileNotFoundError("Database not found"), question=request.question[:50])
            raise HTTPException(status_code=404, detail="Database not found")

        answer = await chat_service.ask_about_memories(
            project,
            request.question,
            request.max_memories,
        )

        source_count = len(answer.get("sources", []))
        log_success("/api/chat", question_len=len(request.question), sources=source_count)
        return ChatResponse(**answer)
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/chat", e, question=request.question[:50])
        raise
770
+
771
+
772
@app.get("/api/chat/stream")
async def stream_chat(
    project: str = Query(..., description="Path to the database file"),
    question: str = Query(..., description="The question to ask"),
    max_memories: int = Query(10, ge=1, le=50),
):
    """Stream a chat answer to the client as Server-Sent Events."""
    from fastapi.responses import StreamingResponse

    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    async def event_generator():
        # Each chat event becomes one SSE data frame.
        try:
            stream = chat_service.stream_ask_about_memories(project, question, max_memories)
            async for event in stream:
                yield f"data: {json.dumps(event)}\n\n"
        except Exception as e:
            # Surface failures to the client as an in-band error event.
            yield f"data: {json.dumps({'type': 'error', 'data': str(e)})}\n\n"

    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers=sse_headers,
    )
800
+
801
+
802
@app.post("/api/chat/save", response_model=ConversationSaveResponse)
async def save_chat_conversation(
    request: ConversationSaveRequest,
    project: str = Query(..., description="Path to the database file"),
):
    """Persist a chat conversation as a new memory."""
    try:
        if not Path(project).exists():
            log_error("/api/chat/save", FileNotFoundError("Database not found"))
            raise HTTPException(status_code=404, detail="Database not found")

        serialized_messages = [msg.model_dump() for msg in request.messages]
        saved = await chat_service.save_conversation(
            project,
            serialized_messages,
            request.referenced_memory_ids,
            request.importance or 60,
        )

        log_success("/api/chat/save", memory_id=saved["memory_id"], messages=len(request.messages))
        return ConversationSaveResponse(**saved)
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/chat/save", e)
        raise
827
+
828
+
829
+ # --- Image Generation Endpoints ---
830
+
831
+
832
@app.get("/api/image/status")
async def get_image_status():
    """Check if image generation is available.

    Returns an availability flag plus a human-readable hint.
    """
    # Evaluate availability once instead of once per response field.
    ready = image_service.is_available()
    return {
        "available": ready,
        "message": "Image generation ready" if ready
        else "Configure GEMINI_API_KEY and install google-genai for image generation",
    }
840
+
841
+
842
@app.get("/api/image/presets")
async def get_image_presets():
    """Expose the preset templates the image service can render."""
    presets = image_service.get_presets()
    return {"presets": presets}
846
+
847
+
848
@app.post("/api/image/generate-batch", response_model=BatchImageGenerationResponse)
async def generate_images_batch(
    request: BatchImageGenerationRequest,
    db_path: str = Query(..., alias="project", description="Path to the database file"),
):
    """Generate multiple images with different presets/prompts.

    Builds optional memory/chat context, fans the per-image requests out to
    the image service, and maps the results back to response models.
    NOTE(review): unlike the other endpoints, db_path is not checked for
    existence before use — confirm build_memory_context tolerates a missing DB.
    """
    # Validate image count — only 1, 2, or 4 images per batch are supported.
    if len(request.images) not in [1, 2, 4]:
        return BatchImageGenerationResponse(
            success=False,
            errors=["Must request 1, 2, or 4 images"]
        )

    # Build memory context (prompt text derived from referenced memories).
    memory_context = ""
    if request.memory_ids:
        memory_context = image_service.build_memory_context(db_path, request.memory_ids)

    # Build chat context from the accompanying conversation, if any.
    chat_context = image_service.build_chat_context(request.chat_messages)

    # Convert API request models to the image service's internal format.
    image_requests = [
        SingleImageRequest(
            preset=ImagePreset(img.preset),
            custom_prompt=img.custom_prompt,
            aspect_ratio=img.aspect_ratio,
            image_size=img.image_size
        )
        for img in request.images
    ]

    result = await image_service.generate_batch(
        requests=image_requests,
        memory_context=memory_context,
        chat_context=chat_context,
        use_search_grounding=request.use_search_grounding
    )

    # Map internal results back onto the public response models one-to-one.
    return BatchImageGenerationResponse(
        success=result.success,
        images=[
            SingleImageResponseModel(
                success=img.success,
                image_data=img.image_data,
                text_response=img.text_response,
                thought_signature=img.thought_signature,
                image_id=img.image_id,
                error=img.error,
                index=img.index
            )
            for img in result.images
        ],
        errors=result.errors
    )
903
+
904
+
905
@app.post("/api/image/refine", response_model=SingleImageResponseModel)
async def refine_image(request: ImageRefineRequest):
    """Refine an existing generated image with a new prompt."""
    refined = await image_service.refine_image(
        image_id=request.image_id,
        refinement_prompt=request.refinement_prompt,
        aspect_ratio=request.aspect_ratio,
        image_size=request.image_size,
    )

    # Translate the service result into the API response model.
    response_fields = {
        "success": refined.success,
        "image_data": refined.image_data,
        "text_response": refined.text_response,
        "thought_signature": refined.thought_signature,
        "image_id": refined.image_id,
        "error": refined.error,
    }
    return SingleImageResponseModel(**response_fields)
923
+
924
+
925
@app.post("/api/image/clear-conversation")
async def clear_image_conversation(image_id: Optional[str] = None):
    """Clear image conversation history. If image_id provided, clear only that image."""
    # A None image_id clears every conversation; otherwise only the one given.
    image_service.clear_conversation(image_id)
    return {"status": "cleared", "image_id": image_id}
930
+
931
+
932
+ # --- WebSocket Endpoint ---
933
+
934
+
935
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for real-time updates."""
    client_id = await manager.connect(websocket)
    try:
        # Confirm the connection to the client immediately.
        await manager.send_to_client(client_id, "connected", {"client_id": client_id})

        # Pump incoming messages until the client disconnects;
        # answer "ping" frames with a "pong" event to keep the link alive.
        while True:
            message = await websocket.receive_text()
            if message == "ping":
                await manager.send_to_client(client_id, "pong", {})
    except WebSocketDisconnect:
        await manager.disconnect(client_id)
    except Exception as e:
        print(f"[WS] Error: {e}")
        await manager.disconnect(client_id)
954
+
955
+
956
+ # --- Export Endpoints ---
957
+
958
+
959
@app.get("/api/export")
async def export_memories(
    project: str = Query(..., description="Path to the database file"),
    format: str = Query("json", description="Export format: json, markdown, csv"),
    memory_ids: Optional[str] = Query(None, description="Comma-separated memory IDs to export, or all if empty"),
    include_relationships: bool = Query(True, description="Include memory relationships"),
):
    """Export memories to specified format.

    Raises:
        HTTPException: 404 if the database file is missing,
            400 for an unsupported ``format`` value.
    """
    from fastapi.responses import Response
    import csv
    import io

    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Resolve the set of memories to export.
    if memory_ids:
        # Fix: strip each ID before lookup so "a, b" resolves the same as "a,b"
        # (previously whitespace was only checked, not removed, so padded IDs
        # were passed verbatim to get_memory_by_id and silently missed).
        ids = [mid.strip() for mid in memory_ids.split(",") if mid.strip()]
        memories = [m for m in (get_memory_by_id(project, mid) for mid in ids) if m is not None]
    else:
        from models import FilterParams
        filters = FilterParams(limit=1000, offset=0, sort_by="created_at", sort_order="desc")
        memories = get_memories(project, filters)

    # Relationships are optional and fetched only when requested.
    relationships = []
    if include_relationships:
        relationships = get_relationships(project)

    # One timestamp for the download filename, shared by every branch.
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')

    if format == "json":
        export_data = {
            "exported_at": datetime.now().isoformat(),
            "project": project,
            "memory_count": len(memories),
            "memories": [m.model_dump(by_alias=True) for m in memories],
            "relationships": relationships if include_relationships else [],
        }
        return Response(
            content=json.dumps(export_data, indent=2, default=str),
            media_type="application/json",
            headers={"Content-Disposition": f"attachment; filename=memories_export_{timestamp}.json"},
        )

    elif format == "markdown":
        md_lines = [
            "# Omni-Cortex Memory Export",
            "",
            f"**Exported:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
            f"**Total Memories:** {len(memories)}",
            "",
            "---",
            "",
        ]
        for m in memories:
            md_lines.extend([
                f"## {m.type.title()}: {m.content[:50]}{'...' if len(m.content) > 50 else ''}",
                "",
                f"**ID:** `{m.id}`",
                f"**Type:** {m.type}",
                f"**Status:** {m.status}",
                f"**Importance:** {m.importance_score}",
                f"**Created:** {m.created_at}",
                f"**Tags:** {', '.join(m.tags) if m.tags else 'None'}",
                "",
                "### Content",
                "",
                m.content,
                "",
                "### Context",
                "",
                m.context or "_No context_",
                "",
                "---",
                "",
            ])
        return Response(
            content="\n".join(md_lines),
            media_type="text/markdown",
            headers={"Content-Disposition": f"attachment; filename=memories_export_{timestamp}.md"},
        )

    elif format == "csv":
        output = io.StringIO()
        writer = csv.writer(output)
        writer.writerow(["id", "type", "status", "importance", "content", "context", "tags", "created_at", "last_accessed"])
        for m in memories:
            writer.writerow([
                m.id,
                m.type,
                m.status,
                m.importance_score,
                m.content,
                m.context or "",
                ",".join(m.tags) if m.tags else "",
                m.created_at,
                m.last_accessed or "",
            ])
        return Response(
            content=output.getvalue(),
            media_type="text/csv",
            headers={"Content-Disposition": f"attachment; filename=memories_export_{timestamp}.csv"},
        )

    else:
        raise HTTPException(status_code=400, detail=f"Unsupported format: {format}. Use json, markdown, or csv.")
1065
+
1066
+
1067
+ # --- Health Check ---
1068
+
1069
+
1070
@app.get("/health")
async def health_check():
    """Health check endpoint."""
    status_report = {
        "status": "healthy",
        "websocket_connections": manager.connection_count,
    }
    return status_report
1077
+
1078
+
1079
+ # --- Static File Serving (SPA) ---
1080
+ # These routes must come AFTER all API routes
1081
+
1082
+
1083
@app.get("/")
async def serve_root():
    """Serve the frontend index.html."""
    index_file = DIST_DIR / "index.html"
    # Without a built frontend, fall back to a small API pointer payload.
    if not index_file.exists():
        return {"message": "Omni-Cortex Dashboard API", "docs": "/docs"}
    return FileResponse(str(index_file))
1090
+
1091
+
1092
@app.get("/{path:path}")
async def serve_spa(path: str):
    """Catch-all route to serve SPA for client-side routing with path traversal protection."""
    # Reserved prefixes belong to the API/docs layer, never the SPA.
    reserved_prefixes = ("api/", "ws", "health", "docs", "openapi", "redoc")
    if path.startswith(reserved_prefixes):
        raise HTTPException(status_code=404, detail="Not found")

    # Serve real static assets only after traversal validation.
    safe_path = PathValidator.is_safe_static_path(DIST_DIR, path)
    if safe_path:
        return FileResponse(str(safe_path))

    # Anything else falls back to index.html so client-side routes resolve.
    index_file = DIST_DIR / "index.html"
    if not index_file.exists():
        raise HTTPException(status_code=404, detail="Not found")
    return FileResponse(str(index_file))
1110
+
1111
+
1112
def run():
    """Run the dashboard server."""
    # NOTE(review): binds 0.0.0.0 (all interfaces) with reload enabled —
    # confirm this is intended outside local development.
    server_options = {
        "host": "0.0.0.0",
        "port": 8765,
        "reload": True,
        "reload_dirs": [str(Path(__file__).parent)],
    }
    uvicorn.run("main:app", **server_options)
1121
+
1122
+
1123
# Allow running this module directly (e.g. `python main.py`).
if __name__ == "__main__":
    run()