omni-cortex 1.17.2-py3-none-any.whl → 1.17.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omni_cortex/_bundled/dashboard/backend/.env.example +12 -0
- omni_cortex/_bundled/dashboard/backend/backfill_summaries.py +280 -0
- omni_cortex/_bundled/dashboard/backend/chat_service.py +631 -0
- omni_cortex/_bundled/dashboard/backend/database.py +1773 -0
- omni_cortex/_bundled/dashboard/backend/image_service.py +552 -0
- omni_cortex/_bundled/dashboard/backend/logging_config.py +122 -0
- omni_cortex/_bundled/dashboard/backend/main.py +1888 -0
- omni_cortex/_bundled/dashboard/backend/models.py +472 -0
- omni_cortex/_bundled/dashboard/backend/project_config.py +170 -0
- omni_cortex/_bundled/dashboard/backend/project_scanner.py +164 -0
- omni_cortex/_bundled/dashboard/backend/prompt_security.py +111 -0
- omni_cortex/_bundled/dashboard/backend/pyproject.toml +23 -0
- omni_cortex/_bundled/dashboard/backend/security.py +104 -0
- omni_cortex/_bundled/dashboard/backend/uv.lock +1110 -0
- omni_cortex/_bundled/dashboard/backend/websocket_manager.py +104 -0
- omni_cortex/_bundled/hooks/post_tool_use.py +497 -0
- omni_cortex/_bundled/hooks/pre_tool_use.py +277 -0
- omni_cortex/_bundled/hooks/session_utils.py +186 -0
- omni_cortex/_bundled/hooks/stop.py +219 -0
- omni_cortex/_bundled/hooks/subagent_stop.py +120 -0
- omni_cortex/_bundled/hooks/user_prompt.py +220 -0
- omni_cortex/dashboard.py +10 -4
- omni_cortex/setup.py +14 -8
- {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.3.dist-info}/METADATA +1 -1
- {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.3.dist-info}/RECORD +49 -28
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/main.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/stop.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.17.2.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
- {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.3.dist-info}/WHEEL +0 -0
- {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.3.dist-info}/entry_points.txt +0 -0
- {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.3.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,1888 @@
"""FastAPI backend for Omni-Cortex Web Dashboard."""
# Trigger reload for relationship graph column fix

import asyncio
import json
import os
import traceback
from contextlib import asynccontextmanager
from datetime import datetime
from pathlib import Path
from typing import Optional

import uvicorn
from fastapi import FastAPI, HTTPException, Query, WebSocket, WebSocketDisconnect, Request, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse, Response
from starlette.middleware.base import BaseHTTPMiddleware
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer

# Rate limiting imports (optional - graceful degradation if not installed)
try:
    from slowapi import Limiter, _rate_limit_exceeded_handler
    from slowapi.util import get_remote_address
    from slowapi.errors import RateLimitExceeded
    RATE_LIMITING_AVAILABLE = True
except ImportError:
    RATE_LIMITING_AVAILABLE = False
    Limiter = None

from database import (
    bulk_update_memory_status,
    create_memory,
    delete_memory,
    delete_user_message,
    delete_user_messages_bulk,
    ensure_migrations,
    get_activities,
    get_activity_detail,
    get_activity_heatmap,
    get_agents,
    get_agent_by_id,
    get_agent_files_touched,
    get_agent_parent,
    get_agent_tool_breakdown,
    get_all_tags,
    get_command_usage,
    get_mcp_usage,
    get_memories,
    get_memories_needing_review,
    get_memory_by_id,
    get_memory_growth,
    get_memory_stats,
    get_recent_sessions,
    get_relationship_graph,
    get_relationships,
    get_sessions,
    get_skill_usage,
    get_style_profile,
    get_style_samples,
    get_style_samples_by_category,
    compute_style_profile_from_messages,
    get_timeline,
    get_tool_usage,
    get_type_distribution,
    get_user_message_count,
    get_user_messages,
    search_memories,
    update_memory,
)
from logging_config import log_success, log_error
from models import (
    AggregateChatRequest,
    AggregateMemoryRequest,
    AggregateStatsRequest,
    AggregateStatsResponse,
    BatchImageGenerationRequest,
    BatchImageGenerationResponse,
    BulkDeleteRequest,
    ChatRequest,
    ChatResponse,
    ChatSource,  # needed by the ChatResponse sources in /api/aggregate/chat
    ComposeRequest,
    ComposeResponse,
    ConversationSaveRequest,
    ConversationSaveResponse,
    FilterParams,
    ImageRefineRequest,
    MemoryCreateRequest,
    MemoryUpdate,
    ProjectInfo,
    ProjectRegistration,
    SingleImageRequestModel,
    SingleImageResponseModel,
    StyleProfile,
    StyleSample,
    UserMessage,
    UserMessagesResponse,
)
from project_config import (
    load_config,
    add_registered_project,
    remove_registered_project,
    toggle_favorite,
    add_scan_directory,
    remove_scan_directory,
)
from project_scanner import scan_projects
from websocket_manager import manager
import chat_service
from image_service import image_service, ImagePreset, SingleImageRequest
from security import PathValidator, get_cors_config, IS_PRODUCTION


class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Add security headers to all responses."""

    async def dispatch(self, request: Request, call_next) -> Response:
        response = await call_next(request)

        # Prevent MIME type sniffing
        response.headers["X-Content-Type-Options"] = "nosniff"

        # Prevent clickjacking
        response.headers["X-Frame-Options"] = "DENY"

        # XSS protection (legacy browsers)
        response.headers["X-XSS-Protection"] = "1; mode=block"

        # Content Security Policy
        response.headers["Content-Security-Policy"] = (
            "default-src 'self'; "
            "script-src 'self' 'unsafe-inline' 'unsafe-eval'; "  # Vue needs these
            "style-src 'self' 'unsafe-inline'; "  # Tailwind needs inline
            "img-src 'self' data: blob: https:; "  # Allow AI-generated images
            "connect-src 'self' ws: wss: https://generativelanguage.googleapis.com; "
            "font-src 'self'; "
            "frame-ancestors 'none';"
        )

        # HSTS (only in production with HTTPS)
        if IS_PRODUCTION and os.getenv("SSL_CERTFILE"):
            response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"

        return response
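
A quick way to confirm the middleware's behavior is FastAPI's test client; a minimal sketch, assuming this module is importable as `main` and httpx is installed (the asserted values mirror the headers set above):

# Sketch: exercise SecurityHeadersMiddleware via the test client.
from fastapi.testclient import TestClient
from main import app

client = TestClient(app)
resp = client.get("/api/projects")
assert resp.headers["X-Content-Type-Options"] == "nosniff"
assert resp.headers["X-Frame-Options"] == "DENY"
assert "frame-ancestors 'none'" in resp.headers["Content-Security-Policy"]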


def validate_project_path(project: str = Query(..., description="Path to the database file")) -> Path:
    """Validate project database path - dependency for endpoints."""
    try:
        return PathValidator.validate_project_path(project)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
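
`validate_project_path` is written as a FastAPI dependency, so a route can delegate path validation instead of checking inline; a minimal sketch with a hypothetical route (assuming the `app` created further down this file — most endpoints below validate inline instead):

# Sketch: a bad path becomes a 400 before the handler body ever runs.
@app.get("/api/validated-example")  # hypothetical route, for illustration only
async def validated_example(db_path: Path = Depends(validate_project_path)):
    return {"database": str(db_path)}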


class DatabaseChangeHandler(FileSystemEventHandler):
    """Handle database file changes for real-time updates."""

    def __init__(self, ws_manager, loop):
        self.ws_manager = ws_manager
        self.loop = loop
        self._debounce_task: Optional[asyncio.Task] = None
        self._last_path: Optional[str] = None
        self._last_activity_count: dict[str, int] = {}

    def on_modified(self, event):
        if event.src_path.endswith("cortex.db") or event.src_path.endswith("global.db"):
            # Debounce rapid changes
            self._last_path = event.src_path
            if self._debounce_task is None or self._debounce_task.done():
                self._debounce_task = asyncio.run_coroutine_threadsafe(
                    self._debounced_notify(), self.loop
                )

    async def _debounced_notify(self):
        await asyncio.sleep(0.3)  # Reduced from 0.5s for faster updates
        if self._last_path:
            db_path = self._last_path

            # Broadcast general database change
            await self.ws_manager.broadcast("database_changed", {"path": db_path})

            # Fetch and broadcast latest activities (IndyDevDan pattern)
            try:
                # Get recent activities
                recent = get_activities(db_path, limit=5, offset=0)
                if recent:
                    # Broadcast each new activity
                    for activity in recent:
                        await self.ws_manager.broadcast_activity_logged(
                            db_path,
                            activity if isinstance(activity, dict) else activity.model_dump()
                        )

                # Also broadcast session update
                sessions = get_recent_sessions(db_path, limit=1)
                if sessions:
                    session = sessions[0]
                    await self.ws_manager.broadcast_session_updated(
                        db_path,
                        session if isinstance(session, dict) else dict(session)
                    )
            except Exception as e:
                print(f"[WS] Error broadcasting activities: {e}")
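
watchdog invokes `on_modified` on its observer thread, where no event loop is running, so the handler hands the coroutine to the server's loop with `run_coroutine_threadsafe`. A stripped-down sketch of that thread-to-loop pattern, independent of watchdog:

# Sketch: scheduling a coroutine on a running loop from a plain thread.
import asyncio
import threading

async def notify():
    print("runs on the event loop thread")

def worker(loop: asyncio.AbstractEventLoop):
    # Plain-thread context, like watchdog's observer thread.
    asyncio.run_coroutine_threadsafe(notify(), loop)

async def main():
    loop = asyncio.get_running_loop()
    thread = threading.Thread(target=worker, args=(loop,))
    thread.start()
    thread.join()
    await asyncio.sleep(0.1)  # let the scheduled coroutine run

asyncio.run(main())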


# File watcher
observer: Optional[Observer] = None


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage file watcher lifecycle."""
    global observer
    loop = asyncio.get_event_loop()
    handler = DatabaseChangeHandler(manager, loop)
    observer = Observer()

    # Watch common project directories
    watch_paths = [
        Path.home() / ".omni-cortex",
        Path("D:/Projects"),
    ]

    for watch_path in watch_paths:
        if watch_path.exists():
            observer.schedule(handler, str(watch_path), recursive=True)
            print(f"[Watcher] Monitoring: {watch_path}")

    observer.start()
    print("[Server] File watcher started")

    yield

    observer.stop()
    observer.join()
    print("[Server] File watcher stopped")


# FastAPI app
app = FastAPI(
    title="Omni-Cortex Dashboard",
    description="Web dashboard for viewing and managing Omni-Cortex memories",
    version="0.1.0",
    lifespan=lifespan,
)

# Add security headers middleware (MUST come before CORS)
app.add_middleware(SecurityHeadersMiddleware)

# Rate limiting (if available)
if RATE_LIMITING_AVAILABLE:
    limiter = Limiter(key_func=get_remote_address)
    app.state.limiter = limiter
    app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
else:
    limiter = None


def rate_limit(limit_string: str):
    """Decorator for conditional rate limiting.

    Returns the actual limiter decorator if available, otherwise a no-op.
    Usage: @rate_limit("10/minute")
    """
    if limiter is not None:
        return limiter.limit(limit_string)
    # No-op decorator when rate limiting is not available
    def noop_decorator(func):
        return func
    return noop_decorator
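
Per the docstring, `rate_limit` sits between the route decorator and the handler; a toy example on a hypothetical route (slowapi keys clients by IP via `get_remote_address` and expects a parameter named `request`):

# Sketch: conditional rate limiting on a hypothetical endpoint. With slowapi
# installed this allows 10 requests/minute per client IP; without it,
# noop_decorator leaves the endpoint unthrottled.
@app.get("/api/ping")
@rate_limit("10/minute")
async def ping(request: Request):
    return {"pong": True}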

# CORS configuration (environment-aware)
cors_config = get_cors_config()
app.add_middleware(
    CORSMiddleware,
    allow_origins=cors_config["allow_origins"],
    allow_credentials=True,
    allow_methods=cors_config["allow_methods"],
    allow_headers=cors_config["allow_headers"],
)

# Static files for production build
DASHBOARD_DIR = Path(__file__).parent.parent
DIST_DIR = DASHBOARD_DIR / "frontend" / "dist"


def setup_static_files():
    """Mount static files if dist directory exists (production build)."""
    if DIST_DIR.exists():
        # Mount assets directory
        assets_dir = DIST_DIR / "assets"
        if assets_dir.exists():
            app.mount("/assets", StaticFiles(directory=str(assets_dir)), name="assets")
            print(f"[Static] Serving assets from: {assets_dir}")


# Call setup at module load
setup_static_files()


# --- REST Endpoints ---


@app.get("/api/projects", response_model=list[ProjectInfo])
async def list_projects():
    """List all discovered omni-cortex project databases."""
    return scan_projects()


# --- Project Management Endpoints ---


@app.get("/api/projects/config")
async def get_project_config():
    """Get project configuration (scan dirs, counts)."""
    config = load_config()
    return {
        "scan_directories": config.scan_directories,
        "registered_count": len(config.registered_projects),
        "favorites_count": len(config.favorites),
    }


@app.post("/api/projects/register")
async def register_project(body: ProjectRegistration):
    """Manually register a project by path."""
    success = add_registered_project(body.path, body.display_name)
    if not success:
        raise HTTPException(400, "Invalid path or already registered")
    return {"success": True}


@app.delete("/api/projects/register")
async def unregister_project(path: str = Query(..., description="Project path to unregister")):
    """Remove a registered project."""
    success = remove_registered_project(path)
    if not success:
        raise HTTPException(404, "Project not found")
    return {"success": True}


@app.post("/api/projects/favorite")
async def toggle_project_favorite(path: str = Query(..., description="Project path to toggle favorite")):
    """Toggle favorite status for a project."""
    is_favorite = toggle_favorite(path)
    return {"is_favorite": is_favorite}


@app.post("/api/projects/scan-directories")
async def add_scan_dir(directory: str = Query(..., description="Directory path to add")):
    """Add a directory to auto-scan list."""
    success = add_scan_directory(directory)
    if not success:
        raise HTTPException(400, "Invalid directory or already added")
    return {"success": True}


@app.delete("/api/projects/scan-directories")
async def remove_scan_dir(directory: str = Query(..., description="Directory path to remove")):
    """Remove a directory from auto-scan list."""
    success = remove_scan_directory(directory)
    if not success:
        raise HTTPException(404, "Directory not found")
    return {"success": True}


@app.post("/api/projects/refresh")
async def refresh_projects():
    """Force rescan of all project directories."""
    projects = scan_projects()
    return {"count": len(projects)}


# --- Aggregate Multi-Project Endpoints ---


@app.post("/api/aggregate/memories")
@rate_limit("50/minute")
async def get_aggregate_memories(request: AggregateMemoryRequest):
    """Get memories from multiple projects with project attribution."""
    try:
        all_memories = []
        filters = request.filters or FilterParams()

        for project_path in request.projects:
            if not Path(project_path).exists():
                continue

            try:
                memories = get_memories(project_path, filters)
                # Add project attribution to each memory
                for m in memories:
                    m_dict = m.model_dump()
                    m_dict['source_project'] = project_path
                    # Extract project name from path
                    project_dir = Path(project_path).parent
                    m_dict['source_project_name'] = project_dir.name
                    all_memories.append(m_dict)
            except Exception as e:
                log_error(f"/api/aggregate/memories (project: {project_path})", e)
                continue

        # Sort by last_accessed or created_at (convert to str to handle mixed tz-aware/naive)
        all_memories.sort(
            key=lambda x: str(x.get('last_accessed') or x.get('created_at') or ''),
            reverse=True
        )

        # Apply pagination
        start = filters.offset
        end = start + filters.limit
        return all_memories[start:end]
    except Exception as e:
        log_error("/api/aggregate/memories", e)
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/api/aggregate/stats", response_model=AggregateStatsResponse)
@rate_limit("50/minute")
async def get_aggregate_stats(request: AggregateStatsRequest):
    """Get combined statistics across multiple projects."""
    try:
        total_count = 0
        total_access = 0
        importance_sum = 0
        by_type = {}
        by_status = {}

        for project_path in request.projects:
            if not Path(project_path).exists():
                continue

            try:
                stats = get_memory_stats(project_path)
                total_count += stats.total_count
                total_access += stats.total_access_count

                # Weighted average for importance
                project_count = stats.total_count
                project_avg_importance = stats.avg_importance
                importance_sum += project_avg_importance * project_count

                # Aggregate by_type
                for type_name, count in stats.by_type.items():
                    by_type[type_name] = by_type.get(type_name, 0) + count

                # Aggregate by_status
                for status, count in stats.by_status.items():
                    by_status[status] = by_status.get(status, 0) + count
            except Exception as e:
                log_error(f"/api/aggregate/stats (project: {project_path})", e)
                continue

        return AggregateStatsResponse(
            total_count=total_count,
            total_access_count=total_access,
            avg_importance=round(importance_sum / total_count, 1) if total_count > 0 else 0,
            by_type=by_type,
            by_status=by_status,
            project_count=len(request.projects),
        )
    except Exception as e:
        log_error("/api/aggregate/stats", e)
        raise HTTPException(status_code=500, detail=str(e))
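
The `avg_importance` above is a weighted mean: each project's average importance is scaled by its memory count before the single division, so larger projects dominate. A worked example with made-up numbers:

# Worked example of the weighted importance average (hypothetical figures):
importance_sum = 80 * 10 + 50 * 40  # project A: 10 memories @ 80, project B: 40 @ 50
total_count = 10 + 40
avg = round(importance_sum / total_count, 1)  # 2800 / 50 = 56.0, pulled toward B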


@app.post("/api/aggregate/tags")
@rate_limit("50/minute")
async def get_aggregate_tags(request: AggregateStatsRequest):
    """Get combined tags across multiple projects."""
    try:
        tag_counts = {}

        for project_path in request.projects:
            if not Path(project_path).exists():
                continue

            try:
                tags = get_all_tags(project_path)
                for tag in tags:
                    tag_name = tag['name']
                    tag_counts[tag_name] = tag_counts.get(tag_name, 0) + tag['count']
            except Exception as e:
                log_error(f"/api/aggregate/tags (project: {project_path})", e)
                continue

        # Return sorted by count
        return sorted(
            [{'name': k, 'count': v} for k, v in tag_counts.items()],
            key=lambda x: x['count'],
            reverse=True
        )
    except Exception as e:
        log_error("/api/aggregate/tags", e)
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/api/aggregate/chat", response_model=ChatResponse)
@rate_limit("10/minute")
async def chat_across_projects(request: AggregateChatRequest):
    """Ask AI about memories across multiple projects."""
    try:
        if not chat_service.is_available():
            raise HTTPException(
                status_code=503,
                detail="Chat service not available. Set GEMINI_API_KEY environment variable."
            )

        all_sources = []

        # Gather relevant memories from each project
        for project_path in request.projects:
            if not Path(project_path).exists():
                continue

            try:
                memories = search_memories(
                    project_path,
                    request.question,
                    limit=request.max_memories_per_project
                )

                for m in memories:
                    project_dir = Path(project_path).parent
                    source = {
                        'id': m.id,
                        'type': m.memory_type,
                        'content_preview': m.content[:200],
                        'tags': m.tags,
                        'project_path': project_path,
                        'project_name': project_dir.name,
                    }
                    all_sources.append(source)
            except Exception as e:
                log_error(f"/api/aggregate/chat (project: {project_path})", e)
                continue

        if not all_sources:
            return ChatResponse(
                answer="No relevant memories found across the selected projects.",
                sources=[],
            )

        # Build context with project attribution
        context = "\n\n".join([
            f"[From: {s['project_name']}] {s['content_preview']}"
            for s in all_sources
        ])

        # Query AI with attributed context
        answer = await chat_service.generate_response(request.question, context)

        log_success("/api/aggregate/chat", projects=len(request.projects), sources=len(all_sources))

        return ChatResponse(
            answer=answer,
            sources=[ChatSource(**s) for s in all_sources],
        )
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/aggregate/chat", e)
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/api/memories")
@rate_limit("100/minute")
async def list_memories(
    project: str = Query(..., description="Path to the database file"),
    memory_type: Optional[str] = Query(None, alias="type"),
    status: Optional[str] = None,
    tags: Optional[str] = None,
    search: Optional[str] = None,
    min_importance: Optional[int] = None,
    max_importance: Optional[int] = None,
    sort_by: str = "last_accessed",
    sort_order: str = "desc",
    limit: int = 50,
    offset: int = 0,
):
    """Get memories with filtering and pagination."""
    try:
        if not Path(project).exists():
            log_error("/api/memories", FileNotFoundError("Database not found"), project=project)
            raise HTTPException(status_code=404, detail="Database not found")

        filters = FilterParams(
            memory_type=memory_type,
            status=status,
            tags=tags.split(",") if tags else None,
            search=search,
            min_importance=min_importance,
            max_importance=max_importance,
            sort_by=sort_by,
            sort_order=sort_order,
            limit=limit,
            offset=offset,
        )

        memories = get_memories(project, filters)
        log_success("/api/memories", count=len(memories), offset=offset, filters=bool(search or memory_type))
        return memories
    except Exception as e:
        log_error("/api/memories", e, project=project)
        raise


@app.post("/api/memories")
@rate_limit("30/minute")
async def create_memory_endpoint(
    request: MemoryCreateRequest,
    project: str = Query(..., description="Path to the database file"),
):
    """Create a new memory."""
    try:
        if not Path(project).exists():
            log_error("/api/memories POST", FileNotFoundError("Database not found"), project=project)
            raise HTTPException(status_code=404, detail="Database not found")

        # Create the memory
        memory_id = create_memory(
            db_path=project,
            content=request.content,
            memory_type=request.memory_type,
            context=request.context,
            tags=request.tags if request.tags else None,
            importance_score=request.importance_score,
        )

        # Fetch the created memory to return it
        created_memory = get_memory_by_id(project, memory_id)

        # Broadcast to WebSocket clients
        await manager.broadcast("memory_created", created_memory.model_dump(by_alias=True))

        log_success("/api/memories POST", memory_id=memory_id, type=request.memory_type)
        return created_memory
    except HTTPException:
        raise
    except Exception as e:
        import traceback
        print(f"[DEBUG] create_memory_endpoint error: {type(e).__name__}: {e}")
        traceback.print_exc()
        log_error("/api/memories POST", e, project=project)
        raise


# NOTE: These routes MUST be defined before /api/memories/{memory_id} to avoid path conflicts
@app.get("/api/memories/needs-review")
async def get_memories_needing_review_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days_threshold: int = 30,
    limit: int = 50,
):
    """Get memories that may need freshness review."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_memories_needing_review(project, days_threshold, limit)


@app.post("/api/memories/bulk-update-status")
async def bulk_update_status_endpoint(
    project: str = Query(..., description="Path to the database file"),
    memory_ids: list[str] = [],
    status: str = "fresh",
):
    """Update status for multiple memories at once."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    valid_statuses = ["fresh", "needs_review", "outdated", "archived"]
    if status not in valid_statuses:
        raise HTTPException(status_code=400, detail=f"Invalid status. Must be one of: {valid_statuses}")

    count = bulk_update_memory_status(project, memory_ids, status)

    # Notify connected clients
    await manager.broadcast("memories_bulk_updated", {"count": count, "status": status})

    return {"updated_count": count, "status": status}
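
The NOTE above matters because FastAPI matches routes in declaration order: declared after the parameterized route below, a GET to /api/memories/needs-review would instead bind memory_id="needs-review". A minimal illustration with stand-in paths and handlers:

# Sketch: declaration order decides which route wins.
@app.get("/api/things/special")      # static segment: must come first
async def special_thing():
    return {"route": "special"}

@app.get("/api/things/{thing_id}")   # parameterized: would otherwise capture "special"
async def thing_by_id(thing_id: str):
    return {"route": "by-id", "thing_id": thing_id}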


@app.get("/api/memories/{memory_id}")
async def get_memory(
    memory_id: str,
    project: str = Query(..., description="Path to the database file"),
):
    """Get a single memory by ID."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    memory = get_memory_by_id(project, memory_id)
    if not memory:
        raise HTTPException(status_code=404, detail="Memory not found")
    return memory


@app.put("/api/memories/{memory_id}")
async def update_memory_endpoint(
    memory_id: str,
    updates: MemoryUpdate,
    project: str = Query(..., description="Path to the database file"),
):
    """Update a memory."""
    try:
        if not Path(project).exists():
            log_error("/api/memories/update", FileNotFoundError("Database not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Database not found")

        updated = update_memory(project, memory_id, updates)
        if not updated:
            log_error("/api/memories/update", ValueError("Memory not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Memory not found")

        # Notify connected clients
        await manager.broadcast("memory_updated", updated.model_dump(by_alias=True))
        log_success("/api/memories/update", memory_id=memory_id, fields_updated=len(updates.model_dump(exclude_unset=True)))
        return updated
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/memories/update", e, memory_id=memory_id)
        raise


@app.delete("/api/memories/{memory_id}")
async def delete_memory_endpoint(
    memory_id: str,
    project: str = Query(..., description="Path to the database file"),
):
    """Delete a memory."""
    try:
        if not Path(project).exists():
            log_error("/api/memories/delete", FileNotFoundError("Database not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Database not found")

        deleted = delete_memory(project, memory_id)
        if not deleted:
            log_error("/api/memories/delete", ValueError("Memory not found"), memory_id=memory_id)
            raise HTTPException(status_code=404, detail="Memory not found")

        # Notify connected clients
        await manager.broadcast("memory_deleted", {"id": memory_id})
        log_success("/api/memories/delete", memory_id=memory_id)
        return {"message": "Memory deleted", "id": memory_id}
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/memories/delete", e, memory_id=memory_id)
        raise


@app.get("/api/memories/stats/summary")
async def memory_stats(
    project: str = Query(..., description="Path to the database file"),
):
    """Get memory statistics."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_memory_stats(project)


@app.get("/api/search")
async def search(
    q: str = Query(..., min_length=1),
    project: str = Query(..., description="Path to the database file"),
    limit: int = 20,
):
    """Search memories."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return search_memories(project, q, limit)


@app.get("/api/activities")
async def list_activities(
    project: str = Query(..., description="Path to the database file"),
    event_type: Optional[str] = None,
    tool_name: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
):
    """Get activity log entries."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Ensure migrations are applied (adds summary columns if missing)
    ensure_migrations(project)

    activities = get_activities(project, event_type, tool_name, limit, offset)
    return {"activities": activities, "count": len(activities)}


@app.get("/api/timeline")
async def get_timeline_view(
    project: str = Query(..., description="Path to the database file"),
    hours: int = 24,
    include_memories: bool = True,
    include_activities: bool = True,
):
    """Get timeline of recent activity."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_timeline(project, hours, include_memories, include_activities)


@app.get("/api/tags")
async def list_tags(
    project: str = Query(..., description="Path to the database file"),
):
    """Get all tags with counts."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_all_tags(project)


@app.get("/api/types")
async def list_types(
    project: str = Query(..., description="Path to the database file"),
):
    """Get memory type distribution."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_type_distribution(project)


@app.get("/api/sessions")
async def list_sessions(
    project: str = Query(..., description="Path to the database file"),
    limit: int = 20,
):
    """Get recent sessions."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_sessions(project, limit)


# --- Stats Endpoints for Charts ---


@app.get("/api/stats/activity-heatmap")
async def get_activity_heatmap_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days: int = 90,
):
    """Get activity counts grouped by day for heatmap visualization."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_activity_heatmap(project, days)


@app.get("/api/stats/tool-usage")
async def get_tool_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    limit: int = 10,
):
    """Get tool usage statistics."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_tool_usage(project, limit)


@app.get("/api/stats/memory-growth")
async def get_memory_growth_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days: int = 30,
):
    """Get memory creation over time."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_memory_growth(project, days)


# --- Command Analytics Endpoints ---


@app.get("/api/stats/command-usage")
async def get_command_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    scope: Optional[str] = Query(None, description="Filter by scope: 'universal' or 'project'"),
    days: int = Query(30, ge=1, le=365),
):
    """Get slash command usage statistics."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_command_usage(project, scope, days)


@app.get("/api/stats/skill-usage")
async def get_skill_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    scope: Optional[str] = Query(None, description="Filter by scope: 'universal' or 'project'"),
    days: int = Query(30, ge=1, le=365),
):
    """Get skill usage statistics."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_skill_usage(project, scope, days)


@app.get("/api/stats/mcp-usage")
async def get_mcp_usage_endpoint(
    project: str = Query(..., description="Path to the database file"),
    days: int = Query(30, ge=1, le=365),
):
    """Get MCP server usage statistics."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_mcp_usage(project, days)


@app.get("/api/activities/{activity_id}")
async def get_activity_detail_endpoint(
    activity_id: str,
    project: str = Query(..., description="Path to the database file"),
):
    """Get full activity details including complete input/output."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Ensure migrations are applied
    ensure_migrations(project)

    activity = get_activity_detail(project, activity_id)
    if not activity:
        raise HTTPException(status_code=404, detail="Activity not found")

    return activity


@app.post("/api/activities/backfill-summaries")
async def backfill_activity_summaries_endpoint(
    project: str = Query(..., description="Path to the database file"),
):
    """Generate summaries for existing activities that don't have them."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    try:
        from backfill_summaries import backfill_all
        results = backfill_all(project)
        return {
            "success": True,
            "summaries_updated": results["summaries"],
            "mcp_servers_updated": results["mcp_servers"],
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Backfill failed: {str(e)}")


# --- Session Context Endpoints ---


@app.get("/api/sessions/recent")
async def get_recent_sessions_endpoint(
    project: str = Query(..., description="Path to the database file"),
    limit: int = 5,
):
    """Get recent sessions with summaries."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_recent_sessions(project, limit)


# --- Relationship Graph Endpoints ---


@app.get("/api/relationships")
async def get_relationships_endpoint(
    project: str = Query(..., description="Path to the database file"),
    memory_id: Optional[str] = None,
):
    """Get memory relationships for graph visualization."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_relationships(project, memory_id)


@app.get("/api/relationships/graph")
async def get_relationship_graph_endpoint(
    project: str = Query(..., description="Path to the database file"),
    center_id: Optional[str] = None,
    depth: int = 2,
):
    """Get graph data centered on a memory with configurable depth."""
    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    return get_relationship_graph(project, center_id, depth)


# --- Chat Endpoint ---


@app.get("/api/chat/status")
async def chat_status():
    """Check if chat service is available."""
    return {
        "available": chat_service.is_available(),
        "message": "Chat is available" if chat_service.is_available() else "Set GEMINI_API_KEY environment variable to enable chat",
    }


@app.post("/api/chat", response_model=ChatResponse)
@rate_limit("10/minute")
async def chat_with_memories(
    request: ChatRequest,
    project: str = Query(..., description="Path to the database file"),
):
    """Ask a natural language question about memories."""
    try:
        if not Path(project).exists():
            log_error("/api/chat", FileNotFoundError("Database not found"), question=request.question[:50])
            raise HTTPException(status_code=404, detail="Database not found")

        # Fetch style profile if style mode enabled
        style_context = None
        if request.use_style:
            try:
                # First try computed profile from user_messages (richer data)
                style_context = compute_style_profile_from_messages(project)
                # Fall back to stored profile if no user_messages
                if not style_context:
                    style_context = get_style_profile(project)
            except Exception:
                pass  # Graceful fallback if no style data

        result = await chat_service.ask_about_memories(
            project,
            request.question,
            request.max_memories,
            style_context,
        )

        log_success("/api/chat", question_len=len(request.question), sources=len(result.get("sources", [])))
        return ChatResponse(**result)
    except HTTPException:
        raise
    except Exception as e:
        log_error("/api/chat", e, question=request.question[:50])
        raise


@app.get("/api/chat/stream")
@rate_limit("10/minute")
async def stream_chat(
    project: str = Query(..., description="Path to the database file"),
    question: str = Query(..., description="The question to ask"),
    max_memories: int = Query(10, ge=1, le=50),
    use_style: bool = Query(False, description="Use user's communication style"),
):
    """SSE endpoint for streaming chat responses."""
    from fastapi.responses import StreamingResponse

    if not Path(project).exists():
        raise HTTPException(status_code=404, detail="Database not found")

    # Fetch style profile if style mode enabled
    style_context = None
    if use_style:
        try:
            # First try computed profile from user_messages (richer data)
            style_context = compute_style_profile_from_messages(project)
            # Fall back to stored profile if no user_messages
            if not style_context:
                style_context = get_style_profile(project)
        except Exception:
            pass  # Graceful fallback if no style data

    async def event_generator():
        try:
            async for event in chat_service.stream_ask_about_memories(project, question, max_memories, style_context):
                yield f"data: {json.dumps(event)}\n\n"
        except Exception as e:
            yield f"data: {json.dumps({'type': 'error', 'data': str(e)})}\n\n"

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",
        }
    )
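
The stream endpoint emits standard text/event-stream frames ("data: <json>" followed by a blank line, one per event). A minimal client sketch, assuming the backend listens on localhost:8000, httpx is installed, and the database path is illustrative:

# Sketch: consuming the SSE chat stream with httpx.
import json
import httpx

params = {"project": "/path/to/cortex.db", "question": "What did I work on recently?"}
with httpx.stream("GET", "http://localhost:8000/api/chat/stream", params=params, timeout=None) as resp:
    for line in resp.iter_lines():
        if line.startswith("data: "):
            event = json.loads(line[len("data: "):])
            print(event.get("type"), event.get("data"))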
1102
|
+
|
|
1103
|
+
|
|
1104
|
+
@app.post("/api/chat/save", response_model=ConversationSaveResponse)
|
|
1105
|
+
async def save_chat_conversation(
|
|
1106
|
+
request: ConversationSaveRequest,
|
|
1107
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1108
|
+
):
|
|
1109
|
+
"""Save a chat conversation as a memory."""
|
|
1110
|
+
try:
|
|
1111
|
+
if not Path(project).exists():
|
|
1112
|
+
log_error("/api/chat/save", FileNotFoundError("Database not found"))
|
|
1113
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1114
|
+
|
|
1115
|
+
result = await chat_service.save_conversation(
|
|
1116
|
+
project,
|
|
1117
|
+
[msg.model_dump() for msg in request.messages],
|
|
1118
|
+
request.referenced_memory_ids,
|
|
1119
|
+
request.importance or 60,
|
|
1120
|
+
)
|
|
1121
|
+
|
|
1122
|
+
log_success("/api/chat/save", memory_id=result["memory_id"], messages=len(request.messages))
|
|
1123
|
+
return ConversationSaveResponse(**result)
|
|
1124
|
+
except HTTPException:
|
|
1125
|
+
raise
|
|
1126
|
+
except Exception as e:
|
|
1127
|
+
log_error("/api/chat/save", e)
|
|
1128
|
+
raise
|
|
1129
|
+
|
|
1130
|
+
|
|
1131
|
+
@app.post("/api/compose-response", response_model=ComposeResponse)
|
|
1132
|
+
@rate_limit("10/minute")
|
|
1133
|
+
async def compose_response_endpoint(
|
|
1134
|
+
request: ComposeRequest,
|
|
1135
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1136
|
+
):
|
|
1137
|
+
"""Compose a response to an incoming message in the user's style."""
|
|
1138
|
+
try:
|
|
1139
|
+
if not Path(project).exists():
|
|
1140
|
+
log_error("/api/compose-response", FileNotFoundError("Database not found"))
|
|
1141
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1142
|
+
|
|
1143
|
+
# Get style profile
|
|
1144
|
+
style_profile = compute_style_profile_from_messages(project)
|
|
1145
|
+
|
|
1146
|
+
# Compose the response
|
|
1147
|
+
result = await chat_service.compose_response(
|
|
1148
|
+
db_path=project,
|
|
1149
|
+
incoming_message=request.incoming_message,
|
|
1150
|
+
context_type=request.context_type,
|
|
1151
|
+
template=request.template,
|
|
1152
|
+
tone_level=request.tone_level,
|
|
1153
|
+
include_memories=request.include_memories,
|
|
1154
|
+
style_profile=style_profile,
|
|
1155
|
+
custom_instructions=request.custom_instructions,
|
|
1156
|
+
include_explanation=request.include_explanation,
|
|
1157
|
+
)
|
|
1158
|
+
|
|
1159
|
+
if result.get("error"):
|
|
1160
|
+
log_error("/api/compose-response", Exception(result["error"]))
|
|
1161
|
+
raise HTTPException(status_code=500, detail=result["error"])
|
|
1162
|
+
|
|
1163
|
+
# Build response model
|
|
1164
|
+
import uuid
|
|
1165
|
+
from datetime import datetime
|
|
1166
|
+
response = ComposeResponse(
|
|
1167
|
+
id=str(uuid.uuid4()),
|
|
1168
|
+
response=result["response"],
|
|
1169
|
+
sources=result["sources"],
|
|
1170
|
+
style_applied=bool(style_profile and style_profile.get("total_messages", 0) > 0),
|
|
1171
|
+
tone_level=request.tone_level,
|
|
1172
|
+
template_used=request.template,
|
|
1173
|
+
incoming_message=request.incoming_message,
|
|
1174
|
+
context_type=request.context_type,
|
|
1175
|
+
created_at=datetime.now().isoformat(),
|
|
1176
|
+
custom_instructions=request.custom_instructions,
|
|
1177
|
+
explanation=result.get("explanation"),
|
|
1178
|
+
)
|
|
1179
|
+
|
|
1180
|
+
log_success("/api/compose-response", context=request.context_type, tone=request.tone_level)
|
|
1181
|
+
return response
|
|
1182
|
+
except HTTPException:
|
|
1183
|
+
raise
|
|
1184
|
+
except Exception as e:
|
|
1185
|
+
log_error("/api/compose-response", e)
|
|
1186
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
1187
|
+
|
|
1188
|
+
|
|
1189
|
+
# --- Image Generation Endpoints ---
|
|
1190
|
+
|
|
1191
|
+
|
|
1192
|
+
@app.get("/api/image/status")
|
|
1193
|
+
async def get_image_status():
|
|
1194
|
+
"""Check if image generation is available."""
|
|
1195
|
+
return {
|
|
1196
|
+
"available": image_service.is_available(),
|
|
1197
|
+
"message": "Image generation ready" if image_service.is_available()
|
|
1198
|
+
else "Configure GEMINI_API_KEY and install google-genai for image generation",
|
|
1199
|
+
}
|
|
1200
|
+
|
|
1201
|
+
|
|
1202
|
+
@app.get("/api/image/presets")
|
|
1203
|
+
async def get_image_presets():
|
|
1204
|
+
"""Get available image preset templates."""
|
|
1205
|
+
return {"presets": image_service.get_presets()}
|
|
1206
|
+
|
|
1207
|
+
|
|
1208
|
+
@app.post("/api/image/generate-batch", response_model=BatchImageGenerationResponse)
|
|
1209
|
+
@rate_limit("5/minute")
|
|
1210
|
+
async def generate_images_batch(
|
|
1211
|
+
request: BatchImageGenerationRequest,
|
|
1212
|
+
db_path: str = Query(..., alias="project", description="Path to the database file"),
|
|
1213
|
+
):
|
|
1214
|
+
"""Generate multiple images with different presets/prompts."""
|
|
1215
|
+
# Validate image count
|
|
1216
|
+
if len(request.images) not in [1, 2, 4]:
|
|
1217
|
+
return BatchImageGenerationResponse(
|
|
1218
|
+
success=False,
|
|
1219
|
+
errors=["Must request 1, 2, or 4 images"]
|
|
1220
|
+
)
|
|
1221
|
+
|
|
1222
|
+
# Build memory context
|
|
1223
|
+
memory_context = ""
|
|
1224
|
+
if request.memory_ids:
|
|
1225
|
+
memory_context = image_service.build_memory_context(db_path, request.memory_ids)
|
|
1226
|
+
|
|
1227
|
+
# Build chat context
|
|
1228
|
+
chat_context = image_service.build_chat_context(request.chat_messages)
|
|
1229
|
+
|
|
1230
|
+
# Convert request models to internal format
|
|
1231
|
+
image_requests = [
|
|
1232
|
+
SingleImageRequest(
|
|
1233
|
+
preset=ImagePreset(img.preset),
|
|
1234
|
+
custom_prompt=img.custom_prompt,
|
|
1235
|
+
aspect_ratio=img.aspect_ratio,
|
|
1236
|
+
image_size=img.image_size
|
|
1237
|
+
)
|
|
1238
|
+
for img in request.images
|
|
1239
|
+
]
|
|
1240
|
+
|
|
1241
|
+
result = await image_service.generate_batch(
|
|
1242
|
+
requests=image_requests,
|
|
1243
|
+
memory_context=memory_context,
|
|
1244
|
+
chat_context=chat_context,
|
|
1245
|
+
use_search_grounding=request.use_search_grounding
|
|
1246
|
+
)
|
|
1247
|
+
|
|
1248
|
+
return BatchImageGenerationResponse(
|
|
1249
|
+
success=result.success,
|
|
1250
|
+
images=[
|
|
1251
|
+
SingleImageResponseModel(
|
|
1252
|
+
success=img.success,
|
|
1253
|
+
image_data=img.image_data,
|
|
1254
|
+
text_response=img.text_response,
|
|
1255
|
+
thought_signature=img.thought_signature,
|
|
1256
|
+
image_id=img.image_id,
|
|
1257
|
+
error=img.error,
|
|
1258
|
+
index=img.index
|
|
1259
|
+
)
|
|
1260
|
+
for img in result.images
|
|
1261
|
+
],
|
|
1262
|
+
errors=result.errors
|
|
1263
|
+
)
|
|
1264
|
+
|
|
1265
|
+
|
|
1266
|
+
@app.post("/api/image/refine", response_model=SingleImageResponseModel)
|
|
1267
|
+
@rate_limit("5/minute")
|
|
1268
|
+
async def refine_image(request: ImageRefineRequest):
|
|
1269
|
+
"""Refine an existing generated image with a new prompt."""
|
|
1270
|
+
result = await image_service.refine_image(
|
|
1271
|
+
image_id=request.image_id,
|
|
1272
|
+
refinement_prompt=request.refinement_prompt,
|
|
1273
|
+
aspect_ratio=request.aspect_ratio,
|
|
1274
|
+
image_size=request.image_size
|
|
1275
|
+
)
|
|
1276
|
+
|
|
1277
|
+
return SingleImageResponseModel(
|
|
1278
|
+
success=result.success,
|
|
1279
|
+
image_data=result.image_data,
|
|
1280
|
+
text_response=result.text_response,
|
|
1281
|
+
thought_signature=result.thought_signature,
|
|
1282
|
+
image_id=result.image_id,
|
|
1283
|
+
error=result.error
|
|
1284
|
+
)
|
|
1285
|
+
|
|
1286
|
+
|
|
1287
|
+
@app.post("/api/image/clear-conversation")
|
|
1288
|
+
async def clear_image_conversation(image_id: Optional[str] = None):
|
|
1289
|
+
"""Clear image conversation history. If image_id provided, clear only that image."""
|
|
1290
|
+
image_service.clear_conversation(image_id)
|
|
1291
|
+
return {"status": "cleared", "image_id": image_id}
|
|
1292
|
+
|
|
1293
|
+
|
|
1294
|
+
# --- User Messages & Style Profile Endpoints ---
|
|
1295
|
+
|
|
1296
|
+
|
|
1297
|
+
@app.get("/api/user-messages", response_model=UserMessagesResponse)
|
|
1298
|
+
@rate_limit("100/minute")
|
|
1299
|
+
async def list_user_messages(
|
|
1300
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1301
|
+
session_id: Optional[str] = None,
|
|
1302
|
+
search: Optional[str] = None,
|
|
1303
|
+
has_code_blocks: Optional[bool] = None,
|
|
1304
|
+
has_questions: Optional[bool] = None,
|
|
1305
|
+
has_commands: Optional[bool] = None,
|
|
1306
|
+
tone_filter: Optional[str] = None,
|
|
1307
|
+
sort_by: str = "timestamp",
|
|
1308
|
+
sort_order: str = "desc",
|
|
1309
|
+
limit: int = Query(50, ge=1, le=500),
|
|
1310
|
+
offset: int = Query(0, ge=0),
|
|
1311
|
+
):
|
|
1312
|
+
"""Get user messages with filtering and pagination.
|
|
1313
|
+
|
|
1314
|
+
Filter options:
|
|
1315
|
+
- session_id: Filter by session
|
|
1316
|
+
- search: Search in message content
|
|
1317
|
+
- has_code_blocks: Filter by presence of code blocks
|
|
1318
|
+
- has_questions: Filter by presence of questions
|
|
1319
|
+
- has_commands: Filter by slash commands
|
|
1320
|
+
- tone_filter: Filter by tone indicator (polite, urgent, technical, casual, direct, inquisitive)
|
|
1321
|
+
"""
|
|
1322
|
+
try:
|
|
1323
|
+
if not Path(project).exists():
|
|
1324
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1325
|
+
|
|
1326
|
+
messages = get_user_messages(
|
|
1327
|
+
project,
|
|
1328
|
+
session_id=session_id,
|
|
1329
|
+
search=search,
|
|
1330
|
+
has_code_blocks=has_code_blocks,
|
|
1331
|
+
has_questions=has_questions,
|
|
1332
|
+
has_commands=has_commands,
|
|
1333
|
+
tone_filter=tone_filter,
|
|
1334
|
+
sort_by=sort_by,
|
|
1335
|
+
sort_order=sort_order,
|
|
1336
|
+
limit=limit,
|
|
1337
|
+
offset=offset,
|
|
1338
|
+
)
|
|
1339
|
+
|
|
1340
|
+
total_count = get_user_message_count(project, session_id=session_id)
|
|
1341
|
+
has_more = (offset + len(messages)) < total_count
|
|
1342
|
+
|
|
1343
|
+
log_success("/api/user-messages", count=len(messages), total=total_count)
|
|
1344
|
+
return UserMessagesResponse(
|
|
1345
|
+
messages=[UserMessage(**m) for m in messages],
|
|
1346
|
+
total_count=total_count,
|
|
1347
|
+
limit=limit,
|
|
1348
|
+
offset=offset,
|
|
1349
|
+
has_more=has_more,
|
|
1350
|
+
)
|
|
1351
|
+
except HTTPException:
|
|
1352
|
+
raise
|
|
1353
|
+
except Exception as e:
|
|
1354
|
+
log_error("/api/user-messages", e)
|
|
1355
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
1356
|
+
|
|
1357
|
+
|
|
1358
|
+
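A filtered-listing sketch against this route (host and database path assumed, as before). One caveat visible in the handler: total_count is computed with only the session filter, so has_more can overstate the remainder when content filters such as tone_filter are active:

    import requests

    resp = requests.get(
        "http://localhost:8765/api/user-messages",
        params={
            "project": "/path/to/project/.cortex/cortex.db",  # hypothetical path
            "tone_filter": "inquisitive",  # one of the documented tone indicators
            "has_code_blocks": "true",
            "limit": 20,
            "offset": 0,
        },
        timeout=10,
    )
    page = resp.json()
    print(page["total_count"], "total;", "more pages" if page["has_more"] else "last page")
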
@app.delete("/api/user-messages/{message_id}")
|
|
1359
|
+
async def delete_single_user_message(
|
|
1360
|
+
message_id: str,
|
|
1361
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1362
|
+
):
|
|
1363
|
+
"""Delete a single user message by ID."""
|
|
1364
|
+
try:
|
|
1365
|
+
if not Path(project).exists():
|
|
1366
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1367
|
+
|
|
1368
|
+
deleted = delete_user_message(project, message_id)
|
|
1369
|
+
if not deleted:
|
|
1370
|
+
raise HTTPException(status_code=404, detail="Message not found")
|
|
1371
|
+
|
|
1372
|
+
log_success("/api/user-messages/delete", message_id=message_id)
|
|
1373
|
+
return {"message": "Message deleted", "id": message_id}
|
|
1374
|
+
except HTTPException:
|
|
1375
|
+
raise
|
|
1376
|
+
except Exception as e:
|
|
1377
|
+
log_error("/api/user-messages/delete", e)
|
|
1378
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
1379
|
+
|
|
1380
|
+
|
|
1381
|
+
@app.post("/api/user-messages/bulk-delete")
|
|
1382
|
+
async def delete_user_messages_bulk_endpoint(
|
|
1383
|
+
request: BulkDeleteRequest,
|
|
1384
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1385
|
+
):
|
|
1386
|
+
"""Delete multiple user messages at once."""
|
|
1387
|
+
try:
|
|
1388
|
+
if not Path(project).exists():
|
|
1389
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1390
|
+
|
|
1391
|
+
count = delete_user_messages_bulk(project, request.message_ids)
|
|
1392
|
+
|
|
1393
|
+
log_success("/api/user-messages/bulk-delete", deleted_count=count)
|
|
1394
|
+
return {"message": f"Deleted {count} messages", "deleted_count": count}
|
|
1395
|
+
except HTTPException:
|
|
1396
|
+
raise
|
|
1397
|
+
except Exception as e:
|
|
1398
|
+
log_error("/api/user-messages/bulk-delete", e)
|
|
1399
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
1400
|
+
|
|
1401
|
+
|
|
1402
|
+
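Bulk deletion is a POST with the IDs in the body and the database path in the query string; the message_ids field name follows from request.message_ids above (IDs hypothetical):

    import requests

    resp = requests.post(
        "http://localhost:8765/api/user-messages/bulk-delete",
        params={"project": "/path/to/project/.cortex/cortex.db"},
        json={"message_ids": ["msg-1", "msg-2"]},
        timeout=10,
    )
    print(resp.json()["deleted_count"])
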
@app.get("/api/style/profile")
|
|
1403
|
+
async def get_style_profile_endpoint(
|
|
1404
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1405
|
+
project_path: Optional[str] = Query(None, description="Project-specific profile path, or None for global"),
|
|
1406
|
+
):
|
|
1407
|
+
"""Get user style profile for style analysis.
|
|
1408
|
+
|
|
1409
|
+
Returns style metrics computed from user messages:
|
|
1410
|
+
- total_messages: Total message count
|
|
1411
|
+
- avg_word_count: Average words per message
|
|
1412
|
+
- primary_tone: Most common tone (direct, polite, technical, etc.)
|
|
1413
|
+
- question_percentage: Percentage of messages containing questions
|
|
1414
|
+
- tone_distribution: Count of messages by tone
|
|
1415
|
+
- style_markers: Descriptive labels for writing style
|
|
1416
|
+
"""
|
|
1417
|
+
try:
|
|
1418
|
+
if not Path(project).exists():
|
|
1419
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1420
|
+
|
|
1421
|
+
# First try to get pre-computed profile from user_style_profiles table
|
|
1422
|
+
profile = get_style_profile(project, project_path=project_path)
|
|
1423
|
+
|
|
1424
|
+
# If no stored profile, compute from user_messages
|
|
1425
|
+
if not profile:
|
|
1426
|
+
profile = compute_style_profile_from_messages(project)
|
|
1427
|
+
|
|
1428
|
+
# If still no profile (no user_messages), return empty structure
|
|
1429
|
+
if not profile:
|
|
1430
|
+
return {
|
|
1431
|
+
"totalMessages": 0,
|
|
1432
|
+
"avgWordCount": 0,
|
|
1433
|
+
"primaryTone": "direct",
|
|
1434
|
+
"questionPercentage": 0,
|
|
1435
|
+
"toneDistribution": {},
|
|
1436
|
+
"styleMarkers": ["No data available yet"],
|
|
1437
|
+
}
|
|
1438
|
+
|
|
1439
|
+
# Convert stored profile format to frontend expected format if needed
|
|
1440
|
+
if "totalMessages" in profile:
|
|
1441
|
+
# Already in camelCase format from compute_style_profile_from_messages
|
|
1442
|
+
pass
|
|
1443
|
+
elif "id" in profile:
|
|
1444
|
+
# Convert stored profile (from user_style_profiles table) to frontend format
|
|
1445
|
+
tone_dist = {}
|
|
1446
|
+
# Stored profile doesn't have tone_distribution, so compute it
|
|
1447
|
+
computed = compute_style_profile_from_messages(project)
|
|
1448
|
+
if computed:
|
|
1449
|
+
tone_dist = computed.get("toneDistribution", {})
|
|
1450
|
+
primary_tone = computed.get("primaryTone", "direct")
|
|
1451
|
+
style_markers = computed.get("styleMarkers", [])
|
|
1452
|
+
else:
|
|
1453
|
+
primary_tone = "direct"
|
|
1454
|
+
style_markers = []
|
|
1455
|
+
|
|
1456
|
+
profile = {
|
|
1457
|
+
"totalMessages": profile.get("total_messages", 0),
|
|
1458
|
+
"avgWordCount": profile.get("avg_word_count", 0) or 0,
|
|
1459
|
+
"primaryTone": primary_tone,
|
|
1460
|
+
"questionPercentage": (profile.get("question_frequency", 0) or 0) * 100,
|
|
1461
|
+
"toneDistribution": tone_dist,
|
|
1462
|
+
"styleMarkers": style_markers or profile.get("greeting_patterns", []) or [],
|
|
1463
|
+
}
|
|
1464
|
+
|
|
1465
|
+
log_success("/api/style/profile", has_profile=True, total_messages=profile.get("totalMessages", 0))
|
|
1466
|
+
return profile
|
|
1467
|
+
except HTTPException:
|
|
1468
|
+
raise
|
|
1469
|
+
except Exception as e:
|
|
1470
|
+
log_error("/api/style/profile", e)
|
|
1471
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
1472
|
+
|
|
1473
|
+
|
|
1474
|
+
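Whichever branch above produces the profile, the payload reaching the client uses the camelCase keys; a consuming sketch (host and path assumptions as before):

    import requests

    resp = requests.get(
        "http://localhost:8765/api/style/profile",
        params={"project": "/path/to/project/.cortex/cortex.db"},
        timeout=10,
    )
    profile = resp.json()
    print(profile["primaryTone"], f"{profile['questionPercentage']:.0f}% questions")
    for tone, count in profile["toneDistribution"].items():
        print(f"  {tone}: {count}")
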
@app.get("/api/style/samples")
|
|
1475
|
+
async def get_style_samples_endpoint(
|
|
1476
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1477
|
+
samples_per_tone: int = Query(3, ge=1, le=10, description="Max samples per tone category"),
|
|
1478
|
+
):
|
|
1479
|
+
"""Get sample user messages for style analysis preview.
|
|
1480
|
+
|
|
1481
|
+
Returns messages grouped by style category (professional, casual, technical, creative).
|
|
1482
|
+
"""
|
|
1483
|
+
try:
|
|
1484
|
+
if not Path(project).exists():
|
|
1485
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1486
|
+
|
|
1487
|
+
samples = get_style_samples_by_category(project, samples_per_tone=samples_per_tone)
|
|
1488
|
+
|
|
1489
|
+
total_count = sum(len(v) for v in samples.values())
|
|
1490
|
+
log_success("/api/style/samples", count=total_count)
|
|
1491
|
+
return samples
|
|
1492
|
+
except HTTPException:
|
|
1493
|
+
raise
|
|
1494
|
+
except Exception as e:
|
|
1495
|
+
log_error("/api/style/samples", e)
|
|
1496
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
1497
|
+
|
|
1498
|
+
|
|
1499
|
+
+# --- WebSocket Endpoint ---
+
+
+@app.websocket("/ws")
+async def websocket_endpoint(websocket: WebSocket):
+    """WebSocket endpoint for real-time updates."""
+    client_id = await manager.connect(websocket)
+    try:
+        # Send initial connection confirmation
+        await manager.send_to_client(client_id, "connected", {"client_id": client_id})
+
+        # Keep connection alive and handle messages
+        while True:
+            data = await websocket.receive_text()
+            # Echo back for ping/pong
+            if data == "ping":
+                await manager.send_to_client(client_id, "pong", {})
+    except WebSocketDisconnect:
+        await manager.disconnect(client_id)
+    except Exception as e:
+        print(f"[WS] Error: {e}")
+        await manager.disconnect(client_id)
+
+
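The socket accepts the literal text frame "ping" and answers with a "pong" event, after an initial "connected" event. A client sketch with the websockets library; the exact wire format of events is produced by ConnectionManager.send_to_client in websocket_manager.py (outside this hunk), so the frames are printed raw here:

    import asyncio

    import websockets

    async def main():
        async with websockets.connect("ws://localhost:8765/ws") as ws:
            print(await ws.recv())   # initial "connected" event
            await ws.send("ping")
            print(await ws.recv())   # "pong" event

    asyncio.run(main())
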
+# --- Export Endpoints ---
+
+
+@app.get("/api/export")
+async def export_memories(
+    project: str = Query(..., description="Path to the database file"),
+    format: str = Query("json", description="Export format: json, markdown, csv"),
+    memory_ids: Optional[str] = Query(None, description="Comma-separated memory IDs to export, or all if empty"),
+    include_relationships: bool = Query(True, description="Include memory relationships"),
+):
+    """Export memories to the specified format."""
+    from fastapi.responses import Response
+    import csv
+    import io
+
+    if not Path(project).exists():
+        raise HTTPException(status_code=404, detail="Database not found")
+
+    # Get memories
+    if memory_ids:
+        ids = memory_ids.split(",")
+        memories = [get_memory_by_id(project, mid) for mid in ids if mid.strip()]
+        memories = [m for m in memories if m is not None]
+    else:
+        from models import FilterParams
+        filters = FilterParams(limit=1000, offset=0, sort_by="created_at", sort_order="desc")
+        memories = get_memories(project, filters)
+
+    # Get relationships if requested
+    relationships = []
+    if include_relationships:
+        relationships = get_relationships(project)
+
+    if format == "json":
+        export_data = {
+            "exported_at": datetime.now().isoformat(),
+            "project": project,
+            "memory_count": len(memories),
+            "memories": [m.model_dump(by_alias=True) for m in memories],
+            "relationships": relationships if include_relationships else [],
+        }
+        return Response(
+            content=json.dumps(export_data, indent=2, default=str),
+            media_type="application/json",
+            headers={"Content-Disposition": f"attachment; filename=memories_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"}
+        )
+
+    elif format == "markdown":
+        md_lines = [
+            "# Omni-Cortex Memory Export",
+            "",
+            f"**Exported:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
+            f"**Total Memories:** {len(memories)}",
+            "",
+            "---",
+            "",
+        ]
+        for m in memories:
+            md_lines.extend([
+                f"## {m.type.title()}: {m.content[:50]}{'...' if len(m.content) > 50 else ''}",
+                "",
+                f"**ID:** `{m.id}`",
+                f"**Type:** {m.type}",
+                f"**Status:** {m.status}",
+                f"**Importance:** {m.importance_score}",
+                f"**Created:** {m.created_at}",
+                f"**Tags:** {', '.join(m.tags) if m.tags else 'None'}",
+                "",
+                "### Content",
+                "",
+                m.content,
+                "",
+                "### Context",
+                "",
+                m.context or "_No context_",
+                "",
+                "---",
+                "",
+            ])
+        return Response(
+            content="\n".join(md_lines),
+            media_type="text/markdown",
+            headers={"Content-Disposition": f"attachment; filename=memories_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md"}
+        )
+
+    elif format == "csv":
+        output = io.StringIO()
+        writer = csv.writer(output)
+        writer.writerow(["id", "type", "status", "importance", "content", "context", "tags", "created_at", "last_accessed"])
+        for m in memories:
+            writer.writerow([
+                m.id,
+                m.type,
+                m.status,
+                m.importance_score,
+                m.content,
+                m.context or "",
+                ",".join(m.tags) if m.tags else "",
+                m.created_at,
+                m.last_accessed or "",
+            ])
+        return Response(
+            content=output.getvalue(),
+            media_type="text/csv",
+            headers={"Content-Disposition": f"attachment; filename=memories_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv"}
+        )
+
+    else:
+        raise HTTPException(status_code=400, detail=f"Unsupported format: {format}. Use json, markdown, or csv.")
+
+
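Export is a plain GET whose Content-Disposition header makes browsers download a file; from a script, the body can be written straight to disk. Worth noting from the handler: relationships are fetched for every format but only embedded in the JSON payload; the markdown and csv branches ignore them. A CSV sketch (host and path assumptions as before):

    import requests

    resp = requests.get(
        "http://localhost:8765/api/export",
        params={
            "project": "/path/to/project/.cortex/cortex.db",  # hypothetical path
            "format": "csv",
        },
        timeout=30,
    )
    resp.raise_for_status()
    with open("memories_export.csv", "wb") as f:
        f.write(resp.content)
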
+# --- Agent Endpoints ---
+
+
+@app.get("/api/agents")
+async def list_agents(
+    project: str = Query(..., description="Path to the database file"),
+    type: Optional[str] = Query(None, description="Filter by agent type: main, subagent, tool"),
+    active_only: bool = Query(False, description="Show only active agents (last 5 minutes)"),
+    limit: int = Query(50, ge=1, le=200),
+):
+    """List all agents with filtering."""
+    if not Path(project).exists():
+        raise HTTPException(status_code=404, detail="Database not found")
+
+    agents = get_agents(project, agent_type=type, limit=limit, active_only=active_only)
+    return {"agents": agents, "count": len(agents)}
+
+
@app.get("/api/agents/{agent_id}")
|
|
1653
|
+
async def get_agent(
|
|
1654
|
+
agent_id: str,
|
|
1655
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1656
|
+
):
|
|
1657
|
+
"""Get single agent details."""
|
|
1658
|
+
if not Path(project).exists():
|
|
1659
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1660
|
+
|
|
1661
|
+
agent = get_agent_by_id(project, agent_id)
|
|
1662
|
+
if not agent:
|
|
1663
|
+
raise HTTPException(status_code=404, detail="Agent not found")
|
|
1664
|
+
|
|
1665
|
+
return agent
|
|
1666
|
+
|
|
1667
|
+
|
|
1668
|
+
@app.get("/api/agents/{agent_id}/stats")
|
|
1669
|
+
async def get_agent_stats_endpoint(
|
|
1670
|
+
agent_id: str,
|
|
1671
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1672
|
+
):
|
|
1673
|
+
"""Get detailed stats for an agent."""
|
|
1674
|
+
if not Path(project).exists():
|
|
1675
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1676
|
+
|
|
1677
|
+
agent = get_agent_by_id(project, agent_id)
|
|
1678
|
+
if not agent:
|
|
1679
|
+
raise HTTPException(status_code=404, detail="Agent not found")
|
|
1680
|
+
|
|
1681
|
+
tool_breakdown = get_agent_tool_breakdown(project, agent_id)
|
|
1682
|
+
files_touched = get_agent_files_touched(project, agent_id)
|
|
1683
|
+
parent_agent = get_agent_parent(project, agent_id) if agent.get('type') == 'subagent' else None
|
|
1684
|
+
|
|
1685
|
+
return {
|
|
1686
|
+
"agent": agent,
|
|
1687
|
+
"tool_breakdown": tool_breakdown,
|
|
1688
|
+
"files_touched": files_touched,
|
|
1689
|
+
"parent_agent_id": parent_agent,
|
|
1690
|
+
"adw_phase": None # Will be populated in Part 2 with ADW integration
|
|
1691
|
+
}
|
|
1692
|
+
|
|
1693
|
+
|
|
1694
|
+
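A sketch chaining the two routes above: list active agents, then pull per-agent stats. The stats response keys come from the handler; the assumption is that each row returned by get_agents (defined in database.py, outside this hunk) carries an "id" key usable in the stats URL:

    import requests

    base = "http://localhost:8765"
    db = "/path/to/project/.cortex/cortex.db"  # hypothetical path

    agents = requests.get(f"{base}/api/agents", params={"project": db, "active_only": "true"}, timeout=10).json()
    for agent in agents["agents"]:
        stats = requests.get(f"{base}/api/agents/{agent['id']}/stats", params={"project": db}, timeout=10).json()
        print(agent["id"], stats["tool_breakdown"], stats["files_touched"], stats["parent_agent_id"])
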
+# --- ADW Endpoints ---
+
+
+def scan_adw_folder(project_path: str) -> list[dict]:
+    """Scan agents/ folder for ADW runs relative to project directory."""
+    # Get project directory from db path (e.g., /project/.cortex/cortex.db -> /project)
+    project_dir = Path(project_path).parent.parent if project_path.endswith(".db") else Path(project_path)
+    agents_dir = project_dir / "agents"
+
+    if not agents_dir.exists():
+        return []
+
+    adw_runs = []
+    for adw_dir in agents_dir.iterdir():
+        if adw_dir.is_dir() and adw_dir.name.startswith("adw_"):
+            state_file = adw_dir / "adw_state.json"
+            if state_file.exists():
+                try:
+                    state = json.loads(state_file.read_text())
+                    adw_runs.append({
+                        "adw_id": state.get("adw_id", adw_dir.name),
+                        "created_at": state.get("created_at"),
+                        "status": state.get("status", "unknown"),
+                        "current_phase": state.get("current_phase", "unknown"),
+                        "phases_completed": len(state.get("completed_phases", [])),
+                        "phases_total": 4,  # plan, build, validate, release
+                        "project_path": str(project_dir)
+                    })
+                except json.JSONDecodeError:
+                    pass
+
+    # Sort by created_at descending
+    adw_runs.sort(key=lambda x: x.get("created_at", ""), reverse=True)
+    return adw_runs
+
+
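For reference, the on-disk layout the scanner expects: a directory under agents/ whose name starts with adw_, containing an adw_state.json. The keys below are the ones scan_adw_folder and get_adw_state_with_agents actually read; the values are illustrative, and any timestamp or ID format beyond the adw_ prefix is an assumption:

    import json
    from pathlib import Path

    state = {
        "adw_id": "adw_example",          # hypothetical run ID
        "created_at": "2025-01-01T12:00:00",
        "status": "running",
        "current_phase": "build",
        "completed_phases": ["plan"],
        "task_description": "example task",
        "project_path": "/path/to/project",
    }
    run_dir = Path("agents/adw_example")
    run_dir.mkdir(parents=True, exist_ok=True)
    (run_dir / "adw_state.json").write_text(json.dumps(state, indent=2))
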
+def get_adw_state_with_agents(adw_id: str, db_path: str) -> Optional[dict]:
+    """Get ADW state with correlated agent activity."""
+    # Get project directory from db path
+    project_dir = Path(db_path).parent.parent if db_path.endswith(".db") else Path(db_path)
+    adw_dir = project_dir / "agents" / adw_id
+    state_file = adw_dir / "adw_state.json"
+
+    if not state_file.exists():
+        return None
+
+    state = json.loads(state_file.read_text())
+
+    # Build phase info with agent correlation
+    phases = []
+    all_phases = ["plan", "build", "validate", "release"]
+    completed = state.get("completed_phases", [])
+    current = state.get("current_phase")
+
+    for phase_name in all_phases:
+        phase_dir = adw_dir / phase_name
+
+        # Determine status
+        if phase_name in completed:
+            status = "completed"
+        elif phase_name == current:
+            status = "running"
+        else:
+            status = "pending"
+
+        # Find agents that ran in this phase (from output files) and count calls
+        phase_agents = []
+        total_phase_calls = 0
+        if phase_dir.exists():
+            for output_file in phase_dir.glob("*_output.jsonl"):
+                agent_name = output_file.stem.replace("_output", "")
+                # Count tool_use entries in the JSONL file
+                call_count = 0
+                try:
+                    with open(output_file, "r", encoding="utf-8") as f:
+                        for line in f:
+                            try:
+                                entry = json.loads(line)
+                                if entry.get("type") == "tool_use":
+                                    call_count += 1
+                            except json.JSONDecodeError:
+                                pass
+                except Exception:
+                    pass
+                phase_agents.append({
+                    "id": agent_name,
+                    "call_count": call_count
+                })
+                total_phase_calls += call_count
+
+        phases.append({
+            "name": phase_name,
+            "status": status,
+            "agents": phase_agents,  # Now includes id and call_count
+            "agent_ids": [a["id"] for a in phase_agents],  # Keep for backwards compat
+            "call_count": total_phase_calls,
+            "duration_seconds": None  # Could be computed from timestamps if needed
+        })
+
+    return {
+        "adw_id": state.get("adw_id", adw_id),
+        "task_description": state.get("task_description", ""),
+        "created_at": state.get("created_at"),
+        "current_phase": current,
+        "completed_phases": completed,
+        "status": state.get("status", "unknown"),
+        "phases": phases,
+        "project_path": state.get("project_path", "")
+    }
+
+
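The per-phase call counting above only matches JSONL records whose "type" is "tool_use", and silently skips malformed lines; a standalone replica of that loop over in-memory lines (the extra fields are hypothetical):

    import json

    lines = [
        json.dumps({"type": "tool_use", "name": "Read"}),  # counted
        json.dumps({"type": "text", "text": "thinking"}),  # ignored
        "not json at all",                                 # skipped by the except branch
    ]
    count = 0
    for line in lines:
        try:
            if json.loads(line).get("type") == "tool_use":
                count += 1
        except json.JSONDecodeError:
            pass
    assert count == 1
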
@app.get("/api/adw/list")
|
|
1806
|
+
async def list_adw_runs(
|
|
1807
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1808
|
+
limit: int = Query(20, ge=1, le=100)
|
|
1809
|
+
):
|
|
1810
|
+
"""List all ADW runs from agents/ folder for the selected project."""
|
|
1811
|
+
adw_runs = scan_adw_folder(project)[:limit]
|
|
1812
|
+
return {"adw_runs": adw_runs, "count": len(adw_runs)}
|
|
1813
|
+
|
|
1814
|
+
|
|
1815
|
+
@app.get("/api/adw/{adw_id}")
|
|
1816
|
+
async def get_adw_details(
|
|
1817
|
+
adw_id: str,
|
|
1818
|
+
project: str = Query(..., description="Path to the database file"),
|
|
1819
|
+
):
|
|
1820
|
+
"""Get ADW state with agent correlation."""
|
|
1821
|
+
if not Path(project).exists():
|
|
1822
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
1823
|
+
|
|
1824
|
+
state = get_adw_state_with_agents(adw_id, project)
|
|
1825
|
+
if not state:
|
|
1826
|
+
raise HTTPException(status_code=404, detail="ADW not found")
|
|
1827
|
+
|
|
1828
|
+
return state
|
|
1829
|
+
|
|
1830
|
+
|
|
1831
|
+
+# --- Health Check ---
+
+
+@app.get("/health")
+async def health_check():
+    """Health check endpoint."""
+    return {
+        "status": "healthy",
+        "websocket_connections": manager.connection_count,
+    }
+
+
+# --- Static File Serving (SPA) ---
+# These routes must come AFTER all API routes
+
+
+@app.get("/")
+async def serve_root():
+    """Serve the frontend index.html."""
+    index_file = DIST_DIR / "index.html"
+    if index_file.exists():
+        return FileResponse(str(index_file))
+    return {"message": "Omni-Cortex Dashboard API", "docs": "/docs"}
+
+
@app.get("/{path:path}")
|
|
1857
|
+
async def serve_spa(path: str):
|
|
1858
|
+
"""Catch-all route to serve SPA for client-side routing with path traversal protection."""
|
|
1859
|
+
# Skip API routes and known paths
|
|
1860
|
+
if path.startswith(("api/", "ws", "health", "docs", "openapi", "redoc")):
|
|
1861
|
+
raise HTTPException(status_code=404, detail="Not found")
|
|
1862
|
+
|
|
1863
|
+
# Check if it's a static file (with path traversal protection)
|
|
1864
|
+
safe_path = PathValidator.is_safe_static_path(DIST_DIR, path)
|
|
1865
|
+
if safe_path:
|
|
1866
|
+
return FileResponse(str(safe_path))
|
|
1867
|
+
|
|
1868
|
+
# Otherwise serve index.html for SPA routing
|
|
1869
|
+
index_file = DIST_DIR / "index.html"
|
|
1870
|
+
if index_file.exists():
|
|
1871
|
+
return FileResponse(str(index_file))
|
|
1872
|
+
|
|
1873
|
+
raise HTTPException(status_code=404, detail="Not found")
|
|
1874
|
+
|
|
1875
|
+
|
|
1876
|
+
+def run():
+    """Run the dashboard server."""
+    uvicorn.run(
+        "main:app",
+        host="0.0.0.0",
+        port=8765,
+        reload=True,
+        reload_dirs=[str(Path(__file__).parent)],
+    )
+
+
+if __name__ == "__main__":
+    run()