superlocalmemory 2.3.2 → 2.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/ui_server.py ADDED
@@ -0,0 +1,1480 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ SuperLocalMemory V2 - Intelligent Local Memory System
4
+ Copyright (c) 2026 Varun Pratap Bhardwaj
5
+ Licensed under MIT License
6
+
7
+ Repository: https://github.com/varun369/SuperLocalMemoryV2
8
+ Author: Varun Pratap Bhardwaj (Solution Architect)
9
+
10
+ NOTICE: This software is protected by MIT License.
11
+ Attribution must be preserved in all copies or derivatives.
12
+ """
13
+
14
+ """
15
+ SuperLocalMemory V2.2.0 - FastAPI UI Server with WebSocket Support
16
+ Comprehensive REST and WebSocket API for memory visualization and real-time updates.
17
+
18
+ Features:
19
+ - Full REST API for memory CRUD operations
20
+ - WebSocket support for real-time memory updates
21
+ - Profile management and switching
22
+ - Import/Export functionality
23
+ - Advanced search with filters
24
+ - Cluster detail views
25
+ - Timeline aggregation (day/week/month)
26
+ - CORS enabled for cross-origin requests
27
+ - Response compression
28
+ - Comprehensive error handling
29
+ """
30
+
31
+ import sqlite3
32
+ import json
33
+ import asyncio
34
+ import gzip
35
+ import io
36
+ from pathlib import Path
37
+ from typing import Optional, List, Dict, Any, Set
38
+ from datetime import datetime, timedelta
39
+ from collections import defaultdict
40
+
41
+ try:
42
+ from fastapi import FastAPI, HTTPException, Query, WebSocket, WebSocketDisconnect, UploadFile, File
43
+ from fastapi.staticfiles import StaticFiles
44
+ from fastapi.responses import HTMLResponse, JSONResponse, StreamingResponse
45
+ from fastapi.middleware.cors import CORSMiddleware
46
+ from fastapi.middleware.gzip import GZipMiddleware
47
+ from pydantic import BaseModel, Field, validator
48
+ import uvicorn
49
+ FASTAPI_AVAILABLE = True
50
+ except ImportError:
51
+ FASTAPI_AVAILABLE = False
52
+ raise ImportError(
53
+ "FastAPI dependencies not installed. "
54
+ "Install with: pip install 'fastapi[all]' uvicorn websockets"
55
+ )
56
+
57
+ # Import local modules
58
+ import sys
59
+ sys.path.insert(0, str(Path(__file__).parent / "src"))
60
+
61
+ from memory_store_v2 import MemoryStoreV2
62
+ from graph_engine import GraphEngine
63
+ from pattern_learner import PatternLearner
64
+
65
# Configuration
# All state lives under the user's home directory so the server needs no
# working-directory assumptions.
MEMORY_DIR = Path.home() / ".claude-memory"   # root data directory
DB_PATH = MEMORY_DIR / "memory.db"            # active profile's SQLite database
UI_DIR = Path(__file__).parent / "ui"         # static assets, served at /static
PROFILES_DIR = MEMORY_DIR / "profiles"        # presumably saved profile snapshots — not referenced in this chunk
70
+
71
# Initialize FastAPI application
app = FastAPI(
    title="SuperLocalMemory V2.2.0 UI Server",
    description="Knowledge Graph Visualization with Real-Time Updates",
    version="2.2.0",
    docs_url="/api/docs",
    redoc_url="/api/redoc",
)

# Add CORS middleware (for web UI development).
# NOTE: Starlette's CORSMiddleware compares `allow_origins` entries to the
# Origin header *literally*, so wildcard-port strings like
# "http://localhost:*" never match any real origin and CORS was effectively
# disabled.  `allow_origin_regex` is the supported way to say "any port on
# localhost / 127.0.0.1".
app.add_middleware(
    CORSMiddleware,
    allow_origin_regex=r"^http://(localhost|127\.0\.0\.1)(:\d+)?$",
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Compress responses larger than ~1 KB.
app.add_middleware(GZipMiddleware, minimum_size=1000)
91
+
92
+ # WebSocket connection manager
93
class ConnectionManager:
    """Manages WebSocket connections for real-time updates."""

    def __init__(self):
        # Live connections; mutated by connect()/disconnect() and pruned
        # by broadcast() when a send fails.
        self.active_connections: Set[WebSocket] = set()

    async def connect(self, websocket: WebSocket):
        """Accept and register a WebSocket connection."""
        await websocket.accept()
        self.active_connections.add(websocket)

    def disconnect(self, websocket: WebSocket):
        """Remove a WebSocket connection (no-op if it was never registered)."""
        self.active_connections.discard(websocket)

    async def broadcast(self, message: dict):
        """Broadcast *message* to all connected clients.

        Clients whose send raises are dropped from the registry.

        BUGFIX: iterate over a snapshot of the set.  Each ``send_json``
        awaits, so another task can connect/disconnect mid-loop; mutating
        ``active_connections`` while iterating it directly would raise
        ``RuntimeError: Set changed size during iteration``.
        """
        disconnected = set()
        for connection in list(self.active_connections):
            try:
                await connection.send_json(message)
            except Exception:
                disconnected.add(connection)

        # Clean up disconnected clients
        self.active_connections -= disconnected
119
+
120
# Module-level singleton shared by every endpoint that pushes updates.
manager = ConnectionManager()
121
+
122
# Mount static files (UI directory); create it first so mounting never
# fails on a fresh install.
UI_DIR.mkdir(exist_ok=True)
app.mount("/static", StaticFiles(directory=str(UI_DIR)), name="static")
125
+
126
+
127
+ # ============================================================================
128
+ # Request/Response Models
129
+ # ============================================================================
130
+
131
class SearchRequest(BaseModel):
    """Advanced search request model (body of POST /api/search)."""
    # Free-text query; required, 1-1000 characters.
    query: str = Field(..., min_length=1, max_length=1000)
    # Maximum number of results to return.
    limit: int = Field(default=10, ge=1, le=100)
    # Minimum similarity score a hit must reach to be included.
    min_score: float = Field(default=0.3, ge=0.0, le=1.0)
    # Optional exact-match filters; None disables the corresponding filter.
    category: Optional[str] = None
    project_name: Optional[str] = None
    cluster_id: Optional[int] = None
    date_from: Optional[str] = None  # ISO format: YYYY-MM-DD
    date_to: Optional[str] = None  # ISO format: YYYY-MM-DD
141
+
142
class MemoryFilter(BaseModel):
    """Memory filtering options.

    NOTE(review): not referenced by any endpoint visible in this chunk —
    presumably consumed elsewhere; confirm before removing.
    """
    category: Optional[str] = None
    project_name: Optional[str] = None
    cluster_id: Optional[int] = None
    # Minimum importance score, 1-10 inclusive.
    min_importance: Optional[int] = Field(None, ge=1, le=10)
    tags: Optional[List[str]] = None
149
+
150
class ProfileSwitch(BaseModel):
    """Profile switching request."""
    # Target profile; validated separately by validate_profile_name().
    profile_name: str = Field(..., min_length=1, max_length=50)
153
+
154
class TimelineParams(BaseModel):
    """Timeline aggregation parameters."""
    # Look-back window in days.
    days: int = Field(default=30, ge=1, le=365)
    # Aggregation bucket; restricted to the three supported periods.
    group_by: str = Field(default="day", pattern="^(day|week|month)$")
158
+
159
+
160
+ # ============================================================================
161
+ # Database Helper Functions
162
+ # ============================================================================
163
+
164
def get_db_connection():
    """Open a connection to the memory database.

    Raises:
        HTTPException: 500 when the database file has not been created yet.
    """
    if DB_PATH.exists():
        return sqlite3.connect(DB_PATH)
    raise HTTPException(
        status_code=500,
        detail="Memory database not found. Run 'memory-init' to initialize."
    )
172
+
173
def dict_factory(cursor, row):
    """Map a SQLite result row to a {column_name: value} dict.

    Intended for use as ``connection.row_factory``; column names come from
    ``cursor.description`` (name is element 0 of each 7-tuple).
    """
    return {col[0]: val for col, val in zip(cursor.description, row)}
177
+
178
def validate_profile_name(name: str) -> bool:
    """Return True if *name* is a safe profile name.

    Only ASCII letters, digits, underscore and hyphen are accepted.

    BUGFIX: uses ``re.fullmatch`` instead of ``re.match`` with ``^...$``
    anchors — ``$`` also matches just before a trailing newline, so the
    original pattern accepted names like ``"dev\\n"``, which could then
    leak into filesystem paths built from the profile name.
    """
    import re
    return re.fullmatch(r'[a-zA-Z0-9_-]+', name) is not None
182
+
183
+
184
+ # ============================================================================
185
+ # API Endpoints - Basic Routes
186
+ # ============================================================================
187
+
188
@app.get("/", response_class=HTMLResponse)
async def root():
    """Serve the main UI page.

    Returns ``ui/index.html`` when it exists; otherwise falls back to a
    self-contained landing page listing the available API endpoints.
    """
    index_path = UI_DIR / "index.html"
    if not index_path.exists():
        return """
        <!DOCTYPE html>
        <html>
        <head>
            <title>SuperLocalMemory V2.2.0</title>
            <meta charset="utf-8">
            <style>
                body {
                    font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Arial, sans-serif;
                    padding: 40px;
                    max-width: 1200px;
                    margin: 0 auto;
                    background: #f5f5f5;
                }
                .header {
                    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
                    color: white;
                    padding: 30px;
                    border-radius: 8px;
                    margin-bottom: 30px;
                }
                h1 { margin: 0; font-size: 2em; }
                h2 { color: #333; margin-top: 30px; }
                ul { line-height: 1.8; }
                a { color: #667eea; text-decoration: none; }
                a:hover { text-decoration: underline; }
                .endpoint {
                    background: white;
                    padding: 10px 15px;
                    margin: 5px 0;
                    border-radius: 4px;
                    border-left: 3px solid #667eea;
                }
                .badge {
                    display: inline-block;
                    padding: 3px 8px;
                    background: #667eea;
                    color: white;
                    border-radius: 3px;
                    font-size: 0.8em;
                    margin-left: 10px;
                }
                footer {
                    margin-top: 50px;
                    padding-top: 20px;
                    border-top: 2px solid #ddd;
                    color: #666;
                    text-align: center;
                }
            </style>
        </head>
        <body>
            <div class="header">
                <h1>SuperLocalMemory V2.2.0 UI Server</h1>
                <p>FastAPI Backend with WebSocket Support</p>
            </div>

            <h2>Available Endpoints</h2>

            <div class="endpoint">
                <a href="/api/docs">/api/docs</a>
                <span class="badge">Interactive</span>
                <p>Swagger UI - Interactive API Documentation</p>
            </div>

            <div class="endpoint">
                <a href="/api/stats">/api/stats</a>
                <span class="badge">GET</span>
                <p>System statistics and overview</p>
            </div>

            <div class="endpoint">
                <a href="/api/memories">/api/memories</a>
                <span class="badge">GET</span>
                <p>List and filter memories</p>
            </div>

            <div class="endpoint">
                <a href="/api/graph">/api/graph</a>
                <span class="badge">GET</span>
                <p>Knowledge graph data for visualization</p>
            </div>

            <div class="endpoint">
                <a href="/api/timeline">/api/timeline</a>
                <span class="badge">GET</span>
                <p>Timeline view with day/week/month aggregation</p>
            </div>

            <div class="endpoint">
                <a href="/api/patterns">/api/patterns</a>
                <span class="badge">GET</span>
                <p>Learned patterns and preferences</p>
            </div>

            <div class="endpoint">
                <a href="/api/clusters">/api/clusters</a>
                <span class="badge">GET</span>
                <p>Cluster information and themes</p>
            </div>

            <div class="endpoint">
                /api/clusters/{id} <span class="badge">GET</span>
                <p>Detailed cluster view with members</p>
            </div>

            <div class="endpoint">
                /api/search <span class="badge">POST</span>
                <p>Advanced semantic search</p>
            </div>

            <div class="endpoint">
                <a href="/api/profiles">/api/profiles</a>
                <span class="badge">GET</span>
                <p>List available memory profiles</p>
            </div>

            <div class="endpoint">
                /api/profiles/{name}/switch <span class="badge">POST</span>
                <p>Switch active memory profile</p>
            </div>

            <div class="endpoint">
                <a href="/api/export">/api/export</a>
                <span class="badge">GET</span>
                <p>Export memories as JSON</p>
            </div>

            <div class="endpoint">
                /api/import <span class="badge">POST</span>
                <p>Import memories from JSON file</p>
            </div>

            <div class="endpoint">
                /ws/updates <span class="badge">WebSocket</span>
                <p>Real-time memory updates stream</p>
            </div>

            <footer>
                <p><strong>SuperLocalMemory V2.2.0</strong></p>
                <p>Copyright (c) 2026 Varun Pratap Bhardwaj</p>
                <p>Licensed under MIT License</p>
            </footer>
        </body>
        </html>
        """
    # BUGFIX: read as UTF-8 explicitly.  The page is served with
    # charset=utf-8, but bare read_text() decodes with the platform locale
    # encoding (e.g. cp1252 on Windows) and can mangle non-ASCII content.
    return index_path.read_text(encoding="utf-8")
340
+
341
+
342
@app.get("/health")
async def health_check():
    """Liveness probe: version, database presence, and current timestamp."""
    db_state = "connected" if DB_PATH.exists() else "missing"
    return {
        "status": "healthy",
        "version": "2.2.0",
        "database": db_state,
        "timestamp": datetime.now().isoformat(),
    }
351
+
352
+
353
+ # ============================================================================
354
+ # API Endpoints - Memory Management
355
+ # ============================================================================
356
+
357
@app.get("/api/memories")
async def get_memories(
    category: Optional[str] = None,
    project_name: Optional[str] = None,
    cluster_id: Optional[int] = None,
    min_importance: Optional[int] = None,
    tags: Optional[str] = None,  # Comma-separated
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0)
):
    """
    List memories with optional filtering and pagination.

    Query Parameters:
    - category: Filter by category
    - project_name: Filter by project
    - cluster_id: Filter by cluster
    - min_importance: Minimum importance score (1-10)
    - tags: Comma-separated tag list
    - limit: Maximum results (default 50, max 200)
    - offset: Pagination offset

    Returns:
    - memories: List of memory objects
    - total: Total count matching filters
    - limit: Applied limit
    - offset: Applied offset
    - has_more: Whether more rows exist beyond this page
    """
    conn = get_db_connection()
    conn.row_factory = dict_factory
    try:
        cursor = conn.cursor()

        # Build the WHERE clause once and share it between the page query
        # and the COUNT.  BUGFIX: the original rebuilt the filters for the
        # count and forgot the tags filter, so `total` and `has_more` were
        # wrong whenever tags were supplied.
        conditions = []
        params: List[Any] = []

        if category:
            conditions.append("category = ?")
            params.append(category)

        if project_name:
            conditions.append("project_name = ?")
            params.append(project_name)

        if cluster_id is not None:
            conditions.append("cluster_id = ?")
            params.append(cluster_id)

        # `is not None` (not truthiness) so an explicit 0 is honored.
        if min_importance is not None:
            conditions.append("importance >= ?")
            params.append(min_importance)

        if tags:
            for tag in (t.strip() for t in tags.split(',')):
                conditions.append("tags LIKE ?")
                params.append(f'%{tag}%')

        where_sql = (" WHERE " + " AND ".join(conditions)) if conditions else ""

        cursor.execute(
            f"""
            SELECT
                id, content, summary, category, project_name, project_path,
                importance, cluster_id, depth, access_count, parent_id,
                created_at, updated_at, last_accessed, tags, memory_type
            FROM memories{where_sql}
            ORDER BY updated_at DESC LIMIT ? OFFSET ?
            """,
            params + [limit, offset],
        )
        memories = cursor.fetchall()

        # Total count under the *same* filters.
        cursor.execute(f"SELECT COUNT(*) as total FROM memories{where_sql}", params)
        total = cursor.fetchone()['total']

        return {
            "memories": memories,
            "total": total,
            "limit": limit,
            "offset": offset,
            "has_more": (offset + limit) < total
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
    finally:
        # BUGFIX: close on every path; the original leaked the connection
        # whenever a query raised.
        conn.close()
461
+
462
+
463
@app.get("/api/graph")
async def get_graph(
    max_nodes: int = Query(100, ge=10, le=500),
    min_importance: int = Query(1, ge=1, le=10)
):
    """
    Get knowledge graph data for D3.js force-directed visualization.

    Parameters:
    - max_nodes: Maximum nodes to return (default 100, max 500)
    - min_importance: Minimum importance filter (default 1)

    Returns:
    - nodes: List of memory nodes with metadata
    - links: List of edges between memories
    - clusters: Cluster information
    - metadata: Graph statistics
    """
    try:
        conn = get_db_connection()
        conn.row_factory = dict_factory
        try:
            cursor = conn.cursor()

            # Nodes: highest-importance, most recently updated first.
            cursor.execute("""
                SELECT
                    m.id, m.content, m.summary, m.category,
                    m.cluster_id, m.importance, m.project_name,
                    m.created_at, m.tags,
                    gn.entities
                FROM memories m
                LEFT JOIN graph_nodes gn ON m.id = gn.memory_id
                WHERE m.importance >= ?
                ORDER BY m.importance DESC, m.updated_at DESC
                LIMIT ?
            """, (min_importance, max_nodes))
            nodes = cursor.fetchall()

            # Parse stored entities JSON and attach content previews.
            for node in nodes:
                if node['entities']:
                    try:
                        node['entities'] = json.loads(node['entities'])
                    except (json.JSONDecodeError, TypeError):
                        # Malformed stored JSON: degrade to "no entities".
                        node['entities'] = []
                else:
                    node['entities'] = []

                if node['content']:
                    node['content_preview'] = (
                        node['content'][:100] + "..."
                        if len(node['content']) > 100
                        else node['content']
                    )

            # Edges restricted to the selected node set.
            memory_ids = [n['id'] for n in nodes]
            if memory_ids:
                placeholders = ','.join('?' * len(memory_ids))
                cursor.execute(f"""
                    SELECT
                        source_memory_id as source,
                        target_memory_id as target,
                        weight,
                        relationship_type,
                        shared_entities
                    FROM graph_edges
                    WHERE source_memory_id IN ({placeholders})
                    AND target_memory_id IN ({placeholders})
                    ORDER BY weight DESC
                """, memory_ids + memory_ids)
                links = cursor.fetchall()

                for link in links:
                    if link['shared_entities']:
                        try:
                            link['shared_entities'] = json.loads(link['shared_entities'])
                        except (json.JSONDecodeError, TypeError):
                            link['shared_entities'] = []
            else:
                links = []

            # Per-cluster aggregates (size, average importance).
            cursor.execute("""
                SELECT
                    cluster_id,
                    COUNT(*) as size,
                    AVG(importance) as avg_importance
                FROM memories
                WHERE cluster_id IS NOT NULL
                GROUP BY cluster_id
            """)
            clusters = cursor.fetchall()
        finally:
            # BUGFIX: close even when a query raises (original leaked the
            # connection on any error after connect).
            conn.close()

        return {
            "nodes": nodes,
            "links": links,
            "clusters": clusters,
            "metadata": {
                "node_count": len(nodes),
                "edge_count": len(links),
                "cluster_count": len(clusters),
                "filters_applied": {
                    "max_nodes": max_nodes,
                    "min_importance": min_importance
                }
            }
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Graph error: {str(e)}")
578
+
579
+
580
@app.get("/api/timeline")
async def get_timeline(
    days: int = Query(30, ge=1, le=365),
    group_by: str = Query("day", pattern="^(day|week|month)$")
):
    """
    Get temporal view of memory creation with flexible grouping.

    Parameters:
    - days: Number of days to look back (default 30, max 365)
    - group_by: Aggregation period ('day', 'week', 'month')

    Returns:
    - timeline: Aggregated memory counts by period
    - category_trend: Category breakdown over time
    - period_stats: Statistics for the period
    """
    try:
        conn = get_db_connection()
        conn.row_factory = dict_factory
        try:
            cursor = conn.cursor()

            # Map the group_by value to a SQLite date expression.  Safe to
            # interpolate into SQL: Query(pattern=...) restricts group_by to
            # day/week/month, and `days` is always bound as a parameter.
            if group_by == "day":
                date_group = "DATE(created_at)"
            elif group_by == "week":
                date_group = "strftime('%Y-W%W', created_at)"
            else:  # month
                date_group = "strftime('%Y-%m', created_at)"

            # Per-period aggregates.
            cursor.execute(f"""
                SELECT
                    {date_group} as period,
                    COUNT(*) as count,
                    AVG(importance) as avg_importance,
                    MIN(importance) as min_importance,
                    MAX(importance) as max_importance,
                    GROUP_CONCAT(DISTINCT category) as categories
                FROM memories
                WHERE created_at >= datetime('now', '-' || ? || ' days')
                GROUP BY {date_group}
                ORDER BY period DESC
            """, (days,))
            timeline = cursor.fetchall()

            # Category breakdown per period.
            cursor.execute(f"""
                SELECT
                    {date_group} as period,
                    category,
                    COUNT(*) as count
                FROM memories
                WHERE created_at >= datetime('now', '-' || ? || ' days')
                AND category IS NOT NULL
                GROUP BY {date_group}, category
                ORDER BY period DESC, count DESC
            """, (days,))
            category_trend = cursor.fetchall()

            # Whole-window statistics.
            cursor.execute("""
                SELECT
                    COUNT(*) as total_memories,
                    COUNT(DISTINCT category) as categories_used,
                    COUNT(DISTINCT project_name) as projects_active,
                    AVG(importance) as avg_importance
                FROM memories
                WHERE created_at >= datetime('now', '-' || ? || ' days')
            """, (days,))
            period_stats = cursor.fetchone()
        finally:
            # BUGFIX: release the connection even if a query raises (the
            # original leaked it on error).
            conn.close()

        return {
            "timeline": timeline,
            "category_trend": category_trend,
            "period_stats": period_stats,
            "parameters": {
                "days": days,
                "group_by": group_by
            }
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Timeline error: {str(e)}")
666
+
667
+
668
@app.get("/api/clusters")
async def get_clusters():
    """
    Get cluster information with member counts, themes, and statistics.

    Returns:
    - clusters: List of clusters with metadata (including top_entities)
    - total_clusters: Total number of clusters
    - unclustered_count: Memories without cluster assignment
    """
    # Hoisted: the original did `from collections import Counter` inside the
    # per-cluster loop.
    from collections import Counter

    try:
        conn = get_db_connection()
        conn.row_factory = dict_factory
        try:
            cursor = conn.cursor()

            # Per-cluster aggregates.
            cursor.execute("""
                SELECT
                    cluster_id,
                    COUNT(*) as member_count,
                    AVG(importance) as avg_importance,
                    MIN(importance) as min_importance,
                    MAX(importance) as max_importance,
                    GROUP_CONCAT(DISTINCT category) as categories,
                    GROUP_CONCAT(DISTINCT project_name) as projects,
                    MIN(created_at) as first_memory,
                    MAX(created_at) as latest_memory
                FROM memories
                WHERE cluster_id IS NOT NULL
                GROUP BY cluster_id
                ORDER BY member_count DESC
            """)
            clusters = cursor.fetchall()

            # Dominant entities per cluster, aggregated over all members.
            for cluster in clusters:
                cursor.execute("""
                    SELECT gn.entities
                    FROM graph_nodes gn
                    JOIN memories m ON gn.memory_id = m.id
                    WHERE m.cluster_id = ?
                """, (cluster['cluster_id'],))

                all_entities = []
                for row in cursor.fetchall():
                    if row['entities']:
                        try:
                            all_entities.extend(json.loads(row['entities']))
                        except (json.JSONDecodeError, TypeError):
                            # Malformed stored JSON: skip this row's entities.
                            pass

                cluster['top_entities'] = [
                    {"entity": e, "count": c}
                    for e, c in Counter(all_entities).most_common(10)
                ]

            # Memories not assigned to any cluster.
            cursor.execute("""
                SELECT COUNT(*) as count
                FROM memories
                WHERE cluster_id IS NULL
            """)
            unclustered = cursor.fetchone()['count']
        finally:
            # BUGFIX: close on error paths too (original leaked the
            # connection when any query raised).
            conn.close()

        return {
            "clusters": clusters,
            "total_clusters": len(clusters),
            "unclustered_count": unclustered
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Cluster error: {str(e)}")
749
+
750
+
751
@app.get("/api/clusters/{cluster_id}")
async def get_cluster_detail(
    cluster_id: int,
    limit: int = Query(50, ge=1, le=200)
):
    """
    Get detailed view of a specific cluster.

    Parameters:
    - cluster_id: Cluster ID to retrieve
    - limit: Maximum members to return

    Returns:
    - cluster_info: Cluster metadata and statistics
    - members: List of memories in the cluster
    - connections: Internal edges within cluster

    Raises:
    - HTTPException 404 if the cluster has no members (unknown id)
    """
    try:
        conn = get_db_connection()
        conn.row_factory = dict_factory
        try:
            cursor = conn.cursor()

            # Cluster members, most important / newest first.
            cursor.execute("""
                SELECT
                    m.id, m.content, m.summary, m.category,
                    m.project_name, m.importance, m.created_at,
                    m.tags, gn.entities
                FROM memories m
                LEFT JOIN graph_nodes gn ON m.id = gn.memory_id
                WHERE m.cluster_id = ?
                ORDER BY m.importance DESC, m.created_at DESC
                LIMIT ?
            """, (cluster_id, limit))
            members = cursor.fetchall()

            # BUGFIX: raising 404 here previously leaked the open
            # connection; the finally below now closes it on this path.
            if not members:
                raise HTTPException(status_code=404, detail="Cluster not found")

            # Parse stored entities JSON.
            for member in members:
                if member['entities']:
                    try:
                        member['entities'] = json.loads(member['entities'])
                    except (json.JSONDecodeError, TypeError):
                        member['entities'] = []

            # Cluster-wide statistics.
            cursor.execute("""
                SELECT
                    COUNT(*) as total_members,
                    AVG(importance) as avg_importance,
                    COUNT(DISTINCT category) as category_count,
                    COUNT(DISTINCT project_name) as project_count
                FROM memories
                WHERE cluster_id = ?
            """, (cluster_id,))
            stats = cursor.fetchone()

            # Edges between members of this cluster.
            member_ids = [m['id'] for m in members]
            if member_ids:
                placeholders = ','.join('?' * len(member_ids))
                cursor.execute(f"""
                    SELECT
                        source_memory_id as source,
                        target_memory_id as target,
                        weight,
                        shared_entities
                    FROM graph_edges
                    WHERE source_memory_id IN ({placeholders})
                    AND target_memory_id IN ({placeholders})
                """, member_ids + member_ids)
                connections = cursor.fetchall()
            else:
                connections = []
        finally:
            conn.close()

        return {
            "cluster_info": {
                "cluster_id": cluster_id,
                **stats
            },
            "members": members,
            "connections": connections
        }

    except HTTPException:
        # Let deliberate HTTP errors (the 404 above) pass through unwrapped.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Cluster detail error: {str(e)}")
843
+
844
+
845
@app.get("/api/patterns")
async def get_patterns():
    """
    Get learned patterns from Pattern Learner (Layer 4).

    Returns:
    - patterns: Grouped patterns by type
    - total_patterns: Total pattern count
    - pattern_types: List of pattern types found
    - confidence_stats: Confidence distribution

    This endpoint is deliberately best-effort: on any failure it returns an
    empty result with an "error" key instead of raising.
    """
    try:
        conn = get_db_connection()
        conn.row_factory = dict_factory
        try:
            cursor = conn.cursor()

            # Pattern learning is optional — the table may not exist yet.
            cursor.execute("""
                SELECT name FROM sqlite_master
                WHERE type='table' AND name='identity_patterns'
            """)
            if not cursor.fetchone():
                # BUGFIX: this early return previously skipped conn.close();
                # the finally below now covers it.
                return {
                    "patterns": {},
                    "total_patterns": 0,
                    "pattern_types": [],
                    "message": "Pattern learning not initialized. Run pattern learning first."
                }

            cursor.execute("""
                SELECT
                    pattern_type, key, value, confidence,
                    evidence_count, updated_at as last_updated
                FROM identity_patterns
                ORDER BY confidence DESC, evidence_count DESC
            """)
            patterns = cursor.fetchall()
        finally:
            conn.close()

        # Values are stored as JSON; keep the raw string if undecodable.
        for pattern in patterns:
            if pattern['value']:
                try:
                    pattern['value'] = json.loads(pattern['value'])
                except (json.JSONDecodeError, TypeError):
                    pass

        # Group by pattern type.
        grouped = defaultdict(list)
        for pattern in patterns:
            grouped[pattern['pattern_type']].append(pattern)

        # Confidence distribution over all patterns.
        confidences = [p['confidence'] for p in patterns]
        confidence_stats = {
            "avg": sum(confidences) / len(confidences) if confidences else 0,
            "min": min(confidences) if confidences else 0,
            "max": max(confidences) if confidences else 0
        }

        return {
            "patterns": dict(grouped),
            "total_patterns": len(patterns),
            "pattern_types": list(grouped.keys()),
            "confidence_stats": confidence_stats
        }

    except Exception as e:
        # Best-effort contract: degrade to an empty payload with the error.
        return {
            "patterns": {},
            "total_patterns": 0,
            "error": str(e)
        }
920
+
921
+
922
@app.get("/api/stats")
async def get_stats():
    """
    Get comprehensive system statistics.

    Returns:
    - overview: Basic counts and metrics (includes recent_memories_7d)
    - categories: Category breakdown (top 10)
    - projects: Project breakdown (top 10)
    - importance_distribution: Importance score distribution
    - graph_stats: Graph-specific metrics (density, average degree)
    """
    try:
        conn = get_db_connection()
        conn.row_factory = dict_factory
        try:
            cursor = conn.cursor()

            # Basic counts
            cursor.execute("SELECT COUNT(*) as total FROM memories")
            total_memories = cursor.fetchone()['total']

            cursor.execute("SELECT COUNT(*) as total FROM sessions")
            total_sessions = cursor.fetchone()['total']

            cursor.execute("SELECT COUNT(DISTINCT cluster_id) as total FROM memories WHERE cluster_id IS NOT NULL")
            total_clusters = cursor.fetchone()['total']

            cursor.execute("SELECT COUNT(*) as total FROM graph_nodes")
            total_graph_nodes = cursor.fetchone()['total']

            cursor.execute("SELECT COUNT(*) as total FROM graph_edges")
            total_graph_edges = cursor.fetchone()['total']

            # Category breakdown (top 10)
            cursor.execute("""
                SELECT category, COUNT(*) as count
                FROM memories
                WHERE category IS NOT NULL
                GROUP BY category
                ORDER BY count DESC
                LIMIT 10
            """)
            categories = cursor.fetchall()

            # Project breakdown (top 10)
            cursor.execute("""
                SELECT project_name, COUNT(*) as count
                FROM memories
                WHERE project_name IS NOT NULL
                GROUP BY project_name
                ORDER BY count DESC
                LIMIT 10
            """)
            projects = cursor.fetchall()

            # Recent activity (last 7 days)
            cursor.execute("""
                SELECT COUNT(*) as count
                FROM memories
                WHERE created_at >= datetime('now', '-7 days')
            """)
            recent_memories = cursor.fetchone()['count']

            # Importance distribution
            cursor.execute("""
                SELECT importance, COUNT(*) as count
                FROM memories
                GROUP BY importance
                ORDER BY importance DESC
            """)
            importance_dist = cursor.fetchall()
        finally:
            # BUGFIX: close on error paths too; the original leaked the
            # connection whenever a query raised.
            conn.close()

        # Database size on disk.
        db_size = DB_PATH.stat().st_size if DB_PATH.exists() else 0

        # Graph density: actual edges / max possible undirected edges.
        if total_graph_nodes > 1:
            max_edges = (total_graph_nodes * (total_graph_nodes - 1)) / 2
            density = total_graph_edges / max_edges if max_edges > 0 else 0
        else:
            density = 0

        return {
            "overview": {
                "total_memories": total_memories,
                "total_sessions": total_sessions,
                "total_clusters": total_clusters,
                "graph_nodes": total_graph_nodes,
                "graph_edges": total_graph_edges,
                "db_size_mb": round(db_size / (1024 * 1024), 2),
                "recent_memories_7d": recent_memories
            },
            "categories": categories,
            "projects": projects,
            "importance_distribution": importance_dist,
            "graph_stats": {
                "density": round(density, 4),
                "avg_degree": round(2 * total_graph_edges / total_graph_nodes, 2) if total_graph_nodes > 0 else 0
            }
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Stats error: {str(e)}")
1028
+
1029
+
1030
@app.post("/api/search")
async def search_memories(request: SearchRequest):
    """
    Advanced semantic search with filters.

    Request body:
    - query: Search query (required)
    - limit: Max results (default 10, max 100)
    - min_score: Minimum similarity score (default 0.3)
    - category: Optional category filter
    - project_name: Optional project filter
    - cluster_id: Optional cluster filter
    - date_from: Optional start date (YYYY-MM-DD)
    - date_to: Optional end date (YYYY-MM-DD)

    Returns:
    - results: Matching memories with scores
    - query: Original query
    - total: Result count
    - filters_applied: Applied filters
    """
    try:
        store = MemoryStoreV2(DB_PATH)
        # Over-fetch so post-filtering can still fill the requested limit.
        candidates = store.search(
            query=request.query,
            limit=request.limit * 2
        )

        def _passes(hit: dict) -> bool:
            """Apply every optional filter; a hit must clear them all."""
            if hit.get('score', 0) < request.min_score:
                return False
            if request.category and hit.get('category') != request.category:
                return False
            if request.project_name and hit.get('project_name') != request.project_name:
                return False
            if request.cluster_id is not None and hit.get('cluster_id') != request.cluster_id:
                return False
            created = hit.get('created_at', '')
            # ISO date strings compare correctly as plain strings.
            if request.date_from and created < request.date_from:
                return False
            if request.date_to and created > request.date_to:
                return False
            return True

        filtered = []
        for hit in candidates:
            if not _passes(hit):
                continue
            filtered.append(hit)
            if len(filtered) >= request.limit:
                break

        return {
            "query": request.query,
            "results": filtered,
            "total": len(filtered),
            "filters_applied": {
                "category": request.category,
                "project_name": request.project_name,
                "cluster_id": request.cluster_id,
                "date_from": request.date_from,
                "date_to": request.date_to,
                "min_score": request.min_score
            }
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Search error: {str(e)}")
1109
+
1110
+
1111
+ # ============================================================================
1112
+ # API Endpoints - Profile Management
1113
+ # ============================================================================
1114
+
1115
+ @app.get("/api/profiles")
1116
+ async def list_profiles():
1117
+ """
1118
+ List available memory profiles.
1119
+
1120
+ Returns:
1121
+ - profiles: List of profile names
1122
+ - active_profile: Currently active profile
1123
+ - total_profiles: Profile count
1124
+ """
1125
+ try:
1126
+ PROFILES_DIR.mkdir(exist_ok=True)
1127
+
1128
+ profiles = []
1129
+ for profile_dir in PROFILES_DIR.iterdir():
1130
+ if profile_dir.is_dir():
1131
+ db_file = profile_dir / "memory.db"
1132
+ if db_file.exists():
1133
+ profiles.append({
1134
+ "name": profile_dir.name,
1135
+ "path": str(profile_dir),
1136
+ "size_mb": round(db_file.stat().st_size / (1024 * 1024), 2),
1137
+ "modified": datetime.fromtimestamp(db_file.stat().st_mtime).isoformat()
1138
+ })
1139
+
1140
+ # Determine active profile (default is main)
1141
+ active = "default"
1142
+
1143
+ return {
1144
+ "profiles": profiles,
1145
+ "active_profile": active,
1146
+ "total_profiles": len(profiles)
1147
+ }
1148
+
1149
+ except Exception as e:
1150
+ raise HTTPException(status_code=500, detail=f"Profile list error: {str(e)}")
1151
+
1152
+
1153
+ @app.post("/api/profiles/{name}/switch")
1154
+ async def switch_profile(name: str):
1155
+ """
1156
+ Switch active memory profile.
1157
+
1158
+ Parameters:
1159
+ - name: Profile name to switch to
1160
+
1161
+ Returns:
1162
+ - success: Switch status
1163
+ - active_profile: New active profile
1164
+ - message: Status message
1165
+ """
1166
+ try:
1167
+ if not validate_profile_name(name):
1168
+ raise HTTPException(
1169
+ status_code=400,
1170
+ detail="Invalid profile name. Use alphanumeric, underscore, or hyphen only."
1171
+ )
1172
+
1173
+ profile_path = PROFILES_DIR / name / "memory.db"
1174
+
1175
+ if not profile_path.exists():
1176
+ raise HTTPException(
1177
+ status_code=404,
1178
+ detail=f"Profile '{name}' not found"
1179
+ )
1180
+
1181
+ # Note: Actual profile switching would require modifying DB_PATH
1182
+ # This is a placeholder implementation
1183
+ return {
1184
+ "success": True,
1185
+ "active_profile": name,
1186
+ "message": f"Profile switched to '{name}'. Restart server to apply changes."
1187
+ }
1188
+
1189
+ except HTTPException:
1190
+ raise
1191
+ except Exception as e:
1192
+ raise HTTPException(status_code=500, detail=f"Profile switch error: {str(e)}")
1193
+
1194
+
1195
+ # ============================================================================
1196
+ # API Endpoints - Import/Export
1197
+ # ============================================================================
1198
+
1199
+ @app.get("/api/export")
1200
+ async def export_memories(
1201
+ format: str = Query("json", pattern="^(json|jsonl)$"),
1202
+ category: Optional[str] = None,
1203
+ project_name: Optional[str] = None
1204
+ ):
1205
+ """
1206
+ Export memories as JSON or JSONL.
1207
+
1208
+ Parameters:
1209
+ - format: Export format ('json' or 'jsonl')
1210
+ - category: Optional category filter
1211
+ - project_name: Optional project filter
1212
+
1213
+ Returns:
1214
+ - Downloadable JSON file with memories
1215
+ """
1216
+ try:
1217
+ conn = get_db_connection()
1218
+ conn.row_factory = dict_factory
1219
+ cursor = conn.cursor()
1220
+
1221
+ # Build query with filters
1222
+ query = "SELECT * FROM memories WHERE 1=1"
1223
+ params = []
1224
+
1225
+ if category:
1226
+ query += " AND category = ?"
1227
+ params.append(category)
1228
+
1229
+ if project_name:
1230
+ query += " AND project_name = ?"
1231
+ params.append(project_name)
1232
+
1233
+ query += " ORDER BY created_at"
1234
+
1235
+ cursor.execute(query, params)
1236
+ memories = cursor.fetchall()
1237
+ conn.close()
1238
+
1239
+ # Format export
1240
+ if format == "jsonl":
1241
+ # JSON Lines format
1242
+ content = "\n".join(json.dumps(m) for m in memories)
1243
+ media_type = "application/x-ndjson"
1244
+ else:
1245
+ # Standard JSON
1246
+ content = json.dumps({
1247
+ "version": "2.2.0",
1248
+ "exported_at": datetime.now().isoformat(),
1249
+ "total_memories": len(memories),
1250
+ "filters": {
1251
+ "category": category,
1252
+ "project_name": project_name
1253
+ },
1254
+ "memories": memories
1255
+ }, indent=2)
1256
+ media_type = "application/json"
1257
+
1258
+ # Compress if large
1259
+ if len(content) > 10000:
1260
+ compressed = gzip.compress(content.encode())
1261
+ return StreamingResponse(
1262
+ io.BytesIO(compressed),
1263
+ media_type="application/gzip",
1264
+ headers={
1265
+ "Content-Disposition": f"attachment; filename=memories_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.{format}.gz"
1266
+ }
1267
+ )
1268
+ else:
1269
+ return StreamingResponse(
1270
+ io.BytesIO(content.encode()),
1271
+ media_type=media_type,
1272
+ headers={
1273
+ "Content-Disposition": f"attachment; filename=memories_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.{format}"
1274
+ }
1275
+ )
1276
+
1277
+ except Exception as e:
1278
+ raise HTTPException(status_code=500, detail=f"Export error: {str(e)}")
1279
+
1280
+
1281
+ @app.post("/api/import")
1282
+ async def import_memories(file: UploadFile = File(...)):
1283
+ """
1284
+ Import memories from JSON file.
1285
+
1286
+ Parameters:
1287
+ - file: JSON file containing memories
1288
+
1289
+ Returns:
1290
+ - success: Import status
1291
+ - imported_count: Number of memories imported
1292
+ - skipped_count: Number of duplicates skipped
1293
+ - errors: List of import errors
1294
+ """
1295
+ try:
1296
+ # Read file content
1297
+ content = await file.read()
1298
+
1299
+ # Handle gzip compressed files
1300
+ if file.filename.endswith('.gz'):
1301
+ content = gzip.decompress(content)
1302
+
1303
+ # Parse JSON
1304
+ try:
1305
+ data = json.loads(content)
1306
+ except json.JSONDecodeError as e:
1307
+ raise HTTPException(status_code=400, detail=f"Invalid JSON: {str(e)}")
1308
+
1309
+ # Extract memories array
1310
+ if isinstance(data, dict) and 'memories' in data:
1311
+ memories = data['memories']
1312
+ elif isinstance(data, list):
1313
+ memories = data
1314
+ else:
1315
+ raise HTTPException(status_code=400, detail="Invalid format: expected 'memories' array")
1316
+
1317
+ # Import memories
1318
+ store = MemoryStoreV2(DB_PATH)
1319
+ imported = 0
1320
+ skipped = 0
1321
+ errors = []
1322
+
1323
+ for idx, memory in enumerate(memories):
1324
+ try:
1325
+ # Validate required fields
1326
+ if 'content' not in memory:
1327
+ errors.append(f"Memory {idx}: missing 'content' field")
1328
+ continue
1329
+
1330
+ # Add memory
1331
+ store.add_memory(
1332
+ content=memory.get('content'),
1333
+ summary=memory.get('summary'),
1334
+ project_path=memory.get('project_path'),
1335
+ project_name=memory.get('project_name'),
1336
+ tags=memory.get('tags', '').split(',') if memory.get('tags') else None,
1337
+ category=memory.get('category'),
1338
+ importance=memory.get('importance', 5)
1339
+ )
1340
+ imported += 1
1341
+
1342
+ # Broadcast update to WebSocket clients
1343
+ await manager.broadcast({
1344
+ "type": "memory_added",
1345
+ "memory_id": imported,
1346
+ "timestamp": datetime.now().isoformat()
1347
+ })
1348
+
1349
+ except Exception as e:
1350
+ if "UNIQUE constraint failed" in str(e):
1351
+ skipped += 1
1352
+ else:
1353
+ errors.append(f"Memory {idx}: {str(e)}")
1354
+
1355
+ return {
1356
+ "success": True,
1357
+ "imported_count": imported,
1358
+ "skipped_count": skipped,
1359
+ "total_processed": len(memories),
1360
+ "errors": errors[:10] # Limit error list
1361
+ }
1362
+
1363
+ except HTTPException:
1364
+ raise
1365
+ except Exception as e:
1366
+ raise HTTPException(status_code=500, detail=f"Import error: {str(e)}")
1367
+
1368
+
1369
+ # ============================================================================
1370
+ # WebSocket Endpoint - Real-Time Updates
1371
+ # ============================================================================
1372
+
1373
+ @app.websocket("/ws/updates")
1374
+ async def websocket_updates(websocket: WebSocket):
1375
+ """
1376
+ WebSocket endpoint for real-time memory updates.
1377
+
1378
+ Broadcasts events:
1379
+ - memory_added: New memory created
1380
+ - memory_updated: Memory modified
1381
+ - cluster_updated: Cluster recalculated
1382
+ - system_stats: Periodic statistics update
1383
+ """
1384
+ await manager.connect(websocket)
1385
+
1386
+ try:
1387
+ # Send initial connection confirmation
1388
+ await websocket.send_json({
1389
+ "type": "connected",
1390
+ "message": "WebSocket connection established",
1391
+ "timestamp": datetime.now().isoformat()
1392
+ })
1393
+
1394
+ # Keep connection alive and handle incoming messages
1395
+ while True:
1396
+ try:
1397
+ # Receive message from client (ping/pong, commands, etc.)
1398
+ data = await websocket.receive_json()
1399
+
1400
+ # Handle client requests
1401
+ if data.get('type') == 'ping':
1402
+ await websocket.send_json({
1403
+ "type": "pong",
1404
+ "timestamp": datetime.now().isoformat()
1405
+ })
1406
+
1407
+ elif data.get('type') == 'get_stats':
1408
+ # Send current stats
1409
+ stats = await get_stats()
1410
+ await websocket.send_json({
1411
+ "type": "stats_update",
1412
+ "data": stats,
1413
+ "timestamp": datetime.now().isoformat()
1414
+ })
1415
+
1416
+ except WebSocketDisconnect:
1417
+ break
1418
+ except Exception as e:
1419
+ await websocket.send_json({
1420
+ "type": "error",
1421
+ "message": str(e),
1422
+ "timestamp": datetime.now().isoformat()
1423
+ })
1424
+
1425
+ finally:
1426
+ manager.disconnect(websocket)
1427
+
1428
+
1429
+ # ============================================================================
1430
+ # Server Startup
1431
+ # ============================================================================
1432
+
1433
+ if __name__ == "__main__":
1434
+ import argparse
1435
+ parser = argparse.ArgumentParser(description="SuperLocalMemory V2 - Web Dashboard")
1436
+ parser.add_argument("--port", type=int, default=8765, help="Port to run on (default 8765)")
1437
+ parser.add_argument("--profile", type=str, default=None, help="Memory profile to use")
1438
+ args = parser.parse_args()
1439
+
1440
+ import socket
1441
+
1442
+ def find_available_port(preferred):
1443
+ """Try preferred port, then scan next 20 ports."""
1444
+ for port in [preferred] + list(range(preferred + 1, preferred + 20)):
1445
+ try:
1446
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
1447
+ s.bind(("127.0.0.1", port))
1448
+ return port
1449
+ except OSError:
1450
+ continue
1451
+ return preferred
1452
+
1453
+ ui_port = find_available_port(args.port)
1454
+ if ui_port != args.port:
1455
+ print(f"\n Port {args.port} in use — using {ui_port} instead\n")
1456
+
1457
+ print("=" * 70)
1458
+ print(" SuperLocalMemory V2.3.0 - FastAPI UI Server")
1459
+ print(" Copyright (c) 2026 Varun Pratap Bhardwaj")
1460
+ print("=" * 70)
1461
+ print(f" Database: {DB_PATH}")
1462
+ print(f" UI Directory: {UI_DIR}")
1463
+ print(f" Profiles: {PROFILES_DIR}")
1464
+ print("=" * 70)
1465
+ print(f"\n Server URLs:")
1466
+ print(f" - Main UI: http://localhost:{ui_port}")
1467
+ print(f" - API Docs: http://localhost:{ui_port}/api/docs")
1468
+ print(f" - Health Check: http://localhost:{ui_port}/health")
1469
+ print(f" - WebSocket: ws://localhost:{ui_port}/ws/updates")
1470
+ print("\n Press Ctrl+C to stop\n")
1471
+
1472
+ # SECURITY: Bind to localhost only to prevent unauthorized network access
1473
+ # For remote access, use a reverse proxy (nginx/caddy) with authentication
1474
+ uvicorn.run(
1475
+ app,
1476
+ host="127.0.0.1", # localhost only - NEVER use 0.0.0.0 without auth
1477
+ port=ui_port,
1478
+ log_level="info",
1479
+ access_log=True
1480
+ )