superlocalmemory 2.3.2 → 2.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/api_server.py ADDED
@@ -0,0 +1,659 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ SuperLocalMemory V2 - Intelligent Local Memory System
4
+ Copyright (c) 2026 Varun Pratap Bhardwaj
5
+ Licensed under MIT License
6
+
7
+ Repository: https://github.com/varun369/SuperLocalMemoryV2
8
+ Author: Varun Pratap Bhardwaj (Solution Architect)
9
+
10
+ NOTICE: This software is protected by MIT License.
11
+ Attribution must be preserved in all copies or derivatives.
12
+ """
13
+
14
+ """
15
+ SuperLocalMemory V2 - FastAPI UI Server
16
+ Provides REST endpoints for memory visualization and exploration.
17
+ """
18
+
19
+ import sqlite3
20
+ import json
21
+ from pathlib import Path
22
+ from typing import Optional, List, Dict, Any
23
+ from datetime import datetime, timedelta
24
+
25
+ from fastapi import FastAPI, HTTPException, Query
26
+ from fastapi.staticfiles import StaticFiles
27
+ from fastapi.responses import HTMLResponse, JSONResponse
28
+ from pydantic import BaseModel
29
+ import uvicorn
30
+
31
+ # Import local modules
32
+ import sys
33
+ sys.path.insert(0, str(Path(__file__).parent / "src"))
34
+
35
+ from memory_store_v2 import MemoryStoreV2
36
+ from graph_engine import GraphEngine
37
+ from pattern_learner import PatternLearner
38
+
39
# Configuration
# All data lives under the user's home directory, so the server needs no
# per-project setup. DB_PATH must already exist (see get_db_connection).
MEMORY_DIR = Path.home() / ".claude-memory"
DB_PATH = MEMORY_DIR / "memory.db"
UI_DIR = Path(__file__).parent / "ui"

app = FastAPI(
    title="SuperLocalMemory V2 UI",
    description="Knowledge Graph Visualization for Local Memory System",
    version="2.0.0"
)

# Mount static files
# The ui/ directory is created eagerly so StaticFiles does not fail on a
# fresh install where no UI assets have been shipped yet.
UI_DIR.mkdir(exist_ok=True)
app.mount("/static", StaticFiles(directory=str(UI_DIR)), name="static")
53
+
54
+
55
+ # ============================================================================
56
+ # Request/Response Models
57
+ # ============================================================================
58
+
59
class SearchRequest(BaseModel):
    """Request body for POST /api/search."""

    # Free-text search query passed to MemoryStoreV2.search().
    query: str
    # Maximum number of results to request from the store.
    limit: int = 10
    # Results scoring below this similarity threshold are filtered out.
    min_score: float = 0.3
63
+
64
+
65
class MemoryFilter(BaseModel):
    """Filter criteria for memory listings.

    NOTE(review): not referenced by any endpoint in this module — the
    /api/memories endpoint takes the same fields as individual query
    parameters instead. Confirm external use before removing.
    """

    # Restrict to a single category.
    category: Optional[str] = None
    # Restrict to a single project.
    project_name: Optional[str] = None
    # Restrict to one cluster id.
    cluster_id: Optional[int] = None
    # Minimum importance score (inclusive).
    min_importance: Optional[int] = None
70
+
71
+
72
+ # ============================================================================
73
+ # Database Helper Functions
74
+ # ============================================================================
75
+
76
def get_db_connection():
    """Open a SQLite connection to the memory database.

    Raises:
        HTTPException: 500 when the database file has not been created yet.
    """
    if DB_PATH.exists():
        return sqlite3.connect(DB_PATH)
    raise HTTPException(status_code=500, detail="Memory database not found")
81
+
82
+
83
def dict_factory(cursor, row):
    """Map a SQLite result row onto ``{column_name: value}``.

    Intended for use as sqlite3's ``row_factory`` so queries return
    dicts instead of plain tuples.
    """
    names = (col[0] for col in cursor.description)
    return dict(zip(names, row))
87
+
88
+
89
+ # ============================================================================
90
+ # API Endpoints
91
+ # ============================================================================
92
+
93
@app.get("/", response_class=HTMLResponse)
async def root():
    """Serve the main UI page.

    Returns ui/index.html when it exists; otherwise a built-in fallback
    page listing the API endpoints so the server is still navigable
    right after install.
    """
    index_path = UI_DIR / "index.html"
    if not index_path.exists():
        # Fallback page, shown until the user ships ui/index.html.
        return """
        <html>
        <head><title>SuperLocalMemory V2</title></head>
        <body style="font-family: Arial; padding: 40px;">
            <h1>SuperLocalMemory V2 UI Server Running</h1>
            <p>UI not found. Please create ui/index.html</p>
            <h2>Available Endpoints:</h2>
            <ul>
                <li><a href="/docs">/docs - Interactive API Documentation</a></li>
                <li><a href="/api/stats">/api/stats - System Statistics</a></li>
                <li><a href="/api/memories">/api/memories - List Memories</a></li>
                <li><a href="/api/graph">/api/graph - Graph Data</a></li>
                <li><a href="/api/clusters">/api/clusters - Cluster Info</a></li>
                <li><a href="/api/patterns">/api/patterns - Learned Patterns</a></li>
                <li><a href="/api/timeline">/api/timeline - Timeline View</a></li>
                <li><a href="/api/tree">/api/tree - Tree Structure</a></li>
            </ul>
        </body>
        </html>
        """
    return index_path.read_text()
119
+
120
+
121
def _memory_filters(category, project_name, cluster_id, min_importance):
    """Return ``(where_sql, params)`` shared by the list and count queries.

    Building the filter once keeps the two queries in get_memories from
    drifting apart (the original duplicated this logic verbatim).
    """
    where = ""
    params: List[Any] = []
    if category:
        where += " AND category = ?"
        params.append(category)
    if project_name:
        where += " AND project_name = ?"
        params.append(project_name)
    if cluster_id is not None:  # cluster 0 is a valid id, so test for None
        where += " AND cluster_id = ?"
        params.append(cluster_id)
    if min_importance:  # truthiness on purpose: 0/None both mean "no minimum"
        where += " AND importance >= ?"
        params.append(min_importance)
    return where, params


@app.get("/api/memories")
async def get_memories(
    category: Optional[str] = None,
    project_name: Optional[str] = None,
    cluster_id: Optional[int] = None,
    min_importance: Optional[int] = None,
    limit: int = Query(50, le=200),
    offset: int = 0
):
    """
    List memories with optional filtering.

    Query Parameters:
    - category: Filter by category
    - project_name: Filter by project
    - cluster_id: Filter by cluster
    - min_importance: Minimum importance score
    - limit: Maximum results (default 50, max 200)
    - offset: Pagination offset

    Returns the page of memories plus the total match count for paging.
    """
    where, filter_params = _memory_filters(
        category, project_name, cluster_id, min_importance
    )

    conn = get_db_connection()
    try:
        conn.row_factory = dict_factory
        cursor = conn.cursor()

        query = """
            SELECT
                id, content, summary, category, project_name,
                importance, cluster_id, depth, access_count,
                created_at, updated_at, last_accessed, tags
            FROM memories
            WHERE 1=1
        """ + where + " ORDER BY updated_at DESC LIMIT ? OFFSET ?"
        cursor.execute(query, filter_params + [limit, offset])
        memories = cursor.fetchall()

        # Total matching rows (ignoring pagination) for the client's pager.
        cursor.execute(
            "SELECT COUNT(*) as total FROM memories WHERE 1=1" + where,
            filter_params,
        )
        total = cursor.fetchone()['total']
    finally:
        # Close even if a query fails so connections are never leaked.
        conn.close()

    return {
        "memories": memories,
        "total": total,
        "limit": limit,
        "offset": offset
    }
205
+
206
+
207
@app.get("/api/graph")
async def get_graph(max_nodes: int = Query(100, le=500)):
    """
    Get graph data for D3.js force-directed visualization.

    Parameters:
    - max_nodes: Cap on returned nodes (default 100, max 500).

    Returns:
    - nodes: List of memory nodes with metadata
    - links: List of edges between those nodes
    - metadata: node/edge counts
    """
    conn = get_db_connection()
    try:
        conn.row_factory = dict_factory
        cursor = conn.cursor()

        # Nodes: clustered memories only, most important / most recent first.
        cursor.execute("""
            SELECT
                m.id, m.content, m.summary, m.category,
                m.cluster_id, m.importance, m.project_name,
                gn.entities
            FROM memories m
            LEFT JOIN graph_nodes gn ON m.id = gn.memory_id
            WHERE m.cluster_id IS NOT NULL
            ORDER BY m.importance DESC, m.updated_at DESC
            LIMIT ?
        """, (max_nodes,))
        nodes = cursor.fetchall()

        for node in nodes:
            # entities is stored as JSON text; fall back to [] on bad data.
            if node['entities']:
                try:
                    node['entities'] = json.loads(node['entities'])
                except (ValueError, TypeError):  # was a bare except
                    node['entities'] = []
            else:
                node['entities'] = []

            # Truncate content for display.
            if node['content'] and len(node['content']) > 100:
                node['content_preview'] = node['content'][:100] + "..."
            else:
                node['content_preview'] = node['content']

        # Edges restricted to pairs where BOTH endpoints were selected above.
        memory_ids = [n['id'] for n in nodes]
        links = []
        if memory_ids:
            placeholders = ','.join('?' * len(memory_ids))
            cursor.execute(f"""
                SELECT
                    source_memory_id as source,
                    target_memory_id as target,
                    weight,
                    relationship_type,
                    shared_entities
                FROM graph_edges
                WHERE source_memory_id IN ({placeholders})
                AND target_memory_id IN ({placeholders})
                ORDER BY weight DESC
            """, memory_ids + memory_ids)
            links = cursor.fetchall()

            for link in links:
                # shared_entities is JSON text as well; [] on bad data.
                if link['shared_entities']:
                    try:
                        link['shared_entities'] = json.loads(link['shared_entities'])
                    except (ValueError, TypeError):  # was a bare except
                        link['shared_entities'] = []
    finally:
        # Close even if a query fails so connections are never leaked.
        conn.close()

    return {
        "nodes": nodes,
        "links": links,
        "metadata": {
            "node_count": len(nodes),
            "edge_count": len(links)
        }
    }
288
+
289
+
290
@app.get("/api/clusters")
async def get_clusters():
    """
    Get cluster information with member counts and themes.

    Returns list of clusters with:
    - cluster_id
    - member_count
    - avg_importance
    - categories / projects represented
    - top_entities: the 5 most common concepts among members
    """
    from collections import Counter  # hoisted out of the per-cluster loop

    conn = get_db_connection()
    try:
        conn.row_factory = dict_factory
        cursor = conn.cursor()

        # Per-cluster aggregate statistics.
        cursor.execute("""
            SELECT
                cluster_id,
                COUNT(*) as member_count,
                AVG(importance) as avg_importance,
                GROUP_CONCAT(DISTINCT category) as categories,
                GROUP_CONCAT(DISTINCT project_name) as projects
            FROM memories
            WHERE cluster_id IS NOT NULL
            GROUP BY cluster_id
            ORDER BY member_count DESC
        """)
        clusters = cursor.fetchall()

        # Dominant entities per cluster: merge the JSON entity lists of all
        # member memories, then keep the 5 most frequent.
        for cluster in clusters:
            cursor.execute("""
                SELECT gn.entities
                FROM graph_nodes gn
                JOIN memories m ON gn.memory_id = m.id
                WHERE m.cluster_id = ?
            """, (cluster['cluster_id'],))

            all_entities = []
            for row in cursor.fetchall():
                if row['entities']:
                    try:
                        all_entities.extend(json.loads(row['entities']))
                    except (ValueError, TypeError):  # was a bare except
                        pass  # skip malformed entity JSON

            cluster['top_entities'] = [
                {"entity": entity, "count": count}
                for entity, count in Counter(all_entities).most_common(5)
            ]
    finally:
        # Close even if a query fails so connections are never leaked.
        conn.close()

    return {
        "clusters": clusters,
        "total_clusters": len(clusters)
    }
356
+
357
+
358
@app.get("/api/patterns")
async def get_patterns():
    """
    Get learned patterns from Pattern Learner (Layer 4).

    Returns user preferences, coding style, and terminology patterns,
    grouped by pattern_type. On any failure this endpoint is deliberately
    best-effort: it returns an empty result with an "error" field rather
    than a 500.
    """
    try:
        # NOTE(review): the original bound this to an unused local. Kept for
        # possible constructor side effects — confirm before removing.
        PatternLearner(DB_PATH)

        conn = get_db_connection()
        try:
            conn.row_factory = dict_factory
            cursor = conn.cursor()

            cursor.execute("""
                SELECT
                    pattern_type, key, value, confidence,
                    evidence_count, last_updated
                FROM learned_patterns
                WHERE is_active = 1
                ORDER BY confidence DESC, evidence_count DESC
            """)
            patterns = cursor.fetchall()
        finally:
            # Close even on query failure so connections are never leaked.
            conn.close()

        # value is stored as JSON text; leave it untouched if unparsable.
        for pattern in patterns:
            if pattern['value']:
                try:
                    pattern['value'] = json.loads(pattern['value'])
                except (ValueError, TypeError):  # was a bare except
                    pass

        # Group patterns by their type for the UI.
        grouped: Dict[str, List[Dict[str, Any]]] = {}
        for pattern in patterns:
            grouped.setdefault(pattern['pattern_type'], []).append(pattern)

        return {
            "patterns": grouped,
            "total_patterns": len(patterns),
            "pattern_types": list(grouped.keys())
        }

    except Exception as e:
        # Best-effort: surface the error in the payload instead of raising.
        return {
            "patterns": {},
            "total_patterns": 0,
            "error": str(e)
        }
414
+
415
+
416
@app.get("/api/stats")
async def get_stats():
    """
    Get system statistics overview.

    Returns:
    - overview: totals for memories, sessions, clusters, graph nodes/edges,
      database size in MB, and memories created in the last 7 days
    - categories / projects: top-10 breakdowns by count
    - importance_distribution: memory count per importance value
    """
    conn = get_db_connection()
    try:
        conn.row_factory = dict_factory
        cursor = conn.cursor()

        def scalar(sql, key='total'):
            """Run a single-row aggregate query and return one column."""
            cursor.execute(sql)
            return cursor.fetchone()[key]

        # Basic counts
        total_memories = scalar("SELECT COUNT(*) as total FROM memories")
        total_sessions = scalar("SELECT COUNT(*) as total FROM sessions")
        total_clusters = scalar(
            "SELECT COUNT(DISTINCT cluster_id) as total FROM memories WHERE cluster_id IS NOT NULL"
        )
        total_graph_nodes = scalar("SELECT COUNT(*) as total FROM graph_nodes")
        total_graph_edges = scalar("SELECT COUNT(*) as total FROM graph_edges")

        # Category breakdown (top 10)
        cursor.execute("""
            SELECT category, COUNT(*) as count
            FROM memories
            WHERE category IS NOT NULL
            GROUP BY category
            ORDER BY count DESC
            LIMIT 10
        """)
        categories = cursor.fetchall()

        # Project breakdown (top 10)
        cursor.execute("""
            SELECT project_name, COUNT(*) as count
            FROM memories
            WHERE project_name IS NOT NULL
            GROUP BY project_name
            ORDER BY count DESC
            LIMIT 10
        """)
        projects = cursor.fetchall()

        # Recent activity (last 7 days)
        recent_memories = scalar("""
            SELECT COUNT(*) as count
            FROM memories
            WHERE created_at >= datetime('now', '-7 days')
        """, key='count')

        # Importance distribution
        cursor.execute("""
            SELECT importance, COUNT(*) as count
            FROM memories
            GROUP BY importance
            ORDER BY importance DESC
        """)
        importance_dist = cursor.fetchall()
    finally:
        # Close even if a query fails so connections are never leaked.
        conn.close()

    # Database size on disk (0 if the file is missing).
    db_size = DB_PATH.stat().st_size if DB_PATH.exists() else 0

    return {
        "overview": {
            "total_memories": total_memories,
            "total_sessions": total_sessions,
            "total_clusters": total_clusters,
            "graph_nodes": total_graph_nodes,
            "graph_edges": total_graph_edges,
            "db_size_mb": round(db_size / (1024 * 1024), 2),
            "recent_memories_7d": recent_memories
        },
        "categories": categories,
        "projects": projects,
        "importance_distribution": importance_dist
    }
505
+
506
+
507
@app.post("/api/search")
async def search_memories(request: SearchRequest):
    """
    Semantic search using TF-IDF similarity.

    Request body:
    - query: Search query
    - limit: Max results (default 10)
    - min_score: Minimum similarity score (default 0.3)

    Raises:
        HTTPException: 500 wrapping any store/search failure.
    """
    try:
        hits = MemoryStoreV2(DB_PATH).search(
            query=request.query,
            limit=request.limit,
        )
        # Drop results below the caller's similarity threshold.
        kept = [
            hit for hit in hits
            if hit.get('score', 0) >= request.min_score
        ]
        return {
            "query": request.query,
            "results": kept,
            "total": len(kept),
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
538
+
539
+
540
@app.get("/api/timeline")
async def get_timeline(days: int = Query(30, le=365)):
    """
    Get temporal view of memory creation over time.

    Parameters:
    - days: Number of days to look back (default 30, max 365)

    Returns per-day aggregates plus a per-category trend over the
    same window.
    """
    conn = get_db_connection()
    conn.row_factory = dict_factory
    cur = conn.cursor()

    # Per-day totals with average importance and the categories seen.
    daily_sql = """
        SELECT
            DATE(created_at) as date,
            COUNT(*) as count,
            AVG(importance) as avg_importance,
            GROUP_CONCAT(DISTINCT category) as categories
        FROM memories
        WHERE created_at >= datetime('now', '-' || ? || ' days')
        GROUP BY DATE(created_at)
        ORDER BY date DESC
    """
    cur.execute(daily_sql, (days,))
    per_day = cur.fetchall()

    # Same window, broken down by category.
    trend_sql = """
        SELECT
            DATE(created_at) as date,
            category,
            COUNT(*) as count
        FROM memories
        WHERE created_at >= datetime('now', '-' || ? || ' days')
        AND category IS NOT NULL
        GROUP BY DATE(created_at), category
        ORDER BY date DESC
    """
    cur.execute(trend_sql, (days,))
    by_category = cur.fetchall()

    conn.close()

    return {
        "timeline": per_day,
        "category_trend": by_category,
        "period_days": days
    }
589
+
590
+
591
def _assemble_tree(nodes):
    """Link flat memory rows into a parent/child forest.

    Two passes: register every node first, then attach children. The
    original single pass silently dropped any row whose parent appeared
    later in iteration order (or was missing), and would attach a
    self-parented row to itself. Orphans are now kept as extra roots.
    """
    node_map = {}
    for node in nodes:
        node['children'] = []
        node_map[node['id']] = node

    roots = []
    for node in nodes:
        parent = node_map.get(node['parent_id'])
        if parent is not None and parent is not node:
            parent['children'].append(node)
        else:
            # True root, unknown parent, or self-referencing row.
            roots.append(node)
    return roots


@app.get("/api/tree")
async def get_tree():
    """
    Get hierarchical tree structure.

    Returns nested tree of projects > categories > memories.
    """
    conn = get_db_connection()
    try:
        conn.row_factory = dict_factory
        cursor = conn.cursor()

        # tree_path ordering normally yields parents before children.
        cursor.execute("""
            SELECT
                id, parent_id, tree_path, depth,
                project_name, category,
                COALESCE(summary, SUBSTR(content, 1, 100)) as label,
                importance, created_at
            FROM memories
            ORDER BY tree_path
        """)
        nodes = cursor.fetchall()
    finally:
        # Close even if the query fails so connections are never leaked.
        conn.close()

    return {
        "tree": _assemble_tree(nodes),
        "total_nodes": len(nodes)
    }
635
+
636
+
637
# ============================================================================
# Server Startup
# ============================================================================

if __name__ == "__main__":
    # Startup banner: show where data is read from before binding the port.
    print("=" * 60)
    print("SuperLocalMemory V2 - UI Server")
    print("=" * 60)
    print(f"Database: {DB_PATH}")
    print(f"UI Directory: {UI_DIR}")
    print("=" * 60)
    print("\nStarting server on http://localhost:8000")
    print("API Documentation: http://localhost:8000/docs")
    print("\nPress Ctrl+C to stop\n")

    # SECURITY: Bind to localhost only to prevent network exposure
    # For network access, use a reverse proxy with authentication
    uvicorn.run(
        app,
        host="127.0.0.1",  # localhost only - NEVER use 0.0.0.0 without auth
        port=8000,
        log_level="info"
    )
package/bin/slm CHANGED
@@ -61,6 +61,15 @@ case "$1" in
61
61
  "$SLM_DIR/bin/superlocalmemoryv2:reset" "$@"
62
62
  ;;
63
63
 
64
+ ui|dashboard)
65
+ PORT="${2:-8765}"
66
+ echo "Starting SuperLocalMemory Web Dashboard..."
67
+ echo "URL: http://localhost:$PORT"
68
+ echo "API Docs: http://localhost:$PORT/api/docs"
69
+ echo ""
70
+ python3 "$SLM_DIR/ui_server.py" --port "$PORT"
71
+ ;;
72
+
64
73
  serve|server)
65
74
  PORT="${2:-8417}"
66
75
  echo "Starting SuperLocalMemory MCP HTTP server..."
@@ -146,7 +155,11 @@ PATTERN LEARNING:
146
155
  slm patterns list [threshold] List learned patterns
147
156
  slm patterns context [threshold] Get coding identity context
148
157
 
149
- HTTP SERVER:
158
+ WEB DASHBOARD:
159
+ slm ui [PORT] Start web dashboard (default port 8765)
160
+ Opens at http://localhost:PORT
161
+
162
+ HTTP SERVER (MCP):
150
163
  slm serve [PORT] Start MCP HTTP server (default port 8417)
151
164
  For ChatGPT/remote: ngrok http PORT
152
165
 
@@ -195,6 +208,7 @@ EOF
195
208
  echo " list - List recent memories"
196
209
  echo " status - System status"
197
210
  echo " context - Project context"
211
+ echo " ui - Start web dashboard"
198
212
  echo " serve - Start MCP HTTP server"
199
213
  echo " profile - Manage profiles"
200
214
  echo " graph - Knowledge graph operations"
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "superlocalmemory",
3
- "version": "2.3.2",
3
+ "version": "2.3.4",
4
4
  "description": "Your AI Finally Remembers You - Local-first intelligent memory system for AI assistants. Works with Claude, Cursor, Windsurf, VS Code/Copilot, Codex, and 16+ AI tools. 100% local, zero cloud dependencies.",
5
5
  "keywords": [
6
6
  "ai-memory",
@@ -70,7 +70,10 @@
70
70
  "CHANGELOG.md",
71
71
  "docs/",
72
72
  "requirements.txt",
73
- "requirements-core.txt"
73
+ "requirements-core.txt",
74
+ "ui/",
75
+ "ui_server.py",
76
+ "api_server.py"
74
77
  ],
75
78
  "preferGlobal": true,
76
79
  "funding": {