superlocalmemory 2.3.7 → 2.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/ui_server.py CHANGED
@@ -124,6 +124,23 @@ UI_DIR.mkdir(exist_ok=True)
124
124
  app.mount("/static", StaticFiles(directory=str(UI_DIR)), name="static")
125
125
 
126
126
 
127
+ # ============================================================================
128
+ # Profile Helper
129
+ # ============================================================================
130
+
131
+ def get_active_profile() -> str:
132
+ """Read the active profile from profiles.json. Falls back to 'default'."""
133
+ config_file = MEMORY_DIR / "profiles.json"
134
+ if config_file.exists():
135
+ try:
136
+ with open(config_file, 'r') as f:
137
+ pconfig = json.load(f)
138
+ return pconfig.get('active_profile', 'default')
139
+ except (json.JSONDecodeError, IOError):
140
+ pass
141
+ return 'default'
142
+
143
+
127
144
  # ============================================================================
128
145
  # Request/Response Models
129
146
  # ============================================================================
@@ -387,6 +404,8 @@ async def get_memories(
387
404
  conn.row_factory = dict_factory
388
405
  cursor = conn.cursor()
389
406
 
407
+ active_profile = get_active_profile()
408
+
390
409
  # Build dynamic query
391
410
  query = """
392
411
  SELECT
@@ -394,9 +413,9 @@ async def get_memories(
394
413
  importance, cluster_id, depth, access_count, parent_id,
395
414
  created_at, updated_at, last_accessed, tags, memory_type
396
415
  FROM memories
397
- WHERE 1=1
416
+ WHERE profile = ?
398
417
  """
399
- params = []
418
+ params = [active_profile]
400
419
 
401
420
  if category:
402
421
  query += " AND category = ?"
@@ -427,8 +446,8 @@ async def get_memories(
427
446
  memories = cursor.fetchall()
428
447
 
429
448
  # Get total count
430
- count_query = "SELECT COUNT(*) as total FROM memories WHERE 1=1"
431
- count_params = []
449
+ count_query = "SELECT COUNT(*) as total FROM memories WHERE profile = ?"
450
+ count_params = [active_profile]
432
451
 
433
452
  if category:
434
453
  count_query += " AND category = ?"
@@ -483,6 +502,8 @@ async def get_graph(
483
502
  conn.row_factory = dict_factory
484
503
  cursor = conn.cursor()
485
504
 
505
+ active_profile = get_active_profile()
506
+
486
507
  # Get nodes (memories with graph data)
487
508
  cursor.execute("""
488
509
  SELECT
@@ -492,10 +513,10 @@ async def get_graph(
492
513
  gn.entities
493
514
  FROM memories m
494
515
  LEFT JOIN graph_nodes gn ON m.id = gn.memory_id
495
- WHERE m.importance >= ?
516
+ WHERE m.importance >= ? AND m.profile = ?
496
517
  ORDER BY m.importance DESC, m.updated_at DESC
497
518
  LIMIT ?
498
- """, (min_importance, max_nodes))
519
+ """, (min_importance, active_profile, max_nodes))
499
520
  nodes = cursor.fetchall()
500
521
 
501
522
  # Parse entities JSON and create previews
@@ -551,9 +572,9 @@ async def get_graph(
551
572
  COUNT(*) as size,
552
573
  AVG(importance) as avg_importance
553
574
  FROM memories
554
- WHERE cluster_id IS NOT NULL
575
+ WHERE cluster_id IS NOT NULL AND profile = ?
555
576
  GROUP BY cluster_id
556
- """)
577
+ """, (active_profile,))
557
578
  clusters = cursor.fetchall()
558
579
 
559
580
  conn.close()
@@ -607,6 +628,8 @@ async def get_timeline(
607
628
  else: # month
608
629
  date_group = "strftime('%Y-%m', created_at)"
609
630
 
631
+ active_profile = get_active_profile()
632
+
610
633
  # Timeline aggregates
611
634
  cursor.execute(f"""
612
635
  SELECT
@@ -618,9 +641,10 @@ async def get_timeline(
618
641
  GROUP_CONCAT(DISTINCT category) as categories
619
642
  FROM memories
620
643
  WHERE created_at >= datetime('now', '-' || ? || ' days')
644
+ AND profile = ?
621
645
  GROUP BY {date_group}
622
646
  ORDER BY period DESC
623
- """, (days,))
647
+ """, (days, active_profile))
624
648
  timeline = cursor.fetchall()
625
649
 
626
650
  # Category trend over time
@@ -631,10 +655,10 @@ async def get_timeline(
631
655
  COUNT(*) as count
632
656
  FROM memories
633
657
  WHERE created_at >= datetime('now', '-' || ? || ' days')
634
- AND category IS NOT NULL
658
+ AND category IS NOT NULL AND profile = ?
635
659
  GROUP BY {date_group}, category
636
660
  ORDER BY period DESC, count DESC
637
- """, (days,))
661
+ """, (days, active_profile))
638
662
  category_trend = cursor.fetchall()
639
663
 
640
664
  # Period statistics
@@ -646,7 +670,8 @@ async def get_timeline(
646
670
  AVG(importance) as avg_importance
647
671
  FROM memories
648
672
  WHERE created_at >= datetime('now', '-' || ? || ' days')
649
- """, (days,))
673
+ AND profile = ?
674
+ """, (days, active_profile))
650
675
  period_stats = cursor.fetchone()
651
676
 
652
677
  conn.close()
@@ -680,23 +705,29 @@ async def get_clusters():
680
705
  conn.row_factory = dict_factory
681
706
  cursor = conn.cursor()
682
707
 
683
- # Get cluster statistics
708
+ active_profile = get_active_profile()
709
+
710
+ # Get cluster statistics with hierarchy and summaries
684
711
  cursor.execute("""
685
712
  SELECT
686
- cluster_id,
713
+ m.cluster_id,
687
714
  COUNT(*) as member_count,
688
- AVG(importance) as avg_importance,
689
- MIN(importance) as min_importance,
690
- MAX(importance) as max_importance,
691
- GROUP_CONCAT(DISTINCT category) as categories,
692
- GROUP_CONCAT(DISTINCT project_name) as projects,
693
- MIN(created_at) as first_memory,
694
- MAX(created_at) as latest_memory
695
- FROM memories
696
- WHERE cluster_id IS NOT NULL
697
- GROUP BY cluster_id
698
- ORDER BY member_count DESC
699
- """)
715
+ AVG(m.importance) as avg_importance,
716
+ MIN(m.importance) as min_importance,
717
+ MAX(m.importance) as max_importance,
718
+ GROUP_CONCAT(DISTINCT m.category) as categories,
719
+ GROUP_CONCAT(DISTINCT m.project_name) as projects,
720
+ MIN(m.created_at) as first_memory,
721
+ MAX(m.created_at) as latest_memory,
722
+ gc.summary,
723
+ gc.parent_cluster_id,
724
+ gc.depth
725
+ FROM memories m
726
+ LEFT JOIN graph_clusters gc ON m.cluster_id = gc.id
727
+ WHERE m.cluster_id IS NOT NULL AND m.profile = ?
728
+ GROUP BY m.cluster_id
729
+ ORDER BY COALESCE(gc.depth, 0) ASC, member_count DESC
730
+ """, (active_profile,))
700
731
  clusters = cursor.fetchall()
701
732
 
702
733
  # Get dominant entities per cluster
@@ -732,8 +763,8 @@ async def get_clusters():
732
763
  cursor.execute("""
733
764
  SELECT COUNT(*) as count
734
765
  FROM memories
735
- WHERE cluster_id IS NULL
736
- """)
766
+ WHERE cluster_id IS NULL AND profile = ?
767
+ """, (active_profile,))
737
768
  unclustered = cursor.fetchone()['count']
738
769
 
739
770
  conn.close()
@@ -872,13 +903,16 @@ async def get_patterns():
872
903
  "message": "Pattern learning not initialized. Run pattern learning first."
873
904
  }
874
905
 
906
+ active_profile = get_active_profile()
907
+
875
908
  cursor.execute("""
876
909
  SELECT
877
910
  pattern_type, key, value, confidence,
878
911
  evidence_count, updated_at as last_updated
879
912
  FROM identity_patterns
913
+ WHERE profile = ?
880
914
  ORDER BY confidence DESC, evidence_count DESC
881
- """)
915
+ """, (active_profile,))
882
916
  patterns = cursor.fetchall()
883
917
 
884
918
  # Parse value JSON
@@ -937,14 +971,16 @@ async def get_stats():
937
971
  conn.row_factory = dict_factory
938
972
  cursor = conn.cursor()
939
973
 
940
- # Basic counts
941
- cursor.execute("SELECT COUNT(*) as total FROM memories")
974
+ active_profile = get_active_profile()
975
+
976
+ # Basic counts (profile-filtered)
977
+ cursor.execute("SELECT COUNT(*) as total FROM memories WHERE profile = ?", (active_profile,))
942
978
  total_memories = cursor.fetchone()['total']
943
979
 
944
980
  cursor.execute("SELECT COUNT(*) as total FROM sessions")
945
981
  total_sessions = cursor.fetchone()['total']
946
982
 
947
- cursor.execute("SELECT COUNT(DISTINCT cluster_id) as total FROM memories WHERE cluster_id IS NOT NULL")
983
+ cursor.execute("SELECT COUNT(DISTINCT cluster_id) as total FROM memories WHERE cluster_id IS NOT NULL AND profile = ?", (active_profile,))
948
984
  total_clusters = cursor.fetchone()['total']
949
985
 
950
986
  cursor.execute("SELECT COUNT(*) as total FROM graph_nodes")
@@ -1115,30 +1151,40 @@ async def search_memories(request: SearchRequest):
1115
1151
  @app.get("/api/profiles")
1116
1152
  async def list_profiles():
1117
1153
  """
1118
- List available memory profiles.
1154
+ List available memory profiles (column-based).
1119
1155
 
1120
1156
  Returns:
1121
- - profiles: List of profile names
1157
+ - profiles: List of profiles with memory counts
1122
1158
  - active_profile: Currently active profile
1123
1159
  - total_profiles: Profile count
1124
1160
  """
1125
1161
  try:
1126
- PROFILES_DIR.mkdir(exist_ok=True)
1162
+ config_file = MEMORY_DIR / "profiles.json"
1163
+ if config_file.exists():
1164
+ with open(config_file, 'r') as f:
1165
+ config = json.load(f)
1166
+ else:
1167
+ config = {'profiles': {'default': {'name': 'default', 'description': 'Default memory profile'}}, 'active_profile': 'default'}
1127
1168
 
1169
+ active = config.get('active_profile', 'default')
1128
1170
  profiles = []
1129
- for profile_dir in PROFILES_DIR.iterdir():
1130
- if profile_dir.is_dir():
1131
- db_file = profile_dir / "memory.db"
1132
- if db_file.exists():
1133
- profiles.append({
1134
- "name": profile_dir.name,
1135
- "path": str(profile_dir),
1136
- "size_mb": round(db_file.stat().st_size / (1024 * 1024), 2),
1137
- "modified": datetime.fromtimestamp(db_file.stat().st_mtime).isoformat()
1138
- })
1139
1171
 
1140
- # Determine active profile (default is main)
1141
- active = "default"
1172
+ conn = get_db_connection()
1173
+ cursor = conn.cursor()
1174
+
1175
+ for name, info in config.get('profiles', {}).items():
1176
+ cursor.execute("SELECT COUNT(*) FROM memories WHERE profile = ?", (name,))
1177
+ count = cursor.fetchone()[0]
1178
+ profiles.append({
1179
+ "name": name,
1180
+ "description": info.get('description', ''),
1181
+ "memory_count": count,
1182
+ "created_at": info.get('created_at', ''),
1183
+ "last_used": info.get('last_used', ''),
1184
+ "is_active": name == active
1185
+ })
1186
+
1187
+ conn.close()
1142
1188
 
1143
1189
  return {
1144
1190
  "profiles": profiles,
@@ -1153,7 +1199,7 @@ async def list_profiles():
1153
1199
  @app.post("/api/profiles/{name}/switch")
1154
1200
  async def switch_profile(name: str):
1155
1201
  """
1156
- Switch active memory profile.
1202
+ Switch active memory profile (column-based, instant).
1157
1203
 
1158
1204
  Parameters:
1159
1205
  - name: Profile name to switch to
@@ -1161,7 +1207,8 @@ async def switch_profile(name: str):
1161
1207
  Returns:
1162
1208
  - success: Switch status
1163
1209
  - active_profile: New active profile
1164
- - message: Status message
1210
+ - previous_profile: Previously active profile
1211
+ - memory_count: Memories in new profile
1165
1212
  """
1166
1213
  try:
1167
1214
  if not validate_profile_name(name):
@@ -1170,20 +1217,48 @@ async def switch_profile(name: str):
1170
1217
  detail="Invalid profile name. Use alphanumeric, underscore, or hyphen only."
1171
1218
  )
1172
1219
 
1173
- profile_path = PROFILES_DIR / name / "memory.db"
1220
+ config_file = MEMORY_DIR / "profiles.json"
1221
+ if config_file.exists():
1222
+ with open(config_file, 'r') as f:
1223
+ config = json.load(f)
1224
+ else:
1225
+ config = {'profiles': {'default': {'name': 'default', 'description': 'Default memory profile'}}, 'active_profile': 'default'}
1174
1226
 
1175
- if not profile_path.exists():
1227
+ if name not in config.get('profiles', {}):
1176
1228
  raise HTTPException(
1177
1229
  status_code=404,
1178
- detail=f"Profile '{name}' not found"
1230
+ detail=f"Profile '{name}' not found. Available: {', '.join(config.get('profiles', {}).keys())}"
1179
1231
  )
1180
1232
 
1181
- # Note: Actual profile switching would require modifying DB_PATH
1182
- # This is a placeholder implementation
1233
+ previous = config.get('active_profile', 'default')
1234
+ config['active_profile'] = name
1235
+ config['profiles'][name]['last_used'] = datetime.now().isoformat()
1236
+
1237
+ with open(config_file, 'w') as f:
1238
+ json.dump(config, f, indent=2)
1239
+
1240
+ # Get memory count for new profile
1241
+ conn = get_db_connection()
1242
+ cursor = conn.cursor()
1243
+ cursor.execute("SELECT COUNT(*) FROM memories WHERE profile = ?", (name,))
1244
+ count = cursor.fetchone()[0]
1245
+ conn.close()
1246
+
1247
+ # Broadcast profile switch to WebSocket clients
1248
+ await manager.broadcast({
1249
+ "type": "profile_switched",
1250
+ "profile": name,
1251
+ "previous": previous,
1252
+ "memory_count": count,
1253
+ "timestamp": datetime.now().isoformat()
1254
+ })
1255
+
1183
1256
  return {
1184
1257
  "success": True,
1185
1258
  "active_profile": name,
1186
- "message": f"Profile switched to '{name}'. Restart server to apply changes."
1259
+ "previous_profile": previous,
1260
+ "memory_count": count,
1261
+ "message": f"Switched to profile '{name}' ({count} memories). Changes take effect immediately."
1187
1262
  }
1188
1263
 
1189
1264
  except HTTPException:
@@ -1192,6 +1267,97 @@ async def switch_profile(name: str):
1192
1267
  raise HTTPException(status_code=500, detail=f"Profile switch error: {str(e)}")
1193
1268
 
1194
1269
 
1270
+ @app.post("/api/profiles/create")
1271
+ async def create_profile(body: ProfileSwitch):
1272
+ """
1273
+ Create a new memory profile.
1274
+
1275
+ Parameters:
1276
+ - profile_name: Name for the new profile
1277
+
1278
+ Returns:
1279
+ - success: Creation status
1280
+ - profile: Created profile name
1281
+ """
1282
+ try:
1283
+ name = body.profile_name
1284
+ if not validate_profile_name(name):
1285
+ raise HTTPException(status_code=400, detail="Invalid profile name")
1286
+
1287
+ config_file = MEMORY_DIR / "profiles.json"
1288
+ if config_file.exists():
1289
+ with open(config_file, 'r') as f:
1290
+ config = json.load(f)
1291
+ else:
1292
+ config = {'profiles': {'default': {'name': 'default', 'description': 'Default memory profile'}}, 'active_profile': 'default'}
1293
+
1294
+ if name in config.get('profiles', {}):
1295
+ raise HTTPException(status_code=409, detail=f"Profile '{name}' already exists")
1296
+
1297
+ config['profiles'][name] = {
1298
+ 'name': name,
1299
+ 'description': f'Memory profile: {name}',
1300
+ 'created_at': datetime.now().isoformat(),
1301
+ 'last_used': None
1302
+ }
1303
+
1304
+ with open(config_file, 'w') as f:
1305
+ json.dump(config, f, indent=2)
1306
+
1307
+ return {
1308
+ "success": True,
1309
+ "profile": name,
1310
+ "message": f"Profile '{name}' created"
1311
+ }
1312
+
1313
+ except HTTPException:
1314
+ raise
1315
+ except Exception as e:
1316
+ raise HTTPException(status_code=500, detail=f"Profile create error: {str(e)}")
1317
+
1318
+
1319
+ @app.delete("/api/profiles/{name}")
1320
+ async def delete_profile(name: str):
1321
+ """
1322
+ Delete a profile. Moves its memories to 'default'.
1323
+ """
1324
+ try:
1325
+ if name == 'default':
1326
+ raise HTTPException(status_code=400, detail="Cannot delete 'default' profile")
1327
+
1328
+ config_file = MEMORY_DIR / "profiles.json"
1329
+ with open(config_file, 'r') as f:
1330
+ config = json.load(f)
1331
+
1332
+ if name not in config.get('profiles', {}):
1333
+ raise HTTPException(status_code=404, detail=f"Profile '{name}' not found")
1334
+
1335
+ if config.get('active_profile') == name:
1336
+ raise HTTPException(status_code=400, detail="Cannot delete active profile. Switch first.")
1337
+
1338
+ # Move memories to default
1339
+ conn = get_db_connection()
1340
+ cursor = conn.cursor()
1341
+ cursor.execute("UPDATE memories SET profile = 'default' WHERE profile = ?", (name,))
1342
+ moved = cursor.rowcount
1343
+ conn.commit()
1344
+ conn.close()
1345
+
1346
+ del config['profiles'][name]
1347
+ with open(config_file, 'w') as f:
1348
+ json.dump(config, f, indent=2)
1349
+
1350
+ return {
1351
+ "success": True,
1352
+ "message": f"Profile '{name}' deleted. {moved} memories moved to 'default'."
1353
+ }
1354
+
1355
+ except HTTPException:
1356
+ raise
1357
+ except Exception as e:
1358
+ raise HTTPException(status_code=500, detail=f"Profile delete error: {str(e)}")
1359
+
1360
+
1195
1361
  # ============================================================================
1196
1362
  # API Endpoints - Import/Export
1197
1363
  # ============================================================================
@@ -1366,6 +1532,141 @@ async def import_memories(file: UploadFile = File(...)):
1366
1532
  raise HTTPException(status_code=500, detail=f"Import error: {str(e)}")
1367
1533
 
1368
1534
 
1535
+ # ============================================================================
1536
+ # API Endpoints - Backup Management
1537
+ # ============================================================================
1538
+
1539
+ class BackupConfigRequest(BaseModel):
1540
+ """Backup configuration update request."""
1541
+ interval_hours: Optional[int] = Field(None, ge=1, le=8760)
1542
+ max_backups: Optional[int] = Field(None, ge=1, le=100)
1543
+ enabled: Optional[bool] = None
1544
+
1545
+
1546
+ @app.get("/api/backup/status")
1547
+ async def backup_status():
1548
+ """
1549
+ Get auto-backup system status.
1550
+
1551
+ Returns:
1552
+ - enabled: Whether auto-backup is active
1553
+ - interval_display: Human-readable interval
1554
+ - last_backup: Timestamp of last backup
1555
+ - next_backup: When next backup is due
1556
+ - backup_count: Number of existing backups
1557
+ - total_size_mb: Total backup storage used
1558
+ """
1559
+ try:
1560
+ from auto_backup import AutoBackup
1561
+ backup = AutoBackup()
1562
+ return backup.get_status()
1563
+ except ImportError:
1564
+ raise HTTPException(
1565
+ status_code=501,
1566
+ detail="Auto-backup module not installed. Update SuperLocalMemory to v2.4.0+."
1567
+ )
1568
+ except Exception as e:
1569
+ raise HTTPException(status_code=500, detail=f"Backup status error: {str(e)}")
1570
+
1571
+
1572
+ @app.post("/api/backup/create")
1573
+ async def backup_create():
1574
+ """
1575
+ Create a manual backup of memory.db immediately.
1576
+
1577
+ Returns:
1578
+ - success: Whether backup was created
1579
+ - filename: Name of the backup file
1580
+ - status: Updated backup system status
1581
+ """
1582
+ try:
1583
+ from auto_backup import AutoBackup
1584
+ backup = AutoBackup()
1585
+ filename = backup.create_backup(label='manual')
1586
+
1587
+ if filename:
1588
+ return {
1589
+ "success": True,
1590
+ "filename": filename,
1591
 + "message": f"Backup created: {filename}",
1592
+ "status": backup.get_status()
1593
+ }
1594
+ else:
1595
+ return {
1596
+ "success": False,
1597
+ "message": "Backup failed — database may not exist",
1598
+ "status": backup.get_status()
1599
+ }
1600
+ except ImportError:
1601
+ raise HTTPException(
1602
+ status_code=501,
1603
+ detail="Auto-backup module not installed. Update SuperLocalMemory to v2.4.0+."
1604
+ )
1605
+ except Exception as e:
1606
+ raise HTTPException(status_code=500, detail=f"Backup create error: {str(e)}")
1607
+
1608
+
1609
+ @app.post("/api/backup/configure")
1610
+ async def backup_configure(request: BackupConfigRequest):
1611
+ """
1612
+ Update auto-backup configuration.
1613
+
1614
+ Request body (all optional):
1615
+ - interval_hours: Hours between backups (24=daily, 168=weekly)
1616
+ - max_backups: Maximum backup files to retain
1617
+ - enabled: Enable/disable auto-backup
1618
+
1619
+ Returns:
1620
+ - Updated backup status
1621
+ """
1622
+ try:
1623
+ from auto_backup import AutoBackup
1624
+ backup = AutoBackup()
1625
+ result = backup.configure(
1626
+ interval_hours=request.interval_hours,
1627
+ max_backups=request.max_backups,
1628
+ enabled=request.enabled
1629
+ )
1630
+ return {
1631
+ "success": True,
1632
+ "message": "Backup configuration updated",
1633
+ "status": result
1634
+ }
1635
+ except ImportError:
1636
+ raise HTTPException(
1637
+ status_code=501,
1638
+ detail="Auto-backup module not installed. Update SuperLocalMemory to v2.4.0+."
1639
+ )
1640
+ except Exception as e:
1641
+ raise HTTPException(status_code=500, detail=f"Backup configure error: {str(e)}")
1642
+
1643
+
1644
+ @app.get("/api/backup/list")
1645
+ async def backup_list():
1646
+ """
1647
+ List all available backups.
1648
+
1649
+ Returns:
1650
+ - backups: List of backup files with metadata (filename, size, age, created)
1651
+ - count: Total number of backups
1652
+ """
1653
+ try:
1654
+ from auto_backup import AutoBackup
1655
+ backup = AutoBackup()
1656
+ backups = backup.list_backups()
1657
+ return {
1658
+ "backups": backups,
1659
+ "count": len(backups)
1660
+ }
1661
+ except ImportError:
1662
+ raise HTTPException(
1663
+ status_code=501,
1664
+ detail="Auto-backup module not installed. Update SuperLocalMemory to v2.4.0+."
1665
+ )
1666
+ except Exception as e:
1667
+ raise HTTPException(status_code=500, detail=f"Backup list error: {str(e)}")
1668
+
1669
+
1369
1670
  # ============================================================================
1370
1671
  # WebSocket Endpoint - Real-Time Updates
1371
1672
  # ============================================================================