superlocalmemory 2.8.2 → 2.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +7 -5
  2. package/api_server.py +5 -0
  3. package/bin/slm.bat +3 -3
  4. package/docs/SECURITY-QUICK-REFERENCE.md +214 -0
  5. package/install.ps1 +11 -11
  6. package/mcp_server.py +3 -3
  7. package/package.json +2 -2
  8. package/requirements-core.txt +16 -18
  9. package/requirements-learning.txt +8 -8
  10. package/requirements.txt +9 -7
  11. package/scripts/prepack.js +33 -0
  12. package/scripts/verify-v27.ps1 +301 -0
  13. package/src/agent_registry.py +32 -28
  14. package/src/auto_backup.py +12 -6
  15. package/src/cache_manager.py +2 -2
  16. package/src/compression/__init__.py +25 -0
  17. package/src/compression/cli.py +150 -0
  18. package/src/compression/cold_storage.py +217 -0
  19. package/src/compression/config.py +72 -0
  20. package/src/compression/orchestrator.py +133 -0
  21. package/src/compression/tier2_compressor.py +228 -0
  22. package/src/compression/tier3_compressor.py +153 -0
  23. package/src/compression/tier_classifier.py +148 -0
  24. package/src/db_connection_manager.py +5 -5
  25. package/src/event_bus.py +24 -22
  26. package/src/hnsw_index.py +3 -3
  27. package/src/learning/__init__.py +5 -4
  28. package/src/learning/adaptive_ranker.py +14 -265
  29. package/src/learning/bootstrap/__init__.py +69 -0
  30. package/src/learning/bootstrap/constants.py +93 -0
  31. package/src/learning/bootstrap/db_queries.py +316 -0
  32. package/src/learning/bootstrap/sampling.py +82 -0
  33. package/src/learning/bootstrap/text_utils.py +71 -0
  34. package/src/learning/cross_project_aggregator.py +58 -57
  35. package/src/learning/db/__init__.py +40 -0
  36. package/src/learning/db/constants.py +44 -0
  37. package/src/learning/db/schema.py +279 -0
  38. package/src/learning/learning_db.py +15 -234
  39. package/src/learning/ranking/__init__.py +33 -0
  40. package/src/learning/ranking/constants.py +84 -0
  41. package/src/learning/ranking/helpers.py +278 -0
  42. package/src/learning/source_quality_scorer.py +66 -65
  43. package/src/learning/synthetic_bootstrap.py +28 -310
  44. package/src/memory/__init__.py +36 -0
  45. package/src/memory/cli.py +205 -0
  46. package/src/memory/constants.py +39 -0
  47. package/src/memory/helpers.py +28 -0
  48. package/src/memory/schema.py +166 -0
  49. package/src/memory-profiles.py +94 -86
  50. package/src/memory-reset.py +187 -185
  51. package/src/memory_compression.py +2 -2
  52. package/src/memory_store_v2.py +34 -354
  53. package/src/migrate_v1_to_v2.py +11 -10
  54. package/src/patterns/analyzers.py +104 -100
  55. package/src/patterns/learner.py +17 -13
  56. package/src/patterns/scoring.py +25 -21
  57. package/src/patterns/store.py +40 -38
  58. package/src/patterns/terminology.py +53 -51
  59. package/src/provenance_tracker.py +2 -2
  60. package/src/qualixar_attribution.py +1 -1
  61. package/src/search/engine.py +16 -14
  62. package/src/search/index_loader.py +13 -11
  63. package/src/setup_validator.py +160 -158
  64. package/src/subscription_manager.py +20 -18
  65. package/src/tree/builder.py +66 -64
  66. package/src/tree/nodes.py +103 -97
  67. package/src/tree/queries.py +142 -137
  68. package/src/tree/schema.py +46 -42
  69. package/src/webhook_dispatcher.py +3 -3
  70. package/ui_server.py +7 -4
@@ -144,7 +144,7 @@ def check_database() -> Tuple[bool, str, List[str]]:
144
144
  try:
145
145
  cursor.execute("SELECT COUNT(*) FROM memories")
146
146
  memory_count = cursor.fetchone()[0]
147
- except:
147
+ except Exception:
148
148
  memory_count = 0
149
149
 
150
150
  conn.close()
@@ -192,163 +192,165 @@ def initialize_database() -> Tuple[bool, str]:
192
192
  MEMORY_DIR.mkdir(parents=True, exist_ok=True)
193
193
 
194
194
  conn = sqlite3.connect(DB_PATH)
195
- cursor = conn.cursor()
196
-
197
- # Create memories table (core)
198
- cursor.execute('''
199
- CREATE TABLE IF NOT EXISTS memories (
200
- id INTEGER PRIMARY KEY AUTOINCREMENT,
201
- content TEXT NOT NULL,
202
- summary TEXT,
203
- project_path TEXT,
204
- project_name TEXT,
205
- tags TEXT DEFAULT '[]',
206
- category TEXT,
207
- parent_id INTEGER,
208
- tree_path TEXT DEFAULT '/',
209
- depth INTEGER DEFAULT 0,
210
- memory_type TEXT DEFAULT 'session',
211
- importance INTEGER DEFAULT 5,
212
- content_hash TEXT,
213
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
214
- last_accessed TIMESTAMP,
215
- access_count INTEGER DEFAULT 0,
216
- compressed_at TIMESTAMP,
217
- tier INTEGER DEFAULT 1,
218
- cluster_id INTEGER,
219
- FOREIGN KEY (parent_id) REFERENCES memories(id)
220
- )
221
- ''')
222
-
223
- # Create graph tables
224
- cursor.execute('''
225
- CREATE TABLE IF NOT EXISTS graph_nodes (
226
- id INTEGER PRIMARY KEY AUTOINCREMENT,
227
- memory_id INTEGER UNIQUE NOT NULL,
228
- entities TEXT DEFAULT '[]',
229
- embedding_vector BLOB,
230
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
231
- FOREIGN KEY (memory_id) REFERENCES memories(id)
232
- )
233
- ''')
234
-
235
- cursor.execute('''
236
- CREATE TABLE IF NOT EXISTS graph_edges (
237
- id INTEGER PRIMARY KEY AUTOINCREMENT,
238
- source_memory_id INTEGER NOT NULL,
239
- target_memory_id INTEGER NOT NULL,
240
- similarity REAL NOT NULL,
241
- relationship_type TEXT,
242
- shared_entities TEXT DEFAULT '[]',
243
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
244
- FOREIGN KEY (source_memory_id) REFERENCES memories(id),
245
- FOREIGN KEY (target_memory_id) REFERENCES memories(id),
246
- UNIQUE(source_memory_id, target_memory_id)
247
- )
248
- ''')
249
-
250
- cursor.execute('''
251
- CREATE TABLE IF NOT EXISTS graph_clusters (
252
- id INTEGER PRIMARY KEY AUTOINCREMENT,
253
- cluster_name TEXT,
254
- name TEXT,
255
- description TEXT,
256
- summary TEXT,
257
- memory_count INTEGER DEFAULT 0,
258
- member_count INTEGER DEFAULT 0,
259
- avg_importance REAL DEFAULT 5.0,
260
- top_entities TEXT DEFAULT '[]',
261
- parent_cluster_id INTEGER,
262
- depth INTEGER DEFAULT 0,
263
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
264
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
265
- FOREIGN KEY (parent_cluster_id) REFERENCES graph_clusters(id) ON DELETE SET NULL
266
- )
267
- ''')
268
-
269
- # Create pattern learning tables
270
- cursor.execute('''
271
- CREATE TABLE IF NOT EXISTS identity_patterns (
272
- id INTEGER PRIMARY KEY AUTOINCREMENT,
273
- pattern_type TEXT NOT NULL,
274
- pattern_key TEXT NOT NULL,
275
- pattern_value TEXT,
276
- confidence REAL DEFAULT 0.0,
277
- frequency INTEGER DEFAULT 1,
278
- last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
279
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
280
- UNIQUE(pattern_type, pattern_key)
281
- )
282
- ''')
283
-
284
- cursor.execute('''
285
- CREATE TABLE IF NOT EXISTS pattern_examples (
286
- id INTEGER PRIMARY KEY AUTOINCREMENT,
287
- pattern_id INTEGER NOT NULL,
288
- memory_id INTEGER NOT NULL,
289
- context TEXT,
290
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
291
- FOREIGN KEY (pattern_id) REFERENCES identity_patterns(id),
292
- FOREIGN KEY (memory_id) REFERENCES memories(id)
293
- )
294
- ''')
295
-
296
- # Create tree table
297
- cursor.execute('''
298
- CREATE TABLE IF NOT EXISTS memory_tree (
299
- id INTEGER PRIMARY KEY AUTOINCREMENT,
300
- node_type TEXT NOT NULL,
301
- name TEXT NOT NULL,
302
- parent_id INTEGER,
303
- tree_path TEXT DEFAULT '/',
304
- depth INTEGER DEFAULT 0,
305
- memory_count INTEGER DEFAULT 0,
306
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
307
- FOREIGN KEY (parent_id) REFERENCES memory_tree(id)
308
- )
309
- ''')
310
-
311
- # Create archive table
312
- cursor.execute('''
313
- CREATE TABLE IF NOT EXISTS memory_archive (
314
- id INTEGER PRIMARY KEY AUTOINCREMENT,
315
- original_memory_id INTEGER,
316
- compressed_content TEXT NOT NULL,
317
- compression_type TEXT DEFAULT 'tier2',
318
- original_size INTEGER,
319
- compressed_size INTEGER,
320
- archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
321
- )
322
- ''')
323
-
324
- # Create system metadata table for watermarking
325
- cursor.execute('''
326
- CREATE TABLE IF NOT EXISTS system_metadata (
327
- key TEXT PRIMARY KEY,
328
- value TEXT NOT NULL
329
- )
330
- ''')
331
-
332
- # Add system watermark
333
- cursor.execute('''
334
- INSERT OR REPLACE INTO system_metadata (key, value) VALUES
335
- ('product', 'SuperLocalMemory'),
336
- ('website', 'https://superlocalmemory.com'),
337
- ('repository', 'https://github.com/varun369/SuperLocalMemoryV2'),
338
- ('license', 'MIT'),
339
- ('schema_version', '2.0.0')
340
- ''')
341
-
342
- # Create indexes for performance
343
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project_name)')
344
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category)')
345
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_cluster ON memories(cluster_id)')
346
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(content_hash)')
347
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source_memory_id)')
348
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target_memory_id)')
349
-
350
- conn.commit()
351
- conn.close()
195
+ try:
196
+ cursor = conn.cursor()
197
+
198
+ # Create memories table (core)
199
+ cursor.execute('''
200
+ CREATE TABLE IF NOT EXISTS memories (
201
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
202
+ content TEXT NOT NULL,
203
+ summary TEXT,
204
+ project_path TEXT,
205
+ project_name TEXT,
206
+ tags TEXT DEFAULT '[]',
207
+ category TEXT,
208
+ parent_id INTEGER,
209
+ tree_path TEXT DEFAULT '/',
210
+ depth INTEGER DEFAULT 0,
211
+ memory_type TEXT DEFAULT 'session',
212
+ importance INTEGER DEFAULT 5,
213
+ content_hash TEXT,
214
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
215
+ last_accessed TIMESTAMP,
216
+ access_count INTEGER DEFAULT 0,
217
+ compressed_at TIMESTAMP,
218
+ tier INTEGER DEFAULT 1,
219
+ cluster_id INTEGER,
220
+ FOREIGN KEY (parent_id) REFERENCES memories(id)
221
+ )
222
+ ''')
223
+
224
+ # Create graph tables
225
+ cursor.execute('''
226
+ CREATE TABLE IF NOT EXISTS graph_nodes (
227
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
228
+ memory_id INTEGER UNIQUE NOT NULL,
229
+ entities TEXT DEFAULT '[]',
230
+ embedding_vector BLOB,
231
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
232
+ FOREIGN KEY (memory_id) REFERENCES memories(id)
233
+ )
234
+ ''')
235
+
236
+ cursor.execute('''
237
+ CREATE TABLE IF NOT EXISTS graph_edges (
238
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
239
+ source_memory_id INTEGER NOT NULL,
240
+ target_memory_id INTEGER NOT NULL,
241
+ similarity REAL NOT NULL,
242
+ relationship_type TEXT,
243
+ shared_entities TEXT DEFAULT '[]',
244
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
245
+ FOREIGN KEY (source_memory_id) REFERENCES memories(id),
246
+ FOREIGN KEY (target_memory_id) REFERENCES memories(id),
247
+ UNIQUE(source_memory_id, target_memory_id)
248
+ )
249
+ ''')
250
+
251
+ cursor.execute('''
252
+ CREATE TABLE IF NOT EXISTS graph_clusters (
253
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
254
+ cluster_name TEXT,
255
+ name TEXT,
256
+ description TEXT,
257
+ summary TEXT,
258
+ memory_count INTEGER DEFAULT 0,
259
+ member_count INTEGER DEFAULT 0,
260
+ avg_importance REAL DEFAULT 5.0,
261
+ top_entities TEXT DEFAULT '[]',
262
+ parent_cluster_id INTEGER,
263
+ depth INTEGER DEFAULT 0,
264
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
265
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
266
+ FOREIGN KEY (parent_cluster_id) REFERENCES graph_clusters(id) ON DELETE SET NULL
267
+ )
268
+ ''')
269
+
270
+ # Create pattern learning tables
271
+ cursor.execute('''
272
+ CREATE TABLE IF NOT EXISTS identity_patterns (
273
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
274
+ pattern_type TEXT NOT NULL,
275
+ pattern_key TEXT NOT NULL,
276
+ pattern_value TEXT,
277
+ confidence REAL DEFAULT 0.0,
278
+ frequency INTEGER DEFAULT 1,
279
+ last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
280
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
281
+ UNIQUE(pattern_type, pattern_key)
282
+ )
283
+ ''')
284
+
285
+ cursor.execute('''
286
+ CREATE TABLE IF NOT EXISTS pattern_examples (
287
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
288
+ pattern_id INTEGER NOT NULL,
289
+ memory_id INTEGER NOT NULL,
290
+ context TEXT,
291
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
292
+ FOREIGN KEY (pattern_id) REFERENCES identity_patterns(id),
293
+ FOREIGN KEY (memory_id) REFERENCES memories(id)
294
+ )
295
+ ''')
296
+
297
+ # Create tree table
298
+ cursor.execute('''
299
+ CREATE TABLE IF NOT EXISTS memory_tree (
300
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
301
+ node_type TEXT NOT NULL,
302
+ name TEXT NOT NULL,
303
+ parent_id INTEGER,
304
+ tree_path TEXT DEFAULT '/',
305
+ depth INTEGER DEFAULT 0,
306
+ memory_count INTEGER DEFAULT 0,
307
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
308
+ FOREIGN KEY (parent_id) REFERENCES memory_tree(id)
309
+ )
310
+ ''')
311
+
312
+ # Create archive table
313
+ cursor.execute('''
314
+ CREATE TABLE IF NOT EXISTS memory_archive (
315
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
316
+ original_memory_id INTEGER,
317
+ compressed_content TEXT NOT NULL,
318
+ compression_type TEXT DEFAULT 'tier2',
319
+ original_size INTEGER,
320
+ compressed_size INTEGER,
321
+ archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
322
+ )
323
+ ''')
324
+
325
+ # Create system metadata table for watermarking
326
+ cursor.execute('''
327
+ CREATE TABLE IF NOT EXISTS system_metadata (
328
+ key TEXT PRIMARY KEY,
329
+ value TEXT NOT NULL
330
+ )
331
+ ''')
332
+
333
+ # Add system watermark
334
+ cursor.execute('''
335
+ INSERT OR REPLACE INTO system_metadata (key, value) VALUES
336
+ ('product', 'SuperLocalMemory'),
337
+ ('website', 'https://superlocalmemory.com'),
338
+ ('repository', 'https://github.com/varun369/SuperLocalMemoryV2'),
339
+ ('license', 'MIT'),
340
+ ('schema_version', '2.0.0')
341
+ ''')
342
+
343
+ # Create indexes for performance
344
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project_name)')
345
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category)')
346
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_cluster ON memories(cluster_id)')
347
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(content_hash)')
348
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source_memory_id)')
349
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target_memory_id)')
350
+
351
+ conn.commit()
352
+ finally:
353
+ conn.close()
352
354
 
353
355
  return True, "Database initialized successfully"
354
356
 
@@ -53,7 +53,7 @@ class SubscriptionManager:
53
53
  return cls._instances[key]
54
54
 
55
55
  @classmethod
56
- def reset_instance(cls, db_path: Optional[Path] = None):
56
+ def reset_instance(cls, db_path: Optional[Path] = None) -> None:
57
57
  """Remove singleton. Used for testing."""
58
58
  with cls._instances_lock:
59
59
  if db_path is None:
@@ -103,22 +103,24 @@ class SubscriptionManager:
103
103
  except ImportError:
104
104
  import sqlite3
105
105
  conn = sqlite3.connect(str(self.db_path))
106
- conn.execute('''
107
- CREATE TABLE IF NOT EXISTS subscriptions (
108
- id INTEGER PRIMARY KEY AUTOINCREMENT,
109
- subscriber_id TEXT NOT NULL UNIQUE,
110
- channel TEXT NOT NULL,
111
- filter TEXT NOT NULL DEFAULT '{}',
112
- webhook_url TEXT,
113
- durable INTEGER DEFAULT 1,
114
- last_event_id INTEGER DEFAULT 0,
115
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
116
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
117
- )
118
- ''')
119
- conn.execute('CREATE INDEX IF NOT EXISTS idx_subs_channel ON subscriptions(channel)')
120
- conn.commit()
121
- conn.close()
106
+ try:
107
+ conn.execute('''
108
+ CREATE TABLE IF NOT EXISTS subscriptions (
109
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
110
+ subscriber_id TEXT NOT NULL UNIQUE,
111
+ channel TEXT NOT NULL,
112
+ filter TEXT NOT NULL DEFAULT '{}',
113
+ webhook_url TEXT,
114
+ durable INTEGER DEFAULT 1,
115
+ last_event_id INTEGER DEFAULT 0,
116
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
117
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
118
+ )
119
+ ''')
120
+ conn.execute('CREATE INDEX IF NOT EXISTS idx_subs_channel ON subscriptions(channel)')
121
+ conn.commit()
122
+ finally:
123
+ conn.close()
122
124
 
123
125
  # =========================================================================
124
126
  # Subscribe / Unsubscribe
@@ -244,7 +246,7 @@ class SubscriptionManager:
244
246
 
245
247
  return removed
246
248
 
247
- def update_last_event_id(self, subscriber_id: str, event_id: int):
249
+ def update_last_event_id(self, subscriber_id: str, event_id: int) -> None:
248
250
  """Update the last event ID received by a durable subscriber (for replay)."""
249
251
  try:
250
252
  from db_connection_manager import DbConnectionManager
@@ -22,70 +22,72 @@ class TreeBuilderMixin:
22
22
  5. Update aggregated counts
23
23
  """
24
24
  conn = sqlite3.connect(self.db_path)
25
- cursor = conn.cursor()
26
-
27
- # Clear existing tree (keep root)
28
- cursor.execute('DELETE FROM memory_tree WHERE node_type != ?', ('root',))
29
-
30
- # Step 1: Create project nodes
31
- cursor.execute('''
32
- SELECT DISTINCT project_path, project_name
33
- FROM memories
34
- WHERE project_path IS NOT NULL
35
- ORDER BY project_path
36
- ''')
37
- projects = cursor.fetchall()
38
-
39
- project_map = {} # project_path -> node_id
40
-
41
- for project_path, project_name in projects:
42
- name = project_name or project_path.split('/')[-1]
43
- node_id = self.add_node('project', name, self.root_id, description=project_path)
44
- project_map[project_path] = node_id
45
-
46
- # Step 2: Create category nodes within projects
47
- cursor.execute('''
48
- SELECT DISTINCT project_path, category
49
- FROM memories
50
- WHERE project_path IS NOT NULL AND category IS NOT NULL
51
- ORDER BY project_path, category
52
- ''')
53
- categories = cursor.fetchall()
54
-
55
- category_map = {} # (project_path, category) -> node_id
56
-
57
- for project_path, category in categories:
58
- parent_id = project_map.get(project_path)
59
- if parent_id:
60
- node_id = self.add_node('category', category, parent_id)
61
- category_map[(project_path, category)] = node_id
62
-
63
- # Step 3: Link memories as leaf nodes
64
- cursor.execute('''
65
- SELECT id, content, summary, project_path, category, importance, created_at
66
- FROM memories
67
- ORDER BY created_at DESC
68
- ''')
69
- memories = cursor.fetchall()
70
-
71
- for mem_id, content, summary, project_path, category, importance, created_at in memories:
72
- # Determine parent node
73
- if project_path and category and (project_path, category) in category_map:
74
- parent_id = category_map[(project_path, category)]
75
- elif project_path and project_path in project_map:
76
- parent_id = project_map[project_path]
77
- else:
78
- parent_id = self.root_id
79
-
80
- # Create memory node
81
- name = summary or content[:60].replace('\n', ' ')
82
- self.add_node('memory', name, parent_id, memory_id=mem_id, description=content[:200])
83
-
84
- # Step 4: Update aggregated counts
85
- self._update_all_counts()
86
-
87
- conn.commit()
88
- conn.close()
25
+ try:
26
+ cursor = conn.cursor()
27
+
28
+ # Clear existing tree (keep root)
29
+ cursor.execute('DELETE FROM memory_tree WHERE node_type != ?', ('root',))
30
+
31
+ # Step 1: Create project nodes
32
+ cursor.execute('''
33
+ SELECT DISTINCT project_path, project_name
34
+ FROM memories
35
+ WHERE project_path IS NOT NULL
36
+ ORDER BY project_path
37
+ ''')
38
+ projects = cursor.fetchall()
39
+
40
+ project_map = {} # project_path -> node_id
41
+
42
+ for project_path, project_name in projects:
43
+ name = project_name or project_path.split('/')[-1]
44
+ node_id = self.add_node('project', name, self.root_id, description=project_path)
45
+ project_map[project_path] = node_id
46
+
47
+ # Step 2: Create category nodes within projects
48
+ cursor.execute('''
49
+ SELECT DISTINCT project_path, category
50
+ FROM memories
51
+ WHERE project_path IS NOT NULL AND category IS NOT NULL
52
+ ORDER BY project_path, category
53
+ ''')
54
+ categories = cursor.fetchall()
55
+
56
+ category_map = {} # (project_path, category) -> node_id
57
+
58
+ for project_path, category in categories:
59
+ parent_id = project_map.get(project_path)
60
+ if parent_id:
61
+ node_id = self.add_node('category', category, parent_id)
62
+ category_map[(project_path, category)] = node_id
63
+
64
+ # Step 3: Link memories as leaf nodes
65
+ cursor.execute('''
66
+ SELECT id, content, summary, project_path, category, importance, created_at
67
+ FROM memories
68
+ ORDER BY created_at DESC
69
+ ''')
70
+ memories = cursor.fetchall()
71
+
72
+ for mem_id, content, summary, project_path, category, importance, created_at in memories:
73
+ # Determine parent node
74
+ if project_path and category and (project_path, category) in category_map:
75
+ parent_id = category_map[(project_path, category)]
76
+ elif project_path and project_path in project_map:
77
+ parent_id = project_map[project_path]
78
+ else:
79
+ parent_id = self.root_id
80
+
81
+ # Create memory node
82
+ name = summary or content[:60].replace('\n', ' ')
83
+ self.add_node('memory', name, parent_id, memory_id=mem_id, description=content[:200])
84
+
85
+ # Step 4: Update aggregated counts
86
+ self._update_all_counts()
87
+
88
+ conn.commit()
89
+ finally:
90
+ conn.close()
89
91
 
90
92
 
91
93
  def run_cli():