superlocalmemory 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ATTRIBUTION.md +140 -0
- package/CHANGELOG.md +1749 -0
- package/LICENSE +21 -0
- package/README.md +600 -0
- package/bin/aider-smart +72 -0
- package/bin/slm +202 -0
- package/bin/slm-npm +73 -0
- package/bin/slm.bat +195 -0
- package/bin/slm.cmd +10 -0
- package/bin/superlocalmemoryv2:list +3 -0
- package/bin/superlocalmemoryv2:profile +3 -0
- package/bin/superlocalmemoryv2:recall +3 -0
- package/bin/superlocalmemoryv2:remember +3 -0
- package/bin/superlocalmemoryv2:reset +3 -0
- package/bin/superlocalmemoryv2:status +3 -0
- package/completions/slm.bash +58 -0
- package/completions/slm.zsh +76 -0
- package/configs/antigravity-mcp.json +13 -0
- package/configs/chatgpt-desktop-mcp.json +7 -0
- package/configs/claude-desktop-mcp.json +15 -0
- package/configs/codex-mcp.toml +13 -0
- package/configs/cody-commands.json +29 -0
- package/configs/continue-mcp.yaml +14 -0
- package/configs/continue-skills.yaml +26 -0
- package/configs/cursor-mcp.json +15 -0
- package/configs/gemini-cli-mcp.json +11 -0
- package/configs/jetbrains-mcp.json +11 -0
- package/configs/opencode-mcp.json +12 -0
- package/configs/perplexity-mcp.json +9 -0
- package/configs/vscode-copilot-mcp.json +12 -0
- package/configs/windsurf-mcp.json +16 -0
- package/configs/zed-mcp.json +12 -0
- package/docs/ARCHITECTURE.md +877 -0
- package/docs/CLI-COMMANDS-REFERENCE.md +425 -0
- package/docs/COMPETITIVE-ANALYSIS.md +210 -0
- package/docs/COMPRESSION-README.md +390 -0
- package/docs/GRAPH-ENGINE.md +503 -0
- package/docs/MCP-MANUAL-SETUP.md +720 -0
- package/docs/MCP-TROUBLESHOOTING.md +787 -0
- package/docs/PATTERN-LEARNING.md +363 -0
- package/docs/PROFILES-GUIDE.md +453 -0
- package/docs/RESET-GUIDE.md +353 -0
- package/docs/SEARCH-ENGINE-V2.2.0.md +748 -0
- package/docs/SEARCH-INTEGRATION-GUIDE.md +502 -0
- package/docs/UI-SERVER.md +254 -0
- package/docs/UNIVERSAL-INTEGRATION.md +432 -0
- package/docs/V2.2.0-OPTIONAL-SEARCH.md +666 -0
- package/docs/WINDOWS-INSTALL-README.txt +34 -0
- package/docs/WINDOWS-POST-INSTALL.txt +45 -0
- package/docs/example_graph_usage.py +148 -0
- package/hooks/memory-list-skill.js +130 -0
- package/hooks/memory-profile-skill.js +284 -0
- package/hooks/memory-recall-skill.js +109 -0
- package/hooks/memory-remember-skill.js +127 -0
- package/hooks/memory-reset-skill.js +274 -0
- package/install-skills.sh +436 -0
- package/install.ps1 +417 -0
- package/install.sh +755 -0
- package/mcp_server.py +585 -0
- package/package.json +94 -0
- package/requirements-core.txt +24 -0
- package/requirements.txt +10 -0
- package/scripts/postinstall.js +126 -0
- package/scripts/preuninstall.js +57 -0
- package/skills/slm-build-graph/SKILL.md +423 -0
- package/skills/slm-list-recent/SKILL.md +348 -0
- package/skills/slm-recall/SKILL.md +325 -0
- package/skills/slm-remember/SKILL.md +194 -0
- package/skills/slm-status/SKILL.md +363 -0
- package/skills/slm-switch-profile/SKILL.md +442 -0
- package/src/__pycache__/cache_manager.cpython-312.pyc +0 -0
- package/src/__pycache__/embedding_engine.cpython-312.pyc +0 -0
- package/src/__pycache__/graph_engine.cpython-312.pyc +0 -0
- package/src/__pycache__/hnsw_index.cpython-312.pyc +0 -0
- package/src/__pycache__/hybrid_search.cpython-312.pyc +0 -0
- package/src/__pycache__/memory-profiles.cpython-312.pyc +0 -0
- package/src/__pycache__/memory-reset.cpython-312.pyc +0 -0
- package/src/__pycache__/memory_compression.cpython-312.pyc +0 -0
- package/src/__pycache__/memory_store_v2.cpython-312.pyc +0 -0
- package/src/__pycache__/migrate_v1_to_v2.cpython-312.pyc +0 -0
- package/src/__pycache__/pattern_learner.cpython-312.pyc +0 -0
- package/src/__pycache__/query_optimizer.cpython-312.pyc +0 -0
- package/src/__pycache__/search_engine_v2.cpython-312.pyc +0 -0
- package/src/__pycache__/setup_validator.cpython-312.pyc +0 -0
- package/src/__pycache__/tree_manager.cpython-312.pyc +0 -0
- package/src/cache_manager.py +520 -0
- package/src/embedding_engine.py +671 -0
- package/src/graph_engine.py +970 -0
- package/src/hnsw_index.py +626 -0
- package/src/hybrid_search.py +693 -0
- package/src/memory-profiles.py +518 -0
- package/src/memory-reset.py +485 -0
- package/src/memory_compression.py +999 -0
- package/src/memory_store_v2.py +1088 -0
- package/src/migrate_v1_to_v2.py +638 -0
- package/src/pattern_learner.py +898 -0
- package/src/query_optimizer.py +513 -0
- package/src/search_engine_v2.py +403 -0
- package/src/setup_validator.py +479 -0
- package/src/tree_manager.py +720 -0
package/src/migrate_v1_to_v2.py
@@ -0,0 +1,638 @@
#!/usr/bin/env python3
"""
SuperLocalMemory V2 - Intelligent Local Memory System
Copyright (c) 2026 Varun Pratap Bhardwaj
Licensed under MIT License

Repository: https://github.com/varun369/SuperLocalMemoryV2
Author: Varun Pratap Bhardwaj (Solution Architect)

NOTICE: This software is protected by MIT License.
Attribution must be preserved in all copies or derivatives.
"""

"""
SuperLocalMemory V1 to V2 Migration Script

Safely migrates the memory database from V1 schema to V2 architecture.
This script is idempotent - safe to re-run if interrupted.

Usage:
    python ~/.claude-memory/migrate_v1_to_v2.py

Features:
- Adds new columns to memories table
- Creates new tables for tree, graph, patterns, and archive
- Creates all required indexes
- Migrates existing memories to tree structure
- Handles rollback on failure
- Prints progress messages
"""

import sqlite3
import json
from pathlib import Path
from datetime import datetime
import sys
import traceback

DB_PATH = Path.home() / '.claude-memory' / 'memory.db'
BACKUP_PATH = Path.home() / '.claude-memory' / 'backups' / f'pre-v2-{datetime.now().strftime("%Y%m%d-%H%M%S")}.db'


def create_backup():
    """Create a backup of the database before migration."""
    print("=" * 60)
    print("CREATING BACKUP")
    print("=" * 60)

    BACKUP_PATH.parent.mkdir(parents=True, exist_ok=True)

    if DB_PATH.exists():
        import shutil
        shutil.copy2(DB_PATH, BACKUP_PATH)
        print(f"✓ Backup created: {BACKUP_PATH}")
        print(f"  Size: {BACKUP_PATH.stat().st_size / 1024:.1f} KB")
    else:
        print("! Database does not exist yet - no backup needed")

    print()


def check_schema_version(conn):
    """Check if migration has already been completed."""
    cursor = conn.cursor()

    try:
        cursor.execute("""
            SELECT value FROM system_metadata WHERE key = 'schema_version'
        """)
        result = cursor.fetchone()
        if result and result[0] == '2.0.0':
            return True
    except sqlite3.OperationalError:
        # Table doesn't exist yet
        pass

    return False


def add_new_columns(conn):
    """Add new columns to the memories table."""
    print("=" * 60)
    print("ADDING NEW COLUMNS TO MEMORIES TABLE")
    print("=" * 60)

    cursor = conn.cursor()

    new_columns = [
        ('parent_id', 'INTEGER'),
        ('tree_path', 'TEXT'),
        ('depth', 'INTEGER DEFAULT 0'),
        ('category', 'TEXT'),
        ('cluster_id', 'INTEGER'),
        ('last_accessed', 'TIMESTAMP'),
        ('access_count', 'INTEGER DEFAULT 0'),
        ('tier', 'INTEGER DEFAULT 1')
    ]

    for col_name, col_type in new_columns:
        try:
            cursor.execute(f'ALTER TABLE memories ADD COLUMN {col_name} {col_type}')
            print(f"✓ Added column: {col_name}")
        except sqlite3.OperationalError as e:
            if 'duplicate column' in str(e).lower():
                print(f"- Column already exists: {col_name}")
            else:
                raise

    print()


def create_new_tables(conn):
    """Create all new tables for V2 architecture."""
    print("=" * 60)
    print("CREATING NEW TABLES")
    print("=" * 60)

    cursor = conn.cursor()

    tables = {
        'memory_tree': '''
            CREATE TABLE IF NOT EXISTS memory_tree (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                node_type TEXT NOT NULL,
                name TEXT NOT NULL,
                description TEXT,
                parent_id INTEGER,
                tree_path TEXT NOT NULL,
                depth INTEGER DEFAULT 0,
                memory_count INTEGER DEFAULT 0,
                total_size INTEGER DEFAULT 0,
                last_updated TIMESTAMP,
                memory_id INTEGER,
                FOREIGN KEY (parent_id) REFERENCES memory_tree(id) ON DELETE CASCADE,
                FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
            )
        ''',

        'graph_nodes': '''
            CREATE TABLE IF NOT EXISTS graph_nodes (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                memory_id INTEGER UNIQUE NOT NULL,
                entities TEXT,
                embedding_vector TEXT,
                FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
            )
        ''',

        'graph_edges': '''
            CREATE TABLE IF NOT EXISTS graph_edges (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                source_memory_id INTEGER NOT NULL,
                target_memory_id INTEGER NOT NULL,
                relationship_type TEXT,
                weight REAL DEFAULT 1.0,
                shared_entities TEXT,
                similarity_score REAL,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (source_memory_id) REFERENCES memories(id) ON DELETE CASCADE,
                FOREIGN KEY (target_memory_id) REFERENCES memories(id) ON DELETE CASCADE,
                UNIQUE(source_memory_id, target_memory_id)
            )
        ''',

        'graph_clusters': '''
            CREATE TABLE IF NOT EXISTS graph_clusters (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                description TEXT,
                member_count INTEGER DEFAULT 0,
                avg_importance REAL,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''',

        'identity_patterns': '''
            CREATE TABLE IF NOT EXISTS identity_patterns (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                pattern_type TEXT NOT NULL,
                key TEXT NOT NULL,
                value TEXT NOT NULL,
                confidence REAL DEFAULT 0.5,
                evidence_count INTEGER DEFAULT 1,
                memory_ids TEXT,
                category TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                UNIQUE(pattern_type, key, category)
            )
        ''',

        'pattern_examples': '''
            CREATE TABLE IF NOT EXISTS pattern_examples (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                pattern_id INTEGER NOT NULL,
                memory_id INTEGER NOT NULL,
                example_text TEXT,
                FOREIGN KEY (pattern_id) REFERENCES identity_patterns(id) ON DELETE CASCADE,
                FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
            )
        ''',

        'memory_archive': '''
            CREATE TABLE IF NOT EXISTS memory_archive (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                memory_id INTEGER UNIQUE NOT NULL,
                full_content TEXT NOT NULL,
                archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
            )
        ''',

        'system_metadata': '''
            CREATE TABLE IF NOT EXISTS system_metadata (
                key TEXT PRIMARY KEY,
                value TEXT,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        '''
    }

    for table_name, create_sql in tables.items():
        try:
            cursor.execute(create_sql)
            print(f"✓ Created table: {table_name}")
        except sqlite3.OperationalError as e:
            if 'already exists' in str(e).lower():
                print(f"- Table already exists: {table_name}")
            else:
                raise

    print()


def create_indexes(conn):
    """Create all indexes for performance optimization."""
    print("=" * 60)
    print("CREATING INDEXES")
    print("=" * 60)

    cursor = conn.cursor()

    indexes = [
        ('idx_project', 'memories', 'project_path'),
        ('idx_tags', 'memories', 'tags'),
        ('idx_category', 'memories', 'category'),
        ('idx_tree_path', 'memories', 'tree_path'),
        ('idx_cluster', 'memories', 'cluster_id'),
        ('idx_last_accessed', 'memories', 'last_accessed'),
        ('idx_tier', 'memories', 'tier'),
        ('idx_tree_path_layer2', 'memory_tree', 'tree_path'),
        ('idx_node_type', 'memory_tree', 'node_type'),
        ('idx_cluster_members', 'memories', 'cluster_id'),
        ('idx_graph_source', 'graph_edges', 'source_memory_id'),
        ('idx_graph_target', 'graph_edges', 'target_memory_id'),
        ('idx_pattern_type', 'identity_patterns', 'pattern_type'),
        ('idx_pattern_confidence', 'identity_patterns', 'confidence'),
        ('idx_archive_memory', 'memory_archive', 'memory_id')
    ]

    for idx_name, table, column in indexes:
        try:
            cursor.execute(f'CREATE INDEX IF NOT EXISTS {idx_name} ON {table}({column})')
            print(f"✓ Created index: {idx_name}")
        except sqlite3.OperationalError as e:
            print(f"- Index already exists: {idx_name}")

    print()


def migrate_to_tree_structure(conn):
    """Migrate existing memories to tree structure."""
    print("=" * 60)
    print("MIGRATING MEMORIES TO TREE STRUCTURE")
    print("=" * 60)

    cursor = conn.cursor()

    # Check if root node already exists
    cursor.execute("SELECT id FROM memory_tree WHERE node_type = 'root'")
    root = cursor.fetchone()

    if root:
        root_id = root[0]
        print(f"- Root node already exists (id={root_id})")
    else:
        # Create root node
        cursor.execute('''
            INSERT INTO memory_tree (node_type, name, tree_path, depth, last_updated)
            VALUES ('root', 'All Projects', '1', 0, CURRENT_TIMESTAMP)
        ''')
        root_id = cursor.lastrowid
        print(f"✓ Created root node (id={root_id})")

    # Get all existing memories that haven't been migrated
    cursor.execute('''
        SELECT id, project_path, project_name, content
        FROM memories
        WHERE tree_path IS NULL OR tree_path = ''
    ''')
    memories = cursor.fetchall()

    if not memories:
        print("- No memories to migrate (all already in tree)")
        print()
        return

    print(f"Found {len(memories)} memories to migrate")

    project_nodes = {}  # project_key -> node_id

    # Load existing project nodes
    cursor.execute("""
        SELECT id, name FROM memory_tree WHERE node_type = 'project'
    """)
    for node_id, name in cursor.fetchall():
        project_nodes[name] = node_id

    migrated_count = 0

    for memory_id, project_path, project_name, content in memories:
        # Determine project key
        if not project_name and not project_path:
            project_key = 'Uncategorized'
        else:
            project_key = project_name or Path(project_path).name if project_path else 'Uncategorized'

        # Create project node if doesn't exist
        if project_key not in project_nodes:
            cursor.execute('''
                INSERT INTO memory_tree (node_type, name, parent_id, tree_path, depth, last_updated)
                VALUES ('project', ?, ?, ?, 1, CURRENT_TIMESTAMP)
            ''', (project_key, root_id, f'1.{len(project_nodes) + 2}'))

            project_nodes[project_key] = cursor.lastrowid
            print(f"  ✓ Created project node: {project_key}")

        # Link memory to project
        project_node_id = project_nodes[project_key]
        tree_path = f'1.{project_node_id}.{memory_id}'

        # Create memory node in tree
        cursor.execute('''
            INSERT INTO memory_tree (node_type, name, parent_id, tree_path, depth, memory_id, last_updated)
            VALUES ('memory', ?, ?, ?, 2, ?, CURRENT_TIMESTAMP)
        ''', (
            f"Memory #{memory_id}",
            project_node_id,
            tree_path,
            memory_id
        ))

        # Update memory with tree info
        cursor.execute('''
            UPDATE memories
            SET tree_path = ?, depth = 2, last_accessed = created_at
            WHERE id = ?
        ''', (tree_path, memory_id))

        migrated_count += 1

    # Update project node memory counts
    for project_key, project_node_id in project_nodes.items():
        cursor.execute('''
            SELECT COUNT(*), SUM(LENGTH(content))
            FROM memories m
            JOIN memory_tree mt ON mt.memory_id = m.id
            WHERE mt.parent_id = ?
        ''', (project_node_id,))

        count, total_size = cursor.fetchone()
        total_size = total_size or 0

        cursor.execute('''
            UPDATE memory_tree
            SET memory_count = ?, total_size = ?, last_updated = CURRENT_TIMESTAMP
            WHERE id = ?
        ''', (count, total_size, project_node_id))

    # Update root node count
    cursor.execute('''
        SELECT COUNT(*) FROM memories
    ''')
    total_memories = cursor.fetchone()[0]

    cursor.execute('''
        UPDATE memory_tree
        SET memory_count = ?, last_updated = CURRENT_TIMESTAMP
        WHERE id = ?
    ''', (total_memories, root_id))

    print(f"✓ Migrated {migrated_count} memories to {len(project_nodes)} projects")
    print()


def update_metadata(conn):
    """Update system metadata with migration info."""
    print("=" * 60)
    print("UPDATING SYSTEM METADATA")
    print("=" * 60)

    cursor = conn.cursor()

    cursor.execute('''
        INSERT OR REPLACE INTO system_metadata (key, value, updated_at)
        VALUES ('schema_version', '2.0.0', CURRENT_TIMESTAMP)
    ''')
    print("✓ Set schema_version = 2.0.0")

    cursor.execute('''
        INSERT OR REPLACE INTO system_metadata (key, value, updated_at)
        VALUES ('migrated_at', ?, CURRENT_TIMESTAMP)
    ''', (datetime.now().isoformat(),))
    print(f"✓ Set migrated_at = {datetime.now().isoformat()}")

    # Count memories
    cursor.execute('SELECT COUNT(*) FROM memories')
    memory_count = cursor.fetchone()[0]

    cursor.execute('''
        INSERT OR REPLACE INTO system_metadata (key, value, updated_at)
        VALUES ('memory_count_at_migration', ?, CURRENT_TIMESTAMP)
    ''', (str(memory_count),))
    print(f"✓ Recorded memory_count_at_migration = {memory_count}")

    print()


def verify_migration(conn):
    """Verify that migration completed successfully."""
    print("=" * 60)
    print("VERIFYING MIGRATION")
    print("=" * 60)

    cursor = conn.cursor()

    checks = []

    # Check schema version
    cursor.execute("SELECT value FROM system_metadata WHERE key = 'schema_version'")
    version = cursor.fetchone()
    checks.append(("Schema version", version and version[0] == '2.0.0'))

    # Check memories table has new columns
    cursor.execute("PRAGMA table_info(memories)")
    columns = {row[1] for row in cursor.fetchall()}
    required_columns = {'parent_id', 'tree_path', 'depth', 'category', 'cluster_id',
                        'last_accessed', 'access_count', 'tier'}
    checks.append(("New columns added", required_columns.issubset(columns)))

    # Check new tables exist
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
    tables = {row[0] for row in cursor.fetchall()}
    required_tables = {'memory_tree', 'graph_nodes', 'graph_edges', 'graph_clusters',
                       'identity_patterns', 'pattern_examples', 'memory_archive', 'system_metadata'}
    checks.append(("New tables created", required_tables.issubset(tables)))

    # Check tree structure
    cursor.execute("SELECT COUNT(*) FROM memory_tree WHERE node_type = 'root'")
    root_count = cursor.fetchone()[0]
    checks.append(("Root node exists", root_count == 1))

    cursor.execute("SELECT COUNT(*) FROM memory_tree WHERE node_type = 'project'")
    project_count = cursor.fetchone()[0]
    checks.append(("Project nodes exist", project_count >= 0))

    cursor.execute("SELECT COUNT(*) FROM memories WHERE tree_path IS NOT NULL")
    migrated_count = cursor.fetchone()[0]
    cursor.execute("SELECT COUNT(*) FROM memories")
    total_count = cursor.fetchone()[0]
    checks.append(("All memories migrated", migrated_count == total_count))

    # Print results
    all_passed = True
    for check_name, passed in checks:
        status = "✓" if passed else "✗"
        print(f"{status} {check_name}")
        if not passed:
            all_passed = False

    print()

    if not all_passed:
        raise Exception("Migration verification failed! See errors above.")

    return True


def print_summary(conn):
    """Print migration summary statistics."""
    print("=" * 60)
    print("MIGRATION SUMMARY")
    print("=" * 60)

    cursor = conn.cursor()

    cursor.execute("SELECT COUNT(*) FROM memories")
    memory_count = cursor.fetchone()[0]

    cursor.execute("SELECT COUNT(*) FROM memory_tree WHERE node_type = 'project'")
    project_count = cursor.fetchone()[0]

    cursor.execute("SELECT COUNT(*) FROM memory_tree WHERE node_type = 'memory'")
    tree_memory_count = cursor.fetchone()[0]

    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
    tables = [row[0] for row in cursor.fetchall()]

    cursor.execute("SELECT name FROM sqlite_master WHERE type='index' ORDER BY name")
    indexes = [row[0] for row in cursor.fetchall()]

    print(f"Total memories: {memory_count}")
    print(f"Project nodes: {project_count}")
    print(f"Memory nodes in tree: {tree_memory_count}")
    print(f"Total tables: {len(tables)}")
    print(f"Total indexes: {len(indexes)}")
    print()

    print("Database tables:")
    for table in tables:
        if not table.startswith('sqlite_') and not table.endswith('_fts'):
            print(f"  - {table}")

    print()
    print("=" * 60)
    print("✅ MIGRATION COMPLETED SUCCESSFULLY!")
    print("=" * 60)
    print()


def migrate():
    """Main migration function."""
    print()
    print("╔" + "═" * 58 + "╗")
    print("║" + " " * 58 + "║")
    print("║" + " SuperLocalMemory V1 → V2 Migration".center(58) + "║")
    print("║" + " " * 58 + "║")
    print("╚" + "═" * 58 + "╝")
    print()
    print(f"Database: {DB_PATH}")
    print(f"Started: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print()

    if not DB_PATH.exists():
        print(f"ERROR: Database not found at {DB_PATH}")
        print("Please ensure SuperLocalMemory V1 is installed.")
        sys.exit(1)

    try:
        # Create backup first
        create_backup()

        # Open database connection
        conn = sqlite3.connect(DB_PATH)
        conn.execute("PRAGMA foreign_keys = ON")

        # Check if already migrated
        if check_schema_version(conn):
            print("=" * 60)
            print("ALREADY MIGRATED")
            print("=" * 60)
            print("Database is already at version 2.0.0")
            print("Migration is idempotent - re-running will update any missing components.")
            print()

        # Start transaction
        conn.execute("BEGIN")

        try:
            # Run migration steps
            add_new_columns(conn)
            create_new_tables(conn)
            create_indexes(conn)
            migrate_to_tree_structure(conn)
            update_metadata(conn)

            # Verify migration
            verify_migration(conn)

            # Commit transaction
            conn.commit()

            # Print summary
            print_summary(conn)

        except Exception as e:
            # Rollback on error
            conn.rollback()
            raise

        finally:
            conn.close()

        # Print next steps
        print("NEXT STEPS:")
        print("-" * 60)
        print("1. Build knowledge graph:")
        print("   python ~/.claude-memory/graph_engine.py --build")
        print()
        print("2. Learn identity patterns:")
        print("   python ~/.claude-memory/pattern_learner.py --analyze")
        print()
        print("3. Test CLI commands:")
        print("   /recall 'test query'")
        print()
        print("4. Start web UI (optional):")
        print("   cd ~/.claude-memory/ui && uvicorn server:app --port 5432")
        print()
        print("-" * 60)
        print()
        print(f"Backup saved to: {BACKUP_PATH}")
        print()

    except Exception as e:
        print()
        print("=" * 60)
        print("❌ MIGRATION FAILED")
        print("=" * 60)
        print(f"Error: {str(e)}")
        print()
        print("Stack trace:")
        traceback.print_exc()
        print()
        print("=" * 60)
        print("ROLLBACK INSTRUCTIONS")
        print("=" * 60)
        print("The database has been rolled back to its previous state.")
        print(f"A backup was created at: {BACKUP_PATH}")
        print()
        print("To restore from backup manually:")
        print(f"  cp {BACKUP_PATH} {DB_PATH}")
        print()
        sys.exit(1)


if __name__ == '__main__':
    migrate()
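
For reference, a minimal sketch of how the migrated database could be inspected after running `python ~/.claude-memory/migrate_v1_to_v2.py`. This snippet is not part of the package; it only reads the `system_metadata` and `memory_tree` tables that the script above creates, at the same `~/.claude-memory/memory.db` path.

#!/usr/bin/env python3
# Illustrative check only - not shipped with superlocalmemory.
# Assumes migrate_v1_to_v2.py (above) has already been run.
import sqlite3
from pathlib import Path

db_path = Path.home() / '.claude-memory' / 'memory.db'

conn = sqlite3.connect(db_path)
cursor = conn.cursor()

# The migration stamps its version into system_metadata.
cursor.execute("SELECT value FROM system_metadata WHERE key = 'schema_version'")
row = cursor.fetchone()
print("schema_version:", row[0] if row else "not migrated")

# Each migrated memory is mirrored by a 'memory' node under a 'project' node,
# all hanging off the single 'root' node created by the migration.
cursor.execute("SELECT node_type, COUNT(*) FROM memory_tree GROUP BY node_type")
for node_type, count in cursor.fetchall():
    print(f"{node_type}: {count}")

conn.close()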