claude-memory-agent 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +107 -0
- package/README.md +200 -0
- package/agent_card.py +512 -0
- package/bin/cli.js +181 -0
- package/bin/postinstall.js +216 -0
- package/config.py +104 -0
- package/dashboard.html +2689 -0
- package/hooks/README.md +196 -0
- package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
- package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
- package/hooks/auto-detect-response.py +348 -0
- package/hooks/auto_capture.py +255 -0
- package/hooks/detect-correction.py +173 -0
- package/hooks/grounding-hook.py +348 -0
- package/hooks/log-tool-use.py +234 -0
- package/hooks/log-user-request.py +208 -0
- package/hooks/pre-tool-decision.py +218 -0
- package/hooks/problem-detector.py +343 -0
- package/hooks/session_end.py +192 -0
- package/hooks/session_start.py +227 -0
- package/install.py +887 -0
- package/main.py +2859 -0
- package/manager.py +997 -0
- package/package.json +55 -0
- package/requirements.txt +8 -0
- package/run_server.py +136 -0
- package/services/__init__.py +50 -0
- package/services/__pycache__/__init__.cpython-312.pyc +0 -0
- package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
- package/services/__pycache__/auth.cpython-312.pyc +0 -0
- package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
- package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
- package/services/__pycache__/confidence.cpython-312.pyc +0 -0
- package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
- package/services/__pycache__/database.cpython-312.pyc +0 -0
- package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
- package/services/__pycache__/insights.cpython-312.pyc +0 -0
- package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
- package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
- package/services/__pycache__/timeline.cpython-312.pyc +0 -0
- package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
- package/services/__pycache__/websocket.cpython-312.pyc +0 -0
- package/services/agent_registry.py +753 -0
- package/services/auth.py +331 -0
- package/services/auto_inject.py +250 -0
- package/services/claude_md_sync.py +275 -0
- package/services/cleanup.py +667 -0
- package/services/compaction_flush.py +447 -0
- package/services/confidence.py +301 -0
- package/services/daily_log.py +333 -0
- package/services/database.py +2485 -0
- package/services/embeddings.py +358 -0
- package/services/insights.py +632 -0
- package/services/llm_analyzer.py +595 -0
- package/services/memory_md_sync.py +409 -0
- package/services/retry_queue.py +453 -0
- package/services/timeline.py +579 -0
- package/services/vector_index.py +398 -0
- package/services/websocket.py +257 -0
- package/skills/__init__.py +6 -0
- package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
- package/skills/__pycache__/admin.cpython-312.pyc +0 -0
- package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
- package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
- package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
- package/skills/__pycache__/insights.cpython-312.pyc +0 -0
- package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
- package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
- package/skills/__pycache__/search.cpython-312.pyc +0 -0
- package/skills/__pycache__/state.cpython-312.pyc +0 -0
- package/skills/__pycache__/store.cpython-312.pyc +0 -0
- package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
- package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
- package/skills/__pycache__/verification.cpython-312.pyc +0 -0
- package/skills/admin.py +469 -0
- package/skills/checkpoint.py +198 -0
- package/skills/claude_md.py +363 -0
- package/skills/cleanup.py +241 -0
- package/skills/grounding.py +801 -0
- package/skills/insights.py +231 -0
- package/skills/natural_language.py +277 -0
- package/skills/retrieve.py +67 -0
- package/skills/search.py +213 -0
- package/skills/state.py +182 -0
- package/skills/store.py +179 -0
- package/skills/summarize.py +588 -0
- package/skills/timeline.py +387 -0
- package/skills/verification.py +391 -0
- package/start_daemon.py +155 -0
- package/test_automation.py +221 -0
- package/test_complete.py +338 -0
- package/test_full.py +322 -0
- package/update_system.py +817 -0
- package/verify_db.py +134 -0
package/update_system.py
ADDED
|
@@ -0,0 +1,817 @@
|
|
|
1
|
+
#!/usr/bin/env python3
"""
Claude Memory System - Update & Migration Script
================================================
This script detects the current installation state and performs
all necessary migrations to bring it to the latest version.

Usage:
    python update_system.py [--dry-run] [--verbose]

Options:
    --dry-run    Show what would be done without making changes
    --verbose    Show detailed progress information
"""

import sqlite3
import os
import sys
import json
import shutil
from datetime import datetime
from pathlib import Path

# Version history and required migrations.
# Keys are schema versions in release order; values describe the feature
# (usually a new table or column set) that marks that version.  Used as
# human-readable documentation; detect_version() probes the database
# structure directly rather than reading this mapping.
VERSION_HISTORY = {
    "1.0.0": "Initial release - basic memories table",
    "1.1.0": "Added patterns table",
    "1.2.0": "Added timeline_events and session_state",
    "1.3.0": "Added project configurations (agent, mcp, hook configs)",
    "1.4.0": "Added insights and memory_archive",
    "1.5.0": "Added anchor_conflicts and anchor_history",
    "2.0.0": "Path normalization fix, cleanup system",
    "2.1.0": "Current version - full feature set",
}

# The schema version this script migrates a database up to.
CURRENT_VERSION = "2.1.0"
|
|
37
|
+
|
|
38
|
+
class Colors:
    """ANSI color codes for terminal output"""
    # Standard ANSI SGR escape sequences.  ENDC resets all attributes;
    # BOLD is combined with a color by the print helpers below.
    HEADER = '\033[95m'  # bright magenta, used for banners
    BLUE = '\033[94m'
    CYAN = '\033[96m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
    ENDC = '\033[0m'  # reset
    BOLD = '\033[1m'
|
|
48
|
+
|
|
49
|
+
def print_header(text):
    """Print *text* centred inside a bold, 60-character '=' banner."""
    rule = f"{Colors.HEADER}{Colors.BOLD}{'='*60}{Colors.ENDC}"
    print(f"\n{rule}")
    print(f"{Colors.HEADER}{Colors.BOLD}{text:^60}{Colors.ENDC}")
    print(f"{rule}\n")
|
|
53
|
+
|
|
54
|
+
def print_step(text):
    """Announce a migration step with a cyan [STEP] prefix."""
    tag = f"{Colors.CYAN}[STEP]{Colors.ENDC}"
    print(f"{tag} {text}")
|
|
56
|
+
|
|
57
|
+
def print_success(text):
    """Report a completed action with a green [OK] prefix."""
    tag = f"{Colors.GREEN}[OK]{Colors.ENDC}"
    print(f"{tag} {text}")
|
|
59
|
+
|
|
60
|
+
def print_warning(text):
    """Report a non-fatal issue with a yellow [WARN] prefix."""
    tag = f"{Colors.YELLOW}[WARN]{Colors.ENDC}"
    print(f"{tag} {text}")
|
|
62
|
+
|
|
63
|
+
def print_error(text):
    """Report a failure with a red [ERROR] prefix."""
    tag = f"{Colors.RED}[ERROR]{Colors.ENDC}"
    print(f"{tag} {text}")
|
|
65
|
+
|
|
66
|
+
def print_info(text):
    """Emit an informational message with a blue [INFO] prefix."""
    tag = f"{Colors.BLUE}[INFO]{Colors.ENDC}"
    print(f"{tag} {text}")
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class MigrationManager:
|
|
71
|
+
    def __init__(self, db_path: str, dry_run: bool = False, verbose: bool = False):
        """Coordinate schema migrations for one SQLite database.

        Args:
            db_path: Filesystem path to the SQLite database file.
            dry_run: When True, report what would happen without executing
                SQL or committing.
            verbose: When True, echo each SQL statement as it runs.
        """
        self.db_path = db_path
        self.dry_run = dry_run
        self.verbose = verbose
        self.conn = None  # sqlite3.Connection, populated by connect()
        self.cursor = None  # sqlite3.Cursor, populated by connect()
        self.migrations_run = []  # human-readable log of applied migrations
        self.detected_version = None  # set from detect_version() during a run
|
|
79
|
+
|
|
80
|
+
def connect(self):
|
|
81
|
+
"""Connect to the database"""
|
|
82
|
+
if not os.path.exists(self.db_path):
|
|
83
|
+
print_error(f"Database not found: {self.db_path}")
|
|
84
|
+
return False
|
|
85
|
+
self.conn = sqlite3.connect(self.db_path)
|
|
86
|
+
self.conn.row_factory = sqlite3.Row
|
|
87
|
+
self.cursor = self.conn.cursor()
|
|
88
|
+
return True
|
|
89
|
+
|
|
90
|
+
def close(self):
|
|
91
|
+
"""Close database connection"""
|
|
92
|
+
if self.conn:
|
|
93
|
+
self.conn.close()
|
|
94
|
+
|
|
95
|
+
def get_tables(self) -> list:
|
|
96
|
+
"""Get list of all tables"""
|
|
97
|
+
self.cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
|
|
98
|
+
return [r[0] for r in self.cursor.fetchall()]
|
|
99
|
+
|
|
100
|
+
def get_columns(self, table: str) -> dict:
|
|
101
|
+
"""Get columns for a table"""
|
|
102
|
+
self.cursor.execute(f"PRAGMA table_info({table})")
|
|
103
|
+
return {r[1]: r[2] for r in self.cursor.fetchall()}
|
|
104
|
+
|
|
105
|
+
def table_exists(self, table: str) -> bool:
|
|
106
|
+
"""Check if a table exists"""
|
|
107
|
+
return table in self.get_tables()
|
|
108
|
+
|
|
109
|
+
def column_exists(self, table: str, column: str) -> bool:
|
|
110
|
+
"""Check if a column exists in a table"""
|
|
111
|
+
if not self.table_exists(table):
|
|
112
|
+
return False
|
|
113
|
+
return column in self.get_columns(table)
|
|
114
|
+
|
|
115
|
+
def detect_version(self) -> str:
|
|
116
|
+
"""Detect current installation version based on database structure"""
|
|
117
|
+
tables = self.get_tables()
|
|
118
|
+
|
|
119
|
+
# Check from newest to oldest features
|
|
120
|
+
if 'anchor_history' in tables and 'cleanup_config' in tables:
|
|
121
|
+
# Check for latest column additions
|
|
122
|
+
if self.column_exists('memories', 'embedding_model'):
|
|
123
|
+
return "2.1.0"
|
|
124
|
+
return "2.0.0"
|
|
125
|
+
|
|
126
|
+
if 'anchor_conflicts' in tables:
|
|
127
|
+
return "1.5.0"
|
|
128
|
+
|
|
129
|
+
if 'insights' in tables or 'memory_archive' in tables:
|
|
130
|
+
return "1.4.0"
|
|
131
|
+
|
|
132
|
+
if 'project_agent_config' in tables:
|
|
133
|
+
return "1.3.0"
|
|
134
|
+
|
|
135
|
+
if 'timeline_events' in tables:
|
|
136
|
+
return "1.2.0"
|
|
137
|
+
|
|
138
|
+
if 'patterns' in tables:
|
|
139
|
+
return "1.1.0"
|
|
140
|
+
|
|
141
|
+
if 'memories' in tables:
|
|
142
|
+
return "1.0.0"
|
|
143
|
+
|
|
144
|
+
return "0.0.0" # Fresh install
|
|
145
|
+
|
|
146
|
+
def backup_database(self):
|
|
147
|
+
"""Create a backup of the database before migration"""
|
|
148
|
+
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
149
|
+
backup_path = f"{self.db_path}.backup_{timestamp}"
|
|
150
|
+
|
|
151
|
+
if self.dry_run:
|
|
152
|
+
print_info(f"Would backup database to: {backup_path}")
|
|
153
|
+
return backup_path
|
|
154
|
+
|
|
155
|
+
print_step(f"Creating backup: {backup_path}")
|
|
156
|
+
shutil.copy2(self.db_path, backup_path)
|
|
157
|
+
print_success("Backup created successfully")
|
|
158
|
+
return backup_path
|
|
159
|
+
|
|
160
|
+
def execute(self, sql: str, params: tuple = None):
|
|
161
|
+
"""Execute SQL with dry-run support"""
|
|
162
|
+
if self.verbose:
|
|
163
|
+
print_info(f"SQL: {sql[:100]}...")
|
|
164
|
+
|
|
165
|
+
if self.dry_run:
|
|
166
|
+
return
|
|
167
|
+
|
|
168
|
+
if params:
|
|
169
|
+
self.cursor.execute(sql, params)
|
|
170
|
+
else:
|
|
171
|
+
self.cursor.execute(sql)
|
|
172
|
+
|
|
173
|
+
def commit(self):
|
|
174
|
+
"""Commit changes with dry-run support"""
|
|
175
|
+
if not self.dry_run:
|
|
176
|
+
self.conn.commit()
|
|
177
|
+
|
|
178
|
+
# =========================================
|
|
179
|
+
# Migration Functions
|
|
180
|
+
# =========================================
|
|
181
|
+
|
|
182
|
+
    def migrate_create_base_tables(self):
        """Create base tables if they don't exist (v1.0.0).

        Installs the core ``memories`` table.  The DDL includes columns
        added in later versions as well, so a fresh install lands on the
        final shape immediately; older installs get the extra columns via
        migrate_add_missing_columns() instead.
        """
        if not self.table_exists('memories'):
            print_step("Creating memories table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS memories (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    type TEXT DEFAULT 'chunk',
                    content TEXT NOT NULL,
                    embedding TEXT,
                    project_path TEXT,
                    project_name TEXT,
                    project_type TEXT,
                    tech_stack TEXT,
                    session_id TEXT,
                    chat_id TEXT,
                    agent_type TEXT,
                    skill_used TEXT,
                    tools_used TEXT,
                    outcome TEXT,
                    success INTEGER,
                    user_feedback TEXT,
                    tags TEXT,
                    metadata TEXT,
                    importance INTEGER DEFAULT 5,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    last_accessed TEXT,
                    access_count INTEGER DEFAULT 0,
                    decay_factor REAL DEFAULT 1.0,
                    embedding_model TEXT
                )
            """)
            self.migrations_run.append("Created memories table")
|
|
216
|
+
|
|
217
|
+
    def migrate_create_patterns(self):
        """Create patterns table (v1.1.0).

        ``patterns`` stores reusable problem/solution pairs with success
        and failure counters for later ranking.
        """
        if not self.table_exists('patterns'):
            print_step("Creating patterns table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS patterns (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    problem_type TEXT,
                    solution TEXT NOT NULL,
                    embedding TEXT,
                    tech_context TEXT,
                    success_count INTEGER DEFAULT 0,
                    failure_count INTEGER DEFAULT 0,
                    metadata TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created patterns table")
|
|
237
|
+
|
|
238
|
+
    def migrate_create_timeline_session(self):
        """Create timeline and session tables (v1.2.0).

        Installs three tables when absent: ``timeline_events`` (a
        per-session event log with parent/root self-references),
        ``session_state`` (one working-state row per session), and
        ``checkpoints`` (periodic state snapshots).
        """
        if not self.table_exists('timeline_events'):
            print_step("Creating timeline_events table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS timeline_events (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    project_path TEXT,
                    event_type TEXT NOT NULL,
                    sequence_num INTEGER,
                    summary TEXT,
                    details TEXT,
                    embedding TEXT,
                    parent_event_id INTEGER,
                    root_event_id INTEGER,
                    entities TEXT,
                    status TEXT DEFAULT 'active',
                    outcome TEXT,
                    confidence REAL DEFAULT 1.0,
                    is_anchor INTEGER DEFAULT 0,
                    is_reversible INTEGER DEFAULT 1,
                    needs_verification INTEGER DEFAULT 0,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (parent_event_id) REFERENCES timeline_events(id)
                )
            """)
            self.migrations_run.append("Created timeline_events table")

        if not self.table_exists('session_state'):
            print_step("Creating session_state table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS session_state (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT UNIQUE NOT NULL,
                    project_path TEXT,
                    current_goal TEXT,
                    pending_questions TEXT,
                    entity_registry TEXT,
                    decisions_summary TEXT,
                    last_checkpoint_id INTEGER,
                    events_since_checkpoint INTEGER DEFAULT 0,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    last_activity_at TEXT
                )
            """)
            self.migrations_run.append("Created session_state table")

        if not self.table_exists('checkpoints'):
            print_step("Creating checkpoints table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS checkpoints (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    project_path TEXT,
                    checkpoint_type TEXT DEFAULT 'auto',
                    state_snapshot TEXT,
                    summary TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created checkpoints table")
|
|
301
|
+
|
|
302
|
+
    def migrate_create_project_configs(self):
        """Create project configuration tables (v1.3.0).

        Declares the five per-project configuration tables as
        (name, DDL) pairs and creates each one that is missing.  All DDL
        uses IF NOT EXISTS, so the step is idempotent.
        """
        config_tables = [
            ('projects', """
                CREATE TABLE IF NOT EXISTS projects (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    path TEXT UNIQUE NOT NULL,
                    name TEXT,
                    type TEXT,
                    tech_stack TEXT,
                    conventions TEXT,
                    preferences TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """),
            ('project_agent_config', """
                CREATE TABLE IF NOT EXISTS project_agent_config (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    project_path TEXT NOT NULL,
                    agent_name TEXT NOT NULL,
                    enabled INTEGER DEFAULT 1,
                    config TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(project_path, agent_name)
                )
            """),
            ('project_mcp_config', """
                CREATE TABLE IF NOT EXISTS project_mcp_config (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    project_path TEXT NOT NULL,
                    server_name TEXT NOT NULL,
                    enabled INTEGER DEFAULT 1,
                    config TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(project_path, server_name)
                )
            """),
            ('project_hook_config', """
                CREATE TABLE IF NOT EXISTS project_hook_config (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    project_path TEXT NOT NULL,
                    hook_name TEXT NOT NULL,
                    enabled INTEGER DEFAULT 1,
                    config TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(project_path, hook_name)
                )
            """),
            ('project_preferences', """
                CREATE TABLE IF NOT EXISTS project_preferences (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    project_path TEXT UNIQUE NOT NULL,
                    preferences TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """),
        ]

        for table_name, create_sql in config_tables:
            if not self.table_exists(table_name):
                print_step(f"Creating {table_name} table...")
                self.execute(create_sql)
                self.migrations_run.append(f"Created {table_name} table")
|
|
370
|
+
|
|
371
|
+
    def migrate_create_insights_archive(self):
        """Create insights and archive tables (v1.4.0).

        Installs ``insights`` (derived observations awaiting user review),
        ``insight_feedback`` (user reactions, FK to insights), and
        ``memory_archive`` (rows moved out of ``memories`` during cleanup).
        """
        if not self.table_exists('insights'):
            print_step("Creating insights table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS insights (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    project_path TEXT,
                    insight_type TEXT NOT NULL,
                    title TEXT NOT NULL,
                    content TEXT NOT NULL,
                    source_memories TEXT,
                    confidence REAL DEFAULT 0.5,
                    status TEXT DEFAULT 'pending',
                    user_feedback TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created insights table")

        if not self.table_exists('insight_feedback'):
            print_step("Creating insight_feedback table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS insight_feedback (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    insight_id INTEGER NOT NULL,
                    feedback_type TEXT NOT NULL,
                    comment TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (insight_id) REFERENCES insights(id)
                )
            """)
            self.migrations_run.append("Created insight_feedback table")

        if not self.table_exists('memory_archive'):
            print_step("Creating memory_archive table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS memory_archive (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    original_id INTEGER,
                    type TEXT,
                    content TEXT,
                    project_path TEXT,
                    importance INTEGER,
                    archive_reason TEXT,
                    archived_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    original_created_at TEXT
                )
            """)
            self.migrations_run.append("Created memory_archive table")
|
|
422
|
+
|
|
423
|
+
    def migrate_create_anchor_tables(self):
        """Create anchor tracking tables (v1.5.0).

        Installs ``anchor_conflicts`` (actions that contradicted an anchor
        event, with their resolution) and ``anchor_history`` (an audit
        trail of anchor state transitions).
        """
        if not self.table_exists('anchor_conflicts'):
            print_step("Creating anchor_conflicts table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS anchor_conflicts (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    anchor_event_id INTEGER,
                    conflicting_action TEXT,
                    resolution TEXT,
                    resolved INTEGER DEFAULT 0,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created anchor_conflicts table")

        if not self.table_exists('anchor_history'):
            print_step("Creating anchor_history table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS anchor_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    event_id INTEGER,
                    action TEXT NOT NULL,
                    previous_state TEXT,
                    new_state TEXT,
                    reason TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created anchor_history table")
|
|
455
|
+
|
|
456
|
+
    def migrate_create_cleanup_tables(self):
        """Create cleanup system tables (v2.0.0).

        Installs ``cleanup_config`` (key/value settings for the cleanup
        service) and ``cleanup_log`` (per-run statistics).
        """
        if not self.table_exists('cleanup_config'):
            print_step("Creating cleanup_config table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS cleanup_config (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    config_key TEXT UNIQUE NOT NULL,
                    config_value TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created cleanup_config table")

        if not self.table_exists('cleanup_log'):
            print_step("Creating cleanup_log table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS cleanup_log (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    cleanup_type TEXT NOT NULL,
                    items_processed INTEGER DEFAULT 0,
                    items_removed INTEGER DEFAULT 0,
                    details TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created cleanup_log table")
|
|
484
|
+
|
|
485
|
+
def migrate_add_missing_columns(self):
|
|
486
|
+
"""Add any missing columns to existing tables"""
|
|
487
|
+
# Memories table columns
|
|
488
|
+
memories_columns = {
|
|
489
|
+
'embedding_model': 'TEXT',
|
|
490
|
+
'decay_factor': 'REAL DEFAULT 1.0',
|
|
491
|
+
'access_count': 'INTEGER DEFAULT 0',
|
|
492
|
+
'last_accessed': 'TEXT',
|
|
493
|
+
'skill_used': 'TEXT',
|
|
494
|
+
'chat_id': 'TEXT',
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
for col, col_type in memories_columns.items():
|
|
498
|
+
if not self.column_exists('memories', col):
|
|
499
|
+
print_step(f"Adding column memories.{col}...")
|
|
500
|
+
self.execute(f"ALTER TABLE memories ADD COLUMN {col} {col_type}")
|
|
501
|
+
self.migrations_run.append(f"Added memories.{col} column")
|
|
502
|
+
|
|
503
|
+
# Session state columns
|
|
504
|
+
session_columns = {
|
|
505
|
+
'last_activity_at': 'TEXT',
|
|
506
|
+
}
|
|
507
|
+
|
|
508
|
+
for col, col_type in session_columns.items():
|
|
509
|
+
if self.table_exists('session_state') and not self.column_exists('session_state', col):
|
|
510
|
+
print_step(f"Adding column session_state.{col}...")
|
|
511
|
+
self.execute(f"ALTER TABLE session_state ADD COLUMN {col} {col_type}")
|
|
512
|
+
self.migrations_run.append(f"Added session_state.{col} column")
|
|
513
|
+
|
|
514
|
+
    def migrate_normalize_paths(self):
        """Normalize all paths to use forward slashes (v2.0.0 fix).

        Windows installs stored backslash paths while lookups compare
        paths as strings, so every path column is rewritten to one
        canonical separator.  Tables whose path column carries a UNIQUE
        constraint need per-row handling: if the normalized form already
        exists, the backslash row is deleted rather than updated, to
        avoid a uniqueness violation.
        """
        print_step("Normalizing paths in all tables...")

        # Tables without unique constraints on path - safe to update directly
        simple_tables = [
            ('memories', 'project_path'),
            ('session_state', 'project_path'),
            ('timeline_events', 'project_path'),
            ('checkpoints', 'project_path'),
            ('insights', 'project_path'),
            ('memory_archive', 'project_path'),
        ]

        # Tables with unique constraints - need special handling
        unique_tables = [
            ('projects', 'path'),
            ('project_agent_config', 'project_path'),
            ('project_mcp_config', 'project_path'),
            ('project_hook_config', 'project_path'),
            ('project_preferences', 'project_path'),
        ]

        # Process simple tables first
        for table, column in simple_tables:
            if self.table_exists(table) and self.column_exists(table, column):
                # LIKE '%\%' matches values containing a backslash: SQLite
                # LIKE has no escape character unless ESCAPE is specified,
                # so the backslash is matched literally.
                self.cursor.execute(f"SELECT COUNT(*) FROM {table} WHERE {column} LIKE '%\\%'")
                count = self.cursor.fetchone()[0]

                if count > 0:
                    print_info(f"  Normalizing {count} paths in {table}.{column}")
                    if not self.dry_run:
                        self.execute(f"""
                            UPDATE {table}
                            SET {column} = REPLACE({column}, '\\', '/')
                            WHERE {column} LIKE '%\\%'
                        """)
                    self.migrations_run.append(f"Normalized {count} paths in {table}.{column}")

        # Process tables with unique constraints - delete duplicates first
        for table, column in unique_tables:
            if self.table_exists(table) and self.column_exists(table, column):
                self.cursor.execute(f"SELECT COUNT(*) FROM {table} WHERE {column} LIKE '%\\%'")
                count = self.cursor.fetchone()[0]

                if count > 0:
                    print_info(f"  Normalizing {count} paths in {table}.{column} (handling duplicates)")
                    if not self.dry_run:
                        # Find paths that would create duplicates after normalization
                        self.cursor.execute(f"""
                            SELECT {column}, REPLACE({column}, '\\', '/') as normalized
                            FROM {table}
                            WHERE {column} LIKE '%\\%'
                        """)
                        to_normalize = self.cursor.fetchall()

                        duplicates_removed = 0
                        for row in to_normalize:
                            old_path = row[0]
                            new_path = row[1]

                            # Check if normalized path already exists
                            self.cursor.execute(f"SELECT COUNT(*) FROM {table} WHERE {column} = ?", (new_path,))
                            exists = self.cursor.fetchone()[0] > 0

                            if exists:
                                # Delete the row with backslashes (keep the one with forward slashes)
                                self.execute(f"DELETE FROM {table} WHERE {column} = ?", (old_path,))
                                duplicates_removed += 1
                            else:
                                # Safe to update
                                self.execute(f"UPDATE {table} SET {column} = ? WHERE {column} = ?", (new_path, old_path))

                        if duplicates_removed > 0:
                            print_info(f"  Removed {duplicates_removed} duplicate entries")

                    self.migrations_run.append(f"Normalized paths in {table}.{column}")
|
|
591
|
+
|
|
592
|
+
    def migrate_create_indexes(self):
        """Create indexes for performance.

        Each index is created only when its table and column exist and no
        index with that name is already registered, making the step
        idempotent across repeated runs.
        """
        # (index name, table, indexed column)
        indexes = [
            ('idx_memories_project', 'memories', 'project_path'),
            ('idx_memories_type', 'memories', 'type'),
            ('idx_memories_created', 'memories', 'created_at'),
            ('idx_memories_importance', 'memories', 'importance'),
            ('idx_timeline_session', 'timeline_events', 'session_id'),
            ('idx_timeline_project', 'timeline_events', 'project_path'),
            ('idx_timeline_type', 'timeline_events', 'event_type'),
            ('idx_session_project', 'session_state', 'project_path'),
            ('idx_patterns_type', 'patterns', 'problem_type'),
        ]

        # Get existing indexes
        self.cursor.execute("SELECT name FROM sqlite_master WHERE type='index'")
        existing = [r[0] for r in self.cursor.fetchall()]

        for idx_name, table, column in indexes:
            if self.table_exists(table) and self.column_exists(table, column):
                if idx_name not in existing:
                    print_step(f"Creating index {idx_name}...")
                    self.execute(f"CREATE INDEX IF NOT EXISTS {idx_name} ON {table}({column})")
                    self.migrations_run.append(f"Created index {idx_name}")
|
|
616
|
+
|
|
617
|
+
    def migrate_set_version(self):
        """Store the current version in the database.

        Ensures the ``system_info`` key/value table exists, then upserts
        the 'version' key with CURRENT_VERSION so future runs can read the
        recorded version directly.
        """
        # Create system_info table if it doesn't exist
        if not self.table_exists('system_info'):
            print_step("Creating system_info table...")
            self.execute("""
                CREATE TABLE IF NOT EXISTS system_info (
                    key TEXT PRIMARY KEY,
                    value TEXT,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            """)
            self.migrations_run.append("Created system_info table")

        # Store version (INSERT OR REPLACE keeps one row per key)
        self.execute("""
            INSERT OR REPLACE INTO system_info (key, value, updated_at)
            VALUES ('version', ?, CURRENT_TIMESTAMP)
        """, (CURRENT_VERSION,))
        self.migrations_run.append(f"Set system version to {CURRENT_VERSION}")
|
|
637
|
+
|
|
638
|
+
def run_migrations(self):
    """Run every schema migration in order, with backup and error reporting.

    Connects to the database, detects the on-disk version, short-circuits
    when the schema is already current and healthy, backs up the file,
    then executes each migration step in sequence before committing.

    Returns:
        bool: True when the update completed (or nothing was needed),
        False when the connection failed or a migration raised.
    """
    print_header("Claude Memory System - Update Script")

    if self.dry_run:
        print_warning("DRY RUN MODE - No changes will be made\n")

    if not self.connect():
        return False

    self.detected_version = self.detect_version()
    print_info(f"Detected version: {self.detected_version}")
    print_info(f"Target version: {CURRENT_VERSION}\n")

    # Even an up-to-date schema may still hold Windows-style backslash
    # paths that need normalizing, so check before short-circuiting.
    needs_maintenance = False
    for tbl in ('memories', 'session_state', 'timeline_events'):
        if not self.table_exists(tbl):
            continue
        self.cursor.execute(f"SELECT COUNT(*) FROM {tbl} WHERE project_path LIKE '%\\%'")
        if self.cursor.fetchone()[0] > 0:
            needs_maintenance = True
            break

    if self.detected_version == CURRENT_VERSION and not needs_maintenance:
        print_success("System is already up to date!")
        self.close()
        return True

    if self.detected_version == CURRENT_VERSION:
        print_warning("Version is current but maintenance is needed\n")

    # Backup happens outside the try block: a failed backup should abort
    # before any schema change, and backup_path is then always bound below.
    backup_path = self.backup_database()

    try:
        print_header("Running Migrations")

        # Ordered pipeline: one entry per schema revision, followed by
        # the cross-version fix-ups (columns, paths, indexes, version).
        migration_steps = (
            self.migrate_create_base_tables,       # v1.0.0 - Base tables
            self.migrate_create_patterns,          # v1.1.0 - Patterns
            self.migrate_create_timeline_session,  # v1.2.0 - Timeline & Session
            self.migrate_create_project_configs,   # v1.3.0 - Project configs
            self.migrate_create_insights_archive,  # v1.4.0 - Insights & Archive
            self.migrate_create_anchor_tables,     # v1.5.0 - Anchor tables
            self.migrate_create_cleanup_tables,    # v2.0.0 - Cleanup system
            self.migrate_add_missing_columns,      # Add missing columns
            self.migrate_normalize_paths,          # Normalize paths (critical fix)
            self.migrate_create_indexes,           # Create indexes
            self.migrate_set_version,              # Set version
        )
        for step in migration_steps:
            step()

        # Commit all changes atomically.
        self.commit()

        print_header("Migration Summary")

        if not self.migrations_run:
            print_info("No migrations were necessary")
        else:
            print_success(f"Completed {len(self.migrations_run)} migrations:\n")
            for i, migration in enumerate(self.migrations_run, 1):
                print(f" {i}. {migration}")

        print(f"\n{Colors.GREEN}Update completed successfully!{Colors.ENDC}")
        print(f" From version: {self.detected_version}")
        print(f" To version: {CURRENT_VERSION}")

        if backup_path and not self.dry_run:
            print(f"\n Backup saved: {backup_path}")

        return True

    except Exception as e:
        # Surface the failure and point the user at the pre-migration backup.
        print_error(f"Migration failed: {e}")
        print_info(f"Database backup is available at: {backup_path}")
        import traceback
        traceback.print_exc()
        return False

    finally:
        self.close()
|
|
745
|
+
|
|
746
|
+
def show_status(self):
    """Print a read-only report: version, table inventory, and health check.

    Makes no changes to the database; intended for the --status CLI flag.
    """
    print_header("Claude Memory System - Status Check")

    if not self.connect():
        return

    self.detected_version = self.detect_version()
    tables = self.get_tables()

    print_info(f"Database: {self.db_path}")
    print_info(f"Detected version: {self.detected_version}")
    print_info(f"Latest version: {CURRENT_VERSION}")
    print_info(f"Tables found: {len(tables)}")

    print("\nTables:")
    for table in sorted(tables):
        column_names = self.get_columns(table)
        self.cursor.execute(f"SELECT COUNT(*) FROM {table}")
        row_count = self.cursor.fetchone()[0]
        print(f" - {table}: {len(column_names)} columns, {row_count} rows")

    print("\nHealth Check:")
    issues = []

    # Windows-style backslashes in project_path break cross-platform
    # lookups; count the affected rows per table.
    for table in ['memories', 'session_state', 'timeline_events']:
        if not self.table_exists(table):
            continue
        self.cursor.execute(f"SELECT COUNT(*) FROM {table} WHERE project_path LIKE '%\\%'")
        bad_rows = self.cursor.fetchone()[0]
        if bad_rows > 0:
            issues.append(f" - {table} has {bad_rows} paths with backslashes")

    if not issues:
        print_success("No issues found!")
    else:
        print_warning("Issues found:")
        for issue in issues:
            print(issue)
        print("\nRun 'python update_system.py' to fix these issues.")

    self.close()
|
|
790
|
+
|
|
791
|
+
|
|
792
|
+
def main():
    """CLI entry point: parse flags, locate the database, run or report.

    Supported flags: --dry-run (no writes), --verbose, --status
    (report only). Exits 1 when the database is missing or a migration
    fails, 0 on success.
    """
    argv = sys.argv
    dry_run = '--dry-run' in argv
    verbose = '--verbose' in argv
    status_only = '--status' in argv

    # The database is expected to live next to this script.
    db_path = Path(__file__).parent / "memories.db"

    if not db_path.exists():
        print_error(f"Database not found at {db_path}")
        print_info("Make sure you're running this from the memory-agent directory")
        sys.exit(1)

    manager = MigrationManager(str(db_path), dry_run=dry_run, verbose=verbose)

    if status_only:
        manager.show_status()
    else:
        # Map the boolean result onto a conventional process exit code.
        sys.exit(0 if manager.run_migrations() else 1)
|
|
814
|
+
|
|
815
|
+
|
|
816
|
+
# Run the CLI only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|