claude-self-reflect 5.0.6 → 6.0.0
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- package/.claude/agents/open-source-maintainer.md +1 -1
- package/.claude/agents/reflection-specialist.md +2 -2
- package/Dockerfile.async-importer +6 -4
- package/Dockerfile.importer +6 -6
- package/Dockerfile.safe-watcher +8 -8
- package/Dockerfile.streaming-importer +8 -1
- package/Dockerfile.watcher +8 -16
- package/docker-compose.yaml +12 -6
- package/installer/.claude/agents/README.md +138 -0
- package/package.json +5 -26
- package/src/__init__.py +0 -0
- package/src/cli/__init__.py +0 -0
- package/src/runtime/__init__.py +0 -0
- package/src/runtime/import-latest.py +124 -0
- package/{scripts → src/runtime}/precompact-hook.sh +1 -1
- package/src/runtime/streaming-importer.py +995 -0
- package/{scripts → src/runtime}/watcher-loop.sh +1 -1
- package/.claude/agents/claude-self-reflect-test.md +0 -1274
- package/.claude/agents/reflect-tester.md +0 -300
- package/scripts/add-timestamp-indexes.py +0 -134
- package/scripts/ast_grep_final_analyzer.py +0 -338
- package/scripts/ast_grep_unified_registry.py +0 -710
- package/scripts/check-collections.py +0 -29
- package/scripts/debug-august-parsing.py +0 -80
- package/scripts/debug-import-single.py +0 -91
- package/scripts/debug-project-resolver.py +0 -82
- package/scripts/debug-temporal-tools.py +0 -135
- package/scripts/import-conversations-enhanced.py +0 -672
- package/scripts/migrate-to-unified-state.py +0 -426
- package/scripts/session_quality_tracker.py +0 -671
- package/scripts/update_patterns.py +0 -334
- package/{scripts → src}/importer/__init__.py +0 -0
- package/{scripts → src}/importer/__main__.py +0 -0
- package/{scripts → src}/importer/core/__init__.py +0 -0
- package/{scripts → src}/importer/core/config.py +0 -0
- package/{scripts → src}/importer/core/exceptions.py +0 -0
- package/{scripts → src}/importer/core/models.py +0 -0
- package/{scripts → src}/importer/embeddings/__init__.py +0 -0
- package/{scripts → src}/importer/embeddings/base.py +0 -0
- package/{scripts → src}/importer/embeddings/fastembed_provider.py +0 -0
- package/{scripts → src}/importer/embeddings/validator.py +0 -0
- package/{scripts → src}/importer/embeddings/voyage_provider.py +0 -0
- package/{scripts → src}/importer/main.py +0 -0
- package/{scripts → src}/importer/processors/__init__.py +0 -0
- package/{scripts → src}/importer/processors/ast_extractor.py +0 -0
- package/{scripts → src}/importer/processors/chunker.py +0 -0
- package/{scripts → src}/importer/processors/concept_extractor.py +0 -0
- package/{scripts → src}/importer/processors/conversation_parser.py +0 -0
- package/{scripts → src}/importer/processors/tool_extractor.py +0 -0
- package/{scripts → src}/importer/state/__init__.py +0 -0
- package/{scripts → src}/importer/state/state_manager.py +0 -0
- package/{scripts → src}/importer/storage/__init__.py +0 -0
- package/{scripts → src}/importer/storage/qdrant_storage.py +0 -0
- package/{scripts → src}/importer/utils/__init__.py +0 -0
- package/{scripts → src}/importer/utils/logger.py +0 -0
- package/{scripts → src}/importer/utils/project_normalizer.py +0 -0
- package/{scripts → src/runtime}/delta-metadata-update-safe.py +0 -0
- package/{scripts → src/runtime}/delta-metadata-update.py +0 -0
- package/{scripts → src/runtime}/doctor.py +0 -0
- package/{scripts → src/runtime}/embedding_service.py +0 -0
- package/{scripts → src/runtime}/force-metadata-recovery.py +0 -0
- package/{scripts → src/runtime}/import-conversations-unified.py +0 -0
- package/{scripts → src/runtime}/import_strategies.py +0 -0
- package/{scripts → src/runtime}/message_processors.py +0 -0
- package/{scripts → src/runtime}/metadata_extractor.py +0 -0
- package/{scripts → src/runtime}/streaming-watcher.py +0 -0
- package/{scripts → src/runtime}/unified_state_manager.py +0 -0
- package/{scripts → src/runtime}/utils.py +0 -0
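Of the removed files, only scripts/migrate-to-unified-state.py is expanded in full below. It was the one-shot helper that consolidated the legacy state files (imported-files.json, csr-watcher.json, and friends) into a single unified-state.json. As a minimal sketch of the per-file record that script wrote — the field names are taken from the code below, while the values are purely illustrative:

```python
# One entry in unified-state.json's "files" map, as produced by the removed
# migrator. Field names come from the script; the values here are made up.
record = {
    "imported_at": "2025-01-01T12:00:00+00:00",    # ISO-8601, used for "newest wins"
    "last_modified": "2025-01-01T12:00:00+00:00",  # falls back to imported_at
    "chunks": 42,
    "importer": "batch",           # or "streaming" / "unified" / "watcher"
    "collection": "conv_abc123",   # hypothetical collection name
    "embedding_mode": "local",     # "local" -> 384 dimensions, otherwise 1024
    "status": "completed",
    "error": None,
    "retry_count": 0,
}
```

The full diff of the removed script follows.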
```diff
@@ -1,426 +0,0 @@
-#!/usr/bin/env python3
-"""
-Migration script to consolidate multiple state files into unified state format.
-
-This script:
-1. Backs up existing state files
-2. Reads from imported-files.json, csr-watcher.json, and other state files
-3. Merges all data with deduplication (newest wins)
-4. Creates unified-state.json with v5.0 format
-5. Provides rollback capability
-"""
-
-import json
-import shutil
-import sys
-from pathlib import Path
-from datetime import datetime, timezone
-from typing import Dict, Any, List
-import logging
-
-# Add parent directory to path for imports
-sys.path.append(str(Path(__file__).parent))
-from unified_state_manager import UnifiedStateManager
-
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s - %(message)s'
-)
-logger = logging.getLogger(__name__)
-
-
-class StateMigrator:
-    """Migrates multiple state files to unified state format."""
-
-    def __init__(self):
-        """Initialize the migrator."""
-        self.config_dir = Path.home() / ".claude-self-reflect" / "config"
-        self.backup_dir = self.config_dir / f"backup-before-v5-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
-        self.state_manager = UnifiedStateManager()
-
-        # State files to migrate
-        self.state_files = [
-            "imported-files.json",
-            "csr-watcher.json",
-            "unified-import-state.json",  # May be in archive
-            "watcher-state.json",  # May be in archive
-            "streaming-state.json"  # May be in archive
-        ]
-
-    def backup_existing_states(self) -> List[Path]:
-        """
-        Backup all existing state files.
-
-        Returns:
-            List of backed up file paths
-        """
-        self.backup_dir.mkdir(exist_ok=True)
-        backed_up = []
-
-        logger.info(f"Creating backups in {self.backup_dir}")
-
-        for state_file in self.state_files:
-            # Check both main and archive directories
-            sources = [
-                self.config_dir / state_file,
-                self.config_dir / "archive" / state_file
-            ]
-
-            for source in sources:
-                if source.exists():
-                    dest = self.backup_dir / state_file
-                    if source.parent.name == "archive":
-                        dest = self.backup_dir / f"archive-{state_file}"
-
-                    shutil.copy2(source, dest)
-                    backed_up.append(dest)
-                    logger.info(f"  Backed up: {state_file} → {dest.name}")
-
-        # Also backup unified-state.json if it exists
-        unified_state = self.config_dir / "unified-state.json"
-        if unified_state.exists():
-            dest = self.backup_dir / "unified-state.json.existing"
-            shutil.copy2(unified_state, dest)
-            backed_up.append(dest)
-            logger.info(f"  Backed up existing unified state")
-
-        return backed_up
-
-    def load_state_file(self, filename: str) -> Dict[str, Any]:
-        """
-        Safely load a state file from config or archive directory.
-
-        Args:
-            filename: Name of the state file
-
-        Returns:
-            State dictionary or empty dict if not found
-        """
-        # Try main directory first
-        file_paths = [
-            self.config_dir / filename,
-            self.config_dir / "archive" / filename
-        ]
-
-        for file_path in file_paths:
-            if file_path.exists():
-                try:
-                    with open(file_path, 'r') as f:
-                        logger.debug(f"  Loading {filename} from {file_path.parent.name}/")
-                        return json.load(f)
-                except Exception as e:
-                    logger.error(f"  Error loading {filename}: {e}")
-                    return {}
-
-        logger.debug(f"  {filename} not found")
-        return {}
-
-    def merge_file_data(self, all_files: Dict[str, Any],
-                        source_files: Dict[str, Any],
-                        importer: str) -> Dict[str, Any]:
-        """
-        Merge file data from a source into the consolidated dictionary.
-
-        Args:
-            all_files: Consolidated file dictionary
-            source_files: Files from a specific source
-            importer: Name of the importer (batch/streaming)
-
-        Returns:
-            Updated consolidated dictionary
-        """
-        merged_count = 0
-        updated_count = 0
-
-        for file_path, metadata in source_files.items():
-            normalized = UnifiedStateManager.normalize_path(file_path)
-
-            # Check if this file already exists
-            if normalized in all_files:
-                # Use newer data (compare timestamps)
-                existing_time = all_files[normalized].get("imported_at", "")
-                new_time = metadata.get("imported_at", "")
-
-                # Handle None and empty string in comparison
-                if (not existing_time) or (new_time and new_time > existing_time):
-                    # Update with newer data
-                    all_files[normalized] = {
-                        "imported_at": metadata.get("imported_at"),
-                        "last_modified": metadata.get("last_modified", metadata.get("imported_at")),
-                        "chunks": metadata.get("chunks", 0),
-                        "importer": importer,
-                        "collection": metadata.get("collection"),
-                        "embedding_mode": metadata.get("embedding_mode", "local"),
-                        "status": "completed",
-                        "error": None,
-                        "retry_count": 0
-                    }
-                    updated_count += 1
-            else:
-                # Add new file
-                all_files[normalized] = {
-                    "imported_at": metadata.get("imported_at"),
-                    "last_modified": metadata.get("last_modified", metadata.get("imported_at")),
-                    "chunks": metadata.get("chunks", 0),
-                    "importer": importer,
-                    "collection": metadata.get("collection"),
-                    "embedding_mode": metadata.get("embedding_mode", "local"),
-                    "status": "completed",
-                    "error": None,
-                    "retry_count": 0
-                }
-                merged_count += 1
-
-        logger.info(f"  {importer}: {merged_count} new, {updated_count} updated")
-        return all_files
-
-    def calculate_collection_stats(self, all_files: Dict[str, Any]) -> Dict[str, Any]:
-        """
-        Calculate statistics for each collection.
-
-        Args:
-            all_files: All imported files
-
-        Returns:
-            Collection statistics dictionary
-        """
-        collections = {}
-
-        for file_path, metadata in all_files.items():
-            collection = metadata.get("collection")
-            if collection:
-                if collection not in collections:
-                    collections[collection] = {
-                        "files": 0,
-                        "chunks": 0,
-                        "embedding_mode": metadata.get("embedding_mode", "local"),
-                        "dimensions": 384 if metadata.get("embedding_mode") == "local" else 1024
-                    }
-                collections[collection]["files"] += 1
-                collections[collection]["chunks"] += metadata.get("chunks", 0)
-
-        return collections
-
-    def migrate(self, dry_run: bool = False) -> bool:
-        """
-        Perform the migration.
-
-        Args:
-            dry_run: If True, only simulate migration without writing
-
-        Returns:
-            True if successful, False otherwise
-        """
-        try:
-            print("\n" + "="*60)
-            print("Claude Self-Reflect State Migration to v5.0")
-            print("="*60)
-
-            # Step 1: Backup
-            print("\n1. Creating backups...")
-            backed_up = self.backup_existing_states()
-            print(f"   ✓ Backed up {len(backed_up)} files")
-
-            # Step 2: Load all state files
-            print("\n2. Loading existing state files...")
-            imported_files = self.load_state_file("imported-files.json")
-            csr_watcher = self.load_state_file("csr-watcher.json")
-            unified_import = self.load_state_file("unified-import-state.json")
-            watcher_state = self.load_state_file("watcher-state.json")
-            streaming_state = self.load_state_file("streaming-state.json")
-
-            # Step 3: Merge data
-            print("\n3. Merging state data...")
-            all_files = {}
-
-            # Process imported-files.json (batch importer)
-            if "imported_files" in imported_files:
-                all_files = self.merge_file_data(
-                    all_files,
-                    imported_files["imported_files"],
-                    "batch"
-                )
-            elif imported_files:  # Might be at root level
-                all_files = self.merge_file_data(
-                    all_files,
-                    imported_files,
-                    "batch"
-                )
-
-            # Process csr-watcher.json (streaming watcher)
-            if "imported_files" in csr_watcher:
-                all_files = self.merge_file_data(
-                    all_files,
-                    csr_watcher["imported_files"],
-                    "streaming"
-                )
-
-            # Process unified-import-state.json if exists
-            if "files" in unified_import:
-                all_files = self.merge_file_data(
-                    all_files,
-                    unified_import["files"],
-                    "unified"
-                )
-
-            # Process other watcher states
-            for state_data, name in [(watcher_state, "watcher"), (streaming_state, "streaming")]:
-                if "imported_files" in state_data:
-                    all_files = self.merge_file_data(
-                        all_files,
-                        state_data["imported_files"],
-                        name
-                    )
-
-            # Step 4: Calculate statistics
-            print("\n4. Calculating statistics...")
-            total_chunks = sum(f.get("chunks", 0) for f in all_files.values())
-            collections = self.calculate_collection_stats(all_files)
-
-            print(f"   - Total files: {len(all_files)}")
-            print(f"   - Total chunks: {total_chunks}")
-            print(f"   - Collections: {len(collections)}")
-
-            if dry_run:
-                print("\n5. DRY RUN - Not writing changes")
-                print("\nMigration preview complete!")
-                return True
-
-            # Step 5: Create unified state
-            print("\n5. Creating unified state...")
-
-            def create_unified_state(state):
-                # Replace all file data
-                state["files"] = all_files
-
-                # Update metadata
-                state["metadata"]["total_files"] = len(all_files)
-                state["metadata"]["total_chunks"] = total_chunks
-                state["metadata"]["migration_from"] = "v3-v4-multi-file"
-                state["metadata"]["migration_date"] = datetime.now(timezone.utc).isoformat()
-                state["metadata"]["migration_stats"] = {
-                    "imported_files_count": len(imported_files.get("imported_files", {})),
-                    "csr_watcher_count": len(csr_watcher.get("imported_files", {})),
-                    "unified_count": len(all_files)
-                }
-
-                # Update collections
-                state["collections"] = collections
-
-                # Update importer stats
-                batch_files = [f for f in all_files.values() if f.get("importer") == "batch"]
-                streaming_files = [f for f in all_files.values() if f.get("importer") == "streaming"]
-
-                state["importers"]["batch"]["files_processed"] = len(batch_files)
-                state["importers"]["batch"]["chunks_imported"] = sum(f.get("chunks", 0) for f in batch_files)
-
-                state["importers"]["streaming"]["files_processed"] = len(streaming_files)
-                state["importers"]["streaming"]["chunks_imported"] = sum(f.get("chunks", 0) for f in streaming_files)
-
-                return state
-
-            self.state_manager.update_state(create_unified_state)
-
-            print(f"   ✓ Created unified state at {self.state_manager.state_file}")
-
-            # Step 6: Verification
-            print("\n6. Verifying migration...")
-            status = self.state_manager.get_status()
-            print(f"   - Version: {status['version']}")
-            print(f"   - Files: {status['indexed_files']}/{status['total_files']}")
-            print(f"   - Chunks: {status['total_chunks']}")
-            print(f"   - Collections: {', '.join(status['collections'])}")
-
-            print("\n" + "="*60)
-            print("✅ Migration completed successfully!")
-            print(f"   - Backups saved to: {self.backup_dir}")
-            print(f"   - Unified state: {self.state_manager.state_file}")
-            print("\nNext steps:")
-            print("  1. Update import scripts to use unified_state_manager")
-            print("  2. Test with: python unified_state_manager.py status")
-            print("  3. If issues occur, restore from:", self.backup_dir)
-            print("="*60 + "\n")
-
-            return True
-
-        except Exception as e:
-            logger.error(f"Migration failed: {e}")
-            print(f"\n❌ Migration failed: {e}")
-            print(f"   Backups available at: {self.backup_dir}")
-            return False
-
-    def rollback(self):
-        """Rollback to backed up state files."""
-        print("\nRolling back migration...")
-
-        if not self.backup_dir.exists():
-            print("❌ No backup directory found")
-            return False
-
-        # Remove unified state
-        unified_state = self.config_dir / "unified-state.json"
-        if unified_state.exists():
-            unified_state.unlink()
-            print(f"  Removed {unified_state}")
-
-        # Restore backed up files
-        for backup_file in self.backup_dir.glob("*.json"):
-            if backup_file.name == "unified-state.json.existing":
-                # Restore previous unified state
-                dest = self.config_dir / "unified-state.json"
-            elif backup_file.name.startswith("archive-"):
-                # Restore to archive directory
-                self.config_dir.joinpath("archive").mkdir(exist_ok=True)
-                dest = self.config_dir / "archive" / backup_file.name.replace("archive-", "")
-            else:
-                # Restore to main directory
-                dest = self.config_dir / backup_file.name
-
-            shutil.copy2(backup_file, dest)
-            print(f"  Restored {backup_file.name} → {dest}")
-
-        print("✅ Rollback complete")
-        return True
-
-
-def main():
-    """Main entry point."""
-    import argparse
-
-    parser = argparse.ArgumentParser(
-        description="Migrate multiple state files to unified state format"
-    )
-    parser.add_argument(
-        "--dry-run",
-        action="store_true",
-        help="Preview migration without making changes"
-    )
-    parser.add_argument(
-        "--rollback",
-        action="store_true",
-        help="Rollback to previous state files"
-    )
-    parser.add_argument(
-        "--verbose",
-        "-v",
-        action="store_true",
-        help="Enable verbose logging"
-    )
-
-    args = parser.parse_args()
-
-    if args.verbose:
-        logging.getLogger().setLevel(logging.DEBUG)
-
-    migrator = StateMigrator()
-
-    if args.rollback:
-        success = migrator.rollback()
-    else:
-        success = migrator.migrate(dry_run=args.dry_run)
-
-    sys.exit(0 if success else 1)
-
-
-if __name__ == "__main__":
-    main()
```
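The script's "newest wins" deduplication leans on the fact that same-format ISO-8601 timestamps compare correctly as plain strings. A standalone sketch of the rule from merge_file_data, with hypothetical timestamps:

```python
# "Newest wins" as implemented in the removed merge_file_data(): replace a
# record when it has no timestamp, or when the incoming timestamp is
# lexicographically greater (valid for same-format ISO-8601 strings).
def newer_wins(existing_time: str, new_time: str) -> bool:
    return (not existing_time) or (bool(new_time) and new_time > existing_time)

assert newer_wins("", "2025-01-02T00:00:00+00:00")  # records without a timestamp always lose
assert newer_wins("2025-01-01T00:00:00+00:00", "2025-01-02T00:00:00+00:00")
assert not newer_wins("2025-01-02T00:00:00+00:00", "2025-01-01T00:00:00+00:00")
```

For reference, the removed script exposed three argparse flags: --dry-run to preview the merge without writing, --rollback to restore the timestamped backups it created, and --verbose/-v for debug logging.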