pomera-ai-commander 0.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -21
- package/README.md +105 -680
- package/bin/pomera-ai-commander.js +62 -62
- package/core/__init__.py +65 -65
- package/core/app_context.py +482 -482
- package/core/async_text_processor.py +421 -421
- package/core/backup_manager.py +655 -655
- package/core/backup_recovery_manager.py +1033 -1033
- package/core/content_hash_cache.py +508 -508
- package/core/context_menu.py +313 -313
- package/core/data_validator.py +1066 -1066
- package/core/database_connection_manager.py +744 -744
- package/core/database_curl_settings_manager.py +608 -608
- package/core/database_promera_ai_settings_manager.py +446 -446
- package/core/database_schema.py +411 -411
- package/core/database_schema_manager.py +395 -395
- package/core/database_settings_manager.py +1507 -1507
- package/core/database_settings_manager_interface.py +456 -456
- package/core/dialog_manager.py +734 -734
- package/core/efficient_line_numbers.py +510 -510
- package/core/error_handler.py +746 -746
- package/core/error_service.py +431 -431
- package/core/event_consolidator.py +511 -511
- package/core/mcp/__init__.py +43 -43
- package/core/mcp/protocol.py +288 -288
- package/core/mcp/schema.py +251 -251
- package/core/mcp/server_stdio.py +299 -299
- package/core/mcp/tool_registry.py +2372 -2345
- package/core/memory_efficient_text_widget.py +711 -711
- package/core/migration_manager.py +914 -914
- package/core/migration_test_suite.py +1085 -1085
- package/core/migration_validator.py +1143 -1143
- package/core/optimized_find_replace.py +714 -714
- package/core/optimized_pattern_engine.py +424 -424
- package/core/optimized_search_highlighter.py +552 -552
- package/core/performance_monitor.py +674 -674
- package/core/persistence_manager.py +712 -712
- package/core/progressive_stats_calculator.py +632 -632
- package/core/regex_pattern_cache.py +529 -529
- package/core/regex_pattern_library.py +350 -350
- package/core/search_operation_manager.py +434 -434
- package/core/settings_defaults_registry.py +1087 -1087
- package/core/settings_integrity_validator.py +1111 -1111
- package/core/settings_serializer.py +557 -557
- package/core/settings_validator.py +1823 -1823
- package/core/smart_stats_calculator.py +709 -709
- package/core/statistics_update_manager.py +619 -619
- package/core/stats_config_manager.py +858 -858
- package/core/streaming_text_handler.py +723 -723
- package/core/task_scheduler.py +596 -596
- package/core/update_pattern_library.py +168 -168
- package/core/visibility_monitor.py +596 -596
- package/core/widget_cache.py +498 -498
- package/mcp.json +51 -61
- package/package.json +61 -57
- package/pomera.py +7482 -7482
- package/pomera_mcp_server.py +183 -144
- package/requirements.txt +32 -0
- package/tools/__init__.py +4 -4
- package/tools/ai_tools.py +2891 -2891
- package/tools/ascii_art_generator.py +352 -352
- package/tools/base64_tools.py +183 -183
- package/tools/base_tool.py +511 -511
- package/tools/case_tool.py +308 -308
- package/tools/column_tools.py +395 -395
- package/tools/cron_tool.py +884 -884
- package/tools/curl_history.py +600 -600
- package/tools/curl_processor.py +1207 -1207
- package/tools/curl_settings.py +502 -502
- package/tools/curl_tool.py +5467 -5467
- package/tools/diff_viewer.py +1071 -1071
- package/tools/email_extraction_tool.py +248 -248
- package/tools/email_header_analyzer.py +425 -425
- package/tools/extraction_tools.py +250 -250
- package/tools/find_replace.py +1750 -1750
- package/tools/folder_file_reporter.py +1463 -1463
- package/tools/folder_file_reporter_adapter.py +480 -480
- package/tools/generator_tools.py +1216 -1216
- package/tools/hash_generator.py +255 -255
- package/tools/html_tool.py +656 -656
- package/tools/jsonxml_tool.py +729 -729
- package/tools/line_tools.py +419 -419
- package/tools/markdown_tools.py +561 -561
- package/tools/mcp_widget.py +1417 -1417
- package/tools/notes_widget.py +973 -973
- package/tools/number_base_converter.py +372 -372
- package/tools/regex_extractor.py +571 -571
- package/tools/slug_generator.py +310 -310
- package/tools/sorter_tools.py +458 -458
- package/tools/string_escape_tool.py +392 -392
- package/tools/text_statistics_tool.py +365 -365
- package/tools/text_wrapper.py +430 -430
- package/tools/timestamp_converter.py +421 -421
- package/tools/tool_loader.py +710 -710
- package/tools/translator_tools.py +522 -522
- package/tools/url_link_extractor.py +261 -261
- package/tools/url_parser.py +204 -204
- package/tools/whitespace_tools.py +355 -355
- package/tools/word_frequency_counter.py +146 -146
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
```diff
@@ -1,915 +1,915 @@
-"""
-Migration Manager for Settings Database Migration
-
-This module provides comprehensive migration capabilities between JSON settings files
-and the SQLite database format. It handles bidirectional conversion with full
-structure preservation, including complex nested structures, encrypted keys,
-and history arrays.
-
-Designed to handle all 15 tool configurations and complex data structures
-identified in the production codebase analysis.
-"""
-
-import json
-import sqlite3
-import os
-import shutil
-import logging
-from typing import Dict, List, Tuple, Any, Optional, Union
-from datetime import datetime
-from pathlib import Path
-
-from .database_connection_manager import DatabaseConnectionManager
-from .database_schema import DatabaseSchema, DataTypeConverter
-
-
-class MigrationManager:
-    """
-    Handles migration between JSON settings file and database format.
-
-    Features:
-    - Bidirectional JSON ↔ Database conversion
-    - Full structure preservation for complex nested objects
-    - Special handling for encrypted API keys with "ENC:" prefix
-    - Support for all tool configurations and data types
-    - Migration validation and rollback capabilities
-    - Comprehensive error handling and recovery
-    """
-
-    def __init__(self, connection_manager: DatabaseConnectionManager):
-        """
-        Initialize the migration manager.
-
-        Args:
-            connection_manager: Database connection manager instance
-        """
-        self.connection_manager = connection_manager
-        self.logger = logging.getLogger(__name__)
-        self.schema = DatabaseSchema()
-        self.converter = DataTypeConverter()
-
-        # Migration tracking
-        self._migration_history = []
-        self._max_history = 50
-
-        # Validation settings
-        self._validation_enabled = True
-        self._strict_validation = True
-
-        # Backup settings
-        self._auto_backup = True
-        self._backup_suffix = ".backup"
-
-    def migrate_from_json(self, json_filepath: str, validate: bool = True) -> bool:
-        """
-        Convert settings.json to database format with full structure preservation.
-
-        Args:
-            json_filepath: Path to source JSON settings file
-            validate: Whether to validate migration accuracy
-
-        Returns:
-            True if migration successful, False otherwise
-        """
-        try:
-            self.logger.info(f"Starting migration from JSON: {json_filepath}")
-
-            # Validate input file
-            if not os.path.exists(json_filepath):
-                self.logger.error(f"JSON file not found: {json_filepath}")
-                return False
-
-            # Create backup if enabled
-            backup_path = None
-            if self._auto_backup:
-                backup_path = self.create_migration_backup(json_filepath)
-                if not backup_path:
-                    self.logger.warning("Failed to create backup, continuing without backup")
-
-            # Load and parse JSON
-            json_data = self._load_json_file(json_filepath)
-            if json_data is None:
-                return False
-
-            # Perform migration
-            success = self._migrate_json_to_database(json_data)
-            if not success:
-                self.logger.error("Migration to database failed")
-                return False
-
-            # Validate migration if requested
-            if validate and self._validation_enabled:
-                validation_success = self._validate_json_migration(json_data)
-                if not validation_success:
-                    self.logger.error("Migration validation failed")
-                    if self._strict_validation:
-                        return False
-
-            # Record successful migration
-            self._record_migration_success(json_filepath, backup_path, "json_to_db")
-
-            self.logger.info("JSON to database migration completed successfully")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Migration from JSON failed: {e}")
-            self._record_migration_failure(json_filepath, str(e), "json_to_db")
-            return False
-
-    def migrate_to_json(self, json_filepath: str, validate: bool = True) -> bool:
-        """
-        Convert database back to settings.json format.
-
-        Args:
-            json_filepath: Target path for JSON settings file
-            validate: Whether to validate migration accuracy
-
-        Returns:
-            True if migration successful, False otherwise
-        """
-        try:
-            self.logger.info(f"Starting migration to JSON: {json_filepath}")
-
-            # Create backup of existing file if it exists
-            backup_path = None
-            if os.path.exists(json_filepath) and self._auto_backup:
-                backup_path = self.create_migration_backup(json_filepath)
-
-            # Extract data from database
-            json_data = self._migrate_database_to_json()
-            if json_data is None:
-                return False
-
-            # Write JSON file
-            success = self._write_json_file(json_filepath, json_data)
-            if not success:
-                return False
-
-            # Validate migration if requested
-            if validate and self._validation_enabled:
-                validation_success = self._validate_db_migration(json_data)
-                if not validation_success:
-                    self.logger.error("Migration validation failed")
-                    if self._strict_validation:
-                        return False
-
-            # Record successful migration
-            self._record_migration_success(json_filepath, backup_path, "db_to_json")
-
-            self.logger.info("Database to JSON migration completed successfully")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Migration to JSON failed: {e}")
-            self._record_migration_failure(json_filepath, str(e), "db_to_json")
-            return False
-
-    def validate_migration(self, original_json: Dict, migrated_json: Dict) -> bool:
-        """
-        Verify migration accuracy by comparing original and migrated data.
-
-        Args:
-            original_json: Original JSON data structure
-            migrated_json: Migrated JSON data structure
-
-        Returns:
-            True if migration is accurate, False otherwise
-        """
-        try:
-            self.logger.info("Starting migration validation")
-
-            # Deep comparison of data structures
-            validation_results = self._deep_compare_structures(original_json, migrated_json)
-
-            if validation_results['success']:
-                self.logger.info("Migration validation passed")
-                return True
-            else:
-                self.logger.error(f"Migration validation failed: {validation_results['errors']}")
-                return False
-
-        except Exception as e:
-            self.logger.error(f"Migration validation error: {e}")
-            return False
-
-    def create_migration_backup(self, json_filepath: str) -> Optional[str]:
-        """
-        Create backup of original JSON file before migration.
-
-        Args:
-            json_filepath: Path to JSON file to backup
-
-        Returns:
-            Path to backup file if successful, None otherwise
-        """
-        try:
-            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-            backup_path = f"{json_filepath}{self._backup_suffix}_{timestamp}"
-
-            shutil.copy2(json_filepath, backup_path)
-
-            self.logger.info(f"Created migration backup: {backup_path}")
-            return backup_path
-
-        except Exception as e:
-            self.logger.error(f"Failed to create backup: {e}")
-            return None
-
-    def rollback_migration(self, backup_filepath: str) -> bool:
-        """
-        Rollback to original JSON file if migration fails.
-
-        Args:
-            backup_filepath: Path to backup file to restore
-
-        Returns:
-            True if rollback successful, False otherwise
-        """
-        try:
-            if not os.path.exists(backup_filepath):
-                self.logger.error(f"Backup file not found: {backup_filepath}")
-                return False
-
-            # Determine original file path by removing backup suffix
-            original_path = backup_filepath
-            for suffix in [self._backup_suffix]:
-                if suffix in original_path:
-                    original_path = original_path.split(suffix)[0]
-                    break
-
-            # Restore original file
-            shutil.copy2(backup_filepath, original_path)
-
-            self.logger.info(f"Rollback completed: restored {original_path}")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Rollback failed: {e}")
-            return False
-
-    def get_migration_history(self) -> List[Dict[str, Any]]:
-        """
-        Get history of migration operations.
-
-        Returns:
-            List of migration history entries
-        """
-        return self._migration_history.copy()
-
-    def clear_migration_history(self) -> None:
-        """Clear migration history."""
-        self._migration_history.clear()
-        self.logger.info("Migration history cleared")
-
-    # Private implementation methods
-
-    def _load_json_file(self, filepath: str) -> Optional[Dict[str, Any]]:
-        """
-        Load and parse JSON settings file with error handling.
-
-        Args:
-            filepath: Path to JSON file
-
-        Returns:
-            Parsed JSON data or None if failed
-        """
-        try:
-            with open(filepath, 'r', encoding='utf-8') as f:
-                data = json.load(f)
-
-            self.logger.debug(f"Loaded JSON file: {filepath}")
-            return data
-
-        except json.JSONDecodeError as e:
-            self.logger.error(f"Invalid JSON in file {filepath}: {e}")
-            return None
-        except Exception as e:
-            self.logger.error(f"Failed to load JSON file {filepath}: {e}")
-            return None
-
-    def _write_json_file(self, filepath: str, data: Dict[str, Any]) -> bool:
-        """
-        Write JSON data to file with proper formatting.
-
-        Args:
-            filepath: Target file path
-            data: JSON data to write
-
-        Returns:
-            True if successful, False otherwise
-        """
-        try:
-            # Ensure directory exists
-            os.makedirs(os.path.dirname(filepath), exist_ok=True)
-
-            with open(filepath, 'w', encoding='utf-8') as f:
-                json.dump(data, f, indent=2, ensure_ascii=False)
-
-            self.logger.debug(f"Wrote JSON file: {filepath}")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Failed to write JSON file {filepath}: {e}")
-            return False
-
-    def _migrate_json_to_database(self, json_data: Dict[str, Any]) -> bool:
-        """
-        Migrate JSON data structure to database tables.
-
-        Args:
-            json_data: Parsed JSON settings data
-
-        Returns:
-            True if migration successful, False otherwise
-        """
-        try:
-            with self.connection_manager.transaction() as conn:
-                # Clear existing data
-                self._clear_all_tables(conn)
-
-                # Migrate core settings
-                self._migrate_core_settings(conn, json_data)
-
-                # Migrate tool settings
-                if 'tool_settings' in json_data:
-                    self._migrate_tool_settings(conn, json_data['tool_settings'])
-
-                # Migrate tab content
-                self._migrate_tab_content(conn, json_data)
-
-                # Migrate performance settings
-                if 'performance_settings' in json_data:
-                    self._migrate_performance_settings(conn, json_data['performance_settings'])
-
-                # Migrate font settings
-                if 'font_settings' in json_data:
-                    self._migrate_font_settings(conn, json_data['font_settings'])
-
-                # Migrate dialog settings
-                if 'dialog_settings' in json_data:
-                    self._migrate_dialog_settings(conn, json_data['dialog_settings'])
-
-                # Update metadata
-                self._update_migration_metadata(conn)
-
-            self.logger.info("JSON to database migration completed")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"JSON to database migration failed: {e}")
-            return False
-
-    def _migrate_database_to_json(self) -> Optional[Dict[str, Any]]:
-        """
-        Extract data from database and reconstruct JSON structure.
-
-        Returns:
-            Reconstructed JSON data or None if failed
-        """
-        try:
-            conn = self.connection_manager.get_connection()
-
-            json_data = {}
-
-            # Extract core settings
-            core_settings = self._extract_core_settings(conn)
-            json_data.update(core_settings)
-
-            # Extract tool settings
-            tool_settings = self._extract_tool_settings(conn)
-            json_data['tool_settings'] = tool_settings # Always include, even if empty
-
-            # Extract tab content
-            tab_content = self._extract_tab_content(conn)
-            json_data.update(tab_content)
-
-            # Extract performance settings
-            performance_settings = self._extract_performance_settings(conn)
-            json_data['performance_settings'] = performance_settings # Always include, even if empty
-
-            # Extract font settings
-            font_settings = self._extract_font_settings(conn)
-            json_data['font_settings'] = font_settings # Always include, even if empty
-
-            # Extract dialog settings
-            dialog_settings = self._extract_dialog_settings(conn)
-            json_data['dialog_settings'] = dialog_settings # Always include, even if empty
-
-            self.logger.info("Database to JSON extraction completed")
-            return json_data
-
-        except Exception as e:
-            self.logger.error(f"Database to JSON extraction failed: {e}")
-            return None
-
-    def _clear_all_tables(self, conn: sqlite3.Connection) -> None:
-        """Clear all data from settings tables."""
-        tables = [
-            'core_settings', 'tool_settings', 'tab_content',
-            'performance_settings', 'font_settings', 'dialog_settings'
-        ]
-
-        for table in tables:
-            conn.execute(f"DELETE FROM {table}")
-
-    def _migrate_core_settings(self, conn: sqlite3.Connection, json_data: Dict[str, Any]) -> None:
-        """
-        Migrate core application settings to database.
-
-        Args:
-            conn: Database connection
-            json_data: Full JSON data structure
-        """
-        # Core settings are top-level keys excluding special categories
-        excluded_keys = {
-            'tool_settings', 'input_tabs', 'output_tabs',
-            'performance_settings', 'font_settings', 'dialog_settings'
-        }
-
-        for key, value in json_data.items():
-            if key not in excluded_keys:
-                data_type = self.converter.python_to_db_type(value)
-                serialized_value = self.converter.serialize_value(value)
-
-                conn.execute(
-                    "INSERT INTO core_settings (key, value, data_type) VALUES (?, ?, ?)",
-                    (key, serialized_value, data_type)
-                )
-
-    def _migrate_tool_settings(self, conn: sqlite3.Connection, tool_settings: Dict[str, Any]) -> None:
-        """
-        Migrate tool-specific settings to database with nested path support.
-
-        Args:
-            conn: Database connection
-            tool_settings: Tool settings dictionary
-        """
-        for tool_name, tool_config in tool_settings.items():
-            if isinstance(tool_config, dict):
-                # Flatten nested tool configuration
-                flattened = self._flatten_nested_dict(tool_config)
-
-                for setting_path, value in flattened.items():
-                    data_type = self.converter.python_to_db_type(value)
-                    serialized_value = self.converter.serialize_value(value)
-
-                    conn.execute(
-                        "INSERT INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
-                        (tool_name, setting_path, serialized_value, data_type)
-                    )
-            else:
-                # Simple tool setting
-                data_type = self.converter.python_to_db_type(tool_config)
-                serialized_value = self.converter.serialize_value(tool_config)
-
-                conn.execute(
-                    "INSERT INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
-                    (tool_name, 'value', serialized_value, data_type)
-                )
-
-    def _migrate_tab_content(self, conn: sqlite3.Connection, json_data: Dict[str, Any]) -> None:
-        """
-        Migrate input_tabs and output_tabs arrays to database.
-
-        Args:
-            conn: Database connection
-            json_data: Full JSON data structure
-        """
-        # Migrate input tabs
-        if 'input_tabs' in json_data:
-            input_tabs = json_data['input_tabs']
-            for i, content in enumerate(input_tabs):
-                conn.execute(
-                    "INSERT INTO tab_content (tab_type, tab_index, content) VALUES (?, ?, ?)",
-                    ('input', i, content or '')
-                )
-
-        # Migrate output tabs
-        if 'output_tabs' in json_data:
-            output_tabs = json_data['output_tabs']
-            for i, content in enumerate(output_tabs):
-                conn.execute(
-                    "INSERT INTO tab_content (tab_type, tab_index, content) VALUES (?, ?, ?)",
-                    ('output', i, content or '')
-                )
-
-    def _migrate_performance_settings(self, conn: sqlite3.Connection, performance_settings: Dict[str, Any]) -> None:
-        """
-        Migrate performance settings with nested structure support.
-
-        Args:
-            conn: Database connection
-            performance_settings: Performance settings dictionary
-        """
-        for category, settings in performance_settings.items():
-            if isinstance(settings, dict):
-                # Nested performance category
-                flattened = self._flatten_nested_dict(settings)
-
-                for setting_key, value in flattened.items():
-                    data_type = self.converter.python_to_db_type(value)
-                    serialized_value = self.converter.serialize_value(value)
-
-                    conn.execute(
-                        "INSERT INTO performance_settings (category, setting_key, setting_value, data_type) VALUES (?, ?, ?, ?)",
-                        (category, setting_key, serialized_value, data_type)
-                    )
-            else:
-                # Simple performance setting
-                data_type = self.converter.python_to_db_type(settings)
-                serialized_value = self.converter.serialize_value(settings)
-
-                conn.execute(
-                    "INSERT INTO performance_settings (category, setting_key, setting_value, data_type) VALUES (?, ?, ?, ?)",
-                    (category, 'value', serialized_value, data_type)
-                )
-
-    def _migrate_font_settings(self, conn: sqlite3.Connection, font_settings: Dict[str, Any]) -> None:
-        """
-        Migrate font settings with platform-specific fallbacks.
-
-        Args:
-            conn: Database connection
-            font_settings: Font settings dictionary
-        """
-        for font_type, font_config in font_settings.items():
-            if isinstance(font_config, dict):
-                for property_name, value in font_config.items():
-                    data_type = self.converter.python_to_db_type(value)
-                    serialized_value = self.converter.serialize_value(value)
-
-                    conn.execute(
-                        "INSERT INTO font_settings (font_type, property, value, data_type) VALUES (?, ?, ?, ?)",
-                        (font_type, property_name, serialized_value, data_type)
-                    )
-
-    def _migrate_dialog_settings(self, conn: sqlite3.Connection, dialog_settings: Dict[str, Any]) -> None:
-        """
-        Migrate dialog settings with category-based organization.
-
-        Args:
-            conn: Database connection
-            dialog_settings: Dialog settings dictionary
-        """
-        for category, dialog_config in dialog_settings.items():
-            if isinstance(dialog_config, dict):
-                for property_name, value in dialog_config.items():
-                    data_type = self.converter.python_to_db_type(value)
-                    serialized_value = self.converter.serialize_value(value)
-
-                    self.logger.debug(f"Inserting dialog setting: {category}.{property_name} = {value} (type: {data_type})")
-
-                    conn.execute(
-                        "INSERT INTO dialog_settings (category, property, value, data_type) VALUES (?, ?, ?, ?)",
-                        (category, property_name, serialized_value, data_type)
-                    )
-
-    def _update_migration_metadata(self, conn: sqlite3.Connection) -> None:
-        """Update migration metadata in database."""
-        timestamp = datetime.now().isoformat()
-
-        # Update or insert migration metadata
-        metadata_updates = [
-            ('last_migration_date', timestamp),
-            ('migration_type', 'json_to_db'),
-            ('migration_status', 'completed')
-        ]
-
-        for key, value in metadata_updates:
-            conn.execute(
-                "INSERT OR REPLACE INTO settings_metadata (key, value) VALUES (?, ?)",
-                (key, value)
-            )
-
-    def _extract_core_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
-        """
-        Extract core settings from database and convert to appropriate types.
-
-        Args:
-            conn: Database connection
-
-        Returns:
-            Dictionary of core settings
-        """
-        core_settings = {}
-
-        cursor = conn.execute("SELECT key, value, data_type FROM core_settings")
-        for key, value, data_type in cursor.fetchall():
-            core_settings[key] = self.converter.deserialize_value(value, data_type)
-
-        return core_settings
-
-    def _extract_tool_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
-        """
-        Extract tool settings from database and reconstruct nested structure.
-
-        Args:
-            conn: Database connection
-
-        Returns:
-            Dictionary of tool settings with nested structure
-        """
-        tool_settings = {}
-
-        cursor = conn.execute(
-            "SELECT tool_name, setting_path, setting_value, data_type FROM tool_settings ORDER BY tool_name, setting_path"
-        )
-
-        for tool_name, setting_path, setting_value, data_type in cursor.fetchall():
-            if tool_name not in tool_settings:
-                tool_settings[tool_name] = {}
-
-            # Deserialize value
-            value = self.converter.deserialize_value(setting_value, data_type)
-
-            # Handle nested paths
-            if '.' in setting_path:
-                self._set_nested_value(tool_settings[tool_name], setting_path, value)
-            else:
-                tool_settings[tool_name][setting_path] = value
-
-        # Post-process: unwrap simple tool settings that only have a 'value' key
-        for tool_name, tool_config in list(tool_settings.items()):
-            if isinstance(tool_config, dict) and len(tool_config) == 1 and 'value' in tool_config:
-                tool_settings[tool_name] = tool_config['value']
-
-        return tool_settings
-
-    def _extract_tab_content(self, conn: sqlite3.Connection) -> Dict[str, List[str]]:
-        """
-        Extract tab content from database and reconstruct arrays.
-
-        Args:
-            conn: Database connection
-
-        Returns:
-            Dictionary with input_tabs and output_tabs arrays
-        """
-        tab_content = {'input_tabs': [''] * 7, 'output_tabs': [''] * 7}
-
-        cursor = conn.execute("SELECT tab_type, tab_index, content FROM tab_content ORDER BY tab_type, tab_index")
-
-        for tab_type, tab_index, content in cursor.fetchall():
-            if tab_type == 'input' and 0 <= tab_index < 7:
-                tab_content['input_tabs'][tab_index] = content or ''
-            elif tab_type == 'output' and 0 <= tab_index < 7:
-                tab_content['output_tabs'][tab_index] = content or ''
-
-        return tab_content
-
-    def _extract_performance_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
-        """
-        Extract performance settings from database and reconstruct nested structure.
-
-        Args:
-            conn: Database connection
-
-        Returns:
-            Dictionary of performance settings with nested structure
-        """
-        performance_settings = {}
-
-        cursor = conn.execute(
-            "SELECT category, setting_key, setting_value, data_type FROM performance_settings ORDER BY category, setting_key"
-        )
-
-        for category, setting_key, setting_value, data_type in cursor.fetchall():
-            if category not in performance_settings:
-                performance_settings[category] = {}
-
-            # Deserialize value
-            value = self.converter.deserialize_value(setting_value, data_type)
-
-            # Handle nested paths
-            if '.' in setting_key:
-                self._set_nested_value(performance_settings[category], setting_key, value)
-            else:
-                performance_settings[category][setting_key] = value
-
-        # Post-process: unwrap simple categories that only have a 'value' key
-        for category, category_config in list(performance_settings.items()):
-            if isinstance(category_config, dict) and len(category_config) == 1 and 'value' in category_config:
-                performance_settings[category] = category_config['value']
-
-        return performance_settings
-
-    def _extract_font_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
-        """
-        Extract font settings from database.
-
-        Args:
-            conn: Database connection
-
-        Returns:
-            Dictionary of font settings
-        """
-        font_settings = {}
-
-        cursor = conn.execute("SELECT font_type, property, value, data_type FROM font_settings ORDER BY font_type, property")
-
-        for font_type, property_name, value, data_type in cursor.fetchall():
-            if font_type not in font_settings:
-                font_settings[font_type] = {}
-
-            font_settings[font_type][property_name] = self.converter.deserialize_value(value, data_type)
-
-        return font_settings
-
-    def _extract_dialog_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
-        """
-        Extract dialog settings from database.
-
-        Args:
-            conn: Database connection
-
-        Returns:
-            Dictionary of dialog settings
-        """
-        dialog_settings = {}
-
-        cursor = conn.execute("SELECT category, property, value, data_type FROM dialog_settings ORDER BY category, property")
-
-        for category, property_name, value, data_type in cursor.fetchall():
-            if category not in dialog_settings:
-                dialog_settings[category] = {}
-
-            dialog_settings[category][property_name] = self.converter.deserialize_value(value, data_type)
-
-        return dialog_settings
-
-    def _flatten_nested_dict(self, nested_dict: Dict[str, Any], parent_key: str = '', separator: str = '.') -> Dict[str, Any]:
-        """
-        Flatten nested dictionary using dot notation for keys.
-
-        Args:
-            nested_dict: Dictionary to flatten
-            parent_key: Parent key prefix
-            separator: Key separator character
-
-        Returns:
-            Flattened dictionary with dot-notation keys
-        """
-        items = []
-
-        for key, value in nested_dict.items():
-            new_key = f"{parent_key}{separator}{key}" if parent_key else key
-
-            if isinstance(value, dict) and len(value) > 0:
-                # Only recursively flatten non-empty dictionaries
-                items.extend(self._flatten_nested_dict(value, new_key, separator).items())
-            else:
-                # Treat empty dictionaries and all other values as leaf nodes
-                items.append((new_key, value))
-
-        return dict(items)
-
-    def _set_nested_value(self, target_dict: Dict[str, Any], key_path: str, value: Any, separator: str = '.') -> None:
-        """
-        Set value in nested dictionary using dot notation key path.
-
-        Args:
-            target_dict: Dictionary to modify
-            key_path: Dot-notation key path
-            value: Value to set
-            separator: Key separator character
-        """
-        keys = key_path.split(separator)
-        current = target_dict
-
-        # Navigate to parent of target key
-        for key in keys[:-1]:
-            if key not in current:
-                current[key] = {}
-            current = current[key]
-
-        # Set final value
-        current[keys[-1]] = value
-
-    def _validate_json_migration(self, original_json: Dict[str, Any]) -> bool:
-        """
-        Validate JSON to database migration by comparing original with reconstructed data.
-
-        Args:
-            original_json: Original JSON data
-
-        Returns:
-            True if validation passes, False otherwise
-        """
-        try:
-            # Reconstruct JSON from database
-            reconstructed_json = self._migrate_database_to_json()
-
-            if reconstructed_json is None:
-                return False
-
-            # Compare structures
-            comparison_result = self._deep_compare_structures(original_json, reconstructed_json)
-
-            if not comparison_result['success']:
-                self.logger.error(f"Validation errors: {comparison_result['errors'][:5]}") # Show first 5 errors
-
-            return comparison_result['success']
-
-        except Exception as e:
-            self.logger.error(f"JSON migration validation failed: {e}")
-            return False
-
-    def _validate_db_migration(self, expected_json: Dict[str, Any]) -> bool:
-        """
-        Validate database to JSON migration by comparing with expected data.
-
-        Args:
-            expected_json: Expected JSON structure
-
-        Returns:
-            True if validation passes, False otherwise
-        """
-        try:
-            # This would be called after database to JSON migration
-            # The expected_json is what we expect to get from the database
-            return True # Simplified for now
-
-        except Exception as e:
-            self.logger.error(f"Database migration validation failed: {e}")
-            return False
-
-    def _deep_compare_structures(self, dict1: Dict[str, Any], dict2: Dict[str, Any]) -> Dict[str, Any]:
-        """
-        Perform deep comparison of two dictionary structures.
-
-        Args:
-            dict1: First dictionary
-            dict2: Second dictionary
-
-        Returns:
-            Dictionary with comparison results and any errors found
-        """
-        errors = []
-
-        # Check keys in dict1
-        for key in dict1:
-            if key not in dict2:
-                errors.append(f"Key '{key}' missing in second dictionary")
-            else:
-                # Compare values
-                val1, val2 = dict1[key], dict2[key]
-
-                if isinstance(val1, dict) and isinstance(val2, dict):
-                    # Recursive comparison for nested dictionaries
-                    nested_result = self._deep_compare_structures(val1, val2)
-                    if not nested_result['success']:
-                        errors.extend([f"{key}.{error}" for error in nested_result['errors']])
-                elif isinstance(val1, list) and isinstance(val2, list):
-                    # Compare lists
-                    if len(val1) != len(val2):
-                        errors.append(f"List '{key}' length mismatch: {len(val1)} vs {len(val2)}")
-                    else:
-                        for i, (item1, item2) in enumerate(zip(val1, val2)):
-                            if item1 != item2:
-                                errors.append(f"List '{key}[{i}]' value mismatch: {item1} vs {item2}")
-                elif val1 != val2:
-                    errors.append(f"Value '{key}' mismatch: {val1} vs {val2}")
-
-        # Check for extra keys in dict2
-        for key in dict2:
-            if key not in dict1:
-                errors.append(f"Extra key '{key}' in second dictionary")
-
-        return {
-            'success': len(errors) == 0,
-            'errors': errors
-        }
-
-    def _record_migration_success(self, filepath: str, backup_path: Optional[str], migration_type: str) -> None:
-        """Record successful migration in history."""
-        entry = {
-            'timestamp': datetime.now().isoformat(),
-            'type': migration_type,
-            'filepath': filepath,
-            'backup_path': backup_path,
-            'status': 'success',
-            'error': None
-        }
-
-        self._migration_history.append(entry)
-
-        # Keep only recent history
-        if len(self._migration_history) > self._max_history:
-            self._migration_history = self._migration_history[-self._max_history:]
-
-    def _record_migration_failure(self, filepath: str, error: str, migration_type: str) -> None:
-        """Record failed migration in history."""
-        entry = {
-            'timestamp': datetime.now().isoformat(),
-            'type': migration_type,
-            'filepath': filepath,
-            'backup_path': None,
-            'status': 'failure',
-            'error': error
-        }
-
-        self._migration_history.append(entry)
-
-        # Keep only recent history
-        if len(self._migration_history) > self._max_history:
1
|
+
"""
|
|
2
|
+
Migration Manager for Settings Database Migration
|
|
3
|
+
|
|
4
|
+
This module provides comprehensive migration capabilities between JSON settings files
|
|
5
|
+
and the SQLite database format. It handles bidirectional conversion with full
|
|
6
|
+
structure preservation, including complex nested structures, encrypted keys,
|
|
7
|
+
and history arrays.
|
|
8
|
+
|
|
9
|
+
Designed to handle all 15 tool configurations and complex data structures
|
|
10
|
+
identified in the production codebase analysis.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import json
|
|
14
|
+
import sqlite3
|
|
15
|
+
import os
|
|
16
|
+
import shutil
|
|
17
|
+
import logging
|
|
18
|
+
from typing import Dict, List, Tuple, Any, Optional, Union
|
|
19
|
+
from datetime import datetime
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
|
|
22
|
+
from .database_connection_manager import DatabaseConnectionManager
|
|
23
|
+
from .database_schema import DatabaseSchema, DataTypeConverter
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class MigrationManager:
|
|
27
|
+
"""
|
|
28
|
+
Handles migration between JSON settings file and database format.
|
|
29
|
+
|
|
30
|
+
Features:
|
|
31
|
+
- Bidirectional JSON ↔ Database conversion
|
|
32
|
+
- Full structure preservation for complex nested objects
|
|
33
|
+
- Special handling for encrypted API keys with "ENC:" prefix
|
|
34
|
+
- Support for all tool configurations and data types
|
|
35
|
+
- Migration validation and rollback capabilities
|
|
36
|
+
- Comprehensive error handling and recovery
|
|
37
|
+
"""
|
|
38
|
+
|
|
39
|
+
def __init__(self, connection_manager: DatabaseConnectionManager):
|
|
40
|
+
"""
|
|
41
|
+
Initialize the migration manager.
|
|
42
|
+
|
|
43
|
+
Args:
|
|
44
|
+
connection_manager: Database connection manager instance
|
|
45
|
+
"""
|
|
46
|
+
self.connection_manager = connection_manager
|
|
47
|
+
self.logger = logging.getLogger(__name__)
|
|
48
|
+
self.schema = DatabaseSchema()
|
|
49
|
+
self.converter = DataTypeConverter()
|
|
50
|
+
|
|
51
|
+
# Migration tracking
|
|
52
|
+
self._migration_history = []
|
|
53
|
+
self._max_history = 50
|
|
54
|
+
|
|
55
|
+
# Validation settings
|
|
56
|
+
self._validation_enabled = True
|
|
57
|
+
self._strict_validation = True
|
|
58
|
+
|
|
59
|
+
# Backup settings
|
|
60
|
+
self._auto_backup = True
|
|
61
|
+
self._backup_suffix = ".backup"
|
|
62
|
+
|
|
63
|
+
def migrate_from_json(self, json_filepath: str, validate: bool = True) -> bool:
|
|
64
|
+
"""
|
|
65
|
+
Convert settings.json to database format with full structure preservation.
|
|
66
|
+
|
|
67
|
+
Args:
|
|
68
|
+
json_filepath: Path to source JSON settings file
|
|
69
|
+
validate: Whether to validate migration accuracy
|
|
70
|
+
|
|
71
|
+
Returns:
|
|
72
|
+
True if migration successful, False otherwise
|
|
73
|
+
"""
|
|
74
|
+
try:
|
|
75
|
+
self.logger.info(f"Starting migration from JSON: {json_filepath}")
|
|
76
|
+
|
|
77
|
+
# Validate input file
|
|
78
|
+
if not os.path.exists(json_filepath):
|
|
79
|
+
self.logger.error(f"JSON file not found: {json_filepath}")
|
|
80
|
+
return False
|
|
81
|
+
|
|
82
|
+
# Create backup if enabled
|
|
83
|
+
backup_path = None
|
|
84
|
+
if self._auto_backup:
|
|
85
|
+
backup_path = self.create_migration_backup(json_filepath)
|
|
86
|
+
if not backup_path:
|
|
87
|
+
self.logger.warning("Failed to create backup, continuing without backup")
|
|
88
|
+
|
|
89
|
+
# Load and parse JSON
|
|
90
|
+
json_data = self._load_json_file(json_filepath)
|
|
91
|
+
if json_data is None:
|
|
92
|
+
return False
|
|
93
|
+
|
|
94
|
+
# Perform migration
|
|
95
|
+
success = self._migrate_json_to_database(json_data)
|
|
96
|
+
if not success:
|
|
97
|
+
self.logger.error("Migration to database failed")
|
|
98
|
+
return False
|
|
99
|
+
|
|
100
|
+
# Validate migration if requested
|
|
101
|
+
if validate and self._validation_enabled:
|
|
102
|
+
validation_success = self._validate_json_migration(json_data)
|
|
103
|
+
if not validation_success:
|
|
104
|
+
self.logger.error("Migration validation failed")
|
|
105
|
+
if self._strict_validation:
|
|
106
|
+
return False
|
|
107
|
+
|
|
108
|
+
# Record successful migration
|
|
109
|
+
self._record_migration_success(json_filepath, backup_path, "json_to_db")
|
|
110
|
+
|
|
111
|
+
self.logger.info("JSON to database migration completed successfully")
|
|
112
|
+
return True
|
|
113
|
+
|
|
114
|
+
except Exception as e:
|
|
115
|
+
self.logger.error(f"Migration from JSON failed: {e}")
|
|
116
|
+
self._record_migration_failure(json_filepath, str(e), "json_to_db")
|
|
117
|
+
return False
|
|
118
|
+
|
|
119
|
+
def migrate_to_json(self, json_filepath: str, validate: bool = True) -> bool:
|
|
120
|
+
"""
|
|
121
|
+
Convert database back to settings.json format.
|
|
122
|
+
|
|
123
|
+
Args:
|
|
124
|
+
json_filepath: Target path for JSON settings file
|
|
125
|
+
validate: Whether to validate migration accuracy
|
|
126
|
+
|
|
127
|
+
Returns:
|
|
128
|
+
True if migration successful, False otherwise
|
|
129
|
+
"""
|
|
130
|
+
try:
|
|
131
|
+
self.logger.info(f"Starting migration to JSON: {json_filepath}")
|
|
132
|
+
|
|
133
|
+
# Create backup of existing file if it exists
|
|
134
|
+
backup_path = None
|
|
135
|
+
if os.path.exists(json_filepath) and self._auto_backup:
|
|
136
|
+
backup_path = self.create_migration_backup(json_filepath)
|
|
137
|
+
|
|
138
|
+
# Extract data from database
|
|
139
|
+
            json_data = self._migrate_database_to_json()
            if json_data is None:
                return False

            # Write JSON file
            success = self._write_json_file(json_filepath, json_data)
            if not success:
                return False

            # Validate migration if requested
            if validate and self._validation_enabled:
                validation_success = self._validate_db_migration(json_data)
                if not validation_success:
                    self.logger.error("Migration validation failed")
                    if self._strict_validation:
                        return False

            # Record successful migration
            self._record_migration_success(json_filepath, backup_path, "db_to_json")

            self.logger.info("Database to JSON migration completed successfully")
            return True

        except Exception as e:
            self.logger.error(f"Migration to JSON failed: {e}")
            self._record_migration_failure(json_filepath, str(e), "db_to_json")
            return False

    def validate_migration(self, original_json: Dict, migrated_json: Dict) -> bool:
        """
        Verify migration accuracy by comparing original and migrated data.

        Args:
            original_json: Original JSON data structure
            migrated_json: Migrated JSON data structure

        Returns:
            True if migration is accurate, False otherwise
        """
        try:
            self.logger.info("Starting migration validation")

            # Deep comparison of data structures
            validation_results = self._deep_compare_structures(original_json, migrated_json)

            if validation_results['success']:
                self.logger.info("Migration validation passed")
                return True
            else:
                self.logger.error(f"Migration validation failed: {validation_results['errors']}")
                return False

        except Exception as e:
            self.logger.error(f"Migration validation error: {e}")
            return False

    def create_migration_backup(self, json_filepath: str) -> Optional[str]:
        """
        Create a backup of the original JSON file before migration.

        Args:
            json_filepath: Path to the JSON file to back up

        Returns:
            Path to backup file if successful, None otherwise
        """
        try:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = f"{json_filepath}{self._backup_suffix}_{timestamp}"

            shutil.copy2(json_filepath, backup_path)

            self.logger.info(f"Created migration backup: {backup_path}")
            return backup_path

        except Exception as e:
            self.logger.error(f"Failed to create backup: {e}")
            return None

    def rollback_migration(self, backup_filepath: str) -> bool:
        """
        Roll back to the original JSON file if migration fails.

        Args:
            backup_filepath: Path to the backup file to restore

        Returns:
            True if rollback successful, False otherwise
        """
        try:
            if not os.path.exists(backup_filepath):
                self.logger.error(f"Backup file not found: {backup_filepath}")
                return False

            # Determine original file path by removing backup suffix
            original_path = backup_filepath
            for suffix in [self._backup_suffix]:
                if suffix in original_path:
                    original_path = original_path.split(suffix)[0]
                    break

            # Restore original file
            shutil.copy2(backup_filepath, original_path)

            self.logger.info(f"Rollback completed: restored {original_path}")
            return True

        except Exception as e:
            self.logger.error(f"Rollback failed: {e}")
            return False
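As a quick illustration of how the timestamped backup naming in create_migration_backup round-trips through rollback_migration's suffix split, here is an editor's sketch; the ".backup" suffix is an assumption, since the actual value of self._backup_suffix is not visible in this diff:

# Editor's sketch (not package code): recovering the original path from a
# timestamped backup name, mirroring rollback_migration's split logic.
# The ".backup" suffix is assumed for illustration.
backup_suffix = ".backup"
backup_path = "settings.json.backup_20240101_120000"
original_path = backup_path.split(backup_suffix)[0]
assert original_path == "settings.json"

Note that split() keeps the text before the first occurrence of the suffix, so a filename that itself contains ".backup" would be truncated early; the break after the first matching suffix in rollback_migration shares that behavior.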

    def get_migration_history(self) -> List[Dict[str, Any]]:
        """
        Get history of migration operations.

        Returns:
            List of migration history entries
        """
        return self._migration_history.copy()

    def clear_migration_history(self) -> None:
        """Clear migration history."""
        self._migration_history.clear()
        self.logger.info("Migration history cleared")
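Taken together, the methods above form the class's public surface. A minimal backup-first driver, sketched by the editor under the assumption that a manager instance has already been constructed (its constructor is not shown in this part of the diff):

# Editor's sketch (not package code): a backup-first driver that uses only
# the public methods defined above. `manager`, `original_json`, and
# `migrated_json` are assumed to exist already.
backup = manager.create_migration_backup("settings.json")
if backup is not None:
    if not manager.validate_migration(original_json, migrated_json):
        manager.rollback_migration(backup)
for entry in manager.get_migration_history():
    print(entry["timestamp"], entry["type"], entry["status"])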

    # Private implementation methods

    def _load_json_file(self, filepath: str) -> Optional[Dict[str, Any]]:
        """
        Load and parse JSON settings file with error handling.

        Args:
            filepath: Path to JSON file

        Returns:
            Parsed JSON data or None if failed
        """
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                data = json.load(f)

            self.logger.debug(f"Loaded JSON file: {filepath}")
            return data

        except json.JSONDecodeError as e:
            self.logger.error(f"Invalid JSON in file {filepath}: {e}")
            return None
        except Exception as e:
            self.logger.error(f"Failed to load JSON file {filepath}: {e}")
            return None

    def _write_json_file(self, filepath: str, data: Dict[str, Any]) -> bool:
        """
        Write JSON data to file with proper formatting.

        Args:
            filepath: Target file path
            data: JSON data to write

        Returns:
            True if successful, False otherwise
        """
        try:
            # Ensure directory exists
            os.makedirs(os.path.dirname(filepath), exist_ok=True)

            with open(filepath, 'w', encoding='utf-8') as f:
                json.dump(data, f, indent=2, ensure_ascii=False)

            self.logger.debug(f"Wrote JSON file: {filepath}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to write JSON file {filepath}: {e}")
            return False
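Two caveats about _write_json_file, with an editor's hardening sketch (none of this is in the package): os.path.dirname returns an empty string for a bare filename, which makes os.makedirs raise, and an in-place open('w') can leave a truncated file if the process dies mid-write. A common remedy is a temp-file-plus-atomic-replace pattern:

import json
import os
import tempfile

def write_json_atomic(filepath: str, data) -> None:
    """Editor's sketch: write to a temp file, then atomically swap it in."""
    directory = os.path.dirname(filepath) or "."  # guard the bare-filename case
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2, ensure_ascii=False)
        os.replace(tmp_path, filepath)  # atomic on both POSIX and Windows
    except Exception:
        os.unlink(tmp_path)  # never leave a stray temp file behind
        raise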

    def _migrate_json_to_database(self, json_data: Dict[str, Any]) -> bool:
        """
        Migrate JSON data structure to database tables.

        Args:
            json_data: Parsed JSON settings data

        Returns:
            True if migration successful, False otherwise
        """
        try:
            with self.connection_manager.transaction() as conn:
                # Clear existing data
                self._clear_all_tables(conn)

                # Migrate core settings
                self._migrate_core_settings(conn, json_data)

                # Migrate tool settings
                if 'tool_settings' in json_data:
                    self._migrate_tool_settings(conn, json_data['tool_settings'])

                # Migrate tab content
                self._migrate_tab_content(conn, json_data)

                # Migrate performance settings
                if 'performance_settings' in json_data:
                    self._migrate_performance_settings(conn, json_data['performance_settings'])

                # Migrate font settings
                if 'font_settings' in json_data:
                    self._migrate_font_settings(conn, json_data['font_settings'])

                # Migrate dialog settings
                if 'dialog_settings' in json_data:
                    self._migrate_dialog_settings(conn, json_data['dialog_settings'])

                # Update metadata
                self._update_migration_metadata(conn)

            self.logger.info("JSON to database migration completed")
            return True

        except Exception as e:
            self.logger.error(f"JSON to database migration failed: {e}")
            return False
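The category checks above imply the rough shape of the settings document the migration consumes. A hypothetical minimal example (the category keys match the code; the keys inside each category are invented for illustration):

# Hypothetical input for _migrate_json_to_database; inner keys are invented.
example_settings = {
    "theme": "dark",                                   # top-level keys -> core_settings
    "input_tabs": ["draft text", "", ""],              # tab arrays -> tab_content
    "output_tabs": ["", "", ""],
    "tool_settings": {"case_tool": {"mode": "upper"}},
    "performance_settings": {"cache": {"enabled": True}},
    "font_settings": {"editor": {"family": "Consolas", "size": 11}},
    "dialog_settings": {"find": {"width": 400}},
}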

    def _migrate_database_to_json(self) -> Optional[Dict[str, Any]]:
        """
        Extract data from database and reconstruct JSON structure.

        Returns:
            Reconstructed JSON data or None if failed
        """
        try:
            conn = self.connection_manager.get_connection()

            json_data = {}

            # Extract core settings
            core_settings = self._extract_core_settings(conn)
            json_data.update(core_settings)

            # Extract tool settings
            tool_settings = self._extract_tool_settings(conn)
            json_data['tool_settings'] = tool_settings  # Always include, even if empty

            # Extract tab content
            tab_content = self._extract_tab_content(conn)
            json_data.update(tab_content)

            # Extract performance settings
            performance_settings = self._extract_performance_settings(conn)
            json_data['performance_settings'] = performance_settings  # Always include, even if empty

            # Extract font settings
            font_settings = self._extract_font_settings(conn)
            json_data['font_settings'] = font_settings  # Always include, even if empty

            # Extract dialog settings
            dialog_settings = self._extract_dialog_settings(conn)
            json_data['dialog_settings'] = dialog_settings  # Always include, even if empty

            self.logger.info("Database to JSON extraction completed")
            return json_data

        except Exception as e:
            self.logger.error(f"Database to JSON extraction failed: {e}")
            return None

    def _clear_all_tables(self, conn: sqlite3.Connection) -> None:
        """Clear all data from settings tables."""
        tables = [
            'core_settings', 'tool_settings', 'tab_content',
            'performance_settings', 'font_settings', 'dialog_settings'
        ]

        for table in tables:
            conn.execute(f"DELETE FROM {table}")

    def _migrate_core_settings(self, conn: sqlite3.Connection, json_data: Dict[str, Any]) -> None:
        """
        Migrate core application settings to database.

        Args:
            conn: Database connection
            json_data: Full JSON data structure
        """
        # Core settings are top-level keys excluding special categories
        excluded_keys = {
            'tool_settings', 'input_tabs', 'output_tabs',
            'performance_settings', 'font_settings', 'dialog_settings'
        }

        for key, value in json_data.items():
            if key not in excluded_keys:
                data_type = self.converter.python_to_db_type(value)
                serialized_value = self.converter.serialize_value(value)

                conn.execute(
                    "INSERT INTO core_settings (key, value, data_type) VALUES (?, ?, ?)",
                    (key, serialized_value, data_type)
                )

    def _migrate_tool_settings(self, conn: sqlite3.Connection, tool_settings: Dict[str, Any]) -> None:
        """
        Migrate tool-specific settings to database with nested path support.

        Args:
            conn: Database connection
            tool_settings: Tool settings dictionary
        """
        for tool_name, tool_config in tool_settings.items():
            if isinstance(tool_config, dict):
                # Flatten nested tool configuration
                flattened = self._flatten_nested_dict(tool_config)

                for setting_path, value in flattened.items():
                    data_type = self.converter.python_to_db_type(value)
                    serialized_value = self.converter.serialize_value(value)

                    conn.execute(
                        "INSERT INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
                        (tool_name, setting_path, serialized_value, data_type)
                    )
            else:
                # Simple tool setting
                data_type = self.converter.python_to_db_type(tool_config)
                serialized_value = self.converter.serialize_value(tool_config)

                conn.execute(
                    "INSERT INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
                    (tool_name, 'value', serialized_value, data_type)
                )

    def _migrate_tab_content(self, conn: sqlite3.Connection, json_data: Dict[str, Any]) -> None:
        """
        Migrate input_tabs and output_tabs arrays to database.

        Args:
            conn: Database connection
            json_data: Full JSON data structure
        """
        # Migrate input tabs
        if 'input_tabs' in json_data:
            input_tabs = json_data['input_tabs']
            for i, content in enumerate(input_tabs):
                conn.execute(
                    "INSERT INTO tab_content (tab_type, tab_index, content) VALUES (?, ?, ?)",
                    ('input', i, content or '')
                )

        # Migrate output tabs
        if 'output_tabs' in json_data:
            output_tabs = json_data['output_tabs']
            for i, content in enumerate(output_tabs):
                conn.execute(
                    "INSERT INTO tab_content (tab_type, tab_index, content) VALUES (?, ?, ?)",
                    ('output', i, content or '')
                )

    def _migrate_performance_settings(self, conn: sqlite3.Connection, performance_settings: Dict[str, Any]) -> None:
        """
        Migrate performance settings with nested structure support.

        Args:
            conn: Database connection
            performance_settings: Performance settings dictionary
        """
        for category, settings in performance_settings.items():
            if isinstance(settings, dict):
                # Nested performance category
                flattened = self._flatten_nested_dict(settings)

                for setting_key, value in flattened.items():
                    data_type = self.converter.python_to_db_type(value)
                    serialized_value = self.converter.serialize_value(value)

                    conn.execute(
                        "INSERT INTO performance_settings (category, setting_key, setting_value, data_type) VALUES (?, ?, ?, ?)",
                        (category, setting_key, serialized_value, data_type)
                    )
            else:
                # Simple performance setting
                data_type = self.converter.python_to_db_type(settings)
                serialized_value = self.converter.serialize_value(settings)

                conn.execute(
                    "INSERT INTO performance_settings (category, setting_key, setting_value, data_type) VALUES (?, ?, ?, ?)",
                    (category, 'value', serialized_value, data_type)
                )

    def _migrate_font_settings(self, conn: sqlite3.Connection, font_settings: Dict[str, Any]) -> None:
        """
        Migrate font settings with platform-specific fallbacks.

        Args:
            conn: Database connection
            font_settings: Font settings dictionary
        """
        for font_type, font_config in font_settings.items():
            if isinstance(font_config, dict):
                for property_name, value in font_config.items():
                    data_type = self.converter.python_to_db_type(value)
                    serialized_value = self.converter.serialize_value(value)

                    conn.execute(
                        "INSERT INTO font_settings (font_type, property, value, data_type) VALUES (?, ?, ?, ?)",
                        (font_type, property_name, serialized_value, data_type)
                    )

    def _migrate_dialog_settings(self, conn: sqlite3.Connection, dialog_settings: Dict[str, Any]) -> None:
        """
        Migrate dialog settings with category-based organization.

        Args:
            conn: Database connection
            dialog_settings: Dialog settings dictionary
        """
        for category, dialog_config in dialog_settings.items():
            if isinstance(dialog_config, dict):
                for property_name, value in dialog_config.items():
                    data_type = self.converter.python_to_db_type(value)
                    serialized_value = self.converter.serialize_value(value)

                    self.logger.debug(f"Inserting dialog setting: {category}.{property_name} = {value} (type: {data_type})")

                    conn.execute(
                        "INSERT INTO dialog_settings (category, property, value, data_type) VALUES (?, ?, ?, ?)",
                        (category, property_name, serialized_value, data_type)
                    )

    def _update_migration_metadata(self, conn: sqlite3.Connection) -> None:
        """Update migration metadata in database."""
        timestamp = datetime.now().isoformat()

        # Update or insert migration metadata
        metadata_updates = [
            ('last_migration_date', timestamp),
            ('migration_type', 'json_to_db'),
            ('migration_status', 'completed')
        ]

        for key, value in metadata_updates:
            conn.execute(
                "INSERT OR REPLACE INTO settings_metadata (key, value) VALUES (?, ?)",
                (key, value)
            )

    def _extract_core_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
        """
        Extract core settings from database and convert to appropriate types.

        Args:
            conn: Database connection

        Returns:
            Dictionary of core settings
        """
        core_settings = {}

        cursor = conn.execute("SELECT key, value, data_type FROM core_settings")
        for key, value, data_type in cursor.fetchall():
            core_settings[key] = self.converter.deserialize_value(value, data_type)

        return core_settings

    def _extract_tool_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
        """
        Extract tool settings from database and reconstruct nested structure.

        Args:
            conn: Database connection

        Returns:
            Dictionary of tool settings with nested structure
        """
        tool_settings = {}

        cursor = conn.execute(
            "SELECT tool_name, setting_path, setting_value, data_type FROM tool_settings ORDER BY tool_name, setting_path"
        )

        for tool_name, setting_path, setting_value, data_type in cursor.fetchall():
            if tool_name not in tool_settings:
                tool_settings[tool_name] = {}

            # Deserialize value
            value = self.converter.deserialize_value(setting_value, data_type)

            # Handle nested paths
            if '.' in setting_path:
                self._set_nested_value(tool_settings[tool_name], setting_path, value)
            else:
                tool_settings[tool_name][setting_path] = value

        # Post-process: unwrap simple tool settings that only have a 'value' key
        for tool_name, tool_config in list(tool_settings.items()):
            if isinstance(tool_config, dict) and len(tool_config) == 1 and 'value' in tool_config:
                tool_settings[tool_name] = tool_config['value']

        return tool_settings
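The final unwrap mirrors _migrate_tool_settings, which stores scalar tool configs under the synthetic path 'value'; extraction folds them back into scalars so the round trip is lossless for scalars. Editor's illustration (the tool name is invented):

# Editor's illustration of the scalar round trip through the 'value' path.
stored = {"word_wrap": {"value": True}}  # shape as read back from tool_settings
for name, cfg in list(stored.items()):
    if isinstance(cfg, dict) and len(cfg) == 1 and "value" in cfg:
        stored[name] = cfg["value"]
assert stored == {"word_wrap": True}

One consequence worth noting: a genuine dict setting that happens to contain only a 'value' key would also be unwrapped.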

    def _extract_tab_content(self, conn: sqlite3.Connection) -> Dict[str, List[str]]:
        """
        Extract tab content from database and reconstruct arrays.

        Args:
            conn: Database connection

        Returns:
            Dictionary with input_tabs and output_tabs arrays
        """
        tab_content = {'input_tabs': [''] * 7, 'output_tabs': [''] * 7}

        cursor = conn.execute("SELECT tab_type, tab_index, content FROM tab_content ORDER BY tab_type, tab_index")

        for tab_type, tab_index, content in cursor.fetchall():
            if tab_type == 'input' and 0 <= tab_index < 7:
                tab_content['input_tabs'][tab_index] = content or ''
            elif tab_type == 'output' and 0 <= tab_index < 7:
                tab_content['output_tabs'][tab_index] = content or ''

        return tab_content
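The reconstruction pre-allocates exactly seven slots per tab type, so rows with indices outside 0-6 are silently dropped and missing indices stay as empty strings. An editor's sketch of that padding behavior with invented rows:

# Editor's sketch of the fixed seven-slot behavior in _extract_tab_content.
rows = [("input", 0, "hello"), ("input", 9, "dropped"), ("output", 2, None)]
tab_content = {"input_tabs": [""] * 7, "output_tabs": [""] * 7}
for tab_type, idx, content in rows:
    if tab_type == "input" and 0 <= idx < 7:
        tab_content["input_tabs"][idx] = content or ""
    elif tab_type == "output" and 0 <= idx < 7:
        tab_content["output_tabs"][idx] = content or ""
assert tab_content["input_tabs"][0] == "hello"      # stored
assert "dropped" not in tab_content["input_tabs"]   # index 9 discarded
assert tab_content["output_tabs"][2] == ""          # None coerced to ''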

    def _extract_performance_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
        """
        Extract performance settings from database and reconstruct nested structure.

        Args:
            conn: Database connection

        Returns:
            Dictionary of performance settings with nested structure
        """
        performance_settings = {}

        cursor = conn.execute(
            "SELECT category, setting_key, setting_value, data_type FROM performance_settings ORDER BY category, setting_key"
        )

        for category, setting_key, setting_value, data_type in cursor.fetchall():
            if category not in performance_settings:
                performance_settings[category] = {}

            # Deserialize value
            value = self.converter.deserialize_value(setting_value, data_type)

            # Handle nested paths
            if '.' in setting_key:
                self._set_nested_value(performance_settings[category], setting_key, value)
            else:
                performance_settings[category][setting_key] = value

        # Post-process: unwrap simple categories that only have a 'value' key
        for category, category_config in list(performance_settings.items()):
            if isinstance(category_config, dict) and len(category_config) == 1 and 'value' in category_config:
                performance_settings[category] = category_config['value']

        return performance_settings

    def _extract_font_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
        """
        Extract font settings from database.

        Args:
            conn: Database connection

        Returns:
            Dictionary of font settings
        """
        font_settings = {}

        cursor = conn.execute("SELECT font_type, property, value, data_type FROM font_settings ORDER BY font_type, property")

        for font_type, property_name, value, data_type in cursor.fetchall():
            if font_type not in font_settings:
                font_settings[font_type] = {}

            font_settings[font_type][property_name] = self.converter.deserialize_value(value, data_type)

        return font_settings

    def _extract_dialog_settings(self, conn: sqlite3.Connection) -> Dict[str, Any]:
        """
        Extract dialog settings from database.

        Args:
            conn: Database connection

        Returns:
            Dictionary of dialog settings
        """
        dialog_settings = {}

        cursor = conn.execute("SELECT category, property, value, data_type FROM dialog_settings ORDER BY category, property")

        for category, property_name, value, data_type in cursor.fetchall():
            if category not in dialog_settings:
                dialog_settings[category] = {}

            dialog_settings[category][property_name] = self.converter.deserialize_value(value, data_type)

        return dialog_settings

    def _flatten_nested_dict(self, nested_dict: Dict[str, Any], parent_key: str = '', separator: str = '.') -> Dict[str, Any]:
        """
        Flatten nested dictionary using dot notation for keys.

        Args:
            nested_dict: Dictionary to flatten
            parent_key: Parent key prefix
            separator: Key separator character

        Returns:
            Flattened dictionary with dot-notation keys
        """
        items = []

        for key, value in nested_dict.items():
            new_key = f"{parent_key}{separator}{key}" if parent_key else key

            if isinstance(value, dict) and len(value) > 0:
                # Only recursively flatten non-empty dictionaries
                items.extend(self._flatten_nested_dict(value, new_key, separator).items())
            else:
                # Treat empty dictionaries and all other values as leaf nodes
                items.append((new_key, value))

        return dict(items)
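A standalone sketch of the flattening contract, including the empty-dict leaf case the comments call out (the helper reimplements the method's logic outside the class purely for illustration):

from typing import Any, Dict

def flatten(d: Dict[str, Any], parent: str = "", sep: str = ".") -> Dict[str, Any]:
    """Editor's standalone mirror of _flatten_nested_dict."""
    items = []
    for key, value in d.items():
        new_key = f"{parent}{sep}{key}" if parent else key
        if isinstance(value, dict) and len(value) > 0:
            items.extend(flatten(value, new_key, sep).items())
        else:
            items.append((new_key, value))
    return dict(items)

assert flatten({"a": {"b": 1, "c": {}}, "d": 2}) == {"a.b": 1, "a.c": {}, "d": 2}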

    def _set_nested_value(self, target_dict: Dict[str, Any], key_path: str, value: Any, separator: str = '.') -> None:
        """
        Set value in nested dictionary using dot notation key path.

        Args:
            target_dict: Dictionary to modify
            key_path: Dot-notation key path
            value: Value to set
            separator: Key separator character
        """
        keys = key_path.split(separator)
        current = target_dict

        # Navigate to parent of target key
        for key in keys[:-1]:
            if key not in current:
                current[key] = {}
            current = current[key]

        # Set final value
        current[keys[-1]] = value
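_set_nested_value is the inverse of _flatten_nested_dict: replaying flattened items through it rebuilds the nested structure, which is what the _extract_* methods rely on. Editor's standalone round-trip sketch:

from typing import Any, Dict

def set_nested(target: Dict[str, Any], key_path: str, value: Any, sep: str = ".") -> None:
    """Editor's standalone mirror of _set_nested_value."""
    keys = key_path.split(sep)
    current = target
    for key in keys[:-1]:
        current = current.setdefault(key, {})  # same effect as the if-not-in check
    current[keys[-1]] = value

rebuilt: Dict[str, Any] = {}
for path, value in {"a.b": 1, "d": 2}.items():
    set_nested(rebuilt, path, value)
assert rebuilt == {"a": {"b": 1}, "d": 2}

One edge case: a key that itself contains a dot cannot survive this round trip, since the separator is ambiguous.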

    def _validate_json_migration(self, original_json: Dict[str, Any]) -> bool:
        """
        Validate JSON to database migration by comparing original with reconstructed data.

        Args:
            original_json: Original JSON data

        Returns:
            True if validation passes, False otherwise
        """
        try:
            # Reconstruct JSON from database
            reconstructed_json = self._migrate_database_to_json()

            if reconstructed_json is None:
                return False

            # Compare structures
            comparison_result = self._deep_compare_structures(original_json, reconstructed_json)

            if not comparison_result['success']:
                self.logger.error(f"Validation errors: {comparison_result['errors'][:5]}")  # Show first 5 errors

            return comparison_result['success']

        except Exception as e:
            self.logger.error(f"JSON migration validation failed: {e}")
            return False

    def _validate_db_migration(self, expected_json: Dict[str, Any]) -> bool:
        """
        Validate database to JSON migration by comparing with expected data.

        Args:
            expected_json: Expected JSON structure

        Returns:
            True if validation passes, False otherwise
        """
        try:
            # This would be called after database to JSON migration
            # The expected_json is what we expect to get from the database
            return True  # Simplified for now

        except Exception as e:
            self.logger.error(f"Database migration validation failed: {e}")
            return False

    def _deep_compare_structures(self, dict1: Dict[str, Any], dict2: Dict[str, Any]) -> Dict[str, Any]:
        """
        Perform deep comparison of two dictionary structures.

        Args:
            dict1: First dictionary
            dict2: Second dictionary

        Returns:
            Dictionary with comparison results and any errors found
        """
        errors = []

        # Check keys in dict1
        for key in dict1:
            if key not in dict2:
                errors.append(f"Key '{key}' missing in second dictionary")
            else:
                # Compare values
                val1, val2 = dict1[key], dict2[key]

                if isinstance(val1, dict) and isinstance(val2, dict):
                    # Recursive comparison for nested dictionaries
                    nested_result = self._deep_compare_structures(val1, val2)
                    if not nested_result['success']:
                        errors.extend([f"{key}.{error}" for error in nested_result['errors']])
                elif isinstance(val1, list) and isinstance(val2, list):
                    # Compare lists
                    if len(val1) != len(val2):
                        errors.append(f"List '{key}' length mismatch: {len(val1)} vs {len(val2)}")
                    else:
                        for i, (item1, item2) in enumerate(zip(val1, val2)):
                            if item1 != item2:
                                errors.append(f"List '{key}[{i}]' value mismatch: {item1} vs {item2}")
                elif val1 != val2:
                    errors.append(f"Value '{key}' mismatch: {val1} vs {val2}")

        # Check for extra keys in dict2
        for key in dict2:
            if key not in dict1:
                errors.append(f"Extra key '{key}' in second dictionary")

        return {
            'success': len(errors) == 0,
            'errors': errors
        }
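Because the comparison accumulates errors instead of raising, callers such as validate_migration can log every difference at once. Editor's illustration of the result shape for two slightly different inputs (error order follows dictionary iteration order):

# Editor's illustration (data literals only) of _deep_compare_structures output.
original = {"a": 1, "tabs": ["x", "y"]}
migrated = {"a": 2, "tabs": ["x", "z"], "b": 3}
expected = {
    "success": False,
    "errors": [
        "Value 'a' mismatch: 1 vs 2",
        "List 'tabs[1]' value mismatch: y vs z",
        "Extra key 'b' in second dictionary",
    ],
}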

    def _record_migration_success(self, filepath: str, backup_path: Optional[str], migration_type: str) -> None:
        """Record successful migration in history."""
        entry = {
            'timestamp': datetime.now().isoformat(),
            'type': migration_type,
            'filepath': filepath,
            'backup_path': backup_path,
            'status': 'success',
            'error': None
        }

        self._migration_history.append(entry)

        # Keep only recent history
        if len(self._migration_history) > self._max_history:
            self._migration_history = self._migration_history[-self._max_history:]

    def _record_migration_failure(self, filepath: str, error: str, migration_type: str) -> None:
        """Record failed migration in history."""
        entry = {
            'timestamp': datetime.now().isoformat(),
            'type': migration_type,
            'filepath': filepath,
            'backup_path': None,
            'status': 'failure',
            'error': error
        }

        self._migration_history.append(entry)

        # Keep only recent history
        if len(self._migration_history) > self._max_history:
            self._migration_history = self._migration_history[-self._max_history:]
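Both record methods trim the history with a slice after every append. A bounded deque would achieve the same cap without the manual trim; this is only an editor's suggestion, not something the package does:

from collections import deque

max_history = 100  # illustrative bound; the class's actual _max_history is set elsewhere
history = deque(maxlen=max_history)
for i in range(max_history + 5):
    history.append({"id": i})
assert len(history) == max_history  # oldest entries dropped automatically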