pomera-ai-commander 0.1.0 → 1.2.1
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/LICENSE +21 -21
- package/README.md +105 -680
- package/bin/pomera-ai-commander.js +62 -62
- package/core/__init__.py +65 -65
- package/core/app_context.py +482 -482
- package/core/async_text_processor.py +421 -421
- package/core/backup_manager.py +655 -655
- package/core/backup_recovery_manager.py +1033 -1033
- package/core/content_hash_cache.py +508 -508
- package/core/context_menu.py +313 -313
- package/core/data_validator.py +1066 -1066
- package/core/database_connection_manager.py +744 -744
- package/core/database_curl_settings_manager.py +608 -608
- package/core/database_promera_ai_settings_manager.py +446 -446
- package/core/database_schema.py +411 -411
- package/core/database_schema_manager.py +395 -395
- package/core/database_settings_manager.py +1507 -1507
- package/core/database_settings_manager_interface.py +456 -456
- package/core/dialog_manager.py +734 -734
- package/core/efficient_line_numbers.py +510 -510
- package/core/error_handler.py +746 -746
- package/core/error_service.py +431 -431
- package/core/event_consolidator.py +511 -511
- package/core/mcp/__init__.py +43 -43
- package/core/mcp/protocol.py +288 -288
- package/core/mcp/schema.py +251 -251
- package/core/mcp/server_stdio.py +299 -299
- package/core/mcp/tool_registry.py +2372 -2345
- package/core/memory_efficient_text_widget.py +711 -711
- package/core/migration_manager.py +914 -914
- package/core/migration_test_suite.py +1085 -1085
- package/core/migration_validator.py +1143 -1143
- package/core/optimized_find_replace.py +714 -714
- package/core/optimized_pattern_engine.py +424 -424
- package/core/optimized_search_highlighter.py +552 -552
- package/core/performance_monitor.py +674 -674
- package/core/persistence_manager.py +712 -712
- package/core/progressive_stats_calculator.py +632 -632
- package/core/regex_pattern_cache.py +529 -529
- package/core/regex_pattern_library.py +350 -350
- package/core/search_operation_manager.py +434 -434
- package/core/settings_defaults_registry.py +1087 -1087
- package/core/settings_integrity_validator.py +1111 -1111
- package/core/settings_serializer.py +557 -557
- package/core/settings_validator.py +1823 -1823
- package/core/smart_stats_calculator.py +709 -709
- package/core/statistics_update_manager.py +619 -619
- package/core/stats_config_manager.py +858 -858
- package/core/streaming_text_handler.py +723 -723
- package/core/task_scheduler.py +596 -596
- package/core/update_pattern_library.py +168 -168
- package/core/visibility_monitor.py +596 -596
- package/core/widget_cache.py +498 -498
- package/mcp.json +51 -61
- package/package.json +61 -57
- package/pomera.py +7482 -7482
- package/pomera_mcp_server.py +183 -144
- package/requirements.txt +32 -0
- package/tools/__init__.py +4 -4
- package/tools/ai_tools.py +2891 -2891
- package/tools/ascii_art_generator.py +352 -352
- package/tools/base64_tools.py +183 -183
- package/tools/base_tool.py +511 -511
- package/tools/case_tool.py +308 -308
- package/tools/column_tools.py +395 -395
- package/tools/cron_tool.py +884 -884
- package/tools/curl_history.py +600 -600
- package/tools/curl_processor.py +1207 -1207
- package/tools/curl_settings.py +502 -502
- package/tools/curl_tool.py +5467 -5467
- package/tools/diff_viewer.py +1071 -1071
- package/tools/email_extraction_tool.py +248 -248
- package/tools/email_header_analyzer.py +425 -425
- package/tools/extraction_tools.py +250 -250
- package/tools/find_replace.py +1750 -1750
- package/tools/folder_file_reporter.py +1463 -1463
- package/tools/folder_file_reporter_adapter.py +480 -480
- package/tools/generator_tools.py +1216 -1216
- package/tools/hash_generator.py +255 -255
- package/tools/html_tool.py +656 -656
- package/tools/jsonxml_tool.py +729 -729
- package/tools/line_tools.py +419 -419
- package/tools/markdown_tools.py +561 -561
- package/tools/mcp_widget.py +1417 -1417
- package/tools/notes_widget.py +973 -973
- package/tools/number_base_converter.py +372 -372
- package/tools/regex_extractor.py +571 -571
- package/tools/slug_generator.py +310 -310
- package/tools/sorter_tools.py +458 -458
- package/tools/string_escape_tool.py +392 -392
- package/tools/text_statistics_tool.py +365 -365
- package/tools/text_wrapper.py +430 -430
- package/tools/timestamp_converter.py +421 -421
- package/tools/tool_loader.py +710 -710
- package/tools/translator_tools.py +522 -522
- package/tools/url_link_extractor.py +261 -261
- package/tools/url_parser.py +204 -204
- package/tools/whitespace_tools.py +355 -355
- package/tools/word_frequency_counter.py +146 -146
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
--- package/core/database_schema_manager.py
+++ package/core/database_schema_manager.py
@@ -1,396 +1,396 @@
-"""
-Database Schema Manager for Settings Migration
-
-This module manages database schema creation, validation, and evolution for
-the settings migration system. It handles table creation, indexing, and
-schema versioning to ensure proper database structure.
-"""
-
-import sqlite3
-import logging
-from typing import Dict, List, Any, Optional
-from datetime import datetime
-
-from .database_connection_manager import DatabaseConnectionManager
-from .database_schema import DatabaseSchema
-
-
-class DatabaseSchemaManager:
-    """
-    Manages database schema creation, validation, and evolution.
-
-    This class handles:
-    - Schema initialization and table creation
-    - Index creation for performance optimization
-    - Schema validation and integrity checks
-    - Schema versioning and migration support
-    - Metadata management
-    """
-
-    def __init__(self, connection_manager: DatabaseConnectionManager):
-        """
-        Initialize the schema manager.
-
-        Args:
-            connection_manager: Database connection manager instance
-        """
-        self.connection_manager = connection_manager
-        self.logger = logging.getLogger(__name__)
-        self.schema = DatabaseSchema()
-
-        # Schema state tracking
-        self._schema_initialized = False
-        self._current_version = None
-
-    def initialize_schema(self) -> bool:
-        """
-        Create all required tables and indexes for the settings database.
-
-        Returns:
-            True if schema initialization successful, False otherwise
-        """
-        try:
-            self.logger.info("Initializing database schema")
-
-            with self.connection_manager.transaction() as conn:
-                # Create tables in proper order
-                self._create_all_tables(conn)
-
-                # Create indexes for performance
-                self._create_all_indexes(conn)
-
-                # Insert initial metadata
-                self._insert_initial_metadata(conn)
-
-            self._schema_initialized = True
-            self._current_version = self.schema.SCHEMA_VERSION
-
-            self.logger.info(f"Database schema initialized successfully (version {self._current_version})")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Schema initialization failed: {e}")
-            return False
-
-    def validate_schema(self) -> bool:
-        """
-        Verify database schema integrity and structure.
-
-        Returns:
-            True if schema is valid, False otherwise
-        """
-        try:
-            self.logger.info("Validating database schema")
-
-            conn = self.connection_manager.get_connection()
-
-            # Check if all required tables exist
-            if not self._validate_tables_exist(conn):
-                return False
-
-            # Check table structures
-            if not self._validate_table_structures(conn):
-                return False
-
-            # Check indexes exist
-            if not self._validate_indexes_exist(conn):
-                return False
-
-            # Check metadata
-            if not self._validate_metadata(conn):
-                return False
-
-            self.logger.info("Schema validation passed")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Schema validation failed: {e}")
-            return False
-
-    def get_schema_info(self) -> Dict[str, Any]:
-        """
-        Get current schema version and metadata information.
-
-        Returns:
-            Dictionary with schema information
-        """
-        try:
-            conn = self.connection_manager.get_connection()
-
-            # Get schema version from metadata
-            cursor = conn.execute(
-                "SELECT value FROM settings_metadata WHERE key = 'schema_version'"
-            )
-            result = cursor.fetchone()
-            schema_version = result[0] if result else "unknown"
-
-            # Get table information
-            cursor = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
-            )
-            tables = [row[0] for row in cursor.fetchall()]
-
-            # Get index information
-            cursor = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'"
-            )
-            indexes = [row[0] for row in cursor.fetchall()]
-
-            return {
-                'schema_version': schema_version,
-                'initialized': self._schema_initialized,
-                'tables': sorted(tables),
-                'indexes': sorted(indexes),
-                'table_count': len(tables),
-                'index_count': len(indexes)
-            }
-
-        except Exception as e:
-            self.logger.error(f"Failed to get schema info: {e}")
-            return {
-                'schema_version': 'error',
-                'initialized': False,
-                'tables': [],
-                'indexes': [],
-                'table_count': 0,
-                'index_count': 0,
-                'error': str(e)
-            }
-
-    def migrate_schema(self, from_version: str, to_version: str) -> bool:
-        """
-        Handle schema migrations between versions.
-
-        Args:
-            from_version: Current schema version
-            to_version: Target schema version
-
-        Returns:
-            True if migration successful, False otherwise
-        """
-        try:
-            self.logger.info(f"Migrating schema from {from_version} to {to_version}")
-
-            # For now, we only support version 1.0
-            if to_version != "1.0":
-                self.logger.error(f"Unsupported target schema version: {to_version}")
-                return False
-
-            # If already at target version, nothing to do
-            if from_version == to_version:
-                self.logger.info("Schema already at target version")
-                return True
-
-            # For initial implementation, we only support creating new schema
-            if from_version == "unknown" or from_version is None:
-                return self.initialize_schema()
-
-            self.logger.warning(f"Schema migration from {from_version} to {to_version} not implemented")
-            return False
-
-        except Exception as e:
-            self.logger.error(f"Schema migration failed: {e}")
-            return False
-
-    def repair_schema(self) -> bool:
-        """
-        Attempt to repair corrupted or incomplete schema.
-
-        Returns:
-            True if repair successful, False otherwise
-        """
-        try:
-            self.logger.info("Attempting schema repair")
-
-            conn = self.connection_manager.get_connection()
-
-            # Check what's missing and try to fix it
-            missing_tables = self._get_missing_tables(conn)
-            if missing_tables:
-                self.logger.info(f"Creating missing tables: {missing_tables}")
-                self._create_specific_tables(conn, missing_tables)
-
-            missing_indexes = self._get_missing_indexes(conn)
-            if missing_indexes:
-                self.logger.info(f"Creating missing indexes: {missing_indexes}")
-                self._create_specific_indexes(conn, missing_indexes)
-
-            # Validate repair was successful
-            if self.validate_schema():
-                self.logger.info("Schema repair completed successfully")
-                return True
-            else:
-                self.logger.error("Schema repair failed validation")
-                return False
-
-        except Exception as e:
-            self.logger.error(f"Schema repair failed: {e}")
-            return False
-
-    # Private implementation methods
-
-    def _create_all_tables(self, conn: sqlite3.Connection) -> None:
-        """Create all database tables in proper order."""
-        schema_definitions = self.schema.get_schema_definitions()
-        table_order = self.schema.get_table_creation_order()
-
-        for table_name in table_order:
-            if table_name in schema_definitions:
-                sql = schema_definitions[table_name]
-                conn.execute(sql)
-                self.logger.debug(f"Created table: {table_name}")
-
-    def _create_all_indexes(self, conn: sqlite3.Connection) -> None:
-        """Create all performance indexes."""
-        index_definitions = self.schema.get_index_definitions()
-
-        for table_name, indexes in index_definitions.items():
-            for index_sql in indexes:
-                conn.execute(index_sql)
-                self.logger.debug(f"Created index for table: {table_name}")
-
-    def _insert_initial_metadata(self, conn: sqlite3.Connection) -> None:
-        """Insert initial metadata entries."""
-        initial_metadata = self.schema.get_initial_metadata()
-
-        for key, value, description in initial_metadata:
-            conn.execute(
-                "INSERT OR REPLACE INTO settings_metadata (key, value, description) VALUES (?, ?, ?)",
-                (key, value, description)
-            )
-
-    def _validate_tables_exist(self, conn: sqlite3.Connection) -> bool:
-        """Check if all required tables exist."""
-        schema_definitions = self.schema.get_schema_definitions()
-        required_tables = set(schema_definitions.keys())
-
-        cursor = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
-        )
-        existing_tables = set(row[0] for row in cursor.fetchall())
-
-        missing_tables = required_tables - existing_tables
-        if missing_tables:
-            self.logger.error(f"Missing required tables: {missing_tables}")
-            return False
-
-        return True
-
-    def _validate_table_structures(self, conn: sqlite3.Connection) -> bool:
-        """Validate that table structures match expected schema."""
-        # For now, just check that tables exist and have some columns
-        # More detailed structure validation could be added later
-
-        schema_definitions = self.schema.get_schema_definitions()
-
-        for table_name in schema_definitions.keys():
-            try:
-                cursor = conn.execute(f"PRAGMA table_info({table_name})")
-                columns = cursor.fetchall()
-
-                if not columns:
-                    self.logger.error(f"Table {table_name} has no columns")
-                    return False
-
-            except sqlite3.Error as e:
-                self.logger.error(f"Error checking table structure for {table_name}: {e}")
-                return False
-
-        return True
-
-    def _validate_indexes_exist(self, conn: sqlite3.Connection) -> bool:
-        """Check if performance indexes exist."""
-        # Get expected indexes
-        index_definitions = self.schema.get_index_definitions()
-        expected_indexes = set()
-
-        for table_indexes in index_definitions.values():
-            for index_sql in table_indexes:
-                # Extract index name from CREATE INDEX statement
-                parts = index_sql.split()
-                if len(parts) >= 5 and parts[0].upper() == "CREATE" and parts[1].upper() == "INDEX":
-                    index_name = parts[4]  # "CREATE INDEX IF NOT EXISTS index_name"
-                    expected_indexes.add(index_name)
-
-        # Get existing indexes
-        cursor = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'"
-        )
-        existing_indexes = set(row[0] for row in cursor.fetchall())
-
-        missing_indexes = expected_indexes - existing_indexes
-        if missing_indexes:
-            self.logger.warning(f"Missing performance indexes: {missing_indexes}")
-            # Indexes are not critical for functionality, so just warn
-
-        return True
-
-    def _validate_metadata(self, conn: sqlite3.Connection) -> bool:
-        """Validate that required metadata exists."""
-        try:
-            cursor = conn.execute("SELECT key FROM settings_metadata")
-            existing_keys = set(row[0] for row in cursor.fetchall())
-
-            required_keys = {'schema_version', 'created_date'}
-            missing_keys = required_keys - existing_keys
-
-            if missing_keys:
-                self.logger.error(f"Missing required metadata keys: {missing_keys}")
-                return False
-
-            return True
-
-        except sqlite3.Error as e:
-            self.logger.error(f"Error validating metadata: {e}")
-            return False
-
-    def _get_missing_tables(self, conn: sqlite3.Connection) -> List[str]:
-        """Get list of missing tables that should exist."""
-        schema_definitions = self.schema.get_schema_definitions()
-        required_tables = set(schema_definitions.keys())
-
-        cursor = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
-        )
-        existing_tables = set(row[0] for row in cursor.fetchall())
-
-        return list(required_tables - existing_tables)
-
-    def _get_missing_indexes(self, conn: sqlite3.Connection) -> List[str]:
-        """Get list of missing indexes that should exist."""
-        index_definitions = self.schema.get_index_definitions()
-        expected_indexes = []
-
-        for table_indexes in index_definitions.values():
-            expected_indexes.extend(table_indexes)
-
-        cursor = conn.execute(
-            "SELECT sql FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'"
-        )
-        existing_index_sqls = set(row[0] for row in cursor.fetchall() if row[0])
-
-        missing_indexes = []
-        for index_sql in expected_indexes:
-            if index_sql not in existing_index_sqls:
-                missing_indexes.append(index_sql)
-
-        return missing_indexes
-
-    def _create_specific_tables(self, conn: sqlite3.Connection, table_names: List[str]) -> None:
-        """Create specific tables by name."""
-        schema_definitions = self.schema.get_schema_definitions()
-
-        for table_name in table_names:
-            if table_name in schema_definitions:
-                sql = schema_definitions[table_name]
-                conn.execute(sql)
-                self.logger.info(f"Created missing table: {table_name}")
-
-    def _create_specific_indexes(self, conn: sqlite3.Connection, index_sqls: List[str]) -> None:
-        """Create specific indexes by SQL."""
-        for index_sql in index_sqls:
-            try:
-                conn.execute(index_sql)
-                self.logger.info(f"Created missing index")
-            except sqlite3.Error as e:
+"""
+Database Schema Manager for Settings Migration
+
+This module manages database schema creation, validation, and evolution for
+the settings migration system. It handles table creation, indexing, and
+schema versioning to ensure proper database structure.
+"""
+
+import sqlite3
+import logging
+from typing import Dict, List, Any, Optional
+from datetime import datetime
+
+from .database_connection_manager import DatabaseConnectionManager
+from .database_schema import DatabaseSchema
+
+
+class DatabaseSchemaManager:
+    """
+    Manages database schema creation, validation, and evolution.
+
+    This class handles:
+    - Schema initialization and table creation
+    - Index creation for performance optimization
+    - Schema validation and integrity checks
+    - Schema versioning and migration support
+    - Metadata management
+    """
+
+    def __init__(self, connection_manager: DatabaseConnectionManager):
+        """
+        Initialize the schema manager.
+
+        Args:
+            connection_manager: Database connection manager instance
+        """
+        self.connection_manager = connection_manager
+        self.logger = logging.getLogger(__name__)
+        self.schema = DatabaseSchema()
+
+        # Schema state tracking
+        self._schema_initialized = False
+        self._current_version = None
+
+    def initialize_schema(self) -> bool:
+        """
+        Create all required tables and indexes for the settings database.
+
+        Returns:
+            True if schema initialization successful, False otherwise
+        """
+        try:
+            self.logger.info("Initializing database schema")
+
+            with self.connection_manager.transaction() as conn:
+                # Create tables in proper order
+                self._create_all_tables(conn)
+
+                # Create indexes for performance
+                self._create_all_indexes(conn)
+
+                # Insert initial metadata
+                self._insert_initial_metadata(conn)
+
+            self._schema_initialized = True
+            self._current_version = self.schema.SCHEMA_VERSION
+
+            self.logger.info(f"Database schema initialized successfully (version {self._current_version})")
+            return True
+
+        except Exception as e:
+            self.logger.error(f"Schema initialization failed: {e}")
+            return False
+
+    def validate_schema(self) -> bool:
+        """
+        Verify database schema integrity and structure.
+
+        Returns:
+            True if schema is valid, False otherwise
+        """
+        try:
+            self.logger.info("Validating database schema")
+
+            conn = self.connection_manager.get_connection()
+
+            # Check if all required tables exist
+            if not self._validate_tables_exist(conn):
+                return False
+
+            # Check table structures
+            if not self._validate_table_structures(conn):
+                return False
+
+            # Check indexes exist
+            if not self._validate_indexes_exist(conn):
+                return False
+
+            # Check metadata
+            if not self._validate_metadata(conn):
+                return False
+
+            self.logger.info("Schema validation passed")
+            return True
+
+        except Exception as e:
+            self.logger.error(f"Schema validation failed: {e}")
+            return False
+
+    def get_schema_info(self) -> Dict[str, Any]:
+        """
+        Get current schema version and metadata information.
+
+        Returns:
+            Dictionary with schema information
+        """
+        try:
+            conn = self.connection_manager.get_connection()
+
+            # Get schema version from metadata
+            cursor = conn.execute(
+                "SELECT value FROM settings_metadata WHERE key = 'schema_version'"
+            )
+            result = cursor.fetchone()
+            schema_version = result[0] if result else "unknown"
+
+            # Get table information
+            cursor = conn.execute(
+                "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+            )
+            tables = [row[0] for row in cursor.fetchall()]
+
+            # Get index information
+            cursor = conn.execute(
+                "SELECT name FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'"
+            )
+            indexes = [row[0] for row in cursor.fetchall()]
+
+            return {
+                'schema_version': schema_version,
+                'initialized': self._schema_initialized,
+                'tables': sorted(tables),
+                'indexes': sorted(indexes),
+                'table_count': len(tables),
+                'index_count': len(indexes)
+            }
+
+        except Exception as e:
+            self.logger.error(f"Failed to get schema info: {e}")
+            return {
+                'schema_version': 'error',
+                'initialized': False,
+                'tables': [],
+                'indexes': [],
+                'table_count': 0,
+                'index_count': 0,
+                'error': str(e)
+            }
+
+    def migrate_schema(self, from_version: str, to_version: str) -> bool:
+        """
+        Handle schema migrations between versions.
+
+        Args:
+            from_version: Current schema version
+            to_version: Target schema version
+
+        Returns:
+            True if migration successful, False otherwise
+        """
+        try:
+            self.logger.info(f"Migrating schema from {from_version} to {to_version}")
+
+            # For now, we only support version 1.0
+            if to_version != "1.0":
+                self.logger.error(f"Unsupported target schema version: {to_version}")
+                return False
+
+            # If already at target version, nothing to do
+            if from_version == to_version:
+                self.logger.info("Schema already at target version")
+                return True
+
+            # For initial implementation, we only support creating new schema
+            if from_version == "unknown" or from_version is None:
+                return self.initialize_schema()
+
+            self.logger.warning(f"Schema migration from {from_version} to {to_version} not implemented")
+            return False
+
+        except Exception as e:
+            self.logger.error(f"Schema migration failed: {e}")
+            return False
+
+    def repair_schema(self) -> bool:
+        """
+        Attempt to repair corrupted or incomplete schema.
+
+        Returns:
+            True if repair successful, False otherwise
+        """
+        try:
+            self.logger.info("Attempting schema repair")
+
+            conn = self.connection_manager.get_connection()
+
+            # Check what's missing and try to fix it
+            missing_tables = self._get_missing_tables(conn)
+            if missing_tables:
+                self.logger.info(f"Creating missing tables: {missing_tables}")
+                self._create_specific_tables(conn, missing_tables)
+
+            missing_indexes = self._get_missing_indexes(conn)
+            if missing_indexes:
+                self.logger.info(f"Creating missing indexes: {missing_indexes}")
+                self._create_specific_indexes(conn, missing_indexes)
+
+            # Validate repair was successful
+            if self.validate_schema():
+                self.logger.info("Schema repair completed successfully")
+                return True
+            else:
+                self.logger.error("Schema repair failed validation")
+                return False
+
+        except Exception as e:
+            self.logger.error(f"Schema repair failed: {e}")
+            return False
+
+    # Private implementation methods
+
+    def _create_all_tables(self, conn: sqlite3.Connection) -> None:
+        """Create all database tables in proper order."""
+        schema_definitions = self.schema.get_schema_definitions()
+        table_order = self.schema.get_table_creation_order()
+
+        for table_name in table_order:
+            if table_name in schema_definitions:
+                sql = schema_definitions[table_name]
+                conn.execute(sql)
+                self.logger.debug(f"Created table: {table_name}")
+
+    def _create_all_indexes(self, conn: sqlite3.Connection) -> None:
+        """Create all performance indexes."""
+        index_definitions = self.schema.get_index_definitions()
+
+        for table_name, indexes in index_definitions.items():
+            for index_sql in indexes:
+                conn.execute(index_sql)
+                self.logger.debug(f"Created index for table: {table_name}")
+
+    def _insert_initial_metadata(self, conn: sqlite3.Connection) -> None:
+        """Insert initial metadata entries."""
+        initial_metadata = self.schema.get_initial_metadata()
+
+        for key, value, description in initial_metadata:
+            conn.execute(
+                "INSERT OR REPLACE INTO settings_metadata (key, value, description) VALUES (?, ?, ?)",
+                (key, value, description)
+            )
+
+    def _validate_tables_exist(self, conn: sqlite3.Connection) -> bool:
+        """Check if all required tables exist."""
+        schema_definitions = self.schema.get_schema_definitions()
+        required_tables = set(schema_definitions.keys())
+
+        cursor = conn.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+        )
+        existing_tables = set(row[0] for row in cursor.fetchall())
+
+        missing_tables = required_tables - existing_tables
+        if missing_tables:
+            self.logger.error(f"Missing required tables: {missing_tables}")
+            return False
+
+        return True
+
+    def _validate_table_structures(self, conn: sqlite3.Connection) -> bool:
+        """Validate that table structures match expected schema."""
+        # For now, just check that tables exist and have some columns
+        # More detailed structure validation could be added later
+
+        schema_definitions = self.schema.get_schema_definitions()
+
+        for table_name in schema_definitions.keys():
+            try:
+                cursor = conn.execute(f"PRAGMA table_info({table_name})")
+                columns = cursor.fetchall()
+
+                if not columns:
+                    self.logger.error(f"Table {table_name} has no columns")
+                    return False
+
+            except sqlite3.Error as e:
+                self.logger.error(f"Error checking table structure for {table_name}: {e}")
+                return False
+
+        return True
+
+    def _validate_indexes_exist(self, conn: sqlite3.Connection) -> bool:
+        """Check if performance indexes exist."""
+        # Get expected indexes
+        index_definitions = self.schema.get_index_definitions()
+        expected_indexes = set()
+
+        for table_indexes in index_definitions.values():
+            for index_sql in table_indexes:
+                # Extract index name from CREATE INDEX statement
+                parts = index_sql.split()
+                if len(parts) >= 5 and parts[0].upper() == "CREATE" and parts[1].upper() == "INDEX":
+                    index_name = parts[4]  # "CREATE INDEX IF NOT EXISTS index_name"
+                    expected_indexes.add(index_name)
+
+        # Get existing indexes
+        cursor = conn.execute(
+            "SELECT name FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'"
+        )
+        existing_indexes = set(row[0] for row in cursor.fetchall())
+
+        missing_indexes = expected_indexes - existing_indexes
+        if missing_indexes:
+            self.logger.warning(f"Missing performance indexes: {missing_indexes}")
+            # Indexes are not critical for functionality, so just warn
+
+        return True
+
+    def _validate_metadata(self, conn: sqlite3.Connection) -> bool:
+        """Validate that required metadata exists."""
+        try:
+            cursor = conn.execute("SELECT key FROM settings_metadata")
+            existing_keys = set(row[0] for row in cursor.fetchall())
+
+            required_keys = {'schema_version', 'created_date'}
+            missing_keys = required_keys - existing_keys
+
+            if missing_keys:
+                self.logger.error(f"Missing required metadata keys: {missing_keys}")
+                return False
+
+            return True
+
+        except sqlite3.Error as e:
+            self.logger.error(f"Error validating metadata: {e}")
+            return False
+
+    def _get_missing_tables(self, conn: sqlite3.Connection) -> List[str]:
+        """Get list of missing tables that should exist."""
+        schema_definitions = self.schema.get_schema_definitions()
+        required_tables = set(schema_definitions.keys())
+
+        cursor = conn.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+        )
+        existing_tables = set(row[0] for row in cursor.fetchall())
+
+        return list(required_tables - existing_tables)
+
+    def _get_missing_indexes(self, conn: sqlite3.Connection) -> List[str]:
+        """Get list of missing indexes that should exist."""
+        index_definitions = self.schema.get_index_definitions()
+        expected_indexes = []
+
+        for table_indexes in index_definitions.values():
+            expected_indexes.extend(table_indexes)
+
+        cursor = conn.execute(
+            "SELECT sql FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'"
+        )
+        existing_index_sqls = set(row[0] for row in cursor.fetchall() if row[0])
+
+        missing_indexes = []
+        for index_sql in expected_indexes:
+            if index_sql not in existing_index_sqls:
+                missing_indexes.append(index_sql)
+
+        return missing_indexes
+
+    def _create_specific_tables(self, conn: sqlite3.Connection, table_names: List[str]) -> None:
+        """Create specific tables by name."""
+        schema_definitions = self.schema.get_schema_definitions()
+
+        for table_name in table_names:
+            if table_name in schema_definitions:
+                sql = schema_definitions[table_name]
+                conn.execute(sql)
+                self.logger.info(f"Created missing table: {table_name}")
+
+    def _create_specific_indexes(self, conn: sqlite3.Connection, index_sqls: List[str]) -> None:
+        """Create specific indexes by SQL."""
+        for index_sql in index_sqls:
+            try:
+                conn.execute(index_sql)
+                self.logger.info(f"Created missing index")
+            except sqlite3.Error as e:
                 self.logger.warning(f"Failed to create index: {e}")
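
The hunk above covers the schema layer of the package's settings database. As a rough orientation only, the sketch below wires that class up and exercises the public methods visible in the diff (validate_schema, initialize_schema, repair_schema, get_schema_info). The import paths and the DatabaseConnectionManager constructor arguments are assumptions, not taken from the package, and may differ from the real API.

import logging

from core.database_connection_manager import DatabaseConnectionManager  # assumed import path
from core.database_schema_manager import DatabaseSchemaManager          # assumed import path

logging.basicConfig(level=logging.INFO)

# Assumed constructor signature; the real DatabaseConnectionManager may take different arguments.
connection_manager = DatabaseConnectionManager("settings.db")
schema_manager = DatabaseSchemaManager(connection_manager)

# Validate the existing schema; if it fails, try to create it from scratch, then fall back to repair.
if not schema_manager.validate_schema():
    if not (schema_manager.initialize_schema() or schema_manager.repair_schema()):
        raise RuntimeError("settings database schema could not be initialized or repaired")

# Report the resulting schema version, tables, and indexes.
print(schema_manager.get_schema_info())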