pomera-ai-commander 0.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -21
- package/README.md +105 -680
- package/bin/pomera-ai-commander.js +62 -62
- package/core/__init__.py +65 -65
- package/core/app_context.py +482 -482
- package/core/async_text_processor.py +421 -421
- package/core/backup_manager.py +655 -655
- package/core/backup_recovery_manager.py +1033 -1033
- package/core/content_hash_cache.py +508 -508
- package/core/context_menu.py +313 -313
- package/core/data_validator.py +1066 -1066
- package/core/database_connection_manager.py +744 -744
- package/core/database_curl_settings_manager.py +608 -608
- package/core/database_promera_ai_settings_manager.py +446 -446
- package/core/database_schema.py +411 -411
- package/core/database_schema_manager.py +395 -395
- package/core/database_settings_manager.py +1507 -1507
- package/core/database_settings_manager_interface.py +456 -456
- package/core/dialog_manager.py +734 -734
- package/core/efficient_line_numbers.py +510 -510
- package/core/error_handler.py +746 -746
- package/core/error_service.py +431 -431
- package/core/event_consolidator.py +511 -511
- package/core/mcp/__init__.py +43 -43
- package/core/mcp/protocol.py +288 -288
- package/core/mcp/schema.py +251 -251
- package/core/mcp/server_stdio.py +299 -299
- package/core/mcp/tool_registry.py +2372 -2345
- package/core/memory_efficient_text_widget.py +711 -711
- package/core/migration_manager.py +914 -914
- package/core/migration_test_suite.py +1085 -1085
- package/core/migration_validator.py +1143 -1143
- package/core/optimized_find_replace.py +714 -714
- package/core/optimized_pattern_engine.py +424 -424
- package/core/optimized_search_highlighter.py +552 -552
- package/core/performance_monitor.py +674 -674
- package/core/persistence_manager.py +712 -712
- package/core/progressive_stats_calculator.py +632 -632
- package/core/regex_pattern_cache.py +529 -529
- package/core/regex_pattern_library.py +350 -350
- package/core/search_operation_manager.py +434 -434
- package/core/settings_defaults_registry.py +1087 -1087
- package/core/settings_integrity_validator.py +1111 -1111
- package/core/settings_serializer.py +557 -557
- package/core/settings_validator.py +1823 -1823
- package/core/smart_stats_calculator.py +709 -709
- package/core/statistics_update_manager.py +619 -619
- package/core/stats_config_manager.py +858 -858
- package/core/streaming_text_handler.py +723 -723
- package/core/task_scheduler.py +596 -596
- package/core/update_pattern_library.py +168 -168
- package/core/visibility_monitor.py +596 -596
- package/core/widget_cache.py +498 -498
- package/mcp.json +51 -61
- package/package.json +61 -57
- package/pomera.py +7482 -7482
- package/pomera_mcp_server.py +183 -144
- package/requirements.txt +32 -0
- package/tools/__init__.py +4 -4
- package/tools/ai_tools.py +2891 -2891
- package/tools/ascii_art_generator.py +352 -352
- package/tools/base64_tools.py +183 -183
- package/tools/base_tool.py +511 -511
- package/tools/case_tool.py +308 -308
- package/tools/column_tools.py +395 -395
- package/tools/cron_tool.py +884 -884
- package/tools/curl_history.py +600 -600
- package/tools/curl_processor.py +1207 -1207
- package/tools/curl_settings.py +502 -502
- package/tools/curl_tool.py +5467 -5467
- package/tools/diff_viewer.py +1071 -1071
- package/tools/email_extraction_tool.py +248 -248
- package/tools/email_header_analyzer.py +425 -425
- package/tools/extraction_tools.py +250 -250
- package/tools/find_replace.py +1750 -1750
- package/tools/folder_file_reporter.py +1463 -1463
- package/tools/folder_file_reporter_adapter.py +480 -480
- package/tools/generator_tools.py +1216 -1216
- package/tools/hash_generator.py +255 -255
- package/tools/html_tool.py +656 -656
- package/tools/jsonxml_tool.py +729 -729
- package/tools/line_tools.py +419 -419
- package/tools/markdown_tools.py +561 -561
- package/tools/mcp_widget.py +1417 -1417
- package/tools/notes_widget.py +973 -973
- package/tools/number_base_converter.py +372 -372
- package/tools/regex_extractor.py +571 -571
- package/tools/slug_generator.py +310 -310
- package/tools/sorter_tools.py +458 -458
- package/tools/string_escape_tool.py +392 -392
- package/tools/text_statistics_tool.py +365 -365
- package/tools/text_wrapper.py +430 -430
- package/tools/timestamp_converter.py +421 -421
- package/tools/tool_loader.py +710 -710
- package/tools/translator_tools.py +522 -522
- package/tools/url_link_extractor.py +261 -261
- package/tools/url_parser.py +204 -204
- package/tools/whitespace_tools.py +355 -355
- package/tools/word_frequency_counter.py +146 -146
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
|
@@ -1,1508 +1,1508 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Database Settings Manager for Settings Migration
|
|
3
|
-
|
|
4
|
-
This module provides a drop-in replacement for the current JSON-based settings system
|
|
5
|
-
with a database backend. It maintains full backward compatibility with existing code
|
|
6
|
-
while providing better concurrency handling and data integrity.
|
|
7
|
-
|
|
8
|
-
The DatabaseSettingsManager maintains identical API signatures to the current system,
|
|
9
|
-
ensuring zero code changes are required in existing tools.
|
|
10
|
-
"""
|
|
11
|
-
|
|
12
|
-
import json
|
|
13
|
-
import sqlite3
|
|
14
|
-
import logging
|
|
15
|
-
import threading
|
|
16
|
-
from typing import Dict, List, Tuple, Any, Optional, Union
|
|
17
|
-
from datetime import datetime
|
|
18
|
-
from pathlib import Path
|
|
19
|
-
|
|
20
|
-
from .database_connection_manager import DatabaseConnectionManager
|
|
21
|
-
from .database_schema_manager import DatabaseSchemaManager
|
|
22
|
-
from .migration_manager import MigrationManager
|
|
23
|
-
from .error_handler import get_error_handler, ErrorCategory, ErrorSeverity
|
|
24
|
-
from .data_validator import DataValidator
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
class NestedSettingsProxy:
|
|
28
|
-
"""
|
|
29
|
-
Proxy for nested dictionary access that updates the database when modified.
|
|
30
|
-
"""
|
|
31
|
-
|
|
32
|
-
def __init__(self, settings_manager: 'DatabaseSettingsManager', parent_key: str, data: Dict[str, Any]):
|
|
33
|
-
"""
|
|
34
|
-
Initialize nested settings proxy.
|
|
35
|
-
|
|
36
|
-
Args:
|
|
37
|
-
settings_manager: DatabaseSettingsManager instance
|
|
38
|
-
parent_key: Parent key path (e.g., "tool_settings")
|
|
39
|
-
data: Dictionary data for this level
|
|
40
|
-
"""
|
|
41
|
-
self.settings_manager = settings_manager
|
|
42
|
-
self.parent_key = parent_key
|
|
43
|
-
self._data = data.copy()
|
|
44
|
-
|
|
45
|
-
def __getitem__(self, key: str) -> Any:
|
|
46
|
-
"""Handle nested access like settings["tool_settings"]["Tool Name"]."""
|
|
47
|
-
if key not in self._data:
|
|
48
|
-
# For tool_settings, create empty tool settings when accessed
|
|
49
|
-
if self.parent_key == "tool_settings":
|
|
50
|
-
# Initialize empty tool settings
|
|
51
|
-
self._data[key] = {}
|
|
52
|
-
# Also save to database
|
|
53
|
-
self.settings_manager.set_tool_setting(key, "initialized", True)
|
|
54
|
-
else:
|
|
55
|
-
raise KeyError(f"Key '{key}' not found in {self.parent_key}")
|
|
56
|
-
|
|
57
|
-
value = self._data[key]
|
|
58
|
-
|
|
59
|
-
# Return nested proxy for further nesting
|
|
60
|
-
if isinstance(value, dict):
|
|
61
|
-
nested_key = f"{self.parent_key}.{key}" if self.parent_key else key
|
|
62
|
-
return NestedSettingsProxy(self.settings_manager, nested_key, value)
|
|
63
|
-
|
|
64
|
-
return value
|
|
65
|
-
|
|
66
|
-
def __setitem__(self, key: str, value: Any) -> None:
|
|
67
|
-
"""Handle nested assignment like settings["tool_settings"]["Tool Name"] = {...}."""
|
|
68
|
-
self._data[key] = value
|
|
69
|
-
|
|
70
|
-
# Update the full parent structure in database
|
|
71
|
-
# Get the current full structure and update it
|
|
72
|
-
current_settings = self.settings_manager._load_all_settings()
|
|
73
|
-
self._update_nested_value(current_settings, self.parent_key, self._data)
|
|
74
|
-
self.settings_manager.save_settings(current_settings)
|
|
75
|
-
|
|
76
|
-
# Invalidate cache
|
|
77
|
-
self.settings_manager._settings_proxy._invalidate_cache()
|
|
78
|
-
|
|
79
|
-
def __contains__(self, key: str) -> bool:
|
|
80
|
-
"""Handle 'key' in nested_settings checks."""
|
|
81
|
-
return key in self._data
|
|
82
|
-
|
|
83
|
-
def __iter__(self):
|
|
84
|
-
"""Handle iteration over nested keys."""
|
|
85
|
-
return iter(self._data)
|
|
86
|
-
|
|
87
|
-
def __len__(self) -> int:
|
|
88
|
-
"""Handle len(nested_settings) calls."""
|
|
89
|
-
return len(self._data)
|
|
90
|
-
|
|
91
|
-
def get(self, key: str, default: Any = None) -> Any:
|
|
92
|
-
"""Handle nested_settings.get("key", default) calls."""
|
|
93
|
-
if key not in self._data and self.parent_key == "tool_settings":
|
|
94
|
-
# For tool_settings, create empty tool settings when accessed via get()
|
|
95
|
-
self._data[key] = {}
|
|
96
|
-
# Also save to database
|
|
97
|
-
self.settings_manager.set_tool_setting(key, "initialized", True)
|
|
98
|
-
|
|
99
|
-
value = self._data.get(key, default)
|
|
100
|
-
|
|
101
|
-
if isinstance(value, dict) and value is not default:
|
|
102
|
-
nested_key = f"{self.parent_key}.{key}" if self.parent_key else key
|
|
103
|
-
return NestedSettingsProxy(self.settings_manager, nested_key, value)
|
|
104
|
-
|
|
105
|
-
return value
|
|
106
|
-
|
|
107
|
-
def update(self, other: Dict[str, Any]) -> None:
|
|
108
|
-
"""Handle nested_settings.update(dict) calls."""
|
|
109
|
-
self._data.update(other)
|
|
110
|
-
|
|
111
|
-
# Update the full parent structure in database
|
|
112
|
-
current_settings = self.settings_manager._load_all_settings()
|
|
113
|
-
self._update_nested_value(current_settings, self.parent_key, self._data)
|
|
114
|
-
self.settings_manager.save_settings(current_settings)
|
|
115
|
-
|
|
116
|
-
# Invalidate cache
|
|
117
|
-
self.settings_manager._settings_proxy._invalidate_cache()
|
|
118
|
-
|
|
119
|
-
def keys(self):
|
|
120
|
-
"""Return all available keys."""
|
|
121
|
-
return self._data.keys()
|
|
122
|
-
|
|
123
|
-
def values(self):
|
|
124
|
-
"""Return all values."""
|
|
125
|
-
return self._data.values()
|
|
126
|
-
|
|
127
|
-
def items(self):
|
|
128
|
-
"""Return all key-value pairs."""
|
|
129
|
-
return self._data.items()
|
|
130
|
-
|
|
131
|
-
def copy(self) -> Dict[str, Any]:
|
|
132
|
-
"""Return a copy of the underlying data as a regular dictionary."""
|
|
133
|
-
return self._data.copy()
|
|
134
|
-
|
|
135
|
-
def _update_nested_value(self, data: Dict[str, Any], path: str, value: Any) -> None:
|
|
136
|
-
"""Update value in nested dictionary using dot notation."""
|
|
137
|
-
keys = path.split('.')
|
|
138
|
-
current = data
|
|
139
|
-
|
|
140
|
-
# Navigate to the parent of the target key
|
|
141
|
-
for key in keys[:-1]:
|
|
142
|
-
if key not in current:
|
|
143
|
-
current[key] = {}
|
|
144
|
-
current = current[key]
|
|
145
|
-
|
|
146
|
-
# Set the final value
|
|
147
|
-
current[keys[-1]] = value
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
class SettingsDictProxy:
|
|
151
|
-
"""
|
|
152
|
-
Provides dictionary-like interface that transparently accesses database.
|
|
153
|
-
Allows existing code like self.settings["key"] to work unchanged.
|
|
154
|
-
"""
|
|
155
|
-
|
|
156
|
-
def __init__(self, settings_manager: 'DatabaseSettingsManager'):
|
|
157
|
-
"""
|
|
158
|
-
Initialize the settings dictionary proxy.
|
|
159
|
-
|
|
160
|
-
Args:
|
|
161
|
-
settings_manager: DatabaseSettingsManager instance
|
|
162
|
-
"""
|
|
163
|
-
self.settings_manager = settings_manager
|
|
164
|
-
self._cache = {}
|
|
165
|
-
self._cache_dirty = True
|
|
166
|
-
self._lock = threading.RLock()
|
|
167
|
-
|
|
168
|
-
def _refresh_cache(self) -> None:
|
|
169
|
-
"""Refresh the internal cache from database."""
|
|
170
|
-
with self._lock:
|
|
171
|
-
if self._cache_dirty:
|
|
172
|
-
self._cache = self.settings_manager._load_all_settings()
|
|
173
|
-
self._cache_dirty = False
|
|
174
|
-
|
|
175
|
-
def _invalidate_cache(self) -> None:
|
|
176
|
-
"""Mark cache as dirty to force refresh on next access."""
|
|
177
|
-
with self._lock:
|
|
178
|
-
self._cache_dirty = True
|
|
179
|
-
|
|
180
|
-
def __getitem__(self, key: str) -> Any:
|
|
181
|
-
"""Handle self.settings["key"] access."""
|
|
182
|
-
self._refresh_cache()
|
|
183
|
-
if key not in self._cache:
|
|
184
|
-
# For tool_settings, initialize empty dictionary
|
|
185
|
-
if key == "tool_settings":
|
|
186
|
-
self._cache[key] = {}
|
|
187
|
-
# Save to database
|
|
188
|
-
self.settings_manager.set_setting(key, {})
|
|
189
|
-
else:
|
|
190
|
-
raise KeyError(f"Setting key '{key}' not found")
|
|
191
|
-
|
|
192
|
-
value = self._cache[key]
|
|
193
|
-
|
|
194
|
-
# Return nested proxy for dictionaries to enable nested assignment
|
|
195
|
-
if isinstance(value, dict):
|
|
196
|
-
return NestedSettingsProxy(self.settings_manager, key, value)
|
|
197
|
-
|
|
198
|
-
return value
|
|
199
|
-
|
|
200
|
-
def __setitem__(self, key: str, value: Any) -> None:
|
|
201
|
-
"""Handle self.settings["key"] = value assignment."""
|
|
202
|
-
self.settings_manager.set_setting(key, value)
|
|
203
|
-
self._invalidate_cache()
|
|
204
|
-
|
|
205
|
-
def __contains__(self, key: str) -> bool:
|
|
206
|
-
"""Handle 'key' in self.settings checks."""
|
|
207
|
-
self._refresh_cache()
|
|
208
|
-
return key in self._cache
|
|
209
|
-
|
|
210
|
-
def __iter__(self):
|
|
211
|
-
"""Handle iteration over settings keys."""
|
|
212
|
-
self._refresh_cache()
|
|
213
|
-
return iter(self._cache)
|
|
214
|
-
|
|
215
|
-
def __len__(self) -> int:
|
|
216
|
-
"""Handle len(self.settings) calls."""
|
|
217
|
-
self._refresh_cache()
|
|
218
|
-
return len(self._cache)
|
|
219
|
-
|
|
220
|
-
def get(self, key: str, default: Any = None) -> Any:
|
|
221
|
-
"""Handle self.settings.get("key", default) calls."""
|
|
222
|
-
self._refresh_cache()
|
|
223
|
-
if key not in self._cache and key == "tool_settings":
|
|
224
|
-
# Initialize empty tool_settings if not found
|
|
225
|
-
self._cache[key] = {}
|
|
226
|
-
self.settings_manager.set_setting(key, {})
|
|
227
|
-
|
|
228
|
-
value = self._cache.get(key, default)
|
|
229
|
-
|
|
230
|
-
# Return nested proxy for dictionaries
|
|
231
|
-
if isinstance(value, dict) and value is not default:
|
|
232
|
-
return NestedSettingsProxy(self.settings_manager, key, value)
|
|
233
|
-
|
|
234
|
-
return value
|
|
235
|
-
|
|
236
|
-
def update(self, other: Dict[str, Any]) -> None:
|
|
237
|
-
"""Handle self.settings.update(dict) calls."""
|
|
238
|
-
self.settings_manager.bulk_update_settings(other)
|
|
239
|
-
self._invalidate_cache()
|
|
240
|
-
|
|
241
|
-
def keys(self):
|
|
242
|
-
"""Return all available setting keys."""
|
|
243
|
-
self._refresh_cache()
|
|
244
|
-
return self._cache.keys()
|
|
245
|
-
|
|
246
|
-
def values(self):
|
|
247
|
-
"""Return all setting values."""
|
|
248
|
-
self._refresh_cache()
|
|
249
|
-
return self._cache.values()
|
|
250
|
-
|
|
251
|
-
def items(self):
|
|
252
|
-
"""Return all key-value pairs."""
|
|
253
|
-
self._refresh_cache()
|
|
254
|
-
return self._cache.items()
|
|
255
|
-
|
|
256
|
-
def pop(self, key: str, default=None):
|
|
257
|
-
"""Remove and return a setting value."""
|
|
258
|
-
try:
|
|
259
|
-
value = self[key]
|
|
260
|
-
self.settings_manager._delete_setting(key)
|
|
261
|
-
self._invalidate_cache()
|
|
262
|
-
return value
|
|
263
|
-
except KeyError:
|
|
264
|
-
if default is not None:
|
|
265
|
-
return default
|
|
266
|
-
raise
|
|
267
|
-
|
|
268
|
-
def setdefault(self, key: str, default: Any = None) -> Any:
|
|
269
|
-
"""Get setting value or set and return default if not exists."""
|
|
270
|
-
try:
|
|
271
|
-
return self[key]
|
|
272
|
-
except KeyError:
|
|
273
|
-
self[key] = default
|
|
274
|
-
return default
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
class DatabaseSettingsManager:
|
|
278
|
-
"""
|
|
279
|
-
Drop-in replacement for the current settings system with database backend.
|
|
280
|
-
|
|
281
|
-
Maintains full backward compatibility with existing code while providing:
|
|
282
|
-
- Better concurrency handling through SQLite WAL mode
|
|
283
|
-
- Data integrity through ACID transactions
|
|
284
|
-
- Automatic backup and recovery
|
|
285
|
-
- Enhanced error handling
|
|
286
|
-
|
|
287
|
-
All existing method signatures are preserved to ensure zero code changes
|
|
288
|
-
are required in existing tools.
|
|
289
|
-
"""
|
|
290
|
-
|
|
291
|
-
def __init__(self, db_path: str = ":memory:", backup_path: Optional[str] = None,
|
|
292
|
-
json_settings_path: str = "settings.json",
|
|
293
|
-
enable_performance_monitoring: bool = True,
|
|
294
|
-
enable_auto_backup: bool = True,
|
|
295
|
-
backup_interval: int = 300):
|
|
296
|
-
"""
|
|
297
|
-
Initialize the database settings manager.
|
|
298
|
-
|
|
299
|
-
Args:
|
|
300
|
-
db_path: Path to SQLite database file (":memory:" for in-memory)
|
|
301
|
-
backup_path: Path for automatic backups
|
|
302
|
-
json_settings_path: Path to JSON settings file for migration
|
|
303
|
-
enable_performance_monitoring: Whether to enable performance monitoring
|
|
304
|
-
enable_auto_backup: Whether to enable automatic backups
|
|
305
|
-
backup_interval: Automatic backup interval in seconds
|
|
306
|
-
"""
|
|
307
|
-
self.db_path = db_path
|
|
308
|
-
self.backup_path = backup_path or "settings_backup.db"
|
|
309
|
-
self.json_settings_path = json_settings_path
|
|
310
|
-
self.enable_performance_monitoring = enable_performance_monitoring
|
|
311
|
-
self.enable_auto_backup = enable_auto_backup
|
|
312
|
-
self.logger = logging.getLogger(__name__)
|
|
313
|
-
|
|
314
|
-
# Initialize error handler
|
|
315
|
-
self.error_handler = get_error_handler()
|
|
316
|
-
|
|
317
|
-
# Initialize database components with error handling
|
|
318
|
-
try:
|
|
319
|
-
self.connection_manager = DatabaseConnectionManager(
|
|
320
|
-
db_path, backup_path, enable_performance_monitoring
|
|
321
|
-
)
|
|
322
|
-
self.schema_manager = DatabaseSchemaManager(self.connection_manager)
|
|
323
|
-
self.migration_manager = MigrationManager(self.connection_manager)
|
|
324
|
-
self.data_validator = DataValidator(self.connection_manager, self.schema_manager)
|
|
325
|
-
except Exception as e:
|
|
326
|
-
self.error_handler.handle_error(
|
|
327
|
-
ErrorCategory.DATABASE_CONNECTION,
|
|
328
|
-
f"Failed to initialize database components: {e}",
|
|
329
|
-
exception=e
|
|
330
|
-
)
|
|
331
|
-
|
|
332
|
-
# Initialize settings integrity validator (always available)
|
|
333
|
-
self.settings_integrity_validator = None
|
|
334
|
-
try:
|
|
335
|
-
from .settings_integrity_validator import SettingsIntegrityValidator
|
|
336
|
-
self.settings_integrity_validator = SettingsIntegrityValidator()
|
|
337
|
-
except ImportError:
|
|
338
|
-
self.logger.warning("Settings integrity validator not available")
|
|
339
|
-
|
|
340
|
-
# Initialize backup and recovery manager
|
|
341
|
-
self.backup_recovery_manager = None
|
|
342
|
-
if enable_auto_backup:
|
|
343
|
-
try:
|
|
344
|
-
from .backup_recovery_manager import BackupRecoveryManager
|
|
345
|
-
|
|
346
|
-
backup_dir = Path(backup_path).parent / "backups" if backup_path else "backups"
|
|
347
|
-
self.backup_recovery_manager = BackupRecoveryManager(
|
|
348
|
-
backup_dir=str(backup_dir),
|
|
349
|
-
auto_backup_interval=backup_interval,
|
|
350
|
-
enable_compression=True
|
|
351
|
-
)
|
|
352
|
-
|
|
353
|
-
# Start automatic backup
|
|
354
|
-
self.backup_recovery_manager.start_auto_backup(
|
|
355
|
-
self.connection_manager, self
|
|
356
|
-
)
|
|
357
|
-
|
|
358
|
-
except ImportError:
|
|
359
|
-
self.logger.warning("Backup/recovery manager not available")
|
|
360
|
-
|
|
361
|
-
# Initialize performance monitoring
|
|
362
|
-
self.performance_monitor = None
|
|
363
|
-
if enable_performance_monitoring:
|
|
364
|
-
try:
|
|
365
|
-
from .performance_monitor import get_performance_monitor
|
|
366
|
-
self.performance_monitor = get_performance_monitor()
|
|
367
|
-
except ImportError:
|
|
368
|
-
self.logger.warning("Performance monitoring not available")
|
|
369
|
-
|
|
370
|
-
# Disable strict validation for now to handle default settings differences
|
|
371
|
-
self.migration_manager._strict_validation = False
|
|
372
|
-
|
|
373
|
-
# Settings proxy for dictionary-like access
|
|
374
|
-
self._settings_proxy = SettingsDictProxy(self)
|
|
375
|
-
|
|
376
|
-
# Internal state
|
|
377
|
-
self._initialized = False
|
|
378
|
-
self._lock = threading.RLock()
|
|
379
|
-
self._default_settings_provider = None
|
|
380
|
-
|
|
381
|
-
# Initialize database schema
|
|
382
|
-
self._initialize_database()
|
|
383
|
-
|
|
384
|
-
# Migrate from JSON if exists and database is empty
|
|
385
|
-
self._migrate_from_json_if_needed()
|
|
386
|
-
|
|
387
|
-
def set_default_settings_provider(self, provider_func):
|
|
388
|
-
"""
|
|
389
|
-
Set a function that provides default settings.
|
|
390
|
-
|
|
391
|
-
Args:
|
|
392
|
-
provider_func: Function that returns default settings dictionary
|
|
393
|
-
"""
|
|
394
|
-
self._default_settings_provider = provider_func
|
|
395
|
-
|
|
396
|
-
def _initialize_database(self) -> None:
|
|
397
|
-
"""Initialize database schema and validate structure with error handling."""
|
|
398
|
-
try:
|
|
399
|
-
# Initialize schema
|
|
400
|
-
if not self.schema_manager.initialize_schema():
|
|
401
|
-
self.error_handler.handle_error(
|
|
402
|
-
ErrorCategory.DATABASE_CORRUPTION,
|
|
403
|
-
"Failed to initialize database schema"
|
|
404
|
-
)
|
|
405
|
-
if not self.error_handler.is_fallback_mode():
|
|
406
|
-
raise RuntimeError("Failed to initialize database schema")
|
|
407
|
-
return
|
|
408
|
-
|
|
409
|
-
# Validate schema
|
|
410
|
-
if not self.schema_manager.validate_schema():
|
|
411
|
-
self.logger.warning("Schema validation failed, attempting repair")
|
|
412
|
-
if not self.schema_manager.repair_schema():
|
|
413
|
-
self.error_handler.handle_error(
|
|
414
|
-
ErrorCategory.DATABASE_CORRUPTION,
|
|
415
|
-
"Failed to repair database schema"
|
|
416
|
-
)
|
|
417
|
-
if not self.error_handler.is_fallback_mode():
|
|
418
|
-
raise RuntimeError("Failed to repair database schema")
|
|
419
|
-
return
|
|
420
|
-
|
|
421
|
-
# Perform comprehensive data validation
|
|
422
|
-
validation_issues = self.data_validator.validate_database(fix_issues=True)
|
|
423
|
-
if validation_issues:
|
|
424
|
-
critical_issues = [i for i in validation_issues if i.severity == ErrorSeverity.CRITICAL]
|
|
425
|
-
if critical_issues:
|
|
426
|
-
self.error_handler.handle_error(
|
|
427
|
-
ErrorCategory.DATA_VALIDATION,
|
|
428
|
-
f"Critical data validation issues found: {len(critical_issues)}",
|
|
429
|
-
context={'issues': [i.message for i in critical_issues]}
|
|
430
|
-
)
|
|
431
|
-
|
|
432
|
-
self._initialized = True
|
|
433
|
-
self.logger.info("Database settings manager initialized successfully")
|
|
434
|
-
|
|
435
|
-
except Exception as e:
|
|
436
|
-
self.error_handler.handle_error(
|
|
437
|
-
ErrorCategory.DATABASE_CONNECTION,
|
|
438
|
-
f"Database initialization failed: {e}",
|
|
439
|
-
exception=e
|
|
440
|
-
)
|
|
441
|
-
if not self.error_handler.is_fallback_mode():
|
|
442
|
-
raise
|
|
443
|
-
|
|
444
|
-
def _migrate_from_json_if_needed(self) -> None:
|
|
445
|
-
"""Migrate from JSON settings file if it exists and database is empty."""
|
|
446
|
-
try:
|
|
447
|
-
# Check if database already has data
|
|
448
|
-
conn = self.connection_manager.get_connection()
|
|
449
|
-
cursor = conn.execute("SELECT COUNT(*) FROM core_settings")
|
|
450
|
-
count = cursor.fetchone()[0]
|
|
451
|
-
|
|
452
|
-
if count > 0:
|
|
453
|
-
self.logger.info("Database already contains settings, skipping migration")
|
|
454
|
-
return
|
|
455
|
-
|
|
456
|
-
# Check if JSON file exists
|
|
457
|
-
if Path(self.json_settings_path).exists():
|
|
458
|
-
# Perform migration
|
|
459
|
-
self.logger.info(f"Migrating settings from {self.json_settings_path}")
|
|
460
|
-
if self.migration_manager.migrate_from_json(self.json_settings_path):
|
|
461
|
-
self.logger.info("JSON to database migration completed successfully")
|
|
462
|
-
return
|
|
463
|
-
else:
|
|
464
|
-
self.logger.warning("Migration failed, using default settings")
|
|
465
|
-
else:
|
|
466
|
-
self.logger.info("No JSON settings file found, using defaults")
|
|
467
|
-
|
|
468
|
-
# Always populate defaults if database is empty
|
|
469
|
-
self._populate_default_settings()
|
|
470
|
-
|
|
471
|
-
except Exception as e:
|
|
472
|
-
self.logger.error(f"Migration from JSON failed: {e}")
|
|
473
|
-
self._populate_default_settings()
|
|
474
|
-
|
|
475
|
-
def _populate_default_settings(self) -> None:
|
|
476
|
-
"""Populate database with default settings if empty."""
|
|
477
|
-
try:
|
|
478
|
-
default_settings = self._get_minimal_default_settings()
|
|
479
|
-
|
|
480
|
-
# Use migration manager to populate database
|
|
481
|
-
self.migration_manager._migrate_json_to_database(default_settings)
|
|
482
|
-
self.logger.info("Default settings populated in database")
|
|
483
|
-
|
|
484
|
-
except Exception as e:
|
|
485
|
-
self.error_handler.handle_error(
|
|
486
|
-
ErrorCategory.DATABASE_CONNECTION,
|
|
487
|
-
f"Failed to populate default settings: {e}",
|
|
488
|
-
exception=e
|
|
489
|
-
)
|
|
490
|
-
|
|
491
|
-
def _get_minimal_default_settings(self) -> Dict[str, Any]:
    """Return emergency-fallback defaults.

    Resolution order: the centralized Settings Defaults Registry, then the
    injected default-settings provider, then a hardcoded minimal dict.
    """
    # Preferred source: the centralized registry, when importable.
    try:
        from .settings_defaults_registry import get_registry
        return get_registry().get_all_defaults(tab_count=7)
    except ImportError:
        self.logger.debug("Settings Defaults Registry not available")
    except Exception as e:
        self.logger.warning(f"Failed to get defaults from registry: {e}")

    # Second choice: a caller-supplied provider callable.
    if self._default_settings_provider:
        try:
            return self._default_settings_provider()
        except Exception as e:
            self.logger.warning(f"Default settings provider failed: {e}")

    # Last resort: hardcoded minimal defaults.
    return {
        "export_path": str(Path.home() / "Downloads"),
        "debug_level": "INFO",
        "selected_tool": "Case Tool",
        "active_input_tab": 0,
        "active_output_tab": 0,
        "input_tabs": [""] * 7,
        "output_tabs": [""] * 7,
        "tool_settings": {},
        "performance_settings": {
            "mode": "automatic",
            "async_processing": {"enabled": True, "threshold_kb": 10}
        },
        "font_settings": {
            "text_font": {"family": "Consolas", "size": 11}
        },
        "dialog_settings": {
            "error": {"enabled": True, "locked": True}
        }
    }
|
|
536
|
-
|
|
537
|
-
# Backward Compatible API Methods
|
|
538
|
-
|
|
539
|
-
def load_settings(self) -> Dict[str, Any]:
    """
    Reconstruct the full settings dictionary from database tables.

    This method maintains compatibility with the existing load_settings() API
    while internally using the database backend with error handling.

    Returns:
        Complete settings dictionary matching JSON structure
    """
    try:
        # Check if in fallback mode (database unusable; a file-based
        # fallback store is served instead).
        if self.error_handler.is_fallback_mode():
            fallback_settings = self.error_handler.get_fallback_settings()
            if fallback_settings:
                return fallback_settings
            else:
                # Return minimal defaults if fallback fails
                return self._get_minimal_default_settings()

        # Normal path: serialize access with the manager lock.
        with self._lock:
            return self._load_all_settings()

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.DATABASE_CONNECTION,
            f"Failed to load settings: {e}",
            exception=e
        )

        # Try fallback mode (handle_error above may have switched it on).
        if self.error_handler.is_fallback_mode():
            fallback_settings = self.error_handler.get_fallback_settings()
            if fallback_settings:
                return fallback_settings

        # Return minimal defaults as last resort
        return self._get_minimal_default_settings()
|
|
577
|
-
|
|
578
|
-
def save_settings(self, settings_dict: Optional[Dict[str, Any]] = None) -> bool:
    """
    Parse settings dictionary and update database tables.

    This method maintains compatibility with the existing save_settings() API
    while internally using the database backend with error handling.

    Args:
        settings_dict: Settings dictionary to save (if None, saves current state)

    Returns:
        True if save successful, False otherwise
    """
    try:
        if settings_dict is None:
            # If no dict provided, this is a no-op since database is always current
            return True

        # Validate settings data before saving; refuse to persist
        # structurally broken data.
        validation_issues = self.data_validator.validate_settings_data(settings_dict)
        critical_issues = [i for i in validation_issues if i.severity == ErrorSeverity.CRITICAL]

        if critical_issues:
            self.error_handler.handle_error(
                ErrorCategory.DATA_VALIDATION,
                f"Critical validation issues in settings data: {len(critical_issues)}",
                context={'issues': [i.message for i in critical_issues]}
            )
            return False

        # Check if in fallback mode
        if self.error_handler.is_fallback_mode():
            return self.error_handler.save_fallback_settings(settings_dict)

        with self._lock:
            # Use migration manager to update database from dictionary
            success = self.migration_manager._migrate_json_to_database(settings_dict)

            if not success:
                self.error_handler.handle_error(
                    ErrorCategory.DATABASE_CONNECTION,
                    "Failed to save settings to database"
                )

                # Try fallback mode (handle_error may have enabled it)
                if self.error_handler.is_fallback_mode():
                    return self.error_handler.save_fallback_settings(settings_dict)

            return success

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.DATABASE_CONNECTION,
            f"Failed to save settings: {e}",
            exception=e
        )

        # Try fallback mode
        if self.error_handler.is_fallback_mode():
            return self.error_handler.save_fallback_settings(settings_dict)

        return False
|
|
640
|
-
|
|
641
|
-
def get_tool_settings(self, tool_name: str) -> Dict[str, Any]:
    """
    Get all settings for a specific tool.

    Args:
        tool_name: Name of the tool

    Returns:
        Dictionary of tool settings (or the bare value for scalar tools
        stored under the single 'value' path)
    """
    try:
        query = "SELECT setting_path, setting_value, data_type FROM tool_settings WHERE tool_name = ?"
        params = (tool_name,)

        if self.enable_performance_monitoring:
            with self.connection_manager.monitored_query(query, params) as conn:
                rows = conn.execute(query, params).fetchall()
        else:
            rows = self.connection_manager.get_connection().execute(query, params).fetchall()

        deserialize = self.migration_manager.converter.deserialize_value
        result = {}
        for setting_path, raw_value, data_type in rows:
            decoded = deserialize(raw_value, data_type)
            # Dotted paths were flattened on write; rebuild the nesting.
            if '.' in setting_path:
                self._set_nested_value(result, setting_path, decoded)
            else:
                result[setting_path] = decoded

        # Scalar tools persist a single row under 'value'; unwrap it.
        if len(result) == 1 and 'value' in result:
            return result['value']
        return result

    except Exception as e:
        self.logger.error(f"Failed to get tool settings for {tool_name}: {e}")
        return {}
|
|
683
|
-
|
|
684
|
-
def set_tool_setting(self, tool_name: str, key: str, value: Any) -> None:
    """
    Persist a single tool setting.

    Args:
        tool_name: Name of the tool
        key: Setting key (supports nested paths with dots)
        value: Setting value
    """
    try:
        converter = self.migration_manager.converter
        with self.connection_manager.transaction() as conn:
            data_type = converter.python_to_db_type(value)
            serialized_value = converter.serialize_value(value)
            conn.execute(
                "INSERT OR REPLACE INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
                (tool_name, key, serialized_value, data_type)
            )

        # A write happened: notify backup triggers and drop the cached view.
        self._record_change()
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to set tool setting {tool_name}.{key}: {e}")
|
|
711
|
-
|
|
712
|
-
def get_setting(self, key: str, default: Any = None) -> Any:
    """
    Fetch a core application setting.

    Args:
        key: Setting key (supports nested paths with dots)
        default: Default value if setting not found

    Returns:
        Setting value or default
    """
    try:
        # Dotted paths require the full reconstructed settings tree.
        if '.' in key:
            return self._get_nested_value(self._load_all_settings(), key, default)

        query = "SELECT value, data_type FROM core_settings WHERE key = ?"
        params = (key,)

        if self.enable_performance_monitoring:
            with self.connection_manager.monitored_query(query, params) as conn:
                row = conn.execute(query, params).fetchone()
        else:
            row = self.connection_manager.get_connection().execute(query, params).fetchone()

        if row is None:
            return default
        raw_value, data_type = row
        return self.migration_manager.converter.deserialize_value(raw_value, data_type)

    except Exception as e:
        self.logger.error(f"Failed to get setting {key}: {e}")
        return default
|
|
751
|
-
|
|
752
|
-
def set_setting(self, key: str, value: Any) -> None:
    """
    Set a core application setting.

    Args:
        key: Setting key (supports nested paths with dots)
        value: Setting value
    """
    try:
        # Handle special keys that need to go to specific tables.
        # These are stored across multiple tables, so the only safe way
        # to update them is load-modify-save of the whole structure.
        if key in ['input_tabs', 'output_tabs', 'tool_settings', 'performance_settings', 'font_settings', 'dialog_settings']:
            settings = self._load_all_settings()
            settings[key] = value
            self.save_settings(settings)
            return

        # Handle nested keys by updating the full structure
        if '.' in key:
            settings = self._load_all_settings()
            self._set_nested_value(settings, key, value)
            self.save_settings(settings)
            return

        # Simple key update for core settings
        with self.connection_manager.transaction() as conn:
            data_type = self.migration_manager.converter.python_to_db_type(value)
            serialized_value = self.migration_manager.converter.serialize_value(value)

            conn.execute(
                "INSERT OR REPLACE INTO core_settings (key, value, data_type) VALUES (?, ?, ?)",
                (key, serialized_value, data_type)
            )

        # Record change for backup triggering
        self._record_change()

        # Invalidate proxy cache
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to set setting {key}: {e}")
|
|
794
|
-
|
|
795
|
-
# Enhanced API Methods
|
|
796
|
-
|
|
797
|
-
def get_nested_setting(self, path: str, default: Any = None) -> Any:
    """
    Read a setting addressed with dot notation,
    e.g. 'performance_settings.caching.enabled'.

    Args:
        path: Dot-separated path to setting
        default: Default value if setting not found

    Returns:
        Setting value or default
    """
    # get_setting already understands dotted paths; this is a named alias.
    return self.get_setting(path, default)
|
|
809
|
-
|
|
810
|
-
def set_nested_setting(self, path: str, value: Any) -> None:
    """
    Write a setting addressed with dot notation.

    Args:
        path: Dot-separated path to setting
        value: Setting value
    """
    # set_setting already understands dotted paths; this is a named alias.
    self.set_setting(path, value)
|
|
819
|
-
|
|
820
|
-
def bulk_update_settings(self, updates: Dict[str, Any]) -> None:
    """
    Update multiple settings in a single transaction.

    Args:
        updates: Dictionary of setting updates; a 'tool_settings' key is
            routed to the tool_settings table, everything else goes into
            core_settings.
    """
    try:
        with self.connection_manager.transaction() as conn:
            for key, value in updates.items():
                if key == 'tool_settings' and isinstance(value, dict):
                    # Handle tool settings specially (separate table).
                    for tool_name, tool_config in value.items():
                        self._update_tool_settings_in_transaction(conn, tool_name, tool_config)
                else:
                    # Handle core settings
                    data_type = self.migration_manager.converter.python_to_db_type(value)
                    serialized_value = self.migration_manager.converter.serialize_value(value)

                    conn.execute(
                        "INSERT OR REPLACE INTO core_settings (key, value, data_type) VALUES (?, ?, ?)",
                        (key, serialized_value, data_type)
                    )

        # FIX: record the write for backup triggering, consistent with
        # set_setting / set_tool_setting / _delete_setting, which all call
        # _record_change() after a successful write; bulk updates previously
        # never counted toward the automatic-backup threshold.
        self._record_change()

        # Invalidate proxy cache
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to bulk update settings: {e}")
|
|
849
|
-
|
|
850
|
-
def export_to_json(self, filepath: str) -> bool:
    """
    Dump the current database state to a JSON file.

    Args:
        filepath: Target JSON file path

    Returns:
        True if export successful, False otherwise
    """
    # Delegates entirely to the migration manager's JSON writer.
    return self.migration_manager.migrate_to_json(filepath)
|
|
861
|
-
|
|
862
|
-
def import_from_json(self, filepath: str) -> bool:
    """
    Load settings from a JSON file into the database.

    Args:
        filepath: Source JSON file path

    Returns:
        True if import successful, False otherwise
    """
    # Delegates entirely to the migration manager's JSON reader.
    return self.migration_manager.migrate_from_json(filepath)
|
|
873
|
-
|
|
874
|
-
# Backup and Recovery Methods
|
|
875
|
-
|
|
876
|
-
def create_backup(self, backup_type: str = "manual",
                  description: Optional[str] = None) -> bool:
    """
    Create a backup of current settings.

    Args:
        backup_type: Type of backup ("manual", "automatic", "migration",
            "emergency"); unknown strings fall back to manual.
        description: Optional description for the backup

    Returns:
        True if backup created successfully
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        from .backup_recovery_manager import BackupType

        # Map the string argument onto the BackupType enum.
        type_map = {
            "manual": BackupType.MANUAL,
            "automatic": BackupType.AUTOMATIC,
            "migration": BackupType.MIGRATION,
            "emergency": BackupType.EMERGENCY,
        }

        info = self.backup_recovery_manager.create_database_backup(
            self.connection_manager,
            type_map.get(backup_type, BackupType.MANUAL),
            description
        )
        return info is not None

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.BACKUP,
            f"Failed to create backup: {e}",
            exception=e
        )
        return False
|
|
919
|
-
|
|
920
|
-
def restore_from_backup(self, backup_filepath: str) -> bool:
    """
    Restore settings from a backup file.

    Args:
        backup_filepath: Path to backup file

    Returns:
        True if restore successful
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        # The restore API needs the BackupInfo record, not just the path;
        # look it up in the backup history.
        history = self.backup_recovery_manager.get_backup_history()
        backup_info = next(
            (b for b in history if b.filepath == backup_filepath), None
        )

        if not backup_info:
            self.logger.error(f"Backup info not found for: {backup_filepath}")
            return False

        success = self.backup_recovery_manager.restore_from_database_backup(
            backup_info, self.connection_manager
        )

        if success:
            # Cached settings are stale after the restore.
            self._settings_proxy._invalidate_cache()
            self.logger.info("Settings restored from backup successfully")

        return success

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.RECOVERY,
            f"Failed to restore from backup: {e}",
            exception=e
        )
        return False
|
|
967
|
-
|
|
968
|
-
def repair_database(self) -> bool:
    """
    Attempt to repair database corruption.

    Returns:
        True if repair successful
    """
    try:
        if not self.backup_recovery_manager or not self.data_validator:
            self.logger.warning("Backup manager or data validator not available")
            return False

        repaired = self.backup_recovery_manager.repair_database(
            self.connection_manager, self.data_validator
        )

        if repaired:
            # The repair may have rewritten rows; cached values are stale.
            self._settings_proxy._invalidate_cache()
            self.logger.info("Database repair completed successfully")

        return repaired

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.RECOVERY,
            f"Database repair failed: {e}",
            exception=e
        )
        return False
|
|
998
|
-
|
|
999
|
-
def validate_settings_integrity(self, apply_fixes: bool = False) -> Dict[str, Any]:
    """
    Validate settings integrity and optionally apply fixes.

    Args:
        apply_fixes: Whether to apply automatic fixes

    Returns:
        Validation report dictionary (or a dict with an 'error' key)
    """
    try:
        if not self.settings_integrity_validator:
            self.logger.warning("Settings integrity validator not available")
            return {"error": "Validator not available"}

        # Load current settings
        settings_data = self.load_settings()

        # Validate integrity. NOTE(review): when apply_fixes is True the
        # validator presumably mutates settings_data in place — the save
        # below persists those fixes; confirm against the validator impl.
        issues = self.settings_integrity_validator.validate_settings_integrity(
            settings_data, apply_fixes
        )

        # Generate report
        report = self.settings_integrity_validator.get_validation_report(issues)

        if apply_fixes and issues:
            # Save fixed settings back to database
            self.save_settings(settings_data)
            self.logger.info(f"Applied automatic fixes for {len([i for i in issues if i.auto_fixable])} issues")

        return report

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.DATA_VALIDATION,
            f"Settings integrity validation failed: {e}",
            exception=e
        )
        return {"error": str(e)}
|
|
1039
|
-
|
|
1040
|
-
def export_settings_to_file(self, export_path: str,
                            format_type: str = "json") -> bool:
    """
    Export current settings to a file.

    Args:
        export_path: Path to export file
        format_type: Export format ("json" or "compressed")

    Returns:
        True if export successful
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        # Snapshot the live settings, then hand off to the backup manager.
        snapshot = self.load_settings()
        return self.backup_recovery_manager.export_settings(
            snapshot, export_path, format_type
        )

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.BACKUP,
            f"Failed to export settings: {e}",
            exception=e
        )
        return False
|
|
1074
|
-
|
|
1075
|
-
def import_settings_from_file(self, import_path: str) -> bool:
    """
    Import settings from a file, validate them, and persist to the database.

    Args:
        import_path: Path to import file

    Returns:
        True if import successful
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        # Import settings
        settings_data = self.backup_recovery_manager.import_settings(import_path)

        if settings_data is None:
            return False

        # Validate imported settings (auto-fixing what can be fixed)
        if self.settings_integrity_validator:
            issues = self.settings_integrity_validator.validate_settings_integrity(
                settings_data, apply_fixes=True
            )

            # FIX: severity is compared as ErrorSeverity.CRITICAL elsewhere
            # in this class (see save_settings); the old bare-string
            # comparison (== 'critical') never matched an enum severity, so
            # critical issues could not block an import. Accept both
            # representations for safety.
            critical_issues = [
                i for i in issues
                if i.severity == ErrorSeverity.CRITICAL or i.severity == 'critical'
            ]
            if critical_issues:
                self.logger.error(f"Imported settings have {len(critical_issues)} critical issues")
                return False

        # Save imported settings
        success = self.save_settings(settings_data)

        if success:
            self.logger.info("Settings imported successfully")

        return success

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.MIGRATION,
            f"Failed to import settings: {e}",
            exception=e
        )
        return False
|
|
1122
|
-
|
|
1123
|
-
def get_backup_statistics(self) -> Dict[str, Any]:
    """
    Get backup statistics and information.

    Returns:
        Dictionary with backup statistics, or one with an 'error' entry.
    """
    try:
        if not self.backup_recovery_manager:
            return {"error": "Backup manager not available"}
        return self.backup_recovery_manager.get_backup_statistics()
    except Exception as e:
        self.logger.error(f"Failed to get backup statistics: {e}")
        return {"error": str(e)}
|
|
1139
|
-
|
|
1140
|
-
def cleanup_old_backups(self) -> int:
    """
    Prune backups past the retention policy.

    Returns:
        Number of backups cleaned up (0 when unavailable or on error)
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return 0
        return self.backup_recovery_manager.cleanup_old_backups()
    except Exception as e:
        self.logger.error(f"Failed to cleanup old backups: {e}")
        return 0
|
|
1157
|
-
|
|
1158
|
-
def close(self) -> None:
    """Shut down the settings manager: stop backups, close DB connections."""
    try:
        # Halt the periodic backup thread first so no backup races the close.
        if self.backup_recovery_manager:
            self.backup_recovery_manager.stop_auto_backup()

        if self.connection_manager:
            self.connection_manager.close_all_connections()

        self.logger.info("Database settings manager closed")

    except Exception as e:
        self.logger.error(f"Error closing settings manager: {e}")
|
|
1173
|
-
|
|
1174
|
-
# Dictionary-like interface property
|
|
1175
|
-
|
|
1176
|
-
@property
def settings(self) -> SettingsDictProxy:
    """
    Provide dictionary-like access to settings.

    This allows existing code like app.settings["key"] to work unchanged.

    Returns:
        SettingsDictProxy instance for transparent database access
    """
    # The proxy caches reads; write paths in this class call
    # _settings_proxy._invalidate_cache() to keep it consistent.
    return self._settings_proxy
|
|
1187
|
-
|
|
1188
|
-
# Private implementation methods
|
|
1189
|
-
|
|
1190
|
-
def _load_all_settings(self) -> Dict[str, Any]:
    """Materialize the complete settings tree from the database."""
    try:
        # The migration manager knows how to rebuild the JSON-shaped dict.
        snapshot = self.migration_manager._migrate_database_to_json()
        return snapshot or {}
    except Exception as e:
        self.logger.error(f"Failed to load all settings: {e}")
        return {}
|
|
1197
|
-
|
|
1198
|
-
def _update_tool_settings_in_transaction(self, conn: sqlite3.Connection,
                                         tool_name: str, tool_config: Any) -> None:
    """Replace all persisted settings for one tool inside an open transaction."""
    # Wipe the tool's previous rows; they are fully rewritten below.
    conn.execute("DELETE FROM tool_settings WHERE tool_name = ?", (tool_name,))

    if isinstance(tool_config, dict):
        # Flatten nested configuration into dotted setting paths.
        entries = self.migration_manager._flatten_nested_dict(tool_config).items()
    else:
        # Scalar config is stored under the reserved 'value' path.
        entries = [('value', tool_config)]

    converter = self.migration_manager.converter
    for setting_path, value in entries:
        data_type = converter.python_to_db_type(value)
        serialized_value = converter.serialize_value(value)
        conn.execute(
            "INSERT INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
            (tool_name, setting_path, serialized_value, data_type)
        )
|
|
1225
|
-
|
|
1226
|
-
def _get_nested_value(self, data: Dict[str, Any], path: str, default: Any = None) -> Any:
|
|
1227
|
-
"""Get value from nested dictionary using dot notation."""
|
|
1228
|
-
keys = path.split('.')
|
|
1229
|
-
current = data
|
|
1230
|
-
|
|
1231
|
-
for key in keys:
|
|
1232
|
-
if isinstance(current, dict) and key in current:
|
|
1233
|
-
current = current[key]
|
|
1234
|
-
else:
|
|
1235
|
-
return default
|
|
1236
|
-
|
|
1237
|
-
return current
|
|
1238
|
-
|
|
1239
|
-
def _set_nested_value(self, data: Dict[str, Any], path: str, value: Any) -> None:
|
|
1240
|
-
"""Set value in nested dictionary using dot notation."""
|
|
1241
|
-
keys = path.split('.')
|
|
1242
|
-
current = data
|
|
1243
|
-
|
|
1244
|
-
# Navigate to the parent of the target key
|
|
1245
|
-
for key in keys[:-1]:
|
|
1246
|
-
if key not in current:
|
|
1247
|
-
current[key] = {}
|
|
1248
|
-
current = current[key]
|
|
1249
|
-
|
|
1250
|
-
# Set the final value
|
|
1251
|
-
current[keys[-1]] = value
|
|
1252
|
-
|
|
1253
|
-
def _delete_setting(self, key: str) -> None:
    """Remove a core setting row; failures are logged, not raised."""
    try:
        with self.connection_manager.transaction() as conn:
            conn.execute("DELETE FROM core_settings WHERE key = ?", (key,))

        # Mirror the write path: count the change and drop the cached view.
        self._record_change()
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to delete setting {key}: {e}")
|
|
1267
|
-
|
|
1268
|
-
def _record_change(self) -> None:
    """Notify backup/persistence subsystems that a database write occurred."""
    try:
        if self.backup_manager:
            self.backup_manager.record_change()

        if self.persistence_manager:
            self.persistence_manager.record_change()

        # The connection manager keeps its own dirty-write counter.
        self.connection_manager._changes_since_backup += 1

    except Exception as e:
        # Best-effort bookkeeping: never let accounting break a write.
        self.logger.debug(f"Failed to record change: {e}")
|
|
1284
|
-
|
|
1285
|
-
# Performance Monitoring and Optimization Methods
|
|
1286
|
-
|
|
1287
|
-
def get_performance_stats(self) -> Dict[str, Any]:
    """
    Get comprehensive performance statistics.

    Returns:
        Dictionary with performance metrics ('connection', optionally
        'monitor'/'hot_settings'/'cache_stats', optionally 'backup')
    """
    stats = {}

    # Each section is gathered independently so one failing subsystem
    # degrades to an empty dict instead of losing the whole report.

    # Connection manager stats
    try:
        stats['connection'] = self.connection_manager.get_performance_stats()
    except Exception as e:
        self.logger.warning(f"Failed to get connection stats: {e}")
        stats['connection'] = {}

    # Performance monitor stats
    if self.performance_monitor:
        try:
            monitor_stats = self.performance_monitor.get_performance_stats()
            stats['monitor'] = {
                'total_queries': monitor_stats.total_queries,
                'avg_execution_time': monitor_stats.avg_execution_time,
                'cache_hit_rate': monitor_stats.cache_hit_rate,
                'queries_per_second': monitor_stats.queries_per_second,
                'memory_usage_mb': monitor_stats.memory_usage_mb,
                'slow_queries_count': len(monitor_stats.slow_queries)
            }
            stats['hot_settings'] = self.performance_monitor.get_hot_settings(10)
            stats['cache_stats'] = self.performance_monitor.get_cache_stats()
        except Exception as e:
            self.logger.warning(f"Failed to get monitor stats: {e}")
            stats['monitor'] = {}

    # Backup manager stats
    if self.backup_manager:
        try:
            stats['backup'] = self.backup_manager.get_backup_info()
        except Exception as e:
            self.logger.warning(f"Failed to get backup stats: {e}")
            stats['backup'] = {}

    return stats
|
|
1330
|
-
|
|
1331
|
-
def optimize_performance(self) -> Dict[str, Any]:
|
|
1332
|
-
"""
|
|
1333
|
-
Perform comprehensive performance optimization.
|
|
1334
|
-
|
|
1335
|
-
Returns:
|
|
1336
|
-
Dictionary with optimization results
|
|
1337
|
-
"""
|
|
1338
|
-
results = {
|
|
1339
|
-
'database_optimization': [],
|
|
1340
|
-
'cache_optimization': [],
|
|
1341
|
-
'backup_optimization': [],
|
|
1342
|
-
'errors': []
|
|
1343
|
-
}
|
|
1344
|
-
|
|
1345
|
-
try:
|
|
1346
|
-
# Database optimization
|
|
1347
|
-
db_actions = self.connection_manager.optimize_database()
|
|
1348
|
-
results['database_optimization'] = db_actions
|
|
1349
|
-
|
|
1350
|
-
# Cache optimization
|
|
1351
|
-
if self.performance_monitor:
|
|
1352
|
-
# Clear cache if hit rate is low
|
|
1353
|
-
cache_stats = self.performance_monitor.get_cache_stats()
|
|
1354
|
-
if cache_stats.get('hit_rate_percent', 0) < 20:
|
|
1355
|
-
self.performance_monitor.clear_cache()
|
|
1356
|
-
results['cache_optimization'].append("Cleared low-performing cache")
|
|
1357
|
-
|
|
1358
|
-
# Suggest hot settings for caching
|
|
1359
|
-
hot_settings = self.performance_monitor.get_hot_settings(5)
|
|
1360
|
-
if hot_settings:
|
|
1361
|
-
results['cache_optimization'].append(
|
|
1362
|
-
f"Hot settings identified: {[s[0] for s in hot_settings]}"
|
|
1363
|
-
)
|
|
1364
|
-
|
|
1365
|
-
# Backup optimization
|
|
1366
|
-
if self.backup_manager:
|
|
1367
|
-
# Trigger backup if many changes
|
|
1368
|
-
if self.backup_manager.changes_since_backup > 50:
|
|
1369
|
-
from .backup_manager import BackupTrigger
|
|
1370
|
-
backup_info = self.backup_manager.backup_database(
|
|
1371
|
-
self.connection_manager,
|
|
1372
|
-
trigger=BackupTrigger.MANUAL
|
|
1373
|
-
)
|
|
1374
|
-
if backup_info:
|
|
1375
|
-
results['backup_optimization'].append("Created optimization backup")
|
|
1376
|
-
|
|
1377
|
-
except Exception as e:
|
|
1378
|
-
results['errors'].append(f"Optimization error: {e}")
|
|
1379
|
-
self.logger.error(f"Performance optimization failed: {e}")
|
|
1380
|
-
|
|
1381
|
-
return results
|
|
1382
|
-
|
|
1383
|
-
def export_performance_report(self, filepath: str) -> bool:
|
|
1384
|
-
"""
|
|
1385
|
-
Export comprehensive performance report.
|
|
1386
|
-
|
|
1387
|
-
Args:
|
|
1388
|
-
filepath: Target file path
|
|
1389
|
-
|
|
1390
|
-
Returns:
|
|
1391
|
-
True if export successful
|
|
1392
|
-
"""
|
|
1393
|
-
try:
|
|
1394
|
-
report_data = {
|
|
1395
|
-
'report_timestamp': datetime.now().isoformat(),
|
|
1396
|
-
'database_info': {
|
|
1397
|
-
'db_path': self.db_path,
|
|
1398
|
-
'backup_path': self.backup_path,
|
|
1399
|
-
'performance_monitoring_enabled': self.enable_performance_monitoring,
|
|
1400
|
-
'auto_backup_enabled': self.enable_auto_backup
|
|
1401
|
-
},
|
|
1402
|
-
'performance_stats': self.get_performance_stats(),
|
|
1403
|
-
'optimization_suggestions': []
|
|
1404
|
-
}
|
|
1405
|
-
|
|
1406
|
-
# Add optimization suggestions
|
|
1407
|
-
if self.performance_monitor:
|
|
1408
|
-
try:
|
|
1409
|
-
suggestions = self.performance_monitor.optimize_indexes(self.connection_manager)
|
|
1410
|
-
report_data['optimization_suggestions'] = suggestions
|
|
1411
|
-
except Exception as e:
|
|
1412
|
-
self.logger.warning(f"Failed to get optimization suggestions: {e}")
|
|
1413
|
-
|
|
1414
|
-
# Export performance monitor metrics if available
|
|
1415
|
-
if self.performance_monitor:
|
|
1416
|
-
try:
|
|
1417
|
-
monitor_export_path = filepath.replace('.json', '_monitor_metrics.json')
|
|
1418
|
-
self.performance_monitor.export_metrics(monitor_export_path)
|
|
1419
|
-
report_data['monitor_metrics_file'] = monitor_export_path
|
|
1420
|
-
except Exception as e:
|
|
1421
|
-
self.logger.warning(f"Failed to export monitor metrics: {e}")
|
|
1422
|
-
|
|
1423
|
-
# Export backup report if available
|
|
1424
|
-
if self.backup_manager:
|
|
1425
|
-
try:
|
|
1426
|
-
backup_export_path = filepath.replace('.json', '_backup_report.json')
|
|
1427
|
-
self.backup_manager.export_backup_report(backup_export_path)
|
|
1428
|
-
report_data['backup_report_file'] = backup_export_path
|
|
1429
|
-
except Exception as e:
|
|
1430
|
-
self.logger.warning(f"Failed to export backup report: {e}")
|
|
1431
|
-
|
|
1432
|
-
with open(filepath, 'w') as f:
|
|
1433
|
-
json.dump(report_data, f, indent=2, default=str)
|
|
1434
|
-
|
|
1435
|
-
self.logger.info(f"Performance report exported to {filepath}")
|
|
1436
|
-
return True
|
|
1437
|
-
|
|
1438
|
-
except Exception as e:
|
|
1439
|
-
self.logger.error(f"Failed to export performance report: {e}")
|
|
1440
|
-
return False
|
|
1441
|
-
|
|
1442
|
-
def set_performance_config(self, config: Dict[str, Any]) -> None:
|
|
1443
|
-
"""
|
|
1444
|
-
Update performance configuration.
|
|
1445
|
-
|
|
1446
|
-
Args:
|
|
1447
|
-
config: Configuration dictionary with performance settings
|
|
1448
|
-
"""
|
|
1449
|
-
try:
|
|
1450
|
-
# Update connection manager settings
|
|
1451
|
-
if 'slow_query_threshold' in config:
|
|
1452
|
-
self.connection_manager.set_slow_query_threshold(config['slow_query_threshold'])
|
|
1453
|
-
|
|
1454
|
-
# Update backup manager settings
|
|
1455
|
-
if self.backup_manager:
|
|
1456
|
-
if 'backup_interval' in config:
|
|
1457
|
-
self.backup_manager.set_backup_interval(config['backup_interval'])
|
|
1458
|
-
if 'change_threshold' in config:
|
|
1459
|
-
self.backup_manager.set_change_threshold(config['change_threshold'])
|
|
1460
|
-
|
|
1461
|
-
# Update performance monitor settings
|
|
1462
|
-
if self.performance_monitor and 'cache_size' in config:
|
|
1463
|
-
# Clear and recreate cache with new size
|
|
1464
|
-
self.performance_monitor.clear_cache()
|
|
1465
|
-
# Note: Cache size change requires reinitializing the monitor
|
|
1466
|
-
|
|
1467
|
-
except Exception as e:
|
|
1468
|
-
self.logger.error(f"Failed to update performance config: {e}")
|
|
1469
|
-
|
|
1470
|
-
def get_memory_usage(self) -> Dict[str, float]:
|
|
1471
|
-
"""
|
|
1472
|
-
Get current memory usage statistics.
|
|
1473
|
-
|
|
1474
|
-
Returns:
|
|
1475
|
-
Dictionary with memory usage in MB
|
|
1476
|
-
"""
|
|
1477
|
-
if self.performance_monitor:
|
|
1478
|
-
return self.performance_monitor.get_memory_trend()
|
|
1479
|
-
return {'current': 0.0, 'average': 0.0, 'peak': 0.0}
|
|
1480
|
-
|
|
1481
|
-
def clear_performance_data(self) -> None:
|
|
1482
|
-
"""Clear all performance monitoring data."""
|
|
1483
|
-
try:
|
|
1484
|
-
self.connection_manager.clear_performance_data()
|
|
1485
|
-
|
|
1486
|
-
if self.performance_monitor:
|
|
1487
|
-
self.performance_monitor.reset_metrics()
|
|
1488
|
-
|
|
1489
|
-
except Exception as e:
|
|
1490
|
-
self.logger.error(f"Failed to clear performance data: {e}")
|
|
1491
|
-
|
|
1492
|
-
|
|
1493
|
-
# Convenience function for creating settings manager instance
|
|
1494
|
-
def create_settings_manager(db_path: str = ":memory:",
|
|
1495
|
-
backup_path: Optional[str] = None,
|
|
1496
|
-
json_settings_path: str = "settings.json") -> DatabaseSettingsManager:
|
|
1497
|
-
"""
|
|
1498
|
-
Create a DatabaseSettingsManager instance with standard configuration.
|
|
1499
|
-
|
|
1500
|
-
Args:
|
|
1501
|
-
db_path: Path to SQLite database file
|
|
1502
|
-
backup_path: Path for automatic backups
|
|
1503
|
-
json_settings_path: Path to JSON settings file for migration
|
|
1504
|
-
|
|
1505
|
-
Returns:
|
|
1506
|
-
Configured DatabaseSettingsManager instance
|
|
1507
|
-
"""
|
|
1
|
+
"""
|
|
2
|
+
Database Settings Manager for Settings Migration
|
|
3
|
+
|
|
4
|
+
This module provides a drop-in replacement for the current JSON-based settings system
|
|
5
|
+
with a database backend. It maintains full backward compatibility with existing code
|
|
6
|
+
while providing better concurrency handling and data integrity.
|
|
7
|
+
|
|
8
|
+
The DatabaseSettingsManager maintains identical API signatures to the current system,
|
|
9
|
+
ensuring zero code changes are required in existing tools.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import sqlite3
|
|
14
|
+
import logging
|
|
15
|
+
import threading
|
|
16
|
+
from typing import Dict, List, Tuple, Any, Optional, Union
|
|
17
|
+
from datetime import datetime
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
|
|
20
|
+
from .database_connection_manager import DatabaseConnectionManager
|
|
21
|
+
from .database_schema_manager import DatabaseSchemaManager
|
|
22
|
+
from .migration_manager import MigrationManager
|
|
23
|
+
from .error_handler import get_error_handler, ErrorCategory, ErrorSeverity
|
|
24
|
+
from .data_validator import DataValidator
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class NestedSettingsProxy:
|
|
28
|
+
"""
|
|
29
|
+
Proxy for nested dictionary access that updates the database when modified.
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
def __init__(self, settings_manager: 'DatabaseSettingsManager', parent_key: str, data: Dict[str, Any]):
|
|
33
|
+
"""
|
|
34
|
+
Initialize nested settings proxy.
|
|
35
|
+
|
|
36
|
+
Args:
|
|
37
|
+
settings_manager: DatabaseSettingsManager instance
|
|
38
|
+
parent_key: Parent key path (e.g., "tool_settings")
|
|
39
|
+
data: Dictionary data for this level
|
|
40
|
+
"""
|
|
41
|
+
self.settings_manager = settings_manager
|
|
42
|
+
self.parent_key = parent_key
|
|
43
|
+
self._data = data.copy()
|
|
44
|
+
|
|
45
|
+
def __getitem__(self, key: str) -> Any:
|
|
46
|
+
"""Handle nested access like settings["tool_settings"]["Tool Name"]."""
|
|
47
|
+
if key not in self._data:
|
|
48
|
+
# For tool_settings, create empty tool settings when accessed
|
|
49
|
+
if self.parent_key == "tool_settings":
|
|
50
|
+
# Initialize empty tool settings
|
|
51
|
+
self._data[key] = {}
|
|
52
|
+
# Also save to database
|
|
53
|
+
self.settings_manager.set_tool_setting(key, "initialized", True)
|
|
54
|
+
else:
|
|
55
|
+
raise KeyError(f"Key '{key}' not found in {self.parent_key}")
|
|
56
|
+
|
|
57
|
+
value = self._data[key]
|
|
58
|
+
|
|
59
|
+
# Return nested proxy for further nesting
|
|
60
|
+
if isinstance(value, dict):
|
|
61
|
+
nested_key = f"{self.parent_key}.{key}" if self.parent_key else key
|
|
62
|
+
return NestedSettingsProxy(self.settings_manager, nested_key, value)
|
|
63
|
+
|
|
64
|
+
return value
|
|
65
|
+
|
|
66
|
+
def __setitem__(self, key: str, value: Any) -> None:
|
|
67
|
+
"""Handle nested assignment like settings["tool_settings"]["Tool Name"] = {...}."""
|
|
68
|
+
self._data[key] = value
|
|
69
|
+
|
|
70
|
+
# Update the full parent structure in database
|
|
71
|
+
# Get the current full structure and update it
|
|
72
|
+
current_settings = self.settings_manager._load_all_settings()
|
|
73
|
+
self._update_nested_value(current_settings, self.parent_key, self._data)
|
|
74
|
+
self.settings_manager.save_settings(current_settings)
|
|
75
|
+
|
|
76
|
+
# Invalidate cache
|
|
77
|
+
self.settings_manager._settings_proxy._invalidate_cache()
|
|
78
|
+
|
|
79
|
+
def __contains__(self, key: str) -> bool:
|
|
80
|
+
"""Handle 'key' in nested_settings checks."""
|
|
81
|
+
return key in self._data
|
|
82
|
+
|
|
83
|
+
def __iter__(self):
|
|
84
|
+
"""Handle iteration over nested keys."""
|
|
85
|
+
return iter(self._data)
|
|
86
|
+
|
|
87
|
+
def __len__(self) -> int:
|
|
88
|
+
"""Handle len(nested_settings) calls."""
|
|
89
|
+
return len(self._data)
|
|
90
|
+
|
|
91
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
92
|
+
"""Handle nested_settings.get("key", default) calls."""
|
|
93
|
+
if key not in self._data and self.parent_key == "tool_settings":
|
|
94
|
+
# For tool_settings, create empty tool settings when accessed via get()
|
|
95
|
+
self._data[key] = {}
|
|
96
|
+
# Also save to database
|
|
97
|
+
self.settings_manager.set_tool_setting(key, "initialized", True)
|
|
98
|
+
|
|
99
|
+
value = self._data.get(key, default)
|
|
100
|
+
|
|
101
|
+
if isinstance(value, dict) and value is not default:
|
|
102
|
+
nested_key = f"{self.parent_key}.{key}" if self.parent_key else key
|
|
103
|
+
return NestedSettingsProxy(self.settings_manager, nested_key, value)
|
|
104
|
+
|
|
105
|
+
return value
|
|
106
|
+
|
|
107
|
+
def update(self, other: Dict[str, Any]) -> None:
|
|
108
|
+
"""Handle nested_settings.update(dict) calls."""
|
|
109
|
+
self._data.update(other)
|
|
110
|
+
|
|
111
|
+
# Update the full parent structure in database
|
|
112
|
+
current_settings = self.settings_manager._load_all_settings()
|
|
113
|
+
self._update_nested_value(current_settings, self.parent_key, self._data)
|
|
114
|
+
self.settings_manager.save_settings(current_settings)
|
|
115
|
+
|
|
116
|
+
# Invalidate cache
|
|
117
|
+
self.settings_manager._settings_proxy._invalidate_cache()
|
|
118
|
+
|
|
119
|
+
def keys(self):
|
|
120
|
+
"""Return all available keys."""
|
|
121
|
+
return self._data.keys()
|
|
122
|
+
|
|
123
|
+
def values(self):
|
|
124
|
+
"""Return all values."""
|
|
125
|
+
return self._data.values()
|
|
126
|
+
|
|
127
|
+
def items(self):
|
|
128
|
+
"""Return all key-value pairs."""
|
|
129
|
+
return self._data.items()
|
|
130
|
+
|
|
131
|
+
def copy(self) -> Dict[str, Any]:
|
|
132
|
+
"""Return a copy of the underlying data as a regular dictionary."""
|
|
133
|
+
return self._data.copy()
|
|
134
|
+
|
|
135
|
+
def _update_nested_value(self, data: Dict[str, Any], path: str, value: Any) -> None:
|
|
136
|
+
"""Update value in nested dictionary using dot notation."""
|
|
137
|
+
keys = path.split('.')
|
|
138
|
+
current = data
|
|
139
|
+
|
|
140
|
+
# Navigate to the parent of the target key
|
|
141
|
+
for key in keys[:-1]:
|
|
142
|
+
if key not in current:
|
|
143
|
+
current[key] = {}
|
|
144
|
+
current = current[key]
|
|
145
|
+
|
|
146
|
+
# Set the final value
|
|
147
|
+
current[keys[-1]] = value
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
class SettingsDictProxy:
|
|
151
|
+
"""
|
|
152
|
+
Provides dictionary-like interface that transparently accesses database.
|
|
153
|
+
Allows existing code like self.settings["key"] to work unchanged.
|
|
154
|
+
"""
|
|
155
|
+
|
|
156
|
+
def __init__(self, settings_manager: 'DatabaseSettingsManager'):
|
|
157
|
+
"""
|
|
158
|
+
Initialize the settings dictionary proxy.
|
|
159
|
+
|
|
160
|
+
Args:
|
|
161
|
+
settings_manager: DatabaseSettingsManager instance
|
|
162
|
+
"""
|
|
163
|
+
self.settings_manager = settings_manager
|
|
164
|
+
self._cache = {}
|
|
165
|
+
self._cache_dirty = True
|
|
166
|
+
self._lock = threading.RLock()
|
|
167
|
+
|
|
168
|
+
def _refresh_cache(self) -> None:
|
|
169
|
+
"""Refresh the internal cache from database."""
|
|
170
|
+
with self._lock:
|
|
171
|
+
if self._cache_dirty:
|
|
172
|
+
self._cache = self.settings_manager._load_all_settings()
|
|
173
|
+
self._cache_dirty = False
|
|
174
|
+
|
|
175
|
+
def _invalidate_cache(self) -> None:
|
|
176
|
+
"""Mark cache as dirty to force refresh on next access."""
|
|
177
|
+
with self._lock:
|
|
178
|
+
self._cache_dirty = True
|
|
179
|
+
|
|
180
|
+
def __getitem__(self, key: str) -> Any:
|
|
181
|
+
"""Handle self.settings["key"] access."""
|
|
182
|
+
self._refresh_cache()
|
|
183
|
+
if key not in self._cache:
|
|
184
|
+
# For tool_settings, initialize empty dictionary
|
|
185
|
+
if key == "tool_settings":
|
|
186
|
+
self._cache[key] = {}
|
|
187
|
+
# Save to database
|
|
188
|
+
self.settings_manager.set_setting(key, {})
|
|
189
|
+
else:
|
|
190
|
+
raise KeyError(f"Setting key '{key}' not found")
|
|
191
|
+
|
|
192
|
+
value = self._cache[key]
|
|
193
|
+
|
|
194
|
+
# Return nested proxy for dictionaries to enable nested assignment
|
|
195
|
+
if isinstance(value, dict):
|
|
196
|
+
return NestedSettingsProxy(self.settings_manager, key, value)
|
|
197
|
+
|
|
198
|
+
return value
|
|
199
|
+
|
|
200
|
+
def __setitem__(self, key: str, value: Any) -> None:
|
|
201
|
+
"""Handle self.settings["key"] = value assignment."""
|
|
202
|
+
self.settings_manager.set_setting(key, value)
|
|
203
|
+
self._invalidate_cache()
|
|
204
|
+
|
|
205
|
+
def __contains__(self, key: str) -> bool:
|
|
206
|
+
"""Handle 'key' in self.settings checks."""
|
|
207
|
+
self._refresh_cache()
|
|
208
|
+
return key in self._cache
|
|
209
|
+
|
|
210
|
+
def __iter__(self):
|
|
211
|
+
"""Handle iteration over settings keys."""
|
|
212
|
+
self._refresh_cache()
|
|
213
|
+
return iter(self._cache)
|
|
214
|
+
|
|
215
|
+
def __len__(self) -> int:
|
|
216
|
+
"""Handle len(self.settings) calls."""
|
|
217
|
+
self._refresh_cache()
|
|
218
|
+
return len(self._cache)
|
|
219
|
+
|
|
220
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
221
|
+
"""Handle self.settings.get("key", default) calls."""
|
|
222
|
+
self._refresh_cache()
|
|
223
|
+
if key not in self._cache and key == "tool_settings":
|
|
224
|
+
# Initialize empty tool_settings if not found
|
|
225
|
+
self._cache[key] = {}
|
|
226
|
+
self.settings_manager.set_setting(key, {})
|
|
227
|
+
|
|
228
|
+
value = self._cache.get(key, default)
|
|
229
|
+
|
|
230
|
+
# Return nested proxy for dictionaries
|
|
231
|
+
if isinstance(value, dict) and value is not default:
|
|
232
|
+
return NestedSettingsProxy(self.settings_manager, key, value)
|
|
233
|
+
|
|
234
|
+
return value
|
|
235
|
+
|
|
236
|
+
def update(self, other: Dict[str, Any]) -> None:
|
|
237
|
+
"""Handle self.settings.update(dict) calls."""
|
|
238
|
+
self.settings_manager.bulk_update_settings(other)
|
|
239
|
+
self._invalidate_cache()
|
|
240
|
+
|
|
241
|
+
def keys(self):
|
|
242
|
+
"""Return all available setting keys."""
|
|
243
|
+
self._refresh_cache()
|
|
244
|
+
return self._cache.keys()
|
|
245
|
+
|
|
246
|
+
def values(self):
|
|
247
|
+
"""Return all setting values."""
|
|
248
|
+
self._refresh_cache()
|
|
249
|
+
return self._cache.values()
|
|
250
|
+
|
|
251
|
+
def items(self):
|
|
252
|
+
"""Return all key-value pairs."""
|
|
253
|
+
self._refresh_cache()
|
|
254
|
+
return self._cache.items()
|
|
255
|
+
|
|
256
|
+
def pop(self, key: str, default=None):
|
|
257
|
+
"""Remove and return a setting value."""
|
|
258
|
+
try:
|
|
259
|
+
value = self[key]
|
|
260
|
+
self.settings_manager._delete_setting(key)
|
|
261
|
+
self._invalidate_cache()
|
|
262
|
+
return value
|
|
263
|
+
except KeyError:
|
|
264
|
+
if default is not None:
|
|
265
|
+
return default
|
|
266
|
+
raise
|
|
267
|
+
|
|
268
|
+
def setdefault(self, key: str, default: Any = None) -> Any:
|
|
269
|
+
"""Get setting value or set and return default if not exists."""
|
|
270
|
+
try:
|
|
271
|
+
return self[key]
|
|
272
|
+
except KeyError:
|
|
273
|
+
self[key] = default
|
|
274
|
+
return default
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
class DatabaseSettingsManager:
|
|
278
|
+
"""
|
|
279
|
+
Drop-in replacement for the current settings system with database backend.
|
|
280
|
+
|
|
281
|
+
Maintains full backward compatibility with existing code while providing:
|
|
282
|
+
- Better concurrency handling through SQLite WAL mode
|
|
283
|
+
- Data integrity through ACID transactions
|
|
284
|
+
- Automatic backup and recovery
|
|
285
|
+
- Enhanced error handling
|
|
286
|
+
|
|
287
|
+
All existing method signatures are preserved to ensure zero code changes
|
|
288
|
+
are required in existing tools.
|
|
289
|
+
"""
|
|
290
|
+
|
|
291
|
+
def __init__(self, db_path: str = ":memory:", backup_path: Optional[str] = None,
|
|
292
|
+
json_settings_path: str = "settings.json",
|
|
293
|
+
enable_performance_monitoring: bool = True,
|
|
294
|
+
enable_auto_backup: bool = True,
|
|
295
|
+
backup_interval: int = 300):
|
|
296
|
+
"""
|
|
297
|
+
Initialize the database settings manager.
|
|
298
|
+
|
|
299
|
+
Args:
|
|
300
|
+
db_path: Path to SQLite database file (":memory:" for in-memory)
|
|
301
|
+
backup_path: Path for automatic backups
|
|
302
|
+
json_settings_path: Path to JSON settings file for migration
|
|
303
|
+
enable_performance_monitoring: Whether to enable performance monitoring
|
|
304
|
+
enable_auto_backup: Whether to enable automatic backups
|
|
305
|
+
backup_interval: Automatic backup interval in seconds
|
|
306
|
+
"""
|
|
307
|
+
self.db_path = db_path
|
|
308
|
+
self.backup_path = backup_path or "settings_backup.db"
|
|
309
|
+
self.json_settings_path = json_settings_path
|
|
310
|
+
self.enable_performance_monitoring = enable_performance_monitoring
|
|
311
|
+
self.enable_auto_backup = enable_auto_backup
|
|
312
|
+
self.logger = logging.getLogger(__name__)
|
|
313
|
+
|
|
314
|
+
# Initialize error handler
|
|
315
|
+
self.error_handler = get_error_handler()
|
|
316
|
+
|
|
317
|
+
# Initialize database components with error handling
|
|
318
|
+
try:
|
|
319
|
+
self.connection_manager = DatabaseConnectionManager(
|
|
320
|
+
db_path, backup_path, enable_performance_monitoring
|
|
321
|
+
)
|
|
322
|
+
self.schema_manager = DatabaseSchemaManager(self.connection_manager)
|
|
323
|
+
self.migration_manager = MigrationManager(self.connection_manager)
|
|
324
|
+
self.data_validator = DataValidator(self.connection_manager, self.schema_manager)
|
|
325
|
+
except Exception as e:
|
|
326
|
+
self.error_handler.handle_error(
|
|
327
|
+
ErrorCategory.DATABASE_CONNECTION,
|
|
328
|
+
f"Failed to initialize database components: {e}",
|
|
329
|
+
exception=e
|
|
330
|
+
)
|
|
331
|
+
|
|
332
|
+
# Initialize settings integrity validator (always available)
|
|
333
|
+
self.settings_integrity_validator = None
|
|
334
|
+
try:
|
|
335
|
+
from .settings_integrity_validator import SettingsIntegrityValidator
|
|
336
|
+
self.settings_integrity_validator = SettingsIntegrityValidator()
|
|
337
|
+
except ImportError:
|
|
338
|
+
self.logger.warning("Settings integrity validator not available")
|
|
339
|
+
|
|
340
|
+
# Initialize backup and recovery manager
|
|
341
|
+
self.backup_recovery_manager = None
|
|
342
|
+
if enable_auto_backup:
|
|
343
|
+
try:
|
|
344
|
+
from .backup_recovery_manager import BackupRecoveryManager
|
|
345
|
+
|
|
346
|
+
backup_dir = Path(backup_path).parent / "backups" if backup_path else "backups"
|
|
347
|
+
self.backup_recovery_manager = BackupRecoveryManager(
|
|
348
|
+
backup_dir=str(backup_dir),
|
|
349
|
+
auto_backup_interval=backup_interval,
|
|
350
|
+
enable_compression=True
|
|
351
|
+
)
|
|
352
|
+
|
|
353
|
+
# Start automatic backup
|
|
354
|
+
self.backup_recovery_manager.start_auto_backup(
|
|
355
|
+
self.connection_manager, self
|
|
356
|
+
)
|
|
357
|
+
|
|
358
|
+
except ImportError:
|
|
359
|
+
self.logger.warning("Backup/recovery manager not available")
|
|
360
|
+
|
|
361
|
+
# Initialize performance monitoring
|
|
362
|
+
self.performance_monitor = None
|
|
363
|
+
if enable_performance_monitoring:
|
|
364
|
+
try:
|
|
365
|
+
from .performance_monitor import get_performance_monitor
|
|
366
|
+
self.performance_monitor = get_performance_monitor()
|
|
367
|
+
except ImportError:
|
|
368
|
+
self.logger.warning("Performance monitoring not available")
|
|
369
|
+
|
|
370
|
+
# Disable strict validation for now to handle default settings differences
|
|
371
|
+
self.migration_manager._strict_validation = False
|
|
372
|
+
|
|
373
|
+
# Settings proxy for dictionary-like access
|
|
374
|
+
self._settings_proxy = SettingsDictProxy(self)
|
|
375
|
+
|
|
376
|
+
# Internal state
|
|
377
|
+
self._initialized = False
|
|
378
|
+
self._lock = threading.RLock()
|
|
379
|
+
self._default_settings_provider = None
|
|
380
|
+
|
|
381
|
+
# Initialize database schema
|
|
382
|
+
self._initialize_database()
|
|
383
|
+
|
|
384
|
+
# Migrate from JSON if exists and database is empty
|
|
385
|
+
self._migrate_from_json_if_needed()
|
|
386
|
+
|
|
387
|
+
def set_default_settings_provider(self, provider_func):
|
|
388
|
+
"""
|
|
389
|
+
Set a function that provides default settings.
|
|
390
|
+
|
|
391
|
+
Args:
|
|
392
|
+
provider_func: Function that returns default settings dictionary
|
|
393
|
+
"""
|
|
394
|
+
self._default_settings_provider = provider_func
|
|
395
|
+
|
|
396
|
+
def _initialize_database(self) -> None:
|
|
397
|
+
"""Initialize database schema and validate structure with error handling."""
|
|
398
|
+
try:
|
|
399
|
+
# Initialize schema
|
|
400
|
+
if not self.schema_manager.initialize_schema():
|
|
401
|
+
self.error_handler.handle_error(
|
|
402
|
+
ErrorCategory.DATABASE_CORRUPTION,
|
|
403
|
+
"Failed to initialize database schema"
|
|
404
|
+
)
|
|
405
|
+
if not self.error_handler.is_fallback_mode():
|
|
406
|
+
raise RuntimeError("Failed to initialize database schema")
|
|
407
|
+
return
|
|
408
|
+
|
|
409
|
+
# Validate schema
|
|
410
|
+
if not self.schema_manager.validate_schema():
|
|
411
|
+
self.logger.warning("Schema validation failed, attempting repair")
|
|
412
|
+
if not self.schema_manager.repair_schema():
|
|
413
|
+
self.error_handler.handle_error(
|
|
414
|
+
ErrorCategory.DATABASE_CORRUPTION,
|
|
415
|
+
"Failed to repair database schema"
|
|
416
|
+
)
|
|
417
|
+
if not self.error_handler.is_fallback_mode():
|
|
418
|
+
raise RuntimeError("Failed to repair database schema")
|
|
419
|
+
return
|
|
420
|
+
|
|
421
|
+
# Perform comprehensive data validation
|
|
422
|
+
validation_issues = self.data_validator.validate_database(fix_issues=True)
|
|
423
|
+
if validation_issues:
|
|
424
|
+
critical_issues = [i for i in validation_issues if i.severity == ErrorSeverity.CRITICAL]
|
|
425
|
+
if critical_issues:
|
|
426
|
+
self.error_handler.handle_error(
|
|
427
|
+
ErrorCategory.DATA_VALIDATION,
|
|
428
|
+
f"Critical data validation issues found: {len(critical_issues)}",
|
|
429
|
+
context={'issues': [i.message for i in critical_issues]}
|
|
430
|
+
)
|
|
431
|
+
|
|
432
|
+
self._initialized = True
|
|
433
|
+
self.logger.info("Database settings manager initialized successfully")
|
|
434
|
+
|
|
435
|
+
except Exception as e:
|
|
436
|
+
self.error_handler.handle_error(
|
|
437
|
+
ErrorCategory.DATABASE_CONNECTION,
|
|
438
|
+
f"Database initialization failed: {e}",
|
|
439
|
+
exception=e
|
|
440
|
+
)
|
|
441
|
+
if not self.error_handler.is_fallback_mode():
|
|
442
|
+
raise
|
|
443
|
+
|
|
444
|
+
def _migrate_from_json_if_needed(self) -> None:
|
|
445
|
+
"""Migrate from JSON settings file if it exists and database is empty."""
|
|
446
|
+
try:
|
|
447
|
+
# Check if database already has data
|
|
448
|
+
conn = self.connection_manager.get_connection()
|
|
449
|
+
cursor = conn.execute("SELECT COUNT(*) FROM core_settings")
|
|
450
|
+
count = cursor.fetchone()[0]
|
|
451
|
+
|
|
452
|
+
if count > 0:
|
|
453
|
+
self.logger.info("Database already contains settings, skipping migration")
|
|
454
|
+
return
|
|
455
|
+
|
|
456
|
+
# Check if JSON file exists
|
|
457
|
+
if Path(self.json_settings_path).exists():
|
|
458
|
+
# Perform migration
|
|
459
|
+
self.logger.info(f"Migrating settings from {self.json_settings_path}")
|
|
460
|
+
if self.migration_manager.migrate_from_json(self.json_settings_path):
|
|
461
|
+
self.logger.info("JSON to database migration completed successfully")
|
|
462
|
+
return
|
|
463
|
+
else:
|
|
464
|
+
self.logger.warning("Migration failed, using default settings")
|
|
465
|
+
else:
|
|
466
|
+
self.logger.info("No JSON settings file found, using defaults")
|
|
467
|
+
|
|
468
|
+
# Always populate defaults if database is empty
|
|
469
|
+
self._populate_default_settings()
|
|
470
|
+
|
|
471
|
+
except Exception as e:
|
|
472
|
+
self.logger.error(f"Migration from JSON failed: {e}")
|
|
473
|
+
self._populate_default_settings()
|
|
474
|
+
|
|
475
|
+
def _populate_default_settings(self) -> None:
|
|
476
|
+
"""Populate database with default settings if empty."""
|
|
477
|
+
try:
|
|
478
|
+
default_settings = self._get_minimal_default_settings()
|
|
479
|
+
|
|
480
|
+
# Use migration manager to populate database
|
|
481
|
+
self.migration_manager._migrate_json_to_database(default_settings)
|
|
482
|
+
self.logger.info("Default settings populated in database")
|
|
483
|
+
|
|
484
|
+
except Exception as e:
|
|
485
|
+
self.error_handler.handle_error(
|
|
486
|
+
ErrorCategory.DATABASE_CONNECTION,
|
|
487
|
+
f"Failed to populate default settings: {e}",
|
|
488
|
+
exception=e
|
|
489
|
+
)
|
|
490
|
+
|
|
491
|
+
def _get_minimal_default_settings(self) -> Dict[str, Any]:
|
|
492
|
+
"""Get minimal default settings for emergency fallback.
|
|
493
|
+
|
|
494
|
+
Uses the centralized Settings Defaults Registry if available,
|
|
495
|
+
otherwise falls back to the provided default settings provider
|
|
496
|
+
or hardcoded minimal defaults.
|
|
497
|
+
"""
|
|
498
|
+
# Try to use the centralized Settings Defaults Registry first
|
|
499
|
+
try:
|
|
500
|
+
from .settings_defaults_registry import get_registry
|
|
501
|
+
registry = get_registry()
|
|
502
|
+
return registry.get_all_defaults(tab_count=7)
|
|
503
|
+
except ImportError:
|
|
504
|
+
self.logger.debug("Settings Defaults Registry not available")
|
|
505
|
+
except Exception as e:
|
|
506
|
+
self.logger.warning(f"Failed to get defaults from registry: {e}")
|
|
507
|
+
|
|
508
|
+
# Use provided default settings provider if available
|
|
509
|
+
if self._default_settings_provider:
|
|
510
|
+
try:
|
|
511
|
+
return self._default_settings_provider()
|
|
512
|
+
except Exception as e:
|
|
513
|
+
self.logger.warning(f"Default settings provider failed: {e}")
|
|
514
|
+
|
|
515
|
+
# Fallback to minimal defaults
|
|
516
|
+
return {
|
|
517
|
+
"export_path": str(Path.home() / "Downloads"),
|
|
518
|
+
"debug_level": "INFO",
|
|
519
|
+
"selected_tool": "Case Tool",
|
|
520
|
+
"active_input_tab": 0,
|
|
521
|
+
"active_output_tab": 0,
|
|
522
|
+
"input_tabs": [""] * 7,
|
|
523
|
+
"output_tabs": [""] * 7,
|
|
524
|
+
"tool_settings": {},
|
|
525
|
+
"performance_settings": {
|
|
526
|
+
"mode": "automatic",
|
|
527
|
+
"async_processing": {"enabled": True, "threshold_kb": 10}
|
|
528
|
+
},
|
|
529
|
+
"font_settings": {
|
|
530
|
+
"text_font": {"family": "Consolas", "size": 11}
|
|
531
|
+
},
|
|
532
|
+
"dialog_settings": {
|
|
533
|
+
"error": {"enabled": True, "locked": True}
|
|
534
|
+
}
|
|
535
|
+
}
|
|
536
|
+
|
|
537
|
+
# Backward Compatible API Methods
|
|
538
|
+
|
|
539
|
+
def load_settings(self) -> Dict[str, Any]:
|
|
540
|
+
"""
|
|
541
|
+
Reconstruct the full settings dictionary from database tables.
|
|
542
|
+
|
|
543
|
+
This method maintains compatibility with the existing load_settings() API
|
|
544
|
+
while internally using the database backend with error handling.
|
|
545
|
+
|
|
546
|
+
Returns:
|
|
547
|
+
Complete settings dictionary matching JSON structure
|
|
548
|
+
"""
|
|
549
|
+
try:
|
|
550
|
+
# Check if in fallback mode
|
|
551
|
+
if self.error_handler.is_fallback_mode():
|
|
552
|
+
fallback_settings = self.error_handler.get_fallback_settings()
|
|
553
|
+
if fallback_settings:
|
|
554
|
+
return fallback_settings
|
|
555
|
+
else:
|
|
556
|
+
# Return minimal defaults if fallback fails
|
|
557
|
+
return self._get_minimal_default_settings()
|
|
558
|
+
|
|
559
|
+
with self._lock:
|
|
560
|
+
return self._load_all_settings()
|
|
561
|
+
|
|
562
|
+
except Exception as e:
|
|
563
|
+
self.error_handler.handle_error(
|
|
564
|
+
ErrorCategory.DATABASE_CONNECTION,
|
|
565
|
+
f"Failed to load settings: {e}",
|
|
566
|
+
exception=e
|
|
567
|
+
)
|
|
568
|
+
|
|
569
|
+
# Try fallback mode
|
|
570
|
+
if self.error_handler.is_fallback_mode():
|
|
571
|
+
fallback_settings = self.error_handler.get_fallback_settings()
|
|
572
|
+
if fallback_settings:
|
|
573
|
+
return fallback_settings
|
|
574
|
+
|
|
575
|
+
# Return minimal defaults as last resort
|
|
576
|
+
return self._get_minimal_default_settings()
|
|
577
|
+
|
|
578
|
+
def save_settings(self, settings_dict: Optional[Dict[str, Any]] = None) -> bool:
    """
    Parse a settings dictionary and persist it into the database tables.

    Maintains compatibility with the legacy ``save_settings()`` API.
    Data is validated before writing; in fallback mode the payload is
    handed to the error handler's fallback store instead.

    Args:
        settings_dict: Settings dictionary to save. ``None`` is a no-op
            because the database always reflects the current state.

    Returns:
        True if the save succeeded, False otherwise.
    """
    try:
        # Nothing to do: the database is the source of truth already.
        if settings_dict is None:
            return True

        # Reject payloads with critical validation problems up front.
        issues = self.data_validator.validate_settings_data(settings_dict)
        blocking = [i for i in issues if i.severity == ErrorSeverity.CRITICAL]
        if blocking:
            self.error_handler.handle_error(
                ErrorCategory.DATA_VALIDATION,
                f"Critical validation issues in settings data: {len(blocking)}",
                context={'issues': [i.message for i in blocking]}
            )
            return False

        # Fallback mode bypasses the database entirely.
        if self.error_handler.is_fallback_mode():
            return self.error_handler.save_fallback_settings(settings_dict)

        with self._lock:
            # The migration manager knows how to fan a JSON-shaped dict
            # out into the individual database tables.
            migrated = self.migration_manager._migrate_json_to_database(settings_dict)

            if not migrated:
                self.error_handler.handle_error(
                    ErrorCategory.DATABASE_CONNECTION,
                    "Failed to save settings to database"
                )
                # Handling the error may have activated fallback mode.
                if self.error_handler.is_fallback_mode():
                    return self.error_handler.save_fallback_settings(settings_dict)

            return migrated

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.DATABASE_CONNECTION,
            f"Failed to save settings: {e}",
            exception=e
        )

        # One more chance via the fallback store before reporting failure.
        if self.error_handler.is_fallback_mode():
            return self.error_handler.save_fallback_settings(settings_dict)

        return False
|
|
640
|
+
|
|
641
|
+
def get_tool_settings(self, tool_name: str) -> Dict[str, Any]:
    """
    Fetch every stored setting for one tool.

    Dotted ``setting_path`` rows are expanded back into nested dicts.
    A tool persisted as a single scalar (one row keyed ``'value'``) is
    unwrapped and returned directly instead of as a dict.

    Args:
        tool_name: Name of the tool.

    Returns:
        Dictionary of the tool's settings (or the unwrapped scalar for
        simple tools); empty dict on failure.
    """
    try:
        sql = "SELECT setting_path, setting_value, data_type FROM tool_settings WHERE tool_name = ?"
        args = (tool_name,)

        # Route through the monitored wrapper only when profiling is on.
        if self.enable_performance_monitoring:
            with self.connection_manager.monitored_query(sql, args) as conn:
                rows = conn.execute(sql, args).fetchall()
        else:
            rows = self.connection_manager.get_connection().execute(sql, args).fetchall()

        settings: Dict[str, Any] = {}
        for path, raw, dtype in rows:
            decoded = self.migration_manager.converter.deserialize_value(raw, dtype)
            if '.' in path:
                # Dotted paths were flattened on write; rebuild nesting.
                self._set_nested_value(settings, path, decoded)
            else:
                settings[path] = decoded

        # Simple tools are stored as a lone 'value' row -> unwrap it.
        if len(settings) == 1 and 'value' in settings:
            return settings['value']

        return settings

    except Exception as e:
        self.logger.error(f"Failed to get tool settings for {tool_name}: {e}")
        return {}
|
|
683
|
+
|
|
684
|
+
def set_tool_setting(self, tool_name: str, key: str, value: Any) -> None:
    """
    Store one setting for a tool (insert-or-replace semantics).

    Args:
        tool_name: Name of the tool.
        key: Setting key; dotted paths express nesting.
        value: Value to persist (serialized via the type converter).
    """
    try:
        converter = self.migration_manager.converter
        with self.connection_manager.transaction() as conn:
            conn.execute(
                "INSERT OR REPLACE INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)",
                (tool_name, key, converter.serialize_value(value), converter.python_to_db_type(value))
            )

        # Count this write toward automatic-backup triggering.
        self._record_change()

        # The dict-style proxy caches reads; drop stale entries.
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to set tool setting {tool_name}.{key}: {e}")
|
|
711
|
+
|
|
712
|
+
def get_setting(self, key: str, default: Any = None) -> Any:
    """
    Read a core application setting.

    Dotted keys are resolved against the full reconstructed settings
    tree; plain keys hit the ``core_settings`` table directly.

    Args:
        key: Setting key (dot notation allowed for nesting).
        default: Value returned when the key is absent or on error.

    Returns:
        The stored value, or ``default``.
    """
    try:
        # Nested lookups need the whole tree materialized first.
        if '.' in key:
            tree = self._load_all_settings()
            return self._get_nested_value(tree, key, default)

        sql = "SELECT value, data_type FROM core_settings WHERE key = ?"
        args = (key,)

        # Route through the monitored wrapper only when profiling is on.
        if self.enable_performance_monitoring:
            with self.connection_manager.monitored_query(sql, args) as conn:
                row = conn.execute(sql, args).fetchone()
        else:
            row = self.connection_manager.get_connection().execute(sql, args).fetchone()

        if row is None:
            return default

        raw, dtype = row
        return self.migration_manager.converter.deserialize_value(raw, dtype)

    except Exception as e:
        self.logger.error(f"Failed to get setting {key}: {e}")
        return default
|
|
751
|
+
|
|
752
|
+
def set_setting(self, key: str, value: Any) -> None:
    """
    Write a core application setting.

    Top-level structural keys and dotted keys are routed through a full
    load-modify-save cycle so the multi-table layout stays consistent;
    everything else is a direct upsert into ``core_settings``.

    Args:
        key: Setting key (dot notation allowed for nesting).
        value: Value to persist.
    """
    # Keys whose values live in dedicated tables, not core_settings.
    STRUCTURAL_KEYS = ('input_tabs', 'output_tabs', 'tool_settings',
                       'performance_settings', 'font_settings', 'dialog_settings')
    try:
        if key in STRUCTURAL_KEYS:
            # Rewrite the whole structure so the right tables are updated.
            tree = self._load_all_settings()
            tree[key] = value
            self.save_settings(tree)
            return

        if '.' in key:
            # Nested update also goes through the full structure.
            tree = self._load_all_settings()
            self._set_nested_value(tree, key, value)
            self.save_settings(tree)
            return

        converter = self.migration_manager.converter
        with self.connection_manager.transaction() as conn:
            conn.execute(
                "INSERT OR REPLACE INTO core_settings (key, value, data_type) VALUES (?, ?, ?)",
                (key, converter.serialize_value(value), converter.python_to_db_type(value))
            )

        # Count this write toward automatic-backup triggering.
        self._record_change()

        # The dict-style proxy caches reads; drop stale entries.
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to set setting {key}: {e}")
|
|
794
|
+
|
|
795
|
+
# Enhanced API Methods
|
|
796
|
+
|
|
797
|
+
def get_nested_setting(self, path: str, default: Any = None) -> Any:
    """
    Read a setting addressed with dot notation, e.g.
    ``'performance_settings.caching.enabled'``.

    Thin alias over :meth:`get_setting`, which already understands
    dotted paths.

    Args:
        path: Dot-separated path to the setting.
        default: Value returned when the path does not resolve.

    Returns:
        The stored value, or ``default``.
    """
    return self.get_setting(path, default)
|
|
809
|
+
|
|
810
|
+
def set_nested_setting(self, path: str, value: Any) -> None:
    """
    Write a setting addressed with dot notation.

    Thin alias over :meth:`set_setting`, which already understands
    dotted paths.

    Args:
        path: Dot-separated path to the setting.
        value: Value to persist.
    """
    self.set_setting(path, value)
|
|
819
|
+
|
|
820
|
+
def bulk_update_settings(self, updates: Dict[str, Any]) -> None:
    """
    Apply multiple setting updates inside a single transaction.

    ``'tool_settings'`` entries are fanned out into the tool_settings
    table; every other key is upserted into ``core_settings``.

    Args:
        updates: Mapping of setting keys to new values.
    """
    try:
        converter = self.migration_manager.converter
        with self.connection_manager.transaction() as conn:
            for key, value in updates.items():
                if key == 'tool_settings' and isinstance(value, dict):
                    # Tool settings live in their own table, per tool.
                    for tool_name, tool_config in value.items():
                        self._update_tool_settings_in_transaction(conn, tool_name, tool_config)
                else:
                    conn.execute(
                        "INSERT OR REPLACE INTO core_settings (key, value, data_type) VALUES (?, ?, ?)",
                        (key, converter.serialize_value(value), converter.python_to_db_type(value))
                    )

        # Fix: count this write toward backup triggering, consistent with
        # set_setting()/set_tool_setting() — previously bulk updates never
        # registered as changes, so they could not trigger auto-backups.
        self._record_change()

        # Invalidate proxy cache so readers see the new values.
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to bulk update settings: {e}")
|
|
849
|
+
|
|
850
|
+
def export_to_json(self, filepath: str) -> bool:
    """
    Dump the current database state to a JSON file.

    Args:
        filepath: Destination JSON file path.

    Returns:
        True on success, False otherwise.
    """
    # The migration manager owns the database<->JSON mapping.
    exported = self.migration_manager.migrate_to_json(filepath)
    return exported
|
|
861
|
+
|
|
862
|
+
def import_from_json(self, filepath: str) -> bool:
    """
    Load settings from a JSON file into the database.

    Args:
        filepath: Source JSON file path.

    Returns:
        True on success, False otherwise.
    """
    # The migration manager owns the database<->JSON mapping.
    imported = self.migration_manager.migrate_from_json(filepath)
    return imported
|
|
873
|
+
|
|
874
|
+
# Backup and Recovery Methods
|
|
875
|
+
|
|
876
|
+
def create_backup(self, backup_type: str = "manual",
                  description: Optional[str] = None) -> bool:
    """
    Create a backup of the current settings database.

    Args:
        backup_type: One of "manual", "automatic", "migration",
            "emergency"; unknown values fall back to manual.
        description: Optional human-readable note stored with the backup.

    Returns:
        True if a backup was created.
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        from .backup_recovery_manager import BackupType

        # Translate the public string API into the internal enum;
        # anything unrecognized is treated as a manual backup.
        kind = {
            "manual": BackupType.MANUAL,
            "automatic": BackupType.AUTOMATIC,
            "migration": BackupType.MIGRATION,
            "emergency": BackupType.EMERGENCY
        }.get(backup_type, BackupType.MANUAL)

        result = self.backup_recovery_manager.create_database_backup(
            self.connection_manager, kind, description
        )

        # create_database_backup returns backup info on success, None on failure.
        return result is not None

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.BACKUP,
            f"Failed to create backup: {e}",
            exception=e
        )
        return False
|
|
919
|
+
|
|
920
|
+
def restore_from_backup(self, backup_filepath: str) -> bool:
    """
    Restore settings from a previously created backup file.

    The file path must match an entry in the backup manager's history;
    restoring an unknown file is refused.

    Args:
        backup_filepath: Path to the backup file.

    Returns:
        True if the restore succeeded.
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        # Look the file up in the recorded backup history.
        history = self.backup_recovery_manager.get_backup_history()
        record = next((b for b in history if b.filepath == backup_filepath), None)

        if record is None:
            self.logger.error(f"Backup info not found for: {backup_filepath}")
            return False

        restored = self.backup_recovery_manager.restore_from_database_backup(
            record, self.connection_manager
        )

        if restored:
            # Cached reads are stale after the database was swapped out.
            self._settings_proxy._invalidate_cache()
            self.logger.info("Settings restored from backup successfully")

        return restored

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.RECOVERY,
            f"Failed to restore from backup: {e}",
            exception=e
        )
        return False
|
|
967
|
+
|
|
968
|
+
def repair_database(self) -> bool:
    """
    Attempt to repair database corruption via the backup/recovery manager.

    Requires both the backup manager and the data validator to be
    available.

    Returns:
        True if the repair succeeded.
    """
    try:
        if not self.backup_recovery_manager or not self.data_validator:
            self.logger.warning("Backup manager or data validator not available")
            return False

        repaired = self.backup_recovery_manager.repair_database(
            self.connection_manager, self.data_validator
        )

        if repaired:
            # Cached reads may reflect pre-repair data; drop them.
            self._settings_proxy._invalidate_cache()
            self.logger.info("Database repair completed successfully")

        return repaired

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.RECOVERY,
            f"Database repair failed: {e}",
            exception=e
        )
        return False
|
|
998
|
+
|
|
999
|
+
def validate_settings_integrity(self, apply_fixes: bool = False) -> Dict[str, Any]:
    """
    Validate the integrity of the stored settings.

    Args:
        apply_fixes: When True, auto-fixable issues are corrected in the
            loaded settings and the repaired tree is saved back.

    Returns:
        Validation report dictionary, or ``{"error": ...}`` on failure.
    """
    try:
        if not self.settings_integrity_validator:
            self.logger.warning("Settings integrity validator not available")
            return {"error": "Validator not available"}

        # Validation (and fixing) happens on the materialized tree.
        settings_data = self.load_settings()

        # When apply_fixes is True this mutates settings_data in place.
        issues = self.settings_integrity_validator.validate_settings_integrity(
            settings_data, apply_fixes
        )

        report = self.settings_integrity_validator.get_validation_report(issues)

        if apply_fixes and issues:
            # Persist the (possibly repaired) tree back to the database.
            self.save_settings(settings_data)
            self.logger.info(f"Applied automatic fixes for {len([i for i in issues if i.auto_fixable])} issues")

        return report

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.DATA_VALIDATION,
            f"Settings integrity validation failed: {e}",
            exception=e
        )
        return {"error": str(e)}
|
|
1039
|
+
|
|
1040
|
+
def export_settings_to_file(self, export_path: str,
                            format_type: str = "json") -> bool:
    """
    Export the current settings to an external file.

    Args:
        export_path: Destination file path.
        format_type: "json" or "compressed".

    Returns:
        True on success.
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        # Materialize the full tree, then hand it to the exporter.
        snapshot = self.load_settings()
        return self.backup_recovery_manager.export_settings(
            snapshot, export_path, format_type
        )

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.BACKUP,
            f"Failed to export settings: {e}",
            exception=e
        )
        return False
|
|
1074
|
+
|
|
1075
|
+
def import_settings_from_file(self, import_path: str) -> bool:
    """
    Import settings from an external file, validate them, and persist.

    Critically flawed payloads are rejected before any write happens.

    Args:
        import_path: Source file path.

    Returns:
        True if the import succeeded.
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return False

        payload = self.backup_recovery_manager.import_settings(import_path)
        if payload is None:
            return False

        if self.settings_integrity_validator:
            # Auto-fix what we can; anything still critical blocks the import.
            issues = self.settings_integrity_validator.validate_settings_integrity(
                payload, apply_fixes=True
            )

            # NOTE(review): severity is compared to the string 'critical'
            # here, while save_settings() compares to ErrorSeverity.CRITICAL.
            # Confirm which representation the integrity validator emits.
            blocking = [i for i in issues if i.severity == 'critical']
            if blocking:
                self.logger.error(f"Imported settings have {len(blocking)} critical issues")
                return False

        saved = self.save_settings(payload)
        if saved:
            self.logger.info("Settings imported successfully")

        return saved

    except Exception as e:
        self.error_handler.handle_error(
            ErrorCategory.MIGRATION,
            f"Failed to import settings: {e}",
            exception=e
        )
        return False
|
|
1122
|
+
|
|
1123
|
+
def get_backup_statistics(self) -> Dict[str, Any]:
    """
    Report backup statistics from the backup/recovery manager.

    Returns:
        Statistics dictionary, or ``{"error": ...}`` when unavailable.
    """
    try:
        if not self.backup_recovery_manager:
            return {"error": "Backup manager not available"}
        return self.backup_recovery_manager.get_backup_statistics()
    except Exception as e:
        self.logger.error(f"Failed to get backup statistics: {e}")
        return {"error": str(e)}
|
|
1139
|
+
|
|
1140
|
+
def cleanup_old_backups(self) -> int:
    """
    Delete backups that fall outside the retention policy.

    Returns:
        Number of backups removed (0 when the manager is unavailable
        or on error).
    """
    try:
        if not self.backup_recovery_manager:
            self.logger.warning("Backup manager not available")
            return 0
        return self.backup_recovery_manager.cleanup_old_backups()
    except Exception as e:
        self.logger.error(f"Failed to cleanup old backups: {e}")
        return 0
|
|
1157
|
+
|
|
1158
|
+
def close(self) -> None:
    """Shut down the settings manager and release its resources."""
    try:
        # Stop the automatic-backup thread first so it cannot touch
        # connections we are about to close.
        if self.backup_recovery_manager:
            self.backup_recovery_manager.stop_auto_backup()

        if self.connection_manager:
            self.connection_manager.close_all_connections()

        self.logger.info("Database settings manager closed")

    except Exception as e:
        self.logger.error(f"Error closing settings manager: {e}")
|
|
1173
|
+
|
|
1174
|
+
# Dictionary-like interface property
|
|
1175
|
+
|
|
1176
|
+
@property
def settings(self) -> SettingsDictProxy:
    """
    Dictionary-like view over the stored settings.

    Lets legacy code such as ``app.settings["key"]`` keep working
    unchanged while reads/writes go through the database.

    Returns:
        The shared :class:`SettingsDictProxy` instance.
    """
    return self._settings_proxy
|
|
1187
|
+
|
|
1188
|
+
# Private implementation methods
|
|
1189
|
+
|
|
1190
|
+
def _load_all_settings(self) -> Dict[str, Any]:
    """Materialize the complete settings tree from the database tables.

    Returns an empty dict when reconstruction fails or yields nothing.
    """
    try:
        tree = self.migration_manager._migrate_database_to_json()
        return tree or {}
    except Exception as e:
        self.logger.error(f"Failed to load all settings: {e}")
        return {}
|
|
1197
|
+
|
|
1198
|
+
def _update_tool_settings_in_transaction(self, conn: sqlite3.Connection,
                                         tool_name: str, tool_config: Any) -> None:
    """Replace one tool's settings rows inside an open transaction.

    Dict configs are flattened into dotted-path rows; scalar configs
    are stored as a single row keyed ``'value'``.
    """
    converter = self.migration_manager.converter
    insert_sql = "INSERT INTO tool_settings (tool_name, setting_path, setting_value, data_type) VALUES (?, ?, ?, ?)"

    # Wipe the old rows so removed keys do not linger.
    conn.execute("DELETE FROM tool_settings WHERE tool_name = ?", (tool_name,))

    if isinstance(tool_config, dict):
        # One row per leaf of the (possibly nested) configuration.
        for path, leaf in self.migration_manager._flatten_nested_dict(tool_config).items():
            conn.execute(
                insert_sql,
                (tool_name, path, converter.serialize_value(leaf),
                 converter.python_to_db_type(leaf))
            )
    else:
        # Scalar config: single synthetic 'value' row.
        conn.execute(
            insert_sql,
            (tool_name, 'value', converter.serialize_value(tool_config),
             converter.python_to_db_type(tool_config))
        )
|
|
1225
|
+
|
|
1226
|
+
def _get_nested_value(self, data: Dict[str, Any], path: str, default: Any = None) -> Any:
    """Resolve a dotted path inside a nested dict.

    Returns ``default`` as soon as any path segment is missing or the
    current node is not a dict.
    """
    node: Any = data
    for segment in path.split('.'):
        if not (isinstance(node, dict) and segment in node):
            return default
        node = node[segment]
    return node
|
|
1238
|
+
|
|
1239
|
+
def _set_nested_value(self, data: Dict[str, Any], path: str, value: Any) -> None:
    """Assign a value at a dotted path, creating intermediate dicts.

    Mutates ``data`` in place.
    """
    segments = path.split('.')
    node = data
    # Walk (and build) every level except the leaf.
    for segment in segments[:-1]:
        if segment not in node:
            node[segment] = {}
        node = node[segment]
    node[segments[-1]] = value
|
|
1252
|
+
|
|
1253
|
+
def _delete_setting(self, key: str) -> None:
    """Remove one key from ``core_settings`` (errors are logged, not raised)."""
    try:
        with self.connection_manager.transaction() as conn:
            conn.execute("DELETE FROM core_settings WHERE key = ?", (key,))

        # Deletions count as changes for backup triggering.
        self._record_change()

        # Drop any cached value for the removed key.
        self._settings_proxy._invalidate_cache()

    except Exception as e:
        self.logger.error(f"Failed to delete setting {key}: {e}")
|
|
1267
|
+
|
|
1268
|
+
def _record_change(self) -> None:
    """Notify backup/persistence machinery that the database changed.

    Best-effort: failures are logged at debug level and never propagate.
    """
    try:
        # Backup manager uses change counts to decide when to snapshot.
        if self.backup_manager:
            self.backup_manager.record_change()

        # Persistence manager tracks dirtiness for flushing.
        if self.persistence_manager:
            self.persistence_manager.record_change()

        # Connection manager keeps its own since-last-backup counter.
        self.connection_manager._changes_since_backup += 1

    except Exception as e:
        self.logger.debug(f"Failed to record change: {e}")
|
|
1284
|
+
|
|
1285
|
+
# Performance Monitoring and Optimization Methods
|
|
1286
|
+
|
|
1287
|
+
def get_performance_stats(self) -> Dict[str, Any]:
    """
    Collect performance metrics from all available subsystems.

    Each section degrades to an empty dict on failure so one broken
    collector cannot hide the others.

    Returns:
        Dict with 'connection', optionally 'monitor'/'hot_settings'/
        'cache_stats', and optionally 'backup' sections.
    """
    report: Dict[str, Any] = {}

    # --- connection pool / query statistics -------------------------
    try:
        report['connection'] = self.connection_manager.get_performance_stats()
    except Exception as e:
        self.logger.warning(f"Failed to get connection stats: {e}")
        report['connection'] = {}

    # --- query monitor (optional) -----------------------------------
    if self.performance_monitor:
        try:
            m = self.performance_monitor.get_performance_stats()
            report['monitor'] = {
                'total_queries': m.total_queries,
                'avg_execution_time': m.avg_execution_time,
                'cache_hit_rate': m.cache_hit_rate,
                'queries_per_second': m.queries_per_second,
                'memory_usage_mb': m.memory_usage_mb,
                'slow_queries_count': len(m.slow_queries)
            }
            report['hot_settings'] = self.performance_monitor.get_hot_settings(10)
            report['cache_stats'] = self.performance_monitor.get_cache_stats()
        except Exception as e:
            self.logger.warning(f"Failed to get monitor stats: {e}")
            report['monitor'] = {}

    # --- backup statistics (optional) -------------------------------
    if self.backup_manager:
        try:
            report['backup'] = self.backup_manager.get_backup_info()
        except Exception as e:
            self.logger.warning(f"Failed to get backup stats: {e}")
            report['backup'] = {}

    return report
|
|
1330
|
+
|
|
1331
|
+
def optimize_performance(self) -> Dict[str, Any]:
    """
    Run a best-effort optimization pass over database, cache and backups.

    Returns:
        Dict with 'database_optimization', 'cache_optimization',
        'backup_optimization' action lists and an 'errors' list.
    """
    outcome: Dict[str, Any] = {
        'database_optimization': [],
        'cache_optimization': [],
        'backup_optimization': [],
        'errors': []
    }

    try:
        # Database-level optimization (VACUUM/ANALYZE etc. as decided
        # by the connection manager).
        outcome['database_optimization'] = self.connection_manager.optimize_database()

        if self.performance_monitor:
            # A hit rate under 20% means the cache is mostly churn.
            if self.performance_monitor.get_cache_stats().get('hit_rate_percent', 0) < 20:
                self.performance_monitor.clear_cache()
                outcome['cache_optimization'].append("Cleared low-performing cache")

            # Surface the most frequently read settings as cache candidates.
            hot = self.performance_monitor.get_hot_settings(5)
            if hot:
                outcome['cache_optimization'].append(
                    f"Hot settings identified: {[s[0] for s in hot]}"
                )

        if self.backup_manager:
            # Many unsaved changes -> snapshot now rather than wait.
            if self.backup_manager.changes_since_backup > 50:
                from .backup_manager import BackupTrigger
                snapshot = self.backup_manager.backup_database(
                    self.connection_manager,
                    trigger=BackupTrigger.MANUAL
                )
                if snapshot:
                    outcome['backup_optimization'].append("Created optimization backup")

    except Exception as e:
        outcome['errors'].append(f"Optimization error: {e}")
        self.logger.error(f"Performance optimization failed: {e}")

    return outcome
|
|
1382
|
+
|
|
1383
|
+
def export_performance_report(self, filepath: str) -> bool:
    """
    Export a comprehensive performance report as JSON.

    The main report is written to ``filepath``. When a performance monitor
    or backup manager is attached, their detailed reports are written to
    sibling files whose names are derived from ``filepath``
    (``<base>_monitor_metrics<ext>`` / ``<base>_backup_report<ext>``).

    Args:
        filepath: Target file path for the JSON report.

    Returns:
        True if export successful, False otherwise.
    """
    import os

    try:
        report_data = {
            'report_timestamp': datetime.now().isoformat(),
            'database_info': {
                'db_path': self.db_path,
                'backup_path': self.backup_path,
                'performance_monitoring_enabled': self.enable_performance_monitoring,
                'auto_backup_enabled': self.enable_auto_backup
            },
            'performance_stats': self.get_performance_stats(),
            'optimization_suggestions': []
        }

        # Derive sibling file names robustly. The previous implementation used
        # filepath.replace('.json', ...), which silently produced the SAME
        # path (clobbering the main report) whenever filepath did not end in
        # '.json'. splitext keeps the old names for '.json' paths and gives
        # distinct names for any other extension.
        base, ext = os.path.splitext(filepath)
        ext = ext or '.json'

        # Add optimization suggestions
        if self.performance_monitor:
            try:
                suggestions = self.performance_monitor.optimize_indexes(self.connection_manager)
                report_data['optimization_suggestions'] = suggestions
            except Exception as e:
                self.logger.warning(f"Failed to get optimization suggestions: {e}")

        # Export performance monitor metrics if available
        if self.performance_monitor:
            try:
                monitor_export_path = f"{base}_monitor_metrics{ext}"
                self.performance_monitor.export_metrics(monitor_export_path)
                report_data['monitor_metrics_file'] = monitor_export_path
            except Exception as e:
                self.logger.warning(f"Failed to export monitor metrics: {e}")

        # Export backup report if available
        if self.backup_manager:
            try:
                backup_export_path = f"{base}_backup_report{ext}"
                self.backup_manager.export_backup_report(backup_export_path)
                report_data['backup_report_file'] = backup_export_path
            except Exception as e:
                self.logger.warning(f"Failed to export backup report: {e}")

        # default=str keeps export best-effort for non-JSON-native values
        # (e.g. datetimes) rather than failing the whole report.
        with open(filepath, 'w') as f:
            json.dump(report_data, f, indent=2, default=str)

        self.logger.info(f"Performance report exported to {filepath}")
        return True

    except Exception as e:
        self.logger.error(f"Failed to export performance report: {e}")
        return False
|
|
1441
|
+
|
|
1442
|
+
def set_performance_config(self, config: Dict[str, Any]) -> None:
    """
    Apply a new performance configuration to the managed subsystems.

    Recognized keys are forwarded to the relevant component; unrecognized
    keys are ignored.

    Args:
        config: Mapping of performance setting names to their new values.
    """
    try:
        # Query-timing settings go to the connection manager.
        if 'slow_query_threshold' in config:
            new_threshold = config['slow_query_threshold']
            self.connection_manager.set_slow_query_threshold(new_threshold)

        # Backup scheduling settings apply only when a backup manager exists.
        backup_mgr = self.backup_manager
        if backup_mgr:
            if 'backup_interval' in config:
                backup_mgr.set_backup_interval(config['backup_interval'])
            if 'change_threshold' in config:
                backup_mgr.set_change_threshold(config['change_threshold'])

        # A cache-size change cannot be applied in place: drop the current
        # cache contents; resizing requires reinitializing the monitor.
        monitor = self.performance_monitor
        if monitor and 'cache_size' in config:
            monitor.clear_cache()

    except Exception as e:
        self.logger.error(f"Failed to update performance config: {e}")
|
|
1469
|
+
|
|
1470
|
+
def get_memory_usage(self) -> Dict[str, float]:
    """
    Report current memory usage statistics.

    Returns:
        Dictionary with memory usage figures in MB. When no performance
        monitor is attached, all figures are zero.
    """
    monitor = self.performance_monitor
    if not monitor:
        # No monitor attached: nothing measured yet.
        return {'current': 0.0, 'average': 0.0, 'peak': 0.0}
    return monitor.get_memory_trend()
|
|
1480
|
+
|
|
1481
|
+
def clear_performance_data(self) -> None:
    """Reset all collected performance-monitoring data."""
    try:
        # Connection-level statistics are always present; monitor metrics
        # exist only when monitoring is enabled.
        self.connection_manager.clear_performance_data()
        monitor = self.performance_monitor
        if monitor:
            monitor.reset_metrics()
    except Exception as e:
        self.logger.error(f"Failed to clear performance data: {e}")
|
|
1491
|
+
|
|
1492
|
+
|
|
1493
|
+
# Convenience function for creating settings manager instance
|
|
1494
|
+
def create_settings_manager(db_path: str = ":memory:",
                            backup_path: Optional[str] = None,
                            json_settings_path: str = "settings.json") -> DatabaseSettingsManager:
    """
    Build a DatabaseSettingsManager with the standard configuration.

    Args:
        db_path: Location of the SQLite database file.
        backup_path: Destination for automatic backups, if any.
        json_settings_path: JSON settings file consulted for migration.

    Returns:
        A ready-to-use DatabaseSettingsManager instance.
    """
    manager = DatabaseSettingsManager(db_path, backup_path, json_settings_path)
    return manager
|