pomera-ai-commander 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +680 -0
- package/bin/pomera-ai-commander.js +62 -0
- package/core/__init__.py +66 -0
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/app_context.py +482 -0
- package/core/async_text_processor.py +422 -0
- package/core/backup_manager.py +656 -0
- package/core/backup_recovery_manager.py +1034 -0
- package/core/content_hash_cache.py +509 -0
- package/core/context_menu.py +313 -0
- package/core/data_validator.py +1067 -0
- package/core/database_connection_manager.py +745 -0
- package/core/database_curl_settings_manager.py +609 -0
- package/core/database_promera_ai_settings_manager.py +447 -0
- package/core/database_schema.py +412 -0
- package/core/database_schema_manager.py +396 -0
- package/core/database_settings_manager.py +1508 -0
- package/core/database_settings_manager_interface.py +457 -0
- package/core/dialog_manager.py +735 -0
- package/core/efficient_line_numbers.py +511 -0
- package/core/error_handler.py +747 -0
- package/core/error_service.py +431 -0
- package/core/event_consolidator.py +512 -0
- package/core/mcp/__init__.py +43 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/core/mcp/protocol.py +288 -0
- package/core/mcp/schema.py +251 -0
- package/core/mcp/server_stdio.py +299 -0
- package/core/mcp/tool_registry.py +2345 -0
- package/core/memory_efficient_text_widget.py +712 -0
- package/core/migration_manager.py +915 -0
- package/core/migration_test_suite.py +1086 -0
- package/core/migration_validator.py +1144 -0
- package/core/optimized_find_replace.py +715 -0
- package/core/optimized_pattern_engine.py +424 -0
- package/core/optimized_search_highlighter.py +553 -0
- package/core/performance_monitor.py +675 -0
- package/core/persistence_manager.py +713 -0
- package/core/progressive_stats_calculator.py +632 -0
- package/core/regex_pattern_cache.py +530 -0
- package/core/regex_pattern_library.py +351 -0
- package/core/search_operation_manager.py +435 -0
- package/core/settings_defaults_registry.py +1087 -0
- package/core/settings_integrity_validator.py +1112 -0
- package/core/settings_serializer.py +558 -0
- package/core/settings_validator.py +1824 -0
- package/core/smart_stats_calculator.py +710 -0
- package/core/statistics_update_manager.py +619 -0
- package/core/stats_config_manager.py +858 -0
- package/core/streaming_text_handler.py +723 -0
- package/core/task_scheduler.py +596 -0
- package/core/update_pattern_library.py +169 -0
- package/core/visibility_monitor.py +596 -0
- package/core/widget_cache.py +498 -0
- package/mcp.json +61 -0
- package/package.json +57 -0
- package/pomera.py +7483 -0
- package/pomera_mcp_server.py +144 -0
- package/tools/__init__.py +5 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
- package/tools/ai_tools.py +2892 -0
- package/tools/ascii_art_generator.py +353 -0
- package/tools/base64_tools.py +184 -0
- package/tools/base_tool.py +511 -0
- package/tools/case_tool.py +309 -0
- package/tools/column_tools.py +396 -0
- package/tools/cron_tool.py +885 -0
- package/tools/curl_history.py +601 -0
- package/tools/curl_processor.py +1208 -0
- package/tools/curl_settings.py +503 -0
- package/tools/curl_tool.py +5467 -0
- package/tools/diff_viewer.py +1072 -0
- package/tools/email_extraction_tool.py +249 -0
- package/tools/email_header_analyzer.py +426 -0
- package/tools/extraction_tools.py +250 -0
- package/tools/find_replace.py +1751 -0
- package/tools/folder_file_reporter.py +1463 -0
- package/tools/folder_file_reporter_adapter.py +480 -0
- package/tools/generator_tools.py +1217 -0
- package/tools/hash_generator.py +256 -0
- package/tools/html_tool.py +657 -0
- package/tools/huggingface_helper.py +449 -0
- package/tools/jsonxml_tool.py +730 -0
- package/tools/line_tools.py +419 -0
- package/tools/list_comparator.py +720 -0
- package/tools/markdown_tools.py +562 -0
- package/tools/mcp_widget.py +1417 -0
- package/tools/notes_widget.py +973 -0
- package/tools/number_base_converter.py +373 -0
- package/tools/regex_extractor.py +572 -0
- package/tools/slug_generator.py +311 -0
- package/tools/sorter_tools.py +459 -0
- package/tools/string_escape_tool.py +393 -0
- package/tools/text_statistics_tool.py +366 -0
- package/tools/text_wrapper.py +431 -0
- package/tools/timestamp_converter.py +422 -0
- package/tools/tool_loader.py +710 -0
- package/tools/translator_tools.py +523 -0
- package/tools/url_link_extractor.py +262 -0
- package/tools/url_parser.py +205 -0
- package/tools/whitespace_tools.py +356 -0
- package/tools/word_frequency_counter.py +147 -0
@@ -0,0 +1,1034 @@
"""
Backup and Recovery Manager for Settings Database Migration

This module provides comprehensive backup and recovery procedures for the
settings database system. It includes automatic JSON backup creation,
manual backup and restore functionality, database repair tools, and
settings export/import utilities.

Features:
- Automatic JSON backup creation before migration
- Manual backup and restore functionality
- Database repair and recovery tools
- Settings export and import utilities
- Validation tools for settings integrity
- Backup rotation and cleanup procedures
"""

import json
import sqlite3
import os
import gzip
import shutil
import logging
import threading
import time
from typing import Dict, List, Tuple, Any, Optional, Union
from datetime import datetime, timedelta
from pathlib import Path
from dataclasses import dataclass
from enum import Enum


class BackupType(Enum):
    """Types of backups that can be created."""
    AUTOMATIC = "automatic"
    MANUAL = "manual"
    MIGRATION = "migration"
    EMERGENCY = "emergency"


class BackupFormat(Enum):
    """Backup file formats."""
    JSON = "json"
    SQLITE = "sqlite"
    COMPRESSED = "compressed"


@dataclass
class BackupInfo:
    """Information about a backup."""
    timestamp: datetime
    backup_type: BackupType
    format: BackupFormat
    filepath: str
    size_bytes: int
    checksum: Optional[str] = None
    description: Optional[str] = None
    source_info: Optional[Dict[str, Any]] = None


class BackupRecoveryManager:
    """
    Comprehensive backup and recovery manager for the settings database system.

    Provides automatic and manual backup creation, recovery procedures,
    database repair tools, and settings validation utilities.
    """

    def __init__(self, backup_dir: str = "backups",
                 max_backups: int = 50,
                 auto_backup_interval: int = 3600,  # 1 hour
                 enable_compression: bool = True):
        """
        Initialize the backup and recovery manager.

        Args:
            backup_dir: Directory for storing backups
            max_backups: Maximum number of backups to keep
            auto_backup_interval: Automatic backup interval in seconds
            enable_compression: Whether to compress backups
        """
        self.backup_dir = Path(backup_dir)
        self.max_backups = max_backups
        self.auto_backup_interval = auto_backup_interval
        self.enable_compression = enable_compression

        # Ensure backup directory exists
        self.backup_dir.mkdir(parents=True, exist_ok=True)

        # Backup tracking
        self._backup_history: List[BackupInfo] = []
        self._last_auto_backup: Optional[datetime] = None
        self._backup_lock = threading.RLock()

        # Auto backup thread
        self._auto_backup_thread: Optional[threading.Thread] = None
        self._auto_backup_stop_event = threading.Event()
        self._auto_backup_enabled = False

        # Logger
        self.logger = logging.getLogger(__name__)

        # Load existing backup history and retention settings
        self._load_backup_history()
        self._load_retention_settings()
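
Note (editor, not package code): a minimal construction sketch. The import path assumes
the package root is on sys.path; every keyword mirrors the __init__ signature above.

from core.backup_recovery_manager import BackupRecoveryManager, BackupType

manager = BackupRecoveryManager(
    backup_dir="backups",       # created on demand
    max_backups=20,             # retention cap enforced by cleanup_old_backups()
    auto_backup_interval=1800,  # seconds between automatic database backups
    enable_compression=True,    # gzip every backup file
)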

    def create_json_backup(self, settings_data: Dict[str, Any],
                           backup_type: BackupType = BackupType.MANUAL,
                           description: Optional[str] = None) -> Optional[BackupInfo]:
        """
        Create a JSON backup of settings data.

        Args:
            settings_data: Settings data to backup
            backup_type: Type of backup being created
            description: Optional description for the backup

        Returns:
            BackupInfo if successful, None otherwise
        """
        try:
            timestamp = datetime.now()
            filename = self._generate_backup_filename("json", backup_type, timestamp)
            filepath = self.backup_dir / filename

            # Create backup
            if self.enable_compression:
                with gzip.open(f"{filepath}.gz", 'wt', encoding='utf-8') as f:
                    json.dump(settings_data, f, indent=2, ensure_ascii=False)
                filepath = f"{filepath}.gz"
                format_type = BackupFormat.COMPRESSED
            else:
                with open(filepath, 'w', encoding='utf-8') as f:
                    json.dump(settings_data, f, indent=2, ensure_ascii=False)
                format_type = BackupFormat.JSON

            # Get file size
            size_bytes = os.path.getsize(filepath)

            # Calculate checksum
            checksum = self._calculate_checksum(filepath)

            # Create backup info
            backup_info = BackupInfo(
                timestamp=timestamp,
                backup_type=backup_type,
                format=format_type,
                filepath=str(filepath),
                size_bytes=size_bytes,
                checksum=checksum,
                description=description,
                source_info={
                    'data_type': 'json_settings',
                    'keys_count': len(settings_data),
                    'tool_count': len(settings_data.get('tool_settings', {}))
                }
            )

            # Record backup
            self._record_backup(backup_info)

            self.logger.info(f"JSON backup created: {filepath}")
            return backup_info

        except Exception as e:
            self.logger.error(f"Failed to create JSON backup: {e}")
            return None
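
Note (editor, not package code): driving create_json_backup() with an in-memory dict.
The method itself only inspects the 'tool_settings' key; the rest of the shape shown
here is hypothetical.

settings = {"theme": "dark", "tool_settings": {"case_tool": {"mode": "title"}}}  # hypothetical shape
info = manager.create_json_backup(settings, BackupType.MANUAL, "before upgrade")
if info:
    print(info.filepath, info.size_bytes, info.checksum)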

    def create_database_backup(self, connection_manager,
                               backup_type: BackupType = BackupType.MANUAL,
                               description: Optional[str] = None) -> Optional[BackupInfo]:
        """
        Create a database backup.

        Args:
            connection_manager: Database connection manager
            backup_type: Type of backup being created
            description: Optional description for the backup

        Returns:
            BackupInfo if successful, None otherwise
        """
        try:
            timestamp = datetime.now()
            filename = self._generate_backup_filename("db", backup_type, timestamp)
            filepath = self.backup_dir / filename

            # Create database backup
            success = connection_manager.backup_to_disk(str(filepath))
            if not success:
                self.logger.error("Database backup failed")
                return None

            # Compress if enabled
            if self.enable_compression:
                compressed_path = f"{filepath}.gz"
                with open(filepath, 'rb') as f_in:
                    with gzip.open(compressed_path, 'wb') as f_out:
                        shutil.copyfileobj(f_in, f_out)

                # Remove uncompressed file
                os.remove(filepath)
                filepath = compressed_path
                format_type = BackupFormat.COMPRESSED
            else:
                format_type = BackupFormat.SQLITE

            # Get file size
            size_bytes = os.path.getsize(filepath)

            # Calculate checksum
            checksum = self._calculate_checksum(filepath)

            # Get database info
            db_info = self._get_database_info(connection_manager)

            # Create backup info
            backup_info = BackupInfo(
                timestamp=timestamp,
                backup_type=backup_type,
                format=format_type,
                filepath=str(filepath),
                size_bytes=size_bytes,
                checksum=checksum,
                description=description,
                source_info=db_info
            )

            # Record backup
            self._record_backup(backup_info)

            self.logger.info(f"Database backup created: {filepath}")
            return backup_info

        except Exception as e:
            self.logger.error(f"Failed to create database backup: {e}")
            return None
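
Note (editor, not package code): connection_manager is duck-typed. Within this file it
only needs backup_to_disk(), restore_from_disk(), and get_connection(); the real
implementation is core/database_connection_manager.py, which this excerpt does not
show. A minimal stand-in for testing, assuming a plain SQLite file:

import sqlite3
from contextlib import closing

class StubConnectionManager:
    """Hypothetical stand-in exposing only what this module calls."""

    def __init__(self, db_path: str):
        self.db_path = db_path

    def get_connection(self) -> sqlite3.Connection:
        return sqlite3.connect(self.db_path)

    def backup_to_disk(self, dest: str) -> bool:
        with closing(sqlite3.connect(self.db_path)) as src, \
             closing(sqlite3.connect(dest)) as dst:
            src.backup(dst)  # SQLite online-backup API (Python 3.7+)
        return True

    def restore_from_disk(self, src_path: str) -> bool:
        with closing(sqlite3.connect(src_path)) as src, \
             closing(sqlite3.connect(self.db_path)) as dst:
            src.backup(dst)
        return True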

    def restore_from_json_backup(self, backup_info: BackupInfo) -> Optional[Dict[str, Any]]:
        """
        Restore settings from a JSON backup.

        Args:
            backup_info: Information about the backup to restore

        Returns:
            Restored settings data if successful, None otherwise
        """
        try:
            filepath = backup_info.filepath

            if not os.path.exists(filepath):
                self.logger.error(f"Backup file not found: {filepath}")
                return None

            # Verify checksum if available
            if backup_info.checksum:
                current_checksum = self._calculate_checksum(filepath)
                if current_checksum != backup_info.checksum:
                    self.logger.warning(f"Backup checksum mismatch: {filepath}")

            # Load backup data
            if backup_info.format == BackupFormat.COMPRESSED:
                with gzip.open(filepath, 'rt', encoding='utf-8') as f:
                    settings_data = json.load(f)
            else:
                with open(filepath, 'r', encoding='utf-8') as f:
                    settings_data = json.load(f)

            self.logger.info(f"Settings restored from JSON backup: {filepath}")
            return settings_data

        except Exception as e:
            self.logger.error(f"Failed to restore from JSON backup: {e}")
            return None
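
Note (editor, not package code): restoring the newest JSON backup on record. A checksum
mismatch above only logs a warning and proceeds, so pair this with
validate_backup_integrity() (defined further down) when strict verification matters.

json_backups = [b for b in manager.get_backup_history()
                if b.filepath.endswith((".json", ".json.gz"))]
if json_backups:
    latest = max(json_backups, key=lambda b: b.timestamp)
    restored = manager.restore_from_json_backup(latest)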

    def restore_from_database_backup(self, backup_info: BackupInfo,
                                     connection_manager) -> bool:
        """
        Restore database from a backup.

        Args:
            backup_info: Information about the backup to restore
            connection_manager: Database connection manager

        Returns:
            True if restore successful, False otherwise
        """
        try:
            filepath = backup_info.filepath

            if not os.path.exists(filepath):
                self.logger.error(f"Backup file not found: {filepath}")
                return False

            # Verify checksum if available
            if backup_info.checksum:
                current_checksum = self._calculate_checksum(filepath)
                if current_checksum != backup_info.checksum:
                    self.logger.warning(f"Backup checksum mismatch: {filepath}")

            # Prepare restore file
            restore_path = filepath
            if backup_info.format == BackupFormat.COMPRESSED:
                # Decompress to temporary file
                temp_path = self.backup_dir / f"temp_restore_{int(time.time())}.db"
                with gzip.open(filepath, 'rb') as f_in:
                    with open(temp_path, 'wb') as f_out:
                        shutil.copyfileobj(f_in, f_out)
                restore_path = str(temp_path)

            try:
                # Restore database
                success = connection_manager.restore_from_disk(restore_path)

                if success:
                    self.logger.info(f"Database restored from backup: {filepath}")
                else:
                    self.logger.error(f"Database restore failed: {filepath}")

                return success

            finally:
                # Clean up temporary file
                if restore_path != filepath and os.path.exists(restore_path):
                    os.remove(restore_path)

        except Exception as e:
            self.logger.error(f"Failed to restore from database backup: {e}")
            return False

    def create_migration_backup(self, json_filepath: str) -> Optional[BackupInfo]:
        """
        Create a backup before migration.

        Args:
            json_filepath: Path to JSON settings file to backup

        Returns:
            BackupInfo if successful, None otherwise
        """
        try:
            if not os.path.exists(json_filepath):
                self.logger.warning(f"JSON file not found for migration backup: {json_filepath}")
                return None

            # Load JSON data
            with open(json_filepath, 'r', encoding='utf-8') as f:
                settings_data = json.load(f)

            # Create backup
            return self.create_json_backup(
                settings_data,
                BackupType.MIGRATION,
                f"Pre-migration backup of {json_filepath}"
            )

        except Exception as e:
            self.logger.error(f"Failed to create migration backup: {e}")
            return None

    def repair_database(self, connection_manager, data_validator) -> bool:
        """
        Attempt to repair database corruption.

        Args:
            connection_manager: Database connection manager
            data_validator: Data validator for integrity checks

        Returns:
            True if repair successful, False otherwise
        """
        try:
            self.logger.info("Starting database repair procedure")

            # Create emergency backup first
            emergency_backup = self.create_database_backup(
                connection_manager,
                BackupType.EMERGENCY,
                "Emergency backup before repair"
            )

            if not emergency_backup:
                self.logger.warning("Could not create emergency backup before repair")

            # Validate database and get issues
            validation_issues = data_validator.validate_database(fix_issues=False)

            if not validation_issues:
                self.logger.info("No database issues found - repair not needed")
                return True

            # Attempt to repair issues
            repair_success = data_validator.repair_data_corruption(validation_issues)

            if repair_success:
                # Re-validate after repair
                post_repair_issues = data_validator.validate_database(fix_issues=False)
                remaining_critical = [i for i in post_repair_issues
                                      if i.severity.value == "critical"]

                if not remaining_critical:
                    self.logger.info("Database repair completed successfully")
                    return True
                else:
                    self.logger.warning(f"Database repair partially successful - {len(remaining_critical)} critical issues remain")
                    return False
            else:
                self.logger.error("Database repair failed")
                return False

        except Exception as e:
            self.logger.error(f"Database repair procedure failed: {e}")
            return False
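
Note (editor, not package code): repair_database() is likewise duck-typed over
data_validator; it relies only on validate_database(fix_issues=...) returning issue
objects whose severity.value can equal "critical", and on repair_data_corruption()
returning a bool. The concrete class lives in core/data_validator.py (not shown). The
implied contract as a sketch; Severity's member names and the "warning" value are
hypothetical:

from typing import List, Protocol
from enum import Enum

class Severity(Enum):
    WARNING = "warning"    # hypothetical; only "critical" is tested above
    CRITICAL = "critical"

class Issue(Protocol):
    severity: Severity

class DataValidatorLike(Protocol):
    def validate_database(self, fix_issues: bool = False) -> List[Issue]: ...
    def repair_data_corruption(self, issues: List[Issue]) -> bool: ...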

    def export_settings(self, settings_data: Dict[str, Any],
                        export_path: str,
                        format_type: str = "json") -> bool:
        """
        Export settings to a file.

        Args:
            settings_data: Settings data to export
            export_path: Path to export file
            format_type: Export format ("json" or "compressed")

        Returns:
            True if export successful, False otherwise
        """
        try:
            export_file = Path(export_path)

            # Validate settings data
            if not settings_data:
                self.logger.error("Export failed: No settings data provided")
                return False

            if not isinstance(settings_data, dict):
                self.logger.error(f"Export failed: Settings data must be a dictionary, got {type(settings_data)}")
                return False

            # Create parent directory if needed
            export_file.parent.mkdir(parents=True, exist_ok=True)
            self.logger.debug(f"Export directory created/verified: {export_file.parent}")

            # Count items being exported for logging
            tool_count = len(settings_data.get("tool_settings", {}))
            total_keys = len(settings_data.keys())

            if format_type == "compressed":
                with gzip.open(export_path, 'wt', encoding='utf-8') as f:
                    json.dump(settings_data, f, indent=2, ensure_ascii=False)
                self.logger.info(f"Settings exported (compressed) to: {export_path} - {total_keys} keys, {tool_count} tools")
            else:
                with open(export_path, 'w', encoding='utf-8') as f:
                    json.dump(settings_data, f, indent=2, ensure_ascii=False)
                self.logger.info(f"Settings exported to: {export_path} - {total_keys} keys, {tool_count} tools")

            # Verify file was created and has content
            if export_file.exists():
                file_size = export_file.stat().st_size
                if file_size > 0:
                    self.logger.debug(f"Export verification passed - file size: {file_size} bytes")
                    return True
                else:
                    self.logger.error("Export failed: File created but is empty")
                    return False
            else:
                self.logger.error("Export failed: File was not created")
                return False

        except PermissionError as e:
            self.logger.error(f"Export failed: Permission denied - {e}")
            return False
        except (TypeError, ValueError) as e:
            # json.dump signals non-serializable data with TypeError/ValueError;
            # the stdlib json module defines no JSONEncodeError.
            self.logger.error(f"Export failed: JSON encoding error - {e}")
            return False
        except Exception as e:
            self.logger.error(f"Export failed with unexpected error: {e}", exc_info=True)
            return False

    def import_settings(self, import_path: str) -> Optional[Dict[str, Any]]:
        """
        Import settings from a file.

        Args:
            import_path: Path to import file

        Returns:
            Imported settings data if successful, None otherwise
        """
        try:
            import_file = Path(import_path)

            # Validate file exists
            if not import_file.exists():
                self.logger.error(f"Import failed: File not found - {import_path}")
                return None

            # Check file size
            file_size = import_file.stat().st_size
            if file_size == 0:
                self.logger.error(f"Import failed: File is empty - {import_path}")
                return None

            self.logger.debug(f"Import file validation passed - size: {file_size} bytes")

            # Detect if file is compressed
            is_compressed = import_path.endswith('.gz')

            if is_compressed:
                self.logger.debug("Importing compressed file")
                with gzip.open(import_path, 'rt', encoding='utf-8') as f:
                    settings_data = json.load(f)
            else:
                self.logger.debug("Importing uncompressed file")
                with open(import_path, 'r', encoding='utf-8') as f:
                    settings_data = json.load(f)

            # Validate imported data
            if not isinstance(settings_data, dict):
                self.logger.error(f"Import failed: Invalid data format - expected dict, got {type(settings_data)}")
                return None

            # Count imported items for logging
            tool_count = len(settings_data.get("tool_settings", {}))
            total_keys = len(settings_data.keys())

            self.logger.info(f"Settings imported from: {import_path} - {total_keys} keys, {tool_count} tools")
            return settings_data

        except PermissionError as e:
            self.logger.error(f"Import failed: Permission denied - {e}")
            return None
        except json.JSONDecodeError as e:
            self.logger.error(f"Import failed: Invalid JSON format - {e}")
            return None
        except UnicodeDecodeError as e:
            self.logger.error(f"Import failed: File encoding error - {e}")
            return None
        except Exception as e:
            self.logger.error(f"Import failed with unexpected error: {e}", exc_info=True)
            return None
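
Note (editor, not package code): export and import are symmetric, and import infers
compression from the .gz suffix rather than from an explicit format flag. For
JSON-serializable data the round trip is lossless:

ok = manager.export_settings(settings, "exports/settings.json.gz", "compressed")
if ok:
    assert manager.import_settings("exports/settings.json.gz") == settings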

    def validate_backup_integrity(self, backup_info: BackupInfo) -> bool:
        """
        Validate the integrity of a backup file.

        Args:
            backup_info: Information about the backup to validate

        Returns:
            True if backup is valid, False otherwise
        """
        try:
            filepath = backup_info.filepath

            # Check file exists
            if not os.path.exists(filepath):
                self.logger.error(f"Backup file not found: {filepath}")
                return False

            # Check file size
            current_size = os.path.getsize(filepath)
            if current_size != backup_info.size_bytes:
                self.logger.error(f"Backup file size mismatch: expected {backup_info.size_bytes}, got {current_size}")
                return False

            # Check checksum if available
            if backup_info.checksum:
                current_checksum = self._calculate_checksum(filepath)
                if current_checksum != backup_info.checksum:
                    self.logger.error(f"Backup checksum mismatch: {filepath}")
                    return False

            # Try to read the backup. BackupFormat.COMPRESSED does not record
            # whether the payload is JSON or SQLite, so fall back to the file
            # extension produced by _generate_backup_filename().
            is_sqlite_payload = (backup_info.format == BackupFormat.SQLITE
                                 or '.db' in os.path.basename(filepath))

            if not is_sqlite_payload:
                try:
                    if filepath.endswith('.gz'):
                        with gzip.open(filepath, 'rt', encoding='utf-8') as f:
                            json.load(f)
                    else:
                        with open(filepath, 'r', encoding='utf-8') as f:
                            json.load(f)
                except json.JSONDecodeError:
                    self.logger.error(f"Backup contains invalid JSON: {filepath}")
                    return False

            else:
                # Validate SQLite database
                try:
                    if filepath.endswith('.gz'):
                        # Decompress to temporary file for validation
                        temp_path = self.backup_dir / f"temp_validate_{int(time.time())}.db"
                        with gzip.open(filepath, 'rb') as f_in:
                            with open(temp_path, 'wb') as f_out:
                                shutil.copyfileobj(f_in, f_out)
                        validate_path = str(temp_path)
                    else:
                        validate_path = filepath

                    try:
                        conn = sqlite3.connect(validate_path)
                        cursor = conn.execute("PRAGMA integrity_check")
                        result = cursor.fetchone()[0]
                        conn.close()

                        if result != "ok":
                            self.logger.error(f"Backup database integrity check failed: {result}")
                            return False
                    finally:
                        if validate_path != filepath and os.path.exists(validate_path):
                            os.remove(validate_path)

                except sqlite3.Error as e:
                    self.logger.error(f"Backup database validation failed: {e}")
                    return False

            self.logger.info(f"Backup validation successful: {filepath}")
            return True

        except Exception as e:
            self.logger.error(f"Backup validation failed: {e}")
            return False
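
Note (editor, not package code): sweeping every recorded backup through the validator.

for b in manager.get_backup_history():
    if not manager.validate_backup_integrity(b):
        print("corrupt or missing:", b.filepath)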

    def cleanup_old_backups(self) -> int:
        """
        Clean up old backups based on retention policy.

        Returns:
            Number of backups cleaned up
        """
        try:
            with self._backup_lock:
                if len(self._backup_history) <= self.max_backups:
                    return 0

                # Sort by timestamp, keep most recent
                sorted_backups = sorted(self._backup_history, key=lambda b: b.timestamp, reverse=True)
                backups_to_remove = sorted_backups[self.max_backups:]

                removed_count = 0
                for backup in backups_to_remove:
                    try:
                        if os.path.exists(backup.filepath):
                            os.remove(backup.filepath)
                            self.logger.debug(f"Removed old backup: {backup.filepath}")

                        self._backup_history.remove(backup)
                        removed_count += 1

                    except Exception as e:
                        self.logger.warning(f"Failed to remove backup {backup.filepath}: {e}")

                # Save updated history
                self._save_backup_history()

                if removed_count > 0:
                    self.logger.info(f"Cleaned up {removed_count} old backups")

                return removed_count

        except Exception as e:
            self.logger.error(f"Backup cleanup failed: {e}")
            return 0
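
Note (editor, not package code): rotation keeps the newest max_backups entries
regardless of type, so a burst of automatic backups can evict older manual or
migration backups. The same selection, as a dry run:

ranked = sorted(manager.get_backup_history(),
                key=lambda b: b.timestamp, reverse=True)
for b in ranked[manager.max_backups:]:
    print("would remove:", b.filepath)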

    def start_auto_backup(self, connection_manager, settings_manager) -> None:
        """
        Start automatic backup thread.

        Args:
            connection_manager: Database connection manager
            settings_manager: Settings manager for data access
        """
        if self._auto_backup_enabled:
            return

        self._auto_backup_enabled = True
        self._auto_backup_stop_event.clear()

        self._auto_backup_thread = threading.Thread(
            target=self._auto_backup_worker,
            args=(connection_manager, settings_manager),
            daemon=True,
            name="AutoBackupWorker"
        )
        self._auto_backup_thread.start()

        self.logger.info("Automatic backup started")

    def stop_auto_backup(self) -> None:
        """Stop automatic backup thread."""
        if not self._auto_backup_enabled:
            return

        self._auto_backup_enabled = False
        self._auto_backup_stop_event.set()

        if self._auto_backup_thread and self._auto_backup_thread.is_alive():
            self._auto_backup_thread.join(timeout=5)

        self.logger.info("Automatic backup stopped")

    def get_backup_history(self) -> List[BackupInfo]:
        """Get list of all backups."""
        return self._backup_history.copy()
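
Note (editor, not package code): the worker runs as a daemon thread, so it cannot keep
the process alive on its own, but stopping it explicitly avoids cutting off a backup in
flight. settings_manager is accepted and forwarded but never read by
_auto_backup_worker() in this version, so None is passed here.

cm = StubConnectionManager("settings.db")  # stub from the earlier note
manager.start_auto_backup(cm, settings_manager=None)
try:
    ...  # application runs
finally:
    manager.stop_auto_backup()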

    def get_backup_statistics(self) -> Dict[str, Any]:
        """
        Get backup statistics.

        Returns:
            Dictionary with backup statistics
        """
        with self._backup_lock:
            total_backups = len(self._backup_history)
            total_size = sum(b.size_bytes for b in self._backup_history)

            # Count by type
            type_counts = {}
            for backup_type in BackupType:
                count = len([b for b in self._backup_history if b.backup_type == backup_type])
                type_counts[backup_type.value] = count

            # Count by format
            format_counts = {}
            for backup_format in BackupFormat:
                count = len([b for b in self._backup_history if b.format == backup_format])
                format_counts[backup_format.value] = count

            # Recent backups
            recent_backups = [
                b for b in self._backup_history
                if b.timestamp > datetime.now() - timedelta(days=7)
            ]

            return {
                'total_backups': total_backups,
                'total_size_bytes': total_size,
                'total_size_mb': round(total_size / (1024 * 1024), 2),
                'backups_by_type': type_counts,
                'backups_by_format': format_counts,
                'recent_backups_7d': len(recent_backups),
                'last_backup': self._backup_history[-1].timestamp.isoformat() if self._backup_history else None,
                'last_auto_backup': self._last_auto_backup.isoformat() if self._last_auto_backup else None,
                'auto_backup_enabled': self._auto_backup_enabled,
                'backup_directory': str(self.backup_dir),
                'max_backups': self.max_backups
            }
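
Note (editor, not package code): 'last_backup' reads the final history entry, which is
the newest only because _record_backup() appends in arrival order; a hand-edited
backup_history.json can make the two diverge.

import pprint
pprint.pprint(manager.get_backup_statistics())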

    # Retention Settings Management

    def get_retention_settings(self) -> Dict[str, Any]:
        """
        Get current retention policy settings.

        Returns:
            Dictionary with retention settings
        """
        return {
            'max_backups': self.max_backups,
            'auto_backup_interval': self.auto_backup_interval,
            'enable_compression': self.enable_compression,
            'backup_directory': str(self.backup_dir),
            'auto_backup_enabled': self._auto_backup_enabled
        }

    def update_retention_settings(self, max_backups: Optional[int] = None,
                                  auto_backup_interval: Optional[int] = None,
                                  enable_compression: Optional[bool] = None) -> bool:
        """
        Update retention policy settings.

        Args:
            max_backups: Maximum number of backups to keep
            auto_backup_interval: Automatic backup interval in seconds
            enable_compression: Whether to enable backup compression

        Returns:
            True if settings updated successfully
        """
        try:
            settings_changed = False

            # Update max backups
            if max_backups is not None and max_backups >= 5:
                old_max = self.max_backups
                self.max_backups = max_backups
                settings_changed = True

                # If we reduced the limit, cleanup old backups immediately
                if max_backups < old_max:
                    self.cleanup_old_backups()

                self.logger.info(f"Updated max_backups: {old_max} -> {max_backups}")

            # Update auto backup interval
            if auto_backup_interval is not None and auto_backup_interval >= 300:  # Minimum 5 minutes
                old_interval = self.auto_backup_interval
                self.auto_backup_interval = auto_backup_interval
                settings_changed = True

                self.logger.info(f"Updated auto_backup_interval: {old_interval}s -> {auto_backup_interval}s")

            # Update compression setting
            if enable_compression is not None:
                old_compression = self.enable_compression
                self.enable_compression = enable_compression
                settings_changed = True

                self.logger.info(f"Updated enable_compression: {old_compression} -> {enable_compression}")

            # Save settings to persistent storage
            if settings_changed:
                self._save_retention_settings()

            return settings_changed

        except Exception as e:
            self.logger.error(f"Failed to update retention settings: {e}")
            return False

    def reset_retention_settings_to_defaults(self) -> bool:
        """
        Reset retention settings to default values.

        Returns:
            True if reset successful
        """
        try:
            return self.update_retention_settings(
                max_backups=50,
                auto_backup_interval=3600,  # 1 hour
                enable_compression=True
            )
        except Exception as e:
            self.logger.error(f"Failed to reset retention settings: {e}")
            return False
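
Note (editor, not package code): values below the built-in floors (max_backups under 5,
interval under 300 seconds) are skipped silently rather than rejected, so the boolean
result only reports whether anything was applied.

changed = manager.update_retention_settings(max_backups=3)  # below the floor of 5
print(changed)  # False: nothing applied, and no error is raised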

    # Private methods

    def _generate_backup_filename(self, extension: str, backup_type: BackupType,
                                  timestamp: datetime) -> str:
        """Generate backup filename."""
        timestamp_str = timestamp.strftime("%Y%m%d_%H%M%S")
        return f"settings_backup_{backup_type.value}_{timestamp_str}.{extension}"

    def _calculate_checksum(self, filepath: str) -> str:
        """Calculate MD5 checksum of a file."""
        import hashlib

        hash_md5 = hashlib.md5()
        with open(filepath, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()
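
Note (editor, not package code): the MD5 digest guards against accidental corruption,
not tampering. Verifying a backup out of band only needs the same chunked digest;
md5_of is a hypothetical helper name.

import hashlib

def md5_of(path: str) -> str:
    h = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            h.update(chunk)
    return h.hexdigest()

assert md5_of(info.filepath) == info.checksum  # info from the create_json_backup sketch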

    def _get_database_info(self, connection_manager) -> Dict[str, Any]:
        """Get database information for backup metadata."""
        try:
            conn = connection_manager.get_connection()

            # Get table counts
            table_counts = {}
            tables = ['core_settings', 'tool_settings', 'tab_content',
                      'performance_settings', 'font_settings', 'dialog_settings']

            for table in tables:
                try:
                    cursor = conn.execute(f"SELECT COUNT(*) FROM {table}")
                    count = cursor.fetchone()[0]
                    table_counts[table] = count
                except sqlite3.Error:
                    table_counts[table] = 0

            return {
                'data_type': 'sqlite_database',
                'table_counts': table_counts,
                'total_records': sum(table_counts.values())
            }

        except Exception as e:
            self.logger.warning(f"Failed to get database info: {e}")
            return {'data_type': 'sqlite_database', 'error': str(e)}

    def _record_backup(self, backup_info: BackupInfo) -> None:
        """Record backup in history."""
        with self._backup_lock:
            self._backup_history.append(backup_info)

            # Update last auto backup time if applicable
            if backup_info.backup_type == BackupType.AUTOMATIC:
                self._last_auto_backup = backup_info.timestamp

            # Save history
            self._save_backup_history()

            # Clean up old backups if needed
            if len(self._backup_history) > self.max_backups:
                self.cleanup_old_backups()

    def _load_backup_history(self) -> None:
        """Load backup history from file."""
        history_file = self.backup_dir / "backup_history.json"

        try:
            if history_file.exists():
                with open(history_file, 'r', encoding='utf-8') as f:
                    history_data = json.load(f)

                self._backup_history = []
                for item in history_data.get('backups', []):
                    backup_info = BackupInfo(
                        timestamp=datetime.fromisoformat(item['timestamp']),
                        backup_type=BackupType(item['backup_type']),
                        format=BackupFormat(item['format']),
                        filepath=item['filepath'],
                        size_bytes=item['size_bytes'],
                        checksum=item.get('checksum'),
                        description=item.get('description'),
                        source_info=item.get('source_info')
                    )
                    self._backup_history.append(backup_info)

                # Load last auto backup time (the key is written even when the
                # value is None, and fromisoformat() rejects None)
                if history_data.get('last_auto_backup'):
                    self._last_auto_backup = datetime.fromisoformat(history_data['last_auto_backup'])

                self.logger.debug(f"Loaded {len(self._backup_history)} backup records")

        except Exception as e:
            self.logger.warning(f"Failed to load backup history: {e}")
            self._backup_history = []

    def _save_backup_history(self) -> None:
        """Save backup history to file."""
        history_file = self.backup_dir / "backup_history.json"

        try:
            history_data = {
                'backups': [
                    {
                        'timestamp': backup.timestamp.isoformat(),
                        'backup_type': backup.backup_type.value,
                        'format': backup.format.value,
                        'filepath': backup.filepath,
                        'size_bytes': backup.size_bytes,
                        'checksum': backup.checksum,
                        'description': backup.description,
                        'source_info': backup.source_info
                    }
                    for backup in self._backup_history
                ],
                'last_auto_backup': self._last_auto_backup.isoformat() if self._last_auto_backup else None
            }

            with open(history_file, 'w', encoding='utf-8') as f:
                json.dump(history_data, f, indent=2, ensure_ascii=False)

        except Exception as e:
            self.logger.warning(f"Failed to save backup history: {e}")

    def _save_retention_settings(self) -> None:
        """Save retention settings to file."""
        settings_file = self.backup_dir / "retention_settings.json"

        try:
            settings_data = {
                'max_backups': self.max_backups,
                'auto_backup_interval': self.auto_backup_interval,
                'enable_compression': self.enable_compression,
                'last_updated': datetime.now().isoformat()
            }

            with open(settings_file, 'w', encoding='utf-8') as f:
                json.dump(settings_data, f, indent=2, ensure_ascii=False)

            self.logger.debug("Retention settings saved")

        except Exception as e:
            self.logger.warning(f"Failed to save retention settings: {e}")

    def _load_retention_settings(self) -> None:
        """Load retention settings from file."""
        settings_file = self.backup_dir / "retention_settings.json"

        try:
            if settings_file.exists():
                with open(settings_file, 'r', encoding='utf-8') as f:
                    settings_data = json.load(f)

                # Apply loaded settings
                self.max_backups = settings_data.get('max_backups', self.max_backups)
                self.auto_backup_interval = settings_data.get('auto_backup_interval', self.auto_backup_interval)
                self.enable_compression = settings_data.get('enable_compression', self.enable_compression)

                self.logger.debug("Retention settings loaded from file")

        except Exception as e:
            self.logger.warning(f"Failed to load retention settings: {e}")

    def _auto_backup_worker(self, connection_manager, settings_manager) -> None:
        """Worker thread for automatic backups."""
        while not self._auto_backup_stop_event.is_set():
            try:
                # Check if backup is needed
                should_backup = False

                if self._last_auto_backup is None:
                    should_backup = True
                elif datetime.now() - self._last_auto_backup > timedelta(seconds=self.auto_backup_interval):
                    should_backup = True

                if should_backup:
                    # Create automatic backup
                    backup_info = self.create_database_backup(
                        connection_manager,
                        BackupType.AUTOMATIC,
                        "Automatic scheduled backup"
                    )

                    if backup_info:
                        self.logger.debug("Automatic backup created successfully")
                    else:
                        self.logger.warning("Automatic backup failed")

                # Wait before next check
                self._auto_backup_stop_event.wait(min(300, self.auto_backup_interval // 12))  # poll at 1/12 of the interval, capped at 5 minutes

            except Exception as e:
                self.logger.error(f"Auto backup worker error: {e}")
                self._auto_backup_stop_event.wait(300)  # Wait 5 minutes on error