pomera-ai-commander 1.1.1 → 1.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (213)
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1199 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_directory.py +549 -0
  12. package/core/data_validator.py +1066 -1066
  13. package/core/database_connection_manager.py +744 -744
  14. package/core/database_curl_settings_manager.py +608 -608
  15. package/core/database_promera_ai_settings_manager.py +446 -446
  16. package/core/database_schema.py +411 -411
  17. package/core/database_schema_manager.py +395 -395
  18. package/core/database_settings_manager.py +1507 -1507
  19. package/core/database_settings_manager_interface.py +456 -456
  20. package/core/dialog_manager.py +734 -734
  21. package/core/diff_utils.py +239 -0
  22. package/core/efficient_line_numbers.py +540 -510
  23. package/core/error_handler.py +746 -746
  24. package/core/error_service.py +431 -431
  25. package/core/event_consolidator.py +511 -511
  26. package/core/mcp/__init__.py +43 -43
  27. package/core/mcp/find_replace_diff.py +334 -0
  28. package/core/mcp/protocol.py +288 -288
  29. package/core/mcp/schema.py +251 -251
  30. package/core/mcp/server_stdio.py +299 -299
  31. package/core/mcp/tool_registry.py +2699 -2345
  32. package/core/memento.py +275 -0
  33. package/core/memory_efficient_text_widget.py +711 -711
  34. package/core/migration_manager.py +914 -914
  35. package/core/migration_test_suite.py +1085 -1085
  36. package/core/migration_validator.py +1143 -1143
  37. package/core/optimized_find_replace.py +714 -714
  38. package/core/optimized_pattern_engine.py +424 -424
  39. package/core/optimized_search_highlighter.py +552 -552
  40. package/core/performance_monitor.py +674 -674
  41. package/core/persistence_manager.py +712 -712
  42. package/core/progressive_stats_calculator.py +632 -632
  43. package/core/regex_pattern_cache.py +529 -529
  44. package/core/regex_pattern_library.py +350 -350
  45. package/core/search_operation_manager.py +434 -434
  46. package/core/settings_defaults_registry.py +1087 -1087
  47. package/core/settings_integrity_validator.py +1111 -1111
  48. package/core/settings_serializer.py +557 -557
  49. package/core/settings_validator.py +1823 -1823
  50. package/core/smart_stats_calculator.py +709 -709
  51. package/core/statistics_update_manager.py +619 -619
  52. package/core/stats_config_manager.py +858 -858
  53. package/core/streaming_text_handler.py +723 -723
  54. package/core/task_scheduler.py +596 -596
  55. package/core/update_pattern_library.py +168 -168
  56. package/core/visibility_monitor.py +596 -596
  57. package/core/widget_cache.py +498 -498
  58. package/mcp.json +51 -61
  59. package/migrate_data.py +127 -0
  60. package/package.json +64 -57
  61. package/pomera.py +7883 -7482
  62. package/pomera_mcp_server.py +183 -144
  63. package/requirements.txt +33 -0
  64. package/scripts/Dockerfile.alpine +43 -0
  65. package/scripts/Dockerfile.gui-test +54 -0
  66. package/scripts/Dockerfile.linux +43 -0
  67. package/scripts/Dockerfile.test-linux +80 -0
  68. package/scripts/Dockerfile.ubuntu +39 -0
  69. package/scripts/README.md +53 -0
  70. package/scripts/build-all.bat +113 -0
  71. package/scripts/build-docker.bat +53 -0
  72. package/scripts/build-docker.sh +55 -0
  73. package/scripts/build-optimized.bat +101 -0
  74. package/scripts/build.sh +78 -0
  75. package/scripts/docker-compose.test.yml +27 -0
  76. package/scripts/docker-compose.yml +32 -0
  77. package/scripts/postinstall.js +62 -0
  78. package/scripts/requirements-minimal.txt +33 -0
  79. package/scripts/test-linux-simple.bat +28 -0
  80. package/scripts/validate-release-workflow.py +450 -0
  81. package/tools/__init__.py +4 -4
  82. package/tools/ai_tools.py +2891 -2891
  83. package/tools/ascii_art_generator.py +352 -352
  84. package/tools/base64_tools.py +183 -183
  85. package/tools/base_tool.py +511 -511
  86. package/tools/case_tool.py +308 -308
  87. package/tools/column_tools.py +395 -395
  88. package/tools/cron_tool.py +884 -884
  89. package/tools/curl_history.py +600 -600
  90. package/tools/curl_processor.py +1207 -1207
  91. package/tools/curl_settings.py +502 -502
  92. package/tools/curl_tool.py +5467 -5467
  93. package/tools/diff_viewer.py +1817 -1072
  94. package/tools/email_extraction_tool.py +248 -248
  95. package/tools/email_header_analyzer.py +425 -425
  96. package/tools/extraction_tools.py +250 -250
  97. package/tools/find_replace.py +2289 -1750
  98. package/tools/folder_file_reporter.py +1463 -1463
  99. package/tools/folder_file_reporter_adapter.py +480 -480
  100. package/tools/generator_tools.py +1216 -1216
  101. package/tools/hash_generator.py +255 -255
  102. package/tools/html_tool.py +656 -656
  103. package/tools/jsonxml_tool.py +729 -729
  104. package/tools/line_tools.py +419 -419
  105. package/tools/markdown_tools.py +561 -561
  106. package/tools/mcp_widget.py +1417 -1417
  107. package/tools/notes_widget.py +978 -973
  108. package/tools/number_base_converter.py +372 -372
  109. package/tools/regex_extractor.py +571 -571
  110. package/tools/slug_generator.py +310 -310
  111. package/tools/sorter_tools.py +458 -458
  112. package/tools/string_escape_tool.py +392 -392
  113. package/tools/text_statistics_tool.py +365 -365
  114. package/tools/text_wrapper.py +430 -430
  115. package/tools/timestamp_converter.py +421 -421
  116. package/tools/tool_loader.py +710 -710
  117. package/tools/translator_tools.py +522 -522
  118. package/tools/url_link_extractor.py +261 -261
  119. package/tools/url_parser.py +204 -204
  120. package/tools/whitespace_tools.py +355 -355
  121. package/tools/word_frequency_counter.py +146 -146
  122. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  123. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  124. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  125. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  126. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  127. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  128. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  129. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  131. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  132. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  134. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  135. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  136. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  137. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  138. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  139. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  140. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  141. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  142. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  143. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  144. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  145. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  146. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  147. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  148. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  149. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  150. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  151. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  152. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  153. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  154. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  155. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  156. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  157. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  158. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  159. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  160. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  161. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  162. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  163. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  164. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  165. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  166. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  167. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  168. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  169. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  170. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  192. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  193. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  194. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  195. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  196. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  197. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  198. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  199. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  200. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  201. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  202. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  203. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  204. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  205. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  206. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  207. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  208. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  209. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  210. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  211. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  212. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  213. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
package/core/backup_recovery_manager.py
@@ -1,1034 +1,1200 @@
- """
- Backup and Recovery Manager for Settings Database Migration
-
- This module provides comprehensive backup and recovery procedures for the
- settings database system. It includes automatic JSON backup creation,
- manual backup and restore functionality, database repair tools, and
- settings export/import utilities.
-
- Features:
- - Automatic JSON backup creation before migration
- - Manual backup and restore functionality
- - Database repair and recovery tools
- - Settings export and import utilities
- - Validation tools for settings integrity
- - Backup rotation and cleanup procedures
- """
-
- import json
- import sqlite3
- import os
- import gzip
- from pathlib import Path
- import shutil
- import gzip
- import logging
- import threading
- import time
- from typing import Dict, List, Tuple, Any, Optional, Union
- from datetime import datetime, timedelta
- from pathlib import Path
- from dataclasses import dataclass
- from enum import Enum
-
-
- class BackupType(Enum):
-     """Types of backups that can be created."""
-     AUTOMATIC = "automatic"
-     MANUAL = "manual"
-     MIGRATION = "migration"
-     EMERGENCY = "emergency"
-
-
- class BackupFormat(Enum):
-     """Backup file formats."""
-     JSON = "json"
-     SQLITE = "sqlite"
-     COMPRESSED = "compressed"
-
-
- @dataclass
- class BackupInfo:
-     """Information about a backup."""
-     timestamp: datetime
-     backup_type: BackupType
-     format: BackupFormat
-     filepath: str
-     size_bytes: int
-     checksum: Optional[str] = None
-     description: Optional[str] = None
-     source_info: Optional[Dict[str, Any]] = None
-
-
- class BackupRecoveryManager:
-     """
-     Comprehensive backup and recovery manager for the settings database system.
-
-     Provides automatic and manual backup creation, recovery procedures,
-     database repair tools, and settings validation utilities.
-     """
-
-     def __init__(self, backup_dir: str = "backups",
-                  max_backups: int = 50,
-                  auto_backup_interval: int = 3600,  # 1 hour
-                  enable_compression: bool = True):
-         """
-         Initialize the backup and recovery manager.
-
-         Args:
-             backup_dir: Directory for storing backups
-             max_backups: Maximum number of backups to keep
-             auto_backup_interval: Automatic backup interval in seconds
-             enable_compression: Whether to compress backups
-         """
-         self.backup_dir = Path(backup_dir)
-         self.max_backups = max_backups
-         self.auto_backup_interval = auto_backup_interval
-         self.enable_compression = enable_compression
-
-         # Ensure backup directory exists
-         self.backup_dir.mkdir(parents=True, exist_ok=True)
-
-         # Backup tracking
-         self._backup_history: List[BackupInfo] = []
-         self._last_auto_backup: Optional[datetime] = None
-         self._backup_lock = threading.RLock()
-
-         # Auto backup thread
-         self._auto_backup_thread: Optional[threading.Thread] = None
-         self._auto_backup_stop_event = threading.Event()
-         self._auto_backup_enabled = False
-
-         # Logger
-         self.logger = logging.getLogger(__name__)
-
-         # Load existing backup history and retention settings
-         self._load_backup_history()
-         self._load_retention_settings()
-
-     def create_json_backup(self, settings_data: Dict[str, Any],
-                            backup_type: BackupType = BackupType.MANUAL,
-                            description: Optional[str] = None) -> Optional[BackupInfo]:
-         """
-         Create a JSON backup of settings data.
-
-         Args:
-             settings_data: Settings data to backup
-             backup_type: Type of backup being created
-             description: Optional description for the backup
-
-         Returns:
-             BackupInfo if successful, None otherwise
-         """
-         try:
-             timestamp = datetime.now()
-             filename = self._generate_backup_filename("json", backup_type, timestamp)
-             filepath = self.backup_dir / filename
-
-             # Create backup
-             if self.enable_compression:
-                 with gzip.open(f"{filepath}.gz", 'wt', encoding='utf-8') as f:
-                     json.dump(settings_data, f, indent=2, ensure_ascii=False)
-                 filepath = f"{filepath}.gz"
-                 format_type = BackupFormat.COMPRESSED
-             else:
-                 with open(filepath, 'w', encoding='utf-8') as f:
-                     json.dump(settings_data, f, indent=2, ensure_ascii=False)
-                 format_type = BackupFormat.JSON
-
-             # Get file size
-             size_bytes = os.path.getsize(filepath)
-
-             # Calculate checksum
-             checksum = self._calculate_checksum(filepath)
-
-             # Create backup info
-             backup_info = BackupInfo(
-                 timestamp=timestamp,
-                 backup_type=backup_type,
-                 format=format_type,
-                 filepath=str(filepath),
-                 size_bytes=size_bytes,
-                 checksum=checksum,
-                 description=description,
-                 source_info={
-                     'data_type': 'json_settings',
-                     'keys_count': len(settings_data),
-                     'tool_count': len(settings_data.get('tool_settings', {}))
-                 }
-             )
-
-             # Record backup
-             self._record_backup(backup_info)
-
-             self.logger.info(f"JSON backup created: {filepath}")
-             return backup_info
-
-         except Exception as e:
-             self.logger.error(f"Failed to create JSON backup: {e}")
-             return None
-
-     def create_database_backup(self, connection_manager,
-                                backup_type: BackupType = BackupType.MANUAL,
-                                description: Optional[str] = None) -> Optional[BackupInfo]:
-         """
-         Create a database backup.
-
-         Args:
-             connection_manager: Database connection manager
-             backup_type: Type of backup being created
-             description: Optional description for the backup
-
-         Returns:
-             BackupInfo if successful, None otherwise
-         """
-         try:
-             timestamp = datetime.now()
-             filename = self._generate_backup_filename("db", backup_type, timestamp)
-             filepath = self.backup_dir / filename
-
-             # Create database backup
-             success = connection_manager.backup_to_disk(str(filepath))
-             if not success:
-                 self.logger.error("Database backup failed")
-                 return None
-
-             # Compress if enabled
-             if self.enable_compression:
-                 compressed_path = f"{filepath}.gz"
-                 with open(filepath, 'rb') as f_in:
-                     with gzip.open(compressed_path, 'wb') as f_out:
-                         shutil.copyfileobj(f_in, f_out)
-
-                 # Remove uncompressed file
-                 os.remove(filepath)
-                 filepath = compressed_path
-                 format_type = BackupFormat.COMPRESSED
-             else:
-                 format_type = BackupFormat.SQLITE
-
-             # Get file size
-             size_bytes = os.path.getsize(filepath)
-
-             # Calculate checksum
-             checksum = self._calculate_checksum(filepath)
-
-             # Get database info
-             db_info = self._get_database_info(connection_manager)
-
-             # Create backup info
-             backup_info = BackupInfo(
-                 timestamp=timestamp,
-                 backup_type=backup_type,
-                 format=format_type,
-                 filepath=str(filepath),
-                 size_bytes=size_bytes,
-                 checksum=checksum,
-                 description=description,
-                 source_info=db_info
-             )
-
-             # Record backup
-             self._record_backup(backup_info)
-
-             self.logger.info(f"Database backup created: {filepath}")
-             return backup_info
-
-         except Exception as e:
-             self.logger.error(f"Failed to create database backup: {e}")
-             return None
-
-     def restore_from_json_backup(self, backup_info: BackupInfo) -> Optional[Dict[str, Any]]:
-         """
-         Restore settings from a JSON backup.
-
-         Args:
-             backup_info: Information about the backup to restore
-
-         Returns:
-             Restored settings data if successful, None otherwise
-         """
-         try:
-             filepath = backup_info.filepath
-
-             if not os.path.exists(filepath):
-                 self.logger.error(f"Backup file not found: {filepath}")
-                 return None
-
-             # Verify checksum if available
-             if backup_info.checksum:
-                 current_checksum = self._calculate_checksum(filepath)
-                 if current_checksum != backup_info.checksum:
-                     self.logger.warning(f"Backup checksum mismatch: {filepath}")
-
-             # Load backup data
-             if backup_info.format == BackupFormat.COMPRESSED:
-                 with gzip.open(filepath, 'rt', encoding='utf-8') as f:
-                     settings_data = json.load(f)
-             else:
-                 with open(filepath, 'r', encoding='utf-8') as f:
-                     settings_data = json.load(f)
-
-             self.logger.info(f"Settings restored from JSON backup: {filepath}")
-             return settings_data
-
-         except Exception as e:
-             self.logger.error(f"Failed to restore from JSON backup: {e}")
-             return None
-
-     def restore_from_database_backup(self, backup_info: BackupInfo,
-                                      connection_manager) -> bool:
-         """
-         Restore database from a backup.
-
-         Args:
-             backup_info: Information about the backup to restore
-             connection_manager: Database connection manager
-
-         Returns:
-             True if restore successful, False otherwise
-         """
-         try:
-             filepath = backup_info.filepath
-
-             if not os.path.exists(filepath):
-                 self.logger.error(f"Backup file not found: {filepath}")
-                 return False
-
-             # Verify checksum if available
-             if backup_info.checksum:
-                 current_checksum = self._calculate_checksum(filepath)
-                 if current_checksum != backup_info.checksum:
-                     self.logger.warning(f"Backup checksum mismatch: {filepath}")
-
-             # Prepare restore file
-             restore_path = filepath
-             if backup_info.format == BackupFormat.COMPRESSED:
-                 # Decompress to temporary file
-                 temp_path = self.backup_dir / f"temp_restore_{int(time.time())}.db"
-                 with gzip.open(filepath, 'rb') as f_in:
-                     with open(temp_path, 'wb') as f_out:
-                         shutil.copyfileobj(f_in, f_out)
-                 restore_path = str(temp_path)
-
-             try:
-                 # Restore database
-                 success = connection_manager.restore_from_disk(restore_path)
-
-                 if success:
-                     self.logger.info(f"Database restored from backup: {filepath}")
-                 else:
-                     self.logger.error(f"Database restore failed: {filepath}")
-
-                 return success
-
-             finally:
-                 # Clean up temporary file
-                 if restore_path != filepath and os.path.exists(restore_path):
-                     os.remove(restore_path)
-
-         except Exception as e:
-             self.logger.error(f"Failed to restore from database backup: {e}")
-             return False
-
-     def create_migration_backup(self, json_filepath: str) -> Optional[BackupInfo]:
-         """
-         Create a backup before migration.
-
-         Args:
-             json_filepath: Path to JSON settings file to backup
-
-         Returns:
-             BackupInfo if successful, None otherwise
-         """
-         try:
-             if not os.path.exists(json_filepath):
-                 self.logger.warning(f"JSON file not found for migration backup: {json_filepath}")
-                 return None
-
-             # Load JSON data
-             with open(json_filepath, 'r', encoding='utf-8') as f:
-                 settings_data = json.load(f)
-
-             # Create backup
-             return self.create_json_backup(
-                 settings_data,
-                 BackupType.MIGRATION,
-                 f"Pre-migration backup of {json_filepath}"
-             )
-
-         except Exception as e:
-             self.logger.error(f"Failed to create migration backup: {e}")
-             return None
-
-     def repair_database(self, connection_manager, data_validator) -> bool:
-         """
-         Attempt to repair database corruption.
-
-         Args:
-             connection_manager: Database connection manager
-             data_validator: Data validator for integrity checks
-
-         Returns:
-             True if repair successful, False otherwise
-         """
-         try:
-             self.logger.info("Starting database repair procedure")
-
-             # Create emergency backup first
-             emergency_backup = self.create_database_backup(
-                 connection_manager,
-                 BackupType.EMERGENCY,
-                 "Emergency backup before repair"
-             )
-
-             if not emergency_backup:
-                 self.logger.warning("Could not create emergency backup before repair")
-
-             # Validate database and get issues
-             validation_issues = data_validator.validate_database(fix_issues=False)
-
-             if not validation_issues:
-                 self.logger.info("No database issues found - repair not needed")
-                 return True
-
-             # Attempt to repair issues
-             repair_success = data_validator.repair_data_corruption(validation_issues)
-
-             if repair_success:
-                 # Re-validate after repair
-                 post_repair_issues = data_validator.validate_database(fix_issues=False)
-                 remaining_critical = [i for i in post_repair_issues
-                                       if i.severity.value == "critical"]
-
-                 if not remaining_critical:
-                     self.logger.info("Database repair completed successfully")
-                     return True
-                 else:
-                     self.logger.warning(f"Database repair partially successful - {len(remaining_critical)} critical issues remain")
-                     return False
-             else:
-                 self.logger.error("Database repair failed")
-                 return False
-
-         except Exception as e:
-             self.logger.error(f"Database repair procedure failed: {e}")
-             return False
-
-     def export_settings(self, settings_data: Dict[str, Any],
-                         export_path: str,
-                         format_type: str = "json") -> bool:
-         """
-         Export settings to a file.
-
-         Args:
-             settings_data: Settings data to export
-             export_path: Path to export file
-             format_type: Export format ("json" or "compressed")
-
-         Returns:
-             True if export successful, False otherwise
-         """
-         try:
-             export_file = Path(export_path)
-
-             # Validate settings data
-             if not settings_data:
-                 self.logger.error("Export failed: No settings data provided")
-                 return False
-
-             if not isinstance(settings_data, dict):
-                 self.logger.error(f"Export failed: Settings data must be a dictionary, got {type(settings_data)}")
-                 return False
-
-             # Create parent directory if needed
-             export_file.parent.mkdir(parents=True, exist_ok=True)
-             self.logger.debug(f"Export directory created/verified: {export_file.parent}")
-
-             # Count items being exported for logging
-             tool_count = len(settings_data.get("tool_settings", {}))
-             total_keys = len(settings_data.keys())
-
-             if format_type == "compressed":
-                 with gzip.open(export_path, 'wt', encoding='utf-8') as f:
-                     json.dump(settings_data, f, indent=2, ensure_ascii=False)
-                 self.logger.info(f"Settings exported (compressed) to: {export_path} - {total_keys} keys, {tool_count} tools")
-             else:
-                 with open(export_path, 'w', encoding='utf-8') as f:
-                     json.dump(settings_data, f, indent=2, ensure_ascii=False)
-                 self.logger.info(f"Settings exported to: {export_path} - {total_keys} keys, {tool_count} tools")
-
-             # Verify file was created and has content
-             if export_file.exists():
-                 file_size = export_file.stat().st_size
-                 if file_size > 0:
-                     self.logger.debug(f"Export verification passed - file size: {file_size} bytes")
-                     return True
-                 else:
-                     self.logger.error("Export failed: File created but is empty")
-                     return False
-             else:
-                 self.logger.error("Export failed: File was not created")
-                 return False
-
-         except PermissionError as e:
-             self.logger.error(f"Export failed: Permission denied - {e}")
-             return False
-         except json.JSONEncodeError as e:
-             self.logger.error(f"Export failed: JSON encoding error - {e}")
-             return False
-         except Exception as e:
-             self.logger.error(f"Export failed with unexpected error: {e}", exc_info=True)
-             return False
-
-     def import_settings(self, import_path: str) -> Optional[Dict[str, Any]]:
-         """
-         Import settings from a file.
-
-         Args:
-             import_path: Path to import file
-
-         Returns:
-             Imported settings data if successful, None otherwise
-         """
-         try:
-             import_file = Path(import_path)
-
-             # Validate file exists
-             if not import_file.exists():
-                 self.logger.error(f"Import failed: File not found - {import_path}")
-                 return None
-
-             # Check file size
-             file_size = import_file.stat().st_size
-             if file_size == 0:
-                 self.logger.error(f"Import failed: File is empty - {import_path}")
-                 return None
-
-             self.logger.debug(f"Import file validation passed - size: {file_size} bytes")
-
-             # Detect if file is compressed
-             is_compressed = import_path.endswith('.gz')
-
-             if is_compressed:
-                 self.logger.debug("Importing compressed file")
-                 with gzip.open(import_path, 'rt', encoding='utf-8') as f:
-                     settings_data = json.load(f)
-             else:
-                 self.logger.debug("Importing uncompressed file")
-                 with open(import_path, 'r', encoding='utf-8') as f:
-                     settings_data = json.load(f)
-
-             # Validate imported data
-             if not isinstance(settings_data, dict):
-                 self.logger.error(f"Import failed: Invalid data format - expected dict, got {type(settings_data)}")
-                 return None
-
-             # Count imported items for logging
-             tool_count = len(settings_data.get("tool_settings", {}))
-             total_keys = len(settings_data.keys())
-
-             self.logger.info(f"Settings imported from: {import_path} - {total_keys} keys, {tool_count} tools")
-             return settings_data
-
-         except PermissionError as e:
-             self.logger.error(f"Import failed: Permission denied - {e}")
-             return None
-         except json.JSONDecodeError as e:
-             self.logger.error(f"Import failed: Invalid JSON format - {e}")
-             return None
-         except UnicodeDecodeError as e:
-             self.logger.error(f"Import failed: File encoding error - {e}")
-             return None
-         except Exception as e:
-             self.logger.error(f"Import failed with unexpected error: {e}", exc_info=True)
-             return None
-
-     def validate_backup_integrity(self, backup_info: BackupInfo) -> bool:
-         """
-         Validate the integrity of a backup file.
-
-         Args:
-             backup_info: Information about the backup to validate
-
-         Returns:
-             True if backup is valid, False otherwise
-         """
-         try:
-             filepath = backup_info.filepath
-
-             # Check file exists
-             if not os.path.exists(filepath):
-                 self.logger.error(f"Backup file not found: {filepath}")
-                 return False
-
-             # Check file size
-             current_size = os.path.getsize(filepath)
-             if current_size != backup_info.size_bytes:
-                 self.logger.error(f"Backup file size mismatch: expected {backup_info.size_bytes}, got {current_size}")
-                 return False
-
-             # Check checksum if available
-             if backup_info.checksum:
-                 current_checksum = self._calculate_checksum(filepath)
-                 if current_checksum != backup_info.checksum:
-                     self.logger.error(f"Backup checksum mismatch: {filepath}")
-                     return False
-
-             # Try to read the backup
-             if backup_info.format in [BackupFormat.JSON, BackupFormat.COMPRESSED]:
-                 try:
-                     if backup_info.format == BackupFormat.COMPRESSED:
-                         with gzip.open(filepath, 'rt', encoding='utf-8') as f:
-                             json.load(f)
-                     else:
-                         with open(filepath, 'r', encoding='utf-8') as f:
-                             json.load(f)
-                 except json.JSONDecodeError:
-                     self.logger.error(f"Backup contains invalid JSON: {filepath}")
-                     return False
-
-             elif backup_info.format == BackupFormat.SQLITE:
-                 # Validate SQLite database
-                 try:
-                     if backup_info.format == BackupFormat.COMPRESSED:
-                         # Decompress to temporary file for validation
-                         temp_path = self.backup_dir / f"temp_validate_{int(time.time())}.db"
-                         with gzip.open(filepath, 'rb') as f_in:
-                             with open(temp_path, 'wb') as f_out:
-                                 shutil.copyfileobj(f_in, f_out)
-                         validate_path = str(temp_path)
-                     else:
-                         validate_path = filepath
-
-                     try:
-                         conn = sqlite3.connect(validate_path)
-                         cursor = conn.execute("PRAGMA integrity_check")
-                         result = cursor.fetchone()[0]
-                         conn.close()
-
-                         if result != "ok":
-                             self.logger.error(f"Backup database integrity check failed: {result}")
-                             return False
-                     finally:
-                         if validate_path != filepath and os.path.exists(validate_path):
-                             os.remove(validate_path)
-
-                 except sqlite3.Error as e:
-                     self.logger.error(f"Backup database validation failed: {e}")
-                     return False
-
-             self.logger.info(f"Backup validation successful: {filepath}")
-             return True
-
-         except Exception as e:
-             self.logger.error(f"Backup validation failed: {e}")
-             return False
-
-     def cleanup_old_backups(self) -> int:
-         """
-         Clean up old backups based on retention policy.
-
-         Returns:
-             Number of backups cleaned up
-         """
-         try:
-             with self._backup_lock:
-                 if len(self._backup_history) <= self.max_backups:
-                     return 0
-
-                 # Sort by timestamp, keep most recent
-                 sorted_backups = sorted(self._backup_history, key=lambda b: b.timestamp, reverse=True)
-                 backups_to_remove = sorted_backups[self.max_backups:]
-
-                 removed_count = 0
-                 for backup in backups_to_remove:
-                     try:
-                         if os.path.exists(backup.filepath):
-                             os.remove(backup.filepath)
-                             self.logger.debug(f"Removed old backup: {backup.filepath}")
-
-                         self._backup_history.remove(backup)
-                         removed_count += 1
-
-                     except Exception as e:
-                         self.logger.warning(f"Failed to remove backup {backup.filepath}: {e}")
-
-                 # Save updated history
-                 self._save_backup_history()
-
-                 if removed_count > 0:
-                     self.logger.info(f"Cleaned up {removed_count} old backups")
-
-                 return removed_count
-
-         except Exception as e:
-             self.logger.error(f"Backup cleanup failed: {e}")
-             return 0
-
-     def start_auto_backup(self, connection_manager, settings_manager) -> None:
-         """
-         Start automatic backup thread.
-
-         Args:
-             connection_manager: Database connection manager
-             settings_manager: Settings manager for data access
-         """
-         if self._auto_backup_enabled:
-             return
-
-         self._auto_backup_enabled = True
-         self._auto_backup_stop_event.clear()
-
-         self._auto_backup_thread = threading.Thread(
-             target=self._auto_backup_worker,
-             args=(connection_manager, settings_manager),
-             daemon=True,
-             name="AutoBackupWorker"
-         )
-         self._auto_backup_thread.start()
-
-         self.logger.info("Automatic backup started")
-
-     def stop_auto_backup(self) -> None:
-         """Stop automatic backup thread."""
-         if not self._auto_backup_enabled:
-             return
-
-         self._auto_backup_enabled = False
-         self._auto_backup_stop_event.set()
-
-         if self._auto_backup_thread and self._auto_backup_thread.is_alive():
-             self._auto_backup_thread.join(timeout=5)
-
-         self.logger.info("Automatic backup stopped")
-
-     def get_backup_history(self) -> List[BackupInfo]:
-         """Get list of all backups."""
-         return self._backup_history.copy()
-
-     def get_backup_statistics(self) -> Dict[str, Any]:
-         """
-         Get backup statistics.
-
-         Returns:
-             Dictionary with backup statistics
-         """
-         with self._backup_lock:
-             total_backups = len(self._backup_history)
-             total_size = sum(b.size_bytes for b in self._backup_history)
-
-             # Count by type
-             type_counts = {}
-             for backup_type in BackupType:
-                 count = len([b for b in self._backup_history if b.backup_type == backup_type])
-                 type_counts[backup_type.value] = count
-
-             # Count by format
-             format_counts = {}
-             for backup_format in BackupFormat:
-                 count = len([b for b in self._backup_history if b.format == backup_format])
-                 format_counts[backup_format.value] = count
-
-             # Recent backups
-             recent_backups = [
-                 b for b in self._backup_history
-                 if b.timestamp > datetime.now() - timedelta(days=7)
-             ]
-
-             return {
-                 'total_backups': total_backups,
-                 'total_size_bytes': total_size,
-                 'total_size_mb': round(total_size / (1024 * 1024), 2),
-                 'backups_by_type': type_counts,
-                 'backups_by_format': format_counts,
-                 'recent_backups_7d': len(recent_backups),
-                 'last_backup': self._backup_history[-1].timestamp.isoformat() if self._backup_history else None,
-                 'last_auto_backup': self._last_auto_backup.isoformat() if self._last_auto_backup else None,
-                 'auto_backup_enabled': self._auto_backup_enabled,
-                 'backup_directory': str(self.backup_dir),
-                 'max_backups': self.max_backups
-             }
-
-     # Retention Settings Management
-
-     def get_retention_settings(self) -> Dict[str, Any]:
-         """
-         Get current retention policy settings.
-
-         Returns:
-             Dictionary with retention settings
-         """
-         return {
-             'max_backups': self.max_backups,
-             'auto_backup_interval': self.auto_backup_interval,
-             'enable_compression': self.enable_compression,
-             'backup_directory': str(self.backup_dir),
-             'auto_backup_enabled': self._auto_backup_enabled
-         }
-
-     def update_retention_settings(self, max_backups: Optional[int] = None,
-                                   auto_backup_interval: Optional[int] = None,
-                                   enable_compression: Optional[bool] = None) -> bool:
-         """
-         Update retention policy settings.
-
-         Args:
-             max_backups: Maximum number of backups to keep
-             auto_backup_interval: Automatic backup interval in seconds
-             enable_compression: Whether to enable backup compression
-
-         Returns:
-             True if settings updated successfully
-         """
-         try:
-             settings_changed = False
-
-             # Update max backups
-             if max_backups is not None and max_backups >= 5:
-                 old_max = self.max_backups
-                 self.max_backups = max_backups
-                 settings_changed = True
-
-                 # If we reduced the limit, cleanup old backups immediately
-                 if max_backups < old_max:
-                     self.cleanup_old_backups()
-
-                 self.logger.info(f"Updated max_backups: {old_max} -> {max_backups}")
-
-             # Update auto backup interval
-             if auto_backup_interval is not None and auto_backup_interval >= 300:  # Minimum 5 minutes
-                 old_interval = self.auto_backup_interval
-                 self.auto_backup_interval = auto_backup_interval
-                 settings_changed = True
-
-                 self.logger.info(f"Updated auto_backup_interval: {old_interval}s -> {auto_backup_interval}s")
-
-             # Update compression setting
-             if enable_compression is not None:
-                 old_compression = self.enable_compression
-                 self.enable_compression = enable_compression
-                 settings_changed = True
-
-                 self.logger.info(f"Updated enable_compression: {old_compression} -> {enable_compression}")
-
-             # Save settings to persistent storage
-             if settings_changed:
-                 self._save_retention_settings()
-
-             return settings_changed
-
-         except Exception as e:
-             self.logger.error(f"Failed to update retention settings: {e}")
-             return False
-
-     def reset_retention_settings_to_defaults(self) -> bool:
-         """
-         Reset retention settings to default values.
-
-         Returns:
-             True if reset successful
-         """
-         try:
-             return self.update_retention_settings(
-                 max_backups=50,
-                 auto_backup_interval=3600,  # 1 hour
-                 enable_compression=True
-             )
-         except Exception as e:
-             self.logger.error(f"Failed to reset retention settings: {e}")
-             return False
-
-     # Private methods
-
-     def _generate_backup_filename(self, extension: str, backup_type: BackupType,
-                                   timestamp: datetime) -> str:
-         """Generate backup filename."""
-         timestamp_str = timestamp.strftime("%Y%m%d_%H%M%S")
-         return f"settings_backup_{backup_type.value}_{timestamp_str}.{extension}"
-
-     def _calculate_checksum(self, filepath: str) -> str:
-         """Calculate MD5 checksum of a file."""
-         import hashlib
-
-         hash_md5 = hashlib.md5()
-         with open(filepath, "rb") as f:
-             for chunk in iter(lambda: f.read(4096), b""):
-                 hash_md5.update(chunk)
-         return hash_md5.hexdigest()
-
-     def _get_database_info(self, connection_manager) -> Dict[str, Any]:
-         """Get database information for backup metadata."""
-         try:
-             conn = connection_manager.get_connection()
-
-             # Get table counts
-             table_counts = {}
-             tables = ['core_settings', 'tool_settings', 'tab_content',
-                       'performance_settings', 'font_settings', 'dialog_settings']
-
-             for table in tables:
-                 try:
-                     cursor = conn.execute(f"SELECT COUNT(*) FROM {table}")
-                     count = cursor.fetchone()[0]
-                     table_counts[table] = count
-                 except sqlite3.Error:
-                     table_counts[table] = 0
-
-             return {
-                 'data_type': 'sqlite_database',
-                 'table_counts': table_counts,
-                 'total_records': sum(table_counts.values())
-             }
-
-         except Exception as e:
-             self.logger.warning(f"Failed to get database info: {e}")
-             return {'data_type': 'sqlite_database', 'error': str(e)}
-
-     def _record_backup(self, backup_info: BackupInfo) -> None:
-         """Record backup in history."""
-         with self._backup_lock:
-             self._backup_history.append(backup_info)
-
-             # Update last auto backup time if applicable
-             if backup_info.backup_type == BackupType.AUTOMATIC:
-                 self._last_auto_backup = backup_info.timestamp
-
-             # Save history
-             self._save_backup_history()
-
-             # Clean up old backups if needed
-             if len(self._backup_history) > self.max_backups:
-                 self.cleanup_old_backups()
-
-     def _load_backup_history(self) -> None:
-         """Load backup history from file."""
-         history_file = self.backup_dir / "backup_history.json"
-
-         try:
-             if history_file.exists():
-                 with open(history_file, 'r', encoding='utf-8') as f:
-                     history_data = json.load(f)
-
-                 self._backup_history = []
-                 for item in history_data.get('backups', []):
-                     backup_info = BackupInfo(
-                         timestamp=datetime.fromisoformat(item['timestamp']),
-                         backup_type=BackupType(item['backup_type']),
-                         format=BackupFormat(item['format']),
-                         filepath=item['filepath'],
-                         size_bytes=item['size_bytes'],
-                         checksum=item.get('checksum'),
-                         description=item.get('description'),
-                         source_info=item.get('source_info')
-                     )
-                     self._backup_history.append(backup_info)
-
-                 # Load last auto backup time
-                 if 'last_auto_backup' in history_data:
-                     self._last_auto_backup = datetime.fromisoformat(history_data['last_auto_backup'])
-
-                 self.logger.debug(f"Loaded {len(self._backup_history)} backup records")
-
-         except Exception as e:
-             self.logger.warning(f"Failed to load backup history: {e}")
-             self._backup_history = []
-
-     def _save_backup_history(self) -> None:
-         """Save backup history to file."""
-         history_file = self.backup_dir / "backup_history.json"
-
-         try:
-             history_data = {
-                 'backups': [
-                     {
-                         'timestamp': backup.timestamp.isoformat(),
-                         'backup_type': backup.backup_type.value,
-                         'format': backup.format.value,
-                         'filepath': backup.filepath,
-                         'size_bytes': backup.size_bytes,
-                         'checksum': backup.checksum,
-                         'description': backup.description,
-                         'source_info': backup.source_info
-                     }
-                     for backup in self._backup_history
-                 ],
-                 'last_auto_backup': self._last_auto_backup.isoformat() if self._last_auto_backup else None
-             }
-
-             with open(history_file, 'w', encoding='utf-8') as f:
-                 json.dump(history_data, f, indent=2, ensure_ascii=False)
-
-         except Exception as e:
-             self.logger.warning(f"Failed to save backup history: {e}")
-
-     def _save_retention_settings(self) -> None:
-         """Save retention settings to file."""
-         settings_file = self.backup_dir / "retention_settings.json"
-
-         try:
-             settings_data = {
-                 'max_backups': self.max_backups,
-                 'auto_backup_interval': self.auto_backup_interval,
-                 'enable_compression': self.enable_compression,
-                 'last_updated': datetime.now().isoformat()
-             }
-
-             with open(settings_file, 'w', encoding='utf-8') as f:
-                 json.dump(settings_data, f, indent=2, ensure_ascii=False)
-
-             self.logger.debug("Retention settings saved")
-
-         except Exception as e:
-             self.logger.warning(f"Failed to save retention settings: {e}")
-
-     def _load_retention_settings(self) -> None:
-         """Load retention settings from file."""
-         settings_file = self.backup_dir / "retention_settings.json"
-
-         try:
-             if settings_file.exists():
-                 with open(settings_file, 'r', encoding='utf-8') as f:
-                     settings_data = json.load(f)
-
-                 # Apply loaded settings
-                 self.max_backups = settings_data.get('max_backups', self.max_backups)
-                 self.auto_backup_interval = settings_data.get('auto_backup_interval', self.auto_backup_interval)
-                 self.enable_compression = settings_data.get('enable_compression', self.enable_compression)
-
-                 self.logger.debug("Retention settings loaded from file")
-
-         except Exception as e:
-             self.logger.warning(f"Failed to load retention settings: {e}")
-
-     def _auto_backup_worker(self, connection_manager, settings_manager) -> None:
-         """Worker thread for automatic backups."""
-         while not self._auto_backup_stop_event.is_set():
-             try:
-                 # Check if backup is needed
-                 should_backup = False
-
-                 if self._last_auto_backup is None:
-                     should_backup = True
-                 elif datetime.now() - self._last_auto_backup > timedelta(seconds=self.auto_backup_interval):
-                     should_backup = True
-
-                 if should_backup:
-                     # Create automatic backup
-                     backup_info = self.create_database_backup(
-                         connection_manager,
-                         BackupType.AUTOMATIC,
-                         "Automatic scheduled backup"
-                     )
-
-                     if backup_info:
-                         self.logger.debug("Automatic backup created successfully")
-                     else:
-                         self.logger.warning("Automatic backup failed")
-
-                 # Wait before next check
-                 self._auto_backup_stop_event.wait(min(300, self.auto_backup_interval // 12))  # Check every 5 minutes or 1/12 of interval
-
-             except Exception as e:
-                 self.logger.error(f"Auto backup worker error: {e}")
1
+ """
2
+ Backup and Recovery Manager for Settings Database Migration
3
+
4
+ This module provides comprehensive backup and recovery procedures for the
5
+ settings database system. It includes automatic JSON backup creation,
6
+ manual backup and restore functionality, database repair tools, and
7
+ settings export/import utilities.
8
+
9
+ Features:
10
+ - Automatic JSON backup creation before migration
11
+ - Manual backup and restore functionality
12
+ - Database repair and recovery tools
13
+ - Settings export and import utilities
14
+ - Validation tools for settings integrity
15
+ - Backup rotation and cleanup procedures
16
+ """
17
+
18
+ import json
19
+ import sqlite3
20
+ import os
21
+ import gzip
22
+ from pathlib import Path
23
+ import shutil
24
+ import gzip
25
+ import logging
26
+ import threading
27
+ import time
28
+ from typing import Dict, List, Tuple, Any, Optional, Union
29
+ from datetime import datetime, timedelta
30
+ from pathlib import Path
31
+ from dataclasses import dataclass
32
+ from enum import Enum
33
+
34
+
35
+ class BackupType(Enum):
36
+ """Types of backups that can be created."""
37
+ AUTOMATIC = "automatic"
38
+ MANUAL = "manual"
39
+ MIGRATION = "migration"
40
+ EMERGENCY = "emergency"
41
+
42
+
43
+ class BackupFormat(Enum):
44
+ """Backup file formats."""
45
+ JSON = "json"
46
+ SQLITE = "sqlite"
47
+ COMPRESSED = "compressed"
48
+
49
+
50
+ @dataclass
51
+ class BackupInfo:
52
+ """Information about a backup."""
53
+ timestamp: datetime
54
+ backup_type: BackupType
55
+ format: BackupFormat
56
+ filepath: str
57
+ size_bytes: int
58
+ checksum: Optional[str] = None
59
+ description: Optional[str] = None
60
+ source_info: Optional[Dict[str, Any]] = None
61
+
62
+
63
+ class BackupRecoveryManager:
64
+ """
65
+ Comprehensive backup and recovery manager for the settings database system.
66
+
67
+ Provides automatic and manual backup creation, recovery procedures,
68
+ database repair tools, and settings validation utilities.
69
+ """
70
+
71
+ def __init__(self, backup_dir: str = "backups",
72
+ max_backups: int = 50,
73
+ auto_backup_interval: int = 3600, # 1 hour
74
+ enable_compression: bool = True):
75
+ """
76
+ Initialize the backup and recovery manager.
77
+
78
+ Args:
79
+ backup_dir: Directory for storing backups
80
+ max_backups: Maximum number of backups to keep
81
+ auto_backup_interval: Automatic backup interval in seconds
82
+ enable_compression: Whether to compress backups
83
+ """
84
+ self.backup_dir = Path(backup_dir)
85
+ self.max_backups = max_backups
86
+ self.auto_backup_interval = auto_backup_interval
87
+ self.enable_compression = enable_compression
88
+
89
+ # Ensure backup directory exists
90
+ self.backup_dir.mkdir(parents=True, exist_ok=True)
91
+
92
+ # Backup tracking
93
+ self._backup_history: List[BackupInfo] = []
94
+ self._last_auto_backup: Optional[datetime] = None
95
+ self._backup_lock = threading.RLock()
96
+
97
+ # Auto backup thread
98
+ self._auto_backup_thread: Optional[threading.Thread] = None
99
+ self._auto_backup_stop_event = threading.Event()
100
+ self._auto_backup_enabled = False
101
+
102
+ # Logger
103
+ self.logger = logging.getLogger(__name__)
104
+
105
+ # Load existing backup history and retention settings
106
+ self._load_backup_history()
107
+ self._load_retention_settings()
108
+
109
+ def create_json_backup(self, settings_data: Dict[str, Any],
110
+ backup_type: BackupType = BackupType.MANUAL,
111
+ description: Optional[str] = None) -> Optional[BackupInfo]:
112
+ """
113
+ Create a JSON backup of settings data.
114
+
115
+ Args:
116
+ settings_data: Settings data to backup
117
+ backup_type: Type of backup being created
118
+ description: Optional description for the backup
119
+
120
+ Returns:
121
+ BackupInfo if successful, None otherwise
122
+ """
123
+ try:
124
+ timestamp = datetime.now()
125
+ filename = self._generate_backup_filename("json", backup_type, timestamp)
126
+ filepath = self.backup_dir / filename
127
+
128
+ # Create backup
129
+ if self.enable_compression:
130
+ with gzip.open(f"{filepath}.gz", 'wt', encoding='utf-8') as f:
131
+ json.dump(settings_data, f, indent=2, ensure_ascii=False)
132
+ filepath = f"{filepath}.gz"
133
+ format_type = BackupFormat.COMPRESSED
134
+ else:
135
+ with open(filepath, 'w', encoding='utf-8') as f:
136
+ json.dump(settings_data, f, indent=2, ensure_ascii=False)
137
+ format_type = BackupFormat.JSON
138
+
139
+ # Get file size
140
+ size_bytes = os.path.getsize(filepath)
141
+
142
+ # Calculate checksum
143
+ checksum = self._calculate_checksum(filepath)
144
+
145
+ # Create backup info
146
+ backup_info = BackupInfo(
147
+ timestamp=timestamp,
148
+ backup_type=backup_type,
149
+ format=format_type,
150
+ filepath=str(filepath),
151
+ size_bytes=size_bytes,
152
+ checksum=checksum,
153
+ description=description,
154
+ source_info={
155
+ 'data_type': 'json_settings',
156
+ 'keys_count': len(settings_data),
157
+ 'tool_count': len(settings_data.get('tool_settings', {}))
158
+ }
159
+ )
160
+
161
+ # Record backup
162
+ self._record_backup(backup_info)
163
+
164
+ self.logger.info(f"JSON backup created: {filepath}")
165
+ return backup_info
166
+
167
+ except Exception as e:
168
+ self.logger.error(f"Failed to create JSON backup: {e}")
169
+ return None
170
+
171
+ def create_database_backup(self, connection_manager,
172
+ backup_type: BackupType = BackupType.MANUAL,
173
+ description: Optional[str] = None) -> Optional[BackupInfo]:
174
+ """
175
+ Create a database backup.
176
+
177
+ Args:
178
+ connection_manager: Database connection manager
179
+ backup_type: Type of backup being created
180
+ description: Optional description for the backup
181
+
182
+ Returns:
183
+ BackupInfo if successful, None otherwise
184
+ """
185
+ try:
186
+ timestamp = datetime.now()
187
+ filename = self._generate_backup_filename("db", backup_type, timestamp)
188
+ filepath = self.backup_dir / filename
189
+
190
+ # Create database backup
191
+ success = connection_manager.backup_to_disk(str(filepath))
192
+ if not success:
193
+ self.logger.error("Database backup failed")
194
+ return None
195
+
196
+ # Compress if enabled
197
+ if self.enable_compression:
198
+ compressed_path = f"{filepath}.gz"
199
+ with open(filepath, 'rb') as f_in:
200
+ with gzip.open(compressed_path, 'wb') as f_out:
201
+ shutil.copyfileobj(f_in, f_out)
202
+
203
+ # Remove uncompressed file
204
+ os.remove(filepath)
205
+ filepath = compressed_path
206
+ format_type = BackupFormat.COMPRESSED
207
+ else:
208
+ format_type = BackupFormat.SQLITE
209
+
210
+ # Get file size
211
+ size_bytes = os.path.getsize(filepath)
212
+
213
+ # Calculate checksum
214
+ checksum = self._calculate_checksum(filepath)
215
+
216
+ # Get database info
217
+ db_info = self._get_database_info(connection_manager)
218
+
219
+ # Create backup info
220
+ backup_info = BackupInfo(
221
+ timestamp=timestamp,
222
+ backup_type=backup_type,
223
+ format=format_type,
224
+ filepath=str(filepath),
225
+ size_bytes=size_bytes,
226
+ checksum=checksum,
227
+ description=description,
228
+ source_info=db_info
229
+ )
230
+
231
+ # Record backup
232
+ self._record_backup(backup_info)
233
+
234
+ self.logger.info(f"Database backup created: {filepath}")
235
+ return backup_info
236
+
237
+ except Exception as e:
238
+ self.logger.error(f"Failed to create database backup: {e}")
239
+ return None
240
+
241
+ def restore_from_json_backup(self, backup_info: BackupInfo) -> Optional[Dict[str, Any]]:
242
+ """
243
+ Restore settings from a JSON backup.
244
+
245
+ Args:
246
+ backup_info: Information about the backup to restore
247
+
248
+ Returns:
249
+ Restored settings data if successful, None otherwise
250
+ """
251
+ try:
252
+ filepath = backup_info.filepath
253
+
254
+ if not os.path.exists(filepath):
255
+ self.logger.error(f"Backup file not found: {filepath}")
256
+ return None
257
+
258
+ # Verify checksum if available
259
+ if backup_info.checksum:
260
+ current_checksum = self._calculate_checksum(filepath)
261
+ if current_checksum != backup_info.checksum:
262
+ self.logger.warning(f"Backup checksum mismatch: {filepath}")
263
+
264
+ # Load backup data
265
+ if backup_info.format == BackupFormat.COMPRESSED:
266
+ with gzip.open(filepath, 'rt', encoding='utf-8') as f:
267
+ settings_data = json.load(f)
268
+ else:
269
+ with open(filepath, 'r', encoding='utf-8') as f:
270
+ settings_data = json.load(f)
271
+
272
+ self.logger.info(f"Settings restored from JSON backup: {filepath}")
273
+ return settings_data
274
+
275
+ except Exception as e:
276
+ self.logger.error(f"Failed to restore from JSON backup: {e}")
277
+ return None
278
+
+    def restore_from_database_backup(self, backup_info: BackupInfo,
+                                     connection_manager) -> bool:
+        """
+        Restore database from a backup.
+
+        Args:
+            backup_info: Information about the backup to restore
+            connection_manager: Database connection manager
+
+        Returns:
+            True if restore successful, False otherwise
+        """
+        try:
+            filepath = backup_info.filepath
+
+            if not os.path.exists(filepath):
+                self.logger.error(f"Backup file not found: {filepath}")
+                return False
+
+            # Verify checksum if available
+            if backup_info.checksum:
+                current_checksum = self._calculate_checksum(filepath)
+                if current_checksum != backup_info.checksum:
+                    self.logger.warning(f"Backup checksum mismatch: {filepath}")
+
+            # Prepare restore file
+            restore_path = filepath
+            if backup_info.format == BackupFormat.COMPRESSED:
+                # Decompress to temporary file
+                temp_path = self.backup_dir / f"temp_restore_{int(time.time())}.db"
+                with gzip.open(filepath, 'rb') as f_in:
+                    with open(temp_path, 'wb') as f_out:
+                        shutil.copyfileobj(f_in, f_out)
+                restore_path = str(temp_path)
+
+            try:
+                # Restore database
+                success = connection_manager.restore_from_disk(restore_path)
+
+                if success:
+                    self.logger.info(f"Database restored from backup: {filepath}")
+                else:
+                    self.logger.error(f"Database restore failed: {filepath}")
+
+                return success
+
+            finally:
+                # Clean up temporary file
+                if restore_path != filepath and os.path.exists(restore_path):
+                    os.remove(restore_path)
+
+        except Exception as e:
+            self.logger.error(f"Failed to restore from database backup: {e}")
+            return False
+
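Restoring a compressed backup follows the inverse pattern: inflate to a scratch file, hand that path to the restore call, then delete it. A sketch of the decompress-to-temp step using the standard tempfile module (the helper name is illustrative, not part of the package):

import gzip
import os
import shutil
import tempfile

def decompress_to_temp(gz_path: str) -> str:
    # Inflate the gzip payload into a named scratch file and return its
    # path; the caller removes the file once the restore completes.
    fd, tmp_path = tempfile.mkstemp(suffix=".db")
    try:
        with os.fdopen(fd, "wb") as f_out, gzip.open(gz_path, "rb") as f_in:
            shutil.copyfileobj(f_in, f_out)
        return tmp_path
    except Exception:
        os.remove(tmp_path)
        raise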
+    def create_migration_backup(self, json_filepath: str) -> Optional[BackupInfo]:
+        """
+        Create a backup before migration.
+
+        Args:
+            json_filepath: Path to JSON settings file to backup
+
+        Returns:
+            BackupInfo if successful, None otherwise
+        """
+        try:
+            if not os.path.exists(json_filepath):
+                self.logger.warning(f"JSON file not found for migration backup: {json_filepath}")
+                return None
+
+            # Load JSON data
+            with open(json_filepath, 'r', encoding='utf-8') as f:
+                settings_data = json.load(f)
+
+            # Create backup
+            return self.create_json_backup(
+                settings_data,
+                BackupType.MIGRATION,
+                f"Pre-migration backup of {json_filepath}"
+            )
+
+        except Exception as e:
+            self.logger.error(f"Failed to create migration backup: {e}")
+            return None
+
+    def repair_database(self, connection_manager, data_validator) -> bool:
+        """
+        Attempt to repair database corruption.
+
+        Args:
+            connection_manager: Database connection manager
+            data_validator: Data validator for integrity checks
+
+        Returns:
+            True if repair successful, False otherwise
+        """
+        try:
+            self.logger.info("Starting database repair procedure")
+
+            # Create emergency backup first
+            emergency_backup = self.create_database_backup(
+                connection_manager,
+                BackupType.EMERGENCY,
+                "Emergency backup before repair"
+            )
+
+            if not emergency_backup:
+                self.logger.warning("Could not create emergency backup before repair")
+
+            # Validate database and get issues
+            validation_issues = data_validator.validate_database(fix_issues=False)
+
+            if not validation_issues:
+                self.logger.info("No database issues found - repair not needed")
+                return True
+
+            # Attempt to repair issues
+            repair_success = data_validator.repair_data_corruption(validation_issues)
+
+            if repair_success:
+                # Re-validate after repair
+                post_repair_issues = data_validator.validate_database(fix_issues=False)
+                remaining_critical = [i for i in post_repair_issues
+                                      if i.severity.value == "critical"]
+
+                if not remaining_critical:
+                    self.logger.info("Database repair completed successfully")
+                    return True
+                else:
+                    self.logger.warning(f"Database repair partially successful - {len(remaining_critical)} critical issues remain")
+                    return False
+            else:
+                self.logger.error("Database repair failed")
+                return False
+
+        except Exception as e:
+            self.logger.error(f"Database repair procedure failed: {e}")
+            return False
+
+    def export_settings(self, settings_data: Dict[str, Any],
+                        export_path: str,
+                        format_type: str = "json") -> bool:
+        """
+        Export settings to a file.
+
+        Also exports notes from notes.db if available.
+
+        Args:
+            settings_data: Settings data to export
+            export_path: Path to export file
+            format_type: Export format ("json" or "compressed")
+
+        Returns:
+            True if export successful, False otherwise
+        """
+        try:
+            export_file = Path(export_path)
+
+            # Validate settings data
+            if not settings_data:
+                self.logger.error("Export failed: No settings data provided")
+                return False
+
+            if not isinstance(settings_data, dict):
+                self.logger.error(f"Export failed: Settings data must be a dictionary, got {type(settings_data)}")
+                return False
+
+            # Create parent directory if needed
+            export_file.parent.mkdir(parents=True, exist_ok=True)
+            self.logger.debug(f"Export directory created/verified: {export_file.parent}")
+
+            # Include notes data from notes.db
+            notes_data = self._export_notes_data()
+            if notes_data:
+                settings_data['notes'] = notes_data
+                self.logger.info(f"Including {len(notes_data)} notes in export")
+
+            # Count items being exported for logging
+            tool_count = len(settings_data.get("tool_settings", {}))
+            notes_count = len(settings_data.get("notes", []))
+            total_keys = len(settings_data.keys())
+
+            if format_type == "compressed":
+                with gzip.open(export_path, 'wt', encoding='utf-8') as f:
+                    json.dump(settings_data, f, indent=2, ensure_ascii=False)
+                self.logger.info(f"Settings exported (compressed) to: {export_path} - {total_keys} keys, {tool_count} tools, {notes_count} notes")
+            else:
+                with open(export_path, 'w', encoding='utf-8') as f:
+                    json.dump(settings_data, f, indent=2, ensure_ascii=False)
+                self.logger.info(f"Settings exported to: {export_path} - {total_keys} keys, {tool_count} tools, {notes_count} notes")
+
+            # Verify file was created and has content
+            if export_file.exists():
+                file_size = export_file.stat().st_size
+                if file_size > 0:
+                    self.logger.debug(f"Export verification passed - file size: {file_size} bytes")
+                    return True
+                else:
+                    self.logger.error("Export failed: File created but is empty")
+                    return False
+            else:
+                self.logger.error("Export failed: File was not created")
+                return False
+
+        except PermissionError as e:
+            self.logger.error(f"Export failed: Permission denied - {e}")
+            return False
+        except (TypeError, ValueError) as e:
+            # json.dump raises TypeError/ValueError for unserializable data;
+            # the json module defines no JSONEncodeError.
+            self.logger.error(f"Export failed: JSON encoding error - {e}")
+            return False
+        except Exception as e:
+            self.logger.error(f"Export failed with unexpected error: {e}", exc_info=True)
+            return False
+
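A usage sketch for the export path, assuming `manager` is an instance of this module's backup/recovery manager (the variable name is illustrative) and a settings dict shaped like the keys the logging above counts:

# `manager` is an illustrative instance of this module's manager class.
settings = {"tool_settings": {"word_count": {"enabled": True}}, "theme": "dark"}

ok = manager.export_settings(settings, "/tmp/pomera_settings.json")
ok_gz = manager.export_settings(settings, "/tmp/pomera_settings.json.gz",
                                format_type="compressed")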
+    def _export_notes_data(self) -> Optional[List[Dict[str, Any]]]:
+        """
+        Export notes from notes.db.
+
+        Returns:
+            List of note dictionaries, or None if notes.db not available
+        """
+        try:
+            # Get notes database path
+            try:
+                from core.data_directory import get_database_path
+                notes_db_path = get_database_path('notes.db')
+            except ImportError:
+                # Fallback to backup directory parent
+                notes_db_path = str(self.backup_dir.parent / 'notes.db')
+
+            if not os.path.exists(notes_db_path):
+                self.logger.debug(f"Notes database not found: {notes_db_path}")
+                return None
+
+            import sqlite3
+            conn = sqlite3.connect(notes_db_path, timeout=10.0)
+            conn.row_factory = sqlite3.Row
+
+            try:
+                cursor = conn.execute('''
+                    SELECT id, Created, Modified, Title, Input, Output
+                    FROM notes ORDER BY id
+                ''')
+                notes = []
+                for row in cursor.fetchall():
+                    notes.append({
+                        'id': row['id'],
+                        'Created': row['Created'],
+                        'Modified': row['Modified'],
+                        'Title': row['Title'],
+                        'Input': row['Input'],
+                        'Output': row['Output']
+                    })
+
+                self.logger.debug(f"Exported {len(notes)} notes from notes.db")
+                return notes
+
+            finally:
+                conn.close()
+
+        except Exception as e:
+            self.logger.warning(f"Failed to export notes data: {e}")
+            return None
+
+    def _import_notes_data(self, notes_data: List[Dict[str, Any]]) -> int:
+        """
+        Import notes to notes.db.
+
+        Notes are imported with their original IDs; notes whose IDs already
+        exist in the target database are skipped.
+
+        Args:
+            notes_data: List of note dictionaries to import
+
+        Returns:
+            Number of notes successfully imported
+        """
+        if not notes_data:
+            return 0
+
+        try:
+            # Get notes database path
+            try:
+                from core.data_directory import get_database_path
+                notes_db_path = get_database_path('notes.db')
+            except ImportError:
+                # Fallback to backup directory parent
+                notes_db_path = str(self.backup_dir.parent / 'notes.db')
+
+            import sqlite3
+            conn = sqlite3.connect(notes_db_path, timeout=10.0)
+
+            try:
+                # Ensure tables exist
+                conn.execute('''
+                    CREATE TABLE IF NOT EXISTS notes (
+                        id INTEGER PRIMARY KEY AUTOINCREMENT,
+                        Created DATETIME DEFAULT CURRENT_TIMESTAMP,
+                        Modified DATETIME DEFAULT CURRENT_TIMESTAMP,
+                        Title TEXT(255),
+                        Input TEXT,
+                        Output TEXT
+                    )
+                ''')
+
+                # Check if FTS table exists
+                fts_exists = conn.execute(
+                    "SELECT name FROM sqlite_master WHERE type='table' AND name='notes_fts'"
+                ).fetchone() is not None
+
+                imported_count = 0
+                for note in notes_data:
+                    try:
+                        # Check if note with this ID already exists
+                        existing = conn.execute(
+                            'SELECT id FROM notes WHERE id = ?',
+                            (note.get('id'),)
+                        ).fetchone()
+
+                        if existing:
+                            # Skip notes that already exist
+                            self.logger.debug(f"Skipping existing note ID {note.get('id')}")
+                            continue
+
+                        # Insert with original ID if possible
+                        conn.execute('''
+                            INSERT INTO notes (id, Created, Modified, Title, Input, Output)
+                            VALUES (?, ?, ?, ?, ?, ?)
+                        ''', (
+                            note.get('id'),
+                            note.get('Created'),
+                            note.get('Modified'),
+                            note.get('Title'),
+                            note.get('Input'),
+                            note.get('Output')
+                        ))
+                        imported_count += 1
+
+                    except Exception as e:
+                        self.logger.debug(f"Failed to import note {note.get('id')}: {e}")
+
+                conn.commit()
+
+                # Rebuild FTS index if table exists
+                if fts_exists:
+                    try:
+                        conn.execute('INSERT INTO notes_fts(notes_fts) VALUES("rebuild")')
+                        conn.commit()
+                    except Exception as e:
+                        self.logger.debug(f"FTS rebuild skipped: {e}")
+
+                self.logger.debug(f"Imported {imported_count} notes to notes.db")
+                return imported_count
+
+            finally:
+                conn.close()
+
+        except Exception as e:
+            self.logger.warning(f"Failed to import notes data: {e}")
+            return 0
+
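The `INSERT INTO notes_fts(notes_fts) VALUES('rebuild')` statement is the standard FTS5 "rebuild" command, which repopulates an external-content index from its backing table after rows were inserted behind the index's back. A minimal sketch, assuming an FTS5-enabled SQLite build and an illustrative schema (the package's real notes_fts definition lives elsewhere):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE notes (id INTEGER PRIMARY KEY, Title TEXT, Input TEXT)")
# Illustrative external-content index over the notes table.
conn.execute(
    "CREATE VIRTUAL TABLE notes_fts USING fts5("
    "Title, Input, content='notes', content_rowid='id')"
)
conn.execute("INSERT INTO notes (Title, Input) VALUES ('hello', 'world')")
# 'rebuild' repopulates the index from the content table, which is why the
# import path issues it once after the bulk inserts.
conn.execute("INSERT INTO notes_fts(notes_fts) VALUES('rebuild')")
print(conn.execute(
    "SELECT rowid FROM notes_fts WHERE notes_fts MATCH 'hello'").fetchall())
# -> [(1,)]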
+    def import_settings(self, import_path: str) -> Optional[Dict[str, Any]]:
+        """
+        Import settings from a file.
+
+        Also imports notes to notes.db if present in the import file.
+
+        Args:
+            import_path: Path to import file
+
+        Returns:
+            Imported settings data if successful, None otherwise
+        """
+        try:
+            import_file = Path(import_path)
+
+            # Validate file exists
+            if not import_file.exists():
+                self.logger.error(f"Import failed: File not found - {import_path}")
+                return None
+
+            # Check file size
+            file_size = import_file.stat().st_size
+            if file_size == 0:
+                self.logger.error(f"Import failed: File is empty - {import_path}")
+                return None
+
+            self.logger.debug(f"Import file validation passed - size: {file_size} bytes")
+
+            # Detect if file is compressed
+            is_compressed = import_path.endswith('.gz')
+
+            if is_compressed:
+                self.logger.debug("Importing compressed file")
+                with gzip.open(import_path, 'rt', encoding='utf-8') as f:
+                    settings_data = json.load(f)
+            else:
+                self.logger.debug("Importing uncompressed file")
+                with open(import_path, 'r', encoding='utf-8') as f:
+                    settings_data = json.load(f)
+
+            # Validate imported data
+            if not isinstance(settings_data, dict):
+                self.logger.error(f"Import failed: Invalid data format - expected dict, got {type(settings_data)}")
+                return None
+
+            # Import notes if present
+            if 'notes' in settings_data:
+                notes_data = settings_data.pop('notes')  # Remove from settings_data
+                if notes_data:
+                    imported_count = self._import_notes_data(notes_data)
+                    self.logger.info(f"Imported {imported_count} notes to notes.db")
+
+            # Count imported items for logging
+            tool_count = len(settings_data.get("tool_settings", {}))
+            total_keys = len(settings_data.keys())
+
+            self.logger.info(f"Settings imported from: {import_path} - {total_keys} keys, {tool_count} tools")
+            return settings_data
+
+        except PermissionError as e:
+            self.logger.error(f"Import failed: Permission denied - {e}")
+            return None
+        except json.JSONDecodeError as e:
+            self.logger.error(f"Import failed: Invalid JSON format - {e}")
+            return None
+        except UnicodeDecodeError as e:
+            self.logger.error(f"Import failed: File encoding error - {e}")
+            return None
+        except Exception as e:
+            self.logger.error(f"Import failed with unexpected error: {e}", exc_info=True)
+            return None
+
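A sketch of the export/import roundtrip, again assuming an illustrative `manager` instance; on import, compression is inferred from the .gz suffix rather than from stored metadata:

# Roundtrip with the same illustrative `manager` as above.
if manager.export_settings({"tool_settings": {}}, "/tmp/export.json.gz",
                           format_type="compressed"):
    # The .gz suffix selects gzip on import; a bundled 'notes' list would be
    # popped out and written to notes.db before the settings are returned.
    restored = manager.import_settings("/tmp/export.json.gz")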
+    def validate_backup_integrity(self, backup_info: BackupInfo) -> bool:
+        """
+        Validate the integrity of a backup file.
+
+        Args:
+            backup_info: Information about the backup to validate
+
+        Returns:
+            True if backup is valid, False otherwise
+        """
+        try:
+            filepath = backup_info.filepath
+
+            # Check file exists
+            if not os.path.exists(filepath):
+                self.logger.error(f"Backup file not found: {filepath}")
+                return False
+
+            # Check file size
+            current_size = os.path.getsize(filepath)
+            if current_size != backup_info.size_bytes:
+                self.logger.error(f"Backup file size mismatch: expected {backup_info.size_bytes}, got {current_size}")
+                return False
+
+            # Check checksum if available
+            if backup_info.checksum:
+                current_checksum = self._calculate_checksum(filepath)
+                if current_checksum != backup_info.checksum:
+                    self.logger.error(f"Backup checksum mismatch: {filepath}")
+                    return False
+
+            # Try to read the backup
+            if backup_info.format in [BackupFormat.JSON, BackupFormat.COMPRESSED]:
+                try:
+                    if backup_info.format == BackupFormat.COMPRESSED:
+                        with gzip.open(filepath, 'rt', encoding='utf-8') as f:
+                            json.load(f)
+                    else:
+                        with open(filepath, 'r', encoding='utf-8') as f:
+                            json.load(f)
+                except json.JSONDecodeError:
+                    self.logger.error(f"Backup contains invalid JSON: {filepath}")
+                    return False
+
+            elif backup_info.format == BackupFormat.SQLITE:
+                # Validate SQLite database
+                try:
+                    # Format is SQLITE in this branch, so test the actual file
+                    # suffix; a BackupFormat.COMPRESSED comparison here could
+                    # never match.
+                    if str(filepath).endswith('.gz'):
+                        # Decompress to temporary file for validation
+                        temp_path = self.backup_dir / f"temp_validate_{int(time.time())}.db"
+                        with gzip.open(filepath, 'rb') as f_in:
+                            with open(temp_path, 'wb') as f_out:
+                                shutil.copyfileobj(f_in, f_out)
+                        validate_path = str(temp_path)
+                    else:
+                        validate_path = filepath
+
+                    try:
+                        conn = sqlite3.connect(validate_path)
+                        cursor = conn.execute("PRAGMA integrity_check")
+                        result = cursor.fetchone()[0]
+                        conn.close()
+
+                        if result != "ok":
+                            self.logger.error(f"Backup database integrity check failed: {result}")
+                            return False
+                    finally:
+                        if validate_path != filepath and os.path.exists(validate_path):
+                            os.remove(validate_path)
+
+                except sqlite3.Error as e:
+                    self.logger.error(f"Backup database validation failed: {e}")
+                    return False
+
+            self.logger.info(f"Backup validation successful: {filepath}")
+            return True
+
+        except Exception as e:
+            self.logger.error(f"Backup validation failed: {e}")
+            return False
+
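The SQLite branch relies on PRAGMA integrity_check, which returns a single row containing 'ok' for a healthy database and one row per detected problem otherwise. A self-contained sketch of that probe:

import sqlite3

def sqlite_is_healthy(db_path: str) -> bool:
    # PRAGMA integrity_check yields the single row ('ok',) when the file
    # passes, or one row per detected problem otherwise.
    try:
        conn = sqlite3.connect(db_path)
        try:
            return conn.execute("PRAGMA integrity_check").fetchone()[0] == "ok"
        finally:
            conn.close()
    except sqlite3.Error:
        return False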
+    def cleanup_old_backups(self) -> int:
+        """
+        Clean up old backups based on retention policy.
+
+        Returns:
+            Number of backups cleaned up
+        """
+        try:
+            with self._backup_lock:
+                if len(self._backup_history) <= self.max_backups:
+                    return 0
+
+                # Sort by timestamp, keep most recent
+                sorted_backups = sorted(self._backup_history, key=lambda b: b.timestamp, reverse=True)
+                backups_to_remove = sorted_backups[self.max_backups:]
+
+                removed_count = 0
+                for backup in backups_to_remove:
+                    try:
+                        if os.path.exists(backup.filepath):
+                            os.remove(backup.filepath)
+                            self.logger.debug(f"Removed old backup: {backup.filepath}")
+
+                        self._backup_history.remove(backup)
+                        removed_count += 1
+
+                    except Exception as e:
+                        self.logger.warning(f"Failed to remove backup {backup.filepath}: {e}")
+
+                # Save updated history
+                self._save_backup_history()
+
+                if removed_count > 0:
+                    self.logger.info(f"Cleaned up {removed_count} old backups")
+
+                return removed_count
+
+        except Exception as e:
+            self.logger.error(f"Backup cleanup failed: {e}")
+            return 0
+
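The retention rule is newest-N: sort descending by timestamp, keep the first max_backups entries, prune the rest. A toy illustration with hypothetical records:

from datetime import datetime

history = [  # hypothetical records
    {"path": "a.db.gz", "ts": datetime(2024, 1, 1)},
    {"path": "b.db.gz", "ts": datetime(2024, 2, 1)},
    {"path": "c.db.gz", "ts": datetime(2024, 3, 1)},
]
max_backups = 2
by_newest = sorted(history, key=lambda b: b["ts"], reverse=True)
keep, prune = by_newest[:max_backups], by_newest[max_backups:]
print([b["path"] for b in prune])  # -> ['a.db.gz']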
+    def start_auto_backup(self, connection_manager, settings_manager) -> None:
+        """
+        Start automatic backup thread.
+
+        Args:
+            connection_manager: Database connection manager
+            settings_manager: Settings manager for data access
+        """
+        if self._auto_backup_enabled:
+            return
+
+        self._auto_backup_enabled = True
+        self._auto_backup_stop_event.clear()
+
+        self._auto_backup_thread = threading.Thread(
+            target=self._auto_backup_worker,
+            args=(connection_manager, settings_manager),
+            daemon=True,
+            name="AutoBackupWorker"
+        )
+        self._auto_backup_thread.start()
+
+        self.logger.info("Automatic backup started")
+
+    def stop_auto_backup(self) -> None:
+        """Stop automatic backup thread."""
+        if not self._auto_backup_enabled:
+            return
+
+        self._auto_backup_enabled = False
+        self._auto_backup_stop_event.set()
+
+        if self._auto_backup_thread and self._auto_backup_thread.is_alive():
+            self._auto_backup_thread.join(timeout=5)
+
+        self.logger.info("Automatic backup stopped")
+
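The start/stop pair above relies on a threading.Event so the worker can be woken immediately on shutdown instead of sleeping out its full interval. A minimal standalone sketch of that pattern:

import threading

stop = threading.Event()

def worker() -> None:
    while not stop.is_set():
        # stand-in for the periodic backup check
        print("tick")
        # wait() returns early the moment stop.set() runs, so shutdown is
        # immediate rather than taking up to a full interval
        stop.wait(timeout=1.0)

t = threading.Thread(target=worker, daemon=True, name="AutoBackupWorker")
t.start()
stop.set()          # request shutdown
t.join(timeout=5)   # mirror the bounded join used above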
+    def get_backup_history(self) -> List[BackupInfo]:
+        """Get list of all backups."""
+        return self._backup_history.copy()
+
+    def get_backup_statistics(self) -> Dict[str, Any]:
+        """
+        Get backup statistics.
+
+        Returns:
+            Dictionary with backup statistics
+        """
+        with self._backup_lock:
+            total_backups = len(self._backup_history)
+            total_size = sum(b.size_bytes for b in self._backup_history)
+
+            # Count by type
+            type_counts = {}
+            for backup_type in BackupType:
+                count = len([b for b in self._backup_history if b.backup_type == backup_type])
+                type_counts[backup_type.value] = count
+
+            # Count by format
+            format_counts = {}
+            for backup_format in BackupFormat:
+                count = len([b for b in self._backup_history if b.format == backup_format])
+                format_counts[backup_format.value] = count
+
+            # Recent backups
+            recent_backups = [
+                b for b in self._backup_history
+                if b.timestamp > datetime.now() - timedelta(days=7)
+            ]
+
+            return {
+                'total_backups': total_backups,
+                'total_size_bytes': total_size,
+                'total_size_mb': round(total_size / (1024 * 1024), 2),
+                'backups_by_type': type_counts,
+                'backups_by_format': format_counts,
+                'recent_backups_7d': len(recent_backups),
+                'last_backup': self._backup_history[-1].timestamp.isoformat() if self._backup_history else None,
+                'last_auto_backup': self._last_auto_backup.isoformat() if self._last_auto_backup else None,
+                'auto_backup_enabled': self._auto_backup_enabled,
+                'backup_directory': str(self.backup_dir),
+                'max_backups': self.max_backups
+            }
+
+    # Retention Settings Management
+
+    def get_retention_settings(self) -> Dict[str, Any]:
+        """
+        Get current retention policy settings.
+
+        Returns:
+            Dictionary with retention settings
+        """
+        return {
+            'max_backups': self.max_backups,
+            'auto_backup_interval': self.auto_backup_interval,
+            'enable_compression': self.enable_compression,
+            'backup_directory': str(self.backup_dir),
+            'auto_backup_enabled': self._auto_backup_enabled
+        }
+
+    def update_retention_settings(self, max_backups: Optional[int] = None,
+                                  auto_backup_interval: Optional[int] = None,
+                                  enable_compression: Optional[bool] = None) -> bool:
+        """
+        Update retention policy settings.
+
+        Values below the allowed minimums (5 backups, 300-second interval)
+        are silently ignored.
+
+        Args:
+            max_backups: Maximum number of backups to keep (minimum 5)
+            auto_backup_interval: Automatic backup interval in seconds (minimum 300)
+            enable_compression: Whether to enable backup compression
+
+        Returns:
+            True if any setting was updated successfully
+        """
+        try:
+            settings_changed = False
+
+            # Update max backups
+            if max_backups is not None and max_backups >= 5:
+                old_max = self.max_backups
+                self.max_backups = max_backups
+                settings_changed = True
+
+                # If we reduced the limit, clean up old backups immediately
+                if max_backups < old_max:
+                    self.cleanup_old_backups()
+
+                self.logger.info(f"Updated max_backups: {old_max} -> {max_backups}")
+
+            # Update auto backup interval
+            if auto_backup_interval is not None and auto_backup_interval >= 300:  # Minimum 5 minutes
+                old_interval = self.auto_backup_interval
+                self.auto_backup_interval = auto_backup_interval
+                settings_changed = True
+
+                self.logger.info(f"Updated auto_backup_interval: {old_interval}s -> {auto_backup_interval}s")
+
+            # Update compression setting
+            if enable_compression is not None:
+                old_compression = self.enable_compression
+                self.enable_compression = enable_compression
+                settings_changed = True
+
+                self.logger.info(f"Updated enable_compression: {old_compression} -> {enable_compression}")
+
+            # Save settings to persistent storage
+            if settings_changed:
+                self._save_retention_settings()
+
+            return settings_changed
+
+        except Exception as e:
+            self.logger.error(f"Failed to update retention settings: {e}")
+            return False
+
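An illustrative call, assuming the same hypothetical `manager` instance as above; out-of-range values are ignored rather than clamped:

# Keep 20 backups, back up every 30 minutes, compress archives.
changed = manager.update_retention_settings(
    max_backups=20,
    auto_backup_interval=1800,
    enable_compression=True,
)
# manager.update_retention_settings(max_backups=2) would return False:
# 2 is below the minimum of 5, so no setting changes.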
+    def reset_retention_settings_to_defaults(self) -> bool:
+        """
+        Reset retention settings to default values.
+
+        Returns:
+            True if reset successful
+        """
+        try:
+            return self.update_retention_settings(
+                max_backups=50,
+                auto_backup_interval=3600,  # 1 hour
+                enable_compression=True
+            )
+        except Exception as e:
+            self.logger.error(f"Failed to reset retention settings: {e}")
+            return False
+
+    # Private methods
+
+    def _generate_backup_filename(self, extension: str, backup_type: BackupType,
+                                  timestamp: datetime) -> str:
+        """Generate backup filename."""
+        timestamp_str = timestamp.strftime("%Y%m%d_%H%M%S")
+        return f"settings_backup_{backup_type.value}_{timestamp_str}.{extension}"
+
+    def _calculate_checksum(self, filepath: str) -> str:
+        """Calculate MD5 checksum of a file."""
+        import hashlib
+
+        hash_md5 = hashlib.md5()
+        with open(filepath, "rb") as f:
+            for chunk in iter(lambda: f.read(4096), b""):
+                hash_md5.update(chunk)
+        return hash_md5.hexdigest()
+
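On Python 3.11+, hashlib.file_digest offers an equivalent chunked file hash without the manual read loop; a sketch:

import hashlib

def md5_of_file(filepath: str) -> str:
    # hashlib.file_digest (Python 3.11+) reads the file in chunks
    # internally, matching the manual 4 KiB loop above.
    with open(filepath, "rb") as f:
        return hashlib.file_digest(f, "md5").hexdigest()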
+    def _get_database_info(self, connection_manager) -> Dict[str, Any]:
+        """Get database information for backup metadata."""
+        try:
+            conn = connection_manager.get_connection()
+
+            # Get table counts
+            table_counts = {}
+            tables = ['core_settings', 'tool_settings', 'tab_content',
+                      'performance_settings', 'font_settings', 'dialog_settings',
+                      'notes', 'notes_fts']
+
+            for table in tables:
+                try:
+                    cursor = conn.execute(f"SELECT COUNT(*) FROM {table}")
+                    count = cursor.fetchone()[0]
+                    table_counts[table] = count
+                except sqlite3.Error:
+                    table_counts[table] = 0
+
+            return {
+                'data_type': 'sqlite_database',
+                'table_counts': table_counts,
+                'total_records': sum(table_counts.values())
+            }
+
+        except Exception as e:
+            self.logger.warning(f"Failed to get database info: {e}")
+            return {'data_type': 'sqlite_database', 'error': str(e)}
+
+    def _record_backup(self, backup_info: BackupInfo) -> None:
+        """Record backup in history."""
+        with self._backup_lock:
+            self._backup_history.append(backup_info)
+
+            # Update last auto backup time if applicable
+            if backup_info.backup_type == BackupType.AUTOMATIC:
+                self._last_auto_backup = backup_info.timestamp
+
+            # Save history
+            self._save_backup_history()
+
+        # Clean up old backups if needed; runs outside the lock because
+        # cleanup_old_backups() acquires _backup_lock itself
+        if len(self._backup_history) > self.max_backups:
+            self.cleanup_old_backups()
+
+    def _load_backup_history(self) -> None:
+        """Load backup history from file."""
+        history_file = self.backup_dir / "backup_history.json"
+
+        try:
+            if history_file.exists():
+                with open(history_file, 'r', encoding='utf-8') as f:
+                    history_data = json.load(f)
+
+                self._backup_history = []
+                for item in history_data.get('backups', []):
+                    backup_info = BackupInfo(
+                        timestamp=datetime.fromisoformat(item['timestamp']),
+                        backup_type=BackupType(item['backup_type']),
+                        format=BackupFormat(item['format']),
+                        filepath=item['filepath'],
+                        size_bytes=item['size_bytes'],
+                        checksum=item.get('checksum'),
+                        description=item.get('description'),
+                        source_info=item.get('source_info')
+                    )
+                    self._backup_history.append(backup_info)
+
+                # Load last auto backup time (the saved value may be null)
+                if history_data.get('last_auto_backup'):
+                    self._last_auto_backup = datetime.fromisoformat(history_data['last_auto_backup'])
+
+                self.logger.debug(f"Loaded {len(self._backup_history)} backup records")
+
+        except Exception as e:
+            self.logger.warning(f"Failed to load backup history: {e}")
+            self._backup_history = []
+
+    def _save_backup_history(self) -> None:
+        """Save backup history to file."""
+        history_file = self.backup_dir / "backup_history.json"
+
+        try:
+            history_data = {
+                'backups': [
+                    {
+                        'timestamp': backup.timestamp.isoformat(),
+                        'backup_type': backup.backup_type.value,
+                        'format': backup.format.value,
+                        'filepath': backup.filepath,
+                        'size_bytes': backup.size_bytes,
+                        'checksum': backup.checksum,
+                        'description': backup.description,
+                        'source_info': backup.source_info
+                    }
+                    for backup in self._backup_history
+                ],
+                'last_auto_backup': self._last_auto_backup.isoformat() if self._last_auto_backup else None
+            }
+
+            with open(history_file, 'w', encoding='utf-8') as f:
+                json.dump(history_data, f, indent=2, ensure_ascii=False)
+
+        except Exception as e:
+            self.logger.warning(f"Failed to save backup history: {e}")
+
+    def _save_retention_settings(self) -> None:
+        """Save retention settings to file."""
+        settings_file = self.backup_dir / "retention_settings.json"
+
+        try:
+            settings_data = {
+                'max_backups': self.max_backups,
+                'auto_backup_interval': self.auto_backup_interval,
+                'enable_compression': self.enable_compression,
+                'last_updated': datetime.now().isoformat()
+            }
+
+            with open(settings_file, 'w', encoding='utf-8') as f:
+                json.dump(settings_data, f, indent=2, ensure_ascii=False)
+
+            self.logger.debug("Retention settings saved")
+
+        except Exception as e:
+            self.logger.warning(f"Failed to save retention settings: {e}")
+
+    def _load_retention_settings(self) -> None:
+        """Load retention settings from file."""
+        settings_file = self.backup_dir / "retention_settings.json"
+
+        try:
+            if settings_file.exists():
+                with open(settings_file, 'r', encoding='utf-8') as f:
+                    settings_data = json.load(f)
+
+                # Apply loaded settings
+                self.max_backups = settings_data.get('max_backups', self.max_backups)
+                self.auto_backup_interval = settings_data.get('auto_backup_interval', self.auto_backup_interval)
+                self.enable_compression = settings_data.get('enable_compression', self.enable_compression)
+
+                self.logger.debug("Retention settings loaded from file")
+
+        except Exception as e:
+            self.logger.warning(f"Failed to load retention settings: {e}")
+
+    def _auto_backup_worker(self, connection_manager, settings_manager) -> None:
+        """Worker thread for automatic backups."""
+        while not self._auto_backup_stop_event.is_set():
+            try:
+                # Check if backup is needed
+                should_backup = False
+
+                if self._last_auto_backup is None:
+                    should_backup = True
+                elif datetime.now() - self._last_auto_backup > timedelta(seconds=self.auto_backup_interval):
+                    should_backup = True
+
+                if should_backup:
+                    # Create automatic backup
+                    backup_info = self.create_database_backup(
+                        connection_manager,
+                        BackupType.AUTOMATIC,
+                        "Automatic scheduled backup"
+                    )
+
+                    if backup_info:
+                        self.logger.debug("Automatic backup created successfully")
+                    else:
+                        self.logger.warning("Automatic backup failed")
+
+                # Wait before next check: poll every 5 minutes or 1/12 of the
+                # interval, whichever is shorter
+                self._auto_backup_stop_event.wait(min(300, self.auto_backup_interval // 12))
+
+            except Exception as e:
+                self.logger.error(f"Auto backup worker error: {e}")
                 self._auto_backup_stop_event.wait(300)  # Wait 5 minutes on error