pomera-ai-commander 0.1.0 → 1.2.1

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1033 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_validator.py +1066 -1066
  12. package/core/database_connection_manager.py +744 -744
  13. package/core/database_curl_settings_manager.py +608 -608
  14. package/core/database_promera_ai_settings_manager.py +446 -446
  15. package/core/database_schema.py +411 -411
  16. package/core/database_schema_manager.py +395 -395
  17. package/core/database_settings_manager.py +1507 -1507
  18. package/core/database_settings_manager_interface.py +456 -456
  19. package/core/dialog_manager.py +734 -734
  20. package/core/efficient_line_numbers.py +510 -510
  21. package/core/error_handler.py +746 -746
  22. package/core/error_service.py +431 -431
  23. package/core/event_consolidator.py +511 -511
  24. package/core/mcp/__init__.py +43 -43
  25. package/core/mcp/protocol.py +288 -288
  26. package/core/mcp/schema.py +251 -251
  27. package/core/mcp/server_stdio.py +299 -299
  28. package/core/mcp/tool_registry.py +2372 -2345
  29. package/core/memory_efficient_text_widget.py +711 -711
  30. package/core/migration_manager.py +914 -914
  31. package/core/migration_test_suite.py +1085 -1085
  32. package/core/migration_validator.py +1143 -1143
  33. package/core/optimized_find_replace.py +714 -714
  34. package/core/optimized_pattern_engine.py +424 -424
  35. package/core/optimized_search_highlighter.py +552 -552
  36. package/core/performance_monitor.py +674 -674
  37. package/core/persistence_manager.py +712 -712
  38. package/core/progressive_stats_calculator.py +632 -632
  39. package/core/regex_pattern_cache.py +529 -529
  40. package/core/regex_pattern_library.py +350 -350
  41. package/core/search_operation_manager.py +434 -434
  42. package/core/settings_defaults_registry.py +1087 -1087
  43. package/core/settings_integrity_validator.py +1111 -1111
  44. package/core/settings_serializer.py +557 -557
  45. package/core/settings_validator.py +1823 -1823
  46. package/core/smart_stats_calculator.py +709 -709
  47. package/core/statistics_update_manager.py +619 -619
  48. package/core/stats_config_manager.py +858 -858
  49. package/core/streaming_text_handler.py +723 -723
  50. package/core/task_scheduler.py +596 -596
  51. package/core/update_pattern_library.py +168 -168
  52. package/core/visibility_monitor.py +596 -596
  53. package/core/widget_cache.py +498 -498
  54. package/mcp.json +51 -61
  55. package/package.json +61 -57
  56. package/pomera.py +7482 -7482
  57. package/pomera_mcp_server.py +183 -144
  58. package/requirements.txt +32 -0
  59. package/tools/__init__.py +4 -4
  60. package/tools/ai_tools.py +2891 -2891
  61. package/tools/ascii_art_generator.py +352 -352
  62. package/tools/base64_tools.py +183 -183
  63. package/tools/base_tool.py +511 -511
  64. package/tools/case_tool.py +308 -308
  65. package/tools/column_tools.py +395 -395
  66. package/tools/cron_tool.py +884 -884
  67. package/tools/curl_history.py +600 -600
  68. package/tools/curl_processor.py +1207 -1207
  69. package/tools/curl_settings.py +502 -502
  70. package/tools/curl_tool.py +5467 -5467
  71. package/tools/diff_viewer.py +1071 -1071
  72. package/tools/email_extraction_tool.py +248 -248
  73. package/tools/email_header_analyzer.py +425 -425
  74. package/tools/extraction_tools.py +250 -250
  75. package/tools/find_replace.py +1750 -1750
  76. package/tools/folder_file_reporter.py +1463 -1463
  77. package/tools/folder_file_reporter_adapter.py +480 -480
  78. package/tools/generator_tools.py +1216 -1216
  79. package/tools/hash_generator.py +255 -255
  80. package/tools/html_tool.py +656 -656
  81. package/tools/jsonxml_tool.py +729 -729
  82. package/tools/line_tools.py +419 -419
  83. package/tools/markdown_tools.py +561 -561
  84. package/tools/mcp_widget.py +1417 -1417
  85. package/tools/notes_widget.py +973 -973
  86. package/tools/number_base_converter.py +372 -372
  87. package/tools/regex_extractor.py +571 -571
  88. package/tools/slug_generator.py +310 -310
  89. package/tools/sorter_tools.py +458 -458
  90. package/tools/string_escape_tool.py +392 -392
  91. package/tools/text_statistics_tool.py +365 -365
  92. package/tools/text_wrapper.py +430 -430
  93. package/tools/timestamp_converter.py +421 -421
  94. package/tools/tool_loader.py +710 -710
  95. package/tools/translator_tools.py +522 -522
  96. package/tools/url_link_extractor.py +261 -261
  97. package/tools/url_parser.py +204 -204
  98. package/tools/whitespace_tools.py +355 -355
  99. package/tools/word_frequency_counter.py +146 -146
  100. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  101. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  102. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  103. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  104. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  105. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  106. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  107. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  108. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  109. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  110. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  111. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  112. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  113. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  114. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  115. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  116. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  117. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  118. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  119. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  120. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  121. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  122. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  123. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  124. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  125. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  126. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  127. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  128. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  129. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  131. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  132. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  134. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  135. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  136. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  137. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  138. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  139. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  140. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  141. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  142. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  143. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  144. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  145. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  146. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  147. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  148. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  151. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  152. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  153. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  154. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  155. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  156. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  157. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  158. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  159. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  160. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  161. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  162. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  163. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  164. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  165. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  166. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  167. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  168. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  169. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  170. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
@@ -1,656 +1,656 @@
(This hunk removes and re-adds all 656 lines of core/backup_manager.py with identical rendered content, most likely a whitespace or line-ending change between 0.1.0 and 1.2.1; the file body is shown once below.)

"""
Automatic Backup and Persistence Manager for Database Settings

This module provides comprehensive backup and persistence management for the
database settings system, including configurable backup intervals, disk
persistence triggers, backup rotation, and recovery procedures.
"""

import os
import shutil
import sqlite3
import threading
import time
import logging
import json
import gzip
from typing import Dict, List, Optional, Any, Callable
from datetime import datetime, timedelta
from pathlib import Path
from dataclasses import dataclass
from enum import Enum


class BackupTrigger(Enum):
    """Backup trigger types."""
    TIME_BASED = "time_based"
    CHANGE_BASED = "change_based"
    MANUAL = "manual"
    SHUTDOWN = "shutdown"


@dataclass
class BackupInfo:
    """Information about a backup."""
    filepath: str
    timestamp: datetime
    size_bytes: int
    trigger: BackupTrigger
    compressed: bool = False
    metadata: Dict[str, Any] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for serialization."""
        return {
            'filepath': self.filepath,
            'timestamp': self.timestamp.isoformat(),
            'size_bytes': self.size_bytes,
            'trigger': self.trigger.value,
            'compressed': self.compressed,
            'metadata': self.metadata or {}
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'BackupInfo':
        """Create from dictionary."""
        return cls(
            filepath=data['filepath'],
            timestamp=datetime.fromisoformat(data['timestamp']),
            size_bytes=data['size_bytes'],
            trigger=BackupTrigger(data['trigger']),
            compressed=data.get('compressed', False),
            metadata=data.get('metadata', {})
        )


class BackupRotationPolicy:
    """Policy for backup rotation and cleanup."""

    def __init__(self, max_backups: int = 10, max_age_days: int = 30,
                 keep_daily: int = 7, keep_weekly: int = 4, keep_monthly: int = 12):
        """
        Initialize backup rotation policy.

        Args:
            max_backups: Maximum number of backups to keep
            max_age_days: Maximum age of backups in days
            keep_daily: Number of daily backups to keep
            keep_weekly: Number of weekly backups to keep
            keep_monthly: Number of monthly backups to keep
        """
        self.max_backups = max_backups
        self.max_age_days = max_age_days
        self.keep_daily = keep_daily
        self.keep_weekly = keep_weekly
        self.keep_monthly = keep_monthly

    def should_keep_backup(self, backup_info: BackupInfo, all_backups: List[BackupInfo]) -> bool:
        """
        Determine if a backup should be kept based on rotation policy.

        Args:
            backup_info: Backup to evaluate
            all_backups: All available backups

        Returns:
            True if backup should be kept
        """
        now = datetime.now()
        backup_age = now - backup_info.timestamp

        # Always keep recent backups
        if backup_age.days < 1:
            return True

        # Check age limit
        if backup_age.days > self.max_age_days:
            return False

        # Keep based on frequency
        if backup_age.days <= self.keep_daily:
            return True

        # Weekly backups (keep one per week)
        if backup_age.days <= self.keep_weekly * 7:
            week_start = backup_info.timestamp - timedelta(days=backup_info.timestamp.weekday())
            week_backups = [
                b for b in all_backups
                if (b.timestamp - timedelta(days=b.timestamp.weekday())).date() == week_start.date()
            ]
            # Keep the latest backup of the week
            return backup_info == max(week_backups, key=lambda x: x.timestamp)

        # Monthly backups (keep one per month)
        if backup_age.days <= self.keep_monthly * 30:
            month_backups = [
                b for b in all_backups
                if b.timestamp.year == backup_info.timestamp.year and
                b.timestamp.month == backup_info.timestamp.month
            ]
            # Keep the latest backup of the month
            return backup_info == max(month_backups, key=lambda x: x.timestamp)

        return False


class BackupManager:
    """
    Comprehensive backup and persistence manager for database settings.
    """

    def __init__(self, backup_dir: str = "backups",
                 auto_backup_interval: int = 300,  # 5 minutes
                 change_threshold: int = 100,
                 enable_compression: bool = True,
                 rotation_policy: Optional[BackupRotationPolicy] = None):
        """
        Initialize backup manager.

        Args:
            backup_dir: Directory for storing backups
            auto_backup_interval: Automatic backup interval in seconds
            change_threshold: Number of changes before triggering backup
            enable_compression: Whether to compress backups
            rotation_policy: Backup rotation policy
        """
        self.backup_dir = Path(backup_dir)
        self.auto_backup_interval = auto_backup_interval
        self.change_threshold = change_threshold
        self.enable_compression = enable_compression
        self.rotation_policy = rotation_policy or BackupRotationPolicy()

        # Ensure backup directory exists
        self.backup_dir.mkdir(parents=True, exist_ok=True)

        # Logging (initialize first)
        self.logger = logging.getLogger(__name__)

        # Statistics (initialize before loading history)
        self.backup_stats = {
            'total_backups': 0,
            'successful_backups': 0,
            'failed_backups': 0,
            'total_size_bytes': 0,
            'compression_ratio': 0.0
        }

        # State tracking
        self.changes_since_backup = 0
        self.last_backup_time = None
        self.backup_history = []

        # Threading
        self._lock = threading.RLock()
        self._backup_thread = None
        self._stop_event = threading.Event()
        self._backup_callbacks = []

        # Load backup history after all attributes are initialized
        self._load_backup_history()

    def start_auto_backup(self) -> None:
        """Start automatic backup thread."""
        if self._backup_thread and self._backup_thread.is_alive():
            return

        self._stop_event.clear()
        self._backup_thread = threading.Thread(
            target=self._backup_worker,
            daemon=True,
            name="BackupManager"
        )
        self._backup_thread.start()
        self.logger.info("Automatic backup started")

    def stop_auto_backup(self) -> None:
        """Stop automatic backup thread."""
        if self._backup_thread and self._backup_thread.is_alive():
            self._stop_event.set()
            self._backup_thread.join(timeout=10)
            self.logger.info("Automatic backup stopped")

    def _backup_worker(self) -> None:
        """Worker thread for automatic backups."""
        while not self._stop_event.is_set():
            try:
                should_backup = False

                # Time-based backup
                if self.last_backup_time is None:
                    should_backup = True
                elif datetime.now() - self.last_backup_time > timedelta(seconds=self.auto_backup_interval):
                    should_backup = True

                # Change-based backup
                if self.changes_since_backup >= self.change_threshold:
                    should_backup = True

                if should_backup:
                    # Determine trigger type
                    trigger = BackupTrigger.TIME_BASED
                    if self.changes_since_backup >= self.change_threshold:
                        trigger = BackupTrigger.CHANGE_BASED

                    # Note: Auto backup worker needs connection manager to be set
                    # This will be handled when backup manager is integrated with settings manager

                # Wait before next check
                self._stop_event.wait(min(60, self.auto_backup_interval // 5))

            except Exception as e:
                self.logger.error(f"Backup worker error: {e}")
                self._stop_event.wait(60)  # Wait before retrying

    def backup_database(self, connection_manager, trigger: BackupTrigger = BackupTrigger.MANUAL,
                        metadata: Optional[Dict[str, Any]] = None) -> Optional[BackupInfo]:
        """
        Create a backup of the database.

        Args:
            connection_manager: Database connection manager
            trigger: Backup trigger type
            metadata: Additional metadata to store with backup

        Returns:
            BackupInfo if successful, None otherwise
        """
        try:
            with self._lock:
                return self._perform_backup(connection_manager, trigger, metadata)
        except Exception as e:
            self.logger.error(f"Backup failed: {e}")
            self.backup_stats['failed_backups'] += 1
            return None

    def _perform_backup(self, connection_manager, trigger: BackupTrigger = BackupTrigger.TIME_BASED,
                        metadata: Optional[Dict[str, Any]] = None) -> Optional[BackupInfo]:
        """Internal backup implementation."""
        timestamp = datetime.now()
        backup_filename = f"settings_backup_{timestamp.strftime('%Y%m%d_%H%M%S')}.db"

        if self.enable_compression:
            backup_filename += ".gz"

        backup_path = self.backup_dir / backup_filename

        try:
            # Get database connection
            source_conn = connection_manager.get_connection()

            if self.enable_compression:
                # Backup to temporary file then compress
                temp_path = backup_path.with_suffix('')
                backup_conn = sqlite3.connect(str(temp_path))

                try:
                    # Perform backup
                    source_conn.backup(backup_conn)
                    backup_conn.close()

                    # Compress the backup
                    with open(temp_path, 'rb') as f_in:
                        with gzip.open(backup_path, 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)

                    # Remove temporary file
                    temp_path.unlink()

                finally:
                    if backup_conn:
                        backup_conn.close()
            else:
                # Direct backup without compression
                backup_conn = sqlite3.connect(str(backup_path))
                try:
                    source_conn.backup(backup_conn)
                finally:
                    backup_conn.close()

            # Get backup size
            backup_size = backup_path.stat().st_size

            # Create backup info
            backup_info = BackupInfo(
                filepath=str(backup_path),
                timestamp=timestamp,
                size_bytes=backup_size,
                trigger=trigger,
                compressed=self.enable_compression,
                metadata=metadata or {}
            )

            # Update state
            self.backup_history.append(backup_info)
            self.last_backup_time = timestamp
            self.changes_since_backup = 0

            # Update statistics
            self.backup_stats['total_backups'] += 1
            self.backup_stats['successful_backups'] += 1
            self.backup_stats['total_size_bytes'] += backup_size

            # Save backup history
            self._save_backup_history()

            # Perform rotation cleanup
            self._cleanup_old_backups()

            # Notify callbacks
            for callback in self._backup_callbacks:
                try:
                    callback(backup_info)
                except Exception as e:
                    self.logger.warning(f"Backup callback failed: {e}")

            self.logger.info(f"Backup created: {backup_path} ({backup_size} bytes)")
            return backup_info

        except Exception as e:
            self.logger.error(f"Backup creation failed: {e}")
            self.backup_stats['failed_backups'] += 1

            # Clean up failed backup file
            if backup_path.exists():
                try:
                    backup_path.unlink()
                except Exception:
                    pass

            raise

    def restore_from_backup(self, connection_manager, backup_path: Optional[str] = None) -> bool:
        """
        Restore database from backup.

        Args:
            connection_manager: Database connection manager
            backup_path: Path to backup file (uses latest if None)

        Returns:
            True if restore successful
        """
        try:
            with self._lock:
                if backup_path is None:
                    # Use latest backup
                    if not self.backup_history:
                        self.logger.error("No backups available for restore")
                        return False

                    latest_backup = max(self.backup_history, key=lambda x: x.timestamp)
                    backup_path = latest_backup.filepath

                backup_file = Path(backup_path)
                if not backup_file.exists():
                    self.logger.error(f"Backup file not found: {backup_path}")
                    return False

                # Close existing connections
                connection_manager.close_all_connections()

                # Determine if backup is compressed
                is_compressed = backup_path.endswith('.gz')

                if is_compressed:
                    # Decompress and restore
                    temp_path = backup_file.with_suffix('')

                    with gzip.open(backup_path, 'rb') as f_in:
                        with open(temp_path, 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)

                    try:
                        # Restore from decompressed file
                        if connection_manager.db_path != ":memory:":
                            shutil.copy2(temp_path, connection_manager.db_path)
                        else:
                            # For in-memory database, restore by copying data
                            restore_conn = sqlite3.connect(str(temp_path))
                            try:
                                memory_conn = sqlite3.connect(":memory:")
                                restore_conn.backup(memory_conn)
                                # Update connection manager's main connection
                                connection_manager._main_connection = memory_conn
                            finally:
                                restore_conn.close()
                    finally:
                        temp_path.unlink()
                else:
                    # Direct restore
                    if connection_manager.db_path != ":memory:":
                        shutil.copy2(backup_path, connection_manager.db_path)
                    else:
                        # For in-memory database
                        restore_conn = sqlite3.connect(backup_path)
                        try:
                            memory_conn = sqlite3.connect(":memory:")
                            restore_conn.backup(memory_conn)
                            connection_manager._main_connection = memory_conn
                        finally:
                            restore_conn.close()

                # Reinitialize connection manager
                if connection_manager.db_path != ":memory:":
                    connection_manager._initialize_main_connection()

                self.logger.info(f"Database restored from: {backup_path}")
                return True

        except Exception as e:
            self.logger.error(f"Restore failed: {e}")
            return False

    def _cleanup_old_backups(self) -> None:
        """Clean up old backups based on rotation policy."""
        try:
            # Apply rotation policy
            backups_to_keep = []
            backups_to_remove = []

            for backup in self.backup_history:
                if self.rotation_policy.should_keep_backup(backup, self.backup_history):
                    backups_to_keep.append(backup)
                else:
                    backups_to_remove.append(backup)

            # Remove old backup files
            for backup in backups_to_remove:
                try:
                    backup_path = Path(backup.filepath)
                    if backup_path.exists():
                        backup_path.unlink()
                        self.logger.debug(f"Removed old backup: {backup.filepath}")
                except Exception as e:
                    self.logger.warning(f"Failed to remove backup {backup.filepath}: {e}")

            # Update backup history
            self.backup_history = backups_to_keep

            # Enforce maximum backup count
            if len(self.backup_history) > self.rotation_policy.max_backups:
                # Sort by timestamp and keep the most recent
                self.backup_history.sort(key=lambda x: x.timestamp, reverse=True)
                excess_backups = self.backup_history[self.rotation_policy.max_backups:]

                for backup in excess_backups:
                    try:
                        backup_path = Path(backup.filepath)
                        if backup_path.exists():
                            backup_path.unlink()
                    except Exception as e:
                        self.logger.warning(f"Failed to remove excess backup {backup.filepath}: {e}")

                self.backup_history = self.backup_history[:self.rotation_policy.max_backups]

            # Save updated history
            self._save_backup_history()

        except Exception as e:
            self.logger.error(f"Backup cleanup failed: {e}")

    def _load_backup_history(self) -> None:
        """Load backup history from metadata file."""
        history_file = self.backup_dir / "backup_history.json"

        try:
            if history_file.exists():
                with open(history_file, 'r') as f:
                    data = json.load(f)

                self.backup_history = [
                    BackupInfo.from_dict(item) for item in data.get('backups', [])
                ]
                self.backup_stats.update(data.get('stats', {}))

                # Verify backup files still exist
                valid_backups = []
                for backup in self.backup_history:
                    if Path(backup.filepath).exists():
                        valid_backups.append(backup)
                    else:
                        self.logger.warning(f"Backup file missing: {backup.filepath}")

                self.backup_history = valid_backups

        except Exception as e:
            self.logger.warning(f"Failed to load backup history: {e}")
            self.backup_history = []

    def _save_backup_history(self) -> None:
        """Save backup history to metadata file."""
        history_file = self.backup_dir / "backup_history.json"

        try:
            data = {
                'backups': [backup.to_dict() for backup in self.backup_history],
                'stats': self.backup_stats,
                'last_updated': datetime.now().isoformat()
            }

            with open(history_file, 'w') as f:
                json.dump(data, f, indent=2)

        except Exception as e:
            self.logger.error(f"Failed to save backup history: {e}")

    def record_change(self) -> None:
        """Record a database change for change-based backup triggering."""
        with self._lock:
            self.changes_since_backup += 1

    def get_backup_info(self) -> Dict[str, Any]:
        """
        Get comprehensive backup information.

        Returns:
            Dictionary with backup status and statistics
        """
        with self._lock:
            return {
                'backup_dir': str(self.backup_dir),
                'auto_backup_interval': self.auto_backup_interval,
                'change_threshold': self.change_threshold,
                'changes_since_backup': self.changes_since_backup,
                'last_backup_time': self.last_backup_time.isoformat() if self.last_backup_time else None,
                'backup_count': len(self.backup_history),
                'total_backup_size': sum(b.size_bytes for b in self.backup_history),
                'compression_enabled': self.enable_compression,
                'statistics': self.backup_stats.copy(),
                'recent_backups': [
                    {
                        'filepath': b.filepath,
                        'timestamp': b.timestamp.isoformat(),
                        'size_bytes': b.size_bytes,
                        'trigger': b.trigger.value,
                        'compressed': b.compressed
                    }
                    for b in sorted(self.backup_history, key=lambda x: x.timestamp, reverse=True)[:10]
                ]
            }

    def add_backup_callback(self, callback: Callable[[BackupInfo], None]) -> None:
        """
        Add callback to be called after successful backup.

        Args:
            callback: Function to call with BackupInfo
        """
        self._backup_callbacks.append(callback)

    def remove_backup_callback(self, callback: Callable[[BackupInfo], None]) -> None:
        """Remove backup callback."""
        if callback in self._backup_callbacks:
            self._backup_callbacks.remove(callback)

    def set_backup_interval(self, seconds: int) -> None:
        """
        Set automatic backup interval.

        Args:
            seconds: Backup interval in seconds (0 to disable)
        """
        self.auto_backup_interval = max(0, seconds)

        if self.auto_backup_interval > 0:
            self.start_auto_backup()
        else:
            self.stop_auto_backup()

    def set_change_threshold(self, changes: int) -> None:
        """
        Set change threshold for triggering backups.

        Args:
            changes: Number of changes before backup
        """
        self.change_threshold = max(1, changes)

    def export_backup_report(self, filepath: str) -> bool:
        """
        Export detailed backup report to file.

        Args:
            filepath: Target file path

        Returns:
            True if export successful
        """
        try:
            report_data = {
                'report_timestamp': datetime.now().isoformat(),
                'backup_configuration': {
                    'backup_dir': str(self.backup_dir),
                    'auto_backup_interval': self.auto_backup_interval,
                    'change_threshold': self.change_threshold,
                    'compression_enabled': self.enable_compression,
                    'rotation_policy': {
                        'max_backups': self.rotation_policy.max_backups,
                        'max_age_days': self.rotation_policy.max_age_days,
                        'keep_daily': self.rotation_policy.keep_daily,
                        'keep_weekly': self.rotation_policy.keep_weekly,
                        'keep_monthly': self.rotation_policy.keep_monthly
                    }
                },
                'backup_statistics': self.backup_stats,
                'backup_history': [backup.to_dict() for backup in self.backup_history],
                'current_status': self.get_backup_info()
            }

            with open(filepath, 'w') as f:
                json.dump(report_data, f, indent=2)

            self.logger.info(f"Backup report exported to {filepath}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to export backup report: {e}")
            return False

    def __enter__(self):
        """Context manager entry."""
        self.start_auto_backup()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit."""
        self.stop_auto_backup()
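
Two short sketches follow for readers evaluating this version's backup behavior. First, the rotation rules in BackupRotationPolicy.should_keep_backup thin backups progressively: anything under a day old is always kept, then everything inside the keep_daily window, then only the newest backup of each calendar week inside the weekly window, then the newest of each month, and nothing older than max_age_days. A minimal illustration with hypothetical timestamps (the exact keep/drop decisions for the 10- and 11-day backups shift with the current weekday, since the weekly buckets are computed from real calendar weeks):

from datetime import datetime, timedelta

# Assumes the package root is on sys.path; all three names are defined above.
from core.backup_manager import BackupInfo, BackupRotationPolicy, BackupTrigger

policy = BackupRotationPolicy(max_backups=10, max_age_days=30,
                              keep_daily=7, keep_weekly=4, keep_monthly=12)
now = datetime.now()

# Hypothetical backups aged 0, 3, 10, 11, and 45 days.
backups = [
    BackupInfo(filepath=f"backups/b{age}.db",
               timestamp=now - timedelta(days=age),
               size_bytes=1024,
               trigger=BackupTrigger.TIME_BASED)
    for age in (0, 3, 10, 11, 45)
]

for b in backups:
    # 0 and 3 days old: kept by the daily rule; 45 days: dropped by max_age_days.
    # 10/11 days: weekly rule; if both fall in the same calendar week,
    # only the newer one survives.
    print(b.filepath, policy.should_keep_backup(b, backups))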
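
Second, a minimal end-to-end sketch of BackupManager itself. The StubConnectionManager below is hypothetical, written only for this example; it supplies just the members the manager actually touches (get_connection(), close_all_connections(), db_path, and the private hooks used on restore). In the package, the real database_connection_manager plays this role.

import sqlite3

from core.backup_manager import BackupManager, BackupTrigger

class StubConnectionManager:
    """Hypothetical stand-in for the package's connection manager."""

    def __init__(self, db_path: str = "settings.db"):
        self.db_path = db_path
        self._main_connection = sqlite3.connect(db_path)

    def get_connection(self):
        return self._main_connection

    def close_all_connections(self):
        self._main_connection.close()

    def _initialize_main_connection(self):
        self._main_connection = sqlite3.connect(self.db_path)

conn_mgr = StubConnectionManager()

# __enter__ starts the auto-backup thread and __exit__ stops it. Note the
# worker is effectively a no-op until a connection manager is wired in
# (see the comment inside _backup_worker), so the backup here is manual.
with BackupManager(backup_dir="backups", auto_backup_interval=300,
                   change_threshold=100, enable_compression=True) as mgr:
    mgr.record_change()  # counts toward the change threshold
    info = mgr.backup_database(conn_mgr, trigger=BackupTrigger.MANUAL,
                               metadata={"reason": "pre-upgrade"})
    if info is not None:
        print(f"backup written: {info.filepath} ({info.size_bytes} bytes)")
    print("backups on disk:", mgr.get_backup_info()["backup_count"])

Restore is symmetrical: mgr.restore_from_backup(conn_mgr) picks the newest backup when no path is given, decompresses it if it ends in .gz, and copies it over db_path (or rebuilds the in-memory connection for ":memory:" databases).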