pomera-ai-commander 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +680 -0
- package/bin/pomera-ai-commander.js +62 -0
- package/core/__init__.py +66 -0
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/app_context.py +482 -0
- package/core/async_text_processor.py +422 -0
- package/core/backup_manager.py +656 -0
- package/core/backup_recovery_manager.py +1034 -0
- package/core/content_hash_cache.py +509 -0
- package/core/context_menu.py +313 -0
- package/core/data_validator.py +1067 -0
- package/core/database_connection_manager.py +745 -0
- package/core/database_curl_settings_manager.py +609 -0
- package/core/database_promera_ai_settings_manager.py +447 -0
- package/core/database_schema.py +412 -0
- package/core/database_schema_manager.py +396 -0
- package/core/database_settings_manager.py +1508 -0
- package/core/database_settings_manager_interface.py +457 -0
- package/core/dialog_manager.py +735 -0
- package/core/efficient_line_numbers.py +511 -0
- package/core/error_handler.py +747 -0
- package/core/error_service.py +431 -0
- package/core/event_consolidator.py +512 -0
- package/core/mcp/__init__.py +43 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/core/mcp/protocol.py +288 -0
- package/core/mcp/schema.py +251 -0
- package/core/mcp/server_stdio.py +299 -0
- package/core/mcp/tool_registry.py +2345 -0
- package/core/memory_efficient_text_widget.py +712 -0
- package/core/migration_manager.py +915 -0
- package/core/migration_test_suite.py +1086 -0
- package/core/migration_validator.py +1144 -0
- package/core/optimized_find_replace.py +715 -0
- package/core/optimized_pattern_engine.py +424 -0
- package/core/optimized_search_highlighter.py +553 -0
- package/core/performance_monitor.py +675 -0
- package/core/persistence_manager.py +713 -0
- package/core/progressive_stats_calculator.py +632 -0
- package/core/regex_pattern_cache.py +530 -0
- package/core/regex_pattern_library.py +351 -0
- package/core/search_operation_manager.py +435 -0
- package/core/settings_defaults_registry.py +1087 -0
- package/core/settings_integrity_validator.py +1112 -0
- package/core/settings_serializer.py +558 -0
- package/core/settings_validator.py +1824 -0
- package/core/smart_stats_calculator.py +710 -0
- package/core/statistics_update_manager.py +619 -0
- package/core/stats_config_manager.py +858 -0
- package/core/streaming_text_handler.py +723 -0
- package/core/task_scheduler.py +596 -0
- package/core/update_pattern_library.py +169 -0
- package/core/visibility_monitor.py +596 -0
- package/core/widget_cache.py +498 -0
- package/mcp.json +61 -0
- package/package.json +57 -0
- package/pomera.py +7483 -0
- package/pomera_mcp_server.py +144 -0
- package/tools/__init__.py +5 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
- package/tools/ai_tools.py +2892 -0
- package/tools/ascii_art_generator.py +353 -0
- package/tools/base64_tools.py +184 -0
- package/tools/base_tool.py +511 -0
- package/tools/case_tool.py +309 -0
- package/tools/column_tools.py +396 -0
- package/tools/cron_tool.py +885 -0
- package/tools/curl_history.py +601 -0
- package/tools/curl_processor.py +1208 -0
- package/tools/curl_settings.py +503 -0
- package/tools/curl_tool.py +5467 -0
- package/tools/diff_viewer.py +1072 -0
- package/tools/email_extraction_tool.py +249 -0
- package/tools/email_header_analyzer.py +426 -0
- package/tools/extraction_tools.py +250 -0
- package/tools/find_replace.py +1751 -0
- package/tools/folder_file_reporter.py +1463 -0
- package/tools/folder_file_reporter_adapter.py +480 -0
- package/tools/generator_tools.py +1217 -0
- package/tools/hash_generator.py +256 -0
- package/tools/html_tool.py +657 -0
- package/tools/huggingface_helper.py +449 -0
- package/tools/jsonxml_tool.py +730 -0
- package/tools/line_tools.py +419 -0
- package/tools/list_comparator.py +720 -0
- package/tools/markdown_tools.py +562 -0
- package/tools/mcp_widget.py +1417 -0
- package/tools/notes_widget.py +973 -0
- package/tools/number_base_converter.py +373 -0
- package/tools/regex_extractor.py +572 -0
- package/tools/slug_generator.py +311 -0
- package/tools/sorter_tools.py +459 -0
- package/tools/string_escape_tool.py +393 -0
- package/tools/text_statistics_tool.py +366 -0
- package/tools/text_wrapper.py +431 -0
- package/tools/timestamp_converter.py +422 -0
- package/tools/tool_loader.py +710 -0
- package/tools/translator_tools.py +523 -0
- package/tools/url_link_extractor.py +262 -0
- package/tools/url_parser.py +205 -0
- package/tools/whitespace_tools.py +356 -0
- package/tools/word_frequency_counter.py +147 -0
|
@@ -0,0 +1,745 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Database Connection Manager for Settings Migration
|
|
3
|
+
|
|
4
|
+
This module provides robust database connection management with SQLite WAL mode
|
|
5
|
+
for concurrent access, connection pooling, transaction management, and automatic
|
|
6
|
+
backup scheduling with disk persistence.
|
|
7
|
+
|
|
8
|
+
Designed to handle the high-frequency settings access patterns identified in
|
|
9
|
+
the production codebase analysis (579+ config operations across 45 files).
|
|
10
|
+
|
|
11
|
+
Enhanced with performance monitoring and optimization capabilities.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import sqlite3
|
|
15
|
+
import threading
|
|
16
|
+
import time
|
|
17
|
+
import os
|
|
18
|
+
import shutil
|
|
19
|
+
import logging
|
|
20
|
+
from typing import Optional, Callable, List, Any, Dict
|
|
21
|
+
from datetime import datetime, timedelta
|
|
22
|
+
from contextlib import contextmanager
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class DatabaseConnectionManager:
|
|
27
|
+
"""
|
|
28
|
+
Manages SQLite database connections with WAL mode for concurrency support.
|
|
29
|
+
|
|
30
|
+
Features:
|
|
31
|
+
- WAL (Write-Ahead Logging) mode for better concurrent access
|
|
32
|
+
- Connection pooling for multiple threads
|
|
33
|
+
- Automatic backup scheduling and disk persistence
|
|
34
|
+
- Transaction management with rollback support
|
|
35
|
+
- Error handling and connection recovery
|
|
36
|
+
- Thread-safe operations
|
|
37
|
+
"""
|
|
38
|
+
|
|
39
|
+
def __init__(self, db_path: str = ":memory:", backup_path: Optional[str] = None,
|
|
40
|
+
enable_performance_monitoring: bool = True):
|
|
41
|
+
"""
|
|
42
|
+
Initialize the database connection manager.
|
|
43
|
+
|
|
44
|
+
Args:
|
|
45
|
+
db_path: Path to SQLite database file (":memory:" for in-memory)
|
|
46
|
+
backup_path: Path for automatic backups (None to disable)
|
|
47
|
+
enable_performance_monitoring: Whether to enable performance monitoring
|
|
48
|
+
"""
|
|
49
|
+
self.db_path = db_path
|
|
50
|
+
self.backup_path = backup_path or "settings_backup.db"
|
|
51
|
+
self.backup_interval = 300 # 5 minutes default
|
|
52
|
+
self.last_backup = None
|
|
53
|
+
self.auto_backup_enabled = True
|
|
54
|
+
self.enable_performance_monitoring = enable_performance_monitoring
|
|
55
|
+
|
|
56
|
+
# Thread safety
|
|
57
|
+
self._lock = threading.RLock()
|
|
58
|
+
self._connections = {} # Thread-local connections
|
|
59
|
+
self._main_connection = None
|
|
60
|
+
|
|
61
|
+
# Connection configuration
|
|
62
|
+
self._connection_config = {
|
|
63
|
+
'timeout': 30.0,
|
|
64
|
+
'isolation_level': None, # Autocommit mode
|
|
65
|
+
'check_same_thread': False
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
# Backup and persistence settings
|
|
69
|
+
self._backup_thread = None
|
|
70
|
+
self._backup_stop_event = threading.Event()
|
|
71
|
+
self._changes_since_backup = 0
|
|
72
|
+
self._max_changes_before_backup = 100
|
|
73
|
+
|
|
74
|
+
# Error handling
|
|
75
|
+
self.logger = logging.getLogger(__name__)
|
|
76
|
+
self._connection_errors = []
|
|
77
|
+
self._max_error_history = 50
|
|
78
|
+
|
|
79
|
+
# Performance monitoring
|
|
80
|
+
self._performance_monitor = None
|
|
81
|
+
if enable_performance_monitoring:
|
|
82
|
+
try:
|
|
83
|
+
from .performance_monitor import get_performance_monitor
|
|
84
|
+
self._performance_monitor = get_performance_monitor()
|
|
85
|
+
except ImportError:
|
|
86
|
+
self.logger.warning("Performance monitoring not available")
|
|
87
|
+
|
|
88
|
+
# Query execution statistics
|
|
89
|
+
self._query_count = 0
|
|
90
|
+
self._total_query_time = 0.0
|
|
91
|
+
self._slow_queries = []
|
|
92
|
+
self._slow_query_threshold = 0.1 # 100ms
|
|
93
|
+
|
|
94
|
+
# Initialize main connection
|
|
95
|
+
self._initialize_main_connection()
|
|
96
|
+
|
|
97
|
+
def _initialize_main_connection(self) -> None:
|
|
98
|
+
"""Initialize the main database connection with proper configuration."""
|
|
99
|
+
try:
|
|
100
|
+
self._main_connection = sqlite3.connect(
|
|
101
|
+
self.db_path,
|
|
102
|
+
**self._connection_config
|
|
103
|
+
)
|
|
104
|
+
|
|
105
|
+
# Configure WAL mode for better concurrency
|
|
106
|
+
self._configure_wal_mode(self._main_connection)
|
|
107
|
+
|
|
108
|
+
# Configure performance settings
|
|
109
|
+
self._configure_performance_settings(self._main_connection)
|
|
110
|
+
|
|
111
|
+
# Start automatic backup if enabled and not in-memory
|
|
112
|
+
if self.auto_backup_enabled and self.db_path != ":memory:":
|
|
113
|
+
self._start_backup_thread()
|
|
114
|
+
|
|
115
|
+
self.logger.info(f"Database connection initialized: {self.db_path}")
|
|
116
|
+
|
|
117
|
+
except Exception as e:
|
|
118
|
+
self._log_connection_error(f"Failed to initialize main connection: {e}")
|
|
119
|
+
raise
|
|
120
|
+
|
|
121
|
+
def _configure_wal_mode(self, connection: sqlite3.Connection) -> None:
|
|
122
|
+
"""
|
|
123
|
+
Configure WAL (Write-Ahead Logging) mode for better concurrency.
|
|
124
|
+
|
|
125
|
+
Args:
|
|
126
|
+
connection: SQLite connection to configure
|
|
127
|
+
"""
|
|
128
|
+
try:
|
|
129
|
+
# Enable WAL mode for better concurrent access
|
|
130
|
+
connection.execute("PRAGMA journal_mode=WAL")
|
|
131
|
+
|
|
132
|
+
# Configure WAL settings for performance
|
|
133
|
+
connection.execute("PRAGMA wal_autocheckpoint=1000") # Checkpoint every 1000 pages
|
|
134
|
+
connection.execute("PRAGMA wal_checkpoint(TRUNCATE)") # Initial checkpoint
|
|
135
|
+
|
|
136
|
+
self.logger.debug("WAL mode configured successfully")
|
|
137
|
+
|
|
138
|
+
except Exception as e:
|
|
139
|
+
self.logger.warning(f"Failed to configure WAL mode: {e}")
|
|
140
|
+
# Continue without WAL mode - not critical for in-memory databases
|
|
141
|
+
|
|
142
|
+
def _configure_performance_settings(self, connection: sqlite3.Connection) -> None:
|
|
143
|
+
"""
|
|
144
|
+
Configure SQLite performance settings for optimal operation.
|
|
145
|
+
|
|
146
|
+
Args:
|
|
147
|
+
connection: SQLite connection to configure
|
|
148
|
+
"""
|
|
149
|
+
try:
|
|
150
|
+
# Performance optimizations
|
|
151
|
+
connection.execute("PRAGMA synchronous=NORMAL") # Balance safety and speed
|
|
152
|
+
connection.execute("PRAGMA cache_size=10000") # 10MB cache
|
|
153
|
+
connection.execute("PRAGMA temp_store=MEMORY") # Use memory for temp tables
|
|
154
|
+
connection.execute("PRAGMA mmap_size=268435456") # 256MB memory mapping
|
|
155
|
+
|
|
156
|
+
# Enable foreign key constraints
|
|
157
|
+
connection.execute("PRAGMA foreign_keys=ON")
|
|
158
|
+
|
|
159
|
+
# Optimize for frequent reads with some writes
|
|
160
|
+
connection.execute("PRAGMA optimize")
|
|
161
|
+
|
|
162
|
+
self.logger.debug("Performance settings configured")
|
|
163
|
+
|
|
164
|
+
except Exception as e:
|
|
165
|
+
self.logger.warning(f"Failed to configure performance settings: {e}")
|
|
166
|
+
|
|
167
|
+
def get_connection(self) -> sqlite3.Connection:
|
|
168
|
+
"""
|
|
169
|
+
Get a database connection for the current thread.
|
|
170
|
+
|
|
171
|
+
Returns:
|
|
172
|
+
SQLite connection object
|
|
173
|
+
|
|
174
|
+
Raises:
|
|
175
|
+
sqlite3.Error: If connection cannot be established
|
|
176
|
+
"""
|
|
177
|
+
thread_id = threading.get_ident()
|
|
178
|
+
|
|
179
|
+
with self._lock:
|
|
180
|
+
# Return existing connection for this thread
|
|
181
|
+
if thread_id in self._connections:
|
|
182
|
+
connection = self._connections[thread_id]
|
|
183
|
+
try:
|
|
184
|
+
# Test connection is still valid
|
|
185
|
+
start_time = time.time()
|
|
186
|
+
connection.execute("SELECT 1")
|
|
187
|
+
|
|
188
|
+
# Record the test query
|
|
189
|
+
if self.enable_performance_monitoring:
|
|
190
|
+
execution_time = time.time() - start_time
|
|
191
|
+
self._record_query_performance("SELECT 1", execution_time)
|
|
192
|
+
|
|
193
|
+
return connection
|
|
194
|
+
except sqlite3.Error:
|
|
195
|
+
# Connection is stale, remove it
|
|
196
|
+
del self._connections[thread_id]
|
|
197
|
+
|
|
198
|
+
# Create new connection for this thread
|
|
199
|
+
try:
|
|
200
|
+
connection = sqlite3.connect(
|
|
201
|
+
self.db_path,
|
|
202
|
+
**self._connection_config
|
|
203
|
+
)
|
|
204
|
+
|
|
205
|
+
# Configure the new connection
|
|
206
|
+
self._configure_wal_mode(connection)
|
|
207
|
+
self._configure_performance_settings(connection)
|
|
208
|
+
|
|
209
|
+
# Store for reuse
|
|
210
|
+
self._connections[thread_id] = connection
|
|
211
|
+
|
|
212
|
+
self.logger.debug(f"Created new connection for thread {thread_id}")
|
|
213
|
+
return connection
|
|
214
|
+
|
|
215
|
+
except Exception as e:
|
|
216
|
+
error_msg = f"Failed to create connection for thread {thread_id}: {e}"
|
|
217
|
+
self._log_connection_error(error_msg)
|
|
218
|
+
raise sqlite3.Error(error_msg)
|
|
219
|
+
|
|
220
|
+
@contextmanager
|
|
221
|
+
def transaction(self):
|
|
222
|
+
"""
|
|
223
|
+
Context manager for database transactions with automatic rollback on error.
|
|
224
|
+
|
|
225
|
+
Usage:
|
|
226
|
+
with connection_manager.transaction() as conn:
|
|
227
|
+
conn.execute("INSERT INTO table VALUES (?)", (value,))
|
|
228
|
+
conn.execute("UPDATE table SET col = ?", (new_value,))
|
|
229
|
+
"""
|
|
230
|
+
connection = self.get_connection()
|
|
231
|
+
|
|
232
|
+
try:
|
|
233
|
+
connection.execute("BEGIN TRANSACTION")
|
|
234
|
+
yield connection
|
|
235
|
+
connection.execute("COMMIT")
|
|
236
|
+
self._changes_since_backup += 1
|
|
237
|
+
|
|
238
|
+
except Exception as e:
|
|
239
|
+
connection.execute("ROLLBACK")
|
|
240
|
+
self.logger.error(f"Transaction rolled back due to error: {e}")
|
|
241
|
+
raise
|
|
242
|
+
|
|
243
|
+
def execute_transaction(self, operations: List[Callable[[sqlite3.Connection], Any]]) -> List[Any]:
|
|
244
|
+
"""
|
|
245
|
+
Execute multiple operations in a single transaction.
|
|
246
|
+
|
|
247
|
+
Args:
|
|
248
|
+
operations: List of functions that take a connection and return a result
|
|
249
|
+
|
|
250
|
+
Returns:
|
|
251
|
+
List of results from each operation
|
|
252
|
+
|
|
253
|
+
Raises:
|
|
254
|
+
sqlite3.Error: If any operation fails (all operations are rolled back)
|
|
255
|
+
"""
|
|
256
|
+
results = []
|
|
257
|
+
|
|
258
|
+
with self.transaction() as conn:
|
|
259
|
+
for operation in operations:
|
|
260
|
+
try:
|
|
261
|
+
result = operation(conn)
|
|
262
|
+
results.append(result)
|
|
263
|
+
except Exception as e:
|
|
264
|
+
self.logger.error(f"Operation failed in transaction: {e}")
|
|
265
|
+
raise
|
|
266
|
+
|
|
267
|
+
return results
|
|
268
|
+
|
|
269
|
+
def backup_to_disk(self, filepath: Optional[str] = None) -> bool:
|
|
270
|
+
"""
|
|
271
|
+
Backup the current database to a disk file.
|
|
272
|
+
|
|
273
|
+
Args:
|
|
274
|
+
filepath: Target backup file path (uses default if None)
|
|
275
|
+
|
|
276
|
+
Returns:
|
|
277
|
+
True if backup successful, False otherwise
|
|
278
|
+
"""
|
|
279
|
+
if self.db_path == ":memory:" and not self._main_connection:
|
|
280
|
+
self.logger.warning("Cannot backup: no in-memory database connection")
|
|
281
|
+
return False
|
|
282
|
+
|
|
283
|
+
backup_path = filepath or self.backup_path
|
|
284
|
+
|
|
285
|
+
try:
|
|
286
|
+
# Ensure backup directory exists
|
|
287
|
+
backup_dir = os.path.dirname(backup_path)
|
|
288
|
+
if backup_dir:
|
|
289
|
+
os.makedirs(backup_dir, exist_ok=True)
|
|
290
|
+
|
|
291
|
+
# Create backup connection
|
|
292
|
+
backup_conn = sqlite3.connect(backup_path)
|
|
293
|
+
|
|
294
|
+
try:
|
|
295
|
+
# Perform backup
|
|
296
|
+
source_conn = self._main_connection or self.get_connection()
|
|
297
|
+
source_conn.backup(backup_conn)
|
|
298
|
+
|
|
299
|
+
self.last_backup = datetime.now()
|
|
300
|
+
self._changes_since_backup = 0
|
|
301
|
+
|
|
302
|
+
self.logger.info(f"Database backed up to: {backup_path}")
|
|
303
|
+
return True
|
|
304
|
+
|
|
305
|
+
finally:
|
|
306
|
+
backup_conn.close()
|
|
307
|
+
|
|
308
|
+
except Exception as e:
|
|
309
|
+
self._log_connection_error(f"Backup failed: {e}")
|
|
310
|
+
return False
|
|
311
|
+
|
|
312
|
+
def restore_from_disk(self, filepath: Optional[str] = None) -> bool:
|
|
313
|
+
"""
|
|
314
|
+
Restore database from a disk backup file.
|
|
315
|
+
|
|
316
|
+
Args:
|
|
317
|
+
filepath: Source backup file path (uses default if None)
|
|
318
|
+
|
|
319
|
+
Returns:
|
|
320
|
+
True if restore successful, False otherwise
|
|
321
|
+
"""
|
|
322
|
+
restore_path = filepath or self.backup_path
|
|
323
|
+
|
|
324
|
+
if not os.path.exists(restore_path):
|
|
325
|
+
self.logger.error(f"Backup file not found: {restore_path}")
|
|
326
|
+
return False
|
|
327
|
+
|
|
328
|
+
try:
|
|
329
|
+
# Close existing connections
|
|
330
|
+
self.close_all_connections()
|
|
331
|
+
|
|
332
|
+
# Copy backup to main database location if not in-memory
|
|
333
|
+
if self.db_path != ":memory:":
|
|
334
|
+
shutil.copy2(restore_path, self.db_path)
|
|
335
|
+
else:
|
|
336
|
+
# For in-memory, we need to restore by copying data
|
|
337
|
+
restore_conn = sqlite3.connect(restore_path)
|
|
338
|
+
try:
|
|
339
|
+
self._main_connection = sqlite3.connect(":memory:")
|
|
340
|
+
restore_conn.backup(self._main_connection)
|
|
341
|
+
self._configure_wal_mode(self._main_connection)
|
|
342
|
+
self._configure_performance_settings(self._main_connection)
|
|
343
|
+
finally:
|
|
344
|
+
restore_conn.close()
|
|
345
|
+
|
|
346
|
+
# Reinitialize if needed
|
|
347
|
+
if self.db_path != ":memory:":
|
|
348
|
+
self._initialize_main_connection()
|
|
349
|
+
|
|
350
|
+
self.logger.info(f"Database restored from: {restore_path}")
|
|
351
|
+
return True
|
|
352
|
+
|
|
353
|
+
except Exception as e:
|
|
354
|
+
self._log_connection_error(f"Restore failed: {e}")
|
|
355
|
+
return False
|
|
356
|
+
|
|
357
|
+
def _start_backup_thread(self) -> None:
|
|
358
|
+
"""Start the automatic backup thread."""
|
|
359
|
+
if self._backup_thread and self._backup_thread.is_alive():
|
|
360
|
+
return
|
|
361
|
+
|
|
362
|
+
self._backup_stop_event.clear()
|
|
363
|
+
self._backup_thread = threading.Thread(
|
|
364
|
+
target=self._backup_worker,
|
|
365
|
+
daemon=True,
|
|
366
|
+
name="DatabaseBackupWorker"
|
|
367
|
+
)
|
|
368
|
+
self._backup_thread.start()
|
|
369
|
+
self.logger.debug("Automatic backup thread started")
|
|
370
|
+
|
|
371
|
+
def _backup_worker(self) -> None:
|
|
372
|
+
"""Worker thread for automatic backups."""
|
|
373
|
+
while not self._backup_stop_event.is_set():
|
|
374
|
+
try:
|
|
375
|
+
# Check if backup is needed
|
|
376
|
+
should_backup = False
|
|
377
|
+
|
|
378
|
+
# Time-based backup
|
|
379
|
+
if self.last_backup is None:
|
|
380
|
+
should_backup = True
|
|
381
|
+
elif datetime.now() - self.last_backup > timedelta(seconds=self.backup_interval):
|
|
382
|
+
should_backup = True
|
|
383
|
+
|
|
384
|
+
# Change-based backup
|
|
385
|
+
if self._changes_since_backup >= self._max_changes_before_backup:
|
|
386
|
+
should_backup = True
|
|
387
|
+
|
|
388
|
+
if should_backup:
|
|
389
|
+
self.backup_to_disk()
|
|
390
|
+
|
|
391
|
+
# Wait before next check (but allow early termination)
|
|
392
|
+
self._backup_stop_event.wait(min(60, self.backup_interval // 5))
|
|
393
|
+
|
|
394
|
+
except Exception as e:
|
|
395
|
+
self.logger.error(f"Backup worker error: {e}")
|
|
396
|
+
# Continue running despite errors
|
|
397
|
+
self._backup_stop_event.wait(60)
|
|
398
|
+
|
|
399
|
+
def set_backup_interval(self, seconds: int) -> None:
|
|
400
|
+
"""
|
|
401
|
+
Set the automatic backup interval.
|
|
402
|
+
|
|
403
|
+
Args:
|
|
404
|
+
seconds: Backup interval in seconds (0 to disable)
|
|
405
|
+
"""
|
|
406
|
+
self.backup_interval = max(0, seconds)
|
|
407
|
+
self.auto_backup_enabled = seconds > 0
|
|
408
|
+
|
|
409
|
+
if self.auto_backup_enabled and self.db_path != ":memory:":
|
|
410
|
+
self._start_backup_thread()
|
|
411
|
+
elif not self.auto_backup_enabled and self._backup_thread:
|
|
412
|
+
self._backup_stop_event.set()
|
|
413
|
+
|
|
414
|
+
def get_connection_info(self) -> Dict[str, Any]:
|
|
415
|
+
"""
|
|
416
|
+
Get information about current database connections.
|
|
417
|
+
|
|
418
|
+
Returns:
|
|
419
|
+
Dictionary with connection statistics and status
|
|
420
|
+
"""
|
|
421
|
+
with self._lock:
|
|
422
|
+
info = {
|
|
423
|
+
'db_path': self.db_path,
|
|
424
|
+
'backup_path': self.backup_path,
|
|
425
|
+
'active_connections': len(self._connections),
|
|
426
|
+
'backup_interval': self.backup_interval,
|
|
427
|
+
'last_backup': self.last_backup.isoformat() if self.last_backup else None,
|
|
428
|
+
'changes_since_backup': self._changes_since_backup,
|
|
429
|
+
'auto_backup_enabled': self.auto_backup_enabled,
|
|
430
|
+
'recent_errors': self._connection_errors[-5:] if self._connection_errors else [],
|
|
431
|
+
'performance_monitoring_enabled': self.enable_performance_monitoring
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
# Add performance statistics if monitoring is enabled
|
|
435
|
+
if self._performance_monitor:
|
|
436
|
+
try:
|
|
437
|
+
perf_stats = self._performance_monitor.get_performance_stats()
|
|
438
|
+
info.update({
|
|
439
|
+
'query_count': self._query_count,
|
|
440
|
+
'avg_query_time': self._total_query_time / max(self._query_count, 1),
|
|
441
|
+
'slow_queries_count': len(self._slow_queries),
|
|
442
|
+
'cache_hit_rate': perf_stats.cache_hit_rate,
|
|
443
|
+
'memory_usage_mb': perf_stats.memory_usage_mb
|
|
444
|
+
})
|
|
445
|
+
except Exception as e:
|
|
446
|
+
self.logger.warning(f"Failed to get performance stats: {e}")
|
|
447
|
+
|
|
448
|
+
return info
|
|
449
|
+
|
|
450
|
+
@contextmanager
|
|
451
|
+
def monitored_query(self, query: str, params: tuple = ()):
|
|
452
|
+
"""
|
|
453
|
+
Context manager for executing queries with performance monitoring.
|
|
454
|
+
|
|
455
|
+
Args:
|
|
456
|
+
query: SQL query string
|
|
457
|
+
params: Query parameters
|
|
458
|
+
|
|
459
|
+
Yields:
|
|
460
|
+
Database connection
|
|
461
|
+
"""
|
|
462
|
+
start_time = time.time()
|
|
463
|
+
connection = self.get_connection()
|
|
464
|
+
|
|
465
|
+
try:
|
|
466
|
+
# Record setting access if it's a settings query
|
|
467
|
+
if self._performance_monitor:
|
|
468
|
+
self._extract_and_record_setting_access(query, params)
|
|
469
|
+
|
|
470
|
+
yield connection
|
|
471
|
+
|
|
472
|
+
finally:
|
|
473
|
+
# Record query performance
|
|
474
|
+
execution_time = time.time() - start_time
|
|
475
|
+
self._record_query_performance(query, execution_time)
|
|
476
|
+
|
|
477
|
+
def _extract_and_record_setting_access(self, query: str, params: tuple) -> None:
|
|
478
|
+
"""Extract setting key from query and record access."""
|
|
479
|
+
if not self._performance_monitor:
|
|
480
|
+
return
|
|
481
|
+
|
|
482
|
+
try:
|
|
483
|
+
query_lower = query.lower()
|
|
484
|
+
|
|
485
|
+
# Extract setting key from different query types
|
|
486
|
+
if 'core_settings' in query_lower and 'where key' in query_lower:
|
|
487
|
+
if params and len(params) > 0:
|
|
488
|
+
setting_key = str(params[0])
|
|
489
|
+
self._performance_monitor.record_setting_access(f"core:{setting_key}")
|
|
490
|
+
|
|
491
|
+
elif 'tool_settings' in query_lower and 'where tool_name' in query_lower:
|
|
492
|
+
if params and len(params) >= 2:
|
|
493
|
+
tool_name = str(params[0])
|
|
494
|
+
setting_path = str(params[1]) if len(params) > 1 else "all"
|
|
495
|
+
self._performance_monitor.record_setting_access(f"tool:{tool_name}.{setting_path}")
|
|
496
|
+
|
|
497
|
+
elif 'tab_content' in query_lower:
|
|
498
|
+
if params and len(params) > 0:
|
|
499
|
+
tab_type = str(params[0])
|
|
500
|
+
self._performance_monitor.record_setting_access(f"tab:{tab_type}")
|
|
501
|
+
|
|
502
|
+
except Exception as e:
|
|
503
|
+
self.logger.debug(f"Failed to extract setting access: {e}")
|
|
504
|
+
|
|
505
|
+
def _record_query_performance(self, query: str, execution_time: float) -> None:
|
|
506
|
+
"""Record query performance metrics."""
|
|
507
|
+
with self._lock:
|
|
508
|
+
self._query_count += 1
|
|
509
|
+
self._total_query_time += execution_time
|
|
510
|
+
|
|
511
|
+
# Track slow queries
|
|
512
|
+
if execution_time > self._slow_query_threshold:
|
|
513
|
+
slow_query_info = {
|
|
514
|
+
'query': query[:200], # Truncate long queries
|
|
515
|
+
'execution_time': execution_time,
|
|
516
|
+
'timestamp': datetime.now().isoformat()
|
|
517
|
+
}
|
|
518
|
+
self._slow_queries.append(slow_query_info)
|
|
519
|
+
|
|
520
|
+
# Keep only recent slow queries
|
|
521
|
+
if len(self._slow_queries) > 50:
|
|
522
|
+
self._slow_queries = self._slow_queries[-50:]
|
|
523
|
+
|
|
524
|
+
self.logger.warning(f"Slow query detected: {execution_time:.3f}s - {query[:100]}...")
|
|
525
|
+
|
|
526
|
+
def get_performance_stats(self) -> Dict[str, Any]:
    """
    Get detailed performance statistics.

    Returns:
        Dictionary with performance metrics
    """
    with self._lock:
        executed = self._query_count
        elapsed = self._total_query_time
        stats: Dict[str, Any] = {
            'total_queries': executed,
            'total_query_time': elapsed,
            # max(..., 1) guards against division by zero before any query ran.
            'avg_query_time': elapsed / max(executed, 1),
            'slow_queries_count': len(self._slow_queries),
            'slow_query_threshold': self._slow_query_threshold,
            'recent_slow_queries': self._slow_queries[-10:] if self._slow_queries else [],
        }

        # Merge in monitor-level metrics when a performance monitor is attached;
        # a failing monitor must not break stats reporting.
        if self._performance_monitor:
            try:
                monitor_stats = self._performance_monitor.get_performance_stats()
                stats.update({
                    'cache_hit_rate': monitor_stats.cache_hit_rate,
                    'queries_per_second': monitor_stats.queries_per_second,
                    'memory_usage_mb': monitor_stats.memory_usage_mb,
                    'hot_settings': self._performance_monitor.get_hot_settings(10),
                    'cache_stats': self._performance_monitor.get_cache_stats(),
                })
            except Exception as e:
                self.logger.warning(f"Failed to get monitor stats: {e}")

        return stats
def optimize_database(self) -> List[str]:
    """
    Perform database optimization based on usage patterns.

    Runs ANALYZE and PRAGMA optimize, vacuums fragmented on-disk databases,
    and applies index suggestions from the performance monitor.

    Returns:
        List of optimization actions performed
    """
    actions: List[str] = []

    try:
        conn = self.get_connection()

        # Refresh the statistics the query planner relies on.
        conn.execute("ANALYZE")
        actions.append("Analyzed database statistics")

        conn.execute("PRAGMA optimize")
        actions.append("Optimized query planner")

        # On-disk databases only: reclaim space when fragmentation exceeds 10%.
        if self.db_path != ":memory:":
            free_pages = conn.execute("PRAGMA freelist_count").fetchone()[0]
            total_pages = conn.execute("PRAGMA page_count").fetchone()[0]

            if total_pages > 0 and (free_pages / total_pages) > 0.1:  # 10% fragmentation
                conn.execute("VACUUM")
                actions.append("Vacuumed database to reduce fragmentation")

        # Apply index suggestions derived from observed access patterns.
        monitor = self._performance_monitor
        if monitor:
            for index_sql in monitor.optimize_indexes(self):
                try:
                    conn.execute(index_sql)
                    actions.append(f"Created index: {index_sql}")
                except sqlite3.Error as e:
                    self.logger.warning(f"Failed to create index: {e}")

        self.logger.info(f"Database optimization completed: {len(actions)} actions")

    except Exception as e:
        self.logger.error(f"Database optimization failed: {e}")
        actions.append(f"Optimization failed: {e}")

    return actions
def clear_performance_data(self) -> None:
    """Reset accumulated query counters, timings, and slow-query history."""
    with self._lock:
        self._query_count = 0
        self._total_query_time = 0.0
        self._slow_queries.clear()

    # Also reset the attached performance monitor, when one exists.
    monitor = self._performance_monitor
    if monitor:
        monitor.reset_metrics()
def set_slow_query_threshold(self, threshold_seconds: float) -> None:
    """
    Set the threshold for slow query detection.

    Args:
        threshold_seconds: Threshold in seconds; values below 1ms are clamped.
    """
    floor = 0.001  # never allow a threshold under 1ms
    self._slow_query_threshold = threshold_seconds if threshold_seconds > floor else floor
def _log_connection_error(self, error_msg: str) -> None:
|
|
630
|
+
"""Log connection error with timestamp."""
|
|
631
|
+
error_entry = {
|
|
632
|
+
'timestamp': datetime.now().isoformat(),
|
|
633
|
+
'error': error_msg
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
self._connection_errors.append(error_entry)
|
|
637
|
+
|
|
638
|
+
# Keep only recent errors
|
|
639
|
+
if len(self._connection_errors) > self._max_error_history:
|
|
640
|
+
self._connection_errors = self._connection_errors[-self._max_error_history:]
|
|
641
|
+
|
|
642
|
+
self.logger.error(error_msg)
|
|
643
|
+
|
|
644
|
+
def close_connection(self, thread_id: Optional[int] = None) -> None:
    """
    Close database connection for specific thread or current thread.

    Args:
        thread_id: Thread ID to close connection for (None for current thread)
    """
    # Use an explicit `is None` test rather than truthiness: the previous
    # `thread_id or threading.get_ident()` would silently remap a falsy
    # thread id (0) to the current thread.
    target_thread = thread_id if thread_id is not None else threading.get_ident()

    with self._lock:
        if target_thread not in self._connections:
            return  # nothing registered for that thread
        try:
            self._connections[target_thread].close()
            del self._connections[target_thread]
            self.logger.debug(f"Closed connection for thread {target_thread}")
        except Exception as e:
            # Best-effort close: log and continue; entry stays if close() failed.
            self.logger.warning(f"Error closing connection for thread {target_thread}: {e}")
def close_all_connections(self) -> None:
    """Close all database connections and stop background threads."""
    # Signal the backup worker to stop and give it a bounded time to exit.
    backup = self._backup_thread
    if backup and backup.is_alive():
        self._backup_stop_event.set()
        backup.join(timeout=5)

    with self._lock:
        # NOTE(review): close_connection() re-acquires self._lock, so this
        # assumes the lock is reentrant (RLock) — confirm at construction.
        for tid in list(self._connections):
            self.close_connection(tid)

        # Finally shut down the main connection, if one is open.
        if self._main_connection:
            try:
                self._main_connection.close()
                self._main_connection = None
                self.logger.info("All database connections closed")
            except Exception as e:
                self.logger.warning(f"Error closing main connection: {e}")
def __enter__(self):
|
|
684
|
+
"""Context manager entry."""
|
|
685
|
+
return self
|
|
686
|
+
|
|
687
|
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
688
|
+
"""Context manager exit - close all connections."""
|
|
689
|
+
self.close_all_connections()
|
|
690
|
+
|
|
691
|
+
|
|
692
|
+
# Connection pool for shared access across modules
class ConnectionPool:
    """
    Singleton connection pool for shared database access across the application.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Double-checked locking: cheap unlocked probe first, then a locked
        # re-check so exactly one instance is ever constructed.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
                    cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        # __init__ runs on every ConnectionPool() call; set up state only once.
        if getattr(self, '_initialized', False):
            return
        self._manager = None
        self._initialized = True

    def initialize(self, db_path: str = ":memory:", backup_path: Optional[str] = None) -> None:
        """
        Initialize the connection pool with database settings.

        Any previously held manager is shut down before the new one is built.

        Args:
            db_path: Path to SQLite database file
            backup_path: Path for automatic backups
        """
        previous = self._manager
        if previous:
            previous.close_all_connections()

        self._manager = DatabaseConnectionManager(db_path, backup_path)

    def get_manager(self) -> DatabaseConnectionManager:
        """
        Get the connection manager instance.

        Returns:
            DatabaseConnectionManager instance

        Raises:
            RuntimeError: If pool not initialized
        """
        if not self._manager:
            raise RuntimeError("Connection pool not initialized. Call initialize() first.")
        return self._manager

    def close(self) -> None:
        """Close the connection pool."""
        manager = self._manager
        if manager:
            manager.close_all_connections()
            self._manager = None