pomera-ai-commander 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (192) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +680 -0
  3. package/bin/pomera-ai-commander.js +62 -0
  4. package/core/__init__.py +66 -0
  5. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  6. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  7. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  8. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  9. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  10. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  11. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  12. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  13. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  14. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  15. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  16. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  17. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  18. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  19. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  20. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  21. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  22. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  23. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  24. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  25. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  26. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  27. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  28. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  29. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  30. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  31. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  32. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  33. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  34. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  35. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  36. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  37. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  38. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  39. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  40. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  41. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  42. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  43. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  44. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  45. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  46. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  47. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  48. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  49. package/core/app_context.py +482 -0
  50. package/core/async_text_processor.py +422 -0
  51. package/core/backup_manager.py +656 -0
  52. package/core/backup_recovery_manager.py +1034 -0
  53. package/core/content_hash_cache.py +509 -0
  54. package/core/context_menu.py +313 -0
  55. package/core/data_validator.py +1067 -0
  56. package/core/database_connection_manager.py +745 -0
  57. package/core/database_curl_settings_manager.py +609 -0
  58. package/core/database_promera_ai_settings_manager.py +447 -0
  59. package/core/database_schema.py +412 -0
  60. package/core/database_schema_manager.py +396 -0
  61. package/core/database_settings_manager.py +1508 -0
  62. package/core/database_settings_manager_interface.py +457 -0
  63. package/core/dialog_manager.py +735 -0
  64. package/core/efficient_line_numbers.py +511 -0
  65. package/core/error_handler.py +747 -0
  66. package/core/error_service.py +431 -0
  67. package/core/event_consolidator.py +512 -0
  68. package/core/mcp/__init__.py +43 -0
  69. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  70. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  71. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  72. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  73. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  74. package/core/mcp/protocol.py +288 -0
  75. package/core/mcp/schema.py +251 -0
  76. package/core/mcp/server_stdio.py +299 -0
  77. package/core/mcp/tool_registry.py +2345 -0
  78. package/core/memory_efficient_text_widget.py +712 -0
  79. package/core/migration_manager.py +915 -0
  80. package/core/migration_test_suite.py +1086 -0
  81. package/core/migration_validator.py +1144 -0
  82. package/core/optimized_find_replace.py +715 -0
  83. package/core/optimized_pattern_engine.py +424 -0
  84. package/core/optimized_search_highlighter.py +553 -0
  85. package/core/performance_monitor.py +675 -0
  86. package/core/persistence_manager.py +713 -0
  87. package/core/progressive_stats_calculator.py +632 -0
  88. package/core/regex_pattern_cache.py +530 -0
  89. package/core/regex_pattern_library.py +351 -0
  90. package/core/search_operation_manager.py +435 -0
  91. package/core/settings_defaults_registry.py +1087 -0
  92. package/core/settings_integrity_validator.py +1112 -0
  93. package/core/settings_serializer.py +558 -0
  94. package/core/settings_validator.py +1824 -0
  95. package/core/smart_stats_calculator.py +710 -0
  96. package/core/statistics_update_manager.py +619 -0
  97. package/core/stats_config_manager.py +858 -0
  98. package/core/streaming_text_handler.py +723 -0
  99. package/core/task_scheduler.py +596 -0
  100. package/core/update_pattern_library.py +169 -0
  101. package/core/visibility_monitor.py +596 -0
  102. package/core/widget_cache.py +498 -0
  103. package/mcp.json +61 -0
  104. package/package.json +57 -0
  105. package/pomera.py +7483 -0
  106. package/pomera_mcp_server.py +144 -0
  107. package/tools/__init__.py +5 -0
  108. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  109. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  110. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  111. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  112. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  113. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  114. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  115. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  116. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  117. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  118. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  119. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  120. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  121. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  122. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  123. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  124. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  125. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  126. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  127. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  128. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  129. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  130. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  131. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  132. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  133. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  134. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  135. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  136. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  137. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  138. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  139. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  140. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  141. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  142. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  143. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  144. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  145. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  146. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  147. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  148. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
  151. package/tools/ai_tools.py +2892 -0
  152. package/tools/ascii_art_generator.py +353 -0
  153. package/tools/base64_tools.py +184 -0
  154. package/tools/base_tool.py +511 -0
  155. package/tools/case_tool.py +309 -0
  156. package/tools/column_tools.py +396 -0
  157. package/tools/cron_tool.py +885 -0
  158. package/tools/curl_history.py +601 -0
  159. package/tools/curl_processor.py +1208 -0
  160. package/tools/curl_settings.py +503 -0
  161. package/tools/curl_tool.py +5467 -0
  162. package/tools/diff_viewer.py +1072 -0
  163. package/tools/email_extraction_tool.py +249 -0
  164. package/tools/email_header_analyzer.py +426 -0
  165. package/tools/extraction_tools.py +250 -0
  166. package/tools/find_replace.py +1751 -0
  167. package/tools/folder_file_reporter.py +1463 -0
  168. package/tools/folder_file_reporter_adapter.py +480 -0
  169. package/tools/generator_tools.py +1217 -0
  170. package/tools/hash_generator.py +256 -0
  171. package/tools/html_tool.py +657 -0
  172. package/tools/huggingface_helper.py +449 -0
  173. package/tools/jsonxml_tool.py +730 -0
  174. package/tools/line_tools.py +419 -0
  175. package/tools/list_comparator.py +720 -0
  176. package/tools/markdown_tools.py +562 -0
  177. package/tools/mcp_widget.py +1417 -0
  178. package/tools/notes_widget.py +973 -0
  179. package/tools/number_base_converter.py +373 -0
  180. package/tools/regex_extractor.py +572 -0
  181. package/tools/slug_generator.py +311 -0
  182. package/tools/sorter_tools.py +459 -0
  183. package/tools/string_escape_tool.py +393 -0
  184. package/tools/text_statistics_tool.py +366 -0
  185. package/tools/text_wrapper.py +431 -0
  186. package/tools/timestamp_converter.py +422 -0
  187. package/tools/tool_loader.py +710 -0
  188. package/tools/translator_tools.py +523 -0
  189. package/tools/url_link_extractor.py +262 -0
  190. package/tools/url_parser.py +205 -0
  191. package/tools/whitespace_tools.py +356 -0
  192. package/tools/word_frequency_counter.py +147 -0
@@ -0,0 +1,675 @@
1
+ """
2
+ Performance Monitor for Database Settings System
3
+
4
+ This module provides comprehensive performance monitoring and optimization
5
+ for the database settings system, including query performance tracking,
6
+ caching layer, memory usage monitoring, and automatic optimization.
7
+ """
8
+
9
+ import time
10
+ import threading
11
+ import sqlite3
12
+ import logging
13
+ import statistics
14
+ from typing import Dict, List, Any, Optional, Callable, Tuple
15
+ from datetime import datetime, timedelta
16
+ from collections import defaultdict, deque
17
+ from dataclasses import dataclass, field
18
+ from contextlib import contextmanager
19
+ import psutil
20
+ import os
21
+
22
+
23
@dataclass
class QueryMetrics:
    """Metrics captured for one database query execution.

    Instances are appended to PerformanceMonitor.query_metrics and later
    aggregated into PerformanceStats over a time window.
    """

    query_hash: str         # stable fingerprint of query text + params
    query_text: str         # raw SQL (kept for slow-query reporting)
    execution_time: float   # wall-clock seconds for this execution
    timestamp: datetime     # when the query finished
    thread_id: int          # ident of the executing thread
    result_count: int = 0   # rows returned (0 when unknown)
    cache_hit: bool = False # True when served from the query cache
33
+
34
+
35
@dataclass
class PerformanceStats:
    """Aggregated performance statistics over a reporting window.

    Produced by PerformanceMonitor.get_performance_stats(); all fields
    default to "no data observed" values.
    """

    total_queries: int = 0
    avg_execution_time: float = 0.0
    max_execution_time: float = 0.0
    # inf sentinel: stays inf when no non-cached query was observed
    min_execution_time: float = float('inf')
    cache_hit_rate: float = 0.0        # percentage, 0-100
    queries_per_second: float = 0.0
    memory_usage_mb: float = 0.0       # process RSS at collection time
    active_connections: int = 0        # from the attached pool, if any
    # per-instance list (default_factory avoids a shared mutable default)
    slow_queries: List[QueryMetrics] = field(default_factory=list)
47
+
48
+
49
class QueryCache:
    """
    LRU cache for database query results with TTL support.

    Implementation note: a single insertion-ordered dict (guaranteed since
    Python 3.7) serves as both the store and the recency list — re-inserting
    a key moves it to the end (most recently used) and the first key is
    always the least recently used.  This replaces the original deque-based
    bookkeeping, whose ``deque.remove`` made every cache hit O(n).
    """

    def __init__(self, max_size: int = 1000, ttl_seconds: int = 300):
        """
        Initialize query cache.

        Args:
            max_size: Maximum number of cached queries
            ttl_seconds: Time-to-live for cached results in seconds
        """
        self.max_size = max_size
        self.ttl_seconds = ttl_seconds
        # query_hash -> {'result': ..., 'timestamp': datetime}; dict order
        # is maintained as LRU order (oldest first).
        self._cache = {}
        self._lock = threading.RLock()

        # Cache statistics
        self.hits = 0
        self.misses = 0
        self.evictions = 0

    def get(self, query_hash: str) -> Optional[Any]:
        """
        Get cached query result.

        Args:
            query_hash: Hash of the query

        Returns:
            Cached result or None if not found/expired
        """
        with self._lock:
            entry = self._cache.get(query_hash)
            if entry is None:
                self.misses += 1
                return None

            # Expired entries are dropped and counted as misses.
            if datetime.now() - entry['timestamp'] > timedelta(seconds=self.ttl_seconds):
                del self._cache[query_hash]
                self.misses += 1
                return None

            # Promote to most-recently-used: delete + reinsert at the end.
            del self._cache[query_hash]
            self._cache[query_hash] = entry

            self.hits += 1
            return entry['result']

    def put(self, query_hash: str, result: Any) -> None:
        """
        Cache query result.

        Args:
            query_hash: Hash of the query
            result: Query result to cache
        """
        with self._lock:
            if query_hash in self._cache:
                # Re-inserting below refreshes both value and LRU position.
                del self._cache[query_hash]
            elif len(self._cache) >= self.max_size:
                # Evict the least-recently-used entry (first dict key).
                oldest = next(iter(self._cache))
                del self._cache[oldest]
                self.evictions += 1

            self._cache[query_hash] = {
                'result': result,
                'timestamp': datetime.now()
            }

    def clear(self) -> None:
        """Clear all cached entries (statistics are preserved)."""
        with self._lock:
            self._cache.clear()

    def get_stats(self) -> Dict[str, Any]:
        """Get cache statistics (size, hit/miss/eviction counts, hit rate)."""
        with self._lock:
            total_requests = self.hits + self.misses
            hit_rate = (self.hits / total_requests * 100) if total_requests > 0 else 0

            return {
                'size': len(self._cache),
                'max_size': self.max_size,
                'hits': self.hits,
                'misses': self.misses,
                'evictions': self.evictions,
                'hit_rate_percent': hit_rate,
                'ttl_seconds': self.ttl_seconds
            }
151
+
152
+
153
class ConnectionPool:
    """
    SQLite connection pool with borrow/return accounting.

    Connections are created lazily, configured for WAL-mode performance,
    and recycled through a deque.  All bookkeeping is guarded by a
    reentrant lock so the pool can be shared across threads (connections
    are created with check_same_thread=False).
    """

    def __init__(self, db_path: str, max_connections: int = 10):
        """
        Initialize connection pool.

        Args:
            db_path: Database file path
            max_connections: Maximum number of pooled (idle) connections
        """
        self.db_path = db_path
        self.max_connections = max_connections
        self._pool = deque()                # idle connections, FIFO
        self._active_connections = set()    # connections currently borrowed
        self._lock = threading.RLock()
        self._created_count = 0
        self._borrowed_count = 0
        self._returned_count = 0

    @contextmanager
    def get_connection(self):
        """
        Get a connection from the pool.

        Yields:
            SQLite connection (returned to the pool on exit, even on error)
        """
        conn = self._borrow_connection()
        try:
            yield conn
        finally:
            self._return_connection(conn)

    def _borrow_connection(self) -> sqlite3.Connection:
        """Borrow an idle connection, creating a new one if none is pooled."""
        with self._lock:
            if self._pool:
                conn = self._pool.popleft()
            else:
                conn = self._create_connection()

            self._active_connections.add(conn)
            self._borrowed_count += 1
            return conn

    def _return_connection(self, conn: sqlite3.Connection) -> None:
        """Return a connection to the pool, closing it if the pool is full."""
        with self._lock:
            if conn in self._active_connections:
                self._active_connections.remove(conn)

                if len(self._pool) < self.max_connections:
                    self._pool.append(conn)
                    self._returned_count += 1
                else:
                    conn.close()
            else:
                # Defensive: a connection we never tracked must not be
                # pooled (it could be closed or foreign) nor leaked.
                # The original code silently dropped it without closing.
                conn.close()

    def _create_connection(self) -> sqlite3.Connection:
        """Create and configure a new database connection."""
        conn = sqlite3.connect(
            self.db_path,
            timeout=30.0,
            isolation_level=None,       # autocommit; callers manage transactions
            check_same_thread=False     # pool hands connections across threads
        )

        # Performance-oriented pragmas: WAL journal for concurrent readers,
        # relaxed (but safe-with-WAL) sync, larger page cache, in-memory
        # temp tables, 256 MB mmap window, and enforced foreign keys.
        conn.execute("PRAGMA journal_mode=WAL")
        conn.execute("PRAGMA synchronous=NORMAL")
        conn.execute("PRAGMA cache_size=10000")
        conn.execute("PRAGMA temp_store=MEMORY")
        conn.execute("PRAGMA mmap_size=268435456")
        conn.execute("PRAGMA foreign_keys=ON")

        self._created_count += 1
        return conn

    def get_stats(self) -> Dict[str, Any]:
        """Get connection pool statistics."""
        with self._lock:
            return {
                'pool_size': len(self._pool),
                'active_connections': len(self._active_connections),
                'max_connections': self.max_connections,
                'created_count': self._created_count,
                'borrowed_count': self._borrowed_count,
                'returned_count': self._returned_count
            }

    def close_all(self) -> None:
        """Close every pooled and active connection.

        Note: active connections are closed out from under their borrowers;
        call this only at shutdown.
        """
        with self._lock:
            # Close pooled (idle) connections
            while self._pool:
                conn = self._pool.popleft()
                conn.close()

            # Close connections still checked out
            for conn in list(self._active_connections):
                conn.close()
            self._active_connections.clear()
257
+
258
+
259
class PerformanceMonitor:
    """
    Comprehensive performance monitoring system for database settings.

    Responsibilities:
      * record per-query execution metrics in a bounded history,
      * optionally serve and populate a QueryCache of query results,
      * track frequently accessed settings and queries ("hot" data),
      * sample process memory usage via psutil,
      * suggest index DDL based on observed query patterns.

    All mutable state is guarded by a reentrant lock so a single monitor
    instance can be shared across threads.
    """

    def __init__(self, enable_caching: bool = True, cache_size: int = 1000,
                 slow_query_threshold: float = 0.1, max_metrics_history: int = 10000):
        """
        Initialize performance monitor.

        Args:
            enable_caching: Whether to enable query result caching
            cache_size: Maximum number of cached queries
            slow_query_threshold: Threshold in seconds for slow query detection
            max_metrics_history: Maximum number of query metrics to keep
        """
        self.enable_caching = enable_caching
        self.slow_query_threshold = slow_query_threshold
        self.max_metrics_history = max_metrics_history

        # Query cache (None when caching is disabled)
        self.query_cache = QueryCache(max_size=cache_size) if enable_caching else None

        # Metrics storage; bounded deque drops the oldest metrics automatically
        self.query_metrics = deque(maxlen=max_metrics_history)
        self.query_stats = defaultdict(list)  # query_hash -> [execution_times]

        # Performance tracking
        self._lock = threading.RLock()
        self.start_time = datetime.now()
        self.logger = logging.getLogger(__name__)

        # Hot data tracking
        self.hot_settings = defaultdict(int)  # setting_key -> access_count
        self.hot_queries = defaultdict(int)   # query_hash -> execution_count

        # Memory monitoring
        self.process = psutil.Process(os.getpid())
        self.memory_samples = deque(maxlen=100)

        # Connection pool (optional, attached via set_connection_pool)
        self.connection_pool = None

    def set_connection_pool(self, pool: ConnectionPool) -> None:
        """Attach a ConnectionPool so monitor_query can hand out connections."""
        self.connection_pool = pool

    def _hash_query(self, query: str, params: Tuple = ()) -> str:
        """Return a stable fingerprint for a query plus its parameters.

        MD5 is used purely as a fast, deterministic cache/statistics key,
        not for any security purpose.
        """
        import hashlib
        query_str = f"{query}:{str(params)}"
        return hashlib.md5(query_str.encode()).hexdigest()

    @contextmanager
    def monitor_query(self, query: str, params: Tuple = ()):
        """
        Context manager for monitoring query execution.

        Args:
            query: SQL query string
            params: Query parameters

        Yields:
            Tuple of (connection, cached_result_if_available).  On a cache
            hit the connection is None and the cached result is returned.
            Without an attached pool both elements are None and the caller
            must supply its own connection.
        """
        query_hash = self._hash_query(query, params)
        start_time = time.time()
        thread_id = threading.get_ident()

        # Serve from cache when possible; a hit short-circuits execution.
        if self.query_cache:
            cached_result = self.query_cache.get(query_hash)
            if cached_result is not None:
                execution_time = time.time() - start_time
                self._record_query_metric(
                    query_hash, query, execution_time, thread_id,
                    len(cached_result) if isinstance(cached_result, (list, tuple)) else 1,
                    cache_hit=True
                )
                yield None, cached_result
                return

        # Execute query; metrics are recorded even if the caller raises.
        try:
            if self.connection_pool:
                with self.connection_pool.get_connection() as conn:
                    yield conn, None
            else:
                # Caller provides its own connection.
                yield None, None
        finally:
            # Result count is unknown at this layer, so it is recorded as 0.
            execution_time = time.time() - start_time
            self._record_query_metric(
                query_hash, query, execution_time, thread_id, 0, cache_hit=False
            )

    def cache_query_result(self, query: str, params: Tuple, result: Any) -> None:
        """
        Cache query result if caching is enabled.

        Args:
            query: SQL query string
            params: Query parameters
            result: Query result to cache
        """
        if self.query_cache:
            query_hash = self._hash_query(query, params)
            self.query_cache.put(query_hash, result)

    def _record_query_metric(self, query_hash: str, query: str, execution_time: float,
                             thread_id: int, result_count: int, cache_hit: bool = False) -> None:
        """Record one query execution into the metrics history.

        Also bumps the hot-query counter and logs a warning for non-cached
        executions slower than slow_query_threshold.
        """
        with self._lock:
            metric = QueryMetrics(
                query_hash=query_hash,
                query_text=query,
                execution_time=execution_time,
                timestamp=datetime.now(),
                thread_id=thread_id,
                result_count=result_count,
                cache_hit=cache_hit
            )

            self.query_metrics.append(metric)
            self.query_stats[query_hash].append(execution_time)
            self.hot_queries[query_hash] += 1

            if execution_time > self.slow_query_threshold and not cache_hit:
                self.logger.warning(
                    f"Slow query detected: {execution_time:.3f}s - {query[:100]}..."
                )

    def record_setting_access(self, setting_key: str) -> None:
        """
        Record access to a specific setting for hot data tracking.

        Args:
            setting_key: Setting key that was accessed
        """
        with self._lock:
            self.hot_settings[setting_key] += 1

    def get_performance_stats(self, window_minutes: int = 60) -> PerformanceStats:
        """
        Get aggregated performance statistics.

        Args:
            window_minutes: Time window for statistics in minutes

        Returns:
            PerformanceStats object with aggregated metrics.  Timing fields
            (avg/max/min) cover non-cached executions only; min stays inf
            when none were observed.
        """
        with self._lock:
            cutoff_time = datetime.now() - timedelta(minutes=window_minutes)

            recent_metrics = [
                m for m in self.query_metrics
                if m.timestamp >= cutoff_time
            ]

            if not recent_metrics:
                return PerformanceStats()

            execution_times = [m.execution_time for m in recent_metrics if not m.cache_hit]
            cache_hits = sum(1 for m in recent_metrics if m.cache_hit)

            # Note: queries_per_second is averaged over the full window,
            # even if monitoring started more recently.
            stats = PerformanceStats(
                total_queries=len(recent_metrics),
                cache_hit_rate=(cache_hits / len(recent_metrics) * 100) if recent_metrics else 0,
                queries_per_second=len(recent_metrics) / (window_minutes * 60),
                memory_usage_mb=self._get_memory_usage_mb()
            )

            if execution_times:
                stats.avg_execution_time = statistics.mean(execution_times)
                stats.max_execution_time = max(execution_times)
                stats.min_execution_time = min(execution_times)

            stats.slow_queries = [
                m for m in recent_metrics
                if m.execution_time > self.slow_query_threshold and not m.cache_hit
            ]

            if self.connection_pool:
                pool_stats = self.connection_pool.get_stats()
                stats.active_connections = pool_stats['active_connections']

            return stats

    def get_hot_settings(self, top_n: int = 10) -> List[Tuple[str, int]]:
        """
        Get most frequently accessed settings.

        Args:
            top_n: Number of top settings to return

        Returns:
            List of (setting_key, access_count) tuples, most accessed first
        """
        with self._lock:
            return sorted(
                self.hot_settings.items(),
                key=lambda x: x[1],
                reverse=True
            )[:top_n]

    def get_hot_queries(self, top_n: int = 10) -> List[Tuple[str, int, float]]:
        """
        Get most frequently executed queries with average execution time.

        Args:
            top_n: Number of top queries to return

        Returns:
            List of (query_hash, execution_count, avg_time) tuples
        """
        with self._lock:
            hot_queries = []
            for query_hash, count in self.hot_queries.items():
                if query_hash in self.query_stats:
                    avg_time = statistics.mean(self.query_stats[query_hash])
                    hot_queries.append((query_hash, count, avg_time))

            return sorted(hot_queries, key=lambda x: x[1], reverse=True)[:top_n]

    def _get_memory_usage_mb(self) -> float:
        """Sample current process RSS in MB (0.0 if psutil fails)."""
        try:
            memory_info = self.process.memory_info()
            memory_mb = memory_info.rss / 1024 / 1024
            self.memory_samples.append(memory_mb)
            return memory_mb
        except Exception:
            # Best-effort: memory stats are informational only.
            return 0.0

    def get_memory_trend(self) -> Dict[str, float]:
        """Get memory usage trend statistics (current/average/peak, MB)."""
        # Snapshot under the lock for consistency with the other accessors
        # (the original read memory_samples unlocked).
        with self._lock:
            samples = list(self.memory_samples)

        if not samples:
            return {'current': 0.0, 'average': 0.0, 'peak': 0.0}

        return {
            'current': samples[-1],
            'average': statistics.mean(samples),
            'peak': max(samples)
        }

    def optimize_indexes(self, connection_manager) -> List[str]:
        """
        Analyze query patterns and suggest index optimizations.

        Args:
            connection_manager: Database connection manager (currently
                unused; kept for interface compatibility)

        Returns:
            List of suggested index creation SQL statements (deduplicated)
        """
        suggestions = []

        # Analyze the hottest queries for index opportunities
        hot_queries = self.get_hot_queries(20)

        for query_hash, count, avg_time in hot_queries:
            # Recover the query text from the metrics history
            query_text = None
            for metric in self.query_metrics:
                if metric.query_hash == query_hash:
                    query_text = metric.query_text
                    break

            if not query_text:
                continue

            query_lower = query_text.lower()

            # Tool settings queries
            if 'tool_settings' in query_lower and 'where tool_name' in query_lower:
                if count > 10 and avg_time > 0.01:
                    suggestions.append(
                        "CREATE INDEX IF NOT EXISTS idx_tool_settings_tool_name_path "
                        "ON tool_settings(tool_name, setting_path)"
                    )

            # Core settings queries
            if 'core_settings' in query_lower and 'where key' in query_lower:
                if count > 10 and avg_time > 0.01:
                    suggestions.append(
                        "CREATE INDEX IF NOT EXISTS idx_core_settings_key "
                        "ON core_settings(key)"
                    )

            # Performance settings queries
            if 'performance_settings' in query_lower and 'where category' in query_lower:
                if count > 5 and avg_time > 0.01:
                    suggestions.append(
                        "CREATE INDEX IF NOT EXISTS idx_performance_settings_category "
                        "ON performance_settings(category, setting_key)"
                    )

        return list(set(suggestions))  # Remove duplicates

    def clear_cache(self) -> None:
        """Clear query cache (no-op when caching is disabled)."""
        if self.query_cache:
            self.query_cache.clear()

    def reset_metrics(self) -> None:
        """Reset all performance metrics and restart the monitoring clock."""
        with self._lock:
            self.query_metrics.clear()
            self.query_stats.clear()
            self.hot_settings.clear()
            self.hot_queries.clear()
            self.memory_samples.clear()
            self.start_time = datetime.now()

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get query cache statistics, or {'enabled': False} when disabled."""
        if self.query_cache:
            return self.query_cache.get_stats()
        return {'enabled': False}

    def export_metrics(self, filepath: str) -> bool:
        """
        Export performance metrics to JSON file.

        Args:
            filepath: Target file path

        Returns:
            True if export successful, False on any error (logged)
        """
        try:
            import json

            with self._lock:
                metrics_data = {
                    'export_timestamp': datetime.now().isoformat(),
                    'monitoring_duration_minutes': (datetime.now() - self.start_time).total_seconds() / 60,
                    'performance_stats': self.get_performance_stats().__dict__,
                    'hot_settings': dict(self.hot_settings),
                    'hot_queries': dict(self.hot_queries),
                    'cache_stats': self.get_cache_stats(),
                    'memory_trend': self.get_memory_trend(),
                    'recent_slow_queries': [
                        {
                            'query': m.query_text[:200],
                            'execution_time': m.execution_time,
                            'timestamp': m.timestamp.isoformat()
                        }
                        for m in self.query_metrics
                        if m.execution_time > self.slow_query_threshold
                    ][-20:]  # Last 20 slow queries
                }

            # default=str stringifies non-JSON types (e.g. QueryMetrics)
            with open(filepath, 'w') as f:
                json.dump(metrics_data, f, indent=2, default=str)

            self.logger.info(f"Performance metrics exported to {filepath}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to export metrics: {e}")
            return False
633
+
634
+
635
# Module-level singleton: one PerformanceMonitor shared by the process.
_performance_monitor = None
_monitor_lock = threading.Lock()


def get_performance_monitor() -> PerformanceMonitor:
    """Return the global performance monitor, creating it lazily.

    Uses the check / lock / re-check pattern so concurrent first callers
    construct at most one instance.
    """
    global _performance_monitor

    if _performance_monitor is None:
        with _monitor_lock:
            # Re-check: another thread may have won the race while we
            # waited for the lock.
            if _performance_monitor is None:
                _performance_monitor = PerformanceMonitor()

    return _performance_monitor


def initialize_performance_monitoring(enable_caching: bool = True,
                                      cache_size: int = 1000,
                                      slow_query_threshold: float = 0.1) -> PerformanceMonitor:
    """
    Initialize global performance monitoring.

    Replaces any existing global monitor with a freshly configured one.

    Args:
        enable_caching: Whether to enable query result caching
        cache_size: Maximum number of cached queries
        slow_query_threshold: Threshold in seconds for slow query detection

    Returns:
        PerformanceMonitor instance
    """
    global _performance_monitor

    with _monitor_lock:
        monitor = PerformanceMonitor(
            enable_caching=enable_caching,
            cache_size=cache_size,
            slow_query_threshold=slow_query_threshold
        )
        _performance_monitor = monitor

    return monitor