pomera-ai-commander 1.1.1 → 1.2.2

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
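A minimal sketch (not part of the package) of how such a version-to-version comparison could be reproduced locally. It assumes the package is published to the public npm registry and that its tarballs follow the standard https://registry.npmjs.org/<name>/-/<name>-<version>.tgz layout; binary files such as *.pyc are skipped.

import difflib
import io
import tarfile
import urllib.request

NAME = "pomera-ai-commander"

def fetch_members(version: str) -> dict[str, str]:
    """Download the published tarball and return {path: text} for its text files."""
    url = f"https://registry.npmjs.org/{NAME}/-/{NAME}-{version}.tgz"
    with urllib.request.urlopen(url) as resp:
        data = resp.read()
    files = {}
    with tarfile.open(fileobj=io.BytesIO(data), mode="r:gz") as tar:
        for member in tar.getmembers():
            if member.isfile():
                raw = tar.extractfile(member).read()
                try:
                    files[member.name] = raw.decode("utf-8")
                except UnicodeDecodeError:
                    pass  # skip binary members (e.g. compiled *.pyc files)
    return files

old, new = fetch_members("1.1.1"), fetch_members("1.2.2")
for path in sorted(set(old) | set(new)):
    diff = difflib.unified_diff(
        old.get(path, "").splitlines(), new.get(path, "").splitlines(),
        fromfile=f"1.1.1/{path}", tofile=f"1.2.2/{path}", lineterm="",
    )
    for line in diff:
        print(line)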
Files changed (213)
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1199 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_directory.py +549 -0
  12. package/core/data_validator.py +1066 -1066
  13. package/core/database_connection_manager.py +744 -744
  14. package/core/database_curl_settings_manager.py +608 -608
  15. package/core/database_promera_ai_settings_manager.py +446 -446
  16. package/core/database_schema.py +411 -411
  17. package/core/database_schema_manager.py +395 -395
  18. package/core/database_settings_manager.py +1507 -1507
  19. package/core/database_settings_manager_interface.py +456 -456
  20. package/core/dialog_manager.py +734 -734
  21. package/core/diff_utils.py +239 -0
  22. package/core/efficient_line_numbers.py +540 -510
  23. package/core/error_handler.py +746 -746
  24. package/core/error_service.py +431 -431
  25. package/core/event_consolidator.py +511 -511
  26. package/core/mcp/__init__.py +43 -43
  27. package/core/mcp/find_replace_diff.py +334 -0
  28. package/core/mcp/protocol.py +288 -288
  29. package/core/mcp/schema.py +251 -251
  30. package/core/mcp/server_stdio.py +299 -299
  31. package/core/mcp/tool_registry.py +2699 -2345
  32. package/core/memento.py +275 -0
  33. package/core/memory_efficient_text_widget.py +711 -711
  34. package/core/migration_manager.py +914 -914
  35. package/core/migration_test_suite.py +1085 -1085
  36. package/core/migration_validator.py +1143 -1143
  37. package/core/optimized_find_replace.py +714 -714
  38. package/core/optimized_pattern_engine.py +424 -424
  39. package/core/optimized_search_highlighter.py +552 -552
  40. package/core/performance_monitor.py +674 -674
  41. package/core/persistence_manager.py +712 -712
  42. package/core/progressive_stats_calculator.py +632 -632
  43. package/core/regex_pattern_cache.py +529 -529
  44. package/core/regex_pattern_library.py +350 -350
  45. package/core/search_operation_manager.py +434 -434
  46. package/core/settings_defaults_registry.py +1087 -1087
  47. package/core/settings_integrity_validator.py +1111 -1111
  48. package/core/settings_serializer.py +557 -557
  49. package/core/settings_validator.py +1823 -1823
  50. package/core/smart_stats_calculator.py +709 -709
  51. package/core/statistics_update_manager.py +619 -619
  52. package/core/stats_config_manager.py +858 -858
  53. package/core/streaming_text_handler.py +723 -723
  54. package/core/task_scheduler.py +596 -596
  55. package/core/update_pattern_library.py +168 -168
  56. package/core/visibility_monitor.py +596 -596
  57. package/core/widget_cache.py +498 -498
  58. package/mcp.json +51 -61
  59. package/migrate_data.py +127 -0
  60. package/package.json +64 -57
  61. package/pomera.py +7883 -7482
  62. package/pomera_mcp_server.py +183 -144
  63. package/requirements.txt +33 -0
  64. package/scripts/Dockerfile.alpine +43 -0
  65. package/scripts/Dockerfile.gui-test +54 -0
  66. package/scripts/Dockerfile.linux +43 -0
  67. package/scripts/Dockerfile.test-linux +80 -0
  68. package/scripts/Dockerfile.ubuntu +39 -0
  69. package/scripts/README.md +53 -0
  70. package/scripts/build-all.bat +113 -0
  71. package/scripts/build-docker.bat +53 -0
  72. package/scripts/build-docker.sh +55 -0
  73. package/scripts/build-optimized.bat +101 -0
  74. package/scripts/build.sh +78 -0
  75. package/scripts/docker-compose.test.yml +27 -0
  76. package/scripts/docker-compose.yml +32 -0
  77. package/scripts/postinstall.js +62 -0
  78. package/scripts/requirements-minimal.txt +33 -0
  79. package/scripts/test-linux-simple.bat +28 -0
  80. package/scripts/validate-release-workflow.py +450 -0
  81. package/tools/__init__.py +4 -4
  82. package/tools/ai_tools.py +2891 -2891
  83. package/tools/ascii_art_generator.py +352 -352
  84. package/tools/base64_tools.py +183 -183
  85. package/tools/base_tool.py +511 -511
  86. package/tools/case_tool.py +308 -308
  87. package/tools/column_tools.py +395 -395
  88. package/tools/cron_tool.py +884 -884
  89. package/tools/curl_history.py +600 -600
  90. package/tools/curl_processor.py +1207 -1207
  91. package/tools/curl_settings.py +502 -502
  92. package/tools/curl_tool.py +5467 -5467
  93. package/tools/diff_viewer.py +1817 -1072
  94. package/tools/email_extraction_tool.py +248 -248
  95. package/tools/email_header_analyzer.py +425 -425
  96. package/tools/extraction_tools.py +250 -250
  97. package/tools/find_replace.py +2289 -1750
  98. package/tools/folder_file_reporter.py +1463 -1463
  99. package/tools/folder_file_reporter_adapter.py +480 -480
  100. package/tools/generator_tools.py +1216 -1216
  101. package/tools/hash_generator.py +255 -255
  102. package/tools/html_tool.py +656 -656
  103. package/tools/jsonxml_tool.py +729 -729
  104. package/tools/line_tools.py +419 -419
  105. package/tools/markdown_tools.py +561 -561
  106. package/tools/mcp_widget.py +1417 -1417
  107. package/tools/notes_widget.py +978 -973
  108. package/tools/number_base_converter.py +372 -372
  109. package/tools/regex_extractor.py +571 -571
  110. package/tools/slug_generator.py +310 -310
  111. package/tools/sorter_tools.py +458 -458
  112. package/tools/string_escape_tool.py +392 -392
  113. package/tools/text_statistics_tool.py +365 -365
  114. package/tools/text_wrapper.py +430 -430
  115. package/tools/timestamp_converter.py +421 -421
  116. package/tools/tool_loader.py +710 -710
  117. package/tools/translator_tools.py +522 -522
  118. package/tools/url_link_extractor.py +261 -261
  119. package/tools/url_parser.py +204 -204
  120. package/tools/whitespace_tools.py +355 -355
  121. package/tools/word_frequency_counter.py +146 -146
  122. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  123. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  124. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  125. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  126. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  127. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  128. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  129. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  131. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  132. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  134. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  135. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  136. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  137. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  138. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  139. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  140. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  141. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  142. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  143. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  144. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  145. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  146. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  147. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  148. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  149. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  150. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  151. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  152. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  153. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  154. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  155. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  156. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  157. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  158. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  159. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  160. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  161. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  162. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  163. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  164. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  165. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  166. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  167. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  168. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  169. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  170. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  192. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  193. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  194. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  195. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  196. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  197. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  198. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  199. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  200. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  201. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  202. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  203. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  204. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  205. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  206. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  207. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  208. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  209. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  210. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  211. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  212. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  213. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
package/core/stats_config_manager.py
@@ -1,858 +1,858 @@
1
- """
2
- Statistics Configuration and Monitoring Manager for Pomera AI Commander.
3
-
4
- This module provides configuration options for adjusting debounce delays and cache sizes,
5
- diagnostic information and performance metrics logging, settings to completely disable
6
- statistics calculations, and debug mode with performance metrics logging for analysis.
7
-
8
- Requirements addressed:
9
- - 7.1: Provide options to adjust debounce delays
10
- - 7.2: Provide diagnostic information when performance issues occur
11
- - 7.3: Completely skip all calculations when statistics are disabled
12
- - 7.4: Log performance metrics for analysis when debugging is enabled
13
- """
14
-
15
- import json
16
- import time
17
- import logging
18
- import threading
19
- from typing import Dict, Any, Optional, List, Callable
20
- from dataclasses import dataclass, field, asdict
21
- from enum import Enum
22
- from pathlib import Path
23
- from collections import deque
24
- import sys
25
-
26
-
27
- class PerformanceLevel(Enum):
28
- """Performance optimization levels."""
29
- MAXIMUM = "maximum" # All optimizations enabled, may skip some updates
30
- BALANCED = "balanced" # Default - good balance of features and performance
31
- QUALITY = "quality" # Prioritize accuracy over performance
32
- DISABLED = "disabled" # Statistics completely disabled
33
-
34
-
35
- @dataclass
36
- class DebounceSettings:
37
- """Configuration for debouncing behavior."""
38
- strategy: str = "adaptive" # immediate, fast, normal, slow, adaptive
39
- immediate_threshold: int = 100
40
- fast_delay_ms: int = 50
41
- normal_delay_ms: int = 300
42
- slow_delay_ms: int = 500
43
- large_content_threshold: int = 10000
44
- very_large_threshold: int = 100000
45
- max_delay_ms: int = 1000
46
-
47
- def to_dict(self) -> Dict[str, Any]:
48
- """Convert to dictionary."""
49
- return asdict(self)
50
-
51
- @classmethod
52
- def from_dict(cls, data: Dict[str, Any]) -> 'DebounceSettings':
53
- """Create from dictionary."""
54
- return cls(**data)
55
-
56
-
57
- @dataclass
58
- class CacheSettings:
59
- """Configuration for caching behavior."""
60
- enabled: bool = True
61
- max_cache_size: int = 1000
62
- max_memory_mb: int = 50
63
- cleanup_threshold_mb: int = 45
64
- cleanup_interval_seconds: int = 300
65
- enable_incremental_updates: bool = True
66
- enable_advanced_stats: bool = True
67
-
68
- def to_dict(self) -> Dict[str, Any]:
69
- """Convert to dictionary."""
70
- return asdict(self)
71
-
72
- @classmethod
73
- def from_dict(cls, data: Dict[str, Any]) -> 'CacheSettings':
74
- """Create from dictionary."""
75
- return cls(**data)
76
-
77
-
78
- @dataclass
79
- class VisibilitySettings:
80
- """Configuration for visibility-aware updates."""
81
- enabled: bool = True
82
- skip_hidden_tabs: bool = True
83
- pause_when_minimized: bool = True
84
- skip_invisible_stats_bars: bool = True
85
- idle_threshold_seconds: float = 5.0
86
- reduce_frequency_when_idle: bool = True
87
-
88
- def to_dict(self) -> Dict[str, Any]:
89
- """Convert to dictionary."""
90
- return asdict(self)
91
-
92
- @classmethod
93
- def from_dict(cls, data: Dict[str, Any]) -> 'VisibilitySettings':
94
- """Create from dictionary."""
95
- return cls(**data)
96
-
97
-
98
- @dataclass
99
- class ProgressiveCalculationSettings:
100
- """Configuration for progressive calculation."""
101
- enabled: bool = True
102
- chunk_size: int = 10000
103
- threshold_characters: int = 50000
104
- progress_indicator_threshold_ms: float = 100.0
105
- enable_cancellation: bool = True
106
- yield_interval_chunks: int = 2
107
-
108
- def to_dict(self) -> Dict[str, Any]:
109
- """Convert to dictionary."""
110
- return asdict(self)
111
-
112
- @classmethod
113
- def from_dict(cls, data: Dict[str, Any]) -> 'ProgressiveCalculationSettings':
114
- """Create from dictionary."""
115
- return cls(**data)
116
-
117
-
118
- @dataclass
119
- class DebugSettings:
120
- """Configuration for debugging and monitoring."""
121
- enabled: bool = False
122
- log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR
123
- log_performance_metrics: bool = False
124
- log_cache_operations: bool = False
125
- log_event_consolidation: bool = False
126
- log_visibility_changes: bool = False
127
- performance_warning_threshold_ms: float = 500.0
128
- save_metrics_to_file: bool = False
129
- metrics_file_path: str = "stats_performance_metrics.json"
130
-
131
- def to_dict(self) -> Dict[str, Any]:
132
- """Convert to dictionary."""
133
- return asdict(self)
134
-
135
- @classmethod
136
- def from_dict(cls, data: Dict[str, Any]) -> 'DebugSettings':
137
- """Create from dictionary."""
138
- return cls(**data)
139
-
140
-
141
- @dataclass
142
- class StatisticsConfiguration:
143
- """Complete statistics optimization configuration."""
144
- performance_level: str = "balanced"
145
- statistics_enabled: bool = True
146
- debounce: DebounceSettings = field(default_factory=DebounceSettings)
147
- cache: CacheSettings = field(default_factory=CacheSettings)
148
- visibility: VisibilitySettings = field(default_factory=VisibilitySettings)
149
- progressive: ProgressiveCalculationSettings = field(default_factory=ProgressiveCalculationSettings)
150
- debug: DebugSettings = field(default_factory=DebugSettings)
151
-
152
- def to_dict(self) -> Dict[str, Any]:
153
- """Convert to dictionary."""
154
- return {
155
- 'performance_level': self.performance_level,
156
- 'statistics_enabled': self.statistics_enabled,
157
- 'debounce': self.debounce.to_dict(),
158
- 'cache': self.cache.to_dict(),
159
- 'visibility': self.visibility.to_dict(),
160
- 'progressive': self.progressive.to_dict(),
161
- 'debug': self.debug.to_dict()
162
- }
163
-
164
- @classmethod
165
- def from_dict(cls, data: Dict[str, Any]) -> 'StatisticsConfiguration':
166
- """Create from dictionary."""
167
- return cls(
168
- performance_level=data.get('performance_level', 'balanced'),
169
- statistics_enabled=data.get('statistics_enabled', True),
170
- debounce=DebounceSettings.from_dict(data.get('debounce', {})),
171
- cache=CacheSettings.from_dict(data.get('cache', {})),
172
- visibility=VisibilitySettings.from_dict(data.get('visibility', {})),
173
- progressive=ProgressiveCalculationSettings.from_dict(data.get('progressive', {})),
174
- debug=DebugSettings.from_dict(data.get('debug', {}))
175
- )
176
-
177
- def apply_performance_level(self, level: PerformanceLevel):
178
- """Apply a performance level preset."""
179
- self.performance_level = level.value
180
-
181
- if level == PerformanceLevel.DISABLED:
182
- self.statistics_enabled = False
183
-
184
- elif level == PerformanceLevel.MAXIMUM:
185
- self.statistics_enabled = True
186
- self.debounce.strategy = "adaptive"
187
- self.debounce.normal_delay_ms = 500
188
- self.cache.enabled = True
189
- self.cache.enable_incremental_updates = True
190
- self.cache.enable_advanced_stats = False
191
- self.visibility.enabled = True
192
- self.visibility.skip_hidden_tabs = True
193
- self.progressive.enabled = True
194
-
195
- elif level == PerformanceLevel.BALANCED:
196
- self.statistics_enabled = True
197
- self.debounce.strategy = "adaptive"
198
- self.debounce.normal_delay_ms = 300
199
- self.cache.enabled = True
200
- self.cache.enable_incremental_updates = True
201
- self.cache.enable_advanced_stats = True
202
- self.visibility.enabled = True
203
- self.progressive.enabled = True
204
-
205
- elif level == PerformanceLevel.QUALITY:
206
- self.statistics_enabled = True
207
- self.debounce.strategy = "normal"
208
- self.debounce.normal_delay_ms = 100
209
- self.cache.enabled = True
210
- self.cache.enable_incremental_updates = False
211
- self.cache.enable_advanced_stats = True
212
- self.visibility.enabled = False
213
- self.progressive.enabled = False
214
-
215
-
216
- @dataclass
217
- class PerformanceMetric:
218
- """A single performance metric measurement."""
219
- timestamp: float
220
- metric_name: str
221
- value: float
222
- unit: str
223
- context: Dict[str, Any] = field(default_factory=dict)
224
-
225
- def to_dict(self) -> Dict[str, Any]:
226
- """Convert to dictionary."""
227
- return {
228
- 'timestamp': self.timestamp,
229
- 'metric_name': self.metric_name,
230
- 'value': self.value,
231
- 'unit': self.unit,
232
- 'context': self.context
233
- }
234
-
235
-
236
- @dataclass
237
- class DiagnosticInfo:
238
- """Diagnostic information about statistics system."""
239
- timestamp: float
240
- issue_type: str
241
- severity: str # info, warning, error, critical
242
- message: str
243
- details: Dict[str, Any] = field(default_factory=dict)
244
- suggestions: List[str] = field(default_factory=list)
245
-
246
- def to_dict(self) -> Dict[str, Any]:
247
- """Convert to dictionary."""
248
- return {
249
- 'timestamp': self.timestamp,
250
- 'issue_type': self.issue_type,
251
- 'severity': self.severity,
252
- 'message': self.message,
253
- 'details': self.details,
254
- 'suggestions': self.suggestions
255
- }
256
-
257
-
258
- class StatsConfigManager:
259
- """
260
- Configuration and monitoring manager for statistics optimization system.
261
-
262
- Provides centralized configuration management, performance monitoring,
263
- diagnostic information, and debug logging capabilities.
264
- """
265
-
266
- def __init__(self, config_file: Optional[str] = None):
267
- """
268
- Initialize the configuration manager.
269
-
270
- Args:
271
- config_file: Optional path to configuration file
272
- """
273
- self.config_file = config_file or "stats_config.json"
274
- self.config = StatisticsConfiguration()
275
-
276
- # Performance metrics storage
277
- self.metrics_history: deque = deque(maxlen=1000)
278
- self.metrics_lock = threading.RLock()
279
-
280
- # Diagnostic information storage
281
- self.diagnostics: deque = deque(maxlen=100)
282
- self.diagnostics_lock = threading.RLock()
283
-
284
- # Configuration change callbacks
285
- self.config_change_callbacks: List[Callable[[StatisticsConfiguration], None]] = []
286
-
287
- # Logger setup
288
- self.logger = self._setup_logger()
289
-
290
- # Performance tracking
291
- self.performance_warnings_count = 0
292
- self.last_performance_check = time.time()
293
-
294
- # Load configuration if file exists
295
- self.load_configuration()
296
-
297
- def _setup_logger(self) -> logging.Logger:
298
- """Setup logger for statistics system."""
299
- logger = logging.getLogger('stats_optimization')
300
-
301
- # Remove existing handlers
302
- logger.handlers.clear()
303
-
304
- # Set level based on debug settings
305
- log_level = getattr(logging, self.config.debug.log_level, logging.INFO)
306
- logger.setLevel(log_level)
307
-
308
- # Console handler
309
- console_handler = logging.StreamHandler(sys.stdout)
310
- console_handler.setLevel(log_level)
311
-
312
- # Formatter
313
- formatter = logging.Formatter(
314
- '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
315
- datefmt='%Y-%m-%d %H:%M:%S'
316
- )
317
- console_handler.setFormatter(formatter)
318
-
319
- logger.addHandler(console_handler)
320
-
321
- # File handler if debug enabled
322
- if self.config.debug.enabled and self.config.debug.save_metrics_to_file:
323
- try:
324
- file_handler = logging.FileHandler('stats_optimization.log')
325
- file_handler.setLevel(log_level)
326
- file_handler.setFormatter(formatter)
327
- logger.addHandler(file_handler)
328
- except Exception as e:
329
- print(f"Failed to create log file handler: {e}")
330
-
331
- return logger
332
-
333
- def load_configuration(self) -> bool:
334
- """
335
- Load configuration from file.
336
-
337
- Returns:
338
- True if loaded successfully, False otherwise
339
- """
340
- try:
341
- config_path = Path(self.config_file)
342
- if config_path.exists():
343
- with open(config_path, 'r', encoding='utf-8') as f:
344
- data = json.load(f)
345
-
346
- self.config = StatisticsConfiguration.from_dict(data)
347
- self.logger.info(f"Configuration loaded from {self.config_file}")
348
-
349
- # Update logger based on new config
350
- self.logger = self._setup_logger()
351
-
352
- # Notify callbacks
353
- self._notify_config_change()
354
-
355
- return True
356
- except Exception as e:
357
- self.logger.error(f"Failed to load configuration: {e}")
358
-
359
- return False
360
-
361
- def save_configuration(self) -> bool:
362
- """
363
- Save configuration to file.
364
-
365
- Returns:
366
- True if saved successfully, False otherwise
367
- """
368
- try:
369
- config_path = Path(self.config_file)
370
- config_path.parent.mkdir(parents=True, exist_ok=True)
371
-
372
- with open(config_path, 'w', encoding='utf-8') as f:
373
- json.dump(self.config.to_dict(), f, indent=2)
374
-
375
- self.logger.info(f"Configuration saved to {self.config_file}")
376
- return True
377
-
378
- except Exception as e:
379
- self.logger.error(f"Failed to save configuration: {e}")
380
- return False
381
-
382
- def get_configuration(self) -> StatisticsConfiguration:
383
- """Get current configuration."""
384
- return self.config
385
-
386
- def update_configuration(self, config: StatisticsConfiguration, save: bool = True):
387
- """
388
- Update configuration.
389
-
390
- Args:
391
- config: New configuration
392
- save: Whether to save to file
393
- """
394
- self.config = config
395
-
396
- # Update logger
397
- self.logger = self._setup_logger()
398
-
399
- # Save if requested
400
- if save:
401
- self.save_configuration()
402
-
403
- # Notify callbacks
404
- self._notify_config_change()
405
-
406
- self.logger.info("Configuration updated")
407
-
408
- def update_debounce_settings(self, settings: DebounceSettings, save: bool = True):
409
- """Update debounce settings."""
410
- self.config.debounce = settings
411
-
412
- if save:
413
- self.save_configuration()
414
-
415
- self._notify_config_change()
416
-
417
- if self.config.debug.enabled:
418
- self.logger.debug(f"Debounce settings updated: {settings.to_dict()}")
419
-
420
- def update_cache_settings(self, settings: CacheSettings, save: bool = True):
421
- """Update cache settings."""
422
- self.config.cache = settings
423
-
424
- if save:
425
- self.save_configuration()
426
-
427
- self._notify_config_change()
428
-
429
- if self.config.debug.enabled:
430
- self.logger.debug(f"Cache settings updated: {settings.to_dict()}")
431
-
432
- def set_performance_level(self, level: PerformanceLevel, save: bool = True):
433
- """
434
- Set performance level preset.
435
-
436
- Args:
437
- level: Performance level to apply
438
- save: Whether to save to file
439
- """
440
- self.config.apply_performance_level(level)
441
-
442
- if save:
443
- self.save_configuration()
444
-
445
- self._notify_config_change()
446
-
447
- self.logger.info(f"Performance level set to: {level.value}")
448
-
449
- def enable_statistics(self, enabled: bool = True, save: bool = True):
450
- """
451
- Enable or disable statistics calculations.
452
-
453
- Args:
454
- enabled: True to enable, False to disable
455
- save: Whether to save to file
456
- """
457
- self.config.statistics_enabled = enabled
458
-
459
- if save:
460
- self.save_configuration()
461
-
462
- self._notify_config_change()
463
-
464
- status = "enabled" if enabled else "disabled"
465
- self.logger.info(f"Statistics calculations {status}")
466
-
467
- def enable_debug_mode(self, enabled: bool = True, save: bool = True):
468
- """
469
- Enable or disable debug mode.
470
-
471
- Args:
472
- enabled: True to enable, False to disable
473
- save: Whether to save to file
474
- """
475
- self.config.debug.enabled = enabled
476
-
477
- if enabled:
478
- self.config.debug.log_performance_metrics = True
479
- self.config.debug.log_level = "DEBUG"
480
- else:
481
- self.config.debug.log_level = "INFO"
482
-
483
- # Update logger
484
- self.logger = self._setup_logger()
485
-
486
- if save:
487
- self.save_configuration()
488
-
489
- self._notify_config_change()
490
-
491
- status = "enabled" if enabled else "disabled"
492
- self.logger.info(f"Debug mode {status}")
493
-
494
- def register_config_change_callback(self, callback: Callable[[StatisticsConfiguration], None]):
495
- """Register a callback for configuration changes."""
496
- self.config_change_callbacks.append(callback)
497
-
498
- def _notify_config_change(self):
499
- """Notify all callbacks of configuration change."""
500
- for callback in self.config_change_callbacks:
501
- try:
502
- callback(self.config)
503
- except Exception as e:
504
- self.logger.error(f"Error in config change callback: {e}")
505
-
506
- def record_metric(self, metric_name: str, value: float, unit: str,
507
- context: Optional[Dict[str, Any]] = None):
508
- """
509
- Record a performance metric.
510
-
511
- Args:
512
- metric_name: Name of the metric
513
- value: Metric value
514
- unit: Unit of measurement
515
- context: Optional context information
516
- """
517
- if not self.config.debug.log_performance_metrics:
518
- return
519
-
520
- metric = PerformanceMetric(
521
- timestamp=time.time(),
522
- metric_name=metric_name,
523
- value=value,
524
- unit=unit,
525
- context=context or {}
526
- )
527
-
528
- with self.metrics_lock:
529
- self.metrics_history.append(metric)
530
-
531
- # Log if debug enabled
532
- if self.config.debug.enabled:
533
- self.logger.debug(f"Metric: {metric_name} = {value} {unit}")
534
-
535
- # Check for performance warnings
536
- self._check_performance_warning(metric)
537
-
538
- # Save to file if configured
539
- if self.config.debug.save_metrics_to_file:
540
- self._save_metrics_to_file()
541
-
542
- def _check_performance_warning(self, metric: PerformanceMetric):
543
- """Check if metric indicates a performance issue."""
544
- threshold = self.config.debug.performance_warning_threshold_ms
545
-
546
- # Check calculation time metrics
547
- if metric.metric_name in ['calculation_time', 'update_time'] and metric.unit == 'ms':
548
- if metric.value > threshold:
549
- self.performance_warnings_count += 1
550
-
551
- diagnostic = DiagnosticInfo(
552
- timestamp=time.time(),
553
- issue_type='performance_warning',
554
- severity='warning',
555
- message=f"{metric.metric_name} exceeded threshold: {metric.value:.2f}ms > {threshold}ms",
556
- details={
557
- 'metric': metric.to_dict(),
558
- 'threshold_ms': threshold
559
- },
560
- suggestions=[
561
- "Consider increasing debounce delays",
562
- "Enable progressive calculation for large content",
563
- "Check if visibility awareness is enabled",
564
- "Review cache settings"
565
- ]
566
- )
567
-
568
- self.add_diagnostic(diagnostic)
569
-
570
- def add_diagnostic(self, diagnostic: DiagnosticInfo):
571
- """
572
- Add diagnostic information.
573
-
574
- Args:
575
- diagnostic: Diagnostic information to add
576
- """
577
- with self.diagnostics_lock:
578
- self.diagnostics.append(diagnostic)
579
-
580
- # Log based on severity
581
- log_method = getattr(self.logger, diagnostic.severity, self.logger.info)
582
- log_method(f"Diagnostic: {diagnostic.message}")
583
-
584
- def get_diagnostics(self, severity: Optional[str] = None,
585
- issue_type: Optional[str] = None,
586
- limit: int = 10) -> List[DiagnosticInfo]:
587
- """
588
- Get diagnostic information.
589
-
590
- Args:
591
- severity: Filter by severity (optional)
592
- issue_type: Filter by issue type (optional)
593
- limit: Maximum number of diagnostics to return
594
-
595
- Returns:
596
- List of diagnostic information
597
- """
598
- with self.diagnostics_lock:
599
- diagnostics = list(self.diagnostics)
600
-
601
- # Filter by severity
602
- if severity:
603
- diagnostics = [d for d in diagnostics if d.severity == severity]
604
-
605
- # Filter by issue type
606
- if issue_type:
607
- diagnostics = [d for d in diagnostics if d.issue_type == issue_type]
608
-
609
- # Sort by timestamp (most recent first)
610
- diagnostics.sort(key=lambda d: d.timestamp, reverse=True)
611
-
612
- return diagnostics[:limit]
613
-
614
- def get_metrics(self, metric_name: Optional[str] = None,
615
- time_range_seconds: Optional[float] = None,
616
- limit: int = 100) -> List[PerformanceMetric]:
617
- """
618
- Get performance metrics.
619
-
620
- Args:
621
- metric_name: Filter by metric name (optional)
622
- time_range_seconds: Only return metrics from last N seconds (optional)
623
- limit: Maximum number of metrics to return
624
-
625
- Returns:
626
- List of performance metrics
627
- """
628
- with self.metrics_lock:
629
- metrics = list(self.metrics_history)
630
-
631
- # Filter by metric name
632
- if metric_name:
633
- metrics = [m for m in metrics if m.metric_name == metric_name]
634
-
635
- # Filter by time range
636
- if time_range_seconds:
637
- cutoff_time = time.time() - time_range_seconds
638
- metrics = [m for m in metrics if m.timestamp >= cutoff_time]
639
-
640
- # Sort by timestamp (most recent first)
641
- metrics.sort(key=lambda m: m.timestamp, reverse=True)
642
-
643
- return metrics[:limit]
644
-
645
- def get_metric_statistics(self, metric_name: str,
646
- time_range_seconds: Optional[float] = None) -> Dict[str, float]:
647
- """
648
- Get statistics for a specific metric.
649
-
650
- Args:
651
- metric_name: Name of the metric
652
- time_range_seconds: Time range to analyze (optional)
653
-
654
- Returns:
655
- Dictionary with min, max, avg, median values
656
- """
657
- metrics = self.get_metrics(metric_name, time_range_seconds, limit=1000)
658
-
659
- if not metrics:
660
- return {
661
- 'count': 0,
662
- 'min': 0.0,
663
- 'max': 0.0,
664
- 'avg': 0.0,
665
- 'median': 0.0
666
- }
667
-
668
- values = [m.value for m in metrics]
669
- values.sort()
670
-
671
- return {
672
- 'count': len(values),
673
- 'min': min(values),
674
- 'max': max(values),
675
- 'avg': sum(values) / len(values),
676
- 'median': values[len(values) // 2]
677
- }
678
-
679
- def _save_metrics_to_file(self):
680
- """Save metrics to file."""
681
- try:
682
- metrics_file = Path(self.config.debug.metrics_file_path)
683
-
684
- with self.metrics_lock:
685
- metrics_data = [m.to_dict() for m in self.metrics_history]
686
-
687
- with open(metrics_file, 'w', encoding='utf-8') as f:
688
- json.dump(metrics_data, f, indent=2)
689
-
690
- except Exception as e:
691
- self.logger.error(f"Failed to save metrics to file: {e}")
692
-
693
- def generate_performance_report(self) -> Dict[str, Any]:
694
- """
695
- Generate a comprehensive performance report.
696
-
697
- Returns:
698
- Dictionary with performance analysis
699
- """
700
- report = {
701
- 'timestamp': time.time(),
702
- 'configuration': self.config.to_dict(),
703
- 'statistics_enabled': self.config.statistics_enabled,
704
- 'performance_level': self.config.performance_level,
705
- 'metrics_summary': {},
706
- 'diagnostics_summary': {},
707
- 'recommendations': []
708
- }
709
-
710
- # Metrics summary
711
- metric_names = set(m.metric_name for m in self.metrics_history)
712
- for metric_name in metric_names:
713
- stats = self.get_metric_statistics(metric_name, time_range_seconds=300)
714
- report['metrics_summary'][metric_name] = stats
715
-
716
- # Diagnostics summary
717
- with self.diagnostics_lock:
718
- diagnostics = list(self.diagnostics)
719
-
720
- severity_counts = {}
721
- for diagnostic in diagnostics:
722
- severity_counts[diagnostic.severity] = severity_counts.get(diagnostic.severity, 0) + 1
723
-
724
- report['diagnostics_summary'] = {
725
- 'total_count': len(diagnostics),
726
- 'by_severity': severity_counts,
727
- 'performance_warnings': self.performance_warnings_count
728
- }
729
-
730
- # Generate recommendations
731
- report['recommendations'] = self._generate_recommendations(report)
732
-
733
- return report
734
-
735
- def _generate_recommendations(self, report: Dict[str, Any]) -> List[str]:
736
- """Generate performance recommendations based on report."""
737
- recommendations = []
738
-
739
- # Check if statistics are disabled
740
- if not self.config.statistics_enabled:
741
- recommendations.append("Statistics are currently disabled. Enable them to see performance metrics.")
742
- return recommendations
743
-
744
- # Check calculation times
745
- calc_time_stats = report['metrics_summary'].get('calculation_time', {})
746
- if calc_time_stats.get('avg', 0) > 200:
747
- recommendations.append(
748
- f"Average calculation time is high ({calc_time_stats['avg']:.2f}ms). "
749
- "Consider enabling progressive calculation or increasing debounce delays."
750
- )
751
-
752
- # Check performance warnings
753
- if self.performance_warnings_count > 10:
754
- recommendations.append(
755
- f"Multiple performance warnings detected ({self.performance_warnings_count}). "
756
- "Consider switching to 'maximum' performance level."
757
- )
758
-
759
- # Check cache effectiveness
760
- if not self.config.cache.enabled:
761
- recommendations.append("Cache is disabled. Enabling cache can significantly improve performance.")
762
-
763
- # Check visibility awareness
764
- if not self.config.visibility.enabled:
765
- recommendations.append(
766
- "Visibility awareness is disabled. Enabling it can reduce unnecessary calculations."
767
- )
768
-
769
- # Check progressive calculation
770
- if not self.config.progressive.enabled:
771
- recommendations.append(
772
- "Progressive calculation is disabled. Enable it for better handling of large content."
773
- )
774
-
775
- return recommendations
776
-
777
- def export_diagnostics(self, file_path: str) -> bool:
778
- """
779
- Export diagnostics to file.
780
-
781
- Args:
782
- file_path: Path to export file
783
-
784
- Returns:
785
- True if exported successfully
786
- """
787
- try:
788
- with self.diagnostics_lock:
789
- diagnostics_data = [d.to_dict() for d in self.diagnostics]
790
-
791
- export_path = Path(file_path)
792
- export_path.parent.mkdir(parents=True, exist_ok=True)
793
-
794
- with open(export_path, 'w', encoding='utf-8') as f:
795
- json.dump(diagnostics_data, f, indent=2)
796
-
797
- self.logger.info(f"Diagnostics exported to {file_path}")
798
- return True
799
-
800
- except Exception as e:
801
- self.logger.error(f"Failed to export diagnostics: {e}")
802
- return False
803
-
804
- def clear_diagnostics(self):
805
- """Clear all diagnostic information."""
806
- with self.diagnostics_lock:
807
- self.diagnostics.clear()
808
-
809
- self.performance_warnings_count = 0
810
- self.logger.info("Diagnostics cleared")
811
-
812
- def clear_metrics(self):
813
- """Clear all performance metrics."""
814
- with self.metrics_lock:
815
- self.metrics_history.clear()
816
-
817
- self.logger.info("Metrics cleared")
818
-
819
- def reset_to_defaults(self, save: bool = True):
820
- """
821
- Reset configuration to defaults.
822
-
823
- Args:
824
- save: Whether to save to file
825
- """
826
- self.config = StatisticsConfiguration()
827
-
828
- if save:
829
- self.save_configuration()
830
-
831
- self._notify_config_change()
832
-
833
- self.logger.info("Configuration reset to defaults")
834
-
835
-
836
- # Global instance
837
- _global_config_manager: Optional[StatsConfigManager] = None
838
-
839
-
840
- def get_config_manager() -> StatsConfigManager:
841
- """Get the global configuration manager instance."""
842
- global _global_config_manager
843
- if _global_config_manager is None:
844
- _global_config_manager = StatsConfigManager()
845
- return _global_config_manager
846
-
847
-
848
- def create_config_manager(config_file: Optional[str] = None) -> StatsConfigManager:
849
- """
850
- Create a new configuration manager instance.
851
-
852
- Args:
853
- config_file: Optional path to configuration file
854
-
855
- Returns:
856
- New StatsConfigManager instance
857
- """
858
- return StatsConfigManager(config_file)
1
+ """
2
+ Statistics Configuration and Monitoring Manager for Pomera AI Commander.
3
+
4
+ This module provides configuration options for adjusting debounce delays and cache sizes,
5
+ diagnostic information and performance metrics logging, settings to completely disable
6
+ statistics calculations, and debug mode with performance metrics logging for analysis.
7
+
8
+ Requirements addressed:
9
+ - 7.1: Provide options to adjust debounce delays
10
+ - 7.2: Provide diagnostic information when performance issues occur
11
+ - 7.3: Completely skip all calculations when statistics are disabled
12
+ - 7.4: Log performance metrics for analysis when debugging is enabled
13
+ """
14
+
15
+ import json
16
+ import time
17
+ import logging
18
+ import threading
19
+ from typing import Dict, Any, Optional, List, Callable
20
+ from dataclasses import dataclass, field, asdict
21
+ from enum import Enum
22
+ from pathlib import Path
23
+ from collections import deque
24
+ import sys
25
+
26
+
27
+ class PerformanceLevel(Enum):
28
+ """Performance optimization levels."""
29
+ MAXIMUM = "maximum" # All optimizations enabled, may skip some updates
30
+ BALANCED = "balanced" # Default - good balance of features and performance
31
+ QUALITY = "quality" # Prioritize accuracy over performance
32
+ DISABLED = "disabled" # Statistics completely disabled
33
+
34
+
35
+ @dataclass
36
+ class DebounceSettings:
37
+ """Configuration for debouncing behavior."""
38
+ strategy: str = "adaptive" # immediate, fast, normal, slow, adaptive
39
+ immediate_threshold: int = 100
40
+ fast_delay_ms: int = 50
41
+ normal_delay_ms: int = 300
42
+ slow_delay_ms: int = 500
43
+ large_content_threshold: int = 10000
44
+ very_large_threshold: int = 100000
45
+ max_delay_ms: int = 1000
46
+
47
+ def to_dict(self) -> Dict[str, Any]:
48
+ """Convert to dictionary."""
49
+ return asdict(self)
50
+
51
+ @classmethod
52
+ def from_dict(cls, data: Dict[str, Any]) -> 'DebounceSettings':
53
+ """Create from dictionary."""
54
+ return cls(**data)
55
+
56
+
57
+ @dataclass
58
+ class CacheSettings:
59
+ """Configuration for caching behavior."""
60
+ enabled: bool = True
61
+ max_cache_size: int = 1000
62
+ max_memory_mb: int = 50
63
+ cleanup_threshold_mb: int = 45
64
+ cleanup_interval_seconds: int = 300
65
+ enable_incremental_updates: bool = True
66
+ enable_advanced_stats: bool = True
67
+
68
+ def to_dict(self) -> Dict[str, Any]:
69
+ """Convert to dictionary."""
70
+ return asdict(self)
71
+
72
+ @classmethod
73
+ def from_dict(cls, data: Dict[str, Any]) -> 'CacheSettings':
74
+ """Create from dictionary."""
75
+ return cls(**data)
76
+
77
+
78
+ @dataclass
79
+ class VisibilitySettings:
80
+ """Configuration for visibility-aware updates."""
81
+ enabled: bool = True
82
+ skip_hidden_tabs: bool = True
83
+ pause_when_minimized: bool = True
84
+ skip_invisible_stats_bars: bool = True
85
+ idle_threshold_seconds: float = 5.0
86
+ reduce_frequency_when_idle: bool = True
87
+
88
+ def to_dict(self) -> Dict[str, Any]:
89
+ """Convert to dictionary."""
90
+ return asdict(self)
91
+
92
+ @classmethod
93
+ def from_dict(cls, data: Dict[str, Any]) -> 'VisibilitySettings':
94
+ """Create from dictionary."""
95
+ return cls(**data)
96
+
97
+
98
+ @dataclass
99
+ class ProgressiveCalculationSettings:
100
+ """Configuration for progressive calculation."""
101
+ enabled: bool = True
102
+ chunk_size: int = 10000
103
+ threshold_characters: int = 50000
104
+ progress_indicator_threshold_ms: float = 100.0
105
+ enable_cancellation: bool = True
106
+ yield_interval_chunks: int = 2
107
+
108
+ def to_dict(self) -> Dict[str, Any]:
109
+ """Convert to dictionary."""
110
+ return asdict(self)
111
+
112
+ @classmethod
113
+ def from_dict(cls, data: Dict[str, Any]) -> 'ProgressiveCalculationSettings':
114
+ """Create from dictionary."""
115
+ return cls(**data)
116
+
117
+
118
+ @dataclass
119
+ class DebugSettings:
120
+ """Configuration for debugging and monitoring."""
121
+ enabled: bool = False
122
+ log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR
123
+ log_performance_metrics: bool = False
124
+ log_cache_operations: bool = False
125
+ log_event_consolidation: bool = False
126
+ log_visibility_changes: bool = False
127
+ performance_warning_threshold_ms: float = 500.0
128
+ save_metrics_to_file: bool = False
129
+ metrics_file_path: str = "stats_performance_metrics.json"
130
+
131
+ def to_dict(self) -> Dict[str, Any]:
132
+ """Convert to dictionary."""
133
+ return asdict(self)
134
+
135
+ @classmethod
136
+ def from_dict(cls, data: Dict[str, Any]) -> 'DebugSettings':
137
+ """Create from dictionary."""
138
+ return cls(**data)
139
+
140
+
141
+ @dataclass
142
+ class StatisticsConfiguration:
143
+ """Complete statistics optimization configuration."""
144
+ performance_level: str = "balanced"
145
+ statistics_enabled: bool = True
146
+ debounce: DebounceSettings = field(default_factory=DebounceSettings)
147
+ cache: CacheSettings = field(default_factory=CacheSettings)
148
+ visibility: VisibilitySettings = field(default_factory=VisibilitySettings)
149
+ progressive: ProgressiveCalculationSettings = field(default_factory=ProgressiveCalculationSettings)
150
+ debug: DebugSettings = field(default_factory=DebugSettings)
151
+
152
+ def to_dict(self) -> Dict[str, Any]:
153
+ """Convert to dictionary."""
154
+ return {
155
+ 'performance_level': self.performance_level,
156
+ 'statistics_enabled': self.statistics_enabled,
157
+ 'debounce': self.debounce.to_dict(),
158
+ 'cache': self.cache.to_dict(),
159
+ 'visibility': self.visibility.to_dict(),
160
+ 'progressive': self.progressive.to_dict(),
161
+ 'debug': self.debug.to_dict()
162
+ }
163
+
164
+ @classmethod
165
+ def from_dict(cls, data: Dict[str, Any]) -> 'StatisticsConfiguration':
166
+ """Create from dictionary."""
167
+ return cls(
168
+ performance_level=data.get('performance_level', 'balanced'),
169
+ statistics_enabled=data.get('statistics_enabled', True),
170
+ debounce=DebounceSettings.from_dict(data.get('debounce', {})),
171
+ cache=CacheSettings.from_dict(data.get('cache', {})),
172
+ visibility=VisibilitySettings.from_dict(data.get('visibility', {})),
173
+ progressive=ProgressiveCalculationSettings.from_dict(data.get('progressive', {})),
174
+ debug=DebugSettings.from_dict(data.get('debug', {}))
175
+ )
176
+
177
+ def apply_performance_level(self, level: PerformanceLevel):
178
+ """Apply a performance level preset."""
179
+ self.performance_level = level.value
180
+
181
+ if level == PerformanceLevel.DISABLED:
182
+ self.statistics_enabled = False
183
+
184
+ elif level == PerformanceLevel.MAXIMUM:
185
+ self.statistics_enabled = True
186
+ self.debounce.strategy = "adaptive"
187
+ self.debounce.normal_delay_ms = 500
188
+ self.cache.enabled = True
189
+ self.cache.enable_incremental_updates = True
190
+ self.cache.enable_advanced_stats = False
191
+ self.visibility.enabled = True
192
+ self.visibility.skip_hidden_tabs = True
193
+ self.progressive.enabled = True
194
+
195
+ elif level == PerformanceLevel.BALANCED:
196
+ self.statistics_enabled = True
197
+ self.debounce.strategy = "adaptive"
198
+ self.debounce.normal_delay_ms = 300
199
+ self.cache.enabled = True
200
+ self.cache.enable_incremental_updates = True
201
+ self.cache.enable_advanced_stats = True
202
+ self.visibility.enabled = True
203
+ self.progressive.enabled = True
204
+
205
+ elif level == PerformanceLevel.QUALITY:
206
+ self.statistics_enabled = True
207
+ self.debounce.strategy = "normal"
208
+ self.debounce.normal_delay_ms = 100
209
+ self.cache.enabled = True
210
+ self.cache.enable_incremental_updates = False
211
+ self.cache.enable_advanced_stats = True
212
+ self.visibility.enabled = False
213
+ self.progressive.enabled = False
214
+
215
+
216
+ @dataclass
217
+ class PerformanceMetric:
218
+ """A single performance metric measurement."""
219
+ timestamp: float
220
+ metric_name: str
221
+ value: float
222
+ unit: str
223
+ context: Dict[str, Any] = field(default_factory=dict)
224
+
225
+ def to_dict(self) -> Dict[str, Any]:
226
+ """Convert to dictionary."""
227
+ return {
228
+ 'timestamp': self.timestamp,
229
+ 'metric_name': self.metric_name,
230
+ 'value': self.value,
231
+ 'unit': self.unit,
232
+ 'context': self.context
233
+ }
234
+
235
+
236
+ @dataclass
237
+ class DiagnosticInfo:
238
+ """Diagnostic information about statistics system."""
239
+ timestamp: float
240
+ issue_type: str
241
+ severity: str # info, warning, error, critical
242
+ message: str
243
+ details: Dict[str, Any] = field(default_factory=dict)
244
+ suggestions: List[str] = field(default_factory=list)
245
+
246
+ def to_dict(self) -> Dict[str, Any]:
247
+ """Convert to dictionary."""
248
+ return {
249
+ 'timestamp': self.timestamp,
250
+ 'issue_type': self.issue_type,
251
+ 'severity': self.severity,
252
+ 'message': self.message,
253
+ 'details': self.details,
254
+ 'suggestions': self.suggestions
255
+ }
256
+
257
+
258
+ class StatsConfigManager:
259
+ """
260
+ Configuration and monitoring manager for statistics optimization system.
261
+
262
+ Provides centralized configuration management, performance monitoring,
263
+ diagnostic information, and debug logging capabilities.
264
+ """
265
+
266
+ def __init__(self, config_file: Optional[str] = None):
267
+ """
268
+ Initialize the configuration manager.
269
+
270
+ Args:
271
+ config_file: Optional path to configuration file
272
+ """
273
+ self.config_file = config_file or "stats_config.json"
274
+ self.config = StatisticsConfiguration()
275
+
276
+ # Performance metrics storage
277
+ self.metrics_history: deque = deque(maxlen=1000)
278
+ self.metrics_lock = threading.RLock()
279
+
280
+ # Diagnostic information storage
281
+ self.diagnostics: deque = deque(maxlen=100)
282
+ self.diagnostics_lock = threading.RLock()
283
+
284
+ # Configuration change callbacks
285
+ self.config_change_callbacks: List[Callable[[StatisticsConfiguration], None]] = []
286
+
287
+ # Logger setup
288
+ self.logger = self._setup_logger()
289
+
290
+ # Performance tracking
291
+ self.performance_warnings_count = 0
292
+ self.last_performance_check = time.time()
293
+
294
+ # Load configuration if file exists
295
+ self.load_configuration()
296
+
297
+ def _setup_logger(self) -> logging.Logger:
298
+ """Setup logger for statistics system."""
299
+ logger = logging.getLogger('stats_optimization')
300
+
301
+ # Remove existing handlers
302
+ logger.handlers.clear()
303
+
304
+ # Set level based on debug settings
305
+ log_level = getattr(logging, self.config.debug.log_level, logging.INFO)
306
+ logger.setLevel(log_level)
307
+
308
+ # Console handler
309
+ console_handler = logging.StreamHandler(sys.stdout)
310
+ console_handler.setLevel(log_level)
311
+
312
+ # Formatter
313
+ formatter = logging.Formatter(
314
+ '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
315
+ datefmt='%Y-%m-%d %H:%M:%S'
316
+ )
317
+ console_handler.setFormatter(formatter)
318
+
319
+ logger.addHandler(console_handler)
320
+
321
+ # File handler if debug enabled
322
+ if self.config.debug.enabled and self.config.debug.save_metrics_to_file:
323
+ try:
324
+ file_handler = logging.FileHandler('stats_optimization.log')
325
+ file_handler.setLevel(log_level)
326
+ file_handler.setFormatter(formatter)
327
+ logger.addHandler(file_handler)
328
+ except Exception as e:
329
+ print(f"Failed to create log file handler: {e}")
330
+
331
+ return logger
332
+
333
+ def load_configuration(self) -> bool:
334
+ """
335
+ Load configuration from file.
336
+
337
+ Returns:
338
+ True if loaded successfully, False otherwise
339
+ """
340
+ try:
341
+ config_path = Path(self.config_file)
342
+ if config_path.exists():
343
+ with open(config_path, 'r', encoding='utf-8') as f:
344
+ data = json.load(f)
345
+
346
+ self.config = StatisticsConfiguration.from_dict(data)
347
+ self.logger.info(f"Configuration loaded from {self.config_file}")
348
+
349
+ # Update logger based on new config
350
+ self.logger = self._setup_logger()
351
+
352
+ # Notify callbacks
353
+ self._notify_config_change()
354
+
355
+ return True
356
+ except Exception as e:
357
+ self.logger.error(f"Failed to load configuration: {e}")
358
+
359
+ return False
360
+
361
+ def save_configuration(self) -> bool:
362
+ """
363
+ Save configuration to file.
364
+
365
+ Returns:
366
+ True if saved successfully, False otherwise
367
+ """
368
+ try:
369
+ config_path = Path(self.config_file)
370
+ config_path.parent.mkdir(parents=True, exist_ok=True)
371
+
372
+ with open(config_path, 'w', encoding='utf-8') as f:
373
+ json.dump(self.config.to_dict(), f, indent=2)
374
+
375
+ self.logger.info(f"Configuration saved to {self.config_file}")
376
+ return True
377
+
378
+ except Exception as e:
+            self.logger.error(f"Failed to save configuration: {e}")
+            return False
+
+    def get_configuration(self) -> StatisticsConfiguration:
+        """Get current configuration."""
+        return self.config
+
+    def update_configuration(self, config: StatisticsConfiguration, save: bool = True):
+        """
+        Update configuration.
+
+        Args:
+            config: New configuration
+            save: Whether to save to file
+        """
+        self.config = config
+
+        # Update logger
+        self.logger = self._setup_logger()
+
+        # Save if requested
+        if save:
+            self.save_configuration()
+
+        # Notify callbacks
+        self._notify_config_change()
+
+        self.logger.info("Configuration updated")
+
+    def update_debounce_settings(self, settings: DebounceSettings, save: bool = True):
+        """Update debounce settings."""
+        self.config.debounce = settings
+
+        if save:
+            self.save_configuration()
+
+        self._notify_config_change()
+
+        if self.config.debug.enabled:
+            self.logger.debug(f"Debounce settings updated: {settings.to_dict()}")
+
+    def update_cache_settings(self, settings: CacheSettings, save: bool = True):
+        """Update cache settings."""
+        self.config.cache = settings
+
+        if save:
+            self.save_configuration()
+
+        self._notify_config_change()
+
+        if self.config.debug.enabled:
+            self.logger.debug(f"Cache settings updated: {settings.to_dict()}")
+
+    def set_performance_level(self, level: PerformanceLevel, save: bool = True):
+        """
+        Set performance level preset.
+
+        Args:
+            level: Performance level to apply
+            save: Whether to save to file
+        """
+        self.config.apply_performance_level(level)
+
+        if save:
+            self.save_configuration()
+
+        self._notify_config_change()
+
+        self.logger.info(f"Performance level set to: {level.value}")
+
+    def enable_statistics(self, enabled: bool = True, save: bool = True):
+        """
+        Enable or disable statistics calculations.
+
+        Args:
+            enabled: True to enable, False to disable
+            save: Whether to save to file
+        """
+        self.config.statistics_enabled = enabled
+
+        if save:
+            self.save_configuration()
+
+        self._notify_config_change()
+
+        status = "enabled" if enabled else "disabled"
+        self.logger.info(f"Statistics calculations {status}")
+
+    def enable_debug_mode(self, enabled: bool = True, save: bool = True):
+        """
+        Enable or disable debug mode.
+
+        Args:
+            enabled: True to enable, False to disable
+            save: Whether to save to file
+        """
+        self.config.debug.enabled = enabled
+
+        if enabled:
+            self.config.debug.log_performance_metrics = True
+            self.config.debug.log_level = "DEBUG"
+        else:
+            self.config.debug.log_level = "INFO"
+
+        # Update logger
+        self.logger = self._setup_logger()
+
+        if save:
+            self.save_configuration()
+
+        self._notify_config_change()
+
+        status = "enabled" if enabled else "disabled"
+        self.logger.info(f"Debug mode {status}")
+
+    def register_config_change_callback(self, callback: Callable[[StatisticsConfiguration], None]):
+        """Register a callback for configuration changes."""
+        self.config_change_callbacks.append(callback)
+
+    def _notify_config_change(self):
+        """Notify all callbacks of configuration change."""
+        for callback in self.config_change_callbacks:
+            try:
+                callback(self.config)
+            except Exception as e:
+                self.logger.error(f"Error in config change callback: {e}")
+
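As a minimal usage sketch (illustrative only, not part of the package diff; it assumes the module is importable as core.stats_config_manager and that StatisticsConfiguration is exported from it), the callback and toggle methods above can be driven like this:

    from core.stats_config_manager import StatsConfigManager, StatisticsConfiguration

    manager = StatsConfigManager()

    # Called whenever _notify_config_change() fires after an update.
    def on_config_change(config: StatisticsConfiguration) -> None:
        print("statistics enabled:", config.statistics_enabled)

    manager.register_config_change_callback(on_config_change)

    # Each setter persists by default (save=True) and then notifies callbacks.
    manager.enable_statistics(False, save=False)  # toggle without writing to disk
    manager.enable_debug_mode(True)               # raises log level to DEBUG and saves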
+    def record_metric(self, metric_name: str, value: float, unit: str,
+                      context: Optional[Dict[str, Any]] = None):
+        """
+        Record a performance metric.
+
+        Args:
+            metric_name: Name of the metric
+            value: Metric value
+            unit: Unit of measurement
+            context: Optional context information
+        """
+        if not self.config.debug.log_performance_metrics:
+            return
+
+        metric = PerformanceMetric(
+            timestamp=time.time(),
+            metric_name=metric_name,
+            value=value,
+            unit=unit,
+            context=context or {}
+        )
+
+        with self.metrics_lock:
+            self.metrics_history.append(metric)
+
+        # Log if debug enabled
+        if self.config.debug.enabled:
+            self.logger.debug(f"Metric: {metric_name} = {value} {unit}")
+
+        # Check for performance warnings
+        self._check_performance_warning(metric)
+
+        # Save to file if configured
+        if self.config.debug.save_metrics_to_file:
+            self._save_metrics_to_file()
+
+    def _check_performance_warning(self, metric: PerformanceMetric):
+        """Check if metric indicates a performance issue."""
+        threshold = self.config.debug.performance_warning_threshold_ms
+
+        # Check calculation time metrics
+        if metric.metric_name in ['calculation_time', 'update_time'] and metric.unit == 'ms':
+            if metric.value > threshold:
+                self.performance_warnings_count += 1
+
+                diagnostic = DiagnosticInfo(
+                    timestamp=time.time(),
+                    issue_type='performance_warning',
+                    severity='warning',
+                    message=f"{metric.metric_name} exceeded threshold: {metric.value:.2f}ms > {threshold}ms",
+                    details={
+                        'metric': metric.to_dict(),
+                        'threshold_ms': threshold
+                    },
+                    suggestions=[
+                        "Consider increasing debounce delays",
+                        "Enable progressive calculation for large content",
+                        "Check if visibility awareness is enabled",
+                        "Review cache settings"
+                    ]
+                )
+
+                self.add_diagnostic(diagnostic)
+
+    def add_diagnostic(self, diagnostic: DiagnosticInfo):
+        """
+        Add diagnostic information.
+
+        Args:
+            diagnostic: Diagnostic information to add
+        """
+        with self.diagnostics_lock:
+            self.diagnostics.append(diagnostic)
+
+        # Log based on severity
+        log_method = getattr(self.logger, diagnostic.severity, self.logger.info)
+        log_method(f"Diagnostic: {diagnostic.message}")
+
+    def get_diagnostics(self, severity: Optional[str] = None,
+                        issue_type: Optional[str] = None,
+                        limit: int = 10) -> List[DiagnosticInfo]:
+        """
+        Get diagnostic information.
+
+        Args:
+            severity: Filter by severity (optional)
+            issue_type: Filter by issue type (optional)
+            limit: Maximum number of diagnostics to return
+
+        Returns:
+            List of diagnostic information
+        """
+        with self.diagnostics_lock:
+            diagnostics = list(self.diagnostics)
+
+        # Filter by severity
+        if severity:
+            diagnostics = [d for d in diagnostics if d.severity == severity]
+
+        # Filter by issue type
+        if issue_type:
+            diagnostics = [d for d in diagnostics if d.issue_type == issue_type]
+
+        # Sort by timestamp (most recent first)
+        diagnostics.sort(key=lambda d: d.timestamp, reverse=True)
+
+        return diagnostics[:limit]
+
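A short sketch of retrieving filtered diagnostics (illustrative; the severity and issue_type strings mirror the ones produced by _check_performance_warning above, and the import path is an assumption):

    from core.stats_config_manager import StatsConfigManager

    manager = StatsConfigManager()

    # Most recent performance warnings only.
    for diag in manager.get_diagnostics(severity='warning',
                                        issue_type='performance_warning',
                                        limit=5):
        print(diag.message)
        for suggestion in diag.suggestions:
            print("  -", suggestion)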
+    def get_metrics(self, metric_name: Optional[str] = None,
+                    time_range_seconds: Optional[float] = None,
+                    limit: int = 100) -> List[PerformanceMetric]:
+        """
+        Get performance metrics.
+
+        Args:
+            metric_name: Filter by metric name (optional)
+            time_range_seconds: Only return metrics from last N seconds (optional)
+            limit: Maximum number of metrics to return
+
+        Returns:
+            List of performance metrics
+        """
+        with self.metrics_lock:
+            metrics = list(self.metrics_history)
+
+        # Filter by metric name
+        if metric_name:
+            metrics = [m for m in metrics if m.metric_name == metric_name]
+
+        # Filter by time range
+        if time_range_seconds:
+            cutoff_time = time.time() - time_range_seconds
+            metrics = [m for m in metrics if m.timestamp >= cutoff_time]
+
+        # Sort by timestamp (most recent first)
+        metrics.sort(key=lambda m: m.timestamp, reverse=True)
+
+        return metrics[:limit]
+
+    def get_metric_statistics(self, metric_name: str,
+                              time_range_seconds: Optional[float] = None) -> Dict[str, float]:
+        """
+        Get statistics for a specific metric.
+
+        Args:
+            metric_name: Name of the metric
+            time_range_seconds: Time range to analyze (optional)
+
+        Returns:
+            Dictionary with min, max, avg, median values
+        """
+        metrics = self.get_metrics(metric_name, time_range_seconds, limit=1000)
+
+        if not metrics:
+            return {
+                'count': 0,
+                'min': 0.0,
+                'max': 0.0,
+                'avg': 0.0,
+                'median': 0.0
+            }
+
+        values = [m.value for m in metrics]
+        values.sort()
+
+        return {
+            'count': len(values),
+            'min': min(values),
+            'max': max(values),
+            'avg': sum(values) / len(values),
+            'median': values[len(values) // 2]
+        }
+
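The metric recording and summary methods above fit together as in this sketch (illustrative; note that record_metric() is a no-op unless debug.log_performance_metrics is enabled, so the flag is set directly on the configuration here):

    import time
    from core.stats_config_manager import StatsConfigManager

    manager = StatsConfigManager()
    manager.get_configuration().debug.log_performance_metrics = True

    start = time.perf_counter()
    # ... some statistics calculation being timed ...
    elapsed_ms = (time.perf_counter() - start) * 1000
    manager.record_metric('calculation_time', elapsed_ms, 'ms',
                          context={'source': 'example'})

    stats = manager.get_metric_statistics('calculation_time', time_range_seconds=60)
    print(f"avg={stats['avg']:.2f}ms over {stats['count']} samples")

Values of 'calculation_time' above debug.performance_warning_threshold_ms will additionally register a performance_warning diagnostic via _check_performance_warning().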
+    def _save_metrics_to_file(self):
+        """Save metrics to file."""
+        try:
+            metrics_file = Path(self.config.debug.metrics_file_path)
+
+            with self.metrics_lock:
+                metrics_data = [m.to_dict() for m in self.metrics_history]
+
+            with open(metrics_file, 'w', encoding='utf-8') as f:
+                json.dump(metrics_data, f, indent=2)
+
+        except Exception as e:
+            self.logger.error(f"Failed to save metrics to file: {e}")
+
+    def generate_performance_report(self) -> Dict[str, Any]:
+        """
+        Generate a comprehensive performance report.
+
+        Returns:
+            Dictionary with performance analysis
+        """
+        report = {
+            'timestamp': time.time(),
+            'configuration': self.config.to_dict(),
+            'statistics_enabled': self.config.statistics_enabled,
+            'performance_level': self.config.performance_level,
+            'metrics_summary': {},
+            'diagnostics_summary': {},
+            'recommendations': []
+        }
+
+        # Metrics summary
+        metric_names = set(m.metric_name for m in self.metrics_history)
+        for metric_name in metric_names:
+            stats = self.get_metric_statistics(metric_name, time_range_seconds=300)
+            report['metrics_summary'][metric_name] = stats
+
+        # Diagnostics summary
+        with self.diagnostics_lock:
+            diagnostics = list(self.diagnostics)
+
+        severity_counts = {}
+        for diagnostic in diagnostics:
+            severity_counts[diagnostic.severity] = severity_counts.get(diagnostic.severity, 0) + 1
+
+        report['diagnostics_summary'] = {
+            'total_count': len(diagnostics),
+            'by_severity': severity_counts,
+            'performance_warnings': self.performance_warnings_count
+        }
+
+        # Generate recommendations
+        report['recommendations'] = self._generate_recommendations(report)
+
+        return report
+
+    def _generate_recommendations(self, report: Dict[str, Any]) -> List[str]:
+        """Generate performance recommendations based on report."""
+        recommendations = []
+
+        # Check if statistics are disabled
+        if not self.config.statistics_enabled:
+            recommendations.append("Statistics are currently disabled. Enable them to see performance metrics.")
+            return recommendations
+
+        # Check calculation times
+        calc_time_stats = report['metrics_summary'].get('calculation_time', {})
+        if calc_time_stats.get('avg', 0) > 200:
+            recommendations.append(
+                f"Average calculation time is high ({calc_time_stats['avg']:.2f}ms). "
+                "Consider enabling progressive calculation or increasing debounce delays."
+            )
+
+        # Check performance warnings
+        if self.performance_warnings_count > 10:
+            recommendations.append(
+                f"Multiple performance warnings detected ({self.performance_warnings_count}). "
+                "Consider switching to 'maximum' performance level."
+            )
+
+        # Check cache effectiveness
+        if not self.config.cache.enabled:
+            recommendations.append("Cache is disabled. Enabling cache can significantly improve performance.")
+
+        # Check visibility awareness
+        if not self.config.visibility.enabled:
+            recommendations.append(
+                "Visibility awareness is disabled. Enabling it can reduce unnecessary calculations."
+            )
+
+        # Check progressive calculation
+        if not self.config.progressive.enabled:
+            recommendations.append(
+                "Progressive calculation is disabled. Enable it for better handling of large content."
+            )
+
+        return recommendations
+
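The report and recommendation pair can be exercised end to end; a small illustrative sketch (import path assumed as above):

    import json
    from core.stats_config_manager import StatsConfigManager

    manager = StatsConfigManager()
    report = manager.generate_performance_report()

    print("performance warnings:",
          report['diagnostics_summary']['performance_warnings'])
    for recommendation in report['recommendations']:
        print("-", recommendation)

    # metrics_summary holds only plain numbers, so it serialises cleanly.
    print(json.dumps(report['metrics_summary'], indent=2))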
+    def export_diagnostics(self, file_path: str) -> bool:
+        """
+        Export diagnostics to file.
+
+        Args:
+            file_path: Path to export file
+
+        Returns:
+            True if exported successfully
+        """
+        try:
+            with self.diagnostics_lock:
+                diagnostics_data = [d.to_dict() for d in self.diagnostics]
+
+            export_path = Path(file_path)
+            export_path.parent.mkdir(parents=True, exist_ok=True)
+
+            with open(export_path, 'w', encoding='utf-8') as f:
+                json.dump(diagnostics_data, f, indent=2)
+
+            self.logger.info(f"Diagnostics exported to {file_path}")
+            return True
+
+        except Exception as e:
+            self.logger.error(f"Failed to export diagnostics: {e}")
+            return False
+
+    def clear_diagnostics(self):
+        """Clear all diagnostic information."""
+        with self.diagnostics_lock:
+            self.diagnostics.clear()
+
+        self.performance_warnings_count = 0
+        self.logger.info("Diagnostics cleared")
+
+    def clear_metrics(self):
+        """Clear all performance metrics."""
+        with self.metrics_lock:
+            self.metrics_history.clear()
+
+        self.logger.info("Metrics cleared")
+
+    def reset_to_defaults(self, save: bool = True):
+        """
+        Reset configuration to defaults.
+
+        Args:
+            save: Whether to save to file
+        """
+        self.config = StatisticsConfiguration()
+
+        if save:
+            self.save_configuration()
+
+        self._notify_config_change()
+
+        self.logger.info("Configuration reset to defaults")
+
+
+# Global instance
+_global_config_manager: Optional[StatsConfigManager] = None
+
+
+def get_config_manager() -> StatsConfigManager:
+    """Get the global configuration manager instance."""
+    global _global_config_manager
+    if _global_config_manager is None:
+        _global_config_manager = StatsConfigManager()
+    return _global_config_manager
+
+
+def create_config_manager(config_file: Optional[str] = None) -> StatsConfigManager:
+    """
+    Create a new configuration manager instance.
+
+    Args:
+        config_file: Optional path to configuration file
+
+    Returns:
+        New StatsConfigManager instance
+    """
+    return StatsConfigManager(config_file)
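Finally, a sketch of the module-level accessors (illustrative; the file paths are placeholders and the import path is assumed):

    from core.stats_config_manager import create_config_manager, get_config_manager

    # Shared singleton, lazily created with default settings.
    shared = get_config_manager()
    assert shared is get_config_manager()

    # Independent instance bound to an explicit configuration file.
    scratch = create_config_manager('stats_config_example.json')
    scratch.reset_to_defaults(save=False)
    scratch.export_diagnostics('exports/stats_diagnostics_example.json')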