crackerjack 0.32.0__py3-none-any.whl → 0.33.1__py3-none-any.whl

This diff shows the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only and reflects the changes between those published versions.

Potentially problematic release.



Files changed (200)
  1. crackerjack/__main__.py +1350 -34
  2. crackerjack/adapters/__init__.py +17 -0
  3. crackerjack/adapters/lsp_client.py +358 -0
  4. crackerjack/adapters/rust_tool_adapter.py +194 -0
  5. crackerjack/adapters/rust_tool_manager.py +193 -0
  6. crackerjack/adapters/skylos_adapter.py +231 -0
  7. crackerjack/adapters/zuban_adapter.py +560 -0
  8. crackerjack/agents/base.py +7 -3
  9. crackerjack/agents/coordinator.py +271 -33
  10. crackerjack/agents/documentation_agent.py +9 -15
  11. crackerjack/agents/dry_agent.py +3 -15
  12. crackerjack/agents/formatting_agent.py +1 -1
  13. crackerjack/agents/import_optimization_agent.py +36 -180
  14. crackerjack/agents/performance_agent.py +17 -98
  15. crackerjack/agents/performance_helpers.py +7 -31
  16. crackerjack/agents/proactive_agent.py +1 -3
  17. crackerjack/agents/refactoring_agent.py +16 -85
  18. crackerjack/agents/refactoring_helpers.py +7 -42
  19. crackerjack/agents/security_agent.py +9 -48
  20. crackerjack/agents/test_creation_agent.py +356 -513
  21. crackerjack/agents/test_specialist_agent.py +0 -4
  22. crackerjack/api.py +6 -25
  23. crackerjack/cli/cache_handlers.py +204 -0
  24. crackerjack/cli/cache_handlers_enhanced.py +683 -0
  25. crackerjack/cli/facade.py +100 -0
  26. crackerjack/cli/handlers.py +224 -9
  27. crackerjack/cli/interactive.py +6 -4
  28. crackerjack/cli/options.py +642 -55
  29. crackerjack/cli/utils.py +2 -1
  30. crackerjack/code_cleaner.py +58 -117
  31. crackerjack/config/global_lock_config.py +8 -48
  32. crackerjack/config/hooks.py +53 -62
  33. crackerjack/core/async_workflow_orchestrator.py +24 -34
  34. crackerjack/core/autofix_coordinator.py +3 -17
  35. crackerjack/core/enhanced_container.py +64 -6
  36. crackerjack/core/file_lifecycle.py +12 -89
  37. crackerjack/core/performance.py +2 -2
  38. crackerjack/core/performance_monitor.py +15 -55
  39. crackerjack/core/phase_coordinator.py +257 -218
  40. crackerjack/core/resource_manager.py +14 -90
  41. crackerjack/core/service_watchdog.py +62 -95
  42. crackerjack/core/session_coordinator.py +149 -0
  43. crackerjack/core/timeout_manager.py +14 -72
  44. crackerjack/core/websocket_lifecycle.py +13 -78
  45. crackerjack/core/workflow_orchestrator.py +558 -240
  46. crackerjack/docs/INDEX.md +11 -0
  47. crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
  48. crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
  49. crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
  50. crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
  51. crackerjack/docs/generated/api/SERVICES.md +1252 -0
  52. crackerjack/documentation/__init__.py +31 -0
  53. crackerjack/documentation/ai_templates.py +756 -0
  54. crackerjack/documentation/dual_output_generator.py +765 -0
  55. crackerjack/documentation/mkdocs_integration.py +518 -0
  56. crackerjack/documentation/reference_generator.py +977 -0
  57. crackerjack/dynamic_config.py +55 -50
  58. crackerjack/executors/async_hook_executor.py +10 -15
  59. crackerjack/executors/cached_hook_executor.py +117 -43
  60. crackerjack/executors/hook_executor.py +8 -34
  61. crackerjack/executors/hook_lock_manager.py +26 -183
  62. crackerjack/executors/individual_hook_executor.py +13 -11
  63. crackerjack/executors/lsp_aware_hook_executor.py +270 -0
  64. crackerjack/executors/tool_proxy.py +417 -0
  65. crackerjack/hooks/lsp_hook.py +79 -0
  66. crackerjack/intelligence/adaptive_learning.py +25 -10
  67. crackerjack/intelligence/agent_orchestrator.py +2 -5
  68. crackerjack/intelligence/agent_registry.py +34 -24
  69. crackerjack/intelligence/agent_selector.py +5 -7
  70. crackerjack/interactive.py +17 -6
  71. crackerjack/managers/async_hook_manager.py +0 -1
  72. crackerjack/managers/hook_manager.py +79 -1
  73. crackerjack/managers/publish_manager.py +66 -13
  74. crackerjack/managers/test_command_builder.py +5 -17
  75. crackerjack/managers/test_executor.py +1 -3
  76. crackerjack/managers/test_manager.py +109 -7
  77. crackerjack/managers/test_manager_backup.py +10 -9
  78. crackerjack/mcp/cache.py +2 -2
  79. crackerjack/mcp/client_runner.py +1 -1
  80. crackerjack/mcp/context.py +191 -68
  81. crackerjack/mcp/dashboard.py +7 -5
  82. crackerjack/mcp/enhanced_progress_monitor.py +31 -28
  83. crackerjack/mcp/file_monitor.py +30 -23
  84. crackerjack/mcp/progress_components.py +31 -21
  85. crackerjack/mcp/progress_monitor.py +50 -53
  86. crackerjack/mcp/rate_limiter.py +6 -6
  87. crackerjack/mcp/server_core.py +161 -32
  88. crackerjack/mcp/service_watchdog.py +2 -1
  89. crackerjack/mcp/state.py +4 -7
  90. crackerjack/mcp/task_manager.py +11 -9
  91. crackerjack/mcp/tools/core_tools.py +174 -33
  92. crackerjack/mcp/tools/error_analyzer.py +3 -2
  93. crackerjack/mcp/tools/execution_tools.py +15 -12
  94. crackerjack/mcp/tools/execution_tools_backup.py +42 -30
  95. crackerjack/mcp/tools/intelligence_tool_registry.py +7 -5
  96. crackerjack/mcp/tools/intelligence_tools.py +5 -2
  97. crackerjack/mcp/tools/monitoring_tools.py +33 -70
  98. crackerjack/mcp/tools/proactive_tools.py +24 -11
  99. crackerjack/mcp/tools/progress_tools.py +5 -8
  100. crackerjack/mcp/tools/utility_tools.py +20 -14
  101. crackerjack/mcp/tools/workflow_executor.py +62 -40
  102. crackerjack/mcp/websocket/app.py +8 -0
  103. crackerjack/mcp/websocket/endpoints.py +352 -357
  104. crackerjack/mcp/websocket/jobs.py +40 -57
  105. crackerjack/mcp/websocket/monitoring_endpoints.py +2935 -0
  106. crackerjack/mcp/websocket/server.py +7 -25
  107. crackerjack/mcp/websocket/websocket_handler.py +6 -17
  108. crackerjack/mixins/__init__.py +3 -0
  109. crackerjack/mixins/error_handling.py +145 -0
  110. crackerjack/models/config.py +21 -1
  111. crackerjack/models/config_adapter.py +49 -1
  112. crackerjack/models/protocols.py +176 -107
  113. crackerjack/models/resource_protocols.py +55 -210
  114. crackerjack/models/task.py +3 -0
  115. crackerjack/monitoring/ai_agent_watchdog.py +13 -13
  116. crackerjack/monitoring/metrics_collector.py +426 -0
  117. crackerjack/monitoring/regression_prevention.py +8 -8
  118. crackerjack/monitoring/websocket_server.py +643 -0
  119. crackerjack/orchestration/advanced_orchestrator.py +11 -6
  120. crackerjack/orchestration/coverage_improvement.py +3 -3
  121. crackerjack/orchestration/execution_strategies.py +26 -6
  122. crackerjack/orchestration/test_progress_streamer.py +8 -5
  123. crackerjack/plugins/base.py +2 -2
  124. crackerjack/plugins/hooks.py +7 -0
  125. crackerjack/plugins/managers.py +11 -8
  126. crackerjack/security/__init__.py +0 -1
  127. crackerjack/security/audit.py +90 -105
  128. crackerjack/services/anomaly_detector.py +392 -0
  129. crackerjack/services/api_extractor.py +615 -0
  130. crackerjack/services/backup_service.py +2 -2
  131. crackerjack/services/bounded_status_operations.py +15 -152
  132. crackerjack/services/cache.py +127 -1
  133. crackerjack/services/changelog_automation.py +395 -0
  134. crackerjack/services/config.py +18 -11
  135. crackerjack/services/config_merge.py +30 -85
  136. crackerjack/services/config_template.py +506 -0
  137. crackerjack/services/contextual_ai_assistant.py +48 -22
  138. crackerjack/services/coverage_badge_service.py +171 -0
  139. crackerjack/services/coverage_ratchet.py +41 -17
  140. crackerjack/services/debug.py +3 -3
  141. crackerjack/services/dependency_analyzer.py +460 -0
  142. crackerjack/services/dependency_monitor.py +14 -11
  143. crackerjack/services/documentation_generator.py +491 -0
  144. crackerjack/services/documentation_service.py +675 -0
  145. crackerjack/services/enhanced_filesystem.py +6 -5
  146. crackerjack/services/enterprise_optimizer.py +865 -0
  147. crackerjack/services/error_pattern_analyzer.py +676 -0
  148. crackerjack/services/file_hasher.py +1 -1
  149. crackerjack/services/git.py +41 -45
  150. crackerjack/services/health_metrics.py +10 -8
  151. crackerjack/services/heatmap_generator.py +735 -0
  152. crackerjack/services/initialization.py +30 -33
  153. crackerjack/services/input_validator.py +5 -97
  154. crackerjack/services/intelligent_commit.py +327 -0
  155. crackerjack/services/log_manager.py +15 -12
  156. crackerjack/services/logging.py +4 -3
  157. crackerjack/services/lsp_client.py +628 -0
  158. crackerjack/services/memory_optimizer.py +409 -0
  159. crackerjack/services/metrics.py +42 -33
  160. crackerjack/services/parallel_executor.py +416 -0
  161. crackerjack/services/pattern_cache.py +1 -1
  162. crackerjack/services/pattern_detector.py +6 -6
  163. crackerjack/services/performance_benchmarks.py +250 -576
  164. crackerjack/services/performance_cache.py +382 -0
  165. crackerjack/services/performance_monitor.py +565 -0
  166. crackerjack/services/predictive_analytics.py +510 -0
  167. crackerjack/services/quality_baseline.py +234 -0
  168. crackerjack/services/quality_baseline_enhanced.py +646 -0
  169. crackerjack/services/quality_intelligence.py +785 -0
  170. crackerjack/services/regex_patterns.py +605 -524
  171. crackerjack/services/regex_utils.py +43 -123
  172. crackerjack/services/secure_path_utils.py +5 -164
  173. crackerjack/services/secure_status_formatter.py +30 -141
  174. crackerjack/services/secure_subprocess.py +11 -92
  175. crackerjack/services/security.py +61 -30
  176. crackerjack/services/security_logger.py +18 -22
  177. crackerjack/services/server_manager.py +124 -16
  178. crackerjack/services/status_authentication.py +16 -159
  179. crackerjack/services/status_security_manager.py +4 -131
  180. crackerjack/services/terminal_utils.py +0 -0
  181. crackerjack/services/thread_safe_status_collector.py +19 -125
  182. crackerjack/services/unified_config.py +21 -13
  183. crackerjack/services/validation_rate_limiter.py +5 -54
  184. crackerjack/services/version_analyzer.py +459 -0
  185. crackerjack/services/version_checker.py +1 -1
  186. crackerjack/services/websocket_resource_limiter.py +10 -144
  187. crackerjack/services/zuban_lsp_service.py +390 -0
  188. crackerjack/slash_commands/__init__.py +2 -7
  189. crackerjack/slash_commands/run.md +2 -2
  190. crackerjack/tools/validate_input_validator_patterns.py +14 -40
  191. crackerjack/tools/validate_regex_patterns.py +19 -48
  192. {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/METADATA +197 -26
  193. crackerjack-0.33.1.dist-info/RECORD +229 -0
  194. crackerjack/CLAUDE.md +0 -207
  195. crackerjack/RULES.md +0 -380
  196. crackerjack/py313.py +0 -234
  197. crackerjack-0.32.0.dist-info/RECORD +0 -180
  198. {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/WHEEL +0 -0
  199. {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/entry_points.txt +0 -0
  200. {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,409 @@
+import gc
+import sys
+import time
+import typing as t
+from collections.abc import Callable
+from dataclasses import dataclass
+from functools import wraps
+from threading import Lock
+from typing import Any
+from weakref import WeakSet
+
+from crackerjack.services.logging import get_logger
+
+
+@dataclass
+class MemoryStats:
+    total_allocated_mb: float
+    peak_usage_mb: float
+    current_usage_mb: float
+    gc_collections: int
+    lazy_objects_created: int
+    lazy_objects_loaded: int
+    resource_pools_active: int
+
+
+class LazyLoader:
+    def __init__(
+        self,
+        factory: Callable[[], Any],
+        name: str = "unnamed",
+        auto_dispose: bool = True,
+    ):
+        self._factory = factory
+        self._name = name
+        self._auto_dispose = auto_dispose
+        self._value: Any | None = None
+        self._loaded = False
+        self._lock = Lock()
+        self._access_count = 0
+        self._last_access = time.time()
+        self._logger = get_logger(f"crackerjack.lazy_loader.{name}")
+
+        MemoryOptimizer.get_instance().register_lazy_object(self)
+
+    @property
+    def is_loaded(self) -> bool:
+        with self._lock:
+            return self._loaded
+
+    @property
+    def access_count(self) -> int:
+        return self._access_count
+
+    def get(self) -> Any:
+        with self._lock:
+            if not self._loaded:
+                self._logger.debug(f"Lazy loading resource: {self._name}")
+                start_time = time.time()
+
+                try:
+                    self._value = self._factory()
+                    self._loaded = True
+                    load_time = time.time() - start_time
+                    self._logger.debug(f"Loaded {self._name} in {load_time: .3f}s")
+
+                    MemoryOptimizer.get_instance().notify_lazy_load(self._name)
+
+                except Exception as e:
+                    self._logger.error(f"Failed to load {self._name}: {e}")
+                    raise
+
+            self._access_count += 1
+            self._last_access = time.time()
+
+            if self._value is None:
+                raise RuntimeError(f"Lazy loader {self._name} has no value")
+
+            return self._value
+
+    def dispose(self) -> None:
+        with self._lock:
+            if self._loaded and self._value is not None:
+                self._logger.debug(f"Disposing lazy resource: {self._name}")
+
+                if hasattr(self._value, "close"):
+                    try:
+                        self._value.close()
+                    except Exception as e:
+                        self._logger.warning(f"Error closing {self._name}: {e}")
+
+                self._value = None
+                self._loaded = False
+
+                gc.collect()
+
+    def __del__(self) -> None:
+        if self._auto_dispose:
+            self.dispose()
+
+
+class ResourcePool:
+    def __init__(
+        self,
+        factory: Callable[[], Any],
+        max_size: int = 5,
+        name: str = "unnamed",
+    ):
+        self._factory = factory
+        self._max_size = max_size
+        self._name = name
+        self._pool: list[Any] = []
+        self._in_use: WeakSet[t.Any] = WeakSet()
+        self._lock = Lock()
+        self._created_count = 0
+        self._reused_count = 0
+        self._logger = get_logger(f"crackerjack.resource_pool.{name}")
+
+    def acquire(self) -> Any:
+        with self._lock:
+            if self._pool:
+                resource = self._pool.pop()
+                self._in_use.add(resource)
+                self._reused_count += 1
+                self._logger.debug(f"Reused resource from {self._name} pool")
+                return resource
+            else:
+                resource = self._factory()
+                self._in_use.add(resource)
+                self._created_count += 1
+                self._logger.debug(f"Created new resource for {self._name} pool")
+                return resource
+
+    def release(self, resource: Any) -> None:
+        with self._lock:
+            if resource in self._in_use:
+                self._in_use.discard(resource)
+
+                if len(self._pool) < self._max_size:
+                    self._pool.append(resource)
+                    self._logger.debug(f"Returned resource to {self._name} pool")
+                else:
+                    if hasattr(resource, "close"):
+                        try:
+                            resource.close()
+                        except Exception as e:
+                            self._logger.warning(f"Error closing resource: {e}")
+
+                    self._logger.debug(
+                        f"Pool full, disposed resource from {self._name}"
+                    )
+
+    def clear(self) -> None:
+        with self._lock:
+            for resource in self._pool:
+                if hasattr(resource, "close"):
+                    try:
+                        resource.close()
+                    except Exception as e:
+                        self._logger.warning(f"Error closing pooled resource: {e}")
+
+            self._pool.clear()
+            self._logger.info(f"Cleared {self._name} resource pool")
+
+    def get_stats(self) -> dict[str, Any]:
+        with self._lock:
+            return {
+                "pool_size": len(self._pool),
+                "in_use": len(self._in_use),
+                "created_total": self._created_count,
+                "reused_total": self._reused_count,
+                "efficiency": (
+                    self._reused_count / (self._created_count + self._reused_count)
+                    if self._created_count + self._reused_count > 0
+                    else 0.0
+                ),
+            }
+
+
+class MemoryProfiler:
+    def __init__(self) -> None:
+        self._start_memory = 0.0
+        self._peak_memory = 0.0
+        self._measurements: list[tuple[float, float]] = []
+        self._logger = get_logger("crackerjack.memory_profiler")
+
+    def start_profiling(self) -> None:
+        self._start_memory = self._get_memory_usage()
+        self._peak_memory = self._start_memory
+        self._measurements.clear()
+        self._logger.debug(f"Started memory profiling at {self._start_memory: .2f} MB")
+
+    def record_checkpoint(self, name: str = "") -> float:
+        current_memory = self._get_memory_usage()
+        self._peak_memory = max(self._peak_memory, current_memory)
+
+        timestamp = time.time()
+        self._measurements.append((timestamp, current_memory))
+
+        if name:
+            self._logger.debug(f"Memory checkpoint '{name}': {current_memory: .2f} MB")
+
+        return current_memory
+
+    def get_summary(self) -> dict[str, Any]:
+        if not self._measurements:
+            return {}
+
+        current_memory = self._get_memory_usage()
+        memory_delta = current_memory - self._start_memory
+
+        return {
+            "start_memory_mb": self._start_memory,
+            "current_memory_mb": current_memory,
+            "peak_memory_mb": self._peak_memory,
+            "memory_delta_mb": memory_delta,
+            "checkpoints": len(self._measurements),
+        }
+
+    def _get_memory_usage(self) -> float:
+        try:
+            import os
+
+            import psutil
+
+            process = psutil.Process(os.getpid())
+            memory_mb: float = process.memory_info().rss / 1024 / 1024
+            return memory_mb
+        except ImportError:
+            import tracemalloc
+
+            if tracemalloc.is_tracing():
+                current, _peak = tracemalloc.get_traced_memory()
+                return current / 1024 / 1024
+            else:
+                return sys.getsizeof(gc.get_objects()) / 1024 / 1024
+
+
+class MemoryOptimizer:
+    _instance: t.Optional["MemoryOptimizer"] = None
+    _lock = Lock()
+
+    def __init__(self) -> None:
+        self._lazy_objects: WeakSet[t.Any] = WeakSet()
+        self._resource_pools: dict[str, ResourcePool] = {}
+        self._profiler = MemoryProfiler()
+        self._stats_lock = Lock()
+        self._lazy_created_count = 0
+        self._lazy_loaded_count = 0
+        self._gc_threshold = 100
+        self._auto_gc = True
+        self._logger = get_logger("crackerjack.memory_optimizer")
+
+    @classmethod
+    def get_instance(cls) -> "MemoryOptimizer":
+        with cls._lock:
+            if cls._instance is None:
+                cls._instance = cls()
+            return cls._instance
+
+    def register_lazy_object(self, lazy_obj: LazyLoader) -> None:
+        self._lazy_objects.add(lazy_obj)
+        with self._stats_lock:
+            self._lazy_created_count += 1
+
+    def notify_lazy_load(self, name: str) -> None:
+        with self._stats_lock:
+            self._lazy_loaded_count += 1
+
+        if self._auto_gc and self._should_run_gc():
+            self._run_memory_cleanup()
+
+    def register_resource_pool(self, name: str, pool: ResourcePool) -> None:
+        self._resource_pools[name] = pool
+        self._logger.debug(f"Registered resource pool: {name}")
+
+    def get_resource_pool(self, name: str) -> ResourcePool | None:
+        return self._resource_pools.get(name)
+
+    def start_profiling(self) -> None:
+        self._profiler.start_profiling()
+
+    def record_checkpoint(self, name: str = "") -> float:
+        return self._profiler.record_checkpoint(name)
+
+    def get_memory_stats(self) -> MemoryStats:
+        profiler_stats = self._profiler.get_summary()
+
+        with self._stats_lock:
+            return MemoryStats(
+                total_allocated_mb=profiler_stats.get("peak_memory_mb", 0.0),
+                peak_usage_mb=profiler_stats.get("peak_memory_mb", 0.0),
+                current_usage_mb=profiler_stats.get("current_memory_mb", 0.0),
+                gc_collections=len(gc.get_stats()) if hasattr(gc, "get_stats") else 0,
+                lazy_objects_created=self._lazy_created_count,
+                lazy_objects_loaded=self._lazy_loaded_count,
+                resource_pools_active=len(self._resource_pools),
+            )
+
+    def optimize_memory(self) -> None:
+        self._logger.info("Running memory optimization")
+
+        self._cleanup_lazy_objects()
+
+        self._cleanup_resource_pools()
+
+        collected = gc.collect()
+        self._logger.debug(f"Garbage collection freed {collected} objects")
+
+    def _should_run_gc(self) -> bool:
+        current_memory = self._profiler.get_summary().get("current_memory_mb", 0)
+        should_gc: bool = current_memory > self._gc_threshold
+        return should_gc
+
+    def _run_memory_cleanup(self) -> None:
+        self._logger.debug("Running automatic memory cleanup")
+
+        before_gc = self._profiler._get_memory_usage()
+        collected = gc.collect()
+        after_gc = self._profiler._get_memory_usage()
+
+        memory_freed = before_gc - after_gc
+
+        if memory_freed > 1.0:
+            self._logger.info(
+                f"Memory cleanup freed {memory_freed: .2f} MB ({collected} objects)"
+            )
+
+    def _cleanup_lazy_objects(self) -> None:
+        disposed_count = 0
+
+        lazy_objects = list[t.Any](self._lazy_objects)
+
+        for lazy_obj in lazy_objects:
+            if (
+                hasattr(lazy_obj, "_last_access")
+                and lazy_obj._last_access < time.time() - 300
+                and lazy_obj.is_loaded
+            ):
+                lazy_obj.dispose()
+                disposed_count += 1
+
+        if disposed_count > 0:
+            self._logger.debug(f"Disposed {disposed_count} unused lazy objects")
+
+    def _cleanup_resource_pools(self) -> None:
+        for name, pool in self._resource_pools.items():
+            stats = pool.get_stats()
+
+            if stats["efficiency"] < 0.1 and stats["created_total"] > 10:
+                pool.clear()
+                self._logger.debug(f"Cleared inefficient resource pool: {name}")
+
+
+def lazy_property(factory: t.Callable[[], t.Any]) -> property:
+    def decorator(self: t.Any) -> Any:
+        attr_name = f"_lazy_{factory.__name__}"
+
+        if not hasattr(self, attr_name):
+            loader = LazyLoader(factory, factory.__name__)
+            setattr(self, attr_name, loader)
+
+        return getattr(self, attr_name).get()
+
+    return property(decorator)
+
+
+def memory_optimized(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
+    @wraps(func)
+    def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any:
+        optimizer = MemoryOptimizer.get_instance()
+
+        before_memory = optimizer.record_checkpoint(f"{func.__name__}_start")
+
+        try:
+            result = func(*args, **kwargs)
+
+            after_memory = optimizer.record_checkpoint(f"{func.__name__}_end")
+
+            memory_delta = after_memory - before_memory
+            if memory_delta > 10.0:
+                optimizer._logger.warning(
+                    f"Function {func.__name__} increased memory by {memory_delta: .2f} MB"
+                )
+
+            return result
+
+        finally:
+            if optimizer._should_run_gc():
+                optimizer._run_memory_cleanup()
+
+    return wrapper
+
+
+def get_memory_optimizer() -> MemoryOptimizer:
+    return MemoryOptimizer.get_instance()
+
+
+def create_lazy_service(factory: Callable[[], Any], name: str) -> LazyLoader:
+    return LazyLoader(factory, name)
+
+
+def create_resource_pool(
+    factory: Callable[[], Any],
+    max_size: int = 5,
+    name: str = "unnamed",
+) -> ResourcePool:
+    pool = ResourcePool(factory, max_size, name)
+    MemoryOptimizer.get_instance().register_resource_pool(name, pool)
+    return pool
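
Assuming the hunk above is the new crackerjack/services/memory_optimizer.py listed in the files table (+409 lines), here is a minimal usage sketch of its public entry points. The expensive_client factory and the "example-*" names are hypothetical placeholders for illustration, not part of the package.

from crackerjack.services.memory_optimizer import (
    create_lazy_service,
    create_resource_pool,
    get_memory_optimizer,
    memory_optimized,
)

def expensive_client() -> object:
    # Hypothetical factory; stands in for any costly-to-construct service.
    return object()

lazy = create_lazy_service(expensive_client, "example-client")   # nothing is built yet
pool = create_resource_pool(expensive_client, max_size=2, name="example-pool")

@memory_optimized  # records before/after memory checkpoints and may trigger cleanup
def do_work() -> None:
    client = lazy.get()        # first call invokes the factory, later calls reuse the value
    resource = pool.acquire()  # reuses a pooled instance when one is available
    try:
        ...
    finally:
        pool.release(resource)

do_work()
print(get_memory_optimizer().get_memory_stats())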
@@ -1,6 +1,7 @@
 import json
 import sqlite3
 import threading
+import typing as t
 from contextlib import contextmanager
 from datetime import date, datetime
 from pathlib import Path
@@ -21,45 +22,45 @@ class MetricsCollector:
     def _init_database(self) -> None:
         with self._get_connection() as conn:
             conn.executescript("""
-                - - Jobs table
+                -- Jobs table
                 CREATE TABLE IF NOT EXISTS jobs (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT UNIQUE NOT NULL,
                     start_time TIMESTAMP NOT NULL,
                     end_time TIMESTAMP,
-                    status TEXT NOT NULL, - - 'running', 'success', 'failed', 'cancelled'
+                    status TEXT NOT NULL, -- 'running', 'success', 'failed', 'cancelled'
                     iterations INTEGER DEFAULT 0,
                     ai_agent BOOLEAN DEFAULT 0,
                     error_message TEXT,
-                    metadata TEXT - - JSON field for additional data
+                    metadata TEXT -- JSON field for additional data
                 );

-                - - Errors table
+                -- Errors table
                 CREATE TABLE IF NOT EXISTS errors (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
-                    error_type TEXT NOT NULL, - - 'hook', 'test', 'lint', 'type_check', etc.
-                    error_category TEXT, - - 'ruff', 'pyright', 'pytest', etc.
+                    error_type TEXT NOT NULL, -- 'hook', 'test', 'lint', 'type_check', etc.
+                    error_category TEXT, -- 'ruff', 'pyright', 'pytest', etc.
                     error_message TEXT,
                     file_path TEXT,
                     line_number INTEGER,
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                - - Hook executions table
+                -- Hook executions table
                 CREATE TABLE IF NOT EXISTS hook_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
                     hook_name TEXT NOT NULL,
-                    hook_type TEXT, - - 'fast', 'comprehensive'
+                    hook_type TEXT, -- 'fast', 'comprehensive'
                     execution_time_ms INTEGER,
-                    status TEXT, - - 'success', 'failed', 'skipped'
+                    status TEXT, -- 'success', 'failed', 'skipped'
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                - - Test executions table
+                -- Test executions table
                 CREATE TABLE IF NOT EXISTS test_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
@@ -73,17 +74,17 @@ class MetricsCollector:
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                - - Orchestration executions table (NEW)
+                -- Orchestration executions table (NEW)
                 CREATE TABLE IF NOT EXISTS orchestration_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
-                    execution_strategy TEXT NOT NULL, - - 'batch', 'individual', 'adaptive', 'selective'
-                    progress_level TEXT NOT NULL, - - 'basic', 'detailed', 'granular', 'streaming'
-                    ai_mode TEXT NOT NULL, - - 'single - agent', 'multi - agent', 'coordinator'
+                    execution_strategy TEXT NOT NULL, -- 'batch', 'individual', 'adaptive', 'selective'
+                    progress_level TEXT NOT NULL, -- 'basic', 'detailed', 'granular', 'streaming'
+                    ai_mode TEXT NOT NULL, -- 'single-agent', 'multi-agent', 'coordinator'
                     iteration_count INTEGER DEFAULT 1,
-                    strategy_switches INTEGER DEFAULT 0, - - How many times strategy changed
-                    correlation_insights TEXT, - - JSON of correlation analysis results
+                    strategy_switches INTEGER DEFAULT 0, -- How many times strategy changed
+                    correlation_insights TEXT, -- JSON of correlation analysis results
                     total_execution_time_ms INTEGER,
                     hooks_execution_time_ms INTEGER,
                     tests_execution_time_ms INTEGER,
@@ -91,7 +92,7 @@ class MetricsCollector:
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                - - Strategy decisions table (NEW)
+                -- Strategy decisions table (NEW)
                 CREATE TABLE IF NOT EXISTS strategy_decisions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
@@ -99,29 +100,29 @@ class MetricsCollector:
                     timestamp TIMESTAMP NOT NULL,
                     previous_strategy TEXT,
                     selected_strategy TEXT NOT NULL,
-                    decision_reason TEXT, - - Why this strategy was chosen
-                    context_data TEXT, - - JSON of execution context
-                    effectiveness_score REAL, - - How well the strategy worked (0 - 1)
+                    decision_reason TEXT, -- Why this strategy was chosen
+                    context_data TEXT, -- JSON of execution context
+                    effectiveness_score REAL, -- How well the strategy worked (0 - 1)
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                - - Individual test executions table (NEW - more granular than test_executions)
+                -- Individual test executions table (NEW - more granular than test_executions)
                 CREATE TABLE IF NOT EXISTS individual_test_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
-                    test_id TEXT NOT NULL, - - Full test identifier
+                    test_id TEXT NOT NULL, -- Full test identifier
                     test_file TEXT NOT NULL,
                     test_class TEXT,
                     test_method TEXT,
-                    status TEXT NOT NULL, - - 'passed', 'failed', 'skipped', 'error'
+                    status TEXT NOT NULL, -- 'passed', 'failed', 'skipped', 'error'
                     execution_time_ms INTEGER,
                     error_message TEXT,
                     error_traceback TEXT,
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                - - Daily summary table (for quick stats)
+                -- Daily summary table (for quick stats)
                 CREATE TABLE IF NOT EXISTS daily_summary (
                     date DATE PRIMARY KEY,
                     total_jobs INTEGER DEFAULT 0,
@@ -134,9 +135,9 @@ class MetricsCollector:
                     type_errors INTEGER DEFAULT 0,
                     avg_job_duration_ms INTEGER,
                     total_ai_fixes INTEGER DEFAULT 0,
-                    orchestrated_jobs INTEGER DEFAULT 0, - - NEW
-                    avg_orchestration_iterations REAL DEFAULT 0, - - NEW
-                    most_effective_strategy TEXT - - NEW
+                    orchestrated_jobs INTEGER DEFAULT 0, -- NEW
+                    avg_orchestration_iterations REAL DEFAULT 0, -- NEW
+                    most_effective_strategy TEXT -- NEW
                 );

                 --Create indexes for performance
@@ -152,7 +153,7 @@ class MetricsCollector:
             """)

     @contextmanager
-    def _get_connection(self):
+    def _get_connection(self) -> t.Iterator[sqlite3.Connection]:
        conn = sqlite3.connect(str(self.db_path))
        conn.row_factory = sqlite3.Row
        try:
@@ -442,10 +443,18 @@ class MetricsCollector:
        """).fetchall()

        return {
-            "strategy_effectiveness": [dict(row) for row in strategy_stats],
-            "correlation_patterns": [dict(row) for row in correlation_patterns],
-            "performance_by_strategy": [dict(row) for row in performance_stats],
-            "test_failure_patterns": [dict(row) for row in test_failure_patterns],
+            "strategy_effectiveness": [
+                dict[str, t.Any](row) for row in strategy_stats
+            ],
+            "correlation_patterns": [
+                dict[str, t.Any](row) for row in correlation_patterns
+            ],
+            "performance_by_strategy": [
+                dict[str, t.Any](row) for row in performance_stats
+            ],
+            "test_failure_patterns": [
+                dict[str, t.Any](row) for row in test_failure_patterns
+            ],
        }

    def _update_daily_summary(
@@ -564,7 +573,7 @@ class MetricsCollector:
            "error_breakdown": {
                row["error_type"]: row["count"] for row in error_stats
            },
-            "common_errors": [dict(row) for row in common_errors],
+            "common_errors": [dict[str, t.Any](row) for row in common_errors],
        }

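
The MetricsCollector hunks above are typing changes rather than behavioral ones: dict[str, t.Any](row) builds the same dictionary as dict(row), and the t.Iterator[sqlite3.Connection] return type only describes what the @contextmanager generator already yields. A self-contained sketch of the same pattern, using a throwaway in-memory database rather than crackerjack's schema:

import sqlite3
import typing as t
from contextlib import contextmanager

@contextmanager
def get_connection(path: str = ":memory:") -> t.Iterator[sqlite3.Connection]:
    # A contextmanager-based connection helper in the same style as the annotated _get_connection.
    conn = sqlite3.connect(path)
    conn.row_factory = sqlite3.Row
    try:
        yield conn
    finally:
        conn.close()

with get_connection() as conn:
    conn.execute("CREATE TABLE jobs (job_id TEXT, status TEXT)")
    conn.execute("INSERT INTO jobs VALUES ('j1', 'success')")
    rows = conn.execute("SELECT * FROM jobs").fetchall()
    # dict[str, t.Any](row) and dict(row) produce identical dicts from a sqlite3.Row;
    # the subscripted form simply carries an explicit value type for static checkers.
    assert [dict[str, t.Any](r) for r in rows] == [dict(r) for r in rows]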