crackerjack 0.33.0__py3-none-any.whl → 0.33.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (198)
  1. crackerjack/__main__.py +1350 -34
  2. crackerjack/adapters/__init__.py +17 -0
  3. crackerjack/adapters/lsp_client.py +358 -0
  4. crackerjack/adapters/rust_tool_adapter.py +194 -0
  5. crackerjack/adapters/rust_tool_manager.py +193 -0
  6. crackerjack/adapters/skylos_adapter.py +231 -0
  7. crackerjack/adapters/zuban_adapter.py +560 -0
  8. crackerjack/agents/base.py +7 -3
  9. crackerjack/agents/coordinator.py +271 -33
  10. crackerjack/agents/documentation_agent.py +9 -15
  11. crackerjack/agents/dry_agent.py +3 -15
  12. crackerjack/agents/formatting_agent.py +1 -1
  13. crackerjack/agents/import_optimization_agent.py +36 -180
  14. crackerjack/agents/performance_agent.py +17 -98
  15. crackerjack/agents/performance_helpers.py +7 -31
  16. crackerjack/agents/proactive_agent.py +1 -3
  17. crackerjack/agents/refactoring_agent.py +16 -85
  18. crackerjack/agents/refactoring_helpers.py +7 -42
  19. crackerjack/agents/security_agent.py +9 -48
  20. crackerjack/agents/test_creation_agent.py +356 -513
  21. crackerjack/agents/test_specialist_agent.py +0 -4
  22. crackerjack/api.py +6 -25
  23. crackerjack/cli/cache_handlers.py +204 -0
  24. crackerjack/cli/cache_handlers_enhanced.py +683 -0
  25. crackerjack/cli/facade.py +100 -0
  26. crackerjack/cli/handlers.py +224 -9
  27. crackerjack/cli/interactive.py +6 -4
  28. crackerjack/cli/options.py +642 -55
  29. crackerjack/cli/utils.py +2 -1
  30. crackerjack/code_cleaner.py +58 -117
  31. crackerjack/config/global_lock_config.py +8 -48
  32. crackerjack/config/hooks.py +53 -62
  33. crackerjack/core/async_workflow_orchestrator.py +24 -34
  34. crackerjack/core/autofix_coordinator.py +3 -17
  35. crackerjack/core/enhanced_container.py +4 -13
  36. crackerjack/core/file_lifecycle.py +12 -89
  37. crackerjack/core/performance.py +2 -2
  38. crackerjack/core/performance_monitor.py +15 -55
  39. crackerjack/core/phase_coordinator.py +104 -204
  40. crackerjack/core/resource_manager.py +14 -90
  41. crackerjack/core/service_watchdog.py +62 -95
  42. crackerjack/core/session_coordinator.py +149 -0
  43. crackerjack/core/timeout_manager.py +14 -72
  44. crackerjack/core/websocket_lifecycle.py +13 -78
  45. crackerjack/core/workflow_orchestrator.py +171 -174
  46. crackerjack/docs/INDEX.md +11 -0
  47. crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
  48. crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
  49. crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
  50. crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
  51. crackerjack/docs/generated/api/SERVICES.md +1252 -0
  52. crackerjack/documentation/__init__.py +31 -0
  53. crackerjack/documentation/ai_templates.py +756 -0
  54. crackerjack/documentation/dual_output_generator.py +765 -0
  55. crackerjack/documentation/mkdocs_integration.py +518 -0
  56. crackerjack/documentation/reference_generator.py +977 -0
  57. crackerjack/dynamic_config.py +55 -50
  58. crackerjack/executors/async_hook_executor.py +10 -15
  59. crackerjack/executors/cached_hook_executor.py +117 -43
  60. crackerjack/executors/hook_executor.py +8 -34
  61. crackerjack/executors/hook_lock_manager.py +26 -183
  62. crackerjack/executors/individual_hook_executor.py +13 -11
  63. crackerjack/executors/lsp_aware_hook_executor.py +270 -0
  64. crackerjack/executors/tool_proxy.py +417 -0
  65. crackerjack/hooks/lsp_hook.py +79 -0
  66. crackerjack/intelligence/adaptive_learning.py +25 -10
  67. crackerjack/intelligence/agent_orchestrator.py +2 -5
  68. crackerjack/intelligence/agent_registry.py +34 -24
  69. crackerjack/intelligence/agent_selector.py +5 -7
  70. crackerjack/interactive.py +17 -6
  71. crackerjack/managers/async_hook_manager.py +0 -1
  72. crackerjack/managers/hook_manager.py +79 -1
  73. crackerjack/managers/publish_manager.py +44 -8
  74. crackerjack/managers/test_command_builder.py +1 -15
  75. crackerjack/managers/test_executor.py +1 -3
  76. crackerjack/managers/test_manager.py +98 -7
  77. crackerjack/managers/test_manager_backup.py +10 -9
  78. crackerjack/mcp/cache.py +2 -2
  79. crackerjack/mcp/client_runner.py +1 -1
  80. crackerjack/mcp/context.py +191 -68
  81. crackerjack/mcp/dashboard.py +7 -5
  82. crackerjack/mcp/enhanced_progress_monitor.py +31 -28
  83. crackerjack/mcp/file_monitor.py +30 -23
  84. crackerjack/mcp/progress_components.py +31 -21
  85. crackerjack/mcp/progress_monitor.py +50 -53
  86. crackerjack/mcp/rate_limiter.py +6 -6
  87. crackerjack/mcp/server_core.py +17 -16
  88. crackerjack/mcp/service_watchdog.py +2 -1
  89. crackerjack/mcp/state.py +4 -7
  90. crackerjack/mcp/task_manager.py +11 -9
  91. crackerjack/mcp/tools/core_tools.py +173 -32
  92. crackerjack/mcp/tools/error_analyzer.py +3 -2
  93. crackerjack/mcp/tools/execution_tools.py +8 -10
  94. crackerjack/mcp/tools/execution_tools_backup.py +42 -30
  95. crackerjack/mcp/tools/intelligence_tool_registry.py +7 -5
  96. crackerjack/mcp/tools/intelligence_tools.py +5 -2
  97. crackerjack/mcp/tools/monitoring_tools.py +33 -70
  98. crackerjack/mcp/tools/proactive_tools.py +24 -11
  99. crackerjack/mcp/tools/progress_tools.py +5 -8
  100. crackerjack/mcp/tools/utility_tools.py +20 -14
  101. crackerjack/mcp/tools/workflow_executor.py +62 -40
  102. crackerjack/mcp/websocket/app.py +8 -0
  103. crackerjack/mcp/websocket/endpoints.py +352 -357
  104. crackerjack/mcp/websocket/jobs.py +40 -57
  105. crackerjack/mcp/websocket/monitoring_endpoints.py +2935 -0
  106. crackerjack/mcp/websocket/server.py +7 -25
  107. crackerjack/mcp/websocket/websocket_handler.py +6 -17
  108. crackerjack/mixins/__init__.py +0 -2
  109. crackerjack/mixins/error_handling.py +1 -70
  110. crackerjack/models/config.py +12 -1
  111. crackerjack/models/config_adapter.py +49 -1
  112. crackerjack/models/protocols.py +122 -122
  113. crackerjack/models/resource_protocols.py +55 -210
  114. crackerjack/monitoring/ai_agent_watchdog.py +13 -13
  115. crackerjack/monitoring/metrics_collector.py +426 -0
  116. crackerjack/monitoring/regression_prevention.py +8 -8
  117. crackerjack/monitoring/websocket_server.py +643 -0
  118. crackerjack/orchestration/advanced_orchestrator.py +11 -6
  119. crackerjack/orchestration/coverage_improvement.py +3 -3
  120. crackerjack/orchestration/execution_strategies.py +26 -6
  121. crackerjack/orchestration/test_progress_streamer.py +8 -5
  122. crackerjack/plugins/base.py +2 -2
  123. crackerjack/plugins/hooks.py +7 -0
  124. crackerjack/plugins/managers.py +11 -8
  125. crackerjack/security/__init__.py +0 -1
  126. crackerjack/security/audit.py +6 -35
  127. crackerjack/services/anomaly_detector.py +392 -0
  128. crackerjack/services/api_extractor.py +615 -0
  129. crackerjack/services/backup_service.py +2 -2
  130. crackerjack/services/bounded_status_operations.py +15 -152
  131. crackerjack/services/cache.py +127 -1
  132. crackerjack/services/changelog_automation.py +395 -0
  133. crackerjack/services/config.py +15 -9
  134. crackerjack/services/config_merge.py +19 -80
  135. crackerjack/services/config_template.py +506 -0
  136. crackerjack/services/contextual_ai_assistant.py +48 -22
  137. crackerjack/services/coverage_badge_service.py +171 -0
  138. crackerjack/services/coverage_ratchet.py +27 -25
  139. crackerjack/services/debug.py +3 -3
  140. crackerjack/services/dependency_analyzer.py +460 -0
  141. crackerjack/services/dependency_monitor.py +14 -11
  142. crackerjack/services/documentation_generator.py +491 -0
  143. crackerjack/services/documentation_service.py +675 -0
  144. crackerjack/services/enhanced_filesystem.py +6 -5
  145. crackerjack/services/enterprise_optimizer.py +865 -0
  146. crackerjack/services/error_pattern_analyzer.py +676 -0
  147. crackerjack/services/file_hasher.py +1 -1
  148. crackerjack/services/git.py +8 -25
  149. crackerjack/services/health_metrics.py +10 -8
  150. crackerjack/services/heatmap_generator.py +735 -0
  151. crackerjack/services/initialization.py +11 -30
  152. crackerjack/services/input_validator.py +5 -97
  153. crackerjack/services/intelligent_commit.py +327 -0
  154. crackerjack/services/log_manager.py +15 -12
  155. crackerjack/services/logging.py +4 -3
  156. crackerjack/services/lsp_client.py +628 -0
  157. crackerjack/services/memory_optimizer.py +19 -87
  158. crackerjack/services/metrics.py +42 -33
  159. crackerjack/services/parallel_executor.py +9 -67
  160. crackerjack/services/pattern_cache.py +1 -1
  161. crackerjack/services/pattern_detector.py +6 -6
  162. crackerjack/services/performance_benchmarks.py +18 -59
  163. crackerjack/services/performance_cache.py +20 -81
  164. crackerjack/services/performance_monitor.py +27 -95
  165. crackerjack/services/predictive_analytics.py +510 -0
  166. crackerjack/services/quality_baseline.py +234 -0
  167. crackerjack/services/quality_baseline_enhanced.py +646 -0
  168. crackerjack/services/quality_intelligence.py +785 -0
  169. crackerjack/services/regex_patterns.py +618 -524
  170. crackerjack/services/regex_utils.py +43 -123
  171. crackerjack/services/secure_path_utils.py +5 -164
  172. crackerjack/services/secure_status_formatter.py +30 -141
  173. crackerjack/services/secure_subprocess.py +11 -92
  174. crackerjack/services/security.py +9 -41
  175. crackerjack/services/security_logger.py +12 -24
  176. crackerjack/services/server_manager.py +124 -16
  177. crackerjack/services/status_authentication.py +16 -159
  178. crackerjack/services/status_security_manager.py +4 -131
  179. crackerjack/services/thread_safe_status_collector.py +19 -125
  180. crackerjack/services/unified_config.py +21 -13
  181. crackerjack/services/validation_rate_limiter.py +5 -54
  182. crackerjack/services/version_analyzer.py +459 -0
  183. crackerjack/services/version_checker.py +1 -1
  184. crackerjack/services/websocket_resource_limiter.py +10 -144
  185. crackerjack/services/zuban_lsp_service.py +390 -0
  186. crackerjack/slash_commands/__init__.py +2 -7
  187. crackerjack/slash_commands/run.md +2 -2
  188. crackerjack/tools/validate_input_validator_patterns.py +14 -40
  189. crackerjack/tools/validate_regex_patterns.py +19 -48
  190. {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/METADATA +196 -25
  191. crackerjack-0.33.2.dist-info/RECORD +229 -0
  192. crackerjack/CLAUDE.md +0 -207
  193. crackerjack/RULES.md +0 -380
  194. crackerjack/py313.py +0 -234
  195. crackerjack-0.33.0.dist-info/RECORD +0 -187
  196. {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/WHEEL +0 -0
  197. {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/entry_points.txt +0 -0
  198. {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/licenses/LICENSE +0 -0
crackerjack/services/memory_optimizer.py

@@ -1,9 +1,3 @@
- """Memory optimization service with lazy loading and resource management.
-
- This module provides memory-efficient patterns for managing heavy resources,
- lazy loading, and memory profiling capabilities.
- """
-
  import gc
  import sys
  import time
@@ -20,8 +14,6 @@ from crackerjack.services.logging import get_logger

  @dataclass
  class MemoryStats:
- """Memory usage statistics."""
-
  total_allocated_mb: float
  peak_usage_mb: float
  current_usage_mb: float
@@ -32,8 +24,6 @@ class MemoryStats:


  class LazyLoader:
- """Lazy loader for expensive resources."""
-
  def __init__(
  self,
  factory: Callable[[], Any],
@@ -50,22 +40,18 @@ class LazyLoader:
  self._last_access = time.time()
  self._logger = get_logger(f"crackerjack.lazy_loader.{name}")

- # Register for memory tracking
  MemoryOptimizer.get_instance().register_lazy_object(self)

  @property
  def is_loaded(self) -> bool:
- """Check if resource is loaded."""
  with self._lock:
  return self._loaded

  @property
  def access_count(self) -> int:
- """Get access count."""
  return self._access_count

  def get(self) -> Any:
- """Get the loaded resource, loading if necessary."""
  with self._lock:
  if not self._loaded:
  self._logger.debug(f"Lazy loading resource: {self._name}")
@@ -75,9 +61,8 @@ class LazyLoader:
  self._value = self._factory()
  self._loaded = True
  load_time = time.time() - start_time
- self._logger.debug(f"Loaded {self._name} in {load_time:.3f}s")
+ self._logger.debug(f"Loaded {self._name} in {load_time: .3f}s")

- # Register for memory tracking
  MemoryOptimizer.get_instance().notify_lazy_load(self._name)

  except Exception as e:
@@ -93,12 +78,10 @@ class LazyLoader:
  return self._value

  def dispose(self) -> None:
- """Dispose of the loaded resource."""
  with self._lock:
  if self._loaded and self._value is not None:
  self._logger.debug(f"Disposing lazy resource: {self._name}")

- # If the object has a cleanup method, call it
  if hasattr(self._value, "close"):
  try:
  self._value.close()
@@ -108,18 +91,14 @@ class LazyLoader:
  self._value = None
  self._loaded = False

- # Force garbage collection for this object
  gc.collect()

- def __del__(self):
- """Clean up on deletion."""
+ def __del__(self) -> None:
  if self._auto_dispose:
  self.dispose()


  class ResourcePool:
- """Pool for reusable expensive objects."""
-
  def __init__(
  self,
  factory: Callable[[], Any],
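Below is a minimal usage sketch (not part of this diff) of the LazyLoader API shown in the hunks above. It assumes the positional LazyLoader(factory, name) construction used by create_lazy_service() later in this diff, and that get(), is_loaded and dispose() are unchanged in 0.33.2.

from crackerjack.services.memory_optimizer import LazyLoader

def build_client() -> dict:
    # Stand-in for an expensive resource (DB handle, parser, model, ...).
    return {"connected": True}

client_loader = LazyLoader(build_client, "demo_client")  # registers itself with MemoryOptimizer

assert not client_loader.is_loaded   # factory has not run yet
client = client_loader.get()         # first access runs the factory and logs the load time
assert client_loader.is_loaded
client_loader.dispose()              # drops the cached value and calls close() if it exists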
@@ -137,7 +116,6 @@ class ResourcePool:
  self._logger = get_logger(f"crackerjack.resource_pool.{name}")

  def acquire(self) -> Any:
- """Acquire a resource from the pool."""
  with self._lock:
  if self._pool:
  resource = self._pool.pop()
@@ -153,7 +131,6 @@ class ResourcePool:
  return resource

  def release(self, resource: Any) -> None:
- """Release a resource back to the pool."""
  with self._lock:
  if resource in self._in_use:
  self._in_use.discard(resource)
@@ -162,7 +139,6 @@ class ResourcePool:
  self._pool.append(resource)
  self._logger.debug(f"Returned resource to {self._name} pool")
  else:
- # Pool is full, dispose of resource
  if hasattr(resource, "close"):
  try:
  resource.close()
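A similarly hedged sketch of the ResourcePool acquire/release cycle shown above, using the positional ResourcePool(factory, max_size, name) construction seen later in create_resource_pool(); the buffer factory is invented, and get_stats() (a few hunks below) is assumed to report the "pool_size" and efficiency counters referenced elsewhere in this diff.

from crackerjack.services.memory_optimizer import ResourcePool

def make_buffer() -> bytearray:
    return bytearray(1024)

pool = ResourcePool(make_buffer, 2, "buffers")

buf = pool.acquire()        # reuses a pooled object or builds a new one via the factory
try:
    buf[:5] = b"hello"
finally:
    pool.release(buf)       # returned to the pool, or closed if the pool is already full

print(pool.get_stats())     # pool counters, e.g. "pool_size"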
@@ -174,7 +150,6 @@ class ResourcePool:
  )

  def clear(self) -> None:
- """Clear all resources from the pool."""
  with self._lock:
  for resource in self._pool:
  if hasattr(resource, "close"):
@@ -187,7 +162,6 @@ class ResourcePool:
  self._logger.info(f"Cleared {self._name} resource pool")

  def get_stats(self) -> dict[str, Any]:
- """Get pool statistics."""
  with self._lock:
  return {
  "pool_size": len(self._pool),
@@ -203,23 +177,19 @@ class ResourcePool:


  class MemoryProfiler:
- """Simple memory profiler for performance monitoring."""
-
- def __init__(self):
+ def __init__(self) -> None:
  self._start_memory = 0.0
  self._peak_memory = 0.0
  self._measurements: list[tuple[float, float]] = []
  self._logger = get_logger("crackerjack.memory_profiler")

  def start_profiling(self) -> None:
- """Start memory profiling."""
  self._start_memory = self._get_memory_usage()
  self._peak_memory = self._start_memory
  self._measurements.clear()
- self._logger.debug(f"Started memory profiling at {self._start_memory:.2f} MB")
+ self._logger.debug(f"Started memory profiling at {self._start_memory: .2f} MB")

  def record_checkpoint(self, name: str = "") -> float:
- """Record memory checkpoint."""
  current_memory = self._get_memory_usage()
  self._peak_memory = max(self._peak_memory, current_memory)

@@ -227,12 +197,11 @@ class MemoryProfiler:
  self._measurements.append((timestamp, current_memory))

  if name:
- self._logger.debug(f"Memory checkpoint '{name}': {current_memory:.2f} MB")
+ self._logger.debug(f"Memory checkpoint '{name}': {current_memory: .2f} MB")

  return current_memory

  def get_summary(self) -> dict[str, Any]:
- """Get profiling summary."""
  if not self._measurements:
  return {}

@@ -248,85 +217,72 @@ class MemoryProfiler:
  }

  def _get_memory_usage(self) -> float:
- """Get current memory usage in MB."""
  try:
  import os

  import psutil

  process = psutil.Process(os.getpid())
- return process.memory_info().rss / 1024 / 1024
+ memory_mb: float = process.memory_info().rss / 1024 / 1024
+ return memory_mb
  except ImportError:
- # Fallback to tracemalloc if psutil not available
  import tracemalloc

  if tracemalloc.is_tracing():
  current, _peak = tracemalloc.get_traced_memory()
  return current / 1024 / 1024
  else:
- # Basic fallback using sys.getsizeof (less accurate)
  return sys.getsizeof(gc.get_objects()) / 1024 / 1024


  class MemoryOptimizer:
- """Central memory optimization coordinator."""
-
  _instance: t.Optional["MemoryOptimizer"] = None
  _lock = Lock()

- def __init__(self):
+ def __init__(self) -> None:
  self._lazy_objects: WeakSet[t.Any] = WeakSet()
  self._resource_pools: dict[str, ResourcePool] = {}
  self._profiler = MemoryProfiler()
  self._stats_lock = Lock()
  self._lazy_created_count = 0
  self._lazy_loaded_count = 0
- self._gc_threshold = 100 # MB
+ self._gc_threshold = 100
  self._auto_gc = True
  self._logger = get_logger("crackerjack.memory_optimizer")

  @classmethod
  def get_instance(cls) -> "MemoryOptimizer":
- """Get singleton instance."""
  with cls._lock:
  if cls._instance is None:
  cls._instance = cls()
  return cls._instance

  def register_lazy_object(self, lazy_obj: LazyLoader) -> None:
- """Register a lazy object for tracking."""
  self._lazy_objects.add(lazy_obj)
  with self._stats_lock:
  self._lazy_created_count += 1

  def notify_lazy_load(self, name: str) -> None:
- """Notify that a lazy object was loaded."""
  with self._stats_lock:
  self._lazy_loaded_count += 1

- # Check if we should trigger garbage collection
  if self._auto_gc and self._should_run_gc():
  self._run_memory_cleanup()

  def register_resource_pool(self, name: str, pool: ResourcePool) -> None:
- """Register a resource pool."""
  self._resource_pools[name] = pool
  self._logger.debug(f"Registered resource pool: {name}")

  def get_resource_pool(self, name: str) -> ResourcePool | None:
- """Get a registered resource pool."""
  return self._resource_pools.get(name)

  def start_profiling(self) -> None:
- """Start memory profiling."""
  self._profiler.start_profiling()

  def record_checkpoint(self, name: str = "") -> float:
- """Record memory checkpoint."""
  return self._profiler.record_checkpoint(name)

  def get_memory_stats(self) -> MemoryStats:
- """Get comprehensive memory statistics."""
  profiler_stats = self._profiler.get_summary()

  with self._stats_lock:
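For the MemoryProfiler above, the checkpoint flow is start_profiling(), one or more record_checkpoint() calls, then get_summary(). A small sketch follows; the workload is invented, and only the "current_memory_mb" summary key is confirmed by this diff.

from crackerjack.services.memory_optimizer import MemoryProfiler

profiler = MemoryProfiler()
profiler.start_profiling()                  # baseline via psutil, tracemalloc, or sys.getsizeof fallback

scratch = [list(range(10_000)) for _ in range(100)]   # simulate an allocation
profiler.record_checkpoint("after_alloc")   # appends a (timestamp, MB) measurement and logs it

print(profiler.get_summary().get("current_memory_mb"))
del scratch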
@@ -341,52 +297,43 @@ class MemoryOptimizer:
  )

  def optimize_memory(self) -> None:
- """Run memory optimization."""
  self._logger.info("Running memory optimization")

- # Dispose unused lazy objects
  self._cleanup_lazy_objects()

- # Clear resource pools if needed
  self._cleanup_resource_pools()

- # Force garbage collection
  collected = gc.collect()
  self._logger.debug(f"Garbage collection freed {collected} objects")

  def _should_run_gc(self) -> bool:
- """Check if garbage collection should be triggered."""
  current_memory = self._profiler.get_summary().get("current_memory_mb", 0)
- return current_memory > self._gc_threshold
+ should_gc: bool = current_memory > self._gc_threshold
+ return should_gc

  def _run_memory_cleanup(self) -> None:
- """Run memory cleanup operations."""
  self._logger.debug("Running automatic memory cleanup")

- # Collect garbage
  before_gc = self._profiler._get_memory_usage()
  collected = gc.collect()
  after_gc = self._profiler._get_memory_usage()

  memory_freed = before_gc - after_gc

- if memory_freed > 1.0: # More than 1MB freed
+ if memory_freed > 1.0:
  self._logger.info(
- f"Memory cleanup freed {memory_freed:.2f} MB ({collected} objects)"
+ f"Memory cleanup freed {memory_freed: .2f} MB ({collected} objects)"
  )

  def _cleanup_lazy_objects(self) -> None:
- """Clean up unused lazy objects."""
  disposed_count = 0

- # Convert to list to avoid modification during iteration
- lazy_objects = list(self._lazy_objects)
+ lazy_objects = list[t.Any](self._lazy_objects)

  for lazy_obj in lazy_objects:
- # Dispose objects that haven't been accessed recently
  if (
  hasattr(lazy_obj, "_last_access")
- and lazy_obj._last_access < time.time() - 300 # 5 minutes
+ and lazy_obj._last_access < time.time() - 300
  and lazy_obj.is_loaded
  ):
  lazy_obj.dispose()
@@ -396,19 +343,15 @@ class MemoryOptimizer:
  self._logger.debug(f"Disposed {disposed_count} unused lazy objects")

  def _cleanup_resource_pools(self) -> None:
- """Clean up resource pools."""
  for name, pool in self._resource_pools.items():
  stats = pool.get_stats()

- # Clear pool if efficiency is very low (lots of created, few reused)
  if stats["efficiency"] < 0.1 and stats["created_total"] > 10:
  pool.clear()
  self._logger.debug(f"Cleared inefficient resource pool: {name}")


- def lazy_property(factory: t.Callable[[], Any]) -> t.Callable[[t.Any], Any]:
- """Decorator for lazy property loading."""
-
+ def lazy_property(factory: t.Callable[[], t.Any]) -> property:
  def decorator(self: t.Any) -> Any:
  attr_name = f"_lazy_{factory.__name__}"

@@ -418,51 +361,41 @@ def lazy_property(factory: t.Callable[[], Any]) -> t.Callable[[t.Any], Any]:

  return getattr(self, attr_name).get()

- return property(decorator) # type: ignore[return-value]
+ return property(decorator)


  def memory_optimized(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
- """Decorator to add memory optimization to functions."""
-
  @wraps(func)
  def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any:
  optimizer = MemoryOptimizer.get_instance()

- # Record memory before function execution
  before_memory = optimizer.record_checkpoint(f"{func.__name__}_start")

  try:
  result = func(*args, **kwargs)

- # Record memory after function execution
  after_memory = optimizer.record_checkpoint(f"{func.__name__}_end")

- # Log significant memory increases
  memory_delta = after_memory - before_memory
- if memory_delta > 10.0: # More than 10MB increase
+ if memory_delta > 10.0:
  optimizer._logger.warning(
- f"Function {func.__name__} increased memory by {memory_delta:.2f} MB"
+ f"Function {func.__name__} increased memory by {memory_delta: .2f} MB"
  )

  return result

  finally:
- # Run cleanup if memory usage is high
  if optimizer._should_run_gc():
  optimizer._run_memory_cleanup()

  return wrapper


- # Global optimizer instance
  def get_memory_optimizer() -> MemoryOptimizer:
- """Get global memory optimizer instance."""
  return MemoryOptimizer.get_instance()


- # Factory functions for common patterns
  def create_lazy_service(factory: Callable[[], Any], name: str) -> LazyLoader:
- """Create a lazy-loaded service."""
  return LazyLoader(factory, name)


@@ -471,7 +404,6 @@ def create_resource_pool(
  max_size: int = 5,
  name: str = "unnamed",
  ) -> ResourcePool:
- """Create a resource pool and register it."""
  pool = ResourcePool(factory, max_size, name)
  MemoryOptimizer.get_instance().register_resource_pool(name, pool)
  return pool
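Rounding out memory_optimizer.py, a hedged sketch of the module-level helpers kept above (memory_optimized, get_memory_optimizer, create_lazy_service); the decorated function body is illustrative only.

from crackerjack.services.memory_optimizer import (
    get_memory_optimizer,
    memory_optimized,
)

@memory_optimized                  # checkpoints memory around the call and warns on >10 MB growth
def load_report() -> list[int]:
    return list(range(1_000_000))  # invented workload

rows = load_report()
stats = get_memory_optimizer().get_memory_stats()   # returns the MemoryStats dataclass
print(len(rows), stats)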
crackerjack/services/metrics.py

@@ -1,6 +1,7 @@
  import json
  import sqlite3
  import threading
+ import typing as t
  from contextlib import contextmanager
  from datetime import date, datetime
  from pathlib import Path
@@ -21,45 +22,45 @@ class MetricsCollector:
  def _init_database(self) -> None:
  with self._get_connection() as conn:
  conn.executescript("""
- - - Jobs table
+ -- Jobs table
  CREATE TABLE IF NOT EXISTS jobs (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT UNIQUE NOT NULL,
  start_time TIMESTAMP NOT NULL,
  end_time TIMESTAMP,
- status TEXT NOT NULL, - - 'running', 'success', 'failed', 'cancelled'
+ status TEXT NOT NULL, -- 'running', 'success', 'failed', 'cancelled'
  iterations INTEGER DEFAULT 0,
  ai_agent BOOLEAN DEFAULT 0,
  error_message TEXT,
- metadata TEXT - - JSON field for additional data
+ metadata TEXT -- JSON field for additional data
  );

- - - Errors table
+ -- Errors table
  CREATE TABLE IF NOT EXISTS errors (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
  timestamp TIMESTAMP NOT NULL,
- error_type TEXT NOT NULL, - - 'hook', 'test', 'lint', 'type_check', etc.
- error_category TEXT, - - 'ruff', 'pyright', 'pytest', etc.
+ error_type TEXT NOT NULL, -- 'hook', 'test', 'lint', 'type_check', etc.
+ error_category TEXT, -- 'ruff', 'pyright', 'pytest', etc.
  error_message TEXT,
  file_path TEXT,
  line_number INTEGER,
  FOREIGN KEY (job_id) REFERENCES jobs(job_id)
  );

- - - Hook executions table
+ -- Hook executions table
  CREATE TABLE IF NOT EXISTS hook_executions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
  timestamp TIMESTAMP NOT NULL,
  hook_name TEXT NOT NULL,
- hook_type TEXT, - - 'fast', 'comprehensive'
+ hook_type TEXT, -- 'fast', 'comprehensive'
  execution_time_ms INTEGER,
- status TEXT, - - 'success', 'failed', 'skipped'
+ status TEXT, -- 'success', 'failed', 'skipped'
  FOREIGN KEY (job_id) REFERENCES jobs(job_id)
  );

- - - Test executions table
+ -- Test executions table
  CREATE TABLE IF NOT EXISTS test_executions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
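The substantive fix in these schema hunks is the SQL comment syntax inside _init_database(): the old "- -" is not a valid SQLite comment token, while "--" is. A standalone sketch of the corrected form (trimmed to two columns, not the real schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
-- Jobs table (valid SQLite line comment)
CREATE TABLE IF NOT EXISTS jobs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    status TEXT NOT NULL  -- 'running', 'success', 'failed', 'cancelled'
);
""")
conn.execute("INSERT INTO jobs (status) VALUES (?)", ("success",))
print(conn.execute("SELECT status FROM jobs").fetchone()[0])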
@@ -73,17 +74,17 @@ class MetricsCollector:
  FOREIGN KEY (job_id) REFERENCES jobs(job_id)
  );

- - - Orchestration executions table (NEW)
+ -- Orchestration executions table (NEW)
  CREATE TABLE IF NOT EXISTS orchestration_executions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
  timestamp TIMESTAMP NOT NULL,
- execution_strategy TEXT NOT NULL, - - 'batch', 'individual', 'adaptive', 'selective'
- progress_level TEXT NOT NULL, - - 'basic', 'detailed', 'granular', 'streaming'
- ai_mode TEXT NOT NULL, - - 'single - agent', 'multi - agent', 'coordinator'
+ execution_strategy TEXT NOT NULL, -- 'batch', 'individual', 'adaptive', 'selective'
+ progress_level TEXT NOT NULL, -- 'basic', 'detailed', 'granular', 'streaming'
+ ai_mode TEXT NOT NULL, -- 'single-agent', 'multi-agent', 'coordinator'
  iteration_count INTEGER DEFAULT 1,
- strategy_switches INTEGER DEFAULT 0, - - How many times strategy changed
- correlation_insights TEXT, - - JSON of correlation analysis results
+ strategy_switches INTEGER DEFAULT 0, -- How many times strategy changed
+ correlation_insights TEXT, -- JSON of correlation analysis results
  total_execution_time_ms INTEGER,
  hooks_execution_time_ms INTEGER,
  tests_execution_time_ms INTEGER,
@@ -91,7 +92,7 @@ class MetricsCollector:
  FOREIGN KEY (job_id) REFERENCES jobs(job_id)
  );

- - - Strategy decisions table (NEW)
+ -- Strategy decisions table (NEW)
  CREATE TABLE IF NOT EXISTS strategy_decisions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
@@ -99,29 +100,29 @@ class MetricsCollector:
  timestamp TIMESTAMP NOT NULL,
  previous_strategy TEXT,
  selected_strategy TEXT NOT NULL,
- decision_reason TEXT, - - Why this strategy was chosen
- context_data TEXT, - - JSON of execution context
- effectiveness_score REAL, - - How well the strategy worked (0 - 1)
+ decision_reason TEXT, -- Why this strategy was chosen
+ context_data TEXT, -- JSON of execution context
+ effectiveness_score REAL, -- How well the strategy worked (0 - 1)
  FOREIGN KEY (job_id) REFERENCES jobs(job_id)
  );

- - - Individual test executions table (NEW - more granular than test_executions)
+ -- Individual test executions table (NEW - more granular than test_executions)
  CREATE TABLE IF NOT EXISTS individual_test_executions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
  timestamp TIMESTAMP NOT NULL,
- test_id TEXT NOT NULL, - - Full test identifier
+ test_id TEXT NOT NULL, -- Full test identifier
  test_file TEXT NOT NULL,
  test_class TEXT,
  test_method TEXT,
- status TEXT NOT NULL, - - 'passed', 'failed', 'skipped', 'error'
+ status TEXT NOT NULL, -- 'passed', 'failed', 'skipped', 'error'
  execution_time_ms INTEGER,
  error_message TEXT,
  error_traceback TEXT,
  FOREIGN KEY (job_id) REFERENCES jobs(job_id)
  );

- - - Daily summary table (for quick stats)
+ -- Daily summary table (for quick stats)
  CREATE TABLE IF NOT EXISTS daily_summary (
  date DATE PRIMARY KEY,
  total_jobs INTEGER DEFAULT 0,
@@ -134,9 +135,9 @@ class MetricsCollector:
  type_errors INTEGER DEFAULT 0,
  avg_job_duration_ms INTEGER,
  total_ai_fixes INTEGER DEFAULT 0,
- orchestrated_jobs INTEGER DEFAULT 0, - - NEW
- avg_orchestration_iterations REAL DEFAULT 0, - - NEW
- most_effective_strategy TEXT - - NEW
+ orchestrated_jobs INTEGER DEFAULT 0, -- NEW
+ avg_orchestration_iterations REAL DEFAULT 0, -- NEW
+ most_effective_strategy TEXT -- NEW
  );

  --Create indexes for performance
@@ -152,7 +153,7 @@ class MetricsCollector:
  """)

  @contextmanager
- def _get_connection(self):
+ def _get_connection(self) -> t.Iterator[sqlite3.Connection]:
  conn = sqlite3.connect(str(self.db_path))
  conn.row_factory = sqlite3.Row
  try:
@@ -442,10 +443,18 @@ class MetricsCollector:
  """).fetchall()

  return {
- "strategy_effectiveness": [dict(row) for row in strategy_stats],
- "correlation_patterns": [dict(row) for row in correlation_patterns],
- "performance_by_strategy": [dict(row) for row in performance_stats],
- "test_failure_patterns": [dict(row) for row in test_failure_patterns],
+ "strategy_effectiveness": [
+ dict[str, t.Any](row) for row in strategy_stats
+ ],
+ "correlation_patterns": [
+ dict[str, t.Any](row) for row in correlation_patterns
+ ],
+ "performance_by_strategy": [
+ dict[str, t.Any](row) for row in performance_stats
+ ],
+ "test_failure_patterns": [
+ dict[str, t.Any](row) for row in test_failure_patterns
+ ],
  }

  def _update_daily_summary(
@@ -564,7 +573,7 @@ class MetricsCollector:
  "error_breakdown": {
  row["error_type"]: row["count"] for row in error_stats
  },
- "common_errors": [dict(row) for row in common_errors],
+ "common_errors": [dict[str, t.Any](row) for row in common_errors],
  }

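The remaining hunks tighten typing: _get_connection() is now annotated to yield sqlite3.Connection, and row conversions use dict[str, t.Any](row). A self-contained sketch of that row-factory pattern, using an invented helper and a throwaway table rather than the real metrics schema:

import sqlite3
import typing as t
from contextlib import contextmanager

@contextmanager
def get_connection(path: str = ":memory:") -> t.Iterator[sqlite3.Connection]:
    conn = sqlite3.connect(path)
    conn.row_factory = sqlite3.Row          # rows behave like mappings
    try:
        yield conn
    finally:
        conn.close()

with get_connection() as conn:
    conn.execute("CREATE TABLE errors (error_type TEXT, error_count INTEGER)")
    conn.execute("INSERT INTO errors VALUES ('hook', 3)")
    rows = conn.execute("SELECT error_type, error_count FROM errors").fetchall()
    print([dict[str, t.Any](row) for row in rows])   # same conversion style as the diff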