crackerjack 0.32.0__py3-none-any.whl → 0.33.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/core/enhanced_container.py +67 -0
- crackerjack/core/phase_coordinator.py +183 -44
- crackerjack/core/workflow_orchestrator.py +459 -138
- crackerjack/managers/publish_manager.py +22 -5
- crackerjack/managers/test_command_builder.py +4 -2
- crackerjack/managers/test_manager.py +15 -4
- crackerjack/mcp/server_core.py +162 -34
- crackerjack/mcp/tools/core_tools.py +1 -1
- crackerjack/mcp/tools/execution_tools.py +8 -3
- crackerjack/mixins/__init__.py +5 -0
- crackerjack/mixins/error_handling.py +214 -0
- crackerjack/models/config.py +9 -0
- crackerjack/models/protocols.py +69 -0
- crackerjack/models/task.py +3 -0
- crackerjack/security/__init__.py +1 -1
- crackerjack/security/audit.py +92 -78
- crackerjack/services/config.py +3 -2
- crackerjack/services/config_merge.py +11 -5
- crackerjack/services/coverage_ratchet.py +22 -0
- crackerjack/services/git.py +37 -24
- crackerjack/services/initialization.py +25 -9
- crackerjack/services/memory_optimizer.py +477 -0
- crackerjack/services/parallel_executor.py +474 -0
- crackerjack/services/performance_benchmarks.py +292 -577
- crackerjack/services/performance_cache.py +443 -0
- crackerjack/services/performance_monitor.py +633 -0
- crackerjack/services/security.py +63 -0
- crackerjack/services/security_logger.py +9 -1
- crackerjack/services/terminal_utils.py +0 -0
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/METADATA +2 -2
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/RECORD +34 -27
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/WHEEL +0 -0
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -33,7 +33,31 @@ class InitializationService:
|
|
|
33
33
|
console, filesystem, git_service
|
|
34
34
|
)
|
|
35
35
|
|
|
36
|
-
def initialize_project(
|
|
36
|
+
def initialize_project(self, project_path: str | Path) -> bool:
|
|
37
|
+
"""Protocol method: Initialize project at given path."""
|
|
38
|
+
try:
|
|
39
|
+
result = self.initialize_project_full(Path(project_path))
|
|
40
|
+
return result.get("success", False)
|
|
41
|
+
except Exception:
|
|
42
|
+
return False
|
|
43
|
+
|
|
44
|
+
def setup_git_hooks(self) -> bool:
    """Protocol method: Setup git hooks.

    Currently a stub: no hooks are actually installed yet, so this
    always reports success.  The previous ``try``/``except`` wrapper was
    dead code — a bare ``return True`` cannot raise — and has been
    removed.
    """
    # TODO: install real pre-commit / pre-push hooks here.
    return True
|
|
51
|
+
|
|
52
|
+
def validate_project_structure(self) -> bool:
    """Protocol method: Validate project structure.

    A project root is considered valid when it contains at least one
    standard Python packaging indicator.  This restores the real check
    that the always-True stub replaced (the removed implementation is
    visible elsewhere in this change); the defensive ``except`` keeps
    the protocol's never-raise contract.
    """
    try:
        # Either a modern (pyproject.toml) or legacy (setup.py)
        # packaging file marks a valid project root.
        required_indicators = [
            self.pkg_path / "pyproject.toml",
            self.pkg_path / "setup.py",
        ]
        return any(path.exists() for path in required_indicators)
    except Exception:
        return False
|
|
59
|
+
|
|
60
|
+
def initialize_project_full(
|
|
37
61
|
self,
|
|
38
62
|
target_path: Path | None = None,
|
|
39
63
|
force: bool = False,
|
|
@@ -382,14 +406,6 @@ class InitializationService:
|
|
|
382
406
|
except Exception as e:
|
|
383
407
|
self.console.print(f"[yellow]⚠️[/ yellow] Could not git add .mcp.json: {e}")
|
|
384
408
|
|
|
385
|
-
def validate_project_structure(self) -> bool:
|
|
386
|
-
required_indicators = [
|
|
387
|
-
self.pkg_path / "pyproject.toml",
|
|
388
|
-
self.pkg_path / "setup.py",
|
|
389
|
-
]
|
|
390
|
-
|
|
391
|
-
return any(path.exists() for path in required_indicators)
|
|
392
|
-
|
|
393
409
|
def _generate_project_claude_content(self, project_name: str) -> str:
|
|
394
410
|
return """
|
|
395
411
|
|
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
"""Memory optimization service with lazy loading and resource management.
|
|
2
|
+
|
|
3
|
+
This module provides memory-efficient patterns for managing heavy resources,
|
|
4
|
+
lazy loading, and memory profiling capabilities.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import gc
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
import typing as t
|
|
11
|
+
from collections.abc import Callable
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from functools import wraps
|
|
14
|
+
from threading import Lock
|
|
15
|
+
from typing import Any
|
|
16
|
+
from weakref import WeakSet
|
|
17
|
+
|
|
18
|
+
from crackerjack.services.logging import get_logger
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass
class MemoryStats:
    """Memory usage statistics.

    Snapshot assembled by ``MemoryOptimizer.get_memory_stats`` combining
    the profiler's process-memory view with counters for the lazy-loading
    and resource-pooling machinery.
    """

    # NOTE(review): populated from the profiler's peak value, so in
    # practice this mirrors peak_usage_mb — true cumulative allocation
    # is not tracked.
    total_allocated_mb: float
    # Highest memory reading seen since profiling started (MB).
    peak_usage_mb: float
    # Most recent memory reading (MB).
    current_usage_mb: float
    # Number of GC generation stat entries (not collection runs).
    gc_collections: int
    # LazyLoader instances registered with the optimizer.
    lazy_objects_created: int
    # LazyLoader instances whose factory has actually run.
    lazy_objects_loaded: int
    # ResourcePool instances currently registered.
    resource_pools_active: int
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class LazyLoader:
    """Lazy loader for expensive resources.

    Wraps a zero-argument ``factory`` and defers invoking it until the
    first ``get()``.  All state transitions happen under an internal
    lock, so concurrent first access loads the factory exactly once.
    Instances register themselves with the global ``MemoryOptimizer``
    so creation/load counts can be tracked.

    Fix: ``__del__`` previously called ``dispose()`` unguarded; during
    interpreter shutdown (modules torn down) or on partially-initialized
    instances that raised noisy "exception ignored in __del__" errors.
    It is now wrapped in a broad try/except, which is the correct
    posture inside a finalizer.
    """

    def __init__(
        self,
        factory: Callable[[], Any],
        name: str = "unnamed",
        auto_dispose: bool = True,
    ):
        self._factory = factory
        self._name = name
        self._auto_dispose = auto_dispose
        self._value: Any | None = None
        self._loaded = False
        self._lock = Lock()
        self._access_count = 0
        self._last_access = time.time()
        self._logger = get_logger(f"crackerjack.lazy_loader.{name}")

        # Register for memory tracking
        MemoryOptimizer.get_instance().register_lazy_object(self)

    @property
    def is_loaded(self) -> bool:
        """Check if resource is loaded."""
        with self._lock:
            return self._loaded

    @property
    def access_count(self) -> int:
        """Get access count."""
        return self._access_count

    def get(self) -> Any:
        """Get the loaded resource, loading if necessary.

        Raises:
            RuntimeError: if the factory produced ``None``.
            Exception: whatever the factory itself raises on failure
                (re-raised after logging; the loader stays unloaded).
        """
        with self._lock:
            if not self._loaded:
                self._logger.debug(f"Lazy loading resource: {self._name}")
                start_time = time.time()

                try:
                    self._value = self._factory()
                    self._loaded = True
                    load_time = time.time() - start_time
                    self._logger.debug(f"Loaded {self._name} in {load_time:.3f}s")

                    # Register for memory tracking
                    MemoryOptimizer.get_instance().notify_lazy_load(self._name)

                except Exception as e:
                    self._logger.error(f"Failed to load {self._name}: {e}")
                    raise

            self._access_count += 1
            self._last_access = time.time()

            if self._value is None:
                raise RuntimeError(f"Lazy loader {self._name} has no value")

            return self._value

    def dispose(self) -> None:
        """Dispose of the loaded resource and reset to the unloaded state."""
        with self._lock:
            if self._loaded and self._value is not None:
                self._logger.debug(f"Disposing lazy resource: {self._name}")

                # If the object has a cleanup method, call it
                if hasattr(self._value, "close"):
                    try:
                        self._value.close()
                    except Exception as e:
                        self._logger.warning(f"Error closing {self._name}: {e}")

                self._value = None
                self._loaded = False

                # Force garbage collection for this object
                gc.collect()

    def __del__(self):
        """Clean up on deletion.

        Guarded: the lock, logger, or ``gc`` module may already be torn
        down at interpreter shutdown, and exceptions escaping ``__del__``
        are unrecoverable noise — swallow them.
        """
        try:
            if self._auto_dispose:
                self.dispose()
        except Exception:
            pass
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class ResourcePool:
    """Pool for reusable expensive objects.

    Hands out objects built by ``factory``; released objects are kept
    (up to ``max_size``) for later reuse instead of being rebuilt.
    Objects currently handed out are tracked weakly, so a caller that
    drops one without releasing it does not leak the pool slot.
    """

    def __init__(
        self,
        factory: Callable[[], Any],
        max_size: int = 5,
        name: str = "unnamed",
    ):
        self._factory = factory
        self._max_size = max_size
        self._name = name
        self._pool: list[Any] = []
        self._in_use: WeakSet[t.Any] = WeakSet()
        self._lock = Lock()
        self._created_count = 0
        self._reused_count = 0
        self._logger = get_logger(f"crackerjack.resource_pool.{name}")

    def acquire(self) -> Any:
        """Take a resource from the pool, building a new one if empty."""
        with self._lock:
            if not self._pool:
                item = self._factory()
                self._in_use.add(item)
                self._created_count += 1
                self._logger.debug(f"Created new resource for {self._name} pool")
                return item
            item = self._pool.pop()
            self._in_use.add(item)
            self._reused_count += 1
            self._logger.debug(f"Reused resource from {self._name} pool")
            return item

    def release(self, resource: Any) -> None:
        """Return a resource; keep it if there is room, else dispose of it."""
        with self._lock:
            if resource not in self._in_use:
                # Unknown (or already-released) resource: ignore it.
                return
            self._in_use.discard(resource)

            if len(self._pool) < self._max_size:
                self._pool.append(resource)
                self._logger.debug(f"Returned resource to {self._name} pool")
                return

            # Pool is full, dispose of resource
            if hasattr(resource, "close"):
                try:
                    resource.close()
                except Exception as e:
                    self._logger.warning(f"Error closing resource: {e}")

            self._logger.debug(
                f"Pool full, disposed resource from {self._name}"
            )

    def clear(self) -> None:
        """Close and drop every idle resource held by the pool."""
        with self._lock:
            for item in self._pool:
                if hasattr(item, "close"):
                    try:
                        item.close()
                    except Exception as e:
                        self._logger.warning(f"Error closing pooled resource: {e}")

            self._pool.clear()
            self._logger.info(f"Cleared {self._name} resource pool")

    def get_stats(self) -> dict[str, Any]:
        """Get pool statistics."""
        with self._lock:
            handed_out = self._created_count + self._reused_count
            return {
                "pool_size": len(self._pool),
                "in_use": len(self._in_use),
                "created_total": self._created_count,
                "reused_total": self._reused_count,
                # Share of acquisitions served from the pool (0.0 when unused).
                "efficiency": (
                    self._reused_count / handed_out if handed_out > 0 else 0.0
                ),
            }
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
class MemoryProfiler:
    """Simple memory profiler for performance monitoring.

    Tracks a baseline, a running peak, and timestamped checkpoints of
    process memory usage (MB).  Not thread-safe; intended for coarse
    before/after measurements.
    """

    def __init__(self):
        self._start_memory = 0.0
        self._peak_memory = 0.0
        self._measurements: list[tuple[float, float]] = []
        self._logger = get_logger("crackerjack.memory_profiler")

    def start_profiling(self) -> None:
        """Reset state and record the profiling baseline."""
        baseline = self._get_memory_usage()
        self._start_memory = baseline
        self._peak_memory = baseline
        self._measurements.clear()
        self._logger.debug(f"Started memory profiling at {self._start_memory:.2f} MB")

    def record_checkpoint(self, name: str = "") -> float:
        """Record a memory checkpoint and return the current usage (MB)."""
        usage = self._get_memory_usage()
        if usage > self._peak_memory:
            self._peak_memory = usage

        self._measurements.append((time.time(), usage))

        if name:
            self._logger.debug(f"Memory checkpoint '{name}': {usage:.2f} MB")

        return usage

    def get_summary(self) -> dict[str, Any]:
        """Summarize the session; empty dict if no checkpoints were taken."""
        if not self._measurements:
            return {}

        now_mb = self._get_memory_usage()
        return {
            "start_memory_mb": self._start_memory,
            "current_memory_mb": now_mb,
            "peak_memory_mb": self._peak_memory,
            "memory_delta_mb": now_mb - self._start_memory,
            "checkpoints": len(self._measurements),
        }

    def _get_memory_usage(self) -> float:
        """Current memory usage in MB: psutil RSS, else tracemalloc/gc fallback."""
        try:
            import os

            import psutil

            return psutil.Process(os.getpid()).memory_info().rss / 1024 / 1024
        except ImportError:
            # Fallback to tracemalloc if psutil not available
            import tracemalloc

            if tracemalloc.is_tracing():
                traced, _peak = tracemalloc.get_traced_memory()
                return traced / 1024 / 1024

            # Basic fallback using sys.getsizeof (less accurate)
            return sys.getsizeof(gc.get_objects()) / 1024 / 1024
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
class MemoryOptimizer:
    """Central memory optimization coordinator.

    Process-wide singleton (``get_instance``) that tracks LazyLoader
    instances (weakly), named ResourcePools, and a MemoryProfiler, and
    triggers cleanup passes when usage crosses ``_gc_threshold`` MB.
    """

    _instance: t.Optional["MemoryOptimizer"] = None
    _lock = Lock()  # guards singleton creation only

    def __init__(self):
        self._lazy_objects: WeakSet[t.Any] = WeakSet()
        self._resource_pools: dict[str, ResourcePool] = {}
        self._profiler = MemoryProfiler()
        self._stats_lock = Lock()  # guards the lazy created/loaded counters
        self._lazy_created_count = 0
        self._lazy_loaded_count = 0
        self._gc_threshold = 100  # MB
        self._auto_gc = True
        self._logger = get_logger("crackerjack.memory_optimizer")

    @classmethod
    def get_instance(cls) -> "MemoryOptimizer":
        """Get singleton instance."""
        with cls._lock:
            if cls._instance is None:
                cls._instance = cls()
            return cls._instance

    def register_lazy_object(self, lazy_obj: LazyLoader) -> None:
        """Register a lazy object for tracking."""
        # Weak reference: registration does not keep the loader alive.
        self._lazy_objects.add(lazy_obj)
        with self._stats_lock:
            self._lazy_created_count += 1

    def notify_lazy_load(self, name: str) -> None:
        """Notify that a lazy object was loaded."""
        # NOTE(review): `name` is currently unused beyond the signature.
        with self._stats_lock:
            self._lazy_loaded_count += 1

        # Check if we should trigger garbage collection
        # (done outside _stats_lock; cleanup may take a while).
        if self._auto_gc and self._should_run_gc():
            self._run_memory_cleanup()

    def register_resource_pool(self, name: str, pool: ResourcePool) -> None:
        """Register a resource pool (replaces any pool with the same name)."""
        self._resource_pools[name] = pool
        self._logger.debug(f"Registered resource pool: {name}")

    def get_resource_pool(self, name: str) -> ResourcePool | None:
        """Get a registered resource pool."""
        return self._resource_pools.get(name)

    def start_profiling(self) -> None:
        """Start memory profiling."""
        self._profiler.start_profiling()

    def record_checkpoint(self, name: str = "") -> float:
        """Record memory checkpoint."""
        return self._profiler.record_checkpoint(name)

    def get_memory_stats(self) -> MemoryStats:
        """Get comprehensive memory statistics."""
        profiler_stats = self._profiler.get_summary()

        with self._stats_lock:
            return MemoryStats(
                # NOTE(review): true total allocation is not tracked; this
                # mirrors the profiler's peak reading.
                total_allocated_mb=profiler_stats.get("peak_memory_mb", 0.0),
                peak_usage_mb=profiler_stats.get("peak_memory_mb", 0.0),
                current_usage_mb=profiler_stats.get("current_memory_mb", 0.0),
                gc_collections=len(gc.get_stats()) if hasattr(gc, "get_stats") else 0,
                lazy_objects_created=self._lazy_created_count,
                lazy_objects_loaded=self._lazy_loaded_count,
                resource_pools_active=len(self._resource_pools),
            )

    def optimize_memory(self) -> None:
        """Run memory optimization."""
        self._logger.info("Running memory optimization")

        # Dispose unused lazy objects
        self._cleanup_lazy_objects()

        # Clear resource pools if needed
        self._cleanup_resource_pools()

        # Force garbage collection
        collected = gc.collect()
        self._logger.debug(f"Garbage collection freed {collected} objects")

    def _should_run_gc(self) -> bool:
        """Check if garbage collection should be triggered."""
        # Returns False when no checkpoints exist (summary is empty → 0).
        current_memory = self._profiler.get_summary().get("current_memory_mb", 0)
        return current_memory > self._gc_threshold

    def _run_memory_cleanup(self) -> None:
        """Run memory cleanup operations."""
        self._logger.debug("Running automatic memory cleanup")

        # Collect garbage
        before_gc = self._profiler._get_memory_usage()
        collected = gc.collect()
        after_gc = self._profiler._get_memory_usage()

        memory_freed = before_gc - after_gc

        if memory_freed > 1.0:  # More than 1MB freed
            self._logger.info(
                f"Memory cleanup freed {memory_freed:.2f} MB ({collected} objects)"
            )

    def _cleanup_lazy_objects(self) -> None:
        """Clean up unused lazy objects."""
        disposed_count = 0

        # Convert to list to avoid modification during iteration
        lazy_objects = list(self._lazy_objects)

        for lazy_obj in lazy_objects:
            # Dispose objects that haven't been accessed recently
            if (
                hasattr(lazy_obj, "_last_access")
                and lazy_obj._last_access < time.time() - 300  # 5 minutes
                and lazy_obj.is_loaded
            ):
                lazy_obj.dispose()
                disposed_count += 1

        if disposed_count > 0:
            self._logger.debug(f"Disposed {disposed_count} unused lazy objects")

    def _cleanup_resource_pools(self) -> None:
        """Clean up resource pools."""
        for name, pool in self._resource_pools.items():
            stats = pool.get_stats()

            # Clear pool if efficiency is very low (lots of created, few reused)
            if stats["efficiency"] < 0.1 and stats["created_total"] > 10:
                pool.clear()
                self._logger.debug(f"Cleared inefficient resource pool: {name}")
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
def lazy_property(factory: t.Callable[..., Any]) -> t.Callable[[t.Any], Any]:
    """Decorator for lazy property loading.

    Use on a method::

        class Service:
            @lazy_property
            def db(self): ...

    The first attribute access calls ``factory(self)`` and caches the
    result in a per-instance :class:`LazyLoader`; later accesses reuse
    the loaded value.

    Fix: the previous version handed the unbound method directly to
    ``LazyLoader`` as its zero-argument factory, so the first access
    raised ``TypeError`` (missing ``self``).  The factory is now bound
    to the instance before being wrapped.
    """

    def decorator(self: t.Any) -> Any:
        attr_name = f"_lazy_{factory.__name__}"

        if not hasattr(self, attr_name):
            # Bind the instance so LazyLoader's zero-arg factory works.
            loader = LazyLoader(lambda: factory(self), factory.__name__)
            setattr(self, attr_name, loader)

        return getattr(self, attr_name).get()

    return property(decorator)  # type: ignore[return-value]
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
def memory_optimized(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    """Decorator to add memory optimization to functions.

    Records a memory checkpoint around each call, warns when a single
    call grows the process by more than 10 MB, and triggers a cleanup
    pass afterwards (even on exceptions) when usage exceeds the
    optimizer's GC threshold.
    """

    @wraps(func)
    def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any:
        tracker = MemoryOptimizer.get_instance()

        # Checkpoint before the wrapped call runs.
        baseline = tracker.record_checkpoint(f"{func.__name__}_start")

        try:
            outcome = func(*args, **kwargs)

            # Checkpoint after the call; compute how much memory grew.
            grown = tracker.record_checkpoint(f"{func.__name__}_end") - baseline

            # Flag calls that grow the process by a significant amount.
            if grown > 10.0:
                tracker._logger.warning(
                    f"Function {func.__name__} increased memory by {grown:.2f} MB"
                )

            return outcome

        finally:
            # Cleanup runs even when the wrapped call raises.
            if tracker._should_run_gc():
                tracker._run_memory_cleanup()

    return wrapper
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
# Global optimizer instance
def get_memory_optimizer() -> MemoryOptimizer:
    """Return the process-wide MemoryOptimizer singleton."""
    instance = MemoryOptimizer.get_instance()
    return instance
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
# Factory functions for common patterns
def create_lazy_service(factory: Callable[[], Any], name: str) -> LazyLoader:
    """Build a LazyLoader around *factory*, tagged with *name*."""
    loader = LazyLoader(factory, name)
    return loader
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
def create_resource_pool(
    factory: Callable[[], Any],
    max_size: int = 5,
    name: str = "unnamed",
) -> ResourcePool:
    """Create a resource pool and register it with the global optimizer."""
    new_pool = ResourcePool(factory, max_size, name)
    optimizer = MemoryOptimizer.get_instance()
    optimizer.register_resource_pool(name, new_pool)
    return new_pool
|