crackerjack 0.31.9-py3-none-any.whl → 0.31.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crackerjack might be problematic.

Files changed (155)
  1. crackerjack/CLAUDE.md +288 -705
  2. crackerjack/__main__.py +22 -8
  3. crackerjack/agents/__init__.py +0 -3
  4. crackerjack/agents/architect_agent.py +0 -43
  5. crackerjack/agents/base.py +1 -9
  6. crackerjack/agents/coordinator.py +2 -148
  7. crackerjack/agents/documentation_agent.py +109 -81
  8. crackerjack/agents/dry_agent.py +122 -97
  9. crackerjack/agents/formatting_agent.py +3 -16
  10. crackerjack/agents/import_optimization_agent.py +1174 -130
  11. crackerjack/agents/performance_agent.py +956 -188
  12. crackerjack/agents/performance_helpers.py +229 -0
  13. crackerjack/agents/proactive_agent.py +1 -48
  14. crackerjack/agents/refactoring_agent.py +516 -246
  15. crackerjack/agents/refactoring_helpers.py +282 -0
  16. crackerjack/agents/security_agent.py +393 -90
  17. crackerjack/agents/test_creation_agent.py +1776 -120
  18. crackerjack/agents/test_specialist_agent.py +59 -15
  19. crackerjack/agents/tracker.py +0 -102
  20. crackerjack/api.py +145 -37
  21. crackerjack/cli/handlers.py +48 -30
  22. crackerjack/cli/interactive.py +11 -11
  23. crackerjack/cli/options.py +66 -4
  24. crackerjack/code_cleaner.py +808 -148
  25. crackerjack/config/global_lock_config.py +110 -0
  26. crackerjack/config/hooks.py +43 -64
  27. crackerjack/core/async_workflow_orchestrator.py +247 -97
  28. crackerjack/core/autofix_coordinator.py +192 -109
  29. crackerjack/core/enhanced_container.py +46 -63
  30. crackerjack/core/file_lifecycle.py +549 -0
  31. crackerjack/core/performance.py +9 -8
  32. crackerjack/core/performance_monitor.py +395 -0
  33. crackerjack/core/phase_coordinator.py +282 -95
  34. crackerjack/core/proactive_workflow.py +9 -58
  35. crackerjack/core/resource_manager.py +501 -0
  36. crackerjack/core/service_watchdog.py +490 -0
  37. crackerjack/core/session_coordinator.py +4 -8
  38. crackerjack/core/timeout_manager.py +504 -0
  39. crackerjack/core/websocket_lifecycle.py +475 -0
  40. crackerjack/core/workflow_orchestrator.py +355 -204
  41. crackerjack/dynamic_config.py +47 -6
  42. crackerjack/errors.py +3 -4
  43. crackerjack/executors/async_hook_executor.py +63 -13
  44. crackerjack/executors/cached_hook_executor.py +14 -14
  45. crackerjack/executors/hook_executor.py +100 -37
  46. crackerjack/executors/hook_lock_manager.py +856 -0
  47. crackerjack/executors/individual_hook_executor.py +120 -86
  48. crackerjack/intelligence/__init__.py +0 -7
  49. crackerjack/intelligence/adaptive_learning.py +13 -86
  50. crackerjack/intelligence/agent_orchestrator.py +15 -78
  51. crackerjack/intelligence/agent_registry.py +12 -59
  52. crackerjack/intelligence/agent_selector.py +31 -92
  53. crackerjack/intelligence/integration.py +1 -41
  54. crackerjack/interactive.py +9 -9
  55. crackerjack/managers/async_hook_manager.py +25 -8
  56. crackerjack/managers/hook_manager.py +9 -9
  57. crackerjack/managers/publish_manager.py +57 -59
  58. crackerjack/managers/test_command_builder.py +6 -36
  59. crackerjack/managers/test_executor.py +9 -61
  60. crackerjack/managers/test_manager.py +52 -62
  61. crackerjack/managers/test_manager_backup.py +77 -127
  62. crackerjack/managers/test_progress.py +4 -23
  63. crackerjack/mcp/cache.py +5 -12
  64. crackerjack/mcp/client_runner.py +10 -10
  65. crackerjack/mcp/context.py +64 -6
  66. crackerjack/mcp/dashboard.py +14 -11
  67. crackerjack/mcp/enhanced_progress_monitor.py +55 -55
  68. crackerjack/mcp/file_monitor.py +72 -42
  69. crackerjack/mcp/progress_components.py +103 -84
  70. crackerjack/mcp/progress_monitor.py +122 -49
  71. crackerjack/mcp/rate_limiter.py +12 -12
  72. crackerjack/mcp/server_core.py +16 -22
  73. crackerjack/mcp/service_watchdog.py +26 -26
  74. crackerjack/mcp/state.py +15 -0
  75. crackerjack/mcp/tools/core_tools.py +95 -39
  76. crackerjack/mcp/tools/error_analyzer.py +6 -32
  77. crackerjack/mcp/tools/execution_tools.py +1 -56
  78. crackerjack/mcp/tools/execution_tools_backup.py +35 -131
  79. crackerjack/mcp/tools/intelligence_tool_registry.py +0 -36
  80. crackerjack/mcp/tools/intelligence_tools.py +2 -55
  81. crackerjack/mcp/tools/monitoring_tools.py +308 -145
  82. crackerjack/mcp/tools/proactive_tools.py +12 -42
  83. crackerjack/mcp/tools/progress_tools.py +23 -15
  84. crackerjack/mcp/tools/utility_tools.py +3 -40
  85. crackerjack/mcp/tools/workflow_executor.py +40 -60
  86. crackerjack/mcp/websocket/app.py +0 -3
  87. crackerjack/mcp/websocket/endpoints.py +206 -268
  88. crackerjack/mcp/websocket/jobs.py +213 -66
  89. crackerjack/mcp/websocket/server.py +84 -6
  90. crackerjack/mcp/websocket/websocket_handler.py +137 -29
  91. crackerjack/models/config_adapter.py +3 -16
  92. crackerjack/models/protocols.py +162 -3
  93. crackerjack/models/resource_protocols.py +454 -0
  94. crackerjack/models/task.py +3 -3
  95. crackerjack/monitoring/__init__.py +0 -0
  96. crackerjack/monitoring/ai_agent_watchdog.py +25 -71
  97. crackerjack/monitoring/regression_prevention.py +28 -87
  98. crackerjack/orchestration/advanced_orchestrator.py +44 -78
  99. crackerjack/orchestration/coverage_improvement.py +10 -60
  100. crackerjack/orchestration/execution_strategies.py +16 -16
  101. crackerjack/orchestration/test_progress_streamer.py +61 -53
  102. crackerjack/plugins/base.py +1 -1
  103. crackerjack/plugins/managers.py +22 -20
  104. crackerjack/py313.py +65 -21
  105. crackerjack/services/backup_service.py +467 -0
  106. crackerjack/services/bounded_status_operations.py +627 -0
  107. crackerjack/services/cache.py +7 -9
  108. crackerjack/services/config.py +35 -52
  109. crackerjack/services/config_integrity.py +5 -16
  110. crackerjack/services/config_merge.py +542 -0
  111. crackerjack/services/contextual_ai_assistant.py +17 -19
  112. crackerjack/services/coverage_ratchet.py +51 -76
  113. crackerjack/services/debug.py +25 -39
  114. crackerjack/services/dependency_monitor.py +52 -50
  115. crackerjack/services/enhanced_filesystem.py +14 -11
  116. crackerjack/services/file_hasher.py +1 -1
  117. crackerjack/services/filesystem.py +1 -12
  118. crackerjack/services/git.py +78 -44
  119. crackerjack/services/health_metrics.py +31 -27
  120. crackerjack/services/initialization.py +281 -433
  121. crackerjack/services/input_validator.py +760 -0
  122. crackerjack/services/log_manager.py +16 -16
  123. crackerjack/services/logging.py +7 -6
  124. crackerjack/services/metrics.py +43 -43
  125. crackerjack/services/pattern_cache.py +2 -31
  126. crackerjack/services/pattern_detector.py +26 -63
  127. crackerjack/services/performance_benchmarks.py +20 -45
  128. crackerjack/services/regex_patterns.py +2887 -0
  129. crackerjack/services/regex_utils.py +537 -0
  130. crackerjack/services/secure_path_utils.py +683 -0
  131. crackerjack/services/secure_status_formatter.py +534 -0
  132. crackerjack/services/secure_subprocess.py +605 -0
  133. crackerjack/services/security.py +47 -10
  134. crackerjack/services/security_logger.py +492 -0
  135. crackerjack/services/server_manager.py +109 -50
  136. crackerjack/services/smart_scheduling.py +8 -25
  137. crackerjack/services/status_authentication.py +603 -0
  138. crackerjack/services/status_security_manager.py +442 -0
  139. crackerjack/services/thread_safe_status_collector.py +546 -0
  140. crackerjack/services/tool_version_service.py +1 -23
  141. crackerjack/services/unified_config.py +36 -58
  142. crackerjack/services/validation_rate_limiter.py +269 -0
  143. crackerjack/services/version_checker.py +9 -40
  144. crackerjack/services/websocket_resource_limiter.py +572 -0
  145. crackerjack/slash_commands/__init__.py +52 -2
  146. crackerjack/tools/__init__.py +0 -0
  147. crackerjack/tools/validate_input_validator_patterns.py +262 -0
  148. crackerjack/tools/validate_regex_patterns.py +198 -0
  149. {crackerjack-0.31.9.dist-info → crackerjack-0.31.12.dist-info}/METADATA +197 -12
  150. crackerjack-0.31.12.dist-info/RECORD +178 -0
  151. crackerjack/cli/facade.py +0 -104
  152. crackerjack-0.31.9.dist-info/RECORD +0 -149
  153. {crackerjack-0.31.9.dist-info → crackerjack-0.31.12.dist-info}/WHEEL +0 -0
  154. {crackerjack-0.31.9.dist-info → crackerjack-0.31.12.dist-info}/entry_points.txt +0 -0
  155. {crackerjack-0.31.9.dist-info → crackerjack-0.31.12.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,549 @@
+ """File operations with RAII patterns and comprehensive error handling.
+
+ Provides robust file handling with automatic cleanup, atomic operations,
+ and comprehensive error recovery patterns.
+ """
+
+ import asyncio
+ import contextlib
+ import fcntl
+ import logging
+ import os
+ import shutil
+ import time
+ import typing as t
+ from pathlib import Path
+
+ from ..models.resource_protocols import AbstractFileResource
+ from .resource_manager import ResourceManager
+
+
+ class AtomicFileWriter(AbstractFileResource):
+     """Atomic file writer with automatic cleanup and rollback on errors."""
+
+     def __init__(
+         self,
+         target_path: Path,
+         backup: bool = True,
+         manager: ResourceManager | None = None,
+     ) -> None:
+         super().__init__(target_path)
+         self.backup = backup
+         self.manager = manager
+         self.temp_path: Path | None = None
+         self.backup_path: Path | None = None
+         self._file_handle: t.IO[str] | None = None
+         self.logger = logging.getLogger(__name__)
+
+         if manager:
+             manager.register_resource(self)
+
+     async def _do_initialize(self) -> None:
+         """Initialize atomic file writer."""
+         # Create temporary file in same directory as target
+         self.temp_path = self.path.parent / f".{self.path.name}.tmp.{os.getpid()}"
+
+         # Create backup if requested and target exists
+         if self.backup and self.path.exists():
+             self.backup_path = self.path.with_suffix(f"{self.path.suffix}.bak")
+             shutil.copy2(self.path, self.backup_path)
+
+         # Open temporary file for writing
+         self._file_handle = self.temp_path.open("w", encoding="utf-8")
+
+     async def _do_cleanup(self) -> None:
+         """Clean up temporary files and handles."""
+         # Close file handle
+         if self._file_handle and not self._file_handle.closed:
+             self._file_handle.close()
+
+         # Remove temporary file if it exists
+         if self.temp_path and self.temp_path.exists():
+             try:
+                 self.temp_path.unlink()
+             except OSError as e:
+                 self.logger.warning(f"Failed to remove temp file {self.temp_path}: {e}")
+
+         # Remove backup file if cleanup is successful
+         if self.backup_path and self.backup_path.exists():
+             try:
+                 self.backup_path.unlink()
+             except OSError as e:
+                 self.logger.warning(
+                     f"Failed to remove backup file {self.backup_path}: {e}"
+                 )
+
+     def write(self, content: str) -> None:
+         """Write content to the temporary file."""
+         if not self._file_handle:
+             raise RuntimeError("AtomicFileWriter not initialized")
+         self._file_handle.write(content)
+
+     def writelines(self, lines: t.Iterable[str]) -> None:
+         """Write multiple lines to the temporary file."""
+         if not self._file_handle:
+             raise RuntimeError("AtomicFileWriter not initialized")
+         self._file_handle.writelines(lines)
+
+     def flush(self) -> None:
+         """Flush the temporary file."""
+         if not self._file_handle:
+             raise RuntimeError("AtomicFileWriter not initialized")
+         self._file_handle.flush()
+         os.fsync(self._file_handle.fileno())
+
+     async def commit(self) -> None:
+         """Atomically commit the changes to the target file."""
+         if not self.temp_path:
+             raise RuntimeError("AtomicFileWriter not initialized")
+
+         # Ensure all data is written
+         self.flush()
+
+         # Close temporary file
+         if self._file_handle:
+             self._file_handle.close()
+             self._file_handle = None
+
+         # Atomic move
+         try:
+             self.temp_path.replace(self.path)
+             self.logger.debug(f"Successfully committed changes to {self.path}")
+         except OSError as e:
+             # Restore from backup if available
+             if self.backup_path and self.backup_path.exists():
+                 try:
+                     self.backup_path.replace(self.path)
+                     self.logger.info(
+                         f"Restored {self.path} from backup after commit failure"
+                     )
+                 except OSError:
+                     self.logger.error(f"Failed to restore {self.path} from backup")
+             raise RuntimeError(f"Failed to commit changes to {self.path}") from e
+
+     async def rollback(self) -> None:
+         """Rollback changes and restore from backup if available."""
+         if self.backup_path and self.backup_path.exists():
+             try:
+                 self.backup_path.replace(self.path)
+                 self.logger.info(f"Rolled back changes to {self.path}")
+             except OSError as e:
+                 self.logger.error(f"Failed to rollback {self.path}: {e}")
+                 raise
+
+
+ class LockedFileResource(AbstractFileResource):
+     """File resource with exclusive locking for concurrent access protection."""
+
+     def __init__(
+         self,
+         path: Path,
+         mode: str = "r+",
+         timeout: float = 30.0,
+         manager: ResourceManager | None = None,
+     ) -> None:
+         super().__init__(path)
+         self.mode = mode
+         self.timeout = timeout
+         self._file_handle: t.IO[str] | None = None
+         self.logger = logging.getLogger(__name__)
+
+         if manager:
+             manager.register_resource(self)
+
+     async def _do_initialize(self) -> None:
+         """Initialize locked file resource."""
+         # Ensure parent directory exists
+         self.path.parent.mkdir(parents=True, exist_ok=True)
+
+         # Open file
+         self._file_handle = self.path.open(self.mode)
+
+         # Acquire exclusive lock with timeout
+         start_time = time.time()
+         while time.time() - start_time < self.timeout:
+             try:
+                 fcntl.flock(self._file_handle.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+                 self.logger.debug(f"Acquired lock on {self.path}")
+                 return
+             except OSError:
+                 await asyncio.sleep(0.1)
+
+         raise TimeoutError(
+             f"Failed to acquire lock on {self.path} within {self.timeout}s"
+         )
+
+     async def _do_cleanup(self) -> None:
+         """Clean up locked file resource."""
+         if self._file_handle and not self._file_handle.closed:
+             try:
+                 # Release lock
+                 fcntl.flock(self._file_handle.fileno(), fcntl.LOCK_UN)
+                 self.logger.debug(f"Released lock on {self.path}")
+             except OSError as e:
+                 self.logger.warning(f"Failed to release lock on {self.path}: {e}")
+             finally:
+                 self._file_handle.close()
+
+     @property
+     def file_handle(self) -> t.IO[str]:
+         """Get the file handle."""
+         if not self._file_handle:
+             raise RuntimeError("LockedFileResource not initialized")
+         return self._file_handle
+
+     def read(self) -> str:
+         """Read content from the locked file."""
+         self.file_handle.seek(0)
+         return self.file_handle.read()
+
+     def write(self, content: str) -> None:
+         """Write content to the locked file."""
+         self.file_handle.seek(0)
+         self.file_handle.write(content)
+         self.file_handle.truncate()
+         self.file_handle.flush()
+         os.fsync(self.file_handle.fileno())
+
+
+ class SafeDirectoryCreator(AbstractFileResource):
+     """Safe directory creation with cleanup and rollback capabilities."""
+
+     def __init__(
+         self,
+         path: Path,
+         cleanup_on_error: bool = True,
+         manager: ResourceManager | None = None,
+     ) -> None:
+         super().__init__(path)
+         self.cleanup_on_error = cleanup_on_error
+         self._created_dirs: list[Path] = []
+         self.logger = logging.getLogger(__name__)
+
+         if manager:
+             manager.register_resource(self)
+
+     async def _do_initialize(self) -> None:
+         """Initialize safe directory creator."""
+         # Track which directories we create
+         current = self.path
+
+         while not current.exists():
+             self._created_dirs.append(current)
+             current = current.parent
+
+         # Reverse to create from parent to child
+         self._created_dirs.reverse()
+
+         # Create directories
+         for dir_path in self._created_dirs:
+             try:
+                 dir_path.mkdir(exist_ok=True)
+                 self.logger.debug(f"Created directory: {dir_path}")
+             except OSError as e:
+                 self.logger.error(f"Failed to create directory {dir_path}: {e}")
+                 if self.cleanup_on_error:
+                     await self._cleanup_created_dirs()
+                 raise
+
+     async def _do_cleanup(self) -> None:
+         """Clean up created directories if requested."""
+         if self.cleanup_on_error:
+             await self._cleanup_created_dirs()
+
+     async def _cleanup_created_dirs(self) -> None:
+         """Remove directories that we created."""
+         # Remove in reverse order (child to parent)
+         for dir_path in reversed(self._created_dirs):
+             try:
+                 if dir_path.exists() and not any(dir_path.iterdir()):
+                     dir_path.rmdir()
+                     self.logger.debug(f"Removed directory: {dir_path}")
+             except OSError as e:
+                 self.logger.warning(f"Failed to remove directory {dir_path}: {e}")
+
+
+ class BatchFileOperations:
+     """Batch file operations with atomic commit/rollback."""
+
+     def __init__(self, manager: ResourceManager | None = None) -> None:
+         self.manager = manager or ResourceManager()
+         self.operations: list[t.Callable[[], None]] = []
+         self.rollback_operations: list[t.Callable[[], None]] = []
+         self.logger = logging.getLogger(__name__)
+
+     def add_write_operation(
+         self,
+         path: Path,
+         content: str,
+         backup: bool = True,
+     ) -> None:
+         """Add a write operation to the batch."""
+
+         def write_op():
+             writer = AtomicFileWriter(path, backup, self.manager)
+             asyncio.create_task(writer.initialize())
+             writer.write(content)
+             asyncio.create_task(writer.commit())
+
+         def rollback_op():
+             writer = AtomicFileWriter(path, backup)
+             asyncio.create_task(writer.rollback())
+
+         self.operations.append(write_op)
+         self.rollback_operations.append(rollback_op)
+
+     def add_copy_operation(
+         self,
+         source: Path,
+         dest: Path,
+         backup: bool = True,
+     ) -> None:
+         """Add a copy operation to the batch."""
+
+         def copy_op():
+             if backup and dest.exists():
+                 backup_path = dest.with_suffix(f"{dest.suffix}.bak")
+                 shutil.copy2(dest, backup_path)
+             shutil.copy2(source, dest)
+
+         def rollback_op():
+             if backup:
+                 backup_path = dest.with_suffix(f"{dest.suffix}.bak")
+                 if backup_path.exists():
+                     shutil.move(backup_path, dest)
+
+         self.operations.append(copy_op)
+         self.rollback_operations.append(rollback_op)
+
+     def add_move_operation(
+         self,
+         source: Path,
+         dest: Path,
+     ) -> None:
+         """Add a move operation to the batch."""
+
+         def move_op():
+             shutil.move(source, dest)
+
+         def rollback_op():
+             shutil.move(dest, source)
+
+         self.operations.append(move_op)
+         self.rollback_operations.append(rollback_op)
+
+     def add_delete_operation(
+         self,
+         path: Path,
+         backup: bool = True,
+     ) -> None:
+         """Add a delete operation to the batch."""
+         backup_path: Path | None = None
+
+         def delete_op():
+             nonlocal backup_path
+             if backup and path.exists():
+                 backup_path = path.with_suffix(f"{path.suffix}.bak.{os.getpid()}")
+                 shutil.move(path, backup_path)
+             elif path.exists():
+                 path.unlink()
+
+         def rollback_op():
+             if backup_path and backup_path.exists():
+                 shutil.move(backup_path, path)
+
+         self.operations.append(delete_op)
+         self.rollback_operations.append(rollback_op)
+
+     async def commit_all(self) -> None:
+         """Execute all operations atomically."""
+         executed_ops = 0
+
+         try:
+             for i, operation in enumerate(self.operations):
+                 operation()
+                 executed_ops = i + 1
+
+             self.logger.info(f"Successfully executed {executed_ops} file operations")
+
+         except Exception as e:
+             self.logger.error(f"Batch operation failed at step {executed_ops}: {e}")
+
+             # Rollback executed operations in reverse order
+             for i in range(executed_ops - 1, -1, -1):
+                 try:
+                     self.rollback_operations[i]()
+                 except Exception as rollback_error:
+                     self.logger.error(
+                         f"Rollback failed for operation {i}: {rollback_error}"
+                     )
+
+             raise RuntimeError("Batch file operations failed and rolled back") from e
+
+
+ # Context managers for common file operations
+ @contextlib.asynccontextmanager
+ async def atomic_file_write(
+     path: Path,
+     backup: bool = True,
+ ):
+     """Context manager for atomic file writing."""
+     writer = AtomicFileWriter(path, backup)
+     try:
+         await writer.initialize()
+         yield writer
+         await writer.commit()
+     except Exception:
+         await writer.rollback()
+         raise
+     finally:
+         await writer.cleanup()
+
+
+ @contextlib.asynccontextmanager
+ async def locked_file_access(
+     path: Path,
+     mode: str = "r+",
+     timeout: float = 30.0,
+ ):
+     """Context manager for locked file access."""
+     file_resource = LockedFileResource(path, mode, timeout)
+     try:
+         await file_resource.initialize()
+         yield file_resource
+     finally:
+         await file_resource.cleanup()
+
+
+ @contextlib.asynccontextmanager
+ async def safe_directory_creation(
+     path: Path,
+     cleanup_on_error: bool = True,
+ ):
+     """Context manager for safe directory creation."""
+     creator = SafeDirectoryCreator(path, cleanup_on_error)
+     try:
+         await creator.initialize()
+         yield creator
+     finally:
+         await creator.cleanup()
+
+
+ @contextlib.asynccontextmanager
+ async def batch_file_operations():
+     """Context manager for batch file operations."""
+     batch = BatchFileOperations()
+     try:
+         yield batch
+         await batch.commit_all()
+     except Exception:
+         # Rollback is handled in commit_all
+         raise
+
+
+ # File operation utilities with enhanced error handling
+ class SafeFileOperations:
+     """Utility class for safe file operations with comprehensive error handling."""
+
+     @staticmethod
+     async def safe_read_text(
+         path: Path,
+         encoding: str = "utf-8",
+         fallback_encodings: list[str] | None = None,
+     ) -> str:
+         """Safely read text file with encoding fallback."""
+         fallback_encodings = fallback_encodings or ["latin-1", "cp1252"]
+
+         for enc in [encoding] + fallback_encodings:
+             try:
+                 return path.read_text(encoding=enc)
+             except UnicodeDecodeError:
+                 continue
+             except FileNotFoundError:
+                 raise
+             except Exception as e:
+                 logging.getLogger(__name__).warning(
+                     f"Failed to read {path} with encoding {enc}: {e}"
+                 )
+                 continue
+
+         raise RuntimeError(f"Failed to read {path} with any supported encoding")
+
+     @staticmethod
+     async def safe_write_text(
+         path: Path,
+         content: str,
+         encoding: str = "utf-8",
+         atomic: bool = True,
+         backup: bool = True,
+     ) -> None:
+         """Safely write text file with atomic operation support."""
+         if atomic:
+             async with atomic_file_write(path, backup) as writer:
+                 writer.write(content)
+         else:
+             # Ensure parent directory exists
+             path.parent.mkdir(parents=True, exist_ok=True)
+             path.write_text(content, encoding=encoding)
+
+     @staticmethod
+     async def safe_copy_file(
+         source: Path,
+         dest: Path,
+         preserve_metadata: bool = True,
+         backup: bool = True,
+     ) -> None:
+         """Safely copy file with backup support."""
+         if not source.exists():
+             raise FileNotFoundError(f"Source file not found: {source}")
+
+         # Create backup if requested
+         if backup and dest.exists():
+             backup_path = dest.with_suffix(f"{dest.suffix}.bak")
+             shutil.copy2(dest, backup_path)
+
+         try:
+             # Ensure destination directory exists
+             dest.parent.mkdir(parents=True, exist_ok=True)
+
+             if preserve_metadata:
+                 shutil.copy2(source, dest)
+             else:
+                 shutil.copy(source, dest)
+
+         except Exception as e:
+             # Restore backup if copy failed
+             if backup and dest.with_suffix(f"{dest.suffix}.bak").exists():
+                 shutil.move(dest.with_suffix(f"{dest.suffix}.bak"), dest)
+             raise RuntimeError(f"Failed to copy {source} to {dest}") from e
+
+     @staticmethod
+     async def safe_move_file(
+         source: Path,
+         dest: Path,
+         backup: bool = True,
+     ) -> None:
+         """Safely move file with backup support."""
+         if not source.exists():
+             raise FileNotFoundError(f"Source file not found: {source}")
+
+         # Create backup of destination if it exists
+         backup_path = None
+         if backup and dest.exists():
+             backup_path = dest.with_suffix(f"{dest.suffix}.bak.{os.getpid()}")
+             shutil.move(dest, backup_path)
+
+         try:
+             # Ensure destination directory exists
+             dest.parent.mkdir(parents=True, exist_ok=True)
+             shutil.move(source, dest)
+
+             # Remove backup on success
+             if backup_path and backup_path.exists():
+                 backup_path.unlink()
+
+         except Exception as e:
+             # Restore backup if move failed
+             if backup_path and backup_path.exists():
+                 shutil.move(backup_path, dest)
+             raise RuntimeError(f"Failed to move {source} to {dest}") from e
@@ -78,11 +78,11 @@ class PerformanceMonitor:
          stats = self.get_stats(name)
          if stats:
              self.console.print(
-                 f"[cyan]📊 {name}: [/cyan] "
-                 f"avg = {stats['avg']:.3f}s, "
-                 f"min = {stats['min']:.3f}s, "
-                 f"max = {stats['max']:.3f}s, "
-                 f"count = {stats['count']}",
+                 f"[cyan]📊 {name}: [/ cyan] "
+                 f"avg={stats['avg']: .3f}s, "
+                 f"min={stats['min']: .3f}s, "
+                 f"max={stats['max']: .3f}s, "
+                 f"count={stats['count']}",
              )
          else:
              for metric_name in self.metrics:
@@ -107,8 +107,8 @@ def memoize_with_ttl(
              cache[key] = (time.time(), result)
              return result

-         wrapper.cache_clear = cache.clear  # type: ignore[attr-defined]
-         wrapper.cache_info = lambda: {"size": len(cache), "ttl": ttl}  # type: ignore[attr-defined]
+         setattr(wrapper, "cache_clear", cache.clear)
+         setattr(wrapper, "cache_info", lambda: {"size": len(cache), "ttl": ttl})

          return wrapper

      return decorator
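The memoize_with_ttl hunk, which appears to come from crackerjack/core/performance.py, replaces direct attribute assignment on the wrapper (which needed # type: ignore[attr-defined]) with setattr calls that static type checkers accept on arbitrary objects. A self-contained sketch of the same pattern follows; memoize_simple is a toy stand-in, not crackerjack's implementation.

import time
import typing as t


def memoize_simple(ttl: float) -> t.Callable[[t.Callable[..., t.Any]], t.Callable[..., t.Any]]:
    """Toy TTL memoizer illustrating the setattr pattern."""

    def decorator(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
        cache: dict[t.Any, tuple[float, t.Any]] = {}

        def wrapper(*args: t.Any) -> t.Any:
            entry = cache.get(args)
            if entry and time.time() - entry[0] < ttl:
                return entry[1]
            result = func(*args)
            cache[args] = (time.time(), result)
            return result

        # setattr avoids the attr-defined complaint that direct assignment triggers.
        setattr(wrapper, "cache_clear", cache.clear)
        setattr(wrapper, "cache_info", lambda: {"size": len(cache), "ttl": ttl})
        return wrapper

    return decorator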
@@ -163,7 +163,8 @@ class OptimizedFileWatcher:
          self._file_cache.clear()

          if hasattr(self.get_python_files, "cache_clear"):
-             self.get_python_files.cache_clear()  # type: ignore[attr-defined]
+             cache_clear = getattr(self.get_python_files, "cache_clear")
+             cache_clear()


  class ParallelTaskExecutor:
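The OptimizedFileWatcher hunk uses the complementary read-side pattern: when an attribute such as cache_clear may or may not exist on an object, guard with hasattr and fetch it via getattr before calling, again without a type: ignore comment. A small illustrative helper in that style; the clear_caches name is hypothetical and not part of crackerjack.

import typing as t


def clear_caches(*objects: t.Any) -> int:
    """Call cache_clear() on every object that exposes one; return how many were cleared."""
    cleared = 0
    for obj in objects:
        if hasattr(obj, "cache_clear"):
            cache_clear = getattr(obj, "cache_clear")
            cache_clear()
            cleared += 1
    return cleared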