crackerjack 0.31.10__py3-none-any.whl → 0.31.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- crackerjack/CLAUDE.md +288 -705
- crackerjack/__main__.py +22 -8
- crackerjack/agents/__init__.py +0 -3
- crackerjack/agents/architect_agent.py +0 -43
- crackerjack/agents/base.py +1 -9
- crackerjack/agents/coordinator.py +2 -148
- crackerjack/agents/documentation_agent.py +109 -81
- crackerjack/agents/dry_agent.py +122 -97
- crackerjack/agents/formatting_agent.py +3 -16
- crackerjack/agents/import_optimization_agent.py +1174 -130
- crackerjack/agents/performance_agent.py +956 -188
- crackerjack/agents/performance_helpers.py +229 -0
- crackerjack/agents/proactive_agent.py +1 -48
- crackerjack/agents/refactoring_agent.py +516 -246
- crackerjack/agents/refactoring_helpers.py +282 -0
- crackerjack/agents/security_agent.py +393 -90
- crackerjack/agents/test_creation_agent.py +1776 -120
- crackerjack/agents/test_specialist_agent.py +59 -15
- crackerjack/agents/tracker.py +0 -102
- crackerjack/api.py +145 -37
- crackerjack/cli/handlers.py +48 -30
- crackerjack/cli/interactive.py +11 -11
- crackerjack/cli/options.py +66 -4
- crackerjack/code_cleaner.py +808 -148
- crackerjack/config/global_lock_config.py +110 -0
- crackerjack/config/hooks.py +43 -64
- crackerjack/core/async_workflow_orchestrator.py +247 -97
- crackerjack/core/autofix_coordinator.py +192 -109
- crackerjack/core/enhanced_container.py +46 -63
- crackerjack/core/file_lifecycle.py +549 -0
- crackerjack/core/performance.py +9 -8
- crackerjack/core/performance_monitor.py +395 -0
- crackerjack/core/phase_coordinator.py +281 -94
- crackerjack/core/proactive_workflow.py +9 -58
- crackerjack/core/resource_manager.py +501 -0
- crackerjack/core/service_watchdog.py +490 -0
- crackerjack/core/session_coordinator.py +4 -8
- crackerjack/core/timeout_manager.py +504 -0
- crackerjack/core/websocket_lifecycle.py +475 -0
- crackerjack/core/workflow_orchestrator.py +343 -209
- crackerjack/dynamic_config.py +50 -9
- crackerjack/errors.py +3 -4
- crackerjack/executors/async_hook_executor.py +63 -13
- crackerjack/executors/cached_hook_executor.py +14 -14
- crackerjack/executors/hook_executor.py +100 -37
- crackerjack/executors/hook_lock_manager.py +856 -0
- crackerjack/executors/individual_hook_executor.py +120 -86
- crackerjack/intelligence/__init__.py +0 -7
- crackerjack/intelligence/adaptive_learning.py +13 -86
- crackerjack/intelligence/agent_orchestrator.py +15 -78
- crackerjack/intelligence/agent_registry.py +12 -59
- crackerjack/intelligence/agent_selector.py +31 -92
- crackerjack/intelligence/integration.py +1 -41
- crackerjack/interactive.py +9 -9
- crackerjack/managers/async_hook_manager.py +25 -8
- crackerjack/managers/hook_manager.py +9 -9
- crackerjack/managers/publish_manager.py +57 -59
- crackerjack/managers/test_command_builder.py +6 -36
- crackerjack/managers/test_executor.py +9 -61
- crackerjack/managers/test_manager.py +17 -63
- crackerjack/managers/test_manager_backup.py +77 -127
- crackerjack/managers/test_progress.py +4 -23
- crackerjack/mcp/cache.py +5 -12
- crackerjack/mcp/client_runner.py +10 -10
- crackerjack/mcp/context.py +64 -6
- crackerjack/mcp/dashboard.py +14 -11
- crackerjack/mcp/enhanced_progress_monitor.py +55 -55
- crackerjack/mcp/file_monitor.py +72 -42
- crackerjack/mcp/progress_components.py +103 -84
- crackerjack/mcp/progress_monitor.py +122 -49
- crackerjack/mcp/rate_limiter.py +12 -12
- crackerjack/mcp/server_core.py +16 -22
- crackerjack/mcp/service_watchdog.py +26 -26
- crackerjack/mcp/state.py +15 -0
- crackerjack/mcp/tools/core_tools.py +95 -39
- crackerjack/mcp/tools/error_analyzer.py +6 -32
- crackerjack/mcp/tools/execution_tools.py +1 -56
- crackerjack/mcp/tools/execution_tools_backup.py +35 -131
- crackerjack/mcp/tools/intelligence_tool_registry.py +0 -36
- crackerjack/mcp/tools/intelligence_tools.py +2 -55
- crackerjack/mcp/tools/monitoring_tools.py +308 -145
- crackerjack/mcp/tools/proactive_tools.py +12 -42
- crackerjack/mcp/tools/progress_tools.py +23 -15
- crackerjack/mcp/tools/utility_tools.py +3 -40
- crackerjack/mcp/tools/workflow_executor.py +40 -60
- crackerjack/mcp/websocket/app.py +0 -3
- crackerjack/mcp/websocket/endpoints.py +206 -268
- crackerjack/mcp/websocket/jobs.py +213 -66
- crackerjack/mcp/websocket/server.py +84 -6
- crackerjack/mcp/websocket/websocket_handler.py +137 -29
- crackerjack/models/config_adapter.py +3 -16
- crackerjack/models/protocols.py +162 -3
- crackerjack/models/resource_protocols.py +454 -0
- crackerjack/models/task.py +3 -3
- crackerjack/monitoring/__init__.py +0 -0
- crackerjack/monitoring/ai_agent_watchdog.py +25 -71
- crackerjack/monitoring/regression_prevention.py +28 -87
- crackerjack/orchestration/advanced_orchestrator.py +44 -78
- crackerjack/orchestration/coverage_improvement.py +10 -60
- crackerjack/orchestration/execution_strategies.py +16 -16
- crackerjack/orchestration/test_progress_streamer.py +61 -53
- crackerjack/plugins/base.py +1 -1
- crackerjack/plugins/managers.py +22 -20
- crackerjack/py313.py +65 -21
- crackerjack/services/backup_service.py +467 -0
- crackerjack/services/bounded_status_operations.py +627 -0
- crackerjack/services/cache.py +7 -9
- crackerjack/services/config.py +35 -52
- crackerjack/services/config_integrity.py +5 -16
- crackerjack/services/config_merge.py +542 -0
- crackerjack/services/contextual_ai_assistant.py +17 -19
- crackerjack/services/coverage_ratchet.py +44 -73
- crackerjack/services/debug.py +25 -39
- crackerjack/services/dependency_monitor.py +52 -50
- crackerjack/services/enhanced_filesystem.py +14 -11
- crackerjack/services/file_hasher.py +1 -1
- crackerjack/services/filesystem.py +1 -12
- crackerjack/services/git.py +71 -47
- crackerjack/services/health_metrics.py +31 -27
- crackerjack/services/initialization.py +276 -428
- crackerjack/services/input_validator.py +760 -0
- crackerjack/services/log_manager.py +16 -16
- crackerjack/services/logging.py +7 -6
- crackerjack/services/metrics.py +43 -43
- crackerjack/services/pattern_cache.py +2 -31
- crackerjack/services/pattern_detector.py +26 -63
- crackerjack/services/performance_benchmarks.py +20 -45
- crackerjack/services/regex_patterns.py +2887 -0
- crackerjack/services/regex_utils.py +537 -0
- crackerjack/services/secure_path_utils.py +683 -0
- crackerjack/services/secure_status_formatter.py +534 -0
- crackerjack/services/secure_subprocess.py +605 -0
- crackerjack/services/security.py +47 -10
- crackerjack/services/security_logger.py +492 -0
- crackerjack/services/server_manager.py +109 -50
- crackerjack/services/smart_scheduling.py +8 -25
- crackerjack/services/status_authentication.py +603 -0
- crackerjack/services/status_security_manager.py +442 -0
- crackerjack/services/thread_safe_status_collector.py +546 -0
- crackerjack/services/tool_version_service.py +1 -23
- crackerjack/services/unified_config.py +36 -58
- crackerjack/services/validation_rate_limiter.py +269 -0
- crackerjack/services/version_checker.py +9 -40
- crackerjack/services/websocket_resource_limiter.py +572 -0
- crackerjack/slash_commands/__init__.py +52 -2
- crackerjack/tools/__init__.py +0 -0
- crackerjack/tools/validate_input_validator_patterns.py +262 -0
- crackerjack/tools/validate_regex_patterns.py +198 -0
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/METADATA +197 -12
- crackerjack-0.31.13.dist-info/RECORD +178 -0
- crackerjack/cli/facade.py +0 -104
- crackerjack-0.31.10.dist-info/RECORD +0 -149
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/WHEEL +0 -0
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/licenses/LICENSE +0 -0
crackerjack/mcp/enhanced_progress_monitor.py CHANGED

@@ -18,7 +18,7 @@ from .progress_components import (
 
 
 class MetricCard(Widget):
-    value = reactive(" --
+    value = reactive(" --")
     label = reactive("Metric")
     trend = reactive("")
     color = reactive("white")
@@ -26,7 +26,7 @@ class MetricCard(Widget):
     def __init__(
         self,
         label: str,
-        value: str = " --
+        value: str = " --",
         trend: str = "",
         color: str = "white",
         **kwargs,
@@ -50,39 +50,39 @@ class AgentActivityWidget(Widget):
 
     def compose(self) -> ComposeResult:
         with Vertical():
-            with Horizontal(id="agent
+            with Horizontal(id="agent-metrics"):
                 yield MetricCard(
                     "Active Agents",
                     "0",
                     color="cyan",
-                    id="active
+                    id="active-agents-metric",
                 )
                 yield MetricCard(
                     "Issues Fixed",
                     "0",
                     "โ",
                     color="green",
-                    id="issues
+                    id="issues-fixed-metric",
                 )
                 yield MetricCard(
                     "Confidence",
-                    "0
+                    "0%",
                     color="yellow",
-                    id="confidence
+                    id="confidence-metric",
                 )
                 yield MetricCard(
                     "Cache Hits",
                     "0",
                     color="magenta",
-                    id="cache
+                    id="cache-hits-metric",
                 )
 
-            yield DataTable(id="agents
+            yield DataTable(id="agents-detail-table")
 
-            yield Label("โธ๏ธ Coordinator: Idle", id="coordinator
+            yield Label("โธ๏ธ Coordinator: Idle", id="coordinator-status-bar")
 
     def on_mount(self) -> None:
-        table = self.query_one("#agents
+        table = self.query_one("#agents-detail-table", DataTable)
         table.add_columns(
             ("Agent", 20),
             ("Status", 10),
@@ -100,7 +100,7 @@ class AgentActivityWidget(Widget):
         active_agents = activity.get("active_agents", [])
 
         active_count = len(active_agents)
-        self.query_one("#active
+        self.query_one("#active-agents-metric", MetricCard).value = str(
             active_count,
         )
 
@@ -110,14 +110,14 @@ class AgentActivityWidget(Widget):
         ) / max(active_count, 1)
         cache_hits = activity.get("cache_hits", 0)
 
-        self.query_one("#issues
+        self.query_one("#issues-fixed-metric", MetricCard).value = str(
             total_fixed,
         )
         self.query_one(
-            "#confidence
+            "#confidence-metric",
             MetricCard,
-        ).value = f"{avg_confidence
-        self.query_one("#cache
+        ).value = f"{avg_confidence:.0%}"
+        self.query_one("#cache-hits-metric", MetricCard).value = str(cache_hits)
 
         self._update_coordinator_status(activity)
 
@@ -130,25 +130,25 @@ class AgentActivityWidget(Widget):
         status_icons = {"active": "๐ข", "processing": "๐", "idle": "โธ๏ธ", "error": "๐ด"}
 
         icon = status_icons.get(status) or "โธ๏ธ"
-        status_bar = self.query_one("#coordinator
+        status_bar = self.query_one("#coordinator-status-bar", Label)
         status_bar.update(
             f"{icon} Coordinator: {status.title()} ({total_agents} agents available)",
         )
 
     def _update_agent_table(self, agents: list) -> None:
-        table = self.query_one("#agents
+        table = self.query_one("#agents-detail-table", DataTable)
         table.clear()
 
         if not agents:
-            table.add_row("No active agents", "
+            table.add_row("No active agents", "-", "-", "-", "-")
             return
 
         for agent in agents:
             name = agent.get("agent_type", "Unknown")
             status = agent.get("status", "idle")
-            issue_type = agent.get("issue_type", "
-            confidence = f"{agent.get('confidence', 0)
-            time_elapsed = f"{agent.get('processing_time', 0)
+            issue_type = agent.get("issue_type", "-")
+            confidence = f"{agent.get('confidence', 0):.0%}"
+            time_elapsed = f"{agent.get('processing_time', 0):.1f}s"
 
             status_emoji = {
                 "processing": "๐",
@@ -197,10 +197,10 @@ class JobProgressPanel(Widget):
         self.border_title_align = "left"
 
         with Horizontal():
-            with Vertical(id="job
+            with Vertical(id="job-progress-section"):
                 yield self._compose_progress_section()
 
-            with Vertical(id="job
+            with Vertical(id="job-metrics-section"):
                 yield self._compose_metrics_section()
 
     def _compose_progress_section(self) -> ComposeResult:
@@ -211,14 +211,14 @@ class JobProgressPanel(Widget):
         stage = self.job_data.get("stage", "Unknown")
         status = self.job_data.get("status", "Unknown")
 
-        yield Label(f"Stage: {stage}", classes="stage
-        yield Label(f"Status: {status}", classes="status
+        yield Label(f"Stage: {stage}", classes="stage-label")
+        yield Label(f"Status: {status}", classes="status-label")
         yield Label(f"Iteration: {iteration} / {max_iterations}")
 
         yield ProgressBar(
             total=100,
             progress=progress,
-            id=f"job
+            id=f"job-progress-{self.job_data.get('job_id', 'unknown')}",
         )
 
         elapsed = time.time() - self.start_time
@@ -229,7 +229,7 @@ class JobProgressPanel(Widget):
         fixed = self.job_data.get("errors_fixed", 0)
         remaining = max(0, total_issues - fixed)
 
-        with Horizontal(classes="metrics
+        with Horizontal(classes="metrics-grid"):
             yield MetricCard("Issues Found", str(total_issues), color="yellow")
             yield MetricCard(
                 "Fixed",
@@ -247,16 +247,16 @@ class JobProgressPanel(Widget):
         if total_issues > 0:
             success_rate = (fixed / total_issues) * 100
             yield Label(
-                f"Success Rate: {success_rate
-                classes="success
+                f"Success Rate: {success_rate:.1f}%",
+                classes="success-rate",
             )
 
     def _format_time(self, seconds: float) -> str:
         if seconds < 60:
-            return f"{seconds
+            return f"{seconds:.0f}s"
         if seconds < 3600:
-            return f"{seconds / 60
-        return f"{seconds / 3600
+            return f"{seconds / 60:.0f}m {seconds % 60:.0f}s"
+        return f"{seconds / 3600:.0f}h {(seconds % 3600) / 60:.0f}m"
 
 
 class ServiceHealthPanel(Widget):
@@ -266,10 +266,10 @@ class ServiceHealthPanel(Widget):
         self.border_title_align = "left"
 
     def compose(self) -> ComposeResult:
-        yield DataTable(id="services
+        yield DataTable(id="services-table")
 
     def on_mount(self) -> None:
-        table = self.query_one("#services
+        table = self.query_one("#services-table", DataTable)
         table.add_columns(
             ("Service", 20),
             ("Status", 12),
@@ -280,7 +280,7 @@ class ServiceHealthPanel(Widget):
         table.zebra_stripes = True
 
     def update_services(self, services: list[dict]) -> None:
-        table = self.query_one("#services
+        table = self.query_one("#services-table", DataTable)
         table.clear()
 
         for service in services:
@@ -308,7 +308,7 @@ class ServiceHealthPanel(Widget):
 
             if isinstance(last_check, int | float):
                 last_check_str = datetime.fromtimestamp(last_check).strftime(
-                    "
+                    "%H:%M:%S",
                 )
             else:
                 last_check_str = str(last_check)
@@ -323,12 +323,12 @@ class ServiceHealthPanel(Widget):
 
     def _format_uptime(self, seconds: float) -> str:
         if seconds < 60:
-            return f"{seconds
+            return f"{seconds:.0f}s"
         if seconds < 3600:
-            return f"{seconds / 60
+            return f"{seconds / 60:.0f}m"
         if seconds < 86400:
-            return f"{seconds / 3600
-        return f"{seconds / 86400
+            return f"{seconds / 3600:.1f}h"
+        return f"{seconds / 86400:.1f}d"
 
 
 class EnhancedCrackerjackDashboard(App):
@@ -349,13 +349,13 @@ class EnhancedCrackerjackDashboard(App):
     def compose(self) -> ComposeResult:
         yield Label("๐ Crackerjack Progress Monitor", id="header")
 
-        with Container(id="main
-            yield AgentActivityWidget(id="agent
+        with Container(id="main-content"):
+            yield AgentActivityWidget(id="agent-panel")
 
-            yield ServiceHealthPanel(id="service
+            yield ServiceHealthPanel(id="service-panel")
 
-            with Container(id="jobs
-                yield Label("Loading jobs...", id="jobs
+            with Container(id="jobs-container"):
+                yield Label("Loading jobs...", id="jobs-placeholder")
 
         yield Footer()
 
@@ -368,7 +368,7 @@ class EnhancedCrackerjackDashboard(App):
             jobs_data = jobs_result.get("data", {})
 
             services = self.service_manager.check_all_services()
-            self.query_one("#service
+            self.query_one("#service-panel", ServiceHealthPanel).update_services(
                 services,
             )
 
@@ -376,7 +376,7 @@ class EnhancedCrackerjackDashboard(App):
            aggregated_agent_data = self._aggregate_agent_data(
                 jobs_data["individual_jobs"],
             )
-            self.query_one("#agent
+            self.query_one("#agent-panel", AgentActivityWidget).update_metrics(
                 aggregated_agent_data,
             )
 
@@ -419,13 +419,13 @@ class EnhancedCrackerjackDashboard(App):
         return aggregated
 
     def _update_job_panels(self, jobs: list[dict]) -> None:
-        container = self.query_one("#jobs
+        container = self.query_one("#jobs-container", Container)
 
         with suppress(Exception):
-            container.remove_children("#jobs
+            container.remove_children("#jobs-placeholder")
 
-        existing_job_ids = {panel.id for panel in container.query(".job
-        current_job_ids = {f"job
+        existing_job_ids = {panel.id for panel in container.query(".job-panel")}
+        current_job_ids = {f"job-{job['job_id']}" for job in jobs}
 
         for panel_id in existing_job_ids - current_job_ids:
             with suppress(Exception):
@@ -433,9 +433,9 @@ class EnhancedCrackerjackDashboard(App):
                 panel.remove()
 
         for job in jobs:
-            panel_id = f"job
+            panel_id = f"job-{job['job_id']}"
             if panel_id not in existing_job_ids:
-                panel = JobProgressPanel(job, id=panel_id, classes="job
+                panel = JobProgressPanel(job, id=panel_id, classes="job-panel")
                 container.mount(panel)
             else:
                 panel = container.query_one(f"#{panel_id}", JobProgressPanel)
@@ -461,7 +461,7 @@ async def run_enhanced_progress_monitor(
         from rich.console import Console
 
         console = Console()
-        console.print("[bold cyan]๐ ๏ธ
+        console.print("[bold cyan]๐ ๏ธ Development Mode: Enabled[/bold cyan]")
         app.dev = True
 
     await app.run_async()
crackerjack/mcp/file_monitor.py CHANGED

@@ -19,15 +19,20 @@ import contextlib
 
 from rich.console import Console
 
+from crackerjack.services.secure_path_utils import SecurePathValidator
+
 console = Console()
 
 
 if WATCHDOG_AVAILABLE:
 
     class ProgressFileHandler(FileSystemEventHandler):
-        def __init__(
+        def __init__(
+            self, callback: Callable[[str, dict], None], progress_dir: Path
+        ) -> None:
             super().__init__()
             self.callback = callback
+            self.progress_dir = SecurePathValidator.validate_safe_path(progress_dir)
             self._last_processed: dict[str, float] = {}
             self._debounce_delay = 0.1
 
@@ -35,29 +40,47 @@ if WATCHDOG_AVAILABLE:
             if event.is_directory:
                 return
 
-
+            try:
+                file_path = Path(event.src_path)
 
-
-
+                # Validate that the file path is within our allowed progress directory
+                validated_path = SecurePathValidator.validate_safe_path(
+                    file_path, self.progress_dir
+                )
 
-
-
-
+                if (
+                    not validated_path.name.startswith("job-")
+                    or validated_path.suffix != ".json"
+                ):
                     return
 
-
+                now = time.time()
+                if validated_path.name in self._last_processed:
+                    if (
+                        now - self._last_processed[validated_path.name]
+                        < self._debounce_delay
+                    ):
+                        return
+
+                self._last_processed[validated_path.name] = now
 
-
+                job_id = validated_path.stem.replace("job-", "")
+            except Exception:
+                # If path validation fails, skip processing this file
+                return
 
             try:
-
+                # Validate file size before reading
+                SecurePathValidator.validate_file_size(validated_path)
+
+                with validated_path.open() as f:
                    progress_data = json.load(f)
 
                self.callback(job_id, progress_data)
 
            except (json.JSONDecodeError, FileNotFoundError, OSError) as e:
                console.print(
-                    f"[yellow]Warning: Failed to read progress file {file_path}: {e}[/yellow]",
+                    f"[yellow]Warning: Failed to read progress file {file_path}: {e}[/yellow]",
                )
 
        def on_created(self, event: FileSystemEvent) -> None:
@@ -71,7 +94,7 @@ else:
 
 class AsyncProgressMonitor:
     def __init__(self, progress_dir: Path) -> None:
-        self.progress_dir = progress_dir
+        self.progress_dir = SecurePathValidator.validate_safe_path(progress_dir)
         self.observer: Observer | None = None
         self.subscribers: dict[str, set[Callable[[dict], None]]] = {}
         self._running = False
@@ -80,7 +103,7 @@ class AsyncProgressMonitor:
 
         if not WATCHDOG_AVAILABLE:
             console.print(
-                "[yellow]Warning: watchdog not available, falling back to polling[/yellow]",
+                "[yellow]Warning: watchdog not available, falling back to polling[/yellow]",
             )
 
    async def start(self) -> None:
@@ -89,14 +112,14 @@ class AsyncProgressMonitor:
 
         self._running = True
 
-        handler = ProgressFileHandler(self._on_file_changed)
+        handler = ProgressFileHandler(self._on_file_changed, self.progress_dir)
 
         self.observer = Observer()
         self.observer.schedule(handler, str(self.progress_dir), recursive=False)
         self.observer.start()
 
         console.print(
-            f"[green]๐ Started monitoring progress directory: {self.progress_dir}[/green]",
+            f"[green]๐ Started monitoring progress directory: {self.progress_dir}[/green]",
         )
 
     async def stop(self) -> None:
@@ -107,14 +130,14 @@ class AsyncProgressMonitor:
             self.observer.join()
             self.observer = None
 
-        console.print("[yellow]๐ Stopped progress directory monitoring[/yellow]")
+        console.print("[yellow]๐ Stopped progress directory monitoring[/yellow]")
 
     def subscribe(self, job_id: str, callback: Callable[[dict], None]) -> None:
         if job_id not in self.subscribers:
             self.subscribers[job_id] = set()
 
         self.subscribers[job_id].add(callback)
-        console.print(f"[cyan]๐ Subscribed to job updates: {job_id}[/cyan]")
+        console.print(f"[cyan]๐ Subscribed to job updates: {job_id}[/cyan]")
 
     def unsubscribe(self, job_id: str, callback: Callable[[dict], None]) -> None:
         if job_id in self.subscribers:
@@ -123,7 +146,7 @@ class AsyncProgressMonitor:
             if not self.subscribers[job_id]:
                 del self.subscribers[job_id]
 
-        console.print(f"[cyan]๐ Unsubscribed from job updates: {job_id}[/cyan]")
+        console.print(f"[cyan]๐ Unsubscribed from job updates: {job_id}[/cyan]")
 
     def _on_file_changed(self, job_id: str, progress_data: dict) -> None:
         if job_id in self.subscribers:
@@ -132,13 +155,13 @@ class AsyncProgressMonitor:
                     callback(progress_data)
                 except Exception as e:
                     console.print(
-                        f"[red]Error in progress callback for job {job_id}: {e}[/red]",
+                        f"[red]Error in progress callback for job {job_id}: {e}[/red]",
                     )
 
                     self.subscribers[job_id].discard(callback)
 
     async def get_current_progress(self, job_id: str) -> dict | None:
-        progress_file = self.progress_dir / f"job
+        progress_file = self.progress_dir / f"job-{job_id}.json"
 
         if not progress_file.exists():
             return None
@@ -156,7 +179,7 @@ class AsyncProgressMonitor:
         cleaned = 0
         cutoff_time = time.time() - (max_age_minutes * 60)
 
-        for progress_file in self.progress_dir.glob("job
+        for progress_file in self.progress_dir.glob("job-*.json"):
             try:
                 if progress_file.stat().st_mtime < cutoff_time:
                     with progress_file.open() as f:
@@ -166,7 +189,7 @@ class AsyncProgressMonitor:
                     progress_file.unlink()
                     cleaned += 1
                     console.print(
-                        f"[dim]๐งน Cleaned up old progress file: {progress_file.name}[/dim]",
+                        f"[dim]๐งน Cleaned up old progress file: {progress_file.name}[/dim]",
                    )
 
             except (json.JSONDecodeError, OSError, KeyError):
@@ -176,7 +199,7 @@ class AsyncProgressMonitor:
                 progress_file.unlink()
                 cleaned += 1
                 console.print(
-                    f"[dim]๐งน Removed corrupted progress file: {progress_file.name}[/dim]",
+                    f"[dim]๐งน Removed corrupted progress file: {progress_file.name}[/dim]",
                )
 
         return cleaned
@@ -184,7 +207,7 @@ class AsyncProgressMonitor:
 
 class PollingProgressMonitor:
     def __init__(self, progress_dir: Path) -> None:
-        self.progress_dir = progress_dir
+        self.progress_dir = SecurePathValidator.validate_safe_path(progress_dir)
         self.subscribers: dict[str, set[Callable[[dict], None]]] = {}
         self._running = False
         self._poll_task: asyncio.Task | None = None
@@ -196,7 +219,7 @@ class PollingProgressMonitor:
         self._running = True
         self._poll_task = asyncio.create_task(self._poll_loop())
         console.print(
-            f"[yellow]๐ Started polling progress directory: {self.progress_dir}[/yellow]",
+            f"[yellow]๐ Started polling progress directory: {self.progress_dir}[/yellow]",
        )
 
     async def stop(self) -> None:
@@ -208,7 +231,7 @@ class PollingProgressMonitor:
             await self._poll_task
             self._poll_task = None
 
-        console.print("[yellow]๐ Stopped progress directory polling[/yellow]")
+        console.print("[yellow]๐ Stopped progress directory polling[/yellow]")
 
     async def _poll_loop(self) -> None:
         while self._running:
@@ -218,7 +241,7 @@ class PollingProgressMonitor:
             except asyncio.CancelledError:
                 break
             except Exception as e:
-                console.print(f"[red]Error in polling loop: {e}[/red]")
+                console.print(f"[red]Error in polling loop: {e}[/red]")
                 await asyncio.sleep(1)
 
     async def _check_files(self) -> None:
@@ -227,26 +250,33 @@ class PollingProgressMonitor:
 
         current_files = {}
 
-        for progress_file in self.progress_dir.glob("job
+        for progress_file in self.progress_dir.glob("job-*.json"):
             try:
-
-
+                # Validate file path is within our allowed directory
+                validated_file = SecurePathValidator.validate_safe_path(
+                    progress_file, self.progress_dir
+                )
+
+                mtime = validated_file.stat().st_mtime
+                current_files[validated_file.name] = mtime
 
                 if (
-
-                    or mtime > self._file_mtimes[
+                    validated_file.name not in self._file_mtimes
+                    or mtime > self._file_mtimes[validated_file.name]
                 ):
-                    job_id =
+                    job_id = validated_file.stem.replace("job-", "")
 
                     try:
-
+                        # Validate file size before reading
+                        SecurePathValidator.validate_file_size(validated_file)
+                        with validated_file.open() as f:
                            progress_data = json.load(f)
 
                        self._notify_subscribers(job_id, progress_data)
 
                    except (json.JSONDecodeError, OSError) as e:
                        console.print(
-                            f"[yellow]Warning: Failed to read progress file {progress_file}: {e}[/yellow]",
+                            f"[yellow]Warning: Failed to read progress file {progress_file}: {e}[/yellow]",
                        )
 
             except OSError:
@@ -261,7 +291,7 @@ class PollingProgressMonitor:
                     callback(progress_data)
                 except Exception as e:
                     console.print(
-                        f"[red]Error in progress callback for job {job_id}: {e}[/red]",
+                        f"[red]Error in progress callback for job {job_id}: {e}[/red]",
                     )
                     self.subscribers[job_id].discard(callback)
 
@@ -270,7 +300,7 @@ class PollingProgressMonitor:
             self.subscribers[job_id] = set()
 
         self.subscribers[job_id].add(callback)
-        console.print(f"[cyan]๐ Subscribed to job updates: {job_id} (polling)[/cyan]")
+        console.print(f"[cyan]๐ Subscribed to job updates: {job_id} (polling)[/cyan]")
 
     def unsubscribe(self, job_id: str, callback: Callable[[dict], None]) -> None:
         if job_id in self.subscribers:
@@ -280,11 +310,11 @@ class PollingProgressMonitor:
                 del self.subscribers[job_id]
 
         console.print(
-            f"[cyan]๐ Unsubscribed from job updates: {job_id} (polling)[/cyan]",
+            f"[cyan]๐ Unsubscribed from job updates: {job_id} (polling)[/cyan]",
         )
 
     async def get_current_progress(self, job_id: str) -> dict | None:
-        progress_file = self.progress_dir / f"job
+        progress_file = self.progress_dir / f"job-{job_id}.json"
 
         if not progress_file.exists():
             return None
@@ -302,7 +332,7 @@ class PollingProgressMonitor:
         cleaned = 0
         cutoff_time = time.time() - (max_age_minutes * 60)
 
-        for progress_file in self.progress_dir.glob("job
+        for progress_file in self.progress_dir.glob("job-*.json"):
             try:
                 if progress_file.stat().st_mtime < cutoff_time:
                     with progress_file.open() as f:
@@ -312,7 +342,7 @@ class PollingProgressMonitor:
                     progress_file.unlink()
                     cleaned += 1
                     console.print(
-                        f"[dim]๐งน Cleaned up old progress file: {progress_file.name}[/dim]",
+                        f"[dim]๐งน Cleaned up old progress file: {progress_file.name}[/dim]",
                    )
 
             except (json.JSONDecodeError, OSError, KeyError):
@@ -322,7 +352,7 @@ class PollingProgressMonitor:
                 progress_file.unlink()
                 cleaned += 1
                 console.print(
-                    f"[dim]๐งน Removed corrupted progress file: {progress_file.name}[/dim]",
+                    f"[dim]๐งน Removed corrupted progress file: {progress_file.name}[/dim]",
                )
 
         return cleaned