crackerjack 0.30.3__py3-none-any.whl → 0.31.4__py3-none-any.whl
This diff shows the content of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of crackerjack has been flagged as possibly problematic.
- crackerjack/CLAUDE.md +1005 -0
- crackerjack/RULES.md +380 -0
- crackerjack/__init__.py +42 -13
- crackerjack/__main__.py +225 -299
- crackerjack/agents/__init__.py +41 -0
- crackerjack/agents/architect_agent.py +281 -0
- crackerjack/agents/base.py +169 -0
- crackerjack/agents/coordinator.py +512 -0
- crackerjack/agents/documentation_agent.py +498 -0
- crackerjack/agents/dry_agent.py +388 -0
- crackerjack/agents/formatting_agent.py +245 -0
- crackerjack/agents/import_optimization_agent.py +281 -0
- crackerjack/agents/performance_agent.py +669 -0
- crackerjack/agents/proactive_agent.py +104 -0
- crackerjack/agents/refactoring_agent.py +788 -0
- crackerjack/agents/security_agent.py +529 -0
- crackerjack/agents/test_creation_agent.py +652 -0
- crackerjack/agents/test_specialist_agent.py +486 -0
- crackerjack/agents/tracker.py +212 -0
- crackerjack/api.py +560 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/facade.py +104 -0
- crackerjack/cli/handlers.py +267 -0
- crackerjack/cli/interactive.py +471 -0
- crackerjack/cli/options.py +401 -0
- crackerjack/cli/utils.py +18 -0
- crackerjack/code_cleaner.py +618 -928
- crackerjack/config/__init__.py +19 -0
- crackerjack/config/hooks.py +218 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +406 -0
- crackerjack/core/autofix_coordinator.py +200 -0
- crackerjack/core/container.py +104 -0
- crackerjack/core/enhanced_container.py +542 -0
- crackerjack/core/performance.py +243 -0
- crackerjack/core/phase_coordinator.py +561 -0
- crackerjack/core/proactive_workflow.py +316 -0
- crackerjack/core/session_coordinator.py +289 -0
- crackerjack/core/workflow_orchestrator.py +640 -0
- crackerjack/dynamic_config.py +94 -103
- crackerjack/errors.py +263 -41
- crackerjack/executors/__init__.py +11 -0
- crackerjack/executors/async_hook_executor.py +431 -0
- crackerjack/executors/cached_hook_executor.py +242 -0
- crackerjack/executors/hook_executor.py +345 -0
- crackerjack/executors/individual_hook_executor.py +669 -0
- crackerjack/intelligence/__init__.py +44 -0
- crackerjack/intelligence/adaptive_learning.py +751 -0
- crackerjack/intelligence/agent_orchestrator.py +551 -0
- crackerjack/intelligence/agent_registry.py +414 -0
- crackerjack/intelligence/agent_selector.py +502 -0
- crackerjack/intelligence/integration.py +290 -0
- crackerjack/interactive.py +576 -315
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +137 -0
- crackerjack/managers/publish_manager.py +411 -0
- crackerjack/managers/test_command_builder.py +151 -0
- crackerjack/managers/test_executor.py +435 -0
- crackerjack/managers/test_manager.py +258 -0
- crackerjack/managers/test_manager_backup.py +1124 -0
- crackerjack/managers/test_progress.py +144 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +336 -0
- crackerjack/mcp/client_runner.py +104 -0
- crackerjack/mcp/context.py +615 -0
- crackerjack/mcp/dashboard.py +636 -0
- crackerjack/mcp/enhanced_progress_monitor.py +479 -0
- crackerjack/mcp/file_monitor.py +336 -0
- crackerjack/mcp/progress_components.py +569 -0
- crackerjack/mcp/progress_monitor.py +949 -0
- crackerjack/mcp/rate_limiter.py +332 -0
- crackerjack/mcp/server.py +22 -0
- crackerjack/mcp/server_core.py +244 -0
- crackerjack/mcp/service_watchdog.py +501 -0
- crackerjack/mcp/state.py +395 -0
- crackerjack/mcp/task_manager.py +257 -0
- crackerjack/mcp/tools/__init__.py +17 -0
- crackerjack/mcp/tools/core_tools.py +249 -0
- crackerjack/mcp/tools/error_analyzer.py +308 -0
- crackerjack/mcp/tools/execution_tools.py +370 -0
- crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
- crackerjack/mcp/tools/intelligence_tools.py +314 -0
- crackerjack/mcp/tools/monitoring_tools.py +502 -0
- crackerjack/mcp/tools/proactive_tools.py +384 -0
- crackerjack/mcp/tools/progress_tools.py +141 -0
- crackerjack/mcp/tools/utility_tools.py +341 -0
- crackerjack/mcp/tools/workflow_executor.py +360 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +39 -0
- crackerjack/mcp/websocket/endpoints.py +559 -0
- crackerjack/mcp/websocket/jobs.py +253 -0
- crackerjack/mcp/websocket/server.py +116 -0
- crackerjack/mcp/websocket/websocket_handler.py +78 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/__init__.py +31 -0
- crackerjack/models/config.py +93 -0
- crackerjack/models/config_adapter.py +230 -0
- crackerjack/models/protocols.py +118 -0
- crackerjack/models/task.py +154 -0
- crackerjack/monitoring/ai_agent_watchdog.py +450 -0
- crackerjack/monitoring/regression_prevention.py +638 -0
- crackerjack/orchestration/__init__.py +0 -0
- crackerjack/orchestration/advanced_orchestrator.py +970 -0
- crackerjack/orchestration/execution_strategies.py +341 -0
- crackerjack/orchestration/test_progress_streamer.py +636 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +246 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +259 -0
- crackerjack/py313.py +8 -3
- crackerjack/services/__init__.py +22 -0
- crackerjack/services/cache.py +314 -0
- crackerjack/services/config.py +347 -0
- crackerjack/services/config_integrity.py +99 -0
- crackerjack/services/contextual_ai_assistant.py +516 -0
- crackerjack/services/coverage_ratchet.py +347 -0
- crackerjack/services/debug.py +736 -0
- crackerjack/services/dependency_monitor.py +617 -0
- crackerjack/services/enhanced_filesystem.py +439 -0
- crackerjack/services/file_hasher.py +151 -0
- crackerjack/services/filesystem.py +395 -0
- crackerjack/services/git.py +165 -0
- crackerjack/services/health_metrics.py +611 -0
- crackerjack/services/initialization.py +847 -0
- crackerjack/services/log_manager.py +286 -0
- crackerjack/services/logging.py +174 -0
- crackerjack/services/metrics.py +578 -0
- crackerjack/services/pattern_cache.py +362 -0
- crackerjack/services/pattern_detector.py +515 -0
- crackerjack/services/performance_benchmarks.py +653 -0
- crackerjack/services/security.py +163 -0
- crackerjack/services/server_manager.py +234 -0
- crackerjack/services/smart_scheduling.py +144 -0
- crackerjack/services/tool_version_service.py +61 -0
- crackerjack/services/unified_config.py +437 -0
- crackerjack/services/version_checker.py +248 -0
- crackerjack/slash_commands/__init__.py +14 -0
- crackerjack/slash_commands/init.md +122 -0
- crackerjack/slash_commands/run.md +163 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack-0.31.4.dist-info/METADATA +742 -0
- crackerjack-0.31.4.dist-info/RECORD +148 -0
- crackerjack-0.31.4.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -34
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/crackerjack.py +0 -3805
- crackerjack/pyproject.toml +0 -286
- crackerjack-0.30.3.dist-info/METADATA +0 -1290
- crackerjack-0.30.3.dist-info/RECORD +0 -16
- {crackerjack-0.30.3.dist-info → crackerjack-0.31.4.dist-info}/WHEEL +0 -0
- {crackerjack-0.30.3.dist-info → crackerjack-0.31.4.dist-info}/licenses/LICENSE +0 -0
crackerjack/mcp/websocket/jobs.py
@@ -0,0 +1,253 @@
+import asyncio
+import json
+import time
+import uuid
+from contextlib import suppress
+from pathlib import Path
+from typing import Any
+
+from rich.console import Console
+
+console = Console()
+
+
+class JobManager:
+    def __init__(self, progress_dir: Path) -> None:
+        self.progress_dir = progress_dir
+        self.active_connections: dict[str, set[Any]] = {}
+        self.known_jobs: set[str] = set()
+        self.is_running = True
+
+        self.progress_dir.mkdir(exist_ok=True)
+
+    def validate_job_id(self, job_id: str) -> bool:
+        if not job_id:
+            return False
+
+        with suppress(ValueError):
+            uuid.UUID(job_id)
+            return True
+
+        import re
+
+        # Performance: Use simpler regex pattern without whitespace
+        return bool(re.match(r"^[a-zA-Z0-9_-]+$", job_id) and len(job_id) <= 50)
+
+    def add_connection(self, job_id: str, websocket: Any) -> None:
+        if job_id not in self.active_connections:
+            self.active_connections[job_id] = set()
+        self.active_connections[job_id].add(websocket)
+
+    def remove_connection(self, job_id: str, websocket: Any) -> None:
+        if job_id in self.active_connections:
+            self.active_connections[job_id].discard(websocket)
+            if not self.active_connections[job_id]:
+                del self.active_connections[job_id]
+
+    async def broadcast_to_job(self, job_id: str, data: dict) -> None:
+        if job_id not in self.active_connections:
+            return
+
+        connections = self.active_connections[job_id].copy()
+        for websocket in connections:
+            try:
+                await websocket.send_json(data)
+            except Exception:
+                self.remove_connection(job_id, websocket)
+
+    def get_latest_job_id(self) -> str | None:
+        if not self.progress_dir.exists():
+            return None
+
+        # Performance: Use more specific glob pattern to reduce filesystem calls
+        progress_files = list(self.progress_dir.glob("job-*.json"))
+        if not progress_files:
+            return None
+
+        latest_file = max(progress_files, key=lambda f: f.stat().st_mtime)
+        return self.extract_job_id_from_file(latest_file)
+
+    def extract_job_id_from_file(self, progress_file: Path) -> str | None:
+        # Performance: Use slice instead of replace for fixed prefix removal
+        return (
+            progress_file.stem[4:]  # Remove "job-" prefix (4 chars)
+            if progress_file.stem.startswith("job-")
+            else None
+        )
+
+    def get_job_progress(self, job_id: str) -> dict | None:
+        if not self.validate_job_id(job_id):
+            return None
+
+        progress_file = self.progress_dir / f"job-{job_id}.json"
+        if not progress_file.exists():
+            return None
+
+        try:
+            return json.loads(progress_file.read_text())
+        except (json.JSONDecodeError, OSError):
+            return None
+
+    async def _process_progress_file(self, progress_file: Path) -> None:
+        """Process a single progress file and handle new job detection."""
+        job_id = self.extract_job_id_from_file(progress_file)
+        if not (job_id and self.validate_job_id(job_id)):
+            return
+
+        progress_data = self.get_job_progress(job_id)
+        if progress_data and job_id not in self.known_jobs:
+            self.known_jobs.add(job_id)
+            console.print(f"[green]New job detected: {job_id}[/green]")
+            await self.broadcast_to_job(job_id, progress_data)
+
+    async def _monitor_directory_changes(self) -> None:
+        """Monitor the progress directory for new job files."""
+        while self.is_running:
+            try:
+                if self.progress_dir.exists():
+                    for progress_file in self.progress_dir.glob("job-*.json"):
+                        await self._process_progress_file(progress_file)
+
+                await asyncio.sleep(1)  # Check every second
+            except Exception as e:
+                console.print(f"[red]Progress monitoring error: {e}[/red]")
+                await asyncio.sleep(5)  # Wait longer on error
+
+    async def monitor_progress_files(self) -> None:
+        from crackerjack.mcp.file_monitor import create_progress_monitor
+
+        console.print("[blue]Starting progress file monitoring...[/blue]")
+
+        try:
+            monitor = create_progress_monitor(self.progress_dir)
+            await monitor.start()
+
+            def on_progress_update(job_id: str, progress_data: dict) -> None:
+                """Callback for when progress files are updated."""
+                if job_id and self.validate_job_id(job_id):
+                    # Schedule the broadcast in the event loop
+                    asyncio.create_task(self.broadcast_to_job(job_id, progress_data))
+
+                    if job_id not in self.known_jobs:
+                        self.known_jobs.add(job_id)
+                        console.print(f"[green]New job detected: {job_id}[/green]")
+
+            # Monitor for new job files by checking the directory periodically
+            await self._monitor_directory_changes()
+
+        except Exception as e:
+            console.print(f"[red]Progress monitoring setup error: {e}[/red]")
+
+    async def cleanup_old_jobs(self) -> None:
+        """Periodically clean up old job files."""
+        while self.is_running:
+            try:
+                await self._perform_cleanup_cycle()
+                await asyncio.sleep(3600)
+            except Exception as e:
+                console.print(f"[red]Cleanup error: {e}[/red]")
+                await asyncio.sleep(3600)
+
+    async def _perform_cleanup_cycle(self) -> None:
+        """Perform a single cleanup cycle for old jobs."""
+        if not self.progress_dir.exists():
+            return
+
+        cutoff_time = self._calculate_cleanup_cutoff_time()
+        old_job_files = self._find_old_job_files(cutoff_time)
+
+        for progress_file in old_job_files:
+            self._cleanup_old_job_file(progress_file)
+
+    def _calculate_cleanup_cutoff_time(self) -> float:
+        """Calculate cutoff time for job cleanup (24 hours ago)."""
+        return time.time() - (24 * 60 * 60)
+
+    def _find_old_job_files(self, cutoff_time: float) -> list[Path]:
+        """Find job files older than the cutoff time."""
+        return [
+            progress_file
+            for progress_file in self.progress_dir.glob("job-*.json")
+            if progress_file.stat().st_mtime < cutoff_time
+        ]
+
+    def _cleanup_old_job_file(self, progress_file: Path) -> None:
+        """Clean up a single old job file if it's safe to do so."""
+        job_id = self.extract_job_id_from_file(progress_file)
+
+        if job_id not in self.active_connections:
+            progress_file.unlink(missing_ok=True)
+            console.print(f"[yellow]Cleaned up old job: {job_id}[/yellow]")
+
+    async def timeout_stuck_jobs(self) -> None:
+        """Monitor and timeout stuck jobs that haven't been updated."""
+        while self.is_running:
+            try:
+                await self._check_and_timeout_stuck_jobs()
+                await asyncio.sleep(300)
+            except Exception as e:
+                console.print(f"[red]Timeout check error: {e}[/red]")
+                await asyncio.sleep(300)
+
+    async def _check_and_timeout_stuck_jobs(self) -> None:
+        """Check for stuck jobs and timeout those that are inactive."""
+        if not self.progress_dir.exists():
+            return
+
+        current_time = time.time()
+        timeout_seconds = 30 * 60
+
+        for progress_file in self.progress_dir.glob("job-*.json"):
+            await self._process_job_timeout_check(
+                progress_file,
+                current_time,
+                timeout_seconds,
+            )
+
+    async def _process_job_timeout_check(
+        self,
+        progress_file: Path,
+        current_time: float,
+        timeout_seconds: int,
+    ) -> None:
+        """Process timeout check for a single job file."""
+        try:
+            progress_data = json.loads(progress_file.read_text())
+
+            if self._should_timeout_job(
+                progress_data,
+                progress_file,
+                current_time,
+                timeout_seconds,
+            ):
+                self._timeout_job(progress_data, progress_file)
+
+        except (json.JSONDecodeError, OSError):
+            pass  # Skip files that can't be processed
+
+    def _should_timeout_job(
+        self,
+        progress_data: dict,
+        progress_file: Path,
+        current_time: float,
+        timeout_seconds: int,
+    ) -> bool:
+        """Determine if a job should be timed out."""
+        return (
+            progress_data.get("status") == "running"
+            and current_time - progress_file.stat().st_mtime > timeout_seconds
+        )
+
+    def _timeout_job(self, progress_data: dict, progress_file: Path) -> None:
+        """Mark a job as failed due to timeout."""
+        progress_data["status"] = "failed"
+        progress_data["message"] = "Job timed out (no updates for 30 minutes)"
+
+        progress_file.write_text(json.dumps(progress_data, indent=2))
+
+        job_id = progress_data.get("job_id", "unknown")
+        console.print(f"[red]Job {job_id} timed out and marked as failed[/red]")
+
+    def cleanup(self) -> None:
+        self.is_running = False
+        console.print("[blue]Job manager cleanup completed[/blue]")

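For orientation, a minimal sketch of how the new JobManager can be exercised on its own, outside the FastAPI app. The import path follows the file list above (crackerjack/mcp/websocket/jobs.py) and the temp directory mirrors what WebSocketServer sets up by default; this is illustrative only, not a documented API.

import tempfile
from pathlib import Path

from crackerjack.mcp.websocket.jobs import JobManager  # module path per the file list above

# Same temp directory the WebSocket server uses by default.
manager = JobManager(Path(tempfile.gettempdir()) / "crackerjack-mcp-progress")

latest = manager.get_latest_job_id()  # newest job-<id>.json by mtime, or None
if latest:
    print(latest, manager.get_job_progress(latest))

manager.cleanup()  # flips is_running so the monitoring/cleanup loops exit
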
crackerjack/mcp/websocket/server.py
@@ -0,0 +1,116 @@
+import signal
+import subprocess
+import tempfile
+import time
+from pathlib import Path
+
+import uvicorn
+from rich.console import Console
+
+from .app import create_websocket_app
+from .jobs import JobManager
+
+console = Console()
+
+
+class WebSocketServer:
+    def __init__(self, port: int = 8675) -> None:
+        self.port = port
+        self.progress_dir = Path(tempfile.gettempdir()) / "crackerjack-mcp-progress"
+        self.is_running = True
+        self.job_manager: JobManager | None = None
+        self.app = None
+
+    def setup(self) -> None:
+        self.progress_dir.mkdir(exist_ok=True)
+
+        self.job_manager = JobManager(self.progress_dir)
+
+        self.app = create_websocket_app(self.job_manager, self.progress_dir)
+
+        signal.signal(signal.SIGINT, self._signal_handler)
+        signal.signal(signal.SIGTERM, self._signal_handler)
+
+    def _signal_handler(self, _signum: int, _frame) -> None:
+        console.print("\n[yellow]Shutting down WebSocket server...[/yellow]")
+        self.is_running = False
+
+    def run(self) -> None:
+        try:
+            self.setup()
+            console.print(
+                f"[green]Starting WebSocket server on port {self.port}[/green]",
+            )
+            console.print(f"Progress directory: {self.progress_dir}")
+            console.print("Press Ctrl+C to stop")
+
+            config = uvicorn.Config(
+                app=self.app,
+                port=self.port,
+                host="127.0.0.1",
+                log_level="info",
+            )
+
+            server = uvicorn.Server(config)
+            server.run()
+
+        except KeyboardInterrupt:
+            console.print("\n[yellow]Server stopped by user[/yellow]")
+        except Exception as e:
+            console.print(f"[red]Server error: {e}[/red]")
+        finally:
+            pass  # Cleanup handled by FastAPI lifespan
+
+
+def handle_websocket_server_command(
+    start: bool = False,
+    stop: bool = False,
+    restart: bool = False,
+    port: int = 8675,
+) -> None:
+    """Handle WebSocket server start/stop/restart commands."""
+    if stop or restart:
+        console.print("[yellow]Stopping WebSocket servers...[/yellow]")
+        # Kill any existing uvicorn processes running on the port
+        try:
+            result = subprocess.run(
+                ["pkill", "-f", f"uvicorn.*:{port}"],
+                check=False,
+                capture_output=True,
+                text=True,
+                timeout=10,
+            )
+            if result.returncode == 0:
+                console.print("[green]✅ WebSocket servers stopped[/green]")
+            else:
+                console.print("[dim]No WebSocket servers were running[/dim]")
+        except subprocess.TimeoutExpired:
+            console.print("[red]Timeout stopping WebSocket servers[/red]")
+        except Exception as e:
+            console.print(f"[red]Error stopping WebSocket servers: {e}[/red]")
+
+        if stop:
+            return
+
+        # For restart, wait a moment before starting again
+        time.sleep(2)
+
+    if start or restart:
+        console.print(f"[green]Starting WebSocket server on port {port}...[/green]")
+        try:
+            server = WebSocketServer(port)
+            server.run()
+        except Exception as e:
+            console.print(f"[red]Failed to start WebSocket server: {e}[/red]")
+
+
+def main(port: int = 8675) -> None:
+    server = WebSocketServer(port)
+    server.run()
+
+
+if __name__ == "__main__":
+    import sys
+
+    port = int(sys.argv[1]) if len(sys.argv) > 1 else 8675
+    main(port)

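A hedged usage sketch for the server module above. The import path comes from the file list; port 8699 is an arbitrary example, and the commented helper call assumes a POSIX environment because the shipped code shells out to pkill.

from crackerjack.mcp.websocket.server import WebSocketServer, handle_websocket_server_command

# Run the progress server in the foreground on a non-default port
# (blocks until Ctrl+C / SIGTERM).
WebSocketServer(port=8699).run()

# Alternatively, the command helper stops anything bound to the port via
# pkill (POSIX only) and then starts a fresh instance:
# handle_websocket_server_command(restart=True, port=8675)
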
crackerjack/mcp/websocket/websocket_handler.py
@@ -0,0 +1,78 @@
+from pathlib import Path
+
+from fastapi import FastAPI, WebSocket, WebSocketDisconnect
+from rich.console import Console
+
+from .jobs import JobManager
+
+console = Console()
+
+
+class WebSocketHandler:
+    def __init__(self, job_manager: JobManager, progress_dir: Path) -> None:
+        self.job_manager = job_manager
+        self.progress_dir = progress_dir
+
+    async def handle_connection(self, websocket: WebSocket, job_id: str) -> None:
+        if not self.job_manager.validate_job_id(job_id):
+            await websocket.close(code=1008, reason="Invalid job ID")
+            return
+
+        await websocket.accept()
+        self.job_manager.add_connection(job_id, websocket)
+
+        console.print(f"[green]WebSocket connected for job: {job_id}[/green]")
+
+        try:
+            initial_progress = self.job_manager.get_job_progress(job_id)
+            if initial_progress:
+                await websocket.send_json(initial_progress)
+            else:
+                await websocket.send_json(
+                    {
+                        "job_id": job_id,
+                        "status": "waiting",
+                        "message": "Waiting for job to start...",
+                        "overall_progress": 0,
+                        "iteration": 0,
+                        "max_iterations": 10,
+                        "current_stage": "Initializing",
+                    },
+                )
+
+            while True:
+                try:
+                    data = await websocket.receive_text()
+                    console.print(
+                        f"[blue]Received message for {job_id}: {data[:100]}...[/blue]",
+                    )
+
+                    await websocket.send_json(
+                        {
+                            "type": "echo",
+                            "message": f"Received: {data}",
+                            "job_id": job_id,
+                        },
+                    )
+
+                except WebSocketDisconnect:
+                    break
+
+        except WebSocketDisconnect:
+            console.print(f"[yellow]WebSocket disconnected for job: {job_id}[/yellow]")
+        except Exception as e:
+            console.print(f"[red]WebSocket error for job {job_id}: {e}[/red]")
+        finally:
+            self.job_manager.remove_connection(job_id, websocket)
+
+
+def register_websocket_routes(
+    app: FastAPI,
+    job_manager: JobManager,
+    progress_dir: Path,
+) -> None:
+    handler = WebSocketHandler(job_manager, progress_dir)
+
+    @app.websocket("/ws/progress/{job_id}")
+    async def websocket_progress_endpoint(websocket: WebSocket, job_id: str) -> None:
+        await handler.handle_connection(websocket, job_id)

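For completeness, a client-side sketch of consuming the /ws/progress/{job_id} endpoint registered above. The third-party websockets package is only an illustrative choice (any WebSocket client works), the host and port come from the WebSocketServer defaults, and the job id shown is a placeholder.

import asyncio
import json

import websockets  # third-party client, used here for illustration only


async def watch(job_id: str) -> None:
    # Route shape from register_websocket_routes; host/port from WebSocketServer defaults.
    url = f"ws://127.0.0.1:8675/ws/progress/{job_id}"
    async with websockets.connect(url) as ws:
        while True:
            update = json.loads(await ws.recv())
            print(update.get("status"), update.get("overall_progress"))
            if update.get("status") in ("completed", "failed"):
                break


# Replace with a real job id; a UUID or short [a-zA-Z0-9_-] token passes validate_job_id.
asyncio.run(watch("example-job-id"))
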
crackerjack/models/__init__.py
@@ -0,0 +1,31 @@
+from .config import (
+    AIConfig,
+    CleaningConfig,
+    CleanupConfig,
+    ExecutionConfig,
+    GitConfig,
+    HookConfig,
+    ProgressConfig,
+    PublishConfig,
+    TestConfig,
+    WorkflowOptions,
+)
+from .protocols import OptionsProtocol
+from .task import HookResult, SessionTracker, TaskStatus
+
+__all__ = [
+    "AIConfig",
+    "CleaningConfig",
+    "CleanupConfig",
+    "ExecutionConfig",
+    "GitConfig",
+    "HookConfig",
+    "HookResult",
+    "OptionsProtocol",
+    "ProgressConfig",
+    "PublishConfig",
+    "SessionTracker",
+    "TaskStatus",
+    "TestConfig",
+    "WorkflowOptions",
+]

crackerjack/models/config.py
@@ -0,0 +1,93 @@
+import typing as t
+from dataclasses import dataclass, field
+
+
+@dataclass
+class CleaningConfig:
+    clean: bool = True
+    update_docs: bool = False
+    force_update_docs: bool = False
+    compress_docs: bool = False
+    auto_compress_docs: bool = False
+
+
+@dataclass
+class HookConfig:
+    skip_hooks: bool = False
+    update_precommit: bool = False
+    experimental_hooks: bool = False
+    enable_pyrefly: bool = False
+    enable_ty: bool = False
+
+
+@dataclass
+class TestConfig:
+    test: bool = False
+    benchmark: bool = False
+    test_workers: int = 0
+    test_timeout: int = 0
+
+
+@dataclass
+class PublishConfig:
+    publish: t.Any | None = None
+    bump: t.Any | None = None
+    all: t.Any | None = None
+    no_git_tags: bool = False
+    skip_version_check: bool = False
+
+
+@dataclass
+class GitConfig:
+    commit: bool = False
+    create_pr: bool = False
+
+
+@dataclass
+class AIConfig:
+    ai_agent: bool = False
+    start_mcp_server: bool = False
+    max_iterations: int = 10
+    autofix: bool = True
+    ai_agent_autofix: bool = False
+
+
+@dataclass
+class ExecutionConfig:
+    interactive: bool = False
+    verbose: bool = False
+    async_mode: bool = False
+    no_config_updates: bool = False
+
+
+@dataclass
+class ProgressConfig:
+    enabled: bool = False
+
+
+@dataclass
+class CleanupConfig:
+    auto_cleanup: bool = True
+    keep_debug_logs: int = 5
+    keep_coverage_files: int = 10
+
+
+@dataclass
+class EnterpriseConfig:
+    enabled: bool = False
+    license_key: str | None = None
+    organization: str | None = None
+
+
+@dataclass
+class WorkflowOptions:
+    cleaning: CleaningConfig = field(default_factory=CleaningConfig)
+    hooks: HookConfig = field(default_factory=HookConfig)
+    testing: TestConfig = field(default_factory=TestConfig)
+    publishing: PublishConfig = field(default_factory=PublishConfig)
+    git: GitConfig = field(default_factory=GitConfig)
+    ai: AIConfig = field(default_factory=AIConfig)
+    execution: ExecutionConfig = field(default_factory=ExecutionConfig)
+    progress: ProgressConfig = field(default_factory=ProgressConfig)
+    cleanup: CleanupConfig = field(default_factory=CleanupConfig)
+    enterprise: EnterpriseConfig = field(default_factory=EnterpriseConfig)

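A short usage sketch for the new option dataclasses. The imports go through crackerjack.models, which re-exports these names (see models/__init__.py above); how the CLI actually populates WorkflowOptions is handled elsewhere (e.g. models/config_adapter.py in the file list) and is not shown here.

from crackerjack.models import AIConfig, TestConfig, WorkflowOptions

# Defaults everywhere except the groups we override explicitly.
options = WorkflowOptions(
    testing=TestConfig(test=True, test_workers=4),
    ai=AIConfig(ai_agent=True, max_iterations=5),
)

print(options.cleaning.clean)        # True  (CleaningConfig default)
print(options.testing.test_workers)  # 4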