crackerjack 0.29.0__py3-none-any.whl → 0.31.4__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in those registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/CLAUDE.md +1005 -0
- crackerjack/RULES.md +380 -0
- crackerjack/__init__.py +42 -13
- crackerjack/__main__.py +225 -253
- crackerjack/agents/__init__.py +41 -0
- crackerjack/agents/architect_agent.py +281 -0
- crackerjack/agents/base.py +169 -0
- crackerjack/agents/coordinator.py +512 -0
- crackerjack/agents/documentation_agent.py +498 -0
- crackerjack/agents/dry_agent.py +388 -0
- crackerjack/agents/formatting_agent.py +245 -0
- crackerjack/agents/import_optimization_agent.py +281 -0
- crackerjack/agents/performance_agent.py +669 -0
- crackerjack/agents/proactive_agent.py +104 -0
- crackerjack/agents/refactoring_agent.py +788 -0
- crackerjack/agents/security_agent.py +529 -0
- crackerjack/agents/test_creation_agent.py +652 -0
- crackerjack/agents/test_specialist_agent.py +486 -0
- crackerjack/agents/tracker.py +212 -0
- crackerjack/api.py +560 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/facade.py +104 -0
- crackerjack/cli/handlers.py +267 -0
- crackerjack/cli/interactive.py +471 -0
- crackerjack/cli/options.py +401 -0
- crackerjack/cli/utils.py +18 -0
- crackerjack/code_cleaner.py +670 -0
- crackerjack/config/__init__.py +19 -0
- crackerjack/config/hooks.py +218 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +406 -0
- crackerjack/core/autofix_coordinator.py +200 -0
- crackerjack/core/container.py +104 -0
- crackerjack/core/enhanced_container.py +542 -0
- crackerjack/core/performance.py +243 -0
- crackerjack/core/phase_coordinator.py +561 -0
- crackerjack/core/proactive_workflow.py +316 -0
- crackerjack/core/session_coordinator.py +289 -0
- crackerjack/core/workflow_orchestrator.py +640 -0
- crackerjack/dynamic_config.py +577 -0
- crackerjack/errors.py +263 -41
- crackerjack/executors/__init__.py +11 -0
- crackerjack/executors/async_hook_executor.py +431 -0
- crackerjack/executors/cached_hook_executor.py +242 -0
- crackerjack/executors/hook_executor.py +345 -0
- crackerjack/executors/individual_hook_executor.py +669 -0
- crackerjack/intelligence/__init__.py +44 -0
- crackerjack/intelligence/adaptive_learning.py +751 -0
- crackerjack/intelligence/agent_orchestrator.py +551 -0
- crackerjack/intelligence/agent_registry.py +414 -0
- crackerjack/intelligence/agent_selector.py +502 -0
- crackerjack/intelligence/integration.py +290 -0
- crackerjack/interactive.py +576 -315
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +137 -0
- crackerjack/managers/publish_manager.py +411 -0
- crackerjack/managers/test_command_builder.py +151 -0
- crackerjack/managers/test_executor.py +435 -0
- crackerjack/managers/test_manager.py +258 -0
- crackerjack/managers/test_manager_backup.py +1124 -0
- crackerjack/managers/test_progress.py +144 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +336 -0
- crackerjack/mcp/client_runner.py +104 -0
- crackerjack/mcp/context.py +615 -0
- crackerjack/mcp/dashboard.py +636 -0
- crackerjack/mcp/enhanced_progress_monitor.py +479 -0
- crackerjack/mcp/file_monitor.py +336 -0
- crackerjack/mcp/progress_components.py +569 -0
- crackerjack/mcp/progress_monitor.py +949 -0
- crackerjack/mcp/rate_limiter.py +332 -0
- crackerjack/mcp/server.py +22 -0
- crackerjack/mcp/server_core.py +244 -0
- crackerjack/mcp/service_watchdog.py +501 -0
- crackerjack/mcp/state.py +395 -0
- crackerjack/mcp/task_manager.py +257 -0
- crackerjack/mcp/tools/__init__.py +17 -0
- crackerjack/mcp/tools/core_tools.py +249 -0
- crackerjack/mcp/tools/error_analyzer.py +308 -0
- crackerjack/mcp/tools/execution_tools.py +370 -0
- crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
- crackerjack/mcp/tools/intelligence_tools.py +314 -0
- crackerjack/mcp/tools/monitoring_tools.py +502 -0
- crackerjack/mcp/tools/proactive_tools.py +384 -0
- crackerjack/mcp/tools/progress_tools.py +141 -0
- crackerjack/mcp/tools/utility_tools.py +341 -0
- crackerjack/mcp/tools/workflow_executor.py +360 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +39 -0
- crackerjack/mcp/websocket/endpoints.py +559 -0
- crackerjack/mcp/websocket/jobs.py +253 -0
- crackerjack/mcp/websocket/server.py +116 -0
- crackerjack/mcp/websocket/websocket_handler.py +78 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/__init__.py +31 -0
- crackerjack/models/config.py +93 -0
- crackerjack/models/config_adapter.py +230 -0
- crackerjack/models/protocols.py +118 -0
- crackerjack/models/task.py +154 -0
- crackerjack/monitoring/ai_agent_watchdog.py +450 -0
- crackerjack/monitoring/regression_prevention.py +638 -0
- crackerjack/orchestration/__init__.py +0 -0
- crackerjack/orchestration/advanced_orchestrator.py +970 -0
- crackerjack/orchestration/execution_strategies.py +341 -0
- crackerjack/orchestration/test_progress_streamer.py +636 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +246 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +259 -0
- crackerjack/py313.py +8 -3
- crackerjack/services/__init__.py +22 -0
- crackerjack/services/cache.py +314 -0
- crackerjack/services/config.py +347 -0
- crackerjack/services/config_integrity.py +99 -0
- crackerjack/services/contextual_ai_assistant.py +516 -0
- crackerjack/services/coverage_ratchet.py +347 -0
- crackerjack/services/debug.py +736 -0
- crackerjack/services/dependency_monitor.py +617 -0
- crackerjack/services/enhanced_filesystem.py +439 -0
- crackerjack/services/file_hasher.py +151 -0
- crackerjack/services/filesystem.py +395 -0
- crackerjack/services/git.py +165 -0
- crackerjack/services/health_metrics.py +611 -0
- crackerjack/services/initialization.py +847 -0
- crackerjack/services/log_manager.py +286 -0
- crackerjack/services/logging.py +174 -0
- crackerjack/services/metrics.py +578 -0
- crackerjack/services/pattern_cache.py +362 -0
- crackerjack/services/pattern_detector.py +515 -0
- crackerjack/services/performance_benchmarks.py +653 -0
- crackerjack/services/security.py +163 -0
- crackerjack/services/server_manager.py +234 -0
- crackerjack/services/smart_scheduling.py +144 -0
- crackerjack/services/tool_version_service.py +61 -0
- crackerjack/services/unified_config.py +437 -0
- crackerjack/services/version_checker.py +248 -0
- crackerjack/slash_commands/__init__.py +14 -0
- crackerjack/slash_commands/init.md +122 -0
- crackerjack/slash_commands/run.md +163 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack-0.31.4.dist-info/METADATA +742 -0
- crackerjack-0.31.4.dist-info/RECORD +148 -0
- crackerjack-0.31.4.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -34
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/.pre-commit-config-ai.yaml +0 -149
- crackerjack/.pre-commit-config-fast.yaml +0 -69
- crackerjack/.pre-commit-config.yaml +0 -114
- crackerjack/crackerjack.py +0 -4140
- crackerjack/pyproject.toml +0 -285
- crackerjack-0.29.0.dist-info/METADATA +0 -1289
- crackerjack-0.29.0.dist-info/RECORD +0 -17
- {crackerjack-0.29.0.dist-info → crackerjack-0.31.4.dist-info}/WHEEL +0 -0
- {crackerjack-0.29.0.dist-info → crackerjack-0.31.4.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import contextlib
|
|
3
|
+
import time
|
|
4
|
+
import typing as t
|
|
5
|
+
from collections import defaultdict, deque
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
from rich.console import Console
|
|
10
|
+
|
|
11
|
+
console = Console()
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class RateLimitConfig:
    """Tunable limits for MCP server rate limiting and resource usage."""

    # Per-client request throttling windows.
    requests_per_minute: int = 30
    requests_per_hour: int = 300

    # Job execution and filesystem limits enforced by ResourceMonitor.
    max_concurrent_jobs: int = 5
    max_job_duration_minutes: int = 30
    max_file_size_mb: int = 100
    max_progress_files: int = 1000

    # Bookkeeping caps — not referenced in this module; presumably consumed
    # by the cache/state services (verify against their readers).
    max_cache_entries: int = 10000
    max_state_history: int = 100
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class RateLimiter:
    """Sliding-window request rate limiter with per-client and global budgets.

    Timestamps of accepted requests are stored in bounded deques; expired
    entries are pruned lazily on each check.  All mutation in ``is_allowed``
    happens under a single asyncio lock, so the limiter is safe for
    concurrent coroutines on one event loop.
    """

    def __init__(
        self,
        requests_per_minute: int = 30,
        requests_per_hour: int = 300,
    ) -> None:
        self.requests_per_minute = requests_per_minute
        self.requests_per_hour = requests_per_hour

        # Per-client windows; maxlen bounds memory even without cleanup.
        self.minute_windows: dict[str, deque[float]] = defaultdict(
            lambda: deque(maxlen=requests_per_minute),
        )
        self.hour_windows: dict[str, deque[float]] = defaultdict(
            lambda: deque(maxlen=requests_per_hour),
        )

        # Global windows allow 10x the per-client budget across all clients.
        self.global_minute_window: deque[float] = deque(maxlen=requests_per_minute * 10)
        self.global_hour_window: deque[float] = deque(maxlen=requests_per_hour * 10)

        self._lock = asyncio.Lock()

    async def is_allowed(
        self,
        client_id: str = "default",
    ) -> tuple[bool, dict[str, t.Any]]:
        """Check and record a request for *client_id*.

        Returns ``(allowed, info)``: on rejection *info* carries the reason,
        the limit hit, and a ``retry_after`` hint in seconds; on acceptance
        it reports the remaining per-window budget.
        """
        async with self._lock:
            now = time.time()

            self._cleanup_windows(now)

            minute_count = len(self.minute_windows[client_id])
            hour_count = len(self.hour_windows[client_id])

            global_minute_count = len(self.global_minute_window)
            global_hour_count = len(self.global_hour_window)

            if minute_count >= self.requests_per_minute:
                return False, {
                    "reason": "minute_limit_exceeded",
                    "limit": self.requests_per_minute,
                    "window": "1 minute",
                    "retry_after": 60,
                }

            if hour_count >= self.requests_per_hour:
                return False, {
                    "reason": "hour_limit_exceeded",
                    "limit": self.requests_per_hour,
                    "window": "1 hour",
                    "retry_after": 3600,
                }

            if global_minute_count >= self.requests_per_minute * 10:
                return False, {
                    "reason": "global_minute_limit_exceeded",
                    "retry_after": 60,
                }

            if global_hour_count >= self.requests_per_hour * 10:
                return False, {
                    "reason": "global_hour_limit_exceeded",
                    "retry_after": 3600,
                }

            # Record the accepted request in every window.
            self.minute_windows[client_id].append(now)
            self.hour_windows[client_id].append(now)
            self.global_minute_window.append(now)
            self.global_hour_window.append(now)

            return True, {
                "allowed": True,
                "minute_requests_remaining": self.requests_per_minute
                - minute_count
                - 1,
                "hour_requests_remaining": self.requests_per_hour - hour_count - 1,
            }

    def _cleanup_windows(self, now: float) -> None:
        """Drop entries older than their window from all deques."""
        minute_cutoff = now - 60
        hour_cutoff = now - 3600

        self._cleanup_client_windows(minute_cutoff, hour_cutoff)
        self._cleanup_global_windows(minute_cutoff, hour_cutoff)

    def _cleanup_client_windows(self, minute_cutoff: float, hour_cutoff: float) -> None:
        # Iterate the union of keys: previously only minute_windows keys were
        # visited, so a client whose (short-lived) minute window had already
        # been deleted kept stale entries in its hour window forever — leaking
        # memory and inflating hour counts on the client's next request.
        for client_id in set(self.minute_windows) | set(self.hour_windows):
            minute_window = self.minute_windows[client_id]
            hour_window = self.hour_windows[client_id]

            self._remove_expired_entries(minute_window, minute_cutoff)
            self._remove_expired_entries(hour_window, hour_cutoff)

            # Drop empty deques so idle clients don't accumulate.
            if not minute_window:
                del self.minute_windows[client_id]
            if not hour_window:
                del self.hour_windows[client_id]

    def _cleanup_global_windows(self, minute_cutoff: float, hour_cutoff: float) -> None:
        self._remove_expired_entries(self.global_minute_window, minute_cutoff)
        self._remove_expired_entries(self.global_hour_window, hour_cutoff)

    def _remove_expired_entries(self, window: deque[float], cutoff: float) -> None:
        # Deques are appended in time order, so popping from the left while
        # the head is expired removes exactly the stale prefix.
        while window and window[0] < cutoff:
            window.popleft()

    def get_stats(self) -> dict[str, t.Any]:
        """Return a snapshot of limiter state.

        NOTE: runs cleanup without taking the async lock; counts may be
        slightly stale relative to concurrent ``is_allowed`` calls.
        """
        now = time.time()
        self._cleanup_windows(now)

        return {
            "active_clients": len(self.minute_windows),
            "global_minute_requests": len(self.global_minute_window),
            "global_hour_requests": len(self.global_hour_window),
            "limits": {
                "requests_per_minute": self.requests_per_minute,
                "requests_per_hour": self.requests_per_hour,
            },
        }
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
class ResourceMonitor:
    """Enforces job concurrency, duration, and filesystem resource limits.

    ``active_jobs`` maps each running job id to its start timestamp; a
    semaphore of ``max_concurrent_jobs`` permits bounds concurrency.
    """

    def __init__(self, config: RateLimitConfig) -> None:
        self.config = config
        self.active_jobs: dict[str, float] = {}
        self.job_locks = asyncio.Semaphore(config.max_concurrent_jobs)
        self._lock = asyncio.Lock()

    async def acquire_job_slot(self, job_id: str) -> bool:
        """Try to reserve a concurrency slot for *job_id*; return success.

        NOTE(review): acquiring the same job_id twice decrements the
        semaphore twice but stores a single dict entry, so one release would
        leak a slot — callers are assumed to use unique job ids (confirm).
        """
        try:
            # Fast-path rejection when clearly saturated.
            if (
                self.job_locks.locked()
                and len(self.active_jobs) >= self.config.max_concurrent_jobs
            ):
                console.print(
                    f"[yellow]🚫 Job {job_id} rejected: max concurrent jobs ({self.config.max_concurrent_jobs}) reached[/yellow]",
                )
                return False

            try:
                # Short timeout keeps this effectively non-blocking.
                await asyncio.wait_for(self.job_locks.acquire(), timeout=0.1)
            except (TimeoutError, asyncio.TimeoutError):
                # asyncio.TimeoutError only became an alias of the builtin
                # TimeoutError in Python 3.11; catch both so the timeout is
                # also handled on older interpreters.
                console.print(
                    f"[yellow]🚫 Job {job_id} rejected: max concurrent jobs ({self.config.max_concurrent_jobs}) reached[/yellow]",
                )
                return False

            async with self._lock:
                self.active_jobs[job_id] = time.time()

            console.print(
                f"[green]🎯 Job {job_id} acquired slot ({len(self.active_jobs)} / {self.config.max_concurrent_jobs})[/green]",
            )
            return True

        except Exception as e:
            console.print(f"[red]Error acquiring job slot for {job_id}: {e}[/red]")
            return False

    async def release_job_slot(self, job_id: str) -> None:
        """Release *job_id*'s slot and log its wall-clock duration.

        Unknown job ids are ignored; the semaphore is only released for a
        tracked job so it can never be over-incremented.
        """
        async with self._lock:
            if job_id in self.active_jobs:
                start_time = self.active_jobs.pop(job_id)
                duration = time.time() - start_time
                console.print(
                    f"[blue]🏁 Job {job_id} completed in {duration: .1f}s ({len(self.active_jobs)} / {self.config.max_concurrent_jobs} active)[/blue]",
                )

                self.job_locks.release()

    async def cleanup_stale_jobs(self) -> int:
        """Reap jobs running longer than ``max_job_duration_minutes``.

        Returns the number of jobs cleaned up; each reaped job releases its
        semaphore slot.
        """
        now = time.time()
        max_duration = self.config.max_job_duration_minutes * 60
        stale_jobs = []

        async with self._lock:
            for job_id, start_time in list(self.active_jobs.items()):
                if now - start_time > max_duration:
                    stale_jobs.append(job_id)
                    del self.active_jobs[job_id]
                    self.job_locks.release()

        if stale_jobs:
            console.print(
                f"[yellow]🧹 Cleaned up {len(stale_jobs)} stale jobs (exceeded {self.config.max_job_duration_minutes}m)[/yellow]",
            )

        return len(stale_jobs)

    def check_file_size(self, file_path: Path) -> bool:
        """True when *file_path* is absent or within ``max_file_size_mb``."""
        try:
            if not file_path.exists():
                return True

            size_mb = file_path.stat().st_size / (1024 * 1024)
            if size_mb > self.config.max_file_size_mb:
                console.print(
                    f"[red]🚫 File {file_path} ({size_mb: .1f}MB) exceeds limit ({self.config.max_file_size_mb}MB)[/red]",
                )
                return False

            return True
        except OSError:
            # Unreadable file metadata counts as a failed check.
            return False

    def check_progress_files_limit(self, progress_dir: Path) -> bool:
        """True when *progress_dir* holds at most ``max_progress_files``."""
        try:
            if not progress_dir.exists():
                return True

            # NOTE(review): pattern was "job -* .json" (stray spaces), which
            # can never match real files; normalized to "job-*.json" — confirm
            # progress files are actually named job-<id>.json.
            file_count = len(list(progress_dir.glob("job-*.json")))
            if file_count > self.config.max_progress_files:
                console.print(
                    f"[red]🚫 Progress files ({file_count}) exceed limit ({self.config.max_progress_files})[/red]",
                )
                return False

            return True
        except OSError:
            return False

    def get_stats(self) -> dict[str, t.Any]:
        """Return a snapshot of job-slot usage and the configured limits."""
        return {
            "active_jobs": len(self.active_jobs),
            "max_concurrent_jobs": self.config.max_concurrent_jobs,
            # Derived from public state instead of reading the private
            # Semaphore._value attribute.
            "available_slots": max(
                0, self.config.max_concurrent_jobs - len(self.active_jobs)
            ),
            "job_details": {
                job_id: {"duration": time.time() - start_time, "start_time": start_time}
                for job_id, start_time in self.active_jobs.items()
            },
            "limits": {
                "max_concurrent_jobs": self.config.max_concurrent_jobs,
                "max_job_duration_minutes": self.config.max_job_duration_minutes,
                "max_file_size_mb": self.config.max_file_size_mb,
                "max_progress_files": self.config.max_progress_files,
            },
        }
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
class RateLimitMiddleware:
    """Facade combining request rate limiting and job resource monitoring."""

    def __init__(self, config: RateLimitConfig | None = None) -> None:
        self.config = config or RateLimitConfig()
        self.rate_limiter = RateLimiter(
            self.config.requests_per_minute,
            self.config.requests_per_hour,
        )
        self.resource_monitor = ResourceMonitor(self.config)

        self._cleanup_task: asyncio.Task | None = None
        self._running = False

    async def start(self) -> None:
        """Start the periodic stale-job cleanup loop (idempotent)."""
        if self._running:
            # Guard: a second start() must not spawn a duplicate cleanup task.
            return
        self._running = True
        self._cleanup_task = asyncio.create_task(self._cleanup_loop())
        console.print("[green]🛡️ Rate limiting middleware started[/green]")

    async def stop(self) -> None:
        """Cancel the cleanup task and wait for it to finish."""
        self._running = False
        if self._cleanup_task:
            self._cleanup_task.cancel()
            with contextlib.suppress(asyncio.CancelledError):
                await self._cleanup_task
            # Clear the reference so start()/stop() can be cycled safely.
            self._cleanup_task = None
        console.print("[yellow]🛡️ Rate limiting middleware stopped[/yellow]")

    async def check_request_allowed(
        self,
        client_id: str = "default",
    ) -> tuple[bool, dict[str, t.Any]]:
        """Delegate to the rate limiter; see ``RateLimiter.is_allowed``."""
        return await self.rate_limiter.is_allowed(client_id)

    async def acquire_job_resources(self, job_id: str) -> bool:
        """Reserve a concurrency slot for *job_id*."""
        return await self.resource_monitor.acquire_job_slot(job_id)

    async def release_job_resources(self, job_id: str) -> None:
        """Release *job_id*'s concurrency slot."""
        await self.resource_monitor.release_job_slot(job_id)

    def validate_file_size(self, file_path: Path) -> bool:
        """Check *file_path* against the configured size limit."""
        return self.resource_monitor.check_file_size(file_path)

    def validate_progress_files(self, progress_dir: Path) -> bool:
        """Check the progress-file count against the configured limit."""
        return self.resource_monitor.check_progress_files_limit(progress_dir)

    async def _cleanup_loop(self) -> None:
        # Reap stale jobs every 5 minutes; back off 1 minute after errors.
        while self._running:
            try:
                await self.resource_monitor.cleanup_stale_jobs()
                await asyncio.sleep(300)
            except asyncio.CancelledError:
                break
            except Exception as e:
                console.print(f"[red]Error in cleanup loop: {e}[/red]")
                await asyncio.sleep(60)

    def get_comprehensive_stats(self) -> dict[str, t.Any]:
        """Aggregate limiter, monitor, and configuration snapshots."""
        return {
            "rate_limiting": self.rate_limiter.get_stats(),
            "resource_usage": self.resource_monitor.get_stats(),
            "config": {
                "requests_per_minute": self.config.requests_per_minute,
                "requests_per_hour": self.config.requests_per_hour,
                "max_concurrent_jobs": self.config.max_concurrent_jobs,
                "max_job_duration_minutes": self.config.max_job_duration_minutes,
                "max_file_size_mb": self.config.max_file_size_mb,
                "max_progress_files": self.config.max_progress_files,
            },
        }
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from .server_core import (
|
|
2
|
+
MCP_AVAILABLE,
|
|
3
|
+
MCPOptions,
|
|
4
|
+
create_mcp_server,
|
|
5
|
+
main,
|
|
6
|
+
)
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"MCP_AVAILABLE",
|
|
10
|
+
"MCPOptions",
|
|
11
|
+
"create_mcp_server",
|
|
12
|
+
"main",
|
|
13
|
+
]
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
if __name__ == "__main__":
    import sys

    # CLI usage: python -m <module> [project_path] [websocket_port]
    project_path = sys.argv[1] if len(sys.argv) > 1 else "."
    websocket_port = int(sys.argv[2]) if len(sys.argv) > 2 else None

    main(project_path, websocket_port)
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import subprocess
|
|
2
|
+
import time
|
|
3
|
+
import typing as t
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Final
|
|
6
|
+
|
|
7
|
+
from rich.console import Console
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
from mcp.server.fastmcp import FastMCP
|
|
11
|
+
|
|
12
|
+
_mcp_available = True
|
|
13
|
+
except ImportError:
|
|
14
|
+
_mcp_available = False
|
|
15
|
+
FastMCP = None
|
|
16
|
+
|
|
17
|
+
MCP_AVAILABLE: Final[bool] = _mcp_available
|
|
18
|
+
|
|
19
|
+
from .context import (
|
|
20
|
+
MCPServerConfig,
|
|
21
|
+
MCPServerContext,
|
|
22
|
+
clear_context,
|
|
23
|
+
get_context,
|
|
24
|
+
set_context,
|
|
25
|
+
)
|
|
26
|
+
from .rate_limiter import RateLimitConfig
|
|
27
|
+
from .tools import (
|
|
28
|
+
register_core_tools,
|
|
29
|
+
register_execution_tools,
|
|
30
|
+
register_intelligence_tools,
|
|
31
|
+
register_monitoring_tools,
|
|
32
|
+
register_proactive_tools,
|
|
33
|
+
register_progress_tools,
|
|
34
|
+
register_utility_tools,
|
|
35
|
+
)
|
|
36
|
+
|
|
37
|
+
console = Console()
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class MCPOptions:
    """Option bag mirroring the CLI flags accepted by the MCP workflow.

    Keyword arguments override matching defaults; unknown keys are
    silently ignored.
    """

    def __init__(self, **kwargs: t.Any) -> None:
        defaults: dict[str, t.Any] = {
            "commit": False,
            "interactive": False,
            "no_config_updates": False,
            "verbose": False,
            "clean": False,
            "test": False,
            "autofix": True,
            "skip_hooks": False,
            "ai_agent": False,
            "ai_debug": False,
            "publish": None,
            "bump": None,
            "create_pr": False,
            "testing": False,
        }
        for name, value in defaults.items():
            setattr(self, name, value)

        # Only attributes that already exist as defaults may be overridden.
        for name, value in kwargs.items():
            if hasattr(self, name):
                setattr(self, name, value)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _validate_job_id(job_id: str) -> bool:
|
|
63
|
+
if not job_id or not isinstance(job_id, str):
|
|
64
|
+
return False
|
|
65
|
+
if len(job_id) > 50:
|
|
66
|
+
return False
|
|
67
|
+
|
|
68
|
+
import re
|
|
69
|
+
|
|
70
|
+
return bool(re.match(r"^[a-zA-Z0-9_-]+$", job_id))
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
async def _start_websocket_server() -> bool:
    """Delegate websocket startup to the active server context, if any.

    Returns False when no context is installed.
    """
    ctx = get_context()
    return await ctx.start_websocket_server() if ctx else False
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def create_mcp_server() -> t.Any | None:
    """Build the FastMCP app with slash-command prompts and all tool groups.

    Returns None when the optional ``mcp`` dependency is not installed.
    """
    if not MCP_AVAILABLE or FastMCP is None:
        return None

    mcp_app = FastMCP("crackerjack-mcp-server")

    from crackerjack.slash_commands import get_slash_command_path

    @mcp_app.prompt("run")
    async def get_crackerjack_run_prompt() -> str:
        try:
            command_path = get_slash_command_path("run")
            return command_path.read_text()
        except Exception as e:
            msg = f"Failed to read run command: {e}"
            # Chain the original exception so the root cause is preserved.
            raise ValueError(msg) from e

    @mcp_app.prompt("init")
    async def get_crackerjack_init_prompt() -> str:
        try:
            command_path = get_slash_command_path("init")
            return command_path.read_text()
        except Exception as e:
            msg = f"Failed to read init command: {e}"
            raise ValueError(msg) from e

    @mcp_app.prompt("status")
    async def get_crackerjack_status_prompt() -> str:
        try:
            command_path = get_slash_command_path("status")
            return command_path.read_text()
        except Exception as e:
            msg = f"Failed to read status command: {e}"
            raise ValueError(msg) from e

    # Register every tool group on the shared app instance.
    register_core_tools(mcp_app)
    register_execution_tools(mcp_app)
    register_intelligence_tools(mcp_app)
    register_monitoring_tools(mcp_app)
    register_progress_tools(mcp_app)
    register_proactive_tools(mcp_app)
    register_utility_tools(mcp_app)

    return mcp_app
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def handle_mcp_server_command(
    start: bool = False,
    stop: bool = False,
    restart: bool = False,
    websocket_port: int | None = None,
) -> None:
    """Handle MCP server start/stop/restart commands.

    ``restart`` performs a stop followed by a start; ``stop`` returns after
    killing existing processes.  Errors are reported to the console rather
    than raised.
    """
    if stop or restart:
        console.print("[yellow]Stopping MCP servers...[/yellow]")
        # Kill any existing MCP server processes (best effort, list argv —
        # no shell).
        try:
            result = subprocess.run(
                ["pkill", "-f", "crackerjack-mcp-server"],
                check=False,
                capture_output=True,
                text=True,
                timeout=10,
            )
            if result.returncode == 0:
                console.print("[green]✅ MCP servers stopped[/green]")
            else:
                # pkill exits non-zero when no process matched.
                console.print("[dim]No MCP servers were running[/dim]")
        except subprocess.TimeoutExpired:
            console.print("[red]Timeout stopping MCP servers[/red]")
        except Exception as e:
            console.print(f"[red]Error stopping MCP servers: {e}[/red]")

        if stop:
            return

        # Restart only: give the old process a moment to fully exit before
        # starting the replacement.  A plain --start must not pay this pause.
        time.sleep(2)

    if start or restart:
        console.print("[green]Starting MCP server...[/green]")
        try:
            main(".", websocket_port)
        except Exception as e:
            console.print(f"[red]Failed to start MCP server: {e}[/red]")
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _initialize_context(context: MCPServerContext) -> None:
    """Install *context* as the process-wide MCP server context."""
    set_context(context)

    context.safe_print("MCP Server context initialized")
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _stop_websocket_server() -> None:
    """Best-effort websocket shutdown hook, run from ``main``'s finally block.

    NOTE(review): this function currently performs no shutdown — it only
    checks that a context exists and has a ``_stop_websocket_server``
    attribute, then falls through.  Presumably the actual async cleanup is
    driven by the context's own handlers; confirm against MCPServerContext.
    """
    from contextlib import suppress

    # get_context() is assumed to raise RuntimeError when no context was
    # installed — suppressed because there is then nothing to stop.
    with suppress(RuntimeError):
        # Context not initialized, nothing to stop
        context = get_context()
        if context and hasattr(context, "_stop_websocket_server"):
            # The websocket cleanup is handled asynchronously
            # and called from the context's cleanup handlers
            pass
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def main(project_path_arg: str = ".", websocket_port: int | None = None) -> None:
    """Entry point: configure and run the Crackerjack MCP server.

    Silently returns when the optional ``mcp`` package is unavailable.
    The shared server context is always torn down on exit, including on
    KeyboardInterrupt and unexpected errors.

    Args:
        project_path_arg: Project root; resolved to an absolute path.
        websocket_port: Optional override for the context's websocket port.
    """
    if not MCP_AVAILABLE:
        return

    try:
        project_path = Path(project_path_arg).resolve()

        config = MCPServerConfig(
            project_path=project_path,
            rate_limit_config=RateLimitConfig(),
        )

        context = MCPServerContext(config)
        context.console = console

        # Set custom WebSocket port if specified
        if websocket_port:
            context.websocket_server_port = websocket_port

        # Context must be installed before tools are created, since tool
        # handlers look it up via get_context().
        _initialize_context(context)

        mcp_app = create_mcp_server()
        if not mcp_app:
            console.print("[red]Failed to create MCP server[/red]")
            return

        console.print("[green]Starting Crackerjack MCP Server...[/green]")
        console.print(f"Project path: {project_path}")
        if websocket_port:
            console.print(f"WebSocket port: {websocket_port}")

        console.print("[yellow]MCP app created, about to run...[/yellow]")
        try:
            # Blocks until the MCP app exits.
            mcp_app.run()
        except Exception as e:
            # Print the traceback here, then re-raise so the outer handler
            # also reports the failure before cleanup.
            console.print(f"[red]MCP run failed: {e}[/red]")
            import traceback

            traceback.print_exc()
            raise

    except KeyboardInterrupt:
        console.print("Server stopped by user")
    except Exception as e:
        console.print(f"Server error: {e}")
        import traceback

        traceback.print_exc()
    finally:
        # Always tear down the websocket server and clear the global context.
        _stop_websocket_server()
        clear_context()
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
if __name__ == "__main__":
    import sys

    # CLI usage: python server_core.py [project_path] [websocket_port]
    project_path = sys.argv[1] if len(sys.argv) > 1 else "."
    websocket_port = int(sys.argv[2]) if len(sys.argv) > 2 else None

    main(project_path, websocket_port)
|