crackerjack 0.30.3__py3-none-any.whl → 0.31.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/CLAUDE.md +1005 -0
- crackerjack/RULES.md +380 -0
- crackerjack/__init__.py +42 -13
- crackerjack/__main__.py +227 -299
- crackerjack/agents/__init__.py +41 -0
- crackerjack/agents/architect_agent.py +281 -0
- crackerjack/agents/base.py +170 -0
- crackerjack/agents/coordinator.py +512 -0
- crackerjack/agents/documentation_agent.py +498 -0
- crackerjack/agents/dry_agent.py +388 -0
- crackerjack/agents/formatting_agent.py +245 -0
- crackerjack/agents/import_optimization_agent.py +281 -0
- crackerjack/agents/performance_agent.py +669 -0
- crackerjack/agents/proactive_agent.py +104 -0
- crackerjack/agents/refactoring_agent.py +788 -0
- crackerjack/agents/security_agent.py +529 -0
- crackerjack/agents/test_creation_agent.py +657 -0
- crackerjack/agents/test_specialist_agent.py +486 -0
- crackerjack/agents/tracker.py +212 -0
- crackerjack/api.py +560 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/facade.py +104 -0
- crackerjack/cli/handlers.py +267 -0
- crackerjack/cli/interactive.py +471 -0
- crackerjack/cli/options.py +409 -0
- crackerjack/cli/utils.py +18 -0
- crackerjack/code_cleaner.py +618 -928
- crackerjack/config/__init__.py +19 -0
- crackerjack/config/hooks.py +218 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +406 -0
- crackerjack/core/autofix_coordinator.py +200 -0
- crackerjack/core/container.py +104 -0
- crackerjack/core/enhanced_container.py +542 -0
- crackerjack/core/performance.py +243 -0
- crackerjack/core/phase_coordinator.py +585 -0
- crackerjack/core/proactive_workflow.py +316 -0
- crackerjack/core/session_coordinator.py +289 -0
- crackerjack/core/workflow_orchestrator.py +826 -0
- crackerjack/dynamic_config.py +94 -103
- crackerjack/errors.py +263 -41
- crackerjack/executors/__init__.py +11 -0
- crackerjack/executors/async_hook_executor.py +431 -0
- crackerjack/executors/cached_hook_executor.py +242 -0
- crackerjack/executors/hook_executor.py +345 -0
- crackerjack/executors/individual_hook_executor.py +669 -0
- crackerjack/intelligence/__init__.py +44 -0
- crackerjack/intelligence/adaptive_learning.py +751 -0
- crackerjack/intelligence/agent_orchestrator.py +551 -0
- crackerjack/intelligence/agent_registry.py +414 -0
- crackerjack/intelligence/agent_selector.py +502 -0
- crackerjack/intelligence/integration.py +290 -0
- crackerjack/interactive.py +576 -315
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +137 -0
- crackerjack/managers/publish_manager.py +433 -0
- crackerjack/managers/test_command_builder.py +151 -0
- crackerjack/managers/test_executor.py +443 -0
- crackerjack/managers/test_manager.py +258 -0
- crackerjack/managers/test_manager_backup.py +1124 -0
- crackerjack/managers/test_progress.py +114 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +336 -0
- crackerjack/mcp/client_runner.py +104 -0
- crackerjack/mcp/context.py +621 -0
- crackerjack/mcp/dashboard.py +636 -0
- crackerjack/mcp/enhanced_progress_monitor.py +479 -0
- crackerjack/mcp/file_monitor.py +336 -0
- crackerjack/mcp/progress_components.py +569 -0
- crackerjack/mcp/progress_monitor.py +949 -0
- crackerjack/mcp/rate_limiter.py +332 -0
- crackerjack/mcp/server.py +22 -0
- crackerjack/mcp/server_core.py +244 -0
- crackerjack/mcp/service_watchdog.py +501 -0
- crackerjack/mcp/state.py +395 -0
- crackerjack/mcp/task_manager.py +257 -0
- crackerjack/mcp/tools/__init__.py +17 -0
- crackerjack/mcp/tools/core_tools.py +249 -0
- crackerjack/mcp/tools/error_analyzer.py +308 -0
- crackerjack/mcp/tools/execution_tools.py +372 -0
- crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
- crackerjack/mcp/tools/intelligence_tools.py +314 -0
- crackerjack/mcp/tools/monitoring_tools.py +502 -0
- crackerjack/mcp/tools/proactive_tools.py +384 -0
- crackerjack/mcp/tools/progress_tools.py +217 -0
- crackerjack/mcp/tools/utility_tools.py +341 -0
- crackerjack/mcp/tools/workflow_executor.py +565 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +39 -0
- crackerjack/mcp/websocket/endpoints.py +559 -0
- crackerjack/mcp/websocket/jobs.py +253 -0
- crackerjack/mcp/websocket/server.py +116 -0
- crackerjack/mcp/websocket/websocket_handler.py +78 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/__init__.py +31 -0
- crackerjack/models/config.py +93 -0
- crackerjack/models/config_adapter.py +230 -0
- crackerjack/models/protocols.py +118 -0
- crackerjack/models/task.py +154 -0
- crackerjack/monitoring/ai_agent_watchdog.py +450 -0
- crackerjack/monitoring/regression_prevention.py +638 -0
- crackerjack/orchestration/__init__.py +0 -0
- crackerjack/orchestration/advanced_orchestrator.py +970 -0
- crackerjack/orchestration/coverage_improvement.py +223 -0
- crackerjack/orchestration/execution_strategies.py +341 -0
- crackerjack/orchestration/test_progress_streamer.py +636 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +246 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +259 -0
- crackerjack/py313.py +8 -3
- crackerjack/services/__init__.py +22 -0
- crackerjack/services/cache.py +314 -0
- crackerjack/services/config.py +358 -0
- crackerjack/services/config_integrity.py +99 -0
- crackerjack/services/contextual_ai_assistant.py +516 -0
- crackerjack/services/coverage_ratchet.py +356 -0
- crackerjack/services/debug.py +736 -0
- crackerjack/services/dependency_monitor.py +617 -0
- crackerjack/services/enhanced_filesystem.py +439 -0
- crackerjack/services/file_hasher.py +151 -0
- crackerjack/services/filesystem.py +421 -0
- crackerjack/services/git.py +176 -0
- crackerjack/services/health_metrics.py +611 -0
- crackerjack/services/initialization.py +873 -0
- crackerjack/services/log_manager.py +286 -0
- crackerjack/services/logging.py +174 -0
- crackerjack/services/metrics.py +578 -0
- crackerjack/services/pattern_cache.py +362 -0
- crackerjack/services/pattern_detector.py +515 -0
- crackerjack/services/performance_benchmarks.py +653 -0
- crackerjack/services/security.py +163 -0
- crackerjack/services/server_manager.py +234 -0
- crackerjack/services/smart_scheduling.py +144 -0
- crackerjack/services/tool_version_service.py +61 -0
- crackerjack/services/unified_config.py +437 -0
- crackerjack/services/version_checker.py +248 -0
- crackerjack/slash_commands/__init__.py +14 -0
- crackerjack/slash_commands/init.md +122 -0
- crackerjack/slash_commands/run.md +163 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack-0.31.7.dist-info/METADATA +742 -0
- crackerjack-0.31.7.dist-info/RECORD +149 -0
- crackerjack-0.31.7.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -34
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/crackerjack.py +0 -3805
- crackerjack/pyproject.toml +0 -286
- crackerjack-0.30.3.dist-info/METADATA +0 -1290
- crackerjack-0.30.3.dist-info/RECORD +0 -16
- {crackerjack-0.30.3.dist-info โ crackerjack-0.31.7.dist-info}/WHEEL +0 -0
- {crackerjack-0.30.3.dist-info โ crackerjack-0.31.7.dist-info}/licenses/LICENSE +0 -0
crackerjack/mcp/state.py
ADDED
|
@@ -0,0 +1,395 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import json
|
|
3
|
+
import time
|
|
4
|
+
import typing as t
|
|
5
|
+
import uuid
|
|
6
|
+
from dataclasses import asdict, dataclass
|
|
7
|
+
from enum import Enum
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class StageStatus(str, Enum):
    """Lifecycle states for a single workflow stage.

    Inherits from ``str`` so member values serialize directly to JSON.
    """

    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    ERROR = "error"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class Priority(str, Enum):
    """Severity ranking for an :class:`Issue`, highest first.

    Inherits from ``str`` so member values serialize directly to JSON.
    """

    CRITICAL = "critical"
    HIGH = "high"
    MEDIUM = "medium"
    LOW = "low"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@dataclass
class Issue:
    """A single quality issue discovered during a workflow stage."""

    id: str
    type: str  # free-form category label (used by lookups by type)
    message: str
    file_path: str
    line_number: int | None = None
    priority: Priority = Priority.MEDIUM
    stage: str = ""  # name of the stage that reported the issue, if any
    suggested_fix: str | None = None
    auto_fixable: bool = False  # True when a fix can be applied automatically

    def to_dict(self) -> dict[str, t.Any]:
        """Return a JSON-serializable dict of all fields."""
        return asdict(self)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
class StageResult:
    """Outcome of one workflow stage, including timing and findings."""

    stage: str
    status: StageStatus
    start_time: float  # epoch seconds
    end_time: float | None = None
    duration: float | None = None  # derived in __post_init__ when both times set
    issues_found: list[Issue] | None = None  # normalized to [] in __post_init__
    fixes_applied: list[str] | None = None  # normalized to [] in __post_init__
    error_message: str | None = None

    def __post_init__(self) -> None:
        """Replace ``None`` collections with empty ones and derive duration.

        NOTE(review): the duration guard is truthiness-based, so a
        ``start_time`` of exactly 0.0 would skip the computation — confirm
        callers always pass real ``time.time()`` values.
        """
        if self.issues_found is None:
            self.issues_found = []
        if self.fixes_applied is None:
            self.fixes_applied = []
        if self.end_time and self.start_time:
            self.duration = self.end_time - self.start_time

    def to_dict(self) -> dict[str, t.Any]:
        """Return a JSON-serializable dict; issues are expanded via to_dict."""
        data = asdict(self)
        data["issues_found"] = [issue.to_dict() for issue in self.issues_found or []]
        return data
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@dataclass
class SessionState:
    """Aggregate state for one workflow session: stages, issues, fixes."""

    session_id: str
    start_time: float  # epoch seconds
    current_stage: str | None = None  # name of the stage currently running
    stages: dict[str, StageResult] | None = None  # normalized to {} in __post_init__
    global_issues: list[Issue] | None = None  # normalized to [] in __post_init__
    fixes_applied: list[str] | None = None  # normalized to [] in __post_init__
    metadata: dict[str, t.Any] | None = None  # normalized to {} in __post_init__

    def __post_init__(self) -> None:
        """Replace ``None`` collections with empty containers."""
        if self.stages is None:
            self.stages = {}
        if self.global_issues is None:
            self.global_issues = []
        if self.fixes_applied is None:
            self.fixes_applied = []
        if self.metadata is None:
            self.metadata = {}

    def to_dict(self) -> dict[str, t.Any]:
        """Return a JSON-serializable dict; nested objects use their to_dict."""
        data = asdict(self)
        data["stages"] = {k: v.to_dict() for k, v in (self.stages or {}).items()}
        data["global_issues"] = [issue.to_dict() for issue in self.global_issues or []]
        return data
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
class StateManager:
    """Tracks and persists per-session workflow state.

    Stage results, discovered issues, and applied fixes live in a
    :class:`SessionState` that is mirrored to
    ``<state_dir>/current_session.json`` after every mutation; named
    snapshots go to a ``checkpoints`` subdirectory.  Async mutators
    serialize through an ``asyncio.Lock``; the synchronous helpers
    (``remove_issue``, ``load_checkpoint``, ...) do not take the lock and
    should only be called from the event-loop thread.
    """

    def __init__(self, state_dir: Path | None = None, batched_saver=None) -> None:
        self._lock = asyncio.Lock()
        self.state_dir = state_dir or Path.home() / ".cache" / "crackerjack-mcp"
        # parents=True: without it, a missing ~/.cache (fresh environment)
        # made mkdir raise FileNotFoundError.
        self.state_dir.mkdir(parents=True, exist_ok=True)
        self.session_state = SessionState(
            session_id=self._generate_session_id(),
            start_time=time.time(),
        )
        self.checkpoints_dir = self.state_dir / "checkpoints"
        self.checkpoints_dir.mkdir(parents=True, exist_ok=True)
        # Accepted for future batched persistence; saves are currently
        # synchronous either way (see _save_state).
        self._batched_saver = batched_saver

    def _generate_session_id(self) -> str:
        """Return a short random session id (first 8 chars of a UUID4)."""
        return str(uuid.uuid4())[:8]

    async def start_stage(self, stage: str) -> None:
        """Record *stage* as RUNNING, make it current, and persist."""
        async with self._lock:
            if not self.session_state.stages:
                self.session_state.stages = {}
            self.session_state.current_stage = stage
            self.session_state.stages[stage] = StageResult(
                stage=stage,
                status=StageStatus.RUNNING,
                start_time=time.time(),
            )
            self._save_state()

    async def complete_stage(
        self,
        stage: str,
        issues: list[Issue] | None = None,
        fixes: list[str] | None = None,
    ) -> None:
        """Mark *stage* COMPLETED, attach its findings, and persist.

        Unknown stage names are silently ignored.
        """
        async with self._lock:
            stage_result = self._get_stage_result(stage)
            if not stage_result:
                return

            self._update_stage_completion(stage_result)
            self._process_stage_issues(stage_result, issues)
            self._process_stage_fixes(stage_result, fixes)
            self._clear_current_stage(stage)
            self._save_state()

    def _get_stage_result(self, stage: str) -> StageResult | None:
        """Return the recorded result for *stage*, or None if unknown."""
        if not self.session_state.stages or stage not in self.session_state.stages:
            return None
        return self.session_state.stages[stage]

    def _update_stage_completion(self, stage_result: StageResult) -> None:
        """Stamp a stage COMPLETED and compute its duration."""
        stage_result.status = StageStatus.COMPLETED
        stage_result.end_time = time.time()
        stage_result.duration = stage_result.end_time - stage_result.start_time

    def _process_stage_issues(
        self,
        stage_result: StageResult,
        issues: list[Issue] | None,
    ) -> None:
        """Attach *issues* to the stage and append them to the global list."""
        if not issues:
            return
        stage_result.issues_found = issues
        if not self.session_state.global_issues:
            self.session_state.global_issues = []
        self.session_state.global_issues.extend(issues)

    def _process_stage_fixes(
        self,
        stage_result: StageResult,
        fixes: list[str] | None,
    ) -> None:
        """Attach *fixes* to the stage and append them to the global list."""
        if not fixes:
            return
        stage_result.fixes_applied = fixes
        if not self.session_state.fixes_applied:
            self.session_state.fixes_applied = []
        self.session_state.fixes_applied.extend(fixes)

    def _clear_current_stage(self, stage: str) -> None:
        """Clear current_stage if it still points at *stage*."""
        if self.session_state.current_stage == stage:
            self.session_state.current_stage = None

    async def fail_stage(self, stage: str, error_message: str) -> None:
        """Mark *stage* FAILED with *error_message* and persist."""
        async with self._lock:
            if not self.session_state.stages or stage not in self.session_state.stages:
                return
            stage_result = self.session_state.stages[stage]
            stage_result.status = StageStatus.FAILED
            stage_result.end_time = time.time()
            stage_result.duration = stage_result.end_time - stage_result.start_time
            stage_result.error_message = error_message
            if self.session_state.current_stage == stage:
                self.session_state.current_stage = None
            self._save_state()

    async def update_stage_status(self, stage: str, status: str) -> None:
        """Set *stage* to *status* (a StageStatus value), creating it if new.

        Raises:
            ValueError: if *status* is not a valid StageStatus value.
        """
        async with self._lock:
            if not self.session_state.stages:
                self.session_state.stages = {}
            if stage not in self.session_state.stages:
                self.session_state.stages[stage] = StageResult(
                    stage=stage,
                    status=StageStatus(status),
                    start_time=time.time(),
                )
            else:
                self.session_state.stages[stage].status = StageStatus(status)
                if status in ("completed", "failed", "error"):
                    self.session_state.stages[stage].end_time = time.time()
            self._save_state()

    async def add_issue(self, issue: Issue) -> None:
        """Append *issue* to the global issue list and persist."""
        async with self._lock:
            if not self.session_state.global_issues:
                self.session_state.global_issues = []
            self.session_state.global_issues.append(issue)
            self._save_state()

    def remove_issue(self, issue_id: str) -> bool:
        """Remove the issue with *issue_id*; return True if one was removed."""
        if not self.session_state.global_issues:
            return False
        initial_count = len(self.session_state.global_issues)
        self.session_state.global_issues = [
            issue for issue in self.session_state.global_issues if issue.id != issue_id
        ]
        removed = len(self.session_state.global_issues) < initial_count
        if removed:
            self._save_state()

        return removed

    def get_issues_by_priority(self, priority: Priority) -> list[Issue]:
        """Return all global issues with the given *priority*."""
        if not self.session_state.global_issues:
            return []

        return [
            issue
            for issue in self.session_state.global_issues
            if issue.priority == priority
        ]

    def get_issues_by_type(self, issue_type: str) -> list[Issue]:
        """Return all global issues whose type equals *issue_type*."""
        if not self.session_state.global_issues:
            return []

        return [
            issue
            for issue in self.session_state.global_issues
            if issue.type == issue_type
        ]

    def get_auto_fixable_issues(self) -> list[Issue]:
        """Return all global issues flagged as auto-fixable."""
        if not self.session_state.global_issues:
            return []

        return [
            issue for issue in self.session_state.global_issues if issue.auto_fixable
        ]

    def get_session_summary(self) -> dict[str, t.Any]:
        """Return aggregate counts and per-stage statuses for the session."""
        stages = self.session_state.stages or {}
        issues = self.session_state.global_issues or []
        fixes = self.session_state.fixes_applied or []
        priority_counts = {
            priority.value: len(self.get_issues_by_priority(priority))
            for priority in Priority
        }
        type_counts: dict[str, int] = {}
        for issue in issues:
            type_counts[issue.type] = type_counts.get(issue.type, 0) + 1
        stage_status = {
            stage_name: stage_result.status.value
            for stage_name, stage_result in stages.items()
        }

        return {
            "session_id": self.session_state.session_id,
            "duration": time.time() - self.session_state.start_time,
            "current_stage": self.session_state.current_stage,
            "stages": stage_status,
            "total_issues": len(issues),
            "issues_by_priority": priority_counts,
            "issues_by_type": type_counts,
            "total_fixes": len(fixes),
            "auto_fixable_issues": len(self.get_auto_fixable_issues()),
        }

    async def save_checkpoint(self, name: str) -> None:
        """Write a named snapshot of the session state to the checkpoints dir."""
        async with self._lock:
            checkpoint_file = self.checkpoints_dir / f"{name}.json"
            checkpoint_data = {
                "name": name,
                "timestamp": time.time(),
                "session_state": self.session_state.to_dict(),
            }
            with checkpoint_file.open("w") as f:
                json.dump(checkpoint_data, f, indent=2)

    def load_checkpoint(self, name: str) -> bool:
        """Replace the session state from a named checkpoint.

        Returns True on success, False if the checkpoint is missing or
        malformed (best-effort: any restore error is swallowed).
        """
        checkpoint_file = self.checkpoints_dir / f"{name}.json"
        if not checkpoint_file.exists():
            return False
        try:
            with checkpoint_file.open() as f:
                checkpoint_data = json.load(f)
            session_data = checkpoint_data["session_state"]
            self.session_state = SessionState(
                session_id=session_data["session_id"],
                start_time=session_data["start_time"],
                current_stage=session_data.get("current_stage"),
                metadata=session_data.get("metadata", {}),
            )
            stages = {}
            for stage_name, stage_data in session_data.get("stages", {}).items():
                issues = [
                    Issue(**issue_data)
                    for issue_data in stage_data.get("issues_found", [])
                ]
                stages[stage_name] = StageResult(
                    stage=stage_data["stage"],
                    status=StageStatus(stage_data["status"]),
                    start_time=stage_data["start_time"],
                    end_time=stage_data.get("end_time"),
                    duration=stage_data.get("duration"),
                    issues_found=issues,
                    fixes_applied=stage_data.get("fixes_applied", []),
                    error_message=stage_data.get("error_message"),
                )
            self.session_state.stages = stages
            global_issues = [
                Issue(**issue_data)
                for issue_data in session_data.get("global_issues", [])
            ]
            self.session_state.global_issues = global_issues
            self.session_state.fixes_applied = session_data.get("fixes_applied", [])
            self._save_state()
            return True
        except Exception:
            return False

    def list_checkpoints(self) -> list[dict[str, t.Any]]:
        """Return checkpoint metadata, newest first; unreadable files skipped."""
        import operator

        checkpoints: list[dict[str, t.Any]] = []
        # Fixed glob: the previous pattern contained stray spaces
        # (" * .json") and therefore never matched any checkpoint file.
        for checkpoint_file in self.checkpoints_dir.glob("*.json"):
            try:
                with checkpoint_file.open() as f:
                    data = json.load(f)
                checkpoints.append(
                    {
                        "name": data.get("name", checkpoint_file.stem),
                        "timestamp": data.get("timestamp", 0),
                        "file": str(checkpoint_file),
                    },
                )
            except Exception:
                continue

        checkpoints.sort(key=operator.itemgetter("timestamp"), reverse=True)
        return checkpoints

    async def reset_session(self) -> None:
        """Discard the current session, start a fresh one, and persist."""
        async with self._lock:
            self.session_state = SessionState(
                session_id=self._generate_session_id(),
                start_time=time.time(),
            )
            self._save_state()

    def _save_state(self) -> None:
        # NOTE(review): the batched-saver hook is accepted but batching is not
        # implemented; both branches of the previous code performed the same
        # synchronous save, so this now saves unconditionally.
        self._save_state_sync()

    def _save_state_sync(self) -> None:
        """Best-effort write of the session state to current_session.json."""
        state_file = self.state_dir / "current_session.json"
        try:
            with state_file.open("w") as f:
                json.dump(self.session_state.to_dict(), f, indent=2)
        except (OSError, TypeError, ValueError):
            # Fixed: the json module has no JSONEncodeError attribute, so the
            # original except clause itself raised AttributeError on failure.
            # json.dump raises TypeError/ValueError for unserializable data.
            pass

    def _load_state(self) -> bool:
        """Restore state from current_session.json via a temporary checkpoint."""
        state_file = self.state_dir / "current_session.json"
        if not state_file.exists():
            return False
        try:
            with state_file.open() as f:
                session_data = json.load(f)
            # Reuse the checkpoint loader by wrapping the saved state in a
            # throwaway checkpoint file.
            checkpoint_data = {"session_state": session_data}
            temp_checkpoint = self.checkpoints_dir / "_temp.json"
            with temp_checkpoint.open("w") as f:
                json.dump(checkpoint_data, f)
            result = self.load_checkpoint("_temp")
            temp_checkpoint.unlink(missing_ok=True)

            return result
        except Exception:
            return False
|
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
import time
|
|
4
|
+
import typing as t
|
|
5
|
+
from contextlib import asynccontextmanager, suppress
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
|
|
10
|
+
console = Console()
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class TaskInfo:
    """Bookkeeping record for one task tracked by AsyncTaskManager."""

    task_id: str
    task: asyncio.Task
    created_at: float  # epoch seconds at registration time
    description: str = ""
    timeout_seconds: float | None = None  # None means no timeout wrapper applied
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class AsyncTaskManager:
    """Supervises a bounded set of named asyncio tasks.

    Tasks register under a caller-supplied unique ``task_id``; a background
    loop prunes completed entries every 30 seconds.  Console output is
    best-effort and falls back to plain logging when the terminal stream is
    unusable.
    """

    def __init__(self, max_concurrent_tasks: int = 10) -> None:
        self.max_concurrent_tasks = max_concurrent_tasks
        self._tasks: dict[str, TaskInfo] = {}
        # Gates execution concurrency inside _wrap_task; registration is
        # additionally capped by the explicit count check in create_task.
        self._task_semaphore = asyncio.Semaphore(max_concurrent_tasks)
        self._cleanup_task: asyncio.Task | None = None
        self._running = False
        self._lock = asyncio.Lock()

    async def start(self) -> None:
        """Start the periodic cleanup loop."""
        self._running = True
        self._cleanup_task = asyncio.create_task(self._cleanup_loop())
        try:
            console.print(
                f"[green]๐ฏ Task Manager started (max {self.max_concurrent_tasks} concurrent)[/green]",
            )
        except (ValueError, OSError):
            # Console stream unusable; fall back to plain logging.
            logging.info(
                f"Task Manager started (max {self.max_concurrent_tasks} concurrent) ",
            )

    async def stop(self) -> None:
        """Stop the cleanup loop and cancel every tracked task."""
        self._running = False

        if self._cleanup_task:
            self._cleanup_task.cancel()
            with suppress(asyncio.CancelledError):
                await self._cleanup_task

        await self._cancel_all_tasks()
        try:
            console.print("[yellow]๐ฏ Task Manager stopped[/yellow]")
        except (ValueError, OSError):
            logging.info("Task Manager stopped")

    async def create_task(
        self,
        coro: t.Coroutine,
        task_id: str,
        description: str = "",
        timeout_seconds: float | None = None,
    ) -> asyncio.Task:
        """Register and start *coro* as a supervised task.

        Raises:
            ValueError: if *task_id* is already registered.
            RuntimeError: if the concurrent-task limit is reached.
        """
        # Hold the lock across check-and-insert: the previous code released
        # it between the duplicate/limit checks and registration, allowing
        # two coroutines to race past both checks.
        async with self._lock:
            if task_id in self._tasks:
                msg = f"Task {task_id} already exists"
                raise ValueError(msg)

            if len(self._tasks) >= self.max_concurrent_tasks:
                msg = f"Maximum concurrent tasks ({self.max_concurrent_tasks}) reached"
                raise RuntimeError(
                    msg,
                )

            if timeout_seconds:
                coro = asyncio.wait_for(coro, timeout=timeout_seconds)

            task = asyncio.create_task(self._wrap_task(coro, task_id))
            self._tasks[task_id] = TaskInfo(
                task_id=task_id,
                task=task,
                created_at=time.time(),
                description=description,
                timeout_seconds=timeout_seconds,
            )

        logger.info(f"Created task {task_id}: {description}")
        with suppress(ValueError, OSError):
            console.print(f"[blue]๐ Task {task_id} created: {description}[/blue]")
        return task

    async def _wrap_task(self, coro: t.Coroutine, task_id: str) -> t.Any:
        """Run *coro* under the concurrency semaphore; deregister on exit."""
        try:
            async with self._task_semaphore:
                result = await coro
                logger.info(f"Task {task_id} completed successfully")
                return result
        except asyncio.CancelledError:
            logger.info(f"Task {task_id} was cancelled")
            raise
        except Exception as e:
            logger.exception(f"Task {task_id} failed: {e}")
            with suppress(ValueError, OSError):
                console.print(f"[red]โ Task {task_id} failed: {e}[/red]")
            raise
        finally:
            # Always deregister, even on cancellation or failure.
            async with self._lock:
                if task_id in self._tasks:
                    del self._tasks[task_id]

    async def cancel_task(self, task_id: str) -> bool:
        """Request cancellation of *task_id*; return False if unknown."""
        async with self._lock:
            task_info = self._tasks.get(task_id)
            if not task_info:
                return False

            task_info.task.cancel()
            logger.info(f"Cancelled task {task_id}")
            with suppress(ValueError, OSError):
                console.print(f"[yellow]๐ซ Task {task_id} cancelled[/yellow]")
            return True

    @staticmethod
    def _task_status_dict(task_id: str, task_info: TaskInfo) -> dict[str, t.Any]:
        """Serialize one task's bookkeeping to a plain dict."""
        return {
            "task_id": task_id,
            "description": task_info.description,
            "created_at": task_info.created_at,
            "running_time": time.time() - task_info.created_at,
            "done": task_info.task.done(),
            "cancelled": task_info.task.cancelled(),
            "timeout_seconds": task_info.timeout_seconds,
        }

    async def get_task_status(self, task_id: str) -> dict[str, t.Any] | None:
        """Return status info for *task_id*, or None if unknown."""
        async with self._lock:
            task_info = self._tasks.get(task_id)
            if not task_info:
                return None

            return self._task_status_dict(task_id, task_info)

    async def list_active_tasks(self) -> list[dict[str, t.Any]]:
        """Return status dicts for every tracked task."""
        async with self._lock:
            return [
                self._task_status_dict(task_id, task_info)
                for task_id, task_info in self._tasks.items()
            ]

    async def wait_for_task(self, task_id: str, timeout: float | None = None) -> t.Any:
        """Await *task_id*'s result, optionally bounded by *timeout* seconds.

        Raises:
            ValueError: if the task is unknown.
            TimeoutError: if *timeout* elapses first.
        """
        async with self._lock:
            task_info = self._tasks.get(task_id)
            if not task_info:
                msg = f"Task {task_id} not found"
                raise ValueError(msg)

        # Await OUTSIDE the lock: _wrap_task's finally clause needs the same
        # lock to deregister the task, so awaiting while holding it would
        # deadlock on task completion.
        try:
            if timeout:
                return await asyncio.wait_for(task_info.task, timeout=timeout)
            return await task_info.task
        except TimeoutError:
            logger.warning(f"Timeout waiting for task {task_id}")
            raise

    @asynccontextmanager
    async def managed_task(
        self,
        coro: t.Coroutine,
        task_id: str,
        description: str = "",
        timeout_seconds: float | None = None,
    ):
        """Create a supervised task and cancel it on context exit if unfinished."""
        task = await self.create_task(coro, task_id, description, timeout_seconds)
        try:
            yield task
        finally:
            if not task.done():
                task.cancel()
                with suppress(asyncio.CancelledError):
                    await task

    async def _cancel_all_tasks(self) -> None:
        """Cancel every tracked task and wait for them all to settle."""
        async with self._lock:
            tasks_to_cancel = list(self._tasks.values())

        if not tasks_to_cancel:
            return

        try:
            console.print(
                f"[yellow]๐งน Cancelling {len(tasks_to_cancel)} running tasks[/yellow]",
            )
        except (ValueError, OSError):
            logging.info(f"Cancelling {len(tasks_to_cancel)} running tasks")

        for task_info in tasks_to_cancel:
            task_info.task.cancel()

        # Gather outside the lock: _wrap_task's finally needs the lock to
        # deregister each task as it finishes.
        await asyncio.gather(
            *[task_info.task for task_info in tasks_to_cancel],
            return_exceptions=True,
        )

        async with self._lock:
            self._tasks.clear()

    async def _cleanup_loop(self) -> None:
        """Every 30s, drop completed task entries; back off 5s after errors."""
        while self._running:
            try:
                await self._cleanup_completed_tasks()
                await asyncio.sleep(30)
            except asyncio.CancelledError:
                break
            except Exception as e:
                logger.exception(f"Error in task cleanup loop: {e}")
                await asyncio.sleep(5)

    async def _cleanup_completed_tasks(self) -> None:
        """Remove registry entries whose underlying task has finished."""
        async with self._lock:
            completed_tasks = []
            for task_id, task_info in list(self._tasks.items()):
                if task_info.task.done():
                    completed_tasks.append(task_id)
                    del self._tasks[task_id]

        if completed_tasks:
            logger.info(f"Cleaned up {len(completed_tasks)} completed tasks")

    def get_stats(self) -> dict[str, t.Any]:
        """Return a snapshot of manager counters."""
        return {
            "running": self._running,
            "active_tasks": len(self._tasks),
            "max_concurrent_tasks": self.max_concurrent_tasks,
            # NOTE(review): reads a private Semaphore attribute; asyncio
            # exposes no public accessor for the current value.
            "available_slots": self._task_semaphore._value,
        }
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"""MCP tool registration entry points.

Re-exports the ``register_*`` function from each tool module so callers can
wire up every tool family from a single import.
"""

from .core_tools import register_core_tools
from .execution_tools import register_execution_tools
from .intelligence_tool_registry import register_intelligence_tools
from .monitoring_tools import register_monitoring_tools
from .proactive_tools import register_proactive_tools
from .progress_tools import register_progress_tools
from .utility_tools import register_utility_tools

# Sorted alphabetically to match the import order above (the previous list
# placed register_progress_tools before register_proactive_tools).
__all__ = [
    "register_core_tools",
    "register_execution_tools",
    "register_intelligence_tools",
    "register_monitoring_tools",
    "register_proactive_tools",
    "register_progress_tools",
    "register_utility_tools",
]
|