crackerjack 0.30.3__py3-none-any.whl → 0.31.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crackerjack has been flagged as potentially problematic. See the registry's advisory page for details.
- crackerjack/CLAUDE.md +1005 -0
- crackerjack/RULES.md +380 -0
- crackerjack/__init__.py +42 -13
- crackerjack/__main__.py +227 -299
- crackerjack/agents/__init__.py +41 -0
- crackerjack/agents/architect_agent.py +281 -0
- crackerjack/agents/base.py +170 -0
- crackerjack/agents/coordinator.py +512 -0
- crackerjack/agents/documentation_agent.py +498 -0
- crackerjack/agents/dry_agent.py +388 -0
- crackerjack/agents/formatting_agent.py +245 -0
- crackerjack/agents/import_optimization_agent.py +281 -0
- crackerjack/agents/performance_agent.py +669 -0
- crackerjack/agents/proactive_agent.py +104 -0
- crackerjack/agents/refactoring_agent.py +788 -0
- crackerjack/agents/security_agent.py +529 -0
- crackerjack/agents/test_creation_agent.py +657 -0
- crackerjack/agents/test_specialist_agent.py +486 -0
- crackerjack/agents/tracker.py +212 -0
- crackerjack/api.py +560 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/facade.py +104 -0
- crackerjack/cli/handlers.py +267 -0
- crackerjack/cli/interactive.py +471 -0
- crackerjack/cli/options.py +409 -0
- crackerjack/cli/utils.py +18 -0
- crackerjack/code_cleaner.py +618 -928
- crackerjack/config/__init__.py +19 -0
- crackerjack/config/hooks.py +218 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +406 -0
- crackerjack/core/autofix_coordinator.py +200 -0
- crackerjack/core/container.py +104 -0
- crackerjack/core/enhanced_container.py +542 -0
- crackerjack/core/performance.py +243 -0
- crackerjack/core/phase_coordinator.py +585 -0
- crackerjack/core/proactive_workflow.py +316 -0
- crackerjack/core/session_coordinator.py +289 -0
- crackerjack/core/workflow_orchestrator.py +826 -0
- crackerjack/dynamic_config.py +94 -103
- crackerjack/errors.py +263 -41
- crackerjack/executors/__init__.py +11 -0
- crackerjack/executors/async_hook_executor.py +431 -0
- crackerjack/executors/cached_hook_executor.py +242 -0
- crackerjack/executors/hook_executor.py +345 -0
- crackerjack/executors/individual_hook_executor.py +669 -0
- crackerjack/intelligence/__init__.py +44 -0
- crackerjack/intelligence/adaptive_learning.py +751 -0
- crackerjack/intelligence/agent_orchestrator.py +551 -0
- crackerjack/intelligence/agent_registry.py +414 -0
- crackerjack/intelligence/agent_selector.py +502 -0
- crackerjack/intelligence/integration.py +290 -0
- crackerjack/interactive.py +576 -315
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +137 -0
- crackerjack/managers/publish_manager.py +433 -0
- crackerjack/managers/test_command_builder.py +151 -0
- crackerjack/managers/test_executor.py +443 -0
- crackerjack/managers/test_manager.py +258 -0
- crackerjack/managers/test_manager_backup.py +1124 -0
- crackerjack/managers/test_progress.py +114 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +336 -0
- crackerjack/mcp/client_runner.py +104 -0
- crackerjack/mcp/context.py +621 -0
- crackerjack/mcp/dashboard.py +636 -0
- crackerjack/mcp/enhanced_progress_monitor.py +479 -0
- crackerjack/mcp/file_monitor.py +336 -0
- crackerjack/mcp/progress_components.py +569 -0
- crackerjack/mcp/progress_monitor.py +949 -0
- crackerjack/mcp/rate_limiter.py +332 -0
- crackerjack/mcp/server.py +22 -0
- crackerjack/mcp/server_core.py +244 -0
- crackerjack/mcp/service_watchdog.py +501 -0
- crackerjack/mcp/state.py +395 -0
- crackerjack/mcp/task_manager.py +257 -0
- crackerjack/mcp/tools/__init__.py +17 -0
- crackerjack/mcp/tools/core_tools.py +249 -0
- crackerjack/mcp/tools/error_analyzer.py +308 -0
- crackerjack/mcp/tools/execution_tools.py +372 -0
- crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
- crackerjack/mcp/tools/intelligence_tools.py +314 -0
- crackerjack/mcp/tools/monitoring_tools.py +502 -0
- crackerjack/mcp/tools/proactive_tools.py +384 -0
- crackerjack/mcp/tools/progress_tools.py +217 -0
- crackerjack/mcp/tools/utility_tools.py +341 -0
- crackerjack/mcp/tools/workflow_executor.py +565 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +39 -0
- crackerjack/mcp/websocket/endpoints.py +559 -0
- crackerjack/mcp/websocket/jobs.py +253 -0
- crackerjack/mcp/websocket/server.py +116 -0
- crackerjack/mcp/websocket/websocket_handler.py +78 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/__init__.py +31 -0
- crackerjack/models/config.py +93 -0
- crackerjack/models/config_adapter.py +230 -0
- crackerjack/models/protocols.py +118 -0
- crackerjack/models/task.py +154 -0
- crackerjack/monitoring/ai_agent_watchdog.py +450 -0
- crackerjack/monitoring/regression_prevention.py +638 -0
- crackerjack/orchestration/__init__.py +0 -0
- crackerjack/orchestration/advanced_orchestrator.py +970 -0
- crackerjack/orchestration/coverage_improvement.py +223 -0
- crackerjack/orchestration/execution_strategies.py +341 -0
- crackerjack/orchestration/test_progress_streamer.py +636 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +246 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +259 -0
- crackerjack/py313.py +8 -3
- crackerjack/services/__init__.py +22 -0
- crackerjack/services/cache.py +314 -0
- crackerjack/services/config.py +358 -0
- crackerjack/services/config_integrity.py +99 -0
- crackerjack/services/contextual_ai_assistant.py +516 -0
- crackerjack/services/coverage_ratchet.py +356 -0
- crackerjack/services/debug.py +736 -0
- crackerjack/services/dependency_monitor.py +617 -0
- crackerjack/services/enhanced_filesystem.py +439 -0
- crackerjack/services/file_hasher.py +151 -0
- crackerjack/services/filesystem.py +421 -0
- crackerjack/services/git.py +176 -0
- crackerjack/services/health_metrics.py +611 -0
- crackerjack/services/initialization.py +873 -0
- crackerjack/services/log_manager.py +286 -0
- crackerjack/services/logging.py +174 -0
- crackerjack/services/metrics.py +578 -0
- crackerjack/services/pattern_cache.py +362 -0
- crackerjack/services/pattern_detector.py +515 -0
- crackerjack/services/performance_benchmarks.py +653 -0
- crackerjack/services/security.py +163 -0
- crackerjack/services/server_manager.py +234 -0
- crackerjack/services/smart_scheduling.py +144 -0
- crackerjack/services/tool_version_service.py +61 -0
- crackerjack/services/unified_config.py +437 -0
- crackerjack/services/version_checker.py +248 -0
- crackerjack/slash_commands/__init__.py +14 -0
- crackerjack/slash_commands/init.md +122 -0
- crackerjack/slash_commands/run.md +163 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack-0.31.7.dist-info/METADATA +742 -0
- crackerjack-0.31.7.dist-info/RECORD +149 -0
- crackerjack-0.31.7.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -34
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/crackerjack.py +0 -3805
- crackerjack/pyproject.toml +0 -286
- crackerjack-0.30.3.dist-info/METADATA +0 -1290
- crackerjack-0.30.3.dist-info/RECORD +0 -16
- {crackerjack-0.30.3.dist-info → crackerjack-0.31.7.dist-info}/WHEEL +0 -0
- {crackerjack-0.30.3.dist-info → crackerjack-0.31.7.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,617 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import subprocess
|
|
3
|
+
import tempfile
|
|
4
|
+
import time
|
|
5
|
+
import tomllib
|
|
6
|
+
import typing as t
|
|
7
|
+
from contextlib import suppress
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
from rich.console import Console
|
|
12
|
+
|
|
13
|
+
from crackerjack.models.protocols import FileSystemInterface
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass
class DependencyVulnerability:
    """A single known security vulnerability affecting one dependency.

    Instances are populated from the JSON output of either ``safety`` or
    ``pip-audit`` (see the ``_parse_*_output`` methods below); field values
    are whatever the scanner reported, with ``""`` when a field was absent.
    """

    package: str  # distribution name, e.g. "requests"
    installed_version: str  # version currently declared/installed
    vulnerability_id: str  # advisory identifier as reported by the tool (format varies by tool)
    severity: str  # scanner-reported severity; defaults to "unknown" when not provided
    advisory_url: str  # link to the advisory page; may be empty
    vulnerable_versions: str  # version range/spec the advisory applies to
    patched_version: str  # known fixed version; may be empty
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class MajorUpdate:
    """An available major-version bump for one dependency, per PyPI."""

    package: str  # distribution name
    current_version: str  # version currently declared in pyproject.toml
    latest_version: str  # newest version published on PyPI
    release_date: str  # upload timestamp of the latest release; "" when unknown
    breaking_changes: bool  # heuristic: True when the latest major version component is non-zero
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class DependencyMonitorService:
    """Monitor the dependencies declared in a project's pyproject.toml.

    Two kinds of checks are performed:

    * security vulnerabilities — by running the external ``safety`` and
      ``pip-audit`` tools through ``uv run`` against a temporary
      requirements file;
    * available major-version updates — by querying the PyPI JSON API.

    PyPI lookup results and the "last notified" timestamp are cached on
    disk in ``.crackerjack/dependency_cache.json`` so repeated runs stay
    cheap.  All failure paths deliberately degrade to "no findings" rather
    than raising.
    """

    def __init__(
        self,
        filesystem: FileSystemInterface,
        console: Console | None = None,
    ) -> None:
        """Bind console/paths.

        Args:
            filesystem: injected filesystem abstraction.
                NOTE(review): stored but never read by any method in this
                class — presumably kept for DI/protocol symmetry; confirm.
            console: Rich console for output; a fresh one is created when
                not supplied.
        """
        self.filesystem = filesystem
        self.console = console or Console()
        # Uses the process CWD, not this file's location — callers must run
        # from the project root for pyproject.toml/cache paths to resolve.
        self.project_root = Path.cwd()
        self.pyproject_path = self.project_root / "pyproject.toml"
        self.cache_file = self.project_root / ".crackerjack" / "dependency_cache.json"

    def check_dependency_updates(self) -> bool:
        """Run the full dependency check and report findings to the console.

        Returns:
            True when anything was reported — vulnerabilities always, major
            updates only when the weekly notification window allows
            (see _should_notify_major_updates).  False otherwise.
        """
        if not self.pyproject_path.exists():
            return False

        dependencies = self._parse_dependencies()
        if not dependencies:
            return False

        vulnerabilities = self._check_security_vulnerabilities(dependencies)
        major_updates = self._check_major_updates(dependencies)

        # Vulnerabilities take precedence: when present, major updates are
        # not reported in the same run.
        if vulnerabilities:
            self._report_vulnerabilities(vulnerabilities)
            return True

        if major_updates and self._should_notify_major_updates():
            self._report_major_updates(major_updates)
            return True

        return False

    def _parse_dependencies(self) -> dict[str, str]:
        """Parse dependencies from pyproject.toml file.

        Returns:
            Mapping of package name -> version string ("latest" when the
            spec carries no version constraint).  Empty dict on any parse
            failure (a warning is printed).
        """
        try:
            with self.pyproject_path.open("rb") as f:
                data = tomllib.load(f)

            dependencies = {}
            project_data = data.get("project", {})

            self._extract_main_dependencies(project_data, dependencies)
            self._extract_optional_dependencies(project_data, dependencies)

            return dependencies

        except Exception as e:
            self.console.print(
                f"[yellow]Warning: Failed to parse pyproject.toml: {e}[/yellow]",
            )
            return {}

    def _extract_main_dependencies(
        self,
        project_data: dict[str, t.Any],
        dependencies: dict[str, str],
    ) -> None:
        """Extract main dependencies from project data (mutates `dependencies`)."""
        if "dependencies" not in project_data:
            return

        for dep in project_data["dependencies"]:
            name, version = self._parse_dependency_spec(dep)
            if name and version:
                dependencies[name] = version

    def _extract_optional_dependencies(
        self,
        project_data: dict[str, t.Any],
        dependencies: dict[str, str],
    ) -> None:
        """Extract optional dependencies from project data (mutates `dependencies`).

        Note: on a name collision an optional-group entry overwrites the
        main-dependency entry, since all groups share one flat dict.
        """
        if "optional-dependencies" not in project_data:
            return

        for group_deps in project_data["optional-dependencies"].values():
            for dep in group_deps:
                name, version = self._parse_dependency_spec(dep)
                if name and version:
                    dependencies[name] = version

    def _parse_dependency_spec(self, spec: str) -> tuple[str | None, str | None]:
        """Split a requirement spec into (package, version).

        Returns (None, None) for empty specs or option-style lines starting
        with "-".  When no comparison operator is present, the version is
        the sentinel "latest".

        NOTE(review): only the first operator found is honored, so a
        compound spec like "pkg>=1.0,<2.0" yields version "1.0,<2.0", and
        extras ("pkg[extra]>=1.0") stay glued to the package name — confirm
        downstream consumers tolerate this.
        """
        if not spec or spec.startswith("-"):
            return None, None

        for operator in (">=", "<=", "==", "~=", "!=", ">", "<"):
            if operator in spec:
                parts = spec.split(operator, 1)
                if len(parts) == 2:
                    package = parts[0].strip()
                    version = parts[1].strip()
                    return package, version

        return spec.strip(), "latest"

    def _check_security_vulnerabilities(
        self,
        dependencies: dict[str, str],
    ) -> list[DependencyVulnerability]:
        """Scan dependencies with safety, falling back to pip-audit.

        pip-audit is only consulted when safety reported nothing — which
        includes the case where safety itself failed (failures return an
        empty list), so the fallback also acts as a retry with a second tool.
        """
        vulnerabilities: list[DependencyVulnerability] = []

        safety_vulns = self._check_with_safety(dependencies)
        vulnerabilities.extend(safety_vulns)

        if not vulnerabilities:
            pip_audit_vulns = self._check_with_pip_audit(dependencies)
            vulnerabilities.extend(pip_audit_vulns)

        return vulnerabilities

    def _check_with_safety(
        self,
        dependencies: dict[str, str],
    ) -> list[DependencyVulnerability]:
        """Run `safety check` (via uv) against a temporary requirements file."""
        # "__TEMP_FILE__" is a placeholder substituted with the real temp
        # path in _execute_vulnerability_command.
        cmd = ["uv", "run", "safety", "check", "--file", "__TEMP_FILE__", "--json"]
        return self._run_vulnerability_tool(
            dependencies,
            cmd,
            self._parse_safety_output,
        )

    def _check_with_pip_audit(
        self,
        dependencies: dict[str, str],
    ) -> list[DependencyVulnerability]:
        """Run `pip-audit` (via uv) against a temporary requirements file."""
        cmd = [
            "uv",
            "run",
            "pip-audit",
            "--requirement",
            "__TEMP_FILE__",  # substituted in _execute_vulnerability_command
            "--format",
            "json",
        ]
        return self._run_vulnerability_tool(
            dependencies,
            cmd,
            self._parse_pip_audit_output,
        )

    def _run_vulnerability_tool(
        self,
        dependencies: dict[str, str],
        command_template: list[str],
        parser_func: t.Callable[[t.Any], list[DependencyVulnerability]],
    ) -> list[DependencyVulnerability]:
        """Common logic for running vulnerability scanning tools.

        Writes the dependency list to a temp requirements file, runs the
        tool, parses its JSON output, and always deletes the temp file.
        Any failure (tool missing, timeout, bad JSON, ...) yields [].
        """
        try:
            temp_file = self._create_requirements_file(dependencies)
            try:
                result = self._execute_vulnerability_command(
                    command_template,
                    temp_file,
                )
                return self._process_vulnerability_result(result, parser_func)
            finally:
                # Always clean up the temp requirements file.
                Path(temp_file).unlink(missing_ok=True)
        # NOTE(review): the trailing `Exception` makes the specific types
        # above redundant — every error is swallowed and reported as
        # "no findings"; confirm this best-effort behavior is intended.
        except (
            subprocess.CalledProcessError,
            subprocess.TimeoutExpired,
            json.JSONDecodeError,
            Exception,
        ):
            return []  # Vulnerability check failed, return empty list

    def _create_requirements_file(self, dependencies: dict[str, str]) -> str:
        """Create temporary requirements file for vulnerability scanning.

        Entries with the "latest" sentinel are written unpinned; everything
        else is written as an exact pin ("pkg==version").  Caller owns
        deletion of the returned path.
        """
        with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f:
            for package, version in dependencies.items():
                if version != "latest":
                    f.write(f"{package}=={version}\n")
                else:
                    f.write(f"{package}\n")
            return f.name

    def _execute_vulnerability_command(
        self,
        command_template: list[str],
        temp_file: str,
    ) -> subprocess.CompletedProcess[str]:
        """Execute vulnerability scanning command with temp file.

        check=False on purpose: scanners exit non-zero when they find
        vulnerabilities, and that case is handled by the result processor.
        """
        cmd = [part.replace("__TEMP_FILE__", temp_file) for part in command_template]
        return subprocess.run(
            cmd,
            check=False,
            capture_output=True,
            text=True,
            timeout=30,  # hard cap so a hung scanner can't stall the run
        )

    def _process_vulnerability_result(
        self,
        result: subprocess.CompletedProcess[str],
        parser_func: t.Callable[[t.Any], list[DependencyVulnerability]],
    ) -> list[DependencyVulnerability]:
        """Process vulnerability scan result using appropriate parser.

        Exit code 0 means a clean scan; non-zero with JSON on stdout means
        findings to parse.  json.loads errors propagate to the caller's
        except block in _run_vulnerability_tool.
        """
        if result.returncode == 0:
            return []

        if result.stdout:
            data = json.loads(result.stdout)
            return parser_func(data)

        return []

    def _parse_safety_output(self, safety_data: t.Any) -> list[DependencyVulnerability]:
        """Map `safety --json` entries onto DependencyVulnerability records."""
        vulnerabilities: list[DependencyVulnerability] = []

        # NOTE(review): suppress() wraps the whole loop — one malformed
        # entry silently discards all remaining entries (records built so
        # far are still returned).
        with suppress(Exception):
            for vuln in safety_data:
                vulnerabilities.append(
                    DependencyVulnerability(
                        package=vuln.get("package", ""),
                        installed_version=vuln.get("installed_version", ""),
                        vulnerability_id=vuln.get("vulnerability_id", ""),
                        severity=vuln.get("severity", "unknown"),
                        advisory_url=vuln.get("more_info_url", ""),
                        vulnerable_versions=vuln.get("vulnerable_spec", ""),
                        patched_version=vuln.get("analyzed_version", ""),
                    ),
                )

        return vulnerabilities

    def _parse_pip_audit_output(
        self,
        audit_data: t.Any,
    ) -> list[DependencyVulnerability]:
        """Map `pip-audit --format json` entries onto DependencyVulnerability records."""
        vulnerabilities: list[DependencyVulnerability] = []

        # NOTE(review): like _parse_safety_output, one bad entry aborts the
        # whole loop; also `fix_versions` present-but-empty ([]) raises
        # IndexError on [0], which this suppress hides — confirm acceptable.
        with suppress(Exception):
            for vuln in audit_data.get("vulnerabilities", []):
                package = vuln.get("package", {})
                vulnerabilities.append(
                    DependencyVulnerability(
                        package=package.get("name", ""),
                        installed_version=package.get("version", ""),
                        vulnerability_id=vuln.get("id", ""),
                        severity=vuln.get("severity", "unknown"),
                        advisory_url=vuln.get("link", ""),
                        vulnerable_versions=vuln.get("vulnerable_ranges", ""),
                        patched_version=vuln.get("fix_versions", [""])[0],
                    ),
                )

        return vulnerabilities

    def _check_major_updates(self, dependencies: dict[str, str]) -> list[MajorUpdate]:
        """Find dependencies whose latest PyPI release bumps the major version.

        Per-(package, version) results are cached for 24 hours; the cache is
        saved back to disk once after all packages have been checked.
        """
        major_updates: list[MajorUpdate] = []
        cache = self._load_update_cache()
        current_time = time.time()

        for package, current_version in dependencies.items():
            if current_version == "latest":
                continue  # unpinned deps have no baseline version to compare

            update = self._check_package_major_update(
                package,
                current_version,
                cache,
                current_time,
            )
            if update:
                major_updates.append(update)

        self._save_update_cache(cache)
        return major_updates

    def _check_package_major_update(
        self,
        package: str,
        current_version: str,
        cache: dict[str, t.Any],
        current_time: float,
    ) -> MajorUpdate | None:
        """Check if a specific package has a major update available."""
        cache_key = self._build_cache_key(package, current_version)

        # Try to get from cache first
        cached_update = self._get_cached_major_update(
            cache_key,
            cache,
            current_time,
            package,
            current_version,
        )
        if cached_update is not None:
            return cached_update

        # Check for updates and update cache
        # NOTE(review): a valid cache entry with has_major_update == False
        # also falls through to here (the cached-path returns None for it),
        # re-hitting PyPI despite the fresh cache — confirm intended.
        return self._fetch_and_cache_update_info(
            package,
            current_version,
            cache_key,
            cache,
            current_time,
        )

    def _build_cache_key(self, package: str, current_version: str) -> str:
        """Build cache key for package version."""
        return f"{package}_{current_version}"

    def _get_cached_major_update(
        self,
        cache_key: str,
        cache: dict[str, t.Any],
        current_time: float,
        package: str,
        current_version: str,
    ) -> MajorUpdate | None:
        """Get major update from cache if available and valid.

        Returns None both when the entry is missing/expired and when the
        cached verdict was "no major update" — callers cannot distinguish
        the two cases.
        """
        if not self._is_cache_entry_valid(cache_key, cache, current_time):
            return None

        cached_data = cache[cache_key]
        if not cached_data["has_major_update"]:
            return None

        return self._create_major_update_from_cache(
            package,
            current_version,
            cached_data,
        )

    def _is_cache_entry_valid(
        self,
        cache_key: str,
        cache: dict[str, t.Any],
        current_time: float,
    ) -> bool:
        """Check if cache entry exists and is not expired."""
        if cache_key not in cache:
            return False

        cached_data = cache[cache_key]
        cache_age = current_time - cached_data["timestamp"]
        return cache_age < 86400  # Not expired (24 hours)

    def _create_major_update_from_cache(
        self,
        package: str,
        current_version: str,
        cached_data: dict[str, t.Any],
    ) -> MajorUpdate:
        """Create MajorUpdate instance from cached data."""
        return MajorUpdate(
            package=package,
            current_version=current_version,
            latest_version=cached_data["latest_version"],
            release_date=cached_data["release_date"],
            breaking_changes=cached_data["breaking_changes"],
        )

    def _fetch_and_cache_update_info(
        self,
        package: str,
        current_version: str,
        cache_key: str,
        cache: dict[str, t.Any],
        current_time: float,
    ) -> MajorUpdate | None:
        """Fetch latest version info and cache the result.

        Mutates `cache` in place; persistence happens later in
        _check_major_updates.  Returns None when PyPI gave no usable info
        or when the latest release is not a major bump.
        """
        latest_info = self._get_latest_version_info(package)
        if not latest_info:
            return None

        has_major_update = self._is_major_version_update(
            current_version,
            latest_info["version"],
        )

        self._update_cache_entry(
            cache,
            cache_key,
            current_time,
            has_major_update,
            latest_info,
        )

        return self._create_major_update_if_needed(
            package,
            current_version,
            latest_info,
            has_major_update,
        )

    def _create_major_update_if_needed(
        self,
        package: str,
        current_version: str,
        latest_info: dict[str, t.Any],
        has_major_update: bool,
    ) -> MajorUpdate | None:
        """Create MajorUpdate instance if there is a major update available."""
        if not has_major_update:
            return None

        return MajorUpdate(
            package=package,
            current_version=current_version,
            latest_version=latest_info["version"],
            release_date=latest_info["release_date"],
            breaking_changes=latest_info["breaking_changes"],
        )

    def _update_cache_entry(
        self,
        cache: dict[str, t.Any],
        cache_key: str,
        current_time: float,
        has_major_update: bool,
        latest_info: dict[str, t.Any],
    ) -> None:
        """Update cache with latest version information."""
        cache[cache_key] = {
            "timestamp": current_time,
            "has_major_update": has_major_update,
            "latest_version": latest_info["version"],
            "release_date": latest_info["release_date"],
            "breaking_changes": latest_info["breaking_changes"],
        }

    def _get_latest_version_info(self, package: str) -> dict[str, t.Any] | None:
        """Fetch latest-version metadata for *package*; None on any failure."""
        try:
            data = self._fetch_pypi_data(package)
            return self._extract_version_info(data)
        except Exception:
            # Network/HTTP/JSON failures all collapse to "no info".
            return None

    def _fetch_pypi_data(self, package: str) -> dict[str, t.Any]:
        """Fetch package data from PyPI API."""
        import urllib.request  # local import keeps urllib off the module's cold path

        url = f"https://pypi.org/pypi/{package}/json"
        self._validate_pypi_url(url)

        with urllib.request.urlopen(url, timeout=10) as response:  # nosec B310
            return json.load(response)

    def _validate_pypi_url(self, url: str) -> None:
        """Validate PyPI URL for security.

        Only https URLs under pypi.org are allowed; anything else raises
        ValueError (defends the urlopen call above against scheme abuse).
        """
        if not url.startswith("https://pypi.org/"):
            msg = f"Invalid URL scheme: {url}"
            raise ValueError(msg)

    def _extract_version_info(self, data: dict[str, t.Any]) -> dict[str, t.Any] | None:
        """Extract version information from PyPI response data.

        Returns a dict with "version", "release_date", "breaking_changes",
        or None when the response carries no version.
        """
        info = data.get("info", {})
        releases = data.get("releases", {})

        latest_version = info.get("version", "")
        if not latest_version:
            return None

        release_date = self._get_release_date(releases, latest_version)
        breaking_changes = self._has_breaking_changes(latest_version)

        return {
            "version": latest_version,
            "release_date": release_date,
            "breaking_changes": breaking_changes,
        }

    def _get_release_date(self, releases: dict[str, t.Any], version: str) -> str:
        """Extract release date for a specific version."""
        release_info = releases.get(version, [])
        if release_info:
            # First uploaded artifact's timestamp stands in for the release date.
            return release_info[0].get("upload_time", "")
        return ""

    def _has_breaking_changes(self, version: str) -> bool:
        """Determine if version likely has breaking changes based on major version."""
        # Heuristic only: any non-zero major component is flagged; versions
        # without a "." are never flagged.
        return version.split(".")[0] != "0" if "." in version else False

    def _is_major_version_update(self, current: str, latest: str) -> bool:
        """Return True when *latest* has a strictly greater integer major component.

        Non-integer major components (e.g. "2a1") raise inside the
        suppress() block, so the comparison falls through to False.
        """
        with suppress(ValueError, IndexError):
            current_parts = current.split(".")
            latest_parts = latest.split(".")

            if current_parts and latest_parts:
                current_major = int(current_parts[0])
                latest_major = int(latest_parts[0])
                return latest_major > current_major

        return False

    def _should_notify_major_updates(self) -> bool:
        """Rate-limit major-update notifications to once per 7 days.

        Side effect: when notification is allowed, the timestamp is written
        back to the on-disk cache immediately.
        """
        cache = self._load_update_cache()
        last_major_notification = cache.get("last_major_notification", 0)
        current_time = time.time()

        if current_time - last_major_notification > 604800:  # 7 days in seconds
            cache["last_major_notification"] = current_time
            self._save_update_cache(cache)
            return True

        return False

    def _load_update_cache(self) -> dict[str, t.Any]:
        """Load the on-disk cache; any read/parse error yields an empty cache."""
        with suppress(Exception):
            if self.cache_file.exists():
                with self.cache_file.open() as f:
                    return json.load(f)
        return {}

    def _save_update_cache(self, cache: dict[str, t.Any]) -> None:
        """Persist the cache to disk; failures are silently ignored (best effort)."""
        with suppress(Exception):
            # NOTE(review): mkdir without parents=True — silently fails if
            # project_root itself is missing; confirm acceptable.
            self.cache_file.parent.mkdir(exist_ok=True)
            with self.cache_file.open("w") as f:
                json.dump(cache, f, indent=2)

    def _report_vulnerabilities(
        self,
        vulnerabilities: list[DependencyVulnerability],
    ) -> None:
        """Print a formatted vulnerability report to the console."""
        self.console.print("\n[bold red]🚨 Security Vulnerabilities Found![/bold red]")
        self.console.print(
            "[red]Please update the following packages immediately:[/red]\n",
        )

        for vuln in vulnerabilities:
            self.console.print(f"[red]• {vuln.package} {vuln.installed_version}[/red]")
            self.console.print(f" [dim]Vulnerability ID: {vuln.vulnerability_id}[/dim]")
            self.console.print(f" [dim]Severity: {vuln.severity.upper()}[/dim]")
            if vuln.patched_version:
                self.console.print(
                    f" [green]Fix available: {vuln.patched_version}[/green]",
                )
            if vuln.advisory_url:
                self.console.print(f" [dim]More info: {vuln.advisory_url}[/dim]")
            self.console.print()

    def _report_major_updates(self, major_updates: list[MajorUpdate]) -> None:
        """Print a formatted major-update report to the console."""
        self.console.print(
            "\n[bold yellow]📦 Major Version Updates Available[/bold yellow]",
        )
        self.console.print(
            "[yellow]The following packages have major updates:[/yellow]\n",
        )

        for update in major_updates:
            self.console.print(f"[yellow]• {update.package}[/yellow]")
            self.console.print(f" [dim]Current: {update.current_version}[/dim]")
            self.console.print(f" [dim]Latest: {update.latest_version}[/dim]")
            if update.release_date:
                release_date = update.release_date[:10]  # keep just YYYY-MM-DD
                self.console.print(f" [dim]Released: {release_date}[/dim]")
            if update.breaking_changes:
                self.console.print(" [red]⚠️ May contain breaking changes[/red]")
            self.console.print()

        self.console.print(
            "[dim]Review changelogs before updating to major versions.[/dim]",
        )

    def force_check_updates(
        self,
    ) -> tuple[list[DependencyVulnerability], list[MajorUpdate]]:
        """Run both checks with progress output and return the raw results.

        Unlike check_dependency_updates, this prints progress instead of
        reports, does not consult the weekly notification rate limit, and
        hands the findings back to the caller.
        """
        if not self.pyproject_path.exists():
            self.console.print("[yellow]⚠️ No pyproject.toml found[/yellow]")
            return [], []

        self.console.print("[dim]Parsing dependencies from pyproject.toml...[/dim]")
        dependencies = self._parse_dependencies()
        if not dependencies:
            self.console.print(
                "[yellow]⚠️ No dependencies found in pyproject.toml[/yellow]",
            )
            return [], []

        self.console.print(
            f"[dim]Found {len(dependencies)} dependencies to check[/dim]",
        )

        self.console.print("[dim]Checking for security vulnerabilities...[/dim]")
        vulnerabilities = self._check_security_vulnerabilities(dependencies)

        self.console.print("[dim]Checking for major version updates...[/dim]")
        major_updates = self._check_major_updates(dependencies)

        return vulnerabilities, major_updates
|