crackerjack 0.33.0__py3-none-any.whl → 0.33.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/__main__.py +1350 -34
- crackerjack/adapters/__init__.py +17 -0
- crackerjack/adapters/lsp_client.py +358 -0
- crackerjack/adapters/rust_tool_adapter.py +194 -0
- crackerjack/adapters/rust_tool_manager.py +193 -0
- crackerjack/adapters/skylos_adapter.py +231 -0
- crackerjack/adapters/zuban_adapter.py +560 -0
- crackerjack/agents/base.py +7 -3
- crackerjack/agents/coordinator.py +271 -33
- crackerjack/agents/documentation_agent.py +9 -15
- crackerjack/agents/dry_agent.py +3 -15
- crackerjack/agents/formatting_agent.py +1 -1
- crackerjack/agents/import_optimization_agent.py +36 -180
- crackerjack/agents/performance_agent.py +17 -98
- crackerjack/agents/performance_helpers.py +7 -31
- crackerjack/agents/proactive_agent.py +1 -3
- crackerjack/agents/refactoring_agent.py +16 -85
- crackerjack/agents/refactoring_helpers.py +7 -42
- crackerjack/agents/security_agent.py +9 -48
- crackerjack/agents/test_creation_agent.py +356 -513
- crackerjack/agents/test_specialist_agent.py +0 -4
- crackerjack/api.py +6 -25
- crackerjack/cli/cache_handlers.py +204 -0
- crackerjack/cli/cache_handlers_enhanced.py +683 -0
- crackerjack/cli/facade.py +100 -0
- crackerjack/cli/handlers.py +224 -9
- crackerjack/cli/interactive.py +6 -4
- crackerjack/cli/options.py +642 -55
- crackerjack/cli/utils.py +2 -1
- crackerjack/code_cleaner.py +58 -117
- crackerjack/config/global_lock_config.py +8 -48
- crackerjack/config/hooks.py +53 -62
- crackerjack/core/async_workflow_orchestrator.py +24 -34
- crackerjack/core/autofix_coordinator.py +3 -17
- crackerjack/core/enhanced_container.py +4 -13
- crackerjack/core/file_lifecycle.py +12 -89
- crackerjack/core/performance.py +2 -2
- crackerjack/core/performance_monitor.py +15 -55
- crackerjack/core/phase_coordinator.py +104 -204
- crackerjack/core/resource_manager.py +14 -90
- crackerjack/core/service_watchdog.py +62 -95
- crackerjack/core/session_coordinator.py +149 -0
- crackerjack/core/timeout_manager.py +14 -72
- crackerjack/core/websocket_lifecycle.py +13 -78
- crackerjack/core/workflow_orchestrator.py +171 -174
- crackerjack/docs/INDEX.md +11 -0
- crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
- crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
- crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
- crackerjack/docs/generated/api/SERVICES.md +1252 -0
- crackerjack/documentation/__init__.py +31 -0
- crackerjack/documentation/ai_templates.py +756 -0
- crackerjack/documentation/dual_output_generator.py +765 -0
- crackerjack/documentation/mkdocs_integration.py +518 -0
- crackerjack/documentation/reference_generator.py +977 -0
- crackerjack/dynamic_config.py +55 -50
- crackerjack/executors/async_hook_executor.py +10 -15
- crackerjack/executors/cached_hook_executor.py +117 -43
- crackerjack/executors/hook_executor.py +8 -34
- crackerjack/executors/hook_lock_manager.py +26 -183
- crackerjack/executors/individual_hook_executor.py +13 -11
- crackerjack/executors/lsp_aware_hook_executor.py +270 -0
- crackerjack/executors/tool_proxy.py +417 -0
- crackerjack/hooks/lsp_hook.py +79 -0
- crackerjack/intelligence/adaptive_learning.py +25 -10
- crackerjack/intelligence/agent_orchestrator.py +2 -5
- crackerjack/intelligence/agent_registry.py +34 -24
- crackerjack/intelligence/agent_selector.py +5 -7
- crackerjack/interactive.py +17 -6
- crackerjack/managers/async_hook_manager.py +0 -1
- crackerjack/managers/hook_manager.py +79 -1
- crackerjack/managers/publish_manager.py +44 -8
- crackerjack/managers/test_command_builder.py +1 -15
- crackerjack/managers/test_executor.py +1 -3
- crackerjack/managers/test_manager.py +98 -7
- crackerjack/managers/test_manager_backup.py +10 -9
- crackerjack/mcp/cache.py +2 -2
- crackerjack/mcp/client_runner.py +1 -1
- crackerjack/mcp/context.py +191 -68
- crackerjack/mcp/dashboard.py +7 -5
- crackerjack/mcp/enhanced_progress_monitor.py +31 -28
- crackerjack/mcp/file_monitor.py +30 -23
- crackerjack/mcp/progress_components.py +31 -21
- crackerjack/mcp/progress_monitor.py +50 -53
- crackerjack/mcp/rate_limiter.py +6 -6
- crackerjack/mcp/server_core.py +17 -16
- crackerjack/mcp/service_watchdog.py +2 -1
- crackerjack/mcp/state.py +4 -7
- crackerjack/mcp/task_manager.py +11 -9
- crackerjack/mcp/tools/core_tools.py +173 -32
- crackerjack/mcp/tools/error_analyzer.py +3 -2
- crackerjack/mcp/tools/execution_tools.py +8 -10
- crackerjack/mcp/tools/execution_tools_backup.py +42 -30
- crackerjack/mcp/tools/intelligence_tool_registry.py +7 -5
- crackerjack/mcp/tools/intelligence_tools.py +5 -2
- crackerjack/mcp/tools/monitoring_tools.py +33 -70
- crackerjack/mcp/tools/proactive_tools.py +24 -11
- crackerjack/mcp/tools/progress_tools.py +5 -8
- crackerjack/mcp/tools/utility_tools.py +20 -14
- crackerjack/mcp/tools/workflow_executor.py +62 -40
- crackerjack/mcp/websocket/app.py +8 -0
- crackerjack/mcp/websocket/endpoints.py +352 -357
- crackerjack/mcp/websocket/jobs.py +40 -57
- crackerjack/mcp/websocket/monitoring_endpoints.py +2935 -0
- crackerjack/mcp/websocket/server.py +7 -25
- crackerjack/mcp/websocket/websocket_handler.py +6 -17
- crackerjack/mixins/__init__.py +0 -2
- crackerjack/mixins/error_handling.py +1 -70
- crackerjack/models/config.py +12 -1
- crackerjack/models/config_adapter.py +49 -1
- crackerjack/models/protocols.py +122 -122
- crackerjack/models/resource_protocols.py +55 -210
- crackerjack/monitoring/ai_agent_watchdog.py +13 -13
- crackerjack/monitoring/metrics_collector.py +426 -0
- crackerjack/monitoring/regression_prevention.py +8 -8
- crackerjack/monitoring/websocket_server.py +643 -0
- crackerjack/orchestration/advanced_orchestrator.py +11 -6
- crackerjack/orchestration/coverage_improvement.py +3 -3
- crackerjack/orchestration/execution_strategies.py +26 -6
- crackerjack/orchestration/test_progress_streamer.py +8 -5
- crackerjack/plugins/base.py +2 -2
- crackerjack/plugins/hooks.py +7 -0
- crackerjack/plugins/managers.py +11 -8
- crackerjack/security/__init__.py +0 -1
- crackerjack/security/audit.py +6 -35
- crackerjack/services/anomaly_detector.py +392 -0
- crackerjack/services/api_extractor.py +615 -0
- crackerjack/services/backup_service.py +2 -2
- crackerjack/services/bounded_status_operations.py +15 -152
- crackerjack/services/cache.py +127 -1
- crackerjack/services/changelog_automation.py +395 -0
- crackerjack/services/config.py +15 -9
- crackerjack/services/config_merge.py +19 -80
- crackerjack/services/config_template.py +506 -0
- crackerjack/services/contextual_ai_assistant.py +48 -22
- crackerjack/services/coverage_badge_service.py +171 -0
- crackerjack/services/coverage_ratchet.py +27 -25
- crackerjack/services/debug.py +3 -3
- crackerjack/services/dependency_analyzer.py +460 -0
- crackerjack/services/dependency_monitor.py +14 -11
- crackerjack/services/documentation_generator.py +491 -0
- crackerjack/services/documentation_service.py +675 -0
- crackerjack/services/enhanced_filesystem.py +6 -5
- crackerjack/services/enterprise_optimizer.py +865 -0
- crackerjack/services/error_pattern_analyzer.py +676 -0
- crackerjack/services/file_hasher.py +1 -1
- crackerjack/services/git.py +8 -25
- crackerjack/services/health_metrics.py +10 -8
- crackerjack/services/heatmap_generator.py +735 -0
- crackerjack/services/initialization.py +11 -30
- crackerjack/services/input_validator.py +5 -97
- crackerjack/services/intelligent_commit.py +327 -0
- crackerjack/services/log_manager.py +15 -12
- crackerjack/services/logging.py +4 -3
- crackerjack/services/lsp_client.py +628 -0
- crackerjack/services/memory_optimizer.py +19 -87
- crackerjack/services/metrics.py +42 -33
- crackerjack/services/parallel_executor.py +9 -67
- crackerjack/services/pattern_cache.py +1 -1
- crackerjack/services/pattern_detector.py +6 -6
- crackerjack/services/performance_benchmarks.py +18 -59
- crackerjack/services/performance_cache.py +20 -81
- crackerjack/services/performance_monitor.py +27 -95
- crackerjack/services/predictive_analytics.py +510 -0
- crackerjack/services/quality_baseline.py +234 -0
- crackerjack/services/quality_baseline_enhanced.py +646 -0
- crackerjack/services/quality_intelligence.py +785 -0
- crackerjack/services/regex_patterns.py +618 -524
- crackerjack/services/regex_utils.py +43 -123
- crackerjack/services/secure_path_utils.py +5 -164
- crackerjack/services/secure_status_formatter.py +30 -141
- crackerjack/services/secure_subprocess.py +11 -92
- crackerjack/services/security.py +9 -41
- crackerjack/services/security_logger.py +12 -24
- crackerjack/services/server_manager.py +124 -16
- crackerjack/services/status_authentication.py +16 -159
- crackerjack/services/status_security_manager.py +4 -131
- crackerjack/services/thread_safe_status_collector.py +19 -125
- crackerjack/services/unified_config.py +21 -13
- crackerjack/services/validation_rate_limiter.py +5 -54
- crackerjack/services/version_analyzer.py +459 -0
- crackerjack/services/version_checker.py +1 -1
- crackerjack/services/websocket_resource_limiter.py +10 -144
- crackerjack/services/zuban_lsp_service.py +390 -0
- crackerjack/slash_commands/__init__.py +2 -7
- crackerjack/slash_commands/run.md +2 -2
- crackerjack/tools/validate_input_validator_patterns.py +14 -40
- crackerjack/tools/validate_regex_patterns.py +19 -48
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/METADATA +196 -25
- crackerjack-0.33.2.dist-info/RECORD +229 -0
- crackerjack/CLAUDE.md +0 -207
- crackerjack/RULES.md +0 -380
- crackerjack/py313.py +0 -234
- crackerjack-0.33.0.dist-info/RECORD +0 -187
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/WHEEL +0 -0
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,460 @@
|
|
|
1
|
+
"""Dependency analysis service for generating network graph visualizations."""
|
|
2
|
+
|
|
3
|
+
import ast
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
import typing as t
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class DependencyNode:
    """Represents a node in the dependency graph.

    One node per discovered definition (module, class, function, method);
    ``size`` and ``complexity`` drive visual scaling in the rendered graph.
    """

    id: str  # graph-wide unique id, e.g. "module:pkg.mod" or "class:pkg.mod.C"
    name: str  # short human-readable name
    type: str  # module, function, class, variable
    file_path: str  # source file this definition lives in
    line_number: int  # 1-based line of the definition
    size: int = 1  # For visual sizing
    complexity: int = 0  # cyclomatic complexity estimate
    imports: list[str] = field(default_factory=list)
    exports: list[str] = field(default_factory=list)
    # Plain `dict` factory: the original passed the subscripted generic
    # `dict[str, t.Any]` as the factory, which works at runtime but is
    # unidiomatic and confuses static type checkers.
    metadata: dict[str, t.Any] = field(default_factory=dict)

    def to_dict(self) -> dict[str, t.Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "id": self.id,
            "name": self.name,
            "type": self.type,
            "file_path": self.file_path,
            "line_number": self.line_number,
            "size": self.size,
            "complexity": self.complexity,
            "imports": self.imports,
            "exports": self.exports,
            "metadata": self.metadata,
        }
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass
class DependencyEdge:
    """Represents an edge (relationship) in the dependency graph."""

    source: str  # id of the depending node
    target: str  # id of the depended-upon node
    type: str  # import, call, inheritance, composition
    weight: float = 1.0  # visual edge weight (calls use 0.5)
    # Plain `dict` factory instead of the subscripted-generic factory the
    # original used; behavior identical, type-checker friendly.
    metadata: dict[str, t.Any] = field(default_factory=dict)

    def to_dict(self) -> dict[str, t.Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "source": self.source,
            "target": self.target,
            "type": self.type,
            "weight": self.weight,
            "metadata": self.metadata,
        }
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@dataclass
class DependencyGraph:
    """Complete dependency graph data structure.

    Aggregates the nodes/edges collected per file, plus directory-based
    clusters and summary metrics computed after the walk.
    """

    # Plain `dict`/`list` factories: the original passed `dict[str, t.Any]`
    # as the factory even where the annotation was `dict[str, DependencyNode]`
    # or `dict[str, list[str]]` — runtime-equivalent but inconsistent and
    # misleading to type checkers.
    nodes: dict[str, DependencyNode] = field(default_factory=dict)
    edges: list[DependencyEdge] = field(default_factory=list)
    clusters: dict[str, list[str]] = field(default_factory=dict)
    metrics: dict[str, t.Any] = field(default_factory=dict)
    generated_at: datetime = field(default_factory=datetime.now)

    def to_dict(self) -> dict[str, t.Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "nodes": [node.to_dict() for node in self.nodes.values()],
            "edges": [edge.to_dict() for edge in self.edges],
            "clusters": self.clusters,
            "metrics": self.metrics,
            "generated_at": self.generated_at.isoformat(),
        }
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class DependencyAnalyzer:
    """Analyzes code dependencies and generates network graph data.

    Walks every Python file under ``project_root`` (minus vendored/build
    directories), feeds each AST through ``DependencyVisitor``, then derives
    directory-based clusters and summary metrics for visualization.
    """

    def __init__(self, project_root: Path) -> None:
        """Initialize with project root directory."""
        self.project_root = Path(project_root)
        self.python_files: list[Path] = []
        self.dependency_graph = DependencyGraph()

    def analyze_project(self) -> DependencyGraph:
        """Analyze the entire project and build dependency graph.

        Returns:
            The populated ``DependencyGraph``. Files that fail to analyze
            are logged and skipped so one bad file cannot abort the run.
        """
        logger.info(f"Starting dependency analysis for {self.project_root}")

        self._discover_python_files()

        for file_path in self.python_files:
            try:
                self._analyze_file(file_path)
            except Exception as e:
                # Best effort: keep going past any single broken file.
                logger.warning(f"Failed to analyze {file_path}: {e}")
                continue

        self._generate_clusters()
        self._calculate_metrics()

        logger.info(
            f"Dependency analysis complete: "
            f"{len(self.dependency_graph.nodes)} nodes, "
            f"{len(self.dependency_graph.edges)} edges"
        )

        return self.dependency_graph

    def _discover_python_files(self) -> None:
        """Discover project Python files, skipping vendored/build dirs."""
        excluded_patterns = {
            "__pycache__",
            ".git",
            ".pytest_cache",
            "node_modules",
            "venv",
            ".venv",
            "build",
            "dist",
        }

        # Filter during a single rglob pass; the original materialized the
        # full list first via the unidiomatic `list[t.Any](...)` spelling
        # and then rebuilt it.
        self.python_files = [
            f
            for f in self.project_root.rglob("*.py")
            if not any(pattern in f.parts for pattern in excluded_patterns)
        ]

        logger.info(f"Discovered {len(self.python_files)} Python files")

    def _analyze_file(self, file_path: Path) -> None:
        """Parse one file and merge its nodes/edges into the graph."""
        try:
            content = file_path.read_text(encoding="utf-8")

            tree = ast.parse(content)
            visitor = DependencyVisitor(file_path, self.project_root)
            visitor.visit(tree)

            # Later files with the same node id overwrite earlier ones.
            for node in visitor.nodes:
                self.dependency_graph.nodes[node.id] = node

            self.dependency_graph.edges.extend(visitor.edges)

        except SyntaxError as e:
            logger.warning(f"Syntax error in {file_path}: {e}")
        except Exception as e:
            logger.error(f"Error analyzing {file_path}: {e}")

    def _generate_clusters(self) -> None:
        """Group node ids into clusters mirroring the directory hierarchy."""
        clusters: dict[str, list[str]] = {}

        for node_id, node in self.dependency_graph.nodes.items():
            relative_path = Path(node.file_path).relative_to(self.project_root)
            parts = relative_path.parts[:-1]  # exclude the filename itself
            # Top-level files all land in a synthetic "root" cluster.
            cluster_name = "/".join(parts) if parts else "root"
            clusters.setdefault(cluster_name, []).append(node_id)

        self.dependency_graph.clusters = clusters

    def _calculate_metrics(self) -> None:
        """Calculate graph metrics (counts, density, degrees) for display."""
        nodes = self.dependency_graph.nodes
        edges = self.dependency_graph.edges

        metrics: dict[str, t.Any] = {
            "total_nodes": len(nodes),
            "total_edges": len(edges),
            "total_clusters": len(self.dependency_graph.clusters),
            # Directed-graph density, guarded against the 0/1-node case.
            "density": len(edges) / (len(nodes) * (len(nodes) - 1))
            if len(nodes) > 1
            else 0,
        }

        # Node type distribution and mean complexity.
        type_counts: dict[str, int] = {}
        complexity_sum = 0

        for node in nodes.values():
            type_counts[node.type] = type_counts.get(node.type, 0) + 1
            complexity_sum += node.complexity

        metrics["node_types"] = type_counts
        metrics["average_complexity"] = complexity_sum / len(nodes) if nodes else 0

        # Edge type distribution.
        edge_type_counts: dict[str, int] = {}
        for edge in edges:
            edge_type_counts[edge.type] = edge_type_counts.get(edge.type, 0) + 1

        metrics["edge_types"] = edge_type_counts

        # In/out degree per node id, then top 10 on each side.
        in_degree: dict[str, int] = {}
        out_degree: dict[str, int] = {}

        for edge in edges:
            out_degree[edge.source] = out_degree.get(edge.source, 0) + 1
            in_degree[edge.target] = in_degree.get(edge.target, 0) + 1

        from operator import itemgetter

        top_in = sorted(in_degree.items(), key=itemgetter(1), reverse=True)[:10]
        top_out = sorted(out_degree.items(), key=itemgetter(1), reverse=True)[:10]

        metrics["top_imported"] = [
            {"node": node, "count": count} for node, count in top_in
        ]
        metrics["top_exporters"] = [
            {"node": node, "count": count} for node, count in top_out
        ]

        self.dependency_graph.metrics = metrics
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
class DependencyVisitor(ast.NodeVisitor):
|
|
248
|
+
"""AST visitor for extracting dependency information."""
|
|
249
|
+
|
|
250
|
+
def __init__(self, file_path: Path, project_root: Path):
|
|
251
|
+
"""Initialize visitor with file context."""
|
|
252
|
+
self.file_path = file_path
|
|
253
|
+
self.project_root = project_root
|
|
254
|
+
self.relative_path = file_path.relative_to(project_root)
|
|
255
|
+
self.module_name = str(self.relative_path).replace("/", ".").replace(".py", "")
|
|
256
|
+
|
|
257
|
+
self.nodes: list[DependencyNode] = []
|
|
258
|
+
self.edges: list[DependencyEdge] = []
|
|
259
|
+
self.current_class: str | None = None
|
|
260
|
+
self.imports: dict[str, str] = {} # alias -> full_name
|
|
261
|
+
|
|
262
|
+
def visit_Module(self, node: ast.Module) -> None:
|
|
263
|
+
"""Visit module and create module node."""
|
|
264
|
+
module_node = DependencyNode(
|
|
265
|
+
id=f"module:{self.module_name}",
|
|
266
|
+
name=self.module_name,
|
|
267
|
+
type="module",
|
|
268
|
+
file_path=str(self.file_path),
|
|
269
|
+
line_number=1,
|
|
270
|
+
size=len(node.body),
|
|
271
|
+
complexity=self._calculate_complexity(node),
|
|
272
|
+
metadata={"docstring": ast.get_docstring(node)},
|
|
273
|
+
)
|
|
274
|
+
self.nodes.append(module_node)
|
|
275
|
+
self.generic_visit(node)
|
|
276
|
+
|
|
277
|
+
def visit_Import(self, node: ast.Import) -> None:
|
|
278
|
+
"""Handle import statements."""
|
|
279
|
+
for alias in node.names:
|
|
280
|
+
imported_name = alias.asname or alias.name
|
|
281
|
+
self.imports[imported_name] = alias.name
|
|
282
|
+
|
|
283
|
+
# Create import edge
|
|
284
|
+
edge = DependencyEdge(
|
|
285
|
+
source=f"module:{self.module_name}",
|
|
286
|
+
target=f"module:{alias.name}",
|
|
287
|
+
type="import",
|
|
288
|
+
metadata={"line": node.lineno, "alias": alias.asname},
|
|
289
|
+
)
|
|
290
|
+
self.edges.append(edge)
|
|
291
|
+
|
|
292
|
+
def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
|
|
293
|
+
"""Handle from...import statements."""
|
|
294
|
+
if node.module:
|
|
295
|
+
for alias in node.names:
|
|
296
|
+
imported_name = alias.asname or alias.name
|
|
297
|
+
full_name = f"{node.module}.{alias.name}"
|
|
298
|
+
self.imports[imported_name] = full_name
|
|
299
|
+
|
|
300
|
+
# Create import edge
|
|
301
|
+
edge = DependencyEdge(
|
|
302
|
+
source=f"module:{self.module_name}",
|
|
303
|
+
target=f"symbol:{full_name}",
|
|
304
|
+
type="import_from",
|
|
305
|
+
metadata={
|
|
306
|
+
"line": node.lineno,
|
|
307
|
+
"module": node.module,
|
|
308
|
+
"symbol": alias.name,
|
|
309
|
+
"alias": alias.asname,
|
|
310
|
+
},
|
|
311
|
+
)
|
|
312
|
+
self.edges.append(edge)
|
|
313
|
+
|
|
314
|
+
def visit_ClassDef(self, node: ast.ClassDef) -> None:
|
|
315
|
+
"""Handle class definitions."""
|
|
316
|
+
class_id = f"class:{self.module_name}.{node.name}"
|
|
317
|
+
self.current_class = node.name
|
|
318
|
+
|
|
319
|
+
class_node = DependencyNode(
|
|
320
|
+
id=class_id,
|
|
321
|
+
name=node.name,
|
|
322
|
+
type="class",
|
|
323
|
+
file_path=str(self.file_path),
|
|
324
|
+
line_number=node.lineno,
|
|
325
|
+
size=len(node.body),
|
|
326
|
+
complexity=self._calculate_complexity(node),
|
|
327
|
+
metadata={
|
|
328
|
+
"docstring": ast.get_docstring(node),
|
|
329
|
+
"decorators": [
|
|
330
|
+
self._get_decorator_name(d) for d in node.decorator_list
|
|
331
|
+
],
|
|
332
|
+
},
|
|
333
|
+
)
|
|
334
|
+
self.nodes.append(class_node)
|
|
335
|
+
|
|
336
|
+
# Handle inheritance
|
|
337
|
+
for base in node.bases:
|
|
338
|
+
if isinstance(base, ast.Name):
|
|
339
|
+
base_name = self._resolve_name(base.id)
|
|
340
|
+
edge = DependencyEdge(
|
|
341
|
+
source=class_id,
|
|
342
|
+
target=f"class:{base_name}",
|
|
343
|
+
type="inheritance",
|
|
344
|
+
metadata={"line": node.lineno},
|
|
345
|
+
)
|
|
346
|
+
self.edges.append(edge)
|
|
347
|
+
|
|
348
|
+
self.generic_visit(node)
|
|
349
|
+
self.current_class = None
|
|
350
|
+
|
|
351
|
+
def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
|
|
352
|
+
"""Handle function definitions."""
|
|
353
|
+
if self.current_class:
|
|
354
|
+
func_id = f"method:{self.module_name}.{self.current_class}.{node.name}"
|
|
355
|
+
func_type = "method"
|
|
356
|
+
else:
|
|
357
|
+
func_id = f"function:{self.module_name}.{node.name}"
|
|
358
|
+
func_type = "function"
|
|
359
|
+
|
|
360
|
+
func_node = DependencyNode(
|
|
361
|
+
id=func_id,
|
|
362
|
+
name=node.name,
|
|
363
|
+
type=func_type,
|
|
364
|
+
file_path=str(self.file_path),
|
|
365
|
+
line_number=node.lineno,
|
|
366
|
+
size=len(node.body),
|
|
367
|
+
complexity=self._calculate_complexity(node),
|
|
368
|
+
metadata={
|
|
369
|
+
"docstring": ast.get_docstring(node),
|
|
370
|
+
"decorators": [
|
|
371
|
+
self._get_decorator_name(d) for d in node.decorator_list
|
|
372
|
+
],
|
|
373
|
+
"args": [arg.arg for arg in node.args.args],
|
|
374
|
+
},
|
|
375
|
+
)
|
|
376
|
+
self.nodes.append(func_node)
|
|
377
|
+
|
|
378
|
+
self.generic_visit(node)
|
|
379
|
+
|
|
380
|
+
def visit_Call(self, node: ast.Call) -> None:
|
|
381
|
+
"""Handle function/method calls."""
|
|
382
|
+
if isinstance(node.func, ast.Name):
|
|
383
|
+
called_name = self._resolve_name(node.func.id)
|
|
384
|
+
|
|
385
|
+
# Create call edge from current context
|
|
386
|
+
source_id = self._get_current_context_id(node.lineno)
|
|
387
|
+
if source_id:
|
|
388
|
+
edge = DependencyEdge(
|
|
389
|
+
source=source_id,
|
|
390
|
+
target=f"function:{called_name}",
|
|
391
|
+
type="call",
|
|
392
|
+
weight=0.5,
|
|
393
|
+
metadata={"line": node.lineno},
|
|
394
|
+
)
|
|
395
|
+
self.edges.append(edge)
|
|
396
|
+
|
|
397
|
+
elif isinstance(node.func, ast.Attribute):
|
|
398
|
+
# Handle method calls
|
|
399
|
+
if isinstance(node.func.value, ast.Name):
|
|
400
|
+
obj_name = self._resolve_name(node.func.value.id)
|
|
401
|
+
method_name = node.func.attr
|
|
402
|
+
|
|
403
|
+
source_id = self._get_current_context_id(node.lineno)
|
|
404
|
+
if source_id:
|
|
405
|
+
edge = DependencyEdge(
|
|
406
|
+
source=source_id,
|
|
407
|
+
target=f"method:{obj_name}.{method_name}",
|
|
408
|
+
type="call",
|
|
409
|
+
weight=0.5,
|
|
410
|
+
metadata={"line": node.lineno},
|
|
411
|
+
)
|
|
412
|
+
self.edges.append(edge)
|
|
413
|
+
|
|
414
|
+
self.generic_visit(node)
|
|
415
|
+
|
|
416
|
+
def _calculate_complexity(self, node: ast.AST) -> int:
|
|
417
|
+
"""Calculate cyclomatic complexity of a node."""
|
|
418
|
+
complexity = 1 # Base complexity
|
|
419
|
+
|
|
420
|
+
for child in ast.walk(node):
|
|
421
|
+
if isinstance(child, ast.If | ast.While | ast.For | ast.With | ast.Try):
|
|
422
|
+
complexity += 1
|
|
423
|
+
elif isinstance(child, ast.BoolOp):
|
|
424
|
+
complexity += len(child.values) - 1
|
|
425
|
+
elif isinstance(
|
|
426
|
+
child, ast.ListComp | ast.SetComp | ast.DictComp | ast.GeneratorExp
|
|
427
|
+
):
|
|
428
|
+
complexity += 1
|
|
429
|
+
|
|
430
|
+
return complexity
|
|
431
|
+
|
|
432
|
+
def _get_decorator_name(self, decorator: ast.AST) -> str:
|
|
433
|
+
"""Get the name of a decorator."""
|
|
434
|
+
if isinstance(decorator, ast.Name):
|
|
435
|
+
return decorator.id
|
|
436
|
+
elif isinstance(decorator, ast.Attribute):
|
|
437
|
+
return f"{decorator.value.id}.{decorator.attr}" # type: ignore
|
|
438
|
+
return "unknown"
|
|
439
|
+
|
|
440
|
+
def _resolve_name(self, name: str) -> str:
|
|
441
|
+
"""Resolve a name through imports."""
|
|
442
|
+
return self.imports.get(name, f"{self.module_name}.{name}")
|
|
443
|
+
|
|
444
|
+
def _get_current_context_id(self, line_number: int) -> str | None:
|
|
445
|
+
"""Get the ID of the current context (function/class/module)."""
|
|
446
|
+
# For simplicity, return module context
|
|
447
|
+
# In a more sophisticated implementation, we'd track the nested context
|
|
448
|
+
return f"module:{self.module_name}"
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
def analyze_project_dependencies(project_root: str | Path) -> DependencyGraph:
    """Analyze project dependencies and return graph data.

    Convenience wrapper around ``DependencyAnalyzer`` for one-shot use.
    """
    return DependencyAnalyzer(Path(project_root)).analyze_project()
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
def export_graph_data(graph: DependencyGraph, output_path: str | Path) -> None:
    """Export dependency graph to a pretty-printed JSON file."""
    serialized = json.dumps(graph.to_dict(), indent=2)
    with Path(output_path).open("w", encoding="utf-8") as f:
        f.write(serialized)
|
|
@@ -71,7 +71,7 @@ class DependencyMonitorService:
|
|
|
71
71
|
with self.pyproject_path.open("rb") as f:
|
|
72
72
|
data = tomllib.load(f)
|
|
73
73
|
|
|
74
|
-
dependencies = {}
|
|
74
|
+
dependencies: dict[str, str] = {}
|
|
75
75
|
project_data = data.get("project", {})
|
|
76
76
|
|
|
77
77
|
self._extract_main_dependencies(project_data, dependencies)
|
|
@@ -357,7 +357,8 @@ class DependencyMonitorService:
|
|
|
357
357
|
|
|
358
358
|
cached_data = cache[cache_key]
|
|
359
359
|
cache_age = current_time - cached_data["timestamp"]
|
|
360
|
-
|
|
360
|
+
is_fresh: bool = cache_age < 86400
|
|
361
|
+
return is_fresh
|
|
361
362
|
|
|
362
363
|
def _create_major_update_from_cache(
|
|
363
364
|
self,
|
|
@@ -447,21 +448,22 @@ class DependencyMonitorService:
|
|
|
447
448
|
return None
|
|
448
449
|
|
|
449
450
|
def _fetch_pypi_data(self, package: str) -> dict[str, t.Any]:
|
|
450
|
-
import urllib.request
|
|
451
451
|
from urllib.parse import urlparse
|
|
452
452
|
|
|
453
|
-
|
|
453
|
+
import requests
|
|
454
|
+
|
|
455
|
+
url = f"https: //pypi.org/pypi/{package}/json"
|
|
454
456
|
self._validate_pypi_url(url)
|
|
455
457
|
|
|
456
458
|
parsed = urlparse(url)
|
|
457
|
-
|
|
459
|
+
|
|
458
460
|
if parsed.scheme != "https" or parsed.netloc != "pypi.org":
|
|
459
|
-
msg = f"Invalid URL: only https
|
|
461
|
+
msg = f"Invalid URL: only https: //pypi.org URLs are allowed, got {url}"
|
|
460
462
|
raise ValueError(msg)
|
|
461
463
|
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
464
|
+
response = requests.get(url, timeout=10, verify=True)
|
|
465
|
+
response.raise_for_status()
|
|
466
|
+
return t.cast(dict[str, t.Any], response.json())
|
|
465
467
|
|
|
466
468
|
def _validate_pypi_url(self, url: str) -> None:
|
|
467
469
|
from urllib.parse import urlparse
|
|
@@ -500,7 +502,8 @@ class DependencyMonitorService:
|
|
|
500
502
|
def _get_release_date(self, releases: dict[str, t.Any], version: str) -> str:
|
|
501
503
|
release_info = releases.get(version, [])
|
|
502
504
|
if release_info:
|
|
503
|
-
|
|
505
|
+
upload_time: str = release_info[0].get("upload_time", "")
|
|
506
|
+
return upload_time
|
|
504
507
|
return ""
|
|
505
508
|
|
|
506
509
|
def _has_breaking_changes(self, version: str) -> bool:
|
|
@@ -534,7 +537,7 @@ class DependencyMonitorService:
|
|
|
534
537
|
with suppress(Exception):
|
|
535
538
|
if self.cache_file.exists():
|
|
536
539
|
with self.cache_file.open() as f:
|
|
537
|
-
return json.load(f)
|
|
540
|
+
return t.cast(dict[str, t.Any], json.load(f))
|
|
538
541
|
return {}
|
|
539
542
|
|
|
540
543
|
def _save_update_cache(self, cache: dict[str, t.Any]) -> None:
|