gwc-pybundle 2.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gwc-pybundle might be problematic. Click here for more details.
- gwc_pybundle-2.1.2.dist-info/METADATA +903 -0
- gwc_pybundle-2.1.2.dist-info/RECORD +82 -0
- gwc_pybundle-2.1.2.dist-info/WHEEL +5 -0
- gwc_pybundle-2.1.2.dist-info/entry_points.txt +2 -0
- gwc_pybundle-2.1.2.dist-info/licenses/LICENSE.md +25 -0
- gwc_pybundle-2.1.2.dist-info/top_level.txt +1 -0
- pybundle/__init__.py +0 -0
- pybundle/__main__.py +4 -0
- pybundle/cli.py +546 -0
- pybundle/context.py +404 -0
- pybundle/doctor.py +148 -0
- pybundle/filters.py +228 -0
- pybundle/manifest.py +77 -0
- pybundle/packaging.py +45 -0
- pybundle/policy.py +132 -0
- pybundle/profiles.py +454 -0
- pybundle/roadmap_model.py +42 -0
- pybundle/roadmap_scan.py +328 -0
- pybundle/root_detect.py +14 -0
- pybundle/runner.py +180 -0
- pybundle/steps/__init__.py +26 -0
- pybundle/steps/ai_context.py +791 -0
- pybundle/steps/api_docs.py +219 -0
- pybundle/steps/asyncio_analysis.py +358 -0
- pybundle/steps/bandit.py +72 -0
- pybundle/steps/base.py +20 -0
- pybundle/steps/blocking_call_detection.py +291 -0
- pybundle/steps/call_graph.py +219 -0
- pybundle/steps/compileall.py +76 -0
- pybundle/steps/config_docs.py +319 -0
- pybundle/steps/config_validation.py +302 -0
- pybundle/steps/container_image.py +294 -0
- pybundle/steps/context_expand.py +272 -0
- pybundle/steps/copy_pack.py +293 -0
- pybundle/steps/coverage.py +101 -0
- pybundle/steps/cprofile_step.py +166 -0
- pybundle/steps/dependency_sizes.py +136 -0
- pybundle/steps/django_checks.py +214 -0
- pybundle/steps/dockerfile_lint.py +282 -0
- pybundle/steps/dockerignore.py +311 -0
- pybundle/steps/duplication.py +103 -0
- pybundle/steps/env_completeness.py +269 -0
- pybundle/steps/env_var_usage.py +253 -0
- pybundle/steps/error_refs.py +204 -0
- pybundle/steps/event_loop_patterns.py +280 -0
- pybundle/steps/exception_patterns.py +190 -0
- pybundle/steps/fastapi_integration.py +250 -0
- pybundle/steps/flask_debugging.py +312 -0
- pybundle/steps/git_analytics.py +315 -0
- pybundle/steps/handoff_md.py +176 -0
- pybundle/steps/import_time.py +175 -0
- pybundle/steps/interrogate.py +106 -0
- pybundle/steps/license_scan.py +96 -0
- pybundle/steps/line_profiler.py +117 -0
- pybundle/steps/link_validation.py +287 -0
- pybundle/steps/logging_analysis.py +233 -0
- pybundle/steps/memory_profile.py +176 -0
- pybundle/steps/migration_history.py +336 -0
- pybundle/steps/mutation_testing.py +141 -0
- pybundle/steps/mypy.py +103 -0
- pybundle/steps/orm_optimization.py +316 -0
- pybundle/steps/pip_audit.py +45 -0
- pybundle/steps/pipdeptree.py +62 -0
- pybundle/steps/pylance.py +562 -0
- pybundle/steps/pytest.py +66 -0
- pybundle/steps/query_pattern_analysis.py +334 -0
- pybundle/steps/radon.py +161 -0
- pybundle/steps/repro_md.py +161 -0
- pybundle/steps/rg_scans.py +78 -0
- pybundle/steps/roadmap.py +153 -0
- pybundle/steps/ruff.py +117 -0
- pybundle/steps/secrets_detection.py +235 -0
- pybundle/steps/security_headers.py +309 -0
- pybundle/steps/shell.py +74 -0
- pybundle/steps/slow_tests.py +178 -0
- pybundle/steps/sqlalchemy_validation.py +269 -0
- pybundle/steps/test_flakiness.py +184 -0
- pybundle/steps/tree.py +116 -0
- pybundle/steps/type_coverage.py +277 -0
- pybundle/steps/unused_deps.py +211 -0
- pybundle/steps/vulture.py +167 -0
- pybundle/tools.py +63 -0
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Step: Blocking Call Detection
|
|
3
|
+
Detect synchronous/blocking calls in async functions.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import ast
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Dict, List, Set, Tuple, Optional
|
|
9
|
+
|
|
10
|
+
from .base import Step, StepResult
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class BlockingCallDetectionStep(Step):
    """Detect blocking calls within async functions.

    Scans every project ``*.py`` file, locates ``async def`` functions,
    and reports calls matching a catalogue of well-known blocking APIs
    (``time.sleep``, ``requests.*``, sync file I/O, subprocess, ...).
    Calls whose result is directly ``await``-ed are treated as async
    usage and are not reported.
    """

    name = "blocking call detection"

    # Common blocking calls to detect, mapped to a reporting category.
    # Dotted patterns match the exact dotted call or a dotted suffix of
    # it; bare patterns (e.g. "execute") also match a bare call of that
    # name.  See _match_blocking for the matching rules.
    BLOCKING_CALLS = {
        # Time
        "time.sleep": "Time",
        "time.time": "Time",  # Can be called in tight loops
        # Network
        "requests.get": "Network",
        "requests.post": "Network",
        "requests.put": "Network",
        "requests.delete": "Network",
        "requests.request": "Network",
        "urllib.request.urlopen": "Network",
        # I/O
        "open": "I/O",
        "Path.read_text": "I/O",
        "Path.write_text": "I/O",
        "json.load": "I/O",
        "json.dump": "I/O",
        "pickle.load": "I/O",
        "pickle.dump": "I/O",
        # Database
        "query": "Database",
        "execute": "Database",
        "fetch": "Database",
        "commit": "Database",
        "rollback": "Database",
        # Subprocess
        "subprocess.run": "Subprocess",
        "subprocess.call": "Subprocess",
        "subprocess.Popen": "Subprocess",
        "os.system": "Subprocess",
    }

    def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
        """Detect blocking calls in async functions and write a report.

        The report goes to ``logs/131_async_blocking.txt`` under the
        bundle workdir.  The step always returns OK; findings are
        informational only.
        """
        import time

        start = time.time()

        root = ctx.root

        # Find blocking calls in async context
        blocking_issues = self._find_blocking_calls(root)

        # Generate report
        lines = [
            "=" * 80,
            "BLOCKING CALL DETECTION REPORT",
            "=" * 80,
            "",
        ]

        # Summary
        lines.extend(
            [
                "SUMMARY",
                "=" * 80,
                "",
                f"Async functions analyzed: {blocking_issues['async_functions_count']}",
                f"Functions with blocking calls: {len(blocking_issues['affected_functions'])}",
                f"Total blocking calls detected: {blocking_issues['total_issues']}",
                "",
            ]
        )

        if not blocking_issues["affected_functions"]:
            lines.extend(
                [
                    "✓ No blocking calls detected in async functions",
                    "",
                    "Great! Your async code appears to be non-blocking.",
                    "",
                ]
            )
        else:
            # Detailed breakdown
            lines.extend(
                [
                    "BLOCKING CALLS BY TYPE",
                    "=" * 80,
                    "",
                ]
            )

            # Count issues per category across all affected functions.
            category_counts: Dict[str, int] = {}
            for func_issues in blocking_issues["affected_functions"].values():
                for issue in func_issues:
                    category = issue["category"]
                    category_counts[category] = category_counts.get(category, 0) + 1

            for category in sorted(category_counts.keys()):
                lines.append(f"{category}: {category_counts[category]} calls")
            lines.append("")

            # Detailed issues
            lines.append("ISSUES BY FUNCTION")
            lines.append("-" * 80)
            lines.append("")

            for func_key in sorted(blocking_issues["affected_functions"].keys()):
                issues = blocking_issues["affected_functions"][func_key]
                # func_key is "<rel path>:<func name>:<line>"; split from the
                # right so a path containing ":" cannot corrupt the parse.
                file_path, func_name, line_no = func_key.rsplit(":", 2)
                line_no = int(line_no)

                lines.append(f"Function: {func_name}")
                lines.append(f"Location: {file_path}:{line_no}")
                lines.append(f"Blocking calls: {len(issues)}")
                lines.append("")

                # Cap per-function detail at 10 issues to keep the report short.
                for issue in issues[:10]:
                    lines.append(
                        f"  Line {issue['line']}: {issue['call_name']} ({issue['category']})"
                    )
                    if issue.get("context"):
                        context_line = issue["context"].strip()
                        if len(context_line) > 70:
                            context_line = context_line[:67] + "..."
                        lines.append(f"    > {context_line}")

                if len(issues) > 10:
                    lines.append(f"  ... and {len(issues) - 10} more")

                lines.append("")

        # Recommendations
        lines.extend(
            [
                "=" * 80,
                "RECOMMENDATIONS",
                "=" * 80,
                "",
            ]
        )

        if blocking_issues["affected_functions"]:
            lines.append("To fix blocking calls in async functions:")
            lines.append("")
            lines.append("  1. NETWORK (requests → aiohttp/httpx)")
            lines.append("     Before: response = requests.get(url)")
            lines.append("     After:  response = await client.get(url)")
            lines.append("")
            lines.append("  2. TIME (time.sleep → asyncio.sleep)")
            lines.append("     Before: time.sleep(1)")
            lines.append("     After:  await asyncio.sleep(1)")
            lines.append("")
            lines.append("  3. DATABASE (sync drivers → async drivers)")
            lines.append("     Before: results = session.query(...).all()")
            lines.append("     After:  results = await session.execute(...)")
            lines.append("")
            lines.append("  4. FILE I/O (open → aiofiles)")
            lines.append("     Before: with open(file) as f: data = f.read()")
            lines.append("     After:  async with aiofiles.open(file) as f: data = await f.read()")
            lines.append("")
            lines.append("  5. SUBPROCESS (subprocess → asyncio.create_subprocess_exec)")
            lines.append(
                "     Before: result = subprocess.run(['cmd'], capture_output=True)"
            )
            lines.append(
                "     After:  proc = await asyncio.create_subprocess_exec('cmd')"
            )
            lines.append("")

        else:
            lines.append("  ✓ Great: No blocking calls detected in async functions")
            lines.append("  - Continue following non-blocking patterns")
            lines.append("  - Use type hints to indicate async functions")
            lines.append("  - Document expected async library usage")

        lines.append("")

        # Write report
        output = "\n".join(lines)
        dest = ctx.workdir / "logs" / "131_async_blocking.txt"
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_text(output, encoding="utf-8")

        elapsed = int(time.time() - start)
        return StepResult(self.name, "OK", elapsed, "")

    def _find_blocking_calls(self, root: Path) -> Dict:
        """Scan all project Python files for blocking calls in async functions.

        Returns a dict with keys:
          - ``affected_functions``: ``{"path:name:line": [issue, ...]}``
          - ``async_functions_count``: number of ``async def`` analyzed
          - ``total_issues``: total blocking calls found
        """
        affected_functions: Dict[str, List[Dict]] = {}
        async_functions_count = 0

        python_files = list(root.rglob("*.py"))

        for py_file in python_files:
            # Skip vendored / virtualenv / cache trees.
            if any(
                part in py_file.parts
                for part in ["venv", ".venv", "env", "__pycache__", "site-packages"]
            ):
                continue

            try:
                source = py_file.read_text(encoding="utf-8", errors="ignore")
                rel_path = str(py_file.relative_to(root))

                tree = ast.parse(source)

                for node in ast.walk(tree):
                    if isinstance(node, ast.AsyncFunctionDef):
                        async_functions_count += 1
                        func_key = f"{rel_path}:{node.name}:{node.lineno}"

                        # Find blocking calls in this async function
                        issues = self._find_blocking_in_function(
                            node, rel_path, source
                        )

                        if issues:
                            affected_functions[func_key] = issues

            except (OSError, UnicodeDecodeError, SyntaxError):
                # Best-effort scan: skip unreadable or unparsable files.
                continue

        # Count total issues
        total_issues = sum(len(v) for v in affected_functions.values())

        return {
            "affected_functions": affected_functions,
            "async_functions_count": async_functions_count,
            "total_issues": total_issues,
        }

    def _find_blocking_in_function(
        self, func_node: ast.AsyncFunctionDef, file_path: str, source: str
    ) -> List[Dict]:
        """Find blocking calls within a single async function.

        ``file_path`` is kept for signature compatibility but unused;
        locations are reported via line numbers only.  Note: calls inside
        nested (sync) defs within the async function are still visited —
        presumably acceptable for a heuristic report.
        """
        issues: List[Dict] = []
        source_lines = source.split("\n")

        # Calls that are directly awaited are async API usage (e.g.
        # ``await session.execute(...)``) and must not be flagged.
        awaited_calls = {
            node.value
            for node in ast.walk(func_node)
            if isinstance(node, ast.Await) and isinstance(node.value, ast.Call)
        }

        for call_node in ast.walk(func_node):
            if not isinstance(call_node, ast.Call) or call_node in awaited_calls:
                continue

            call_name = self._get_call_name(call_node)
            if not call_name:
                continue

            category = self._match_blocking(call_name)
            if category is None:
                continue

            # lineno is 1-based and always within the parsed source.
            context = source_lines[call_node.lineno - 1]

            issues.append(
                {
                    "line": call_node.lineno,
                    "call_name": call_name,
                    "category": category,
                    "context": context,
                }
            )

        return issues

    def _match_blocking(self, call_name: str) -> Optional[str]:
        """Return the blocking category for *call_name*, or None.

        A pattern matches on the exact dotted name, on a dotted suffix
        (``session.execute`` matches ``execute``), or on a bare call equal
        to the pattern's last segment (``sleep`` for ``time.sleep``).
        Substring matching is deliberately avoided so names such as
        ``reopen`` or ``asyncio.sleep`` are not false positives.
        """
        for pattern, category in self.BLOCKING_CALLS.items():
            last_segment = pattern.rsplit(".", 1)[-1]
            if (
                call_name == pattern
                or call_name.endswith("." + pattern)
                or call_name == last_segment
            ):
                return category
        return None

    def _get_call_name(self, node: ast.Call) -> Optional[str]:
        """Extract the dotted name of a function call, or None.

        ``foo()`` -> ``"foo"``; ``a.b.c()`` -> ``"a.b.c"``.  Calls on
        non-name bases (e.g. ``get_db().execute()``) return None.
        """
        if isinstance(node.func, ast.Name):
            return node.func.id
        elif isinstance(node.func, ast.Attribute):
            parts = []
            current = node.func
            while isinstance(current, ast.Attribute):
                parts.append(current.attr)
                current = current.value
            if isinstance(current, ast.Name):
                parts.append(current.id)
                return ".".join(reversed(parts))
        return None
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Step: Call Graph Generation
|
|
3
|
+
Generate static call graph and identify orphaned functions.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import ast
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Dict, List, Set
|
|
9
|
+
|
|
10
|
+
from .base import Step, StepResult
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class CallGraphStep(Step):
    """Generate static call graph and identify orphaned functions.

    The graph is name-based and heuristic: same-named functions in
    different files share a single entry, and method calls are matched
    by method name only.
    """

    name = "call graph"

    def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
        """Analyze function definitions and calls to build call graph.

        Writes a text report to ``meta/103_call_graph.txt`` under the
        bundle workdir.  Returns SKIP when the tree has no Python files.
        """
        import time

        start = time.time()

        root = ctx.root
        python_files = sorted(root.rglob("*.py"))
        if not python_files:
            return StepResult(self.name, "SKIP", int(time.time() - start), "No Python files found")

        # Track functions
        defined_functions: Dict[str, str] = {}  # func_name -> file:line
        called_functions: Set[str] = set()  # func_name
        function_calls: Dict[str, List[str]] = {}  # func_name -> [called_func, ...]
        analyzed_files = 0

        for py_file in python_files:
            # Skip non-user code
            if any(
                part in py_file.parts
                for part in [
                    "venv",
                    ".venv",
                    "env",
                    "site-packages",
                    "__pycache__",
                    ".git",
                    "node_modules",
                ]
            ):
                continue

            analyzed_files += 1

            try:
                source = py_file.read_text(encoding="utf-8", errors="ignore")
                tree = ast.parse(source, str(py_file))
                rel_path = py_file.relative_to(root)

                # Find function definitions.  Async defs are included so
                # that coroutine functions are not misreported as
                # external calls or missed as orphans.
                for node in ast.walk(tree):
                    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                        func_name = node.name
                        # Skip private and magic methods
                        if not func_name.startswith("_"):
                            # NOTE(review): same-named functions in other
                            # files overwrite this entry — last one wins.
                            location = f"{rel_path}:{node.lineno}"
                            defined_functions[func_name] = location

                            # Track calls within this function
                            calls = self._extract_function_calls(node)
                            if func_name not in function_calls:
                                function_calls[func_name] = []
                            function_calls[func_name].extend(calls)
                            called_functions.update(calls)

            except (SyntaxError, UnicodeDecodeError):
                continue

        # Identify orphaned functions (defined but never called)
        orphaned = set(defined_functions.keys()) - called_functions

        # Identify entry points (called but not defined in codebase)
        external_calls = called_functions - set(defined_functions.keys())

        # Calculate statistics
        total_functions = len(defined_functions)
        total_calls = sum(len(calls) for calls in function_calls.values())

        # Generate report
        lines = [
            "=" * 80,
            "CALL GRAPH ANALYSIS",
            "=" * 80,
            "",
            f"Total Python files analyzed: {analyzed_files}",
            f"Total functions defined: {total_functions}",
            f"Total function calls: {total_calls}",
            f"Orphaned functions (never called): {len(orphaned)}",
            f"External/library calls: {len(external_calls)}",
            "",
        ]

        # Orphaned functions
        if orphaned:
            lines.extend(
                [
                    "=" * 80,
                    "ORPHANED FUNCTIONS (defined but never called)",
                    "=" * 80,
                    "",
                ]
            )
            for func in sorted(orphaned):
                location = defined_functions[func]
                lines.append(f"  {func:30} at {location}")
            lines.append("")

        # Most called functions
        if function_calls:
            # Count how many times each function is called
            call_counts: Dict[str, int] = {}
            for calls in function_calls.values():
                for called_func in calls:
                    call_counts[called_func] = call_counts.get(called_func, 0) + 1

            # Filter to only defined functions
            internal_call_counts = {
                func: count
                for func, count in call_counts.items()
                if func in defined_functions
            }

            if internal_call_counts:
                lines.extend(
                    [
                        "=" * 80,
                        "MOST CALLED FUNCTIONS (top 20)",
                        "=" * 80,
                        "",
                    ]
                )

                sorted_calls = sorted(
                    internal_call_counts.items(), key=lambda x: x[1], reverse=True
                )
                for func, count in sorted_calls[:20]:
                    location = defined_functions[func]
                    lines.append(f"  {func:30} {count:4} call(s) at {location}")
                lines.append("")

        # Functions that call many others
        if function_calls:
            lines.extend(
                [
                    "=" * 80,
                    "FUNCTIONS WITH MOST CALLS (top 20)",
                    "=" * 80,
                    "",
                ]
            )

            sorted_callers = sorted(
                function_calls.items(), key=lambda x: len(x[1]), reverse=True
            )
            for func, calls in sorted_callers[:20]:
                if func in defined_functions:
                    location = defined_functions[func]
                    unique_calls = len(set(calls))
                    lines.append(
                        f"  {func:30} calls {unique_calls:3} function(s) at {location}"
                    )
            lines.append("")

        # Recommendations
        lines.extend(
            [
                "=" * 80,
                "RECOMMENDATIONS",
                "=" * 80,
                "",
            ]
        )

        if len(orphaned) > 10:
            lines.append(f"  - {len(orphaned)} orphaned functions found; consider removing unused code")
        if len(orphaned) > 0:
            lines.append("  - Review orphaned functions for dead code removal")
        if total_functions > 0:
            orphan_pct = len(orphaned) / total_functions * 100
            if orphan_pct > 30:
                lines.append(f"  - {orphan_pct:.1f}% of functions are never called")

        lines.append("")

        # Write report
        output = "\n".join(lines)
        dest = ctx.workdir / "meta" / "103_call_graph.txt"
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_text(output, encoding="utf-8")

        elapsed = int(time.time() - start)
        return StepResult(self.name, "OK", elapsed, "")

    def _extract_function_calls(self, func_node: ast.AST) -> List[str]:
        """Extract all public function call names within a definition.

        Accepts either ``ast.FunctionDef`` or ``ast.AsyncFunctionDef``.
        Underscore-prefixed call names are skipped, mirroring the
        definition-side filter.
        """
        calls = []
        for node in ast.walk(func_node):
            if isinstance(node, ast.Call):
                func_name = self._get_call_name(node.func)
                if func_name and not func_name.startswith("_"):
                    calls.append(func_name)
        return calls

    def _get_call_name(self, node: ast.expr) -> str:
        """Extract function name from call node ("" when not a name).

        For method calls only the method name is returned, so the graph
        matches by name rather than by receiver.
        """
        if isinstance(node, ast.Name):
            return node.id
        elif isinstance(node, ast.Attribute):
            # For method calls, just return the method name
            return node.attr
        return ""
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import subprocess # nosec B404 - Required for tool execution, paths validated
|
|
4
|
+
import time
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from .base import StepResult
|
|
9
|
+
from ..context import BundleContext
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _guess_targets(root: Path) -> list[str]:
|
|
13
|
+
"""
|
|
14
|
+
Heuristic targets:
|
|
15
|
+
- If there are top-level Python package dirs (contain __init__.py), compile those.
|
|
16
|
+
- Otherwise compile '.' (repo root).
|
|
17
|
+
"""
|
|
18
|
+
targets: list[str] = []
|
|
19
|
+
|
|
20
|
+
for p in sorted(root.iterdir()):
|
|
21
|
+
if not p.is_dir():
|
|
22
|
+
continue
|
|
23
|
+
if p.name.startswith("."):
|
|
24
|
+
continue
|
|
25
|
+
if (p / "__init__.py").is_file():
|
|
26
|
+
targets.append(p.name)
|
|
27
|
+
|
|
28
|
+
return targets or ["."]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass
class CompileAllStep:
    """Byte-compile the project via ``python -m compileall`` and log output.

    Compile failures are recorded in the log but never fail the bundle:
    once an interpreter is available the step always reports PASS, noting
    a non-zero exit code (or an exception) in the result message.
    """

    name: str = "compileall"
    # When True, pass -q so compileall suppresses the per-file listing.
    quiet: bool = True

    def run(self, ctx: BundleContext) -> StepResult:
        started = time.time()
        log_path = ctx.logdir / "30_compileall.txt"
        log_path.parent.mkdir(parents=True, exist_ok=True)

        interpreter = ctx.tools.python
        if not interpreter:
            log_path.write_text("python not found; skipping compileall\n", encoding="utf-8")
            return StepResult(self.name, "SKIP", 0, "missing python")

        targets = _guess_targets(ctx.root)
        cmd = [interpreter, "-m", "compileall"]
        if self.quiet:
            cmd.append("-q")
        cmd.extend(targets)

        header = (
            f"## PWD: {ctx.root}\n## CMD: {' '.join(cmd)}\n## TARGETS: {targets}\n\n"
        )

        try:
            proc = subprocess.run(  # nosec B603
                cmd,
                cwd=str(ctx.root),
                text=True,
                capture_output=True,
                check=False,
            )
            combined = header + (proc.stdout or "")
            if proc.stderr:
                combined += "\n" + proc.stderr
            log_path.write_text(ctx.redact_text(combined), encoding="utf-8")

            # compileall uses non-zero for compile failures; we record it but don't fail bundling.
            note = "" if proc.returncode == 0 else f"exit={proc.returncode} (recorded)"
            return StepResult(self.name, "PASS", int(time.time() - started), note)
        except Exception as e:
            log_path.write_text(
                ctx.redact_text(header + f"\nEXCEPTION: {e}\n"), encoding="utf-8"
            )
            return StepResult(
                self.name, "PASS", int(time.time() - started), f"exception recorded: {e}"
            )