gwc-pybundle 2.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gwc-pybundle might be problematic. Click here for more details.

Files changed (82):
  1. gwc_pybundle-2.1.2.dist-info/METADATA +903 -0
  2. gwc_pybundle-2.1.2.dist-info/RECORD +82 -0
  3. gwc_pybundle-2.1.2.dist-info/WHEEL +5 -0
  4. gwc_pybundle-2.1.2.dist-info/entry_points.txt +2 -0
  5. gwc_pybundle-2.1.2.dist-info/licenses/LICENSE.md +25 -0
  6. gwc_pybundle-2.1.2.dist-info/top_level.txt +1 -0
  7. pybundle/__init__.py +0 -0
  8. pybundle/__main__.py +4 -0
  9. pybundle/cli.py +546 -0
  10. pybundle/context.py +404 -0
  11. pybundle/doctor.py +148 -0
  12. pybundle/filters.py +228 -0
  13. pybundle/manifest.py +77 -0
  14. pybundle/packaging.py +45 -0
  15. pybundle/policy.py +132 -0
  16. pybundle/profiles.py +454 -0
  17. pybundle/roadmap_model.py +42 -0
  18. pybundle/roadmap_scan.py +328 -0
  19. pybundle/root_detect.py +14 -0
  20. pybundle/runner.py +180 -0
  21. pybundle/steps/__init__.py +26 -0
  22. pybundle/steps/ai_context.py +791 -0
  23. pybundle/steps/api_docs.py +219 -0
  24. pybundle/steps/asyncio_analysis.py +358 -0
  25. pybundle/steps/bandit.py +72 -0
  26. pybundle/steps/base.py +20 -0
  27. pybundle/steps/blocking_call_detection.py +291 -0
  28. pybundle/steps/call_graph.py +219 -0
  29. pybundle/steps/compileall.py +76 -0
  30. pybundle/steps/config_docs.py +319 -0
  31. pybundle/steps/config_validation.py +302 -0
  32. pybundle/steps/container_image.py +294 -0
  33. pybundle/steps/context_expand.py +272 -0
  34. pybundle/steps/copy_pack.py +293 -0
  35. pybundle/steps/coverage.py +101 -0
  36. pybundle/steps/cprofile_step.py +166 -0
  37. pybundle/steps/dependency_sizes.py +136 -0
  38. pybundle/steps/django_checks.py +214 -0
  39. pybundle/steps/dockerfile_lint.py +282 -0
  40. pybundle/steps/dockerignore.py +311 -0
  41. pybundle/steps/duplication.py +103 -0
  42. pybundle/steps/env_completeness.py +269 -0
  43. pybundle/steps/env_var_usage.py +253 -0
  44. pybundle/steps/error_refs.py +204 -0
  45. pybundle/steps/event_loop_patterns.py +280 -0
  46. pybundle/steps/exception_patterns.py +190 -0
  47. pybundle/steps/fastapi_integration.py +250 -0
  48. pybundle/steps/flask_debugging.py +312 -0
  49. pybundle/steps/git_analytics.py +315 -0
  50. pybundle/steps/handoff_md.py +176 -0
  51. pybundle/steps/import_time.py +175 -0
  52. pybundle/steps/interrogate.py +106 -0
  53. pybundle/steps/license_scan.py +96 -0
  54. pybundle/steps/line_profiler.py +117 -0
  55. pybundle/steps/link_validation.py +287 -0
  56. pybundle/steps/logging_analysis.py +233 -0
  57. pybundle/steps/memory_profile.py +176 -0
  58. pybundle/steps/migration_history.py +336 -0
  59. pybundle/steps/mutation_testing.py +141 -0
  60. pybundle/steps/mypy.py +103 -0
  61. pybundle/steps/orm_optimization.py +316 -0
  62. pybundle/steps/pip_audit.py +45 -0
  63. pybundle/steps/pipdeptree.py +62 -0
  64. pybundle/steps/pylance.py +562 -0
  65. pybundle/steps/pytest.py +66 -0
  66. pybundle/steps/query_pattern_analysis.py +334 -0
  67. pybundle/steps/radon.py +161 -0
  68. pybundle/steps/repro_md.py +161 -0
  69. pybundle/steps/rg_scans.py +78 -0
  70. pybundle/steps/roadmap.py +153 -0
  71. pybundle/steps/ruff.py +117 -0
  72. pybundle/steps/secrets_detection.py +235 -0
  73. pybundle/steps/security_headers.py +309 -0
  74. pybundle/steps/shell.py +74 -0
  75. pybundle/steps/slow_tests.py +178 -0
  76. pybundle/steps/sqlalchemy_validation.py +269 -0
  77. pybundle/steps/test_flakiness.py +184 -0
  78. pybundle/steps/tree.py +116 -0
  79. pybundle/steps/type_coverage.py +277 -0
  80. pybundle/steps/unused_deps.py +211 -0
  81. pybundle/steps/vulture.py +167 -0
  82. pybundle/tools.py +63 -0
@@ -0,0 +1,277 @@
1
+ """Type hint coverage analysis step.
2
+
3
+ Analyzes Python source files to compute type annotation coverage percentage,
4
+ identifying functions/methods/classes that lack type hints.
5
+ """
6
+
7
+ import ast
8
+ import os
9
+ from dataclasses import dataclass, field
10
+ from pathlib import Path
11
+ from typing import List, Tuple
12
+
13
+ from .base import StepResult
14
+ from ..context import BundleContext
15
+ from ..filters import should_exclude_from_analysis
16
+
17
+
18
@dataclass
class TypeCoverageStats:
    """Aggregated counts of typed vs. untyped items for one or more files."""

    total_functions: int = 0
    typed_functions: int = 0
    total_classes: int = 0
    typed_classes: int = 0
    total_attributes: int = 0
    typed_attributes: int = 0
    # Each entry is a (file, line, name) triple locating an untyped item.
    missing_items: List[Tuple[str, int, str]] = field(default_factory=list)

    @staticmethod
    def _percentage(typed: int, total: int) -> float:
        # An empty category counts as fully covered instead of dividing by zero.
        if total == 0:
            return 100.0
        return (typed / total) * 100

    @property
    def function_coverage(self) -> float:
        """Return function type coverage percentage."""
        return self._percentage(self.typed_functions, self.total_functions)

    @property
    def class_coverage(self) -> float:
        """Return class type coverage percentage."""
        return self._percentage(self.typed_classes, self.total_classes)

    @property
    def attribute_coverage(self) -> float:
        """Return attribute type coverage percentage."""
        return self._percentage(self.typed_attributes, self.total_attributes)

    @property
    def overall_coverage(self) -> float:
        """Return overall type coverage percentage across all categories."""
        grand_total = self.total_functions + self.total_classes + self.total_attributes
        grand_typed = self.typed_functions + self.typed_classes + self.typed_attributes
        return self._percentage(grand_typed, grand_total)
61
+
62
+
63
class TypeCoverageAnalyzer(ast.NodeVisitor):
    """AST visitor that tallies type-hint coverage for a single source file."""

    def __init__(self, filepath: str) -> None:
        self.filepath = filepath
        self.stats = TypeCoverageStats()

    def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
        """Visit function definition."""
        self._analyze_function(node)
        self.generic_visit(node)

    def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
        """Visit async function definition."""
        self._analyze_function(node)
        self.generic_visit(node)

    def _analyze_function(self, node) -> None:
        """Record whether a function/method is fully annotated.

        A function counts as typed when it has a return annotation and every
        named parameter (positional-only, positional-or-keyword, and
        keyword-only, excluding self/cls) is annotated.
        """
        # Skip single-underscore private helpers. Dunder methods such as
        # "__init__" do NOT match this condition and are still analyzed,
        # since they are part of the class's public protocol.
        if node.name.startswith("_") and not node.name.startswith("__"):
            return

        self.stats.total_functions += 1

        # Consider every named parameter, not just node.args.args: ignoring
        # positional-only and keyword-only args would let partially annotated
        # signatures count as fully typed.
        params = [
            arg
            for arg in (node.args.posonlyargs + node.args.args + node.args.kwonlyargs)
            if arg.arg not in ("self", "cls")
        ]
        has_return_type = node.returns is not None
        has_param_types = all(arg.annotation is not None for arg in params)

        # all() is True for an empty list, so a zero-parameter function only
        # needs a return annotation to count as typed.
        if has_return_type and has_param_types:
            self.stats.typed_functions += 1
        else:
            self.stats.missing_items.append(
                (self.filepath, node.lineno, f"function '{node.name}'")
            )

    def visit_ClassDef(self, node: ast.ClassDef) -> None:
        """Record whether a class carries any type information.

        A class counts as typed when it has at least one annotated class
        attribute, or its __init__ annotates at least one real parameter.
        """
        # Private classes are implementation details: don't penalize them,
        # but still analyze their methods.
        if node.name.startswith("_"):
            self.generic_visit(node)
            return

        self.stats.total_classes += 1

        # Any annotated attribute directly in the class body.
        has_annotations = any(isinstance(stmt, ast.AnnAssign) for stmt in node.body)

        # Any annotated non-self/cls parameter on __init__, if present.
        init_method = next(
            (
                stmt
                for stmt in node.body
                if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__"
            ),
            None,
        )
        has_init_types = init_method is not None and any(
            arg.annotation is not None
            for arg in init_method.args.args
            if arg.arg not in ("self", "cls")
        )

        if has_annotations or has_init_types:
            self.stats.typed_classes += 1
        else:
            self.stats.missing_items.append(
                (self.filepath, node.lineno, f"class '{node.name}'")
            )

        self.generic_visit(node)

    def visit_AnnAssign(self, node: ast.AnnAssign) -> None:
        """Count an annotated assignment as a typed attribute.

        NOTE(review): this fires for annotated assignments at any depth,
        including locals inside function bodies, so attribute totals may be
        inflated. Both counters move together, so attribute coverage itself
        stays at 100% — confirm whether module/class-scope filtering is
        actually wanted here.
        """
        self.stats.total_attributes += 1
        self.stats.typed_attributes += 1
        self.generic_visit(node)
154
+
155
+
156
@dataclass
class TypeCoverageStep:
    """Step that analyzes type hint coverage."""

    name: str = "type-coverage"
    outfile: str = "logs/80_type_coverage.txt"

    def run(self, context: BundleContext) -> StepResult:
        """Analyze type coverage in Python files.

        Writes a human-readable report to ``self.outfile`` and returns OK at
        >= 80% overall coverage, WARN at >= 50%, FAIL below that.
        """
        import time

        start = time.time()

        # Get all Python files in the project
        python_files = self._find_python_files(context.root)

        if not python_files:
            # Elapsed is reported in milliseconds, consistent with the
            # package's other steps (the old code truncated raw seconds).
            elapsed = int((time.time() - start) * 1000)
            return StepResult(self.name, "SKIP", elapsed, "No Python files found")

        overall_stats = TypeCoverageStats()

        for filepath in python_files:
            try:
                with open(filepath, "r", encoding="utf-8") as f:
                    source = f.read()

                tree = ast.parse(source, filename=str(filepath))
                analyzer = TypeCoverageAnalyzer(str(filepath))
                analyzer.visit(tree)
                self._merge(overall_stats, analyzer.stats)
            except Exception:
                # Unreadable or unparseable files (including SyntaxError)
                # are skipped rather than failing the whole step.
                continue

        elapsed = int((time.time() - start) * 1000)

        self._write_report(context, overall_stats, len(python_files))

        # Determine status from overall coverage thresholds.
        coverage = overall_stats.overall_coverage
        if coverage >= 80:
            status = "OK"
        elif coverage >= 50:
            status = "WARN"
        else:
            status = "FAIL"

        return StepResult(self.name, status, elapsed, f"{coverage:.1f}% type coverage")

    @staticmethod
    def _merge(total: TypeCoverageStats, part: TypeCoverageStats) -> None:
        """Accumulate one file's stats into the project-wide totals."""
        total.total_functions += part.total_functions
        total.typed_functions += part.typed_functions
        total.total_classes += part.total_classes
        total.typed_classes += part.typed_classes
        total.total_attributes += part.total_attributes
        total.typed_attributes += part.typed_attributes
        total.missing_items.extend(part.missing_items)

    def _write_report(
        self, context: BundleContext, overall_stats: TypeCoverageStats, file_count: int
    ) -> None:
        """Write the coverage summary and missing-hint listing to outfile."""
        log_path = context.workdir / self.outfile
        log_path.parent.mkdir(parents=True, exist_ok=True)
        # Explicit encoding, consistent with the package's other writers.
        with open(log_path, "w", encoding="utf-8") as f:
            f.write("=" * 80 + "\n")
            f.write("TYPE HINT COVERAGE ANALYSIS\n")
            f.write("=" * 80 + "\n\n")

            f.write("Summary:\n")
            f.write("-" * 80 + "\n")
            f.write(f"Overall Coverage: {overall_stats.overall_coverage:6.2f}%\n")
            f.write(
                f"Function Coverage: {overall_stats.function_coverage:6.2f}% "
                f"({overall_stats.typed_functions}/{overall_stats.total_functions})\n"
            )
            f.write(
                f"Class Coverage: {overall_stats.class_coverage:6.2f}% "
                f"({overall_stats.typed_classes}/{overall_stats.total_classes})\n"
            )
            f.write(
                f"Attribute Coverage: {overall_stats.attribute_coverage:6.2f}% "
                f"({overall_stats.typed_attributes}/{overall_stats.total_attributes})\n"
            )
            f.write("\n")

            if overall_stats.missing_items:
                f.write("Missing Type Hints:\n")
                f.write("-" * 80 + "\n")

                # Sort by file, then line number, and group output per file.
                sorted_missing = sorted(
                    overall_stats.missing_items, key=lambda x: (x[0], x[1])
                )

                current_file = None
                for filepath, lineno, name in sorted_missing:
                    # Show relative path
                    rel_path = os.path.relpath(filepath, context.root)
                    if rel_path != current_file:
                        f.write(f"\n{rel_path}:\n")
                        current_file = rel_path
                    f.write(f" Line {lineno:4d}: {name}\n")

                f.write("\n")

            f.write("=" * 80 + "\n")
            f.write(f"Analysis complete - {file_count} files analyzed\n")
            f.write("=" * 80 + "\n")

    def _find_python_files(self, root: Path) -> List[Path]:
        """Find all Python source files, excluding dependencies/caches/build dirs."""
        # Use comprehensive exclusion filter for PROJECT files only.
        return [
            path
            for path in root.rglob("*.py")
            if not should_exclude_from_analysis(path)
        ]
@@ -0,0 +1,211 @@
1
+ from __future__ import annotations
2
+
3
+ import subprocess # nosec B404 - Required for tool execution, paths validated
4
+ import time
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+
8
+ from .base import StepResult
9
+ from ..context import BundleContext
10
+
11
+
12
@dataclass
class UnusedDependenciesStep:
    """Step that flags installed packages never imported by project code.

    The comparison is a heuristic: pip distribution names and import names
    do not always match, and some packages are used indirectly, so results
    are categorized by confidence rather than reported as certain.
    """

    name: str = "unused dependencies"
    outfile: str = "meta/31_unused_packages.txt"

    def run(self, ctx: BundleContext) -> StepResult:
        """Compare ``pip freeze`` output against imports found in the source tree."""
        start = time.time()
        out = ctx.workdir / self.outfile
        out.parent.mkdir(parents=True, exist_ok=True)

        python = ctx.tools.python
        if not python:
            out.write_text("python not found; skipping\n", encoding="utf-8")
            return StepResult(self.name, "SKIP", 0, "missing python")

        try:
            # Get all installed packages
            pip_freeze_result = subprocess.run(  # nosec B603
                [python, "-m", "pip", "freeze"],
                cwd=ctx.root,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                timeout=30,
            )

            if pip_freeze_result.returncode != 0:
                out.write_text(
                    f"pip freeze failed: {pip_freeze_result.stderr}\n", encoding="utf-8"
                )
                # Report real elapsed ms, consistent with the other exits
                # (the old code hardcoded 0 here).
                return StepResult(
                    self.name,
                    "FAIL",
                    int((time.time() - start) * 1000),
                    "pip freeze failed",
                )

            # Parse installed packages (normalize names to pip's dashed form).
            installed_packages: list[str] = []
            for line in pip_freeze_result.stdout.splitlines():
                if line and not line.startswith("-") and "==" in line:
                    pkg_name = line.split("==")[0].strip().lower().replace("_", "-")
                    installed_packages.append(pkg_name)

            # Get imported modules from source code
            imported_modules = self._get_imported_modules(ctx.root)

            # Find unused packages (installed but not imported)
            # Note: This is a heuristic - some packages are used indirectly
            unused = sorted(set(installed_packages) - imported_modules)

            # Categorize by confidence level
            likely_unused, maybe_unused = self._categorize_by_confidence(
                unused, ctx.root
            )

            # Write results
            with out.open("w", encoding="utf-8") as f:
                f.write("=" * 70 + "\n")
                f.write("UNUSED DEPENDENCIES ANALYSIS\n")
                f.write("=" * 70 + "\n\n")
                f.write(f"Total installed packages: {len(installed_packages)}\n")
                f.write(f"Total imported modules: {len(imported_modules)}\n")
                f.write(f"Potentially unused packages: {len(unused)}\n\n")

                if likely_unused:
                    f.write("LIKELY UNUSED (high confidence - no obvious indirect usage):\n")
                    f.write("These packages are good candidates for removal.\n\n")
                    for pkg in likely_unused:
                        f.write(f" - {pkg}\n")
                    f.write("\n")

                if maybe_unused:
                    f.write("MAYBE UNUSED (lower confidence - might be plugins/indirect deps):\n")
                    f.write("These may be used indirectly or as plugins. Verify before removing.\n\n")
                    for pkg in maybe_unused:
                        f.write(f" - {pkg}\n")
                    f.write("\n")

                if not unused:
                    f.write("No obviously unused packages detected.\n")

            elapsed = int((time.time() - start) * 1000)
            return StepResult(self.name, "OK", elapsed, "")

        except subprocess.TimeoutExpired:
            out.write_text("Analysis timed out\n", encoding="utf-8")
            return StepResult(
                self.name, "FAIL", int((time.time() - start) * 1000), "timeout"
            )
        except Exception as e:
            out.write_text(f"Error: {e}\n", encoding="utf-8")
            return StepResult(
                self.name, "FAIL", int((time.time() - start) * 1000), str(e)
            )

    def _get_imported_modules(self, root: Path) -> set[str]:
        """Extract top-level module names from import statements.

        Names are normalized (lowercase, underscores to dashes) so they can
        be compared against pip distribution names.
        """
        skip_dirs = {
            ".venv",
            "venv",
            "__pycache__",
            "node_modules",
            "dist",
            "build",
            "artifacts",
            ".git",
            ".tox",
        }
        imported: set[str] = set()

        for py_file in root.rglob("*.py"):
            # Skip venv and common excluded directories
            if skip_dirs.intersection(py_file.parts):
                continue

            try:
                content = py_file.read_text(encoding="utf-8", errors="ignore")
            except Exception:
                continue

            for line in content.splitlines():
                line = line.strip()
                if line.startswith("import "):
                    # "import a.b as c, d" contributes both "a" and "d"
                    # (the old code only captured the first clause).
                    for clause in line[len("import "):].split(","):
                        clause = clause.strip()
                        if not clause:
                            continue
                        module = clause.split()[0].split(".")[0]
                        if module:
                            imported.add(module.lower().replace("_", "-"))
                elif line.startswith("from "):
                    line_parts = line.split()
                    # Relative imports ("from . import x") name no
                    # distribution; the old code added "" for them.
                    if len(line_parts) >= 2 and not line_parts[1].startswith("."):
                        module = line_parts[1].split(".")[0].strip()
                        if module:
                            imported.add(module.lower().replace("_", "-"))

        return imported

    def _categorize_by_confidence(
        self, unused: list[str], root: Path
    ) -> tuple[list[str], list[str]]:
        """Categorize unused packages by confidence level.

        Returns:
            (likely_unused, maybe_unused) tuple of package lists
        """
        # Packages that are commonly indirect deps or plugins (lower confidence)
        indirect_patterns = {
            # Build/dev tools often used indirectly
            "setuptools", "wheel", "pip", "build",
            # Testing plugins
            "pytest-", "coverage", "pluggy",
            # Type checking stubs
            "types-", "-stubs",
            # Common indirect deps
            "certifi", "charset-normalizer", "idna", "urllib3",
            "six", "packaging", "pyparsing",
            # Web framework plugins often auto-discovered
            "uvicorn", "gunicorn", "hypercorn",
            # Database drivers (might be dynamically loaded)
            "psycopg2", "pymysql", "cx-oracle",
            # Celery/async workers
            "celery", "redis", "kombu", "amqp",
        }

        likely_unused: list[str] = []
        maybe_unused: list[str] = []

        for pkg in unused:
            # Substring match against known indirect-dependency patterns.
            is_likely_indirect = any(
                pattern in pkg for pattern in indirect_patterns
            )

            # Packages declared in project config are probably intentional.
            mentioned_in_config = self._is_mentioned_in_config(pkg, root)

            if is_likely_indirect or mentioned_in_config:
                maybe_unused.append(pkg)
            else:
                likely_unused.append(pkg)

        return likely_unused, maybe_unused

    def _is_mentioned_in_config(self, pkg: str, root: Path) -> bool:
        """Check if package is mentioned in common packaging config files."""
        config_files = [
            "pyproject.toml",
            "setup.py",
            "setup.cfg",
            "requirements.txt",
            "requirements-dev.txt",
        ]

        for config_file in config_files:
            config_path = root / config_file
            if config_path.exists():
                try:
                    content = config_path.read_text(encoding="utf-8", errors="ignore")
                    # Match either the dashed or underscored spelling.
                    if pkg in content or pkg.replace("-", "_") in content:
                        return True
                except Exception:
                    continue

        return False
@@ -0,0 +1,167 @@
1
+ from __future__ import annotations
2
+
3
+ import subprocess # nosec B404 - Required for tool execution, paths validated
4
+ import time
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+
8
+ from .base import StepResult
9
+ from ..context import BundleContext
10
+ from ..tools import which
11
+
12
+
13
def _repo_has_py_files(root: Path) -> bool:
    """Fast check if there are Python files to scan."""
    # Directories whose contents should never count as scannable sources.
    skipped = {".venv", "__pycache__", "node_modules", "dist", "build", "artifacts"}
    return any(
        skipped.isdisjoint(candidate.parts) for candidate in root.rglob("*.py")
    )
27
+
28
+
29
def _detect_framework_decorators(root: Path) -> set[str]:
    """Detect common framework decorators that should be whitelisted.

    Scans project sources for FastAPI/Flask/Django imports and returns the
    decorator patterns those frameworks register implicitly, so vulture does
    not flag the decorated handlers as dead code.
    """
    fastapi_decorators = {
        "@app.get",
        "@app.post",
        "@app.put",
        "@app.delete",
        "@app.patch",
        "@router.get",
        "@router.post",
        "@router.put",
        "@router.delete",
        "@router.patch",
        "@app.on_event",
        "@app.middleware",
    }
    flask_decorators = {
        "@app.route",
        "@app.before_request",
        "@app.after_request",
        "@app.errorhandler",
    }
    django_decorators = {
        "@login_required",
        "@permission_required",
        "@staff_member_required",
    }

    # Only scan PROJECT files: a framework's own sources inside a virtualenv
    # (e.g. fastapi installed in .venv) must not trigger whitelisting for a
    # project that never uses it. The old code scanned everything.
    skipped_dirs = {
        ".venv",
        "venv",
        "__pycache__",
        "node_modules",
        "dist",
        "build",
        "artifacts",
        ".git",
        ".tox",
    }

    decorators: set[str] = set()

    for p in root.rglob("*.py"):
        if skipped_dirs.intersection(p.parts):
            continue
        try:
            content = p.read_text(encoding="utf-8", errors="ignore")
        except Exception:
            continue
        if "from fastapi" in content or "import fastapi" in content:
            decorators.update(fastapi_decorators)
        if "from flask" in content or "import flask" in content:
            decorators.update(flask_decorators)
        if "from django" in content or "import django" in content:
            decorators.update(django_decorators)

    return decorators
69
+
70
+
71
@dataclass
class VultureStep:
    """Run vulture dead-code detection over the project."""

    name: str = "vulture"
    target: str = "."
    outfile: str = "logs/50_vulture.txt"

    def run(self, ctx: BundleContext) -> StepResult:
        """Execute vulture and capture its findings in ``self.outfile``.

        Exit code 0 (no dead code) and 3 (dead code found) both count as OK;
        anything else is treated as a usage/configuration failure.
        """
        start = time.time()
        out = ctx.workdir / self.outfile
        out.parent.mkdir(parents=True, exist_ok=True)

        vulture = which("vulture")
        if not vulture:
            out.write_text(
                "vulture not found; skipping (pip install vulture)\n", encoding="utf-8"
            )
            return StepResult(self.name, "SKIP", 0, "missing vulture")

        if not _repo_has_py_files(ctx.root):
            out.write_text(
                "no .py files detected; skipping vulture\n", encoding="utf-8"
            )
            return StepResult(self.name, "SKIP", 0, "no python files")

        target_path = ctx.root / self.target

        # Build command with ALL OPTIONS FIRST, then paths
        # This is critical for argparse compatibility
        cmd = [
            vulture,
            "--exclude",
            "*venv*,*.venv*,.pybundle-venv,venv,env,.env,__pycache__,artifacts,build,dist,.git,.tox,node_modules",
            "--min-confidence",
            "60",
            "--sort-by-size",
        ]

        # Framework decorators (FastAPI/Flask/Django routes etc.) register
        # handlers implicitly. Use vulture's documented --ignore-decorators
        # flag for them: the previous approach of appending a synthetic
        # whitelist file full of dummy "def app_get(): pass" definitions did
        # not suppress anything - vulture merely scanned it as ordinary code.
        decorators = _detect_framework_decorators(ctx.root)
        if decorators:
            cmd.extend(["--ignore-decorators", ",".join(sorted(decorators))])

        # Add target path last (positional argument).
        cmd.append(str(target_path))

        try:
            result = subprocess.run(  # nosec B603 - Using full path from which()
                cmd,
                cwd=ctx.root,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                timeout=120,
            )
        except subprocess.TimeoutExpired:
            out.write_text("vulture timed out after 120s\n", encoding="utf-8")
            return StepResult(
                self.name, "FAIL", int((time.time() - start) * 1000), "timeout"
            )
        except Exception as e:
            out.write_text(f"vulture error: {e}\n", encoding="utf-8")
            return StepResult(
                self.name, "FAIL", int((time.time() - start) * 1000), str(e)
            )

        out.write_text(result.stdout, encoding="utf-8")
        elapsed = int((time.time() - start) * 1000)

        # Vulture exit codes:
        # 0 = no dead code found
        # 1 = usage/configuration error
        # 3 = dead code found (this is success!)
        if result.returncode in (0, 3):
            return StepResult(self.name, "OK", elapsed, "")
        return StepResult(
            self.name, "FAIL", elapsed, f"exit {result.returncode}"
        )