gwc-pybundle 2.1.2__py3-none-any.whl

This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.

Potentially problematic release: this version of gwc-pybundle might be problematic.

Files changed (82)
  1. gwc_pybundle-2.1.2.dist-info/METADATA +903 -0
  2. gwc_pybundle-2.1.2.dist-info/RECORD +82 -0
  3. gwc_pybundle-2.1.2.dist-info/WHEEL +5 -0
  4. gwc_pybundle-2.1.2.dist-info/entry_points.txt +2 -0
  5. gwc_pybundle-2.1.2.dist-info/licenses/LICENSE.md +25 -0
  6. gwc_pybundle-2.1.2.dist-info/top_level.txt +1 -0
  7. pybundle/__init__.py +0 -0
  8. pybundle/__main__.py +4 -0
  9. pybundle/cli.py +546 -0
  10. pybundle/context.py +404 -0
  11. pybundle/doctor.py +148 -0
  12. pybundle/filters.py +228 -0
  13. pybundle/manifest.py +77 -0
  14. pybundle/packaging.py +45 -0
  15. pybundle/policy.py +132 -0
  16. pybundle/profiles.py +454 -0
  17. pybundle/roadmap_model.py +42 -0
  18. pybundle/roadmap_scan.py +328 -0
  19. pybundle/root_detect.py +14 -0
  20. pybundle/runner.py +180 -0
  21. pybundle/steps/__init__.py +26 -0
  22. pybundle/steps/ai_context.py +791 -0
  23. pybundle/steps/api_docs.py +219 -0
  24. pybundle/steps/asyncio_analysis.py +358 -0
  25. pybundle/steps/bandit.py +72 -0
  26. pybundle/steps/base.py +20 -0
  27. pybundle/steps/blocking_call_detection.py +291 -0
  28. pybundle/steps/call_graph.py +219 -0
  29. pybundle/steps/compileall.py +76 -0
  30. pybundle/steps/config_docs.py +319 -0
  31. pybundle/steps/config_validation.py +302 -0
  32. pybundle/steps/container_image.py +294 -0
  33. pybundle/steps/context_expand.py +272 -0
  34. pybundle/steps/copy_pack.py +293 -0
  35. pybundle/steps/coverage.py +101 -0
  36. pybundle/steps/cprofile_step.py +166 -0
  37. pybundle/steps/dependency_sizes.py +136 -0
  38. pybundle/steps/django_checks.py +214 -0
  39. pybundle/steps/dockerfile_lint.py +282 -0
  40. pybundle/steps/dockerignore.py +311 -0
  41. pybundle/steps/duplication.py +103 -0
  42. pybundle/steps/env_completeness.py +269 -0
  43. pybundle/steps/env_var_usage.py +253 -0
  44. pybundle/steps/error_refs.py +204 -0
  45. pybundle/steps/event_loop_patterns.py +280 -0
  46. pybundle/steps/exception_patterns.py +190 -0
  47. pybundle/steps/fastapi_integration.py +250 -0
  48. pybundle/steps/flask_debugging.py +312 -0
  49. pybundle/steps/git_analytics.py +315 -0
  50. pybundle/steps/handoff_md.py +176 -0
  51. pybundle/steps/import_time.py +175 -0
  52. pybundle/steps/interrogate.py +106 -0
  53. pybundle/steps/license_scan.py +96 -0
  54. pybundle/steps/line_profiler.py +117 -0
  55. pybundle/steps/link_validation.py +287 -0
  56. pybundle/steps/logging_analysis.py +233 -0
  57. pybundle/steps/memory_profile.py +176 -0
  58. pybundle/steps/migration_history.py +336 -0
  59. pybundle/steps/mutation_testing.py +141 -0
  60. pybundle/steps/mypy.py +103 -0
  61. pybundle/steps/orm_optimization.py +316 -0
  62. pybundle/steps/pip_audit.py +45 -0
  63. pybundle/steps/pipdeptree.py +62 -0
  64. pybundle/steps/pylance.py +562 -0
  65. pybundle/steps/pytest.py +66 -0
  66. pybundle/steps/query_pattern_analysis.py +334 -0
  67. pybundle/steps/radon.py +161 -0
  68. pybundle/steps/repro_md.py +161 -0
  69. pybundle/steps/rg_scans.py +78 -0
  70. pybundle/steps/roadmap.py +153 -0
  71. pybundle/steps/ruff.py +117 -0
  72. pybundle/steps/secrets_detection.py +235 -0
  73. pybundle/steps/security_headers.py +309 -0
  74. pybundle/steps/shell.py +74 -0
  75. pybundle/steps/slow_tests.py +178 -0
  76. pybundle/steps/sqlalchemy_validation.py +269 -0
  77. pybundle/steps/test_flakiness.py +184 -0
  78. pybundle/steps/tree.py +116 -0
  79. pybundle/steps/type_coverage.py +277 -0
  80. pybundle/steps/unused_deps.py +211 -0
  81. pybundle/steps/vulture.py +167 -0
  82. pybundle/tools.py +63 -0
pybundle/steps/api_docs.py ADDED
@@ -0,0 +1,219 @@
+ """API documentation generation step using pdoc.
+
+ Generates HTML API documentation and validates docstring parsing.
+ """
+
+ import shutil
+ import subprocess
+ import time
+ from pathlib import Path
+ from dataclasses import dataclass
+
+ from .base import StepResult
+ from ..context import BundleContext
+
+
+ @dataclass
+ class ApiDocsStep:
+     """Step that generates API documentation using pdoc."""
+
+     name: str = "api-docs"
+     outfile: str = "logs/82_api_docs.txt"
+
+     def run(self, context: BundleContext) -> StepResult:
+         """Generate API documentation."""
+         start = time.time()
+
+         # Check if pdoc is available and get version
+         pdoc_path = shutil.which("pdoc")
+         if not pdoc_path:
+             elapsed = time.time() - start
+             note = "pdoc not installed (pip install pdoc)"
+             return StepResult(self.name, "SKIP", int(elapsed), note)
+
+         # Get pdoc version for diagnostics
+         pdoc_version = None
+         try:
+             version_result = subprocess.run(
+                 ["pdoc", "--version"],
+                 capture_output=True,
+                 text=True,
+                 timeout=5,
+             )
+             pdoc_version = version_result.stdout.strip() or version_result.stderr.strip()
+         except Exception:
+             pdoc_version = "unknown"
+
+         # Find Python package(s) in project
+         packages = self._find_packages(context.root)
+
+         if not packages:
+             elapsed = time.time() - start
+             note = "No Python packages found"
+             return StepResult(self.name, "SKIP", int(elapsed), note)
+
+         # Create output directory in meta
+         output_dir = context.workdir / "meta/82_api_docs"
+         output_dir.mkdir(parents=True, exist_ok=True)
+
+         # Generate docs for each package
+         success_count = 0
+         error_count = 0
+         errors = []
+
+         for package in packages:
+             try:
+                 # Use pdoc v14+ syntax (--output-dir instead of --html + --output-dir);
+                 # only the modern invocation is attempted here, with no legacy fallback.
+                 result = subprocess.run(
+                     [
+                         "pdoc",
+                         "--output-dir",
+                         str(output_dir),
+                         str(package),
+                     ],
+                     cwd=context.root,
+                     capture_output=True,
+                     text=True,
+                     timeout=60,
+                 )
+
+                 if result.returncode == 0:
+                     success_count += 1
+                 else:
+                     error_count += 1
+                     error_msg = result.stderr or result.stdout
+                     errors.append((package.name, result.returncode, error_msg))
+
+             except subprocess.TimeoutExpired:
+                 error_count += 1
+                 errors.append((package.name, -1, "Timeout after 60s"))
+             except Exception as e:
+                 error_count += 1
+                 errors.append((package.name, -1, str(e)))
+
+         elapsed = time.time() - start
+
+         # Write summary report
+         log_path = context.workdir / self.outfile
+         log_path.parent.mkdir(parents=True, exist_ok=True)
+         with open(log_path, "w") as f:
+             f.write("=" * 80 + "\n")
+             f.write("API DOCUMENTATION GENERATION\n")
+             f.write("=" * 80 + "\n\n")
+
+             f.write("Environment:\n")
+             f.write("-" * 80 + "\n")
+             f.write(f"pdoc path: {pdoc_path}\n")
+             f.write(f"pdoc version: {pdoc_version}\n")
+             f.write("\n")
+
+             f.write("Summary:\n")
+             f.write("-" * 80 + "\n")
+             f.write(f"Packages processed: {len(packages)}\n")
+             f.write(f"Successful: {success_count}\n")
+             f.write(f"Failed: {error_count}\n")
+             f.write(f"Output directory: {output_dir.relative_to(context.workdir)}\n")
+             f.write("\n")
+
+             if errors:
+                 f.write("Errors:\n")
+                 f.write("-" * 80 + "\n")
+                 for package_name, retcode, error_msg in errors:
+                     f.write(f"\n{package_name}:\n")
+                     f.write(f" Exit code: {retcode}\n")
+                     # Limit error message length
+                     if len(error_msg) > 500:
+                         error_msg = error_msg[:500] + "\n... (truncated)"
+                     f.write(f" Error: {error_msg}\n")
+
+                     # Add actionable fix suggestions
+                     f.write("\n 💡 How to fix:\n")
+                     if "--html" in error_msg or "unrecognized arguments" in error_msg:
+                         f.write(" - Your pdoc version may not support --html flag\n")
+                         f.write(" - pdoc < 14.0: use 'pdoc --html'\n")
+                         f.write(" - pdoc >= 14.0: use 'pdoc --output-dir'\n")
+                         f.write(f" - Detected version: {pdoc_version}\n")
+                         f.write(" - Try: pip install --upgrade pdoc\n")
+                     elif "Timeout" in error_msg:
+                         f.write(" - Package is too large or has import errors\n")
+                         f.write(" - Check for circular imports or missing dependencies\n")
+                     else:
+                         f.write(" - Check that package imports work: python -c 'import <package>'\n")
+                         f.write(" - Verify dependencies are installed\n")
+                         f.write(" - Run manually: pdoc --output-dir docs/ <package>\n")
+
+             if success_count > 0:
+                 f.write("\nGenerated Documentation:\n")
+                 f.write("-" * 80 + "\n")
+                 # List generated HTML files
+                 html_files = sorted(output_dir.rglob("*.html"))
+                 for html_file in html_files[:20]:  # Limit to first 20
+                     rel_path = html_file.relative_to(context.workdir)
+                     f.write(f" {rel_path}\n")
+                 if len(html_files) > 20:
+                     f.write(f" ... and {len(html_files) - 20} more files\n")
+
+             f.write("\n" + "=" * 80 + "\n")
+             f.write("Documentation generation complete\n")
+             f.write("=" * 80 + "\n")
+
+         # Determine status
+         if error_count == 0:
+             status = "OK"
+             note = f"Generated docs for {success_count} package(s)"
+         elif success_count > 0:
+             status = "WARN"
+             note = f"{success_count} OK, {error_count} failed"
+         else:
+             status = "FAIL"
+             note = f"All {error_count} package(s) failed"
+
+         return StepResult(self.name, status, int(elapsed), note)
+
+     def _find_packages(self, root: Path) -> list[Path]:
+         """Find Python packages (directories with __init__.py).
+
+         Returns top-level packages only, excluding common directories.
+         """
+         packages = []
+         exclude_dirs = {
+             "__pycache__",
+             ".git",
+             ".tox",
+             "venv",
+             "env",
+             ".venv",
+             ".env",
+             "node_modules",
+             "artifacts",
+             "build",
+             "dist",
+             ".pytest_cache",
+             ".mypy_cache",
+             ".ruff_cache",
+             "htmlcov",
+             ".egg-info",
+             "tests",
+             "test",
+             ".pybundle-venv",  # pybundle's venv
+         }
+
+         # Look for directories with __init__.py at root level first
+         for item in root.iterdir():
+             if item.is_dir() and item.name not in exclude_dirs:
+                 init_file = item / "__init__.py"
+                 if init_file.exists():
+                     packages.append(item)
+
+         # If no packages found at root, look one level deeper
+         if not packages:
+             for item in root.iterdir():
+                 if item.is_dir() and item.name not in exclude_dirs:
+                     for subitem in item.iterdir():
+                         if subitem.is_dir() and subitem.name not in exclude_dirs:
+                             init_file = subitem / "__init__.py"
+                             if init_file.exists():
+                                 packages.append(subitem)
+
+         return packages
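
For orientation, here is a minimal sketch of driving ApiDocsStep outside the bundler. It assumes the wheel is installed and imports cleanly; FakeContext is a hypothetical stand-in that exposes only the two attributes the step actually reads (root and workdir), whereas the real BundleContext lives in pybundle/context.py and is not shown in this diff.

    # Illustrative only: FakeContext is a stand-in, not the real BundleContext.
    from dataclasses import dataclass
    from pathlib import Path
    import tempfile

    from pybundle.steps.api_docs import ApiDocsStep


    @dataclass
    class FakeContext:
        root: Path     # project being bundled
        workdir: Path  # where logs/ and meta/ are written


    def demo() -> None:
        with tempfile.TemporaryDirectory() as tmp:
            tmp_path = Path(tmp)
            # Minimal fake project: one top-level package with an __init__.py.
            pkg = tmp_path / "proj" / "mypkg"
            pkg.mkdir(parents=True)
            (pkg / "__init__.py").write_text('"""Demo package."""\n')

            ctx = FakeContext(root=tmp_path / "proj", workdir=tmp_path / "out")
            result = ApiDocsStep().run(ctx)
            # SKIP if pdoc is not on PATH, otherwise OK/WARN/FAIL.
            print(result.name, result.status, result.note)


    if __name__ == "__main__":
        demo()

Because a missing pdoc degrades to SKIP rather than an exception, the demo prints the resulting status instead of asserting on it.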
pybundle/steps/asyncio_analysis.py ADDED
@@ -0,0 +1,358 @@
+ """
+ Step: AsyncIO Task Analysis
+ Static analysis of async/await patterns in codebase.
+ """
+
+ import re
+ import ast
+ from pathlib import Path
+ from typing import Dict, List, Set, Tuple, Optional
+
+ from .base import Step, StepResult
+
+
+ class AsyncioAnalysisStep(Step):
+     """Analyze async/await patterns in Python code."""
+
+     name = "asyncio analysis"
+
+     def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
+         """Analyze asyncio usage and patterns."""
+         import time
+
+         start = time.time()
+
+         root = ctx.root
+
+         # Find all async definitions
+         async_funcs = self._find_async_functions(root)
+
+         # Analyze async patterns
+         patterns = self._analyze_async_patterns(root, async_funcs)
+
+         # Generate report
+         lines = [
+             "=" * 80,
+             "ASYNCIO ANALYSIS REPORT",
+             "=" * 80,
+             "",
+         ]
+
+         # Summary
+         lines.extend(
+             [
+                 "SUMMARY",
+                 "=" * 80,
+                 "",
+                 f"Async functions found: {len(async_funcs['all_async'])}",
+                 f"Coroutines: {len(async_funcs['coroutines'])}",
+                 f"Async generators: {len(async_funcs['async_generators'])}",
+                 f"Async context managers: {len(async_funcs['async_context_managers'])}",
+                 "",
+             ]
+         )
+
+         if not async_funcs["all_async"]:
+             lines.extend(
+                 [
+                     "⊘ No async code detected in project",
+                     "",
+                     "This project does not appear to use asyncio patterns.",
+                     "If this is incorrect, ensure async functions are in analyzed files.",
+                     "",
+                 ]
+             )
+         else:
+             # Detailed breakdown
+             lines.extend(
+                 [
+                     "ASYNC FUNCTIONS BY TYPE",
+                     "=" * 80,
+                     "",
+                 ]
+             )
+
+             if async_funcs["coroutines"]:
+                 lines.append(f"COROUTINES ({len(async_funcs['coroutines'])}):")
+                 for file_path, func_name, line_no in sorted(
+                     async_funcs["coroutines"], key=lambda x: (x[0], x[2])
+                 )[:20]:
+                     lines.append(f" {file_path}:{line_no} - {func_name}")
+                 if len(async_funcs["coroutines"]) > 20:
+                     lines.append(f" ... and {len(async_funcs['coroutines']) - 20} more")
+                 lines.append("")
+
+             if async_funcs["async_generators"]:
+                 lines.append(f"ASYNC GENERATORS ({len(async_funcs['async_generators'])}):")
+                 for file_path, func_name, line_no in sorted(
+                     async_funcs["async_generators"], key=lambda x: (x[0], x[2])
+                 )[:20]:
+                     lines.append(f" {file_path}:{line_no} - {func_name}")
+                 if len(async_funcs["async_generators"]) > 20:
+                     lines.append(
+                         f" ... and {len(async_funcs['async_generators']) - 20} more"
+                     )
+                 lines.append("")
+
+             if async_funcs["async_context_managers"]:
+                 lines.append(
+                     f"ASYNC CONTEXT MANAGERS ({len(async_funcs['async_context_managers'])}):"
+                 )
+                 for file_path, func_name, line_no in sorted(
+                     async_funcs["async_context_managers"], key=lambda x: (x[0], x[2])
+                 )[:20]:
+                     lines.append(f" {file_path}:{line_no} - {func_name}")
+                 if len(async_funcs["async_context_managers"]) > 20:
+                     lines.append(
+                         f" ... and {len(async_funcs['async_context_managers']) - 20} more"
+                     )
+                 lines.append("")
+
+         # Pattern analysis
+         if async_funcs["all_async"]:
+             lines.extend(
+                 [
+                     "=" * 80,
+                     "ASYNC PATTERNS",
+                     "=" * 80,
+                     "",
+                 ]
+             )
+
+             if patterns["missing_awaits"]:
+                 lines.append(
+                     f"⚠ POTENTIAL MISSING AWAITS ({len(patterns['missing_awaits'])}):"
+                 )
+                 for item in patterns["missing_awaits"][:15]:
+                     lines.append(f" {item['file']}:{item['line']}")
+                     if item.get("context"):
+                         context_line = item["context"].strip()
+                         if len(context_line) > 70:
+                             context_line = context_line[:67] + "..."
+                         lines.append(f" > {context_line}")
+                 if len(patterns["missing_awaits"]) > 15:
+                     lines.append(
+                         f" ... and {len(patterns['missing_awaits']) - 15} more"
+                     )
+                 lines.append("")
+             else:
+                 lines.append("✓ No obvious missing await calls detected")
+                 lines.append("")
+
+             if patterns["async_context_managers"]:
+                 lines.append(
+                     f"✓ ASYNC CONTEXT MANAGERS ({len(patterns['async_context_managers'])}):"
+                 )
+                 for item in patterns["async_context_managers"][:10]:
+                     lines.append(f" {item}")
+                 if len(patterns["async_context_managers"]) > 10:
+                     lines.append(
+                         f" ... and {len(patterns['async_context_managers']) - 10} more"
+                     )
+                 lines.append("")
+
+             # Python version features
+             lines.extend(
+                 [
+                     "PYTHON 3.11+ FEATURES",
+                     "=" * 80,
+                     "",
+                 ]
+             )
+
+             if patterns["taskgroup_usage"]:
+                 lines.append(f"✓ TaskGroup usage (Python 3.11+): {len(patterns['taskgroup_usage'])}")
+                 for item in patterns["taskgroup_usage"][:5]:
+                     lines.append(f" {item}")
+                 lines.append("")
+             else:
+                 lines.append("ℹ No TaskGroup usage (available in Python 3.11+)")
+                 lines.append("")
+
+             if patterns["exception_groups"]:
+                 lines.append(f"✓ Exception Groups usage: {len(patterns['exception_groups'])}")
+                 for item in patterns["exception_groups"][:5]:
+                     lines.append(f" {item}")
+                 lines.append("")
+             else:
+                 lines.append("ℹ No exception groups detected")
+                 lines.append("")
+
+         # Recommendations
+         lines.extend(
+             [
+                 "=" * 80,
+                 "RECOMMENDATIONS",
+                 "=" * 80,
+                 "",
+             ]
+         )
+
+         if async_funcs["all_async"]:
+             if patterns["missing_awaits"]:
+                 lines.append(" - Review potential missing await calls marked above")
+                 lines.append(" - Use linters (ruff, pylint) with async checks enabled")
+             else:
+                 lines.append(" - ✓ Good: No obvious missing awaits detected")
+
+             lines.append(" - Use asyncio.gather() for concurrent execution")
+             lines.append(" - Prefer TaskGroup (Python 3.11+) for structured concurrency")
+             lines.append(" - Use asyncio.timeout() for deadline management")
+             lines.append(" - Avoid blocking calls (use async equivalents)")
+             lines.append(" - Use pytest-asyncio for testing async code")
+         else:
+             lines.append(" - No async code detected")
+             lines.append(" - If planning to add asyncio, use structured concurrency patterns")
+             lines.append(" - Consider asyncio for I/O-bound operations")
+
+         lines.append("")
+
+         # Write report
+         output = "\n".join(lines)
+         dest = ctx.workdir / "logs" / "130_async_analysis.txt"
+         dest.parent.mkdir(parents=True, exist_ok=True)
+         dest.write_text(output, encoding="utf-8")
+
+         elapsed = int(time.time() - start)
+         return StepResult(self.name, "OK", elapsed, "")
+
+     def _find_async_functions(self, root: Path) -> Dict[str, List[Tuple[str, str, int]]]:
+         """Find all async function definitions."""
+         coroutines = []
+         async_generators = []
+         async_context_managers = []
+
+         python_files = list(root.rglob("*.py"))
+
+         for py_file in python_files:
+             # Skip venv and cache
+             if any(
+                 part in py_file.parts
+                 for part in ["venv", ".venv", "env", "__pycache__", "site-packages"]
+             ):
+                 continue
+
+             try:
+                 source = py_file.read_text(encoding="utf-8", errors="ignore")
+                 rel_path = str(py_file.relative_to(root))
+
+                 tree = ast.parse(source)
+
+                 for node in ast.walk(tree):
+                     if isinstance(node, ast.AsyncFunctionDef):
+                         func_name = node.name
+                         line_no = node.lineno
+
+                         # Determine type by checking for yield/yield from
+                         has_yield = any(
+                             isinstance(n, (ast.Yield, ast.YieldFrom))
+                             for n in ast.walk(node)
+                         )
+
+                         if has_yield:
+                             async_generators.append((rel_path, func_name, line_no))
+                         elif func_name.startswith("__aenter__") or func_name.startswith(
+                             "__aexit__"
+                         ):
+                             async_context_managers.append((rel_path, func_name, line_no))
+                         else:
+                             coroutines.append((rel_path, func_name, line_no))
+
+             except (OSError, UnicodeDecodeError, SyntaxError):
+                 continue
+
+         all_async = coroutines + async_generators + async_context_managers
+
+         return {
+             "all_async": all_async,
+             "coroutines": coroutines,
+             "async_generators": async_generators,
+             "async_context_managers": async_context_managers,
+         }
+
+     def _analyze_async_patterns(self, root: Path, async_funcs: Dict) -> Dict:
+         """Analyze async patterns and best practices."""
+         missing_awaits = []
+         async_context_managers = []
+         taskgroup_usage = []
+         exception_groups = []
+
+         python_files = list(root.rglob("*.py"))
+
+         for py_file in python_files:
+             if any(
+                 part in py_file.parts
+                 for part in ["venv", ".venv", "env", "__pycache__", "site-packages"]
+             ):
+                 continue
+
+             try:
+                 source = py_file.read_text(encoding="utf-8", errors="ignore")
+                 rel_path = str(py_file.relative_to(root))
+
+                 tree = ast.parse(source)
+
+                 for node in ast.walk(tree):
+                     if isinstance(node, ast.AsyncFunctionDef):
+                         # Check for missing awaits
+                         for call_node in ast.walk(node):
+                             if isinstance(call_node, ast.Call):
+                                 # Check if it's a coroutine call without await
+                                 if isinstance(call_node.func, ast.Name):
+                                     func_name = call_node.func.id
+                                     # Look for common async functions not being awaited
+                                     if func_name in [
+                                         "sleep",
+                                         "gather",
+                                         "create_task",
+                                     ] or any(
+                                         n[1] == func_name
+                                         for n in async_funcs["all_async"]
+                                     ):
+                                         # Check if parent is not Await
+                                         if not any(
+                                             isinstance(p, ast.Await)
+                                             for p in ast.walk(node)
+                                             if call_node in list(ast.walk(p))
+                                         ):
+                                             # This is a simple heuristic
+                                             context = source.split("\n")[
+                                                 call_node.lineno - 1
+                                             ]
+                                             missing_awaits.append(
+                                                 {
+                                                     "file": rel_path,
+                                                     "line": call_node.lineno,
+                                                     "context": context,
+                                                 }
+                                             )
+
+                         # Check for async with
+                         for stmt in ast.walk(node):
+                             if isinstance(stmt, ast.AsyncWith):
+                                 async_context_managers.append(
+                                     f"{rel_path}:{node.lineno}"
+                                 )
+
+                 # Global patterns
+                 for line_num, line in enumerate(source.split("\n"), 1):
+                     if "TaskGroup" in line:
+                         taskgroup_usage.append(f"{rel_path}:{line_num}")
+
+                     if "ExceptionGroup" in line:
+                         exception_groups.append(f"{rel_path}:{line_num}")
+
+             except (OSError, UnicodeDecodeError, SyntaxError):
+                 continue
+
+         # Deduplicate
+         async_context_managers = list(set(async_context_managers))
+         taskgroup_usage = list(set(taskgroup_usage))
+         exception_groups = list(set(exception_groups))
+
+         return {
+             "missing_awaits": missing_awaits,
+             "async_context_managers": async_context_managers,
+             "taskgroup_usage": taskgroup_usage,
+             "exception_groups": exception_groups,
+         }
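
To make the classification concrete, the following stand-alone module is a hedged illustration (not part of gwc-pybundle) of the constructs AsyncioAnalysisStep counts: fetch lands in the coroutines bucket, stream in async generators because it yields, the async with block is recorded under async context-manager patterns, and the lines mentioning TaskGroup feed the literal string scan.

    # Illustrative target code only; requires Python 3.11+ for asyncio.TaskGroup.
    import asyncio
    from collections.abc import AsyncIterator


    async def fetch(i: int) -> int:
        # Plain coroutine: no yield, not __aenter__/__aexit__.
        await asyncio.sleep(0.01)
        return i * 2


    async def stream() -> AsyncIterator[int]:
        # Contains yield, so the step counts it as an async generator.
        for i in range(3):
            yield await fetch(i)


    async def main() -> None:
        # The async with block is recorded as an async context-manager pattern,
        # and "TaskGroup" here is what the literal string scan picks up.
        async with asyncio.TaskGroup() as tg:
            tasks = [tg.create_task(fetch(i)) for i in range(3)]
        print([t.result() for t in tasks])
        print([x async for x in stream()])


    if __name__ == "__main__":
        asyncio.run(main())

Note that the simple missing-await heuristic would likely flag fetch(i) inside tg.create_task(fetch(i)) as a potential missing await, since it is deliberately not awaited directly; that is exactly the kind of false positive the report asks you to review rather than treat as an error.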
pybundle/steps/bandit.py ADDED
@@ -0,0 +1,72 @@
+ from __future__ import annotations
+
+ import subprocess  # nosec B404 - Required for tool execution, paths validated
+ import time
+ from dataclasses import dataclass
+ from pathlib import Path
+
+ from .base import StepResult
+ from ..context import BundleContext
+ from ..tools import which
+
+
+ def _repo_has_py_files(root: Path) -> bool:
+     """Fast check if there are Python files to scan."""
+     for p in root.rglob("*.py"):
+         parts = set(p.parts)
+         if (
+             ".venv" not in parts
+             and "__pycache__" not in parts
+             and "node_modules" not in parts
+             and "dist" not in parts
+             and "build" not in parts
+             and "artifacts" not in parts
+         ):
+             return True
+     return False
+
+
+ @dataclass
+ class BanditStep:
+     name: str = "bandit"
+     target: str = "."
+     outfile: str = "logs/50_bandit.txt"
+
+     def run(self, ctx: BundleContext) -> StepResult:
+         start = time.time()
+         out = ctx.workdir / self.outfile
+         out.parent.mkdir(parents=True, exist_ok=True)
+
+         bandit = which("bandit")
+         if not bandit:
+             out.write_text(
+                 "bandit not found; skipping (pip install bandit)\n", encoding="utf-8"
+             )
+             return StepResult(self.name, "SKIP", 0, "missing bandit")
+
+         if not _repo_has_py_files(ctx.root):
+             out.write_text("no .py files detected; skipping bandit\n", encoding="utf-8")
+             return StepResult(self.name, "SKIP", 0, "no python files")
+
+         # Run bandit with recursive mode, excluding common directories
+         cmd = [
+             bandit,
+             "-r",
+             self.target,
+             "--exclude",
+             "**/artifacts/**,.venv,venv,__pycache__,.mypy_cache,.ruff_cache,node_modules,dist,build,.git,.tox,*.egg-info",
+             "--format",
+             "txt",
+         ]
+         header = f"## PWD: {ctx.root}\n## CMD: {' '.join(cmd)}\n\n"
+
+         cp = subprocess.run(  # nosec B603
+             cmd, cwd=str(ctx.root), text=True, capture_output=True, check=False
+         )
+         text = header + (cp.stdout or "") + ("\n" + cp.stderr if cp.stderr else "")
+         out.write_text(ctx.redact_text(text), encoding="utf-8")
+
+         dur = int(time.time() - start)
+         # Bandit exit codes: 0=no issues, 1=issues found
+         note = "" if cp.returncode == 0 else f"exit={cp.returncode} (security issues)"
+         return StepResult(self.name, "PASS", dur, note)
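
The assembled command is effectively bandit -r . --exclude <dirs> --format txt, run from the project root and written to logs/50_bandit.txt after passing through ctx.redact_text. As a small pytest-style sketch (an assumed test layout, not shipped in the wheel), the _repo_has_py_files guard can be exercised like this:

    # Hypothetical test, not part of the package: .py files under excluded
    # directories such as .venv must not count as scannable sources.
    from pathlib import Path

    from pybundle.steps.bandit import _repo_has_py_files


    def test_repo_has_py_files(tmp_path: Path) -> None:
        # Only an excluded directory contains .py files -> nothing to scan.
        hidden = tmp_path / ".venv" / "lib"
        hidden.mkdir(parents=True)
        (hidden / "mod.py").write_text("x = 1\n")
        assert _repo_has_py_files(tmp_path) is False

        # A real source file at the top level makes the check succeed.
        (tmp_path / "app.py").write_text("print('hello')\n")
        assert _repo_has_py_files(tmp_path) is True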
pybundle/steps/base.py ADDED
@@ -0,0 +1,20 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import Protocol
+
+ from ..context import BundleContext
+
+
+ @dataclass
+ class StepResult:
+     name: str
+     status: str  # "PASS" | "FAIL" | "SKIP"
+     seconds: int
+     note: str = ""
+
+
+ class Step(Protocol):
+     name: str
+
+     def run(self, ctx: BundleContext) -> StepResult: ...
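
base.py is the contract the steps above follow: a Step is anything with a name and a run(ctx) method returning a StepResult. The actual orchestration lives in pybundle/runner.py, which is not shown in this diff; the loop below is a hypothetical sketch of how a runner could consume that protocol.

    # Hypothetical driver loop, for illustration only.
    from __future__ import annotations

    from typing import Iterable

    from pybundle.context import BundleContext
    from pybundle.steps.base import Step, StepResult


    def run_steps(ctx: BundleContext, steps: Iterable[Step]) -> list[StepResult]:
        results: list[StepResult] = []
        for step in steps:
            try:
                result = step.run(ctx)
            except Exception as exc:  # keep the bundle going if one step blows up
                result = StepResult(step.name, "FAIL", 0, f"unhandled error: {exc}")
            results.append(result)
            print(f"[{result.status:>4}] {result.name} ({result.seconds}s) {result.note}")
        return results

Because every step reports through the same small dataclass, adding a new check is just a matter of implementing run() and returning a StepResult with one of the agreed statuses.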