gwc_pybundle-2.1.2-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gwc-pybundle might be problematic.

Files changed (82)
  1. gwc_pybundle-2.1.2.dist-info/METADATA +903 -0
  2. gwc_pybundle-2.1.2.dist-info/RECORD +82 -0
  3. gwc_pybundle-2.1.2.dist-info/WHEEL +5 -0
  4. gwc_pybundle-2.1.2.dist-info/entry_points.txt +2 -0
  5. gwc_pybundle-2.1.2.dist-info/licenses/LICENSE.md +25 -0
  6. gwc_pybundle-2.1.2.dist-info/top_level.txt +1 -0
  7. pybundle/__init__.py +0 -0
  8. pybundle/__main__.py +4 -0
  9. pybundle/cli.py +546 -0
  10. pybundle/context.py +404 -0
  11. pybundle/doctor.py +148 -0
  12. pybundle/filters.py +228 -0
  13. pybundle/manifest.py +77 -0
  14. pybundle/packaging.py +45 -0
  15. pybundle/policy.py +132 -0
  16. pybundle/profiles.py +454 -0
  17. pybundle/roadmap_model.py +42 -0
  18. pybundle/roadmap_scan.py +328 -0
  19. pybundle/root_detect.py +14 -0
  20. pybundle/runner.py +180 -0
  21. pybundle/steps/__init__.py +26 -0
  22. pybundle/steps/ai_context.py +791 -0
  23. pybundle/steps/api_docs.py +219 -0
  24. pybundle/steps/asyncio_analysis.py +358 -0
  25. pybundle/steps/bandit.py +72 -0
  26. pybundle/steps/base.py +20 -0
  27. pybundle/steps/blocking_call_detection.py +291 -0
  28. pybundle/steps/call_graph.py +219 -0
  29. pybundle/steps/compileall.py +76 -0
  30. pybundle/steps/config_docs.py +319 -0
  31. pybundle/steps/config_validation.py +302 -0
  32. pybundle/steps/container_image.py +294 -0
  33. pybundle/steps/context_expand.py +272 -0
  34. pybundle/steps/copy_pack.py +293 -0
  35. pybundle/steps/coverage.py +101 -0
  36. pybundle/steps/cprofile_step.py +166 -0
  37. pybundle/steps/dependency_sizes.py +136 -0
  38. pybundle/steps/django_checks.py +214 -0
  39. pybundle/steps/dockerfile_lint.py +282 -0
  40. pybundle/steps/dockerignore.py +311 -0
  41. pybundle/steps/duplication.py +103 -0
  42. pybundle/steps/env_completeness.py +269 -0
  43. pybundle/steps/env_var_usage.py +253 -0
  44. pybundle/steps/error_refs.py +204 -0
  45. pybundle/steps/event_loop_patterns.py +280 -0
  46. pybundle/steps/exception_patterns.py +190 -0
  47. pybundle/steps/fastapi_integration.py +250 -0
  48. pybundle/steps/flask_debugging.py +312 -0
  49. pybundle/steps/git_analytics.py +315 -0
  50. pybundle/steps/handoff_md.py +176 -0
  51. pybundle/steps/import_time.py +175 -0
  52. pybundle/steps/interrogate.py +106 -0
  53. pybundle/steps/license_scan.py +96 -0
  54. pybundle/steps/line_profiler.py +117 -0
  55. pybundle/steps/link_validation.py +287 -0
  56. pybundle/steps/logging_analysis.py +233 -0
  57. pybundle/steps/memory_profile.py +176 -0
  58. pybundle/steps/migration_history.py +336 -0
  59. pybundle/steps/mutation_testing.py +141 -0
  60. pybundle/steps/mypy.py +103 -0
  61. pybundle/steps/orm_optimization.py +316 -0
  62. pybundle/steps/pip_audit.py +45 -0
  63. pybundle/steps/pipdeptree.py +62 -0
  64. pybundle/steps/pylance.py +562 -0
  65. pybundle/steps/pytest.py +66 -0
  66. pybundle/steps/query_pattern_analysis.py +334 -0
  67. pybundle/steps/radon.py +161 -0
  68. pybundle/steps/repro_md.py +161 -0
  69. pybundle/steps/rg_scans.py +78 -0
  70. pybundle/steps/roadmap.py +153 -0
  71. pybundle/steps/ruff.py +117 -0
  72. pybundle/steps/secrets_detection.py +235 -0
  73. pybundle/steps/security_headers.py +309 -0
  74. pybundle/steps/shell.py +74 -0
  75. pybundle/steps/slow_tests.py +178 -0
  76. pybundle/steps/sqlalchemy_validation.py +269 -0
  77. pybundle/steps/test_flakiness.py +184 -0
  78. pybundle/steps/tree.py +116 -0
  79. pybundle/steps/type_coverage.py +277 -0
  80. pybundle/steps/unused_deps.py +211 -0
  81. pybundle/steps/vulture.py +167 -0
  82. pybundle/tools.py +63 -0
@@ -0,0 +1,214 @@
+ """
+ Step: Django System Checks
+ Run Django's built-in system checks for security and deployment best practices.
+ """
+
+ import subprocess
+ import re
+ from pathlib import Path
+ from typing import List, Tuple
+
+ from .base import Step, StepResult
+
+
+ class DjangoSystemChecksStep(Step):
+     """Run Django system checks and parse results."""
+
+     name = "django checks"
+
+     def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
+         """Run Django system checks."""
+         import time
+
+         start = time.time()
+
+         root = ctx.root
+
+         # Check whether this looks like a Django project (manage.py present)
+         manage_py = self._find_manage_py(root)
+         if not manage_py:
+             elapsed = int(time.time() - start)
+             return StepResult(
+                 self.name, "SKIP", elapsed, "No Django project found (manage.py not detected)"
+             )
+
+         # Run Django system checks
+         checks_output = self._run_django_checks(manage_py)
+
+         # Also run deploy checks
+         deploy_output = self._run_django_deploy_checks(manage_py)
+
+         # Parse results
+         checks_issues = self._parse_check_output(checks_output)
+         deploy_issues = self._parse_check_output(deploy_output)
+
+         # Generate report
+         lines = [
+             "=" * 80,
+             "DJANGO SYSTEM CHECKS REPORT",
+             "=" * 80,
+             "",
+         ]
+
+         lines.extend(
+             [
+                 "SUMMARY",
+                 "=" * 80,
+                 f"Django project: {manage_py}",
+                 "",
+             ]
+         )
+
+         # General checks
+         lines.extend(
+             [
+                 "GENERAL SYSTEM CHECKS",
+                 "-" * 80,
+                 "",
+             ]
+         )
+
+         if checks_issues:
+             lines.append(f"Found {len(checks_issues)} issue(s):")
+             lines.append("")
+             for level, msg, hint in checks_issues:
+                 icon = "⚠" if level.upper() == "WARNING" else "✗" if level.upper() == "ERROR" else "ℹ"
+                 lines.append(f" {icon} [{level.upper()}] {msg}")
+                 if hint:
+                     lines.append(f" Hint: {hint}")
+             lines.append("")
+         else:
+             lines.append("✓ All general checks passed")
+             lines.append("")
+
+         # Deployment checks
+         lines.extend(
+             [
+                 "DEPLOYMENT BEST PRACTICES (--deploy mode)",
+                 "-" * 80,
+                 "",
+             ]
+         )
+
+         if deploy_issues:
+             lines.append(f"Found {len(deploy_issues)} issue(s):")
+             lines.append("")
+             for level, msg, hint in deploy_issues:
+                 icon = "⚠" if level.upper() == "WARNING" else "✗" if level.upper() == "ERROR" else "ℹ"
+                 lines.append(f" {icon} [{level.upper()}] {msg}")
+                 if hint:
+                     lines.append(f" Hint: {hint}")
+             lines.append("")
+         else:
+             lines.append("✓ All deployment checks passed")
+             lines.append("")
+
+         # Recommendations
+         lines.extend(
+             [
+                 "=" * 80,
+                 "DEPLOYMENT READINESS CHECKLIST",
+                 "=" * 80,
+                 "",
+                 "Security Settings:",
+                 " ☐ DEBUG = False in production",
+                 " ☐ SECRET_KEY set to secure random value",
+                 " ☐ ALLOWED_HOSTS configured",
+                 " ☐ SECURE_HSTS_SECONDS >= 31536000 (1 year)",
+                 " ☐ SESSION_COOKIE_SECURE = True",
+                 " ☐ SESSION_COOKIE_HTTPONLY = True",
+                 " ☐ CSRF_COOKIE_SECURE = True",
+                 " ☐ CSRF_COOKIE_HTTPONLY = True",
+                 "",
+                 "Database:",
+                 " ☐ Using production-grade database (not SQLite)",
+                 " ☐ Database connections configured with SSL",
+                 " ☐ Database backups automated",
+                 "",
+                 "Static & Media:",
+                 " ☐ STATIC_ROOT configured",
+                 " ☐ Static files collected (./manage.py collectstatic)",
+                 " ☐ MEDIA_ROOT and MEDIA_URL configured",
+                 "",
+                 "Logging:",
+                 " ☐ LOGGING configured for production",
+                 " ☐ Error emails configured (ADMINS)",
+                 " ☐ Log file retention policy set",
+                 "",
+                 "Performance:",
+                 " ☐ Database connection pooling enabled",
+                 " ☐ Caching configured (Redis/Memcached)",
+                 " ☐ Compression enabled (gzip, brotli)",
+                 "",
+             ]
+         )
+
+         # Write report
+         output = "\n".join(lines)
+         dest = ctx.workdir / "logs" / "150_django_checks.txt"
+         dest.parent.mkdir(parents=True, exist_ok=True)
+         dest.write_text(output, encoding="utf-8")
+
+         elapsed = int(time.time() - start)
+         return StepResult(self.name, "OK", elapsed, "")
+
+     def _find_manage_py(self, root: Path) -> Path | None:
+         """Find Django's manage.py file."""
+         # Check root
+         if (root / "manage.py").exists():
+             return root / "manage.py"
+
+         # Check common locations
+         for subdir in [".", "src", "app", "django_app"]:
+             candidate = root / subdir / "manage.py"
+             if candidate.exists():
+                 return candidate
+
+         return None
+
+     def _run_django_checks(self, manage_py: Path) -> str:
+         """Run basic Django checks."""
+         try:
+             result = subprocess.run(
+                 ["python", str(manage_py), "check"],
+                 capture_output=True,
+                 text=True,
+                 timeout=10,
+                 cwd=manage_py.parent,
+             )
+             return result.stdout + result.stderr
+         except Exception:
+             return ""
+
+     def _run_django_deploy_checks(self, manage_py: Path) -> str:
+         """Run Django deployment checks."""
+         try:
+             result = subprocess.run(
+                 ["python", str(manage_py), "check", "--deploy"],
+                 capture_output=True,
+                 text=True,
+                 timeout=10,
+                 cwd=manage_py.parent,
+             )
+             return result.stdout + result.stderr
+         except Exception:
+             return ""
+
+     def _parse_check_output(self, output: str) -> List[Tuple[str, str, str]]:
+         """Parse Django check output into (level, message, hint) tuples."""
+         issues: List[Tuple[str, str, str]] = []
+
+         lines = output.split("\n")
+         level = "WARNING"
+         for idx, line in enumerate(lines):
+             stripped = line.strip()
+             # Severity section headers, e.g. "ERRORS:" / "WARNINGS:" / "INFOS:"
+             header = re.match(r"^(CRITICALS|ERRORS|WARNINGS|INFOS|DEBUGS)\s*:\s*$", stripped)
+             if header:
+                 level = header.group(1).rstrip("S")
+                 continue
+             # Message lines look like "?: (security.W004) You have not set ..."
+             match = re.match(r"^.*?:\s+\(([\w.]+)\)\s+(.*)$", stripped)
+             if match:
+                 code, msg = match.groups()
+                 hint = ""
+                 # A hint, if present, follows on the next line as "HINT: ..."
+                 if idx + 1 < len(lines) and "HINT:" in lines[idx + 1].upper():
+                     hint = re.sub(r"(?i)^\s*HINT:\s*", "", lines[idx + 1]).strip()
+                 issues.append((level, f"{msg} ({code})", hint))
+
+         return issues
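
Note: Django's "check" command reports each issue as "<object>: (<check id>) <message>", grouped under severity headers such as "WARNINGS:", with an optional indented "HINT:" line on the next row. A minimal standalone sketch of that parsing idea follows; the sample output and hint text are illustrative only, not captured from this package or a real project.

import re

# Illustrative sample in the shape of "manage.py check --deploy" output (invented for this sketch).
sample = (
    "System check identified some issues:\n"
    "\n"
    "WARNINGS:\n"
    "?: (security.W004) You have not set a value for the SECURE_HSTS_SECONDS setting.\n"
    "\tHINT: Set SECURE_HSTS_SECONDS to enable HTTP Strict Transport Security.\n"
)

lines = sample.splitlines()
for idx, line in enumerate(lines):
    # Match "<object>: (<check id>) <message>"; headers and HINT lines do not match.
    match = re.match(r"^.*?:\s+\(([\w.]+)\)\s+(.*)$", line.strip())
    if match:
        code, msg = match.groups()
        hint = ""
        if idx + 1 < len(lines) and "HINT:" in lines[idx + 1].upper():
            hint = re.sub(r"(?i)^\s*HINT:\s*", "", lines[idx + 1]).strip()
        # The sample only contains warnings; the full step also tracks the severity headers.
        print(f"[WARNING] {msg} ({code})")
        print(f"  Hint: {hint}")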
@@ -0,0 +1,282 @@
+ """
+ Step: Dockerfile Linting
+ Validate Dockerfile best practices using hadolint.
+ """
+
+ import subprocess
+ import re
+ import shutil
+ from pathlib import Path
+ from typing import List, Dict
+
+ from .base import Step, StepResult
+
+
+ class DockerfileLintStep(Step):
+     """Lint Dockerfiles for best practices using hadolint."""
+
+     name = "dockerfile lint"
+
+     def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
+         """Run hadolint on all Dockerfiles found."""
+         import time
+
+         start = time.time()
+
+         # Find all Dockerfiles (exact name or name with .suffix)
+         dockerfiles = []
+         for pattern in ["Dockerfile", "dockerfile", "Dockerfile.*", "dockerfile.*"]:
+             dockerfiles.extend(ctx.root.rglob(pattern))
+
+         # Filter to files that are actually Dockerfiles (exclude .py, .pyc, etc)
+         seen = set()
+         unique_dockerfiles = []
+         for df in dockerfiles:
+             if df.is_file():
+                 # Skip if it's clearly not a Dockerfile (has a code extension)
+                 if df.suffix in [".py", ".pyc", ".pyo", ".pyw", ".pyd"]:
+                     continue
+
+                 key = str(df).lower()
+                 if key not in seen:
+                     seen.add(key)
+                     unique_dockerfiles.append(df)
+
+         if not unique_dockerfiles:
+             elapsed = int(time.time() - start)
+             return StepResult(self.name, "SKIP", elapsed, "No Dockerfiles found")
+
+         # Check if hadolint is available on PATH
+         hadolint_path = shutil.which("hadolint")
+
+         # Generate report
+         lines = [
+             "=" * 80,
+             "DOCKERFILE LINTING REPORT",
+             "=" * 80,
+             "",
+             f"Dockerfiles found: {len(unique_dockerfiles)}",
+             "",
+         ]
+
+         # List dockerfiles
+         lines.extend(
+             [
+                 "=" * 80,
+                 "DOCKERFILES",
+                 "=" * 80,
+                 "",
+             ]
+         )
+
+         for df in sorted(unique_dockerfiles):
+             rel_path = df.relative_to(ctx.root)
+             lines.append(f" - {rel_path}")
+
+             # Show file size
+             try:
+                 size = df.stat().st_size
+                 lines.append(f" Size: {size} bytes")
+             except OSError:
+                 pass
+
+         lines.append("")
+
+         # Run hadolint if available
+         if hadolint_path:
+             lines.extend(
+                 [
+                     "=" * 80,
+                     "HADOLINT RESULTS",
+                     "=" * 80,
+                     "",
+                 ]
+             )
+
+             hadolint_issues: Dict[str, List[str]] = {}
+             total_warnings = 0
+             total_errors = 0
+
+             for df in sorted(unique_dockerfiles):
+                 rel_path = df.relative_to(ctx.root)
+
+                 try:
+                     result = subprocess.run(
+                         [hadolint_path, str(df)],
+                         capture_output=True,
+                         text=True,
+                         timeout=30,
+                     )
+
+                     output = result.stdout + result.stderr
+                     if output.strip():
+                         issues = self._parse_hadolint_output(output)
+                         if issues:
+                             hadolint_issues[str(rel_path)] = issues
+
+                             # Count issue types
+                             for issue in issues:
+                                 if "warning" in issue.lower() or "note" in issue.lower():
+                                     total_warnings += 1
+                                 elif "error" in issue.lower():
+                                     total_errors += 1
+
+                 except subprocess.TimeoutExpired:
+                     lines.append(f"⚠ Timeout analyzing {rel_path}")
+                 except Exception as e:
+                     lines.append(f"⚠ Error analyzing {rel_path}: {e}")
+
+             if hadolint_issues:
+                 lines.append(f"Total issues found: {total_errors} error(s), {total_warnings} warning(s)")
+                 lines.append("")
+
+                 for dockerfile, issues in sorted(hadolint_issues.items()):
+                     lines.append(f"\n{dockerfile}:")
+                     lines.append("-" * 80)
+                     for issue in issues:
+                         lines.append(f" {issue}")
+             else:
+                 lines.append("✓ No issues found")
+                 lines.append("")
+
+         else:
+             lines.extend(
+                 [
+                     "=" * 80,
+                     "HADOLINT RESULTS",
+                     "=" * 80,
+                     "",
+                     "⚠ hadolint not installed. Install it via your system package manager",
+                     " or download a release binary for full results.",
+                     "",
+                 ]
+             )
+
+         # Dockerfile content analysis
+         lines.extend(
+             [
+                 "=" * 80,
+                 "DOCKERFILE ANALYSIS",
+                 "=" * 80,
+                 "",
+             ]
+         )
+
+         analysis = self._analyze_dockerfiles(unique_dockerfiles, ctx.root)
+
+         if analysis["uses_multistage"]:
+             lines.append("✓ Multi-stage builds detected")
+         else:
+             lines.append("⚠ No multi-stage builds detected")
+
+         if analysis["uses_user"]:
+             lines.append("✓ Non-root user configured")
+         else:
+             lines.append("⚠ No non-root user found (security risk)")
+
+         if analysis["pinned_versions"]:
+             lines.append(f"✓ Pinned base images: {analysis['pinned_versions']}")
+         else:
+             lines.append("⚠ Unpinned base image versions detected")
+
+         if analysis["has_healthcheck"]:
+             lines.append("✓ HEALTHCHECK configured")
+         else:
+             lines.append("⚠ No HEALTHCHECK found")
+
+         if analysis["use_args"]:
+             lines.append(f"✓ ARG variables: {analysis['use_args']}")
+
+         lines.append("")
+
+         # Best practices recommendations
+         lines.extend(
+             [
+                 "=" * 80,
+                 "RECOMMENDATIONS",
+                 "=" * 80,
+                 "",
+             ]
+         )
+
+         if not analysis["uses_multistage"]:
+             lines.append(" - Consider using multi-stage builds to reduce final image size")
+         if not analysis["uses_user"]:
+             lines.append(" - Add RUN groupadd -r appuser && useradd -r -g appuser appuser")
+             lines.append(" and USER appuser for security")
+         if not analysis["pinned_versions"]:
+             lines.append(" - Pin base image versions (use specific tags, not 'latest')")
+         if not analysis["has_healthcheck"]:
+             lines.append(" - Add HEALTHCHECK instruction for production containers")
+
+         lines.append("")
+
+         # Write report
+         output = "\n".join(lines)
+         dest = ctx.workdir / "logs" / "105_dockerfile_lint.txt"
+         dest.parent.mkdir(parents=True, exist_ok=True)
+         dest.write_text(output, encoding="utf-8")
+
+         elapsed = int(time.time() - start)
+         return StepResult(self.name, "OK", elapsed, "")
+
+     def _parse_hadolint_output(self, output: str) -> List[str]:
+         """Parse hadolint output and extract issue lines."""
+         issues = []
+         for line in output.split("\n"):
+             line = line.strip()
+             if line and not line.startswith("=="):
+                 issues.append(line)
+         return issues
+
+     def _analyze_dockerfiles(self, dockerfiles: List[Path], root: Path) -> Dict:
+         """Perform static analysis of Dockerfile content."""
+         analysis = {
+             "uses_multistage": False,
+             "uses_user": False,
+             "pinned_versions": 0,
+             "has_healthcheck": False,
+             "use_args": 0,
+         }
+
+         for dockerfile in dockerfiles:
+             try:
+                 content = dockerfile.read_text(encoding="utf-8", errors="ignore")
+
+                 # Check for multi-stage builds (more than one FROM instruction)
+                 from_refs = re.findall(r"^\s*FROM\s+(\S+)", content, re.MULTILINE)
+                 if len(from_refs) > 1:
+                     analysis["uses_multistage"] = True
+
+                 # Check for a USER instruction that switches away from root
+                 users = re.findall(r"^\s*USER\s+(\S+)", content, re.MULTILINE)
+                 if any(user.lower() != "root" for user in users):
+                     analysis["uses_user"] = True
+
+                 # Check for pinned base images (explicit tag other than latest, or a digest)
+                 for from_ref in from_refs:
+                     if "@" in from_ref or (":" in from_ref and not from_ref.endswith(":latest")):
+                         analysis["pinned_versions"] += 1
+
+                 # Check for HEALTHCHECK
+                 if "HEALTHCHECK" in content:
+                     analysis["has_healthcheck"] = True
+
+                 # Count ARG instructions
+                 arg_count = len(re.findall(r"^\s*ARG\s+", content, re.MULTILINE))
+                 analysis["use_args"] += arg_count
+
+             except (OSError, UnicodeDecodeError):
+                 continue
+
+         return analysis
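
Note: a rough standalone sketch of the heuristics used in _analyze_dockerfiles above, applied to an inline Dockerfile. The Dockerfile content is invented for the example and is not part of this package.

import re

# Made-up Dockerfile used only to exercise the heuristics.
dockerfile = (
    "FROM python:3.12-slim AS builder\n"
    "ARG APP_VERSION=1.0\n"
    "FROM python:3.12-slim\n"
    "RUN groupadd -r appuser && useradd -r -g appuser appuser\n"
    "USER appuser\n"
    "HEALTHCHECK CMD curl -f http://localhost:8000/health || exit 1\n"
)

from_refs = re.findall(r"^\s*FROM\s+(\S+)", dockerfile, re.MULTILINE)
users = re.findall(r"^\s*USER\s+(\S+)", dockerfile, re.MULTILINE)

print("multi-stage build:", len(from_refs) > 1)                    # True (two FROM instructions)
print("non-root user:", any(u.lower() != "root" for u in users))   # True (USER appuser)
print("pinned base images:",
      sum(1 for f in from_refs if "@" in f or (":" in f and not f.endswith(":latest"))))  # 2
print("has HEALTHCHECK:", "HEALTHCHECK" in dockerfile)              # True
print("ARG count:", len(re.findall(r"^\s*ARG\s+", dockerfile, re.MULTILINE)))             # 1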