gwc-pybundle 2.1.2 (gwc_pybundle-2.1.2-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gwc-pybundle might be problematic.
- gwc_pybundle-2.1.2.dist-info/METADATA +903 -0
- gwc_pybundle-2.1.2.dist-info/RECORD +82 -0
- gwc_pybundle-2.1.2.dist-info/WHEEL +5 -0
- gwc_pybundle-2.1.2.dist-info/entry_points.txt +2 -0
- gwc_pybundle-2.1.2.dist-info/licenses/LICENSE.md +25 -0
- gwc_pybundle-2.1.2.dist-info/top_level.txt +1 -0
- pybundle/__init__.py +0 -0
- pybundle/__main__.py +4 -0
- pybundle/cli.py +546 -0
- pybundle/context.py +404 -0
- pybundle/doctor.py +148 -0
- pybundle/filters.py +228 -0
- pybundle/manifest.py +77 -0
- pybundle/packaging.py +45 -0
- pybundle/policy.py +132 -0
- pybundle/profiles.py +454 -0
- pybundle/roadmap_model.py +42 -0
- pybundle/roadmap_scan.py +328 -0
- pybundle/root_detect.py +14 -0
- pybundle/runner.py +180 -0
- pybundle/steps/__init__.py +26 -0
- pybundle/steps/ai_context.py +791 -0
- pybundle/steps/api_docs.py +219 -0
- pybundle/steps/asyncio_analysis.py +358 -0
- pybundle/steps/bandit.py +72 -0
- pybundle/steps/base.py +20 -0
- pybundle/steps/blocking_call_detection.py +291 -0
- pybundle/steps/call_graph.py +219 -0
- pybundle/steps/compileall.py +76 -0
- pybundle/steps/config_docs.py +319 -0
- pybundle/steps/config_validation.py +302 -0
- pybundle/steps/container_image.py +294 -0
- pybundle/steps/context_expand.py +272 -0
- pybundle/steps/copy_pack.py +293 -0
- pybundle/steps/coverage.py +101 -0
- pybundle/steps/cprofile_step.py +166 -0
- pybundle/steps/dependency_sizes.py +136 -0
- pybundle/steps/django_checks.py +214 -0
- pybundle/steps/dockerfile_lint.py +282 -0
- pybundle/steps/dockerignore.py +311 -0
- pybundle/steps/duplication.py +103 -0
- pybundle/steps/env_completeness.py +269 -0
- pybundle/steps/env_var_usage.py +253 -0
- pybundle/steps/error_refs.py +204 -0
- pybundle/steps/event_loop_patterns.py +280 -0
- pybundle/steps/exception_patterns.py +190 -0
- pybundle/steps/fastapi_integration.py +250 -0
- pybundle/steps/flask_debugging.py +312 -0
- pybundle/steps/git_analytics.py +315 -0
- pybundle/steps/handoff_md.py +176 -0
- pybundle/steps/import_time.py +175 -0
- pybundle/steps/interrogate.py +106 -0
- pybundle/steps/license_scan.py +96 -0
- pybundle/steps/line_profiler.py +117 -0
- pybundle/steps/link_validation.py +287 -0
- pybundle/steps/logging_analysis.py +233 -0
- pybundle/steps/memory_profile.py +176 -0
- pybundle/steps/migration_history.py +336 -0
- pybundle/steps/mutation_testing.py +141 -0
- pybundle/steps/mypy.py +103 -0
- pybundle/steps/orm_optimization.py +316 -0
- pybundle/steps/pip_audit.py +45 -0
- pybundle/steps/pipdeptree.py +62 -0
- pybundle/steps/pylance.py +562 -0
- pybundle/steps/pytest.py +66 -0
- pybundle/steps/query_pattern_analysis.py +334 -0
- pybundle/steps/radon.py +161 -0
- pybundle/steps/repro_md.py +161 -0
- pybundle/steps/rg_scans.py +78 -0
- pybundle/steps/roadmap.py +153 -0
- pybundle/steps/ruff.py +117 -0
- pybundle/steps/secrets_detection.py +235 -0
- pybundle/steps/security_headers.py +309 -0
- pybundle/steps/shell.py +74 -0
- pybundle/steps/slow_tests.py +178 -0
- pybundle/steps/sqlalchemy_validation.py +269 -0
- pybundle/steps/test_flakiness.py +184 -0
- pybundle/steps/tree.py +116 -0
- pybundle/steps/type_coverage.py +277 -0
- pybundle/steps/unused_deps.py +211 -0
- pybundle/steps/vulture.py +167 -0
- pybundle/tools.py +63 -0
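
Taken together, the listing shows pybundle's step-based layout: each module under pybundle/steps/ contributes one analysis step that exposes a name and a run(ctx) method returning a StepResult, presumably wired together by runner.py and profiles.py. As an illustration only, here is a minimal sketch of such a step, assuming the Step/StepResult interface visible in the hunks below; the step name and report path are invented:

import time

from .base import Step, StepResult  # interface as used by the bundled steps


class FileCountStep(Step):
    """Hypothetical example step: count Python files and write a tiny report."""

    name = "file count"  # invented name, not part of the released package

    def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
        start = time.time()
        py_files = list(ctx.root.rglob("*.py"))

        dest = ctx.workdir / "meta" / "999_file_count.txt"  # invented output path
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_text(f"Python files: {len(py_files)}\n", encoding="utf-8")

        return StepResult(self.name, "OK", int(time.time() - start), "")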
pybundle/steps/migration_history.py
ADDED
@@ -0,0 +1,336 @@
"""
Step: Migration History Tracking
Analyze database migrations and track migration status.
"""

import re
from pathlib import Path
from typing import Dict, List, Set, Tuple, Optional

from .base import Step, StepResult


class MigrationHistoryStep(Step):
    """Track and analyze database migrations."""

    name = "migration history"

    def run(self, ctx: "BundleContext") -> StepResult:  # type: ignore[name-defined]
        """Analyze database migrations."""
        import time

        start = time.time()

        root = ctx.root

        # Find migrations
        migrations = self._find_migrations(root)

        # Generate report
        lines = [
            "=" * 80,
            "DATABASE MIGRATION ANALYSIS REPORT",
            "=" * 80,
            "",
        ]

        # Summary
        lines.extend(
            [
                "SUMMARY",
                "=" * 80,
                "",
                f"Migration frameworks detected: {len(migrations['frameworks'])}",
                f"Total migration files found: {migrations['total_migrations']}",
                "",
            ]
        )

        if not migrations["frameworks"]:
            lines.extend(
                [
                    "⊘ No database migrations detected",
                    "",
                    "This project does not appear to use database migrations.",
                    "Consider using migrations for schema versioning and deployment.",
                    "",
                ]
            )
        else:
            # Framework breakdown
            lines.extend(
                [
                    "MIGRATION FRAMEWORKS DETECTED",
                    "=" * 80,
                    "",
                ]
            )

            for framework, details in migrations["frameworks"].items():
                lines.append(f"✓ {framework}")
                lines.append(f"  Location: {details['location']}")
                lines.append(f"  Migration count: {len(details['migrations'])}")

                if details["migrations"]:
                    lines.append("  Files:")
                    for mig_file in sorted(details["migrations"])[:10]:
                        lines.append(f"    - {mig_file}")
                    if len(details["migrations"]) > 10:
                        lines.append(
                            f"    ... and {len(details['migrations']) - 10} more"
                        )

                lines.append("")

            # Django-specific analysis
            if "Django" in migrations["frameworks"]:
                django_info = migrations["frameworks"]["Django"]
                lines.extend(
                    [
                        "DJANGO MIGRATIONS DETAIL",
                        "=" * 80,
                        "",
                    ]
                )

                lines.append(f"Total Django migrations: {len(django_info['migrations'])}")

                # Extract migration info
                initial_count = sum(
                    1
                    for m in django_info["migrations"]
                    if "0001_initial" in m or "initial" in m.lower()
                )
                lines.append(f"Initial migrations: {initial_count}")

                # Check for dependencies
                has_dependencies = self._check_django_dependencies(
                    root, django_info["location"]
                )
                if has_dependencies:
                    lines.append("✓ Migration dependencies detected")
                else:
                    lines.append("⚠ No migration dependencies detected")

                lines.append("")

                lines.append("Latest migrations:")
                for mig_file in sorted(django_info["migrations"])[-5:]:
                    lines.append(f"  - {mig_file}")

                lines.append("")

            # Alembic-specific analysis
            if "Alembic" in migrations["frameworks"]:
                alembic_info = migrations["frameworks"]["Alembic"]
                lines.extend(
                    [
                        "ALEMBIC MIGRATIONS DETAIL",
                        "=" * 80,
                        "",
                    ]
                )

                lines.append(f"Total Alembic versions: {len(alembic_info['migrations'])}")

                # Check for downgrade capability
                versions_with_downgrade = self._check_alembic_downgrades(
                    root, alembic_info["location"]
                )
                lines.append(
                    f"Versions with downgrade support: {versions_with_downgrade}"
                )

                lines.append("Latest versions:")
                for mig_file in sorted(alembic_info["migrations"])[-5:]:
                    lines.append(f"  - {mig_file}")

                lines.append("")

        # Migration statistics
        if migrations["total_migrations"] > 0:
            lines.extend(
                [
                    "=" * 80,
                    "MIGRATION STATISTICS",
                    "=" * 80,
                    "",
                ]
            )

            lines.append(f"Total migration files: {migrations['total_migrations']}")

            # Check for common issues
            issues = self._check_migration_issues(root, migrations)
            if issues:
                lines.append(f"Potential issues found: {len(issues)}")
                lines.append("")
                for issue in issues[:10]:
                    lines.append(f"  ⚠ {issue}")
                if len(issues) > 10:
                    lines.append(f"  ... and {len(issues) - 10} more")
            else:
                lines.append("✓ No obvious migration issues detected")

            lines.append("")

        # Recommendations
        lines.extend(
            [
                "=" * 80,
                "RECOMMENDATIONS",
                "=" * 80,
                "",
            ]
        )

        if migrations["frameworks"]:
            lines.append("  - Review migration dependencies for circular references")
            lines.append("  - Ensure downgrade paths are tested")
            lines.append("  - Keep migration files in version control")
            lines.append("  - Document any manual migrations")
            lines.append("  - Test migrations in CI/CD pipeline")
            lines.append("  - Consider squashing old migrations periodically")

        else:
            lines.append("  - Consider implementing database migrations")
            lines.append("  - Django projects: python manage.py makemigrations")
            lines.append("  - SQLAlchemy projects: alembic init migrations")
            lines.append("  - Tortoise ORM: aerich init-db")
            lines.append("  - Benefits: reproducible deployments, rollback capability")

        lines.append("")

        # Write report
        output = "\n".join(lines)
        dest = ctx.workdir / "meta" / "140_migrations.txt"
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_text(output, encoding="utf-8")

        elapsed = int(time.time() - start)
        return StepResult(self.name, "OK", elapsed, "")

    def _find_migrations(self, root: Path) -> Dict:
        """Find all migration directories and files."""
        frameworks = {}
        total_migrations = 0

        # Django migrations
        django_migrations = list(root.rglob("migrations"))
        for mig_dir in django_migrations:
            # Check if it's a Django migration directory
            init_file = mig_dir / "__init__.py"
            py_files = list(mig_dir.glob("*.py"))

            if init_file.exists() and any(
                f.name.startswith(("0001_", "0002_", "0003_")) or "000" in f.name
                for f in py_files
            ):
                rel_path = str(mig_dir.relative_to(root))
                migration_files = [
                    f.name
                    for f in py_files
                    if f.name != "__init__.py" and f.name != "__pycache__"
                ]
                if migration_files:
                    if "Django" not in frameworks:
                        frameworks["Django"] = {"location": rel_path, "migrations": []}
                    frameworks["Django"]["migrations"].extend(migration_files)
                    total_migrations += len(migration_files)

        # Alembic migrations
        alembic_dirs = list(root.rglob("alembic"))
        for alembic_dir in alembic_dirs:
            versions_dir = alembic_dir / "versions"
            if versions_dir.exists():
                py_files = list(versions_dir.glob("*.py"))
                migration_files = [f.name for f in py_files if f.name != "__init__.py"]
                if migration_files:
                    rel_path = str(alembic_dir.relative_to(root))
                    frameworks["Alembic"] = {
                        "location": rel_path,
                        "migrations": migration_files,
                    }
                    total_migrations += len(migration_files)

        # Check for Tortoise ORM migrations
        tortoise_dirs = list(root.rglob("aerich_migrations"))
        for tortoise_dir in tortoise_dirs:
            sql_files = list(tortoise_dir.glob("*.sql"))
            if sql_files:
                rel_path = str(tortoise_dir.relative_to(root))
                migration_files = [f.name for f in sql_files]
                frameworks["Tortoise ORM"] = {
                    "location": rel_path,
                    "migrations": migration_files,
                }
                total_migrations += len(migration_files)

        return {
            "frameworks": frameworks,
            "total_migrations": total_migrations,
        }

    def _check_django_dependencies(self, root: Path, location: str) -> bool:
        """Check if Django migrations have dependency information."""
        mig_dir = root / location
        if not mig_dir.exists():
            return False

        py_files = list(mig_dir.glob("*.py"))
        for py_file in py_files:
            if py_file.name == "__init__.py":
                continue
            try:
                content = py_file.read_text(encoding="utf-8", errors="ignore")
                if "dependencies" in content and "[" in content:
                    return True
            except (OSError, UnicodeDecodeError):
                continue

        return False

    def _check_alembic_downgrades(self, root: Path, location: str) -> int:
        """Check how many Alembic versions have downgrade functions."""
        versions_dir = root / location / "versions"
        if not versions_dir.exists():
            return 0

        with_downgrade = 0
        py_files = list(versions_dir.glob("*.py"))

        for py_file in py_files:
            try:
                content = py_file.read_text(encoding="utf-8", errors="ignore")
                if "def downgrade()" in content:
                    with_downgrade += 1
            except (OSError, UnicodeDecodeError):
                continue

        return with_downgrade

    def _check_migration_issues(self, root: Path, migrations: Dict) -> List[str]:
        """Check for common migration issues."""
        issues = []

        # Check for migration files without a corresponding reverse path
        if "Alembic" in migrations["frameworks"]:
            alembic_info = migrations["frameworks"]["Alembic"]
            alembic_dir = root / alembic_info["location"]
            versions_dir = alembic_dir / "versions"

            if versions_dir.exists():
                for py_file in versions_dir.glob("*.py"):
                    try:
                        content = py_file.read_text(encoding="utf-8", errors="ignore")
                        # A version with upgrade() but no downgrade() cannot be rolled back.
                        if "def upgrade()" in content and "def downgrade()" not in content:
                            issues.append(
                                f"Missing downgrade path in {py_file.name}"
                            )
                    except (OSError, UnicodeDecodeError):
                        continue

        return issues
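
The Alembic checks above key on the literal def upgrade() / def downgrade() signatures that Alembic's default revision template generates. For reference, a version file that _check_alembic_downgrades would count and _check_migration_issues would leave unflagged looks roughly like this; the revision IDs and table are invented for illustration:

"""create users table (illustrative Alembic revision)"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic (values invented for this example)
revision = "a1b2c3d4e5f6"
down_revision = None


def upgrade():
    op.create_table(
        "users",
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("email", sa.String(255), nullable=False),
    )


def downgrade():
    op.drop_table("users")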
pybundle/steps/mutation_testing.py
ADDED
@@ -0,0 +1,141 @@
"""
Mutation testing with mutmut - Milestone 4 (v1.4.1)
"""

from __future__ import annotations

import subprocess
import time
from dataclasses import dataclass

from .base import StepResult
from ..context import BundleContext
from ..tools import which


@dataclass
class MutationTestingStep:
    """
    Mutation testing to measure test suite effectiveness.

    EXPENSIVE: Disabled by default. Runs many test executions with code mutations.

    Outputs:
    - logs/72_mutation_testing.txt: Mutation testing results
    """

    name: str = "mutation_testing"

    def run(self, ctx: BundleContext) -> StepResult:
        start = time.time()

        # Only run if explicitly enabled (very slow!)
        if not ctx.options.enable_mutation_testing:
            return StepResult(
                self.name, "SKIP", 0, "mutation testing not enabled (slow!)"
            )

        # Check for mutmut
        mutmut = which("mutmut")
        if not mutmut:
            output_file = ctx.workdir / "logs" / "72_mutation_testing.txt"
            output_file.parent.mkdir(parents=True, exist_ok=True)
            output_file.write_text(
                "mutmut not found; install with: pip install mutmut\n", encoding="utf-8"
            )
            return StepResult(self.name, "SKIP", 0, "mutmut not installed")

        if not ctx.tools.pytest:
            return StepResult(self.name, "SKIP", 0, "pytest not found")

        tests_dir = ctx.root / "tests"
        if not tests_dir.is_dir():
            return StepResult(self.name, "SKIP", 0, "no tests/ directory")

        ctx.emit("  ⚠️ Running mutation testing (this may take several minutes)...")

        output_file = ctx.workdir / "logs" / "72_mutation_testing.txt"
        output_file.parent.mkdir(parents=True, exist_ok=True)

        try:
            # First, run `mutmut run` to generate and execute the mutations
            run_result = subprocess.run(
                [mutmut, "run", "--paths-to-mutate", "."],
                cwd=ctx.root,
                capture_output=True,
                text=True,
                timeout=600,  # 10 minute timeout (mutation testing is SLOW)
            )

            # Then get the results summary
            results_result = subprocess.run(
                [mutmut, "results"],
                cwd=ctx.root,
                capture_output=True,
                text=True,
                timeout=60,
            )

            # Generate report
            with output_file.open("w") as f:
                f.write("=" * 70 + "\n")
                f.write("MUTATION TESTING (mutmut)\n")
                f.write("=" * 70 + "\n")
                f.write("⚠️ WARNING: Mutation testing is VERY SLOW\n")
                f.write("=" * 70 + "\n\n")

                f.write("MUTATION RUN OUTPUT:\n")
                f.write("-" * 70 + "\n")
                f.write(run_result.stdout)
                if run_result.stderr:
                    f.write("\nErrors:\n")
                    f.write(run_result.stderr)

                f.write("\n" + "=" * 70 + "\n")
                f.write("MUTATION RESULTS SUMMARY:\n")
                f.write("-" * 70 + "\n")
                f.write(results_result.stdout)
                if results_result.stderr:
                    f.write("\nErrors:\n")
                    f.write(results_result.stderr)

                f.write("\n" + "=" * 70 + "\n")
                f.write("INTERPRETATION:\n")
                f.write("-" * 70 + "\n")
                f.write("- Killed mutations: Your tests caught the bug (GOOD!)\n")
                f.write("- Survived mutations: Your tests missed the bug (BAD!)\n")
                f.write("- Timeout/Suspicious: Tests took too long or behaved oddly\n")
                f.write("\n")
                f.write("Mutation Score = Killed / (Killed + Survived + Timeout)\n")
                f.write("Target: >80% mutation score for well-tested code\n")
                f.write("\n")
                f.write("To see specific survived mutations:\n")
                f.write("  mutmut show <id>\n")
                f.write("\n")
                f.write("=" * 70 + "\n")
                f.write("RECOMMENDATIONS:\n")
                f.write("- Add tests for survived mutations\n")
                f.write("- Focus on edge cases and boundary conditions\n")
                f.write("- Improve assertion quality (not just 'assert result')\n")

            elapsed = int((time.time() - start) * 1000)

            if run_result.returncode == 0:
                return StepResult(self.name, "OK", elapsed)
            else:
                return StepResult(
                    self.name, "FAIL", elapsed, f"exit {run_result.returncode}"
                )

        except subprocess.TimeoutExpired:
            elapsed = int((time.time() - start) * 1000)
            with output_file.open("w") as f:
                f.write("Mutation testing timed out after 10 minutes\n")
                f.write(
                    "Consider testing a smaller subset or using --paths-to-mutate\n"
                )
            return StepResult(self.name, "FAIL", elapsed, "timeout")
        except Exception as e:
            elapsed = int((time.time() - start) * 1000)
            return StepResult(self.name, "FAIL", elapsed, str(e))
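
The report written above defines Mutation Score = Killed / (Killed + Survived + Timeout). As a quick worked example with invented counts, 170 killed, 25 survived, and 5 timeouts give 170 / 200 = 85%, which clears the >80% target the report suggests. A small helper doing the same arithmetic:

def mutation_score(killed: int, survived: int, timeout: int) -> float:
    """Mutation score as defined in the report above."""
    total = killed + survived + timeout
    return killed / total if total else 0.0


# Invented counts: 170 / (170 + 25 + 5) = 0.85
assert abs(mutation_score(170, 25, 5) - 0.85) < 1e-9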
pybundle/steps/mypy.py
ADDED
@@ -0,0 +1,103 @@
from __future__ import annotations

import subprocess  # nosec B404 - Required for tool execution, paths validated
import time
import sys
import re
from dataclasses import dataclass
from pathlib import Path

from .base import StepResult
from ..context import BundleContext
from ..tools import which


def _has_mypy_config(root: Path) -> bool:
    if (root / "mypy.ini").is_file():
        return True
    if (root / "setup.cfg").is_file():
        return True
    if (root / "pyproject.toml").is_file():
        # we don't parse TOML here; presence is enough for v1
        return True
    return False


def _check_python_version_mismatch(root: Path) -> tuple[str | None, str | None]:
    """Check for Python version mismatch between runtime and mypy config.

    Returns (runtime_version, config_version) where config_version is from mypy.ini
    """
    # Get runtime Python version (e.g., "3.11")
    runtime_version = f"{sys.version_info.major}.{sys.version_info.minor}"

    # Check mypy.ini for python_version
    mypy_ini = root / "mypy.ini"
    if mypy_ini.is_file():
        try:
            content = mypy_ini.read_text(encoding="utf-8")
            match = re.search(r'^python_version\s*=\s*["\']?(\d+\.\d+)', content, re.MULTILINE)
            if match:
                config_version = match.group(1)
                if config_version != runtime_version:
                    return (runtime_version, config_version)
        except Exception:
            pass

    return (None, None)


@dataclass
class MypyStep:
    name: str = "mypy"
    target: str = "pybundle"
    outfile: str = "logs/33_mypy.txt"

    def run(self, ctx: BundleContext) -> StepResult:
        start = time.time()
        out = ctx.workdir / self.outfile
        out.parent.mkdir(parents=True, exist_ok=True)

        mypy = which("mypy")
        if not mypy:
            out.write_text(
                "mypy not found; skipping (pip install mypy)\n", encoding="utf-8"
            )
            return StepResult(self.name, "SKIP", 0, "missing mypy")

        if not _has_mypy_config(ctx.root):
            out.write_text(
                "no mypy config detected (mypy.ini/setup.cfg/pyproject.toml); skipping\n",
                encoding="utf-8",
            )
            return StepResult(self.name, "SKIP", 0, "no config")

        # Check for Python version mismatch
        runtime_ver, config_ver = _check_python_version_mismatch(ctx.root)
        version_warning = ""
        if runtime_ver and config_ver:
            version_warning = (
                f"\n⚠ WARNING: Python version mismatch!\n"
                f"  Runtime: Python {runtime_ver}\n"
                f"  mypy.ini: python_version = {config_ver}\n"
                f"  This may allow/reject syntax that won't work at runtime.\n"
                f"  Recommendation: Set mypy.ini python_version = {runtime_ver}\n\n"
            )

        cmd = [mypy, "--exclude", "^artifacts/", self.target]
        header = f"## PWD: {ctx.root}\n## CMD: {' '.join(cmd)}\n{version_warning}\n"

        cp = subprocess.run(  # nosec B603
            cmd, cwd=str(ctx.root), text=True, capture_output=True, check=False
        )
        text = header + (cp.stdout or "") + ("\n" + cp.stderr if cp.stderr else "")
        out.write_text(ctx.redact_text(text), encoding="utf-8")

        dur = int(time.time() - start)
        note = "" if cp.returncode == 0 else f"exit={cp.returncode} (type findings)"

        # Add version warning to note if present
        if version_warning:
            note = (note + "; " if note else "") + f"Python {runtime_ver} vs config {config_ver}"

        return StepResult(self.name, "PASS", dur, note)