gwc-pybundle 0.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,300 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import shutil
5
+ import time
6
+ from dataclasses import dataclass
7
+ from pathlib import Path
8
+
9
+ from .base import StepResult
10
+ from ..context import BundleContext
11
+ from ..policy import AIContextPolicy, PathFilter
12
+
13
+
14
# Directory names pruned wherever they appear in the tree: VCS metadata,
# virtual environments, linter/test caches, and build output.
DEFAULT_EXCLUDE_DIRS = {
    ".git",
    ".venv",
    ".mypy_cache",
    ".ruff_cache",
    ".pytest_cache",
    "__pycache__",
    "node_modules",
    "dist",
    "build",
    "target",
    ".next",
    ".nuxt",
    "artifacts",
    ".cache",
    # tool-managed environment directories (tox/nox/direnv) — these hold
    # throwaway interpreters and should never be bundled
    ".tox",
    ".nox",
    ".direnv",
}
30
+
31
# Well-known top-level *files* copied verbatim when present at the repo root.
# Fix: ".tox", ".nox" and ".direnv" were removed from this list — they are
# directories, not files, and the is_file() gate in CuratedCopyStep.run()
# meant they could never be copied from here anyway.
DEFAULT_INCLUDE_FILES = [
    "pyproject.toml",
    "requirements.txt",
    "poetry.lock",
    "pdm.lock",
    "uv.lock",
    "setup.cfg",
    "setup.py",
    "mypy.ini",
    "ruff.toml",
    ".ruff.toml",
    "pytest.ini",
    "tox.ini",
    ".python-version",
    "README.md",
    "README.rst",
    "README.txt",
    "CHANGELOG.md",
    "LICENSE",
    "LICENSE.md",
]
55
+
56
# Common top-level directories copied recursively (subject to filtering)
# when they exist at the repo root.
DEFAULT_INCLUDE_DIRS = [
    "src",
    "tests",
    "tools",
    "docs",
    ".github",
]
63
+
64
# Fallback glob patterns applied last by the copy step; matches that fall
# under excluded or venv directories are filtered out before copying.
DEFAULT_INCLUDE_GLOBS = [
    # common python project layouts
    "*.py",
    "*/**/*.py",
    # templates/assets if present
    "templates/**/*",
    "static/**/*",
]
72
+
73
def _is_venv_root(p: Path) -> bool:
    """Heuristically decide whether *p* is the root of a virtual environment.

    Signals checked, in order: the standard ``pyvenv.cfg`` metadata file,
    a POSIX-style ``bin/`` layout, a Windows-style ``Scripts/`` layout, and
    the legacy ``.Python`` marker left by some older virtualenv tooling.
    """
    if not p.is_dir():
        return False

    # Strong marker: every standard venv writes this metadata file.
    if (p / "pyvenv.cfg").is_file():
        return True

    # POSIX layout: a python executable together with an activate script,
    # or a lib/pythonX.Y/site-packages directory.
    posix_bin = p / "bin"
    if posix_bin.is_dir():
        has_python = (posix_bin / "python").exists() or (posix_bin / "python3").exists()
        if has_python and (posix_bin / "activate").is_file():
            return True
        if any((p / "lib").glob("python*/site-packages")):
            return True

    # Windows layout: Scripts\python(.exe) plus activate, or Lib\site-packages.
    win_scripts = p / "Scripts"
    if win_scripts.is_dir():
        has_python = (
            (win_scripts / "python.exe").is_file() or (win_scripts / "python").exists()
        )
        if has_python and (win_scripts / "activate").is_file():
            return True
        if (p / "Lib" / "site-packages").is_dir():
            return True

    # Legacy marker kept by some macOS / older virtualenv setups.
    return (p / ".Python").exists()
105
+
106
def _is_under_venv(root: Path, rel_path: Path) -> bool:
    """Return True if any ancestor of root/rel_path is a virtualenv root.

    For a relative path ``a/b/c`` this inspects ``root/a``, ``root/a/b``
    and ``root/a/b/c`` in turn.
    """
    prefix = root
    return any(_is_venv_root(prefix := prefix / part) for part in rel_path.parts)
114
+
115
def _is_excluded_path(rel: Path, exclude_dirs: set[str]) -> bool:
    """Return True when any component of *rel* names an excluded directory."""
    return any(component in exclude_dirs for component in rel.parts)
120
+
121
def _safe_copy_file(src: Path, dst: Path) -> None:
    """Copy *src* to *dst*, creating missing parent directories.

    shutil.copy2 is used so file mode and timestamps carry over where the
    platform supports it.
    """
    parent = dst.parent
    parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, dst)
125
+
126
def _copy_tree_filtered(
    root: Path, src_dir: Path, dst_dir: Path, filt: PathFilter
) -> tuple[int, int]:
    """
    Copy directory tree while pruning excluded directories and skipping excluded files.

    Args:
        root: repository root, passed through to the PathFilter's file check.
        src_dir: directory to copy from; layout under it is preserved.
        dst_dir: destination root for the copied tree.
        filt: PathFilter deciding which dirs are pruned and files included.

    Returns: (files_copied, dirs_pruned)
    """
    files = 0
    pruned = 0

    for dirpath, dirnames, filenames in os.walk(src_dir):
        dp = Path(dirpath)
        rel = dp.relative_to(src_dir)

        # prune dirs (name-based + venv-structure); rewriting dirnames in
        # place stops os.walk from descending into the pruned subtrees
        kept: list[str] = []
        for d in dirnames:
            if filt.should_prune_dir(dp, d):
                pruned += 1
                continue
            kept.append(d)
        dirnames[:] = kept

        for fn in filenames:
            sp = dp / fn

            # apply policy filter (extensions + excluded dirs, etc.)
            if not filt.should_include_file(root, sp):
                continue

            tp = dst_dir / rel / fn
            try:
                _safe_copy_file(sp, tp)
                files += 1
            except OSError:
                # best effort: unreadable/uncopyable files are skipped silently
                continue

    return files, pruned
164
+
165
def _guess_package_dirs(root: Path, exclude_dirs: set[str]) -> list[Path]:
    """
    Heuristic: top-level dirs containing __init__.py are packages.

    Hidden directories and names present in *exclude_dirs* are skipped;
    the result is in sorted (iteration) order.
    """
    return [
        child
        for child in sorted(root.iterdir())
        if child.is_dir()
        and not child.name.startswith(".")
        and child.name not in exclude_dirs
        and (child / "__init__.py").is_file()
    ]
180
+
181
@dataclass
class CuratedCopyStep:
    """Copy a curated subset of the repository into the bundle's src/ dir.

    Selection runs in four passes: (1) well-known top-level files,
    (2) common top-level directories, (3) detected top-level packages, and
    (4) optional glob patterns. Passes 2-4 stop once ``max_files`` copies
    have been made; pass 1 does not check the cap. Always returns PASS.
    """

    # Human-readable step name reported in the StepResult.
    name: str = "copy curated source pack"
    # Explicit overrides; None means "fall back to the policy's list".
    include_files: list[str] | None = None
    include_dirs: list[str] | None = None
    include_globs: list[str] | None = None
    exclude_dirs: set[str] | None = None
    # Soft cap on copied files; checked between units, so it can be
    # overshot by the size of the last copied directory tree.
    max_files: int = 20000
    policy: AIContextPolicy | None = None

    def run(self, ctx: BundleContext) -> StepResult:
        """Execute the curated copy and write a small manifest under meta/."""
        start = time.time()
        dst_root = ctx.srcdir  # bundle/src
        dst_root.mkdir(parents=True, exist_ok=True)

        policy = self.policy or AIContextPolicy()

        # Effective configuration: explicit overrides win over the policy.
        exclude = set(self.exclude_dirs) if self.exclude_dirs else set(policy.exclude_dirs)
        filt = PathFilter(exclude_dirs=exclude, exclude_file_exts=set(policy.exclude_file_exts))
        include_files = self.include_files or list(policy.include_files)
        include_dirs = self.include_dirs or list(policy.include_dirs)
        include_globs = self.include_globs or list(policy.include_globs)

        copied = 0
        pruned = 0

        # 1) Include well-known top-level files if present
        for rel_file in include_files:
            sp = ctx.root / rel_file
            if not filt.should_include_file(ctx.root, sp):
                continue
            if sp.is_file():
                if _is_excluded_path(Path(rel_file), exclude):
                    continue
                try:
                    _safe_copy_file(sp, dst_root / rel_file)
                    copied += 1
                except OSError:
                    # best effort: an unreadable file must not fail the step
                    pass

        # 2) Include common top-level dirs (src/tests/tools)
        for rel_dir in include_dirs:
            sp = ctx.root / rel_dir
            if sp.is_dir() and rel_dir not in exclude:
                # skip directories that structurally look like virtualenvs
                if _is_venv_root(sp):
                    pruned += 1
                    continue
                if _is_excluded_path(Path(rel_dir), exclude):
                    continue
                files_copied, dirs_pruned = _copy_tree_filtered(
                    ctx.root, sp, dst_root / rel_dir, filt
                )
                copied += files_copied
                pruned += dirs_pruned
                if copied >= self.max_files:
                    break

        # 3) Include detected package dirs at root (if not already copied)
        if copied < self.max_files:
            for pkg_dir in _guess_package_dirs(ctx.root, exclude):
                rel_pkg_name = pkg_dir.name
                if (dst_root / rel_pkg_name).exists():
                    continue
                files_copied, dirs_pruned = _copy_tree_filtered(
                    ctx.root, pkg_dir, dst_root / rel_pkg_name, filt
                )
                copied += files_copied
                pruned += dirs_pruned
                if copied >= self.max_files:
                    break

        # 4) Optional globs (best-effort; avoid deep explosion by pruning excluded dirs)
        # Globs are applied as-is, but any match under an excluded or
        # venv-like directory is skipped; already-copied targets are skipped too.
        if copied < self.max_files:
            for g in include_globs:
                for sp in ctx.root.glob(g):
                    try:
                        if not sp.exists():
                            continue
                        rel_path = sp.relative_to(ctx.root)
                        if _is_excluded_path(rel_path, exclude):
                            continue

                        if _is_under_venv(ctx.root, rel_path):
                            pruned += 1
                            continue

                        dst = dst_root / rel_path
                        if dst.exists():
                            continue

                        if sp.is_file():
                            _safe_copy_file(sp, dst)
                            copied += 1
                        elif sp.is_dir():
                            files_copied, dirs_pruned = _copy_tree_filtered(
                                ctx.root, sp, dst_root / rel_path, filt
                            )
                            copied += files_copied
                            pruned += dirs_pruned
                        if copied >= self.max_files:
                            break
                    except Exception:
                        # best effort: a single bad match must not abort the pass
                        continue
                if copied >= self.max_files:
                    break

        # write a short manifest for sanity
        manifest = ctx.workdir / "meta" / "50_copy_manifest.txt"
        manifest.parent.mkdir(parents=True, exist_ok=True)
        manifest.write_text(
            f"copied_files={copied}\npruned_dirs={pruned}\nmax_files={self.max_files}\n",
            encoding="utf-8",
        )

        dur = int(time.time() - start)
        note = f"copied={copied} pruned={pruned}"
        if copied >= self.max_files:
            note += " (HIT MAX)"
        return StepResult(self.name, "PASS", dur, note)
@@ -0,0 +1,204 @@
1
+ from __future__ import annotations
2
+
3
+ import re
4
+ import time
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+
8
+ from .base import StepResult
9
+ from ..context import BundleContext
10
+
11
+
12
# Repo-relative path prefixes that are never treated as error-referenced
# files (VCS metadata, virtualenvs, caches, build output).
DEFAULT_EXCLUDE_PREFIXES = (
    ".git/",
    ".venv/",
    ".mypy_cache/",
    ".ruff_cache/",
    ".pytest_cache/",
    "__pycache__/",
    "node_modules/",
    "dist/",
    "build/",
    "artifacts/",
)
24
+
25
# Patterns that recognize file references in tool output (ported from the
# original shell sed rules). Each pattern captures the path in group 1.
# 1) tool-style: path:line(:col)...
_RE_COLON_LINE = re.compile(r"^([A-Za-z0-9_.\/-]+\.[A-Za-z0-9]+):\d+(?::\d+)?\b.*$")

# 2) pytest traceback: File "path", line N
_RE_PYTEST_FILE = re.compile(r'^\s*File "([^"]+)", line \d+\b.*$')

# 3) mypy: (optional "mypy:") ./path:line: (error|note|warning):
_RE_MYPY_LINE = re.compile(
    r"^(?:mypy:\s*)?(?:\./)?([A-Za-z0-9_.\/-]+\.[A-Za-z0-9]+):\d+:\s*(?:error|note|warning):.*$"
)

# 4) mypy without a line number (rare): path: (error|note|warning): ...
_RE_MYPY_NOLINE = re.compile(
    r"^(?:mypy:\s*)?(?:\./)?([A-Za-z0-9_.\/-]+\.[A-Za-z0-9]+):\s*(?:error|note|warning):.*$"
)
41
+
42
+
43
+ def _normalize_to_repo_rel(root: Path, p: str) -> str | None:
44
+ p = p.strip()
45
+ if not p:
46
+ return None
47
+
48
+ # remove leading ./ for consistency
49
+ if p.startswith("./"):
50
+ p = p[2:]
51
+
52
+ # absolute path -> must be under repo root
53
+ if p.startswith("/"):
54
+ try:
55
+ rp = Path(p).resolve()
56
+ rr = rp.relative_to(root.resolve())
57
+ return str(rr).replace("\\", "/")
58
+ except Exception:
59
+ return None
60
+
61
+ # relative path
62
+ return p.replace("\\", "/")
63
+
64
+
65
def _is_allowed_repo_file(root: Path, rel: str) -> bool:
    """Return True if *rel* names an existing regular file inside *root*
    that is not under a junk directory.

    Bug fix: the previous ``rel.lstrip("./")`` stripped the *character set*
    {'.', '/'} — mangling dotfiles such as ``.gitignore`` into
    ``gitignore`` and wrongly rejecting them. Only a literal ``./`` prefix
    (and stray leading slashes) should be removed.
    """
    # strip repeated literal "./" prefixes, never a bare leading dot
    while rel.startswith("./"):
        rel = rel[2:]
    rel = rel.lstrip("/")
    if not rel or rel.endswith("/"):
        return False

    # exclude common junk
    for pref in DEFAULT_EXCLUDE_PREFIXES:
        if rel.startswith(pref):
            return False
    if "/__pycache__/" in f"/{rel}/":
        return False

    # must exist and be a file inside repo (resolve() defeats ../ escapes)
    fp = (root / rel).resolve()
    try:
        fp.relative_to(root.resolve())
    except Exception:
        return False

    return fp.is_file()
85
+
86
+
87
def _extract_paths_from_text(text: str) -> list[str]:
    """Collect candidate file paths referenced by tool output.

    Each line is tried against the known patterns in priority order; the
    first match per line contributes its captured path.
    """
    patterns = (_RE_COLON_LINE, _RE_PYTEST_FILE, _RE_MYPY_LINE, _RE_MYPY_NOLINE)
    found: list[str] = []
    for line in text.splitlines():
        for rx in patterns:
            m = rx.match(line)
            if m:
                found.append(m.group(1))
                break
    return found
111
+
112
+
113
@dataclass
class ErrorReferencedFilesStep:
    """Scan lint/type/test logs for referenced files and copy them.

    Paths extracted from the configured log files are normalized to
    repo-relative form, filtered to real files inside the repo, and copied
    into ``src/_error_refs`` (up to ``max_files``). A list file, a count
    file and a small report are written as side outputs. Always PASSes.
    """

    # Human-readable step name reported in the StepResult.
    name: str = "collect error-referenced files"
    # Hard cap on how many referenced files are copied.
    max_files: int = 250
    # Paths are relative to the bundle workdir
    log_files: list[str] | None = None

    def run(self, ctx: BundleContext) -> StepResult:
        """Collect, filter and copy error-referenced files."""
        start = time.time()

        # Default set aligned to our step numbers
        log_files = self.log_files or [
            "logs/31_ruff_check.txt",
            "logs/32_ruff_format_check.txt",
            "logs/33_mypy.txt",
            "logs/34_pytest_q.txt",
        ]

        out_list = ctx.workdir / "error_files_from_logs.txt"
        out_count = ctx.workdir / "error_refs_count.txt"
        report = ctx.metadir / "60_error_refs_report.txt"

        # destination inside the bundled source tree (created even if empty)
        dest_root = ctx.srcdir / "_error_refs"
        dest_root.mkdir(parents=True, exist_ok=True)

        # Collect candidate paths
        candidates: set[str] = set()
        scanned = 0
        missing_logs = 0

        for lf in log_files:
            lp = ctx.workdir / lf
            if not lp.is_file():
                missing_logs += 1
                continue
            scanned += 1
            try:
                txt = lp.read_text(encoding="utf-8", errors="replace")
            except Exception:
                # unreadable log: skip (it still counts as scanned)
                continue

            for raw in _extract_paths_from_text(txt):
                norm = _normalize_to_repo_rel(ctx.root, raw)
                if norm:
                    candidates.add(norm)

        # Normalize / filter to real repo files
        allowed = sorted([p for p in candidates if _is_allowed_repo_file(ctx.root, p)])

        # Write list file (even if empty)
        out_list.write_text(
            "\n".join(allowed) + ("\n" if allowed else ""), encoding="utf-8"
        )

        # Copy up to max_files
        copied = 0
        for rel in allowed:
            if copied >= self.max_files:
                break
            src = ctx.root / rel
            dst = dest_root / rel
            dst.parent.mkdir(parents=True, exist_ok=True)
            try:
                # NOTE: raw byte copy — file mode/timestamps are NOT
                # preserved (shutil.copy2 would be needed for that)
                dst.write_bytes(src.read_bytes())
                copied += 1
            except Exception:
                # best effort: an unreadable file must not fail the step
                continue

        out_count.write_text(f"{copied}\n", encoding="utf-8")

        report.write_text(
            "\n".join(
                [
                    f"scanned_logs={scanned}",
                    f"missing_logs={missing_logs}",
                    f"candidates_total={len(candidates)}",
                    f"allowed_repo_files={len(allowed)}",
                    f"copied={copied}",
                    f"max_files={self.max_files}",
                    "dest=src/_error_refs",
                ]
            )
            + "\n",
            encoding="utf-8",
        )

        dur = int(time.time() - start)
        note = f"allowed={len(allowed)} copied={copied}"
        if copied >= self.max_files:
            note += " (HIT MAX)"
        return StepResult(self.name, "PASS", dur, note)
@@ -0,0 +1,166 @@
1
+ from __future__ import annotations
2
+
3
+ import time
4
+ from dataclasses import asdict
5
+ from datetime import datetime, timezone
6
+ from pathlib import Path
7
+ from typing import Any
8
+
9
+ from .base import Step, StepResult
10
+
11
+
12
+ def _utc_now() -> str:
13
+ return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
14
+
15
+
16
+ def _safe_read(path: Path) -> str:
17
+ if not path.exists():
18
+ return f"(missing: {path.as_posix()})"
19
+ return path.read_text(encoding="utf-8", errors="replace").strip()
20
+
21
+
22
+ def _tool_table(tools_obj: Any) -> list[str]:
23
+ d = (
24
+ asdict(tools_obj)
25
+ if hasattr(tools_obj, "__dataclass_fields__")
26
+ else dict(tools_obj)
27
+ )
28
+ lines = ["| Tool | Status |", "|------|--------|"]
29
+ for k in sorted(d.keys()):
30
+ v = d[k]
31
+ if v:
32
+ lines.append(f"| `{k}` | ✅ `{v}` |")
33
+ else:
34
+ lines.append(f"| `{k}` | ❌ `<missing>` |")
35
+ return lines
36
+
37
+
38
+ class HandoffMarkdownStep(Step):
39
+ name = "generate HANDOFF.md"
40
+
41
+ def run(self, ctx: Any) -> StepResult:
42
+ start = time.time()
43
+
44
+ created_utc = getattr(ctx, "created_utc", None) or _utc_now()
45
+ profile = getattr(ctx, "profile_name", "<unknown>")
46
+ root_path = Path(getattr(ctx, "root"))
47
+ project = root_path.name
48
+ root = str(root_path)
49
+ workdir_path = Path(getattr(ctx, "workdir"))
50
+ workdir = str(workdir_path)
51
+
52
+ # filenames fixed to match your repo
53
+ uname = _safe_read(workdir_path / "meta" / "21_uname.txt")
54
+ pyver = _safe_read(workdir_path / "meta" / "20_python_version.txt")
55
+
56
+ redact = bool(getattr(ctx, "redact", True))
57
+ redact_status = "enabled" if redact else "disabled"
58
+
59
+ results: list[Any] = list(getattr(ctx, "results", []))
60
+ pass_n = sum(1 for r in results if getattr(r, "status", "") == "PASS")
61
+ fail_n = sum(1 for r in results if getattr(r, "status", "") == "FAIL")
62
+ skip_n = sum(1 for r in results if getattr(r, "status", "") == "SKIP")
63
+ total_n = len(results)
64
+
65
+ overall = "FAIL" if fail_n else ("DEGRADED" if skip_n else "PASS")
66
+
67
+ # tool table
68
+ tools_obj = getattr(ctx, "tools", None) or getattr(ctx, "tooling", None)
69
+ tools_table = (
70
+ _tool_table(tools_obj) if tools_obj is not None else ["(no tools detected)"]
71
+ )
72
+
73
+ command_used = getattr(ctx, "command_used", "") or "(not captured)"
74
+
75
+ lines: list[str] = []
76
+ lines.append("# Bundle Handoff")
77
+ lines.append("")
78
+ lines.append("## Overview")
79
+ lines.append(
80
+ f"- **Bundle tool:** pybundle {getattr(ctx, 'version', '<unknown>')}"
81
+ )
82
+ lines.append(f"- **Profile:** {profile}")
83
+ lines.append(f"- **Created (UTC):** {created_utc}")
84
+ lines.append(f"- **Project:** {project}")
85
+ lines.append(f"- **Root:** {root}")
86
+ lines.append(f"- **Workdir:** {workdir}")
87
+ lines.append("")
88
+ lines.append("## System")
89
+ lines.append(f"- **OS:** {uname}")
90
+ lines.append(f"- **Python:** {pyver}")
91
+ lines.append(f"- **Redaction:** {redact_status}")
92
+ lines.append("")
93
+ lines.append("## At a glance")
94
+
95
+ roadmap_json = _safe_read(workdir_path / "meta" / "70_roadmap.json")
96
+ copy_manifest = _safe_read(workdir_path / "meta" / "50_copy_manifest.txt")
97
+ pip_freeze = _safe_read(workdir_path / "meta" / "22_pip_freeze.txt")
98
+
99
+ lines.append("## AI context summary")
100
+
101
+ copy_manifest = _safe_read(workdir_path / "meta" / "50_copy_manifest.txt").strip()
102
+ if copy_manifest:
103
+ lines.append("### Curated copy")
104
+ lines.append("```")
105
+ lines.append(copy_manifest)
106
+ lines.append("```")
107
+ else:
108
+ lines.append("- Curated copy manifest not found.")
109
+
110
+ roadmap_json = _safe_read(workdir_path / "meta" / "70_roadmap.json").strip()
111
+ if roadmap_json:
112
+ try:
113
+ import json
114
+ rj = json.loads(roadmap_json)
115
+ langs = set()
116
+ for n in rj.get("nodes", []):
117
+ if isinstance(n, dict):
118
+ lang = n.get("lang")
119
+ if lang:
120
+ langs.add(lang)
121
+ eps = rj.get("entrypoints", []) or []
122
+ lines.append(f"- **Languages detected:** {', '.join(sorted(langs)) if langs else '(none)'}")
123
+ if eps:
124
+ lines.append("- **Entrypoints:**")
125
+ for ep in eps[:10]:
126
+ node = ep.get("node") if isinstance(ep, dict) else None
127
+ reason = ep.get("reason") if isinstance(ep, dict) else None
128
+ conf = ep.get("confidence") if isinstance(ep, dict) else None
129
+ if node:
130
+ extra = ""
131
+ if reason is not None and conf is not None:
132
+ extra = f" — {reason} ({conf}/3)"
133
+ lines.append(f" - `{node}`{extra}")
134
+ else:
135
+ lines.append("- **Entrypoints:** (none detected)")
136
+ except Exception:
137
+ lines.append("- Roadmap JSON present but could not be parsed.")
138
+ else:
139
+ lines.append("- Roadmap not found.")
140
+
141
+ lines.append("")
142
+
143
+ lines.append(f"- **Overall status:** {overall}")
144
+ lines.append(
145
+ f"- **Steps:** {total_n} total — {pass_n} PASS, {fail_n} FAIL, {skip_n} SKIP"
146
+ )
147
+ lines.append("")
148
+ lines.append("## Tools")
149
+ lines.extend(tools_table)
150
+ lines.append("")
151
+ lines.append("## Command used")
152
+ lines.append("```bash")
153
+ lines.append(command_used)
154
+ lines.append("```")
155
+ lines.append("")
156
+ lines.append("## Reproduction")
157
+ lines.append("See **REPRO.md** for step-by-step reproduction instructions.")
158
+ lines.append("")
159
+
160
+ out_path = workdir_path / "HANDOFF.md"
161
+ out_path.write_text("\n".join(lines), encoding="utf-8")
162
+
163
+ secs = int(time.time() - start)
164
+ return StepResult(
165
+ name=self.name, status="PASS", seconds=secs, note="wrote HANDOFF.md"
166
+ )