@ranger1/dx 0.1.29 → 0.1.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,211 @@
1
+ #!/usr/bin/env python3
2
+ # PR context builder (deterministic).
3
+ # - Reads PR metadata + recent comments via gh
4
+ # - Reads changed files via git diff (no patch)
5
+ # - Writes Markdown context file to ~/.opencode/cache/
6
+ # - Prints exactly one JSON object to stdout
7
+
8
+ import argparse
9
+ import hashlib
10
+ import json
11
+ import os
12
+ import subprocess
13
+ import sys
14
+ from pathlib import Path
15
+
16
+
17
# Directory where generated PR-context Markdown files are cached.
CACHE_DIR = Path.home() / ".opencode" / "cache"
# Substring identifying loop-marker HTML comments previously posted on the PR.
MARKER_SUBSTR = "<!-- pr-review-loop-marker"
19
+
20
+
21
+ def _json_out(obj):
22
+ sys.stdout.write(json.dumps(obj, ensure_ascii=True))
23
+ sys.stdout.write("\n")
24
+
25
+
26
+ def _run_capture(cmd):
27
+ p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
28
+ return p.returncode, p.stdout, p.stderr
29
+
30
+
31
+ def _clip(s, n):
32
+ if s is None:
33
+ return ""
34
+ s = str(s)
35
+ return s if len(s) <= n else (s[:n] + "...")
36
+
37
+
38
+ def _safe_basename(name):
39
+ if not name:
40
+ return None
41
+ base = os.path.basename(name.strip())
42
+ if base != name.strip():
43
+ return None
44
+ if base in (".", ".."):
45
+ return None
46
+ return base
47
+
48
+
49
def _git_fetch_origin(ref):
    """Best-effort `git fetch origin <ref>`; all output and failures are ignored."""
    subprocess.run(
        ["git", "fetch", "origin", ref],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
51
+
52
+
53
def _git_numstat(base_ref):
    """Return `git diff --numstat` output against *base_ref*, or "" on failure.

    Prefers origin/<base>...HEAD, falling back to <base>...HEAD.
    """
    for spec in (f"origin/{base_ref}...HEAD", f"{base_ref}...HEAD"):
        rc, out, _ = _run_capture(["git", "diff", "--numstat", spec])
        if rc == 0:
            return out
    return ""
60
+
61
+
62
+ def _parse_numstat(numstat_text):
63
+ rows = []
64
+ for line in (numstat_text or "").splitlines():
65
+ parts = line.split("\t")
66
+ if len(parts) < 3:
67
+ continue
68
+ add_s, del_s, path = parts[0].strip(), parts[1].strip(), parts[2].strip()
69
+ if not path:
70
+ continue
71
+ rows.append((add_s, del_s, path))
72
+ return rows
73
+
74
+
75
def main(argv):
    """Collect PR metadata and changed files, write a Markdown context file,
    and print exactly one JSON result object to stdout.

    Exit codes: 0 success, 1 environment/PR lookup failure, 2 bad arguments.
    Every outcome emits exactly one JSON line via _json_out.
    """

    class _ArgParser(argparse.ArgumentParser):
        # argparse normally prints usage and exits the process; raise instead
        # so the single-JSON-object stdout contract is preserved.
        def error(self, message):
            raise ValueError(message)

    parser = _ArgParser(add_help=False)
    parser.add_argument("--pr", type=int, required=True)
    parser.add_argument("--round", type=int, default=1)
    try:
        args = parser.parse_args(argv)
    except ValueError:
        _json_out({"error": "INVALID_ARGS"})
        return 2

    # type=int already coerced these; no further casting needed.
    pr_number = args.pr
    round_num = args.round

    # Preconditions: be in a git repo and gh is authenticated.
    rc, out, _ = _run_capture(["git", "rev-parse", "--is-inside-work-tree"])
    if rc != 0 or out.strip() != "true":
        _json_out({"error": "NOT_A_GIT_REPO"})
        return 1

    if subprocess.run(["gh", "auth", "status"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode != 0:
        _json_out({"error": "GH_NOT_AUTHENTICATED"})
        return 1

    rc, owner_repo, _ = _run_capture(["gh", "repo", "view", "--json", "nameWithOwner", "--jq", ".nameWithOwner"])
    owner_repo = owner_repo.strip() if rc == 0 else ""
    if not owner_repo:
        _json_out({"error": "REPO_NOT_FOUND"})
        return 1

    fields = "number,url,title,body,isDraft,labels,baseRefName,headRefName,baseRefOid,headRefOid,comments"
    rc, pr_json, _ = _run_capture(["gh", "pr", "view", str(pr_number), "--repo", owner_repo, "--json", fields])
    if rc != 0:
        _json_out({"error": "PR_NOT_FOUND_OR_NO_ACCESS"})
        return 1
    try:
        pr = json.loads(pr_json)
    except Exception:
        _json_out({"error": "PR_NOT_FOUND_OR_NO_ACCESS"})
        return 1

    head_oid = (pr.get("headRefOid") or "").strip()
    base_ref = (pr.get("baseRefName") or "").strip() or "main"
    head_ref = (pr.get("headRefName") or "").strip()
    url = (pr.get("url") or "").strip()

    # Deterministic run id: same PR/round/head commit => same id.
    seed = f"{pr_number}:{round_num}:{head_oid}".encode("utf-8")
    run_id = hashlib.sha1(seed).hexdigest()[:12]

    _git_fetch_origin(base_ref)
    file_rows = _parse_numstat(_git_numstat(base_ref))

    labels = [
        str(label.get("name"))
        for label in (pr.get("labels") or [])
        if isinstance(label, dict) and label.get("name")
    ]

    # Count prior loop-marker comments among the 10 most recent comments.
    comments = pr.get("comments") or []
    recent = comments[-10:] if isinstance(comments, list) else []
    marker_count = 0
    for c in recent:
        if not isinstance(c, dict):
            continue
        body = c.get("body") or ""
        if isinstance(body, str) and MARKER_SUBSTR in body:
            marker_count += 1

    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    context_file = f"pr-context-pr{pr_number}-r{round_num}-{run_id}.md"
    context_path = CACHE_DIR / context_file

    # newline="\n" keeps the file byte-stable across platforms.
    with open(context_path, "w", encoding="utf-8", newline="\n") as fp:
        fp.write("# PR Context\n\n")
        fp.write(f"- Repo: {owner_repo}\n")
        fp.write(f"- PR: #{pr_number} {url}\n")
        fp.write(f"- Round: {round_num}\n")
        fp.write(f"- RunId: {run_id}\n")
        fp.write(f"- Base: {base_ref}\n")
        fp.write(f"- Head: {head_ref}\n")
        fp.write(f"- HeadOid: {head_oid}\n")
        fp.write(f"- Draft: {pr.get('isDraft')}\n")
        fp.write(f"- Labels: {', '.join(labels) if labels else '(none)'}\n")
        fp.write(f"- ExistingLoopMarkers: {marker_count}\n\n")

        fp.write("## Title\n\n")
        fp.write(_clip(pr.get("title") or "", 200) + "\n\n")

        fp.write("## Body (excerpt)\n\n")
        fp.write(_clip(pr.get("body") or "", 2000) or "(empty)")
        fp.write("\n\n")

        fp.write(f"## Changed Files ({len(file_rows)})\n\n")
        if file_rows:
            for add_s, del_s, path in file_rows:
                fp.write(f"- +{add_s} -{del_s} {path}\n")
        else:
            fp.write("(none)\n")
        fp.write("\n")

        fp.write("## Recent Comments (excerpt)\n\n")
        if recent:
            for c in recent:
                if not isinstance(c, dict):
                    continue
                author = None
                if isinstance(c.get("author"), dict):
                    author = (c.get("author") or {}).get("login")
                fp.write(f"- {author or 'unknown'}: {_clip(c.get('body') or '', 300)}\n")
        else:
            fp.write("(none)\n")

    _json_out(
        {
            "agent": "pr-context",
            "prNumber": pr_number,
            "round": round_num,
            "runId": run_id,
            "repo": {"nameWithOwner": owner_repo},
            "headOid": head_oid,
            "existingMarkerCount": marker_count,
            "contextFile": context_file,
        }
    )
    return 0
202
+
203
+
204
if __name__ == "__main__":
    try:
        raise SystemExit(main(sys.argv[1:]))
    except SystemExit:
        # Normal exit path (including the SystemExit raised above); propagate untouched.
        raise
    except Exception:
        # Last-resort handler: preserve the one-JSON-object stdout contract even on bugs,
        # then re-raise as a nonzero exit for the caller.
        _json_out({"error": "PR_CONTEXT_SCRIPT_FAILED"})
        raise SystemExit(1)
@@ -0,0 +1,256 @@
1
+ #!/usr/bin/env python3
2
+ # PR precheck workflow (all handled by this script):
3
+ # - Verify running inside a git repo
4
+ # - Verify GitHub auth (gh)
5
+ # - Read PR info (headRefName/baseRefName/mergeable)
6
+ # - Checkout PR branch (gh pr checkout) if needed
7
+ # - Fetch base branch (origin/<base>, fallback main/master)
8
+ # - If mergeable == CONFLICTING: return {"error":"PR_MERGE_CONFLICTS_UNRESOLVED"}
9
+ # - Run dx cache clear
10
+ # - Run dx lint and dx build all concurrently
11
+ # - On failure, write fixFile to ~/.opencode/cache/ and return {"ok":false,"fixFile":"..."}
12
+ # - On success, return {"ok":true}
13
+ #
14
+ # Stdout contract: print exactly one JSON object and nothing else.
15
+
16
import json
import re
import secrets
import subprocess
import sys
import threading
from pathlib import Path
22
+
23
+
24
def run(cmd, *, cwd=None, stdout_path=None, stderr_path=None):
    """Run *cmd* and return its exit code, routing output to files or DEVNULL.

    stdout_path / stderr_path: optional file paths receiving the respective
    stream (opened "wb", truncating). When both name the same path the file is
    opened once and shared so the streams interleave instead of clobbering each
    other. A stream with no path is discarded.

    (Refactor: collapses the original four copy-pasted open/run branches into
    one code path with identical behavior.)
    """
    opened = []
    try:
        if stdout_path and stderr_path and stdout_path == stderr_path:
            # Shared log: single handle for both streams.
            shared = open(stdout_path, "wb")
            opened.append(shared)
            out_dst = err_dst = shared
        else:
            if stdout_path:
                out_dst = open(stdout_path, "wb")
                opened.append(out_dst)
            else:
                out_dst = subprocess.DEVNULL
            if stderr_path:
                err_dst = open(stderr_path, "wb")
                opened.append(err_dst)
            else:
                err_dst = subprocess.DEVNULL
        return subprocess.run(cmd, cwd=cwd, stdout=out_dst, stderr=err_dst).returncode
    finally:
        for handle in opened:
            handle.close()
45
+
46
+
47
def run_capture(cmd, *, cwd=None):
    """Run *cmd* capturing decoded stdout/stderr; return (returncode, stdout, stderr)."""
    completed = subprocess.run(
        cmd,
        cwd=cwd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    return completed.returncode, completed.stdout, completed.stderr
50
+
51
+
52
def tail_text(path, max_lines=200, max_chars=12000):
    """Return the last *max_lines* lines of *path*, capped at *max_chars* characters.

    Unreadable files yield the placeholder "(failed to read log)" rather than raising.
    """
    try:
        raw = Path(path).read_text(errors="replace")
    except Exception:
        return "(failed to read log)"
    tail = "\n".join(raw.splitlines()[-max_lines:])
    return tail[-max_chars:] if len(tail) > max_chars else tail
62
+
63
+
64
def first_file_line(text):
    """Return (path, line) from the first "file.ext:LINE[:COL]" match in *text*.

    Matches at the start of any line; returns (None, None) when nothing matches.
    """
    match = re.search(r"^([^\s:]+\.[a-zA-Z0-9]+):(\d+)(?::(\d+))?\b", text, flags=re.M)
    if match is None:
        return None, None
    return match.group(1), int(match.group(2))
70
+
71
+
72
def write_fixfile(path, issues):
    """Write *issues* to *path* as a Markdown "## IssuesToFix" list.

    Each issue dict must provide: id, priority, category, file, line, title,
    description, suggestion. A None line is rendered as the literal "null";
    embedded newlines in description/suggestion are escaped as literal "\\n"
    so every field stays on one physical line.

    Fix: write with explicit UTF-8 — Path.write_text's default encoding is
    locale-dependent and could fail on non-ASCII log content.
    """
    lines = ["## IssuesToFix", ""]
    for issue in issues:
        lines.append(f"- id: {issue['id']}")
        lines.append(f"  priority: {issue['priority']}")
        lines.append(f"  category: {issue['category']}")
        lines.append(f"  file: {issue['file']}")
        lines.append(f"  line: {issue['line'] if issue['line'] is not None else 'null'}")
        lines.append(f"  title: {issue['title']}")
        desc = issue["description"].replace("\n", "\\n")
        sugg = issue["suggestion"].replace("\n", "\\n")
        lines.append(f"  description: {desc}")
        lines.append(f"  suggestion: {sugg}")
    Path(path).write_text("\n".join(lines) + "\n", encoding="utf-8")
87
+
88
+
89
def main():
    """Run the PR precheck workflow; print exactly one JSON object to stdout.

    Steps: validate args/environment, read PR info, check out the PR branch,
    fetch the base branch, refuse conflicting PRs, clear the dx cache, then
    run `dx lint` and `dx build all` concurrently. Returns 0 when both checks
    pass; 1 otherwise (failures also write a fix file under ~/.opencode/cache/).

    (Fixes: `import threading` moved to the module import block; duplicated
    issue-dict construction extracted into a helper.)
    """
    if len(sys.argv) < 2:
        print(json.dumps({"error": "PR_NUMBER_NOT_PROVIDED"}))
        return 1

    pr = sys.argv[1].strip()
    if not pr.isdigit():
        print(json.dumps({"error": "PR_NUMBER_NOT_PROVIDED"}))
        return 1

    # Preconditions: inside a git work tree, gh authenticated.
    rc, out, _ = run_capture(["git", "rev-parse", "--is-inside-work-tree"])
    if rc != 0 or out.strip() != "true":
        print(json.dumps({"error": "NOT_A_GIT_REPO"}))
        return 1

    if run(["gh", "auth", "status"]) != 0:
        print(json.dumps({"error": "GH_NOT_AUTHENTICATED"}))
        return 1

    # PR metadata: head/base branch names and mergeability.
    rc, pr_json, _ = run_capture(["gh", "pr", "view", pr, "--json", "headRefName,baseRefName,mergeable"])
    if rc != 0:
        print(json.dumps({"error": "PR_NOT_FOUND_OR_NO_ACCESS"}))
        return 1
    try:
        pr_info = json.loads(pr_json)
    except Exception:
        print(json.dumps({"error": "PR_NOT_FOUND_OR_NO_ACCESS"}))
        return 1

    head = (pr_info.get("headRefName") or "").strip()
    base = (pr_info.get("baseRefName") or "").strip()
    mergeable = (pr_info.get("mergeable") or "").strip()

    # Check out the PR branch unless we are already on it.
    rc, cur_branch, _ = run_capture(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    if rc != 0:
        print(json.dumps({"error": "PR_CHECKOUT_FAILED"}))
        return 1
    if head and cur_branch.strip() != head:
        if run(["gh", "pr", "checkout", pr]) != 0:
            print(json.dumps({"error": "PR_CHECKOUT_FAILED"}))
            return 1

    # Resolve the base branch, falling back to the repo default branch name.
    if not base:
        rc, out, _ = run_capture(["gh", "repo", "view", "--json", "defaultBranchRef", "--jq", ".defaultBranchRef.name"])
        if rc == 0:
            base = out.strip()
    if not base:
        print(json.dumps({"error": "PR_BASE_REF_NOT_FOUND"}))
        return 1

    # Fetch the base branch; try main/master as fallbacks if the named base fails.
    if run(["git", "fetch", "origin", base]) != 0:
        for fallback in ("main", "master"):
            if fallback != base and run(["git", "fetch", "origin", fallback]) == 0:
                base = fallback
                break
        else:
            print(json.dumps({"error": "PR_BASE_REF_FETCH_FAILED"}))
            return 1

    if mergeable == "CONFLICTING":
        print(json.dumps({"error": "PR_MERGE_CONFLICTS_UNRESOLVED"}))
        return 1

    run_id = secrets.token_hex(4)
    cache = Path.home() / ".opencode" / "cache"
    cache.mkdir(parents=True, exist_ok=True)

    cache_clear_log = cache / f"precheck-pr{pr}-{run_id}-cache-clear.log"
    lint_log = cache / f"precheck-pr{pr}-{run_id}-lint.log"
    build_log = cache / f"precheck-pr{pr}-{run_id}-build.log"
    meta_log = cache / f"precheck-pr{pr}-{run_id}-meta.json"

    # Breadcrumb for debugging: where each log for this run lives.
    meta_log.write_text(json.dumps({
        "pr": int(pr),
        "headRefName": head,
        "baseRefName": base,
        "mergeable": mergeable,
        "cacheClearLog": str(cache_clear_log),
        "lintLog": str(lint_log),
        "buildLog": str(build_log),
    }, indent=2) + "\n")

    def _issue(issue_id, priority, category, title, log_path, *, locate=True):
        # Build one fix-file issue from a failed step's log tail; *locate*
        # controls whether we try to extract a file:line from the log.
        log_tail = tail_text(log_path)
        src_file, src_line = first_file_line(log_tail) if locate else (None, None)
        return {
            "id": issue_id,
            "priority": priority,
            "category": category,
            "file": src_file or "<unknown>",
            "line": src_line,
            "title": title,
            "description": log_tail,
            "suggestion": f"Open log: {log_path}",
        }

    def _fail(issues):
        # Write the fix file and emit the failure JSON result.
        fix_file = f"precheck-fix-pr{pr}-{run_id}.md"
        write_fixfile(str(cache / fix_file), issues)
        print(json.dumps({"ok": False, "fixFile": fix_file}))

    if run(["dx", "cache", "clear"], stdout_path=str(cache_clear_log), stderr_path=str(cache_clear_log)) != 0:
        _fail([_issue("PRE-001", "P1", "quality", "dx cache clear failed", cache_clear_log, locate=False)])
        return 1

    # Run lint and build concurrently; each thread just waits on a subprocess.
    results = {}

    def _worker(name, cmd, log_path):
        results[name] = run(cmd, stdout_path=str(log_path), stderr_path=str(log_path))

    threads = [
        threading.Thread(target=_worker, args=("lint", ["dx", "lint"], lint_log)),
        threading.Thread(target=_worker, args=("build", ["dx", "build", "all"], build_log)),
    ]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    if results.get("lint", 1) == 0 and results.get("build", 1) == 0:
        print(json.dumps({"ok": True}))
        return 0

    issues = []
    if results.get("lint", 1) != 0:
        issues.append(_issue(f"PRE-{len(issues) + 1:03d}", "P1", "lint", "dx lint failed", lint_log))
    if results.get("build", 1) != 0:
        issues.append(_issue(f"PRE-{len(issues) + 1:03d}", "P0", "build", "dx build all failed", build_log))
    _fail(issues)
    return 1
249
+
250
+
251
if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception:
        # Last-resort guard: keep the single-JSON-object stdout contract.
        # SystemExit from sys.exit above is not an Exception subclass, so the
        # normal exit path propagates untouched. (Fix: dropped the unused
        # `as e` binding.)
        print(json.dumps({"error": "PRECHECK_SCRIPT_FAILED"}))
        sys.exit(1)
+ sys.exit(1)