@ranger1/dx 0.1.76 → 0.1.78

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/README.md +92 -31
  2. package/bin/dx.js +3 -3
  3. package/lib/cli/commands/deploy.js +2 -1
  4. package/lib/cli/commands/stack.js +198 -237
  5. package/lib/cli/commands/start.js +0 -6
  6. package/lib/cli/dx-cli.js +10 -1
  7. package/lib/cli/help.js +8 -7
  8. package/lib/{opencode-initial.js → codex-initial.js} +3 -82
  9. package/lib/vercel-deploy.js +14 -27
  10. package/package.json +1 -2
  11. package/@opencode/agents/__pycache__/gh_review_harvest.cpython-314.pyc +0 -0
  12. package/@opencode/agents/__pycache__/pr_context.cpython-314.pyc +0 -0
  13. package/@opencode/agents/__pycache__/pr_precheck.cpython-314.pyc +0 -0
  14. package/@opencode/agents/__pycache__/pr_review_aggregate.cpython-314.pyc +0 -0
  15. package/@opencode/agents/__pycache__/test_pr_review_aggregate.cpython-314-pytest-9.0.2.pyc +0 -0
  16. package/@opencode/agents/__pycache__/test_pr_review_aggregate.cpython-314.pyc +0 -0
  17. package/@opencode/agents/claude-reviewer.md +0 -82
  18. package/@opencode/agents/codex-reviewer.md +0 -83
  19. package/@opencode/agents/gemini-reviewer.md +0 -82
  20. package/@opencode/agents/gh-thread-reviewer.md +0 -122
  21. package/@opencode/agents/gh_review_harvest.py +0 -292
  22. package/@opencode/agents/pr-context.md +0 -82
  23. package/@opencode/agents/pr-fix.md +0 -243
  24. package/@opencode/agents/pr-precheck.md +0 -89
  25. package/@opencode/agents/pr-review-aggregate.md +0 -151
  26. package/@opencode/agents/pr_context.py +0 -351
  27. package/@opencode/agents/pr_precheck.py +0 -505
  28. package/@opencode/agents/pr_review_aggregate.py +0 -868
  29. package/@opencode/agents/test_pr_review_aggregate.py +0 -701
  30. package/@opencode/commands/doctor.md +0 -271
  31. package/@opencode/commands/git-commit-and-pr.md +0 -282
  32. package/@opencode/commands/git-release.md +0 -642
  33. package/@opencode/commands/oh_attach.json +0 -92
  34. package/@opencode/commands/opencode_attach.json +0 -29
  35. package/@opencode/commands/opencode_attach.py +0 -142
  36. package/@opencode/commands/pr-review-loop.md +0 -211
@@ -1,505 +0,0 @@
1
- #!/usr/bin/env python3
2
- # PR precheck workflow (all handled by this script):
3
- # - Verify running inside a git repo
4
- # - Verify GitHub auth (gh)
5
- # - Read PR info (headRefName/baseRefName/mergeable)
6
- # - Checkout PR branch (gh pr checkout) if needed
7
- # - Fetch base branch (origin/<base>)
8
- # - If mergeable == CONFLICTING: return {"error":"PR_MERGE_CONFLICTS_UNRESOLVED"}
9
- # - Run dx cache clear
10
- # - Run dx lint and dx build all concurrently
11
- # - On failure, write fixFile to project cache: ./.cache/
12
- # and return {"ok":false,"fixFile":"./.cache/..."}
13
- # - On success, return {"ok":true}
14
- #
15
- # Stdout contract: print exactly one JSON object and nothing else.
16
-
17
- import json
18
- import re
19
- import subprocess
20
- import sys
21
- from urllib.parse import urlparse
22
- from pathlib import Path
23
-
24
-
25
# Last CLI values that parsed successfully; the top-level exception handler
# uses them to tag the PRECHECK_SCRIPT_FAILED payload with context.
_last_pr_number = None
_last_round = None
27
-
28
-
29
def emit_json(obj):
    """Write *obj* as exactly one compact JSON line to stdout.

    The script's stdout contract is a single JSON object and nothing else,
    so this is the only function that writes to stdout.
    """
    line = json.dumps(obj, separators=(",", ":"), ensure_ascii=True)
    _ = sys.stdout.write(line + "\n")
32
-
33
-
34
def parse_args(argv):
    """Parse CLI arguments: a PR number (positional or --pr) and optional --round.

    Returns a ``(pr_number, round, error_code)`` triple. On success
    ``error_code`` is None; on failure the unparsable slot(s) are None and
    ``error_code`` names the problem.
    """
    pr_value = None
    round_value = 1
    extras = []

    idx = 1
    total = len(argv)
    while idx < total:
        arg = argv[idx]
        if arg == "--pr":
            idx += 1
            if idx >= total:
                return None, None, "PR_NUMBER_NOT_PROVIDED"
            pr_value = argv[idx]
        elif arg.startswith("--pr="):
            pr_value = arg.split("=", 1)[1]
        elif arg == "--round":
            idx += 1
            if idx >= total:
                return None, None, "ROUND_INVALID"
            round_value = argv[idx]
        elif arg.startswith("--round="):
            round_value = arg.split("=", 1)[1]
        elif arg.startswith("-"):
            # Any other flag is rejected outright.
            return None, None, "INVALID_ARGS"
        else:
            extras.append(arg)
        idx += 1

    # First bare positional doubles as the PR number when --pr was absent.
    if pr_value is None and extras:
        pr_value = extras[0]

    pr_text = (pr_value or "").strip()
    if not pr_text.isdigit():
        return None, None, "PR_NUMBER_NOT_PROVIDED"
    pr_int = int(pr_text)

    try:
        round_int = int(str(round_value).strip())
    except Exception:
        return pr_int, None, "ROUND_INVALID"
    if round_int < 1:
        return pr_int, None, "ROUND_INVALID"

    return pr_int, round_int, None
77
-
78
def run(cmd, *, cwd=None, stdout_path=None, stderr_path=None):
    """Run *cmd* with stdout/stderr routed to files (or discarded).

    Returns the process exit code. A missing executable yields 127 to
    mirror the shell's "command not found" convention.
    """
    try:
        return _run(cmd, cwd=cwd, stdout_path=stdout_path, stderr_path=stderr_path)
    except FileNotFoundError:
        return 127


def _run(cmd, *, cwd=None, stdout_path=None, stderr_path=None):
    # Both streams to the same file: open it once so writes interleave.
    if stdout_path and stderr_path and stdout_path == stderr_path:
        with open(stdout_path, "wb") as combined:
            return subprocess.run(cmd, cwd=cwd, stdout=combined, stderr=combined).returncode

    if stdout_path and stderr_path:
        with open(stdout_path, "wb") as out_f, open(stderr_path, "wb") as err_f:
            return subprocess.run(cmd, cwd=cwd, stdout=out_f, stderr=err_f).returncode

    if stdout_path:
        with open(stdout_path, "wb") as out_f:
            return subprocess.run(cmd, cwd=cwd, stdout=out_f, stderr=subprocess.DEVNULL).returncode

    if stderr_path:
        with open(stderr_path, "wb") as err_f:
            return subprocess.run(cmd, cwd=cwd, stdout=subprocess.DEVNULL, stderr=err_f).returncode

    # No log files requested: silence both streams.
    return subprocess.run(cmd, cwd=cwd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode
107
-
108
-
109
def run_capture(cmd, *, cwd=None):
    """Run *cmd* capturing text output; returns ``(rc, stdout, stderr)``.

    A missing executable is reported as rc 127 with the exception text in
    the stderr slot.
    """
    try:
        proc = subprocess.run(
            cmd,
            cwd=cwd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
    except FileNotFoundError as exc:
        return 127, "", str(exc)
    return proc.returncode, proc.stdout, proc.stderr
115
-
116
-
117
def _detect_git_remote_host():
    """Best-effort: extract the hostname from the 'origin' remote URL.

    Handles SCP-like syntax (git@host:owner/repo.git) as well as ssh://,
    https:// and http:// URLs. Returns None when no host can be determined.
    """
    rc, origin_url, _ = run_capture(["git", "remote", "get-url", "origin"])
    if rc != 0:
        # Older git or detached config: fall back to reading the config key.
        rc, origin_url, _ = run_capture(["git", "config", "--get", "remote.origin.url"])
        if rc != 0:
            return None

    url = (origin_url or "").strip()
    if not url:
        return None

    # SCP-like syntax: git@host:owner/repo(.git)
    if url.startswith("git@"):
        match = re.match(r"^git@([^:]+):", url)
        if match:
            return match.group(1)
        return None

    # Standard URL forms, e.g. ssh://git@github.company.com/owner/repo.git
    if url.startswith(("ssh://", "https://", "http://")):
        try:
            return urlparse(url).hostname
        except Exception:
            return None

    return None
146
-
147
-
148
def repo_root():
    """Return the git toplevel directory, falling back to the current directory."""
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "--show-toplevel"],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            text=True,
        )
        top = (proc.stdout or "").strip()
        if proc.returncode == 0 and top:
            return Path(top)
    except Exception:
        # git missing or any other failure: the CWD is the best guess.
        pass
    return Path.cwd()
162
-
163
-
164
def cache_dir(repo_root_path):
    """Absolute path of the project-local ``.cache`` directory under the repo root."""
    return (repo_root_path / ".cache").resolve()
166
-
167
-
168
def repo_relpath(repo_root_path, p):
    """Render *p* as a ``./``-prefixed path relative to the repo root.

    Falls back to ``str(p)`` when *p* lies outside the root (or resolution
    fails for any reason).
    """
    try:
        relative = p.resolve().relative_to(repo_root_path.resolve())
    except Exception:
        return str(p)
    return "./" + relative.as_posix()
174
-
175
-
176
def tail_text(path, max_lines=200, max_chars=12000):
    """Return the last *max_lines* lines of a log file, capped at *max_chars* chars.

    Unreadable files yield the placeholder string rather than raising, so
    callers can embed the result directly into an issue description.
    """
    try:
        content = Path(path).read_text(errors="replace")
    except Exception:
        return "(failed to read log)"
    tail = "\n".join(content.splitlines()[-max_lines:])
    # Character cap keeps the oldest text out, preserving the freshest output.
    return tail[-max_chars:] if len(tail) > max_chars else tail
186
-
187
-
188
def first_file_line(text):
    """Find the first ``file.ext:line[:col]`` reference in *text*.

    Returns ``(file, line)`` with ``line`` as an int, or ``(None, None)``
    when no such reference appears at the start of any line.
    """
    pattern = r"^([^\s:]+\.[a-zA-Z0-9]+):(\d+)(?::(\d+))?\b"
    match = re.search(pattern, text, flags=re.M)
    if match is None:
        return None, None
    return match.group(1), int(match.group(2))
194
-
195
-
196
def write_fixfile(path, issues):
    """Serialize *issues* into a markdown '## IssuesToFix' list at *path*.

    Each issue dict must carry: id, priority, category, file, line, title,
    description, suggestion. A ``None`` line is written as the literal
    ``null``; multi-line text is folded onto one physical line.
    """
    lines = ["## IssuesToFix", ""]
    for issue in issues:
        lines.append(f"- id: {issue['id']}")
        lines.append(f" priority: {issue['priority']}")
        lines.append(f" category: {issue['category']}")
        lines.append(f" file: {issue['file']}")
        lines.append(f" line: {issue['line'] if issue['line'] is not None else 'null'}")
        lines.append(f" title: {issue['title']}")
        # Fold embedded newlines into literal "\n" so each field stays one line.
        lines.append(" description: " + issue["description"].replace("\n", "\\n"))
        lines.append(" suggestion: " + issue["suggestion"].replace("\n", "\\n"))
    _ = Path(path).write_text("\n".join(lines) + "\n")
211
-
212
-
213
def main():
    """Run the full PR precheck workflow and emit one JSON result line.

    Sequence: parse args -> verify git repo -> verify gh auth -> read PR
    info -> checkout PR branch if needed -> fetch base branch -> bail on
    merge conflicts -> `dx cache clear` -> `dx lint` + `dx build all`
    concurrently. Emits exactly one JSON object on stdout (via emit_json)
    and returns a process exit code (0 = ok, 1 = any failure).
    """
    global _last_pr_number
    global _last_round

    pr_number, round_n, arg_err = parse_args(sys.argv)
    if arg_err:
        # Include whichever values did parse so the caller has context.
        err_obj: dict[str, object] = {"error": arg_err}
        if pr_number is not None:
            err_obj["prNumber"] = pr_number
        if round_n is not None:
            err_obj["round"] = round_n
        emit_json(err_obj)
        return 1

    # Record for the top-level crash handler in the __main__ guard.
    _last_pr_number = pr_number
    _last_round = round_n

    pr = str(pr_number)
    # Minimal payload echoed in every result, success or failure.
    base_payload: dict[str, object] = {
        "prNumber": pr_number,
        "round": round_n,
    }

    rc, git_out, _ = run_capture(["git", "rev-parse", "--is-inside-work-tree"])
    if rc != 0 or git_out.strip() != "true":
        emit_json({
            **base_payload,
            "error": "NOT_A_GIT_REPO",
        })
        return 1

    host = _detect_git_remote_host() or "github.com"

    auth_host_used = None
    rc, gh_out, gh_err = run_capture(["gh", "auth", "status", "--hostname", host])
    if rc == 127:
        # run_capture maps a missing executable to 127.
        emit_json({
            **base_payload,
            "error": "GH_CLI_NOT_FOUND",
            "detail": "gh not found in PATH",
            "suggestion": "Install GitHub CLI: https://cli.github.com/",
        })
        return 1

    if rc == 0:
        auth_host_used = host
    else:
        # If hostname auth fails (e.g. SSH host alias), fall back to default host.
        rc_default, gh_out_default, gh_err_default = run_capture(["gh", "auth", "status"])
        if rc_default == 0:
            # Proceed using default gh auth context; avoid false GH_NOT_AUTHENTICATED.
            auth_host_used = "default"
            rc, gh_out, gh_err = rc_default, gh_out_default, gh_err_default

    if rc != 0:
        detail = (gh_err or gh_out or "").strip()
        # Cap the detail so the single-line JSON output stays bounded.
        if len(detail) > 4000:
            detail = detail[-4000:]
        emit_json({
            **base_payload,
            "error": "GH_NOT_AUTHENTICATED",
            "host": host,
            "detail": detail,
            "suggestion": f"Run: gh auth login --hostname {host}",
        })
        return 1

    if auth_host_used == "default":
        # Surface that the hostname-specific auth check was bypassed.
        base_payload["authHostUsed"] = auth_host_used

    rc, pr_json, _ = run_capture([
        "gh",
        "pr",
        "view",
        pr,
        "--json",
        "headRefName,baseRefName,mergeable,headRefOid",
    ])
    if rc != 0:
        emit_json({
            **base_payload,
            "error": "PR_NOT_FOUND_OR_NO_ACCESS",
        })
        return 1
    try:
        pr_info = json.loads(pr_json)
    except Exception:
        # Unparsable gh output is treated the same as a missing PR.
        emit_json({
            **base_payload,
            "error": "PR_NOT_FOUND_OR_NO_ACCESS",
        })
        return 1

    head = (pr_info.get("headRefName") or "").strip()
    base = (pr_info.get("baseRefName") or "").strip()
    mergeable = (pr_info.get("mergeable") or "").strip()

    head_oid = (pr_info.get("headRefOid") or "").strip()
    if not head_oid:
        emit_json({
            **base_payload,
            "error": "PR_HEAD_OID_NOT_FOUND",
            "headRefName": head,
            "baseRefName": base,
            "mergeable": mergeable,
        })
        return 1

    # runId ties all log/fixfile names to (PR, round, head commit).
    head_short = head_oid[:7]
    run_id = f"{pr_number}-{round_n}-{head_short}"

    payload: dict[str, object] = {
        **base_payload,
        "runId": run_id,
        "headOid": head_oid,
        "headShort": head_short,
        "headRefName": head,
        "baseRefName": base,
        "mergeable": mergeable,
    }

    rc, cur_branch, _ = run_capture(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    if rc != 0:
        emit_json({
            **payload,
            "error": "PR_CHECKOUT_FAILED",
        })
        return 1
    # Only check out when not already on the PR's head branch.
    if head and cur_branch.strip() != head:
        if run(["gh", "pr", "checkout", pr]) != 0:
            emit_json({
                **payload,
                "error": "PR_CHECKOUT_FAILED",
            })
            return 1

    if not base:
        # PR info lacked a base ref; fall back to the repo's default branch.
        rc, default_branch_out, _ = run_capture([
            "gh",
            "repo",
            "view",
            "--json",
            "defaultBranchRef",
            "--jq",
            ".defaultBranchRef.name",
        ])
        if rc == 0:
            base = default_branch_out.strip()
        if not base:
            emit_json({
                **payload,
                "error": "PR_BASE_REF_NOT_FOUND",
            })
            return 1

    # baseRefName can be resolved from default branch; keep payload in sync.
    payload["baseRefName"] = base

    if run(["git", "fetch", "origin", base]) != 0:
        emit_json({
            **payload,
            "error": "PR_BASE_REF_FETCH_FAILED",
            "baseRefName": base,
        })
        return 1

    if mergeable == "CONFLICTING":
        emit_json({
            **payload,
            "error": "PR_MERGE_CONFLICTS_UNRESOLVED",
        })
        return 1

    root = repo_root()
    cache = cache_dir(root)
    cache.mkdir(parents=True, exist_ok=True)

    # All artifacts for this run share the runId-based prefix.
    cache_clear_log = cache / f"precheck-{run_id}-cache-clear.log"
    lint_log = cache / f"precheck-{run_id}-lint.log"
    build_log = cache / f"precheck-{run_id}-build.log"
    meta_log = cache / f"precheck-{run_id}-meta.json"

    # Persist run metadata up front so logs are discoverable even on crash.
    _ = meta_log.write_text(json.dumps({
        "prNumber": pr_number,
        "round": round_n,
        "runId": run_id,
        "headOid": head_oid,
        "headShort": head_short,
        "headRefName": head,
        "baseRefName": base,
        "mergeable": mergeable,
        "cacheClearLog": repo_relpath(root, cache_clear_log),
        "lintLog": repo_relpath(root, lint_log),
        "buildLog": repo_relpath(root, build_log),
    }, indent=2) + "\n")

    cache_rc = run(["dx", "cache", "clear"], stdout_path=str(cache_clear_log), stderr_path=str(cache_clear_log))
    if cache_rc != 0:
        # Cache-clear failure is reported as a single P1 issue in a fixfile.
        fix_file = f"precheck-fix-{run_id}.md"
        fix_path = cache / fix_file
        log_tail = tail_text(cache_clear_log)
        issues = [{
            "id": "PRE-001",
            "priority": "P1",
            "category": "quality",
            "file": "<unknown>",
            "line": None,
            "title": "dx cache clear failed",
            "description": log_tail,
            "suggestion": f"Open log: {repo_relpath(root, cache_clear_log)}",
        }]
        write_fixfile(str(fix_path), issues)
        emit_json({
            **payload,
            "ok": False,
            "fixFile": repo_relpath(root, fix_path),
        })
        return 1

    # Imported here, at its single point of use.
    import threading

    results = {}

    def worker(name, cmd, log_path):
        # Each worker records only its exit code; output goes to its log file.
        results[name] = run(cmd, stdout_path=str(log_path), stderr_path=str(log_path))

    # Run lint and build concurrently; both write to separate logs.
    t1 = threading.Thread(target=worker, args=("lint", ["dx", "lint"], lint_log))
    t2 = threading.Thread(target=worker, args=("build", ["dx", "build", "all"], build_log))
    t1.start()
    t2.start()
    t1.join()
    t2.join()

    # Missing result entries default to failure (1).
    if results.get("lint", 1) == 0 and results.get("build", 1) == 0:
        emit_json({
            **payload,
            "ok": True,
        })
        return 0

    fix_file = f"precheck-fix-{run_id}.md"
    fix_path = cache / fix_file

    # Build one issue per failed step, numbering ids sequentially.
    issues = []
    i = 1
    if results.get("lint", 1) != 0:
        log_tail = tail_text(lint_log)
        file, line = first_file_line(log_tail)
        issues.append({
            "id": f"PRE-{i:03d}",
            "priority": "P1",
            "category": "lint",
            "file": file or "<unknown>",
            "line": line,
            "title": "dx lint failed",
            "description": log_tail,
            "suggestion": f"Open log: {repo_relpath(root, lint_log)}",
        })
        i += 1
    if results.get("build", 1) != 0:
        log_tail = tail_text(build_log)
        file, line = first_file_line(log_tail)
        issues.append({
            "id": f"PRE-{i:03d}",
            "priority": "P0",
            "category": "build",
            "file": file or "<unknown>",
            "line": line,
            "title": "dx build all failed",
            "description": log_tail,
            "suggestion": f"Open log: {repo_relpath(root, build_log)}",
        })

    write_fixfile(str(fix_path), issues)
    emit_json({
        **payload,
        "ok": False,
        "fixFile": repo_relpath(root, fix_path),
    })
    return 1
493
-
494
-
495
if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception:
        # Last-resort guard: the stdout contract promises exactly one JSON
        # object even when main() raises unexpectedly. Tag with whatever
        # CLI context was captured before the crash.
        err_obj: dict[str, object] = {"error": "PRECHECK_SCRIPT_FAILED"}
        if _last_pr_number is not None:
            err_obj["prNumber"] = _last_pr_number
        if _last_round is not None:
            err_obj["round"] = _last_round
        emit_json(err_obj)
        sys.exit(1)