@sentry/warden 0.14.0 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/args.d.ts +1 -0
- package/dist/cli/args.d.ts.map +1 -1
- package/dist/cli/args.js +17 -2
- package/dist/cli/args.js.map +1 -1
- package/dist/cli/commands/add.d.ts.map +1 -1
- package/dist/cli/commands/add.js +25 -33
- package/dist/cli/commands/add.js.map +1 -1
- package/dist/cli/commands/logs.d.ts.map +1 -1
- package/dist/cli/commands/logs.js +4 -11
- package/dist/cli/commands/logs.js.map +1 -1
- package/dist/cli/commands/setup-app.d.ts.map +1 -1
- package/dist/cli/commands/setup-app.js +19 -15
- package/dist/cli/commands/setup-app.js.map +1 -1
- package/dist/cli/context.d.ts +2 -0
- package/dist/cli/context.d.ts.map +1 -1
- package/dist/cli/context.js +8 -2
- package/dist/cli/context.js.map +1 -1
- package/dist/cli/files.d.ts.map +1 -1
- package/dist/cli/files.js +27 -30
- package/dist/cli/files.js.map +1 -1
- package/dist/cli/git.d.ts +8 -3
- package/dist/cli/git.d.ts.map +1 -1
- package/dist/cli/git.js +24 -13
- package/dist/cli/git.js.map +1 -1
- package/dist/cli/index.js +10 -0
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/input.d.ts +7 -0
- package/dist/cli/input.d.ts.map +1 -1
- package/dist/cli/input.js +13 -2
- package/dist/cli/input.js.map +1 -1
- package/dist/cli/main.d.ts.map +1 -1
- package/dist/cli/main.js +62 -19
- package/dist/cli/main.js.map +1 -1
- package/dist/config/writer.d.ts.map +1 -1
- package/dist/config/writer.js +18 -0
- package/dist/config/writer.js.map +1 -1
- package/dist/evals/index.js +1 -1
- package/dist/evals/index.js.map +1 -1
- package/dist/output/github-issues.d.ts.map +1 -1
- package/dist/output/github-issues.js +14 -8
- package/dist/output/github-issues.js.map +1 -1
- package/dist/sdk/analyze.d.ts.map +1 -1
- package/dist/sdk/analyze.js +2 -2
- package/dist/sdk/analyze.js.map +1 -1
- package/dist/sdk/auth.d.ts.map +1 -1
- package/dist/sdk/auth.js +2 -2
- package/dist/sdk/auth.js.map +1 -1
- package/dist/sdk/errors.d.ts +3 -1
- package/dist/sdk/errors.d.ts.map +1 -1
- package/dist/sdk/errors.js +2 -2
- package/dist/sdk/errors.js.map +1 -1
- package/dist/skills/remote.js +1 -1
- package/dist/skills/remote.js.map +1 -1
- package/dist/utils/exec.d.ts +4 -1
- package/dist/utils/exec.d.ts.map +1 -1
- package/dist/utils/exec.js +6 -4
- package/dist/utils/exec.js.map +1 -1
- package/package.json +1 -1
- package/skills/warden-sweep/SKILL.md +67 -74
- package/skills/warden-sweep/references/patch-prompt.md +72 -0
- package/skills/warden-sweep/references/verify-prompt.md +25 -0
- package/skills/warden-sweep/scripts/_utils.py +62 -0
- package/skills/warden-sweep/scripts/create_issue.py +189 -0
- package/skills/warden-sweep/scripts/find_reviewers.py +16 -17
- package/skills/warden-sweep/scripts/generate_report.py +20 -25
- package/skills/warden-sweep/scripts/organize.py +128 -21
- package/skills/warden-sweep/scripts/scan.py +82 -130
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# /// script
|
|
3
|
+
# requires-python = ">=3.9"
|
|
4
|
+
# ///
|
|
5
|
+
"""
|
|
6
|
+
Warden Sweep: Create tracking issue.
|
|
7
|
+
|
|
8
|
+
Creates a GitHub issue summarizing the sweep results after verification
|
|
9
|
+
but before patching. Gives every PR a parent to reference and gives
|
|
10
|
+
reviewers a single place to see the full picture.
|
|
11
|
+
|
|
12
|
+
Usage:
|
|
13
|
+
uv run create_issue.py <sweep-dir>
|
|
14
|
+
|
|
15
|
+
Stdout: JSON with issueUrl and issueNumber
|
|
16
|
+
Stderr: Progress lines
|
|
17
|
+
|
|
18
|
+
Idempotent: if issueUrl already exists in manifest, skips creation.
|
|
19
|
+
"""
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
import argparse
|
|
23
|
+
import json
|
|
24
|
+
import os
|
|
25
|
+
import subprocess
|
|
26
|
+
import sys
|
|
27
|
+
from typing import Any
|
|
28
|
+
|
|
29
|
+
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
|
30
|
+
from _utils import ( # noqa: E402
|
|
31
|
+
ensure_github_label,
|
|
32
|
+
pr_number_from_url,
|
|
33
|
+
read_json,
|
|
34
|
+
read_jsonl,
|
|
35
|
+
severity_badge,
|
|
36
|
+
write_json,
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def build_issue_body(
    run_id: str,
    scan_index: list[dict[str, Any]],
    all_findings: list[dict[str, Any]],
    verified: list[dict[str, Any]],
    rejected: list[dict[str, Any]],
) -> str:
    """Render the tracking-issue body as markdown.

    Produces a metrics table from the scan index, a table of verified
    findings, and the list of skills that ran.
    """
    files_scanned = sum(e.get("status") == "complete" for e in scan_index)
    files_timed_out = sum(
        e.get("status") == "error" and e.get("error") == "timeout"
        for e in scan_index
    )
    files_errored = sum(
        e.get("status") == "error" and e.get("error") != "timeout"
        for e in scan_index
    )

    # Unique skill names across every scanned file.
    skills: set[str] = {
        skill for entry in scan_index for skill in entry.get("skills", [])
    }

    body: list[str] = [
        f"## Warden Sweep `{run_id}`",
        "",
        "| Metric | Count |",
        "|--------|-------|",
        f"| Files scanned | {files_scanned} |",
        f"| Files timed out | {files_timed_out} |",
        f"| Files errored | {files_errored} |",
        f"| Total findings | {len(all_findings)} |",
        f"| Verified | {len(verified)} |",
        f"| Rejected | {len(rejected)} |",
        "",
    ]

    if verified:
        body.extend([
            "### Verified Findings",
            "",
            "| Severity | Skill | File | Title |",
            "|----------|-------|------|-------|",
        ])
        for finding in verified:
            badge = severity_badge(finding.get("severity", "info"))
            skill_name = finding.get("skill", "")
            path = finding.get("file", "")
            line_no = finding.get("startLine")
            # Include the line number only when the finding carries one.
            location = f"{path}:{line_no}" if line_no else path
            heading = finding.get("title", "")
            body.append(f"| {badge} | {skill_name} | `{location}` | {heading} |")
        body.append("")

    if skills:
        body.extend(["### Skills Run", "", ", ".join(sorted(skills)), ""])

    body.append("> Generated by Warden Sweep. PRs referencing this issue will appear below.")

    return "\n".join(body) + "\n"
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def create_github_issue(title: str, body: str) -> dict[str, Any]:
    """Create a GitHub issue with the warden label.

    Args:
        title: Issue title.
        body: Markdown issue body.

    Returns:
        Dict with ``issueUrl`` and ``issueNumber``.

    Raises:
        RuntimeError: If ``gh`` is missing or times out, exits non-zero,
            or prints output the issue number cannot be parsed from.
    """
    # Idempotent: no-op when the label already exists.
    ensure_github_label("warden", "5319E7", "Automated fix from Warden Sweep")

    try:
        result = subprocess.run(
            [
                "gh", "issue", "create",
                "--label", "warden",
                "--title", title,
                "--body", body,
            ],
            capture_output=True,
            text=True,
            timeout=30,
        )
    except (subprocess.TimeoutExpired, FileNotFoundError) as exc:
        # Surface a uniform RuntimeError instead of leaking a raw
        # OSError/timeout traceback to the caller.
        raise RuntimeError(f"gh issue create failed: {exc}") from exc

    if result.returncode != 0:
        raise RuntimeError(f"gh issue create failed: {result.stderr.strip()}")

    # gh prints the new issue URL on stdout; the trailing path segment is
    # the issue number (same URL shape as a PR, so the PR helper works).
    issue_url = result.stdout.strip()
    try:
        issue_number = int(pr_number_from_url(issue_url))
    except (ValueError, IndexError) as exc:
        raise RuntimeError(
            f"Could not parse issue number from gh output: {issue_url}"
        ) from exc

    return {"issueUrl": issue_url, "issueNumber": issue_number}
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def main() -> None:
    """CLI entry point: create (or reuse) the sweep tracking issue."""
    parser = argparse.ArgumentParser(
        description="Warden Sweep: Create tracking issue"
    )
    parser.add_argument("sweep_dir", help="Path to the sweep directory")
    sweep_dir = parser.parse_args().sweep_dir

    data_dir = os.path.join(sweep_dir, "data")
    manifest_path = os.path.join(data_dir, "manifest.json")

    # Fail fast with a machine-readable error object on stdout.
    if not os.path.isdir(sweep_dir):
        print(
            json.dumps({"error": f"Sweep directory not found: {sweep_dir}"}),
            file=sys.stdout,
        )
        sys.exit(1)

    manifest = read_json(manifest_path) or {}

    # Idempotency: a previously recorded issue short-circuits the run.
    existing_url = manifest.get("issueUrl")
    if existing_url:
        print(json.dumps({
            "issueUrl": existing_url,
            "issueNumber": manifest.get("issueNumber", 0),
        }))
        return

    run_id = manifest.get("runId", "unknown")

    # Load the artifacts written by the earlier sweep phases.
    scan_index = read_jsonl(os.path.join(data_dir, "scan-index.jsonl"))
    all_findings = read_jsonl(os.path.join(data_dir, "all-findings.jsonl"))
    verified = read_jsonl(os.path.join(data_dir, "verified.jsonl"))
    rejected = read_jsonl(os.path.join(data_dir, "rejected.jsonl"))

    files_scanned = sum(1 for e in scan_index if e.get("status") == "complete")

    # Compose and create the issue.
    title = f"Warden Sweep {run_id}: {len(verified)} findings across {files_scanned} files"
    body = build_issue_body(run_id, scan_index, all_findings, verified, rejected)

    print("Creating tracking issue...", file=sys.stderr)
    result = create_github_issue(title, body)
    print(f"Created issue: {result['issueUrl']}", file=sys.stderr)

    # Persist the issue so later phases (and reruns) can find it.
    manifest["issueUrl"] = result["issueUrl"]
    manifest["issueNumber"] = result["issueNumber"]
    manifest.setdefault("phases", {})["issue"] = "complete"
    write_json(manifest_path, manifest)

    print(json.dumps(result))
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
# Script entry point: `uv run create_issue.py <sweep-dir>`.
if __name__ == "__main__":
    main()
|
|
@@ -20,22 +20,11 @@ from __future__ import annotations
|
|
|
20
20
|
|
|
21
21
|
import argparse
|
|
22
22
|
import json
|
|
23
|
-
import
|
|
23
|
+
import os
|
|
24
24
|
import sys
|
|
25
25
|
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
"""Run a command and return stdout, or None on failure."""
|
|
29
|
-
try:
|
|
30
|
-
result = subprocess.run(
|
|
31
|
-
args,
|
|
32
|
-
capture_output=True,
|
|
33
|
-
text=True,
|
|
34
|
-
timeout=timeout,
|
|
35
|
-
)
|
|
36
|
-
return result.stdout.strip() if result.returncode == 0 else None
|
|
37
|
-
except (subprocess.TimeoutExpired, FileNotFoundError):
|
|
38
|
-
return None
|
|
26
|
+
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
|
27
|
+
from _utils import run_cmd_stdout as run_cmd # noqa: E402
|
|
39
28
|
|
|
40
29
|
|
|
41
30
|
def get_top_authors(file_path: str, count: int = 2) -> list[str]:
|
|
@@ -86,6 +75,12 @@ def email_to_github_username(email: str) -> str | None:
|
|
|
86
75
|
return output if output else None
|
|
87
76
|
|
|
88
77
|
|
|
78
|
+
def get_current_github_user() -> str | None:
    """Get the currently authenticated GitHub username."""
    login = run_cmd(["gh", "api", "/user", "--jq", ".login"])
    # run_cmd reports failure as None; also normalize empty output to None.
    return login or None
|
|
82
|
+
|
|
83
|
+
|
|
89
84
|
def main():
|
|
90
85
|
parser = argparse.ArgumentParser(
|
|
91
86
|
description="Find top git contributors for PR reviewer assignment"
|
|
@@ -97,7 +92,11 @@ def main():
|
|
|
97
92
|
)
|
|
98
93
|
args = parser.parse_args()
|
|
99
94
|
|
|
100
|
-
|
|
95
|
+
current_user = get_current_github_user()
|
|
96
|
+
|
|
97
|
+
# Request extra candidates to compensate for self-exclusion
|
|
98
|
+
fetch_count = args.count + 1 if current_user else args.count
|
|
99
|
+
emails = get_top_authors(args.file_path, fetch_count)
|
|
101
100
|
if not emails:
|
|
102
101
|
print(json.dumps({"reviewers": [], "note": "No recent authors found"}))
|
|
103
102
|
return
|
|
@@ -105,10 +104,10 @@ def main():
|
|
|
105
104
|
reviewers: list[str] = []
|
|
106
105
|
for email in emails:
|
|
107
106
|
username = email_to_github_username(email)
|
|
108
|
-
if username:
|
|
107
|
+
if username and username != current_user:
|
|
109
108
|
reviewers.append(username)
|
|
110
109
|
|
|
111
|
-
print(json.dumps({"reviewers": reviewers}))
|
|
110
|
+
print(json.dumps({"reviewers": reviewers[:args.count]}))
|
|
112
111
|
|
|
113
112
|
|
|
114
113
|
if __name__ == "__main__":
|
|
@@ -23,30 +23,7 @@ from datetime import datetime, timezone
|
|
|
23
23
|
from typing import Any
|
|
24
24
|
|
|
25
25
|
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
|
26
|
-
from _utils import read_jsonl # noqa: E402
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
def read_json(path: str) -> dict[str, Any] | None:
|
|
30
|
-
"""Read a JSON file and return parsed object."""
|
|
31
|
-
if not os.path.exists(path):
|
|
32
|
-
return None
|
|
33
|
-
try:
|
|
34
|
-
with open(path) as f:
|
|
35
|
-
return json.load(f)
|
|
36
|
-
except (json.JSONDecodeError, OSError):
|
|
37
|
-
return None
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
def severity_badge(severity: str) -> str:
|
|
41
|
-
"""Return a markdown-friendly severity indicator."""
|
|
42
|
-
badges = {
|
|
43
|
-
"critical": "**CRITICAL**",
|
|
44
|
-
"high": "**HIGH**",
|
|
45
|
-
"medium": "MEDIUM",
|
|
46
|
-
"low": "LOW",
|
|
47
|
-
"info": "info",
|
|
48
|
-
}
|
|
49
|
-
return badges.get(severity, severity)
|
|
26
|
+
from _utils import read_json, read_jsonl, severity_badge # noqa: E402
|
|
50
27
|
|
|
51
28
|
|
|
52
29
|
def generate_summary_md(
|
|
@@ -65,7 +42,14 @@ def generate_summary_md(
|
|
|
65
42
|
completed_at = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
66
43
|
|
|
67
44
|
files_scanned = sum(1 for e in scan_index if e.get("status") == "complete")
|
|
68
|
-
|
|
45
|
+
files_timed_out = sum(
|
|
46
|
+
1 for e in scan_index
|
|
47
|
+
if e.get("status") == "error" and e.get("error") == "timeout"
|
|
48
|
+
)
|
|
49
|
+
files_errored = sum(
|
|
50
|
+
1 for e in scan_index
|
|
51
|
+
if e.get("status") == "error" and e.get("error") != "timeout"
|
|
52
|
+
)
|
|
69
53
|
|
|
70
54
|
prs_created = sum(1 for p in patches if p.get("status") == "created")
|
|
71
55
|
prs_failed = sum(1 for p in patches if p.get("status") == "error")
|
|
@@ -88,6 +72,7 @@ def generate_summary_md(
|
|
|
88
72
|
f"| Metric | Count |",
|
|
89
73
|
f"|--------|-------|",
|
|
90
74
|
f"| Files scanned | {files_scanned} |",
|
|
75
|
+
f"| Files timed out | {files_timed_out} |",
|
|
91
76
|
f"| Files errored | {files_errored} |",
|
|
92
77
|
f"| Total findings | {len(all_findings)} |",
|
|
93
78
|
f"| Verified | {len(verified)} |",
|
|
@@ -176,6 +161,14 @@ def generate_report_json(
|
|
|
176
161
|
completed_at = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
177
162
|
|
|
178
163
|
files_scanned = sum(1 for e in scan_index if e.get("status") == "complete")
|
|
164
|
+
files_timed_out = sum(
|
|
165
|
+
1 for e in scan_index
|
|
166
|
+
if e.get("status") == "error" and e.get("error") == "timeout"
|
|
167
|
+
)
|
|
168
|
+
files_errored = sum(
|
|
169
|
+
1 for e in scan_index
|
|
170
|
+
if e.get("status") == "error" and e.get("error") != "timeout"
|
|
171
|
+
)
|
|
179
172
|
prs_created = sum(1 for p in patches if p.get("status") == "created")
|
|
180
173
|
prs_failed = sum(1 for p in patches if p.get("status") == "error")
|
|
181
174
|
|
|
@@ -190,6 +183,8 @@ def generate_report_json(
|
|
|
190
183
|
"completedAt": completed_at,
|
|
191
184
|
"scan": {
|
|
192
185
|
"filesScanned": files_scanned,
|
|
186
|
+
"filesTimedOut": files_timed_out,
|
|
187
|
+
"filesErrored": files_errored,
|
|
193
188
|
"totalFindings": len(all_findings),
|
|
194
189
|
},
|
|
195
190
|
"verify": {
|
|
@@ -36,7 +36,7 @@ from datetime import datetime, timezone
|
|
|
36
36
|
from typing import Any
|
|
37
37
|
|
|
38
38
|
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
|
39
|
-
from _utils import read_jsonl # noqa: E402
|
|
39
|
+
from _utils import ensure_github_label, pr_number_from_url, read_json, read_jsonl, write_json # noqa: E402
|
|
40
40
|
|
|
41
41
|
|
|
42
42
|
SECURITY_SKILL_PATTERNS = [
|
|
@@ -54,6 +54,15 @@ def is_security_skill(skill_name: str) -> bool:
|
|
|
54
54
|
return name_lower in SECURITY_SKILL_PATTERNS
|
|
55
55
|
|
|
56
56
|
|
|
57
|
+
def severity_label(severity: str) -> str:
    """Format a severity string for inline display in issue comments."""
    if not severity:
        return ""
    upper = severity.upper()
    # Bold only the severities that demand immediate attention.
    if severity in ("critical", "high"):
        return f" (**{upper}**)"
    return f" ({upper})"
|
|
64
|
+
|
|
65
|
+
|
|
57
66
|
def identify_security_findings(
|
|
58
67
|
sweep_dir: str,
|
|
59
68
|
) -> list[dict[str, Any]]:
|
|
@@ -101,18 +110,7 @@ def copy_security_findings(
|
|
|
101
110
|
|
|
102
111
|
def create_security_label() -> None:
|
|
103
112
|
"""Create the security label on GitHub (idempotent)."""
|
|
104
|
-
|
|
105
|
-
subprocess.run(
|
|
106
|
-
[
|
|
107
|
-
"gh", "label", "create", "security",
|
|
108
|
-
"--color", "D93F0B",
|
|
109
|
-
"--description", "Security-related changes",
|
|
110
|
-
],
|
|
111
|
-
capture_output=True,
|
|
112
|
-
timeout=15,
|
|
113
|
-
)
|
|
114
|
-
except (subprocess.TimeoutExpired, FileNotFoundError):
|
|
115
|
-
pass
|
|
113
|
+
ensure_github_label("security", "D93F0B", "Security-related changes")
|
|
116
114
|
|
|
117
115
|
|
|
118
116
|
def label_security_prs(
|
|
@@ -156,6 +154,115 @@ def label_security_prs(
|
|
|
156
154
|
return labeled
|
|
157
155
|
|
|
158
156
|
|
|
157
|
+
def _has_sweep_complete_comment(issue_url: str) -> bool:
    """Check if the tracking issue already has a 'Sweep Complete' comment."""
    cmd = [
        "gh", "issue", "view", issue_url, "--json", "comments", "--jq",
        '.comments[].body | select(startswith("## Sweep Complete"))',
    ]
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=15)
    except (subprocess.TimeoutExpired, FileNotFoundError):
        # gh missing or hanging: report no comment and let the caller post.
        return False
    # jq emits matching bodies; any non-empty stdout means a match exists.
    return proc.returncode == 0 and proc.stdout.strip() != ""
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def update_tracking_issue(sweep_dir: str) -> None:
    """Post a comment on the tracking issue with final PR results. Idempotent.

    Reads the sweep manifest for the tracking-issue URL, summarizes the
    patches/verified/security data files into a markdown comment, and posts
    it via ``gh issue comment``. Returns silently when there is no manifest
    or no recorded issue; comment failures are warnings, never fatal.
    """
    manifest = read_json(os.path.join(sweep_dir, "data", "manifest.json"))
    if not manifest:
        return

    issue_url = manifest.get("issueUrl")
    if not issue_url:
        return

    # Idempotency guard: skip if a completion comment was already posted.
    if _has_sweep_complete_comment(issue_url):
        print("Tracking issue already has completion comment, skipping.", file=sys.stderr)
        return

    patches = read_jsonl(os.path.join(sweep_dir, "data", "patches.jsonl"))
    verified = read_jsonl(os.path.join(sweep_dir, "data", "verified.jsonl"))
    security_index = read_jsonl(os.path.join(sweep_dir, "security", "index.jsonl"))

    # Build lookup from findingId to verified finding
    verified_lookup: dict[str, dict[str, Any]] = {}
    for f in verified:
        fid = f.get("findingId", "")
        if fid:
            verified_lookup[fid] = f

    # Finding ids flagged as security-relevant by the organize phase.
    security_ids = {f.get("findingId", "") for f in security_index}

    created = sum(1 for p in patches if p.get("status") == "created")
    existing = sum(1 for p in patches if p.get("status") == "existing")
    failed = sum(1 for p in patches if p.get("status") == "error")

    lines = [
        "## Sweep Complete",
        "",
        "| PRs Created | PRs Skipped (existing) | PRs Failed | Security Findings |",
        "|-------------|------------------------|------------|-------------------|",
        f"| {created} | {existing} | {failed} | {len(security_index)} |",
        "",
    ]

    # PR task list
    pr_entries = [p for p in patches if p.get("status") == "created" and p.get("prUrl")]
    if pr_entries:
        lines.append("### PRs")
        lines.append("")
        for p in pr_entries:
            fid = p.get("findingId", "")
            pr_number = pr_number_from_url(p.get("prUrl", ""))
            # Fall back to the raw finding id when no verified title exists.
            finding = verified_lookup.get(fid, {})
            title = finding.get("title", fid)
            sev = severity_label(finding.get("severity", ""))
            # Task-list checkboxes so reviewers can tick off merged PRs.
            lines.append(f"- [ ] #{pr_number} - fix: {title}{sev}")
        lines.append("")

    # Security findings section
    security_prs = [
        p for p in patches
        if p.get("status") == "created"
        and p.get("findingId", "") in security_ids
        and p.get("prUrl")
    ]
    if security_prs:
        lines.append("### Security Findings")
        lines.append("")
        for p in security_prs:
            fid = p.get("findingId", "")
            pr_number = pr_number_from_url(p.get("prUrl", ""))
            finding = verified_lookup.get(fid, {})
            title = finding.get("title", fid)
            sev = severity_label(finding.get("severity", ""))
            lines.append(f"- #{pr_number} - {title}{sev}")
        lines.append("")

    body = "\n".join(lines)

    # Best effort: a failed comment should not fail the organize phase.
    try:
        result = subprocess.run(
            ["gh", "issue", "comment", issue_url, "--body", body],
            capture_output=True,
            text=True,
            timeout=30,
        )
        if result.returncode != 0:
            print(
                f"Warning: Failed to comment on tracking issue: {result.stderr.strip()}",
                file=sys.stderr,
            )
    except (subprocess.TimeoutExpired, FileNotFoundError) as e:
        print(
            f"Warning: Failed to comment on tracking issue: {e}",
            file=sys.stderr,
        )
|
|
264
|
+
|
|
265
|
+
|
|
159
266
|
def update_findings_with_pr_links(sweep_dir: str) -> None:
|
|
160
267
|
"""Append PR links to findings/*.md for created PRs."""
|
|
161
268
|
patches = read_jsonl(os.path.join(sweep_dir, "data", "patches.jsonl"))
|
|
@@ -218,20 +325,16 @@ def run_generate_report(sweep_dir: str, script_dir: str) -> None:
|
|
|
218
325
|
def update_manifest(sweep_dir: str) -> None:
|
|
219
326
|
"""Mark organize phase complete and add completedAt timestamp."""
|
|
220
327
|
manifest_path = os.path.join(sweep_dir, "data", "manifest.json")
|
|
221
|
-
|
|
328
|
+
manifest = read_json(manifest_path)
|
|
329
|
+
if not manifest:
|
|
222
330
|
return
|
|
223
331
|
|
|
224
|
-
with open(manifest_path) as f:
|
|
225
|
-
manifest = json.load(f)
|
|
226
|
-
|
|
227
332
|
manifest.setdefault("phases", {})["organize"] = "complete"
|
|
228
333
|
manifest["completedAt"] = datetime.now(timezone.utc).strftime(
|
|
229
334
|
"%Y-%m-%dT%H:%M:%SZ"
|
|
230
335
|
)
|
|
231
336
|
|
|
232
|
-
|
|
233
|
-
json.dump(manifest, f, indent=2)
|
|
234
|
-
f.write("\n")
|
|
337
|
+
write_json(manifest_path, manifest)
|
|
235
338
|
|
|
236
339
|
|
|
237
340
|
def main() -> None:
|
|
@@ -279,7 +382,11 @@ def main() -> None:
|
|
|
279
382
|
print("Generating summary and report...", file=sys.stderr)
|
|
280
383
|
run_generate_report(sweep_dir, script_dir)
|
|
281
384
|
|
|
282
|
-
# Step 6: Update
|
|
385
|
+
# Step 6: Update tracking issue with PR results
|
|
386
|
+
print("Updating tracking issue...", file=sys.stderr)
|
|
387
|
+
update_tracking_issue(sweep_dir)
|
|
388
|
+
|
|
389
|
+
# Step 7: Update manifest
|
|
283
390
|
update_manifest(sweep_dir)
|
|
284
391
|
|
|
285
392
|
# Gather stats for output
|