@sentry/warden 0.13.0 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agents.lock +7 -0
- package/dist/cli/args.d.ts +15 -12
- package/dist/cli/args.d.ts.map +1 -1
- package/dist/cli/args.js +61 -3
- package/dist/cli/args.js.map +1 -1
- package/dist/cli/commands/add.d.ts.map +1 -1
- package/dist/cli/commands/add.js +25 -33
- package/dist/cli/commands/add.js.map +1 -1
- package/dist/cli/commands/init.d.ts +0 -3
- package/dist/cli/commands/init.d.ts.map +1 -1
- package/dist/cli/commands/init.js +206 -19
- package/dist/cli/commands/init.js.map +1 -1
- package/dist/cli/commands/logs.d.ts +19 -0
- package/dist/cli/commands/logs.d.ts.map +1 -0
- package/dist/cli/commands/logs.js +412 -0
- package/dist/cli/commands/logs.js.map +1 -0
- package/dist/cli/commands/setup-app.d.ts.map +1 -1
- package/dist/cli/commands/setup-app.js +19 -15
- package/dist/cli/commands/setup-app.js.map +1 -1
- package/dist/cli/context.d.ts +2 -0
- package/dist/cli/context.d.ts.map +1 -1
- package/dist/cli/context.js +8 -2
- package/dist/cli/context.js.map +1 -1
- package/dist/cli/files.d.ts.map +1 -1
- package/dist/cli/files.js +27 -30
- package/dist/cli/files.js.map +1 -1
- package/dist/cli/git.d.ts +8 -3
- package/dist/cli/git.d.ts.map +1 -1
- package/dist/cli/git.js +24 -13
- package/dist/cli/git.js.map +1 -1
- package/dist/cli/index.js +10 -0
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/input.d.ts +7 -0
- package/dist/cli/input.d.ts.map +1 -1
- package/dist/cli/input.js +13 -2
- package/dist/cli/input.js.map +1 -1
- package/dist/cli/main.d.ts.map +1 -1
- package/dist/cli/main.js +108 -32
- package/dist/cli/main.js.map +1 -1
- package/dist/cli/output/formatters.d.ts +2 -1
- package/dist/cli/output/formatters.d.ts.map +1 -1
- package/dist/cli/output/formatters.js +22 -19
- package/dist/cli/output/formatters.js.map +1 -1
- package/dist/cli/output/index.d.ts +1 -1
- package/dist/cli/output/index.d.ts.map +1 -1
- package/dist/cli/output/index.js +1 -1
- package/dist/cli/output/index.js.map +1 -1
- package/dist/cli/output/ink-runner.js +1 -1
- package/dist/cli/output/ink-runner.js.map +1 -1
- package/dist/cli/output/jsonl.d.ts +49 -13
- package/dist/cli/output/jsonl.d.ts.map +1 -1
- package/dist/cli/output/jsonl.js +137 -4
- package/dist/cli/output/jsonl.js.map +1 -1
- package/dist/cli/output/tasks.d.ts.map +1 -1
- package/dist/cli/output/tasks.js +1 -22
- package/dist/cli/output/tasks.js.map +1 -1
- package/dist/cli/terminal.d.ts.map +1 -1
- package/dist/cli/terminal.js +0 -2
- package/dist/cli/terminal.js.map +1 -1
- package/dist/config/schema.d.ts +49 -98
- package/dist/config/schema.d.ts.map +1 -1
- package/dist/config/schema.js +0 -12
- package/dist/config/schema.js.map +1 -1
- package/dist/config/writer.d.ts.map +1 -1
- package/dist/config/writer.js +18 -0
- package/dist/config/writer.js.map +1 -1
- package/dist/evals/index.js +1 -1
- package/dist/evals/index.js.map +1 -1
- package/dist/evals/runner.d.ts.map +1 -1
- package/dist/evals/runner.js +0 -1
- package/dist/evals/runner.js.map +1 -1
- package/dist/evals/types.d.ts +9 -15
- package/dist/evals/types.d.ts.map +1 -1
- package/dist/output/github-checks.d.ts +1 -1
- package/dist/output/github-checks.d.ts.map +1 -1
- package/dist/output/github-checks.js +2 -6
- package/dist/output/github-checks.js.map +1 -1
- package/dist/output/github-issues.d.ts.map +1 -1
- package/dist/output/github-issues.js +14 -8
- package/dist/output/github-issues.js.map +1 -1
- package/dist/output/issue-renderer.js +1 -1
- package/dist/output/issue-renderer.js.map +1 -1
- package/dist/sdk/analyze.d.ts.map +1 -1
- package/dist/sdk/analyze.js +14 -27
- package/dist/sdk/analyze.js.map +1 -1
- package/dist/sdk/auth.d.ts +16 -0
- package/dist/sdk/auth.d.ts.map +1 -0
- package/dist/sdk/auth.js +37 -0
- package/dist/sdk/auth.js.map +1 -0
- package/dist/sdk/errors.d.ts +8 -1
- package/dist/sdk/errors.d.ts.map +1 -1
- package/dist/sdk/errors.js +22 -2
- package/dist/sdk/errors.js.map +1 -1
- package/dist/sdk/prompt.js +1 -1
- package/dist/sdk/runner.d.ts +2 -1
- package/dist/sdk/runner.d.ts.map +1 -1
- package/dist/sdk/runner.js +3 -1
- package/dist/sdk/runner.js.map +1 -1
- package/dist/sdk/types.d.ts +0 -3
- package/dist/sdk/types.d.ts.map +1 -1
- package/dist/sdk/types.js.map +1 -1
- package/dist/skills/remote.js +1 -1
- package/dist/skills/remote.js.map +1 -1
- package/dist/types/index.d.ts +23 -24
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js +19 -7
- package/dist/types/index.js.map +1 -1
- package/dist/utils/exec.d.ts +4 -1
- package/dist/utils/exec.d.ts.map +1 -1
- package/dist/utils/exec.js +6 -4
- package/dist/utils/exec.js.map +1 -1
- package/package.json +1 -1
- package/skills/warden/SKILL.md +76 -0
- package/skills/warden/references/cli-reference.md +142 -0
- package/skills/warden/references/config-schema.md +111 -0
- package/skills/warden/references/configuration.md +110 -0
- package/skills/warden/references/creating-skills.md +84 -0
- package/skills/warden-sweep/SKILL.md +400 -0
- package/skills/warden-sweep/references/patch-prompt.md +72 -0
- package/skills/warden-sweep/references/verify-prompt.md +25 -0
- package/skills/warden-sweep/scripts/_utils.py +99 -0
- package/skills/warden-sweep/scripts/create_issue.py +189 -0
- package/skills/warden-sweep/scripts/extract_findings.py +219 -0
- package/skills/warden-sweep/scripts/find_reviewers.py +114 -0
- package/skills/warden-sweep/scripts/generate_report.py +266 -0
- package/skills/warden-sweep/scripts/index_prs.py +187 -0
- package/skills/warden-sweep/scripts/organize.py +422 -0
- package/skills/warden-sweep/scripts/scan.py +584 -0
- package/dist/sdk/session.d.ts +0 -43
- package/dist/sdk/session.d.ts.map +0 -1
- package/dist/sdk/session.js +0 -105
- package/dist/sdk/session.js.map +0 -1
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# /// script
|
|
3
|
+
# requires-python = ">=3.9"
|
|
4
|
+
# ///
|
|
5
|
+
"""
|
|
6
|
+
Warden Sweep: Organize phase.
|
|
7
|
+
|
|
8
|
+
Identifies security findings, creates security indexes, labels security PRs,
|
|
9
|
+
updates finding reports with PR links, generates summary report, and
|
|
10
|
+
finalizes the manifest.
|
|
11
|
+
|
|
12
|
+
Usage:
|
|
13
|
+
uv run organize.py <sweep-dir>
|
|
14
|
+
|
|
15
|
+
Stdout: JSON summary (for LLM consumption)
|
|
16
|
+
Stderr: Progress lines
|
|
17
|
+
|
|
18
|
+
Side effects:
|
|
19
|
+
- Creates security/index.jsonl with security findings
|
|
20
|
+
- Copies security finding .md files to security/
|
|
21
|
+
- Creates "security" label on GitHub
|
|
22
|
+
- Labels security PRs with "security"
|
|
23
|
+
- Appends PR links to findings/*.md
|
|
24
|
+
- Runs generate_report.py for summary.md and report.json
|
|
25
|
+
- Updates manifest phases.organize to "complete"
|
|
26
|
+
"""
|
|
27
|
+
from __future__ import annotations
|
|
28
|
+
|
|
29
|
+
import argparse
|
|
30
|
+
import json
|
|
31
|
+
import os
|
|
32
|
+
import shutil
|
|
33
|
+
import subprocess
|
|
34
|
+
import sys
|
|
35
|
+
from datetime import datetime, timezone
|
|
36
|
+
from typing import Any
|
|
37
|
+
|
|
38
|
+
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
|
39
|
+
from _utils import ensure_github_label, pr_number_from_url, read_json, read_jsonl, write_json # noqa: E402
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# Skill names treated as security-related even when "security" is absent
# from the name itself (e.g. "owasp-review").
SECURITY_SKILL_PATTERNS = [
    "security-review",
    "owasp-review",
    "security-audit",
]


def is_security_skill(skill_name: str) -> bool:
    """Return True when the given skill name denotes a security skill."""
    lowered = skill_name.lower()
    return "security" in lowered or lowered in SECURITY_SKILL_PATTERNS
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def severity_label(severity: str) -> str:
    """Render a severity tag for inline display in issue comments.

    Critical and high severities are bolded; an empty severity yields "".
    """
    if not severity:
        return ""
    tag = severity.upper()
    return f" (**{tag}**)" if severity in ("critical", "high") else f" ({tag})"
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def identify_security_findings(
    sweep_dir: str,
) -> list[dict[str, Any]]:
    """Collect security-related verified findings and write security/index.jsonl.

    Reads data/verified.jsonl, keeps entries whose skill is security-related,
    and persists a trimmed index entry per finding. Returns the index entries.
    """
    verified_path = os.path.join(sweep_dir, "data", "verified.jsonl")
    security_findings: list[dict[str, Any]] = [
        {
            "findingId": finding.get("findingId", ""),
            "skill": finding.get("skill", ""),
            "severity": finding.get("severity", "info"),
            "file": finding.get("file", ""),
            "title": finding.get("title", ""),
        }
        for finding in read_jsonl(verified_path)
        if is_security_skill(finding.get("skill", ""))
    ]

    # Persist the security index (one JSON object per line).
    security_dir = os.path.join(sweep_dir, "security")
    os.makedirs(security_dir, exist_ok=True)
    index_path = os.path.join(security_dir, "index.jsonl")
    with open(index_path, "w") as f:
        f.writelines(json.dumps(entry) + "\n" for entry in security_findings)

    return security_findings
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def copy_security_findings(
    sweep_dir: str, security_findings: list[dict[str, Any]]
) -> None:
    """Copy each security finding's .md report from findings/ into security/.

    Missing source files are silently skipped; metadata is preserved.
    """
    src_dir = os.path.join(sweep_dir, "findings")
    dst_dir = os.path.join(sweep_dir, "security")

    for entry in security_findings:
        name = f"{entry.get('findingId', '')}.md"
        src = os.path.join(src_dir, name)
        if os.path.exists(src):
            shutil.copy2(src, os.path.join(dst_dir, name))
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def create_security_label() -> None:
    """Create the security label on GitHub (idempotent)."""
    # Delegates to the shared _utils helper; color D93F0B is GitHub's
    # conventional orange-red used for security labels.
    ensure_github_label("security", "D93F0B", "Security-related changes")
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def label_security_prs(
    sweep_dir: str, security_findings: list[dict[str, Any]]
) -> int:
    """Apply the "security" label to created PRs for security findings.

    Reads data/patches.jsonl and labels each created PR whose findingId is in
    the security set via the `gh` CLI. Failures are warnings on stderr, never
    fatal. Returns how many PRs were labeled successfully.
    """
    security_ids = {f.get("findingId", "") for f in security_findings}
    patches = read_jsonl(os.path.join(sweep_dir, "data", "patches.jsonl"))

    labeled = 0
    for patch in patches:
        pr_url = patch.get("prUrl", "")
        # Only created PRs for security findings with a known URL qualify.
        if (
            patch.get("status") != "created"
            or patch.get("findingId", "") not in security_ids
            or not pr_url
        ):
            continue

        try:
            proc = subprocess.run(
                ["gh", "pr", "edit", pr_url, "--add-label", "security"],
                capture_output=True,
                text=True,
                timeout=15,
            )
        except (subprocess.TimeoutExpired, FileNotFoundError):
            print(
                f"Warning: Failed to label PR {pr_url}",
                file=sys.stderr,
            )
            continue

        if proc.returncode == 0:
            labeled += 1
        else:
            print(
                f"Warning: Failed to label PR {pr_url}: {proc.stderr.strip()}",
                file=sys.stderr,
            )

    return labeled
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def _has_sweep_complete_comment(issue_url: str) -> bool:
    """Return True if the tracking issue already has a 'Sweep Complete' comment.

    Uses `gh issue view` with a jq filter; any CLI failure (missing binary,
    timeout, nonzero exit) is treated as "no comment found".
    """
    cmd = [
        "gh", "issue", "view", issue_url, "--json", "comments", "--jq",
        '.comments[].body | select(startswith("## Sweep Complete"))',
    ]
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=15)
    except (subprocess.TimeoutExpired, FileNotFoundError):
        return False
    return proc.returncode == 0 and bool(proc.stdout.strip())
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def update_tracking_issue(sweep_dir: str) -> None:
    """Post a comment on the tracking issue with final PR results. Idempotent."""
    # Bail out quietly if the manifest is missing/empty or carries no issue URL.
    manifest = read_json(os.path.join(sweep_dir, "data", "manifest.json"))
    if not manifest:
        return

    issue_url = manifest.get("issueUrl")
    if not issue_url:
        return

    # Idempotence guard: never post a second "Sweep Complete" comment.
    if _has_sweep_complete_comment(issue_url):
        print("Tracking issue already has completion comment, skipping.", file=sys.stderr)
        return

    patches = read_jsonl(os.path.join(sweep_dir, "data", "patches.jsonl"))
    verified = read_jsonl(os.path.join(sweep_dir, "data", "verified.jsonl"))
    security_index = read_jsonl(os.path.join(sweep_dir, "security", "index.jsonl"))

    # Build lookup from findingId to verified finding
    verified_lookup: dict[str, dict[str, Any]] = {}
    for f in verified:
        fid = f.get("findingId", "")
        if fid:
            verified_lookup[fid] = f

    security_ids = {f.get("findingId", "") for f in security_index}

    # Patch status tallies for the summary table.
    created = sum(1 for p in patches if p.get("status") == "created")
    existing = sum(1 for p in patches if p.get("status") == "existing")
    failed = sum(1 for p in patches if p.get("status") == "error")

    # Comment body is assembled as markdown lines and joined at the end.
    lines = [
        "## Sweep Complete",
        "",
        "| PRs Created | PRs Skipped (existing) | PRs Failed | Security Findings |",
        "|-------------|------------------------|------------|-------------------|",
        f"| {created} | {existing} | {failed} | {len(security_index)} |",
        "",
    ]

    # PR task list
    pr_entries = [p for p in patches if p.get("status") == "created" and p.get("prUrl")]
    if pr_entries:
        lines.append("### PRs")
        lines.append("")
        for p in pr_entries:
            fid = p.get("findingId", "")
            pr_number = pr_number_from_url(p.get("prUrl", ""))
            finding = verified_lookup.get(fid, {})
            # Fall back to the finding id when no title was recorded.
            title = finding.get("title", fid)
            sev = severity_label(finding.get("severity", ""))
            lines.append(f"- [ ] #{pr_number} - fix: {title}{sev}")
        lines.append("")

    # Security findings section
    security_prs = [
        p for p in patches
        if p.get("status") == "created"
        and p.get("findingId", "") in security_ids
        and p.get("prUrl")
    ]
    if security_prs:
        lines.append("### Security Findings")
        lines.append("")
        for p in security_prs:
            fid = p.get("findingId", "")
            pr_number = pr_number_from_url(p.get("prUrl", ""))
            finding = verified_lookup.get(fid, {})
            title = finding.get("title", fid)
            sev = severity_label(finding.get("severity", ""))
            lines.append(f"- #{pr_number} - {title}{sev}")
        lines.append("")

    body = "\n".join(lines)

    # Post the comment via the gh CLI; failures are reported but non-fatal.
    try:
        result = subprocess.run(
            ["gh", "issue", "comment", issue_url, "--body", body],
            capture_output=True,
            text=True,
            timeout=30,
        )
        if result.returncode != 0:
            print(
                f"Warning: Failed to comment on tracking issue: {result.stderr.strip()}",
                file=sys.stderr,
            )
    except (subprocess.TimeoutExpired, FileNotFoundError) as e:
        print(
            f"Warning: Failed to comment on tracking issue: {e}",
            file=sys.stderr,
        )
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def update_findings_with_pr_links(sweep_dir: str) -> None:
    """Append a Pull Request section to findings/*.md for each created PR.

    Idempotent: files already containing a "## Pull Request" heading are
    left untouched, as are patches without a findingId or PR URL.
    """
    findings_dir = os.path.join(sweep_dir, "findings")

    for patch in read_jsonl(os.path.join(sweep_dir, "data", "patches.jsonl")):
        if patch.get("status") != "created":
            continue

        fid = patch.get("findingId", "")
        pr_url = patch.get("prUrl", "")
        if not fid or not pr_url:
            continue

        md_path = os.path.join(findings_dir, f"{fid}.md")
        if not os.path.exists(md_path):
            continue

        # Skip reports that already carry a PR section.
        with open(md_path) as f:
            if "## Pull Request" in f.read():
                continue

        reviewers = patch.get("reviewers", [])
        reviewer_list = ", ".join(reviewers) if reviewers else "none"
        branch = patch.get("branch", "")
        pr_section = (
            f"\n\n## Pull Request\n"
            f"**PR**: {pr_url}\n"
            f"**Branch**: {branch}\n"
            f"**Reviewers**: {reviewer_list}\n"
        )

        with open(md_path, "a") as f:
            f.write(pr_section)
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
def run_generate_report(sweep_dir: str, script_dir: str) -> None:
    """Run generate_report.py in a subprocess; warn on stderr, never raise.

    Report generation is best-effort: any failure (nonzero exit, timeout,
    missing interpreter/script) is downgraded to a warning.
    """
    report_script = os.path.join(script_dir, "generate_report.py")

    try:
        proc = subprocess.run(
            [sys.executable, report_script, sweep_dir],
            capture_output=True,
            text=True,
            timeout=60,
        )
    except Exception as e:
        print(f"Warning: generate_report.py failed: {e}", file=sys.stderr)
        return

    if proc.returncode != 0:
        print(
            f"Warning: generate_report.py failed: {proc.stderr}",
            file=sys.stderr,
        )
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def update_manifest(sweep_dir: str) -> None:
    """Mark the organize phase complete and stamp completedAt (UTC, Z-suffixed)."""
    manifest_path = os.path.join(sweep_dir, "data", "manifest.json")
    manifest = read_json(manifest_path)
    # Nothing to update if the manifest is absent or empty.
    if not manifest:
        return

    phases = manifest.setdefault("phases", {})
    phases["organize"] = "complete"
    now = datetime.now(timezone.utc)
    manifest["completedAt"] = now.strftime("%Y-%m-%dT%H:%M:%SZ")

    write_json(manifest_path, manifest)
|
|
338
|
+
|
|
339
|
+
|
|
340
|
+
def main() -> None:
    """CLI entry point: run every organize-phase step, then emit a JSON summary.

    Progress goes to stderr; the final machine-readable summary goes to stdout.
    Exits 1 (with a JSON error on stdout) if the sweep directory is missing.
    """
    parser = argparse.ArgumentParser(
        description="Warden Sweep: Organize phase"
    )
    parser.add_argument("sweep_dir", help="Path to the sweep directory")
    args = parser.parse_args()

    sweep_dir = args.sweep_dir

    # Errors are emitted as JSON on stdout so the calling LLM can parse them.
    if not os.path.isdir(sweep_dir):
        print(
            json.dumps({"error": f"Sweep directory not found: {sweep_dir}"}),
            file=sys.stdout,
        )
        sys.exit(1)

    script_dir = os.path.dirname(os.path.abspath(__file__))

    # Step 1: Identify security findings
    print("Identifying security findings...", file=sys.stderr)
    security_findings = identify_security_findings(sweep_dir)
    print(
        f"Found {len(security_findings)} security finding(s)",
        file=sys.stderr,
    )

    # Step 2: Label security PRs
    security_prs_labeled = 0
    if security_findings:
        print("Creating security label...", file=sys.stderr)
        create_security_label()
        print("Labeling security PRs...", file=sys.stderr)
        security_prs_labeled = label_security_prs(sweep_dir, security_findings)

    # Step 3: Update finding reports with PR links
    print("Updating finding reports with PR links...", file=sys.stderr)
    update_findings_with_pr_links(sweep_dir)

    # Step 4: Copy security finding reports (after PR links are added)
    copy_security_findings(sweep_dir, security_findings)

    # Step 5: Generate summary and report
    print("Generating summary and report...", file=sys.stderr)
    run_generate_report(sweep_dir, script_dir)

    # Step 6: Update tracking issue with PR results
    print("Updating tracking issue...", file=sys.stderr)
    update_tracking_issue(sweep_dir)

    # Step 7: Update manifest
    update_manifest(sweep_dir)

    # Gather stats for output
    scan_index = read_jsonl(os.path.join(sweep_dir, "data", "scan-index.jsonl"))
    verified = read_jsonl(os.path.join(sweep_dir, "data", "verified.jsonl"))
    rejected = read_jsonl(os.path.join(sweep_dir, "data", "rejected.jsonl"))
    patches = read_jsonl(os.path.join(sweep_dir, "data", "patches.jsonl"))

    files_scanned = sum(1 for e in scan_index if e.get("status") == "complete")
    prs_created = sum(1 for p in patches if p.get("status") == "created")

    summary_path = os.path.join(sweep_dir, "summary.md")
    report_path = os.path.join(sweep_dir, "data", "report.json")

    # Final JSON summary for LLM consumption (stdout only).
    output = {
        "securityFindings": len(security_findings),
        "securityPRsLabeled": security_prs_labeled,
        "summaryPath": summary_path,
        "reportPath": report_path,
        "stats": {
            "filesScanned": files_scanned,
            "verified": len(verified),
            "rejected": len(rejected),
            "prsCreated": prs_created,
            "securityFindings": len(security_findings),
        },
    }

    print(json.dumps(output, indent=2))


if __name__ == "__main__":
    main()
|