@sentry/warden 0.13.0 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. package/agents.lock +7 -0
  2. package/dist/cli/args.d.ts +15 -12
  3. package/dist/cli/args.d.ts.map +1 -1
  4. package/dist/cli/args.js +61 -3
  5. package/dist/cli/args.js.map +1 -1
  6. package/dist/cli/commands/add.d.ts.map +1 -1
  7. package/dist/cli/commands/add.js +25 -33
  8. package/dist/cli/commands/add.js.map +1 -1
  9. package/dist/cli/commands/init.d.ts +0 -3
  10. package/dist/cli/commands/init.d.ts.map +1 -1
  11. package/dist/cli/commands/init.js +206 -19
  12. package/dist/cli/commands/init.js.map +1 -1
  13. package/dist/cli/commands/logs.d.ts +19 -0
  14. package/dist/cli/commands/logs.d.ts.map +1 -0
  15. package/dist/cli/commands/logs.js +412 -0
  16. package/dist/cli/commands/logs.js.map +1 -0
  17. package/dist/cli/commands/setup-app.d.ts.map +1 -1
  18. package/dist/cli/commands/setup-app.js +19 -15
  19. package/dist/cli/commands/setup-app.js.map +1 -1
  20. package/dist/cli/context.d.ts +2 -0
  21. package/dist/cli/context.d.ts.map +1 -1
  22. package/dist/cli/context.js +8 -2
  23. package/dist/cli/context.js.map +1 -1
  24. package/dist/cli/files.d.ts.map +1 -1
  25. package/dist/cli/files.js +27 -30
  26. package/dist/cli/files.js.map +1 -1
  27. package/dist/cli/git.d.ts +8 -3
  28. package/dist/cli/git.d.ts.map +1 -1
  29. package/dist/cli/git.js +24 -13
  30. package/dist/cli/git.js.map +1 -1
  31. package/dist/cli/index.js +10 -0
  32. package/dist/cli/index.js.map +1 -1
  33. package/dist/cli/input.d.ts +7 -0
  34. package/dist/cli/input.d.ts.map +1 -1
  35. package/dist/cli/input.js +13 -2
  36. package/dist/cli/input.js.map +1 -1
  37. package/dist/cli/main.d.ts.map +1 -1
  38. package/dist/cli/main.js +108 -32
  39. package/dist/cli/main.js.map +1 -1
  40. package/dist/cli/output/formatters.d.ts +2 -1
  41. package/dist/cli/output/formatters.d.ts.map +1 -1
  42. package/dist/cli/output/formatters.js +22 -19
  43. package/dist/cli/output/formatters.js.map +1 -1
  44. package/dist/cli/output/index.d.ts +1 -1
  45. package/dist/cli/output/index.d.ts.map +1 -1
  46. package/dist/cli/output/index.js +1 -1
  47. package/dist/cli/output/index.js.map +1 -1
  48. package/dist/cli/output/ink-runner.js +1 -1
  49. package/dist/cli/output/ink-runner.js.map +1 -1
  50. package/dist/cli/output/jsonl.d.ts +49 -13
  51. package/dist/cli/output/jsonl.d.ts.map +1 -1
  52. package/dist/cli/output/jsonl.js +137 -4
  53. package/dist/cli/output/jsonl.js.map +1 -1
  54. package/dist/cli/output/tasks.d.ts.map +1 -1
  55. package/dist/cli/output/tasks.js +1 -22
  56. package/dist/cli/output/tasks.js.map +1 -1
  57. package/dist/cli/terminal.d.ts.map +1 -1
  58. package/dist/cli/terminal.js +0 -2
  59. package/dist/cli/terminal.js.map +1 -1
  60. package/dist/config/schema.d.ts +49 -98
  61. package/dist/config/schema.d.ts.map +1 -1
  62. package/dist/config/schema.js +0 -12
  63. package/dist/config/schema.js.map +1 -1
  64. package/dist/config/writer.d.ts.map +1 -1
  65. package/dist/config/writer.js +18 -0
  66. package/dist/config/writer.js.map +1 -1
  67. package/dist/evals/index.js +1 -1
  68. package/dist/evals/index.js.map +1 -1
  69. package/dist/evals/runner.d.ts.map +1 -1
  70. package/dist/evals/runner.js +0 -1
  71. package/dist/evals/runner.js.map +1 -1
  72. package/dist/evals/types.d.ts +9 -15
  73. package/dist/evals/types.d.ts.map +1 -1
  74. package/dist/output/github-checks.d.ts +1 -1
  75. package/dist/output/github-checks.d.ts.map +1 -1
  76. package/dist/output/github-checks.js +2 -6
  77. package/dist/output/github-checks.js.map +1 -1
  78. package/dist/output/github-issues.d.ts.map +1 -1
  79. package/dist/output/github-issues.js +14 -8
  80. package/dist/output/github-issues.js.map +1 -1
  81. package/dist/output/issue-renderer.js +1 -1
  82. package/dist/output/issue-renderer.js.map +1 -1
  83. package/dist/sdk/analyze.d.ts.map +1 -1
  84. package/dist/sdk/analyze.js +14 -27
  85. package/dist/sdk/analyze.js.map +1 -1
  86. package/dist/sdk/auth.d.ts +16 -0
  87. package/dist/sdk/auth.d.ts.map +1 -0
  88. package/dist/sdk/auth.js +37 -0
  89. package/dist/sdk/auth.js.map +1 -0
  90. package/dist/sdk/errors.d.ts +8 -1
  91. package/dist/sdk/errors.d.ts.map +1 -1
  92. package/dist/sdk/errors.js +22 -2
  93. package/dist/sdk/errors.js.map +1 -1
  94. package/dist/sdk/prompt.js +1 -1
  95. package/dist/sdk/runner.d.ts +2 -1
  96. package/dist/sdk/runner.d.ts.map +1 -1
  97. package/dist/sdk/runner.js +3 -1
  98. package/dist/sdk/runner.js.map +1 -1
  99. package/dist/sdk/types.d.ts +0 -3
  100. package/dist/sdk/types.d.ts.map +1 -1
  101. package/dist/sdk/types.js.map +1 -1
  102. package/dist/skills/remote.js +1 -1
  103. package/dist/skills/remote.js.map +1 -1
  104. package/dist/types/index.d.ts +23 -24
  105. package/dist/types/index.d.ts.map +1 -1
  106. package/dist/types/index.js +19 -7
  107. package/dist/types/index.js.map +1 -1
  108. package/dist/utils/exec.d.ts +4 -1
  109. package/dist/utils/exec.d.ts.map +1 -1
  110. package/dist/utils/exec.js +6 -4
  111. package/dist/utils/exec.js.map +1 -1
  112. package/package.json +1 -1
  113. package/skills/warden/SKILL.md +76 -0
  114. package/skills/warden/references/cli-reference.md +142 -0
  115. package/skills/warden/references/config-schema.md +111 -0
  116. package/skills/warden/references/configuration.md +110 -0
  117. package/skills/warden/references/creating-skills.md +84 -0
  118. package/skills/warden-sweep/SKILL.md +400 -0
  119. package/skills/warden-sweep/references/patch-prompt.md +72 -0
  120. package/skills/warden-sweep/references/verify-prompt.md +25 -0
  121. package/skills/warden-sweep/scripts/_utils.py +99 -0
  122. package/skills/warden-sweep/scripts/create_issue.py +189 -0
  123. package/skills/warden-sweep/scripts/extract_findings.py +219 -0
  124. package/skills/warden-sweep/scripts/find_reviewers.py +114 -0
  125. package/skills/warden-sweep/scripts/generate_report.py +266 -0
  126. package/skills/warden-sweep/scripts/index_prs.py +187 -0
  127. package/skills/warden-sweep/scripts/organize.py +422 -0
  128. package/skills/warden-sweep/scripts/scan.py +584 -0
  129. package/dist/sdk/session.d.ts +0 -43
  130. package/dist/sdk/session.d.ts.map +0 -1
  131. package/dist/sdk/session.js +0 -105
  132. package/dist/sdk/session.js.map +0 -1
@@ -0,0 +1,266 @@
1
+ #!/usr/bin/env python3
2
+ # /// script
3
+ # requires-python = ">=3.9"
4
+ # ///
5
+ """
6
+ Generate summary.md and report.json from a completed sweep.
7
+
8
+ Usage:
9
+ python generate_report.py <sweep-dir>
10
+
11
+ Reads the data/ subdirectory for all-findings.jsonl, verified.jsonl,
12
+ rejected.jsonl, patches.jsonl, and security/index.jsonl, then produces:
13
+ - <sweep-dir>/summary.md
14
+ - <sweep-dir>/data/report.json
15
+ """
16
+ from __future__ import annotations
17
+
18
+ import argparse
19
+ import json
20
+ import os
21
+ import sys
22
+ from datetime import datetime, timezone
23
+ from typing import Any
24
+
25
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
26
+ from _utils import read_json, read_jsonl, severity_badge # noqa: E402
27
+
28
+
29
def generate_summary_md(
    manifest: dict[str, Any],
    scan_index: list[dict[str, Any]],
    all_findings: list[dict[str, Any]],
    verified: list[dict[str, Any]],
    rejected: list[dict[str, Any]],
    patches: list[dict[str, Any]],
    security_index: list[dict[str, Any]],
) -> str:
    """Generate the summary.md content.

    Args:
        manifest: Parsed manifest.json; supplies runId, startedAt and repo.
        scan_index: One entry per scanned file (``status``/``error`` fields).
        all_findings: Every raw finding emitted by the scan phase.
        verified: Findings confirmed during verification.
        rejected: Findings judged to be false positives.
        patches: Patch/PR attempts (``status``, ``findingId``, ``prUrl``).
        security_index: Security-flagged findings needing priority review.

    Returns:
        The complete Markdown document, terminated with a newline.
    """
    run_id = manifest.get("runId", "unknown")
    started_at = manifest.get("startedAt", "unknown")
    repo = manifest.get("repo", "unknown")
    # Completion timestamp is "now" in UTC, ISO-8601 with a Z suffix.
    completed_at = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    files_scanned = sum(1 for e in scan_index if e.get("status") == "complete")
    files_timed_out = sum(
        1 for e in scan_index
        if e.get("status") == "error" and e.get("error") == "timeout"
    )
    # Any error that is not a timeout counts as a hard failure.
    files_errored = sum(
        1 for e in scan_index
        if e.get("status") == "error" and e.get("error") != "timeout"
    )

    prs_created = sum(1 for p in patches if p.get("status") == "created")
    prs_failed = sum(1 for p in patches if p.get("status") == "error")

    # Severity breakdown of verified findings only.
    by_severity: dict[str, int] = {}
    for f in verified:
        sev = f.get("severity", "info")
        by_severity[sev] = by_severity.get(sev, 0) + 1

    lines = [
        f"# Warden Sweep: `{run_id}`",
        "",
        f"**Repo**: {repo}",
        f"**Started**: {started_at}",
        f"**Completed**: {completed_at}",
        "",
        "## Stats",
        "",
        # Plain literals: these rows have no placeholders (fixes F541).
        "| Metric | Count |",
        "|--------|-------|",
        f"| Files scanned | {files_scanned} |",
        f"| Files timed out | {files_timed_out} |",
        f"| Files errored | {files_errored} |",
        f"| Total findings | {len(all_findings)} |",
        f"| Verified | {len(verified)} |",
        f"| Rejected | {len(rejected)} |",
        f"| PRs created | {prs_created} |",
        f"| PRs failed | {prs_failed} |",
        f"| Security findings | {len(security_index)} |",
        "",
    ]

    if by_severity:
        lines.append("### By Severity")
        lines.append("")
        # Fixed ordering from most to least severe; zero-count levels are skipped.
        for sev in ["critical", "high", "medium", "low", "info"]:
            count = by_severity.get(sev, 0)
            if count > 0:
                lines.append(f"- {severity_badge(sev)}: {count}")
        lines.append("")

    # Security callout table, only when there is something to call out.
    if security_index:
        lines.append("## Security Findings")
        lines.append("")
        lines.append("The following findings are security-related and may need priority review:")
        lines.append("")
        lines.append("| ID | Severity | Skill | File | Title |")
        lines.append("|----|----------|-------|------|-------|")
        for sf in security_index:
            fid = sf.get("findingId", "")
            sev = severity_badge(sf.get("severity", "info"))
            skill = sf.get("skill", "")
            filepath = sf.get("file", "")
            title = sf.get("title", "")
            lines.append(f"| `{fid}` | {sev} | {skill} | `{filepath}` | {title} |")
        lines.append("")

    # Verified findings table, each row linked to its PR when one was created.
    if verified:
        lines.append("## Verified Findings")
        lines.append("")
        lines.append("| ID | Severity | Skill | File | Title | PR |")
        lines.append("|----|----------|-------|------|-------|-----|")

        # Map findingId -> PR URL for successfully created PRs only.
        pr_lookup: dict[str, str] = {}
        for p in patches:
            if p.get("status") == "created" and p.get("findingId"):
                pr_lookup[p["findingId"]] = p.get("prUrl", "")

        for f in verified:
            fid = f.get("findingId", "")
            sev = severity_badge(f.get("severity", "info"))
            skill = f.get("skill", "")
            filepath = f.get("file", "")
            title = f.get("title", "")
            pr_url = pr_lookup.get(fid, "")
            pr_link = f"[PR]({pr_url})" if pr_url else "-"
            lines.append(f"| `{fid}` | {sev} | {skill} | `{filepath}` | {title} | {pr_link} |")
        lines.append("")

    # Rejected findings are summarized only; details stay in the JSONL file.
    if rejected:
        lines.append(f"## Rejected Findings ({len(rejected)})")
        lines.append("")
        lines.append("These findings were evaluated and determined to be false positives.")
        lines.append("See `data/rejected.jsonl` for details.")
        lines.append("")

    lines.append("---")
    lines.append(f"*Generated by Warden Sweep `{run_id}`*")

    return "\n".join(lines) + "\n"
148
+
149
+
150
def generate_report_json(
    manifest: dict[str, Any],
    scan_index: list[dict[str, Any]],
    all_findings: list[dict[str, Any]],
    verified: list[dict[str, Any]],
    rejected: list[dict[str, Any]],
    patches: list[dict[str, Any]],
    security_index: list[dict[str, Any]],
) -> dict[str, Any]:
    """Generate the report.json data.

    Args:
        manifest: Parsed manifest.json; supplies runId.
        scan_index: One entry per scanned file (``status``/``error`` fields).
        all_findings: Every raw finding emitted by the scan phase.
        verified: Findings confirmed during verification.
        rejected: Findings judged to be false positives.
        patches: Patch/PR attempts (``status``, ``findingId``, ``prUrl``).
        security_index: Security-flagged findings.

    Returns:
        A JSON-serializable dict with scan/verify/patch/security stats and
        one entry per created PR.
    """
    run_id = manifest.get("runId", "unknown")
    # Completion timestamp is "now" in UTC, ISO-8601 with a Z suffix.
    completed_at = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    files_scanned = sum(1 for e in scan_index if e.get("status") == "complete")
    files_timed_out = sum(
        1 for e in scan_index
        if e.get("status") == "error" and e.get("error") == "timeout"
    )
    files_errored = sum(
        1 for e in scan_index
        if e.get("status") == "error" and e.get("error") != "timeout"
    )
    prs_created = sum(1 for p in patches if p.get("status") == "created")
    prs_failed = sum(1 for p in patches if p.get("status") == "error")

    # Count verify errors: findings present in the scan output but absent
    # from both the verified and rejected sets (the verifier never ruled).
    verified_ids = {f["findingId"] for f in verified if "findingId" in f}
    rejected_ids = {f["findingId"] for f in rejected if "findingId" in f}
    all_ids = {f["findingId"] for f in all_findings if "findingId" in f}
    verify_errors = len(all_ids - verified_ids - rejected_ids)

    # findingId -> severity lookup built once, so the PR list below costs
    # O(V + P) instead of re-scanning `verified` for every created patch.
    # setdefault keeps the first match, same as the previous next(...) scan.
    severity_by_id: dict[Any, str] = {}
    for f in verified:
        severity_by_id.setdefault(f.get("findingId"), f.get("severity", ""))

    return {
        "runId": run_id,
        "completedAt": completed_at,
        "scan": {
            "filesScanned": files_scanned,
            "filesTimedOut": files_timed_out,
            "filesErrored": files_errored,
            "totalFindings": len(all_findings),
        },
        "verify": {
            "verified": len(verified),
            "rejected": len(rejected),
            "errors": verify_errors,
        },
        "patch": {
            "prsCreated": prs_created,
            "prsFailed": prs_failed,
        },
        "security": {
            "count": len(security_index),
        },
        "prs": [
            {
                "findingId": p.get("findingId", ""),
                "url": p.get("prUrl", ""),
                "severity": severity_by_id.get(p.get("findingId"), ""),
            }
            for p in patches
            if p.get("status") == "created"
        ],
    }
215
+
216
+
217
def main():
    """CLI entry point: read sweep artifacts, write summary.md and report.json.

    Reads JSON/JSONL artifacts from ``<sweep-dir>/data/`` (plus
    ``<sweep-dir>/security/index.jsonl``), renders the Markdown summary and
    the machine-readable report, and prints a small JSON status line.
    """
    parser = argparse.ArgumentParser(
        description="Generate sweep summary and report"
    )
    parser.add_argument("sweep_dir", help="Path to the sweep output directory")
    opts = parser.parse_args()

    sweep_root = opts.sweep_dir
    data_root = os.path.join(sweep_root, "data")

    # Load every pipeline artifact; a missing manifest degrades to {}.
    manifest = read_json(os.path.join(data_root, "manifest.json")) or {}
    scan_index = read_jsonl(os.path.join(data_root, "scan-index.jsonl"))
    findings = read_jsonl(os.path.join(data_root, "all-findings.jsonl"))
    verified = read_jsonl(os.path.join(data_root, "verified.jsonl"))
    rejected = read_jsonl(os.path.join(data_root, "rejected.jsonl"))
    patches = read_jsonl(os.path.join(data_root, "patches.jsonl"))
    security = read_jsonl(os.path.join(sweep_root, "security", "index.jsonl"))

    # Render and write the human-readable Markdown summary.
    summary_path = os.path.join(sweep_root, "summary.md")
    rendered = generate_summary_md(
        manifest, scan_index,
        findings, verified, rejected, patches, security,
    )
    with open(summary_path, "w") as fh:
        fh.write(rendered)

    # Build and write the machine-readable report.
    report = generate_report_json(
        manifest, scan_index, findings,
        verified, rejected, patches, security,
    )
    report_path = os.path.join(data_root, "report.json")
    with open(report_path, "w") as fh:
        json.dump(report, fh, indent=2)
        fh.write("\n")

    # One-line JSON status for the calling process / LLM to consume.
    status = {
        "summaryPath": summary_path,
        "reportPath": report_path,
        "verified": len(verified),
        "rejected": len(rejected),
        "prsCreated": report["patch"]["prsCreated"],
        "securityFindings": len(security),
    }
    print(json.dumps(status))
263
+
264
+
265
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()
@@ -0,0 +1,187 @@
1
+ #!/usr/bin/env python3
2
+ # /// script
3
+ # requires-python = ">=3.9"
4
+ # ///
5
+ """
6
+ Warden Sweep: Index existing PRs for deduplication.
7
+
8
+ Fetches open warden-labeled PRs via gh, identifies file overlap with
9
+ verified findings, and caches diffs for overlapping PRs.
10
+
11
+ Usage:
12
+ uv run index_prs.py <sweep-dir>
13
+
14
+ Stdout: JSON summary (for LLM consumption)
15
+ Stderr: Progress lines
16
+
17
+ Side effects:
18
+ - Creates data/existing-prs.json
19
+ - Creates data/pr-diffs/<number>.diff for overlapping PRs
20
+ """
21
+ from __future__ import annotations
22
+
23
+ import argparse
24
+ import json
25
+ import os
26
+ import subprocess
27
+ import sys
28
+ from typing import Any
29
+
30
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
31
+ from _utils import read_jsonl, run_cmd # noqa: E402
32
+
33
+
34
def fetch_warden_prs(sweep_dir: str) -> list[dict[str, Any]]:
    """Fetch open PRs with the warden label.

    On success, also persists the raw PR list to
    ``<sweep-dir>/data/existing-prs.json`` for later pipeline stages.
    Returns an empty list (with a stderr warning) on any gh/parse failure.
    """
    cmd = [
        "gh", "pr", "list",
        "--label", "warden",
        "--state", "open",
        "--json", "number,title,url,files",
        "--limit", "100",
    ]
    result = run_cmd(cmd, timeout=30)

    if result.returncode != 0:
        print(f"Warning: gh pr list failed: {result.stderr}", file=sys.stderr)
        return []

    try:
        pr_list = json.loads(result.stdout)
    except json.JSONDecodeError:
        print("Warning: Failed to parse gh pr list output", file=sys.stderr)
        return []

    # Cache the raw PR data alongside the other sweep artifacts.
    raw_path = os.path.join(sweep_dir, "data", "existing-prs.json")
    with open(raw_path, "w") as fh:
        json.dump(pr_list, fh, indent=2)
        fh.write("\n")

    return pr_list
64
+
65
+
66
def build_file_index(
    prs: list[dict[str, Any]],
) -> dict[str, list[dict[str, Any]]]:
    """Build a file-to-PR lookup from the PR list.

    Each touched file path maps to the list of PR summaries
    (number/title/url) that modify it, in input order.
    """
    by_file: dict[str, list[dict[str, Any]]] = {}

    for pr in prs:
        summary = {
            "number": pr.get("number"),
            "title": pr.get("title", ""),
            "url": pr.get("url", ""),
        }
        for entry in (pr.get("files") or []):
            # gh emits file entries as {"path": ...} objects; tolerate
            # bare strings too, and skip empty paths.
            if isinstance(entry, dict):
                path = entry.get("path", "")
            else:
                path = str(entry)
            if path:
                by_file.setdefault(path, []).append(summary)

    return by_file
89
+
90
+
91
def get_verified_files(sweep_dir: str) -> set[str]:
    """Get the set of files that have verified findings.

    Records with a missing or empty "file" field are ignored.
    """
    records = read_jsonl(os.path.join(sweep_dir, "data", "verified.jsonl"))
    return {rec["file"] for rec in records if rec.get("file")}
96
+
97
+
98
def fetch_pr_diff(pr_number: int, sweep_dir: str) -> bool:
    """Fetch and cache a PR diff. Returns True on success.

    The diff is written to ``<sweep-dir>/data/pr-diffs/<number>.diff``;
    an existing cache file short-circuits the gh call.
    """
    cache_path = os.path.join(
        sweep_dir, "data", "pr-diffs", f"{pr_number}.diff"
    )

    # A cached diff from a previous run counts as success.
    if os.path.exists(cache_path):
        return True

    result = run_cmd(["gh", "pr", "diff", str(pr_number)], timeout=30)

    if result.returncode != 0:
        print(
            f"Warning: Failed to fetch diff for PR #{pr_number}: {result.stderr}",
            file=sys.stderr,
        )
        return False

    with open(cache_path, "w") as fh:
        fh.write(result.stdout)
    return True
124
+
125
+
126
def main() -> None:
    """CLI entry point: index open warden PRs and cache overlapping diffs.

    Emits a JSON summary on stdout (totalPRs, overlappingPRs, fileIndex,
    diffsCached); progress lines go to stderr. Exits 1 if the sweep
    directory does not exist.
    """
    parser = argparse.ArgumentParser(
        description="Warden Sweep: Index existing PRs for dedup"
    )
    parser.add_argument("sweep_dir", help="Path to the sweep directory")
    opts = parser.parse_args()
    sweep_dir = opts.sweep_dir

    if not os.path.isdir(sweep_dir):
        # Error report is JSON on stdout so callers can parse it uniformly.
        print(
            json.dumps({"error": f"Sweep directory not found: {sweep_dir}"}),
            file=sys.stdout,
        )
        sys.exit(1)

    # The diff cache lives under data/pr-diffs/.
    os.makedirs(os.path.join(sweep_dir, "data", "pr-diffs"), exist_ok=True)

    print("Fetching open warden-labeled PRs...", file=sys.stderr)
    prs = fetch_warden_prs(sweep_dir)
    print(f"Found {len(prs)} open warden PR(s)", file=sys.stderr)

    file_index = build_file_index(prs)
    verified_files = get_verified_files(sweep_dir)

    # Single pass over the verified files: restrict the index to files
    # with verified findings and collect the PR numbers touching them.
    overlap_index: dict[str, list[dict[str, Any]]] = {}
    overlapping: set[int] = set()
    for path in verified_files:
        hits = file_index.get(path)
        if hits:
            overlap_index[path] = hits
            overlapping.update(h["number"] for h in hits)

    # Cache diffs for every overlapping PR, counting successes.
    diffs_cached = 0
    for number in sorted(overlapping):
        print(f"Caching diff for PR #{number}...", file=sys.stderr)
        if fetch_pr_diff(number, sweep_dir):
            diffs_cached += 1

    # Machine-readable summary for the orchestrating LLM.
    summary = {
        "totalPRs": len(prs),
        "overlappingPRs": len(overlapping),
        "fileIndex": overlap_index,
        "diffsCached": diffs_cached,
    }
    print(json.dumps(summary, indent=2))
184
+
185
+
186
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()