@delegance/claude-autopilot 1.0.0-alpha.4 → 1.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/package.json +5 -2
- package/scripts/autoregress.ts +268 -0
- package/src/cli/index.ts +16 -1
- package/src/cli/run.ts +21 -0
- package/src/formatters/github-annotations.ts +36 -0
- package/src/formatters/index.ts +3 -0
- package/src/formatters/sarif.ts +103 -0
- package/src/snapshots/impact-selector.ts +60 -0
- package/src/snapshots/import-scanner.ts +44 -0
- package/src/snapshots/serializer.ts +24 -0
- package/tests/snapshots/baselines/.gitkeep +0 -0
- package/tests/snapshots/import-map.json +1 -0
- package/tests/snapshots/index.json +1 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,28 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## 1.0.0-alpha.6
|
|
4
|
+
|
|
5
|
+
### Added
|
|
6
|
+
|
|
7
|
+
- **Auto-regression testing** (`scripts/autoregress.ts generate|run|update`) — autoresearch-inspired snapshot tests for changed source modules
|
|
8
|
+
- **Impact-aware selection** — only fires snapshots whose source modules (or one-hop importers) were touched; high-impact paths (`src/core/pipeline/**`, `src/adapters/**`, `src/core/findings/**`, `src/core/config/**`) and >10-file changes trigger full run
|
|
9
|
+
- **Snapshot serializer** (`src/snapshots/serializer.ts`) — deterministic JSON normalization: sorted keys, `<timestamp>`, `<uuid>`, path stripping
|
|
10
|
+
- **Import scanner** (`src/snapshots/import-scanner.ts`) — static `import`/`export` graph → reverse dependency map
|
|
11
|
+
- **Impact selector** (`src/snapshots/impact-selector.ts`) — merge-base diff + one-hop expansion + overrides
|
|
12
|
+
- **Baseline capture** — `CAPTURE_BASELINE=1` env flag; `autoregress update` rewrites baselines after intentional changes
|
|
13
|
+
- **Staleness detection** — warns and skips snapshots whose `@snapshot-for` source file no longer exists
|
|
14
|
+
- 10 new unit tests (AR1-AR10) for serializer, import scanner, and impact selector
|
|
15
|
+
|
|
16
|
+
## 1.0.0-alpha.5 (2026-04-21)
|
|
17
|
+
|
|
18
|
+
### New Features
|
|
19
|
+
|
|
20
|
+
- **`--format sarif --output <path>`** on `autopilot run` — serialises `RunResult` to SARIF 2.1.0; deduplicates rules by category; normalises URIs to repo-relative forward-slash; always emits `results: []` even on error so `upload-sarif` never fails on a missing file
|
|
21
|
+
- **Auto GitHub Actions annotations** — when `GITHUB_ACTIONS=true`, `emitAnnotations()` fires after every run and writes `::error`/`::warning`/`::notice` workflow commands to stdout; GitHub renders these as inline annotations on the PR diff
|
|
22
|
+
- **`src/formatters/`** — pure formatter modules (`sarif.ts`, `github-annotations.ts`) with full command-injection encoding (`%`, `\r`, `\n`, `:`, `,`) for annotation properties and data
|
|
23
|
+
- **`action.yml`** composite action — checkout → setup-node@v4 → npx autopilot run → upload-sarif@v3; inputs: `version`, `config`, `sarif-output`, `openai-api-key`; upload step runs `if: always()` so findings surface even when run exits 1
|
|
24
|
+
- 21 new formatter tests (11 SARIF + 10 annotations) → **95 total**
|
|
25
|
+
|
|
3
26
|
## 1.0.0-alpha.4 (2026-04-21)
|
|
4
27
|
|
|
5
28
|
### New Features
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@delegance/claude-autopilot",
|
|
3
|
-
"version": "1.0.0-alpha.
|
|
3
|
+
"version": "1.0.0-alpha.6",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "Claude Code automation pipeline: spec → plan → implement → validate → PR",
|
|
6
6
|
"keywords": ["claude", "autopilot", "ai", "pipeline", "code-review", "cli"],
|
|
@@ -20,12 +20,15 @@
|
|
|
20
20
|
"src/",
|
|
21
21
|
"presets/",
|
|
22
22
|
"scripts/test-runner.mjs",
|
|
23
|
+
"scripts/autoregress.ts",
|
|
24
|
+
"tests/snapshots/",
|
|
23
25
|
"CHANGELOG.md"
|
|
24
26
|
],
|
|
25
27
|
"scripts": {
|
|
26
28
|
"test": "node scripts/test-runner.mjs",
|
|
27
29
|
"typecheck": "tsc --noEmit",
|
|
28
|
-
"build": "tsc"
|
|
30
|
+
"build": "tsc",
|
|
31
|
+
"autoregress": "tsx scripts/autoregress.ts"
|
|
29
32
|
},
|
|
30
33
|
"devDependencies": {
|
|
31
34
|
"@types/js-yaml": "^4",
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// scripts/autoregress.ts
|
|
3
|
+
import * as fs from 'node:fs';
|
|
4
|
+
import * as path from 'node:path';
|
|
5
|
+
import { execSync, spawnSync } from 'node:child_process';
|
|
6
|
+
import { fileURLToPath } from 'node:url';
|
|
7
|
+
import { selectSnapshots } from '../src/snapshots/impact-selector.ts';
|
|
8
|
+
import OpenAI from 'openai';
|
|
9
|
+
import { buildImportMap } from '../src/snapshots/import-scanner.ts';
|
|
10
|
+
|
|
11
|
+
// Resolve repo-root-relative paths from this script's own location (scripts/).
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.resolve(__dirname, '..');
// All snapshot artifacts live under tests/snapshots/.
const SNAPSHOTS_DIR = path.join(ROOT, 'tests', 'snapshots');
// index.json: snapshot file → source files it covers (rebuilt by cmdGenerate).
const INDEX_PATH = path.join(SNAPSHOTS_DIR, 'index.json');
// import-map.json: source file → one-hop importers (rebuilt by cmdGenerate).
const IMPORT_MAP_PATH = path.join(SNAPSHOTS_DIR, 'import-map.json');
// Captured baseline JSON per snapshot slug.
const BASELINES_DIR = path.join(SNAPSHOTS_DIR, 'baselines');
|
|
17
|
+
|
|
18
|
+
function loadJson<T>(p: string, fallback: T): T {
|
|
19
|
+
try { return JSON.parse(fs.readFileSync(p, 'utf8')) as T; } catch { return fallback; }
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
function getChangedFiles(since?: string): string[] | null {
|
|
23
|
+
try {
|
|
24
|
+
const base = since
|
|
25
|
+
? since
|
|
26
|
+
: execSync('git merge-base origin/main HEAD', { cwd: ROOT }).toString().trim();
|
|
27
|
+
const out = execSync(`git diff ${base} HEAD --name-only`, { cwd: ROOT }).toString();
|
|
28
|
+
return out.trim().split('\n').filter(Boolean);
|
|
29
|
+
} catch { return null; }
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
function allSnapFiles(): string[] {
|
|
33
|
+
if (!fs.existsSync(SNAPSHOTS_DIR)) return [];
|
|
34
|
+
return fs.readdirSync(SNAPSHOTS_DIR)
|
|
35
|
+
.filter(f => f.endsWith('.snap.ts'))
|
|
36
|
+
.map(f => path.join('tests', 'snapshots', f));
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
function runSnapshot(snapFile: string, capture: boolean): 'pass' | 'fail' | 'baseline-missing' | 'stale' {
|
|
40
|
+
const absSnap = path.join(ROOT, snapFile);
|
|
41
|
+
const content = fs.readFileSync(absSnap, 'utf8');
|
|
42
|
+
const forMatch = content.match(/@snapshot-for:\s*(.+)/);
|
|
43
|
+
if (forMatch) {
|
|
44
|
+
const src = forMatch[1]!.trim();
|
|
45
|
+
if (!fs.existsSync(path.join(ROOT, src))) {
|
|
46
|
+
console.warn(` [warn] stale — source gone: ${src}`);
|
|
47
|
+
return 'stale';
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
const slug = path.basename(snapFile, '.snap.ts');
|
|
52
|
+
const baselinePath = path.join(BASELINES_DIR, `${slug}.json`);
|
|
53
|
+
if (!capture && !fs.existsSync(baselinePath)) {
|
|
54
|
+
console.error(` [fail] baseline missing: ${baselinePath}`);
|
|
55
|
+
return 'baseline-missing';
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const env = { ...process.env };
|
|
59
|
+
if (capture) env.CAPTURE_BASELINE = '1';
|
|
60
|
+
else delete env.CAPTURE_BASELINE;
|
|
61
|
+
|
|
62
|
+
const result = spawnSync('node', ['--test', '--import', 'tsx', absSnap], {
|
|
63
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
64
|
+
cwd: ROOT,
|
|
65
|
+
env,
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
if (result.status === 0) return 'pass';
|
|
69
|
+
if (capture) return 'pass';
|
|
70
|
+
console.error(` ${(result.stderr?.toString() ?? '') || (result.stdout?.toString() ?? '')}`);
|
|
71
|
+
return 'fail';
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
function cmdRun(args: string[]): number {
|
|
75
|
+
const runAll = args.includes('--all');
|
|
76
|
+
const sinceIdx = args.indexOf('--since');
|
|
77
|
+
const since = sinceIdx >= 0 ? args[sinceIdx + 1] : undefined;
|
|
78
|
+
const index = loadJson<Record<string, string[]>>(INDEX_PATH, {});
|
|
79
|
+
const importMap = loadJson<Record<string, string[]>>(IMPORT_MAP_PATH, {});
|
|
80
|
+
const snapFiles = allSnapFiles();
|
|
81
|
+
|
|
82
|
+
let selected: string[];
|
|
83
|
+
if (runAll || snapFiles.length === 0) {
|
|
84
|
+
selected = snapFiles;
|
|
85
|
+
console.log(`[autoregress run] --all: running ${snapFiles.length} snapshot(s)`);
|
|
86
|
+
} else {
|
|
87
|
+
const changed = getChangedFiles(since);
|
|
88
|
+
if (!changed) {
|
|
89
|
+
console.warn('[autoregress run] merge-base resolution failed — running all');
|
|
90
|
+
selected = snapFiles;
|
|
91
|
+
} else {
|
|
92
|
+
const r = selectSnapshots(changed, snapFiles, index, importMap);
|
|
93
|
+
selected = r.selected;
|
|
94
|
+
console.log(`[autoregress run] ${r.reason} (${selected.length}/${snapFiles.length})`);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
if (selected.length === 0) {
|
|
99
|
+
console.log('[autoregress run] no snapshots to run — pass');
|
|
100
|
+
return 0;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
let passed = 0, failed = 0, missing = 0, stale = 0;
|
|
104
|
+
for (const snap of selected) {
|
|
105
|
+
process.stdout.write(` ${snap} ... `);
|
|
106
|
+
const v = runSnapshot(snap, false);
|
|
107
|
+
if (v === 'pass') { passed++; console.log('pass'); }
|
|
108
|
+
else if (v === 'fail') { failed++; console.log('FAIL'); }
|
|
109
|
+
else if (v === 'baseline-missing') { missing++; console.log('BASELINE MISSING'); }
|
|
110
|
+
else { stale++; console.log('stale (skipped)'); }
|
|
111
|
+
}
|
|
112
|
+
console.log(`\n ${passed} passed ${failed} failed ${missing} baseline-missing ${stale} stale`);
|
|
113
|
+
return failed > 0 || missing > 0 ? 1 : 0;
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
function cmdUpdate(args: string[]): number {
|
|
117
|
+
const snapIdx = args.indexOf('--snapshot');
|
|
118
|
+
const slug = snapIdx >= 0 ? args[snapIdx + 1] : undefined;
|
|
119
|
+
const snapFiles = slug
|
|
120
|
+
? [path.join('tests', 'snapshots', `${slug}.snap.ts`)]
|
|
121
|
+
: allSnapFiles();
|
|
122
|
+
console.log(`[autoregress update] rewriting ${snapFiles.length} baseline(s)`);
|
|
123
|
+
let failed = 0;
|
|
124
|
+
for (const snap of snapFiles) {
|
|
125
|
+
const absSnap = path.join(ROOT, snap);
|
|
126
|
+
if (!fs.existsSync(absSnap)) {
|
|
127
|
+
console.error(` [error] snapshot file not found: ${snap}`);
|
|
128
|
+
failed++;
|
|
129
|
+
continue;
|
|
130
|
+
}
|
|
131
|
+
process.stdout.write(` ${snap} ... `);
|
|
132
|
+
runSnapshot(snap, true);
|
|
133
|
+
console.log('updated');
|
|
134
|
+
}
|
|
135
|
+
return failed > 0 ? 1 : 0;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// Version stamp written into generated snapshot headers (@generator-version).
const GENERATOR_VERSION = '1.0.0-alpha.6';

// LLM prompt template for cmdGenerate. Placeholders {filePath},
// {fileContents}, {slug}, {version}, {generatedAt}, {sourceCommit} are
// substituted via String.replace before the request is sent.
// NOTE(review): internal indentation of the template is an assumption — the
// published rendering strips leading whitespace; confirm against the repo.
const GENERATE_PROMPT = `You are generating a behavioral snapshot test for a TypeScript module.

Module path: {filePath}
Module contents:
{fileContents}

Write a snapshot test file. Requirements:
1. Header comments at top:
   // @snapshot-for: {filePath}
   // @generated-at: {generatedAt}
   // @source-commit: {sourceCommit}
   // @generator-version: {version}
2. Import the module's exported functions under test
3. Import { normalizeSnapshot } from '../../src/snapshots/serializer.ts'
4. Import fs from 'node:fs', describe/it from 'node:test', assert from 'node:assert/strict'
5. Baseline loading pattern (use slug {slug}):
   const SLUG = '{slug}';
   import { fileURLToPath } from 'node:url';
   const baselineRaw = process.env.CAPTURE_BASELINE === '1' ? '{}' : fs.readFileSync(fileURLToPath(new URL('./baselines/{slug}.json', import.meta.url)), 'utf8');
   const baseline = JSON.parse(baselineRaw);
   const captured: Record<string, unknown> = {};
   process.on('exit', () => {
     if (process.env.CAPTURE_BASELINE === '1') {
       const p = fileURLToPath(new URL('./baselines/{slug}.json', import.meta.url));
       fs.writeFileSync(p, JSON.stringify(captured, null, 2), 'utf8');
     }
   });
6. In each test: if (process.env.CAPTURE_BASELINE === '1') { captured['test-name'] = result; return; }
   Else: assert.equal(normalizeSnapshot(result), normalizeSnapshot(baseline['test-name']));
7. Write 2-4 it() tests covering representative behaviors
8. Output ONLY the TypeScript file contents, no markdown fences, no explanation`;
|
|
171
|
+
|
|
172
|
+
async function cmdGenerate(args: string[]): Promise<number> {
|
|
173
|
+
const apiKey = process.env.OPENAI_API_KEY;
|
|
174
|
+
if (!apiKey) { console.error('[autoregress generate] OPENAI_API_KEY not set'); return 1; }
|
|
175
|
+
|
|
176
|
+
const sinceIdx = args.indexOf('--since');
|
|
177
|
+
const since = sinceIdx >= 0 ? args[sinceIdx + 1] : undefined;
|
|
178
|
+
const changed = getChangedFiles(since);
|
|
179
|
+
if (!changed) { console.error('[autoregress generate] could not determine changed files'); return 1; }
|
|
180
|
+
|
|
181
|
+
const srcFiles = changed.filter(f => f.startsWith('src/') && f.endsWith('.ts'));
|
|
182
|
+
if (srcFiles.length === 0) {
|
|
183
|
+
console.log('[autoregress generate] no src/*.ts files changed — nothing to generate');
|
|
184
|
+
return 0;
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
console.log(`[autoregress generate] generating snapshots for ${srcFiles.length} file(s)`);
|
|
188
|
+
|
|
189
|
+
const client = new OpenAI({ apiKey });
|
|
190
|
+
let sourceCommit = 'unknown';
|
|
191
|
+
try { sourceCommit = execSync('git rev-parse --short HEAD', { cwd: ROOT }).toString().trim(); } catch {}
|
|
192
|
+
const generatedAt = new Date().toISOString();
|
|
193
|
+
|
|
194
|
+
for (const srcFile of srcFiles) {
|
|
195
|
+
const absFile = path.join(ROOT, srcFile);
|
|
196
|
+
if (!fs.existsSync(absFile)) { console.warn(` skip (not found): ${srcFile}`); continue; }
|
|
197
|
+
|
|
198
|
+
const fileContents = fs.readFileSync(absFile, 'utf8');
|
|
199
|
+
const slug = srcFile.replace(/[/\\]/g, '-').replace(/\.ts$/, '');
|
|
200
|
+
|
|
201
|
+
process.stdout.write(` ${srcFile} → ${slug}.snap.ts ... `);
|
|
202
|
+
|
|
203
|
+
const prompt = GENERATE_PROMPT
|
|
204
|
+
.replace(/{filePath}/g, srcFile)
|
|
205
|
+
.replace(/{fileContents}/g, fileContents)
|
|
206
|
+
.replace(/{slug}/g, slug)
|
|
207
|
+
.replace(/{version}/g, GENERATOR_VERSION)
|
|
208
|
+
.replace(/{generatedAt}/g, generatedAt)
|
|
209
|
+
.replace(/{sourceCommit}/g, sourceCommit);
|
|
210
|
+
|
|
211
|
+
let snapContent: string;
|
|
212
|
+
try {
|
|
213
|
+
const response = await client.responses.create({
|
|
214
|
+
model: process.env.CODEX_MODEL ?? 'gpt-5.3-codex',
|
|
215
|
+
instructions: 'You write TypeScript snapshot tests. Output ONLY the file contents, no markdown fences.',
|
|
216
|
+
input: prompt,
|
|
217
|
+
max_output_tokens: 2000,
|
|
218
|
+
});
|
|
219
|
+
snapContent = (response.output_text ?? '').replace(/^```typescript\n?/m, '').replace(/```$/m, '').trim();
|
|
220
|
+
} catch (err) {
|
|
221
|
+
console.error(`LLM error: ${err instanceof Error ? err.message : String(err)}`);
|
|
222
|
+
continue;
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
const snapPath = path.join(SNAPSHOTS_DIR, `${slug}.snap.ts`);
|
|
226
|
+
fs.writeFileSync(snapPath, snapContent + '\n', 'utf8');
|
|
227
|
+
|
|
228
|
+
const captureResult = spawnSync('node', ['--test', '--import', 'tsx', snapPath], {
|
|
229
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
230
|
+
cwd: ROOT,
|
|
231
|
+
env: { ...process.env, CAPTURE_BASELINE: '1' },
|
|
232
|
+
});
|
|
233
|
+
const baselinePath = path.join(BASELINES_DIR, `${slug}.json`);
|
|
234
|
+
console.log(fs.existsSync(baselinePath) ? 'generated + baseline captured' :
|
|
235
|
+
`generated (capture failed: ${captureResult.stderr?.toString().slice(0, 60)})`);
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
// Rebuild index.json from @snapshot-for headers
|
|
239
|
+
const newIndex: Record<string, string[]> = {};
|
|
240
|
+
for (const f of fs.readdirSync(SNAPSHOTS_DIR).filter(x => x.endsWith('.snap.ts'))) {
|
|
241
|
+
const snapRelPath = path.join('tests', 'snapshots', f);
|
|
242
|
+
const content = fs.readFileSync(path.join(SNAPSHOTS_DIR, f), 'utf8');
|
|
243
|
+
const sources = [...content.matchAll(/@snapshot-for:\s*(.+)/g)].map(m => m[1]!.trim());
|
|
244
|
+
if (sources.length) newIndex[snapRelPath] = sources;
|
|
245
|
+
}
|
|
246
|
+
fs.writeFileSync(INDEX_PATH, JSON.stringify(newIndex, null, 2) + '\n', 'utf8');
|
|
247
|
+
|
|
248
|
+
// Rebuild import-map.json — prefix keys/values with 'src/' to match repo-relative git diff paths
|
|
249
|
+
const rawImportMap = buildImportMap(path.join(ROOT, 'src'));
|
|
250
|
+
const newImportMap: Record<string, string[]> = {};
|
|
251
|
+
for (const [dep, importers] of Object.entries(rawImportMap)) {
|
|
252
|
+
newImportMap[`src/${dep}`] = importers.map(i => `src/${i}`);
|
|
253
|
+
}
|
|
254
|
+
fs.writeFileSync(IMPORT_MAP_PATH, JSON.stringify(newImportMap, null, 2) + '\n', 'utf8');
|
|
255
|
+
|
|
256
|
+
console.log('\n[autoregress generate] index.json + import-map.json rebuilt');
|
|
257
|
+
return 0;
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
// CLI entry point: `autoregress <run|update|generate> [flags]`.
// Top-level await is valid here: the script runs as an ES module under tsx.
const [,, subcmd, ...rest] = process.argv;
switch (subcmd) {
  case 'run': process.exit(cmdRun(rest)); break;
  case 'update': process.exit(cmdUpdate(rest)); break;
  case 'generate': process.exit(await cmdGenerate(rest)); break;
  default:
    console.error(`[autoregress] unknown subcommand: ${subcmd ?? '(none)'}`);
    process.exit(1);
}
|
package/src/cli/index.ts
CHANGED
|
@@ -17,7 +17,7 @@ import { runWatch } from './watch.ts';
|
|
|
17
17
|
const args = process.argv.slice(2);
|
|
18
18
|
|
|
19
19
|
const SUBCOMMANDS = ['init', 'run', 'preflight', 'help', '--help', '-h'] as const;
|
|
20
|
-
const VALUE_FLAGS = ['base', 'config', 'files'];
|
|
20
|
+
const VALUE_FLAGS = ['base', 'config', 'files', 'format', 'output', 'debounce'];
|
|
21
21
|
|
|
22
22
|
// Detect first non-flag arg as subcommand, default to 'run'
|
|
23
23
|
const subcommand = (args[0] && !args[0].startsWith('--')) ? args[0] : 'run';
|
|
@@ -53,6 +53,8 @@ Options (run):
|
|
|
53
53
|
--config <path> Path to config file (default: ./autopilot.config.yaml)
|
|
54
54
|
--files <a,b,c> Explicit comma-separated file list (skips git detection)
|
|
55
55
|
--dry-run Show what would run without executing
|
|
56
|
+
--format <text|sarif> Output format (default: text)
|
|
57
|
+
--output <path> Output file path (required with --format sarif)
|
|
56
58
|
|
|
57
59
|
Options (watch):
|
|
58
60
|
--config <path> Path to config file (default: ./autopilot.config.yaml)
|
|
@@ -92,12 +94,25 @@ switch (subcommand) {
|
|
|
92
94
|
const config = flag('config');
|
|
93
95
|
const filesArg = flag('files');
|
|
94
96
|
const dryRun = boolFlag('dry-run');
|
|
97
|
+
const formatArg = flag('format');
|
|
98
|
+
const outputPath = flag('output');
|
|
99
|
+
|
|
100
|
+
if (formatArg && formatArg !== 'text' && formatArg !== 'sarif') {
|
|
101
|
+
console.error(`\x1b[31m[autopilot] --format must be "text" or "sarif"\x1b[0m`);
|
|
102
|
+
process.exit(1);
|
|
103
|
+
}
|
|
104
|
+
if (formatArg === 'sarif' && !outputPath) {
|
|
105
|
+
console.error(`\x1b[31m[autopilot] --format sarif requires --output <path>\x1b[0m`);
|
|
106
|
+
process.exit(1);
|
|
107
|
+
}
|
|
95
108
|
|
|
96
109
|
const code = await runCommand({
|
|
97
110
|
base,
|
|
98
111
|
configPath: config,
|
|
99
112
|
files: filesArg ? filesArg.split(',').map(f => f.trim()) : undefined,
|
|
100
113
|
dryRun,
|
|
114
|
+
format: formatArg as 'text' | 'sarif' | undefined,
|
|
115
|
+
outputPath,
|
|
101
116
|
});
|
|
102
117
|
process.exit(code);
|
|
103
118
|
break;
|
package/src/cli/run.ts
CHANGED
|
@@ -10,6 +10,14 @@ import { resolveGitTouchedFiles } from '../core/git/touched-files.ts';
|
|
|
10
10
|
import type { RunInput } from '../core/pipeline/run.ts';
|
|
11
11
|
import type { ReviewEngine } from '../adapters/review-engine/types.ts';
|
|
12
12
|
import type { AutopilotConfig } from '../core/config/types.ts';
|
|
13
|
+
import { fileURLToPath } from 'node:url';
|
|
14
|
+
import { toSarif } from '../formatters/sarif.ts';
|
|
15
|
+
import { emitAnnotations } from '../formatters/github-annotations.ts';
|
|
16
|
+
|
|
17
|
+
function readToolVersion(): string {
|
|
18
|
+
const pkgPath = path.join(path.dirname(fileURLToPath(import.meta.url)), '../../package.json');
|
|
19
|
+
return (JSON.parse(fs.readFileSync(pkgPath, 'utf8')) as { version: string }).version;
|
|
20
|
+
}
|
|
13
21
|
|
|
14
22
|
const C = {
|
|
15
23
|
reset: '\x1b[0m',
|
|
@@ -31,6 +39,8 @@ export interface RunCommandOptions {
|
|
|
31
39
|
base?: string; // git base ref (default HEAD~1)
|
|
32
40
|
files?: string[]; // explicit file list (skips git detection)
|
|
33
41
|
dryRun?: boolean; // skip review, print what would run
|
|
42
|
+
format?: 'text' | 'sarif';
|
|
43
|
+
outputPath?: string;
|
|
34
44
|
}
|
|
35
45
|
|
|
36
46
|
/**
|
|
@@ -109,6 +119,17 @@ export async function runCommand(options: RunCommandOptions = {}): Promise<numbe
|
|
|
109
119
|
console.log('');
|
|
110
120
|
const result = await runAutopilot(input);
|
|
111
121
|
|
|
122
|
+
// emitAnnotations is a no-op unless GITHUB_ACTIONS=true
|
|
123
|
+
emitAnnotations(result.allFindings);
|
|
124
|
+
|
|
125
|
+
// Write SARIF output if requested
|
|
126
|
+
if (options.format === 'sarif' && options.outputPath) {
|
|
127
|
+
const sarif = toSarif(result, { toolVersion: readToolVersion(), cwd });
|
|
128
|
+
fs.mkdirSync(path.dirname(path.resolve(options.outputPath)), { recursive: true });
|
|
129
|
+
fs.writeFileSync(options.outputPath, JSON.stringify(sarif, null, 2), 'utf8');
|
|
130
|
+
console.log(fmt('dim', `[run] SARIF written to ${options.outputPath}`));
|
|
131
|
+
}
|
|
132
|
+
|
|
112
133
|
// Print phase summaries
|
|
113
134
|
for (const phase of result.phases) {
|
|
114
135
|
const icon = phase.status === 'pass' ? fmt('green', '✓') :
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { Finding } from '../core/findings/types.ts';
|
|
2
|
+
|
|
3
|
+
export function encodeAnnotationProperty(s: string): string {
|
|
4
|
+
return s
|
|
5
|
+
.replace(/%/g, '%25')
|
|
6
|
+
.replace(/\r/g, '%0D')
|
|
7
|
+
.replace(/\n/g, '%0A')
|
|
8
|
+
.replace(/:/g, '%3A')
|
|
9
|
+
.replace(/,/g, '%2C');
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export function encodeAnnotationData(s: string): string {
|
|
13
|
+
return s
|
|
14
|
+
.replace(/%/g, '%25')
|
|
15
|
+
.replace(/\r/g, '%0D')
|
|
16
|
+
.replace(/\n/g, '%0A');
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
function severityToCommand(s: Finding['severity']): 'error' | 'warning' | 'notice' {
|
|
20
|
+
if (s === 'critical') return 'error';
|
|
21
|
+
if (s === 'warning') return 'warning';
|
|
22
|
+
return 'notice';
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function emitAnnotations(findings: Finding[]): void {
|
|
26
|
+
if (process.env.GITHUB_ACTIONS !== 'true') return;
|
|
27
|
+
for (const f of findings) {
|
|
28
|
+
const cmd = severityToCommand(f.severity);
|
|
29
|
+
const props: string[] = [`file=${encodeAnnotationProperty(f.file)}`];
|
|
30
|
+
if (f.line !== undefined) {
|
|
31
|
+
props.push(`line=${f.line}`, `endLine=${f.line}`);
|
|
32
|
+
}
|
|
33
|
+
props.push(`title=${encodeAnnotationProperty(f.category)}`);
|
|
34
|
+
process.stdout.write(`::${cmd} ${props.join(',')}::${encodeAnnotationData(f.message)}\n`);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import * as path from 'node:path';
|
|
2
|
+
import type { RunResult } from '../core/pipeline/run.ts';
|
|
3
|
+
import type { Finding } from '../core/findings/types.ts';
|
|
4
|
+
|
|
5
|
+
interface SarifLog {
|
|
6
|
+
$schema: string;
|
|
7
|
+
version: '2.1.0';
|
|
8
|
+
runs: SarifRun[];
|
|
9
|
+
}
|
|
10
|
+
interface SarifRun {
|
|
11
|
+
tool: { driver: SarifDriver };
|
|
12
|
+
results: SarifResult[];
|
|
13
|
+
}
|
|
14
|
+
interface SarifDriver {
|
|
15
|
+
name: string;
|
|
16
|
+
version: string;
|
|
17
|
+
informationUri: string;
|
|
18
|
+
rules: SarifRule[];
|
|
19
|
+
}
|
|
20
|
+
interface SarifRule {
|
|
21
|
+
id: string;
|
|
22
|
+
name: string;
|
|
23
|
+
shortDescription: { text: string };
|
|
24
|
+
}
|
|
25
|
+
interface SarifResult {
|
|
26
|
+
ruleId: string;
|
|
27
|
+
level: 'error' | 'warning' | 'note';
|
|
28
|
+
message: { text: string };
|
|
29
|
+
locations: SarifLocation[];
|
|
30
|
+
fixes?: Array<{ description: { text: string } }>;
|
|
31
|
+
}
|
|
32
|
+
interface SarifLocation {
|
|
33
|
+
physicalLocation: {
|
|
34
|
+
artifactLocation: { uri: string; uriBaseId: string };
|
|
35
|
+
region?: { startLine: number };
|
|
36
|
+
};
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
export type { SarifLog };
|
|
40
|
+
|
|
41
|
+
export function normalizeSarifUri(file: string, cwd: string): string {
|
|
42
|
+
let rel = path.isAbsolute(file) ? path.relative(cwd, file) : file;
|
|
43
|
+
rel = rel.replace(/\\/g, '/');
|
|
44
|
+
if (rel.startsWith('./')) rel = rel.slice(2);
|
|
45
|
+
if (rel.startsWith('../')) rel = file.replace(/\\/g, '/');
|
|
46
|
+
return rel;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
function severityToLevel(s: Finding['severity']): 'error' | 'warning' | 'note' {
|
|
50
|
+
if (s === 'critical') return 'error';
|
|
51
|
+
if (s === 'warning') return 'warning';
|
|
52
|
+
return 'note';
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
export function toSarif(
|
|
56
|
+
result: RunResult,
|
|
57
|
+
opts: { toolVersion: string; cwd?: string },
|
|
58
|
+
): SarifLog {
|
|
59
|
+
const cwd = opts.cwd ?? process.cwd();
|
|
60
|
+
|
|
61
|
+
const rulesMap = new Map<string, SarifRule>();
|
|
62
|
+
for (const f of result.allFindings) {
|
|
63
|
+
if (!rulesMap.has(f.category)) {
|
|
64
|
+
rulesMap.set(f.category, {
|
|
65
|
+
id: f.category,
|
|
66
|
+
name: f.category,
|
|
67
|
+
shortDescription: { text: f.category },
|
|
68
|
+
});
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
const results: SarifResult[] = result.allFindings.map(f => {
|
|
73
|
+
const r: SarifResult = {
|
|
74
|
+
ruleId: f.category,
|
|
75
|
+
level: severityToLevel(f.severity),
|
|
76
|
+
message: { text: f.message },
|
|
77
|
+
locations: [{
|
|
78
|
+
physicalLocation: {
|
|
79
|
+
artifactLocation: { uri: normalizeSarifUri(f.file, cwd), uriBaseId: '%SRCROOT%' },
|
|
80
|
+
...(f.line !== undefined ? { region: { startLine: f.line } } : {}),
|
|
81
|
+
},
|
|
82
|
+
}],
|
|
83
|
+
};
|
|
84
|
+
if (f.suggestion) r.fixes = [{ description: { text: f.suggestion } }];
|
|
85
|
+
return r;
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
return {
|
|
89
|
+
$schema: 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json',
|
|
90
|
+
version: '2.1.0',
|
|
91
|
+
runs: [{
|
|
92
|
+
tool: {
|
|
93
|
+
driver: {
|
|
94
|
+
name: 'claude-autopilot',
|
|
95
|
+
version: opts.toolVersion,
|
|
96
|
+
informationUri: 'https://github.com/axledbetter/claude-autopilot',
|
|
97
|
+
rules: [...rulesMap.values()],
|
|
98
|
+
},
|
|
99
|
+
},
|
|
100
|
+
results,
|
|
101
|
+
}],
|
|
102
|
+
};
|
|
103
|
+
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
const HIGH_IMPACT_PATTERNS = [
|
|
2
|
+
/^src\/core\/pipeline\//,
|
|
3
|
+
/^src\/adapters\//,
|
|
4
|
+
/^src\/core\/findings\//,
|
|
5
|
+
/^src\/core\/config\//,
|
|
6
|
+
];
|
|
7
|
+
|
|
8
|
+
export interface SelectResult {
|
|
9
|
+
selected: string[];
|
|
10
|
+
fullRun: boolean;
|
|
11
|
+
reason: string;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function selectSnapshots(
|
|
15
|
+
changedFiles: string[],
|
|
16
|
+
allSnapshotFiles: string[],
|
|
17
|
+
index: Record<string, string[]>,
|
|
18
|
+
importMap: Record<string, string[]>,
|
|
19
|
+
options: { highImpactPatterns?: RegExp[]; volumeThreshold?: number } = {},
|
|
20
|
+
): SelectResult {
|
|
21
|
+
const patterns = options.highImpactPatterns ?? HIGH_IMPACT_PATTERNS;
|
|
22
|
+
const volumeThreshold = options.volumeThreshold ?? 10;
|
|
23
|
+
|
|
24
|
+
if (changedFiles.length > volumeThreshold) {
|
|
25
|
+
return { selected: allSnapshotFiles, fullRun: true, reason: 'volume override (>10 files changed)' };
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
for (const f of changedFiles) {
|
|
29
|
+
for (const p of patterns) {
|
|
30
|
+
if (p.test(f)) {
|
|
31
|
+
return { selected: allSnapshotFiles, fullRun: true, reason: `high-impact path matched: ${f}` };
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// Build: sourceFile → snapFiles that cover it
|
|
37
|
+
const sourceToSnaps: Record<string, string[]> = {};
|
|
38
|
+
for (const [snapFile, sources] of Object.entries(index)) {
|
|
39
|
+
for (const src of sources) {
|
|
40
|
+
if (!sourceToSnaps[src]) sourceToSnaps[src] = [];
|
|
41
|
+
sourceToSnaps[src]!.push(snapFile);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const selected = new Set<string>();
|
|
46
|
+
for (const changed of changedFiles) {
|
|
47
|
+
for (const snap of sourceToSnaps[changed] ?? []) selected.add(snap);
|
|
48
|
+
for (const importer of importMap[changed] ?? []) {
|
|
49
|
+
for (const snap of sourceToSnaps[importer] ?? []) selected.add(snap);
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
return {
|
|
54
|
+
selected: [...selected],
|
|
55
|
+
fullRun: false,
|
|
56
|
+
reason: selected.size === 0
|
|
57
|
+
? 'no snapshots matched changed files'
|
|
58
|
+
: `${selected.size} snapshot(s) selected`,
|
|
59
|
+
};
|
|
60
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import * as fs from 'node:fs';
|
|
2
|
+
import * as path from 'node:path';
|
|
3
|
+
|
|
4
|
+
const IMPORT_RE = /^(?:import|export)\s+(?:.*?from\s+)?['"]([^'"]+)['"]/gm;
|
|
5
|
+
|
|
6
|
+
function allTsFiles(dir: string): string[] {
|
|
7
|
+
const results: string[] = [];
|
|
8
|
+
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
|
|
9
|
+
const full = path.join(dir, entry.name);
|
|
10
|
+
if (entry.isDirectory()) results.push(...allTsFiles(full));
|
|
11
|
+
else if (entry.isFile() && entry.name.endsWith('.ts')) results.push(full);
|
|
12
|
+
}
|
|
13
|
+
return results;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
function resolveImport(importer: string, specifier: string, srcDir: string): string | null {
|
|
17
|
+
if (!specifier.startsWith('.')) return null;
|
|
18
|
+
const abs = path.resolve(path.dirname(importer), specifier);
|
|
19
|
+
const withExt = abs.endsWith('.ts') ? abs : abs + '.ts';
|
|
20
|
+
const rel = path.relative(srcDir, withExt).replace(/\\/g, '/');
|
|
21
|
+
if (rel.startsWith('..')) return null;
|
|
22
|
+
return rel;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function buildImportMap(srcDir: string): Record<string, string[]> {
|
|
26
|
+
const absDir = path.resolve(srcDir);
|
|
27
|
+
const files = allTsFiles(absDir);
|
|
28
|
+
const map: Record<string, string[]> = {};
|
|
29
|
+
|
|
30
|
+
for (const file of files) {
|
|
31
|
+
const relImporter = path.relative(absDir, file).replace(/\\/g, '/');
|
|
32
|
+
const content = fs.readFileSync(file, 'utf8');
|
|
33
|
+
let m: RegExpExecArray | null;
|
|
34
|
+
IMPORT_RE.lastIndex = 0;
|
|
35
|
+
while ((m = IMPORT_RE.exec(content)) !== null) {
|
|
36
|
+
const resolved = resolveImport(file, m[1]!, absDir);
|
|
37
|
+
if (!resolved) continue;
|
|
38
|
+
if (!map[resolved]) map[resolved] = [];
|
|
39
|
+
if (!map[resolved]!.includes(relImporter)) map[resolved]!.push(relImporter);
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
return map;
|
|
44
|
+
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
const ISO_TS_RE = /^\d{4}-\d{2}-\d{2}T/;
|
|
2
|
+
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-/i;
|
|
3
|
+
|
|
4
|
+
function normalizeValue(value: unknown, cwd?: string): unknown {
|
|
5
|
+
if (typeof value === 'string') {
|
|
6
|
+
if (ISO_TS_RE.test(value)) return '<timestamp>';
|
|
7
|
+
if (UUID_RE.test(value)) return '<uuid>';
|
|
8
|
+
if (cwd && value.startsWith(cwd + '/')) return value.slice(cwd.length + 1);
|
|
9
|
+
return value;
|
|
10
|
+
}
|
|
11
|
+
if (Array.isArray(value)) return value.map(v => normalizeValue(v, cwd));
|
|
12
|
+
if (value !== null && typeof value === 'object') {
|
|
13
|
+
const sorted: Record<string, unknown> = {};
|
|
14
|
+
for (const key of Object.keys(value as object).sort()) {
|
|
15
|
+
sorted[key] = normalizeValue((value as Record<string, unknown>)[key], cwd);
|
|
16
|
+
}
|
|
17
|
+
return sorted;
|
|
18
|
+
}
|
|
19
|
+
return value;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export function normalizeSnapshot(value: unknown, cwd?: string): string {
|
|
23
|
+
return JSON.stringify(normalizeValue(value, cwd), null, 2);
|
|
24
|
+
}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{}
|