@delegance/claude-autopilot 2.2.0 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,15 @@
1
1
  # Changelog
2
2
 
3
+ ## [2.3.0] — 2026-04-22
4
+
5
+ ### Added
6
+ - **Parallel chunk review** — file-level chunks are now reviewed concurrently (default parallelism: 3, configurable via `chunking.parallelism`); serial fallback preserved when `cost.budgetUSD` is set so budget enforcement remains accurate
7
+ - **`.autopilot-ignore`** — project-level suppression file; format: `<rule-id> <glob>` or bare `<glob>` (matches any finding on that path); comments and blank lines ignored; suppressed count printed dim after run
8
+ - **`--delta` mode** — only reports findings new since the previous run; pre-existing findings are hidden and the count is printed dim; findings always persisted to `.autopilot-cache/findings.json` after each run (gitignored)
9
+ - `src/core/ignore/index.ts` — `loadIgnoreRules()`, `applyIgnoreRules()`
10
+ - `src/core/persist/findings-cache.ts` — `loadCachedFindings()`, `saveCachedFindings()`, `filterNewFindings()`
11
+ - 15 new tests — **248 total**
12
+
3
13
  ## [2.2.0] — 2026-04-22
4
14
 
5
15
  ### Added
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@delegance/claude-autopilot",
3
- "version": "2.2.0",
3
+ "version": "2.3.0",
4
4
  "type": "module",
5
5
  "description": "Claude Code automation pipeline: spec → plan → implement → validate → PR",
6
6
  "keywords": [
package/src/cli/index.ts CHANGED
@@ -67,6 +67,7 @@ Options (run):
67
67
  --files <a,b,c> Explicit comma-separated file list (skips git detection)
68
68
  --dry-run Show what would run without executing
69
69
  --diff Send git diff hunks instead of full files (~70% fewer tokens)
70
+ --delta Only report findings new since last run (suppress pre-existing)
70
71
  --post-comments Post/update a summary comment on the open PR
71
72
  --format <text|sarif> Output format (default: text)
72
73
  --output <path> Output file path (required with --format sarif)
@@ -122,6 +123,7 @@ switch (subcommand) {
122
123
  const filesArg = flag('files');
123
124
  const dryRun = boolFlag('dry-run');
124
125
  const diff = boolFlag('diff');
126
+ const delta = boolFlag('delta');
125
127
  const postComments = boolFlag('post-comments');
126
128
  const formatArg = flag('format');
127
129
  const outputPath = flag('output');
@@ -141,6 +143,7 @@ switch (subcommand) {
141
143
  files: filesArg ? filesArg.split(',').map(f => f.trim()) : undefined,
142
144
  dryRun,
143
145
  diff,
146
+ delta,
144
147
  postComments,
145
148
  format: formatArg as 'text' | 'sarif' | undefined,
146
149
  outputPath,
package/src/cli/run.ts CHANGED
@@ -38,6 +38,8 @@ import { detectProtectedPaths } from '../core/detect/protected-paths.ts';
38
38
  import { detectGitContext } from '../core/detect/git-context.ts';
39
39
  import { detectProject } from './detector.ts';
40
40
  import { detectPrNumber, formatComment, postPrComment } from './pr-comment.ts';
41
+ import { loadIgnoreRules, applyIgnoreRules } from '../core/ignore/index.ts';
42
+ import { loadCachedFindings, saveCachedFindings, filterNewFindings } from '../core/persist/findings-cache.ts';
41
43
 
42
44
  function readToolVersion(): string {
43
45
  const pkgPath = path.join(path.dirname(fileURLToPath(import.meta.url)), '../../package.json');
@@ -65,6 +67,7 @@ export interface RunCommandOptions {
65
67
  files?: string[]; // explicit file list (skips git detection)
66
68
  dryRun?: boolean; // skip review, print what would run
67
69
  diff?: boolean; // use diff strategy (send git hunks instead of full files)
70
+ delta?: boolean; // only report findings not present in last run's baseline
68
71
  format?: 'text' | 'sarif';
69
72
  outputPath?: string;
70
73
  postComments?: boolean; // post/update summary comment on the open PR
@@ -187,6 +190,36 @@ export async function runCommand(options: RunCommandOptions = {}): Promise<numbe
187
190
  console.log('');
188
191
  const result = await runAutopilot(input);
189
192
 
193
+ // Apply .autopilot-ignore suppression rules
194
+ const ignoreRules = loadIgnoreRules(cwd);
195
+ if (ignoreRules.length > 0) {
196
+ const before = result.allFindings.length;
197
+ result.allFindings = applyIgnoreRules(result.allFindings, ignoreRules);
198
+ for (const phase of result.phases) {
199
+ phase.findings = applyIgnoreRules(phase.findings, ignoreRules);
200
+ }
201
+ const suppressed = before - result.allFindings.length;
202
+ if (suppressed > 0) {
203
+ console.log(fmt('dim', ` [run] ${suppressed} finding${suppressed !== 1 ? 's' : ''} suppressed by .autopilot-ignore`));
204
+ }
205
+ }
206
+
207
+ // Delta mode: filter to only new findings vs last run's baseline
208
+ if (options.delta) {
209
+ const cached = loadCachedFindings(cwd);
210
+ const before = result.allFindings.length;
211
+ // Persist the FULL set as the new baseline BEFORE filtering — otherwise pre-existing findings drop out of the baseline and reappear as "new" on the next run
212
+ saveCachedFindings(cwd, result.allFindings);
213
+ result.allFindings = filterNewFindings(result.allFindings, cached);
214
+ for (const phase of result.phases) phase.findings = filterNewFindings(phase.findings, cached);
215
+ const existing = before - result.allFindings.length;
216
+ if (existing > 0) {
217
+ console.log(fmt('dim', ` [run] ${existing} pre-existing finding${existing !== 1 ? 's' : ''} hidden (--delta mode)`));
218
+ }
219
+ } else {
220
+ // Not in delta mode: persist current findings as the baseline for future --delta runs
221
+ saveCachedFindings(cwd, result.allFindings);
222
+
190
223
  // emitAnnotations is a no-op unless GITHUB_ACTIONS=true
191
224
  emitAnnotations(result.allFindings);
192
225
 
package/src/core/ignore/index.ts ADDED
@@ -0,0 +1,42 @@
1
+ import * as fs from 'node:fs';
2
+ import * as path from 'node:path';
3
+ import { minimatch } from 'minimatch';
4
+ import type { Finding } from '../findings/types.ts';
5
+
6
+ export interface IgnoreRule {
7
+ ruleId: string | '*'; // finding id prefix or '*' for any
8
+ pathGlob: string | null; // null = match all paths
9
+ }
10
+
11
+ export function loadIgnoreRules(cwd: string): IgnoreRule[] {
12
+ const filePath = path.join(cwd, '.autopilot-ignore');
13
+ if (!fs.existsSync(filePath)) return [];
14
+
15
+ const rules: IgnoreRule[] = [];
16
+ for (const raw of fs.readFileSync(filePath, 'utf8').split('\n')) {
17
+ const line = raw.trim();
18
+ if (!line || line.startsWith('#')) continue;
19
+
20
+ const parts = line.split(/\s+/);
21
+ if (parts.length === 1) {
22
+ // bare glob — suppress any finding whose file matches
23
+ rules.push({ ruleId: '*', pathGlob: parts[0]! });
24
+ } else {
25
+ // <rule-id-or-*> <path-glob>
26
+ rules.push({ ruleId: parts[0]!, pathGlob: parts[1]! });
27
+ }
28
+ }
29
+ return rules;
30
+ }
31
+
32
+ function matchesRule(finding: Finding, rule: IgnoreRule): boolean {
33
+ const ruleMatches = rule.ruleId === '*' || finding.id.startsWith(rule.ruleId);
34
+ if (!ruleMatches) return false;
35
+ if (rule.pathGlob === null) return true;
36
+ return minimatch(finding.file.replace(/\\/g, '/'), rule.pathGlob, { matchBase: true });
37
+ }
38
+
39
+ export function applyIgnoreRules(findings: Finding[], rules: IgnoreRule[]): Finding[] {
40
+ if (rules.length === 0) return findings;
41
+ return findings.filter(f => !rules.some(r => matchesRule(f, r)));
42
+ }
package/src/core/persist/findings-cache.ts ADDED
@@ -0,0 +1,43 @@
1
+ import * as fs from 'node:fs';
2
+ import * as path from 'node:path';
3
+ import type { Finding } from '../findings/types.ts';
4
+
5
+ const CACHE_DIR = '.autopilot-cache';
6
+ const CACHE_FILE = 'findings.json';
7
+
8
+ function cacheFilePath(cwd: string): string {
9
+ return path.join(cwd, CACHE_DIR, CACHE_FILE);
10
+ }
11
+
12
+ function findingKey(f: Finding): string {
13
+ return `${f.id}::${f.file}::${f.line ?? ''}`;
14
+ }
15
+
16
+ export function loadCachedFindings(cwd: string): Finding[] {
17
+ const p = cacheFilePath(cwd);
18
+ if (!fs.existsSync(p)) return [];
19
+ try {
20
+ return JSON.parse(fs.readFileSync(p, 'utf8')) as Finding[];
21
+ } catch {
22
+ return [];
23
+ }
24
+ }
25
+
26
+ export function saveCachedFindings(cwd: string, findings: Finding[]): void {
27
+ const dir = path.join(cwd, CACHE_DIR);
28
+ fs.mkdirSync(dir, { recursive: true });
29
+ // atomic write
30
+ const tmp = cacheFilePath(cwd) + '.tmp';
31
+ fs.writeFileSync(tmp, JSON.stringify(findings, null, 2), 'utf8');
32
+ fs.renameSync(tmp, cacheFilePath(cwd));
33
+ }
34
+
35
+ /**
36
+ * Returns only findings not present in the cached baseline.
37
+ * Two findings are considered the same when id + file + line all match.
38
+ */
39
+ export function filterNewFindings(current: Finding[], cached: Finding[]): Finding[] {
40
+ if (cached.length === 0) return current;
41
+ const seen = new Set(cached.map(findingKey));
42
+ return current.filter(f => !seen.has(findingKey(f)));
43
+ }
@@ -1,7 +1,7 @@
1
1
  import type { ReviewEngine } from '../../adapters/review-engine/types.ts';
2
2
  import type { Finding } from '../findings/types.ts';
3
3
  import type { AutopilotConfig } from '../config/types.ts';
4
- import { buildReviewChunks } from '../chunking/index.ts';
4
+ import { buildReviewChunks, type ReviewChunk } from '../chunking/index.ts';
5
5
 
6
6
  export interface ReviewPhaseResult {
7
7
  phase: 'review';
@@ -22,6 +22,48 @@ export interface ReviewPhaseInput {
22
22
  base?: string;
23
23
  }
24
24
 
25
+ interface ChunkResult {
26
+ findings: Finding[];
27
+ inputTokens: number;
28
+ outputTokens: number;
29
+ costUSD: number;
30
+ }
31
+
32
+ async function reviewChunk(chunk: ReviewChunk, input: ReviewPhaseInput): Promise<ChunkResult> {
33
+ const output = await input.engine.review({
34
+ content: chunk.content,
35
+ kind: chunk.kind,
36
+ context: { stack: input.config.stack, cwd: input.cwd, gitSummary: input.gitSummary },
37
+ });
38
+ return {
39
+ findings: output.findings,
40
+ inputTokens: output.usage?.input ?? 0,
41
+ outputTokens: output.usage?.output ?? 0,
42
+ costUSD: output.usage?.costUSD ?? 0,
43
+ };
44
+ }
45
+
46
+ /** Run up to `limit` promises concurrently, preserving result order. */
47
+ async function pMap<T, R>(
48
+ items: T[],
49
+ fn: (item: T, index: number) => Promise<R>,
50
+ limit: number,
51
+ ): Promise<R[]> {
52
+ const results: R[] = new Array(items.length);
53
+ let next = 0;
54
+
55
+ async function worker(): Promise<void> {
56
+ while (next < items.length) {
57
+ const i = next++;
58
+ results[i] = await fn(items[i]!, i);
59
+ }
60
+ }
61
+
62
+ const workers = Array.from({ length: Math.min(limit, items.length) }, () => worker());
63
+ await Promise.all(workers);
64
+ return results;
65
+ }
66
+
25
67
  export async function runReviewPhase(input: ReviewPhaseInput): Promise<ReviewPhaseResult> {
26
68
  const start = Date.now();
27
69
 
@@ -39,41 +81,51 @@ export async function runReviewPhase(input: ReviewPhaseInput): Promise<ReviewPha
39
81
  base: input.base,
40
82
  });
41
83
 
42
- const allFindings: Finding[] = [];
43
- let totalInputTokens = 0;
44
- let totalOutputTokens = 0;
45
- let totalCostUSD = 0;
46
- let budgetExceeded = false;
84
+ const parallelism = input.config.chunking?.parallelism ?? 3;
85
+ const budgetUSD = input.budgetRemainingUSD;
47
86
 
48
- for (const chunk of chunks) {
49
- if (input.budgetRemainingUSD !== undefined && totalCostUSD >= input.budgetRemainingUSD) {
50
- budgetExceeded = true;
51
- break;
87
+ // Budget enforcement requires checking spend between chunks, so run serially when a
88
+ // budget is set; otherwise review chunks concurrently, up to `parallelism` at a time.
89
+ let chunkResults: ChunkResult[];
90
+ if (budgetUSD !== undefined) {
91
+ chunkResults = [];
92
+ let spent = 0;
93
+ let budgetExceeded = false;
94
+ for (const chunk of chunks) {
95
+ if (spent >= budgetUSD) { budgetExceeded = true; break; }
96
+ const r = await reviewChunk(chunk, input);
97
+ spent += r.costUSD;
98
+ chunkResults.push(r);
52
99
  }
53
- const output = await input.engine.review({
54
- content: chunk.content,
55
- kind: chunk.kind,
56
- context: { stack: input.config.stack, cwd: input.cwd, gitSummary: input.gitSummary },
57
- });
58
- allFindings.push(...output.findings);
59
- if (output.usage) {
60
- totalInputTokens += output.usage.input;
61
- totalOutputTokens += output.usage.output;
62
- if (output.usage.costUSD !== undefined) totalCostUSD += output.usage.costUSD;
100
+ if (budgetExceeded) {
101
+ chunkResults.push({
102
+ findings: [{
103
+ id: 'budget-exceeded',
104
+ source: 'pipeline',
105
+ severity: 'warning',
106
+ category: 'budget',
107
+ file: '<pipeline>',
108
+ message: `Review budget of $${budgetUSD} USD exceeded — remaining chunks skipped`,
109
+ protectedPath: false,
110
+ createdAt: new Date().toISOString(),
111
+ }],
112
+ inputTokens: 0, outputTokens: 0, costUSD: 0,
113
+ });
63
114
  }
115
+ } else {
116
+ chunkResults = await pMap(chunks, chunk => reviewChunk(chunk, input), parallelism);
64
117
  }
65
118
 
66
- if (budgetExceeded) {
67
- allFindings.push({
68
- id: 'budget-exceeded',
69
- source: 'pipeline',
70
- severity: 'warning',
71
- category: 'budget',
72
- file: '<pipeline>',
73
- message: `Review budget of $${input.budgetRemainingUSD} USD exceeded — remaining chunks skipped`,
74
- protectedPath: false,
75
- createdAt: new Date().toISOString(),
76
- });
119
+ let totalInputTokens = 0;
120
+ let totalOutputTokens = 0;
121
+ let totalCostUSD = 0;
122
+ const allFindings: Finding[] = [];
123
+
124
+ for (const r of chunkResults) {
125
+ allFindings.push(...r.findings);
126
+ totalInputTokens += r.inputTokens;
127
+ totalOutputTokens += r.outputTokens;
128
+ totalCostUSD += r.costUSD;
77
129
  }
78
130
 
79
131
  const hasCritical = allFindings.some(f => f.severity === 'critical');