@delegance/claude-autopilot 1.5.0 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,38 @@
1
1
  # Changelog
2
2
 
3
+ ## [1.6.0] — 2026-04-22
4
+
5
+ ### Added
6
+ - **Provider usage scanner** (`src/core/detect/provider-usage.ts`) — walks project source files, counts per-provider API key and SDK references (capped at 1 per file to avoid skew), returns `ProviderCounts`
7
+ - **`dominantProvider()`** — returns the provider with the highest file-reference count
8
+ - **Smart `auto` tiebreaker** — when multiple API keys are present, `auto` scans the codebase and prefers the provider already used there; falls back to env-key priority order if counts are all zero or the dominant provider's key is unavailable
9
+ - `ReviewInput.context.cwd` — threads working directory through to the review engine so `auto` knows where to scan; `review-phase.ts` now passes `cwd` in context
10
+ - 12 new tests for `detectProviderUsage` and `dominantProvider` — **181 total**
11
+
12
+ ## [1.5.0] — 2026-04-22
13
+
14
+ ### Added
15
+ - **Gemini adapter** (`gemini`) — Google Gemini 2.5 Pro via `@google/generative-ai`; accepts `GEMINI_API_KEY` or `GOOGLE_API_KEY`; 1M token context window
16
+ - **OpenAI-compatible adapter** (`openai-compatible`) — works with any OpenAI-API-compatible endpoint (Groq, Ollama, Together AI, etc.); requires `options.model`; auto-selects API key via `options.apiKeyEnv` → `OPENAI_API_KEY` → `'ollama'`
17
+ - **Updated auto adapter** — full priority chain: `ANTHROPIC_API_KEY` → `GEMINI_API_KEY`/`GOOGLE_API_KEY` → `OPENAI_API_KEY` → `GROQ_API_KEY` (wraps openai-compatible with Groq config)
18
+ - `run.ts` no-key warning now lists all four key options
19
+
20
+ ### Changed
21
+ - 169 tests total (up from 136)
22
+
23
+ ## [1.4.0] — 2026-04-21
24
+
25
+ ### Added
26
+ - **Static rules registry** (`src/core/static-rules/registry.ts`) — lazy-loads built-in rules by name; fixes critical bug where config `staticRules` was always silently ignored
27
+ - **7 built-in rules**: `hardcoded-secrets`, `npm-audit`, `package-lock-sync`, `console-log`, `todo-fixme`, `large-file`, `missing-tests`
28
+ - **Claude adapter** (`claude`) — Anthropic Claude Opus 4.7 via `@anthropic-ai/sdk`; configurable model via `context.model`
29
+ - **Auto adapter** (`auto`) — detects best available key at runtime; checked in priority order
30
+ - `doctor` now checks `ANTHROPIC_API_KEY` in addition to `OPENAI_API_KEY`
31
+ - 136 tests total
32
+
33
+ ### Fixed
34
+ - **Critical**: `staticRules` in `RunInput` was never populated — config-listed rules were silently ignored. `loadRulesFromConfig()` now wired into `run.ts`
35
+
3
36
  ## [1.2.8] — 2026-04-21
4
37
 
5
38
  ### Added
package/README.md CHANGED
@@ -118,7 +118,7 @@ Presets: `nextjs-supabase`, `t3`, `python-fastapi`, `rails-postgres`, `go`.
118
118
  ```yaml
119
119
  configVersion: 1
120
120
  reviewEngine:
121
- adapter: codex
121
+ adapter: auto # auto-detects best available key at runtime
122
122
  testCommand: npm test
123
123
  protectedPaths:
124
124
  - src/core/**
@@ -130,6 +130,37 @@ staticRules:
130
130
 
131
131
  Full schema and preset defaults: `presets/<name>/autopilot.config.yaml`.
132
132
 
133
+ ### Review Engine Adapters
134
+
135
+ | Adapter | Key required | Notes |
136
+ |---|---|---|
137
+ | `auto` | any below | Auto-selects best available (recommended) |
138
+ | `claude` | `ANTHROPIC_API_KEY` | Opus 4.7 default |
139
+ | `gemini` | `GEMINI_API_KEY` or `GOOGLE_API_KEY` | Gemini 2.5 Pro, 1M context |
140
+ | `codex` | `OPENAI_API_KEY` | GPT-5 Codex |
141
+ | `openai-compatible` | configurable | Groq, Ollama, Together AI, etc. |
142
+
143
+ `auto` prefers the provider already referenced in your codebase when multiple keys are set; otherwise it uses priority order: Anthropic → Gemini → OpenAI → Groq.
144
+
145
+ **Groq example:**
146
+ ```yaml
147
+ reviewEngine:
148
+ adapter: openai-compatible
149
+ options:
150
+ model: llama-3.3-70b-versatile
151
+ baseUrl: https://api.groq.com/openai/v1
152
+ apiKeyEnv: GROQ_API_KEY
153
+ ```
154
+
155
+ **Ollama (local, no key):**
156
+ ```yaml
157
+ reviewEngine:
158
+ adapter: openai-compatible
159
+ options:
160
+ model: llama3.2
161
+ baseUrl: http://localhost:11434/v1
162
+ ```
163
+
133
164
  ## GitHub Actions
134
165
 
135
166
  ```yaml
@@ -175,7 +206,7 @@ Four pluggable adapter points:
175
206
 
176
207
  | Point | Built-in | Purpose |
177
208
  |---|---|---|
178
- | `review-engine` | `codex` | LLM code review (OpenAI) |
209
+ | `review-engine` | `auto`, `claude`, `gemini`, `codex`, `openai-compatible` | LLM code review |
179
210
  | `vcs-host` | `github` | PR comments + SARIF upload |
180
211
  | `migration-runner` | `supabase` | DB migration execution |
181
212
  | `review-bot-parser` | `cursor` | Parse review bot comments |
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@delegance/claude-autopilot",
3
- "version": "1.5.0",
3
+ "version": "1.6.0",
4
4
  "type": "module",
5
5
  "description": "Claude Code automation pipeline: spec → plan → implement → validate → PR",
6
6
  "keywords": [
@@ -1,44 +1,74 @@
1
1
  import type { Capabilities } from '../base.ts';
2
2
  import type { ReviewEngine, ReviewInput, ReviewOutput } from './types.ts';
3
3
  import { AutopilotError } from '../../core/errors.ts';
4
+ import { detectProviderUsage, dominantProvider, type Provider } from '../../core/detect/provider-usage.ts';
4
5
 
5
- // Priority order for key detection
6
- async function resolveAdapter(): Promise<ReviewEngine> {
6
+ interface AvailableProvider {
7
+ provider: Provider;
8
+ load: () => Promise<ReviewEngine>;
9
+ }
10
+
11
+ function buildGroqAdapter(base: ReviewEngine): ReviewEngine {
12
+ return {
13
+ ...base,
14
+ name: 'auto',
15
+ review(input: ReviewInput) {
16
+ return base.review({
17
+ ...input,
18
+ context: {
19
+ ...input.context,
20
+ model: 'llama-3.3-70b-versatile',
21
+ baseUrl: 'https://api.groq.com/openai/v1',
22
+ apiKeyEnv: 'GROQ_API_KEY',
23
+ } as typeof input.context,
24
+ });
25
+ },
26
+ };
27
+ }
28
+
29
+ function getAvailableProviders(): AvailableProvider[] {
30
+ const available: AvailableProvider[] = [];
7
31
  if (process.env.ANTHROPIC_API_KEY) {
8
- const { claudeAdapter } = await import('./claude.ts');
9
- return claudeAdapter;
32
+ available.push({ provider: 'anthropic', load: async () => (await import('./claude.ts')).claudeAdapter });
10
33
  }
11
34
  if (process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY) {
12
- const { geminiAdapter } = await import('./gemini.ts');
13
- return geminiAdapter;
35
+ available.push({ provider: 'gemini', load: async () => (await import('./gemini.ts')).geminiAdapter });
14
36
  }
15
37
  if (process.env.OPENAI_API_KEY) {
16
- const { codexAdapter } = await import('./codex.ts');
17
- return codexAdapter;
38
+ available.push({ provider: 'openai', load: async () => (await import('./codex.ts')).codexAdapter });
18
39
  }
19
40
  if (process.env.GROQ_API_KEY) {
20
- const { openaiCompatibleAdapter } = await import('./openai-compatible.ts');
21
- // Wrap with Groq config injected into review() context
22
- return {
23
- ...openaiCompatibleAdapter,
24
- name: 'auto',
25
- review(input: ReviewInput) {
26
- return openaiCompatibleAdapter.review({
27
- ...input,
28
- context: {
29
- ...input.context,
30
- model: 'llama-3.3-70b-versatile',
31
- baseUrl: 'https://api.groq.com/openai/v1',
32
- apiKeyEnv: 'GROQ_API_KEY',
33
- } as typeof input.context,
34
- });
35
- },
36
- };
41
+ available.push({
42
+ provider: 'groq',
43
+ load: async () => buildGroqAdapter((await import('./openai-compatible.ts')).openaiCompatibleAdapter),
44
+ });
45
+ }
46
+ return available;
47
+ }
48
+
49
+ async function resolveAdapter(cwd: string): Promise<ReviewEngine> {
50
+ const available = getAvailableProviders();
51
+
52
+ if (available.length === 0) {
53
+ throw new AutopilotError(
54
+ 'No LLM API key found. Set one of: ANTHROPIC_API_KEY, GEMINI_API_KEY, OPENAI_API_KEY, GROQ_API_KEY',
55
+ { code: 'auth', provider: 'auto' },
56
+ );
57
+ }
58
+
59
+ // Single provider — no need to scan
60
+ if (available.length === 1) return available[0]!.load();
61
+
62
+ // Multiple keys present — prefer the provider most referenced in source code
63
+ const counts = detectProviderUsage(cwd);
64
+ const dominant = dominantProvider(counts);
65
+ if (dominant) {
66
+ const match = available.find(p => p.provider === dominant);
67
+ if (match) return match.load();
37
68
  }
38
- throw new AutopilotError(
39
- 'No LLM API key found. Set one of: ANTHROPIC_API_KEY, GEMINI_API_KEY, OPENAI_API_KEY, GROQ_API_KEY',
40
- { code: 'auth', provider: 'auto' },
41
- );
69
+
70
+ // Fallback to first available (env-key priority order)
71
+ return available[0]!.load();
42
72
  }
43
73
 
44
74
  export const autoAdapter: ReviewEngine = {
@@ -54,7 +84,9 @@ export const autoAdapter: ReviewEngine = {
54
84
  },
55
85
 
56
86
  async review(input: ReviewInput): Promise<ReviewOutput> {
57
- const adapter = await resolveAdapter();
87
+ const cwd = (input.context as Record<string, unknown> | undefined)?.['cwd'] as string | undefined
88
+ ?? process.cwd();
89
+ const adapter = await resolveAdapter(cwd);
58
90
  return adapter.review(input);
59
91
  },
60
92
  };
@@ -4,7 +4,7 @@ import type { Finding } from '../../core/findings/types.ts';
4
4
  export interface ReviewInput {
5
5
  content: string;
6
6
  kind: 'spec' | 'pr-diff' | 'file-batch';
7
- context?: { spec?: string; plan?: string; stack?: string };
7
+ context?: { spec?: string; plan?: string; stack?: string; cwd?: string };
8
8
  }
9
9
 
10
10
  export interface ReviewOutput {
@@ -0,0 +1,74 @@
1
+ import * as fs from 'node:fs';
2
+ import * as path from 'node:path';
3
+
4
+ export type Provider = 'anthropic' | 'gemini' | 'openai' | 'groq';
5
+
6
+ const PROVIDER_PATTERNS: Record<Provider, RegExp> = {
7
+ anthropic: /ANTHROPIC_API_KEY|@anthropic-ai\/sdk|anthropic\.com|claude-[a-z0-9]/gi,
8
+ gemini: /GEMINI_API_KEY|GOOGLE_API_KEY|@google\/generative-ai|generativelanguage\.googleapis/gi,
9
+ openai: /OPENAI_API_KEY|openai\.com|gpt-[0-9]/gi,
10
+ groq: /GROQ_API_KEY|api\.groq\.com/gi,
11
+ };
12
+
13
+ const SOURCE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs', '.py', '.go', '.rb']);
14
+
15
+ const SKIP_DIRS = new Set(['node_modules', '.git', 'dist', 'build', '.next', '.nuxt', 'out',
16
+ 'coverage', '__pycache__', '.venv', 'venv', 'target', '.gradle', '.cache', '.turbo']);
17
+
18
+ function walkSync(dir: string, files: string[] = []): string[] {
19
+ let entries: fs.Dirent[];
20
+ try {
21
+ entries = fs.readdirSync(dir, { withFileTypes: true });
22
+ } catch {
23
+ return files;
24
+ }
25
+ for (const entry of entries) {
26
+ if (SKIP_DIRS.has(entry.name)) continue;
27
+ const full = path.join(dir, entry.name);
28
+ if (entry.isDirectory()) {
29
+ walkSync(full, files);
30
+ } else if (entry.isFile() && SOURCE_EXTENSIONS.has(path.extname(entry.name))) {
31
+ files.push(full);
32
+ }
33
+ }
34
+ return files;
35
+ }
36
+
37
+ export interface ProviderCounts {
38
+ anthropic: number;
39
+ gemini: number;
40
+ openai: number;
41
+ groq: number;
42
+ }
43
+
44
+ /**
45
+ * Scans source files under `cwd` and returns per-provider match counts.
46
+ * Counts are capped at 1 per file so a single file with many references cannot skew the totals.
47
+ */
48
+ export function detectProviderUsage(cwd: string): ProviderCounts {
49
+ const counts: ProviderCounts = { anthropic: 0, gemini: 0, openai: 0, groq: 0 };
50
+ const files = walkSync(cwd);
51
+ for (const file of files) {
52
+ let content: string;
53
+ try {
54
+ content = fs.readFileSync(file, 'utf8');
55
+ } catch {
56
+ continue;
57
+ }
58
+ for (const [provider, pattern] of Object.entries(PROVIDER_PATTERNS) as [Provider, RegExp][]) {
59
+ pattern.lastIndex = 0;
60
+ if (pattern.test(content)) counts[provider]++;
61
+ }
62
+ }
63
+ return counts;
64
+ }
65
+
66
+ /**
67
+ * Returns the provider with the highest usage count (ties resolve in declaration order: anthropic, gemini, openai, groq), or null if all counts are zero.
68
+ */
69
+ export function dominantProvider(counts: ProviderCounts): Provider | null {
70
+ const entries = Object.entries(counts) as [Provider, number][];
71
+ const max = Math.max(...entries.map(([, v]) => v));
72
+ if (max === 0) return null;
73
+ return entries.find(([, v]) => v === max)![0];
74
+ }
@@ -49,7 +49,7 @@ export async function runReviewPhase(input: ReviewPhaseInput): Promise<ReviewPha
49
49
  const output = await input.engine.review({
50
50
  content: chunk.content,
51
51
  kind: chunk.kind,
52
- context: { stack: input.config.stack },
52
+ context: { stack: input.config.stack, cwd: input.cwd },
53
53
  });
54
54
  allFindings.push(...output.findings);
55
55
  if (output.usage) {