@delegance/claude-autopilot 1.0.0-alpha.5 → 1.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/package.json +5 -2
- package/scripts/autoregress.ts +268 -0
- package/src/snapshots/impact-selector.ts +60 -0
- package/src/snapshots/import-scanner.ts +44 -0
- package/src/snapshots/serializer.ts +24 -0
- package/tests/snapshots/baselines/.gitkeep +0 -0
- package/tests/snapshots/import-map.json +1 -0
- package/tests/snapshots/index.json +1 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,18 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## 1.0.0-alpha.6
|
|
4
|
+
|
|
5
|
+
### Added
|
|
6
|
+
|
|
7
|
+
- **Auto-regression testing** (`scripts/autoregress.ts generate|run|update`) — autoresearch-inspired snapshot tests for changed source modules
|
|
8
|
+
- **Impact-aware selection** — only fires snapshots whose source modules (or one-hop importers) were touched; high-impact paths (`src/core/pipeline/**`, `src/adapters/**`, `src/core/findings/**`, `src/core/config/**`) and >10-file changes trigger full run
|
|
9
|
+
- **Snapshot serializer** (`src/snapshots/serializer.ts`) — deterministic JSON normalization: sorted keys, `<timestamp>`, `<uuid>`, path stripping
|
|
10
|
+
- **Import scanner** (`src/snapshots/import-scanner.ts`) — static `import`/`export` graph → reverse dependency map
|
|
11
|
+
- **Impact selector** (`src/snapshots/impact-selector.ts`) — merge-base diff + one-hop expansion + overrides
|
|
12
|
+
- **Baseline capture** — `CAPTURE_BASELINE=1` env flag; `autoregress update` rewrites baselines after intentional changes
|
|
13
|
+
- **Staleness detection** — warns and skips snapshots whose `@snapshot-for` source file no longer exists
|
|
14
|
+
- 10 new unit tests (AR1-AR10) for serializer, import scanner, and impact selector
|
|
15
|
+
|
|
3
16
|
## 1.0.0-alpha.5 (2026-04-21)
|
|
4
17
|
|
|
5
18
|
### New Features
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@delegance/claude-autopilot",
|
|
3
|
-
"version": "1.0.0-alpha.
|
|
3
|
+
"version": "1.0.0-alpha.6",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "Claude Code automation pipeline: spec → plan → implement → validate → PR",
|
|
6
6
|
"keywords": ["claude", "autopilot", "ai", "pipeline", "code-review", "cli"],
|
|
@@ -20,12 +20,15 @@
|
|
|
20
20
|
"src/",
|
|
21
21
|
"presets/",
|
|
22
22
|
"scripts/test-runner.mjs",
|
|
23
|
+
"scripts/autoregress.ts",
|
|
24
|
+
"tests/snapshots/",
|
|
23
25
|
"CHANGELOG.md"
|
|
24
26
|
],
|
|
25
27
|
"scripts": {
|
|
26
28
|
"test": "node scripts/test-runner.mjs",
|
|
27
29
|
"typecheck": "tsc --noEmit",
|
|
28
|
-
"build": "tsc"
|
|
30
|
+
"build": "tsc",
|
|
31
|
+
"autoregress": "tsx scripts/autoregress.ts"
|
|
29
32
|
},
|
|
30
33
|
"devDependencies": {
|
|
31
34
|
"@types/js-yaml": "^4",
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// scripts/autoregress.ts
|
|
3
|
+
import * as fs from 'node:fs';
|
|
4
|
+
import * as path from 'node:path';
|
|
5
|
+
import { execSync, spawnSync } from 'node:child_process';
|
|
6
|
+
import { fileURLToPath } from 'node:url';
|
|
7
|
+
import { selectSnapshots } from '../src/snapshots/impact-selector.ts';
|
|
8
|
+
import OpenAI from 'openai';
|
|
9
|
+
import { buildImportMap } from '../src/snapshots/import-scanner.ts';
|
|
10
|
+
|
|
11
|
+
// Resolve all paths relative to this script's own location so the CLI works
// regardless of the caller's cwd. scripts/ sits one level below the repo root.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.resolve(__dirname, '..');
// Directory holding the generated *.snap.ts test files.
const SNAPSHOTS_DIR = path.join(ROOT, 'tests', 'snapshots');
// index.json: snapshot file → source files it covers (rebuilt by `generate`
// from the @snapshot-for headers).
const INDEX_PATH = path.join(SNAPSHOTS_DIR, 'index.json');
// import-map.json: source file → files importing it (rebuilt by `generate`
// via buildImportMap).
const IMPORT_MAP_PATH = path.join(SNAPSHOTS_DIR, 'import-map.json');
// Captured baseline JSON, one file per snapshot slug.
const BASELINES_DIR = path.join(SNAPSHOTS_DIR, 'baselines');
|
|
17
|
+
|
|
18
|
+
function loadJson<T>(p: string, fallback: T): T {
|
|
19
|
+
try { return JSON.parse(fs.readFileSync(p, 'utf8')) as T; } catch { return fallback; }
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
function getChangedFiles(since?: string): string[] | null {
|
|
23
|
+
try {
|
|
24
|
+
const base = since
|
|
25
|
+
? since
|
|
26
|
+
: execSync('git merge-base origin/main HEAD', { cwd: ROOT }).toString().trim();
|
|
27
|
+
const out = execSync(`git diff ${base} HEAD --name-only`, { cwd: ROOT }).toString();
|
|
28
|
+
return out.trim().split('\n').filter(Boolean);
|
|
29
|
+
} catch { return null; }
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
function allSnapFiles(): string[] {
|
|
33
|
+
if (!fs.existsSync(SNAPSHOTS_DIR)) return [];
|
|
34
|
+
return fs.readdirSync(SNAPSHOTS_DIR)
|
|
35
|
+
.filter(f => f.endsWith('.snap.ts'))
|
|
36
|
+
.map(f => path.join('tests', 'snapshots', f));
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
function runSnapshot(snapFile: string, capture: boolean): 'pass' | 'fail' | 'baseline-missing' | 'stale' {
|
|
40
|
+
const absSnap = path.join(ROOT, snapFile);
|
|
41
|
+
const content = fs.readFileSync(absSnap, 'utf8');
|
|
42
|
+
const forMatch = content.match(/@snapshot-for:\s*(.+)/);
|
|
43
|
+
if (forMatch) {
|
|
44
|
+
const src = forMatch[1]!.trim();
|
|
45
|
+
if (!fs.existsSync(path.join(ROOT, src))) {
|
|
46
|
+
console.warn(` [warn] stale — source gone: ${src}`);
|
|
47
|
+
return 'stale';
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
const slug = path.basename(snapFile, '.snap.ts');
|
|
52
|
+
const baselinePath = path.join(BASELINES_DIR, `${slug}.json`);
|
|
53
|
+
if (!capture && !fs.existsSync(baselinePath)) {
|
|
54
|
+
console.error(` [fail] baseline missing: ${baselinePath}`);
|
|
55
|
+
return 'baseline-missing';
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const env = { ...process.env };
|
|
59
|
+
if (capture) env.CAPTURE_BASELINE = '1';
|
|
60
|
+
else delete env.CAPTURE_BASELINE;
|
|
61
|
+
|
|
62
|
+
const result = spawnSync('node', ['--test', '--import', 'tsx', absSnap], {
|
|
63
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
64
|
+
cwd: ROOT,
|
|
65
|
+
env,
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
if (result.status === 0) return 'pass';
|
|
69
|
+
if (capture) return 'pass';
|
|
70
|
+
console.error(` ${(result.stderr?.toString() ?? '') || (result.stdout?.toString() ?? '')}`);
|
|
71
|
+
return 'fail';
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
function cmdRun(args: string[]): number {
|
|
75
|
+
const runAll = args.includes('--all');
|
|
76
|
+
const sinceIdx = args.indexOf('--since');
|
|
77
|
+
const since = sinceIdx >= 0 ? args[sinceIdx + 1] : undefined;
|
|
78
|
+
const index = loadJson<Record<string, string[]>>(INDEX_PATH, {});
|
|
79
|
+
const importMap = loadJson<Record<string, string[]>>(IMPORT_MAP_PATH, {});
|
|
80
|
+
const snapFiles = allSnapFiles();
|
|
81
|
+
|
|
82
|
+
let selected: string[];
|
|
83
|
+
if (runAll || snapFiles.length === 0) {
|
|
84
|
+
selected = snapFiles;
|
|
85
|
+
console.log(`[autoregress run] --all: running ${snapFiles.length} snapshot(s)`);
|
|
86
|
+
} else {
|
|
87
|
+
const changed = getChangedFiles(since);
|
|
88
|
+
if (!changed) {
|
|
89
|
+
console.warn('[autoregress run] merge-base resolution failed — running all');
|
|
90
|
+
selected = snapFiles;
|
|
91
|
+
} else {
|
|
92
|
+
const r = selectSnapshots(changed, snapFiles, index, importMap);
|
|
93
|
+
selected = r.selected;
|
|
94
|
+
console.log(`[autoregress run] ${r.reason} (${selected.length}/${snapFiles.length})`);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
if (selected.length === 0) {
|
|
99
|
+
console.log('[autoregress run] no snapshots to run — pass');
|
|
100
|
+
return 0;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
let passed = 0, failed = 0, missing = 0, stale = 0;
|
|
104
|
+
for (const snap of selected) {
|
|
105
|
+
process.stdout.write(` ${snap} ... `);
|
|
106
|
+
const v = runSnapshot(snap, false);
|
|
107
|
+
if (v === 'pass') { passed++; console.log('pass'); }
|
|
108
|
+
else if (v === 'fail') { failed++; console.log('FAIL'); }
|
|
109
|
+
else if (v === 'baseline-missing') { missing++; console.log('BASELINE MISSING'); }
|
|
110
|
+
else { stale++; console.log('stale (skipped)'); }
|
|
111
|
+
}
|
|
112
|
+
console.log(`\n ${passed} passed ${failed} failed ${missing} baseline-missing ${stale} stale`);
|
|
113
|
+
return failed > 0 || missing > 0 ? 1 : 0;
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
function cmdUpdate(args: string[]): number {
|
|
117
|
+
const snapIdx = args.indexOf('--snapshot');
|
|
118
|
+
const slug = snapIdx >= 0 ? args[snapIdx + 1] : undefined;
|
|
119
|
+
const snapFiles = slug
|
|
120
|
+
? [path.join('tests', 'snapshots', `${slug}.snap.ts`)]
|
|
121
|
+
: allSnapFiles();
|
|
122
|
+
console.log(`[autoregress update] rewriting ${snapFiles.length} baseline(s)`);
|
|
123
|
+
let failed = 0;
|
|
124
|
+
for (const snap of snapFiles) {
|
|
125
|
+
const absSnap = path.join(ROOT, snap);
|
|
126
|
+
if (!fs.existsSync(absSnap)) {
|
|
127
|
+
console.error(` [error] snapshot file not found: ${snap}`);
|
|
128
|
+
failed++;
|
|
129
|
+
continue;
|
|
130
|
+
}
|
|
131
|
+
process.stdout.write(` ${snap} ... `);
|
|
132
|
+
runSnapshot(snap, true);
|
|
133
|
+
console.log('updated');
|
|
134
|
+
}
|
|
135
|
+
return failed > 0 ? 1 : 0;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// Stamped into generated snapshot headers as @generator-version so snapshots
// produced by an older generator can be identified later.
const GENERATOR_VERSION = '1.0.0-alpha.6';

// LLM prompt template for snapshot generation. The {filePath}, {fileContents},
// {slug}, {version}, {generatedAt} and {sourceCommit} placeholders are
// substituted in cmdGenerate before the request is sent. The prescribed test
// layout mirrors runSnapshot's contract: CAPTURE_BASELINE=1 writes
// baselines/<slug>.json on process exit; otherwise each test asserts against
// the stored baseline via normalizeSnapshot.
const GENERATE_PROMPT = `You are generating a behavioral snapshot test for a TypeScript module.

Module path: {filePath}
Module contents:
{fileContents}

Write a snapshot test file. Requirements:
1. Header comments at top:
   // @snapshot-for: {filePath}
   // @generated-at: {generatedAt}
   // @source-commit: {sourceCommit}
   // @generator-version: {version}
2. Import the module's exported functions under test
3. Import { normalizeSnapshot } from '../../src/snapshots/serializer.ts'
4. Import fs from 'node:fs', describe/it from 'node:test', assert from 'node:assert/strict'
5. Baseline loading pattern (use slug {slug}):
   const SLUG = '{slug}';
   import { fileURLToPath } from 'node:url';
   const baselineRaw = process.env.CAPTURE_BASELINE === '1' ? '{}' : fs.readFileSync(fileURLToPath(new URL('./baselines/{slug}.json', import.meta.url)), 'utf8');
   const baseline = JSON.parse(baselineRaw);
   const captured: Record<string, unknown> = {};
   process.on('exit', () => {
     if (process.env.CAPTURE_BASELINE === '1') {
       const p = fileURLToPath(new URL('./baselines/{slug}.json', import.meta.url));
       fs.writeFileSync(p, JSON.stringify(captured, null, 2), 'utf8');
     }
   });
6. In each test: if (process.env.CAPTURE_BASELINE === '1') { captured['test-name'] = result; return; }
   Else: assert.equal(normalizeSnapshot(result), normalizeSnapshot(baseline['test-name']));
7. Write 2-4 it() tests covering representative behaviors
8. Output ONLY the TypeScript file contents, no markdown fences, no explanation`;
|
|
171
|
+
|
|
172
|
+
async function cmdGenerate(args: string[]): Promise<number> {
|
|
173
|
+
const apiKey = process.env.OPENAI_API_KEY;
|
|
174
|
+
if (!apiKey) { console.error('[autoregress generate] OPENAI_API_KEY not set'); return 1; }
|
|
175
|
+
|
|
176
|
+
const sinceIdx = args.indexOf('--since');
|
|
177
|
+
const since = sinceIdx >= 0 ? args[sinceIdx + 1] : undefined;
|
|
178
|
+
const changed = getChangedFiles(since);
|
|
179
|
+
if (!changed) { console.error('[autoregress generate] could not determine changed files'); return 1; }
|
|
180
|
+
|
|
181
|
+
const srcFiles = changed.filter(f => f.startsWith('src/') && f.endsWith('.ts'));
|
|
182
|
+
if (srcFiles.length === 0) {
|
|
183
|
+
console.log('[autoregress generate] no src/*.ts files changed — nothing to generate');
|
|
184
|
+
return 0;
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
console.log(`[autoregress generate] generating snapshots for ${srcFiles.length} file(s)`);
|
|
188
|
+
|
|
189
|
+
const client = new OpenAI({ apiKey });
|
|
190
|
+
let sourceCommit = 'unknown';
|
|
191
|
+
try { sourceCommit = execSync('git rev-parse --short HEAD', { cwd: ROOT }).toString().trim(); } catch {}
|
|
192
|
+
const generatedAt = new Date().toISOString();
|
|
193
|
+
|
|
194
|
+
for (const srcFile of srcFiles) {
|
|
195
|
+
const absFile = path.join(ROOT, srcFile);
|
|
196
|
+
if (!fs.existsSync(absFile)) { console.warn(` skip (not found): ${srcFile}`); continue; }
|
|
197
|
+
|
|
198
|
+
const fileContents = fs.readFileSync(absFile, 'utf8');
|
|
199
|
+
const slug = srcFile.replace(/[/\\]/g, '-').replace(/\.ts$/, '');
|
|
200
|
+
|
|
201
|
+
process.stdout.write(` ${srcFile} → ${slug}.snap.ts ... `);
|
|
202
|
+
|
|
203
|
+
const prompt = GENERATE_PROMPT
|
|
204
|
+
.replace(/{filePath}/g, srcFile)
|
|
205
|
+
.replace(/{fileContents}/g, fileContents)
|
|
206
|
+
.replace(/{slug}/g, slug)
|
|
207
|
+
.replace(/{version}/g, GENERATOR_VERSION)
|
|
208
|
+
.replace(/{generatedAt}/g, generatedAt)
|
|
209
|
+
.replace(/{sourceCommit}/g, sourceCommit);
|
|
210
|
+
|
|
211
|
+
let snapContent: string;
|
|
212
|
+
try {
|
|
213
|
+
const response = await client.responses.create({
|
|
214
|
+
model: process.env.CODEX_MODEL ?? 'gpt-5.3-codex',
|
|
215
|
+
instructions: 'You write TypeScript snapshot tests. Output ONLY the file contents, no markdown fences.',
|
|
216
|
+
input: prompt,
|
|
217
|
+
max_output_tokens: 2000,
|
|
218
|
+
});
|
|
219
|
+
snapContent = (response.output_text ?? '').replace(/^```typescript\n?/m, '').replace(/```$/m, '').trim();
|
|
220
|
+
} catch (err) {
|
|
221
|
+
console.error(`LLM error: ${err instanceof Error ? err.message : String(err)}`);
|
|
222
|
+
continue;
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
const snapPath = path.join(SNAPSHOTS_DIR, `${slug}.snap.ts`);
|
|
226
|
+
fs.writeFileSync(snapPath, snapContent + '\n', 'utf8');
|
|
227
|
+
|
|
228
|
+
const captureResult = spawnSync('node', ['--test', '--import', 'tsx', snapPath], {
|
|
229
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
230
|
+
cwd: ROOT,
|
|
231
|
+
env: { ...process.env, CAPTURE_BASELINE: '1' },
|
|
232
|
+
});
|
|
233
|
+
const baselinePath = path.join(BASELINES_DIR, `${slug}.json`);
|
|
234
|
+
console.log(fs.existsSync(baselinePath) ? 'generated + baseline captured' :
|
|
235
|
+
`generated (capture failed: ${captureResult.stderr?.toString().slice(0, 60)})`);
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
// Rebuild index.json from @snapshot-for headers
|
|
239
|
+
const newIndex: Record<string, string[]> = {};
|
|
240
|
+
for (const f of fs.readdirSync(SNAPSHOTS_DIR).filter(x => x.endsWith('.snap.ts'))) {
|
|
241
|
+
const snapRelPath = path.join('tests', 'snapshots', f);
|
|
242
|
+
const content = fs.readFileSync(path.join(SNAPSHOTS_DIR, f), 'utf8');
|
|
243
|
+
const sources = [...content.matchAll(/@snapshot-for:\s*(.+)/g)].map(m => m[1]!.trim());
|
|
244
|
+
if (sources.length) newIndex[snapRelPath] = sources;
|
|
245
|
+
}
|
|
246
|
+
fs.writeFileSync(INDEX_PATH, JSON.stringify(newIndex, null, 2) + '\n', 'utf8');
|
|
247
|
+
|
|
248
|
+
// Rebuild import-map.json — prefix keys/values with 'src/' to match repo-relative git diff paths
|
|
249
|
+
const rawImportMap = buildImportMap(path.join(ROOT, 'src'));
|
|
250
|
+
const newImportMap: Record<string, string[]> = {};
|
|
251
|
+
for (const [dep, importers] of Object.entries(rawImportMap)) {
|
|
252
|
+
newImportMap[`src/${dep}`] = importers.map(i => `src/${i}`);
|
|
253
|
+
}
|
|
254
|
+
fs.writeFileSync(IMPORT_MAP_PATH, JSON.stringify(newImportMap, null, 2) + '\n', 'utf8');
|
|
255
|
+
|
|
256
|
+
console.log('\n[autoregress generate] index.json + import-map.json rebuilt');
|
|
257
|
+
return 0;
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
const [,, subcmd, ...rest] = process.argv;
|
|
261
|
+
switch (subcmd) {
|
|
262
|
+
case 'run': process.exit(cmdRun(rest)); break;
|
|
263
|
+
case 'update': process.exit(cmdUpdate(rest)); break;
|
|
264
|
+
case 'generate': process.exit(await cmdGenerate(rest)); break;
|
|
265
|
+
default:
|
|
266
|
+
console.error(`[autoregress] unknown subcommand: ${subcmd ?? '(none)'}`);
|
|
267
|
+
process.exit(1);
|
|
268
|
+
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
const HIGH_IMPACT_PATTERNS = [
|
|
2
|
+
/^src\/core\/pipeline\//,
|
|
3
|
+
/^src\/adapters\//,
|
|
4
|
+
/^src\/core\/findings\//,
|
|
5
|
+
/^src\/core\/config\//,
|
|
6
|
+
];
|
|
7
|
+
|
|
8
|
+
export interface SelectResult {
|
|
9
|
+
selected: string[];
|
|
10
|
+
fullRun: boolean;
|
|
11
|
+
reason: string;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function selectSnapshots(
|
|
15
|
+
changedFiles: string[],
|
|
16
|
+
allSnapshotFiles: string[],
|
|
17
|
+
index: Record<string, string[]>,
|
|
18
|
+
importMap: Record<string, string[]>,
|
|
19
|
+
options: { highImpactPatterns?: RegExp[]; volumeThreshold?: number } = {},
|
|
20
|
+
): SelectResult {
|
|
21
|
+
const patterns = options.highImpactPatterns ?? HIGH_IMPACT_PATTERNS;
|
|
22
|
+
const volumeThreshold = options.volumeThreshold ?? 10;
|
|
23
|
+
|
|
24
|
+
if (changedFiles.length > volumeThreshold) {
|
|
25
|
+
return { selected: allSnapshotFiles, fullRun: true, reason: 'volume override (>10 files changed)' };
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
for (const f of changedFiles) {
|
|
29
|
+
for (const p of patterns) {
|
|
30
|
+
if (p.test(f)) {
|
|
31
|
+
return { selected: allSnapshotFiles, fullRun: true, reason: `high-impact path matched: ${f}` };
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// Build: sourceFile → snapFiles that cover it
|
|
37
|
+
const sourceToSnaps: Record<string, string[]> = {};
|
|
38
|
+
for (const [snapFile, sources] of Object.entries(index)) {
|
|
39
|
+
for (const src of sources) {
|
|
40
|
+
if (!sourceToSnaps[src]) sourceToSnaps[src] = [];
|
|
41
|
+
sourceToSnaps[src]!.push(snapFile);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const selected = new Set<string>();
|
|
46
|
+
for (const changed of changedFiles) {
|
|
47
|
+
for (const snap of sourceToSnaps[changed] ?? []) selected.add(snap);
|
|
48
|
+
for (const importer of importMap[changed] ?? []) {
|
|
49
|
+
for (const snap of sourceToSnaps[importer] ?? []) selected.add(snap);
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
return {
|
|
54
|
+
selected: [...selected],
|
|
55
|
+
fullRun: false,
|
|
56
|
+
reason: selected.size === 0
|
|
57
|
+
? 'no snapshots matched changed files'
|
|
58
|
+
: `${selected.size} snapshot(s) selected`,
|
|
59
|
+
};
|
|
60
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import * as fs from 'node:fs';
|
|
2
|
+
import * as path from 'node:path';
|
|
3
|
+
|
|
4
|
+
const IMPORT_RE = /^(?:import|export)\s+(?:.*?from\s+)?['"]([^'"]+)['"]/gm;
|
|
5
|
+
|
|
6
|
+
function allTsFiles(dir: string): string[] {
|
|
7
|
+
const results: string[] = [];
|
|
8
|
+
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
|
|
9
|
+
const full = path.join(dir, entry.name);
|
|
10
|
+
if (entry.isDirectory()) results.push(...allTsFiles(full));
|
|
11
|
+
else if (entry.isFile() && entry.name.endsWith('.ts')) results.push(full);
|
|
12
|
+
}
|
|
13
|
+
return results;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
function resolveImport(importer: string, specifier: string, srcDir: string): string | null {
|
|
17
|
+
if (!specifier.startsWith('.')) return null;
|
|
18
|
+
const abs = path.resolve(path.dirname(importer), specifier);
|
|
19
|
+
const withExt = abs.endsWith('.ts') ? abs : abs + '.ts';
|
|
20
|
+
const rel = path.relative(srcDir, withExt).replace(/\\/g, '/');
|
|
21
|
+
if (rel.startsWith('..')) return null;
|
|
22
|
+
return rel;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function buildImportMap(srcDir: string): Record<string, string[]> {
|
|
26
|
+
const absDir = path.resolve(srcDir);
|
|
27
|
+
const files = allTsFiles(absDir);
|
|
28
|
+
const map: Record<string, string[]> = {};
|
|
29
|
+
|
|
30
|
+
for (const file of files) {
|
|
31
|
+
const relImporter = path.relative(absDir, file).replace(/\\/g, '/');
|
|
32
|
+
const content = fs.readFileSync(file, 'utf8');
|
|
33
|
+
let m: RegExpExecArray | null;
|
|
34
|
+
IMPORT_RE.lastIndex = 0;
|
|
35
|
+
while ((m = IMPORT_RE.exec(content)) !== null) {
|
|
36
|
+
const resolved = resolveImport(file, m[1]!, absDir);
|
|
37
|
+
if (!resolved) continue;
|
|
38
|
+
if (!map[resolved]) map[resolved] = [];
|
|
39
|
+
if (!map[resolved]!.includes(relImporter)) map[resolved]!.push(relImporter);
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
return map;
|
|
44
|
+
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
const ISO_TS_RE = /^\d{4}-\d{2}-\d{2}T/;
|
|
2
|
+
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-/i;
|
|
3
|
+
|
|
4
|
+
function normalizeValue(value: unknown, cwd?: string): unknown {
|
|
5
|
+
if (typeof value === 'string') {
|
|
6
|
+
if (ISO_TS_RE.test(value)) return '<timestamp>';
|
|
7
|
+
if (UUID_RE.test(value)) return '<uuid>';
|
|
8
|
+
if (cwd && value.startsWith(cwd + '/')) return value.slice(cwd.length + 1);
|
|
9
|
+
return value;
|
|
10
|
+
}
|
|
11
|
+
if (Array.isArray(value)) return value.map(v => normalizeValue(v, cwd));
|
|
12
|
+
if (value !== null && typeof value === 'object') {
|
|
13
|
+
const sorted: Record<string, unknown> = {};
|
|
14
|
+
for (const key of Object.keys(value as object).sort()) {
|
|
15
|
+
sorted[key] = normalizeValue((value as Record<string, unknown>)[key], cwd);
|
|
16
|
+
}
|
|
17
|
+
return sorted;
|
|
18
|
+
}
|
|
19
|
+
return value;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export function normalizeSnapshot(value: unknown, cwd?: string): string {
|
|
23
|
+
return JSON.stringify(normalizeValue(value, cwd), null, 2);
|
|
24
|
+
}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{}
|