@mutineerjs/mutineer 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -7
- package/dist/core/__tests__/module.spec.js +66 -3
- package/dist/core/__tests__/sfc.spec.d.ts +1 -0
- package/dist/core/__tests__/sfc.spec.js +76 -0
- package/dist/core/__tests__/variant-utils.spec.d.ts +1 -0
- package/dist/core/__tests__/variant-utils.spec.js +93 -0
- package/dist/runner/__tests__/args.spec.d.ts +1 -0
- package/dist/runner/__tests__/args.spec.js +225 -0
- package/dist/runner/__tests__/cache.spec.d.ts +1 -0
- package/dist/runner/__tests__/cache.spec.js +180 -0
- package/dist/runner/__tests__/changed.spec.d.ts +1 -0
- package/dist/runner/__tests__/changed.spec.js +227 -0
- package/dist/runner/__tests__/cleanup.spec.d.ts +1 -0
- package/dist/runner/__tests__/cleanup.spec.js +41 -0
- package/dist/runner/__tests__/config.spec.d.ts +1 -0
- package/dist/runner/__tests__/config.spec.js +71 -0
- package/dist/runner/__tests__/coverage-resolver.spec.d.ts +1 -0
- package/dist/runner/__tests__/coverage-resolver.spec.js +171 -0
- package/dist/runner/__tests__/pool-executor.spec.d.ts +1 -0
- package/dist/runner/__tests__/pool-executor.spec.js +213 -0
- package/dist/runner/__tests__/tasks.spec.d.ts +1 -0
- package/dist/runner/__tests__/tasks.spec.js +95 -0
- package/dist/runner/__tests__/variants.spec.d.ts +1 -0
- package/dist/runner/__tests__/variants.spec.js +259 -0
- package/dist/runner/args.d.ts +5 -0
- package/dist/runner/args.js +7 -0
- package/dist/runner/config.js +2 -2
- package/dist/runner/coverage-resolver.d.ts +21 -0
- package/dist/runner/coverage-resolver.js +96 -0
- package/dist/runner/jest/__tests__/pool.spec.d.ts +1 -0
- package/dist/runner/jest/__tests__/pool.spec.js +212 -0
- package/dist/runner/jest/__tests__/worker-runtime.spec.d.ts +1 -0
- package/dist/runner/jest/__tests__/worker-runtime.spec.js +148 -0
- package/dist/runner/orchestrator.js +43 -295
- package/dist/runner/pool-executor.d.ts +17 -0
- package/dist/runner/pool-executor.js +143 -0
- package/dist/runner/shared/__tests__/mutant-paths.spec.d.ts +1 -0
- package/dist/runner/shared/__tests__/mutant-paths.spec.js +66 -0
- package/dist/runner/shared/__tests__/redirect-state.spec.d.ts +1 -0
- package/dist/runner/shared/__tests__/redirect-state.spec.js +56 -0
- package/dist/runner/tasks.d.ts +12 -0
- package/dist/runner/tasks.js +25 -0
- package/dist/runner/variants.d.ts +17 -2
- package/dist/runner/variants.js +33 -0
- package/dist/runner/vitest/__tests__/redirect-loader.spec.js +4 -0
- package/dist/utils/__tests__/logger.spec.d.ts +1 -0
- package/dist/utils/__tests__/logger.spec.js +61 -0
- package/dist/utils/__tests__/normalizePath.spec.d.ts +1 -0
- package/dist/utils/__tests__/normalizePath.spec.js +22 -0
- package/package.json +3 -1
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import fs from 'node:fs/promises';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import os from 'node:os';
|
|
5
|
+
import { clearCacheOnStart, saveCacheAtomic, decodeCacheKey, keyForTests, hash, readMutantCache, } from '../cache.js';
|
|
6
|
+
let tmpDir;
|
|
7
|
+
beforeEach(async () => {
|
|
8
|
+
tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mutineer-cache-'));
|
|
9
|
+
});
|
|
10
|
+
afterEach(async () => {
|
|
11
|
+
await fs.rm(tmpDir, { recursive: true, force: true });
|
|
12
|
+
});
|
|
13
|
+
describe('clearCacheOnStart', () => {
|
|
14
|
+
it('removes the cache file if it exists', async () => {
|
|
15
|
+
const cacheFile = path.join(tmpDir, '.mutate-cache.json');
|
|
16
|
+
await fs.writeFile(cacheFile, '{}');
|
|
17
|
+
await clearCacheOnStart(tmpDir);
|
|
18
|
+
await expect(fs.access(cacheFile)).rejects.toThrow();
|
|
19
|
+
});
|
|
20
|
+
it('does not throw if cache file does not exist', async () => {
|
|
21
|
+
await expect(clearCacheOnStart(tmpDir)).resolves.toBeUndefined();
|
|
22
|
+
});
|
|
23
|
+
});
|
|
24
|
+
describe('saveCacheAtomic', () => {
|
|
25
|
+
it('writes cache data to the file', async () => {
|
|
26
|
+
const cache = {
|
|
27
|
+
key1: {
|
|
28
|
+
status: 'killed',
|
|
29
|
+
file: 'foo.ts',
|
|
30
|
+
line: 1,
|
|
31
|
+
col: 0,
|
|
32
|
+
mutator: 'flipEQ',
|
|
33
|
+
},
|
|
34
|
+
};
|
|
35
|
+
await saveCacheAtomic(tmpDir, cache);
|
|
36
|
+
const content = await fs.readFile(path.join(tmpDir, '.mutate-cache.json'), 'utf8');
|
|
37
|
+
expect(JSON.parse(content)).toEqual(cache);
|
|
38
|
+
});
|
|
39
|
+
it('overwrites existing cache', async () => {
|
|
40
|
+
await saveCacheAtomic(tmpDir, { old: {} });
|
|
41
|
+
const newCache = {
|
|
42
|
+
new: {
|
|
43
|
+
status: 'escaped',
|
|
44
|
+
file: 'bar.ts',
|
|
45
|
+
line: 2,
|
|
46
|
+
col: 3,
|
|
47
|
+
mutator: 'andToOr',
|
|
48
|
+
},
|
|
49
|
+
};
|
|
50
|
+
await saveCacheAtomic(tmpDir, newCache);
|
|
51
|
+
const content = await fs.readFile(path.join(tmpDir, '.mutate-cache.json'), 'utf8');
|
|
52
|
+
expect(JSON.parse(content)).toEqual(newCache);
|
|
53
|
+
});
|
|
54
|
+
});
|
|
55
|
+
describe('decodeCacheKey', () => {
|
|
56
|
+
it('decodes a full cache key', () => {
|
|
57
|
+
const key = 'testsig:codesig:src/foo.ts:10,5:flipEQ';
|
|
58
|
+
const decoded = decodeCacheKey(key);
|
|
59
|
+
expect(decoded.file).toBe('src/foo.ts');
|
|
60
|
+
expect(decoded.line).toBe(10);
|
|
61
|
+
expect(decoded.col).toBe(5);
|
|
62
|
+
expect(decoded.mutator).toBe('flipEQ');
|
|
63
|
+
});
|
|
64
|
+
it('handles key with no colons', () => {
|
|
65
|
+
const decoded = decodeCacheKey('nodelimiters');
|
|
66
|
+
expect(decoded.mutator).toBe('unknown');
|
|
67
|
+
});
|
|
68
|
+
it('handles key with only one colon', () => {
|
|
69
|
+
const decoded = decodeCacheKey('only:one');
|
|
70
|
+
expect(decoded.mutator).toBe('one');
|
|
71
|
+
});
|
|
72
|
+
it('handles malformed position', () => {
|
|
73
|
+
const key = 'a:b:file:badpos:mutator';
|
|
74
|
+
const decoded = decodeCacheKey(key);
|
|
75
|
+
expect(decoded.mutator).toBe('mutator');
|
|
76
|
+
});
|
|
77
|
+
it('handles key with exactly two colons (no firstColon in rest)', () => {
|
|
78
|
+
// Key: "pos:mutator" after splitting last colon → rest = "pos", no colon in rest
|
|
79
|
+
// Full key: "10,5:mutator" → lastColon splits mutator, positionColon splits pos
|
|
80
|
+
// rest becomes empty string before positionColon
|
|
81
|
+
const decoded = decodeCacheKey('10,5:mutator');
|
|
82
|
+
expect(decoded.mutator).toBe('mutator');
|
|
83
|
+
expect(decoded.line).toBe(0); // positionColon = -1, returns early
|
|
84
|
+
});
|
|
85
|
+
it('handles key with exactly three colons (firstColon but no secondColon)', () => {
|
|
86
|
+
// "sig:10,5:mutator" → mutator='mutator', posRaw='10,5', rest='sig'
|
|
87
|
+
// firstColon in 'sig' = -1, returns early at line 70
|
|
88
|
+
const decoded = decodeCacheKey('sig:10,5:mutator');
|
|
89
|
+
expect(decoded.mutator).toBe('mutator');
|
|
90
|
+
expect(decoded.line).toBe(10);
|
|
91
|
+
expect(decoded.col).toBe(5);
|
|
92
|
+
});
|
|
93
|
+
it('handles key with four colons (firstColon but no secondColon in restAfterFirst)', () => {
|
|
94
|
+
// "tsig:csig:10,5:mutator" → mutator='mutator', posRaw='10,5', rest='tsig:csig'
|
|
95
|
+
// firstColon=4, restAfterFirst='csig', secondColon in 'csig' = -1, returns at line 73
|
|
96
|
+
const decoded = decodeCacheKey('tsig:csig:10,5:mutator');
|
|
97
|
+
expect(decoded.mutator).toBe('mutator');
|
|
98
|
+
expect(decoded.line).toBe(10);
|
|
99
|
+
expect(decoded.col).toBe(5);
|
|
100
|
+
});
|
|
101
|
+
});
|
|
102
|
+
describe('keyForTests', () => {
|
|
103
|
+
it('produces deterministic keys regardless of input order', () => {
|
|
104
|
+
const key1 = keyForTests(['b.test.ts', 'a.test.ts']);
|
|
105
|
+
const key2 = keyForTests(['a.test.ts', 'b.test.ts']);
|
|
106
|
+
expect(key1).toBe(key2);
|
|
107
|
+
});
|
|
108
|
+
it('produces different keys for different test sets', () => {
|
|
109
|
+
const key1 = keyForTests(['a.test.ts']);
|
|
110
|
+
const key2 = keyForTests(['b.test.ts']);
|
|
111
|
+
expect(key1).not.toBe(key2);
|
|
112
|
+
});
|
|
113
|
+
});
|
|
114
|
+
describe('hash', () => {
|
|
115
|
+
it('returns a 12 character hex string', () => {
|
|
116
|
+
const h = hash('test');
|
|
117
|
+
expect(h).toMatch(/^[0-9a-f]{12}$/);
|
|
118
|
+
});
|
|
119
|
+
it('returns the same hash for the same input', () => {
|
|
120
|
+
expect(hash('hello')).toBe(hash('hello'));
|
|
121
|
+
});
|
|
122
|
+
it('returns different hashes for different inputs', () => {
|
|
123
|
+
expect(hash('a')).not.toBe(hash('b'));
|
|
124
|
+
});
|
|
125
|
+
});
|
|
126
|
+
describe('readMutantCache', () => {
|
|
127
|
+
it('returns empty object when no cache file exists', async () => {
|
|
128
|
+
const result = await readMutantCache(tmpDir);
|
|
129
|
+
expect(result).toEqual({});
|
|
130
|
+
});
|
|
131
|
+
it('reads and normalizes object-format cache entries', async () => {
|
|
132
|
+
const cache = {
|
|
133
|
+
'testsig:codesig:file.ts:1,0:flip': {
|
|
134
|
+
status: 'killed',
|
|
135
|
+
file: 'file.ts',
|
|
136
|
+
line: 1,
|
|
137
|
+
col: 0,
|
|
138
|
+
mutator: 'flip',
|
|
139
|
+
},
|
|
140
|
+
};
|
|
141
|
+
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), JSON.stringify(cache));
|
|
142
|
+
const result = await readMutantCache(tmpDir);
|
|
143
|
+
expect(result['testsig:codesig:file.ts:1,0:flip']).toEqual({
|
|
144
|
+
status: 'killed',
|
|
145
|
+
file: 'file.ts',
|
|
146
|
+
line: 1,
|
|
147
|
+
col: 0,
|
|
148
|
+
mutator: 'flip',
|
|
149
|
+
});
|
|
150
|
+
});
|
|
151
|
+
it('reads and normalizes old string-format cache entries', async () => {
|
|
152
|
+
const cache = {
|
|
153
|
+
'testsig:codesig:file.ts:1,0:flip': 'killed',
|
|
154
|
+
};
|
|
155
|
+
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), JSON.stringify(cache));
|
|
156
|
+
const result = await readMutantCache(tmpDir);
|
|
157
|
+
const entry = result['testsig:codesig:file.ts:1,0:flip'];
|
|
158
|
+
expect(entry.status).toBe('killed');
|
|
159
|
+
expect(entry.mutator).toBe('flip');
|
|
160
|
+
});
|
|
161
|
+
it('returns empty object for invalid JSON', async () => {
|
|
162
|
+
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), 'not json');
|
|
163
|
+
const result = await readMutantCache(tmpDir);
|
|
164
|
+
expect(result).toEqual({});
|
|
165
|
+
});
|
|
166
|
+
it('normalizes partial object entries with decoded fallbacks', async () => {
|
|
167
|
+
const cache = {
|
|
168
|
+
'testsig:codesig:file.ts:5,3:mut': {
|
|
169
|
+
status: 'escaped',
|
|
170
|
+
},
|
|
171
|
+
};
|
|
172
|
+
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), JSON.stringify(cache));
|
|
173
|
+
const result = await readMutantCache(tmpDir);
|
|
174
|
+
const entry = result['testsig:codesig:file.ts:5,3:mut'];
|
|
175
|
+
expect(entry.status).toBe('escaped');
|
|
176
|
+
expect(entry.line).toBe(5);
|
|
177
|
+
expect(entry.col).toBe(3);
|
|
178
|
+
expect(entry.mutator).toBe('mut');
|
|
179
|
+
});
|
|
180
|
+
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Compiled declaration stub: marks the spec's .d.ts as an ES module with no exports.
export {};
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
// Spec for dist/runner/changed.js (listChangedFiles): git-based changed-file
// discovery plus optional import-graph dependency resolution. `spawnSync`,
// `existsSync` and `readFileSync` are mocked so no real git/fs is touched.
// NOTE: vi.mock factories are hoisted, so they delegate to vi.fn() wrappers
// declared alongside them rather than capturing test-local state.
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { listChangedFiles } from '../changed.js';
// Mock child_process.spawnSync
const spawnSyncMock = vi.fn();
vi.mock('node:child_process', () => ({
    spawnSync: (...args) => spawnSyncMock(...args),
}));
// Mock fs functions
const existsSyncMock = vi.fn();
const readFileSyncMock = vi.fn();
vi.mock('node:fs', () => ({
    // Both the default export and named exports are mocked because callers
    // may use either import style for node:fs.
    default: {
        existsSync: (...args) => existsSyncMock(...args),
        readFileSync: (...args) => readFileSyncMock(...args),
    },
    existsSync: (...args) => existsSyncMock(...args),
    readFileSync: (...args) => readFileSyncMock(...args),
}));
describe('listChangedFiles', () => {
    beforeEach(() => {
        vi.clearAllMocks();
        // Default: files exist
        existsSyncMock.mockReturnValue(true);
    });
    it('returns empty array when no git repo is found', () => {
        // Non-zero status from `git rev-parse` means no repo root.
        spawnSyncMock.mockReturnValue({ status: 1, stdout: '' });
        const result = listChangedFiles('/not-a-repo', { quiet: true });
        expect(result).toEqual([]);
    });
    it('returns changed files from git diff', () => {
        // First call: rev-parse --show-toplevel (find repo root)
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            // Diff output is NUL-delimited (-z style).
            if (args.includes('--name-only') && args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0src/bar.ts\0' };
            }
            if (args.includes('--name-only') && args.includes('HEAD')) {
                return { status: 0, stdout: '' };
            }
            if (args.includes('--others')) {
                return { status: 0, stdout: '' };
            }
            return { status: 1, stdout: '' };
        });
        const result = listChangedFiles('/repo');
        expect(result).toHaveLength(2);
        // Results are absolute paths rooted at the repo toplevel.
        expect(result).toContain('/repo/src/foo.ts');
        expect(result).toContain('/repo/src/bar.ts');
    });
    it('deduplicates files from multiple git sources', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' };
            }
            if (args.includes('HEAD') && !args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' }; // same file
            }
            if (args.includes('--others')) {
                return { status: 0, stdout: '' };
            }
            return { status: 1, stdout: '' };
        });
        const result = listChangedFiles('/repo');
        expect(result).toHaveLength(1);
    });
    it('skips deleted/missing files', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'deleted.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        // File reported by git but absent on disk → filtered out.
        existsSyncMock.mockReturnValue(false);
        const result = listChangedFiles('/repo');
        expect(result).toEqual([]);
    });
    it('includes untracked files', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            // `git ls-files --others` lists untracked files.
            if (args.includes('--others')) {
                return { status: 0, stdout: 'new-file.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        const result = listChangedFiles('/repo');
        expect(result).toContain('/repo/new-file.ts');
    });
    it('returns empty when all git commands fail', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            return { status: 1, stdout: '' };
        });
        const result = listChangedFiles('/repo');
        expect(result).toEqual([]);
    });
    it('uses custom baseRef', () => {
        // Capture every git invocation to inspect the diff range used.
        const gitArgs = [];
        spawnSyncMock.mockImplementation((_cmd, args) => {
            gitArgs.push(args);
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            return { status: 0, stdout: '' };
        });
        listChangedFiles('/repo', { baseRef: 'develop' });
        const diffCall = gitArgs.find((a) => a.some((x) => x.includes('...')));
        expect(diffCall).toBeDefined();
        expect(diffCall.some((a) => a.includes('develop...HEAD'))).toBe(true);
    });
    it('resolves dependencies when includeDeps is true', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        // When reading the changed file to resolve deps
        readFileSyncMock.mockReturnValue('// no imports');
        const result = listChangedFiles('/repo', { includeDeps: true });
        // Should at least contain the original file
        expect(result).toContain('/repo/src/foo.ts');
    });
    it('resolves dependencies with import statements', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        // File with imports - the import resolution will fail (no real files)
        // but it exercises the parsing code paths
        readFileSyncMock.mockReturnValue('import { bar } from "./bar"\nexport { baz } from "./baz"\nconst x = require("./qux")');
        const result = listChangedFiles('/repo', { includeDeps: true });
        expect(result).toContain('/repo/src/foo.ts');
    });
    it('skips non-local imports in dependency resolution', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        // Non-local imports (no './' prefix) should be skipped
        readFileSyncMock.mockReturnValue('import lodash from "lodash"');
        const result = listChangedFiles('/repo', { includeDeps: true });
        expect(result).toHaveLength(1);
        expect(result).toContain('/repo/src/foo.ts');
    });
    it('handles file that no longer exists during dep resolution', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        // First existsSync for changed file check = true
        // Then existsSync for dep resolution: file exists check, then readFile fails
        let callCount = 0;
        existsSyncMock.mockImplementation(() => {
            callCount++;
            // First call is for the changed file in the main loop
            return callCount <= 1;
        });
        readFileSyncMock.mockImplementation(() => {
            throw new Error('ENOENT');
        });
        const result = listChangedFiles('/repo', { includeDeps: true });
        expect(result).toContain('/repo/src/foo.ts');
    });
    it('respects maxDepth option', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'src/foo.ts\0' };
            }
            return { status: 0, stdout: '' };
        });
        readFileSyncMock.mockReturnValue('import { x } from "./bar"');
        const result = listChangedFiles('/repo', {
            includeDeps: true,
            maxDepth: 0,
        });
        // maxDepth=0 means no recursion into deps
        expect(result).toContain('/repo/src/foo.ts');
    });
    it('only processes source files for dependency resolution', () => {
        spawnSyncMock.mockImplementation((_cmd, args) => {
            if (args.includes('--show-toplevel')) {
                return { status: 0, stdout: '/repo\n' };
            }
            if (args.includes('main...HEAD')) {
                return { status: 0, stdout: 'README.md\0' }; // non-source file
            }
            return { status: 0, stdout: '' };
        });
        const result = listChangedFiles('/repo', { includeDeps: true });
        // README.md doesn't match /\.(js|ts|vue|mjs|cjs)$/, so no deps resolved
        expect(result).toContain('/repo/README.md');
        expect(readFileSyncMock).not.toHaveBeenCalled();
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Compiled declaration stub: marks the spec's .d.ts as an ES module with no exports.
export {};
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
// Spec for dist/runner/cleanup.js (cleanupMutineerDirs): recursive removal of
// generated __mutineer__ directories under a root, leaving everything else intact.
// Runs against a fresh temp directory per test.
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'node:fs/promises';
import path from 'node:path';
import os from 'node:os';
import { cleanupMutineerDirs } from '../cleanup.js';
let tmpDir;
beforeEach(async () => {
    tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mutineer-cleanup-'));
});
afterEach(async () => {
    await fs.rm(tmpDir, { recursive: true, force: true });
});
describe('cleanupMutineerDirs', () => {
    it('removes __mutineer__ directories', async () => {
        const mutDir = path.join(tmpDir, 'src', '__mutineer__');
        await fs.mkdir(mutDir, { recursive: true });
        await fs.writeFile(path.join(mutDir, 'mutant.ts'), 'code');
        await cleanupMutineerDirs(tmpDir);
        // fs.access rejects once the directory is gone.
        await expect(fs.access(mutDir)).rejects.toThrow();
    });
    it('removes nested __mutineer__ directories', async () => {
        // Two __mutineer__ dirs at different depths must both be removed.
        const dir1 = path.join(tmpDir, 'src', 'a', '__mutineer__');
        const dir2 = path.join(tmpDir, 'src', 'b', '__mutineer__');
        await fs.mkdir(dir1, { recursive: true });
        await fs.mkdir(dir2, { recursive: true });
        await cleanupMutineerDirs(tmpDir);
        await expect(fs.access(dir1)).rejects.toThrow();
        await expect(fs.access(dir2)).rejects.toThrow();
    });
    it('does not throw when no __mutineer__ dirs exist', async () => {
        await expect(cleanupMutineerDirs(tmpDir)).resolves.toBeUndefined();
    });
    it('preserves non-mutineer directories', async () => {
        const srcDir = path.join(tmpDir, 'src');
        await fs.mkdir(srcDir, { recursive: true });
        await fs.writeFile(path.join(srcDir, 'file.ts'), 'code');
        await cleanupMutineerDirs(tmpDir);
        const stat = await fs.stat(srcDir);
        expect(stat.isDirectory()).toBe(true);
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Compiled declaration stub: marks the spec's .d.ts as an ES module with no exports.
export {};
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
// Spec for dist/runner/config.js (loadMutineerConfig): config-file discovery,
// explicit-path loading, candidate-extension precedence, and error wrapping.
// The final test intentionally pins two known upstream bugs (see its comment).
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'node:fs/promises';
import path from 'node:path';
import os from 'node:os';
import { loadMutineerConfig } from '../config.js';
let tmpDir;
beforeEach(async () => {
    tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mutineer-config-'));
});
afterEach(async () => {
    await fs.rm(tmpDir, { recursive: true, force: true });
});
describe('loadMutineerConfig', () => {
    it('throws when no config file is found', async () => {
        await expect(loadMutineerConfig(tmpDir)).rejects.toThrow('No config found in');
    });
    it('throws when explicit config path does not exist', async () => {
        await expect(loadMutineerConfig(tmpDir, 'nonexistent.js')).rejects.toThrow('No config found at nonexistent.js');
    });
    it('loads a .js config file', async () => {
        const configFile = path.join(tmpDir, 'mutineer.config.js');
        await fs.writeFile(configFile, 'export default { runner: "vitest" }');
        const config = await loadMutineerConfig(tmpDir);
        expect(config).toEqual({ runner: 'vitest' });
    });
    it('loads a .mjs config file', async () => {
        const configFile = path.join(tmpDir, 'mutineer.config.mjs');
        await fs.writeFile(configFile, 'export default { runner: "jest" }');
        const config = await loadMutineerConfig(tmpDir);
        expect(config).toEqual({ runner: 'jest' });
    });
    it('loads config from explicit path', async () => {
        const configFile = path.join(tmpDir, 'custom.config.mjs');
        await fs.writeFile(configFile, 'export default { maxMutantsPerFile: 10 }');
        const config = await loadMutineerConfig(tmpDir, 'custom.config.mjs');
        expect(config).toEqual({ maxMutantsPerFile: 10 });
    });
    it('prefers mutineer.config.ts over .js and .mjs', async () => {
        // When a .ts config exists but we can't load it with Vite, it will fail.
        // We just test the .js fallback works when no .ts exists.
        const jsConfig = path.join(tmpDir, 'mutineer.config.js');
        const mjsConfig = path.join(tmpDir, 'mutineer.config.mjs');
        await fs.writeFile(jsConfig, 'export default { source: "js" }');
        await fs.writeFile(mjsConfig, 'export default { source: "mjs" }');
        const config = await loadMutineerConfig(tmpDir);
        // .js comes before .mjs in the candidate order
        expect(config).toEqual({ source: 'js' });
    });
    it('wraps load errors with config path info', async () => {
        const configFile = path.join(tmpDir, 'mutineer.config.js');
        // Write invalid JS that will fail to import
        await fs.writeFile(configFile, '??? not valid javascript ???');
        await expect(loadMutineerConfig(tmpDir)).rejects.toThrow(/Failed to load config from/);
    });
    // BUG: Two bugs compound here:
    // 1. validateConfig uses `&&` instead of `||`: `typeof config !== 'object' && config === null`
    // Since typeof null === 'object', this condition is always false, so null passes validation.
    // 2. loadModule uses `||` instead of `??`: `mod.default || mod`
    // When default export is null (falsy), it falls back to the module namespace object.
    // Together: null configs pass validation AND get returned as the module namespace.
    it('BUG: null config passes validation due to && vs || logic error', async () => {
        const configFile = path.join(tmpDir, 'mutineer.config.mjs');
        await fs.writeFile(configFile, 'export default null');
        // This SHOULD throw but doesn't because of the validateConfig bug.
        // Additionally, loadModule returns { default: null } instead of null
        // because it uses || (which treats null as falsy) instead of ??
        const config = await loadMutineerConfig(tmpDir);
        // Bug: returns the module namespace object instead of throwing
        // (this assertion pins current behavior; update it when the bug is fixed).
        expect(config).toHaveProperty('default', null);
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Compiled declaration stub: marks the spec's .d.ts as an ES module with no exports.
export {};
|