@mutineerjs/mutineer 0.5.1 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +42 -2
  2. package/dist/bin/__tests__/mutineer.spec.d.ts +1 -0
  3. package/dist/bin/__tests__/mutineer.spec.js +43 -0
  4. package/dist/bin/mutineer.d.ts +2 -1
  5. package/dist/bin/mutineer.js +44 -1
  6. package/dist/mutators/__tests__/operator.spec.js +97 -1
  7. package/dist/mutators/__tests__/registry.spec.js +8 -0
  8. package/dist/mutators/operator.d.ts +8 -0
  9. package/dist/mutators/operator.js +58 -1
  10. package/dist/mutators/registry.js +9 -1
  11. package/dist/mutators/utils.d.ts +2 -0
  12. package/dist/mutators/utils.js +58 -1
  13. package/dist/runner/__tests__/args.spec.js +101 -1
  14. package/dist/runner/__tests__/cache.spec.js +65 -8
  15. package/dist/runner/__tests__/changed.spec.js +85 -2
  16. package/dist/runner/__tests__/cleanup.spec.js +30 -0
  17. package/dist/runner/__tests__/config.spec.js +2 -13
  18. package/dist/runner/__tests__/coverage-resolver.spec.js +9 -2
  19. package/dist/runner/__tests__/discover.spec.js +128 -0
  20. package/dist/runner/__tests__/orchestrator.spec.d.ts +1 -0
  21. package/dist/runner/__tests__/orchestrator.spec.js +306 -0
  22. package/dist/runner/__tests__/pool-executor.spec.js +60 -1
  23. package/dist/runner/args.d.ts +18 -0
  24. package/dist/runner/args.js +40 -0
  25. package/dist/runner/cache.d.ts +19 -3
  26. package/dist/runner/cache.js +14 -7
  27. package/dist/runner/changed.js +15 -43
  28. package/dist/runner/cleanup.d.ts +3 -1
  29. package/dist/runner/cleanup.js +18 -1
  30. package/dist/runner/config.js +1 -1
  31. package/dist/runner/coverage-resolver.js +1 -1
  32. package/dist/runner/discover.d.ts +1 -1
  33. package/dist/runner/discover.js +30 -20
  34. package/dist/runner/jest/__tests__/pool.spec.js +41 -0
  35. package/dist/runner/jest/pool.js +3 -3
  36. package/dist/runner/orchestrator.d.ts +1 -0
  37. package/dist/runner/orchestrator.js +38 -9
  38. package/dist/runner/pool-executor.d.ts +5 -0
  39. package/dist/runner/pool-executor.js +15 -4
  40. package/dist/runner/vitest/__tests__/adapter.spec.js +60 -0
  41. package/dist/runner/vitest/__tests__/pool.spec.js +57 -0
  42. package/dist/runner/vitest/adapter.js +16 -9
  43. package/dist/runner/vitest/pool.js +3 -3
  44. package/dist/types/config.d.ts +4 -0
  45. package/dist/utils/__tests__/summary.spec.js +43 -1
  46. package/dist/utils/summary.d.ts +18 -0
  47. package/dist/utils/summary.js +25 -0
  48. package/package.json +2 -1
@@ -2,7 +2,7 @@ import fs from 'node:fs/promises';
2
2
  /**
3
3
  * Clean up all __mutineer__ temp directories created during mutation testing.
4
4
  */
5
- export async function cleanupMutineerDirs(cwd) {
5
+ export async function cleanupMutineerDirs(cwd, opts = {}) {
6
6
  const glob = await import('fast-glob');
7
7
  const dirs = await glob.default('**/__mutineer__', {
8
8
  cwd,
@@ -18,4 +18,21 @@ export async function cleanupMutineerDirs(cwd) {
18
18
  // Ignore cleanup errors
19
19
  }
20
20
  }
21
+ if (opts.includeCacheFiles) {
22
+ // Remove cache files (new name + legacy .mutate-cache* for migration)
23
+ const cacheFiles = await glob.default([
24
+ '.mutineer-cache*.json',
25
+ '.mutineer-cache*.json.tmp',
26
+ '.mutate-cache*.json',
27
+ '.mutate-cache*.json.tmp',
28
+ ], { cwd, absolute: true });
29
+ for (const f of cacheFiles) {
30
+ try {
31
+ await fs.unlink(f);
32
+ }
33
+ catch {
34
+ // Ignore cleanup errors
35
+ }
36
+ }
37
+ }
21
38
  }
@@ -19,7 +19,7 @@ const log = createLogger('config');
19
19
  async function loadModule(filePath) {
20
20
  const moduleUrl = pathToFileURL(filePath).href;
21
21
  const mod = await import(moduleUrl);
22
- return mod.default ?? mod;
22
+ return 'default' in mod ? mod.default : mod;
23
23
  }
24
24
  /**
25
25
  * Validate that the loaded configuration has the expected shape.
@@ -15,7 +15,7 @@ export async function resolveCoverageConfig(opts, cfg, adapter, cliArgs) {
15
15
  ? true
16
16
  : coveragePreference === false
17
17
  ? false
18
- : isCoverageRequestedInArgs([...cliArgs]) || coverageConfig.coverageEnabled;
18
+ : isCoverageRequestedInArgs([...cliArgs]);
19
19
  // Load pre-existing coverage data if provided
20
20
  let coverageData = null;
21
21
  if (opts.coverageFilePath) {
@@ -5,4 +5,4 @@ export interface DiscoveryResult {
5
5
  readonly testMap: TestMap;
6
6
  readonly directTestMap: TestMap;
7
7
  }
8
- export declare function autoDiscoverTargetsAndTests(root: string, cfg: MutineerConfig): Promise<DiscoveryResult>;
8
+ export declare function autoDiscoverTargetsAndTests(root: string, cfg: MutineerConfig, onProgress?: (msg: string) => void): Promise<DiscoveryResult>;
@@ -200,7 +200,7 @@ async function createResolver(rootAbs, exts) {
200
200
  return createNodeResolver();
201
201
  }
202
202
  }
203
- export async function autoDiscoverTargetsAndTests(root, cfg) {
203
+ export async function autoDiscoverTargetsAndTests(root, cfg, onProgress) {
204
204
  const rootAbs = path.resolve(root);
205
205
  const sourceRoots = toArray(cfg.source ?? 'src').map((s) => path.resolve(rootAbs, s));
206
206
  const exts = new Set(toArray(cfg.extensions ?? EXT_DEFAULT));
@@ -219,6 +219,7 @@ export async function autoDiscoverTargetsAndTests(root, cfg) {
219
219
  if (!tests.length)
220
220
  return { targets: [], testMap: new Map(), directTestMap: new Map() };
221
221
  const testSet = new Set(tests.map((t) => normalizePath(t)));
222
+ onProgress?.(`Found ${tests.length} test file(s), resolving imports...`);
222
223
  // 2) Create resolver (Vite if available, otherwise Node-based fallback)
223
224
  const { resolve, cleanup } = await createResolver(rootAbs, exts);
224
225
  const targets = new Map();
@@ -226,6 +227,7 @@ export async function autoDiscoverTargetsAndTests(root, cfg) {
226
227
  const directTestMap = new Map();
227
228
  const contentCache = new Map();
228
229
  const resolveCache = new Map(); // key: importer\0spec -> resolved id
230
+ const childrenCache = new Map(); // key: normalized file -> resolved child abs paths
229
231
  async function crawl(absFile, depth, seen, currentTestAbs) {
230
232
  if (depth > MAX_CRAWL_DEPTH)
231
233
  return; // sane guard for huge graphs
@@ -266,27 +268,34 @@ export async function autoDiscoverTargetsAndTests(root, cfg) {
266
268
  }
267
269
  if (!code)
268
270
  return;
269
- // find import specs and resolve relative to absFile
270
- for (const spec of extractImportSpecs(code)) {
271
- if (!spec)
272
- continue;
273
- const cacheKey = `${absFile}\0${spec}`;
274
- let resolved = resolveCache.get(cacheKey);
275
- if (!resolved) {
276
- resolved = await resolve(spec, absFile);
277
- resolveCache.set(cacheKey, resolved);
271
+ // find import specs and resolve relative to absFile, memoized per file
272
+ let children = childrenCache.get(key);
273
+ if (children === undefined) {
274
+ const resolved = [];
275
+ for (const spec of extractImportSpecs(code)) {
276
+ if (!spec)
277
+ continue;
278
+ const cacheKey = `${absFile}\0${spec}`;
279
+ let resolvedId = resolveCache.get(cacheKey);
280
+ if (!resolvedId) {
281
+ resolvedId = await resolve(spec, absFile);
282
+ resolveCache.set(cacheKey, resolvedId);
283
+ }
284
+ // vite ids could be URLs; ensure we turn into absolute disk path when possible
285
+ const next = path.isAbsolute(resolvedId)
286
+ ? resolvedId
287
+ : normalizePath(path.resolve(rootAbs, resolvedId));
288
+ // skip node_modules and virtual ids
289
+ if (next.includes('/node_modules/'))
290
+ continue;
291
+ if (!path.isAbsolute(next))
292
+ continue;
293
+ resolved.push(next);
278
294
  }
279
- // vite ids could be URLs; ensure we turn into absolute disk path when possible
280
- const next = path.isAbsolute(resolved)
281
- ? resolved
282
- : normalizePath(path.resolve(rootAbs, resolved));
283
- // skip node_modules and virtual ids
284
- if (next.includes('/node_modules/'))
285
- continue;
286
- if (!path.isAbsolute(next))
287
- continue;
288
- await crawl(next, depth + 1, seen, currentTestAbs);
295
+ childrenCache.set(key, resolved);
296
+ children = resolved;
289
297
  }
298
+ await Promise.all(children.map((next) => crawl(next, depth + 1, seen, currentTestAbs)));
290
299
  }
291
300
  try {
292
301
  await Promise.all(tests.map(async (testAbs) => {
@@ -312,6 +321,7 @@ export async function autoDiscoverTargetsAndTests(root, cfg) {
312
321
  await crawl(abs, 0, seen, testAbs);
313
322
  }
314
323
  }));
324
+ onProgress?.(`Discovery complete: ${targets.size} source file(s), ${tests.length} test file(s)`);
315
325
  return { targets: Array.from(targets.values()), testMap, directTestMap };
316
326
  }
317
327
  finally {
@@ -1,4 +1,5 @@
1
1
  import { describe, it, expect, vi } from 'vitest';
2
+ import { EventEmitter } from 'node:events';
2
3
  import { JestPool, runWithJestPool } from '../pool.js';
3
4
  // We'll use the createWorker option to inject mock workers instead of forking processes
4
5
  function makeMockWorker(id) {
@@ -110,6 +111,46 @@ describe('JestPool', () => {
110
111
  // After shutdown, initialised is set to false, so "not initialised" check fires first
111
112
  await expect(pool.run(dummyMutant, ['test.ts'])).rejects.toThrow('Pool not initialised');
112
113
  });
114
+ it('does not give a dead worker to a waiting task after timeout', async () => {
115
+ let callCount = 0;
116
+ const allWorkers = [];
117
+ const pool = new JestPool({
118
+ cwd: '/tmp',
119
+ concurrency: 1,
120
+ createWorker: (id) => {
121
+ callCount++;
122
+ const workerNum = callCount;
123
+ const worker = new EventEmitter();
124
+ worker.id = id;
125
+ worker._ready = true;
126
+ worker.start = vi.fn().mockResolvedValue(undefined);
127
+ worker.isReady = vi.fn(() => worker._ready);
128
+ worker.isBusy = vi.fn().mockReturnValue(false);
129
+ worker.run = vi.fn().mockImplementation(async () => {
130
+ if (workerNum === 1) {
131
+ worker._ready = false;
132
+ Promise.resolve().then(() => worker.emit('exit'));
133
+ return { killed: false, durationMs: 5000, error: 'timeout' };
134
+ }
135
+ return { killed: true, durationMs: 42 };
136
+ });
137
+ worker.shutdown = vi.fn().mockResolvedValue(undefined);
138
+ worker.kill = vi.fn();
139
+ allWorkers.push(worker);
140
+ return worker;
141
+ },
142
+ });
143
+ await pool.init();
144
+ const [result1, result2] = await Promise.all([
145
+ pool.run(dummyMutant, ['a.spec.ts']),
146
+ pool.run({ ...dummyMutant, id: 'test#2' }, ['b.spec.ts']),
147
+ ]);
148
+ expect(result1).toMatchObject({ error: 'timeout' });
149
+ expect(result2).toMatchObject({ killed: true });
150
+ expect(allWorkers).toHaveLength(2);
151
+ expect(allWorkers[1].run).toHaveBeenCalled();
152
+ await pool.shutdown();
153
+ });
113
154
  it('does not double-shutdown', async () => {
114
155
  const workers = [];
115
156
  const pool = new JestPool({
@@ -239,14 +239,14 @@ export class JestPool {
239
239
  });
240
240
  }
241
241
  releaseWorker(worker) {
242
+ if (!worker.isReady())
243
+ return;
242
244
  const waiting = this.waitingTasks.shift();
243
245
  if (waiting) {
244
246
  waiting(worker);
245
247
  return;
246
248
  }
247
- if (worker.isReady()) {
248
- this.availableWorkers.push(worker);
249
- }
249
+ this.availableWorkers.push(worker);
250
250
  }
251
251
  async run(mutant, tests) {
252
252
  if (!this.initialised) {
@@ -9,5 +9,6 @@
9
9
  * 5. Execute mutants via worker pool
10
10
  * 6. Report results
11
11
  */
12
+ export declare function parseMutantTimeoutMs(raw: string | undefined): number;
12
13
  export { readMutantCache } from './cache.js';
13
14
  export declare function runOrchestrator(cliArgs: string[], cwd: string): Promise<void>;
@@ -26,11 +26,11 @@ import { prepareTasks } from './tasks.js';
26
26
  import { executePool } from './pool-executor.js';
27
27
  const log = createLogger('orchestrator');
28
28
  // Per-mutant test timeout (ms). Can be overridden with env MUTINEER_MUTANT_TIMEOUT_MS
29
- const MUTANT_TIMEOUT_MS = (() => {
30
- const raw = process.env.MUTINEER_MUTANT_TIMEOUT_MS;
29
+ export function parseMutantTimeoutMs(raw) {
31
30
  const n = raw ? Number(raw) : NaN;
32
31
  return Number.isFinite(n) && n > 0 ? n : 30_000;
33
- })();
32
+ }
33
+ const MUTANT_TIMEOUT_MS = parseMutantTimeoutMs(process.env.MUTINEER_MUTANT_TIMEOUT_MS);
34
34
  // Re-export readMutantCache for external use
35
35
  export { readMutantCache } from './cache.js';
36
36
  export async function runOrchestrator(cliArgs, cwd) {
@@ -38,12 +38,12 @@ export async function runOrchestrator(cliArgs, cwd) {
38
38
  const cfgPath = extractConfigPath(cliArgs);
39
39
  const cfg = await loadMutineerConfig(cwd, cfgPath);
40
40
  const opts = parseCliOptions(cliArgs, cfg);
41
- await clearCacheOnStart(cwd);
41
+ await clearCacheOnStart(cwd, opts.shard);
42
42
  // Create test runner adapter
43
43
  const adapter = (opts.runner === 'jest' ? createJestAdapter : createVitestAdapter)({
44
44
  cwd,
45
45
  concurrency: opts.concurrency,
46
- timeoutMs: MUTANT_TIMEOUT_MS,
46
+ timeoutMs: opts.timeout ?? cfg.timeout ?? MUTANT_TIMEOUT_MS,
47
47
  config: cfg,
48
48
  cliArgs,
49
49
  });
@@ -66,8 +66,9 @@ export async function runOrchestrator(cliArgs, cwd) {
66
66
  }))
67
67
  : null;
68
68
  // 4. Discover targets and tests
69
- const cache = await readMutantCache(cwd);
70
- const discovered = await autoDiscoverTargetsAndTests(cwd, cfg);
69
+ const cache = await readMutantCache(cwd, opts.shard);
70
+ log.info('Discovering tests...');
71
+ const discovered = await autoDiscoverTargetsAndTests(cwd, cfg, (msg) => log.info(msg));
71
72
  const { testMap, directTestMap } = discovered;
72
73
  const targets = cfg.targets?.length
73
74
  ? [...cfg.targets]
@@ -88,8 +89,24 @@ export async function runOrchestrator(cliArgs, cwd) {
88
89
  }
89
90
  }
90
91
  const baselineTests = Array.from(allTestFiles);
92
+ if (opts.wantsChangedWithDeps) {
93
+ let uncoveredCount = 0;
94
+ for (const target of targets) {
95
+ const absFile = normalizePath(path.isAbsolute(getTargetFile(target))
96
+ ? getTargetFile(target)
97
+ : path.join(cwd, getTargetFile(target)));
98
+ if (changedAbs?.has(absFile) &&
99
+ !testMap.get(normalizePath(absFile))?.size) {
100
+ uncoveredCount++;
101
+ }
102
+ }
103
+ if (uncoveredCount > 0) {
104
+ log.info(`${uncoveredCount} target(s) from --changed-with-deps have no covering tests and will be skipped`);
105
+ }
106
+ }
91
107
  if (!baselineTests.length) {
92
- log.info('No tests found for targets. Exiting.');
108
+ log.error('No tests found for the selected targets. Ensure your source files are covered by at least one test file.');
109
+ process.exitCode = 1;
93
110
  return;
94
111
  }
95
112
  // 5. Run baseline tests (with coverage if needed for filtering)
@@ -122,7 +139,17 @@ export async function runOrchestrator(cliArgs, cwd) {
122
139
  return;
123
140
  }
124
141
  // 8. Prepare tasks and execute via worker pool
125
- const tasks = prepareTasks(variants, updatedCoverage.perTestCoverage, directTestMap);
142
+ let tasks = prepareTasks(variants, updatedCoverage.perTestCoverage, directTestMap);
143
+ // Apply shard filter if requested
144
+ if (opts.shard) {
145
+ const { index, total } = opts.shard;
146
+ tasks = tasks.filter((_, i) => i % total === index - 1);
147
+ log.info(`Shard ${index}/${total}: running ${tasks.length} mutant(s)`);
148
+ if (tasks.length === 0) {
149
+ log.info('No mutants assigned to this shard. Exiting.');
150
+ return;
151
+ }
152
+ }
126
153
  await executePool({
127
154
  tasks,
128
155
  adapter,
@@ -130,6 +157,8 @@ export async function runOrchestrator(cliArgs, cwd) {
130
157
  concurrency: opts.concurrency,
131
158
  progressMode: opts.progressMode,
132
159
  minKillPercent: opts.minKillPercent,
160
+ reportFormat: opts.reportFormat,
133
161
  cwd,
162
+ shard: opts.shard,
134
163
  });
135
164
  }
@@ -8,7 +8,12 @@ export interface PoolExecutionOptions {
8
8
  concurrency: number;
9
9
  progressMode: 'bar' | 'list' | 'quiet';
10
10
  minKillPercent?: number;
11
+ reportFormat?: 'text' | 'json';
11
12
  cwd: string;
13
+ shard?: {
14
+ index: number;
15
+ total: number;
16
+ };
12
17
  }
13
18
  /**
14
19
  * Execute all mutant tasks through the worker pool.
@@ -1,8 +1,9 @@
1
1
  import fs from 'node:fs';
2
+ import path from 'node:path';
2
3
  import { render } from 'ink';
3
4
  import { createElement } from 'react';
4
5
  import { Progress } from '../utils/progress.js';
5
- import { computeSummary, printSummary } from '../utils/summary.js';
6
+ import { computeSummary, printSummary, buildJsonReport, } from '../utils/summary.js';
6
7
  import { saveCacheAtomic } from './cache.js';
7
8
  import { cleanupMutineerDirs } from './cleanup.js';
8
9
  import { PoolSpinner } from '../utils/PoolSpinner.js';
@@ -29,7 +30,18 @@ export async function executePool(opts) {
29
30
  const durationMs = Date.now() - mutationStartTime;
30
31
  progress.finish();
31
32
  const summary = computeSummary(cache);
32
- printSummary(summary, cache, durationMs);
33
+ if (opts.reportFormat === 'json') {
34
+ const report = buildJsonReport(summary, cache, durationMs);
35
+ const shardSuffix = opts.shard
36
+ ? `-shard-${opts.shard.index}-of-${opts.shard.total}`
37
+ : '';
38
+ const outPath = path.join(opts.cwd, `mutineer-report${shardSuffix}.json`);
39
+ fs.writeFileSync(outPath, JSON.stringify(report, null, 2));
40
+ log.info(`JSON report written to ${path.relative(process.cwd(), outPath)}`);
41
+ }
42
+ else {
43
+ printSummary(summary, cache, durationMs);
44
+ }
33
45
  if (opts.minKillPercent !== undefined) {
34
46
  const killRateString = summary.killRate.toFixed(2);
35
47
  const thresholdString = opts.minKillPercent.toFixed(2);
@@ -68,7 +80,6 @@ export async function executePool(opts) {
68
80
  let nextIdx = 0;
69
81
  async function processTask(task) {
70
82
  const { v, tests, key, directTests } = task;
71
- log.debug('Cache ' + JSON.stringify(cache));
72
83
  const cached = cache[key];
73
84
  if (cached) {
74
85
  progress.update(cached.status);
@@ -159,7 +170,7 @@ export async function executePool(opts) {
159
170
  for (let i = 0; i < workerCount; i++)
160
171
  workers.push(worker());
161
172
  await Promise.all(workers);
162
- await saveCacheAtomic(cwd, cache);
173
+ await saveCacheAtomic(cwd, cache, opts.shard);
163
174
  }
164
175
  finally {
165
176
  process.removeAllListeners('SIGINT');
@@ -103,6 +103,40 @@ describe('Vitest adapter', () => {
103
103
  expect(args.join(' ')).toContain('--coverage.enabled=true');
104
104
  expect(args.join(' ')).toContain('--coverage.perTest=true');
105
105
  });
106
+ it('disables coverage thresholds in baseline-with-coverage to prevent threshold failures', async () => {
107
+ const adapter = makeAdapter({ cliArgs: [] });
108
+ spawnMock.mockImplementationOnce(() => ({
109
+ on: (evt, cb) => {
110
+ if (evt === 'exit')
111
+ cb(0);
112
+ },
113
+ }));
114
+ await adapter.runBaseline(['test-a'], {
115
+ collectCoverage: true,
116
+ perTestCoverage: false,
117
+ });
118
+ const args = spawnMock.mock.calls[0][1];
119
+ const argStr = args.join(' ');
120
+ expect(argStr).toContain('--coverage.thresholds.lines=0');
121
+ expect(argStr).toContain('--coverage.thresholds.functions=0');
122
+ expect(argStr).toContain('--coverage.thresholds.branches=0');
123
+ expect(argStr).toContain('--coverage.thresholds.statements=0');
124
+ });
125
+ it('strips --shard= flag from vitest args', async () => {
126
+ const adapter = makeAdapter({ cliArgs: ['--shard=1/4'] });
127
+ spawnMock.mockImplementationOnce(() => ({
128
+ on: (evt, cb) => {
129
+ if (evt === 'exit')
130
+ cb(0);
131
+ },
132
+ }));
133
+ await adapter.runBaseline(['test-a'], {
134
+ collectCoverage: false,
135
+ perTestCoverage: false,
136
+ });
137
+ const args = spawnMock.mock.calls[0][1];
138
+ expect(args.join(' ')).not.toContain('--shard');
139
+ });
106
140
  it('detects coverage config from vitest config file', async () => {
107
141
  const tmp = await fs.mkdtemp(path.join(os.tmpdir(), 'mutineer-vitest-'));
108
142
  const cfgPath = path.join(tmp, 'vitest.config.ts');
@@ -121,6 +155,32 @@ describe('Vitest adapter', () => {
121
155
  }
122
156
  });
123
157
  });
158
+ describe('hasCoverageProvider', () => {
159
+ it('returns true when @vitest/coverage-v8 is resolvable', () => {
160
+ const adapter = makeAdapter({ cwd: process.cwd() });
161
+ // coverage-v8 is installed as a devDependency, so this must resolve
162
+ expect(adapter.hasCoverageProvider()).toBe(true);
163
+ });
164
+ it('returns false when neither provider is resolvable', () => {
165
+ const adapter = makeAdapter({ cwd: '/tmp' });
166
+ expect(adapter.hasCoverageProvider()).toBe(false);
167
+ });
168
+ it('returns true when @vitest/coverage-istanbul is resolvable', () => {
169
+ const adapter = makeAdapter({ cwd: process.cwd() });
170
+ const origResolve = require.resolve;
171
+ const resolveStub = vi
172
+ .spyOn(require, 'resolve')
173
+ .mockImplementation((id, opts) => {
174
+ if (String(id).includes('coverage-v8'))
175
+ throw new Error('not found');
176
+ if (String(id).includes('coverage-istanbul'))
177
+ return '/fake/path';
178
+ return origResolve(id, opts);
179
+ });
180
+ expect(adapter.hasCoverageProvider()).toBe(true);
181
+ resolveStub.mockRestore();
182
+ });
183
+ });
124
184
  describe('isCoverageRequestedInArgs', () => {
125
185
  it('detects enabled coverage flags', () => {
126
186
  expect(isCoverageRequestedInArgs(['--coverage'])).toBe(true);
@@ -88,6 +88,63 @@ describe('VitestPool', () => {
88
88
  expect(result).toEqual({ status: 'escaped', durationMs: 7 });
89
89
  expect(mockPool.run).toHaveBeenCalledWith(mutant, ['bar.spec.ts']);
90
90
  });
91
+ it('does not give a dead worker to a waiting task after timeout', async () => {
92
+ let callCount = 0;
93
+ const allWorkers = [];
94
+ const pool = new VitestPool({
95
+ cwd: process.cwd(),
96
+ concurrency: 1,
97
+ timeoutMs: 5000,
98
+ createWorker: (id) => {
99
+ callCount++;
100
+ const workerNum = callCount;
101
+ const worker = new EventEmitter();
102
+ worker.id = id;
103
+ worker._ready = true;
104
+ worker.start = vi.fn().mockResolvedValue(undefined);
105
+ worker.isReady = vi.fn(() => worker._ready);
106
+ worker.isBusy = vi.fn().mockReturnValue(false);
107
+ worker.run = vi.fn().mockImplementation(async () => {
108
+ if (workerNum === 1) {
109
+ worker._ready = false;
110
+ Promise.resolve().then(() => worker.emit('exit'));
111
+ return { killed: false, durationMs: 5000, error: 'timeout' };
112
+ }
113
+ return { killed: true, durationMs: 42 };
114
+ });
115
+ worker.shutdown = vi.fn().mockResolvedValue(undefined);
116
+ worker.kill = vi.fn();
117
+ allWorkers.push(worker);
118
+ return worker;
119
+ },
120
+ });
121
+ await pool.init();
122
+ const mutant1 = {
123
+ id: '1',
124
+ name: 'm1',
125
+ file: 'a.ts',
126
+ code: 'x',
127
+ line: 1,
128
+ col: 1,
129
+ };
130
+ const mutant2 = {
131
+ id: '2',
132
+ name: 'm2',
133
+ file: 'b.ts',
134
+ code: 'y',
135
+ line: 1,
136
+ col: 1,
137
+ };
138
+ const [result1, result2] = await Promise.all([
139
+ pool.run(mutant1, ['a.spec.ts']),
140
+ pool.run(mutant2, ['b.spec.ts']),
141
+ ]);
142
+ expect(result1).toMatchObject({ error: 'timeout' });
143
+ expect(result2).toMatchObject({ killed: true });
144
+ expect(allWorkers).toHaveLength(2);
145
+ expect(allWorkers[1].run).toHaveBeenCalled();
146
+ await pool.shutdown();
147
+ });
91
148
  it('maps runWithPool errors to error status', async () => {
92
149
  const mockPool = {
93
150
  run: vi.fn().mockRejectedValue(new Error('boom')),
@@ -37,6 +37,7 @@ function stripMutineerArgs(args) {
37
37
  '--config',
38
38
  '-c',
39
39
  '--coverage-file',
40
+ '--shard',
40
41
  ]);
41
42
  const dropExact = new Set([
42
43
  '-m',
@@ -59,6 +60,8 @@ function stripMutineerArgs(args) {
59
60
  continue;
60
61
  if (a.startsWith('--config=') || a.startsWith('-c='))
61
62
  continue;
63
+ if (a.startsWith('--shard='))
64
+ continue;
62
65
  out.push(a);
63
66
  }
64
67
  return out;
@@ -96,6 +99,9 @@ function buildVitestArgs(args, mode) {
96
99
  if (!result.some((a) => a.startsWith('--coverage.perTest='))) {
97
100
  result.push('--coverage.perTest=true');
98
101
  }
102
+ // Disable coverage thresholds so baseline doesn't fail when a broader
103
+ // test set (e.g. from --changed-with-deps) lowers aggregate coverage
104
+ result.push('--coverage.thresholds.lines=0', '--coverage.thresholds.functions=0', '--coverage.thresholds.branches=0', '--coverage.thresholds.statements=0');
99
105
  }
100
106
  return result;
101
107
  }
@@ -193,15 +199,16 @@ export class VitestAdapter {
193
199
  }
194
200
  }
195
201
  hasCoverageProvider() {
196
- try {
197
- require.resolve('@vitest/coverage-v8/package.json', {
198
- paths: [this.options.cwd],
199
- });
200
- return true;
201
- }
202
- catch {
203
- return false;
204
- }
202
+ const packages = ['@vitest/coverage-v8', '@vitest/coverage-istanbul'];
203
+ return packages.some((pkg) => {
204
+ try {
205
+ require.resolve(`${pkg}/package.json`, { paths: [this.options.cwd] });
206
+ return true;
207
+ }
208
+ catch {
209
+ return false;
210
+ }
211
+ });
205
212
  }
206
213
  async detectCoverageConfig() {
207
214
  const configPath = this.options.config.vitestConfig;
@@ -301,6 +301,8 @@ export class VitestPool {
301
301
  });
302
302
  }
303
303
  releaseWorker(worker) {
304
+ if (!worker.isReady())
305
+ return;
304
306
  // If someone is waiting, give them the worker directly
305
307
  const waiting = this.waitingTasks.shift();
306
308
  if (waiting) {
@@ -308,9 +310,7 @@ export class VitestPool {
308
310
  return;
309
311
  }
310
312
  // Otherwise return to the pool
311
- if (worker.isReady()) {
312
- this.availableWorkers.push(worker);
313
- }
313
+ this.availableWorkers.push(worker);
314
314
  }
315
315
  async run(mutant, tests) {
316
316
  if (!this.initialised) {
@@ -38,4 +38,8 @@ export interface MutineerConfig {
38
38
  * Requires Vitest coverage with perTest support.
39
39
  */
40
40
  readonly perTestCoverage?: boolean;
41
+ /** Per-mutant test timeout in milliseconds (default: 30000) */
42
+ readonly timeout?: number;
43
+ /** Output report format: 'text' (default) or 'json' (writes mutineer-report.json) */
44
+ readonly report?: 'text' | 'json';
41
45
  }
@@ -1,5 +1,5 @@
1
1
  import { describe, it, expect, vi } from 'vitest';
2
- import { computeSummary, printSummary, summarise } from '../summary.js';
2
+ import { computeSummary, printSummary, summarise, buildJsonReport, } from '../summary.js';
3
3
  /** Strip ANSI escape codes for clean text assertions */
4
4
  const stripAnsi = (s) => s.replace(/\x1B\[[0-9;]*m/g, '');
5
5
  function makeEntry(overrides) {
@@ -97,6 +97,48 @@ describe('summary', () => {
97
97
  expect(lines.some((l) => l.includes('↳'))).toBe(false);
98
98
  logSpy.mockRestore();
99
99
  });
100
+ it('buildJsonReport includes schemaVersion, timestamp, summary, and mutants', () => {
101
+ const cache = {
102
+ a: makeEntry({ status: 'killed', file: '/tmp/a.ts', mutator: 'flip' }),
103
+ b: makeEntry({ status: 'escaped', file: '/tmp/b.ts', mutator: 'wrap' }),
104
+ };
105
+ const summary = computeSummary(cache);
106
+ const report = buildJsonReport(summary, cache, 1000);
107
+ expect(report.schemaVersion).toBe(1);
108
+ expect(typeof report.timestamp).toBe('string');
109
+ expect(report.durationMs).toBe(1000);
110
+ expect(report.summary).toEqual(summary);
111
+ expect(report.mutants).toHaveLength(2);
112
+ });
113
+ it('buildJsonReport mutant entries have required fields', () => {
114
+ const cache = {
115
+ a: makeEntry({
116
+ status: 'escaped',
117
+ file: '/tmp/a.ts',
118
+ mutator: 'flip',
119
+ originalSnippet: 'a === b',
120
+ mutatedSnippet: 'a !== b',
121
+ coveringTests: ['/tmp/a.spec.ts'],
122
+ }),
123
+ };
124
+ const summary = computeSummary(cache);
125
+ const report = buildJsonReport(summary, cache);
126
+ const mutant = report.mutants[0];
127
+ expect(mutant.file).toBe('/tmp/a.ts');
128
+ expect(mutant.status).toBe('escaped');
129
+ expect(mutant.mutator).toBe('flip');
130
+ expect(mutant.originalSnippet).toBe('a === b');
131
+ expect(mutant.mutatedSnippet).toBe('a !== b');
132
+ expect(mutant.coveringTests).toEqual(['/tmp/a.spec.ts']);
133
+ });
134
+ it('buildJsonReport omits optional fields when absent', () => {
135
+ const cache = { a: makeEntry({ status: 'killed' }) };
136
+ const summary = computeSummary(cache);
137
+ const report = buildJsonReport(summary, cache);
138
+ expect('durationMs' in report).toBe(false);
139
+ expect('originalSnippet' in report.mutants[0]).toBe(false);
140
+ expect('coveringTests' in report.mutants[0]).toBe(false);
141
+ });
100
142
  it('summarise returns summary and prints', () => {
101
143
  const cache = { a: makeEntry({ status: 'killed' }) };
102
144
  const logSpy = vi.spyOn(console, 'log').mockImplementation(() => { });