@mutineerjs/mutineer 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/mutineer.d.ts +1 -1
- package/dist/bin/mutineer.js +3 -1
- package/dist/mutators/__tests__/operator.spec.js +97 -1
- package/dist/mutators/__tests__/registry.spec.js +8 -0
- package/dist/mutators/operator.d.ts +8 -0
- package/dist/mutators/operator.js +58 -1
- package/dist/mutators/registry.js +9 -1
- package/dist/mutators/utils.d.ts +2 -0
- package/dist/mutators/utils.js +58 -1
- package/dist/runner/__tests__/args.spec.js +57 -1
- package/dist/runner/__tests__/cache.spec.js +65 -8
- package/dist/runner/__tests__/cleanup.spec.js +30 -0
- package/dist/runner/__tests__/coverage-resolver.spec.js +2 -0
- package/dist/runner/__tests__/discover.spec.js +128 -0
- package/dist/runner/__tests__/orchestrator.spec.js +167 -2
- package/dist/runner/__tests__/pool-executor.spec.js +60 -1
- package/dist/runner/args.d.ts +13 -0
- package/dist/runner/args.js +27 -0
- package/dist/runner/cache.d.ts +19 -3
- package/dist/runner/cache.js +14 -7
- package/dist/runner/cleanup.d.ts +3 -1
- package/dist/runner/cleanup.js +18 -1
- package/dist/runner/coverage-resolver.js +1 -1
- package/dist/runner/discover.d.ts +1 -1
- package/dist/runner/discover.js +30 -20
- package/dist/runner/orchestrator.d.ts +1 -0
- package/dist/runner/orchestrator.js +22 -8
- package/dist/runner/pool-executor.d.ts +5 -0
- package/dist/runner/pool-executor.js +15 -4
- package/dist/runner/vitest/__tests__/adapter.spec.js +41 -0
- package/dist/runner/vitest/adapter.js +13 -9
- package/dist/types/config.d.ts +2 -0
- package/dist/utils/__tests__/summary.spec.js +43 -1
- package/dist/utils/summary.d.ts +18 -0
- package/dist/utils/summary.js +25 -0
- package/package.json +2 -1
package/dist/bin/mutineer.d.ts
CHANGED
|
@@ -1,3 +1,3 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
export declare const HELP_TEXT = "Usage: mutineer <command> [options]\n\nCommands:\n init Create a mutineer.config.ts template\n run Run mutation testing\n clean Remove __mutineer__ temp directories\n\nOptions (run):\n --config, -c <path> Config file path\n --concurrency <n> Worker count (default: CPU count - 1)\n --runner <vitest|jest> Test runner (default: vitest)\n --progress <bar|list|quiet> Progress display (default: bar)\n --changed Mutate only git-changed files\n --changed-with-deps Mutate changed files + their local dependencies\n --only-covered-lines Mutate only lines covered by tests\n --per-test-coverage Collect per-test coverage data\n --coverage-file <path> Path to coverage JSON\n --min-kill-percent <n> Minimum kill % threshold (0\u2013100)\n --timeout <ms> Per-mutant test timeout in ms (default: 30000)\n\n --help, -h Show this help\n --version, -V Show version\n";
|
|
2
|
+
export declare const HELP_TEXT = "Usage: mutineer <command> [options]\n\nCommands:\n init Create a mutineer.config.ts template\n run Run mutation testing\n clean Remove __mutineer__ temp directories\n\nOptions (run):\n --config, -c <path> Config file path\n --concurrency <n> Worker count (default: CPU count - 1)\n --runner <vitest|jest> Test runner (default: vitest)\n --progress <bar|list|quiet> Progress display (default: bar)\n --changed Mutate only git-changed files\n --changed-with-deps Mutate changed files + their local dependencies\n --only-covered-lines Mutate only lines covered by tests\n --per-test-coverage Collect per-test coverage data\n --coverage-file <path> Path to coverage JSON\n --min-kill-percent <n> Minimum kill % threshold (0\u2013100)\n --timeout <ms> Per-mutant test timeout in ms (default: 30000)\n --report <text|json> Output format: text (default) or json (writes mutineer-report.json)\n --shard <n>/<total> Run a shard of mutants (e.g. --shard 1/4)\n\n --help, -h Show this help\n --version, -V Show version\n";
|
|
3
3
|
export declare function getVersion(): string;
|
package/dist/bin/mutineer.js
CHANGED
|
@@ -28,6 +28,8 @@ Options (run):
|
|
|
28
28
|
--coverage-file <path> Path to coverage JSON
|
|
29
29
|
--min-kill-percent <n> Minimum kill % threshold (0–100)
|
|
30
30
|
--timeout <ms> Per-mutant test timeout in ms (default: 30000)
|
|
31
|
+
--report <text|json> Output format: text (default) or json (writes mutineer-report.json)
|
|
32
|
+
--shard <n>/<total> Run a shard of mutants (e.g. --shard 1/4)
|
|
31
33
|
|
|
32
34
|
--help, -h Show this help
|
|
33
35
|
--version, -V Show version
|
|
@@ -76,7 +78,7 @@ async function main() {
|
|
|
76
78
|
}
|
|
77
79
|
else if (args[0] === CLEAN_COMMAND) {
|
|
78
80
|
console.log('Cleaning up __mutineer__ directories...');
|
|
79
|
-
await cleanupMutineerDirs(process.cwd());
|
|
81
|
+
await cleanupMutineerDirs(process.cwd(), { includeCacheFiles: true });
|
|
80
82
|
console.log('Done.');
|
|
81
83
|
}
|
|
82
84
|
else {
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { describe, it, expect } from 'vitest';
|
|
2
|
-
import { relaxLE, relaxGE, tightenLT, tightenGT, andToOr, orToAnd, nullishToOr, flipEQ, flipNEQ, flipStrictEQ, flipStrictNEQ, addToSub, subToAdd, mulToDiv, divToMul, modToMul, powerToMul, } from '../operator.js';
|
|
2
|
+
import { relaxLE, relaxGE, tightenLT, tightenGT, andToOr, orToAnd, nullishToOr, flipEQ, flipNEQ, flipStrictEQ, flipStrictNEQ, addToSub, subToAdd, mulToDiv, divToMul, modToMul, powerToMul, preInc, preDec, postInc, postDec, addAssignToSub, subAssignToAdd, mulAssignToDiv, divAssignToMul, } from '../operator.js';
|
|
3
3
|
import { buildParseContext } from '../utils.js';
|
|
4
4
|
// ---------------------------------------------------------------------------
|
|
5
5
|
// Shared behaviour (tested once; all mutators use the same factory)
|
|
@@ -183,3 +183,99 @@ describe('powerToMul', () => {
|
|
|
183
183
|
expect(result.code).toBe(`const n = a * b`);
|
|
184
184
|
});
|
|
185
185
|
});
|
|
186
|
+
// ---------------------------------------------------------------------------
|
|
187
|
+
// Increment/decrement mutators
|
|
188
|
+
// ---------------------------------------------------------------------------
|
|
189
|
+
describe('preInc', () => {
|
|
190
|
+
it("replaces '++x' to '--x'", () => {
|
|
191
|
+
const src = `const n = ++i`;
|
|
192
|
+
const [result] = preInc.apply(src);
|
|
193
|
+
expect(result.code).toBe(`const n = --i`);
|
|
194
|
+
});
|
|
195
|
+
it('does not match postfix x++', () => {
|
|
196
|
+
const src = `i++`;
|
|
197
|
+
expect(preInc.apply(src)).toHaveLength(0);
|
|
198
|
+
});
|
|
199
|
+
it('applyWithContext matches apply', () => {
|
|
200
|
+
const src = `const n = ++i`;
|
|
201
|
+
const ctx = buildParseContext(src);
|
|
202
|
+
expect(preInc.applyWithContext(src, ctx)).toEqual(preInc.apply(src));
|
|
203
|
+
});
|
|
204
|
+
});
|
|
205
|
+
describe('preDec', () => {
|
|
206
|
+
it("replaces '--x' to '++x'", () => {
|
|
207
|
+
const src = `const n = --i`;
|
|
208
|
+
const [result] = preDec.apply(src);
|
|
209
|
+
expect(result.code).toBe(`const n = ++i`);
|
|
210
|
+
});
|
|
211
|
+
it('does not match postfix x--', () => {
|
|
212
|
+
const src = `i--`;
|
|
213
|
+
expect(preDec.apply(src)).toHaveLength(0);
|
|
214
|
+
});
|
|
215
|
+
});
|
|
216
|
+
describe('postInc', () => {
|
|
217
|
+
it("replaces 'x++' to 'x--'", () => {
|
|
218
|
+
const src = `i++`;
|
|
219
|
+
const [result] = postInc.apply(src);
|
|
220
|
+
expect(result.code).toBe(`i--`);
|
|
221
|
+
});
|
|
222
|
+
it('does not match prefix ++x', () => {
|
|
223
|
+
const src = `const n = ++i`;
|
|
224
|
+
expect(postInc.apply(src)).toHaveLength(0);
|
|
225
|
+
});
|
|
226
|
+
it('applyWithContext matches apply', () => {
|
|
227
|
+
const src = `i++`;
|
|
228
|
+
const ctx = buildParseContext(src);
|
|
229
|
+
expect(postInc.applyWithContext(src, ctx)).toEqual(postInc.apply(src));
|
|
230
|
+
});
|
|
231
|
+
});
|
|
232
|
+
describe('postDec', () => {
|
|
233
|
+
it("replaces 'x--' to 'x++'", () => {
|
|
234
|
+
const src = `i--`;
|
|
235
|
+
const [result] = postDec.apply(src);
|
|
236
|
+
expect(result.code).toBe(`i++`);
|
|
237
|
+
});
|
|
238
|
+
it('does not match prefix --x', () => {
|
|
239
|
+
const src = `const n = --i`;
|
|
240
|
+
expect(postDec.apply(src)).toHaveLength(0);
|
|
241
|
+
});
|
|
242
|
+
});
|
|
243
|
+
// ---------------------------------------------------------------------------
|
|
244
|
+
// Compound assignment mutators
|
|
245
|
+
// ---------------------------------------------------------------------------
|
|
246
|
+
describe('addAssignToSub', () => {
|
|
247
|
+
it("replaces '+=' with '-='", () => {
|
|
248
|
+
const src = `x += 1`;
|
|
249
|
+
const [result] = addAssignToSub.apply(src);
|
|
250
|
+
expect(result.code).toBe(`x -= 1`);
|
|
251
|
+
});
|
|
252
|
+
it('returns no results when operator absent', () => {
|
|
253
|
+
expect(addAssignToSub.apply(`x -= 1`)).toHaveLength(0);
|
|
254
|
+
});
|
|
255
|
+
it('applyWithContext matches apply', () => {
|
|
256
|
+
const src = `x += 1`;
|
|
257
|
+
const ctx = buildParseContext(src);
|
|
258
|
+
expect(addAssignToSub.applyWithContext(src, ctx)).toEqual(addAssignToSub.apply(src));
|
|
259
|
+
});
|
|
260
|
+
});
|
|
261
|
+
describe('subAssignToAdd', () => {
|
|
262
|
+
it("replaces '-=' with '+='", () => {
|
|
263
|
+
const src = `x -= 1`;
|
|
264
|
+
const [result] = subAssignToAdd.apply(src);
|
|
265
|
+
expect(result.code).toBe(`x += 1`);
|
|
266
|
+
});
|
|
267
|
+
});
|
|
268
|
+
describe('mulAssignToDiv', () => {
|
|
269
|
+
it("replaces '*=' with '/='", () => {
|
|
270
|
+
const src = `x *= 2`;
|
|
271
|
+
const [result] = mulAssignToDiv.apply(src);
|
|
272
|
+
expect(result.code).toBe(`x /= 2`);
|
|
273
|
+
});
|
|
274
|
+
});
|
|
275
|
+
describe('divAssignToMul', () => {
|
|
276
|
+
it("replaces '/=' with '*='", () => {
|
|
277
|
+
const src = `x /= 2`;
|
|
278
|
+
const [result] = divAssignToMul.apply(src);
|
|
279
|
+
expect(result.code).toBe(`x *= 2`);
|
|
280
|
+
});
|
|
281
|
+
});
|
|
@@ -18,6 +18,14 @@ const ALL_NAMES = [
|
|
|
18
18
|
'divToMul',
|
|
19
19
|
'modToMul',
|
|
20
20
|
'powerToMul',
|
|
21
|
+
'preInc',
|
|
22
|
+
'preDec',
|
|
23
|
+
'postInc',
|
|
24
|
+
'postDec',
|
|
25
|
+
'addAssignToSub',
|
|
26
|
+
'subAssignToAdd',
|
|
27
|
+
'mulAssignToDiv',
|
|
28
|
+
'divAssignToMul',
|
|
21
29
|
'returnToNull',
|
|
22
30
|
'returnToUndefined',
|
|
23
31
|
'returnFlipBool',
|
|
@@ -23,3 +23,11 @@ export declare const mulToDiv: ASTMutator;
|
|
|
23
23
|
export declare const divToMul: ASTMutator;
|
|
24
24
|
export declare const modToMul: ASTMutator;
|
|
25
25
|
export declare const powerToMul: ASTMutator;
|
|
26
|
+
export declare const preInc: ASTMutator;
|
|
27
|
+
export declare const preDec: ASTMutator;
|
|
28
|
+
export declare const postInc: ASTMutator;
|
|
29
|
+
export declare const postDec: ASTMutator;
|
|
30
|
+
export declare const addAssignToSub: ASTMutator;
|
|
31
|
+
export declare const subAssignToAdd: ASTMutator;
|
|
32
|
+
export declare const mulAssignToDiv: ASTMutator;
|
|
33
|
+
export declare const divAssignToMul: ASTMutator;
|
|
@@ -5,7 +5,7 @@
|
|
|
5
5
|
* using AST traversal and produces a mutated source string with that operator
|
|
6
6
|
* replaced by its counterpart.
|
|
7
7
|
*/
|
|
8
|
-
import { collectOperatorTargets } from './utils.js';
|
|
8
|
+
import { collectOperatorTargets, buildParseContext } from './utils.js';
|
|
9
9
|
/**
|
|
10
10
|
* Factory to build an operator mutator using AST traversal and token analysis.
|
|
11
11
|
*
|
|
@@ -33,6 +33,53 @@ function makeOperatorMutator(name, description, fromOp, toOp) {
|
|
|
33
33
|
},
|
|
34
34
|
};
|
|
35
35
|
}
|
|
36
|
+
/**
|
|
37
|
+
* Factory for UpdateExpression mutators (++/--).
|
|
38
|
+
* mapKey distinguishes prefix vs postfix: 'pre++', 'post++', 'pre--', 'post--'
|
|
39
|
+
*/
|
|
40
|
+
function makeUpdateMutator(name, description, mapKey, toOp) {
|
|
41
|
+
function targetsToOutputs(src, targets) {
|
|
42
|
+
return targets.map((target) => ({
|
|
43
|
+
line: target.line,
|
|
44
|
+
col: target.col1,
|
|
45
|
+
code: src.slice(0, target.start) + toOp + src.slice(target.end),
|
|
46
|
+
}));
|
|
47
|
+
}
|
|
48
|
+
return {
|
|
49
|
+
name,
|
|
50
|
+
description,
|
|
51
|
+
apply(src) {
|
|
52
|
+
const ctx = buildParseContext(src);
|
|
53
|
+
return targetsToOutputs(src, ctx.preCollected.updateTargets.get(mapKey) ?? []);
|
|
54
|
+
},
|
|
55
|
+
applyWithContext(src, ctx) {
|
|
56
|
+
return targetsToOutputs(src, ctx.preCollected.updateTargets.get(mapKey) ?? []);
|
|
57
|
+
},
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
/**
|
|
61
|
+
* Factory for AssignmentExpression mutators (+=, -=, *=, /=).
|
|
62
|
+
*/
|
|
63
|
+
function makeAssignmentMutator(name, description, fromOp, toOp) {
|
|
64
|
+
function targetsToOutputs(src, targets) {
|
|
65
|
+
return targets.map((target) => ({
|
|
66
|
+
line: target.line,
|
|
67
|
+
col: target.col1,
|
|
68
|
+
code: src.slice(0, target.start) + toOp + src.slice(target.end),
|
|
69
|
+
}));
|
|
70
|
+
}
|
|
71
|
+
return {
|
|
72
|
+
name,
|
|
73
|
+
description,
|
|
74
|
+
apply(src) {
|
|
75
|
+
const ctx = buildParseContext(src);
|
|
76
|
+
return targetsToOutputs(src, ctx.preCollected.assignmentTargets.get(fromOp) ?? []);
|
|
77
|
+
},
|
|
78
|
+
applyWithContext(src, ctx) {
|
|
79
|
+
return targetsToOutputs(src, ctx.preCollected.assignmentTargets.get(fromOp) ?? []);
|
|
80
|
+
},
|
|
81
|
+
};
|
|
82
|
+
}
|
|
36
83
|
/* === Boundary mutators === */
|
|
37
84
|
export const relaxLE = makeOperatorMutator('relaxLE', "Change '<=' to '<' (relax boundary)", '<=', '<');
|
|
38
85
|
export const relaxGE = makeOperatorMutator('relaxGE', "Change '>=' to '>' (relax boundary)", '>=', '>');
|
|
@@ -54,3 +101,13 @@ export const mulToDiv = makeOperatorMutator('mulToDiv', "Change '*' to '/'", '*'
|
|
|
54
101
|
export const divToMul = makeOperatorMutator('divToMul', "Change '/' to '*'", '/', '*');
|
|
55
102
|
export const modToMul = makeOperatorMutator('modToMul', "Change '%' to '*'", '%', '*');
|
|
56
103
|
export const powerToMul = makeOperatorMutator('powerToMul', "Change '**' to '*'", '**', '*');
|
|
104
|
+
/* === Increment/decrement mutators === */
|
|
105
|
+
export const preInc = makeUpdateMutator('preInc', "Change '++x' to '--x'", 'pre++', '--');
|
|
106
|
+
export const preDec = makeUpdateMutator('preDec', "Change '--x' to '++x'", 'pre--', '++');
|
|
107
|
+
export const postInc = makeUpdateMutator('postInc', "Change 'x++' to 'x--'", 'post++', '--');
|
|
108
|
+
export const postDec = makeUpdateMutator('postDec', "Change 'x--' to 'x++'", 'post--', '++');
|
|
109
|
+
/* === Compound assignment mutators === */
|
|
110
|
+
export const addAssignToSub = makeAssignmentMutator('addAssignToSub', "Change '+=' to '-='", '+=', '-=');
|
|
111
|
+
export const subAssignToAdd = makeAssignmentMutator('subAssignToAdd', "Change '-=' to '+='", '-=', '+=');
|
|
112
|
+
export const mulAssignToDiv = makeAssignmentMutator('mulAssignToDiv', "Change '*=' to '/='", '*=', '/=');
|
|
113
|
+
export const divAssignToMul = makeAssignmentMutator('divAssignToMul', "Change '/=' to '*='", '/=', '*=');
|
|
@@ -3,7 +3,7 @@
|
|
|
3
3
|
*
|
|
4
4
|
* Aggregates all mutators and exposes `getRegistry` for filtered access.
|
|
5
5
|
*/
|
|
6
|
-
import { relaxLE, relaxGE, tightenLT, tightenGT, andToOr, orToAnd, nullishToOr, flipEQ, flipNEQ, flipStrictEQ, flipStrictNEQ, addToSub, subToAdd, mulToDiv, divToMul, modToMul, powerToMul, } from './operator.js';
|
|
6
|
+
import { relaxLE, relaxGE, tightenLT, tightenGT, andToOr, orToAnd, nullishToOr, flipEQ, flipNEQ, flipStrictEQ, flipStrictNEQ, addToSub, subToAdd, mulToDiv, divToMul, modToMul, powerToMul, preInc, preDec, postInc, postDec, addAssignToSub, subAssignToAdd, mulAssignToDiv, divAssignToMul, } from './operator.js';
|
|
7
7
|
import { returnToNull, returnToUndefined, returnFlipBool, returnZero, returnEmptyStr, returnEmptyArr, } from './return-value.js';
|
|
8
8
|
const ALL = [
|
|
9
9
|
relaxLE,
|
|
@@ -23,6 +23,14 @@ const ALL = [
|
|
|
23
23
|
divToMul,
|
|
24
24
|
modToMul,
|
|
25
25
|
powerToMul,
|
|
26
|
+
preInc,
|
|
27
|
+
preDec,
|
|
28
|
+
postInc,
|
|
29
|
+
postDec,
|
|
30
|
+
addAssignToSub,
|
|
31
|
+
subAssignToAdd,
|
|
32
|
+
mulAssignToDiv,
|
|
33
|
+
divAssignToMul,
|
|
26
34
|
returnToNull,
|
|
27
35
|
returnToUndefined,
|
|
28
36
|
returnFlipBool,
|
package/dist/mutators/utils.d.ts
CHANGED
|
@@ -81,6 +81,8 @@ export interface ReturnStatementInfo {
|
|
|
81
81
|
export interface PreCollected {
|
|
82
82
|
readonly operatorTargets: Map<string, OperatorTarget[]>;
|
|
83
83
|
readonly returnStatements: ReturnStatementInfo[];
|
|
84
|
+
readonly updateTargets: Map<string, OperatorTarget[]>;
|
|
85
|
+
readonly assignmentTargets: Map<string, OperatorTarget[]>;
|
|
84
86
|
}
|
|
85
87
|
/**
|
|
86
88
|
* Pre-parsed context for a source file.
|
package/dist/mutators/utils.js
CHANGED
|
@@ -128,6 +128,8 @@ export function isBinaryOrLogical(node) {
|
|
|
128
128
|
export function collectAllTargets(src, ast, tokens, ignoreLines) {
|
|
129
129
|
const operatorTargets = new Map();
|
|
130
130
|
const returnStatements = [];
|
|
131
|
+
const updateTargets = new Map();
|
|
132
|
+
const assignmentTargets = new Map();
|
|
131
133
|
function handleBinaryOrLogical(n) {
|
|
132
134
|
const nodeStart = n.start ?? 0;
|
|
133
135
|
const nodeEnd = n.end ?? 0;
|
|
@@ -152,6 +154,55 @@ export function collectAllTargets(src, ast, tokens, ignoreLines) {
|
|
|
152
154
|
});
|
|
153
155
|
}
|
|
154
156
|
}
|
|
157
|
+
function handleUpdate(n) {
|
|
158
|
+
const nodeStart = n.start ?? 0;
|
|
159
|
+
const nodeEnd = n.end ?? 0;
|
|
160
|
+
const opValue = n.operator;
|
|
161
|
+
const tok = tokens.find((tk) => tk.start >= nodeStart && tk.end <= nodeEnd && tk.value === opValue);
|
|
162
|
+
if (tok) {
|
|
163
|
+
const line = tok.loc.start.line;
|
|
164
|
+
if (ignoreLines.has(line))
|
|
165
|
+
return;
|
|
166
|
+
const visualCol = getVisualColumn(src, tok.start);
|
|
167
|
+
const mapKey = (n.prefix ? 'pre' : 'post') + opValue;
|
|
168
|
+
let arr = updateTargets.get(mapKey);
|
|
169
|
+
if (!arr) {
|
|
170
|
+
arr = [];
|
|
171
|
+
updateTargets.set(mapKey, arr);
|
|
172
|
+
}
|
|
173
|
+
arr.push({
|
|
174
|
+
start: tok.start,
|
|
175
|
+
end: tok.end,
|
|
176
|
+
line,
|
|
177
|
+
col1: visualCol,
|
|
178
|
+
op: opValue,
|
|
179
|
+
});
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
function handleAssignment(n) {
|
|
183
|
+
const nodeStart = n.start ?? 0;
|
|
184
|
+
const nodeEnd = n.end ?? 0;
|
|
185
|
+
const opValue = n.operator;
|
|
186
|
+
const tok = tokens.find((tk) => tk.start >= nodeStart && tk.end <= nodeEnd && tk.value === opValue);
|
|
187
|
+
if (tok) {
|
|
188
|
+
const line = tok.loc.start.line;
|
|
189
|
+
if (ignoreLines.has(line))
|
|
190
|
+
return;
|
|
191
|
+
const visualCol = getVisualColumn(src, tok.start);
|
|
192
|
+
let arr = assignmentTargets.get(opValue);
|
|
193
|
+
if (!arr) {
|
|
194
|
+
arr = [];
|
|
195
|
+
assignmentTargets.set(opValue, arr);
|
|
196
|
+
}
|
|
197
|
+
arr.push({
|
|
198
|
+
start: tok.start,
|
|
199
|
+
end: tok.end,
|
|
200
|
+
line,
|
|
201
|
+
col1: visualCol,
|
|
202
|
+
op: opValue,
|
|
203
|
+
});
|
|
204
|
+
}
|
|
205
|
+
}
|
|
155
206
|
traverse(ast, {
|
|
156
207
|
BinaryExpression(p) {
|
|
157
208
|
handleBinaryOrLogical(p.node);
|
|
@@ -159,6 +210,12 @@ export function collectAllTargets(src, ast, tokens, ignoreLines) {
|
|
|
159
210
|
LogicalExpression(p) {
|
|
160
211
|
handleBinaryOrLogical(p.node);
|
|
161
212
|
},
|
|
213
|
+
UpdateExpression(p) {
|
|
214
|
+
handleUpdate(p.node);
|
|
215
|
+
},
|
|
216
|
+
AssignmentExpression(p) {
|
|
217
|
+
handleAssignment(p.node);
|
|
218
|
+
},
|
|
162
219
|
ReturnStatement(p) {
|
|
163
220
|
const node = p.node;
|
|
164
221
|
if (!node.argument)
|
|
@@ -182,7 +239,7 @@ export function collectAllTargets(src, ast, tokens, ignoreLines) {
|
|
|
182
239
|
});
|
|
183
240
|
},
|
|
184
241
|
});
|
|
185
|
-
return { operatorTargets, returnStatements };
|
|
242
|
+
return { operatorTargets, returnStatements, updateTargets, assignmentTargets };
|
|
186
243
|
}
|
|
187
244
|
/**
|
|
188
245
|
* Parse a source file once and build a reusable ParseContext.
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { describe, it, expect } from 'vitest';
|
|
2
|
-
import { parseFlagNumber, readNumberFlag, readStringFlag, validatePercent, validatePositiveMs, parseConcurrency, parseProgressMode, parseCliOptions, extractConfigPath, } from '../args.js';
|
|
2
|
+
import { parseFlagNumber, readNumberFlag, readStringFlag, validatePercent, validatePositiveMs, parseConcurrency, parseProgressMode, parseCliOptions, extractConfigPath, parseShardOption, } from '../args.js';
|
|
3
3
|
describe('parseFlagNumber', () => {
|
|
4
4
|
it('parses valid integers', () => {
|
|
5
5
|
expect(parseFlagNumber('42', '--flag')).toBe(42);
|
|
@@ -227,6 +227,24 @@ describe('parseCliOptions', () => {
|
|
|
227
227
|
it('rejects --timeout abc', () => {
|
|
228
228
|
expect(() => parseCliOptions(['--timeout', 'abc'], emptyCfg)).toThrow('Invalid value for --timeout: abc');
|
|
229
229
|
});
|
|
230
|
+
it('defaults reportFormat to text', () => {
|
|
231
|
+
const opts = parseCliOptions([], emptyCfg);
|
|
232
|
+
expect(opts.reportFormat).toBe('text');
|
|
233
|
+
});
|
|
234
|
+
it('parses --report json', () => {
|
|
235
|
+
const opts = parseCliOptions(['--report', 'json'], emptyCfg);
|
|
236
|
+
expect(opts.reportFormat).toBe('json');
|
|
237
|
+
});
|
|
238
|
+
it('reads report from config', () => {
|
|
239
|
+
const opts = parseCliOptions([], { report: 'json' });
|
|
240
|
+
expect(opts.reportFormat).toBe('json');
|
|
241
|
+
});
|
|
242
|
+
it('CLI --report takes precedence over config', () => {
|
|
243
|
+
const opts = parseCliOptions(['--report', 'json'], {
|
|
244
|
+
report: 'text',
|
|
245
|
+
});
|
|
246
|
+
expect(opts.reportFormat).toBe('json');
|
|
247
|
+
});
|
|
230
248
|
});
|
|
231
249
|
describe('validatePositiveMs', () => {
|
|
232
250
|
it('returns undefined for undefined', () => {
|
|
@@ -247,6 +265,44 @@ describe('validatePositiveMs', () => {
|
|
|
247
265
|
expect(() => validatePositiveMs(NaN, '--timeout')).toThrow('Invalid --timeout: expected a positive number');
|
|
248
266
|
});
|
|
249
267
|
});
|
|
268
|
+
describe('parseShardOption', () => {
|
|
269
|
+
it('returns undefined when --shard is absent', () => {
|
|
270
|
+
expect(parseShardOption([])).toBeUndefined();
|
|
271
|
+
expect(parseShardOption(['--runner', 'vitest'])).toBeUndefined();
|
|
272
|
+
});
|
|
273
|
+
it('parses valid shard with space syntax', () => {
|
|
274
|
+
expect(parseShardOption(['--shard', '1/2'])).toEqual({ index: 1, total: 2 });
|
|
275
|
+
expect(parseShardOption(['--shard', '2/2'])).toEqual({ index: 2, total: 2 });
|
|
276
|
+
expect(parseShardOption(['--shard', '3/4'])).toEqual({ index: 3, total: 4 });
|
|
277
|
+
});
|
|
278
|
+
it('parses valid shard with = syntax', () => {
|
|
279
|
+
expect(parseShardOption(['--shard=1/2'])).toEqual({ index: 1, total: 2 });
|
|
280
|
+
});
|
|
281
|
+
it('throws on 5/4 (index > total)', () => {
|
|
282
|
+
expect(() => parseShardOption(['--shard', '5/4'])).toThrow('Invalid --shard');
|
|
283
|
+
});
|
|
284
|
+
it('throws on 0/4 (index < 1)', () => {
|
|
285
|
+
expect(() => parseShardOption(['--shard', '0/4'])).toThrow('Invalid --shard');
|
|
286
|
+
});
|
|
287
|
+
it('throws on 1/0 (total < 1)', () => {
|
|
288
|
+
expect(() => parseShardOption(['--shard', '1/0'])).toThrow('Invalid --shard');
|
|
289
|
+
});
|
|
290
|
+
it('throws on bad format', () => {
|
|
291
|
+
expect(() => parseShardOption(['--shard', 'bad'])).toThrow('Invalid --shard format');
|
|
292
|
+
expect(() => parseShardOption(['--shard', '1-2'])).toThrow('Invalid --shard format');
|
|
293
|
+
});
|
|
294
|
+
});
|
|
295
|
+
describe('parseCliOptions shard', () => {
|
|
296
|
+
const emptyCfg = {};
|
|
297
|
+
it('parses --shard into opts.shard', () => {
|
|
298
|
+
const opts = parseCliOptions(['--shard', '2/4'], emptyCfg);
|
|
299
|
+
expect(opts.shard).toEqual({ index: 2, total: 4 });
|
|
300
|
+
});
|
|
301
|
+
it('opts.shard is undefined when flag absent', () => {
|
|
302
|
+
const opts = parseCliOptions([], emptyCfg);
|
|
303
|
+
expect(opts.shard).toBeUndefined();
|
|
304
|
+
});
|
|
305
|
+
});
|
|
250
306
|
describe('extractConfigPath', () => {
|
|
251
307
|
it('extracts --config with separate value', () => {
|
|
252
308
|
expect(extractConfigPath(['--config', 'my.config.ts'])).toBe('my.config.ts');
|
|
@@ -2,7 +2,7 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
|
2
2
|
import fs from 'node:fs/promises';
|
|
3
3
|
import path from 'node:path';
|
|
4
4
|
import os from 'node:os';
|
|
5
|
-
import { clearCacheOnStart, saveCacheAtomic, decodeCacheKey, keyForTests, hash, readMutantCache, } from '../cache.js';
|
|
5
|
+
import { clearCacheOnStart, saveCacheAtomic, decodeCacheKey, keyForTests, hash, readMutantCache, getCacheFilename, } from '../cache.js';
|
|
6
6
|
let tmpDir;
|
|
7
7
|
beforeEach(async () => {
|
|
8
8
|
tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mutineer-cache-'));
|
|
@@ -10,9 +10,19 @@ beforeEach(async () => {
|
|
|
10
10
|
afterEach(async () => {
|
|
11
11
|
await fs.rm(tmpDir, { recursive: true, force: true });
|
|
12
12
|
});
|
|
13
|
+
describe('getCacheFilename', () => {
|
|
14
|
+
it('returns default filename when no shard', () => {
|
|
15
|
+
expect(getCacheFilename()).toBe('.mutineer-cache.json');
|
|
16
|
+
expect(getCacheFilename(undefined)).toBe('.mutineer-cache.json');
|
|
17
|
+
});
|
|
18
|
+
it('returns shard-namespaced filename when shard provided', () => {
|
|
19
|
+
expect(getCacheFilename({ index: 1, total: 2 })).toBe('.mutineer-cache-shard-1-of-2.json');
|
|
20
|
+
expect(getCacheFilename({ index: 3, total: 4 })).toBe('.mutineer-cache-shard-3-of-4.json');
|
|
21
|
+
});
|
|
22
|
+
});
|
|
13
23
|
describe('clearCacheOnStart', () => {
|
|
14
24
|
it('removes the cache file if it exists', async () => {
|
|
15
|
-
const cacheFile = path.join(tmpDir, '.mutate-cache.json');
|
|
25
|
+
const cacheFile = path.join(tmpDir, '.mutineer-cache.json');
|
|
16
26
|
await fs.writeFile(cacheFile, '{}');
|
|
17
27
|
await clearCacheOnStart(tmpDir);
|
|
18
28
|
await expect(fs.access(cacheFile)).rejects.toThrow();
|
|
@@ -20,6 +30,19 @@ describe('clearCacheOnStart', () => {
|
|
|
20
30
|
it('does not throw if cache file does not exist', async () => {
|
|
21
31
|
await expect(clearCacheOnStart(tmpDir)).resolves.toBeUndefined();
|
|
22
32
|
});
|
|
33
|
+
it('removes shard-specific cache file', async () => {
|
|
34
|
+
const shardFile = path.join(tmpDir, '.mutineer-cache-shard-1-of-2.json');
|
|
35
|
+
await fs.writeFile(shardFile, '{}');
|
|
36
|
+
await clearCacheOnStart(tmpDir, { index: 1, total: 2 });
|
|
37
|
+
await expect(fs.access(shardFile)).rejects.toThrow();
|
|
38
|
+
});
|
|
39
|
+
it('does not remove default cache when shard is specified', async () => {
|
|
40
|
+
const defaultFile = path.join(tmpDir, '.mutineer-cache.json');
|
|
41
|
+
await fs.writeFile(defaultFile, '{}');
|
|
42
|
+
await clearCacheOnStart(tmpDir, { index: 1, total: 2 });
|
|
43
|
+
// default file should still exist
|
|
44
|
+
await expect(fs.access(defaultFile)).resolves.toBeUndefined();
|
|
45
|
+
});
|
|
23
46
|
});
|
|
24
47
|
describe('saveCacheAtomic', () => {
|
|
25
48
|
it('writes cache data to the file', async () => {
|
|
@@ -33,7 +56,7 @@ describe('saveCacheAtomic', () => {
|
|
|
33
56
|
},
|
|
34
57
|
};
|
|
35
58
|
await saveCacheAtomic(tmpDir, cache);
|
|
36
|
-
const content = await fs.readFile(path.join(tmpDir, '.mutate-cache.json'), 'utf8');
|
|
59
|
+
const content = await fs.readFile(path.join(tmpDir, '.mutineer-cache.json'), 'utf8');
|
|
37
60
|
expect(JSON.parse(content)).toEqual(cache);
|
|
38
61
|
});
|
|
39
62
|
it('overwrites existing cache', async () => {
|
|
@@ -48,9 +71,25 @@ describe('saveCacheAtomic', () => {
|
|
|
48
71
|
},
|
|
49
72
|
};
|
|
50
73
|
await saveCacheAtomic(tmpDir, newCache);
|
|
51
|
-
const content = await fs.readFile(path.join(tmpDir, '.mutate-cache.json'), 'utf8');
|
|
74
|
+
const content = await fs.readFile(path.join(tmpDir, '.mutineer-cache.json'), 'utf8');
|
|
52
75
|
expect(JSON.parse(content)).toEqual(newCache);
|
|
53
76
|
});
|
|
77
|
+
it('writes to shard-named file when shard is provided', async () => {
|
|
78
|
+
const cache = {
|
|
79
|
+
k: {
|
|
80
|
+
status: 'killed',
|
|
81
|
+
file: 'x.ts',
|
|
82
|
+
line: 1,
|
|
83
|
+
col: 0,
|
|
84
|
+
mutator: 'm',
|
|
85
|
+
},
|
|
86
|
+
};
|
|
87
|
+
await saveCacheAtomic(tmpDir, cache, { index: 2, total: 3 });
|
|
88
|
+
const content = await fs.readFile(path.join(tmpDir, '.mutineer-cache-shard-2-of-3.json'), 'utf8');
|
|
89
|
+
expect(JSON.parse(content)).toEqual(cache);
|
|
90
|
+
// default file should NOT exist
|
|
91
|
+
await expect(fs.access(path.join(tmpDir, '.mutineer-cache.json'))).rejects.toThrow();
|
|
92
|
+
});
|
|
54
93
|
});
|
|
55
94
|
describe('decodeCacheKey', () => {
|
|
56
95
|
it('decodes a full cache key', () => {
|
|
@@ -128,6 +167,24 @@ describe('readMutantCache', () => {
|
|
|
128
167
|
const result = await readMutantCache(tmpDir);
|
|
129
168
|
expect(result).toEqual({});
|
|
130
169
|
});
|
|
170
|
+
it('reads from shard-named file when shard is provided', async () => {
|
|
171
|
+
const cache = {
|
|
172
|
+
'k:v:f.ts:1,0:m': {
|
|
173
|
+
status: 'killed',
|
|
174
|
+
file: 'f.ts',
|
|
175
|
+
line: 1,
|
|
176
|
+
col: 0,
|
|
177
|
+
mutator: 'm',
|
|
178
|
+
},
|
|
179
|
+
};
|
|
180
|
+
await fs.writeFile(path.join(tmpDir, '.mutineer-cache-shard-1-of-2.json'), JSON.stringify(cache));
|
|
181
|
+
const result = await readMutantCache(tmpDir, { index: 1, total: 2 });
|
|
182
|
+
expect(result['k:v:f.ts:1,0:m'].status).toBe('killed');
|
|
183
|
+
});
|
|
184
|
+
it('returns empty object when shard file does not exist', async () => {
|
|
185
|
+
const result = await readMutantCache(tmpDir, { index: 2, total: 4 });
|
|
186
|
+
expect(result).toEqual({});
|
|
187
|
+
});
|
|
131
188
|
it('reads and normalizes object-format cache entries', async () => {
|
|
132
189
|
const cache = {
|
|
133
190
|
'testsig:codesig:file.ts:1,0:flip': {
|
|
@@ -138,7 +195,7 @@ describe('readMutantCache', () => {
|
|
|
138
195
|
mutator: 'flip',
|
|
139
196
|
},
|
|
140
197
|
};
|
|
141
|
-
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), JSON.stringify(cache));
|
|
198
|
+
await fs.writeFile(path.join(tmpDir, '.mutineer-cache.json'), JSON.stringify(cache));
|
|
142
199
|
const result = await readMutantCache(tmpDir);
|
|
143
200
|
expect(result['testsig:codesig:file.ts:1,0:flip']).toEqual({
|
|
144
201
|
status: 'killed',
|
|
@@ -152,14 +209,14 @@ describe('readMutantCache', () => {
|
|
|
152
209
|
const cache = {
|
|
153
210
|
'testsig:codesig:file.ts:1,0:flip': 'killed',
|
|
154
211
|
};
|
|
155
|
-
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), JSON.stringify(cache));
|
|
212
|
+
await fs.writeFile(path.join(tmpDir, '.mutineer-cache.json'), JSON.stringify(cache));
|
|
156
213
|
const result = await readMutantCache(tmpDir);
|
|
157
214
|
const entry = result['testsig:codesig:file.ts:1,0:flip'];
|
|
158
215
|
expect(entry.status).toBe('killed');
|
|
159
216
|
expect(entry.mutator).toBe('flip');
|
|
160
217
|
});
|
|
161
218
|
it('returns empty object for invalid JSON', async () => {
|
|
162
|
-
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), 'not json');
|
|
219
|
+
await fs.writeFile(path.join(tmpDir, '.mutineer-cache.json'), 'not json');
|
|
163
220
|
const result = await readMutantCache(tmpDir);
|
|
164
221
|
expect(result).toEqual({});
|
|
165
222
|
});
|
|
@@ -169,7 +226,7 @@ describe('readMutantCache', () => {
|
|
|
169
226
|
status: 'escaped',
|
|
170
227
|
},
|
|
171
228
|
};
|
|
172
|
-
await fs.writeFile(path.join(tmpDir, '.mutate-cache.json'), JSON.stringify(cache));
|
|
229
|
+
await fs.writeFile(path.join(tmpDir, '.mutineer-cache.json'), JSON.stringify(cache));
|
|
173
230
|
const result = await readMutantCache(tmpDir);
|
|
174
231
|
const entry = result['testsig:codesig:file.ts:5,3:mut'];
|
|
175
232
|
expect(entry.status).toBe('escaped');
|
|
@@ -38,4 +38,34 @@ describe('cleanupMutineerDirs', () => {
|
|
|
38
38
|
const stat = await fs.stat(srcDir);
|
|
39
39
|
expect(stat.isDirectory()).toBe(true);
|
|
40
40
|
});
|
|
41
|
+
it('does not remove cache files by default', async () => {
|
|
42
|
+
const cacheFile = path.join(tmpDir, '.mutineer-cache.json');
|
|
43
|
+
await fs.writeFile(cacheFile, '{}');
|
|
44
|
+
await cleanupMutineerDirs(tmpDir);
|
|
45
|
+
await expect(fs.access(cacheFile)).resolves.toBeUndefined();
|
|
46
|
+
});
|
|
47
|
+
it('removes .mutineer-cache*.json files when includeCacheFiles is true', async () => {
|
|
48
|
+
const cacheFile = path.join(tmpDir, '.mutineer-cache.json');
|
|
49
|
+
const shardFile = path.join(tmpDir, '.mutineer-cache-shard-1-of-2.json');
|
|
50
|
+
await fs.writeFile(cacheFile, '{}');
|
|
51
|
+
await fs.writeFile(shardFile, '{}');
|
|
52
|
+
await cleanupMutineerDirs(tmpDir, { includeCacheFiles: true });
|
|
53
|
+
await expect(fs.access(cacheFile)).rejects.toThrow();
|
|
54
|
+
await expect(fs.access(shardFile)).rejects.toThrow();
|
|
55
|
+
});
|
|
56
|
+
it('removes legacy .mutate-cache*.json files for migration when includeCacheFiles is true', async () => {
|
|
57
|
+
const legacyCache = path.join(tmpDir, '.mutate-cache.json');
|
|
58
|
+
const legacyShard = path.join(tmpDir, '.mutate-cache-shard-2-of-4.json');
|
|
59
|
+
await fs.writeFile(legacyCache, '{}');
|
|
60
|
+
await fs.writeFile(legacyShard, '{}');
|
|
61
|
+
await cleanupMutineerDirs(tmpDir, { includeCacheFiles: true });
|
|
62
|
+
await expect(fs.access(legacyCache)).rejects.toThrow();
|
|
63
|
+
await expect(fs.access(legacyShard)).rejects.toThrow();
|
|
64
|
+
});
|
|
65
|
+
it('removes .mutineer-cache*.json.tmp temp files when includeCacheFiles is true', async () => {
|
|
66
|
+
const tmpFile = path.join(tmpDir, '.mutineer-cache.json.tmp');
|
|
67
|
+
await fs.writeFile(tmpFile, '{}');
|
|
68
|
+
await cleanupMutineerDirs(tmpDir, { includeCacheFiles: true });
|
|
69
|
+
await expect(fs.access(tmpFile)).rejects.toThrow();
|
|
70
|
+
});
|
|
41
71
|
});
|