vitest 3.2.0-beta.2 → 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +29 -0
- package/dist/browser.d.ts +3 -3
- package/dist/browser.js +2 -2
- package/dist/chunks/{base.DwtwORaC.js → base.Cg0miDlQ.js} +11 -14
- package/dist/chunks/{benchmark.BoF7jW0Q.js → benchmark.CYdenmiT.js} +4 -6
- package/dist/chunks/{cac.I9MLYfT-.js → cac.6rXCxFY1.js} +76 -143
- package/dist/chunks/{cli-api.d6IK1pnk.js → cli-api.Cej3MBjA.js} +1460 -1344
- package/dist/chunks/{config.d.UqE-KR0o.d.ts → config.d.D2ROskhv.d.ts} +2 -0
- package/dist/chunks/{console.K1NMVOSc.js → console.CtFJOzRO.js} +25 -45
- package/dist/chunks/{constants.BZZyIeIE.js → constants.DnKduX2e.js} +1 -0
- package/dist/chunks/{coverage.0iPg4Wrz.js → coverage.DVF1vEu8.js} +4 -12
- package/dist/chunks/{coverage.OGU09Jbh.js → coverage.EIiagJJP.js} +578 -993
- package/dist/chunks/{creator.DGAdZ4Hj.js → creator.GK6I-cL4.js} +39 -83
- package/dist/chunks/date.Bq6ZW5rf.js +73 -0
- package/dist/chunks/{defaults.DSxsTG0h.js → defaults.B7q_naMc.js} +2 -1
- package/dist/chunks/{env.Dq0hM4Xv.js → env.D4Lgay0q.js} +1 -1
- package/dist/chunks/{environment.d.D8YDy2v5.d.ts → environment.d.cL3nLXbE.d.ts} +1 -0
- package/dist/chunks/{execute.JlGHLJZT.js → execute.B7h3T_Hc.js} +126 -217
- package/dist/chunks/{git.DXfdBEfR.js → git.BVQ8w_Sw.js} +1 -3
- package/dist/chunks/{global.d.BPa1eL3O.d.ts → global.d.MAmajcmJ.d.ts} +5 -1
- package/dist/chunks/{globals.CpxW8ccg.js → globals.DEHgCU4V.js} +7 -6
- package/dist/chunks/{index.CV36oG_L.js → index.BZ0g1JD2.js} +430 -625
- package/dist/chunks/{index.DswW_LEs.js → index.BbB8_kAK.js} +25 -24
- package/dist/chunks/{index.CmC5OK9L.js → index.CIyJn3t1.js} +38 -82
- package/dist/chunks/{index.CfXMNXHg.js → index.CdQS2e2Q.js} +4 -2
- package/dist/chunks/{index.DFXFpH3w.js → index.CmSc2RE5.js} +85 -105
- package/dist/chunks/index.D3XRDfWc.js +213 -0
- package/dist/chunks/{inspector.DbDkSkFn.js → inspector.C914Efll.js} +4 -1
- package/dist/chunks/{node.3xsWotC9.js → node.fjCdwEIl.js} +1 -1
- package/dist/chunks/{reporters.d.CLC9rhKy.d.ts → reporters.d.C1ogPriE.d.ts} +47 -9
- package/dist/chunks/{rpc.D9_013TY.js → rpc.Iovn4oWe.js} +10 -19
- package/dist/chunks/{runBaseTests.Dn2vyej_.js → runBaseTests.Dd85QTll.js} +27 -31
- package/dist/chunks/{setup-common.CYo3Y0dD.js → setup-common.Dd054P77.js} +16 -42
- package/dist/chunks/{typechecker.DnTrplSJ.js → typechecker.DRKU1-1g.js} +163 -186
- package/dist/chunks/{utils.BfxieIyZ.js → utils.CAioKnHs.js} +9 -14
- package/dist/chunks/{utils.CgTj3MsC.js → utils.XdZDrNZV.js} +6 -13
- package/dist/chunks/{vi.BFR5YIgu.js → vi.bdSIJ99Y.js} +137 -263
- package/dist/chunks/{vite.d.CBZ3M_ru.d.ts → vite.d.DqE4-hhK.d.ts} +3 -1
- package/dist/chunks/{vm.C1HHjtNS.js → vm.BThCzidc.js} +164 -212
- package/dist/chunks/{worker.d.D5Xdi-Zr.d.ts → worker.d.DvqK5Vmu.d.ts} +1 -1
- package/dist/chunks/{worker.d.CoCI7hzP.d.ts → worker.d.tQu2eJQy.d.ts} +5 -3
- package/dist/cli.js +5 -5
- package/dist/config.cjs +3 -1
- package/dist/config.d.ts +7 -6
- package/dist/config.js +3 -3
- package/dist/coverage.d.ts +4 -4
- package/dist/coverage.js +7 -7
- package/dist/environments.d.ts +6 -2
- package/dist/environments.js +1 -1
- package/dist/execute.d.ts +9 -3
- package/dist/execute.js +1 -1
- package/dist/index.d.ts +28 -15
- package/dist/index.js +5 -5
- package/dist/node.d.ts +18 -10
- package/dist/node.js +17 -17
- package/dist/reporters.d.ts +4 -4
- package/dist/reporters.js +4 -4
- package/dist/runners.d.ts +6 -3
- package/dist/runners.js +59 -80
- package/dist/snapshot.js +2 -2
- package/dist/suite.js +2 -2
- package/dist/worker.js +39 -41
- package/dist/workers/forks.js +6 -4
- package/dist/workers/runVmTests.js +20 -21
- package/dist/workers/threads.js +4 -4
- package/dist/workers/vmForks.js +6 -6
- package/dist/workers/vmThreads.js +6 -6
- package/dist/workers.d.ts +4 -4
- package/dist/workers.js +10 -10
- package/package.json +21 -19
- package/dist/chunks/date.CDOsz-HY.js +0 -53
- package/dist/chunks/index.CK1YOQaa.js +0 -143
|
@@ -1,34 +1,34 @@
|
|
|
1
1
|
import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
|
|
2
|
-
import {
|
|
2
|
+
import { isAbsolute, join as join$1, dirname as dirname$1, resolve as resolve$1, relative, normalize } from 'pathe';
|
|
3
3
|
import pm from 'picomatch';
|
|
4
4
|
import c from 'tinyrainbow';
|
|
5
|
-
import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.
|
|
5
|
+
import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.B7q_naMc.js';
|
|
6
6
|
import crypto from 'node:crypto';
|
|
7
|
-
import {
|
|
7
|
+
import { createDefer, shuffle, toArray } from '@vitest/utils';
|
|
8
8
|
import { builtinModules, createRequire } from 'node:module';
|
|
9
|
-
import path, { win32, dirname
|
|
9
|
+
import path, { win32, dirname, join, resolve } from 'node:path';
|
|
10
10
|
import process$1 from 'node:process';
|
|
11
|
-
import
|
|
11
|
+
import fs$1, { writeFile, rename, stat, unlink } from 'node:fs/promises';
|
|
12
12
|
import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL as URL$1 } from 'node:url';
|
|
13
13
|
import assert from 'node:assert';
|
|
14
14
|
import v8 from 'node:v8';
|
|
15
15
|
import { format, inspect } from 'node:util';
|
|
16
|
-
import {
|
|
17
|
-
import { a as
|
|
16
|
+
import { version, mergeConfig } from 'vite';
|
|
17
|
+
import { e as extraInlineDeps, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.DnKduX2e.js';
|
|
18
|
+
import { a as isWindows } from './env.D4Lgay0q.js';
|
|
18
19
|
import * as nodeos from 'node:os';
|
|
19
20
|
import nodeos__default from 'node:os';
|
|
20
21
|
import { isatty } from 'node:tty';
|
|
21
|
-
import { version } from 'vite';
|
|
22
22
|
import EventEmitter from 'node:events';
|
|
23
23
|
import { c as createBirpc } from './index.CJ0plNrh.js';
|
|
24
24
|
import Tinypool$1, { Tinypool } from 'tinypool';
|
|
25
|
-
import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.
|
|
25
|
+
import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.DRKU1-1g.js';
|
|
26
26
|
import { MessageChannel } from 'node:worker_threads';
|
|
27
27
|
import { hasFailed } from '@vitest/runner/utils';
|
|
28
28
|
import { rootDir } from '../path.js';
|
|
29
|
-
import { slash
|
|
29
|
+
import { slash } from 'vite-node/utils';
|
|
30
30
|
import { isCI, provider } from 'std-env';
|
|
31
|
-
import { r as resolveCoverageProviderModule } from './coverage.
|
|
31
|
+
import { r as resolveCoverageProviderModule } from './coverage.DVF1vEu8.js';
|
|
32
32
|
|
|
33
33
|
function groupBy(collection, iteratee) {
|
|
34
34
|
return collection.reduce((acc, item) => {
|
|
@@ -39,150 +39,24 @@ function groupBy(collection, iteratee) {
|
|
|
39
39
|
}, {});
|
|
40
40
|
}
|
|
41
41
|
function stdout() {
|
|
42
|
+
// @ts-expect-error Node.js maps process.stdout to console._stdout
|
|
43
|
+
// eslint-disable-next-line no-console
|
|
42
44
|
return console._stdout || process.stdout;
|
|
43
45
|
}
|
|
44
46
|
function escapeRegExp(s) {
|
|
47
|
+
// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
|
|
45
48
|
return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
46
49
|
}
|
|
47
50
|
function wildcardPatternToRegExp(pattern) {
|
|
48
51
|
const negated = pattern.startsWith("!");
|
|
49
|
-
if (negated)
|
|
50
|
-
pattern = pattern.slice(1);
|
|
51
|
-
}
|
|
52
|
+
if (negated) pattern = pattern.slice(1);
|
|
52
53
|
let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
|
|
53
|
-
if (negated) {
|
|
54
|
-
regexp = `(?!${regexp})`;
|
|
55
|
-
}
|
|
54
|
+
if (negated) regexp = `(?!${regexp})`;
|
|
56
55
|
return new RegExp(`^${regexp}`, "i");
|
|
57
56
|
}
|
|
58
57
|
|
|
59
58
|
const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding));
|
|
60
59
|
|
|
61
|
-
class FilesStatsCache {
|
|
62
|
-
cache = new Map();
|
|
63
|
-
getStats(key) {
|
|
64
|
-
return this.cache.get(key);
|
|
65
|
-
}
|
|
66
|
-
async populateStats(root, specs) {
|
|
67
|
-
const promises = specs.map((spec) => {
|
|
68
|
-
const key = `${spec[0].name}:${relative(root, spec.moduleId)}`;
|
|
69
|
-
return this.updateStats(spec.moduleId, key);
|
|
70
|
-
});
|
|
71
|
-
await Promise.all(promises);
|
|
72
|
-
}
|
|
73
|
-
async updateStats(fsPath, key) {
|
|
74
|
-
if (!fs.existsSync(fsPath)) {
|
|
75
|
-
return;
|
|
76
|
-
}
|
|
77
|
-
const stats = await fs.promises.stat(fsPath);
|
|
78
|
-
this.cache.set(key, { size: stats.size });
|
|
79
|
-
}
|
|
80
|
-
removeStats(fsPath) {
|
|
81
|
-
this.cache.forEach((_, key) => {
|
|
82
|
-
if (key.endsWith(fsPath)) {
|
|
83
|
-
this.cache.delete(key);
|
|
84
|
-
}
|
|
85
|
-
});
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
class ResultsCache {
|
|
90
|
-
cache = new Map();
|
|
91
|
-
workspacesKeyMap = new Map();
|
|
92
|
-
cachePath = null;
|
|
93
|
-
version;
|
|
94
|
-
root = "/";
|
|
95
|
-
constructor(version) {
|
|
96
|
-
this.version = version;
|
|
97
|
-
}
|
|
98
|
-
getCachePath() {
|
|
99
|
-
return this.cachePath;
|
|
100
|
-
}
|
|
101
|
-
setConfig(root, config) {
|
|
102
|
-
this.root = root;
|
|
103
|
-
if (config) {
|
|
104
|
-
this.cachePath = resolve(config.dir, "results.json");
|
|
105
|
-
}
|
|
106
|
-
}
|
|
107
|
-
getResults(key) {
|
|
108
|
-
return this.cache.get(key);
|
|
109
|
-
}
|
|
110
|
-
async readFromCache() {
|
|
111
|
-
if (!this.cachePath) {
|
|
112
|
-
return;
|
|
113
|
-
}
|
|
114
|
-
if (!fs.existsSync(this.cachePath)) {
|
|
115
|
-
return;
|
|
116
|
-
}
|
|
117
|
-
const resultsCache = await fs.promises.readFile(this.cachePath, "utf8");
|
|
118
|
-
const { results, version } = JSON.parse(resultsCache || "[]");
|
|
119
|
-
if (Number(version.split(".")[1]) >= 30) {
|
|
120
|
-
this.cache = new Map(results);
|
|
121
|
-
this.version = version;
|
|
122
|
-
results.forEach(([spec]) => {
|
|
123
|
-
const [projectName, relativePath] = spec.split(":");
|
|
124
|
-
const keyMap = this.workspacesKeyMap.get(relativePath) || [];
|
|
125
|
-
keyMap.push(projectName);
|
|
126
|
-
this.workspacesKeyMap.set(relativePath, keyMap);
|
|
127
|
-
});
|
|
128
|
-
}
|
|
129
|
-
}
|
|
130
|
-
updateResults(files) {
|
|
131
|
-
files.forEach((file) => {
|
|
132
|
-
const result = file.result;
|
|
133
|
-
if (!result) {
|
|
134
|
-
return;
|
|
135
|
-
}
|
|
136
|
-
const duration = result.duration || 0;
|
|
137
|
-
const relativePath = relative(this.root, file.filepath);
|
|
138
|
-
this.cache.set(`${file.projectName || ""}:${relativePath}`, {
|
|
139
|
-
duration: duration >= 0 ? duration : 0,
|
|
140
|
-
failed: result.state === "fail"
|
|
141
|
-
});
|
|
142
|
-
});
|
|
143
|
-
}
|
|
144
|
-
removeFromCache(filepath) {
|
|
145
|
-
this.cache.forEach((_, key) => {
|
|
146
|
-
if (key.endsWith(filepath)) {
|
|
147
|
-
this.cache.delete(key);
|
|
148
|
-
}
|
|
149
|
-
});
|
|
150
|
-
}
|
|
151
|
-
async writeToCache() {
|
|
152
|
-
if (!this.cachePath) {
|
|
153
|
-
return;
|
|
154
|
-
}
|
|
155
|
-
const results = Array.from(this.cache.entries());
|
|
156
|
-
const cacheDirname = dirname(this.cachePath);
|
|
157
|
-
if (!fs.existsSync(cacheDirname)) {
|
|
158
|
-
await fs.promises.mkdir(cacheDirname, { recursive: true });
|
|
159
|
-
}
|
|
160
|
-
const cache = JSON.stringify({
|
|
161
|
-
version: this.version,
|
|
162
|
-
results
|
|
163
|
-
});
|
|
164
|
-
await fs.promises.writeFile(this.cachePath, cache);
|
|
165
|
-
}
|
|
166
|
-
}
|
|
167
|
-
|
|
168
|
-
class VitestCache {
|
|
169
|
-
results;
|
|
170
|
-
stats = new FilesStatsCache();
|
|
171
|
-
constructor(version) {
|
|
172
|
-
this.results = new ResultsCache(version);
|
|
173
|
-
}
|
|
174
|
-
getFileTestResults(key) {
|
|
175
|
-
return this.results.getResults(key);
|
|
176
|
-
}
|
|
177
|
-
getFileStats(key) {
|
|
178
|
-
return this.stats.getStats(key);
|
|
179
|
-
}
|
|
180
|
-
static resolveCacheDir(root, dir, projectName) {
|
|
181
|
-
const baseDir = slash(dir || "node_modules/.vite/vitest");
|
|
182
|
-
return projectName ? resolve(root, baseDir, hash("md5", projectName, "hex")) : resolve(root, baseDir);
|
|
183
|
-
}
|
|
184
|
-
}
|
|
185
|
-
|
|
186
60
|
const JOIN_LEADING_SLASH_RE = /^\.?\//;
|
|
187
61
|
function withTrailingSlash(input = "", respectQueryAndFragment) {
|
|
188
62
|
{
|
|
@@ -2363,7 +2237,7 @@ async function findUp$1(name, {
|
|
|
2363
2237
|
while (directory) {
|
|
2364
2238
|
const filePath = isAbsoluteName ? name : path.join(directory, name);
|
|
2365
2239
|
try {
|
|
2366
|
-
const stats = await
|
|
2240
|
+
const stats = await fs$1.stat(filePath); // eslint-disable-line no-await-in-loop
|
|
2367
2241
|
if ((type === 'file' && stats.isFile()) || (type === 'directory' && stats.isDirectory())) {
|
|
2368
2242
|
return filePath;
|
|
2369
2243
|
}
|
|
@@ -2453,7 +2327,7 @@ const getPackageInfo = quansync(function* (name, options = {}) {
|
|
|
2453
2327
|
return {
|
|
2454
2328
|
name,
|
|
2455
2329
|
version: packageJson.version,
|
|
2456
|
-
rootPath: dirname
|
|
2330
|
+
rootPath: dirname(packageJsonPath),
|
|
2457
2331
|
packageJsonPath,
|
|
2458
2332
|
packageJson
|
|
2459
2333
|
};
|
|
@@ -2477,7 +2351,7 @@ function searchPackageJSON(dir) {
|
|
|
2477
2351
|
while (true) {
|
|
2478
2352
|
if (!dir)
|
|
2479
2353
|
return;
|
|
2480
|
-
const newDir = dirname
|
|
2354
|
+
const newDir = dirname(dir);
|
|
2481
2355
|
if (newDir === dir)
|
|
2482
2356
|
return;
|
|
2483
2357
|
dir = newDir;
|
|
@@ -2517,32 +2391,29 @@ const envsOrder = [
|
|
|
2517
2391
|
"edge-runtime"
|
|
2518
2392
|
];
|
|
2519
2393
|
function getTransformMode(patterns, filename) {
|
|
2520
|
-
if (patterns.web && pm.isMatch(filename, patterns.web))
|
|
2521
|
-
|
|
2522
|
-
|
|
2523
|
-
if (patterns.ssr && pm.isMatch(filename, patterns.ssr)) {
|
|
2524
|
-
return "ssr";
|
|
2525
|
-
}
|
|
2526
|
-
return undefined;
|
|
2394
|
+
if (patterns.web && pm.isMatch(filename, patterns.web)) return "web";
|
|
2395
|
+
if (patterns.ssr && pm.isMatch(filename, patterns.ssr)) return "ssr";
|
|
2396
|
+
return void 0;
|
|
2527
2397
|
}
|
|
2528
2398
|
async function groupFilesByEnv(files) {
|
|
2529
2399
|
const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
|
|
2530
2400
|
const code = await promises$1.readFile(filepath, "utf-8");
|
|
2401
|
+
// 1. Check for control comments in the file
|
|
2531
2402
|
let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
|
|
2403
|
+
// 2. Check for globals
|
|
2532
2404
|
if (!env) {
|
|
2533
|
-
for (const [glob, target] of project.config.environmentMatchGlobs || []) {
|
|
2534
|
-
|
|
2535
|
-
|
|
2536
|
-
break;
|
|
2537
|
-
}
|
|
2405
|
+
for (const [glob, target] of project.config.environmentMatchGlobs || []) if (pm.isMatch(filepath, glob, { cwd: project.config.root })) {
|
|
2406
|
+
env = target;
|
|
2407
|
+
break;
|
|
2538
2408
|
}
|
|
2539
2409
|
}
|
|
2410
|
+
// 3. Fallback to global env
|
|
2540
2411
|
env ||= project.config.environment || "node";
|
|
2541
2412
|
const transformMode = getTransformMode(project.config.testTransformMode, filepath);
|
|
2542
2413
|
let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
|
|
2543
|
-
if (envOptionsJson?.endsWith("*/"))
|
|
2544
|
-
|
|
2545
|
-
|
|
2414
|
+
if (envOptionsJson?.endsWith("*/"))
|
|
2415
|
+
// Trim closing Docblock characters the above regex might have captured
|
|
2416
|
+
envOptionsJson = envOptionsJson.slice(0, -2);
|
|
2546
2417
|
const envOptions = JSON.parse(envOptionsJson || "null");
|
|
2547
2418
|
const envKey = env === "happy-dom" ? "happyDOM" : env;
|
|
2548
2419
|
const environment = {
|
|
@@ -2562,8 +2433,8 @@ async function groupFilesByEnv(files) {
|
|
|
2562
2433
|
return groupBy(filesWithEnv, ({ environment }) => environment.name);
|
|
2563
2434
|
}
|
|
2564
2435
|
|
|
2565
|
-
const created = new Set();
|
|
2566
|
-
const promises = new Map();
|
|
2436
|
+
const created = /* @__PURE__ */ new Set();
|
|
2437
|
+
const promises = /* @__PURE__ */ new Map();
|
|
2567
2438
|
function createMethodsRPC(project, options = {}) {
|
|
2568
2439
|
const ctx = project.vitest;
|
|
2569
2440
|
const cacheFs = options.cacheFs ?? false;
|
|
@@ -2577,9 +2448,7 @@ function createMethodsRPC(project, options = {}) {
|
|
|
2577
2448
|
async getSourceMap(id, force) {
|
|
2578
2449
|
if (force) {
|
|
2579
2450
|
const mod = project.vite.moduleGraph.getModuleById(id);
|
|
2580
|
-
if (mod)
|
|
2581
|
-
project.vite.moduleGraph.invalidateModule(mod);
|
|
2582
|
-
}
|
|
2451
|
+
if (mod) project.vite.moduleGraph.invalidateModule(mod);
|
|
2583
2452
|
}
|
|
2584
2453
|
const r = await project.vitenode.transformRequest(id);
|
|
2585
2454
|
return r?.map;
|
|
@@ -2587,15 +2456,9 @@ function createMethodsRPC(project, options = {}) {
|
|
|
2587
2456
|
async fetch(id, transformMode) {
|
|
2588
2457
|
const result = await project.vitenode.fetchResult(id, transformMode).catch(handleRollupError);
|
|
2589
2458
|
const code = result.code;
|
|
2590
|
-
if (!cacheFs || result.externalize)
|
|
2591
|
-
|
|
2592
|
-
}
|
|
2593
|
-
if ("id" in result && typeof result.id === "string") {
|
|
2594
|
-
return { id: result.id };
|
|
2595
|
-
}
|
|
2596
|
-
if (code == null) {
|
|
2597
|
-
throw new Error(`Failed to fetch module ${id}`);
|
|
2598
|
-
}
|
|
2459
|
+
if (!cacheFs || result.externalize) return result;
|
|
2460
|
+
if ("id" in result && typeof result.id === "string") return { id: result.id };
|
|
2461
|
+
if (code == null) throw new Error(`Failed to fetch module ${id}`);
|
|
2599
2462
|
const dir = join$1(project.tmpDir, transformMode);
|
|
2600
2463
|
const name = hash("sha1", id, "hex");
|
|
2601
2464
|
const tmp = join$1(dir, name);
|
|
@@ -2619,35 +2482,26 @@ function createMethodsRPC(project, options = {}) {
|
|
|
2619
2482
|
return project.vitenode.transformModule(id, environment).catch(handleRollupError);
|
|
2620
2483
|
},
|
|
2621
2484
|
async onQueued(file) {
|
|
2622
|
-
if (options.collect)
|
|
2623
|
-
|
|
2624
|
-
} else {
|
|
2625
|
-
await ctx._testRun.enqueued(project, file);
|
|
2626
|
-
}
|
|
2485
|
+
if (options.collect) ctx.state.collectFiles(project, [file]);
|
|
2486
|
+
else await ctx._testRun.enqueued(project, file);
|
|
2627
2487
|
},
|
|
2628
2488
|
async onCollected(files) {
|
|
2629
|
-
if (options.collect)
|
|
2630
|
-
|
|
2631
|
-
} else {
|
|
2632
|
-
await ctx._testRun.collected(project, files);
|
|
2633
|
-
}
|
|
2489
|
+
if (options.collect) ctx.state.collectFiles(project, files);
|
|
2490
|
+
else await ctx._testRun.collected(project, files);
|
|
2634
2491
|
},
|
|
2635
2492
|
onAfterSuiteRun(meta) {
|
|
2636
2493
|
ctx.coverageProvider?.onAfterSuiteRun(meta);
|
|
2637
2494
|
},
|
|
2495
|
+
async onTaskAnnotate(testId, annotation) {
|
|
2496
|
+
return ctx._testRun.annotate(testId, annotation);
|
|
2497
|
+
},
|
|
2638
2498
|
async onTaskUpdate(packs, events) {
|
|
2639
|
-
if (options.collect)
|
|
2640
|
-
|
|
2641
|
-
} else {
|
|
2642
|
-
await ctx._testRun.updated(packs, events);
|
|
2643
|
-
}
|
|
2499
|
+
if (options.collect) ctx.state.updateTasks(packs);
|
|
2500
|
+
else await ctx._testRun.updated(packs, events);
|
|
2644
2501
|
},
|
|
2645
2502
|
async onUserConsoleLog(log) {
|
|
2646
|
-
if (options.collect)
|
|
2647
|
-
|
|
2648
|
-
} else {
|
|
2649
|
-
await ctx._testRun.log(log);
|
|
2650
|
-
}
|
|
2503
|
+
if (options.collect) ctx.state.updateUserLog(log);
|
|
2504
|
+
else await ctx._testRun.log(log);
|
|
2651
2505
|
},
|
|
2652
2506
|
onUnhandledError(err, type) {
|
|
2653
2507
|
ctx.state.catchError(err, type);
|
|
@@ -2660,21 +2514,22 @@ function createMethodsRPC(project, options = {}) {
|
|
|
2660
2514
|
}
|
|
2661
2515
|
};
|
|
2662
2516
|
}
|
|
2517
|
+
// serialize rollup error on server to preserve details as a test error
|
|
2663
2518
|
function handleRollupError(e) {
|
|
2664
|
-
if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e))
|
|
2665
|
-
|
|
2666
|
-
|
|
2667
|
-
|
|
2668
|
-
|
|
2669
|
-
|
|
2670
|
-
|
|
2671
|
-
|
|
2672
|
-
|
|
2673
|
-
|
|
2674
|
-
|
|
2675
|
-
|
|
2676
|
-
}
|
|
2677
|
-
}
|
|
2519
|
+
if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e))
|
|
2520
|
+
// eslint-disable-next-line no-throw-literal
|
|
2521
|
+
throw {
|
|
2522
|
+
name: e.name,
|
|
2523
|
+
message: e.message,
|
|
2524
|
+
stack: e.stack,
|
|
2525
|
+
cause: e.cause,
|
|
2526
|
+
__vitest_rollup_error__: {
|
|
2527
|
+
plugin: e.plugin,
|
|
2528
|
+
id: e.id,
|
|
2529
|
+
loc: e.loc,
|
|
2530
|
+
frame: e.frame
|
|
2531
|
+
}
|
|
2532
|
+
};
|
|
2678
2533
|
throw e;
|
|
2679
2534
|
}
|
|
2680
2535
|
/**
|
|
@@ -2692,30 +2547,28 @@ function handleRollupError(e) {
|
|
|
2692
2547
|
* Added in https://github.com/vitest-dev/vitest/pull/7531
|
|
2693
2548
|
*/
|
|
2694
2549
|
async function atomicWriteFile(realFilePath, data) {
|
|
2695
|
-
const dir = dirname(realFilePath);
|
|
2550
|
+
const dir = dirname$1(realFilePath);
|
|
2696
2551
|
const tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
|
2697
2552
|
try {
|
|
2698
2553
|
await writeFile(tmpFilePath, data, "utf-8");
|
|
2699
2554
|
await rename(tmpFilePath, realFilePath);
|
|
2700
2555
|
} finally {
|
|
2701
2556
|
try {
|
|
2702
|
-
if (await stat(tmpFilePath))
|
|
2703
|
-
await unlink(tmpFilePath);
|
|
2704
|
-
}
|
|
2557
|
+
if (await stat(tmpFilePath)) await unlink(tmpFilePath);
|
|
2705
2558
|
} catch {}
|
|
2706
2559
|
}
|
|
2707
2560
|
}
|
|
2708
2561
|
|
|
2709
2562
|
function createChildProcessChannel$1(project, collect = false) {
|
|
2710
2563
|
const emitter = new EventEmitter();
|
|
2711
|
-
const cleanup = () => emitter.removeAllListeners();
|
|
2712
2564
|
const events = {
|
|
2713
2565
|
message: "message",
|
|
2714
2566
|
response: "response"
|
|
2715
2567
|
};
|
|
2716
2568
|
const channel = {
|
|
2717
2569
|
onMessage: (callback) => emitter.on(events.message, callback),
|
|
2718
|
-
postMessage: (message) => emitter.emit(events.response, message)
|
|
2570
|
+
postMessage: (message) => emitter.emit(events.response, message),
|
|
2571
|
+
onClose: () => emitter.removeAllListeners()
|
|
2719
2572
|
};
|
|
2720
2573
|
const rpc = createBirpc(createMethodsRPC(project, {
|
|
2721
2574
|
cacheFs: true,
|
|
@@ -2734,34 +2587,30 @@ function createChildProcessChannel$1(project, collect = false) {
|
|
|
2734
2587
|
throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
|
|
2735
2588
|
}
|
|
2736
2589
|
});
|
|
2737
|
-
project.
|
|
2738
|
-
return
|
|
2739
|
-
channel,
|
|
2740
|
-
cleanup
|
|
2741
|
-
};
|
|
2590
|
+
project.vitest.onCancel((reason) => rpc.onCancel(reason));
|
|
2591
|
+
return channel;
|
|
2742
2592
|
}
|
|
2743
|
-
function createForksPool(
|
|
2593
|
+
function createForksPool(vitest, { execArgv, env }) {
|
|
2744
2594
|
const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
|
|
2745
|
-
const threadsCount =
|
|
2746
|
-
const poolOptions =
|
|
2747
|
-
const maxThreads = poolOptions.maxForks ??
|
|
2748
|
-
const minThreads = poolOptions.minForks ??
|
|
2749
|
-
const worker = resolve
|
|
2595
|
+
const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
|
|
2596
|
+
const poolOptions = vitest.config.poolOptions?.forks ?? {};
|
|
2597
|
+
const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
|
|
2598
|
+
const minThreads = poolOptions.minForks ?? vitest.config.minWorkers ?? threadsCount;
|
|
2599
|
+
const worker = resolve(vitest.distPath, "workers/forks.js");
|
|
2750
2600
|
const options = {
|
|
2751
2601
|
runtime: "child_process",
|
|
2752
|
-
filename: resolve
|
|
2602
|
+
filename: resolve(vitest.distPath, "worker.js"),
|
|
2603
|
+
teardown: "teardown",
|
|
2753
2604
|
maxThreads,
|
|
2754
2605
|
minThreads,
|
|
2755
2606
|
env,
|
|
2756
2607
|
execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
|
|
2757
|
-
terminateTimeout:
|
|
2608
|
+
terminateTimeout: vitest.config.teardownTimeout,
|
|
2758
2609
|
concurrentTasksPerWorker: 1
|
|
2759
2610
|
};
|
|
2760
2611
|
const isolated = poolOptions.isolate ?? true;
|
|
2761
|
-
if (isolated)
|
|
2762
|
-
|
|
2763
|
-
}
|
|
2764
|
-
if (poolOptions.singleFork || !ctx.config.fileParallelism) {
|
|
2612
|
+
if (isolated) options.isolateWorkers = true;
|
|
2613
|
+
if (poolOptions.singleFork || !vitest.config.fileParallelism) {
|
|
2765
2614
|
options.maxThreads = 1;
|
|
2766
2615
|
options.minThreads = 1;
|
|
2767
2616
|
}
|
|
@@ -2770,8 +2619,8 @@ function createForksPool(ctx, { execArgv, env }) {
|
|
|
2770
2619
|
let id = 0;
|
|
2771
2620
|
async function runFiles(project, config, files, environment, invalidates = []) {
|
|
2772
2621
|
const paths = files.map((f) => f.filepath);
|
|
2773
|
-
|
|
2774
|
-
const
|
|
2622
|
+
vitest.state.clearFiles(project, paths);
|
|
2623
|
+
const channel = createChildProcessChannel$1(project, name === "collect");
|
|
2775
2624
|
const workerId = ++id;
|
|
2776
2625
|
const data = {
|
|
2777
2626
|
pool: "forks",
|
|
@@ -2790,24 +2639,18 @@ function createForksPool(ctx, { execArgv, env }) {
|
|
|
2790
2639
|
channel
|
|
2791
2640
|
});
|
|
2792
2641
|
} catch (error) {
|
|
2793
|
-
|
|
2794
|
-
|
|
2795
|
-
|
|
2796
|
-
|
|
2797
|
-
} else {
|
|
2798
|
-
throw error;
|
|
2799
|
-
}
|
|
2800
|
-
} finally {
|
|
2801
|
-
cleanup();
|
|
2642
|
+
// Worker got stuck and won't terminate - this may cause process to hang
|
|
2643
|
+
if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
|
|
2644
|
+
else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
|
|
2645
|
+
else throw error;
|
|
2802
2646
|
}
|
|
2803
2647
|
}
|
|
2804
2648
|
return async (specs, invalidates) => {
|
|
2805
|
-
|
|
2806
|
-
|
|
2649
|
+
// Cancel pending tasks from pool when possible
|
|
2650
|
+
vitest.onCancel(() => pool.cancelPendingTasks());
|
|
2651
|
+
const configs = /* @__PURE__ */ new WeakMap();
|
|
2807
2652
|
const getConfig = (project) => {
|
|
2808
|
-
if (configs.has(project))
|
|
2809
|
-
return configs.get(project);
|
|
2810
|
-
}
|
|
2653
|
+
if (configs.has(project)) return configs.get(project);
|
|
2811
2654
|
const _config = project.getSerializableConfig();
|
|
2812
2655
|
const config = wrapSerializableConfig(_config);
|
|
2813
2656
|
configs.set(project, config);
|
|
@@ -2819,31 +2662,32 @@ function createForksPool(ctx, { execArgv, env }) {
|
|
|
2819
2662
|
const filesByEnv = await groupFilesByEnv(multipleForks);
|
|
2820
2663
|
const files = Object.values(filesByEnv).flat();
|
|
2821
2664
|
const results = [];
|
|
2822
|
-
if (isolated) {
|
|
2823
|
-
|
|
2824
|
-
|
|
2665
|
+
if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
|
|
2666
|
+
else {
|
|
2667
|
+
// When isolation is disabled, we still need to isolate environments and workspace projects from each other.
|
|
2668
|
+
// Tasks are still running parallel but environments are isolated between tasks.
|
|
2825
2669
|
const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
|
|
2826
2670
|
for (const group of Object.values(grouped)) {
|
|
2671
|
+
// Push all files to pool's queue
|
|
2827
2672
|
results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
|
|
2673
|
+
// Once all tasks are running or finished, recycle worker for isolation.
|
|
2674
|
+
// On-going workers will run in the previous environment.
|
|
2828
2675
|
await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
|
|
2829
2676
|
await pool.recycleWorkers();
|
|
2830
2677
|
}
|
|
2831
2678
|
}
|
|
2832
2679
|
const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
|
|
2833
|
-
if (errors.length > 0)
|
|
2834
|
-
throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
2835
|
-
}
|
|
2680
|
+
if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
2836
2681
|
}
|
|
2837
2682
|
if (singleFork.length) {
|
|
2838
2683
|
const filesByEnv = await groupFilesByEnv(singleFork);
|
|
2839
2684
|
const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
|
|
2840
2685
|
for (const env of envs) {
|
|
2841
2686
|
const files = filesByEnv[env];
|
|
2842
|
-
if (!files?.length)
|
|
2843
|
-
continue;
|
|
2844
|
-
}
|
|
2687
|
+
if (!files?.length) continue;
|
|
2845
2688
|
const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
|
|
2846
2689
|
for (const files of Object.values(filesByOptions)) {
|
|
2690
|
+
// Always run environments isolated between each other
|
|
2847
2691
|
await pool.recycleWorkers();
|
|
2848
2692
|
const filenames = files.map((f) => f.file);
|
|
2849
2693
|
await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
|
|
@@ -2876,34 +2720,33 @@ function createWorkerChannel$1(project, collect) {
|
|
|
2876
2720
|
throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
|
|
2877
2721
|
}
|
|
2878
2722
|
});
|
|
2879
|
-
project.
|
|
2723
|
+
project.vitest.onCancel((reason) => rpc.onCancel(reason));
|
|
2880
2724
|
return {
|
|
2881
2725
|
workerPort,
|
|
2882
2726
|
port
|
|
2883
2727
|
};
|
|
2884
2728
|
}
|
|
2885
|
-
function createThreadsPool(
|
|
2729
|
+
function createThreadsPool(vitest, { execArgv, env }) {
|
|
2886
2730
|
const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
|
|
2887
|
-
const threadsCount =
|
|
2888
|
-
const poolOptions =
|
|
2889
|
-
const maxThreads = poolOptions.maxThreads ??
|
|
2890
|
-
const minThreads = poolOptions.minThreads ??
|
|
2891
|
-
const worker = resolve
|
|
2731
|
+
const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
|
|
2732
|
+
const poolOptions = vitest.config.poolOptions?.threads ?? {};
|
|
2733
|
+
const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
|
|
2734
|
+
const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? threadsCount;
|
|
2735
|
+
const worker = resolve(vitest.distPath, "workers/threads.js");
|
|
2892
2736
|
const options = {
|
|
2893
|
-
filename: resolve
|
|
2737
|
+
filename: resolve(vitest.distPath, "worker.js"),
|
|
2738
|
+
teardown: "teardown",
|
|
2894
2739
|
useAtomics: poolOptions.useAtomics ?? false,
|
|
2895
2740
|
maxThreads,
|
|
2896
2741
|
minThreads,
|
|
2897
2742
|
env,
|
|
2898
2743
|
execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
|
|
2899
|
-
terminateTimeout:
|
|
2744
|
+
terminateTimeout: vitest.config.teardownTimeout,
|
|
2900
2745
|
concurrentTasksPerWorker: 1
|
|
2901
2746
|
};
|
|
2902
2747
|
const isolated = poolOptions.isolate ?? true;
|
|
2903
|
-
if (isolated)
|
|
2904
|
-
|
|
2905
|
-
}
|
|
2906
|
-
if (poolOptions.singleThread || !ctx.config.fileParallelism) {
|
|
2748
|
+
if (isolated) options.isolateWorkers = true;
|
|
2749
|
+
if (poolOptions.singleThread || !vitest.config.fileParallelism) {
|
|
2907
2750
|
options.maxThreads = 1;
|
|
2908
2751
|
options.minThreads = 1;
|
|
2909
2752
|
}
|
|
@@ -2912,8 +2755,12 @@ function createThreadsPool(ctx, { execArgv, env }) {
|
|
|
2912
2755
|
let id = 0;
|
|
2913
2756
|
async function runFiles(project, config, files, environment, invalidates = []) {
|
|
2914
2757
|
const paths = files.map((f) => f.filepath);
|
|
2915
|
-
|
|
2758
|
+
vitest.state.clearFiles(project, paths);
|
|
2916
2759
|
const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
|
|
2760
|
+
const onClose = () => {
|
|
2761
|
+
port.close();
|
|
2762
|
+
workerPort.close();
|
|
2763
|
+
};
|
|
2917
2764
|
const workerId = ++id;
|
|
2918
2765
|
const data = {
|
|
2919
2766
|
pool: "threads",
|
|
@@ -2930,29 +2777,23 @@ function createThreadsPool(ctx, { execArgv, env }) {
|
|
|
2930
2777
|
try {
|
|
2931
2778
|
await pool.run(data, {
|
|
2932
2779
|
transferList: [workerPort],
|
|
2933
|
-
name
|
|
2780
|
+
name,
|
|
2781
|
+
channel: { onClose }
|
|
2934
2782
|
});
|
|
2935
2783
|
} catch (error) {
|
|
2936
|
-
|
|
2937
|
-
|
|
2938
|
-
|
|
2939
|
-
|
|
2940
|
-
} else {
|
|
2941
|
-
throw error;
|
|
2942
|
-
}
|
|
2943
|
-
} finally {
|
|
2944
|
-
port.close();
|
|
2945
|
-
workerPort.close();
|
|
2784
|
+
// Worker got stuck and won't terminate - this may cause process to hang
|
|
2785
|
+
if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
|
|
2786
|
+
else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
|
|
2787
|
+
else throw error;
|
|
2946
2788
|
}
|
|
2947
2789
|
}
|
|
2948
2790
|
return async (specs, invalidates) => {
|
|
2949
|
-
|
|
2950
|
-
|
|
2791
|
+
// Cancel pending tasks from pool when possible
|
|
2792
|
+
vitest.onCancel(() => pool.cancelPendingTasks());
|
|
2793
|
+
const configs = /* @__PURE__ */ new WeakMap();
|
|
2951
2794
|
const getConfig = (project) => {
|
|
2952
|
-
if (configs.has(project))
|
|
2953
|
-
|
|
2954
|
-
}
|
|
2955
|
-
const config = project.getSerializableConfig();
|
|
2795
|
+
if (configs.has(project)) return configs.get(project);
|
|
2796
|
+
const config = project.serializedConfig;
|
|
2956
2797
|
configs.set(project, config);
|
|
2957
2798
|
return config;
|
|
2958
2799
|
};
|
|
@@ -2962,31 +2803,32 @@ function createThreadsPool(ctx, { execArgv, env }) {
|
|
|
2962
2803
|
const filesByEnv = await groupFilesByEnv(multipleThreads);
|
|
2963
2804
|
const files = Object.values(filesByEnv).flat();
|
|
2964
2805
|
const results = [];
|
|
2965
|
-
if (isolated) {
|
|
2966
|
-
|
|
2967
|
-
|
|
2806
|
+
if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
|
|
2807
|
+
else {
|
|
2808
|
+
// When isolation is disabled, we still need to isolate environments and workspace projects from each other.
|
|
2809
|
+
// Tasks are still running parallel but environments are isolated between tasks.
|
|
2968
2810
|
const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
|
|
2969
2811
|
for (const group of Object.values(grouped)) {
|
|
2812
|
+
// Push all files to pool's queue
|
|
2970
2813
|
results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
|
|
2814
|
+
// Once all tasks are running or finished, recycle worker for isolation.
|
|
2815
|
+
// On-going workers will run in the previous environment.
|
|
2971
2816
|
await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
|
|
2972
2817
|
await pool.recycleWorkers();
|
|
2973
2818
|
}
|
|
2974
2819
|
}
|
|
2975
2820
|
const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
|
|
2976
|
-
if (errors.length > 0)
|
|
2977
|
-
throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
2978
|
-
}
|
|
2821
|
+
if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
2979
2822
|
}
|
|
2980
2823
|
if (singleThreads.length) {
|
|
2981
2824
|
const filesByEnv = await groupFilesByEnv(singleThreads);
|
|
2982
2825
|
const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
|
|
2983
2826
|
for (const env of envs) {
|
|
2984
2827
|
const files = filesByEnv[env];
|
|
2985
|
-
if (!files?.length)
|
|
2986
|
-
continue;
|
|
2987
|
-
}
|
|
2828
|
+
if (!files?.length) continue;
|
|
2988
2829
|
const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
|
|
2989
2830
|
for (const files of Object.values(filesByOptions)) {
|
|
2831
|
+
// Always run environments isolated between each other
|
|
2990
2832
|
await pool.recycleWorkers();
|
|
2991
2833
|
const filenames = files.map((f) => f.file);
|
|
2992
2834
|
await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
|
|
@@ -3004,15 +2846,13 @@ function createThreadsPool(ctx, { execArgv, env }) {
|
|
|
3004
2846
|
}
|
|
3005
2847
|
|
|
3006
2848
|
function createTypecheckPool(vitest) {
|
|
3007
|
-
const promisesMap = new WeakMap();
|
|
3008
|
-
const rerunTriggered = new WeakSet();
|
|
2849
|
+
const promisesMap = /* @__PURE__ */ new WeakMap();
|
|
2850
|
+
const rerunTriggered = /* @__PURE__ */ new WeakSet();
|
|
3009
2851
|
async function onParseEnd(project, { files, sourceErrors }) {
|
|
3010
2852
|
const checker = project.typechecker;
|
|
3011
2853
|
const { packs, events } = checker.getTestPacksAndEvents();
|
|
3012
2854
|
await vitest._testRun.updated(packs, events);
|
|
3013
|
-
if (!project.config.typecheck.ignoreSourceErrors)
|
|
3014
|
-
sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
|
|
3015
|
-
}
|
|
2855
|
+
if (!project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
|
|
3016
2856
|
const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
|
|
3017
2857
|
if (processError) {
|
|
3018
2858
|
const error = new Error(checker.getOutput());
|
|
@@ -3021,6 +2861,7 @@ function createTypecheckPool(vitest) {
|
|
|
3021
2861
|
}
|
|
3022
2862
|
promisesMap.get(project)?.resolve();
|
|
3023
2863
|
rerunTriggered.delete(project);
|
|
2864
|
+
// triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
|
|
3024
2865
|
if (vitest.config.watch && !vitest.runningPromise) {
|
|
3025
2866
|
await vitest.report("onFinished", files, []);
|
|
3026
2867
|
await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
|
|
@@ -3028,16 +2869,12 @@ function createTypecheckPool(vitest) {
|
|
|
3028
2869
|
}
|
|
3029
2870
|
async function createWorkspaceTypechecker(project, files) {
|
|
3030
2871
|
const checker = project.typechecker ?? new Typechecker(project);
|
|
3031
|
-
if (project.typechecker)
|
|
3032
|
-
return checker;
|
|
3033
|
-
}
|
|
2872
|
+
if (project.typechecker) return checker;
|
|
3034
2873
|
project.typechecker = checker;
|
|
3035
2874
|
checker.setFiles(files);
|
|
3036
2875
|
checker.onParseStart(async () => {
|
|
3037
2876
|
const files = checker.getTestFiles();
|
|
3038
|
-
for (const file of files)
|
|
3039
|
-
await vitest._testRun.enqueued(project, file);
|
|
3040
|
-
}
|
|
2877
|
+
for (const file of files) await vitest._testRun.enqueued(project, file);
|
|
3041
2878
|
await vitest._testRun.collected(project, files);
|
|
3042
2879
|
});
|
|
3043
2880
|
checker.onParseEnd((result) => onParseEnd(project, result));
|
|
@@ -3049,9 +2886,7 @@ function createTypecheckPool(vitest) {
|
|
|
3049
2886
|
}
|
|
3050
2887
|
await checker.collectTests();
|
|
3051
2888
|
const testFiles = checker.getTestFiles();
|
|
3052
|
-
for (const file of testFiles)
|
|
3053
|
-
await vitest._testRun.enqueued(project, file);
|
|
3054
|
-
}
|
|
2889
|
+
for (const file of testFiles) await vitest._testRun.enqueued(project, file);
|
|
3055
2890
|
await vitest._testRun.collected(project, testFiles);
|
|
3056
2891
|
const { packs, events } = checker.getTestPacksAndEvents();
|
|
3057
2892
|
await vitest._testRun.updated(packs, events);
|
|
@@ -3059,9 +2894,7 @@ function createTypecheckPool(vitest) {
|
|
|
3059
2894
|
return checker;
|
|
3060
2895
|
}
|
|
3061
2896
|
async function startTypechecker(project, files) {
|
|
3062
|
-
if (project.typechecker)
|
|
3063
|
-
return project.typechecker;
|
|
3064
|
-
}
|
|
2897
|
+
if (project.typechecker) return;
|
|
3065
2898
|
const checker = await createWorkspaceTypechecker(project, files);
|
|
3066
2899
|
await checker.collectTests();
|
|
3067
2900
|
await checker.start();
|
|
@@ -3085,6 +2918,7 @@ function createTypecheckPool(vitest) {
|
|
|
3085
2918
|
const project = specsByProject[name][0].project;
|
|
3086
2919
|
const files = specsByProject[name].map((spec) => spec.moduleId);
|
|
3087
2920
|
const promise = createDefer();
|
|
2921
|
+
// check that watcher actually triggered rerun
|
|
3088
2922
|
const _p = new Promise((resolve) => {
|
|
3089
2923
|
const _i = setInterval(() => {
|
|
3090
2924
|
if (!project.typechecker || rerunTriggered.has(project)) {
|
|
@@ -3100,16 +2934,14 @@ function createTypecheckPool(vitest) {
|
|
|
3100
2934
|
const triggered = await _p;
|
|
3101
2935
|
if (project.typechecker && !triggered) {
|
|
3102
2936
|
const testFiles = project.typechecker.getTestFiles();
|
|
3103
|
-
for (const file of testFiles)
|
|
3104
|
-
await vitest._testRun.enqueued(project, file);
|
|
3105
|
-
}
|
|
2937
|
+
for (const file of testFiles) await vitest._testRun.enqueued(project, file);
|
|
3106
2938
|
await vitest._testRun.collected(project, testFiles);
|
|
3107
2939
|
await onParseEnd(project, project.typechecker.getResult());
|
|
3108
2940
|
continue;
|
|
3109
2941
|
}
|
|
3110
2942
|
promises.push(promise);
|
|
3111
2943
|
promisesMap.set(project, promise);
|
|
3112
|
-
startTypechecker(project, files);
|
|
2944
|
+
promises.push(startTypechecker(project, files));
|
|
3113
2945
|
}
|
|
3114
2946
|
await Promise.all(promises);
|
|
3115
2947
|
}
|
|
@@ -3131,16 +2963,12 @@ function getDefaultThreadsCount(config) {
|
|
|
3131
2963
|
function getWorkerMemoryLimit(config, pool) {
|
|
3132
2964
|
if (pool === "vmForks") {
|
|
3133
2965
|
const opts = config.poolOptions?.vmForks ?? {};
|
|
3134
|
-
if (opts.memoryLimit)
|
|
3135
|
-
return opts.memoryLimit;
|
|
3136
|
-
}
|
|
2966
|
+
if (opts.memoryLimit) return opts.memoryLimit;
|
|
3137
2967
|
const workers = opts.maxForks ?? getDefaultThreadsCount(config);
|
|
3138
2968
|
return 1 / workers;
|
|
3139
2969
|
} else {
|
|
3140
2970
|
const opts = config.poolOptions?.vmThreads ?? {};
|
|
3141
|
-
if (opts.memoryLimit)
|
|
3142
|
-
return opts.memoryLimit;
|
|
3143
|
-
}
|
|
2971
|
+
if (opts.memoryLimit) return opts.memoryLimit;
|
|
3144
2972
|
const workers = opts.maxThreads ?? getDefaultThreadsCount(config);
|
|
3145
2973
|
return 1 / workers;
|
|
3146
2974
|
}
|
|
@@ -3152,51 +2980,36 @@ function getWorkerMemoryLimit(config, pool) {
|
|
|
3152
2980
|
* @param percentageReference The reference value to use when a '%' value is supplied.
|
|
3153
2981
|
*/
|
|
3154
2982
|
function stringToBytes(input, percentageReference) {
|
|
3155
|
-
if (input === null || input ===
|
|
3156
|
-
|
|
3157
|
-
|
|
3158
|
-
|
|
3159
|
-
|
|
3160
|
-
|
|
3161
|
-
|
|
3162
|
-
|
|
3163
|
-
|
|
3164
|
-
|
|
3165
|
-
|
|
3166
|
-
|
|
3167
|
-
|
|
3168
|
-
|
|
3169
|
-
|
|
3170
|
-
|
|
3171
|
-
|
|
3172
|
-
|
|
3173
|
-
|
|
3174
|
-
case "gb":
|
|
3175
|
-
case "g": return numericValue * 1e3 * 1e3 * 1e3;
|
|
3176
|
-
case "gib": return numericValue * 1024 * 1024 * 1024;
|
|
3177
|
-
}
|
|
3178
|
-
}
|
|
3179
|
-
} else {
|
|
3180
|
-
input = Number.parseFloat(input);
|
|
3181
|
-
}
|
|
3182
|
-
}
|
|
3183
|
-
if (typeof input === "number") {
|
|
3184
|
-
if (input <= 1 && input > 0) {
|
|
3185
|
-
if (percentageReference) {
|
|
3186
|
-
return Math.floor(input * percentageReference);
|
|
3187
|
-
} else {
|
|
3188
|
-
throw new Error("For a percentage based memory limit a percentageReference must be supplied");
|
|
2983
|
+
if (input === null || input === void 0) return input;
|
|
2984
|
+
if (typeof input === "string") if (Number.isNaN(Number.parseFloat(input.slice(-1)))) {
|
|
2985
|
+
let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
|
|
2986
|
+
if (trailingChars && numericString) {
|
|
2987
|
+
const numericValue = Number.parseFloat(numericString);
|
|
2988
|
+
trailingChars = trailingChars.toLowerCase();
|
|
2989
|
+
switch (trailingChars) {
|
|
2990
|
+
case "%":
|
|
2991
|
+
input = numericValue / 100;
|
|
2992
|
+
break;
|
|
2993
|
+
case "kb":
|
|
2994
|
+
case "k": return numericValue * 1e3;
|
|
2995
|
+
case "kib": return numericValue * 1024;
|
|
2996
|
+
case "mb":
|
|
2997
|
+
case "m": return numericValue * 1e3 * 1e3;
|
|
2998
|
+
case "mib": return numericValue * 1024 * 1024;
|
|
2999
|
+
case "gb":
|
|
3000
|
+
case "g": return numericValue * 1e3 * 1e3 * 1e3;
|
|
3001
|
+
case "gib": return numericValue * 1024 * 1024 * 1024;
|
|
3189
3002
|
}
|
|
3190
|
-
} else if (input > 1) {
|
|
3191
|
-
return Math.floor(input);
|
|
3192
|
-
} else {
|
|
3193
|
-
throw new Error("Unexpected numerical input for \"memoryLimit\"");
|
|
3194
3003
|
}
|
|
3195
|
-
}
|
|
3004
|
+
} else input = Number.parseFloat(input);
|
|
3005
|
+
if (typeof input === "number") if (input <= 1 && input > 0) if (percentageReference) return Math.floor(input * percentageReference);
|
|
3006
|
+
else throw new Error("For a percentage based memory limit a percentageReference must be supplied");
|
|
3007
|
+
else if (input > 1) return Math.floor(input);
|
|
3008
|
+
else throw new Error("Unexpected numerical input for \"memoryLimit\"");
|
|
3196
3009
|
return null;
|
|
3197
3010
|
}
|
|
3198
3011
|
|
|
3199
|
-
const suppressWarningsPath$1 = resolve
|
|
3012
|
+
const suppressWarningsPath$1 = resolve(rootDir, "./suppress-warnings.cjs");
|
|
3200
3013
|
function createChildProcessChannel(project, collect) {
|
|
3201
3014
|
const emitter = new EventEmitter();
|
|
3202
3015
|
const cleanup = () => emitter.removeAllListeners();
|
|
@@ -3225,22 +3038,22 @@ function createChildProcessChannel(project, collect) {
|
|
|
3225
3038
|
throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
|
|
3226
3039
|
}
|
|
3227
3040
|
});
|
|
3228
|
-
project.
|
|
3041
|
+
project.vitest.onCancel((reason) => rpc.onCancel(reason));
|
|
3229
3042
|
return {
|
|
3230
3043
|
channel,
|
|
3231
3044
|
cleanup
|
|
3232
3045
|
};
|
|
3233
3046
|
}
|
|
3234
|
-
function createVmForksPool(
|
|
3047
|
+
function createVmForksPool(vitest, { execArgv, env }) {
|
|
3235
3048
|
const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
|
|
3236
|
-
const threadsCount =
|
|
3237
|
-
const poolOptions =
|
|
3238
|
-
const maxThreads = poolOptions.maxForks ??
|
|
3239
|
-
const minThreads = poolOptions.maxForks ??
|
|
3240
|
-
const worker = resolve
|
|
3049
|
+
const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
|
|
3050
|
+
const poolOptions = vitest.config.poolOptions?.vmForks ?? {};
|
|
3051
|
+
const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
|
|
3052
|
+
const minThreads = poolOptions.maxForks ?? vitest.config.minWorkers ?? threadsCount;
|
|
3053
|
+
const worker = resolve(vitest.distPath, "workers/vmForks.js");
|
|
3241
3054
|
const options = {
|
|
3242
3055
|
runtime: "child_process",
|
|
3243
|
-
filename: resolve
|
|
3056
|
+
filename: resolve(vitest.distPath, "worker.js"),
|
|
3244
3057
|
maxThreads,
|
|
3245
3058
|
minThreads,
|
|
3246
3059
|
env,
|
|
@@ -3252,11 +3065,11 @@ function createVmForksPool(ctx, { execArgv, env }) {
|
|
|
3252
3065
|
...poolOptions.execArgv ?? [],
|
|
3253
3066
|
...execArgv
|
|
3254
3067
|
],
|
|
3255
|
-
terminateTimeout:
|
|
3068
|
+
terminateTimeout: vitest.config.teardownTimeout,
|
|
3256
3069
|
concurrentTasksPerWorker: 1,
|
|
3257
|
-
maxMemoryLimitBeforeRecycle: getMemoryLimit$1(
|
|
3070
|
+
maxMemoryLimitBeforeRecycle: getMemoryLimit$1(vitest.config) || void 0
|
|
3258
3071
|
};
|
|
3259
|
-
if (poolOptions.singleFork || !
|
|
3072
|
+
if (poolOptions.singleFork || !vitest.config.fileParallelism) {
|
|
3260
3073
|
options.maxThreads = 1;
|
|
3261
3074
|
options.minThreads = 1;
|
|
3262
3075
|
}
|
|
@@ -3265,7 +3078,7 @@ function createVmForksPool(ctx, { execArgv, env }) {
|
|
|
3265
3078
|
let id = 0;
|
|
3266
3079
|
async function runFiles(project, config, files, environment, invalidates = []) {
|
|
3267
3080
|
const paths = files.map((f) => f.filepath);
|
|
3268
|
-
|
|
3081
|
+
vitest.state.clearFiles(project, paths);
|
|
3269
3082
|
const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
|
|
3270
3083
|
const workerId = ++id;
|
|
3271
3084
|
const data = {
|
|
@@ -3285,25 +3098,21 @@ function createVmForksPool(ctx, { execArgv, env }) {
|
|
|
3285
3098
|
channel
|
|
3286
3099
|
});
|
|
3287
3100
|
} catch (error) {
|
|
3288
|
-
|
|
3289
|
-
|
|
3290
|
-
|
|
3291
|
-
|
|
3292
|
-
} else {
|
|
3293
|
-
throw error;
|
|
3294
|
-
}
|
|
3101
|
+
// Worker got stuck and won't terminate - this may cause process to hang
|
|
3102
|
+
if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
|
|
3103
|
+
else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
|
|
3104
|
+
else throw error;
|
|
3295
3105
|
} finally {
|
|
3296
3106
|
cleanup();
|
|
3297
3107
|
}
|
|
3298
3108
|
}
|
|
3299
3109
|
return async (specs, invalidates) => {
|
|
3300
|
-
|
|
3301
|
-
|
|
3110
|
+
// Cancel pending tasks from pool when possible
|
|
3111
|
+
vitest.onCancel(() => pool.cancelPendingTasks());
|
|
3112
|
+
const configs = /* @__PURE__ */ new Map();
|
|
3302
3113
|
const getConfig = (project) => {
|
|
3303
|
-
if (configs.has(project))
|
|
3304
|
-
|
|
3305
|
-
}
|
|
3306
|
-
const _config = project.getSerializableConfig();
|
|
3114
|
+
if (configs.has(project)) return configs.get(project);
|
|
3115
|
+
const _config = project.serializedConfig;
|
|
3307
3116
|
const config = wrapSerializableConfig(_config);
|
|
3308
3117
|
configs.set(project, config);
|
|
3309
3118
|
return config;
|
|
@@ -3312,9 +3121,7 @@ function createVmForksPool(ctx, { execArgv, env }) {
|
|
|
3312
3121
|
const promises = Object.values(filesByEnv).flat();
|
|
3313
3122
|
const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
|
|
3314
3123
|
const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
|
|
3315
|
-
if (errors.length > 0)
|
|
3316
|
-
throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
3317
|
-
}
|
|
3124
|
+
if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
3318
3125
|
};
|
|
3319
3126
|
};
|
|
3320
3127
|
return {
|
|
@@ -3327,16 +3134,14 @@ function createVmForksPool(ctx, { execArgv, env }) {
|
|
|
3327
3134
|
function getMemoryLimit$1(config) {
|
|
3328
3135
|
const memory = nodeos.totalmem();
|
|
3329
3136
|
const limit = getWorkerMemoryLimit(config, "vmForks");
|
|
3330
|
-
if (typeof memory === "number")
|
|
3331
|
-
|
|
3332
|
-
|
|
3333
|
-
|
|
3334
|
-
return stringToBytes(limit);
|
|
3335
|
-
}
|
|
3137
|
+
if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
|
|
3138
|
+
// If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
|
|
3139
|
+
if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
|
|
3140
|
+
// just ignore "memoryLimit" value because we cannot detect memory limit
|
|
3336
3141
|
return null;
|
|
3337
3142
|
}
|
|
3338
3143
|
|
|
3339
|
-
const suppressWarningsPath = resolve
|
|
3144
|
+
const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
|
|
3340
3145
|
function createWorkerChannel(project, collect) {
|
|
3341
3146
|
const channel = new MessageChannel();
|
|
3342
3147
|
const port = channel.port2;
|
|
@@ -3353,21 +3158,21 @@ function createWorkerChannel(project, collect) {
|
|
|
3353
3158
|
throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
|
|
3354
3159
|
}
|
|
3355
3160
|
});
|
|
3356
|
-
project.
|
|
3161
|
+
project.vitest.onCancel((reason) => rpc.onCancel(reason));
|
|
3357
3162
|
return {
|
|
3358
3163
|
workerPort,
|
|
3359
3164
|
port
|
|
3360
3165
|
};
|
|
3361
3166
|
}
|
|
3362
|
-
function createVmThreadsPool(
|
|
3167
|
+
function createVmThreadsPool(vitest, { execArgv, env }) {
|
|
3363
3168
|
const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
|
|
3364
|
-
const threadsCount =
|
|
3365
|
-
const poolOptions =
|
|
3366
|
-
const maxThreads = poolOptions.maxThreads ??
|
|
3367
|
-
const minThreads = poolOptions.minThreads ??
|
|
3368
|
-
const worker = resolve
|
|
3169
|
+
const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
|
|
3170
|
+
const poolOptions = vitest.config.poolOptions?.vmThreads ?? {};
|
|
3171
|
+
const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
|
|
3172
|
+
const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? threadsCount;
|
|
3173
|
+
const worker = resolve(vitest.distPath, "workers/vmThreads.js");
|
|
3369
3174
|
const options = {
|
|
3370
|
-
filename: resolve
|
|
3175
|
+
filename: resolve(vitest.distPath, "worker.js"),
|
|
3371
3176
|
useAtomics: poolOptions.useAtomics ?? false,
|
|
3372
3177
|
maxThreads,
|
|
3373
3178
|
minThreads,
|
|
@@ -3380,11 +3185,11 @@ function createVmThreadsPool(ctx, { execArgv, env }) {
|
|
|
3380
3185
|
...poolOptions.execArgv ?? [],
|
|
3381
3186
|
...execArgv
|
|
3382
3187
|
],
|
|
3383
|
-
terminateTimeout:
|
|
3188
|
+
terminateTimeout: vitest.config.teardownTimeout,
|
|
3384
3189
|
concurrentTasksPerWorker: 1,
|
|
3385
|
-
maxMemoryLimitBeforeRecycle: getMemoryLimit(
|
|
3190
|
+
maxMemoryLimitBeforeRecycle: getMemoryLimit(vitest.config) || void 0
|
|
3386
3191
|
};
|
|
3387
|
-
if (poolOptions.singleThread || !
|
|
3192
|
+
if (poolOptions.singleThread || !vitest.config.fileParallelism) {
|
|
3388
3193
|
options.maxThreads = 1;
|
|
3389
3194
|
options.minThreads = 1;
|
|
3390
3195
|
}
|
|
@@ -3393,7 +3198,7 @@ function createVmThreadsPool(ctx, { execArgv, env }) {
|
|
|
3393
3198
|
let id = 0;
|
|
3394
3199
|
async function runFiles(project, config, files, environment, invalidates = []) {
|
|
3395
3200
|
const paths = files.map((f) => f.filepath);
|
|
3396
|
-
|
|
3201
|
+
vitest.state.clearFiles(project, paths);
|
|
3397
3202
|
const { workerPort, port } = createWorkerChannel(project, name === "collect");
|
|
3398
3203
|
const workerId = ++id;
|
|
3399
3204
|
const data = {
|
|
@@ -3414,25 +3219,21 @@ function createVmThreadsPool(ctx, { execArgv, env }) {
 name
 });
 } catch (error) {
-
-
-
-
-} else {
-throw error;
-}
+// Worker got stuck and won't terminate - this may cause process to hang
+if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
+else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
+else throw error;
 } finally {
 port.close();
 workerPort.close();
 }
 }
 return async (specs, invalidates) => {
-
-
+// Cancel pending tasks from pool when possible
+vitest.onCancel(() => pool.cancelPendingTasks());
+const configs = /* @__PURE__ */ new Map();
 const getConfig = (project) => {
-if (configs.has(project))
-return configs.get(project);
-}
+if (configs.has(project)) return configs.get(project);
 const config = project.serializedConfig;
 configs.set(project, config);
 return config;
@@ -3441,9 +3242,7 @@ function createVmThreadsPool(ctx, { execArgv, env }) {
 const promises = Object.values(filesByEnv).flat();
 const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
 const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
-if (errors.length > 0)
-throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
-}
+if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
 };
 };
 return {
@@ -3456,12 +3255,10 @@ function createVmThreadsPool(ctx, { execArgv, env }) {
 function getMemoryLimit(config) {
 const memory = nodeos.totalmem();
 const limit = getWorkerMemoryLimit(config, "vmThreads");
-if (typeof memory === "number")
-
-
-
-return stringToBytes(limit);
-}
+if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
+// If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
+if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
+// just ignore "memoryLimit" value because we cannot detect memory limit
 return null;
 }
 
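A rough sketch of how the reworked getMemoryLimit treats percentage-based memory limits; toBytes below is a simplified stand-in for Vitest's internal stringToBytes helper, and the 16 GiB figure is an assumption for illustration.

// Simplified stand-in: resolve "50%" or a fraction against a reference amount of memory.
function toBytes(limit, referenceMemory) {
  if (typeof limit === "string" && limit.endsWith("%")) {
    return Math.floor(referenceMemory * (Number.parseFloat(limit) / 100));
  }
  return typeof limit === "number" && limit <= 1 ? Math.floor(referenceMemory * limit) : Number(limit);
}
const totalmem = 16 * 1024 ** 3; // assume 16 GiB for illustration
console.log(toBytes("50%", totalmem));     // single run: resolved against the full memory
console.log(toBytes("50%", totalmem / 2)); // watch mode: resolved against half of it, as above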
@@ -3474,19 +3271,13 @@ const builtinPools = [
 "typescript"
 ];
 function getDefaultPoolName(project) {
-if (project.config.browser.enabled)
-return "browser";
-}
+if (project.config.browser.enabled) return "browser";
 return project.config.pool;
 }
 function getFilePoolName(project, file) {
 for (const [glob, pool] of project.config.poolMatchGlobs) {
-if (pool === "browser")
-
-}
-if (pm.isMatch(file, glob, { cwd: project.config.root })) {
-return pool;
-}
+if (pool === "browser") throw new Error("Since Vitest 0.31.0 \"browser\" pool is not supported in \"poolMatchGlobs\". You can create a project to run some of your tests in browser in parallel. Read more: https://vitest.dev/guide/projects");
+if (pm.isMatch(file, glob, { cwd: project.config.root })) return pool;
 }
 return getDefaultPoolName(project);
 }
@@ -3499,6 +3290,8 @@ function createPool(ctx) {
 vmForks: null,
 typescript: null
 };
+// in addition to resolve.conditions Vite also adds production/development,
+// see: https://github.com/vitejs/vite/blob/af2aa09575229462635b7cbb6d248ca853057ba2/packages/vite/src/node/plugins/resolve.ts#L1056-L1080
 const viteMajor = Number(version.split(".")[0]);
 const potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
 "production",
@@ -3506,19 +3299,15 @@ function createPool(ctx) {
 ...ctx.vite.config.resolve.conditions
 ]);
 const conditions = [...potentialConditions].filter((condition) => {
-if (condition === "production")
-
-}
-if (condition === "development") {
-return !ctx.vite.config.isProduction;
-}
+if (condition === "production") return ctx.vite.config.isProduction;
+if (condition === "development") return !ctx.vite.config.isProduction;
 return true;
 }).map((condition) => {
-if (viteMajor >= 6 && condition === "development|production")
-return ctx.vite.config.isProduction ? "production" : "development";
-}
+if (viteMajor >= 6 && condition === "development|production") return ctx.vite.config.isProduction ? "production" : "development";
 return condition;
 }).flatMap((c) => ["--conditions", c]);
+// Instead of passing whole process.execArgv to the workers, pick allowed options.
+// Some options may crash worker, e.g. --prof, --title. nodejs/node#41103
 const execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
 async function executeTests(method, files, invalidate) {
 const options = {
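To make the flatMap at the end of that chain concrete, here is a small sketch of how a filtered condition list becomes worker execArgv entries; the input array is an assumed example.

// Illustrative only: mapping resolved conditions to `--conditions` flags for workers.
const conditions = ["production", "node"]; // assumed example input
const execArgvConditions = conditions.flatMap((c) => ["--conditions", c]);
console.log(execArgvConditions); // ["--conditions", "production", "--conditions", "node"]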
@@ -3533,35 +3322,22 @@ function createPool(ctx) {
 ...ctx.config.env
 }
 };
-
-
-
-
-}
-const poolConcurrentPromises = new Map();
-const customPools = new Map();
+// env are case-insensitive on Windows, but spawned processes don't support it
+if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
+const poolConcurrentPromises = /* @__PURE__ */ new Map();
+const customPools = /* @__PURE__ */ new Map();
 async function resolveCustomPool(filepath) {
-if (customPools.has(filepath))
-return customPools.get(filepath);
-}
+if (customPools.has(filepath)) return customPools.get(filepath);
 const pool = await ctx.runner.executeId(filepath);
-if (typeof pool.default !== "function") {
-throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
-}
+if (typeof pool.default !== "function") throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
 const poolInstance = await pool.default(ctx, options);
-if (typeof poolInstance?.name !== "string") {
-
-}
-if (typeof poolInstance?.[method] !== "function") {
-throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
-}
+if (typeof poolInstance?.name !== "string") throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
+if (typeof poolInstance?.[method] !== "function") throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
 customPools.set(filepath, poolInstance);
 return poolInstance;
 }
 function getConcurrentPool(pool, fn) {
-if (poolConcurrentPromises.has(pool))
-return poolConcurrentPromises.get(pool);
-}
+if (poolConcurrentPromises.has(pool)) return poolConcurrentPromises.get(pool);
 const promise = fn().finally(() => {
 poolConcurrentPromises.delete(pool);
 });
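A short sketch of the Windows env normalization introduced above: keys are mirrored in upper case so spawned workers can read them case-sensitively. The example values are invented for illustration.

// After the loop, PATH and NODEENV are available alongside the original keys.
const env = { Path: "C:\\bin", nodeEnv: "test" }; // assumed example values
for (const name in env) env[name.toUpperCase()] = env[name];
console.log(Object.keys(env)); // includes "Path", "nodeEnv", "PATH", "NODEENV"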
@@ -3578,7 +3354,7 @@ function createPool(ctx) {
 });
 }
 const groupedSpecifications = {};
-const groups = new Set();
+const groups = /* @__PURE__ */ new Set();
 const factories = {
 vmThreads: () => createVmThreadsPool(ctx, options),
 vmForks: () => createVmForksPool(ctx, options),
@@ -3595,17 +3371,13 @@ function createPool(ctx) {
 const Sequencer = ctx.config.sequence.sequencer;
 const sequencer = new Sequencer(ctx);
 async function sortSpecs(specs) {
-if (ctx.config.shard)
-specs = await sequencer.shard(specs);
-}
+if (ctx.config.shard) specs = await sequencer.shard(specs);
 return sequencer.sort(specs);
 }
 const sortedGroups = Array.from(groups).sort();
 for (const group of sortedGroups) {
 const specifications = groupedSpecifications[group];
-if (!specifications?.length)
-continue;
-}
+if (!specifications?.length) continue;
 const filesByPool = {
 forks: [],
 threads: [],
@@ -3620,9 +3392,7 @@ function createPool(ctx) {
 });
 await Promise.all(Object.entries(filesByPool).map(async (entry) => {
 const [pool, files] = entry;
-if (!files.length)
-return null;
-}
+if (!files.length) return null;
 const specs = await sortSpecs(files);
 if (pool in factories) {
 const factory = factories[pool];
@@ -3654,6 +3424,7 @@ class BaseSequencer {
 constructor(ctx) {
 this.ctx = ctx;
 }
+// async so it can be extended by other sequelizers
 async shard(files) {
 const { config } = this.ctx;
 const { index, count } = config.shard;
@@ -3661,7 +3432,7 @@ class BaseSequencer {
 const shardStart = shardSize * (index - 1);
 const shardEnd = shardSize * index;
 return [...files].map((spec) => {
-const fullPath = resolve
+const fullPath = resolve$1(slash(config.root), slash(spec.moduleId));
 const specPath = fullPath?.slice(config.root.length);
 return {
 spec,
@@ -3669,6 +3440,7 @@ class BaseSequencer {
 };
 }).sort((a, b) => a.hash < b.hash ? -1 : a.hash > b.hash ? 1 : 0).slice(shardStart, shardEnd).map(({ spec }) => spec);
 }
+// async so it can be extended by other sequelizers
 async sort(files) {
 const cache = this.ctx.cache;
 return [...files].sort((a, b) => {
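A compact sketch of the shard window arithmetic used by BaseSequencer.shard above; only shardStart and shardEnd appear in the hunk, so the ceil-based shardSize formula here is an assumption for illustration.

// Assumed inputs for illustration: 10 spec files, `--shard 2/3`.
const filesCount = 10;
const count = 3;
const index = 2;
const shardSize = Math.ceil(filesCount / count); // assumption: ceil-based chunking
const shardStart = shardSize * (index - 1); // 4
const shardEnd = shardSize * index;         // 8
// Files are hash-sorted first, then sliced to the [shardStart, shardEnd) window.
console.log([shardStart, shardEnd]);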
@@ -3679,17 +3451,15 @@ class BaseSequencer {
 if (!aState || !bState) {
 const statsA = cache.getFileStats(keyA);
 const statsB = cache.getFileStats(keyB);
-
-
-
+// run unknown first
+if (!statsA || !statsB) return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
+// run larger files first
 return statsB.size - statsA.size;
 }
-
-
-
-
-return 1;
-}
+// run failed first
+if (aState.failed && !bState.failed) return -1;
+if (!aState.failed && bState.failed) return 1;
+// run longer first
 return bState.duration - aState.duration;
 });
 }
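For readability, the ordering rules in BaseSequencer.sort can be restated as a standalone comparator; the object shape below (state/stats fields) is assumed purely for illustration.

// Condensed restatement of the sort priority: unknown files, then larger files,
// then previously failed files, then longer-running files.
function compareSpecs(a, b) {
  if (!a.state || !b.state) {
    if (!a.stats || !b.stats) return !a.stats && b.stats ? -1 : !b.stats && a.stats ? 1 : 0; // unknown first
    return b.stats.size - a.stats.size; // larger files first
  }
  if (a.state.failed && !b.state.failed) return -1; // failed first
  if (!a.state.failed && b.state.failed) return 1;
  return b.state.duration - a.state.duration; // longer first
}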
@@ -3703,22 +3473,14 @@ class RandomSequencer extends BaseSequencer {
 }
 
 function resolvePath(path, root) {
-return normalize(/* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve(root, path));
+return normalize(/* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve$1(root, path));
 }
 function parseInspector(inspect) {
-if (typeof inspect === "boolean" || inspect ===
-
-}
-if (typeof inspect === "number") {
-return { port: inspect };
-}
-if (inspect.match(/https?:\//)) {
-throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
-}
+if (typeof inspect === "boolean" || inspect === void 0) return {};
+if (typeof inspect === "number") return { port: inspect };
+if (inspect.match(/https?:\//)) throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
 const [host, port] = inspect.split(":");
-if (!port) {
-return { host };
-}
+if (!port) return { host };
 return {
 host,
 port: Number(port) || defaultInspectPort
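As a quick reference, the branches of parseInspector map representative inputs to results as sketched below (derived from the code in the hunk; not an exhaustive list).

// parseInspector(true)             -> {}
// parseInspector(9229)             -> { port: 9229 }
// parseInspector("127.0.0.1")      -> { host: "127.0.0.1" }
// parseInspector("127.0.0.1:9229") -> { host: "127.0.0.1", port: 9229 }
// parseInspector("http://host:1")  -> throws: inspector host cannot be a URL
// A non-numeric port falls back to defaultInspectPort.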
@@ -3726,51 +3488,28 @@ function parseInspector(inspect) {
 }
 function resolveApiServerConfig(options, defaultPort) {
 let api;
-if (options.ui && !options.api) {
-
-
-
-
-api =
-
-
-if (api) {
-if (options.api.port) {
-api.port = options.api.port;
-}
-if (options.api.strictPort) {
-api.strictPort = options.api.strictPort;
-}
-if (options.api.host) {
-api.host = options.api.host;
-}
-} else {
-api = { ...options.api };
-}
-}
+if (options.ui && !options.api) api = { port: defaultPort };
+else if (options.api === true) api = { port: defaultPort };
+else if (typeof options.api === "number") api = { port: options.api };
+if (typeof options.api === "object") if (api) {
+if (options.api.port) api.port = options.api.port;
+if (options.api.strictPort) api.strictPort = options.api.strictPort;
+if (options.api.host) api.host = options.api.host;
+} else api = { ...options.api };
 if (api) {
-if (!api.port && !api.middlewareMode)
-
-}
-} else {
-api = { middlewareMode: true };
-}
+if (!api.port && !api.middlewareMode) api.port = defaultPort;
+} else api = { middlewareMode: true };
 return api;
 }
 function resolveInlineWorkerOption(value) {
-if (typeof value === "string" && value.trim().endsWith("%"))
-
-} else {
-return Number(value);
-}
+if (typeof value === "string" && value.trim().endsWith("%")) return getWorkersCountByPercentage(value);
+else return Number(value);
 }
 function resolveConfig$1(vitest, options, viteConfig) {
 const mode = vitest.mode;
 const logger = vitest.logger;
 if (options.dom) {
-if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") {
-logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`));
-}
+if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`));
 options.environment = "happy-dom";
 }
 const resolved = {
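A quick reference for the api resolution above; outcomes follow the branches in the hunk, with defaultPort standing for whatever the caller passes in.

// { ui: true }                 -> { port: defaultPort }
// { api: true }                -> { port: defaultPort }
// { api: 51205 }               -> { port: 51205 }
// { api: { host: "0.0.0.0" } } -> { host: "0.0.0.0", port: defaultPort }
// neither ui nor api           -> { middlewareMode: true }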
@@ -3782,7 +3521,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
 resolved.project = toArray(resolved.project);
 resolved.provide ??= {};
 resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "";
-resolved.color = typeof options.name !== "string" ? options.name?.color :
+resolved.color = typeof options.name !== "string" ? options.name?.color : void 0;
 const inspector = resolved.inspect || resolved.inspectBrk;
 resolved.inspector = {
 ...resolved.inspector,
@@ -3790,43 +3529,28 @@ function resolveConfig$1(vitest, options, viteConfig) {
 enabled: !!inspector,
 waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk
 };
-if (viteConfig.base !== "/")
-resolved.base = viteConfig.base;
-}
+if (viteConfig.base !== "/") resolved.base = viteConfig.base;
 resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true;
 if (options.shard) {
-if (resolved.watch)
-throw new Error("You cannot use --shard option with enabled watch");
-}
+if (resolved.watch) throw new Error("You cannot use --shard option with enabled watch");
 const [indexString, countString] = options.shard.split("/");
 const index = Math.abs(Number.parseInt(indexString, 10));
 const count = Math.abs(Number.parseInt(countString, 10));
-if (Number.isNaN(count) || count <= 0)
-
-}
-if (Number.isNaN(index) || index <= 0 || index > count) {
-throw new Error("--shard <index> must be a positive number less then <count>");
-}
+if (Number.isNaN(count) || count <= 0) throw new Error("--shard <count> must be a positive number");
+if (Number.isNaN(index) || index <= 0 || index > count) throw new Error("--shard <index> must be a positive number less then <count>");
 resolved.shard = {
 index,
 count
 };
 }
-if (resolved.standalone && !resolved.watch)
-
-
-if (resolved.
-
-}
-if (resolved.maxWorkers) {
-resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
-}
-if (resolved.minWorkers) {
-resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
-}
-resolved.browser ??= {};
+if (resolved.standalone && !resolved.watch) throw new Error(`Vitest standalone mode requires --watch`);
+if (resolved.mergeReports && resolved.watch) throw new Error(`Cannot merge reports with --watch enabled`);
+if (resolved.maxWorkers) resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
+if (resolved.minWorkers) resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
+// run benchmark sequentially by default
 resolved.fileParallelism ??= mode !== "benchmark";
 if (!resolved.fileParallelism) {
+// ignore user config, parallelism cannot be implemented without limiting workers
 resolved.maxWorkers = 1;
 resolved.minWorkers = 1;
 }
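Usage note for the --shard validation above, assuming the standard Vitest CLI invocation: a suite can be split across CI jobs while invalid windows now fail fast.

// vitest run --shard 1/3
// vitest run --shard 2/3
// vitest run --shard 3/3
// `--shard 0/3` or `--shard 4/3` is rejected with the errors shown in the hunk.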
@@ -3842,65 +3566,53 @@ function resolveConfig$1(vitest, options, viteConfig) {
 throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
 }
 }
+// apply browser CLI options only if the config already has the browser config and not disabled manually
+if (vitest._cliOptions.browser && resolved.browser && (resolved.browser.enabled !== false || vitest._cliOptions.browser.enabled)) resolved.browser = mergeConfig(resolved.browser, vitest._cliOptions.browser);
+resolved.browser ??= {};
 const browser = resolved.browser;
-if (browser.enabled
-if (!browser.name && !browser.instances)
-throw new Error(`Vitest Browser Mode requires "browser.name" (deprecated) or "browser.instances" options, none were set.`);
-}
+if (browser.enabled) {
+if (!browser.name && !browser.instances) throw new Error(`Vitest Browser Mode requires "browser.name" (deprecated) or "browser.instances" options, none were set.`);
 const instances = browser.instances;
-if (browser.name && browser.instances)
-
-
-if (browser.instances && !browser.instances.length) {
-throw new Error([`"browser.instances" was set in the config, but the array is empty. Define at least one browser config.`, browser.name && instances?.length ? ` The "browser.name" was set to "${browser.name}" which filtered all configs (${instances.map((c) => c.browser).join(", ")}). Did you mean to use another name?` : ""].join(""));
-}
+if (browser.name && browser.instances)
+// --browser=chromium filters configs to a single one
+browser.instances = browser.instances.filter((instance) => instance.browser === browser.name);
+if (browser.instances && !browser.instances.length) throw new Error([`"browser.instances" was set in the config, but the array is empty. Define at least one browser config.`, browser.name && instances?.length ? ` The "browser.name" was set to "${browser.name}" which filtered all configs (${instances.map((c) => c.browser).join(", ")}). Did you mean to use another name?` : ""].join(""));
 }
 const playwrightChromiumOnly = isPlaywrightChromiumOnly(vitest, resolved);
+// Browser-mode "Playwright + Chromium" only features:
 if (browser.enabled && !playwrightChromiumOnly) {
 const browserConfig = { browser: {
 provider: browser.provider,
 name: browser.name,
 instances: browser.instances?.map((i) => ({ browser: i.browser }))
 } };
-if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
-
-
-
-} }, null, 2)}` + `\n\n...or change your coverage provider to:\n${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}\n`);
-}
+if (resolved.coverage.enabled && resolved.coverage.provider === "v8") throw new Error(`@vitest/coverage-v8 does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
+provider: "playwright",
+instances: [{ browser: "chromium" }]
+} }, null, 2)}\n\n...or change your coverage provider to:\n${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}\n`);
 if (resolved.inspect || resolved.inspectBrk) {
 const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
-throw new Error(`${inspectOption} does not work with\n${JSON.stringify(browserConfig, null, 2)}\n
+throw new Error(`${inspectOption} does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
 provider: "playwright",
 instances: [{ browser: "chromium" }]
-} }, null, 2)}
+} }, null, 2)}\n\n...or disable ${inspectOption}\n`);
 }
 }
 resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter);
 if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
-const reportsDirectory = resolve(resolved.root, resolved.coverage.reportsDirectory);
-if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) {
-throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
-}
-}
-if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) {
-resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
+const reportsDirectory = resolve$1(resolved.root, resolved.coverage.reportsDirectory);
+if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
 }
+if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
 resolved.expect ??= {};
 resolved.deps ??= {};
 resolved.deps.moduleDirectories ??= [];
 resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
-if (!dir.startsWith("/")) {
-
-}
-if (!dir.endsWith("/")) {
-dir += "/";
-}
+if (!dir.startsWith("/")) dir = `/${dir}`;
+if (!dir.endsWith("/")) dir += "/";
 return normalize(dir);
 });
-if (!resolved.deps.moduleDirectories.includes("/node_modules/"))
-resolved.deps.moduleDirectories.push("/node_modules/");
-}
+if (!resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
 resolved.deps.optimizer ??= {};
 resolved.deps.optimizer.ssr ??= {};
 resolved.deps.optimizer.ssr.enabled ??= true;
@@ -3912,6 +3624,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
 resolved.deps.web.transformGlobPattern ??= [];
 resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root));
 resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root));
+// override original exclude array for cases where user re-uses same object in test.exclude
 resolved.coverage.exclude = [
 ...resolved.coverage.exclude,
 ...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`),
@@ -3926,27 +3639,21 @@ function resolveConfig$1(vitest, options, viteConfig) {
 "fallbackCJS"
 ];
 deprecatedDepsOptions.forEach((option) => {
-if (resolved.deps[option] ===
-
-
-if (option === "fallbackCJS") {
-logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`));
-} else {
+if (resolved.deps[option] === void 0) return;
+if (option === "fallbackCJS") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`));
+else {
 const transformMode = resolved.environment === "happy-dom" || resolved.environment === "jsdom" ? "web" : "ssr";
 logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. If you rely on vite-node directly, use "server.deps.${option}" instead. Otherwise, consider using "deps.optimizer.${transformMode}.${option === "external" ? "exclude" : "include"}"`));
 }
-if (resolved.server.deps[option] ===
-resolved.server.deps[option] = resolved.deps[option];
-}
+if (resolved.server.deps[option] === void 0) resolved.server.deps[option] = resolved.deps[option];
 });
-if (resolved.cliExclude)
-
-
+if (resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
+// vitenode will try to import such file with native node,
+// but then our mocker will not work properly
 if (resolved.server.deps.inline !== true) {
 const ssrOptions = viteConfig.ssr;
-if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null)
-
-} else {
+if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null) resolved.server.deps.inline = true;
+else {
 resolved.server.deps.inline ??= [];
 resolved.server.deps.inline.push(...extraInlineDeps);
 }
@@ -3955,16 +3662,11 @@ function resolveConfig$1(vitest, options, viteConfig) {
 resolved.server.deps.inlineFiles.push(...resolved.setupFiles);
 resolved.server.deps.moduleDirectories ??= [];
 resolved.server.deps.moduleDirectories.push(...resolved.deps.moduleDirectories);
-if (resolved.runner)
-
-
-
-
-}
-resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : undefined;
-if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
-resolved.snapshotFormat.plugins = [];
-}
+if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root);
+resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments");
+if (resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
+resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0;
+if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) resolved.snapshotFormat.plugins = [];
 const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT;
 resolved.snapshotOptions = {
 expand: resolved.expandSnapshotDiff ?? false,
@@ -3976,97 +3678,73 @@ function resolveConfig$1(vitest, options, viteConfig) {
 resolved.snapshotSerializers ??= [];
 resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root));
 resolved.forceRerunTriggers.push(...resolved.snapshotSerializers);
-if (options.resolveSnapshotPath)
-delete resolved.resolveSnapshotPath;
-}
+if (options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
 resolved.pool ??= "threads";
-if (process.env.VITEST_MAX_THREADS) {
-resolved.poolOptions
-
-threads
-
-
-
-vmThreads
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
-},
-vmForks: {
-...resolved.poolOptions?.vmForks,
-minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
-}
-};
-}
+if (process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
+...resolved.poolOptions,
+threads: {
+...resolved.poolOptions?.threads,
+maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
+},
+vmThreads: {
+...resolved.poolOptions?.vmThreads,
+maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
+}
+};
+if (process.env.VITEST_MIN_THREADS) resolved.poolOptions = {
+...resolved.poolOptions,
+threads: {
+...resolved.poolOptions?.threads,
+minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
+},
+vmThreads: {
+...resolved.poolOptions?.vmThreads,
+minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
+}
+};
+if (process.env.VITEST_MAX_FORKS) resolved.poolOptions = {
+...resolved.poolOptions,
+forks: {
+...resolved.poolOptions?.forks,
+maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
+},
+vmForks: {
+...resolved.poolOptions?.vmForks,
+maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
+}
+};
+if (process.env.VITEST_MIN_FORKS) resolved.poolOptions = {
+...resolved.poolOptions,
+forks: {
+...resolved.poolOptions?.forks,
+minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
+},
+vmForks: {
+...resolved.poolOptions?.vmForks,
+minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
+}
+};
 const poolThreadsOptions = [
 ["threads", "minThreads"],
 ["threads", "maxThreads"],
 ["vmThreads", "minThreads"],
 ["vmThreads", "maxThreads"]
 ];
-for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions)
-if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) {
-resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
-}
-}
+for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
 const poolForksOptions = [
 ["forks", "minForks"],
 ["forks", "maxForks"],
 ["vmForks", "minForks"],
 ["vmForks", "maxForks"]
 ];
-for (const [poolOptionKey, workerOptionKey] of poolForksOptions)
-
-
-
-
-if (
-resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
-}
-if (!builtinPools.includes(resolved.pool)) {
-resolved.pool = resolvePath(resolved.pool, resolved.root);
-}
-if (resolved.poolMatchGlobs) {
-logger.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "poolMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`));
-}
+for (const [poolOptionKey, workerOptionKey] of poolForksOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
+if (typeof resolved.workspace === "string")
+// if passed down from the CLI and it's relative, resolve relative to CWD
+resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve$1(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
+if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
+if (resolved.poolMatchGlobs) logger.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "poolMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`));
 resolved.poolMatchGlobs = (resolved.poolMatchGlobs || []).map(([glob, pool]) => {
-if (!builtinPools.includes(pool))
-pool = resolvePath(pool, resolved.root);
-}
+if (!builtinPools.includes(pool)) pool = resolvePath(pool, resolved.root);
 return [glob, pool];
 });
 if (mode === "benchmark") {
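Usage note for the environment overrides handled above: each variable now feeds both the plain and the vm variant of its pool, so a single export is enough to cap worker counts for a run (flags/variables as they appear in the hunk).

// VITEST_MAX_THREADS=4 VITEST_MIN_THREADS=2 vitest run
// VITEST_MAX_FORKS=4 VITEST_MIN_FORKS=2 vitest run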
@@ -4074,92 +3752,81 @@ function resolveConfig$1(vitest, options, viteConfig) {
 ...benchmarkConfigDefaults,
 ...resolved.benchmark
 };
+// override test config
 resolved.coverage.enabled = false;
 resolved.typecheck.enabled = false;
 resolved.include = resolved.benchmark.include;
 resolved.exclude = resolved.benchmark.exclude;
 resolved.includeSource = resolved.benchmark.includeSource;
 const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean);
-if (reporters.length)
-
-
-
-
-if (options.
-resolved.benchmark.outputFile = options.outputFile;
-}
-if (options.compare) {
-resolved.benchmark.compare = options.compare;
-}
-if (options.outputJson) {
-resolved.benchmark.outputJson = options.outputJson;
-}
+if (reporters.length) resolved.benchmark.reporters = reporters;
+else resolved.benchmark.reporters = ["default"];
+if (options.outputFile) resolved.benchmark.outputFile = options.outputFile;
+// --compare from cli
+if (options.compare) resolved.benchmark.compare = options.compare;
+if (options.outputJson) resolved.benchmark.outputJson = options.outputJson;
 }
 if (typeof resolved.diff === "string") {
 resolved.diff = resolvePath(resolved.diff, resolved.root);
 resolved.forceRerunTriggers.push(resolved.diff);
 }
+// the server has been created, we don't need to override vite.server options
 const api = resolveApiServerConfig(options, defaultPort);
 resolved.api = {
 ...api,
 token: crypto.randomUUID()
 };
-if (options.related)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
+if (options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
+/*
+* Reporters can be defined in many different ways:
+* { reporter: 'json' }
+* { reporter: { onFinish() { method() } } }
+* { reporter: ['json', { onFinish() { method() } }] }
+* { reporter: [[ 'json' ]] }
+* { reporter: [[ 'json' ], 'html'] }
+* { reporter: [[ 'json', { outputFile: 'test.json' } ], 'html'] }
+*/
+if (options.reporters) if (!Array.isArray(options.reporters))
+// Reporter name, e.g. { reporters: 'json' }
+if (typeof options.reporters === "string") resolved.reporters = [[options.reporters, {}]];
+else resolved.reporters = [options.reporters];
+else {
+resolved.reporters = [];
+for (const reporter of options.reporters) if (Array.isArray(reporter))
+// Reporter with options, e.g. { reporters: [ [ 'json', { outputFile: 'test.json' } ] ] }
+resolved.reporters.push([reporter[0], reporter[1] || {}]);
+else if (typeof reporter === "string")
+// Reporter name in array, e.g. { reporters: ["html", "json"]}
+resolved.reporters.push([reporter, {}]);
+else
+// Inline reporter, e.g. { reporter: [{ onFinish() { method() } }] }
+resolved.reporters.push(reporter);
 }
 if (mode !== "benchmark") {
+// @ts-expect-error "reporter" is from CLI, should be absolute to the running directory
+// it is passed down as "vitest --reporter ../reporter.js"
 const reportersFromCLI = resolved.reporter;
 const cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
-
-
-}
+// ./reporter.js || ../reporter.js, but not .reporters/reporter.js
+if (/^\.\.?\//.test(reporter)) return resolve$1(process.cwd(), reporter);
 return reporter;
 });
-if (cliReporters.length) {
-resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
-}
+if (cliReporters.length) resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
 }
 if (!resolved.reporters.length) {
 resolved.reporters.push(["default", {}]);
-
-
-}
-}
-if (resolved.changed) {
-resolved.passWithNoTests ??= true;
+// also enable github-actions reporter as a default
+if (process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
 }
+if (resolved.changed) resolved.passWithNoTests ??= true;
 resolved.css ??= {};
 if (typeof resolved.css === "object") {
 resolved.css.modules ??= {};
 resolved.css.modules.classNameStrategy ??= "stable";
 }
 if (resolved.cache !== false) {
-
-
-logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache director. Note caches will be written to "cacheDir\/vitest"`));
-cacheDir = VitestCache.resolveCacheDir(resolved.root, resolved.cache.dir, resolved.name);
-}
-resolved.cache = { dir: cacheDir };
+if (resolved.cache && typeof resolved.cache.dir === "string") vitest.logger.deprecate(`"cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache director. Note caches will be written to "cacheDir\/vitest"`);
+resolved.cache = { dir: viteConfig.cacheDir };
 }
 resolved.sequence ??= {};
 if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
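To summarize the reporter normalization in the hunk above, every accepted `reporters` shape is reduced to [name, options] pairs, with inline reporter objects pushed through unchanged. An illustrative mapping, derived from the branches shown:

// "json"                                  -> [["json", {}]]
// ["json", "html"]                        -> [["json", {}], ["html", {}]]
// [["json", { outputFile: "test.json" }]] -> [["json", { outputFile: "test.json" }]]
// [{ onFinish() {} }]                     -> [{ onFinish() {} }]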
@@ -4167,36 +3834,28 @@ function resolveConfig$1(vitest, options, viteConfig) {
 resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer;
 resolved.sequence.shuffle = tests;
 }
-if (!resolved.sequence?.sequencer)
-
-
+if (!resolved.sequence?.sequencer)
+// CLI flag has higher priority
+resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer;
 resolved.sequence.groupOrder ??= 0;
 resolved.sequence.hooks ??= "stack";
-if (resolved.sequence.sequencer === RandomSequencer)
-resolved.sequence.seed ??= Date.now();
-}
+if (resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
 resolved.typecheck = {
 ...configDefaults.typecheck,
 ...resolved.typecheck
 };
-if (resolved.environmentMatchGlobs) {
-
-}
-resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map((i) => [resolve(resolved.root, i[0]), i[1]]);
+if (resolved.environmentMatchGlobs) logger.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "environmentMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`));
+resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map((i) => [resolve$1(resolved.root, i[0]), i[1]]);
 resolved.typecheck ??= {};
 resolved.typecheck.enabled ??= false;
-if (resolved.typecheck.enabled)
-logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
-}
-resolved.browser ??= {};
+if (resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
 resolved.browser.enabled ??= false;
 resolved.browser.headless ??= isCI;
 resolved.browser.isolate ??= true;
 resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark";
+// disable in headless mode by default, and if CI is detected
 resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI;
-if (resolved.browser.screenshotDirectory)
-resolved.browser.screenshotDirectory = resolve(resolved.root, resolved.browser.screenshotDirectory);
-}
+if (resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
 const isPreview = resolved.browser.provider === "preview";
 if (isPreview && resolved.browser.screenshotFailures === true) {
 console.warn(c.yellow([
@@ -4205,34 +3864,23 @@ function resolveConfig$1(vitest, options, viteConfig) {
 `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
 ].join("")));
 resolved.browser.screenshotFailures = false;
-} else
-resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
-}
+} else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
 resolved.browser.viewport ??= {};
 resolved.browser.viewport.width ??= 414;
 resolved.browser.viewport.height ??= 896;
 resolved.browser.locators ??= {};
 resolved.browser.locators.testIdAttribute ??= "data-testid";
-if (resolved.browser.enabled && provider === "stackblitz")
-resolved.browser.provider = "preview";
-}
+if (resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = "preview";
 resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort };
+// enable includeTaskLocation by default in UI mode
 if (resolved.browser.enabled) {
-if (resolved.browser.ui)
-
-}
-} else if (resolved.ui) {
-resolved.includeTaskLocation ??= true;
-}
+if (resolved.browser.ui) resolved.includeTaskLocation ??= true;
+} else if (resolved.ui) resolved.includeTaskLocation ??= true;
 const htmlReporter = toArray(resolved.reporters).some((reporter) => {
-if (Array.isArray(reporter))
-return reporter[0] === "html";
-}
+if (Array.isArray(reporter)) return reporter[0] === "html";
 return false;
 });
-if (htmlReporter)
-resolved.includeTaskLocation ??= true;
-}
+if (htmlReporter) resolved.includeTaskLocation ??= true;
 resolved.testTransformMode ??= {};
 resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3;
 resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4;
@@ -4242,38 +3890,27 @@ function isBrowserEnabled(config) {
 return Boolean(config.browser?.enabled);
 }
 function resolveCoverageReporters(configReporters) {
-
-
-}
+// E.g. { reporter: "html" }
+if (!Array.isArray(configReporters)) return [[configReporters, {}]];
 const resolvedReporters = [];
-for (const reporter of configReporters)
-
-
-
-
-
-}
+for (const reporter of configReporters) if (Array.isArray(reporter))
+// E.g. { reporter: [ ["html", { skipEmpty: true }], ["lcov"], ["json", { file: "map.json" }] ]}
+resolvedReporters.push([reporter[0], reporter[1] || {}]);
+else
+// E.g. { reporter: ["html", "json"]}
+resolvedReporters.push([reporter, {}]);
 return resolvedReporters;
 }
 function isPlaywrightChromiumOnly(vitest, config) {
 const browser = config.browser;
-if (!browser || browser.provider !== "playwright" || !browser.enabled)
-
-
-if (browser.name) {
-return browser.name === "chromium";
-}
-if (!browser.instances) {
-return false;
-}
+if (!browser || browser.provider !== "playwright" || !browser.enabled) return false;
+if (browser.name) return browser.name === "chromium";
+if (!browser.instances) return false;
 for (const instance of browser.instances) {
 const name = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
-
-
-
-if (instance.browser !== "chromium") {
-return false;
-}
+// browser config is filtered out
+if (!vitest.matchesProjectFilter(name)) continue;
+if (instance.browser !== "chromium") return false;
 }
 return true;
 }
@@ -4289,9 +3926,7 @@ const DEFAULT_PROJECT = Symbol.for("default-project");
 let uniqueId = 0;
 async function getCoverageProvider(options, loader) {
 const coverageModule = await resolveCoverageProviderModule(options, loader);
-if (coverageModule)
-return coverageModule.getProvider();
-}
+if (coverageModule) return coverageModule.getProvider();
 return null;
 }
 class BaseCoverageProvider {
@@ -4299,20 +3934,20 @@ class BaseCoverageProvider {
 name;
 version;
 options;
-coverageFiles = new Map();
+coverageFiles = /* @__PURE__ */ new Map();
 pendingPromises = [];
 coverageFilesDirectory;
 _initialize(ctx) {
 this.ctx = ctx;
-if (ctx.version !== this.version) {
-
-
+if (ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
+Running mixed versions is not supported and may lead into bugs
+Update your dependencies and make sure the versions match.`));
 const config = ctx.config.coverage;
 this.options = {
 ...coverageConfigDefaults,
 ...config,
 provider: this.name,
-reportsDirectory: resolve(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
+reportsDirectory: resolve$1(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
 reporter: resolveCoverageReporters(config.reporter || coverageConfigDefaults.reporter),
 thresholds: config.thresholds && {
 ...config.thresholds,
@@ -4324,7 +3959,7 @@ class BaseCoverageProvider {
 };
 const shard = this.ctx.config.shard;
 const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
-this.coverageFilesDirectory = resolve(this.options.reportsDirectory, tempDirectory);
+this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory);
 }
 createCoverageMap() {
 throw new Error("BaseReporter's createCoverageMap was not overwritten");
@@ -4339,31 +3974,23 @@ class BaseCoverageProvider {
 return this.options;
 }
 async clean(clean = true) {
-if (clean && existsSync(this.options.reportsDirectory)) {
-
-
-
-
-
-
-
-
-
-force: true,
-maxRetries: 10
-});
-}
+if (clean && existsSync(this.options.reportsDirectory)) await promises$1.rm(this.options.reportsDirectory, {
+recursive: true,
+force: true,
+maxRetries: 10
+});
+if (existsSync(this.coverageFilesDirectory)) await promises$1.rm(this.coverageFilesDirectory, {
+recursive: true,
+force: true,
+maxRetries: 10
+});
 await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
-this.coverageFiles = new Map();
+this.coverageFiles = /* @__PURE__ */ new Map();
 this.pendingPromises = [];
 }
 onAfterSuiteRun({ coverage, transformMode, projectName, testFiles }) {
-if (!coverage)
-
-}
-if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") {
-throw new Error(`Invalid transform mode: ${transformMode}`);
-}
+if (!coverage) return;
+if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") throw new Error(`Invalid transform mode: ${transformMode}`);
 let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
 if (!entry) {
 entry = {
@@ -4374,7 +4001,8 @@ class BaseCoverageProvider {
 this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
 }
 const testFilenames = testFiles.join();
-const filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
+const filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
+// If there's a result from previous run, overwrite it
 entry[transformMode][testFilenames] = filename;
 const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
 this.pendingPromises.push(promise);
@@ -4384,51 +4012,43 @@ class BaseCoverageProvider {
 const total = this.pendingPromises.length;
 await Promise.all(this.pendingPromises);
 this.pendingPromises = [];
-for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) {
-
-
-
-
-
-
-onDebug(`Reading coverage results ${index}/${total}`);
-}
-await Promise.all(chunk.map(async (filename) => {
-const contents = await promises$1.readFile(filename, "utf-8");
-const coverage = JSON.parse(contents);
-onFileRead(coverage);
-}));
+for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [transformMode, coverageByTestfiles] of Object.entries(coveragePerProject)) {
+const filenames = Object.values(coverageByTestfiles);
+const project = this.ctx.getProjectByName(projectName);
+for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
+if (onDebug.enabled) {
+index += chunk.length;
+onDebug(`Reading coverage results ${index}/${total}`);
 }
-await
+await Promise.all(chunk.map(async (filename) => {
+const contents = await promises$1.readFile(filename, "utf-8");
+const coverage = JSON.parse(contents);
+onFileRead(coverage);
+}));
 }
+await onFinished(project, transformMode);
 }
 }
 async cleanAfterRun() {
-this.coverageFiles = new Map();
+this.coverageFiles = /* @__PURE__ */ new Map();
 await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
-
-
-}
+// Remove empty reports directory, e.g. when only text-reporter is used
+if (readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
 }
 async onTestFailure() {
-if (!this.options.reportOnFailure)
-await this.cleanAfterRun();
-}
+if (!this.options.reportOnFailure) await this.cleanAfterRun();
 }
 async reportCoverage(coverageMap, { allTestsRun }) {
 await this.generateReports(coverageMap || this.createCoverageMap(), allTestsRun);
+// In watch mode we need to preserve the previous results if cleanOnRerun is disabled
 const keepResults = !this.options.cleanOnRerun && this.ctx.config.watch;
-if (!keepResults)
-await this.cleanAfterRun();
-}
+if (!keepResults) await this.cleanAfterRun();
 }
 async reportThresholds(coverageMap, allTestsRun) {
 const resolvedThresholds = this.resolveThresholds(coverageMap);
 this.checkThresholds(resolvedThresholds);
 if (this.options.thresholds?.autoUpdate && allTestsRun) {
-if (!this.ctx.server.config.configFile)
-throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
-}
+if (!this.ctx.server.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
 const configFilePath = this.ctx.server.config.configFile;
 const configModule = await this.parseConfigModule(configFilePath);
const configModule = await this.parseConfigModule(configFilePath);
|
|
4434
4054
|
await this.updateThresholds({
|
|
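
The loop above reads the per-suite coverage JSON files in slices bounded by `processingConcurrency`, so only a limited number of reads are in flight at once, then hands each parsed object to `onFileRead`. A simplified sketch of that bounded read loop (illustrative only; it uses a plain slice instead of the provider's `toSlices` helper):

```ts
import { readFile } from "node:fs/promises";

async function readCoverageChunks(
  filenames: string[],
  concurrency: number,
  onFileRead: (coverage: unknown) => void,
): Promise<void> {
  for (let i = 0; i < filenames.length; i += concurrency) {
    // At most `concurrency` files are read concurrently per iteration.
    const chunk = filenames.slice(i, i + concurrency);
    await Promise.all(chunk.map(async (filename) => {
      const contents = await readFile(filename, "utf-8");
      onFileRead(JSON.parse(contents));
    }));
  }
}
```
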
@@ -4448,9 +4068,7 @@ class BaseCoverageProvider {
 const files = coverageMap.files();
 const globalCoverageMap = this.createCoverageMap();
 for (const key of Object.keys(this.options.thresholds)) {
-if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key))
-continue;
-}
+if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) continue;
 const glob = key;
 const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
 const globCoverageMap = this.createCoverageMap();
@@ -4466,6 +4084,7 @@ class BaseCoverageProvider {
 thresholds: globThresholds
 });
 }
+// Global threshold is for all files, even if they are included by glob patterns
 for (const file of files) {
 const fileCoverage = coverageMap.fileCoverageFor(file);
 globalCoverageMap.addFileCoverage(fileCoverage);
@@ -4487,9 +4106,8 @@ class BaseCoverageProvider {
 */
 checkThresholds(allThresholds) {
 for (const { coverageMap, thresholds, name } of allThresholds) {
-if (thresholds.branches ===
-…
-}
+if (thresholds.branches === void 0 && thresholds.functions === void 0 && thresholds.lines === void 0 && thresholds.statements === void 0) continue;
+// Construct list of coverage summaries where thresholds are compared against
 const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => ({
 file,
 summary: coverageMap.fileCoverageFor(file).toSummary()
@@ -4497,47 +4115,40 @@ class BaseCoverageProvider {
 file: null,
 summary: coverageMap.getCoverageSummary()
 }];
-…
+// Check thresholds of each summary
+for (const { summary, file } of summaries) for (const thresholdKey of THRESHOLD_KEYS) {
+const threshold = thresholds[thresholdKey];
+if (threshold === void 0) continue;
+/**
+* Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
+* while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
+*/
+if (threshold >= 0) {
+const coverage = summary.data[thresholdKey].pct;
+if (coverage < threshold) {
+process.exitCode = 1;
+/**
+* Generate error message based on perFile flag:
+* - ERROR: Coverage for statements (33.33%) does not meet threshold (85%) for src/math.ts
+* - ERROR: Coverage for statements (50%) does not meet global threshold (85%)
+*/
+let errorMessage = `ERROR: Coverage for ${thresholdKey} (${coverage}%) does not meet ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${threshold}%)`;
+if (this.options.thresholds?.perFile && file) errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
+this.ctx.logger.error(errorMessage);
 }
-…
-let errorMessage = `ERROR: Coverage for ${thresholdKey} (${coverage}%) does not meet ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${threshold}%)`;
-if (this.options.thresholds?.perFile && file) {
-errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
-}
-this.ctx.logger.error(errorMessage);
-}
-} else {
-const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
-const absoluteThreshold = threshold * -1;
-if (uncovered > absoluteThreshold) {
-process.exitCode = 1;
-/**
-* Generate error message based on perFile flag:
-* - ERROR: Uncovered statements (33) exceed threshold (30) for src/math.ts
-* - ERROR: Uncovered statements (33) exceed global threshold (30)
-*/
-let errorMessage = `ERROR: Uncovered ${thresholdKey} (${uncovered}) exceed ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${absoluteThreshold})`;
-if (this.options.thresholds?.perFile && file) {
-errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
-}
-this.ctx.logger.error(errorMessage);
-}
+} else {
+const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
+const absoluteThreshold = threshold * -1;
+if (uncovered > absoluteThreshold) {
+process.exitCode = 1;
+/**
+* Generate error message based on perFile flag:
+* - ERROR: Uncovered statements (33) exceed threshold (30) for src/math.ts
+* - ERROR: Uncovered statements (33) exceed global threshold (30)
+*/
+let errorMessage = `ERROR: Uncovered ${thresholdKey} (${uncovered}) exceed ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${absoluteThreshold})`;
+if (this.options.thresholds?.perFile && file) errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
+this.ctx.logger.error(errorMessage);
 }
 }
 }
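
As the inlined comments state, positive thresholds are minimum coverage percentages while negative thresholds cap the number of uncovered items, and `perFile` switches the check from the global summary to each file. In a user's `vitest.config.ts` that corresponds to something like the following (the numbers are arbitrary examples, not defaults):

```ts
// vitest.config.ts
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 85,       // at least 85% of lines must be covered
        functions: 85,   // at least 85% of functions must be covered
        branches: -10,   // at most 10 branches may remain uncovered
        statements: -10, // at most 10 statements may remain uncovered
        perFile: true,   // enforce the thresholds per file, not globally
      },
    },
  },
});
```
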
@@ -4561,29 +4172,23 @@ class BaseCoverageProvider {
 */
 if (threshold >= 0) {
 const actual = Math.min(...summaries.map((summary) => summary[key].pct));
-if (actual > threshold)
-thresholdsToUpdate.push([key, actual]);
-}
+if (actual > threshold) thresholdsToUpdate.push([key, actual]);
 } else {
 const absoluteThreshold = threshold * -1;
 const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
 if (actual < absoluteThreshold) {
+// If everything was covered, set new threshold to 100% (since a threshold of 0 would be considered as 0%)
 const updatedThreshold = actual === 0 ? 100 : actual * -1;
 thresholdsToUpdate.push([key, updatedThreshold]);
 }
 }
 }
-if (thresholdsToUpdate.length === 0)
-continue;
-}
+if (thresholdsToUpdate.length === 0) continue;
 updatedThresholds = true;
-for (const [threshold, newValue] of thresholdsToUpdate)
-…
-const glob = config.test.coverage.thresholds[name];
-glob[threshold] = newValue;
-}
+for (const [threshold, newValue] of thresholdsToUpdate) if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = newValue;
+else {
+const glob = config.test.coverage.thresholds[name];
+glob[threshold] = newValue;
 }
 }
 if (updatedThresholds) {
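
`updateThresholds` only runs when `coverage.thresholds.autoUpdate` is enabled and a configuration file exists, since the raised values are written back into that file (globally, or under the matching glob key). A minimal config that opts into this behaviour (example values):

```ts
// vitest.config.ts
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        autoUpdate: true, // rewrite this file when actual coverage exceeds the thresholds
        lines: 60,
        statements: 60,
      },
    },
  },
});
```
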
@@ -4593,9 +4198,7 @@ class BaseCoverageProvider {
 }
 async mergeReports(coverageMaps) {
 const coverageMap = this.createCoverageMap();
-for (const coverage of coverageMaps)
-coverageMap.merge(coverage);
-}
+for (const coverage of coverageMaps) coverageMap.merge(coverage);
 await this.generateReports(coverageMap, true);
 }
 hasTerminalReporter(reporters) {
@@ -4606,11 +4209,8 @@ class BaseCoverageProvider {
 const index = Math.max(0, chunks.length - 1);
 const lastChunk = chunks[index] || [];
 chunks[index] = lastChunk;
-if (lastChunk.length >= size)
-…
-} else {
-lastChunk.push(item);
-}
+if (lastChunk.length >= size) chunks.push([item]);
+else lastChunk.push(item);
 return chunks;
 }, []);
 }
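
The reducer above is the provider's `toSlices` helper: it splits a list into chunks of at most `size` items, which is what bounds the concurrency of the coverage-file reads earlier in the file. A typed re-implementation for illustration (the generic signature is an assumption; the logic mirrors the hunk above):

```ts
function toSlices<T>(items: T[], size: number): T[][] {
  return items.reduce<T[][]>((chunks, item) => {
    const index = Math.max(0, chunks.length - 1);
    const lastChunk = chunks[index] || [];
    chunks[index] = lastChunk;
    // Start a new chunk once the current one is full, otherwise keep filling it.
    if (lastChunk.length >= size) chunks.push([item]);
    else lastChunk.push(item);
    return chunks;
  }, []);
}

// toSlices([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]
```
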
@@ -4627,14 +4227,11 @@ class BaseCoverageProvider {
 return async function transformFile(filename) {
 let lastError;
 for (const { root, vitenode, isBrowserEnabled } of servers) {
-…
-}
+// On Windows root doesn't start with "/" while filenames do
+if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) continue;
 if (isBrowserEnabled) {
-const result = await vitenode.transformRequest(filename,
-if (result)
-return result;
-}
+const result = await vitenode.transformRequest(filename, void 0, "web").catch(() => null);
+if (result) return result;
 }
 try {
 return await vitenode.transformRequest(filename);
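
`transformFile` walks the available vite-node servers, skips those whose root does not own the file, and only rethrows the last error once every candidate has failed. A generic sketch of that fallback pattern (the `TransformServer` shape and `transform` callback are placeholders, not vite-node's API):

```ts
interface TransformServer {
  root: string;
  transform: (filename: string) => Promise<unknown | null>;
}

async function transformWithFallback(
  servers: TransformServer[],
  filename: string,
): Promise<unknown> {
  let lastError: unknown;
  for (const server of servers) {
    // On Windows the root may lack the leading "/" that absolute filenames carry.
    if (!filename.startsWith(server.root) && !filename.startsWith(`/${server.root}`)) {
      continue;
    }
    try {
      const result = await server.transform(filename);
      if (result) return result;
    } catch (error) {
      lastError = error;
    }
  }
  // Every matching server failed (or none matched).
  throw lastError;
}
```
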
@@ -4642,6 +4239,7 @@ class BaseCoverageProvider {
 lastError = error;
 }
 }
+// All vite-node servers failed to transform the file
 throw lastError;
 };
 }
@@ -4650,29 +4248,24 @@ class BaseCoverageProvider {
 * Narrow down `unknown` glob thresholds to resolved ones
 */
 function resolveGlobThresholds(thresholds) {
-if (!thresholds || typeof thresholds !== "object") {
-…
-functions: 100,
-statements: 100
-};
-}
+if (!thresholds || typeof thresholds !== "object") return {};
+if (100 in thresholds && thresholds[100] === true) return {
+lines: 100,
+branches: 100,
+functions: 100,
+statements: 100
+};
 return {
-lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines :
-branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches :
-functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions :
-statements: "statements" in thresholds && typeof thresholds.statements === "number" ? thresholds.statements :
+lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines : void 0,
+branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches : void 0,
+functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions : void 0,
+statements: "statements" in thresholds && typeof thresholds.statements === "number" ? thresholds.statements : void 0
 };
 }
 function assertConfigurationModule(config) {
 try {
-…
-}
+// @ts-expect-error -- Intentional unsafe null pointer check as wrapped in try-catch
+if (typeof config.test.coverage.thresholds !== "object") throw new TypeError("Expected config.test.coverage.thresholds to be an object");
 } catch (error) {
 const message = error instanceof Error ? error.message : String(error);
 throw new Error(`Unable to parse thresholds from configuration file: ${message}`);
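
`resolveGlobThresholds` narrows whatever the user put under a glob key: non-objects become `{}`, the `100: true` shorthand expands to 100% for all four metrics, and otherwise only numeric values survive. In user configuration that maps to glob-scoped entries such as these (the globs and numbers are examples):

```ts
// vitest.config.ts
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 80, // global threshold, still checked against every file
        "src/utils/**.ts": { 100: true },                 // shorthand for 100% everything
        "src/legacy/**.ts": { lines: 50, functions: 40 }, // only numeric keys are kept
      },
    },
  },
});
```
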
@@ -4681,49 +4274,41 @@ function assertConfigurationModule(config) {
 function resolveConfig(configModule) {
 const mod = configModule.exports.default;
 try {
-…
+// Check for "export default { test: {...} }"
+if (mod.$type === "object") return mod;
+// "export default defineConfig(...)"
 let config = resolveDefineConfig(mod);
-if (config)
-…
-}
+if (config) return config;
+// "export default mergeConfig(..., defineConfig(...))"
 if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
 config = resolveMergeConfig(mod);
-if (config)
-return config;
-}
+if (config) return config;
 }
 } catch (error) {
+// Reduce magicast's verbose errors to readable ones
 throw new Error(error instanceof Error ? error.message : String(error));
 }
 throw new Error("Failed to update coverage thresholds. Configuration file is too complex.");
 }
 function resolveDefineConfig(mod) {
 if (mod.$type === "function-call" && mod.$callee === "defineConfig") {
-…
-}
+// "export default defineConfig({ test: {...} })"
+if (mod.$args[0].$type === "object") return mod.$args[0];
 if (mod.$args[0].$type === "arrow-function-expression") {
-if (mod.$args[0].$body.$type === "object")
-…
+if (mod.$args[0].$body.$type === "object")
+// "export default defineConfig(() => ({ test: {...} }))"
+return mod.$args[0].$body;
+// "export default defineConfig(() => mergeConfig({...}, ...))"
 const config = resolveMergeConfig(mod.$args[0].$body);
-if (config)
-return config;
-}
+if (config) return config;
 }
 }
 }
 function resolveMergeConfig(mod) {
-if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
-…
-if (config) {
-return config;
-}
-}
+if (mod.$type === "function-call" && mod.$callee === "mergeConfig") for (const arg of mod.$args) {
+const config = resolveDefineConfig(arg);
+if (config) return config;
 }
 }

-export { BaseCoverageProvider as B, RandomSequencer as R,
+export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, isBrowserEnabled as d, groupBy as e, getCoverageProvider as f, getFilePoolName as g, hash as h, isPackageExists as i, createPool as j, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };