vitest 3.1.0-beta.1 → 3.1.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/dist/browser.js +4 -4
  2. package/dist/chunks/base.CylSMlTD.js +41 -0
  3. package/dist/chunks/benchmark.BKUatJGy.js +39 -0
  4. package/dist/chunks/cac.JtTXbKz0.js +1525 -0
  5. package/dist/chunks/{cli-api.BwkkJsRe.js → cli-api.BTtPTYMs.js} +4638 -5072
  6. package/dist/chunks/console.D6t261w0.js +173 -0
  7. package/dist/chunks/constants.BZZyIeIE.js +43 -0
  8. package/dist/chunks/coverage.0iPg4Wrz.js +33 -0
  9. package/dist/chunks/{coverage.gV8doR2Y.js → coverage.C2ohxaN0.js} +2216 -2479
  10. package/dist/chunks/creator.BEXek7yQ.js +640 -0
  11. package/dist/chunks/date.CDOsz-HY.js +53 -0
  12. package/dist/chunks/defaults.DmfNPoe5.js +114 -0
  13. package/dist/chunks/{env.D4Lgay0q.js → env.Dq0hM4Xv.js} +1 -1
  14. package/dist/chunks/execute.DZKwfrTs.js +791 -0
  15. package/dist/chunks/git.DXfdBEfR.js +74 -0
  16. package/dist/chunks/{globals.BEpDe-k3.js → globals.DCbUWjip.js} +10 -10
  17. package/dist/chunks/{index.D7Ny8f_s.js → index.BDobFbcz.js} +6 -7
  18. package/dist/chunks/index.DFXFpH3w.js +607 -0
  19. package/dist/chunks/index.VfYQ6MXY.js +104 -0
  20. package/dist/chunks/index.ZIOEXBQB.js +2382 -0
  21. package/dist/chunks/inspector.DbDkSkFn.js +54 -0
  22. package/dist/chunks/node.IqGoMrm4.js +15 -0
  23. package/dist/chunks/{reporters.d.r7poTZjA.d.ts → reporters.d.5g6jXhoW.d.ts} +14 -3
  24. package/dist/chunks/rpc.DGgL5dw7.js +92 -0
  25. package/dist/chunks/run-once.I7PpBOk1.js +47 -0
  26. package/dist/chunks/runBaseTests.CqmKSG99.js +134 -0
  27. package/dist/chunks/setup-common.DEGDGBiA.js +88 -0
  28. package/dist/chunks/{typechecker.BlF3eHsb.js → typechecker.C2IpOhid.js} +620 -622
  29. package/dist/chunks/utils.BfxieIyZ.js +66 -0
  30. package/dist/chunks/utils.CtocqOoE.js +72 -0
  31. package/dist/chunks/utils.OLmtDstN.js +194 -0
  32. package/dist/chunks/{vi.nSCvwQ7l.js → vi.B-PuvDzu.js} +878 -1019
  33. package/dist/chunks/vite.d.Dh1jE-_V.d.ts +23 -0
  34. package/dist/chunks/vm.BW5voG-u.js +789 -0
  35. package/dist/cli.js +2 -2
  36. package/dist/config.cjs +97 -103
  37. package/dist/config.d.ts +3 -3
  38. package/dist/config.js +6 -6
  39. package/dist/coverage.d.ts +1 -1
  40. package/dist/coverage.js +6 -6
  41. package/dist/environments.js +1 -1
  42. package/dist/execute.js +1 -1
  43. package/dist/index.d.ts +2 -2
  44. package/dist/index.js +6 -6
  45. package/dist/node.d.ts +3 -3
  46. package/dist/node.js +36 -45
  47. package/dist/path.js +1 -4
  48. package/dist/reporters.d.ts +1 -1
  49. package/dist/reporters.js +4 -4
  50. package/dist/runners.js +231 -267
  51. package/dist/snapshot.js +2 -2
  52. package/dist/suite.js +2 -2
  53. package/dist/worker.js +98 -114
  54. package/dist/workers/forks.js +22 -22
  55. package/dist/workers/runVmTests.js +61 -66
  56. package/dist/workers/threads.js +13 -13
  57. package/dist/workers/vmForks.js +24 -24
  58. package/dist/workers/vmThreads.js +15 -15
  59. package/dist/workers.js +10 -10
  60. package/package.json +11 -11
  61. package/dist/chunks/base.DV59CbtV.js +0 -45
  62. package/dist/chunks/benchmark.DL72EVN-.js +0 -40
  63. package/dist/chunks/cac.BjmXy7OV.js +0 -1664
  64. package/dist/chunks/console.CN7AiMGV.js +0 -179
  65. package/dist/chunks/constants.DTYd6dNH.js +0 -46
  66. package/dist/chunks/coverage.A3sS5-Wm.js +0 -40
  67. package/dist/chunks/creator.BsBnpTzI.js +0 -670
  68. package/dist/chunks/date.W2xKR2qe.js +0 -53
  69. package/dist/chunks/defaults.C2Ndd9wx.js +0 -119
  70. package/dist/chunks/execute.eDH0aFFd.js +0 -839
  71. package/dist/chunks/git.B5SDxu-n.js +0 -69
  72. package/dist/chunks/index.DOyx6FYJ.js +0 -2551
  73. package/dist/chunks/index.K90BXFOx.js +0 -658
  74. package/dist/chunks/index.uXkkC4xl.js +0 -111
  75. package/dist/chunks/inspector.DKLceBVD.js +0 -54
  76. package/dist/chunks/node.AKq966Jp.js +0 -15
  77. package/dist/chunks/rpc.TVf73xOu.js +0 -102
  78. package/dist/chunks/run-once.2ogXb3JV.js +0 -28
  79. package/dist/chunks/runBaseTests.BVrL_ow3.js +0 -142
  80. package/dist/chunks/setup-common.CPvtqi8q.js +0 -96
  81. package/dist/chunks/utils.C8RiOc4B.js +0 -77
  82. package/dist/chunks/utils.Cn0zI1t3.js +0 -68
  83. package/dist/chunks/utils.bLM2atbD.js +0 -198
  84. package/dist/chunks/vite.d.Fvq-NZoa.d.ts +0 -11
  85. package/dist/chunks/vm.jEFQDlX_.js +0 -852
@@ -4,7 +4,7 @@ import require$$0 from 'util';
  import require$$0$1 from 'path';
  import { relative, resolve, dirname, isAbsolute, join, normalize } from 'pathe';
  import c from 'tinyrainbow';
- import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.C2Ndd9wx.js';
+ import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.DmfNPoe5.js';
  import crypto from 'node:crypto';
  import { slash, createDefer, shuffle, toArray } from '@vitest/utils';
  import { writeFile, rename, stat, unlink } from 'node:fs/promises';
@@ -15,8 +15,8 @@ import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL
  import assert from 'node:assert';
  import v8 from 'node:v8';
  import { format, inspect } from 'node:util';
- import { e as extraInlineDeps, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.DTYd6dNH.js';
- import { a as isWindows } from './env.D4Lgay0q.js';
+ import { e as extraInlineDeps, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.BZZyIeIE.js';
+ import { a as isWindows } from './env.Dq0hM4Xv.js';
  import * as nodeos from 'node:os';
  import nodeos__default from 'node:os';
  import { isatty } from 'node:tty';
@@ -24,169 +24,165 @@ import { version } from 'vite';
  import EventEmitter from 'node:events';
  import { c as createBirpc } from './index.68735LiX.js';
  import Tinypool$1, { Tinypool } from 'tinypool';
- import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.BlF3eHsb.js';
+ import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.C2IpOhid.js';
  import { MessageChannel } from 'node:worker_threads';
  import { hasFailed } from '@vitest/runner/utils';
  import { rootDir } from '../path.js';
  import { slash as slash$1 } from 'vite-node/utils';
  import { isCI, provider } from 'std-env';
- import { r as resolveCoverageProviderModule } from './coverage.A3sS5-Wm.js';
+ import { r as resolveCoverageProviderModule } from './coverage.0iPg4Wrz.js';

  function groupBy(collection, iteratee) {
- return collection.reduce((acc, item) => {
- const key = iteratee(item);
- acc[key] ||= [];
- acc[key].push(item);
- return acc;
- }, {});
+ return collection.reduce((acc, item) => {
+ const key = iteratee(item);
+ acc[key] ||= [];
+ acc[key].push(item);
+ return acc;
+ }, {});
  }
  function stdout() {
- return console._stdout || process.stdout;
+ return console._stdout || process.stdout;
  }
  function escapeRegExp(s) {
- return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }
  function wildcardPatternToRegExp(pattern) {
- const negated = pattern.startsWith("!");
- if (negated) {
- pattern = pattern.slice(1);
- }
- let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
- if (negated) {
- regexp = `(?!${regexp})`;
- }
- return new RegExp(`^${regexp}`, "i");
+ const negated = pattern.startsWith("!");
+ if (negated) {
+ pattern = pattern.slice(1);
+ }
+ let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
+ if (negated) {
+ regexp = `(?!${regexp})`;
+ }
+ return new RegExp(`^${regexp}`, "i");
  }

  const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding));

  class FilesStatsCache {
- cache = /* @__PURE__ */ new Map();
- getStats(key) {
- return this.cache.get(key);
- }
- async populateStats(root, specs) {
- const promises = specs.map((spec) => {
- const key = `${spec[0].name}:${relative(root, spec.moduleId)}`;
- return this.updateStats(spec.moduleId, key);
- });
- await Promise.all(promises);
- }
- async updateStats(fsPath, key) {
- if (!fs.existsSync(fsPath)) {
- return;
- }
- const stats = await fs.promises.stat(fsPath);
- this.cache.set(key, { size: stats.size });
- }
- removeStats(fsPath) {
- this.cache.forEach((_, key) => {
- if (key.endsWith(fsPath)) {
- this.cache.delete(key);
- }
- });
- }
+ cache = new Map();
+ getStats(key) {
+ return this.cache.get(key);
+ }
+ async populateStats(root, specs) {
+ const promises = specs.map((spec) => {
+ const key = `${spec[0].name}:${relative(root, spec.moduleId)}`;
+ return this.updateStats(spec.moduleId, key);
+ });
+ await Promise.all(promises);
+ }
+ async updateStats(fsPath, key) {
+ if (!fs.existsSync(fsPath)) {
+ return;
+ }
+ const stats = await fs.promises.stat(fsPath);
+ this.cache.set(key, { size: stats.size });
+ }
+ removeStats(fsPath) {
+ this.cache.forEach((_, key) => {
+ if (key.endsWith(fsPath)) {
+ this.cache.delete(key);
+ }
+ });
+ }
  }

  class ResultsCache {
- cache = /* @__PURE__ */ new Map();
- workspacesKeyMap = /* @__PURE__ */ new Map();
- cachePath = null;
- version;
- root = "/";
- constructor(version) {
- this.version = version;
- }
- getCachePath() {
- return this.cachePath;
- }
- setConfig(root, config) {
- this.root = root;
- if (config) {
- this.cachePath = resolve(config.dir, "results.json");
- }
- }
- getResults(key) {
- return this.cache.get(key);
- }
- async readFromCache() {
- if (!this.cachePath) {
- return;
- }
- if (!fs.existsSync(this.cachePath)) {
- return;
- }
- const resultsCache = await fs.promises.readFile(this.cachePath, "utf8");
- const { results, version } = JSON.parse(resultsCache || "[]");
- if (Number(version.split(".")[1]) >= 30) {
- this.cache = new Map(results);
- this.version = version;
- results.forEach(([spec]) => {
- const [projectName, relativePath] = spec.split(":");
- const keyMap = this.workspacesKeyMap.get(relativePath) || [];
- keyMap.push(projectName);
- this.workspacesKeyMap.set(relativePath, keyMap);
- });
- }
- }
- updateResults(files) {
- files.forEach((file) => {
- const result = file.result;
- if (!result) {
- return;
- }
- const duration = result.duration || 0;
- const relativePath = relative(this.root, file.filepath);
- this.cache.set(`${file.projectName || ""}:${relativePath}`, {
- duration: duration >= 0 ? duration : 0,
- failed: result.state === "fail"
- });
- });
- }
- removeFromCache(filepath) {
- this.cache.forEach((_, key) => {
- if (key.endsWith(filepath)) {
- this.cache.delete(key);
- }
- });
- }
- async writeToCache() {
- if (!this.cachePath) {
- return;
- }
- const results = Array.from(this.cache.entries());
- const cacheDirname = dirname(this.cachePath);
- if (!fs.existsSync(cacheDirname)) {
- await fs.promises.mkdir(cacheDirname, { recursive: true });
- }
- const cache = JSON.stringify({
- version: this.version,
- results
- });
- await fs.promises.writeFile(this.cachePath, cache);
- }
+ cache = new Map();
+ workspacesKeyMap = new Map();
+ cachePath = null;
+ version;
+ root = "/";
+ constructor(version) {
+ this.version = version;
+ }
+ getCachePath() {
+ return this.cachePath;
+ }
+ setConfig(root, config) {
+ this.root = root;
+ if (config) {
+ this.cachePath = resolve(config.dir, "results.json");
+ }
+ }
+ getResults(key) {
+ return this.cache.get(key);
+ }
+ async readFromCache() {
+ if (!this.cachePath) {
+ return;
+ }
+ if (!fs.existsSync(this.cachePath)) {
+ return;
+ }
+ const resultsCache = await fs.promises.readFile(this.cachePath, "utf8");
+ const { results, version } = JSON.parse(resultsCache || "[]");
+ if (Number(version.split(".")[1]) >= 30) {
+ this.cache = new Map(results);
+ this.version = version;
+ results.forEach(([spec]) => {
+ const [projectName, relativePath] = spec.split(":");
+ const keyMap = this.workspacesKeyMap.get(relativePath) || [];
+ keyMap.push(projectName);
+ this.workspacesKeyMap.set(relativePath, keyMap);
+ });
+ }
+ }
+ updateResults(files) {
+ files.forEach((file) => {
+ const result = file.result;
+ if (!result) {
+ return;
+ }
+ const duration = result.duration || 0;
+ const relativePath = relative(this.root, file.filepath);
+ this.cache.set(`${file.projectName || ""}:${relativePath}`, {
+ duration: duration >= 0 ? duration : 0,
+ failed: result.state === "fail"
+ });
+ });
+ }
+ removeFromCache(filepath) {
+ this.cache.forEach((_, key) => {
+ if (key.endsWith(filepath)) {
+ this.cache.delete(key);
+ }
+ });
+ }
+ async writeToCache() {
+ if (!this.cachePath) {
+ return;
+ }
+ const results = Array.from(this.cache.entries());
+ const cacheDirname = dirname(this.cachePath);
+ if (!fs.existsSync(cacheDirname)) {
+ await fs.promises.mkdir(cacheDirname, { recursive: true });
+ }
+ const cache = JSON.stringify({
+ version: this.version,
+ results
+ });
+ await fs.promises.writeFile(this.cachePath, cache);
+ }
  }

  class VitestCache {
- results;
- stats = new FilesStatsCache();
- constructor(version) {
- this.results = new ResultsCache(version);
- }
- getFileTestResults(key) {
- return this.results.getResults(key);
- }
- getFileStats(key) {
- return this.stats.getStats(key);
- }
- static resolveCacheDir(root, dir, projectName) {
- const baseDir = slash(dir || "node_modules/.vite/vitest");
- return projectName ? resolve(
- root,
- baseDir,
- hash("md5", projectName, "hex")
- ) : resolve(root, baseDir);
- }
+ results;
+ stats = new FilesStatsCache();
+ constructor(version) {
+ this.results = new ResultsCache(version);
+ }
+ getFileTestResults(key) {
+ return this.results.getResults(key);
+ }
+ getFileStats(key) {
+ return this.stats.getStats(key);
+ }
+ static resolveCacheDir(root, dir, projectName) {
+ const baseDir = slash(dir || "node_modules/.vite/vitest");
+ return projectName ? resolve(root, baseDir, hash("md5", projectName, "hex")) : resolve(root, baseDir);
+ }
  }

  const JOIN_LEADING_SLASH_RE = /^\.?\//;
@@ -2306,9 +2302,9 @@ function resolvePackage(name, options = {}) {
  }

  function getWorkersCountByPercentage(percent) {
- const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
- const workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
- return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
+ const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
+ const workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
+ return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
  }

  var utils$1 = {};
@@ -6438,2435 +6434,2176 @@ function requireMicromatch () {
  var micromatchExports = requireMicromatch();
  var mm = /*@__PURE__*/getDefaultExportFromCjs(micromatchExports);

- const envsOrder = ["node", "jsdom", "happy-dom", "edge-runtime"];
+ const envsOrder = [
+ "node",
+ "jsdom",
+ "happy-dom",
+ "edge-runtime"
+ ];
  function getTransformMode(patterns, filename) {
- if (patterns.web && mm.isMatch(filename, patterns.web)) {
- return "web";
- }
- if (patterns.ssr && mm.isMatch(filename, patterns.ssr)) {
- return "ssr";
- }
- return void 0;
+ if (patterns.web && mm.isMatch(filename, patterns.web)) {
+ return "web";
+ }
+ if (patterns.ssr && mm.isMatch(filename, patterns.ssr)) {
+ return "ssr";
+ }
+ return undefined;
  }
  async function groupFilesByEnv(files) {
- const filesWithEnv = await Promise.all(
- files.map(async ({ moduleId: filepath, project, testLines }) => {
- const code = await promises$1.readFile(filepath, "utf-8");
- let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
- if (!env) {
- for (const [glob, target] of project.config.environmentMatchGlobs || []) {
- if (mm.isMatch(filepath, glob, { cwd: project.config.root })) {
- env = target;
- break;
- }
- }
- }
- env ||= project.config.environment || "node";
- const transformMode = getTransformMode(
- project.config.testTransformMode,
- filepath
- );
- let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
- if (envOptionsJson?.endsWith("*/")) {
- envOptionsJson = envOptionsJson.slice(0, -2);
- }
- const envOptions = JSON.parse(envOptionsJson || "null");
- const envKey = env === "happy-dom" ? "happyDOM" : env;
- const environment = {
- name: env,
- transformMode,
- options: envOptions ? { [envKey]: envOptions } : null
- };
- return {
- file: {
- filepath,
- testLocations: testLines
- },
- project,
- environment
- };
- })
- );
- return groupBy(filesWithEnv, ({ environment }) => environment.name);
+ const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
+ const code = await promises$1.readFile(filepath, "utf-8");
+ let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
+ if (!env) {
+ for (const [glob, target] of project.config.environmentMatchGlobs || []) {
+ if (mm.isMatch(filepath, glob, { cwd: project.config.root })) {
+ env = target;
+ break;
+ }
+ }
+ }
+ env ||= project.config.environment || "node";
+ const transformMode = getTransformMode(project.config.testTransformMode, filepath);
+ let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
+ if (envOptionsJson?.endsWith("*/")) {
+ envOptionsJson = envOptionsJson.slice(0, -2);
+ }
+ const envOptions = JSON.parse(envOptionsJson || "null");
+ const envKey = env === "happy-dom" ? "happyDOM" : env;
+ const environment = {
+ name: env,
+ transformMode,
+ options: envOptions ? { [envKey]: envOptions } : null
+ };
+ return {
+ file: {
+ filepath,
+ testLocations: testLines
+ },
+ project,
+ environment
+ };
+ }));
+ return groupBy(filesWithEnv, ({ environment }) => environment.name);
  }

- const created = /* @__PURE__ */ new Set();
- const promises = /* @__PURE__ */ new Map();
+ const created = new Set();
+ const promises = new Map();
  function createMethodsRPC(project, options = {}) {
6496
- const ctx = project.vitest;
6497
- const cacheFs = options.cacheFs ?? false;
6498
- return {
6499
- snapshotSaved(snapshot) {
6500
- ctx.snapshot.add(snapshot);
6501
- },
6502
- resolveSnapshotPath(testPath) {
6503
- return ctx.snapshot.resolvePath(testPath, {
6504
- config: project.serializedConfig
6505
- });
6506
- },
6507
- async getSourceMap(id, force) {
6508
- if (force) {
6509
- const mod = project.vite.moduleGraph.getModuleById(id);
6510
- if (mod) {
6511
- project.vite.moduleGraph.invalidateModule(mod);
6512
- }
6513
- }
6514
- const r = await project.vitenode.transformRequest(id);
6515
- return r?.map;
6516
- },
6517
- async fetch(id, transformMode) {
6518
- const result = await project.vitenode.fetchResult(id, transformMode).catch(handleRollupError);
6519
- const code = result.code;
6520
- if (!cacheFs || result.externalize) {
6521
- return result;
6522
- }
6523
- if ("id" in result && typeof result.id === "string") {
6524
- return { id: result.id };
6525
- }
6526
- if (code == null) {
6527
- throw new Error(`Failed to fetch module ${id}`);
6528
- }
6529
- const dir = join(project.tmpDir, transformMode);
6530
- const name = hash("sha1", id, "hex");
6531
- const tmp = join(dir, name);
6532
- if (!created.has(dir)) {
6533
- mkdirSync(dir, { recursive: true });
6534
- created.add(dir);
6535
- }
6536
- if (promises.has(tmp)) {
6537
- await promises.get(tmp);
6538
- return { id: tmp };
6539
- }
6540
- promises.set(
6541
- tmp,
6542
- atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))
6543
- );
6544
- await promises.get(tmp);
6545
- Object.assign(result, { id: tmp });
6546
- return { id: tmp };
6547
- },
6548
- resolveId(id, importer, transformMode) {
6549
- return project.vitenode.resolveId(id, importer, transformMode).catch(handleRollupError);
6550
- },
6551
- transform(id, environment) {
6552
- return project.vitenode.transformModule(id, environment).catch(handleRollupError);
6553
- },
6554
- async onQueued(file) {
6555
- if (options.collect) {
6556
- ctx.state.collectFiles(project, [file]);
6557
- } else {
6558
- await ctx._testRun.enqueued(project, file);
6559
- }
6560
- },
6561
- async onCollected(files) {
6562
- if (options.collect) {
6563
- ctx.state.collectFiles(project, files);
6564
- } else {
6565
- await ctx._testRun.collected(project, files);
6566
- }
6567
- },
6568
- onAfterSuiteRun(meta) {
6569
- ctx.coverageProvider?.onAfterSuiteRun(meta);
6570
- },
6571
- async onTaskUpdate(packs, events) {
6572
- if (options.collect) {
6573
- ctx.state.updateTasks(packs);
6574
- } else {
6575
- await ctx._testRun.updated(packs, events);
6576
- }
6577
- },
6578
- async onUserConsoleLog(log) {
6579
- if (options.collect) {
6580
- ctx.state.updateUserLog(log);
6581
- } else {
6582
- await ctx._testRun.log(log);
6583
- }
6584
- },
6585
- onUnhandledError(err, type) {
6586
- ctx.state.catchError(err, type);
6587
- },
6588
- onCancel(reason) {
6589
- ctx.cancelCurrentRun(reason);
6590
- },
6591
- getCountOfFailedTests() {
6592
- return ctx.state.getCountOfFailedTests();
6593
- }
6594
- };
6492
+ const ctx = project.vitest;
6493
+ const cacheFs = options.cacheFs ?? false;
6494
+ return {
6495
+ snapshotSaved(snapshot) {
6496
+ ctx.snapshot.add(snapshot);
6497
+ },
6498
+ resolveSnapshotPath(testPath) {
6499
+ return ctx.snapshot.resolvePath(testPath, { config: project.serializedConfig });
6500
+ },
6501
+ async getSourceMap(id, force) {
6502
+ if (force) {
6503
+ const mod = project.vite.moduleGraph.getModuleById(id);
6504
+ if (mod) {
6505
+ project.vite.moduleGraph.invalidateModule(mod);
6506
+ }
6507
+ }
6508
+ const r = await project.vitenode.transformRequest(id);
6509
+ return r?.map;
6510
+ },
6511
+ async fetch(id, transformMode) {
6512
+ const result = await project.vitenode.fetchResult(id, transformMode).catch(handleRollupError);
6513
+ const code = result.code;
6514
+ if (!cacheFs || result.externalize) {
6515
+ return result;
6516
+ }
6517
+ if ("id" in result && typeof result.id === "string") {
6518
+ return { id: result.id };
6519
+ }
6520
+ if (code == null) {
6521
+ throw new Error(`Failed to fetch module ${id}`);
6522
+ }
6523
+ const dir = join(project.tmpDir, transformMode);
6524
+ const name = hash("sha1", id, "hex");
6525
+ const tmp = join(dir, name);
6526
+ if (!created.has(dir)) {
6527
+ mkdirSync(dir, { recursive: true });
6528
+ created.add(dir);
6529
+ }
6530
+ if (promises.has(tmp)) {
6531
+ await promises.get(tmp);
6532
+ return { id: tmp };
6533
+ }
6534
+ promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp)));
6535
+ await promises.get(tmp);
6536
+ Object.assign(result, { id: tmp });
6537
+ return { id: tmp };
6538
+ },
6539
+ resolveId(id, importer, transformMode) {
6540
+ return project.vitenode.resolveId(id, importer, transformMode).catch(handleRollupError);
6541
+ },
6542
+ transform(id, environment) {
6543
+ return project.vitenode.transformModule(id, environment).catch(handleRollupError);
6544
+ },
6545
+ async onQueued(file) {
6546
+ if (options.collect) {
6547
+ ctx.state.collectFiles(project, [file]);
6548
+ } else {
6549
+ await ctx._testRun.enqueued(project, file);
6550
+ }
6551
+ },
6552
+ async onCollected(files) {
6553
+ if (options.collect) {
6554
+ ctx.state.collectFiles(project, files);
6555
+ } else {
6556
+ await ctx._testRun.collected(project, files);
6557
+ }
6558
+ },
6559
+ onAfterSuiteRun(meta) {
6560
+ ctx.coverageProvider?.onAfterSuiteRun(meta);
6561
+ },
6562
+ async onTaskUpdate(packs, events) {
6563
+ if (options.collect) {
6564
+ ctx.state.updateTasks(packs);
6565
+ } else {
6566
+ await ctx._testRun.updated(packs, events);
6567
+ }
6568
+ },
6569
+ async onUserConsoleLog(log) {
6570
+ if (options.collect) {
6571
+ ctx.state.updateUserLog(log);
6572
+ } else {
6573
+ await ctx._testRun.log(log);
6574
+ }
6575
+ },
6576
+ onUnhandledError(err, type) {
6577
+ ctx.state.catchError(err, type);
6578
+ },
6579
+ onCancel(reason) {
6580
+ ctx.cancelCurrentRun(reason);
6581
+ },
6582
+ getCountOfFailedTests() {
6583
+ return ctx.state.getCountOfFailedTests();
6584
+ }
6585
+ };
6595
6586
  }
6596
6587
  function handleRollupError(e) {
6597
- if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e)) {
6598
- throw {
6599
- name: e.name,
6600
- message: e.message,
6601
- stack: e.stack,
6602
- cause: e.cause,
6603
- __vitest_rollup_error__: {
6604
- plugin: e.plugin,
6605
- id: e.id,
6606
- loc: e.loc,
6607
- frame: e.frame
6608
- }
6609
- };
6610
- }
6611
- throw e;
6588
+ if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e)) {
6589
+ throw {
6590
+ name: e.name,
6591
+ message: e.message,
6592
+ stack: e.stack,
6593
+ cause: e.cause,
6594
+ __vitest_rollup_error__: {
6595
+ plugin: e.plugin,
6596
+ id: e.id,
6597
+ loc: e.loc,
6598
+ frame: e.frame
6599
+ }
6600
+ };
6601
+ }
6602
+ throw e;
6612
6603
  }
6604
+ /**
6605
+ * Performs an atomic write operation using the write-then-rename pattern.
6606
+ *
6607
+ * Why we need this:
6608
+ * - Ensures file integrity by never leaving partially written files on disk
6609
+ * - Prevents other processes from reading incomplete data during writes
6610
+ * - Particularly important for test files where incomplete writes could cause test failures
6611
+ *
6612
+ * The implementation writes to a temporary file first, then renames it to the target path.
6613
+ * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
6614
+ * guaranteeing that other processes will only ever see the complete file.
6615
+ *
6616
+ * Added in https://github.com/vitest-dev/vitest/pull/7531
6617
+ */
6613
6618
  async function atomicWriteFile(realFilePath, data) {
6614
- const dir = dirname(realFilePath);
6615
- const tmpFilePath = join(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
6616
- try {
6617
- await writeFile(tmpFilePath, data, "utf-8");
6618
- await rename(tmpFilePath, realFilePath);
6619
- } finally {
6620
- try {
6621
- if (await stat(tmpFilePath)) {
6622
- await unlink(tmpFilePath);
6623
- }
6624
- } catch {
6625
- }
6626
- }
6619
+ const dir = dirname(realFilePath);
6620
+ const tmpFilePath = join(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
6621
+ try {
6622
+ await writeFile(tmpFilePath, data, "utf-8");
6623
+ await rename(tmpFilePath, realFilePath);
6624
+ } finally {
6625
+ try {
6626
+ if (await stat(tmpFilePath)) {
6627
+ await unlink(tmpFilePath);
6628
+ }
6629
+ } catch {}
6630
+ }
6627
6631
  }
6628
6632
 
6629
6633
  function createChildProcessChannel$1(project, collect = false) {
6630
- const emitter = new EventEmitter();
6631
- const cleanup = () => emitter.removeAllListeners();
6632
- const events = { message: "message", response: "response" };
6633
- const channel = {
6634
- onMessage: (callback) => emitter.on(events.message, callback),
6635
- postMessage: (message) => emitter.emit(events.response, message)
6636
- };
6637
- const rpc = createBirpc(createMethodsRPC(project, { cacheFs: true, collect }), {
6638
- eventNames: ["onCancel"],
6639
- serialize: v8.serialize,
6640
- deserialize: (v) => v8.deserialize(Buffer.from(v)),
6641
- post(v) {
6642
- emitter.emit(events.message, v);
6643
- },
6644
- on(fn) {
6645
- emitter.on(events.response, fn);
6646
- },
6647
- onTimeoutError(functionName) {
6648
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
6649
- }
6650
- });
6651
- project.ctx.onCancel((reason) => rpc.onCancel(reason));
6652
- return { channel, cleanup };
6634
+ const emitter = new EventEmitter();
6635
+ const cleanup = () => emitter.removeAllListeners();
6636
+ const events = {
6637
+ message: "message",
6638
+ response: "response"
6639
+ };
6640
+ const channel = {
6641
+ onMessage: (callback) => emitter.on(events.message, callback),
6642
+ postMessage: (message) => emitter.emit(events.response, message)
6643
+ };
6644
+ const rpc = createBirpc(createMethodsRPC(project, {
6645
+ cacheFs: true,
6646
+ collect
6647
+ }), {
6648
+ eventNames: ["onCancel"],
6649
+ serialize: v8.serialize,
6650
+ deserialize: (v) => v8.deserialize(Buffer.from(v)),
6651
+ post(v) {
6652
+ emitter.emit(events.message, v);
6653
+ },
6654
+ on(fn) {
6655
+ emitter.on(events.response, fn);
6656
+ },
6657
+ onTimeoutError(functionName) {
6658
+ throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
6659
+ }
6660
+ });
6661
+ project.ctx.onCancel((reason) => rpc.onCancel(reason));
6662
+ return {
6663
+ channel,
6664
+ cleanup
6665
+ };
6653
6666
  }
6654
6667
  function createForksPool(ctx, { execArgv, env }) {
6655
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
6656
- const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
6657
- const poolOptions = ctx.config.poolOptions?.forks ?? {};
6658
- const maxThreads = poolOptions.maxForks ?? ctx.config.maxWorkers ?? threadsCount;
6659
- const minThreads = poolOptions.minForks ?? ctx.config.minWorkers ?? threadsCount;
6660
- const worker = resolve$1(ctx.distPath, "workers/forks.js");
6661
- const options = {
6662
- runtime: "child_process",
6663
- filename: resolve$1(ctx.distPath, "worker.js"),
6664
- maxThreads,
6665
- minThreads,
6666
- env,
6667
- execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
6668
- terminateTimeout: ctx.config.teardownTimeout,
6669
- concurrentTasksPerWorker: 1
6670
- };
6671
- const isolated = poolOptions.isolate ?? true;
6672
- if (isolated) {
6673
- options.isolateWorkers = true;
6674
- }
6675
- if (poolOptions.singleFork || !ctx.config.fileParallelism) {
6676
- options.maxThreads = 1;
6677
- options.minThreads = 1;
6678
- }
6679
- const pool = new Tinypool(options);
6680
- const runWithFiles = (name) => {
6681
- let id = 0;
6682
- async function runFiles(project, config, files, environment, invalidates = []) {
6683
- const paths = files.map((f) => f.filepath);
6684
- ctx.state.clearFiles(project, paths);
6685
- const { channel, cleanup } = createChildProcessChannel$1(project, name === "collect");
6686
- const workerId = ++id;
6687
- const data = {
6688
- pool: "forks",
6689
- worker,
6690
- config,
6691
- files,
6692
- invalidates,
6693
- environment,
6694
- workerId,
6695
- projectName: project.name,
6696
- providedContext: project.getProvidedContext()
6697
- };
6698
- try {
6699
- await pool.run(data, { name, channel });
6700
- } catch (error) {
6701
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
6702
- ctx.state.addProcessTimeoutCause(
6703
- `Failed to terminate worker while running ${paths.join(", ")}.`
6704
- );
6705
- } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
6706
- ctx.state.cancelFiles(paths, project);
6707
- } else {
6708
- throw error;
6709
- }
6710
- } finally {
6711
- cleanup();
6712
- }
6713
- }
6714
- return async (specs, invalidates) => {
6715
- ctx.onCancel(() => pool.cancelPendingTasks());
6716
- const configs = /* @__PURE__ */ new WeakMap();
6717
- const getConfig = (project) => {
6718
- if (configs.has(project)) {
6719
- return configs.get(project);
6720
- }
6721
- const _config = project.getSerializableConfig();
6722
- const config = wrapSerializableConfig(_config);
6723
- configs.set(project, config);
6724
- return config;
6725
- };
6726
- const singleFork = specs.filter(
6727
- (spec) => spec.project.config.poolOptions?.forks?.singleFork
6728
- );
6729
- const multipleForks = specs.filter(
6730
- (spec) => !spec.project.config.poolOptions?.forks?.singleFork
6731
- );
6732
- if (multipleForks.length) {
6733
- const filesByEnv = await groupFilesByEnv(multipleForks);
6734
- const files = Object.values(filesByEnv).flat();
6735
- const results = [];
6736
- if (isolated) {
6737
- results.push(
6738
- ...await Promise.allSettled(
6739
- files.map(
6740
- ({ file, environment, project }) => runFiles(
6741
- project,
6742
- getConfig(project),
6743
- [file],
6744
- environment,
6745
- invalidates
6746
- )
6747
- )
6748
- )
6749
- );
6750
- } else {
6751
- const grouped = groupBy(
6752
- files,
6753
- ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options)
6754
- );
6755
- for (const group of Object.values(grouped)) {
6756
- results.push(
6757
- ...await Promise.allSettled(
6758
- group.map(
6759
- ({ file, environment, project }) => runFiles(
6760
- project,
6761
- getConfig(project),
6762
- [file],
6763
- environment,
6764
- invalidates
6765
- )
6766
- )
6767
- )
6768
- );
6769
- await new Promise(
6770
- (resolve2) => pool.queueSize === 0 ? resolve2() : pool.once("drain", resolve2)
6771
- );
6772
- await pool.recycleWorkers();
6773
- }
6774
- }
6775
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
6776
- if (errors.length > 0) {
6777
- throw new AggregateError(
6778
- errors,
6779
- "Errors occurred while running tests. For more information, see serialized error."
6780
- );
6781
- }
6782
- }
6783
- if (singleFork.length) {
6784
- const filesByEnv = await groupFilesByEnv(singleFork);
6785
- const envs = envsOrder.concat(
6786
- Object.keys(filesByEnv).filter((env2) => !envsOrder.includes(env2))
6787
- );
6788
- for (const env2 of envs) {
6789
- const files = filesByEnv[env2];
6790
- if (!files?.length) {
6791
- continue;
6792
- }
6793
- const filesByOptions = groupBy(
6794
- files,
6795
- ({ project, environment }) => project.name + JSON.stringify(environment.options)
6796
- );
6797
- for (const files2 of Object.values(filesByOptions)) {
6798
- await pool.recycleWorkers();
6799
- const filenames = files2.map((f) => f.file);
6800
- await runFiles(
6801
- files2[0].project,
6802
- getConfig(files2[0].project),
6803
- filenames,
6804
- files2[0].environment,
6805
- invalidates
6806
- );
6807
- }
6808
- }
6809
- }
6810
- };
6811
- };
6812
- return {
6813
- name: "forks",
6814
- runTests: runWithFiles("run"),
6815
- collectTests: runWithFiles("collect"),
6816
- close: () => pool.destroy()
6817
- };
6668
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
6669
+ const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
6670
+ const poolOptions = ctx.config.poolOptions?.forks ?? {};
6671
+ const maxThreads = poolOptions.maxForks ?? ctx.config.maxWorkers ?? threadsCount;
6672
+ const minThreads = poolOptions.minForks ?? ctx.config.minWorkers ?? threadsCount;
6673
+ const worker = resolve$1(ctx.distPath, "workers/forks.js");
6674
+ const options = {
6675
+ runtime: "child_process",
6676
+ filename: resolve$1(ctx.distPath, "worker.js"),
6677
+ maxThreads,
6678
+ minThreads,
6679
+ env,
6680
+ execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
6681
+ terminateTimeout: ctx.config.teardownTimeout,
6682
+ concurrentTasksPerWorker: 1
6683
+ };
6684
+ const isolated = poolOptions.isolate ?? true;
6685
+ if (isolated) {
6686
+ options.isolateWorkers = true;
6687
+ }
6688
+ if (poolOptions.singleFork || !ctx.config.fileParallelism) {
6689
+ options.maxThreads = 1;
6690
+ options.minThreads = 1;
6691
+ }
6692
+ const pool = new Tinypool(options);
6693
+ const runWithFiles = (name) => {
6694
+ let id = 0;
6695
+ async function runFiles(project, config, files, environment, invalidates = []) {
6696
+ const paths = files.map((f) => f.filepath);
6697
+ ctx.state.clearFiles(project, paths);
6698
+ const { channel, cleanup } = createChildProcessChannel$1(project, name === "collect");
6699
+ const workerId = ++id;
6700
+ const data = {
6701
+ pool: "forks",
6702
+ worker,
6703
+ config,
6704
+ files,
6705
+ invalidates,
6706
+ environment,
6707
+ workerId,
6708
+ projectName: project.name,
6709
+ providedContext: project.getProvidedContext()
6710
+ };
6711
+ try {
6712
+ await pool.run(data, {
6713
+ name,
6714
+ channel
6715
+ });
6716
+ } catch (error) {
6717
+ if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
6718
+ ctx.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
6719
+ } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
6720
+ ctx.state.cancelFiles(paths, project);
6721
+ } else {
6722
+ throw error;
6723
+ }
6724
+ } finally {
6725
+ cleanup();
6726
+ }
6727
+ }
6728
+ return async (specs, invalidates) => {
6729
+ ctx.onCancel(() => pool.cancelPendingTasks());
6730
+ const configs = new WeakMap();
6731
+ const getConfig = (project) => {
6732
+ if (configs.has(project)) {
6733
+ return configs.get(project);
6734
+ }
6735
+ const _config = project.getSerializableConfig();
6736
+ const config = wrapSerializableConfig(_config);
6737
+ configs.set(project, config);
6738
+ return config;
6739
+ };
6740
+ const singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork);
6741
+ const multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
6742
+ if (multipleForks.length) {
6743
+ const filesByEnv = await groupFilesByEnv(multipleForks);
6744
+ const files = Object.values(filesByEnv).flat();
6745
+ const results = [];
6746
+ if (isolated) {
6747
+ results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
6748
+ } else {
6749
+ const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
6750
+ for (const group of Object.values(grouped)) {
6751
+ results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
6752
+ await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
6753
+ await pool.recycleWorkers();
6754
+ }
6755
+ }
6756
+ const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
6757
+ if (errors.length > 0) {
6758
+ throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
6759
+ }
6760
+ }
6761
+ if (singleFork.length) {
6762
+ const filesByEnv = await groupFilesByEnv(singleFork);
6763
+ const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
6764
+ for (const env of envs) {
6765
+ const files = filesByEnv[env];
6766
+ if (!files?.length) {
6767
+ continue;
6768
+ }
6769
+ const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
6770
+ for (const files of Object.values(filesByOptions)) {
6771
+ await pool.recycleWorkers();
6772
+ const filenames = files.map((f) => f.file);
6773
+ await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
6774
+ }
6775
+ }
6776
+ }
6777
+ };
6778
+ };
6779
+ return {
6780
+ name: "forks",
6781
+ runTests: runWithFiles("run"),
6782
+ collectTests: runWithFiles("collect"),
6783
+ close: () => pool.destroy()
6784
+ };
6818
6785
  }
6819
6786
 
6820
6787
  function createWorkerChannel$1(project, collect) {
6821
- const channel = new MessageChannel();
6822
- const port = channel.port2;
6823
- const workerPort = channel.port1;
6824
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
6825
- eventNames: ["onCancel"],
6826
- post(v) {
6827
- port.postMessage(v);
6828
- },
6829
- on(fn) {
6830
- port.on("message", fn);
6831
- },
6832
- onTimeoutError(functionName) {
6833
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
6834
- }
6835
- });
6836
- project.ctx.onCancel((reason) => rpc.onCancel(reason));
6837
- return { workerPort, port };
6788
+ const channel = new MessageChannel();
6789
+ const port = channel.port2;
6790
+ const workerPort = channel.port1;
6791
+ const rpc = createBirpc(createMethodsRPC(project, { collect }), {
6792
+ eventNames: ["onCancel"],
6793
+ post(v) {
6794
+ port.postMessage(v);
6795
+ },
6796
+ on(fn) {
6797
+ port.on("message", fn);
6798
+ },
6799
+ onTimeoutError(functionName) {
6800
+ throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
6801
+ }
6802
+ });
6803
+ project.ctx.onCancel((reason) => rpc.onCancel(reason));
6804
+ return {
6805
+ workerPort,
6806
+ port
6807
+ };
6838
6808
  }
6839
6809
  function createThreadsPool(ctx, { execArgv, env }) {
6840
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
6841
- const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
6842
- const poolOptions = ctx.config.poolOptions?.threads ?? {};
6843
- const maxThreads = poolOptions.maxThreads ?? ctx.config.maxWorkers ?? threadsCount;
6844
- const minThreads = poolOptions.minThreads ?? ctx.config.minWorkers ?? threadsCount;
6845
- const worker = resolve$1(ctx.distPath, "workers/threads.js");
6846
- const options = {
6847
- filename: resolve$1(ctx.distPath, "worker.js"),
6848
- // TODO: investigate further
6849
- // It seems atomics introduced V8 Fatal Error https://github.com/vitest-dev/vitest/issues/1191
6850
- useAtomics: poolOptions.useAtomics ?? false,
6851
- maxThreads,
6852
- minThreads,
6853
- env,
6854
- execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
6855
- terminateTimeout: ctx.config.teardownTimeout,
6856
- concurrentTasksPerWorker: 1
6857
- };
6858
- const isolated = poolOptions.isolate ?? true;
6859
- if (isolated) {
6860
- options.isolateWorkers = true;
6861
- }
6862
- if (poolOptions.singleThread || !ctx.config.fileParallelism) {
6863
- options.maxThreads = 1;
6864
- options.minThreads = 1;
6865
- }
6866
- const pool = new Tinypool$1(options);
6867
- const runWithFiles = (name) => {
6868
- let id = 0;
6869
- async function runFiles(project, config, files, environment, invalidates = []) {
6870
- const paths = files.map((f) => f.filepath);
6871
- ctx.state.clearFiles(project, paths);
6872
- const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
6873
- const workerId = ++id;
6874
- const data = {
6875
- pool: "threads",
6876
- worker,
6877
- port: workerPort,
6878
- config,
6879
- files,
6880
- invalidates,
6881
- environment,
6882
- workerId,
6883
- projectName: project.name,
6884
- providedContext: project.getProvidedContext()
6885
- };
6886
- try {
6887
- await pool.run(data, { transferList: [workerPort], name });
6888
- } catch (error) {
6889
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
6890
- ctx.state.addProcessTimeoutCause(
6891
- `Failed to terminate worker while running ${paths.join(
6892
- ", "
6893
- )}.
6894
- See https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`
6895
- );
6896
- } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
6897
- ctx.state.cancelFiles(paths, project);
6898
- } else {
6899
- throw error;
6900
- }
6901
- } finally {
6902
- port.close();
6903
- workerPort.close();
6904
- }
6905
- }
6906
- return async (specs, invalidates) => {
6907
- ctx.onCancel(() => pool.cancelPendingTasks());
6908
- const configs = /* @__PURE__ */ new WeakMap();
6909
- const getConfig = (project) => {
6910
- if (configs.has(project)) {
6911
- return configs.get(project);
6912
- }
6913
- const config = project.getSerializableConfig();
6914
- configs.set(project, config);
6915
- return config;
6916
- };
6917
- const singleThreads = specs.filter(
6918
- (spec) => spec.project.config.poolOptions?.threads?.singleThread
6919
- );
6920
- const multipleThreads = specs.filter(
6921
- (spec) => !spec.project.config.poolOptions?.threads?.singleThread
6922
- );
6923
- if (multipleThreads.length) {
6924
- const filesByEnv = await groupFilesByEnv(multipleThreads);
6925
- const files = Object.values(filesByEnv).flat();
6926
- const results = [];
6927
- if (isolated) {
6928
- results.push(
6929
- ...await Promise.allSettled(
6930
- files.map(
6931
- ({ file, environment, project }) => runFiles(
6932
- project,
6933
- getConfig(project),
6934
- [file],
6935
- environment,
6936
- invalidates
6937
- )
6938
- )
6939
- )
6940
- );
6941
- } else {
6942
- const grouped = groupBy(
6943
- files,
6944
- ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options)
6945
- );
6946
- for (const group of Object.values(grouped)) {
6947
- results.push(
6948
- ...await Promise.allSettled(
6949
- group.map(
6950
- ({ file, environment, project }) => runFiles(
6951
- project,
6952
- getConfig(project),
6953
- [file],
6954
- environment,
6955
- invalidates
6956
- )
6957
- )
6958
- )
6959
- );
6960
- await new Promise(
6961
- (resolve2) => pool.queueSize === 0 ? resolve2() : pool.once("drain", resolve2)
6962
- );
6963
- await pool.recycleWorkers();
6964
- }
6965
- }
6966
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
6967
- if (errors.length > 0) {
6968
- throw new AggregateError(
6969
- errors,
6970
- "Errors occurred while running tests. For more information, see serialized error."
6971
- );
6972
- }
6973
- }
6974
- if (singleThreads.length) {
6975
- const filesByEnv = await groupFilesByEnv(singleThreads);
6976
- const envs = envsOrder.concat(
6977
- Object.keys(filesByEnv).filter((env2) => !envsOrder.includes(env2))
6978
- );
6979
- for (const env2 of envs) {
6980
- const files = filesByEnv[env2];
6981
- if (!files?.length) {
6982
- continue;
6983
- }
6984
- const filesByOptions = groupBy(
6985
- files,
6986
- ({ project, environment }) => project.name + JSON.stringify(environment.options)
6987
- );
6988
- for (const files2 of Object.values(filesByOptions)) {
6989
- await pool.recycleWorkers();
6990
- const filenames = files2.map((f) => f.file);
6991
- await runFiles(
6992
- files2[0].project,
6993
- getConfig(files2[0].project),
6994
- filenames,
6995
- files2[0].environment,
6996
- invalidates
6997
- );
6998
- }
6999
- }
7000
- }
7001
- };
7002
- };
7003
- return {
7004
- name: "threads",
7005
- runTests: runWithFiles("run"),
7006
- collectTests: runWithFiles("collect"),
7007
- close: () => pool.destroy()
7008
- };
6810
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
6811
+ const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
6812
+ const poolOptions = ctx.config.poolOptions?.threads ?? {};
6813
+ const maxThreads = poolOptions.maxThreads ?? ctx.config.maxWorkers ?? threadsCount;
6814
+ const minThreads = poolOptions.minThreads ?? ctx.config.minWorkers ?? threadsCount;
6815
+ const worker = resolve$1(ctx.distPath, "workers/threads.js");
6816
+ const options = {
6817
+ filename: resolve$1(ctx.distPath, "worker.js"),
6818
+ useAtomics: poolOptions.useAtomics ?? false,
6819
+ maxThreads,
6820
+ minThreads,
6821
+ env,
6822
+ execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
6823
+ terminateTimeout: ctx.config.teardownTimeout,
6824
+ concurrentTasksPerWorker: 1
6825
+ };
6826
+ const isolated = poolOptions.isolate ?? true;
6827
+ if (isolated) {
6828
+ options.isolateWorkers = true;
6829
+ }
6830
+ if (poolOptions.singleThread || !ctx.config.fileParallelism) {
6831
+ options.maxThreads = 1;
6832
+ options.minThreads = 1;
6833
+ }
6834
+ const pool = new Tinypool$1(options);
6835
+ const runWithFiles = (name) => {
6836
+ let id = 0;
6837
+ async function runFiles(project, config, files, environment, invalidates = []) {
6838
+ const paths = files.map((f) => f.filepath);
6839
+ ctx.state.clearFiles(project, paths);
6840
+ const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
6841
+ const workerId = ++id;
6842
+ const data = {
6843
+ pool: "threads",
6844
+ worker,
6845
+ port: workerPort,
6846
+ config,
6847
+ files,
6848
+ invalidates,
6849
+ environment,
6850
+ workerId,
6851
+ projectName: project.name,
6852
+ providedContext: project.getProvidedContext()
6853
+ };
6854
+ try {
6855
+ await pool.run(data, {
6856
+ transferList: [workerPort],
6857
+ name
6858
+ });
6859
+ } catch (error) {
6860
+ if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
6861
+ ctx.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
6862
+ } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
6863
+ ctx.state.cancelFiles(paths, project);
6864
+ } else {
6865
+ throw error;
6866
+ }
6867
+ } finally {
6868
+ port.close();
6869
+ workerPort.close();
6870
+ }
6871
+ }
6872
+ return async (specs, invalidates) => {
6873
+ ctx.onCancel(() => pool.cancelPendingTasks());
6874
+ const configs = new WeakMap();
6875
+ const getConfig = (project) => {
6876
+ if (configs.has(project)) {
6877
+ return configs.get(project);
6878
+ }
6879
+ const config = project.getSerializableConfig();
6880
+ configs.set(project, config);
6881
+ return config;
6882
+ };
6883
+ const singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread);
6884
+ const multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
6885
+ if (multipleThreads.length) {
6886
+ const filesByEnv = await groupFilesByEnv(multipleThreads);
6887
+ const files = Object.values(filesByEnv).flat();
6888
+ const results = [];
6889
+ if (isolated) {
6890
+ results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
6891
+ } else {
6892
+ const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
6893
+ for (const group of Object.values(grouped)) {
6894
+ results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
6895
+ await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
6896
+ await pool.recycleWorkers();
6897
+ }
6898
+ }
6899
+ const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
6900
+ if (errors.length > 0) {
6901
+ throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
6902
+ }
6903
+ }
6904
+ if (singleThreads.length) {
6905
+ const filesByEnv = await groupFilesByEnv(singleThreads);
6906
+ const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
6907
+ for (const env of envs) {
6908
+ const files = filesByEnv[env];
6909
+ if (!files?.length) {
6910
+ continue;
6911
+ }
6912
+ const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
6913
+ for (const files of Object.values(filesByOptions)) {
6914
+ await pool.recycleWorkers();
6915
+ const filenames = files.map((f) => f.file);
6916
+ await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
6917
+ }
6918
+ }
6919
+ }
6920
+ };
6921
+ };
6922
+ return {
6923
+ name: "threads",
6924
+ runTests: runWithFiles("run"),
6925
+ collectTests: runWithFiles("collect"),
6926
+ close: () => pool.destroy()
6927
+ };
7009
6928
  }
7010
6929
 
7011
6930
  function createTypecheckPool(ctx) {
7012
- const promisesMap = /* @__PURE__ */ new WeakMap();
7013
- const rerunTriggered = /* @__PURE__ */ new WeakSet();
7014
- async function onParseEnd(project, { files, sourceErrors }) {
7015
- const checker = project.typechecker;
7016
- const { packs, events } = checker.getTestPacksAndEvents();
7017
- await ctx._testRun.updated(packs, events);
7018
- if (!project.config.typecheck.ignoreSourceErrors) {
7019
- sourceErrors.forEach(
7020
- (error) => ctx.state.catchError(error, "Unhandled Source Error")
7021
- );
7022
- }
7023
- const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
7024
- if (processError) {
7025
- const error = new Error(checker.getOutput());
7026
- error.stack = "";
7027
- ctx.state.catchError(error, "Typecheck Error");
7028
- }
7029
- promisesMap.get(project)?.resolve();
7030
- rerunTriggered.delete(project);
7031
- if (ctx.config.watch && !ctx.runningPromise) {
7032
- await ctx.report("onFinished", files, []);
7033
- await ctx.report("onWatcherStart", files, [
7034
- ...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors,
7035
- ...ctx.state.getUnhandledErrors()
7036
- ]);
7037
- }
7038
- }
7039
- async function createWorkspaceTypechecker(project, files) {
7040
- const checker = project.typechecker ?? new Typechecker(project);
7041
- if (project.typechecker) {
7042
- return checker;
7043
- }
7044
- project.typechecker = checker;
7045
- checker.setFiles(files);
7046
- checker.onParseStart(async () => {
7047
- const files2 = checker.getTestFiles();
7048
- for (const file of files2) {
7049
- await ctx._testRun.enqueued(project, file);
7050
- }
7051
- await ctx._testRun.collected(project, files2);
7052
- });
7053
- checker.onParseEnd((result) => onParseEnd(project, result));
7054
- checker.onWatcherRerun(async () => {
7055
- rerunTriggered.add(project);
7056
- if (!ctx.runningPromise) {
7057
- ctx.state.clearErrors();
7058
- await ctx.report(
7059
- "onWatcherRerun",
7060
- files,
7061
- "File change detected. Triggering rerun."
7062
- );
7063
- }
7064
- await checker.collectTests();
7065
- const testFiles = checker.getTestFiles();
7066
- for (const file of testFiles) {
7067
- await ctx._testRun.enqueued(project, file);
7068
- }
7069
- await ctx._testRun.collected(project, testFiles);
7070
- const { packs, events } = checker.getTestPacksAndEvents();
7071
- await ctx._testRun.updated(packs, events);
7072
- });
7073
- await checker.prepare();
7074
- return checker;
7075
- }
7076
- async function startTypechecker(project, files) {
7077
- if (project.typechecker) {
7078
- return project.typechecker;
7079
- }
7080
- const checker = await createWorkspaceTypechecker(project, files);
7081
- await checker.collectTests();
7082
- await checker.start();
7083
- }
7084
- async function collectTests(specs) {
7085
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
7086
- for (const name in specsByProject) {
7087
- const project = specsByProject[name][0].project;
7088
- const files = specsByProject[name].map((spec) => spec.moduleId);
7089
- const checker = await createWorkspaceTypechecker(project, files);
7090
- checker.setFiles(files);
7091
- await checker.collectTests();
7092
- const testFiles = checker.getTestFiles();
7093
- for (const file of testFiles) {
7094
- await ctx._testRun.enqueued(project, file);
7095
- }
7096
- await ctx._testRun.collected(project, testFiles);
7097
- }
7098
- }
7099
- async function runTests(specs) {
7100
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
7101
- const promises = [];
7102
- for (const name in specsByProject) {
7103
- const project = specsByProject[name][0].project;
7104
- const files = specsByProject[name].map((spec) => spec.moduleId);
7105
- const promise = createDefer();
7106
- const _p = new Promise((resolve) => {
7107
- const _i = setInterval(() => {
7108
- if (!project.typechecker || rerunTriggered.has(project)) {
7109
- resolve(true);
7110
- clearInterval(_i);
7111
- }
7112
- });
7113
- setTimeout(() => {
7114
- resolve(false);
7115
- clearInterval(_i);
7116
- }, 500).unref();
7117
- });
7118
- const triggered = await _p;
7119
- if (project.typechecker && !triggered) {
7120
- const testFiles = project.typechecker.getTestFiles();
7121
- for (const file of testFiles) {
7122
- await ctx._testRun.enqueued(project, file);
7123
- }
7124
- await ctx._testRun.collected(project, testFiles);
7125
- await onParseEnd(project, project.typechecker.getResult());
7126
- continue;
7127
- }
7128
- promises.push(promise);
7129
- promisesMap.set(project, promise);
7130
- startTypechecker(project, files);
7131
- }
7132
- await Promise.all(promises);
7133
- }
7134
- return {
7135
- name: "typescript",
7136
- runTests,
7137
- collectTests,
7138
- async close() {
7139
- const promises = ctx.projects.map(
7140
- (project) => project.typechecker?.stop()
7141
- );
7142
- await Promise.all(promises);
7143
- }
7144
- };
6931
+ const promisesMap = new WeakMap();
6932
+ const rerunTriggered = new WeakSet();
6933
+ async function onParseEnd(project, { files, sourceErrors }) {
6934
+ const checker = project.typechecker;
6935
+ const { packs, events } = checker.getTestPacksAndEvents();
6936
+ await ctx._testRun.updated(packs, events);
6937
+ if (!project.config.typecheck.ignoreSourceErrors) {
6938
+ sourceErrors.forEach((error) => ctx.state.catchError(error, "Unhandled Source Error"));
6939
+ }
6940
+ const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
6941
+ if (processError) {
6942
+ const error = new Error(checker.getOutput());
6943
+ error.stack = "";
6944
+ ctx.state.catchError(error, "Typecheck Error");
6945
+ }
6946
+ promisesMap.get(project)?.resolve();
6947
+ rerunTriggered.delete(project);
6948
+ if (ctx.config.watch && !ctx.runningPromise) {
6949
+ await ctx.report("onFinished", files, []);
6950
+ await ctx.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...ctx.state.getUnhandledErrors()]);
6951
+ }
6952
+ }
6953
+ async function createWorkspaceTypechecker(project, files) {
6954
+ const checker = project.typechecker ?? new Typechecker(project);
6955
+ if (project.typechecker) {
6956
+ return checker;
6957
+ }
6958
+ project.typechecker = checker;
6959
+ checker.setFiles(files);
6960
+ checker.onParseStart(async () => {
6961
+ const files = checker.getTestFiles();
6962
+ for (const file of files) {
6963
+ await ctx._testRun.enqueued(project, file);
6964
+ }
6965
+ await ctx._testRun.collected(project, files);
6966
+ });
6967
+ checker.onParseEnd((result) => onParseEnd(project, result));
6968
+ checker.onWatcherRerun(async () => {
6969
+ rerunTriggered.add(project);
6970
+ if (!ctx.runningPromise) {
6971
+ ctx.state.clearErrors();
6972
+ await ctx.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
6973
+ }
6974
+ await checker.collectTests();
6975
+ const testFiles = checker.getTestFiles();
6976
+ for (const file of testFiles) {
6977
+ await ctx._testRun.enqueued(project, file);
6978
+ }
6979
+ await ctx._testRun.collected(project, testFiles);
6980
+ const { packs, events } = checker.getTestPacksAndEvents();
6981
+ await ctx._testRun.updated(packs, events);
6982
+ });
6983
+ await checker.prepare();
6984
+ return checker;
6985
+ }
6986
+ async function startTypechecker(project, files) {
6987
+ if (project.typechecker) {
6988
+ return project.typechecker;
6989
+ }
6990
+ const checker = await createWorkspaceTypechecker(project, files);
6991
+ await checker.collectTests();
6992
+ await checker.start();
6993
+ }
6994
+ async function collectTests(specs) {
6995
+ const specsByProject = groupBy(specs, (spec) => spec.project.name);
6996
+ for (const name in specsByProject) {
6997
+ const project = specsByProject[name][0].project;
6998
+ const files = specsByProject[name].map((spec) => spec.moduleId);
6999
+ const checker = await createWorkspaceTypechecker(project, files);
7000
+ checker.setFiles(files);
7001
+ await checker.collectTests();
7002
+ const testFiles = checker.getTestFiles();
7003
+ for (const file of testFiles) {
7004
+ await ctx._testRun.enqueued(project, file);
7005
+ }
7006
+ await ctx._testRun.collected(project, testFiles);
7007
+ }
7008
+ }
7009
+ async function runTests(specs) {
7010
+ const specsByProject = groupBy(specs, (spec) => spec.project.name);
7011
+ const promises = [];
7012
+ for (const name in specsByProject) {
7013
+ const project = specsByProject[name][0].project;
7014
+ const files = specsByProject[name].map((spec) => spec.moduleId);
7015
+ const promise = createDefer();
7016
+ const _p = new Promise((resolve) => {
7017
+ const _i = setInterval(() => {
7018
+ if (!project.typechecker || rerunTriggered.has(project)) {
7019
+ resolve(true);
7020
+ clearInterval(_i);
7021
+ }
7022
+ });
7023
+ setTimeout(() => {
7024
+ resolve(false);
7025
+ clearInterval(_i);
7026
+ }, 500).unref();
7027
+ });
7028
+ const triggered = await _p;
7029
+ if (project.typechecker && !triggered) {
7030
+ const testFiles = project.typechecker.getTestFiles();
7031
+ for (const file of testFiles) {
7032
+ await ctx._testRun.enqueued(project, file);
7033
+ }
7034
+ await ctx._testRun.collected(project, testFiles);
7035
+ await onParseEnd(project, project.typechecker.getResult());
7036
+ continue;
7037
+ }
7038
+ promises.push(promise);
7039
+ promisesMap.set(project, promise);
7040
+ startTypechecker(project, files);
7041
+ }
7042
+ await Promise.all(promises);
7043
+ }
7044
+ return {
7045
+ name: "typescript",
7046
+ runTests,
7047
+ collectTests,
7048
+ async close() {
7049
+ const promises = ctx.projects.map((project) => project.typechecker?.stop());
7050
+ await Promise.all(promises);
7051
+ }
7052
+ };
7145
7053
  }
7146
7054
 
7147
7055
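For reference, the runTests function in the typecheck pool above races a short poll against a 500 ms timeout so that a project whose typechecker already reacted to a file change is not started a second time. A minimal standalone sketch of that race, with a hypothetical condition callback (not a Vitest API):

// Resolves true if `condition()` becomes truthy before `timeoutMs`, otherwise false.
function waitForCondition(condition, timeoutMs = 500) {
  return new Promise((resolve) => {
    const interval = setInterval(() => {
      if (condition()) {
        resolve(true);
        clearInterval(interval);
      }
    });
    setTimeout(() => {
      resolve(false);
      clearInterval(interval);
    }, timeoutMs).unref();
  });
}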
  function getDefaultThreadsCount(config) {
7148
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
7149
- return config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
7056
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
7057
+ return config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
7150
7058
  }
7151
7059
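getDefaultThreadsCount above halves the CPU count in watch mode and otherwise leaves one core free for the main process. A standalone sketch of the same arithmetic (the core counts in the comment are illustrative):

import os from "node:os";

const cpus = typeof os.availableParallelism === "function"
  ? os.availableParallelism()
  : os.cpus().length;

// e.g. on an 8-core machine: 4 workers in watch mode, 7 for a single run
const watchWorkers = Math.max(Math.floor(cpus / 2), 1);
const runWorkers = Math.max(cpus - 1, 1);
console.log({ cpus, watchWorkers, runWorkers });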
  function getWorkerMemoryLimit(config) {
7152
- const memoryLimit = config.poolOptions?.vmThreads?.memoryLimit;
7153
- if (memoryLimit) {
7154
- return memoryLimit;
7155
- }
7156
- return 1 / (config.poolOptions?.vmThreads?.maxThreads ?? getDefaultThreadsCount(config));
7060
+ const memoryLimit = config.poolOptions?.vmThreads?.memoryLimit;
7061
+ if (memoryLimit) {
7062
+ return memoryLimit;
7063
+ }
7064
+ return 1 / (config.poolOptions?.vmThreads?.maxThreads ?? getDefaultThreadsCount(config));
7157
7065
  }
7066
+ /**
7067
+ * Converts a string representing an amount of memory to bytes.
7068
+ *
7069
+ * @param input The value to convert to bytes.
7070
+ * @param percentageReference The reference value to use when a '%' value is supplied.
7071
+ */
7158
7072
  function stringToBytes(input, percentageReference) {
7159
- if (input === null || input === void 0) {
7160
- return input;
7161
- }
7162
- if (typeof input === "string") {
7163
- if (Number.isNaN(Number.parseFloat(input.slice(-1)))) {
7164
- let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
7165
- if (trailingChars && numericString) {
7166
- const numericValue = Number.parseFloat(numericString);
7167
- trailingChars = trailingChars.toLowerCase();
7168
- switch (trailingChars) {
7169
- case "%":
7170
- input = numericValue / 100;
7171
- break;
7172
- case "kb":
7173
- case "k":
7174
- return numericValue * 1e3;
7175
- case "kib":
7176
- return numericValue * 1024;
7177
- case "mb":
7178
- case "m":
7179
- return numericValue * 1e3 * 1e3;
7180
- case "mib":
7181
- return numericValue * 1024 * 1024;
7182
- case "gb":
7183
- case "g":
7184
- return numericValue * 1e3 * 1e3 * 1e3;
7185
- case "gib":
7186
- return numericValue * 1024 * 1024 * 1024;
7187
- }
7188
- }
7189
- } else {
7190
- input = Number.parseFloat(input);
7191
- }
7192
- }
7193
- if (typeof input === "number") {
7194
- if (input <= 1 && input > 0) {
7195
- if (percentageReference) {
7196
- return Math.floor(input * percentageReference);
7197
- } else {
7198
- throw new Error(
7199
- "For a percentage based memory limit a percentageReference must be supplied"
7200
- );
7201
- }
7202
- } else if (input > 1) {
7203
- return Math.floor(input);
7204
- } else {
7205
- throw new Error('Unexpected numerical input for "memoryLimit"');
7206
- }
7207
- }
7208
- return null;
7073
+ if (input === null || input === undefined) {
7074
+ return input;
7075
+ }
7076
+ if (typeof input === "string") {
7077
+ if (Number.isNaN(Number.parseFloat(input.slice(-1)))) {
7078
+ let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
7079
+ if (trailingChars && numericString) {
7080
+ const numericValue = Number.parseFloat(numericString);
7081
+ trailingChars = trailingChars.toLowerCase();
7082
+ switch (trailingChars) {
7083
+ case "%":
7084
+ input = numericValue / 100;
7085
+ break;
7086
+ case "kb":
7087
+ case "k": return numericValue * 1e3;
7088
+ case "kib": return numericValue * 1024;
7089
+ case "mb":
7090
+ case "m": return numericValue * 1e3 * 1e3;
7091
+ case "mib": return numericValue * 1024 * 1024;
7092
+ case "gb":
7093
+ case "g": return numericValue * 1e3 * 1e3 * 1e3;
7094
+ case "gib": return numericValue * 1024 * 1024 * 1024;
7095
+ }
7096
+ }
7097
+ } else {
7098
+ input = Number.parseFloat(input);
7099
+ }
7100
+ }
7101
+ if (typeof input === "number") {
7102
+ if (input <= 1 && input > 0) {
7103
+ if (percentageReference) {
7104
+ return Math.floor(input * percentageReference);
7105
+ } else {
7106
+ throw new Error("For a percentage based memory limit a percentageReference must be supplied");
7107
+ }
7108
+ } else if (input > 1) {
7109
+ return Math.floor(input);
7110
+ } else {
7111
+ throw new Error("Unexpected numerical input for \"memoryLimit\"");
7112
+ }
7113
+ }
7114
+ return null;
7209
7115
  }
7210
7116
 
7211
7117
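The JSDoc above lists the inputs stringToBytes accepts; as a quick reference, these are the conversions it performs. A sketch under those rules, assuming an 8 GiB machine as the percentage reference:

const reference = 8 * 1024 * 1024 * 1024; // assumed total memory for the "%" case

// "512mb"  -> 512 * 1e3 * 1e3             = 512000000 bytes (decimal units)
// "128MiB" -> 128 * 1024 * 1024           = 134217728 bytes (binary units)
// "50%"    -> Math.floor(0.5 * reference) = 4294967296 bytes
// 0.25     -> Math.floor(0.25 * reference)  (fractions of the reference)
// 2048     -> 2048                          (plain byte counts pass through)
console.log(Math.floor(0.5 * reference)); // 4294967296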
  const suppressWarningsPath$1 = resolve$1(rootDir, "./suppress-warnings.cjs");
7212
7118
  function createChildProcessChannel(project, collect) {
7213
- const emitter = new EventEmitter();
7214
- const cleanup = () => emitter.removeAllListeners();
7215
- const events = { message: "message", response: "response" };
7216
- const channel = {
7217
- onMessage: (callback) => emitter.on(events.message, callback),
7218
- postMessage: (message) => emitter.emit(events.response, message)
7219
- };
7220
- const rpc = createBirpc(
7221
- createMethodsRPC(project, { cacheFs: true, collect }),
7222
- {
7223
- eventNames: ["onCancel"],
7224
- serialize: v8.serialize,
7225
- deserialize: (v) => v8.deserialize(Buffer.from(v)),
7226
- post(v) {
7227
- emitter.emit(events.message, v);
7228
- },
7229
- on(fn) {
7230
- emitter.on(events.response, fn);
7231
- },
7232
- onTimeoutError(functionName) {
7233
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
7234
- }
7235
- }
7236
- );
7237
- project.ctx.onCancel((reason) => rpc.onCancel(reason));
7238
- return { channel, cleanup };
7119
+ const emitter = new EventEmitter();
7120
+ const cleanup = () => emitter.removeAllListeners();
7121
+ const events = {
7122
+ message: "message",
7123
+ response: "response"
7124
+ };
7125
+ const channel = {
7126
+ onMessage: (callback) => emitter.on(events.message, callback),
7127
+ postMessage: (message) => emitter.emit(events.response, message)
7128
+ };
7129
+ const rpc = createBirpc(createMethodsRPC(project, {
7130
+ cacheFs: true,
7131
+ collect
7132
+ }), {
7133
+ eventNames: ["onCancel"],
7134
+ serialize: v8.serialize,
7135
+ deserialize: (v) => v8.deserialize(Buffer.from(v)),
7136
+ post(v) {
7137
+ emitter.emit(events.message, v);
7138
+ },
7139
+ on(fn) {
7140
+ emitter.on(events.response, fn);
7141
+ },
7142
+ onTimeoutError(functionName) {
7143
+ throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
7144
+ }
7145
+ });
7146
+ project.ctx.onCancel((reason) => rpc.onCancel(reason));
7147
+ return {
7148
+ channel,
7149
+ cleanup
7150
+ };
7239
7151
  }
7240
7152
  function createVmForksPool(ctx, { execArgv, env }) {
7241
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
7242
- const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
7243
- const poolOptions = ctx.config.poolOptions?.vmForks ?? {};
7244
- const maxThreads = poolOptions.maxForks ?? ctx.config.maxWorkers ?? threadsCount;
7245
- const minThreads = poolOptions.maxForks ?? ctx.config.minWorkers ?? threadsCount;
7246
- const worker = resolve$1(ctx.distPath, "workers/vmForks.js");
7247
- const options = {
7248
- runtime: "child_process",
7249
- filename: resolve$1(ctx.distPath, "worker.js"),
7250
- maxThreads,
7251
- minThreads,
7252
- env,
7253
- execArgv: [
7254
- "--experimental-import-meta-resolve",
7255
- "--experimental-vm-modules",
7256
- "--require",
7257
- suppressWarningsPath$1,
7258
- ...poolOptions.execArgv ?? [],
7259
- ...execArgv
7260
- ],
7261
- terminateTimeout: ctx.config.teardownTimeout,
7262
- concurrentTasksPerWorker: 1,
7263
- maxMemoryLimitBeforeRecycle: getMemoryLimit$1(ctx.config) || void 0
7264
- };
7265
- if (poolOptions.singleFork || !ctx.config.fileParallelism) {
7266
- options.maxThreads = 1;
7267
- options.minThreads = 1;
7268
- }
7269
- const pool = new Tinypool$1(options);
7270
- const runWithFiles = (name) => {
7271
- let id = 0;
7272
- async function runFiles(project, config, files, environment, invalidates = []) {
7273
- const paths = files.map((f) => f.filepath);
7274
- ctx.state.clearFiles(project, paths);
7275
- const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
7276
- const workerId = ++id;
7277
- const data = {
7278
- pool: "forks",
7279
- worker,
7280
- config,
7281
- files,
7282
- invalidates,
7283
- environment,
7284
- workerId,
7285
- projectName: project.name,
7286
- providedContext: project.getProvidedContext()
7287
- };
7288
- try {
7289
- await pool.run(data, { name, channel });
7290
- } catch (error) {
7291
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
7292
- ctx.state.addProcessTimeoutCause(
7293
- `Failed to terminate worker while running ${paths.join(", ")}.`
7294
- );
7295
- } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
7296
- ctx.state.cancelFiles(paths, project);
7297
- } else {
7298
- throw error;
7299
- }
7300
- } finally {
7301
- cleanup();
7302
- }
7303
- }
7304
- return async (specs, invalidates) => {
7305
- ctx.onCancel(() => pool.cancelPendingTasks());
7306
- const configs = /* @__PURE__ */ new Map();
7307
- const getConfig = (project) => {
7308
- if (configs.has(project)) {
7309
- return configs.get(project);
7310
- }
7311
- const _config = project.getSerializableConfig();
7312
- const config = wrapSerializableConfig(_config);
7313
- configs.set(project, config);
7314
- return config;
7315
- };
7316
- const filesByEnv = await groupFilesByEnv(specs);
7317
- const promises = Object.values(filesByEnv).flat();
7318
- const results = await Promise.allSettled(
7319
- promises.map(
7320
- ({ file, environment, project }) => runFiles(
7321
- project,
7322
- getConfig(project),
7323
- [file],
7324
- environment,
7325
- invalidates
7326
- )
7327
- )
7328
- );
7329
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
7330
- if (errors.length > 0) {
7331
- throw new AggregateError(
7332
- errors,
7333
- "Errors occurred while running tests. For more information, see serialized error."
7334
- );
7335
- }
7336
- };
7337
- };
7338
- return {
7339
- name: "vmForks",
7340
- runTests: runWithFiles("run"),
7341
- collectTests: runWithFiles("collect"),
7342
- close: () => pool.destroy()
7343
- };
7344
- }
7345
- function getMemoryLimit$1(config) {
7346
- const memory = nodeos.totalmem();
7347
- const limit = getWorkerMemoryLimit(config);
7348
- if (typeof memory === "number") {
7349
- return stringToBytes(limit, config.watch ? memory / 2 : memory);
7350
- }
7351
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") {
7352
- return stringToBytes(limit);
7353
- }
7354
- return null;
7153
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
7154
+ const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
7155
+ const poolOptions = ctx.config.poolOptions?.vmForks ?? {};
7156
+ const maxThreads = poolOptions.maxForks ?? ctx.config.maxWorkers ?? threadsCount;
7157
+ const minThreads = poolOptions.maxForks ?? ctx.config.minWorkers ?? threadsCount;
7158
+ const worker = resolve$1(ctx.distPath, "workers/vmForks.js");
7159
+ const options = {
7160
+ runtime: "child_process",
7161
+ filename: resolve$1(ctx.distPath, "worker.js"),
7162
+ maxThreads,
7163
+ minThreads,
7164
+ env,
7165
+ execArgv: [
7166
+ "--experimental-import-meta-resolve",
7167
+ "--experimental-vm-modules",
7168
+ "--require",
7169
+ suppressWarningsPath$1,
7170
+ ...poolOptions.execArgv ?? [],
7171
+ ...execArgv
7172
+ ],
7173
+ terminateTimeout: ctx.config.teardownTimeout,
7174
+ concurrentTasksPerWorker: 1,
7175
+ maxMemoryLimitBeforeRecycle: getMemoryLimit$1(ctx.config) || undefined
7176
+ };
7177
+ if (poolOptions.singleFork || !ctx.config.fileParallelism) {
7178
+ options.maxThreads = 1;
7179
+ options.minThreads = 1;
7180
+ }
7181
+ const pool = new Tinypool$1(options);
7182
+ const runWithFiles = (name) => {
7183
+ let id = 0;
7184
+ async function runFiles(project, config, files, environment, invalidates = []) {
7185
+ const paths = files.map((f) => f.filepath);
7186
+ ctx.state.clearFiles(project, paths);
7187
+ const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
7188
+ const workerId = ++id;
7189
+ const data = {
7190
+ pool: "forks",
7191
+ worker,
7192
+ config,
7193
+ files,
7194
+ invalidates,
7195
+ environment,
7196
+ workerId,
7197
+ projectName: project.name,
7198
+ providedContext: project.getProvidedContext()
7199
+ };
7200
+ try {
7201
+ await pool.run(data, {
7202
+ name,
7203
+ channel
7204
+ });
7205
+ } catch (error) {
7206
+ if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
7207
+ ctx.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
7208
+ } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
7209
+ ctx.state.cancelFiles(paths, project);
7210
+ } else {
7211
+ throw error;
7212
+ }
7213
+ } finally {
7214
+ cleanup();
7215
+ }
7216
+ }
7217
+ return async (specs, invalidates) => {
7218
+ ctx.onCancel(() => pool.cancelPendingTasks());
7219
+ const configs = new Map();
7220
+ const getConfig = (project) => {
7221
+ if (configs.has(project)) {
7222
+ return configs.get(project);
7223
+ }
7224
+ const _config = project.getSerializableConfig();
7225
+ const config = wrapSerializableConfig(_config);
7226
+ configs.set(project, config);
7227
+ return config;
7228
+ };
7229
+ const filesByEnv = await groupFilesByEnv(specs);
7230
+ const promises = Object.values(filesByEnv).flat();
7231
+ const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
7232
+ const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
7233
+ if (errors.length > 0) {
7234
+ throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
7235
+ }
7236
+ };
7237
+ };
7238
+ return {
7239
+ name: "vmForks",
7240
+ runTests: runWithFiles("run"),
7241
+ collectTests: runWithFiles("collect"),
7242
+ close: () => pool.destroy()
7243
+ };
7244
+ }
7245
+ function getMemoryLimit$1(config) {
7246
+ const memory = nodeos.totalmem();
7247
+ const limit = getWorkerMemoryLimit(config);
7248
+ if (typeof memory === "number") {
7249
+ return stringToBytes(limit, config.watch ? memory / 2 : memory);
7250
+ }
7251
+ if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") {
7252
+ return stringToBytes(limit);
7253
+ }
7254
+ return null;
7355
7255
  }
7356
7256
 
7357
7257
  const suppressWarningsPath = resolve$1(rootDir, "./suppress-warnings.cjs");
7358
7258
  function createWorkerChannel(project, collect) {
7359
- const channel = new MessageChannel();
7360
- const port = channel.port2;
7361
- const workerPort = channel.port1;
7362
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
7363
- eventNames: ["onCancel"],
7364
- post(v) {
7365
- port.postMessage(v);
7366
- },
7367
- on(fn) {
7368
- port.on("message", fn);
7369
- },
7370
- onTimeoutError(functionName) {
7371
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
7372
- }
7373
- });
7374
- project.ctx.onCancel((reason) => rpc.onCancel(reason));
7375
- return { workerPort, port };
7259
+ const channel = new MessageChannel();
7260
+ const port = channel.port2;
7261
+ const workerPort = channel.port1;
7262
+ const rpc = createBirpc(createMethodsRPC(project, { collect }), {
7263
+ eventNames: ["onCancel"],
7264
+ post(v) {
7265
+ port.postMessage(v);
7266
+ },
7267
+ on(fn) {
7268
+ port.on("message", fn);
7269
+ },
7270
+ onTimeoutError(functionName) {
7271
+ throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
7272
+ }
7273
+ });
7274
+ project.ctx.onCancel((reason) => rpc.onCancel(reason));
7275
+ return {
7276
+ workerPort,
7277
+ port
7278
+ };
7376
7279
  }
7377
7280
  function createVmThreadsPool(ctx, { execArgv, env }) {
7378
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
7379
- const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
7380
- const poolOptions = ctx.config.poolOptions?.vmThreads ?? {};
7381
- const maxThreads = poolOptions.maxThreads ?? ctx.config.maxWorkers ?? threadsCount;
7382
- const minThreads = poolOptions.minThreads ?? ctx.config.minWorkers ?? threadsCount;
7383
- const worker = resolve$1(ctx.distPath, "workers/vmThreads.js");
7384
- const options = {
7385
- filename: resolve$1(ctx.distPath, "worker.js"),
7386
- // TODO: investigate further
7387
- // It seems atomics introduced V8 Fatal Error https://github.com/vitest-dev/vitest/issues/1191
7388
- useAtomics: poolOptions.useAtomics ?? false,
7389
- maxThreads,
7390
- minThreads,
7391
- env,
7392
- execArgv: [
7393
- "--experimental-import-meta-resolve",
7394
- "--experimental-vm-modules",
7395
- "--require",
7396
- suppressWarningsPath,
7397
- ...poolOptions.execArgv ?? [],
7398
- ...execArgv
7399
- ],
7400
- terminateTimeout: ctx.config.teardownTimeout,
7401
- concurrentTasksPerWorker: 1,
7402
- maxMemoryLimitBeforeRecycle: getMemoryLimit(ctx.config) || void 0
7403
- };
7404
- if (poolOptions.singleThread || !ctx.config.fileParallelism) {
7405
- options.maxThreads = 1;
7406
- options.minThreads = 1;
7407
- }
7408
- const pool = new Tinypool$1(options);
7409
- const runWithFiles = (name) => {
7410
- let id = 0;
7411
- async function runFiles(project, config, files, environment, invalidates = []) {
7412
- const paths = files.map((f) => f.filepath);
7413
- ctx.state.clearFiles(project, paths);
7414
- const { workerPort, port } = createWorkerChannel(project, name === "collect");
7415
- const workerId = ++id;
7416
- const data = {
7417
- pool: "vmThreads",
7418
- worker,
7419
- port: workerPort,
7420
- config,
7421
- files: paths,
7422
- invalidates,
7423
- environment,
7424
- workerId,
7425
- projectName: project.name,
7426
- providedContext: project.getProvidedContext()
7427
- };
7428
- try {
7429
- await pool.run(data, { transferList: [workerPort], name });
7430
- } catch (error) {
7431
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
7432
- ctx.state.addProcessTimeoutCause(
7433
- `Failed to terminate worker while running ${paths.join(
7434
- ", "
7435
- )}.
7436
- See https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`
7437
- );
7438
- } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
7439
- ctx.state.cancelFiles(paths, project);
7440
- } else {
7441
- throw error;
7442
- }
7443
- } finally {
7444
- port.close();
7445
- workerPort.close();
7446
- }
7447
- }
7448
- return async (specs, invalidates) => {
7449
- ctx.onCancel(() => pool.cancelPendingTasks());
7450
- const configs = /* @__PURE__ */ new Map();
7451
- const getConfig = (project) => {
7452
- if (configs.has(project)) {
7453
- return configs.get(project);
7454
- }
7455
- const config = project.serializedConfig;
7456
- configs.set(project, config);
7457
- return config;
7458
- };
7459
- const filesByEnv = await groupFilesByEnv(specs);
7460
- const promises = Object.values(filesByEnv).flat();
7461
- const results = await Promise.allSettled(
7462
- promises.map(
7463
- ({ file, environment, project }) => runFiles(
7464
- project,
7465
- getConfig(project),
7466
- [file],
7467
- environment,
7468
- invalidates
7469
- )
7470
- )
7471
- );
7472
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
7473
- if (errors.length > 0) {
7474
- throw new AggregateError(
7475
- errors,
7476
- "Errors occurred while running tests. For more information, see serialized error."
7477
- );
7478
- }
7479
- };
7480
- };
7481
- return {
7482
- name: "vmThreads",
7483
- runTests: runWithFiles("run"),
7484
- collectTests: runWithFiles("collect"),
7485
- close: () => pool.destroy()
7486
- };
7281
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
7282
+ const threadsCount = ctx.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
7283
+ const poolOptions = ctx.config.poolOptions?.vmThreads ?? {};
7284
+ const maxThreads = poolOptions.maxThreads ?? ctx.config.maxWorkers ?? threadsCount;
7285
+ const minThreads = poolOptions.minThreads ?? ctx.config.minWorkers ?? threadsCount;
7286
+ const worker = resolve$1(ctx.distPath, "workers/vmThreads.js");
7287
+ const options = {
7288
+ filename: resolve$1(ctx.distPath, "worker.js"),
7289
+ useAtomics: poolOptions.useAtomics ?? false,
7290
+ maxThreads,
7291
+ minThreads,
7292
+ env,
7293
+ execArgv: [
7294
+ "--experimental-import-meta-resolve",
7295
+ "--experimental-vm-modules",
7296
+ "--require",
7297
+ suppressWarningsPath,
7298
+ ...poolOptions.execArgv ?? [],
7299
+ ...execArgv
7300
+ ],
7301
+ terminateTimeout: ctx.config.teardownTimeout,
7302
+ concurrentTasksPerWorker: 1,
7303
+ maxMemoryLimitBeforeRecycle: getMemoryLimit(ctx.config) || undefined
7304
+ };
7305
+ if (poolOptions.singleThread || !ctx.config.fileParallelism) {
7306
+ options.maxThreads = 1;
7307
+ options.minThreads = 1;
7308
+ }
7309
+ const pool = new Tinypool$1(options);
7310
+ const runWithFiles = (name) => {
7311
+ let id = 0;
7312
+ async function runFiles(project, config, files, environment, invalidates = []) {
7313
+ const paths = files.map((f) => f.filepath);
7314
+ ctx.state.clearFiles(project, paths);
7315
+ const { workerPort, port } = createWorkerChannel(project, name === "collect");
7316
+ const workerId = ++id;
7317
+ const data = {
7318
+ pool: "vmThreads",
7319
+ worker,
7320
+ port: workerPort,
7321
+ config,
7322
+ files: paths,
7323
+ invalidates,
7324
+ environment,
7325
+ workerId,
7326
+ projectName: project.name,
7327
+ providedContext: project.getProvidedContext()
7328
+ };
7329
+ try {
7330
+ await pool.run(data, {
7331
+ transferList: [workerPort],
7332
+ name
7333
+ });
7334
+ } catch (error) {
7335
+ if (error instanceof Error && /Failed to terminate worker/.test(error.message)) {
7336
+ ctx.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
7337
+ } else if (ctx.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) {
7338
+ ctx.state.cancelFiles(paths, project);
7339
+ } else {
7340
+ throw error;
7341
+ }
7342
+ } finally {
7343
+ port.close();
7344
+ workerPort.close();
7345
+ }
7346
+ }
7347
+ return async (specs, invalidates) => {
7348
+ ctx.onCancel(() => pool.cancelPendingTasks());
7349
+ const configs = new Map();
7350
+ const getConfig = (project) => {
7351
+ if (configs.has(project)) {
7352
+ return configs.get(project);
7353
+ }
7354
+ const config = project.serializedConfig;
7355
+ configs.set(project, config);
7356
+ return config;
7357
+ };
7358
+ const filesByEnv = await groupFilesByEnv(specs);
7359
+ const promises = Object.values(filesByEnv).flat();
7360
+ const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
7361
+ const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
7362
+ if (errors.length > 0) {
7363
+ throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
7364
+ }
7365
+ };
7366
+ };
7367
+ return {
7368
+ name: "vmThreads",
7369
+ runTests: runWithFiles("run"),
7370
+ collectTests: runWithFiles("collect"),
7371
+ close: () => pool.destroy()
7372
+ };
7487
7373
  }
7488
7374
  function getMemoryLimit(config) {
7489
- const memory = nodeos.totalmem();
7490
- const limit = getWorkerMemoryLimit(config);
7491
- if (typeof memory === "number") {
7492
- return stringToBytes(limit, config.watch ? memory / 2 : memory);
7493
- }
7494
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") {
7495
- return stringToBytes(limit);
7496
- }
7497
- return null;
7375
+ const memory = nodeos.totalmem();
7376
+ const limit = getWorkerMemoryLimit(config);
7377
+ if (typeof memory === "number") {
7378
+ return stringToBytes(limit, config.watch ? memory / 2 : memory);
7379
+ }
7380
+ if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") {
7381
+ return stringToBytes(limit);
7382
+ }
7383
+ return null;
7498
7384
  }
7499
7385
 
7500
7386
  const builtinPools = [
7501
- "forks",
7502
- "threads",
7503
- "browser",
7504
- "vmThreads",
7505
- "vmForks",
7506
- "typescript"
7387
+ "forks",
7388
+ "threads",
7389
+ "browser",
7390
+ "vmThreads",
7391
+ "vmForks",
7392
+ "typescript"
7507
7393
  ];
7508
7394
  function getDefaultPoolName(project) {
7509
- if (project.config.browser.enabled) {
7510
- return "browser";
7511
- }
7512
- return project.config.pool;
7395
+ if (project.config.browser.enabled) {
7396
+ return "browser";
7397
+ }
7398
+ return project.config.pool;
7513
7399
  }
7514
7400
  function getFilePoolName(project, file) {
7515
- for (const [glob, pool] of project.config.poolMatchGlobs) {
7516
- if (pool === "browser") {
7517
- throw new Error(
7518
- 'Since Vitest 0.31.0 "browser" pool is not supported in "poolMatchGlobs". You can create a workspace to run some of your tests in browser in parallel. Read more: https://vitest.dev/guide/workspace'
7519
- );
7520
- }
7521
- if (mm.isMatch(file, glob, { cwd: project.config.root })) {
7522
- return pool;
7523
- }
7524
- }
7525
- return getDefaultPoolName(project);
7401
+ for (const [glob, pool] of project.config.poolMatchGlobs) {
7402
+ if (pool === "browser") {
7403
+ throw new Error("Since Vitest 0.31.0 \"browser\" pool is not supported in \"poolMatchGlobs\". You can create a workspace to run some of your tests in browser in parallel. Read more: https://vitest.dev/guide/workspace");
7404
+ }
7405
+ if (mm.isMatch(file, glob, { cwd: project.config.root })) {
7406
+ return pool;
7407
+ }
7408
+ }
7409
+ return getDefaultPoolName(project);
7526
7410
  }
7527
7411
  function createPool(ctx) {
7528
- const pools = {
7529
- forks: null,
7530
- threads: null,
7531
- browser: null,
7532
- vmThreads: null,
7533
- vmForks: null,
7534
- typescript: null
7535
- };
7536
- const viteMajor = Number(version.split(".")[0]);
7537
- const potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
7538
- "production",
7539
- "development",
7540
- ...ctx.vite.config.resolve.conditions
7541
- ]);
7542
- const conditions = [...potentialConditions].filter((condition) => {
7543
- if (condition === "production") {
7544
- return ctx.vite.config.isProduction;
7545
- }
7546
- if (condition === "development") {
7547
- return !ctx.vite.config.isProduction;
7548
- }
7549
- return true;
7550
- }).map((condition) => {
7551
- if (viteMajor >= 6 && condition === "development|production") {
7552
- return ctx.vite.config.isProduction ? "production" : "development";
7553
- }
7554
- return condition;
7555
- }).flatMap((c) => ["--conditions", c]);
7556
- const execArgv = process.execArgv.filter(
7557
- (execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir")
7558
- );
7559
- async function executeTests(method, files, invalidate) {
7560
- const options = {
7561
- execArgv: [...execArgv, ...conditions],
7562
- env: {
7563
- TEST: "true",
7564
- VITEST: "true",
7565
- NODE_ENV: process.env.NODE_ENV || "test",
7566
- VITEST_MODE: ctx.config.watch ? "WATCH" : "RUN",
7567
- FORCE_TTY: isatty(1) ? "true" : "",
7568
- ...process.env,
7569
- ...ctx.config.env
7570
- }
7571
- };
7572
- if (isWindows) {
7573
- for (const name in options.env) {
7574
- options.env[name.toUpperCase()] = options.env[name];
7575
- }
7576
- }
7577
- const customPools = /* @__PURE__ */ new Map();
7578
- async function resolveCustomPool(filepath) {
7579
- if (customPools.has(filepath)) {
7580
- return customPools.get(filepath);
7581
- }
7582
- const pool = await ctx.runner.executeId(filepath);
7583
- if (typeof pool.default !== "function") {
7584
- throw new TypeError(
7585
- `Custom pool "${filepath}" must export a function as default export`
7586
- );
7587
- }
7588
- const poolInstance = await pool.default(ctx, options);
7589
- if (typeof poolInstance?.name !== "string") {
7590
- throw new TypeError(
7591
- `Custom pool "${filepath}" should return an object with "name" property`
7592
- );
7593
- }
7594
- if (typeof poolInstance?.[method] !== "function") {
7595
- throw new TypeError(
7596
- `Custom pool "${filepath}" should return an object with "${method}" method`
7597
- );
7598
- }
7599
- customPools.set(filepath, poolInstance);
7600
- return poolInstance;
7601
- }
7602
- const filesByPool = {
7603
- forks: [],
7604
- threads: [],
7605
- vmThreads: [],
7606
- vmForks: [],
7607
- typescript: []
7608
- };
7609
- const factories = {
7610
- vmThreads: () => createVmThreadsPool(ctx, options),
7611
- threads: () => createThreadsPool(ctx, options),
7612
- forks: () => createForksPool(ctx, options),
7613
- vmForks: () => createVmForksPool(ctx, options),
7614
- typescript: () => createTypecheckPool(ctx)
7615
- };
7616
- for (const spec of files) {
7617
- const { pool } = spec[2];
7618
- filesByPool[pool] ??= [];
7619
- filesByPool[pool].push(spec);
7620
- }
7621
- const Sequencer = ctx.config.sequence.sequencer;
7622
- const sequencer = new Sequencer(ctx);
7623
- async function sortSpecs(specs) {
7624
- if (ctx.config.shard) {
7625
- specs = await sequencer.shard(specs);
7626
- }
7627
- return sequencer.sort(specs);
7628
- }
7629
- await Promise.all(
7630
- Object.entries(filesByPool).map(async (entry) => {
7631
- const [pool, files2] = entry;
7632
- if (!files2.length) {
7633
- return null;
7634
- }
7635
- const specs = await sortSpecs(files2);
7636
- if (pool in factories) {
7637
- const factory = factories[pool];
7638
- pools[pool] ??= factory();
7639
- return pools[pool][method](specs, invalidate);
7640
- }
7641
- if (pool === "browser") {
7642
- pools[pool] ??= await (async () => {
7643
- const { createBrowserPool } = await import('@vitest/browser');
7644
- return createBrowserPool(ctx);
7645
- })();
7646
- return pools[pool][method](specs, invalidate);
7647
- }
7648
- const poolHandler = await resolveCustomPool(pool);
7649
- pools[poolHandler.name] ??= poolHandler;
7650
- return poolHandler[method](specs, invalidate);
7651
- })
7652
- );
7653
- }
7654
- return {
7655
- name: "default",
7656
- runTests: (files, invalidates) => executeTests("runTests", files, invalidates),
7657
- collectTests: (files, invalidates) => executeTests("collectTests", files, invalidates),
7658
- async close() {
7659
- await Promise.all(Object.values(pools).map((p) => p?.close?.()));
7660
- }
7661
- };
7412
+ const pools = {
7413
+ forks: null,
7414
+ threads: null,
7415
+ browser: null,
7416
+ vmThreads: null,
7417
+ vmForks: null,
7418
+ typescript: null
7419
+ };
7420
+ const viteMajor = Number(version.split(".")[0]);
7421
+ const potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
7422
+ "production",
7423
+ "development",
7424
+ ...ctx.vite.config.resolve.conditions
7425
+ ]);
7426
+ const conditions = [...potentialConditions].filter((condition) => {
7427
+ if (condition === "production") {
7428
+ return ctx.vite.config.isProduction;
7429
+ }
7430
+ if (condition === "development") {
7431
+ return !ctx.vite.config.isProduction;
7432
+ }
7433
+ return true;
7434
+ }).map((condition) => {
7435
+ if (viteMajor >= 6 && condition === "development|production") {
7436
+ return ctx.vite.config.isProduction ? "production" : "development";
7437
+ }
7438
+ return condition;
7439
+ }).flatMap((c) => ["--conditions", c]);
7440
+ const execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
7441
+ async function executeTests(method, files, invalidate) {
7442
+ const options = {
7443
+ execArgv: [...execArgv, ...conditions],
7444
+ env: {
7445
+ TEST: "true",
7446
+ VITEST: "true",
7447
+ NODE_ENV: process.env.NODE_ENV || "test",
7448
+ VITEST_MODE: ctx.config.watch ? "WATCH" : "RUN",
7449
+ FORCE_TTY: isatty(1) ? "true" : "",
7450
+ ...process.env,
7451
+ ...ctx.config.env
7452
+ }
7453
+ };
7454
+ if (isWindows) {
7455
+ for (const name in options.env) {
7456
+ options.env[name.toUpperCase()] = options.env[name];
7457
+ }
7458
+ }
7459
+ const customPools = new Map();
7460
+ async function resolveCustomPool(filepath) {
7461
+ if (customPools.has(filepath)) {
7462
+ return customPools.get(filepath);
7463
+ }
7464
+ const pool = await ctx.runner.executeId(filepath);
7465
+ if (typeof pool.default !== "function") {
7466
+ throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
7467
+ }
7468
+ const poolInstance = await pool.default(ctx, options);
7469
+ if (typeof poolInstance?.name !== "string") {
7470
+ throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
7471
+ }
7472
+ if (typeof poolInstance?.[method] !== "function") {
7473
+ throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
7474
+ }
7475
+ customPools.set(filepath, poolInstance);
7476
+ return poolInstance;
7477
+ }
7478
+ const filesByPool = {
7479
+ forks: [],
7480
+ threads: [],
7481
+ vmThreads: [],
7482
+ vmForks: [],
7483
+ typescript: []
7484
+ };
7485
+ const factories = {
7486
+ vmThreads: () => createVmThreadsPool(ctx, options),
7487
+ threads: () => createThreadsPool(ctx, options),
7488
+ forks: () => createForksPool(ctx, options),
7489
+ vmForks: () => createVmForksPool(ctx, options),
7490
+ typescript: () => createTypecheckPool(ctx)
7491
+ };
7492
+ for (const spec of files) {
7493
+ const { pool } = spec[2];
7494
+ filesByPool[pool] ??= [];
7495
+ filesByPool[pool].push(spec);
7496
+ }
7497
+ const Sequencer = ctx.config.sequence.sequencer;
7498
+ const sequencer = new Sequencer(ctx);
7499
+ async function sortSpecs(specs) {
7500
+ if (ctx.config.shard) {
7501
+ specs = await sequencer.shard(specs);
7502
+ }
7503
+ return sequencer.sort(specs);
7504
+ }
7505
+ await Promise.all(Object.entries(filesByPool).map(async (entry) => {
7506
+ const [pool, files] = entry;
7507
+ if (!files.length) {
7508
+ return null;
7509
+ }
7510
+ const specs = await sortSpecs(files);
7511
+ if (pool in factories) {
7512
+ const factory = factories[pool];
7513
+ pools[pool] ??= factory();
7514
+ return pools[pool][method](specs, invalidate);
7515
+ }
7516
+ if (pool === "browser") {
7517
+ pools[pool] ??= await (async () => {
7518
+ const { createBrowserPool } = await import('@vitest/browser');
7519
+ return createBrowserPool(ctx);
7520
+ })();
7521
+ return pools[pool][method](specs, invalidate);
7522
+ }
7523
+ const poolHandler = await resolveCustomPool(pool);
7524
+ pools[poolHandler.name] ??= poolHandler;
7525
+ return poolHandler[method](specs, invalidate);
7526
+ }));
7527
+ }
7528
+ return {
7529
+ name: "default",
7530
+ runTests: (files, invalidates) => executeTests("runTests", files, invalidates),
7531
+ collectTests: (files, invalidates) => executeTests("collectTests", files, invalidates),
7532
+ async close() {
7533
+ await Promise.all(Object.values(pools).map((p) => p?.close?.()));
7534
+ }
7535
+ };
7662
7536
  }
7663
7537
 
7664
7538
  class BaseSequencer {
7665
- ctx;
7666
- constructor(ctx) {
7667
- this.ctx = ctx;
7668
- }
7669
- // async so it can be extended by other sequelizers
7670
- async shard(files) {
7671
- const { config } = this.ctx;
7672
- const { index, count } = config.shard;
7673
- const shardSize = Math.ceil(files.length / count);
7674
- const shardStart = shardSize * (index - 1);
7675
- const shardEnd = shardSize * index;
7676
- return [...files].map((spec) => {
7677
- const fullPath = resolve(slash$1(config.root), slash$1(spec.moduleId));
7678
- const specPath = fullPath?.slice(config.root.length);
7679
- return {
7680
- spec,
7681
- hash: hash("sha1", specPath, "hex")
7682
- };
7683
- }).sort((a, b) => a.hash < b.hash ? -1 : a.hash > b.hash ? 1 : 0).slice(shardStart, shardEnd).map(({ spec }) => spec);
7684
- }
7685
- // async so it can be extended by other sequelizers
7686
- async sort(files) {
7687
- const cache = this.ctx.cache;
7688
- return [...files].sort((a, b) => {
7689
- const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
7690
- const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
7691
- const aState = cache.getFileTestResults(keyA);
7692
- const bState = cache.getFileTestResults(keyB);
7693
- if (!aState || !bState) {
7694
- const statsA = cache.getFileStats(keyA);
7695
- const statsB = cache.getFileStats(keyB);
7696
- if (!statsA || !statsB) {
7697
- return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
7698
- }
7699
- return statsB.size - statsA.size;
7700
- }
7701
- if (aState.failed && !bState.failed) {
7702
- return -1;
7703
- }
7704
- if (!aState.failed && bState.failed) {
7705
- return 1;
7706
- }
7707
- return bState.duration - aState.duration;
7708
- });
7709
- }
7539
+ ctx;
7540
+ constructor(ctx) {
7541
+ this.ctx = ctx;
7542
+ }
7543
+ async shard(files) {
7544
+ const { config } = this.ctx;
7545
+ const { index, count } = config.shard;
7546
+ const shardSize = Math.ceil(files.length / count);
7547
+ const shardStart = shardSize * (index - 1);
7548
+ const shardEnd = shardSize * index;
7549
+ return [...files].map((spec) => {
7550
+ const fullPath = resolve(slash$1(config.root), slash$1(spec.moduleId));
7551
+ const specPath = fullPath?.slice(config.root.length);
7552
+ return {
7553
+ spec,
7554
+ hash: hash("sha1", specPath, "hex")
7555
+ };
7556
+ }).sort((a, b) => a.hash < b.hash ? -1 : a.hash > b.hash ? 1 : 0).slice(shardStart, shardEnd).map(({ spec }) => spec);
7557
+ }
7558
+ async sort(files) {
7559
+ const cache = this.ctx.cache;
7560
+ return [...files].sort((a, b) => {
7561
+ const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
7562
+ const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
7563
+ const aState = cache.getFileTestResults(keyA);
7564
+ const bState = cache.getFileTestResults(keyB);
7565
+ if (!aState || !bState) {
7566
+ const statsA = cache.getFileStats(keyA);
7567
+ const statsB = cache.getFileStats(keyB);
7568
+ if (!statsA || !statsB) {
7569
+ return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
7570
+ }
7571
+ return statsB.size - statsA.size;
7572
+ }
7573
+ if (aState.failed && !bState.failed) {
7574
+ return -1;
7575
+ }
7576
+ if (!aState.failed && bState.failed) {
7577
+ return 1;
7578
+ }
7579
+ return bState.duration - aState.duration;
7580
+ });
7581
+ }
7710
7582
  }
7711
7583
 
7712
7584
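BaseSequencer.shard above hashes each spec path, sorts by the hash for a deterministic order, and then slices that order into the --shard index/count buckets. A standalone sketch of the slicing arithmetic (file names are hypothetical):

const files = ["a.test.ts", "b.test.ts", "c.test.ts", "d.test.ts", "e.test.ts"];
const count = 2; // --shard=1/2 and --shard=2/2
const shardSize = Math.ceil(files.length / count); // 3

for (let index = 1; index <= count; index++) {
  const shard = files.slice(shardSize * (index - 1), shardSize * index);
  console.log(index, shard); // 1 -> first three files, 2 -> remaining two
}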
  class RandomSequencer extends BaseSequencer {
7713
- async sort(files) {
7714
- const { sequence } = this.ctx.config;
7715
- return shuffle(files, sequence.seed);
7716
- }
7585
+ async sort(files) {
7586
+ const { sequence } = this.ctx.config;
7587
+ return shuffle(files, sequence.seed);
7588
+ }
7717
7589
  }
7718
7590
 
7719
7591
  function resolvePath(path, root) {
7720
- return normalize(
7721
- /* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve(root, path)
7722
- );
7592
+ return normalize(/* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve(root, path));
7723
7593
  }
7724
7594
  function parseInspector(inspect) {
7725
- if (typeof inspect === "boolean" || inspect === void 0) {
7726
- return {};
7727
- }
7728
- if (typeof inspect === "number") {
7729
- return { port: inspect };
7730
- }
7731
- if (inspect.match(/https?:\//)) {
7732
- throw new Error(
7733
- `Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`
7734
- );
7735
- }
7736
- const [host, port] = inspect.split(":");
7737
- if (!port) {
7738
- return { host };
7739
- }
7740
- return { host, port: Number(port) || defaultInspectPort };
7595
+ if (typeof inspect === "boolean" || inspect === undefined) {
7596
+ return {};
7597
+ }
7598
+ if (typeof inspect === "number") {
7599
+ return { port: inspect };
7600
+ }
7601
+ if (inspect.match(/https?:\//)) {
7602
+ throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
7603
+ }
7604
+ const [host, port] = inspect.split(":");
7605
+ if (!port) {
7606
+ return { host };
7607
+ }
7608
+ return {
7609
+ host,
7610
+ port: Number(port) || defaultInspectPort
7611
+ };
7741
7612
  }
7742
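parseInspector above accepts a boolean, a port number, or a "host:port" string and rejects URLs. The mapping, sketched with hypothetical inputs:

// true / undefined     -> {}
// 9230                 -> { port: 9230 }
// "127.0.0.1"          -> { host: "127.0.0.1" }
// "127.0.0.1:9230"     -> { host: "127.0.0.1", port: 9230 }
// "http://127.0.0.1"   -> throws: the host cannot be a URL
const [host, port] = "127.0.0.1:9230".split(":");
console.log({ host, port: Number(port) }); // { host: '127.0.0.1', port: 9230 }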
- function resolveApiServerConfig(options, defaultPort2) {
7743
- let api;
7744
- if (options.ui && !options.api) {
7745
- api = { port: defaultPort2 };
7746
- } else if (options.api === true) {
7747
- api = { port: defaultPort2 };
7748
- } else if (typeof options.api === "number") {
7749
- api = { port: options.api };
7750
- }
7751
- if (typeof options.api === "object") {
7752
- if (api) {
7753
- if (options.api.port) {
7754
- api.port = options.api.port;
7755
- }
7756
- if (options.api.strictPort) {
7757
- api.strictPort = options.api.strictPort;
7758
- }
7759
- if (options.api.host) {
7760
- api.host = options.api.host;
7761
- }
7762
- } else {
7763
- api = { ...options.api };
7764
- }
7765
- }
7766
- if (api) {
7767
- if (!api.port && !api.middlewareMode) {
7768
- api.port = defaultPort2;
7769
- }
7770
- } else {
7771
- api = { middlewareMode: true };
7772
- }
7773
- return api;
7613
+ function resolveApiServerConfig(options, defaultPort) {
7614
+ let api;
7615
+ if (options.ui && !options.api) {
7616
+ api = { port: defaultPort };
7617
+ } else if (options.api === true) {
7618
+ api = { port: defaultPort };
7619
+ } else if (typeof options.api === "number") {
7620
+ api = { port: options.api };
7621
+ }
7622
+ if (typeof options.api === "object") {
7623
+ if (api) {
7624
+ if (options.api.port) {
7625
+ api.port = options.api.port;
7626
+ }
7627
+ if (options.api.strictPort) {
7628
+ api.strictPort = options.api.strictPort;
7629
+ }
7630
+ if (options.api.host) {
7631
+ api.host = options.api.host;
7632
+ }
7633
+ } else {
7634
+ api = { ...options.api };
7635
+ }
7636
+ }
7637
+ if (api) {
7638
+ if (!api.port && !api.middlewareMode) {
7639
+ api.port = defaultPort;
7640
+ }
7641
+ } else {
7642
+ api = { middlewareMode: true };
7643
+ }
7644
+ return api;
7774
7645
  }
7775
7646
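resolveApiServerConfig above normalizes the ui/api options into a server config: --ui or api: true pick the default port, a number becomes the port, an object is merged on top, and with no API at all the server runs in middleware mode. A sketch of the object branch, with an assumed default port of 51204:

const defaultPort = 51204; // assumed default, for illustration only
const options = { api: { host: "0.0.0.0" } };

// { ui: true } or { api: true } -> { port: defaultPort }
// { api: 3000 }                 -> { port: 3000 }
// {}                            -> { middlewareMode: true }
const api = { ...options.api };
if (!api.port && !api.middlewareMode) {
  api.port = defaultPort;
}
console.log(api); // { host: '0.0.0.0', port: 51204 }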
  function resolveInlineWorkerOption(value) {
7776
- if (typeof value === "string" && value.trim().endsWith("%")) {
7777
- return getWorkersCountByPercentage(value);
7778
- } else {
7779
- return Number(value);
7780
- }
7647
+ if (typeof value === "string" && value.trim().endsWith("%")) {
7648
+ return getWorkersCountByPercentage(value);
7649
+ } else {
7650
+ return Number(value);
7651
+ }
7781
7652
  }
7782
7653
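resolveInlineWorkerOption above lets maxWorkers/minWorkers be written as a percentage of the available CPUs. The exact rounding lives in getWorkersCountByPercentage; this sketch assumes a simple floor and an illustrative core count:

const cpus = 12; // illustrative core count
const value = "50%";
const workers = value.trim().endsWith("%")
  ? Math.floor((Number.parseInt(value, 10) / 100) * cpus) // 6
  : Number(value);
console.log(workers); // 6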
  function resolveConfig$1(vitest, options, viteConfig) {
7783
- const mode = vitest.mode;
7784
- const logger = vitest.logger;
7785
- if (options.dom) {
7786
- if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") {
7787
- logger.console.warn(
7788
- c.yellow(
7789
- `${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`
7790
- )
7791
- );
7792
- }
7793
- options.environment = "happy-dom";
7794
- }
7795
- const resolved = {
7796
- ...configDefaults,
7797
- ...options,
7798
- root: viteConfig.root,
7799
- mode
7800
- };
7801
- resolved.project = toArray(resolved.project);
7802
- resolved.provide ??= {};
7803
- const inspector = resolved.inspect || resolved.inspectBrk;
7804
- resolved.inspector = {
7805
- ...resolved.inspector,
7806
- ...parseInspector(inspector),
7807
- enabled: !!inspector,
7808
- waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk
7809
- };
7810
- if (viteConfig.base !== "/") {
7811
- resolved.base = viteConfig.base;
7812
- }
7813
- resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true;
7814
- if (options.shard) {
7815
- if (resolved.watch) {
7816
- throw new Error("You cannot use --shard option with enabled watch");
7817
- }
7818
- const [indexString, countString] = options.shard.split("/");
7819
- const index = Math.abs(Number.parseInt(indexString, 10));
7820
- const count = Math.abs(Number.parseInt(countString, 10));
7821
- if (Number.isNaN(count) || count <= 0) {
7822
- throw new Error("--shard <count> must be a positive number");
7823
- }
7824
- if (Number.isNaN(index) || index <= 0 || index > count) {
7825
- throw new Error(
7826
- "--shard <index> must be a positive number less then <count>"
7827
- );
7828
- }
7829
- resolved.shard = { index, count };
7830
- }
7831
- if (resolved.standalone && !resolved.watch) {
7832
- throw new Error(`Vitest standalone mode requires --watch`);
7833
- }
7834
- if (resolved.mergeReports && resolved.watch) {
7835
- throw new Error(`Cannot merge reports with --watch enabled`);
7836
- }
7837
- if (resolved.maxWorkers) {
7838
- resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
7839
- }
7840
- if (resolved.minWorkers) {
7841
- resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
7842
- }
7843
- resolved.browser ??= {};
7844
- resolved.fileParallelism ??= mode !== "benchmark";
7845
- if (!resolved.fileParallelism) {
7846
- resolved.maxWorkers = 1;
7847
- resolved.minWorkers = 1;
7848
- }
7849
- if (resolved.maxConcurrency === 0) {
7850
- logger.console.warn(
7851
- c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)
7852
- );
7853
- resolved.maxConcurrency = configDefaults.maxConcurrency;
7854
- }
7855
- if (resolved.inspect || resolved.inspectBrk) {
7856
- const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread;
7857
- const isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
7858
- if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
7859
- const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
7860
- throw new Error(
7861
- `You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`
7862
- );
7863
- }
7864
- }
7865
- const browser = resolved.browser;
7866
- if (browser.enabled) {
7867
- if (!browser.name && !browser.instances) {
7868
- browser.enabled = false;
7869
- } else {
7870
- const instances = browser.instances;
7871
- if (browser.name && browser.instances) {
7872
- browser.instances = browser.instances.filter((instance) => instance.browser === browser.name);
7873
- }
7874
- if (browser.instances && !browser.instances.length) {
7875
- throw new Error([
7876
- `"browser.instances" was set in the config, but the array is empty. Define at least one browser config.`,
7877
- browser.name && instances?.length ? ` The "browser.name" was set to "${browser.name}" which filtered all configs (${instances.map((c2) => c2.browser).join(", ")}). Did you mean to use another name?` : ""
7878
- ].join(""));
7879
- }
7880
- }
7881
- }
7882
- const playwrightChromiumOnly = isPlaywrightChromiumOnly(vitest, resolved);
7883
- if (browser.enabled && !playwrightChromiumOnly) {
7884
- const browserConfig = {
7885
- browser: {
7886
- provider: browser.provider,
7887
- name: browser.name,
7888
- instances: browser.instances?.map((i) => ({ browser: i.browser }))
7889
- }
7890
- };
7891
- if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
7892
- throw new Error(
7893
- `@vitest/coverage-v8 does not work with
7894
- ${JSON.stringify(browserConfig, null, 2)}
7895
-
7896
- Use either:
7897
- ${JSON.stringify({ browser: { provider: "playwright", instances: [{ browser: "chromium" }] } }, null, 2)}
7898
-
7899
- ...or change your coverage provider to:
7900
- ${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}
7901
- `
7902
- );
7903
- }
7904
- if (resolved.inspect || resolved.inspectBrk) {
7905
- const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
7906
- throw new Error(
7907
- `${inspectOption} does not work with
7908
- ${JSON.stringify(browserConfig, null, 2)}
7909
-
7910
- Use either:
7911
- ${JSON.stringify({ browser: { provider: "playwright", instances: [{ browser: "chromium" }] } }, null, 2)}
7912
-
7913
- ...or disable ${inspectOption}
7914
- `
7915
- );
7916
- }
7917
- }
7918
- resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter);
7919
- if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
7920
- const reportsDirectory = resolve(
7921
- resolved.root,
7922
- resolved.coverage.reportsDirectory
7923
- );
7924
- if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) {
7925
- throw new Error(
7926
- `You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`
7927
- );
7928
- }
7929
- }
7930
- if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) {
7931
- resolved.coverage.customProviderModule = resolvePath(
7932
- resolved.coverage.customProviderModule,
7933
- resolved.root
7934
- );
7935
- }
7936
- resolved.expect ??= {};
7937
- resolved.deps ??= {};
7938
- resolved.deps.moduleDirectories ??= [];
7939
- resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map(
7940
- (dir) => {
7941
- if (!dir.startsWith("/")) {
7942
- dir = `/${dir}`;
7943
- }
7944
- if (!dir.endsWith("/")) {
7945
- dir += "/";
7946
- }
7947
- return normalize(dir);
7948
- }
7949
- );
7950
- if (!resolved.deps.moduleDirectories.includes("/node_modules/")) {
7951
- resolved.deps.moduleDirectories.push("/node_modules/");
7952
- }
7953
- resolved.deps.optimizer ??= {};
7954
- resolved.deps.optimizer.ssr ??= {};
7955
- resolved.deps.optimizer.ssr.enabled ??= true;
7956
- resolved.deps.optimizer.web ??= {};
7957
- resolved.deps.optimizer.web.enabled ??= true;
7958
- resolved.deps.web ??= {};
7959
- resolved.deps.web.transformAssets ??= true;
7960
- resolved.deps.web.transformCss ??= true;
7961
- resolved.deps.web.transformGlobPattern ??= [];
7962
- resolved.setupFiles = toArray(resolved.setupFiles || []).map(
7963
- (file) => resolvePath(file, resolved.root)
7964
- );
7965
- resolved.globalSetup = toArray(resolved.globalSetup || []).map(
7966
- (file) => resolvePath(file, resolved.root)
7967
- );
7968
- resolved.coverage.exclude.push(
7969
- ...resolved.setupFiles.map(
7970
- (file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(
7971
- resolved.root,
7972
- file
7973
- )}`
7974
- )
7975
- );
7976
- resolved.coverage.exclude.push(...resolved.include);
7977
- resolved.forceRerunTriggers = [
7978
- ...resolved.forceRerunTriggers,
7979
- ...resolved.setupFiles
7980
- ];
7981
- resolved.server ??= {};
7982
- resolved.server.deps ??= {};
7983
- const deprecatedDepsOptions = ["inline", "external", "fallbackCJS"];
7984
- deprecatedDepsOptions.forEach((option) => {
7985
- if (resolved.deps[option] === void 0) {
7986
- return;
7987
- }
7988
- if (option === "fallbackCJS") {
7989
- logger.console.warn(
7990
- c.yellow(
7991
- `${c.inverse(
7992
- c.yellow(" Vitest ")
7993
- )} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`
7994
- )
7995
- );
7996
- } else {
7997
- const transformMode = resolved.environment === "happy-dom" || resolved.environment === "jsdom" ? "web" : "ssr";
7998
- logger.console.warn(
7999
- c.yellow(
8000
- `${c.inverse(
8001
- c.yellow(" Vitest ")
8002
- )} "deps.${option}" is deprecated. If you rely on vite-node directly, use "server.deps.${option}" instead. Otherwise, consider using "deps.optimizer.${transformMode}.${option === "external" ? "exclude" : "include"}"`
8003
- )
8004
- );
8005
- }
8006
- if (resolved.server.deps[option] === void 0) {
8007
- resolved.server.deps[option] = resolved.deps[option];
8008
- }
8009
- });
8010
- if (resolved.cliExclude) {
8011
- resolved.exclude.push(...resolved.cliExclude);
8012
- }
8013
- if (resolved.server.deps.inline !== true) {
8014
- const ssrOptions = viteConfig.ssr;
8015
- if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null) {
8016
- resolved.server.deps.inline = true;
8017
- } else {
8018
- resolved.server.deps.inline ??= [];
8019
- resolved.server.deps.inline.push(...extraInlineDeps);
8020
- }
8021
- }
8022
- resolved.server.deps.inlineFiles ??= [];
8023
- resolved.server.deps.inlineFiles.push(...resolved.setupFiles);
8024
- resolved.server.deps.moduleDirectories ??= [];
8025
- resolved.server.deps.moduleDirectories.push(
8026
- ...resolved.deps.moduleDirectories
8027
- );
8028
- if (resolved.runner) {
8029
- resolved.runner = resolvePath(resolved.runner, resolved.root);
8030
- }
8031
- if (resolved.snapshotEnvironment) {
8032
- resolved.snapshotEnvironment = resolvePath(
8033
- resolved.snapshotEnvironment,
8034
- resolved.root
8035
- );
8036
- }
8037
- resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0;
8038
- if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
8039
- resolved.snapshotFormat.plugins = [];
8040
- }
8041
- const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT;
8042
- resolved.snapshotOptions = {
8043
- expand: resolved.expandSnapshotDiff ?? false,
8044
- snapshotFormat: resolved.snapshotFormat || {},
8045
- updateSnapshot: isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new",
8046
- resolveSnapshotPath: options.resolveSnapshotPath,
8047
- // resolved inside the worker
8048
- snapshotEnvironment: null
8049
- };
8050
- resolved.snapshotSerializers ??= [];
8051
- resolved.snapshotSerializers = resolved.snapshotSerializers.map(
8052
- (file) => resolvePath(file, resolved.root)
8053
- );
8054
- resolved.forceRerunTriggers.push(...resolved.snapshotSerializers);
8055
- if (options.resolveSnapshotPath) {
8056
- delete resolved.resolveSnapshotPath;
8057
- }
8058
- resolved.pool ??= "threads";
8059
- if (process.env.VITEST_MAX_THREADS) {
8060
- resolved.poolOptions = {
8061
- ...resolved.poolOptions,
8062
- threads: {
8063
- ...resolved.poolOptions?.threads,
8064
- maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
8065
- },
8066
- vmThreads: {
8067
- ...resolved.poolOptions?.vmThreads,
8068
- maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
8069
- }
8070
- };
8071
- }
8072
- if (process.env.VITEST_MIN_THREADS) {
8073
- resolved.poolOptions = {
8074
- ...resolved.poolOptions,
8075
- threads: {
8076
- ...resolved.poolOptions?.threads,
8077
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
8078
- },
8079
- vmThreads: {
8080
- ...resolved.poolOptions?.vmThreads,
8081
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
8082
- }
8083
- };
8084
- }
8085
- if (process.env.VITEST_MAX_FORKS) {
8086
- resolved.poolOptions = {
8087
- ...resolved.poolOptions,
8088
- forks: {
8089
- ...resolved.poolOptions?.forks,
8090
- maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
8091
- },
8092
- vmForks: {
8093
- ...resolved.poolOptions?.vmForks,
8094
- maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
8095
- }
8096
- };
8097
- }
8098
- if (process.env.VITEST_MIN_FORKS) {
8099
- resolved.poolOptions = {
8100
- ...resolved.poolOptions,
8101
- forks: {
8102
- ...resolved.poolOptions?.forks,
8103
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
8104
- },
8105
- vmForks: {
8106
- ...resolved.poolOptions?.vmForks,
8107
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
8108
- }
8109
- };
8110
- }
8111
- const poolThreadsOptions = [
8112
- ["threads", "minThreads"],
8113
- ["threads", "maxThreads"],
8114
- ["vmThreads", "minThreads"],
8115
- ["vmThreads", "maxThreads"]
8116
- ];
8117
- for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) {
8118
- if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) {
8119
- resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
8120
- }
8121
- }
8122
- const poolForksOptions = [
8123
- ["forks", "minForks"],
8124
- ["forks", "maxForks"],
8125
- ["vmForks", "minForks"],
8126
- ["vmForks", "maxForks"]
8127
- ];
8128
- for (const [poolOptionKey, workerOptionKey] of poolForksOptions) {
8129
- if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) {
8130
- resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
8131
- }
8132
- }
8133
- if (typeof resolved.workspace === "string") {
8134
- resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
8135
- }
8136
- if (!builtinPools.includes(resolved.pool)) {
8137
- resolved.pool = resolvePath(resolved.pool, resolved.root);
8138
- }
8139
- if (resolved.poolMatchGlobs) {
8140
- logger.warn(
8141
- c.yellow(
8142
- `${c.inverse(
8143
- c.yellow(" Vitest ")
8144
- )} "poolMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`
8145
- )
8146
- );
8147
- }
8148
- resolved.poolMatchGlobs = (resolved.poolMatchGlobs || []).map(
8149
- ([glob, pool]) => {
8150
- if (!builtinPools.includes(pool)) {
8151
- pool = resolvePath(pool, resolved.root);
8152
- }
8153
- return [glob, pool];
8154
- }
8155
- );
8156
- if (mode === "benchmark") {
8157
- resolved.benchmark = {
8158
- ...benchmarkConfigDefaults,
8159
- ...resolved.benchmark
8160
- };
8161
- resolved.coverage.enabled = false;
8162
- resolved.typecheck.enabled = false;
8163
- resolved.include = resolved.benchmark.include;
8164
- resolved.exclude = resolved.benchmark.exclude;
8165
- resolved.includeSource = resolved.benchmark.includeSource;
8166
- const reporters = Array.from(
8167
- /* @__PURE__ */ new Set([
8168
- ...toArray(resolved.benchmark.reporters),
8169
- // @ts-expect-error reporter is CLI flag
8170
- ...toArray(options.reporter)
8171
- ])
8172
- ).filter(Boolean);
8173
- if (reporters.length) {
8174
- resolved.benchmark.reporters = reporters;
8175
- } else {
8176
- resolved.benchmark.reporters = ["default"];
8177
- }
8178
- if (options.outputFile) {
8179
- resolved.benchmark.outputFile = options.outputFile;
8180
- }
8181
- if (options.compare) {
8182
- resolved.benchmark.compare = options.compare;
8183
- }
8184
- if (options.outputJson) {
8185
- resolved.benchmark.outputJson = options.outputJson;
8186
- }
8187
- }
8188
- if (typeof resolved.diff === "string") {
8189
- resolved.diff = resolvePath(resolved.diff, resolved.root);
8190
- resolved.forceRerunTriggers.push(resolved.diff);
8191
- }
8192
- const api = resolveApiServerConfig(options, defaultPort);
8193
- resolved.api = { ...api, token: crypto.randomUUID() };
8194
- if (options.related) {
8195
- resolved.related = toArray(options.related).map(
8196
- (file) => resolve(resolved.root, file)
8197
- );
8198
- }
8199
- if (options.reporters) {
8200
- if (!Array.isArray(options.reporters)) {
8201
- if (typeof options.reporters === "string") {
8202
- resolved.reporters = [[options.reporters, {}]];
8203
- } else {
8204
- resolved.reporters = [options.reporters];
8205
- }
8206
- } else {
8207
- resolved.reporters = [];
8208
- for (const reporter of options.reporters) {
8209
- if (Array.isArray(reporter)) {
8210
- resolved.reporters.push([reporter[0], reporter[1] || {}]);
8211
- } else if (typeof reporter === "string") {
8212
- resolved.reporters.push([reporter, {}]);
8213
- } else {
8214
- resolved.reporters.push(reporter);
8215
- }
8216
- }
8217
- }
8218
- }
8219
- if (mode !== "benchmark") {
8220
- const reportersFromCLI = resolved.reporter;
8221
- const cliReporters = toArray(reportersFromCLI || []).map(
8222
- (reporter) => {
8223
- if (/^\.\.?\//.test(reporter)) {
8224
- return resolve(process.cwd(), reporter);
8225
- }
8226
- return reporter;
8227
- }
8228
- );
8229
- if (cliReporters.length) {
8230
- resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
8231
- }
8232
- }
8233
- if (!resolved.reporters.length) {
8234
- resolved.reporters.push(["default", {}]);
8235
- if (process.env.GITHUB_ACTIONS === "true") {
8236
- resolved.reporters.push(["github-actions", {}]);
8237
- }
8238
- }
8239
- if (resolved.changed) {
8240
- resolved.passWithNoTests ??= true;
8241
- }
8242
- resolved.css ??= {};
8243
- if (typeof resolved.css === "object") {
8244
- resolved.css.modules ??= {};
8245
- resolved.css.modules.classNameStrategy ??= "stable";
8246
- }
8247
- if (resolved.cache !== false) {
8248
- let cacheDir = VitestCache.resolveCacheDir(
8249
- "",
8250
- viteConfig.cacheDir,
8251
- resolved.name
8252
- );
8253
- if (resolved.cache && resolved.cache.dir) {
8254
- logger.console.warn(
8255
- c.yellow(
8256
- `${c.inverse(
8257
- c.yellow(" Vitest ")
8258
- )} "cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache director. Note caches will be written to "cacheDir/vitest"`
8259
- )
8260
- );
8261
- cacheDir = VitestCache.resolveCacheDir(
8262
- resolved.root,
8263
- resolved.cache.dir,
8264
- resolved.name
8265
- );
8266
- }
8267
- resolved.cache = { dir: cacheDir };
8268
- }
8269
- resolved.sequence ??= {};
8270
- if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
8271
- const { files, tests } = resolved.sequence.shuffle;
8272
- resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer;
8273
- resolved.sequence.shuffle = tests;
8274
- }
8275
- if (!resolved.sequence?.sequencer) {
8276
- resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer;
8277
- }
8278
- resolved.sequence.hooks ??= "stack";
8279
- if (resolved.sequence.sequencer === RandomSequencer) {
8280
- resolved.sequence.seed ??= Date.now();
8281
- }
8282
- resolved.typecheck = {
8283
- ...configDefaults.typecheck,
8284
- ...resolved.typecheck
8285
- };
8286
- if (resolved.environmentMatchGlobs) {
8287
- logger.warn(
8288
- c.yellow(
8289
- `${c.inverse(
8290
- c.yellow(" Vitest ")
8291
- )} "environmentMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`
8292
- )
8293
- );
8294
- }
8295
- resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map(
8296
- (i) => [resolve(resolved.root, i[0]), i[1]]
8297
- );
8298
- resolved.typecheck ??= {};
8299
- resolved.typecheck.enabled ??= false;
8300
- if (resolved.typecheck.enabled) {
8301
- logger.console.warn(
8302
- c.yellow(
8303
- "Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."
8304
- )
8305
- );
8306
- }
8307
- resolved.browser ??= {};
8308
- resolved.browser.enabled ??= false;
8309
- resolved.browser.headless ??= isCI;
8310
- resolved.browser.isolate ??= true;
8311
- resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark";
8312
- resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI;
8313
- if (resolved.browser.screenshotDirectory) {
8314
- resolved.browser.screenshotDirectory = resolve(
8315
- resolved.root,
8316
- resolved.browser.screenshotDirectory
8317
- );
8318
- }
8319
- const isPreview = resolved.browser.provider === "preview";
8320
- if (isPreview && resolved.browser.screenshotFailures === true) {
8321
- console.warn(c.yellow(
8322
- [
8323
- `Browser provider "preview" doesn't support screenshots, `,
8324
- `so "browser.screenshotFailures" option is forcefully disabled. `,
8325
- `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
8326
- ].join("")
8327
- ));
8328
- resolved.browser.screenshotFailures = false;
8329
- } else {
8330
- resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
8331
- }
8332
- resolved.browser.viewport ??= {};
8333
- resolved.browser.viewport.width ??= 414;
8334
- resolved.browser.viewport.height ??= 896;
8335
- resolved.browser.locators ??= {};
8336
- resolved.browser.locators.testIdAttribute ??= "data-testid";
8337
- if (resolved.browser.enabled && provider === "stackblitz") {
8338
- resolved.browser.provider = "preview";
8339
- }
8340
- resolved.browser.api = resolveApiServerConfig(
8341
- resolved.browser,
8342
- defaultBrowserPort
8343
- ) || {
8344
- port: defaultBrowserPort
8345
- };
8346
- if (resolved.browser.enabled) {
8347
- if (resolved.browser.ui) {
8348
- resolved.includeTaskLocation ??= true;
8349
- }
8350
- } else if (resolved.ui) {
8351
- resolved.includeTaskLocation ??= true;
8352
- }
8353
- const htmlReporter = toArray(resolved.reporters).some((reporter) => {
8354
- if (Array.isArray(reporter)) {
8355
- return reporter[0] === "html";
8356
- }
8357
- return false;
8358
- });
8359
- if (htmlReporter) {
8360
- resolved.includeTaskLocation ??= true;
8361
- }
8362
- resolved.testTransformMode ??= {};
8363
- resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3;
8364
- resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4;
8365
- return resolved;
7654
+ const mode = vitest.mode;
7655
+ const logger = vitest.logger;
7656
+ if (options.dom) {
7657
+ if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") {
7658
+ logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`));
7659
+ }
7660
+ options.environment = "happy-dom";
7661
+ }
7662
+ const resolved = {
7663
+ ...configDefaults,
7664
+ ...options,
7665
+ root: viteConfig.root,
7666
+ mode
7667
+ };
7668
+ resolved.project = toArray(resolved.project);
7669
+ resolved.provide ??= {};
7670
+ const inspector = resolved.inspect || resolved.inspectBrk;
7671
+ resolved.inspector = {
7672
+ ...resolved.inspector,
7673
+ ...parseInspector(inspector),
7674
+ enabled: !!inspector,
7675
+ waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk
7676
+ };
7677
+ if (viteConfig.base !== "/") {
7678
+ resolved.base = viteConfig.base;
7679
+ }
7680
+ resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true;
7681
+ if (options.shard) {
7682
+ if (resolved.watch) {
7683
+ throw new Error("You cannot use --shard option with enabled watch");
7684
+ }
7685
+ const [indexString, countString] = options.shard.split("/");
7686
+ const index = Math.abs(Number.parseInt(indexString, 10));
7687
+ const count = Math.abs(Number.parseInt(countString, 10));
7688
+ if (Number.isNaN(count) || count <= 0) {
7689
+ throw new Error("--shard <count> must be a positive number");
7690
+ }
7691
+ if (Number.isNaN(index) || index <= 0 || index > count) {
7692
+ throw new Error("--shard <index> must be a positive number less then <count>");
7693
+ }
7694
+ resolved.shard = {
7695
+ index,
7696
+ count
7697
+ };
7698
+ }
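The `--shard` handling above expects an `index/count` pair and rejects anything outside `1 <= index <= count`. A minimal standalone sketch of the same parsing; the `parseShard` name is illustrative and not an export of this package:

```js
// Minimal sketch of the "--shard index/count" validation shown above.
function parseShard(shard) {
  const [indexString, countString] = shard.split("/");
  const index = Math.abs(Number.parseInt(indexString, 10));
  const count = Math.abs(Number.parseInt(countString, 10));
  if (Number.isNaN(count) || count <= 0) {
    throw new Error("--shard <count> must be a positive number");
  }
  if (Number.isNaN(index) || index <= 0 || index > count) {
    throw new Error("--shard <index> must be a positive number less than or equal to <count>");
  }
  return { index, count };
}

console.log(parseShard("2/4")); // { index: 2, count: 4 }
```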
7699
+ if (resolved.standalone && !resolved.watch) {
7700
+ throw new Error(`Vitest standalone mode requires --watch`);
7701
+ }
7702
+ if (resolved.mergeReports && resolved.watch) {
7703
+ throw new Error(`Cannot merge reports with --watch enabled`);
7704
+ }
7705
+ if (resolved.maxWorkers) {
7706
+ resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
7707
+ }
7708
+ if (resolved.minWorkers) {
7709
+ resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
7710
+ }
7711
+ resolved.browser ??= {};
7712
+ resolved.fileParallelism ??= mode !== "benchmark";
7713
+ if (!resolved.fileParallelism) {
7714
+ resolved.maxWorkers = 1;
7715
+ resolved.minWorkers = 1;
7716
+ }
7717
+ if (resolved.maxConcurrency === 0) {
7718
+ logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`));
7719
+ resolved.maxConcurrency = configDefaults.maxConcurrency;
7720
+ }
7721
+ if (resolved.inspect || resolved.inspectBrk) {
7722
+ const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread;
7723
+ const isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
7724
+ if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
7725
+ const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
7726
+ throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
7727
+ }
7728
+ }
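The check above only allows `--inspect`/`--inspect-brk` when tests run in a single worker, either via `--no-file-parallelism` or a single-thread/single-fork pool. An illustrative config that satisfies it (values are examples, not taken from the package):

```js
// vitest.config.js sketch (illustrative): a single-threaded pool passes the
// inspector check above; running with `--no-file-parallelism` also works.
export default {
  test: {
    pool: "threads",
    poolOptions: {
      threads: { singleThread: true },
    },
  },
};
```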
7729
+ const browser = resolved.browser;
7730
+ if (browser.enabled) {
7731
+ if (!browser.name && !browser.instances) {
7732
+ browser.enabled = false;
7733
+ } else {
7734
+ const instances = browser.instances;
7735
+ if (browser.name && browser.instances) {
7736
+ browser.instances = browser.instances.filter((instance) => instance.browser === browser.name);
7737
+ }
7738
+ if (browser.instances && !browser.instances.length) {
7739
+ throw new Error([`"browser.instances" was set in the config, but the array is empty. Define at least one browser config.`, browser.name && instances?.length ? ` The "browser.name" was set to "${browser.name}" which filtered all configs (${instances.map((c) => c.browser).join(", ")}). Did you mean to use another name?` : ""].join(""));
7740
+ }
7741
+ }
7742
+ }
7743
+ const playwrightChromiumOnly = isPlaywrightChromiumOnly(vitest, resolved);
7744
+ if (browser.enabled && !playwrightChromiumOnly) {
7745
+ const browserConfig = { browser: {
7746
+ provider: browser.provider,
7747
+ name: browser.name,
7748
+ instances: browser.instances?.map((i) => ({ browser: i.browser }))
7749
+ } };
7750
+ if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
7751
+ throw new Error(`@vitest/coverage-v8 does not work with\n${JSON.stringify(browserConfig, null, 2)}\n` + `\nUse either:\n${JSON.stringify({ browser: {
7752
+ provider: "playwright",
7753
+ instances: [{ browser: "chromium" }]
7754
+ } }, null, 2)}` + `\n\n...or change your coverage provider to:\n${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}\n`);
7755
+ }
7756
+ if (resolved.inspect || resolved.inspectBrk) {
7757
+ const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
7758
+ throw new Error(`${inspectOption} does not work with\n${JSON.stringify(browserConfig, null, 2)}\n` + `\nUse either:\n${JSON.stringify({ browser: {
7759
+ provider: "playwright",
7760
+ instances: [{ browser: "chromium" }]
7761
+ } }, null, 2)}` + `\n\n...or disable ${inspectOption}\n`);
7762
+ }
7763
+ }
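Both errors above point at the same escape hatch: in browser mode, V8 coverage and the Node inspector are only accepted when every instance is Playwright Chromium (the `isPlaywrightChromiumOnly` check). An illustrative config matching the suggestion embedded in the error messages:

```js
// Illustrative vitest.config.js matching the setup suggested by the errors above.
export default {
  test: {
    coverage: { provider: "v8" },
    browser: {
      enabled: true,
      provider: "playwright",
      instances: [{ browser: "chromium" }],
    },
  },
};
```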
7764
+ resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter);
7765
+ if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
7766
+ const reportsDirectory = resolve(resolved.root, resolved.coverage.reportsDirectory);
7767
+ if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) {
7768
+ throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
7769
+ }
7770
+ }
7771
+ if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) {
7772
+ resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
7773
+ }
7774
+ resolved.expect ??= {};
7775
+ resolved.deps ??= {};
7776
+ resolved.deps.moduleDirectories ??= [];
7777
+ resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
7778
+ if (!dir.startsWith("/")) {
7779
+ dir = `/${dir}`;
7780
+ }
7781
+ if (!dir.endsWith("/")) {
7782
+ dir += "/";
7783
+ }
7784
+ return normalize(dir);
7785
+ });
7786
+ if (!resolved.deps.moduleDirectories.includes("/node_modules/")) {
7787
+ resolved.deps.moduleDirectories.push("/node_modules/");
7788
+ }
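The mapping above guarantees that every `deps.moduleDirectories` entry is wrapped in slashes and that `/node_modules/` is always present. A standalone sketch of that normalization; the helper name is illustrative and `path.posix.normalize` stands in for the bundled `normalize`:

```js
import { posix } from "node:path";

// Standalone sketch of the moduleDirectories normalization above.
function normalizeModuleDirectories(dirs) {
  const normalized = dirs.map((dir) => {
    if (!dir.startsWith("/")) {
      dir = `/${dir}`;
    }
    if (!dir.endsWith("/")) {
      dir += "/";
    }
    return posix.normalize(dir);
  });
  if (!normalized.includes("/node_modules/")) {
    normalized.push("/node_modules/");
  }
  return normalized;
}

console.log(normalizeModuleDirectories(["packages/cache"]));
// [ '/packages/cache/', '/node_modules/' ]
```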
7789
+ resolved.deps.optimizer ??= {};
7790
+ resolved.deps.optimizer.ssr ??= {};
7791
+ resolved.deps.optimizer.ssr.enabled ??= true;
7792
+ resolved.deps.optimizer.web ??= {};
7793
+ resolved.deps.optimizer.web.enabled ??= true;
7794
+ resolved.deps.web ??= {};
7795
+ resolved.deps.web.transformAssets ??= true;
7796
+ resolved.deps.web.transformCss ??= true;
7797
+ resolved.deps.web.transformGlobPattern ??= [];
7798
+ resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root));
7799
+ resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root));
7800
+ resolved.coverage.exclude.push(...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`));
7801
+ resolved.coverage.exclude.push(...resolved.include);
7802
+ resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles];
7803
+ resolved.server ??= {};
7804
+ resolved.server.deps ??= {};
7805
+ const deprecatedDepsOptions = [
7806
+ "inline",
7807
+ "external",
7808
+ "fallbackCJS"
7809
+ ];
7810
+ deprecatedDepsOptions.forEach((option) => {
7811
+ if (resolved.deps[option] === undefined) {
7812
+ return;
7813
+ }
7814
+ if (option === "fallbackCJS") {
7815
+ logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`));
7816
+ } else {
7817
+ const transformMode = resolved.environment === "happy-dom" || resolved.environment === "jsdom" ? "web" : "ssr";
7818
+ logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. If you rely on vite-node directly, use "server.deps.${option}" instead. Otherwise, consider using "deps.optimizer.${transformMode}.${option === "external" ? "exclude" : "include"}"`));
7819
+ }
7820
+ if (resolved.server.deps[option] === undefined) {
7821
+ resolved.server.deps[option] = resolved.deps[option];
7822
+ }
7823
+ });
7824
+ if (resolved.cliExclude) {
7825
+ resolved.exclude.push(...resolved.cliExclude);
7826
+ }
7827
+ if (resolved.server.deps.inline !== true) {
7828
+ const ssrOptions = viteConfig.ssr;
7829
+ if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null) {
7830
+ resolved.server.deps.inline = true;
7831
+ } else {
7832
+ resolved.server.deps.inline ??= [];
7833
+ resolved.server.deps.inline.push(...extraInlineDeps);
7834
+ }
7835
+ }
7836
+ resolved.server.deps.inlineFiles ??= [];
7837
+ resolved.server.deps.inlineFiles.push(...resolved.setupFiles);
7838
+ resolved.server.deps.moduleDirectories ??= [];
7839
+ resolved.server.deps.moduleDirectories.push(...resolved.deps.moduleDirectories);
7840
+ if (resolved.runner) {
7841
+ resolved.runner = resolvePath(resolved.runner, resolved.root);
7842
+ }
7843
+ if (resolved.snapshotEnvironment) {
7844
+ resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
7845
+ }
7846
+ resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : undefined;
7847
+ if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
7848
+ resolved.snapshotFormat.plugins = [];
7849
+ }
7850
+ const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT;
7851
+ resolved.snapshotOptions = {
7852
+ expand: resolved.expandSnapshotDiff ?? false,
7853
+ snapshotFormat: resolved.snapshotFormat || {},
7854
+ updateSnapshot: isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new",
7855
+ resolveSnapshotPath: options.resolveSnapshotPath,
7856
+ snapshotEnvironment: null
7857
+ };
7858
+ resolved.snapshotSerializers ??= [];
7859
+ resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root));
7860
+ resolved.forceRerunTriggers.push(...resolved.snapshotSerializers);
7861
+ if (options.resolveSnapshotPath) {
7862
+ delete resolved.resolveSnapshotPath;
7863
+ }
7864
+ resolved.pool ??= "threads";
7865
+ if (process.env.VITEST_MAX_THREADS) {
7866
+ resolved.poolOptions = {
7867
+ ...resolved.poolOptions,
7868
+ threads: {
7869
+ ...resolved.poolOptions?.threads,
7870
+ maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
7871
+ },
7872
+ vmThreads: {
7873
+ ...resolved.poolOptions?.vmThreads,
7874
+ maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
7875
+ }
7876
+ };
7877
+ }
7878
+ if (process.env.VITEST_MIN_THREADS) {
7879
+ resolved.poolOptions = {
7880
+ ...resolved.poolOptions,
7881
+ threads: {
7882
+ ...resolved.poolOptions?.threads,
7883
+ minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
7884
+ },
7885
+ vmThreads: {
7886
+ ...resolved.poolOptions?.vmThreads,
7887
+ minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
7888
+ }
7889
+ };
7890
+ }
7891
+ if (process.env.VITEST_MAX_FORKS) {
7892
+ resolved.poolOptions = {
7893
+ ...resolved.poolOptions,
7894
+ forks: {
7895
+ ...resolved.poolOptions?.forks,
7896
+ maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
7897
+ },
7898
+ vmForks: {
7899
+ ...resolved.poolOptions?.vmForks,
7900
+ maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
7901
+ }
7902
+ };
7903
+ }
7904
+ if (process.env.VITEST_MIN_FORKS) {
7905
+ resolved.poolOptions = {
7906
+ ...resolved.poolOptions,
7907
+ forks: {
7908
+ ...resolved.poolOptions?.forks,
7909
+ minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
7910
+ },
7911
+ vmForks: {
7912
+ ...resolved.poolOptions?.vmForks,
7913
+ minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
7914
+ }
7915
+ };
7916
+ }
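The four environment variables handled above (`VITEST_MAX_THREADS`, `VITEST_MIN_THREADS`, `VITEST_MAX_FORKS`, `VITEST_MIN_FORKS`) override the corresponding pool options for both the regular and the `vm*` pools. For example, assuming no `poolOptions` are set in the config:

```js
// Illustration only: with VITEST_MIN_THREADS=2 and VITEST_MAX_THREADS=4 in the
// environment, the code above resolves pool options equivalent to:
const poolOptions = {
  threads: { minThreads: 2, maxThreads: 4 },
  vmThreads: { minThreads: 2, maxThreads: 4 },
};
```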
7917
+ const poolThreadsOptions = [
7918
+ ["threads", "minThreads"],
7919
+ ["threads", "maxThreads"],
7920
+ ["vmThreads", "minThreads"],
7921
+ ["vmThreads", "maxThreads"]
7922
+ ];
7923
+ for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) {
7924
+ if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) {
7925
+ resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
7926
+ }
7927
+ }
7928
+ const poolForksOptions = [
7929
+ ["forks", "minForks"],
7930
+ ["forks", "maxForks"],
7931
+ ["vmForks", "minForks"],
7932
+ ["vmForks", "maxForks"]
7933
+ ];
7934
+ for (const [poolOptionKey, workerOptionKey] of poolForksOptions) {
7935
+ if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) {
7936
+ resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
7937
+ }
7938
+ }
7939
+ if (typeof resolved.workspace === "string") {
7940
+ resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
7941
+ }
7942
+ if (!builtinPools.includes(resolved.pool)) {
7943
+ resolved.pool = resolvePath(resolved.pool, resolved.root);
7944
+ }
7945
+ if (resolved.poolMatchGlobs) {
7946
+ logger.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "poolMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`));
7947
+ }
7948
+ resolved.poolMatchGlobs = (resolved.poolMatchGlobs || []).map(([glob, pool]) => {
7949
+ if (!builtinPools.includes(pool)) {
7950
+ pool = resolvePath(pool, resolved.root);
7951
+ }
7952
+ return [glob, pool];
7953
+ });
7954
+ if (mode === "benchmark") {
7955
+ resolved.benchmark = {
7956
+ ...benchmarkConfigDefaults,
7957
+ ...resolved.benchmark
7958
+ };
7959
+ resolved.coverage.enabled = false;
7960
+ resolved.typecheck.enabled = false;
7961
+ resolved.include = resolved.benchmark.include;
7962
+ resolved.exclude = resolved.benchmark.exclude;
7963
+ resolved.includeSource = resolved.benchmark.includeSource;
7964
+ const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean);
7965
+ if (reporters.length) {
7966
+ resolved.benchmark.reporters = reporters;
7967
+ } else {
7968
+ resolved.benchmark.reporters = ["default"];
7969
+ }
7970
+ if (options.outputFile) {
7971
+ resolved.benchmark.outputFile = options.outputFile;
7972
+ }
7973
+ if (options.compare) {
7974
+ resolved.benchmark.compare = options.compare;
7975
+ }
7976
+ if (options.outputJson) {
7977
+ resolved.benchmark.outputJson = options.outputJson;
7978
+ }
7979
+ }
7980
+ if (typeof resolved.diff === "string") {
7981
+ resolved.diff = resolvePath(resolved.diff, resolved.root);
7982
+ resolved.forceRerunTriggers.push(resolved.diff);
7983
+ }
7984
+ const api = resolveApiServerConfig(options, defaultPort);
7985
+ resolved.api = {
7986
+ ...api,
7987
+ token: crypto.randomUUID()
7988
+ };
7989
+ if (options.related) {
7990
+ resolved.related = toArray(options.related).map((file) => resolve(resolved.root, file));
7991
+ }
7992
+ if (options.reporters) {
7993
+ if (!Array.isArray(options.reporters)) {
7994
+ if (typeof options.reporters === "string") {
7995
+ resolved.reporters = [[options.reporters, {}]];
7996
+ } else {
7997
+ resolved.reporters = [options.reporters];
7998
+ }
7999
+ } else {
8000
+ resolved.reporters = [];
8001
+ for (const reporter of options.reporters) {
8002
+ if (Array.isArray(reporter)) {
8003
+ resolved.reporters.push([reporter[0], reporter[1] || {}]);
8004
+ } else if (typeof reporter === "string") {
8005
+ resolved.reporters.push([reporter, {}]);
8006
+ } else {
8007
+ resolved.reporters.push(reporter);
8008
+ }
8009
+ }
8010
+ }
8011
+ }
8012
+ if (mode !== "benchmark") {
8013
+ const reportersFromCLI = resolved.reporter;
8014
+ const cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
8015
+ if (/^\.\.?\//.test(reporter)) {
8016
+ return resolve(process.cwd(), reporter);
8017
+ }
8018
+ return reporter;
8019
+ });
8020
+ if (cliReporters.length) {
8021
+ resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
8022
+ }
8023
+ }
8024
+ if (!resolved.reporters.length) {
8025
+ resolved.reporters.push(["default", {}]);
8026
+ if (process.env.GITHUB_ACTIONS === "true") {
8027
+ resolved.reporters.push(["github-actions", {}]);
8028
+ }
8029
+ }
8030
+ if (resolved.changed) {
8031
+ resolved.passWithNoTests ??= true;
8032
+ }
8033
+ resolved.css ??= {};
8034
+ if (typeof resolved.css === "object") {
8035
+ resolved.css.modules ??= {};
8036
+ resolved.css.modules.classNameStrategy ??= "stable";
8037
+ }
8038
+ if (resolved.cache !== false) {
8039
+ let cacheDir = VitestCache.resolveCacheDir("", viteConfig.cacheDir, resolved.name);
8040
+ if (resolved.cache && resolved.cache.dir) {
8041
+ logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache directory. Note caches will be written to "cacheDir/vitest"`));
8042
+ cacheDir = VitestCache.resolveCacheDir(resolved.root, resolved.cache.dir, resolved.name);
8043
+ }
8044
+ resolved.cache = { dir: cacheDir };
8045
+ }
8046
+ resolved.sequence ??= {};
8047
+ if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
8048
+ const { files, tests } = resolved.sequence.shuffle;
8049
+ resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer;
8050
+ resolved.sequence.shuffle = tests;
8051
+ }
8052
+ if (!resolved.sequence?.sequencer) {
8053
+ resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer;
8054
+ }
8055
+ resolved.sequence.hooks ??= "stack";
8056
+ if (resolved.sequence.sequencer === RandomSequencer) {
8057
+ resolved.sequence.seed ??= Date.now();
8058
+ }
8059
+ resolved.typecheck = {
8060
+ ...configDefaults.typecheck,
8061
+ ...resolved.typecheck
8062
+ };
8063
+ if (resolved.environmentMatchGlobs) {
8064
+ logger.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "environmentMatchGlobs" is deprecated. Use "workspace" to define different configurations instead.`));
8065
+ }
8066
+ resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map((i) => [resolve(resolved.root, i[0]), i[1]]);
8067
+ resolved.typecheck ??= {};
8068
+ resolved.typecheck.enabled ??= false;
8069
+ if (resolved.typecheck.enabled) {
8070
+ logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
8071
+ }
8072
+ resolved.browser ??= {};
8073
+ resolved.browser.enabled ??= false;
8074
+ resolved.browser.headless ??= isCI;
8075
+ resolved.browser.isolate ??= true;
8076
+ resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark";
8077
+ resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI;
8078
+ if (resolved.browser.screenshotDirectory) {
8079
+ resolved.browser.screenshotDirectory = resolve(resolved.root, resolved.browser.screenshotDirectory);
8080
+ }
8081
+ const isPreview = resolved.browser.provider === "preview";
8082
+ if (isPreview && resolved.browser.screenshotFailures === true) {
8083
+ console.warn(c.yellow([
8084
+ `Browser provider "preview" doesn't support screenshots, `,
8085
+ `so "browser.screenshotFailures" option is forcefully disabled. `,
8086
+ `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
8087
+ ].join("")));
8088
+ resolved.browser.screenshotFailures = false;
8089
+ } else {
8090
+ resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
8091
+ }
8092
+ resolved.browser.viewport ??= {};
8093
+ resolved.browser.viewport.width ??= 414;
8094
+ resolved.browser.viewport.height ??= 896;
8095
+ resolved.browser.locators ??= {};
8096
+ resolved.browser.locators.testIdAttribute ??= "data-testid";
8097
+ if (resolved.browser.enabled && provider === "stackblitz") {
8098
+ resolved.browser.provider = "preview";
8099
+ }
8100
+ resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort };
8101
+ if (resolved.browser.enabled) {
8102
+ if (resolved.browser.ui) {
8103
+ resolved.includeTaskLocation ??= true;
8104
+ }
8105
+ } else if (resolved.ui) {
8106
+ resolved.includeTaskLocation ??= true;
8107
+ }
8108
+ const htmlReporter = toArray(resolved.reporters).some((reporter) => {
8109
+ if (Array.isArray(reporter)) {
8110
+ return reporter[0] === "html";
8111
+ }
8112
+ return false;
8113
+ });
8114
+ if (htmlReporter) {
8115
+ resolved.includeTaskLocation ??= true;
8116
+ }
8117
+ resolved.testTransformMode ??= {};
8118
+ resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3;
8119
+ resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4;
8120
+ return resolved;
8366
8121
  }
8367
8122
  function isBrowserEnabled(config) {
8368
- return Boolean(config.browser?.enabled);
8123
+ return Boolean(config.browser?.enabled);
8369
8124
  }
8370
8125
  function resolveCoverageReporters(configReporters) {
8371
- if (!Array.isArray(configReporters)) {
8372
- return [[configReporters, {}]];
8373
- }
8374
- const resolvedReporters = [];
8375
- for (const reporter of configReporters) {
8376
- if (Array.isArray(reporter)) {
8377
- resolvedReporters.push([reporter[0], reporter[1] || {}]);
8378
- } else {
8379
- resolvedReporters.push([reporter, {}]);
8380
- }
8381
- }
8382
- return resolvedReporters;
8126
+ if (!Array.isArray(configReporters)) {
8127
+ return [[configReporters, {}]];
8128
+ }
8129
+ const resolvedReporters = [];
8130
+ for (const reporter of configReporters) {
8131
+ if (Array.isArray(reporter)) {
8132
+ resolvedReporters.push([reporter[0], reporter[1] || {}]);
8133
+ } else {
8134
+ resolvedReporters.push([reporter, {}]);
8135
+ }
8136
+ }
8137
+ return resolvedReporters;
8383
8138
  }
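`resolveCoverageReporters` normalizes the `coverage.reporter` option so that every entry becomes a `[name, options]` tuple. A standalone sketch with the same output shape; the helper name and option values are illustrative:

```js
// Standalone sketch of the reporter normalization above.
function normalizeCoverageReporters(configReporters) {
  if (!Array.isArray(configReporters)) {
    return [[configReporters, {}]];
  }
  return configReporters.map((reporter) =>
    Array.isArray(reporter) ? [reporter[0], reporter[1] || {}] : [reporter, {}]
  );
}

console.log(normalizeCoverageReporters("text"));
// [ [ 'text', {} ] ]
console.log(normalizeCoverageReporters([["html", { subdir: "html" }], "json"]));
// [ [ 'html', { subdir: 'html' } ], [ 'json', {} ] ]
```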
8384
8139
  function isPlaywrightChromiumOnly(vitest, config) {
8385
- const browser = config.browser;
8386
- if (!browser || browser.provider !== "playwright" || !browser.enabled) {
8387
- return false;
8388
- }
8389
- if (browser.name) {
8390
- return browser.name === "chromium";
8391
- }
8392
- if (!browser.instances) {
8393
- return false;
8394
- }
8395
- for (const instance of browser.instances) {
8396
- const name = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
8397
- if (!vitest._matchesProjectFilter(name)) {
8398
- continue;
8399
- }
8400
- if (instance.browser !== "chromium") {
8401
- return false;
8402
- }
8403
- }
8404
- return true;
8140
+ const browser = config.browser;
8141
+ if (!browser || browser.provider !== "playwright" || !browser.enabled) {
8142
+ return false;
8143
+ }
8144
+ if (browser.name) {
8145
+ return browser.name === "chromium";
8146
+ }
8147
+ if (!browser.instances) {
8148
+ return false;
8149
+ }
8150
+ for (const instance of browser.instances) {
8151
+ const name = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
8152
+ if (!vitest.matchesProjectFilter(name)) {
8153
+ continue;
8154
+ }
8155
+ if (instance.browser !== "chromium") {
8156
+ return false;
8157
+ }
8158
+ }
8159
+ return true;
8405
8160
  }
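`isPlaywrightChromiumOnly` is what gates the relaxed browser checks earlier in this chunk: it returns `true` only when the provider is Playwright and every instance that passes the project filter targets `chromium`. A simplified standalone version that ignores the project-name filter:

```js
// Simplified sketch of the check above; the real implementation also skips
// instances that are filtered out by the project-name filter.
function isChromiumOnly(browser) {
  if (!browser || browser.provider !== "playwright" || !browser.enabled) {
    return false;
  }
  if (browser.name) {
    return browser.name === "chromium";
  }
  if (!browser.instances) {
    return false;
  }
  return browser.instances.every((instance) => instance.browser === "chromium");
}

console.log(isChromiumOnly({
  provider: "playwright",
  enabled: true,
  instances: [{ browser: "chromium" }],
})); // true
console.log(isChromiumOnly({
  provider: "playwright",
  enabled: true,
  instances: [{ browser: "chromium" }, { browser: "firefox" }],
})); // false
```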
8406
8161
 
8407
8162
  const THRESHOLD_KEYS = [
8408
- "lines",
8409
- "functions",
8410
- "statements",
8411
- "branches"
8163
+ "lines",
8164
+ "functions",
8165
+ "statements",
8166
+ "branches"
8412
8167
  ];
8413
8168
  const GLOBAL_THRESHOLDS_KEY = "global";
8414
8169
  const DEFAULT_PROJECT = Symbol.for("default-project");
8415
8170
  let uniqueId = 0;
8416
8171
  async function getCoverageProvider(options, loader) {
8417
- const coverageModule = await resolveCoverageProviderModule(options, loader);
8418
- if (coverageModule) {
8419
- return coverageModule.getProvider();
8420
- }
8421
- return null;
8172
+ const coverageModule = await resolveCoverageProviderModule(options, loader);
8173
+ if (coverageModule) {
8174
+ return coverageModule.getProvider();
8175
+ }
8176
+ return null;
8422
8177
  }
8423
8178
  class BaseCoverageProvider {
8424
- ctx;
8425
- name;
8426
- version;
8427
- options;
8428
- coverageFiles = /* @__PURE__ */ new Map();
8429
- pendingPromises = [];
8430
- coverageFilesDirectory;
8431
- _initialize(ctx) {
8432
- this.ctx = ctx;
8433
- if (ctx.version !== this.version) {
8434
- ctx.logger.warn(
8435
- c.yellow(
8436
- `Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
8437
- Running mixed versions is not supported and may lead to bugs
8438
- Update your dependencies and make sure the versions match.`
8439
- )
8440
- );
8441
- }
8442
- const config = ctx.config.coverage;
8443
- this.options = {
8444
- ...coverageConfigDefaults,
8445
- // User's options
8446
- ...config,
8447
- // Resolved fields
8448
- provider: this.name,
8449
- reportsDirectory: resolve(
8450
- ctx.config.root,
8451
- config.reportsDirectory || coverageConfigDefaults.reportsDirectory
8452
- ),
8453
- reporter: resolveCoverageReporters(
8454
- config.reporter || coverageConfigDefaults.reporter
8455
- ),
8456
- thresholds: config.thresholds && {
8457
- ...config.thresholds,
8458
- lines: config.thresholds["100"] ? 100 : config.thresholds.lines,
8459
- branches: config.thresholds["100"] ? 100 : config.thresholds.branches,
8460
- functions: config.thresholds["100"] ? 100 : config.thresholds.functions,
8461
- statements: config.thresholds["100"] ? 100 : config.thresholds.statements
8462
- }
8463
- };
8464
- const shard = this.ctx.config.shard;
8465
- const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
8466
- this.coverageFilesDirectory = resolve(
8467
- this.options.reportsDirectory,
8468
- tempDirectory
8469
- );
8470
- }
8471
- createCoverageMap() {
8472
- throw new Error("BaseReporter's createCoverageMap was not overwritten");
8473
- }
8474
- async generateReports(_, __) {
8475
- throw new Error("BaseReporter's generateReports was not overwritten");
8476
- }
8477
- async parseConfigModule(_) {
8478
- throw new Error("BaseReporter's parseConfigModule was not overwritten");
8479
- }
8480
- resolveOptions() {
8481
- return this.options;
8482
- }
8483
- async clean(clean = true) {
8484
- if (clean && existsSync(this.options.reportsDirectory)) {
8485
- await promises$1.rm(this.options.reportsDirectory, {
8486
- recursive: true,
8487
- force: true,
8488
- maxRetries: 10
8489
- });
8490
- }
8491
- if (existsSync(this.coverageFilesDirectory)) {
8492
- await promises$1.rm(this.coverageFilesDirectory, {
8493
- recursive: true,
8494
- force: true,
8495
- maxRetries: 10
8496
- });
8497
- }
8498
- await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
8499
- this.coverageFiles = /* @__PURE__ */ new Map();
8500
- this.pendingPromises = [];
8501
- }
8502
- onAfterSuiteRun({ coverage, transformMode, projectName, testFiles }) {
8503
- if (!coverage) {
8504
- return;
8505
- }
8506
- if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") {
8507
- throw new Error(`Invalid transform mode: ${transformMode}`);
8508
- }
8509
- let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
8510
- if (!entry) {
8511
- entry = { web: {}, ssr: {}, browser: {} };
8512
- this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
8513
- }
8514
- const testFilenames = testFiles.join();
8515
- const filename = resolve(
8516
- this.coverageFilesDirectory,
8517
- `coverage-${uniqueId++}.json`
8518
- );
8519
- entry[transformMode][testFilenames] = filename;
8520
- const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
8521
- this.pendingPromises.push(promise);
8522
- }
8523
- async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
8524
- let index = 0;
8525
- const total = this.pendingPromises.length;
8526
- await Promise.all(this.pendingPromises);
8527
- this.pendingPromises = [];
8528
- for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) {
8529
- for (const [transformMode, coverageByTestfiles] of Object.entries(coveragePerProject)) {
8530
- const filenames = Object.values(coverageByTestfiles);
8531
- const project = this.ctx.getProjectByName(projectName);
8532
- for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
8533
- if (onDebug.enabled) {
8534
- index += chunk.length;
8535
- onDebug("Covered files %d/%d", index, total);
8536
- }
8537
- await Promise.all(
8538
- chunk.map(async (filename) => {
8539
- const contents = await promises$1.readFile(filename, "utf-8");
8540
- const coverage = JSON.parse(contents);
8541
- onFileRead(coverage);
8542
- })
8543
- );
8544
- }
8545
- await onFinished(project, transformMode);
8546
- }
8547
- }
8548
- }
8549
- async cleanAfterRun() {
8550
- this.coverageFiles = /* @__PURE__ */ new Map();
8551
- await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
8552
- if (readdirSync(this.options.reportsDirectory).length === 0) {
8553
- await promises$1.rm(this.options.reportsDirectory, { recursive: true });
8554
- }
8555
- }
8556
- async onTestFailure() {
8557
- if (!this.options.reportOnFailure) {
8558
- await this.cleanAfterRun();
8559
- }
8560
- }
8561
- async reportCoverage(coverageMap, { allTestsRun }) {
8562
- await this.generateReports(
8563
- coverageMap || this.createCoverageMap(),
8564
- allTestsRun
8565
- );
8566
- const keepResults = !this.options.cleanOnRerun && this.ctx.config.watch;
8567
- if (!keepResults) {
8568
- await this.cleanAfterRun();
8569
- }
8570
- }
8571
- async reportThresholds(coverageMap, allTestsRun) {
8572
- const resolvedThresholds = this.resolveThresholds(coverageMap);
8573
- this.checkThresholds(resolvedThresholds);
8574
- if (this.options.thresholds?.autoUpdate && allTestsRun) {
8575
- if (!this.ctx.server.config.configFile) {
8576
- throw new Error(
8577
- 'Missing configurationFile. The "coverage.thresholds.autoUpdate" can only be enabled when configuration file is used.'
8578
- );
8579
- }
8580
- const configFilePath = this.ctx.server.config.configFile;
8581
- const configModule = await this.parseConfigModule(configFilePath);
8582
- await this.updateThresholds({
8583
- thresholds: resolvedThresholds,
8584
- configurationFile: configModule,
8585
- onUpdate: () => writeFileSync(
8586
- configFilePath,
8587
- configModule.generate().code,
8588
- "utf-8"
8589
- )
8590
- });
8591
- }
8592
- }
8593
- /**
8594
- * Constructs collected coverage and users' threshold options into separate sets
8595
- * where each threshold set holds their own coverage maps. Threshold set is either
8596
- * for specific files defined by glob pattern or global for all other files.
8597
- */
8598
- resolveThresholds(coverageMap) {
8599
- const resolvedThresholds = [];
8600
- const files = coverageMap.files();
8601
- const globalCoverageMap = this.createCoverageMap();
8602
- for (const key of Object.keys(this.options.thresholds)) {
8603
- if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) {
8604
- continue;
8605
- }
8606
- const glob = key;
8607
- const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
8608
- const globCoverageMap = this.createCoverageMap();
8609
- const matchingFiles = files.filter(
8610
- (file) => mm.isMatch(relative(this.ctx.config.root, file), glob)
8611
- );
8612
- for (const file of matchingFiles) {
8613
- const fileCoverage = coverageMap.fileCoverageFor(file);
8614
- globCoverageMap.addFileCoverage(fileCoverage);
8615
- }
8616
- resolvedThresholds.push({
8617
- name: glob,
8618
- coverageMap: globCoverageMap,
8619
- thresholds: globThresholds
8620
- });
8621
- }
8622
- for (const file of files) {
8623
- const fileCoverage = coverageMap.fileCoverageFor(file);
8624
- globalCoverageMap.addFileCoverage(fileCoverage);
8625
- }
8626
- resolvedThresholds.unshift({
8627
- name: GLOBAL_THRESHOLDS_KEY,
8628
- coverageMap: globalCoverageMap,
8629
- thresholds: {
8630
- branches: this.options.thresholds?.branches,
8631
- functions: this.options.thresholds?.functions,
8632
- lines: this.options.thresholds?.lines,
8633
- statements: this.options.thresholds?.statements
8634
- }
8635
- });
8636
- return resolvedThresholds;
8637
- }
8638
- /**
8639
- * Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
8640
- */
8641
- checkThresholds(allThresholds) {
8642
- for (const { coverageMap, thresholds, name } of allThresholds) {
8643
- if (thresholds.branches === void 0 && thresholds.functions === void 0 && thresholds.lines === void 0 && thresholds.statements === void 0) {
8644
- continue;
8645
- }
8646
- const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => ({
8647
- file,
8648
- summary: coverageMap.fileCoverageFor(file).toSummary()
8649
- })) : [{ file: null, summary: coverageMap.getCoverageSummary() }];
8650
- for (const { summary, file } of summaries) {
8651
- for (const thresholdKey of THRESHOLD_KEYS) {
8652
- const threshold = thresholds[thresholdKey];
8653
- if (threshold === void 0) {
8654
- continue;
8655
- }
8656
- if (threshold >= 0) {
8657
- const coverage = summary.data[thresholdKey].pct;
8658
- if (coverage < threshold) {
8659
- process.exitCode = 1;
8660
- let errorMessage = `ERROR: Coverage for ${thresholdKey} (${coverage}%) does not meet ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${threshold}%)`;
8661
- if (this.options.thresholds?.perFile && file) {
8662
- errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
8663
- }
8664
- this.ctx.logger.error(errorMessage);
8665
- }
8666
- } else {
8667
- const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
8668
- const absoluteThreshold = threshold * -1;
8669
- if (uncovered > absoluteThreshold) {
8670
- process.exitCode = 1;
8671
- let errorMessage = `ERROR: Uncovered ${thresholdKey} (${uncovered}) exceed ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${absoluteThreshold})`;
8672
- if (this.options.thresholds?.perFile && file) {
8673
- errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
8674
- }
8675
- this.ctx.logger.error(errorMessage);
8676
- }
8677
- }
8678
- }
8679
- }
8680
- }
8681
- }
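The loop above gives thresholds a dual meaning: a non-negative value is a minimum coverage percentage, while a negative value caps the absolute number of uncovered items. A worked example with illustrative numbers:

```js
// Worked example (illustrative numbers): 188 of 200 lines covered, i.e. 94%.
const lines = { total: 200, covered: 188, pct: 94 };
const uncovered = lines.total - lines.covered; // 12

// thresholds.lines = 95  -> 94 < 95  -> fails, process.exitCode = 1
// thresholds.lines = -10 -> 12 > 10  -> fails, process.exitCode = 1
// thresholds.lines = -15 -> 12 <= 15 -> passes
```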
8682
- /**
8683
- * Check if current coverage is above configured thresholds and bump the thresholds if needed
8684
- */
8685
- async updateThresholds({ thresholds: allThresholds, onUpdate, configurationFile }) {
8686
- let updatedThresholds = false;
8687
- const config = resolveConfig(configurationFile);
8688
- assertConfigurationModule(config);
8689
- for (const { coverageMap, thresholds, name } of allThresholds) {
8690
- const summaries = this.options.thresholds?.perFile ? coverageMap.files().map(
8691
- (file) => coverageMap.fileCoverageFor(file).toSummary()
8692
- ) : [coverageMap.getCoverageSummary()];
8693
- const thresholdsToUpdate = [];
8694
- for (const key of THRESHOLD_KEYS) {
8695
- const threshold = thresholds[key] ?? 100;
8696
- if (threshold >= 0) {
8697
- const actual = Math.min(
8698
- ...summaries.map((summary) => summary[key].pct)
8699
- );
8700
- if (actual > threshold) {
8701
- thresholdsToUpdate.push([key, actual]);
8702
- }
8703
- } else {
8704
- const absoluteThreshold = threshold * -1;
8705
- const actual = Math.max(
8706
- ...summaries.map((summary) => summary[key].total - summary[key].covered)
8707
- );
8708
- if (actual < absoluteThreshold) {
8709
- const updatedThreshold = actual === 0 ? 100 : actual * -1;
8710
- thresholdsToUpdate.push([key, updatedThreshold]);
8711
- }
8712
- }
8713
- }
8714
- if (thresholdsToUpdate.length === 0) {
8715
- continue;
8716
- }
8717
- updatedThresholds = true;
8718
- for (const [threshold, newValue] of thresholdsToUpdate) {
8719
- if (name === GLOBAL_THRESHOLDS_KEY) {
8720
- config.test.coverage.thresholds[threshold] = newValue;
8721
- } else {
8722
- const glob = config.test.coverage.thresholds[name];
8723
- glob[threshold] = newValue;
8724
- }
8725
- }
8726
- }
8727
- if (updatedThresholds) {
8728
- this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
8729
- onUpdate();
8730
- }
8731
- }
8732
- async mergeReports(coverageMaps) {
8733
- const coverageMap = this.createCoverageMap();
8734
- for (const coverage of coverageMaps) {
8735
- coverageMap.merge(coverage);
8736
- }
8737
- await this.generateReports(coverageMap, true);
8738
- }
8739
- hasTerminalReporter(reporters) {
8740
- return reporters.some(
8741
- ([reporter]) => reporter === "text" || reporter === "text-summary" || reporter === "text-lcov" || reporter === "teamcity"
8742
- );
8743
- }
8744
- toSlices(array, size) {
8745
- return array.reduce((chunks, item) => {
8746
- const index = Math.max(0, chunks.length - 1);
8747
- const lastChunk = chunks[index] || [];
8748
- chunks[index] = lastChunk;
8749
- if (lastChunk.length >= size) {
8750
- chunks.push([item]);
8751
- } else {
8752
- lastChunk.push(item);
8753
- }
8754
- return chunks;
8755
- }, []);
8756
- }
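`toSlices` batches the collected coverage files so that at most `coverage.processingConcurrency` files are read in parallel. A standalone sketch using the same reduce logic:

```js
// Standalone sketch of the batching above: chunks of at most `size` items.
function toSlices(array, size) {
  return array.reduce((chunks, item) => {
    const index = Math.max(0, chunks.length - 1);
    const lastChunk = chunks[index] || [];
    chunks[index] = lastChunk;
    if (lastChunk.length >= size) {
      chunks.push([item]);
    } else {
      lastChunk.push(item);
    }
    return chunks;
  }, []);
}

console.log(toSlices(["a.json", "b.json", "c.json", "d.json", "e.json"], 2));
// [ [ 'a.json', 'b.json' ], [ 'c.json', 'd.json' ], [ 'e.json' ] ]
```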
8757
- createUncoveredFileTransformer(ctx) {
8758
- const servers = [
8759
- ...ctx.projects.map((project) => ({
8760
- root: project.config.root,
8761
- isBrowserEnabled: project.isBrowserEnabled(),
8762
- vitenode: project.vitenode
8763
- })),
8764
- // Check core last as it will match all files anyway
8765
- { root: ctx.config.root, vitenode: ctx.vitenode, isBrowserEnabled: ctx.getRootProject().isBrowserEnabled() }
8766
- ];
8767
- return async function transformFile(filename) {
8768
- let lastError;
8769
- for (const { root, vitenode, isBrowserEnabled } of servers) {
8770
- if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) {
8771
- continue;
8772
- }
8773
- if (isBrowserEnabled) {
8774
- const result = await vitenode.transformRequest(filename, void 0, "web").catch(() => null);
8775
- if (result) {
8776
- return result;
8777
- }
8778
- }
8779
- try {
8780
- return await vitenode.transformRequest(filename);
8781
- } catch (error) {
8782
- lastError = error;
8783
- }
8784
- }
8785
- throw lastError;
8786
- };
8787
- }
8179
+ ctx;
8180
+ name;
8181
+ version;
8182
+ options;
8183
+ coverageFiles = new Map();
8184
+ pendingPromises = [];
8185
+ coverageFilesDirectory;
8186
+ _initialize(ctx) {
8187
+ this.ctx = ctx;
8188
+ if (ctx.version !== this.version) {
8189
+ ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.` + "\nRunning mixed versions is not supported and may lead to bugs" + "\nUpdate your dependencies and make sure the versions match."));
8190
+ }
8191
+ const config = ctx.config.coverage;
8192
+ this.options = {
8193
+ ...coverageConfigDefaults,
8194
+ ...config,
8195
+ provider: this.name,
8196
+ reportsDirectory: resolve(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
8197
+ reporter: resolveCoverageReporters(config.reporter || coverageConfigDefaults.reporter),
8198
+ thresholds: config.thresholds && {
8199
+ ...config.thresholds,
8200
+ lines: config.thresholds["100"] ? 100 : config.thresholds.lines,
8201
+ branches: config.thresholds["100"] ? 100 : config.thresholds.branches,
8202
+ functions: config.thresholds["100"] ? 100 : config.thresholds.functions,
8203
+ statements: config.thresholds["100"] ? 100 : config.thresholds.statements
8204
+ }
8205
+ };
8206
+ const shard = this.ctx.config.shard;
8207
+ const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
8208
+ this.coverageFilesDirectory = resolve(this.options.reportsDirectory, tempDirectory);
8209
+ }
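`_initialize` above also expands the `thresholds["100"]` shorthand: when it is set, all four threshold keys resolve to 100. A small sketch of that expansion with illustrative values:

```js
// Sketch of the `thresholds: { "100": true }` expansion performed above.
const thresholds = { "100": true };
const resolvedThresholds = {
  ...thresholds,
  lines: thresholds["100"] ? 100 : thresholds.lines,
  branches: thresholds["100"] ? 100 : thresholds.branches,
  functions: thresholds["100"] ? 100 : thresholds.functions,
  statements: thresholds["100"] ? 100 : thresholds.statements,
};
console.log(resolvedThresholds);
// { '100': true, lines: 100, branches: 100, functions: 100, statements: 100 }
```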
8210
+ createCoverageMap() {
+ throw new Error("BaseReporter's createCoverageMap was not overwritten");
+ }
+ async generateReports(_, __) {
+ throw new Error("BaseReporter's generateReports was not overwritten");
+ }
+ async parseConfigModule(_) {
+ throw new Error("BaseReporter's parseConfigModule was not overwritten");
+ }
+ resolveOptions() {
+ return this.options;
+ }
+ async clean(clean = true) {
+ if (clean && existsSync(this.options.reportsDirectory)) {
+ await promises$1.rm(this.options.reportsDirectory, {
+ recursive: true,
+ force: true,
+ maxRetries: 10
+ });
+ }
+ if (existsSync(this.coverageFilesDirectory)) {
+ await promises$1.rm(this.coverageFilesDirectory, {
+ recursive: true,
+ force: true,
+ maxRetries: 10
+ });
+ }
+ await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
+ this.coverageFiles = new Map();
+ this.pendingPromises = [];
+ }
+ onAfterSuiteRun({ coverage, transformMode, projectName, testFiles }) {
+ if (!coverage) {
+ return;
+ }
+ if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") {
+ throw new Error(`Invalid transform mode: ${transformMode}`);
+ }
+ let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
+ if (!entry) {
+ entry = {
+ web: {},
+ ssr: {},
+ browser: {}
+ };
+ this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
+ }
+ const testFilenames = testFiles.join();
+ const filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
+ entry[transformMode][testFilenames] = filename;
+ const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
+ this.pendingPromises.push(promise);
+ }
+ async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
+ let index = 0;
+ const total = this.pendingPromises.length;
+ await Promise.all(this.pendingPromises);
+ this.pendingPromises = [];
+ for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) {
+ for (const [transformMode, coverageByTestfiles] of Object.entries(coveragePerProject)) {
+ const filenames = Object.values(coverageByTestfiles);
+ const project = this.ctx.getProjectByName(projectName);
+ for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
+ if (onDebug.enabled) {
+ index += chunk.length;
+ onDebug("Covered files %d/%d", index, total);
+ }
+ await Promise.all(chunk.map(async (filename) => {
+ const contents = await promises$1.readFile(filename, "utf-8");
+ const coverage = JSON.parse(contents);
+ onFileRead(coverage);
+ }));
+ }
+ await onFinished(project, transformMode);
+ }
+ }
+ }
+ async cleanAfterRun() {
+ this.coverageFiles = new Map();
+ await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
+ if (readdirSync(this.options.reportsDirectory).length === 0) {
+ await promises$1.rm(this.options.reportsDirectory, { recursive: true });
+ }
+ }
+ async onTestFailure() {
+ if (!this.options.reportOnFailure) {
+ await this.cleanAfterRun();
+ }
+ }
+ async reportCoverage(coverageMap, { allTestsRun }) {
+ await this.generateReports(coverageMap || this.createCoverageMap(), allTestsRun);
+ const keepResults = !this.options.cleanOnRerun && this.ctx.config.watch;
+ if (!keepResults) {
+ await this.cleanAfterRun();
+ }
+ }
+ async reportThresholds(coverageMap, allTestsRun) {
+ const resolvedThresholds = this.resolveThresholds(coverageMap);
+ this.checkThresholds(resolvedThresholds);
+ if (this.options.thresholds?.autoUpdate && allTestsRun) {
+ if (!this.ctx.server.config.configFile) {
+ throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
+ }
+ const configFilePath = this.ctx.server.config.configFile;
+ const configModule = await this.parseConfigModule(configFilePath);
+ await this.updateThresholds({
+ thresholds: resolvedThresholds,
+ configurationFile: configModule,
+ onUpdate: () => writeFileSync(configFilePath, configModule.generate().code, "utf-8")
+ });
+ }
+ }
+ /**
+ * Constructs collected coverage and users' threshold options into separate sets
+ * where each threshold set holds their own coverage maps. Threshold set is either
+ * for specific files defined by glob pattern or global for all other files.
+ */
+ resolveThresholds(coverageMap) {
+ const resolvedThresholds = [];
+ const files = coverageMap.files();
+ const globalCoverageMap = this.createCoverageMap();
+ for (const key of Object.keys(this.options.thresholds)) {
+ if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) {
+ continue;
+ }
+ const glob = key;
+ const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
+ const globCoverageMap = this.createCoverageMap();
+ const matchingFiles = files.filter((file) => mm.isMatch(relative(this.ctx.config.root, file), glob));
+ for (const file of matchingFiles) {
+ const fileCoverage = coverageMap.fileCoverageFor(file);
+ globCoverageMap.addFileCoverage(fileCoverage);
+ }
+ resolvedThresholds.push({
+ name: glob,
+ coverageMap: globCoverageMap,
+ thresholds: globThresholds
+ });
+ }
+ for (const file of files) {
+ const fileCoverage = coverageMap.fileCoverageFor(file);
+ globalCoverageMap.addFileCoverage(fileCoverage);
+ }
+ resolvedThresholds.unshift({
+ name: GLOBAL_THRESHOLDS_KEY,
+ coverageMap: globalCoverageMap,
+ thresholds: {
+ branches: this.options.thresholds?.branches,
+ functions: this.options.thresholds?.functions,
+ lines: this.options.thresholds?.lines,
+ statements: this.options.thresholds?.statements
+ }
+ });
+ return resolvedThresholds;
+ }
+ /**
+ * Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
+ */
+ checkThresholds(allThresholds) {
+ for (const { coverageMap, thresholds, name } of allThresholds) {
+ if (thresholds.branches === undefined && thresholds.functions === undefined && thresholds.lines === undefined && thresholds.statements === undefined) {
+ continue;
+ }
+ const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => ({
+ file,
+ summary: coverageMap.fileCoverageFor(file).toSummary()
+ })) : [{
+ file: null,
+ summary: coverageMap.getCoverageSummary()
+ }];
+ for (const { summary, file } of summaries) {
+ for (const thresholdKey of THRESHOLD_KEYS) {
+ const threshold = thresholds[thresholdKey];
+ if (threshold === undefined) {
+ continue;
+ }
+ /**
+ * Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
+ * while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
+ */
+ if (threshold >= 0) {
+ const coverage = summary.data[thresholdKey].pct;
+ if (coverage < threshold) {
+ process.exitCode = 1;
+ /**
+ * Generate error message based on perFile flag:
+ * - ERROR: Coverage for statements (33.33%) does not meet threshold (85%) for src/math.ts
+ * - ERROR: Coverage for statements (50%) does not meet global threshold (85%)
+ */
+ let errorMessage = `ERROR: Coverage for ${thresholdKey} (${coverage}%) does not meet ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${threshold}%)`;
+ if (this.options.thresholds?.perFile && file) {
+ errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
+ }
+ this.ctx.logger.error(errorMessage);
+ }
+ } else {
+ const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
+ const absoluteThreshold = threshold * -1;
+ if (uncovered > absoluteThreshold) {
+ process.exitCode = 1;
+ /**
+ * Generate error message based on perFile flag:
+ * - ERROR: Uncovered statements (33) exceed threshold (30) for src/math.ts
+ * - ERROR: Uncovered statements (33) exceed global threshold (30)
+ */
+ let errorMessage = `ERROR: Uncovered ${thresholdKey} (${uncovered}) exceed ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${absoluteThreshold})`;
+ if (this.options.thresholds?.perFile && file) {
+ errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
+ }
+ this.ctx.logger.error(errorMessage);
+ }
+ }
+ }
+ }
+ }
+ }
+ /**
+ * Check if current coverage is above configured thresholds and bump the thresholds if needed
+ */
+ async updateThresholds({ thresholds: allThresholds, onUpdate, configurationFile }) {
+ let updatedThresholds = false;
+ const config = resolveConfig(configurationFile);
+ assertConfigurationModule(config);
+ for (const { coverageMap, thresholds, name } of allThresholds) {
+ const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()];
+ const thresholdsToUpdate = [];
+ for (const key of THRESHOLD_KEYS) {
+ const threshold = thresholds[key] ?? 100;
+ /**
+ * Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
+ * while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
+ */
+ if (threshold >= 0) {
+ const actual = Math.min(...summaries.map((summary) => summary[key].pct));
+ if (actual > threshold) {
+ thresholdsToUpdate.push([key, actual]);
+ }
+ } else {
+ const absoluteThreshold = threshold * -1;
+ const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
+ if (actual < absoluteThreshold) {
+ const updatedThreshold = actual === 0 ? 100 : actual * -1;
+ thresholdsToUpdate.push([key, updatedThreshold]);
+ }
+ }
+ }
+ if (thresholdsToUpdate.length === 0) {
+ continue;
+ }
+ updatedThresholds = true;
+ for (const [threshold, newValue] of thresholdsToUpdate) {
+ if (name === GLOBAL_THRESHOLDS_KEY) {
+ config.test.coverage.thresholds[threshold] = newValue;
+ } else {
+ const glob = config.test.coverage.thresholds[name];
+ glob[threshold] = newValue;
+ }
+ }
+ }
+ if (updatedThresholds) {
+ this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
+ onUpdate();
+ }
+ }
+ async mergeReports(coverageMaps) {
+ const coverageMap = this.createCoverageMap();
+ for (const coverage of coverageMaps) {
+ coverageMap.merge(coverage);
+ }
+ await this.generateReports(coverageMap, true);
+ }
+ hasTerminalReporter(reporters) {
+ return reporters.some(([reporter]) => reporter === "text" || reporter === "text-summary" || reporter === "text-lcov" || reporter === "teamcity");
+ }
+ toSlices(array, size) {
+ return array.reduce((chunks, item) => {
+ const index = Math.max(0, chunks.length - 1);
+ const lastChunk = chunks[index] || [];
+ chunks[index] = lastChunk;
+ if (lastChunk.length >= size) {
+ chunks.push([item]);
+ } else {
+ lastChunk.push(item);
+ }
+ return chunks;
+ }, []);
+ }
+ createUncoveredFileTransformer(ctx) {
+ const servers = [...ctx.projects.map((project) => ({
+ root: project.config.root,
+ isBrowserEnabled: project.isBrowserEnabled(),
+ vitenode: project.vitenode
+ })), {
+ root: ctx.config.root,
+ vitenode: ctx.vitenode,
+ isBrowserEnabled: ctx.getRootProject().isBrowserEnabled()
+ }];
+ return async function transformFile(filename) {
+ let lastError;
+ for (const { root, vitenode, isBrowserEnabled } of servers) {
+ if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) {
+ continue;
+ }
+ if (isBrowserEnabled) {
+ const result = await vitenode.transformRequest(filename, undefined, "web").catch(() => null);
+ if (result) {
+ return result;
+ }
+ }
+ try {
+ return await vitenode.transformRequest(filename);
+ } catch (error) {
+ lastError = error;
+ }
+ }
+ throw lastError;
+ };
+ }
  }
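For orientation, the methods above that throw "was not overwritten" (createCoverageMap, generateReports, parseConfigModule) define the contract a concrete coverage provider fills in by subclassing. A minimal sketch, assuming the istanbul-lib-coverage, istanbul-lib-report, istanbul-reports and magicast APIs; the provider name and wiring here are illustrative assumptions, not taken from this package:

// Hypothetical provider built on BaseCoverageProvider (names and wiring are illustrative).
import { promises as fs } from "node:fs";
import libCoverage from "istanbul-lib-coverage";
import libReport from "istanbul-lib-report";
import reports from "istanbul-reports";

class ExampleCoverageProvider extends BaseCoverageProvider {
  name = "example";          // assumed provider name, not a published package
  version = "3.1.0-beta.2";  // compared against ctx.version in _initialize

  initialize(ctx) {
    this._initialize(ctx);
  }

  // Empty istanbul coverage map that the base class merges file coverage into.
  createCoverageMap() {
    return libCoverage.createCoverageMap({});
  }

  // Render the reporters resolved into [name, options] pairs by _initialize.
  async generateReports(coverageMap, allTestsRun) {
    const context = libReport.createContext({
      dir: this.options.reportsDirectory,
      coverageMap,
    });
    for (const [reporter, options] of this.options.reporter) {
      reports.create(reporter, options).execute(context);
    }
    if (this.options.thresholds) {
      await this.reportThresholds(coverageMap, allTestsRun);
    }
  }

  // thresholds.autoUpdate needs an editable view of the config file; magicast's
  // parseModule produces the `$type`/`exports.default` proxy used by updateThresholds.
  async parseConfigModule(configFilePath) {
    const { parseModule } = await import("magicast");
    return parseModule(await fs.readFile(configFilePath, "utf8"));
  }
}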
+ /**
+ * Narrow down `unknown` glob thresholds to resolved ones
+ */
  function resolveGlobThresholds(thresholds) {
- if (!thresholds || typeof thresholds !== "object") {
- return {};
- }
- if (100 in thresholds && thresholds[100] === true) {
- return {
- lines: 100,
- branches: 100,
- functions: 100,
- statements: 100
- };
- }
- return {
- lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines : void 0,
- branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches : void 0,
- functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions : void 0,
- statements: "statements" in thresholds && typeof thresholds.statements === "number" ? thresholds.statements : void 0
- };
+ if (!thresholds || typeof thresholds !== "object") {
+ return {};
+ }
+ if (100 in thresholds && thresholds[100] === true) {
+ return {
+ lines: 100,
+ branches: 100,
+ functions: 100,
+ statements: 100
+ };
+ }
+ return {
+ lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines : undefined,
+ branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches : undefined,
+ functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions : undefined,
+ statements: "statements" in thresholds && typeof thresholds.statements === "number" ? thresholds.statements : undefined
+ };
  }
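resolveGlobThresholds and the threshold handling in checkThresholds/updateThresholds above are easier to read against a concrete configuration. A hedged example (the glob and numbers are invented): positive values are minimum coverage percentages, negative values are maximum allowed uncovered counts, and `100: true` is shorthand for 100% on all four keys.

// Illustrative vitest config; the glob and numbers are made up.
export default {
  test: {
    coverage: {
      thresholds: {
        lines: 80,        // at least 80% of lines covered overall
        functions: -10,   // at most 10 uncovered functions overall
        perFile: true,    // checkThresholds then evaluates every file separately
        autoUpdate: true, // updateThresholds bumps these numbers when coverage improves
        "src/utils/**.ts": {
          // per-glob set; resolveThresholds builds a separate coverage map for matching files
          statements: 95,
        },
      },
    },
  },
};
// resolveGlobThresholds({ statements: 95 })
//   -> { lines: undefined, branches: undefined, functions: undefined, statements: 95 }
// resolveGlobThresholds({ 100: true })
//   -> { lines: 100, branches: 100, functions: 100, statements: 100 }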
  function assertConfigurationModule(config) {
- try {
- if (typeof config.test.coverage.thresholds !== "object") {
- throw new TypeError(
- "Expected config.test.coverage.thresholds to be an object"
- );
- }
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- throw new Error(
- `Unable to parse thresholds from configuration file: ${message}`
- );
- }
+ try {
+ if (typeof config.test.coverage.thresholds !== "object") {
+ throw new TypeError("Expected config.test.coverage.thresholds to be an object");
+ }
+ } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
+ throw new Error(`Unable to parse thresholds from configuration file: ${message}`);
+ }
  }
  function resolveConfig(configModule) {
- const mod = configModule.exports.default;
- try {
- if (mod.$type === "object") {
- return mod;
- }
- let config = resolveDefineConfig(mod);
- if (config) {
- return config;
- }
- if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
- config = resolveMergeConfig(mod);
- if (config) {
- return config;
- }
- }
- } catch (error) {
- throw new Error(error instanceof Error ? error.message : String(error));
- }
- throw new Error(
- "Failed to update coverage thresholds. Configuration file is too complex."
- );
+ const mod = configModule.exports.default;
+ try {
+ if (mod.$type === "object") {
+ return mod;
+ }
+ let config = resolveDefineConfig(mod);
+ if (config) {
+ return config;
+ }
+ if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
+ config = resolveMergeConfig(mod);
+ if (config) {
+ return config;
+ }
+ }
+ } catch (error) {
+ throw new Error(error instanceof Error ? error.message : String(error));
+ }
+ throw new Error("Failed to update coverage thresholds. Configuration file is too complex.");
  }
  function resolveDefineConfig(mod) {
- if (mod.$type === "function-call" && mod.$callee === "defineConfig") {
- if (mod.$args[0].$type === "object") {
- return mod.$args[0];
- }
- if (mod.$args[0].$type === "arrow-function-expression") {
- if (mod.$args[0].$body.$type === "object") {
- return mod.$args[0].$body;
- }
- const config = resolveMergeConfig(mod.$args[0].$body);
- if (config) {
- return config;
- }
- }
- }
+ if (mod.$type === "function-call" && mod.$callee === "defineConfig") {
+ if (mod.$args[0].$type === "object") {
+ return mod.$args[0];
+ }
+ if (mod.$args[0].$type === "arrow-function-expression") {
+ if (mod.$args[0].$body.$type === "object") {
+ return mod.$args[0].$body;
+ }
+ const config = resolveMergeConfig(mod.$args[0].$body);
+ if (config) {
+ return config;
+ }
+ }
+ }
  }
  function resolveMergeConfig(mod) {
- if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
- for (const arg of mod.$args) {
- const config = resolveDefineConfig(arg);
- if (config) {
- return config;
- }
- }
- }
+ if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
+ for (const arg of mod.$args) {
+ const config = resolveDefineConfig(arg);
+ if (config) {
+ return config;
+ }
+ }
+ }
  }
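The `$type`/`$callee`/`$args` checks in resolveConfig, resolveDefineConfig and resolveMergeConfig walk the parsed config module returned by parseConfigModule (the field names suggest a magicast-style proxy), so threshold auto-update only works when the default export can be reduced to a plain object. A sketch of the supported shapes; the imports and values are illustrative, not from this package:

// vitest.config.ts — shapes that resolveConfig can narrow down (illustrative values):
import { defineConfig, mergeConfig } from "vitest/config";
import baseConfig from "./vite.config";

// 1. a plain object default export, or defineConfig({ ... })  -> the object literal itself
// 2. defineConfig(() => ({ ... }))                             -> the arrow body, if it is an object literal
// 3. mergeConfig(..., defineConfig({ ... }))                   -> the defineConfig object found among the arguments
export default mergeConfig(
  baseConfig,
  defineConfig({
    test: { coverage: { thresholds: { lines: 80 } } },
  }),
);
// Anything more involved falls through to:
// "Failed to update coverage thresholds. Configuration file is too complex."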
 
  export { BaseCoverageProvider as B, RandomSequencer as R, VitestCache as V, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, isBrowserEnabled as d, groupBy as e, getCoverageProvider as f, getFilePoolName as g, hash as h, isPackageExists as i, createPool as j, mm as m, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };