vitest 4.0.0-beta.1 → 4.0.0-beta.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/LICENSE.md +83 -2
  2. package/dist/browser.d.ts +19 -16
  3. package/dist/browser.js +11 -7
  4. package/dist/chunks/{benchmark.CYdenmiT.js → benchmark.LXhJ0F0X.js} +7 -9
  5. package/dist/chunks/{benchmark.d.BwvBVTda.d.ts → benchmark.d.DAaHLpsq.d.ts} +4 -4
  6. package/dist/chunks/{browser.d.q8Z0P0q1.d.ts → browser.d.Dx7DO_Ce.d.ts} +5 -5
  7. package/dist/chunks/{cac.D3EzDDZd.js → cac.elvK37c9.js} +71 -153
  8. package/dist/chunks/{cli-api.Dn5gKePv.js → cli-api.C7plPyhs.js} +1376 -1693
  9. package/dist/chunks/{config.d.HJdfX-8k.d.ts → config.d.B_LthbQq.d.ts} +58 -63
  10. package/dist/chunks/{console.CtFJOzRO.js → console.CiTi59Jy.js} +35 -71
  11. package/dist/chunks/{constants.DnKduX2e.js → constants.D_Q9UYh-.js} +1 -9
  12. package/dist/chunks/{coverage.Cwa-XhJt.js → coverage.CG6Uhorw.js} +522 -792
  13. package/dist/chunks/{coverage.DVF1vEu8.js → coverage.D_JHT54q.js} +2 -2
  14. package/dist/chunks/{coverage.d.S9RMNXIe.d.ts → coverage.d.BZtK59WP.d.ts} +10 -8
  15. package/dist/chunks/{creator.GK6I-cL4.js → creator.08Gi-vCA.js} +93 -77
  16. package/dist/chunks/{date.Bq6ZW5rf.js → date.-jtEtIeV.js} +6 -17
  17. package/dist/chunks/{environment.d.CUq4cUgQ.d.ts → environment.d.BsToaxti.d.ts} +27 -6
  18. package/dist/chunks/{git.BVQ8w_Sw.js → git.BFNcloKD.js} +1 -2
  19. package/dist/chunks/{global.d.CVbXEflG.d.ts → global.d.BK3X7FW1.d.ts} +2 -5
  20. package/dist/chunks/{globals.Cxal6MLI.js → globals.BjvYA-AD.js} +11 -9
  21. package/dist/chunks/{index.BWf_gE5n.js → index.AZOjjqWP.js} +7 -6
  22. package/dist/chunks/{index.B521nVV-.js → index.Bgo3tNWt.js} +23 -4
  23. package/dist/chunks/{index.TfbsX-3I.js → index.BhY64fF0.js} +16 -26
  24. package/dist/chunks/{index.CZI_8rVt.js → index.BwBttQPf.js} +340 -663
  25. package/dist/chunks/{index.CmSc2RE5.js → index.DIWhzsUh.js} +72 -118
  26. package/dist/chunks/{inspector.C914Efll.js → inspector.CvQD-Nie.js} +10 -25
  27. package/dist/chunks/moduleRunner.d.BNa-CL9e.d.ts +201 -0
  28. package/dist/chunks/{node.fjCdwEIl.js → node.BsdMi6DV.js} +2 -2
  29. package/dist/chunks/{plugin.d.C2EcJUjo.d.ts → plugin.d.C5phQR6o.d.ts} +1 -1
  30. package/dist/chunks/{reporters.d.DxZg19fy.d.ts → reporters.d.CVzhsTvK.d.ts} +1233 -1293
  31. package/dist/chunks/resolveSnapshotEnvironment.DQVamkje.js +81 -0
  32. package/dist/chunks/rpc.jKGRSXIH.js +65 -0
  33. package/dist/chunks/{setup-common.D7ZqXFx-.js → setup-common.NAWRuMRP.js} +18 -30
  34. package/dist/chunks/startModuleRunner.oAuCu1yL.js +682 -0
  35. package/dist/chunks/{suite.d.FvehnV49.d.ts → suite.d.BJWk38HB.d.ts} +1 -1
  36. package/dist/chunks/test.KC5tH8hC.js +214 -0
  37. package/dist/chunks/typechecker.gXq-5P3n.js +1438 -0
  38. package/dist/chunks/{utils.XdZDrNZV.js → utils.DGKhod2J.js} +9 -28
  39. package/dist/chunks/{vi.bdSIJ99Y.js → vi.CiJ0Laa6.js} +159 -306
  40. package/dist/chunks/worker.d.B_Fd9M_w.d.ts +100 -0
  41. package/dist/chunks/worker.rPGLlbkW.js +200 -0
  42. package/dist/cli.js +8 -6
  43. package/dist/config.cjs +3 -9
  44. package/dist/config.d.ts +49 -54
  45. package/dist/config.js +1 -1
  46. package/dist/coverage.d.ts +27 -26
  47. package/dist/coverage.js +6 -8
  48. package/dist/environments.d.ts +9 -13
  49. package/dist/environments.js +1 -1
  50. package/dist/index.d.ts +38 -45
  51. package/dist/index.js +10 -10
  52. package/dist/module-evaluator.d.ts +13 -0
  53. package/dist/module-evaluator.js +276 -0
  54. package/dist/module-runner.js +15 -0
  55. package/dist/node.d.ts +44 -42
  56. package/dist/node.js +30 -36
  57. package/dist/reporters.d.ts +12 -13
  58. package/dist/reporters.js +7 -5
  59. package/dist/runners.d.ts +3 -3
  60. package/dist/runners.js +15 -232
  61. package/dist/snapshot.js +3 -3
  62. package/dist/suite.d.ts +2 -2
  63. package/dist/suite.js +4 -3
  64. package/dist/worker-base.js +203 -0
  65. package/dist/{chunks/vm.BThCzidc.js → worker-vm.js} +179 -228
  66. package/dist/workers/runVmTests.js +39 -56
  67. package/globals.d.ts +17 -17
  68. package/package.json +40 -38
  69. package/browser.d.ts +0 -1
  70. package/dist/chunks/base.Bj3pWTr1.js +0 -38
  71. package/dist/chunks/execute.B7h3T_Hc.js +0 -708
  72. package/dist/chunks/index.D-VkfKhf.js +0 -105
  73. package/dist/chunks/rpc.CsFtxqeq.js +0 -83
  74. package/dist/chunks/runBaseTests.BC7ZIH5L.js +0 -129
  75. package/dist/chunks/typechecker.CVytUJuF.js +0 -874
  76. package/dist/chunks/utils.CAioKnHs.js +0 -61
  77. package/dist/chunks/worker.d.CmvJfRGs.d.ts +0 -8
  78. package/dist/chunks/worker.d.DoNjFAiv.d.ts +0 -169
  79. package/dist/execute.d.ts +0 -148
  80. package/dist/execute.js +0 -13
  81. package/dist/worker.js +0 -124
  82. package/dist/workers/forks.js +0 -43
  83. package/dist/workers/threads.js +0 -31
  84. package/dist/workers/vmForks.js +0 -47
  85. package/dist/workers/vmThreads.js +0 -37
  86. package/dist/workers.d.ts +0 -37
  87. package/dist/workers.js +0 -30
  88. package/execute.d.ts +0 -1
  89. package/utils.d.ts +0 -1
  90. package/workers.d.ts +0 -1
@@ -1,13 +1,12 @@
  import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
  import path, { win32, dirname, join, resolve } from 'node:path';
+ import { isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, createDefer, slash, shuffle, toArray } from '@vitest/utils/helpers';
  import { isAbsolute, join as join$1, dirname as dirname$1, resolve as resolve$1, relative, normalize } from 'pathe';
  import pm from 'picomatch';
  import { glob } from 'tinyglobby';
  import c from 'tinyrainbow';
- import { slash, cleanUrl } from 'vite-node/utils';
  import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.CXFFjsi8.js';
  import crypto from 'node:crypto';
- import { createDefer, shuffle, toArray, slash as slash$1 } from '@vitest/utils';
  import { builtinModules, createRequire } from 'node:module';
  import process$1 from 'node:process';
  import fs$1, { writeFile, rename, stat, unlink } from 'node:fs/promises';
@@ -15,28 +14,26 @@ import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL
  import assert from 'node:assert';
  import v8 from 'node:v8';
  import { format, inspect } from 'node:util';
- import { version, mergeConfig } from 'vite';
- import { c as configFiles, w as workspacesFiles, e as extraInlineDeps, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.DnKduX2e.js';
+ import { fetchModule, version, mergeConfig } from 'vite';
+ import { c as configFiles, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.D_Q9UYh-.js';
  import { a as isWindows } from './env.D4Lgay0q.js';
  import * as nodeos from 'node:os';
- import nodeos__default from 'node:os';
+ import nodeos__default, { tmpdir } from 'node:os';
  import { isatty } from 'node:tty';
  import EventEmitter from 'node:events';
- import { c as createBirpc } from './index.B521nVV-.js';
+ import { c as createBirpc } from './index.Bgo3tNWt.js';
  import Tinypool$1, { Tinypool } from 'tinypool';
- import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.CVytUJuF.js';
+ import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.gXq-5P3n.js';
  import { MessageChannel } from 'node:worker_threads';
  import { hasFailed } from '@vitest/runner/utils';
  import { rootDir } from '../path.js';
  import { isCI, provider } from 'std-env';
- import { r as resolveCoverageProviderModule } from './coverage.DVF1vEu8.js';
+ import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js';

  function groupBy(collection, iteratee) {
  return collection.reduce((acc, item) => {
  const key = iteratee(item);
- acc[key] ||= [];
- acc[key].push(item);
- return acc;
+ return acc[key] ||= [], acc[key].push(item), acc;
  }, {});
  }
  function stdout() {
@@ -49,7 +46,7 @@ function escapeRegExp(s) {
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }
  function wildcardPatternToRegExp(pattern) {
- const negated = pattern.startsWith("!");
+ const negated = pattern[0] === "!";
  if (negated) pattern = pattern.slice(1);
  let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
  if (negated) regexp = `(?!${regexp})`;
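A worked expansion of wildcardPatternToRegExp from the hunk above (illustrative only; the final new RegExp(...) construction falls outside this hunk):

// wildcardPatternToRegExp("packages/*/test"):
//   "packages/*/test".split("*")            -> ["packages/", "/test"]
//   .map(escapeRegExp).join(".*") + "$"     -> "packages/.*/test$"
// wildcardPatternToRegExp("!packages/*"):
//   negated, so the body "packages/.*$" is wrapped as "(?!packages/.*$)"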
@@ -2017,7 +2014,7 @@ function normalizeid(id) {
  if (typeof id !== "string") {
  id = id.toString();
  }
- if (/(node|data|http|https|file):/.test(id)) {
+ if (/(?:node|data|http|https|file):/.test(id)) {
  return id;
  }
  if (BUILTIN_MODULES.has(id)) {
@@ -2051,7 +2048,7 @@ function _resolve$1(id, options = {}) {
  throw new TypeError("input must be a `string` or `URL`");
  }
  }
- if (/(node|data|http|https):/.test(id)) {
+ if (/(?:node|data|http|https):/.test(id)) {
  return id;
  }
  if (BUILTIN_MODULES.has(id)) {
@@ -2385,8 +2382,7 @@ const isPackageListed = quansync(function* (name, cwd) {
  isPackageListed.sync;

  function getWorkersCountByPercentage(percent) {
- const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
- const workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
+ const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length, workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
  return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
  }

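A quick worked example of getWorkersCountByPercentage above, assuming availableParallelism() reports 8 CPUs (illustrative numbers only):

// getWorkersCountByPercentage("150%")
//   maxWorkersCount = 8
//   Math.round(Number.parseInt("150%") / 100 * 8) = 12
//   Math.max(1, Math.min(8, 12)) = 8   // clamped to the CPU count
// getWorkersCountByPercentage("25%")
//   Math.round(25 / 100 * 8) = 2       // 2 workers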
@@ -2396,35 +2392,19 @@ const envsOrder = [
2396
2392
  "happy-dom",
2397
2393
  "edge-runtime"
2398
2394
  ];
2399
- function getTransformMode(patterns, filename) {
2400
- if (patterns.web && pm.isMatch(filename, patterns.web)) return "web";
2401
- if (patterns.ssr && pm.isMatch(filename, patterns.ssr)) return "ssr";
2402
- return void 0;
2403
- }
2404
2395
  async function groupFilesByEnv(files) {
2405
2396
  const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
2406
2397
  const code = await promises$1.readFile(filepath, "utf-8");
2407
2398
  // 1. Check for control comments in the file
2408
2399
  let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
2409
- // 2. Check for globals
2410
- if (!env) {
2411
- for (const [glob, target] of project.config.environmentMatchGlobs || []) if (pm.isMatch(filepath, glob, { cwd: project.config.root })) {
2412
- env = target;
2413
- break;
2414
- }
2415
- }
2416
- // 3. Fallback to global env
2400
+ // 2. Fallback to global env
2417
2401
  env ||= project.config.environment || "node";
2418
- const transformMode = getTransformMode(project.config.testTransformMode, filepath);
2419
2402
  let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
2420
2403
  if (envOptionsJson?.endsWith("*/"))
2421
2404
  // Trim closing Docblock characters the above regex might have captured
2422
2405
  envOptionsJson = envOptionsJson.slice(0, -2);
2423
- const envOptions = JSON.parse(envOptionsJson || "null");
2424
- const envKey = env === "happy-dom" ? "happyDOM" : env;
2425
- const environment = {
2406
+ const envOptions = JSON.parse(envOptionsJson || "null"), envKey = env === "happy-dom" ? "happyDOM" : env, environment = {
2426
2407
  name: env,
2427
- transformMode,
2428
2408
  options: envOptions ? { [envKey]: envOptions } : null
2429
2409
  };
2430
2410
  return {
@@ -2439,45 +2419,198 @@ async function groupFilesByEnv(files) {
2439
2419
  return groupBy(filesWithEnv, ({ environment }) => environment.name);
2440
2420
  }
2441
2421
 
2442
- const created = /* @__PURE__ */ new Set();
2443
- const promises = /* @__PURE__ */ new Map();
2422
+ const created = /* @__PURE__ */ new Set(), promises = /* @__PURE__ */ new Map();
2423
+ function createFetchModuleFunction(resolver, cacheFs = false, tmpDir = join$1(tmpdir(), nanoid())) {
2424
+ const cachedFsResults = /* @__PURE__ */ new Map();
2425
+ return async (url, importer, environment, options) => {
2426
+ // We are copy pasting Vite's externalization logic from `fetchModule` because
2427
+ // we instead rely on our own `shouldExternalize` method because Vite
2428
+ // doesn't support `resolve.external` in non SSR environments (jsdom/happy-dom)
2429
+ if (url.startsWith("data:")) return {
2430
+ externalize: url,
2431
+ type: "builtin"
2432
+ };
2433
+ if (url === "/@vite/client" || url === "@vite/client")
2434
+ // this will be stubbed
2435
+ return {
2436
+ externalize: "/@vite/client",
2437
+ type: "module"
2438
+ };
2439
+ const isFileUrl = url.startsWith("file://");
2440
+ if (isExternalUrl(url) && !isFileUrl) return {
2441
+ externalize: url,
2442
+ type: "network"
2443
+ };
2444
+ // Vite does the same in `fetchModule`, but we want to externalize modules ourselves,
2445
+ // so we do this first to resolve the module and check its `id`. The next call of
2446
+ // `ensureEntryFromUrl` inside `fetchModule` is cached and should take no time
2447
+ // This also makes it so externalized modules are inside the module graph.
2448
+ const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url)), cached = !!moduleGraphModule.transformResult;
2449
+ // if url is already cached, we can just confirm it's also cached on the server
2450
+ if (options?.cached && cached) return { cache: true };
2451
+ if (moduleGraphModule.id) {
2452
+ const externalize = await resolver.shouldExternalize(moduleGraphModule.id);
2453
+ if (externalize) return {
2454
+ externalize,
2455
+ type: "module"
2456
+ };
2457
+ }
2458
+ const moduleRunnerModule = await fetchModule(environment, url, importer, {
2459
+ ...options,
2460
+ inlineSourceMap: false
2461
+ }).catch(handleRollupError), result = processResultSource(environment, moduleRunnerModule);
2462
+ if (!cacheFs || !("code" in result)) return result;
2463
+ const code = result.code;
2464
+ // to avoid serialising large chunks of code,
2465
+ // we store them in a tmp file and read in the test thread
2466
+ if (cachedFsResults.has(result.id)) return getCachedResult(result, cachedFsResults);
2467
+ const dir = join$1(tmpDir, environment.name), name = hash("sha1", result.id, "hex"), tmp = join$1(dir, name);
2468
+ if (!created.has(dir)) mkdirSync(dir, { recursive: true }), created.add(dir);
2469
+ return promises.has(tmp) ? (await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults)) : (promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))), await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults));
2470
+ };
2471
+ }
2472
+ let SOURCEMAPPING_URL = "sourceMa";
2473
+ SOURCEMAPPING_URL += "ppingURL";
2474
+ const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
2475
+ function processResultSource(environment, result) {
2476
+ if (!("code" in result)) return result;
2477
+ const node = environment.moduleGraph.getModuleById(result.id);
2478
+ if (node?.transformResult)
2479
+ // this also overrides node.transformResult.code which is also what the module
2480
+ // runner does under the hood by default (we disable source maps inlining)
2481
+ inlineSourceMap(node.transformResult);
2482
+ return {
2483
+ ...result,
2484
+ code: node?.transformResult?.code || result.code
2485
+ };
2486
+ }
2487
+ const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
2488
+ // we have to inline the source map ourselves, because
2489
+ // - we don't need //# sourceURL since we are running code in VM
2490
+ // - important in stack traces and the V8 coverage
2491
+ // - we need to inject an empty line for --inspect-brk
2492
+ function inlineSourceMap(result) {
2493
+ const map = result.map;
2494
+ let code = result.code;
2495
+ if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
2496
+ if (OTHER_SOURCE_MAP_REGEXP.lastIndex = 0, OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
2497
+ const sourceMap = { ...map };
2498
+ // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
2499
+ // so that debuggers can be set to break on first line
2500
+ if (sourceMap.mappings[0] === ";") sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
2501
+ return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
2502
+ }
2503
+ function genSourceMapUrl(map) {
2504
+ if (typeof map !== "string") map = JSON.stringify(map);
2505
+ return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
2506
+ }
2507
+ function getCachedResult(result, cachedFsResults) {
2508
+ const tmp = cachedFsResults.get(result.id);
2509
+ if (!tmp) throw new Error(`The cached result was returned too early for ${result.id}.`);
2510
+ return {
2511
+ cached: true,
2512
+ file: result.file,
2513
+ id: result.id,
2514
+ tmp,
2515
+ url: result.url,
2516
+ invalidate: result.invalidate
2517
+ };
2518
+ }
2519
+ // serialize rollup error on server to preserve details as a test error
2520
+ function handleRollupError(e) {
2521
+ throw e instanceof Error && ("plugin" in e || "frame" in e || "id" in e) ? {
2522
+ name: e.name,
2523
+ message: e.message,
2524
+ stack: e.stack,
2525
+ cause: e.cause,
2526
+ __vitest_rollup_error__: {
2527
+ plugin: e.plugin,
2528
+ id: e.id,
2529
+ loc: e.loc,
2530
+ frame: e.frame
2531
+ }
2532
+ } : e;
2533
+ }
2534
+ /**
2535
+ * Performs an atomic write operation using the write-then-rename pattern.
2536
+ *
2537
+ * Why we need this:
2538
+ * - Ensures file integrity by never leaving partially written files on disk
2539
+ * - Prevents other processes from reading incomplete data during writes
2540
+ * - Particularly important for test files where incomplete writes could cause test failures
2541
+ *
2542
+ * The implementation writes to a temporary file first, then renames it to the target path.
2543
+ * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
2544
+ * guaranteeing that other processes will only ever see the complete file.
2545
+ *
2546
+ * Added in https://github.com/vitest-dev/vitest/pull/7531
2547
+ */
2548
+ async function atomicWriteFile(realFilePath, data) {
2549
+ const dir = dirname$1(realFilePath), tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
2550
+ try {
2551
+ await writeFile(tmpFilePath, data, "utf-8"), await rename(tmpFilePath, realFilePath);
2552
+ } finally {
2553
+ try {
2554
+ if (await stat(tmpFilePath)) await unlink(tmpFilePath);
2555
+ } catch {}
2556
+ }
2557
+ }
2558
+
2559
+ // this is copy pasted from vite
2560
+ function normalizeResolvedIdToUrl(environment, resolvedId) {
2561
+ const root = environment.config.root, depsOptimizer = environment.depsOptimizer;
2562
+ let url;
2563
+ // normalize all imports into resolved URLs
2564
+ // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
2565
+ if (resolvedId.startsWith(withTrailingSlash$1(root)))
2566
+ // in root: infer short absolute path from root
2567
+ url = resolvedId.slice(root.length);
2568
+ else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
2569
+ // an optimized deps may not yet exists in the filesystem, or
2570
+ // a regular file exists but is out of root: rewrite to absolute /@fs/ paths
2571
+ url = path.posix.join("/@fs/", resolvedId);
2572
+ else url = resolvedId;
2573
+ // if the resolved id is not a valid browser import specifier,
2574
+ // prefix it to make it valid. We will strip this before feeding it
2575
+ // back into the transform pipeline
2576
+ if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
2577
+ return url;
2578
+ }
2579
+
2444
2580
  function createMethodsRPC(project, options = {}) {
2445
- const ctx = project.vitest;
2446
- const cacheFs = options.cacheFs ?? false;
2581
+ const ctx = project.vitest, cacheFs = options.cacheFs ?? false, fetch = createFetchModuleFunction(project._resolver, cacheFs, project.tmpDir);
2447
2582
  return {
2583
+ async fetch(url, importer, environmentName, options) {
2584
+ const environment = project.vite.environments[environmentName];
2585
+ if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2586
+ const start = performance.now();
2587
+ try {
2588
+ return await fetch(url, importer, environment, options);
2589
+ } finally {
2590
+ project.vitest.state.transformTime += performance.now() - start;
2591
+ }
2592
+ },
2593
+ async resolve(id, importer, environmentName) {
2594
+ const environment = project.vite.environments[environmentName];
2595
+ if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2596
+ const resolved = await environment.pluginContainer.resolveId(id, importer);
2597
+ return resolved ? {
2598
+ file: cleanUrl(resolved.id),
2599
+ url: normalizeResolvedIdToUrl(environment, resolved.id),
2600
+ id: resolved.id
2601
+ } : null;
2602
+ },
2448
2603
  snapshotSaved(snapshot) {
2449
2604
  ctx.snapshot.add(snapshot);
2450
2605
  },
2451
2606
  resolveSnapshotPath(testPath) {
2452
2607
  return ctx.snapshot.resolvePath(testPath, { config: project.serializedConfig });
2453
2608
  },
2454
- async fetch(id, transformMode) {
2455
- const result = await project.vitenode.fetchResult(id, transformMode).catch(handleRollupError);
2456
- const code = result.code;
2457
- if (!cacheFs || result.externalize) return result;
2458
- if ("id" in result && typeof result.id === "string") return { id: result.id };
2459
- if (code == null) throw new Error(`Failed to fetch module ${id}`);
2460
- const dir = join$1(project.tmpDir, transformMode);
2461
- const name = hash("sha1", id, "hex");
2462
- const tmp = join$1(dir, name);
2463
- if (!created.has(dir)) {
2464
- mkdirSync(dir, { recursive: true });
2465
- created.add(dir);
2466
- }
2467
- if (promises.has(tmp)) {
2468
- await promises.get(tmp);
2469
- return { id: tmp };
2470
- }
2471
- promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp)));
2472
- await promises.get(tmp);
2473
- Object.assign(result, { id: tmp });
2474
- return { id: tmp };
2475
- },
2476
- resolveId(id, importer, transformMode) {
2477
- return project.vitenode.resolveId(id, importer, transformMode).catch(handleRollupError);
2478
- },
2479
- transform(id, environment) {
2480
- return project.vitenode.transformModule(id, environment).catch(handleRollupError);
2609
+ async transform(id) {
2610
+ const environment = project.vite.environments.__vitest_vm__;
2611
+ if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2612
+ const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id)), result = await environment.transformRequest(url).catch(handleRollupError);
2613
+ return { code: result?.code };
2481
2614
  },
2482
2615
  async onQueued(file) {
2483
2616
  if (options.collect) ctx.state.collectFiles(project, [file]);
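A sketch of what inlineSourceMap/genSourceMapUrl from the hunk above append to a transformed module (abbreviated payload; the exact base64 depends on the map):

// ...transformed module code (trimmed)...
//# sourceMappingSource=vite-generated
//# sourceMappingURL=data:application/json;base64,<base64-encoded JSON of the source map>
// If the map's mappings start with ";", "AAAA,CAAA" is prefixed first so that line 1
// gets a 1:1 mapping ([0,0,0,0], [1,0,0,0]) and debuggers can break on the first line.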
@@ -2512,63 +2645,12 @@ function createMethodsRPC(project, options = {}) {
2512
2645
  }
2513
2646
  };
2514
2647
  }
2515
- // serialize rollup error on server to preserve details as a test error
2516
- function handleRollupError(e) {
2517
- if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e))
2518
- // eslint-disable-next-line no-throw-literal
2519
- throw {
2520
- name: e.name,
2521
- message: e.message,
2522
- stack: e.stack,
2523
- cause: e.cause,
2524
- __vitest_rollup_error__: {
2525
- plugin: e.plugin,
2526
- id: e.id,
2527
- loc: e.loc,
2528
- frame: e.frame
2529
- }
2530
- };
2531
- throw e;
2532
- }
2533
- /**
2534
- * Performs an atomic write operation using the write-then-rename pattern.
2535
- *
2536
- * Why we need this:
2537
- * - Ensures file integrity by never leaving partially written files on disk
2538
- * - Prevents other processes from reading incomplete data during writes
2539
- * - Particularly important for test files where incomplete writes could cause test failures
2540
- *
2541
- * The implementation writes to a temporary file first, then renames it to the target path.
2542
- * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
2543
- * guaranteeing that other processes will only ever see the complete file.
2544
- *
2545
- * Added in https://github.com/vitest-dev/vitest/pull/7531
2546
- */
2547
- async function atomicWriteFile(realFilePath, data) {
2548
- const dir = dirname$1(realFilePath);
2549
- const tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
2550
- try {
2551
- await writeFile(tmpFilePath, data, "utf-8");
2552
- await rename(tmpFilePath, realFilePath);
2553
- } finally {
2554
- try {
2555
- if (await stat(tmpFilePath)) await unlink(tmpFilePath);
2556
- } catch {}
2557
- }
2558
- }
2559
2648
 
2560
2649
  function createChildProcessChannel$1(project, collect = false) {
2561
- const emitter = new EventEmitter();
2562
- const events = {
2650
+ const emitter = new EventEmitter(), events = {
2563
2651
  message: "message",
2564
2652
  response: "response"
2565
- };
2566
- const channel = {
2567
- onMessage: (callback) => emitter.on(events.message, callback),
2568
- postMessage: (message) => emitter.emit(events.response, message),
2569
- onClose: () => emitter.removeAllListeners()
2570
- };
2571
- const rpc = createBirpc(createMethodsRPC(project, {
2653
+ }, rpc = createBirpc(createMethodsRPC(project, {
2572
2654
  cacheFs: true,
2573
2655
  collect
2574
2656
  }), {
@@ -2591,23 +2673,22 @@ function createChildProcessChannel$1(project, collect = false) {
2591
2673
  on(fn) {
2592
2674
  emitter.on(events.response, fn);
2593
2675
  },
2594
- onTimeoutError(functionName) {
2595
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
2596
- }
2676
+ timeout: -1
2597
2677
  });
2598
2678
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
2679
+ const channel = {
2680
+ onMessage: (callback) => emitter.on(events.message, callback),
2681
+ postMessage: (message) => emitter.emit(events.response, message),
2682
+ onClose: () => {
2683
+ emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2684
+ }
2685
+ };
2599
2686
  return channel;
2600
2687
  }
2601
- function createForksPool(vitest, { execArgv, env }) {
2602
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
2603
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
2604
- const poolOptions = vitest.config.poolOptions?.forks ?? {};
2605
- const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
2606
- const minThreads = poolOptions.minForks ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
2607
- const worker = resolve(vitest.distPath, "workers/forks.js");
2608
- const options = {
2688
+ function createForksPool(vitest, { execArgv, env }, specifications) {
2689
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
2609
2690
  runtime: "child_process",
2610
- filename: resolve(vitest.distPath, "worker.js"),
2691
+ filename: resolve(vitest.distPath, "worker-base.js"),
2611
2692
  teardown: "teardown",
2612
2693
  maxThreads,
2613
2694
  minThreads,
@@ -2615,24 +2696,16 @@ function createForksPool(vitest, { execArgv, env }) {
2615
2696
  execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2616
2697
  terminateTimeout: vitest.config.teardownTimeout,
2617
2698
  concurrentTasksPerWorker: 1
2618
- };
2619
- const isolated = poolOptions.isolate ?? true;
2699
+ }, isolated = poolOptions.isolate ?? true;
2620
2700
  if (isolated) options.isolateWorkers = true;
2621
- if (poolOptions.singleFork || !vitest.config.fileParallelism) {
2622
- options.maxThreads = 1;
2623
- options.minThreads = 1;
2624
- }
2625
- const pool = new Tinypool(options);
2626
- const runWithFiles = (name) => {
2701
+ if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2702
+ const pool = new Tinypool(options), runWithFiles = (name) => {
2627
2703
  let id = 0;
2628
2704
  async function runFiles(project, config, files, environment, invalidates = []) {
2629
2705
  const paths = files.map((f) => f.filepath);
2630
2706
  vitest.state.clearFiles(project, paths);
2631
- const channel = createChildProcessChannel$1(project, name === "collect");
2632
- const workerId = ++id;
2633
- const data = {
2707
+ const channel = createChildProcessChannel$1(project, name === "collect"), workerId = ++id, data = {
2634
2708
  pool: "forks",
2635
- worker,
2636
2709
  config,
2637
2710
  files,
2638
2711
  invalidates,
@@ -2656,40 +2729,25 @@ function createForksPool(vitest, { execArgv, env }) {
2656
2729
  return async (specs, invalidates) => {
2657
2730
  // Cancel pending tasks from pool when possible
2658
2731
  vitest.onCancel(() => pool.cancelPendingTasks());
2659
- const configs = /* @__PURE__ */ new WeakMap();
2660
- const getConfig = (project) => {
2732
+ const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2661
2733
  if (configs.has(project)) return configs.get(project);
2662
- const _config = project.getSerializableConfig();
2663
- const config = wrapSerializableConfig(_config);
2664
- configs.set(project, config);
2665
- return config;
2666
- };
2667
- const singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork);
2668
- const multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2734
+ const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
2735
+ return configs.set(project, config), config;
2736
+ }, singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork), multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2669
2737
  if (multipleForks.length) {
2670
- const filesByEnv = await groupFilesByEnv(multipleForks);
2671
- const files = Object.values(filesByEnv).flat();
2672
- const results = [];
2738
+ const filesByEnv = await groupFilesByEnv(multipleForks), files = Object.values(filesByEnv).flat(), results = [];
2673
2739
  if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2674
2740
  else {
2675
2741
  // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2676
2742
  // Tasks are still running parallel but environments are isolated between tasks.
2677
2743
  const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2678
- for (const group of Object.values(grouped)) {
2679
- // Push all files to pool's queue
2680
- results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2681
- // Once all tasks are running or finished, recycle worker for isolation.
2682
- // On-going workers will run in the previous environment.
2683
- await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
2684
- await pool.recycleWorkers();
2685
- }
2744
+ for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2686
2745
  }
2687
2746
  const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2688
2747
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2689
2748
  }
2690
2749
  if (singleFork.length) {
2691
- const filesByEnv = await groupFilesByEnv(singleFork);
2692
- const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2750
+ const filesByEnv = await groupFilesByEnv(singleFork), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2693
2751
  for (const env of envs) {
2694
2752
  const files = filesByEnv[env];
2695
2753
  if (!files?.length) continue;
@@ -2713,10 +2771,7 @@ function createForksPool(vitest, { execArgv, env }) {
2713
2771
  }
2714
2772
 
2715
2773
  function createWorkerChannel$1(project, collect) {
2716
- const channel = new MessageChannel();
2717
- const port = channel.port2;
2718
- const workerPort = channel.port1;
2719
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
2774
+ const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
2720
2775
  eventNames: ["onCancel"],
2721
2776
  post(v) {
2722
2777
  port.postMessage(v);
@@ -2724,25 +2779,21 @@ function createWorkerChannel$1(project, collect) {
2724
2779
  on(fn) {
2725
2780
  port.on("message", fn);
2726
2781
  },
2727
- onTimeoutError(functionName) {
2728
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
2729
- }
2782
+ timeout: -1
2730
2783
  });
2731
2784
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
2785
+ const onClose = () => {
2786
+ port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2787
+ };
2732
2788
  return {
2733
2789
  workerPort,
2734
- port
2790
+ port,
2791
+ onClose
2735
2792
  };
2736
2793
  }
2737
- function createThreadsPool(vitest, { execArgv, env }) {
2738
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
2739
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
2740
- const poolOptions = vitest.config.poolOptions?.threads ?? {};
2741
- const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
2742
- const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
2743
- const worker = resolve(vitest.distPath, "workers/threads.js");
2744
- const options = {
2745
- filename: resolve(vitest.distPath, "worker.js"),
2794
+ function createThreadsPool(vitest, { execArgv, env }, specifications) {
2795
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
2796
+ filename: resolve(vitest.distPath, "worker-base.js"),
2746
2797
  teardown: "teardown",
2747
2798
  useAtomics: poolOptions.useAtomics ?? false,
2748
2799
  maxThreads,
@@ -2751,28 +2802,16 @@ function createThreadsPool(vitest, { execArgv, env }) {
2751
2802
  execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2752
2803
  terminateTimeout: vitest.config.teardownTimeout,
2753
2804
  concurrentTasksPerWorker: 1
2754
- };
2755
- const isolated = poolOptions.isolate ?? true;
2805
+ }, isolated = poolOptions.isolate ?? true;
2756
2806
  if (isolated) options.isolateWorkers = true;
2757
- if (poolOptions.singleThread || !vitest.config.fileParallelism) {
2758
- options.maxThreads = 1;
2759
- options.minThreads = 1;
2760
- }
2761
- const pool = new Tinypool$1(options);
2762
- const runWithFiles = (name) => {
2807
+ if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2808
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
2763
2809
  let id = 0;
2764
2810
  async function runFiles(project, config, files, environment, invalidates = []) {
2765
2811
  const paths = files.map((f) => f.filepath);
2766
2812
  vitest.state.clearFiles(project, paths);
2767
- const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
2768
- const onClose = () => {
2769
- port.close();
2770
- workerPort.close();
2771
- };
2772
- const workerId = ++id;
2773
- const data = {
2813
+ const { workerPort, onClose } = createWorkerChannel$1(project, name === "collect"), workerId = ++id, data = {
2774
2814
  pool: "threads",
2775
- worker,
2776
2815
  port: workerPort,
2777
2816
  config,
2778
2817
  files,
@@ -2798,39 +2837,25 @@ function createThreadsPool(vitest, { execArgv, env }) {
2798
2837
  return async (specs, invalidates) => {
2799
2838
  // Cancel pending tasks from pool when possible
2800
2839
  vitest.onCancel(() => pool.cancelPendingTasks());
2801
- const configs = /* @__PURE__ */ new WeakMap();
2802
- const getConfig = (project) => {
2840
+ const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2803
2841
  if (configs.has(project)) return configs.get(project);
2804
2842
  const config = project.serializedConfig;
2805
- configs.set(project, config);
2806
- return config;
2807
- };
2808
- const singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread);
2809
- const multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
2843
+ return configs.set(project, config), config;
2844
+ }, singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread), multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
2810
2845
  if (multipleThreads.length) {
2811
- const filesByEnv = await groupFilesByEnv(multipleThreads);
2812
- const files = Object.values(filesByEnv).flat();
2813
- const results = [];
2846
+ const filesByEnv = await groupFilesByEnv(multipleThreads), files = Object.values(filesByEnv).flat(), results = [];
2814
2847
  if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2815
2848
  else {
2816
2849
  // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2817
2850
  // Tasks are still running parallel but environments are isolated between tasks.
2818
2851
  const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2819
- for (const group of Object.values(grouped)) {
2820
- // Push all files to pool's queue
2821
- results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2822
- // Once all tasks are running or finished, recycle worker for isolation.
2823
- // On-going workers will run in the previous environment.
2824
- await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
2825
- await pool.recycleWorkers();
2826
- }
2852
+ for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2827
2853
  }
2828
2854
  const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2829
2855
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2830
2856
  }
2831
2857
  if (singleThreads.length) {
2832
- const filesByEnv = await groupFilesByEnv(singleThreads);
2833
- const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2858
+ const filesByEnv = await groupFilesByEnv(singleThreads), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2834
2859
  for (const env of envs) {
2835
2860
  const files = filesByEnv[env];
2836
2861
  if (!files?.length) continue;
@@ -2854,102 +2879,69 @@ function createThreadsPool(vitest, { execArgv, env }) {
2854
2879
  }
2855
2880
 
2856
2881
  function createTypecheckPool(vitest) {
2857
- const promisesMap = /* @__PURE__ */ new WeakMap();
2858
- const rerunTriggered = /* @__PURE__ */ new WeakSet();
2882
+ const promisesMap = /* @__PURE__ */ new WeakMap(), rerunTriggered = /* @__PURE__ */ new WeakSet();
2859
2883
  async function onParseEnd(project, { files, sourceErrors }) {
2860
- const checker = project.typechecker;
2861
- const { packs, events } = checker.getTestPacksAndEvents();
2862
- await vitest._testRun.updated(packs, events);
2863
- if (!project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
2884
+ const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
2885
+ if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
2864
2886
  const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
2865
2887
  if (processError) {
2866
2888
  const error = new Error(checker.getOutput());
2867
- error.stack = "";
2868
- vitest.state.catchError(error, "Typecheck Error");
2889
+ error.stack = "", vitest.state.catchError(error, "Typecheck Error");
2869
2890
  }
2870
- promisesMap.get(project)?.resolve();
2871
- rerunTriggered.delete(project);
2872
2891
  // triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
2873
- if (vitest.config.watch && !vitest.runningPromise) {
2874
- await vitest.report("onFinished", files, []);
2875
- await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
2892
+ if (promisesMap.get(project)?.resolve(), rerunTriggered.delete(project), vitest.config.watch && !vitest.runningPromise) {
2893
+ const modules = files.map((file) => vitest.state.getReportedEntity(file)).filter((e) => e?.type === "module"), state = vitest.isCancelling ? "interrupted" : modules.some((m) => !m.ok()) ? "failed" : "passed";
2894
+ await vitest.report("onTestRunEnd", modules, [], state), await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
2876
2895
  }
2877
2896
  }
2878
2897
  async function createWorkspaceTypechecker(project, files) {
2879
2898
  const checker = project.typechecker ?? new Typechecker(project);
2880
- if (project.typechecker) return checker;
2881
- project.typechecker = checker;
2882
- checker.setFiles(files);
2883
- checker.onParseStart(async () => {
2899
+ return project.typechecker ? checker : (project.typechecker = checker, checker.setFiles(files), checker.onParseStart(async () => {
2884
2900
  const files = checker.getTestFiles();
2885
2901
  for (const file of files) await vitest._testRun.enqueued(project, file);
2886
2902
  await vitest._testRun.collected(project, files);
2887
- });
2888
- checker.onParseEnd((result) => onParseEnd(project, result));
2889
- checker.onWatcherRerun(async () => {
2890
- rerunTriggered.add(project);
2891
- if (!vitest.runningPromise) {
2892
- vitest.state.clearErrors();
2893
- await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
2894
- }
2903
+ }), checker.onParseEnd((result) => onParseEnd(project, result)), checker.onWatcherRerun(async () => {
2904
+ if (rerunTriggered.add(project), !vitest.runningPromise) vitest.state.clearErrors(), await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
2895
2905
  await checker.collectTests();
2896
2906
  const testFiles = checker.getTestFiles();
2897
2907
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
2898
2908
  await vitest._testRun.collected(project, testFiles);
2899
2909
  const { packs, events } = checker.getTestPacksAndEvents();
2900
2910
  await vitest._testRun.updated(packs, events);
2901
- });
2902
- return checker;
2911
+ }), checker);
2903
2912
  }
2904
2913
  async function startTypechecker(project, files) {
2905
2914
  if (project.typechecker) return;
2906
2915
  const checker = await createWorkspaceTypechecker(project, files);
2907
- await checker.collectTests();
2908
- await checker.start();
2916
+ await checker.collectTests(), await checker.start();
2909
2917
  }
2910
2918
  async function collectTests(specs) {
2911
2919
  const specsByProject = groupBy(specs, (spec) => spec.project.name);
2912
2920
  for (const name in specsByProject) {
2913
- const project = specsByProject[name][0].project;
2914
- const files = specsByProject[name].map((spec) => spec.moduleId);
2915
- const checker = await createWorkspaceTypechecker(project, files);
2916
- checker.setFiles(files);
2917
- await checker.collectTests();
2921
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), checker = await createWorkspaceTypechecker(project, files);
2922
+ checker.setFiles(files), await checker.collectTests();
2918
2923
  const testFiles = checker.getTestFiles();
2919
2924
  vitest.state.collectFiles(project, testFiles);
2920
2925
  }
2921
2926
  }
2922
2927
  async function runTests(specs) {
2923
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
2924
- const promises = [];
2928
+ const specsByProject = groupBy(specs, (spec) => spec.project.name), promises = [];
2925
2929
  for (const name in specsByProject) {
2926
- const project = specsByProject[name][0].project;
2927
- const files = specsByProject[name].map((spec) => spec.moduleId);
2928
- const promise = createDefer();
2929
- // check that watcher actually triggered rerun
2930
- const _p = new Promise((resolve) => {
2930
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), _p = new Promise((resolve) => {
2931
2931
  const _i = setInterval(() => {
2932
- if (!project.typechecker || rerunTriggered.has(project)) {
2933
- resolve(true);
2934
- clearInterval(_i);
2935
- }
2932
+ if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
2936
2933
  });
2937
2934
  setTimeout(() => {
2938
- resolve(false);
2939
- clearInterval(_i);
2935
+ resolve(false), clearInterval(_i);
2940
2936
  }, 500).unref();
2941
- });
2942
- const triggered = await _p;
2937
+ }), triggered = await _p;
2943
2938
  if (project.typechecker && !triggered) {
2944
2939
  const testFiles = project.typechecker.getTestFiles();
2945
2940
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
2946
- await vitest._testRun.collected(project, testFiles);
2947
- await onParseEnd(project, project.typechecker.getResult());
2941
+ await vitest._testRun.collected(project, testFiles), await onParseEnd(project, project.typechecker.getResult());
2948
2942
  continue;
2949
2943
  }
2950
- promises.push(promise);
2951
- promisesMap.set(project, promise);
2952
- promises.push(startTypechecker(project, files));
2944
+ promises.push(promise), promisesMap.set(project, promise), promises.push(startTypechecker(project, files));
2953
2945
  }
2954
2946
  await Promise.all(promises);
2955
2947
  }
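runTests above polls for up to roughly 500 ms to see whether the TSC watcher actually triggered a rerun before reusing the cached typecheck result. A minimal standalone sketch of that poll-or-timeout pattern (hypothetical waitFor helper, not part of the package):

// Resolves true as soon as check() passes, false once timeoutMs elapses.
function waitFor(check, timeoutMs = 500) {
  return new Promise((resolve) => {
    const timer = setInterval(() => {
      if (check()) {
        clearInterval(timer);
        resolve(true);
      }
    });
    setTimeout(() => {
      clearInterval(timer);
      resolve(false);
    }, timeoutMs).unref();
  });
}
// usage mirroring the code above:
// const triggered = await waitFor(() => !project.typechecker || rerunTriggered.has(project));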
@@ -2993,8 +2985,7 @@ function stringToBytes(input, percentageReference) {
2993
2985
  let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
2994
2986
  if (trailingChars && numericString) {
2995
2987
  const numericValue = Number.parseFloat(numericString);
2996
- trailingChars = trailingChars.toLowerCase();
2997
- switch (trailingChars) {
2988
+ switch (trailingChars = trailingChars.toLowerCase(), trailingChars) {
2998
2989
  case "%":
2999
2990
  input = numericValue / 100;
3000
2991
  break;
@@ -3010,26 +3001,20 @@ function stringToBytes(input, percentageReference) {
3010
3001
  }
3011
3002
  }
3012
3003
  } else input = Number.parseFloat(input);
3013
- if (typeof input === "number") if (input <= 1 && input > 0) if (percentageReference) return Math.floor(input * percentageReference);
3014
- else throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3015
- else if (input > 1) return Math.floor(input);
3004
+ if (typeof input === "number") if (input <= 1 && input > 0) {
3005
+ if (percentageReference) return Math.floor(input * percentageReference);
3006
+ throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3007
+ } else if (input > 1) return Math.floor(input);
3016
3008
  else throw new Error("Unexpected numerical input for \"memoryLimit\"");
3017
3009
  return null;
3018
3010
  }
3019
3011
 
3020
3012
  const suppressWarningsPath$1 = resolve(rootDir, "./suppress-warnings.cjs");
3021
3013
  function createChildProcessChannel(project, collect) {
3022
- const emitter = new EventEmitter();
3023
- const cleanup = () => emitter.removeAllListeners();
3024
- const events = {
3014
+ const emitter = new EventEmitter(), events = {
3025
3015
  message: "message",
3026
3016
  response: "response"
3027
- };
3028
- const channel = {
3029
- onMessage: (callback) => emitter.on(events.message, callback),
3030
- postMessage: (message) => emitter.emit(events.response, message)
3031
- };
3032
- const rpc = createBirpc(createMethodsRPC(project, {
3017
+ }, rpc = createBirpc(createMethodsRPC(project, {
3033
3018
  cacheFs: true,
3034
3019
  collect
3035
3020
  }), {
@@ -3052,26 +3037,22 @@ function createChildProcessChannel(project, collect) {
3052
3037
  on(fn) {
3053
3038
  emitter.on(events.response, fn);
3054
3039
  },
3055
- onTimeoutError(functionName) {
3056
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
3057
- }
3040
+ timeout: -1
3058
3041
  });
3059
3042
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
3060
- return {
3061
- channel,
3062
- cleanup
3043
+ const channel = {
3044
+ onMessage: (callback) => emitter.on(events.message, callback),
3045
+ postMessage: (message) => emitter.emit(events.response, message),
3046
+ onClose: () => {
3047
+ emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3048
+ }
3063
3049
  };
3050
+ return { channel };
3064
3051
  }
3065
- function createVmForksPool(vitest, { execArgv, env }) {
3066
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3067
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3068
- const poolOptions = vitest.config.poolOptions?.vmForks ?? {};
3069
- const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
3070
- const minThreads = poolOptions.maxForks ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
3071
- const worker = resolve(vitest.distPath, "workers/vmForks.js");
3072
- const options = {
3052
+ function createVmForksPool(vitest, { execArgv, env }, specifications) {
3053
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
3073
3054
  runtime: "child_process",
3074
- filename: resolve(vitest.distPath, "worker.js"),
3055
+ filename: resolve(vitest.distPath, "worker-vm.js"),
3075
3056
  maxThreads,
3076
3057
  minThreads,
3077
3058
  env,
@@ -3087,21 +3068,14 @@ function createVmForksPool(vitest, { execArgv, env }) {
3087
3068
  concurrentTasksPerWorker: 1,
3088
3069
  maxMemoryLimitBeforeRecycle: getMemoryLimit$1(vitest.config) || void 0
3089
3070
  };
3090
- if (poolOptions.singleFork || !vitest.config.fileParallelism) {
3091
- options.maxThreads = 1;
3092
- options.minThreads = 1;
3093
- }
3094
- const pool = new Tinypool$1(options);
3095
- const runWithFiles = (name) => {
3071
+ if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3072
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
3096
3073
  let id = 0;
3097
3074
  async function runFiles(project, config, files, environment, invalidates = []) {
3098
3075
  const paths = files.map((f) => f.filepath);
3099
3076
  vitest.state.clearFiles(project, paths);
3100
- const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
3101
- const workerId = ++id;
3102
- const data = {
3103
- pool: "forks",
3104
- worker,
3077
+ const { channel } = createChildProcessChannel(project, name === "collect"), workerId = ++id, data = {
3078
+ pool: "vmForks",
3105
3079
  config,
3106
3080
  files,
3107
3081
  invalidates,
@@ -3121,24 +3095,17 @@ function createVmForksPool(vitest, { execArgv, env }) {
3121
3095
  else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3122
3096
  else throw error;
3123
3097
  } finally {
3124
- cleanup();
3098
+ channel.onClose();
3125
3099
  }
3126
3100
  }
3127
3101
  return async (specs, invalidates) => {
3128
3102
  // Cancel pending tasks from pool when possible
3129
3103
  vitest.onCancel(() => pool.cancelPendingTasks());
3130
- const configs = /* @__PURE__ */ new Map();
3131
- const getConfig = (project) => {
3104
+ const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3132
3105
  if (configs.has(project)) return configs.get(project);
3133
- const _config = project.serializedConfig;
3134
- const config = wrapSerializableConfig(_config);
3135
- configs.set(project, config);
3136
- return config;
3137
- };
3138
- const filesByEnv = await groupFilesByEnv(specs);
3139
- const promises = Object.values(filesByEnv).flat();
3140
- const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
3141
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3106
+ const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
3107
+ return configs.set(project, config), config;
3108
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
  };
  };
@@ -3150,21 +3117,14 @@ function createVmForksPool(vitest, { execArgv, env }) {
  };
  }
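The vm pools above and below now size their workers from the specification list instead of fixed min/max defaults. A rough standalone sketch of that heuristic, assuming an 8-core machine; the helper name and parameters are illustrative, only the arithmetic mirrors the bundled code:

```ts
import * as os from "node:os";

// Illustrative sketch of the default worker-count heuristic used by the pools
// in this chunk (names are mine; only the arithmetic mirrors the bundled code).
function defaultWorkerCounts(specCount: number, watch: boolean, maxWorkers?: number) {
  const numCpus = typeof os.availableParallelism === "function"
    ? os.availableParallelism()
    : os.cpus().length;
  // watch mode keeps half the cores free for the host process
  const threadsCount = watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
  // a one-shot run never recommends more workers than there are test files
  const recommended = watch ? threadsCount : Math.min(threadsCount, specCount);
  const maxThreads = maxWorkers ?? recommended;
  const minThreads = watch ? Math.min(recommended, maxThreads) : 0;
  return { maxThreads, minThreads };
}

console.log(defaultWorkerCounts(3, false)); // on an 8-core machine: { maxThreads: 3, minThreads: 0 }
```

In a one-shot run the pool is capped by the number of test files and may scale down to zero idle workers; in watch mode the minimum stays at the recommended count so reruns start immediately.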
  function getMemoryLimit$1(config) {
- const memory = nodeos.totalmem();
- const limit = getWorkerMemoryLimit(config, "vmForks");
- if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
- // If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
+ const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmForks");
  // just ignore "memoryLimit" value because we cannot detect memory limit
- return null;
+ return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
  }
 
  const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
  function createWorkerChannel(project, collect) {
- const channel = new MessageChannel();
- const port = channel.port2;
- const workerPort = channel.port1;
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
+ const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
  eventNames: ["onCancel"],
  post(v) {
  port.postMessage(v);
@@ -3172,25 +3132,20 @@ function createWorkerChannel(project, collect) {
  on(fn) {
  port.on("message", fn);
  },
- onTimeoutError(functionName) {
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
- }
+ timeout: -1
  });
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
+ function onClose() {
+ workerPort.close(), port.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
+ }
  return {
  workerPort,
- port
+ onClose
  };
  }
- function createVmThreadsPool(vitest, { execArgv, env }) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
- const poolOptions = vitest.config.poolOptions?.vmThreads ?? {};
- const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
- const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
- const worker = resolve(vitest.distPath, "workers/vmThreads.js");
- const options = {
- filename: resolve(vitest.distPath, "worker.js"),
+ function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
+ filename: resolve(vitest.distPath, "worker-vm.js"),
  useAtomics: poolOptions.useAtomics ?? false,
  maxThreads,
  minThreads,
@@ -3207,21 +3162,14 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
  concurrentTasksPerWorker: 1,
  maxMemoryLimitBeforeRecycle: getMemoryLimit(vitest.config) || void 0
  };
- if (poolOptions.singleThread || !vitest.config.fileParallelism) {
- options.maxThreads = 1;
- options.minThreads = 1;
- }
- const pool = new Tinypool$1(options);
- const runWithFiles = (name) => {
+ if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
  let id = 0;
  async function runFiles(project, config, files, environment, invalidates = []) {
  const paths = files.map((f) => f.filepath);
  vitest.state.clearFiles(project, paths);
- const { workerPort, port } = createWorkerChannel(project, name === "collect");
- const workerId = ++id;
- const data = {
+ const { workerPort, onClose } = createWorkerChannel(project, name === "collect"), workerId = ++id, data = {
  pool: "vmThreads",
- worker,
  port: workerPort,
  config,
  files: paths,
@@ -3242,24 +3190,17 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
  else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
  else throw error;
  } finally {
- port.close();
- workerPort.close();
+ onClose();
  }
  }
  return async (specs, invalidates) => {
  // Cancel pending tasks from pool when possible
  vitest.onCancel(() => pool.cancelPendingTasks());
- const configs = /* @__PURE__ */ new Map();
- const getConfig = (project) => {
+ const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
  if (configs.has(project)) return configs.get(project);
  const config = project.serializedConfig;
- configs.set(project, config);
- return config;
- };
- const filesByEnv = await groupFilesByEnv(specs);
- const promises = Object.values(filesByEnv).flat();
- const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
+ return configs.set(project, config), config;
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
  };
  };
@@ -3271,13 +3212,9 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
  };
  }
  function getMemoryLimit(config) {
- const memory = nodeos.totalmem();
- const limit = getWorkerMemoryLimit(config, "vmThreads");
- if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
- // If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
+ const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmThreads");
  // just ignore "memoryLimit" value because we cannot detect memory limit
- return null;
+ return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
  }
 
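Both getMemoryLimit helpers above now collapse into a single conditional; the value they resolve behaves roughly as follows. This is a simplified sketch covering only percentage and fractional limits — the bundled stringToBytes helper accepts more input formats than shown here, and the function and variable names are mine:

```ts
import * as os from "node:os";

// Simplified view of how a memoryLimit value is resolved against total memory
// before it becomes the pool's maxMemoryLimitBeforeRecycle threshold.
function resolveMemoryLimit(limit: number | string | undefined, watch: boolean): number | null {
  const budget = watch ? os.totalmem() / 2 : os.totalmem(); // watch mode budgets only half the RAM
  if (typeof limit === "string" && limit.endsWith("%")) {
    return Math.floor((Number.parseFloat(limit) / 100) * budget);
  }
  if (typeof limit === "number") {
    // values in (0, 1] are treated as a fraction of the budget, larger numbers as absolute bytes
    return limit <= 1 ? Math.floor(limit * budget) : limit;
  }
  return null; // nothing usable: the recycle threshold is simply disabled
}

// e.g. on a 16 GiB machine:
console.log(resolveMemoryLimit("50%", false)); // ≈ 8 GiB in bytes
console.log(resolveMemoryLimit(0.25, true));   // ≈ 2 GiB in bytes (a quarter of half the RAM)
```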
  const builtinPools = [
@@ -3289,14 +3226,9 @@ const builtinPools = [
  "typescript"
  ];
  function getDefaultPoolName(project) {
- if (project.config.browser.enabled) return "browser";
- return project.config.pool;
+ return project.config.browser.enabled ? "browser" : project.config.pool;
  }
- function getFilePoolName(project, file) {
- for (const [glob, pool] of project.config.poolMatchGlobs) {
- if (pool === "browser") throw new Error("Since Vitest 0.31.0 \"browser\" pool is not supported in `poolMatchGlobs`. You can create a project to run some of your tests in browser in parallel. Read more: https://vitest.dev/guide/projects");
- if (pm.isMatch(file, glob, { cwd: project.config.root })) return pool;
- }
+ function getFilePoolName(project) {
  return getDefaultPoolName(project);
  }
  function createPool(ctx) {
@@ -3307,26 +3239,15 @@ function createPool(ctx) {
3307
3239
  vmThreads: null,
3308
3240
  vmForks: null,
3309
3241
  typescript: null
3310
- };
3311
- // in addition to resolve.conditions Vite also adds production/development,
3312
- // see: https://github.com/vitejs/vite/blob/af2aa09575229462635b7cbb6d248ca853057ba2/packages/vite/src/node/plugins/resolve.ts#L1056-L1080
3313
- const viteMajor = Number(version.split(".")[0]);
3314
- const potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3242
+ }, viteMajor = Number(version.split(".")[0]), potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3315
3243
  "production",
3316
3244
  "development",
3317
3245
  ...ctx.vite.config.resolve.conditions
3318
- ]);
3319
- const conditions = [...potentialConditions].filter((condition) => {
3320
- if (condition === "production") return ctx.vite.config.isProduction;
3321
- if (condition === "development") return !ctx.vite.config.isProduction;
3322
- return true;
3246
+ ]), conditions = [...potentialConditions].filter((condition) => {
3247
+ return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
3323
3248
  }).map((condition) => {
3324
- if (viteMajor >= 6 && condition === "development|production") return ctx.vite.config.isProduction ? "production" : "development";
3325
- return condition;
3326
- }).flatMap((c) => ["--conditions", c]);
3327
- // Instead of passing whole process.execArgv to the workers, pick allowed options.
3328
- // Some options may crash worker, e.g. --prof, --title. nodejs/node#41103
3329
- const execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
3249
+ return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
3250
+ }).flatMap((c) => ["--conditions", c]), execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
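The condensed expression above only builds a flat `--conditions` argument list and forwards a safe subset of `process.execArgv` to workers; a minimal illustration with made-up condition names:

```ts
// Example values only; the real list comes from the resolved Vite config.
const conditions = ["production", "node"];
const conditionArgs = conditions.flatMap((c) => ["--conditions", c]);
// ["--conditions", "production", "--conditions", "node"]

// Only profiling-related flags are forwarded to workers, since other execArgv
// entries (e.g. --prof, --title) are known to crash child processes.
const allowedPrefixes = ["--cpu-prof", "--heap-prof", "--diagnostic-dir"];
const execArgv = process.execArgv.filter((arg) => allowedPrefixes.some((p) => arg.startsWith(p)));

const workerExecArgv = [...execArgv, ...conditionArgs];
```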
  async function executeTests(method, files, invalidate) {
3331
3252
  const options = {
3332
3253
  execArgv: [...execArgv, ...conditions],
@@ -3342,25 +3263,22 @@ function createPool(ctx) {
3342
3263
  };
3343
3264
  // env are case-insensitive on Windows, but spawned processes don't support it
3344
3265
  if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
3345
- const poolConcurrentPromises = /* @__PURE__ */ new Map();
3346
- const customPools = /* @__PURE__ */ new Map();
3266
+ const poolConcurrentPromises = /* @__PURE__ */ new Map(), customPools = /* @__PURE__ */ new Map();
3347
3267
  async function resolveCustomPool(filepath) {
3348
3268
  if (customPools.has(filepath)) return customPools.get(filepath);
3349
- const pool = await ctx.runner.executeId(filepath);
3269
+ const pool = await ctx.runner.import(filepath);
3350
3270
  if (typeof pool.default !== "function") throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
3351
3271
  const poolInstance = await pool.default(ctx, options);
3352
3272
  if (typeof poolInstance?.name !== "string") throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
3353
3273
  if (typeof poolInstance?.[method] !== "function") throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
3354
- customPools.set(filepath, poolInstance);
3355
- return poolInstance;
3274
+ return customPools.set(filepath, poolInstance), poolInstance;
3356
3275
  }
3357
3276
  function getConcurrentPool(pool, fn) {
3358
3277
  if (poolConcurrentPromises.has(pool)) return poolConcurrentPromises.get(pool);
3359
3278
  const promise = fn().finally(() => {
3360
3279
  poolConcurrentPromises.delete(pool);
3361
3280
  });
3362
- poolConcurrentPromises.set(pool, promise);
3363
- return promise;
3281
+ return poolConcurrentPromises.set(pool, promise), promise;
3364
3282
  }
3365
3283
  function getCustomPool(pool) {
3366
3284
  return getConcurrentPool(pool, () => resolveCustomPool(pool));
@@ -3371,23 +3289,18 @@ function createPool(ctx) {
3371
3289
  return createBrowserPool(ctx);
3372
3290
  });
3373
3291
  }
3374
- const groupedSpecifications = {};
3375
- const groups = /* @__PURE__ */ new Set();
3376
- const factories = {
3377
- vmThreads: () => createVmThreadsPool(ctx, options),
3378
- vmForks: () => createVmForksPool(ctx, options),
3379
- threads: () => createThreadsPool(ctx, options),
3380
- forks: () => createForksPool(ctx, options),
3292
+ const groupedSpecifications = {}, groups = /* @__PURE__ */ new Set(), factories = {
3293
+ vmThreads: (specs) => createVmThreadsPool(ctx, options, specs),
3294
+ vmForks: (specs) => createVmForksPool(ctx, options, specs),
3295
+ threads: (specs) => createThreadsPool(ctx, options, specs),
3296
+ forks: (specs) => createForksPool(ctx, options, specs),
3381
3297
  typescript: () => createTypecheckPool(ctx)
3382
3298
  };
3383
3299
  for (const spec of files) {
3384
- const group = spec[0].config.sequence.groupOrder ?? 0;
3385
- groups.add(group);
3386
- groupedSpecifications[group] ??= [];
3387
- groupedSpecifications[group].push(spec);
3300
+ const group = spec.project.config.sequence.groupOrder ?? 0;
3301
+ groups.add(group), groupedSpecifications[group] ??= [], groupedSpecifications[group].push(spec);
3388
3302
  }
3389
- const Sequencer = ctx.config.sequence.sequencer;
3390
- const sequencer = new Sequencer(ctx);
3303
+ const Sequencer = ctx.config.sequence.sequencer, sequencer = new Sequencer(ctx);
3391
3304
  async function sortSpecs(specs) {
3392
3305
  if (ctx.config.shard) {
3393
3306
  if (!ctx.config.passWithNoTests && ctx.config.shard.count > specs.length) throw new Error(`--shard <count> must be a smaller than count of test files. Resolved ${specs.length} test files for --shard=${ctx.config.shard.index}/${ctx.config.shard.count}.`);
@@ -3407,26 +3320,19 @@ function createPool(ctx) {
3407
3320
  typescript: []
3408
3321
  };
3409
3322
  specifications.forEach((specification) => {
3410
- const pool = specification[2].pool;
3411
- filesByPool[pool] ??= [];
3412
- filesByPool[pool].push(specification);
3413
- });
3414
- await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3323
+ const pool = specification.pool;
3324
+ filesByPool[pool] ??= [], filesByPool[pool].push(specification);
3325
+ }), await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3415
3326
  const [pool, files] = entry;
3416
3327
  if (!files.length) return null;
3417
3328
  const specs = await sortSpecs(files);
3418
3329
  if (pool in factories) {
3419
3330
  const factory = factories[pool];
3420
- pools[pool] ??= factory();
3421
- return pools[pool][method](specs, invalidate);
3422
- }
3423
- if (pool === "browser") {
3424
- pools.browser ??= await getBrowserPool();
3425
- return pools.browser[method](specs, invalidate);
3331
+ return pools[pool] ??= factory(specs), pools[pool][method](specs, invalidate);
3426
3332
  }
3333
+ if (pool === "browser") return pools.browser ??= await getBrowserPool(), pools.browser[method](specs, invalidate);
3427
3334
  const poolHandler = await getCustomPool(pool);
3428
- pools[poolHandler.name] ??= poolHandler;
3429
- return poolHandler[method](specs, invalidate);
3335
+ return pools[poolHandler.name] ??= poolHandler, poolHandler[method](specs, invalidate);
3430
3336
  }));
3431
3337
  }
3432
3338
  }
@@ -3447,14 +3353,9 @@ class BaseSequencer {
3447
3353
  }
3448
3354
  // async so it can be extended by other sequelizers
3449
3355
  async shard(files) {
3450
- const { config } = this.ctx;
3451
- const { index, count } = config.shard;
3452
- const shardSize = Math.ceil(files.length / count);
3453
- const shardStart = shardSize * (index - 1);
3454
- const shardEnd = shardSize * index;
3356
+ const { config } = this.ctx, { index, count } = config.shard, [shardStart, shardEnd] = this.calculateShardRange(files.length, index, count);
3455
3357
  return [...files].map((spec) => {
3456
- const fullPath = resolve$1(slash(config.root), slash(spec.moduleId));
3457
- const specPath = fullPath?.slice(config.root.length);
3358
+ const fullPath = resolve$1(slash(config.root), slash(spec.moduleId)), specPath = fullPath?.slice(config.root.length);
3458
3359
  return {
3459
3360
  spec,
3460
3361
  hash: hash("sha1", specPath, "hex")
@@ -3465,25 +3366,26 @@ class BaseSequencer {
3465
3366
  async sort(files) {
3466
3367
  const cache = this.ctx.cache;
3467
3368
  return [...files].sort((a, b) => {
3468
- const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
3469
- const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
3470
- const aState = cache.getFileTestResults(keyA);
3471
- const bState = cache.getFileTestResults(keyB);
3369
+ const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`, keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`, aState = cache.getFileTestResults(keyA), bState = cache.getFileTestResults(keyB);
3472
3370
  if (!aState || !bState) {
3473
- const statsA = cache.getFileStats(keyA);
3474
- const statsB = cache.getFileStats(keyB);
3475
- // run unknown first
3476
- if (!statsA || !statsB) return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
3371
+ const statsA = cache.getFileStats(keyA), statsB = cache.getFileStats(keyB);
3477
3372
  // run larger files first
3478
- return statsB.size - statsA.size;
3373
+ return !statsA || !statsB ? !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0 : statsB.size - statsA.size;
3479
3374
  }
3480
- // run failed first
3481
- if (aState.failed && !bState.failed) return -1;
3482
- if (!aState.failed && bState.failed) return 1;
3483
3375
  // run longer first
3484
- return bState.duration - aState.duration;
3376
+ return aState.failed && !bState.failed ? -1 : !aState.failed && bState.failed ? 1 : bState.duration - aState.duration;
3485
3377
  });
3486
3378
  }
3379
+ // Calculate distributed shard range [start, end] distributed equally
3380
+ calculateShardRange(filesCount, index, count) {
3381
+ const baseShardSize = Math.floor(filesCount / count), remainderTestFilesCount = filesCount % count;
3382
+ if (remainderTestFilesCount >= index) {
3383
+ const shardSize = baseShardSize + 1, shardStart = shardSize * (index - 1), shardEnd = shardSize * index;
3384
+ return [shardStart, shardEnd];
3385
+ }
3386
+ const shardStart = remainderTestFilesCount * (baseShardSize + 1) + (index - remainderTestFilesCount - 1) * baseShardSize, shardEnd = shardStart + baseShardSize;
3387
+ return [shardStart, shardEnd];
3388
+ }
  }
 
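The calculateShardRange helper added above replaces the old uniform `Math.ceil(files.length / count)` split, which could leave the last shard empty (9 files over 4 shards used to yield 3/3/3/0; the even split yields 3/2/2/2). A standalone sketch of the arithmetic with a worked example:

```ts
// Sketch of the even shard split: the first (filesCount % count) shards take one
// extra file, so shard sizes never differ by more than one.
function calculateShardRange(filesCount: number, index: number, count: number): [number, number] {
  const baseShardSize = Math.floor(filesCount / count);
  const remainder = filesCount % count;
  if (remainder >= index) {
    const shardSize = baseShardSize + 1;
    return [shardSize * (index - 1), shardSize * index];
  }
  const shardStart = remainder * (baseShardSize + 1) + (index - remainder - 1) * baseShardSize;
  return [shardStart, shardStart + baseShardSize];
}

// 9 test files across --shard=1/4 .. 4/4:
console.log([1, 2, 3, 4].map((i) => calculateShardRange(9, i, 4)));
// [[0, 3], [3, 5], [5, 7], [7, 9]]
```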
  class RandomSequencer extends BaseSequencer {
@@ -3501,11 +3403,10 @@ function parseInspector(inspect) {
3501
3403
  if (typeof inspect === "number") return { port: inspect };
3502
3404
  if (inspect.match(/https?:\//)) throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
3503
3405
  const [host, port] = inspect.split(":");
3504
- if (!port) return { host };
3505
- return {
3406
+ return port ? {
3506
3407
  host,
3507
3408
  port: Number(port) || defaultInspectPort
3508
- };
3409
+ } : { host };
3509
3410
  }
3510
3411
  function resolveApiServerConfig(options, defaultPort) {
3511
3412
  let api;
@@ -3523,12 +3424,10 @@ function resolveApiServerConfig(options, defaultPort) {
3523
3424
  return api;
3524
3425
  }
3525
3426
  function resolveInlineWorkerOption(value) {
3526
- if (typeof value === "string" && value.trim().endsWith("%")) return getWorkersCountByPercentage(value);
3527
- else return Number(value);
3427
+ return typeof value === "string" && value.trim().endsWith("%") ? getWorkersCountByPercentage(value) : Number(value);
3528
3428
  }
3529
3429
  function resolveConfig$1(vitest, options, viteConfig) {
3530
- const mode = vitest.mode;
3531
- const logger = vitest.logger;
3430
+ const mode = vitest.mode, logger = vitest.logger;
3532
3431
  if (options.dom) {
3533
3432
  if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`));
3534
3433
  options.environment = "happy-dom";
@@ -3539,24 +3438,17 @@ function resolveConfig$1(vitest, options, viteConfig) {
3539
3438
  root: viteConfig.root,
3540
3439
  mode
3541
3440
  };
3542
- resolved.project = toArray(resolved.project);
3543
- resolved.provide ??= {};
3544
- resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "";
3545
- resolved.color = typeof options.name !== "string" ? options.name?.color : void 0;
3441
+ if (resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enabled Browser Mode, use "test.browser.enabled" instead.`);
3546
3442
  const inspector = resolved.inspect || resolved.inspectBrk;
3547
- resolved.inspector = {
3443
+ if (resolved.inspector = {
3548
3444
  ...resolved.inspector,
3549
3445
  ...parseInspector(inspector),
3550
3446
  enabled: !!inspector,
3551
3447
  waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk
3552
- };
3553
- if (viteConfig.base !== "/") resolved.base = viteConfig.base;
3554
- resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true;
3555
- if (options.shard) {
3448
+ }, viteConfig.base !== "/") resolved.base = viteConfig.base;
3449
+ if (resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true, options.shard) {
3556
3450
  if (resolved.watch) throw new Error("You cannot use --shard option with enabled watch");
3557
- const [indexString, countString] = options.shard.split("/");
3558
- const index = Math.abs(Number.parseInt(indexString, 10));
3559
- const count = Math.abs(Number.parseInt(countString, 10));
3451
+ const [indexString, countString] = options.shard.split("/"), index = Math.abs(Number.parseInt(indexString, 10)), count = Math.abs(Number.parseInt(countString, 10));
3560
3452
  if (Number.isNaN(count) || count <= 0) throw new Error("--shard <count> must be a positive number");
3561
3453
  if (Number.isNaN(index) || index <= 0 || index > count) throw new Error("--shard <index> must be a positive number less then <count>");
3562
3454
  resolved.shard = {
@@ -3567,21 +3459,12 @@ function resolveConfig$1(vitest, options, viteConfig) {
3567
3459
  if (resolved.standalone && !resolved.watch) throw new Error(`Vitest standalone mode requires --watch`);
3568
3460
  if (resolved.mergeReports && resolved.watch) throw new Error(`Cannot merge reports with --watch enabled`);
3569
3461
  if (resolved.maxWorkers) resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
3570
- if (resolved.minWorkers) resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
3571
- // run benchmark sequentially by default
3572
- resolved.fileParallelism ??= mode !== "benchmark";
3573
- if (!resolved.fileParallelism) {
3574
- // ignore user config, parallelism cannot be implemented without limiting workers
3575
- resolved.maxWorkers = 1;
3576
- resolved.minWorkers = 1;
3577
- }
3578
- if (resolved.maxConcurrency === 0) {
3579
- logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`));
3580
- resolved.maxConcurrency = configDefaults.maxConcurrency;
3581
- }
3462
+ if (resolved.fileParallelism ??= mode !== "benchmark", !resolved.fileParallelism)
3463
+ // ignore user config, parallelism cannot be implemented without limiting workers
3464
+ resolved.maxWorkers = 1;
3465
+ if (resolved.maxConcurrency === 0) logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)), resolved.maxConcurrency = configDefaults.maxConcurrency;
3582
3466
  if (resolved.inspect || resolved.inspectBrk) {
3583
- const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread;
3584
- const isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3467
+ const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread, isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3585
3468
  if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
3586
3469
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3587
3470
  throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
@@ -3602,113 +3485,78 @@ function resolveConfig$1(vitest, options, viteConfig) {
3602
3485
  const playwrightChromiumOnly = isPlaywrightChromiumOnly(vitest, resolved);
3603
3486
  // Browser-mode "Playwright + Chromium" only features:
3604
3487
  if (browser.enabled && !playwrightChromiumOnly) {
3605
- const browserConfig = { browser: {
3606
- provider: browser.provider,
3607
- name: browser.name,
3608
- instances: browser.instances?.map((i) => ({ browser: i.browser }))
3609
- } };
3610
- if (resolved.coverage.enabled && resolved.coverage.provider === "v8") throw new Error(`@vitest/coverage-v8 does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
3611
- provider: "playwright",
3612
- instances: [{ browser: "chromium" }]
3613
- } }, null, 2)}\n\n...or change your coverage provider to:\n${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}\n`);
3488
+ const browserConfig = `
3489
+ {
3490
+ browser: {
3491
+ provider: ${browser.provider?.name || "preview"}(),
3492
+ instances: [
3493
+ ${(browser.instances || []).map((i) => `{ browser: '${i.browser}' }`).join(",\n ")}
3494
+ ],
3495
+ },
3496
+ }
3497
+ `.trim(), correctExample = `
3498
+ {
3499
+ browser: {
3500
+ provider: playwright(),
3501
+ instances: [
3502
+ { browser: 'chromium' }
3503
+ ],
3504
+ },
3505
+ }
3506
+ `.trim();
3507
+ if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
3508
+ const coverageExample = `
3509
+ {
3510
+ coverage: {
3511
+ provider: 'istanbul',
3512
+ },
3513
+ }
3514
+ `.trim();
3515
+ throw new Error(`@vitest/coverage-v8 does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or change your coverage provider to:\n${coverageExample}\n`);
3516
+ }
3614
3517
  if (resolved.inspect || resolved.inspectBrk) {
3615
3518
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3616
- throw new Error(`${inspectOption} does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
3617
- provider: "playwright",
3618
- instances: [{ browser: "chromium" }]
3619
- } }, null, 2)}\n\n...or disable ${inspectOption}\n`);
3519
+ throw new Error(`${inspectOption} does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or disable ${inspectOption}\n`);
3620
3520
  }
3621
3521
  }
3622
- resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter);
3623
- if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3522
+ if (resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter), resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3624
3523
  const reportsDirectory = resolve$1(resolved.root, resolved.coverage.reportsDirectory);
3625
3524
  if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
3626
3525
  }
3627
3526
  if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
3628
- resolved.expect ??= {};
3629
- resolved.deps ??= {};
3630
- resolved.deps.moduleDirectories ??= [];
3631
- resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
3632
- if (!dir.startsWith("/")) dir = `/${dir}`;
3527
+ resolved.expect ??= {}, resolved.deps ??= {}, resolved.deps.moduleDirectories ??= [];
3528
+ const envModuleDirectories = process.env.VITEST_MODULE_DIRECTORIES || process.env.npm_config_VITEST_MODULE_DIRECTORIES;
3529
+ if (envModuleDirectories) resolved.deps.moduleDirectories.push(...envModuleDirectories.split(","));
3530
+ if (resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
3531
+ if (dir[0] !== "/") dir = `/${dir}`;
3633
3532
  if (!dir.endsWith("/")) dir += "/";
3634
3533
  return normalize(dir);
3635
- });
3636
- if (!resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
3637
- resolved.deps.optimizer ??= {};
3638
- resolved.deps.optimizer.ssr ??= {};
3639
- resolved.deps.optimizer.ssr.enabled ??= true;
3640
- resolved.deps.optimizer.web ??= {};
3641
- resolved.deps.optimizer.web.enabled ??= true;
3642
- resolved.deps.web ??= {};
3643
- resolved.deps.web.transformAssets ??= true;
3644
- resolved.deps.web.transformCss ??= true;
3645
- resolved.deps.web.transformGlobPattern ??= [];
3646
- resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root));
3647
- resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root));
3648
- // Add hard-coded default coverage exclusions. These cannot be overidden by user config.
3649
- // Override original exclude array for cases where user re-uses same object in test.exclude.
3650
- resolved.coverage.exclude = [
3534
+ }), !resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
3535
+ if (resolved.deps.optimizer ??= {}, resolved.deps.optimizer.ssr ??= {}, resolved.deps.optimizer.ssr.enabled ??= false, resolved.deps.optimizer.client ??= {}, resolved.deps.optimizer.client.enabled ??= false, resolved.deps.web ??= {}, resolved.deps.web.transformAssets ??= true, resolved.deps.web.transformCss ??= true, resolved.deps.web.transformGlobPattern ??= [], resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root)), resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root)), resolved.coverage.exclude = [
3651
3536
  ...resolved.coverage.exclude,
3652
3537
  ...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`),
3653
3538
  ...resolved.include,
3654
- resolved.config && slash$1(resolved.config),
3539
+ resolved.config && slash(resolved.config),
3655
3540
  ...configFiles,
3656
- ...workspacesFiles,
3657
3541
  "**/virtual:*",
3658
3542
  "**/__x00__*",
3659
3543
  "**/node_modules/**"
3660
- ].filter((pattern) => pattern != null);
3661
- resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles];
3662
- resolved.server ??= {};
3663
- resolved.server.deps ??= {};
3664
- const deprecatedDepsOptions = [
3665
- "inline",
3666
- "external",
3667
- "fallbackCJS"
3668
- ];
3669
- deprecatedDepsOptions.forEach((option) => {
3670
- if (resolved.deps[option] === void 0) return;
3671
- if (option === "fallbackCJS") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`));
3672
- else {
3673
- const transformMode = resolved.environment === "happy-dom" || resolved.environment === "jsdom" ? "web" : "ssr";
3674
- logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. If you rely on vite-node directly, use "server.deps.${option}" instead. Otherwise, consider using "deps.optimizer.${transformMode}.${option === "external" ? "exclude" : "include"}"`));
3675
- }
3676
- if (resolved.server.deps[option] === void 0) resolved.server.deps[option] = resolved.deps[option];
3677
- });
3678
- if (resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3679
- // vitenode will try to import such file with native node,
3680
- // but then our mocker will not work properly
3681
- if (resolved.server.deps.inline !== true) {
3682
- const ssrOptions = viteConfig.ssr;
3683
- if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null) resolved.server.deps.inline = true;
3684
- else {
3685
- resolved.server.deps.inline ??= [];
3686
- resolved.server.deps.inline.push(...extraInlineDeps);
3687
- }
3688
- }
3689
- resolved.server.deps.inlineFiles ??= [];
3690
- resolved.server.deps.inlineFiles.push(...resolved.setupFiles);
3691
- resolved.server.deps.moduleDirectories ??= [];
3692
- resolved.server.deps.moduleDirectories.push(...resolved.deps.moduleDirectories);
3544
+ ].filter((pattern) => pattern != null), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3693
3545
  if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root);
3694
- resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments");
3695
- if (resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3696
- resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0;
3697
- if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) resolved.snapshotFormat.plugins = [];
3546
+ if (resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"), resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3547
+ if (resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0, resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
3548
+ // TODO: support it via separate config (like DiffOptions) or via `Function.toString()`
3549
+ if (resolved.snapshotFormat.plugins = [], typeof resolved.snapshotFormat.compareKeys === "function") throw new TypeError(`"snapshotFormat.compareKeys" function is not supported.`);
3550
+ }
3698
3551
  const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT;
3699
- resolved.snapshotOptions = {
3552
+ if (resolved.snapshotOptions = {
3700
3553
  expand: resolved.expandSnapshotDiff ?? false,
3701
3554
  snapshotFormat: resolved.snapshotFormat || {},
3702
3555
  updateSnapshot: isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new",
3703
3556
  resolveSnapshotPath: options.resolveSnapshotPath,
3704
3557
  snapshotEnvironment: null
3705
- };
3706
- resolved.snapshotSerializers ??= [];
3707
- resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root));
3708
- resolved.forceRerunTriggers.push(...resolved.snapshotSerializers);
3709
- if (options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3710
- resolved.pool ??= "threads";
3711
- if (process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3558
+ }, resolved.snapshotSerializers ??= [], resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)), resolved.forceRerunTriggers.push(...resolved.snapshotSerializers), options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3559
+ if (resolved.pool ??= "threads", process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3712
3560
  ...resolved.poolOptions,
3713
3561
  threads: {
3714
3562
  ...resolved.poolOptions?.threads,
@@ -3719,17 +3567,6 @@ function resolveConfig$1(vitest, options, viteConfig) {
3719
3567
  maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
3720
3568
  }
3721
3569
  };
3722
- if (process.env.VITEST_MIN_THREADS) resolved.poolOptions = {
3723
- ...resolved.poolOptions,
3724
- threads: {
3725
- ...resolved.poolOptions?.threads,
3726
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
3727
- },
3728
- vmThreads: {
3729
- ...resolved.poolOptions?.vmThreads,
3730
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
3731
- }
3732
- };
3733
3570
  if (process.env.VITEST_MAX_FORKS) resolved.poolOptions = {
3734
3571
  ...resolved.poolOptions,
3735
3572
  forks: {
@@ -3741,51 +3578,16 @@ function resolveConfig$1(vitest, options, viteConfig) {
3741
3578
  maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
3742
3579
  }
3743
3580
  };
3744
- if (process.env.VITEST_MIN_FORKS) resolved.poolOptions = {
3745
- ...resolved.poolOptions,
3746
- forks: {
3747
- ...resolved.poolOptions?.forks,
3748
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
3749
- },
3750
- vmForks: {
3751
- ...resolved.poolOptions?.vmForks,
3752
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
3753
- }
3754
- };
3755
- const poolThreadsOptions = [
3756
- ["threads", "minThreads"],
3757
- ["threads", "maxThreads"],
3758
- ["vmThreads", "minThreads"],
3759
- ["vmThreads", "maxThreads"]
3760
- ];
3581
+ const poolThreadsOptions = [["threads", "maxThreads"], ["vmThreads", "maxThreads"]];
3761
3582
  for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3762
- const poolForksOptions = [
3763
- ["forks", "minForks"],
3764
- ["forks", "maxForks"],
3765
- ["vmForks", "minForks"],
3766
- ["vmForks", "maxForks"]
3767
- ];
3583
+ const poolForksOptions = [["forks", "maxForks"], ["vmForks", "maxForks"]];
3768
3584
  for (const [poolOptionKey, workerOptionKey] of poolForksOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3769
- if (typeof resolved.workspace === "string")
3770
- // if passed down from the CLI and it's relative, resolve relative to CWD
3771
- resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve$1(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
3772
3585
  if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
3773
- if (resolved.poolMatchGlobs) logger.deprecate("`poolMatchGlobs` is deprecated. Use `test.projects` to define different configurations instead.");
3774
- resolved.poolMatchGlobs = (resolved.poolMatchGlobs || []).map(([glob, pool]) => {
3775
- if (!builtinPools.includes(pool)) pool = resolvePath(pool, resolved.root);
3776
- return [glob, pool];
3777
- });
3778
3586
  if (mode === "benchmark") {
3779
3587
  resolved.benchmark = {
3780
3588
  ...benchmarkConfigDefaults,
3781
3589
  ...resolved.benchmark
3782
- };
3783
- // override test config
3784
- resolved.coverage.enabled = false;
3785
- resolved.typecheck.enabled = false;
3786
- resolved.include = resolved.benchmark.include;
3787
- resolved.exclude = resolved.benchmark.exclude;
3788
- resolved.includeSource = resolved.benchmark.includeSource;
3590
+ }, resolved.coverage.enabled = false, resolved.typecheck.enabled = false, resolved.include = resolved.benchmark.include, resolved.exclude = resolved.benchmark.exclude, resolved.includeSource = resolved.benchmark.includeSource;
3789
3591
  const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean);
3790
3592
  if (reporters.length) resolved.benchmark.reporters = reporters;
3791
3593
  else resolved.benchmark.reporters = ["default"];
@@ -3794,17 +3596,13 @@ function resolveConfig$1(vitest, options, viteConfig) {
3794
3596
  if (options.compare) resolved.benchmark.compare = options.compare;
3795
3597
  if (options.outputJson) resolved.benchmark.outputJson = options.outputJson;
3796
3598
  }
3797
- if (typeof resolved.diff === "string") {
3798
- resolved.diff = resolvePath(resolved.diff, resolved.root);
3799
- resolved.forceRerunTriggers.push(resolved.diff);
3800
- }
3599
+ if (typeof resolved.diff === "string") resolved.diff = resolvePath(resolved.diff, resolved.root), resolved.forceRerunTriggers.push(resolved.diff);
3801
3600
  // the server has been created, we don't need to override vite.server options
3802
3601
  const api = resolveApiServerConfig(options, defaultPort);
3803
- resolved.api = {
3602
+ if (resolved.api = {
3804
3603
  ...api,
3805
3604
  token: crypto.randomUUID()
3806
- };
3807
- if (options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
3605
+ }, options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
3808
3606
  /*
3809
3607
  * Reporters can be defined in many different ways:
3810
3608
  * { reporter: 'json' }
@@ -3833,86 +3631,55 @@ function resolveConfig$1(vitest, options, viteConfig) {
3833
3631
  if (mode !== "benchmark") {
3834
3632
  // @ts-expect-error "reporter" is from CLI, should be absolute to the running directory
3835
3633
  // it is passed down as "vitest --reporter ../reporter.js"
3836
- const reportersFromCLI = resolved.reporter;
3837
- const cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3838
- // ./reporter.js || ../reporter.js, but not .reporters/reporter.js
3839
- if (/^\.\.?\//.test(reporter)) return resolve$1(process.cwd(), reporter);
3840
- return reporter;
3634
+ const reportersFromCLI = resolved.reporter, cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3635
+ return /^\.\.?\//.test(reporter) ? resolve$1(process.cwd(), reporter) : reporter;
3841
3636
  });
3842
3637
  if (cliReporters.length) resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
3843
3638
  }
3844
3639
  if (!resolved.reporters.length) {
3845
- resolved.reporters.push(["default", {}]);
3846
3640
  // also enable github-actions reporter as a default
3847
- if (process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
3641
+ if (resolved.reporters.push(["default", {}]), process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
3848
3642
  }
3849
3643
  if (resolved.changed) resolved.passWithNoTests ??= true;
3850
- resolved.css ??= {};
3851
- if (typeof resolved.css === "object") {
3852
- resolved.css.modules ??= {};
3853
- resolved.css.modules.classNameStrategy ??= "stable";
3854
- }
3644
+ if (resolved.css ??= {}, typeof resolved.css === "object") resolved.css.modules ??= {}, resolved.css.modules.classNameStrategy ??= "stable";
3855
3645
  if (resolved.cache !== false) {
3856
3646
  if (resolved.cache && typeof resolved.cache.dir === "string") vitest.logger.deprecate(`"cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache director. Note caches will be written to "cacheDir\/vitest"`);
3857
3647
  resolved.cache = { dir: viteConfig.cacheDir };
3858
3648
  }
3859
- resolved.sequence ??= {};
3860
- if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
3649
+ if (resolved.sequence ??= {}, resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
3861
3650
  const { files, tests } = resolved.sequence.shuffle;
3862
- resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer;
3863
- resolved.sequence.shuffle = tests;
3651
+ resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer, resolved.sequence.shuffle = tests;
3864
3652
  }
3865
3653
  if (!resolved.sequence?.sequencer)
3866
3654
  // CLI flag has higher priority
3867
3655
  resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer;
3868
- resolved.sequence.groupOrder ??= 0;
3869
- resolved.sequence.hooks ??= "stack";
3870
- if (resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
3871
- resolved.typecheck = {
3656
+ if (resolved.sequence.groupOrder ??= 0, resolved.sequence.hooks ??= "stack", resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
3657
+ if (resolved.typecheck = {
3872
3658
  ...configDefaults.typecheck,
3873
3659
  ...resolved.typecheck
3874
- };
3875
- if (resolved.environmentMatchGlobs) logger.deprecate("\"environmentMatchGlobs\" is deprecated. Use `test.projects` to define different configurations instead.");
3876
- resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map((i) => [resolve$1(resolved.root, i[0]), i[1]]);
3877
- resolved.typecheck ??= {};
3878
- resolved.typecheck.enabled ??= false;
3879
- if (resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3880
- resolved.browser.enabled ??= false;
3881
- resolved.browser.headless ??= isCI;
3882
- resolved.browser.isolate ??= true;
3883
- resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark";
3884
- // disable in headless mode by default, and if CI is detected
3885
- resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI;
3886
- if (resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
3887
- const isPreview = resolved.browser.provider === "preview";
3888
- if (isPreview && resolved.browser.screenshotFailures === true) {
3889
- console.warn(c.yellow([
3890
- `Browser provider "preview" doesn't support screenshots, `,
3891
- `so "browser.screenshotFailures" option is forcefully disabled. `,
3892
- `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
3893
- ].join("")));
3894
- resolved.browser.screenshotFailures = false;
3895
- } else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
3896
- resolved.browser.viewport ??= {};
3897
- resolved.browser.viewport.width ??= 414;
3898
- resolved.browser.viewport.height ??= 896;
3899
- resolved.browser.locators ??= {};
3900
- resolved.browser.locators.testIdAttribute ??= "data-testid";
3901
- if (resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = "preview";
3902
- resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort };
3660
+ }, resolved.typecheck ??= {}, resolved.typecheck.enabled ??= false, resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3661
+ if (resolved.browser.enabled ??= false, resolved.browser.headless ??= isCI, resolved.browser.isolate ??= true, resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark", resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI, resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
3662
+ if (resolved.browser.viewport ??= {}, resolved.browser.viewport.width ??= 414, resolved.browser.viewport.height ??= 896, resolved.browser.locators ??= {}, resolved.browser.locators.testIdAttribute ??= "data-testid", resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = void 0;
3663
+ if (typeof resolved.browser.provider === "string") {
3664
+ const source = `@vitest/browser/providers/${resolved.browser.provider}`;
3665
+ throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/guide/browser/config#provider`);
3666
+ }
3667
+ const isPreview = resolved.browser.provider?.name === "preview";
3668
+ if (isPreview && resolved.browser.screenshotFailures === true) console.warn(c.yellow([
3669
+ `Browser provider "preview" doesn't support screenshots, `,
3670
+ `so "browser.screenshotFailures" option is forcefully disabled. `,
3671
+ `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
3672
+ ].join(""))), resolved.browser.screenshotFailures = false;
3673
+ else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
3903
3674
  // enable includeTaskLocation by default in UI mode
3904
- if (resolved.browser.enabled) {
3675
+ if (resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort }, resolved.browser.enabled) {
3905
3676
  if (resolved.browser.ui) resolved.includeTaskLocation ??= true;
3906
3677
  } else if (resolved.ui) resolved.includeTaskLocation ??= true;
3907
3678
  const htmlReporter = toArray(resolved.reporters).some((reporter) => {
3908
- if (Array.isArray(reporter)) return reporter[0] === "html";
3909
- return false;
3679
+ return Array.isArray(reporter) ? reporter[0] === "html" : false;
3910
3680
  });
3911
3681
  if (htmlReporter) resolved.includeTaskLocation ??= true;
3912
- resolved.testTransformMode ??= {};
3913
- resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3;
3914
- resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4;
3915
- return resolved;
3682
+ return resolved.server ??= {}, resolved.server.deps ??= {}, resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
3916
3683
  }
3917
3684
  function isBrowserEnabled(config) {
3918
3685
  return Boolean(config.browser?.enabled);
@@ -3931,7 +3698,7 @@ function resolveCoverageReporters(configReporters) {
3931
3698
  }
3932
3699
  function isPlaywrightChromiumOnly(vitest, config) {
3933
3700
  const browser = config.browser;
3934
- if (!browser || browser.provider !== "playwright" || !browser.enabled) return false;
3701
+ if (!browser || !browser.provider || browser.provider.name !== "playwright" || !browser.enabled) return false;
3935
3702
  if (browser.name) return browser.name === "chromium";
3936
3703
  if (!browser.instances) return false;
3937
3704
  for (const instance of browser.instances) {
@@ -3948,14 +3715,11 @@ const THRESHOLD_KEYS = [
3948
3715
  "functions",
3949
3716
  "statements",
3950
3717
  "branches"
3951
- ];
3952
- const GLOBAL_THRESHOLDS_KEY = "global";
3953
- const DEFAULT_PROJECT = Symbol.for("default-project");
3718
+ ], GLOBAL_THRESHOLDS_KEY = "global", DEFAULT_PROJECT = Symbol.for("default-project");
3954
3719
  let uniqueId = 0;
3955
3720
  async function getCoverageProvider(options, loader) {
3956
3721
  const coverageModule = await resolveCoverageProviderModule(options, loader);
3957
- if (coverageModule) return coverageModule.getProvider();
3958
- return null;
3722
+ return coverageModule ? coverageModule.getProvider() : null;
3959
3723
  }
3960
3724
  class BaseCoverageProvider {
3961
3725
  ctx;
@@ -3966,12 +3730,12 @@ class BaseCoverageProvider {
3966
3730
  coverageFiles = /* @__PURE__ */ new Map();
3967
3731
  pendingPromises = [];
3968
3732
  coverageFilesDirectory;
3733
+ roots = [];
3969
3734
  _initialize(ctx) {
3970
- this.ctx = ctx;
3971
- if (ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
+ if (this.ctx = ctx, ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
  Running mixed versions is not supported and may lead into bugs
  Update your dependencies and make sure the versions match.`));
- const config = ctx.config.coverage;
+ const config = ctx._coverageOptions;
  this.options = {
  ...coverageConfigDefaults,
  ...config,
@@ -3986,47 +3750,46 @@ Update your dependencies and make sure the versions match.`));
  statements: config.thresholds["100"] ? 100 : config.thresholds.statements
  }
  };
- const shard = this.ctx.config.shard;
- const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
- this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory);
+ const shard = this.ctx.config.shard, tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
+ // If --project filter is set pick only roots of resolved projects
+ this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory), this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
  }
  /**
  * Check if file matches `coverage.include` but not `coverage.exclude`
  */
- isIncluded(_filename) {
- const filename = slash(_filename);
- const cacheHit = this.globCache.get(filename);
+ isIncluded(_filename, root) {
+ const roots = root ? [root] : this.roots, filename = slash(_filename), cacheHit = this.globCache.get(filename);
  if (cacheHit !== void 0) return cacheHit;
  // File outside project root with default allowExternal
- if (this.options.allowExternal === false && !filename.startsWith(this.ctx.config.root)) {
- this.globCache.set(filename, false);
- return false;
- }
- const options = {
- contains: true,
- dot: true,
- cwd: this.ctx.config.root,
- ignore: this.options.exclude
- };
+ if (this.options.allowExternal === false && roots.every((root) => !filename.startsWith(root))) return this.globCache.set(filename, false), false;
  // By default `coverage.include` matches all files, except "coverage.exclude"
  const glob = this.options.include || "**";
- const included = pm.isMatch(filename, glob, options) && existsSync(cleanUrl(filename));
- this.globCache.set(filename, included);
- return included;
+ let included = roots.some((root) => {
+ const options = {
+ contains: true,
+ dot: true,
+ cwd: root,
+ ignore: this.options.exclude
+ };
+ return pm.isMatch(filename, glob, options);
+ });
+ return included &&= existsSync(cleanUrl(filename)), this.globCache.set(filename, included), included;
  }
- async getUntestedFiles(testedFiles) {
- if (this.options.include == null) return [];
- let includedFiles = await glob(this.options.include, {
- cwd: this.ctx.config.root,
+ async getUntestedFilesByRoot(testedFiles, include, root) {
+ let includedFiles = await glob(include, {
+ cwd: root,
  ignore: [...this.options.exclude, ...testedFiles.map((file) => slash(file))],
  absolute: true,
  dot: true,
  onlyFiles: true
  });
- // Run again through picomatch as tinyglobby's exclude pattern is different ({ "exclude": ["math"] } should ignore "src/math.ts")
- includedFiles = includedFiles.filter((file) => this.isIncluded(file));
- if (this.ctx.config.changed) includedFiles = (this.ctx.config.related || []).filter((file) => includedFiles.includes(file));
- return includedFiles.map((file) => slash(path.resolve(this.ctx.config.root, file)));
+ if (includedFiles = includedFiles.filter((file) => this.isIncluded(file, root)), this.ctx.config.changed) includedFiles = (this.ctx.config.related || []).filter((file) => includedFiles.includes(file));
+ return includedFiles.map((file) => slash(path.resolve(root, file)));
+ }
+ async getUntestedFiles(testedFiles) {
+ if (this.options.include == null) return [];
+ const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include), matrix = await Promise.all(this.roots.map(rootMapper));
+ return matrix.flatMap((files) => files);
  }
  createCoverageMap() {
  throw new Error("BaseReporter's createCoverageMap was not overwritten");
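The hunk above makes `isIncluded` multi-root aware: when a `--project` filter is active, the constructor collects one root per resolved project, and a file counts as included if it matches `coverage.include` (minus `coverage.exclude`) relative to any of those roots. A minimal standalone sketch of that per-root check, assuming picomatch with the same option names; the caching and query-string cleanup of the real code are omitted, and `isIncludedInAnyRoot` is a hypothetical helper, not a vitest export:

import { existsSync } from "node:fs";
import pm from "picomatch";

// Hedged sketch: mirrors only the roots.some(...) matching shown above.
function isIncludedInAnyRoot(filename, roots, include = "**", exclude = []) {
  // Included when the glob matches relative to at least one project root...
  const matchesSomeRoot = roots.some((root) =>
    pm.isMatch(filename, include, { contains: true, dot: true, cwd: root, ignore: exclude }));
  // ...and the file actually exists on disk.
  return matchesSomeRoot && existsSync(filename);
}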
@@ -4051,56 +3814,37 @@ Update your dependencies and make sure the versions match.`));
  force: true,
  maxRetries: 10
  });
- await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
- this.coverageFiles = /* @__PURE__ */ new Map();
- this.pendingPromises = [];
+ await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
  }
- onAfterSuiteRun({ coverage, transformMode, projectName, testFiles }) {
+ onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
  if (!coverage) return;
- if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") throw new Error(`Invalid transform mode: ${transformMode}`);
  let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
- if (!entry) {
- entry = {
- web: {},
- ssr: {},
- browser: {}
- };
- this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
- }
- const testFilenames = testFiles.join();
- const filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
+ if (!entry) entry = {}, this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
+ const testFilenames = testFiles.join(), filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
  // If there's a result from previous run, overwrite it
- entry[transformMode][testFilenames] = filename;
+ entry[environment] ??= {}, entry[environment][testFilenames] = filename;
  const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
  this.pendingPromises.push(promise);
  }
  async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
  let index = 0;
  const total = this.pendingPromises.length;
- await Promise.all(this.pendingPromises);
- this.pendingPromises = [];
- for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [transformMode, coverageByTestfiles] of Object.entries(coveragePerProject)) {
- const filenames = Object.values(coverageByTestfiles);
- const project = this.ctx.getProjectByName(projectName);
+ await Promise.all(this.pendingPromises), this.pendingPromises = [];
+ for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [environment, coverageByTestfiles] of Object.entries(coveragePerProject)) {
+ const filenames = Object.values(coverageByTestfiles), project = this.ctx.getProjectByName(projectName);
  for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
- if (onDebug.enabled) {
- index += chunk.length;
- onDebug(`Reading coverage results ${index}/${total}`);
- }
+ if (onDebug.enabled) index += chunk.length, onDebug(`Reading coverage results ${index}/${total}`);
  await Promise.all(chunk.map(async (filename) => {
- const contents = await promises$1.readFile(filename, "utf-8");
- const coverage = JSON.parse(contents);
+ const contents = await promises$1.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
  onFileRead(coverage);
  }));
  }
- await onFinished(project, transformMode);
+ await onFinished(project, environment);
  }
  }
  async cleanAfterRun() {
- this.coverageFiles = /* @__PURE__ */ new Map();
- await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
  // Remove empty reports directory, e.g. when only text-reporter is used
- if (readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
+ if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises$1.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
  }
  async onTestFailure() {
  if (!this.options.reportOnFailure) await this.cleanAfterRun();
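Note how the fixed web/ssr/browser buckets are gone: results reported through `onAfterSuiteRun` are now grouped under the suite's `environment` name, with each bucket created lazily on first use. A rough, self-contained sketch of that bookkeeping (illustrative only; `recordCoverageFile` is hypothetical and not part of the package):

// One entry per project; environment buckets appear only for environments that actually ran.
const coverageFiles = new Map();
let uniqueId = 0;

function recordCoverageFile(projectName, environment, testFiles) {
  let entry = coverageFiles.get(projectName);
  if (!entry) {
    entry = {};
    coverageFiles.set(projectName, entry);
  }
  // Lazily created per-environment bucket, keyed by the joined test file names.
  entry[environment] ??= {};
  entry[environment][testFiles.join()] = `coverage-${uniqueId++}.json`;
  return entry;
}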
@@ -4113,11 +3857,9 @@ Update your dependencies and make sure the versions match.`));
  }
  async reportThresholds(coverageMap, allTestsRun) {
  const resolvedThresholds = this.resolveThresholds(coverageMap);
- this.checkThresholds(resolvedThresholds);
- if (this.options.thresholds?.autoUpdate && allTestsRun) {
- if (!this.ctx.server.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
- const configFilePath = this.ctx.server.config.configFile;
- const configModule = await this.parseConfigModule(configFilePath);
+ if (this.checkThresholds(resolvedThresholds), this.options.thresholds?.autoUpdate && allTestsRun) {
+ if (!this.ctx.vite.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
+ const configFilePath = this.ctx.vite.config.configFile, configModule = await this.parseConfigModule(configFilePath);
  await this.updateThresholds({
  thresholds: resolvedThresholds,
  configurationFile: configModule,
@@ -4131,16 +3873,10 @@ Update your dependencies and make sure the versions match.`));
  * for specific files defined by glob pattern or global for all other files.
  */
  resolveThresholds(coverageMap) {
- const resolvedThresholds = [];
- const files = coverageMap.files();
- const globalCoverageMap = this.createCoverageMap();
+ const resolvedThresholds = [], files = coverageMap.files(), globalCoverageMap = this.createCoverageMap();
  for (const key of Object.keys(this.options.thresholds)) {
  if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) continue;
- const glob = key;
- const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
- const globCoverageMap = this.createCoverageMap();
- const matcher = pm(glob);
- const matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
+ const glob = key, globThresholds = resolveGlobThresholds(this.options.thresholds[glob]), globCoverageMap = this.createCoverageMap(), matcher = pm(glob), matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
  for (const file of matchingFiles) {
  const fileCoverage = coverageMap.fileCoverageFor(file);
  globCoverageMap.addFileCoverage(fileCoverage);
@@ -4156,7 +3892,7 @@ Update your dependencies and make sure the versions match.`));
  const fileCoverage = coverageMap.fileCoverageFor(file);
  globalCoverageMap.addFileCoverage(fileCoverage);
  }
- resolvedThresholds.unshift({
+ return resolvedThresholds.unshift({
  name: GLOBAL_THRESHOLDS_KEY,
  coverageMap: globalCoverageMap,
  thresholds: {
@@ -4165,8 +3901,7 @@ Update your dependencies and make sure the versions match.`));
  lines: this.options.thresholds?.lines,
  statements: this.options.thresholds?.statements
  }
- });
- return resolvedThresholds;
+ }), resolvedThresholds;
  }
  /**
  * Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
@@ -4204,8 +3939,7 @@ Update your dependencies and make sure the versions match.`));
  this.ctx.logger.error(errorMessage);
  }
  } else {
- const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
- const absoluteThreshold = threshold * -1;
+ const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered, absoluteThreshold = threshold * -1;
  if (uncovered > absoluteThreshold) {
  process.exitCode = 1;
  /**
@@ -4229,8 +3963,7 @@ Update your dependencies and make sure the versions match.`));
  const config = resolveConfig(configurationFile);
  assertConfigurationModule(config);
  for (const { coverageMap, thresholds, name } of allThresholds) {
- const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()];
- const thresholdsToUpdate = [];
+ const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()], thresholdsToUpdate = [];
  for (const key of THRESHOLD_KEYS) {
  const threshold = thresholds[key] ?? 100;
  /**
@@ -4241,8 +3974,7 @@ Update your dependencies and make sure the versions match.`));
  const actual = Math.min(...summaries.map((summary) => summary[key].pct));
  if (actual > threshold) thresholdsToUpdate.push([key, actual]);
  } else {
- const absoluteThreshold = threshold * -1;
- const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
+ const absoluteThreshold = threshold * -1, actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
  if (actual < absoluteThreshold) {
  // If everything was covered, set new threshold to 100% (since a threshold of 0 would be considered as 0%)
  const updatedThreshold = actual === 0 ? 100 : actual * -1;
@@ -4252,16 +3984,17 @@ Update your dependencies and make sure the versions match.`));
  }
  if (thresholdsToUpdate.length === 0) continue;
  updatedThresholds = true;
- for (const [threshold, newValue] of thresholdsToUpdate) if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = newValue;
- else {
- const glob = config.test.coverage.thresholds[name];
- glob[threshold] = newValue;
+ const thresholdFormatter = typeof this.options.thresholds?.autoUpdate === "function" ? this.options.thresholds?.autoUpdate : (value) => value;
+ for (const [threshold, newValue] of thresholdsToUpdate) {
+ const formattedValue = thresholdFormatter(newValue);
+ if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = formattedValue;
+ else {
+ const glob = config.test.coverage.thresholds[name];
+ glob[threshold] = formattedValue;
+ }
  }
  }
- if (updatedThresholds) {
- this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
- onUpdate();
- }
+ if (updatedThresholds) this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds."), onUpdate();
  }
  async mergeReports(coverageMaps) {
  const coverageMap = this.createCoverageMap();
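The `thresholdFormatter` introduced above means `coverage.thresholds.autoUpdate` can be a function as well as `true`: the newly measured value is passed through it before being written back to the configuration file. A hedged config sketch of that form (the rounding is an arbitrary example chosen for illustration, not a recommended default):

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 80,
        // Receives the newly measured threshold before it is written back to this file.
        autoUpdate: (newThreshold) => Math.floor(newThreshold * 100) / 100,
      },
    },
  },
});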
@@ -4273,10 +4006,8 @@ Update your dependencies and make sure the versions match.`));
  }
  toSlices(array, size) {
  return array.reduce((chunks, item) => {
- const index = Math.max(0, chunks.length - 1);
- const lastChunk = chunks[index] || [];
- chunks[index] = lastChunk;
- if (lastChunk.length >= size) chunks.push([item]);
+ const index = Math.max(0, chunks.length - 1), lastChunk = chunks[index] || [];
+ if (chunks[index] = lastChunk, lastChunk.length >= size) chunks.push([item]);
  else lastChunk.push(item);
  return chunks;
  }, []);
@@ -4285,23 +4016,25 @@ Update your dependencies and make sure the versions match.`));
  const servers = [...ctx.projects.map((project) => ({
  root: project.config.root,
  isBrowserEnabled: project.isBrowserEnabled(),
- vitenode: project.vitenode
- })), {
+ vite: project.vite
+ })), (
+ // Check core last as it will match all files anyway
+ {
  root: ctx.config.root,
- vitenode: ctx.vitenode,
+ vite: ctx.vite,
  isBrowserEnabled: ctx.getRootProject().isBrowserEnabled()
- }];
+ })];
  return async function transformFile(filename) {
  let lastError;
- for (const { root, vitenode, isBrowserEnabled } of servers) {
+ for (const { root, vite, isBrowserEnabled } of servers) {
  // On Windows root doesn't start with "/" while filenames do
  if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) continue;
  if (isBrowserEnabled) {
- const result = await vitenode.transformRequest(filename, void 0, "web").catch(() => null);
+ const result = await vite.environments.client.transformRequest(filename).catch(() => null);
  if (result) return result;
  }
  try {
- return await vitenode.transformRequest(filename);
+ return await vite.environments.ssr.transformRequest(filename);
  } catch (error) {
  lastError = error;
  }
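The file transformer now goes through Vite's environment API instead of vite-node: for a project with the browser enabled it asks the `client` environment first and falls back to `ssr`. A minimal sketch of that ordering, assuming a resolved Vite dev server per project; `server` and `transformWithFallback` are hypothetical names, not the exported helper:

// Hedged sketch of the client-then-ssr fallback used by transformFile above.
async function transformWithFallback(server, filename, isBrowserEnabled) {
  if (isBrowserEnabled) {
    // Browser-enabled projects are served by the client environment; swallow errors and fall through.
    const clientResult = await server.environments.client.transformRequest(filename).catch(() => null);
    if (clientResult) return clientResult;
  }
  // Everything else (and client misses) goes through the ssr environment.
  return server.environments.ssr.transformRequest(filename);
}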
@@ -4315,14 +4048,12 @@ Update your dependencies and make sure the versions match.`));
  * Narrow down `unknown` glob thresholds to resolved ones
  */
  function resolveGlobThresholds(thresholds) {
- if (!thresholds || typeof thresholds !== "object") return {};
- if (100 in thresholds && thresholds[100] === true) return {
+ return !thresholds || typeof thresholds !== "object" ? {} : 100 in thresholds && thresholds[100] === true ? {
  lines: 100,
  branches: 100,
  functions: 100,
  statements: 100
- };
- return {
+ } : {
  lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines : void 0,
  branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches : void 0,
  functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions : void 0,
@@ -4348,8 +4079,7 @@ function resolveConfig(configModule) {
  if (config) return config;
  // "export default mergeConfig(..., defineConfig(...))"
  if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
- config = resolveMergeConfig(mod);
- if (config) return config;
+ if (config = resolveMergeConfig(mod), config) return config;
  }
  } catch (error) {
  // Reduce magicast's verbose errors to readable ones
@@ -4378,4 +4108,4 @@ function resolveMergeConfig(mod) {
  }
  }
 
- export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, isBrowserEnabled as d, groupBy as e, getCoverageProvider as f, getFilePoolName as g, hash as h, isPackageExists as i, createPool as j, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };
+ export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, createFetchModuleFunction as d, isBrowserEnabled as e, groupBy as f, getFilePoolName as g, hash as h, isPackageExists as i, getCoverageProvider as j, createPool as k, normalizeResolvedIdToUrl as n, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };