vitest 3.2.4 → 4.0.0-beta.10

This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (88)
  1. package/LICENSE.md +2 -2
  2. package/dist/browser.d.ts +13 -16
  3. package/dist/browser.js +6 -5
  4. package/dist/chunks/base.Cjha6usc.js +129 -0
  5. package/dist/chunks/{benchmark.CYdenmiT.js → benchmark.CJUa-Hsa.js} +6 -8
  6. package/dist/chunks/{benchmark.d.BwvBVTda.d.ts → benchmark.d.DAaHLpsq.d.ts} +4 -4
  7. package/dist/chunks/browser.d.yFAklsD1.d.ts +18 -0
  8. package/dist/chunks/{cac.Cb-PYCCB.js → cac.DCxo_nSu.js} +72 -163
  9. package/dist/chunks/{cli-api.BkDphVBG.js → cli-api.BJJXh9BV.js} +1331 -1678
  10. package/dist/chunks/{config.d.D2ROskhv.d.ts → config.d.B_LthbQq.d.ts} +59 -65
  11. package/dist/chunks/{console.CtFJOzRO.js → console.7h5kHUIf.js} +34 -70
  12. package/dist/chunks/{constants.DnKduX2e.js → constants.D_Q9UYh-.js} +1 -9
  13. package/dist/chunks/{coverage.DL5VHqXY.js → coverage.BCU-r2QL.js} +538 -765
  14. package/dist/chunks/{coverage.DVF1vEu8.js → coverage.D_JHT54q.js} +2 -2
  15. package/dist/chunks/{coverage.d.S9RMNXIe.d.ts → coverage.d.BZtK59WP.d.ts} +10 -8
  16. package/dist/chunks/{creator.GK6I-cL4.js → creator.08Gi-vCA.js} +93 -77
  17. package/dist/chunks/{date.Bq6ZW5rf.js → date.-jtEtIeV.js} +6 -17
  18. package/dist/chunks/{defaults.B7q_naMc.js → defaults.CXFFjsi8.js} +2 -42
  19. package/dist/chunks/environment.d.BsToaxti.d.ts +65 -0
  20. package/dist/chunks/{git.BVQ8w_Sw.js → git.BFNcloKD.js} +1 -2
  21. package/dist/chunks/{global.d.MAmajcmJ.d.ts → global.d.BK3X7FW1.d.ts} +7 -32
  22. package/dist/chunks/{globals.DEHgCU4V.js → globals.DG-S3xFe.js} +8 -8
  23. package/dist/chunks/{index.VByaPkjc.js → index.BIP7prJq.js} +472 -803
  24. package/dist/chunks/{index.B521nVV-.js → index.Bgo3tNWt.js} +23 -4
  25. package/dist/chunks/{index.BCWujgDG.js → index.BjKEiSn0.js} +14 -24
  26. package/dist/chunks/{index.CdQS2e2Q.js → index.CMfqw92x.js} +7 -8
  27. package/dist/chunks/{index.CmSc2RE5.js → index.DIWhzsUh.js} +72 -118
  28. package/dist/chunks/{inspector.C914Efll.js → inspector.CvQD-Nie.js} +10 -25
  29. package/dist/chunks/moduleRunner.d.D9nBoC4p.d.ts +201 -0
  30. package/dist/chunks/moduleTransport.I-bgQy0S.js +19 -0
  31. package/dist/chunks/{node.fjCdwEIl.js → node.CyipiPvJ.js} +1 -1
  32. package/dist/chunks/plugin.d.BMVSnsGV.d.ts +9 -0
  33. package/dist/chunks/{reporters.d.BFLkQcL6.d.ts → reporters.d.BUWjmRYq.d.ts} +2086 -2146
  34. package/dist/chunks/resolveSnapshotEnvironment.Bkht6Yor.js +81 -0
  35. package/dist/chunks/resolver.Bx6lE0iq.js +119 -0
  36. package/dist/chunks/rpc.BKr6mtxz.js +65 -0
  37. package/dist/chunks/{setup-common.Dd054P77.js → setup-common.uiMcU3cv.js} +17 -29
  38. package/dist/chunks/startModuleRunner.p67gbNo9.js +665 -0
  39. package/dist/chunks/{suite.d.FvehnV49.d.ts → suite.d.BJWk38HB.d.ts} +1 -1
  40. package/dist/chunks/test.BiqSKISg.js +214 -0
  41. package/dist/chunks/{typechecker.DRKU1-1g.js → typechecker.DB-fIMaH.js} +165 -234
  42. package/dist/chunks/{utils.CAioKnHs.js → utils.C2YI6McM.js} +5 -14
  43. package/dist/chunks/{utils.XdZDrNZV.js → utils.D2R2NiOH.js} +8 -27
  44. package/dist/chunks/{vi.bdSIJ99Y.js → vi.ZPgvtBao.js} +156 -305
  45. package/dist/chunks/{vm.BThCzidc.js → vm.Ca0Y0W5f.js} +116 -226
  46. package/dist/chunks/{worker.d.1GmBbd7G.d.ts → worker.d.BDsXGkwh.d.ts} +31 -32
  47. package/dist/chunks/{worker.d.CKwWzBSj.d.ts → worker.d.BNcX_2mH.d.ts} +1 -1
  48. package/dist/cli.js +10 -10
  49. package/dist/config.cjs +5 -58
  50. package/dist/config.d.ts +72 -71
  51. package/dist/config.js +3 -9
  52. package/dist/coverage.d.ts +31 -24
  53. package/dist/coverage.js +9 -9
  54. package/dist/environments.d.ts +9 -14
  55. package/dist/environments.js +1 -1
  56. package/dist/index.d.ts +52 -213
  57. package/dist/index.js +7 -9
  58. package/dist/module-evaluator.d.ts +13 -0
  59. package/dist/module-evaluator.js +276 -0
  60. package/dist/module-runner.js +15 -0
  61. package/dist/node.d.ts +62 -51
  62. package/dist/node.js +26 -42
  63. package/dist/reporters.d.ts +11 -12
  64. package/dist/reporters.js +12 -12
  65. package/dist/runners.d.ts +3 -4
  66. package/dist/runners.js +13 -231
  67. package/dist/snapshot.js +2 -2
  68. package/dist/suite.d.ts +2 -2
  69. package/dist/suite.js +2 -2
  70. package/dist/worker.js +90 -47
  71. package/dist/workers/forks.js +34 -10
  72. package/dist/workers/runVmTests.js +36 -56
  73. package/dist/workers/threads.js +34 -10
  74. package/dist/workers/vmForks.js +11 -10
  75. package/dist/workers/vmThreads.js +11 -10
  76. package/dist/workers.d.ts +5 -7
  77. package/dist/workers.js +35 -17
  78. package/globals.d.ts +17 -17
  79. package/package.json +32 -31
  80. package/dist/chunks/base.DfmxU-tU.js +0 -38
  81. package/dist/chunks/environment.d.cL3nLXbE.d.ts +0 -119
  82. package/dist/chunks/execute.B7h3T_Hc.js +0 -708
  83. package/dist/chunks/index.CwejwG0H.js +0 -105
  84. package/dist/chunks/rpc.-pEldfrD.js +0 -83
  85. package/dist/chunks/runBaseTests.9Ij9_de-.js +0 -129
  86. package/dist/chunks/vite.d.CMLlLIFP.d.ts +0 -25
  87. package/dist/execute.d.ts +0 -150
  88. package/dist/execute.js +0 -13
@@ -1,41 +1,39 @@
  import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
+ import path, { win32, dirname, join, resolve } from 'node:path';
+ import { isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, createDefer, slash, shuffle, toArray } from '@vitest/utils';
  import { isAbsolute, join as join$1, dirname as dirname$1, resolve as resolve$1, relative, normalize } from 'pathe';
  import pm from 'picomatch';
+ import { glob } from 'tinyglobby';
  import c from 'tinyrainbow';
- import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.B7q_naMc.js';
+ import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.CXFFjsi8.js';
  import crypto from 'node:crypto';
- import { createDefer, shuffle, toArray } from '@vitest/utils';
  import { builtinModules, createRequire } from 'node:module';
- import path, { win32, dirname, join, resolve } from 'node:path';
  import process$1 from 'node:process';
  import fs$1, { writeFile, rename, stat, unlink } from 'node:fs/promises';
  import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL as URL$1 } from 'node:url';
  import assert from 'node:assert';
  import v8 from 'node:v8';
  import { format, inspect } from 'node:util';
- import { version, mergeConfig } from 'vite';
- import { e as extraInlineDeps, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.DnKduX2e.js';
+ import { fetchModule, version, mergeConfig } from 'vite';
+ import { c as configFiles, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.D_Q9UYh-.js';
  import { a as isWindows } from './env.D4Lgay0q.js';
  import * as nodeos from 'node:os';
- import nodeos__default from 'node:os';
+ import nodeos__default, { tmpdir } from 'node:os';
  import { isatty } from 'node:tty';
  import EventEmitter from 'node:events';
- import { c as createBirpc } from './index.B521nVV-.js';
+ import { c as createBirpc } from './index.Bgo3tNWt.js';
  import Tinypool$1, { Tinypool } from 'tinypool';
- import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.DRKU1-1g.js';
+ import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.DB-fIMaH.js';
  import { MessageChannel } from 'node:worker_threads';
  import { hasFailed } from '@vitest/runner/utils';
  import { rootDir } from '../path.js';
- import { slash } from 'vite-node/utils';
  import { isCI, provider } from 'std-env';
- import { r as resolveCoverageProviderModule } from './coverage.DVF1vEu8.js';
+ import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js';

  function groupBy(collection, iteratee) {
  return collection.reduce((acc, item) => {
  const key = iteratee(item);
- acc[key] ||= [];
- acc[key].push(item);
- return acc;
+ return acc[key] ||= [], acc[key].push(item), acc;
  }, {});
  }
  function stdout() {
@@ -2016,7 +2014,7 @@ function normalizeid(id) {
  if (typeof id !== "string") {
  id = id.toString();
  }
- if (/(node|data|http|https|file):/.test(id)) {
+ if (/(?:node|data|http|https|file):/.test(id)) {
  return id;
  }
  if (BUILTIN_MODULES.has(id)) {
@@ -2050,7 +2048,7 @@ function _resolve$1(id, options = {}) {
  throw new TypeError("input must be a `string` or `URL`");
  }
  }
- if (/(node|data|http|https):/.test(id)) {
+ if (/(?:node|data|http|https):/.test(id)) {
  return id;
  }
  if (BUILTIN_MODULES.has(id)) {
@@ -2384,8 +2382,7 @@ const isPackageListed = quansync(function* (name, cwd) {
  isPackageListed.sync;

  function getWorkersCountByPercentage(percent) {
- const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
- const workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
+ const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length, workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
  return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
  }

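For reference, the percentage form of maxWorkers resolves to a concrete count exactly as in the function above; a standalone sketch (the workersFromPercent name is invented for illustration, the math is the same):

import os from 'node:os';

// Same computation as getWorkersCountByPercentage: "50%" on an 8-core machine
// gives Math.round(50 / 100 * 8) === 4, and the result is always clamped into
// the range [1, core count].
function workersFromPercent(percent) {
  const maxWorkersCount = os.availableParallelism?.() ?? os.cpus().length;
  const byPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
  return Math.max(1, Math.min(maxWorkersCount, byPercentage));
}

console.log(workersFromPercent('50%')); // 4 on an 8-core machine
console.log(workersFromPercent('1%')); // rounds to 0, clamped up to 1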
@@ -2395,35 +2392,19 @@ const envsOrder = [
  "happy-dom",
  "edge-runtime"
  ];
- function getTransformMode(patterns, filename) {
- if (patterns.web && pm.isMatch(filename, patterns.web)) return "web";
- if (patterns.ssr && pm.isMatch(filename, patterns.ssr)) return "ssr";
- return void 0;
- }
  async function groupFilesByEnv(files) {
  const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
  const code = await promises$1.readFile(filepath, "utf-8");
  // 1. Check for control comments in the file
  let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
- // 2. Check for globals
- if (!env) {
- for (const [glob, target] of project.config.environmentMatchGlobs || []) if (pm.isMatch(filepath, glob, { cwd: project.config.root })) {
- env = target;
- break;
- }
- }
- // 3. Fallback to global env
+ // 2. Fallback to global env
  env ||= project.config.environment || "node";
- const transformMode = getTransformMode(project.config.testTransformMode, filepath);
  let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
  if (envOptionsJson?.endsWith("*/"))
  // Trim closing Docblock characters the above regex might have captured
  envOptionsJson = envOptionsJson.slice(0, -2);
- const envOptions = JSON.parse(envOptionsJson || "null");
- const envKey = env === "happy-dom" ? "happyDOM" : env;
- const environment = {
+ const envOptions = JSON.parse(envOptionsJson || "null"), envKey = env === "happy-dom" ? "happyDOM" : env, environment = {
  name: env,
- transformMode,
  options: envOptions ? { [envKey]: envOptions } : null
  };
  return {
@@ -2438,53 +2419,198 @@ async function groupFilesByEnv(files) {
  return groupBy(filesWithEnv, ({ environment }) => environment.name);
  }

- const created = /* @__PURE__ */ new Set();
- const promises = /* @__PURE__ */ new Map();
+ const created = /* @__PURE__ */ new Set(), promises = /* @__PURE__ */ new Map();
+ function createFetchModuleFunction(resolver, cacheFs = false, tmpDir = join$1(tmpdir(), nanoid())) {
+ const cachedFsResults = /* @__PURE__ */ new Map();
+ return async (url, importer, environment, options) => {
+ // We are copy pasting Vite's externalization logic from `fetchModule` because
+ // we instead rely on our own `shouldExternalize` method because Vite
+ // doesn't support `resolve.external` in non SSR environments (jsdom/happy-dom)
+ if (url.startsWith("data:")) return {
+ externalize: url,
+ type: "builtin"
+ };
+ if (url === "/@vite/client" || url === "@vite/client")
+ // this will be stubbed
+ return {
+ externalize: "/@vite/client",
+ type: "module"
+ };
+ const isFileUrl = url.startsWith("file://");
+ if (isExternalUrl(url) && !isFileUrl) return {
+ externalize: url,
+ type: "network"
+ };
+ // Vite does the same in `fetchModule`, but we want to externalize modules ourselves,
+ // so we do this first to resolve the module and check its `id`. The next call of
+ // `ensureEntryFromUrl` inside `fetchModule` is cached and should take no time
+ // This also makes it so externalized modules are inside the module graph.
+ const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url)), cached = !!moduleGraphModule.transformResult;
+ // if url is already cached, we can just confirm it's also cached on the server
+ if (options?.cached && cached) return { cache: true };
+ if (moduleGraphModule.id) {
+ const externalize = await resolver.shouldExternalize(moduleGraphModule.id);
+ if (externalize) return {
+ externalize,
+ type: "module"
+ };
+ }
+ const moduleRunnerModule = await fetchModule(environment, url, importer, {
+ ...options,
+ inlineSourceMap: false
+ }).catch(handleRollupError), result = processResultSource(environment, moduleRunnerModule);
+ if (!cacheFs || !("code" in result)) return result;
+ const code = result.code;
+ // to avoid serialising large chunks of code,
+ // we store them in a tmp file and read in the test thread
+ if (cachedFsResults.has(result.id)) return getCachedResult(result, cachedFsResults);
+ const dir = join$1(tmpDir, environment.name), name = hash("sha1", result.id, "hex"), tmp = join$1(dir, name);
+ if (!created.has(dir)) mkdirSync(dir, { recursive: true }), created.add(dir);
+ return promises.has(tmp) ? (await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults)) : (promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))), await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults));
+ };
+ }
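The checks above run in a fixed order before anything is handed to Vite. A condensed sketch of that decision flow (the shouldExternalize and fetchFromVite callbacks are stand-ins for the resolver method and Vite's fetchModule used above, and the URL test only approximates isExternalUrl):

// 1. data: URLs and the /@vite/client stub are externalized immediately,
// 2. remote URLs become "network" externals,
// 3. the Vitest resolver may externalize by module id,
// 4. only then is the module transformed through Vite.
async function decideFetch(url, moduleId, shouldExternalize, fetchFromVite) {
  if (url.startsWith('data:')) return { externalize: url, type: 'builtin' };
  if (url === '/@vite/client' || url === '@vite/client')
    return { externalize: '/@vite/client', type: 'module' };
  const isFileUrl = url.startsWith('file://');
  if (/^[a-z][a-z\d+\-.]*:\/\//i.test(url) && !isFileUrl)
    return { externalize: url, type: 'network' };
  const externalize = moduleId ? await shouldExternalize(moduleId) : undefined;
  if (externalize) return { externalize, type: 'module' };
  return fetchFromVite(url); // transform result handled by the caller
}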
+ let SOURCEMAPPING_URL = "sourceMa";
+ SOURCEMAPPING_URL += "ppingURL";
+ const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
+ function processResultSource(environment, result) {
+ if (!("code" in result)) return result;
+ const node = environment.moduleGraph.getModuleById(result.id);
+ if (node?.transformResult)
+ // this also overrides node.transformResult.code which is also what the module
+ // runner does under the hood by default (we disable source maps inlining)
+ inlineSourceMap(node.transformResult);
+ return {
+ ...result,
+ code: node?.transformResult?.code || result.code
+ };
+ }
+ const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
+ // we have to inline the source map ourselves, because
+ // - we don't need //# sourceURL since we are running code in VM
+ // - important in stack traces and the V8 coverage
+ // - we need to inject an empty line for --inspect-brk
+ function inlineSourceMap(result) {
+ const map = result.map;
+ let code = result.code;
+ if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
+ if (OTHER_SOURCE_MAP_REGEXP.lastIndex = 0, OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
+ const sourceMap = { ...map };
+ // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
+ // so that debuggers can be set to break on first line
+ if (sourceMap.mappings.startsWith(";")) sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
+ return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
+ }
+ function genSourceMapUrl(map) {
+ if (typeof map !== "string") map = JSON.stringify(map);
+ return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
+ }
+ function getCachedResult(result, cachedFsResults) {
+ const tmp = cachedFsResults.get(result.id);
+ if (!tmp) throw new Error(`The cached result was returned too early for ${result.id}.`);
+ return {
+ cached: true,
+ file: result.file,
+ id: result.id,
+ tmp,
+ url: result.url,
+ invalidate: result.invalidate
+ };
+ }
+ // serialize rollup error on server to preserve details as a test error
+ function handleRollupError(e) {
+ throw e instanceof Error && ("plugin" in e || "frame" in e || "id" in e) ? {
+ name: e.name,
+ message: e.message,
+ stack: e.stack,
+ cause: e.cause,
+ __vitest_rollup_error__: {
+ plugin: e.plugin,
+ id: e.id,
+ loc: e.loc,
+ frame: e.frame
+ }
+ } : e;
+ }
+ /**
+ * Performs an atomic write operation using the write-then-rename pattern.
+ *
+ * Why we need this:
+ * - Ensures file integrity by never leaving partially written files on disk
+ * - Prevents other processes from reading incomplete data during writes
+ * - Particularly important for test files where incomplete writes could cause test failures
+ *
+ * The implementation writes to a temporary file first, then renames it to the target path.
+ * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
+ * guaranteeing that other processes will only ever see the complete file.
+ *
+ * Added in https://github.com/vitest-dev/vitest/pull/7531
+ */
+ async function atomicWriteFile(realFilePath, data) {
+ const dir = dirname$1(realFilePath), tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+ try {
+ await writeFile(tmpFilePath, data, "utf-8"), await rename(tmpFilePath, realFilePath);
+ } finally {
+ try {
+ if (await stat(tmpFilePath)) await unlink(tmpFilePath);
+ } catch {}
+ }
+ }
+
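The same write-then-rename pattern as a self-contained sketch that runs outside the bundle (the atomicWrite name and the rm cleanup are illustrative substitutions for the stat/unlink dance above):

import { writeFile, rename, rm } from 'node:fs/promises';
import { dirname, join } from 'node:path';

// Write to a sibling temp file, then rename it over the target: readers either
// see the old complete file or the new complete file, never a partial write.
async function atomicWrite(target, data) {
  const tmp = join(dirname(target), `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
  try {
    await writeFile(tmp, data, 'utf-8');
    await rename(tmp, target);
  } finally {
    await rm(tmp, { force: true }); // no-op once the rename has moved the file
  }
}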
+ // this is copy pasted from vite
+ function normalizeResolvedIdToUrl(environment, resolvedId) {
+ const root = environment.config.root, depsOptimizer = environment.depsOptimizer;
+ let url;
+ // normalize all imports into resolved URLs
+ // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
+ if (resolvedId.startsWith(withTrailingSlash$1(root)))
+ // in root: infer short absolute path from root
+ url = resolvedId.slice(root.length);
+ else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
+ // an optimized deps may not yet exists in the filesystem, or
+ // a regular file exists but is out of root: rewrite to absolute /@fs/ paths
+ url = path.posix.join("/@fs/", resolvedId);
+ else url = resolvedId;
+ // if the resolved id is not a valid browser import specifier,
+ // prefix it to make it valid. We will strip this before feeding it
+ // back into the transform pipeline
+ if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
+ return url;
+ }
+
  function createMethodsRPC(project, options = {}) {
- const ctx = project.vitest;
- const cacheFs = options.cacheFs ?? false;
+ const ctx = project.vitest, cacheFs = options.cacheFs ?? false, fetch = createFetchModuleFunction(project._resolver, cacheFs, project.tmpDir);
  return {
+ async fetch(url, importer, environmentName, options) {
+ const environment = project.vite.environments[environmentName];
+ if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
+ const start = performance.now();
+ try {
+ return await fetch(url, importer, environment, options);
+ } finally {
+ project.vitest.state.transformTime += performance.now() - start;
+ }
+ },
+ async resolve(id, importer, environmentName) {
+ const environment = project.vite.environments[environmentName];
+ if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
+ const resolved = await environment.pluginContainer.resolveId(id, importer);
+ return resolved ? {
+ file: cleanUrl(resolved.id),
+ url: normalizeResolvedIdToUrl(environment, resolved.id),
+ id: resolved.id
+ } : null;
+ },
  snapshotSaved(snapshot) {
  ctx.snapshot.add(snapshot);
  },
  resolveSnapshotPath(testPath) {
  return ctx.snapshot.resolvePath(testPath, { config: project.serializedConfig });
  },
- async getSourceMap(id, force) {
- if (force) {
- const mod = project.vite.moduleGraph.getModuleById(id);
- if (mod) project.vite.moduleGraph.invalidateModule(mod);
- }
- const r = await project.vitenode.transformRequest(id);
- return r?.map;
- },
- async fetch(id, transformMode) {
- const result = await project.vitenode.fetchResult(id, transformMode).catch(handleRollupError);
- const code = result.code;
- if (!cacheFs || result.externalize) return result;
- if ("id" in result && typeof result.id === "string") return { id: result.id };
- if (code == null) throw new Error(`Failed to fetch module ${id}`);
- const dir = join$1(project.tmpDir, transformMode);
- const name = hash("sha1", id, "hex");
- const tmp = join$1(dir, name);
- if (!created.has(dir)) {
- mkdirSync(dir, { recursive: true });
- created.add(dir);
- }
- if (promises.has(tmp)) {
- await promises.get(tmp);
- return { id: tmp };
- }
- promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp)));
- await promises.get(tmp);
- Object.assign(result, { id: tmp });
- return { id: tmp };
- },
- resolveId(id, importer, transformMode) {
- return project.vitenode.resolveId(id, importer, transformMode).catch(handleRollupError);
- },
- transform(id, environment) {
- return project.vitenode.transformModule(id, environment).catch(handleRollupError);
+ async transform(id) {
+ const environment = project.vite.environments.__vitest_vm__;
+ if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
+ const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id)), result = await environment.transformRequest(url).catch(handleRollupError);
+ return { code: result?.code };
  },
  async onQueued(file) {
  if (options.collect) ctx.state.collectFiles(project, [file]);
@@ -2519,63 +2645,12 @@ function createMethodsRPC(project, options = {}) {
  }
  };
  }
- // serialize rollup error on server to preserve details as a test error
- function handleRollupError(e) {
- if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e))
- // eslint-disable-next-line no-throw-literal
- throw {
- name: e.name,
- message: e.message,
- stack: e.stack,
- cause: e.cause,
- __vitest_rollup_error__: {
- plugin: e.plugin,
- id: e.id,
- loc: e.loc,
- frame: e.frame
- }
- };
- throw e;
- }
- /**
- * Performs an atomic write operation using the write-then-rename pattern.
- *
- * Why we need this:
- * - Ensures file integrity by never leaving partially written files on disk
- * - Prevents other processes from reading incomplete data during writes
- * - Particularly important for test files where incomplete writes could cause test failures
- *
- * The implementation writes to a temporary file first, then renames it to the target path.
- * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
- * guaranteeing that other processes will only ever see the complete file.
- *
- * Added in https://github.com/vitest-dev/vitest/pull/7531
- */
- async function atomicWriteFile(realFilePath, data) {
- const dir = dirname$1(realFilePath);
- const tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
- try {
- await writeFile(tmpFilePath, data, "utf-8");
- await rename(tmpFilePath, realFilePath);
- } finally {
- try {
- if (await stat(tmpFilePath)) await unlink(tmpFilePath);
- } catch {}
- }
- }

  function createChildProcessChannel$1(project, collect = false) {
- const emitter = new EventEmitter();
- const events = {
+ const emitter = new EventEmitter(), events = {
  message: "message",
  response: "response"
- };
- const channel = {
- onMessage: (callback) => emitter.on(events.message, callback),
- postMessage: (message) => emitter.emit(events.response, message),
- onClose: () => emitter.removeAllListeners()
- };
- const rpc = createBirpc(createMethodsRPC(project, {
+ }, rpc = createBirpc(createMethodsRPC(project, {
  cacheFs: true,
  collect
  }), {
@@ -2598,21 +2673,20 @@ function createChildProcessChannel$1(project, collect = false) {
  on(fn) {
  emitter.on(events.response, fn);
  },
- onTimeoutError(functionName) {
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
- }
+ timeout: -1
  });
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
+ const channel = {
+ onMessage: (callback) => emitter.on(events.message, callback),
+ postMessage: (message) => emitter.emit(events.response, message),
+ onClose: () => {
+ emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
+ }
+ };
  return channel;
  }
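The pool channels now lean on birpc's own options instead of the old onTimeoutError hook: timeout: -1 disables the per-call timeout, and $close rejects any still-pending calls when the channel shuts down. A minimal sketch of the same wiring, assuming the public birpc package API that this chunk bundles:

import { EventEmitter } from 'node:events';
import { createBirpc } from 'birpc';

const emitter = new EventEmitter();
const rpc = createBirpc({ /* server-side methods */ }, {
  post: message => emitter.emit('message', message),
  on: fn => emitter.on('response', fn),
  timeout: -1, // never time out pending calls
});

function closeChannel() {
  emitter.removeAllListeners();
  // reject whatever is still in flight instead of leaving callers hanging
  rpc.$close(new Error('[pool]: Pending methods while closing rpc'));
}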
- function createForksPool(vitest, { execArgv, env }) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
- const poolOptions = vitest.config.poolOptions?.forks ?? {};
- const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
- const minThreads = poolOptions.minForks ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
- const worker = resolve(vitest.distPath, "workers/forks.js");
- const options = {
+ function createForksPool(vitest, { execArgv, env }, specifications) {
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/forks.js"), options = {
  runtime: "child_process",
  filename: resolve(vitest.distPath, "worker.js"),
  teardown: "teardown",
@@ -2622,22 +2696,15 @@ function createForksPool(vitest, { execArgv, env }) {
2622
2696
  execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2623
2697
  terminateTimeout: vitest.config.teardownTimeout,
2624
2698
  concurrentTasksPerWorker: 1
2625
- };
2626
- const isolated = poolOptions.isolate ?? true;
2699
+ }, isolated = poolOptions.isolate ?? true;
2627
2700
  if (isolated) options.isolateWorkers = true;
2628
- if (poolOptions.singleFork || !vitest.config.fileParallelism) {
2629
- options.maxThreads = 1;
2630
- options.minThreads = 1;
2631
- }
2632
- const pool = new Tinypool(options);
2633
- const runWithFiles = (name) => {
2701
+ if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2702
+ const pool = new Tinypool(options), runWithFiles = (name) => {
2634
2703
  let id = 0;
2635
2704
  async function runFiles(project, config, files, environment, invalidates = []) {
2636
2705
  const paths = files.map((f) => f.filepath);
2637
2706
  vitest.state.clearFiles(project, paths);
2638
- const channel = createChildProcessChannel$1(project, name === "collect");
2639
- const workerId = ++id;
2640
- const data = {
2707
+ const channel = createChildProcessChannel$1(project, name === "collect"), workerId = ++id, data = {
2641
2708
  pool: "forks",
2642
2709
  worker,
2643
2710
  config,
@@ -2663,40 +2730,25 @@ function createForksPool(vitest, { execArgv, env }) {
2663
2730
  return async (specs, invalidates) => {
2664
2731
  // Cancel pending tasks from pool when possible
2665
2732
  vitest.onCancel(() => pool.cancelPendingTasks());
2666
- const configs = /* @__PURE__ */ new WeakMap();
2667
- const getConfig = (project) => {
2733
+ const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2668
2734
  if (configs.has(project)) return configs.get(project);
2669
- const _config = project.getSerializableConfig();
2670
- const config = wrapSerializableConfig(_config);
2671
- configs.set(project, config);
2672
- return config;
2673
- };
2674
- const singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork);
2675
- const multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2735
+ const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
2736
+ return configs.set(project, config), config;
2737
+ }, singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork), multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2676
2738
  if (multipleForks.length) {
2677
- const filesByEnv = await groupFilesByEnv(multipleForks);
2678
- const files = Object.values(filesByEnv).flat();
2679
- const results = [];
2739
+ const filesByEnv = await groupFilesByEnv(multipleForks), files = Object.values(filesByEnv).flat(), results = [];
2680
2740
  if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2681
2741
  else {
2682
2742
  // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2683
2743
  // Tasks are still running parallel but environments are isolated between tasks.
2684
2744
  const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2685
- for (const group of Object.values(grouped)) {
2686
- // Push all files to pool's queue
2687
- results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2688
- // Once all tasks are running or finished, recycle worker for isolation.
2689
- // On-going workers will run in the previous environment.
2690
- await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
2691
- await pool.recycleWorkers();
2692
- }
2745
+ for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2693
2746
  }
2694
2747
  const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2695
2748
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2696
2749
  }
2697
2750
  if (singleFork.length) {
2698
- const filesByEnv = await groupFilesByEnv(singleFork);
2699
- const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2751
+ const filesByEnv = await groupFilesByEnv(singleFork), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2700
2752
  for (const env of envs) {
2701
2753
  const files = filesByEnv[env];
2702
2754
  if (!files?.length) continue;
@@ -2720,10 +2772,7 @@ function createForksPool(vitest, { execArgv, env }) {
2720
2772
  }
2721
2773
 
2722
2774
  function createWorkerChannel$1(project, collect) {
2723
- const channel = new MessageChannel();
2724
- const port = channel.port2;
2725
- const workerPort = channel.port1;
2726
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
2775
+ const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
2727
2776
  eventNames: ["onCancel"],
2728
2777
  post(v) {
2729
2778
  port.postMessage(v);
@@ -2731,24 +2780,20 @@ function createWorkerChannel$1(project, collect) {
2731
2780
  on(fn) {
2732
2781
  port.on("message", fn);
2733
2782
  },
2734
- onTimeoutError(functionName) {
2735
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
2736
- }
2783
+ timeout: -1
2737
2784
  });
2738
2785
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
2786
+ const onClose = () => {
2787
+ port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2788
+ };
2739
2789
  return {
2740
2790
  workerPort,
2741
- port
2791
+ port,
2792
+ onClose
2742
2793
  };
2743
2794
  }
2744
- function createThreadsPool(vitest, { execArgv, env }) {
2745
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
2746
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
2747
- const poolOptions = vitest.config.poolOptions?.threads ?? {};
2748
- const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
2749
- const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
2750
- const worker = resolve(vitest.distPath, "workers/threads.js");
2751
- const options = {
2795
+ function createThreadsPool(vitest, { execArgv, env }, specifications) {
2796
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/threads.js"), options = {
2752
2797
  filename: resolve(vitest.distPath, "worker.js"),
2753
2798
  teardown: "teardown",
2754
2799
  useAtomics: poolOptions.useAtomics ?? false,
@@ -2758,26 +2803,15 @@ function createThreadsPool(vitest, { execArgv, env }) {
2758
2803
  execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2759
2804
  terminateTimeout: vitest.config.teardownTimeout,
2760
2805
  concurrentTasksPerWorker: 1
2761
- };
2762
- const isolated = poolOptions.isolate ?? true;
2806
+ }, isolated = poolOptions.isolate ?? true;
2763
2807
  if (isolated) options.isolateWorkers = true;
2764
- if (poolOptions.singleThread || !vitest.config.fileParallelism) {
2765
- options.maxThreads = 1;
2766
- options.minThreads = 1;
2767
- }
2768
- const pool = new Tinypool$1(options);
2769
- const runWithFiles = (name) => {
2808
+ if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2809
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
2770
2810
  let id = 0;
2771
2811
  async function runFiles(project, config, files, environment, invalidates = []) {
2772
2812
  const paths = files.map((f) => f.filepath);
2773
2813
  vitest.state.clearFiles(project, paths);
2774
- const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
2775
- const onClose = () => {
2776
- port.close();
2777
- workerPort.close();
2778
- };
2779
- const workerId = ++id;
2780
- const data = {
2814
+ const { workerPort, onClose } = createWorkerChannel$1(project, name === "collect"), workerId = ++id, data = {
2781
2815
  pool: "threads",
2782
2816
  worker,
2783
2817
  port: workerPort,
@@ -2805,39 +2839,25 @@ function createThreadsPool(vitest, { execArgv, env }) {
2805
2839
  return async (specs, invalidates) => {
2806
2840
  // Cancel pending tasks from pool when possible
2807
2841
  vitest.onCancel(() => pool.cancelPendingTasks());
2808
- const configs = /* @__PURE__ */ new WeakMap();
2809
- const getConfig = (project) => {
2842
+ const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2810
2843
  if (configs.has(project)) return configs.get(project);
2811
2844
  const config = project.serializedConfig;
2812
- configs.set(project, config);
2813
- return config;
2814
- };
2815
- const singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread);
2816
- const multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
2845
+ return configs.set(project, config), config;
2846
+ }, singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread), multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
2817
2847
  if (multipleThreads.length) {
2818
- const filesByEnv = await groupFilesByEnv(multipleThreads);
2819
- const files = Object.values(filesByEnv).flat();
2820
- const results = [];
2848
+ const filesByEnv = await groupFilesByEnv(multipleThreads), files = Object.values(filesByEnv).flat(), results = [];
2821
2849
  if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2822
2850
  else {
2823
2851
  // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2824
2852
  // Tasks are still running parallel but environments are isolated between tasks.
2825
2853
  const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2826
- for (const group of Object.values(grouped)) {
2827
- // Push all files to pool's queue
2828
- results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2829
- // Once all tasks are running or finished, recycle worker for isolation.
2830
- // On-going workers will run in the previous environment.
2831
- await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
2832
- await pool.recycleWorkers();
2833
- }
2854
+ for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2834
2855
  }
2835
2856
  const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2836
2857
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2837
2858
  }
2838
2859
  if (singleThreads.length) {
2839
- const filesByEnv = await groupFilesByEnv(singleThreads);
2840
- const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2860
+ const filesByEnv = await groupFilesByEnv(singleThreads), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2841
2861
  for (const env of envs) {
2842
2862
  const files = filesByEnv[env];
2843
2863
  if (!files?.length) continue;
@@ -2861,102 +2881,69 @@ function createThreadsPool(vitest, { execArgv, env }) {
2861
2881
  }
2862
2882
 
2863
2883
  function createTypecheckPool(vitest) {
2864
- const promisesMap = /* @__PURE__ */ new WeakMap();
2865
- const rerunTriggered = /* @__PURE__ */ new WeakSet();
2884
+ const promisesMap = /* @__PURE__ */ new WeakMap(), rerunTriggered = /* @__PURE__ */ new WeakSet();
2866
2885
  async function onParseEnd(project, { files, sourceErrors }) {
2867
- const checker = project.typechecker;
2868
- const { packs, events } = checker.getTestPacksAndEvents();
2869
- await vitest._testRun.updated(packs, events);
2870
- if (!project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
2886
+ const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
2887
+ if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
2871
2888
  const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
2872
2889
  if (processError) {
2873
2890
  const error = new Error(checker.getOutput());
2874
- error.stack = "";
2875
- vitest.state.catchError(error, "Typecheck Error");
2891
+ error.stack = "", vitest.state.catchError(error, "Typecheck Error");
2876
2892
  }
2877
- promisesMap.get(project)?.resolve();
2878
- rerunTriggered.delete(project);
2879
2893
  // triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
2880
- if (vitest.config.watch && !vitest.runningPromise) {
2881
- await vitest.report("onFinished", files, []);
2882
- await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
2894
+ if (promisesMap.get(project)?.resolve(), rerunTriggered.delete(project), vitest.config.watch && !vitest.runningPromise) {
2895
+ const modules = files.map((file) => vitest.state.getReportedEntity(file)).filter((e) => e?.type === "module"), state = vitest.isCancelling ? "interrupted" : modules.some((m) => !m.ok()) ? "failed" : "passed";
2896
+ await vitest.report("onTestRunEnd", modules, [], state), await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
2883
2897
  }
2884
2898
  }
2885
2899
  async function createWorkspaceTypechecker(project, files) {
2886
2900
  const checker = project.typechecker ?? new Typechecker(project);
2887
- if (project.typechecker) return checker;
2888
- project.typechecker = checker;
2889
- checker.setFiles(files);
2890
- checker.onParseStart(async () => {
2901
+ return project.typechecker ? checker : (project.typechecker = checker, checker.setFiles(files), checker.onParseStart(async () => {
2891
2902
  const files = checker.getTestFiles();
2892
2903
  for (const file of files) await vitest._testRun.enqueued(project, file);
2893
2904
  await vitest._testRun.collected(project, files);
2894
- });
2895
- checker.onParseEnd((result) => onParseEnd(project, result));
2896
- checker.onWatcherRerun(async () => {
2897
- rerunTriggered.add(project);
2898
- if (!vitest.runningPromise) {
2899
- vitest.state.clearErrors();
2900
- await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
2901
- }
2905
+ }), checker.onParseEnd((result) => onParseEnd(project, result)), checker.onWatcherRerun(async () => {
2906
+ if (rerunTriggered.add(project), !vitest.runningPromise) vitest.state.clearErrors(), await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
2902
2907
  await checker.collectTests();
2903
2908
  const testFiles = checker.getTestFiles();
2904
2909
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
2905
2910
  await vitest._testRun.collected(project, testFiles);
2906
2911
  const { packs, events } = checker.getTestPacksAndEvents();
2907
2912
  await vitest._testRun.updated(packs, events);
2908
- });
2909
- return checker;
2913
+ }), checker);
2910
2914
  }
2911
2915
  async function startTypechecker(project, files) {
2912
2916
  if (project.typechecker) return;
2913
2917
  const checker = await createWorkspaceTypechecker(project, files);
2914
- await checker.collectTests();
2915
- await checker.start();
2918
+ await checker.collectTests(), await checker.start();
2916
2919
  }
2917
2920
  async function collectTests(specs) {
2918
2921
  const specsByProject = groupBy(specs, (spec) => spec.project.name);
2919
2922
  for (const name in specsByProject) {
2920
- const project = specsByProject[name][0].project;
2921
- const files = specsByProject[name].map((spec) => spec.moduleId);
2922
- const checker = await createWorkspaceTypechecker(project, files);
2923
- checker.setFiles(files);
2924
- await checker.collectTests();
2923
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), checker = await createWorkspaceTypechecker(project, files);
2924
+ checker.setFiles(files), await checker.collectTests();
2925
2925
  const testFiles = checker.getTestFiles();
2926
2926
  vitest.state.collectFiles(project, testFiles);
2927
2927
  }
2928
2928
  }
2929
2929
  async function runTests(specs) {
2930
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
2931
- const promises = [];
2930
+ const specsByProject = groupBy(specs, (spec) => spec.project.name), promises = [];
2932
2931
  for (const name in specsByProject) {
2933
- const project = specsByProject[name][0].project;
2934
- const files = specsByProject[name].map((spec) => spec.moduleId);
2935
- const promise = createDefer();
2936
- // check that watcher actually triggered rerun
2937
- const _p = new Promise((resolve) => {
2932
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), _p = new Promise((resolve) => {
2938
2933
  const _i = setInterval(() => {
2939
- if (!project.typechecker || rerunTriggered.has(project)) {
2940
- resolve(true);
2941
- clearInterval(_i);
2942
- }
2934
+ if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
2943
2935
  });
2944
2936
  setTimeout(() => {
2945
- resolve(false);
2946
- clearInterval(_i);
2937
+ resolve(false), clearInterval(_i);
2947
2938
  }, 500).unref();
2948
- });
2949
- const triggered = await _p;
2939
+ }), triggered = await _p;
2950
2940
  if (project.typechecker && !triggered) {
2951
2941
  const testFiles = project.typechecker.getTestFiles();
2952
2942
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
2953
- await vitest._testRun.collected(project, testFiles);
2954
- await onParseEnd(project, project.typechecker.getResult());
2943
+ await vitest._testRun.collected(project, testFiles), await onParseEnd(project, project.typechecker.getResult());
2955
2944
  continue;
2956
2945
  }
2957
- promises.push(promise);
2958
- promisesMap.set(project, promise);
2959
- promises.push(startTypechecker(project, files));
2946
+ promises.push(promise), promisesMap.set(project, promise), promises.push(startTypechecker(project, files));
2960
2947
  }
2961
2948
  await Promise.all(promises);
2962
2949
  }
@@ -3000,8 +2987,7 @@ function stringToBytes(input, percentageReference) {
3000
2987
  let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
3001
2988
  if (trailingChars && numericString) {
3002
2989
  const numericValue = Number.parseFloat(numericString);
3003
- trailingChars = trailingChars.toLowerCase();
3004
- switch (trailingChars) {
2990
+ switch (trailingChars = trailingChars.toLowerCase(), trailingChars) {
3005
2991
  case "%":
3006
2992
  input = numericValue / 100;
3007
2993
  break;
@@ -3017,26 +3003,20 @@ function stringToBytes(input, percentageReference) {
3017
3003
  }
3018
3004
  }
3019
3005
  } else input = Number.parseFloat(input);
3020
- if (typeof input === "number") if (input <= 1 && input > 0) if (percentageReference) return Math.floor(input * percentageReference);
3021
- else throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3022
- else if (input > 1) return Math.floor(input);
3006
+ if (typeof input === "number") if (input <= 1 && input > 0) {
3007
+ if (percentageReference) return Math.floor(input * percentageReference);
3008
+ throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3009
+ } else if (input > 1) return Math.floor(input);
3023
3010
  else throw new Error("Unexpected numerical input for \"memoryLimit\"");
3024
3011
  return null;
3025
3012
  }
3026
3013
 
3027
3014
  const suppressWarningsPath$1 = resolve(rootDir, "./suppress-warnings.cjs");
3028
3015
  function createChildProcessChannel(project, collect) {
3029
- const emitter = new EventEmitter();
3030
- const cleanup = () => emitter.removeAllListeners();
3031
- const events = {
3016
+ const emitter = new EventEmitter(), events = {
3032
3017
  message: "message",
3033
3018
  response: "response"
3034
- };
3035
- const channel = {
3036
- onMessage: (callback) => emitter.on(events.message, callback),
3037
- postMessage: (message) => emitter.emit(events.response, message)
3038
- };
3039
- const rpc = createBirpc(createMethodsRPC(project, {
3019
+ }, rpc = createBirpc(createMethodsRPC(project, {
3040
3020
  cacheFs: true,
3041
3021
  collect
3042
3022
  }), {
@@ -3059,24 +3039,20 @@ function createChildProcessChannel(project, collect) {
3059
3039
  on(fn) {
3060
3040
  emitter.on(events.response, fn);
3061
3041
  },
3062
- onTimeoutError(functionName) {
3063
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
3064
- }
3042
+ timeout: -1
3065
3043
  });
3066
3044
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
3067
- return {
3068
- channel,
3069
- cleanup
3045
+ const channel = {
3046
+ onMessage: (callback) => emitter.on(events.message, callback),
3047
+ postMessage: (message) => emitter.emit(events.response, message),
3048
+ onClose: () => {
3049
+ emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3050
+ }
3070
3051
  };
3052
+ return { channel };
3071
3053
  }
3072
- function createVmForksPool(vitest, { execArgv, env }) {
3073
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3074
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3075
- const poolOptions = vitest.config.poolOptions?.vmForks ?? {};
3076
- const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
3077
- const minThreads = poolOptions.maxForks ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
3078
- const worker = resolve(vitest.distPath, "workers/vmForks.js");
3079
- const options = {
3054
+ function createVmForksPool(vitest, { execArgv, env }, specifications) {
3055
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/vmForks.js"), options = {
3080
3056
  runtime: "child_process",
3081
3057
  filename: resolve(vitest.distPath, "worker.js"),
3082
3058
  maxThreads,
@@ -3094,19 +3070,13 @@ function createVmForksPool(vitest, { execArgv, env }) {
3094
3070
  concurrentTasksPerWorker: 1,
3095
3071
  maxMemoryLimitBeforeRecycle: getMemoryLimit$1(vitest.config) || void 0
3096
3072
  };
3097
- if (poolOptions.singleFork || !vitest.config.fileParallelism) {
3098
- options.maxThreads = 1;
3099
- options.minThreads = 1;
3100
- }
3101
- const pool = new Tinypool$1(options);
3102
- const runWithFiles = (name) => {
3073
+ if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3074
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
3103
3075
  let id = 0;
3104
3076
  async function runFiles(project, config, files, environment, invalidates = []) {
3105
3077
  const paths = files.map((f) => f.filepath);
3106
3078
  vitest.state.clearFiles(project, paths);
3107
- const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
3108
- const workerId = ++id;
3109
- const data = {
3079
+ const { channel } = createChildProcessChannel(project, name === "collect"), workerId = ++id, data = {
3110
3080
  pool: "forks",
3111
3081
  worker,
3112
3082
  config,
@@ -3128,24 +3098,17 @@ function createVmForksPool(vitest, { execArgv, env }) {
3128
3098
  else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3129
3099
  else throw error;
3130
3100
  } finally {
3131
- cleanup();
3101
+ channel.onClose();
3132
3102
  }
3133
3103
  }
3134
3104
  return async (specs, invalidates) => {
3135
3105
  // Cancel pending tasks from pool when possible
3136
3106
  vitest.onCancel(() => pool.cancelPendingTasks());
3137
- const configs = /* @__PURE__ */ new Map();
3138
- const getConfig = (project) => {
3107
+ const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3139
3108
  if (configs.has(project)) return configs.get(project);
3140
- const _config = project.serializedConfig;
3141
- const config = wrapSerializableConfig(_config);
3142
- configs.set(project, config);
3143
- return config;
3144
- };
3145
- const filesByEnv = await groupFilesByEnv(specs);
3146
- const promises = Object.values(filesByEnv).flat();
3147
- const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
3148
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3109
+ const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
3110
+ return configs.set(project, config), config;
3111
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3149
3112
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3150
3113
  };
3151
3114
  };
@@ -3157,21 +3120,14 @@ function createVmForksPool(vitest, { execArgv, env }) {
3157
3120
  };
3158
3121
  }
3159
3122
  function getMemoryLimit$1(config) {
3160
- const memory = nodeos.totalmem();
3161
- const limit = getWorkerMemoryLimit(config, "vmForks");
3162
- if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
3163
- // If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
3164
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
3123
+ const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmForks");
3165
3124
  // just ignore "memoryLimit" value because we cannot detect memory limit
3166
- return null;
3125
+ return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3167
3126
  }
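
The memory-limit resolution above keeps its previous behavior, just collapsed into one expression. A reduced sketch — `toBytes` here is a simplified stand-in for the bundled `stringToBytes`, which also understands unit strings like "512MB":

import * as nodeos from 'node:os';

// Simplified stand-in for stringToBytes: fractional numbers and "NN%" strings
// resolve against `total`; absolute values pass through.
function toBytes(limit: string | number, total?: number): number | null {
  if (typeof limit === 'number') {
    return limit <= 1 ? (total != null ? Math.floor(total * limit) : null) : limit;
  }
  if (limit.endsWith('%')) {
    return total != null ? Math.floor(total * (Number.parseFloat(limit) / 100)) : null;
  }
  return Number.parseFloat(limit); // the real helper also parses "512MB" etc.
}

function resolveMemoryLimit(limit: string | number, watch: boolean): number | null {
  const memory = nodeos.totalmem();
  if (typeof memory === 'number') return toBytes(limit, watch ? memory / 2 : memory);
  // Without totalmem() percentage-based limits (0.5, "50%") cannot be resolved,
  // so only absolute limits are honored.
  if ((typeof limit === 'number' && limit > 1) || (typeof limit === 'string' && !limit.endsWith('%'))) {
    return toBytes(limit);
  }
  return null;
}

In watch mode only half of the machine's memory serves as the 100% base, so `memoryLimit: "50%"` on a 16 GiB machine recycles workers at 4 GiB instead of 8 GiB.
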
3168
3127
 
3169
3128
  const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
3170
3129
  function createWorkerChannel(project, collect) {
3171
- const channel = new MessageChannel();
3172
- const port = channel.port2;
3173
- const workerPort = channel.port1;
3174
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
3130
+ const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
3175
3131
  eventNames: ["onCancel"],
3176
3132
  post(v) {
3177
3133
  port.postMessage(v);
@@ -3179,24 +3135,19 @@ function createWorkerChannel(project, collect) {
3179
3135
  on(fn) {
3180
3136
  port.on("message", fn);
3181
3137
  },
3182
- onTimeoutError(functionName) {
3183
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
3184
- }
3138
+ timeout: -1
3185
3139
  });
3186
3140
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
3141
+ function onClose() {
3142
+ workerPort.close(), port.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3143
+ }
3187
3144
  return {
3188
3145
  workerPort,
3189
- port
3146
+ onClose
3190
3147
  };
3191
3148
  }
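
The worker channel now returns a single `onClose` callback instead of both ports, and uses `rpc.$close` to reject any calls still pending when the file finishes. A minimal sketch of the same wiring; the `methods` object stands in for `createMethodsRPC(project, { collect })`:

import { MessageChannel } from 'node:worker_threads';
import { createBirpc } from 'birpc';

function createChannel<Methods extends object>(methods: Methods) {
  const channel = new MessageChannel();
  const port = channel.port2;       // host side
  const workerPort = channel.port1; // transferred to the worker
  const rpc = createBirpc(methods, {
    post: v => port.postMessage(v),
    on: fn => port.on('message', fn),
    timeout: -1, // disable the RPC timeout, as the pool above does
  });
  const onClose = () => {
    workerPort.close();
    port.close();
    // reject anything that is still awaiting a response from the other side
    rpc.$close(new Error('[vitest-pool]: Pending methods while closing rpc'));
  };
  return { workerPort, rpc, onClose };
}
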
3192
- function createVmThreadsPool(vitest, { execArgv, env }) {
3193
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3194
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3195
- const poolOptions = vitest.config.poolOptions?.vmThreads ?? {};
3196
- const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
3197
- const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
3198
- const worker = resolve(vitest.distPath, "workers/vmThreads.js");
3199
- const options = {
3149
+ function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
3150
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/vmThreads.js"), options = {
3200
3151
  filename: resolve(vitest.distPath, "worker.js"),
3201
3152
  useAtomics: poolOptions.useAtomics ?? false,
3202
3153
  maxThreads,
@@ -3214,19 +3165,13 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
3214
3165
  concurrentTasksPerWorker: 1,
3215
3166
  maxMemoryLimitBeforeRecycle: getMemoryLimit(vitest.config) || void 0
3216
3167
  };
3217
- if (poolOptions.singleThread || !vitest.config.fileParallelism) {
3218
- options.maxThreads = 1;
3219
- options.minThreads = 1;
3220
- }
3221
- const pool = new Tinypool$1(options);
3222
- const runWithFiles = (name) => {
3168
+ if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3169
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
3223
3170
  let id = 0;
3224
3171
  async function runFiles(project, config, files, environment, invalidates = []) {
3225
3172
  const paths = files.map((f) => f.filepath);
3226
3173
  vitest.state.clearFiles(project, paths);
3227
- const { workerPort, port } = createWorkerChannel(project, name === "collect");
3228
- const workerId = ++id;
3229
- const data = {
3174
+ const { workerPort, onClose } = createWorkerChannel(project, name === "collect"), workerId = ++id, data = {
3230
3175
  pool: "vmThreads",
3231
3176
  worker,
3232
3177
  port: workerPort,
@@ -3249,24 +3194,17 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
3249
3194
  else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3250
3195
  else throw error;
3251
3196
  } finally {
3252
- port.close();
3253
- workerPort.close();
3197
+ onClose();
3254
3198
  }
3255
3199
  }
3256
3200
  return async (specs, invalidates) => {
3257
3201
  // Cancel pending tasks from pool when possible
3258
3202
  vitest.onCancel(() => pool.cancelPendingTasks());
3259
- const configs = /* @__PURE__ */ new Map();
3260
- const getConfig = (project) => {
3203
+ const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3261
3204
  if (configs.has(project)) return configs.get(project);
3262
3205
  const config = project.serializedConfig;
3263
- configs.set(project, config);
3264
- return config;
3265
- };
3266
- const filesByEnv = await groupFilesByEnv(specs);
3267
- const promises = Object.values(filesByEnv).flat();
3268
- const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
3269
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3206
+ return configs.set(project, config), config;
3207
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3270
3208
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3271
3209
  };
3272
3210
  };
@@ -3278,13 +3216,9 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
3278
3216
  };
3279
3217
  }
3280
3218
  function getMemoryLimit(config) {
3281
- const memory = nodeos.totalmem();
3282
- const limit = getWorkerMemoryLimit(config, "vmThreads");
3283
- if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
3284
- // If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
3285
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
3219
+ const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmThreads");
3286
3220
  // just ignore "memoryLimit" value because we cannot detect memory limit
3287
- return null;
3221
+ return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3288
3222
  }
3289
3223
 
3290
3224
  const builtinPools = [
@@ -3296,14 +3230,9 @@ const builtinPools = [
3296
3230
  "typescript"
3297
3231
  ];
3298
3232
  function getDefaultPoolName(project) {
3299
- if (project.config.browser.enabled) return "browser";
3300
- return project.config.pool;
3233
+ return project.config.browser.enabled ? "browser" : project.config.pool;
3301
3234
  }
3302
- function getFilePoolName(project, file) {
3303
- for (const [glob, pool] of project.config.poolMatchGlobs) {
3304
- if (pool === "browser") throw new Error("Since Vitest 0.31.0 \"browser\" pool is not supported in `poolMatchGlobs`. You can create a project to run some of your tests in browser in parallel. Read more: https://vitest.dev/guide/projects");
3305
- if (pm.isMatch(file, glob, { cwd: project.config.root })) return pool;
3306
- }
3235
+ function getFilePoolName(project) {
3307
3236
  return getDefaultPoolName(project);
3308
3237
  }
3309
3238
  function createPool(ctx) {
@@ -3314,26 +3243,15 @@ function createPool(ctx) {
3314
3243
  vmThreads: null,
3315
3244
  vmForks: null,
3316
3245
  typescript: null
3317
- };
3318
- // in addition to resolve.conditions Vite also adds production/development,
3319
- // see: https://github.com/vitejs/vite/blob/af2aa09575229462635b7cbb6d248ca853057ba2/packages/vite/src/node/plugins/resolve.ts#L1056-L1080
3320
- const viteMajor = Number(version.split(".")[0]);
3321
- const potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3246
+ }, viteMajor = Number(version.split(".")[0]), potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3322
3247
  "production",
3323
3248
  "development",
3324
3249
  ...ctx.vite.config.resolve.conditions
3325
- ]);
3326
- const conditions = [...potentialConditions].filter((condition) => {
3327
- if (condition === "production") return ctx.vite.config.isProduction;
3328
- if (condition === "development") return !ctx.vite.config.isProduction;
3329
- return true;
3250
+ ]), conditions = [...potentialConditions].filter((condition) => {
3251
+ return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
3330
3252
  }).map((condition) => {
3331
- if (viteMajor >= 6 && condition === "development|production") return ctx.vite.config.isProduction ? "production" : "development";
3332
- return condition;
3333
- }).flatMap((c) => ["--conditions", c]);
3334
- // Instead of passing whole process.execArgv to the workers, pick allowed options.
3335
- // Some options may crash worker, e.g. --prof, --title. nodejs/node#41103
3336
- const execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
3253
+ return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
3254
+ }).flatMap((c) => ["--conditions", c]), execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
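
Workers do not inherit the parent's full `process.execArgv`; only profiling-related flags are forwarded, and Vite's resolve conditions are re-encoded as repeated `--conditions` flags (on Vite 6+ the combined `development|production` condition collapses to the active mode). An illustration with made-up values:

// e.g. resolved conditions ['node', 'development|production'] on a Vite 6 dev build,
// started with `node --cpu-prof --title=vitest ...`:
const conditions = ['node', 'development']   // 'development|production' -> active mode
  .flatMap(c => ['--conditions', c]);        // ['--conditions', 'node', '--conditions', 'development']
const execArgv = process.execArgv.filter(arg =>
  arg.startsWith('--cpu-prof')
  || arg.startsWith('--heap-prof')
  || arg.startsWith('--diagnostic-dir'),
); // '--title' is dropped: unknown flags can crash workers (see nodejs/node#41103)
// the worker options then receive execArgv: [...execArgv, ...conditions]
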
3337
3255
  async function executeTests(method, files, invalidate) {
3338
3256
  const options = {
3339
3257
  execArgv: [...execArgv, ...conditions],
@@ -3349,25 +3267,22 @@ function createPool(ctx) {
3349
3267
  };
3350
3268
  // env are case-insensitive on Windows, but spawned processes don't support it
3351
3269
  if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
3352
- const poolConcurrentPromises = /* @__PURE__ */ new Map();
3353
- const customPools = /* @__PURE__ */ new Map();
3270
+ const poolConcurrentPromises = /* @__PURE__ */ new Map(), customPools = /* @__PURE__ */ new Map();
3354
3271
  async function resolveCustomPool(filepath) {
3355
3272
  if (customPools.has(filepath)) return customPools.get(filepath);
3356
- const pool = await ctx.runner.executeId(filepath);
3273
+ const pool = await ctx.runner.import(filepath);
3357
3274
  if (typeof pool.default !== "function") throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
3358
3275
  const poolInstance = await pool.default(ctx, options);
3359
3276
  if (typeof poolInstance?.name !== "string") throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
3360
3277
  if (typeof poolInstance?.[method] !== "function") throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
3361
- customPools.set(filepath, poolInstance);
3362
- return poolInstance;
3278
+ return customPools.set(filepath, poolInstance), poolInstance;
3363
3279
  }
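
Custom pools are now loaded with `runner.import` instead of `executeId`, but the contract checked above is unchanged: the default export is a factory receiving the Vitest instance and the worker options, and the returned object needs a `name` plus the invoked method. A hypothetical pool module — the `ProcessPool`/`TestSpecification` types and the `runTests`/`collectTests` method names follow vitest's public pool API as of 3.x and should be treated as an assumption for the beta:

// my-pool.ts — hypothetical custom pool
import type { ProcessPool, TestSpecification, Vitest } from 'vitest/node';

export default function createMyPool(vitest: Vitest, options: unknown): ProcessPool {
  return {
    name: 'my-pool',
    async runTests(specs: TestSpecification[], invalidates?: string[]) {
      for (const spec of specs) {
        vitest.logger.log(`would run ${spec.moduleId} in project ${spec.project.name}`);
      }
    },
    async collectTests(specs: TestSpecification[]) {
      // collection could reuse runTests with a "collect only" flag
    },
    async close() {
      // tear down any workers created by this pool
    },
  };
}
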
3364
3280
  function getConcurrentPool(pool, fn) {
3365
3281
  if (poolConcurrentPromises.has(pool)) return poolConcurrentPromises.get(pool);
3366
3282
  const promise = fn().finally(() => {
3367
3283
  poolConcurrentPromises.delete(pool);
3368
3284
  });
3369
- poolConcurrentPromises.set(pool, promise);
3370
- return promise;
3285
+ return poolConcurrentPromises.set(pool, promise), promise;
3371
3286
  }
3372
3287
  function getCustomPool(pool) {
3373
3288
  return getConcurrentPool(pool, () => resolveCustomPool(pool));
@@ -3378,23 +3293,18 @@ function createPool(ctx) {
3378
3293
  return createBrowserPool(ctx);
3379
3294
  });
3380
3295
  }
3381
- const groupedSpecifications = {};
3382
- const groups = /* @__PURE__ */ new Set();
3383
- const factories = {
3384
- vmThreads: () => createVmThreadsPool(ctx, options),
3385
- vmForks: () => createVmForksPool(ctx, options),
3386
- threads: () => createThreadsPool(ctx, options),
3387
- forks: () => createForksPool(ctx, options),
3296
+ const groupedSpecifications = {}, groups = /* @__PURE__ */ new Set(), factories = {
3297
+ vmThreads: (specs) => createVmThreadsPool(ctx, options, specs),
3298
+ vmForks: (specs) => createVmForksPool(ctx, options, specs),
3299
+ threads: (specs) => createThreadsPool(ctx, options, specs),
3300
+ forks: (specs) => createForksPool(ctx, options, specs),
3388
3301
  typescript: () => createTypecheckPool(ctx)
3389
3302
  };
3390
3303
  for (const spec of files) {
3391
- const group = spec[0].config.sequence.groupOrder ?? 0;
3392
- groups.add(group);
3393
- groupedSpecifications[group] ??= [];
3394
- groupedSpecifications[group].push(spec);
3304
+ const group = spec.project.config.sequence.groupOrder ?? 0;
3305
+ groups.add(group), groupedSpecifications[group] ??= [], groupedSpecifications[group].push(spec);
3395
3306
  }
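
Specifications are now objects (`spec.project`, `spec.pool`, `spec.moduleId`) rather than `[project, file, options]` tuples, so the group order is read from `spec.project.config`. A sketch of the bucketing — the `Spec` interface only models the fields used here:

interface Spec { project: { config: { sequence: { groupOrder?: number } } } }

function groupByOrder<S extends Spec>(files: S[]): Map<number, S[]> {
  const grouped = new Map<number, S[]>();
  for (const spec of files) {
    const group = spec.project.config.sequence.groupOrder ?? 0;
    if (!grouped.has(group)) grouped.set(group, []);
    grouped.get(group)!.push(spec);
  }
  // groups run one after another in ascending order; files inside a group
  // are still scheduled in parallel across the pools
  return new Map([...grouped.entries()].sort(([a], [b]) => a - b));
}
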
3396
- const Sequencer = ctx.config.sequence.sequencer;
3397
- const sequencer = new Sequencer(ctx);
3307
+ const Sequencer = ctx.config.sequence.sequencer, sequencer = new Sequencer(ctx);
3398
3308
  async function sortSpecs(specs) {
3399
3309
  if (ctx.config.shard) {
3400
3310
  if (!ctx.config.passWithNoTests && ctx.config.shard.count > specs.length) throw new Error(`--shard <count> must be smaller than the count of test files. Resolved ${specs.length} test files for --shard=${ctx.config.shard.index}/${ctx.config.shard.count}.`);
@@ -3414,26 +3324,19 @@ function createPool(ctx) {
3414
3324
  typescript: []
3415
3325
  };
3416
3326
  specifications.forEach((specification) => {
3417
- const pool = specification[2].pool;
3418
- filesByPool[pool] ??= [];
3419
- filesByPool[pool].push(specification);
3420
- });
3421
- await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3327
+ const pool = specification.pool;
3328
+ filesByPool[pool] ??= [], filesByPool[pool].push(specification);
3329
+ }), await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3422
3330
  const [pool, files] = entry;
3423
3331
  if (!files.length) return null;
3424
3332
  const specs = await sortSpecs(files);
3425
3333
  if (pool in factories) {
3426
3334
  const factory = factories[pool];
3427
- pools[pool] ??= factory();
3428
- return pools[pool][method](specs, invalidate);
3429
- }
3430
- if (pool === "browser") {
3431
- pools.browser ??= await getBrowserPool();
3432
- return pools.browser[method](specs, invalidate);
3335
+ return pools[pool] ??= factory(specs), pools[pool][method](specs, invalidate);
3433
3336
  }
3337
+ if (pool === "browser") return pools.browser ??= await getBrowserPool(), pools.browser[method](specs, invalidate);
3434
3338
  const poolHandler = await getCustomPool(pool);
3435
- pools[poolHandler.name] ??= poolHandler;
3436
- return poolHandler[method](specs, invalidate);
3339
+ return pools[poolHandler.name] ??= poolHandler, poolHandler[method](specs, invalidate);
3437
3340
  }));
3438
3341
  }
3439
3342
  }
@@ -3454,14 +3357,9 @@ class BaseSequencer {
3454
3357
  }
3455
3358
  // async so it can be extended by other sequencers
3456
3359
  async shard(files) {
3457
- const { config } = this.ctx;
3458
- const { index, count } = config.shard;
3459
- const shardSize = Math.ceil(files.length / count);
3460
- const shardStart = shardSize * (index - 1);
3461
- const shardEnd = shardSize * index;
3360
+ const { config } = this.ctx, { index, count } = config.shard, [shardStart, shardEnd] = this.calculateShardRange(files.length, index, count);
3462
3361
  return [...files].map((spec) => {
3463
- const fullPath = resolve$1(slash(config.root), slash(spec.moduleId));
3464
- const specPath = fullPath?.slice(config.root.length);
3362
+ const fullPath = resolve$1(slash(config.root), slash(spec.moduleId)), specPath = fullPath?.slice(config.root.length);
3465
3363
  return {
3466
3364
  spec,
3467
3365
  hash: hash("sha1", specPath, "hex")
@@ -3472,25 +3370,26 @@ class BaseSequencer {
3472
3370
  async sort(files) {
3473
3371
  const cache = this.ctx.cache;
3474
3372
  return [...files].sort((a, b) => {
3475
- const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
3476
- const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
3477
- const aState = cache.getFileTestResults(keyA);
3478
- const bState = cache.getFileTestResults(keyB);
3373
+ const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`, keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`, aState = cache.getFileTestResults(keyA), bState = cache.getFileTestResults(keyB);
3479
3374
  if (!aState || !bState) {
3480
- const statsA = cache.getFileStats(keyA);
3481
- const statsB = cache.getFileStats(keyB);
3482
- // run unknown first
3483
- if (!statsA || !statsB) return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
3375
+ const statsA = cache.getFileStats(keyA), statsB = cache.getFileStats(keyB);
3484
3376
  // run larger files first
3485
- return statsB.size - statsA.size;
3377
+ return !statsA || !statsB ? !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0 : statsB.size - statsA.size;
3486
3378
  }
3487
- // run failed first
3488
- if (aState.failed && !bState.failed) return -1;
3489
- if (!aState.failed && bState.failed) return 1;
3490
3379
  // run longer first
3491
- return bState.duration - aState.duration;
3380
+ return aState.failed && !bState.failed ? -1 : !aState.failed && bState.failed ? 1 : bState.duration - aState.duration;
3492
3381
  });
3493
3382
  }
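
The comparator above applies the same ordering as before, just folded into ternaries. Restated as a standalone function (the `FileResult`/`FileStats` shapes are illustrative):

interface FileResult { failed: boolean; duration: number }
interface FileStats { size: number }

function compareSpecs(
  aState: FileResult | undefined, bState: FileResult | undefined,
  aStats: FileStats | undefined, bStats: FileStats | undefined,
): number {
  if (!aState || !bState) {
    // fall back to file stats: files without stats first, then larger files first
    if (!aStats || !bStats) return !aStats && bStats ? -1 : !bStats && aStats ? 1 : 0;
    return bStats.size - aStats.size;
  }
  // files that failed on the previous run go first, then longer-running files
  if (aState.failed && !bState.failed) return -1;
  if (!aState.failed && bState.failed) return 1;
  return bState.duration - aState.duration;
}

This way previously failing files surface as early as possible, and long files start before short ones to keep the pool saturated.
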
3383
+ // Calculate the shard range [start, end] so test files are distributed equally across shards
3384
+ calculateShardRange(filesCount, index, count) {
3385
+ const baseShardSize = Math.floor(filesCount / count), remainderTestFilesCount = filesCount % count;
3386
+ if (remainderTestFilesCount >= index) {
3387
+ const shardSize = baseShardSize + 1, shardStart = shardSize * (index - 1), shardEnd = shardSize * index;
3388
+ return [shardStart, shardEnd];
3389
+ }
3390
+ const shardStart = remainderTestFilesCount * (baseShardSize + 1) + (index - remainderTestFilesCount - 1) * baseShardSize, shardEnd = shardStart + baseShardSize;
3391
+ return [shardStart, shardEnd];
3392
+ }
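
The previous implementation sliced with a fixed `Math.ceil(files.length / count)`, which could leave the last shard underfilled; `calculateShardRange` spreads the remainder over the first shards instead. For example, 10 files over 3 shards now split 4/3/3 rather than 4/4/2:

// Same distribution logic as calculateShardRange above.
function shardRange(filesCount: number, index: number, count: number): [number, number] {
  const base = Math.floor(filesCount / count);
  const remainder = filesCount % count;
  if (remainder >= index) {
    const size = base + 1;
    return [size * (index - 1), size * index];
  }
  const start = remainder * (base + 1) + (index - remainder - 1) * base;
  return [start, start + base];
}

shardRange(10, 1, 3); // [0, 4]  -> files 0..3
shardRange(10, 2, 3); // [4, 7]  -> files 4..6
shardRange(10, 3, 3); // [7, 10] -> files 7..9
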
3494
3393
  }
3495
3394
 
3496
3395
  class RandomSequencer extends BaseSequencer {
@@ -3508,11 +3407,10 @@ function parseInspector(inspect) {
3508
3407
  if (typeof inspect === "number") return { port: inspect };
3509
3408
  if (inspect.match(/https?:\//)) throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
3510
3409
  const [host, port] = inspect.split(":");
3511
- if (!port) return { host };
3512
- return {
3410
+ return port ? {
3513
3411
  host,
3514
3412
  port: Number(port) || defaultInspectPort
3515
- };
3413
+ } : { host };
3516
3414
  }
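
`parseInspector` accepts a bare port number or a `host[:port]` string and rejects URLs; a bare host yields only `{ host }`, while a non-numeric port falls back to `defaultInspectPort` (Node's usual 9229 — the concrete value is an assumption here). Expected results:

parseInspector(9230);                     // { port: 9230 }
parseInspector('127.0.0.1:9230');         // { host: '127.0.0.1', port: 9230 }
parseInspector('127.0.0.1');              // { host: '127.0.0.1' }
parseInspector('127.0.0.1:abc');          // { host: '127.0.0.1', port: defaultInspectPort }
parseInspector('http://localhost:9229');  // throws: the host cannot be a URL
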
3517
3415
  function resolveApiServerConfig(options, defaultPort) {
3518
3416
  let api;
@@ -3530,12 +3428,10 @@ function resolveApiServerConfig(options, defaultPort) {
3530
3428
  return api;
3531
3429
  }
3532
3430
  function resolveInlineWorkerOption(value) {
3533
- if (typeof value === "string" && value.trim().endsWith("%")) return getWorkersCountByPercentage(value);
3534
- else return Number(value);
3431
+ return typeof value === "string" && value.trim().endsWith("%") ? getWorkersCountByPercentage(value) : Number(value);
3535
3432
  }
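
`maxWorkers` and the per-pool `maxThreads`/`maxForks` accept percentage strings; everything else goes through `Number`. A sketch, assuming `getWorkersCountByPercentage` resolves the percentage against the detected CPU count (the exact rounding in the bundled helper is an assumption):

import * as nodeos from 'node:os';

// Assumed behavior of getWorkersCountByPercentage: a share of detected CPUs, never below 1.
function workersFromPercentage(value: string): number {
  const cpus = typeof nodeos.availableParallelism === 'function'
    ? nodeos.availableParallelism()
    : nodeos.cpus().length;
  return Math.max(1, Math.floor(cpus * (Number.parseFloat(value) / 100)));
}

function resolveWorkerOption(value: string | number): number {
  return typeof value === 'string' && value.trim().endsWith('%')
    ? workersFromPercentage(value)
    : Number(value);
}

resolveWorkerOption('50%'); // half of the CPUs, e.g. 4 on an 8-core machine
resolveWorkerOption(3);     // 3
resolveWorkerOption('3');   // 3
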
3536
3433
  function resolveConfig$1(vitest, options, viteConfig) {
3537
- const mode = vitest.mode;
3538
- const logger = vitest.logger;
3434
+ const mode = vitest.mode, logger = vitest.logger;
3539
3435
  if (options.dom) {
3540
3436
  if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`));
3541
3437
  options.environment = "happy-dom";
@@ -3546,24 +3442,17 @@ function resolveConfig$1(vitest, options, viteConfig) {
3546
3442
  root: viteConfig.root,
3547
3443
  mode
3548
3444
  };
3549
- resolved.project = toArray(resolved.project);
3550
- resolved.provide ??= {};
3551
- resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "";
3552
- resolved.color = typeof options.name !== "string" ? options.name?.color : void 0;
3445
+ if (resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enable Browser Mode, use "test.browser.enabled" instead.`);
3553
3446
  const inspector = resolved.inspect || resolved.inspectBrk;
3554
- resolved.inspector = {
3447
+ if (resolved.inspector = {
3555
3448
  ...resolved.inspector,
3556
3449
  ...parseInspector(inspector),
3557
3450
  enabled: !!inspector,
3558
3451
  waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk
3559
- };
3560
- if (viteConfig.base !== "/") resolved.base = viteConfig.base;
3561
- resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true;
3562
- if (options.shard) {
3452
+ }, viteConfig.base !== "/") resolved.base = viteConfig.base;
3453
+ if (resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true, options.shard) {
3563
3454
  if (resolved.watch) throw new Error("You cannot use --shard option with enabled watch");
3564
- const [indexString, countString] = options.shard.split("/");
3565
- const index = Math.abs(Number.parseInt(indexString, 10));
3566
- const count = Math.abs(Number.parseInt(countString, 10));
3455
+ const [indexString, countString] = options.shard.split("/"), index = Math.abs(Number.parseInt(indexString, 10)), count = Math.abs(Number.parseInt(countString, 10));
3567
3456
  if (Number.isNaN(count) || count <= 0) throw new Error("--shard <count> must be a positive number");
3568
3457
  if (Number.isNaN(index) || index <= 0 || index > count) throw new Error("--shard <index> must be a positive number less than <count>");
3569
3458
  resolved.shard = {
@@ -3574,21 +3463,12 @@ function resolveConfig$1(vitest, options, viteConfig) {
3574
3463
  if (resolved.standalone && !resolved.watch) throw new Error(`Vitest standalone mode requires --watch`);
3575
3464
  if (resolved.mergeReports && resolved.watch) throw new Error(`Cannot merge reports with --watch enabled`);
3576
3465
  if (resolved.maxWorkers) resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
3577
- if (resolved.minWorkers) resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
3578
- // run benchmark sequentially by default
3579
- resolved.fileParallelism ??= mode !== "benchmark";
3580
- if (!resolved.fileParallelism) {
3581
- // ignore user config, parallelism cannot be implemented without limiting workers
3582
- resolved.maxWorkers = 1;
3583
- resolved.minWorkers = 1;
3584
- }
3585
- if (resolved.maxConcurrency === 0) {
3586
- logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`));
3587
- resolved.maxConcurrency = configDefaults.maxConcurrency;
3588
- }
3466
+ if (resolved.fileParallelism ??= mode !== "benchmark", !resolved.fileParallelism)
3467
+ // ignore user config, parallelism cannot be implemented without limiting workers
3468
+ resolved.maxWorkers = 1;
3469
+ if (resolved.maxConcurrency === 0) logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)), resolved.maxConcurrency = configDefaults.maxConcurrency;
3589
3470
  if (resolved.inspect || resolved.inspectBrk) {
3590
- const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread;
3591
- const isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3471
+ const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread, isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3592
3472
  if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
3593
3473
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3594
3474
  throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
@@ -3609,106 +3489,78 @@ function resolveConfig$1(vitest, options, viteConfig) {
3609
3489
  const playwrightChromiumOnly = isPlaywrightChromiumOnly(vitest, resolved);
3610
3490
  // Browser-mode "Playwright + Chromium" only features:
3611
3491
  if (browser.enabled && !playwrightChromiumOnly) {
3612
- const browserConfig = { browser: {
3613
- provider: browser.provider,
3614
- name: browser.name,
3615
- instances: browser.instances?.map((i) => ({ browser: i.browser }))
3616
- } };
3617
- if (resolved.coverage.enabled && resolved.coverage.provider === "v8") throw new Error(`@vitest/coverage-v8 does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
3618
- provider: "playwright",
3619
- instances: [{ browser: "chromium" }]
3620
- } }, null, 2)}\n\n...or change your coverage provider to:\n${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}\n`);
3492
+ const browserConfig = `
3493
+ {
3494
+ browser: {
3495
+ provider: ${browser.provider?.name || "preview"}(),
3496
+ instances: [
3497
+ ${(browser.instances || []).map((i) => `{ browser: '${i.browser}' }`).join(",\n ")}
3498
+ ],
3499
+ },
3500
+ }
3501
+ `.trim(), correctExample = `
3502
+ {
3503
+ browser: {
3504
+ provider: playwright(),
3505
+ instances: [
3506
+ { browser: 'chromium' }
3507
+ ],
3508
+ },
3509
+ }
3510
+ `.trim();
3511
+ if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
3512
+ const coverageExample = `
3513
+ {
3514
+ coverage: {
3515
+ provider: 'istanbul',
3516
+ },
3517
+ }
3518
+ `.trim();
3519
+ throw new Error(`@vitest/coverage-v8 does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or change your coverage provider to:\n${coverageExample}\n`);
3520
+ }
3621
3521
  if (resolved.inspect || resolved.inspectBrk) {
3622
3522
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3623
- throw new Error(`${inspectOption} does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
3624
- provider: "playwright",
3625
- instances: [{ browser: "chromium" }]
3626
- } }, null, 2)}\n\n...or disable ${inspectOption}\n`);
3523
+ throw new Error(`${inspectOption} does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or disable ${inspectOption}\n`);
3627
3524
  }
3628
3525
  }
3629
- resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter);
3630
- if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3526
+ if (resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter), resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3631
3527
  const reportsDirectory = resolve$1(resolved.root, resolved.coverage.reportsDirectory);
3632
3528
  if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
3633
3529
  }
3634
3530
  if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
3635
- resolved.expect ??= {};
3636
- resolved.deps ??= {};
3637
- resolved.deps.moduleDirectories ??= [];
3638
- resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
3531
+ resolved.expect ??= {}, resolved.deps ??= {}, resolved.deps.moduleDirectories ??= [];
3532
+ const envModuleDirectories = process.env.VITEST_MODULE_DIRECTORIES || process.env.npm_config_VITEST_MODULE_DIRECTORIES;
3533
+ if (envModuleDirectories) resolved.deps.moduleDirectories.push(...envModuleDirectories.split(","));
3534
+ if (resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
3639
3535
  if (!dir.startsWith("/")) dir = `/${dir}`;
3640
3536
  if (!dir.endsWith("/")) dir += "/";
3641
3537
  return normalize(dir);
3642
- });
3643
- if (!resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
3644
- resolved.deps.optimizer ??= {};
3645
- resolved.deps.optimizer.ssr ??= {};
3646
- resolved.deps.optimizer.ssr.enabled ??= true;
3647
- resolved.deps.optimizer.web ??= {};
3648
- resolved.deps.optimizer.web.enabled ??= true;
3649
- resolved.deps.web ??= {};
3650
- resolved.deps.web.transformAssets ??= true;
3651
- resolved.deps.web.transformCss ??= true;
3652
- resolved.deps.web.transformGlobPattern ??= [];
3653
- resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root));
3654
- resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root));
3655
- // override original exclude array for cases where user re-uses same object in test.exclude
3656
- resolved.coverage.exclude = [
3538
+ }), !resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
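
`deps.moduleDirectories` can now also be extended through the `VITEST_MODULE_DIRECTORIES` (or `npm_config_VITEST_MODULE_DIRECTORIES`) environment variable as a comma-separated list; each entry is normalized to a `/dir/` form and `/node_modules/` is always present. For example:

// VITEST_MODULE_DIRECTORIES="packages,vendor/libs" contributes:
const fromEnv = 'packages,vendor/libs'.split(',');      // ['packages', 'vendor/libs']
const normalized = fromEnv.map((dir) => {
  if (!dir.startsWith('/')) dir = `/${dir}`;
  if (!dir.endsWith('/')) dir += '/';
  return dir;                                           // the real code also runs normalize() for Windows paths
});
// -> ['/packages/', '/vendor/libs/'], with '/node_modules/' appended when missing
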
3539
+ if (resolved.deps.optimizer ??= {}, resolved.deps.optimizer.ssr ??= {}, resolved.deps.optimizer.ssr.enabled ??= false, resolved.deps.optimizer.client ??= {}, resolved.deps.optimizer.client.enabled ??= false, resolved.deps.web ??= {}, resolved.deps.web.transformAssets ??= true, resolved.deps.web.transformCss ??= true, resolved.deps.web.transformGlobPattern ??= [], resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root)), resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root)), resolved.coverage.exclude = [
3657
3540
  ...resolved.coverage.exclude,
3658
3541
  ...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`),
3659
- ...resolved.include
3660
- ];
3661
- resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles];
3662
- resolved.server ??= {};
3663
- resolved.server.deps ??= {};
3664
- const deprecatedDepsOptions = [
3665
- "inline",
3666
- "external",
3667
- "fallbackCJS"
3668
- ];
3669
- deprecatedDepsOptions.forEach((option) => {
3670
- if (resolved.deps[option] === void 0) return;
3671
- if (option === "fallbackCJS") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`));
3672
- else {
3673
- const transformMode = resolved.environment === "happy-dom" || resolved.environment === "jsdom" ? "web" : "ssr";
3674
- logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. If you rely on vite-node directly, use "server.deps.${option}" instead. Otherwise, consider using "deps.optimizer.${transformMode}.${option === "external" ? "exclude" : "include"}"`));
3675
- }
3676
- if (resolved.server.deps[option] === void 0) resolved.server.deps[option] = resolved.deps[option];
3677
- });
3678
- if (resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3679
- // vitenode will try to import such file with native node,
3680
- // but then our mocker will not work properly
3681
- if (resolved.server.deps.inline !== true) {
3682
- const ssrOptions = viteConfig.ssr;
3683
- if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null) resolved.server.deps.inline = true;
3684
- else {
3685
- resolved.server.deps.inline ??= [];
3686
- resolved.server.deps.inline.push(...extraInlineDeps);
3687
- }
3688
- }
3689
- resolved.server.deps.inlineFiles ??= [];
3690
- resolved.server.deps.inlineFiles.push(...resolved.setupFiles);
3691
- resolved.server.deps.moduleDirectories ??= [];
3692
- resolved.server.deps.moduleDirectories.push(...resolved.deps.moduleDirectories);
3542
+ ...resolved.include,
3543
+ resolved.config && slash(resolved.config),
3544
+ ...configFiles,
3545
+ "**/virtual:*",
3546
+ "**/__x00__*",
3547
+ "**/node_modules/**"
3548
+ ].filter((pattern) => pattern != null), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3693
3549
  if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root);
3694
- resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments");
3695
- if (resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3696
- resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0;
3697
- if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) resolved.snapshotFormat.plugins = [];
3550
+ if (resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"), resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3551
+ if (resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0, resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
3552
+ // TODO: support it via separate config (like DiffOptions) or via `Function.toString()`
3553
+ if (resolved.snapshotFormat.plugins = [], typeof resolved.snapshotFormat.compareKeys === "function") throw new TypeError(`"snapshotFormat.compareKeys" function is not supported.`);
3554
+ }
3698
3555
  const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT;
3699
- resolved.snapshotOptions = {
3556
+ if (resolved.snapshotOptions = {
3700
3557
  expand: resolved.expandSnapshotDiff ?? false,
3701
3558
  snapshotFormat: resolved.snapshotFormat || {},
3702
3559
  updateSnapshot: isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new",
3703
3560
  resolveSnapshotPath: options.resolveSnapshotPath,
3704
3561
  snapshotEnvironment: null
3705
- };
3706
- resolved.snapshotSerializers ??= [];
3707
- resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root));
3708
- resolved.forceRerunTriggers.push(...resolved.snapshotSerializers);
3709
- if (options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3710
- resolved.pool ??= "threads";
3711
- if (process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3562
+ }, resolved.snapshotSerializers ??= [], resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)), resolved.forceRerunTriggers.push(...resolved.snapshotSerializers), options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3563
+ if (resolved.pool ??= "threads", process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3712
3564
  ...resolved.poolOptions,
3713
3565
  threads: {
3714
3566
  ...resolved.poolOptions?.threads,
@@ -3719,17 +3571,6 @@ function resolveConfig$1(vitest, options, viteConfig) {
3719
3571
  maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
3720
3572
  }
3721
3573
  };
3722
- if (process.env.VITEST_MIN_THREADS) resolved.poolOptions = {
3723
- ...resolved.poolOptions,
3724
- threads: {
3725
- ...resolved.poolOptions?.threads,
3726
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
3727
- },
3728
- vmThreads: {
3729
- ...resolved.poolOptions?.vmThreads,
3730
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
3731
- }
3732
- };
3733
3574
  if (process.env.VITEST_MAX_FORKS) resolved.poolOptions = {
3734
3575
  ...resolved.poolOptions,
3735
3576
  forks: {
@@ -3741,51 +3582,16 @@ function resolveConfig$1(vitest, options, viteConfig) {
3741
3582
  maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
3742
3583
  }
3743
3584
  };
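
With `minWorkers` gone, the `VITEST_MIN_THREADS`/`VITEST_MIN_FORKS` overrides were dropped as well; only the max variants survive and they map directly onto `poolOptions`. For example:

// VITEST_MAX_THREADS=4 VITEST_MAX_FORKS=2 vitest run
// is equivalent to configuring:
const poolOptions = {
  threads: { maxThreads: 4 },   // the vm pools appear to receive the same values in the full source
  forks:   { maxForks: 2 },
};
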
3744
- if (process.env.VITEST_MIN_FORKS) resolved.poolOptions = {
3745
- ...resolved.poolOptions,
3746
- forks: {
3747
- ...resolved.poolOptions?.forks,
3748
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
3749
- },
3750
- vmForks: {
3751
- ...resolved.poolOptions?.vmForks,
3752
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
3753
- }
3754
- };
3755
- const poolThreadsOptions = [
3756
- ["threads", "minThreads"],
3757
- ["threads", "maxThreads"],
3758
- ["vmThreads", "minThreads"],
3759
- ["vmThreads", "maxThreads"]
3760
- ];
3585
+ const poolThreadsOptions = [["threads", "maxThreads"], ["vmThreads", "maxThreads"]];
3761
3586
  for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3762
- const poolForksOptions = [
3763
- ["forks", "minForks"],
3764
- ["forks", "maxForks"],
3765
- ["vmForks", "minForks"],
3766
- ["vmForks", "maxForks"]
3767
- ];
3587
+ const poolForksOptions = [["forks", "maxForks"], ["vmForks", "maxForks"]];
3768
3588
  for (const [poolOptionKey, workerOptionKey] of poolForksOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3769
- if (typeof resolved.workspace === "string")
3770
- // if passed down from the CLI and it's relative, resolve relative to CWD
3771
- resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve$1(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
3772
3589
  if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
3773
- if (resolved.poolMatchGlobs) logger.deprecate("`poolMatchGlobs` is deprecated. Use `test.projects` to define different configurations instead.");
3774
- resolved.poolMatchGlobs = (resolved.poolMatchGlobs || []).map(([glob, pool]) => {
3775
- if (!builtinPools.includes(pool)) pool = resolvePath(pool, resolved.root);
3776
- return [glob, pool];
3777
- });
3778
3590
  if (mode === "benchmark") {
3779
3591
  resolved.benchmark = {
3780
3592
  ...benchmarkConfigDefaults,
3781
3593
  ...resolved.benchmark
3782
- };
3783
- // override test config
3784
- resolved.coverage.enabled = false;
3785
- resolved.typecheck.enabled = false;
3786
- resolved.include = resolved.benchmark.include;
3787
- resolved.exclude = resolved.benchmark.exclude;
3788
- resolved.includeSource = resolved.benchmark.includeSource;
3594
+ }, resolved.coverage.enabled = false, resolved.typecheck.enabled = false, resolved.include = resolved.benchmark.include, resolved.exclude = resolved.benchmark.exclude, resolved.includeSource = resolved.benchmark.includeSource;
3789
3595
  const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean);
3790
3596
  if (reporters.length) resolved.benchmark.reporters = reporters;
3791
3597
  else resolved.benchmark.reporters = ["default"];
@@ -3794,17 +3600,13 @@ function resolveConfig$1(vitest, options, viteConfig) {
3794
3600
  if (options.compare) resolved.benchmark.compare = options.compare;
3795
3601
  if (options.outputJson) resolved.benchmark.outputJson = options.outputJson;
3796
3602
  }
3797
- if (typeof resolved.diff === "string") {
3798
- resolved.diff = resolvePath(resolved.diff, resolved.root);
3799
- resolved.forceRerunTriggers.push(resolved.diff);
3800
- }
3603
+ if (typeof resolved.diff === "string") resolved.diff = resolvePath(resolved.diff, resolved.root), resolved.forceRerunTriggers.push(resolved.diff);
3801
3604
  // the server has been created, we don't need to override vite.server options
3802
3605
  const api = resolveApiServerConfig(options, defaultPort);
3803
- resolved.api = {
3606
+ if (resolved.api = {
3804
3607
  ...api,
3805
3608
  token: crypto.randomUUID()
3806
- };
3807
- if (options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
3609
+ }, options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
3808
3610
  /*
3809
3611
  * Reporters can be defined in many different ways:
3810
3612
  * { reporter: 'json' }
@@ -3833,86 +3635,55 @@ function resolveConfig$1(vitest, options, viteConfig) {
3833
3635
  if (mode !== "benchmark") {
3834
3636
  // @ts-expect-error "reporter" is from CLI, should be absolute to the running directory
3835
3637
  // it is passed down as "vitest --reporter ../reporter.js"
3836
- const reportersFromCLI = resolved.reporter;
3837
- const cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3838
- // ./reporter.js || ../reporter.js, but not .reporters/reporter.js
3839
- if (/^\.\.?\//.test(reporter)) return resolve$1(process.cwd(), reporter);
3840
- return reporter;
3638
+ const reportersFromCLI = resolved.reporter, cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3639
+ return /^\.\.?\//.test(reporter) ? resolve$1(process.cwd(), reporter) : reporter;
3841
3640
  });
3842
3641
  if (cliReporters.length) resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
3843
3642
  }
3844
3643
  if (!resolved.reporters.length) {
3845
- resolved.reporters.push(["default", {}]);
3846
3644
  // also enable github-actions reporter as a default
3847
- if (process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
3645
+ if (resolved.reporters.push(["default", {}]), process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
3848
3646
  }
3849
3647
  if (resolved.changed) resolved.passWithNoTests ??= true;
3850
- resolved.css ??= {};
3851
- if (typeof resolved.css === "object") {
3852
- resolved.css.modules ??= {};
3853
- resolved.css.modules.classNameStrategy ??= "stable";
3854
- }
3648
+ if (resolved.css ??= {}, typeof resolved.css === "object") resolved.css.modules ??= {}, resolved.css.modules.classNameStrategy ??= "stable";
3855
3649
  if (resolved.cache !== false) {
3856
3650
  if (resolved.cache && typeof resolved.cache.dir === "string") vitest.logger.deprecate(`"cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache directory. Note caches will be written to "cacheDir\/vitest"`);
3857
3651
  resolved.cache = { dir: viteConfig.cacheDir };
3858
3652
  }
3859
- resolved.sequence ??= {};
3860
- if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
3653
+ if (resolved.sequence ??= {}, resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
3861
3654
  const { files, tests } = resolved.sequence.shuffle;
3862
- resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer;
3863
- resolved.sequence.shuffle = tests;
3655
+ resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer, resolved.sequence.shuffle = tests;
3864
3656
  }
3865
3657
  if (!resolved.sequence?.sequencer)
3866
3658
  // CLI flag has higher priority
3867
3659
  resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer;
3868
- resolved.sequence.groupOrder ??= 0;
3869
- resolved.sequence.hooks ??= "stack";
3870
- if (resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
3871
- resolved.typecheck = {
3660
+ if (resolved.sequence.groupOrder ??= 0, resolved.sequence.hooks ??= "stack", resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
3661
+ if (resolved.typecheck = {
3872
3662
  ...configDefaults.typecheck,
3873
3663
  ...resolved.typecheck
3874
- };
3875
- if (resolved.environmentMatchGlobs) logger.deprecate("\"environmentMatchGlobs\" is deprecated. Use `test.projects` to define different configurations instead.");
3876
- resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map((i) => [resolve$1(resolved.root, i[0]), i[1]]);
3877
- resolved.typecheck ??= {};
3878
- resolved.typecheck.enabled ??= false;
3879
- if (resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3880
- resolved.browser.enabled ??= false;
3881
- resolved.browser.headless ??= isCI;
3882
- resolved.browser.isolate ??= true;
3883
- resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark";
3884
- // disable in headless mode by default, and if CI is detected
3885
- resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI;
3886
- if (resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
3887
- const isPreview = resolved.browser.provider === "preview";
3888
- if (isPreview && resolved.browser.screenshotFailures === true) {
3889
- console.warn(c.yellow([
3890
- `Browser provider "preview" doesn't support screenshots, `,
3891
- `so "browser.screenshotFailures" option is forcefully disabled. `,
3892
- `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
3893
- ].join("")));
3894
- resolved.browser.screenshotFailures = false;
3895
- } else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
3896
- resolved.browser.viewport ??= {};
3897
- resolved.browser.viewport.width ??= 414;
3898
- resolved.browser.viewport.height ??= 896;
3899
- resolved.browser.locators ??= {};
3900
- resolved.browser.locators.testIdAttribute ??= "data-testid";
3901
- if (resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = "preview";
3902
- resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort };
3664
+ }, resolved.typecheck ??= {}, resolved.typecheck.enabled ??= false, resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3665
+ if (resolved.browser.enabled ??= false, resolved.browser.headless ??= isCI, resolved.browser.isolate ??= true, resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark", resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI, resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
3666
+ if (resolved.browser.viewport ??= {}, resolved.browser.viewport.width ??= 414, resolved.browser.viewport.height ??= 896, resolved.browser.locators ??= {}, resolved.browser.locators.testIdAttribute ??= "data-testid", resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = void 0;
3667
+ if (typeof resolved.browser.provider === "string") {
3668
+ const source = `@vitest/browser/providers/${resolved.browser.provider}`;
3669
+ throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/guide/browser/config#provider`);
3670
+ }
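
A string `browser.provider` is now a hard error: the option takes a provider factory imported from `@vitest/browser/providers/<name>`, matching the error message and docs link above. A migration sketch — the named `playwright` export from that path is an assumption:

// vitest.config.ts
import { defineConfig } from 'vitest/config';
import { playwright } from '@vitest/browser/providers/playwright'; // assumed named export

export default defineConfig({
  test: {
    browser: {
      enabled: true,
      provider: playwright(),                 // was: provider: 'playwright'
      instances: [{ browser: 'chromium' }],
    },
  },
});
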
3671
+ const isPreview = resolved.browser.provider?.name === "preview";
3672
+ if (isPreview && resolved.browser.screenshotFailures === true) console.warn(c.yellow([
3673
+ `Browser provider "preview" doesn't support screenshots, `,
3674
+ `so "browser.screenshotFailures" option is forcefully disabled. `,
3675
+ `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
3676
+ ].join(""))), resolved.browser.screenshotFailures = false;
3677
+ else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
3903
3678
  // enable includeTaskLocation by default in UI mode
3904
- if (resolved.browser.enabled) {
3679
+ if (resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort }, resolved.browser.enabled) {
3905
3680
  if (resolved.browser.ui) resolved.includeTaskLocation ??= true;
3906
3681
  } else if (resolved.ui) resolved.includeTaskLocation ??= true;
3907
3682
  const htmlReporter = toArray(resolved.reporters).some((reporter) => {
3908
- if (Array.isArray(reporter)) return reporter[0] === "html";
3909
- return false;
3683
+ return Array.isArray(reporter) ? reporter[0] === "html" : false;
3910
3684
  });
3911
3685
  if (htmlReporter) resolved.includeTaskLocation ??= true;
3912
- resolved.testTransformMode ??= {};
3913
- resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3;
3914
- resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4;
3915
- return resolved;
3686
+ return resolved.server ??= {}, resolved.server.deps ??= {}, resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
3916
3687
  }
3917
3688
  function isBrowserEnabled(config) {
3918
3689
  return Boolean(config.browser?.enabled);
@@ -3931,7 +3702,7 @@ function resolveCoverageReporters(configReporters) {
3931
3702
  }
3932
3703
  function isPlaywrightChromiumOnly(vitest, config) {
3933
3704
  const browser = config.browser;
3934
- if (!browser || browser.provider !== "playwright" || !browser.enabled) return false;
3705
+ if (!browser || !browser.provider || browser.provider.name !== "playwright" || !browser.enabled) return false;
3935
3706
  if (browser.name) return browser.name === "chromium";
3936
3707
  if (!browser.instances) return false;
3937
3708
  for (const instance of browser.instances) {
@@ -3948,29 +3719,27 @@ const THRESHOLD_KEYS = [
3948
3719
  "functions",
3949
3720
  "statements",
3950
3721
  "branches"
3951
- ];
3952
- const GLOBAL_THRESHOLDS_KEY = "global";
3953
- const DEFAULT_PROJECT = Symbol.for("default-project");
3722
+ ], GLOBAL_THRESHOLDS_KEY = "global", DEFAULT_PROJECT = Symbol.for("default-project");
3954
3723
  let uniqueId = 0;
3955
3724
  async function getCoverageProvider(options, loader) {
3956
3725
  const coverageModule = await resolveCoverageProviderModule(options, loader);
3957
- if (coverageModule) return coverageModule.getProvider();
3958
- return null;
3726
+ return coverageModule ? coverageModule.getProvider() : null;
3959
3727
  }
3960
3728
  class BaseCoverageProvider {
3961
3729
  ctx;
3962
3730
  name;
3963
3731
  version;
3964
3732
  options;
3733
+ globCache = /* @__PURE__ */ new Map();
3965
3734
  coverageFiles = /* @__PURE__ */ new Map();
3966
3735
  pendingPromises = [];
3967
3736
  coverageFilesDirectory;
3737
+ roots = [];
3968
3738
  _initialize(ctx) {
3969
- this.ctx = ctx;
3970
- if (ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
3739
+ if (this.ctx = ctx, ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
3971
3740
  Running mixed versions is not supported and may lead to bugs
3972
3741
  Update your dependencies and make sure the versions match.`));
3973
- const config = ctx.config.coverage;
3742
+ const config = ctx._coverageOptions;
3974
3743
  this.options = {
3975
3744
  ...coverageConfigDefaults,
3976
3745
  ...config,
@@ -3985,9 +3754,46 @@ Update your dependencies and make sure the versions match.`));
3985
3754
  statements: config.thresholds["100"] ? 100 : config.thresholds.statements
3986
3755
  }
3987
3756
  };
3988
- const shard = this.ctx.config.shard;
3989
- const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
3990
- this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory);
3757
+ const shard = this.ctx.config.shard, tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
3758
+ // If --project filter is set pick only roots of resolved projects
3759
+ this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory), this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
3760
+ }
3761
+ /**
3762
+ * Check if file matches `coverage.include` but not `coverage.exclude`
3763
+ */
3764
+ isIncluded(_filename, root) {
3765
+ const roots = root ? [root] : this.roots, filename = slash(_filename), cacheHit = this.globCache.get(filename);
3766
+ if (cacheHit !== void 0) return cacheHit;
3767
+ // File outside project root with default allowExternal
3768
+ if (this.options.allowExternal === false && roots.every((root) => !filename.startsWith(root))) return this.globCache.set(filename, false), false;
3769
+ // By default `coverage.include` matches all files, except "coverage.exclude"
3770
+ const glob = this.options.include || "**";
3771
+ let included = roots.some((root) => {
3772
+ const options = {
3773
+ contains: true,
3774
+ dot: true,
3775
+ cwd: root,
3776
+ ignore: this.options.exclude
3777
+ };
3778
+ return pm.isMatch(filename, glob, options);
3779
+ });
3780
+ return included &&= existsSync(cleanUrl(filename)), this.globCache.set(filename, included), included;
3781
+ }
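
Coverage file filtering moved into the base provider and is cached per file: a file must sit under one of the resolved project roots (unless `coverage.allowExternal`), match `coverage.include` (default `**`) without hitting `coverage.exclude`, and exist on disk once the query string is stripped. A reduced sketch using picomatch directly; `cleanUrl` here is a simplified stand-in for the bundled helper:

import { existsSync } from 'node:fs';
import pm from 'picomatch';

// Strips ?query/#hash suffixes added by Vite module ids.
const cleanUrl = (url: string) => url.replace(/[?#].*$/, '');

function isCovered(
  filename: string,
  roots: string[],
  include: string | string[] = '**',
  exclude: string[] = [],
  allowExternal = false,
): boolean {
  if (!allowExternal && roots.every(root => !filename.startsWith(root))) return false;
  const matched = roots.some(root =>
    pm.isMatch(filename, include, { contains: true, dot: true, cwd: root, ignore: exclude }),
  );
  return matched && existsSync(cleanUrl(filename));
}
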
3782
+ async getUntestedFilesByRoot(testedFiles, include, root) {
3783
+ let includedFiles = await glob(include, {
3784
+ cwd: root,
3785
+ ignore: [...this.options.exclude, ...testedFiles.map((file) => slash(file))],
3786
+ absolute: true,
3787
+ dot: true,
3788
+ onlyFiles: true
3789
+ });
3790
+ if (includedFiles = includedFiles.filter((file) => this.isIncluded(file, root)), this.ctx.config.changed) includedFiles = (this.ctx.config.related || []).filter((file) => includedFiles.includes(file));
3791
+ return includedFiles.map((file) => slash(path.resolve(root, file)));
3792
+ }
3793
+ async getUntestedFiles(testedFiles) {
3794
+ if (this.options.include == null) return [];
3795
+ const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include), matrix = await Promise.all(this.roots.map(rootMapper));
3796
+ return matrix.flatMap((files) => files);
3991
3797
  }
3992
3798
  createCoverageMap() {
3993
3799
  throw new Error("BaseReporter's createCoverageMap was not overwritten");
@@ -4012,56 +3818,37 @@ Update your dependencies and make sure the versions match.`));
  force: true,
  maxRetries: 10
  });
- await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
- this.coverageFiles = /* @__PURE__ */ new Map();
- this.pendingPromises = [];
+ await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
  }
- onAfterSuiteRun({ coverage, transformMode, projectName, testFiles }) {
+ onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
  if (!coverage) return;
- if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") throw new Error(`Invalid transform mode: ${transformMode}`);
  let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
- if (!entry) {
- entry = {
- web: {},
- ssr: {},
- browser: {}
- };
- this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
- }
- const testFilenames = testFiles.join();
- const filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
+ if (!entry) entry = {}, this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
+ const testFilenames = testFiles.join(), filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
  // If there's a result from previous run, overwrite it
- entry[transformMode][testFilenames] = filename;
+ entry[environment] ??= {}, entry[environment][testFilenames] = filename;
  const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
  this.pendingPromises.push(promise);
  }
  async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
  let index = 0;
  const total = this.pendingPromises.length;
- await Promise.all(this.pendingPromises);
- this.pendingPromises = [];
- for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [transformMode, coverageByTestfiles] of Object.entries(coveragePerProject)) {
- const filenames = Object.values(coverageByTestfiles);
- const project = this.ctx.getProjectByName(projectName);
+ await Promise.all(this.pendingPromises), this.pendingPromises = [];
+ for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [environment, coverageByTestfiles] of Object.entries(coveragePerProject)) {
+ const filenames = Object.values(coverageByTestfiles), project = this.ctx.getProjectByName(projectName);
  for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
- if (onDebug.enabled) {
- index += chunk.length;
- onDebug(`Reading coverage results ${index}/${total}`);
- }
+ if (onDebug.enabled) index += chunk.length, onDebug(`Reading coverage results ${index}/${total}`);
  await Promise.all(chunk.map(async (filename) => {
- const contents = await promises$1.readFile(filename, "utf-8");
- const coverage = JSON.parse(contents);
+ const contents = await promises$1.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
  onFileRead(coverage);
  }));
  }
- await onFinished(project, transformMode);
+ await onFinished(project, environment);
  }
  }
  async cleanAfterRun() {
- this.coverageFiles = /* @__PURE__ */ new Map();
- await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
  // Remove empty reports directory, e.g. when only text-reporter is used
- if (readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
+ if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises$1.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
  }
  async onTestFailure() {
  if (!this.options.reportOnFailure) await this.cleanAfterRun();
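
Note on the hunk above: `onAfterSuiteRun` no longer validates a fixed `transformMode` of `web`/`ssr`/`browser`; coverage results are bucketed under whatever environment name the runner reports, with the bucket created lazily via `entry[environment] ??= {}`. A sketch of the bookkeeping shape this implies (type and environment names are illustrative):

// project name -> environment name -> joined test filenames -> temp coverage JSON path
type CoverageFiles = Map<string, Record<string, Record<string, string>>>;

const coverageFiles: CoverageFiles = new Map();
const entry = coverageFiles.get("my-project") ?? {};
entry["happy-dom"] ??= {}; // environment buckets are created on demand
entry["happy-dom"]["test/a.test.ts,test/b.test.ts"] = ".tmp/coverage-1.json";
coverageFiles.set("my-project", entry);
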
@@ -4074,11 +3861,9 @@ Update your dependencies and make sure the versions match.`));
  }
  async reportThresholds(coverageMap, allTestsRun) {
  const resolvedThresholds = this.resolveThresholds(coverageMap);
- this.checkThresholds(resolvedThresholds);
- if (this.options.thresholds?.autoUpdate && allTestsRun) {
- if (!this.ctx.server.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
- const configFilePath = this.ctx.server.config.configFile;
- const configModule = await this.parseConfigModule(configFilePath);
+ if (this.checkThresholds(resolvedThresholds), this.options.thresholds?.autoUpdate && allTestsRun) {
+ if (!this.ctx.vite.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
+ const configFilePath = this.ctx.vite.config.configFile, configModule = await this.parseConfigModule(configFilePath);
  await this.updateThresholds({
  thresholds: resolvedThresholds,
  configurationFile: configModule,
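
Note on the hunk above: the provider now reads the config file path from `ctx.vite.config.configFile` instead of `ctx.server.config.configFile`, and `coverage.thresholds.autoUpdate` still refuses to run without a configuration file, since it rewrites thresholds in place. A minimal user-side config that exercises this path (a sketch, not taken from this package):

// vitest.config.ts
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 80,
        autoUpdate: true, // requires this file to exist so thresholds can be rewritten
      },
    },
  },
});
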
@@ -4092,16 +3877,10 @@ Update your dependencies and make sure the versions match.`));
  * for specific files defined by glob pattern or global for all other files.
  */
  resolveThresholds(coverageMap) {
- const resolvedThresholds = [];
- const files = coverageMap.files();
- const globalCoverageMap = this.createCoverageMap();
+ const resolvedThresholds = [], files = coverageMap.files(), globalCoverageMap = this.createCoverageMap();
  for (const key of Object.keys(this.options.thresholds)) {
  if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) continue;
- const glob = key;
- const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
- const globCoverageMap = this.createCoverageMap();
- const matcher = pm(glob);
- const matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
+ const glob = key, globThresholds = resolveGlobThresholds(this.options.thresholds[glob]), globCoverageMap = this.createCoverageMap(), matcher = pm(glob), matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
  for (const file of matchingFiles) {
  const fileCoverage = coverageMap.fileCoverageFor(file);
  globCoverageMap.addFileCoverage(fileCoverage);
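
Note on the hunk above: `resolveThresholds` treats every thresholds key that is not `perFile`, `autoUpdate`, `100`, or one of the metric names as a glob, scoped to the files it matches (relative to the root, via picomatch). A hedged config sketch of what that supports:

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 80, // global threshold for all other files
        "src/utils/**": { lines: 100, functions: 100 }, // per-glob thresholds
        // "100": true is shorthand for 100% on lines, branches, functions and statements
      },
    },
  },
});
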
@@ -4117,7 +3896,7 @@ Update your dependencies and make sure the versions match.`));
  const fileCoverage = coverageMap.fileCoverageFor(file);
  globalCoverageMap.addFileCoverage(fileCoverage);
  }
- resolvedThresholds.unshift({
+ return resolvedThresholds.unshift({
  name: GLOBAL_THRESHOLDS_KEY,
  coverageMap: globalCoverageMap,
  thresholds: {
@@ -4126,8 +3905,7 @@ Update your dependencies and make sure the versions match.`));
  lines: this.options.thresholds?.lines,
  statements: this.options.thresholds?.statements
  }
- });
- return resolvedThresholds;
+ }), resolvedThresholds;
  }
  /**
  * Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
@@ -4165,8 +3943,7 @@ Update your dependencies and make sure the versions match.`));
  this.ctx.logger.error(errorMessage);
  }
  } else {
- const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
- const absoluteThreshold = threshold * -1;
+ const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered, absoluteThreshold = threshold * -1;
  if (uncovered > absoluteThreshold) {
  process.exitCode = 1;
  /**
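
Note on the hunk above: the `else` branch handles negative thresholds, which cap the number of uncovered items rather than requiring a percentage, so the check fails when `total - covered` exceeds `threshold * -1`. A small worked sketch with made-up numbers:

const threshold = -10; // allow at most 10 uncovered lines
const summary = { total: 120, covered: 112 }; // hypothetical summary data
const uncovered = summary.total - summary.covered; // 8
const failed = uncovered > threshold * -1; // 8 > 10 -> false, so the threshold passes
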
@@ -4190,8 +3967,7 @@ Update your dependencies and make sure the versions match.`));
  const config = resolveConfig(configurationFile);
  assertConfigurationModule(config);
  for (const { coverageMap, thresholds, name } of allThresholds) {
- const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()];
- const thresholdsToUpdate = [];
+ const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()], thresholdsToUpdate = [];
  for (const key of THRESHOLD_KEYS) {
  const threshold = thresholds[key] ?? 100;
  /**
@@ -4202,8 +3978,7 @@ Update your dependencies and make sure the versions match.`));
  const actual = Math.min(...summaries.map((summary) => summary[key].pct));
  if (actual > threshold) thresholdsToUpdate.push([key, actual]);
  } else {
- const absoluteThreshold = threshold * -1;
- const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
+ const absoluteThreshold = threshold * -1, actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
  if (actual < absoluteThreshold) {
  // If everything was covered, set new threshold to 100% (since a threshold of 0 would be considered as 0%)
  const updatedThreshold = actual === 0 ? 100 : actual * -1;
@@ -4213,16 +3988,17 @@ Update your dependencies and make sure the versions match.`));
  }
  if (thresholdsToUpdate.length === 0) continue;
  updatedThresholds = true;
- for (const [threshold, newValue] of thresholdsToUpdate) if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = newValue;
- else {
- const glob = config.test.coverage.thresholds[name];
- glob[threshold] = newValue;
+ const thresholdFormatter = typeof this.options.thresholds?.autoUpdate === "function" ? this.options.thresholds?.autoUpdate : (value) => value;
+ for (const [threshold, newValue] of thresholdsToUpdate) {
+ const formattedValue = thresholdFormatter(newValue);
+ if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = formattedValue;
+ else {
+ const glob = config.test.coverage.thresholds[name];
+ glob[threshold] = formattedValue;
+ }
  }
  }
- if (updatedThresholds) {
- this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
- onUpdate();
- }
+ if (updatedThresholds) this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds."), onUpdate();
  }
  async mergeReports(coverageMaps) {
  const coverageMap = this.createCoverageMap();
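
Note on the hunk above: it introduces a `thresholdFormatter`; when `thresholds.autoUpdate` is a function, each newly computed threshold is passed through it before being written back to the config file. A hedged usage sketch, assuming the released config types accept such a callback:

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 80,
        // Round updated thresholds down to two decimals before writing them back.
        autoUpdate: (newThreshold: number) => Math.floor(newThreshold * 100) / 100,
      },
    },
  },
});
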
@@ -4234,10 +4010,8 @@ Update your dependencies and make sure the versions match.`));
  }
  toSlices(array, size) {
  return array.reduce((chunks, item) => {
- const index = Math.max(0, chunks.length - 1);
- const lastChunk = chunks[index] || [];
- chunks[index] = lastChunk;
- if (lastChunk.length >= size) chunks.push([item]);
+ const index = Math.max(0, chunks.length - 1), lastChunk = chunks[index] || [];
+ if (chunks[index] = lastChunk, lastChunk.length >= size) chunks.push([item]);
  else lastChunk.push(item);
  return chunks;
  }, []);
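
Note on the hunk above: `toSlices` batches the collected coverage file paths so that at most `coverage.processingConcurrency` files are read in parallel; only the formatting of the reduce changed. A standalone sketch equivalent to it:

function toSlices<T>(array: T[], size: number): T[][] {
  return array.reduce<T[][]>((chunks, item) => {
    const index = Math.max(0, chunks.length - 1);
    const lastChunk = chunks[index] ?? [];
    chunks[index] = lastChunk;
    if (lastChunk.length >= size) chunks.push([item]);
    else lastChunk.push(item);
    return chunks;
  }, []);
}

// toSlices(["a", "b", "c"], 2) -> [["a", "b"], ["c"]]
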
@@ -4246,23 +4020,25 @@ Update your dependencies and make sure the versions match.`));
  const servers = [...ctx.projects.map((project) => ({
  root: project.config.root,
  isBrowserEnabled: project.isBrowserEnabled(),
- vitenode: project.vitenode
- })), {
+ vite: project.vite
+ })), (
+ // Check core last as it will match all files anyway
+ {
  root: ctx.config.root,
- vitenode: ctx.vitenode,
+ vite: ctx.vite,
  isBrowserEnabled: ctx.getRootProject().isBrowserEnabled()
- }];
+ })];
  return async function transformFile(filename) {
  let lastError;
- for (const { root, vitenode, isBrowserEnabled } of servers) {
+ for (const { root, vite, isBrowserEnabled } of servers) {
  // On Windows root doesn't start with "/" while filenames do
  if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) continue;
  if (isBrowserEnabled) {
- const result = await vitenode.transformRequest(filename, void 0, "web").catch(() => null);
+ const result = await vite.environments.client.transformRequest(filename).catch(() => null);
  if (result) return result;
  }
  try {
- return await vitenode.transformRequest(filename);
+ return await vite.environments.ssr.transformRequest(filename);
  } catch (error) {
  lastError = error;
  }
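
Note on the hunk above: this is the most visible vite-node removal in the chunk; files from browser-enabled projects are transformed through Vite's `client` environment, and everything else falls back to the `ssr` environment. A minimal sketch against a plain Vite dev server using the (still experimental) Environment API, with illustrative file paths:

import { createServer } from "vite";

const vite = await createServer();
// Browser-targeted transform:
const clientResult = await vite.environments.client.transformRequest("/src/main.ts");
// Node/SSR-targeted transform:
const ssrResult = await vite.environments.ssr.transformRequest("/src/entry-server.ts");
await vite.close();
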
@@ -4276,14 +4052,12 @@ Update your dependencies and make sure the versions match.`));
  * Narrow down `unknown` glob thresholds to resolved ones
  */
  function resolveGlobThresholds(thresholds) {
- if (!thresholds || typeof thresholds !== "object") return {};
- if (100 in thresholds && thresholds[100] === true) return {
+ return !thresholds || typeof thresholds !== "object" ? {} : 100 in thresholds && thresholds[100] === true ? {
  lines: 100,
  branches: 100,
  functions: 100,
  statements: 100
- };
- return {
+ } : {
  lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines : void 0,
  branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches : void 0,
  functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions : void 0,
@@ -4309,8 +4083,7 @@ function resolveConfig(configModule) {
  if (config) return config;
  // "export default mergeConfig(..., defineConfig(...))"
  if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
- config = resolveMergeConfig(mod);
- if (config) return config;
+ if (config = resolveMergeConfig(mod), config) return config;
  }
  } catch (error) {
  // Reduce magicast's verbose errors to readable ones
@@ -4339,4 +4112,4 @@ function resolveMergeConfig(mod) {
  }
  }
 
- export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, isBrowserEnabled as d, groupBy as e, getCoverageProvider as f, getFilePoolName as g, hash as h, isPackageExists as i, createPool as j, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };
+ export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, createFetchModuleFunction as d, isBrowserEnabled as e, groupBy as f, getFilePoolName as g, hash as h, isPackageExists as i, getCoverageProvider as j, createPool as k, normalizeResolvedIdToUrl as n, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };