vitest 4.0.0-beta.1 → 4.0.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/LICENSE.md +2 -2
  2. package/dist/browser.d.ts +13 -14
  3. package/dist/browser.js +6 -5
  4. package/dist/chunks/base.Cjha6usc.js +129 -0
  5. package/dist/chunks/{benchmark.CYdenmiT.js → benchmark.CJUa-Hsa.js} +6 -8
  6. package/dist/chunks/{benchmark.d.BwvBVTda.d.ts → benchmark.d.DAaHLpsq.d.ts} +4 -4
  7. package/dist/chunks/{browser.d.q8Z0P0q1.d.ts → browser.d.yFAklsD1.d.ts} +5 -5
  8. package/dist/chunks/{cac.D3EzDDZd.js → cac.DCxo_nSu.js} +70 -152
  9. package/dist/chunks/{cli-api.Dn5gKePv.js → cli-api.BJJXh9BV.js} +1330 -1677
  10. package/dist/chunks/{config.d.HJdfX-8k.d.ts → config.d.B_LthbQq.d.ts} +58 -63
  11. package/dist/chunks/{console.CtFJOzRO.js → console.7h5kHUIf.js} +34 -70
  12. package/dist/chunks/{constants.DnKduX2e.js → constants.D_Q9UYh-.js} +1 -9
  13. package/dist/chunks/{coverage.Cwa-XhJt.js → coverage.BCU-r2QL.js} +515 -781
  14. package/dist/chunks/{coverage.DVF1vEu8.js → coverage.D_JHT54q.js} +2 -2
  15. package/dist/chunks/{coverage.d.S9RMNXIe.d.ts → coverage.d.BZtK59WP.d.ts} +10 -8
  16. package/dist/chunks/{creator.GK6I-cL4.js → creator.08Gi-vCA.js} +93 -77
  17. package/dist/chunks/{date.Bq6ZW5rf.js → date.-jtEtIeV.js} +6 -17
  18. package/dist/chunks/{environment.d.CUq4cUgQ.d.ts → environment.d.BsToaxti.d.ts} +27 -6
  19. package/dist/chunks/{git.BVQ8w_Sw.js → git.BFNcloKD.js} +1 -2
  20. package/dist/chunks/{global.d.CVbXEflG.d.ts → global.d.BK3X7FW1.d.ts} +2 -5
  21. package/dist/chunks/{globals.Cxal6MLI.js → globals.DG-S3xFe.js} +8 -8
  22. package/dist/chunks/{index.CZI_8rVt.js → index.BIP7prJq.js} +289 -608
  23. package/dist/chunks/{index.B521nVV-.js → index.Bgo3tNWt.js} +23 -4
  24. package/dist/chunks/{index.TfbsX-3I.js → index.BjKEiSn0.js} +14 -24
  25. package/dist/chunks/{index.BWf_gE5n.js → index.CMfqw92x.js} +7 -6
  26. package/dist/chunks/{index.CmSc2RE5.js → index.DIWhzsUh.js} +72 -118
  27. package/dist/chunks/{inspector.C914Efll.js → inspector.CvQD-Nie.js} +10 -25
  28. package/dist/chunks/moduleRunner.d.D9nBoC4p.d.ts +201 -0
  29. package/dist/chunks/moduleTransport.I-bgQy0S.js +19 -0
  30. package/dist/chunks/{node.fjCdwEIl.js → node.CyipiPvJ.js} +1 -1
  31. package/dist/chunks/{plugin.d.C2EcJUjo.d.ts → plugin.d.BMVSnsGV.d.ts} +1 -1
  32. package/dist/chunks/{reporters.d.DxZg19fy.d.ts → reporters.d.BUWjmRYq.d.ts} +1226 -1291
  33. package/dist/chunks/resolveSnapshotEnvironment.Bkht6Yor.js +81 -0
  34. package/dist/chunks/resolver.Bx6lE0iq.js +119 -0
  35. package/dist/chunks/rpc.BKr6mtxz.js +65 -0
  36. package/dist/chunks/{setup-common.D7ZqXFx-.js → setup-common.uiMcU3cv.js} +17 -29
  37. package/dist/chunks/startModuleRunner.p67gbNo9.js +665 -0
  38. package/dist/chunks/{suite.d.FvehnV49.d.ts → suite.d.BJWk38HB.d.ts} +1 -1
  39. package/dist/chunks/test.BiqSKISg.js +214 -0
  40. package/dist/chunks/{typechecker.CVytUJuF.js → typechecker.DB-fIMaH.js} +144 -213
  41. package/dist/chunks/{utils.CAioKnHs.js → utils.C2YI6McM.js} +5 -14
  42. package/dist/chunks/{utils.XdZDrNZV.js → utils.D2R2NiOH.js} +8 -27
  43. package/dist/chunks/{vi.bdSIJ99Y.js → vi.ZPgvtBao.js} +156 -305
  44. package/dist/chunks/{vm.BThCzidc.js → vm.Ca0Y0W5f.js} +116 -226
  45. package/dist/chunks/{worker.d.DoNjFAiv.d.ts → worker.d.BDsXGkwh.d.ts} +28 -22
  46. package/dist/chunks/{worker.d.CmvJfRGs.d.ts → worker.d.BNcX_2mH.d.ts} +1 -1
  47. package/dist/cli.js +4 -4
  48. package/dist/config.cjs +3 -9
  49. package/dist/config.d.ts +49 -54
  50. package/dist/config.js +1 -1
  51. package/dist/coverage.d.ts +27 -26
  52. package/dist/coverage.js +6 -7
  53. package/dist/environments.d.ts +9 -13
  54. package/dist/environments.js +1 -1
  55. package/dist/index.d.ts +38 -45
  56. package/dist/index.js +7 -9
  57. package/dist/module-evaluator.d.ts +13 -0
  58. package/dist/module-evaluator.js +276 -0
  59. package/dist/module-runner.js +15 -0
  60. package/dist/node.d.ts +40 -41
  61. package/dist/node.js +23 -33
  62. package/dist/reporters.d.ts +12 -13
  63. package/dist/reporters.js +3 -3
  64. package/dist/runners.d.ts +3 -3
  65. package/dist/runners.js +13 -232
  66. package/dist/snapshot.js +2 -2
  67. package/dist/suite.d.ts +2 -2
  68. package/dist/suite.js +2 -2
  69. package/dist/worker.js +90 -47
  70. package/dist/workers/forks.js +34 -10
  71. package/dist/workers/runVmTests.js +36 -56
  72. package/dist/workers/threads.js +34 -10
  73. package/dist/workers/vmForks.js +11 -10
  74. package/dist/workers/vmThreads.js +11 -10
  75. package/dist/workers.d.ts +5 -4
  76. package/dist/workers.js +35 -17
  77. package/globals.d.ts +17 -17
  78. package/package.json +32 -31
  79. package/dist/chunks/base.Bj3pWTr1.js +0 -38
  80. package/dist/chunks/execute.B7h3T_Hc.js +0 -708
  81. package/dist/chunks/index.D-VkfKhf.js +0 -105
  82. package/dist/chunks/rpc.CsFtxqeq.js +0 -83
  83. package/dist/chunks/runBaseTests.BC7ZIH5L.js +0 -129
  84. package/dist/execute.d.ts +0 -148
  85. package/dist/execute.js +0 -13
@@ -1,13 +1,12 @@
1
1
  import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
2
2
  import path, { win32, dirname, join, resolve } from 'node:path';
3
+ import { isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, createDefer, slash, shuffle, toArray } from '@vitest/utils';
3
4
  import { isAbsolute, join as join$1, dirname as dirname$1, resolve as resolve$1, relative, normalize } from 'pathe';
4
5
  import pm from 'picomatch';
5
6
  import { glob } from 'tinyglobby';
6
7
  import c from 'tinyrainbow';
7
- import { slash, cleanUrl } from 'vite-node/utils';
8
8
  import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.CXFFjsi8.js';
9
9
  import crypto from 'node:crypto';
10
- import { createDefer, shuffle, toArray, slash as slash$1 } from '@vitest/utils';
11
10
  import { builtinModules, createRequire } from 'node:module';
12
11
  import process$1 from 'node:process';
13
12
  import fs$1, { writeFile, rename, stat, unlink } from 'node:fs/promises';
@@ -15,28 +14,26 @@ import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL
15
14
  import assert from 'node:assert';
16
15
  import v8 from 'node:v8';
17
16
  import { format, inspect } from 'node:util';
18
- import { version, mergeConfig } from 'vite';
19
- import { c as configFiles, w as workspacesFiles, e as extraInlineDeps, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.DnKduX2e.js';
17
+ import { fetchModule, version, mergeConfig } from 'vite';
18
+ import { c as configFiles, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.D_Q9UYh-.js';
20
19
  import { a as isWindows } from './env.D4Lgay0q.js';
21
20
  import * as nodeos from 'node:os';
22
- import nodeos__default from 'node:os';
21
+ import nodeos__default, { tmpdir } from 'node:os';
23
22
  import { isatty } from 'node:tty';
24
23
  import EventEmitter from 'node:events';
25
- import { c as createBirpc } from './index.B521nVV-.js';
24
+ import { c as createBirpc } from './index.Bgo3tNWt.js';
26
25
  import Tinypool$1, { Tinypool } from 'tinypool';
27
- import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.CVytUJuF.js';
26
+ import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.DB-fIMaH.js';
28
27
  import { MessageChannel } from 'node:worker_threads';
29
28
  import { hasFailed } from '@vitest/runner/utils';
30
29
  import { rootDir } from '../path.js';
31
30
  import { isCI, provider } from 'std-env';
32
- import { r as resolveCoverageProviderModule } from './coverage.DVF1vEu8.js';
31
+ import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js';
33
32
 
34
33
  function groupBy(collection, iteratee) {
35
34
  return collection.reduce((acc, item) => {
36
35
  const key = iteratee(item);
37
- acc[key] ||= [];
38
- acc[key].push(item);
39
- return acc;
36
+ return acc[key] ||= [], acc[key].push(item), acc;
40
37
  }, {});
41
38
  }
42
39
  function stdout() {
@@ -2017,7 +2014,7 @@ function normalizeid(id) {
2017
2014
  if (typeof id !== "string") {
2018
2015
  id = id.toString();
2019
2016
  }
2020
- if (/(node|data|http|https|file):/.test(id)) {
2017
+ if (/(?:node|data|http|https|file):/.test(id)) {
2021
2018
  return id;
2022
2019
  }
2023
2020
  if (BUILTIN_MODULES.has(id)) {
@@ -2051,7 +2048,7 @@ function _resolve$1(id, options = {}) {
2051
2048
  throw new TypeError("input must be a `string` or `URL`");
2052
2049
  }
2053
2050
  }
2054
- if (/(node|data|http|https):/.test(id)) {
2051
+ if (/(?:node|data|http|https):/.test(id)) {
2055
2052
  return id;
2056
2053
  }
2057
2054
  if (BUILTIN_MODULES.has(id)) {
@@ -2385,8 +2382,7 @@ const isPackageListed = quansync(function* (name, cwd) {
2385
2382
  isPackageListed.sync;
2386
2383
 
2387
2384
  function getWorkersCountByPercentage(percent) {
2388
- const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
2389
- const workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
2385
+ const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length, workersCountByPercentage = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
2390
2386
  return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
2391
2387
  }
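
Editor's note: `getWorkersCountByPercentage` above turns a percentage string into a worker count clamped between 1 and the machine's parallelism. A minimal stand-alone sketch of the same arithmetic, assuming an 8-core host (the helper name is illustrative, not part of the package):

    import os from 'node:os';

    // Same clamp as in the hunk above: never fewer than 1 worker,
    // never more than the host's available parallelism.
    function workersFromPercent(percent) {
      const max = os.availableParallelism?.() ?? os.cpus().length;
      const byPercentage = Math.round(Number.parseInt(percent, 10) / 100 * max);
      return Math.max(1, Math.min(max, byPercentage));
    }

    console.log(workersFromPercent('50%')); // 4 on an 8-core host; "10%" -> 1, "400%" -> 8
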
2392
2388
 
@@ -2396,35 +2392,19 @@ const envsOrder = [
2396
2392
  "happy-dom",
2397
2393
  "edge-runtime"
2398
2394
  ];
2399
- function getTransformMode(patterns, filename) {
2400
- if (patterns.web && pm.isMatch(filename, patterns.web)) return "web";
2401
- if (patterns.ssr && pm.isMatch(filename, patterns.ssr)) return "ssr";
2402
- return void 0;
2403
- }
2404
2395
  async function groupFilesByEnv(files) {
2405
2396
  const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
2406
2397
  const code = await promises$1.readFile(filepath, "utf-8");
2407
2398
  // 1. Check for control comments in the file
2408
2399
  let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
2409
- // 2. Check for globals
2410
- if (!env) {
2411
- for (const [glob, target] of project.config.environmentMatchGlobs || []) if (pm.isMatch(filepath, glob, { cwd: project.config.root })) {
2412
- env = target;
2413
- break;
2414
- }
2415
- }
2416
- // 3. Fallback to global env
2400
+ // 2. Fallback to global env
2417
2401
  env ||= project.config.environment || "node";
2418
- const transformMode = getTransformMode(project.config.testTransformMode, filepath);
2419
2402
  let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
2420
2403
  if (envOptionsJson?.endsWith("*/"))
2421
2404
  // Trim closing Docblock characters the above regex might have captured
2422
2405
  envOptionsJson = envOptionsJson.slice(0, -2);
2423
- const envOptions = JSON.parse(envOptionsJson || "null");
2424
- const envKey = env === "happy-dom" ? "happyDOM" : env;
2425
- const environment = {
2406
+ const envOptions = JSON.parse(envOptionsJson || "null"), envKey = env === "happy-dom" ? "happyDOM" : env, environment = {
2426
2407
  name: env,
2427
- transformMode,
2428
2408
  options: envOptions ? { [envKey]: envOptions } : null
2429
2409
  };
2430
2410
  return {
@@ -2439,45 +2419,198 @@ async function groupFilesByEnv(files) {
2439
2419
  return groupBy(filesWithEnv, ({ environment }) => environment.name);
2440
2420
  }
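
Editor's note: `groupFilesByEnv` now resolves a file's environment only from a control comment in the file itself or from the project-level `environment` option (the `environmentMatchGlobs` step is removed in this range). For reference, a test file using the docblock pragmas that the regexes above match could look like this (the file is an illustration, not taken from the package; the options value must be valid JSON because it is passed to `JSON.parse`):

    /**
     * @vitest-environment jsdom
     * @vitest-environment-options {"url": "https://example.com/"}
     */
    import { expect, test } from 'vitest';

    test('runs in the environment declared by the docblock', () => {
      // `document` exists because the pragma above selected a DOM environment
      expect(typeof document).toBe('object');
    });
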
2441
2421
 
2442
- const created = /* @__PURE__ */ new Set();
2443
- const promises = /* @__PURE__ */ new Map();
2422
+ const created = /* @__PURE__ */ new Set(), promises = /* @__PURE__ */ new Map();
2423
+ function createFetchModuleFunction(resolver, cacheFs = false, tmpDir = join$1(tmpdir(), nanoid())) {
2424
+ const cachedFsResults = /* @__PURE__ */ new Map();
2425
+ return async (url, importer, environment, options) => {
2426
+ // We are copy pasting Vite's externalization logic from `fetchModule` because
2427
+ // we instead rely on our own `shouldExternalize` method because Vite
2428
+ // doesn't support `resolve.external` in non SSR environments (jsdom/happy-dom)
2429
+ if (url.startsWith("data:")) return {
2430
+ externalize: url,
2431
+ type: "builtin"
2432
+ };
2433
+ if (url === "/@vite/client" || url === "@vite/client")
2434
+ // this will be stubbed
2435
+ return {
2436
+ externalize: "/@vite/client",
2437
+ type: "module"
2438
+ };
2439
+ const isFileUrl = url.startsWith("file://");
2440
+ if (isExternalUrl(url) && !isFileUrl) return {
2441
+ externalize: url,
2442
+ type: "network"
2443
+ };
2444
+ // Vite does the same in `fetchModule`, but we want to externalize modules ourselves,
2445
+ // so we do this first to resolve the module and check its `id`. The next call of
2446
+ // `ensureEntryFromUrl` inside `fetchModule` is cached and should take no time
2447
+ // This also makes it so externalized modules are inside the module graph.
2448
+ const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url)), cached = !!moduleGraphModule.transformResult;
2449
+ // if url is already cached, we can just confirm it's also cached on the server
2450
+ if (options?.cached && cached) return { cache: true };
2451
+ if (moduleGraphModule.id) {
2452
+ const externalize = await resolver.shouldExternalize(moduleGraphModule.id);
2453
+ if (externalize) return {
2454
+ externalize,
2455
+ type: "module"
2456
+ };
2457
+ }
2458
+ const moduleRunnerModule = await fetchModule(environment, url, importer, {
2459
+ ...options,
2460
+ inlineSourceMap: false
2461
+ }).catch(handleRollupError), result = processResultSource(environment, moduleRunnerModule);
2462
+ if (!cacheFs || !("code" in result)) return result;
2463
+ const code = result.code;
2464
+ // to avoid serialising large chunks of code,
2465
+ // we store them in a tmp file and read in the test thread
2466
+ if (cachedFsResults.has(result.id)) return getCachedResult(result, cachedFsResults);
2467
+ const dir = join$1(tmpDir, environment.name), name = hash("sha1", result.id, "hex"), tmp = join$1(dir, name);
2468
+ if (!created.has(dir)) mkdirSync(dir, { recursive: true }), created.add(dir);
2469
+ return promises.has(tmp) ? (await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults)) : (promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))), await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults));
2470
+ };
2471
+ }
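
Editor's note: the new `createFetchModuleFunction` above short-circuits `data:` URLs and the Vite client, externalizes network URLs, asks Vitest's own resolver whether the module should be externalized, and only then falls back to Vite's `fetchModule`. A condensed sketch of that decision order follows; `shouldExternalize` and `fetchFromVite` are stand-in callbacks, and the network-URL test is a simplification of `isExternalUrl`:

    // Condensed, stand-alone sketch of the decision order (not the real implementation).
    async function decideFetch(url, { shouldExternalize, fetchFromVite }) {
      if (url.startsWith('data:')) return { externalize: url, type: 'builtin' };
      if (url === '/@vite/client' || url === '@vite/client') {
        return { externalize: '/@vite/client', type: 'module' }; // stubbed at runtime
      }
      const isFileUrl = url.startsWith('file://');
      if (/^[a-z]+:\/\//.test(url) && !isFileUrl) {
        return { externalize: url, type: 'network' }; // http(s) etc. stay external
      }
      const externalize = await shouldExternalize(url);
      if (externalize) return { externalize, type: 'module' };
      return fetchFromVite(url); // inline: let Vite transform and serve the module
    }

    // data: URLs never reach the resolver or Vite:
    decideFetch('data:text/javascript,export default 1', {}).then(console.log);
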
2472
+ let SOURCEMAPPING_URL = "sourceMa";
2473
+ SOURCEMAPPING_URL += "ppingURL";
2474
+ const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
2475
+ function processResultSource(environment, result) {
2476
+ if (!("code" in result)) return result;
2477
+ const node = environment.moduleGraph.getModuleById(result.id);
2478
+ if (node?.transformResult)
2479
+ // this also overrides node.transformResult.code which is also what the module
2480
+ // runner does under the hood by default (we disable source maps inlining)
2481
+ inlineSourceMap(node.transformResult);
2482
+ return {
2483
+ ...result,
2484
+ code: node?.transformResult?.code || result.code
2485
+ };
2486
+ }
2487
+ const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
2488
+ // we have to inline the source map ourselves, because
2489
+ // - we don't need //# sourceURL since we are running code in VM
2490
+ // - important in stack traces and the V8 coverage
2491
+ // - we need to inject an empty line for --inspect-brk
2492
+ function inlineSourceMap(result) {
2493
+ const map = result.map;
2494
+ let code = result.code;
2495
+ if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
2496
+ if (OTHER_SOURCE_MAP_REGEXP.lastIndex = 0, OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
2497
+ const sourceMap = { ...map };
2498
+ // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
2499
+ // so that debuggers can be set to break on first line
2500
+ if (sourceMap.mappings.startsWith(";")) sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
2501
+ return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
2502
+ }
2503
+ function genSourceMapUrl(map) {
2504
+ if (typeof map !== "string") map = JSON.stringify(map);
2505
+ return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
2506
+ }
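
Editor's note: `inlineSourceMap` above strips any existing base64 source-map comment, optionally prepends an `AAAA,CAAA` segment so debuggers can break on the first line, and then appends the map as a base64 data URL together with the `sourceMappingSource=vite-generated` marker. The snippet below only reproduces the shape of that footer for a made-up one-line module and map:

    // Illustrates the footer appended by inlineSourceMap; the map object is made up.
    const map = { version: 3, sources: ['example.ts'], names: [], mappings: ';AAAA' };

    const toDataUrl = (m) =>
      `data:application/json;base64,${Buffer.from(JSON.stringify(m)).toString('base64')}`;

    const code = 'export const answer = 42;';
    const inlined = `${code.trimEnd()}\n`
      + '//# sourceMappingSource=vite-generated\n'
      + `//# sourceMappingURL=${toDataUrl(map)}\n`;
    console.log(inlined);
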
2507
+ function getCachedResult(result, cachedFsResults) {
2508
+ const tmp = cachedFsResults.get(result.id);
2509
+ if (!tmp) throw new Error(`The cached result was returned too early for ${result.id}.`);
2510
+ return {
2511
+ cached: true,
2512
+ file: result.file,
2513
+ id: result.id,
2514
+ tmp,
2515
+ url: result.url,
2516
+ invalidate: result.invalidate
2517
+ };
2518
+ }
2519
+ // serialize rollup error on server to preserve details as a test error
2520
+ function handleRollupError(e) {
2521
+ throw e instanceof Error && ("plugin" in e || "frame" in e || "id" in e) ? {
2522
+ name: e.name,
2523
+ message: e.message,
2524
+ stack: e.stack,
2525
+ cause: e.cause,
2526
+ __vitest_rollup_error__: {
2527
+ plugin: e.plugin,
2528
+ id: e.id,
2529
+ loc: e.loc,
2530
+ frame: e.frame
2531
+ }
2532
+ } : e;
2533
+ }
2534
+ /**
2535
+ * Performs an atomic write operation using the write-then-rename pattern.
2536
+ *
2537
+ * Why we need this:
2538
+ * - Ensures file integrity by never leaving partially written files on disk
2539
+ * - Prevents other processes from reading incomplete data during writes
2540
+ * - Particularly important for test files where incomplete writes could cause test failures
2541
+ *
2542
+ * The implementation writes to a temporary file first, then renames it to the target path.
2543
+ * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
2544
+ * guaranteeing that other processes will only ever see the complete file.
2545
+ *
2546
+ * Added in https://github.com/vitest-dev/vitest/pull/7531
2547
+ */
2548
+ async function atomicWriteFile(realFilePath, data) {
2549
+ const dir = dirname$1(realFilePath), tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
2550
+ try {
2551
+ await writeFile(tmpFilePath, data, "utf-8"), await rename(tmpFilePath, realFilePath);
2552
+ } finally {
2553
+ try {
2554
+ if (await stat(tmpFilePath)) await unlink(tmpFilePath);
2555
+ } catch {}
2556
+ }
2557
+ }
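
Editor's note: the docblock above describes the write-then-rename pattern; as a stand-alone illustration (the helper name and the cleanup via `rm` are this example's own choices, not the package's), the same idea fits in a few lines:

    import { writeFile, rename, rm } from 'node:fs/promises';
    import { dirname, join } from 'node:path';

    // Write to a sibling temp file first, then rename it into place; the rename
    // is atomic on POSIX filesystems, so readers never observe a partial file.
    async function atomicWrite(target, data) {
      const tmp = join(dirname(target), `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
      try {
        await writeFile(tmp, data, 'utf-8');
        await rename(tmp, target);
      } finally {
        await rm(tmp, { force: true }); // no-op once the rename has moved the file
      }
    }
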
2558
+
2559
+ // this is copy pasted from vite
2560
+ function normalizeResolvedIdToUrl(environment, resolvedId) {
2561
+ const root = environment.config.root, depsOptimizer = environment.depsOptimizer;
2562
+ let url;
2563
+ // normalize all imports into resolved URLs
2564
+ // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
2565
+ if (resolvedId.startsWith(withTrailingSlash$1(root)))
2566
+ // in root: infer short absolute path from root
2567
+ url = resolvedId.slice(root.length);
2568
+ else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
2569
+ // an optimized deps may not yet exists in the filesystem, or
2570
+ // a regular file exists but is out of root: rewrite to absolute /@fs/ paths
2571
+ url = path.posix.join("/@fs/", resolvedId);
2572
+ else url = resolvedId;
2573
+ // if the resolved id is not a valid browser import specifier,
2574
+ // prefix it to make it valid. We will strip this before feeding it
2575
+ // back into the transform pipeline
2576
+ if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
2577
+ return url;
2578
+ }
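
Editor's note: `normalizeResolvedIdToUrl` rewrites a resolved id into something the module runner can import: ids under the root become root-relative URLs, absolute ids outside the root get the `/@fs/` prefix, and anything else is wrapped with Vite's `/@id/` marker via `wrapId`. A reduced sketch of those three branches, ignoring the optimized-deps and `existsSync` checks (the root and sample ids are illustrative):

    import path from 'node:path';

    // Reduced sketch of the three branches above; not the real implementation.
    function sketchNormalize(root, resolvedId) {
      if (resolvedId.startsWith(`${root}/`)) {
        return resolvedId.slice(root.length);        // inside root -> root-relative URL
      }
      if (path.isAbsolute(resolvedId)) {
        return path.posix.join('/@fs/', resolvedId); // outside root -> /@fs/ URL
      }
      return `/@id/${resolvedId}`;                   // bare or virtual id -> wrapped
    }

    console.log(sketchNormalize('/home/user/project', '/home/user/project/src/foo.ts'));
    // -> /src/foo.ts
    console.log(sketchNormalize('/home/user/project', '/opt/deps/node_modules/bar/index.js'));
    // -> /@fs/opt/deps/node_modules/bar/index.js
    console.log(sketchNormalize('/home/user/project', 'virtual:demo'));
    // -> /@id/virtual:demo
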
2579
+
2444
2580
  function createMethodsRPC(project, options = {}) {
2445
- const ctx = project.vitest;
2446
- const cacheFs = options.cacheFs ?? false;
2581
+ const ctx = project.vitest, cacheFs = options.cacheFs ?? false, fetch = createFetchModuleFunction(project._resolver, cacheFs, project.tmpDir);
2447
2582
  return {
2583
+ async fetch(url, importer, environmentName, options) {
2584
+ const environment = project.vite.environments[environmentName];
2585
+ if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2586
+ const start = performance.now();
2587
+ try {
2588
+ return await fetch(url, importer, environment, options);
2589
+ } finally {
2590
+ project.vitest.state.transformTime += performance.now() - start;
2591
+ }
2592
+ },
2593
+ async resolve(id, importer, environmentName) {
2594
+ const environment = project.vite.environments[environmentName];
2595
+ if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2596
+ const resolved = await environment.pluginContainer.resolveId(id, importer);
2597
+ return resolved ? {
2598
+ file: cleanUrl(resolved.id),
2599
+ url: normalizeResolvedIdToUrl(environment, resolved.id),
2600
+ id: resolved.id
2601
+ } : null;
2602
+ },
2448
2603
  snapshotSaved(snapshot) {
2449
2604
  ctx.snapshot.add(snapshot);
2450
2605
  },
2451
2606
  resolveSnapshotPath(testPath) {
2452
2607
  return ctx.snapshot.resolvePath(testPath, { config: project.serializedConfig });
2453
2608
  },
2454
- async fetch(id, transformMode) {
2455
- const result = await project.vitenode.fetchResult(id, transformMode).catch(handleRollupError);
2456
- const code = result.code;
2457
- if (!cacheFs || result.externalize) return result;
2458
- if ("id" in result && typeof result.id === "string") return { id: result.id };
2459
- if (code == null) throw new Error(`Failed to fetch module ${id}`);
2460
- const dir = join$1(project.tmpDir, transformMode);
2461
- const name = hash("sha1", id, "hex");
2462
- const tmp = join$1(dir, name);
2463
- if (!created.has(dir)) {
2464
- mkdirSync(dir, { recursive: true });
2465
- created.add(dir);
2466
- }
2467
- if (promises.has(tmp)) {
2468
- await promises.get(tmp);
2469
- return { id: tmp };
2470
- }
2471
- promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp)));
2472
- await promises.get(tmp);
2473
- Object.assign(result, { id: tmp });
2474
- return { id: tmp };
2475
- },
2476
- resolveId(id, importer, transformMode) {
2477
- return project.vitenode.resolveId(id, importer, transformMode).catch(handleRollupError);
2478
- },
2479
- transform(id, environment) {
2480
- return project.vitenode.transformModule(id, environment).catch(handleRollupError);
2609
+ async transform(id) {
2610
+ const environment = project.vite.environments.__vitest_vm__;
2611
+ if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2612
+ const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id)), result = await environment.transformRequest(url).catch(handleRollupError);
2613
+ return { code: result?.code };
2481
2614
  },
2482
2615
  async onQueued(file) {
2483
2616
  if (options.collect) ctx.state.collectFiles(project, [file]);
@@ -2512,63 +2645,12 @@ function createMethodsRPC(project, options = {}) {
2512
2645
  }
2513
2646
  };
2514
2647
  }
2515
- // serialize rollup error on server to preserve details as a test error
2516
- function handleRollupError(e) {
2517
- if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e))
2518
- // eslint-disable-next-line no-throw-literal
2519
- throw {
2520
- name: e.name,
2521
- message: e.message,
2522
- stack: e.stack,
2523
- cause: e.cause,
2524
- __vitest_rollup_error__: {
2525
- plugin: e.plugin,
2526
- id: e.id,
2527
- loc: e.loc,
2528
- frame: e.frame
2529
- }
2530
- };
2531
- throw e;
2532
- }
2533
- /**
2534
- * Performs an atomic write operation using the write-then-rename pattern.
2535
- *
2536
- * Why we need this:
2537
- * - Ensures file integrity by never leaving partially written files on disk
2538
- * - Prevents other processes from reading incomplete data during writes
2539
- * - Particularly important for test files where incomplete writes could cause test failures
2540
- *
2541
- * The implementation writes to a temporary file first, then renames it to the target path.
2542
- * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
2543
- * guaranteeing that other processes will only ever see the complete file.
2544
- *
2545
- * Added in https://github.com/vitest-dev/vitest/pull/7531
2546
- */
2547
- async function atomicWriteFile(realFilePath, data) {
2548
- const dir = dirname$1(realFilePath);
2549
- const tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
2550
- try {
2551
- await writeFile(tmpFilePath, data, "utf-8");
2552
- await rename(tmpFilePath, realFilePath);
2553
- } finally {
2554
- try {
2555
- if (await stat(tmpFilePath)) await unlink(tmpFilePath);
2556
- } catch {}
2557
- }
2558
- }
2559
2648
 
2560
2649
  function createChildProcessChannel$1(project, collect = false) {
2561
- const emitter = new EventEmitter();
2562
- const events = {
2650
+ const emitter = new EventEmitter(), events = {
2563
2651
  message: "message",
2564
2652
  response: "response"
2565
- };
2566
- const channel = {
2567
- onMessage: (callback) => emitter.on(events.message, callback),
2568
- postMessage: (message) => emitter.emit(events.response, message),
2569
- onClose: () => emitter.removeAllListeners()
2570
- };
2571
- const rpc = createBirpc(createMethodsRPC(project, {
2653
+ }, rpc = createBirpc(createMethodsRPC(project, {
2572
2654
  cacheFs: true,
2573
2655
  collect
2574
2656
  }), {
@@ -2591,21 +2673,20 @@ function createChildProcessChannel$1(project, collect = false) {
2591
2673
  on(fn) {
2592
2674
  emitter.on(events.response, fn);
2593
2675
  },
2594
- onTimeoutError(functionName) {
2595
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
2596
- }
2676
+ timeout: -1
2597
2677
  });
2598
2678
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
2679
+ const channel = {
2680
+ onMessage: (callback) => emitter.on(events.message, callback),
2681
+ postMessage: (message) => emitter.emit(events.response, message),
2682
+ onClose: () => {
2683
+ emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2684
+ }
2685
+ };
2599
2686
  return channel;
2600
2687
  }
2601
- function createForksPool(vitest, { execArgv, env }) {
2602
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
2603
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
2604
- const poolOptions = vitest.config.poolOptions?.forks ?? {};
2605
- const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
2606
- const minThreads = poolOptions.minForks ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
2607
- const worker = resolve(vitest.distPath, "workers/forks.js");
2608
- const options = {
2688
+ function createForksPool(vitest, { execArgv, env }, specifications) {
2689
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/forks.js"), options = {
2609
2690
  runtime: "child_process",
2610
2691
  filename: resolve(vitest.distPath, "worker.js"),
2611
2692
  teardown: "teardown",
@@ -2615,22 +2696,15 @@ function createForksPool(vitest, { execArgv, env }) {
2615
2696
  execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2616
2697
  terminateTimeout: vitest.config.teardownTimeout,
2617
2698
  concurrentTasksPerWorker: 1
2618
- };
2619
- const isolated = poolOptions.isolate ?? true;
2699
+ }, isolated = poolOptions.isolate ?? true;
2620
2700
  if (isolated) options.isolateWorkers = true;
2621
- if (poolOptions.singleFork || !vitest.config.fileParallelism) {
2622
- options.maxThreads = 1;
2623
- options.minThreads = 1;
2624
- }
2625
- const pool = new Tinypool(options);
2626
- const runWithFiles = (name) => {
2701
+ if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2702
+ const pool = new Tinypool(options), runWithFiles = (name) => {
2627
2703
  let id = 0;
2628
2704
  async function runFiles(project, config, files, environment, invalidates = []) {
2629
2705
  const paths = files.map((f) => f.filepath);
2630
2706
  vitest.state.clearFiles(project, paths);
2631
- const channel = createChildProcessChannel$1(project, name === "collect");
2632
- const workerId = ++id;
2633
- const data = {
2707
+ const channel = createChildProcessChannel$1(project, name === "collect"), workerId = ++id, data = {
2634
2708
  pool: "forks",
2635
2709
  worker,
2636
2710
  config,
@@ -2656,40 +2730,25 @@ function createForksPool(vitest, { execArgv, env }) {
2656
2730
  return async (specs, invalidates) => {
2657
2731
  // Cancel pending tasks from pool when possible
2658
2732
  vitest.onCancel(() => pool.cancelPendingTasks());
2659
- const configs = /* @__PURE__ */ new WeakMap();
2660
- const getConfig = (project) => {
2733
+ const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2661
2734
  if (configs.has(project)) return configs.get(project);
2662
- const _config = project.getSerializableConfig();
2663
- const config = wrapSerializableConfig(_config);
2664
- configs.set(project, config);
2665
- return config;
2666
- };
2667
- const singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork);
2668
- const multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2735
+ const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
2736
+ return configs.set(project, config), config;
2737
+ }, singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork), multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2669
2738
  if (multipleForks.length) {
2670
- const filesByEnv = await groupFilesByEnv(multipleForks);
2671
- const files = Object.values(filesByEnv).flat();
2672
- const results = [];
2739
+ const filesByEnv = await groupFilesByEnv(multipleForks), files = Object.values(filesByEnv).flat(), results = [];
2673
2740
  if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2674
2741
  else {
2675
2742
  // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2676
2743
  // Tasks are still running parallel but environments are isolated between tasks.
2677
2744
  const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2678
- for (const group of Object.values(grouped)) {
2679
- // Push all files to pool's queue
2680
- results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2681
- // Once all tasks are running or finished, recycle worker for isolation.
2682
- // On-going workers will run in the previous environment.
2683
- await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
2684
- await pool.recycleWorkers();
2685
- }
2745
+ for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2686
2746
  }
2687
2747
  const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2688
2748
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2689
2749
  }
2690
2750
  if (singleFork.length) {
2691
- const filesByEnv = await groupFilesByEnv(singleFork);
2692
- const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2751
+ const filesByEnv = await groupFilesByEnv(singleFork), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2693
2752
  for (const env of envs) {
2694
2753
  const files = filesByEnv[env];
2695
2754
  if (!files?.length) continue;
@@ -2713,10 +2772,7 @@ function createForksPool(vitest, { execArgv, env }) {
2713
2772
  }
2714
2773
 
2715
2774
  function createWorkerChannel$1(project, collect) {
2716
- const channel = new MessageChannel();
2717
- const port = channel.port2;
2718
- const workerPort = channel.port1;
2719
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
2775
+ const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
2720
2776
  eventNames: ["onCancel"],
2721
2777
  post(v) {
2722
2778
  port.postMessage(v);
@@ -2724,24 +2780,20 @@ function createWorkerChannel$1(project, collect) {
2724
2780
  on(fn) {
2725
2781
  port.on("message", fn);
2726
2782
  },
2727
- onTimeoutError(functionName) {
2728
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
2729
- }
2783
+ timeout: -1
2730
2784
  });
2731
2785
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
2786
+ const onClose = () => {
2787
+ port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2788
+ };
2732
2789
  return {
2733
2790
  workerPort,
2734
- port
2791
+ port,
2792
+ onClose
2735
2793
  };
2736
2794
  }
2737
- function createThreadsPool(vitest, { execArgv, env }) {
2738
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
2739
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
2740
- const poolOptions = vitest.config.poolOptions?.threads ?? {};
2741
- const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
2742
- const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
2743
- const worker = resolve(vitest.distPath, "workers/threads.js");
2744
- const options = {
2795
+ function createThreadsPool(vitest, { execArgv, env }, specifications) {
2796
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/threads.js"), options = {
2745
2797
  filename: resolve(vitest.distPath, "worker.js"),
2746
2798
  teardown: "teardown",
2747
2799
  useAtomics: poolOptions.useAtomics ?? false,
@@ -2751,26 +2803,15 @@ function createThreadsPool(vitest, { execArgv, env }) {
2751
2803
  execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2752
2804
  terminateTimeout: vitest.config.teardownTimeout,
2753
2805
  concurrentTasksPerWorker: 1
2754
- };
2755
- const isolated = poolOptions.isolate ?? true;
2806
+ }, isolated = poolOptions.isolate ?? true;
2756
2807
  if (isolated) options.isolateWorkers = true;
2757
- if (poolOptions.singleThread || !vitest.config.fileParallelism) {
2758
- options.maxThreads = 1;
2759
- options.minThreads = 1;
2760
- }
2761
- const pool = new Tinypool$1(options);
2762
- const runWithFiles = (name) => {
2808
+ if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2809
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
2763
2810
  let id = 0;
2764
2811
  async function runFiles(project, config, files, environment, invalidates = []) {
2765
2812
  const paths = files.map((f) => f.filepath);
2766
2813
  vitest.state.clearFiles(project, paths);
2767
- const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
2768
- const onClose = () => {
2769
- port.close();
2770
- workerPort.close();
2771
- };
2772
- const workerId = ++id;
2773
- const data = {
2814
+ const { workerPort, onClose } = createWorkerChannel$1(project, name === "collect"), workerId = ++id, data = {
2774
2815
  pool: "threads",
2775
2816
  worker,
2776
2817
  port: workerPort,
@@ -2798,39 +2839,25 @@ function createThreadsPool(vitest, { execArgv, env }) {
2798
2839
  return async (specs, invalidates) => {
2799
2840
  // Cancel pending tasks from pool when possible
2800
2841
  vitest.onCancel(() => pool.cancelPendingTasks());
2801
- const configs = /* @__PURE__ */ new WeakMap();
2802
- const getConfig = (project) => {
2842
+ const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2803
2843
  if (configs.has(project)) return configs.get(project);
2804
2844
  const config = project.serializedConfig;
2805
- configs.set(project, config);
2806
- return config;
2807
- };
2808
- const singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread);
2809
- const multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
2845
+ return configs.set(project, config), config;
2846
+ }, singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread), multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
2810
2847
  if (multipleThreads.length) {
2811
- const filesByEnv = await groupFilesByEnv(multipleThreads);
2812
- const files = Object.values(filesByEnv).flat();
2813
- const results = [];
2848
+ const filesByEnv = await groupFilesByEnv(multipleThreads), files = Object.values(filesByEnv).flat(), results = [];
2814
2849
  if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2815
2850
  else {
2816
2851
  // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2817
2852
  // Tasks are still running parallel but environments are isolated between tasks.
2818
2853
  const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2819
- for (const group of Object.values(grouped)) {
2820
- // Push all files to pool's queue
2821
- results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2822
- // Once all tasks are running or finished, recycle worker for isolation.
2823
- // On-going workers will run in the previous environment.
2824
- await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve));
2825
- await pool.recycleWorkers();
2826
- }
2854
+ for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2827
2855
  }
2828
2856
  const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2829
2857
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2830
2858
  }
2831
2859
  if (singleThreads.length) {
2832
- const filesByEnv = await groupFilesByEnv(singleThreads);
2833
- const envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2860
+ const filesByEnv = await groupFilesByEnv(singleThreads), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2834
2861
  for (const env of envs) {
2835
2862
  const files = filesByEnv[env];
2836
2863
  if (!files?.length) continue;
@@ -2854,102 +2881,69 @@ function createThreadsPool(vitest, { execArgv, env }) {
2854
2881
  }
2855
2882
 
2856
2883
  function createTypecheckPool(vitest) {
2857
- const promisesMap = /* @__PURE__ */ new WeakMap();
2858
- const rerunTriggered = /* @__PURE__ */ new WeakSet();
2884
+ const promisesMap = /* @__PURE__ */ new WeakMap(), rerunTriggered = /* @__PURE__ */ new WeakSet();
2859
2885
  async function onParseEnd(project, { files, sourceErrors }) {
2860
- const checker = project.typechecker;
2861
- const { packs, events } = checker.getTestPacksAndEvents();
2862
- await vitest._testRun.updated(packs, events);
2863
- if (!project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
2886
+ const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
2887
+ if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
2864
2888
  const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
2865
2889
  if (processError) {
2866
2890
  const error = new Error(checker.getOutput());
2867
- error.stack = "";
2868
- vitest.state.catchError(error, "Typecheck Error");
2891
+ error.stack = "", vitest.state.catchError(error, "Typecheck Error");
2869
2892
  }
2870
- promisesMap.get(project)?.resolve();
2871
- rerunTriggered.delete(project);
2872
2893
  // triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
2873
- if (vitest.config.watch && !vitest.runningPromise) {
2874
- await vitest.report("onFinished", files, []);
2875
- await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
2894
+ if (promisesMap.get(project)?.resolve(), rerunTriggered.delete(project), vitest.config.watch && !vitest.runningPromise) {
2895
+ const modules = files.map((file) => vitest.state.getReportedEntity(file)).filter((e) => e?.type === "module"), state = vitest.isCancelling ? "interrupted" : modules.some((m) => !m.ok()) ? "failed" : "passed";
2896
+ await vitest.report("onTestRunEnd", modules, [], state), await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
2876
2897
  }
2877
2898
  }
2878
2899
  async function createWorkspaceTypechecker(project, files) {
2879
2900
  const checker = project.typechecker ?? new Typechecker(project);
2880
- if (project.typechecker) return checker;
2881
- project.typechecker = checker;
2882
- checker.setFiles(files);
2883
- checker.onParseStart(async () => {
2901
+ return project.typechecker ? checker : (project.typechecker = checker, checker.setFiles(files), checker.onParseStart(async () => {
2884
2902
  const files = checker.getTestFiles();
2885
2903
  for (const file of files) await vitest._testRun.enqueued(project, file);
2886
2904
  await vitest._testRun.collected(project, files);
2887
- });
2888
- checker.onParseEnd((result) => onParseEnd(project, result));
2889
- checker.onWatcherRerun(async () => {
2890
- rerunTriggered.add(project);
2891
- if (!vitest.runningPromise) {
2892
- vitest.state.clearErrors();
2893
- await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
2894
- }
2905
+ }), checker.onParseEnd((result) => onParseEnd(project, result)), checker.onWatcherRerun(async () => {
2906
+ if (rerunTriggered.add(project), !vitest.runningPromise) vitest.state.clearErrors(), await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
2895
2907
  await checker.collectTests();
2896
2908
  const testFiles = checker.getTestFiles();
2897
2909
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
2898
2910
  await vitest._testRun.collected(project, testFiles);
2899
2911
  const { packs, events } = checker.getTestPacksAndEvents();
2900
2912
  await vitest._testRun.updated(packs, events);
2901
- });
2902
- return checker;
2913
+ }), checker);
2903
2914
  }
2904
2915
  async function startTypechecker(project, files) {
2905
2916
  if (project.typechecker) return;
2906
2917
  const checker = await createWorkspaceTypechecker(project, files);
2907
- await checker.collectTests();
2908
- await checker.start();
2918
+ await checker.collectTests(), await checker.start();
2909
2919
  }
2910
2920
  async function collectTests(specs) {
2911
2921
  const specsByProject = groupBy(specs, (spec) => spec.project.name);
2912
2922
  for (const name in specsByProject) {
2913
- const project = specsByProject[name][0].project;
2914
- const files = specsByProject[name].map((spec) => spec.moduleId);
2915
- const checker = await createWorkspaceTypechecker(project, files);
2916
- checker.setFiles(files);
2917
- await checker.collectTests();
2923
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), checker = await createWorkspaceTypechecker(project, files);
2924
+ checker.setFiles(files), await checker.collectTests();
2918
2925
  const testFiles = checker.getTestFiles();
2919
2926
  vitest.state.collectFiles(project, testFiles);
2920
2927
  }
2921
2928
  }
2922
2929
  async function runTests(specs) {
2923
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
2924
- const promises = [];
2930
+ const specsByProject = groupBy(specs, (spec) => spec.project.name), promises = [];
2925
2931
  for (const name in specsByProject) {
2926
- const project = specsByProject[name][0].project;
2927
- const files = specsByProject[name].map((spec) => spec.moduleId);
2928
- const promise = createDefer();
2929
- // check that watcher actually triggered rerun
2930
- const _p = new Promise((resolve) => {
2932
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), _p = new Promise((resolve) => {
2931
2933
  const _i = setInterval(() => {
2932
- if (!project.typechecker || rerunTriggered.has(project)) {
2933
- resolve(true);
2934
- clearInterval(_i);
2935
- }
2934
+ if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
2936
2935
  });
2937
2936
  setTimeout(() => {
2938
- resolve(false);
2939
- clearInterval(_i);
2937
+ resolve(false), clearInterval(_i);
2940
2938
  }, 500).unref();
2941
- });
2942
- const triggered = await _p;
2939
+ }), triggered = await _p;
2943
2940
  if (project.typechecker && !triggered) {
2944
2941
  const testFiles = project.typechecker.getTestFiles();
2945
2942
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
2946
- await vitest._testRun.collected(project, testFiles);
2947
- await onParseEnd(project, project.typechecker.getResult());
2943
+ await vitest._testRun.collected(project, testFiles), await onParseEnd(project, project.typechecker.getResult());
2948
2944
  continue;
2949
2945
  }
2950
- promises.push(promise);
2951
- promisesMap.set(project, promise);
2952
- promises.push(startTypechecker(project, files));
2946
+ promises.push(promise), promisesMap.set(project, promise), promises.push(startTypechecker(project, files));
2953
2947
  }
2954
2948
  await Promise.all(promises);
2955
2949
  }
@@ -2993,8 +2987,7 @@ function stringToBytes(input, percentageReference) {
2993
2987
  let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
2994
2988
  if (trailingChars && numericString) {
2995
2989
  const numericValue = Number.parseFloat(numericString);
2996
- trailingChars = trailingChars.toLowerCase();
2997
- switch (trailingChars) {
2990
+ switch (trailingChars = trailingChars.toLowerCase(), trailingChars) {
2998
2991
  case "%":
2999
2992
  input = numericValue / 100;
3000
2993
  break;
@@ -3010,26 +3003,20 @@ function stringToBytes(input, percentageReference) {
3010
3003
  }
3011
3004
  }
3012
3005
  } else input = Number.parseFloat(input);
3013
- if (typeof input === "number") if (input <= 1 && input > 0) if (percentageReference) return Math.floor(input * percentageReference);
3014
- else throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3015
- else if (input > 1) return Math.floor(input);
3006
+ if (typeof input === "number") if (input <= 1 && input > 0) {
3007
+ if (percentageReference) return Math.floor(input * percentageReference);
3008
+ throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3009
+ } else if (input > 1) return Math.floor(input);
3016
3010
  else throw new Error("Unexpected numerical input for \"memoryLimit\"");
3017
3011
  return null;
3018
3012
  }
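
Editor's note: `stringToBytes` resolves a memory limit either as a fraction/percentage of a reference value (the pools pass total or half-total system memory) or as an absolute byte count. Only the `%` branch of the unit-suffix switch is visible in this hunk, so the reduced version below covers just the percentage and plain-number paths; 8 GiB is an illustrative reference:

    // Reduced re-implementation of the branches visible above; unit suffixes
    // other than "%" are omitted here.
    function toBytes(input, percentageReference) {
      if (typeof input === 'string') {
        input = input.endsWith('%')
          ? Number.parseFloat(input) / 100
          : Number.parseFloat(input);
      }
      if (input > 0 && input <= 1) {
        if (!percentageReference) {
          throw new Error('For a percentage based memory limit a percentageReference must be supplied');
        }
        return Math.floor(input * percentageReference); // fraction of the reference
      }
      if (input > 1) return Math.floor(input);          // already an absolute byte count
      return null;
    }

    const GiB = 1024 ** 3;
    console.log(toBytes('50%', 8 * GiB)); // 4294967296
    console.log(toBytes(0.25, 8 * GiB));  // 2147483648
    console.log(toBytes(2048));           // 2048
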
3019
3013
 
3020
3014
  const suppressWarningsPath$1 = resolve(rootDir, "./suppress-warnings.cjs");
3021
3015
  function createChildProcessChannel(project, collect) {
3022
- const emitter = new EventEmitter();
3023
- const cleanup = () => emitter.removeAllListeners();
3024
- const events = {
3016
+ const emitter = new EventEmitter(), events = {
3025
3017
  message: "message",
3026
3018
  response: "response"
3027
- };
3028
- const channel = {
3029
- onMessage: (callback) => emitter.on(events.message, callback),
3030
- postMessage: (message) => emitter.emit(events.response, message)
3031
- };
3032
- const rpc = createBirpc(createMethodsRPC(project, {
3019
+ }, rpc = createBirpc(createMethodsRPC(project, {
3033
3020
  cacheFs: true,
3034
3021
  collect
3035
3022
  }), {
@@ -3052,24 +3039,20 @@ function createChildProcessChannel(project, collect) {
3052
3039
  on(fn) {
3053
3040
  emitter.on(events.response, fn);
3054
3041
  },
3055
- onTimeoutError(functionName) {
3056
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
3057
- }
3042
+ timeout: -1
3058
3043
  });
3059
3044
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
3060
- return {
3061
- channel,
3062
- cleanup
3045
+ const channel = {
3046
+ onMessage: (callback) => emitter.on(events.message, callback),
3047
+ postMessage: (message) => emitter.emit(events.response, message),
3048
+ onClose: () => {
3049
+ emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3050
+ }
3063
3051
  };
3052
+ return { channel };
3064
3053
  }
3065
- function createVmForksPool(vitest, { execArgv, env }) {
3066
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3067
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3068
- const poolOptions = vitest.config.poolOptions?.vmForks ?? {};
3069
- const maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? threadsCount;
3070
- const minThreads = poolOptions.maxForks ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
3071
- const worker = resolve(vitest.distPath, "workers/vmForks.js");
3072
- const options = {
3054
+ function createVmForksPool(vitest, { execArgv, env }, specifications) {
3055
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/vmForks.js"), options = {
3073
3056
  runtime: "child_process",
3074
3057
  filename: resolve(vitest.distPath, "worker.js"),
3075
3058
  maxThreads,
@@ -3087,19 +3070,13 @@ function createVmForksPool(vitest, { execArgv, env }) {
3087
3070
  concurrentTasksPerWorker: 1,
3088
3071
  maxMemoryLimitBeforeRecycle: getMemoryLimit$1(vitest.config) || void 0
3089
3072
  };
3090
- if (poolOptions.singleFork || !vitest.config.fileParallelism) {
3091
- options.maxThreads = 1;
3092
- options.minThreads = 1;
3093
- }
3094
- const pool = new Tinypool$1(options);
3095
- const runWithFiles = (name) => {
3073
+ if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3074
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
3096
3075
  let id = 0;
3097
3076
  async function runFiles(project, config, files, environment, invalidates = []) {
3098
3077
  const paths = files.map((f) => f.filepath);
3099
3078
  vitest.state.clearFiles(project, paths);
3100
- const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
3101
- const workerId = ++id;
3102
- const data = {
3079
+ const { channel } = createChildProcessChannel(project, name === "collect"), workerId = ++id, data = {
3103
3080
  pool: "forks",
3104
3081
  worker,
3105
3082
  config,
@@ -3121,24 +3098,17 @@ function createVmForksPool(vitest, { execArgv, env }) {
3121
3098
  else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3122
3099
  else throw error;
3123
3100
  } finally {
3124
- cleanup();
3101
+ channel.onClose();
3125
3102
  }
3126
3103
  }
3127
3104
  return async (specs, invalidates) => {
3128
3105
  // Cancel pending tasks from pool when possible
3129
3106
  vitest.onCancel(() => pool.cancelPendingTasks());
3130
- const configs = /* @__PURE__ */ new Map();
3131
- const getConfig = (project) => {
3107
+ const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3132
3108
  if (configs.has(project)) return configs.get(project);
3133
- const _config = project.serializedConfig;
3134
- const config = wrapSerializableConfig(_config);
3135
- configs.set(project, config);
3136
- return config;
3137
- };
3138
- const filesByEnv = await groupFilesByEnv(specs);
3139
- const promises = Object.values(filesByEnv).flat();
3140
- const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
3141
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3109
+ const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
3110
+ return configs.set(project, config), config;
3111
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3142
3112
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3143
3113
  };
3144
3114
  };
@@ -3150,21 +3120,14 @@ function createVmForksPool(vitest, { execArgv, env }) {
3150
3120
  };
3151
3121
  }
3152
3122
  function getMemoryLimit$1(config) {
3153
- const memory = nodeos.totalmem();
3154
- const limit = getWorkerMemoryLimit(config, "vmForks");
3155
- if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
3156
- // If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
3157
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
3123
+ const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmForks");
3158
3124
  // just ignore "memoryLimit" value because we cannot detect memory limit
3159
- return null;
3125
+ return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3160
3126
  }
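The inlined `getMemoryLimit$1` ternary above resolves the per-worker memory limit used to recycle forks. A minimal sketch, assuming a simplified stand-in for the internal `stringToBytes` helper that only handles plain numbers and `"NN%"` strings:

```ts
import os from "node:os";

// Simplified stand-in for the internal stringToBytes helper: only plain numbers
// (fractions <= 1 of the reference memory, absolute byte counts otherwise) and
// "NN%" strings are handled here.
function toBytes(limit: number | string, total?: number): number | null {
  if (typeof limit === "number") {
    return limit <= 1 ? (total == null ? null : Math.floor(total * limit)) : limit;
  }
  if (limit.trim().endsWith("%")) {
    return total == null ? null : Math.floor((total * Number.parseFloat(limit)) / 100);
  }
  return Number(limit);
}

// Mirrors the inlined ternary above: in watch mode only half of the machine's
// memory is used as the reference for relative limits; if totalmem() were not a
// number, relative limits like 0.5 or "50%" could not be resolved at all.
function resolveMemoryLimit(limit: number | string, watch: boolean): number | null {
  const memory = os.totalmem();
  if (typeof memory === "number") return toBytes(limit, watch ? memory / 2 : memory);
  if ((typeof limit === "number" && limit > 1) || (typeof limit === "string" && !limit.trim().endsWith("%"))) {
    return toBytes(limit);
  }
  return null;
}
```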
3161
3127
 
3162
3128
  const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
3163
3129
  function createWorkerChannel(project, collect) {
3164
- const channel = new MessageChannel();
3165
- const port = channel.port2;
3166
- const workerPort = channel.port1;
3167
- const rpc = createBirpc(createMethodsRPC(project, { collect }), {
3130
+ const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
3168
3131
  eventNames: ["onCancel"],
3169
3132
  post(v) {
3170
3133
  port.postMessage(v);
@@ -3172,24 +3135,19 @@ function createWorkerChannel(project, collect) {
3172
3135
  on(fn) {
3173
3136
  port.on("message", fn);
3174
3137
  },
3175
- onTimeoutError(functionName) {
3176
- throw new Error(`[vitest-pool]: Timeout calling "${functionName}"`);
3177
- }
3138
+ timeout: -1
3178
3139
  });
3179
3140
  project.vitest.onCancel((reason) => rpc.onCancel(reason));
3141
+ function onClose() {
3142
+ workerPort.close(), port.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3143
+ }
3180
3144
  return {
3181
3145
  workerPort,
3182
- port
3146
+ onClose
3183
3147
  };
3184
3148
  }
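The reworked channel replaces the old `onTimeoutError` hook with `timeout: -1` (no per-call timeout) and returns an `onClose` handle that tears everything down. A minimal sketch, where `methods` stands in for `createMethodsRPC(project, { collect })`:

```ts
import { MessageChannel } from "node:worker_threads";
import { createBirpc } from "birpc";

// `methods` stands in for createMethodsRPC(project, { collect }).
function createChannelSketch(methods: Record<string, (...args: any[]) => any>) {
  const { port1: workerPort, port2: port } = new MessageChannel();
  const rpc = createBirpc(methods, {
    eventNames: ["onCancel"],
    post: (v) => port.postMessage(v),
    on: (fn) => port.on("message", fn),
    // -1 disables birpc's per-call timeout, replacing the old onTimeoutError hook
    timeout: -1,
  });
  // Tear down both ports and reject any RPC calls that are still pending.
  function onClose() {
    workerPort.close();
    port.close();
    rpc.$close(new Error("[vitest-pool]: Pending methods while closing rpc"));
  }
  return { workerPort, onClose };
}
```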
3185
- function createVmThreadsPool(vitest, { execArgv, env }) {
3186
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3187
- const threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3188
- const poolOptions = vitest.config.poolOptions?.vmThreads ?? {};
3189
- const maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? threadsCount;
3190
- const minThreads = poolOptions.minThreads ?? vitest.config.minWorkers ?? Math.min(threadsCount, maxThreads);
3191
- const worker = resolve(vitest.distPath, "workers/vmThreads.js");
3192
- const options = {
3149
+ function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
3150
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/vmThreads.js"), options = {
3193
3151
  filename: resolve(vitest.distPath, "worker.js"),
3194
3152
  useAtomics: poolOptions.useAtomics ?? false,
3195
3153
  maxThreads,
@@ -3207,19 +3165,13 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
3207
3165
  concurrentTasksPerWorker: 1,
3208
3166
  maxMemoryLimitBeforeRecycle: getMemoryLimit(vitest.config) || void 0
3209
3167
  };
3210
- if (poolOptions.singleThread || !vitest.config.fileParallelism) {
3211
- options.maxThreads = 1;
3212
- options.minThreads = 1;
3213
- }
3214
- const pool = new Tinypool$1(options);
3215
- const runWithFiles = (name) => {
3168
+ if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3169
+ const pool = new Tinypool$1(options), runWithFiles = (name) => {
3216
3170
  let id = 0;
3217
3171
  async function runFiles(project, config, files, environment, invalidates = []) {
3218
3172
  const paths = files.map((f) => f.filepath);
3219
3173
  vitest.state.clearFiles(project, paths);
3220
- const { workerPort, port } = createWorkerChannel(project, name === "collect");
3221
- const workerId = ++id;
3222
- const data = {
3174
+ const { workerPort, onClose } = createWorkerChannel(project, name === "collect"), workerId = ++id, data = {
3223
3175
  pool: "vmThreads",
3224
3176
  worker,
3225
3177
  port: workerPort,
@@ -3242,24 +3194,17 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
3242
3194
  else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3243
3195
  else throw error;
3244
3196
  } finally {
3245
- port.close();
3246
- workerPort.close();
3197
+ onClose();
3247
3198
  }
3248
3199
  }
3249
3200
  return async (specs, invalidates) => {
3250
3201
  // Cancel pending tasks from pool when possible
3251
3202
  vitest.onCancel(() => pool.cancelPendingTasks());
3252
- const configs = /* @__PURE__ */ new Map();
3253
- const getConfig = (project) => {
3203
+ const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3254
3204
  if (configs.has(project)) return configs.get(project);
3255
3205
  const config = project.serializedConfig;
3256
- configs.set(project, config);
3257
- return config;
3258
- };
3259
- const filesByEnv = await groupFilesByEnv(specs);
3260
- const promises = Object.values(filesByEnv).flat();
3261
- const results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)));
3262
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3206
+ return configs.set(project, config), config;
3207
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3263
3208
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3264
3209
  };
3265
3210
  };
@@ -3271,13 +3216,9 @@ function createVmThreadsPool(vitest, { execArgv, env }) {
3271
3216
  };
3272
3217
  }
3273
3218
  function getMemoryLimit(config) {
3274
- const memory = nodeos.totalmem();
3275
- const limit = getWorkerMemoryLimit(config, "vmThreads");
3276
- if (typeof memory === "number") return stringToBytes(limit, config.watch ? memory / 2 : memory);
3277
- // If totalmem is not supported we cannot resolve percentage based values like 0.5, "50%"
3278
- if (typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%") return stringToBytes(limit);
3219
+ const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmThreads");
3279
3220
  // just ignore "memoryLimit" value because we cannot detect memory limit
3280
- return null;
3221
+ return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3281
3222
  }
3282
3223
 
3283
3224
  const builtinPools = [
@@ -3289,14 +3230,9 @@ const builtinPools = [
3289
3230
  "typescript"
3290
3231
  ];
3291
3232
  function getDefaultPoolName(project) {
3292
- if (project.config.browser.enabled) return "browser";
3293
- return project.config.pool;
3233
+ return project.config.browser.enabled ? "browser" : project.config.pool;
3294
3234
  }
3295
- function getFilePoolName(project, file) {
3296
- for (const [glob, pool] of project.config.poolMatchGlobs) {
3297
- if (pool === "browser") throw new Error("Since Vitest 0.31.0 \"browser\" pool is not supported in `poolMatchGlobs`. You can create a project to run some of your tests in browser in parallel. Read more: https://vitest.dev/guide/projects");
3298
- if (pm.isMatch(file, glob, { cwd: project.config.root })) return pool;
3299
- }
3235
+ function getFilePoolName(project) {
3300
3236
  return getDefaultPoolName(project);
3301
3237
  }
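With the `poolMatchGlobs` routing removed, the pool for a file is now simply the project's default pool. A tiny illustration (the `ProjectLike` shape is an assumption):

```ts
// Every file now runs in its project's default pool: "browser" when Browser
// Mode is enabled, otherwise config.pool.
interface ProjectLike {
  config: { pool: string; browser: { enabled: boolean } };
}

const resolvePool = (project: ProjectLike): string =>
  project.config.browser.enabled ? "browser" : project.config.pool;

// resolvePool({ config: { pool: "forks", browser: { enabled: false } } }) === "forks"
```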
3302
3238
  function createPool(ctx) {
@@ -3307,26 +3243,15 @@ function createPool(ctx) {
3307
3243
  vmThreads: null,
3308
3244
  vmForks: null,
3309
3245
  typescript: null
3310
- };
3311
- // in addition to resolve.conditions Vite also adds production/development,
3312
- // see: https://github.com/vitejs/vite/blob/af2aa09575229462635b7cbb6d248ca853057ba2/packages/vite/src/node/plugins/resolve.ts#L1056-L1080
3313
- const viteMajor = Number(version.split(".")[0]);
3314
- const potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3246
+ }, viteMajor = Number(version.split(".")[0]), potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3315
3247
  "production",
3316
3248
  "development",
3317
3249
  ...ctx.vite.config.resolve.conditions
3318
- ]);
3319
- const conditions = [...potentialConditions].filter((condition) => {
3320
- if (condition === "production") return ctx.vite.config.isProduction;
3321
- if (condition === "development") return !ctx.vite.config.isProduction;
3322
- return true;
3250
+ ]), conditions = [...potentialConditions].filter((condition) => {
3251
+ return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
3323
3252
  }).map((condition) => {
3324
- if (viteMajor >= 6 && condition === "development|production") return ctx.vite.config.isProduction ? "production" : "development";
3325
- return condition;
3326
- }).flatMap((c) => ["--conditions", c]);
3327
- // Instead of passing whole process.execArgv to the workers, pick allowed options.
3328
- // Some options may crash worker, e.g. --prof, --title. nodejs/node#41103
3329
- const execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
3253
+ return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
3254
+ }).flatMap((c) => ["--conditions", c]), execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
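A sketch of how the worker `execArgv` is assembled above, with the Vite-major branching folded away (function and parameter names are illustrative):

```ts
// Resolve conditions become repeated --conditions flags; only profiling-related
// Node flags are forwarded from the parent process, since options such as
// --prof or --title can crash the worker (nodejs/node#41103).
function buildWorkerExecArgv(conditions: string[], isProduction: boolean, parentExecArgv: string[]): string[] {
  const conditionFlags = conditions
    .filter((c) => (c === "production" ? isProduction : c === "development" ? !isProduction : true))
    .map((c) => (c === "development|production" ? (isProduction ? "production" : "development") : c))
    .flatMap((c) => ["--conditions", c]);
  const forwarded = parentExecArgv.filter(
    (arg) => arg.startsWith("--cpu-prof") || arg.startsWith("--heap-prof") || arg.startsWith("--diagnostic-dir"),
  );
  return [...forwarded, ...conditionFlags];
}

// buildWorkerExecArgv(["development|production", "module"], false, ["--cpu-prof"])
// -> ["--cpu-prof", "--conditions", "development", "--conditions", "module"]
```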
3330
3255
  async function executeTests(method, files, invalidate) {
3331
3256
  const options = {
3332
3257
  execArgv: [...execArgv, ...conditions],
@@ -3342,25 +3267,22 @@ function createPool(ctx) {
3342
3267
  };
3343
3268
  // env vars are case-insensitive on Windows, but spawned processes don't support that
3344
3269
  if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
3345
- const poolConcurrentPromises = /* @__PURE__ */ new Map();
3346
- const customPools = /* @__PURE__ */ new Map();
3270
+ const poolConcurrentPromises = /* @__PURE__ */ new Map(), customPools = /* @__PURE__ */ new Map();
3347
3271
  async function resolveCustomPool(filepath) {
3348
3272
  if (customPools.has(filepath)) return customPools.get(filepath);
3349
- const pool = await ctx.runner.executeId(filepath);
3273
+ const pool = await ctx.runner.import(filepath);
3350
3274
  if (typeof pool.default !== "function") throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
3351
3275
  const poolInstance = await pool.default(ctx, options);
3352
3276
  if (typeof poolInstance?.name !== "string") throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
3353
3277
  if (typeof poolInstance?.[method] !== "function") throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
3354
- customPools.set(filepath, poolInstance);
3355
- return poolInstance;
3278
+ return customPools.set(filepath, poolInstance), poolInstance;
3356
3279
  }
3357
3280
  function getConcurrentPool(pool, fn) {
3358
3281
  if (poolConcurrentPromises.has(pool)) return poolConcurrentPromises.get(pool);
3359
3282
  const promise = fn().finally(() => {
3360
3283
  poolConcurrentPromises.delete(pool);
3361
3284
  });
3362
- poolConcurrentPromises.set(pool, promise);
3363
- return promise;
3285
+ return poolConcurrentPromises.set(pool, promise), promise;
3364
3286
  }
3365
3287
  function getCustomPool(pool) {
3366
3288
  return getConcurrentPool(pool, () => resolveCustomPool(pool));
@@ -3371,23 +3293,18 @@ function createPool(ctx) {
3371
3293
  return createBrowserPool(ctx);
3372
3294
  });
3373
3295
  }
3374
- const groupedSpecifications = {};
3375
- const groups = /* @__PURE__ */ new Set();
3376
- const factories = {
3377
- vmThreads: () => createVmThreadsPool(ctx, options),
3378
- vmForks: () => createVmForksPool(ctx, options),
3379
- threads: () => createThreadsPool(ctx, options),
3380
- forks: () => createForksPool(ctx, options),
3296
+ const groupedSpecifications = {}, groups = /* @__PURE__ */ new Set(), factories = {
3297
+ vmThreads: (specs) => createVmThreadsPool(ctx, options, specs),
3298
+ vmForks: (specs) => createVmForksPool(ctx, options, specs),
3299
+ threads: (specs) => createThreadsPool(ctx, options, specs),
3300
+ forks: (specs) => createForksPool(ctx, options, specs),
3381
3301
  typescript: () => createTypecheckPool(ctx)
3382
3302
  };
3383
3303
  for (const spec of files) {
3384
- const group = spec[0].config.sequence.groupOrder ?? 0;
3385
- groups.add(group);
3386
- groupedSpecifications[group] ??= [];
3387
- groupedSpecifications[group].push(spec);
3304
+ const group = spec.project.config.sequence.groupOrder ?? 0;
3305
+ groups.add(group), groupedSpecifications[group] ??= [], groupedSpecifications[group].push(spec);
3388
3306
  }
3389
- const Sequencer = ctx.config.sequence.sequencer;
3390
- const sequencer = new Sequencer(ctx);
3307
+ const Sequencer = ctx.config.sequence.sequencer, sequencer = new Sequencer(ctx);
3391
3308
  async function sortSpecs(specs) {
3392
3309
  if (ctx.config.shard) {
3393
3310
  if (!ctx.config.passWithNoTests && ctx.config.shard.count > specs.length) throw new Error(`--shard <count> must not be larger than the number of test files. Resolved ${specs.length} test files for --shard=${ctx.config.shard.index}/${ctx.config.shard.count}.`);
@@ -3407,26 +3324,19 @@ function createPool(ctx) {
3407
3324
  typescript: []
3408
3325
  };
3409
3326
  specifications.forEach((specification) => {
3410
- const pool = specification[2].pool;
3411
- filesByPool[pool] ??= [];
3412
- filesByPool[pool].push(specification);
3413
- });
3414
- await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3327
+ const pool = specification.pool;
3328
+ filesByPool[pool] ??= [], filesByPool[pool].push(specification);
3329
+ }), await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3415
3330
  const [pool, files] = entry;
3416
3331
  if (!files.length) return null;
3417
3332
  const specs = await sortSpecs(files);
3418
3333
  if (pool in factories) {
3419
3334
  const factory = factories[pool];
3420
- pools[pool] ??= factory();
3421
- return pools[pool][method](specs, invalidate);
3422
- }
3423
- if (pool === "browser") {
3424
- pools.browser ??= await getBrowserPool();
3425
- return pools.browser[method](specs, invalidate);
3335
+ return pools[pool] ??= factory(specs), pools[pool][method](specs, invalidate);
3426
3336
  }
3337
+ if (pool === "browser") return pools.browser ??= await getBrowserPool(), pools.browser[method](specs, invalidate);
3427
3338
  const poolHandler = await getCustomPool(pool);
3428
- pools[poolHandler.name] ??= poolHandler;
3429
- return poolHandler[method](specs, invalidate);
3339
+ return pools[poolHandler.name] ??= poolHandler, poolHandler[method](specs, invalidate);
3430
3340
  }));
3431
3341
  }
3432
3342
  }
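A condensed sketch of the per-pool dispatch this function performs, assuming a simplified `Pool` shape with a single `runTests` method (`dispatchByPool` is illustrative, not a package API):

```ts
type Spec = { pool: string };
type Pool = { runTests: (specs: Spec[]) => Promise<void> };

// Files are bucketed by pool name; each pool is created lazily (receiving the
// specs it will run, which feeds the worker-count heuristics above) and then
// all pools are dispatched in parallel.
async function dispatchByPool(
  specs: Spec[],
  pools: Map<string, Pool>,
  factories: Record<string, (specs: Spec[]) => Pool>,
): Promise<void> {
  const byPool = new Map<string, Spec[]>();
  for (const spec of specs) {
    const bucket = byPool.get(spec.pool) ?? [];
    bucket.push(spec);
    byPool.set(spec.pool, bucket);
  }
  await Promise.all(
    [...byPool.entries()].map(([name, files]) => {
      let pool = pools.get(name);
      if (!pool) {
        pool = factories[name](files);
        pools.set(name, pool);
      }
      return pool.runTests(files);
    }),
  );
}
```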
@@ -3447,14 +3357,9 @@ class BaseSequencer {
3447
3357
  }
3448
3358
  // async so it can be extended by other sequencers
3449
3359
  async shard(files) {
3450
- const { config } = this.ctx;
3451
- const { index, count } = config.shard;
3452
- const shardSize = Math.ceil(files.length / count);
3453
- const shardStart = shardSize * (index - 1);
3454
- const shardEnd = shardSize * index;
3360
+ const { config } = this.ctx, { index, count } = config.shard, [shardStart, shardEnd] = this.calculateShardRange(files.length, index, count);
3455
3361
  return [...files].map((spec) => {
3456
- const fullPath = resolve$1(slash(config.root), slash(spec.moduleId));
3457
- const specPath = fullPath?.slice(config.root.length);
3362
+ const fullPath = resolve$1(slash(config.root), slash(spec.moduleId)), specPath = fullPath?.slice(config.root.length);
3458
3363
  return {
3459
3364
  spec,
3460
3365
  hash: hash("sha1", specPath, "hex")
@@ -3465,25 +3370,26 @@ class BaseSequencer {
3465
3370
  async sort(files) {
3466
3371
  const cache = this.ctx.cache;
3467
3372
  return [...files].sort((a, b) => {
3468
- const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
3469
- const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
3470
- const aState = cache.getFileTestResults(keyA);
3471
- const bState = cache.getFileTestResults(keyB);
3373
+ const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`, keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`, aState = cache.getFileTestResults(keyA), bState = cache.getFileTestResults(keyB);
3472
3374
  if (!aState || !bState) {
3473
- const statsA = cache.getFileStats(keyA);
3474
- const statsB = cache.getFileStats(keyB);
3475
- // run unknown first
3476
- if (!statsA || !statsB) return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
3375
+ const statsA = cache.getFileStats(keyA), statsB = cache.getFileStats(keyB);
3477
3376
  // run larger files first
3478
- return statsB.size - statsA.size;
3377
+ return !statsA || !statsB ? !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0 : statsB.size - statsA.size;
3479
3378
  }
3480
- // run failed first
3481
- if (aState.failed && !bState.failed) return -1;
3482
- if (!aState.failed && bState.failed) return 1;
3483
3379
  // run longer first
3484
- return bState.duration - aState.duration;
3380
+ return aState.failed && !bState.failed ? -1 : !aState.failed && bState.failed ? 1 : bState.duration - aState.duration;
3485
3381
  });
3486
3382
  }
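The comparator above orders test files by cached history. A standalone sketch using plain data instead of the cache object (the `FileInfo` shape is an assumption):

```ts
// Files with no cached test results fall back to file-size ordering (files with
// unknown size first, then larger files); files with results run failed-first,
// then longest-first.
interface FileInfo {
  results?: { failed: boolean; duration: number }; // cache.getFileTestResults(...)
  stats?: { size: number };                        // cache.getFileStats(...)
}

function compareSpecs(a: FileInfo, b: FileInfo): number {
  if (!a.results || !b.results) {
    if (!a.stats || !b.stats) return !a.stats && b.stats ? -1 : !b.stats && a.stats ? 1 : 0;
    return b.stats.size - a.stats.size; // run larger files first
  }
  if (a.results.failed && !b.results.failed) return -1; // run failed first
  if (!a.results.failed && b.results.failed) return 1;
  return b.results.duration - a.results.duration;       // run longer first
}
```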
3383
+ // Calculate the shard range [start, end], distributing files as evenly as possible
3384
+ calculateShardRange(filesCount, index, count) {
3385
+ const baseShardSize = Math.floor(filesCount / count), remainderTestFilesCount = filesCount % count;
3386
+ if (remainderTestFilesCount >= index) {
3387
+ const shardSize = baseShardSize + 1, shardStart = shardSize * (index - 1), shardEnd = shardSize * index;
3388
+ return [shardStart, shardEnd];
3389
+ }
3390
+ const shardStart = remainderTestFilesCount * (baseShardSize + 1) + (index - remainderTestFilesCount - 1) * baseShardSize, shardEnd = shardStart + baseShardSize;
3391
+ return [shardStart, shardEnd];
3392
+ }
3487
3393
  }
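The new `calculateShardRange` splits files as evenly as possible: the first `filesCount % count` shards receive one extra file. A re-implementation with a quick check (the previous `Math.ceil`-based sizing could leave the last shard nearly empty):

```ts
// Shard sizes differ by at most one file.
function shardRange(filesCount: number, index: number, count: number): [number, number] {
  const base = Math.floor(filesCount / count);
  const remainder = filesCount % count;
  if (remainder >= index) {
    const size = base + 1;
    return [size * (index - 1), size * index];
  }
  const start = remainder * (base + 1) + (index - remainder - 1) * base;
  return [start, start + base];
}

// 10 files across 4 shards -> sizes 3, 3, 2, 2
// (the old Math.ceil-based split produced 3, 3, 3, 1)
for (let i = 1; i <= 4; i++) console.log(shardRange(10, i, 4)); // [0,3] [3,6] [6,8] [8,10]
```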
3488
3394
 
3489
3395
  class RandomSequencer extends BaseSequencer {
@@ -3501,11 +3407,10 @@ function parseInspector(inspect) {
3501
3407
  if (typeof inspect === "number") return { port: inspect };
3502
3408
  if (inspect.match(/https?:\//)) throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
3503
3409
  const [host, port] = inspect.split(":");
3504
- if (!port) return { host };
3505
- return {
3410
+ return port ? {
3506
3411
  host,
3507
3412
  port: Number(port) || defaultInspectPort
3508
- };
3413
+ } : { host };
3509
3414
  }
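A self-contained sketch of the branch above (the URL rejection is omitted; `defaultInspectPort` is assumed to be Node's usual 9229):

```ts
const defaultInspectPort = 9229; // assumption: Node's default inspector port

function parseInspectorSketch(inspect: string | number): { host?: string; port?: number } {
  if (typeof inspect === "number") return { port: inspect };
  const [host, port] = inspect.split(":");
  return port ? { host, port: Number(port) || defaultInspectPort } : { host };
}

parseInspectorSketch(9229);             // { port: 9229 }
parseInspectorSketch("127.0.0.1:9230"); // { host: "127.0.0.1", port: 9230 }
parseInspectorSketch("localhost");      // { host: "localhost" }
```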
3510
3415
  function resolveApiServerConfig(options, defaultPort) {
3511
3416
  let api;
@@ -3523,12 +3428,10 @@ function resolveApiServerConfig(options, defaultPort) {
3523
3428
  return api;
3524
3429
  }
3525
3430
  function resolveInlineWorkerOption(value) {
3526
- if (typeof value === "string" && value.trim().endsWith("%")) return getWorkersCountByPercentage(value);
3527
- else return Number(value);
3431
+ return typeof value === "string" && value.trim().endsWith("%") ? getWorkersCountByPercentage(value) : Number(value);
3528
3432
  }
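A sketch of how a percentage value such as `"50%"` resolves to a worker count. `getWorkersCountByPercentage` is internal, so a plausible stand-in is shown and its exact rounding is an assumption:

```ts
import os from "node:os";

// Plausible stand-in for the internal getWorkersCountByPercentage helper:
// round the share of available CPUs down, never below one worker.
function workersFromPercentage(value: string): number {
  const cores = typeof os.availableParallelism === "function" ? os.availableParallelism() : os.cpus().length;
  return Math.max(1, Math.floor((cores * Number.parseFloat(value)) / 100));
}

function resolveWorkerOption(value: string | number): number {
  return typeof value === "string" && value.trim().endsWith("%") ? workersFromPercentage(value) : Number(value);
}

// On an 8-core machine: resolveWorkerOption("50%") === 4, resolveWorkerOption(3) === 3
```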
3529
3433
  function resolveConfig$1(vitest, options, viteConfig) {
3530
- const mode = vitest.mode;
3531
- const logger = vitest.logger;
3434
+ const mode = vitest.mode, logger = vitest.logger;
3532
3435
  if (options.dom) {
3533
3436
  if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`));
3534
3437
  options.environment = "happy-dom";
@@ -3539,24 +3442,17 @@ function resolveConfig$1(vitest, options, viteConfig) {
3539
3442
  root: viteConfig.root,
3540
3443
  mode
3541
3444
  };
3542
- resolved.project = toArray(resolved.project);
3543
- resolved.provide ??= {};
3544
- resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "";
3545
- resolved.color = typeof options.name !== "string" ? options.name?.color : void 0;
3445
+ if (resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enable Browser Mode, use "test.browser.enabled" instead.`);
3546
3446
  const inspector = resolved.inspect || resolved.inspectBrk;
3547
- resolved.inspector = {
3447
+ if (resolved.inspector = {
3548
3448
  ...resolved.inspector,
3549
3449
  ...parseInspector(inspector),
3550
3450
  enabled: !!inspector,
3551
3451
  waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk
3552
- };
3553
- if (viteConfig.base !== "/") resolved.base = viteConfig.base;
3554
- resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true;
3555
- if (options.shard) {
3452
+ }, viteConfig.base !== "/") resolved.base = viteConfig.base;
3453
+ if (resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true, options.shard) {
3556
3454
  if (resolved.watch) throw new Error("You cannot use --shard option with enabled watch");
3557
- const [indexString, countString] = options.shard.split("/");
3558
- const index = Math.abs(Number.parseInt(indexString, 10));
3559
- const count = Math.abs(Number.parseInt(countString, 10));
3455
+ const [indexString, countString] = options.shard.split("/"), index = Math.abs(Number.parseInt(indexString, 10)), count = Math.abs(Number.parseInt(countString, 10));
3560
3456
  if (Number.isNaN(count) || count <= 0) throw new Error("--shard <count> must be a positive number");
3561
3457
  if (Number.isNaN(index) || index <= 0 || index > count) throw new Error("--shard <index> must be a positive number less than or equal to <count>");
3562
3458
  resolved.shard = {
@@ -3567,21 +3463,12 @@ function resolveConfig$1(vitest, options, viteConfig) {
3567
3463
  if (resolved.standalone && !resolved.watch) throw new Error(`Vitest standalone mode requires --watch`);
3568
3464
  if (resolved.mergeReports && resolved.watch) throw new Error(`Cannot merge reports with --watch enabled`);
3569
3465
  if (resolved.maxWorkers) resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers);
3570
- if (resolved.minWorkers) resolved.minWorkers = resolveInlineWorkerOption(resolved.minWorkers);
3571
- // run benchmark sequentially by default
3572
- resolved.fileParallelism ??= mode !== "benchmark";
3573
- if (!resolved.fileParallelism) {
3574
- // ignore user config, parallelism cannot be implemented without limiting workers
3575
- resolved.maxWorkers = 1;
3576
- resolved.minWorkers = 1;
3577
- }
3578
- if (resolved.maxConcurrency === 0) {
3579
- logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`));
3580
- resolved.maxConcurrency = configDefaults.maxConcurrency;
3581
- }
3466
+ if (resolved.fileParallelism ??= mode !== "benchmark", !resolved.fileParallelism)
3467
+ // ignore user config, parallelism cannot be implemented without limiting workers
3468
+ resolved.maxWorkers = 1;
3469
+ if (resolved.maxConcurrency === 0) logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)), resolved.maxConcurrency = configDefaults.maxConcurrency;
3582
3470
  if (resolved.inspect || resolved.inspectBrk) {
3583
- const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread;
3584
- const isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3471
+ const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread, isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3585
3472
  if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
3586
3473
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3587
3474
  throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
@@ -3602,113 +3489,78 @@ function resolveConfig$1(vitest, options, viteConfig) {
3602
3489
  const playwrightChromiumOnly = isPlaywrightChromiumOnly(vitest, resolved);
3603
3490
  // Browser-mode "Playwright + Chromium" only features:
3604
3491
  if (browser.enabled && !playwrightChromiumOnly) {
3605
- const browserConfig = { browser: {
3606
- provider: browser.provider,
3607
- name: browser.name,
3608
- instances: browser.instances?.map((i) => ({ browser: i.browser }))
3609
- } };
3610
- if (resolved.coverage.enabled && resolved.coverage.provider === "v8") throw new Error(`@vitest/coverage-v8 does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
3611
- provider: "playwright",
3612
- instances: [{ browser: "chromium" }]
3613
- } }, null, 2)}\n\n...or change your coverage provider to:\n${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}\n`);
3492
+ const browserConfig = `
3493
+ {
3494
+ browser: {
3495
+ provider: ${browser.provider?.name || "preview"}(),
3496
+ instances: [
3497
+ ${(browser.instances || []).map((i) => `{ browser: '${i.browser}' }`).join(",\n ")}
3498
+ ],
3499
+ },
3500
+ }
3501
+ `.trim(), correctExample = `
3502
+ {
3503
+ browser: {
3504
+ provider: playwright(),
3505
+ instances: [
3506
+ { browser: 'chromium' }
3507
+ ],
3508
+ },
3509
+ }
3510
+ `.trim();
3511
+ if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
3512
+ const coverageExample = `
3513
+ {
3514
+ coverage: {
3515
+ provider: 'istanbul',
3516
+ },
3517
+ }
3518
+ `.trim();
3519
+ throw new Error(`@vitest/coverage-v8 does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or change your coverage provider to:\n${coverageExample}\n`);
3520
+ }
3614
3521
  if (resolved.inspect || resolved.inspectBrk) {
3615
3522
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3616
- throw new Error(`${inspectOption} does not work with\n${JSON.stringify(browserConfig, null, 2)}\n\nUse either:\n${JSON.stringify({ browser: {
3617
- provider: "playwright",
3618
- instances: [{ browser: "chromium" }]
3619
- } }, null, 2)}\n\n...or disable ${inspectOption}\n`);
3523
+ throw new Error(`${inspectOption} does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or disable ${inspectOption}\n`);
3620
3524
  }
3621
3525
  }
3622
- resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter);
3623
- if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3526
+ if (resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter), resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3624
3527
  const reportsDirectory = resolve$1(resolved.root, resolved.coverage.reportsDirectory);
3625
3528
  if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
3626
3529
  }
3627
3530
  if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
3628
- resolved.expect ??= {};
3629
- resolved.deps ??= {};
3630
- resolved.deps.moduleDirectories ??= [];
3631
- resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
3531
+ resolved.expect ??= {}, resolved.deps ??= {}, resolved.deps.moduleDirectories ??= [];
3532
+ const envModuleDirectories = process.env.VITEST_MODULE_DIRECTORIES || process.env.npm_config_VITEST_MODULE_DIRECTORIES;
3533
+ if (envModuleDirectories) resolved.deps.moduleDirectories.push(...envModuleDirectories.split(","));
3534
+ if (resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
3632
3535
  if (!dir.startsWith("/")) dir = `/${dir}`;
3633
3536
  if (!dir.endsWith("/")) dir += "/";
3634
3537
  return normalize(dir);
3635
- });
3636
- if (!resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
3637
- resolved.deps.optimizer ??= {};
3638
- resolved.deps.optimizer.ssr ??= {};
3639
- resolved.deps.optimizer.ssr.enabled ??= true;
3640
- resolved.deps.optimizer.web ??= {};
3641
- resolved.deps.optimizer.web.enabled ??= true;
3642
- resolved.deps.web ??= {};
3643
- resolved.deps.web.transformAssets ??= true;
3644
- resolved.deps.web.transformCss ??= true;
3645
- resolved.deps.web.transformGlobPattern ??= [];
3646
- resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root));
3647
- resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root));
3648
- // Add hard-coded default coverage exclusions. These cannot be overidden by user config.
3649
- // Override original exclude array for cases where user re-uses same object in test.exclude.
3650
- resolved.coverage.exclude = [
3538
+ }), !resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
3539
+ if (resolved.deps.optimizer ??= {}, resolved.deps.optimizer.ssr ??= {}, resolved.deps.optimizer.ssr.enabled ??= false, resolved.deps.optimizer.client ??= {}, resolved.deps.optimizer.client.enabled ??= false, resolved.deps.web ??= {}, resolved.deps.web.transformAssets ??= true, resolved.deps.web.transformCss ??= true, resolved.deps.web.transformGlobPattern ??= [], resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root)), resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root)), resolved.coverage.exclude = [
3651
3540
  ...resolved.coverage.exclude,
3652
3541
  ...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`),
3653
3542
  ...resolved.include,
3654
- resolved.config && slash$1(resolved.config),
3543
+ resolved.config && slash(resolved.config),
3655
3544
  ...configFiles,
3656
- ...workspacesFiles,
3657
3545
  "**/virtual:*",
3658
3546
  "**/__x00__*",
3659
3547
  "**/node_modules/**"
3660
- ].filter((pattern) => pattern != null);
3661
- resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles];
3662
- resolved.server ??= {};
3663
- resolved.server.deps ??= {};
3664
- const deprecatedDepsOptions = [
3665
- "inline",
3666
- "external",
3667
- "fallbackCJS"
3668
- ];
3669
- deprecatedDepsOptions.forEach((option) => {
3670
- if (resolved.deps[option] === void 0) return;
3671
- if (option === "fallbackCJS") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. Use "server.deps.${option}" instead`));
3672
- else {
3673
- const transformMode = resolved.environment === "happy-dom" || resolved.environment === "jsdom" ? "web" : "ssr";
3674
- logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} "deps.${option}" is deprecated. If you rely on vite-node directly, use "server.deps.${option}" instead. Otherwise, consider using "deps.optimizer.${transformMode}.${option === "external" ? "exclude" : "include"}"`));
3675
- }
3676
- if (resolved.server.deps[option] === void 0) resolved.server.deps[option] = resolved.deps[option];
3677
- });
3678
- if (resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3679
- // vitenode will try to import such file with native node,
3680
- // but then our mocker will not work properly
3681
- if (resolved.server.deps.inline !== true) {
3682
- const ssrOptions = viteConfig.ssr;
3683
- if (ssrOptions?.noExternal === true && resolved.server.deps.inline == null) resolved.server.deps.inline = true;
3684
- else {
3685
- resolved.server.deps.inline ??= [];
3686
- resolved.server.deps.inline.push(...extraInlineDeps);
3687
- }
3688
- }
3689
- resolved.server.deps.inlineFiles ??= [];
3690
- resolved.server.deps.inlineFiles.push(...resolved.setupFiles);
3691
- resolved.server.deps.moduleDirectories ??= [];
3692
- resolved.server.deps.moduleDirectories.push(...resolved.deps.moduleDirectories);
3548
+ ].filter((pattern) => pattern != null), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3693
3549
  if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root);
3694
- resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments");
3695
- if (resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3696
- resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0;
3697
- if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) resolved.snapshotFormat.plugins = [];
3550
+ if (resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"), resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3551
+ if (resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0, resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
3552
+ // TODO: support it via separate config (like DiffOptions) or via `Function.toString()`
3553
+ if (resolved.snapshotFormat.plugins = [], typeof resolved.snapshotFormat.compareKeys === "function") throw new TypeError(`"snapshotFormat.compareKeys" function is not supported.`);
3554
+ }
3698
3555
  const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT;
3699
- resolved.snapshotOptions = {
3556
+ if (resolved.snapshotOptions = {
3700
3557
  expand: resolved.expandSnapshotDiff ?? false,
3701
3558
  snapshotFormat: resolved.snapshotFormat || {},
3702
3559
  updateSnapshot: isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new",
3703
3560
  resolveSnapshotPath: options.resolveSnapshotPath,
3704
3561
  snapshotEnvironment: null
3705
- };
3706
- resolved.snapshotSerializers ??= [];
3707
- resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root));
3708
- resolved.forceRerunTriggers.push(...resolved.snapshotSerializers);
3709
- if (options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3710
- resolved.pool ??= "threads";
3711
- if (process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3562
+ }, resolved.snapshotSerializers ??= [], resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)), resolved.forceRerunTriggers.push(...resolved.snapshotSerializers), options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3563
+ if (resolved.pool ??= "threads", process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3712
3564
  ...resolved.poolOptions,
3713
3565
  threads: {
3714
3566
  ...resolved.poolOptions?.threads,
@@ -3719,17 +3571,6 @@ function resolveConfig$1(vitest, options, viteConfig) {
3719
3571
  maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
3720
3572
  }
3721
3573
  };
3722
- if (process.env.VITEST_MIN_THREADS) resolved.poolOptions = {
3723
- ...resolved.poolOptions,
3724
- threads: {
3725
- ...resolved.poolOptions?.threads,
3726
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
3727
- },
3728
- vmThreads: {
3729
- ...resolved.poolOptions?.vmThreads,
3730
- minThreads: Number.parseInt(process.env.VITEST_MIN_THREADS)
3731
- }
3732
- };
3733
3574
  if (process.env.VITEST_MAX_FORKS) resolved.poolOptions = {
3734
3575
  ...resolved.poolOptions,
3735
3576
  forks: {
@@ -3741,51 +3582,16 @@ function resolveConfig$1(vitest, options, viteConfig) {
3741
3582
  maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
3742
3583
  }
3743
3584
  };
3744
- if (process.env.VITEST_MIN_FORKS) resolved.poolOptions = {
3745
- ...resolved.poolOptions,
3746
- forks: {
3747
- ...resolved.poolOptions?.forks,
3748
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
3749
- },
3750
- vmForks: {
3751
- ...resolved.poolOptions?.vmForks,
3752
- minForks: Number.parseInt(process.env.VITEST_MIN_FORKS)
3753
- }
3754
- };
3755
- const poolThreadsOptions = [
3756
- ["threads", "minThreads"],
3757
- ["threads", "maxThreads"],
3758
- ["vmThreads", "minThreads"],
3759
- ["vmThreads", "maxThreads"]
3760
- ];
3585
+ const poolThreadsOptions = [["threads", "maxThreads"], ["vmThreads", "maxThreads"]];
3761
3586
  for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3762
- const poolForksOptions = [
3763
- ["forks", "minForks"],
3764
- ["forks", "maxForks"],
3765
- ["vmForks", "minForks"],
3766
- ["vmForks", "maxForks"]
3767
- ];
3587
+ const poolForksOptions = [["forks", "maxForks"], ["vmForks", "maxForks"]];
3768
3588
  for (const [poolOptionKey, workerOptionKey] of poolForksOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3769
- if (typeof resolved.workspace === "string")
3770
- // if passed down from the CLI and it's relative, resolve relative to CWD
3771
- resolved.workspace = typeof options.workspace === "string" && options.workspace[0] === "." ? resolve$1(process.cwd(), options.workspace) : resolvePath(resolved.workspace, resolved.root);
3772
3589
  if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
3773
- if (resolved.poolMatchGlobs) logger.deprecate("`poolMatchGlobs` is deprecated. Use `test.projects` to define different configurations instead.");
3774
- resolved.poolMatchGlobs = (resolved.poolMatchGlobs || []).map(([glob, pool]) => {
3775
- if (!builtinPools.includes(pool)) pool = resolvePath(pool, resolved.root);
3776
- return [glob, pool];
3777
- });
3778
3590
  if (mode === "benchmark") {
3779
3591
  resolved.benchmark = {
3780
3592
  ...benchmarkConfigDefaults,
3781
3593
  ...resolved.benchmark
3782
- };
3783
- // override test config
3784
- resolved.coverage.enabled = false;
3785
- resolved.typecheck.enabled = false;
3786
- resolved.include = resolved.benchmark.include;
3787
- resolved.exclude = resolved.benchmark.exclude;
3788
- resolved.includeSource = resolved.benchmark.includeSource;
3594
+ }, resolved.coverage.enabled = false, resolved.typecheck.enabled = false, resolved.include = resolved.benchmark.include, resolved.exclude = resolved.benchmark.exclude, resolved.includeSource = resolved.benchmark.includeSource;
3789
3595
  const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean);
3790
3596
  if (reporters.length) resolved.benchmark.reporters = reporters;
3791
3597
  else resolved.benchmark.reporters = ["default"];
@@ -3794,17 +3600,13 @@ function resolveConfig$1(vitest, options, viteConfig) {
3794
3600
  if (options.compare) resolved.benchmark.compare = options.compare;
3795
3601
  if (options.outputJson) resolved.benchmark.outputJson = options.outputJson;
3796
3602
  }
3797
- if (typeof resolved.diff === "string") {
3798
- resolved.diff = resolvePath(resolved.diff, resolved.root);
3799
- resolved.forceRerunTriggers.push(resolved.diff);
3800
- }
3603
+ if (typeof resolved.diff === "string") resolved.diff = resolvePath(resolved.diff, resolved.root), resolved.forceRerunTriggers.push(resolved.diff);
3801
3604
  // the server has been created, we don't need to override vite.server options
3802
3605
  const api = resolveApiServerConfig(options, defaultPort);
3803
- resolved.api = {
3606
+ if (resolved.api = {
3804
3607
  ...api,
3805
3608
  token: crypto.randomUUID()
3806
- };
3807
- if (options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
3609
+ }, options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
3808
3610
  /*
3809
3611
  * Reporters can be defined in many different ways:
3810
3612
  * { reporter: 'json' }
@@ -3833,86 +3635,55 @@ function resolveConfig$1(vitest, options, viteConfig) {
3833
3635
  if (mode !== "benchmark") {
3834
3636
  // @ts-expect-error "reporter" is from CLI, should be absolute to the running directory
3835
3637
  // it is passed down as "vitest --reporter ../reporter.js"
3836
- const reportersFromCLI = resolved.reporter;
3837
- const cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3838
- // ./reporter.js || ../reporter.js, but not .reporters/reporter.js
3839
- if (/^\.\.?\//.test(reporter)) return resolve$1(process.cwd(), reporter);
3840
- return reporter;
3638
+ const reportersFromCLI = resolved.reporter, cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3639
+ return /^\.\.?\//.test(reporter) ? resolve$1(process.cwd(), reporter) : reporter;
3841
3640
  });
3842
3641
  if (cliReporters.length) resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
3843
3642
  }
3844
3643
  if (!resolved.reporters.length) {
3845
- resolved.reporters.push(["default", {}]);
3846
3644
  // also enable github-actions reporter as a default
3847
- if (process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
3645
+ if (resolved.reporters.push(["default", {}]), process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]);
3848
3646
  }
3849
3647
  if (resolved.changed) resolved.passWithNoTests ??= true;
3850
- resolved.css ??= {};
3851
- if (typeof resolved.css === "object") {
3852
- resolved.css.modules ??= {};
3853
- resolved.css.modules.classNameStrategy ??= "stable";
3854
- }
3648
+ if (resolved.css ??= {}, typeof resolved.css === "object") resolved.css.modules ??= {}, resolved.css.modules.classNameStrategy ??= "stable";
3855
3649
  if (resolved.cache !== false) {
3856
3650
  if (resolved.cache && typeof resolved.cache.dir === "string") vitest.logger.deprecate(`"cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache directory. Note caches will be written to "cacheDir\/vitest"`);
3857
3651
  resolved.cache = { dir: viteConfig.cacheDir };
3858
3652
  }
3859
- resolved.sequence ??= {};
3860
- if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
3653
+ if (resolved.sequence ??= {}, resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") {
3861
3654
  const { files, tests } = resolved.sequence.shuffle;
3862
- resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer;
3863
- resolved.sequence.shuffle = tests;
3655
+ resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer, resolved.sequence.shuffle = tests;
3864
3656
  }
3865
3657
  if (!resolved.sequence?.sequencer)
3866
3658
  // CLI flag has higher priority
3867
3659
  resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer;
3868
- resolved.sequence.groupOrder ??= 0;
3869
- resolved.sequence.hooks ??= "stack";
3870
- if (resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
3871
- resolved.typecheck = {
3660
+ if (resolved.sequence.groupOrder ??= 0, resolved.sequence.hooks ??= "stack", resolved.sequence.sequencer === RandomSequencer) resolved.sequence.seed ??= Date.now();
3661
+ if (resolved.typecheck = {
3872
3662
  ...configDefaults.typecheck,
3873
3663
  ...resolved.typecheck
3874
- };
3875
- if (resolved.environmentMatchGlobs) logger.deprecate("\"environmentMatchGlobs\" is deprecated. Use `test.projects` to define different configurations instead.");
3876
- resolved.environmentMatchGlobs = (resolved.environmentMatchGlobs || []).map((i) => [resolve$1(resolved.root, i[0]), i[1]]);
3877
- resolved.typecheck ??= {};
3878
- resolved.typecheck.enabled ??= false;
3879
- if (resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3880
- resolved.browser.enabled ??= false;
3881
- resolved.browser.headless ??= isCI;
3882
- resolved.browser.isolate ??= true;
3883
- resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark";
3884
- // disable in headless mode by default, and if CI is detected
3885
- resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI;
3886
- if (resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
3887
- const isPreview = resolved.browser.provider === "preview";
3888
- if (isPreview && resolved.browser.screenshotFailures === true) {
3889
- console.warn(c.yellow([
3890
- `Browser provider "preview" doesn't support screenshots, `,
3891
- `so "browser.screenshotFailures" option is forcefully disabled. `,
3892
- `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
3893
- ].join("")));
3894
- resolved.browser.screenshotFailures = false;
3895
- } else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
3896
- resolved.browser.viewport ??= {};
3897
- resolved.browser.viewport.width ??= 414;
3898
- resolved.browser.viewport.height ??= 896;
3899
- resolved.browser.locators ??= {};
3900
- resolved.browser.locators.testIdAttribute ??= "data-testid";
3901
- if (resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = "preview";
3902
- resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort };
3664
+ }, resolved.typecheck ??= {}, resolved.typecheck.enabled ??= false, resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3665
+ if (resolved.browser.enabled ??= false, resolved.browser.headless ??= isCI, resolved.browser.isolate ??= true, resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark", resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI, resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
3666
+ if (resolved.browser.viewport ??= {}, resolved.browser.viewport.width ??= 414, resolved.browser.viewport.height ??= 896, resolved.browser.locators ??= {}, resolved.browser.locators.testIdAttribute ??= "data-testid", resolved.browser.enabled && provider === "stackblitz") resolved.browser.provider = void 0;
3667
+ if (typeof resolved.browser.provider === "string") {
3668
+ const source = `@vitest/browser/providers/${resolved.browser.provider}`;
3669
+ throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/guide/browser/config#provider`);
3670
+ }
3671
+ const isPreview = resolved.browser.provider?.name === "preview";
3672
+ if (isPreview && resolved.browser.screenshotFailures === true) console.warn(c.yellow([
3673
+ `Browser provider "preview" doesn't support screenshots, `,
3674
+ `so "browser.screenshotFailures" option is forcefully disabled. `,
3675
+ `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.`
3676
+ ].join(""))), resolved.browser.screenshotFailures = false;
3677
+ else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui;
3903
3678
  // enable includeTaskLocation by default in UI mode
3904
- if (resolved.browser.enabled) {
3679
+ if (resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort }, resolved.browser.enabled) {
3905
3680
  if (resolved.browser.ui) resolved.includeTaskLocation ??= true;
3906
3681
  } else if (resolved.ui) resolved.includeTaskLocation ??= true;
3907
3682
  const htmlReporter = toArray(resolved.reporters).some((reporter) => {
3908
- if (Array.isArray(reporter)) return reporter[0] === "html";
3909
- return false;
3683
+ return Array.isArray(reporter) ? reporter[0] === "html" : false;
3910
3684
  });
3911
3685
  if (htmlReporter) resolved.includeTaskLocation ??= true;
3912
- resolved.testTransformMode ??= {};
3913
- resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3;
3914
- resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4;
3915
- return resolved;
3686
+ return resolved.server ??= {}, resolved.server.deps ??= {}, resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
3916
3687
  }
3917
3688
  function isBrowserEnabled(config) {
3918
3689
  return Boolean(config.browser?.enabled);
@@ -3931,7 +3702,7 @@ function resolveCoverageReporters(configReporters) {
3931
3702
  }
3932
3703
  function isPlaywrightChromiumOnly(vitest, config) {
3933
3704
  const browser = config.browser;
3934
- if (!browser || browser.provider !== "playwright" || !browser.enabled) return false;
3705
+ if (!browser || !browser.provider || browser.provider.name !== "playwright" || !browser.enabled) return false;
3935
3706
  if (browser.name) return browser.name === "chromium";
3936
3707
  if (!browser.instances) return false;
3937
3708
  for (const instance of browser.instances) {
@@ -3948,14 +3719,11 @@ const THRESHOLD_KEYS = [
3948
3719
  "functions",
3949
3720
  "statements",
3950
3721
  "branches"
3951
- ];
3952
- const GLOBAL_THRESHOLDS_KEY = "global";
3953
- const DEFAULT_PROJECT = Symbol.for("default-project");
3722
+ ], GLOBAL_THRESHOLDS_KEY = "global", DEFAULT_PROJECT = Symbol.for("default-project");
3954
3723
  let uniqueId = 0;
3955
3724
  async function getCoverageProvider(options, loader) {
3956
3725
  const coverageModule = await resolveCoverageProviderModule(options, loader);
3957
- if (coverageModule) return coverageModule.getProvider();
3958
- return null;
3726
+ return coverageModule ? coverageModule.getProvider() : null;
3959
3727
  }
3960
3728
  class BaseCoverageProvider {
3961
3729
  ctx;
@@ -3966,12 +3734,12 @@ class BaseCoverageProvider {
3966
3734
  coverageFiles = /* @__PURE__ */ new Map();
3967
3735
  pendingPromises = [];
3968
3736
  coverageFilesDirectory;
3737
+ roots = [];
3969
3738
  _initialize(ctx) {
3970
- this.ctx = ctx;
3971
- if (ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
3739
+ if (this.ctx = ctx, ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
3972
3740
  Running mixed versions is not supported and may lead to bugs
3973
3741
  Update your dependencies and make sure the versions match.`));
3974
- const config = ctx.config.coverage;
3742
+ const config = ctx._coverageOptions;
3975
3743
  this.options = {
3976
3744
  ...coverageConfigDefaults,
3977
3745
  ...config,
@@ -3986,47 +3754,46 @@ Update your dependencies and make sure the versions match.`));
3986
3754
  statements: config.thresholds["100"] ? 100 : config.thresholds.statements
3987
3755
  }
3988
3756
  };
3989
- const shard = this.ctx.config.shard;
3990
- const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
3991
- this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory);
3757
+ const shard = this.ctx.config.shard, tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
3758
+ // If --project filter is set pick only roots of resolved projects
3759
+ this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory), this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
3992
3760
  }
3993
3761
  /**
3994
3762
  * Check if file matches `coverage.include` but not `coverage.exclude`
3995
3763
  */
3996
- isIncluded(_filename) {
3997
- const filename = slash(_filename);
3998
- const cacheHit = this.globCache.get(filename);
3764
+ isIncluded(_filename, root) {
3765
+ const roots = root ? [root] : this.roots, filename = slash(_filename), cacheHit = this.globCache.get(filename);
3999
3766
  if (cacheHit !== void 0) return cacheHit;
4000
3767
  // File outside project root with default allowExternal
4001
- if (this.options.allowExternal === false && !filename.startsWith(this.ctx.config.root)) {
4002
- this.globCache.set(filename, false);
4003
- return false;
4004
- }
4005
- const options = {
4006
- contains: true,
4007
- dot: true,
4008
- cwd: this.ctx.config.root,
4009
- ignore: this.options.exclude
4010
- };
3768
+ if (this.options.allowExternal === false && roots.every((root) => !filename.startsWith(root))) return this.globCache.set(filename, false), false;
4011
3769
  // By default `coverage.include` matches all files, except "coverage.exclude"
4012
3770
  const glob = this.options.include || "**";
4013
- const included = pm.isMatch(filename, glob, options) && existsSync(cleanUrl(filename));
4014
- this.globCache.set(filename, included);
4015
- return included;
3771
+ let included = roots.some((root) => {
3772
+ const options = {
3773
+ contains: true,
3774
+ dot: true,
3775
+ cwd: root,
3776
+ ignore: this.options.exclude
3777
+ };
3778
+ return pm.isMatch(filename, glob, options);
3779
+ });
3780
+ return included &&= existsSync(cleanUrl(filename)), this.globCache.set(filename, included), included;
4016
3781
  }
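A minimal sketch of the multi-root include check above, with the `cleanUrl` query-string handling dropped (`makeIsIncluded` and its parameters are illustrative, not package APIs):

```ts
import { existsSync } from "node:fs";
import pm from "picomatch";

// A file is covered when it matches coverage.include (default "**") under at
// least one project root, is not excluded, and exists on disk; results are
// memoized per filename, and files outside every root are rejected unless
// allowExternal is set.
function makeIsIncluded(roots: string[], include: string | string[] = "**", exclude: string[] = [], allowExternal = false) {
  const cache = new Map<string, boolean>();
  return (filename: string): boolean => {
    const hit = cache.get(filename);
    if (hit !== undefined) return hit;
    if (!allowExternal && roots.every((root) => !filename.startsWith(root))) {
      cache.set(filename, false);
      return false;
    }
    let included = roots.some((root) =>
      pm.isMatch(filename, include, { contains: true, dot: true, cwd: root, ignore: exclude }),
    );
    included &&= existsSync(filename);
    cache.set(filename, included);
    return included;
  };
}
```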
4017
- async getUntestedFiles(testedFiles) {
4018
- if (this.options.include == null) return [];
4019
- let includedFiles = await glob(this.options.include, {
4020
- cwd: this.ctx.config.root,
3782
+ async getUntestedFilesByRoot(testedFiles, include, root) {
3783
+ let includedFiles = await glob(include, {
3784
+ cwd: root,
4021
3785
  ignore: [...this.options.exclude, ...testedFiles.map((file) => slash(file))],
4022
3786
  absolute: true,
4023
3787
  dot: true,
4024
3788
  onlyFiles: true
4025
3789
  });
4026
- // Run again through picomatch as tinyglobby's exclude pattern is different ({ "exclude": ["math"] } should ignore "src/math.ts")
4027
- includedFiles = includedFiles.filter((file) => this.isIncluded(file));
4028
- if (this.ctx.config.changed) includedFiles = (this.ctx.config.related || []).filter((file) => includedFiles.includes(file));
4029
- return includedFiles.map((file) => slash(path.resolve(this.ctx.config.root, file)));
3790
+ if (includedFiles = includedFiles.filter((file) => this.isIncluded(file, root)), this.ctx.config.changed) includedFiles = (this.ctx.config.related || []).filter((file) => includedFiles.includes(file));
3791
+ return includedFiles.map((file) => slash(path.resolve(root, file)));
3792
+ }
3793
+ async getUntestedFiles(testedFiles) {
3794
+ if (this.options.include == null) return [];
3795
+ const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include), matrix = await Promise.all(this.roots.map(rootMapper));
3796
+ return matrix.flatMap((files) => files);
4030
3797
  }
4031
3798
  createCoverageMap() {
4032
3799
  throw new Error("BaseReporter's createCoverageMap was not overwritten");
@@ -4051,56 +3818,37 @@ Update your dependencies and make sure the versions match.`));
  force: true,
  maxRetries: 10
  });
- await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
- this.coverageFiles = /* @__PURE__ */ new Map();
- this.pendingPromises = [];
+ await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
  }
- onAfterSuiteRun({ coverage, transformMode, projectName, testFiles }) {
+ onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
  if (!coverage) return;
- if (transformMode !== "web" && transformMode !== "ssr" && transformMode !== "browser") throw new Error(`Invalid transform mode: ${transformMode}`);
  let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
- if (!entry) {
- entry = {
- web: {},
- ssr: {},
- browser: {}
- };
- this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
- }
- const testFilenames = testFiles.join();
- const filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
+ if (!entry) entry = {}, this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
+ const testFilenames = testFiles.join(), filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
  // If there's a result from previous run, overwrite it
- entry[transformMode][testFilenames] = filename;
+ entry[environment] ??= {}, entry[environment][testFilenames] = filename;
  const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
  this.pendingPromises.push(promise);
  }
  async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
  let index = 0;
  const total = this.pendingPromises.length;
- await Promise.all(this.pendingPromises);
- this.pendingPromises = [];
- for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [transformMode, coverageByTestfiles] of Object.entries(coveragePerProject)) {
- const filenames = Object.values(coverageByTestfiles);
- const project = this.ctx.getProjectByName(projectName);
+ await Promise.all(this.pendingPromises), this.pendingPromises = [];
+ for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [environment, coverageByTestfiles] of Object.entries(coveragePerProject)) {
+ const filenames = Object.values(coverageByTestfiles), project = this.ctx.getProjectByName(projectName);
  for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
- if (onDebug.enabled) {
- index += chunk.length;
- onDebug(`Reading coverage results ${index}/${total}`);
- }
+ if (onDebug.enabled) index += chunk.length, onDebug(`Reading coverage results ${index}/${total}`);
  await Promise.all(chunk.map(async (filename) => {
- const contents = await promises$1.readFile(filename, "utf-8");
- const coverage = JSON.parse(contents);
+ const contents = await promises$1.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
  onFileRead(coverage);
  }));
  }
- await onFinished(project, transformMode);
+ await onFinished(project, environment);
  }
  }
  async cleanAfterRun() {
- this.coverageFiles = /* @__PURE__ */ new Map();
- await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
  // Remove empty reports directory, e.g. when only text-reporter is used
- if (readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
+ if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises$1.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
  }
  async onTestFailure() {
  if (!this.options.reportOnFailure) await this.cleanAfterRun();
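
onAfterSuiteRun above switches from the fixed web/ssr/browser buckets to per-environment buckets that are created on first use. A small sketch of that data shape (names illustrative):

type CoverageFilesByEnvironment = Record<string, Record<string, string>>;

function recordCoverageFile(
  entry: CoverageFilesByEnvironment,
  environment: string,
  testFilenames: string,
  coverageFilePath: string,
): void {
  // Create the environment bucket lazily; a later run of the same test files
  // overwrites the previous result, as in the hunk above.
  (entry[environment] ??= {})[testFilenames] = coverageFilePath;
}
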
@@ -4113,11 +3861,9 @@ Update your dependencies and make sure the versions match.`));
  }
  async reportThresholds(coverageMap, allTestsRun) {
  const resolvedThresholds = this.resolveThresholds(coverageMap);
- this.checkThresholds(resolvedThresholds);
- if (this.options.thresholds?.autoUpdate && allTestsRun) {
- if (!this.ctx.server.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
- const configFilePath = this.ctx.server.config.configFile;
- const configModule = await this.parseConfigModule(configFilePath);
+ if (this.checkThresholds(resolvedThresholds), this.options.thresholds?.autoUpdate && allTestsRun) {
+ if (!this.ctx.vite.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
+ const configFilePath = this.ctx.vite.config.configFile, configModule = await this.parseConfigModule(configFilePath);
  await this.updateThresholds({
  thresholds: resolvedThresholds,
  configurationFile: configModule,
@@ -4131,16 +3877,10 @@ Update your dependencies and make sure the versions match.`));
  * for specific files defined by glob pattern or global for all other files.
  */
  resolveThresholds(coverageMap) {
- const resolvedThresholds = [];
- const files = coverageMap.files();
- const globalCoverageMap = this.createCoverageMap();
+ const resolvedThresholds = [], files = coverageMap.files(), globalCoverageMap = this.createCoverageMap();
  for (const key of Object.keys(this.options.thresholds)) {
  if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) continue;
- const glob = key;
- const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
- const globCoverageMap = this.createCoverageMap();
- const matcher = pm(glob);
- const matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
+ const glob = key, globThresholds = resolveGlobThresholds(this.options.thresholds[glob]), globCoverageMap = this.createCoverageMap(), matcher = pm(glob), matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
  for (const file of matchingFiles) {
  const fileCoverage = coverageMap.fileCoverageFor(file);
  globCoverageMap.addFileCoverage(fileCoverage);
@@ -4156,7 +3896,7 @@ Update your dependencies and make sure the versions match.`));
  const fileCoverage = coverageMap.fileCoverageFor(file);
  globalCoverageMap.addFileCoverage(fileCoverage);
  }
- resolvedThresholds.unshift({
+ return resolvedThresholds.unshift({
  name: GLOBAL_THRESHOLDS_KEY,
  coverageMap: globalCoverageMap,
  thresholds: {
@@ -4165,8 +3905,7 @@ Update your dependencies and make sure the versions match.`));
  lines: this.options.thresholds?.lines,
  statements: this.options.thresholds?.statements
  }
- });
- return resolvedThresholds;
+ }), resolvedThresholds;
  }
  /**
  * Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
@@ -4204,8 +3943,7 @@ Update your dependencies and make sure the versions match.`));
  this.ctx.logger.error(errorMessage);
  }
  } else {
- const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
- const absoluteThreshold = threshold * -1;
+ const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered, absoluteThreshold = threshold * -1;
  if (uncovered > absoluteThreshold) {
  process.exitCode = 1;
  /**
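
A worked example of the negative-threshold branch above (numbers illustrative): a negative threshold is read as the maximum number of uncovered items allowed before the run fails.

const threshold = -10;   // allow at most 10 uncovered lines
const total = 120;       // summary.data.lines.total
const covered = 105;     // summary.data.lines.covered

const uncovered = total - covered;        // 15
const absoluteThreshold = threshold * -1; // 10

if (uncovered > absoluteThreshold) {
  // 15 uncovered > 10 allowed, so the run fails
  process.exitCode = 1;
}
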
@@ -4229,8 +3967,7 @@ Update your dependencies and make sure the versions match.`));
  const config = resolveConfig(configurationFile);
  assertConfigurationModule(config);
  for (const { coverageMap, thresholds, name } of allThresholds) {
- const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()];
- const thresholdsToUpdate = [];
+ const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()], thresholdsToUpdate = [];
  for (const key of THRESHOLD_KEYS) {
  const threshold = thresholds[key] ?? 100;
  /**
@@ -4241,8 +3978,7 @@ Update your dependencies and make sure the versions match.`));
  const actual = Math.min(...summaries.map((summary) => summary[key].pct));
  if (actual > threshold) thresholdsToUpdate.push([key, actual]);
  } else {
- const absoluteThreshold = threshold * -1;
- const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
+ const absoluteThreshold = threshold * -1, actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
  if (actual < absoluteThreshold) {
  // If everything was covered, set new threshold to 100% (since a threshold of 0 would be considered as 0%)
  const updatedThreshold = actual === 0 ? 100 : actual * -1;
@@ -4252,16 +3988,17 @@ Update your dependencies and make sure the versions match.`));
  }
  if (thresholdsToUpdate.length === 0) continue;
  updatedThresholds = true;
- for (const [threshold, newValue] of thresholdsToUpdate) if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = newValue;
- else {
- const glob = config.test.coverage.thresholds[name];
- glob[threshold] = newValue;
+ const thresholdFormatter = typeof this.options.thresholds?.autoUpdate === "function" ? this.options.thresholds?.autoUpdate : (value) => value;
+ for (const [threshold, newValue] of thresholdsToUpdate) {
+ const formattedValue = thresholdFormatter(newValue);
+ if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = formattedValue;
+ else {
+ const glob = config.test.coverage.thresholds[name];
+ glob[threshold] = formattedValue;
+ }
  }
  }
- if (updatedThresholds) {
- this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
- onUpdate();
- }
+ if (updatedThresholds) this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds."), onUpdate();
  }
  async mergeReports(coverageMaps) {
  const coverageMap = this.createCoverageMap();
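
The new thresholdFormatter above implies that coverage.thresholds.autoUpdate may now be a function that formats the freshly measured value before it is written back to the config file. A hedged config example, assuming that shape:

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        lines: 80,
        functions: 80,
        branches: 80,
        statements: 80,
        // Round updated thresholds down instead of storing e.g. 83.333333
        autoUpdate: (value: number) => Math.floor(value * 10) / 10,
      },
    },
  },
});
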
@@ -4273,10 +4010,8 @@ Update your dependencies and make sure the versions match.`));
  }
  toSlices(array, size) {
  return array.reduce((chunks, item) => {
- const index = Math.max(0, chunks.length - 1);
- const lastChunk = chunks[index] || [];
- chunks[index] = lastChunk;
- if (lastChunk.length >= size) chunks.push([item]);
+ const index = Math.max(0, chunks.length - 1), lastChunk = chunks[index] || [];
+ if (chunks[index] = lastChunk, lastChunk.length >= size) chunks.push([item]);
  else lastChunk.push(item);
  return chunks;
  }, []);
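
For reference, toSlices above only splits the coverage file list into chunks of at most size entries for processingConcurrency. A retyped sketch with an example result (values illustrative):

function toSlices<T>(array: T[], size: number): T[][] {
  // Same reducer as above, with explicit types
  return array.reduce<T[][]>((chunks, item) => {
    const index = Math.max(0, chunks.length - 1);
    const lastChunk = (chunks[index] ??= []);
    if (lastChunk.length >= size) chunks.push([item]);
    else lastChunk.push(item);
    return chunks;
  }, []);
}

// toSlices(["a.json", "b.json", "c.json"], 2) -> [["a.json", "b.json"], ["c.json"]]
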
@@ -4285,23 +4020,25 @@ Update your dependencies and make sure the versions match.`));
  const servers = [...ctx.projects.map((project) => ({
  root: project.config.root,
  isBrowserEnabled: project.isBrowserEnabled(),
- vitenode: project.vitenode
- })), {
+ vite: project.vite
+ })), (
+ // Check core last as it will match all files anyway
+ {
  root: ctx.config.root,
- vitenode: ctx.vitenode,
+ vite: ctx.vite,
  isBrowserEnabled: ctx.getRootProject().isBrowserEnabled()
- }];
+ })];
  return async function transformFile(filename) {
  let lastError;
- for (const { root, vitenode, isBrowserEnabled } of servers) {
+ for (const { root, vite, isBrowserEnabled } of servers) {
  // On Windows root doesn't start with "/" while filenames do
  if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) continue;
  if (isBrowserEnabled) {
- const result = await vitenode.transformRequest(filename, void 0, "web").catch(() => null);
+ const result = await vite.environments.client.transformRequest(filename).catch(() => null);
  if (result) return result;
  }
  try {
- return await vitenode.transformRequest(filename);
+ return await vite.environments.ssr.transformRequest(filename);
  } catch (error) {
  lastError = error;
  }
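
The loop above replaces vitenode.transformRequest with Vite's Environment API. A simplified sketch of the same fallback order for a single dev server (assuming a Vite version that exposes server.environments; isBrowserEnabled is taken as given, as above):

import type { TransformResult, ViteDevServer } from "vite";

async function transformForCoverage(
  server: ViteDevServer,
  filename: string,
  isBrowserEnabled: boolean,
): Promise<TransformResult | null> {
  if (isBrowserEnabled) {
    // Browser projects are transformed by the "client" environment first
    const result = await server.environments.client.transformRequest(filename).catch(() => null);
    if (result) return result;
  }
  // Everything else (and browser fallbacks) goes through the "ssr" environment
  return server.environments.ssr.transformRequest(filename);
}
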
@@ -4315,14 +4052,12 @@ Update your dependencies and make sure the versions match.`));
  * Narrow down `unknown` glob thresholds to resolved ones
  */
  function resolveGlobThresholds(thresholds) {
- if (!thresholds || typeof thresholds !== "object") return {};
- if (100 in thresholds && thresholds[100] === true) return {
+ return !thresholds || typeof thresholds !== "object" ? {} : 100 in thresholds && thresholds[100] === true ? {
  lines: 100,
  branches: 100,
  functions: 100,
  statements: 100
- };
- return {
+ } : {
  lines: "lines" in thresholds && typeof thresholds.lines === "number" ? thresholds.lines : void 0,
  branches: "branches" in thresholds && typeof thresholds.branches === "number" ? thresholds.branches : void 0,
  functions: "functions" in thresholds && typeof thresholds.functions === "number" ? thresholds.functions : void 0,
@@ -4348,8 +4083,7 @@ function resolveConfig(configModule) {
  if (config) return config;
  // "export default mergeConfig(..., defineConfig(...))"
  if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
- config = resolveMergeConfig(mod);
- if (config) return config;
+ if (config = resolveMergeConfig(mod), config) return config;
  }
  } catch (error) {
  // Reduce magicast's verbose errors to readable ones
@@ -4378,4 +4112,4 @@ function resolveMergeConfig(mod) {
  }
  }
 
- export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, isBrowserEnabled as d, groupBy as e, getCoverageProvider as f, getFilePoolName as g, hash as h, isPackageExists as i, createPool as j, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };
+ export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, createFetchModuleFunction as d, isBrowserEnabled as e, groupBy as f, getFilePoolName as g, hash as h, isPackageExists as i, getCoverageProvider as j, createPool as k, normalizeResolvedIdToUrl as n, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };
+ export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, createFetchModuleFunction as d, isBrowserEnabled as e, groupBy as f, getFilePoolName as g, hash as h, isPackageExists as i, getCoverageProvider as j, createPool as k, normalizeResolvedIdToUrl as n, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };