vitest 4.0.0-beta.10 → 4.0.0-beta.12

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (69)
  1. package/LICENSE.md +85 -101
  2. package/dist/browser.d.ts +10 -6
  3. package/dist/browser.js +8 -5
  4. package/dist/chunks/{benchmark.CJUa-Hsa.js → benchmark.DHKMYAts.js} +2 -2
  5. package/dist/chunks/{browser.d.yFAklsD1.d.ts → browser.d.D9YV3JvA.d.ts} +1 -1
  6. package/dist/chunks/{cac.DCxo_nSu.js → cac.r1gel_VZ.js} +18 -11
  7. package/dist/chunks/{cli-api.BJJXh9BV.js → cli-api.CpywZzJV.js} +153 -176
  8. package/dist/chunks/{config.d.B_LthbQq.d.ts → config.d.DGazh2r6.d.ts} +3 -1
  9. package/dist/chunks/{console.7h5kHUIf.js → console.CTJL2nuH.js} +4 -6
  10. package/dist/chunks/{coverage.BCU-r2QL.js → coverage.CiB0fs_7.js} +57 -79
  11. package/dist/chunks/{creator.08Gi-vCA.js → creator.DfXDsUyL.js} +6 -8
  12. package/dist/chunks/{global.d.BK3X7FW1.d.ts → global.d.BcFPD2LN.d.ts} +0 -13
  13. package/dist/chunks/{globals.DG-S3xFe.js → globals.DC4ntO86.js} +8 -6
  14. package/dist/chunks/{index.DIWhzsUh.js → index.Bt-upxGS.js} +6 -12
  15. package/dist/chunks/{index.BIP7prJq.js → index.CHrBLuEH.js} +94 -103
  16. package/dist/chunks/{index.X0nbfr6-.js → index.Dc3xnDvT.js} +48 -289
  17. package/dist/chunks/{index.CMfqw92x.js → index.Dnl38iQ_.js} +2 -2
  18. package/dist/chunks/{index.BjKEiSn0.js → index.uLUz1RDt.js} +3 -3
  19. package/dist/chunks/{inspector.CvQD-Nie.js → inspector.Br76Q2Mb.js} +1 -4
  20. package/dist/chunks/{moduleRunner.d.D9nBoC4p.d.ts → moduleRunner.d.CeYc7nZ0.d.ts} +1 -1
  21. package/dist/chunks/{node.CyipiPvJ.js → node.BwAWWjHZ.js} +3 -4
  22. package/dist/chunks/{plugin.d.BMVSnsGV.d.ts → plugin.d.XreRXLXS.d.ts} +1 -1
  23. package/dist/chunks/{reporters.d.BUWjmRYq.d.ts → reporters.d.CJVTaaWb.d.ts} +54 -13
  24. package/dist/chunks/{resolveSnapshotEnvironment.Bkht6Yor.js → resolveSnapshotEnvironment.BsJpmVZR.js} +7 -8
  25. package/dist/chunks/{rpc.BKr6mtxz.js → rpc.cD77ENhU.js} +13 -14
  26. package/dist/chunks/{setup-common.uiMcU3cv.js → setup-common.BewgbkTd.js} +6 -6
  27. package/dist/chunks/{startModuleRunner.p67gbNo9.js → startModuleRunner.DPBo3mme.js} +65 -56
  28. package/dist/chunks/{test.BiqSKISg.js → test.CTuWuHYH.js} +7 -7
  29. package/dist/chunks/{typechecker.DB-fIMaH.js → typechecker.BfOQ86_a.js} +624 -14
  30. package/dist/chunks/{utils.D2R2NiOH.js → utils.CG9h5ccR.js} +2 -5
  31. package/dist/chunks/{vi.ZPgvtBao.js → vi.B2--mG9U.js} +38 -145
  32. package/dist/{worker.js → chunks/worker.DVTUM2IW.js} +71 -42
  33. package/dist/chunks/{worker.d.BDsXGkwh.d.ts → worker.d.buwuBpBt.d.ts} +2 -77
  34. package/dist/cli.js +7 -5
  35. package/dist/config.d.ts +6 -6
  36. package/dist/coverage.d.ts +5 -5
  37. package/dist/coverage.js +4 -5
  38. package/dist/environments.js +1 -1
  39. package/dist/index.d.ts +8 -8
  40. package/dist/index.js +8 -6
  41. package/dist/module-evaluator.d.ts +3 -3
  42. package/dist/module-evaluator.js +11 -13
  43. package/dist/module-runner.js +5 -5
  44. package/dist/node.d.ts +14 -11
  45. package/dist/node.js +18 -14
  46. package/dist/reporters.d.ts +5 -5
  47. package/dist/reporters.js +7 -5
  48. package/dist/runners.d.ts +1 -1
  49. package/dist/runners.js +9 -7
  50. package/dist/snapshot.js +3 -3
  51. package/dist/suite.js +4 -3
  52. package/dist/{chunks/base.Cjha6usc.js → worker-base.js} +104 -32
  53. package/dist/{chunks/vm.Ca0Y0W5f.js → worker-vm.js} +81 -31
  54. package/dist/workers/runVmTests.js +14 -11
  55. package/package.json +26 -26
  56. package/browser.d.ts +0 -1
  57. package/dist/chunks/moduleTransport.I-bgQy0S.js +0 -19
  58. package/dist/chunks/resolver.Bx6lE0iq.js +0 -119
  59. package/dist/chunks/utils.C2YI6McM.js +0 -52
  60. package/dist/chunks/worker.d.BNcX_2mH.d.ts +0 -8
  61. package/dist/workers/forks.js +0 -67
  62. package/dist/workers/threads.js +0 -55
  63. package/dist/workers/vmForks.js +0 -48
  64. package/dist/workers/vmThreads.js +0 -38
  65. package/dist/workers.d.ts +0 -38
  66. package/dist/workers.js +0 -48
  67. package/execute.d.ts +0 -1
  68. package/utils.d.ts +0 -1
  69. package/workers.d.ts +0 -1
@@ -185,6 +185,7 @@ interface SerializedConfig {
  providerOptions: {
  actionTimeout?: number;
  };
+ trace: BrowserTraceViewMode;
  trackUnhandledErrors: boolean;
  };
  standalone: boolean;
@@ -210,5 +211,6 @@ type RuntimeConfig = Pick<SerializedConfig, "allowOnly" | "testTimeout" | "hookT
  };
  };
  type RuntimeOptions = Partial<RuntimeConfig>;
+ type BrowserTraceViewMode = "on" | "off" | "on-first-retry" | "on-all-retries" | "retain-on-failure";
 
- export type { FakeTimerInstallOpts as F, RuntimeOptions as R, SerializedCoverageConfig as S, SerializedConfig as a, RuntimeConfig as b };
+ export type { BrowserTraceViewMode as B, FakeTimerInstallOpts as F, RuntimeOptions as R, SerializedCoverageConfig as S, SerializedConfig as a, RuntimeConfig as b };
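The two hunks above are the most user-visible change in this release: the serialized browser config gains a Playwright-style trace mode. A minimal TypeScript sketch of the shapes involved — the union is copied from the hunk, while the object form with `mode` and `tracesDir` is inferred from the resolveConfig hunk further down, so treat those field names as assumptions rather than documented API:

    // Union copied verbatim from the .d.ts hunk above.
    type BrowserTraceViewMode = "on" | "off" | "on-first-retry" | "on-all-retries" | "retain-on-failure";

    // Assumed object form: later in this diff, resolveConfig$1 turns a bare string
    // (or a missing value) into `{ mode }`, defaulting to "off", and resolves
    // `tracesDir` against the project root when it is set.
    interface BrowserTraceOptions {
      mode: BrowserTraceViewMode;
      tracesDir?: string;
    }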
@@ -1,10 +1,10 @@
  import { Console } from 'node:console';
  import { relative } from 'node:path';
  import { Writable } from 'node:stream';
- import { getSafeTimers } from '@vitest/utils';
+ import { getSafeTimers } from '@vitest/utils/timers';
  import c from 'tinyrainbow';
  import { R as RealDate } from './date.-jtEtIeV.js';
- import { g as getWorkerState } from './utils.D2R2NiOH.js';
+ import { g as getWorkerState } from './utils.CG9h5ccR.js';
 
  const UNKNOWN_TEST_ID = "__vitest__unknown_test__";
  function getTaskIdByStack(root) {
@@ -78,7 +78,7 @@ function createCustomConsole(defaultState) {
  if (state().config.printConsoleTrace) {
  const limit = Error.stackTraceLimit;
  Error.stackTraceLimit = limit + 6;
- const stack = (/* @__PURE__ */ new Error("STACK_TRACE")).stack, trace = stack?.split("\n").slice(7).join("\n");
+ const trace = (/* @__PURE__ */ new Error("STACK_TRACE")).stack?.split("\n").slice(7).join("\n");
  Error.stackTraceLimit = limit, buffer.push([data, trace]);
  } else buffer.push([data, void 0]);
  schedule(id), callback();
@@ -96,9 +96,7 @@ function createCustomConsole(defaultState) {
  const limit = Error.stackTraceLimit;
  Error.stackTraceLimit = limit + 6;
  const stack = (/* @__PURE__ */ new Error("STACK_TRACE")).stack?.split("\n");
- Error.stackTraceLimit = limit;
- const isTrace = stack?.some((line) => line.includes("at Console.trace"));
- if (isTrace) buffer.push([data, void 0]);
+ if (Error.stackTraceLimit = limit, stack?.some((line) => line.includes("at Console.trace"))) buffer.push([data, void 0]);
  else {
  const trace = stack?.slice(7).join("\n");
  Error.stackTraceLimit = limit, buffer.push([data, trace]);
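Both console hunks lean on the same trick for attributing console output to a caller: temporarily raise `Error.stackTraceLimit`, capture a throwaway error, and drop the frames that belong to the console wrapper itself. A standalone sketch of the pattern — the frame counts (6 and 7) are specific to Vitest's wrapper depth, so treat them as illustrative:

    // Capture the caller's stack without the wrapper's own frames.
    function captureCallerTrace(extraFrames = 6, skipLines = 7): string | undefined {
      const limit = Error.stackTraceLimit;
      Error.stackTraceLimit = limit + extraFrames; // make room for the deeper capture
      const stack = new Error("STACK_TRACE").stack;
      Error.stackTraceLimit = limit; // restore the global setting
      return stack?.split("\n").slice(skipLines).join("\n");
    }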
@@ -1,6 +1,6 @@
  import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
  import path, { win32, dirname, join, resolve } from 'node:path';
- import { isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, createDefer, slash, shuffle, toArray } from '@vitest/utils';
+ import { isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, createDefer, slash, shuffle, toArray } from '@vitest/utils/helpers';
  import { isAbsolute, join as join$1, dirname as dirname$1, resolve as resolve$1, relative, normalize } from 'pathe';
  import pm from 'picomatch';
  import { glob } from 'tinyglobby';
@@ -20,13 +20,13 @@ import { a as isWindows } from './env.D4Lgay0q.js';
  import * as nodeos from 'node:os';
  import nodeos__default, { tmpdir } from 'node:os';
  import { isatty } from 'node:tty';
+ import { rootDir } from '../path.js';
  import EventEmitter from 'node:events';
  import { c as createBirpc } from './index.Bgo3tNWt.js';
  import Tinypool$1, { Tinypool } from 'tinypool';
- import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.DB-fIMaH.js';
+ import { w as wrapSerializableConfig, a as Typechecker } from './typechecker.BfOQ86_a.js';
  import { MessageChannel } from 'node:worker_threads';
  import { hasFailed } from '@vitest/runner/utils';
- import { rootDir } from '../path.js';
  import { isCI, provider } from 'std-env';
  import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js';
 
@@ -46,7 +46,7 @@ function escapeRegExp(s) {
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }
  function wildcardPatternToRegExp(pattern) {
- const negated = pattern.startsWith("!");
+ const negated = pattern[0] === "!";
  if (negated) pattern = pattern.slice(1);
  let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
  if (negated) regexp = `(?!${regexp})`;
@@ -2403,17 +2403,17 @@ async function groupFilesByEnv(files) {
  if (envOptionsJson?.endsWith("*/"))
  // Trim closing Docblock characters the above regex might have captured
  envOptionsJson = envOptionsJson.slice(0, -2);
- const envOptions = JSON.parse(envOptionsJson || "null"), envKey = env === "happy-dom" ? "happyDOM" : env, environment = {
- name: env,
- options: envOptions ? { [envKey]: envOptions } : null
- };
+ const envOptions = JSON.parse(envOptionsJson || "null");
  return {
  file: {
  filepath,
  testLocations: testLines
  },
  project,
- environment
+ environment: {
+ name: env,
+ options: envOptions ? { [env === "happy-dom" ? "happyDOM" : env]: envOptions } : null
+ }
  };
  }));
  return groupBy(filesWithEnv, ({ environment }) => environment.name);
@@ -2497,7 +2497,7 @@ function inlineSourceMap(result) {
  const sourceMap = { ...map };
  // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
  // so that debuggers can be set to break on first line
- if (sourceMap.mappings.startsWith(";")) sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
+ if (sourceMap.mappings[0] === ";") sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
  return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
  }
  function genSourceMapUrl(map) {
@@ -2609,8 +2609,8 @@ function createMethodsRPC(project, options = {}) {
  async transform(id) {
  const environment = project.vite.environments.__vitest_vm__;
  if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
- const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id)), result = await environment.transformRequest(url).catch(handleRollupError);
- return { code: result?.code };
+ const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id));
+ return { code: (await environment.transformRequest(url).catch(handleRollupError))?.code };
  },
  async onQueued(file) {
  if (options.collect) ctx.state.collectFiles(project, [file]);
@@ -2675,20 +2675,18 @@ function createChildProcessChannel$1(project, collect = false) {
  },
  timeout: -1
  });
- project.vitest.onCancel((reason) => rpc.onCancel(reason));
- const channel = {
+ return project.vitest.onCancel((reason) => rpc.onCancel(reason)), {
  onMessage: (callback) => emitter.on(events.message, callback),
  postMessage: (message) => emitter.emit(events.response, message),
  onClose: () => {
  emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
  }
  };
- return channel;
  }
  function createForksPool(vitest, { execArgv, env }, specifications) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/forks.js"), options = {
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
  runtime: "child_process",
- filename: resolve(vitest.distPath, "worker.js"),
+ filename: resolve(vitest.distPath, "worker-base.js"),
  teardown: "teardown",
  maxThreads,
  minThreads,
@@ -2706,7 +2704,6 @@ function createForksPool(vitest, { execArgv, env }, specifications) {
  vitest.state.clearFiles(project, paths);
  const channel = createChildProcessChannel$1(project, name === "collect"), workerId = ++id, data = {
  pool: "forks",
- worker,
  config,
  files,
  invalidates,
@@ -2782,19 +2779,17 @@ function createWorkerChannel$1(project, collect) {
  },
  timeout: -1
  });
- project.vitest.onCancel((reason) => rpc.onCancel(reason));
- const onClose = () => {
- port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
- };
- return {
+ return project.vitest.onCancel((reason) => rpc.onCancel(reason)), {
  workerPort,
  port,
- onClose
+ onClose: () => {
+ port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
+ }
  };
  }
  function createThreadsPool(vitest, { execArgv, env }, specifications) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/threads.js"), options = {
- filename: resolve(vitest.distPath, "worker.js"),
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
+ filename: resolve(vitest.distPath, "worker-base.js"),
  teardown: "teardown",
  useAtomics: poolOptions.useAtomics ?? false,
  maxThreads,
@@ -2813,7 +2808,6 @@ function createThreadsPool(vitest, { execArgv, env }, specifications) {
  vitest.state.clearFiles(project, paths);
  const { workerPort, onClose } = createWorkerChannel$1(project, name === "collect"), workerId = ++id, data = {
  pool: "threads",
- worker,
  port: workerPort,
  config,
  files,
@@ -2885,8 +2879,7 @@ function createTypecheckPool(vitest) {
  async function onParseEnd(project, { files, sourceErrors }) {
  const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
  if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
- const processError = !hasFailed(files) && !sourceErrors.length && checker.getExitCode();
- if (processError) {
+ if (!hasFailed(files) && !sourceErrors.length && checker.getExitCode()) {
  const error = new Error(checker.getOutput());
  error.stack = "", vitest.state.catchError(error, "Typecheck Error");
  }
@@ -2929,14 +2922,14 @@
  async function runTests(specs) {
  const specsByProject = groupBy(specs, (spec) => spec.project.name), promises = [];
  for (const name in specsByProject) {
- const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), _p = new Promise((resolve) => {
+ const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), triggered = await new Promise((resolve) => {
  const _i = setInterval(() => {
  if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
  });
  setTimeout(() => {
  resolve(false), clearInterval(_i);
  }, 500).unref();
- }), triggered = await _p;
+ });
  if (project.typechecker && !triggered) {
  const testFiles = project.typechecker.getTestFiles();
  for (const file of testFiles) await vitest._testRun.enqueued(project, file);
@@ -2965,14 +2958,10 @@ function getDefaultThreadsCount(config) {
  function getWorkerMemoryLimit(config, pool) {
  if (pool === "vmForks") {
  const opts = config.poolOptions?.vmForks ?? {};
- if (opts.memoryLimit) return opts.memoryLimit;
- const workers = opts.maxForks ?? getDefaultThreadsCount(config);
- return 1 / workers;
+ return opts.memoryLimit ? opts.memoryLimit : 1 / (opts.maxForks ?? getDefaultThreadsCount(config));
  } else {
  const opts = config.poolOptions?.vmThreads ?? {};
- if (opts.memoryLimit) return opts.memoryLimit;
- const workers = opts.maxThreads ?? getDefaultThreadsCount(config);
- return 1 / workers;
+ return opts.memoryLimit ? opts.memoryLimit : 1 / (opts.maxThreads ?? getDefaultThreadsCount(config));
  }
  }
  /**
@@ -3011,7 +3000,6 @@ function stringToBytes(input, percentageReference) {
  return null;
  }
 
- const suppressWarningsPath$1 = resolve(rootDir, "./suppress-warnings.cjs");
  function createChildProcessChannel(project, collect) {
  const emitter = new EventEmitter(), events = {
  message: "message",
@@ -3041,28 +3029,23 @@
  },
  timeout: -1
  });
- project.vitest.onCancel((reason) => rpc.onCancel(reason));
- const channel = {
+ return project.vitest.onCancel((reason) => rpc.onCancel(reason)), { channel: {
  onMessage: (callback) => emitter.on(events.message, callback),
  postMessage: (message) => emitter.emit(events.response, message),
  onClose: () => {
  emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
  }
- };
- return { channel };
+ } };
  }
  function createVmForksPool(vitest, { execArgv, env }, specifications) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/vmForks.js"), options = {
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
  runtime: "child_process",
- filename: resolve(vitest.distPath, "worker.js"),
+ filename: resolve(vitest.distPath, "worker-vm.js"),
  maxThreads,
  minThreads,
  env,
  execArgv: [
- "--experimental-import-meta-resolve",
  "--experimental-vm-modules",
- "--require",
- suppressWarningsPath$1,
  ...poolOptions.execArgv ?? [],
  ...execArgv
  ],
@@ -3077,8 +3060,7 @@ function createVmForksPool(vitest, { execArgv, env }, specifications) {
  const paths = files.map((f) => f.filepath);
  vitest.state.clearFiles(project, paths);
  const { channel } = createChildProcessChannel(project, name === "collect"), workerId = ++id, data = {
- pool: "forks",
- worker,
+ pool: "vmForks",
  config,
  files,
  invalidates,
@@ -3108,7 +3090,7 @@
  if (configs.has(project)) return configs.get(project);
  const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
  return configs.set(project, config), config;
- }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), errors = (await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))).filter((r) => r.status === "rejected").map((r) => r.reason);
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
  };
  };
@@ -3125,7 +3107,6 @@ function getMemoryLimit$1(config) {
  return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
  }
 
- const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
  function createWorkerChannel(project, collect) {
  const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
  eventNames: ["onCancel"],
@@ -3147,17 +3128,14 @@ function createWorkerChannel(project, collect) {
  };
  }
  function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, worker = resolve(vitest.distPath, "workers/vmThreads.js"), options = {
- filename: resolve(vitest.distPath, "worker.js"),
+ const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
+ filename: resolve(vitest.distPath, "worker-vm.js"),
  useAtomics: poolOptions.useAtomics ?? false,
  maxThreads,
  minThreads,
  env,
  execArgv: [
- "--experimental-import-meta-resolve",
  "--experimental-vm-modules",
- "--require",
- suppressWarningsPath,
  ...poolOptions.execArgv ?? [],
  ...execArgv
  ],
@@ -3173,7 +3151,6 @@ function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
  vitest.state.clearFiles(project, paths);
  const { workerPort, onClose } = createWorkerChannel(project, name === "collect"), workerId = ++id, data = {
  pool: "vmThreads",
- worker,
  port: workerPort,
  config,
  files: paths,
@@ -3204,7 +3181,7 @@
  if (configs.has(project)) return configs.get(project);
  const config = project.serializedConfig;
  return configs.set(project, config), config;
- }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), results = await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))), errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
+ }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), errors = (await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))).filter((r) => r.status === "rejected").map((r) => r.reason);
  if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
  };
  };
@@ -3221,6 +3198,7 @@ function getMemoryLimit(config) {
  return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
  }
 
+ const suppressWarningsPath = resolve$1(rootDir, "./suppress-warnings.cjs");
  const builtinPools = [
  "forks",
  "threads",
@@ -3243,18 +3221,24 @@ function createPool(ctx) {
  vmThreads: null,
  vmForks: null,
  typescript: null
- }, viteMajor = Number(version.split(".")[0]), potentialConditions = new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
+ }, viteMajor = Number(version.split(".")[0]), conditions = [...new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
  "production",
  "development",
  ...ctx.vite.config.resolve.conditions
- ]), conditions = [...potentialConditions].filter((condition) => {
+ ])].filter((condition) => {
  return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
  }).map((condition) => {
  return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
  }).flatMap((c) => ["--conditions", c]), execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
  async function executeTests(method, files, invalidate) {
  const options = {
- execArgv: [...execArgv, ...conditions],
+ execArgv: [
+ ...execArgv,
+ ...conditions,
+ "--experimental-import-meta-resolve",
+ "--require",
+ suppressWarningsPath
+ ],
  env: {
  TEST: "true",
  VITEST: "true",
@@ -3359,7 +3343,7 @@ class BaseSequencer {
  async shard(files) {
  const { config } = this.ctx, { index, count } = config.shard, [shardStart, shardEnd] = this.calculateShardRange(files.length, index, count);
  return [...files].map((spec) => {
- const fullPath = resolve$1(slash(config.root), slash(spec.moduleId)), specPath = fullPath?.slice(config.root.length);
+ const specPath = resolve$1(slash(config.root), slash(spec.moduleId))?.slice(config.root.length);
  return {
  spec,
  hash: hash("sha1", specPath, "hex")
@@ -3532,7 +3516,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
  const envModuleDirectories = process.env.VITEST_MODULE_DIRECTORIES || process.env.npm_config_VITEST_MODULE_DIRECTORIES;
  if (envModuleDirectories) resolved.deps.moduleDirectories.push(...envModuleDirectories.split(","));
  if (resolved.deps.moduleDirectories = resolved.deps.moduleDirectories.map((dir) => {
- if (!dir.startsWith("/")) dir = `/${dir}`;
+ if (dir[0] !== "/") dir = `/${dir}`;
  if (!dir.endsWith("/")) dir += "/";
  return normalize(dir);
  }), !resolved.deps.moduleDirectories.includes("/node_modules/")) resolved.deps.moduleDirectories.push("/node_modules/");
@@ -3582,10 +3566,8 @@
  maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
  }
  };
- const poolThreadsOptions = [["threads", "maxThreads"], ["vmThreads", "maxThreads"]];
- for (const [poolOptionKey, workerOptionKey] of poolThreadsOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
- const poolForksOptions = [["forks", "maxForks"], ["vmForks", "maxForks"]];
- for (const [poolOptionKey, workerOptionKey] of poolForksOptions) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
+ for (const [poolOptionKey, workerOptionKey] of [["threads", "maxThreads"], ["vmThreads", "maxThreads"]]) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
+ for (const [poolOptionKey, workerOptionKey] of [["forks", "maxForks"], ["vmForks", "maxForks"]]) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
  if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
  if (mode === "benchmark") {
  resolved.benchmark = {
@@ -3601,10 +3583,8 @@
  if (options.outputJson) resolved.benchmark.outputJson = options.outputJson;
  }
  if (typeof resolved.diff === "string") resolved.diff = resolvePath(resolved.diff, resolved.root), resolved.forceRerunTriggers.push(resolved.diff);
- // the server has been created, we don't need to override vite.server options
- const api = resolveApiServerConfig(options, defaultPort);
  if (resolved.api = {
- ...api,
+ ...resolveApiServerConfig(options, defaultPort),
  token: crypto.randomUUID()
  }, options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
  /*
@@ -3679,10 +3659,11 @@
  if (resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort) || { port: defaultBrowserPort }, resolved.browser.enabled) {
  if (resolved.browser.ui) resolved.includeTaskLocation ??= true;
  } else if (resolved.ui) resolved.includeTaskLocation ??= true;
- const htmlReporter = toArray(resolved.reporters).some((reporter) => {
+ if (typeof resolved.browser.trace === "string" || !resolved.browser.trace) resolved.browser.trace = { mode: resolved.browser.trace || "off" };
+ if (resolved.browser.trace.tracesDir != null) resolved.browser.trace.tracesDir = resolvePath(resolved.browser.trace.tracesDir, resolved.root);
+ if (toArray(resolved.reporters).some((reporter) => {
  return Array.isArray(reporter) ? reporter[0] === "html" : false;
- });
- if (htmlReporter) resolved.includeTaskLocation ??= true;
+ })) resolved.includeTaskLocation ??= true;
  return resolved.server ??= {}, resolved.server.deps ??= {}, resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
  }
  function isBrowserEnabled(config) {
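The three `+` lines in the hunk above are where the new `browser.trace` option is normalized. The same logic in isolation, reusing the hypothetical `BrowserTraceOptions` shape sketched near the top of this diff; the helper name is illustrative and not part of the package:

    // Sketch of the normalization performed inside resolveConfig$1.
    function normalizeBrowserTrace(
      trace: BrowserTraceViewMode | BrowserTraceOptions | undefined,
      root: string,
      resolvePath: (path: string, root: string) => string,
    ): BrowserTraceOptions {
      // A bare string (or a missing value) becomes `{ mode }`, defaulting to "off".
      const normalized: BrowserTraceOptions = typeof trace === "string" || !trace ? { mode: trace || "off" } : trace;
      // A relative tracesDir is resolved against the project root.
      if (normalized.tracesDir != null) normalized.tracesDir = resolvePath(normalized.tracesDir, root);
      return normalized;
    }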
@@ -3792,8 +3773,8 @@ Update your dependencies and make sure the versions match.`));
  }
  async getUntestedFiles(testedFiles) {
  if (this.options.include == null) return [];
- const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include), matrix = await Promise.all(this.roots.map(rootMapper));
- return matrix.flatMap((files) => files);
+ const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include);
+ return (await Promise.all(this.roots.map(rootMapper))).flatMap((files) => files);
  }
  createCoverageMap() {
  throw new Error("BaseReporter's createCoverageMap was not overwritten");
@@ -3854,10 +3835,7 @@
  if (!this.options.reportOnFailure) await this.cleanAfterRun();
  }
  async reportCoverage(coverageMap, { allTestsRun }) {
- await this.generateReports(coverageMap || this.createCoverageMap(), allTestsRun);
- // In watch mode we need to preserve the previous results if cleanOnRerun is disabled
- const keepResults = !this.options.cleanOnRerun && this.ctx.config.watch;
- if (!keepResults) await this.cleanAfterRun();
+ if (await this.generateReports(coverageMap || this.createCoverageMap(), allTestsRun), !(!this.options.cleanOnRerun && this.ctx.config.watch)) await this.cleanAfterRun();
  }
  async reportThresholds(coverageMap, allTestsRun) {
  const resolvedThresholds = this.resolveThresholds(coverageMap);
@@ -2,11 +2,12 @@ import { existsSync, writeFileSync, readFileSync } from 'node:fs';
  import { mkdir, writeFile } from 'node:fs/promises';
  import { resolve, dirname, relative } from 'node:path';
  import { detectPackageManager, installPackage } from './index.D3XRDfWc.js';
- import { p as prompt, f as findUp } from './index.X0nbfr6-.js';
+ import { p as prompt, a as any } from './index.Dc3xnDvT.js';
  import { x } from 'tinyexec';
  import c from 'tinyrainbow';
  import { c as configFiles } from './constants.D_Q9UYh-.js';
  import 'node:process';
+ import 'node:module';
  import 'node:url';
  import './_commonjsHelpers.BFTU3MAI.js';
  import 'readline';
@@ -297,12 +298,11 @@ async function generateExampleFiles(framework, lang) {
  // eslint-disable-next-line no-console
  const log = console.log;
  function getProviderOptions() {
- const providers = {
+ return Object.entries({
  playwright: "Playwright relies on Chrome DevTools protocol. Read more: https://playwright.dev",
  webdriverio: "WebdriverIO uses WebDriver protocol. Read more: https://webdriver.io",
  preview: "Preview is useful to quickly run your tests in the browser, but not suitable for CI."
- };
- return Object.entries(providers).map(([provider, description]) => {
+ }).map(([provider, description]) => {
  return {
  title: provider,
  description,
@@ -476,9 +476,7 @@ function getProviderDocsLink(provider) {
  }
  function sort(choices, value) {
  const index = choices.findIndex((i) => i.value === value);
- if (index === -1) return choices;
- const item = choices.splice(index, 1)[0];
- return [item, ...choices];
+ return index === -1 ? choices : [choices.splice(index, 1)[0], ...choices];
  }
  function fail() {
  process.exitCode = 1;
@@ -603,7 +601,7 @@ async function create() {
  if (frameworkPlugin) dependenciesToInstall.push(frameworkPlugin);
  const pkgManager = await detectPackageManager();
  log(), await installPackages(pkgManager, dependenciesToInstall.filter((pkg) => !dependencies[pkg]));
- const rootConfig = await findUp(configFiles, { cwd: process.cwd() });
+ const rootConfig = any(configFiles, { cwd: process.cwd() });
  let scriptCommand = "vitest";
  if (log(), rootConfig) {
  const configPath = resolve(dirname(rootConfig), `vitest.browser.config.${lang}`);
@@ -4,19 +4,6 @@ import { SnapshotState } from '@vitest/snapshot';
  import { B as BenchmarkResult } from './benchmark.d.DAaHLpsq.js';
  import { U as UserConsoleLog } from './environment.d.BsToaxti.js';
 
- declare global {
- namespace Chai {
- interface ContainSubset {
- (expected: any): Assertion;
- }
- interface Assertion {
- containSubset: ContainSubset;
- }
- interface Assert {
- containSubset(val: any, exp: any, msg?: string): void;
- }
- }
- }
  interface SnapshotMatcher<T> {
  <U extends { [P in keyof T] : any }>(snapshot: Partial<U>, hint?: string): void;
  (hint?: string): void;
@@ -1,18 +1,20 @@
  import { g as globalApis } from './constants.D_Q9UYh-.js';
- import { i as index } from './index.CMfqw92x.js';
- import './vi.ZPgvtBao.js';
+ import { i as index } from './index.Dnl38iQ_.js';
+ import './vi.B2--mG9U.js';
  import '@vitest/expect';
  import '@vitest/runner';
  import '@vitest/runner/utils';
- import './utils.D2R2NiOH.js';
- import '@vitest/utils';
- import './_commonjsHelpers.BFTU3MAI.js';
+ import './utils.CG9h5ccR.js';
+ import '@vitest/utils/timers';
  import '@vitest/snapshot';
  import '@vitest/utils/error';
+ import '@vitest/utils/helpers';
  import '@vitest/spy';
+ import '@vitest/utils/offset';
  import '@vitest/utils/source-map';
+ import './_commonjsHelpers.BFTU3MAI.js';
  import './date.-jtEtIeV.js';
- import './benchmark.CJUa-Hsa.js';
+ import './benchmark.DHKMYAts.js';
  import 'expect-type';
  import 'vite/module-runner';
 
@@ -238,10 +238,10 @@ const skipKeys = [
  "parent"
  ];
  function getWindowKeys(global, win, additionalKeys = []) {
- const keysArray = [...additionalKeys, ...KEYS], keys = new Set(keysArray.concat(Object.getOwnPropertyNames(win)).filter((k) => {
+ const keysArray = [...additionalKeys, ...KEYS];
+ return new Set(keysArray.concat(Object.getOwnPropertyNames(win)).filter((k) => {
  return skipKeys.includes(k) ? false : k in global ? keysArray.includes(k) : true;
  }));
- return keys;
  }
  function isClassLikeName(name) {
  return name[0] === name[0].toUpperCase();
@@ -393,23 +393,18 @@ var jsdom = {
  });
  const clearWindowErrors = catchWindowErrors(dom.window);
  dom.window.Buffer = Buffer, dom.window.jsdom = dom;
- // inject web globals if they missing in JSDOM but otherwise available in Nodejs
- // https://nodejs.org/dist/latest/docs/api/globals.html
- const globalNames = [
+ for (const name of [
  "structuredClone",
  "BroadcastChannel",
  "MessageChannel",
  "MessagePort",
  "TextEncoder",
  "TextDecoder"
- ];
- for (const name of globalNames) {
+ ]) {
  const value = globalThis[name];
  if (typeof value !== "undefined" && typeof dom.window[name] === "undefined") dom.window[name] = value;
  }
- // since we are providing Node.js's Fetch API,
- // we also should override other APIs they use
- const overrideGlobals = [
+ for (const name of [
  "fetch",
  "Request",
  "Response",
@@ -418,8 +413,7 @@
  "AbortController",
  "AbortSignal",
  "URL",
  "URLSearchParams"
- ];
- for (const name of overrideGlobals) {
+ ]) {
  const value = globalThis[name];
  if (typeof value !== "undefined") dom.window[name] = value;