vitest 3.0.0-beta.3 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/LICENSE.md +1 -315
  2. package/config.d.ts +2 -0
  3. package/dist/browser.d.ts +3 -3
  4. package/dist/browser.js +1 -1
  5. package/dist/chunks/{base.CQ2VEtuH.js → base.CUDzyU2J.js} +2 -2
  6. package/dist/chunks/{cac.e7qW4xLT.js → cac.DZC9WjGM.js} +8 -8
  7. package/dist/chunks/{cli-api.CWDlED-m.js → cli-api.CmJw5Cd_.js} +920 -84
  8. package/dist/chunks/{config.BTPBhmK5.d.ts → config.BRtC-JeT.d.ts} +6 -0
  9. package/dist/chunks/{console.BYGVloWk.js → console.CN7AiMGV.js} +16 -7
  10. package/dist/chunks/{creator.Ot9GlSGw.js → creator.DztqrnyH.js} +1 -1
  11. package/dist/chunks/{execute.2pr0rHgK.js → execute.BMOaRArH.js} +27 -16
  12. package/dist/chunks/global.CnI8_G5V.d.ts +133 -0
  13. package/dist/chunks/{globals.BFncSRNA.js → globals.C5RQxaV3.js} +2 -2
  14. package/dist/chunks/{index.CkWmZCXU.js → index.BQbxGbG9.js} +1 -1
  15. package/dist/chunks/{index.BBoOXW-l.js → index.CUcwvygK.js} +5 -5
  16. package/dist/chunks/{index.DQboAxJm.js → index.D9C26wCk.js} +1 -0
  17. package/dist/chunks/index.TKSL1HjN.js +2460 -0
  18. package/dist/chunks/{reporters.DCiyjXOg.d.ts → reporters.Y8BYiXBN.d.ts} +412 -386
  19. package/dist/chunks/{resolveConfig.C1d7TK-U.js → resolveConfig.CSLLD33d.js} +140 -55
  20. package/dist/chunks/{rpc.C3q9uwRX.js → rpc.TVf73xOu.js} +0 -1
  21. package/dist/chunks/{runBaseTests.qNWRkgHj.js → runBaseTests.C0T_TQwH.js} +9 -7
  22. package/dist/chunks/{setup-common.Cp_bu5q3.js → setup-common.D0zLenuv.js} +1 -1
  23. package/dist/chunks/{RandomSequencer.C6x84bNN.js → typechecker.BJMkWMXo.js} +84 -108
  24. package/dist/chunks/{utils.Coei4Wlj.js → utils.DJWL04yX.js} +9 -20
  25. package/dist/chunks/{vi.S4Fq8wSo.js → vi.Da_PT3Vw.js} +554 -272
  26. package/dist/chunks/{vite.CRSMFy31.d.ts → vite.CQ0dHgkN.d.ts} +1 -1
  27. package/dist/chunks/{vm.DGhTouO3.js → vm.DrFVeTXo.js} +4 -4
  28. package/dist/chunks/{worker.R-PA7DpW.d.ts → worker.B1y96qmv.d.ts} +1 -1
  29. package/dist/chunks/{worker.XbtCXEXv.d.ts → worker.CIpff8Eg.d.ts} +3 -5
  30. package/dist/cli.js +1 -1
  31. package/dist/config.d.ts +4 -4
  32. package/dist/coverage.d.ts +2 -2
  33. package/dist/coverage.js +5 -4
  34. package/dist/execute.d.ts +3 -3
  35. package/dist/execute.js +1 -1
  36. package/dist/index.d.ts +18 -119
  37. package/dist/index.js +2 -2
  38. package/dist/node.d.ts +12 -9
  39. package/dist/node.js +25 -24
  40. package/dist/reporters.d.ts +2 -2
  41. package/dist/reporters.js +4 -10
  42. package/dist/runners.d.ts +2 -1
  43. package/dist/runners.js +9 -16
  44. package/dist/worker.js +1 -1
  45. package/dist/workers/forks.js +2 -2
  46. package/dist/workers/runVmTests.js +8 -6
  47. package/dist/workers/threads.js +2 -2
  48. package/dist/workers/vmForks.js +3 -3
  49. package/dist/workers/vmThreads.js +3 -3
  50. package/dist/workers.d.ts +3 -3
  51. package/dist/workers.js +5 -5
  52. package/package.json +17 -19
  53. package/dist/chunks/index.CzkCSFCy.js +0 -5455
  54. package/dist/chunks/types.BOjykUpq.d.ts +0 -27
@@ -1,4 +1,4 @@
- import { slash, createDefer, toArray } from '@vitest/utils';
+ import { slash, createDefer, shuffle, toArray } from '@vitest/utils';
  import fs, { statSync, realpathSync, promises as promises$1 } from 'node:fs';
  import { mkdir, writeFile } from 'node:fs/promises';
  import { builtinModules, createRequire } from 'node:module';
@@ -13,8 +13,10 @@ import c from 'tinyrainbow';
  import { e as extraInlineDeps, d as defaultPort, a as defaultBrowserPort, b as defaultInspectPort } from './constants.fzPh7AOq.js';
  import * as nodeos from 'node:os';
  import nodeos__default from 'node:os';
- import { h as hash, w as wrapSerializableConfig, T as Typechecker, i as isWindows, R as RandomSequencer, B as BaseSequencer } from './RandomSequencer.C6x84bNN.js';
+ import { w as wrapSerializableConfig, a as Typechecker, b as isWindows } from './typechecker.BJMkWMXo.js';
  import { isCI, provider } from 'std-env';
+ import crypto from 'node:crypto';
+ import { isatty } from 'node:tty';
  import { g as getDefaultExportFromCjs } from './_commonjsHelpers.BFTU3MAI.js';
  import require$$0 from 'util';
  import require$$0$1 from 'path';
@@ -24,6 +26,7 @@ import Tinypool$1, { Tinypool } from 'tinypool';
  import { MessageChannel } from 'node:worker_threads';
  import { hasFailed } from '@vitest/runner/utils';
  import { rootDir } from '../path.js';
+ import { slash as slash$1 } from 'vite-node/utils';

  function groupBy(collection, iteratee) {
  return collection.reduce((acc, item) => {
@@ -51,6 +54,8 @@ function wildcardPatternToRegExp(pattern) {
  return new RegExp(`^${regexp}`, "i");
  }

+ const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding));
+
  class FilesStatsCache {
  cache = /* @__PURE__ */ new Map();
  getStats(key) {
@@ -2709,9 +2714,9 @@ function requireToRegexRange () {
  };

  function collatePatterns(neg, pos, options) {
- let onlyNegative = filterPatterns(neg, pos, '-', false) || [];
- let onlyPositive = filterPatterns(pos, neg, '', false) || [];
- let intersected = filterPatterns(neg, pos, '-?', true) || [];
+ let onlyNegative = filterPatterns(neg, pos, '-', false);
+ let onlyPositive = filterPatterns(pos, neg, '', false);
+ let intersected = filterPatterns(neg, pos, '-?', true);
  let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
  return subpatterns.join('|');
  }
@@ -6600,7 +6605,7 @@ async function groupFilesByEnv(files) {
  const created = /* @__PURE__ */ new Set();
  const promises = /* @__PURE__ */ new Map();
  function createMethodsRPC(project, options = {}) {
- const ctx = project.ctx;
+ const ctx = project.vitest;
  const cacheFs = options.cacheFs ?? false;
  return {
  snapshotSaved(snapshot) {
@@ -6658,38 +6663,40 @@ function createMethodsRPC(project, options = {}) {
  transform(id, environment) {
  return project.vitenode.transformModule(id, environment).catch(handleRollupError);
  },
- onPathsCollected(paths) {
- ctx.state.collectPaths(paths);
- return ctx.report("onPathsCollected", paths);
- },
- onQueued(file) {
- ctx.state.collectFiles(project, [file]);
- const testModule = ctx.state.getReportedEntity(file);
- return ctx.report("onTestModuleQueued", testModule);
+ async onQueued(file) {
+ if (options.collect) {
+ ctx.state.collectFiles(project, [file]);
+ } else {
+ await ctx._testRun.enqueued(project, file);
+ }
  },
- onCollected(files) {
- ctx.state.collectFiles(project, files);
- return ctx.report("onCollected", files);
+ async onCollected(files) {
+ if (options.collect) {
+ ctx.state.collectFiles(project, files);
+ } else {
+ await ctx._testRun.collected(project, files);
+ }
  },
  onAfterSuiteRun(meta) {
  ctx.coverageProvider?.onAfterSuiteRun(meta);
  },
- onTaskUpdate(packs) {
- ctx.state.updateTasks(packs);
- return ctx.report("onTaskUpdate", packs);
+ async onTaskUpdate(packs, events) {
+ if (options.collect) {
+ ctx.state.updateTasks(packs);
+ } else {
+ await ctx._testRun.updated(packs, events);
+ }
  },
- onUserConsoleLog(log) {
- ctx.state.updateUserLog(log);
- ctx.report("onUserConsoleLog", log);
+ async onUserConsoleLog(log) {
+ if (options.collect) {
+ ctx.state.updateUserLog(log);
+ } else {
+ await ctx._testRun.log(log);
+ }
  },
  onUnhandledError(err, type) {
  ctx.state.catchError(err, type);
  },
- onFinished(files) {
- const errors = ctx.state.getUnhandledErrors();
- ctx._checkUnhandledErrors(errors);
- return ctx.report("onFinished", files, errors);
- },
  onCancel(reason) {
  ctx.cancelCurrentRun(reason);
  },
@@ -6716,7 +6723,7 @@ function handleRollupError(e) {
  throw e;
  }

- function createChildProcessChannel$1(project) {
+ function createChildProcessChannel$1(project, collect = false) {
  const emitter = new EventEmitter();
  const cleanup = () => emitter.removeAllListeners();
  const events = { message: "message", response: "response" };
@@ -6724,7 +6731,7 @@ function createChildProcessChannel$1(project) {
  onMessage: (callback) => emitter.on(events.message, callback),
  postMessage: (message) => emitter.emit(events.response, message)
  };
- const rpc = createBirpc(createMethodsRPC(project, { cacheFs: true }), {
+ const rpc = createBirpc(createMethodsRPC(project, { cacheFs: true, collect }), {
  eventNames: ["onCancel"],
  serialize: v8.serialize,
  deserialize: (v) => v8.deserialize(Buffer.from(v)),
@@ -6772,7 +6779,7 @@ function createForksPool(ctx, { execArgv, env }) {
  async function runFiles(project, config, files, environment, invalidates = []) {
  const paths = files.map((f) => f.filepath);
  ctx.state.clearFiles(project, paths);
- const { channel, cleanup } = createChildProcessChannel$1(project);
+ const { channel, cleanup } = createChildProcessChannel$1(project, name === "collect");
  const workerId = ++id;
  const data = {
  pool: "forks",
@@ -6907,11 +6914,11 @@ function createForksPool(ctx, { execArgv, env }) {
  };
  }

- function createWorkerChannel$1(project) {
+ function createWorkerChannel$1(project, collect) {
  const channel = new MessageChannel();
  const port = channel.port2;
  const workerPort = channel.port1;
- const rpc = createBirpc(createMethodsRPC(project), {
+ const rpc = createBirpc(createMethodsRPC(project, { collect }), {
  eventNames: ["onCancel"],
  post(v) {
  port.postMessage(v);
@@ -6959,7 +6966,7 @@ function createThreadsPool(ctx, { execArgv, env }) {
  async function runFiles(project, config, files, environment, invalidates = []) {
  const paths = files.map((f) => f.filepath);
  ctx.state.clearFiles(project, paths);
- const { workerPort, port } = createWorkerChannel$1(project);
+ const { workerPort, port } = createWorkerChannel$1(project, name === "collect");
  const workerId = ++id;
  const data = {
  pool: "threads",
@@ -7103,7 +7110,8 @@ function createTypecheckPool(ctx) {
  const rerunTriggered = /* @__PURE__ */ new WeakSet();
  async function onParseEnd(project, { files, sourceErrors }) {
  const checker = project.typechecker;
- await ctx.report("onTaskUpdate", checker.getTestPacks());
+ const { packs, events } = checker.getTestPacksAndEvents();
+ await ctx._testRun.updated(packs, events);
  if (!project.config.typecheck.ignoreSourceErrors) {
  sourceErrors.forEach(
  (error) => ctx.state.catchError(error, "Unhandled Source Error")
@@ -7133,8 +7141,11 @@ function createTypecheckPool(ctx) {
  project.typechecker = checker;
  checker.setFiles(files);
  checker.onParseStart(async () => {
- ctx.state.collectFiles(project, checker.getTestFiles());
- await ctx.report("onCollected");
+ const files2 = checker.getTestFiles();
+ for (const file of files2) {
+ await ctx._testRun.enqueued(project, file);
+ }
+ await ctx._testRun.collected(project, files2);
  });
  checker.onParseEnd((result) => onParseEnd(project, result));
  checker.onWatcherRerun(async () => {
@@ -7148,9 +7159,13 @@ function createTypecheckPool(ctx) {
  );
  }
  await checker.collectTests();
- ctx.state.collectFiles(project, checker.getTestFiles());
- await ctx.report("onTaskUpdate", checker.getTestPacks());
- await ctx.report("onCollected");
+ const testFiles = checker.getTestFiles();
+ for (const file of testFiles) {
+ await ctx._testRun.enqueued(project, file);
+ }
+ await ctx._testRun.collected(project, testFiles);
+ const { packs, events } = checker.getTestPacksAndEvents();
+ await ctx._testRun.updated(packs, events);
  });
  await checker.prepare();
  return checker;
@@ -7171,8 +7186,11 @@ function createTypecheckPool(ctx) {
  const checker = await createWorkspaceTypechecker(project, files);
  checker.setFiles(files);
  await checker.collectTests();
- ctx.state.collectFiles(project, checker.getTestFiles());
- await ctx.report("onCollected");
+ const testFiles = checker.getTestFiles();
+ for (const file of testFiles) {
+ await ctx._testRun.enqueued(project, file);
+ }
+ await ctx._testRun.collected(project, testFiles);
  }
  }
  async function runTests(specs) {
@@ -7196,8 +7214,11 @@ function createTypecheckPool(ctx) {
  });
  const triggered = await _p;
  if (project.typechecker && !triggered) {
- ctx.state.collectFiles(project, project.typechecker.getTestFiles());
- await ctx.report("onCollected");
+ const testFiles = project.typechecker.getTestFiles();
+ for (const file of testFiles) {
+ await ctx._testRun.enqueued(project, file);
+ }
+ await ctx._testRun.collected(project, testFiles);
  await onParseEnd(project, project.typechecker.getResult());
  continue;
  }
@@ -7285,7 +7306,7 @@ function stringToBytes(input, percentageReference) {
  }

  const suppressWarningsPath$1 = resolve$1(rootDir, "./suppress-warnings.cjs");
- function createChildProcessChannel(project) {
+ function createChildProcessChannel(project, collect) {
  const emitter = new EventEmitter();
  const cleanup = () => emitter.removeAllListeners();
  const events = { message: "message", response: "response" };
@@ -7294,7 +7315,7 @@ function createChildProcessChannel(project) {
  postMessage: (message) => emitter.emit(events.response, message)
  };
  const rpc = createBirpc(
- createMethodsRPC(project, { cacheFs: true }),
+ createMethodsRPC(project, { cacheFs: true, collect }),
  {
  eventNames: ["onCancel"],
  serialize: v8.serialize,
@@ -7348,7 +7369,7 @@ function createVmForksPool(ctx, { execArgv, env }) {
  async function runFiles(project, config, files, environment, invalidates = []) {
  const paths = files.map((f) => f.filepath);
  ctx.state.clearFiles(project, paths);
- const { channel, cleanup } = createChildProcessChannel(project);
+ const { channel, cleanup } = createChildProcessChannel(project, name === "collect");
  const workerId = ++id;
  const data = {
  pool: "forks",
@@ -7431,11 +7452,11 @@ function getMemoryLimit$1(config) {
  }

  const suppressWarningsPath = resolve$1(rootDir, "./suppress-warnings.cjs");
- function createWorkerChannel(project) {
+ function createWorkerChannel(project, collect) {
  const channel = new MessageChannel();
  const port = channel.port2;
  const workerPort = channel.port1;
- const rpc = createBirpc(createMethodsRPC(project), {
+ const rpc = createBirpc(createMethodsRPC(project, { collect }), {
  eventNames: ["onCancel"],
  post(v) {
  port.postMessage(v);
@@ -7487,7 +7508,7 @@ function createVmThreadsPool(ctx, { execArgv, env }) {
  async function runFiles(project, config, files, environment, invalidates = []) {
  const paths = files.map((f) => f.filepath);
  ctx.state.clearFiles(project, paths);
- const { workerPort, port } = createWorkerChannel(project);
+ const { workerPort, port } = createWorkerChannel(project, name === "collect");
  const workerId = ++id;
  const data = {
  pool: "vmThreads",
@@ -7634,6 +7655,7 @@ function createPool(ctx) {
  VITEST: "true",
  NODE_ENV: process.env.NODE_ENV || "test",
  VITEST_MODE: ctx.config.watch ? "WATCH" : "RUN",
+ FORCE_TTY: isatty(1) ? "true" : "",
  ...process.env,
  ...ctx.config.env
  }
@@ -7730,6 +7752,61 @@ function createPool(ctx) {
  };
  }

+ class BaseSequencer {
+ ctx;
+ constructor(ctx) {
+ this.ctx = ctx;
+ }
+ // async so it can be extended by other sequelizers
+ async shard(files) {
+ const { config } = this.ctx;
+ const { index, count } = config.shard;
+ const shardSize = Math.ceil(files.length / count);
+ const shardStart = shardSize * (index - 1);
+ const shardEnd = shardSize * index;
+ return [...files].map((spec) => {
+ const fullPath = resolve(slash$1(config.root), slash$1(spec.moduleId));
+ const specPath = fullPath?.slice(config.root.length);
+ return {
+ spec,
+ hash: hash("sha1", specPath, "hex")
+ };
+ }).sort((a, b) => a.hash < b.hash ? -1 : a.hash > b.hash ? 1 : 0).slice(shardStart, shardEnd).map(({ spec }) => spec);
+ }
+ // async so it can be extended by other sequelizers
+ async sort(files) {
+ const cache = this.ctx.cache;
+ return [...files].sort((a, b) => {
+ const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
+ const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
+ const aState = cache.getFileTestResults(keyA);
+ const bState = cache.getFileTestResults(keyB);
+ if (!aState || !bState) {
+ const statsA = cache.getFileStats(keyA);
+ const statsB = cache.getFileStats(keyB);
+ if (!statsA || !statsB) {
+ return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
+ }
+ return statsB.size - statsA.size;
+ }
+ if (aState.failed && !bState.failed) {
+ return -1;
+ }
+ if (!aState.failed && bState.failed) {
+ return 1;
+ }
+ return bState.duration - aState.duration;
+ });
+ }
+ }
+
+ class RandomSequencer extends BaseSequencer {
+ async sort(files) {
+ const { sequence } = this.ctx.config;
+ return shuffle(files, sequence.seed);
+ }
+ }
+
  function resolvePath(path, root) {
  return normalize(
  /* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve(root, path)
@@ -7889,15 +7966,22 @@ function resolveConfig(mode, options, viteConfig, logger) {
  ].join(""));
  }
  }
- if (browser.enabled && !(browser.provider === "playwright" && browser.name === "chromium")) {
- const browserConfig = { browser: { provider: browser.provider, name: browser.name } };
+ const playwrightChromiumOnly = browser.provider === "playwright" && (browser.name === "chromium" || browser.instances?.every((i) => i.browser === "chromium"));
+ if (browser.enabled && !playwrightChromiumOnly) {
+ const browserConfig = {
+ browser: {
+ provider: browser.provider,
+ name: browser.name,
+ instances: browser.instances
+ }
+ };
  if (resolved.coverage.enabled && resolved.coverage.provider === "v8") {
  throw new Error(
  `@vitest/coverage-v8 does not work with
  ${JSON.stringify(browserConfig, null, 2)}

  Use either:
- ${JSON.stringify({ browser: { provider: "playwright", name: "chromium" } }, null, 2)}
+ ${JSON.stringify({ browser: { provider: "playwright", instances: [{ browser: "chromium" }] } }, null, 2)}

  ...or change your coverage provider to:
  ${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}
@@ -7911,7 +7995,7 @@ ${JSON.stringify({ coverage: { provider: "istanbul" } }, null, 2)}
  ${JSON.stringify(browserConfig, null, 2)}

  Use either:
- ${JSON.stringify({ browser: { provider: "playwright", name: "chromium" } }, null, 2)}
+ ${JSON.stringify({ browser: { provider: "playwright", instances: [{ browser: "chromium" }] } }, null, 2)}

  ...or disable ${inspectOption}
  `
@@ -7976,6 +8060,7 @@ ${JSON.stringify({ browser: { provider: "playwright", name: "chromium" } }, null
  )}`
  )
  );
+ resolved.coverage.exclude.push(...resolved.include);
  resolved.forceRerunTriggers = [
  ...resolved.forceRerunTriggers,
  ...resolved.setupFiles
@@ -8383,4 +8468,4 @@ function resolveCoverageReporters(configReporters) {
  return resolvedReporters;
  }

- export { VitestCache as V, resolveConfig as a, resolveApiServerConfig as b, coverageConfigDefaults as c, createMethodsRPC as d, requireMicromatch as e, configDefaults as f, getFilePoolName as g, isBrowserEnabled as h, isPackageExists as i, groupBy as j, createPool as k, mm as m, resolveCoverageReporters as r, stdout as s, wildcardPatternToRegExp as w };
+ export { BaseSequencer as B, RandomSequencer as R, VitestCache as V, resolveConfig as a, resolveApiServerConfig as b, coverageConfigDefaults as c, createMethodsRPC as d, requireMicromatch as e, configDefaults as f, getFilePoolName as g, hash as h, isPackageExists as i, isBrowserEnabled as j, groupBy as k, createPool as l, mm as m, resolveCoverageReporters as r, stdout as s, wildcardPatternToRegExp as w };
@@ -54,7 +54,6 @@ function createRuntimeRpc(options) {
  {
  eventNames: [
  "onUserConsoleLog",
- "onFinished",
  "onCollected",
  "onCancel"
  ],
@@ -1,23 +1,24 @@
  import { performance } from 'node:perf_hooks';
  import { startTests, collectTests } from '@vitest/runner';
- import { a as resolveSnapshotEnvironment, s as setupChaiConfig, r as resolveTestRunner } from './index.BBoOXW-l.js';
+ import { a as resolveSnapshotEnvironment, s as setupChaiConfig, r as resolveTestRunner } from './index.CUcwvygK.js';
  import { s as startCoverageInsideWorker, a as stopCoverageInsideWorker } from './coverage.BWeNbfBa.js';
- import { a as globalExpect, v as vi } from './vi.S4Fq8wSo.js';
+ import { a as globalExpect, v as vi } from './vi.Da_PT3Vw.js';
  import { c as closeInspector } from './inspector.DKLceBVD.js';
  import { createRequire } from 'node:module';
  import timers from 'node:timers';
+ import timersPromises from 'node:timers/promises';
  import util from 'node:util';
  import { getSafeTimers } from '@vitest/utils';
  import { KNOWN_ASSET_TYPES } from 'vite-node/constants';
  import { installSourcemapsSupport } from 'vite-node/source-map';
- import { V as VitestIndex } from './index.CkWmZCXU.js';
- import { s as setupCommonEnv } from './setup-common.Cp_bu5q3.js';
+ import { V as VitestIndex } from './index.BQbxGbG9.js';
+ import { s as setupCommonEnv } from './setup-common.D0zLenuv.js';
  import { g as getWorkerState, r as resetModules } from './utils.C8RiOc4B.js';
  import 'chai';
  import 'node:path';
  import '../path.js';
  import 'node:url';
- import './rpc.C3q9uwRX.js';
+ import './rpc.TVf73xOu.js';
  import './index.68735LiX.js';
  import '@vitest/expect';
  import '@vitest/runner/utils';
@@ -61,7 +62,8 @@ async function setupGlobalEnv(config, { environment }, executor) {
  }
  globalThis.__vitest_required__ = {
  util,
- timers
+ timers,
+ timersPromises
  };
  installSourcemapsSupport({
  getSourceMap: (source) => state.moduleCache.getSourceMap(source)
@@ -77,7 +79,7 @@ function resolveAsset(mod, url) {
  mod.exports = url;
  }
  async function setupConsoleLogSpy() {
- const { createCustomConsole } = await import('./console.BYGVloWk.js');
+ const { createCustomConsole } = await import('./console.CN7AiMGV.js');
  globalThis.console = createCustomConsole();
  }
  async function withEnv({ environment }, options, fn) {
@@ -13,7 +13,7 @@ async function setupCommonEnv(config) {
  globalSetup = true;
  setSafeTimers();
  if (config.globals) {
- (await import('./globals.BFncSRNA.js')).registerApiGlobally();
+ (await import('./globals.C5RQxaV3.js')).registerApiGlobally();
  }
  }
  function setupDefines(defines) {