vitest 4.0.0-beta.17 → 4.0.0-beta.19

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (58)
  1. package/dist/browser.d.ts +3 -3
  2. package/dist/browser.js +1 -1
  3. package/dist/{worker-base.js → chunks/base.CtHM3ryk.js} +18 -91
  4. package/dist/chunks/{browser.d.CCG7W26I.d.ts → browser.d.B9iJzZyn.d.ts} +3 -2
  5. package/dist/chunks/{cac.BO_6jvrs.js → cac.DCrQhweU.js} +15 -62
  6. package/dist/chunks/{cli-api.BvCJGado.js → cli-api.BjHteKX0.js} +1322 -55
  7. package/dist/chunks/{config.d.C4PpNy7v.d.ts → config.d.u2CUDWwS.d.ts} +2 -16
  8. package/dist/chunks/{coverage.3htTSxXZ.js → coverage.FU3w4IrQ.js} +39 -1213
  9. package/dist/chunks/{creator.Daoa5_gR.js → creator.DucAaYBz.js} +1 -1
  10. package/dist/chunks/{defaults.CXFFjsi8.js → defaults.BOqNVLsY.js} +0 -1
  11. package/dist/chunks/evaluatedModules.Dg1zASAC.js +17 -0
  12. package/dist/chunks/{global.d.D1pbKXir.d.ts → global.d.BgJSTpgQ.d.ts} +2 -1
  13. package/dist/chunks/{globals.DC4ntO86.js → globals.BGT_RUsD.js} +5 -3
  14. package/dist/chunks/{index.01uBqPwR.js → index.BdSLhLDZ.js} +1 -1
  15. package/dist/chunks/{index.Bt-upxGS.js → index.CbWINfS7.js} +29 -4
  16. package/dist/chunks/{index.DehVUBn4.js → index.CcRZ6fUh.js} +1507 -12
  17. package/dist/chunks/{index.Dnl38iQ_.js → index.RwjEGCQ0.js} +3 -3
  18. package/dist/chunks/init-forks.DSafeltJ.js +54 -0
  19. package/dist/chunks/init-threads.SUtZ-067.js +17 -0
  20. package/dist/chunks/{worker.DVTUM2IW.js → init.B2EESLQM.js} +98 -81
  21. package/dist/chunks/{inspector.Br76Q2Mb.js → inspector.DLZxSeU3.js} +1 -2
  22. package/dist/chunks/{moduleRunner.d.aXWuQhZN.d.ts → moduleRunner.d.YtNsMIoJ.d.ts} +1 -1
  23. package/dist/chunks/{plugin.d.CqKwuCSa.d.ts → plugin.d.BB__S31E.d.ts} +1 -1
  24. package/dist/chunks/{reporters.d.DAyr7w3M.d.ts → reporters.d.C6nGyY9_.d.ts} +1104 -1112
  25. package/dist/chunks/{resolveSnapshotEnvironment.BsJpmVZR.js → resolveSnapshotEnvironment.DJJKMKxb.js} +2 -2
  26. package/dist/chunks/{setup-common.BewgbkTd.js → setup-common.DR1sucx6.js} +1 -1
  27. package/dist/chunks/{startModuleRunner.DPBo3mme.js → startModuleRunner.C2tTvmF9.js} +3 -1
  28. package/dist/chunks/{test.CTuWuHYH.js → test.C3RPt8JR.js} +1 -1
  29. package/dist/chunks/{vi.B2--mG9U.js → vi.BZvkKVkM.js} +1 -1
  30. package/dist/{worker-vm.js → chunks/vm.DBeOXrP9.js} +6 -66
  31. package/dist/chunks/{worker.d.DSgBAZPX.d.ts → worker.d.BFk-vvBU.d.ts} +79 -4
  32. package/dist/cli.js +8 -9
  33. package/dist/config.cjs +0 -1
  34. package/dist/config.d.ts +6 -7
  35. package/dist/config.js +1 -1
  36. package/dist/coverage.d.ts +4 -4
  37. package/dist/coverage.js +2 -13
  38. package/dist/environments.js +1 -1
  39. package/dist/index.d.ts +13 -9
  40. package/dist/index.js +5 -3
  41. package/dist/module-evaluator.d.ts +3 -3
  42. package/dist/module-runner.js +1 -1
  43. package/dist/node.d.ts +79 -15
  44. package/dist/node.js +25 -26
  45. package/dist/reporters.d.ts +4 -4
  46. package/dist/reporters.js +9 -10
  47. package/dist/runners.d.ts +1 -1
  48. package/dist/runners.js +2 -2
  49. package/dist/worker.d.ts +26 -0
  50. package/dist/worker.js +46 -0
  51. package/dist/workers/forks.js +50 -0
  52. package/dist/workers/runVmTests.js +8 -7
  53. package/dist/workers/threads.js +50 -0
  54. package/dist/workers/vmForks.js +35 -0
  55. package/dist/workers/vmThreads.js +35 -0
  56. package/package.json +17 -14
  57. package/worker.d.ts +1 -0
  58. package/dist/chunks/typechecker.DsKAhua5.js +0 -1522
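For readers who want to reproduce this comparison locally, npm (7.5 or newer) can diff two published versions directly. The command below is an example for doing so and is not part of the registry data:

    npm diff --diff=vitest@4.0.0-beta.17 --diff=vitest@4.0.0-beta.19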
@@ -1,64 +1,26 @@
- import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
- import path, { win32, dirname, join, resolve } from 'node:path';
- import { createDefer, isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, slash, shuffle, toArray } from '@vitest/utils/helpers';
- import { isAbsolute, join as join$1, dirname as dirname$1, resolve as resolve$1, relative, normalize } from 'pathe';
+ import fs, { statSync, realpathSync, existsSync, promises, readdirSync, writeFileSync } from 'node:fs';
+ import path, { win32, dirname, join } from 'node:path';
+ import { slash, shuffle, toArray, cleanUrl } from '@vitest/utils/helpers';
+ import { isAbsolute, resolve, relative, normalize } from 'pathe';
  import pm from 'picomatch';
  import { glob } from 'tinyglobby';
  import c from 'tinyrainbow';
- import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.CXFFjsi8.js';
+ import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.BOqNVLsY.js';
  import crypto from 'node:crypto';
  import { builtinModules, createRequire } from 'node:module';
  import process$1 from 'node:process';
- import fs$1, { writeFile, rename, stat, unlink } from 'node:fs/promises';
+ import fs$1 from 'node:fs/promises';
  import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL as URL$1 } from 'node:url';
  import assert from 'node:assert';
  import v8 from 'node:v8';
  import { format, inspect } from 'node:util';
- import { fetchModule, version, mergeConfig } from 'vite';
+ import { mergeConfig } from 'vite';
  import { c as configFiles, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.D_Q9UYh-.js';
- import { a as isWindows } from './env.D4Lgay0q.js';
- import * as nodeos from 'node:os';
- import nodeos__default, { tmpdir } from 'node:os';
- import { isatty } from 'node:tty';
- import { rootDir } from '../path.js';
- import { s as stringify, w as wrapSerializableConfig, a as Typechecker } from './typechecker.DsKAhua5.js';
- import createDebug from 'debug';
- import EventEmitter from 'node:events';
- import { c as createBirpc } from './index.Bgo3tNWt.js';
- import Tinypool$1, { Tinypool } from 'tinypool';
- import { MessageChannel } from 'node:worker_threads';
- import { hasFailed } from '@vitest/runner/utils';
+ import './env.D4Lgay0q.js';
+ import nodeos__default from 'node:os';
  import { isCI, provider } from 'std-env';
  import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js';

- function groupBy(collection, iteratee) {
- return collection.reduce((acc, item) => {
- const key = iteratee(item);
- return acc[key] ||= [], acc[key].push(item), acc;
- }, {});
- }
- function stdout() {
- // @ts-expect-error Node.js maps process.stdout to console._stdout
- // eslint-disable-next-line no-console
- return console._stdout || process.stdout;
- }
- function escapeRegExp(s) {
- // From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
- return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
- }
- function wildcardPatternToRegExp(pattern) {
- const negated = pattern[0] === "!";
- if (negated) pattern = pattern.slice(1);
- let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
- if (negated) regexp = `(?!${regexp})`;
- return new RegExp(`^${regexp}`, "i");
- }
-
- function createDebugger(namespace) {
- const debug = createDebug(namespace);
- if (debug.enabled) return debug;
- }
-
  const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding));

  const JOIN_LEADING_SLASH_RE = /^\.?\//;
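
As an aside (not part of the package diff): the wildcard helper removed in the hunk above turned a glob-like pattern into a case-insensitive RegExp, with a leading "!" negating the match. A minimal, self-contained sketch of that behaviour, based only on the deleted lines:

  // Illustrative only — restated from the removed source for readability.
  function escapeRegExp(s) {
    return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }
  function wildcardPatternToRegExp(pattern) {
    const negated = pattern[0] === "!";
    if (negated) pattern = pattern.slice(1);
    let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
    if (negated) regexp = `(?!${regexp})`;
    return new RegExp(`^${regexp}`, "i");
  }
  // Example behaviour:
  wildcardPatternToRegExp("chrome*").test("chrome-beta"); // true  — "*" becomes ".*"
  wildcardPatternToRegExp("!chrome*").test("firefox");    // true  — "!" negates via a lookahead
  wildcardPatternToRegExp("!chrome*").test("chrome");     // false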
@@ -2392,1124 +2354,6 @@ function getWorkersCountByPercentage(percent) {
2392
2354
  return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
2393
2355
  }
2394
2356
 
2395
- const debug = createDebugger("vitest:browser:pool");
2396
- function createBrowserPool(vitest) {
2397
- const providers = /* @__PURE__ */ new Set(), numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), projectPools = /* @__PURE__ */ new WeakMap(), ensurePool = (project) => {
2398
- if (projectPools.has(project)) return projectPools.get(project);
2399
- debug?.("creating pool for project %s", project.name);
2400
- const resolvedUrls = project.browser.vite.resolvedUrls, origin = resolvedUrls?.local[0] ?? resolvedUrls?.network[0];
2401
- if (!origin) throw new Error(`Can't find browser origin URL for project "${project.name}"`);
2402
- const pool = new BrowserPool(project, {
2403
- maxWorkers: getThreadsCount(project),
2404
- origin
2405
- });
2406
- return projectPools.set(project, pool), vitest.onCancel(() => {
2407
- pool.cancel();
2408
- }), pool;
2409
- }, runWorkspaceTests = async (method, specs) => {
2410
- const groupedFiles = /* @__PURE__ */ new Map();
2411
- for (const { project, moduleId } of specs) {
2412
- const files = groupedFiles.get(project) || [];
2413
- files.push(moduleId), groupedFiles.set(project, files);
2414
- }
2415
- let isCancelled = false;
2416
- vitest.onCancel(() => {
2417
- isCancelled = true;
2418
- });
2419
- const initialisedPools = await Promise.all([...groupedFiles.entries()].map(async ([project, files]) => {
2420
- if (await project._initBrowserProvider(), !project.browser) throw new TypeError(`The browser server was not initialized${project.name ? ` for the "${project.name}" project` : ""}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2421
- if (isCancelled) return;
2422
- debug?.("provider is ready for %s project", project.name);
2423
- const pool = ensurePool(project);
2424
- return vitest.state.clearFiles(project, files), providers.add(project.browser.provider), {
2425
- pool,
2426
- provider: project.browser.provider,
2427
- runTests: () => pool.runTests(method, files)
2428
- };
2429
- }));
2430
- if (isCancelled) return;
2431
- const parallelPools = [], nonParallelPools = [];
2432
- for (const pool of initialisedPools) {
2433
- if (!pool)
2434
- // this means it was cancelled
2435
- return;
2436
- if (pool.provider.mocker && pool.provider.supportsParallelism) parallelPools.push(pool.runTests);
2437
- else nonParallelPools.push(pool.runTests);
2438
- }
2439
- await Promise.all(parallelPools.map((runTests) => runTests()));
2440
- for (const runTests of nonParallelPools) {
2441
- if (isCancelled) return;
2442
- await runTests();
2443
- }
2444
- };
2445
- function getThreadsCount(project) {
2446
- const config = project.config.browser;
2447
- return !config.headless || !config.fileParallelism || !project.browser.provider.supportsParallelism ? 1 : project.config.maxWorkers ? project.config.maxWorkers : threadsCount;
2448
- }
2449
- return {
2450
- name: "browser",
2451
- async close() {
2452
- await Promise.all([...providers].map((provider) => provider.close())), vitest._browserSessions.sessionIds.clear(), providers.clear(), vitest.projects.forEach((project) => {
2453
- project.browser?.state.orchestrators.forEach((orchestrator) => {
2454
- orchestrator.$close();
2455
- });
2456
- }), debug?.("browser pool closed all providers");
2457
- },
2458
- runTests: (files) => runWorkspaceTests("run", files),
2459
- collectTests: (files) => runWorkspaceTests("collect", files)
2460
- };
2461
- }
2462
- function escapePathToRegexp(path) {
2463
- return path.replace(/[/\\.?*()^${}|[\]+]/g, "\\$&");
2464
- }
2465
- class BrowserPool {
2466
- _queue = [];
2467
- _promise;
2468
- _providedContext;
2469
- readySessions = /* @__PURE__ */ new Set();
2470
- constructor(project, options) {
2471
- this.project = project, this.options = options;
2472
- }
2473
- cancel() {
2474
- this._queue = [];
2475
- }
2476
- reject(error) {
2477
- this._promise?.reject(error), this._promise = void 0, this.cancel();
2478
- }
2479
- get orchestrators() {
2480
- return this.project.browser.state.orchestrators;
2481
- }
2482
- async runTests(method, files) {
2483
- if (this._promise ??= createDefer(), !files.length) return debug?.("no tests found, finishing test run immediately"), this._promise.resolve(), this._promise;
2484
- if (this._providedContext = stringify(this.project.getProvidedContext()), this._queue.push(...files), this.readySessions.forEach((sessionId) => {
2485
- if (this._queue.length) this.readySessions.delete(sessionId), this.runNextTest(method, sessionId);
2486
- }), this.orchestrators.size >= this.options.maxWorkers) return debug?.("all orchestrators are ready, not creating more"), this._promise;
2487
- // open the minimum amount of tabs
2488
- // if there is only 1 file running, we don't need 8 tabs running
2489
- const workerCount = Math.min(this.options.maxWorkers - this.orchestrators.size, files.length), promises = [];
2490
- for (let i = 0; i < workerCount; i++) {
2491
- const sessionId = crypto.randomUUID();
2492
- this.project.vitest._browserSessions.sessionIds.add(sessionId);
2493
- const project = this.project.name;
2494
- debug?.("[%s] creating session for %s", sessionId, project);
2495
- const page = this.openPage(sessionId).then(() => {
2496
- // start running tests on the page when it's ready
2497
- this.runNextTest(method, sessionId);
2498
- });
2499
- promises.push(page);
2500
- }
2501
- return await Promise.all(promises), debug?.("all sessions are created"), this._promise;
2502
- }
2503
- async openPage(sessionId) {
2504
- const sessionPromise = this.project.vitest._browserSessions.createSession(sessionId, this.project, this), browser = this.project.browser, url = new URL("/__vitest_test__/", this.options.origin);
2505
- url.searchParams.set("sessionId", sessionId);
2506
- const pagePromise = browser.provider.openPage(sessionId, url.toString());
2507
- await Promise.all([sessionPromise, pagePromise]);
2508
- }
2509
- getOrchestrator(sessionId) {
2510
- const orchestrator = this.orchestrators.get(sessionId);
2511
- if (!orchestrator) throw new Error(`Orchestrator not found for session ${sessionId}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2512
- return orchestrator;
2513
- }
2514
- finishSession(sessionId) {
2515
- // the last worker finished running tests
2516
- if (this.readySessions.add(sessionId), this.readySessions.size === this.orchestrators.size) this._promise?.resolve(), this._promise = void 0, debug?.("[%s] all tests finished running", sessionId);
2517
- else debug?.(`did not finish sessions for ${sessionId}: |ready - %s| |overall - %s|`, [...this.readySessions].join(", "), [...this.orchestrators.keys()].join(", "));
2518
- }
2519
- runNextTest(method, sessionId) {
2520
- const file = this._queue.shift();
2521
- if (!file) {
2522
- // we don't need to cleanup testers if isolation is enabled,
2523
- // because cleanup is done at the end of every test
2524
- if (debug?.("[%s] no more tests to run", sessionId), this.project.config.browser.isolate) {
2525
- this.finishSession(sessionId);
2526
- return;
2527
- }
2528
- this.getOrchestrator(sessionId).cleanupTesters().catch((error) => this.reject(error)).finally(() => this.finishSession(sessionId));
2529
- return;
2530
- }
2531
- if (!this._promise) throw new Error(`Unexpected empty queue`);
2532
- const orchestrator = this.getOrchestrator(sessionId);
2533
- debug?.("[%s] run test %s", sessionId, file), this.setBreakpoint(sessionId, file).then(() => {
2534
- // this starts running tests inside the orchestrator
2535
- orchestrator.createTesters({
2536
- method,
2537
- files: [file],
2538
- providedContext: this._providedContext || "[{}]"
2539
- }).then(() => {
2540
- debug?.("[%s] test %s finished running", sessionId, file), this.runNextTest(method, sessionId);
2541
- }).catch((error) => {
2542
- // if user cancels the test run manually, ignore the error and exit gracefully
2543
- if (this.project.vitest.isCancelling && error instanceof Error && error.message.startsWith("Browser connection was closed while running tests")) {
2544
- this.cancel(), this._promise?.resolve(), this._promise = void 0, debug?.("[%s] browser connection was closed", sessionId);
2545
- return;
2546
- }
2547
- debug?.("[%s] error during %s test run: %s", sessionId, file, error), this.reject(error);
2548
- });
2549
- }).catch((err) => this.reject(err));
2550
- }
2551
- async setBreakpoint(sessionId, file) {
2552
- if (!this.project.config.inspector.waitForDebugger) return;
2553
- const provider = this.project.browser.provider, browser = this.project.config.browser.name;
2554
- if (shouldIgnoreDebugger(provider.name, browser)) {
2555
- debug?.("[$s] ignoring debugger in %s browser because it is not supported", sessionId, browser);
2556
- return;
2557
- }
2558
- if (!provider.getCDPSession) throw new Error("Unable to set breakpoint, CDP not supported");
2559
- debug?.("[%s] set breakpoint for %s", sessionId, file);
2560
- const session = await provider.getCDPSession(sessionId);
2561
- await session.send("Debugger.enable", {}), await session.send("Debugger.setBreakpointByUrl", {
2562
- lineNumber: 0,
2563
- urlRegex: escapePathToRegexp(file)
2564
- });
2565
- }
2566
- }
2567
- function shouldIgnoreDebugger(provider, browser) {
2568
- return provider === "webdriverio" ? browser !== "chrome" && browser !== "edge" : browser !== "chromium";
2569
- }
2570
-
2571
- const envsOrder = [
2572
- "node",
2573
- "jsdom",
2574
- "happy-dom",
2575
- "edge-runtime"
2576
- ];
2577
- async function groupFilesByEnv(files) {
2578
- const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
2579
- const code = await promises$1.readFile(filepath, "utf-8");
2580
- // 1. Check for control comments in the file
2581
- let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
2582
- // 2. Fallback to global env
2583
- env ||= project.config.environment || "node";
2584
- let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
2585
- if (envOptionsJson?.endsWith("*/"))
2586
- // Trim closing Docblock characters the above regex might have captured
2587
- envOptionsJson = envOptionsJson.slice(0, -2);
2588
- const envOptions = JSON.parse(envOptionsJson || "null");
2589
- return {
2590
- file: {
2591
- filepath,
2592
- testLocations: testLines
2593
- },
2594
- project,
2595
- environment: {
2596
- name: env,
2597
- options: envOptions ? { [env === "happy-dom" ? "happyDOM" : env]: envOptions } : null
2598
- }
2599
- };
2600
- }));
2601
- return groupBy(filesWithEnv, ({ environment }) => environment.name);
2602
- }
2603
-
2604
- const created = /* @__PURE__ */ new Set(), promises = /* @__PURE__ */ new Map();
2605
- function createFetchModuleFunction(resolver, cacheFs = false, tmpDir = join$1(tmpdir(), nanoid())) {
2606
- const cachedFsResults = /* @__PURE__ */ new Map();
2607
- return async (url, importer, environment, options) => {
2608
- // We are copy pasting Vite's externalization logic from `fetchModule` because
2609
- // we instead rely on our own `shouldExternalize` method because Vite
2610
- // doesn't support `resolve.external` in non SSR environments (jsdom/happy-dom)
2611
- if (url.startsWith("data:")) return {
2612
- externalize: url,
2613
- type: "builtin"
2614
- };
2615
- if (url === "/@vite/client" || url === "@vite/client")
2616
- // this will be stubbed
2617
- return {
2618
- externalize: "/@vite/client",
2619
- type: "module"
2620
- };
2621
- const isFileUrl = url.startsWith("file://");
2622
- if (isExternalUrl(url) && !isFileUrl) return {
2623
- externalize: url,
2624
- type: "network"
2625
- };
2626
- // Vite does the same in `fetchModule`, but we want to externalize modules ourselves,
2627
- // so we do this first to resolve the module and check its `id`. The next call of
2628
- // `ensureEntryFromUrl` inside `fetchModule` is cached and should take no time
2629
- // This also makes it so externalized modules are inside the module graph.
2630
- const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url)), cached = !!moduleGraphModule.transformResult;
2631
- // if url is already cached, we can just confirm it's also cached on the server
2632
- if (options?.cached && cached) return { cache: true };
2633
- if (moduleGraphModule.id) {
2634
- const externalize = await resolver.shouldExternalize(moduleGraphModule.id);
2635
- if (externalize) return {
2636
- externalize,
2637
- type: "module"
2638
- };
2639
- }
2640
- const moduleRunnerModule = await fetchModule(environment, url, importer, {
2641
- ...options,
2642
- inlineSourceMap: false
2643
- }).catch(handleRollupError), result = processResultSource(environment, moduleRunnerModule);
2644
- if (!cacheFs || !("code" in result)) return result;
2645
- const code = result.code;
2646
- // to avoid serialising large chunks of code,
2647
- // we store them in a tmp file and read in the test thread
2648
- if (cachedFsResults.has(result.id)) return getCachedResult(result, cachedFsResults);
2649
- const dir = join$1(tmpDir, environment.name), name = hash("sha1", result.id, "hex"), tmp = join$1(dir, name);
2650
- if (!created.has(dir)) mkdirSync(dir, { recursive: true }), created.add(dir);
2651
- return promises.has(tmp) ? (await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults)) : (promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))), await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults));
2652
- };
2653
- }
2654
- let SOURCEMAPPING_URL = "sourceMa";
2655
- SOURCEMAPPING_URL += "ppingURL";
2656
- const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
2657
- function processResultSource(environment, result) {
2658
- if (!("code" in result)) return result;
2659
- const node = environment.moduleGraph.getModuleById(result.id);
2660
- if (node?.transformResult)
2661
- // this also overrides node.transformResult.code which is also what the module
2662
- // runner does under the hood by default (we disable source maps inlining)
2663
- inlineSourceMap(node.transformResult);
2664
- return {
2665
- ...result,
2666
- code: node?.transformResult?.code || result.code
2667
- };
2668
- }
2669
- const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
2670
- // we have to inline the source map ourselves, because
2671
- // - we don't need //# sourceURL since we are running code in VM
2672
- // - important in stack traces and the V8 coverage
2673
- // - we need to inject an empty line for --inspect-brk
2674
- function inlineSourceMap(result) {
2675
- const map = result.map;
2676
- let code = result.code;
2677
- if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
2678
- if (OTHER_SOURCE_MAP_REGEXP.lastIndex = 0, OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
2679
- const sourceMap = { ...map };
2680
- // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
2681
- // so that debuggers can be set to break on first line
2682
- if (sourceMap.mappings[0] === ";") sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
2683
- return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
2684
- }
2685
- function genSourceMapUrl(map) {
2686
- if (typeof map !== "string") map = JSON.stringify(map);
2687
- return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
2688
- }
2689
- function getCachedResult(result, cachedFsResults) {
2690
- const tmp = cachedFsResults.get(result.id);
2691
- if (!tmp) throw new Error(`The cached result was returned too early for ${result.id}.`);
2692
- return {
2693
- cached: true,
2694
- file: result.file,
2695
- id: result.id,
2696
- tmp,
2697
- url: result.url,
2698
- invalidate: result.invalidate
2699
- };
2700
- }
2701
- // serialize rollup error on server to preserve details as a test error
2702
- function handleRollupError(e) {
2703
- throw e instanceof Error && ("plugin" in e || "frame" in e || "id" in e) ? {
2704
- name: e.name,
2705
- message: e.message,
2706
- stack: e.stack,
2707
- cause: e.cause,
2708
- __vitest_rollup_error__: {
2709
- plugin: e.plugin,
2710
- id: e.id,
2711
- loc: e.loc,
2712
- frame: e.frame
2713
- }
2714
- } : e;
2715
- }
2716
- /**
2717
- * Performs an atomic write operation using the write-then-rename pattern.
2718
- *
2719
- * Why we need this:
2720
- * - Ensures file integrity by never leaving partially written files on disk
2721
- * - Prevents other processes from reading incomplete data during writes
2722
- * - Particularly important for test files where incomplete writes could cause test failures
2723
- *
2724
- * The implementation writes to a temporary file first, then renames it to the target path.
2725
- * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
2726
- * guaranteeing that other processes will only ever see the complete file.
2727
- *
2728
- * Added in https://github.com/vitest-dev/vitest/pull/7531
2729
- */
2730
- async function atomicWriteFile(realFilePath, data) {
2731
- const dir = dirname$1(realFilePath), tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
2732
- try {
2733
- await writeFile(tmpFilePath, data, "utf-8"), await rename(tmpFilePath, realFilePath);
2734
- } finally {
2735
- try {
2736
- if (await stat(tmpFilePath)) await unlink(tmpFilePath);
2737
- } catch {}
2738
- }
2739
- }
2740
-
2741
- // this is copy pasted from vite
2742
- function normalizeResolvedIdToUrl(environment, resolvedId) {
2743
- const root = environment.config.root, depsOptimizer = environment.depsOptimizer;
2744
- let url;
2745
- // normalize all imports into resolved URLs
2746
- // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
2747
- if (resolvedId.startsWith(withTrailingSlash$1(root)))
2748
- // in root: infer short absolute path from root
2749
- url = resolvedId.slice(root.length);
2750
- else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
2751
- // an optimized deps may not yet exists in the filesystem, or
2752
- // a regular file exists but is out of root: rewrite to absolute /@fs/ paths
2753
- url = path.posix.join("/@fs/", resolvedId);
2754
- else url = resolvedId;
2755
- // if the resolved id is not a valid browser import specifier,
2756
- // prefix it to make it valid. We will strip this before feeding it
2757
- // back into the transform pipeline
2758
- if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
2759
- return url;
2760
- }
2761
-
2762
- function createMethodsRPC(project, options = {}) {
2763
- const ctx = project.vitest, cacheFs = options.cacheFs ?? false, fetch = createFetchModuleFunction(project._resolver, cacheFs, project.tmpDir);
2764
- return {
2765
- async fetch(url, importer, environmentName, options) {
2766
- const environment = project.vite.environments[environmentName];
2767
- if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2768
- const start = performance.now();
2769
- try {
2770
- return await fetch(url, importer, environment, options);
2771
- } finally {
2772
- project.vitest.state.transformTime += performance.now() - start;
2773
- }
2774
- },
2775
- async resolve(id, importer, environmentName) {
2776
- const environment = project.vite.environments[environmentName];
2777
- if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2778
- const resolved = await environment.pluginContainer.resolveId(id, importer);
2779
- return resolved ? {
2780
- file: cleanUrl(resolved.id),
2781
- url: normalizeResolvedIdToUrl(environment, resolved.id),
2782
- id: resolved.id
2783
- } : null;
2784
- },
2785
- snapshotSaved(snapshot) {
2786
- ctx.snapshot.add(snapshot);
2787
- },
2788
- resolveSnapshotPath(testPath) {
2789
- return ctx.snapshot.resolvePath(testPath, { config: project.serializedConfig });
2790
- },
2791
- async transform(id) {
2792
- const environment = project.vite.environments.__vitest_vm__;
2793
- if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2794
- const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id));
2795
- return { code: (await environment.transformRequest(url).catch(handleRollupError))?.code };
2796
- },
2797
- async onQueued(file) {
2798
- if (options.collect) ctx.state.collectFiles(project, [file]);
2799
- else await ctx._testRun.enqueued(project, file);
2800
- },
2801
- async onCollected(files) {
2802
- if (options.collect) ctx.state.collectFiles(project, files);
2803
- else await ctx._testRun.collected(project, files);
2804
- },
2805
- onAfterSuiteRun(meta) {
2806
- ctx.coverageProvider?.onAfterSuiteRun(meta);
2807
- },
2808
- async onTaskAnnotate(testId, annotation) {
2809
- return ctx._testRun.annotate(testId, annotation);
2810
- },
2811
- async onTaskUpdate(packs, events) {
2812
- if (options.collect) ctx.state.updateTasks(packs);
2813
- else await ctx._testRun.updated(packs, events);
2814
- },
2815
- async onUserConsoleLog(log) {
2816
- if (options.collect) ctx.state.updateUserLog(log);
2817
- else await ctx._testRun.log(log);
2818
- },
2819
- onUnhandledError(err, type) {
2820
- ctx.state.catchError(err, type);
2821
- },
2822
- onCancel(reason) {
2823
- ctx.cancelCurrentRun(reason);
2824
- },
2825
- getCountOfFailedTests() {
2826
- return ctx.state.getCountOfFailedTests();
2827
- }
2828
- };
2829
- }
2830
-
2831
- function createChildProcessChannel$1(project, collect = false) {
2832
- const emitter = new EventEmitter(), events = {
2833
- message: "message",
2834
- response: "response"
2835
- }, rpc = createBirpc(createMethodsRPC(project, {
2836
- cacheFs: true,
2837
- collect
2838
- }), {
2839
- eventNames: ["onCancel"],
2840
- serialize: v8.serialize,
2841
- deserialize: (v) => {
2842
- try {
2843
- return v8.deserialize(Buffer.from(v));
2844
- } catch (error) {
2845
- let stringified = "";
2846
- try {
2847
- stringified = `\nReceived value: ${JSON.stringify(v)}`;
2848
- } catch {}
2849
- throw new Error(`[vitest-pool]: Unexpected call to process.send(). Make sure your test cases are not interfering with process's channel.${stringified}`, { cause: error });
2850
- }
2851
- },
2852
- post(v) {
2853
- emitter.emit(events.message, v);
2854
- },
2855
- on(fn) {
2856
- emitter.on(events.response, fn);
2857
- },
2858
- timeout: -1
2859
- });
2860
- return project.vitest.onCancel((reason) => rpc.onCancel(reason)), {
2861
- onMessage: (callback) => emitter.on(events.message, callback),
2862
- postMessage: (message) => emitter.emit(events.response, message),
2863
- onClose: () => {
2864
- emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2865
- }
2866
- };
2867
- }
2868
- function createForksPool(vitest, { execArgv, env }, specifications) {
2869
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
2870
- runtime: "child_process",
2871
- filename: resolve(vitest.distPath, "worker-base.js"),
2872
- teardown: "teardown",
2873
- maxThreads,
2874
- minThreads,
2875
- env,
2876
- execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2877
- terminateTimeout: vitest.config.teardownTimeout,
2878
- concurrentTasksPerWorker: 1
2879
- }, isolated = poolOptions.isolate ?? true;
2880
- if (isolated) options.isolateWorkers = true;
2881
- if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2882
- const pool = new Tinypool(options), runWithFiles = (name) => {
2883
- let id = 0;
2884
- async function runFiles(project, config, files, environment, invalidates = []) {
2885
- const paths = files.map((f) => f.filepath);
2886
- vitest.state.clearFiles(project, paths);
2887
- const channel = createChildProcessChannel$1(project, name === "collect"), workerId = ++id, data = {
2888
- pool: "forks",
2889
- config,
2890
- files,
2891
- invalidates,
2892
- environment,
2893
- workerId,
2894
- projectName: project.name,
2895
- providedContext: project.getProvidedContext()
2896
- };
2897
- try {
2898
- await pool.run(data, {
2899
- name,
2900
- channel
2901
- });
2902
- } catch (error) {
2903
- // Worker got stuck and won't terminate - this may cause process to hang
2904
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
2905
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
2906
- else throw error;
2907
- }
2908
- }
2909
- return async (specs, invalidates) => {
2910
- // Cancel pending tasks from pool when possible
2911
- vitest.onCancel(() => pool.cancelPendingTasks());
2912
- const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2913
- if (configs.has(project)) return configs.get(project);
2914
- const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
2915
- return configs.set(project, config), config;
2916
- }, singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork), multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2917
- if (multipleForks.length) {
2918
- const filesByEnv = await groupFilesByEnv(multipleForks), files = Object.values(filesByEnv).flat(), results = [];
2919
- if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2920
- else {
2921
- // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2922
- // Tasks are still running parallel but environments are isolated between tasks.
2923
- const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2924
- for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2925
- }
2926
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2927
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2928
- }
2929
- if (singleFork.length) {
2930
- const filesByEnv = await groupFilesByEnv(singleFork), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2931
- for (const env of envs) {
2932
- const files = filesByEnv[env];
2933
- if (!files?.length) continue;
2934
- const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
2935
- for (const files of Object.values(filesByOptions)) {
2936
- // Always run environments isolated between each other
2937
- await pool.recycleWorkers();
2938
- const filenames = files.map((f) => f.file);
2939
- await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
2940
- }
2941
- }
2942
- }
2943
- };
2944
- };
2945
- return {
2946
- name: "forks",
2947
- runTests: runWithFiles("run"),
2948
- collectTests: runWithFiles("collect"),
2949
- close: () => pool.destroy()
2950
- };
2951
- }
2952
-
2953
- function createWorkerChannel$1(project, collect) {
2954
- const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
2955
- eventNames: ["onCancel"],
2956
- post(v) {
2957
- port.postMessage(v);
2958
- },
2959
- on(fn) {
2960
- port.on("message", fn);
2961
- },
2962
- timeout: -1
2963
- });
2964
- return project.vitest.onCancel((reason) => rpc.onCancel(reason)), {
2965
- workerPort,
2966
- port,
2967
- onClose: () => {
2968
- port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2969
- }
2970
- };
2971
- }
2972
- function createThreadsPool(vitest, { execArgv, env }, specifications) {
2973
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
2974
- filename: resolve(vitest.distPath, "worker-base.js"),
2975
- teardown: "teardown",
2976
- useAtomics: poolOptions.useAtomics ?? false,
2977
- maxThreads,
2978
- minThreads,
2979
- env,
2980
- execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2981
- terminateTimeout: vitest.config.teardownTimeout,
2982
- concurrentTasksPerWorker: 1
2983
- }, isolated = poolOptions.isolate ?? true;
2984
- if (isolated) options.isolateWorkers = true;
2985
- if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2986
- const pool = new Tinypool$1(options), runWithFiles = (name) => {
2987
- let id = 0;
2988
- async function runFiles(project, config, files, environment, invalidates = []) {
2989
- const paths = files.map((f) => f.filepath);
2990
- vitest.state.clearFiles(project, paths);
2991
- const { workerPort, onClose } = createWorkerChannel$1(project, name === "collect"), workerId = ++id, data = {
2992
- pool: "threads",
2993
- port: workerPort,
2994
- config,
2995
- files,
2996
- invalidates,
2997
- environment,
2998
- workerId,
2999
- projectName: project.name,
3000
- providedContext: project.getProvidedContext()
3001
- };
3002
- try {
3003
- await pool.run(data, {
3004
- transferList: [workerPort],
3005
- name,
3006
- channel: { onClose }
3007
- });
3008
- } catch (error) {
3009
- // Worker got stuck and won't terminate - this may cause process to hang
3010
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
3011
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3012
- else throw error;
3013
- }
3014
- }
3015
- return async (specs, invalidates) => {
3016
- // Cancel pending tasks from pool when possible
3017
- vitest.onCancel(() => pool.cancelPendingTasks());
3018
- const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
3019
- if (configs.has(project)) return configs.get(project);
3020
- const config = project.serializedConfig;
3021
- return configs.set(project, config), config;
3022
- }, singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread), multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
3023
- if (multipleThreads.length) {
3024
- const filesByEnv = await groupFilesByEnv(multipleThreads), files = Object.values(filesByEnv).flat(), results = [];
3025
- if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
3026
- else {
3027
- // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
3028
- // Tasks are still running parallel but environments are isolated between tasks.
3029
- const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
3030
- for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
3031
- }
3032
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3033
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3034
- }
3035
- if (singleThreads.length) {
3036
- const filesByEnv = await groupFilesByEnv(singleThreads), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
3037
- for (const env of envs) {
3038
- const files = filesByEnv[env];
3039
- if (!files?.length) continue;
3040
- const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
3041
- for (const files of Object.values(filesByOptions)) {
3042
- // Always run environments isolated between each other
3043
- await pool.recycleWorkers();
3044
- const filenames = files.map((f) => f.file);
3045
- await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
3046
- }
3047
- }
3048
- }
3049
- };
3050
- };
3051
- return {
3052
- name: "threads",
3053
- runTests: runWithFiles("run"),
3054
- collectTests: runWithFiles("collect"),
3055
- close: () => pool.destroy()
3056
- };
3057
- }
3058
-
3059
- function createTypecheckPool(vitest) {
3060
- const promisesMap = /* @__PURE__ */ new WeakMap(), rerunTriggered = /* @__PURE__ */ new WeakSet();
3061
- async function onParseEnd(project, { files, sourceErrors }) {
3062
- const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
3063
- if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
3064
- if (!hasFailed(files) && !sourceErrors.length && checker.getExitCode()) {
3065
- const error = new Error(checker.getOutput());
3066
- error.stack = "", vitest.state.catchError(error, "Typecheck Error");
3067
- }
3068
- // triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
3069
- if (promisesMap.get(project)?.resolve(), rerunTriggered.delete(project), vitest.config.watch && !vitest.runningPromise) {
3070
- const modules = files.map((file) => vitest.state.getReportedEntity(file)).filter((e) => e?.type === "module"), state = vitest.isCancelling ? "interrupted" : modules.some((m) => !m.ok()) ? "failed" : "passed";
3071
- await vitest.report("onTestRunEnd", modules, [], state), await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
3072
- }
3073
- }
3074
- async function createWorkspaceTypechecker(project, files) {
3075
- const checker = project.typechecker ?? new Typechecker(project);
3076
- return project.typechecker ? checker : (project.typechecker = checker, checker.setFiles(files), checker.onParseStart(async () => {
3077
- const files = checker.getTestFiles();
3078
- for (const file of files) await vitest._testRun.enqueued(project, file);
3079
- await vitest._testRun.collected(project, files);
3080
- }), checker.onParseEnd((result) => onParseEnd(project, result)), checker.onWatcherRerun(async () => {
3081
- if (rerunTriggered.add(project), !vitest.runningPromise) vitest.state.clearErrors(), await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
3082
- await checker.collectTests();
3083
- const testFiles = checker.getTestFiles();
3084
- for (const file of testFiles) await vitest._testRun.enqueued(project, file);
3085
- await vitest._testRun.collected(project, testFiles);
3086
- const { packs, events } = checker.getTestPacksAndEvents();
3087
- await vitest._testRun.updated(packs, events);
3088
- }), checker);
3089
- }
3090
- async function startTypechecker(project, files) {
3091
- if (project.typechecker) return;
3092
- const checker = await createWorkspaceTypechecker(project, files);
3093
- await checker.collectTests(), await checker.start();
3094
- }
3095
- async function collectTests(specs) {
3096
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
3097
- for (const name in specsByProject) {
3098
- const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), checker = await createWorkspaceTypechecker(project, files);
3099
- checker.setFiles(files), await checker.collectTests();
3100
- const testFiles = checker.getTestFiles();
3101
- vitest.state.collectFiles(project, testFiles);
3102
- }
3103
- }
3104
- async function runTests(specs) {
3105
- const specsByProject = groupBy(specs, (spec) => spec.project.name), promises = [];
3106
- for (const name in specsByProject) {
3107
- const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), triggered = await new Promise((resolve) => {
3108
- const _i = setInterval(() => {
3109
- if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
3110
- });
3111
- setTimeout(() => {
3112
- resolve(false), clearInterval(_i);
3113
- }, 500).unref();
3114
- });
3115
- if (project.typechecker && !triggered) {
3116
- const testFiles = project.typechecker.getTestFiles();
3117
- for (const file of testFiles) await vitest._testRun.enqueued(project, file);
3118
- await vitest._testRun.collected(project, testFiles), await onParseEnd(project, project.typechecker.getResult());
3119
- continue;
3120
- }
3121
- promises.push(promise), promisesMap.set(project, promise), promises.push(startTypechecker(project, files));
3122
- }
3123
- await Promise.all(promises);
3124
- }
3125
- return {
3126
- name: "typescript",
3127
- runTests,
3128
- collectTests,
3129
- async close() {
3130
- const promises = vitest.projects.map((project) => project.typechecker?.stop());
3131
- await Promise.all(promises);
3132
- }
3133
- };
3134
- }
3135
-
3136
- function getDefaultThreadsCount(config) {
3137
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3138
- return config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3139
- }
3140
- function getWorkerMemoryLimit(config, pool) {
3141
- if (pool === "vmForks") {
3142
- const opts = config.poolOptions?.vmForks ?? {};
3143
- return opts.memoryLimit ? opts.memoryLimit : 1 / (opts.maxForks ?? getDefaultThreadsCount(config));
3144
- } else {
3145
- const opts = config.poolOptions?.vmThreads ?? {};
3146
- return opts.memoryLimit ? opts.memoryLimit : 1 / (opts.maxThreads ?? getDefaultThreadsCount(config));
3147
- }
3148
- }
3149
- /**
3150
- * Converts a string representing an amount of memory to bytes.
3151
- *
3152
- * @param input The value to convert to bytes.
3153
- * @param percentageReference The reference value to use when a '%' value is supplied.
3154
- */
3155
- function stringToBytes(input, percentageReference) {
3156
- if (input === null || input === void 0) return input;
3157
- if (typeof input === "string") if (Number.isNaN(Number.parseFloat(input.slice(-1)))) {
3158
- let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
3159
- if (trailingChars && numericString) {
3160
- const numericValue = Number.parseFloat(numericString);
3161
- switch (trailingChars = trailingChars.toLowerCase(), trailingChars) {
3162
- case "%":
3163
- input = numericValue / 100;
3164
- break;
3165
- case "kb":
3166
- case "k": return numericValue * 1e3;
3167
- case "kib": return numericValue * 1024;
3168
- case "mb":
3169
- case "m": return numericValue * 1e3 * 1e3;
3170
- case "mib": return numericValue * 1024 * 1024;
3171
- case "gb":
3172
- case "g": return numericValue * 1e3 * 1e3 * 1e3;
3173
- case "gib": return numericValue * 1024 * 1024 * 1024;
3174
- }
3175
- }
3176
- } else input = Number.parseFloat(input);
3177
- if (typeof input === "number") if (input <= 1 && input > 0) {
3178
- if (percentageReference) return Math.floor(input * percentageReference);
3179
- throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3180
- } else if (input > 1) return Math.floor(input);
3181
- else throw new Error("Unexpected numerical input for \"memoryLimit\"");
3182
- return null;
3183
- }
3184
-
3185
- function createChildProcessChannel(project, collect) {
3186
- const emitter = new EventEmitter(), events = {
3187
- message: "message",
3188
- response: "response"
3189
- }, rpc = createBirpc(createMethodsRPC(project, {
3190
- cacheFs: true,
3191
- collect
3192
- }), {
3193
- eventNames: ["onCancel"],
3194
- serialize: v8.serialize,
3195
- deserialize: (v) => {
3196
- try {
3197
- return v8.deserialize(Buffer.from(v));
3198
- } catch (error) {
3199
- let stringified = "";
3200
- try {
3201
- stringified = `\nReceived value: ${JSON.stringify(v)}`;
3202
- } catch {}
3203
- throw new Error(`[vitest-pool]: Unexpected call to process.send(). Make sure your test cases are not interfering with process's channel.${stringified}`, { cause: error });
3204
- }
3205
- },
3206
- post(v) {
3207
- emitter.emit(events.message, v);
3208
- },
3209
- on(fn) {
3210
- emitter.on(events.response, fn);
3211
- },
3212
- timeout: -1
3213
- });
3214
- return project.vitest.onCancel((reason) => rpc.onCancel(reason)), { channel: {
3215
- onMessage: (callback) => emitter.on(events.message, callback),
3216
- postMessage: (message) => emitter.emit(events.response, message),
3217
- onClose: () => {
3218
- emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3219
- }
3220
- } };
3221
- }
3222
- function createVmForksPool(vitest, { execArgv, env }, specifications) {
3223
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
3224
- runtime: "child_process",
3225
- filename: resolve(vitest.distPath, "worker-vm.js"),
3226
- maxThreads,
3227
- minThreads,
3228
- env,
3229
- execArgv: [
3230
- "--experimental-vm-modules",
3231
- ...poolOptions.execArgv ?? [],
3232
- ...execArgv
3233
- ],
3234
- terminateTimeout: vitest.config.teardownTimeout,
3235
- concurrentTasksPerWorker: 1,
3236
- maxMemoryLimitBeforeRecycle: getMemoryLimit$1(vitest.config) || void 0
3237
- };
3238
- if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3239
- const pool = new Tinypool$1(options), runWithFiles = (name) => {
3240
- let id = 0;
3241
- async function runFiles(project, config, files, environment, invalidates = []) {
3242
- const paths = files.map((f) => f.filepath);
3243
- vitest.state.clearFiles(project, paths);
3244
- const { channel } = createChildProcessChannel(project, name === "collect"), workerId = ++id, data = {
3245
- pool: "vmForks",
3246
- config,
3247
- files,
3248
- invalidates,
3249
- environment,
3250
- workerId,
3251
- projectName: project.name,
3252
- providedContext: project.getProvidedContext()
3253
- };
3254
- try {
3255
- await pool.run(data, {
3256
- name,
3257
- channel
3258
- });
3259
- } catch (error) {
3260
- // Worker got stuck and won't terminate - this may cause process to hang
3261
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
3262
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3263
- else throw error;
3264
- } finally {
3265
- channel.onClose();
3266
- }
3267
- }
3268
- return async (specs, invalidates) => {
3269
- // Cancel pending tasks from pool when possible
3270
- vitest.onCancel(() => pool.cancelPendingTasks());
3271
- const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3272
- if (configs.has(project)) return configs.get(project);
3273
- const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
3274
- return configs.set(project, config), config;
3275
- }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), errors = (await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))).filter((r) => r.status === "rejected").map((r) => r.reason);
3276
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3277
- };
3278
- };
3279
- return {
3280
- name: "vmForks",
3281
- runTests: runWithFiles("run"),
3282
- collectTests: runWithFiles("collect"),
3283
- close: () => pool.destroy()
3284
- };
3285
- }
3286
- function getMemoryLimit$1(config) {
3287
- const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmForks");
3288
- // just ignore "memoryLimit" value because we cannot detect memory limit
3289
- return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3290
- }
3291
-
3292
- function createWorkerChannel(project, collect) {
3293
- const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
3294
- eventNames: ["onCancel"],
3295
- post(v) {
3296
- port.postMessage(v);
3297
- },
3298
- on(fn) {
3299
- port.on("message", fn);
3300
- },
3301
- timeout: -1
3302
- });
3303
- project.vitest.onCancel((reason) => rpc.onCancel(reason));
3304
- function onClose() {
3305
- workerPort.close(), port.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3306
- }
3307
- return {
3308
- workerPort,
3309
- onClose
3310
- };
3311
- }
3312
- function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
3313
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
3314
- filename: resolve(vitest.distPath, "worker-vm.js"),
3315
- useAtomics: poolOptions.useAtomics ?? false,
3316
- maxThreads,
3317
- minThreads,
3318
- env,
3319
- execArgv: [
3320
- "--experimental-vm-modules",
3321
- ...poolOptions.execArgv ?? [],
3322
- ...execArgv
3323
- ],
3324
- terminateTimeout: vitest.config.teardownTimeout,
3325
- concurrentTasksPerWorker: 1,
3326
- maxMemoryLimitBeforeRecycle: getMemoryLimit(vitest.config) || void 0
3327
- };
3328
- if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3329
- const pool = new Tinypool$1(options), runWithFiles = (name) => {
3330
- let id = 0;
3331
- async function runFiles(project, config, files, environment, invalidates = []) {
3332
- const paths = files.map((f) => f.filepath);
3333
- vitest.state.clearFiles(project, paths);
3334
- const { workerPort, onClose } = createWorkerChannel(project, name === "collect"), workerId = ++id, data = {
3335
- pool: "vmThreads",
3336
- port: workerPort,
3337
- config,
3338
- files: paths,
3339
- invalidates,
3340
- environment,
3341
- workerId,
3342
- projectName: project.name,
3343
- providedContext: project.getProvidedContext()
3344
- };
3345
- try {
3346
- await pool.run(data, {
3347
- transferList: [workerPort],
3348
- name
3349
- });
3350
- } catch (error) {
3351
- // Worker got stuck and won't terminate - this may cause process to hang
3352
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
3353
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3354
- else throw error;
3355
- } finally {
3356
- onClose();
3357
- }
3358
- }
3359
- return async (specs, invalidates) => {
3360
- // Cancel pending tasks from pool when possible
3361
- vitest.onCancel(() => pool.cancelPendingTasks());
3362
- const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3363
- if (configs.has(project)) return configs.get(project);
3364
- const config = project.serializedConfig;
3365
- return configs.set(project, config), config;
3366
- }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), errors = (await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))).filter((r) => r.status === "rejected").map((r) => r.reason);
3367
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3368
- };
3369
- };
3370
- return {
3371
- name: "vmThreads",
3372
- runTests: runWithFiles("run"),
3373
- collectTests: runWithFiles("collect"),
3374
- close: () => pool.destroy()
3375
- };
3376
- }
3377
- function getMemoryLimit(config) {
3378
- const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmThreads");
3379
- // just ignore "memoryLimit" value because we cannot detect memory limit
3380
- return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3381
- }
3382
-
3383
- const suppressWarningsPath = resolve$1(rootDir, "./suppress-warnings.cjs");
3384
- const builtinPools = [
3385
- "forks",
3386
- "threads",
3387
- "browser",
3388
- "vmThreads",
3389
- "vmForks",
3390
- "typescript"
3391
- ];
3392
- function getDefaultPoolName(project) {
3393
- return project.config.browser.enabled ? "browser" : project.config.pool;
3394
- }
3395
- function getFilePoolName(project) {
3396
- return getDefaultPoolName(project);
3397
- }
3398
- function createPool(ctx) {
3399
- const pools = {
3400
- forks: null,
3401
- threads: null,
3402
- browser: null,
3403
- vmThreads: null,
3404
- vmForks: null,
3405
- typescript: null
3406
- }, viteMajor = Number(version.split(".")[0]), conditions = [...new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
3407
- "production",
3408
- "development",
3409
- ...ctx.vite.config.resolve.conditions
3410
- ])].filter((condition) => {
3411
- return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
3412
- }).map((condition) => {
3413
- return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
3414
- }).flatMap((c) => ["--conditions", c]), execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
3415
- async function executeTests(method, files, invalidate) {
3416
- const options = {
3417
- execArgv: [
3418
- ...execArgv,
3419
- ...conditions,
3420
- "--experimental-import-meta-resolve",
3421
- "--require",
3422
- suppressWarningsPath
3423
- ],
3424
- env: {
3425
- TEST: "true",
3426
- VITEST: "true",
3427
- NODE_ENV: process.env.NODE_ENV || "test",
3428
- VITEST_MODE: ctx.config.watch ? "WATCH" : "RUN",
3429
- FORCE_TTY: isatty(1) ? "true" : "",
3430
- ...process.env,
3431
- ...ctx.config.env
3432
- }
3433
- };
3434
- // env are case-insensitive on Windows, but spawned processes don't support it
3435
- if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
3436
- const poolConcurrentPromises = /* @__PURE__ */ new Map(), customPools = /* @__PURE__ */ new Map();
3437
- async function resolveCustomPool(filepath) {
3438
- if (customPools.has(filepath)) return customPools.get(filepath);
3439
- const pool = await ctx.runner.import(filepath);
3440
- if (typeof pool.default !== "function") throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
3441
- const poolInstance = await pool.default(ctx, options);
3442
- if (typeof poolInstance?.name !== "string") throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
3443
- if (typeof poolInstance?.[method] !== "function") throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
3444
- return customPools.set(filepath, poolInstance), poolInstance;
3445
- }
3446
- function getConcurrentPool(pool, fn) {
3447
- if (poolConcurrentPromises.has(pool)) return poolConcurrentPromises.get(pool);
3448
- const promise = fn().finally(() => {
3449
- poolConcurrentPromises.delete(pool);
3450
- });
3451
- return poolConcurrentPromises.set(pool, promise), promise;
3452
- }
3453
- function getCustomPool(pool) {
3454
- return getConcurrentPool(pool, () => resolveCustomPool(pool));
3455
- }
3456
- const groupedSpecifications = {}, groups = /* @__PURE__ */ new Set(), factories = {
3457
- vmThreads: (specs) => createVmThreadsPool(ctx, options, specs),
3458
- vmForks: (specs) => createVmForksPool(ctx, options, specs),
3459
- threads: (specs) => createThreadsPool(ctx, options, specs),
3460
- forks: (specs) => createForksPool(ctx, options, specs),
3461
- typescript: () => createTypecheckPool(ctx),
3462
- browser: () => createBrowserPool(ctx)
3463
- };
3464
- for (const spec of files) {
3465
- const group = spec.project.config.sequence.groupOrder ?? 0;
3466
- groups.add(group), groupedSpecifications[group] ??= [], groupedSpecifications[group].push(spec);
3467
- }
3468
- const Sequencer = ctx.config.sequence.sequencer, sequencer = new Sequencer(ctx);
3469
- async function sortSpecs(specs) {
3470
- if (ctx.config.shard) {
3471
- if (!ctx.config.passWithNoTests && ctx.config.shard.count > specs.length) throw new Error(`--shard <count> must be a smaller than count of test files. Resolved ${specs.length} test files for --shard=${ctx.config.shard.index}/${ctx.config.shard.count}.`);
3472
- specs = await sequencer.shard(specs);
3473
- }
3474
- return sequencer.sort(specs);
3475
- }
3476
- const sortedGroups = Array.from(groups).sort();
3477
- for (const group of sortedGroups) {
3478
- const specifications = groupedSpecifications[group];
3479
- if (!specifications?.length) continue;
3480
- const filesByPool = {
3481
- forks: [],
3482
- threads: [],
3483
- vmThreads: [],
3484
- vmForks: [],
3485
- typescript: []
3486
- };
3487
- specifications.forEach((specification) => {
3488
- const pool = specification.pool;
3489
- filesByPool[pool] ??= [], filesByPool[pool].push(specification);
3490
- }), await Promise.all(Object.entries(filesByPool).map(async (entry) => {
3491
- const [pool, files] = entry;
3492
- if (!files.length) return null;
3493
- const specs = await sortSpecs(files);
3494
- if (pool in factories) {
3495
- const factory = factories[pool];
3496
- return pools[pool] ??= factory(specs), pools[pool][method](specs, invalidate);
3497
- }
3498
- const poolHandler = await getCustomPool(pool);
3499
- return pools[poolHandler.name] ??= poolHandler, poolHandler[method](specs, invalidate);
3500
- }));
3501
- }
3502
- }
3503
- return {
3504
- name: "default",
3505
- runTests: (files, invalidates) => executeTests("runTests", files, invalidates),
3506
- collectTests: (files, invalidates) => executeTests("collectTests", files, invalidates),
3507
- async close() {
3508
- await Promise.all(Object.values(pools).map((p) => p?.close?.()));
3509
- }
3510
- };
3511
- }
3512
-
3513
2357
  class BaseSequencer {
3514
2358
  ctx;
3515
2359
  constructor(ctx) {
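The vmThreads and vmForks pools removed above cap worker memory the same way: getMemoryLimit reads the configured limit and converts it against os.totalmem(), allowing only half of the machine's memory in watch mode. A minimal sketch of that conversion, using a hypothetical toBytes helper in place of Vitest's internal stringToBytes:

import { totalmem } from "node:os";

// Hypothetical stand-in for the internal stringToBytes helper:
// percentage strings resolve against the given total, plain byte counts pass through.
function toBytes(limit: number | string, total: number): number {
  if (typeof limit === "string" && limit.endsWith("%")) {
    return Math.floor((Number.parseFloat(limit) / 100) * total);
  }
  return typeof limit === "string" ? Number.parseInt(limit, 10) : limit;
}

// Watch mode halves the budget, mirroring `config.watch ? memory / 2 : memory` above.
const watch = true;
const budget = watch ? totalmem() / 2 : totalmem();
const recycleLimit = toBytes("50%", budget); // a "50%" limit ends up as a quarter of total RAM in watch mode

The result feeds Tinypool's maxMemoryLimitBeforeRecycle, so a worker is recycled once its heap crosses that byte count.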
@@ -3519,7 +2363,7 @@ class BaseSequencer {
3519
2363
  async shard(files) {
3520
2364
  const { config } = this.ctx, { index, count } = config.shard, [shardStart, shardEnd] = this.calculateShardRange(files.length, index, count);
3521
2365
  return [...files].map((spec) => {
3522
- const specPath = resolve$1(slash(config.root), slash(spec.moduleId))?.slice(config.root.length);
2366
+ const specPath = resolve(slash(config.root), slash(spec.moduleId))?.slice(config.root.length);
3523
2367
  return {
3524
2368
  spec,
3525
2369
  hash: hash("sha1", specPath, "hex")
@@ -3560,7 +2404,7 @@ class RandomSequencer extends BaseSequencer {
3560
2404
  }
3561
2405
 
3562
2406
  function resolvePath(path, root) {
3563
- return normalize(/* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve$1(root, path));
2407
+ return normalize(/* @__PURE__ */ resolveModule(path, { paths: [root] }) ?? resolve(root, path));
3564
2408
  }
3565
2409
  function parseInspector(inspect) {
3566
2410
  if (typeof inspect === "boolean" || inspect === void 0) return {};
@@ -3602,7 +2446,8 @@ function resolveConfig$1(vitest, options, viteConfig) {
3602
2446
  root: viteConfig.root,
3603
2447
  mode
3604
2448
  };
3605
- if (resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enabled Browser Mode, use "test.browser.enabled" instead.`);
2449
+ if (options.pool && typeof options.pool !== "string") resolved.pool = options.pool.name, resolved.poolRunner = options.pool;
2450
+ if (resolved.pool ??= "forks", resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enabled Browser Mode, use "test.browser.enabled" instead.`);
3606
2451
  const inspector = resolved.inspect || resolved.inspectBrk;
3607
2452
  if (resolved.inspector = {
3608
2453
  ...resolved.inspector,
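The two added lines above let options.pool carry an object: its name becomes the resolved pool and the object itself is kept as poolRunner, with "forks" as the fallback default. A sketch of what a config could look like, assuming the user-facing test.pool accepts the same shape that resolveConfig reads here; the custom runner import is hypothetical:

import { defineConfig } from "vitest/config";
// Hypothetical custom pool runner exposing at least a `name` property.
import customPool from "./my-pool";

export default defineConfig({
  test: {
    // A string still selects a builtin pool ("forks" is the fallback above).
    // pool: "threads",
    pool: customPool, // stored as `poolRunner`; `customPool.name` becomes `pool`
  },
});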
@@ -3628,10 +2473,9 @@ function resolveConfig$1(vitest, options, viteConfig) {
3628
2473
  resolved.maxWorkers = 1;
3629
2474
  if (resolved.maxConcurrency === 0) logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)), resolved.maxConcurrency = configDefaults.maxConcurrency;
3630
2475
  if (resolved.inspect || resolved.inspectBrk) {
3631
- const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread, isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
3632
- if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
2476
+ if (resolved.fileParallelism) {
3633
2477
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
3634
- throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
2478
+ throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism"`);
3635
2479
  }
3636
2480
  }
3637
2481
  // apply browser CLI options only if the config already has the browser config and not disabled manually
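The inspector check above is also simplified: with --inspect or --inspect-brk, file parallelism now has to be disabled outright; the old poolOptions.threads.singleThread and poolOptions.forks.singleFork escape hatches no longer satisfy the check. A sketch of a debug-friendly config under that rule (the equivalent CLI form is vitest --inspect-brk --no-file-parallelism):

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    fileParallelism: false, // required before enabling the inspector
    inspectBrk: true,
  },
});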
@@ -3691,7 +2535,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
3691
2535
  }
3692
2536
  }
3693
2537
  if (resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter), resolved.coverage.enabled && resolved.coverage.reportsDirectory) {
3694
- const reportsDirectory = resolve$1(resolved.root, resolved.coverage.reportsDirectory);
2538
+ const reportsDirectory = resolve(resolved.root, resolved.coverage.reportsDirectory);
3695
2539
  if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`);
3696
2540
  }
3697
2541
  if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root);
@@ -3712,9 +2556,9 @@ function resolveConfig$1(vitest, options, viteConfig) {
3712
2556
  "**/virtual:*",
3713
2557
  "**/__x00__*",
3714
2558
  "**/node_modules/**"
3715
- ].filter((pattern) => pattern != null), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
2559
+ ].filter((pattern) => typeof pattern === "string"), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
3716
2560
  if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root);
3717
- if (resolved.attachmentsDir = resolve$1(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"), resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
2561
+ if (resolved.attachmentsDir = resolve(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"), resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
3718
2562
  if (resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0, resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
3719
2563
  // TODO: support it via separate config (like DiffOptions) or via `Function.toString()`
3720
2564
  if (resolved.snapshotFormat.plugins = [], typeof resolved.snapshotFormat.compareKeys === "function") throw new TypeError(`"snapshotFormat.compareKeys" function is not supported.`);
@@ -3727,31 +2571,8 @@ function resolveConfig$1(vitest, options, viteConfig) {
3727
2571
  resolveSnapshotPath: options.resolveSnapshotPath,
3728
2572
  snapshotEnvironment: null
3729
2573
  }, resolved.snapshotSerializers ??= [], resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)), resolved.forceRerunTriggers.push(...resolved.snapshotSerializers), options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
3730
- if (resolved.pool ??= "threads", process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
3731
- ...resolved.poolOptions,
3732
- threads: {
3733
- ...resolved.poolOptions?.threads,
3734
- maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
3735
- },
3736
- vmThreads: {
3737
- ...resolved.poolOptions?.vmThreads,
3738
- maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
3739
- }
3740
- };
3741
- if (process.env.VITEST_MAX_FORKS) resolved.poolOptions = {
3742
- ...resolved.poolOptions,
3743
- forks: {
3744
- ...resolved.poolOptions?.forks,
3745
- maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
3746
- },
3747
- vmForks: {
3748
- ...resolved.poolOptions?.vmForks,
3749
- maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
3750
- }
3751
- };
3752
- for (const [poolOptionKey, workerOptionKey] of [["threads", "maxThreads"], ["vmThreads", "maxThreads"]]) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3753
- for (const [poolOptionKey, workerOptionKey] of [["forks", "maxForks"], ["vmForks", "maxForks"]]) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
3754
- if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
2574
+ if (resolved.execArgv ??= [], resolved.pool ??= "threads", resolved.pool === "vmForks" || resolved.pool === "vmThreads" || resolved.pool === "typescript") resolved.isolate = false;
2575
+ if (process.env.VITEST_MAX_WORKERS) resolved.maxWorkers = Number.parseInt(process.env.VITEST_MAX_WORKERS);
3755
2576
  if (mode === "benchmark") {
3756
2577
  resolved.benchmark = {
3757
2578
  ...benchmarkConfigDefaults,
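The removed VITEST_MAX_THREADS / VITEST_MAX_FORKS handling is replaced by a single variable that is read straight into maxWorkers above. A sketch of capping workers from a wrapper script under the new resolution:

import { spawnSync } from "node:child_process";

// VITEST_MAX_WORKERS applies to every pool; the old per-pool variables are gone.
spawnSync("npx", ["vitest", "run"], {
  stdio: "inherit",
  env: { ...process.env, VITEST_MAX_WORKERS: "4" },
});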
@@ -3769,7 +2590,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
3769
2590
  if (resolved.api = {
3770
2591
  ...resolveApiServerConfig(options, defaultPort),
3771
2592
  token: crypto.randomUUID()
3772
- }, options.related) resolved.related = toArray(options.related).map((file) => resolve$1(resolved.root, file));
2593
+ }, options.related) resolved.related = toArray(options.related).map((file) => resolve(resolved.root, file));
3773
2594
  /*
3774
2595
  * Reporters can be defined in many different ways:
3775
2596
  * { reporter: 'json' }
@@ -3799,7 +2620,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
3799
2620
  // @ts-expect-error "reporter" is from CLI, should be absolute to the running directory
3800
2621
  // it is passed down as "vitest --reporter ../reporter.js"
3801
2622
  const reportersFromCLI = resolved.reporter, cliReporters = toArray(reportersFromCLI || []).map((reporter) => {
3802
- return /^\.\.?\//.test(reporter) ? resolve$1(process.cwd(), reporter) : reporter;
2623
+ return /^\.\.?\//.test(reporter) ? resolve(process.cwd(), reporter) : reporter;
3803
2624
  });
3804
2625
  if (cliReporters.length) resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, {}]);
3805
2626
  }
@@ -3825,7 +2646,8 @@ function resolveConfig$1(vitest, options, viteConfig) {
3825
2646
  ...configDefaults.typecheck,
3826
2647
  ...resolved.typecheck
3827
2648
  }, resolved.typecheck ??= {}, resolved.typecheck.enabled ??= false, resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
3828
- if (resolved.browser.enabled ??= false, resolved.browser.headless ??= isCI, resolved.browser.isolate ??= true, resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark", resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI, resolved.browser.commands ??= {}, resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve$1(resolved.root, resolved.browser.screenshotDirectory);
2649
+ if (resolved.browser.enabled ??= false, resolved.browser.headless ??= isCI, resolved.browser.isolate ??= true, resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark", resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI, resolved.browser.commands ??= {}, resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve(resolved.root, resolved.browser.screenshotDirectory);
2650
+ if (resolved.inspector.enabled) resolved.browser.trackUnhandledErrors ??= false;
3829
2651
  if (resolved.browser.viewport ??= {}, resolved.browser.viewport.width ??= 414, resolved.browser.viewport.height ??= 896, resolved.browser.locators ??= {}, resolved.browser.locators.testIdAttribute ??= "data-testid", typeof resolved.browser.provider === "string") {
3830
2652
  const source = `@vitest/browser-${resolved.browser.provider}`;
3831
2653
  throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/guide/browser/config#provider`);
@@ -3848,7 +2670,11 @@ function resolveConfig$1(vitest, options, viteConfig) {
3848
2670
  if (toArray(resolved.reporters).some((reporter) => {
3849
2671
  return Array.isArray(reporter) ? reporter[0] === "html" : false;
3850
2672
  })) resolved.includeTaskLocation ??= true;
3851
- return resolved.server ??= {}, resolved.server.deps ??= {}, resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
2673
+ if (resolved.server ??= {}, resolved.server.deps ??= {}, resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP) {
2674
+ const userFolder = resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP;
2675
+ resolved.dumpDir = resolve(resolved.root, typeof userFolder === "string" && userFolder !== "true" ? userFolder : ".vitest-dump", resolved.name || "root");
2676
+ }
2677
+ return resolved.testTimeout ??= resolved.browser.enabled ? 3e4 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
3852
2678
  }
3853
2679
  function isBrowserEnabled(config) {
3854
2680
  return Boolean(config.browser?.enabled);
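The added dump branch above resolves a dumpDir whenever server.debug.dump or VITEST_DEBUG_DUMP is set, defaulting to .vitest-dump under the project root (one folder per project name, or "root"). A sketch of turning it on, assuming the user-facing option mirrors the resolved shape read here:

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    server: {
      debug: {
        dump: true, // modules are dumped to <root>/.vitest-dump/<project name or "root">
      },
    },
  },
});

Setting VITEST_DEBUG_DUMP=true (or a folder path) appears to behave the same way without touching the config. Note the other change in this hunk: the default testTimeout with Browser Mode enabled is now 30000 ms instead of 15000 ms.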
@@ -3913,7 +2739,7 @@ Update your dependencies and make sure the versions match.`));
3913
2739
  ...coverageConfigDefaults,
3914
2740
  ...config,
3915
2741
  provider: this.name,
3916
- reportsDirectory: resolve$1(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
2742
+ reportsDirectory: resolve(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
3917
2743
  reporter: resolveCoverageReporters(config.reporter || coverageConfigDefaults.reporter),
3918
2744
  thresholds: config.thresholds && {
3919
2745
  ...config.thresholds,
@@ -3925,7 +2751,7 @@ Update your dependencies and make sure the versions match.`));
3925
2751
  };
3926
2752
  const shard = this.ctx.config.shard, tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
3927
2753
  // If --project filter is set pick only roots of resolved projects
3928
- this.coverageFilesDirectory = resolve$1(this.options.reportsDirectory, tempDirectory), this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
2754
+ this.coverageFilesDirectory = resolve(this.options.reportsDirectory, tempDirectory), this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
3929
2755
  }
3930
2756
  /**
3931
2757
  * Check if file matches `coverage.include` but not `coverage.exclude`
@@ -3977,26 +2803,26 @@ Update your dependencies and make sure the versions match.`));
3977
2803
  return this.options;
3978
2804
  }
3979
2805
  async clean(clean = true) {
3980
- if (clean && existsSync(this.options.reportsDirectory)) await promises$1.rm(this.options.reportsDirectory, {
2806
+ if (clean && existsSync(this.options.reportsDirectory)) await promises.rm(this.options.reportsDirectory, {
3981
2807
  recursive: true,
3982
2808
  force: true,
3983
2809
  maxRetries: 10
3984
2810
  });
3985
- if (existsSync(this.coverageFilesDirectory)) await promises$1.rm(this.coverageFilesDirectory, {
2811
+ if (existsSync(this.coverageFilesDirectory)) await promises.rm(this.coverageFilesDirectory, {
3986
2812
  recursive: true,
3987
2813
  force: true,
3988
2814
  maxRetries: 10
3989
2815
  });
3990
- await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
2816
+ await promises.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
3991
2817
  }
3992
2818
  onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
3993
2819
  if (!coverage) return;
3994
2820
  let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
3995
2821
  if (!entry) entry = {}, this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
3996
- const testFilenames = testFiles.join(), filename = resolve$1(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
2822
+ const testFilenames = testFiles.join(), filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
3997
2823
  // If there's a result from previous run, overwrite it
3998
2824
  entry[environment] ??= {}, entry[environment][testFilenames] = filename;
3999
- const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
2825
+ const promise = promises.writeFile(filename, JSON.stringify(coverage), "utf-8");
4000
2826
  this.pendingPromises.push(promise);
4001
2827
  }
4002
2828
  async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
@@ -4008,7 +2834,7 @@ Update your dependencies and make sure the versions match.`));
4008
2834
  for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
4009
2835
  if (onDebug.enabled) index += chunk.length, onDebug(`Reading coverage results ${index}/${total}`);
4010
2836
  await Promise.all(chunk.map(async (filename) => {
4011
- const contents = await promises$1.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
2837
+ const contents = await promises.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
4012
2838
  onFileRead(coverage);
4013
2839
  }));
4014
2840
  }
@@ -4017,7 +2843,7 @@ Update your dependencies and make sure the versions match.`));
4017
2843
  }
4018
2844
  async cleanAfterRun() {
4019
2845
  // Remove empty reports directory, e.g. when only text-reporter is used
4020
- if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises$1.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
2846
+ if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises.rm(this.options.reportsDirectory, { recursive: true });
4021
2847
  }
4022
2848
  async onTestFailure() {
4023
2849
  if (!this.options.reportOnFailure) await this.cleanAfterRun();
@@ -4278,4 +3104,4 @@ function resolveMergeConfig(mod) {
4278
3104
  }
4279
3105
  }
4280
3106
 
4281
- export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, createDebugger as d, createFetchModuleFunction as e, isBrowserEnabled as f, getFilePoolName as g, hash as h, isPackageExists as i, groupBy as j, getCoverageProvider as k, createPool as l, normalizeResolvedIdToUrl as n, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };
3107
+ export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, isBrowserEnabled as c, getCoverageProvider as g, hash as h, isPackageExists as i, resolveConfig$1 as r };