vitest 4.0.0-beta.18 → 4.0.0-beta.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/browser.d.ts +3 -3
  2. package/dist/browser.js +1 -1
  3. package/dist/{worker-base.js → chunks/base.CtHM3ryk.js} +18 -91
  4. package/dist/chunks/{browser.d.CCG7W26I.d.ts → browser.d.B9iJzZyn.d.ts} +3 -2
  5. package/dist/chunks/{cac.DYnuYoJK.js → cac.DCrQhweU.js} +14 -61
  6. package/dist/chunks/{cli-api.xhe4uqTX.js → cli-api.BjHteKX0.js} +1312 -53
  7. package/dist/chunks/{config.d.C4PpNy7v.d.ts → config.d.u2CUDWwS.d.ts} +2 -16
  8. package/dist/chunks/{coverage.Ds84cgzV.js → coverage.FU3w4IrQ.js} +25 -1241
  9. package/dist/chunks/{defaults.CXFFjsi8.js → defaults.BOqNVLsY.js} +0 -1
  10. package/dist/chunks/evaluatedModules.Dg1zASAC.js +17 -0
  11. package/dist/chunks/{global.d.RTA0rbJI.d.ts → global.d.BgJSTpgQ.d.ts} +1 -1
  12. package/dist/chunks/{globals.CwYe1aG7.js → globals.BGT_RUsD.js} +4 -2
  13. package/dist/chunks/{index.eEkl9h8v.js → index.BdSLhLDZ.js} +1 -1
  14. package/dist/chunks/{index.D2gVI9Ck.js → index.CcRZ6fUh.js} +1506 -11
  15. package/dist/chunks/{index.Bcjk8TKX.js → index.RwjEGCQ0.js} +2 -2
  16. package/dist/chunks/init-forks.DSafeltJ.js +54 -0
  17. package/dist/chunks/init-threads.SUtZ-067.js +17 -0
  18. package/dist/chunks/{worker.CdzokOSx.js → init.B2EESLQM.js} +97 -80
  19. package/dist/chunks/{inspector.Br76Q2Mb.js → inspector.DLZxSeU3.js} +1 -2
  20. package/dist/chunks/{moduleRunner.d.aXWuQhZN.d.ts → moduleRunner.d.YtNsMIoJ.d.ts} +1 -1
  21. package/dist/chunks/{plugin.d.XtKKWlOO.d.ts → plugin.d.BB__S31E.d.ts} +1 -1
  22. package/dist/chunks/{reporters.d.BJ_OuJGZ.d.ts → reporters.d.C6nGyY9_.d.ts} +1113 -1152
  23. package/dist/chunks/{resolveSnapshotEnvironment.tw2a5ux8.js → resolveSnapshotEnvironment.DJJKMKxb.js} +1 -1
  24. package/dist/chunks/{setup-common.DgXU7Yho.js → setup-common.DR1sucx6.js} +1 -1
  25. package/dist/chunks/{startModuleRunner.DPBo3mme.js → startModuleRunner.C2tTvmF9.js} +3 -1
  26. package/dist/{worker-vm.js → chunks/vm.DBeOXrP9.js} +6 -66
  27. package/dist/chunks/{worker.d.DSgBAZPX.d.ts → worker.d.BFk-vvBU.d.ts} +79 -4
  28. package/dist/cli.js +8 -9
  29. package/dist/config.cjs +0 -1
  30. package/dist/config.d.ts +6 -7
  31. package/dist/config.js +1 -1
  32. package/dist/coverage.d.ts +4 -4
  33. package/dist/coverage.js +2 -13
  34. package/dist/index.d.ts +13 -9
  35. package/dist/index.js +4 -2
  36. package/dist/module-evaluator.d.ts +3 -3
  37. package/dist/module-runner.js +1 -1
  38. package/dist/node.d.ts +79 -15
  39. package/dist/node.js +25 -26
  40. package/dist/reporters.d.ts +4 -4
  41. package/dist/reporters.js +9 -10
  42. package/dist/runners.d.ts +1 -1
  43. package/dist/worker.d.ts +26 -0
  44. package/dist/worker.js +46 -0
  45. package/dist/workers/forks.js +50 -0
  46. package/dist/workers/runVmTests.js +6 -5
  47. package/dist/workers/threads.js +50 -0
  48. package/dist/workers/vmForks.js +35 -0
  49. package/dist/workers/vmThreads.js +35 -0
  50. package/package.json +16 -13
  51. package/worker.d.ts +1 -0
  52. package/dist/chunks/typechecker.DsKAhua5.js +0 -1522
@@ -1,64 +1,26 @@
1
- import fs, { statSync, realpathSync, promises as promises$1, mkdirSync, existsSync, readdirSync, writeFileSync } from 'node:fs';
2
- import path, { win32, dirname, join, resolve as resolve$1 } from 'node:path';
3
- import { createDefer, isExternalUrl, unwrapId, nanoid, withTrailingSlash as withTrailingSlash$1, cleanUrl, wrapId, slash, shuffle, toArray } from '@vitest/utils/helpers';
4
- import { isAbsolute, join as join$1, dirname as dirname$1, resolve, relative, normalize } from 'pathe';
1
+ import fs, { statSync, realpathSync, existsSync, promises, readdirSync, writeFileSync } from 'node:fs';
2
+ import path, { win32, dirname, join } from 'node:path';
3
+ import { slash, shuffle, toArray, cleanUrl } from '@vitest/utils/helpers';
4
+ import { isAbsolute, resolve, relative, normalize } from 'pathe';
5
5
  import pm from 'picomatch';
6
6
  import { glob } from 'tinyglobby';
7
7
  import c from 'tinyrainbow';
8
- import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.CXFFjsi8.js';
8
+ import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.BOqNVLsY.js';
9
9
  import crypto from 'node:crypto';
10
10
  import { builtinModules, createRequire } from 'node:module';
11
11
  import process$1 from 'node:process';
12
- import fs$1, { writeFile, rename, stat, unlink, readFile as readFile$1 } from 'node:fs/promises';
12
+ import fs$1 from 'node:fs/promises';
13
13
  import { fileURLToPath as fileURLToPath$1, pathToFileURL as pathToFileURL$1, URL as URL$1 } from 'node:url';
14
14
  import assert from 'node:assert';
15
15
  import v8 from 'node:v8';
16
16
  import { format, inspect } from 'node:util';
17
- import { fetchModule, version, mergeConfig } from 'vite';
17
+ import { mergeConfig } from 'vite';
18
18
  import { c as configFiles, d as defaultBrowserPort, b as defaultInspectPort, a as defaultPort } from './constants.D_Q9UYh-.js';
19
- import { a as isWindows } from './env.D4Lgay0q.js';
20
- import * as nodeos from 'node:os';
21
- import nodeos__default, { tmpdir } from 'node:os';
22
- import { isatty } from 'node:tty';
23
- import { rootDir } from '../path.js';
24
- import { s as stringify, w as wrapSerializableConfig, a as Typechecker } from './typechecker.DsKAhua5.js';
25
- import createDebug from 'debug';
26
- import EventEmitter from 'node:events';
27
- import { c as createBirpc } from './index.Bgo3tNWt.js';
28
- import Tinypool$1, { Tinypool } from 'tinypool';
29
- import { MessageChannel } from 'node:worker_threads';
30
- import { hasFailed } from '@vitest/runner/utils';
19
+ import './env.D4Lgay0q.js';
20
+ import nodeos__default from 'node:os';
31
21
  import { isCI, provider } from 'std-env';
32
22
  import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js';
33
23
 
34
- function groupBy(collection, iteratee) {
35
- return collection.reduce((acc, item) => {
36
- const key = iteratee(item);
37
- return acc[key] ||= [], acc[key].push(item), acc;
38
- }, {});
39
- }
40
- function stdout() {
41
- // @ts-expect-error Node.js maps process.stdout to console._stdout
42
- // eslint-disable-next-line no-console
43
- return console._stdout || process.stdout;
44
- }
45
- function escapeRegExp(s) {
46
- // From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
47
- return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
48
- }
49
- function wildcardPatternToRegExp(pattern) {
50
- const negated = pattern[0] === "!";
51
- if (negated) pattern = pattern.slice(1);
52
- let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
53
- if (negated) regexp = `(?!${regexp})`;
54
- return new RegExp(`^${regexp}`, "i");
55
- }
56
-
57
- function createDebugger(namespace) {
58
- const debug = createDebug(namespace);
59
- if (debug.enabled) return debug;
60
- }
61
-
62
24
  const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding));
63
25
 
64
26
  const JOIN_LEADING_SLASH_RE = /^\.?\//;
@@ -2392,1162 +2354,6 @@ function getWorkersCountByPercentage(percent) {
2392
2354
  return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
2393
2355
  }
2394
2356
 
2395
- const debug = createDebugger("vitest:browser:pool");
2396
- function createBrowserPool(vitest) {
2397
- const providers = /* @__PURE__ */ new Set(), numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), projectPools = /* @__PURE__ */ new WeakMap(), ensurePool = (project) => {
2398
- if (projectPools.has(project)) return projectPools.get(project);
2399
- debug?.("creating pool for project %s", project.name);
2400
- const resolvedUrls = project.browser.vite.resolvedUrls, origin = resolvedUrls?.local[0] ?? resolvedUrls?.network[0];
2401
- if (!origin) throw new Error(`Can't find browser origin URL for project "${project.name}"`);
2402
- const pool = new BrowserPool(project, {
2403
- maxWorkers: getThreadsCount(project),
2404
- origin
2405
- });
2406
- return projectPools.set(project, pool), vitest.onCancel(() => {
2407
- pool.cancel();
2408
- }), pool;
2409
- }, runWorkspaceTests = async (method, specs) => {
2410
- const groupedFiles = /* @__PURE__ */ new Map();
2411
- for (const { project, moduleId } of specs) {
2412
- const files = groupedFiles.get(project) || [];
2413
- files.push(moduleId), groupedFiles.set(project, files);
2414
- }
2415
- let isCancelled = false;
2416
- vitest.onCancel(() => {
2417
- isCancelled = true;
2418
- });
2419
- const initialisedPools = await Promise.all([...groupedFiles.entries()].map(async ([project, files]) => {
2420
- if (await project._initBrowserProvider(), !project.browser) throw new TypeError(`The browser server was not initialized${project.name ? ` for the "${project.name}" project` : ""}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2421
- if (isCancelled) return;
2422
- debug?.("provider is ready for %s project", project.name);
2423
- const pool = ensurePool(project);
2424
- return vitest.state.clearFiles(project, files), providers.add(project.browser.provider), {
2425
- pool,
2426
- provider: project.browser.provider,
2427
- runTests: () => pool.runTests(method, files)
2428
- };
2429
- }));
2430
- if (isCancelled) return;
2431
- const parallelPools = [], nonParallelPools = [];
2432
- for (const pool of initialisedPools) {
2433
- if (!pool)
2434
- // this means it was cancelled
2435
- return;
2436
- if (pool.provider.mocker && pool.provider.supportsParallelism) parallelPools.push(pool.runTests);
2437
- else nonParallelPools.push(pool.runTests);
2438
- }
2439
- await Promise.all(parallelPools.map((runTests) => runTests()));
2440
- for (const runTests of nonParallelPools) {
2441
- if (isCancelled) return;
2442
- await runTests();
2443
- }
2444
- };
2445
- function getThreadsCount(project) {
2446
- const config = project.config.browser;
2447
- return !config.headless || !config.fileParallelism || !project.browser.provider.supportsParallelism ? 1 : project.config.maxWorkers ? project.config.maxWorkers : threadsCount;
2448
- }
2449
- return {
2450
- name: "browser",
2451
- async close() {
2452
- await Promise.all([...providers].map((provider) => provider.close())), vitest._browserSessions.sessionIds.clear(), providers.clear(), vitest.projects.forEach((project) => {
2453
- project.browser?.state.orchestrators.forEach((orchestrator) => {
2454
- orchestrator.$close();
2455
- });
2456
- }), debug?.("browser pool closed all providers");
2457
- },
2458
- runTests: (files) => runWorkspaceTests("run", files),
2459
- collectTests: (files) => runWorkspaceTests("collect", files)
2460
- };
2461
- }
2462
- function escapePathToRegexp(path) {
2463
- return path.replace(/[/\\.?*()^${}|[\]+]/g, "\\$&");
2464
- }
2465
- class BrowserPool {
2466
- _queue = [];
2467
- _promise;
2468
- _providedContext;
2469
- readySessions = /* @__PURE__ */ new Set();
2470
- constructor(project, options) {
2471
- this.project = project, this.options = options;
2472
- }
2473
- cancel() {
2474
- this._queue = [];
2475
- }
2476
- reject(error) {
2477
- this._promise?.reject(error), this._promise = void 0, this.cancel();
2478
- }
2479
- get orchestrators() {
2480
- return this.project.browser.state.orchestrators;
2481
- }
2482
- async runTests(method, files) {
2483
- if (this._promise ??= createDefer(), !files.length) return debug?.("no tests found, finishing test run immediately"), this._promise.resolve(), this._promise;
2484
- if (this._providedContext = stringify(this.project.getProvidedContext()), this._queue.push(...files), this.readySessions.forEach((sessionId) => {
2485
- if (this._queue.length) this.readySessions.delete(sessionId), this.runNextTest(method, sessionId);
2486
- }), this.orchestrators.size >= this.options.maxWorkers) return debug?.("all orchestrators are ready, not creating more"), this._promise;
2487
- // open the minimum amount of tabs
2488
- // if there is only 1 file running, we don't need 8 tabs running
2489
- const workerCount = Math.min(this.options.maxWorkers - this.orchestrators.size, files.length), promises = [];
2490
- for (let i = 0; i < workerCount; i++) {
2491
- const sessionId = crypto.randomUUID();
2492
- this.project.vitest._browserSessions.sessionIds.add(sessionId);
2493
- const project = this.project.name;
2494
- debug?.("[%s] creating session for %s", sessionId, project);
2495
- const page = this.openPage(sessionId).then(() => {
2496
- // start running tests on the page when it's ready
2497
- this.runNextTest(method, sessionId);
2498
- });
2499
- promises.push(page);
2500
- }
2501
- return await Promise.all(promises), debug?.("all sessions are created"), this._promise;
2502
- }
2503
- async openPage(sessionId) {
2504
- const sessionPromise = this.project.vitest._browserSessions.createSession(sessionId, this.project, this), browser = this.project.browser, url = new URL("/__vitest_test__/", this.options.origin);
2505
- url.searchParams.set("sessionId", sessionId);
2506
- const pagePromise = browser.provider.openPage(sessionId, url.toString());
2507
- await Promise.all([sessionPromise, pagePromise]);
2508
- }
2509
- getOrchestrator(sessionId) {
2510
- const orchestrator = this.orchestrators.get(sessionId);
2511
- if (!orchestrator) throw new Error(`Orchestrator not found for session ${sessionId}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2512
- return orchestrator;
2513
- }
2514
- finishSession(sessionId) {
2515
- // the last worker finished running tests
2516
- if (this.readySessions.add(sessionId), this.readySessions.size === this.orchestrators.size) this._promise?.resolve(), this._promise = void 0, debug?.("[%s] all tests finished running", sessionId);
2517
- else debug?.(`did not finish sessions for ${sessionId}: |ready - %s| |overall - %s|`, [...this.readySessions].join(", "), [...this.orchestrators.keys()].join(", "));
2518
- }
2519
- runNextTest(method, sessionId) {
2520
- const file = this._queue.shift();
2521
- if (!file) {
2522
- // we don't need to cleanup testers if isolation is enabled,
2523
- // because cleanup is done at the end of every test
2524
- if (debug?.("[%s] no more tests to run", sessionId), this.project.config.browser.isolate) {
2525
- this.finishSession(sessionId);
2526
- return;
2527
- }
2528
- this.getOrchestrator(sessionId).cleanupTesters().catch((error) => this.reject(error)).finally(() => this.finishSession(sessionId));
2529
- return;
2530
- }
2531
- if (!this._promise) throw new Error(`Unexpected empty queue`);
2532
- const orchestrator = this.getOrchestrator(sessionId);
2533
- debug?.("[%s] run test %s", sessionId, file), this.setBreakpoint(sessionId, file).then(() => {
2534
- // this starts running tests inside the orchestrator
2535
- orchestrator.createTesters({
2536
- method,
2537
- files: [file],
2538
- providedContext: this._providedContext || "[{}]"
2539
- }).then(() => {
2540
- debug?.("[%s] test %s finished running", sessionId, file), this.runNextTest(method, sessionId);
2541
- }).catch((error) => {
2542
- // if user cancels the test run manually, ignore the error and exit gracefully
2543
- if (this.project.vitest.isCancelling && error instanceof Error && error.message.startsWith("Browser connection was closed while running tests")) {
2544
- this.cancel(), this._promise?.resolve(), this._promise = void 0, debug?.("[%s] browser connection was closed", sessionId);
2545
- return;
2546
- }
2547
- debug?.("[%s] error during %s test run: %s", sessionId, file, error), this.reject(error);
2548
- });
2549
- }).catch((err) => this.reject(err));
2550
- }
2551
- async setBreakpoint(sessionId, file) {
2552
- if (!this.project.config.inspector.waitForDebugger) return;
2553
- const provider = this.project.browser.provider, browser = this.project.config.browser.name;
2554
- if (shouldIgnoreDebugger(provider.name, browser)) {
2555
- debug?.("[$s] ignoring debugger in %s browser because it is not supported", sessionId, browser);
2556
- return;
2557
- }
2558
- if (!provider.getCDPSession) throw new Error("Unable to set breakpoint, CDP not supported");
2559
- debug?.("[%s] set breakpoint for %s", sessionId, file);
2560
- const session = await provider.getCDPSession(sessionId);
2561
- await session.send("Debugger.enable", {}), await session.send("Debugger.setBreakpointByUrl", {
2562
- lineNumber: 0,
2563
- urlRegex: escapePathToRegexp(file)
2564
- });
2565
- }
2566
- }
2567
- function shouldIgnoreDebugger(provider, browser) {
2568
- return provider === "webdriverio" ? browser !== "chrome" && browser !== "edge" : browser !== "chromium";
2569
- }
2570
-
2571
- const envsOrder = [
2572
- "node",
2573
- "jsdom",
2574
- "happy-dom",
2575
- "edge-runtime"
2576
- ];
2577
- async function groupFilesByEnv(files) {
2578
- const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
2579
- const code = await promises$1.readFile(filepath, "utf-8");
2580
- // 1. Check for control comments in the file
2581
- let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
2582
- // 2. Fallback to global env
2583
- env ||= project.config.environment || "node";
2584
- let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
2585
- if (envOptionsJson?.endsWith("*/"))
2586
- // Trim closing Docblock characters the above regex might have captured
2587
- envOptionsJson = envOptionsJson.slice(0, -2);
2588
- const envOptions = JSON.parse(envOptionsJson || "null");
2589
- return {
2590
- file: {
2591
- filepath,
2592
- testLocations: testLines
2593
- },
2594
- project,
2595
- environment: {
2596
- name: env,
2597
- options: envOptions ? { [env === "happy-dom" ? "happyDOM" : env]: envOptions } : null
2598
- }
2599
- };
2600
- }));
2601
- return groupBy(filesWithEnv, ({ environment }) => environment.name);
2602
- }
2603
-
2604
- const created = /* @__PURE__ */ new Set(), promises = /* @__PURE__ */ new Map();
2605
- function createFetchModuleFunction(resolver, cacheFs = false, tmpDir = join$1(tmpdir(), nanoid()), dump) {
2606
- const cachedFsResults = /* @__PURE__ */ new Map();
2607
- return async (url, importer, environment, options) => {
2608
- // We are copy pasting Vite's externalization logic from `fetchModule` because
2609
- // we instead rely on our own `shouldExternalize` method because Vite
2610
- // doesn't support `resolve.external` in non SSR environments (jsdom/happy-dom)
2611
- if (url.startsWith("data:")) return {
2612
- externalize: url,
2613
- type: "builtin"
2614
- };
2615
- if (url === "/@vite/client" || url === "@vite/client")
2616
- // this will be stubbed
2617
- return {
2618
- externalize: "/@vite/client",
2619
- type: "module"
2620
- };
2621
- const isFileUrl = url.startsWith("file://");
2622
- if (isExternalUrl(url) && !isFileUrl) return {
2623
- externalize: url,
2624
- type: "network"
2625
- };
2626
- // Vite does the same in `fetchModule`, but we want to externalize modules ourselves,
2627
- // so we do this first to resolve the module and check its `id`. The next call of
2628
- // `ensureEntryFromUrl` inside `fetchModule` is cached and should take no time
2629
- // This also makes it so externalized modules are inside the module graph.
2630
- const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url)), cached = !!moduleGraphModule.transformResult;
2631
- // if url is already cached, we can just confirm it's also cached on the server
2632
- if (options?.cached && cached) return { cache: true };
2633
- if (moduleGraphModule.id) {
2634
- const externalize = await resolver.shouldExternalize(moduleGraphModule.id);
2635
- if (externalize) return {
2636
- externalize,
2637
- type: "module"
2638
- };
2639
- }
2640
- let moduleRunnerModule;
2641
- if (dump?.dumpFolder && dump.readFromDump) {
2642
- const path = resolve(dump?.dumpFolder, url.replace(/[^\w+]/g, "-"));
2643
- if (existsSync(path)) {
2644
- const code = await readFile$1(path, "utf-8"), matchIndex = code.lastIndexOf("\n//");
2645
- if (matchIndex !== -1) {
2646
- const { id, file } = JSON.parse(code.slice(matchIndex + 4));
2647
- moduleRunnerModule = {
2648
- code,
2649
- id,
2650
- url,
2651
- file,
2652
- invalidate: false
2653
- };
2654
- }
2655
- }
2656
- }
2657
- if (!moduleRunnerModule) moduleRunnerModule = await fetchModule(environment, url, importer, {
2658
- ...options,
2659
- inlineSourceMap: false
2660
- }).catch(handleRollupError);
2661
- const result = processResultSource(environment, moduleRunnerModule);
2662
- if (dump?.dumpFolder && "code" in result) {
2663
- const path = resolve(dump?.dumpFolder, result.url.replace(/[^\w+]/g, "-"));
2664
- await writeFile(path, `${result.code}\n// ${JSON.stringify({
2665
- id: result.id,
2666
- file: result.file
2667
- })}`, "utf-8");
2668
- }
2669
- if (!cacheFs || !("code" in result)) return result;
2670
- const code = result.code;
2671
- // to avoid serialising large chunks of code,
2672
- // we store them in a tmp file and read in the test thread
2673
- if (cachedFsResults.has(result.id)) return getCachedResult(result, cachedFsResults);
2674
- const dir = join$1(tmpDir, environment.name), name = hash("sha1", result.id, "hex"), tmp = join$1(dir, name);
2675
- if (!created.has(dir)) mkdirSync(dir, { recursive: true }), created.add(dir);
2676
- return promises.has(tmp) ? (await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults)) : (promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))), await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults));
2677
- };
2678
- }
2679
- let SOURCEMAPPING_URL = "sourceMa";
2680
- SOURCEMAPPING_URL += "ppingURL";
2681
- const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
2682
- function processResultSource(environment, result) {
2683
- if (!("code" in result)) return result;
2684
- const node = environment.moduleGraph.getModuleById(result.id);
2685
- if (node?.transformResult)
2686
- // this also overrides node.transformResult.code which is also what the module
2687
- // runner does under the hood by default (we disable source maps inlining)
2688
- inlineSourceMap(node.transformResult);
2689
- return {
2690
- ...result,
2691
- code: node?.transformResult?.code || result.code
2692
- };
2693
- }
2694
- const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
2695
- // we have to inline the source map ourselves, because
2696
- // - we don't need //# sourceURL since we are running code in VM
2697
- // - important in stack traces and the V8 coverage
2698
- // - we need to inject an empty line for --inspect-brk
2699
- function inlineSourceMap(result) {
2700
- const map = result.map;
2701
- let code = result.code;
2702
- if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
2703
- if (OTHER_SOURCE_MAP_REGEXP.lastIndex = 0, OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
2704
- const sourceMap = { ...map };
2705
- // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
2706
- // so that debuggers can be set to break on first line
2707
- if (sourceMap.mappings[0] === ";") sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
2708
- return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
2709
- }
2710
- function genSourceMapUrl(map) {
2711
- if (typeof map !== "string") map = JSON.stringify(map);
2712
- return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
2713
- }
2714
- function getCachedResult(result, cachedFsResults) {
2715
- const tmp = cachedFsResults.get(result.id);
2716
- if (!tmp) throw new Error(`The cached result was returned too early for ${result.id}.`);
2717
- return {
2718
- cached: true,
2719
- file: result.file,
2720
- id: result.id,
2721
- tmp,
2722
- url: result.url,
2723
- invalidate: result.invalidate
2724
- };
2725
- }
2726
- // serialize rollup error on server to preserve details as a test error
2727
- function handleRollupError(e) {
2728
- throw e instanceof Error && ("plugin" in e || "frame" in e || "id" in e) ? {
2729
- name: e.name,
2730
- message: e.message,
2731
- stack: e.stack,
2732
- cause: e.cause,
2733
- __vitest_rollup_error__: {
2734
- plugin: e.plugin,
2735
- id: e.id,
2736
- loc: e.loc,
2737
- frame: e.frame
2738
- }
2739
- } : e;
2740
- }
2741
- /**
2742
- * Performs an atomic write operation using the write-then-rename pattern.
2743
- *
2744
- * Why we need this:
2745
- * - Ensures file integrity by never leaving partially written files on disk
2746
- * - Prevents other processes from reading incomplete data during writes
2747
- * - Particularly important for test files where incomplete writes could cause test failures
2748
- *
2749
- * The implementation writes to a temporary file first, then renames it to the target path.
2750
- * This rename operation is atomic on most filesystems (including POSIX-compliant ones),
2751
- * guaranteeing that other processes will only ever see the complete file.
2752
- *
2753
- * Added in https://github.com/vitest-dev/vitest/pull/7531
2754
- */
2755
- async function atomicWriteFile(realFilePath, data) {
2756
- const dir = dirname$1(realFilePath), tmpFilePath = join$1(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
2757
- try {
2758
- await writeFile(tmpFilePath, data, "utf-8"), await rename(tmpFilePath, realFilePath);
2759
- } finally {
2760
- try {
2761
- if (await stat(tmpFilePath)) await unlink(tmpFilePath);
2762
- } catch {}
2763
- }
2764
- }
2765
-
2766
- // this is copy pasted from vite
2767
- function normalizeResolvedIdToUrl(environment, resolvedId) {
2768
- const root = environment.config.root, depsOptimizer = environment.depsOptimizer;
2769
- let url;
2770
- // normalize all imports into resolved URLs
2771
- // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
2772
- if (resolvedId.startsWith(withTrailingSlash$1(root)))
2773
- // in root: infer short absolute path from root
2774
- url = resolvedId.slice(root.length);
2775
- else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
2776
- // an optimized deps may not yet exists in the filesystem, or
2777
- // a regular file exists but is out of root: rewrite to absolute /@fs/ paths
2778
- url = path.posix.join("/@fs/", resolvedId);
2779
- else url = resolvedId;
2780
- // if the resolved id is not a valid browser import specifier,
2781
- // prefix it to make it valid. We will strip this before feeding it
2782
- // back into the transform pipeline
2783
- if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
2784
- return url;
2785
- }
2786
-
2787
- function createMethodsRPC(project, options = {}) {
2788
- const vitest = project.vitest, cacheFs = options.cacheFs ?? false;
2789
- if (project.vitest.state.metadata[project.name] ??= {
2790
- externalized: {},
2791
- duration: {},
2792
- tmps: {}
2793
- }, project.config.dumpDir && !existsSync(project.config.dumpDir)) mkdirSync(project.config.dumpDir, { recursive: true });
2794
- project.vitest.state.metadata[project.name].dumpDir = project.config.dumpDir;
2795
- const fetch = createFetchModuleFunction(project._resolver, cacheFs, project.tmpDir, {
2796
- dumpFolder: project.config.dumpDir,
2797
- readFromDump: project.config.server.debug?.load ?? process.env.VITEST_DEBUG_LOAD_DUMP != null
2798
- });
2799
- return {
2800
- async fetch(url, importer, environmentName, options) {
2801
- const environment = project.vite.environments[environmentName];
2802
- if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2803
- const start = performance.now();
2804
- return await fetch(url, importer, environment, options).then((result) => {
2805
- const duration = performance.now() - start;
2806
- project.vitest.state.transformTime += duration;
2807
- const metadata = project.vitest.state.metadata[project.name];
2808
- if ("externalize" in result) metadata.externalized[url] = result.externalize;
2809
- if ("tmp" in result) metadata.tmps[url] = result.tmp;
2810
- return metadata.duration[url] ??= [], metadata.duration[url].push(duration), result;
2811
- });
2812
- },
2813
- async resolve(id, importer, environmentName) {
2814
- const environment = project.vite.environments[environmentName];
2815
- if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
2816
- const resolved = await environment.pluginContainer.resolveId(id, importer);
2817
- return resolved ? {
2818
- file: cleanUrl(resolved.id),
2819
- url: normalizeResolvedIdToUrl(environment, resolved.id),
2820
- id: resolved.id
2821
- } : null;
2822
- },
2823
- snapshotSaved(snapshot) {
2824
- vitest.snapshot.add(snapshot);
2825
- },
2826
- resolveSnapshotPath(testPath) {
2827
- return vitest.snapshot.resolvePath(testPath, { config: project.serializedConfig });
2828
- },
2829
- async transform(id) {
2830
- const environment = project.vite.environments.__vitest_vm__;
2831
- if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
2832
- const url = normalizeResolvedIdToUrl(environment, fileURLToPath$1(id));
2833
- return { code: (await environment.transformRequest(url).catch(handleRollupError))?.code };
2834
- },
2835
- async onQueued(file) {
2836
- if (options.collect) vitest.state.collectFiles(project, [file]);
2837
- else await vitest._testRun.enqueued(project, file);
2838
- },
2839
- async onCollected(files) {
2840
- if (options.collect) vitest.state.collectFiles(project, files);
2841
- else await vitest._testRun.collected(project, files);
2842
- },
2843
- onAfterSuiteRun(meta) {
2844
- vitest.coverageProvider?.onAfterSuiteRun(meta);
2845
- },
2846
- async onTaskAnnotate(testId, annotation) {
2847
- return vitest._testRun.annotate(testId, annotation);
2848
- },
2849
- async onTaskUpdate(packs, events) {
2850
- if (options.collect) vitest.state.updateTasks(packs);
2851
- else await vitest._testRun.updated(packs, events);
2852
- },
2853
- async onUserConsoleLog(log) {
2854
- if (options.collect) vitest.state.updateUserLog(log);
2855
- else await vitest._testRun.log(log);
2856
- },
2857
- onUnhandledError(err, type) {
2858
- vitest.state.catchError(err, type);
2859
- },
2860
- onCancel(reason) {
2861
- vitest.cancelCurrentRun(reason);
2862
- },
2863
- getCountOfFailedTests() {
2864
- return vitest.state.getCountOfFailedTests();
2865
- }
2866
- };
2867
- }
2868
-
2869
- function createChildProcessChannel$1(project, collect = false) {
2870
- const emitter = new EventEmitter(), events = {
2871
- message: "message",
2872
- response: "response"
2873
- }, rpc = createBirpc(createMethodsRPC(project, {
2874
- cacheFs: true,
2875
- collect
2876
- }), {
2877
- eventNames: ["onCancel"],
2878
- serialize: v8.serialize,
2879
- deserialize: (v) => {
2880
- try {
2881
- return v8.deserialize(Buffer.from(v));
2882
- } catch (error) {
2883
- let stringified = "";
2884
- try {
2885
- stringified = `\nReceived value: ${JSON.stringify(v)}`;
2886
- } catch {}
2887
- throw new Error(`[vitest-pool]: Unexpected call to process.send(). Make sure your test cases are not interfering with process's channel.${stringified}`, { cause: error });
2888
- }
2889
- },
2890
- post(v) {
2891
- emitter.emit(events.message, v);
2892
- },
2893
- on(fn) {
2894
- emitter.on(events.response, fn);
2895
- },
2896
- timeout: -1
2897
- });
2898
- return project.vitest.onCancel((reason) => rpc.onCancel(reason)), {
2899
- onMessage: (callback) => emitter.on(events.message, callback),
2900
- postMessage: (message) => emitter.emit(events.response, message),
2901
- onClose: () => {
2902
- emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
2903
- }
2904
- };
2905
- }
2906
- function createForksPool(vitest, { execArgv, env }, specifications) {
2907
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.forks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
2908
- runtime: "child_process",
2909
- filename: resolve$1(vitest.distPath, "worker-base.js"),
2910
- teardown: "teardown",
2911
- maxThreads,
2912
- minThreads,
2913
- env,
2914
- execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
2915
- terminateTimeout: vitest.config.teardownTimeout,
2916
- concurrentTasksPerWorker: 1
2917
- }, isolated = poolOptions.isolate ?? true;
2918
- if (isolated) options.isolateWorkers = true;
2919
- if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
2920
- const pool = new Tinypool(options), runWithFiles = (name) => {
2921
- let id = 0;
2922
- async function runFiles(project, config, files, environment, invalidates = []) {
2923
- const paths = files.map((f) => f.filepath);
2924
- vitest.state.clearFiles(project, paths);
2925
- const channel = createChildProcessChannel$1(project, name === "collect"), workerId = ++id, data = {
2926
- pool: "forks",
2927
- config,
2928
- files,
2929
- invalidates,
2930
- environment,
2931
- workerId,
2932
- projectName: project.name,
2933
- providedContext: project.getProvidedContext()
2934
- };
2935
- try {
2936
- await pool.run(data, {
2937
- name,
2938
- channel
2939
- });
2940
- } catch (error) {
2941
- // Worker got stuck and won't terminate - this may cause process to hang
2942
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
2943
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
2944
- else throw error;
2945
- }
2946
- }
2947
- return async (specs, invalidates) => {
2948
- // Cancel pending tasks from pool when possible
2949
- vitest.onCancel(() => pool.cancelPendingTasks());
2950
- const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
2951
- if (configs.has(project)) return configs.get(project);
2952
- const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
2953
- return configs.set(project, config), config;
2954
- }, singleFork = specs.filter((spec) => spec.project.config.poolOptions?.forks?.singleFork), multipleForks = specs.filter((spec) => !spec.project.config.poolOptions?.forks?.singleFork);
2955
- if (multipleForks.length) {
2956
- const filesByEnv = await groupFilesByEnv(multipleForks), files = Object.values(filesByEnv).flat(), results = [];
2957
- if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
2958
- else {
2959
- // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
2960
- // Tasks are still running parallel but environments are isolated between tasks.
2961
- const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
2962
- for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
2963
- }
2964
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
2965
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
2966
- }
2967
- if (singleFork.length) {
2968
- const filesByEnv = await groupFilesByEnv(singleFork), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
2969
- for (const env of envs) {
2970
- const files = filesByEnv[env];
2971
- if (!files?.length) continue;
2972
- const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
2973
- for (const files of Object.values(filesByOptions)) {
2974
- // Always run environments isolated between each other
2975
- await pool.recycleWorkers();
2976
- const filenames = files.map((f) => f.file);
2977
- await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
2978
- }
2979
- }
2980
- }
2981
- };
2982
- };
2983
- return {
2984
- name: "forks",
2985
- runTests: runWithFiles("run"),
2986
- collectTests: runWithFiles("collect"),
2987
- close: () => pool.destroy()
2988
- };
2989
- }
2990
-
2991
- function createWorkerChannel$1(project, collect) {
2992
- const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
2993
- eventNames: ["onCancel"],
2994
- post(v) {
2995
- port.postMessage(v);
2996
- },
2997
- on(fn) {
2998
- port.on("message", fn);
2999
- },
3000
- timeout: -1
3001
- });
3002
- return project.vitest.onCancel((reason) => rpc.onCancel(reason)), {
3003
- workerPort,
3004
- port,
3005
- onClose: () => {
3006
- port.close(), workerPort.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3007
- }
3008
- };
3009
- }
3010
- function createThreadsPool(vitest, { execArgv, env }, specifications) {
3011
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.threads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
3012
- filename: resolve$1(vitest.distPath, "worker-base.js"),
3013
- teardown: "teardown",
3014
- useAtomics: poolOptions.useAtomics ?? false,
3015
- maxThreads,
3016
- minThreads,
3017
- env,
3018
- execArgv: [...poolOptions.execArgv ?? [], ...execArgv],
3019
- terminateTimeout: vitest.config.teardownTimeout,
3020
- concurrentTasksPerWorker: 1
3021
- }, isolated = poolOptions.isolate ?? true;
3022
- if (isolated) options.isolateWorkers = true;
3023
- if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3024
- const pool = new Tinypool$1(options), runWithFiles = (name) => {
3025
- let id = 0;
3026
- async function runFiles(project, config, files, environment, invalidates = []) {
3027
- const paths = files.map((f) => f.filepath);
3028
- vitest.state.clearFiles(project, paths);
3029
- const { workerPort, onClose } = createWorkerChannel$1(project, name === "collect"), workerId = ++id, data = {
3030
- pool: "threads",
3031
- port: workerPort,
3032
- config,
3033
- files,
3034
- invalidates,
3035
- environment,
3036
- workerId,
3037
- projectName: project.name,
3038
- providedContext: project.getProvidedContext()
3039
- };
3040
- try {
3041
- await pool.run(data, {
3042
- transferList: [workerPort],
3043
- name,
3044
- channel: { onClose }
3045
- });
3046
- } catch (error) {
3047
- // Worker got stuck and won't terminate - this may cause process to hang
3048
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
3049
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3050
- else throw error;
3051
- }
3052
- }
3053
- return async (specs, invalidates) => {
3054
- // Cancel pending tasks from pool when possible
3055
- vitest.onCancel(() => pool.cancelPendingTasks());
3056
- const configs = /* @__PURE__ */ new WeakMap(), getConfig = (project) => {
3057
- if (configs.has(project)) return configs.get(project);
3058
- const config = project.serializedConfig;
3059
- return configs.set(project, config), config;
3060
- }, singleThreads = specs.filter((spec) => spec.project.config.poolOptions?.threads?.singleThread), multipleThreads = specs.filter((spec) => !spec.project.config.poolOptions?.threads?.singleThread);
3061
- if (multipleThreads.length) {
3062
- const filesByEnv = await groupFilesByEnv(multipleThreads), files = Object.values(filesByEnv).flat(), results = [];
3063
- if (isolated) results.push(...await Promise.allSettled(files.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates))));
3064
- else {
3065
- // When isolation is disabled, we still need to isolate environments and workspace projects from each other.
3066
- // Tasks are still running parallel but environments are isolated between tasks.
3067
- const grouped = groupBy(files, ({ project, environment }) => project.name + environment.name + JSON.stringify(environment.options));
3068
- for (const group of Object.values(grouped)) results.push(...await Promise.allSettled(group.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))), await new Promise((resolve) => pool.queueSize === 0 ? resolve() : pool.once("drain", resolve)), await pool.recycleWorkers();
3069
- }
3070
- const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
3071
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3072
- }
3073
- if (singleThreads.length) {
3074
- const filesByEnv = await groupFilesByEnv(singleThreads), envs = envsOrder.concat(Object.keys(filesByEnv).filter((env) => !envsOrder.includes(env)));
3075
- for (const env of envs) {
3076
- const files = filesByEnv[env];
3077
- if (!files?.length) continue;
3078
- const filesByOptions = groupBy(files, ({ project, environment }) => project.name + JSON.stringify(environment.options));
3079
- for (const files of Object.values(filesByOptions)) {
3080
- // Always run environments isolated between each other
3081
- await pool.recycleWorkers();
3082
- const filenames = files.map((f) => f.file);
3083
- await runFiles(files[0].project, getConfig(files[0].project), filenames, files[0].environment, invalidates);
3084
- }
3085
- }
3086
- }
3087
- };
3088
- };
3089
- return {
3090
- name: "threads",
3091
- runTests: runWithFiles("run"),
3092
- collectTests: runWithFiles("collect"),
3093
- close: () => pool.destroy()
3094
- };
3095
- }
3096
-
3097
- function createTypecheckPool(vitest) {
3098
- const promisesMap = /* @__PURE__ */ new WeakMap(), rerunTriggered = /* @__PURE__ */ new WeakSet();
3099
- async function onParseEnd(project, { files, sourceErrors }) {
3100
- const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
3101
- if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
3102
- if (!hasFailed(files) && !sourceErrors.length && checker.getExitCode()) {
3103
- const error = new Error(checker.getOutput());
3104
- error.stack = "", vitest.state.catchError(error, "Typecheck Error");
3105
- }
3106
- // triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
3107
- if (promisesMap.get(project)?.resolve(), rerunTriggered.delete(project), vitest.config.watch && !vitest.runningPromise) {
3108
- const modules = files.map((file) => vitest.state.getReportedEntity(file)).filter((e) => e?.type === "module"), state = vitest.isCancelling ? "interrupted" : modules.some((m) => !m.ok()) ? "failed" : "passed";
3109
- await vitest.report("onTestRunEnd", modules, [], state), await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
3110
- }
3111
- }
3112
- async function createWorkspaceTypechecker(project, files) {
3113
- const checker = project.typechecker ?? new Typechecker(project);
3114
- return project.typechecker ? checker : (project.typechecker = checker, checker.setFiles(files), checker.onParseStart(async () => {
3115
- const files = checker.getTestFiles();
3116
- for (const file of files) await vitest._testRun.enqueued(project, file);
3117
- await vitest._testRun.collected(project, files);
3118
- }), checker.onParseEnd((result) => onParseEnd(project, result)), checker.onWatcherRerun(async () => {
3119
- if (rerunTriggered.add(project), !vitest.runningPromise) vitest.state.clearErrors(), await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
3120
- await checker.collectTests();
3121
- const testFiles = checker.getTestFiles();
3122
- for (const file of testFiles) await vitest._testRun.enqueued(project, file);
3123
- await vitest._testRun.collected(project, testFiles);
3124
- const { packs, events } = checker.getTestPacksAndEvents();
3125
- await vitest._testRun.updated(packs, events);
3126
- }), checker);
3127
- }
3128
- async function startTypechecker(project, files) {
3129
- if (project.typechecker) return;
3130
- const checker = await createWorkspaceTypechecker(project, files);
3131
- await checker.collectTests(), await checker.start();
3132
- }
3133
- async function collectTests(specs) {
3134
- const specsByProject = groupBy(specs, (spec) => spec.project.name);
3135
- for (const name in specsByProject) {
3136
- const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), checker = await createWorkspaceTypechecker(project, files);
3137
- checker.setFiles(files), await checker.collectTests();
3138
- const testFiles = checker.getTestFiles();
3139
- vitest.state.collectFiles(project, testFiles);
3140
- }
3141
- }
3142
- async function runTests(specs) {
3143
- const specsByProject = groupBy(specs, (spec) => spec.project.name), promises = [];
3144
- for (const name in specsByProject) {
3145
- const project = specsByProject[name][0].project, files = specsByProject[name].map((spec) => spec.moduleId), promise = createDefer(), triggered = await new Promise((resolve) => {
3146
- const _i = setInterval(() => {
3147
- if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
3148
- });
3149
- setTimeout(() => {
3150
- resolve(false), clearInterval(_i);
3151
- }, 500).unref();
3152
- });
3153
- if (project.typechecker && !triggered) {
3154
- const testFiles = project.typechecker.getTestFiles();
3155
- for (const file of testFiles) await vitest._testRun.enqueued(project, file);
3156
- await vitest._testRun.collected(project, testFiles), await onParseEnd(project, project.typechecker.getResult());
3157
- continue;
3158
- }
3159
- promises.push(promise), promisesMap.set(project, promise), promises.push(startTypechecker(project, files));
3160
- }
3161
- await Promise.all(promises);
3162
- }
3163
- return {
3164
- name: "typescript",
3165
- runTests,
3166
- collectTests,
3167
- async close() {
3168
- const promises = vitest.projects.map((project) => project.typechecker?.stop());
3169
- await Promise.all(promises);
3170
- }
3171
- };
3172
- }
3173
-
3174
- function getDefaultThreadsCount(config) {
3175
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
3176
- return config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
3177
- }
3178
- function getWorkerMemoryLimit(config, pool) {
3179
- if (pool === "vmForks") {
3180
- const opts = config.poolOptions?.vmForks ?? {};
3181
- return opts.memoryLimit ? opts.memoryLimit : 1 / (opts.maxForks ?? getDefaultThreadsCount(config));
3182
- } else {
3183
- const opts = config.poolOptions?.vmThreads ?? {};
3184
- return opts.memoryLimit ? opts.memoryLimit : 1 / (opts.maxThreads ?? getDefaultThreadsCount(config));
3185
- }
3186
- }
3187
- /**
3188
- * Converts a string representing an amount of memory to bytes.
3189
- *
3190
- * @param input The value to convert to bytes.
3191
- * @param percentageReference The reference value to use when a '%' value is supplied.
3192
- */
3193
- function stringToBytes(input, percentageReference) {
3194
- if (input === null || input === void 0) return input;
3195
- if (typeof input === "string") if (Number.isNaN(Number.parseFloat(input.slice(-1)))) {
3196
- let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
3197
- if (trailingChars && numericString) {
3198
- const numericValue = Number.parseFloat(numericString);
3199
- switch (trailingChars = trailingChars.toLowerCase(), trailingChars) {
3200
- case "%":
3201
- input = numericValue / 100;
3202
- break;
3203
- case "kb":
3204
- case "k": return numericValue * 1e3;
3205
- case "kib": return numericValue * 1024;
3206
- case "mb":
3207
- case "m": return numericValue * 1e3 * 1e3;
3208
- case "mib": return numericValue * 1024 * 1024;
3209
- case "gb":
3210
- case "g": return numericValue * 1e3 * 1e3 * 1e3;
3211
- case "gib": return numericValue * 1024 * 1024 * 1024;
3212
- }
3213
- }
3214
- } else input = Number.parseFloat(input);
3215
- if (typeof input === "number") if (input <= 1 && input > 0) {
3216
- if (percentageReference) return Math.floor(input * percentageReference);
3217
- throw new Error("For a percentage based memory limit a percentageReference must be supplied");
3218
- } else if (input > 1) return Math.floor(input);
3219
- else throw new Error("Unexpected numerical input for \"memoryLimit\"");
3220
- return null;
3221
- }
3222
-
3223
- function createChildProcessChannel(project, collect) {
3224
- const emitter = new EventEmitter(), events = {
3225
- message: "message",
3226
- response: "response"
3227
- }, rpc = createBirpc(createMethodsRPC(project, {
3228
- cacheFs: true,
3229
- collect
3230
- }), {
3231
- eventNames: ["onCancel"],
3232
- serialize: v8.serialize,
3233
- deserialize: (v) => {
3234
- try {
3235
- return v8.deserialize(Buffer.from(v));
3236
- } catch (error) {
3237
- let stringified = "";
3238
- try {
3239
- stringified = `\nReceived value: ${JSON.stringify(v)}`;
3240
- } catch {}
3241
- throw new Error(`[vitest-pool]: Unexpected call to process.send(). Make sure your test cases are not interfering with process's channel.${stringified}`, { cause: error });
3242
- }
3243
- },
3244
- post(v) {
3245
- emitter.emit(events.message, v);
3246
- },
3247
- on(fn) {
3248
- emitter.on(events.response, fn);
3249
- },
3250
- timeout: -1
3251
- });
3252
- return project.vitest.onCancel((reason) => rpc.onCancel(reason)), { channel: {
3253
- onMessage: (callback) => emitter.on(events.message, callback),
3254
- postMessage: (message) => emitter.emit(events.response, message),
3255
- onClose: () => {
3256
- emitter.removeAllListeners(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
3257
- }
3258
- } };
3259
- }
3260
- function createVmForksPool(vitest, { execArgv, env }, specifications) {
3261
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmForks ?? {}, maxThreads = poolOptions.maxForks ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
3262
- runtime: "child_process",
3263
- filename: resolve$1(vitest.distPath, "worker-vm.js"),
3264
- maxThreads,
3265
- minThreads,
3266
- env,
3267
- execArgv: [
3268
- "--experimental-vm-modules",
3269
- ...poolOptions.execArgv ?? [],
3270
- ...execArgv
3271
- ],
3272
- terminateTimeout: vitest.config.teardownTimeout,
3273
- concurrentTasksPerWorker: 1,
3274
- maxMemoryLimitBeforeRecycle: getMemoryLimit$1(vitest.config) || void 0
3275
- };
3276
- if (poolOptions.singleFork || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
3277
- const pool = new Tinypool$1(options), runWithFiles = (name) => {
3278
- let id = 0;
3279
- async function runFiles(project, config, files, environment, invalidates = []) {
3280
- const paths = files.map((f) => f.filepath);
3281
- vitest.state.clearFiles(project, paths);
3282
- const { channel } = createChildProcessChannel(project, name === "collect"), workerId = ++id, data = {
3283
- pool: "vmForks",
3284
- config,
3285
- files,
3286
- invalidates,
3287
- environment,
3288
- workerId,
3289
- projectName: project.name,
3290
- providedContext: project.getProvidedContext()
3291
- };
3292
- try {
3293
- await pool.run(data, {
3294
- name,
3295
- channel
3296
- });
3297
- } catch (error) {
3298
- // Worker got stuck and won't terminate - this may cause process to hang
3299
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}.`);
3300
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
3301
- else throw error;
3302
- } finally {
3303
- channel.onClose();
3304
- }
3305
- }
3306
- return async (specs, invalidates) => {
3307
- // Cancel pending tasks from pool when possible
3308
- vitest.onCancel(() => pool.cancelPendingTasks());
3309
- const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
3310
- if (configs.has(project)) return configs.get(project);
3311
- const _config = project.serializedConfig, config = wrapSerializableConfig(_config);
3312
- return configs.set(project, config), config;
3313
- }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), errors = (await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))).filter((r) => r.status === "rejected").map((r) => r.reason);
3314
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
3315
- };
3316
- };
3317
- return {
3318
- name: "vmForks",
3319
- runTests: runWithFiles("run"),
3320
- collectTests: runWithFiles("collect"),
3321
- close: () => pool.destroy()
3322
- };
3323
- }
3324
- function getMemoryLimit$1(config) {
3325
- const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmForks");
3326
- // just ignore "memoryLimit" value because we cannot detect memory limit
3327
- return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
3328
- }
3329
-
- function createWorkerChannel(project, collect) {
- const channel = new MessageChannel(), port = channel.port2, workerPort = channel.port1, rpc = createBirpc(createMethodsRPC(project, { collect }), {
- eventNames: ["onCancel"],
- post(v) {
- port.postMessage(v);
- },
- on(fn) {
- port.on("message", fn);
- },
- timeout: -1
- });
- project.vitest.onCancel((reason) => rpc.onCancel(reason));
- function onClose() {
- workerPort.close(), port.close(), rpc.$close(/* @__PURE__ */ new Error("[vitest-pool]: Pending methods while closing rpc"));
- }
- return {
- workerPort,
- onClose
- };
- }
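createWorkerChannel above pairs a node MessageChannel with a birpc endpoint so the main process can answer RPC calls coming from a worker thread. A minimal, self-contained sketch of the same wiring; the serverFunctions object is a placeholder, not vitest's createMethodsRPC:

import { MessageChannel } from 'node:worker_threads';
import { createBirpc } from 'birpc';

// Functions the worker is allowed to call back on the main process (placeholder)
const serverFunctions = {
  fetch: async (id) => ({ id }),
};

const channel = new MessageChannel();
const port = channel.port2; // stays in the main process
const workerPort = channel.port1; // handed to the worker via transferList

const rpc = createBirpc(serverFunctions, {
  eventNames: ['onCancel'], // fire-and-forget calls towards the worker
  post: (v) => port.postMessage(v),
  on: (fn) => port.on('message', fn),
  timeout: -1, // worker calls can be long-running, so never time out
});

// rpc.onCancel('keyboard-input') notifies the worker; closing both ports tears the channel down.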
- function createVmThreadsPool(vitest, { execArgv, env }, specifications) {
- const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, threadsCount = vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1), recommendedCount = vitest.config.watch ? threadsCount : Math.min(threadsCount, specifications.length), poolOptions = vitest.config.poolOptions?.vmThreads ?? {}, maxThreads = poolOptions.maxThreads ?? vitest.config.maxWorkers ?? recommendedCount, minThreads = vitest.config.watch ? Math.min(recommendedCount, maxThreads) : 0, options = {
- filename: resolve$1(vitest.distPath, "worker-vm.js"),
- useAtomics: poolOptions.useAtomics ?? false,
- maxThreads,
- minThreads,
- env,
- execArgv: [
- "--experimental-vm-modules",
- ...poolOptions.execArgv ?? [],
- ...execArgv
- ],
- terminateTimeout: vitest.config.teardownTimeout,
- concurrentTasksPerWorker: 1,
- maxMemoryLimitBeforeRecycle: getMemoryLimit(vitest.config) || void 0
- };
- if (poolOptions.singleThread || !vitest.config.fileParallelism) options.maxThreads = 1, options.minThreads = 1;
- const pool = new Tinypool$1(options), runWithFiles = (name) => {
- let id = 0;
- async function runFiles(project, config, files, environment, invalidates = []) {
- const paths = files.map((f) => f.filepath);
- vitest.state.clearFiles(project, paths);
- const { workerPort, onClose } = createWorkerChannel(project, name === "collect"), workerId = ++id, data = {
- pool: "vmThreads",
- port: workerPort,
- config,
- files: paths,
- invalidates,
- environment,
- workerId,
- projectName: project.name,
- providedContext: project.getProvidedContext()
- };
- try {
- await pool.run(data, {
- transferList: [workerPort],
- name
- });
- } catch (error) {
- // Worker got stuck and won't terminate - this may cause process to hang
- if (error instanceof Error && /Failed to terminate worker/.test(error.message)) vitest.state.addProcessTimeoutCause(`Failed to terminate worker while running ${paths.join(", ")}. \nSee https://vitest.dev/guide/common-errors.html#failed-to-terminate-worker for troubleshooting.`);
- else if (vitest.isCancelling && error instanceof Error && /The task has been cancelled/.test(error.message)) vitest.state.cancelFiles(paths, project);
- else throw error;
- } finally {
- onClose();
- }
- }
- return async (specs, invalidates) => {
- // Cancel pending tasks from pool when possible
- vitest.onCancel(() => pool.cancelPendingTasks());
- const configs = /* @__PURE__ */ new Map(), getConfig = (project) => {
- if (configs.has(project)) return configs.get(project);
- const config = project.serializedConfig;
- return configs.set(project, config), config;
- }, filesByEnv = await groupFilesByEnv(specs), promises = Object.values(filesByEnv).flat(), errors = (await Promise.allSettled(promises.map(({ file, environment, project }) => runFiles(project, getConfig(project), [file], environment, invalidates)))).filter((r) => r.status === "rejected").map((r) => r.reason);
- if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
- };
- };
- return {
- name: "vmThreads",
- runTests: runWithFiles("run"),
- collectTests: runWithFiles("collect"),
- close: () => pool.destroy()
- };
- }
- function getMemoryLimit(config) {
- const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config, "vmThreads");
- // just ignore "memoryLimit" value because we cannot detect memory limit
- return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
- }
-
- const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
- const builtinPools = [
- "forks",
- "threads",
- "browser",
- "vmThreads",
- "vmForks",
- "typescript"
- ];
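The names in builtinPools are the values accepted by the test.pool option; for example, a config opting into the vm-based forks pool (illustrative vitest.config.js):

// vitest.config.js — picks one of the built-in pools listed above
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    pool: 'vmForks',
  },
});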
- function getDefaultPoolName(project) {
- return project.config.browser.enabled ? "browser" : project.config.pool;
- }
- function getFilePoolName(project) {
- return getDefaultPoolName(project);
- }
- function createPool(ctx) {
- const pools = {
- forks: null,
- threads: null,
- browser: null,
- vmThreads: null,
- vmForks: null,
- typescript: null
- }, viteMajor = Number(version.split(".")[0]), conditions = [...new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
- "production",
- "development",
- ...ctx.vite.config.resolve.conditions
- ])].filter((condition) => {
- return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
- }).map((condition) => {
- return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
- }).flatMap((c) => ["--conditions", c]), execArgv = process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"));
- async function executeTests(method, files, invalidate) {
- const options = {
- execArgv: [
- ...execArgv,
- ...conditions,
- "--experimental-import-meta-resolve",
- "--require",
- suppressWarningsPath
- ],
- env: {
- TEST: "true",
- VITEST: "true",
- NODE_ENV: process.env.NODE_ENV || "test",
- VITEST_MODE: ctx.config.watch ? "WATCH" : "RUN",
- FORCE_TTY: isatty(1) ? "true" : "",
- ...process.env,
- ...ctx.config.env
- }
- };
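These env entries mean every worker process can detect that it is running under Vitest; for instance, a setup file can branch on them (minimal sketch):

// setup file executed inside a worker spawned with the env block above
if (process.env.VITEST === 'true') {
  // TEST, VITEST and NODE_ENV (default "test") are all set before user code runs
  console.log(`running in ${process.env.VITEST_MODE === 'WATCH' ? 'watch' : 'run'} mode`);
}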
- // env are case-insensitive on Windows, but spawned processes don't support it
- if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
- const poolConcurrentPromises = /* @__PURE__ */ new Map(), customPools = /* @__PURE__ */ new Map();
- async function resolveCustomPool(filepath) {
- if (customPools.has(filepath)) return customPools.get(filepath);
- const pool = await ctx.runner.import(filepath);
- if (typeof pool.default !== "function") throw new TypeError(`Custom pool "${filepath}" must export a function as default export`);
- const poolInstance = await pool.default(ctx, options);
- if (typeof poolInstance?.name !== "string") throw new TypeError(`Custom pool "${filepath}" should return an object with "name" property`);
- if (typeof poolInstance?.[method] !== "function") throw new TypeError(`Custom pool "${filepath}" should return an object with "${method}" method`);
- return customPools.set(filepath, poolInstance), poolInstance;
- }
- function getConcurrentPool(pool, fn) {
- if (poolConcurrentPromises.has(pool)) return poolConcurrentPromises.get(pool);
- const promise = fn().finally(() => {
- poolConcurrentPromises.delete(pool);
- });
- return poolConcurrentPromises.set(pool, promise), promise;
- }
- function getCustomPool(pool) {
- return getConcurrentPool(pool, () => resolveCustomPool(pool));
- }
- const groupedSpecifications = {}, groups = /* @__PURE__ */ new Set(), factories = {
- vmThreads: (specs) => createVmThreadsPool(ctx, options, specs),
- vmForks: (specs) => createVmForksPool(ctx, options, specs),
- threads: (specs) => createThreadsPool(ctx, options, specs),
- forks: (specs) => createForksPool(ctx, options, specs),
- typescript: () => createTypecheckPool(ctx),
- browser: () => createBrowserPool(ctx)
- };
- for (const spec of files) {
- const group = spec.project.config.sequence.groupOrder ?? 0;
- groups.add(group), groupedSpecifications[group] ??= [], groupedSpecifications[group].push(spec);
- }
- const Sequencer = ctx.config.sequence.sequencer, sequencer = new Sequencer(ctx);
- async function sortSpecs(specs) {
- if (ctx.config.shard) {
- if (!ctx.config.passWithNoTests && ctx.config.shard.count > specs.length) throw new Error(`--shard <count> must be a smaller than count of test files. Resolved ${specs.length} test files for --shard=${ctx.config.shard.index}/${ctx.config.shard.count}.`);
- specs = await sequencer.shard(specs);
- }
- return sequencer.sort(specs);
- }
- const sortedGroups = Array.from(groups).sort();
- for (const group of sortedGroups) {
- const specifications = groupedSpecifications[group];
- if (!specifications?.length) continue;
- const filesByPool = {
- forks: [],
- threads: [],
- vmThreads: [],
- vmForks: [],
- typescript: []
- };
- specifications.forEach((specification) => {
- const pool = specification.pool;
- filesByPool[pool] ??= [], filesByPool[pool].push(specification);
- }), await Promise.all(Object.entries(filesByPool).map(async (entry) => {
- const [pool, files] = entry;
- if (!files.length) return null;
- const specs = await sortSpecs(files);
- if (pool in factories) {
- const factory = factories[pool];
- return pools[pool] ??= factory(specs), pools[pool][method](specs, invalidate);
- }
- const poolHandler = await getCustomPool(pool);
- return pools[poolHandler.name] ??= poolHandler, poolHandler[method](specs, invalidate);
- }));
- }
- }
- return {
- name: "default",
- runTests: (files, invalidates) => executeTests("runTests", files, invalidates),
- collectTests: (files, invalidates) => executeTests("collectTests", files, invalidates),
- async close() {
- await Promise.all(Object.values(pools).map((p) => p?.close?.()));
- }
- };
- }
-
  class BaseSequencer {
  ctx;
  constructor(ctx) {
@@ -3640,7 +2446,8 @@ function resolveConfig$1(vitest, options, viteConfig) {
  root: viteConfig.root,
  mode
  };
- if (resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enabled Browser Mode, use "test.browser.enabled" instead.`);
+ if (options.pool && typeof options.pool !== "string") resolved.pool = options.pool.name, resolved.poolRunner = options.pool;
+ if (resolved.pool ??= "forks", resolved.project = toArray(resolved.project), resolved.provide ??= {}, resolved.name = typeof options.name === "string" ? options.name : options.name?.label || "", resolved.color = typeof options.name !== "string" ? options.name?.color : void 0, resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enabled Browser Mode, use "test.browser.enabled" instead.`);
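The added branch lets test.pool be an object rather than a pool name: its name becomes the resolved pool id and the object itself is kept as poolRunner. Only the name property is read here, so an object-form config could look roughly like the sketch below; the runTests/collectTests/close shape is an assumption based on the pool objects returned elsewhere in this bundle, not a documented contract:

// vitest.config.js — hypothetical object-form pool; only "name" is required by the code above
import { defineConfig } from 'vitest/config';

const myPoolRunner = {
  name: 'my-pool',
  // assumed methods, mirroring the { runTests, collectTests, close } objects
  // returned by the built-in pools in this bundle
  runTests: async (specs) => { /* run the given specifications */ },
  collectTests: async (specs) => { /* collect without executing */ },
  close: async () => {},
};

export default defineConfig({
  test: {
    pool: myPoolRunner,
  },
});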
  const inspector = resolved.inspect || resolved.inspectBrk;
  if (resolved.inspector = {
  ...resolved.inspector,
@@ -3666,10 +2473,9 @@ function resolveConfig$1(vitest, options, viteConfig) {
  resolved.maxWorkers = 1;
  if (resolved.maxConcurrency === 0) logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)), resolved.maxConcurrency = configDefaults.maxConcurrency;
  if (resolved.inspect || resolved.inspectBrk) {
- const isSingleThread = resolved.pool === "threads" && resolved.poolOptions?.threads?.singleThread, isSingleFork = resolved.pool === "forks" && resolved.poolOptions?.forks?.singleFork;
- if (resolved.fileParallelism && !isSingleThread && !isSingleFork) {
+ if (resolved.fileParallelism) {
  const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`;
- throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism", "poolOptions.threads.singleThread" or "poolOptions.forks.singleFork"`);
+ throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism"`);
  }
  }
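With the single-thread/single-fork escape hatches gone, the debugger flags simply require file parallelism to be off. An equivalent, illustrative config-side setup:

// vitest.config.js — illustrative; --inspect-brk on the CLI maps to test.inspectBrk
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    inspectBrk: true,
    fileParallelism: false, // required, otherwise the error above is thrown
  },
});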
  // apply browser CLI options only if the config already has the browser config and not disabled manually
@@ -3750,7 +2556,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
  "**/virtual:*",
  "**/__x00__*",
  "**/node_modules/**"
- ].filter((pattern) => pattern != null), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
+ ].filter((pattern) => typeof pattern === "string"), resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles], resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude);
  if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root);
  if (resolved.attachmentsDir = resolve(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"), resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root);
  if (resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0, resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) {
@@ -3765,31 +2571,8 @@ function resolveConfig$1(vitest, options, viteConfig) {
  resolveSnapshotPath: options.resolveSnapshotPath,
  snapshotEnvironment: null
  }, resolved.snapshotSerializers ??= [], resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)), resolved.forceRerunTriggers.push(...resolved.snapshotSerializers), options.resolveSnapshotPath) delete resolved.resolveSnapshotPath;
- if (resolved.pool ??= "threads", process.env.VITEST_MAX_THREADS) resolved.poolOptions = {
- ...resolved.poolOptions,
- threads: {
- ...resolved.poolOptions?.threads,
- maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
- },
- vmThreads: {
- ...resolved.poolOptions?.vmThreads,
- maxThreads: Number.parseInt(process.env.VITEST_MAX_THREADS)
- }
- };
- if (process.env.VITEST_MAX_FORKS) resolved.poolOptions = {
- ...resolved.poolOptions,
- forks: {
- ...resolved.poolOptions?.forks,
- maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
- },
- vmForks: {
- ...resolved.poolOptions?.vmForks,
- maxForks: Number.parseInt(process.env.VITEST_MAX_FORKS)
- }
- };
- for (const [poolOptionKey, workerOptionKey] of [["threads", "maxThreads"], ["vmThreads", "maxThreads"]]) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
- for (const [poolOptionKey, workerOptionKey] of [["forks", "maxForks"], ["vmForks", "maxForks"]]) if (resolved.poolOptions?.[poolOptionKey]?.[workerOptionKey]) resolved.poolOptions[poolOptionKey][workerOptionKey] = resolveInlineWorkerOption(resolved.poolOptions[poolOptionKey][workerOptionKey]);
- if (!builtinPools.includes(resolved.pool)) resolved.pool = resolvePath(resolved.pool, resolved.root);
+ if (resolved.execArgv ??= [], resolved.pool ??= "threads", resolved.pool === "vmForks" || resolved.pool === "vmThreads" || resolved.pool === "typescript") resolved.isolate = false;
+ if (process.env.VITEST_MAX_WORKERS) resolved.maxWorkers = Number.parseInt(process.env.VITEST_MAX_WORKERS);
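The VITEST_MAX_WORKERS variable feeds directly into maxWorkers; an illustrative way to exercise it from a script that starts Vitest programmatically:

// illustrative: cap the worker count before starting Vitest from Node
process.env.VITEST_MAX_WORKERS = '4';

const { startVitest } = await import('vitest/node');
await startVitest('test');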
  if (mode === "benchmark") {
  resolved.benchmark = {
  ...benchmarkConfigDefaults,
@@ -3864,6 +2647,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
  ...resolved.typecheck
  }, resolved.typecheck ??= {}, resolved.typecheck.enabled ??= false, resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it."));
  if (resolved.browser.enabled ??= false, resolved.browser.headless ??= isCI, resolved.browser.isolate ??= true, resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark", resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI, resolved.browser.commands ??= {}, resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve(resolved.root, resolved.browser.screenshotDirectory);
+ if (resolved.inspector.enabled) resolved.browser.trackUnhandledErrors ??= false;
  if (resolved.browser.viewport ??= {}, resolved.browser.viewport.width ??= 414, resolved.browser.viewport.height ??= 896, resolved.browser.locators ??= {}, resolved.browser.locators.testIdAttribute ??= "data-testid", typeof resolved.browser.provider === "string") {
  const source = `@vitest/browser-${resolved.browser.provider}`;
  throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/guide/browser/config#provider`);
@@ -3890,7 +2674,7 @@ function resolveConfig$1(vitest, options, viteConfig) {
  const userFolder = resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP;
  resolved.dumpDir = resolve(resolved.root, typeof userFolder === "string" && userFolder !== "true" ? userFolder : ".vitest-dump", resolved.name || "root");
  }
- return resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
+ return resolved.testTimeout ??= resolved.browser.enabled ? 3e4 : 5e3, resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4, resolved;
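In the returned defaults, 3e4 and 5e3 are plain milliseconds: testTimeout resolves to 30 000 ms when the browser is enabled and 5 000 ms otherwise, hookTimeout to 30 000 ms or 10 000 ms. Individual tests can still override the computed default (illustrative sketch):

import { test } from 'vitest';

// per-test override; without it, the defaults computed above apply
test('slow integration case', { timeout: 10_000 }, async () => {
  // ...
});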
  }
  function isBrowserEnabled(config) {
  return Boolean(config.browser?.enabled);
@@ -4019,17 +2803,17 @@ Update your dependencies and make sure the versions match.`));
  return this.options;
  }
  async clean(clean = true) {
- if (clean && existsSync(this.options.reportsDirectory)) await promises$1.rm(this.options.reportsDirectory, {
+ if (clean && existsSync(this.options.reportsDirectory)) await promises.rm(this.options.reportsDirectory, {
  recursive: true,
  force: true,
  maxRetries: 10
  });
- if (existsSync(this.coverageFilesDirectory)) await promises$1.rm(this.coverageFilesDirectory, {
+ if (existsSync(this.coverageFilesDirectory)) await promises.rm(this.coverageFilesDirectory, {
  recursive: true,
  force: true,
  maxRetries: 10
  });
- await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
+ await promises.mkdir(this.coverageFilesDirectory, { recursive: true }), this.coverageFiles = /* @__PURE__ */ new Map(), this.pendingPromises = [];
  }
  onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
  if (!coverage) return;
@@ -4038,7 +2822,7 @@ Update your dependencies and make sure the versions match.`));
  const testFilenames = testFiles.join(), filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
  // If there's a result from previous run, overwrite it
  entry[environment] ??= {}, entry[environment][testFilenames] = filename;
- const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
+ const promise = promises.writeFile(filename, JSON.stringify(coverage), "utf-8");
  this.pendingPromises.push(promise);
  }
  async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
@@ -4050,7 +2834,7 @@ Update your dependencies and make sure the versions match.`));
  for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
  if (onDebug.enabled) index += chunk.length, onDebug(`Reading coverage results ${index}/${total}`);
  await Promise.all(chunk.map(async (filename) => {
- const contents = await promises$1.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
+ const contents = await promises.readFile(filename, "utf-8"), coverage = JSON.parse(contents);
  onFileRead(coverage);
  }));
  }
@@ -4059,7 +2843,7 @@ Update your dependencies and make sure the versions match.`));
  }
  async cleanAfterRun() {
  // Remove empty reports directory, e.g. when only text-reporter is used
- if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises$1.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises$1.rm(this.options.reportsDirectory, { recursive: true });
+ if (this.coverageFiles = /* @__PURE__ */ new Map(), await promises.rm(this.coverageFilesDirectory, { recursive: true }), readdirSync(this.options.reportsDirectory).length === 0) await promises.rm(this.options.reportsDirectory, { recursive: true });
  }
  async onTestFailure() {
  if (!this.options.reportOnFailure) await this.cleanAfterRun();
@@ -4320,4 +3104,4 @@ function resolveMergeConfig(mod) {
  }
  }

- export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, createMethodsRPC as c, createDebugger as d, createFetchModuleFunction as e, isBrowserEnabled as f, getFilePoolName as g, hash as h, isPackageExists as i, groupBy as j, getCoverageProvider as k, createPool as l, normalizeResolvedIdToUrl as n, resolveConfig$1 as r, stdout as s, wildcardPatternToRegExp as w };
+ export { BaseCoverageProvider as B, RandomSequencer as R, resolveApiServerConfig as a, BaseSequencer as b, isBrowserEnabled as c, getCoverageProvider as g, hash as h, isPackageExists as i, resolveConfig$1 as r };