vitest 4.0.0-beta.18 → 4.0.0-beta.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser.d.ts +3 -3
- package/dist/browser.js +1 -1
- package/dist/{worker-base.js → chunks/base.CtHM3ryk.js} +18 -91
- package/dist/chunks/{browser.d.CCG7W26I.d.ts → browser.d.B9iJzZyn.d.ts} +3 -2
- package/dist/chunks/{cac.DYnuYoJK.js → cac.DCrQhweU.js} +14 -61
- package/dist/chunks/{cli-api.xhe4uqTX.js → cli-api.BjHteKX0.js} +1312 -53
- package/dist/chunks/{config.d.C4PpNy7v.d.ts → config.d.u2CUDWwS.d.ts} +2 -16
- package/dist/chunks/{coverage.Ds84cgzV.js → coverage.FU3w4IrQ.js} +25 -1241
- package/dist/chunks/{defaults.CXFFjsi8.js → defaults.BOqNVLsY.js} +0 -1
- package/dist/chunks/evaluatedModules.Dg1zASAC.js +17 -0
- package/dist/chunks/{global.d.RTA0rbJI.d.ts → global.d.BgJSTpgQ.d.ts} +1 -1
- package/dist/chunks/{globals.CwYe1aG7.js → globals.BGT_RUsD.js} +4 -2
- package/dist/chunks/{index.eEkl9h8v.js → index.BdSLhLDZ.js} +1 -1
- package/dist/chunks/{index.D2gVI9Ck.js → index.CcRZ6fUh.js} +1506 -11
- package/dist/chunks/{index.Bcjk8TKX.js → index.RwjEGCQ0.js} +2 -2
- package/dist/chunks/init-forks.DSafeltJ.js +54 -0
- package/dist/chunks/init-threads.SUtZ-067.js +17 -0
- package/dist/chunks/{worker.CdzokOSx.js → init.B2EESLQM.js} +97 -80
- package/dist/chunks/{inspector.Br76Q2Mb.js → inspector.DLZxSeU3.js} +1 -2
- package/dist/chunks/{moduleRunner.d.aXWuQhZN.d.ts → moduleRunner.d.YtNsMIoJ.d.ts} +1 -1
- package/dist/chunks/{plugin.d.XtKKWlOO.d.ts → plugin.d.BB__S31E.d.ts} +1 -1
- package/dist/chunks/{reporters.d.BJ_OuJGZ.d.ts → reporters.d.C6nGyY9_.d.ts} +1113 -1152
- package/dist/chunks/{resolveSnapshotEnvironment.tw2a5ux8.js → resolveSnapshotEnvironment.DJJKMKxb.js} +1 -1
- package/dist/chunks/{setup-common.DgXU7Yho.js → setup-common.DR1sucx6.js} +1 -1
- package/dist/chunks/{startModuleRunner.DPBo3mme.js → startModuleRunner.C2tTvmF9.js} +3 -1
- package/dist/{worker-vm.js → chunks/vm.DBeOXrP9.js} +6 -66
- package/dist/chunks/{worker.d.DSgBAZPX.d.ts → worker.d.BFk-vvBU.d.ts} +79 -4
- package/dist/cli.js +8 -9
- package/dist/config.cjs +0 -1
- package/dist/config.d.ts +6 -7
- package/dist/config.js +1 -1
- package/dist/coverage.d.ts +4 -4
- package/dist/coverage.js +2 -13
- package/dist/index.d.ts +13 -9
- package/dist/index.js +4 -2
- package/dist/module-evaluator.d.ts +3 -3
- package/dist/module-runner.js +1 -1
- package/dist/node.d.ts +79 -15
- package/dist/node.js +25 -26
- package/dist/reporters.d.ts +4 -4
- package/dist/reporters.js +9 -10
- package/dist/runners.d.ts +1 -1
- package/dist/worker.d.ts +26 -0
- package/dist/worker.js +46 -0
- package/dist/workers/forks.js +50 -0
- package/dist/workers/runVmTests.js +6 -5
- package/dist/workers/threads.js +50 -0
- package/dist/workers/vmForks.js +35 -0
- package/dist/workers/vmThreads.js +35 -0
- package/package.json +16 -13
- package/worker.d.ts +1 -0
- package/dist/chunks/typechecker.DsKAhua5.js +0 -1522
|
@@ -1,18 +1,19 @@
|
|
|
1
|
-
import fs, { promises, existsSync, readFileSync, statSync, readdirSync,
|
|
2
|
-
import { relative, resolve, dirname, extname, normalize,
|
|
1
|
+
import fs, { promises as promises$1, existsSync, mkdirSync, readFileSync, statSync, readdirSync, writeFileSync } from 'node:fs';
|
|
2
|
+
import { relative, resolve, dirname, join, extname, normalize, basename, isAbsolute } from 'pathe';
|
|
3
3
|
import { C as CoverageProviderMap } from './coverage.D_JHT54q.js';
|
|
4
4
|
import path, { resolve as resolve$1 } from 'node:path';
|
|
5
|
-
import { noop, createDefer, slash,
|
|
5
|
+
import { noop, createDefer, slash, isExternalUrl, unwrapId, nanoid, withTrailingSlash, cleanUrl, wrapId, toArray, deepMerge, deepClone, isPrimitive, notNullish } from '@vitest/utils/helpers';
|
|
6
6
|
import { a as any, p as prompt } from './index.Dc3xnDvT.js';
|
|
7
7
|
import * as vite from 'vite';
|
|
8
|
-
import { parseAst,
|
|
8
|
+
import { parseAst, fetchModule, version, searchForWorkspaceRoot, mergeConfig, createServer } from 'vite';
|
|
9
9
|
import { A as API_PATH, c as configFiles, d as defaultBrowserPort, a as defaultPort } from './constants.D_Q9UYh-.js';
|
|
10
|
+
import * as nodeos from 'node:os';
|
|
10
11
|
import nodeos__default, { tmpdir } from 'node:os';
|
|
11
|
-
import { generateHash as generateHash$1, calculateSuiteHash, someTasksAreOnly, interpretTaskModes, generateFileHash, limitConcurrency, createFileTask as createFileTask$1,
|
|
12
|
+
import { generateHash as generateHash$1, calculateSuiteHash, someTasksAreOnly, interpretTaskModes, hasFailed, generateFileHash, limitConcurrency, createFileTask as createFileTask$1, getTasks, isTestCase } from '@vitest/runner/utils';
|
|
12
13
|
import { SnapshotManager } from '@vitest/snapshot/manager';
|
|
13
|
-
import { v as version$1 } from './cac.
|
|
14
|
+
import { v as version$1 } from './cac.DCrQhweU.js';
|
|
14
15
|
import { c as createBirpc } from './index.Bgo3tNWt.js';
|
|
15
|
-
import { p as parse,
|
|
16
|
+
import { p as parse, d as stringify, e as TraceMap, o as originalPositionFor, h as ancestor, i as printError, f as formatProjectName, w as withLabel, j as errorBanner, k as divider, l as Typechecker, m as generateCodeFrame, n as createDefinesScript, R as ReportersMap, B as BlobReporter, r as readBlobs, q as convertTasksToEvents, H as HangingProcessReporter } from './index.CcRZ6fUh.js';
|
|
16
17
|
import require$$0$3 from 'events';
|
|
17
18
|
import require$$1$1 from 'https';
|
|
18
19
|
import require$$2 from 'http';
|
|
@@ -25,27 +26,32 @@ import require$$0 from 'zlib';
|
|
|
25
26
|
import require$$0$1 from 'buffer';
|
|
26
27
|
import { g as getDefaultExportFromCjs } from './_commonjsHelpers.BFTU3MAI.js';
|
|
27
28
|
import crypto, { createHash } from 'node:crypto';
|
|
28
|
-
import {
|
|
29
|
-
import
|
|
29
|
+
import { rootDir, distDir } from '../path.js';
|
|
30
|
+
import createDebug from 'debug';
|
|
31
|
+
import { h as hash, R as RandomSequencer, i as isPackageExists, c as isBrowserEnabled, r as resolveConfig, g as getCoverageProvider, a as resolveApiServerConfig } from './coverage.FU3w4IrQ.js';
|
|
30
32
|
import { VitestModuleEvaluator } from '#module-evaluator';
|
|
31
33
|
import { ModuleRunner } from 'vite/module-runner';
|
|
34
|
+
import { writeFile, rename, stat, unlink, readFile, rm, mkdir, copyFile } from 'node:fs/promises';
|
|
32
35
|
import { Console } from 'node:console';
|
|
33
36
|
import c from 'tinyrainbow';
|
|
34
37
|
import { highlight } from '@vitest/utils/highlight';
|
|
35
|
-
import { p as printError, f as formatProjectName, w as withLabel, e as errorBanner, d as divider, h as generateCodeFrame, R as ReportersMap, B as BlobReporter, r as readBlobs, H as HangingProcessReporter } from './index.D2gVI9Ck.js';
|
|
36
38
|
import { createRequire, builtinModules, isBuiltin } from 'node:module';
|
|
37
|
-
import url, { pathToFileURL } from 'node:url';
|
|
39
|
+
import url, { fileURLToPath, pathToFileURL } from 'node:url';
|
|
38
40
|
import { i as isTTY, a as isWindows } from './env.D4Lgay0q.js';
|
|
39
|
-
import {
|
|
41
|
+
import { isatty } from 'node:tty';
|
|
42
|
+
import EventEmitter$1, { EventEmitter } from 'node:events';
|
|
43
|
+
import { fork } from 'node:child_process';
|
|
44
|
+
import v8 from 'node:v8';
|
|
45
|
+
import { Worker } from 'node:worker_threads';
|
|
40
46
|
import pm from 'picomatch';
|
|
41
47
|
import { glob, isDynamicPattern } from 'tinyglobby';
|
|
42
48
|
import MagicString from 'magic-string';
|
|
43
49
|
import { hoistMocksPlugin, automockPlugin } from '@vitest/mocker/node';
|
|
44
|
-
import { c as configDefaults } from './defaults.
|
|
50
|
+
import { c as configDefaults } from './defaults.BOqNVLsY.js';
|
|
45
51
|
import { KNOWN_ASSET_RE } from '@vitest/utils/constants';
|
|
46
52
|
import { findNearestPackageData } from '@vitest/utils/resolver';
|
|
47
53
|
import * as esModuleLexer from 'es-module-lexer';
|
|
48
|
-
import { a as BenchmarkReportsMap } from './index.
|
|
54
|
+
import { a as BenchmarkReportsMap } from './index.BdSLhLDZ.js';
|
|
49
55
|
import assert$1 from 'node:assert';
|
|
50
56
|
import { serializeValue } from '@vitest/utils/serialize';
|
|
51
57
|
import { parseErrorStacktrace } from '@vitest/utils/source-map';
|
|
@@ -5108,11 +5114,11 @@ function setup(ctx, _server) {
|
|
|
5108
5114
|
return ctx.state.getPaths();
|
|
5109
5115
|
},
|
|
5110
5116
|
async readTestFile(id) {
|
|
5111
|
-
return !ctx.state.filesMap.has(id) || !existsSync(id) ? null : promises.readFile(id, "utf-8");
|
|
5117
|
+
return !ctx.state.filesMap.has(id) || !existsSync(id) ? null : promises$1.readFile(id, "utf-8");
|
|
5112
5118
|
},
|
|
5113
5119
|
async saveTestFile(id, content) {
|
|
5114
5120
|
if (!ctx.state.filesMap.has(id) || !existsSync(id)) throw new Error(`Test file "${id}" was not registered, so it cannot be updated using the API.`);
|
|
5115
|
-
return promises.writeFile(id, content, "utf-8");
|
|
5121
|
+
return promises$1.writeFile(id, content, "utf-8");
|
|
5116
5122
|
},
|
|
5117
5123
|
async rerun(files, resetTestNamePattern) {
|
|
5118
5124
|
await ctx.rerunFiles(files, void 0, true, resetTestNamePattern);
|
|
@@ -5133,7 +5139,7 @@ function setup(ctx, _server) {
|
|
|
5133
5139
|
const project = ctx.getProjectByName(projectName), result = browser ? await project.browser.vite.transformRequest(id) : await project.vite.transformRequest(id);
|
|
5134
5140
|
if (result) {
|
|
5135
5141
|
try {
|
|
5136
|
-
result.source = result.source || await promises.readFile(id, "utf-8");
|
|
5142
|
+
result.source = result.source || await promises$1.readFile(id, "utf-8");
|
|
5137
5143
|
} catch {}
|
|
5138
5144
|
return result;
|
|
5139
5145
|
}
|
|
@@ -5229,11 +5235,39 @@ var setup$1 = /*#__PURE__*/Object.freeze({
|
|
|
5229
5235
|
setup: setup
|
|
5230
5236
|
});
|
|
5231
5237
|
|
|
5232
|
-
|
|
5238
|
+
function groupBy(collection, iteratee) {
|
|
5239
|
+
return collection.reduce((acc, item) => {
|
|
5240
|
+
const key = iteratee(item);
|
|
5241
|
+
return acc[key] ||= [], acc[key].push(item), acc;
|
|
5242
|
+
}, {});
|
|
5243
|
+
}
|
|
5244
|
+
function stdout() {
|
|
5245
|
+
// @ts-expect-error Node.js maps process.stdout to console._stdout
|
|
5246
|
+
// eslint-disable-next-line no-console
|
|
5247
|
+
return console._stdout || process.stdout;
|
|
5248
|
+
}
|
|
5249
|
+
function escapeRegExp(s) {
|
|
5250
|
+
// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
|
|
5251
|
+
return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
5252
|
+
}
|
|
5253
|
+
function wildcardPatternToRegExp(pattern) {
|
|
5254
|
+
const negated = pattern[0] === "!";
|
|
5255
|
+
if (negated) pattern = pattern.slice(1);
|
|
5256
|
+
let regexp = `${pattern.split("*").map(escapeRegExp).join(".*")}$`;
|
|
5257
|
+
if (negated) regexp = `(?!${regexp})`;
|
|
5258
|
+
return new RegExp(`^${regexp}`, "i");
|
|
5259
|
+
}
|
|
5260
|
+
|
|
5261
|
+
function createDebugger(namespace) {
|
|
5262
|
+
const debug = createDebug(namespace);
|
|
5263
|
+
if (debug.enabled) return debug;
|
|
5264
|
+
}
|
|
5265
|
+
|
|
5266
|
+
const debug$1 = createDebugger("vitest:ast-collect-info"), verbose = createDebugger("vitest:ast-collect-verbose");
|
|
5233
5267
|
function astParseFile(filepath, code) {
|
|
5234
5268
|
const ast = parseAst(code);
|
|
5235
5269
|
if (verbose) verbose("Collecting", filepath, code);
|
|
5236
|
-
else debug?.("Collecting", filepath);
|
|
5270
|
+
else debug$1?.("Collecting", filepath);
|
|
5237
5271
|
const definitions = [], getName = (callee) => {
|
|
5238
5272
|
if (!callee) return null;
|
|
5239
5273
|
if (callee.type === "Identifier") return callee.name;
|
|
@@ -5297,7 +5331,7 @@ function astParseFile(filepath, code) {
|
|
|
5297
5331
|
const property = callee.tag?.property?.name;
|
|
5298
5332
|
isDynamicEach = property === "each" || property === "for";
|
|
5299
5333
|
}
|
|
5300
|
-
debug?.("Found", name, message, `(${mode})`), definitions.push({
|
|
5334
|
+
debug$1?.("Found", name, message, `(${mode})`), definitions.push({
|
|
5301
5335
|
start,
|
|
5302
5336
|
end,
|
|
5303
5337
|
name: message,
|
|
@@ -5381,8 +5415,8 @@ function createFileTask(testFilepath, code, requestMap, options) {
|
|
|
5381
5415
|
column: processedLocation.column
|
|
5382
5416
|
});
|
|
5383
5417
|
if (originalLocation.column != null) verbose?.(`Found location for`, definition.type, definition.name, `${processedLocation.line}:${processedLocation.column}`, "->", `${originalLocation.line}:${originalLocation.column}`), location = originalLocation;
|
|
5384
|
-
else debug?.("Cannot find original location for", definition.type, definition.name, `${processedLocation.column}:${processedLocation.line}`);
|
|
5385
|
-
} else debug?.("Cannot find original location for", definition.type, definition.name, `${definition.start}`);
|
|
5418
|
+
else debug$1?.("Cannot find original location for", definition.type, definition.name, `${processedLocation.column}:${processedLocation.line}`);
|
|
5419
|
+
} else debug$1?.("Cannot find original location for", definition.type, definition.name, `${definition.start}`);
|
|
5386
5420
|
if (definition.type === "suite") {
|
|
5387
5421
|
const task = {
|
|
5388
5422
|
type: definition.type,
|
|
@@ -5437,7 +5471,7 @@ async function astCollectTests(project, filepath) {
|
|
|
5437
5471
|
allowOnly: project.config.allowOnly,
|
|
5438
5472
|
testNamePattern: project.config.testNamePattern,
|
|
5439
5473
|
pool: project.browser ? "browser" : project.config.pool
|
|
5440
|
-
}) : (debug?.("Cannot parse", testFilepath, "(vite didn't return anything)"), createFailedFileTask(project, filepath, /* @__PURE__ */ new Error(`Failed to parse ${testFilepath}. Vite didn't return anything.`)));
|
|
5474
|
+
}) : (debug$1?.("Cannot parse", testFilepath, "(vite didn't return anything)"), createFailedFileTask(project, filepath, /* @__PURE__ */ new Error(`Failed to parse ${testFilepath}. Vite didn't return anything.`)));
|
|
5441
5475
|
}
|
|
5442
5476
|
async function transformSSR(project, filepath) {
|
|
5443
5477
|
const request = await project.vite.transformRequest(filepath, { ssr: false });
|
|
@@ -5602,6 +5636,189 @@ class VitestCache {
|
|
|
5602
5636
|
}
|
|
5603
5637
|
}
|
|
5604
5638
|
|
|
5639
|
+
const created = /* @__PURE__ */ new Set(), promises = /* @__PURE__ */ new Map();
|
|
5640
|
+
function createFetchModuleFunction(resolver, cacheFs = false, tmpDir = join(tmpdir(), nanoid()), dump) {
|
|
5641
|
+
const cachedFsResults = /* @__PURE__ */ new Map();
|
|
5642
|
+
return async (url, importer, environment, options) => {
|
|
5643
|
+
// We are copy pasting Vite's externalization logic from `fetchModule` because
|
|
5644
|
+
// we instead rely on our own `shouldExternalize` method because Vite
|
|
5645
|
+
// doesn't support `resolve.external` in non SSR environments (jsdom/happy-dom)
|
|
5646
|
+
if (url.startsWith("data:")) return {
|
|
5647
|
+
externalize: url,
|
|
5648
|
+
type: "builtin"
|
|
5649
|
+
};
|
|
5650
|
+
if (url === "/@vite/client" || url === "@vite/client")
|
|
5651
|
+
// this will be stubbed
|
|
5652
|
+
return {
|
|
5653
|
+
externalize: "/@vite/client",
|
|
5654
|
+
type: "module"
|
|
5655
|
+
};
|
|
5656
|
+
const isFileUrl = url.startsWith("file://");
|
|
5657
|
+
if (isExternalUrl(url) && !isFileUrl) return {
|
|
5658
|
+
externalize: url,
|
|
5659
|
+
type: "network"
|
|
5660
|
+
};
|
|
5661
|
+
// Vite does the same in `fetchModule`, but we want to externalize modules ourselves,
|
|
5662
|
+
// so we do this first to resolve the module and check its `id`. The next call of
|
|
5663
|
+
// `ensureEntryFromUrl` inside `fetchModule` is cached and should take no time
|
|
5664
|
+
// This also makes it so externalized modules are inside the module graph.
|
|
5665
|
+
const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url)), cached = !!moduleGraphModule.transformResult;
|
|
5666
|
+
// if url is already cached, we can just confirm it's also cached on the server
|
|
5667
|
+
if (options?.cached && cached) return { cache: true };
|
|
5668
|
+
if (moduleGraphModule.id) {
|
|
5669
|
+
const externalize = await resolver.shouldExternalize(moduleGraphModule.id);
|
|
5670
|
+
if (externalize) return {
|
|
5671
|
+
externalize,
|
|
5672
|
+
type: "module"
|
|
5673
|
+
};
|
|
5674
|
+
}
|
|
5675
|
+
let moduleRunnerModule;
|
|
5676
|
+
if (dump?.dumpFolder && dump.readFromDump) {
|
|
5677
|
+
const path = resolve(dump?.dumpFolder, url.replace(/[^\w+]/g, "-"));
|
|
5678
|
+
if (existsSync(path)) {
|
|
5679
|
+
const code = await readFile(path, "utf-8"), matchIndex = code.lastIndexOf("\n//");
|
|
5680
|
+
if (matchIndex !== -1) {
|
|
5681
|
+
const { id, file } = JSON.parse(code.slice(matchIndex + 4));
|
|
5682
|
+
moduleRunnerModule = {
|
|
5683
|
+
code,
|
|
5684
|
+
id,
|
|
5685
|
+
url,
|
|
5686
|
+
file,
|
|
5687
|
+
invalidate: false
|
|
5688
|
+
};
|
|
5689
|
+
}
|
|
5690
|
+
}
|
|
5691
|
+
}
|
|
5692
|
+
if (!moduleRunnerModule) moduleRunnerModule = await fetchModule(environment, url, importer, {
|
|
5693
|
+
...options,
|
|
5694
|
+
inlineSourceMap: false
|
|
5695
|
+
}).catch(handleRollupError);
|
|
5696
|
+
const result = processResultSource(environment, moduleRunnerModule);
|
|
5697
|
+
if (dump?.dumpFolder && "code" in result) {
|
|
5698
|
+
const path = resolve(dump?.dumpFolder, result.url.replace(/[^\w+]/g, "-"));
|
|
5699
|
+
await writeFile(path, `${result.code}\n// ${JSON.stringify({
|
|
5700
|
+
id: result.id,
|
|
5701
|
+
file: result.file
|
|
5702
|
+
})}`, "utf-8");
|
|
5703
|
+
}
|
|
5704
|
+
if (!cacheFs || !("code" in result)) return result;
|
|
5705
|
+
const code = result.code;
|
|
5706
|
+
// to avoid serialising large chunks of code,
|
|
5707
|
+
// we store them in a tmp file and read in the test thread
|
|
5708
|
+
if (cachedFsResults.has(result.id)) return getCachedResult(result, cachedFsResults);
|
|
5709
|
+
const dir = join(tmpDir, environment.name), name = hash("sha1", result.id, "hex"), tmp = join(dir, name);
|
|
5710
|
+
if (!created.has(dir)) mkdirSync(dir, { recursive: true }), created.add(dir);
|
|
5711
|
+
return promises.has(tmp) ? (await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults)) : (promises.set(tmp, atomicWriteFile(tmp, code).catch(() => writeFile(tmp, code, "utf-8")).finally(() => promises.delete(tmp))), await promises.get(tmp), cachedFsResults.set(result.id, tmp), getCachedResult(result, cachedFsResults));
|
|
5712
|
+
};
|
|
5713
|
+
}
|
|
5714
|
+
let SOURCEMAPPING_URL = "sourceMa";
|
|
5715
|
+
SOURCEMAPPING_URL += "ppingURL";
|
|
5716
|
+
const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
|
|
5717
|
+
function processResultSource(environment, result) {
|
|
5718
|
+
if (!("code" in result)) return result;
|
|
5719
|
+
const node = environment.moduleGraph.getModuleById(result.id);
|
|
5720
|
+
if (node?.transformResult)
|
|
5721
|
+
// this also overrides node.transformResult.code which is also what the module
|
|
5722
|
+
// runner does under the hood by default (we disable source maps inlining)
|
|
5723
|
+
inlineSourceMap(node.transformResult);
|
|
5724
|
+
return {
|
|
5725
|
+
...result,
|
|
5726
|
+
code: node?.transformResult?.code || result.code
|
|
5727
|
+
};
|
|
5728
|
+
}
|
|
5729
|
+
const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
|
|
5730
|
+
// we have to inline the source map ourselves, because
|
|
5731
|
+
// - we don't need //# sourceURL since we are running code in VM
|
|
5732
|
+
// - important in stack traces and the V8 coverage
|
|
5733
|
+
// - we need to inject an empty line for --inspect-brk
|
|
5734
|
+
function inlineSourceMap(result) {
|
|
5735
|
+
const map = result.map;
|
|
5736
|
+
let code = result.code;
|
|
5737
|
+
if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
|
|
5738
|
+
if (OTHER_SOURCE_MAP_REGEXP.lastIndex = 0, OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
|
|
5739
|
+
const sourceMap = { ...map };
|
|
5740
|
+
// If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
|
|
5741
|
+
// so that debuggers can be set to break on first line
|
|
5742
|
+
if (sourceMap.mappings[0] === ";") sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
|
|
5743
|
+
return result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`, result;
|
|
5744
|
+
}
|
|
5745
|
+
function genSourceMapUrl(map) {
|
|
5746
|
+
if (typeof map !== "string") map = JSON.stringify(map);
|
|
5747
|
+
return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
|
|
5748
|
+
}
|
|
5749
|
+
function getCachedResult(result, cachedFsResults) {
|
|
5750
|
+
const tmp = cachedFsResults.get(result.id);
|
|
5751
|
+
if (!tmp) throw new Error(`The cached result was returned too early for ${result.id}.`);
|
|
5752
|
+
return {
|
|
5753
|
+
cached: true,
|
|
5754
|
+
file: result.file,
|
|
5755
|
+
id: result.id,
|
|
5756
|
+
tmp,
|
|
5757
|
+
url: result.url,
|
|
5758
|
+
invalidate: result.invalidate
|
|
5759
|
+
};
|
|
5760
|
+
}
|
|
5761
|
+
// serialize rollup error on server to preserve details as a test error
|
|
5762
|
+
function handleRollupError(e) {
|
|
5763
|
+
throw e instanceof Error && ("plugin" in e || "frame" in e || "id" in e) ? {
|
|
5764
|
+
name: e.name,
|
|
5765
|
+
message: e.message,
|
|
5766
|
+
stack: e.stack,
|
|
5767
|
+
cause: e.cause,
|
|
5768
|
+
__vitest_rollup_error__: {
|
|
5769
|
+
plugin: e.plugin,
|
|
5770
|
+
id: e.id,
|
|
5771
|
+
loc: e.loc,
|
|
5772
|
+
frame: e.frame
|
|
5773
|
+
}
|
|
5774
|
+
} : e;
|
|
5775
|
+
}
|
|
5776
|
+
/**
|
|
5777
|
+
* Performs an atomic write operation using the write-then-rename pattern.
|
|
5778
|
+
*
|
|
5779
|
+
* Why we need this:
|
|
5780
|
+
* - Ensures file integrity by never leaving partially written files on disk
|
|
5781
|
+
* - Prevents other processes from reading incomplete data during writes
|
|
5782
|
+
* - Particularly important for test files where incomplete writes could cause test failures
|
|
5783
|
+
*
|
|
5784
|
+
* The implementation writes to a temporary file first, then renames it to the target path.
|
|
5785
|
+
* This rename operation is atomic on most filesystems (including POSIX-compliant ones),
|
|
5786
|
+
* guaranteeing that other processes will only ever see the complete file.
|
|
5787
|
+
*
|
|
5788
|
+
* Added in https://github.com/vitest-dev/vitest/pull/7531
|
|
5789
|
+
*/
|
|
5790
|
+
async function atomicWriteFile(realFilePath, data) {
|
|
5791
|
+
const dir = dirname(realFilePath), tmpFilePath = join(dir, `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
|
5792
|
+
try {
|
|
5793
|
+
await writeFile(tmpFilePath, data, "utf-8"), await rename(tmpFilePath, realFilePath);
|
|
5794
|
+
} finally {
|
|
5795
|
+
try {
|
|
5796
|
+
if (await stat(tmpFilePath)) await unlink(tmpFilePath);
|
|
5797
|
+
} catch {}
|
|
5798
|
+
}
|
|
5799
|
+
}
|
|
5800
|
+
|
|
5801
|
+
// this is copy pasted from vite
|
|
5802
|
+
function normalizeResolvedIdToUrl(environment, resolvedId) {
|
|
5803
|
+
const root = environment.config.root, depsOptimizer = environment.depsOptimizer;
|
|
5804
|
+
let url;
|
|
5805
|
+
// normalize all imports into resolved URLs
|
|
5806
|
+
// e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
|
|
5807
|
+
if (resolvedId.startsWith(withTrailingSlash(root)))
|
|
5808
|
+
// in root: infer short absolute path from root
|
|
5809
|
+
url = resolvedId.slice(root.length);
|
|
5810
|
+
else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
|
|
5811
|
+
// an optimized deps may not yet exists in the filesystem, or
|
|
5812
|
+
// a regular file exists but is out of root: rewrite to absolute /@fs/ paths
|
|
5813
|
+
url = path.posix.join("/@fs/", resolvedId);
|
|
5814
|
+
else url = resolvedId;
|
|
5815
|
+
// if the resolved id is not a valid browser import specifier,
|
|
5816
|
+
// prefix it to make it valid. We will strip this before feeding it
|
|
5817
|
+
// back into the transform pipeline
|
|
5818
|
+
if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
|
|
5819
|
+
return url;
|
|
5820
|
+
}
|
|
5821
|
+
|
|
5605
5822
|
class ServerModuleRunner extends ModuleRunner {
|
|
5606
5823
|
constructor(environment, resolver, config) {
|
|
5607
5824
|
const fetchModule = createFetchModuleFunction(resolver, false);
|
|
@@ -5808,7 +6025,8 @@ This might cause false positive tests. Resolve unhandled errors to make sure you
|
|
|
5808
6025
|
// Interrupted signals don't set exit code automatically.
|
|
5809
6026
|
// Use same exit code as node: https://nodejs.org/api/process.html#signal-events
|
|
5810
6027
|
if (cleanup(), process.exitCode === void 0) process.exitCode = exitCode !== void 0 ? 128 + exitCode : Number(signal);
|
|
5811
|
-
|
|
6028
|
+
// Timeout to flush stderr
|
|
6029
|
+
setTimeout(() => process.exit(), 1);
|
|
5812
6030
|
};
|
|
5813
6031
|
process.once("SIGINT", onExit), process.once("SIGTERM", onExit), process.once("exit", onExit), this.ctx.onClose(() => {
|
|
5814
6032
|
process.off("SIGINT", onExit), process.off("SIGTERM", onExit), process.off("exit", onExit), cleanup();
|
|
@@ -5855,12 +6073,1070 @@ class VitestPackageInstaller {
|
|
|
5855
6073
|
}
|
|
5856
6074
|
}
|
|
5857
6075
|
|
|
6076
|
+
function getDefaultThreadsCount(config) {
|
|
6077
|
+
const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
|
|
6078
|
+
return config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
|
|
6079
|
+
}
|
|
6080
|
+
function getWorkerMemoryLimit(config) {
|
|
6081
|
+
return config.vmMemoryLimit ? config.vmMemoryLimit : 1 / (config.maxWorkers ?? getDefaultThreadsCount(config));
|
|
6082
|
+
}
|
|
6083
|
+
/**
|
|
6084
|
+
* Converts a string representing an amount of memory to bytes.
|
|
6085
|
+
*
|
|
6086
|
+
* @param input The value to convert to bytes.
|
|
6087
|
+
* @param percentageReference The reference value to use when a '%' value is supplied.
|
|
6088
|
+
*/
|
|
6089
|
+
function stringToBytes(input, percentageReference) {
|
|
6090
|
+
if (input === null || input === void 0) return input;
|
|
6091
|
+
if (typeof input === "string") if (Number.isNaN(Number.parseFloat(input.slice(-1)))) {
|
|
6092
|
+
let [, numericString, trailingChars] = input.match(/(.*?)([^0-9.-]+)$/) || [];
|
|
6093
|
+
if (trailingChars && numericString) {
|
|
6094
|
+
const numericValue = Number.parseFloat(numericString);
|
|
6095
|
+
switch (trailingChars = trailingChars.toLowerCase(), trailingChars) {
|
|
6096
|
+
case "%":
|
|
6097
|
+
input = numericValue / 100;
|
|
6098
|
+
break;
|
|
6099
|
+
case "kb":
|
|
6100
|
+
case "k": return numericValue * 1e3;
|
|
6101
|
+
case "kib": return numericValue * 1024;
|
|
6102
|
+
case "mb":
|
|
6103
|
+
case "m": return numericValue * 1e3 * 1e3;
|
|
6104
|
+
case "mib": return numericValue * 1024 * 1024;
|
|
6105
|
+
case "gb":
|
|
6106
|
+
case "g": return numericValue * 1e3 * 1e3 * 1e3;
|
|
6107
|
+
case "gib": return numericValue * 1024 * 1024 * 1024;
|
|
6108
|
+
}
|
|
6109
|
+
}
|
|
6110
|
+
} else input = Number.parseFloat(input);
|
|
6111
|
+
if (typeof input === "number") if (input <= 1 && input > 0) {
|
|
6112
|
+
if (percentageReference) return Math.floor(input * percentageReference);
|
|
6113
|
+
throw new Error("For a percentage based memory limit a percentageReference must be supplied");
|
|
6114
|
+
} else if (input > 1) return Math.floor(input);
|
|
6115
|
+
else throw new Error("Unexpected numerical input for \"memoryLimit\"");
|
|
6116
|
+
return null;
|
|
6117
|
+
}
|
|
6118
|
+
|
|
6119
|
+
async function groupFilesByEnv(files) {
|
|
6120
|
+
const filesWithEnv = await Promise.all(files.map(async ({ moduleId: filepath, project, testLines }) => {
|
|
6121
|
+
const code = await promises$1.readFile(filepath, "utf-8");
|
|
6122
|
+
// 1. Check for control comments in the file
|
|
6123
|
+
let env = code.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1];
|
|
6124
|
+
// 2. Fallback to global env
|
|
6125
|
+
env ||= project.config.environment || "node";
|
|
6126
|
+
let envOptionsJson = code.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1];
|
|
6127
|
+
if (envOptionsJson?.endsWith("*/"))
|
|
6128
|
+
// Trim closing Docblock characters the above regex might have captured
|
|
6129
|
+
envOptionsJson = envOptionsJson.slice(0, -2);
|
|
6130
|
+
const envOptions = JSON.parse(envOptionsJson || "null");
|
|
6131
|
+
return {
|
|
6132
|
+
file: {
|
|
6133
|
+
filepath,
|
|
6134
|
+
testLocations: testLines
|
|
6135
|
+
},
|
|
6136
|
+
project,
|
|
6137
|
+
environment: {
|
|
6138
|
+
name: env,
|
|
6139
|
+
options: envOptions ? { [env === "happy-dom" ? "happyDOM" : env]: envOptions } : null
|
|
6140
|
+
}
|
|
6141
|
+
};
|
|
6142
|
+
}));
|
|
6143
|
+
return groupBy(filesWithEnv, ({ environment }) => environment.name);
|
|
6144
|
+
}
|
|
6145
|
+
|
|
6146
|
+
const debug = createDebugger("vitest:browser:pool");
|
|
6147
|
+
function createBrowserPool(vitest) {
|
|
6148
|
+
const providers = /* @__PURE__ */ new Set(), numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length, maxThreadsCount = Math.min(12, numCpus - 1), threadsCount = vitest.config.watch ? Math.max(Math.floor(maxThreadsCount / 2), 1) : Math.max(maxThreadsCount, 1), projectPools = /* @__PURE__ */ new WeakMap(), ensurePool = (project) => {
|
|
6149
|
+
if (projectPools.has(project)) return projectPools.get(project);
|
|
6150
|
+
debug?.("creating pool for project %s", project.name);
|
|
6151
|
+
const resolvedUrls = project.browser.vite.resolvedUrls, origin = resolvedUrls?.local[0] ?? resolvedUrls?.network[0];
|
|
6152
|
+
if (!origin) throw new Error(`Can't find browser origin URL for project "${project.name}"`);
|
|
6153
|
+
const pool = new BrowserPool(project, {
|
|
6154
|
+
maxWorkers: getThreadsCount(project),
|
|
6155
|
+
origin
|
|
6156
|
+
});
|
|
6157
|
+
return projectPools.set(project, pool), vitest.onCancel(() => {
|
|
6158
|
+
pool.cancel();
|
|
6159
|
+
}), pool;
|
|
6160
|
+
}, runWorkspaceTests = async (method, specs) => {
	// Runs (or collects, depending on `method`) the given specs across all
	// projects: groups files per project, spins up each project's BrowserPool,
	// then executes parallel-capable pools concurrently and the rest serially.
	const groupedFiles = /* @__PURE__ */ new Map();
	for (const { project, moduleId, testLines } of specs) {
		const files = groupedFiles.get(project) || [];
		files.push({
			filepath: moduleId,
			testLocations: testLines
		}), groupedFiles.set(project, files);
	}
	// Cancellation is checked between the async phases below so an in-flight
	// run can bail out early.
	let isCancelled = false;
	vitest.onCancel(() => {
		isCancelled = true;
	});
	// Initialise every project's browser provider and pool in parallel.
	const initialisedPools = await Promise.all([...groupedFiles.entries()].map(async ([project, files]) => {
		if (await project._initBrowserProvider(), !project.browser) throw new TypeError(`The browser server was not initialized${project.name ? ` for the "${project.name}" project` : ""}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
		if (isCancelled) return;
		debug?.("provider is ready for %s project", project.name);
		const pool = ensurePool(project);
		// Clear stale results for these files before the new run starts.
		return vitest.state.clearFiles(project, files.map((f) => f.filepath)), providers.add(project.browser.provider), {
			pool,
			provider: project.browser.provider,
			runTests: () => pool.runTests(method, files)
		};
	}));
	if (isCancelled) return;
	// Pools whose provider has a mocker and supports parallelism run
	// concurrently; the others run one after another.
	const parallelPools = [], nonParallelPools = [];
	for (const pool of initialisedPools) {
		if (!pool)
			// this means it was cancelled
			return;
		if (pool.provider.mocker && pool.provider.supportsParallelism) parallelPools.push(pool.runTests);
		else nonParallelPools.push(pool.runTests);
	}
	await Promise.all(parallelPools.map((runTests) => runTests()));
	for (const runTests of nonParallelPools) {
		if (isCancelled) return;
		await runTests();
	}
};
|
|
6199
|
+
/**
 * Determines how many browser sessions may run in parallel for a project.
 * Headed mode, disabled file parallelism, or a provider that cannot run
 * pages in parallel all force a single worker; otherwise the project's
 * `maxWorkers` wins, falling back to the globally computed `threadsCount`.
 */
function getThreadsCount(project) {
	const browserConfig = project.config.browser;
	const canRunInParallel = browserConfig.headless
		&& browserConfig.fileParallelism
		&& project.browser.provider.supportsParallelism;
	if (!canRunInParallel) {
		return 1;
	}
	return project.config.maxWorkers ? project.config.maxWorkers : threadsCount;
}
|
|
6203
|
+
// Public pool interface handed back to Vitest: lifecycle teardown plus the
// two entry points for running and collecting tests.
return {
	name: "browser",
	async close() {
		// Close every tracked provider, forget known session ids, and shut
		// down all page orchestrators across every project.
		await Promise.all([...providers].map((provider) => provider.close())), vitest._browserSessions.sessionIds.clear(), providers.clear(), vitest.projects.forEach((project) => {
			project.browser?.state.orchestrators.forEach((orchestrator) => {
				orchestrator.$close();
			});
		}), debug?.("browser pool closed all providers");
	},
	runTests: (files) => runWorkspaceTests("run", files),
	collectTests: (files) => runWorkspaceTests("collect", files)
};
}
|
|
6216
|
+
/**
 * Escapes regex metacharacters (and path separators) in a file path so the
 * result can be used verbatim as a regular-expression pattern, e.g. for
 * CDP's `Debugger.setBreakpointByUrl` `urlRegex`.
 */
function escapePathToRegexp(path) {
	const specialChars = /[/\\.?*()^${}|[\]+]/g;
	return path.replace(specialChars, "\\$&");
}
|
|
6219
|
+
/**
 * Schedules test files onto a bounded set of browser sessions (tabs) for a
 * single project. Files are queued; each session pulls the next file when it
 * finishes the previous one. `runTests` resolves the shared deferred once
 * every session reports it is done.
 *
 * Fix in this revision: the debug call in `setBreakpoint` used the invalid
 * placeholder "[$s]" instead of "[%s]", so the session id was never
 * substituted into the message.
 */
class BrowserPool {
	// Files waiting to be picked up by a session.
	_queue = [];
	// Deferred resolved when all sessions finish; rejected on errors.
	_promise;
	// Serialized provided context, forwarded to testers.
	_providedContext;
	// Sessions that are idle and may immediately take new work.
	readySessions = /* @__PURE__ */ new Set();
	constructor(project, options) {
		this.project = project, this.options = options;
	}
	// Drops all queued files; in-flight files are not interrupted.
	cancel() {
		this._queue = [];
	}
	// Fails the current run and clears the queue.
	reject(error) {
		this._promise?.reject(error), this._promise = void 0, this.cancel();
	}
	get orchestrators() {
		return this.project.browser.state.orchestrators;
	}
	/**
	 * Queues `files` and ensures enough sessions exist to process them.
	 * Reuses idle sessions first, then opens at most
	 * `maxWorkers - orchestrators.size` new pages (never more than the number
	 * of files). Returns the shared deferred for this run.
	 */
	async runTests(method, files) {
		if (this._promise ??= createDefer(), !files.length) return debug?.("no tests found, finishing test run immediately"), this._promise.resolve(), this._promise;
		if (this._providedContext = stringify(this.project.getProvidedContext()), this._queue.push(...files), this.readySessions.forEach((sessionId) => {
			if (this._queue.length) this.readySessions.delete(sessionId), this.runNextTest(method, sessionId);
		}), this.orchestrators.size >= this.options.maxWorkers) return debug?.("all orchestrators are ready, not creating more"), this._promise;
		// open the minimum amount of tabs
		// if there is only 1 file running, we don't need 8 tabs running
		const workerCount = Math.min(this.options.maxWorkers - this.orchestrators.size, files.length), promises = [];
		for (let i = 0; i < workerCount; i++) {
			const sessionId = crypto.randomUUID();
			this.project.vitest._browserSessions.sessionIds.add(sessionId);
			const project = this.project.name;
			debug?.("[%s] creating session for %s", sessionId, project);
			const page = this.openPage(sessionId).then(() => {
				// start running tests on the page when it's ready
				this.runNextTest(method, sessionId);
			});
			promises.push(page);
		}
		return await Promise.all(promises), debug?.("all sessions are created"), this._promise;
	}
	// Registers the session and navigates the provider to the tester URL.
	async openPage(sessionId) {
		const sessionPromise = this.project.vitest._browserSessions.createSession(sessionId, this.project, this), browser = this.project.browser, url = new URL("/__vitest_test__/", this.options.origin);
		url.searchParams.set("sessionId", sessionId);
		const pagePromise = browser.provider.openPage(sessionId, url.toString());
		await Promise.all([sessionPromise, pagePromise]);
	}
	getOrchestrator(sessionId) {
		const orchestrator = this.orchestrators.get(sessionId);
		if (!orchestrator) throw new Error(`Orchestrator not found for session ${sessionId}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
		return orchestrator;
	}
	// Marks the session idle; resolves the run once every session is idle.
	finishSession(sessionId) {
		// the last worker finished running tests
		if (this.readySessions.add(sessionId), this.readySessions.size === this.orchestrators.size) this._promise?.resolve(), this._promise = void 0, debug?.("[%s] all tests finished running", sessionId);
		else debug?.(`did not finish sessions for ${sessionId}: |ready - %s| |overall - %s|`, [...this.readySessions].join(", "), [...this.orchestrators.keys()].join(", "));
	}
	/**
	 * Pops the next file off the queue and runs it in the given session,
	 * recursing when the file finishes. With an empty queue, finishes the
	 * session (cleaning up testers first when isolation is off).
	 */
	runNextTest(method, sessionId) {
		const file = this._queue.shift();
		if (!file) {
			// we don't need to cleanup testers if isolation is enabled,
			// because cleanup is done at the end of every test
			if (debug?.("[%s] no more tests to run", sessionId), this.project.config.browser.isolate) {
				this.finishSession(sessionId);
				return;
			}
			this.getOrchestrator(sessionId).cleanupTesters().catch((error) => this.reject(error)).finally(() => this.finishSession(sessionId));
			return;
		}
		if (!this._promise) throw new Error(`Unexpected empty queue`);
		const orchestrator = this.getOrchestrator(sessionId);
		debug?.("[%s] run test %s", sessionId, file), this.setBreakpoint(sessionId, file.filepath).then(() => {
			// this starts running tests inside the orchestrator
			orchestrator.createTesters({
				method,
				files: [file],
				providedContext: this._providedContext || "[{}]"
			}).then(() => {
				debug?.("[%s] test %s finished running", sessionId, file), this.runNextTest(method, sessionId);
			}).catch((error) => {
				// if user cancels the test run manually, ignore the error and exit gracefully
				if (this.project.vitest.isCancelling && error instanceof Error && error.message.startsWith("Browser connection was closed while running tests")) {
					this.cancel(), this._promise?.resolve(), this._promise = void 0, debug?.("[%s] browser connection was closed", sessionId);
					return;
				}
				debug?.("[%s] error during %s test run: %s", sessionId, file, error), this.reject(error);
			});
		}).catch((err) => this.reject(err));
	}
	/**
	 * When `inspector.waitForDebugger` is enabled and the browser supports
	 * CDP, sets a breakpoint on line 0 of the test file so execution pauses
	 * until a debugger attaches.
	 */
	async setBreakpoint(sessionId, file) {
		if (!this.project.config.inspector.waitForDebugger) return;
		const provider = this.project.browser.provider, browser = this.project.config.browser.name;
		if (shouldIgnoreDebugger(provider.name, browser)) {
			// fixed: was "[$s]", which the debug formatter never substitutes
			debug?.("[%s] ignoring debugger in %s browser because it is not supported", sessionId, browser);
			return;
		}
		if (!provider.getCDPSession) throw new Error("Unable to set breakpoint, CDP not supported");
		debug?.("[%s] set breakpoint for %s", sessionId, file);
		const session = await provider.getCDPSession(sessionId);
		await session.send("Debugger.enable", {}), await session.send("Debugger.setBreakpointByUrl", {
			lineNumber: 0,
			urlRegex: escapePathToRegexp(file)
		});
	}
}
|
|
6321
|
+
/**
 * Returns true when the debugger/breakpoint flow should be skipped for the
 * given provider + browser combination: webdriverio only supports it in
 * chrome and edge, every other provider only in chromium.
 */
function shouldIgnoreDebugger(provider, browser) {
	if (provider === "webdriverio") {
		return browser !== "chrome" && browser !== "edge";
	}
	return browser !== "chromium";
}
|
|
6324
|
+
|
|
6325
|
+
// Builds the RPC method table exposed to a test worker over birpc. Handles
// module fetching/resolution, snapshot bookkeeping, and forwarding of test
// run events. With `options.collect` the state is updated directly instead
// of going through the live test-run reporter pipeline.
function createMethodsRPC(project, options = {}) {
	const vitest = project.vitest, cacheFs = options.cacheFs ?? false;
	// Lazily create this project's metadata bucket, then ensure the optional
	// dump directory exists before it is written to.
	if (project.vitest.state.metadata[project.name] ??= {
		externalized: {},
		duration: {},
		tmps: {}
	}, project.config.dumpDir && !existsSync(project.config.dumpDir)) mkdirSync(project.config.dumpDir, { recursive: true });
	project.vitest.state.metadata[project.name].dumpDir = project.config.dumpDir;
	const fetch = createFetchModuleFunction(project._resolver, cacheFs, project.tmpDir, {
		dumpFolder: project.config.dumpDir,
		readFromDump: project.config.server.debug?.load ?? process.env.VITEST_DEBUG_LOAD_DUMP != null
	});
	return {
		// Fetches a module through the named Vite environment, recording the
		// transform duration and externalization metadata per url.
		async fetch(url, importer, environmentName, options) {
			const environment = project.vite.environments[environmentName];
			if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
			const start = performance.now();
			return await fetch(url, importer, environment, options).then((result) => {
				const duration = performance.now() - start;
				project.vitest.state.transformTime += duration;
				const metadata = project.vitest.state.metadata[project.name];
				if ("externalize" in result) metadata.externalized[url] = result.externalize;
				if ("tmp" in result) metadata.tmps[url] = result.tmp;
				return metadata.duration[url] ??= [], metadata.duration[url].push(duration), result;
			});
		},
		// Resolves an id via the environment's plugin container; null when
		// nothing resolves.
		async resolve(id, importer, environmentName) {
			const environment = project.vite.environments[environmentName];
			if (!environment) throw new Error(`The environment ${environmentName} was not defined in the Vite config.`);
			const resolved = await environment.pluginContainer.resolveId(id, importer);
			return resolved ? {
				file: cleanUrl(resolved.id),
				url: normalizeResolvedIdToUrl(environment, resolved.id),
				id: resolved.id
			} : null;
		},
		snapshotSaved(snapshot) {
			vitest.snapshot.add(snapshot);
		},
		resolveSnapshotPath(testPath) {
			return vitest.snapshot.resolvePath(testPath, { config: project.serializedConfig });
		},
		// Transforms a file url through the dedicated VM environment (used by
		// the vm pools).
		async transform(id) {
			const environment = project.vite.environments.__vitest_vm__;
			if (!environment) throw new Error(`The VM environment was not defined in the Vite config. This is a bug in Vitest. Please, open a new issue with reproduction.`);
			const url = normalizeResolvedIdToUrl(environment, fileURLToPath(id));
			return { code: (await environment.transformRequest(url).catch(handleRollupError))?.code };
		},
		async onQueued(file) {
			if (options.collect) vitest.state.collectFiles(project, [file]);
			else await vitest._testRun.enqueued(project, file);
		},
		async onCollected(files) {
			if (options.collect) vitest.state.collectFiles(project, files);
			else await vitest._testRun.collected(project, files);
		},
		onAfterSuiteRun(meta) {
			vitest.coverageProvider?.onAfterSuiteRun(meta);
		},
		async onTaskAnnotate(testId, annotation) {
			return vitest._testRun.annotate(testId, annotation);
		},
		async onTaskUpdate(packs, events) {
			if (options.collect) vitest.state.updateTasks(packs);
			else await vitest._testRun.updated(packs, events);
		},
		async onUserConsoleLog(log) {
			if (options.collect) vitest.state.updateUserLog(log);
			else await vitest._testRun.log(log);
		},
		onUnhandledError(err, type) {
			vitest.state.catchError(err, type);
		},
		onCancel(reason) {
			vitest.cancelCurrentRun(reason);
		},
		getCountOfFailedTests() {
			return vitest.state.getCountOfFailedTests();
		}
	};
}
|
|
6406
|
+
|
|
6407
|
+
// Enum of PoolRunner lifecycle states (compiled TypeScript-enum shape kept
// so consumers can use both `RunnerState.IDLE` and value lookups).
var RunnerState = /* @__PURE__ */ function(RunnerState) {
	RunnerState["IDLE"] = "idle";
	RunnerState["STARTING"] = "starting";
	RunnerState["STARTED"] = "started";
	RunnerState["STOPPING"] = "stopping";
	RunnerState["STOPPED"] = "stopped";
	return RunnerState;
}(RunnerState || {});
// Milliseconds to wait for a worker's "started" / "stopped" handshake.
const START_TIMEOUT = 1e4;
const STOP_TIMEOUT = 1e4;
|
|
6411
|
+
/** @experimental */
// State machine wrapping a single pool worker (fork/thread/typecheck). Owns
// the birpc channel to the worker and serializes start/stop transitions via
// an operation lock. The exact ordering of listener registration, state
// flips, and handshake messages below is load-bearing — do not reorder.
class PoolRunner {
	/** Exposed to test runner as `VITEST_POOL_ID`. Value is between 1-`maxWorkers`. */
	poolId = void 0;
	project;
	environment;
	_state = RunnerState.IDLE;
	// Deferred held while start()/stop() is in flight, so the two cannot race.
	_operationLock = null;
	_eventEmitter = new EventEmitter();
	_rpc;
	get isTerminated() {
		return this._state === RunnerState.STOPPED;
	}
	get isStarted() {
		return this._state === RunnerState.STARTED;
	}
	constructor(options, worker) {
		// Wires worker messages into birpc: non-control messages are re-emitted
		// on the "rpc" channel which birpc listens to.
		this.worker = worker, this.project = options.project, this.environment = options.environment, this._rpc = createBirpc(createMethodsRPC(this.project, {
			collect: options.method === "collect",
			cacheFs: worker.cacheFs
		}), {
			eventNames: ["onCancel"],
			post: (request) => this.postMessage(request),
			on: (callback) => this._eventEmitter.on("rpc", callback),
			timeout: -1
		}), this.project.vitest.onCancel((reason) => this._rpc.onCancel(reason));
	}
	postMessage(message) {
		// Only send messages when runner is active (not fully stopped)
		// Allow sending during STOPPING state for the 'stop' message itself
		if (this._state !== RunnerState.STOPPED) return this.worker.send(message);
	}
	// Starts the worker and waits for its "started" handshake. Idempotent for
	// STARTED/STARTING; throws when the runner was already stopped.
	async start() {
		// Wait for any ongoing operation to complete
		if (this._operationLock) await this._operationLock;
		if (!(this._state === RunnerState.STARTED || this._state === RunnerState.STARTING)) {
			// If stopped, cannot restart
			if (this._state === RunnerState.STOPPED) throw new Error("[vitest-pool-runner]: Cannot start a stopped runner");
			// Create operation lock to prevent concurrent start/stop
			this._operationLock = createDefer();
			try {
				this._state = RunnerState.STARTING, await this.worker.start(), this.worker.on("error", this.emitWorkerError), this.worker.on("exit", this.emitUnexpectedExit), this.worker.on("message", this.emitWorkerMessage);
				// Wait for 'started' message with timeout
				const startPromise = this.withTimeout(this.waitForStart(), START_TIMEOUT);
				this.postMessage({
					type: "start",
					__vitest_worker_request__: true,
					options: { reportMemory: this.worker.reportMemory ?? false }
				}), await startPromise, this._state = RunnerState.STARTED;
			} catch (error) {
				// Roll back to IDLE so a later start() may retry.
				throw this._state = RunnerState.IDLE, error;
			} finally {
				this._operationLock.resolve(), this._operationLock = null;
			}
		}
	}
	// Asks the worker to tear down, waits for its "stopped" reply (reporting
	// any teardown error), then closes the rpc channel and the worker itself.
	async stop() {
		// Wait for any ongoing operation to complete
		if (this._operationLock) await this._operationLock;
		if (!(this._state === RunnerState.STOPPED || this._state === RunnerState.STOPPING)) {
			// If never started, just mark as stopped
			if (this._state === RunnerState.IDLE) {
				this._state = RunnerState.STOPPED;
				return;
			}
			// Create operation lock to prevent concurrent start/stop
			this._operationLock = createDefer();
			try {
				// "exit" during an intentional stop is expected, so the
				// unexpected-exit listener is detached first.
				this._state = RunnerState.STOPPING, this.worker.off("exit", this.emitUnexpectedExit), await this.withTimeout(new Promise((resolve) => {
					const onStop = (response) => {
						if (response.type === "stopped") {
							if (response.error) this.project.vitest.state.catchError(response.error, "Teardown Error");
							resolve(), this.off("message", onStop);
						}
					};
					this.on("message", onStop), this.postMessage({
						type: "stop",
						__vitest_worker_request__: true
					});
				}), STOP_TIMEOUT), this._eventEmitter.removeAllListeners(), this._rpc.$close(/* @__PURE__ */ new Error("[vitest-pool-runner]: Pending methods while closing rpc")), await this.worker.stop(), this._state = RunnerState.STOPPED;
			} catch (error) {
				// Even a failed stop leaves the runner terminated.
				throw this._state = RunnerState.STOPPED, error;
			} finally {
				this._operationLock.resolve(), this._operationLock = null;
			}
		}
	}
	on(event, callback) {
		this._eventEmitter.on(event, callback);
	}
	off(event, callback) {
		this._eventEmitter.off(event, callback);
	}
	// Normalizes any worker "error" payload into an Error instance.
	emitWorkerError = (maybeError) => {
		const error = maybeError instanceof Error ? maybeError : new Error(String(maybeError));
		this._eventEmitter.emit("error", error);
	};
	// Routes raw worker messages: control responses go to "message",
	// everything else is an rpc payload for birpc.
	emitWorkerMessage = (response) => {
		try {
			const message = this.worker.deserialize(response);
			if (typeof message === "object" && message != null && message.__vitest_worker_response__) this._eventEmitter.emit("message", message);
			else this._eventEmitter.emit("rpc", message);
		} catch (error) {
			this._eventEmitter.emit("error", error);
		}
	};
	emitUnexpectedExit = () => {
		const error = /* @__PURE__ */ new Error("Worker exited unexpectedly");
		this._eventEmitter.emit("error", error);
	};
	// Resolves once the worker acknowledges the "start" request.
	waitForStart() {
		return new Promise((resolve) => {
			const onStart = (message) => {
				if (message.type === "started") this.off("message", onStart), resolve();
			};
			this.on("message", onStart);
		});
	}
	// Wraps a promise with a rejection timer; the timer is cleared on either
	// outcome so it never leaks.
	withTimeout(promise, timeout) {
		return new Promise((resolve_, reject_) => {
			const timer = setTimeout(() => reject(/* @__PURE__ */ new Error("[vitest-pool-runner]: Timeout waiting for worker to respond")), timeout);
			function resolve(value) {
				clearTimeout(timer), resolve_(value);
			}
			function reject(error) {
				clearTimeout(timer), reject_(error);
			}
			promise.then(resolve, reject);
		});
	}
}
|
|
6542
|
+
|
|
6543
|
+
// Grace period before escalating from SIGTERM to SIGKILL when stopping.
const SIGKILL_TIMEOUT = 500;
/** @experimental */
// Pool worker backed by a child process (node:child_process.fork). Messages
// are v8-serialized in both directions; the serialized config is wrapped so
// regexes and undefined-valued defines survive the v8 round trip.
class ForksPoolWorker {
	name = "forks";
	execArgv;
	env;
	// Child processes do not share the parent FS cache, so cache on disk.
	cacheFs = true;
	entrypoint;
	// Underlying ChildProcess; undefined until start() and after stop().
	_fork;
	constructor(options) {
		/** Loads {@link file://./../../../runtime/workers/forks.ts} */
		this.execArgv = options.execArgv, this.env = options.env, this.entrypoint = resolve$1(options.distPath, "workers/forks.js");
	}
	on(event, callback) {
		this.fork.on(event, callback);
	}
	off(event, callback) {
		this.fork.off(event, callback);
	}
	// Wraps the embedded config (when present) for v8 serialization, then
	// sends the serialized buffer over the IPC channel.
	send(message) {
		if ("context" in message) message = {
			...message,
			context: {
				...message.context,
				config: wrapSerializableConfig(message.context.config)
			}
		};
		this.fork.send(v8.serialize(message));
	}
	// Idempotent: reuses an existing child process when one is alive.
	async start() {
		this._fork ||= fork(this.entrypoint, [], {
			env: this.env,
			execArgv: this.execArgv
		});
	}
	// Politely kills the child, escalating to SIGKILL after SIGKILL_TIMEOUT
	// if it has not exited yet.
	async stop() {
		const fork = this.fork, waitForExit = new Promise((resolve) => {
			if (fork.exitCode != null) resolve();
			else fork.once("exit", resolve);
		}), sigkillTimeout = setTimeout(() => fork.kill("SIGKILL"), SIGKILL_TIMEOUT);
		fork.kill(), await waitForExit, clearTimeout(sigkillTimeout), this._fork = void 0;
	}
	// Deserializes a v8 buffer from the child; a failure here means something
	// other than Vitest wrote to the process channel.
	deserialize(data) {
		try {
			return v8.deserialize(Buffer.from(data));
		} catch (error) {
			let stringified = "";
			try {
				stringified = `\nReceived value: ${JSON.stringify(data)}`;
			} catch {}
			throw new Error(`[vitest-pool]: Unexpected call to process.send(). Make sure your test cases are not interfering with process's channel.${stringified}`, { cause: error });
		}
	}
	// Accessor that fails loudly when the child is not running.
	get fork() {
		if (!this._fork) throw new Error(`The child process was torn down or never initialized. This is a bug in Vitest.`);
		return this._fork;
	}
}
|
|
6601
|
+
/**
 * Prepares `SerializedConfig` for serialization, e.g. `node:v8.serialize`
 * - Unwrapping done in {@link file://./../../../runtime/workers/init-forks.ts}
 */
function wrapSerializableConfig(config) {
	let { testNamePattern, defines } = config;
	// v8 serialize does not support regex
	if (testNamePattern && typeof testNamePattern !== "string") {
		testNamePattern = `$$vitest:${testNamePattern.toString()}`;
	}
	// v8 serialize drops properties with undefined value
	if (defines) {
		defines = {
			keys: Object.keys(defines),
			original: defines
		};
	}
	return {
		...config,
		testNamePattern,
		defines
	};
}
|
|
6620
|
+
|
|
6621
|
+
/** @experimental */
// Pool worker backed by a worker thread. Unlike the forks worker, messages
// travel over postMessage with structured clone, so no manual (de)serialization
// is required.
class ThreadsPoolWorker {
	name = "threads";
	execArgv;
	env;
	entrypoint;
	_thread;
	constructor(options) {
		/** Loads {@link file://./../../../runtime/workers/threads.ts} */
		this.execArgv = options.execArgv;
		this.env = options.env;
		this.entrypoint = resolve$1(options.distPath, "workers/threads.js");
	}
	on(event, callback) {
		this.thread.on(event, callback);
	}
	off(event, callback) {
		this.thread.off(event, callback);
	}
	send(message) {
		this.thread.postMessage(message);
	}
	async start() {
		// This can be called multiple times if the runtime is shared.
		if (!this._thread) {
			this._thread = new Worker(this.entrypoint, {
				env: this.env,
				execArgv: this.execArgv
			});
		}
	}
	async stop() {
		await this.thread.terminate();
		this._thread = void 0;
	}
	// Structured clone already yields usable objects.
	deserialize(data) {
		return data;
	}
	// Accessor that fails loudly when the thread is not running.
	get thread() {
		if (!this._thread) throw new Error(`The worker thread was torn down or never initialized. This is a bug in Vitest.`);
		return this._thread;
	}
}
|
|
6661
|
+
|
|
6662
|
+
/** @experimental */
// In-process "worker" for the typecheck pool: there is no child process or
// thread; `send` dispatches the control message straight to `onMessage` and
// replies are re-emitted on a local EventEmitter.
class TypecheckPoolWorker {
	name = "typecheck";
	execArgv;
	env;
	project;
	_eventEmitter = new EventEmitter$1();
	constructor(options) {
		this.execArgv = options.execArgv, this.env = options.env, this.project = options.project;
	}
	async start() {
		// noop, onMessage handles it
	}
	async stop() {
		// noop, onMessage handles it
	}
	// Routes the request to the in-process handler and emits the response (if
	// any) as an incoming "message".
	send(message) {
		onMessage(message, this.project).then((response) => {
			if (response) this._eventEmitter.emit("message", response);
		});
	}
	on(event, callback) {
		this._eventEmitter.on(event, callback);
	}
	off(event, callback) {
		// FIX: this previously called `.on(...)`, registering the listener a
		// second time instead of removing it — leaking listeners on every
		// detach (e.g. PoolRunner removing its handshake handlers).
		this._eventEmitter.off(event, callback);
	}
	// Messages never leave the process, so no deserialization is needed.
	deserialize(data) {
		return data;
	}
}
|
|
6693
|
+
// Marker flag attached to every control response; one runner is cached per
// Vitest instance.
const __vitest_worker_response__ = true, runners = /* @__PURE__ */ new WeakMap();
// Handles a pool control message ("start"/"run"/"collect"/"stop") for the
// in-process typecheck worker and returns the matching response object.
// Returns undefined for anything that is not a worker request.
async function onMessage(message, project) {
	if (message?.__vitest_worker_request__ !== true) return;
	let runner = runners.get(project.vitest);
	if (!runner) runner = createRunner(project.vitest), runners.set(project.vitest, runner);
	let runPromise;
	switch (message.type) {
		case "start": return {
			type: "started",
			__vitest_worker_response__
		};
		// Run/collect failures are captured and returned as `error` rather
		// than thrown, mirroring the real worker protocol.
		case "run": return runPromise = runner.runTests(message.context.files, project).catch((error) => error), {
			type: "testfileFinished",
			error: await runPromise,
			__vitest_worker_response__
		};
		case "collect": return runPromise = runner.collectTests(message.context.files, project).catch((error) => error), {
			type: "testfileFinished",
			error: await runPromise,
			__vitest_worker_response__
		};
		// NOTE(review): `runPromise` is a fresh local per call, so it is
		// always undefined here and this first await is a no-op — verify
		// whether "stop" was meant to wait on a previous run's promise.
		case "stop": return await runPromise, await project.typechecker?.stop(), {
			type: "stopped",
			__vitest_worker_response__
		};
	}
	throw new Error(`Unexpected message ${JSON.stringify(message, null, 2)}`);
}
|
|
6721
|
+
// Creates the typecheck test runner: lazily attaches a Typechecker per
// project, bridges its parse/watch callbacks into Vitest's test-run
// reporting, and exposes runTests/collectTests used by onMessage above.
function createRunner(vitest) {
	// promisesMap: per-project deferred resolved when a parse pass finishes.
	// rerunTriggered: projects whose TSC watcher already kicked off a rerun.
	const promisesMap = /* @__PURE__ */ new WeakMap(), rerunTriggered = /* @__PURE__ */ new WeakSet();
	// Publishes the results of a finished typecheck pass: task updates,
	// source errors, and a synthesized "Typecheck Error" when tsc exited
	// non-zero without any reported failures.
	async function onParseEnd(project, { files, sourceErrors }) {
		const checker = project.typechecker, { packs, events } = checker.getTestPacksAndEvents();
		if (await vitest._testRun.updated(packs, events), !project.config.typecheck.ignoreSourceErrors) sourceErrors.forEach((error) => vitest.state.catchError(error, "Unhandled Source Error"));
		if (!hasFailed(files) && !sourceErrors.length && checker.getExitCode()) {
			const error = new Error(checker.getOutput());
			error.stack = "", vitest.state.catchError(error, "Typecheck Error");
		}
		// triggered by TSC watcher, not Vitest watcher, so we need to emulate what Vitest does in this case
		if (promisesMap.get(project)?.resolve(), rerunTriggered.delete(project), vitest.config.watch && !vitest.runningPromise) {
			const modules = files.map((file) => vitest.state.getReportedEntity(file)).filter((e) => e?.type === "module"), state = vitest.isCancelling ? "interrupted" : modules.some((m) => !m.ok()) ? "failed" : "passed";
			await vitest.report("onTestRunEnd", modules, [], state), await vitest.report("onWatcherStart", files, [...project.config.typecheck.ignoreSourceErrors ? [] : sourceErrors, ...vitest.state.getUnhandledErrors()]);
		}
	}
	// Returns the project's Typechecker, creating and wiring it (parse and
	// watcher-rerun callbacks) exactly once per project.
	async function createWorkspaceTypechecker(project, files) {
		const checker = project.typechecker ?? new Typechecker(project);
		return project.typechecker ? checker : (project.typechecker = checker, checker.setFiles(files), checker.onParseStart(async () => {
			const files = checker.getTestFiles();
			for (const file of files) await vitest._testRun.enqueued(project, file);
			await vitest._testRun.collected(project, files);
		}), checker.onParseEnd((result) => onParseEnd(project, result)), checker.onWatcherRerun(async () => {
			if (rerunTriggered.add(project), !vitest.runningPromise) vitest.state.clearErrors(), await vitest.report("onWatcherRerun", files, "File change detected. Triggering rerun.");
			await checker.collectTests();
			const testFiles = checker.getTestFiles();
			for (const file of testFiles) await vitest._testRun.enqueued(project, file);
			await vitest._testRun.collected(project, testFiles);
			const { packs, events } = checker.getTestPacksAndEvents();
			await vitest._testRun.updated(packs, events);
		}), checker);
	}
	// Starts tsc for the project unless a checker is already attached.
	async function startTypechecker(project, files) {
		if (project.typechecker) return;
		const checker = await createWorkspaceTypechecker(project, files);
		await checker.collectTests(), await checker.start();
	}
	// Collect-only mode: parse test files without starting a tsc run.
	async function collectTests(specs, project) {
		const files = specs.map((spec) => spec.filepath), checker = await createWorkspaceTypechecker(project, files);
		checker.setFiles(files), await checker.collectTests();
		const testFiles = checker.getTestFiles();
		vitest.state.collectFiles(project, testFiles);
	}
	async function runTests(specs, project) {
		// Poll briefly (up to 500ms) for the TSC watcher to pick up changes on
		// its own; `triggered` is true when a watcher rerun is already pending.
		const promises = [], files = specs.map((spec) => spec.filepath), promise = createDefer(), triggered = await new Promise((resolve) => {
			const _i = setInterval(() => {
				if (!project.typechecker || rerunTriggered.has(project)) resolve(true), clearInterval(_i);
			});
			setTimeout(() => {
				resolve(false), clearInterval(_i);
			}, 500).unref();
		});
		// No watcher rerun coming: re-report the checker's existing result.
		if (project.typechecker && !triggered) {
			const testFiles = project.typechecker.getTestFiles();
			for (const file of testFiles) await vitest._testRun.enqueued(project, file);
			await vitest._testRun.collected(project, testFiles), await onParseEnd(project, project.typechecker.getResult());
		}
		// Wait for both the parse-end deferred and (first run only) tsc start.
		promises.push(promise), promisesMap.set(project, promise), promises.push(startTypechecker(project, files)), await Promise.all(promises);
	}
	return {
		runTests,
		collectTests
	};
}
|
|
6784
|
+
|
|
6785
|
+
/** @experimental */
// Fork-based worker that runs each test file inside a VM context; requires
// Node's experimental VM modules flag.
class VmForksPoolWorker extends ForksPoolWorker {
	name = "vmForks";
	reportMemory = true;
	entrypoint;
	constructor(options) {
		/** Loads {@link file://./../../../runtime/workers/vmForks.ts} */
		super({
			...options,
			execArgv: [...options.execArgv, "--experimental-vm-modules"]
		});
		this.entrypoint = resolve$1(options.distPath, "workers/vmForks.js");
	}
}
|
|
6798
|
+
|
|
6799
|
+
/** @experimental */
// Thread-based worker that runs each test file inside a VM context; requires
// Node's experimental VM modules flag.
class VmThreadsPoolWorker extends ThreadsPoolWorker {
	name = "vmThreads";
	reportMemory = true;
	entrypoint;
	constructor(options) {
		/** Loads {@link file://./../../../runtime/workers/vmThreads.ts} */
		super({
			...options,
			execArgv: [...options.execArgv, "--experimental-vm-modules"]
		});
		this.entrypoint = resolve$1(options.distPath, "workers/vmThreads.js");
	}
}
|
|
6812
|
+
|
|
6813
|
+
const WORKER_START_TIMEOUT = 5e3;
|
|
6814
|
+
class Pool {
|
|
6815
|
+
maxWorkers = 0;
|
|
6816
|
+
workerIds = /* @__PURE__ */ new Map();
|
|
6817
|
+
queue = [];
|
|
6818
|
+
activeTasks = [];
|
|
6819
|
+
sharedRunners = [];
|
|
6820
|
+
exitPromises = [];
|
|
6821
|
+
_isCancelling = false;
|
|
6822
|
+
constructor(options, logger) {
|
|
6823
|
+
this.options = options, this.logger = logger;
|
|
6824
|
+
}
|
|
6825
|
+
setMaxWorkers(maxWorkers) {
|
|
6826
|
+
this.maxWorkers = maxWorkers, this.workerIds = new Map(Array.from({ length: maxWorkers }).fill(0).map((_, i) => [i + 1, true]));
|
|
6827
|
+
}
|
|
6828
|
+
async run(task, method) {
|
|
6829
|
+
// Prevent new tasks from being queued during cancellation
|
|
6830
|
+
if (this._isCancelling) throw new Error("[vitest-pool]: Cannot run tasks while pool is cancelling");
|
|
6831
|
+
// Every runner related failure should make this promise reject so that it's picked by pool.
|
|
6832
|
+
// This resolver is used to make the error handling in recursive queue easier.
|
|
6833
|
+
const testFinish = withResolvers();
|
|
6834
|
+
this.queue.push({
|
|
6835
|
+
task,
|
|
6836
|
+
resolver: testFinish,
|
|
6837
|
+
method
|
|
6838
|
+
}), this.schedule(), await testFinish.promise;
|
|
6839
|
+
}
|
|
6840
|
+
async schedule() {
|
|
6841
|
+
if (this.queue.length === 0 || this.activeTasks.length >= this.maxWorkers) return;
|
|
6842
|
+
const { task, resolver, method } = this.queue.shift();
|
|
6843
|
+
try {
|
|
6844
|
+
let isMemoryLimitReached = false;
|
|
6845
|
+
const runner = this.getPoolRunner(task, method), activeTask = {
|
|
6846
|
+
task,
|
|
6847
|
+
resolver,
|
|
6848
|
+
method,
|
|
6849
|
+
cancelTask
|
|
6850
|
+
};
|
|
6851
|
+
this.activeTasks.push(activeTask), runner.on("error", (error) => {
|
|
6852
|
+
resolver.reject(new Error(`[vitest-pool]: Worker ${task.worker} emitted error.`, { cause: error }));
|
|
6853
|
+
});
|
|
6854
|
+
async function cancelTask() {
|
|
6855
|
+
await runner.stop(), resolver.reject(/* @__PURE__ */ new Error("Cancelled"));
|
|
6856
|
+
}
|
|
6857
|
+
const onFinished = (message) => {
|
|
6858
|
+
if (message?.__vitest_worker_response__ && message.type === "testfileFinished") {
|
|
6859
|
+
if (task.memoryLimit && message.usedMemory) isMemoryLimitReached = message.usedMemory >= task.memoryLimit;
|
|
6860
|
+
if (message.error) this.options.state.catchError(message.error, "Test Run Error");
|
|
6861
|
+
runner.off("message", onFinished), resolver.resolve();
|
|
6862
|
+
}
|
|
6863
|
+
};
|
|
6864
|
+
if (runner.on("message", onFinished), !runner.isStarted) {
|
|
6865
|
+
const id = setTimeout(() => resolver.reject(/* @__PURE__ */ new Error(`[vitest-pool]: Timeout starting ${task.worker} runner.`)), WORKER_START_TIMEOUT);
|
|
6866
|
+
await runner.start().finally(() => clearTimeout(id));
|
|
6867
|
+
}
|
|
6868
|
+
const poolId = runner.poolId ?? this.getWorkerId();
|
|
6869
|
+
runner.poolId = poolId, runner.postMessage({
|
|
6870
|
+
__vitest_worker_request__: true,
|
|
6871
|
+
type: method,
|
|
6872
|
+
context: task.context,
|
|
6873
|
+
poolId
|
|
6874
|
+
}), await resolver.promise;
|
|
6875
|
+
const index = this.activeTasks.indexOf(activeTask);
|
|
6876
|
+
if (index !== -1) this.activeTasks.splice(index, 1);
|
|
6877
|
+
if (!task.isolate && !isMemoryLimitReached && this.queue[0]?.task.isolate === false && isEqualRunner(runner, this.queue[0].task)) return this.sharedRunners.push(runner), this.schedule();
|
|
6878
|
+
// Runner terminations are started but not awaited until the end of full run.
|
|
6879
|
+
// Runner termination can also already start from task cancellation.
|
|
6880
|
+
if (!runner.isTerminated) {
|
|
6881
|
+
const id = setTimeout(() => this.logger.error(`[vitest-pool]: Timeout terminating ${task.worker} worker for test files ${formatFiles(task)}.`), this.options.teardownTimeout);
|
|
6882
|
+
this.exitPromises.push(runner.stop().then(() => clearTimeout(id)).catch((error) => this.logger.error(`[vitest-pool]: Failed to terminate ${task.worker} worker for test files ${formatFiles(task)}.`, error)));
|
|
6883
|
+
}
|
|
6884
|
+
this.freeWorkerId(poolId);
|
|
6885
|
+
}
|
|
6886
|
+
// This is mostly to avoid zombie workers when/if Vitest internals run into errors
|
|
6887
|
+
catch (error) {
|
|
6888
|
+
return resolver.reject(error);
|
|
6889
|
+
}
|
|
6890
|
+
return this.schedule();
|
|
6891
|
+
}
|
|
6892
|
+
async cancel() {
|
|
6893
|
+
// Set flag to prevent new tasks from being queued
|
|
6894
|
+
this._isCancelling = true;
|
|
6895
|
+
const pendingTasks = this.queue.splice(0);
|
|
6896
|
+
if (pendingTasks.length) {
|
|
6897
|
+
const error = /* @__PURE__ */ new Error("Cancelled");
|
|
6898
|
+
pendingTasks.forEach((task) => task.resolver.reject(error));
|
|
6899
|
+
}
|
|
6900
|
+
const activeTasks = this.activeTasks.splice(0);
|
|
6901
|
+
await Promise.all(activeTasks.map((task) => task.cancelTask()));
|
|
6902
|
+
const sharedRunners = this.sharedRunners.splice(0);
|
|
6903
|
+
// Reset flag after cancellation completes
|
|
6904
|
+
await Promise.all(sharedRunners.map((runner) => runner.stop())), await Promise.all(this.exitPromises.splice(0)), this.workerIds.forEach((_, id) => this.freeWorkerId(id)), this._isCancelling = false;
|
|
6905
|
+
}
|
|
6906
|
+
async close() {
|
|
6907
|
+
await this.cancel();
|
|
6908
|
+
}
|
|
6909
|
+
getPoolRunner(task, method) {
|
|
6910
|
+
if (task.isolate === false) {
|
|
6911
|
+
const index = this.sharedRunners.findIndex((runner) => isEqualRunner(runner, task));
|
|
6912
|
+
if (index !== -1) return this.sharedRunners.splice(index, 1)[0];
|
|
6913
|
+
}
|
|
6914
|
+
const options = {
|
|
6915
|
+
distPath: this.options.distPath,
|
|
6916
|
+
project: task.project,
|
|
6917
|
+
method,
|
|
6918
|
+
environment: task.context.environment.name,
|
|
6919
|
+
env: task.env,
|
|
6920
|
+
execArgv: task.execArgv
|
|
6921
|
+
};
|
|
6922
|
+
switch (task.worker) {
|
|
6923
|
+
case "forks": return new PoolRunner(options, new ForksPoolWorker(options));
|
|
6924
|
+
case "vmForks": return new PoolRunner(options, new VmForksPoolWorker(options));
|
|
6925
|
+
case "threads": return new PoolRunner(options, new ThreadsPoolWorker(options));
|
|
6926
|
+
case "vmThreads": return new PoolRunner(options, new VmThreadsPoolWorker(options));
|
|
6927
|
+
case "typescript": return new PoolRunner(options, new TypecheckPoolWorker(options));
|
|
6928
|
+
}
|
|
6929
|
+
const customPool = task.project.config.poolRunner;
|
|
6930
|
+
if (customPool != null && customPool.name === task.worker) return new PoolRunner(options, customPool.createPoolWorker(options));
|
|
6931
|
+
throw new Error(`Runner ${task.worker} is not supported. Test files: ${formatFiles(task)}.`);
|
|
6932
|
+
}
|
|
6933
|
+
getWorkerId() {
|
|
6934
|
+
let workerId = 0;
|
|
6935
|
+
return this.workerIds.forEach((state, id) => {
|
|
6936
|
+
if (state && !workerId) workerId = id, this.workerIds.set(id, false);
|
|
6937
|
+
}), workerId;
|
|
6938
|
+
}
|
|
6939
|
+
freeWorkerId(id) {
|
|
6940
|
+
this.workerIds.set(id, true);
|
|
6941
|
+
}
|
|
6942
|
+
}
|
|
6943
|
+
function withResolvers() {
|
|
6944
|
+
let resolve = () => {}, reject = (_error) => {};
|
|
6945
|
+
const promise = new Promise((res, rej) => {
|
|
6946
|
+
resolve = res, reject = rej;
|
|
6947
|
+
});
|
|
6948
|
+
return {
|
|
6949
|
+
resolve,
|
|
6950
|
+
reject,
|
|
6951
|
+
promise
|
|
6952
|
+
};
|
|
6953
|
+
}
|
|
6954
|
+
function formatFiles(task) {
|
|
6955
|
+
return task.context.files.map((file) => file.filepath).join(", ");
|
|
6956
|
+
}
|
|
6957
|
+
function isEqualRunner(runner, task) {
|
|
6958
|
+
if (task.isolate) throw new Error("Isolated tasks should not share runners");
|
|
6959
|
+
return runner.worker.name === task.worker && runner.project === task.project && runner.environment === task.context.environment.name && runner.worker.execArgv.every((arg, index) => task.execArgv[index] === arg) && isEnvEqual(runner.worker.env, task.env);
|
|
6960
|
+
}
|
|
6961
|
+
function isEnvEqual(a, b) {
|
|
6962
|
+
const keys = Object.keys(a);
|
|
6963
|
+
return keys.length === Object.keys(b).length ? keys.every((key) => a[key] === b[key]) : false;
|
|
6964
|
+
}
|
|
6965
|
+
|
|
6966
|
+
const suppressWarningsPath = resolve(rootDir, "./suppress-warnings.cjs");
|
|
6967
|
+
function getFilePoolName(project) {
|
|
6968
|
+
return project.config.browser.enabled ? "browser" : project.config.pool;
|
|
6969
|
+
}
|
|
6970
|
+
function createPool(ctx) {
|
|
6971
|
+
const pool = new Pool({
|
|
6972
|
+
distPath: ctx.distPath,
|
|
6973
|
+
teardownTimeout: ctx.config.teardownTimeout,
|
|
6974
|
+
state: ctx.state
|
|
6975
|
+
}, ctx.logger), options = resolveOptions(ctx), Sequencer = ctx.config.sequence.sequencer, sequencer = new Sequencer(ctx);
|
|
6976
|
+
let browserPool;
|
|
6977
|
+
async function executeTests(method, specs, invalidates) {
|
|
6978
|
+
if (ctx.onCancel(() => pool.cancel()), ctx.config.shard) {
|
|
6979
|
+
if (!ctx.config.passWithNoTests && ctx.config.shard.count > specs.length) throw new Error(`--shard <count> must be a smaller than count of test files. Resolved ${specs.length} test files for --shard=${ctx.config.shard.index}/${ctx.config.shard.count}.`);
|
|
6980
|
+
specs = await sequencer.shard(Array.from(specs));
|
|
6981
|
+
}
|
|
6982
|
+
const taskGroups = [];
|
|
6983
|
+
let workerId = 0;
|
|
6984
|
+
const sorted = await sequencer.sort(specs), groups = groupSpecs(sorted);
|
|
6985
|
+
for (const group of groups) {
|
|
6986
|
+
if (!group) continue;
|
|
6987
|
+
const taskGroup = [], browserSpecs = [];
|
|
6988
|
+
taskGroups.push({
|
|
6989
|
+
tasks: taskGroup,
|
|
6990
|
+
maxWorkers: group.maxWorkers,
|
|
6991
|
+
browserSpecs
|
|
6992
|
+
});
|
|
6993
|
+
for (const specs of group.specs) {
|
|
6994
|
+
const { project, pool } = specs[0];
|
|
6995
|
+
if (pool === "browser") {
|
|
6996
|
+
browserSpecs.push(...specs);
|
|
6997
|
+
continue;
|
|
6998
|
+
}
|
|
6999
|
+
const byEnv = await groupFilesByEnv(specs), env = Object.values(byEnv)[0][0];
|
|
7000
|
+
taskGroup.push({
|
|
7001
|
+
context: {
|
|
7002
|
+
pool,
|
|
7003
|
+
config: project.serializedConfig,
|
|
7004
|
+
files: specs.map((spec) => ({
|
|
7005
|
+
filepath: spec.moduleId,
|
|
7006
|
+
testLocations: spec.testLines
|
|
7007
|
+
})),
|
|
7008
|
+
invalidates,
|
|
7009
|
+
environment: env.environment,
|
|
7010
|
+
projectName: project.name,
|
|
7011
|
+
providedContext: project.getProvidedContext(),
|
|
7012
|
+
workerId: workerId++
|
|
7013
|
+
},
|
|
7014
|
+
project,
|
|
7015
|
+
env: options.env,
|
|
7016
|
+
execArgv: [...options.execArgv, ...project.config.execArgv],
|
|
7017
|
+
worker: pool,
|
|
7018
|
+
isolate: project.config.isolate,
|
|
7019
|
+
memoryLimit: getMemoryLimit(ctx.config, pool) ?? null
|
|
7020
|
+
});
|
|
7021
|
+
}
|
|
7022
|
+
}
|
|
7023
|
+
const results = [];
|
|
7024
|
+
for (const { tasks, browserSpecs, maxWorkers } of taskGroups) {
|
|
7025
|
+
pool.setMaxWorkers(maxWorkers);
|
|
7026
|
+
const promises = tasks.map(async (task) => {
|
|
7027
|
+
if (ctx.isCancelling) return ctx.state.cancelFiles(task.context.files, task.project);
|
|
7028
|
+
try {
|
|
7029
|
+
await pool.run(task, method);
|
|
7030
|
+
} catch (error) {
|
|
7031
|
+
// Intentionally cancelled
|
|
7032
|
+
if (ctx.isCancelling && error instanceof Error && error.message === "Cancelled") ctx.state.cancelFiles(task.context.files, task.project);
|
|
7033
|
+
else throw error;
|
|
7034
|
+
}
|
|
7035
|
+
});
|
|
7036
|
+
if (browserSpecs.length) if (browserPool ??= createBrowserPool(ctx), method === "collect") promises.push(browserPool.collectTests(browserSpecs));
|
|
7037
|
+
else promises.push(browserPool.runTests(browserSpecs));
|
|
7038
|
+
const groupResults = await Promise.allSettled(promises);
|
|
7039
|
+
results.push(...groupResults);
|
|
7040
|
+
}
|
|
7041
|
+
const errors = results.filter((result) => result.status === "rejected").map((result) => result.reason);
|
|
7042
|
+
if (errors.length > 0) throw new AggregateError(errors, "Errors occurred while running tests. For more information, see serialized error.");
|
|
7043
|
+
}
|
|
7044
|
+
return {
|
|
7045
|
+
name: "default",
|
|
7046
|
+
runTests: (files, invalidates) => executeTests("run", files, invalidates),
|
|
7047
|
+
collectTests: (files, invalidates) => executeTests("collect", files, invalidates),
|
|
7048
|
+
async close() {
|
|
7049
|
+
await Promise.all([pool.close(), browserPool?.close?.()]);
|
|
7050
|
+
}
|
|
7051
|
+
};
|
|
7052
|
+
}
|
|
7053
|
+
function resolveOptions(ctx) {
|
|
7054
|
+
// in addition to resolve.conditions Vite also adds production/development,
|
|
7055
|
+
// see: https://github.com/vitejs/vite/blob/af2aa09575229462635b7cbb6d248ca853057ba2/packages/vite/src/node/plugins/resolve.ts#L1056-L1080
|
|
7056
|
+
const viteMajor = Number(version.split(".")[0]), conditions = [...new Set(viteMajor >= 6 ? ctx.vite.config.ssr.resolve?.conditions ?? [] : [
|
|
7057
|
+
"production",
|
|
7058
|
+
"development",
|
|
7059
|
+
...ctx.vite.config.resolve.conditions
|
|
7060
|
+
])].filter((condition) => {
|
|
7061
|
+
return condition === "production" ? ctx.vite.config.isProduction : condition === "development" ? !ctx.vite.config.isProduction : true;
|
|
7062
|
+
}).map((condition) => {
|
|
7063
|
+
return viteMajor >= 6 && condition === "development|production" ? ctx.vite.config.isProduction ? "production" : "development" : condition;
|
|
7064
|
+
}).flatMap((c) => ["--conditions", c]), options = {
|
|
7065
|
+
execArgv: [
|
|
7066
|
+
...process.execArgv.filter((execArg) => execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir")),
|
|
7067
|
+
...conditions,
|
|
7068
|
+
"--experimental-import-meta-resolve",
|
|
7069
|
+
"--require",
|
|
7070
|
+
suppressWarningsPath
|
|
7071
|
+
],
|
|
7072
|
+
env: {
|
|
7073
|
+
TEST: "true",
|
|
7074
|
+
VITEST: "true",
|
|
7075
|
+
NODE_ENV: process.env.NODE_ENV || "test",
|
|
7076
|
+
VITEST_MODE: ctx.config.watch ? "WATCH" : "RUN",
|
|
7077
|
+
FORCE_TTY: isatty(1) ? "true" : "",
|
|
7078
|
+
...process.env,
|
|
7079
|
+
...ctx.config.env
|
|
7080
|
+
}
|
|
7081
|
+
};
|
|
7082
|
+
// env are case-insensitive on Windows, but spawned processes don't support it
|
|
7083
|
+
if (isWindows) for (const name in options.env) options.env[name.toUpperCase()] = options.env[name];
|
|
7084
|
+
return options;
|
|
7085
|
+
}
|
|
7086
|
+
function resolveMaxWorkers(project) {
|
|
7087
|
+
if (project.config.maxWorkers) return project.config.maxWorkers;
|
|
7088
|
+
if (project.vitest.config.maxWorkers) return project.vitest.config.maxWorkers;
|
|
7089
|
+
const numCpus = typeof nodeos.availableParallelism === "function" ? nodeos.availableParallelism() : nodeos.cpus().length;
|
|
7090
|
+
return project.vitest.config.watch ? Math.max(Math.floor(numCpus / 2), 1) : Math.max(numCpus - 1, 1);
|
|
7091
|
+
}
|
|
7092
|
+
function getMemoryLimit(config, pool) {
|
|
7093
|
+
if (pool !== "vmForks" && pool !== "vmThreads") return null;
|
|
7094
|
+
const memory = nodeos.totalmem(), limit = getWorkerMemoryLimit(config);
|
|
7095
|
+
// just ignore "memoryLimit" value because we cannot detect memory limit
|
|
7096
|
+
return typeof memory === "number" ? stringToBytes(limit, config.watch ? memory / 2 : memory) : typeof limit === "number" && limit > 1 || typeof limit === "string" && limit.at(-1) !== "%" ? stringToBytes(limit) : null;
|
|
7097
|
+
}
|
|
7098
|
+
function groupSpecs(specs) {
|
|
7099
|
+
const groups = [], sequential = {
|
|
7100
|
+
specs: [],
|
|
7101
|
+
maxWorkers: 1
|
|
7102
|
+
}, typechecks = {};
|
|
7103
|
+
for (const project in specs.forEach((spec) => {
|
|
7104
|
+
if (spec.pool === "typescript") {
|
|
7105
|
+
typechecks[spec.project.name] ||= [], typechecks[spec.project.name].push(spec);
|
|
7106
|
+
return;
|
|
7107
|
+
}
|
|
7108
|
+
const order = spec.project.config.sequence.groupOrder;
|
|
7109
|
+
// Files that have disabled parallelism and default groupId are set into their own group
|
|
7110
|
+
if (order === 0 && spec.project.config.fileParallelism === false) return sequential.specs.push([spec]);
|
|
7111
|
+
const maxWorkers = resolveMaxWorkers(spec.project);
|
|
7112
|
+
// Multiple projects with different maxWorkers but same groupId
|
|
7113
|
+
if (groups[order] ||= {
|
|
7114
|
+
specs: [],
|
|
7115
|
+
maxWorkers
|
|
7116
|
+
}, groups[order].maxWorkers !== maxWorkers) {
|
|
7117
|
+
const last = groups[order].specs.at(-1)?.at(-1)?.project.name;
|
|
7118
|
+
throw new Error(`Projects "${last}" and "${spec.project.name}" have different 'maxWorkers' but same 'sequence.groupId'.\nProvide unique 'sequence.groupId' for them.`);
|
|
7119
|
+
}
|
|
7120
|
+
groups[order].specs.push([spec]);
|
|
7121
|
+
}), typechecks) {
|
|
7122
|
+
const order = Math.max(0, ...groups.keys()) + 1;
|
|
7123
|
+
groups[order] ||= {
|
|
7124
|
+
specs: [],
|
|
7125
|
+
maxWorkers: resolveMaxWorkers(typechecks[project][0].project)
|
|
7126
|
+
}, groups[order].specs.push(typechecks[project]);
|
|
7127
|
+
}
|
|
7128
|
+
if (sequential.specs.length) groups.push(sequential);
|
|
7129
|
+
return groups;
|
|
7130
|
+
}
|
|
7131
|
+
|
|
5858
7132
|
function serializeConfig(project) {
|
|
5859
|
-
const { config, globalConfig } = project, viteConfig = project._vite?.config, optimizer = config.deps?.optimizer || {}
|
|
7133
|
+
const { config, globalConfig } = project, viteConfig = project._vite?.config, optimizer = config.deps?.optimizer || {};
|
|
5860
7134
|
return {
|
|
5861
7135
|
environmentOptions: config.environmentOptions,
|
|
5862
7136
|
mode: config.mode,
|
|
5863
7137
|
isolate: config.isolate,
|
|
7138
|
+
fileParallelism: config.fileParallelism,
|
|
7139
|
+
maxWorkers: config.maxWorkers,
|
|
5864
7140
|
base: config.base,
|
|
5865
7141
|
logHeapUsage: config.logHeapUsage,
|
|
5866
7142
|
runner: config.runner,
|
|
@@ -5900,18 +7176,6 @@ function serializeConfig(project) {
|
|
|
5900
7176
|
};
|
|
5901
7177
|
})(config.coverage),
|
|
5902
7178
|
fakeTimers: config.fakeTimers,
|
|
5903
|
-
poolOptions: {
|
|
5904
|
-
forks: {
|
|
5905
|
-
singleFork: poolOptions?.forks?.singleFork ?? globalConfig.poolOptions?.forks?.singleFork ?? false,
|
|
5906
|
-
isolate: poolOptions?.forks?.isolate ?? isolate ?? globalConfig.poolOptions?.forks?.isolate ?? true
|
|
5907
|
-
},
|
|
5908
|
-
threads: {
|
|
5909
|
-
singleThread: poolOptions?.threads?.singleThread ?? globalConfig.poolOptions?.threads?.singleThread ?? false,
|
|
5910
|
-
isolate: poolOptions?.threads?.isolate ?? isolate ?? globalConfig.poolOptions?.threads?.isolate ?? true
|
|
5911
|
-
},
|
|
5912
|
-
vmThreads: { singleThread: poolOptions?.vmThreads?.singleThread ?? globalConfig.poolOptions?.vmThreads?.singleThread ?? false },
|
|
5913
|
-
vmForks: { singleFork: poolOptions?.vmForks?.singleFork ?? globalConfig.poolOptions?.vmForks?.singleFork ?? false }
|
|
5914
|
-
},
|
|
5915
7179
|
deps: {
|
|
5916
7180
|
web: config.deps.web || {},
|
|
5917
7181
|
optimizer: Object.entries(optimizer).reduce((acc, [name, option]) => {
|
|
@@ -7018,7 +8282,7 @@ async function isValidNodeImport(id) {
|
|
|
7018
8282
|
if (/\.(?:\w+-)?esm?(?:-\w+)?\.js$|\/esm?\//.test(id)) return false;
|
|
7019
8283
|
try {
|
|
7020
8284
|
await esModuleLexer.init;
|
|
7021
|
-
const code = await promises.readFile(id, "utf8"), [, , , hasModuleSyntax] = esModuleLexer.parse(code);
|
|
8285
|
+
const code = await promises$1.readFile(id, "utf8"), [, , , hasModuleSyntax] = esModuleLexer.parse(code);
|
|
7022
8286
|
return !hasModuleSyntax;
|
|
7023
8287
|
} catch {
|
|
7024
8288
|
return false;
|
|
@@ -7274,7 +8538,7 @@ class TestProject {
|
|
|
7274
8538
|
const files = await this.globFiles(includeSource, exclude, cwd);
|
|
7275
8539
|
await Promise.all(files.map(async (file) => {
|
|
7276
8540
|
try {
|
|
7277
|
-
const code = await promises.readFile(file, "utf-8");
|
|
8541
|
+
const code = await promises$1.readFile(file, "utf-8");
|
|
7278
8542
|
if (this.isInSourceTestCode(code)) testFiles.push(file);
|
|
7279
8543
|
} catch {
|
|
7280
8544
|
return null;
|
|
@@ -8203,7 +9467,6 @@ class StateManager {
|
|
|
8203
9467
|
idMap = /* @__PURE__ */ new Map();
|
|
8204
9468
|
taskFileMap = /* @__PURE__ */ new WeakMap();
|
|
8205
9469
|
errorsSet = /* @__PURE__ */ new Set();
|
|
8206
|
-
processTimeoutCauses = /* @__PURE__ */ new Set();
|
|
8207
9470
|
reportedTasksMap = /* @__PURE__ */ new WeakMap();
|
|
8208
9471
|
blobs;
|
|
8209
9472
|
transformTime = 0;
|
|
@@ -8238,12 +9501,6 @@ class StateManager {
|
|
|
8238
9501
|
getUnhandledErrors() {
|
|
8239
9502
|
return Array.from(this.errorsSet.values());
|
|
8240
9503
|
}
|
|
8241
|
-
addProcessTimeoutCause(cause) {
|
|
8242
|
-
this.processTimeoutCauses.add(cause);
|
|
8243
|
-
}
|
|
8244
|
-
getProcessTimeoutCauses() {
|
|
8245
|
-
return Array.from(this.processTimeoutCauses.values());
|
|
8246
|
-
}
|
|
8247
9504
|
getPaths() {
|
|
8248
9505
|
return Array.from(this.pathsSet);
|
|
8249
9506
|
}
|
|
@@ -8328,7 +9585,12 @@ class StateManager {
|
|
|
8328
9585
|
return Array.from(this.idMap.values()).filter((t) => t.result?.state === "fail").length;
|
|
8329
9586
|
}
|
|
8330
9587
|
cancelFiles(files, project) {
|
|
8331
|
-
|
|
9588
|
+
// if we don't filter existing modules, they will be overriden by `collectFiles`
|
|
9589
|
+
const nonRegisteredFiles = files.filter(({ filepath }) => {
|
|
9590
|
+
const relativePath = relative(project.config.root, filepath), id = generateFileHash(relativePath, project.name);
|
|
9591
|
+
return !this.idMap.has(id);
|
|
9592
|
+
});
|
|
9593
|
+
this.collectFiles(project, nonRegisteredFiles.map((file) => createFileTask$1(file.filepath, project.config.root, project.config.name)));
|
|
8332
9594
|
}
|
|
8333
9595
|
}
|
|
8334
9596
|
|
|
@@ -8838,6 +10100,7 @@ class TestRun {
|
|
|
8838
10100
|
hasFailed(modules) {
|
|
8839
10101
|
return modules.length ? modules.some((m) => !m.ok()) : !this.vitest.config.passWithNoTests;
|
|
8840
10102
|
}
|
|
10103
|
+
// make sure the error always has a "stacks" property
|
|
8841
10104
|
syncUpdateStacks(update) {
|
|
8842
10105
|
update.forEach(([taskId, result]) => {
|
|
8843
10106
|
const task = this.vitest.state.idMap.get(taskId), isBrowser = task && task.file.pool === "browser";
|
|
@@ -9652,7 +10915,7 @@ class Vitest {
|
|
|
9652
10915
|
async exit(force = false) {
|
|
9653
10916
|
if (setTimeout(() => {
|
|
9654
10917
|
this.report("onProcessTimeout").then(() => {
|
|
9655
|
-
if (console.warn(`close timed out after ${this.config.teardownTimeout}ms`),
|
|
10918
|
+
if (console.warn(`close timed out after ${this.config.teardownTimeout}ms`), !this.pool) {
|
|
9656
10919
|
const runningServers = [this._vite, ...this.projects.map((p) => p._vite)].filter(Boolean).length;
|
|
9657
10920
|
if (runningServers === 1) console.warn("Tests closed successfully but something prevents Vite server from exiting");
|
|
9658
10921
|
else if (runningServers > 1) console.warn(`Tests closed successfully but something prevents ${runningServers} Vite servers from exiting`);
|
|
@@ -9782,10 +11045,6 @@ async function VitestPlugin(options = {}, vitest = new Vitest("test", deepClone(
|
|
|
9782
11045
|
__vitest__: { dev: {} }
|
|
9783
11046
|
},
|
|
9784
11047
|
test: {
|
|
9785
|
-
poolOptions: {
|
|
9786
|
-
threads: { isolate: options.poolOptions?.threads?.isolate ?? options.isolate ?? testConfig.poolOptions?.threads?.isolate ?? viteConfig.test?.isolate },
|
|
9787
|
-
forks: { isolate: options.poolOptions?.forks?.isolate ?? options.isolate ?? testConfig.poolOptions?.forks?.isolate ?? viteConfig.test?.isolate }
|
|
9788
|
-
},
|
|
9789
11048
|
root: testConfig.root ?? viteConfig.test?.root,
|
|
9790
11049
|
deps: testConfig.deps ?? viteConfig.test?.deps
|
|
9791
11050
|
}
|
|
@@ -10254,4 +11513,4 @@ var cliApi = /*#__PURE__*/Object.freeze({
|
|
|
10254
11513
|
startVitest: startVitest
|
|
10255
11514
|
});
|
|
10256
11515
|
|
|
10257
|
-
export { FilesNotFoundError as F, GitNotFoundError as G, Vitest as V, VitestPlugin as a, VitestPackageInstaller as b, createVitest as c,
|
|
11516
|
+
export { FilesNotFoundError as F, GitNotFoundError as G, ThreadsPoolWorker as T, Vitest as V, VitestPlugin as a, VitestPackageInstaller as b, createVitest as c, createMethodsRPC as d, escapeTestName as e, ForksPoolWorker as f, getFilePoolName as g, TypecheckPoolWorker as h, isValidApiRequest as i, VmForksPoolWorker as j, VmThreadsPoolWorker as k, experimental_getRunnerTask as l, registerConsoleShortcuts as m, createViteLogger as n, createDebugger as o, cliApi as p, resolveFsAllow as r, startVitest as s };
|