vitest 4.0.0-beta.18 → 4.0.0-beta.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser.d.ts +3 -3
- package/dist/browser.js +1 -1
- package/dist/{worker-base.js → chunks/base.CtHM3ryk.js} +18 -91
- package/dist/chunks/{browser.d.CCG7W26I.d.ts → browser.d.B9iJzZyn.d.ts} +3 -2
- package/dist/chunks/{cac.DYnuYoJK.js → cac.DCrQhweU.js} +14 -61
- package/dist/chunks/{cli-api.xhe4uqTX.js → cli-api.BjHteKX0.js} +1312 -53
- package/dist/chunks/{config.d.C4PpNy7v.d.ts → config.d.u2CUDWwS.d.ts} +2 -16
- package/dist/chunks/{coverage.Ds84cgzV.js → coverage.FU3w4IrQ.js} +25 -1241
- package/dist/chunks/{defaults.CXFFjsi8.js → defaults.BOqNVLsY.js} +0 -1
- package/dist/chunks/evaluatedModules.Dg1zASAC.js +17 -0
- package/dist/chunks/{global.d.RTA0rbJI.d.ts → global.d.BgJSTpgQ.d.ts} +1 -1
- package/dist/chunks/{globals.CwYe1aG7.js → globals.BGT_RUsD.js} +4 -2
- package/dist/chunks/{index.eEkl9h8v.js → index.BdSLhLDZ.js} +1 -1
- package/dist/chunks/{index.D2gVI9Ck.js → index.CcRZ6fUh.js} +1506 -11
- package/dist/chunks/{index.Bcjk8TKX.js → index.RwjEGCQ0.js} +2 -2
- package/dist/chunks/init-forks.DSafeltJ.js +54 -0
- package/dist/chunks/init-threads.SUtZ-067.js +17 -0
- package/dist/chunks/{worker.CdzokOSx.js → init.B2EESLQM.js} +97 -80
- package/dist/chunks/{inspector.Br76Q2Mb.js → inspector.DLZxSeU3.js} +1 -2
- package/dist/chunks/{moduleRunner.d.aXWuQhZN.d.ts → moduleRunner.d.YtNsMIoJ.d.ts} +1 -1
- package/dist/chunks/{plugin.d.XtKKWlOO.d.ts → plugin.d.BB__S31E.d.ts} +1 -1
- package/dist/chunks/{reporters.d.BJ_OuJGZ.d.ts → reporters.d.C6nGyY9_.d.ts} +1113 -1152
- package/dist/chunks/{resolveSnapshotEnvironment.tw2a5ux8.js → resolveSnapshotEnvironment.DJJKMKxb.js} +1 -1
- package/dist/chunks/{setup-common.DgXU7Yho.js → setup-common.DR1sucx6.js} +1 -1
- package/dist/chunks/{startModuleRunner.DPBo3mme.js → startModuleRunner.C2tTvmF9.js} +3 -1
- package/dist/{worker-vm.js → chunks/vm.DBeOXrP9.js} +6 -66
- package/dist/chunks/{worker.d.DSgBAZPX.d.ts → worker.d.BFk-vvBU.d.ts} +79 -4
- package/dist/cli.js +8 -9
- package/dist/config.cjs +0 -1
- package/dist/config.d.ts +6 -7
- package/dist/config.js +1 -1
- package/dist/coverage.d.ts +4 -4
- package/dist/coverage.js +2 -13
- package/dist/index.d.ts +13 -9
- package/dist/index.js +4 -2
- package/dist/module-evaluator.d.ts +3 -3
- package/dist/module-runner.js +1 -1
- package/dist/node.d.ts +79 -15
- package/dist/node.js +25 -26
- package/dist/reporters.d.ts +4 -4
- package/dist/reporters.js +9 -10
- package/dist/runners.d.ts +1 -1
- package/dist/worker.d.ts +26 -0
- package/dist/worker.js +46 -0
- package/dist/workers/forks.js +50 -0
- package/dist/workers/runVmTests.js +6 -5
- package/dist/workers/threads.js +50 -0
- package/dist/workers/vmForks.js +35 -0
- package/dist/workers/vmThreads.js +35 -0
- package/package.json +16 -13
- package/worker.d.ts +1 -0
- package/dist/chunks/typechecker.DsKAhua5.js +0 -1522
@@ -1,9 +1,8 @@
 import { existsSync, readFileSync, promises } from 'node:fs';
 import { mkdir, writeFile, readdir, stat, readFile } from 'node:fs/promises';
-import {
-import { resolve, dirname, isAbsolute, relative, basename, normalize } from 'pathe';
+import { resolve as resolve$1, dirname, isAbsolute, relative, basename, join, normalize } from 'pathe';
 import { performance as performance$1 } from 'node:perf_hooks';
-import { getTestName, hasFailed,
+import { getTests, getTestName, hasFailed, getSuites, generateHash, calculateSuiteHash, someTasksAreOnly, interpretTaskModes, getTasks, getFullName } from '@vitest/runner/utils';
 import { slash, toArray, isPrimitive } from '@vitest/utils/helpers';
 import { parseStacktrace, parseErrorStacktrace, defaultStackIgnorePatterns } from '@vitest/utils/source-map';
 import c from 'tinyrainbow';
@@ -12,9 +11,164 @@ import { stripVTControlCharacters } from 'node:util';
 import { Console } from 'node:console';
 import { Writable } from 'node:stream';
 import { inspect } from '@vitest/utils/display';
+import nodeos__default, { hostname } from 'node:os';
+import { x } from 'tinyexec';
+import { distDir } from '../path.js';
+import { parseAstAsync } from 'vite';
 import { positionToOffset, lineSplitRE } from '@vitest/utils/offset';
 import { createRequire } from 'node:module';
-
+
+/// <reference types="../types/index.d.ts" />
+
+// (c) 2020-present Andrea Giammarchi
+
+const {parse: $parse, stringify: $stringify} = JSON;
+const {keys} = Object;
+
+const Primitive = String; // it could be Number
+const primitive = 'string'; // it could be 'number'
+
+const ignore$1 = {};
+const object = 'object';
+
+const noop = (_, value) => value;
+
+const primitives = value => (
+value instanceof Primitive ? Primitive(value) : value
+);
+
+const Primitives = (_, value) => (
+typeof value === primitive ? new Primitive(value) : value
+);
+
+const revive = (input, parsed, output, $) => {
+const lazy = [];
+for (let ke = keys(output), {length} = ke, y = 0; y < length; y++) {
+const k = ke[y];
+const value = output[k];
+if (value instanceof Primitive) {
+const tmp = input[value];
+if (typeof tmp === object && !parsed.has(tmp)) {
+parsed.add(tmp);
+output[k] = ignore$1;
+lazy.push({k, a: [input, parsed, tmp, $]});
+}
+else
+output[k] = $.call(output, k, tmp);
+}
+else if (output[k] !== ignore$1)
+output[k] = $.call(output, k, value);
+}
+for (let {length} = lazy, i = 0; i < length; i++) {
+const {k, a} = lazy[i];
+output[k] = $.call(output, k, revive.apply(null, a));
+}
+return output;
+};
+
+const set = (known, input, value) => {
+const index = Primitive(input.push(value) - 1);
+known.set(value, index);
+return index;
+};
+
+/**
+* Converts a specialized flatted string into a JS value.
+* @param {string} text
+* @param {(this: any, key: string, value: any) => any} [reviver]
+* @returns {any}
+*/
+const parse$1 = (text, reviver) => {
+const input = $parse(text, Primitives).map(primitives);
+const value = input[0];
+const $ = reviver || noop;
+const tmp = typeof value === object && value ?
+revive(input, new Set, value, $) :
+value;
+return $.call({'': tmp}, '', tmp);
+};
+
+/**
+* Converts a JS value into a specialized flatted string.
+* @param {any} value
+* @param {((this: any, key: string, value: any) => any) | (string | number)[] | null | undefined} [replacer]
+* @param {string | number | undefined} [space]
+* @returns {string}
+*/
+const stringify = (value, replacer, space) => {
+const $ = replacer && typeof replacer === object ?
+(k, v) => (k === '' || -1 < replacer.indexOf(k) ? v : void 0) :
+(replacer || noop);
+const known = new Map;
+const input = [];
+const output = [];
+let i = +set(known, input, $.call({'': value}, '', value));
+let firstRun = !i;
+while (i < input.length) {
+firstRun = true;
+output[i] = $stringify(input[i++], replace, space);
+}
+return '[' + output.join(',') + ']';
+function replace(key, value) {
+if (firstRun) {
+firstRun = !firstRun;
+return value;
+}
+const after = $.call(this, key, value);
+switch (typeof after) {
+case object:
+if (after === null) return after;
+case primitive:
+return known.get(after) || set(known, input, after);
+}
+return after;
+}
+};
+
+function getOutputFile(config, reporter) {
+if (config?.outputFile) return typeof config.outputFile === "string" ? config.outputFile : config.outputFile[reporter];
+}
+function createDefinesScript(define) {
+return !define || serializeDefine(define) === "{}" ? "" : `
+const defines = ${serializeDefine(define)}
+Object.keys(defines).forEach((key) => {
+const segments = key.split('.')
+let target = globalThis
+for (let i = 0; i < segments.length; i++) {
+const segment = segments[i]
+if (i === segments.length - 1) {
+target[segment] = defines[key]
+} else {
+target = target[segment] || (target[segment] = {})
+}
+}
+})
+`;
+}
+/**
+* Like `JSON.stringify` but keeps raw string values as a literal
+* in the generated code. For example: `"window"` would refer to
+* the global `window` object directly.
+*/
+function serializeDefine(define) {
+const userDefine = {};
+for (const key in define) {
+// vitest sets this to avoid vite:client-inject plugin
+if (key === "process.env.NODE_ENV" && define[key] === "process.env.NODE_ENV") continue;
+// import.meta.env.* is handled in `importAnalysis` plugin
+if (!key.startsWith("import.meta.env.")) userDefine[key] = define[key];
+}
+let res = `{`;
+const keys = Object.keys(userDefine).sort();
+for (let i = 0; i < keys.length; i++) {
+const key = keys[i], val = userDefine[key];
+if (res += `${JSON.stringify(key)}: ${handleDefineValue(val)}`, i !== keys.length - 1) res += `, `;
+}
+return `${res}}`;
+}
+function handleDefineValue(value) {
+return typeof value === "undefined" ? "undefined" : typeof value === "string" ? value : JSON.stringify(value);
+}
 
 class BlobReporter {
 start = 0;
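
For reference, a minimal sketch of what the serializeDefine and createDefinesScript helpers added above produce. The input object below is hypothetical and not taken from the package: string define values are emitted as raw code, other values are JSON-stringified, import.meta.env.* keys are dropped, and the generated script assigns each dotted key onto globalThis.

// Hypothetical input, for illustration only.
const define = {
  '__DEV__': true,                    // non-string value: JSON.stringify(true) -> true
  'process.env.NODE_ENV': '"test"',   // string value: kept as raw code -> "test"
  'import.meta.env.MODE': '"test"',   // dropped: handled by the importAnalysis plugin
};

// serializeDefine(define)     -> '{"__DEV__": true, "process.env.NODE_ENV": "test"}'
// createDefinesScript(define) -> a script that, when evaluated, roughly performs:
//   globalThis.__DEV__ = true
//   globalThis.process.env.NODE_ENV = "test"   (intermediate objects created as needed)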
@@ -53,7 +207,7 @@ class BlobReporter {
 modules,
 coverage,
 executionTime
-], reportFile = resolve(this.ctx.config.root, outputFile);
+], reportFile = resolve$1(this.ctx.config.root, outputFile);
 await writeBlob(report, reportFile), this.ctx.logger.log("blob report written to", reportFile);
 }
 }
@@ -64,10 +218,10 @@ async function writeBlob(content, filename) {
 }
 async function readBlobs(currentVersion, blobsDirectory, projectsArray) {
 // using process.cwd() because --merge-reports can only be used in CLI
-const resolvedDir = resolve(process.cwd(), blobsDirectory), promises = (await readdir(resolvedDir)).map(async (filename) => {
-const fullPath = resolve(resolvedDir, filename);
+const resolvedDir = resolve$1(process.cwd(), blobsDirectory), promises = (await readdir(resolvedDir)).map(async (filename) => {
+const fullPath = resolve$1(resolvedDir, filename);
 if (!(await stat(fullPath)).isFile()) throw new TypeError(`vitest.mergeReports() expects all paths in "${blobsDirectory}" to be files generated by the blob reporter, but "${filename}" is not a file`);
-const content = await readFile(fullPath, "utf-8"), [version, files, errors, moduleKeys, coverage, executionTime] = parse(content);
+const content = await readFile(fullPath, "utf-8"), [version, files, errors, moduleKeys, coverage, executionTime] = parse$1(content);
 if (!version) throw new TypeError(`vitest.mergeReports() expects all paths in "${blobsDirectory}" to be files generated by the blob reporter, but "${filename}" is not a valid blob file`);
 return {
 version,
@@ -109,6 +263,55 @@ async function readBlobs(currentVersion, blobsDirectory, projectsArray) {
 };
 }
 
+function hasFailedSnapshot(suite) {
+return getTests(suite).some((s) => {
+return s.result?.errors?.some((e) => typeof e?.message === "string" && e.message.match(/Snapshot .* mismatched/));
+});
+}
+function convertTasksToEvents(file, onTask) {
+const packs = [], events = [];
+function visit(suite) {
+onTask?.(suite), packs.push([
+suite.id,
+suite.result,
+suite.meta
+]), events.push([
+suite.id,
+"suite-prepare",
+void 0
+]), suite.tasks.forEach((task) => {
+if (task.type === "suite") visit(task);
+else if (onTask?.(task), suite.mode !== "skip" && suite.mode !== "todo") packs.push([
+task.id,
+task.result,
+task.meta
+]), events.push([
+task.id,
+"test-prepare",
+void 0
+]), task.annotations.forEach((annotation) => {
+events.push([
+task.id,
+"test-annotation",
+{ annotation }
+]);
+}), events.push([
+task.id,
+"test-finished",
+void 0
+]);
+}), events.push([
+suite.id,
+"suite-finished",
+void 0
+]);
+}
+return visit(file), {
+packs,
+events
+};
+}
+
 const F_RIGHT = "→";
 const F_DOWN = "↓";
 const F_DOWN_RIGHT = "↳";
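
As an illustration of the data shape only (the task tree below is hypothetical, derived from the convertTasksToEvents code added above), a file with a single runnable test and no annotations yields one pack per task and a prepare/finished event pair per level:

// Hypothetical minimal task tree, for illustration only.
const test = { id: 't1', type: 'test', result: { state: 'pass' }, meta: {}, annotations: [] };
const file = { id: 'f1', type: 'suite', mode: 'run', result: { state: 'pass' }, meta: {}, tasks: [test] };

const { packs, events } = convertTasksToEvents(file);
// packs  -> [['f1', { state: 'pass' }, {}], ['t1', { state: 'pass' }, {}]]
// events -> [['f1', 'suite-prepare', undefined],
//            ['t1', 'test-prepare', undefined],
//            ['t1', 'test-finished', undefined],
//            ['f1', 'suite-finished', undefined]]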
@@ -1006,6 +1209,1298 @@ function formatTests(states) {
|
|
|
1006
1209
|
return output += currentIcon.color(currentIcon.char.repeat(count)), output;
|
|
1007
1210
|
}
|
|
1008
1211
|
|
|
1212
|
+
// src/vlq.ts
|
|
1213
|
+
var comma = ",".charCodeAt(0);
|
|
1214
|
+
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
|
1215
|
+
var intToChar = new Uint8Array(64);
|
|
1216
|
+
var charToInt = new Uint8Array(128);
|
|
1217
|
+
for (let i = 0; i < chars.length; i++) {
|
|
1218
|
+
const c = chars.charCodeAt(i);
|
|
1219
|
+
intToChar[i] = c;
|
|
1220
|
+
charToInt[c] = i;
|
|
1221
|
+
}
|
|
1222
|
+
function decodeInteger(reader, relative) {
|
|
1223
|
+
let value = 0;
|
|
1224
|
+
let shift = 0;
|
|
1225
|
+
let integer = 0;
|
|
1226
|
+
do {
|
|
1227
|
+
const c = reader.next();
|
|
1228
|
+
integer = charToInt[c];
|
|
1229
|
+
value |= (integer & 31) << shift;
|
|
1230
|
+
shift += 5;
|
|
1231
|
+
} while (integer & 32);
|
|
1232
|
+
const shouldNegate = value & 1;
|
|
1233
|
+
value >>>= 1;
|
|
1234
|
+
if (shouldNegate) {
|
|
1235
|
+
value = -2147483648 | -value;
|
|
1236
|
+
}
|
|
1237
|
+
return relative + value;
|
|
1238
|
+
}
|
|
1239
|
+
function hasMoreVlq(reader, max) {
|
|
1240
|
+
if (reader.pos >= max) return false;
|
|
1241
|
+
return reader.peek() !== comma;
|
|
1242
|
+
}
|
|
1243
|
+
var StringReader = class {
|
|
1244
|
+
constructor(buffer) {
|
|
1245
|
+
this.pos = 0;
|
|
1246
|
+
this.buffer = buffer;
|
|
1247
|
+
}
|
|
1248
|
+
next() {
|
|
1249
|
+
return this.buffer.charCodeAt(this.pos++);
|
|
1250
|
+
}
|
|
1251
|
+
peek() {
|
|
1252
|
+
return this.buffer.charCodeAt(this.pos);
|
|
1253
|
+
}
|
|
1254
|
+
indexOf(char) {
|
|
1255
|
+
const { buffer, pos } = this;
|
|
1256
|
+
const idx = buffer.indexOf(char, pos);
|
|
1257
|
+
return idx === -1 ? buffer.length : idx;
|
|
1258
|
+
}
|
|
1259
|
+
};
|
|
1260
|
+
|
|
1261
|
+
// src/sourcemap-codec.ts
|
|
1262
|
+
function decode(mappings) {
|
|
1263
|
+
const { length } = mappings;
|
|
1264
|
+
const reader = new StringReader(mappings);
|
|
1265
|
+
const decoded = [];
|
|
1266
|
+
let genColumn = 0;
|
|
1267
|
+
let sourcesIndex = 0;
|
|
1268
|
+
let sourceLine = 0;
|
|
1269
|
+
let sourceColumn = 0;
|
|
1270
|
+
let namesIndex = 0;
|
|
1271
|
+
do {
|
|
1272
|
+
const semi = reader.indexOf(";");
|
|
1273
|
+
const line = [];
|
|
1274
|
+
let sorted = true;
|
|
1275
|
+
let lastCol = 0;
|
|
1276
|
+
genColumn = 0;
|
|
1277
|
+
while (reader.pos < semi) {
|
|
1278
|
+
let seg;
|
|
1279
|
+
genColumn = decodeInteger(reader, genColumn);
|
|
1280
|
+
if (genColumn < lastCol) sorted = false;
|
|
1281
|
+
lastCol = genColumn;
|
|
1282
|
+
if (hasMoreVlq(reader, semi)) {
|
|
1283
|
+
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
|
1284
|
+
sourceLine = decodeInteger(reader, sourceLine);
|
|
1285
|
+
sourceColumn = decodeInteger(reader, sourceColumn);
|
|
1286
|
+
if (hasMoreVlq(reader, semi)) {
|
|
1287
|
+
namesIndex = decodeInteger(reader, namesIndex);
|
|
1288
|
+
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
|
1289
|
+
} else {
|
|
1290
|
+
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
|
1291
|
+
}
|
|
1292
|
+
} else {
|
|
1293
|
+
seg = [genColumn];
|
|
1294
|
+
}
|
|
1295
|
+
line.push(seg);
|
|
1296
|
+
reader.pos++;
|
|
1297
|
+
}
|
|
1298
|
+
if (!sorted) sort(line);
|
|
1299
|
+
decoded.push(line);
|
|
1300
|
+
reader.pos = semi + 1;
|
|
1301
|
+
} while (reader.pos <= length);
|
|
1302
|
+
return decoded;
|
|
1303
|
+
}
|
|
1304
|
+
function sort(line) {
|
|
1305
|
+
line.sort(sortComparator$1);
|
|
1306
|
+
}
|
|
1307
|
+
function sortComparator$1(a, b) {
|
|
1308
|
+
return a[0] - b[0];
|
|
1309
|
+
}
|
|
1310
|
+
|
|
1311
|
+
// Matches the scheme of a URL, eg "http://"
|
|
1312
|
+
const schemeRegex = /^[\w+.-]+:\/\//;
|
|
1313
|
+
/**
|
|
1314
|
+
* Matches the parts of a URL:
|
|
1315
|
+
* 1. Scheme, including ":", guaranteed.
|
|
1316
|
+
* 2. User/password, including "@", optional.
|
|
1317
|
+
* 3. Host, guaranteed.
|
|
1318
|
+
* 4. Port, including ":", optional.
|
|
1319
|
+
* 5. Path, including "/", optional.
|
|
1320
|
+
* 6. Query, including "?", optional.
|
|
1321
|
+
* 7. Hash, including "#", optional.
|
|
1322
|
+
*/
|
|
1323
|
+
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
|
1324
|
+
/**
|
|
1325
|
+
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
|
1326
|
+
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
|
1327
|
+
*
|
|
1328
|
+
* 1. Host, optional.
|
|
1329
|
+
* 2. Path, which may include "/", guaranteed.
|
|
1330
|
+
* 3. Query, including "?", optional.
|
|
1331
|
+
* 4. Hash, including "#", optional.
|
|
1332
|
+
*/
|
|
1333
|
+
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
|
1334
|
+
function isAbsoluteUrl(input) {
|
|
1335
|
+
return schemeRegex.test(input);
|
|
1336
|
+
}
|
|
1337
|
+
function isSchemeRelativeUrl(input) {
|
|
1338
|
+
return input.startsWith('//');
|
|
1339
|
+
}
|
|
1340
|
+
function isAbsolutePath(input) {
|
|
1341
|
+
return input.startsWith('/');
|
|
1342
|
+
}
|
|
1343
|
+
function isFileUrl(input) {
|
|
1344
|
+
return input.startsWith('file:');
|
|
1345
|
+
}
|
|
1346
|
+
function isRelative(input) {
|
|
1347
|
+
return /^[.?#]/.test(input);
|
|
1348
|
+
}
|
|
1349
|
+
function parseAbsoluteUrl(input) {
|
|
1350
|
+
const match = urlRegex.exec(input);
|
|
1351
|
+
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
|
1352
|
+
}
|
|
1353
|
+
function parseFileUrl(input) {
|
|
1354
|
+
const match = fileRegex.exec(input);
|
|
1355
|
+
const path = match[2];
|
|
1356
|
+
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
|
1357
|
+
}
|
|
1358
|
+
function makeUrl(scheme, user, host, port, path, query, hash) {
|
|
1359
|
+
return {
|
|
1360
|
+
scheme,
|
|
1361
|
+
user,
|
|
1362
|
+
host,
|
|
1363
|
+
port,
|
|
1364
|
+
path,
|
|
1365
|
+
query,
|
|
1366
|
+
hash,
|
|
1367
|
+
type: 7 /* Absolute */,
|
|
1368
|
+
};
|
|
1369
|
+
}
|
|
1370
|
+
function parseUrl(input) {
|
|
1371
|
+
if (isSchemeRelativeUrl(input)) {
|
|
1372
|
+
const url = parseAbsoluteUrl('http:' + input);
|
|
1373
|
+
url.scheme = '';
|
|
1374
|
+
url.type = 6 /* SchemeRelative */;
|
|
1375
|
+
return url;
|
|
1376
|
+
}
|
|
1377
|
+
if (isAbsolutePath(input)) {
|
|
1378
|
+
const url = parseAbsoluteUrl('http://foo.com' + input);
|
|
1379
|
+
url.scheme = '';
|
|
1380
|
+
url.host = '';
|
|
1381
|
+
url.type = 5 /* AbsolutePath */;
|
|
1382
|
+
return url;
|
|
1383
|
+
}
|
|
1384
|
+
if (isFileUrl(input))
|
|
1385
|
+
return parseFileUrl(input);
|
|
1386
|
+
if (isAbsoluteUrl(input))
|
|
1387
|
+
return parseAbsoluteUrl(input);
|
|
1388
|
+
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
|
1389
|
+
url.scheme = '';
|
|
1390
|
+
url.host = '';
|
|
1391
|
+
url.type = input
|
|
1392
|
+
? input.startsWith('?')
|
|
1393
|
+
? 3 /* Query */
|
|
1394
|
+
: input.startsWith('#')
|
|
1395
|
+
? 2 /* Hash */
|
|
1396
|
+
: 4 /* RelativePath */
|
|
1397
|
+
: 1 /* Empty */;
|
|
1398
|
+
return url;
|
|
1399
|
+
}
|
|
1400
|
+
function stripPathFilename(path) {
|
|
1401
|
+
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
|
1402
|
+
// paths. It's not a file, so we can't strip it.
|
|
1403
|
+
if (path.endsWith('/..'))
|
|
1404
|
+
return path;
|
|
1405
|
+
const index = path.lastIndexOf('/');
|
|
1406
|
+
return path.slice(0, index + 1);
|
|
1407
|
+
}
|
|
1408
|
+
function mergePaths(url, base) {
|
|
1409
|
+
normalizePath(base, base.type);
|
|
1410
|
+
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|
1411
|
+
// path).
|
|
1412
|
+
if (url.path === '/') {
|
|
1413
|
+
url.path = base.path;
|
|
1414
|
+
}
|
|
1415
|
+
else {
|
|
1416
|
+
// Resolution happens relative to the base path's directory, not the file.
|
|
1417
|
+
url.path = stripPathFilename(base.path) + url.path;
|
|
1418
|
+
}
|
|
1419
|
+
}
|
|
1420
|
+
/**
|
|
1421
|
+
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
|
1422
|
+
* "foo/.". We need to normalize to a standard representation.
|
|
1423
|
+
*/
|
|
1424
|
+
function normalizePath(url, type) {
|
|
1425
|
+
const rel = type <= 4 /* RelativePath */;
|
|
1426
|
+
const pieces = url.path.split('/');
|
|
1427
|
+
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|
1428
|
+
// pieces[0] is an empty string.
|
|
1429
|
+
let pointer = 1;
|
|
1430
|
+
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|
1431
|
+
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|
1432
|
+
let positive = 0;
|
|
1433
|
+
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|
1434
|
+
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|
1435
|
+
// real directory, we won't need to append, unless the other conditions happen again.
|
|
1436
|
+
let addTrailingSlash = false;
|
|
1437
|
+
for (let i = 1; i < pieces.length; i++) {
|
|
1438
|
+
const piece = pieces[i];
|
|
1439
|
+
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|
1440
|
+
if (!piece) {
|
|
1441
|
+
addTrailingSlash = true;
|
|
1442
|
+
continue;
|
|
1443
|
+
}
|
|
1444
|
+
// If we encounter a real directory, then we don't need to append anymore.
|
|
1445
|
+
addTrailingSlash = false;
|
|
1446
|
+
// A current directory, which we can always drop.
|
|
1447
|
+
if (piece === '.')
|
|
1448
|
+
continue;
|
|
1449
|
+
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|
1450
|
+
// have an excess of parents, and we'll need to keep the "..".
|
|
1451
|
+
if (piece === '..') {
|
|
1452
|
+
if (positive) {
|
|
1453
|
+
addTrailingSlash = true;
|
|
1454
|
+
positive--;
|
|
1455
|
+
pointer--;
|
|
1456
|
+
}
|
|
1457
|
+
else if (rel) {
|
|
1458
|
+
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|
1459
|
+
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|
1460
|
+
pieces[pointer++] = piece;
|
|
1461
|
+
}
|
|
1462
|
+
continue;
|
|
1463
|
+
}
|
|
1464
|
+
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|
1465
|
+
// any popped or dropped directories.
|
|
1466
|
+
pieces[pointer++] = piece;
|
|
1467
|
+
positive++;
|
|
1468
|
+
}
|
|
1469
|
+
let path = '';
|
|
1470
|
+
for (let i = 1; i < pointer; i++) {
|
|
1471
|
+
path += '/' + pieces[i];
|
|
1472
|
+
}
|
|
1473
|
+
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
|
1474
|
+
path += '/';
|
|
1475
|
+
}
|
|
1476
|
+
url.path = path;
|
|
1477
|
+
}
|
|
1478
|
+
/**
|
|
1479
|
+
* Attempts to resolve `input` URL/path relative to `base`.
|
|
1480
|
+
*/
|
|
1481
|
+
function resolve(input, base) {
|
|
1482
|
+
if (!input && !base)
|
|
1483
|
+
return '';
|
|
1484
|
+
const url = parseUrl(input);
|
|
1485
|
+
let inputType = url.type;
|
|
1486
|
+
if (base && inputType !== 7 /* Absolute */) {
|
|
1487
|
+
const baseUrl = parseUrl(base);
|
|
1488
|
+
const baseType = baseUrl.type;
|
|
1489
|
+
switch (inputType) {
|
|
1490
|
+
case 1 /* Empty */:
|
|
1491
|
+
url.hash = baseUrl.hash;
|
|
1492
|
+
// fall through
|
|
1493
|
+
case 2 /* Hash */:
|
|
1494
|
+
url.query = baseUrl.query;
|
|
1495
|
+
// fall through
|
|
1496
|
+
case 3 /* Query */:
|
|
1497
|
+
case 4 /* RelativePath */:
|
|
1498
|
+
mergePaths(url, baseUrl);
|
|
1499
|
+
// fall through
|
|
1500
|
+
case 5 /* AbsolutePath */:
|
|
1501
|
+
// The host, user, and port are joined, you can't copy one without the others.
|
|
1502
|
+
url.user = baseUrl.user;
|
|
1503
|
+
url.host = baseUrl.host;
|
|
1504
|
+
url.port = baseUrl.port;
|
|
1505
|
+
// fall through
|
|
1506
|
+
case 6 /* SchemeRelative */:
|
|
1507
|
+
// The input doesn't have a schema at least, so we need to copy at least that over.
|
|
1508
|
+
url.scheme = baseUrl.scheme;
|
|
1509
|
+
}
|
|
1510
|
+
if (baseType > inputType)
|
|
1511
|
+
inputType = baseType;
|
|
1512
|
+
}
|
|
1513
|
+
normalizePath(url, inputType);
|
|
1514
|
+
const queryHash = url.query + url.hash;
|
|
1515
|
+
switch (inputType) {
|
|
1516
|
+
// This is impossible, because of the empty checks at the start of the function.
|
|
1517
|
+
// case UrlType.Empty:
|
|
1518
|
+
case 2 /* Hash */:
|
|
1519
|
+
case 3 /* Query */:
|
|
1520
|
+
return queryHash;
|
|
1521
|
+
case 4 /* RelativePath */: {
|
|
1522
|
+
// The first char is always a "/", and we need it to be relative.
|
|
1523
|
+
const path = url.path.slice(1);
|
|
1524
|
+
if (!path)
|
|
1525
|
+
return queryHash || '.';
|
|
1526
|
+
if (isRelative(base || input) && !isRelative(path)) {
|
|
1527
|
+
// If base started with a leading ".", or there is no base and input started with a ".",
|
|
1528
|
+
// then we need to ensure that the relative path starts with a ".". We don't know if
|
|
1529
|
+
// relative starts with a "..", though, so check before prepending.
|
|
1530
|
+
return './' + path + queryHash;
|
|
1531
|
+
}
|
|
1532
|
+
return path + queryHash;
|
|
1533
|
+
}
|
|
1534
|
+
case 5 /* AbsolutePath */:
|
|
1535
|
+
return url.path + queryHash;
|
|
1536
|
+
default:
|
|
1537
|
+
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
|
1538
|
+
}
|
|
1539
|
+
}
|
|
1540
|
+
|
|
1541
|
+
// src/trace-mapping.ts
|
|
1542
|
+
|
|
1543
|
+
// src/strip-filename.ts
|
|
1544
|
+
function stripFilename(path) {
|
|
1545
|
+
if (!path) return "";
|
|
1546
|
+
const index = path.lastIndexOf("/");
|
|
1547
|
+
return path.slice(0, index + 1);
|
|
1548
|
+
}
|
|
1549
|
+
|
|
1550
|
+
// src/resolve.ts
|
|
1551
|
+
function resolver(mapUrl, sourceRoot) {
|
|
1552
|
+
const from = stripFilename(mapUrl);
|
|
1553
|
+
const prefix = sourceRoot ? sourceRoot + "/" : "";
|
|
1554
|
+
return (source) => resolve(prefix + (source || ""), from);
|
|
1555
|
+
}
|
|
1556
|
+
|
|
1557
|
+
// src/sourcemap-segment.ts
|
|
1558
|
+
var COLUMN = 0;
|
|
1559
|
+
var SOURCES_INDEX = 1;
|
|
1560
|
+
var SOURCE_LINE = 2;
|
|
1561
|
+
var SOURCE_COLUMN = 3;
|
|
1562
|
+
var NAMES_INDEX = 4;
|
|
1563
|
+
var REV_GENERATED_LINE = 1;
|
|
1564
|
+
var REV_GENERATED_COLUMN = 2;
|
|
1565
|
+
|
|
1566
|
+
// src/sort.ts
|
|
1567
|
+
function maybeSort(mappings, owned) {
|
|
1568
|
+
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
|
1569
|
+
if (unsortedIndex === mappings.length) return mappings;
|
|
1570
|
+
if (!owned) mappings = mappings.slice();
|
|
1571
|
+
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
|
1572
|
+
mappings[i] = sortSegments(mappings[i], owned);
|
|
1573
|
+
}
|
|
1574
|
+
return mappings;
|
|
1575
|
+
}
|
|
1576
|
+
function nextUnsortedSegmentLine(mappings, start) {
|
|
1577
|
+
for (let i = start; i < mappings.length; i++) {
|
|
1578
|
+
if (!isSorted(mappings[i])) return i;
|
|
1579
|
+
}
|
|
1580
|
+
return mappings.length;
|
|
1581
|
+
}
|
|
1582
|
+
function isSorted(line) {
|
|
1583
|
+
for (let j = 1; j < line.length; j++) {
|
|
1584
|
+
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
|
1585
|
+
return false;
|
|
1586
|
+
}
|
|
1587
|
+
}
|
|
1588
|
+
return true;
|
|
1589
|
+
}
|
|
1590
|
+
function sortSegments(line, owned) {
|
|
1591
|
+
if (!owned) line = line.slice();
|
|
1592
|
+
return line.sort(sortComparator);
|
|
1593
|
+
}
|
|
1594
|
+
function sortComparator(a, b) {
|
|
1595
|
+
return a[COLUMN] - b[COLUMN];
|
|
1596
|
+
}
|
|
1597
|
+
|
|
1598
|
+
// src/by-source.ts
|
|
1599
|
+
function buildBySources(decoded, memos) {
|
|
1600
|
+
const sources = memos.map(() => []);
|
|
1601
|
+
for (let i = 0; i < decoded.length; i++) {
|
|
1602
|
+
const line = decoded[i];
|
|
1603
|
+
for (let j = 0; j < line.length; j++) {
|
|
1604
|
+
const seg = line[j];
|
|
1605
|
+
if (seg.length === 1) continue;
|
|
1606
|
+
const sourceIndex2 = seg[SOURCES_INDEX];
|
|
1607
|
+
const sourceLine = seg[SOURCE_LINE];
|
|
1608
|
+
const sourceColumn = seg[SOURCE_COLUMN];
|
|
1609
|
+
const source = sources[sourceIndex2];
|
|
1610
|
+
const segs = source[sourceLine] || (source[sourceLine] = []);
|
|
1611
|
+
segs.push([sourceColumn, i, seg[COLUMN]]);
|
|
1612
|
+
}
|
|
1613
|
+
}
|
|
1614
|
+
for (let i = 0; i < sources.length; i++) {
|
|
1615
|
+
const source = sources[i];
|
|
1616
|
+
for (let j = 0; j < source.length; j++) {
|
|
1617
|
+
const line = source[j];
|
|
1618
|
+
if (line) line.sort(sortComparator);
|
|
1619
|
+
}
|
|
1620
|
+
}
|
|
1621
|
+
return sources;
|
|
1622
|
+
}
|
|
1623
|
+
|
|
1624
|
+
// src/binary-search.ts
|
|
1625
|
+
var found = false;
|
|
1626
|
+
function binarySearch(haystack, needle, low, high) {
|
|
1627
|
+
while (low <= high) {
|
|
1628
|
+
const mid = low + (high - low >> 1);
|
|
1629
|
+
const cmp = haystack[mid][COLUMN] - needle;
|
|
1630
|
+
if (cmp === 0) {
|
|
1631
|
+
found = true;
|
|
1632
|
+
return mid;
|
|
1633
|
+
}
|
|
1634
|
+
if (cmp < 0) {
|
|
1635
|
+
low = mid + 1;
|
|
1636
|
+
} else {
|
|
1637
|
+
high = mid - 1;
|
|
1638
|
+
}
|
|
1639
|
+
}
|
|
1640
|
+
found = false;
|
|
1641
|
+
return low - 1;
|
|
1642
|
+
}
|
|
1643
|
+
function upperBound(haystack, needle, index) {
|
|
1644
|
+
for (let i = index + 1; i < haystack.length; index = i++) {
|
|
1645
|
+
if (haystack[i][COLUMN] !== needle) break;
|
|
1646
|
+
}
|
|
1647
|
+
return index;
|
|
1648
|
+
}
|
|
1649
|
+
function lowerBound(haystack, needle, index) {
|
|
1650
|
+
for (let i = index - 1; i >= 0; index = i--) {
|
|
1651
|
+
if (haystack[i][COLUMN] !== needle) break;
|
|
1652
|
+
}
|
|
1653
|
+
return index;
|
|
1654
|
+
}
|
|
1655
|
+
function memoizedState() {
|
|
1656
|
+
return {
|
|
1657
|
+
lastKey: -1,
|
|
1658
|
+
lastNeedle: -1,
|
|
1659
|
+
lastIndex: -1
|
|
1660
|
+
};
|
|
1661
|
+
}
|
|
1662
|
+
function memoizedBinarySearch(haystack, needle, state, key) {
|
|
1663
|
+
const { lastKey, lastNeedle, lastIndex } = state;
|
|
1664
|
+
let low = 0;
|
|
1665
|
+
let high = haystack.length - 1;
|
|
1666
|
+
if (key === lastKey) {
|
|
1667
|
+
if (needle === lastNeedle) {
|
|
1668
|
+
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
|
1669
|
+
return lastIndex;
|
|
1670
|
+
}
|
|
1671
|
+
if (needle >= lastNeedle) {
|
|
1672
|
+
low = lastIndex === -1 ? 0 : lastIndex;
|
|
1673
|
+
} else {
|
|
1674
|
+
high = lastIndex;
|
|
1675
|
+
}
|
|
1676
|
+
}
|
|
1677
|
+
state.lastKey = key;
|
|
1678
|
+
state.lastNeedle = needle;
|
|
1679
|
+
return state.lastIndex = binarySearch(haystack, needle, low, high);
|
|
1680
|
+
}
|
|
1681
|
+
|
|
1682
|
+
// src/types.ts
|
|
1683
|
+
function parse(map) {
|
|
1684
|
+
return typeof map === "string" ? JSON.parse(map) : map;
|
|
1685
|
+
}
|
|
1686
|
+
|
|
1687
|
+
// src/trace-mapping.ts
|
|
1688
|
+
var LINE_GTR_ZERO = "`line` must be greater than 0 (lines start at line 1)";
|
|
1689
|
+
var COL_GTR_EQ_ZERO = "`column` must be greater than or equal to 0 (columns start at column 0)";
|
|
1690
|
+
var LEAST_UPPER_BOUND = -1;
|
|
1691
|
+
var GREATEST_LOWER_BOUND = 1;
|
|
1692
|
+
var TraceMap = class {
|
|
1693
|
+
constructor(map, mapUrl) {
|
|
1694
|
+
const isString = typeof map === "string";
|
|
1695
|
+
if (!isString && map._decodedMemo) return map;
|
|
1696
|
+
const parsed = parse(map);
|
|
1697
|
+
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
|
1698
|
+
this.version = version;
|
|
1699
|
+
this.file = file;
|
|
1700
|
+
this.names = names || [];
|
|
1701
|
+
this.sourceRoot = sourceRoot;
|
|
1702
|
+
this.sources = sources;
|
|
1703
|
+
this.sourcesContent = sourcesContent;
|
|
1704
|
+
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || void 0;
|
|
1705
|
+
const resolve = resolver(mapUrl, sourceRoot);
|
|
1706
|
+
this.resolvedSources = sources.map(resolve);
|
|
1707
|
+
const { mappings } = parsed;
|
|
1708
|
+
if (typeof mappings === "string") {
|
|
1709
|
+
this._encoded = mappings;
|
|
1710
|
+
this._decoded = void 0;
|
|
1711
|
+
} else if (Array.isArray(mappings)) {
|
|
1712
|
+
this._encoded = void 0;
|
|
1713
|
+
this._decoded = maybeSort(mappings, isString);
|
|
1714
|
+
} else if (parsed.sections) {
|
|
1715
|
+
throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);
|
|
1716
|
+
} else {
|
|
1717
|
+
throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);
|
|
1718
|
+
}
|
|
1719
|
+
this._decodedMemo = memoizedState();
|
|
1720
|
+
this._bySources = void 0;
|
|
1721
|
+
this._bySourceMemos = void 0;
|
|
1722
|
+
}
|
|
1723
|
+
};
|
|
1724
|
+
function cast(map) {
|
|
1725
|
+
return map;
|
|
1726
|
+
}
|
|
1727
|
+
function decodedMappings(map) {
|
|
1728
|
+
var _a;
|
|
1729
|
+
return (_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded));
|
|
1730
|
+
}
|
|
1731
|
+
function originalPositionFor(map, needle) {
|
|
1732
|
+
let { line, column, bias } = needle;
|
|
1733
|
+
line--;
|
|
1734
|
+
if (line < 0) throw new Error(LINE_GTR_ZERO);
|
|
1735
|
+
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
|
|
1736
|
+
const decoded = decodedMappings(map);
|
|
1737
|
+
if (line >= decoded.length) return OMapping(null, null, null, null);
|
|
1738
|
+
const segments = decoded[line];
|
|
1739
|
+
const index = traceSegmentInternal(
|
|
1740
|
+
segments,
|
|
1741
|
+
cast(map)._decodedMemo,
|
|
1742
|
+
line,
|
|
1743
|
+
column,
|
|
1744
|
+
bias || GREATEST_LOWER_BOUND
|
|
1745
|
+
);
|
|
1746
|
+
if (index === -1) return OMapping(null, null, null, null);
|
|
1747
|
+
const segment = segments[index];
|
|
1748
|
+
if (segment.length === 1) return OMapping(null, null, null, null);
|
|
1749
|
+
const { names, resolvedSources } = map;
|
|
1750
|
+
return OMapping(
|
|
1751
|
+
resolvedSources[segment[SOURCES_INDEX]],
|
|
1752
|
+
segment[SOURCE_LINE] + 1,
|
|
1753
|
+
segment[SOURCE_COLUMN],
|
|
1754
|
+
segment.length === 5 ? names[segment[NAMES_INDEX]] : null
|
|
1755
|
+
);
|
|
1756
|
+
}
|
|
1757
|
+
function generatedPositionFor(map, needle) {
|
|
1758
|
+
const { source, line, column, bias } = needle;
|
|
1759
|
+
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
|
1760
|
+
}
|
|
1761
|
+
function eachMapping(map, cb) {
|
|
1762
|
+
const decoded = decodedMappings(map);
|
|
1763
|
+
const { names, resolvedSources } = map;
|
|
1764
|
+
for (let i = 0; i < decoded.length; i++) {
|
|
1765
|
+
const line = decoded[i];
|
|
1766
|
+
for (let j = 0; j < line.length; j++) {
|
|
1767
|
+
const seg = line[j];
|
|
1768
|
+
const generatedLine = i + 1;
|
|
1769
|
+
const generatedColumn = seg[0];
|
|
1770
|
+
let source = null;
|
|
1771
|
+
let originalLine = null;
|
|
1772
|
+
let originalColumn = null;
|
|
1773
|
+
let name = null;
|
|
1774
|
+
if (seg.length !== 1) {
|
|
1775
|
+
source = resolvedSources[seg[1]];
|
|
1776
|
+
originalLine = seg[2] + 1;
|
|
1777
|
+
originalColumn = seg[3];
|
|
1778
|
+
}
|
|
1779
|
+
if (seg.length === 5) name = names[seg[4]];
|
|
1780
|
+
cb({
|
|
1781
|
+
generatedLine,
|
|
1782
|
+
generatedColumn,
|
|
1783
|
+
source,
|
|
1784
|
+
originalLine,
|
|
1785
|
+
originalColumn,
|
|
1786
|
+
name
|
|
1787
|
+
});
|
|
1788
|
+
}
|
|
1789
|
+
}
|
|
1790
|
+
}
|
|
1791
|
+
function OMapping(source, line, column, name) {
|
|
1792
|
+
return { source, line, column, name };
|
|
1793
|
+
}
|
|
1794
|
+
function GMapping(line, column) {
|
|
1795
|
+
return { line, column };
|
|
1796
|
+
}
|
|
1797
|
+
function traceSegmentInternal(segments, memo, line, column, bias) {
|
|
1798
|
+
let index = memoizedBinarySearch(segments, column, memo, line);
|
|
1799
|
+
if (found) {
|
|
1800
|
+
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
|
1801
|
+
} else if (bias === LEAST_UPPER_BOUND) index++;
|
|
1802
|
+
if (index === -1 || index === segments.length) return -1;
|
|
1803
|
+
return index;
|
|
1804
|
+
}
|
|
1805
|
+
function generatedPosition(map, source, line, column, bias, all) {
|
|
1806
|
+
var _a, _b;
|
|
1807
|
+
line--;
|
|
1808
|
+
if (line < 0) throw new Error(LINE_GTR_ZERO);
|
|
1809
|
+
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
|
|
1810
|
+
const { sources, resolvedSources } = map;
|
|
1811
|
+
let sourceIndex2 = sources.indexOf(source);
|
|
1812
|
+
if (sourceIndex2 === -1) sourceIndex2 = resolvedSources.indexOf(source);
|
|
1813
|
+
if (sourceIndex2 === -1) return all ? [] : GMapping(null, null);
|
|
1814
|
+
const bySourceMemos = (_a = cast(map))._bySourceMemos || (_a._bySourceMemos = sources.map(memoizedState));
|
|
1815
|
+
const generated = (_b = cast(map))._bySources || (_b._bySources = buildBySources(decodedMappings(map), bySourceMemos));
|
|
1816
|
+
const segments = generated[sourceIndex2][line];
|
|
1817
|
+
if (segments == null) return all ? [] : GMapping(null, null);
|
|
1818
|
+
const memo = bySourceMemos[sourceIndex2];
|
|
1819
|
+
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
|
1820
|
+
if (index === -1) return GMapping(null, null);
|
|
1821
|
+
const segment = segments[index];
|
|
1822
|
+
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
|
1823
|
+
}
|
|
1824
|
+
|
|
1825
|
+
// AST walker module for ESTree compatible trees
|
|
1826
|
+
|
|
1827
|
+
|
|
1828
|
+
// An ancestor walk keeps an array of ancestor nodes (including the
|
|
1829
|
+
// current node) and passes them to the callback as third parameter
|
|
1830
|
+
// (and also as state parameter when no other state is present).
|
|
1831
|
+
function ancestor(node, visitors, baseVisitor, state, override) {
|
|
1832
|
+
var ancestors = [];
|
|
1833
|
+
if (!baseVisitor) { baseVisitor = base
|
|
1834
|
+
; }(function c(node, st, override) {
|
|
1835
|
+
var type = override || node.type;
|
|
1836
|
+
var isNew = node !== ancestors[ancestors.length - 1];
|
|
1837
|
+
if (isNew) { ancestors.push(node); }
|
|
1838
|
+
baseVisitor[type](node, st, c);
|
|
1839
|
+
if (visitors[type]) { visitors[type](node, st || ancestors, ancestors); }
|
|
1840
|
+
if (isNew) { ancestors.pop(); }
|
|
1841
|
+
})(node, state, override);
|
|
1842
|
+
}
|
|
1843
|
+
|
|
1844
|
+
function skipThrough(node, st, c) { c(node, st); }
|
|
1845
|
+
function ignore(_node, _st, _c) {}
|
|
1846
|
+
|
|
1847
|
+
// Node walkers.
|
|
1848
|
+
|
|
1849
|
+
var base = {};
|
|
1850
|
+
|
|
1851
|
+
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
|
1852
|
+
for (var i = 0, list = node.body; i < list.length; i += 1)
|
|
1853
|
+
{
|
|
1854
|
+
var stmt = list[i];
|
|
1855
|
+
|
|
1856
|
+
c(stmt, st, "Statement");
|
|
1857
|
+
}
|
|
1858
|
+
};
|
|
1859
|
+
base.Statement = skipThrough;
|
|
1860
|
+
base.EmptyStatement = ignore;
|
|
1861
|
+
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
|
1862
|
+
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
|
1863
|
+
base.IfStatement = function (node, st, c) {
|
|
1864
|
+
c(node.test, st, "Expression");
|
|
1865
|
+
c(node.consequent, st, "Statement");
|
|
1866
|
+
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
|
1867
|
+
};
|
|
1868
|
+
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
|
1869
|
+
base.BreakStatement = base.ContinueStatement = ignore;
|
|
1870
|
+
base.WithStatement = function (node, st, c) {
|
|
1871
|
+
c(node.object, st, "Expression");
|
|
1872
|
+
c(node.body, st, "Statement");
|
|
1873
|
+
};
|
|
1874
|
+
base.SwitchStatement = function (node, st, c) {
|
|
1875
|
+
c(node.discriminant, st, "Expression");
|
|
1876
|
+
for (var i = 0, list = node.cases; i < list.length; i += 1) {
|
|
1877
|
+
var cs = list[i];
|
|
1878
|
+
|
|
1879
|
+
c(cs, st);
|
|
1880
|
+
}
|
|
1881
|
+
};
|
|
1882
|
+
base.SwitchCase = function (node, st, c) {
|
|
1883
|
+
if (node.test) { c(node.test, st, "Expression"); }
|
|
1884
|
+
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
|
1885
|
+
{
|
|
1886
|
+
var cons = list[i];
|
|
1887
|
+
|
|
1888
|
+
c(cons, st, "Statement");
|
|
1889
|
+
}
|
|
1890
|
+
};
|
|
1891
|
+
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
|
1892
|
+
if (node.argument) { c(node.argument, st, "Expression"); }
|
|
1893
|
+
};
|
|
1894
|
+
base.ThrowStatement = base.SpreadElement =
|
|
1895
|
+
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
|
1896
|
+
base.TryStatement = function (node, st, c) {
|
|
1897
|
+
c(node.block, st, "Statement");
|
|
1898
|
+
if (node.handler) { c(node.handler, st); }
|
|
1899
|
+
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
|
1900
|
+
};
|
|
1901
|
+
base.CatchClause = function (node, st, c) {
|
|
1902
|
+
if (node.param) { c(node.param, st, "Pattern"); }
|
|
1903
|
+
c(node.body, st, "Statement");
|
|
1904
|
+
};
|
|
1905
|
+
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
|
1906
|
+
c(node.test, st, "Expression");
|
|
1907
|
+
c(node.body, st, "Statement");
|
|
1908
|
+
};
|
|
1909
|
+
base.ForStatement = function (node, st, c) {
|
|
1910
|
+
if (node.init) { c(node.init, st, "ForInit"); }
|
|
1911
|
+
if (node.test) { c(node.test, st, "Expression"); }
|
|
1912
|
+
if (node.update) { c(node.update, st, "Expression"); }
|
|
1913
|
+
c(node.body, st, "Statement");
|
|
1914
|
+
};
|
|
1915
|
+
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
|
1916
|
+
c(node.left, st, "ForInit");
|
|
1917
|
+
c(node.right, st, "Expression");
|
|
1918
|
+
c(node.body, st, "Statement");
|
|
1919
|
+
};
|
|
1920
|
+
base.ForInit = function (node, st, c) {
|
|
1921
|
+
if (node.type === "VariableDeclaration") { c(node, st); }
|
|
1922
|
+
else { c(node, st, "Expression"); }
|
|
1923
|
+
};
|
|
1924
|
+
base.DebuggerStatement = ignore;
|
|
1925
|
+
|
|
1926
|
+
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
|
1927
|
+
base.VariableDeclaration = function (node, st, c) {
|
|
1928
|
+
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
|
1929
|
+
{
|
|
1930
|
+
var decl = list[i];
|
|
1931
|
+
|
|
1932
|
+
c(decl, st);
|
|
1933
|
+
}
|
|
1934
|
+
};
|
|
1935
|
+
base.VariableDeclarator = function (node, st, c) {
|
|
1936
|
+
c(node.id, st, "Pattern");
|
|
1937
|
+
if (node.init) { c(node.init, st, "Expression"); }
|
|
1938
|
+
};
|
|
1939
|
+
|
|
1940
|
+
base.Function = function (node, st, c) {
|
|
1941
|
+
if (node.id) { c(node.id, st, "Pattern"); }
|
|
1942
|
+
for (var i = 0, list = node.params; i < list.length; i += 1)
|
|
1943
|
+
{
|
|
1944
|
+
var param = list[i];
|
|
1945
|
+
|
|
1946
|
+
c(param, st, "Pattern");
|
|
1947
|
+
}
|
|
1948
|
+
c(node.body, st, node.expression ? "Expression" : "Statement");
|
|
1949
|
+
};
|
|
1950
|
+
|
|
1951
|
+
base.Pattern = function (node, st, c) {
|
|
1952
|
+
if (node.type === "Identifier")
|
|
1953
|
+
{ c(node, st, "VariablePattern"); }
|
|
1954
|
+
else if (node.type === "MemberExpression")
|
|
1955
|
+
{ c(node, st, "MemberPattern"); }
|
|
1956
|
+
else
|
|
1957
|
+
{ c(node, st); }
|
|
1958
|
+
};
|
|
1959
|
+
base.VariablePattern = ignore;
|
|
1960
|
+
base.MemberPattern = skipThrough;
|
|
1961
|
+
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
|
1962
|
+
base.ArrayPattern = function (node, st, c) {
|
|
1963
|
+
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
|
1964
|
+
var elt = list[i];
|
|
1965
|
+
|
|
1966
|
+
if (elt) { c(elt, st, "Pattern"); }
|
|
1967
|
+
}
|
|
1968
|
+
};
|
|
1969
|
+
base.ObjectPattern = function (node, st, c) {
|
|
1970
|
+
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
|
1971
|
+
var prop = list[i];
|
|
1972
|
+
|
|
1973
|
+
if (prop.type === "Property") {
|
|
1974
|
+
if (prop.computed) { c(prop.key, st, "Expression"); }
|
|
1975
|
+
c(prop.value, st, "Pattern");
|
|
1976
|
+
} else if (prop.type === "RestElement") {
|
|
1977
|
+
c(prop.argument, st, "Pattern");
|
|
1978
|
+
}
|
|
1979
|
+
}
|
|
1980
|
+
};
|
|
1981
|
+
|
|
1982
|
+
base.Expression = skipThrough;
|
|
1983
|
+
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
|
1984
|
+
base.ArrayExpression = function (node, st, c) {
|
|
1985
|
+
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
|
1986
|
+
var elt = list[i];
|
|
1987
|
+
|
|
1988
|
+
if (elt) { c(elt, st, "Expression"); }
|
|
1989
|
+
}
|
|
1990
|
+
};
|
|
1991
|
+
base.ObjectExpression = function (node, st, c) {
|
|
1992
|
+
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
|
1993
|
+
{
|
|
1994
|
+
var prop = list[i];
|
|
1995
|
+
|
|
1996
|
+
c(prop, st);
|
|
1997
|
+
}
|
|
1998
|
+
};
|
|
1999
|
+
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
|
2000
|
+
base.SequenceExpression = function (node, st, c) {
|
|
2001
|
+
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
|
2002
|
+
{
|
|
2003
|
+
var expr = list[i];
|
|
2004
|
+
|
|
2005
|
+
c(expr, st, "Expression");
|
|
2006
|
+
}
|
|
2007
|
+
};
|
|
2008
|
+
base.TemplateLiteral = function (node, st, c) {
|
|
2009
|
+
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
|
2010
|
+
{
|
|
2011
|
+
var quasi = list[i];
|
|
2012
|
+
|
|
2013
|
+
c(quasi, st);
|
|
2014
|
+
}
|
|
2015
|
+
|
|
2016
|
+
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
|
2017
|
+
{
|
|
2018
|
+
var expr = list$1[i$1];
|
|
2019
|
+
|
|
2020
|
+
c(expr, st, "Expression");
|
|
2021
|
+
}
|
|
2022
|
+
};
|
|
2023
|
+
base.TemplateElement = ignore;
|
|
2024
|
+
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
|
2025
|
+
c(node.argument, st, "Expression");
|
|
2026
|
+
};
|
|
2027
|
+
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
|
2028
|
+
c(node.left, st, "Expression");
|
|
2029
|
+
c(node.right, st, "Expression");
|
|
2030
|
+
};
|
|
2031
|
+
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
|
2032
|
+
c(node.left, st, "Pattern");
|
|
2033
|
+
c(node.right, st, "Expression");
|
|
2034
|
+
};
|
|
2035
|
+
base.ConditionalExpression = function (node, st, c) {
|
|
2036
|
+
c(node.test, st, "Expression");
|
|
2037
|
+
c(node.consequent, st, "Expression");
|
|
2038
|
+
c(node.alternate, st, "Expression");
|
|
2039
|
+
};
|
|
2040
|
+
base.NewExpression = base.CallExpression = function (node, st, c) {
|
|
2041
|
+
c(node.callee, st, "Expression");
|
|
2042
|
+
if (node.arguments)
|
|
2043
|
+
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
|
2044
|
+
{
|
|
2045
|
+
var arg = list[i];
|
|
2046
|
+
|
|
2047
|
+
c(arg, st, "Expression");
|
|
2048
|
+
} }
|
|
2049
|
+
};
|
|
2050
|
+
base.MemberExpression = function (node, st, c) {
|
|
2051
|
+
c(node.object, st, "Expression");
|
|
2052
|
+
if (node.computed) { c(node.property, st, "Expression"); }
|
|
2053
|
+
};
|
|
2054
|
+
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
|
2055
|
+
if (node.declaration)
|
|
2056
|
+
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
|
2057
|
+
if (node.source) { c(node.source, st, "Expression"); }
|
|
2058
|
+
};
|
|
2059
|
+
base.ExportAllDeclaration = function (node, st, c) {
|
|
2060
|
+
if (node.exported)
|
|
2061
|
+
{ c(node.exported, st); }
|
|
2062
|
+
c(node.source, st, "Expression");
|
|
2063
|
+
};
|
|
2064
|
+
base.ImportDeclaration = function (node, st, c) {
|
|
2065
|
+
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
|
2066
|
+
{
|
|
2067
|
+
var spec = list[i];
|
|
2068
|
+
|
|
2069
|
+
c(spec, st);
|
|
2070
|
+
}
|
|
2071
|
+
c(node.source, st, "Expression");
|
|
2072
|
+
};
|
|
2073
|
+
base.ImportExpression = function (node, st, c) {
|
|
2074
|
+
c(node.source, st, "Expression");
|
|
2075
|
+
};
|
|
2076
|
+
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
|
2077
|
+
|
|
2078
|
+
base.TaggedTemplateExpression = function (node, st, c) {
|
|
2079
|
+
c(node.tag, st, "Expression");
|
|
2080
|
+
c(node.quasi, st, "Expression");
|
|
2081
|
+
};
|
|
2082
|
+
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
|
2083
|
+
base.Class = function (node, st, c) {
|
|
2084
|
+
if (node.id) { c(node.id, st, "Pattern"); }
|
|
2085
|
+
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
|
2086
|
+
c(node.body, st);
|
|
2087
|
+
};
|
|
2088
|
+
base.ClassBody = function (node, st, c) {
|
|
2089
|
+
for (var i = 0, list = node.body; i < list.length; i += 1)
|
|
2090
|
+
{
|
|
2091
|
+
var elt = list[i];
|
|
2092
|
+
|
|
2093
|
+
c(elt, st);
|
|
2094
|
+
}
|
|
2095
|
+
};
|
|
2096
|
+
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
|
2097
|
+
if (node.computed) { c(node.key, st, "Expression"); }
|
|
2098
|
+
if (node.value) { c(node.value, st, "Expression"); }
|
|
2099
|
+
};
|
|
2100
|
+
|
|
2101
|
+
async function collectTests(ctx, filepath) {
|
|
2102
|
+
const request = await ctx.vite.environments.ssr.transformRequest(filepath);
|
|
2103
|
+
if (!request) return null;
|
|
2104
|
+
const ast = await parseAstAsync(request.code), testFilepath = relative(ctx.config.root, filepath), projectName = ctx.name, typecheckSubprojectName = projectName ? `${projectName}:__typecheck__` : "__typecheck__", file = {
|
|
2105
|
+
filepath,
|
|
2106
|
+
type: "suite",
|
|
2107
|
+
id: generateHash(`${testFilepath}${typecheckSubprojectName}`),
|
|
2108
|
+
name: testFilepath,
|
|
2109
|
+
mode: "run",
|
|
2110
|
+
tasks: [],
|
|
2111
|
+
start: ast.start,
|
|
2112
|
+
end: ast.end,
|
|
2113
|
+
projectName,
|
|
2114
|
+
meta: { typecheck: true },
|
|
2115
|
+
file: null
|
|
2116
|
+
};
|
|
2117
|
+
file.file = file;
|
|
2118
|
+
const definitions = [], getName = (callee) => {
|
|
2119
|
+
if (!callee) return null;
|
|
2120
|
+
if (callee.type === "Identifier") return callee.name;
|
|
2121
|
+
if (callee.type === "CallExpression") return getName(callee.callee);
|
|
2122
|
+
if (callee.type === "TaggedTemplateExpression") return getName(callee.tag);
|
|
2123
|
+
if (callee.type === "MemberExpression")
|
|
2124
|
+
// call as `__vite_ssr__.test.skip()`
|
|
2125
|
+
+return callee.object?.type === "Identifier" && [
+"it",
+"test",
+"describe",
+"suite"
+].includes(callee.object.name) ? callee.object?.name : callee.object?.name?.startsWith("__vite_ssr_") ? getName(callee.property) : getName(callee.object?.property);
+// unwrap (0, ...)
+if (callee.type === "SequenceExpression" && callee.expressions.length === 2) {
+const [e0, e1] = callee.expressions;
+if (e0.type === "Literal" && e0.value === 0) return getName(e1);
+}
+return null;
+};
+ancestor(ast, { CallExpression(node) {
+const { callee } = node, name = getName(callee);
+if (!name || ![
+"it",
+"test",
+"describe",
+"suite"
+].includes(name)) return;
+const property = callee?.property?.name;
+let mode = !property || property === name ? "run" : property;
+// they will be picked up in the next iteration
+if ([
+"each",
+"for",
+"skipIf",
+"runIf"
+].includes(mode)) return;
+let start;
+const end = node.end;
+// .each
+if (callee.type === "CallExpression") start = callee.end;
+else if (callee.type === "TaggedTemplateExpression") start = callee.end + 1;
+else start = node.start;
+const { arguments: [messageNode] } = node, message = messageNode?.type === "Literal" || messageNode?.type === "TemplateLiteral" ? request.code.slice(messageNode.start + 1, messageNode.end - 1) : request.code.slice(messageNode.start, messageNode.end);
+// cannot statically analyze, so we always skip it
+if (mode === "skipIf" || mode === "runIf") mode = "skip";
+definitions.push({
+start,
+end,
+name: message,
+type: name === "it" || name === "test" ? "test" : "suite",
+mode,
+task: null
+});
+} });
+let lastSuite = file;
+const updateLatestSuite = (index) => {
+while (lastSuite.suite && lastSuite.end < index) lastSuite = lastSuite.suite;
+return lastSuite;
+};
+definitions.sort((a, b) => a.start - b.start).forEach((definition) => {
+const latestSuite = updateLatestSuite(definition.start);
+let mode = definition.mode;
+if (latestSuite.mode !== "run")
+// inherit suite mode, if it's set
+mode = latestSuite.mode;
+if (definition.type === "suite") {
+const task = {
+type: definition.type,
+id: "",
+suite: latestSuite,
+file,
+tasks: [],
+mode,
+name: definition.name,
+end: definition.end,
+start: definition.start,
+meta: { typecheck: true }
+};
+definition.task = task, latestSuite.tasks.push(task), lastSuite = task;
+return;
+}
+const task = {
+type: definition.type,
+id: "",
+suite: latestSuite,
+file,
+mode,
+timeout: 0,
+context: {},
+name: definition.name,
+end: definition.end,
+start: definition.start,
+annotations: [],
+meta: { typecheck: true }
+};
+definition.task = task, latestSuite.tasks.push(task);
+}), calculateSuiteHash(file);
+const hasOnly = someTasksAreOnly(file);
+return interpretTaskModes(file, ctx.config.testNamePattern, void 0, hasOnly, false, ctx.config.allowOnly), {
+file,
+parsed: request.code,
+filepath,
+map: request.map,
+definitions
+};
+}
+
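The added block above is the static collector used by typecheck mode: it walks the parsed test file with `ancestor` and records every `it`/`test`/`describe`/`suite` call as a definition, then links the definitions into suite/test tasks without ever executing the file. A minimal sketch of the same idea using acorn and acorn-walk directly; the parse options and the simplified name lookup are assumptions, not the exact helpers bundled in this chunk:

import { parse } from "acorn";
import { ancestor } from "acorn-walk";

const code = `test.skip("adds", () => {})`;
const definitions = [];
ancestor(parse(code, { ecmaVersion: "latest" }), {
	CallExpression(node) {
		const callee = node.callee;
		// simplified: the real getName() also unwraps `(0, test)(...)` and __vite_ssr_ wrappers
		const name = callee.type === "MemberExpression" ? callee.object.name : callee.name;
		if (!["it", "test", "describe", "suite"].includes(name)) return;
		const mode = callee.type === "MemberExpression" ? callee.property.name : "run";
		const [messageNode] = node.arguments;
		definitions.push({
			start: node.start,
			end: node.end,
			name: code.slice(messageNode.start + 1, messageNode.end - 1),
			type: name === "it" || name === "test" ? "test" : "suite",
			mode,
			task: null
		});
	}
});
console.log(definitions); // [{ name: "adds", type: "test", mode: "skip", ... }]

Modifiers such as `.each`, `.for`, `.skipIf` and `.runIf` are deliberately deferred to the wrapped call in the real collector, and `skipIf`/`runIf` later collapse to `skip` because their condition cannot be evaluated statically.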
+const newLineRegExp = /\r?\n/, errCodeRegExp = /error TS(?<errCode>\d+)/;
+async function makeTscErrorInfo(errInfo) {
+const [errFilePathPos = "", ...errMsgRawArr] = errInfo.split(":");
+if (!errFilePathPos || errMsgRawArr.length === 0 || errMsgRawArr.join("").length === 0) return ["unknown filepath", null];
+const errMsgRaw = errMsgRawArr.join("").trim(), [errFilePath, errPos] = errFilePathPos.slice(0, -1).split("(");
+if (!errFilePath || !errPos) return ["unknown filepath", null];
+const [errLine, errCol] = errPos.split(",");
+if (!errLine || !errCol) return [errFilePath, null];
+// get errCode, errMsg
+const execArr = errCodeRegExp.exec(errMsgRaw);
+if (!execArr) return [errFilePath, null];
+const errCodeStr = execArr.groups?.errCode ?? "";
+if (!errCodeStr) return [errFilePath, null];
+const line = Number(errLine), col = Number(errCol), errCode = Number(errCodeStr);
+return [errFilePath, {
+filePath: errFilePath,
+errCode,
+line,
+column: col,
+errMsg: errMsgRaw.slice(`error TS${errCode} `.length)
+}];
+}
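`makeTscErrorInfo` above turns one diagnostic line of `tsc --pretty false` output into a `[filePath, info]` pair (or a `null` info when the line cannot be parsed), and the `getRawErrsMapFromTsCompile` function added right after it groups those pairs per file, folding indented continuation lines into the previous diagnostic. A worked example calling the helper defined above; it is internal to this chunk rather than a public Vitest API, and the diagnostic text is made up:

const sample = `test/math.test-d.ts(4,1): error TS2322: Type 'string' is not assignable to type 'number'.`;
const [file, info] = await makeTscErrorInfo(sample);
// file === "test/math.test-d.ts"
// info === {
//   filePath: "test/math.test-d.ts",
//   errCode: 2322,
//   line: 4,
//   column: 1,
//   errMsg: "Type 'string' is not assignable to type 'number'."
// }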
+async function getRawErrsMapFromTsCompile(tscErrorStdout) {
+const rawErrsMap = /* @__PURE__ */ new Map();
+return (await Promise.all(tscErrorStdout.split(newLineRegExp).reduce((prev, next) => {
+if (!next) return prev;
+if (next[0] !== " ") prev.push(next);
+else prev[prev.length - 1] += `\n${next}`;
+return prev;
+}, []).map((errInfoLine) => makeTscErrorInfo(errInfoLine)))).forEach(([errFilePath, errInfo]) => {
+if (errInfo) if (!rawErrsMap.has(errFilePath)) rawErrsMap.set(errFilePath, [errInfo]);
+else rawErrsMap.get(errFilePath)?.push(errInfo);
+}), rawErrsMap;
+}
+
+function createIndexMap(source) {
+const map = /* @__PURE__ */ new Map();
+let index = 0, line = 1, column = 1;
+for (const char of source) if (map.set(`${line}:${column}`, index++), char === "\n" || char === "\r\n") line++, column = 0;
+else column++;
+return map;
+}
+
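`createIndexMap` converts a source string into a `"line:column"` to character-offset lookup; `prepareResults` later uses it to match a tsc error position against the `start`/`end` offsets of a collected definition. A small worked example (note that, as written, columns restart at 0 rather than 1 after a newline):

const map = createIndexMap("ab\ncd");
// map.get("1:1") === 0   'a'
// map.get("1:2") === 1   'b'
// map.get("1:3") === 2   '\n'
// map.get("2:0") === 3   'c'
// map.get("2:1") === 4   'd'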
+class TypeCheckError extends Error {
+name = "TypeCheckError";
+constructor(message, stacks) {
+super(message), this.message = message, this.stacks = stacks;
+}
+}
+class Typechecker {
+_onParseStart;
+_onParseEnd;
+_onWatcherRerun;
+_result = {
+files: [],
+sourceErrors: [],
+time: 0
+};
+_startTime = 0;
+_output = "";
+_tests = {};
+process;
+files = [];
+constructor(project) {
+this.project = project;
+}
+setFiles(files) {
+this.files = files;
+}
+onParseStart(fn) {
+this._onParseStart = fn;
+}
+onParseEnd(fn) {
+this._onParseEnd = fn;
+}
+onWatcherRerun(fn) {
+this._onWatcherRerun = fn;
+}
+async collectFileTests(filepath) {
+return collectTests(this.project, filepath);
+}
+getFiles() {
+return this.files;
+}
+async collectTests() {
+const tests = (await Promise.all(this.getFiles().map((filepath) => this.collectFileTests(filepath)))).reduce((acc, data) => {
+return data && (acc[data.filepath] = data), acc;
+}, {});
+return this._tests = tests, tests;
+}
+markPassed(file) {
+if (!file.result?.state) file.result = { state: "pass" };
+const markTasks = (tasks) => {
+for (const task of tasks) {
+if ("tasks" in task) markTasks(task.tasks);
+if (!task.result?.state && (task.mode === "run" || task.mode === "queued")) task.result = { state: "pass" };
+}
+};
+markTasks(file.tasks);
+}
+async prepareResults(output) {
+const typeErrors = await this.parseTscLikeOutput(output), testFiles = new Set(this.getFiles());
+if (!this._tests) this._tests = await this.collectTests();
+const sourceErrors = [], files = [];
+return testFiles.forEach((path) => {
+const { file, definitions, map, parsed } = this._tests[path], errors = typeErrors.get(path);
+if (files.push(file), !errors) {
+this.markPassed(file);
+return;
+}
+const sortedDefinitions = [...definitions.sort((a, b) => b.start - a.start)], traceMap = map && new TraceMap(map), indexMap = createIndexMap(parsed), markState = (task, state) => {
+if (task.result = { state: task.mode === "run" || task.mode === "only" ? state : task.mode }, task.suite) markState(task.suite, state);
+else if (task.file && task !== task.file) markState(task.file, state);
+};
+errors.forEach(({ error, originalError }) => {
+const processedPos = traceMap ? findGeneratedPosition(traceMap, {
+line: originalError.line,
+column: originalError.column,
+source: basename(path)
+}) : originalError, line = processedPos.line ?? originalError.line, column = processedPos.column ?? originalError.column, index = indexMap.get(`${line}:${column}`), definition = index != null && sortedDefinitions.find((def) => def.start <= index && def.end >= index), suite = definition ? definition.task : file, state = suite.mode === "run" || suite.mode === "only" ? "fail" : suite.mode, errors = suite.result?.errors || [];
+if (suite.result = {
+state,
+errors
+}, errors.push(error), state === "fail") {
+if (suite.suite) markState(suite.suite, "fail");
+else if (suite.file && suite !== suite.file) markState(suite.file, "fail");
+}
+}), this.markPassed(file);
+}), typeErrors.forEach((errors, path) => {
+if (!testFiles.has(path)) sourceErrors.push(...errors.map(({ error }) => error));
+}), {
+files,
+sourceErrors,
+time: performance$1.now() - this._startTime
+};
+}
+async parseTscLikeOutput(output) {
+const errorsMap = await getRawErrsMapFromTsCompile(output), typesErrors = /* @__PURE__ */ new Map();
+return errorsMap.forEach((errors, path) => {
+const filepath = resolve$1(this.project.config.root, path), suiteErrors = errors.map((info) => {
+const limit = Error.stackTraceLimit;
+Error.stackTraceLimit = 0;
+// Some expect-type errors have the most useful information on the second line e.g. `This expression is not callable.\n Type 'ExpectString<number>' has no call signatures.`
+const errMsg = info.errMsg.replace(/\r?\n\s*(Type .* has no call signatures)/g, " $1"), error = new TypeCheckError(errMsg, [{
+file: filepath,
+line: info.line,
+column: info.column,
+method: ""
+}]);
+return Error.stackTraceLimit = limit, {
+originalError: info,
+error: {
+name: error.name,
+message: errMsg,
+stacks: error.stacks,
+stack: ""
+}
+};
+});
+typesErrors.set(filepath, suiteErrors);
+}), typesErrors;
+}
+async stop() {
+this.process?.kill(), this.process = void 0;
+}
+async ensurePackageInstalled(ctx, checker) {
+if (checker !== "tsc" && checker !== "vue-tsc") return;
+const packageName = checker === "tsc" ? "typescript" : "vue-tsc";
+await ctx.packageInstaller.ensureInstalled(packageName, ctx.config.root);
+}
+getExitCode() {
+return this.process?.exitCode != null && this.process.exitCode;
+}
+getOutput() {
+return this._output;
+}
+async spawn() {
+const { root, watch, typecheck } = this.project.config, args = [
+"--noEmit",
+"--pretty",
+"false",
+"--incremental",
+"--tsBuildInfoFile",
+join(process.versions.pnp ? join(nodeos__default.tmpdir(), this.project.hash) : distDir, "tsconfig.tmp.tsbuildinfo")
+];
+// use builtin watcher because it's faster
+if (watch) args.push("--watch");
+if (typecheck.allowJs) args.push("--allowJs", "--checkJs");
+if (typecheck.tsconfig) args.push("-p", resolve$1(root, typecheck.tsconfig));
+this._output = "", this._startTime = performance$1.now();
+const child = x(typecheck.checker, args, {
+nodeOptions: {
+cwd: root,
+stdio: "pipe"
+},
+throwOnError: false
+});
+this.process = child.process;
+let rerunTriggered = false, dataReceived = false;
+return new Promise((resolve, reject) => {
+if (!child.process || !child.process.stdout) {
+reject(/* @__PURE__ */ new Error(`Failed to initialize ${typecheck.checker}. This is a bug in Vitest - please, open an issue with reproduction.`));
+return;
+}
+child.process.stdout.on("data", (chunk) => {
+if (dataReceived = true, this._output += chunk, watch) {
+if (this._output.includes("File change detected") && !rerunTriggered) this._onWatcherRerun?.(), this._startTime = performance$1.now(), this._result.sourceErrors = [], this._result.files = [], this._tests = null, rerunTriggered = true;
+if (/Found \w+ errors*. Watching for/.test(this._output)) rerunTriggered = false, this.prepareResults(this._output).then((result) => {
+this._result = result, this._onParseEnd?.(result);
+}), this._output = "";
+}
+});
+const timeout = setTimeout(() => reject(/* @__PURE__ */ new Error(`${typecheck.checker} spawn timed out`)), this.project.config.typecheck.spawnTimeout);
+function onError(cause) {
+clearTimeout(timeout), reject(new Error("Spawning typechecker failed - is typescript installed?", { cause }));
+}
+if (child.process.once("spawn", () => {
+if (this._onParseStart?.(), child.process?.off("error", onError), clearTimeout(timeout), process.platform === "win32")
+// on Windows, the process might be spawned but fail to start
+// we wait for a potential error here. if "close" event didn't trigger,
+// we resolve the promise
+setTimeout(() => {
+resolve({ result: child });
+}, 200);
+else resolve({ result: child });
+}), process.platform === "win32") child.process.once("close", (code) => {
+if (code != null && code !== 0 && !dataReceived) onError(/* @__PURE__ */ new Error(`The ${typecheck.checker} command exited with code ${code}.`));
+});
+child.process.once("error", onError);
+});
+}
+async start() {
+if (this.process) return;
+const { watch } = this.project.config, { result: child } = await this.spawn();
+if (!watch) await child, this._result = await this.prepareResults(this._output), await this._onParseEnd?.(this._result);
+}
+getResult() {
+return this._result;
+}
+getTestFiles() {
+return Object.values(this._tests || {}).map((i) => i.file);
+}
+getTestPacksAndEvents() {
+const packs = [], events = [];
+for (const { file } of Object.values(this._tests || {})) {
+const result = convertTasksToEvents(file);
+packs.push(...result.packs), events.push(...result.events);
+}
+return {
+packs,
+events
+};
+}
+}
+
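Taken together, the `Typechecker` added above runs the configured checker (`tsc` or `vue-tsc`) as a child process with `--noEmit --pretty false --incremental --tsBuildInfoFile ...`, appends `--watch`, `--allowJs --checkJs` and `-p <tsconfig>` based on the project config, buffers its stdout, and converts the diagnostics into test results in `prepareResults`. A usage sketch, assuming `project` is shaped like a Vitest test project with `typecheck` configured and that the file path is purely illustrative:

const checker = new Typechecker(project);
checker.setFiles(["test/math.test-d.ts"]);
checker.onParseStart(() => console.log("typechecking started"));
checker.onParseEnd((result) => {
	// result.files are the collected test files with pass/fail state,
	// result.sourceErrors are diagnostics reported outside of test files
	console.log(result.files.length, result.sourceErrors.length, `${result.time}ms`);
});
await checker.start(); // spawns e.g. `tsc --noEmit --pretty false --incremental ... -p tsconfig.json`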
+function findGeneratedPosition(traceMap, { line, column, source }) {
+const found = generatedPositionFor(traceMap, {
+line,
+column,
+source
+});
+if (found.line !== null) return found;
+// find the next source token position when the exact error position doesn't exist in source map.
+// this can happen, for example, when the type error is in the comment "// @ts-expect-error"
+// and comments are stripped away in the generated code.
+const mappings = [];
+eachMapping(traceMap, (m) => {
+if (m.source === source && m.originalLine !== null && m.originalColumn !== null && (line === m.originalLine ? column < m.originalColumn : line < m.originalLine)) mappings.push(m);
+});
+const next = mappings.sort((a, b) => a.originalLine === b.originalLine ? a.originalColumn - b.originalColumn : a.originalLine - b.originalLine).at(0);
+return next ? {
+line: next.generatedLine,
+column: next.generatedColumn
+} : {
+line: null,
+column: null
+};
+}
+
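`findGeneratedPosition` wraps `generatedPositionFor` on the `TraceMap` built in `prepareResults`: when the exact original position has no mapping, for instance because the error sits on a stripped `// @ts-expect-error` comment, it falls back to the first mapping after the requested position. A minimal sketch of the underlying calls; `TraceMap`, `generatedPositionFor` and `eachMapping` are the `@jridgewell/trace-mapping` API, and `sourceMapJson` is a placeholder for the source map produced when the test file was transformed (`request.map` in `collectTests`):

import { TraceMap, generatedPositionFor, eachMapping } from "@jridgewell/trace-mapping";

const traceMap = new TraceMap(sourceMapJson); // placeholder: a standard v3 source map object
const exact = generatedPositionFor(traceMap, { source: "math.test-d.ts", line: 4, column: 1 });
if (exact.line === null) {
	// fallback: scan all mappings and pick the closest one after line 4, column 1,
	// mirroring the eachMapping() loop in findGeneratedPosition above
	eachMapping(traceMap, (m) => {
		/* collect mappings past the requested position ... */
	});
}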
 // use Logger with custom Console to capture entire error printing
 function capturePrintError(error, ctx, options) {
 let output = "";
@@ -1431,7 +2926,7 @@ class JsonReporter {
 async writeReport(report) {
 const outputFile = this.options.outputFile ?? getOutputFile(this.ctx.config, "json");
 if (outputFile) {
-const reportFile = resolve(this.ctx.config.root, outputFile), outputDirectory = dirname(reportFile);
+const reportFile = resolve$1(this.ctx.config.root, outputFile), outputDirectory = dirname(reportFile);
 if (!existsSync(outputDirectory)) await promises.mkdir(outputDirectory, { recursive: true });
 await promises.writeFile(reportFile, report, "utf-8"), this.ctx.logger.log(`JSON report written to ${reportFile}`);
 } else this.ctx.logger.log(report);
@@ -1500,7 +2995,7 @@ class JUnitReporter {
 this.ctx = ctx;
 const outputFile = this.options.outputFile ?? getOutputFile(this.ctx.config, "junit");
 if (outputFile) {
-this.reportFile = resolve(this.ctx.config.root, outputFile);
+this.reportFile = resolve$1(this.ctx.config.root, outputFile);
 const outputDirectory = dirname(this.reportFile);
 if (!existsSync(outputDirectory)) await promises.mkdir(outputDirectory, { recursive: true });
 this.fileFd = await promises.open(this.reportFile, "w+"), this.baseLog = async (text) => {
@@ -1733,4 +3228,4 @@ const ReportersMap = {
 "github-actions": GithubActionsReporter
 };
 
-export { BlobReporter as B, DefaultReporter as D, F_RIGHT as F, GithubActionsReporter as G, HangingProcessReporter as H, JsonReporter as J, ReportersMap as R, TapFlatReporter as T, VerboseReporter as V, DotReporter as a, JUnitReporter as b, TapReporter as c,
+export { BlobReporter as B, DefaultReporter as D, F_RIGHT as F, GithubActionsReporter as G, HangingProcessReporter as H, JsonReporter as J, ReportersMap as R, TapFlatReporter as T, VerboseReporter as V, DotReporter as a, JUnitReporter as b, TapReporter as c, stringify as d, TraceMap as e, formatProjectName as f, getStateSymbol as g, ancestor as h, printError as i, errorBanner as j, divider as k, Typechecker as l, generateCodeFrame as m, createDefinesScript as n, originalPositionFor as o, parse$1 as p, convertTasksToEvents as q, readBlobs as r, separator as s, truncateString as t, utils as u, withLabel as w };