@vitest/coverage-v8 1.4.0 → 1.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/provider.js +2062 -21
- package/package.json +6 -6
package/dist/provider.js
CHANGED
@@ -1,6 +1,11 @@
-import { existsSync, promises, writeFileSync } from 'node:fs';
-import { pathToFileURL, fileURLToPath } from 'node:url';
-import
+import { existsSync, promises as promises$1, writeFileSync } from 'node:fs';
+import { pathToFileURL, fileURLToPath as fileURLToPath$1 } from 'node:url';
+import require$$0 from 'assert';
+import require$$2 from 'util';
+import require$$3 from 'path';
+import require$$4 from 'url';
+import require$$9 from 'fs';
+import require$$11 from 'module';
 import { mergeProcessCovs } from '@bcoe/v8-coverage';
 import libReport from 'istanbul-lib-report';
 import reports from 'istanbul-reports';
@@ -18,6 +23,2039 @@ import { coverageConfigDefaults, defaultExclude, defaultInclude } from 'vitest/c
|
|
18
23
|
import { BaseCoverageProvider } from 'vitest/coverage';
|
19
24
|
import _TestExclude from 'test-exclude';
|
20
25
|
|
26
|
+
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
|
27
|
+
|
28
|
+
function getDefaultExportFromCjs (x) {
|
29
|
+
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
|
30
|
+
}
|
31
|
+
|
32
|
+
var convertSourceMap$1 = {};
|
33
|
+
|
34
|
+
(function (exports) {
|
35
|
+
|
36
|
+
Object.defineProperty(exports, 'commentRegex', {
|
37
|
+
get: function getCommentRegex () {
|
38
|
+
// Groups: 1: media type, 2: MIME type, 3: charset, 4: encoding, 5: data.
|
39
|
+
return /^\s*?\/[\/\*][@#]\s+?sourceMappingURL=data:(((?:application|text)\/json)(?:;charset=([^;,]+?)?)?)?(?:;(base64))?,(.*?)$/mg;
|
40
|
+
}
|
41
|
+
});
|
42
|
+
|
43
|
+
|
44
|
+
Object.defineProperty(exports, 'mapFileCommentRegex', {
|
45
|
+
get: function getMapFileCommentRegex () {
|
46
|
+
// Matches sourceMappingURL in either // or /* comment styles.
|
47
|
+
return /(?:\/\/[@#][ \t]+?sourceMappingURL=([^\s'"`]+?)[ \t]*?$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^*]+?)[ \t]*?(?:\*\/){1}[ \t]*?$)/mg;
|
48
|
+
}
|
49
|
+
});
|
50
|
+
|
51
|
+
var decodeBase64;
|
52
|
+
if (typeof Buffer !== 'undefined') {
|
53
|
+
if (typeof Buffer.from === 'function') {
|
54
|
+
decodeBase64 = decodeBase64WithBufferFrom;
|
55
|
+
} else {
|
56
|
+
decodeBase64 = decodeBase64WithNewBuffer;
|
57
|
+
}
|
58
|
+
} else {
|
59
|
+
decodeBase64 = decodeBase64WithAtob;
|
60
|
+
}
|
61
|
+
|
62
|
+
function decodeBase64WithBufferFrom(base64) {
|
63
|
+
return Buffer.from(base64, 'base64').toString();
|
64
|
+
}
|
65
|
+
|
66
|
+
function decodeBase64WithNewBuffer(base64) {
|
67
|
+
if (typeof value === 'number') {
|
68
|
+
throw new TypeError('The value to decode must not be of type number.');
|
69
|
+
}
|
70
|
+
return new Buffer(base64, 'base64').toString();
|
71
|
+
}
|
72
|
+
|
73
|
+
function decodeBase64WithAtob(base64) {
|
74
|
+
return decodeURIComponent(escape(atob(base64)));
|
75
|
+
}
|
76
|
+
|
77
|
+
function stripComment(sm) {
|
78
|
+
return sm.split(',').pop();
|
79
|
+
}
|
80
|
+
|
81
|
+
function readFromFileMap(sm, read) {
|
82
|
+
var r = exports.mapFileCommentRegex.exec(sm);
|
83
|
+
// for some odd reason //# .. captures in 1 and /* .. */ in 2
|
84
|
+
var filename = r[1] || r[2];
|
85
|
+
|
86
|
+
try {
|
87
|
+
var sm = read(filename);
|
88
|
+
if (sm != null && typeof sm.catch === 'function') {
|
89
|
+
return sm.catch(throwError);
|
90
|
+
} else {
|
91
|
+
return sm;
|
92
|
+
}
|
93
|
+
} catch (e) {
|
94
|
+
throwError(e);
|
95
|
+
}
|
96
|
+
|
97
|
+
function throwError(e) {
|
98
|
+
throw new Error('An error occurred while trying to read the map file at ' + filename + '\n' + e.stack);
|
99
|
+
}
|
100
|
+
}
|
101
|
+
|
102
|
+
function Converter (sm, opts) {
|
103
|
+
opts = opts || {};
|
104
|
+
|
105
|
+
if (opts.hasComment) {
|
106
|
+
sm = stripComment(sm);
|
107
|
+
}
|
108
|
+
|
109
|
+
if (opts.encoding === 'base64') {
|
110
|
+
sm = decodeBase64(sm);
|
111
|
+
} else if (opts.encoding === 'uri') {
|
112
|
+
sm = decodeURIComponent(sm);
|
113
|
+
}
|
114
|
+
|
115
|
+
if (opts.isJSON || opts.encoding) {
|
116
|
+
sm = JSON.parse(sm);
|
117
|
+
}
|
118
|
+
|
119
|
+
this.sourcemap = sm;
|
120
|
+
}
|
121
|
+
|
122
|
+
Converter.prototype.toJSON = function (space) {
|
123
|
+
return JSON.stringify(this.sourcemap, null, space);
|
124
|
+
};
|
125
|
+
|
126
|
+
if (typeof Buffer !== 'undefined') {
|
127
|
+
if (typeof Buffer.from === 'function') {
|
128
|
+
Converter.prototype.toBase64 = encodeBase64WithBufferFrom;
|
129
|
+
} else {
|
130
|
+
Converter.prototype.toBase64 = encodeBase64WithNewBuffer;
|
131
|
+
}
|
132
|
+
} else {
|
133
|
+
Converter.prototype.toBase64 = encodeBase64WithBtoa;
|
134
|
+
}
|
135
|
+
|
136
|
+
function encodeBase64WithBufferFrom() {
|
137
|
+
var json = this.toJSON();
|
138
|
+
return Buffer.from(json, 'utf8').toString('base64');
|
139
|
+
}
|
140
|
+
|
141
|
+
function encodeBase64WithNewBuffer() {
|
142
|
+
var json = this.toJSON();
|
143
|
+
if (typeof json === 'number') {
|
144
|
+
throw new TypeError('The json to encode must not be of type number.');
|
145
|
+
}
|
146
|
+
return new Buffer(json, 'utf8').toString('base64');
|
147
|
+
}
|
148
|
+
|
149
|
+
function encodeBase64WithBtoa() {
|
150
|
+
var json = this.toJSON();
|
151
|
+
return btoa(unescape(encodeURIComponent(json)));
|
152
|
+
}
|
153
|
+
|
154
|
+
Converter.prototype.toURI = function () {
|
155
|
+
var json = this.toJSON();
|
156
|
+
return encodeURIComponent(json);
|
157
|
+
};
|
158
|
+
|
159
|
+
Converter.prototype.toComment = function (options) {
|
160
|
+
var encoding, content, data;
|
161
|
+
if (options != null && options.encoding === 'uri') {
|
162
|
+
encoding = '';
|
163
|
+
content = this.toURI();
|
164
|
+
} else {
|
165
|
+
encoding = ';base64';
|
166
|
+
content = this.toBase64();
|
167
|
+
}
|
168
|
+
data = 'sourceMappingURL=data:application/json;charset=utf-8' + encoding + ',' + content;
|
169
|
+
return options != null && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
|
170
|
+
};
|
171
|
+
|
172
|
+
// returns copy instead of original
|
173
|
+
Converter.prototype.toObject = function () {
|
174
|
+
return JSON.parse(this.toJSON());
|
175
|
+
};
|
176
|
+
|
177
|
+
Converter.prototype.addProperty = function (key, value) {
|
178
|
+
if (this.sourcemap.hasOwnProperty(key)) throw new Error('property "' + key + '" already exists on the sourcemap, use set property instead');
|
179
|
+
return this.setProperty(key, value);
|
180
|
+
};
|
181
|
+
|
182
|
+
Converter.prototype.setProperty = function (key, value) {
|
183
|
+
this.sourcemap[key] = value;
|
184
|
+
return this;
|
185
|
+
};
|
186
|
+
|
187
|
+
Converter.prototype.getProperty = function (key) {
|
188
|
+
return this.sourcemap[key];
|
189
|
+
};
|
190
|
+
|
191
|
+
exports.fromObject = function (obj) {
|
192
|
+
return new Converter(obj);
|
193
|
+
};
|
194
|
+
|
195
|
+
exports.fromJSON = function (json) {
|
196
|
+
return new Converter(json, { isJSON: true });
|
197
|
+
};
|
198
|
+
|
199
|
+
exports.fromURI = function (uri) {
|
200
|
+
return new Converter(uri, { encoding: 'uri' });
|
201
|
+
};
|
202
|
+
|
203
|
+
exports.fromBase64 = function (base64) {
|
204
|
+
return new Converter(base64, { encoding: 'base64' });
|
205
|
+
};
|
206
|
+
|
207
|
+
exports.fromComment = function (comment) {
|
208
|
+
var m, encoding;
|
209
|
+
comment = comment
|
210
|
+
.replace(/^\/\*/g, '//')
|
211
|
+
.replace(/\*\/$/g, '');
|
212
|
+
m = exports.commentRegex.exec(comment);
|
213
|
+
encoding = m && m[4] || 'uri';
|
214
|
+
return new Converter(comment, { encoding: encoding, hasComment: true });
|
215
|
+
};
|
216
|
+
|
217
|
+
function makeConverter(sm) {
|
218
|
+
return new Converter(sm, { isJSON: true });
|
219
|
+
}
|
220
|
+
|
221
|
+
exports.fromMapFileComment = function (comment, read) {
|
222
|
+
if (typeof read === 'string') {
|
223
|
+
throw new Error(
|
224
|
+
'String directory paths are no longer supported with `fromMapFileComment`\n' +
|
225
|
+
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
|
226
|
+
)
|
227
|
+
}
|
228
|
+
|
229
|
+
var sm = readFromFileMap(comment, read);
|
230
|
+
if (sm != null && typeof sm.then === 'function') {
|
231
|
+
return sm.then(makeConverter);
|
232
|
+
} else {
|
233
|
+
return makeConverter(sm);
|
234
|
+
}
|
235
|
+
};
|
236
|
+
|
237
|
+
// Finds last sourcemap comment in file or returns null if none was found
|
238
|
+
exports.fromSource = function (content) {
|
239
|
+
var m = content.match(exports.commentRegex);
|
240
|
+
return m ? exports.fromComment(m.pop()) : null;
|
241
|
+
};
|
242
|
+
|
243
|
+
// Finds last sourcemap comment in file or returns null if none was found
|
244
|
+
exports.fromMapFileSource = function (content, read) {
|
245
|
+
if (typeof read === 'string') {
|
246
|
+
throw new Error(
|
247
|
+
'String directory paths are no longer supported with `fromMapFileSource`\n' +
|
248
|
+
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
|
249
|
+
)
|
250
|
+
}
|
251
|
+
var m = content.match(exports.mapFileCommentRegex);
|
252
|
+
return m ? exports.fromMapFileComment(m.pop(), read) : null;
|
253
|
+
};
|
254
|
+
|
255
|
+
exports.removeComments = function (src) {
|
256
|
+
return src.replace(exports.commentRegex, '');
|
257
|
+
};
|
258
|
+
|
259
|
+
exports.removeMapFileComments = function (src) {
|
260
|
+
return src.replace(exports.mapFileCommentRegex, '');
|
261
|
+
};
|
262
|
+
|
263
|
+
exports.generateMapFileComment = function (file, options) {
|
264
|
+
var data = 'sourceMappingURL=' + file;
|
265
|
+
return options && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
|
266
|
+
};
|
267
|
+
} (convertSourceMap$1));
|
268
|
+
|
269
|
+
var branch;
|
270
|
+
var hasRequiredBranch;
|
271
|
+
|
272
|
+
function requireBranch () {
|
273
|
+
if (hasRequiredBranch) return branch;
|
274
|
+
hasRequiredBranch = 1;
|
275
|
+
branch = class CovBranch {
|
276
|
+
constructor (startLine, startCol, endLine, endCol, count) {
|
277
|
+
this.startLine = startLine;
|
278
|
+
this.startCol = startCol;
|
279
|
+
this.endLine = endLine;
|
280
|
+
this.endCol = endCol;
|
281
|
+
this.count = count;
|
282
|
+
}
|
283
|
+
|
284
|
+
toIstanbul () {
|
285
|
+
const location = {
|
286
|
+
start: {
|
287
|
+
line: this.startLine,
|
288
|
+
column: this.startCol
|
289
|
+
},
|
290
|
+
end: {
|
291
|
+
line: this.endLine,
|
292
|
+
column: this.endCol
|
293
|
+
}
|
294
|
+
};
|
295
|
+
return {
|
296
|
+
type: 'branch',
|
297
|
+
line: this.startLine,
|
298
|
+
loc: location,
|
299
|
+
locations: [Object.assign({}, location)]
|
300
|
+
}
|
301
|
+
}
|
302
|
+
};
|
303
|
+
return branch;
|
304
|
+
}
|
305
|
+
|
306
|
+
var _function;
|
307
|
+
var hasRequired_function;
|
308
|
+
|
309
|
+
function require_function () {
|
310
|
+
if (hasRequired_function) return _function;
|
311
|
+
hasRequired_function = 1;
|
312
|
+
_function = class CovFunction {
|
313
|
+
constructor (name, startLine, startCol, endLine, endCol, count) {
|
314
|
+
this.name = name;
|
315
|
+
this.startLine = startLine;
|
316
|
+
this.startCol = startCol;
|
317
|
+
this.endLine = endLine;
|
318
|
+
this.endCol = endCol;
|
319
|
+
this.count = count;
|
320
|
+
}
|
321
|
+
|
322
|
+
toIstanbul () {
|
323
|
+
const loc = {
|
324
|
+
start: {
|
325
|
+
line: this.startLine,
|
326
|
+
column: this.startCol
|
327
|
+
},
|
328
|
+
end: {
|
329
|
+
line: this.endLine,
|
330
|
+
column: this.endCol
|
331
|
+
}
|
332
|
+
};
|
333
|
+
return {
|
334
|
+
name: this.name,
|
335
|
+
decl: loc,
|
336
|
+
loc,
|
337
|
+
line: this.startLine
|
338
|
+
}
|
339
|
+
}
|
340
|
+
};
|
341
|
+
return _function;
|
342
|
+
}
|
343
|
+
|
344
|
+
var line;
|
345
|
+
var hasRequiredLine;
|
346
|
+
|
347
|
+
function requireLine () {
|
348
|
+
if (hasRequiredLine) return line;
|
349
|
+
hasRequiredLine = 1;
|
350
|
+
line = class CovLine {
|
351
|
+
constructor (line, startCol, lineStr) {
|
352
|
+
this.line = line;
|
353
|
+
// note that startCol and endCol are absolute positions
|
354
|
+
// within a file, not relative to the line.
|
355
|
+
this.startCol = startCol;
|
356
|
+
|
357
|
+
// the line length itself does not include the newline characters,
|
358
|
+
// these are however taken into account when enumerating absolute offset.
|
359
|
+
const matchedNewLineChar = lineStr.match(/\r?\n$/u);
|
360
|
+
const newLineLength = matchedNewLineChar ? matchedNewLineChar[0].length : 0;
|
361
|
+
this.endCol = startCol + lineStr.length - newLineLength;
|
362
|
+
|
363
|
+
// we start with all lines having been executed, and work
|
364
|
+
// backwards zeroing out lines based on V8 output.
|
365
|
+
this.count = 1;
|
366
|
+
|
367
|
+
// set by source.js during parsing, if /* c8 ignore next */ is found.
|
368
|
+
this.ignore = false;
|
369
|
+
}
|
370
|
+
|
371
|
+
toIstanbul () {
|
372
|
+
return {
|
373
|
+
start: {
|
374
|
+
line: this.line,
|
375
|
+
column: 0
|
376
|
+
},
|
377
|
+
end: {
|
378
|
+
line: this.line,
|
379
|
+
column: this.endCol - this.startCol
|
380
|
+
}
|
381
|
+
}
|
382
|
+
}
|
383
|
+
};
|
384
|
+
return line;
|
385
|
+
}
|
386
|
+
|
387
|
+
var range = {};
|
388
|
+
|
389
|
+
/**
|
390
|
+
* ...something resembling a binary search, to find the lowest line within the range.
|
391
|
+
* And then you could break as soon as the line is longer than the range...
|
392
|
+
*/
|
393
|
+
|
394
|
+
var hasRequiredRange;
|
395
|
+
|
396
|
+
function requireRange () {
|
397
|
+
if (hasRequiredRange) return range;
|
398
|
+
hasRequiredRange = 1;
|
399
|
+
range.sliceRange = (lines, startCol, endCol, inclusive = false) => {
|
400
|
+
let start = 0;
|
401
|
+
let end = lines.length;
|
402
|
+
|
403
|
+
if (inclusive) {
|
404
|
+
// I consider this a temporary solution until I find an alternaive way to fix the "off by one issue"
|
405
|
+
--startCol;
|
406
|
+
}
|
407
|
+
|
408
|
+
while (start < end) {
|
409
|
+
let mid = (start + end) >> 1;
|
410
|
+
if (startCol >= lines[mid].endCol) {
|
411
|
+
start = mid + 1;
|
412
|
+
} else if (endCol < lines[mid].startCol) {
|
413
|
+
end = mid - 1;
|
414
|
+
} else {
|
415
|
+
end = mid;
|
416
|
+
while (mid >= 0 && startCol < lines[mid].endCol && endCol >= lines[mid].startCol) {
|
417
|
+
--mid;
|
418
|
+
}
|
419
|
+
start = mid + 1;
|
420
|
+
break
|
421
|
+
}
|
422
|
+
}
|
423
|
+
|
424
|
+
while (end < lines.length && startCol < lines[end].endCol && endCol >= lines[end].startCol) {
|
425
|
+
++end;
|
426
|
+
}
|
427
|
+
|
428
|
+
return lines.slice(start, end)
|
429
|
+
};
|
430
|
+
return range;
|
431
|
+
}
|
432
|
+
|
433
|
+
var traceMapping_umd = {exports: {}};
|
434
|
+
|
435
|
+
var sourcemapCodec_umd = {exports: {}};
|
436
|
+
|
437
|
+
var hasRequiredSourcemapCodec_umd;
|
438
|
+
|
439
|
+
function requireSourcemapCodec_umd () {
|
440
|
+
if (hasRequiredSourcemapCodec_umd) return sourcemapCodec_umd.exports;
|
441
|
+
hasRequiredSourcemapCodec_umd = 1;
|
442
|
+
(function (module, exports) {
|
443
|
+
(function (global, factory) {
|
444
|
+
factory(exports) ;
|
445
|
+
})(commonjsGlobal, (function (exports) {
|
446
|
+
const comma = ','.charCodeAt(0);
|
447
|
+
const semicolon = ';'.charCodeAt(0);
|
448
|
+
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
449
|
+
const intToChar = new Uint8Array(64); // 64 possible chars.
|
450
|
+
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
451
|
+
for (let i = 0; i < chars.length; i++) {
|
452
|
+
const c = chars.charCodeAt(i);
|
453
|
+
intToChar[i] = c;
|
454
|
+
charToInt[c] = i;
|
455
|
+
}
|
456
|
+
// Provide a fallback for older environments.
|
457
|
+
const td = typeof TextDecoder !== 'undefined'
|
458
|
+
? /* #__PURE__ */ new TextDecoder()
|
459
|
+
: typeof Buffer !== 'undefined'
|
460
|
+
? {
|
461
|
+
decode(buf) {
|
462
|
+
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
463
|
+
return out.toString();
|
464
|
+
},
|
465
|
+
}
|
466
|
+
: {
|
467
|
+
decode(buf) {
|
468
|
+
let out = '';
|
469
|
+
for (let i = 0; i < buf.length; i++) {
|
470
|
+
out += String.fromCharCode(buf[i]);
|
471
|
+
}
|
472
|
+
return out;
|
473
|
+
},
|
474
|
+
};
|
475
|
+
function decode(mappings) {
|
476
|
+
const state = new Int32Array(5);
|
477
|
+
const decoded = [];
|
478
|
+
let index = 0;
|
479
|
+
do {
|
480
|
+
const semi = indexOf(mappings, index);
|
481
|
+
const line = [];
|
482
|
+
let sorted = true;
|
483
|
+
let lastCol = 0;
|
484
|
+
state[0] = 0;
|
485
|
+
for (let i = index; i < semi; i++) {
|
486
|
+
let seg;
|
487
|
+
i = decodeInteger(mappings, i, state, 0); // genColumn
|
488
|
+
const col = state[0];
|
489
|
+
if (col < lastCol)
|
490
|
+
sorted = false;
|
491
|
+
lastCol = col;
|
492
|
+
if (hasMoreVlq(mappings, i, semi)) {
|
493
|
+
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
|
494
|
+
i = decodeInteger(mappings, i, state, 2); // sourceLine
|
495
|
+
i = decodeInteger(mappings, i, state, 3); // sourceColumn
|
496
|
+
if (hasMoreVlq(mappings, i, semi)) {
|
497
|
+
i = decodeInteger(mappings, i, state, 4); // namesIndex
|
498
|
+
seg = [col, state[1], state[2], state[3], state[4]];
|
499
|
+
}
|
500
|
+
else {
|
501
|
+
seg = [col, state[1], state[2], state[3]];
|
502
|
+
}
|
503
|
+
}
|
504
|
+
else {
|
505
|
+
seg = [col];
|
506
|
+
}
|
507
|
+
line.push(seg);
|
508
|
+
}
|
509
|
+
if (!sorted)
|
510
|
+
sort(line);
|
511
|
+
decoded.push(line);
|
512
|
+
index = semi + 1;
|
513
|
+
} while (index <= mappings.length);
|
514
|
+
return decoded;
|
515
|
+
}
|
516
|
+
function indexOf(mappings, index) {
|
517
|
+
const idx = mappings.indexOf(';', index);
|
518
|
+
return idx === -1 ? mappings.length : idx;
|
519
|
+
}
|
520
|
+
function decodeInteger(mappings, pos, state, j) {
|
521
|
+
let value = 0;
|
522
|
+
let shift = 0;
|
523
|
+
let integer = 0;
|
524
|
+
do {
|
525
|
+
const c = mappings.charCodeAt(pos++);
|
526
|
+
integer = charToInt[c];
|
527
|
+
value |= (integer & 31) << shift;
|
528
|
+
shift += 5;
|
529
|
+
} while (integer & 32);
|
530
|
+
const shouldNegate = value & 1;
|
531
|
+
value >>>= 1;
|
532
|
+
if (shouldNegate) {
|
533
|
+
value = -0x80000000 | -value;
|
534
|
+
}
|
535
|
+
state[j] += value;
|
536
|
+
return pos;
|
537
|
+
}
|
538
|
+
function hasMoreVlq(mappings, i, length) {
|
539
|
+
if (i >= length)
|
540
|
+
return false;
|
541
|
+
return mappings.charCodeAt(i) !== comma;
|
542
|
+
}
|
543
|
+
function sort(line) {
|
544
|
+
line.sort(sortComparator);
|
545
|
+
}
|
546
|
+
function sortComparator(a, b) {
|
547
|
+
return a[0] - b[0];
|
548
|
+
}
|
549
|
+
function encode(decoded) {
|
550
|
+
const state = new Int32Array(5);
|
551
|
+
const bufLength = 1024 * 16;
|
552
|
+
const subLength = bufLength - 36;
|
553
|
+
const buf = new Uint8Array(bufLength);
|
554
|
+
const sub = buf.subarray(0, subLength);
|
555
|
+
let pos = 0;
|
556
|
+
let out = '';
|
557
|
+
for (let i = 0; i < decoded.length; i++) {
|
558
|
+
const line = decoded[i];
|
559
|
+
if (i > 0) {
|
560
|
+
if (pos === bufLength) {
|
561
|
+
out += td.decode(buf);
|
562
|
+
pos = 0;
|
563
|
+
}
|
564
|
+
buf[pos++] = semicolon;
|
565
|
+
}
|
566
|
+
if (line.length === 0)
|
567
|
+
continue;
|
568
|
+
state[0] = 0;
|
569
|
+
for (let j = 0; j < line.length; j++) {
|
570
|
+
const segment = line[j];
|
571
|
+
// We can push up to 5 ints, each int can take at most 7 chars, and we
|
572
|
+
// may push a comma.
|
573
|
+
if (pos > subLength) {
|
574
|
+
out += td.decode(sub);
|
575
|
+
buf.copyWithin(0, subLength, pos);
|
576
|
+
pos -= subLength;
|
577
|
+
}
|
578
|
+
if (j > 0)
|
579
|
+
buf[pos++] = comma;
|
580
|
+
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
|
581
|
+
if (segment.length === 1)
|
582
|
+
continue;
|
583
|
+
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
|
584
|
+
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
|
585
|
+
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
|
586
|
+
if (segment.length === 4)
|
587
|
+
continue;
|
588
|
+
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
|
589
|
+
}
|
590
|
+
}
|
591
|
+
return out + td.decode(buf.subarray(0, pos));
|
592
|
+
}
|
593
|
+
function encodeInteger(buf, pos, state, segment, j) {
|
594
|
+
const next = segment[j];
|
595
|
+
let num = next - state[j];
|
596
|
+
state[j] = next;
|
597
|
+
num = num < 0 ? (-num << 1) | 1 : num << 1;
|
598
|
+
do {
|
599
|
+
let clamped = num & 0b011111;
|
600
|
+
num >>>= 5;
|
601
|
+
if (num > 0)
|
602
|
+
clamped |= 0b100000;
|
603
|
+
buf[pos++] = intToChar[clamped];
|
604
|
+
} while (num > 0);
|
605
|
+
return pos;
|
606
|
+
}
|
607
|
+
|
608
|
+
exports.decode = decode;
|
609
|
+
exports.encode = encode;
|
610
|
+
|
611
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
612
|
+
|
613
|
+
}));
|
614
|
+
|
615
|
+
} (sourcemapCodec_umd, sourcemapCodec_umd.exports));
|
616
|
+
return sourcemapCodec_umd.exports;
|
617
|
+
}
|
618
|
+
|
619
|
+
var resolveUri_umd = {exports: {}};
|
620
|
+
|
621
|
+
var hasRequiredResolveUri_umd;
|
622
|
+
|
623
|
+
function requireResolveUri_umd () {
|
624
|
+
if (hasRequiredResolveUri_umd) return resolveUri_umd.exports;
|
625
|
+
hasRequiredResolveUri_umd = 1;
|
626
|
+
(function (module, exports) {
|
627
|
+
(function (global, factory) {
|
628
|
+
module.exports = factory() ;
|
629
|
+
})(commonjsGlobal, (function () {
|
630
|
+
// Matches the scheme of a URL, eg "http://"
|
631
|
+
const schemeRegex = /^[\w+.-]+:\/\//;
|
632
|
+
/**
|
633
|
+
* Matches the parts of a URL:
|
634
|
+
* 1. Scheme, including ":", guaranteed.
|
635
|
+
* 2. User/password, including "@", optional.
|
636
|
+
* 3. Host, guaranteed.
|
637
|
+
* 4. Port, including ":", optional.
|
638
|
+
* 5. Path, including "/", optional.
|
639
|
+
* 6. Query, including "?", optional.
|
640
|
+
* 7. Hash, including "#", optional.
|
641
|
+
*/
|
642
|
+
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
643
|
+
/**
|
644
|
+
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
645
|
+
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
646
|
+
*
|
647
|
+
* 1. Host, optional.
|
648
|
+
* 2. Path, which may include "/", guaranteed.
|
649
|
+
* 3. Query, including "?", optional.
|
650
|
+
* 4. Hash, including "#", optional.
|
651
|
+
*/
|
652
|
+
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
653
|
+
var UrlType;
|
654
|
+
(function (UrlType) {
|
655
|
+
UrlType[UrlType["Empty"] = 1] = "Empty";
|
656
|
+
UrlType[UrlType["Hash"] = 2] = "Hash";
|
657
|
+
UrlType[UrlType["Query"] = 3] = "Query";
|
658
|
+
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
|
659
|
+
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
|
660
|
+
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
|
661
|
+
UrlType[UrlType["Absolute"] = 7] = "Absolute";
|
662
|
+
})(UrlType || (UrlType = {}));
|
663
|
+
function isAbsoluteUrl(input) {
|
664
|
+
return schemeRegex.test(input);
|
665
|
+
}
|
666
|
+
function isSchemeRelativeUrl(input) {
|
667
|
+
return input.startsWith('//');
|
668
|
+
}
|
669
|
+
function isAbsolutePath(input) {
|
670
|
+
return input.startsWith('/');
|
671
|
+
}
|
672
|
+
function isFileUrl(input) {
|
673
|
+
return input.startsWith('file:');
|
674
|
+
}
|
675
|
+
function isRelative(input) {
|
676
|
+
return /^[.?#]/.test(input);
|
677
|
+
}
|
678
|
+
function parseAbsoluteUrl(input) {
|
679
|
+
const match = urlRegex.exec(input);
|
680
|
+
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
681
|
+
}
|
682
|
+
function parseFileUrl(input) {
|
683
|
+
const match = fileRegex.exec(input);
|
684
|
+
const path = match[2];
|
685
|
+
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
686
|
+
}
|
687
|
+
function makeUrl(scheme, user, host, port, path, query, hash) {
|
688
|
+
return {
|
689
|
+
scheme,
|
690
|
+
user,
|
691
|
+
host,
|
692
|
+
port,
|
693
|
+
path,
|
694
|
+
query,
|
695
|
+
hash,
|
696
|
+
type: UrlType.Absolute,
|
697
|
+
};
|
698
|
+
}
|
699
|
+
function parseUrl(input) {
|
700
|
+
if (isSchemeRelativeUrl(input)) {
|
701
|
+
const url = parseAbsoluteUrl('http:' + input);
|
702
|
+
url.scheme = '';
|
703
|
+
url.type = UrlType.SchemeRelative;
|
704
|
+
return url;
|
705
|
+
}
|
706
|
+
if (isAbsolutePath(input)) {
|
707
|
+
const url = parseAbsoluteUrl('http://foo.com' + input);
|
708
|
+
url.scheme = '';
|
709
|
+
url.host = '';
|
710
|
+
url.type = UrlType.AbsolutePath;
|
711
|
+
return url;
|
712
|
+
}
|
713
|
+
if (isFileUrl(input))
|
714
|
+
return parseFileUrl(input);
|
715
|
+
if (isAbsoluteUrl(input))
|
716
|
+
return parseAbsoluteUrl(input);
|
717
|
+
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
718
|
+
url.scheme = '';
|
719
|
+
url.host = '';
|
720
|
+
url.type = input
|
721
|
+
? input.startsWith('?')
|
722
|
+
? UrlType.Query
|
723
|
+
: input.startsWith('#')
|
724
|
+
? UrlType.Hash
|
725
|
+
: UrlType.RelativePath
|
726
|
+
: UrlType.Empty;
|
727
|
+
return url;
|
728
|
+
}
|
729
|
+
function stripPathFilename(path) {
|
730
|
+
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
731
|
+
// paths. It's not a file, so we can't strip it.
|
732
|
+
if (path.endsWith('/..'))
|
733
|
+
return path;
|
734
|
+
const index = path.lastIndexOf('/');
|
735
|
+
return path.slice(0, index + 1);
|
736
|
+
}
|
737
|
+
function mergePaths(url, base) {
|
738
|
+
normalizePath(base, base.type);
|
739
|
+
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
740
|
+
// path).
|
741
|
+
if (url.path === '/') {
|
742
|
+
url.path = base.path;
|
743
|
+
}
|
744
|
+
else {
|
745
|
+
// Resolution happens relative to the base path's directory, not the file.
|
746
|
+
url.path = stripPathFilename(base.path) + url.path;
|
747
|
+
}
|
748
|
+
}
|
749
|
+
/**
|
750
|
+
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
751
|
+
* "foo/.". We need to normalize to a standard representation.
|
752
|
+
*/
|
753
|
+
function normalizePath(url, type) {
|
754
|
+
const rel = type <= UrlType.RelativePath;
|
755
|
+
const pieces = url.path.split('/');
|
756
|
+
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
757
|
+
// pieces[0] is an empty string.
|
758
|
+
let pointer = 1;
|
759
|
+
// Positive is the number of real directories we've output, used for popping a parent directory.
|
760
|
+
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
761
|
+
let positive = 0;
|
762
|
+
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
763
|
+
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
764
|
+
// real directory, we won't need to append, unless the other conditions happen again.
|
765
|
+
let addTrailingSlash = false;
|
766
|
+
for (let i = 1; i < pieces.length; i++) {
|
767
|
+
const piece = pieces[i];
|
768
|
+
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
769
|
+
if (!piece) {
|
770
|
+
addTrailingSlash = true;
|
771
|
+
continue;
|
772
|
+
}
|
773
|
+
// If we encounter a real directory, then we don't need to append anymore.
|
774
|
+
addTrailingSlash = false;
|
775
|
+
// A current directory, which we can always drop.
|
776
|
+
if (piece === '.')
|
777
|
+
continue;
|
778
|
+
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
779
|
+
// have an excess of parents, and we'll need to keep the "..".
|
780
|
+
if (piece === '..') {
|
781
|
+
if (positive) {
|
782
|
+
addTrailingSlash = true;
|
783
|
+
positive--;
|
784
|
+
pointer--;
|
785
|
+
}
|
786
|
+
else if (rel) {
|
787
|
+
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
788
|
+
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
789
|
+
pieces[pointer++] = piece;
|
790
|
+
}
|
791
|
+
continue;
|
792
|
+
}
|
793
|
+
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
794
|
+
// any popped or dropped directories.
|
795
|
+
pieces[pointer++] = piece;
|
796
|
+
positive++;
|
797
|
+
}
|
798
|
+
let path = '';
|
799
|
+
for (let i = 1; i < pointer; i++) {
|
800
|
+
path += '/' + pieces[i];
|
801
|
+
}
|
802
|
+
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
803
|
+
path += '/';
|
804
|
+
}
|
805
|
+
url.path = path;
|
806
|
+
}
|
807
|
+
/**
|
808
|
+
* Attempts to resolve `input` URL/path relative to `base`.
|
809
|
+
*/
|
810
|
+
function resolve(input, base) {
|
811
|
+
if (!input && !base)
|
812
|
+
return '';
|
813
|
+
const url = parseUrl(input);
|
814
|
+
let inputType = url.type;
|
815
|
+
if (base && inputType !== UrlType.Absolute) {
|
816
|
+
const baseUrl = parseUrl(base);
|
817
|
+
const baseType = baseUrl.type;
|
818
|
+
switch (inputType) {
|
819
|
+
case UrlType.Empty:
|
820
|
+
url.hash = baseUrl.hash;
|
821
|
+
// fall through
|
822
|
+
case UrlType.Hash:
|
823
|
+
url.query = baseUrl.query;
|
824
|
+
// fall through
|
825
|
+
case UrlType.Query:
|
826
|
+
case UrlType.RelativePath:
|
827
|
+
mergePaths(url, baseUrl);
|
828
|
+
// fall through
|
829
|
+
case UrlType.AbsolutePath:
|
830
|
+
// The host, user, and port are joined, you can't copy one without the others.
|
831
|
+
url.user = baseUrl.user;
|
832
|
+
url.host = baseUrl.host;
|
833
|
+
url.port = baseUrl.port;
|
834
|
+
// fall through
|
835
|
+
case UrlType.SchemeRelative:
|
836
|
+
// The input doesn't have a schema at least, so we need to copy at least that over.
|
837
|
+
url.scheme = baseUrl.scheme;
|
838
|
+
}
|
839
|
+
if (baseType > inputType)
|
840
|
+
inputType = baseType;
|
841
|
+
}
|
842
|
+
normalizePath(url, inputType);
|
843
|
+
const queryHash = url.query + url.hash;
|
844
|
+
switch (inputType) {
|
845
|
+
// This is impossible, because of the empty checks at the start of the function.
|
846
|
+
// case UrlType.Empty:
|
847
|
+
case UrlType.Hash:
|
848
|
+
case UrlType.Query:
|
849
|
+
return queryHash;
|
850
|
+
case UrlType.RelativePath: {
|
851
|
+
// The first char is always a "/", and we need it to be relative.
|
852
|
+
const path = url.path.slice(1);
|
853
|
+
if (!path)
|
854
|
+
return queryHash || '.';
|
855
|
+
if (isRelative(base || input) && !isRelative(path)) {
|
856
|
+
// If base started with a leading ".", or there is no base and input started with a ".",
|
857
|
+
// then we need to ensure that the relative path starts with a ".". We don't know if
|
858
|
+
// relative starts with a "..", though, so check before prepending.
|
859
|
+
return './' + path + queryHash;
|
860
|
+
}
|
861
|
+
return path + queryHash;
|
862
|
+
}
|
863
|
+
case UrlType.AbsolutePath:
|
864
|
+
return url.path + queryHash;
|
865
|
+
default:
|
866
|
+
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
867
|
+
}
|
868
|
+
}
|
869
|
+
|
870
|
+
return resolve;
|
871
|
+
|
872
|
+
}));
|
873
|
+
|
874
|
+
} (resolveUri_umd));
|
875
|
+
return resolveUri_umd.exports;
|
876
|
+
}
|
877
|
+
|
878
|
+
var hasRequiredTraceMapping_umd;
|
879
|
+
|
880
|
+
function requireTraceMapping_umd () {
|
881
|
+
if (hasRequiredTraceMapping_umd) return traceMapping_umd.exports;
|
882
|
+
hasRequiredTraceMapping_umd = 1;
|
883
|
+
(function (module, exports) {
|
884
|
+
(function (global, factory) {
|
885
|
+
factory(exports, requireSourcemapCodec_umd(), requireResolveUri_umd()) ;
|
886
|
+
})(commonjsGlobal, (function (exports, sourcemapCodec, resolveUri) {
|
887
|
+
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
888
|
+
|
889
|
+
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
|
890
|
+
|
891
|
+
function resolve(input, base) {
|
892
|
+
// The base is always treated as a directory, if it's not empty.
|
893
|
+
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
894
|
+
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
895
|
+
if (base && !base.endsWith('/'))
|
896
|
+
base += '/';
|
897
|
+
return resolveUri__default["default"](input, base);
|
898
|
+
}
|
899
|
+
|
900
|
+
/**
|
901
|
+
* Removes everything after the last "/", but leaves the slash.
|
902
|
+
*/
|
903
|
+
function stripFilename(path) {
|
904
|
+
if (!path)
|
905
|
+
return '';
|
906
|
+
const index = path.lastIndexOf('/');
|
907
|
+
return path.slice(0, index + 1);
|
908
|
+
}
|
909
|
+
|
910
|
+
const COLUMN = 0;
|
911
|
+
const SOURCES_INDEX = 1;
|
912
|
+
const SOURCE_LINE = 2;
|
913
|
+
const SOURCE_COLUMN = 3;
|
914
|
+
const NAMES_INDEX = 4;
|
915
|
+
const REV_GENERATED_LINE = 1;
|
916
|
+
const REV_GENERATED_COLUMN = 2;
|
917
|
+
|
918
|
+
function maybeSort(mappings, owned) {
|
919
|
+
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
920
|
+
if (unsortedIndex === mappings.length)
|
921
|
+
return mappings;
|
922
|
+
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
923
|
+
// not, we do not want to modify the consumer's input array.
|
924
|
+
if (!owned)
|
925
|
+
mappings = mappings.slice();
|
926
|
+
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
927
|
+
mappings[i] = sortSegments(mappings[i], owned);
|
928
|
+
}
|
929
|
+
return mappings;
|
930
|
+
}
|
931
|
+
function nextUnsortedSegmentLine(mappings, start) {
|
932
|
+
for (let i = start; i < mappings.length; i++) {
|
933
|
+
if (!isSorted(mappings[i]))
|
934
|
+
return i;
|
935
|
+
}
|
936
|
+
return mappings.length;
|
937
|
+
}
|
938
|
+
function isSorted(line) {
|
939
|
+
for (let j = 1; j < line.length; j++) {
|
940
|
+
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
941
|
+
return false;
|
942
|
+
}
|
943
|
+
}
|
944
|
+
return true;
|
945
|
+
}
|
946
|
+
function sortSegments(line, owned) {
|
947
|
+
if (!owned)
|
948
|
+
line = line.slice();
|
949
|
+
return line.sort(sortComparator);
|
950
|
+
}
|
951
|
+
function sortComparator(a, b) {
|
952
|
+
return a[COLUMN] - b[COLUMN];
|
953
|
+
}
|
954
|
+
|
955
|
+
let found = false;
|
956
|
+
/**
|
957
|
+
* A binary search implementation that returns the index if a match is found.
|
958
|
+
* If no match is found, then the left-index (the index associated with the item that comes just
|
959
|
+
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
960
|
+
* the next index:
|
961
|
+
*
|
962
|
+
* ```js
|
963
|
+
* const array = [1, 3];
|
964
|
+
* const needle = 2;
|
965
|
+
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
966
|
+
*
|
967
|
+
* assert.equal(index, 0);
|
968
|
+
* array.splice(index + 1, 0, needle);
|
969
|
+
* assert.deepEqual(array, [1, 2, 3]);
|
970
|
+
* ```
|
971
|
+
*/
|
972
|
+
function binarySearch(haystack, needle, low, high) {
|
973
|
+
while (low <= high) {
|
974
|
+
const mid = low + ((high - low) >> 1);
|
975
|
+
const cmp = haystack[mid][COLUMN] - needle;
|
976
|
+
if (cmp === 0) {
|
977
|
+
found = true;
|
978
|
+
return mid;
|
979
|
+
}
|
980
|
+
if (cmp < 0) {
|
981
|
+
low = mid + 1;
|
982
|
+
}
|
983
|
+
else {
|
984
|
+
high = mid - 1;
|
985
|
+
}
|
986
|
+
}
|
987
|
+
found = false;
|
988
|
+
return low - 1;
|
989
|
+
}
|
990
|
+
function upperBound(haystack, needle, index) {
|
991
|
+
for (let i = index + 1; i < haystack.length; index = i++) {
|
992
|
+
if (haystack[i][COLUMN] !== needle)
|
993
|
+
break;
|
994
|
+
}
|
995
|
+
return index;
|
996
|
+
}
|
997
|
+
function lowerBound(haystack, needle, index) {
|
998
|
+
for (let i = index - 1; i >= 0; index = i--) {
|
999
|
+
if (haystack[i][COLUMN] !== needle)
|
1000
|
+
break;
|
1001
|
+
}
|
1002
|
+
return index;
|
1003
|
+
}
|
1004
|
+
function memoizedState() {
|
1005
|
+
return {
|
1006
|
+
lastKey: -1,
|
1007
|
+
lastNeedle: -1,
|
1008
|
+
lastIndex: -1,
|
1009
|
+
};
|
1010
|
+
}
|
1011
|
+
/**
|
1012
|
+
* This overly complicated beast is just to record the last tested line/column and the resulting
|
1013
|
+
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
1014
|
+
*/
|
1015
|
+
function memoizedBinarySearch(haystack, needle, state, key) {
|
1016
|
+
const { lastKey, lastNeedle, lastIndex } = state;
|
1017
|
+
let low = 0;
|
1018
|
+
let high = haystack.length - 1;
|
1019
|
+
if (key === lastKey) {
|
1020
|
+
if (needle === lastNeedle) {
|
1021
|
+
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
1022
|
+
return lastIndex;
|
1023
|
+
}
|
1024
|
+
if (needle >= lastNeedle) {
|
1025
|
+
// lastIndex may be -1 if the previous needle was not found.
|
1026
|
+
low = lastIndex === -1 ? 0 : lastIndex;
|
1027
|
+
}
|
1028
|
+
else {
|
1029
|
+
high = lastIndex;
|
1030
|
+
}
|
1031
|
+
}
|
1032
|
+
state.lastKey = key;
|
1033
|
+
state.lastNeedle = needle;
|
1034
|
+
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
1035
|
+
}
|
1036
|
+
|
1037
|
+
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
1038
|
+
// of generated line/column.
|
1039
|
+
function buildBySources(decoded, memos) {
|
1040
|
+
const sources = memos.map(buildNullArray);
|
1041
|
+
for (let i = 0; i < decoded.length; i++) {
|
1042
|
+
const line = decoded[i];
|
1043
|
+
for (let j = 0; j < line.length; j++) {
|
1044
|
+
const seg = line[j];
|
1045
|
+
if (seg.length === 1)
|
1046
|
+
continue;
|
1047
|
+
const sourceIndex = seg[SOURCES_INDEX];
|
1048
|
+
const sourceLine = seg[SOURCE_LINE];
|
1049
|
+
const sourceColumn = seg[SOURCE_COLUMN];
|
1050
|
+
const originalSource = sources[sourceIndex];
|
1051
|
+
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
1052
|
+
const memo = memos[sourceIndex];
|
1053
|
+
// The binary search either found a match, or it found the left-index just before where the
|
1054
|
+
// segment should go. Either way, we want to insert after that. And there may be multiple
|
1055
|
+
// generated segments associated with an original location, so there may need to move several
|
1056
|
+
// indexes before we find where we need to insert.
|
1057
|
+
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
1058
|
+
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
1059
|
+
}
|
1060
|
+
}
|
1061
|
+
return sources;
|
1062
|
+
}
|
1063
|
+
function insert(array, index, value) {
|
1064
|
+
for (let i = array.length; i > index; i--) {
|
1065
|
+
array[i] = array[i - 1];
|
1066
|
+
}
|
1067
|
+
array[index] = value;
|
1068
|
+
}
|
1069
|
+
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
1070
|
+
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
1071
|
+
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
1072
|
+
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
1073
|
+
// order when iterating with for-in.
|
1074
|
+
function buildNullArray() {
|
1075
|
+
return { __proto__: null };
|
1076
|
+
}
|
1077
|
+
|
1078
|
+
const AnyMap = function (map, mapUrl) {
|
1079
|
+
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
1080
|
+
if (!('sections' in parsed))
|
1081
|
+
return new TraceMap(parsed, mapUrl);
|
1082
|
+
const mappings = [];
|
1083
|
+
const sources = [];
|
1084
|
+
const sourcesContent = [];
|
1085
|
+
const names = [];
|
1086
|
+
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
|
1087
|
+
const joined = {
|
1088
|
+
version: 3,
|
1089
|
+
file: parsed.file,
|
1090
|
+
names,
|
1091
|
+
sources,
|
1092
|
+
sourcesContent,
|
1093
|
+
mappings,
|
1094
|
+
};
|
1095
|
+
return exports.presortedDecodedMap(joined);
|
1096
|
+
};
|
1097
|
+
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
|
1098
|
+
const { sections } = input;
|
1099
|
+
for (let i = 0; i < sections.length; i++) {
|
1100
|
+
const { map, offset } = sections[i];
|
1101
|
+
let sl = stopLine;
|
1102
|
+
let sc = stopColumn;
|
1103
|
+
if (i + 1 < sections.length) {
|
1104
|
+
const nextOffset = sections[i + 1].offset;
|
1105
|
+
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
1106
|
+
if (sl === stopLine) {
|
1107
|
+
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
1108
|
+
}
|
1109
|
+
else if (sl < stopLine) {
|
1110
|
+
sc = columnOffset + nextOffset.column;
|
1111
|
+
}
|
1112
|
+
}
|
1113
|
+
addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
1114
|
+
}
|
1115
|
+
}
|
1116
|
+
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
|
1117
|
+
if ('sections' in input)
|
1118
|
+
return recurse(...arguments);
|
1119
|
+
const map = new TraceMap(input, mapUrl);
|
1120
|
+
const sourcesOffset = sources.length;
|
1121
|
+
const namesOffset = names.length;
|
1122
|
+
const decoded = exports.decodedMappings(map);
|
1123
|
+
const { resolvedSources, sourcesContent: contents } = map;
|
1124
|
+
append(sources, resolvedSources);
|
1125
|
+
append(names, map.names);
|
1126
|
+
if (contents)
|
1127
|
+
append(sourcesContent, contents);
|
1128
|
+
else
|
1129
|
+
for (let i = 0; i < resolvedSources.length; i++)
|
1130
|
+
sourcesContent.push(null);
|
1131
|
+
for (let i = 0; i < decoded.length; i++) {
|
1132
|
+
const lineI = lineOffset + i;
|
1133
|
+
// We can only add so many lines before we step into the range that the next section's map
|
1134
|
+
// controls. When we get to the last line, then we'll start checking the segments to see if
|
1135
|
+
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
1136
|
+
// still need to check that we don't overstep lines, too.
|
1137
|
+
if (lineI > stopLine)
|
1138
|
+
return;
|
1139
|
+
// The out line may already exist in mappings (if we're continuing the line started by a
|
1140
|
+
// previous section). Or, we may have jumped ahead several lines to start this section.
|
1141
|
+
const out = getLine(mappings, lineI);
|
1142
|
+
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
1143
|
+
// map can be multiple lines), it doesn't.
|
1144
|
+
const cOffset = i === 0 ? columnOffset : 0;
|
1145
|
+
const line = decoded[i];
|
1146
|
+
for (let j = 0; j < line.length; j++) {
|
1147
|
+
const seg = line[j];
|
1148
|
+
const column = cOffset + seg[COLUMN];
|
1149
|
+
// If this segment steps into the column range that the next section's map controls, we need
|
1150
|
+
// to stop early.
|
1151
|
+
if (lineI === stopLine && column >= stopColumn)
|
1152
|
+
return;
|
1153
|
+
if (seg.length === 1) {
|
1154
|
+
out.push([column]);
|
1155
|
+
continue;
|
1156
|
+
}
|
1157
|
+
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
1158
|
+
const sourceLine = seg[SOURCE_LINE];
|
1159
|
+
const sourceColumn = seg[SOURCE_COLUMN];
|
1160
|
+
out.push(seg.length === 4
|
1161
|
+
? [column, sourcesIndex, sourceLine, sourceColumn]
|
1162
|
+
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
1163
|
+
}
|
1164
|
+
}
|
1165
|
+
}
|
1166
|
+
function append(arr, other) {
|
1167
|
+
for (let i = 0; i < other.length; i++)
|
1168
|
+
arr.push(other[i]);
|
1169
|
+
}
|
1170
|
+
function getLine(arr, index) {
|
1171
|
+
for (let i = arr.length; i <= index; i++)
|
1172
|
+
arr[i] = [];
|
1173
|
+
return arr[index];
|
1174
|
+
}
|
1175
|
+
|
1176
|
+
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
1177
|
+
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
1178
|
+
const LEAST_UPPER_BOUND = -1;
|
1179
|
+
const GREATEST_LOWER_BOUND = 1;
|
1180
|
+
/**
|
1181
|
+
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
1182
|
+
*/
|
1183
|
+
exports.encodedMappings = void 0;
|
1184
|
+
/**
|
1185
|
+
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
1186
|
+
*/
|
1187
|
+
exports.decodedMappings = void 0;
|
1188
|
+
/**
|
1189
|
+
* A low-level API to find the segment associated with a generated line/column (think, from a
|
1190
|
+
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
1191
|
+
*/
|
1192
|
+
exports.traceSegment = void 0;
|
1193
|
+
/**
|
1194
|
+
* A higher-level API to find the source/line/column associated with a generated line/column
|
1195
|
+
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
1196
|
+
* `source-map` library.
|
1197
|
+
*/
|
1198
|
+
exports.originalPositionFor = void 0;
|
1199
|
+
/**
|
1200
|
+
* Finds the generated line/column position of the provided source/line/column source position.
|
1201
|
+
*/
|
1202
|
+
exports.generatedPositionFor = void 0;
|
1203
|
+
/**
|
1204
|
+
* Finds all generated line/column positions of the provided source/line/column source position.
|
1205
|
+
*/
|
1206
|
+
exports.allGeneratedPositionsFor = void 0;
|
1207
|
+
/**
|
1208
|
+
* Iterates each mapping in generated position order.
|
1209
|
+
*/
|
1210
|
+
exports.eachMapping = void 0;
|
1211
|
+
/**
|
1212
|
+
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
1213
|
+
*/
|
1214
|
+
exports.sourceContentFor = void 0;
|
1215
|
+
/**
|
1216
|
+
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
1217
|
+
* maps.
|
1218
|
+
*/
|
1219
|
+
exports.presortedDecodedMap = void 0;
|
1220
|
+
/**
|
1221
|
+
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
1222
|
+
* a sourcemap, or to JSON.stringify.
|
1223
|
+
*/
|
1224
|
+
exports.decodedMap = void 0;
|
1225
|
+
/**
|
1226
|
+
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
1227
|
+
* a sourcemap, or to JSON.stringify.
|
1228
|
+
*/
|
1229
|
+
exports.encodedMap = void 0;
|
1230
|
+
class TraceMap {
|
1231
|
+
constructor(map, mapUrl) {
|
1232
|
+
const isString = typeof map === 'string';
|
1233
|
+
if (!isString && map._decodedMemo)
|
1234
|
+
return map;
|
1235
|
+
const parsed = (isString ? JSON.parse(map) : map);
|
1236
|
+
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
1237
|
+
this.version = version;
|
1238
|
+
this.file = file;
|
1239
|
+
this.names = names || [];
|
1240
|
+
this.sourceRoot = sourceRoot;
|
1241
|
+
this.sources = sources;
|
1242
|
+
this.sourcesContent = sourcesContent;
|
1243
|
+
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
1244
|
+
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
1245
|
+
const { mappings } = parsed;
|
1246
|
+
if (typeof mappings === 'string') {
|
1247
|
+
this._encoded = mappings;
|
1248
|
+
this._decoded = undefined;
|
1249
|
+
}
|
1250
|
+
else {
|
1251
|
+
this._encoded = undefined;
|
1252
|
+
this._decoded = maybeSort(mappings, isString);
|
1253
|
+
}
|
1254
|
+
this._decodedMemo = memoizedState();
|
1255
|
+
this._bySources = undefined;
|
1256
|
+
this._bySourceMemos = undefined;
|
1257
|
+
}
|
1258
|
+
}
|
1259
|
+
(() => {
|
1260
|
+
exports.encodedMappings = (map) => {
|
1261
|
+
var _a;
|
1262
|
+
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
|
1263
|
+
};
|
1264
|
+
exports.decodedMappings = (map) => {
|
1265
|
+
return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
|
1266
|
+
};
|
1267
|
+
exports.traceSegment = (map, line, column) => {
|
1268
|
+
const decoded = exports.decodedMappings(map);
|
1269
|
+
// It's common for parent source maps to have pointers to lines that have no
|
1270
|
+
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
1271
|
+
if (line >= decoded.length)
|
1272
|
+
return null;
|
1273
|
+
const segments = decoded[line];
|
1274
|
+
const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
1275
|
+
return index === -1 ? null : segments[index];
|
1276
|
+
};
|
1277
|
+
exports.originalPositionFor = (map, { line, column, bias }) => {
|
1278
|
+
line--;
|
1279
|
+
if (line < 0)
|
1280
|
+
throw new Error(LINE_GTR_ZERO);
|
1281
|
+
if (column < 0)
|
1282
|
+
throw new Error(COL_GTR_EQ_ZERO);
|
1283
|
+
const decoded = exports.decodedMappings(map);
|
1284
|
+
// It's common for parent source maps to have pointers to lines that have no
|
1285
|
+
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
1286
|
+
if (line >= decoded.length)
|
1287
|
+
return OMapping(null, null, null, null);
|
1288
|
+
const segments = decoded[line];
|
1289
|
+
const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
1290
|
+
if (index === -1)
|
1291
|
+
return OMapping(null, null, null, null);
|
1292
|
+
const segment = segments[index];
|
1293
|
+
if (segment.length === 1)
|
1294
|
+
return OMapping(null, null, null, null);
|
1295
|
+
const { names, resolvedSources } = map;
|
1296
|
+
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
1297
|
+
};
|
1298
|
+
exports.allGeneratedPositionsFor = (map, { source, line, column, bias }) => {
|
1299
|
+
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
1300
|
+
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
1301
|
+
};
|
1302
|
+
exports.generatedPositionFor = (map, { source, line, column, bias }) => {
|
1303
|
+
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
1304
|
+
};
|
1305
|
+
exports.eachMapping = (map, cb) => {
|
1306
|
+
const decoded = exports.decodedMappings(map);
|
1307
|
+
const { names, resolvedSources } = map;
|
1308
|
+
for (let i = 0; i < decoded.length; i++) {
|
1309
|
+
const line = decoded[i];
|
1310
|
+
for (let j = 0; j < line.length; j++) {
|
1311
|
+
const seg = line[j];
|
1312
|
+
const generatedLine = i + 1;
|
1313
|
+
const generatedColumn = seg[0];
|
1314
|
+
let source = null;
|
1315
|
+
let originalLine = null;
|
1316
|
+
let originalColumn = null;
|
1317
|
+
let name = null;
|
1318
|
+
if (seg.length !== 1) {
|
1319
|
+
source = resolvedSources[seg[1]];
|
1320
|
+
originalLine = seg[2] + 1;
|
1321
|
+
originalColumn = seg[3];
|
1322
|
+
}
|
1323
|
+
if (seg.length === 5)
|
1324
|
+
name = names[seg[4]];
|
1325
|
+
cb({
|
1326
|
+
generatedLine,
|
1327
|
+
generatedColumn,
|
1328
|
+
source,
|
1329
|
+
originalLine,
|
1330
|
+
originalColumn,
|
1331
|
+
name,
|
1332
|
+
});
|
1333
|
+
}
|
1334
|
+
}
|
1335
|
+
};
|
1336
|
+
exports.sourceContentFor = (map, source) => {
|
1337
|
+
const { sources, resolvedSources, sourcesContent } = map;
|
1338
|
+
if (sourcesContent == null)
|
1339
|
+
return null;
|
1340
|
+
let index = sources.indexOf(source);
|
1341
|
+
if (index === -1)
|
1342
|
+
index = resolvedSources.indexOf(source);
|
1343
|
+
return index === -1 ? null : sourcesContent[index];
|
1344
|
+
};
|
1345
|
+
exports.presortedDecodedMap = (map, mapUrl) => {
|
1346
|
+
const tracer = new TraceMap(clone(map, []), mapUrl);
|
1347
|
+
tracer._decoded = map.mappings;
|
1348
|
+
return tracer;
|
1349
|
+
};
|
1350
|
+
exports.decodedMap = (map) => {
|
1351
|
+
return clone(map, exports.decodedMappings(map));
|
1352
|
+
};
|
1353
|
+
exports.encodedMap = (map) => {
|
1354
|
+
return clone(map, exports.encodedMappings(map));
|
1355
|
+
};
|
1356
|
+
function generatedPosition(map, source, line, column, bias, all) {
|
1357
|
+
line--;
|
1358
|
+
if (line < 0)
|
1359
|
+
throw new Error(LINE_GTR_ZERO);
|
1360
|
+
if (column < 0)
|
1361
|
+
throw new Error(COL_GTR_EQ_ZERO);
|
1362
|
+
const { sources, resolvedSources } = map;
|
1363
|
+
let sourceIndex = sources.indexOf(source);
|
1364
|
+
if (sourceIndex === -1)
|
1365
|
+
sourceIndex = resolvedSources.indexOf(source);
|
1366
|
+
if (sourceIndex === -1)
|
1367
|
+
return all ? [] : GMapping(null, null);
|
1368
|
+
const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
1369
|
+
const segments = generated[sourceIndex][line];
|
1370
|
+
if (segments == null)
|
1371
|
+
return all ? [] : GMapping(null, null);
|
1372
|
+
const memo = map._bySourceMemos[sourceIndex];
|
1373
|
+
if (all)
|
1374
|
+
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
1375
|
+
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
1376
|
+
if (index === -1)
|
1377
|
+
return GMapping(null, null);
|
1378
|
+
const segment = segments[index];
|
1379
|
+
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
1380
|
+
}
|
1381
|
+
})();
|
1382
|
+
function clone(map, mappings) {
|
1383
|
+
return {
|
1384
|
+
version: map.version,
|
1385
|
+
file: map.file,
|
1386
|
+
names: map.names,
|
1387
|
+
sourceRoot: map.sourceRoot,
|
1388
|
+
sources: map.sources,
|
1389
|
+
sourcesContent: map.sourcesContent,
|
1390
|
+
mappings,
|
1391
|
+
};
|
1392
|
+
}
|
1393
|
+
function OMapping(source, line, column, name) {
|
1394
|
+
return { source, line, column, name };
|
1395
|
+
}
|
1396
|
+
function GMapping(line, column) {
|
1397
|
+
return { line, column };
|
1398
|
+
}
|
1399
|
+
function traceSegmentInternal(segments, memo, line, column, bias) {
|
1400
|
+
let index = memoizedBinarySearch(segments, column, memo, line);
|
1401
|
+
if (found) {
|
1402
|
+
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
1403
|
+
}
|
1404
|
+
else if (bias === LEAST_UPPER_BOUND)
|
1405
|
+
index++;
|
1406
|
+
if (index === -1 || index === segments.length)
|
1407
|
+
return -1;
|
1408
|
+
return index;
|
1409
|
+
}
|
1410
|
+
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
1411
|
+
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
1412
|
+
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
1413
|
+
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
1414
|
+
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
1415
|
+
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
1416
|
+
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
1417
|
+
// match LEAST_UPPER_BOUND.
|
1418
|
+
if (!found && bias === LEAST_UPPER_BOUND)
|
1419
|
+
min++;
|
1420
|
+
if (min === -1 || min === segments.length)
|
1421
|
+
return [];
|
1422
|
+
// We may have found the segment that started at an earlier column. If this is the case, then we
|
1423
|
+
// need to slice all generated segments that match _that_ column, because all such segments span
|
1424
|
+
// to our desired column.
|
1425
|
+
const matchedColumn = found ? column : segments[min][COLUMN];
|
1426
|
+
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
1427
|
+
if (!found)
|
1428
|
+
min = lowerBound(segments, matchedColumn, min);
|
1429
|
+
const max = upperBound(segments, matchedColumn, min);
|
1430
|
+
const result = [];
|
1431
|
+
for (; min <= max; min++) {
|
1432
|
+
const segment = segments[min];
|
1433
|
+
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
1434
|
+
}
|
1435
|
+
return result;
|
1436
|
+
}
|
1437
|
+
|
1438
|
+
exports.AnyMap = AnyMap;
|
1439
|
+
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
|
1440
|
+
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
|
1441
|
+
exports.TraceMap = TraceMap;
|
1442
|
+
|
1443
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
1444
|
+
|
1445
|
+
}));
|
1446
|
+
|
1447
|
+
} (traceMapping_umd, traceMapping_umd.exports));
|
1448
|
+
return traceMapping_umd.exports;
|
1449
|
+
}
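For context, the UMD bundle closed out above appears to be @jridgewell/trace-mapping, whose TraceMap, originalPositionFor, generatedPositionFor and eachMapping exports the bundled v8-to-istanbul code consumes further down. A minimal usage sketch, assuming a parsed source map object rawMap that is not taken from this package:

const { TraceMap, originalPositionFor, generatedPositionFor, LEAST_UPPER_BOUND } = require('@jridgewell/trace-mapping');

const tracer = new TraceMap(rawMap);

// generated position -> original position (line is 1-based, column is 0-based)
const original = originalPositionFor(tracer, { line: 1, column: 5 });

// original position -> generated position, biased to the next mapping when
// there is no exact match (assumes a mapping was actually found above)
const generated = generatedPositionFor(tracer, {
  source: original.source,
  line: original.line,
  column: original.column,
  bias: LEAST_UPPER_BOUND
});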
|
1450
|
+
|
1451
|
+
var source;
|
1452
|
+
var hasRequiredSource;
|
1453
|
+
|
1454
|
+
function requireSource () {
|
1455
|
+
if (hasRequiredSource) return source;
|
1456
|
+
hasRequiredSource = 1;
|
1457
|
+
// Patch applied: https://github.com/istanbuljs/v8-to-istanbul/pull/244
|
1458
|
+
const CovLine = requireLine();
|
1459
|
+
const { sliceRange } = requireRange();
|
1460
|
+
const { originalPositionFor, generatedPositionFor, eachMapping, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND } = requireTraceMapping_umd();
|
1461
|
+
|
1462
|
+
source = class CovSource {
|
1463
|
+
constructor (sourceRaw, wrapperLength, traceMap) {
|
1464
|
+
sourceRaw = sourceRaw ? sourceRaw.trimEnd() : '';
|
1465
|
+
this.lines = [];
|
1466
|
+
this.eof = sourceRaw.length;
|
1467
|
+
this.shebangLength = getShebangLength(sourceRaw);
|
1468
|
+
this.wrapperLength = wrapperLength - this.shebangLength;
|
1469
|
+
this._buildLines(sourceRaw, traceMap);
|
1470
|
+
}
|
1471
|
+
|
1472
|
+
_buildLines (source, traceMap) {
|
1473
|
+
let position = 0;
|
1474
|
+
let ignoreCount = 0;
|
1475
|
+
let ignoreAll = false;
|
1476
|
+
const linesToCover = traceMap && this._parseLinesToCover(traceMap);
|
1477
|
+
|
1478
|
+
for (const [i, lineStr] of source.split(/(?<=\r?\n)/u).entries()) {
|
1479
|
+
const lineNumber = i + 1;
|
1480
|
+
const line = new CovLine(lineNumber, position, lineStr);
|
1481
|
+
|
1482
|
+
if (linesToCover && !linesToCover.has(lineNumber)) {
|
1483
|
+
line.ignore = true;
|
1484
|
+
}
|
1485
|
+
|
1486
|
+
if (ignoreCount > 0) {
|
1487
|
+
line.ignore = true;
|
1488
|
+
ignoreCount--;
|
1489
|
+
} else if (ignoreAll) {
|
1490
|
+
line.ignore = true;
|
1491
|
+
}
|
1492
|
+
this.lines.push(line);
|
1493
|
+
position += lineStr.length;
|
1494
|
+
|
1495
|
+
const ignoreToken = this._parseIgnore(lineStr);
|
1496
|
+
if (!ignoreToken) continue
|
1497
|
+
|
1498
|
+
line.ignore = true;
|
1499
|
+
if (ignoreToken.count !== undefined) {
|
1500
|
+
ignoreCount = ignoreToken.count;
|
1501
|
+
}
|
1502
|
+
if (ignoreToken.start || ignoreToken.stop) {
|
1503
|
+
ignoreAll = ignoreToken.start;
|
1504
|
+
ignoreCount = 0;
|
1505
|
+
}
|
1506
|
+
}
|
1507
|
+
}
|
1508
|
+
|
1509
|
+
/**
|
1510
|
+
* Parses for comments:
|
1511
|
+
* c8 ignore next
|
1512
|
+
* c8 ignore next 3
|
1513
|
+
* c8 ignore start
|
1514
|
+
* c8 ignore stop
|
1515
|
+
* And equivalent ones for v8, e.g. v8 ignore next.
|
1516
|
+
* @param {string} lineStr
|
1517
|
+
* @return {{count?: number, start?: boolean, stop?: boolean}|undefined}
|
1518
|
+
*/
|
1519
|
+
_parseIgnore (lineStr) {
|
1520
|
+
const testIgnoreNextLines = lineStr.match(/^\W*\/\* [c|v]8 ignore next (?<count>[0-9]+)/);
|
1521
|
+
if (testIgnoreNextLines) {
|
1522
|
+
return { count: Number(testIgnoreNextLines.groups.count) }
|
1523
|
+
}
|
1524
|
+
|
1525
|
+
// Check if comment is on its own line.
|
1526
|
+
if (lineStr.match(/^\W*\/\* [c|v]8 ignore next/)) {
|
1527
|
+
return { count: 1 }
|
1528
|
+
}
|
1529
|
+
|
1530
|
+
if (lineStr.match(/\/\* [c|v]8 ignore next/)) {
|
1531
|
+
// Won't ignore successive lines, but the current line will be ignored.
|
1532
|
+
return { count: 0 }
|
1533
|
+
}
|
1534
|
+
|
1535
|
+
const testIgnoreStartStop = lineStr.match(/\/\* [c|v]8 ignore (?<mode>start|stop)/);
|
1536
|
+
if (testIgnoreStartStop) {
|
1537
|
+
if (testIgnoreStartStop.groups.mode === 'start') return { start: true }
|
1538
|
+
if (testIgnoreStartStop.groups.mode === 'stop') return { stop: true }
|
1539
|
+
}
|
1540
|
+
}
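The parser above recognizes c8- and v8-style ignore hints embedded in comments. A short sketch of source code exercising each form described in the JSDoc (illustrative only, not from the package):

/* c8 ignore next */
if (process.platform === 'win32') useWindowsPaths();

/* v8 ignore next 3 */
function debugOnly () {
  console.log('excluded from coverage');
}

/* c8 ignore start */
const legacyShim = () => {};
/* c8 ignore stop */

An inline form such as doWork(); /* c8 ignore next */ only ignores the line it sits on (the count: 0 branch above), while the standalone comment line also ignores the line that follows it (count: 1).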
|
1541
|
+
|
1542
|
+
// given a start column and end column in absolute offsets within
|
1543
|
+
// a source file (0 - EOF), returns the relative line column positions.
|
1544
|
+
offsetToOriginalRelative (sourceMap, startCol, endCol) {
|
1545
|
+
const lines = sliceRange(this.lines, startCol, endCol, true);
|
1546
|
+
if (!lines.length) return {}
|
1547
|
+
|
1548
|
+
const start = originalPositionTryBoth(
|
1549
|
+
sourceMap,
|
1550
|
+
lines[0].line,
|
1551
|
+
Math.max(0, startCol - lines[0].startCol)
|
1552
|
+
);
|
1553
|
+
if (!(start && start.source)) {
|
1554
|
+
return {}
|
1555
|
+
}
|
1556
|
+
|
1557
|
+
let end = originalEndPositionFor(
|
1558
|
+
sourceMap,
|
1559
|
+
lines[lines.length - 1].line,
|
1560
|
+
endCol - lines[lines.length - 1].startCol
|
1561
|
+
);
|
1562
|
+
if (!(end && end.source)) {
|
1563
|
+
return {}
|
1564
|
+
}
|
1565
|
+
|
1566
|
+
if (start.source !== end.source) {
|
1567
|
+
return {}
|
1568
|
+
}
|
1569
|
+
|
1570
|
+
if (start.line === end.line && start.column === end.column) {
|
1571
|
+
end = originalPositionFor(sourceMap, {
|
1572
|
+
line: lines[lines.length - 1].line,
|
1573
|
+
column: endCol - lines[lines.length - 1].startCol,
|
1574
|
+
bias: LEAST_UPPER_BOUND
|
1575
|
+
});
|
1576
|
+
end.column -= 1;
|
1577
|
+
}
|
1578
|
+
|
1579
|
+
return {
|
1580
|
+
source: start.source,
|
1581
|
+
startLine: start.line,
|
1582
|
+
relStartCol: start.column,
|
1583
|
+
endLine: end.line,
|
1584
|
+
relEndCol: end.column
|
1585
|
+
}
|
1586
|
+
}
|
1587
|
+
|
1588
|
+
relativeToOffset (line, relCol) {
|
1589
|
+
line = Math.max(line, 1);
|
1590
|
+
if (this.lines[line - 1] === undefined) return this.eof
|
1591
|
+
return Math.min(this.lines[line - 1].startCol + relCol, this.lines[line - 1].endCol)
|
1592
|
+
}
|
1593
|
+
|
1594
|
+
_parseLinesToCover (traceMap) {
|
1595
|
+
const linesToCover = new Set();
|
1596
|
+
|
1597
|
+
eachMapping(traceMap, (mapping) => {
|
1598
|
+
if (mapping.originalLine !== null) {
|
1599
|
+
linesToCover.add(mapping.originalLine);
|
1600
|
+
}
|
1601
|
+
});
|
1602
|
+
|
1603
|
+
return linesToCover
|
1604
|
+
}
|
1605
|
+
};
|
1606
|
+
|
1607
|
+
// this implementation is pulled over from istanbul-lib-sourcemap:
|
1608
|
+
// https://github.com/istanbuljs/istanbuljs/blob/master/packages/istanbul-lib-source-maps/lib/get-mapping.js
|
1609
|
+
//
|
1610
|
+
/**
|
1611
|
+
* AST ranges are inclusive for start positions and exclusive for end positions.
|
1612
|
+
* Source maps are also logically ranges over text, though interacting with
|
1613
|
+
* them is generally achieved by working with explicit positions.
|
1614
|
+
*
|
1615
|
+
* When finding the _end_ location of an AST item, the range behavior is
|
1616
|
+
* important because what we're asking for is the _end_ of whatever range
|
1617
|
+
* corresponds to the end location we seek.
|
1618
|
+
*
|
1619
|
+
* This boils down to the following steps, conceptually, though the source-map
|
1620
|
+
* library doesn't expose primitives to do this nicely:
|
1621
|
+
*
|
1622
|
+
* 1. Find the range on the generated file that ends at, or exclusively
|
1623
|
+
* contains the end position of the AST node.
|
1624
|
+
* 2. Find the range on the original file that corresponds to
|
1625
|
+
* that generated range.
|
1626
|
+
* 3. Find the _end_ location of that original range.
|
1627
|
+
*/
|
1628
|
+
function originalEndPositionFor (sourceMap, line, column) {
|
1629
|
+
// Given the generated location, find the original location of the mapping
|
1630
|
+
// that corresponds to a range on the generated file that overlaps the
|
1631
|
+
// generated file end location. Note however that this position on its
|
1632
|
+
// own is not useful because it is the position of the _start_ of the range
|
1633
|
+
// on the original file, and we want the _end_ of the range.
|
1634
|
+
const beforeEndMapping = originalPositionTryBoth(
|
1635
|
+
sourceMap,
|
1636
|
+
line,
|
1637
|
+
Math.max(column - 1, 1)
|
1638
|
+
);
|
1639
|
+
|
1640
|
+
if (beforeEndMapping.source === null) {
|
1641
|
+
return null
|
1642
|
+
}
|
1643
|
+
|
1644
|
+
// Convert that original position back to a generated one, with a bump
|
1645
|
+
// to the right, and a rightward bias. Since 'generatedPositionFor' searches
|
1646
|
+
// for mappings in the original-order sorted list, this will find the
|
1647
|
+
// mapping that corresponds to the one immediately after the
|
1648
|
+
// beforeEndMapping mapping.
|
1649
|
+
const afterEndMapping = generatedPositionFor(sourceMap, {
|
1650
|
+
source: beforeEndMapping.source,
|
1651
|
+
line: beforeEndMapping.line,
|
1652
|
+
column: beforeEndMapping.column + 1,
|
1653
|
+
bias: LEAST_UPPER_BOUND
|
1654
|
+
});
|
1655
|
+
if (
|
1656
|
+
// If this is null, it means that we've hit the end of the file,
|
1657
|
+
// so we can use Infinity as the end column.
|
1658
|
+
afterEndMapping.line === null ||
|
1659
|
+
// If these don't match, it means that the call to
|
1660
|
+
// 'generatedPositionFor' didn't find any other original mappings on
|
1661
|
+
// the line we gave, so consider the binding to extend to infinity.
|
1662
|
+
originalPositionFor(sourceMap, afterEndMapping).line !==
|
1663
|
+
beforeEndMapping.line
|
1664
|
+
) {
|
1665
|
+
return {
|
1666
|
+
source: beforeEndMapping.source,
|
1667
|
+
line: beforeEndMapping.line,
|
1668
|
+
column: Infinity
|
1669
|
+
}
|
1670
|
+
}
|
1671
|
+
|
1672
|
+
// Convert the end mapping into the real original position.
|
1673
|
+
return originalPositionFor(sourceMap, afterEndMapping)
|
1674
|
+
}
|
1675
|
+
|
1676
|
+
function originalPositionTryBoth (sourceMap, line, column) {
|
1677
|
+
let original = originalPositionFor(sourceMap, {
|
1678
|
+
line,
|
1679
|
+
column,
|
1680
|
+
bias: GREATEST_LOWER_BOUND
|
1681
|
+
});
|
1682
|
+
if (original.line === null) {
|
1683
|
+
original = originalPositionFor(sourceMap, {
|
1684
|
+
line,
|
1685
|
+
column,
|
1686
|
+
bias: LEAST_UPPER_BOUND
|
1687
|
+
});
|
1688
|
+
}
|
1689
|
+
// The source maps generated by https://github.com/istanbuljs/istanbuljs
|
1690
|
+
// (using @babel/core 7.7.5) behave such that a mapping
|
1691
|
+
// mid-way through a line maps to an earlier line than a mapping
|
1692
|
+
// at position 0. Using the line at position 0 seems to provide better reports:
|
1693
|
+
//
|
1694
|
+
// if (true) {
|
1695
|
+
// cov_y5divc6zu().b[1][0]++;
|
1696
|
+
// cov_y5divc6zu().s[3]++;
|
1697
|
+
// console.info('reachable');
|
1698
|
+
// } else { ... }
|
1699
|
+
// ^ ^
|
1700
|
+
// l5 l3
|
1701
|
+
const min = originalPositionFor(sourceMap, {
|
1702
|
+
line,
|
1703
|
+
column: 0,
|
1704
|
+
bias: GREATEST_LOWER_BOUND
|
1705
|
+
});
|
1706
|
+
if (min.line > original.line) {
|
1707
|
+
original = min;
|
1708
|
+
}
|
1709
|
+
return original
|
1710
|
+
}
|
1711
|
+
|
1712
|
+
// Not required since Node 12, see: https://github.com/nodejs/node/pull/27375
|
1713
|
+
const isPreNode12 = /^v1[0-1]\./u.test(process.version);
|
1714
|
+
function getShebangLength (source) {
|
1715
|
+
if (isPreNode12 && source.indexOf('#!') === 0) {
|
1716
|
+
const match = source.match(/(?<shebang>#!.*)/);
|
1717
|
+
if (match) {
|
1718
|
+
return match.groups.shebang.length
|
1719
|
+
}
|
1720
|
+
} else {
|
1721
|
+
return 0
|
1722
|
+
}
|
1723
|
+
}
|
1724
|
+
return source;
|
1725
|
+
}
|
1726
|
+
|
1727
|
+
// Patch applied: https://github.com/istanbuljs/v8-to-istanbul/pull/244
|
1728
|
+
const assert = require$$0;
|
1729
|
+
const convertSourceMap = convertSourceMap$1;
|
1730
|
+
const util = require$$2;
|
1731
|
+
const debuglog = util.debuglog('c8');
|
1732
|
+
const { dirname, isAbsolute: isAbsolute$1, join, resolve: resolve$1 } = require$$3;
|
1733
|
+
const { fileURLToPath } = require$$4;
|
1734
|
+
const CovBranch = requireBranch();
|
1735
|
+
const CovFunction = require_function();
|
1736
|
+
const CovSource = requireSource();
|
1737
|
+
const { sliceRange } = requireRange();
|
1738
|
+
const { readFileSync, promises } = require$$9;
|
1739
|
+
const readFile = promises.readFile;
|
1740
|
+
|
1741
|
+
const { TraceMap } = requireTraceMapping_umd();
|
1742
|
+
const isOlderNode10 = /^v10\.(([0-9]\.)|(1[0-5]\.))/u.test(process.version);
|
1743
|
+
const isNode8 = /^v8\./.test(process.version);
|
1744
|
+
|
1745
|
+
// Injected when Node.js loads a script into the isolate, pre Node 10.16.x.
|
1746
|
+
// see: https://github.com/nodejs/node/pull/21573.
|
1747
|
+
const cjsWrapperLength = isOlderNode10 ? require$$11.wrapper[0].length : 0;
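The wrapper length above compensates for the CommonJS wrapper that older Node.js versions prepended to module source before handing it to V8, which shifts every byte offset reported in coverage data. A rough sketch of where that constant comes from (Module.wrapper is a real, though deprecated, Node API; the exact wrapper string shown is from memory and may differ slightly between versions):

const Module = require('module');

// Roughly: '(function (exports, require, module, __filename, __dirname) { '
const cjsPrefix = Module.wrapper[0];

// Subtracting this length realigns V8 byte offsets with the file on disk,
// which is what cjsWrapperLength does for pre-10.16 Node.
console.log(cjsPrefix.length);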
|
1748
|
+
|
1749
|
+
var v8ToIstanbul$2 = class V8ToIstanbul {
|
1750
|
+
constructor (scriptPath, wrapperLength, sources, excludePath, excludeEmptyLines) {
|
1751
|
+
assert(typeof scriptPath === 'string', 'scriptPath must be a string');
|
1752
|
+
assert(!isNode8, 'This module does not support node 8 or lower, please upgrade to node 10');
|
1753
|
+
this.path = parsePath(scriptPath);
|
1754
|
+
this.wrapperLength = wrapperLength === undefined ? cjsWrapperLength : wrapperLength;
|
1755
|
+
this.excludePath = excludePath || (() => false);
|
1756
|
+
this.excludeEmptyLines = excludeEmptyLines === true;
|
1757
|
+
this.sources = sources || {};
|
1758
|
+
this.generatedLines = [];
|
1759
|
+
this.branches = {};
|
1760
|
+
this.functions = {};
|
1761
|
+
this.covSources = [];
|
1762
|
+
this.rawSourceMap = undefined;
|
1763
|
+
this.sourceMap = undefined;
|
1764
|
+
this.sourceTranspiled = undefined;
|
1765
|
+
// Indicate that this report was generated with placeholder data from
|
1766
|
+
// running --all:
|
1767
|
+
this.all = false;
|
1768
|
+
}
|
1769
|
+
|
1770
|
+
async load () {
|
1771
|
+
const rawSource = this.sources.source || await readFile(this.path, 'utf8');
|
1772
|
+
this.rawSourceMap = this.sources.sourceMap ||
|
1773
|
+
// if we find a source-map (either inline, or a .map file) we load
|
1774
|
+
// both the transpiled and original source, both of which are used during
|
1775
|
+
// the backflips we perform to remap absolute to relative positions.
|
1776
|
+
convertSourceMap.fromSource(rawSource) || convertSourceMap.fromMapFileSource(rawSource, this._readFileFromDir.bind(this));
|
1777
|
+
|
1778
|
+
if (this.rawSourceMap) {
|
1779
|
+
if (this.rawSourceMap.sourcemap.sources.length > 1) {
|
1780
|
+
this.sourceMap = new TraceMap(this.rawSourceMap.sourcemap);
|
1781
|
+
if (!this.sourceMap.sourcesContent) {
|
1782
|
+
this.sourceMap.sourcesContent = await this.sourcesContentFromSources();
|
1783
|
+
}
|
1784
|
+
this.covSources = this.sourceMap.sourcesContent.map((rawSource, i) => ({ source: new CovSource(rawSource, this.wrapperLength, this.excludeEmptyLines ? this.sourceMap : null), path: this.sourceMap.sources[i] }));
|
1785
|
+
this.sourceTranspiled = new CovSource(rawSource, this.wrapperLength, this.excludeEmptyLines ? this.sourceMap : null);
|
1786
|
+
} else {
|
1787
|
+
const candidatePath = this.rawSourceMap.sourcemap.sources.length >= 1 ? this.rawSourceMap.sourcemap.sources[0] : this.rawSourceMap.sourcemap.file;
|
1788
|
+
this.path = this._resolveSource(this.rawSourceMap, candidatePath || this.path);
|
1789
|
+
this.sourceMap = new TraceMap(this.rawSourceMap.sourcemap);
|
1790
|
+
|
1791
|
+
let originalRawSource;
|
1792
|
+
if (this.sources.sourceMap && this.sources.sourceMap.sourcemap && this.sources.sourceMap.sourcemap.sourcesContent && this.sources.sourceMap.sourcemap.sourcesContent.length === 1) {
|
1793
|
+
// If the sourcesContent field has been provided, return it rather than attempting
|
1794
|
+
// to load the original source from disk.
|
1795
|
+
// TODO: investigate whether there's ever a case where we hit this logic with 1:many sources.
|
1796
|
+
originalRawSource = this.sources.sourceMap.sourcemap.sourcesContent[0];
|
1797
|
+
} else if (this.sources.originalSource) {
|
1798
|
+
// Original source may be populated on the sources object.
|
1799
|
+
originalRawSource = this.sources.originalSource;
|
1800
|
+
} else if (this.sourceMap.sourcesContent && this.sourceMap.sourcesContent[0]) {
|
1801
|
+
// perhaps sourcesContent was populated by an inline source map, or a .map file?
|
1802
|
+
// TODO: investigate whether there's ever a case where we hit this logic with 1:many sources.
|
1803
|
+
originalRawSource = this.sourceMap.sourcesContent[0];
|
1804
|
+
} else {
|
1805
|
+
// We fallback to reading the original source from disk.
|
1806
|
+
originalRawSource = await readFile(this.path, 'utf8');
|
1807
|
+
}
|
1808
|
+
this.covSources = [{ source: new CovSource(originalRawSource, this.wrapperLength, this.excludeEmptyLines ? this.sourceMap : null), path: this.path }];
|
1809
|
+
this.sourceTranspiled = new CovSource(rawSource, this.wrapperLength, this.excludeEmptyLines ? this.sourceMap : null);
|
1810
|
+
}
|
1811
|
+
} else {
|
1812
|
+
this.covSources = [{ source: new CovSource(rawSource, this.wrapperLength), path: this.path }];
|
1813
|
+
}
|
1814
|
+
}
|
1815
|
+
|
1816
|
+
_readFileFromDir (filename) {
|
1817
|
+
return readFileSync(resolve$1(dirname(this.path), filename), 'utf-8')
|
1818
|
+
}
|
1819
|
+
|
1820
|
+
async sourcesContentFromSources () {
|
1821
|
+
const fileList = this.sourceMap.sources.map(relativePath => {
|
1822
|
+
const realPath = this._resolveSource(this.rawSourceMap, relativePath);
|
1823
|
+
return readFile(realPath, 'utf-8')
|
1824
|
+
.then(result => result)
|
1825
|
+
.catch(err => {
|
1826
|
+
debuglog(`failed to load ${realPath}: ${err.message}`);
|
1827
|
+
})
|
1828
|
+
});
|
1829
|
+
return await Promise.all(fileList)
|
1830
|
+
}
|
1831
|
+
|
1832
|
+
destroy () {
|
1833
|
+
// no longer necessary, but preserved for backwards compatibility.
|
1834
|
+
}
|
1835
|
+
|
1836
|
+
_resolveSource (rawSourceMap, sourcePath) {
|
1837
|
+
if (sourcePath.startsWith('file://')) {
|
1838
|
+
return fileURLToPath(sourcePath)
|
1839
|
+
}
|
1840
|
+
sourcePath = sourcePath.replace(/^webpack:\/\//, '');
|
1841
|
+
const sourceRoot = rawSourceMap.sourcemap.sourceRoot ? rawSourceMap.sourcemap.sourceRoot.replace('file://', '') : '';
|
1842
|
+
const candidatePath = join(sourceRoot, sourcePath);
|
1843
|
+
|
1844
|
+
if (isAbsolute$1(candidatePath)) {
|
1845
|
+
return candidatePath
|
1846
|
+
} else {
|
1847
|
+
return resolve$1(dirname(this.path), candidatePath)
|
1848
|
+
}
|
1849
|
+
}
|
1850
|
+
|
1851
|
+
applyCoverage (blocks) {
|
1852
|
+
blocks.forEach(block => {
|
1853
|
+
block.ranges.forEach((range, i) => {
|
1854
|
+
const isEmptyCoverage = block.functionName === '(empty-report)';
|
1855
|
+
const { startCol, endCol, path, covSource } = this._maybeRemapStartColEndCol(range, isEmptyCoverage);
|
1856
|
+
if (this.excludePath(path)) {
|
1857
|
+
return
|
1858
|
+
}
|
1859
|
+
let lines;
|
1860
|
+
if (isEmptyCoverage) {
|
1861
|
+
// (empty-report), this will result in a report that has all lines zeroed out.
|
1862
|
+
lines = covSource.lines.filter((line) => {
|
1863
|
+
line.count = 0;
|
1864
|
+
return true
|
1865
|
+
});
|
1866
|
+
this.all = lines.length > 0;
|
1867
|
+
} else {
|
1868
|
+
lines = sliceRange(covSource.lines, startCol, endCol);
|
1869
|
+
}
|
1870
|
+
if (!lines.length) {
|
1871
|
+
return
|
1872
|
+
}
|
1873
|
+
|
1874
|
+
const startLineInstance = lines[0];
|
1875
|
+
const endLineInstance = lines[lines.length - 1];
|
1876
|
+
|
1877
|
+
if (block.isBlockCoverage) {
|
1878
|
+
this.branches[path] = this.branches[path] || [];
|
1879
|
+
// record branches.
|
1880
|
+
this.branches[path].push(new CovBranch(
|
1881
|
+
startLineInstance.line,
|
1882
|
+
startCol - startLineInstance.startCol,
|
1883
|
+
endLineInstance.line,
|
1884
|
+
endCol - endLineInstance.startCol,
|
1885
|
+
range.count
|
1886
|
+
));
|
1887
|
+
|
1888
|
+
// if block-level granularity is enabled, we still create a single
|
1889
|
+
// CovFunction tracking object for each set of ranges.
|
1890
|
+
if (block.functionName && i === 0) {
|
1891
|
+
this.functions[path] = this.functions[path] || [];
|
1892
|
+
this.functions[path].push(new CovFunction(
|
1893
|
+
block.functionName,
|
1894
|
+
startLineInstance.line,
|
1895
|
+
startCol - startLineInstance.startCol,
|
1896
|
+
endLineInstance.line,
|
1897
|
+
endCol - endLineInstance.startCol,
|
1898
|
+
range.count
|
1899
|
+
));
|
1900
|
+
}
|
1901
|
+
} else if (block.functionName) {
|
1902
|
+
this.functions[path] = this.functions[path] || [];
|
1903
|
+
// record functions.
|
1904
|
+
this.functions[path].push(new CovFunction(
|
1905
|
+
block.functionName,
|
1906
|
+
startLineInstance.line,
|
1907
|
+
startCol - startLineInstance.startCol,
|
1908
|
+
endLineInstance.line,
|
1909
|
+
endCol - endLineInstance.startCol,
|
1910
|
+
range.count
|
1911
|
+
));
|
1912
|
+
}
|
1913
|
+
|
1914
|
+
// record the lines (we record these as statements, such that we're
|
1915
|
+
// compatible with Istanbul 2.0).
|
1916
|
+
lines.forEach(line => {
|
1917
|
+
// make sure branch spans entire line; don't record 'goodbye'
|
1918
|
+
// branch in `const foo = true ? 'hello' : 'goodbye'` as a
|
1919
|
+
// 0 for line coverage.
|
1920
|
+
//
|
1921
|
+
// All lines start out with coverage of 1, and are later set to 0
|
1922
|
+
// if they are not invoked; line.ignore prevents a line from being
|
1923
|
+
// set to 0, and is set if the special comment /* c8 ignore next */
|
1924
|
+
// is used.
|
1925
|
+
|
1926
|
+
if (startCol <= line.startCol && endCol >= line.endCol && !line.ignore) {
|
1927
|
+
line.count = range.count;
|
1928
|
+
}
|
1929
|
+
});
|
1930
|
+
});
|
1931
|
+
});
|
1932
|
+
}
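applyCoverage() consumes the function entries of a V8 ScriptCoverage result: each block carries a functionName, an isBlockCoverage flag and a list of ranges with absolute startOffset/endOffset positions plus an execution count, with '(empty-report)' acting as the sentinel for the --all placeholder reports. A sketch of the expected input shape with made-up values, assuming converter is an instance produced by the factory exported further down and load() has already resolved:

converter.applyCoverage([
  {
    functionName: 'add',
    isBlockCoverage: true,
    ranges: [
      { startOffset: 0, endOffset: 120, count: 1 }, // whole function body
      { startOffset: 40, endOffset: 60, count: 0 }  // an uncovered branch inside it
    ]
  }
]);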
|
1933
|
+
|
1934
|
+
_maybeRemapStartColEndCol (range, isEmptyCoverage) {
|
1935
|
+
let covSource = this.covSources[0].source;
|
1936
|
+
const covSourceWrapperLength = isEmptyCoverage ? 0 : covSource.wrapperLength;
|
1937
|
+
let startCol = Math.max(0, range.startOffset - covSourceWrapperLength);
|
1938
|
+
let endCol = Math.min(covSource.eof, range.endOffset - covSourceWrapperLength);
|
1939
|
+
let path = this.path;
|
1940
|
+
|
1941
|
+
if (this.sourceMap) {
|
1942
|
+
const sourceTranspiledWrapperLength = isEmptyCoverage ? 0 : this.sourceTranspiled.wrapperLength;
|
1943
|
+
startCol = Math.max(0, range.startOffset - sourceTranspiledWrapperLength);
|
1944
|
+
endCol = Math.min(this.sourceTranspiled.eof, range.endOffset - sourceTranspiledWrapperLength);
|
1945
|
+
|
1946
|
+
const { startLine, relStartCol, endLine, relEndCol, source } = this.sourceTranspiled.offsetToOriginalRelative(
|
1947
|
+
this.sourceMap,
|
1948
|
+
startCol,
|
1949
|
+
endCol
|
1950
|
+
);
|
1951
|
+
|
1952
|
+
const matchingSource = this.covSources.find(covSource => covSource.path === source);
|
1953
|
+
covSource = matchingSource ? matchingSource.source : this.covSources[0].source;
|
1954
|
+
path = matchingSource ? matchingSource.path : this.covSources[0].path;
|
1955
|
+
|
1956
|
+
// next we convert these relative positions back to absolute positions
|
1957
|
+
// in the original source (which is the format expected in the next step).
|
1958
|
+
startCol = covSource.relativeToOffset(startLine, relStartCol);
|
1959
|
+
endCol = covSource.relativeToOffset(endLine, relEndCol);
|
1960
|
+
}
|
1961
|
+
|
1962
|
+
return {
|
1963
|
+
path,
|
1964
|
+
covSource,
|
1965
|
+
startCol,
|
1966
|
+
endCol
|
1967
|
+
}
|
1968
|
+
}
|
1969
|
+
|
1970
|
+
getInnerIstanbul (source, path) {
|
1971
|
+
// We apply the "Resolving Sources" logic (as defined in
|
1972
|
+
// sourcemaps.info/spec.html) as a final step for 1:many source maps.
|
1973
|
+
// for 1:1 source maps, the resolve logic is applied while loading.
|
1974
|
+
//
|
1975
|
+
// TODO: could we move the resolving logic for 1:1 source maps to the final
|
1976
|
+
// step as well? currently this breaks some tests in c8.
|
1977
|
+
let resolvedPath = path;
|
1978
|
+
if (this.rawSourceMap && this.rawSourceMap.sourcemap.sources.length > 1) {
|
1979
|
+
resolvedPath = this._resolveSource(this.rawSourceMap, path);
|
1980
|
+
}
|
1981
|
+
|
1982
|
+
if (this.excludePath(resolvedPath)) {
|
1983
|
+
return
|
1984
|
+
}
|
1985
|
+
|
1986
|
+
return {
|
1987
|
+
[resolvedPath]: {
|
1988
|
+
path: resolvedPath,
|
1989
|
+
all: this.all,
|
1990
|
+
...this._statementsToIstanbul(source, path),
|
1991
|
+
...this._branchesToIstanbul(source, path),
|
1992
|
+
...this._functionsToIstanbul(source, path)
|
1993
|
+
}
|
1994
|
+
}
|
1995
|
+
}
|
1996
|
+
|
1997
|
+
toIstanbul () {
|
1998
|
+
return this.covSources.reduce((istanbulOuter, { source, path }) => Object.assign(istanbulOuter, this.getInnerIstanbul(source, path)), {})
|
1999
|
+
}
|
2000
|
+
|
2001
|
+
_statementsToIstanbul (source, path) {
|
2002
|
+
const statements = {
|
2003
|
+
statementMap: {},
|
2004
|
+
s: {}
|
2005
|
+
};
|
2006
|
+
source.lines.forEach((line, index) => {
|
2007
|
+
if (!line.ignore) {
|
2008
|
+
statements.statementMap[`${index}`] = line.toIstanbul();
|
2009
|
+
statements.s[`${index}`] = line.count;
|
2010
|
+
}
|
2011
|
+
});
|
2012
|
+
return statements
|
2013
|
+
}
|
2014
|
+
|
2015
|
+
_branchesToIstanbul (source, path) {
|
2016
|
+
const branches = {
|
2017
|
+
branchMap: {},
|
2018
|
+
b: {}
|
2019
|
+
};
|
2020
|
+
this.branches[path] = this.branches[path] || [];
|
2021
|
+
this.branches[path].forEach((branch, index) => {
|
2022
|
+
const srcLine = source.lines[branch.startLine - 1];
|
2023
|
+
const ignore = srcLine === undefined ? true : srcLine.ignore;
|
2024
|
+
branches.branchMap[`${index}`] = branch.toIstanbul();
|
2025
|
+
branches.b[`${index}`] = [ignore ? 1 : branch.count];
|
2026
|
+
});
|
2027
|
+
return branches
|
2028
|
+
}
|
2029
|
+
|
2030
|
+
_functionsToIstanbul (source, path) {
|
2031
|
+
const functions = {
|
2032
|
+
fnMap: {},
|
2033
|
+
f: {}
|
2034
|
+
};
|
2035
|
+
this.functions[path] = this.functions[path] || [];
|
2036
|
+
this.functions[path].forEach((fn, index) => {
|
2037
|
+
const srcLine = source.lines[fn.startLine - 1];
|
2038
|
+
const ignore = srcLine === undefined ? true : srcLine.ignore;
|
2039
|
+
functions.fnMap[`${index}`] = fn.toIstanbul();
|
2040
|
+
functions.f[`${index}`] = ignore ? 1 : fn.count;
|
2041
|
+
});
|
2042
|
+
return functions
|
2043
|
+
}
|
2044
|
+
};
|
2045
|
+
|
2046
|
+
function parsePath (scriptPath) {
|
2047
|
+
return scriptPath.startsWith('file://') ? fileURLToPath(scriptPath) : scriptPath
|
2048
|
+
}
|
2049
|
+
|
2050
|
+
// Patch applied: https://github.com/istanbuljs/v8-to-istanbul/pull/244
|
2051
|
+
const V8ToIstanbul = v8ToIstanbul$2;
|
2052
|
+
|
2053
|
+
var v8ToIstanbul = function (path, wrapperLength, sources, excludePath, excludeEmptyLines) {
|
2054
|
+
return new V8ToIstanbul(path, wrapperLength, sources, excludePath, excludeEmptyLines)
|
2055
|
+
};
|
2056
|
+
|
2057
|
+
var v8ToIstanbul$1 = /*@__PURE__*/getDefaultExportFromCjs(v8ToIstanbul);
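Putting the pieces together, the provider code later in this file drives the bundled converter roughly as follows (fileUrl, wrapperLength, sources and functions stand in for values the provider computes, and the call runs inside an async function):

const converter = v8ToIstanbul$1(fileUrl, wrapperLength, sources, undefined, /* excludeEmptyLines */ true);
await converter.load();
converter.applyCoverage(functions);
// Keyed by resolved path: { path, all, statementMap, s, branchMap, b, fnMap, f }
const istanbulData = converter.toIstanbul();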
|
2058
|
+
|
21
2059
|
function normalizeWindowsPath(input = "") {
|
22
2060
|
if (!input || !input.includes("\\")) {
|
23
2061
|
return input;
|
@@ -219,10 +2257,10 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
219
2257
|
}
|
220
2258
|
async clean(clean = true) {
|
221
2259
|
if (clean && existsSync(this.options.reportsDirectory))
|
222
|
-
await promises.rm(this.options.reportsDirectory, { recursive: true, force: true, maxRetries: 10 });
|
2260
|
+
await promises$1.rm(this.options.reportsDirectory, { recursive: true, force: true, maxRetries: 10 });
|
223
2261
|
if (existsSync(this.coverageFilesDirectory))
|
224
|
-
await promises.rm(this.coverageFilesDirectory, { recursive: true, force: true, maxRetries: 10 });
|
225
|
-
await promises.mkdir(this.coverageFilesDirectory, { recursive: true });
|
2262
|
+
await promises$1.rm(this.coverageFilesDirectory, { recursive: true, force: true, maxRetries: 10 });
|
2263
|
+
await promises$1.mkdir(this.coverageFilesDirectory, { recursive: true });
|
226
2264
|
this.coverageFiles = /* @__PURE__ */ new Map();
|
227
2265
|
this.pendingPromises = [];
|
228
2266
|
}
|
@@ -241,7 +2279,7 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
241
2279
|
}
|
242
2280
|
const filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
|
243
2281
|
entry[transformMode].push(filename);
|
244
|
-
const promise = promises.writeFile(filename, JSON.stringify(coverage), "utf-8");
|
2282
|
+
const promise = promises$1.writeFile(filename, JSON.stringify(coverage), "utf-8");
|
245
2283
|
this.pendingPromises.push(promise);
|
246
2284
|
}
|
247
2285
|
async reportCoverage({ allTestsRun } = {}) {
|
@@ -261,7 +2299,7 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
261
2299
|
debug("Covered files %d/%d", index, total);
|
262
2300
|
}
|
263
2301
|
await Promise.all(chunk.map(async (filename) => {
|
264
|
-
const contents = await promises.readFile(filename, "utf-8");
|
2302
|
+
const contents = await promises$1.readFile(filename, "utf-8");
|
265
2303
|
const coverage = JSON.parse(contents);
|
266
2304
|
merged = mergeProcessCovs([merged, coverage]);
|
267
2305
|
}));
|
@@ -295,7 +2333,8 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
295
2333
|
const resolvedThresholds = this.resolveThresholds({
|
296
2334
|
coverageMap,
|
297
2335
|
thresholds: this.options.thresholds,
|
298
|
-
createCoverageMap: () => libCoverage.createCoverageMap({})
|
2336
|
+
createCoverageMap: () => libCoverage.createCoverageMap({}),
|
2337
|
+
root: this.ctx.config.root
|
299
2338
|
});
|
300
2339
|
this.checkThresholds({
|
301
2340
|
thresholds: resolvedThresholds,
|
@@ -305,7 +2344,7 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
305
2344
|
if (!this.ctx.server.config.configFile)
|
306
2345
|
throw new Error('Missing configurationFile. The "coverage.thresholds.autoUpdate" can only be enabled when configuration file is used.');
|
307
2346
|
const configFilePath = this.ctx.server.config.configFile;
|
308
|
-
const configModule = parseModule(await promises.readFile(configFilePath, "utf8"));
|
2347
|
+
const configModule = parseModule(await promises$1.readFile(configFilePath, "utf8"));
|
309
2348
|
this.updateThresholds({
|
310
2349
|
thresholds: resolvedThresholds,
|
311
2350
|
perFile: this.options.thresholds.perFile,
|
@@ -314,8 +2353,11 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
314
2353
|
});
|
315
2354
|
}
|
316
2355
|
}
|
317
|
-
|
318
|
-
|
2356
|
+
const keepResults = !this.options.cleanOnRerun && this.ctx.config.watch;
|
2357
|
+
if (!keepResults) {
|
2358
|
+
this.coverageFiles = /* @__PURE__ */ new Map();
|
2359
|
+
await promises$1.rm(this.coverageFilesDirectory, { recursive: true });
|
2360
|
+
}
|
319
2361
|
}
|
320
2362
|
async getUntestedFiles(testedFiles) {
|
321
2363
|
const transformResults = normalizeTransformResults(this.ctx.vitenode.fetchCache);
|
@@ -332,11 +2374,9 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
332
2374
|
debug("Uncovered files %d/%d", index, uncoveredFiles.length);
|
333
2375
|
}
|
334
2376
|
const coverages = await Promise.all(chunk.map(async (filename) => {
|
335
|
-
const
|
336
|
-
|
337
|
-
if (transformResult && stripLiteral(transformResult.code).trim() === "")
|
2377
|
+
const { originalSource, source } = await this.getSources(filename.href, transformResults);
|
2378
|
+
if (source && stripLiteral(source).trim() === "")
|
338
2379
|
return null;
|
339
|
-
const { originalSource } = await this.getSources(filename.href, transformResults);
|
340
2380
|
const coverage = {
|
341
2381
|
url: filename.href,
|
342
2382
|
scriptId: "0",
|
@@ -362,11 +2402,12 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
362
2402
|
return merged;
|
363
2403
|
}
|
364
2404
|
async getSources(url, transformResults, functions = []) {
|
365
|
-
const filePath = normalize(fileURLToPath(url));
|
366
|
-
const transformResult = transformResults.get(filePath)
|
2405
|
+
const filePath = normalize(fileURLToPath$1(url));
|
2406
|
+
const transformResult = transformResults.get(filePath) || await this.ctx.vitenode.transformRequest(filePath).catch(() => {
|
2407
|
+
});
|
367
2408
|
const map = transformResult?.map;
|
368
2409
|
const code = transformResult?.code;
|
369
|
-
const sourcesContent = map?.sourcesContent?.[0] || await promises.readFile(filePath, "utf-8").catch(() => {
|
2410
|
+
const sourcesContent = map?.sourcesContent?.[0] || await promises$1.readFile(filePath, "utf-8").catch(() => {
|
370
2411
|
const length = findLongestFunctionLength(functions);
|
371
2412
|
return ".".repeat(length);
|
372
2413
|
});
|
@@ -396,7 +2437,7 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
396
2437
|
const viteNode = this.ctx.projects.find((project) => project.getName() === projectName)?.vitenode || this.ctx.vitenode;
|
397
2438
|
const fetchCache = transformMode ? viteNode.fetchCaches[transformMode] : viteNode.fetchCache;
|
398
2439
|
const transformResults = normalizeTransformResults(fetchCache);
|
399
|
-
const scriptCoverages = coverage.result.filter((result) => this.testExclude.shouldInstrument(fileURLToPath(result.url)));
|
2440
|
+
const scriptCoverages = coverage.result.filter((result) => this.testExclude.shouldInstrument(fileURLToPath$1(result.url)));
|
400
2441
|
const coverageMap = libCoverage.createCoverageMap({});
|
401
2442
|
let index = 0;
|
402
2443
|
for (const chunk of this.toSlices(scriptCoverages, this.options.processingConcurrency)) {
|
@@ -407,7 +2448,7 @@ class V8CoverageProvider extends BaseCoverageProvider {
|
|
407
2448
|
await Promise.all(chunk.map(async ({ url, functions }) => {
|
408
2449
|
const sources = await this.getSources(url, transformResults, functions);
|
409
2450
|
const wrapperLength = sources.sourceMap ? WRAPPER_LENGTH : 0;
|
410
|
-
const converter = v8ToIstanbul(url, wrapperLength, sources);
|
2451
|
+
const converter = v8ToIstanbul$1(url, wrapperLength, sources, void 0, this.options.ignoreEmptyLines);
|
411
2452
|
await converter.load();
|
412
2453
|
converter.applyCoverage(functions);
|
413
2454
|
coverageMap.merge(converter.toIstanbul());
|
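The last hunk shows the user-visible change in this release: the provider now forwards this.options.ignoreEmptyLines into the bundled converter, which feeds the excludeEmptyLines/linesToCover logic added above. Assuming the option surfaces as a coverage.ignoreEmptyLines flag in the Vitest config (the diff itself only shows the provider reading this.options.ignoreEmptyLines), enabling it would look roughly like:

// vitest.config.js (sketch; option name inferred from this.options.ignoreEmptyLines)
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    coverage: {
      provider: 'v8',
      ignoreEmptyLines: true
    }
  }
});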