@pierre/diffs 1.1.11 → 1.1.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/AdvancedVirtualizedFileDiff.d.ts.map +1 -1
- package/dist/components/File.js +1 -1
- package/dist/components/File.js.map +1 -1
- package/dist/components/FileDiff.js +1 -1
- package/dist/components/FileDiff.js.map +1 -1
- package/dist/components/UnresolvedFile.d.ts.map +1 -1
- package/dist/components/VirtulizerDevelopment.d.ts.map +1 -1
- package/dist/highlighter/languages/registerCustomLanguage.js +2 -2
- package/dist/highlighter/languages/registerCustomLanguage.js.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/react/constants.d.ts.map +1 -1
- package/dist/react/jsx.d.ts.map +1 -1
- package/dist/utils/getFiletypeFromFileName.d.ts +5 -3
- package/dist/utils/getFiletypeFromFileName.d.ts.map +1 -1
- package/dist/utils/getFiletypeFromFileName.js +21 -2
- package/dist/utils/getFiletypeFromFileName.js.map +1 -1
- package/dist/worker/WorkerPoolManager.d.ts +2 -0
- package/dist/worker/WorkerPoolManager.d.ts.map +1 -1
- package/dist/worker/WorkerPoolManager.js +26 -3
- package/dist/worker/WorkerPoolManager.js.map +1 -1
- package/dist/worker/types.d.ts +7 -1
- package/dist/worker/types.d.ts.map +1 -1
- package/dist/worker/worker-portable.js +2451 -2405
- package/dist/worker/worker-portable.js.map +1 -1
- package/dist/worker/worker.js +242 -214
- package/dist/worker/worker.js.map +1 -1
- package/package.json +1 -1
|
@@ -12237,2391 +12237,10 @@ function attachResolvedThemes(themes, highlighter$1) {
|
|
|
12237
12237
|
}
|
|
12238
12238
|
}
|
|
12239
12239
|
|
|
12240
|
-
//#endregion
|
|
12241
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/base.js
|
|
12242
|
-
var Diff = class {
|
|
12243
|
-
diff(oldStr, newStr, options = {}) {
|
|
12244
|
-
let callback;
|
|
12245
|
-
if (typeof options === "function") {
|
|
12246
|
-
callback = options;
|
|
12247
|
-
options = {};
|
|
12248
|
-
} else if ("callback" in options) {
|
|
12249
|
-
callback = options.callback;
|
|
12250
|
-
}
|
|
12251
|
-
const oldString = this.castInput(oldStr, options);
|
|
12252
|
-
const newString = this.castInput(newStr, options);
|
|
12253
|
-
const oldTokens = this.removeEmpty(this.tokenize(oldString, options));
|
|
12254
|
-
const newTokens = this.removeEmpty(this.tokenize(newString, options));
|
|
12255
|
-
return this.diffWithOptionsObj(oldTokens, newTokens, options, callback);
|
|
12256
|
-
}
|
|
12257
|
-
diffWithOptionsObj(oldTokens, newTokens, options, callback) {
|
|
12258
|
-
var _a;
|
|
12259
|
-
const done = (value) => {
|
|
12260
|
-
value = this.postProcess(value, options);
|
|
12261
|
-
if (callback) {
|
|
12262
|
-
setTimeout(function() {
|
|
12263
|
-
callback(value);
|
|
12264
|
-
}, 0);
|
|
12265
|
-
return undefined;
|
|
12266
|
-
} else {
|
|
12267
|
-
return value;
|
|
12268
|
-
}
|
|
12269
|
-
};
|
|
12270
|
-
const newLen = newTokens.length, oldLen = oldTokens.length;
|
|
12271
|
-
let editLength = 1;
|
|
12272
|
-
let maxEditLength = newLen + oldLen;
|
|
12273
|
-
if (options.maxEditLength != null) {
|
|
12274
|
-
maxEditLength = Math.min(maxEditLength, options.maxEditLength);
|
|
12275
|
-
}
|
|
12276
|
-
const maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity;
|
|
12277
|
-
const abortAfterTimestamp = Date.now() + maxExecutionTime;
|
|
12278
|
-
const bestPath = [{
|
|
12279
|
-
oldPos: -1,
|
|
12280
|
-
lastComponent: undefined
|
|
12281
|
-
}];
|
|
12282
|
-
let newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options);
|
|
12283
|
-
if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
|
|
12284
|
-
return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens));
|
|
12285
|
-
}
|
|
12286
|
-
let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity;
|
|
12287
|
-
const execEditLength = () => {
|
|
12288
|
-
for (let diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
|
|
12289
|
-
let basePath;
|
|
12290
|
-
const removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1];
|
|
12291
|
-
if (removePath) {
|
|
12292
|
-
bestPath[diagonalPath - 1] = undefined;
|
|
12293
|
-
}
|
|
12294
|
-
let canAdd = false;
|
|
12295
|
-
if (addPath) {
|
|
12296
|
-
const addPathNewPos = addPath.oldPos - diagonalPath;
|
|
12297
|
-
canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
|
|
12298
|
-
}
|
|
12299
|
-
const canRemove = removePath && removePath.oldPos + 1 < oldLen;
|
|
12300
|
-
if (!canAdd && !canRemove) {
|
|
12301
|
-
bestPath[diagonalPath] = undefined;
|
|
12302
|
-
continue;
|
|
12303
|
-
}
|
|
12304
|
-
if (!canRemove || canAdd && removePath.oldPos < addPath.oldPos) {
|
|
12305
|
-
basePath = this.addToPath(addPath, true, false, 0, options);
|
|
12306
|
-
} else {
|
|
12307
|
-
basePath = this.addToPath(removePath, false, true, 1, options);
|
|
12308
|
-
}
|
|
12309
|
-
newPos = this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options);
|
|
12310
|
-
if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
|
|
12311
|
-
return done(this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true;
|
|
12312
|
-
} else {
|
|
12313
|
-
bestPath[diagonalPath] = basePath;
|
|
12314
|
-
if (basePath.oldPos + 1 >= oldLen) {
|
|
12315
|
-
maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
|
|
12316
|
-
}
|
|
12317
|
-
if (newPos + 1 >= newLen) {
|
|
12318
|
-
minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
|
|
12319
|
-
}
|
|
12320
|
-
}
|
|
12321
|
-
}
|
|
12322
|
-
editLength++;
|
|
12323
|
-
};
|
|
12324
|
-
if (callback) {
|
|
12325
|
-
(function exec() {
|
|
12326
|
-
setTimeout(function() {
|
|
12327
|
-
if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
|
|
12328
|
-
return callback(undefined);
|
|
12329
|
-
}
|
|
12330
|
-
if (!execEditLength()) {
|
|
12331
|
-
exec();
|
|
12332
|
-
}
|
|
12333
|
-
}, 0);
|
|
12334
|
-
})();
|
|
12335
|
-
} else {
|
|
12336
|
-
while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
|
|
12337
|
-
const ret = execEditLength();
|
|
12338
|
-
if (ret) {
|
|
12339
|
-
return ret;
|
|
12340
|
-
}
|
|
12341
|
-
}
|
|
12342
|
-
}
|
|
12343
|
-
}
|
|
12344
|
-
addToPath(path, added, removed, oldPosInc, options) {
|
|
12345
|
-
const last = path.lastComponent;
|
|
12346
|
-
if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
|
|
12347
|
-
return {
|
|
12348
|
-
oldPos: path.oldPos + oldPosInc,
|
|
12349
|
-
lastComponent: {
|
|
12350
|
-
count: last.count + 1,
|
|
12351
|
-
added,
|
|
12352
|
-
removed,
|
|
12353
|
-
previousComponent: last.previousComponent
|
|
12354
|
-
}
|
|
12355
|
-
};
|
|
12356
|
-
} else {
|
|
12357
|
-
return {
|
|
12358
|
-
oldPos: path.oldPos + oldPosInc,
|
|
12359
|
-
lastComponent: {
|
|
12360
|
-
count: 1,
|
|
12361
|
-
added,
|
|
12362
|
-
removed,
|
|
12363
|
-
previousComponent: last
|
|
12364
|
-
}
|
|
12365
|
-
};
|
|
12366
|
-
}
|
|
12367
|
-
}
|
|
12368
|
-
extractCommon(basePath, newTokens, oldTokens, diagonalPath, options) {
|
|
12369
|
-
const newLen = newTokens.length, oldLen = oldTokens.length;
|
|
12370
|
-
let oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0;
|
|
12371
|
-
while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) {
|
|
12372
|
-
newPos++;
|
|
12373
|
-
oldPos++;
|
|
12374
|
-
commonCount++;
|
|
12375
|
-
if (options.oneChangePerToken) {
|
|
12376
|
-
basePath.lastComponent = {
|
|
12377
|
-
count: 1,
|
|
12378
|
-
previousComponent: basePath.lastComponent,
|
|
12379
|
-
added: false,
|
|
12380
|
-
removed: false
|
|
12381
|
-
};
|
|
12382
|
-
}
|
|
12383
|
-
}
|
|
12384
|
-
if (commonCount && !options.oneChangePerToken) {
|
|
12385
|
-
basePath.lastComponent = {
|
|
12386
|
-
count: commonCount,
|
|
12387
|
-
previousComponent: basePath.lastComponent,
|
|
12388
|
-
added: false,
|
|
12389
|
-
removed: false
|
|
12390
|
-
};
|
|
12391
|
-
}
|
|
12392
|
-
basePath.oldPos = oldPos;
|
|
12393
|
-
return newPos;
|
|
12394
|
-
}
|
|
12395
|
-
equals(left, right, options) {
|
|
12396
|
-
if (options.comparator) {
|
|
12397
|
-
return options.comparator(left, right);
|
|
12398
|
-
} else {
|
|
12399
|
-
return left === right || !!options.ignoreCase && left.toLowerCase() === right.toLowerCase();
|
|
12400
|
-
}
|
|
12401
|
-
}
|
|
12402
|
-
removeEmpty(array) {
|
|
12403
|
-
const ret = [];
|
|
12404
|
-
for (let i$2 = 0; i$2 < array.length; i$2++) {
|
|
12405
|
-
if (array[i$2]) {
|
|
12406
|
-
ret.push(array[i$2]);
|
|
12407
|
-
}
|
|
12408
|
-
}
|
|
12409
|
-
return ret;
|
|
12410
|
-
}
|
|
12411
|
-
castInput(value, options) {
|
|
12412
|
-
return value;
|
|
12413
|
-
}
|
|
12414
|
-
tokenize(value, options) {
|
|
12415
|
-
return Array.from(value);
|
|
12416
|
-
}
|
|
12417
|
-
join(chars) {
|
|
12418
|
-
return chars.join("");
|
|
12419
|
-
}
|
|
12420
|
-
postProcess(changeObjects, options) {
|
|
12421
|
-
return changeObjects;
|
|
12422
|
-
}
|
|
12423
|
-
get useLongestToken() {
|
|
12424
|
-
return false;
|
|
12425
|
-
}
|
|
12426
|
-
buildValues(lastComponent, newTokens, oldTokens) {
|
|
12427
|
-
const components = [];
|
|
12428
|
-
let nextComponent;
|
|
12429
|
-
while (lastComponent) {
|
|
12430
|
-
components.push(lastComponent);
|
|
12431
|
-
nextComponent = lastComponent.previousComponent;
|
|
12432
|
-
delete lastComponent.previousComponent;
|
|
12433
|
-
lastComponent = nextComponent;
|
|
12434
|
-
}
|
|
12435
|
-
components.reverse();
|
|
12436
|
-
const componentLen = components.length;
|
|
12437
|
-
let componentPos = 0, newPos = 0, oldPos = 0;
|
|
12438
|
-
for (; componentPos < componentLen; componentPos++) {
|
|
12439
|
-
const component = components[componentPos];
|
|
12440
|
-
if (!component.removed) {
|
|
12441
|
-
if (!component.added && this.useLongestToken) {
|
|
12442
|
-
let value = newTokens.slice(newPos, newPos + component.count);
|
|
12443
|
-
value = value.map(function(value$1, i$2) {
|
|
12444
|
-
const oldValue = oldTokens[oldPos + i$2];
|
|
12445
|
-
return oldValue.length > value$1.length ? oldValue : value$1;
|
|
12446
|
-
});
|
|
12447
|
-
component.value = this.join(value);
|
|
12448
|
-
} else {
|
|
12449
|
-
component.value = this.join(newTokens.slice(newPos, newPos + component.count));
|
|
12450
|
-
}
|
|
12451
|
-
newPos += component.count;
|
|
12452
|
-
if (!component.added) {
|
|
12453
|
-
oldPos += component.count;
|
|
12454
|
-
}
|
|
12455
|
-
} else {
|
|
12456
|
-
component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count));
|
|
12457
|
-
oldPos += component.count;
|
|
12458
|
-
}
|
|
12459
|
-
}
|
|
12460
|
-
return components;
|
|
12461
|
-
}
|
|
12462
|
-
};
|
|
12463
|
-
|
|
12464
|
-
//#endregion
|
|
12465
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/character.js
|
|
12466
|
-
var CharacterDiff = class extends Diff {};
|
|
12467
|
-
const characterDiff = new CharacterDiff();
|
|
12468
|
-
function diffChars(oldStr, newStr, options) {
|
|
12469
|
-
return characterDiff.diff(oldStr, newStr, options);
|
|
12470
|
-
}
|
|
12471
|
-
|
|
12472
|
-
//#endregion
|
|
12473
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/util/string.js
|
|
12474
|
-
function longestCommonPrefix(str1, str2) {
|
|
12475
|
-
let i$2;
|
|
12476
|
-
for (i$2 = 0; i$2 < str1.length && i$2 < str2.length; i$2++) {
|
|
12477
|
-
if (str1[i$2] != str2[i$2]) {
|
|
12478
|
-
return str1.slice(0, i$2);
|
|
12479
|
-
}
|
|
12480
|
-
}
|
|
12481
|
-
return str1.slice(0, i$2);
|
|
12482
|
-
}
|
|
12483
|
-
function longestCommonSuffix(str1, str2) {
|
|
12484
|
-
let i$2;
|
|
12485
|
-
if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
|
|
12486
|
-
return "";
|
|
12487
|
-
}
|
|
12488
|
-
for (i$2 = 0; i$2 < str1.length && i$2 < str2.length; i$2++) {
|
|
12489
|
-
if (str1[str1.length - (i$2 + 1)] != str2[str2.length - (i$2 + 1)]) {
|
|
12490
|
-
return str1.slice(-i$2);
|
|
12491
|
-
}
|
|
12492
|
-
}
|
|
12493
|
-
return str1.slice(-i$2);
|
|
12494
|
-
}
|
|
12495
|
-
function replacePrefix(string, oldPrefix, newPrefix) {
|
|
12496
|
-
if (string.slice(0, oldPrefix.length) != oldPrefix) {
|
|
12497
|
-
throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`);
|
|
12498
|
-
}
|
|
12499
|
-
return newPrefix + string.slice(oldPrefix.length);
|
|
12500
|
-
}
|
|
12501
|
-
function replaceSuffix(string, oldSuffix, newSuffix) {
|
|
12502
|
-
if (!oldSuffix) {
|
|
12503
|
-
return string + newSuffix;
|
|
12504
|
-
}
|
|
12505
|
-
if (string.slice(-oldSuffix.length) != oldSuffix) {
|
|
12506
|
-
throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`);
|
|
12507
|
-
}
|
|
12508
|
-
return string.slice(0, -oldSuffix.length) + newSuffix;
|
|
12509
|
-
}
|
|
12510
|
-
function removePrefix(string, oldPrefix) {
|
|
12511
|
-
return replacePrefix(string, oldPrefix, "");
|
|
12512
|
-
}
|
|
12513
|
-
function removeSuffix(string, oldSuffix) {
|
|
12514
|
-
return replaceSuffix(string, oldSuffix, "");
|
|
12515
|
-
}
|
|
12516
|
-
function maximumOverlap(string1, string2) {
|
|
12517
|
-
return string2.slice(0, overlapCount(string1, string2));
|
|
12518
|
-
}
|
|
12519
|
-
function overlapCount(a$1, b$2) {
|
|
12520
|
-
let startA = 0;
|
|
12521
|
-
if (a$1.length > b$2.length) {
|
|
12522
|
-
startA = a$1.length - b$2.length;
|
|
12523
|
-
}
|
|
12524
|
-
let endB = b$2.length;
|
|
12525
|
-
if (a$1.length < b$2.length) {
|
|
12526
|
-
endB = a$1.length;
|
|
12527
|
-
}
|
|
12528
|
-
const map = Array(endB);
|
|
12529
|
-
let k$2 = 0;
|
|
12530
|
-
map[0] = 0;
|
|
12531
|
-
for (let j$1 = 1; j$1 < endB; j$1++) {
|
|
12532
|
-
if (b$2[j$1] == b$2[k$2]) {
|
|
12533
|
-
map[j$1] = map[k$2];
|
|
12534
|
-
} else {
|
|
12535
|
-
map[j$1] = k$2;
|
|
12536
|
-
}
|
|
12537
|
-
while (k$2 > 0 && b$2[j$1] != b$2[k$2]) {
|
|
12538
|
-
k$2 = map[k$2];
|
|
12539
|
-
}
|
|
12540
|
-
if (b$2[j$1] == b$2[k$2]) {
|
|
12541
|
-
k$2++;
|
|
12542
|
-
}
|
|
12543
|
-
}
|
|
12544
|
-
k$2 = 0;
|
|
12545
|
-
for (let i$2 = startA; i$2 < a$1.length; i$2++) {
|
|
12546
|
-
while (k$2 > 0 && a$1[i$2] != b$2[k$2]) {
|
|
12547
|
-
k$2 = map[k$2];
|
|
12548
|
-
}
|
|
12549
|
-
if (a$1[i$2] == b$2[k$2]) {
|
|
12550
|
-
k$2++;
|
|
12551
|
-
}
|
|
12552
|
-
}
|
|
12553
|
-
return k$2;
|
|
12554
|
-
}
|
|
12555
|
-
/**
|
|
12556
|
-
* Returns true if the string consistently uses Windows line endings.
|
|
12557
|
-
*/
|
|
12558
|
-
function hasOnlyWinLineEndings(string) {
|
|
12559
|
-
return string.includes("\r\n") && !string.startsWith("\n") && !string.match(/[^\r]\n/);
|
|
12560
|
-
}
|
|
12561
|
-
/**
|
|
12562
|
-
* Returns true if the string consistently uses Unix line endings.
|
|
12563
|
-
*/
|
|
12564
|
-
function hasOnlyUnixLineEndings(string) {
|
|
12565
|
-
return !string.includes("\r\n") && string.includes("\n");
|
|
12566
|
-
}
|
|
12567
|
-
function trailingWs(string) {
|
|
12568
|
-
let i$2;
|
|
12569
|
-
for (i$2 = string.length - 1; i$2 >= 0; i$2--) {
|
|
12570
|
-
if (!string[i$2].match(/\s/)) {
|
|
12571
|
-
break;
|
|
12572
|
-
}
|
|
12573
|
-
}
|
|
12574
|
-
return string.substring(i$2 + 1);
|
|
12575
|
-
}
|
|
12576
|
-
function leadingWs(string) {
|
|
12577
|
-
const match = string.match(/^\s*/);
|
|
12578
|
-
return match ? match[0] : "";
|
|
12579
|
-
}
|
|
12580
|
-
|
|
12581
|
-
//#endregion
|
|
12582
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/word.js
|
|
12583
|
-
const extendedWordChars = "a-zA-Z0-9_\\u{AD}\\u{C0}-\\u{D6}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}";
|
|
12584
|
-
const tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\s+|[^${extendedWordChars}]`, "ug");
|
|
12585
|
-
var WordDiff = class extends Diff {
|
|
12586
|
-
equals(left, right, options) {
|
|
12587
|
-
if (options.ignoreCase) {
|
|
12588
|
-
left = left.toLowerCase();
|
|
12589
|
-
right = right.toLowerCase();
|
|
12590
|
-
}
|
|
12591
|
-
return left.trim() === right.trim();
|
|
12592
|
-
}
|
|
12593
|
-
tokenize(value, options = {}) {
|
|
12594
|
-
let parts;
|
|
12595
|
-
if (options.intlSegmenter) {
|
|
12596
|
-
const segmenter = options.intlSegmenter;
|
|
12597
|
-
if (segmenter.resolvedOptions().granularity != "word") {
|
|
12598
|
-
throw new Error("The segmenter passed must have a granularity of \"word\"");
|
|
12599
|
-
}
|
|
12600
|
-
parts = [];
|
|
12601
|
-
for (const segmentObj of Array.from(segmenter.segment(value))) {
|
|
12602
|
-
const segment = segmentObj.segment;
|
|
12603
|
-
if (parts.length && /\s/.test(parts[parts.length - 1]) && /\s/.test(segment)) {
|
|
12604
|
-
parts[parts.length - 1] += segment;
|
|
12605
|
-
} else {
|
|
12606
|
-
parts.push(segment);
|
|
12607
|
-
}
|
|
12608
|
-
}
|
|
12609
|
-
} else {
|
|
12610
|
-
parts = value.match(tokenizeIncludingWhitespace) || [];
|
|
12611
|
-
}
|
|
12612
|
-
const tokens = [];
|
|
12613
|
-
let prevPart = null;
|
|
12614
|
-
parts.forEach((part) => {
|
|
12615
|
-
if (/\s/.test(part)) {
|
|
12616
|
-
if (prevPart == null) {
|
|
12617
|
-
tokens.push(part);
|
|
12618
|
-
} else {
|
|
12619
|
-
tokens.push(tokens.pop() + part);
|
|
12620
|
-
}
|
|
12621
|
-
} else if (prevPart != null && /\s/.test(prevPart)) {
|
|
12622
|
-
if (tokens[tokens.length - 1] == prevPart) {
|
|
12623
|
-
tokens.push(tokens.pop() + part);
|
|
12624
|
-
} else {
|
|
12625
|
-
tokens.push(prevPart + part);
|
|
12626
|
-
}
|
|
12627
|
-
} else {
|
|
12628
|
-
tokens.push(part);
|
|
12629
|
-
}
|
|
12630
|
-
prevPart = part;
|
|
12631
|
-
});
|
|
12632
|
-
return tokens;
|
|
12633
|
-
}
|
|
12634
|
-
join(tokens) {
|
|
12635
|
-
return tokens.map((token$1, i$2) => {
|
|
12636
|
-
if (i$2 == 0) {
|
|
12637
|
-
return token$1;
|
|
12638
|
-
} else {
|
|
12639
|
-
return token$1.replace(/^\s+/, "");
|
|
12640
|
-
}
|
|
12641
|
-
}).join("");
|
|
12642
|
-
}
|
|
12643
|
-
postProcess(changes, options) {
|
|
12644
|
-
if (!changes || options.oneChangePerToken) {
|
|
12645
|
-
return changes;
|
|
12646
|
-
}
|
|
12647
|
-
let lastKeep = null;
|
|
12648
|
-
let insertion = null;
|
|
12649
|
-
let deletion = null;
|
|
12650
|
-
changes.forEach((change) => {
|
|
12651
|
-
if (change.added) {
|
|
12652
|
-
insertion = change;
|
|
12653
|
-
} else if (change.removed) {
|
|
12654
|
-
deletion = change;
|
|
12655
|
-
} else {
|
|
12656
|
-
if (insertion || deletion) {
|
|
12657
|
-
dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
|
|
12658
|
-
}
|
|
12659
|
-
lastKeep = change;
|
|
12660
|
-
insertion = null;
|
|
12661
|
-
deletion = null;
|
|
12662
|
-
}
|
|
12663
|
-
});
|
|
12664
|
-
if (insertion || deletion) {
|
|
12665
|
-
dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
|
|
12666
|
-
}
|
|
12667
|
-
return changes;
|
|
12668
|
-
}
|
|
12669
|
-
};
|
|
12670
|
-
const wordDiff = new WordDiff();
|
|
12671
|
-
function diffWords(oldStr, newStr, options) {
|
|
12672
|
-
if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
|
|
12673
|
-
return diffWordsWithSpace(oldStr, newStr, options);
|
|
12674
|
-
}
|
|
12675
|
-
return wordDiff.diff(oldStr, newStr, options);
|
|
12676
|
-
}
|
|
12677
|
-
function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
|
|
12678
|
-
if (deletion && insertion) {
|
|
12679
|
-
const oldWsPrefix = leadingWs(deletion.value);
|
|
12680
|
-
const oldWsSuffix = trailingWs(deletion.value);
|
|
12681
|
-
const newWsPrefix = leadingWs(insertion.value);
|
|
12682
|
-
const newWsSuffix = trailingWs(insertion.value);
|
|
12683
|
-
if (startKeep) {
|
|
12684
|
-
const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
|
|
12685
|
-
startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
|
|
12686
|
-
deletion.value = removePrefix(deletion.value, commonWsPrefix);
|
|
12687
|
-
insertion.value = removePrefix(insertion.value, commonWsPrefix);
|
|
12688
|
-
}
|
|
12689
|
-
if (endKeep) {
|
|
12690
|
-
const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
|
|
12691
|
-
endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
|
|
12692
|
-
deletion.value = removeSuffix(deletion.value, commonWsSuffix);
|
|
12693
|
-
insertion.value = removeSuffix(insertion.value, commonWsSuffix);
|
|
12694
|
-
}
|
|
12695
|
-
} else if (insertion) {
|
|
12696
|
-
if (startKeep) {
|
|
12697
|
-
const ws = leadingWs(insertion.value);
|
|
12698
|
-
insertion.value = insertion.value.substring(ws.length);
|
|
12699
|
-
}
|
|
12700
|
-
if (endKeep) {
|
|
12701
|
-
const ws = leadingWs(endKeep.value);
|
|
12702
|
-
endKeep.value = endKeep.value.substring(ws.length);
|
|
12703
|
-
}
|
|
12704
|
-
} else if (startKeep && endKeep) {
|
|
12705
|
-
const newWsFull = leadingWs(endKeep.value), delWsStart = leadingWs(deletion.value), delWsEnd = trailingWs(deletion.value);
|
|
12706
|
-
const newWsStart = longestCommonPrefix(newWsFull, delWsStart);
|
|
12707
|
-
deletion.value = removePrefix(deletion.value, newWsStart);
|
|
12708
|
-
const newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
|
|
12709
|
-
deletion.value = removeSuffix(deletion.value, newWsEnd);
|
|
12710
|
-
endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
|
|
12711
|
-
startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
|
|
12712
|
-
} else if (endKeep) {
|
|
12713
|
-
const endKeepWsPrefix = leadingWs(endKeep.value);
|
|
12714
|
-
const deletionWsSuffix = trailingWs(deletion.value);
|
|
12715
|
-
const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
|
|
12716
|
-
deletion.value = removeSuffix(deletion.value, overlap);
|
|
12717
|
-
} else if (startKeep) {
|
|
12718
|
-
const startKeepWsSuffix = trailingWs(startKeep.value);
|
|
12719
|
-
const deletionWsPrefix = leadingWs(deletion.value);
|
|
12720
|
-
const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
|
|
12721
|
-
deletion.value = removePrefix(deletion.value, overlap);
|
|
12722
|
-
}
|
|
12723
|
-
}
|
|
12724
|
-
var WordsWithSpaceDiff = class extends Diff {
|
|
12725
|
-
tokenize(value) {
|
|
12726
|
-
const regex = new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`, "ug");
|
|
12727
|
-
return value.match(regex) || [];
|
|
12728
|
-
}
|
|
12729
|
-
};
|
|
12730
|
-
const wordsWithSpaceDiff = new WordsWithSpaceDiff();
|
|
12731
|
-
function diffWordsWithSpace(oldStr, newStr, options) {
|
|
12732
|
-
return wordsWithSpaceDiff.diff(oldStr, newStr, options);
|
|
12733
|
-
}
|
|
12734
|
-
|
|
12735
|
-
//#endregion
|
|
12736
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/util/params.js
|
|
12737
|
-
function generateOptions(options, defaults) {
|
|
12738
|
-
if (typeof options === "function") {
|
|
12739
|
-
defaults.callback = options;
|
|
12740
|
-
} else if (options) {
|
|
12741
|
-
for (const name in options) {
|
|
12742
|
-
/* istanbul ignore else */
|
|
12743
|
-
if (Object.prototype.hasOwnProperty.call(options, name)) {
|
|
12744
|
-
defaults[name] = options[name];
|
|
12745
|
-
}
|
|
12746
|
-
}
|
|
12747
|
-
}
|
|
12748
|
-
return defaults;
|
|
12749
|
-
}
|
|
12750
|
-
|
|
12751
|
-
//#endregion
|
|
12752
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/line.js
|
|
12753
|
-
var LineDiff = class extends Diff {
|
|
12754
|
-
constructor() {
|
|
12755
|
-
super(...arguments);
|
|
12756
|
-
this.tokenize = tokenize;
|
|
12757
|
-
}
|
|
12758
|
-
equals(left, right, options) {
|
|
12759
|
-
if (options.ignoreWhitespace) {
|
|
12760
|
-
if (!options.newlineIsToken || !left.includes("\n")) {
|
|
12761
|
-
left = left.trim();
|
|
12762
|
-
}
|
|
12763
|
-
if (!options.newlineIsToken || !right.includes("\n")) {
|
|
12764
|
-
right = right.trim();
|
|
12765
|
-
}
|
|
12766
|
-
} else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
|
|
12767
|
-
if (left.endsWith("\n")) {
|
|
12768
|
-
left = left.slice(0, -1);
|
|
12769
|
-
}
|
|
12770
|
-
if (right.endsWith("\n")) {
|
|
12771
|
-
right = right.slice(0, -1);
|
|
12772
|
-
}
|
|
12773
|
-
}
|
|
12774
|
-
return super.equals(left, right, options);
|
|
12775
|
-
}
|
|
12776
|
-
};
|
|
12777
|
-
const lineDiff = new LineDiff();
|
|
12778
|
-
function diffLines(oldStr, newStr, options) {
|
|
12779
|
-
return lineDiff.diff(oldStr, newStr, options);
|
|
12780
|
-
}
|
|
12781
|
-
function diffTrimmedLines(oldStr, newStr, options) {
|
|
12782
|
-
options = generateOptions(options, { ignoreWhitespace: true });
|
|
12783
|
-
return lineDiff.diff(oldStr, newStr, options);
|
|
12784
|
-
}
|
|
12785
|
-
function tokenize(value, options) {
|
|
12786
|
-
if (options.stripTrailingCr) {
|
|
12787
|
-
value = value.replace(/\r\n/g, "\n");
|
|
12788
|
-
}
|
|
12789
|
-
const retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/);
|
|
12790
|
-
if (!linesAndNewlines[linesAndNewlines.length - 1]) {
|
|
12791
|
-
linesAndNewlines.pop();
|
|
12792
|
-
}
|
|
12793
|
-
for (let i$2 = 0; i$2 < linesAndNewlines.length; i$2++) {
|
|
12794
|
-
const line = linesAndNewlines[i$2];
|
|
12795
|
-
if (i$2 % 2 && !options.newlineIsToken) {
|
|
12796
|
-
retLines[retLines.length - 1] += line;
|
|
12797
|
-
} else {
|
|
12798
|
-
retLines.push(line);
|
|
12799
|
-
}
|
|
12800
|
-
}
|
|
12801
|
-
return retLines;
|
|
12802
|
-
}
|
|
12803
|
-
|
|
12804
|
-
//#endregion
|
|
12805
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/sentence.js
|
|
12806
|
-
function isSentenceEndPunct(char) {
|
|
12807
|
-
return char == "." || char == "!" || char == "?";
|
|
12808
|
-
}
|
|
12809
|
-
var SentenceDiff = class extends Diff {
|
|
12810
|
-
tokenize(value) {
|
|
12811
|
-
var _a;
|
|
12812
|
-
const result = [];
|
|
12813
|
-
let tokenStartI = 0;
|
|
12814
|
-
for (let i$2 = 0; i$2 < value.length; i$2++) {
|
|
12815
|
-
if (i$2 == value.length - 1) {
|
|
12816
|
-
result.push(value.slice(tokenStartI));
|
|
12817
|
-
break;
|
|
12818
|
-
}
|
|
12819
|
-
if (isSentenceEndPunct(value[i$2]) && value[i$2 + 1].match(/\s/)) {
|
|
12820
|
-
result.push(value.slice(tokenStartI, i$2 + 1));
|
|
12821
|
-
i$2 = tokenStartI = i$2 + 1;
|
|
12822
|
-
while ((_a = value[i$2 + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) {
|
|
12823
|
-
i$2++;
|
|
12824
|
-
}
|
|
12825
|
-
result.push(value.slice(tokenStartI, i$2 + 1));
|
|
12826
|
-
tokenStartI = i$2 + 1;
|
|
12827
|
-
}
|
|
12828
|
-
}
|
|
12829
|
-
return result;
|
|
12830
|
-
}
|
|
12831
|
-
};
|
|
12832
|
-
const sentenceDiff = new SentenceDiff();
|
|
12833
|
-
function diffSentences(oldStr, newStr, options) {
|
|
12834
|
-
return sentenceDiff.diff(oldStr, newStr, options);
|
|
12835
|
-
}
|
|
12836
|
-
|
|
12837
|
-
//#endregion
|
|
12838
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/css.js
|
|
12839
|
-
var CssDiff = class extends Diff {
|
|
12840
|
-
tokenize(value) {
|
|
12841
|
-
return value.split(/([{}:;,]|\s+)/);
|
|
12842
|
-
}
|
|
12843
|
-
};
|
|
12844
|
-
const cssDiff = new CssDiff();
|
|
12845
|
-
function diffCss(oldStr, newStr, options) {
|
|
12846
|
-
return cssDiff.diff(oldStr, newStr, options);
|
|
12847
|
-
}
|
|
12848
|
-
|
|
12849
|
-
//#endregion
|
|
12850
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/json.js
|
|
12851
|
-
var JsonDiff = class extends Diff {
|
|
12852
|
-
constructor() {
|
|
12853
|
-
super(...arguments);
|
|
12854
|
-
this.tokenize = tokenize;
|
|
12855
|
-
}
|
|
12856
|
-
get useLongestToken() {
|
|
12857
|
-
return true;
|
|
12858
|
-
}
|
|
12859
|
-
castInput(value, options) {
|
|
12860
|
-
const { undefinedReplacement, stringifyReplacer = (k$2, v$1) => typeof v$1 === "undefined" ? undefinedReplacement : v$1 } = options;
|
|
12861
|
-
return typeof value === "string" ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, " ");
|
|
12862
|
-
}
|
|
12863
|
-
equals(left, right, options) {
|
|
12864
|
-
return super.equals(left.replace(/,([\r\n])/g, "$1"), right.replace(/,([\r\n])/g, "$1"), options);
|
|
12865
|
-
}
|
|
12866
|
-
};
|
|
12867
|
-
const jsonDiff = new JsonDiff();
|
|
12868
|
-
function diffJson(oldStr, newStr, options) {
|
|
12869
|
-
return jsonDiff.diff(oldStr, newStr, options);
|
|
12870
|
-
}
|
|
12871
|
-
function canonicalize(obj, stack, replacementStack, replacer, key$1) {
|
|
12872
|
-
stack = stack || [];
|
|
12873
|
-
replacementStack = replacementStack || [];
|
|
12874
|
-
if (replacer) {
|
|
12875
|
-
obj = replacer(key$1 === undefined ? "" : key$1, obj);
|
|
12876
|
-
}
|
|
12877
|
-
let i$2;
|
|
12878
|
-
for (i$2 = 0; i$2 < stack.length; i$2 += 1) {
|
|
12879
|
-
if (stack[i$2] === obj) {
|
|
12880
|
-
return replacementStack[i$2];
|
|
12881
|
-
}
|
|
12882
|
-
}
|
|
12883
|
-
let canonicalizedObj;
|
|
12884
|
-
if ("[object Array]" === Object.prototype.toString.call(obj)) {
|
|
12885
|
-
stack.push(obj);
|
|
12886
|
-
canonicalizedObj = new Array(obj.length);
|
|
12887
|
-
replacementStack.push(canonicalizedObj);
|
|
12888
|
-
for (i$2 = 0; i$2 < obj.length; i$2 += 1) {
|
|
12889
|
-
canonicalizedObj[i$2] = canonicalize(obj[i$2], stack, replacementStack, replacer, String(i$2));
|
|
12890
|
-
}
|
|
12891
|
-
stack.pop();
|
|
12892
|
-
replacementStack.pop();
|
|
12893
|
-
return canonicalizedObj;
|
|
12894
|
-
}
|
|
12895
|
-
if (obj && obj.toJSON) {
|
|
12896
|
-
obj = obj.toJSON();
|
|
12897
|
-
}
|
|
12898
|
-
if (typeof obj === "object" && obj !== null) {
|
|
12899
|
-
stack.push(obj);
|
|
12900
|
-
canonicalizedObj = {};
|
|
12901
|
-
replacementStack.push(canonicalizedObj);
|
|
12902
|
-
const sortedKeys = [];
|
|
12903
|
-
let key$2;
|
|
12904
|
-
for (key$2 in obj) {
|
|
12905
|
-
/* istanbul ignore else */
|
|
12906
|
-
if (Object.prototype.hasOwnProperty.call(obj, key$2)) {
|
|
12907
|
-
sortedKeys.push(key$2);
|
|
12908
|
-
}
|
|
12909
|
-
}
|
|
12910
|
-
sortedKeys.sort();
|
|
12911
|
-
for (i$2 = 0; i$2 < sortedKeys.length; i$2 += 1) {
|
|
12912
|
-
key$2 = sortedKeys[i$2];
|
|
12913
|
-
canonicalizedObj[key$2] = canonicalize(obj[key$2], stack, replacementStack, replacer, key$2);
|
|
12914
|
-
}
|
|
12915
|
-
stack.pop();
|
|
12916
|
-
replacementStack.pop();
|
|
12917
|
-
} else {
|
|
12918
|
-
canonicalizedObj = obj;
|
|
12919
|
-
}
|
|
12920
|
-
return canonicalizedObj;
|
|
12921
|
-
}
|
|
12922
|
-
|
|
12923
|
-
//#endregion
|
|
12924
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/array.js
|
|
12925
|
-
var ArrayDiff = class extends Diff {
|
|
12926
|
-
tokenize(value) {
|
|
12927
|
-
return value.slice();
|
|
12928
|
-
}
|
|
12929
|
-
join(value) {
|
|
12930
|
-
return value;
|
|
12931
|
-
}
|
|
12932
|
-
removeEmpty(value) {
|
|
12933
|
-
return value;
|
|
12934
|
-
}
|
|
12935
|
-
};
|
|
12936
|
-
const arrayDiff = new ArrayDiff();
|
|
12937
|
-
function diffArrays(oldArr, newArr, options) {
|
|
12938
|
-
return arrayDiff.diff(oldArr, newArr, options);
|
|
12939
|
-
}
|
|
12940
|
-
|
|
12941
|
-
//#endregion
|
|
12942
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/line-endings.js
|
|
12943
|
-
function unixToWin(patch) {
|
|
12944
|
-
if (Array.isArray(patch)) {
|
|
12945
|
-
return patch.map((p$1) => unixToWin(p$1));
|
|
12946
|
-
}
|
|
12947
|
-
return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map((hunk) => Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line, i$2) => {
|
|
12948
|
-
var _a;
|
|
12949
|
-
return line.startsWith("\\") || line.endsWith("\r") || ((_a = hunk.lines[i$2 + 1]) === null || _a === void 0 ? void 0 : _a.startsWith("\\")) ? line : line + "\r";
|
|
12950
|
-
}) })) });
|
|
12951
|
-
}
|
|
12952
|
-
function winToUnix(patch) {
|
|
12953
|
-
if (Array.isArray(patch)) {
|
|
12954
|
-
return patch.map((p$1) => winToUnix(p$1));
|
|
12955
|
-
}
|
|
12956
|
-
return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map((hunk) => Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line) => line.endsWith("\r") ? line.substring(0, line.length - 1) : line) })) });
|
|
12957
|
-
}
|
|
12958
|
-
/**
|
|
12959
|
-
* Returns true if the patch consistently uses Unix line endings (or only involves one line and has
|
|
12960
|
-
* no line endings).
|
|
12961
|
-
*/
|
|
12962
|
-
function isUnix(patch) {
|
|
12963
|
-
if (!Array.isArray(patch)) {
|
|
12964
|
-
patch = [patch];
|
|
12965
|
-
}
|
|
12966
|
-
return !patch.some((index) => index.hunks.some((hunk) => hunk.lines.some((line) => !line.startsWith("\\") && line.endsWith("\r"))));
|
|
12967
|
-
}
|
|
12968
|
-
/**
|
|
12969
|
-
* Returns true if the patch uses Windows line endings and only Windows line endings.
|
|
12970
|
-
*/
|
|
12971
|
-
function isWin(patch) {
|
|
12972
|
-
if (!Array.isArray(patch)) {
|
|
12973
|
-
patch = [patch];
|
|
12974
|
-
}
|
|
12975
|
-
return patch.some((index) => index.hunks.some((hunk) => hunk.lines.some((line) => line.endsWith("\r")))) && patch.every((index) => index.hunks.every((hunk) => hunk.lines.every((line, i$2) => {
|
|
12976
|
-
var _a;
|
|
12977
|
-
return line.startsWith("\\") || line.endsWith("\r") || ((_a = hunk.lines[i$2 + 1]) === null || _a === void 0 ? void 0 : _a.startsWith("\\"));
|
|
12978
|
-
})));
|
|
12979
|
-
}
|
|
12980
|
-
|
|
12981
|
-
//#endregion
|
|
12982
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/parse.js
|
|
12983
|
-
/**
|
|
12984
|
-
* Parses a patch into structured data, in the same structure returned by `structuredPatch`.
|
|
12985
|
-
*
|
|
12986
|
-
* @return a JSON object representation of the a patch, suitable for use with the `applyPatch` method.
|
|
12987
|
-
*/
|
|
12988
|
-
function parsePatch(uniDiff) {
|
|
12989
|
-
const diffstr = uniDiff.split(/\n/), list = [];
|
|
12990
|
-
let i$2 = 0;
|
|
12991
|
-
function parseIndex() {
|
|
12992
|
-
const index = {};
|
|
12993
|
-
list.push(index);
|
|
12994
|
-
while (i$2 < diffstr.length) {
|
|
12995
|
-
const line = diffstr[i$2];
|
|
12996
|
-
if (/^(---|\+\+\+|@@)\s/.test(line)) {
|
|
12997
|
-
break;
|
|
12998
|
-
}
|
|
12999
|
-
const headerMatch = /^(?:Index:|diff(?: -r \w+)+)\s+/.exec(line);
|
|
13000
|
-
if (headerMatch) {
|
|
13001
|
-
index.index = line.substring(headerMatch[0].length).trim();
|
|
13002
|
-
}
|
|
13003
|
-
i$2++;
|
|
13004
|
-
}
|
|
13005
|
-
parseFileHeader(index);
|
|
13006
|
-
parseFileHeader(index);
|
|
13007
|
-
index.hunks = [];
|
|
13008
|
-
while (i$2 < diffstr.length) {
|
|
13009
|
-
const line = diffstr[i$2];
|
|
13010
|
-
if (/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/.test(line)) {
|
|
13011
|
-
break;
|
|
13012
|
-
} else if (/^@@/.test(line)) {
|
|
13013
|
-
index.hunks.push(parseHunk());
|
|
13014
|
-
} else if (line) {
|
|
13015
|
-
throw new Error("Unknown line " + (i$2 + 1) + " " + JSON.stringify(line));
|
|
13016
|
-
} else {
|
|
13017
|
-
i$2++;
|
|
13018
|
-
}
|
|
13019
|
-
}
|
|
13020
|
-
}
|
|
13021
|
-
function parseFileHeader(index) {
|
|
13022
|
-
const fileHeaderMatch = /^(---|\+\+\+)\s+/.exec(diffstr[i$2]);
|
|
13023
|
-
if (fileHeaderMatch) {
|
|
13024
|
-
const prefix = fileHeaderMatch[1], data = diffstr[i$2].substring(3).trim().split(" ", 2), header = (data[1] || "").trim();
|
|
13025
|
-
let fileName = data[0].replace(/\\\\/g, "\\");
|
|
13026
|
-
if (fileName.startsWith("\"") && fileName.endsWith("\"")) {
|
|
13027
|
-
fileName = fileName.substr(1, fileName.length - 2);
|
|
13028
|
-
}
|
|
13029
|
-
if (prefix === "---") {
|
|
13030
|
-
index.oldFileName = fileName;
|
|
13031
|
-
index.oldHeader = header;
|
|
13032
|
-
} else {
|
|
13033
|
-
index.newFileName = fileName;
|
|
13034
|
-
index.newHeader = header;
|
|
13035
|
-
}
|
|
13036
|
-
i$2++;
|
|
13037
|
-
}
|
|
13038
|
-
}
|
|
13039
|
-
function parseHunk() {
|
|
13040
|
-
var _a;
|
|
13041
|
-
const chunkHeaderIndex = i$2, chunkHeaderLine = diffstr[i$2++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
|
|
13042
|
-
const hunk = {
|
|
13043
|
-
oldStart: +chunkHeader[1],
|
|
13044
|
-
oldLines: typeof chunkHeader[2] === "undefined" ? 1 : +chunkHeader[2],
|
|
13045
|
-
newStart: +chunkHeader[3],
|
|
13046
|
-
newLines: typeof chunkHeader[4] === "undefined" ? 1 : +chunkHeader[4],
|
|
13047
|
-
lines: []
|
|
13048
|
-
};
|
|
13049
|
-
if (hunk.oldLines === 0) {
|
|
13050
|
-
hunk.oldStart += 1;
|
|
13051
|
-
}
|
|
13052
|
-
if (hunk.newLines === 0) {
|
|
13053
|
-
hunk.newStart += 1;
|
|
13054
|
-
}
|
|
13055
|
-
let addCount = 0, removeCount = 0;
|
|
13056
|
-
for (; i$2 < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i$2]) === null || _a === void 0 ? void 0 : _a.startsWith("\\"))); i$2++) {
|
|
13057
|
-
const operation = diffstr[i$2].length == 0 && i$2 != diffstr.length - 1 ? " " : diffstr[i$2][0];
|
|
13058
|
-
if (operation === "+" || operation === "-" || operation === " " || operation === "\\") {
|
|
13059
|
-
hunk.lines.push(diffstr[i$2]);
|
|
13060
|
-
if (operation === "+") {
|
|
13061
|
-
addCount++;
|
|
13062
|
-
} else if (operation === "-") {
|
|
13063
|
-
removeCount++;
|
|
13064
|
-
} else if (operation === " ") {
|
|
13065
|
-
addCount++;
|
|
13066
|
-
removeCount++;
|
|
13067
|
-
}
|
|
13068
|
-
} else {
|
|
13069
|
-
throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i$2]}`);
|
|
13070
|
-
}
|
|
13071
|
-
}
|
|
13072
|
-
if (!addCount && hunk.newLines === 1) {
|
|
13073
|
-
hunk.newLines = 0;
|
|
13074
|
-
}
|
|
13075
|
-
if (!removeCount && hunk.oldLines === 1) {
|
|
13076
|
-
hunk.oldLines = 0;
|
|
13077
|
-
}
|
|
13078
|
-
if (addCount !== hunk.newLines) {
|
|
13079
|
-
throw new Error("Added line count did not match for hunk at line " + (chunkHeaderIndex + 1));
|
|
13080
|
-
}
|
|
13081
|
-
if (removeCount !== hunk.oldLines) {
|
|
13082
|
-
throw new Error("Removed line count did not match for hunk at line " + (chunkHeaderIndex + 1));
|
|
13083
|
-
}
|
|
13084
|
-
return hunk;
|
|
13085
|
-
}
|
|
13086
|
-
while (i$2 < diffstr.length) {
|
|
13087
|
-
parseIndex();
|
|
13088
|
-
}
|
|
13089
|
-
return list;
|
|
13090
|
-
}
|
|
13091
|
-
|
|
13092
|
-
//#endregion
|
|
13093
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/util/distance-iterator.js
|
|
13094
|
-
function distance_iterator_default(start, minLine, maxLine) {
|
|
13095
|
-
let wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1;
|
|
13096
|
-
return function iterator() {
|
|
13097
|
-
if (wantForward && !forwardExhausted) {
|
|
13098
|
-
if (backwardExhausted) {
|
|
13099
|
-
localOffset++;
|
|
13100
|
-
} else {
|
|
13101
|
-
wantForward = false;
|
|
13102
|
-
}
|
|
13103
|
-
if (start + localOffset <= maxLine) {
|
|
13104
|
-
return start + localOffset;
|
|
13105
|
-
}
|
|
13106
|
-
forwardExhausted = true;
|
|
13107
|
-
}
|
|
13108
|
-
if (!backwardExhausted) {
|
|
13109
|
-
if (!forwardExhausted) {
|
|
13110
|
-
wantForward = true;
|
|
13111
|
-
}
|
|
13112
|
-
if (minLine <= start - localOffset) {
|
|
13113
|
-
return start - localOffset++;
|
|
13114
|
-
}
|
|
13115
|
-
backwardExhausted = true;
|
|
13116
|
-
return iterator();
|
|
13117
|
-
}
|
|
13118
|
-
return undefined;
|
|
13119
|
-
};
|
|
13120
|
-
}
|
|
13121
|
-
|
|
13122
|
-
//#endregion
|
|
13123
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/apply.js
|
|
13124
|
-
/**
|
|
13125
|
-
* attempts to apply a unified diff patch.
|
|
13126
|
-
*
|
|
13127
|
-
* Hunks are applied first to last.
|
|
13128
|
-
* `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly.
|
|
13129
|
-
* If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly.
|
|
13130
|
-
* If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match.
|
|
13131
|
-
* Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly.
|
|
13132
|
-
*
|
|
13133
|
-
* Once a hunk is successfully fitted, the process begins again with the next hunk.
|
|
13134
|
-
* Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks.
|
|
13135
|
-
*
|
|
13136
|
-
* If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`.
|
|
13137
|
-
*
|
|
13138
|
-
* If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly.
|
|
13139
|
-
* (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.)
|
|
13140
|
-
*
|
|
13141
|
-
* If the patch was applied successfully, returns a string containing the patched text.
|
|
13142
|
-
* If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false.
|
|
13143
|
-
*
|
|
13144
|
-
* @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods.
|
|
13145
|
-
*/
|
|
13146
|
-
function applyPatch(source, patch, options = {}) {
|
|
13147
|
-
let patches;
|
|
13148
|
-
if (typeof patch === "string") {
|
|
13149
|
-
patches = parsePatch(patch);
|
|
13150
|
-
} else if (Array.isArray(patch)) {
|
|
13151
|
-
patches = patch;
|
|
13152
|
-
} else {
|
|
13153
|
-
patches = [patch];
|
|
13154
|
-
}
|
|
13155
|
-
if (patches.length > 1) {
|
|
13156
|
-
throw new Error("applyPatch only works with a single input.");
|
|
13157
|
-
}
|
|
13158
|
-
return applyStructuredPatch(source, patches[0], options);
|
|
13159
|
-
}
|
|
13160
|
-
function applyStructuredPatch(source, patch, options = {}) {
|
|
13161
|
-
if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
|
|
13162
|
-
if (hasOnlyWinLineEndings(source) && isUnix(patch)) {
|
|
13163
|
-
patch = unixToWin(patch);
|
|
13164
|
-
} else if (hasOnlyUnixLineEndings(source) && isWin(patch)) {
|
|
13165
|
-
patch = winToUnix(patch);
|
|
13166
|
-
}
|
|
13167
|
-
}
|
|
13168
|
-
const lines = source.split("\n"), hunks = patch.hunks, compareLine = options.compareLine || ((lineNumber, line, operation, patchContent) => line === patchContent), fuzzFactor = options.fuzzFactor || 0;
|
|
13169
|
-
let minLine = 0;
|
|
13170
|
-
if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
|
|
13171
|
-
throw new Error("fuzzFactor must be a non-negative integer");
|
|
13172
|
-
}
|
|
13173
|
-
if (!hunks.length) {
|
|
13174
|
-
return source;
|
|
13175
|
-
}
|
|
13176
|
-
let prevLine = "", removeEOFNL = false, addEOFNL = false;
|
|
13177
|
-
for (let i$2 = 0; i$2 < hunks[hunks.length - 1].lines.length; i$2++) {
|
|
13178
|
-
const line = hunks[hunks.length - 1].lines[i$2];
|
|
13179
|
-
if (line[0] == "\\") {
|
|
13180
|
-
if (prevLine[0] == "+") {
|
|
13181
|
-
removeEOFNL = true;
|
|
13182
|
-
} else if (prevLine[0] == "-") {
|
|
13183
|
-
addEOFNL = true;
|
|
13184
|
-
}
|
|
13185
|
-
}
|
|
13186
|
-
prevLine = line;
|
|
13187
|
-
}
|
|
13188
|
-
if (removeEOFNL) {
|
|
13189
|
-
if (addEOFNL) {
|
|
13190
|
-
if (!fuzzFactor && lines[lines.length - 1] == "") {
|
|
13191
|
-
return false;
|
|
13192
|
-
}
|
|
13193
|
-
} else if (lines[lines.length - 1] == "") {
|
|
13194
|
-
lines.pop();
|
|
13195
|
-
} else if (!fuzzFactor) {
|
|
13196
|
-
return false;
|
|
13197
|
-
}
|
|
13198
|
-
} else if (addEOFNL) {
|
|
13199
|
-
if (lines[lines.length - 1] != "") {
|
|
13200
|
-
lines.push("");
|
|
13201
|
-
} else if (!fuzzFactor) {
|
|
13202
|
-
return false;
|
|
13203
|
-
}
|
|
13204
|
-
}
|
|
13205
|
-
/**
|
|
13206
|
-
* Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
|
|
13207
|
-
* insertions, substitutions, or deletions, while ensuring also that:
|
|
13208
|
-
* - lines deleted in the hunk match exactly, and
|
|
13209
|
-
* - wherever an insertion operation or block of insertion operations appears in the hunk, the
|
|
13210
|
-
* immediately preceding and following lines of context match exactly
|
|
13211
|
-
*
|
|
13212
|
-
* `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
|
|
13213
|
-
*
|
|
13214
|
-
* If the hunk can be applied, returns an object with properties `oldLineLastI` and
|
|
13215
|
-
* `replacementLines`. Otherwise, returns null.
|
|
13216
|
-
*/
|
|
13217
|
-
function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI = 0, lastContextLineMatched = true, patchedLines = [], patchedLinesLength = 0) {
|
|
13218
|
-
let nConsecutiveOldContextLines = 0;
|
|
13219
|
-
let nextContextLineMustMatch = false;
|
|
13220
|
-
for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
|
|
13221
|
-
const hunkLine = hunkLines[hunkLinesI], operation = hunkLine.length > 0 ? hunkLine[0] : " ", content = hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine;
|
|
13222
|
-
if (operation === "-") {
|
|
13223
|
-
if (compareLine(toPos + 1, lines[toPos], operation, content)) {
|
|
13224
|
-
toPos++;
|
|
13225
|
-
nConsecutiveOldContextLines = 0;
|
|
13226
|
-
} else {
|
|
13227
|
-
if (!maxErrors || lines[toPos] == null) {
|
|
13228
|
-
return null;
|
|
13229
|
-
}
|
|
13230
|
-
patchedLines[patchedLinesLength] = lines[toPos];
|
|
13231
|
-
return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
|
|
13232
|
-
}
|
|
13233
|
-
}
|
|
13234
|
-
if (operation === "+") {
|
|
13235
|
-
if (!lastContextLineMatched) {
|
|
13236
|
-
return null;
|
|
13237
|
-
}
|
|
13238
|
-
patchedLines[patchedLinesLength] = content;
|
|
13239
|
-
patchedLinesLength++;
|
|
13240
|
-
nConsecutiveOldContextLines = 0;
|
|
13241
|
-
nextContextLineMustMatch = true;
|
|
13242
|
-
}
|
|
13243
|
-
if (operation === " ") {
|
|
13244
|
-
nConsecutiveOldContextLines++;
|
|
13245
|
-
patchedLines[patchedLinesLength] = lines[toPos];
|
|
13246
|
-
if (compareLine(toPos + 1, lines[toPos], operation, content)) {
|
|
13247
|
-
patchedLinesLength++;
|
|
13248
|
-
lastContextLineMatched = true;
|
|
13249
|
-
nextContextLineMustMatch = false;
|
|
13250
|
-
toPos++;
|
|
13251
|
-
} else {
|
|
13252
|
-
if (nextContextLineMustMatch || !maxErrors) {
|
|
13253
|
-
return null;
|
|
13254
|
-
}
|
|
13255
|
-
return lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength);
|
|
13256
|
-
}
|
|
13257
|
-
}
|
|
13258
|
-
}
|
|
13259
|
-
patchedLinesLength -= nConsecutiveOldContextLines;
|
|
13260
|
-
toPos -= nConsecutiveOldContextLines;
|
|
13261
|
-
patchedLines.length = patchedLinesLength;
|
|
13262
|
-
return {
|
|
13263
|
-
patchedLines,
|
|
13264
|
-
oldLineLastI: toPos - 1
|
|
13265
|
-
};
|
|
13266
|
-
}
|
|
13267
|
-
const resultLines = [];
|
|
13268
|
-
let prevHunkOffset = 0;
|
|
13269
|
-
for (let i$2 = 0; i$2 < hunks.length; i$2++) {
|
|
13270
|
-
const hunk = hunks[i$2];
|
|
13271
|
-
let hunkResult;
|
|
13272
|
-
const maxLine = lines.length - hunk.oldLines + fuzzFactor;
|
|
13273
|
-
let toPos;
|
|
13274
|
-
for (let maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
|
|
13275
|
-
toPos = hunk.oldStart + prevHunkOffset - 1;
|
|
13276
|
-
const iterator = distance_iterator_default(toPos, minLine, maxLine);
|
|
13277
|
-
for (; toPos !== undefined; toPos = iterator()) {
|
|
13278
|
-
hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
|
|
13279
|
-
if (hunkResult) {
|
|
13280
|
-
break;
|
|
13281
|
-
}
|
|
13282
|
-
}
|
|
13283
|
-
if (hunkResult) {
|
|
13284
|
-
break;
|
|
13285
|
-
}
|
|
13286
|
-
}
|
|
13287
|
-
if (!hunkResult) {
|
|
13288
|
-
return false;
|
|
13289
|
-
}
|
|
13290
|
-
for (let i$3 = minLine; i$3 < toPos; i$3++) {
|
|
13291
|
-
resultLines.push(lines[i$3]);
|
|
13292
|
-
}
|
|
13293
|
-
for (let i$3 = 0; i$3 < hunkResult.patchedLines.length; i$3++) {
|
|
13294
|
-
const line = hunkResult.patchedLines[i$3];
|
|
13295
|
-
resultLines.push(line);
|
|
13296
|
-
}
|
|
13297
|
-
minLine = hunkResult.oldLineLastI + 1;
|
|
13298
|
-
prevHunkOffset = toPos + 1 - hunk.oldStart;
|
|
13299
|
-
}
|
|
13300
|
-
for (let i$2 = minLine; i$2 < lines.length; i$2++) {
|
|
13301
|
-
resultLines.push(lines[i$2]);
|
|
13302
|
-
}
|
|
13303
|
-
return resultLines.join("\n");
|
|
13304
|
-
}
|
|
13305
|
-
/**
|
|
13306
|
-
* applies one or more patches.
|
|
13307
|
-
*
|
|
13308
|
-
* `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files).
|
|
13309
|
-
*
|
|
13310
|
-
* This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is:
|
|
13311
|
-
*
|
|
13312
|
-
* - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
|
|
13313
|
-
* - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution.
|
|
13314
|
-
*
|
|
13315
|
-
* Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
|
|
13316
|
-
*/
|
|
13317
|
-
function applyPatches(uniDiff, options) {
|
|
13318
|
-
const spDiff = typeof uniDiff === "string" ? parsePatch(uniDiff) : uniDiff;
|
|
13319
|
-
let currentIndex = 0;
|
|
13320
|
-
function processIndex() {
|
|
13321
|
-
const index = spDiff[currentIndex++];
|
|
13322
|
-
if (!index) {
|
|
13323
|
-
return options.complete();
|
|
13324
|
-
}
|
|
13325
|
-
options.loadFile(index, function(err, data) {
|
|
13326
|
-
if (err) {
|
|
13327
|
-
return options.complete(err);
|
|
13328
|
-
}
|
|
13329
|
-
const updatedContent = applyPatch(data, index, options);
|
|
13330
|
-
options.patched(index, updatedContent, function(err$1) {
|
|
13331
|
-
if (err$1) {
|
|
13332
|
-
return options.complete(err$1);
|
|
13333
|
-
}
|
|
13334
|
-
processIndex();
|
|
13335
|
-
});
|
|
13336
|
-
});
|
|
13337
|
-
}
|
|
13338
|
-
processIndex();
|
|
13339
|
-
}
|
|
13340
|
-
|
|
13341
|
-
//#endregion
|
|
13342
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/reverse.js
|
|
13343
|
-
function reversePatch(structuredPatch$1) {
|
|
13344
|
-
if (Array.isArray(structuredPatch$1)) {
|
|
13345
|
-
return structuredPatch$1.map((patch) => reversePatch(patch)).reverse();
|
|
13346
|
-
}
|
|
13347
|
-
return Object.assign(Object.assign({}, structuredPatch$1), {
|
|
13348
|
-
oldFileName: structuredPatch$1.newFileName,
|
|
13349
|
-
oldHeader: structuredPatch$1.newHeader,
|
|
13350
|
-
newFileName: structuredPatch$1.oldFileName,
|
|
13351
|
-
newHeader: structuredPatch$1.oldHeader,
|
|
13352
|
-
hunks: structuredPatch$1.hunks.map((hunk) => {
|
|
13353
|
-
return {
|
|
13354
|
-
oldLines: hunk.newLines,
|
|
13355
|
-
oldStart: hunk.newStart,
|
|
13356
|
-
newLines: hunk.oldLines,
|
|
13357
|
-
newStart: hunk.oldStart,
|
|
13358
|
-
lines: hunk.lines.map((l$2) => {
|
|
13359
|
-
if (l$2.startsWith("-")) {
|
|
13360
|
-
return `+${l$2.slice(1)}`;
|
|
13361
|
-
}
|
|
13362
|
-
if (l$2.startsWith("+")) {
|
|
13363
|
-
return `-${l$2.slice(1)}`;
|
|
13364
|
-
}
|
|
13365
|
-
return l$2;
|
|
13366
|
-
})
|
|
13367
|
-
};
|
|
13368
|
-
})
|
|
13369
|
-
});
|
|
13370
|
-
}
|
|
13371
|
-
|
|
13372
|
-
//#endregion
|
|
13373
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/create.js
|
|
13374
|
-
const INCLUDE_HEADERS = {
|
|
13375
|
-
includeIndex: true,
|
|
13376
|
-
includeUnderline: true,
|
|
13377
|
-
includeFileHeaders: true
|
|
13378
|
-
};
|
|
13379
|
-
const FILE_HEADERS_ONLY = {
|
|
13380
|
-
includeIndex: false,
|
|
13381
|
-
includeUnderline: false,
|
|
13382
|
-
includeFileHeaders: true
|
|
13383
|
-
};
|
|
13384
|
-
const OMIT_HEADERS = {
|
|
13385
|
-
includeIndex: false,
|
|
13386
|
-
includeUnderline: false,
|
|
13387
|
-
includeFileHeaders: false
|
|
13388
|
-
};
|
|
13389
|
-
function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
|
|
13390
|
-
let optionsObj;
|
|
13391
|
-
if (!options) {
|
|
13392
|
-
optionsObj = {};
|
|
13393
|
-
} else if (typeof options === "function") {
|
|
13394
|
-
optionsObj = { callback: options };
|
|
13395
|
-
} else {
|
|
13396
|
-
optionsObj = options;
|
|
13397
|
-
}
|
|
13398
|
-
if (typeof optionsObj.context === "undefined") {
|
|
13399
|
-
optionsObj.context = 4;
|
|
13400
|
-
}
|
|
13401
|
-
const context = optionsObj.context;
|
|
13402
|
-
if (optionsObj.newlineIsToken) {
|
|
13403
|
-
throw new Error("newlineIsToken may not be used with patch-generation functions, only with diffing functions");
|
|
13404
|
-
}
|
|
13405
|
-
if (!optionsObj.callback) {
|
|
13406
|
-
return diffLinesResultToPatch(diffLines(oldStr, newStr, optionsObj));
|
|
13407
|
-
} else {
|
|
13408
|
-
const { callback } = optionsObj;
|
|
13409
|
-
diffLines(oldStr, newStr, Object.assign(Object.assign({}, optionsObj), { callback: (diff) => {
|
|
13410
|
-
const patch = diffLinesResultToPatch(diff);
|
|
13411
|
-
callback(patch);
|
|
13412
|
-
} }));
|
|
13413
|
-
}
|
|
13414
|
-
function diffLinesResultToPatch(diff) {
|
|
13415
|
-
if (!diff) {
|
|
13416
|
-
return;
|
|
13417
|
-
}
|
|
13418
|
-
diff.push({
|
|
13419
|
-
value: "",
|
|
13420
|
-
lines: []
|
|
13421
|
-
});
|
|
13422
|
-
function contextLines(lines) {
|
|
13423
|
-
return lines.map(function(entry) {
|
|
13424
|
-
return " " + entry;
|
|
13425
|
-
});
|
|
13426
|
-
}
|
|
13427
|
-
const hunks = [];
|
|
13428
|
-
let oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1;
|
|
13429
|
-
for (let i$2 = 0; i$2 < diff.length; i$2++) {
|
|
13430
|
-
const current = diff[i$2], lines = current.lines || splitLines(current.value);
|
|
13431
|
-
current.lines = lines;
|
|
13432
|
-
if (current.added || current.removed) {
|
|
13433
|
-
if (!oldRangeStart) {
|
|
13434
|
-
const prev = diff[i$2 - 1];
|
|
13435
|
-
oldRangeStart = oldLine;
|
|
13436
|
-
newRangeStart = newLine;
|
|
13437
|
-
if (prev) {
|
|
13438
|
-
curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : [];
|
|
13439
|
-
oldRangeStart -= curRange.length;
|
|
13440
|
-
newRangeStart -= curRange.length;
|
|
13441
|
-
}
|
|
13442
|
-
}
|
|
13443
|
-
for (const line of lines) {
|
|
13444
|
-
curRange.push((current.added ? "+" : "-") + line);
|
|
13445
|
-
}
|
|
13446
|
-
if (current.added) {
|
|
13447
|
-
newLine += lines.length;
|
|
13448
|
-
} else {
|
|
13449
|
-
oldLine += lines.length;
|
|
13450
|
-
}
|
|
13451
|
-
} else {
|
|
13452
|
-
if (oldRangeStart) {
|
|
13453
|
-
if (lines.length <= context * 2 && i$2 < diff.length - 2) {
|
|
13454
|
-
for (const line of contextLines(lines)) {
|
|
13455
|
-
curRange.push(line);
|
|
13456
|
-
}
|
|
13457
|
-
} else {
|
|
13458
|
-
const contextSize = Math.min(lines.length, context);
|
|
13459
|
-
for (const line of contextLines(lines.slice(0, contextSize))) {
|
|
13460
|
-
curRange.push(line);
|
|
13461
|
-
}
|
|
13462
|
-
const hunk = {
|
|
13463
|
-
oldStart: oldRangeStart,
|
|
13464
|
-
oldLines: oldLine - oldRangeStart + contextSize,
|
|
13465
|
-
newStart: newRangeStart,
|
|
13466
|
-
newLines: newLine - newRangeStart + contextSize,
|
|
13467
|
-
lines: curRange
|
|
13468
|
-
};
|
|
13469
|
-
hunks.push(hunk);
|
|
13470
|
-
oldRangeStart = 0;
|
|
13471
|
-
newRangeStart = 0;
|
|
13472
|
-
curRange = [];
|
|
13473
|
-
}
|
|
13474
|
-
}
|
|
13475
|
-
oldLine += lines.length;
|
|
13476
|
-
newLine += lines.length;
|
|
13477
|
-
}
|
|
13478
|
-
}
|
|
13479
|
-
for (const hunk of hunks) {
|
|
13480
|
-
for (let i$2 = 0; i$2 < hunk.lines.length; i$2++) {
|
|
13481
|
-
if (hunk.lines[i$2].endsWith("\n")) {
|
|
13482
|
-
hunk.lines[i$2] = hunk.lines[i$2].slice(0, -1);
|
|
13483
|
-
} else {
|
|
13484
|
-
hunk.lines.splice(i$2 + 1, 0, "\");
|
|
13485
|
-
i$2++;
|
|
13486
|
-
}
|
|
13487
|
-
}
|
|
13488
|
-
}
|
|
13489
|
-
return {
|
|
13490
|
-
oldFileName,
|
|
13491
|
-
newFileName,
|
|
13492
|
-
oldHeader,
|
|
13493
|
-
newHeader,
|
|
13494
|
-
hunks
|
|
13495
|
-
};
|
|
13496
|
-
}
|
|
13497
|
-
}
|
|
13498
|
-
/**
|
|
13499
|
-
* creates a unified diff patch.
|
|
13500
|
-
* @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`)
|
|
13501
|
-
*/
|
|
13502
|
-
function formatPatch(patch, headerOptions) {
|
|
13503
|
-
if (!headerOptions) {
|
|
13504
|
-
headerOptions = INCLUDE_HEADERS;
|
|
13505
|
-
}
|
|
13506
|
-
if (Array.isArray(patch)) {
|
|
13507
|
-
if (patch.length > 1 && !headerOptions.includeFileHeaders) {
|
|
13508
|
-
throw new Error("Cannot omit file headers on a multi-file patch. " + "(The result would be unparseable; how would a tool trying to apply " + "the patch know which changes are to which file?)");
|
|
13509
|
-
}
|
|
13510
|
-
return patch.map((p$1) => formatPatch(p$1, headerOptions)).join("\n");
|
|
13511
|
-
}
|
|
13512
|
-
const ret = [];
|
|
13513
|
-
if (headerOptions.includeIndex && patch.oldFileName == patch.newFileName) {
|
|
13514
|
-
ret.push("Index: " + patch.oldFileName);
|
|
13515
|
-
}
|
|
13516
|
-
if (headerOptions.includeUnderline) {
|
|
13517
|
-
ret.push("===================================================================");
|
|
13518
|
-
}
|
|
13519
|
-
if (headerOptions.includeFileHeaders) {
|
|
13520
|
-
ret.push("--- " + patch.oldFileName + (typeof patch.oldHeader === "undefined" ? "" : " " + patch.oldHeader));
|
|
13521
|
-
ret.push("+++ " + patch.newFileName + (typeof patch.newHeader === "undefined" ? "" : " " + patch.newHeader));
|
|
13522
|
-
}
|
|
13523
|
-
for (let i$2 = 0; i$2 < patch.hunks.length; i$2++) {
|
|
13524
|
-
const hunk = patch.hunks[i$2];
|
|
13525
|
-
if (hunk.oldLines === 0) {
|
|
13526
|
-
hunk.oldStart -= 1;
|
|
13527
|
-
}
|
|
13528
|
-
if (hunk.newLines === 0) {
|
|
13529
|
-
hunk.newStart -= 1;
|
|
13530
|
-
}
|
|
13531
|
-
ret.push("@@ -" + hunk.oldStart + "," + hunk.oldLines + " +" + hunk.newStart + "," + hunk.newLines + " @@");
|
|
13532
|
-
for (const line of hunk.lines) {
|
|
13533
|
-
ret.push(line);
|
|
13534
|
-
}
|
|
13535
|
-
}
|
|
13536
|
-
return ret.join("\n") + "\n";
|
|
13537
|
-
}
|
|
13538
|
-
function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
|
|
13539
|
-
if (typeof options === "function") {
|
|
13540
|
-
options = { callback: options };
|
|
13541
|
-
}
|
|
13542
|
-
if (!(options === null || options === void 0 ? void 0 : options.callback)) {
|
|
13543
|
-
const patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
|
|
13544
|
-
if (!patchObj) {
|
|
13545
|
-
return;
|
|
13546
|
-
}
|
|
13547
|
-
return formatPatch(patchObj, options === null || options === void 0 ? void 0 : options.headerOptions);
|
|
13548
|
-
} else {
|
|
13549
|
-
const { callback } = options;
|
|
13550
|
-
structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, Object.assign(Object.assign({}, options), { callback: (patchObj) => {
|
|
13551
|
-
if (!patchObj) {
|
|
13552
|
-
callback(undefined);
|
|
13553
|
-
} else {
|
|
13554
|
-
callback(formatPatch(patchObj, options.headerOptions));
|
|
13555
|
-
}
|
|
13556
|
-
} }));
|
|
13557
|
-
}
|
|
13558
|
-
}
|
|
13559
|
-
function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
|
|
13560
|
-
return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
|
|
13561
|
-
}
|
|
13562
|
-
/**
|
|
13563
|
-
* Split `text` into an array of lines, including the trailing newline character (where present)
|
|
13564
|
-
*/
|
|
13565
|
-
function splitLines(text$1) {
|
|
13566
|
-
const hasTrailingNl = text$1.endsWith("\n");
|
|
13567
|
-
const result = text$1.split("\n").map((line) => line + "\n");
|
|
13568
|
-
if (hasTrailingNl) {
|
|
13569
|
-
result.pop();
|
|
13570
|
-
} else {
|
|
13571
|
-
result.push(result.pop().slice(0, -1));
|
|
13572
|
-
}
|
|
13573
|
-
return result;
|
|
13574
|
-
}
|
|
13575
|
-
|
|
13576
|
-
//#endregion
|
|
13577
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/convert/dmp.js
|
|
13578
|
-
/**
|
|
13579
|
-
* converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library
|
|
13580
|
-
*/
|
|
13581
|
-
function convertChangesToDMP(changes) {
|
|
13582
|
-
const ret = [];
|
|
13583
|
-
let change, operation;
|
|
13584
|
-
for (let i$2 = 0; i$2 < changes.length; i$2++) {
|
|
13585
|
-
change = changes[i$2];
|
|
13586
|
-
if (change.added) {
|
|
13587
|
-
operation = 1;
|
|
13588
|
-
} else if (change.removed) {
|
|
13589
|
-
operation = -1;
|
|
13590
|
-
} else {
|
|
13591
|
-
operation = 0;
|
|
13592
|
-
}
|
|
13593
|
-
ret.push([operation, change.value]);
|
|
13594
|
-
}
|
|
13595
|
-
return ret;
|
|
13596
|
-
}
|
|
13597
|
-
|
|
13598
|
-
//#endregion
|
|
13599
|
-
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/convert/xml.js
|
|
13600
|
-
/**
|
|
13601
|
-
* converts a list of change objects to a serialized XML format
|
|
13602
|
-
*/
|
|
13603
|
-
function convertChangesToXML(changes) {
|
|
13604
|
-
const ret = [];
|
|
13605
|
-
for (let i$2 = 0; i$2 < changes.length; i$2++) {
|
|
13606
|
-
const change = changes[i$2];
|
|
13607
|
-
if (change.added) {
|
|
13608
|
-
ret.push("<ins>");
|
|
13609
|
-
} else if (change.removed) {
|
|
13610
|
-
ret.push("<del>");
|
|
13611
|
-
}
|
|
13612
|
-
ret.push(escapeHTML(change.value));
|
|
13613
|
-
if (change.added) {
|
|
13614
|
-
ret.push("</ins>");
|
|
13615
|
-
} else if (change.removed) {
|
|
13616
|
-
ret.push("</del>");
|
|
13617
|
-
}
|
|
13618
|
-
}
|
|
13619
|
-
return ret.join("");
|
|
13620
|
-
}
|
|
13621
|
-
function escapeHTML(s$1) {
|
|
13622
|
-
let n = s$1;
|
|
13623
|
-
n = n.replace(/&/g, "&");
|
|
13624
|
-
n = n.replace(/</g, "<");
|
|
13625
|
-
n = n.replace(/>/g, ">");
|
|
13626
|
-
n = n.replace(/"/g, """);
|
|
13627
|
-
return n;
|
|
13628
|
-
}
|
|
13629
|
-
|
|
13630
|
-
//#endregion
|
|
13631
|
-
//#region src/utils/cleanLastNewline.ts
|
|
13632
|
-
function cleanLastNewline(contents) {
|
|
13633
|
-
return contents.replace(/\n$|\r\n$/, "");
|
|
13634
|
-
}
|
|
13635
|
-
|
|
13636
|
-
//#endregion
|
|
13637
|
-
//#region ../../node_modules/.bun/@shikijs+transformers@4.0.2/node_modules/@shikijs/transformers/dist/index.mjs
|
|
13638
|
-
/**
|
|
13639
|
-
* some comment formats have to be located at the end of line
|
|
13640
|
-
* hence we can skip matching them for other tokens
|
|
13641
|
-
*/
|
|
13642
|
-
const matchers = [
|
|
13643
|
-
[/^(<!--)(.+)(-->)$/, false],
|
|
13644
|
-
[/^(\/\*)(.+)(\*\/)$/, false],
|
|
13645
|
-
[/^(\/\/|["'#]|;{1,2}|%{1,2}|--)(.*)$/, true],
|
|
13646
|
-
[/^(\*)(.+)$/, true]
|
|
13647
|
-
];
|
|
13648
|
-
/**
|
|
13649
|
-
* @param lines line tokens
|
|
13650
|
-
* @param jsx enable JSX parsing
|
|
13651
|
-
* @param matchAlgorithm matching algorithm
|
|
13652
|
-
*/
|
|
13653
|
-
function parseComments(lines, jsx, matchAlgorithm) {
|
|
13654
|
-
const out = [];
|
|
13655
|
-
for (const line of lines) {
|
|
13656
|
-
if (matchAlgorithm === "v3") {
|
|
13657
|
-
const splittedElements = line.children.flatMap((element$1, idx) => {
|
|
13658
|
-
if (element$1.type !== "element") return element$1;
|
|
13659
|
-
const token$1 = element$1.children[0];
|
|
13660
|
-
if (token$1.type !== "text") return element$1;
|
|
13661
|
-
const isLast = idx === line.children.length - 1;
|
|
13662
|
-
if (!matchToken(token$1.value, isLast)) return element$1;
|
|
13663
|
-
const rawSplits = token$1.value.split(/(\s+\/\/)/);
|
|
13664
|
-
if (rawSplits.length <= 1) return element$1;
|
|
13665
|
-
let splits = [rawSplits[0]];
|
|
13666
|
-
for (let i$2 = 1; i$2 < rawSplits.length; i$2 += 2) splits.push(rawSplits[i$2] + (rawSplits[i$2 + 1] || ""));
|
|
13667
|
-
splits = splits.filter(Boolean);
|
|
13668
|
-
if (splits.length <= 1) return element$1;
|
|
13669
|
-
return splits.map((split) => {
|
|
13670
|
-
return {
|
|
13671
|
-
...element$1,
|
|
13672
|
-
children: [{
|
|
13673
|
-
type: "text",
|
|
13674
|
-
value: split
|
|
13675
|
-
}]
|
|
13676
|
-
};
|
|
13677
|
-
});
|
|
13678
|
-
});
|
|
13679
|
-
if (splittedElements.length !== line.children.length) line.children = splittedElements;
|
|
13680
|
-
}
|
|
13681
|
-
const elements = line.children;
|
|
13682
|
-
let start = elements.length - 1;
|
|
13683
|
-
if (matchAlgorithm === "v1") start = 0;
|
|
13684
|
-
else if (jsx) start = elements.length - 2;
|
|
13685
|
-
for (let i$2 = Math.max(start, 0); i$2 < elements.length; i$2++) {
|
|
13686
|
-
const token$1 = elements[i$2];
|
|
13687
|
-
if (token$1.type !== "element") continue;
|
|
13688
|
-
const head$1 = token$1.children.at(0);
|
|
13689
|
-
if (head$1?.type !== "text") continue;
|
|
13690
|
-
const isLast = i$2 === elements.length - 1;
|
|
13691
|
-
let match = matchToken(head$1.value, isLast);
|
|
13692
|
-
let additionalTokens;
|
|
13693
|
-
if (!match && i$2 > 0 && head$1.value.trim().startsWith("[!code")) {
|
|
13694
|
-
const prevToken = elements[i$2 - 1];
|
|
13695
|
-
if (prevToken?.type === "element") {
|
|
13696
|
-
const prevHead = prevToken.children.at(0);
|
|
13697
|
-
if (prevHead?.type === "text" && prevHead.value.includes("//")) {
|
|
13698
|
-
const combinedMatch = matchToken(prevHead.value + head$1.value, isLast);
|
|
13699
|
-
if (combinedMatch) {
|
|
13700
|
-
match = combinedMatch;
|
|
13701
|
-
out.push({
|
|
13702
|
-
info: combinedMatch,
|
|
13703
|
-
line,
|
|
13704
|
-
token: prevToken,
|
|
13705
|
-
isLineCommentOnly: elements.length === 2 && prevToken.children.length === 1 && token$1.children.length === 1,
|
|
13706
|
-
isJsxStyle: false,
|
|
13707
|
-
additionalTokens: [token$1]
|
|
13708
|
-
});
|
|
13709
|
-
continue;
|
|
13710
|
-
}
|
|
13711
|
-
}
|
|
13712
|
-
}
|
|
13713
|
-
}
|
|
13714
|
-
if (!match) continue;
|
|
13715
|
-
if (jsx && !isLast && i$2 !== 0) {
|
|
13716
|
-
const isJsxStyle = isValue(elements[i$2 - 1], "{") && isValue(elements[i$2 + 1], "}");
|
|
13717
|
-
out.push({
|
|
13718
|
-
info: match,
|
|
13719
|
-
line,
|
|
13720
|
-
token: token$1,
|
|
13721
|
-
isLineCommentOnly: elements.length === 3 && token$1.children.length === 1,
|
|
13722
|
-
isJsxStyle,
|
|
13723
|
-
additionalTokens
|
|
13724
|
-
});
|
|
13725
|
-
} else out.push({
|
|
13726
|
-
info: match,
|
|
13727
|
-
line,
|
|
13728
|
-
token: token$1,
|
|
13729
|
-
isLineCommentOnly: elements.length === 1 && token$1.children.length === 1,
|
|
13730
|
-
isJsxStyle: false,
|
|
13731
|
-
additionalTokens
|
|
13732
|
-
});
|
|
13733
|
-
}
|
|
13734
|
-
}
|
|
13735
|
-
return out;
|
|
13736
|
-
}
|
|
13737
|
-
function isValue(element$1, value) {
|
|
13738
|
-
if (element$1.type !== "element") return false;
|
|
13739
|
-
const text$1 = element$1.children[0];
|
|
13740
|
-
if (text$1.type !== "text") return false;
|
|
13741
|
-
return text$1.value.trim() === value;
|
|
13742
|
-
}
|
|
13743
|
-
/**
|
|
13744
|
-
* @param text text value of comment node
|
|
13745
|
-
* @param isLast whether the token is located at the end of line
|
|
13746
|
-
*/
|
|
13747
|
-
function matchToken(text$1, isLast) {
|
|
13748
|
-
let trimmed = text$1.trimStart();
|
|
13749
|
-
const spaceFront = text$1.length - trimmed.length;
|
|
13750
|
-
trimmed = trimmed.trimEnd();
|
|
13751
|
-
const spaceEnd = text$1.length - trimmed.length - spaceFront;
|
|
13752
|
-
for (const [matcher, endOfLine] of matchers) {
|
|
13753
|
-
if (endOfLine && !isLast) continue;
|
|
13754
|
-
const result = matcher.exec(trimmed);
|
|
13755
|
-
if (!result) continue;
|
|
13756
|
-
return [
|
|
13757
|
-
" ".repeat(spaceFront) + result[1],
|
|
13758
|
-
result[2],
|
|
13759
|
-
result[3] ? result[3] + " ".repeat(spaceEnd) : void 0
|
|
13760
|
-
];
|
|
13761
|
-
}
|
|
13762
|
-
}
|
|
13763
|
-
/**
|
|
13764
|
-
* Remove empty comment prefixes at line end, e.g. `// `
|
|
13765
|
-
*
|
|
13766
|
-
* For matchAlgorithm v1
|
|
13767
|
-
*/
|
|
13768
|
-
function v1ClearEndCommentPrefix(text$1) {
|
|
13769
|
-
const match = text$1.match(/(?:\/\/|["'#]|;{1,2}|%{1,2}|--)(\s*)$/);
|
|
13770
|
-
if (match && match[1].trim().length === 0) return text$1.slice(0, match.index);
|
|
13771
|
-
return text$1;
|
|
13772
|
-
}
|
|
13773
|
-
function createCommentNotationTransformer(name, regex, onMatch, matchAlgorithm) {
|
|
13774
|
-
if (matchAlgorithm == null) matchAlgorithm = "v3";
|
|
13775
|
-
return {
|
|
13776
|
-
name,
|
|
13777
|
-
code(code) {
|
|
13778
|
-
const lines = code.children.filter((i$2) => i$2.type === "element");
|
|
13779
|
-
const linesToRemove = [];
|
|
13780
|
-
code.data ??= {};
|
|
13781
|
-
const data = code.data;
|
|
13782
|
-
data._shiki_notation ??= parseComments(lines, ["jsx", "tsx"].includes(this.options.lang), matchAlgorithm);
|
|
13783
|
-
const parsed = data._shiki_notation;
|
|
13784
|
-
for (const comment$1 of parsed) {
|
|
13785
|
-
if (comment$1.info[1].length === 0) continue;
|
|
13786
|
-
let lineIdx = lines.indexOf(comment$1.line);
|
|
13787
|
-
if (comment$1.isLineCommentOnly && matchAlgorithm !== "v1") lineIdx++;
|
|
13788
|
-
let replaced = false;
|
|
13789
|
-
comment$1.info[1] = comment$1.info[1].replace(regex, (...match) => {
|
|
13790
|
-
if (onMatch.call(this, match, comment$1.line, comment$1.token, lines, lineIdx)) {
|
|
13791
|
-
replaced = true;
|
|
13792
|
-
return "";
|
|
13793
|
-
}
|
|
13794
|
-
return match[0];
|
|
13795
|
-
});
|
|
13796
|
-
if (!replaced) continue;
|
|
13797
|
-
if (matchAlgorithm === "v1") comment$1.info[1] = v1ClearEndCommentPrefix(comment$1.info[1]);
|
|
13798
|
-
const isEmpty = comment$1.info[1].trim().length === 0;
|
|
13799
|
-
if (isEmpty) comment$1.info[1] = "";
|
|
13800
|
-
if (isEmpty && comment$1.isLineCommentOnly) linesToRemove.push(comment$1.line);
|
|
13801
|
-
else if (isEmpty && comment$1.isJsxStyle) comment$1.line.children.splice(comment$1.line.children.indexOf(comment$1.token) - 1, 3);
|
|
13802
|
-
else if (isEmpty) {
|
|
13803
|
-
if (comment$1.additionalTokens) for (let j$1 = comment$1.additionalTokens.length - 1; j$1 >= 0; j$1--) {
|
|
13804
|
-
const additionalToken = comment$1.additionalTokens[j$1];
|
|
13805
|
-
const tokenIndex = comment$1.line.children.indexOf(additionalToken);
|
|
13806
|
-
if (tokenIndex !== -1) comment$1.line.children.splice(tokenIndex, 1);
|
|
13807
|
-
}
|
|
13808
|
-
comment$1.line.children.splice(comment$1.line.children.indexOf(comment$1.token), 1);
|
|
13809
|
-
} else {
|
|
13810
|
-
const head$1 = comment$1.token.children[0];
|
|
13811
|
-
if (head$1.type === "text") {
|
|
13812
|
-
head$1.value = comment$1.info.join("");
|
|
13813
|
-
if (comment$1.additionalTokens) for (const additionalToken of comment$1.additionalTokens) {
|
|
13814
|
-
const additionalHead = additionalToken.children[0];
|
|
13815
|
-
if (additionalHead?.type === "text") additionalHead.value = "";
|
|
13816
|
-
}
|
|
13817
|
-
}
|
|
13818
|
-
}
|
|
13819
|
-
}
|
|
13820
|
-
for (const line of linesToRemove) {
|
|
13821
|
-
const index = code.children.indexOf(line);
|
|
13822
|
-
const nextLine = code.children[index + 1];
|
|
13823
|
-
let removeLength = 1;
|
|
13824
|
-
if (nextLine?.type === "text" && nextLine?.value === "\n") removeLength = 2;
|
|
13825
|
-
code.children.splice(index, removeLength);
|
|
13826
|
-
}
|
|
13827
|
-
}
|
|
13828
|
-
};
|
|
13829
|
-
}
|
|
13830
|
-
/**
|
|
13831
|
-
* Transformer for `shiki`'s legacy `lineOptions`
|
|
13832
|
-
*/
|
|
13833
|
-
function transformerCompactLineOptions(lineOptions = []) {
|
|
13834
|
-
return {
|
|
13835
|
-
name: "@shikijs/transformers:compact-line-options",
|
|
13836
|
-
line(node, line) {
|
|
13837
|
-
const lineOption = lineOptions.find((o$2) => o$2.line === line);
|
|
13838
|
-
if (lineOption?.classes) this.addClassToHast(node, lineOption.classes);
|
|
13839
|
-
return node;
|
|
13840
|
-
}
|
|
13841
|
-
};
|
|
13842
|
-
}
|
|
13843
|
-
function parseMetaHighlightString(meta) {
|
|
13844
|
-
if (!meta) return null;
|
|
13845
|
-
const match = meta.match(/\{([\d,-]+)\}/);
|
|
13846
|
-
if (!match) return null;
|
|
13847
|
-
return match[1].split(",").flatMap((v$1) => {
|
|
13848
|
-
const range$1 = v$1.split("-").map((n) => Number.parseInt(n, 10));
|
|
13849
|
-
return range$1.length === 1 ? [range$1[0]] : Array.from({ length: range$1[1] - range$1[0] + 1 }, (_$2, i$2) => range$1[0] + i$2);
|
|
13850
|
-
});
|
|
13851
|
-
}
|
|
13852
|
-
const symbol = Symbol("highlighted-lines");
|
|
13853
|
-
/**
|
|
13854
|
-
* Allow using `{1,3-5}` in the code snippet meta to mark highlighted lines.
|
|
13855
|
-
*/
|
|
13856
|
-
function transformerMetaHighlight(options = {}) {
|
|
13857
|
-
const { className = "highlighted", zeroIndexed = false } = options;
|
|
13858
|
-
return {
|
|
13859
|
-
name: "@shikijs/transformers:meta-highlight",
|
|
13860
|
-
line(node, lineNumber) {
|
|
13861
|
-
if (!this.options.meta?.__raw) return;
|
|
13862
|
-
const meta = this.meta;
|
|
13863
|
-
meta[symbol] ??= parseMetaHighlightString(this.options.meta.__raw);
|
|
13864
|
-
const highlightedLines = meta[symbol] ?? [];
|
|
13865
|
-
const effectiveLine = zeroIndexed ? lineNumber - 1 : lineNumber;
|
|
13866
|
-
if (highlightedLines.includes(effectiveLine)) this.addClassToHast(node, className);
|
|
13867
|
-
return node;
|
|
13868
|
-
}
|
|
13869
|
-
};
|
|
13870
|
-
}
|
|
13871
|
-
function parseMetaHighlightWords(meta) {
|
|
13872
|
-
if (!meta) return [];
|
|
13873
|
-
return Array.from(meta.matchAll(/\/((?:\\.|[^/])+)\//g)).map((v$1) => v$1[1].replace(/\\(.)/g, "$1"));
|
|
13874
|
-
}
|
|
13875
|
-
/**
|
|
13876
|
-
* Allow using `/word/` in the code snippet meta to mark highlighted words.
|
|
13877
|
-
*/
|
|
13878
|
-
function transformerMetaWordHighlight(options = {}) {
|
|
13879
|
-
const { className = "highlighted-word" } = options;
|
|
13880
|
-
return {
|
|
13881
|
-
name: "@shikijs/transformers:meta-word-highlight",
|
|
13882
|
-
preprocess(code, options$1) {
|
|
13883
|
-
if (!this.options.meta?.__raw) return;
|
|
13884
|
-
const words = parseMetaHighlightWords(this.options.meta.__raw);
|
|
13885
|
-
options$1.decorations ||= [];
|
|
13886
|
-
for (const word of words) {
|
|
13887
|
-
const indexes = findAllSubstringIndexes(code, word);
|
|
13888
|
-
for (const index of indexes) options$1.decorations.push({
|
|
13889
|
-
start: index,
|
|
13890
|
-
end: index + word.length,
|
|
13891
|
-
properties: { class: className }
|
|
13892
|
-
});
|
|
13893
|
-
}
|
|
13894
|
-
}
|
|
13895
|
-
};
|
|
13896
|
-
}
|
|
13897
|
-
function findAllSubstringIndexes(str, substr) {
|
|
13898
|
-
const indexes = [];
|
|
13899
|
-
let cursor = 0;
|
|
13900
|
-
while (true) {
|
|
13901
|
-
const index = str.indexOf(substr, cursor);
|
|
13902
|
-
if (index === -1 || index >= str.length) break;
|
|
13903
|
-
if (index < cursor) break;
|
|
13904
|
-
indexes.push(index);
|
|
13905
|
-
cursor = index + substr.length;
|
|
13906
|
-
}
|
|
13907
|
-
return indexes;
|
|
13908
|
-
}
|
|
13909
|
-
function escapeRegExp(str) {
|
|
13910
|
-
return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
13911
|
-
}
|
|
13912
|
-
function transformerNotationMap(options = {}, name = "@shikijs/transformers:notation-map") {
|
|
13913
|
-
const { classMap = {}, classActivePre = void 0, classActiveCode = void 0 } = options;
|
|
13914
|
-
return createCommentNotationTransformer(name, new RegExp(`#?\\s*\\[!code (${Object.keys(classMap).map(escapeRegExp).join("|")})(:\\d+)?\\]`, "gi"), function([_$2, match, range$1 = ":1"], _line, _comment, lines, index) {
|
|
13915
|
-
const lineNum = Number.parseInt(range$1.slice(1), 10);
|
|
13916
|
-
for (let i$2 = index; i$2 < Math.min(index + lineNum, lines.length); i$2++) this.addClassToHast(lines[i$2], classMap[match]);
|
|
13917
|
-
if (classActivePre) this.addClassToHast(this.pre, classActivePre);
|
|
13918
|
-
if (classActiveCode) this.addClassToHast(this.code, classActiveCode);
|
|
13919
|
-
return true;
|
|
13920
|
-
}, options.matchAlgorithm);
|
|
13921
|
-
}
|
|
13922
|
-
/**
|
|
13923
|
-
* Use `[!code ++]` and `[!code --]` to mark added and removed lines.
|
|
13924
|
-
*/
|
|
13925
|
-
function transformerNotationDiff(options = {}) {
|
|
13926
|
-
const { classLineAdd = "diff add", classLineRemove = "diff remove", classActivePre = "has-diff", classActiveCode } = options;
|
|
13927
|
-
return transformerNotationMap({
|
|
13928
|
-
classMap: {
|
|
13929
|
-
"++": classLineAdd,
|
|
13930
|
-
"--": classLineRemove
|
|
13931
|
-
},
|
|
13932
|
-
classActivePre,
|
|
13933
|
-
classActiveCode,
|
|
13934
|
-
matchAlgorithm: options.matchAlgorithm
|
|
13935
|
-
}, "@shikijs/transformers:notation-diff");
|
|
13936
|
-
}
|
|
13937
|
-
/**
|
|
13938
|
-
* Allow using `[!code error]` `[!code warning]` notation in code to mark highlighted lines.
|
|
13939
|
-
*/
|
|
13940
|
-
function transformerNotationErrorLevel(options = {}) {
|
|
13941
|
-
const { classMap = {
|
|
13942
|
-
error: ["highlighted", "error"],
|
|
13943
|
-
warning: ["highlighted", "warning"],
|
|
13944
|
-
info: ["highlighted", "info"]
|
|
13945
|
-
}, classActivePre = "has-highlighted", classActiveCode } = options;
|
|
13946
|
-
return transformerNotationMap({
|
|
13947
|
-
classMap,
|
|
13948
|
-
classActivePre,
|
|
13949
|
-
classActiveCode,
|
|
13950
|
-
matchAlgorithm: options.matchAlgorithm
|
|
13951
|
-
}, "@shikijs/transformers:notation-error-level");
|
|
13952
|
-
}
|
|
13953
|
-
/**
|
|
13954
|
-
* Allow using `[!code focus]` notation in code to mark focused lines.
|
|
13955
|
-
*/
|
|
13956
|
-
function transformerNotationFocus(options = {}) {
|
|
13957
|
-
const { classActiveLine = "focused", classActivePre = "has-focused", classActiveCode } = options;
|
|
13958
|
-
return transformerNotationMap({
|
|
13959
|
-
classMap: { focus: classActiveLine },
|
|
13960
|
-
classActivePre,
|
|
13961
|
-
classActiveCode,
|
|
13962
|
-
matchAlgorithm: options.matchAlgorithm
|
|
13963
|
-
}, "@shikijs/transformers:notation-focus");
|
|
13964
|
-
}
|
|
13965
|
-
/**
|
|
13966
|
-
* Allow using `[!code highlight]` notation in code to mark highlighted lines.
|
|
13967
|
-
*/
|
|
13968
|
-
function transformerNotationHighlight(options = {}) {
|
|
13969
|
-
const { classActiveLine = "highlighted", classActivePre = "has-highlighted", classActiveCode } = options;
|
|
13970
|
-
return transformerNotationMap({
|
|
13971
|
-
classMap: {
|
|
13972
|
-
highlight: classActiveLine,
|
|
13973
|
-
hl: classActiveLine
|
|
13974
|
-
},
|
|
13975
|
-
classActivePre,
|
|
13976
|
-
classActiveCode,
|
|
13977
|
-
matchAlgorithm: options.matchAlgorithm
|
|
13978
|
-
}, "@shikijs/transformers:notation-highlight");
|
|
13979
|
-
}
|
|
13980
|
-
function highlightWordInLine(line, ignoredElement, word, className) {
|
|
13981
|
-
const content = getTextContent(line);
|
|
13982
|
-
let index = content.indexOf(word);
|
|
13983
|
-
while (index !== -1) {
|
|
13984
|
-
highlightRange.call(this, line.children, ignoredElement, index, word.length, className);
|
|
13985
|
-
index = content.indexOf(word, index + 1);
|
|
13986
|
-
}
|
|
13987
|
-
}
|
|
13988
|
-
function getTextContent(element$1) {
|
|
13989
|
-
if (element$1.type === "text") return element$1.value;
|
|
13990
|
-
if (element$1.type === "element" && element$1.tagName === "span") return element$1.children.map(getTextContent).join("");
|
|
13991
|
-
return "";
|
|
13992
|
-
}
|
|
13993
|
-
/**
|
|
13994
|
-
* @param elements
|
|
13995
|
-
* @param ignoredElement
|
|
13996
|
-
* @param index highlight beginning index
|
|
13997
|
-
* @param len highlight length
|
|
13998
|
-
* @param className class name to add to highlighted nodes
|
|
13999
|
-
*/
|
|
14000
|
-
function highlightRange(elements, ignoredElement, index, len, className) {
|
|
14001
|
-
let currentIdx = 0;
|
|
14002
|
-
for (let i$2 = 0; i$2 < elements.length; i$2++) {
|
|
14003
|
-
const element$1 = elements[i$2];
|
|
14004
|
-
if (element$1.type !== "element" || element$1.tagName !== "span" || element$1 === ignoredElement) continue;
|
|
14005
|
-
const textNode = element$1.children[0];
|
|
14006
|
-
if (textNode.type !== "text") continue;
|
|
14007
|
-
if (hasOverlap([currentIdx, currentIdx + textNode.value.length - 1], [index, index + len])) {
|
|
14008
|
-
const start = Math.max(0, index - currentIdx);
|
|
14009
|
-
const length = len - Math.max(0, currentIdx - index);
|
|
14010
|
-
if (length === 0) continue;
|
|
14011
|
-
const separated = separateToken(element$1, textNode, start, length);
|
|
14012
|
-
this.addClassToHast(separated[1], className);
|
|
14013
|
-
const output = separated.filter(Boolean);
|
|
14014
|
-
elements.splice(i$2, 1, ...output);
|
|
14015
|
-
i$2 += output.length - 1;
|
|
14016
|
-
}
|
|
14017
|
-
currentIdx += textNode.value.length;
|
|
14018
|
-
}
|
|
14019
|
-
}
|
|
14020
|
-
function hasOverlap(range1, range2) {
|
|
14021
|
-
return range1[0] <= range2[1] && range1[1] >= range2[0];
|
|
14022
|
-
}
|
|
14023
|
-
function separateToken(span, textNode, index, len) {
|
|
14024
|
-
const text$1 = textNode.value;
|
|
14025
|
-
const createNode = (value) => inheritElement(span, { children: [{
|
|
14026
|
-
type: "text",
|
|
14027
|
-
value
|
|
14028
|
-
}] });
|
|
14029
|
-
return [
|
|
14030
|
-
index > 0 ? createNode(text$1.slice(0, index)) : void 0,
|
|
14031
|
-
createNode(text$1.slice(index, index + len)),
|
|
14032
|
-
index + len < text$1.length ? createNode(text$1.slice(index + len)) : void 0
|
|
14033
|
-
];
|
|
14034
|
-
}
|
|
14035
|
-
function inheritElement(original, overrides) {
|
|
14036
|
-
return {
|
|
14037
|
-
...original,
|
|
14038
|
-
properties: { ...original.properties },
|
|
14039
|
-
...overrides
|
|
14040
|
-
};
|
|
14041
|
-
}
|
|
14042
|
-
function transformerNotationWordHighlight(options = {}) {
|
|
14043
|
-
const { classActiveWord = "highlighted-word", classActivePre = void 0 } = options;
|
|
14044
|
-
return createCommentNotationTransformer("@shikijs/transformers:notation-highlight-word", /\s*\[!code word:((?:\\.|[^:\]])+)(:\d+)?\]/, function([_$2, word, range$1], _line, comment$1, lines, index) {
|
|
14045
|
-
const lineNum = range$1 ? Number.parseInt(range$1.slice(1), 10) : lines.length;
|
|
14046
|
-
word = word.replace(/\\(.)/g, "$1");
|
|
14047
|
-
for (let i$2 = index; i$2 < Math.min(index + lineNum, lines.length); i$2++) highlightWordInLine.call(this, lines[i$2], comment$1, word, classActiveWord);
|
|
14048
|
-
if (classActivePre) this.addClassToHast(this.pre, classActivePre);
|
|
14049
|
-
return true;
|
|
14050
|
-
}, options.matchAlgorithm);
|
|
14051
|
-
}
|
|
14052
|
-
/**
|
|
14053
|
-
* Remove comments from the code.
|
|
14054
|
-
*/
|
|
14055
|
-
function transformerRemoveComments(options = {}) {
|
|
14056
|
-
const { removeEmptyLines = true } = options;
|
|
14057
|
-
return {
|
|
14058
|
-
name: "@shikijs/transformers:remove-comments",
|
|
14059
|
-
preprocess(_code, options$1) {
|
|
14060
|
-
if (options$1.includeExplanation !== true && options$1.includeExplanation !== "scopeName") throw new Error("`transformerRemoveComments` requires `includeExplanation` to be set to `true` or `'scopeName'`");
|
|
14061
|
-
},
|
|
14062
|
-
tokens(tokens) {
|
|
14063
|
-
const result = [];
|
|
14064
|
-
for (const line of tokens) {
|
|
14065
|
-
const filteredLine = [];
|
|
14066
|
-
let hasComment = false;
|
|
14067
|
-
for (const token$1 of line) if (token$1.explanation?.some((exp) => exp.scopes.some((s$1) => s$1.scopeName.startsWith("comment")))) hasComment = true;
|
|
14068
|
-
else filteredLine.push(token$1);
|
|
14069
|
-
if (removeEmptyLines && hasComment) {
|
|
14070
|
-
if (filteredLine.every((token$1) => !token$1.content.trim())) continue;
|
|
14071
|
-
}
|
|
14072
|
-
result.push(filteredLine);
|
|
14073
|
-
}
|
|
14074
|
-
return result;
|
|
14075
|
-
}
|
|
14076
|
-
};
|
|
14077
|
-
}
|
|
14078
|
-
/**
|
|
14079
|
-
* Remove line breaks between lines.
|
|
14080
|
-
* Useful when you override `display: block` to `.line` in CSS.
|
|
14081
|
-
*/
|
|
14082
|
-
function transformerRemoveLineBreak() {
|
|
14083
|
-
return {
|
|
14084
|
-
name: "@shikijs/transformers:remove-line-break",
|
|
14085
|
-
code(code) {
|
|
14086
|
-
code.children = code.children.filter((line) => !(line.type === "text" && line.value === "\n"));
|
|
14087
|
-
}
|
|
14088
|
-
};
|
|
14089
|
-
}
|
|
14090
|
-
/**
|
|
14091
|
-
* Remove notation escapes.
|
|
14092
|
-
* Useful when you want to write `// [!code` in markdown.
|
|
14093
|
-
* If you process `// [\!code ...]` expression, you can get `// [!code ...]` in the output.
|
|
14094
|
-
*/
|
|
14095
|
-
function transformerRemoveNotationEscape() {
|
|
14096
|
-
return {
|
|
14097
|
-
name: "@shikijs/transformers:remove-notation-escape",
|
|
14098
|
-
code(hast) {
|
|
14099
|
-
function replace(node) {
|
|
14100
|
-
if (node.type === "text") node.value = node.value.replace("[\\!code", "[!code");
|
|
14101
|
-
else if ("children" in node) for (const child of node.children) replace(child);
|
|
14102
|
-
}
|
|
14103
|
-
replace(hast);
|
|
14104
|
-
return hast;
|
|
14105
|
-
}
|
|
14106
|
-
};
|
|
14107
|
-
}
|
|
14108
|
-
/**
|
|
14109
|
-
* Render indentations as separate tokens.
|
|
14110
|
-
* Apply with CSS, it can be used to render indent guides visually.
|
|
14111
|
-
*/
|
|
14112
|
-
function transformerRenderIndentGuides(options = {}) {
|
|
14113
|
-
return {
|
|
14114
|
-
name: "@shikijs/transformers:render-indent-guides",
|
|
14115
|
-
code(hast) {
|
|
14116
|
-
const indent = Number(this.options.meta?.indent ?? this.options.meta?.__raw?.match(/\{indent:(\d+|false)\}/)?.[1] ?? options.indent ?? 2);
|
|
14117
|
-
if (Number.isNaN(indent) || indent <= 0) return hast;
|
|
14118
|
-
const indentRegex = new RegExp(` {${indent}}| {0,${indent - 1}}\t| {1,}$`, "g");
|
|
14119
|
-
const emptyLines = [];
|
|
14120
|
-
let level = 0;
|
|
14121
|
-
for (const line of hast.children) {
|
|
14122
|
-
if (line.type !== "element") continue;
|
|
14123
|
-
const first = line.children[0];
|
|
14124
|
-
if (first?.type !== "element" || first?.children[0]?.type !== "text") {
|
|
14125
|
-
emptyLines.push([line, level]);
|
|
14126
|
-
continue;
|
|
14127
|
-
}
|
|
14128
|
-
const text$1 = first.children[0];
|
|
14129
|
-
const blanks = text$1.value.split(/[^ \t]/, 1)[0];
|
|
14130
|
-
const ranges = [];
|
|
14131
|
-
for (const match of blanks.matchAll(indentRegex)) {
|
|
14132
|
-
const start = match.index;
|
|
14133
|
-
const end = start + match[0].length;
|
|
14134
|
-
ranges.push([start, end]);
|
|
14135
|
-
}
|
|
14136
|
-
for (const [line$1, level$1] of emptyLines) line$1.children.unshift(...Array.from({ length: Math.min(ranges.length, level$1 + 1) }, (_$2, i$2) => ({
|
|
14137
|
-
type: "element",
|
|
14138
|
-
tagName: "span",
|
|
14139
|
-
properties: {
|
|
14140
|
-
class: "indent",
|
|
14141
|
-
style: `--indent-offset: ${i$2 * indent}ch;`
|
|
14142
|
-
},
|
|
14143
|
-
children: []
|
|
14144
|
-
})));
|
|
14145
|
-
emptyLines.length = 0;
|
|
14146
|
-
level = ranges.length;
|
|
14147
|
-
if (ranges.length) {
|
|
14148
|
-
line.children.unshift(...ranges.map(([start, end]) => ({
|
|
14149
|
-
type: "element",
|
|
14150
|
-
tagName: "span",
|
|
14151
|
-
properties: { class: "indent" },
|
|
14152
|
-
children: [{
|
|
14153
|
-
type: "text",
|
|
14154
|
-
value: text$1.value.slice(start, end)
|
|
14155
|
-
}]
|
|
14156
|
-
})));
|
|
14157
|
-
text$1.value = text$1.value.slice(ranges.at(-1)[1]);
|
|
14158
|
-
}
|
|
14159
|
-
}
|
|
14160
|
-
return hast;
|
|
14161
|
-
}
|
|
14162
|
-
};
|
|
14163
|
-
}
|
|
14164
|
-
function isTab(part) {
|
|
14165
|
-
return part === " ";
|
|
14166
|
-
}
|
|
14167
|
-
function isSpace(part) {
|
|
14168
|
-
return part === " " || part === " ";
|
|
14169
|
-
}
|
|
14170
|
-
function separateContinuousSpaces(inputs) {
|
|
14171
|
-
const result = [];
|
|
14172
|
-
let current = "";
|
|
14173
|
-
function bump() {
|
|
14174
|
-
if (current.length) result.push(current);
|
|
14175
|
-
current = "";
|
|
14176
|
-
}
|
|
14177
|
-
inputs.forEach((part, idx) => {
|
|
14178
|
-
if (isTab(part)) {
|
|
14179
|
-
bump();
|
|
14180
|
-
result.push(part);
|
|
14181
|
-
} else if (isSpace(part) && (isSpace(inputs[idx - 1]) || isSpace(inputs[idx + 1]))) {
|
|
14182
|
-
bump();
|
|
14183
|
-
result.push(part);
|
|
14184
|
-
} else current += part;
|
|
14185
|
-
});
|
|
14186
|
-
bump();
|
|
14187
|
-
return result;
|
|
14188
|
-
}
|
|
14189
|
-
function splitSpaces(parts, type, renderContinuousSpaces = true) {
|
|
14190
|
-
if (type === "all") return parts;
|
|
14191
|
-
let leftCount = 0;
|
|
14192
|
-
let rightCount = 0;
|
|
14193
|
-
if (type === "boundary" || type === "leading") for (let i$2 = 0; i$2 < parts.length; i$2++) if (isSpace(parts[i$2])) leftCount++;
|
|
14194
|
-
else break;
|
|
14195
|
-
if (type === "boundary" || type === "trailing") for (let i$2 = parts.length - 1; i$2 >= 0; i$2--) if (isSpace(parts[i$2])) rightCount++;
|
|
14196
|
-
else break;
|
|
14197
|
-
const middle = parts.slice(leftCount, parts.length - rightCount);
|
|
14198
|
-
return [
|
|
14199
|
-
...parts.slice(0, leftCount),
|
|
14200
|
-
...renderContinuousSpaces ? separateContinuousSpaces(middle) : [middle.join("")],
|
|
14201
|
-
...parts.slice(parts.length - rightCount)
|
|
14202
|
-
];
|
|
14203
|
-
}
|
|
14204
|
-
/**
|
|
14205
|
-
* Render whitespaces as separate tokens.
|
|
14206
|
-
* Apply with CSS, it can be used to render tabs and spaces visually.
|
|
14207
|
-
*/
|
|
14208
|
-
function transformerRenderWhitespace(options = {}) {
|
|
14209
|
-
const classMap = {
|
|
14210
|
-
" ": options.classSpace ?? "space",
|
|
14211
|
-
" ": options.classTab ?? "tab"
|
|
14212
|
-
};
|
|
14213
|
-
const position = options.position ?? "all";
|
|
14214
|
-
const keys = Object.keys(classMap);
|
|
14215
|
-
return {
|
|
14216
|
-
name: "@shikijs/transformers:render-whitespace",
|
|
14217
|
-
root(root$1) {
|
|
14218
|
-
const pre = root$1.children[0];
|
|
14219
|
-
(pre.tagName === "pre" ? pre.children[0] : { children: [root$1] }).children.forEach((line) => {
|
|
14220
|
-
if (line.type !== "element" && line.type !== "root") return;
|
|
14221
|
-
const elements = line.children.filter((token$1) => token$1.type === "element");
|
|
14222
|
-
const last = elements.length - 1;
|
|
14223
|
-
line.children = line.children.flatMap((token$1) => {
|
|
14224
|
-
if (token$1.type !== "element") return token$1;
|
|
14225
|
-
const index = elements.indexOf(token$1);
|
|
14226
|
-
if (position === "boundary" && index !== 0 && index !== last) return token$1;
|
|
14227
|
-
if (position === "trailing" && index !== last) return token$1;
|
|
14228
|
-
if (position === "leading" && index !== 0) return token$1;
|
|
14229
|
-
const node = token$1.children[0];
|
|
14230
|
-
if (node.type !== "text" || !node.value) return token$1;
|
|
14231
|
-
const parts = splitSpaces(node.value.split(/([ \t])/).filter((i$2) => i$2.length), position === "boundary" && index === last && last !== 0 ? "trailing" : position, position !== "trailing" && position !== "leading");
|
|
14232
|
-
if (parts.length <= 1) return token$1;
|
|
14233
|
-
return parts.map((part) => {
|
|
14234
|
-
const clone$1 = {
|
|
14235
|
-
...token$1,
|
|
14236
|
-
properties: { ...token$1.properties }
|
|
14237
|
-
};
|
|
14238
|
-
clone$1.children = [{
|
|
14239
|
-
type: "text",
|
|
14240
|
-
value: part
|
|
14241
|
-
}];
|
|
14242
|
-
if (keys.includes(part)) {
|
|
14243
|
-
this.addClassToHast(clone$1, classMap[part]);
|
|
14244
|
-
delete clone$1.properties.style;
|
|
14245
|
-
}
|
|
14246
|
-
return clone$1;
|
|
14247
|
-
});
|
|
14248
|
-
});
|
|
14249
|
-
});
|
|
14250
|
-
}
|
|
14251
|
-
};
|
|
14252
|
-
}
|
|
14253
|
-
/**
|
|
14254
|
-
* Remove line breaks between lines.
|
|
14255
|
-
* Useful when you override `display: block` to `.line` in CSS.
|
|
14256
|
-
*/
|
|
14257
|
-
function transformerStyleToClass(options = {}) {
|
|
14258
|
-
const { classPrefix = "__shiki_", classSuffix = "", classReplacer = (className) => className } = options;
|
|
14259
|
-
const classToStyle = /* @__PURE__ */ new Map();
|
|
14260
|
-
function stringifyStyle(style) {
|
|
14261
|
-
return Object.entries(style).map(([key$1, value]) => `${key$1}:${value}`).join(";");
|
|
14262
|
-
}
|
|
14263
|
-
function registerStyle(style) {
|
|
14264
|
-
let className = classPrefix + cyrb53(typeof style === "string" ? style : stringifyStyle(style)) + classSuffix;
|
|
14265
|
-
className = classReplacer(className);
|
|
14266
|
-
if (!classToStyle.has(className)) classToStyle.set(className, typeof style === "string" ? style : { ...style });
|
|
14267
|
-
return className;
|
|
14268
|
-
}
|
|
14269
|
-
return {
|
|
14270
|
-
name: "@shikijs/transformers:style-to-class",
|
|
14271
|
-
pre(t) {
|
|
14272
|
-
if (!t.properties.style) return;
|
|
14273
|
-
const className = registerStyle(t.properties.style);
|
|
14274
|
-
delete t.properties.style;
|
|
14275
|
-
this.addClassToHast(t, className);
|
|
14276
|
-
},
|
|
14277
|
-
tokens(lines) {
|
|
14278
|
-
for (const line of lines) for (const token$1 of line) {
|
|
14279
|
-
if (!token$1.htmlStyle) continue;
|
|
14280
|
-
const className = registerStyle(token$1.htmlStyle);
|
|
14281
|
-
token$1.htmlStyle = {};
|
|
14282
|
-
token$1.htmlAttrs ||= {};
|
|
14283
|
-
if (!token$1.htmlAttrs.class) token$1.htmlAttrs.class = className;
|
|
14284
|
-
else token$1.htmlAttrs.class += ` ${className}`;
|
|
14285
|
-
}
|
|
14286
|
-
},
|
|
14287
|
-
getClassRegistry() {
|
|
14288
|
-
return classToStyle;
|
|
14289
|
-
},
|
|
14290
|
-
getCSS() {
|
|
14291
|
-
let css = "";
|
|
14292
|
-
for (const [className, style] of classToStyle.entries()) css += `.${className}{${typeof style === "string" ? style : stringifyStyle(style)}}`;
|
|
14293
|
-
return css;
|
|
14294
|
-
},
|
|
14295
|
-
clearRegistry() {
|
|
14296
|
-
classToStyle.clear();
|
|
14297
|
-
}
|
|
14298
|
-
};
|
|
14299
|
-
}
|
|
14300
|
-
/**
|
|
14301
|
-
* A simple hash function.
|
|
14302
|
-
*
|
|
14303
|
-
* @see https://stackoverflow.com/a/52171480
|
|
14304
|
-
*/
|
|
14305
|
-
function cyrb53(str, seed = 0) {
|
|
14306
|
-
let h1 = 3735928559 ^ seed;
|
|
14307
|
-
let h2 = 1103547991 ^ seed;
|
|
14308
|
-
for (let i$2 = 0, ch; i$2 < str.length; i$2++) {
|
|
14309
|
-
ch = str.charCodeAt(i$2);
|
|
14310
|
-
h1 = Math.imul(h1 ^ ch, 2654435761);
|
|
14311
|
-
h2 = Math.imul(h2 ^ ch, 1597334677);
|
|
14312
|
-
}
|
|
14313
|
-
h1 = Math.imul(h1 ^ h1 >>> 16, 2246822507);
|
|
14314
|
-
h1 ^= Math.imul(h2 ^ h2 >>> 13, 3266489909);
|
|
14315
|
-
h2 = Math.imul(h2 ^ h2 >>> 16, 2246822507);
|
|
14316
|
-
h2 ^= Math.imul(h1 ^ h1 >>> 13, 3266489909);
|
|
14317
|
-
return (4294967296 * (2097151 & h2) + (h1 >>> 0)).toString(36).slice(0, 6);
|
|
14318
|
-
}
|
|
14319
|
-
|
|
14320
|
-
//#endregion
|
|
14321
|
-
//#region src/utils/hast_utils.ts
|
|
14322
|
-
function createTextNodeElement(value) {
|
|
14323
|
-
return {
|
|
14324
|
-
type: "text",
|
|
14325
|
-
value
|
|
14326
|
-
};
|
|
14327
|
-
}
|
|
14328
|
-
function createHastElement({ tagName, children = [], properties = {} }) {
|
|
14329
|
-
return {
|
|
14330
|
-
type: "element",
|
|
14331
|
-
tagName,
|
|
14332
|
-
properties,
|
|
14333
|
-
children
|
|
14334
|
-
};
|
|
14335
|
-
}
|
|
14336
|
-
function createIconElement({ name, width = 16, height = 16, properties }) {
|
|
14337
|
-
return createHastElement({
|
|
14338
|
-
tagName: "svg",
|
|
14339
|
-
properties: {
|
|
14340
|
-
width,
|
|
14341
|
-
height,
|
|
14342
|
-
viewBox: "0 0 16 16",
|
|
14343
|
-
...properties
|
|
14344
|
-
},
|
|
14345
|
-
children: [createHastElement({
|
|
14346
|
-
tagName: "use",
|
|
14347
|
-
properties: { href: `#${name.replace(/^#/, "")}` }
|
|
14348
|
-
})]
|
|
14349
|
-
});
|
|
14350
|
-
}
|
|
14351
|
-
function findCodeElement(nodes) {
|
|
14352
|
-
let firstChild = nodes.children[0];
|
|
14353
|
-
while (firstChild != null) {
|
|
14354
|
-
if (firstChild.type === "element" && firstChild.tagName === "code") {
|
|
14355
|
-
return firstChild;
|
|
14356
|
-
}
|
|
14357
|
-
if ("children" in firstChild) {
|
|
14358
|
-
firstChild = firstChild.children[0];
|
|
14359
|
-
} else {
|
|
14360
|
-
firstChild = null;
|
|
14361
|
-
}
|
|
14362
|
-
}
|
|
14363
|
-
return undefined;
|
|
14364
|
-
}
|
|
14365
|
-
function createGutterWrapper(children) {
|
|
14366
|
-
return createHastElement({
|
|
14367
|
-
tagName: "div",
|
|
14368
|
-
properties: { "data-gutter": "" },
|
|
14369
|
-
children
|
|
14370
|
-
});
|
|
14371
|
-
}
|
|
14372
|
-
function createGutterItem(lineType, lineNumber, lineIndex, properties = {}) {
|
|
14373
|
-
return createHastElement({
|
|
14374
|
-
tagName: "div",
|
|
14375
|
-
properties: {
|
|
14376
|
-
"data-line-type": lineType,
|
|
14377
|
-
"data-column-number": lineNumber,
|
|
14378
|
-
"data-line-index": lineIndex,
|
|
14379
|
-
...properties
|
|
14380
|
-
},
|
|
14381
|
-
children: lineNumber != null ? [createHastElement({
|
|
14382
|
-
tagName: "span",
|
|
14383
|
-
properties: { "data-line-number-content": "" },
|
|
14384
|
-
children: [createTextNodeElement(`${lineNumber}`)]
|
|
14385
|
-
})] : undefined
|
|
14386
|
-
});
|
|
14387
|
-
}
|
|
14388
|
-
function createGutterGap(type, bufferType, size) {
|
|
14389
|
-
return createHastElement({
|
|
14390
|
-
tagName: "div",
|
|
14391
|
-
properties: {
|
|
14392
|
-
"data-gutter-buffer": bufferType,
|
|
14393
|
-
"data-buffer-size": size,
|
|
14394
|
-
"data-line-type": bufferType === "annotation" ? undefined : type,
|
|
14395
|
-
style: bufferType === "annotation" ? `grid-row: span ${size};` : `grid-row: span ${size};min-height:calc(${size} * 1lh);`
|
|
14396
|
-
}
|
|
14397
|
-
});
|
|
14398
|
-
}
|
|
14399
|
-
|
|
14400
|
-
//#endregion
|
|
14401
|
-
//#region src/utils/processLine.ts
|
|
14402
|
-
function processLine(node, line, state) {
|
|
14403
|
-
const lineInfo = typeof state.lineInfo === "function" ? state.lineInfo(line) : state.lineInfo[line - 1];
|
|
14404
|
-
if (lineInfo == null) {
|
|
14405
|
-
const errorMessage = `processLine: line ${line}, contains no state.lineInfo`;
|
|
14406
|
-
console.error(errorMessage, {
|
|
14407
|
-
node,
|
|
14408
|
-
line,
|
|
14409
|
-
state
|
|
14410
|
-
});
|
|
14411
|
-
throw new Error(errorMessage);
|
|
14412
|
-
}
|
|
14413
|
-
node.tagName = "div";
|
|
14414
|
-
node.properties["data-line"] = lineInfo.lineNumber;
|
|
14415
|
-
node.properties["data-alt-line"] = lineInfo.altLineNumber;
|
|
14416
|
-
node.properties["data-line-type"] = lineInfo.type;
|
|
14417
|
-
node.properties["data-line-index"] = lineInfo.lineIndex;
|
|
14418
|
-
if (node.children.length === 0) {
|
|
14419
|
-
node.children.push(createTextNodeElement("\n"));
|
|
14420
|
-
}
|
|
14421
|
-
return node;
|
|
14422
|
-
}
|
|
14423
|
-
|
|
14424
|
-
//#endregion
|
|
14425
|
-
//#region src/utils/wrapTokenFragments.ts
|
|
14426
|
-
const NO_TOKEN = Symbol("no-token");
|
|
14427
|
-
const MULTIPLE_TOKENS = Symbol("multiple-tokens");
|
|
14428
|
-
function wrapTokenFragments(container) {
|
|
14429
|
-
const ownTokenChar = getTokenChar(container);
|
|
14430
|
-
if (ownTokenChar != null) {
|
|
14431
|
-
return ownTokenChar;
|
|
14432
|
-
}
|
|
14433
|
-
let containerTokenState = NO_TOKEN;
|
|
14434
|
-
const wrappedChildren = [];
|
|
14435
|
-
let currentTokenChildren = [];
|
|
14436
|
-
let currentTokenChar;
|
|
14437
|
-
const flushTokenChildren = () => {
|
|
14438
|
-
if (currentTokenChildren.length === 0 || currentTokenChar == null) {
|
|
14439
|
-
currentTokenChildren = [];
|
|
14440
|
-
currentTokenChar = undefined;
|
|
14441
|
-
return;
|
|
14442
|
-
}
|
|
14443
|
-
if (currentTokenChildren.length === 1) {
|
|
14444
|
-
const child = currentTokenChildren[0];
|
|
14445
|
-
if (child?.type === "element") {
|
|
14446
|
-
setTokenChar(child, currentTokenChar);
|
|
14447
|
-
for (const grandChild of child.children) {
|
|
14448
|
-
stripTokenChar(grandChild);
|
|
14449
|
-
}
|
|
14450
|
-
} else {
|
|
14451
|
-
stripTokenChar(child);
|
|
14452
|
-
}
|
|
14453
|
-
wrappedChildren.push(child);
|
|
14454
|
-
currentTokenChildren = [];
|
|
14455
|
-
currentTokenChar = undefined;
|
|
14456
|
-
return;
|
|
14457
|
-
}
|
|
14458
|
-
for (const child of currentTokenChildren) {
|
|
14459
|
-
stripTokenChar(child);
|
|
14460
|
-
}
|
|
14461
|
-
wrappedChildren.push(createHastElement({
|
|
14462
|
-
tagName: "span",
|
|
14463
|
-
properties: { "data-char": currentTokenChar },
|
|
14464
|
-
children: currentTokenChildren
|
|
14465
|
-
}));
|
|
14466
|
-
currentTokenChildren = [];
|
|
14467
|
-
currentTokenChar = undefined;
|
|
14468
|
-
};
|
|
14469
|
-
const mergeContainerTokenState = (childTokenState) => {
|
|
14470
|
-
if (childTokenState === NO_TOKEN) {
|
|
14471
|
-
return;
|
|
14472
|
-
}
|
|
14473
|
-
if (childTokenState === MULTIPLE_TOKENS) {
|
|
14474
|
-
containerTokenState = MULTIPLE_TOKENS;
|
|
14475
|
-
return;
|
|
14476
|
-
}
|
|
14477
|
-
if (containerTokenState === NO_TOKEN) {
|
|
14478
|
-
containerTokenState = childTokenState;
|
|
14479
|
-
return;
|
|
14480
|
-
}
|
|
14481
|
-
if (containerTokenState !== childTokenState) {
|
|
14482
|
-
containerTokenState = MULTIPLE_TOKENS;
|
|
14483
|
-
}
|
|
14484
|
-
};
|
|
14485
|
-
for (const child of container.children) {
|
|
14486
|
-
const childTokenState = child.type === "element" ? wrapTokenFragments(child) : NO_TOKEN;
|
|
14487
|
-
mergeContainerTokenState(childTokenState);
|
|
14488
|
-
if (typeof childTokenState !== "number") {
|
|
14489
|
-
flushTokenChildren();
|
|
14490
|
-
wrappedChildren.push(child);
|
|
14491
|
-
continue;
|
|
14492
|
-
}
|
|
14493
|
-
if (currentTokenChar != null && currentTokenChar !== childTokenState) {
|
|
14494
|
-
flushTokenChildren();
|
|
14495
|
-
}
|
|
14496
|
-
currentTokenChar ??= childTokenState;
|
|
14497
|
-
currentTokenChildren.push(child);
|
|
14498
|
-
}
|
|
14499
|
-
flushTokenChildren();
|
|
14500
|
-
container.children = wrappedChildren;
|
|
14501
|
-
return containerTokenState;
|
|
14502
|
-
}
|
|
14503
|
-
function getTokenChar(node) {
|
|
14504
|
-
const value = node.properties["data-char"];
|
|
14505
|
-
if (typeof value === "number") {
|
|
14506
|
-
return value;
|
|
14507
|
-
}
|
|
14508
|
-
return undefined;
|
|
14509
|
-
}
|
|
14510
|
-
function stripTokenChar(node) {
|
|
14511
|
-
if (node.type !== "element") return;
|
|
14512
|
-
node.properties["data-char"] = undefined;
|
|
14513
|
-
for (const child of node.children) {
|
|
14514
|
-
stripTokenChar(child);
|
|
14515
|
-
}
|
|
14516
|
-
}
|
|
14517
|
-
function setTokenChar(node, char) {
|
|
14518
|
-
node.properties["data-char"] = char;
|
|
14519
|
-
}
|
|
14520
|
-
|
|
14521
|
-
//#endregion
|
|
14522
|
-
//#region src/utils/createTransformerWithState.ts
|
|
14523
|
-
function createTransformerWithState(useTokenTransformer = false, useCSSClasses = false) {
|
|
14524
|
-
const state = { lineInfo: [] };
|
|
14525
|
-
const transformers = [{
|
|
14526
|
-
line(node) {
|
|
14527
|
-
delete node.properties.class;
|
|
14528
|
-
return node;
|
|
14529
|
-
},
|
|
14530
|
-
pre(pre) {
|
|
14531
|
-
const code = findCodeElement(pre);
|
|
14532
|
-
const children = [];
|
|
14533
|
-
if (code != null) {
|
|
14534
|
-
let index = 1;
|
|
14535
|
-
for (const node of code.children) {
|
|
14536
|
-
if (node.type !== "element") continue;
|
|
14537
|
-
if (useTokenTransformer) {
|
|
14538
|
-
wrapTokenFragments(node);
|
|
14539
|
-
}
|
|
14540
|
-
children.push(processLine(node, index, state));
|
|
14541
|
-
index++;
|
|
14542
|
-
}
|
|
14543
|
-
code.children = children;
|
|
14544
|
-
}
|
|
14545
|
-
return pre;
|
|
14546
|
-
},
|
|
14547
|
-
...useTokenTransformer ? {
|
|
14548
|
-
tokens(lines) {
|
|
14549
|
-
for (const line of lines) {
|
|
14550
|
-
let col = 0;
|
|
14551
|
-
for (const token$1 of line) {
|
|
14552
|
-
const tokenWithOriginalRange = token$1;
|
|
14553
|
-
tokenWithOriginalRange.__lineChar ??= col;
|
|
14554
|
-
col += token$1.content.length;
|
|
14555
|
-
}
|
|
14556
|
-
}
|
|
14557
|
-
},
|
|
14558
|
-
preprocess(_code, options) {
|
|
14559
|
-
options.mergeWhitespaces = "never";
|
|
14560
|
-
},
|
|
14561
|
-
span(hast, _line, _char, _lineElement, token$1) {
|
|
14562
|
-
if (token$1?.offset != null && token$1.content != null) {
|
|
14563
|
-
const tokenWithOriginalRange = token$1;
|
|
14564
|
-
const tokenChar = tokenWithOriginalRange.__lineChar;
|
|
14565
|
-
if (tokenChar != null) {
|
|
14566
|
-
hast.properties["data-char"] = tokenChar;
|
|
14567
|
-
}
|
|
14568
|
-
return hast;
|
|
14569
|
-
}
|
|
14570
|
-
return hast;
|
|
14571
|
-
}
|
|
14572
|
-
} : null
|
|
14573
|
-
}];
|
|
14574
|
-
if (useCSSClasses) {
|
|
14575
|
-
transformers.push(tokenStyleNormalizer, toClass);
|
|
14576
|
-
}
|
|
14577
|
-
return {
|
|
14578
|
-
state,
|
|
14579
|
-
transformers,
|
|
14580
|
-
toClass
|
|
14581
|
-
};
|
|
14582
|
-
}
|
|
14583
|
-
const toClass = transformerStyleToClass({ classPrefix: "hl-" });
|
|
14584
|
-
const tokenStyleNormalizer = {
|
|
14585
|
-
name: "token-style-normalizer",
|
|
14586
|
-
tokens(lines) {
|
|
14587
|
-
for (const line of lines) {
|
|
14588
|
-
for (const token$1 of line) {
|
|
14589
|
-
if (token$1.htmlStyle != null) continue;
|
|
14590
|
-
const style = {};
|
|
14591
|
-
if (token$1.color != null) {
|
|
14592
|
-
style.color = token$1.color;
|
|
14593
|
-
}
|
|
14594
|
-
if (token$1.bgColor != null) {
|
|
14595
|
-
style["background-color"] = token$1.bgColor;
|
|
14596
|
-
}
|
|
14597
|
-
if (token$1.fontStyle != null && token$1.fontStyle !== 0) {
|
|
14598
|
-
if ((token$1.fontStyle & 1) !== 0) {
|
|
14599
|
-
style["font-style"] = "italic";
|
|
14600
|
-
}
|
|
14601
|
-
if ((token$1.fontStyle & 2) !== 0) {
|
|
14602
|
-
style["font-weight"] = "bold";
|
|
14603
|
-
}
|
|
14604
|
-
if ((token$1.fontStyle & 4) !== 0) {
|
|
14605
|
-
style["text-decoration"] = "underline";
|
|
14606
|
-
}
|
|
14607
|
-
}
|
|
14608
|
-
if (Object.keys(style).length > 0) {
|
|
14609
|
-
token$1.htmlStyle = style;
|
|
14610
|
-
}
|
|
14611
|
-
}
|
|
14612
|
-
}
|
|
14613
|
-
}
|
|
14614
|
-
};
|
|
14615
|
-
|
|
14616
|
-
//#endregion
|
|
14617
|
-
//#region src/utils/formatCSSVariablePrefix.ts
|
|
14618
|
-
function formatCSSVariablePrefix(type) {
|
|
14619
|
-
return `--${type === "token" ? "diffs-token" : "diffs"}-`;
|
|
14620
|
-
}
|
|
14621
|
-
|
|
14622
12240
|
//#endregion
|
|
14623
12241
|
//#region src/utils/getFiletypeFromFileName.ts
|
|
14624
12242
|
const CUSTOM_EXTENSION_TO_FILE_FORMAT = new Map();
|
|
12243
|
+
let customExtensionsVersion = 0;
|
|
14625
12244
|
const EXTENSION_TO_FILE_FORMAT = {
|
|
14626
12245
|
"1c": "1c",
|
|
14627
12246
|
abap: "abap",
|
|
@@ -14951,35 +12570,2441 @@ const EXTENSION_TO_FILE_FORMAT = {
|
|
|
14951
12570
|
zsh: "zsh",
|
|
14952
12571
|
sty: "tex"
|
|
14953
12572
|
};
|
|
14954
|
-
function getFiletypeFromFileName(fileName) {
|
|
14955
|
-
if (CUSTOM_EXTENSION_TO_FILE_FORMAT.has(fileName)) {
|
|
14956
|
-
return CUSTOM_EXTENSION_TO_FILE_FORMAT.get(fileName) ?? "text";
|
|
12573
|
+
function getFiletypeFromFileName(fileName) {
|
|
12574
|
+
if (CUSTOM_EXTENSION_TO_FILE_FORMAT.has(fileName)) {
|
|
12575
|
+
return CUSTOM_EXTENSION_TO_FILE_FORMAT.get(fileName) ?? "text";
|
|
12576
|
+
}
|
|
12577
|
+
if (EXTENSION_TO_FILE_FORMAT[fileName] != null) {
|
|
12578
|
+
return EXTENSION_TO_FILE_FORMAT[fileName];
|
|
12579
|
+
}
|
|
12580
|
+
const compoundMatch = fileName.match(/\.([^/\\]+\.[^/\\]+)$/);
|
|
12581
|
+
if (compoundMatch != null) {
|
|
12582
|
+
if (CUSTOM_EXTENSION_TO_FILE_FORMAT.has(compoundMatch[1])) {
|
|
12583
|
+
return CUSTOM_EXTENSION_TO_FILE_FORMAT.get(compoundMatch[1]) ?? "text";
|
|
12584
|
+
}
|
|
12585
|
+
if (EXTENSION_TO_FILE_FORMAT[compoundMatch[1]] != null) {
|
|
12586
|
+
return EXTENSION_TO_FILE_FORMAT[compoundMatch[1]] ?? "text";
|
|
12587
|
+
}
|
|
12588
|
+
}
|
|
12589
|
+
const simpleMatch = fileName.match(/\.([^.]+)$/)?.[1] ?? "";
|
|
12590
|
+
if (CUSTOM_EXTENSION_TO_FILE_FORMAT.has(simpleMatch)) {
|
|
12591
|
+
return CUSTOM_EXTENSION_TO_FILE_FORMAT.get(simpleMatch) ?? "text";
|
|
12592
|
+
}
|
|
12593
|
+
return EXTENSION_TO_FILE_FORMAT[simpleMatch] ?? "text";
|
|
12594
|
+
}
|
|
12595
|
+
function replaceCustomExtensions(version, map) {
|
|
12596
|
+
if (version <= customExtensionsVersion) {
|
|
12597
|
+
return false;
|
|
12598
|
+
}
|
|
12599
|
+
CUSTOM_EXTENSION_TO_FILE_FORMAT.clear();
|
|
12600
|
+
for (const key$1 in map) {
|
|
12601
|
+
const lang = map[key$1];
|
|
12602
|
+
if (lang != null) {
|
|
12603
|
+
CUSTOM_EXTENSION_TO_FILE_FORMAT.set(key$1, lang);
|
|
12604
|
+
}
|
|
12605
|
+
}
|
|
12606
|
+
customExtensionsVersion = version;
|
|
12607
|
+
return true;
|
|
12608
|
+
}
|
|
12609
|
+
function getCustomExtensionsVersion() {
|
|
12610
|
+
return customExtensionsVersion;
|
|
12611
|
+
}
|
|
12612
|
+
function setCustomExtension(key$1, lang) {
|
|
12613
|
+
const existing = CUSTOM_EXTENSION_TO_FILE_FORMAT.get(key$1);
|
|
12614
|
+
if (existing === lang) {
|
|
12615
|
+
return false;
|
|
12616
|
+
}
|
|
12617
|
+
if (existing != null) {
|
|
12618
|
+
console.warn(`setCustomExtension: overriding custom mapping for "${key$1}" from "${existing}" to "${lang}"`);
|
|
12619
|
+
}
|
|
12620
|
+
CUSTOM_EXTENSION_TO_FILE_FORMAT.set(key$1, lang);
|
|
12621
|
+
customExtensionsVersion++;
|
|
12622
|
+
return true;
|
|
12623
|
+
}
|
|
12624
|
+
function getCustomExtensionsMap() {
|
|
12625
|
+
return Object.fromEntries(CUSTOM_EXTENSION_TO_FILE_FORMAT);
|
|
12626
|
+
}
|
|
12627
|
+
|
|
12628
|
+
//#endregion
|
|
12629
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/base.js
|
|
12630
|
+
var Diff = class {
|
|
12631
|
+
diff(oldStr, newStr, options = {}) {
|
|
12632
|
+
let callback;
|
|
12633
|
+
if (typeof options === "function") {
|
|
12634
|
+
callback = options;
|
|
12635
|
+
options = {};
|
|
12636
|
+
} else if ("callback" in options) {
|
|
12637
|
+
callback = options.callback;
|
|
12638
|
+
}
|
|
12639
|
+
const oldString = this.castInput(oldStr, options);
|
|
12640
|
+
const newString = this.castInput(newStr, options);
|
|
12641
|
+
const oldTokens = this.removeEmpty(this.tokenize(oldString, options));
|
|
12642
|
+
const newTokens = this.removeEmpty(this.tokenize(newString, options));
|
|
12643
|
+
return this.diffWithOptionsObj(oldTokens, newTokens, options, callback);
|
|
12644
|
+
}
|
|
12645
|
+
diffWithOptionsObj(oldTokens, newTokens, options, callback) {
|
|
12646
|
+
var _a;
|
|
12647
|
+
const done = (value) => {
|
|
12648
|
+
value = this.postProcess(value, options);
|
|
12649
|
+
if (callback) {
|
|
12650
|
+
setTimeout(function() {
|
|
12651
|
+
callback(value);
|
|
12652
|
+
}, 0);
|
|
12653
|
+
return undefined;
|
|
12654
|
+
} else {
|
|
12655
|
+
return value;
|
|
12656
|
+
}
|
|
12657
|
+
};
|
|
12658
|
+
const newLen = newTokens.length, oldLen = oldTokens.length;
|
|
12659
|
+
let editLength = 1;
|
|
12660
|
+
let maxEditLength = newLen + oldLen;
|
|
12661
|
+
if (options.maxEditLength != null) {
|
|
12662
|
+
maxEditLength = Math.min(maxEditLength, options.maxEditLength);
|
|
12663
|
+
}
|
|
12664
|
+
const maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity;
|
|
12665
|
+
const abortAfterTimestamp = Date.now() + maxExecutionTime;
|
|
12666
|
+
const bestPath = [{
|
|
12667
|
+
oldPos: -1,
|
|
12668
|
+
lastComponent: undefined
|
|
12669
|
+
}];
|
|
12670
|
+
let newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options);
|
|
12671
|
+
if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
|
|
12672
|
+
return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens));
|
|
12673
|
+
}
|
|
12674
|
+
let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity;
|
|
12675
|
+
const execEditLength = () => {
|
|
12676
|
+
for (let diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
|
|
12677
|
+
let basePath;
|
|
12678
|
+
const removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1];
|
|
12679
|
+
if (removePath) {
|
|
12680
|
+
bestPath[diagonalPath - 1] = undefined;
|
|
12681
|
+
}
|
|
12682
|
+
let canAdd = false;
|
|
12683
|
+
if (addPath) {
|
|
12684
|
+
const addPathNewPos = addPath.oldPos - diagonalPath;
|
|
12685
|
+
canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
|
|
12686
|
+
}
|
|
12687
|
+
const canRemove = removePath && removePath.oldPos + 1 < oldLen;
|
|
12688
|
+
if (!canAdd && !canRemove) {
|
|
12689
|
+
bestPath[diagonalPath] = undefined;
|
|
12690
|
+
continue;
|
|
12691
|
+
}
|
|
12692
|
+
if (!canRemove || canAdd && removePath.oldPos < addPath.oldPos) {
|
|
12693
|
+
basePath = this.addToPath(addPath, true, false, 0, options);
|
|
12694
|
+
} else {
|
|
12695
|
+
basePath = this.addToPath(removePath, false, true, 1, options);
|
|
12696
|
+
}
|
|
12697
|
+
newPos = this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options);
|
|
12698
|
+
if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
|
|
12699
|
+
return done(this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true;
|
|
12700
|
+
} else {
|
|
12701
|
+
bestPath[diagonalPath] = basePath;
|
|
12702
|
+
if (basePath.oldPos + 1 >= oldLen) {
|
|
12703
|
+
maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
|
|
12704
|
+
}
|
|
12705
|
+
if (newPos + 1 >= newLen) {
|
|
12706
|
+
minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
|
|
12707
|
+
}
|
|
12708
|
+
}
|
|
12709
|
+
}
|
|
12710
|
+
editLength++;
|
|
12711
|
+
};
|
|
12712
|
+
if (callback) {
|
|
12713
|
+
(function exec() {
|
|
12714
|
+
setTimeout(function() {
|
|
12715
|
+
if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
|
|
12716
|
+
return callback(undefined);
|
|
12717
|
+
}
|
|
12718
|
+
if (!execEditLength()) {
|
|
12719
|
+
exec();
|
|
12720
|
+
}
|
|
12721
|
+
}, 0);
|
|
12722
|
+
})();
|
|
12723
|
+
} else {
|
|
12724
|
+
while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
|
|
12725
|
+
const ret = execEditLength();
|
|
12726
|
+
if (ret) {
|
|
12727
|
+
return ret;
|
|
12728
|
+
}
|
|
12729
|
+
}
|
|
12730
|
+
}
|
|
12731
|
+
}
|
|
12732
|
+
addToPath(path, added, removed, oldPosInc, options) {
|
|
12733
|
+
const last = path.lastComponent;
|
|
12734
|
+
if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
|
|
12735
|
+
return {
|
|
12736
|
+
oldPos: path.oldPos + oldPosInc,
|
|
12737
|
+
lastComponent: {
|
|
12738
|
+
count: last.count + 1,
|
|
12739
|
+
added,
|
|
12740
|
+
removed,
|
|
12741
|
+
previousComponent: last.previousComponent
|
|
12742
|
+
}
|
|
12743
|
+
};
|
|
12744
|
+
} else {
|
|
12745
|
+
return {
|
|
12746
|
+
oldPos: path.oldPos + oldPosInc,
|
|
12747
|
+
lastComponent: {
|
|
12748
|
+
count: 1,
|
|
12749
|
+
added,
|
|
12750
|
+
removed,
|
|
12751
|
+
previousComponent: last
|
|
12752
|
+
}
|
|
12753
|
+
};
|
|
12754
|
+
}
|
|
12755
|
+
}
|
|
12756
|
+
extractCommon(basePath, newTokens, oldTokens, diagonalPath, options) {
|
|
12757
|
+
const newLen = newTokens.length, oldLen = oldTokens.length;
|
|
12758
|
+
let oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0;
|
|
12759
|
+
while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) {
|
|
12760
|
+
newPos++;
|
|
12761
|
+
oldPos++;
|
|
12762
|
+
commonCount++;
|
|
12763
|
+
if (options.oneChangePerToken) {
|
|
12764
|
+
basePath.lastComponent = {
|
|
12765
|
+
count: 1,
|
|
12766
|
+
previousComponent: basePath.lastComponent,
|
|
12767
|
+
added: false,
|
|
12768
|
+
removed: false
|
|
12769
|
+
};
|
|
12770
|
+
}
|
|
12771
|
+
}
|
|
12772
|
+
if (commonCount && !options.oneChangePerToken) {
|
|
12773
|
+
basePath.lastComponent = {
|
|
12774
|
+
count: commonCount,
|
|
12775
|
+
previousComponent: basePath.lastComponent,
|
|
12776
|
+
added: false,
|
|
12777
|
+
removed: false
|
|
12778
|
+
};
|
|
12779
|
+
}
|
|
12780
|
+
basePath.oldPos = oldPos;
|
|
12781
|
+
return newPos;
|
|
12782
|
+
}
|
|
12783
|
+
equals(left, right, options) {
|
|
12784
|
+
if (options.comparator) {
|
|
12785
|
+
return options.comparator(left, right);
|
|
12786
|
+
} else {
|
|
12787
|
+
return left === right || !!options.ignoreCase && left.toLowerCase() === right.toLowerCase();
|
|
12788
|
+
}
|
|
12789
|
+
}
|
|
12790
|
+
removeEmpty(array) {
|
|
12791
|
+
const ret = [];
|
|
12792
|
+
for (let i$2 = 0; i$2 < array.length; i$2++) {
|
|
12793
|
+
if (array[i$2]) {
|
|
12794
|
+
ret.push(array[i$2]);
|
|
12795
|
+
}
|
|
12796
|
+
}
|
|
12797
|
+
return ret;
|
|
12798
|
+
}
|
|
12799
|
+
castInput(value, options) {
|
|
12800
|
+
return value;
|
|
12801
|
+
}
|
|
12802
|
+
tokenize(value, options) {
|
|
12803
|
+
return Array.from(value);
|
|
12804
|
+
}
|
|
12805
|
+
join(chars) {
|
|
12806
|
+
return chars.join("");
|
|
12807
|
+
}
|
|
12808
|
+
postProcess(changeObjects, options) {
|
|
12809
|
+
return changeObjects;
|
|
12810
|
+
}
|
|
12811
|
+
get useLongestToken() {
|
|
12812
|
+
return false;
|
|
12813
|
+
}
|
|
12814
|
+
buildValues(lastComponent, newTokens, oldTokens) {
|
|
12815
|
+
const components = [];
|
|
12816
|
+
let nextComponent;
|
|
12817
|
+
while (lastComponent) {
|
|
12818
|
+
components.push(lastComponent);
|
|
12819
|
+
nextComponent = lastComponent.previousComponent;
|
|
12820
|
+
delete lastComponent.previousComponent;
|
|
12821
|
+
lastComponent = nextComponent;
|
|
12822
|
+
}
|
|
12823
|
+
components.reverse();
|
|
12824
|
+
const componentLen = components.length;
|
|
12825
|
+
let componentPos = 0, newPos = 0, oldPos = 0;
|
|
12826
|
+
for (; componentPos < componentLen; componentPos++) {
|
|
12827
|
+
const component = components[componentPos];
|
|
12828
|
+
if (!component.removed) {
|
|
12829
|
+
if (!component.added && this.useLongestToken) {
|
|
12830
|
+
let value = newTokens.slice(newPos, newPos + component.count);
|
|
12831
|
+
value = value.map(function(value$1, i$2) {
|
|
12832
|
+
const oldValue = oldTokens[oldPos + i$2];
|
|
12833
|
+
return oldValue.length > value$1.length ? oldValue : value$1;
|
|
12834
|
+
});
|
|
12835
|
+
component.value = this.join(value);
|
|
12836
|
+
} else {
|
|
12837
|
+
component.value = this.join(newTokens.slice(newPos, newPos + component.count));
|
|
12838
|
+
}
|
|
12839
|
+
newPos += component.count;
|
|
12840
|
+
if (!component.added) {
|
|
12841
|
+
oldPos += component.count;
|
|
12842
|
+
}
|
|
12843
|
+
} else {
|
|
12844
|
+
component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count));
|
|
12845
|
+
oldPos += component.count;
|
|
12846
|
+
}
|
|
12847
|
+
}
|
|
12848
|
+
return components;
|
|
12849
|
+
}
|
|
12850
|
+
};
|
|
12851
|
+
|
|
12852
|
+
//#endregion
|
|
12853
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/character.js
|
|
12854
|
+
var CharacterDiff = class extends Diff {};
|
|
12855
|
+
const characterDiff = new CharacterDiff();
|
|
12856
|
+
function diffChars(oldStr, newStr, options) {
|
|
12857
|
+
return characterDiff.diff(oldStr, newStr, options);
|
|
12858
|
+
}
|
|
12859
|
+
|
|
12860
|
+
//#endregion
|
|
12861
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/util/string.js
|
|
12862
|
+
function longestCommonPrefix(str1, str2) {
|
|
12863
|
+
let i$2;
|
|
12864
|
+
for (i$2 = 0; i$2 < str1.length && i$2 < str2.length; i$2++) {
|
|
12865
|
+
if (str1[i$2] != str2[i$2]) {
|
|
12866
|
+
return str1.slice(0, i$2);
|
|
12867
|
+
}
|
|
12868
|
+
}
|
|
12869
|
+
return str1.slice(0, i$2);
|
|
12870
|
+
}
|
|
12871
|
+
function longestCommonSuffix(str1, str2) {
|
|
12872
|
+
let i$2;
|
|
12873
|
+
if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
|
|
12874
|
+
return "";
|
|
12875
|
+
}
|
|
12876
|
+
for (i$2 = 0; i$2 < str1.length && i$2 < str2.length; i$2++) {
|
|
12877
|
+
if (str1[str1.length - (i$2 + 1)] != str2[str2.length - (i$2 + 1)]) {
|
|
12878
|
+
return str1.slice(-i$2);
|
|
12879
|
+
}
|
|
12880
|
+
}
|
|
12881
|
+
return str1.slice(-i$2);
|
|
12882
|
+
}
|
|
12883
|
+
function replacePrefix(string, oldPrefix, newPrefix) {
|
|
12884
|
+
if (string.slice(0, oldPrefix.length) != oldPrefix) {
|
|
12885
|
+
throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`);
|
|
12886
|
+
}
|
|
12887
|
+
return newPrefix + string.slice(oldPrefix.length);
|
|
12888
|
+
}
|
|
12889
|
+
function replaceSuffix(string, oldSuffix, newSuffix) {
|
|
12890
|
+
if (!oldSuffix) {
|
|
12891
|
+
return string + newSuffix;
|
|
12892
|
+
}
|
|
12893
|
+
if (string.slice(-oldSuffix.length) != oldSuffix) {
|
|
12894
|
+
throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`);
|
|
12895
|
+
}
|
|
12896
|
+
return string.slice(0, -oldSuffix.length) + newSuffix;
|
|
12897
|
+
}
|
|
12898
|
+
function removePrefix(string, oldPrefix) {
|
|
12899
|
+
return replacePrefix(string, oldPrefix, "");
|
|
12900
|
+
}
|
|
12901
|
+
function removeSuffix(string, oldSuffix) {
|
|
12902
|
+
return replaceSuffix(string, oldSuffix, "");
|
|
12903
|
+
}
|
|
12904
|
+
function maximumOverlap(string1, string2) {
|
|
12905
|
+
return string2.slice(0, overlapCount(string1, string2));
|
|
12906
|
+
}
|
|
12907
|
+
function overlapCount(a$1, b$2) {
|
|
12908
|
+
let startA = 0;
|
|
12909
|
+
if (a$1.length > b$2.length) {
|
|
12910
|
+
startA = a$1.length - b$2.length;
|
|
12911
|
+
}
|
|
12912
|
+
let endB = b$2.length;
|
|
12913
|
+
if (a$1.length < b$2.length) {
|
|
12914
|
+
endB = a$1.length;
|
|
12915
|
+
}
|
|
12916
|
+
const map = Array(endB);
|
|
12917
|
+
let k$2 = 0;
|
|
12918
|
+
map[0] = 0;
|
|
12919
|
+
for (let j$1 = 1; j$1 < endB; j$1++) {
|
|
12920
|
+
if (b$2[j$1] == b$2[k$2]) {
|
|
12921
|
+
map[j$1] = map[k$2];
|
|
12922
|
+
} else {
|
|
12923
|
+
map[j$1] = k$2;
|
|
12924
|
+
}
|
|
12925
|
+
while (k$2 > 0 && b$2[j$1] != b$2[k$2]) {
|
|
12926
|
+
k$2 = map[k$2];
|
|
12927
|
+
}
|
|
12928
|
+
if (b$2[j$1] == b$2[k$2]) {
|
|
12929
|
+
k$2++;
|
|
12930
|
+
}
|
|
12931
|
+
}
|
|
12932
|
+
k$2 = 0;
|
|
12933
|
+
for (let i$2 = startA; i$2 < a$1.length; i$2++) {
|
|
12934
|
+
while (k$2 > 0 && a$1[i$2] != b$2[k$2]) {
|
|
12935
|
+
k$2 = map[k$2];
|
|
12936
|
+
}
|
|
12937
|
+
if (a$1[i$2] == b$2[k$2]) {
|
|
12938
|
+
k$2++;
|
|
12939
|
+
}
|
|
12940
|
+
}
|
|
12941
|
+
return k$2;
|
|
12942
|
+
}
|
|
12943
|
+
/**
|
|
12944
|
+
* Returns true if the string consistently uses Windows line endings.
|
|
12945
|
+
*/
|
|
12946
|
+
function hasOnlyWinLineEndings(string) {
|
|
12947
|
+
return string.includes("\r\n") && !string.startsWith("\n") && !string.match(/[^\r]\n/);
|
|
12948
|
+
}
|
|
12949
|
+
/**
|
|
12950
|
+
* Returns true if the string consistently uses Unix line endings.
|
|
12951
|
+
*/
|
|
12952
|
+
function hasOnlyUnixLineEndings(string) {
|
|
12953
|
+
return !string.includes("\r\n") && string.includes("\n");
|
|
12954
|
+
}
|
|
12955
|
+
function trailingWs(string) {
|
|
12956
|
+
let i$2;
|
|
12957
|
+
for (i$2 = string.length - 1; i$2 >= 0; i$2--) {
|
|
12958
|
+
if (!string[i$2].match(/\s/)) {
|
|
12959
|
+
break;
|
|
12960
|
+
}
|
|
12961
|
+
}
|
|
12962
|
+
return string.substring(i$2 + 1);
|
|
12963
|
+
}
|
|
12964
|
+
function leadingWs(string) {
|
|
12965
|
+
const match = string.match(/^\s*/);
|
|
12966
|
+
return match ? match[0] : "";
|
|
12967
|
+
}
|
|
12968
|
+
|
|
12969
|
+
//#endregion
|
|
12970
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/word.js
|
|
12971
|
+
const extendedWordChars = "a-zA-Z0-9_\\u{AD}\\u{C0}-\\u{D6}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}";
|
|
12972
|
+
const tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\s+|[^${extendedWordChars}]`, "ug");
|
|
12973
|
+
var WordDiff = class extends Diff {
|
|
12974
|
+
equals(left, right, options) {
|
|
12975
|
+
if (options.ignoreCase) {
|
|
12976
|
+
left = left.toLowerCase();
|
|
12977
|
+
right = right.toLowerCase();
|
|
12978
|
+
}
|
|
12979
|
+
return left.trim() === right.trim();
|
|
12980
|
+
}
|
|
12981
|
+
tokenize(value, options = {}) {
|
|
12982
|
+
let parts;
|
|
12983
|
+
if (options.intlSegmenter) {
|
|
12984
|
+
const segmenter = options.intlSegmenter;
|
|
12985
|
+
if (segmenter.resolvedOptions().granularity != "word") {
|
|
12986
|
+
throw new Error("The segmenter passed must have a granularity of \"word\"");
|
|
12987
|
+
}
|
|
12988
|
+
parts = [];
|
|
12989
|
+
for (const segmentObj of Array.from(segmenter.segment(value))) {
|
|
12990
|
+
const segment = segmentObj.segment;
|
|
12991
|
+
if (parts.length && /\s/.test(parts[parts.length - 1]) && /\s/.test(segment)) {
|
|
12992
|
+
parts[parts.length - 1] += segment;
|
|
12993
|
+
} else {
|
|
12994
|
+
parts.push(segment);
|
|
12995
|
+
}
|
|
12996
|
+
}
|
|
12997
|
+
} else {
|
|
12998
|
+
parts = value.match(tokenizeIncludingWhitespace) || [];
|
|
12999
|
+
}
|
|
13000
|
+
const tokens = [];
|
|
13001
|
+
let prevPart = null;
|
|
13002
|
+
parts.forEach((part) => {
|
|
13003
|
+
if (/\s/.test(part)) {
|
|
13004
|
+
if (prevPart == null) {
|
|
13005
|
+
tokens.push(part);
|
|
13006
|
+
} else {
|
|
13007
|
+
tokens.push(tokens.pop() + part);
|
|
13008
|
+
}
|
|
13009
|
+
} else if (prevPart != null && /\s/.test(prevPart)) {
|
|
13010
|
+
if (tokens[tokens.length - 1] == prevPart) {
|
|
13011
|
+
tokens.push(tokens.pop() + part);
|
|
13012
|
+
} else {
|
|
13013
|
+
tokens.push(prevPart + part);
|
|
13014
|
+
}
|
|
13015
|
+
} else {
|
|
13016
|
+
tokens.push(part);
|
|
13017
|
+
}
|
|
13018
|
+
prevPart = part;
|
|
13019
|
+
});
|
|
13020
|
+
return tokens;
|
|
13021
|
+
}
|
|
13022
|
+
join(tokens) {
|
|
13023
|
+
return tokens.map((token$1, i$2) => {
|
|
13024
|
+
if (i$2 == 0) {
|
|
13025
|
+
return token$1;
|
|
13026
|
+
} else {
|
|
13027
|
+
return token$1.replace(/^\s+/, "");
|
|
13028
|
+
}
|
|
13029
|
+
}).join("");
|
|
13030
|
+
}
|
|
13031
|
+
postProcess(changes, options) {
|
|
13032
|
+
if (!changes || options.oneChangePerToken) {
|
|
13033
|
+
return changes;
|
|
13034
|
+
}
|
|
13035
|
+
let lastKeep = null;
|
|
13036
|
+
let insertion = null;
|
|
13037
|
+
let deletion = null;
|
|
13038
|
+
changes.forEach((change) => {
|
|
13039
|
+
if (change.added) {
|
|
13040
|
+
insertion = change;
|
|
13041
|
+
} else if (change.removed) {
|
|
13042
|
+
deletion = change;
|
|
13043
|
+
} else {
|
|
13044
|
+
if (insertion || deletion) {
|
|
13045
|
+
dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
|
|
13046
|
+
}
|
|
13047
|
+
lastKeep = change;
|
|
13048
|
+
insertion = null;
|
|
13049
|
+
deletion = null;
|
|
13050
|
+
}
|
|
13051
|
+
});
|
|
13052
|
+
if (insertion || deletion) {
|
|
13053
|
+
dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
|
|
13054
|
+
}
|
|
13055
|
+
return changes;
|
|
13056
|
+
}
|
|
13057
|
+
};
|
|
13058
|
+
const wordDiff = new WordDiff();
|
|
13059
|
+
function diffWords(oldStr, newStr, options) {
|
|
13060
|
+
if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
|
|
13061
|
+
return diffWordsWithSpace(oldStr, newStr, options);
|
|
13062
|
+
}
|
|
13063
|
+
return wordDiff.diff(oldStr, newStr, options);
|
|
13064
|
+
}
|
|
13065
|
+
function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
|
|
13066
|
+
if (deletion && insertion) {
|
|
13067
|
+
const oldWsPrefix = leadingWs(deletion.value);
|
|
13068
|
+
const oldWsSuffix = trailingWs(deletion.value);
|
|
13069
|
+
const newWsPrefix = leadingWs(insertion.value);
|
|
13070
|
+
const newWsSuffix = trailingWs(insertion.value);
|
|
13071
|
+
if (startKeep) {
|
|
13072
|
+
const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
|
|
13073
|
+
startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
|
|
13074
|
+
deletion.value = removePrefix(deletion.value, commonWsPrefix);
|
|
13075
|
+
insertion.value = removePrefix(insertion.value, commonWsPrefix);
|
|
13076
|
+
}
|
|
13077
|
+
if (endKeep) {
|
|
13078
|
+
const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
|
|
13079
|
+
endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
|
|
13080
|
+
deletion.value = removeSuffix(deletion.value, commonWsSuffix);
|
|
13081
|
+
insertion.value = removeSuffix(insertion.value, commonWsSuffix);
|
|
13082
|
+
}
|
|
13083
|
+
} else if (insertion) {
|
|
13084
|
+
if (startKeep) {
|
|
13085
|
+
const ws = leadingWs(insertion.value);
|
|
13086
|
+
insertion.value = insertion.value.substring(ws.length);
|
|
13087
|
+
}
|
|
13088
|
+
if (endKeep) {
|
|
13089
|
+
const ws = leadingWs(endKeep.value);
|
|
13090
|
+
endKeep.value = endKeep.value.substring(ws.length);
|
|
13091
|
+
}
|
|
13092
|
+
} else if (startKeep && endKeep) {
|
|
13093
|
+
const newWsFull = leadingWs(endKeep.value), delWsStart = leadingWs(deletion.value), delWsEnd = trailingWs(deletion.value);
|
|
13094
|
+
const newWsStart = longestCommonPrefix(newWsFull, delWsStart);
|
|
13095
|
+
deletion.value = removePrefix(deletion.value, newWsStart);
|
|
13096
|
+
const newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
|
|
13097
|
+
deletion.value = removeSuffix(deletion.value, newWsEnd);
|
|
13098
|
+
endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
|
|
13099
|
+
startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
|
|
13100
|
+
} else if (endKeep) {
|
|
13101
|
+
const endKeepWsPrefix = leadingWs(endKeep.value);
|
|
13102
|
+
const deletionWsSuffix = trailingWs(deletion.value);
|
|
13103
|
+
const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
|
|
13104
|
+
deletion.value = removeSuffix(deletion.value, overlap);
|
|
13105
|
+
} else if (startKeep) {
|
|
13106
|
+
const startKeepWsSuffix = trailingWs(startKeep.value);
|
|
13107
|
+
const deletionWsPrefix = leadingWs(deletion.value);
|
|
13108
|
+
const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
|
|
13109
|
+
deletion.value = removePrefix(deletion.value, overlap);
|
|
13110
|
+
}
|
|
13111
|
+
}
|
|
13112
|
+
var WordsWithSpaceDiff = class extends Diff {
|
|
13113
|
+
tokenize(value) {
|
|
13114
|
+
const regex = new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`, "ug");
|
|
13115
|
+
return value.match(regex) || [];
|
|
13116
|
+
}
|
|
13117
|
+
};
|
|
13118
|
+
const wordsWithSpaceDiff = new WordsWithSpaceDiff();
|
|
13119
|
+
function diffWordsWithSpace(oldStr, newStr, options) {
|
|
13120
|
+
return wordsWithSpaceDiff.diff(oldStr, newStr, options);
|
|
13121
|
+
}
|
|
13122
|
+
|
|
13123
|
+
//#endregion
|
|
13124
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/util/params.js
|
|
13125
|
+
function generateOptions(options, defaults) {
|
|
13126
|
+
if (typeof options === "function") {
|
|
13127
|
+
defaults.callback = options;
|
|
13128
|
+
} else if (options) {
|
|
13129
|
+
for (const name in options) {
|
|
13130
|
+
/* istanbul ignore else */
|
|
13131
|
+
if (Object.prototype.hasOwnProperty.call(options, name)) {
|
|
13132
|
+
defaults[name] = options[name];
|
|
13133
|
+
}
|
|
13134
|
+
}
|
|
13135
|
+
}
|
|
13136
|
+
return defaults;
|
|
13137
|
+
}
|
|
13138
|
+
|
|
13139
|
+
//#endregion
|
|
13140
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/line.js
|
|
13141
|
+
var LineDiff = class extends Diff {
|
|
13142
|
+
constructor() {
|
|
13143
|
+
super(...arguments);
|
|
13144
|
+
this.tokenize = tokenize;
|
|
13145
|
+
}
|
|
13146
|
+
equals(left, right, options) {
|
|
13147
|
+
if (options.ignoreWhitespace) {
|
|
13148
|
+
if (!options.newlineIsToken || !left.includes("\n")) {
|
|
13149
|
+
left = left.trim();
|
|
13150
|
+
}
|
|
13151
|
+
if (!options.newlineIsToken || !right.includes("\n")) {
|
|
13152
|
+
right = right.trim();
|
|
13153
|
+
}
|
|
13154
|
+
} else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
|
|
13155
|
+
if (left.endsWith("\n")) {
|
|
13156
|
+
left = left.slice(0, -1);
|
|
13157
|
+
}
|
|
13158
|
+
if (right.endsWith("\n")) {
|
|
13159
|
+
right = right.slice(0, -1);
|
|
13160
|
+
}
|
|
13161
|
+
}
|
|
13162
|
+
return super.equals(left, right, options);
|
|
13163
|
+
}
|
|
13164
|
+
};
|
|
13165
|
+
const lineDiff = new LineDiff();
|
|
13166
|
+
function diffLines(oldStr, newStr, options) {
|
|
13167
|
+
return lineDiff.diff(oldStr, newStr, options);
|
|
13168
|
+
}
|
|
13169
|
+
function diffTrimmedLines(oldStr, newStr, options) {
|
|
13170
|
+
options = generateOptions(options, { ignoreWhitespace: true });
|
|
13171
|
+
return lineDiff.diff(oldStr, newStr, options);
|
|
13172
|
+
}
|
|
13173
|
+
function tokenize(value, options) {
|
|
13174
|
+
if (options.stripTrailingCr) {
|
|
13175
|
+
value = value.replace(/\r\n/g, "\n");
|
|
13176
|
+
}
|
|
13177
|
+
const retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/);
|
|
13178
|
+
if (!linesAndNewlines[linesAndNewlines.length - 1]) {
|
|
13179
|
+
linesAndNewlines.pop();
|
|
13180
|
+
}
|
|
13181
|
+
for (let i$2 = 0; i$2 < linesAndNewlines.length; i$2++) {
|
|
13182
|
+
const line = linesAndNewlines[i$2];
|
|
13183
|
+
if (i$2 % 2 && !options.newlineIsToken) {
|
|
13184
|
+
retLines[retLines.length - 1] += line;
|
|
13185
|
+
} else {
|
|
13186
|
+
retLines.push(line);
|
|
13187
|
+
}
|
|
13188
|
+
}
|
|
13189
|
+
return retLines;
|
|
13190
|
+
}
|
|
13191
|
+
|
|
13192
|
+
//#endregion
|
|
13193
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/sentence.js
|
|
13194
|
+
function isSentenceEndPunct(char) {
|
|
13195
|
+
return char == "." || char == "!" || char == "?";
|
|
13196
|
+
}
|
|
13197
|
+
var SentenceDiff = class extends Diff {
|
|
13198
|
+
tokenize(value) {
|
|
13199
|
+
var _a;
|
|
13200
|
+
const result = [];
|
|
13201
|
+
let tokenStartI = 0;
|
|
13202
|
+
for (let i$2 = 0; i$2 < value.length; i$2++) {
|
|
13203
|
+
if (i$2 == value.length - 1) {
|
|
13204
|
+
result.push(value.slice(tokenStartI));
|
|
13205
|
+
break;
|
|
13206
|
+
}
|
|
13207
|
+
if (isSentenceEndPunct(value[i$2]) && value[i$2 + 1].match(/\s/)) {
|
|
13208
|
+
result.push(value.slice(tokenStartI, i$2 + 1));
|
|
13209
|
+
i$2 = tokenStartI = i$2 + 1;
|
|
13210
|
+
while ((_a = value[i$2 + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) {
|
|
13211
|
+
i$2++;
|
|
13212
|
+
}
|
|
13213
|
+
result.push(value.slice(tokenStartI, i$2 + 1));
|
|
13214
|
+
tokenStartI = i$2 + 1;
|
|
13215
|
+
}
|
|
13216
|
+
}
|
|
13217
|
+
return result;
|
|
13218
|
+
}
|
|
13219
|
+
};
|
|
13220
|
+
const sentenceDiff = new SentenceDiff();
|
|
13221
|
+
function diffSentences(oldStr, newStr, options) {
|
|
13222
|
+
return sentenceDiff.diff(oldStr, newStr, options);
|
|
13223
|
+
}
|
|
13224
|
+
|
|
13225
|
+
//#endregion
|
|
13226
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/css.js
|
|
13227
|
+
var CssDiff = class extends Diff {
|
|
13228
|
+
tokenize(value) {
|
|
13229
|
+
return value.split(/([{}:;,]|\s+)/);
|
|
13230
|
+
}
|
|
13231
|
+
};
|
|
13232
|
+
const cssDiff = new CssDiff();
|
|
13233
|
+
function diffCss(oldStr, newStr, options) {
|
|
13234
|
+
return cssDiff.diff(oldStr, newStr, options);
|
|
13235
|
+
}
|
|
13236
|
+
|
|
13237
|
+
//#endregion
|
|
13238
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/json.js
|
|
13239
|
+
var JsonDiff = class extends Diff {
|
|
13240
|
+
constructor() {
|
|
13241
|
+
super(...arguments);
|
|
13242
|
+
this.tokenize = tokenize;
|
|
13243
|
+
}
|
|
13244
|
+
get useLongestToken() {
|
|
13245
|
+
return true;
|
|
13246
|
+
}
|
|
13247
|
+
castInput(value, options) {
|
|
13248
|
+
const { undefinedReplacement, stringifyReplacer = (k$2, v$1) => typeof v$1 === "undefined" ? undefinedReplacement : v$1 } = options;
|
|
13249
|
+
return typeof value === "string" ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, " ");
|
|
13250
|
+
}
|
|
13251
|
+
equals(left, right, options) {
|
|
13252
|
+
return super.equals(left.replace(/,([\r\n])/g, "$1"), right.replace(/,([\r\n])/g, "$1"), options);
|
|
13253
|
+
}
|
|
13254
|
+
};
|
|
13255
|
+
const jsonDiff = new JsonDiff();
|
|
13256
|
+
function diffJson(oldStr, newStr, options) {
|
|
13257
|
+
return jsonDiff.diff(oldStr, newStr, options);
|
|
13258
|
+
}
|
|
13259
|
+
function canonicalize(obj, stack, replacementStack, replacer, key$1) {
|
|
13260
|
+
stack = stack || [];
|
|
13261
|
+
replacementStack = replacementStack || [];
|
|
13262
|
+
if (replacer) {
|
|
13263
|
+
obj = replacer(key$1 === undefined ? "" : key$1, obj);
|
|
13264
|
+
}
|
|
13265
|
+
let i$2;
|
|
13266
|
+
for (i$2 = 0; i$2 < stack.length; i$2 += 1) {
|
|
13267
|
+
if (stack[i$2] === obj) {
|
|
13268
|
+
return replacementStack[i$2];
|
|
13269
|
+
}
|
|
13270
|
+
}
|
|
13271
|
+
let canonicalizedObj;
|
|
13272
|
+
if ("[object Array]" === Object.prototype.toString.call(obj)) {
|
|
13273
|
+
stack.push(obj);
|
|
13274
|
+
canonicalizedObj = new Array(obj.length);
|
|
13275
|
+
replacementStack.push(canonicalizedObj);
|
|
13276
|
+
for (i$2 = 0; i$2 < obj.length; i$2 += 1) {
|
|
13277
|
+
canonicalizedObj[i$2] = canonicalize(obj[i$2], stack, replacementStack, replacer, String(i$2));
|
|
13278
|
+
}
|
|
13279
|
+
stack.pop();
|
|
13280
|
+
replacementStack.pop();
|
|
13281
|
+
return canonicalizedObj;
|
|
13282
|
+
}
|
|
13283
|
+
if (obj && obj.toJSON) {
|
|
13284
|
+
obj = obj.toJSON();
|
|
13285
|
+
}
|
|
13286
|
+
if (typeof obj === "object" && obj !== null) {
|
|
13287
|
+
stack.push(obj);
|
|
13288
|
+
canonicalizedObj = {};
|
|
13289
|
+
replacementStack.push(canonicalizedObj);
|
|
13290
|
+
const sortedKeys = [];
|
|
13291
|
+
let key$2;
|
|
13292
|
+
for (key$2 in obj) {
|
|
13293
|
+
/* istanbul ignore else */
|
|
13294
|
+
if (Object.prototype.hasOwnProperty.call(obj, key$2)) {
|
|
13295
|
+
sortedKeys.push(key$2);
|
|
13296
|
+
}
|
|
13297
|
+
}
|
|
13298
|
+
sortedKeys.sort();
|
|
13299
|
+
for (i$2 = 0; i$2 < sortedKeys.length; i$2 += 1) {
|
|
13300
|
+
key$2 = sortedKeys[i$2];
|
|
13301
|
+
canonicalizedObj[key$2] = canonicalize(obj[key$2], stack, replacementStack, replacer, key$2);
|
|
13302
|
+
}
|
|
13303
|
+
stack.pop();
|
|
13304
|
+
replacementStack.pop();
|
|
13305
|
+
} else {
|
|
13306
|
+
canonicalizedObj = obj;
|
|
13307
|
+
}
|
|
13308
|
+
return canonicalizedObj;
|
|
13309
|
+
}
|
|
13310
|
+
|
|
13311
|
+
//#endregion
|
|
13312
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/diff/array.js
|
|
13313
|
+
var ArrayDiff = class extends Diff {
|
|
13314
|
+
tokenize(value) {
|
|
13315
|
+
return value.slice();
|
|
13316
|
+
}
|
|
13317
|
+
join(value) {
|
|
13318
|
+
return value;
|
|
13319
|
+
}
|
|
13320
|
+
removeEmpty(value) {
|
|
13321
|
+
return value;
|
|
13322
|
+
}
|
|
13323
|
+
};
|
|
13324
|
+
const arrayDiff = new ArrayDiff();
|
|
13325
|
+
function diffArrays(oldArr, newArr, options) {
|
|
13326
|
+
return arrayDiff.diff(oldArr, newArr, options);
|
|
13327
|
+
}
|
|
13328
|
+
|
|
13329
|
+
//#endregion
|
|
13330
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/line-endings.js
|
|
13331
|
+
function unixToWin(patch) {
|
|
13332
|
+
if (Array.isArray(patch)) {
|
|
13333
|
+
return patch.map((p$1) => unixToWin(p$1));
|
|
13334
|
+
}
|
|
13335
|
+
return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map((hunk) => Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line, i$2) => {
|
|
13336
|
+
var _a;
|
|
13337
|
+
return line.startsWith("\\") || line.endsWith("\r") || ((_a = hunk.lines[i$2 + 1]) === null || _a === void 0 ? void 0 : _a.startsWith("\\")) ? line : line + "\r";
|
|
13338
|
+
}) })) });
|
|
13339
|
+
}
|
|
13340
|
+
function winToUnix(patch) {
|
|
13341
|
+
if (Array.isArray(patch)) {
|
|
13342
|
+
return patch.map((p$1) => winToUnix(p$1));
|
|
13343
|
+
}
|
|
13344
|
+
return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map((hunk) => Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line) => line.endsWith("\r") ? line.substring(0, line.length - 1) : line) })) });
|
|
13345
|
+
}
|
|
13346
|
+
/**
|
|
13347
|
+
* Returns true if the patch consistently uses Unix line endings (or only involves one line and has
|
|
13348
|
+
* no line endings).
|
|
13349
|
+
*/
|
|
13350
|
+
function isUnix(patch) {
|
|
13351
|
+
if (!Array.isArray(patch)) {
|
|
13352
|
+
patch = [patch];
|
|
13353
|
+
}
|
|
13354
|
+
return !patch.some((index) => index.hunks.some((hunk) => hunk.lines.some((line) => !line.startsWith("\\") && line.endsWith("\r"))));
|
|
13355
|
+
}
|
|
13356
|
+
/**
|
|
13357
|
+
* Returns true if the patch uses Windows line endings and only Windows line endings.
|
|
13358
|
+
*/
|
|
13359
|
+
function isWin(patch) {
|
|
13360
|
+
if (!Array.isArray(patch)) {
|
|
13361
|
+
patch = [patch];
|
|
13362
|
+
}
|
|
13363
|
+
return patch.some((index) => index.hunks.some((hunk) => hunk.lines.some((line) => line.endsWith("\r")))) && patch.every((index) => index.hunks.every((hunk) => hunk.lines.every((line, i$2) => {
|
|
13364
|
+
var _a;
|
|
13365
|
+
return line.startsWith("\\") || line.endsWith("\r") || ((_a = hunk.lines[i$2 + 1]) === null || _a === void 0 ? void 0 : _a.startsWith("\\"));
|
|
13366
|
+
})));
|
|
13367
|
+
}
|
|
13368
|
+
|
|
13369
|
+
//#endregion
|
|
13370
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/parse.js
|
|
13371
|
+
/**
|
|
13372
|
+
* Parses a patch into structured data, in the same structure returned by `structuredPatch`.
|
|
13373
|
+
*
|
|
13374
|
+
* @return a JSON object representation of the a patch, suitable for use with the `applyPatch` method.
|
|
13375
|
+
*/
|
|
13376
|
+
function parsePatch(uniDiff) {
|
|
13377
|
+
const diffstr = uniDiff.split(/\n/), list = [];
|
|
13378
|
+
let i$2 = 0;
|
|
13379
|
+
function parseIndex() {
|
|
13380
|
+
const index = {};
|
|
13381
|
+
list.push(index);
|
|
13382
|
+
while (i$2 < diffstr.length) {
|
|
13383
|
+
const line = diffstr[i$2];
|
|
13384
|
+
if (/^(---|\+\+\+|@@)\s/.test(line)) {
|
|
13385
|
+
break;
|
|
13386
|
+
}
|
|
13387
|
+
const headerMatch = /^(?:Index:|diff(?: -r \w+)+)\s+/.exec(line);
|
|
13388
|
+
if (headerMatch) {
|
|
13389
|
+
index.index = line.substring(headerMatch[0].length).trim();
|
|
13390
|
+
}
|
|
13391
|
+
i$2++;
|
|
13392
|
+
}
|
|
13393
|
+
parseFileHeader(index);
|
|
13394
|
+
parseFileHeader(index);
|
|
13395
|
+
index.hunks = [];
|
|
13396
|
+
while (i$2 < diffstr.length) {
|
|
13397
|
+
const line = diffstr[i$2];
|
|
13398
|
+
if (/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/.test(line)) {
|
|
13399
|
+
break;
|
|
13400
|
+
} else if (/^@@/.test(line)) {
|
|
13401
|
+
index.hunks.push(parseHunk());
|
|
13402
|
+
} else if (line) {
|
|
13403
|
+
throw new Error("Unknown line " + (i$2 + 1) + " " + JSON.stringify(line));
|
|
13404
|
+
} else {
|
|
13405
|
+
i$2++;
|
|
13406
|
+
}
|
|
13407
|
+
}
|
|
13408
|
+
}
|
|
13409
|
+
function parseFileHeader(index) {
|
|
13410
|
+
const fileHeaderMatch = /^(---|\+\+\+)\s+/.exec(diffstr[i$2]);
|
|
13411
|
+
if (fileHeaderMatch) {
|
|
13412
|
+
const prefix = fileHeaderMatch[1], data = diffstr[i$2].substring(3).trim().split(" ", 2), header = (data[1] || "").trim();
|
|
13413
|
+
let fileName = data[0].replace(/\\\\/g, "\\");
|
|
13414
|
+
if (fileName.startsWith("\"") && fileName.endsWith("\"")) {
|
|
13415
|
+
fileName = fileName.substr(1, fileName.length - 2);
|
|
13416
|
+
}
|
|
13417
|
+
if (prefix === "---") {
|
|
13418
|
+
index.oldFileName = fileName;
|
|
13419
|
+
index.oldHeader = header;
|
|
13420
|
+
} else {
|
|
13421
|
+
index.newFileName = fileName;
|
|
13422
|
+
index.newHeader = header;
|
|
13423
|
+
}
|
|
13424
|
+
i$2++;
|
|
13425
|
+
}
|
|
13426
|
+
}
|
|
13427
|
+
function parseHunk() {
|
|
13428
|
+
var _a;
|
|
13429
|
+
const chunkHeaderIndex = i$2, chunkHeaderLine = diffstr[i$2++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
|
|
13430
|
+
const hunk = {
|
|
13431
|
+
oldStart: +chunkHeader[1],
|
|
13432
|
+
oldLines: typeof chunkHeader[2] === "undefined" ? 1 : +chunkHeader[2],
|
|
13433
|
+
newStart: +chunkHeader[3],
|
|
13434
|
+
newLines: typeof chunkHeader[4] === "undefined" ? 1 : +chunkHeader[4],
|
|
13435
|
+
lines: []
|
|
13436
|
+
};
|
|
13437
|
+
if (hunk.oldLines === 0) {
|
|
13438
|
+
hunk.oldStart += 1;
|
|
13439
|
+
}
|
|
13440
|
+
if (hunk.newLines === 0) {
|
|
13441
|
+
hunk.newStart += 1;
|
|
13442
|
+
}
|
|
13443
|
+
let addCount = 0, removeCount = 0;
|
|
13444
|
+
for (; i$2 < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i$2]) === null || _a === void 0 ? void 0 : _a.startsWith("\\"))); i$2++) {
|
|
13445
|
+
const operation = diffstr[i$2].length == 0 && i$2 != diffstr.length - 1 ? " " : diffstr[i$2][0];
|
|
13446
|
+
if (operation === "+" || operation === "-" || operation === " " || operation === "\\") {
|
|
13447
|
+
hunk.lines.push(diffstr[i$2]);
|
|
13448
|
+
if (operation === "+") {
|
|
13449
|
+
addCount++;
|
|
13450
|
+
} else if (operation === "-") {
|
|
13451
|
+
removeCount++;
|
|
13452
|
+
} else if (operation === " ") {
|
|
13453
|
+
addCount++;
|
|
13454
|
+
removeCount++;
|
|
13455
|
+
}
|
|
13456
|
+
} else {
|
|
13457
|
+
throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i$2]}`);
|
|
13458
|
+
}
|
|
13459
|
+
}
|
|
13460
|
+
if (!addCount && hunk.newLines === 1) {
|
|
13461
|
+
hunk.newLines = 0;
|
|
13462
|
+
}
|
|
13463
|
+
if (!removeCount && hunk.oldLines === 1) {
|
|
13464
|
+
hunk.oldLines = 0;
|
|
13465
|
+
}
|
|
13466
|
+
if (addCount !== hunk.newLines) {
|
|
13467
|
+
throw new Error("Added line count did not match for hunk at line " + (chunkHeaderIndex + 1));
|
|
13468
|
+
}
|
|
13469
|
+
if (removeCount !== hunk.oldLines) {
|
|
13470
|
+
throw new Error("Removed line count did not match for hunk at line " + (chunkHeaderIndex + 1));
|
|
13471
|
+
}
|
|
13472
|
+
return hunk;
|
|
13473
|
+
}
|
|
13474
|
+
while (i$2 < diffstr.length) {
|
|
13475
|
+
parseIndex();
|
|
13476
|
+
}
|
|
13477
|
+
return list;
|
|
13478
|
+
}
|
|
13479
|
+
|
|
13480
|
+
//#endregion
|
|
13481
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/util/distance-iterator.js
|
|
13482
|
+
function distance_iterator_default(start, minLine, maxLine) {
|
|
13483
|
+
let wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1;
|
|
13484
|
+
return function iterator() {
|
|
13485
|
+
if (wantForward && !forwardExhausted) {
|
|
13486
|
+
if (backwardExhausted) {
|
|
13487
|
+
localOffset++;
|
|
13488
|
+
} else {
|
|
13489
|
+
wantForward = false;
|
|
13490
|
+
}
|
|
13491
|
+
if (start + localOffset <= maxLine) {
|
|
13492
|
+
return start + localOffset;
|
|
13493
|
+
}
|
|
13494
|
+
forwardExhausted = true;
|
|
13495
|
+
}
|
|
13496
|
+
if (!backwardExhausted) {
|
|
13497
|
+
if (!forwardExhausted) {
|
|
13498
|
+
wantForward = true;
|
|
13499
|
+
}
|
|
13500
|
+
if (minLine <= start - localOffset) {
|
|
13501
|
+
return start - localOffset++;
|
|
13502
|
+
}
|
|
13503
|
+
backwardExhausted = true;
|
|
13504
|
+
return iterator();
|
|
13505
|
+
}
|
|
13506
|
+
return undefined;
|
|
13507
|
+
};
|
|
13508
|
+
}
|
|
13509
|
+
|
|
13510
|
+
//#endregion
|
|
13511
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/apply.js
|
|
13512
|
+
/**
|
|
13513
|
+
* attempts to apply a unified diff patch.
|
|
13514
|
+
*
|
|
13515
|
+
* Hunks are applied first to last.
|
|
13516
|
+
* `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly.
|
|
13517
|
+
* If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly.
|
|
13518
|
+
* If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match.
|
|
13519
|
+
* Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly.
|
|
13520
|
+
*
|
|
13521
|
+
* Once a hunk is successfully fitted, the process begins again with the next hunk.
|
|
13522
|
+
* Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks.
|
|
13523
|
+
*
|
|
13524
|
+
* If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`.
|
|
13525
|
+
*
|
|
13526
|
+
* If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly.
|
|
13527
|
+
* (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.)
|
|
13528
|
+
*
|
|
13529
|
+
* If the patch was applied successfully, returns a string containing the patched text.
|
|
13530
|
+
* If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false.
|
|
13531
|
+
*
|
|
13532
|
+
* @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods.
|
|
13533
|
+
*/
|
|
13534
|
+
function applyPatch(source, patch, options = {}) {
|
|
13535
|
+
let patches;
|
|
13536
|
+
if (typeof patch === "string") {
|
|
13537
|
+
patches = parsePatch(patch);
|
|
13538
|
+
} else if (Array.isArray(patch)) {
|
|
13539
|
+
patches = patch;
|
|
13540
|
+
} else {
|
|
13541
|
+
patches = [patch];
|
|
13542
|
+
}
|
|
13543
|
+
if (patches.length > 1) {
|
|
13544
|
+
throw new Error("applyPatch only works with a single input.");
|
|
13545
|
+
}
|
|
13546
|
+
return applyStructuredPatch(source, patches[0], options);
|
|
13547
|
+
}
|
|
13548
|
+
function applyStructuredPatch(source, patch, options = {}) {
|
|
13549
|
+
if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
|
|
13550
|
+
if (hasOnlyWinLineEndings(source) && isUnix(patch)) {
|
|
13551
|
+
patch = unixToWin(patch);
|
|
13552
|
+
} else if (hasOnlyUnixLineEndings(source) && isWin(patch)) {
|
|
13553
|
+
patch = winToUnix(patch);
|
|
13554
|
+
}
|
|
13555
|
+
}
|
|
13556
|
+
const lines = source.split("\n"), hunks = patch.hunks, compareLine = options.compareLine || ((lineNumber, line, operation, patchContent) => line === patchContent), fuzzFactor = options.fuzzFactor || 0;
|
|
13557
|
+
let minLine = 0;
|
|
13558
|
+
if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
|
|
13559
|
+
throw new Error("fuzzFactor must be a non-negative integer");
|
|
13560
|
+
}
|
|
13561
|
+
if (!hunks.length) {
|
|
13562
|
+
return source;
|
|
13563
|
+
}
|
|
13564
|
+
let prevLine = "", removeEOFNL = false, addEOFNL = false;
|
|
13565
|
+
for (let i$2 = 0; i$2 < hunks[hunks.length - 1].lines.length; i$2++) {
|
|
13566
|
+
const line = hunks[hunks.length - 1].lines[i$2];
|
|
13567
|
+
if (line[0] == "\\") {
|
|
13568
|
+
if (prevLine[0] == "+") {
|
|
13569
|
+
removeEOFNL = true;
|
|
13570
|
+
} else if (prevLine[0] == "-") {
|
|
13571
|
+
addEOFNL = true;
|
|
13572
|
+
}
|
|
13573
|
+
}
|
|
13574
|
+
prevLine = line;
|
|
13575
|
+
}
|
|
13576
|
+
if (removeEOFNL) {
|
|
13577
|
+
if (addEOFNL) {
|
|
13578
|
+
if (!fuzzFactor && lines[lines.length - 1] == "") {
|
|
13579
|
+
return false;
|
|
13580
|
+
}
|
|
13581
|
+
} else if (lines[lines.length - 1] == "") {
|
|
13582
|
+
lines.pop();
|
|
13583
|
+
} else if (!fuzzFactor) {
|
|
13584
|
+
return false;
|
|
13585
|
+
}
|
|
13586
|
+
} else if (addEOFNL) {
|
|
13587
|
+
if (lines[lines.length - 1] != "") {
|
|
13588
|
+
lines.push("");
|
|
13589
|
+
} else if (!fuzzFactor) {
|
|
13590
|
+
return false;
|
|
13591
|
+
}
|
|
13592
|
+
}
|
|
13593
|
+
/**
|
|
13594
|
+
* Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
|
|
13595
|
+
* insertions, substitutions, or deletions, while ensuring also that:
|
|
13596
|
+
* - lines deleted in the hunk match exactly, and
|
|
13597
|
+
* - wherever an insertion operation or block of insertion operations appears in the hunk, the
|
|
13598
|
+
* immediately preceding and following lines of context match exactly
|
|
13599
|
+
*
|
|
13600
|
+
* `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
|
|
13601
|
+
*
|
|
13602
|
+
* If the hunk can be applied, returns an object with properties `oldLineLastI` and
|
|
13603
|
+
* `replacementLines`. Otherwise, returns null.
|
|
13604
|
+
*/
|
|
13605
|
+
function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI = 0, lastContextLineMatched = true, patchedLines = [], patchedLinesLength = 0) {
|
|
13606
|
+
let nConsecutiveOldContextLines = 0;
|
|
13607
|
+
let nextContextLineMustMatch = false;
|
|
13608
|
+
for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
|
|
13609
|
+
const hunkLine = hunkLines[hunkLinesI], operation = hunkLine.length > 0 ? hunkLine[0] : " ", content = hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine;
|
|
13610
|
+
if (operation === "-") {
|
|
13611
|
+
if (compareLine(toPos + 1, lines[toPos], operation, content)) {
|
|
13612
|
+
toPos++;
|
|
13613
|
+
nConsecutiveOldContextLines = 0;
|
|
13614
|
+
} else {
|
|
13615
|
+
if (!maxErrors || lines[toPos] == null) {
|
|
13616
|
+
return null;
|
|
13617
|
+
}
|
|
13618
|
+
patchedLines[patchedLinesLength] = lines[toPos];
|
|
13619
|
+
return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
|
|
13620
|
+
}
|
|
13621
|
+
}
|
|
13622
|
+
if (operation === "+") {
|
|
13623
|
+
if (!lastContextLineMatched) {
|
|
13624
|
+
return null;
|
|
13625
|
+
}
|
|
13626
|
+
patchedLines[patchedLinesLength] = content;
|
|
13627
|
+
patchedLinesLength++;
|
|
13628
|
+
nConsecutiveOldContextLines = 0;
|
|
13629
|
+
nextContextLineMustMatch = true;
|
|
13630
|
+
}
|
|
13631
|
+
if (operation === " ") {
|
|
13632
|
+
nConsecutiveOldContextLines++;
|
|
13633
|
+
patchedLines[patchedLinesLength] = lines[toPos];
|
|
13634
|
+
if (compareLine(toPos + 1, lines[toPos], operation, content)) {
|
|
13635
|
+
patchedLinesLength++;
|
|
13636
|
+
lastContextLineMatched = true;
|
|
13637
|
+
nextContextLineMustMatch = false;
|
|
13638
|
+
toPos++;
|
|
13639
|
+
} else {
|
|
13640
|
+
if (nextContextLineMustMatch || !maxErrors) {
|
|
13641
|
+
return null;
|
|
13642
|
+
}
|
|
13643
|
+
return lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength);
|
|
13644
|
+
}
|
|
13645
|
+
}
|
|
13646
|
+
}
|
|
13647
|
+
patchedLinesLength -= nConsecutiveOldContextLines;
|
|
13648
|
+
toPos -= nConsecutiveOldContextLines;
|
|
13649
|
+
patchedLines.length = patchedLinesLength;
|
|
13650
|
+
return {
|
|
13651
|
+
patchedLines,
|
|
13652
|
+
oldLineLastI: toPos - 1
|
|
13653
|
+
};
|
|
13654
|
+
}
|
|
13655
|
+
const resultLines = [];
|
|
13656
|
+
let prevHunkOffset = 0;
|
|
13657
|
+
for (let i$2 = 0; i$2 < hunks.length; i$2++) {
|
|
13658
|
+
const hunk = hunks[i$2];
|
|
13659
|
+
let hunkResult;
|
|
13660
|
+
const maxLine = lines.length - hunk.oldLines + fuzzFactor;
|
|
13661
|
+
let toPos;
|
|
13662
|
+
for (let maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
|
|
13663
|
+
toPos = hunk.oldStart + prevHunkOffset - 1;
|
|
13664
|
+
const iterator = distance_iterator_default(toPos, minLine, maxLine);
|
|
13665
|
+
for (; toPos !== undefined; toPos = iterator()) {
|
|
13666
|
+
hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
|
|
13667
|
+
if (hunkResult) {
|
|
13668
|
+
break;
|
|
13669
|
+
}
|
|
13670
|
+
}
|
|
13671
|
+
if (hunkResult) {
|
|
13672
|
+
break;
|
|
13673
|
+
}
|
|
13674
|
+
}
|
|
13675
|
+
if (!hunkResult) {
|
|
13676
|
+
return false;
|
|
13677
|
+
}
|
|
13678
|
+
for (let i$3 = minLine; i$3 < toPos; i$3++) {
|
|
13679
|
+
resultLines.push(lines[i$3]);
|
|
13680
|
+
}
|
|
13681
|
+
for (let i$3 = 0; i$3 < hunkResult.patchedLines.length; i$3++) {
|
|
13682
|
+
const line = hunkResult.patchedLines[i$3];
|
|
13683
|
+
resultLines.push(line);
|
|
13684
|
+
}
|
|
13685
|
+
minLine = hunkResult.oldLineLastI + 1;
|
|
13686
|
+
prevHunkOffset = toPos + 1 - hunk.oldStart;
|
|
13687
|
+
}
|
|
13688
|
+
for (let i$2 = minLine; i$2 < lines.length; i$2++) {
|
|
13689
|
+
resultLines.push(lines[i$2]);
|
|
13690
|
+
}
|
|
13691
|
+
return resultLines.join("\n");
|
|
13692
|
+
}
|
|
13693
|
+
/**
|
|
13694
|
+
* applies one or more patches.
|
|
13695
|
+
*
|
|
13696
|
+
* `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files).
|
|
13697
|
+
*
|
|
13698
|
+
* This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is:
|
|
13699
|
+
*
|
|
13700
|
+
* - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
|
|
13701
|
+
* - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution.
|
|
13702
|
+
*
|
|
13703
|
+
* Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
|
|
13704
|
+
*/
|
|
13705
|
+
/**
 * Applies one or more patches, driving file access through caller-supplied
 * callbacks.
 *
 * For each patch in `uniDiff` (a unified-diff string, parsed via `parsePatch`,
 * or an array of structured patch objects):
 *  - `options.loadFile(index, cb)` must supply the file contents,
 *  - the patch is applied with `applyPatch`,
 *  - `options.patched(index, content, cb)` receives the result.
 * Passing an error to either callback aborts the run; `options.complete(err?)`
 * is always called once at the end.
 */
function applyPatches(uniDiff, options) {
  const spDiff = typeof uniDiff === "string" ? parsePatch(uniDiff) : uniDiff;
  let currentIndex = 0;
  const processIndex = () => {
    const index = spDiff[currentIndex];
    currentIndex += 1;
    // Past the last patch: signal successful completion.
    if (!index) {
      return options.complete();
    }
    options.loadFile(index, (err, data) => {
      if (err) {
        return options.complete(err);
      }
      const updatedContent = applyPatch(data, index, options);
      options.patched(index, updatedContent, (err$1) => {
        if (err$1) {
          return options.complete(err$1);
        }
        // Recurse asynchronously to the next patch in the sequence.
        processIndex();
      });
    });
  };
  processIndex();
}
|
|
13728
|
+
|
|
13729
|
+
//#endregion
|
|
13730
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/reverse.js
|
|
13731
|
+
/**
 * Produces the inverse of a structured patch (or array of patches): applying
 * the result undoes the original. Old/new file names, headers, hunk starts and
 * line counts are swapped, and each `+`/`-` line prefix is flipped. Arrays are
 * additionally reversed so a multi-file patch un-applies in the right order.
 */
function reversePatch(structuredPatch$1) {
  if (Array.isArray(structuredPatch$1)) {
    const flipped = [];
    for (const patch of structuredPatch$1) {
      flipped.push(reversePatch(patch));
    }
    return flipped.reverse();
  }
  // Flip an individual hunk line: additions become removals and vice versa;
  // context lines pass through untouched.
  const flipLine = (l$2) => {
    if (l$2.startsWith("-")) {
      return `+${l$2.slice(1)}`;
    }
    if (l$2.startsWith("+")) {
      return `-${l$2.slice(1)}`;
    }
    return l$2;
  };
  const hunks = structuredPatch$1.hunks.map((hunk) => ({
    oldLines: hunk.newLines,
    oldStart: hunk.newStart,
    newLines: hunk.oldLines,
    newStart: hunk.oldStart,
    lines: hunk.lines.map(flipLine)
  }));
  return Object.assign(Object.assign({}, structuredPatch$1), {
    oldFileName: structuredPatch$1.newFileName,
    oldHeader: structuredPatch$1.newHeader,
    newFileName: structuredPatch$1.oldFileName,
    newHeader: structuredPatch$1.oldHeader,
    hunks
  });
}
|
|
13759
|
+
|
|
13760
|
+
//#endregion
|
|
13761
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/patch/create.js
|
|
13762
|
+
// Preset header configurations consumed by `formatPatch` below.
// Default preset: emit the "Index:" line, the "===" underline, and the
// `---`/`+++` file header lines.
const INCLUDE_HEADERS = {
  includeIndex: true,
  includeUnderline: true,
  includeFileHeaders: true
};
// Emit only the `---`/`+++` file headers (no "Index:" line, no underline).
const FILE_HEADERS_ONLY = {
  includeIndex: false,
  includeUnderline: false,
  includeFileHeaders: true
};
// Emit no headers at all. `formatPatch` rejects this for multi-file patches,
// since the output would be unparseable.
const OMIT_HEADERS = {
  includeIndex: false,
  includeUnderline: false,
  includeFileHeaders: false
};
|
|
13777
|
+
/**
 * Diffs `oldStr` against `newStr` line-by-line and returns a structured patch
 * object `{ oldFileName, newFileName, oldHeader, newHeader, hunks }`.
 *
 * `options` may be an options object or a bare callback function. When a
 * callback is provided the diff runs via `diffLines`' async path and the patch
 * is delivered to the callback instead of being returned. `options.context`
 * (default 4) controls how many unchanged lines surround each hunk.
 *
 * @throws {Error} if `options.newlineIsToken` is set — that flag only applies
 *   to diffing functions, not patch generation.
 */
function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
  let optionsObj;
  if (!options) {
    optionsObj = {};
  } else if (typeof options === "function") {
    optionsObj = { callback: options };
  } else {
    optionsObj = options;
  }
  if (typeof optionsObj.context === "undefined") {
    optionsObj.context = 4;
  }
  const context = optionsObj.context;
  if (optionsObj.newlineIsToken) {
    throw new Error("newlineIsToken may not be used with patch-generation functions, only with diffing functions");
  }
  if (!optionsObj.callback) {
    return diffLinesResultToPatch(diffLines(oldStr, newStr, optionsObj));
  } else {
    const { callback } = optionsObj;
    diffLines(oldStr, newStr, Object.assign(Object.assign({}, optionsObj), { callback: (diff) => {
      const patch = diffLinesResultToPatch(diff);
      callback(patch);
    } }));
  }
  // Converts a `diffLines` change list into hunks with `context` lines of
  // surrounding context, then assembles the structured patch object.
  function diffLinesResultToPatch(diff) {
    if (!diff) {
      return;
    }
    // Sentinel entry so the final hunk is flushed by the loop below.
    diff.push({
      value: "",
      lines: []
    });
    function contextLines(lines) {
      return lines.map(function(entry) {
        return " " + entry;
      });
    }
    const hunks = [];
    let oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1;
    for (let i$2 = 0; i$2 < diff.length; i$2++) {
      const current = diff[i$2], lines = current.lines || splitLines(current.value);
      current.lines = lines;
      if (current.added || current.removed) {
        // Opening a new hunk: seed it with trailing context from the previous
        // unchanged block and rewind the start positions accordingly.
        if (!oldRangeStart) {
          const prev = diff[i$2 - 1];
          oldRangeStart = oldLine;
          newRangeStart = newLine;
          if (prev) {
            curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : [];
            oldRangeStart -= curRange.length;
            newRangeStart -= curRange.length;
          }
        }
        for (const line of lines) {
          curRange.push((current.added ? "+" : "-") + line);
        }
        if (current.added) {
          newLine += lines.length;
        } else {
          oldLine += lines.length;
        }
      } else {
        if (oldRangeStart) {
          // A short unchanged run (and not at the end) stays inside the
          // current hunk; otherwise close the hunk with leading context.
          if (lines.length <= context * 2 && i$2 < diff.length - 2) {
            for (const line of contextLines(lines)) {
              curRange.push(line);
            }
          } else {
            const contextSize = Math.min(lines.length, context);
            for (const line of contextLines(lines.slice(0, contextSize))) {
              curRange.push(line);
            }
            const hunk = {
              oldStart: oldRangeStart,
              oldLines: oldLine - oldRangeStart + contextSize,
              newStart: newRangeStart,
              newLines: newLine - newRangeStart + contextSize,
              lines: curRange
            };
            hunks.push(hunk);
            oldRangeStart = 0;
            newRangeStart = 0;
            curRange = [];
          }
        }
        oldLine += lines.length;
        newLine += lines.length;
      }
    }
    // Normalize hunk lines: strip the trailing "\n" each line carries, and
    // insert the unified-diff "\ No newline at end of file" marker after any
    // line that did not end with a newline.
    for (const hunk of hunks) {
      for (let i$2 = 0; i$2 < hunk.lines.length; i$2++) {
        if (hunk.lines[i$2].endsWith("\n")) {
          hunk.lines[i$2] = hunk.lines[i$2].slice(0, -1);
        } else {
          hunk.lines.splice(i$2 + 1, 0, "\\ No newline at end of file");
          i$2++;
        }
      }
    }
    return {
      oldFileName,
      newFileName,
      oldHeader,
      newHeader,
      hunks
    };
  }
}
|
|
13886
|
+
/**
 * Serializes a structured patch (as returned by `structuredPatch`) — or an
 * array of them (as returned by `parsePatch`) — into unified diff text.
 *
 * @param patch single structured patch object or an array of them
 * @param headerOptions which headers to emit; defaults to `INCLUDE_HEADERS`
 * @throws {Error} when asked to omit file headers on a multi-file patch
 */
function formatPatch(patch, headerOptions) {
  const opts = headerOptions || INCLUDE_HEADERS;
  if (Array.isArray(patch)) {
    if (patch.length > 1 && !opts.includeFileHeaders) {
      throw new Error("Cannot omit file headers on a multi-file patch. " + "(The result would be unparseable; how would a tool trying to apply " + "the patch know which changes are to which file?)");
    }
    return patch.map((p$1) => formatPatch(p$1, opts)).join("\n");
  }
  const ret = [];
  // Loose equality intentionally treats null/undefined file names alike.
  if (opts.includeIndex && patch.oldFileName == patch.newFileName) {
    ret.push("Index: " + patch.oldFileName);
  }
  if (opts.includeUnderline) {
    ret.push("===================================================================");
  }
  if (opts.includeFileHeaders) {
    const oldSuffix = typeof patch.oldHeader === "undefined" ? "" : "\t" + patch.oldHeader;
    const newSuffix = typeof patch.newHeader === "undefined" ? "" : "\t" + patch.newHeader;
    ret.push("--- " + patch.oldFileName + oldSuffix);
    ret.push("+++ " + patch.newFileName + newSuffix);
  }
  for (const hunk of patch.hunks) {
    // Zero-length ranges are written with a start one lower, per the unified
    // diff convention for pure insertions/deletions. NOTE: mutates the hunk.
    if (hunk.oldLines === 0) {
      hunk.oldStart -= 1;
    }
    if (hunk.newLines === 0) {
      hunk.newStart -= 1;
    }
    ret.push("@@ -" + hunk.oldStart + "," + hunk.oldLines + " +" + hunk.newStart + "," + hunk.newLines + " @@");
    ret.push(...hunk.lines);
  }
  return ret.join("\n") + "\n";
}
|
|
13926
|
+
/**
 * Creates unified-diff text for two (possibly differently named) files by
 * combining `structuredPatch` and `formatPatch`.
 *
 * `options` may be an options object or a bare callback. Synchronous mode
 * returns the formatted patch string (or undefined when `structuredPatch`
 * yields nothing); callback mode delivers the same value asynchronously.
 */
function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
  let opts = options;
  if (typeof opts === "function") {
    opts = { callback: opts };
  }
  const callback = opts === null || opts === void 0 ? void 0 : opts.callback;
  if (!callback) {
    const patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, opts);
    if (!patchObj) {
      return;
    }
    return formatPatch(patchObj, opts === null || opts === void 0 ? void 0 : opts.headerOptions);
  }
  structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, Object.assign(Object.assign({}, opts), { callback: (patchObj) => {
    if (!patchObj) {
      callback(undefined);
    } else {
      callback(formatPatch(patchObj, opts.headerOptions));
    }
  } }));
}
|
|
13947
|
+
/**
 * Creates a unified diff patch for a single file by diffing `oldStr` against
 * `newStr`. Thin wrapper around `createTwoFilesPatch` that uses `fileName`
 * for both the old and new side.
 */
function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
  return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
}
|
|
13950
|
+
/**
 * Splits `text` into an array of lines, each retaining its trailing "\n"
 * where present. A final line without a newline is kept as-is; an empty
 * string yields `[""]`.
 */
function splitLines(text$1) {
  const pieces = text$1.split("\n");
  const last = pieces.pop();
  const result = pieces.map((line) => line + "\n");
  // When the text did not end with a newline, the final fragment is a real
  // (newline-less) line and must be preserved; otherwise it is the empty
  // string produced after the last "\n" and is dropped.
  if (!text$1.endsWith("\n")) {
    result.push(last);
  }
  return result;
}
|
|
13963
|
+
|
|
13964
|
+
//#endregion
|
|
13965
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/convert/dmp.js
|
|
13966
|
+
/**
 * Converts a list of change objects to the `[operation, text]` pair format
 * used by Google's diff-match-patch library: 1 for additions, -1 for
 * removals, 0 for unchanged runs.
 */
function convertChangesToDMP(changes) {
  return changes.map((change) => {
    const operation = change.added ? 1 : change.removed ? -1 : 0;
    return [operation, change.value];
  });
}
|
|
13985
|
+
|
|
13986
|
+
//#endregion
|
|
13987
|
+
//#region ../../node_modules/.bun/diff@8.0.3/node_modules/diff/libesm/convert/xml.js
|
|
13988
|
+
/**
 * Converts a list of change objects to a serialized XML-ish string:
 * additions are wrapped in <ins>…</ins>, removals in <del>…</del>, and all
 * change text is HTML-escaped via `escapeHTML`.
 */
function convertChangesToXML(changes) {
  let out = "";
  for (const change of changes) {
    const open = change.added ? "<ins>" : change.removed ? "<del>" : "";
    const close = change.added ? "</ins>" : change.removed ? "</del>" : "";
    out += open + escapeHTML(change.value) + close;
  }
  return out;
}
|
|
14009
|
+
/**
 * Escapes the HTML special characters `&`, `<`, `>`, and `"` in `s` with
 * their entity forms. `&` is replaced first so already-produced entities are
 * not double-escaped.
 *
 * NOTE: the published bundle contained the decoded entity characters here
 * (e.g. `replace(/&/g, "&")`), turning the function into a no-op / syntax
 * error — restored to the jsdiff originals.
 */
function escapeHTML(s$1) {
  let n = s$1;
  n = n.replace(/&/g, "&amp;");
  n = n.replace(/</g, "&lt;");
  n = n.replace(/>/g, "&gt;");
  n = n.replace(/"/g, "&quot;");
  return n;
}
|
|
14017
|
+
|
|
14018
|
+
//#endregion
|
|
14019
|
+
//#region src/utils/cleanLastNewline.ts
|
|
14020
|
+
/**
 * Removes a single trailing newline (LF or CRLF) from `contents`.
 *
 * Fix: the original pattern `/\n$|\r\n$/` tried the `\n$` branch first, so a
 * CRLF ending matched only its `\n` and left a stray `\r` ("a\r\n" → "a\r");
 * the `\r\n$` alternative was unreachable. Matching CRLF first (equivalently
 * `/\r?\n$/`) strips both endings cleanly.
 */
function cleanLastNewline(contents) {
  return contents.replace(/\r\n$|\n$/, "");
}
|
|
14023
|
+
|
|
14024
|
+
//#endregion
|
|
14025
|
+
//#region ../../node_modules/.bun/@shikijs+transformers@4.0.2/node_modules/@shikijs/transformers/dist/index.mjs
|
|
14026
|
+
/**
 * Comment-syntax matchers used by `matchToken`: each entry is
 * `[regex, endOfLineOnly]`. Some comment formats have to be located at the
 * end of a line, hence matching them can be skipped for other tokens.
 * Capture groups are (open, body, close?) — the close group is absent for
 * line comments.
 */
const matchers = [
  [/^(<!--)(.+)(-->)$/, false],
  [/^(\/\*)(.+)(\*\/)$/, false],
  // Line comments (//, ", ', #, ;, ;;, %, %%, --): only valid at end of line.
  [/^(\/\/|["'#]|;{1,2}|%{1,2}|--)(.*)$/, true],
  // JSDoc-style continuation line starting with "*".
  [/^(\*)(.+)$/, true]
];
|
|
14036
|
+
/**
 * Scans highlighted lines (hast elements) for trailing comment tokens that
 * may carry `[!code …]` notations, returning one record per comment found.
 *
 * @param lines line tokens (hast <span> elements, one per source line)
 * @param jsx enable JSX parsing (comments may be wrapped in `{ … }`)
 * @param matchAlgorithm matching algorithm: "v1" scans every token, "v3"
 *   additionally pre-splits tokens that contain an embedded `// …` run
 * @returns array of { info, line, token, isLineCommentOnly, isJsxStyle,
 *   additionalTokens } records consumed by `createCommentNotationTransformer`
 */
function parseComments(lines, jsx, matchAlgorithm) {
  const out = [];
  for (const line of lines) {
    if (matchAlgorithm === "v3") {
      // v3 pre-pass: a single text token like "code // note" is split into
      // separate tokens at each " //" boundary so the comment part can be
      // matched independently. Non-splittable tokens pass through unchanged.
      const splittedElements = line.children.flatMap((element$1, idx) => {
        if (element$1.type !== "element") return element$1;
        const token$1 = element$1.children[0];
        if (token$1.type !== "text") return element$1;
        const isLast = idx === line.children.length - 1;
        if (!matchToken(token$1.value, isLast)) return element$1;
        const rawSplits = token$1.value.split(/(\s+\/\/)/);
        if (rawSplits.length <= 1) return element$1;
        // Re-attach each captured " //" separator to the text that follows it.
        let splits = [rawSplits[0]];
        for (let i$2 = 1; i$2 < rawSplits.length; i$2 += 2) splits.push(rawSplits[i$2] + (rawSplits[i$2 + 1] || ""));
        splits = splits.filter(Boolean);
        if (splits.length <= 1) return element$1;
        // Clone the original element once per fragment, keeping its styling.
        return splits.map((split) => {
          return {
            ...element$1,
            children: [{
              type: "text",
              value: split
            }]
          };
        });
      });
      if (splittedElements.length !== line.children.length) line.children = splittedElements;
    }
    const elements = line.children;
    // Only the last token can be a comment by default; v1 scans the whole
    // line, JSX also considers the token before a closing "}".
    let start = elements.length - 1;
    if (matchAlgorithm === "v1") start = 0;
    else if (jsx) start = elements.length - 2;
    for (let i$2 = Math.max(start, 0); i$2 < elements.length; i$2++) {
      const token$1 = elements[i$2];
      if (token$1.type !== "element") continue;
      const head$1 = token$1.children.at(0);
      if (head$1?.type !== "text") continue;
      const isLast = i$2 === elements.length - 1;
      let match = matchToken(head$1.value, isLast);
      let additionalTokens;
      // Recovery path: the highlighter may have split "// [!code …]" into two
      // tokens; try matching the previous token's text joined with this one.
      if (!match && i$2 > 0 && head$1.value.trim().startsWith("[!code")) {
        const prevToken = elements[i$2 - 1];
        if (prevToken?.type === "element") {
          const prevHead = prevToken.children.at(0);
          if (prevHead?.type === "text" && prevHead.value.includes("//")) {
            const combinedMatch = matchToken(prevHead.value + head$1.value, isLast);
            if (combinedMatch) {
              match = combinedMatch;
              out.push({
                info: combinedMatch,
                line,
                token: prevToken,
                isLineCommentOnly: elements.length === 2 && prevToken.children.length === 1 && token$1.children.length === 1,
                isJsxStyle: false,
                // Remember the trailing token so it can be emptied/removed
                // together with the comment token later.
                additionalTokens: [token$1]
              });
              continue;
            }
          }
        }
      }
      if (!match) continue;
      if (jsx && !isLast && i$2 !== 0) {
        // JSX comments look like `{ /* … */ }`; record whether the braces are
        // present so the transformer can strip all three tokens at once.
        const isJsxStyle = isValue(elements[i$2 - 1], "{") && isValue(elements[i$2 + 1], "}");
        out.push({
          info: match,
          line,
          token: token$1,
          isLineCommentOnly: elements.length === 3 && token$1.children.length === 1,
          isJsxStyle,
          additionalTokens
        });
      } else out.push({
        info: match,
        line,
        token: token$1,
        isLineCommentOnly: elements.length === 1 && token$1.children.length === 1,
        isJsxStyle: false,
        additionalTokens
      });
    }
  }
  return out;
}
|
|
14125
|
+
/**
 * Returns true when `element` is a hast element whose first child is a text
 * node whose trimmed content equals `value` (used to detect JSX `{` / `}`
 * wrapper tokens).
 */
function isValue(element$1, value) {
  if (element$1.type !== "element") {
    return false;
  }
  const head = element$1.children[0];
  return head.type === "text" && head.value.trim() === value;
}
|
|
14131
|
+
/**
 * Tries each comment matcher against the trimmed token text.
 *
 * @param text text value of the comment token (may carry surrounding spaces)
 * @param isLast whether the token is located at the end of its line — line
 *   comments are only valid there
 * @returns `[open, body, close?]` with the token's original leading spaces
 *   folded into `open` and trailing spaces into `close`; undefined if no
 *   matcher applies
 */
function matchToken(text$1, isLast) {
  const leading = text$1.length - text$1.trimStart().length;
  const trimmed = text$1.trim();
  const trailing = text$1.length - trimmed.length - leading;
  for (const [matcher, endOfLine] of matchers) {
    // End-of-line-only comment styles are skipped for interior tokens.
    if (endOfLine && !isLast) {
      continue;
    }
    const result = matcher.exec(trimmed);
    if (result) {
      return [
        " ".repeat(leading) + result[1],
        result[2],
        result[3] ? result[3] + " ".repeat(trailing) : void 0
      ];
    }
  }
}
|
|
14151
|
+
/**
 * Removes an empty comment opener left dangling at the end of a line after
 * notation removal, e.g. a trailing `// `. Used only by matchAlgorithm "v1".
 */
function v1ClearEndCommentPrefix(text$1) {
  const match = text$1.match(/(?:\/\/|["'#]|;{1,2}|%{1,2}|--)(\s*)$/);
  if (!match) {
    return text$1;
  }
  // Only strip when nothing but whitespace follows the comment opener.
  if (match[1].trim().length !== 0) {
    return text$1;
  }
  return text$1.slice(0, match.index);
}
|
|
14161
|
+
/**
 * Builds a shiki transformer that finds `[!code …]`-style notations inside
 * comments, hands each match to `onMatch`, and cleans up the comment text
 * (and, when a comment becomes empty, the comment token or whole line).
 *
 * @param name transformer name
 * @param regex notation pattern applied to the comment body
 * @param onMatch called with (matchArray, line, commentToken, lines, lineIdx);
 *   returning truthy consumes the notation (it is removed from the text)
 * @param matchAlgorithm "v1" or "v3" (default "v3")
 */
function createCommentNotationTransformer(name, regex, onMatch, matchAlgorithm) {
  if (matchAlgorithm == null) matchAlgorithm = "v3";
  return {
    name,
    code(code) {
      const lines = code.children.filter((i$2) => i$2.type === "element");
      const linesToRemove = [];
      // Cache parsed comments on the code node so multiple notation
      // transformers in one run share a single parse.
      code.data ??= {};
      const data = code.data;
      data._shiki_notation ??= parseComments(lines, ["jsx", "tsx"].includes(this.options.lang), matchAlgorithm);
      const parsed = data._shiki_notation;
      for (const comment$1 of parsed) {
        if (comment$1.info[1].length === 0) continue;
        let lineIdx = lines.indexOf(comment$1.line);
        // In v3, a comment-only line annotates the line AFTER it.
        if (comment$1.isLineCommentOnly && matchAlgorithm !== "v1") lineIdx++;
        let replaced = false;
        // Each consumed notation is deleted from the comment body; unmatched
        // callbacks keep their text intact.
        comment$1.info[1] = comment$1.info[1].replace(regex, (...match) => {
          if (onMatch.call(this, match, comment$1.line, comment$1.token, lines, lineIdx)) {
            replaced = true;
            return "";
          }
          return match[0];
        });
        if (!replaced) continue;
        if (matchAlgorithm === "v1") comment$1.info[1] = v1ClearEndCommentPrefix(comment$1.info[1]);
        const isEmpty = comment$1.info[1].trim().length === 0;
        if (isEmpty) comment$1.info[1] = "";
        // Empty comment cleanup, in order of scope: drop the whole line,
        // drop the JSX `{ comment }` triple, or drop just the comment token
        // (plus any recorded additional tokens).
        if (isEmpty && comment$1.isLineCommentOnly) linesToRemove.push(comment$1.line);
        else if (isEmpty && comment$1.isJsxStyle) comment$1.line.children.splice(comment$1.line.children.indexOf(comment$1.token) - 1, 3);
        else if (isEmpty) {
          // Iterate backwards so earlier indices stay valid while splicing.
          if (comment$1.additionalTokens) for (let j$1 = comment$1.additionalTokens.length - 1; j$1 >= 0; j$1--) {
            const additionalToken = comment$1.additionalTokens[j$1];
            const tokenIndex = comment$1.line.children.indexOf(additionalToken);
            if (tokenIndex !== -1) comment$1.line.children.splice(tokenIndex, 1);
          }
          comment$1.line.children.splice(comment$1.line.children.indexOf(comment$1.token), 1);
        } else {
          // Comment still has content: write back the remaining text and
          // blank out any merged continuation tokens.
          const head$1 = comment$1.token.children[0];
          if (head$1.type === "text") {
            head$1.value = comment$1.info.join("");
            if (comment$1.additionalTokens) for (const additionalToken of comment$1.additionalTokens) {
              const additionalHead = additionalToken.children[0];
              if (additionalHead?.type === "text") additionalHead.value = "";
            }
          }
        }
      }
      // Remove emptied lines together with their following "\n" text node.
      for (const line of linesToRemove) {
        const index = code.children.indexOf(line);
        const nextLine = code.children[index + 1];
        let removeLength = 1;
        if (nextLine?.type === "text" && nextLine?.value === "\n") removeLength = 2;
        code.children.splice(index, removeLength);
      }
    }
  };
}
|
|
14218
|
+
/**
 * Transformer for `shiki`'s legacy `lineOptions`: for each rendered line,
 * looks up a matching `{ line, classes }` entry and adds its classes to the
 * line node.
 */
function transformerCompactLineOptions(lineOptions = []) {
  return {
    name: "@shikijs/transformers:compact-line-options",
    line(node, line) {
      const matching = lineOptions.find((o$2) => o$2.line === line);
      const classes = matching === null || matching === void 0 ? void 0 : matching.classes;
      if (classes) {
        this.addClassToHast(node, classes);
      }
      return node;
    }
  };
}
|
|
14231
|
+
/**
 * Parses a `{1,3-5}` line-highlight specifier out of a code-fence meta
 * string. Returns the expanded list of line numbers ([1, 3, 4, 5]) or null
 * when `meta` is empty or contains no specifier.
 */
function parseMetaHighlightString(meta) {
  if (!meta) {
    return null;
  }
  const match = meta.match(/\{([\d,-]+)\}/);
  if (!match) {
    return null;
  }
  const result = [];
  for (const part of match[1].split(",")) {
    const bounds = part.split("-").map((n) => Number.parseInt(n, 10));
    if (bounds.length === 1) {
      result.push(bounds[0]);
    } else {
      // Expand an inclusive range like "3-5"; an inverted range yields nothing.
      for (let lineNo = bounds[0]; lineNo <= bounds[1]; lineNo++) {
        result.push(lineNo);
      }
    }
  }
  return result;
}
|
|
14240
|
+
// Cache key used by transformerMetaHighlight to memoize the parsed `{1,3-5}`
// line list on the per-highlight meta object.
const symbol = Symbol("highlighted-lines");
|
|
14241
|
+
/**
 * Allow using `{1,3-5}` in the code snippet meta to mark highlighted lines.
 *
 * @param options.className class added to highlighted line nodes
 *   (default "highlighted")
 * @param options.zeroIndexed treat the meta line numbers as 0-based
 */
function transformerMetaHighlight(options = {}) {
  const { className = "highlighted", zeroIndexed = false } = options;
  return {
    name: "@shikijs/transformers:meta-highlight",
    line(node, lineNumber) {
      if (!this.options.meta?.__raw) return;
      const meta = this.meta;
      // Parse the raw meta once per highlight run; cached under `symbol`.
      meta[symbol] ??= parseMetaHighlightString(this.options.meta.__raw);
      const highlightedLines = meta[symbol] ?? [];
      const effectiveLine = zeroIndexed ? lineNumber - 1 : lineNumber;
      if (highlightedLines.includes(effectiveLine)) this.addClassToHast(node, className);
      return node;
    }
  };
}
|
|
14259
|
+
/**
 * Extracts `/word/` specifiers from a code-fence meta string, un-escaping
 * backslash-escaped characters (so `/b\/ar/` yields "b/ar"). Returns an
 * empty array for empty input.
 */
function parseMetaHighlightWords(meta) {
  if (!meta) {
    return [];
  }
  const words = [];
  for (const hit of meta.matchAll(/\/((?:\\.|[^/])+)\//g)) {
    words.push(hit[1].replace(/\\(.)/g, "$1"));
  }
  return words;
}
|
|
14263
|
+
/**
 * Allow using `/word/` in the code snippet meta to mark highlighted words.
 * Every occurrence of each word in the raw code is turned into a decoration
 * carrying `className` (default "highlighted-word").
 */
function transformerMetaWordHighlight(options = {}) {
  const { className = "highlighted-word" } = options;
  return {
    name: "@shikijs/transformers:meta-word-highlight",
    preprocess(code, options$1) {
      if (!this.options.meta?.__raw) return;
      const words = parseMetaHighlightWords(this.options.meta.__raw);
      options$1.decorations ||= [];
      for (const word of words) {
        // One decoration per occurrence, spanning the word's offsets.
        const indexes = findAllSubstringIndexes(code, word);
        for (const index of indexes) options$1.decorations.push({
          start: index,
          end: index + word.length,
          properties: { class: className }
        });
      }
    }
  };
}
|
|
14285
|
+
/**
 * Returns the start index of every non-overlapping occurrence of `substr`
 * in `str`, scanning left to right.
 */
function findAllSubstringIndexes(str, substr) {
  const indexes = [];
  let from = 0;
  for (;;) {
    const hit = str.indexOf(substr, from);
    // Defensive guards kept from the original: stop on no match, an
    // out-of-range hit, or a non-advancing cursor.
    if (hit === -1 || hit >= str.length || hit < from) {
      break;
    }
    indexes.push(hit);
    from = hit + substr.length;
  }
  return indexes;
}
|
|
14297
|
+
/**
 * Escapes every regular-expression metacharacter in `str` with a backslash
 * so the result can be embedded in a RegExp as a literal.
 */
function escapeRegExp(str) {
  let escaped = "";
  for (const ch of str) {
    escaped += /[.*+?^${}()|[\]\\]/.test(ch) ? `\\${ch}` : ch;
  }
  return escaped;
}
|
|
14300
|
+
/**
 * Generic `[!code <keyword>]` notation transformer: `classMap` maps each
 * keyword to the class(es) added to the annotated line. An optional `:N`
 * suffix (`[!code hl:3]`, default 1) applies the class to the next N lines.
 * `classActivePre` / `classActiveCode` are added to the <pre>/<code> nodes
 * when any notation matched.
 */
function transformerNotationMap(options = {}, name = "@shikijs/transformers:notation-map") {
  const { classMap = {}, classActivePre = void 0, classActiveCode = void 0 } = options;
  // Keyword alternation is built from the classMap keys, regex-escaped.
  return createCommentNotationTransformer(name, new RegExp(`#?\\s*\\[!code (${Object.keys(classMap).map(escapeRegExp).join("|")})(:\\d+)?\\]`, "gi"), function([_$2, match, range$1 = ":1"], _line, _comment, lines, index) {
    const lineNum = Number.parseInt(range$1.slice(1), 10);
    for (let i$2 = index; i$2 < Math.min(index + lineNum, lines.length); i$2++) this.addClassToHast(lines[i$2], classMap[match]);
    if (classActivePre) this.addClassToHast(this.pre, classActivePre);
    if (classActiveCode) this.addClassToHast(this.code, classActiveCode);
    return true;
  }, options.matchAlgorithm);
}
|
|
14310
|
+
/**
 * Use `[!code ++]` and `[!code --]` to mark added and removed lines.
 * Delegates to `transformerNotationMap` with a two-entry class map.
 */
function transformerNotationDiff(options = {}) {
  const {
    classLineAdd = "diff add",
    classLineRemove = "diff remove",
    classActivePre = "has-diff",
    classActiveCode
  } = options;
  const mapOptions = {
    classMap: {
      "++": classLineAdd,
      "--": classLineRemove
    },
    classActivePre,
    classActiveCode,
    matchAlgorithm: options.matchAlgorithm
  };
  return transformerNotationMap(mapOptions, "@shikijs/transformers:notation-diff");
}
|
|
14325
|
+
/**
 * Allow using `[!code error]` / `[!code warning]` / `[!code info]` notation
 * in code to mark highlighted lines with severity classes.
 */
function transformerNotationErrorLevel(options = {}) {
  const {
    classMap = {
      error: ["highlighted", "error"],
      warning: ["highlighted", "warning"],
      info: ["highlighted", "info"]
    },
    classActivePre = "has-highlighted",
    classActiveCode
  } = options;
  const mapOptions = {
    classMap,
    classActivePre,
    classActiveCode,
    matchAlgorithm: options.matchAlgorithm
  };
  return transformerNotationMap(mapOptions, "@shikijs/transformers:notation-error-level");
}
|
|
14341
|
+
/**
 * Allow using `[!code focus]` notation in code to mark focused lines.
 */
function transformerNotationFocus(options = {}) {
  const {
    classActiveLine = "focused",
    classActivePre = "has-focused",
    classActiveCode
  } = options;
  const mapOptions = {
    classMap: { focus: classActiveLine },
    classActivePre,
    classActiveCode,
    matchAlgorithm: options.matchAlgorithm
  };
  return transformerNotationMap(mapOptions, "@shikijs/transformers:notation-focus");
}
|
|
14353
|
+
/**
 * Allow using `[!code highlight]` (or the `hl` shorthand) notation in code
 * to mark highlighted lines.
 */
function transformerNotationHighlight(options = {}) {
  const {
    classActiveLine = "highlighted",
    classActivePre = "has-highlighted",
    classActiveCode
  } = options;
  const mapOptions = {
    classMap: {
      highlight: classActiveLine,
      hl: classActiveLine
    },
    classActivePre,
    classActiveCode,
    matchAlgorithm: options.matchAlgorithm
  };
  return transformerNotationMap(mapOptions, "@shikijs/transformers:notation-highlight");
}
|
|
14368
|
+
/**
 * Highlights every occurrence of `word` within one rendered line by
 * delegating to `highlightRange` for each match. `ignoredElement` (typically
 * the comment token carrying the notation itself) is skipped. Must be
 * invoked with a transformer context as `this` (provides `addClassToHast`).
 */
function highlightWordInLine(line, ignoredElement, word, className) {
  const content = getTextContent(line);
  let index = content.indexOf(word);
  while (index !== -1) {
    highlightRange.call(this, line.children, ignoredElement, index, word.length, className);
    // Advance by one (not word.length) so overlapping occurrences are found.
    index = content.indexOf(word, index + 1);
  }
}
|
|
14376
|
+
/**
 * Recursively collects the plain-text content of a hast node: text nodes
 * yield their value, <span> elements concatenate their children, and
 * everything else contributes nothing.
 */
function getTextContent(element$1) {
  if (element$1.type === "text") {
    return element$1.value;
  }
  if (element$1.type !== "element" || element$1.tagName !== "span") {
    return "";
  }
  let text$1 = "";
  for (const child of element$1.children) {
    text$1 += getTextContent(child);
  }
  return text$1;
}
|
|
14381
|
+
/**
 * Adds `className` to the character range [index, index+len) across a line's
 * token spans, splitting any span that only partially overlaps the range.
 * Must be invoked with a transformer context as `this`.
 *
 * @param elements the line's child tokens (mutated in place via splice)
 * @param ignoredElement token to skip (the notation comment itself)
 * @param index highlight beginning index, in line-text character offsets
 * @param len highlight length
 * @param className class name to add to highlighted nodes
 */
function highlightRange(elements, ignoredElement, index, len, className) {
  // Running character offset of the current token within the line's text.
  let currentIdx = 0;
  for (let i$2 = 0; i$2 < elements.length; i$2++) {
    const element$1 = elements[i$2];
    if (element$1.type !== "element" || element$1.tagName !== "span" || element$1 === ignoredElement) continue;
    const textNode = element$1.children[0];
    if (textNode.type !== "text") continue;
    if (hasOverlap([currentIdx, currentIdx + textNode.value.length - 1], [index, index + len])) {
      // Translate the line-relative range into token-relative start/length.
      const start = Math.max(0, index - currentIdx);
      const length = len - Math.max(0, currentIdx - index);
      if (length === 0) continue;
      // Split the token into (before, highlighted, after) and apply the class
      // to the middle piece only.
      const separated = separateToken(element$1, textNode, start, length);
      this.addClassToHast(separated[1], className);
      const output = separated.filter(Boolean);
      elements.splice(i$2, 1, ...output);
      // Skip past the pieces just inserted so they are not re-processed.
      i$2 += output.length - 1;
    }
    currentIdx += textNode.value.length;
  }
}
|
|
14408
|
+
/**
 * Returns true when the two inclusive `[start, end]` ranges share at least
 * one position (expressed as the negation of "entirely disjoint").
 */
function hasOverlap(range1, range2) {
  const disjoint = range1[1] < range2[0] || range2[1] < range1[0];
  return !disjoint;
}
|
|
14411
|
+
/**
 * Splits a token span's text into up to three clones of the span —
 * [before, middle, after] — where `middle` covers `len` characters starting
 * at `index`. Missing before/after pieces are `undefined`. Each piece
 * inherits the span's styling via `inheritElement`.
 */
function separateToken(span, textNode, index, len) {
  const text$1 = textNode.value;
  const wrap = (value) => inheritElement(span, { children: [{
    type: "text",
    value
  }] });
  return [
    index > 0 ? wrap(text$1.slice(0, index)) : void 0,
    wrap(text$1.slice(index, index + len)),
    index + len < text$1.length ? wrap(text$1.slice(index + len)) : void 0
  ];
}
|
|
14423
|
+
function inheritElement(original, overrides) {
|
|
14424
|
+
return {
|
|
14425
|
+
...original,
|
|
14426
|
+
properties: { ...original.properties },
|
|
14427
|
+
...overrides
|
|
14428
|
+
};
|
|
14429
|
+
}
|
|
14430
|
+
function transformerNotationWordHighlight(options = {}) {
|
|
14431
|
+
const { classActiveWord = "highlighted-word", classActivePre = void 0 } = options;
|
|
14432
|
+
return createCommentNotationTransformer("@shikijs/transformers:notation-highlight-word", /\s*\[!code word:((?:\\.|[^:\]])+)(:\d+)?\]/, function([_$2, word, range$1], _line, comment$1, lines, index) {
|
|
14433
|
+
const lineNum = range$1 ? Number.parseInt(range$1.slice(1), 10) : lines.length;
|
|
14434
|
+
word = word.replace(/\\(.)/g, "$1");
|
|
14435
|
+
for (let i$2 = index; i$2 < Math.min(index + lineNum, lines.length); i$2++) highlightWordInLine.call(this, lines[i$2], comment$1, word, classActiveWord);
|
|
14436
|
+
if (classActivePre) this.addClassToHast(this.pre, classActivePre);
|
|
14437
|
+
return true;
|
|
14438
|
+
}, options.matchAlgorithm);
|
|
14439
|
+
}
|
|
14440
|
+
/**
|
|
14441
|
+
* Remove comments from the code.
|
|
14442
|
+
*/
|
|
14443
|
+
function transformerRemoveComments(options = {}) {
|
|
14444
|
+
const { removeEmptyLines = true } = options;
|
|
14445
|
+
return {
|
|
14446
|
+
name: "@shikijs/transformers:remove-comments",
|
|
14447
|
+
preprocess(_code, options$1) {
|
|
14448
|
+
if (options$1.includeExplanation !== true && options$1.includeExplanation !== "scopeName") throw new Error("`transformerRemoveComments` requires `includeExplanation` to be set to `true` or `'scopeName'`");
|
|
14449
|
+
},
|
|
14450
|
+
tokens(tokens) {
|
|
14451
|
+
const result = [];
|
|
14452
|
+
for (const line of tokens) {
|
|
14453
|
+
const filteredLine = [];
|
|
14454
|
+
let hasComment = false;
|
|
14455
|
+
for (const token$1 of line) if (token$1.explanation?.some((exp) => exp.scopes.some((s$1) => s$1.scopeName.startsWith("comment")))) hasComment = true;
|
|
14456
|
+
else filteredLine.push(token$1);
|
|
14457
|
+
if (removeEmptyLines && hasComment) {
|
|
14458
|
+
if (filteredLine.every((token$1) => !token$1.content.trim())) continue;
|
|
14459
|
+
}
|
|
14460
|
+
result.push(filteredLine);
|
|
14461
|
+
}
|
|
14462
|
+
return result;
|
|
14463
|
+
}
|
|
14464
|
+
};
|
|
14465
|
+
}
|
|
14466
|
+
/**
|
|
14467
|
+
* Remove line breaks between lines.
|
|
14468
|
+
* Useful when you override `display: block` to `.line` in CSS.
|
|
14469
|
+
*/
|
|
14470
|
+
function transformerRemoveLineBreak() {
|
|
14471
|
+
return {
|
|
14472
|
+
name: "@shikijs/transformers:remove-line-break",
|
|
14473
|
+
code(code) {
|
|
14474
|
+
code.children = code.children.filter((line) => !(line.type === "text" && line.value === "\n"));
|
|
14475
|
+
}
|
|
14476
|
+
};
|
|
14477
|
+
}
|
|
14478
|
+
/**
|
|
14479
|
+
* Remove notation escapes.
|
|
14480
|
+
* Useful when you want to write `// [!code` in markdown.
|
|
14481
|
+
* If you process `// [\!code ...]` expression, you can get `// [!code ...]` in the output.
|
|
14482
|
+
*/
|
|
14483
|
+
function transformerRemoveNotationEscape() {
|
|
14484
|
+
return {
|
|
14485
|
+
name: "@shikijs/transformers:remove-notation-escape",
|
|
14486
|
+
code(hast) {
|
|
14487
|
+
function replace(node) {
|
|
14488
|
+
if (node.type === "text") node.value = node.value.replace("[\\!code", "[!code");
|
|
14489
|
+
else if ("children" in node) for (const child of node.children) replace(child);
|
|
14490
|
+
}
|
|
14491
|
+
replace(hast);
|
|
14492
|
+
return hast;
|
|
14493
|
+
}
|
|
14494
|
+
};
|
|
14495
|
+
}
|
|
14496
|
+
/**
|
|
14497
|
+
* Render indentations as separate tokens.
|
|
14498
|
+
* Apply with CSS, it can be used to render indent guides visually.
|
|
14499
|
+
*/
|
|
14500
|
+
function transformerRenderIndentGuides(options = {}) {
|
|
14501
|
+
return {
|
|
14502
|
+
name: "@shikijs/transformers:render-indent-guides",
|
|
14503
|
+
code(hast) {
|
|
14504
|
+
const indent = Number(this.options.meta?.indent ?? this.options.meta?.__raw?.match(/\{indent:(\d+|false)\}/)?.[1] ?? options.indent ?? 2);
|
|
14505
|
+
if (Number.isNaN(indent) || indent <= 0) return hast;
|
|
14506
|
+
const indentRegex = new RegExp(` {${indent}}| {0,${indent - 1}}\t| {1,}$`, "g");
|
|
14507
|
+
const emptyLines = [];
|
|
14508
|
+
let level = 0;
|
|
14509
|
+
for (const line of hast.children) {
|
|
14510
|
+
if (line.type !== "element") continue;
|
|
14511
|
+
const first = line.children[0];
|
|
14512
|
+
if (first?.type !== "element" || first?.children[0]?.type !== "text") {
|
|
14513
|
+
emptyLines.push([line, level]);
|
|
14514
|
+
continue;
|
|
14515
|
+
}
|
|
14516
|
+
const text$1 = first.children[0];
|
|
14517
|
+
const blanks = text$1.value.split(/[^ \t]/, 1)[0];
|
|
14518
|
+
const ranges = [];
|
|
14519
|
+
for (const match of blanks.matchAll(indentRegex)) {
|
|
14520
|
+
const start = match.index;
|
|
14521
|
+
const end = start + match[0].length;
|
|
14522
|
+
ranges.push([start, end]);
|
|
14523
|
+
}
|
|
14524
|
+
for (const [line$1, level$1] of emptyLines) line$1.children.unshift(...Array.from({ length: Math.min(ranges.length, level$1 + 1) }, (_$2, i$2) => ({
|
|
14525
|
+
type: "element",
|
|
14526
|
+
tagName: "span",
|
|
14527
|
+
properties: {
|
|
14528
|
+
class: "indent",
|
|
14529
|
+
style: `--indent-offset: ${i$2 * indent}ch;`
|
|
14530
|
+
},
|
|
14531
|
+
children: []
|
|
14532
|
+
})));
|
|
14533
|
+
emptyLines.length = 0;
|
|
14534
|
+
level = ranges.length;
|
|
14535
|
+
if (ranges.length) {
|
|
14536
|
+
line.children.unshift(...ranges.map(([start, end]) => ({
|
|
14537
|
+
type: "element",
|
|
14538
|
+
tagName: "span",
|
|
14539
|
+
properties: { class: "indent" },
|
|
14540
|
+
children: [{
|
|
14541
|
+
type: "text",
|
|
14542
|
+
value: text$1.value.slice(start, end)
|
|
14543
|
+
}]
|
|
14544
|
+
})));
|
|
14545
|
+
text$1.value = text$1.value.slice(ranges.at(-1)[1]);
|
|
14546
|
+
}
|
|
14547
|
+
}
|
|
14548
|
+
return hast;
|
|
14549
|
+
}
|
|
14550
|
+
};
|
|
14551
|
+
}
|
|
14552
|
+
function isTab(part) {
|
|
14553
|
+
return part === " ";
|
|
14554
|
+
}
|
|
14555
|
+
function isSpace(part) {
|
|
14556
|
+
return part === " " || part === " ";
|
|
14557
|
+
}
|
|
14558
|
+
function separateContinuousSpaces(inputs) {
|
|
14559
|
+
const result = [];
|
|
14560
|
+
let current = "";
|
|
14561
|
+
function bump() {
|
|
14562
|
+
if (current.length) result.push(current);
|
|
14563
|
+
current = "";
|
|
14564
|
+
}
|
|
14565
|
+
inputs.forEach((part, idx) => {
|
|
14566
|
+
if (isTab(part)) {
|
|
14567
|
+
bump();
|
|
14568
|
+
result.push(part);
|
|
14569
|
+
} else if (isSpace(part) && (isSpace(inputs[idx - 1]) || isSpace(inputs[idx + 1]))) {
|
|
14570
|
+
bump();
|
|
14571
|
+
result.push(part);
|
|
14572
|
+
} else current += part;
|
|
14573
|
+
});
|
|
14574
|
+
bump();
|
|
14575
|
+
return result;
|
|
14576
|
+
}
|
|
14577
|
+
function splitSpaces(parts, type, renderContinuousSpaces = true) {
|
|
14578
|
+
if (type === "all") return parts;
|
|
14579
|
+
let leftCount = 0;
|
|
14580
|
+
let rightCount = 0;
|
|
14581
|
+
if (type === "boundary" || type === "leading") for (let i$2 = 0; i$2 < parts.length; i$2++) if (isSpace(parts[i$2])) leftCount++;
|
|
14582
|
+
else break;
|
|
14583
|
+
if (type === "boundary" || type === "trailing") for (let i$2 = parts.length - 1; i$2 >= 0; i$2--) if (isSpace(parts[i$2])) rightCount++;
|
|
14584
|
+
else break;
|
|
14585
|
+
const middle = parts.slice(leftCount, parts.length - rightCount);
|
|
14586
|
+
return [
|
|
14587
|
+
...parts.slice(0, leftCount),
|
|
14588
|
+
...renderContinuousSpaces ? separateContinuousSpaces(middle) : [middle.join("")],
|
|
14589
|
+
...parts.slice(parts.length - rightCount)
|
|
14590
|
+
];
|
|
14591
|
+
}
|
|
14592
|
+
/**
|
|
14593
|
+
* Render whitespaces as separate tokens.
|
|
14594
|
+
* Apply with CSS, it can be used to render tabs and spaces visually.
|
|
14595
|
+
*/
|
|
14596
|
+
function transformerRenderWhitespace(options = {}) {
|
|
14597
|
+
const classMap = {
|
|
14598
|
+
" ": options.classSpace ?? "space",
|
|
14599
|
+
" ": options.classTab ?? "tab"
|
|
14600
|
+
};
|
|
14601
|
+
const position = options.position ?? "all";
|
|
14602
|
+
const keys = Object.keys(classMap);
|
|
14603
|
+
return {
|
|
14604
|
+
name: "@shikijs/transformers:render-whitespace",
|
|
14605
|
+
root(root$1) {
|
|
14606
|
+
const pre = root$1.children[0];
|
|
14607
|
+
(pre.tagName === "pre" ? pre.children[0] : { children: [root$1] }).children.forEach((line) => {
|
|
14608
|
+
if (line.type !== "element" && line.type !== "root") return;
|
|
14609
|
+
const elements = line.children.filter((token$1) => token$1.type === "element");
|
|
14610
|
+
const last = elements.length - 1;
|
|
14611
|
+
line.children = line.children.flatMap((token$1) => {
|
|
14612
|
+
if (token$1.type !== "element") return token$1;
|
|
14613
|
+
const index = elements.indexOf(token$1);
|
|
14614
|
+
if (position === "boundary" && index !== 0 && index !== last) return token$1;
|
|
14615
|
+
if (position === "trailing" && index !== last) return token$1;
|
|
14616
|
+
if (position === "leading" && index !== 0) return token$1;
|
|
14617
|
+
const node = token$1.children[0];
|
|
14618
|
+
if (node.type !== "text" || !node.value) return token$1;
|
|
14619
|
+
const parts = splitSpaces(node.value.split(/([ \t])/).filter((i$2) => i$2.length), position === "boundary" && index === last && last !== 0 ? "trailing" : position, position !== "trailing" && position !== "leading");
|
|
14620
|
+
if (parts.length <= 1) return token$1;
|
|
14621
|
+
return parts.map((part) => {
|
|
14622
|
+
const clone$1 = {
|
|
14623
|
+
...token$1,
|
|
14624
|
+
properties: { ...token$1.properties }
|
|
14625
|
+
};
|
|
14626
|
+
clone$1.children = [{
|
|
14627
|
+
type: "text",
|
|
14628
|
+
value: part
|
|
14629
|
+
}];
|
|
14630
|
+
if (keys.includes(part)) {
|
|
14631
|
+
this.addClassToHast(clone$1, classMap[part]);
|
|
14632
|
+
delete clone$1.properties.style;
|
|
14633
|
+
}
|
|
14634
|
+
return clone$1;
|
|
14635
|
+
});
|
|
14636
|
+
});
|
|
14637
|
+
});
|
|
14638
|
+
}
|
|
14639
|
+
};
|
|
14640
|
+
}
|
|
14641
|
+
/**
|
|
14642
|
+
* Remove line breaks between lines.
|
|
14643
|
+
* Useful when you override `display: block` to `.line` in CSS.
|
|
14644
|
+
*/
|
|
14645
|
+
function transformerStyleToClass(options = {}) {
|
|
14646
|
+
const { classPrefix = "__shiki_", classSuffix = "", classReplacer = (className) => className } = options;
|
|
14647
|
+
const classToStyle = /* @__PURE__ */ new Map();
|
|
14648
|
+
function stringifyStyle(style) {
|
|
14649
|
+
return Object.entries(style).map(([key$1, value]) => `${key$1}:${value}`).join(";");
|
|
14650
|
+
}
|
|
14651
|
+
function registerStyle(style) {
|
|
14652
|
+
let className = classPrefix + cyrb53(typeof style === "string" ? style : stringifyStyle(style)) + classSuffix;
|
|
14653
|
+
className = classReplacer(className);
|
|
14654
|
+
if (!classToStyle.has(className)) classToStyle.set(className, typeof style === "string" ? style : { ...style });
|
|
14655
|
+
return className;
|
|
14656
|
+
}
|
|
14657
|
+
return {
|
|
14658
|
+
name: "@shikijs/transformers:style-to-class",
|
|
14659
|
+
pre(t) {
|
|
14660
|
+
if (!t.properties.style) return;
|
|
14661
|
+
const className = registerStyle(t.properties.style);
|
|
14662
|
+
delete t.properties.style;
|
|
14663
|
+
this.addClassToHast(t, className);
|
|
14664
|
+
},
|
|
14665
|
+
tokens(lines) {
|
|
14666
|
+
for (const line of lines) for (const token$1 of line) {
|
|
14667
|
+
if (!token$1.htmlStyle) continue;
|
|
14668
|
+
const className = registerStyle(token$1.htmlStyle);
|
|
14669
|
+
token$1.htmlStyle = {};
|
|
14670
|
+
token$1.htmlAttrs ||= {};
|
|
14671
|
+
if (!token$1.htmlAttrs.class) token$1.htmlAttrs.class = className;
|
|
14672
|
+
else token$1.htmlAttrs.class += ` ${className}`;
|
|
14673
|
+
}
|
|
14674
|
+
},
|
|
14675
|
+
getClassRegistry() {
|
|
14676
|
+
return classToStyle;
|
|
14677
|
+
},
|
|
14678
|
+
getCSS() {
|
|
14679
|
+
let css = "";
|
|
14680
|
+
for (const [className, style] of classToStyle.entries()) css += `.${className}{${typeof style === "string" ? style : stringifyStyle(style)}}`;
|
|
14681
|
+
return css;
|
|
14682
|
+
},
|
|
14683
|
+
clearRegistry() {
|
|
14684
|
+
classToStyle.clear();
|
|
14685
|
+
}
|
|
14686
|
+
};
|
|
14687
|
+
}
|
|
14688
|
+
/**
|
|
14689
|
+
* A simple hash function.
|
|
14690
|
+
*
|
|
14691
|
+
* @see https://stackoverflow.com/a/52171480
|
|
14692
|
+
*/
|
|
14693
|
+
function cyrb53(str, seed = 0) {
|
|
14694
|
+
let h1 = 3735928559 ^ seed;
|
|
14695
|
+
let h2 = 1103547991 ^ seed;
|
|
14696
|
+
for (let i$2 = 0, ch; i$2 < str.length; i$2++) {
|
|
14697
|
+
ch = str.charCodeAt(i$2);
|
|
14698
|
+
h1 = Math.imul(h1 ^ ch, 2654435761);
|
|
14699
|
+
h2 = Math.imul(h2 ^ ch, 1597334677);
|
|
14700
|
+
}
|
|
14701
|
+
h1 = Math.imul(h1 ^ h1 >>> 16, 2246822507);
|
|
14702
|
+
h1 ^= Math.imul(h2 ^ h2 >>> 13, 3266489909);
|
|
14703
|
+
h2 = Math.imul(h2 ^ h2 >>> 16, 2246822507);
|
|
14704
|
+
h2 ^= Math.imul(h1 ^ h1 >>> 13, 3266489909);
|
|
14705
|
+
return (4294967296 * (2097151 & h2) + (h1 >>> 0)).toString(36).slice(0, 6);
|
|
14706
|
+
}
|
|
14707
|
+
|
|
14708
|
+
//#endregion
|
|
14709
|
+
//#region src/utils/hast_utils.ts
|
|
14710
|
+
function createTextNodeElement(value) {
|
|
14711
|
+
return {
|
|
14712
|
+
type: "text",
|
|
14713
|
+
value
|
|
14714
|
+
};
|
|
14715
|
+
}
|
|
14716
|
+
function createHastElement({ tagName, children = [], properties = {} }) {
|
|
14717
|
+
return {
|
|
14718
|
+
type: "element",
|
|
14719
|
+
tagName,
|
|
14720
|
+
properties,
|
|
14721
|
+
children
|
|
14722
|
+
};
|
|
14723
|
+
}
|
|
14724
|
+
function createIconElement({ name, width = 16, height = 16, properties }) {
|
|
14725
|
+
return createHastElement({
|
|
14726
|
+
tagName: "svg",
|
|
14727
|
+
properties: {
|
|
14728
|
+
width,
|
|
14729
|
+
height,
|
|
14730
|
+
viewBox: "0 0 16 16",
|
|
14731
|
+
...properties
|
|
14732
|
+
},
|
|
14733
|
+
children: [createHastElement({
|
|
14734
|
+
tagName: "use",
|
|
14735
|
+
properties: { href: `#${name.replace(/^#/, "")}` }
|
|
14736
|
+
})]
|
|
14737
|
+
});
|
|
14738
|
+
}
|
|
14739
|
+
function findCodeElement(nodes) {
|
|
14740
|
+
let firstChild = nodes.children[0];
|
|
14741
|
+
while (firstChild != null) {
|
|
14742
|
+
if (firstChild.type === "element" && firstChild.tagName === "code") {
|
|
14743
|
+
return firstChild;
|
|
14744
|
+
}
|
|
14745
|
+
if ("children" in firstChild) {
|
|
14746
|
+
firstChild = firstChild.children[0];
|
|
14747
|
+
} else {
|
|
14748
|
+
firstChild = null;
|
|
14749
|
+
}
|
|
14750
|
+
}
|
|
14751
|
+
return undefined;
|
|
14752
|
+
}
|
|
14753
|
+
function createGutterWrapper(children) {
|
|
14754
|
+
return createHastElement({
|
|
14755
|
+
tagName: "div",
|
|
14756
|
+
properties: { "data-gutter": "" },
|
|
14757
|
+
children
|
|
14758
|
+
});
|
|
14759
|
+
}
|
|
14760
|
+
function createGutterItem(lineType, lineNumber, lineIndex, properties = {}) {
|
|
14761
|
+
return createHastElement({
|
|
14762
|
+
tagName: "div",
|
|
14763
|
+
properties: {
|
|
14764
|
+
"data-line-type": lineType,
|
|
14765
|
+
"data-column-number": lineNumber,
|
|
14766
|
+
"data-line-index": lineIndex,
|
|
14767
|
+
...properties
|
|
14768
|
+
},
|
|
14769
|
+
children: lineNumber != null ? [createHastElement({
|
|
14770
|
+
tagName: "span",
|
|
14771
|
+
properties: { "data-line-number-content": "" },
|
|
14772
|
+
children: [createTextNodeElement(`${lineNumber}`)]
|
|
14773
|
+
})] : undefined
|
|
14774
|
+
});
|
|
14775
|
+
}
|
|
14776
|
+
function createGutterGap(type, bufferType, size) {
|
|
14777
|
+
return createHastElement({
|
|
14778
|
+
tagName: "div",
|
|
14779
|
+
properties: {
|
|
14780
|
+
"data-gutter-buffer": bufferType,
|
|
14781
|
+
"data-buffer-size": size,
|
|
14782
|
+
"data-line-type": bufferType === "annotation" ? undefined : type,
|
|
14783
|
+
style: bufferType === "annotation" ? `grid-row: span ${size};` : `grid-row: span ${size};min-height:calc(${size} * 1lh);`
|
|
14784
|
+
}
|
|
14785
|
+
});
|
|
14786
|
+
}
|
|
14787
|
+
|
|
14788
|
+
//#endregion
|
|
14789
|
+
//#region src/utils/processLine.ts
|
|
14790
|
+
function processLine(node, line, state) {
|
|
14791
|
+
const lineInfo = typeof state.lineInfo === "function" ? state.lineInfo(line) : state.lineInfo[line - 1];
|
|
14792
|
+
if (lineInfo == null) {
|
|
14793
|
+
const errorMessage = `processLine: line ${line}, contains no state.lineInfo`;
|
|
14794
|
+
console.error(errorMessage, {
|
|
14795
|
+
node,
|
|
14796
|
+
line,
|
|
14797
|
+
state
|
|
14798
|
+
});
|
|
14799
|
+
throw new Error(errorMessage);
|
|
14957
14800
|
}
|
|
14958
|
-
|
|
14959
|
-
|
|
14801
|
+
node.tagName = "div";
|
|
14802
|
+
node.properties["data-line"] = lineInfo.lineNumber;
|
|
14803
|
+
node.properties["data-alt-line"] = lineInfo.altLineNumber;
|
|
14804
|
+
node.properties["data-line-type"] = lineInfo.type;
|
|
14805
|
+
node.properties["data-line-index"] = lineInfo.lineIndex;
|
|
14806
|
+
if (node.children.length === 0) {
|
|
14807
|
+
node.children.push(createTextNodeElement("\n"));
|
|
14960
14808
|
}
|
|
14961
|
-
|
|
14962
|
-
|
|
14963
|
-
|
|
14964
|
-
|
|
14809
|
+
return node;
|
|
14810
|
+
}
|
|
14811
|
+
|
|
14812
|
+
//#endregion
|
|
14813
|
+
//#region src/utils/wrapTokenFragments.ts
|
|
14814
|
+
const NO_TOKEN = Symbol("no-token");
|
|
14815
|
+
const MULTIPLE_TOKENS = Symbol("multiple-tokens");
|
|
14816
|
+
function wrapTokenFragments(container) {
|
|
14817
|
+
const ownTokenChar = getTokenChar(container);
|
|
14818
|
+
if (ownTokenChar != null) {
|
|
14819
|
+
return ownTokenChar;
|
|
14820
|
+
}
|
|
14821
|
+
let containerTokenState = NO_TOKEN;
|
|
14822
|
+
const wrappedChildren = [];
|
|
14823
|
+
let currentTokenChildren = [];
|
|
14824
|
+
let currentTokenChar;
|
|
14825
|
+
const flushTokenChildren = () => {
|
|
14826
|
+
if (currentTokenChildren.length === 0 || currentTokenChar == null) {
|
|
14827
|
+
currentTokenChildren = [];
|
|
14828
|
+
currentTokenChar = undefined;
|
|
14829
|
+
return;
|
|
14965
14830
|
}
|
|
14966
|
-
if (
|
|
14967
|
-
|
|
14831
|
+
if (currentTokenChildren.length === 1) {
|
|
14832
|
+
const child = currentTokenChildren[0];
|
|
14833
|
+
if (child?.type === "element") {
|
|
14834
|
+
setTokenChar(child, currentTokenChar);
|
|
14835
|
+
for (const grandChild of child.children) {
|
|
14836
|
+
stripTokenChar(grandChild);
|
|
14837
|
+
}
|
|
14838
|
+
} else {
|
|
14839
|
+
stripTokenChar(child);
|
|
14840
|
+
}
|
|
14841
|
+
wrappedChildren.push(child);
|
|
14842
|
+
currentTokenChildren = [];
|
|
14843
|
+
currentTokenChar = undefined;
|
|
14844
|
+
return;
|
|
14845
|
+
}
|
|
14846
|
+
for (const child of currentTokenChildren) {
|
|
14847
|
+
stripTokenChar(child);
|
|
14848
|
+
}
|
|
14849
|
+
wrappedChildren.push(createHastElement({
|
|
14850
|
+
tagName: "span",
|
|
14851
|
+
properties: { "data-char": currentTokenChar },
|
|
14852
|
+
children: currentTokenChildren
|
|
14853
|
+
}));
|
|
14854
|
+
currentTokenChildren = [];
|
|
14855
|
+
currentTokenChar = undefined;
|
|
14856
|
+
};
|
|
14857
|
+
const mergeContainerTokenState = (childTokenState) => {
|
|
14858
|
+
if (childTokenState === NO_TOKEN) {
|
|
14859
|
+
return;
|
|
14860
|
+
}
|
|
14861
|
+
if (childTokenState === MULTIPLE_TOKENS) {
|
|
14862
|
+
containerTokenState = MULTIPLE_TOKENS;
|
|
14863
|
+
return;
|
|
14864
|
+
}
|
|
14865
|
+
if (containerTokenState === NO_TOKEN) {
|
|
14866
|
+
containerTokenState = childTokenState;
|
|
14867
|
+
return;
|
|
14868
|
+
}
|
|
14869
|
+
if (containerTokenState !== childTokenState) {
|
|
14870
|
+
containerTokenState = MULTIPLE_TOKENS;
|
|
14871
|
+
}
|
|
14872
|
+
};
|
|
14873
|
+
for (const child of container.children) {
|
|
14874
|
+
const childTokenState = child.type === "element" ? wrapTokenFragments(child) : NO_TOKEN;
|
|
14875
|
+
mergeContainerTokenState(childTokenState);
|
|
14876
|
+
if (typeof childTokenState !== "number") {
|
|
14877
|
+
flushTokenChildren();
|
|
14878
|
+
wrappedChildren.push(child);
|
|
14879
|
+
continue;
|
|
14880
|
+
}
|
|
14881
|
+
if (currentTokenChar != null && currentTokenChar !== childTokenState) {
|
|
14882
|
+
flushTokenChildren();
|
|
14968
14883
|
}
|
|
14884
|
+
currentTokenChar ??= childTokenState;
|
|
14885
|
+
currentTokenChildren.push(child);
|
|
14969
14886
|
}
|
|
14970
|
-
|
|
14971
|
-
|
|
14972
|
-
|
|
14887
|
+
flushTokenChildren();
|
|
14888
|
+
container.children = wrappedChildren;
|
|
14889
|
+
return containerTokenState;
|
|
14890
|
+
}
|
|
14891
|
+
function getTokenChar(node) {
|
|
14892
|
+
const value = node.properties["data-char"];
|
|
14893
|
+
if (typeof value === "number") {
|
|
14894
|
+
return value;
|
|
14973
14895
|
}
|
|
14974
|
-
return
|
|
14896
|
+
return undefined;
|
|
14975
14897
|
}
|
|
14976
|
-
function
|
|
14977
|
-
|
|
14978
|
-
|
|
14979
|
-
|
|
14980
|
-
|
|
14898
|
+
function stripTokenChar(node) {
|
|
14899
|
+
if (node.type !== "element") return;
|
|
14900
|
+
node.properties["data-char"] = undefined;
|
|
14901
|
+
for (const child of node.children) {
|
|
14902
|
+
stripTokenChar(child);
|
|
14903
|
+
}
|
|
14904
|
+
}
|
|
14905
|
+
function setTokenChar(node, char) {
|
|
14906
|
+
node.properties["data-char"] = char;
|
|
14907
|
+
}
|
|
14908
|
+
|
|
14909
|
+
//#endregion
|
|
14910
|
+
//#region src/utils/createTransformerWithState.ts
|
|
14911
|
+
function createTransformerWithState(useTokenTransformer = false, useCSSClasses = false) {
|
|
14912
|
+
const state = { lineInfo: [] };
|
|
14913
|
+
const transformers = [{
|
|
14914
|
+
line(node) {
|
|
14915
|
+
delete node.properties.class;
|
|
14916
|
+
return node;
|
|
14917
|
+
},
|
|
14918
|
+
pre(pre) {
|
|
14919
|
+
const code = findCodeElement(pre);
|
|
14920
|
+
const children = [];
|
|
14921
|
+
if (code != null) {
|
|
14922
|
+
let index = 1;
|
|
14923
|
+
for (const node of code.children) {
|
|
14924
|
+
if (node.type !== "element") continue;
|
|
14925
|
+
if (useTokenTransformer) {
|
|
14926
|
+
wrapTokenFragments(node);
|
|
14927
|
+
}
|
|
14928
|
+
children.push(processLine(node, index, state));
|
|
14929
|
+
index++;
|
|
14930
|
+
}
|
|
14931
|
+
code.children = children;
|
|
14932
|
+
}
|
|
14933
|
+
return pre;
|
|
14934
|
+
},
|
|
14935
|
+
...useTokenTransformer ? {
|
|
14936
|
+
tokens(lines) {
|
|
14937
|
+
for (const line of lines) {
|
|
14938
|
+
let col = 0;
|
|
14939
|
+
for (const token$1 of line) {
|
|
14940
|
+
const tokenWithOriginalRange = token$1;
|
|
14941
|
+
tokenWithOriginalRange.__lineChar ??= col;
|
|
14942
|
+
col += token$1.content.length;
|
|
14943
|
+
}
|
|
14944
|
+
}
|
|
14945
|
+
},
|
|
14946
|
+
preprocess(_code, options) {
|
|
14947
|
+
options.mergeWhitespaces = "never";
|
|
14948
|
+
},
|
|
14949
|
+
span(hast, _line, _char, _lineElement, token$1) {
|
|
14950
|
+
if (token$1?.offset != null && token$1.content != null) {
|
|
14951
|
+
const tokenWithOriginalRange = token$1;
|
|
14952
|
+
const tokenChar = tokenWithOriginalRange.__lineChar;
|
|
14953
|
+
if (tokenChar != null) {
|
|
14954
|
+
hast.properties["data-char"] = tokenChar;
|
|
14955
|
+
}
|
|
14956
|
+
return hast;
|
|
14957
|
+
}
|
|
14958
|
+
return hast;
|
|
14959
|
+
}
|
|
14960
|
+
} : null
|
|
14961
|
+
}];
|
|
14962
|
+
if (useCSSClasses) {
|
|
14963
|
+
transformers.push(tokenStyleNormalizer, toClass);
|
|
14964
|
+
}
|
|
14965
|
+
return {
|
|
14966
|
+
state,
|
|
14967
|
+
transformers,
|
|
14968
|
+
toClass
|
|
14969
|
+
};
|
|
14970
|
+
}
|
|
14971
|
+
const toClass = transformerStyleToClass({ classPrefix: "hl-" });
|
|
14972
|
+
const tokenStyleNormalizer = {
|
|
14973
|
+
name: "token-style-normalizer",
|
|
14974
|
+
tokens(lines) {
|
|
14975
|
+
for (const line of lines) {
|
|
14976
|
+
for (const token$1 of line) {
|
|
14977
|
+
if (token$1.htmlStyle != null) continue;
|
|
14978
|
+
const style = {};
|
|
14979
|
+
if (token$1.color != null) {
|
|
14980
|
+
style.color = token$1.color;
|
|
14981
|
+
}
|
|
14982
|
+
if (token$1.bgColor != null) {
|
|
14983
|
+
style["background-color"] = token$1.bgColor;
|
|
14984
|
+
}
|
|
14985
|
+
if (token$1.fontStyle != null && token$1.fontStyle !== 0) {
|
|
14986
|
+
if ((token$1.fontStyle & 1) !== 0) {
|
|
14987
|
+
style["font-style"] = "italic";
|
|
14988
|
+
}
|
|
14989
|
+
if ((token$1.fontStyle & 2) !== 0) {
|
|
14990
|
+
style["font-weight"] = "bold";
|
|
14991
|
+
}
|
|
14992
|
+
if ((token$1.fontStyle & 4) !== 0) {
|
|
14993
|
+
style["text-decoration"] = "underline";
|
|
14994
|
+
}
|
|
14995
|
+
}
|
|
14996
|
+
if (Object.keys(style).length > 0) {
|
|
14997
|
+
token$1.htmlStyle = style;
|
|
14998
|
+
}
|
|
14999
|
+
}
|
|
14981
15000
|
}
|
|
14982
15001
|
}
|
|
15002
|
+
};
|
|
15003
|
+
|
|
15004
|
+
//#endregion
|
|
15005
|
+
//#region src/utils/formatCSSVariablePrefix.ts
|
|
15006
|
+
function formatCSSVariablePrefix(type) {
|
|
15007
|
+
return `--${type === "token" ? "diffs-token" : "diffs"}-`;
|
|
14983
15008
|
}
|
|
14984
15009
|
|
|
14985
15010
|
//#endregion
|
|
@@ -16013,11 +16038,15 @@ async function handleMessage(request) {
|
|
|
16013
16038
|
sendError(request.id, error);
|
|
16014
16039
|
}
|
|
16015
16040
|
}
|
|
16016
|
-
async function handleInitialize({ id, renderOptions: options, preferredHighlighter, resolvedThemes, resolvedLanguages }) {
|
|
16041
|
+
async function handleInitialize({ id, renderOptions: options, preferredHighlighter, resolvedThemes, resolvedLanguages, customExtensionsVersion: customExtensionsVersion$1, customExtensionMap }) {
|
|
16017
16042
|
let highlighter$1 = getHighlighter(preferredHighlighter);
|
|
16018
16043
|
if ("then" in highlighter$1) {
|
|
16019
16044
|
highlighter$1 = await highlighter$1;
|
|
16020
16045
|
}
|
|
16046
|
+
syncCustomExtensionsFromRequest({
|
|
16047
|
+
customExtensionsVersion: customExtensionsVersion$1,
|
|
16048
|
+
customExtensionMap
|
|
16049
|
+
});
|
|
16021
16050
|
attachResolvedThemes(resolvedThemes, highlighter$1);
|
|
16022
16051
|
if (resolvedLanguages != null) {
|
|
16023
16052
|
attachResolvedLanguages(resolvedLanguages, highlighter$1);
|
|
@@ -16044,11 +16073,15 @@ async function handleSetRenderOptions({ id, renderOptions: options, resolvedThem
|
|
|
16044
16073
|
sentAt: Date.now()
|
|
16045
16074
|
});
|
|
16046
16075
|
}
|
|
16047
|
-
async function handleRenderFile({ id, file, resolvedLanguages }) {
|
|
16076
|
+
async function handleRenderFile({ id, file, resolvedLanguages, customExtensionsVersion: customExtensionsVersion$1, customExtensionMap }) {
|
|
16048
16077
|
let highlighter$1 = getHighlighter();
|
|
16049
16078
|
if ("then" in highlighter$1) {
|
|
16050
16079
|
highlighter$1 = await highlighter$1;
|
|
16051
16080
|
}
|
|
16081
|
+
syncCustomExtensionsFromRequest({
|
|
16082
|
+
customExtensionsVersion: customExtensionsVersion$1,
|
|
16083
|
+
customExtensionMap
|
|
16084
|
+
});
|
|
16052
16085
|
if (resolvedLanguages != null) {
|
|
16053
16086
|
attachResolvedLanguages(resolvedLanguages, highlighter$1);
|
|
16054
16087
|
}
|
|
@@ -16059,11 +16092,15 @@ async function handleRenderFile({ id, file, resolvedLanguages }) {
|
|
|
16059
16092
|
};
|
|
16060
16093
|
sendFileSuccess(id, renderFileWithHighlighter(file, highlighter$1, fileOptions), fileOptions);
|
|
16061
16094
|
}
|
|
16062
|
-
async function handleRenderDiff({ id, diff, resolvedLanguages }) {
|
|
16095
|
+
async function handleRenderDiff({ id, diff, resolvedLanguages, customExtensionsVersion: customExtensionsVersion$1, customExtensionMap }) {
|
|
16063
16096
|
let highlighter$1 = getHighlighter();
|
|
16064
16097
|
if ("then" in highlighter$1) {
|
|
16065
16098
|
highlighter$1 = await highlighter$1;
|
|
16066
16099
|
}
|
|
16100
|
+
syncCustomExtensionsFromRequest({
|
|
16101
|
+
customExtensionsVersion: customExtensionsVersion$1,
|
|
16102
|
+
customExtensionMap
|
|
16103
|
+
});
|
|
16067
16104
|
if (resolvedLanguages != null) {
|
|
16068
16105
|
attachResolvedLanguages(resolvedLanguages, highlighter$1);
|
|
16069
16106
|
}
|
|
@@ -16078,6 +16115,15 @@ function getHighlighter(preferredHighlighter = "shiki-js") {
|
|
|
16078
16115
|
});
|
|
16079
16116
|
return highlighter;
|
|
16080
16117
|
}
|
|
16118
|
+
function syncCustomExtensionsFromRequest({ customExtensionsVersion: customExtensionsVersion$1, customExtensionMap }) {
|
|
16119
|
+
if (customExtensionsVersion$1 == null && customExtensionMap == null) {
|
|
16120
|
+
return;
|
|
16121
|
+
}
|
|
16122
|
+
if (customExtensionsVersion$1 == null || customExtensionMap == null) {
|
|
16123
|
+
throw new Error("Worker request must include both customExtensionsVersion and customExtensionMap");
|
|
16124
|
+
}
|
|
16125
|
+
replaceCustomExtensions(customExtensionsVersion$1, customExtensionMap);
|
|
16126
|
+
}
|
|
16081
16127
|
function sendFileSuccess(id, result, options) {
|
|
16082
16128
|
postMessage({
|
|
16083
16129
|
type: "success",
|