@infersec/conduit 1.6.0 → 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +2 -2
- package/dist/index.js +2 -2
- package/dist/{start-XRPU7RSB.js → start-BS3RcUet.js} +260 -50
- package/package.json +1 -1
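The bulk of the change lands in the renamed start chunk: Hugging Face model files are now streamed into a .partial file, resumed with HTTP Range requests, retried (3 attempts for full downloads, 10 for ranged ones), and aborted if no progress is observed for 60000 ms. For orientation only, the resume pattern roughly follows the sketch below. This is an illustration under assumptions, not the package's code: it assumes Node 18+ with global fetch, and the function and variable names (downloadWithResume, url, finalPath, expectedSize) are hypothetical. The actual implementation in the diff additionally probes Accept-Ranges with a HEAD request, meters progress, and falls back to a full download when ranges are unsupported.

// Hypothetical sketch of resuming a download with a Range request (Node 18+).
import { createWriteStream, existsSync } from 'node:fs';
import { stat, rename } from 'node:fs/promises';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

async function downloadWithResume(url, finalPath, expectedSize) {
    const partialPath = `${finalPath}.partial`;
    // Resume from however many bytes a previous attempt already wrote.
    const offset = existsSync(partialPath) ? (await stat(partialPath)).size : 0;
    if (offset === expectedSize) {
        await rename(partialPath, finalPath);
        return;
    }
    const response = await fetch(url, { headers: { Range: `bytes=${offset}-` } });
    if (offset > 0 && response.status !== 206) {
        // Appending a full body onto a partial file would corrupt it.
        throw new Error(`Server ignored the Range header (status ${response.status})`);
    }
    if (!response.ok || !response.body) {
        throw new Error(`Download failed with status ${response.status}`);
    }
    // Append to the partial file, then promote it once the size matches.
    await pipeline(Readable.fromWeb(response.body), createWriteStream(partialPath, { flags: 'a' }));
    if ((await stat(partialPath)).size !== expectedSize) {
        throw new Error('Download incomplete; a retry would resume from the new offset');
    }
    await rename(partialPath, finalPath);
}
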
package/dist/cli.js
CHANGED
@@ -6,7 +6,7 @@ const __dirname = __pathDirname(__filename);
 
 import { parseArgs } from 'node:util';
 import 'node:crypto';
-import { a as asError, s as startInferenceAgent } from './start-XRPU7RSB.js';
+import { a as asError, s as startInferenceAgent } from './start-BS3RcUet.js';
 import 'argon2';
 import 'node:child_process';
 import 'node:stream';
@@ -45,8 +45,8 @@ import 'querystring';
 import 'url';
 import 'http';
 import 'crypto';
-import 'node:stream/promises';
 import 'node:fs';
+import 'node:stream/promises';
 import 'fs/promises';
 import 'stream/promises';
 import 'node:string_decoder';

package/dist/index.js
CHANGED
@@ -5,7 +5,7 @@ const __filename = __fileURLToPath(import.meta.url);
 const __dirname = __pathDirname(__filename);
 
 import 'node:crypto';
-import { s as startInferenceAgent, a as asError } from './start-XRPU7RSB.js';
+import { s as startInferenceAgent, a as asError } from './start-BS3RcUet.js';
 import 'argon2';
 import 'node:child_process';
 import 'node:stream';
@@ -45,8 +45,8 @@ import 'querystring';
 import 'url';
 import 'http';
 import 'crypto';
-import 'node:stream/promises';
 import 'node:fs';
+import 'node:stream/promises';
 import 'fs/promises';
 import 'stream/promises';
 import 'node:string_decoder';

package/dist/{start-XRPU7RSB.js → start-BS3RcUet.js}
CHANGED
@@ -30,7 +30,7 @@ import require$$1$3 from 'node:worker_threads';
 import require$$1$5, { fileURLToPath } from 'node:url';
 import require$$1$6 from 'node:async_hooks';
 import require$$1$7 from 'node:console';
-import require$$0$b, { mkdir, readFile, writeFile, realpath, readlink, readdir, lstat } from 'node:fs/promises';
+import require$$0$b, { mkdir, readFile, writeFile, stat, unlink, rename, realpath, readlink, readdir, lstat } from 'node:fs/promises';
 import path$1, { dirname, join, win32, posix } from 'node:path';
 import require$$1$8 from 'node:dns';
 import require$$2$4 from 'node:sqlite';
@@ -44,9 +44,9 @@ import require$$8$1 from 'querystring';
 import require$$0$f from 'url';
 import require$$0$g from 'http';
 import require$$0$h from 'crypto';
-import { pipeline, finished } from 'node:stream/promises';
 import * as actualFS from 'node:fs';
 import { existsSync, createWriteStream, statSync, readFileSync, appendFileSync, writeFileSync, createReadStream } from 'node:fs';
+import { pipeline, finished } from 'node:stream/promises';
 import 'fs/promises';
 import 'stream/promises';
 import { StringDecoder } from 'node:string_decoder';
@@ -96473,11 +96473,13 @@ const ModelDownloadProgressSchema = object({
 completedFiles: array(string$1().min(1))
 });
 
-const DOWNLOAD_PROGRESS_TIMEOUT =
-const
+const DOWNLOAD_PROGRESS_TIMEOUT = 60000;
+const DOWNLOAD_RETRY_ATTEMPTS_FULL = 3;
+const DOWNLOAD_RETRY_ATTEMPTS_RANGE = 10;
 async function downloadModelViaHuggingFace({ format, huggingFaceToken, modelSlug: rawModelSlug, progressFilePath, targetDirectory }) {
 // Sanitise model ID
-const [
+const [modelSlugWithRevision, variant = null] = rawModelSlug.split(":");
+const { modelSlug, revision } = parseModelRevision(modelSlugWithRevision);
 // Prepare directory
 await mkdir(targetDirectory, { recursive: true });
 let progress;
@@ -96512,65 +96514,273 @@ async function downloadModelViaHuggingFace({ format, huggingFaceToken, modelSlug
 console.log("Skipping due to already having completed file:", file.path, file.size);
 continue;
 }
-
-
-
+const rangeInfo = await getRangeInfo({
+accessToken,
+filePath: file.path,
+fileSize: file.size,
+modelSlug,
+revision
+});
+if (rangeInfo.supportsRanges) {
 try {
-
-await mkdir(dirname(join(targetDirectory, file.path)), { recursive: true });
-const response = await downloadFile({
+await downloadFileWithRange({
 accessToken,
-
-
-
-
-
-throw new Error(`Requested file did not return a valid response: ${file.path}`);
-}
-const input = Readable.fromWeb(response.stream());
-const meter = watchStreamProgress(25 * 1024 * 1024); // 25 MiB intervals
-const output = createWriteStream(join(targetDirectory, file.path));
-let progressTimeout = null;
-let lastPercentage = "0.0";
-let lastProgressBytes = 0;
-meter.progress.on("progress", (totalBytes) => {
-lastProgressBytes = totalBytes;
-const percentComplete = ((totalBytes / file.size) * 100).toFixed(1);
-if (lastPercentage !== percentComplete) {
-console.log(` => ${percentComplete}% (${totalBytes} / ${file.size})`);
-}
-lastPercentage = percentComplete;
-clearTimeout(progressTimeout);
-progressTimeout = setTimeout(() => {
-input.destroy(new Error(`Timed out with no progress for ${DOWNLOAD_PROGRESS_TIMEOUT}ms while downloading ${file.path}. Last progress: ${lastPercentage}% (${lastProgressBytes} / ${file.size})`));
-}, DOWNLOAD_PROGRESS_TIMEOUT);
+filePath: file.path,
+fileSize: rangeInfo.totalSize ?? file.size,
+modelSlug,
+revision,
+targetDirectory
 });
-try {
-await pipeline(input, meter, output);
-}
-finally {
-clearTimeout(progressTimeout);
-}
-downloaded = true;
-break;
 }
 catch (error) {
-
-
-
-
+if (error instanceof RangeNotSupportedError) {
+console.warn(`Range download unavailable for ${file.path}: ${error.message}`);
+await downloadFileFull({
+accessToken,
+filePath: file.path,
+fileSize: file.size,
+modelSlug,
+targetDirectory
+});
+}
+else {
+throw error;
 }
 }
 }
-
-
-
+else {
+if (rangeInfo.reason) {
+console.warn(`Range download unavailable for ${file.path}: ${rangeInfo.reason}`);
+}
+await downloadFileFull({
+accessToken,
+filePath: file.path,
+fileSize: file.size,
+modelSlug,
+targetDirectory
+});
 }
 // Update progress
 progress.completedFiles.push(file.path);
 await writeFile(progressFilePath, JSON.stringify(progress, undefined, 4));
 }
 }
+function encodePathSegments(path) {
+return path
+.split("/")
+.map(segment => encodeURIComponent(segment))
+.join("/");
+}
+function parseModelRevision(modelSlugWithRevision) {
+const revisionIndex = modelSlugWithRevision.indexOf("@");
+if (revisionIndex === -1) {
+return { modelSlug: modelSlugWithRevision, revision: "main" };
+}
+return {
+modelSlug: modelSlugWithRevision.slice(0, revisionIndex),
+revision: modelSlugWithRevision.slice(revisionIndex + 1)
+};
+}
+function getDownloadPaths({ filePath, targetDirectory }) {
+const finalPath = join(targetDirectory, filePath);
+return {
+finalPath,
+partialPath: `${finalPath}.partial`
+};
+}
+function getResolveURL({ filePath, modelSlug, revision }) {
+const encodedRepo = encodeURIComponent(modelSlug);
+const encodedRevision = encodeURIComponent(revision);
+const encodedPath = encodePathSegments(filePath);
+return `https://huggingface.co/${encodedRepo}/resolve/${encodedRevision}/${encodedPath}`;
+}
+class RangeNotSupportedError extends Error {
+constructor(message) {
+super(message);
+this.name = "RangeNotSupportedError";
+}
+}
+function getAuthHeaders(accessToken) {
+if (!accessToken) {
+return {};
+}
+return {
+Authorization: `Bearer ${accessToken}`
+};
+}
+async function getRangeInfo({ accessToken, filePath, fileSize, modelSlug, revision }) {
+const url = getResolveURL({ filePath, modelSlug, revision });
+try {
+const response = await undiciExports.fetch(url, {
+method: "HEAD",
+headers: getAuthHeaders(accessToken)
+});
+if (!response.ok) {
+return {
+reason: `HEAD request failed with status ${response.status}`,
+supportsRanges: false,
+totalSize: null
+};
+}
+const acceptRanges = response.headers.get("accept-ranges")?.toLowerCase() ?? "";
+const totalSize = Number(response.headers.get("content-length"));
+if (acceptRanges.includes("bytes") === false) {
+return {
+reason: "Server does not advertise range support",
+supportsRanges: false,
+totalSize: Number.isFinite(totalSize) ? totalSize : fileSize
+};
+}
+return {
+reason: null,
+supportsRanges: true,
+totalSize: Number.isFinite(totalSize) ? totalSize : fileSize
+};
+}
+catch (error) {
+const parsed = asError(error);
+return {
+reason: `HEAD request failed: ${parsed.message}`,
+supportsRanges: false,
+totalSize: fileSize
+};
+}
+}
+async function downloadFileFull({ accessToken, filePath, fileSize, modelSlug, targetDirectory }) {
+const { finalPath, partialPath } = getDownloadPaths({ filePath, targetDirectory });
+let lastError = null;
+for (let attempt = 1; attempt <= DOWNLOAD_RETRY_ATTEMPTS_FULL; attempt++) {
+try {
+console.log("Downloading:", filePath, fileSize, `(attempt ${attempt})`);
+await mkdir(dirname(finalPath), { recursive: true });
+if (existsSync(partialPath)) {
+await unlink(partialPath);
+}
+const response = await downloadFile({
+accessToken,
+repo: modelSlug,
+path: filePath
+});
+if (!response) {
+throw new Error(`Requested file did not return a valid response: ${filePath}`);
+}
+const input = Readable.fromWeb(response.stream());
+const meter = watchStreamProgress(25 * 1024 * 1024); // 25 MiB intervals
+const output = createWriteStream(partialPath, { flags: "w" });
+let progressTimeout = null;
+let lastPercentage = "0.0";
+let lastProgressBytes = 0;
+meter.progress.on("progress", (totalBytes) => {
+lastProgressBytes = totalBytes;
+const percentComplete = ((totalBytes / fileSize) * 100).toFixed(1);
+if (lastPercentage !== percentComplete) {
+console.log(` => ${percentComplete}% (${totalBytes} / ${fileSize})`);
+}
+lastPercentage = percentComplete;
+clearTimeout(progressTimeout);
+progressTimeout = setTimeout(() => {
+input.destroy(new Error(`Timed out with no progress for ${DOWNLOAD_PROGRESS_TIMEOUT}ms while downloading ${filePath}. Last progress: ${lastPercentage}% (${lastProgressBytes} / ${fileSize})`));
+}, DOWNLOAD_PROGRESS_TIMEOUT);
+});
+try {
+await pipeline(input, meter, output);
+}
+finally {
+clearTimeout(progressTimeout);
+}
+await rename(partialPath, finalPath);
+return;
+}
+catch (error) {
+lastError = asError(error);
+if (attempt < DOWNLOAD_RETRY_ATTEMPTS_FULL) {
+console.warn(`Retrying full download (${attempt}/${DOWNLOAD_RETRY_ATTEMPTS_FULL}) for ${filePath} due to error: ${lastError.message}`);
+continue;
+}
+}
+}
+const errorMessage = `Failed downloading ${filePath} (${fileSize} bytes) after ${DOWNLOAD_RETRY_ATTEMPTS_FULL} attempts. Last error: ${lastError?.message ?? "Unknown error"}`;
+throw new Error(errorMessage);
+}
+async function downloadFileWithRange({ accessToken, filePath, fileSize, modelSlug, revision, targetDirectory }) {
+const { finalPath, partialPath } = getDownloadPaths({ filePath, targetDirectory });
+const url = getResolveURL({ filePath, modelSlug, revision });
+let lastError = null;
+for (let attempt = 1; attempt <= DOWNLOAD_RETRY_ATTEMPTS_RANGE; attempt++) {
+let startOffset = 0;
+if (existsSync(partialPath)) {
+const currentStats = await stat(partialPath);
+startOffset = currentStats.size;
+if (startOffset > fileSize) {
+await unlink(partialPath);
+startOffset = 0;
+}
+if (startOffset === fileSize) {
+await rename(partialPath, finalPath);
+return;
+}
+}
+try {
+const resumeLabel = startOffset > 0 ? `resuming at ${startOffset}` : "starting";
+console.log("Downloading:", filePath, fileSize, `(attempt ${attempt}, ${resumeLabel})`);
+await mkdir(dirname(finalPath), { recursive: true });
+const response = await undiciExports.fetch(url, {
+headers: {
+...getAuthHeaders(accessToken),
+Range: `bytes=${startOffset}-`
+}
+});
+if (!response.ok) {
+throw new Error(`Range request failed with status ${response.status} for ${filePath}`);
+}
+if (startOffset > 0 && response.status !== 206) {
+throw new RangeNotSupportedError(`Server did not honor range request (status ${response.status})`);
+}
+if (!response.body) {
+throw new Error(`Range request returned no body for ${filePath}`);
+}
+const input = Readable.fromWeb(response.body);
+const meter = watchStreamProgress(25 * 1024 * 1024); // 25 MiB intervals
+const output = createWriteStream(partialPath, { flags: "a" });
+let progressTimeout = null;
+let lastPercentage = ((startOffset / fileSize) * 100).toFixed(1);
+let lastProgressBytes = startOffset;
+meter.progress.on("progress", (totalBytes) => {
+lastProgressBytes = startOffset + totalBytes;
+const percentComplete = ((lastProgressBytes / fileSize) * 100).toFixed(1);
+if (lastPercentage !== percentComplete) {
+console.log(` => ${percentComplete}% (${lastProgressBytes} / ${fileSize})`);
+}
+lastPercentage = percentComplete;
+clearTimeout(progressTimeout);
+progressTimeout = setTimeout(() => {
+input.destroy(new Error(`Timed out with no progress for ${DOWNLOAD_PROGRESS_TIMEOUT}ms while downloading ${filePath}. Last progress: ${lastPercentage}% (${lastProgressBytes} / ${fileSize})`));
+}, DOWNLOAD_PROGRESS_TIMEOUT);
+});
+try {
+await pipeline(input, meter, output);
+}
+finally {
+clearTimeout(progressTimeout);
+}
+const updatedStats = await stat(partialPath);
+if (updatedStats.size < fileSize) {
+throw new Error(`Download incomplete for ${filePath}. Received ${updatedStats.size} / ${fileSize} bytes`);
+}
+await rename(partialPath, finalPath);
+return;
+}
+catch (error) {
+lastError = asError(error);
+if (attempt < DOWNLOAD_RETRY_ATTEMPTS_RANGE) {
+console.warn(`Retrying range download (${attempt}/${DOWNLOAD_RETRY_ATTEMPTS_RANGE}) for ${filePath} due to error: ${lastError.message}`);
+continue;
+}
+}
+}
+const errorMessage = `Failed downloading ${filePath} (${fileSize} bytes) after ${DOWNLOAD_RETRY_ATTEMPTS_RANGE} attempts. Last error: ${lastError?.message ?? "Unknown error"}`;
+throw new Error(errorMessage);
+}
 
 const balanced = (a, b, str) => {
 const ma = a instanceof RegExp ? maybeMatch(a, str) : a;