@infersec/conduit 1.5.1 → 1.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +4 -4
- package/dist/index.js +4 -4
- package/dist/{start-Cs0zWiJW.js → start-C7RTzXTv.js} +63 -34
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { fileURLToPath as __fileURLToPath } from 'url';
|
|
3
|
-
import { dirname } from 'path';
|
|
2
|
+
import { fileURLToPath as __fileURLToPath } from 'node:url';
|
|
3
|
+
import { dirname as __pathDirname } from 'node:path';
|
|
4
4
|
const __filename = __fileURLToPath(import.meta.url);
|
|
5
|
-
const __dirname = dirname(__filename);
|
|
5
|
+
const __dirname = __pathDirname(__filename);
|
|
6
6
|
|
|
7
7
|
import { parseArgs } from 'node:util';
|
|
8
8
|
import 'node:crypto';
|
|
9
|
-
import { a as asError, s as startInferenceAgent } from './start-Cs0zWiJW.js';
|
|
9
|
+
import { a as asError, s as startInferenceAgent } from './start-C7RTzXTv.js';
|
|
10
10
|
import 'argon2';
|
|
11
11
|
import 'node:child_process';
|
|
12
12
|
import 'node:stream';
|
package/dist/index.js
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { fileURLToPath as __fileURLToPath } from 'url';
|
|
3
|
-
import { dirname } from 'path';
|
|
2
|
+
import { fileURLToPath as __fileURLToPath } from 'node:url';
|
|
3
|
+
import { dirname as __pathDirname } from 'node:path';
|
|
4
4
|
const __filename = __fileURLToPath(import.meta.url);
|
|
5
|
-
const __dirname = dirname(__filename);
|
|
5
|
+
const __dirname = __pathDirname(__filename);
|
|
6
6
|
|
|
7
7
|
import 'node:crypto';
|
|
8
|
-
import { s as startInferenceAgent, a as asError } from './start-Cs0zWiJW.js';
|
|
8
|
+
import { s as startInferenceAgent, a as asError } from './start-C7RTzXTv.js';
|
|
9
9
|
import 'argon2';
|
|
10
10
|
import 'node:child_process';
|
|
11
11
|
import 'node:stream';
|
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { fileURLToPath as __fileURLToPath } from 'url';
|
|
3
|
-
import { dirname } from 'path';
|
|
2
|
+
import { fileURLToPath as __fileURLToPath } from 'node:url';
|
|
3
|
+
import { dirname as __pathDirname } from 'node:path';
|
|
4
4
|
const __filename = __fileURLToPath(import.meta.url);
|
|
5
|
-
const __dirname = dirname(__filename);
|
|
5
|
+
const __dirname = __pathDirname(__filename);
|
|
6
6
|
|
|
7
7
|
import require$$0$5 from 'os';
|
|
8
8
|
import require$$1$2, { realpathSync as realpathSync$1, readlinkSync, readdirSync, readdir as readdir$1, lstatSync } from 'fs';
|
|
@@ -31,7 +31,7 @@ import require$$1$5, { fileURLToPath } from 'node:url';
|
|
|
31
31
|
import require$$1$6 from 'node:async_hooks';
|
|
32
32
|
import require$$1$7 from 'node:console';
|
|
33
33
|
import require$$0$b, { realpath, readlink, readdir, lstat, mkdir, readFile, writeFile } from 'node:fs/promises';
|
|
34
|
-
import path$1, { win32, posix, join } from 'node:path';
|
|
34
|
+
import path$1, { win32, posix, dirname, join } from 'node:path';
|
|
35
35
|
import require$$1$8 from 'node:dns';
|
|
36
36
|
import require$$2$4 from 'node:sqlite';
|
|
37
37
|
import require$$0$c from 'path';
|
|
@@ -94890,7 +94890,8 @@ const ModelDownloadProgressSchema = object({
|
|
|
94890
94890
|
completedFiles: array(string().min(1))
|
|
94891
94891
|
});
|
|
94892
94892
|
|
|
94893
|
-
const DOWNLOAD_PROGRESS_TIMEOUT =
|
|
94893
|
+
const DOWNLOAD_PROGRESS_TIMEOUT = 30000;
|
|
94894
|
+
const DOWNLOAD_RETRY_ATTEMPTS = 3;
|
|
94894
94895
|
async function downloadModelViaHuggingFace({ format, huggingFaceToken, modelSlug: rawModelSlug, progressFilePath, targetDirectory }) {
|
|
94895
94896
|
// Sanitise model ID
|
|
94896
94897
|
const [modelSlug, variant = null] = rawModelSlug.split(":");
|
|
@@ -94911,9 +94912,13 @@ async function downloadModelViaHuggingFace({ format, huggingFaceToken, modelSlug
|
|
|
94911
94912
|
// Get all files in model
|
|
94912
94913
|
for await (const file of listFiles({
|
|
94913
94914
|
accessToken,
|
|
94915
|
+
recursive: true,
|
|
94914
94916
|
repo: modelSlug
|
|
94915
94917
|
})) {
|
|
94916
94918
|
const filePath = file.path.toLowerCase();
|
|
94919
|
+
if (format === "gguf" && filePath.endsWith(".gguf") === false) {
|
|
94920
|
+
continue;
|
|
94921
|
+
}
|
|
94917
94922
|
if (variant &&
|
|
94918
94923
|
filePath.endsWith(".gguf") &&
|
|
94919
94924
|
filePath.includes(variant.toLowerCase()) === false) {
|
|
@@ -94924,36 +94929,60 @@ async function downloadModelViaHuggingFace({ format, huggingFaceToken, modelSlug
|
|
|
94924
94929
|
console.log("Skipping due to already having completed file:", file.path, file.size);
|
|
94925
94930
|
continue;
|
|
94926
94931
|
}
|
|
94927
|
-
|
|
94928
|
-
|
|
94929
|
-
|
|
94930
|
-
|
|
94931
|
-
|
|
94932
|
-
|
|
94933
|
-
|
|
94934
|
-
|
|
94935
|
-
|
|
94936
|
-
|
|
94937
|
-
|
|
94938
|
-
|
|
94939
|
-
|
|
94940
|
-
|
|
94941
|
-
|
|
94942
|
-
|
|
94943
|
-
|
|
94944
|
-
|
|
94945
|
-
|
|
94932
|
+
let downloaded = false;
|
|
94933
|
+
let lastError = null;
|
|
94934
|
+
for (let attempt = 1; attempt <= DOWNLOAD_RETRY_ATTEMPTS; attempt++) {
|
|
94935
|
+
try {
|
|
94936
|
+
console.log("Downloading:", file.path, file.size, `(attempt ${attempt})`);
|
|
94937
|
+
await mkdir(dirname(join(targetDirectory, file.path)), { recursive: true });
|
|
94938
|
+
const response = await downloadFile({
|
|
94939
|
+
accessToken,
|
|
94940
|
+
repo: modelSlug,
|
|
94941
|
+
path: file.path
|
|
94942
|
+
// path: join(targetDirectory, file.path)
|
|
94943
|
+
});
|
|
94944
|
+
if (!response) {
|
|
94945
|
+
throw new Error(`Requested file did not return a valid response: ${file.path}`);
|
|
94946
|
+
}
|
|
94947
|
+
const input = Readable.fromWeb(response.stream());
|
|
94948
|
+
const meter = watchStreamProgress(25 * 1024 * 1024); // 25 MiB intervals
|
|
94949
|
+
const output = createWriteStream(join(targetDirectory, file.path));
|
|
94950
|
+
let progressTimeout = null;
|
|
94951
|
+
let lastPercentage = "0.0";
|
|
94952
|
+
let lastProgressBytes = 0;
|
|
94953
|
+
meter.progress.on("progress", (totalBytes) => {
|
|
94954
|
+
lastProgressBytes = totalBytes;
|
|
94955
|
+
const percentComplete = ((totalBytes / file.size) * 100).toFixed(1);
|
|
94956
|
+
if (lastPercentage !== percentComplete) {
|
|
94957
|
+
console.log(` => ${percentComplete}% (${totalBytes} / ${file.size})`);
|
|
94958
|
+
}
|
|
94959
|
+
lastPercentage = percentComplete;
|
|
94960
|
+
clearTimeout(progressTimeout);
|
|
94961
|
+
progressTimeout = setTimeout(() => {
|
|
94962
|
+
input.destroy(new Error(`Timed out with no progress for ${DOWNLOAD_PROGRESS_TIMEOUT}ms while downloading ${file.path}. Last progress: ${lastPercentage}% (${lastProgressBytes} / ${file.size})`));
|
|
94963
|
+
}, DOWNLOAD_PROGRESS_TIMEOUT);
|
|
94964
|
+
});
|
|
94965
|
+
try {
|
|
94966
|
+
await pipeline(input, meter, output);
|
|
94967
|
+
}
|
|
94968
|
+
finally {
|
|
94969
|
+
clearTimeout(progressTimeout);
|
|
94970
|
+
}
|
|
94971
|
+
downloaded = true;
|
|
94972
|
+
break;
|
|
94946
94973
|
}
|
|
94947
|
-
|
|
94948
|
-
|
|
94949
|
-
|
|
94950
|
-
|
|
94951
|
-
|
|
94952
|
-
|
|
94953
|
-
|
|
94954
|
-
|
|
94955
|
-
|
|
94956
|
-
|
|
94974
|
+
catch (error) {
|
|
94975
|
+
lastError = asError(error);
|
|
94976
|
+
if (attempt < DOWNLOAD_RETRY_ATTEMPTS) {
|
|
94977
|
+
console.warn(`Retrying download (${attempt}/${DOWNLOAD_RETRY_ATTEMPTS}) for ${file.path} due to error: ${lastError.message}`);
|
|
94978
|
+
continue;
|
|
94979
|
+
}
|
|
94980
|
+
}
|
|
94981
|
+
}
|
|
94982
|
+
if (!downloaded) {
|
|
94983
|
+
const errorMessage = `Failed downloading ${file.path} (${file.size} bytes) after ${DOWNLOAD_RETRY_ATTEMPTS} attempts. Last error: ${lastError?.message ?? "Unknown error"}`;
|
|
94984
|
+
throw new Error(errorMessage);
|
|
94985
|
+
}
|
|
94957
94986
|
// Update progress
|
|
94958
94987
|
progress.completedFiles.push(file.path);
|
|
94959
94988
|
await writeFile(progressFilePath, JSON.stringify(progress, undefined, 4));
|