@huggingface/inference 3.9.1 → 3.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +2 -26
- package/dist/index.js +2 -26
- package/dist/src/lib/getInferenceProviderMapping.d.ts +2 -2
- package/dist/src/lib/getInferenceProviderMapping.d.ts.map +1 -1
- package/dist/src/providers/fal-ai.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/lib/getInferenceProviderMapping.ts +1 -17
- package/src/providers/fal-ai.ts +0 -10
package/dist/index.cjs
CHANGED
@@ -342,15 +342,7 @@ var FalAITextToImageTask = class extends FalAITask {
       ...omit(params.args, ["inputs", "parameters"]),
       ...params.args.parameters,
       sync_mode: true,
-      prompt: params.args.inputs,
-      ...params.mapping?.adapter === "lora" && params.mapping.adapterWeightsPath ? {
-        loras: [
-          {
-            path: buildLoraPath(params.mapping.hfModelId, params.mapping.adapterWeightsPath),
-            scale: 1
-          }
-        ]
-      } : void 0
+      prompt: params.args.inputs
     };
     if (params.mapping?.adapter === "lora" && params.mapping.adapterWeightsPath) {
       payload.loras = [
@@ -1323,7 +1315,7 @@ function getProviderHelper(provider, task) {
 
 // package.json
 var name = "@huggingface/inference";
-var version = "3.9.1";
+var version = "3.9.2";
 
 // src/providers/consts.ts
 var HARDCODED_MODEL_INFERENCE_MAPPING = {
@@ -1388,22 +1380,6 @@ async function getInferenceProviderMapping(params, options) {
         `Model ${params.modelId} is in staging mode for provider ${params.provider}. Meant for test purposes only.`
       );
     }
-    if (providerMapping.adapter === "lora") {
-      const treeResp = await (options?.fetch ?? fetch)(`${HF_HUB_URL}/api/models/${params.modelId}/tree/main`);
-      if (!treeResp.ok) {
-        throw new Error(`Unable to fetch the model tree for ${params.modelId}.`);
-      }
-      const tree = await treeResp.json();
-      const adapterWeightsPath = tree.find(({ type, path }) => type === "file" && path.endsWith(".safetensors"))?.path;
-      if (!adapterWeightsPath) {
-        throw new Error(`No .safetensors file found in the model tree for ${params.modelId}.`);
-      }
-      return {
-        ...providerMapping,
-        hfModelId: params.modelId,
-        adapterWeightsPath
-      };
-    }
     return { ...providerMapping, hfModelId: params.modelId };
   }
   return null;

package/dist/index.js
CHANGED
@@ -285,15 +285,7 @@ var FalAITextToImageTask = class extends FalAITask {
       ...omit(params.args, ["inputs", "parameters"]),
       ...params.args.parameters,
       sync_mode: true,
-      prompt: params.args.inputs,
-      ...params.mapping?.adapter === "lora" && params.mapping.adapterWeightsPath ? {
-        loras: [
-          {
-            path: buildLoraPath(params.mapping.hfModelId, params.mapping.adapterWeightsPath),
-            scale: 1
-          }
-        ]
-      } : void 0
+      prompt: params.args.inputs
     };
     if (params.mapping?.adapter === "lora" && params.mapping.adapterWeightsPath) {
       payload.loras = [
@@ -1266,7 +1258,7 @@ function getProviderHelper(provider, task) {
 
 // package.json
 var name = "@huggingface/inference";
-var version = "3.9.1";
+var version = "3.9.2";
 
 // src/providers/consts.ts
 var HARDCODED_MODEL_INFERENCE_MAPPING = {
@@ -1331,22 +1323,6 @@ async function getInferenceProviderMapping(params, options) {
         `Model ${params.modelId} is in staging mode for provider ${params.provider}. Meant for test purposes only.`
       );
     }
-    if (providerMapping.adapter === "lora") {
-      const treeResp = await (options?.fetch ?? fetch)(`${HF_HUB_URL}/api/models/${params.modelId}/tree/main`);
-      if (!treeResp.ok) {
-        throw new Error(`Unable to fetch the model tree for ${params.modelId}.`);
-      }
-      const tree = await treeResp.json();
-      const adapterWeightsPath = tree.find(({ type, path }) => type === "file" && path.endsWith(".safetensors"))?.path;
-      if (!adapterWeightsPath) {
-        throw new Error(`No .safetensors file found in the model tree for ${params.modelId}.`);
-      }
-      return {
-        ...providerMapping,
-        hfModelId: params.modelId,
-        adapterWeightsPath
-      };
-    }
     return { ...providerMapping, hfModelId: params.modelId };
   }
   return null;

package/dist/src/lib/getInferenceProviderMapping.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import type { WidgetType } from "@huggingface/tasks";
 import type { InferenceProvider, ModelId } from "../types";
-export declare const inferenceProviderMappingCache: Map<string, Partial<Record<"black-forest-labs" | "cerebras" | "cohere" | "fal-ai" | "featherless-ai" | "fireworks-ai" | "groq" | "hf-inference" | "hyperbolic" | "nebius" | "novita" | "nscale" | "openai" | "replicate" | "sambanova" | "together", Omit<InferenceProviderModelMapping, "hfModelId"
-export type InferenceProviderMapping = Partial<Record<InferenceProvider, Omit<InferenceProviderModelMapping, "hfModelId"
+export declare const inferenceProviderMappingCache: Map<string, Partial<Record<"black-forest-labs" | "cerebras" | "cohere" | "fal-ai" | "featherless-ai" | "fireworks-ai" | "groq" | "hf-inference" | "hyperbolic" | "nebius" | "novita" | "nscale" | "openai" | "replicate" | "sambanova" | "together", Omit<InferenceProviderModelMapping, "hfModelId">>>>;
+export type InferenceProviderMapping = Partial<Record<InferenceProvider, Omit<InferenceProviderModelMapping, "hfModelId">>>;
 export interface InferenceProviderModelMapping {
     adapter?: string;
     adapterWeightsPath?: string;

package/dist/src/lib/getInferenceProviderMapping.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"getInferenceProviderMapping.d.ts","sourceRoot":"","sources":["../../../src/lib/getInferenceProviderMapping.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,iBAAiB,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAM3D,eAAO,MAAM,6BAA6B,
+{"version":3,"file":"getInferenceProviderMapping.d.ts","sourceRoot":"","sources":["../../../src/lib/getInferenceProviderMapping.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,iBAAiB,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAM3D,eAAO,MAAM,6BAA6B,0SAA+C,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG,OAAO,CAC7C,MAAM,CAAC,iBAAiB,EAAE,IAAI,CAAC,6BAA6B,EAAE,WAAW,CAAC,CAAC,CAC3E,CAAC;AAEF,MAAM,WAAW,6BAA6B;IAC7C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,SAAS,EAAE,OAAO,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,IAAI,EAAE,UAAU,CAAC;CACjB;AAED,wBAAsB,2BAA2B,CAChD,MAAM,EAAE;IACP,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,iBAAiB,CAAC;IAC5B,IAAI,EAAE,UAAU,CAAC;CACjB,EACD,OAAO,EAAE;IACR,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;CACtE,GACC,OAAO,CAAC,6BAA6B,GAAG,IAAI,CAAC,CA+C/C"}

package/dist/src/providers/fal-ai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"fal-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fal-ai.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAAE,gCAAgC,EAAE,MAAM,oBAAoB,CAAC;AAG3E,OAAO,KAAK,EAAE,UAAU,EAAE,YAAY,EAAW,SAAS,EAAE,MAAM,UAAU,CAAC;AAG7E,OAAO,EACN,KAAK,oCAAoC,EACzC,kBAAkB,EAClB,KAAK,qBAAqB,EAC1B,KAAK,qBAAqB,EAC1B,MAAM,kBAAkB,CAAC;AAG1B,MAAM,WAAW,gBAAgB;IAChC,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,MAAM,CAAC;CACrB;AAED,UAAU,sBAAsB;IAC/B,MAAM,EAAE,KAAK,CAAC;QACb,GAAG,EAAE,MAAM,CAAC;KACZ,CAAC,CAAC;CACH;AAYD,eAAO,MAAM,2BAA2B,UAA0D,CAAC;AAEnG,uBAAe,SAAU,SAAQ,kBAAkB;gBACtC,GAAG,CAAC,EAAE,MAAM;IAIxB,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAG3D,SAAS,CAAC,MAAM,EAAE,SAAS,GAAG,MAAM;IAG3B,cAAc,CAAC,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;CAUtF;AAMD,qBAAa,oBAAqB,SAAQ,SAAU,YAAW,qBAAqB;IAC1E,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;
+{"version":3,"file":"fal-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fal-ai.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAAE,gCAAgC,EAAE,MAAM,oBAAoB,CAAC;AAG3E,OAAO,KAAK,EAAE,UAAU,EAAE,YAAY,EAAW,SAAS,EAAE,MAAM,UAAU,CAAC;AAG7E,OAAO,EACN,KAAK,oCAAoC,EACzC,kBAAkB,EAClB,KAAK,qBAAqB,EAC1B,KAAK,qBAAqB,EAC1B,MAAM,kBAAkB,CAAC;AAG1B,MAAM,WAAW,gBAAgB;IAChC,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,MAAM,CAAC;CACrB;AAED,UAAU,sBAAsB;IAC/B,MAAM,EAAE,KAAK,CAAC;QACb,GAAG,EAAE,MAAM,CAAC;KACZ,CAAC,CAAC;CACH;AAYD,eAAO,MAAM,2BAA2B,UAA0D,CAAC;AAEnG,uBAAe,SAAU,SAAQ,kBAAkB;gBACtC,GAAG,CAAC,EAAE,MAAM;IAIxB,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAG3D,SAAS,CAAC,MAAM,EAAE,SAAS,GAAG,MAAM;IAG3B,cAAc,CAAC,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;CAUtF;AAMD,qBAAa,oBAAqB,SAAQ,SAAU,YAAW,qBAAqB;IAC1E,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAuBrD,WAAW,CAAC,QAAQ,EAAE,sBAAsB,EAAE,UAAU,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;CAkBjH;AAED,qBAAa,oBAAqB,SAAQ,SAAU,YAAW,qBAAqB;;IAI1E,SAAS,CAAC,MAAM,EAAE,SAAS,GAAG,MAAM;IAMpC,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAQrD,WAAW,CACzB,QAAQ,EAAE,gBAAgB,EAC1B,GAAG,CAAC,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAC9B,OAAO,CAAC,IAAI,CAAC;CA8DhB;AAED,qBAAa,mCAAoC,SAAQ,SAAU,YAAW,oCAAoC;IACxG,cAAc,CAAC,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAKvE,WAAW,CAAC,QAAQ,EAAE,OAAO,GAAG,OAAO,CAAC,gCAAgC,CAAC;CASxF;AAED,qBAAa,qBAAsB,SAAQ,SAAS;IAC1C,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAQrD,WAAW,CAAC,QAAQ,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;CAqB5D"}

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@huggingface/inference",
-  "version": "3.9.1",
+  "version": "3.9.2",
   "packageManager": "pnpm@8.10.5",
   "license": "MIT",
   "author": "Hugging Face and Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -40,7 +40,7 @@
   },
   "type": "module",
   "dependencies": {
-    "@huggingface/tasks": "^0.18.
+    "@huggingface/tasks": "^0.18.11",
     "@huggingface/jinja": "^0.3.4"
   },
   "devDependencies": {

package/src/lib/getInferenceProviderMapping.ts
CHANGED
@@ -8,7 +8,7 @@ import { typedInclude } from "../utils/typedInclude";
 export const inferenceProviderMappingCache = new Map<ModelId, InferenceProviderMapping>();
 
 export type InferenceProviderMapping = Partial<
-  Record<InferenceProvider, Omit<InferenceProviderModelMapping, "hfModelId"
+  Record<InferenceProvider, Omit<InferenceProviderModelMapping, "hfModelId">>
 >;
 
 export interface InferenceProviderModelMapping {
@@ -74,22 +74,6 @@ export async function getInferenceProviderMapping(
         `Model ${params.modelId} is in staging mode for provider ${params.provider}. Meant for test purposes only.`
       );
     }
-    if (providerMapping.adapter === "lora") {
-      const treeResp = await (options?.fetch ?? fetch)(`${HF_HUB_URL}/api/models/${params.modelId}/tree/main`);
-      if (!treeResp.ok) {
-        throw new Error(`Unable to fetch the model tree for ${params.modelId}.`);
-      }
-      const tree: Array<{ type: "file" | "directory"; path: string }> = await treeResp.json();
-      const adapterWeightsPath = tree.find(({ type, path }) => type === "file" && path.endsWith(".safetensors"))?.path;
-      if (!adapterWeightsPath) {
-        throw new Error(`No .safetensors file found in the model tree for ${params.modelId}.`);
-      }
-      return {
-        ...providerMapping,
-        hfModelId: params.modelId,
-        adapterWeightsPath,
-      };
-    }
     return { ...providerMapping, hfModelId: params.modelId };
   }
   return null;

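The block removed above resolved the LoRA weights file while the provider mapping was being fetched: when the mapping declared a "lora" adapter, it listed the model repo's root tree on the Hub and took the first .safetensors file, throwing otherwise. As of 3.9.2 that resolution no longer happens inside getInferenceProviderMapping. A standalone sketch of the removed logic, for reference only (the helper name is made up, and the hard-coded Hub URL stands in for the package's HF_HUB_URL constant):

// Sketch of the resolution step removed in 3.9.2 (hypothetical helper name).
async function resolveLoraWeightsPath(
    modelId: string,
    options?: { fetch?: typeof fetch }
): Promise<string> {
    const HF_HUB_URL = "https://huggingface.co"; // assumption: mirrors the package's default Hub URL
    const treeResp = await (options?.fetch ?? fetch)(`${HF_HUB_URL}/api/models/${modelId}/tree/main`);
    if (!treeResp.ok) {
        throw new Error(`Unable to fetch the model tree for ${modelId}.`);
    }
    const tree: Array<{ type: "file" | "directory"; path: string }> = await treeResp.json();
    const adapterWeightsPath = tree.find(({ type, path }) => type === "file" && path.endsWith(".safetensors"))?.path;
    if (!adapterWeightsPath) {
        throw new Error(`No .safetensors file found in the model tree for ${modelId}.`);
    }
    return adapterWeightsPath;
}
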
package/src/providers/fal-ai.ts
CHANGED
@@ -86,16 +86,6 @@ export class FalAITextToImageTask extends FalAITask implements TextToImageTaskHelper {
       ...(params.args.parameters as Record<string, unknown>),
       sync_mode: true,
       prompt: params.args.inputs,
-      ...(params.mapping?.adapter === "lora" && params.mapping.adapterWeightsPath
-        ? {
-            loras: [
-              {
-                path: buildLoraPath(params.mapping.hfModelId, params.mapping.adapterWeightsPath),
-                scale: 1,
-              },
-            ],
-          }
-        : undefined),
     };
 
     if (params.mapping?.adapter === "lora" && params.mapping.adapterWeightsPath) {
