@huggingface/inference 3.1.2 → 3.1.4-test
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +29 -14
- package/dist/index.js +29 -14
- package/dist/src/lib/makeRequestOptions.d.ts.map +1 -1
- package/dist/src/providers/fal-ai.d.ts.map +1 -1
- package/dist/src/providers/replicate.d.ts.map +1 -1
- package/dist/src/providers/together.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/lib/makeRequestOptions.ts +7 -0
- package/src/providers/fal-ai.ts +4 -0
- package/src/providers/replicate.ts +10 -0
- package/src/providers/together.ts +2 -4
package/dist/index.cjs
CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
-  for (var
-  __defProp(target,
+  for (var name2 in all)
+    __defProp(target, name2, { get: all[name2], enumerable: true });
 };
 var __copyProps = (to, from, except, desc) => {
   if (from && typeof from === "object" || typeof from === "function") {
@@ -117,6 +117,8 @@ var FAL_AI_SUPPORTED_MODEL_IDS = {
     "Warlord-K/Sana-1024": "fal-ai/sana",
     "fal/AuraFlow-v0.2": "fal-ai/aura-flow",
     "stabilityai/stable-diffusion-3.5-large": "fal-ai/stable-diffusion-v35-large",
+    "stabilityai/stable-diffusion-3.5-large-turbo": "fal-ai/stable-diffusion-v35-large/turbo",
+    "stabilityai/stable-diffusion-3.5-medium": "fal-ai/stable-diffusion-v35-medium",
     "Kwai-Kolors/Kolors": "fal-ai/kolors"
   },
   "automatic-speech-recognition": {
@@ -124,7 +126,9 @@ var FAL_AI_SUPPORTED_MODEL_IDS = {
   },
   "text-to-video": {
     "genmo/mochi-1-preview": "fal-ai/mochi-v1",
-    "tencent/HunyuanVideo": "fal-ai/hunyuan-video"
+    "tencent/HunyuanVideo": "fal-ai/hunyuan-video",
+    "THUDM/CogVideoX-5b": "fal-ai/cogvideox-5b",
+    "Lightricks/LTX-Video": "fal-ai/ltx-video"
   }
 };
 
@@ -132,8 +136,15 @@ var FAL_AI_SUPPORTED_MODEL_IDS = {
 var REPLICATE_API_BASE_URL = "https://api.replicate.com";
 var REPLICATE_SUPPORTED_MODEL_IDS = {
   "text-to-image": {
+    "black-forest-labs/FLUX.1-dev": "black-forest-labs/flux-dev",
     "black-forest-labs/FLUX.1-schnell": "black-forest-labs/flux-schnell",
-    "ByteDance/
+    "ByteDance/Hyper-SD": "bytedance/hyper-flux-16step:382cf8959fb0f0d665b26e7e80b8d6dc3faaef1510f14ce017e8c732bb3d1eb7",
+    "ByteDance/SDXL-Lightning": "bytedance/sdxl-lightning-4step:5599ed30703defd1d160a25a63321b4dec97101d98b4674bcc56e41f62f35637",
+    "playgroundai/playground-v2.5-1024px-aesthetic": "playgroundai/playground-v2.5-1024px-aesthetic:a45f82a1382bed5c7aeb861dac7c7d191b0fdf74d8d57c4a0e6ed7d4d0bf7d24",
+    "stabilityai/stable-diffusion-3.5-large-turbo": "stability-ai/stable-diffusion-3.5-large-turbo",
+    "stabilityai/stable-diffusion-3.5-large": "stability-ai/stable-diffusion-3.5-large",
+    "stabilityai/stable-diffusion-3.5-medium": "stability-ai/stable-diffusion-3.5-medium",
+    "stabilityai/stable-diffusion-xl-base-1.0": "stability-ai/sdxl:7762fd07cf82c948538e41f63f77d685e02b063e37e496e96eefd46c929f9bdc"
   },
   "text-to-speech": {
     "OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:39a59319327b27327fa3095149c5a746e7f2aee18c75055c3368237a6503cd26"
@@ -181,16 +192,14 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
     "deepseek-ai/deepseek-llm-67b-chat": "deepseek-ai/deepseek-llm-67b-chat",
     "google/gemma-2-9b-it": "google/gemma-2-9b-it",
     "google/gemma-2b-it": "google/gemma-2-27b-it",
-    "llava-hf/llava-v1.6-mistral-7b-hf": "llava-hf/llava-v1.6-mistral-7b-hf",
     "meta-llama/Llama-2-13b-chat-hf": "meta-llama/Llama-2-13b-chat-hf",
-    "meta-llama/Llama-2-70b-hf": "meta-llama/Llama-2-70b-hf",
     "meta-llama/Llama-2-7b-chat-hf": "meta-llama/Llama-2-7b-chat-hf",
     "meta-llama/Llama-3.2-11B-Vision-Instruct": "meta-llama/Llama-Vision-Free",
     "meta-llama/Llama-3.2-3B-Instruct": "meta-llama/Llama-3.2-3B-Instruct-Turbo",
     "meta-llama/Llama-3.2-90B-Vision-Instruct": "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
     "meta-llama/Llama-3.3-70B-Instruct": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3-70B-Instruct": "meta-llama/Llama-3-70b-chat-hf",
-    "meta-llama/Meta-Llama-3-8B-Instruct": "
+    "meta-llama/Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-405B-Instruct": "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
@@ -209,7 +218,7 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
     "scb10x/llama-3-typhoon-v1.5x-70b-instruct-awq": "scb10x/scb10x-llama3-typhoon-v1-5x-4f316"
   },
   "text-generation": {
-    "meta-llama/
+    "meta-llama/Llama-2-70b-hf": "meta-llama/Llama-2-70b-hf",
     "mistralai/Mixtral-8x7B-v0.1": "mistralai/Mixtral-8x7B-v0.1"
   }
 };
@@ -219,6 +228,10 @@ function isUrl(modelOrUrl) {
   return /^http(s?):/.test(modelOrUrl) || modelOrUrl.startsWith("/");
 }
 
+// package.json
+var name = "@huggingface/inference";
+var version = "3.1.4-test";
+
 // src/lib/makeRequestOptions.ts
 var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
 var tasks = null;
@@ -259,6 +272,8 @@ async function makeRequestOptions(args, options) {
   if (accessToken) {
     headers["Authorization"] = provider === "fal-ai" && authMethod === "provider-key" ? `Key ${accessToken}` : `Bearer ${accessToken}`;
   }
+  const ownUserAgent = `${name}/${version}`;
+  headers["User-Agent"] = [ownUserAgent, typeof navigator !== "undefined" ? navigator.userAgent : void 0].filter((x) => x !== void 0).join(" ");
   const binary = "data" in args && !!args.data;
   if (!binary) {
     headers["Content-Type"] = "application/json";
@@ -284,8 +299,8 @@ async function makeRequestOptions(args, options) {
     credentials = "include";
   }
   if (provider === "replicate") {
-    const
-    otherArgs = { input: otherArgs, version };
+    const version2 = model.includes(":") ? model.split(":")[1] : void 0;
+    otherArgs = { input: otherArgs, version: version2 };
   }
   const info = {
     headers,
@@ -1299,8 +1314,8 @@ var HfInference = class {
   constructor(accessToken = "", defaultOptions = {}) {
     this.accessToken = accessToken;
     this.defaultOptions = defaultOptions;
-    for (const [
-    Object.defineProperty(this,
+    for (const [name2, fn] of Object.entries(tasks_exports)) {
+      Object.defineProperty(this, name2, {
         enumerable: false,
         value: (params, options) => (
           // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -1320,8 +1335,8 @@ var HfInferenceEndpoint = class {
   constructor(endpointUrl, accessToken = "", defaultOptions = {}) {
     accessToken;
     defaultOptions;
-    for (const [
-    Object.defineProperty(this,
+    for (const [name2, fn] of Object.entries(tasks_exports)) {
+      Object.defineProperty(this, name2, {
         enumerable: false,
         value: (params, options) => (
           // eslint-disable-next-line @typescript-eslint/no-explicit-any
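
The replicate branch above now derives the prediction version from the mapped model string: anything after a ":" in the Replicate ID is sent as "version", and the task arguments are wrapped under "input". A minimal standalone sketch of that logic (buildReplicateBody is a hypothetical helper for illustration, not part of the package):

// Mirrors the `provider === "replicate"` branch in the bundle above.
// Mapped IDs such as "stability-ai/sdxl:7762fd07..." carry a pinned version after ":".
function buildReplicateBody(
  model: string,
  args: Record<string, unknown>
): { input: Record<string, unknown>; version?: string } {
  const version = model.includes(":") ? model.split(":")[1] : undefined;
  return { input: args, version };
}

// With a version hash, `version` is populated from the mapped ID:
buildReplicateBody(
  "bytedance/sdxl-lightning-4step:5599ed30703defd1d160a25a63321b4dec97101d98b4674bcc56e41f62f35637",
  { prompt: "a watercolor fox" }
);
// Without one (e.g. "stability-ai/stable-diffusion-3.5-large"), `version` stays undefined.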
package/dist/index.js
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
-  for (var
-  __defProp(target,
+  for (var name2 in all)
+    __defProp(target, name2, { get: all[name2], enumerable: true });
 };
 
 // src/tasks/index.ts
@@ -58,6 +58,8 @@ var FAL_AI_SUPPORTED_MODEL_IDS = {
     "Warlord-K/Sana-1024": "fal-ai/sana",
     "fal/AuraFlow-v0.2": "fal-ai/aura-flow",
     "stabilityai/stable-diffusion-3.5-large": "fal-ai/stable-diffusion-v35-large",
+    "stabilityai/stable-diffusion-3.5-large-turbo": "fal-ai/stable-diffusion-v35-large/turbo",
+    "stabilityai/stable-diffusion-3.5-medium": "fal-ai/stable-diffusion-v35-medium",
     "Kwai-Kolors/Kolors": "fal-ai/kolors"
   },
   "automatic-speech-recognition": {
@@ -65,7 +67,9 @@ var FAL_AI_SUPPORTED_MODEL_IDS = {
   },
   "text-to-video": {
     "genmo/mochi-1-preview": "fal-ai/mochi-v1",
-    "tencent/HunyuanVideo": "fal-ai/hunyuan-video"
+    "tencent/HunyuanVideo": "fal-ai/hunyuan-video",
+    "THUDM/CogVideoX-5b": "fal-ai/cogvideox-5b",
+    "Lightricks/LTX-Video": "fal-ai/ltx-video"
   }
 };
 
@@ -73,8 +77,15 @@ var FAL_AI_SUPPORTED_MODEL_IDS = {
 var REPLICATE_API_BASE_URL = "https://api.replicate.com";
 var REPLICATE_SUPPORTED_MODEL_IDS = {
   "text-to-image": {
+    "black-forest-labs/FLUX.1-dev": "black-forest-labs/flux-dev",
     "black-forest-labs/FLUX.1-schnell": "black-forest-labs/flux-schnell",
-    "ByteDance/
+    "ByteDance/Hyper-SD": "bytedance/hyper-flux-16step:382cf8959fb0f0d665b26e7e80b8d6dc3faaef1510f14ce017e8c732bb3d1eb7",
+    "ByteDance/SDXL-Lightning": "bytedance/sdxl-lightning-4step:5599ed30703defd1d160a25a63321b4dec97101d98b4674bcc56e41f62f35637",
+    "playgroundai/playground-v2.5-1024px-aesthetic": "playgroundai/playground-v2.5-1024px-aesthetic:a45f82a1382bed5c7aeb861dac7c7d191b0fdf74d8d57c4a0e6ed7d4d0bf7d24",
+    "stabilityai/stable-diffusion-3.5-large-turbo": "stability-ai/stable-diffusion-3.5-large-turbo",
+    "stabilityai/stable-diffusion-3.5-large": "stability-ai/stable-diffusion-3.5-large",
+    "stabilityai/stable-diffusion-3.5-medium": "stability-ai/stable-diffusion-3.5-medium",
+    "stabilityai/stable-diffusion-xl-base-1.0": "stability-ai/sdxl:7762fd07cf82c948538e41f63f77d685e02b063e37e496e96eefd46c929f9bdc"
   },
   "text-to-speech": {
     "OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:39a59319327b27327fa3095149c5a746e7f2aee18c75055c3368237a6503cd26"
@@ -122,16 +133,14 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
     "deepseek-ai/deepseek-llm-67b-chat": "deepseek-ai/deepseek-llm-67b-chat",
     "google/gemma-2-9b-it": "google/gemma-2-9b-it",
     "google/gemma-2b-it": "google/gemma-2-27b-it",
-    "llava-hf/llava-v1.6-mistral-7b-hf": "llava-hf/llava-v1.6-mistral-7b-hf",
     "meta-llama/Llama-2-13b-chat-hf": "meta-llama/Llama-2-13b-chat-hf",
-    "meta-llama/Llama-2-70b-hf": "meta-llama/Llama-2-70b-hf",
     "meta-llama/Llama-2-7b-chat-hf": "meta-llama/Llama-2-7b-chat-hf",
     "meta-llama/Llama-3.2-11B-Vision-Instruct": "meta-llama/Llama-Vision-Free",
     "meta-llama/Llama-3.2-3B-Instruct": "meta-llama/Llama-3.2-3B-Instruct-Turbo",
     "meta-llama/Llama-3.2-90B-Vision-Instruct": "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
     "meta-llama/Llama-3.3-70B-Instruct": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3-70B-Instruct": "meta-llama/Llama-3-70b-chat-hf",
-    "meta-llama/Meta-Llama-3-8B-Instruct": "
+    "meta-llama/Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-405B-Instruct": "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
@@ -150,7 +159,7 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
     "scb10x/llama-3-typhoon-v1.5x-70b-instruct-awq": "scb10x/scb10x-llama3-typhoon-v1-5x-4f316"
   },
   "text-generation": {
-    "meta-llama/
+    "meta-llama/Llama-2-70b-hf": "meta-llama/Llama-2-70b-hf",
     "mistralai/Mixtral-8x7B-v0.1": "mistralai/Mixtral-8x7B-v0.1"
   }
 };
@@ -160,6 +169,10 @@ function isUrl(modelOrUrl) {
   return /^http(s?):/.test(modelOrUrl) || modelOrUrl.startsWith("/");
 }
 
+// package.json
+var name = "@huggingface/inference";
+var version = "3.1.4-test";
+
 // src/lib/makeRequestOptions.ts
 var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
 var tasks = null;
@@ -200,6 +213,8 @@ async function makeRequestOptions(args, options) {
   if (accessToken) {
     headers["Authorization"] = provider === "fal-ai" && authMethod === "provider-key" ? `Key ${accessToken}` : `Bearer ${accessToken}`;
   }
+  const ownUserAgent = `${name}/${version}`;
+  headers["User-Agent"] = [ownUserAgent, typeof navigator !== "undefined" ? navigator.userAgent : void 0].filter((x) => x !== void 0).join(" ");
   const binary = "data" in args && !!args.data;
   if (!binary) {
     headers["Content-Type"] = "application/json";
@@ -225,8 +240,8 @@ async function makeRequestOptions(args, options) {
     credentials = "include";
   }
   if (provider === "replicate") {
-    const
-    otherArgs = { input: otherArgs, version };
+    const version2 = model.includes(":") ? model.split(":")[1] : void 0;
+    otherArgs = { input: otherArgs, version: version2 };
   }
   const info = {
     headers,
@@ -1240,8 +1255,8 @@ var HfInference = class {
   constructor(accessToken = "", defaultOptions = {}) {
     this.accessToken = accessToken;
     this.defaultOptions = defaultOptions;
-    for (const [
-    Object.defineProperty(this,
+    for (const [name2, fn] of Object.entries(tasks_exports)) {
+      Object.defineProperty(this, name2, {
         enumerable: false,
         value: (params, options) => (
           // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -1261,8 +1276,8 @@ var HfInferenceEndpoint = class {
   constructor(endpointUrl, accessToken = "", defaultOptions = {}) {
     accessToken;
     defaultOptions;
-    for (const [
-    Object.defineProperty(this,
+    for (const [name2, fn] of Object.entries(tasks_exports)) {
+      Object.defineProperty(this, name2, {
         enumerable: false,
         value: (params, options) => (
           // eslint-disable-next-line @typescript-eslint/no-explicit-any
package/dist/src/lib/makeRequestOptions.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAOA,OAAO,KAAK,EAAE,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;
+{"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAOA,OAAO,KAAK,EAAE,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAYpE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,yFAAyF;IACzF,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,CAAC;IACnC,sCAAsC;IACtC,QAAQ,CAAC,EAAE,aAAa,CAAC;IACzB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CAuH7C"}
package/dist/src/providers/fal-ai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"fal-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fal-ai.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,mBAAmB,oBAAoB,CAAC;AAErD,KAAK,OAAO,GAAG,MAAM,CAAC;AAEtB,eAAO,MAAM,0BAA0B,EAAE,eAAe,CAAC,OAAO,
+{"version":3,"file":"fal-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fal-ai.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,mBAAmB,oBAAoB,CAAC;AAErD,KAAK,OAAO,GAAG,MAAM,CAAC;AAEtB,eAAO,MAAM,0BAA0B,EAAE,eAAe,CAAC,OAAO,CAwB/D,CAAC"}
package/dist/src/providers/replicate.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"replicate.d.ts","sourceRoot":"","sources":["../../../src/providers/replicate.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,sBAAsB,8BAA8B,CAAC;AAElE,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,eAAO,MAAM,6BAA6B,EAAE,eAAe,CAAC,WAAW,
+{"version":3,"file":"replicate.d.ts","sourceRoot":"","sources":["../../../src/providers/replicate.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,sBAAsB,8BAA8B,CAAC;AAElE,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,eAAO,MAAM,6BAA6B,EAAE,eAAe,CAAC,WAAW,CAsBtE,CAAC"}
package/dist/src/providers/together.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,qBAAqB,6BAA6B,CAAC;AAEhE;;GAEG;AACH,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,4BAA4B,EAAE,eAAe,CAAC,UAAU,
+{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,qBAAqB,6BAA6B,CAAC;AAEhE;;GAEG;AACH,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,4BAA4B,EAAE,eAAe,CAAC,UAAU,CA6CpE,CAAC"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@huggingface/inference",
-  "version": "3.1.2",
+  "version": "3.1.4-test",
   "packageManager": "pnpm@8.10.5",
   "license": "MIT",
   "author": "Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -39,7 +39,7 @@
   },
   "type": "module",
   "dependencies": {
-    "@huggingface/tasks": "^0.15.
+    "@huggingface/tasks": "^0.15.3"
   },
   "devDependencies": {
     "@types/node": "18.13.0"
package/src/lib/makeRequestOptions.ts
CHANGED
@@ -7,6 +7,7 @@ import { TOGETHER_API_BASE_URL, TOGETHER_SUPPORTED_MODEL_IDS } from "../providers/together";
 import type { InferenceProvider } from "../types";
 import type { InferenceTask, Options, RequestArgs } from "../types";
 import { isUrl } from "./isUrl";
+import { version as packageVersion, name as packageName } from "../../package.json";
 
 const HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
 
@@ -89,6 +90,12 @@ export async function makeRequestOptions(
     provider === "fal-ai" && authMethod === "provider-key" ? `Key ${accessToken}` : `Bearer ${accessToken}`;
   }
 
+  // e.g. @huggingface/inference@3.1.3
+  const ownUserAgent = `${packageName}/${packageVersion}`;
+  headers["User-Agent"] = [ownUserAgent, typeof navigator !== "undefined" ? navigator.userAgent : undefined]
+    .filter((x) => x !== undefined)
+    .join(" ");
+
   const binary = "data" in args && !!args.data;
 
   if (!binary) {
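
The hunk above builds a User-Agent that always starts with the package identifier and appends navigator.userAgent only when a navigator global exists (i.e. in browsers). A small sketch of the resulting values (buildUserAgent is a hypothetical stand-in, not an export of the package):

// Same construction as the lines added to makeRequestOptions.ts.
function buildUserAgent(packageName: string, packageVersion: string): string {
  const ownUserAgent = `${packageName}/${packageVersion}`;
  return [ownUserAgent, typeof navigator !== "undefined" ? navigator.userAgent : undefined]
    .filter((x): x is string => x !== undefined)
    .join(" ");
}

// Runtime without a navigator global: "@huggingface/inference/3.1.4-test"
// Browser: "@huggingface/inference/3.1.4-test Mozilla/5.0 (...)"
buildUserAgent("@huggingface/inference", "3.1.4-test");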
package/src/providers/fal-ai.ts
CHANGED
@@ -15,6 +15,8 @@ export const FAL_AI_SUPPORTED_MODEL_IDS: ProviderMapping<FalAiId> = {
     "Warlord-K/Sana-1024": "fal-ai/sana",
     "fal/AuraFlow-v0.2": "fal-ai/aura-flow",
     "stabilityai/stable-diffusion-3.5-large": "fal-ai/stable-diffusion-v35-large",
+    "stabilityai/stable-diffusion-3.5-large-turbo": "fal-ai/stable-diffusion-v35-large/turbo",
+    "stabilityai/stable-diffusion-3.5-medium": "fal-ai/stable-diffusion-v35-medium",
     "Kwai-Kolors/Kolors": "fal-ai/kolors",
   },
   "automatic-speech-recognition": {
@@ -23,5 +25,7 @@ export const FAL_AI_SUPPORTED_MODEL_IDS: ProviderMapping<FalAiId> = {
   "text-to-video": {
     "genmo/mochi-1-preview": "fal-ai/mochi-v1",
     "tencent/HunyuanVideo": "fal-ai/hunyuan-video",
+    "THUDM/CogVideoX-5b": "fal-ai/cogvideox-5b",
+    "Lightricks/LTX-Video": "fal-ai/ltx-video",
   },
 };
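
The new fal.ai entries above make those checkpoints addressable by their Hub IDs when routing through fal.ai. A usage sketch for one of the added text-to-image mappings, assuming the v3 client API where provider is passed alongside the model (the token and prompt are placeholders):

import { HfInference } from "@huggingface/inference";

const client = new HfInference("hf_xxx"); // placeholder token

// "stabilityai/stable-diffusion-3.5-medium" now maps to "fal-ai/stable-diffusion-v35-medium".
const image = await client.textToImage({
  model: "stabilityai/stable-diffusion-3.5-medium",
  provider: "fal-ai",
  inputs: "an isometric illustration of a tiny greenhouse",
});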
package/src/providers/replicate.ts
CHANGED
@@ -6,9 +6,19 @@ type ReplicateId = string;
 
 export const REPLICATE_SUPPORTED_MODEL_IDS: ProviderMapping<ReplicateId> = {
   "text-to-image": {
+    "black-forest-labs/FLUX.1-dev": "black-forest-labs/flux-dev",
     "black-forest-labs/FLUX.1-schnell": "black-forest-labs/flux-schnell",
+    "ByteDance/Hyper-SD":
+      "bytedance/hyper-flux-16step:382cf8959fb0f0d665b26e7e80b8d6dc3faaef1510f14ce017e8c732bb3d1eb7",
     "ByteDance/SDXL-Lightning":
       "bytedance/sdxl-lightning-4step:5599ed30703defd1d160a25a63321b4dec97101d98b4674bcc56e41f62f35637",
+    "playgroundai/playground-v2.5-1024px-aesthetic":
+      "playgroundai/playground-v2.5-1024px-aesthetic:a45f82a1382bed5c7aeb861dac7c7d191b0fdf74d8d57c4a0e6ed7d4d0bf7d24",
+    "stabilityai/stable-diffusion-3.5-large-turbo": "stability-ai/stable-diffusion-3.5-large-turbo",
+    "stabilityai/stable-diffusion-3.5-large": "stability-ai/stable-diffusion-3.5-large",
+    "stabilityai/stable-diffusion-3.5-medium": "stability-ai/stable-diffusion-3.5-medium",
+    "stabilityai/stable-diffusion-xl-base-1.0":
+      "stability-ai/sdxl:7762fd07cf82c948538e41f63f77d685e02b063e37e496e96eefd46c929f9bdc",
   },
   "text-to-speech": {
     "OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:39a59319327b27327fa3095149c5a746e7f2aee18c75055c3368237a6503cd26",
package/src/providers/together.ts
CHANGED
@@ -26,16 +26,14 @@ export const TOGETHER_SUPPORTED_MODEL_IDS: ProviderMapping<TogetherId> = {
     "deepseek-ai/deepseek-llm-67b-chat": "deepseek-ai/deepseek-llm-67b-chat",
     "google/gemma-2-9b-it": "google/gemma-2-9b-it",
     "google/gemma-2b-it": "google/gemma-2-27b-it",
-    "llava-hf/llava-v1.6-mistral-7b-hf": "llava-hf/llava-v1.6-mistral-7b-hf",
     "meta-llama/Llama-2-13b-chat-hf": "meta-llama/Llama-2-13b-chat-hf",
-    "meta-llama/Llama-2-70b-hf": "meta-llama/Llama-2-70b-hf",
     "meta-llama/Llama-2-7b-chat-hf": "meta-llama/Llama-2-7b-chat-hf",
     "meta-llama/Llama-3.2-11B-Vision-Instruct": "meta-llama/Llama-Vision-Free",
     "meta-llama/Llama-3.2-3B-Instruct": "meta-llama/Llama-3.2-3B-Instruct-Turbo",
     "meta-llama/Llama-3.2-90B-Vision-Instruct": "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
     "meta-llama/Llama-3.3-70B-Instruct": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3-70B-Instruct": "meta-llama/Llama-3-70b-chat-hf",
-    "meta-llama/Meta-Llama-3-8B-Instruct": "
+    "meta-llama/Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-405B-Instruct": "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
     "meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
@@ -54,7 +52,7 @@ export const TOGETHER_SUPPORTED_MODEL_IDS: ProviderMapping<TogetherId> = {
     "scb10x/llama-3-typhoon-v1.5x-70b-instruct-awq": "scb10x/scb10x-llama3-typhoon-v1-5x-4f316",
   },
   "text-generation": {
-    "meta-llama/
+    "meta-llama/Llama-2-70b-hf": "meta-llama/Llama-2-70b-hf",
     "mistralai/Mixtral-8x7B-v0.1": "mistralai/Mixtral-8x7B-v0.1",
   },
 };
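
With the Together change above, meta-llama/Meta-Llama-3-8B-Instruct now resolves to the Turbo deployment. A sketch of a chat-completion call that would exercise that remapping, again assuming the v3 signature with provider in the arguments (the token is a placeholder):

import { HfInference } from "@huggingface/inference";

const client = new HfInference("hf_xxx"); // placeholder token

const out = await client.chatCompletion({
  model: "meta-llama/Meta-Llama-3-8B-Instruct", // served as Meta-Llama-3-8B-Instruct-Turbo on Together
  provider: "together",
  messages: [{ role: "user", content: "Summarize this diff in one sentence." }],
  max_tokens: 64,
});

console.log(out.choices[0].message.content);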