@huggingface/inference 3.6.0 → 3.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +240 -71
- package/dist/index.js +240 -60
- package/dist/src/lib/makeRequestOptions.d.ts.map +1 -1
- package/dist/src/providers/black-forest-labs.d.ts.map +1 -1
- package/dist/src/providers/cerebras.d.ts.map +1 -1
- package/dist/src/providers/cohere.d.ts.map +1 -1
- package/dist/src/providers/fal-ai.d.ts +6 -16
- package/dist/src/providers/fal-ai.d.ts.map +1 -1
- package/dist/src/providers/fireworks-ai.d.ts.map +1 -1
- package/dist/src/providers/hf-inference.d.ts.map +1 -1
- package/dist/src/providers/hyperbolic.d.ts.map +1 -1
- package/dist/src/providers/nebius.d.ts.map +1 -1
- package/dist/src/providers/novita.d.ts.map +1 -1
- package/dist/src/providers/openai.d.ts.map +1 -1
- package/dist/src/providers/replicate.d.ts.map +1 -1
- package/dist/src/providers/sambanova.d.ts.map +1 -1
- package/dist/src/providers/together.d.ts.map +1 -1
- package/dist/src/snippets/getInferenceSnippets.d.ts.map +1 -1
- package/dist/src/snippets/templates.exported.d.ts +2 -0
- package/dist/src/snippets/templates.exported.d.ts.map +1 -0
- package/dist/src/tasks/cv/textToVideo.d.ts.map +1 -1
- package/dist/src/types.d.ts +4 -2
- package/dist/src/types.d.ts.map +1 -1
- package/dist/test/InferenceClient.spec.d.ts.map +1 -1
- package/package.json +10 -15
- package/src/lib/makeRequestOptions.ts +3 -1
- package/src/providers/black-forest-labs.ts +6 -2
- package/src/providers/cerebras.ts +6 -2
- package/src/providers/cohere.ts +6 -2
- package/src/providers/fal-ai.ts +85 -3
- package/src/providers/fireworks-ai.ts +6 -2
- package/src/providers/hf-inference.ts +6 -2
- package/src/providers/hyperbolic.ts +6 -2
- package/src/providers/nebius.ts +6 -2
- package/src/providers/novita.ts +5 -2
- package/src/providers/openai.ts +6 -2
- package/src/providers/replicate.ts +6 -2
- package/src/providers/sambanova.ts +6 -2
- package/src/providers/together.ts +6 -2
- package/src/snippets/getInferenceSnippets.ts +6 -24
- package/src/snippets/templates.exported.ts +72 -0
- package/src/tasks/cv/textToVideo.ts +5 -21
- package/src/types.ts +5 -2
- package/dist/browser/index.cjs +0 -1652
- package/dist/browser/index.js +0 -1652
- package/src/snippets/templates/js/fetch/basic.jinja +0 -19
- package/src/snippets/templates/js/fetch/basicAudio.jinja +0 -19
- package/src/snippets/templates/js/fetch/basicImage.jinja +0 -19
- package/src/snippets/templates/js/fetch/textToAudio.jinja +0 -41
- package/src/snippets/templates/js/fetch/textToImage.jinja +0 -19
- package/src/snippets/templates/js/fetch/zeroShotClassification.jinja +0 -22
- package/src/snippets/templates/js/huggingface.js/basic.jinja +0 -11
- package/src/snippets/templates/js/huggingface.js/basicAudio.jinja +0 -13
- package/src/snippets/templates/js/huggingface.js/basicImage.jinja +0 -13
- package/src/snippets/templates/js/huggingface.js/conversational.jinja +0 -11
- package/src/snippets/templates/js/huggingface.js/conversationalStream.jinja +0 -19
- package/src/snippets/templates/js/huggingface.js/textToImage.jinja +0 -11
- package/src/snippets/templates/js/huggingface.js/textToVideo.jinja +0 -10
- package/src/snippets/templates/js/openai/conversational.jinja +0 -13
- package/src/snippets/templates/js/openai/conversationalStream.jinja +0 -22
- package/src/snippets/templates/python/fal_client/textToImage.jinja +0 -11
- package/src/snippets/templates/python/huggingface_hub/basic.jinja +0 -4
- package/src/snippets/templates/python/huggingface_hub/basicAudio.jinja +0 -1
- package/src/snippets/templates/python/huggingface_hub/basicImage.jinja +0 -1
- package/src/snippets/templates/python/huggingface_hub/conversational.jinja +0 -6
- package/src/snippets/templates/python/huggingface_hub/conversationalStream.jinja +0 -8
- package/src/snippets/templates/python/huggingface_hub/documentQuestionAnswering.jinja +0 -5
- package/src/snippets/templates/python/huggingface_hub/imageToImage.jinja +0 -6
- package/src/snippets/templates/python/huggingface_hub/importInferenceClient.jinja +0 -6
- package/src/snippets/templates/python/huggingface_hub/textToImage.jinja +0 -5
- package/src/snippets/templates/python/huggingface_hub/textToVideo.jinja +0 -4
- package/src/snippets/templates/python/openai/conversational.jinja +0 -13
- package/src/snippets/templates/python/openai/conversationalStream.jinja +0 -15
- package/src/snippets/templates/python/requests/basic.jinja +0 -7
- package/src/snippets/templates/python/requests/basicAudio.jinja +0 -7
- package/src/snippets/templates/python/requests/basicImage.jinja +0 -7
- package/src/snippets/templates/python/requests/conversational.jinja +0 -9
- package/src/snippets/templates/python/requests/conversationalStream.jinja +0 -16
- package/src/snippets/templates/python/requests/documentQuestionAnswering.jinja +0 -13
- package/src/snippets/templates/python/requests/imageToImage.jinja +0 -15
- package/src/snippets/templates/python/requests/importRequests.jinja +0 -10
- package/src/snippets/templates/python/requests/tabular.jinja +0 -9
- package/src/snippets/templates/python/requests/textToAudio.jinja +0 -23
- package/src/snippets/templates/python/requests/textToImage.jinja +0 -14
- package/src/snippets/templates/python/requests/zeroShotClassification.jinja +0 -8
- package/src/snippets/templates/python/requests/zeroShotImageClassification.jinja +0 -14
- package/src/snippets/templates/sh/curl/basic.jinja +0 -7
- package/src/snippets/templates/sh/curl/basicAudio.jinja +0 -5
- package/src/snippets/templates/sh/curl/basicImage.jinja +0 -5
- package/src/snippets/templates/sh/curl/conversational.jinja +0 -7
- package/src/snippets/templates/sh/curl/conversationalStream.jinja +0 -7
- package/src/snippets/templates/sh/curl/zeroShotClassification.jinja +0 -5
package/dist/index.js
CHANGED

```diff
@@ -47,6 +47,9 @@ var HF_ROUTER_URL = "https://router.huggingface.co";
 
 // src/providers/black-forest-labs.ts
 var BLACK_FOREST_LABS_AI_API_BASE_URL = "https://api.us1.bfl.ai";
+var makeBaseUrl = () => {
+  return BLACK_FOREST_LABS_AI_API_BASE_URL;
+};
 var makeBody = (params) => {
   return params.args;
 };
@@ -61,7 +64,7 @@ var makeUrl = (params) => {
   return `${params.baseUrl}/v1/${params.model}`;
 };
 var BLACK_FOREST_LABS_CONFIG = {
-  baseUrl: BLACK_FOREST_LABS_AI_API_BASE_URL,
+  makeBaseUrl,
   makeBody,
   makeHeaders,
   makeUrl
@@ -69,6 +72,9 @@ var BLACK_FOREST_LABS_CONFIG = {
 
 // src/providers/cerebras.ts
 var CEREBRAS_API_BASE_URL = "https://api.cerebras.ai";
+var makeBaseUrl2 = () => {
+  return CEREBRAS_API_BASE_URL;
+};
 var makeBody2 = (params) => {
   return {
     ...params.args,
@@ -82,7 +88,7 @@ var makeUrl2 = (params) => {
   return `${params.baseUrl}/v1/chat/completions`;
 };
 var CEREBRAS_CONFIG = {
-  baseUrl: CEREBRAS_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl2,
   makeBody: makeBody2,
   makeHeaders: makeHeaders2,
   makeUrl: makeUrl2
@@ -90,6 +96,9 @@ var CEREBRAS_CONFIG = {
 
 // src/providers/cohere.ts
 var COHERE_API_BASE_URL = "https://api.cohere.com";
+var makeBaseUrl3 = () => {
+  return COHERE_API_BASE_URL;
+};
 var makeBody3 = (params) => {
   return {
     ...params.args,
@@ -103,14 +112,40 @@ var makeUrl3 = (params) => {
   return `${params.baseUrl}/compatibility/v1/chat/completions`;
 };
 var COHERE_CONFIG = {
-  baseUrl: COHERE_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl3,
   makeBody: makeBody3,
   makeHeaders: makeHeaders3,
   makeUrl: makeUrl3
 };
 
+// src/lib/InferenceOutputError.ts
+var InferenceOutputError = class extends TypeError {
+  constructor(message) {
+    super(
+      `Invalid inference output: ${message}. Use the 'request' method with the same parameters to do a custom call with no type checking.`
+    );
+    this.name = "InferenceOutputError";
+  }
+};
+
+// src/lib/isUrl.ts
+function isUrl(modelOrUrl) {
+  return /^http(s?):/.test(modelOrUrl) || modelOrUrl.startsWith("/");
+}
+
+// src/utils/delay.ts
+function delay(ms) {
+  return new Promise((resolve) => {
+    setTimeout(() => resolve(), ms);
+  });
+}
+
 // src/providers/fal-ai.ts
 var FAL_AI_API_BASE_URL = "https://fal.run";
+var FAL_AI_API_BASE_URL_QUEUE = "https://queue.fal.run";
+var makeBaseUrl4 = (task) => {
+  return task === "text-to-video" ? FAL_AI_API_BASE_URL_QUEUE : FAL_AI_API_BASE_URL;
+};
 var makeBody4 = (params) => {
   return params.args;
 };
```
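The pattern repeated above is the core refactor in this release: each provider config swaps its static base-URL field for a `makeBaseUrl()` function, so a provider can choose its host per task (also note `InferenceOutputError`, `isUrl`, and `delay` were hoisted earlier in the bundle because the fal-ai module now uses them). A rough sketch of the shape this implies; the field names come from the diff itself, but the type is illustrative rather than the package's exact `ProviderConfig`:

```ts
// Illustrative sketch, not the package's actual types.
type InferenceTask = string; // e.g. "text-to-video"

interface ProviderConfigSketch {
  makeBaseUrl: (task?: InferenceTask) => string; // was a static string before this release
  makeBody: (params: { args: Record<string, unknown> }) => unknown;
  makeHeaders: (params: { accessToken?: string }) => Record<string, string>;
  makeUrl: (params: { baseUrl: string; model: string; task?: InferenceTask }) => string;
}

// Most providers just close over a constant...
const CEREBRAS_API_BASE_URL = "https://api.cerebras.ai";
const makeCerebrasBaseUrl = (): string => CEREBRAS_API_BASE_URL;

// ...while fal-ai (below) picks a different host for queued text-to-video jobs:
const makeFalBaseUrl = (task?: InferenceTask): string =>
  task === "text-to-video" ? "https://queue.fal.run" : "https://fal.run";

console.log(makeCerebrasBaseUrl()); // https://api.cerebras.ai
console.log(makeFalBaseUrl("text-to-video")); // https://queue.fal.run
```

Turning the base URL into a function is what makes the fal-ai queue routing further down possible without touching any other provider.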
```diff
@@ -120,17 +155,64 @@ var makeHeaders4 = (params) => {
   };
 };
 var makeUrl4 = (params) => {
-  return `${params.baseUrl}/${params.model}`;
+  const baseUrl = `${params.baseUrl}/${params.model}`;
+  if (params.authMethod !== "provider-key" && params.task === "text-to-video") {
+    return `${baseUrl}?_subdomain=queue`;
+  }
+  return baseUrl;
 };
 var FAL_AI_CONFIG = {
-  baseUrl: FAL_AI_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl4,
   makeBody: makeBody4,
   makeHeaders: makeHeaders4,
   makeUrl: makeUrl4
 };
+async function pollFalResponse(res, url, headers) {
+  const requestId = res.request_id;
+  if (!requestId) {
+    throw new InferenceOutputError("No request ID found in the response");
+  }
+  let status = res.status;
+  const parsedUrl = new URL(url);
+  const baseUrl = `${parsedUrl.protocol}//${parsedUrl.host}${parsedUrl.host === "router.huggingface.co" ? "/fal-ai" : ""}`;
+  const modelId = new URL(res.response_url).pathname;
+  const queryParams = parsedUrl.search;
+  const statusUrl = `${baseUrl}${modelId}/status${queryParams}`;
+  const resultUrl = `${baseUrl}${modelId}${queryParams}`;
+  while (status !== "COMPLETED") {
+    await delay(500);
+    const statusResponse = await fetch(statusUrl, { headers });
+    if (!statusResponse.ok) {
+      throw new InferenceOutputError("Failed to fetch response status from fal-ai API");
+    }
+    try {
+      status = (await statusResponse.json()).status;
+    } catch (error) {
+      throw new InferenceOutputError("Failed to parse status response from fal-ai API");
+    }
+  }
+  const resultResponse = await fetch(resultUrl, { headers });
+  let result;
+  try {
+    result = await resultResponse.json();
+  } catch (error) {
+    throw new InferenceOutputError("Failed to parse result response from fal-ai API");
+  }
+  if (typeof result === "object" && !!result && "video" in result && typeof result.video === "object" && !!result.video && "url" in result.video && typeof result.video.url === "string" && isUrl(result.video.url)) {
+    const urlResponse = await fetch(result.video.url);
+    return await urlResponse.blob();
+  } else {
+    throw new InferenceOutputError(
+      "Expected { video: { url: string } } result format, got instead: " + JSON.stringify(result)
+    );
+  }
+}
 
 // src/providers/fireworks-ai.ts
 var FIREWORKS_AI_API_BASE_URL = "https://api.fireworks.ai";
+var makeBaseUrl5 = () => {
+  return FIREWORKS_AI_API_BASE_URL;
+};
 var makeBody5 = (params) => {
   return {
     ...params.args,
```
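`pollFalResponse`, new in this release, implements the queue workflow: the submission response carries `request_id`, `status`, and `response_url`; the function derives status and result URLs (inserting the `/fal-ai` prefix when the call is proxied through `router.huggingface.co`), polls every 500 ms until the job reports `COMPLETED`, then downloads the video and returns it as a `Blob`. Stripped of the fal-specific URL rewriting, the polling skeleton looks roughly like this (a TypeScript sketch; the names and `QueueStatus` shape are illustrative):

```ts
// Generic queue-polling sketch mirroring pollFalResponse's control flow.
interface QueueStatus {
  status: string; // e.g. "IN_QUEUE", "IN_PROGRESS", "COMPLETED"
}

const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

async function pollUntilCompleted(
  statusUrl: string,
  resultUrl: string,
  headers: Record<string, string>,
  initialStatus = "IN_QUEUE", // seeded from the submission response in the real code
  intervalMs = 500 // the diff waits 500 ms between polls
): Promise<unknown> {
  let status = initialStatus;
  while (status !== "COMPLETED") {
    await delay(intervalMs);
    const res = await fetch(statusUrl, { headers });
    if (!res.ok) {
      throw new Error(`status endpoint answered ${res.status}`);
    }
    status = ((await res.json()) as QueueStatus).status;
  }
  const result = await fetch(resultUrl, { headers });
  return result.json();
}
```

Note there is no timeout or backoff: like the shipped code, this polls indefinitely until the queue reports completion or a fetch fails.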
```diff
@@ -147,13 +229,16 @@ var makeUrl5 = (params) => {
   return `${params.baseUrl}/inference`;
 };
 var FIREWORKS_AI_CONFIG = {
-  baseUrl: FIREWORKS_AI_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl5,
   makeBody: makeBody5,
   makeHeaders: makeHeaders5,
   makeUrl: makeUrl5
 };
 
 // src/providers/hf-inference.ts
+var makeBaseUrl6 = () => {
+  return `${HF_ROUTER_URL}/hf-inference`;
+};
 var makeBody6 = (params) => {
   return {
     ...params.args,
@@ -173,7 +258,7 @@ var makeUrl6 = (params) => {
   return `${params.baseUrl}/models/${params.model}`;
 };
 var HF_INFERENCE_CONFIG = {
-  baseUrl: `${HF_ROUTER_URL}/hf-inference`,
+  makeBaseUrl: makeBaseUrl6,
   makeBody: makeBody6,
   makeHeaders: makeHeaders6,
   makeUrl: makeUrl6
@@ -181,6 +266,9 @@ var HF_INFERENCE_CONFIG = {
 
 // src/providers/hyperbolic.ts
 var HYPERBOLIC_API_BASE_URL = "https://api.hyperbolic.xyz";
+var makeBaseUrl7 = () => {
+  return HYPERBOLIC_API_BASE_URL;
+};
 var makeBody7 = (params) => {
   return {
     ...params.args,
@@ -197,7 +285,7 @@ var makeUrl7 = (params) => {
   return `${params.baseUrl}/v1/chat/completions`;
 };
 var HYPERBOLIC_CONFIG = {
-  baseUrl: HYPERBOLIC_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl7,
   makeBody: makeBody7,
   makeHeaders: makeHeaders7,
   makeUrl: makeUrl7
@@ -205,6 +293,9 @@ var HYPERBOLIC_CONFIG = {
 
 // src/providers/nebius.ts
 var NEBIUS_API_BASE_URL = "https://api.studio.nebius.ai";
+var makeBaseUrl8 = () => {
+  return NEBIUS_API_BASE_URL;
+};
 var makeBody8 = (params) => {
   return {
     ...params.args,
@@ -227,7 +318,7 @@ var makeUrl8 = (params) => {
   return params.baseUrl;
 };
 var NEBIUS_CONFIG = {
-  baseUrl: NEBIUS_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl8,
   makeBody: makeBody8,
   makeHeaders: makeHeaders8,
   makeUrl: makeUrl8
@@ -235,6 +326,9 @@ var NEBIUS_CONFIG = {
 
 // src/providers/novita.ts
 var NOVITA_API_BASE_URL = "https://api.novita.ai";
+var makeBaseUrl9 = () => {
+  return NOVITA_API_BASE_URL;
+};
 var makeBody9 = (params) => {
   return {
     ...params.args,
@@ -255,7 +349,7 @@ var makeUrl9 = (params) => {
   return params.baseUrl;
 };
 var NOVITA_CONFIG = {
-  baseUrl: NOVITA_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl9,
   makeBody: makeBody9,
   makeHeaders: makeHeaders9,
   makeUrl: makeUrl9
@@ -263,6 +357,9 @@ var NOVITA_CONFIG = {
 
 // src/providers/replicate.ts
 var REPLICATE_API_BASE_URL = "https://api.replicate.com";
+var makeBaseUrl10 = () => {
+  return REPLICATE_API_BASE_URL;
+};
 var makeBody10 = (params) => {
   return {
     input: params.args,
@@ -279,7 +376,7 @@ var makeUrl10 = (params) => {
   return `${params.baseUrl}/v1/models/${params.model}/predictions`;
 };
 var REPLICATE_CONFIG = {
-  baseUrl: REPLICATE_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl10,
   makeBody: makeBody10,
   makeHeaders: makeHeaders10,
   makeUrl: makeUrl10
@@ -287,6 +384,9 @@ var REPLICATE_CONFIG = {
 
 // src/providers/sambanova.ts
 var SAMBANOVA_API_BASE_URL = "https://api.sambanova.ai";
+var makeBaseUrl11 = () => {
+  return SAMBANOVA_API_BASE_URL;
+};
 var makeBody11 = (params) => {
   return {
     ...params.args,
@@ -303,7 +403,7 @@ var makeUrl11 = (params) => {
   return params.baseUrl;
 };
 var SAMBANOVA_CONFIG = {
-  baseUrl: SAMBANOVA_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl11,
   makeBody: makeBody11,
   makeHeaders: makeHeaders11,
   makeUrl: makeUrl11
@@ -311,6 +411,9 @@ var SAMBANOVA_CONFIG = {
 
 // src/providers/together.ts
 var TOGETHER_API_BASE_URL = "https://api.together.xyz";
+var makeBaseUrl12 = () => {
+  return TOGETHER_API_BASE_URL;
+};
 var makeBody12 = (params) => {
   return {
     ...params.args,
@@ -333,7 +436,7 @@ var makeUrl12 = (params) => {
   return params.baseUrl;
 };
 var TOGETHER_CONFIG = {
-  baseUrl: TOGETHER_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl12,
   makeBody: makeBody12,
   makeHeaders: makeHeaders12,
   makeUrl: makeUrl12
@@ -341,6 +444,9 @@ var TOGETHER_CONFIG = {
 
 // src/providers/openai.ts
 var OPENAI_API_BASE_URL = "https://api.openai.com";
+var makeBaseUrl13 = () => {
+  return OPENAI_API_BASE_URL;
+};
 var makeBody13 = (params) => {
   if (!params.chatCompletion) {
     throw new Error("OpenAI only supports chat completions.");
```
```diff
@@ -360,21 +466,16 @@ var makeUrl13 = (params) => {
   return `${params.baseUrl}/v1/chat/completions`;
 };
 var OPENAI_CONFIG = {
-  baseUrl: OPENAI_API_BASE_URL,
+  makeBaseUrl: makeBaseUrl13,
   makeBody: makeBody13,
   makeHeaders: makeHeaders13,
   makeUrl: makeUrl13,
   clientSideRoutingOnly: true
 };
 
-// src/lib/isUrl.ts
-function isUrl(modelOrUrl) {
-  return /^http(s?):/.test(modelOrUrl) || modelOrUrl.startsWith("/");
-}
-
 // package.json
 var name = "@huggingface/inference";
-var version = "3.6.0";
+var version = "3.6.2";
 
 // src/providers/consts.ts
 var HARDCODED_MODEL_ID_MAPPING = {
```
```diff
@@ -513,7 +614,8 @@ function makeRequestOptionsFromResolvedModel(resolvedModel, args, options) {
     return "none";
   })();
   const url = endpointUrl ? chatCompletion2 ? endpointUrl + `/v1/chat/completions` : endpointUrl : providerConfig.makeUrl({
-    baseUrl: authMethod !== "provider-key" ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", provider) : providerConfig.baseUrl,
+    authMethod,
+    baseUrl: authMethod !== "provider-key" ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", provider) : providerConfig.makeBaseUrl(task),
     model: resolvedModel,
     chatCompletion: chatCompletion2,
     task
```
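The two added lines are the consumer side of the `makeBaseUrl` refactor: `makeUrl` now also receives `authMethod`, and the base URL is picked per request. Anything other than a direct provider key routes the call through the Hugging Face proxy for that provider; only a provider key goes straight to the host from `makeBaseUrl(task)`. In isolation (the proxy template value is an assumption consistent with `HF_ROUTER_URL` earlier in this bundle, and the non-"provider-key" auth-method names are illustrative):

```ts
// Illustrative helper; HF_HUB_INFERENCE_PROXY_TEMPLATE's exact value is assumed.
const HF_HUB_INFERENCE_PROXY_TEMPLATE = "https://router.huggingface.co/{{PROVIDER}}";

function resolveBaseUrl(
  authMethod: "provider-key" | "hf-token" | "none",
  provider: string,
  makeBaseUrl: (task?: string) => string,
  task?: string
): string {
  return authMethod !== "provider-key"
    ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", provider)
    : makeBaseUrl(task);
}

const falBaseUrl = (task?: string) =>
  task === "text-to-video" ? "https://queue.fal.run" : "https://fal.run";

console.log(resolveBaseUrl("hf-token", "fal-ai", falBaseUrl, "text-to-video"));
// https://router.huggingface.co/fal-ai
console.log(resolveBaseUrl("provider-key", "fal-ai", falBaseUrl, "text-to-video"));
// https://queue.fal.run
```

This is also why fal-ai's `makeUrl4` above appends `?_subdomain=queue` for proxied text-to-video calls: the proxy host is the same either way, so the queue routing is presumably signalled in the query string instead.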
```diff
@@ -777,16 +879,6 @@ async function* streamingRequest(args, options) {
   }
 }
 
-// src/lib/InferenceOutputError.ts
-var InferenceOutputError = class extends TypeError {
-  constructor(message) {
-    super(
-      `Invalid inference output: ${message}. Use the 'request' method with the same parameters to do a custom call with no type checking.`
-    );
-    this.name = "InferenceOutputError";
-  }
-};
-
 // src/utils/pick.ts
 function pick(o, props) {
   return Object.assign(
```
```diff
@@ -1001,13 +1093,6 @@ async function objectDetection(args, options) {
   return res;
 }
 
-// src/utils/delay.ts
-function delay(ms) {
-  return new Promise((resolve) => {
-    setTimeout(() => resolve(), ms);
-  });
-}
-
 // src/tasks/cv/textToImage.ts
 function getResponseFormatArg(provider) {
   switch (provider) {
```
```diff
@@ -1179,12 +1264,8 @@ async function textToVideo(args, options) {
     task: "text-to-video"
   });
   if (args.provider === "fal-ai") {
-    const isValidOutput = typeof res === "object" && !!res && "video" in res && typeof res.video === "object" && !!res.video && "url" in res.video && typeof res.video.url === "string" && isUrl(res.video.url);
-    if (!isValidOutput) {
-      throw new InferenceOutputError("Expected { video: { url: string } }");
-    }
-    const urlResponse = await fetch(res.video.url);
-    return await urlResponse.blob();
+    const { url, info } = await makeRequestOptions(args, { ...options, task: "text-to-video" });
+    return await pollFalResponse(res, url, info.headers);
   } else if (args.provider === "novita") {
     const isValidOutput = typeof res === "object" && !!res && "video" in res && typeof res.video === "object" && !!res.video && "video_url" in res.video && typeof res.video.video_url === "string" && isUrl(res.video.video_url);
     if (!isValidOutput) {
```
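For callers nothing changes: `textToVideo` with `provider: "fal-ai"` still resolves to a `Blob`; the queue submission and polling now happen inside the library instead of the video being expected in the first response. A typical call, matching the `huggingface.js` snippet templates added below in this same release (the token and model id are placeholders):

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient("hf_***"); // placeholder access token

// The library submits to the fal.ai queue, polls until COMPLETED,
// and only then resolves with the downloaded video.
const video: Blob = await client.textToVideo({
  provider: "fal-ai",
  model: "some-org/some-text-to-video-model", // placeholder model id
  inputs: "A cat playing piano on stage",
});
```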
```diff
@@ -1609,9 +1690,116 @@ import {
   getModelInputSnippet
 } from "@huggingface/tasks";
 import { Template } from "@huggingface/jinja";
-
-
-
+
+// src/snippets/templates.exported.ts
+var templates = {
+  "js": {
+    "fetch": {
+      "basic": 'async function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "application/json",\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.json();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});',
+      "basicAudio": 'async function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "audio/flac"\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.json();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});',
+      "basicImage": 'async function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "image/jpeg"\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.json();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});',
+      "textToAudio": '{% if model.library_name == "transformers" %}\nasync function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "application/json",\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.blob();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n // Returns a byte object of the Audio wavform. Use it directly!\n});\n{% else %}\nasync function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "application/json",\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.json();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});\n{% endif %} ',
+      "textToImage": 'async function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "application/json",\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.blob();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n // Use image\n});',
+      "zeroShotClassification": 'async function query(data) {\n const response = await fetch(\n "{{ fullUrl }}",\n {\n headers: {\n Authorization: "{{ authorizationHeader }}",\n "Content-Type": "application/json",\n },\n method: "POST",\n body: JSON.stringify(data),\n }\n );\n const result = await response.json();\n return result;\n}\n\nquery({\n inputs: {{ providerInputs.asObj.inputs }},\n parameters: { candidate_labels: ["refund", "legal", "faq"] }\n}).then((response) => {\n console.log(JSON.stringify(response));\n});'
+    },
+    "huggingface.js": {
+      "basic": 'import { InferenceClient } from "@huggingface/inference";\n\nconst client = new InferenceClient("{{ accessToken }}");\n\nconst output = await client.{{ methodName }}({\n model: "{{ model.id }}",\n inputs: {{ inputs.asObj.inputs }},\n provider: "{{ provider }}",\n});\n\nconsole.log(output);',
+      "basicAudio": 'import { InferenceClient } from "@huggingface/inference";\n\nconst client = new InferenceClient("{{ accessToken }}");\n\nconst data = fs.readFileSync({{inputs.asObj.inputs}});\n\nconst output = await client.{{ methodName }}({\n data,\n model: "{{ model.id }}",\n provider: "{{ provider }}",\n});\n\nconsole.log(output);',
+      "basicImage": 'import { InferenceClient } from "@huggingface/inference";\n\nconst client = new InferenceClient("{{ accessToken }}");\n\nconst data = fs.readFileSync({{inputs.asObj.inputs}});\n\nconst output = await client.{{ methodName }}({\n data,\n model: "{{ model.id }}",\n provider: "{{ provider }}",\n});\n\nconsole.log(output);',
+      "conversational": 'import { InferenceClient } from "@huggingface/inference";\n\nconst client = new InferenceClient("{{ accessToken }}");\n\nconst chatCompletion = await client.chatCompletion({\n provider: "{{ provider }}",\n model: "{{ model.id }}",\n{{ inputs.asTsString }}\n});\n\nconsole.log(chatCompletion.choices[0].message);',
+      "conversationalStream": 'import { InferenceClient } from "@huggingface/inference";\n\nconst client = new InferenceClient("{{ accessToken }}");\n\nlet out = "";\n\nconst stream = await client.chatCompletionStream({\n provider: "{{ provider }}",\n model: "{{ model.id }}",\n{{ inputs.asTsString }}\n});\n\nfor await (const chunk of stream) {\n if (chunk.choices && chunk.choices.length > 0) {\n const newContent = chunk.choices[0].delta.content;\n out += newContent;\n console.log(newContent);\n } \n}',
+      "textToImage": `import { InferenceClient } from "@huggingface/inference";
+
+const client = new InferenceClient("{{ accessToken }}");
+
+const image = await client.textToImage({
+    provider: "{{ provider }}",
+    model: "{{ model.id }}",
+    inputs: {{ inputs.asObj.inputs }},
+    parameters: { num_inference_steps: 5 },
+});
+/// Use the generated image (it's a Blob)`,
+      "textToVideo": `import { InferenceClient } from "@huggingface/inference";
+
+const client = new InferenceClient("{{ accessToken }}");
+
+const image = await client.textToVideo({
+    provider: "{{ provider }}",
+    model: "{{ model.id }}",
+    inputs: {{ inputs.asObj.inputs }},
+});
+// Use the generated video (it's a Blob)`
+    },
+    "openai": {
+      "conversational": 'import { OpenAI } from "openai";\n\nconst client = new OpenAI({\n baseURL: "{{ baseUrl }}",\n apiKey: "{{ accessToken }}",\n});\n\nconst chatCompletion = await client.chat.completions.create({\n model: "{{ providerModelId }}",\n{{ inputs.asTsString }}\n});\n\nconsole.log(chatCompletion.choices[0].message);',
+      "conversationalStream": 'import { OpenAI } from "openai";\n\nconst client = new OpenAI({\n baseURL: "{{ baseUrl }}",\n apiKey: "{{ accessToken }}",\n});\n\nlet out = "";\n\nconst stream = await client.chat.completions.create({\n provider: "{{ provider }}",\n model: "{{ model.id }}",\n{{ inputs.asTsString }}\n});\n\nfor await (const chunk of stream) {\n if (chunk.choices && chunk.choices.length > 0) {\n const newContent = chunk.choices[0].delta.content;\n out += newContent;\n console.log(newContent);\n } \n}'
+    }
+  },
+  "python": {
+    "fal_client": {
+      "textToImage": '{% if provider == "fal-ai" %}\nimport fal_client\n\nresult = fal_client.subscribe(\n "{{ providerModelId }}",\n arguments={\n "prompt": {{ inputs.asObj.inputs }},\n },\n)\nprint(result)\n{% endif %} '
+    },
+    "huggingface_hub": {
+      "basic": 'result = client.{{ methodName }}(\n inputs={{ inputs.asObj.inputs }},\n model="{{ model.id }}",\n)',
+      "basicAudio": 'output = client.{{ methodName }}({{ inputs.asObj.inputs }}, model="{{ model.id }}")',
+      "basicImage": 'output = client.{{ methodName }}({{ inputs.asObj.inputs }}, model="{{ model.id }}")',
+      "conversational": 'completion = client.chat.completions.create(\n model="{{ model.id }}",\n{{ inputs.asPythonString }}\n)\n\nprint(completion.choices[0].message) ',
+      "conversationalStream": 'stream = client.chat.completions.create(\n model="{{ model.id }}",\n{{ inputs.asPythonString }}\n stream=True,\n)\n\nfor chunk in stream:\n print(chunk.choices[0].delta.content, end="") ',
+      "documentQuestionAnswering": 'output = client.document_question_answering(\n "{{ inputs.asObj.image }}",\n question="{{ inputs.asObj.question }}",\n model="{{ model.id }}",\n) ',
+      "imageToImage": '# output is a PIL.Image object\nimage = client.image_to_image(\n "{{ inputs.asObj.inputs }}",\n prompt="{{ inputs.asObj.parameters.prompt }}",\n model="{{ model.id }}",\n) ',
+      "importInferenceClient": 'from huggingface_hub import InferenceClient\n\nclient = InferenceClient(\n provider="{{ provider }}",\n api_key="{{ accessToken }}",\n)',
+      "textToImage": '# output is a PIL.Image object\nimage = client.text_to_image(\n {{ inputs.asObj.inputs }},\n model="{{ model.id }}",\n) ',
+      "textToVideo": 'video = client.text_to_video(\n {{ inputs.asObj.inputs }},\n model="{{ model.id }}",\n) '
+    },
+    "openai": {
+      "conversational": 'from openai import OpenAI\n\nclient = OpenAI(\n base_url="{{ baseUrl }}",\n api_key="{{ accessToken }}"\n)\n\ncompletion = client.chat.completions.create(\n model="{{ providerModelId }}",\n{{ inputs.asPythonString }}\n)\n\nprint(completion.choices[0].message) ',
+      "conversationalStream": 'from openai import OpenAI\n\nclient = OpenAI(\n base_url="{{ baseUrl }}",\n api_key="{{ accessToken }}"\n)\n\nstream = client.chat.completions.create(\n model="{{ providerModelId }}",\n{{ inputs.asPythonString }}\n stream=True,\n)\n\nfor chunk in stream:\n print(chunk.choices[0].delta.content, end="")'
+    },
+    "requests": {
+      "basic": 'def query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.json()\n\noutput = query({\n "inputs": {{ providerInputs.asObj.inputs }},\n}) ',
+      "basicAudio": 'def query(filename):\n with open(filename, "rb") as f:\n data = f.read()\n response = requests.post(API_URL, headers={"Content-Type": "audio/flac", **headers}, data=data)\n return response.json()\n\noutput = query({{ providerInputs.asObj.inputs }})',
+      "basicImage": 'def query(filename):\n with open(filename, "rb") as f:\n data = f.read()\n response = requests.post(API_URL, headers={"Content-Type": "image/jpeg", **headers}, data=data)\n return response.json()\n\noutput = query({{ providerInputs.asObj.inputs }})',
+      "conversational": 'def query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.json()\n\nresponse = query({\n{{ providerInputs.asJsonString }}\n})\n\nprint(response["choices"][0]["message"])',
+      "conversationalStream": 'def query(payload):\n response = requests.post(API_URL, headers=headers, json=payload, stream=True)\n for line in response.iter_lines():\n if not line.startswith(b"data:"):\n continue\n if line.strip() == b"data: [DONE]":\n return\n yield json.loads(line.decode("utf-8").lstrip("data:").rstrip("/n"))\n\nchunks = query({\n{{ providerInputs.asJsonString }},\n "stream": True,\n})\n\nfor chunk in chunks:\n print(chunk["choices"][0]["delta"]["content"], end="")',
+      "documentQuestionAnswering": 'def query(payload):\n with open(payload["image"], "rb") as f:\n img = f.read()\n payload["image"] = base64.b64encode(img).decode("utf-8")\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.json()\n\noutput = query({\n "inputs": {\n "image": "{{ inputs.asObj.image }}",\n "question": "{{ inputs.asObj.question }}",\n },\n}) ',
+      "imageToImage": 'def query(payload):\n with open(payload["inputs"], "rb") as f:\n img = f.read()\n payload["inputs"] = base64.b64encode(img).decode("utf-8")\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.content\n\nimage_bytes = query({\n{{ providerInputs.asJsonString }}\n})\n\n# You can access the image with PIL.Image for example\nimport io\nfrom PIL import Image\nimage = Image.open(io.BytesIO(image_bytes)) ',
+      "importRequests": '{% if importBase64 %}\nimport base64\n{% endif %}\n{% if importJson %}\nimport json\n{% endif %}\nimport requests\n\nAPI_URL = "{{ fullUrl }}"\nheaders = {"Authorization": "{{ authorizationHeader }}"}',
+      "tabular": 'def query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.content\n\nresponse = query({\n "inputs": {\n "data": {{ providerInputs.asObj.inputs }}\n },\n}) ',
+      "textToAudio": '{% if model.library_name == "transformers" %}\ndef query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.content\n\naudio_bytes = query({\n "inputs": {{ providerInputs.asObj.inputs }},\n})\n# You can access the audio with IPython.display for example\nfrom IPython.display import Audio\nAudio(audio_bytes)\n{% else %}\ndef query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.json()\n\naudio, sampling_rate = query({\n "inputs": {{ providerInputs.asObj.inputs }},\n})\n# You can access the audio with IPython.display for example\nfrom IPython.display import Audio\nAudio(audio, rate=sampling_rate)\n{% endif %} ',
+      "textToImage": '{% if provider == "hf-inference" %}\ndef query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.content\n\nimage_bytes = query({\n "inputs": {{ providerInputs.asObj.inputs }},\n})\n\n# You can access the image with PIL.Image for example\nimport io\nfrom PIL import Image\nimage = Image.open(io.BytesIO(image_bytes))\n{% endif %}',
+      "zeroShotClassification": 'def query(payload):\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.json()\n\noutput = query({\n "inputs": {{ providerInputs.asObj.inputs }},\n "parameters": {"candidate_labels": ["refund", "legal", "faq"]},\n}) ',
+      "zeroShotImageClassification": 'def query(data):\n with open(data["image_path"], "rb") as f:\n img = f.read()\n payload={\n "parameters": data["parameters"],\n "inputs": base64.b64encode(img).decode("utf-8")\n }\n response = requests.post(API_URL, headers=headers, json=payload)\n return response.json()\n\noutput = query({\n "image_path": {{ providerInputs.asObj.inputs }},\n "parameters": {"candidate_labels": ["cat", "dog", "llama"]},\n}) '
+    }
+  },
+  "sh": {
+    "curl": {
+      "basic": "curl {{ fullUrl }} \\\n -X POST \\\n -H 'Authorization: {{ authorizationHeader }}' \\\n -H 'Content-Type: application/json' \\\n -d '{\n{{ providerInputs.asCurlString }}\n }'",
+      "basicAudio": "curl {{ fullUrl }} \\\n -X POST \\\n -H 'Authorization: {{ authorizationHeader }}' \\\n -H 'Content-Type: audio/flac' \\\n --data-binary @{{ providerInputs.asObj.inputs }}",
+      "basicImage": "curl {{ fullUrl }} \\\n -X POST \\\n -H 'Authorization: {{ authorizationHeader }}' \\\n -H 'Content-Type: image/jpeg' \\\n --data-binary @{{ providerInputs.asObj.inputs }}",
+      "conversational": `curl {{ fullUrl }} \\
+    -H 'Authorization: {{ authorizationHeader }}' \\
+    -H 'Content-Type: application/json' \\
+    -d '{
+{{ providerInputs.asCurlString }},
+    "stream": false
+}'`,
+      "conversationalStream": `curl {{ fullUrl }} \\
+    -H 'Authorization: {{ authorizationHeader }}' \\
+    -H 'Content-Type: application/json' \\
+    -d '{
+{{ providerInputs.asCurlString }},
+    "stream": true
+}'`,
+      "zeroShotClassification": `curl {{ fullUrl }} \\
+    -X POST \\
+    -d '{"inputs": {{ providerInputs.asObj.inputs }}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
+    -H 'Content-Type: application/json' \\
+    -H 'Authorization: {{ authorizationHeader }}'`
+    }
+  }
+};
+
+// src/snippets/getInferenceSnippets.ts
 var PYTHON_CLIENTS = ["huggingface_hub", "fal_client", "requests", "openai"];
 var JS_CLIENTS = ["fetch", "huggingface.js", "openai"];
 var SH_CLIENTS = ["curl"];
```
```diff
@@ -1620,20 +1808,12 @@ var CLIENTS = {
   python: [...PYTHON_CLIENTS],
   sh: [...SH_CLIENTS]
 };
-var rootDirFinder = () => {
-  let currentPath = typeof import.meta !== "undefined" && import.meta.url ? path.normalize(new URL(import.meta.url).pathname) : __dirname;
-  while (currentPath !== "/") {
-    if (pathExists(path.join(currentPath, "package.json"))) {
-      return currentPath;
-    }
-    currentPath = path.normalize(path.join(currentPath, ".."));
-  }
-  return "/";
-};
-var templatePath = (language, client, templateName) => path.join(rootDirFinder(), "src", "snippets", "templates", language, client, `${templateName}.jinja`);
-var hasTemplate = (language, client, templateName) => pathExists(templatePath(language, client, templateName));
+var hasTemplate = (language, client, templateName) => templates[language]?.[client]?.[templateName] !== void 0;
 var loadTemplate = (language, client, templateName) => {
-  const template =
+  const template = templates[language]?.[client]?.[templateName];
+  if (!template) {
+    throw new Error(`Template not found: ${language}/${client}/${templateName}`);
+  }
   return (data) => new Template(template).render({ ...data });
 };
 var snippetImportPythonInferenceClient = loadTemplate("python", "huggingface_hub", "importInferenceClient");
```
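This hunk is what allowed deleting every `.jinja` file in the file list at the top: snippets are now rendered from the inlined `templates` object added earlier in this diff, so the library no longer walks up the directory tree (`rootDirFinder`) or reads template files at runtime, which also makes snippet generation usable from browser bundles. The lookup reduces to nested optional chaining; a self-contained sketch of the same pattern with toy data:

```ts
// Toy registry demonstrating the lookup pattern; the real keys and
// jinja strings live in templates.exported.ts.
const templates: Record<string, Record<string, Record<string, string>>> = {
  sh: { curl: { basic: "curl {{ fullUrl }}" } },
};

const hasTemplate = (language: string, client: string, name: string): boolean =>
  templates[language]?.[client]?.[name] !== undefined;

const loadTemplate = (language: string, client: string, name: string): string => {
  const template = templates[language]?.[client]?.[name];
  if (!template) {
    // Mirrors the error added in the diff.
    throw new Error(`Template not found: ${language}/${client}/${name}`);
  }
  return template;
};

console.log(hasTemplate("sh", "curl", "basic")); // true
console.log(loadTemplate("sh", "curl", "basic")); // curl {{ fullUrl }}
```

Dropping the filesystem dependency is consistent with the removal of the separate `dist/browser` bundles in the file list above.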
package/dist/src/lib/makeRequestOptions.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAcA,OAAO,KAAK,EAAqB,aAAa,EAAE,OAAO,EAAkB,WAAW,EAAE,MAAM,UAAU,CAAC;AAgCvG;;;GAGG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,oEAAoE;IACpE,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CAoC7C;AAED;;;GAGG;AACH,wBAAgB,mCAAmC,CAClD,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,
+{"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAcA,OAAO,KAAK,EAAqB,aAAa,EAAE,OAAO,EAAkB,WAAW,EAAE,MAAM,UAAU,CAAC;AAgCvG;;;GAGG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,oEAAoE;IACpE,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CAoC7C;AAED;;;GAGG;AACH,wBAAgB,mCAAmC,CAClD,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CA8FpC"}
```

package/dist/src/providers/black-forest-labs.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"black-forest-labs.d.ts","sourceRoot":"","sources":["../../../src/providers/black-forest-labs.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"black-forest-labs.d.ts","sourceRoot":"","sources":["../../../src/providers/black-forest-labs.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AAwBpF,eAAO,MAAM,wBAAwB,EAAE,cAKtC,CAAC"}
```

package/dist/src/providers/cerebras.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"cerebras.d.ts","sourceRoot":"","sources":["../../../src/providers/cerebras.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"cerebras.d.ts","sourceRoot":"","sources":["../../../src/providers/cerebras.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AAuBpF,eAAO,MAAM,eAAe,EAAE,cAK7B,CAAC"}
```

package/dist/src/providers/cohere.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"cohere.d.ts","sourceRoot":"","sources":["../../../src/providers/cohere.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"cohere.d.ts","sourceRoot":"","sources":["../../../src/providers/cohere.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AAuBpF,eAAO,MAAM,aAAa,EAAE,cAK3B,CAAC"}
```
package/dist/src/providers/fal-ai.d.ts
CHANGED

```diff
@@ -1,19 +1,9 @@
-/**
- * See the registered mapping of HF model ID => Fal model ID here:
- *
- * https://huggingface.co/api/partners/fal-ai/models
- *
- * This is a publicly available mapping.
- *
- * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
- * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
- *
- * - If you work at Fal and want to update this mapping, please use the model mapping API we provide on huggingface.co
- * - If you're a community member and want to add a new supported HF model to Fal, please open an issue on the present repo
- * and we will tag Fal team members.
- *
- * Thanks!
- */
 import type { ProviderConfig } from "../types";
 export declare const FAL_AI_CONFIG: ProviderConfig;
+export interface FalAiQueueOutput {
+    request_id: string;
+    status: string;
+    response_url: string;
+}
+export declare function pollFalResponse(res: FalAiQueueOutput, url: string, headers: Record<string, string>): Promise<Blob>;
 //# sourceMappingURL=fal-ai.d.ts.map
```
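The rewritten declaration file drops the long provider-mapping comment and exposes the queue types used by `pollFalResponse`. `FalAiQueueOutput` is exactly what the polling code consumes; for illustration, here is a hypothetical payload in that shape and how the pathname of `response_url` yields the status and result endpoints (all values invented):

```ts
// Shape declared in the new fal-ai.d.ts.
interface FalAiQueueOutput {
  request_id: string;
  status: string;
  response_url: string;
}

// Hypothetical queue submission response, values invented for illustration:
const queued: FalAiQueueOutput = {
  request_id: "abc-123",
  status: "IN_QUEUE",
  response_url: "https://queue.fal.run/some-model/requests/abc-123",
};

// pollFalResponse derives its polling endpoints from the pathname:
const modelPath = new URL(queued.response_url).pathname; // "/some-model/requests/abc-123"
const statusUrl = `https://queue.fal.run${modelPath}/status`;
const resultUrl = `https://queue.fal.run${modelPath}`;
console.log(statusUrl, resultUrl);
```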
package/dist/src/providers/fal-ai.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"fal-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fal-ai.ts"],"names":[],"mappings":"
+{"version":3,"file":"fal-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fal-ai.ts"],"names":[],"mappings":"AAkBA,OAAO,KAAK,EAA2C,cAAc,EAAa,MAAM,UAAU,CAAC;AA4BnG,eAAO,MAAM,aAAa,EAAE,cAK3B,CAAC;AAEF,MAAM,WAAW,gBAAgB;IAChC,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,MAAM,CAAC;CACrB;AAED,wBAAsB,eAAe,CACpC,GAAG,EAAE,gBAAgB,EACrB,GAAG,EAAE,MAAM,EACX,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAC7B,OAAO,CAAC,IAAI,CAAC,CA0Df"}
```

package/dist/src/providers/fireworks-ai.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"fireworks-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fireworks-ai.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"fireworks-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/fireworks-ai.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AA0BpF,eAAO,MAAM,mBAAmB,EAAE,cAKjC,CAAC"}
```

package/dist/src/providers/hf-inference.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"hf-inference.d.ts","sourceRoot":"","sources":["../../../src/providers/hf-inference.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,
+{"version":3,"file":"hf-inference.d.ts","sourceRoot":"","sources":["../../../src/providers/hf-inference.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AA4BpF,eAAO,MAAM,mBAAmB,EAAE,cAKjC,CAAC"}
```

package/dist/src/providers/hyperbolic.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"hyperbolic.d.ts","sourceRoot":"","sources":["../../../src/providers/hyperbolic.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"hyperbolic.d.ts","sourceRoot":"","sources":["../../../src/providers/hyperbolic.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AA0BpF,eAAO,MAAM,iBAAiB,EAAE,cAK/B,CAAC"}
```

package/dist/src/providers/nebius.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"nebius.d.ts","sourceRoot":"","sources":["../../../src/providers/nebius.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"nebius.d.ts","sourceRoot":"","sources":["../../../src/providers/nebius.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AAgCpF,eAAO,MAAM,aAAa,EAAE,cAK3B,CAAC"}
```

package/dist/src/providers/novita.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"novita.d.ts","sourceRoot":"","sources":["../../../src/providers/novita.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"novita.d.ts","sourceRoot":"","sources":["../../../src/providers/novita.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AA6BpF,eAAO,MAAM,aAAa,EAAE,cAK3B,CAAC"}
```

package/dist/src/providers/openai.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/providers/openai.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,KAAK,
+{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/providers/openai.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AA6BpF,eAAO,MAAM,aAAa,EAAE,cAM3B,CAAC"}
```

package/dist/src/providers/replicate.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"replicate.d.ts","sourceRoot":"","sources":["../../../src/providers/replicate.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"replicate.d.ts","sourceRoot":"","sources":["../../../src/providers/replicate.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AAEpF,eAAO,MAAM,sBAAsB,8BAA8B,CAAC;AA0BlE,eAAO,MAAM,gBAAgB,EAAE,cAK9B,CAAC"}
```

package/dist/src/providers/sambanova.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"sambanova.d.ts","sourceRoot":"","sources":["../../../src/providers/sambanova.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"sambanova.d.ts","sourceRoot":"","sources":["../../../src/providers/sambanova.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AA0BpF,eAAO,MAAM,gBAAgB,EAAE,cAK9B,CAAC"}
```

package/dist/src/providers/together.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,
+{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAA4B,cAAc,EAAa,MAAM,UAAU,CAAC;AAgCpF,eAAO,MAAM,eAAe,EAAE,cAK7B,CAAC"}
```

package/dist/src/snippets/getInferenceSnippets.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"getInferenceSnippets.d.ts","sourceRoot":"","sources":["../../../src/snippets/getInferenceSnippets.ts"],"names":[],"mappings":"AAEA,OAAO,EACN,KAAK,gBAAgB,EAErB,KAAK,gBAAgB,EAGrB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,KAAK,EAAE,iBAAiB,EAA8B,MAAM,UAAU,CAAC;
+{"version":3,"file":"getInferenceSnippets.d.ts","sourceRoot":"","sources":["../../../src/snippets/getInferenceSnippets.ts"],"names":[],"mappings":"AAEA,OAAO,EACN,KAAK,gBAAgB,EAErB,KAAK,gBAAgB,EAGrB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,KAAK,EAAE,iBAAiB,EAA8B,MAAM,UAAU,CAAC;AAgS9E,wBAAgB,oBAAoB,CACnC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,eAAe,CAAC,EAAE,MAAM,EACxB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,EAAE,CAIpB"}
```