@huggingface/inference 3.1.4-test → 3.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +4 -3
- package/dist/index.js +4 -3
- package/dist/src/config.d.ts +0 -1
- package/dist/src/config.d.ts.map +1 -1
- package/dist/src/providers/together.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/config.ts +0 -1
- package/src/lib/makeRequestOptions.ts +5 -4
- package/src/providers/together.ts +1 -0
package/dist/index.cjs
CHANGED
|
@@ -102,7 +102,6 @@ __export(tasks_exports, {
|
|
|
102
102
|
|
|
103
103
|
// src/config.ts
|
|
104
104
|
var HF_HUB_URL = "https://huggingface.co";
|
|
105
|
-
var HF_INFERENCE_API_URL = "https://api-inference.huggingface.co";
|
|
106
105
|
|
|
107
106
|
// src/providers/fal-ai.ts
|
|
108
107
|
var FAL_AI_API_BASE_URL = "https://fal.run";
|
|
@@ -205,6 +204,7 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
|
|
|
205
204
|
"meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
|
|
206
205
|
"microsoft/WizardLM-2-8x22B": "microsoft/WizardLM-2-8x22B",
|
|
207
206
|
"mistralai/Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
|
|
207
|
+
"mistralai/Mistral-Small-24B-Instruct-2501": "mistralai/Mistral-Small-24B-Instruct-2501",
|
|
208
208
|
"mistralai/Mixtral-8x22B-Instruct-v0.1": "mistralai/Mixtral-8x22B-Instruct-v0.1",
|
|
209
209
|
"mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
|
|
210
210
|
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
|
|
@@ -230,7 +230,7 @@ function isUrl(modelOrUrl) {
|
|
|
230
230
|
|
|
231
231
|
// package.json
|
|
232
232
|
var name = "@huggingface/inference";
|
|
233
|
-
var version = "3.1.4-test";
|
|
233
|
+
var version = "3.1.5";
|
|
234
234
|
|
|
235
235
|
// src/lib/makeRequestOptions.ts
|
|
236
236
|
var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
|
|
@@ -377,7 +377,8 @@ function makeUrl(params) {
|
|
|
377
377
|
return baseUrl;
|
|
378
378
|
}
|
|
379
379
|
default: {
|
|
380
|
-
const url = params.forceTask ? `${HF_INFERENCE_API_URL}/pipeline/${params.forceTask}/${params.model}` : `${HF_INFERENCE_API_URL}/models/${params.model}`;
|
|
380
|
+
const baseUrl = HF_HUB_INFERENCE_PROXY_TEMPLATE.replaceAll("{{PROVIDER}}", "hf-inference");
|
|
381
|
+
const url = params.forceTask ? `${baseUrl}/pipeline/${params.forceTask}/${params.model}` : `${baseUrl}/models/${params.model}`;
|
|
381
382
|
if (params.taskHint === "text-generation" && params.chatCompletion) {
|
|
382
383
|
return url + `/v1/chat/completions`;
|
|
383
384
|
}
|
package/dist/index.js
CHANGED
|
@@ -43,7 +43,6 @@ __export(tasks_exports, {
|
|
|
43
43
|
|
|
44
44
|
// src/config.ts
|
|
45
45
|
var HF_HUB_URL = "https://huggingface.co";
|
|
46
|
-
var HF_INFERENCE_API_URL = "https://api-inference.huggingface.co";
|
|
47
46
|
|
|
48
47
|
// src/providers/fal-ai.ts
|
|
49
48
|
var FAL_AI_API_BASE_URL = "https://fal.run";
|
|
@@ -146,6 +145,7 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
|
|
|
146
145
|
"meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
|
|
147
146
|
"microsoft/WizardLM-2-8x22B": "microsoft/WizardLM-2-8x22B",
|
|
148
147
|
"mistralai/Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
|
|
148
|
+
"mistralai/Mistral-Small-24B-Instruct-2501": "mistralai/Mistral-Small-24B-Instruct-2501",
|
|
149
149
|
"mistralai/Mixtral-8x22B-Instruct-v0.1": "mistralai/Mixtral-8x22B-Instruct-v0.1",
|
|
150
150
|
"mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
|
|
151
151
|
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
|
|
@@ -171,7 +171,7 @@ function isUrl(modelOrUrl) {
|
|
|
171
171
|
|
|
172
172
|
// package.json
|
|
173
173
|
var name = "@huggingface/inference";
|
|
174
|
-
var version = "3.1.4-test";
|
|
174
|
+
var version = "3.1.5";
|
|
175
175
|
|
|
176
176
|
// src/lib/makeRequestOptions.ts
|
|
177
177
|
var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
|
|
@@ -318,7 +318,8 @@ function makeUrl(params) {
|
|
|
318
318
|
return baseUrl;
|
|
319
319
|
}
|
|
320
320
|
default: {
|
|
321
|
-
const url = params.forceTask ? `${HF_INFERENCE_API_URL}/pipeline/${params.forceTask}/${params.model}` : `${HF_INFERENCE_API_URL}/models/${params.model}`;
|
|
321
|
+
const baseUrl = HF_HUB_INFERENCE_PROXY_TEMPLATE.replaceAll("{{PROVIDER}}", "hf-inference");
|
|
322
|
+
const url = params.forceTask ? `${baseUrl}/pipeline/${params.forceTask}/${params.model}` : `${baseUrl}/models/${params.model}`;
|
|
322
323
|
if (params.taskHint === "text-generation" && params.chatCompletion) {
|
|
323
324
|
return url + `/v1/chat/completions`;
|
|
324
325
|
}
|
package/dist/src/config.d.ts
CHANGED
package/dist/src/config.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,2BAA2B,CAAC
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,2BAA2B,CAAC"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,qBAAqB,6BAA6B,CAAC;AAEhE;;GAEG;AACH,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,4BAA4B,EAAE,eAAe,CAAC,UAAU,
|
|
1
|
+
{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,qBAAqB,6BAA6B,CAAC;AAEhE;;GAEG;AACH,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,4BAA4B,EAAE,eAAe,CAAC,UAAU,CA8CpE,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@huggingface/inference",
|
|
3
|
-
"version": "3.1.4-test",
|
|
3
|
+
"version": "3.1.5",
|
|
4
4
|
"packageManager": "pnpm@8.10.5",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"author": "Tim Mikeladze <tim.mikeladze@gmail.com>",
|
|
@@ -39,7 +39,7 @@
|
|
|
39
39
|
},
|
|
40
40
|
"type": "module",
|
|
41
41
|
"dependencies": {
|
|
42
|
-
"@huggingface/tasks": "^0.15.4"
|
|
42
|
+
"@huggingface/tasks": "^0.15.5"
|
|
43
43
|
},
|
|
44
44
|
"devDependencies": {
|
|
45
45
|
"@types/node": "18.13.0"
|
package/src/config.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import type { WidgetType } from "@huggingface/tasks";
|
|
2
|
-
import { HF_HUB_URL, HF_INFERENCE_API_URL } from "../config";
|
|
2
|
+
import { HF_HUB_URL } from "../config";
|
|
3
3
|
import { FAL_AI_API_BASE_URL, FAL_AI_SUPPORTED_MODEL_IDS } from "../providers/fal-ai";
|
|
4
4
|
import { REPLICATE_API_BASE_URL, REPLICATE_SUPPORTED_MODEL_IDS } from "../providers/replicate";
|
|
5
5
|
import { SAMBANOVA_API_BASE_URL, SAMBANOVA_SUPPORTED_MODEL_IDS } from "../providers/sambanova";
|
|
@@ -90,7 +90,7 @@ export async function makeRequestOptions(
|
|
|
90
90
|
provider === "fal-ai" && authMethod === "provider-key" ? `Key ${accessToken}` : `Bearer ${accessToken}`;
|
|
91
91
|
}
|
|
92
92
|
|
|
93
|
-
// e.g. @huggingface/inference
|
|
93
|
+
// e.g. @huggingface/inference/3.1.3
|
|
94
94
|
const ownUserAgent = `${packageName}/${packageVersion}`;
|
|
95
95
|
headers["User-Agent"] = [ownUserAgent, typeof navigator !== "undefined" ? navigator.userAgent : undefined]
|
|
96
96
|
.filter((x) => x !== undefined)
|
|
@@ -244,9 +244,10 @@ function makeUrl(params: {
|
|
|
244
244
|
return baseUrl;
|
|
245
245
|
}
|
|
246
246
|
default: {
|
|
247
|
+
const baseUrl = HF_HUB_INFERENCE_PROXY_TEMPLATE.replaceAll("{{PROVIDER}}", "hf-inference");
|
|
247
248
|
const url = params.forceTask
|
|
248
|
-
? `${HF_INFERENCE_API_URL}/pipeline/${params.forceTask}/${params.model}`
|
|
249
|
-
: `${HF_INFERENCE_API_URL}/models/${params.model}`;
|
|
249
|
+
? `${baseUrl}/pipeline/${params.forceTask}/${params.model}`
|
|
250
|
+
: `${baseUrl}/models/${params.model}`;
|
|
250
251
|
if (params.taskHint === "text-generation" && params.chatCompletion) {
|
|
251
252
|
return url + `/v1/chat/completions`;
|
|
252
253
|
}
|
|
@@ -39,6 +39,7 @@ export const TOGETHER_SUPPORTED_MODEL_IDS: ProviderMapping<TogetherId> = {
|
|
|
39
39
|
"meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
|
|
40
40
|
"microsoft/WizardLM-2-8x22B": "microsoft/WizardLM-2-8x22B",
|
|
41
41
|
"mistralai/Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
|
|
42
|
+
"mistralai/Mistral-Small-24B-Instruct-2501": "mistralai/Mistral-Small-24B-Instruct-2501",
|
|
42
43
|
"mistralai/Mixtral-8x22B-Instruct-v0.1": "mistralai/Mixtral-8x22B-Instruct-v0.1",
|
|
43
44
|
"mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
|
|
44
45
|
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
|