@huggingface/inference 3.3.1 → 3.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.cjs +2 -3
- package/dist/index.js +2 -3
- package/dist/src/config.d.ts +0 -1
- package/dist/src/config.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/config.ts +0 -1
- package/src/lib/makeRequestOptions.ts +2 -2
package/README.md
CHANGED
@@ -125,7 +125,7 @@ for await (const output of hf.textGenerationStream({
 
 ### Text Generation (Chat Completion API Compatible)
 
-Using the `chatCompletion` method, you can generate text with models compatible with the OpenAI Chat Completion API. All models served by [TGI](https://huggingface.co/
+Using the `chatCompletion` method, you can generate text with models compatible with the OpenAI Chat Completion API. All models served by [TGI](https://api-inference.huggingface.co/framework/text-generation-inference) on Hugging Face support Messages API.
 
 [Demo](https://huggingface.co/spaces/huggingfacejs/streaming-chat-completion)
 
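For reference, a minimal usage sketch of the `chatCompletion` method this README section documents. The access token, model ID, and prompt below are placeholders chosen for illustration, not values taken from this diff:

```ts
// Rough sketch of the chatCompletion call described in the README change above.
// The token and model ID are placeholders, not values from this diff.
import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_..."); // assumption: a valid Hugging Face access token

const out = await hf.chatCompletion({
	model: "meta-llama/Llama-3.1-8B-Instruct", // any Messages-API-compatible model served by TGI
	messages: [{ role: "user", content: "What is the capital of France?" }],
	max_tokens: 128,
});

console.log(out.choices[0].message.content);
```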
package/dist/index.cjs
CHANGED
@@ -98,7 +98,6 @@ __export(tasks_exports, {
 
 // src/config.ts
 var HF_HUB_URL = "https://huggingface.co";
-var HF_ROUTER_URL = "https://router.huggingface.co";
 
 // src/providers/fal-ai.ts
 var FAL_AI_API_BASE_URL = "https://fal.run";
@@ -125,7 +124,7 @@ function isUrl(modelOrUrl) {
 
 // package.json
 var name = "@huggingface/inference";
-var version = "3.3.1";
+var version = "3.3.2";
 
 // src/providers/consts.ts
 var HARDCODED_MODEL_ID_MAPPING = {
@@ -189,7 +188,7 @@ async function getProviderModelId(params, args, options = {}) {
 }
 
 // src/lib/makeRequestOptions.ts
-var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${
+var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
 var tasks = null;
 async function makeRequestOptions(args, options) {
   const { accessToken, endpointUrl, provider: maybeProvider, model: maybeModel, ...remainingArgs } = args;
package/dist/index.js
CHANGED
@@ -43,7 +43,6 @@ __export(tasks_exports, {
 
 // src/config.ts
 var HF_HUB_URL = "https://huggingface.co";
-var HF_ROUTER_URL = "https://router.huggingface.co";
 
 // src/providers/fal-ai.ts
 var FAL_AI_API_BASE_URL = "https://fal.run";
@@ -70,7 +69,7 @@ function isUrl(modelOrUrl) {
 
 // package.json
 var name = "@huggingface/inference";
-var version = "3.3.1";
+var version = "3.3.2";
 
 // src/providers/consts.ts
 var HARDCODED_MODEL_ID_MAPPING = {
@@ -134,7 +133,7 @@ async function getProviderModelId(params, args, options = {}) {
 }
 
 // src/lib/makeRequestOptions.ts
-var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${
+var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
 var tasks = null;
 async function makeRequestOptions(args, options) {
   const { accessToken, endpointUrl, provider: maybeProvider, model: maybeModel, ...remainingArgs } = args;
package/dist/src/config.d.ts
CHANGED
package/dist/src/config.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,2BAA2B,CAAC
+{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,2BAA2B,CAAC"}
package/package.json
CHANGED
package/src/config.ts
CHANGED
package/src/lib/makeRequestOptions.ts
CHANGED
@@ -1,4 +1,4 @@
-import { HF_HUB_URL
+import { HF_HUB_URL } from "../config";
 import { FAL_AI_API_BASE_URL } from "../providers/fal-ai";
 import { NEBIUS_API_BASE_URL } from "../providers/nebius";
 import { REPLICATE_API_BASE_URL } from "../providers/replicate";
@@ -11,7 +11,7 @@ import { isUrl } from "./isUrl";
 import { version as packageVersion, name as packageName } from "../../package.json";
 import { getProviderModelId } from "./getProviderModelId";
 
-const HF_HUB_INFERENCE_PROXY_TEMPLATE = `${
+const HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
 
 /**
  * Lazy-loaded from huggingface.co/api/tasks when needed
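The substantive change in this release is the base of `HF_HUB_INFERENCE_PROXY_TEMPLATE`, which now builds provider proxy URLs from `HF_HUB_URL`; the `HF_ROUTER_URL` constant is dropped from `src/config.ts`, as the dist bundles above show. A minimal sketch of how such a template is presumably expanded follows; `proxyUrlFor` is a hypothetical helper for illustration, not an export of the package:

```ts
// Minimal sketch (not package code): how the {{PROVIDER}} placeholder in the
// new HF_HUB_INFERENCE_PROXY_TEMPLATE would typically be substituted.
const HF_HUB_URL = "https://huggingface.co";
const HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;

// Hypothetical helper: resolve the proxied base URL for a given provider name.
function proxyUrlFor(provider: string): string {
	return HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", provider);
}

// proxyUrlFor("fal-ai") -> "https://huggingface.co/api/inference-proxy/fal-ai"
```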