@huggingface/inference 3.1.4 → 3.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.cjs +11 -3
- package/dist/index.js +11 -3
- package/dist/src/providers/replicate.d.ts.map +1 -1
- package/dist/src/providers/sambanova.d.ts.map +1 -1
- package/dist/src/providers/together.d.ts.map +1 -1
- package/dist/src/tasks/audio/textToSpeech.d.ts.map +1 -1
- package/dist/src/types.d.ts +3 -2
- package/dist/src/types.d.ts.map +1 -1
- package/dist/test/HfInference.spec.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/providers/replicate.ts +2 -1
- package/src/providers/sambanova.ts +1 -0
- package/src/providers/together.ts +1 -0
- package/src/tasks/audio/textToSpeech.ts +11 -2
- package/src/types.ts +2 -2
package/README.md
CHANGED

@@ -1,6 +1,6 @@
 # 🤗 Hugging Face Inference
 
-A Typescript powered wrapper for the
+A Typescript powered wrapper for the HF Inference API (serverless), Inference Endpoints (dedicated), and third-party Inference Providers.
 It works with [Inference API (serverless)](https://huggingface.co/docs/api-inference/index) and [Inference Endpoints (dedicated)](https://huggingface.co/docs/inference-endpoints/index), and even with supported third-party Inference Providers.
 
 Check out the [full documentation](https://huggingface.co/docs/huggingface.js/inference/README).
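For context on what the rewritten README line describes, here is a minimal usage sketch of the wrapper. The access token and model id are placeholders, and the call follows the package's documented textGeneration API; treat it as illustrative rather than an excerpt from this release.

```ts
import { HfInference } from "@huggingface/inference";

// Placeholder access token — substitute your own HF token.
const hf = new HfInference("hf_xxx");

// A serverless Inference API call; third-party providers can be selected
// per request via the optional `provider` argument (see the sketches below).
const out = await hf.textGeneration({
  model: "gpt2",
  inputs: "The @huggingface/inference wrapper lets you",
});
console.log(out.generated_text);
```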
package/dist/index.cjs
CHANGED

@@ -146,7 +146,8 @@ var REPLICATE_SUPPORTED_MODEL_IDS = {
 "stabilityai/stable-diffusion-xl-base-1.0": "stability-ai/sdxl:7762fd07cf82c948538e41f63f77d685e02b063e37e496e96eefd46c929f9bdc"
 },
 "text-to-speech": {
-"OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:
+"OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:3c645149db020c85d080e2f8cfe482a0e68189a922cde964fa9e80fb179191f3",
+"hexgrad/Kokoro-82M": "jaaari/kokoro-82m:dfdf537ba482b029e0a761699e6f55e9162cfd159270bfe0e44857caa5f275a6"
 },
 "text-to-video": {
 "genmo/mochi-1-preview": "genmoai/mochi-1:1944af04d098ef69bed7f9d335d102e652203f268ec4aaa2d836f6217217e460"
@@ -158,6 +159,7 @@ var SAMBANOVA_API_BASE_URL = "https://api.sambanova.ai";
 var SAMBANOVA_SUPPORTED_MODEL_IDS = {
 /** Chat completion / conversational */
 conversational: {
+"deepseek-ai/DeepSeek-Distill-R1-Llama-70B": "DeepSeek-Distill-R1-Llama-70B",
 "Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen2.5-Coder-32B-Instruct",
 "Qwen/Qwen2.5-72B-Instruct": "Qwen2.5-72B-Instruct",
 "Qwen/QwQ-32B-Preview": "QwQ-32B-Preview",
@@ -204,6 +206,7 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
 "meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
 "microsoft/WizardLM-2-8x22B": "microsoft/WizardLM-2-8x22B",
 "mistralai/Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
+"mistralai/Mistral-Small-24B-Instruct-2501": "mistralai/Mistral-Small-24B-Instruct-2501",
 "mistralai/Mixtral-8x22B-Instruct-v0.1": "mistralai/Mixtral-8x22B-Instruct-v0.1",
 "mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
 "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
@@ -229,7 +232,7 @@ function isUrl(modelOrUrl) {
 
 // package.json
 var name = "@huggingface/inference";
-var version = "3.1.
+var version = "3.1.6";
 
 // src/lib/makeRequestOptions.ts
 var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
@@ -721,7 +724,12 @@ async function buildPayload(args) {
 
 // src/tasks/audio/textToSpeech.ts
 async function textToSpeech(args, options) {
-const
+const payload = args.provider === "replicate" ? {
+...omit(args, ["inputs", "parameters"]),
+...args.parameters,
+text: args.inputs
+} : args;
+const res = await request(payload, {
 ...options,
 taskHint: "text-to-speech"
 });
package/dist/index.js
CHANGED

@@ -87,7 +87,8 @@ var REPLICATE_SUPPORTED_MODEL_IDS = {
 "stabilityai/stable-diffusion-xl-base-1.0": "stability-ai/sdxl:7762fd07cf82c948538e41f63f77d685e02b063e37e496e96eefd46c929f9bdc"
 },
 "text-to-speech": {
-"OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:
+"OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:3c645149db020c85d080e2f8cfe482a0e68189a922cde964fa9e80fb179191f3",
+"hexgrad/Kokoro-82M": "jaaari/kokoro-82m:dfdf537ba482b029e0a761699e6f55e9162cfd159270bfe0e44857caa5f275a6"
 },
 "text-to-video": {
 "genmo/mochi-1-preview": "genmoai/mochi-1:1944af04d098ef69bed7f9d335d102e652203f268ec4aaa2d836f6217217e460"
@@ -99,6 +100,7 @@ var SAMBANOVA_API_BASE_URL = "https://api.sambanova.ai";
 var SAMBANOVA_SUPPORTED_MODEL_IDS = {
 /** Chat completion / conversational */
 conversational: {
+"deepseek-ai/DeepSeek-Distill-R1-Llama-70B": "DeepSeek-Distill-R1-Llama-70B",
 "Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen2.5-Coder-32B-Instruct",
 "Qwen/Qwen2.5-72B-Instruct": "Qwen2.5-72B-Instruct",
 "Qwen/QwQ-32B-Preview": "QwQ-32B-Preview",
@@ -145,6 +147,7 @@ var TOGETHER_SUPPORTED_MODEL_IDS = {
 "meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
 "microsoft/WizardLM-2-8x22B": "microsoft/WizardLM-2-8x22B",
 "mistralai/Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
+"mistralai/Mistral-Small-24B-Instruct-2501": "mistralai/Mistral-Small-24B-Instruct-2501",
 "mistralai/Mixtral-8x22B-Instruct-v0.1": "mistralai/Mixtral-8x22B-Instruct-v0.1",
 "mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
 "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
@@ -170,7 +173,7 @@ function isUrl(modelOrUrl) {
 
 // package.json
 var name = "@huggingface/inference";
-var version = "3.1.
+var version = "3.1.6";
 
 // src/lib/makeRequestOptions.ts
 var HF_HUB_INFERENCE_PROXY_TEMPLATE = `${HF_HUB_URL}/api/inference-proxy/{{PROVIDER}}`;
@@ -662,7 +665,12 @@ async function buildPayload(args) {
 
 // src/tasks/audio/textToSpeech.ts
 async function textToSpeech(args, options) {
-const
+const payload = args.provider === "replicate" ? {
+...omit(args, ["inputs", "parameters"]),
+...args.parameters,
+text: args.inputs
+} : args;
+const res = await request(payload, {
 ...options,
 taskHint: "text-to-speech"
 });
package/dist/src/providers/replicate.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"replicate.d.ts","sourceRoot":"","sources":["../../../src/providers/replicate.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,sBAAsB,8BAA8B,CAAC;AAElE,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,eAAO,MAAM,6BAA6B,EAAE,eAAe,CAAC,WAAW,
+{"version":3,"file":"replicate.d.ts","sourceRoot":"","sources":["../../../src/providers/replicate.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,sBAAsB,8BAA8B,CAAC;AAElE,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,eAAO,MAAM,6BAA6B,EAAE,eAAe,CAAC,WAAW,CAuBtE,CAAC"}

package/dist/src/providers/sambanova.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"sambanova.d.ts","sourceRoot":"","sources":["../../../src/providers/sambanova.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,sBAAsB,6BAA6B,CAAC;AAEjE,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,eAAO,MAAM,6BAA6B,EAAE,eAAe,CAAC,WAAW,
+{"version":3,"file":"sambanova.d.ts","sourceRoot":"","sources":["../../../src/providers/sambanova.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,sBAAsB,6BAA6B,CAAC;AAEjE,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,eAAO,MAAM,6BAA6B,EAAE,eAAe,CAAC,WAAW,CAiBtE,CAAC"}

package/dist/src/providers/together.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,qBAAqB,6BAA6B,CAAC;AAEhE;;GAEG;AACH,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,4BAA4B,EAAE,eAAe,CAAC,UAAU,
+{"version":3,"file":"together.d.ts","sourceRoot":"","sources":["../../../src/providers/together.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,eAAO,MAAM,qBAAqB,6BAA6B,CAAC;AAEhE;;GAEG;AACH,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,4BAA4B,EAAE,eAAe,CAAC,UAAU,CA8CpE,CAAC"}

package/dist/src/tasks/audio/textToSpeech.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"textToSpeech.d.ts","sourceRoot":"","sources":["../../../../src/tasks/audio/textToSpeech.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAE5D,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,KAAK,gBAAgB,GAAG,QAAQ,GAAG,iBAAiB,CAAC;AAKrD;;;GAGG;AACH,wBAAsB,YAAY,CAAC,IAAI,EAAE,gBAAgB,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,
+{"version":3,"file":"textToSpeech.d.ts","sourceRoot":"","sources":["../../../../src/tasks/audio/textToSpeech.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAE5D,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,KAAK,gBAAgB,GAAG,QAAQ,GAAG,iBAAiB,CAAC;AAKrD;;;GAGG;AACH,wBAAsB,YAAY,CAAC,IAAI,EAAE,gBAAgB,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,CA+B3F"}
package/dist/src/types.d.ts
CHANGED

@@ -1,5 +1,4 @@
-import type { PipelineType } from "@huggingface/tasks";
-import type { ChatCompletionInput } from "@huggingface/tasks";
+import type { ChatCompletionInput, PipelineType } from "@huggingface/tasks";
 /**
 * HF model id, like "meta-llama/Llama-3.3-70B-Instruct"
 */
@@ -78,6 +77,8 @@ export type RequestArgs = BaseArgs & ({
 inputs: unknown;
 } | {
 prompt: string;
+} | {
+text: string;
 } | {
 audio_url: string;
 } | ChatCompletionInput) & {
package/dist/src/types.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;IAElB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;CACtC;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;AAE3D,eAAO,MAAM,mBAAmB,2EAA4E,CAAC;AAC7G,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACrC,WAAW,CAAC,EAAE,MAAM,CAAC;CACrB,CAAC"}
package/dist/test/HfInference.spec.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"HfInference.spec.d.ts","sourceRoot":"","sources":["../../test/HfInference.spec.ts"],"names":[],"mappings":"
+{"version":3,"file":"HfInference.spec.d.ts","sourceRoot":"","sources":["../../test/HfInference.spec.ts"],"names":[],"mappings":"AAOA,OAAO,OAAO,CAAC"}
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "@huggingface/inference",
-"version": "3.1.
+"version": "3.1.6",
 "packageManager": "pnpm@8.10.5",
 "license": "MIT",
 "author": "Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -39,7 +39,7 @@
 },
 "type": "module",
 "dependencies": {
-"@huggingface/tasks": "^0.15.
+"@huggingface/tasks": "^0.15.7"
 },
 "devDependencies": {
 "@types/node": "18.13.0"
package/src/providers/replicate.ts
CHANGED

@@ -21,7 +21,8 @@ export const REPLICATE_SUPPORTED_MODEL_IDS: ProviderMapping<ReplicateId> = {
 "stability-ai/sdxl:7762fd07cf82c948538e41f63f77d685e02b063e37e496e96eefd46c929f9bdc",
 },
 "text-to-speech": {
-"OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:
+"OuteAI/OuteTTS-0.3-500M": "jbilcke/oute-tts:3c645149db020c85d080e2f8cfe482a0e68189a922cde964fa9e80fb179191f3",
+"hexgrad/Kokoro-82M": "jaaari/kokoro-82m:dfdf537ba482b029e0a761699e6f55e9162cfd159270bfe0e44857caa5f275a6",
 },
 "text-to-video": {
 "genmo/mochi-1-preview": "genmoai/mochi-1:1944af04d098ef69bed7f9d335d102e652203f268ec4aaa2d836f6217217e460",
package/src/providers/sambanova.ts
CHANGED

@@ -7,6 +7,7 @@ type SambanovaId = string;
 export const SAMBANOVA_SUPPORTED_MODEL_IDS: ProviderMapping<SambanovaId> = {
 /** Chat completion / conversational */
 conversational: {
+"deepseek-ai/DeepSeek-Distill-R1-Llama-70B": "DeepSeek-Distill-R1-Llama-70B",
 "Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen2.5-Coder-32B-Instruct",
 "Qwen/Qwen2.5-72B-Instruct": "Qwen2.5-72B-Instruct",
 "Qwen/QwQ-32B-Preview": "QwQ-32B-Preview",
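The hunk above adds a SambaNova mapping for the DeepSeek distilled Llama model. A hedged sketch of how that mapping might be exercised through the client's chat-completion API (the token is a placeholder; option names follow the package's documented chatCompletion signature, so verify against the docs):

```ts
import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_xxx"); // placeholder token

// Selecting the "sambanova" provider routes the request through the
// SAMBANOVA_SUPPORTED_MODEL_IDS mapping shown in the diff above.
const res = await hf.chatCompletion({
  provider: "sambanova",
  model: "deepseek-ai/DeepSeek-Distill-R1-Llama-70B",
  messages: [{ role: "user", content: "In one sentence, what is a distilled model?" }],
  max_tokens: 128,
});
console.log(res.choices[0].message.content);
```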
package/src/providers/together.ts
CHANGED

@@ -39,6 +39,7 @@ export const TOGETHER_SUPPORTED_MODEL_IDS: ProviderMapping<TogetherId> = {
 "meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo-128K",
 "microsoft/WizardLM-2-8x22B": "microsoft/WizardLM-2-8x22B",
 "mistralai/Mistral-7B-Instruct-v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
+"mistralai/Mistral-Small-24B-Instruct-2501": "mistralai/Mistral-Small-24B-Instruct-2501",
 "mistralai/Mixtral-8x22B-Instruct-v0.1": "mistralai/Mixtral-8x22B-Instruct-v0.1",
 "mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
 "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
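Similarly, the new Together AI entry maps mistralai/Mistral-Small-24B-Instruct-2501 to itself. A sketch of a streaming call against that mapping, assuming the package's chatCompletionStream helper and the usual OpenAI-style delta chunks (double-check the exact chunk shape against the docs):

```ts
import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_xxx"); // placeholder token

// Streams completion deltas from Together AI via the mapping added in this release.
for await (const chunk of hf.chatCompletionStream({
  provider: "together",
  model: "mistralai/Mistral-Small-24B-Instruct-2501",
  messages: [{ role: "user", content: "Say hello in five languages." }],
})) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}
```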
package/src/tasks/audio/textToSpeech.ts
CHANGED

@@ -1,8 +1,8 @@
 import type { TextToSpeechInput } from "@huggingface/tasks";
 import { InferenceOutputError } from "../../lib/InferenceOutputError";
 import type { BaseArgs, Options } from "../../types";
+import { omit } from "../../utils/omit";
 import { request } from "../custom/request";
-
 type TextToSpeechArgs = BaseArgs & TextToSpeechInput;
 
 interface OutputUrlTextToSpeechGeneration {
@@ -13,7 +13,16 @@ interface OutputUrlTextToSpeechGeneration {
 * Recommended model: espnet/kan-bayashi_ljspeech_vits
 */
 export async function textToSpeech(args: TextToSpeechArgs, options?: Options): Promise<Blob> {
-
+// Replicate models expects "text" instead of "inputs"
+const payload =
+args.provider === "replicate"
+? {
+...omit(args, ["inputs", "parameters"]),
+...args.parameters,
+text: args.inputs,
+}
+: args;
+const res = await request<Blob | OutputUrlTextToSpeechGeneration>(payload, {
 ...options,
 taskHint: "text-to-speech",
 });
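The change above is the functional core of this release: when the Replicate provider is selected, textToSpeech drops inputs and parameters from the payload, hoists the parameters to the top level, and sends the text under text rather than inputs. A minimal sketch of a call that would exercise this path, using the hexgrad/Kokoro-82M mapping added in the Replicate provider file (the token is a placeholder and the snippet is illustrative, not an excerpt):

```ts
import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_xxx"); // placeholder token

// With provider "replicate", the payload sent upstream becomes
// { text: "...", ...parameters } instead of { inputs: "..." }.
const audio: Blob = await hf.textToSpeech({
  provider: "replicate",
  model: "hexgrad/Kokoro-82M",
  inputs: "Hello from the huggingface.js inference client.",
});
```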
package/src/types.ts
CHANGED

@@ -1,5 +1,4 @@
-import type { PipelineType } from "@huggingface/tasks";
-import type { ChatCompletionInput } from "@huggingface/tasks";
+import type { ChatCompletionInput, PipelineType } from "@huggingface/tasks";
 
 /**
 * HF model id, like "meta-llama/Llama-3.3-70B-Instruct"
@@ -88,6 +87,7 @@ export type RequestArgs = BaseArgs &
 | { data: Blob | ArrayBuffer }
 | { inputs: unknown }
 | { prompt: string }
+| { text: string }
 | { audio_url: string }
 | ChatCompletionInput
 ) & {
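The added { text: string } member widens the RequestArgs union so the Replicate-shaped text-to-speech payload built above still satisfies the request helper's typing. A reduced, illustrative restatement of that union (not the library's actual source) showing why the new member is needed:

```ts
// Hypothetical, trimmed-down version of the union, for illustration only.
type IllustrativeRequestArgs =
  | { data: Blob | ArrayBuffer }
  | { inputs: unknown }
  | { prompt: string }
  | { text: string } // new in 3.1.6: the shape produced by the Replicate text-to-speech path
  | { audio_url: string };

// The remapped payload now type-checks without a cast:
const payload: IllustrativeRequestArgs = { text: "Hello from Kokoro" };
```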