@huggingface/inference 3.8.2 → 3.9.0

This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/README.md CHANGED
@@ -48,16 +48,19 @@ You can send inference requests to third-party providers with the inference clie
48
48
 
49
49
  Currently, we support the following providers:
50
50
  - [Fal.ai](https://fal.ai)
51
+ - [Featherless AI](https://featherless.ai)
51
52
  - [Fireworks AI](https://fireworks.ai)
52
53
  - [Hyperbolic](https://hyperbolic.xyz)
53
54
  - [Nebius](https://studio.nebius.ai)
54
55
  - [Novita](https://novita.ai/?utm_source=github_huggingface&utm_medium=github_readme&utm_campaign=link)
56
+ - [Nscale](https://nscale.com)
55
57
  - [Replicate](https://replicate.com)
56
58
  - [Sambanova](https://sambanova.ai)
57
59
  - [Together](https://together.xyz)
58
60
  - [Blackforestlabs](https://blackforestlabs.ai)
59
61
  - [Cohere](https://cohere.com)
60
62
  - [Cerebras](https://cerebras.ai/)
63
+ - [Groq](https://groq.com)
61
64
 
62
65
  To send requests to a third-party provider, you have to pass the `provider` parameter to the inference function. Make sure your request is authenticated with an access token.
63
66
  ```ts
@@ -76,14 +79,17 @@ When authenticated with a third-party provider key, the request is made directly
76
79
 
77
80
  Only a subset of models are supported when requesting third-party providers. You can check the list of supported models per pipeline tasks here:
78
81
  - [Fal.ai supported models](https://huggingface.co/api/partners/fal-ai/models)
82
+ - [Featherless AI supported models](https://huggingface.co/api/partners/featherless-ai/models)
79
83
  - [Fireworks AI supported models](https://huggingface.co/api/partners/fireworks-ai/models)
80
84
  - [Hyperbolic supported models](https://huggingface.co/api/partners/hyperbolic/models)
81
85
  - [Nebius supported models](https://huggingface.co/api/partners/nebius/models)
86
+ - [Nscale supported models](https://huggingface.co/api/partners/nscale/models)
82
87
  - [Replicate supported models](https://huggingface.co/api/partners/replicate/models)
83
88
  - [Sambanova supported models](https://huggingface.co/api/partners/sambanova/models)
84
89
  - [Together supported models](https://huggingface.co/api/partners/together/models)
85
90
  - [Cohere supported models](https://huggingface.co/api/partners/cohere/models)
86
91
  - [Cerebras supported models](https://huggingface.co/api/partners/cerebras/models)
92
+ - [Groq supported models](https://console.groq.com/docs/models)
87
93
  - [HF Inference API (serverless)](https://huggingface.co/models?inference=warm&sort=trending)
88
94
 
89
95
  ❗**Important note:** To be compatible, the third-party API must adhere to the "standard" shape API we expect on HF model pages for each pipeline task type.
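
Taken together, the README changes in this release add three providers (Featherless AI, Nscale, Groq) to the supported list. As a rough illustration of what that enables, here is a minimal sketch of routing a chat completion through one of them; the model ID and token handling are illustrative placeholders, not taken from this diff:

```ts
import { InferenceClient } from "@huggingface/inference";

// Any Hugging Face access token with Inference Providers access; placeholder here.
const client = new InferenceClient(process.env.HF_TOKEN);

const out = await client.chatCompletion({
	provider: "groq", // also newly accepted: "featherless-ai", "nscale"
	model: "meta-llama/Llama-3.3-70B-Instruct", // illustrative model ID
	messages: [{ role: "user", content: "Hello!" }],
});

console.log(out.choices[0].message);
```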
package/dist/index.cjs CHANGED
@@ -482,6 +482,36 @@ var FalAITextToSpeechTask = class extends FalAITask {
482
482
  }
483
483
  };
484
484
 
485
+ // src/providers/featherless-ai.ts
486
+ var FEATHERLESS_API_BASE_URL = "https://api.featherless.ai";
487
+ var FeatherlessAIConversationalTask = class extends BaseConversationalTask {
488
+ constructor() {
489
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
490
+ }
491
+ };
492
+ var FeatherlessAITextGenerationTask = class extends BaseTextGenerationTask {
493
+ constructor() {
494
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
495
+ }
496
+ preparePayload(params) {
497
+ return {
498
+ ...params.args,
499
+ ...params.args.parameters,
500
+ model: params.model,
501
+ prompt: params.args.inputs
502
+ };
503
+ }
504
+ async getResponse(response) {
505
+ if (typeof response === "object" && "choices" in response && Array.isArray(response?.choices) && typeof response?.model === "string") {
506
+ const completion = response.choices[0];
507
+ return {
508
+ generated_text: completion.text
509
+ };
510
+ }
511
+ throw new InferenceOutputError("Expected Featherless AI text generation response format");
512
+ }
513
+ };
514
+
485
515
  // src/providers/fireworks-ai.ts
486
516
  var FireworksConversationalTask = class extends BaseConversationalTask {
487
517
  constructor() {
@@ -492,6 +522,25 @@ var FireworksConversationalTask = class extends BaseConversationalTask {
492
522
  }
493
523
  };
494
524
 
525
+ // src/providers/groq.ts
526
+ var GROQ_API_BASE_URL = "https://api.groq.com";
527
+ var GroqTextGenerationTask = class extends BaseTextGenerationTask {
528
+ constructor() {
529
+ super("groq", GROQ_API_BASE_URL);
530
+ }
531
+ makeRoute() {
532
+ return "/openai/v1/chat/completions";
533
+ }
534
+ };
535
+ var GroqConversationalTask = class extends BaseConversationalTask {
536
+ constructor() {
537
+ super("groq", GROQ_API_BASE_URL);
538
+ }
539
+ makeRoute() {
540
+ return "/openai/v1/chat/completions";
541
+ }
542
+ };
543
+
495
544
  // src/providers/hf-inference.ts
496
545
  var EQUIVALENT_SENTENCE_TRANSFORMERS_TASKS = ["feature-extraction", "sentence-similarity"];
497
546
  var HFInferenceTask = class extends TaskProviderHelper {
@@ -941,6 +990,41 @@ var NovitaConversationalTask = class extends BaseConversationalTask {
941
990
  }
942
991
  };
943
992
 
993
+ // src/providers/nscale.ts
994
+ var NSCALE_API_BASE_URL = "https://inference.api.nscale.com";
995
+ var NscaleConversationalTask = class extends BaseConversationalTask {
996
+ constructor() {
997
+ super("nscale", NSCALE_API_BASE_URL);
998
+ }
999
+ };
1000
+ var NscaleTextToImageTask = class extends TaskProviderHelper {
1001
+ constructor() {
1002
+ super("nscale", NSCALE_API_BASE_URL);
1003
+ }
1004
+ preparePayload(params) {
1005
+ return {
1006
+ ...omit(params.args, ["inputs", "parameters"]),
1007
+ ...params.args.parameters,
1008
+ response_format: "b64_json",
1009
+ prompt: params.args.inputs,
1010
+ model: params.model
1011
+ };
1012
+ }
1013
+ makeRoute() {
1014
+ return "v1/images/generations";
1015
+ }
1016
+ async getResponse(response, url, headers, outputType) {
1017
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data) && response.data.length > 0 && "b64_json" in response.data[0] && typeof response.data[0].b64_json === "string") {
1018
+ const base64Data = response.data[0].b64_json;
1019
+ if (outputType === "url") {
1020
+ return `data:image/jpeg;base64,${base64Data}`;
1021
+ }
1022
+ return fetch(`data:image/jpeg;base64,${base64Data}`).then((res) => res.blob());
1023
+ }
1024
+ throw new InferenceOutputError("Expected Nscale text-to-image response format");
1025
+ }
1026
+ };
1027
+
944
1028
  // src/providers/openai.ts
945
1029
  var OPENAI_API_BASE_URL = "https://api.openai.com";
946
1030
  var OpenAIConversationalTask = class extends BaseConversationalTask {
@@ -1139,6 +1223,10 @@ var PROVIDERS = {
1139
1223
  "text-to-video": new FalAITextToVideoTask(),
1140
1224
  "automatic-speech-recognition": new FalAIAutomaticSpeechRecognitionTask()
1141
1225
  },
1226
+ "featherless-ai": {
1227
+ conversational: new FeatherlessAIConversationalTask(),
1228
+ "text-generation": new FeatherlessAITextGenerationTask()
1229
+ },
1142
1230
  "hf-inference": {
1143
1231
  "text-to-image": new HFInferenceTextToImageTask(),
1144
1232
  conversational: new HFInferenceConversationalTask(),
@@ -1172,6 +1260,10 @@ var PROVIDERS = {
1172
1260
  "fireworks-ai": {
1173
1261
  conversational: new FireworksConversationalTask()
1174
1262
  },
1263
+ groq: {
1264
+ conversational: new GroqConversationalTask(),
1265
+ "text-generation": new GroqTextGenerationTask()
1266
+ },
1175
1267
  hyperbolic: {
1176
1268
  "text-to-image": new HyperbolicTextToImageTask(),
1177
1269
  conversational: new HyperbolicConversationalTask(),
@@ -1186,6 +1278,10 @@ var PROVIDERS = {
1186
1278
  conversational: new NovitaConversationalTask(),
1187
1279
  "text-generation": new NovitaTextGenerationTask()
1188
1280
  },
1281
+ nscale: {
1282
+ "text-to-image": new NscaleTextToImageTask(),
1283
+ conversational: new NscaleConversationalTask()
1284
+ },
1189
1285
  openai: {
1190
1286
  conversational: new OpenAIConversationalTask()
1191
1287
  },
@@ -1227,7 +1323,7 @@ function getProviderHelper(provider, task) {
1227
1323
 
1228
1324
  // package.json
1229
1325
  var name = "@huggingface/inference";
1230
- var version = "3.8.2";
1326
+ var version = "3.9.0";
1231
1327
 
1232
1328
  // src/providers/consts.ts
1233
1329
  var HARDCODED_MODEL_INFERENCE_MAPPING = {
@@ -1241,11 +1337,14 @@ var HARDCODED_MODEL_INFERENCE_MAPPING = {
1241
1337
  cerebras: {},
1242
1338
  cohere: {},
1243
1339
  "fal-ai": {},
1340
+ "featherless-ai": {},
1244
1341
  "fireworks-ai": {},
1342
+ groq: {},
1245
1343
  "hf-inference": {},
1246
1344
  hyperbolic: {},
1247
1345
  nebius: {},
1248
1346
  novita: {},
1347
+ nscale: {},
1249
1348
  openai: {},
1250
1349
  replicate: {},
1251
1350
  sambanova: {},
@@ -2164,11 +2263,14 @@ var INFERENCE_PROVIDERS = [
2164
2263
  "cerebras",
2165
2264
  "cohere",
2166
2265
  "fal-ai",
2266
+ "featherless-ai",
2167
2267
  "fireworks-ai",
2268
+ "groq",
2168
2269
  "hf-inference",
2169
2270
  "hyperbolic",
2170
2271
  "nebius",
2171
2272
  "novita",
2273
+ "nscale",
2172
2274
  "openai",
2173
2275
  "replicate",
2174
2276
  "sambanova",
package/dist/index.js CHANGED
@@ -425,6 +425,36 @@ var FalAITextToSpeechTask = class extends FalAITask {
425
425
  }
426
426
  };
427
427
 
428
+ // src/providers/featherless-ai.ts
429
+ var FEATHERLESS_API_BASE_URL = "https://api.featherless.ai";
430
+ var FeatherlessAIConversationalTask = class extends BaseConversationalTask {
431
+ constructor() {
432
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
433
+ }
434
+ };
435
+ var FeatherlessAITextGenerationTask = class extends BaseTextGenerationTask {
436
+ constructor() {
437
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
438
+ }
439
+ preparePayload(params) {
440
+ return {
441
+ ...params.args,
442
+ ...params.args.parameters,
443
+ model: params.model,
444
+ prompt: params.args.inputs
445
+ };
446
+ }
447
+ async getResponse(response) {
448
+ if (typeof response === "object" && "choices" in response && Array.isArray(response?.choices) && typeof response?.model === "string") {
449
+ const completion = response.choices[0];
450
+ return {
451
+ generated_text: completion.text
452
+ };
453
+ }
454
+ throw new InferenceOutputError("Expected Featherless AI text generation response format");
455
+ }
456
+ };
457
+
428
458
  // src/providers/fireworks-ai.ts
429
459
  var FireworksConversationalTask = class extends BaseConversationalTask {
430
460
  constructor() {
@@ -435,6 +465,25 @@ var FireworksConversationalTask = class extends BaseConversationalTask {
435
465
  }
436
466
  };
437
467
 
468
+ // src/providers/groq.ts
469
+ var GROQ_API_BASE_URL = "https://api.groq.com";
470
+ var GroqTextGenerationTask = class extends BaseTextGenerationTask {
471
+ constructor() {
472
+ super("groq", GROQ_API_BASE_URL);
473
+ }
474
+ makeRoute() {
475
+ return "/openai/v1/chat/completions";
476
+ }
477
+ };
478
+ var GroqConversationalTask = class extends BaseConversationalTask {
479
+ constructor() {
480
+ super("groq", GROQ_API_BASE_URL);
481
+ }
482
+ makeRoute() {
483
+ return "/openai/v1/chat/completions";
484
+ }
485
+ };
486
+
438
487
  // src/providers/hf-inference.ts
439
488
  var EQUIVALENT_SENTENCE_TRANSFORMERS_TASKS = ["feature-extraction", "sentence-similarity"];
440
489
  var HFInferenceTask = class extends TaskProviderHelper {
@@ -884,6 +933,41 @@ var NovitaConversationalTask = class extends BaseConversationalTask {
884
933
  }
885
934
  };
886
935
 
936
+ // src/providers/nscale.ts
937
+ var NSCALE_API_BASE_URL = "https://inference.api.nscale.com";
938
+ var NscaleConversationalTask = class extends BaseConversationalTask {
939
+ constructor() {
940
+ super("nscale", NSCALE_API_BASE_URL);
941
+ }
942
+ };
943
+ var NscaleTextToImageTask = class extends TaskProviderHelper {
944
+ constructor() {
945
+ super("nscale", NSCALE_API_BASE_URL);
946
+ }
947
+ preparePayload(params) {
948
+ return {
949
+ ...omit(params.args, ["inputs", "parameters"]),
950
+ ...params.args.parameters,
951
+ response_format: "b64_json",
952
+ prompt: params.args.inputs,
953
+ model: params.model
954
+ };
955
+ }
956
+ makeRoute() {
957
+ return "v1/images/generations";
958
+ }
959
+ async getResponse(response, url, headers, outputType) {
960
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data) && response.data.length > 0 && "b64_json" in response.data[0] && typeof response.data[0].b64_json === "string") {
961
+ const base64Data = response.data[0].b64_json;
962
+ if (outputType === "url") {
963
+ return `data:image/jpeg;base64,${base64Data}`;
964
+ }
965
+ return fetch(`data:image/jpeg;base64,${base64Data}`).then((res) => res.blob());
966
+ }
967
+ throw new InferenceOutputError("Expected Nscale text-to-image response format");
968
+ }
969
+ };
970
+
887
971
  // src/providers/openai.ts
888
972
  var OPENAI_API_BASE_URL = "https://api.openai.com";
889
973
  var OpenAIConversationalTask = class extends BaseConversationalTask {
@@ -1082,6 +1166,10 @@ var PROVIDERS = {
1082
1166
  "text-to-video": new FalAITextToVideoTask(),
1083
1167
  "automatic-speech-recognition": new FalAIAutomaticSpeechRecognitionTask()
1084
1168
  },
1169
+ "featherless-ai": {
1170
+ conversational: new FeatherlessAIConversationalTask(),
1171
+ "text-generation": new FeatherlessAITextGenerationTask()
1172
+ },
1085
1173
  "hf-inference": {
1086
1174
  "text-to-image": new HFInferenceTextToImageTask(),
1087
1175
  conversational: new HFInferenceConversationalTask(),
@@ -1115,6 +1203,10 @@ var PROVIDERS = {
1115
1203
  "fireworks-ai": {
1116
1204
  conversational: new FireworksConversationalTask()
1117
1205
  },
1206
+ groq: {
1207
+ conversational: new GroqConversationalTask(),
1208
+ "text-generation": new GroqTextGenerationTask()
1209
+ },
1118
1210
  hyperbolic: {
1119
1211
  "text-to-image": new HyperbolicTextToImageTask(),
1120
1212
  conversational: new HyperbolicConversationalTask(),
@@ -1129,6 +1221,10 @@ var PROVIDERS = {
1129
1221
  conversational: new NovitaConversationalTask(),
1130
1222
  "text-generation": new NovitaTextGenerationTask()
1131
1223
  },
1224
+ nscale: {
1225
+ "text-to-image": new NscaleTextToImageTask(),
1226
+ conversational: new NscaleConversationalTask()
1227
+ },
1132
1228
  openai: {
1133
1229
  conversational: new OpenAIConversationalTask()
1134
1230
  },
@@ -1170,7 +1266,7 @@ function getProviderHelper(provider, task) {
1170
1266
 
1171
1267
  // package.json
1172
1268
  var name = "@huggingface/inference";
1173
- var version = "3.8.2";
1269
+ var version = "3.9.0";
1174
1270
 
1175
1271
  // src/providers/consts.ts
1176
1272
  var HARDCODED_MODEL_INFERENCE_MAPPING = {
@@ -1184,11 +1280,14 @@ var HARDCODED_MODEL_INFERENCE_MAPPING = {
1184
1280
  cerebras: {},
1185
1281
  cohere: {},
1186
1282
  "fal-ai": {},
1283
+ "featherless-ai": {},
1187
1284
  "fireworks-ai": {},
1285
+ groq: {},
1188
1286
  "hf-inference": {},
1189
1287
  hyperbolic: {},
1190
1288
  nebius: {},
1191
1289
  novita: {},
1290
+ nscale: {},
1192
1291
  openai: {},
1193
1292
  replicate: {},
1194
1293
  sambanova: {},
@@ -2107,11 +2206,14 @@ var INFERENCE_PROVIDERS = [
2107
2206
  "cerebras",
2108
2207
  "cohere",
2109
2208
  "fal-ai",
2209
+ "featherless-ai",
2110
2210
  "fireworks-ai",
2211
+ "groq",
2111
2212
  "hf-inference",
2112
2213
  "hyperbolic",
2113
2214
  "nebius",
2114
2215
  "novita",
2216
+ "nscale",
2115
2217
  "openai",
2116
2218
  "replicate",
2117
2219
  "sambanova",
@@ -1,6 +1,6 @@
1
1
  import type { WidgetType } from "@huggingface/tasks";
2
2
  import type { InferenceProvider, ModelId } from "../types";
3
- export declare const inferenceProviderMappingCache: Map<string, Partial<Record<"black-forest-labs" | "cerebras" | "cohere" | "fal-ai" | "fireworks-ai" | "hf-inference" | "hyperbolic" | "nebius" | "novita" | "openai" | "replicate" | "sambanova" | "together", Omit<InferenceProviderModelMapping, "hfModelId" | "adapterWeightsPath">>>>;
3
+ export declare const inferenceProviderMappingCache: Map<string, Partial<Record<"black-forest-labs" | "cerebras" | "cohere" | "fal-ai" | "featherless-ai" | "fireworks-ai" | "groq" | "hf-inference" | "hyperbolic" | "nebius" | "novita" | "nscale" | "openai" | "replicate" | "sambanova" | "together", Omit<InferenceProviderModelMapping, "hfModelId" | "adapterWeightsPath">>>>;
4
4
  export type InferenceProviderMapping = Partial<Record<InferenceProvider, Omit<InferenceProviderModelMapping, "hfModelId" | "adapterWeightsPath">>>;
5
5
  export interface InferenceProviderModelMapping {
6
6
  adapter?: string;
@@ -1 +1 @@
1
- {"version":3,"file":"getInferenceProviderMapping.d.ts","sourceRoot":"","sources":["../../../src/lib/getInferenceProviderMapping.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,iBAAiB,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAM3D,eAAO,MAAM,6BAA6B,0RAA+C,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG,OAAO,CAC7C,MAAM,CAAC,iBAAiB,EAAE,IAAI,CAAC,6BAA6B,EAAE,WAAW,GAAG,oBAAoB,CAAC,CAAC,CAClG,CAAC;AAEF,MAAM,WAAW,6BAA6B;IAC7C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,SAAS,EAAE,OAAO,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,IAAI,EAAE,UAAU,CAAC;CACjB;AAED,wBAAsB,2BAA2B,CAChD,MAAM,EAAE;IACP,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,iBAAiB,CAAC;IAC5B,IAAI,EAAE,UAAU,CAAC;CACjB,EACD,OAAO,EAAE;IACR,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;CACtE,GACC,OAAO,CAAC,6BAA6B,GAAG,IAAI,CAAC,CA+D/C"}
1
+ {"version":3,"file":"getInferenceProviderMapping.d.ts","sourceRoot":"","sources":["../../../src/lib/getInferenceProviderMapping.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,iBAAiB,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAM3D,eAAO,MAAM,6BAA6B,iUAA+C,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG,OAAO,CAC7C,MAAM,CAAC,iBAAiB,EAAE,IAAI,CAAC,6BAA6B,EAAE,WAAW,GAAG,oBAAoB,CAAC,CAAC,CAClG,CAAC;AAEF,MAAM,WAAW,6BAA6B;IAC7C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,SAAS,EAAE,OAAO,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,IAAI,EAAE,UAAU,CAAC;CACjB;AAED,wBAAsB,2BAA2B,CAChD,MAAM,EAAE;IACP,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,iBAAiB,CAAC;IAC5B,IAAI,EAAE,UAAU,CAAC;CACjB,EACD,OAAO,EAAE;IACR,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;CACtE,GACC,OAAO,CAAC,6BAA6B,GAAG,IAAI,CAAC,CA+D/C"}
@@ -1 +1 @@
1
- {"version":3,"file":"getProviderHelper.d.ts","sourceRoot":"","sources":["../../../src/lib/getProviderHelper.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EACX,6BAA6B,EAC7B,sBAAsB,EACtB,oCAAoC,EACpC,wBAAwB,EACxB,mCAAmC,EACnC,2BAA2B,EAC3B,kBAAkB,EAClB,6BAA6B,EAC7B,2BAA2B,EAC3B,sBAAsB,EACtB,qBAAqB,EACrB,yBAAyB,EACzB,2BAA2B,EAC3B,4BAA4B,EAC5B,uBAAuB,EACvB,gCAAgC,EAChC,+BAA+B,EAC/B,2BAA2B,EAC3B,kBAAkB,EAClB,4BAA4B,EAC5B,wBAAwB,EACxB,qBAAqB,EACrB,qBAAqB,EACrB,sBAAsB,EACtB,qBAAqB,EACrB,6BAA6B,EAC7B,qBAAqB,EACrB,iCAAiC,EACjC,gCAAgC,EAChC,qCAAqC,EACrC,MAAM,6BAA6B,CAAC;AAIrC,OAAO,KAAK,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEjE,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,iBAAiB,EAAE,OAAO,CAAC,MAAM,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC,CAgFnG,CAAC;AAEF;;GAEG;AACH,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,iBAAiB,GACrB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,8BAA8B,GAClC,oCAAoC,GAAG,kBAAkB,CAAC;AAC7D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,WAAW,GACf,kBAAkB,GAAG,kBAAkB,CAAC;AAC3C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,6BAA6B,GACjC,mCAAmC,GAAG,kBAAkB,CAAC;AAC5D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,kBAAkB,GACtB,yBAAyB,GAAG,kBAAkB,CAAC;AAClD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gCAAgC,GACpC,qCAAqC,GAAG,kBAAkB,CAAC;AAC9D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,wBAAwB,GAC5B,+BAA+B,GAAG,kBAAkB,CAAC;AACxD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,aAAa,GACjB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,uBAAuB,GAAG,kBAAkB,CAAC;AAChD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,2BAA2B,GAC/B,iCAAiC,GAAG,kBAAkB,CAAC;AAC1D,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,iBAAiB,EAAE
,IAAI,EAAE,aAAa,GAAG,SAAS,GAAG,kBAAkB,CAAC"}
1
+ {"version":3,"file":"getProviderHelper.d.ts","sourceRoot":"","sources":["../../../src/lib/getProviderHelper.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,EACX,6BAA6B,EAC7B,sBAAsB,EACtB,oCAAoC,EACpC,wBAAwB,EACxB,mCAAmC,EACnC,2BAA2B,EAC3B,kBAAkB,EAClB,6BAA6B,EAC7B,2BAA2B,EAC3B,sBAAsB,EACtB,qBAAqB,EACrB,yBAAyB,EACzB,2BAA2B,EAC3B,4BAA4B,EAC5B,uBAAuB,EACvB,gCAAgC,EAChC,+BAA+B,EAC/B,2BAA2B,EAC3B,kBAAkB,EAClB,4BAA4B,EAC5B,wBAAwB,EACxB,qBAAqB,EACrB,qBAAqB,EACrB,sBAAsB,EACtB,qBAAqB,EACrB,6BAA6B,EAC7B,qBAAqB,EACrB,iCAAiC,EACjC,gCAAgC,EAChC,qCAAqC,EACrC,MAAM,6BAA6B,CAAC;AAIrC,OAAO,KAAK,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEjE,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,iBAAiB,EAAE,OAAO,CAAC,MAAM,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC,CA4FnG,CAAC;AAEF;;GAEG;AACH,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,iBAAiB,GACrB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,8BAA8B,GAClC,oCAAoC,GAAG,kBAAkB,CAAC;AAC7D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,WAAW,GACf,kBAAkB,GAAG,kBAAkB,CAAC;AAC3C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,6BAA6B,GACjC,mCAAmC,GAAG,kBAAkB,CAAC;AAC5D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,kBAAkB,GACtB,yBAAyB,GAAG,kBAAkB,CAAC;AAClD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gCAAgC,GACpC,qCAAqC,GAAG,kBAAkB,CAAC;AAC9D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,wBAAwB,GAC5B,+BAA+B,GAAG,kBAAkB,CAAC;AACxD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,aAAa,GACjB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,uBAAuB,GAAG,kBAAkB,CAAC;AAChD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,2BAA2B,GAC/B,iCAAiC,GAAG,kBAAkB,CAAC;AAC1D,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,iBAAiB,EAAE
,IAAI,EAAE,aAAa,GAAG,SAAS,GAAG,kBAAkB,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"consts.d.ts","sourceRoot":"","sources":["../../../src/providers/consts.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,oCAAoC,CAAC;AACxF,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAClD,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,UAAU,CAAC;AAExC;;;;;;GAMG;AACH,eAAO,MAAM,iCAAiC,EAAE,MAAM,CACrD,iBAAiB,EACjB,MAAM,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAqB9C,CAAC"}
1
+ {"version":3,"file":"consts.d.ts","sourceRoot":"","sources":["../../../src/providers/consts.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,oCAAoC,CAAC;AACxF,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAClD,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,UAAU,CAAC;AAExC;;;;;;GAMG;AACH,eAAO,MAAM,iCAAiC,EAAE,MAAM,CACrD,iBAAiB,EACjB,MAAM,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAwB9C,CAAC"}
@@ -0,0 +1,22 @@
1
+ import type { ChatCompletionOutput, TextGenerationInput, TextGenerationOutput, TextGenerationOutputFinishReason } from "@huggingface/tasks";
2
+ import type { BodyParams } from "../types";
3
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
4
+ interface FeatherlessAITextCompletionOutput extends Omit<ChatCompletionOutput, "choices"> {
5
+ choices: Array<{
6
+ text: string;
7
+ finish_reason: TextGenerationOutputFinishReason;
8
+ seed: number;
9
+ logprobs: unknown;
10
+ index: number;
11
+ }>;
12
+ }
13
+ export declare class FeatherlessAIConversationalTask extends BaseConversationalTask {
14
+ constructor();
15
+ }
16
+ export declare class FeatherlessAITextGenerationTask extends BaseTextGenerationTask {
17
+ constructor();
18
+ preparePayload(params: BodyParams<TextGenerationInput>): Record<string, unknown>;
19
+ getResponse(response: FeatherlessAITextCompletionOutput): Promise<TextGenerationOutput>;
20
+ }
21
+ export {};
22
+ //# sourceMappingURL=featherless-ai.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"featherless-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/featherless-ai.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,oBAAoB,EACpB,mBAAmB,EACnB,oBAAoB,EACpB,gCAAgC,EAChC,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAC3C,OAAO,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAElF,UAAU,iCAAkC,SAAQ,IAAI,CAAC,oBAAoB,EAAE,SAAS,CAAC;IACxF,OAAO,EAAE,KAAK,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;QACb,aAAa,EAAE,gCAAgC,CAAC;QAChD,IAAI,EAAE,MAAM,CAAC;QACb,QAAQ,EAAE,OAAO,CAAC;QAClB,KAAK,EAAE,MAAM,CAAC;KACd,CAAC,CAAC;CACH;AAID,qBAAa,+BAAgC,SAAQ,sBAAsB;;CAI1E;AAED,qBAAa,+BAAgC,SAAQ,sBAAsB;;IAKjE,cAAc,CAAC,MAAM,EAAE,UAAU,CAAC,mBAAmB,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAS1E,WAAW,CAAC,QAAQ,EAAE,iCAAiC,GAAG,OAAO,CAAC,oBAAoB,CAAC;CActG"}
@@ -0,0 +1,10 @@
1
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
2
+ export declare class GroqTextGenerationTask extends BaseTextGenerationTask {
3
+ constructor();
4
+ makeRoute(): string;
5
+ }
6
+ export declare class GroqConversationalTask extends BaseConversationalTask {
7
+ constructor();
8
+ makeRoute(): string;
9
+ }
10
+ //# sourceMappingURL=groq.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"groq.d.ts","sourceRoot":"","sources":["../../../src/providers/groq.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAqBlF,qBAAa,sBAAuB,SAAQ,sBAAsB;;IAKxD,SAAS,IAAI,MAAM;CAG5B;AAED,qBAAa,sBAAuB,SAAQ,sBAAsB;;IAKxD,SAAS,IAAI,MAAM;CAG5B"}
@@ -0,0 +1,35 @@
1
+ /**
2
+ * See the registered mapping of HF model ID => Nscale model ID here:
3
+ *
4
+ * https://huggingface.co/api/partners/nscale-cloud/models
5
+ *
6
+ * This is a publicly available mapping.
7
+ *
8
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
9
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
10
+ *
11
+ * - If you work at Nscale and want to update this mapping, please use the model mapping API we provide on huggingface.co
12
+ * - If you're a community member and want to add a new supported HF model to Nscale, please open an issue on the present repo
13
+ * and we will tag Nscale team members.
14
+ *
15
+ * Thanks!
16
+ */
17
+ import type { TextToImageInput } from "@huggingface/tasks";
18
+ import type { BodyParams } from "../types";
19
+ import { BaseConversationalTask, TaskProviderHelper, type TextToImageTaskHelper } from "./providerHelper";
20
+ interface NscaleCloudBase64ImageGeneration {
21
+ data: Array<{
22
+ b64_json: string;
23
+ }>;
24
+ }
25
+ export declare class NscaleConversationalTask extends BaseConversationalTask {
26
+ constructor();
27
+ }
28
+ export declare class NscaleTextToImageTask extends TaskProviderHelper implements TextToImageTaskHelper {
29
+ constructor();
30
+ preparePayload(params: BodyParams<TextToImageInput>): Record<string, unknown>;
31
+ makeRoute(): string;
32
+ getResponse(response: NscaleCloudBase64ImageGeneration, url?: string, headers?: HeadersInit, outputType?: "url" | "blob"): Promise<string | Blob>;
33
+ }
34
+ export {};
35
+ //# sourceMappingURL=nscale.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"nscale.d.ts","sourceRoot":"","sources":["../../../src/providers/nscale.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAE3D,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAE3C,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,KAAK,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAI1G,UAAU,gCAAgC;IACzC,IAAI,EAAE,KAAK,CAAC;QACX,QAAQ,EAAE,MAAM,CAAC;KACjB,CAAC,CAAC;CACH;AAED,qBAAa,wBAAyB,SAAQ,sBAAsB;;CAInE;AAED,qBAAa,qBAAsB,SAAQ,kBAAmB,YAAW,qBAAqB;;IAK7F,cAAc,CAAC,MAAM,EAAE,UAAU,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAU7E,SAAS,IAAI,MAAM;IAIb,WAAW,CAChB,QAAQ,EAAE,gCAAgC,EAC1C,GAAG,CAAC,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,WAAW,EACrB,UAAU,CAAC,EAAE,KAAK,GAAG,MAAM,GACzB,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;CAkBzB"}
@@ -30,7 +30,7 @@ export interface Options {
30
30
  billTo?: string;
31
31
  }
32
32
  export type InferenceTask = Exclude<PipelineType, "other"> | "conversational";
33
- export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cerebras", "cohere", "fal-ai", "fireworks-ai", "hf-inference", "hyperbolic", "nebius", "novita", "openai", "replicate", "sambanova", "together"];
33
+ export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cerebras", "cohere", "fal-ai", "featherless-ai", "fireworks-ai", "groq", "hf-inference", "hyperbolic", "nebius", "novita", "nscale", "openai", "replicate", "sambanova", "together"];
34
34
  export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];
35
35
  export interface BaseArgs {
36
36
  /**
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,mCAAmC,CAAC;AAEvF;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;IAEtC;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,GAAG,gBAAgB,CAAC;AAE9E,eAAO,MAAM,mBAAmB,kLActB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrC,CAAC;AAEH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG,UAAU,GAAG,qBAAqB,GAAG,cAAc,CAAC;AAEtF,MAAM,WAAW,YAAY;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,UAAU,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACzB,UAAU,EAAE,UAAU,CAAC;IACvB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB;AAED,MAAM,WAAW,UAAU,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IACtF,IAAI,EAAE,CAAC,CAAC;IACR,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,CAAC;IACpD,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,mCAAmC,CAAC;AAEvF;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;IAEtC;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,GAAG,gBAAgB,CAAC;AAE9E,eAAO,MAAM,mBAAmB,sNAiBtB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrC,CAAC;AAEH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG,UAAU,GAAG,qBAAqB,GAAG,cAAc,CAAC;AAEtF,MAAM,WAAW,YAAY;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,UAAU,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACzB,UAAU,EAAE,UAAU,CAAC;IACvB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB;AAED,MAAM,WAAW,UAAU,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IACtF,IAAI,EAAE,CAAC,CAAC;IACR,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,CAAC;IACpD,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB"}
@@ -1,2 +1,2 @@
1
- import "./vcr";
1
+ export {};
2
2
  //# sourceMappingURL=InferenceClient.spec.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"InferenceClient.spec.d.ts","sourceRoot":"","sources":["../../test/InferenceClient.spec.ts"],"names":[],"mappings":"AAgBA,OAAO,OAAO,CAAC"}
1
+ {"version":3,"file":"InferenceClient.spec.d.ts","sourceRoot":"","sources":["../../test/InferenceClient.spec.ts"],"names":[],"mappings":""}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@huggingface/inference",
3
- "version": "3.8.2",
3
+ "version": "3.9.0",
4
4
  "packageManager": "pnpm@8.10.5",
5
5
  "license": "MIT",
6
6
  "author": "Hugging Face and Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -40,8 +40,8 @@
40
40
  },
41
41
  "type": "module",
42
42
  "dependencies": {
43
- "@huggingface/tasks": "^0.18.10",
44
- "@huggingface/jinja": "^0.3.4"
43
+ "@huggingface/jinja": "^0.3.4",
44
+ "@huggingface/tasks": "^0.18.10"
45
45
  },
46
46
  "devDependencies": {
47
47
  "@types/node": "18.13.0"
package/src/lib/getProviderHelper.ts CHANGED
@@ -2,12 +2,14 @@ import * as BlackForestLabs from "../providers/black-forest-labs";
2
2
  import * as Cerebras from "../providers/cerebras";
3
3
  import * as Cohere from "../providers/cohere";
4
4
  import * as FalAI from "../providers/fal-ai";
5
+ import * as FeatherlessAI from "../providers/featherless-ai";
5
6
  import * as Fireworks from "../providers/fireworks-ai";
7
+ import * as Groq from "../providers/groq";
6
8
  import * as HFInference from "../providers/hf-inference";
7
-
8
9
  import * as Hyperbolic from "../providers/hyperbolic";
9
10
  import * as Nebius from "../providers/nebius";
10
11
  import * as Novita from "../providers/novita";
12
+ import * as Nscale from "../providers/nscale";
11
13
  import * as OpenAI from "../providers/openai";
12
14
  import type {
13
15
  AudioClassificationTaskHelper,
@@ -62,6 +64,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
62
64
  "text-to-video": new FalAI.FalAITextToVideoTask(),
63
65
  "automatic-speech-recognition": new FalAI.FalAIAutomaticSpeechRecognitionTask(),
64
66
  },
67
+ "featherless-ai": {
68
+ conversational: new FeatherlessAI.FeatherlessAIConversationalTask(),
69
+ "text-generation": new FeatherlessAI.FeatherlessAITextGenerationTask(),
70
+ },
65
71
  "hf-inference": {
66
72
  "text-to-image": new HFInference.HFInferenceTextToImageTask(),
67
73
  conversational: new HFInference.HFInferenceConversationalTask(),
@@ -95,6 +101,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
95
101
  "fireworks-ai": {
96
102
  conversational: new Fireworks.FireworksConversationalTask(),
97
103
  },
104
+ groq: {
105
+ conversational: new Groq.GroqConversationalTask(),
106
+ "text-generation": new Groq.GroqTextGenerationTask(),
107
+ },
98
108
  hyperbolic: {
99
109
  "text-to-image": new Hyperbolic.HyperbolicTextToImageTask(),
100
110
  conversational: new Hyperbolic.HyperbolicConversationalTask(),
@@ -109,6 +119,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
109
119
  conversational: new Novita.NovitaConversationalTask(),
110
120
  "text-generation": new Novita.NovitaTextGenerationTask(),
111
121
  },
122
+ nscale: {
123
+ "text-to-image": new Nscale.NscaleTextToImageTask(),
124
+ conversational: new Nscale.NscaleConversationalTask(),
125
+ },
112
126
  openai: {
113
127
  conversational: new OpenAI.OpenAIConversationalTask(),
114
128
  },
package/src/providers/consts.ts CHANGED
@@ -23,11 +23,14 @@ export const HARDCODED_MODEL_INFERENCE_MAPPING: Record<
23
23
  cerebras: {},
24
24
  cohere: {},
25
25
  "fal-ai": {},
26
+ "featherless-ai": {},
26
27
  "fireworks-ai": {},
28
+ groq: {},
27
29
  "hf-inference": {},
28
30
  hyperbolic: {},
29
31
  nebius: {},
30
32
  novita: {},
33
+ nscale: {},
31
34
  openai: {},
32
35
  replicate: {},
33
36
  sambanova: {},
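
The empty objects added above reserve slots for the new providers in `HARDCODED_MODEL_INFERENCE_MAPPING`, which the doc comments in the new provider files below describe as the place to hardcode a model mapping locally before it is registered on huggingface.co. A hypothetical dev-only entry could look like the sketch below; every ID is invented, and the field names follow the `InferenceProviderModelMapping` interface from `getInferenceProviderMapping.ts`:

```ts
// Sketch of a dev-only mapping value (IDs invented for illustration). In practice it
// would sit under the `groq` key of HARDCODED_MODEL_INFERENCE_MAPPING in consts.ts.
import type { InferenceProviderModelMapping } from "../lib/getInferenceProviderMapping";

const devOnlyGroqEntry: Record<string, InferenceProviderModelMapping> = {
	"meta-llama/Llama-3.3-70B-Instruct": {
		hfModelId: "meta-llama/Llama-3.3-70B-Instruct",
		providerId: "llama-3.3-70b-versatile", // provider-side model name, hypothetical
		status: "live",
		task: "conversational",
	},
};
```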
package/src/providers/featherless-ai.ts ADDED
@@ -0,0 +1,57 @@
1
+ import type {
2
+ ChatCompletionOutput,
3
+ TextGenerationInput,
4
+ TextGenerationOutput,
5
+ TextGenerationOutputFinishReason,
6
+ } from "@huggingface/tasks";
7
+ import { InferenceOutputError } from "../lib/InferenceOutputError";
8
+ import type { BodyParams } from "../types";
9
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
10
+
11
+ interface FeatherlessAITextCompletionOutput extends Omit<ChatCompletionOutput, "choices"> {
12
+ choices: Array<{
13
+ text: string;
14
+ finish_reason: TextGenerationOutputFinishReason;
15
+ seed: number;
16
+ logprobs: unknown;
17
+ index: number;
18
+ }>;
19
+ }
20
+
21
+ const FEATHERLESS_API_BASE_URL = "https://api.featherless.ai";
22
+
23
+ export class FeatherlessAIConversationalTask extends BaseConversationalTask {
24
+ constructor() {
25
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
26
+ }
27
+ }
28
+
29
+ export class FeatherlessAITextGenerationTask extends BaseTextGenerationTask {
30
+ constructor() {
31
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
32
+ }
33
+
34
+ override preparePayload(params: BodyParams<TextGenerationInput>): Record<string, unknown> {
35
+ return {
36
+ ...params.args,
37
+ ...params.args.parameters,
38
+ model: params.model,
39
+ prompt: params.args.inputs,
40
+ };
41
+ }
42
+
43
+ override async getResponse(response: FeatherlessAITextCompletionOutput): Promise<TextGenerationOutput> {
44
+ if (
45
+ typeof response === "object" &&
46
+ "choices" in response &&
47
+ Array.isArray(response?.choices) &&
48
+ typeof response?.model === "string"
49
+ ) {
50
+ const completion = response.choices[0];
51
+ return {
52
+ generated_text: completion.text,
53
+ };
54
+ }
55
+ throw new InferenceOutputError("Expected Featherless AI text generation response format");
56
+ }
57
+ }
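
`FeatherlessAITextGenerationTask` turns the standard `text-generation` arguments into an OpenAI-style completion payload (`prompt` plus `model`) and maps `choices[0].text` back onto `generated_text`. From the caller's side that looks roughly like the sketch below; the model ID is illustrative and should be checked against the Featherless AI partner model list:

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token handling

// Routed through the FeatherlessAITextGenerationTask helper defined above.
const { generated_text } = await client.textGeneration({
	provider: "featherless-ai",
	model: "meta-llama/Llama-3.1-8B-Instruct", // illustrative model ID
	inputs: "Once upon a time,",
	parameters: { max_new_tokens: 64 },
});

console.log(generated_text);
```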
package/src/providers/groq.ts ADDED
@@ -0,0 +1,40 @@
1
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
2
+
3
+ /**
4
+ * See the registered mapping of HF model ID => Groq model ID here:
5
+ *
6
+ * https://huggingface.co/api/partners/groq/models
7
+ *
8
+ * This is a publicly available mapping.
9
+ *
10
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
11
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
12
+ *
13
+ * - If you work at Groq and want to update this mapping, please use the model mapping API we provide on huggingface.co
14
+ * - If you're a community member and want to add a new supported HF model to Groq, please open an issue on the present repo
15
+ * and we will tag Groq team members.
16
+ *
17
+ * Thanks!
18
+ */
19
+
20
+ const GROQ_API_BASE_URL = "https://api.groq.com";
21
+
22
+ export class GroqTextGenerationTask extends BaseTextGenerationTask {
23
+ constructor() {
24
+ super("groq", GROQ_API_BASE_URL);
25
+ }
26
+
27
+ override makeRoute(): string {
28
+ return "/openai/v1/chat/completions";
29
+ }
30
+ }
31
+
32
+ export class GroqConversationalTask extends BaseConversationalTask {
33
+ constructor() {
34
+ super("groq", GROQ_API_BASE_URL);
35
+ }
36
+
37
+ override makeRoute(): string {
38
+ return "/openai/v1/chat/completions";
39
+ }
40
+ }
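
Both Groq helpers only override `makeRoute()`, so conversational and text-generation requests alike go to Groq's OpenAI-compatible `/openai/v1/chat/completions` endpoint. Streaming runs through the same route; a short, non-authoritative sketch (model ID illustrative):

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token handling

// Each chunk comes back from https://api.groq.com/openai/v1/chat/completions
for await (const chunk of client.chatCompletionStream({
	provider: "groq",
	model: "meta-llama/Llama-3.3-70B-Instruct", // illustrative model ID
	messages: [{ role: "user", content: "Summarize this release in one sentence." }],
})) {
	process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}
```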
package/src/providers/nscale.ts ADDED
@@ -0,0 +1,79 @@
1
+ /**
2
+ * See the registered mapping of HF model ID => Nscale model ID here:
3
+ *
4
+ * https://huggingface.co/api/partners/nscale-cloud/models
5
+ *
6
+ * This is a publicly available mapping.
7
+ *
8
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
9
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
10
+ *
11
+ * - If you work at Nscale and want to update this mapping, please use the model mapping API we provide on huggingface.co
12
+ * - If you're a community member and want to add a new supported HF model to Nscale, please open an issue on the present repo
13
+ * and we will tag Nscale team members.
14
+ *
15
+ * Thanks!
16
+ */
17
+ import type { TextToImageInput } from "@huggingface/tasks";
18
+ import { InferenceOutputError } from "../lib/InferenceOutputError";
19
+ import type { BodyParams } from "../types";
20
+ import { omit } from "../utils/omit";
21
+ import { BaseConversationalTask, TaskProviderHelper, type TextToImageTaskHelper } from "./providerHelper";
22
+
23
+ const NSCALE_API_BASE_URL = "https://inference.api.nscale.com";
24
+
25
+ interface NscaleCloudBase64ImageGeneration {
26
+ data: Array<{
27
+ b64_json: string;
28
+ }>;
29
+ }
30
+
31
+ export class NscaleConversationalTask extends BaseConversationalTask {
32
+ constructor() {
33
+ super("nscale", NSCALE_API_BASE_URL);
34
+ }
35
+ }
36
+
37
+ export class NscaleTextToImageTask extends TaskProviderHelper implements TextToImageTaskHelper {
38
+ constructor() {
39
+ super("nscale", NSCALE_API_BASE_URL);
40
+ }
41
+
42
+ preparePayload(params: BodyParams<TextToImageInput>): Record<string, unknown> {
43
+ return {
44
+ ...omit(params.args, ["inputs", "parameters"]),
45
+ ...params.args.parameters,
46
+ response_format: "b64_json",
47
+ prompt: params.args.inputs,
48
+ model: params.model,
49
+ };
50
+ }
51
+
52
+ makeRoute(): string {
53
+ return "v1/images/generations";
54
+ }
55
+
56
+ async getResponse(
57
+ response: NscaleCloudBase64ImageGeneration,
58
+ url?: string,
59
+ headers?: HeadersInit,
60
+ outputType?: "url" | "blob"
61
+ ): Promise<string | Blob> {
62
+ if (
63
+ typeof response === "object" &&
64
+ "data" in response &&
65
+ Array.isArray(response.data) &&
66
+ response.data.length > 0 &&
67
+ "b64_json" in response.data[0] &&
68
+ typeof response.data[0].b64_json === "string"
69
+ ) {
70
+ const base64Data = response.data[0].b64_json;
71
+ if (outputType === "url") {
72
+ return `data:image/jpeg;base64,${base64Data}`;
73
+ }
74
+ return fetch(`data:image/jpeg;base64,${base64Data}`).then((res) => res.blob());
75
+ }
76
+
77
+ throw new InferenceOutputError("Expected Nscale text-to-image response format");
78
+ }
79
+ }
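
`NscaleTextToImageTask` always requests a base64 payload (`response_format: "b64_json"`) and then either hands back a `data:` URL or converts it to a `Blob` via `fetch`. A minimal client-side sketch, assuming the usual `textToImage` call shape (model ID illustrative):

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token handling

// Returns a Blob built from the base64 image in the Nscale response; the helper
// above can also produce a data: URL when the caller asks for "url" output.
const image = await client.textToImage({
	provider: "nscale",
	model: "black-forest-labs/FLUX.1-schnell", // illustrative model ID
	inputs: "An astronaut riding a horse, watercolor",
});

console.log(image);
```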
package/src/types.ts CHANGED
@@ -42,11 +42,14 @@ export const INFERENCE_PROVIDERS = [
42
42
  "cerebras",
43
43
  "cohere",
44
44
  "fal-ai",
45
+ "featherless-ai",
45
46
  "fireworks-ai",
47
+ "groq",
46
48
  "hf-inference",
47
49
  "hyperbolic",
48
50
  "nebius",
49
51
  "novita",
52
+ "nscale",
50
53
  "openai",
51
54
  "replicate",
52
55
  "sambanova",
@@ -1,2 +0,0 @@
1
- export {};
2
- //# sourceMappingURL=vcr.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"vcr.d.ts","sourceRoot":"","sources":["../../test/vcr.ts"],"names":[],"mappings":""}