@huggingface/inference 3.8.1 → 3.9.0

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/README.md CHANGED
@@ -48,16 +48,19 @@ You can send inference requests to third-party providers with the inference clie
48
48
 
49
49
  Currently, we support the following providers:
50
50
  - [Fal.ai](https://fal.ai)
51
+ - [Featherless AI](https://featherless.ai)
51
52
  - [Fireworks AI](https://fireworks.ai)
52
53
  - [Hyperbolic](https://hyperbolic.xyz)
53
54
  - [Nebius](https://studio.nebius.ai)
54
55
  - [Novita](https://novita.ai/?utm_source=github_huggingface&utm_medium=github_readme&utm_campaign=link)
56
+ - [Nscale](https://nscale.com)
55
57
  - [Replicate](https://replicate.com)
56
58
  - [Sambanova](https://sambanova.ai)
57
59
  - [Together](https://together.xyz)
58
60
  - [Blackforestlabs](https://blackforestlabs.ai)
59
61
  - [Cohere](https://cohere.com)
60
62
  - [Cerebras](https://cerebras.ai/)
63
+ - [Groq](https://groq.com)
61
64
 
62
65
  To send requests to a third-party provider, you have to pass the `provider` parameter to the inference function. Make sure your request is authenticated with an access token.
63
66
  ```ts
@@ -76,14 +79,17 @@ When authenticated with a third-party provider key, the request is made directly
76
79
 
77
80
  Only a subset of models are supported when requesting third-party providers. You can check the list of supported models per pipeline tasks here:
78
81
  - [Fal.ai supported models](https://huggingface.co/api/partners/fal-ai/models)
82
+ - [Featherless AI supported models](https://huggingface.co/api/partners/featherless-ai/models)
79
83
  - [Fireworks AI supported models](https://huggingface.co/api/partners/fireworks-ai/models)
80
84
  - [Hyperbolic supported models](https://huggingface.co/api/partners/hyperbolic/models)
81
85
  - [Nebius supported models](https://huggingface.co/api/partners/nebius/models)
86
+ - [Nscale supported models](https://huggingface.co/api/partners/nscale/models)
82
87
  - [Replicate supported models](https://huggingface.co/api/partners/replicate/models)
83
88
  - [Sambanova supported models](https://huggingface.co/api/partners/sambanova/models)
84
89
  - [Together supported models](https://huggingface.co/api/partners/together/models)
85
90
  - [Cohere supported models](https://huggingface.co/api/partners/cohere/models)
86
91
  - [Cerebras supported models](https://huggingface.co/api/partners/cerebras/models)
92
+ - [Groq supported models](https://console.groq.com/docs/models)
87
93
  - [HF Inference API (serverless)](https://huggingface.co/models?inference=warm&sort=trending)
88
94
 
89
95
  ❗**Important note:** To be compatible, the third-party API must adhere to the "standard" shape API we expect on HF model pages for each pipeline task type.
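Editor's note on the README change above: the three providers added in this release (`featherless-ai`, `groq`, `nscale`) are selected the same way as the existing ones, via the `provider` parameter. A minimal sketch, assuming the `InferenceClient` API of the 3.x line; the token variable and model ID are illustrative placeholders, not values taken from this diff:

```ts
import { InferenceClient } from "@huggingface/inference";

// HF_TOKEN and the model ID below are placeholders; the model must be one
// that the chosen provider actually serves (see the per-provider lists above).
const client = new InferenceClient(process.env.HF_TOKEN);

const out = await client.chatCompletion({
	provider: "featherless-ai", // or "groq", "nscale", or any provider listed above
	model: "meta-llama/Llama-3.1-8B-Instruct",
	messages: [{ role: "user", content: "What is the capital of France?" }],
});

console.log(out.choices[0].message.content);
```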
package/dist/index.cjs CHANGED
@@ -458,7 +458,7 @@ var FalAITextToSpeechTask = class extends FalAITask {
458
458
  return {
459
459
  ...omit(params.args, ["inputs", "parameters"]),
460
460
  ...params.args.parameters,
461
- lyrics: params.args.inputs
461
+ text: params.args.inputs
462
462
  };
463
463
  }
464
464
  async getResponse(response) {
@@ -482,6 +482,36 @@ var FalAITextToSpeechTask = class extends FalAITask {
482
482
  }
483
483
  };
484
484
 
485
+ // src/providers/featherless-ai.ts
486
+ var FEATHERLESS_API_BASE_URL = "https://api.featherless.ai";
487
+ var FeatherlessAIConversationalTask = class extends BaseConversationalTask {
488
+ constructor() {
489
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
490
+ }
491
+ };
492
+ var FeatherlessAITextGenerationTask = class extends BaseTextGenerationTask {
493
+ constructor() {
494
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
495
+ }
496
+ preparePayload(params) {
497
+ return {
498
+ ...params.args,
499
+ ...params.args.parameters,
500
+ model: params.model,
501
+ prompt: params.args.inputs
502
+ };
503
+ }
504
+ async getResponse(response) {
505
+ if (typeof response === "object" && "choices" in response && Array.isArray(response?.choices) && typeof response?.model === "string") {
506
+ const completion = response.choices[0];
507
+ return {
508
+ generated_text: completion.text
509
+ };
510
+ }
511
+ throw new InferenceOutputError("Expected Featherless AI text generation response format");
512
+ }
513
+ };
514
+
485
515
  // src/providers/fireworks-ai.ts
486
516
  var FireworksConversationalTask = class extends BaseConversationalTask {
487
517
  constructor() {
@@ -492,6 +522,25 @@ var FireworksConversationalTask = class extends BaseConversationalTask {
492
522
  }
493
523
  };
494
524
 
525
+ // src/providers/groq.ts
526
+ var GROQ_API_BASE_URL = "https://api.groq.com";
527
+ var GroqTextGenerationTask = class extends BaseTextGenerationTask {
528
+ constructor() {
529
+ super("groq", GROQ_API_BASE_URL);
530
+ }
531
+ makeRoute() {
532
+ return "/openai/v1/chat/completions";
533
+ }
534
+ };
535
+ var GroqConversationalTask = class extends BaseConversationalTask {
536
+ constructor() {
537
+ super("groq", GROQ_API_BASE_URL);
538
+ }
539
+ makeRoute() {
540
+ return "/openai/v1/chat/completions";
541
+ }
542
+ };
543
+
495
544
  // src/providers/hf-inference.ts
496
545
  var EQUIVALENT_SENTENCE_TRANSFORMERS_TASKS = ["feature-extraction", "sentence-similarity"];
497
546
  var HFInferenceTask = class extends TaskProviderHelper {
@@ -941,6 +990,41 @@ var NovitaConversationalTask = class extends BaseConversationalTask {
941
990
  }
942
991
  };
943
992
 
993
+ // src/providers/nscale.ts
994
+ var NSCALE_API_BASE_URL = "https://inference.api.nscale.com";
995
+ var NscaleConversationalTask = class extends BaseConversationalTask {
996
+ constructor() {
997
+ super("nscale", NSCALE_API_BASE_URL);
998
+ }
999
+ };
1000
+ var NscaleTextToImageTask = class extends TaskProviderHelper {
1001
+ constructor() {
1002
+ super("nscale", NSCALE_API_BASE_URL);
1003
+ }
1004
+ preparePayload(params) {
1005
+ return {
1006
+ ...omit(params.args, ["inputs", "parameters"]),
1007
+ ...params.args.parameters,
1008
+ response_format: "b64_json",
1009
+ prompt: params.args.inputs,
1010
+ model: params.model
1011
+ };
1012
+ }
1013
+ makeRoute() {
1014
+ return "v1/images/generations";
1015
+ }
1016
+ async getResponse(response, url, headers, outputType) {
1017
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data) && response.data.length > 0 && "b64_json" in response.data[0] && typeof response.data[0].b64_json === "string") {
1018
+ const base64Data = response.data[0].b64_json;
1019
+ if (outputType === "url") {
1020
+ return `data:image/jpeg;base64,${base64Data}`;
1021
+ }
1022
+ return fetch(`data:image/jpeg;base64,${base64Data}`).then((res) => res.blob());
1023
+ }
1024
+ throw new InferenceOutputError("Expected Nscale text-to-image response format");
1025
+ }
1026
+ };
1027
+
944
1028
  // src/providers/openai.ts
945
1029
  var OPENAI_API_BASE_URL = "https://api.openai.com";
946
1030
  var OpenAIConversationalTask = class extends BaseConversationalTask {
@@ -1042,6 +1126,29 @@ var SambanovaConversationalTask = class extends BaseConversationalTask {
1042
1126
  super("sambanova", "https://api.sambanova.ai");
1043
1127
  }
1044
1128
  };
1129
+ var SambanovaFeatureExtractionTask = class extends TaskProviderHelper {
1130
+ constructor() {
1131
+ super("sambanova", "https://api.sambanova.ai");
1132
+ }
1133
+ makeRoute() {
1134
+ return `/v1/embeddings`;
1135
+ }
1136
+ async getResponse(response) {
1137
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data)) {
1138
+ return response.data.map((item) => item.embedding);
1139
+ }
1140
+ throw new InferenceOutputError(
1141
+ "Expected Sambanova feature-extraction (embeddings) response format to be {'data' : list of {'embedding' : number[]}}"
1142
+ );
1143
+ }
1144
+ preparePayload(params) {
1145
+ return {
1146
+ model: params.model,
1147
+ input: params.args.inputs,
1148
+ ...params.args
1149
+ };
1150
+ }
1151
+ };
1045
1152
 
1046
1153
  // src/providers/together.ts
1047
1154
  var TOGETHER_API_BASE_URL = "https://api.together.xyz";
@@ -1116,6 +1223,10 @@ var PROVIDERS = {
1116
1223
  "text-to-video": new FalAITextToVideoTask(),
1117
1224
  "automatic-speech-recognition": new FalAIAutomaticSpeechRecognitionTask()
1118
1225
  },
1226
+ "featherless-ai": {
1227
+ conversational: new FeatherlessAIConversationalTask(),
1228
+ "text-generation": new FeatherlessAITextGenerationTask()
1229
+ },
1119
1230
  "hf-inference": {
1120
1231
  "text-to-image": new HFInferenceTextToImageTask(),
1121
1232
  conversational: new HFInferenceConversationalTask(),
@@ -1149,6 +1260,10 @@ var PROVIDERS = {
1149
1260
  "fireworks-ai": {
1150
1261
  conversational: new FireworksConversationalTask()
1151
1262
  },
1263
+ groq: {
1264
+ conversational: new GroqConversationalTask(),
1265
+ "text-generation": new GroqTextGenerationTask()
1266
+ },
1152
1267
  hyperbolic: {
1153
1268
  "text-to-image": new HyperbolicTextToImageTask(),
1154
1269
  conversational: new HyperbolicConversationalTask(),
@@ -1163,6 +1278,10 @@ var PROVIDERS = {
1163
1278
  conversational: new NovitaConversationalTask(),
1164
1279
  "text-generation": new NovitaTextGenerationTask()
1165
1280
  },
1281
+ nscale: {
1282
+ "text-to-image": new NscaleTextToImageTask(),
1283
+ conversational: new NscaleConversationalTask()
1284
+ },
1166
1285
  openai: {
1167
1286
  conversational: new OpenAIConversationalTask()
1168
1287
  },
@@ -1172,7 +1291,8 @@ var PROVIDERS = {
1172
1291
  "text-to-video": new ReplicateTextToVideoTask()
1173
1292
  },
1174
1293
  sambanova: {
1175
- conversational: new SambanovaConversationalTask()
1294
+ conversational: new SambanovaConversationalTask(),
1295
+ "feature-extraction": new SambanovaFeatureExtractionTask()
1176
1296
  },
1177
1297
  together: {
1178
1298
  "text-to-image": new TogetherTextToImageTask(),
@@ -1203,7 +1323,7 @@ function getProviderHelper(provider, task) {
1203
1323
 
1204
1324
  // package.json
1205
1325
  var name = "@huggingface/inference";
1206
- var version = "3.8.1";
1326
+ var version = "3.9.0";
1207
1327
 
1208
1328
  // src/providers/consts.ts
1209
1329
  var HARDCODED_MODEL_INFERENCE_MAPPING = {
@@ -1217,11 +1337,14 @@ var HARDCODED_MODEL_INFERENCE_MAPPING = {
1217
1337
  cerebras: {},
1218
1338
  cohere: {},
1219
1339
  "fal-ai": {},
1340
+ "featherless-ai": {},
1220
1341
  "fireworks-ai": {},
1342
+ groq: {},
1221
1343
  "hf-inference": {},
1222
1344
  hyperbolic: {},
1223
1345
  nebius: {},
1224
1346
  novita: {},
1347
+ nscale: {},
1225
1348
  openai: {},
1226
1349
  replicate: {},
1227
1350
  sambanova: {},
@@ -2140,11 +2263,14 @@ var INFERENCE_PROVIDERS = [
2140
2263
  "cerebras",
2141
2264
  "cohere",
2142
2265
  "fal-ai",
2266
+ "featherless-ai",
2143
2267
  "fireworks-ai",
2268
+ "groq",
2144
2269
  "hf-inference",
2145
2270
  "hyperbolic",
2146
2271
  "nebius",
2147
2272
  "novita",
2273
+ "nscale",
2148
2274
  "openai",
2149
2275
  "replicate",
2150
2276
  "sambanova",
package/dist/index.js CHANGED
@@ -401,7 +401,7 @@ var FalAITextToSpeechTask = class extends FalAITask {
401
401
  return {
402
402
  ...omit(params.args, ["inputs", "parameters"]),
403
403
  ...params.args.parameters,
404
- lyrics: params.args.inputs
404
+ text: params.args.inputs
405
405
  };
406
406
  }
407
407
  async getResponse(response) {
@@ -425,6 +425,36 @@ var FalAITextToSpeechTask = class extends FalAITask {
425
425
  }
426
426
  };
427
427
 
428
+ // src/providers/featherless-ai.ts
429
+ var FEATHERLESS_API_BASE_URL = "https://api.featherless.ai";
430
+ var FeatherlessAIConversationalTask = class extends BaseConversationalTask {
431
+ constructor() {
432
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
433
+ }
434
+ };
435
+ var FeatherlessAITextGenerationTask = class extends BaseTextGenerationTask {
436
+ constructor() {
437
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
438
+ }
439
+ preparePayload(params) {
440
+ return {
441
+ ...params.args,
442
+ ...params.args.parameters,
443
+ model: params.model,
444
+ prompt: params.args.inputs
445
+ };
446
+ }
447
+ async getResponse(response) {
448
+ if (typeof response === "object" && "choices" in response && Array.isArray(response?.choices) && typeof response?.model === "string") {
449
+ const completion = response.choices[0];
450
+ return {
451
+ generated_text: completion.text
452
+ };
453
+ }
454
+ throw new InferenceOutputError("Expected Featherless AI text generation response format");
455
+ }
456
+ };
457
+
428
458
  // src/providers/fireworks-ai.ts
429
459
  var FireworksConversationalTask = class extends BaseConversationalTask {
430
460
  constructor() {
@@ -435,6 +465,25 @@ var FireworksConversationalTask = class extends BaseConversationalTask {
435
465
  }
436
466
  };
437
467
 
468
+ // src/providers/groq.ts
469
+ var GROQ_API_BASE_URL = "https://api.groq.com";
470
+ var GroqTextGenerationTask = class extends BaseTextGenerationTask {
471
+ constructor() {
472
+ super("groq", GROQ_API_BASE_URL);
473
+ }
474
+ makeRoute() {
475
+ return "/openai/v1/chat/completions";
476
+ }
477
+ };
478
+ var GroqConversationalTask = class extends BaseConversationalTask {
479
+ constructor() {
480
+ super("groq", GROQ_API_BASE_URL);
481
+ }
482
+ makeRoute() {
483
+ return "/openai/v1/chat/completions";
484
+ }
485
+ };
486
+
438
487
  // src/providers/hf-inference.ts
439
488
  var EQUIVALENT_SENTENCE_TRANSFORMERS_TASKS = ["feature-extraction", "sentence-similarity"];
440
489
  var HFInferenceTask = class extends TaskProviderHelper {
@@ -884,6 +933,41 @@ var NovitaConversationalTask = class extends BaseConversationalTask {
884
933
  }
885
934
  };
886
935
 
936
+ // src/providers/nscale.ts
937
+ var NSCALE_API_BASE_URL = "https://inference.api.nscale.com";
938
+ var NscaleConversationalTask = class extends BaseConversationalTask {
939
+ constructor() {
940
+ super("nscale", NSCALE_API_BASE_URL);
941
+ }
942
+ };
943
+ var NscaleTextToImageTask = class extends TaskProviderHelper {
944
+ constructor() {
945
+ super("nscale", NSCALE_API_BASE_URL);
946
+ }
947
+ preparePayload(params) {
948
+ return {
949
+ ...omit(params.args, ["inputs", "parameters"]),
950
+ ...params.args.parameters,
951
+ response_format: "b64_json",
952
+ prompt: params.args.inputs,
953
+ model: params.model
954
+ };
955
+ }
956
+ makeRoute() {
957
+ return "v1/images/generations";
958
+ }
959
+ async getResponse(response, url, headers, outputType) {
960
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data) && response.data.length > 0 && "b64_json" in response.data[0] && typeof response.data[0].b64_json === "string") {
961
+ const base64Data = response.data[0].b64_json;
962
+ if (outputType === "url") {
963
+ return `data:image/jpeg;base64,${base64Data}`;
964
+ }
965
+ return fetch(`data:image/jpeg;base64,${base64Data}`).then((res) => res.blob());
966
+ }
967
+ throw new InferenceOutputError("Expected Nscale text-to-image response format");
968
+ }
969
+ };
970
+
887
971
  // src/providers/openai.ts
888
972
  var OPENAI_API_BASE_URL = "https://api.openai.com";
889
973
  var OpenAIConversationalTask = class extends BaseConversationalTask {
@@ -985,6 +1069,29 @@ var SambanovaConversationalTask = class extends BaseConversationalTask {
985
1069
  super("sambanova", "https://api.sambanova.ai");
986
1070
  }
987
1071
  };
1072
+ var SambanovaFeatureExtractionTask = class extends TaskProviderHelper {
1073
+ constructor() {
1074
+ super("sambanova", "https://api.sambanova.ai");
1075
+ }
1076
+ makeRoute() {
1077
+ return `/v1/embeddings`;
1078
+ }
1079
+ async getResponse(response) {
1080
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data)) {
1081
+ return response.data.map((item) => item.embedding);
1082
+ }
1083
+ throw new InferenceOutputError(
1084
+ "Expected Sambanova feature-extraction (embeddings) response format to be {'data' : list of {'embedding' : number[]}}"
1085
+ );
1086
+ }
1087
+ preparePayload(params) {
1088
+ return {
1089
+ model: params.model,
1090
+ input: params.args.inputs,
1091
+ ...params.args
1092
+ };
1093
+ }
1094
+ };
988
1095
 
989
1096
  // src/providers/together.ts
990
1097
  var TOGETHER_API_BASE_URL = "https://api.together.xyz";
@@ -1059,6 +1166,10 @@ var PROVIDERS = {
1059
1166
  "text-to-video": new FalAITextToVideoTask(),
1060
1167
  "automatic-speech-recognition": new FalAIAutomaticSpeechRecognitionTask()
1061
1168
  },
1169
+ "featherless-ai": {
1170
+ conversational: new FeatherlessAIConversationalTask(),
1171
+ "text-generation": new FeatherlessAITextGenerationTask()
1172
+ },
1062
1173
  "hf-inference": {
1063
1174
  "text-to-image": new HFInferenceTextToImageTask(),
1064
1175
  conversational: new HFInferenceConversationalTask(),
@@ -1092,6 +1203,10 @@ var PROVIDERS = {
1092
1203
  "fireworks-ai": {
1093
1204
  conversational: new FireworksConversationalTask()
1094
1205
  },
1206
+ groq: {
1207
+ conversational: new GroqConversationalTask(),
1208
+ "text-generation": new GroqTextGenerationTask()
1209
+ },
1095
1210
  hyperbolic: {
1096
1211
  "text-to-image": new HyperbolicTextToImageTask(),
1097
1212
  conversational: new HyperbolicConversationalTask(),
@@ -1106,6 +1221,10 @@ var PROVIDERS = {
1106
1221
  conversational: new NovitaConversationalTask(),
1107
1222
  "text-generation": new NovitaTextGenerationTask()
1108
1223
  },
1224
+ nscale: {
1225
+ "text-to-image": new NscaleTextToImageTask(),
1226
+ conversational: new NscaleConversationalTask()
1227
+ },
1109
1228
  openai: {
1110
1229
  conversational: new OpenAIConversationalTask()
1111
1230
  },
@@ -1115,7 +1234,8 @@ var PROVIDERS = {
1115
1234
  "text-to-video": new ReplicateTextToVideoTask()
1116
1235
  },
1117
1236
  sambanova: {
1118
- conversational: new SambanovaConversationalTask()
1237
+ conversational: new SambanovaConversationalTask(),
1238
+ "feature-extraction": new SambanovaFeatureExtractionTask()
1119
1239
  },
1120
1240
  together: {
1121
1241
  "text-to-image": new TogetherTextToImageTask(),
@@ -1146,7 +1266,7 @@ function getProviderHelper(provider, task) {
1146
1266
 
1147
1267
  // package.json
1148
1268
  var name = "@huggingface/inference";
1149
- var version = "3.8.1";
1269
+ var version = "3.9.0";
1150
1270
 
1151
1271
  // src/providers/consts.ts
1152
1272
  var HARDCODED_MODEL_INFERENCE_MAPPING = {
@@ -1160,11 +1280,14 @@ var HARDCODED_MODEL_INFERENCE_MAPPING = {
1160
1280
  cerebras: {},
1161
1281
  cohere: {},
1162
1282
  "fal-ai": {},
1283
+ "featherless-ai": {},
1163
1284
  "fireworks-ai": {},
1285
+ groq: {},
1164
1286
  "hf-inference": {},
1165
1287
  hyperbolic: {},
1166
1288
  nebius: {},
1167
1289
  novita: {},
1290
+ nscale: {},
1168
1291
  openai: {},
1169
1292
  replicate: {},
1170
1293
  sambanova: {},
@@ -2083,11 +2206,14 @@ var INFERENCE_PROVIDERS = [
2083
2206
  "cerebras",
2084
2207
  "cohere",
2085
2208
  "fal-ai",
2209
+ "featherless-ai",
2086
2210
  "fireworks-ai",
2211
+ "groq",
2087
2212
  "hf-inference",
2088
2213
  "hyperbolic",
2089
2214
  "nebius",
2090
2215
  "novita",
2216
+ "nscale",
2091
2217
  "openai",
2092
2218
  "replicate",
2093
2219
  "sambanova",
@@ -1,6 +1,6 @@
1
1
  import type { WidgetType } from "@huggingface/tasks";
2
2
  import type { InferenceProvider, ModelId } from "../types";
3
- export declare const inferenceProviderMappingCache: Map<string, Partial<Record<"black-forest-labs" | "cerebras" | "cohere" | "fal-ai" | "fireworks-ai" | "hf-inference" | "hyperbolic" | "nebius" | "novita" | "openai" | "replicate" | "sambanova" | "together", Omit<InferenceProviderModelMapping, "hfModelId" | "adapterWeightsPath">>>>;
3
+ export declare const inferenceProviderMappingCache: Map<string, Partial<Record<"black-forest-labs" | "cerebras" | "cohere" | "fal-ai" | "featherless-ai" | "fireworks-ai" | "groq" | "hf-inference" | "hyperbolic" | "nebius" | "novita" | "nscale" | "openai" | "replicate" | "sambanova" | "together", Omit<InferenceProviderModelMapping, "hfModelId" | "adapterWeightsPath">>>>;
4
4
  export type InferenceProviderMapping = Partial<Record<InferenceProvider, Omit<InferenceProviderModelMapping, "hfModelId" | "adapterWeightsPath">>>;
5
5
  export interface InferenceProviderModelMapping {
6
6
  adapter?: string;
@@ -1 +1 @@
1
- {"version":3,"file":"getInferenceProviderMapping.d.ts","sourceRoot":"","sources":["../../../src/lib/getInferenceProviderMapping.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,iBAAiB,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAM3D,eAAO,MAAM,6BAA6B,0RAA+C,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG,OAAO,CAC7C,MAAM,CAAC,iBAAiB,EAAE,IAAI,CAAC,6BAA6B,EAAE,WAAW,GAAG,oBAAoB,CAAC,CAAC,CAClG,CAAC;AAEF,MAAM,WAAW,6BAA6B;IAC7C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,SAAS,EAAE,OAAO,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,IAAI,EAAE,UAAU,CAAC;CACjB;AAED,wBAAsB,2BAA2B,CAChD,MAAM,EAAE;IACP,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,iBAAiB,CAAC;IAC5B,IAAI,EAAE,UAAU,CAAC;CACjB,EACD,OAAO,EAAE;IACR,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;CACtE,GACC,OAAO,CAAC,6BAA6B,GAAG,IAAI,CAAC,CA+D/C"}
1
+ {"version":3,"file":"getInferenceProviderMapping.d.ts","sourceRoot":"","sources":["../../../src/lib/getInferenceProviderMapping.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,iBAAiB,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAM3D,eAAO,MAAM,6BAA6B,iUAA+C,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG,OAAO,CAC7C,MAAM,CAAC,iBAAiB,EAAE,IAAI,CAAC,6BAA6B,EAAE,WAAW,GAAG,oBAAoB,CAAC,CAAC,CAClG,CAAC;AAEF,MAAM,WAAW,6BAA6B;IAC7C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,SAAS,EAAE,OAAO,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,IAAI,EAAE,UAAU,CAAC;CACjB;AAED,wBAAsB,2BAA2B,CAChD,MAAM,EAAE;IACP,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,iBAAiB,CAAC;IAC5B,IAAI,EAAE,UAAU,CAAC;CACjB,EACD,OAAO,EAAE;IACR,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;CACtE,GACC,OAAO,CAAC,6BAA6B,GAAG,IAAI,CAAC,CA+D/C"}
@@ -1 +1 @@
1
- {"version":3,"file":"getProviderHelper.d.ts","sourceRoot":"","sources":["../../../src/lib/getProviderHelper.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EACX,6BAA6B,EAC7B,sBAAsB,EACtB,oCAAoC,EACpC,wBAAwB,EACxB,mCAAmC,EACnC,2BAA2B,EAC3B,kBAAkB,EAClB,6BAA6B,EAC7B,2BAA2B,EAC3B,sBAAsB,EACtB,qBAAqB,EACrB,yBAAyB,EACzB,2BAA2B,EAC3B,4BAA4B,EAC5B,uBAAuB,EACvB,gCAAgC,EAChC,+BAA+B,EAC/B,2BAA2B,EAC3B,kBAAkB,EAClB,4BAA4B,EAC5B,wBAAwB,EACxB,qBAAqB,EACrB,qBAAqB,EACrB,sBAAsB,EACtB,qBAAqB,EACrB,6BAA6B,EAC7B,qBAAqB,EACrB,iCAAiC,EACjC,gCAAgC,EAChC,qCAAqC,EACrC,MAAM,6BAA6B,CAAC;AAIrC,OAAO,KAAK,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEjE,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,iBAAiB,EAAE,OAAO,CAAC,MAAM,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC,CA+EnG,CAAC;AAEF;;GAEG;AACH,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,iBAAiB,GACrB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,8BAA8B,GAClC,oCAAoC,GAAG,kBAAkB,CAAC;AAC7D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,WAAW,GACf,kBAAkB,GAAG,kBAAkB,CAAC;AAC3C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,6BAA6B,GACjC,mCAAmC,GAAG,kBAAkB,CAAC;AAC5D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,kBAAkB,GACtB,yBAAyB,GAAG,kBAAkB,CAAC;AAClD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gCAAgC,GACpC,qCAAqC,GAAG,kBAAkB,CAAC;AAC9D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,wBAAwB,GAC5B,+BAA+B,GAAG,kBAAkB,CAAC;AACxD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,aAAa,GACjB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,uBAAuB,GAAG,kBAAkB,CAAC;AAChD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,2BAA2B,GAC/B,iCAAiC,GAAG,kBAAkB,CAAC;AAC1D,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,iBAAiB,EAAE
,IAAI,EAAE,aAAa,GAAG,SAAS,GAAG,kBAAkB,CAAC"}
1
+ {"version":3,"file":"getProviderHelper.d.ts","sourceRoot":"","sources":["../../../src/lib/getProviderHelper.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,EACX,6BAA6B,EAC7B,sBAAsB,EACtB,oCAAoC,EACpC,wBAAwB,EACxB,mCAAmC,EACnC,2BAA2B,EAC3B,kBAAkB,EAClB,6BAA6B,EAC7B,2BAA2B,EAC3B,sBAAsB,EACtB,qBAAqB,EACrB,yBAAyB,EACzB,2BAA2B,EAC3B,4BAA4B,EAC5B,uBAAuB,EACvB,gCAAgC,EAChC,+BAA+B,EAC/B,2BAA2B,EAC3B,kBAAkB,EAClB,4BAA4B,EAC5B,wBAAwB,EACxB,qBAAqB,EACrB,qBAAqB,EACrB,sBAAsB,EACtB,qBAAqB,EACrB,6BAA6B,EAC7B,qBAAqB,EACrB,iCAAiC,EACjC,gCAAgC,EAChC,qCAAqC,EACrC,MAAM,6BAA6B,CAAC;AAIrC,OAAO,KAAK,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEjE,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,iBAAiB,EAAE,OAAO,CAAC,MAAM,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC,CA4FnG,CAAC;AAEF;;GAEG;AACH,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,iBAAiB,GACrB,wBAAwB,GAAG,kBAAkB,CAAC;AACjD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,8BAA8B,GAClC,oCAAoC,GAAG,kBAAkB,CAAC;AAC7D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,WAAW,GACf,kBAAkB,GAAG,kBAAkB,CAAC;AAC3C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,6BAA6B,GACjC,mCAAmC,GAAG,kBAAkB,CAAC;AAC5D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,kBAAkB,GACtB,yBAAyB,GAAG,kBAAkB,CAAC;AAClD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gCAAgC,GACpC,qCAAqC,GAAG,kBAAkB,CAAC;AAC9D,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,gBAAgB,GACpB,sBAAsB,GAAG,kBAAkB,CAAC;AAC/C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,qBAAqB,GACzB,4BAA4B,GAAG,kBAAkB,CAAC;AACrD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,0BAA0B,GAC9B,gCAAgC,GAAG,kBAAkB,CAAC;AACzD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,wBAAwB,GAC5B,+BAA+B,GAAG,kBAAkB,CAAC;AACxD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,oBAAoB,GACxB,2BAA2B,GAAG,kBAAkB,CAAC;AACpD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,sBAAsB,GAC1B,6BAA6B,GAAG,kBAAkB,CAAC;AACtD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,aAAa,GACjB,qBAAqB,GAAG,kBAAkB,CAAC;AAC9C,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,eAAe,GACnB,uBAAuB,GAAG,kBAAkB,CAAC;AAChD,wBAAgB,iBAAiB,CAChC,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,EAAE,2BAA2B,GAC/B,iCAAiC,GAAG,kBAAkB,CAAC;AAC1D,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,iBAAiB,EAAE
,IAAI,EAAE,aAAa,GAAG,SAAS,GAAG,kBAAkB,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"consts.d.ts","sourceRoot":"","sources":["../../../src/providers/consts.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,oCAAoC,CAAC;AACxF,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAClD,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,UAAU,CAAC;AAExC;;;;;;GAMG;AACH,eAAO,MAAM,iCAAiC,EAAE,MAAM,CACrD,iBAAiB,EACjB,MAAM,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAqB9C,CAAC"}
1
+ {"version":3,"file":"consts.d.ts","sourceRoot":"","sources":["../../../src/providers/consts.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,oCAAoC,CAAC;AACxF,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAClD,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,UAAU,CAAC;AAExC;;;;;;GAMG;AACH,eAAO,MAAM,iCAAiC,EAAE,MAAM,CACrD,iBAAiB,EACjB,MAAM,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAwB9C,CAAC"}
@@ -0,0 +1,22 @@
1
+ import type { ChatCompletionOutput, TextGenerationInput, TextGenerationOutput, TextGenerationOutputFinishReason } from "@huggingface/tasks";
2
+ import type { BodyParams } from "../types";
3
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
4
+ interface FeatherlessAITextCompletionOutput extends Omit<ChatCompletionOutput, "choices"> {
5
+ choices: Array<{
6
+ text: string;
7
+ finish_reason: TextGenerationOutputFinishReason;
8
+ seed: number;
9
+ logprobs: unknown;
10
+ index: number;
11
+ }>;
12
+ }
13
+ export declare class FeatherlessAIConversationalTask extends BaseConversationalTask {
14
+ constructor();
15
+ }
16
+ export declare class FeatherlessAITextGenerationTask extends BaseTextGenerationTask {
17
+ constructor();
18
+ preparePayload(params: BodyParams<TextGenerationInput>): Record<string, unknown>;
19
+ getResponse(response: FeatherlessAITextCompletionOutput): Promise<TextGenerationOutput>;
20
+ }
21
+ export {};
22
+ //# sourceMappingURL=featherless-ai.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"featherless-ai.d.ts","sourceRoot":"","sources":["../../../src/providers/featherless-ai.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,oBAAoB,EACpB,mBAAmB,EACnB,oBAAoB,EACpB,gCAAgC,EAChC,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAC3C,OAAO,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAElF,UAAU,iCAAkC,SAAQ,IAAI,CAAC,oBAAoB,EAAE,SAAS,CAAC;IACxF,OAAO,EAAE,KAAK,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;QACb,aAAa,EAAE,gCAAgC,CAAC;QAChD,IAAI,EAAE,MAAM,CAAC;QACb,QAAQ,EAAE,OAAO,CAAC;QAClB,KAAK,EAAE,MAAM,CAAC;KACd,CAAC,CAAC;CACH;AAID,qBAAa,+BAAgC,SAAQ,sBAAsB;;CAI1E;AAED,qBAAa,+BAAgC,SAAQ,sBAAsB;;IAKjE,cAAc,CAAC,MAAM,EAAE,UAAU,CAAC,mBAAmB,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAS1E,WAAW,CAAC,QAAQ,EAAE,iCAAiC,GAAG,OAAO,CAAC,oBAAoB,CAAC;CActG"}
@@ -0,0 +1,10 @@
1
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
2
+ export declare class GroqTextGenerationTask extends BaseTextGenerationTask {
3
+ constructor();
4
+ makeRoute(): string;
5
+ }
6
+ export declare class GroqConversationalTask extends BaseConversationalTask {
7
+ constructor();
8
+ makeRoute(): string;
9
+ }
10
+ //# sourceMappingURL=groq.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"groq.d.ts","sourceRoot":"","sources":["../../../src/providers/groq.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAqBlF,qBAAa,sBAAuB,SAAQ,sBAAsB;;IAKxD,SAAS,IAAI,MAAM;CAG5B;AAED,qBAAa,sBAAuB,SAAQ,sBAAsB;;IAKxD,SAAS,IAAI,MAAM;CAG5B"}
@@ -0,0 +1,35 @@
1
+ /**
2
+ * See the registered mapping of HF model ID => Nscale model ID here:
3
+ *
4
+ * https://huggingface.co/api/partners/nscale-cloud/models
5
+ *
6
+ * This is a publicly available mapping.
7
+ *
8
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
9
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
10
+ *
11
+ * - If you work at Nscale and want to update this mapping, please use the model mapping API we provide on huggingface.co
12
+ * - If you're a community member and want to add a new supported HF model to Nscale, please open an issue on the present repo
13
+ * and we will tag Nscale team members.
14
+ *
15
+ * Thanks!
16
+ */
17
+ import type { TextToImageInput } from "@huggingface/tasks";
18
+ import type { BodyParams } from "../types";
19
+ import { BaseConversationalTask, TaskProviderHelper, type TextToImageTaskHelper } from "./providerHelper";
20
+ interface NscaleCloudBase64ImageGeneration {
21
+ data: Array<{
22
+ b64_json: string;
23
+ }>;
24
+ }
25
+ export declare class NscaleConversationalTask extends BaseConversationalTask {
26
+ constructor();
27
+ }
28
+ export declare class NscaleTextToImageTask extends TaskProviderHelper implements TextToImageTaskHelper {
29
+ constructor();
30
+ preparePayload(params: BodyParams<TextToImageInput>): Record<string, unknown>;
31
+ makeRoute(): string;
32
+ getResponse(response: NscaleCloudBase64ImageGeneration, url?: string, headers?: HeadersInit, outputType?: "url" | "blob"): Promise<string | Blob>;
33
+ }
34
+ export {};
35
+ //# sourceMappingURL=nscale.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"nscale.d.ts","sourceRoot":"","sources":["../../../src/providers/nscale.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAE3D,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAE3C,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,KAAK,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAI1G,UAAU,gCAAgC;IACzC,IAAI,EAAE,KAAK,CAAC;QACX,QAAQ,EAAE,MAAM,CAAC;KACjB,CAAC,CAAC;CACH;AAED,qBAAa,wBAAyB,SAAQ,sBAAsB;;CAInE;AAED,qBAAa,qBAAsB,SAAQ,kBAAmB,YAAW,qBAAqB;;IAK7F,cAAc,CAAC,MAAM,EAAE,UAAU,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAU7E,SAAS,IAAI,MAAM;IAIb,WAAW,CAChB,QAAQ,EAAE,gCAAgC,EAC1C,GAAG,CAAC,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,WAAW,EACrB,UAAU,CAAC,EAAE,KAAK,GAAG,MAAM,GACzB,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;CAkBzB"}
@@ -1,21 +1,14 @@
1
- /**
2
- * See the registered mapping of HF model ID => Sambanova model ID here:
3
- *
4
- * https://huggingface.co/api/partners/sambanova/models
5
- *
6
- * This is a publicly available mapping.
7
- *
8
- * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
9
- * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
10
- *
11
- * - If you work at Sambanova and want to update this mapping, please use the model mapping API we provide on huggingface.co
12
- * - If you're a community member and want to add a new supported HF model to Sambanova, please open an issue on the present repo
13
- * and we will tag Sambanova team members.
14
- *
15
- * Thanks!
16
- */
17
- import { BaseConversationalTask } from "./providerHelper";
1
+ import type { FeatureExtractionOutput } from "@huggingface/tasks";
2
+ import type { BodyParams } from "../types";
3
+ import type { FeatureExtractionTaskHelper } from "./providerHelper";
4
+ import { BaseConversationalTask, TaskProviderHelper } from "./providerHelper";
18
5
  export declare class SambanovaConversationalTask extends BaseConversationalTask {
19
6
  constructor();
20
7
  }
8
+ export declare class SambanovaFeatureExtractionTask extends TaskProviderHelper implements FeatureExtractionTaskHelper {
9
+ constructor();
10
+ makeRoute(): string;
11
+ getResponse(response: FeatureExtractionOutput): Promise<FeatureExtractionOutput>;
12
+ preparePayload(params: BodyParams): Record<string, unknown>;
13
+ }
21
14
  //# sourceMappingURL=sambanova.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"sambanova.d.ts","sourceRoot":"","sources":["../../../src/providers/sambanova.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,EAAE,sBAAsB,EAAE,MAAM,kBAAkB,CAAC;AAE1D,qBAAa,2BAA4B,SAAQ,sBAAsB;;CAItE"}
1
+ {"version":3,"file":"sambanova.d.ts","sourceRoot":"","sources":["../../../src/providers/sambanova.ts"],"names":[],"mappings":"AAkBA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,oBAAoB,CAAC;AAClE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAC3C,OAAO,KAAK,EAAE,2BAA2B,EAAE,MAAM,kBAAkB,CAAC;AACpE,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AAE9E,qBAAa,2BAA4B,SAAQ,sBAAsB;;CAItE;AAED,qBAAa,8BAA+B,SAAQ,kBAAmB,YAAW,2BAA2B;;IAKnG,SAAS,IAAI,MAAM;IAIb,WAAW,CAAC,QAAQ,EAAE,uBAAuB,GAAG,OAAO,CAAC,uBAAuB,CAAC;IAStF,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;CAOpE"}
@@ -1,6 +1,10 @@
1
1
  import type { FeatureExtractionInput } from "@huggingface/tasks";
2
2
  import type { BaseArgs, Options } from "../../types";
3
- export type FeatureExtractionArgs = BaseArgs & FeatureExtractionInput;
3
+ interface FeatureExtractionOAICompatInput {
4
+ encoding_format?: "float" | "base64";
5
+ dimensions?: number | null;
6
+ }
7
+ export type FeatureExtractionArgs = BaseArgs & FeatureExtractionInput & FeatureExtractionOAICompatInput;
4
8
  /**
5
9
  * Returned values are a multidimensional array of floats (dimension depending on if you sent a string or a list of string, and if the automatic reduction, usually mean_pooling for instance was applied for you or not. This should be explained on the model's README).
6
10
  */
@@ -9,4 +13,5 @@ export type FeatureExtractionOutput = (number | number[] | number[][])[];
9
13
  * This task reads some text and outputs raw float values, that are usually consumed as part of a semantic database/semantic search.
10
14
  */
11
15
  export declare function featureExtraction(args: FeatureExtractionArgs, options?: Options): Promise<FeatureExtractionOutput>;
16
+ export {};
12
17
  //# sourceMappingURL=featureExtraction.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"featureExtraction.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/featureExtraction.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAEjE,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,qBAAqB,GAAG,QAAQ,GAAG,sBAAsB,CAAC;AAEtE;;GAEG;AACH,MAAM,MAAM,uBAAuB,GAAG,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC;AAEzE;;GAEG;AACH,wBAAsB,iBAAiB,CACtC,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,uBAAuB,CAAC,CAOlC"}
1
+ {"version":3,"file":"featureExtraction.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/featureExtraction.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAEjE,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,UAAU,+BAA+B;IACxC,eAAe,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;IACrC,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC3B;AAED,MAAM,MAAM,qBAAqB,GAAG,QAAQ,GAAG,sBAAsB,GAAG,+BAA+B,CAAC;AAExG;;GAEG;AACH,MAAM,MAAM,uBAAuB,GAAG,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC;AAEzE;;GAEG;AACH,wBAAsB,iBAAiB,CACtC,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,uBAAuB,CAAC,CAOlC"}
@@ -30,7 +30,7 @@ export interface Options {
30
30
  billTo?: string;
31
31
  }
32
32
  export type InferenceTask = Exclude<PipelineType, "other"> | "conversational";
33
- export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cerebras", "cohere", "fal-ai", "fireworks-ai", "hf-inference", "hyperbolic", "nebius", "novita", "openai", "replicate", "sambanova", "together"];
33
+ export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cerebras", "cohere", "fal-ai", "featherless-ai", "fireworks-ai", "groq", "hf-inference", "hyperbolic", "nebius", "novita", "nscale", "openai", "replicate", "sambanova", "together"];
34
34
  export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];
35
35
  export interface BaseArgs {
36
36
  /**
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,mCAAmC,CAAC;AAEvF;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;IAEtC;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,GAAG,gBAAgB,CAAC;AAE9E,eAAO,MAAM,mBAAmB,kLActB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrC,CAAC;AAEH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG,UAAU,GAAG,qBAAqB,GAAG,cAAc,CAAC;AAEtF,MAAM,WAAW,YAAY;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,UAAU,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACzB,UAAU,EAAE,UAAU,CAAC;IACvB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB;AAED,MAAM,WAAW,UAAU,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IACtF,IAAI,EAAE,CAAC,CAAC;IACR,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,CAAC;IACpD,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,mCAAmC,CAAC;AAEvF;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;IAEtC;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,GAAG,gBAAgB,CAAC;AAE9E,eAAO,MAAM,mBAAmB,sNAiBtB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrC,CAAC;AAEH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG,UAAU,GAAG,qBAAqB,GAAG,cAAc,CAAC;AAEtF,MAAM,WAAW,YAAY;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,UAAU,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACzB,UAAU,EAAE,UAAU,CAAC;IACvB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB;AAED,MAAM,WAAW,UAAU,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IACtF,IAAI,EAAE,CAAC,CAAC;IACR,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,CAAC;IACpD,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB"}
@@ -1,2 +1,2 @@
1
- import "./vcr";
1
+ export {};
2
2
  //# sourceMappingURL=InferenceClient.spec.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"InferenceClient.spec.d.ts","sourceRoot":"","sources":["../../test/InferenceClient.spec.ts"],"names":[],"mappings":"AAcA,OAAO,OAAO,CAAC"}
1
+ {"version":3,"file":"InferenceClient.spec.d.ts","sourceRoot":"","sources":["../../test/InferenceClient.spec.ts"],"names":[],"mappings":""}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@huggingface/inference",
3
- "version": "3.8.1",
3
+ "version": "3.9.0",
4
4
  "packageManager": "pnpm@8.10.5",
5
5
  "license": "MIT",
6
6
  "author": "Hugging Face and Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -40,8 +40,8 @@
40
40
  },
41
41
  "type": "module",
42
42
  "dependencies": {
43
- "@huggingface/tasks": "^0.18.8",
44
- "@huggingface/jinja": "^0.3.4"
43
+ "@huggingface/jinja": "^0.3.4",
44
+ "@huggingface/tasks": "^0.18.10"
45
45
  },
46
46
  "devDependencies": {
47
47
  "@types/node": "18.13.0"
@@ -2,12 +2,14 @@ import * as BlackForestLabs from "../providers/black-forest-labs";
2
2
  import * as Cerebras from "../providers/cerebras";
3
3
  import * as Cohere from "../providers/cohere";
4
4
  import * as FalAI from "../providers/fal-ai";
5
+ import * as FeatherlessAI from "../providers/featherless-ai";
5
6
  import * as Fireworks from "../providers/fireworks-ai";
7
+ import * as Groq from "../providers/groq";
6
8
  import * as HFInference from "../providers/hf-inference";
7
-
8
9
  import * as Hyperbolic from "../providers/hyperbolic";
9
10
  import * as Nebius from "../providers/nebius";
10
11
  import * as Novita from "../providers/novita";
12
+ import * as Nscale from "../providers/nscale";
11
13
  import * as OpenAI from "../providers/openai";
12
14
  import type {
13
15
  AudioClassificationTaskHelper,
@@ -62,6 +64,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
62
64
  "text-to-video": new FalAI.FalAITextToVideoTask(),
63
65
  "automatic-speech-recognition": new FalAI.FalAIAutomaticSpeechRecognitionTask(),
64
66
  },
67
+ "featherless-ai": {
68
+ conversational: new FeatherlessAI.FeatherlessAIConversationalTask(),
69
+ "text-generation": new FeatherlessAI.FeatherlessAITextGenerationTask(),
70
+ },
65
71
  "hf-inference": {
66
72
  "text-to-image": new HFInference.HFInferenceTextToImageTask(),
67
73
  conversational: new HFInference.HFInferenceConversationalTask(),
@@ -95,6 +101,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
95
101
  "fireworks-ai": {
96
102
  conversational: new Fireworks.FireworksConversationalTask(),
97
103
  },
104
+ groq: {
105
+ conversational: new Groq.GroqConversationalTask(),
106
+ "text-generation": new Groq.GroqTextGenerationTask(),
107
+ },
98
108
  hyperbolic: {
99
109
  "text-to-image": new Hyperbolic.HyperbolicTextToImageTask(),
100
110
  conversational: new Hyperbolic.HyperbolicConversationalTask(),
@@ -109,6 +119,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
109
119
  conversational: new Novita.NovitaConversationalTask(),
110
120
  "text-generation": new Novita.NovitaTextGenerationTask(),
111
121
  },
122
+ nscale: {
123
+ "text-to-image": new Nscale.NscaleTextToImageTask(),
124
+ conversational: new Nscale.NscaleConversationalTask(),
125
+ },
112
126
  openai: {
113
127
  conversational: new OpenAI.OpenAIConversationalTask(),
114
128
  },
@@ -119,6 +133,7 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
119
133
  },
120
134
  sambanova: {
121
135
  conversational: new Sambanova.SambanovaConversationalTask(),
136
+ "feature-extraction": new Sambanova.SambanovaFeatureExtractionTask(),
122
137
  },
123
138
  together: {
124
139
  "text-to-image": new Together.TogetherTextToImageTask(),
@@ -23,11 +23,14 @@ export const HARDCODED_MODEL_INFERENCE_MAPPING: Record<
23
23
  cerebras: {},
24
24
  cohere: {},
25
25
  "fal-ai": {},
26
+ "featherless-ai": {},
26
27
  "fireworks-ai": {},
28
+ groq: {},
27
29
  "hf-inference": {},
28
30
  hyperbolic: {},
29
31
  nebius: {},
30
32
  novita: {},
33
+ nscale: {},
31
34
  openai: {},
32
35
  replicate: {},
33
36
  sambanova: {},
@@ -241,7 +241,7 @@ export class FalAITextToSpeechTask extends FalAITask {
241
241
  return {
242
242
  ...omit(params.args, ["inputs", "parameters"]),
243
243
  ...(params.args.parameters as Record<string, unknown>),
244
- lyrics: params.args.inputs,
244
+ text: params.args.inputs,
245
245
  };
246
246
  }
247
247
 
@@ -0,0 +1,57 @@
1
+ import type {
2
+ ChatCompletionOutput,
3
+ TextGenerationInput,
4
+ TextGenerationOutput,
5
+ TextGenerationOutputFinishReason,
6
+ } from "@huggingface/tasks";
7
+ import { InferenceOutputError } from "../lib/InferenceOutputError";
8
+ import type { BodyParams } from "../types";
9
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
10
+
11
+ interface FeatherlessAITextCompletionOutput extends Omit<ChatCompletionOutput, "choices"> {
12
+ choices: Array<{
13
+ text: string;
14
+ finish_reason: TextGenerationOutputFinishReason;
15
+ seed: number;
16
+ logprobs: unknown;
17
+ index: number;
18
+ }>;
19
+ }
20
+
21
+ const FEATHERLESS_API_BASE_URL = "https://api.featherless.ai";
22
+
23
+ export class FeatherlessAIConversationalTask extends BaseConversationalTask {
24
+ constructor() {
25
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
26
+ }
27
+ }
28
+
29
+ export class FeatherlessAITextGenerationTask extends BaseTextGenerationTask {
30
+ constructor() {
31
+ super("featherless-ai", FEATHERLESS_API_BASE_URL);
32
+ }
33
+
34
+ override preparePayload(params: BodyParams<TextGenerationInput>): Record<string, unknown> {
35
+ return {
36
+ ...params.args,
37
+ ...params.args.parameters,
38
+ model: params.model,
39
+ prompt: params.args.inputs,
40
+ };
41
+ }
42
+
43
+ override async getResponse(response: FeatherlessAITextCompletionOutput): Promise<TextGenerationOutput> {
44
+ if (
45
+ typeof response === "object" &&
46
+ "choices" in response &&
47
+ Array.isArray(response?.choices) &&
48
+ typeof response?.model === "string"
49
+ ) {
50
+ const completion = response.choices[0];
51
+ return {
52
+ generated_text: completion.text,
53
+ };
54
+ }
55
+ throw new InferenceOutputError("Expected Featherless AI text generation response format");
56
+ }
57
+ }
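Usage-wise, the new Featherless AI text-generation helper above maps the task's `inputs` to an OpenAI-style `prompt` and reads `choices[0].text` back as `generated_text`. A hedged sketch of a call that would go through this path; the model ID is illustrative and must be registered for Featherless AI on the Hub mapping:

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token

// Routed through FeatherlessAITextGenerationTask: `inputs` becomes `prompt`
// in the payload, and the first completion's `text` comes back as `generated_text`.
const { generated_text } = await client.textGeneration({
	provider: "featherless-ai",
	model: "mistralai/Mistral-7B-Instruct-v0.3", // illustrative model ID
	inputs: "Write a haiku about version bumps.",
	parameters: { max_new_tokens: 64 },
});
```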
@@ -0,0 +1,40 @@
1
+ import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper";
2
+
3
+ /**
4
+ * See the registered mapping of HF model ID => Groq model ID here:
5
+ *
6
+ * https://huggingface.co/api/partners/groq/models
7
+ *
8
+ * This is a publicly available mapping.
9
+ *
10
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
11
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
12
+ *
13
+ * - If you work at Groq and want to update this mapping, please use the model mapping API we provide on huggingface.co
14
+ * - If you're a community member and want to add a new supported HF model to Groq, please open an issue on the present repo
15
+ * and we will tag Groq team members.
16
+ *
17
+ * Thanks!
18
+ */
19
+
20
+ const GROQ_API_BASE_URL = "https://api.groq.com";
21
+
22
+ export class GroqTextGenerationTask extends BaseTextGenerationTask {
23
+ constructor() {
24
+ super("groq", GROQ_API_BASE_URL);
25
+ }
26
+
27
+ override makeRoute(): string {
28
+ return "/openai/v1/chat/completions";
29
+ }
30
+ }
31
+
32
+ export class GroqConversationalTask extends BaseConversationalTask {
33
+ constructor() {
34
+ super("groq", GROQ_API_BASE_URL);
35
+ }
36
+
37
+ override makeRoute(): string {
38
+ return "/openai/v1/chat/completions";
39
+ }
40
+ }
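Both Groq helpers above override `makeRoute()` so requests hit Groq's OpenAI-compatible endpoint `/openai/v1/chat/completions`. A sketch of a streaming chat call that would use this route, assuming the client's `chatCompletionStream` method; the model ID is illustrative and must be mapped to Groq:

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token

// Streamed chat completion served by Groq via /openai/v1/chat/completions.
for await (const chunk of client.chatCompletionStream({
	provider: "groq",
	model: "meta-llama/Llama-3.3-70B-Instruct", // illustrative model ID
	messages: [{ role: "user", content: "Summarize this release in one sentence." }],
	max_tokens: 128,
})) {
	process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}
```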
@@ -0,0 +1,79 @@
1
+ /**
2
+ * See the registered mapping of HF model ID => Nscale model ID here:
3
+ *
4
+ * https://huggingface.co/api/partners/nscale-cloud/models
5
+ *
6
+ * This is a publicly available mapping.
7
+ *
8
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
9
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
10
+ *
11
+ * - If you work at Nscale and want to update this mapping, please use the model mapping API we provide on huggingface.co
12
+ * - If you're a community member and want to add a new supported HF model to Nscale, please open an issue on the present repo
13
+ * and we will tag Nscale team members.
14
+ *
15
+ * Thanks!
16
+ */
17
+ import type { TextToImageInput } from "@huggingface/tasks";
18
+ import { InferenceOutputError } from "../lib/InferenceOutputError";
19
+ import type { BodyParams } from "../types";
20
+ import { omit } from "../utils/omit";
21
+ import { BaseConversationalTask, TaskProviderHelper, type TextToImageTaskHelper } from "./providerHelper";
22
+
23
+ const NSCALE_API_BASE_URL = "https://inference.api.nscale.com";
24
+
25
+ interface NscaleCloudBase64ImageGeneration {
26
+ data: Array<{
27
+ b64_json: string;
28
+ }>;
29
+ }
30
+
31
+ export class NscaleConversationalTask extends BaseConversationalTask {
32
+ constructor() {
33
+ super("nscale", NSCALE_API_BASE_URL);
34
+ }
35
+ }
36
+
37
+ export class NscaleTextToImageTask extends TaskProviderHelper implements TextToImageTaskHelper {
38
+ constructor() {
39
+ super("nscale", NSCALE_API_BASE_URL);
40
+ }
41
+
42
+ preparePayload(params: BodyParams<TextToImageInput>): Record<string, unknown> {
43
+ return {
44
+ ...omit(params.args, ["inputs", "parameters"]),
45
+ ...params.args.parameters,
46
+ response_format: "b64_json",
47
+ prompt: params.args.inputs,
48
+ model: params.model,
49
+ };
50
+ }
51
+
52
+ makeRoute(): string {
53
+ return "v1/images/generations";
54
+ }
55
+
56
+ async getResponse(
57
+ response: NscaleCloudBase64ImageGeneration,
58
+ url?: string,
59
+ headers?: HeadersInit,
60
+ outputType?: "url" | "blob"
61
+ ): Promise<string | Blob> {
62
+ if (
63
+ typeof response === "object" &&
64
+ "data" in response &&
65
+ Array.isArray(response.data) &&
66
+ response.data.length > 0 &&
67
+ "b64_json" in response.data[0] &&
68
+ typeof response.data[0].b64_json === "string"
69
+ ) {
70
+ const base64Data = response.data[0].b64_json;
71
+ if (outputType === "url") {
72
+ return `data:image/jpeg;base64,${base64Data}`;
73
+ }
74
+ return fetch(`data:image/jpeg;base64,${base64Data}`).then((res) => res.blob());
75
+ }
76
+
77
+ throw new InferenceOutputError("Expected Nscale text-to-image response format");
78
+ }
79
+ }
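The Nscale text-to-image helper above always requests `b64_json` output from the API and either returns a data URL (when `outputType` is `"url"`) or fetches that data URL into a Blob. A sketch of the caller side, assuming the usual `textToImage` task method; the model ID is illustrative and must be mapped to Nscale:

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token

// With no outputType requested, NscaleTextToImageTask decodes the b64_json
// payload into a Blob; callers can also ask for a data URL instead.
const image = await client.textToImage({
	provider: "nscale",
	model: "black-forest-labs/FLUX.1-schnell", // illustrative model ID
	inputs: "A watercolor fox in a pine forest",
});
```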
@@ -14,10 +14,42 @@
14
14
  *
15
15
  * Thanks!
16
16
  */
17
- import { BaseConversationalTask } from "./providerHelper";
17
+ import { InferenceOutputError } from "../lib/InferenceOutputError";
18
+
19
+ import type { FeatureExtractionOutput } from "@huggingface/tasks";
20
+ import type { BodyParams } from "../types";
21
+ import type { FeatureExtractionTaskHelper } from "./providerHelper";
22
+ import { BaseConversationalTask, TaskProviderHelper } from "./providerHelper";
18
23
 
19
24
  export class SambanovaConversationalTask extends BaseConversationalTask {
20
25
  constructor() {
21
26
  super("sambanova", "https://api.sambanova.ai");
22
27
  }
23
28
  }
29
+
30
+ export class SambanovaFeatureExtractionTask extends TaskProviderHelper implements FeatureExtractionTaskHelper {
31
+ constructor() {
32
+ super("sambanova", "https://api.sambanova.ai");
33
+ }
34
+
35
+ override makeRoute(): string {
36
+ return `/v1/embeddings`;
37
+ }
38
+
39
+ override async getResponse(response: FeatureExtractionOutput): Promise<FeatureExtractionOutput> {
40
+ if (typeof response === "object" && "data" in response && Array.isArray(response.data)) {
41
+ return response.data.map((item) => item.embedding);
42
+ }
43
+ throw new InferenceOutputError(
44
+ "Expected Sambanova feature-extraction (embeddings) response format to be {'data' : list of {'embedding' : number[]}}"
45
+ );
46
+ }
47
+
48
+ override preparePayload(params: BodyParams): Record<string, unknown> {
49
+ return {
50
+ model: params.model,
51
+ input: params.args.inputs,
52
+ ...params.args,
53
+ };
54
+ }
55
+ }
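The new Sambanova feature-extraction helper above posts to `/v1/embeddings` and unwraps `data[].embedding` from the response. A sketch of a call that would exercise it; the model ID is illustrative and must be mapped to Sambanova:

```ts
import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient(process.env.HF_TOKEN); // placeholder token

// Returns one embedding vector per input, unwrapped from the provider's
// `data[].embedding` field by SambanovaFeatureExtractionTask.
const embeddings = await client.featureExtraction({
	provider: "sambanova",
	model: "intfloat/e5-mistral-7b-instruct", // illustrative model ID
	inputs: "The quick brown fox jumps over the lazy dog.",
});
```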
@@ -3,7 +3,12 @@ import { getProviderHelper } from "../../lib/getProviderHelper";
3
3
  import type { BaseArgs, Options } from "../../types";
4
4
  import { innerRequest } from "../../utils/request";
5
5
 
6
- export type FeatureExtractionArgs = BaseArgs & FeatureExtractionInput;
6
+ interface FeatureExtractionOAICompatInput {
7
+ encoding_format?: "float" | "base64";
8
+ dimensions?: number | null;
9
+ }
10
+
11
+ export type FeatureExtractionArgs = BaseArgs & FeatureExtractionInput & FeatureExtractionOAICompatInput;
7
12
 
8
13
  /**
9
14
  * Returned values are a multidimensional array of floats (dimension depending on if you sent a string or a list of string, and if the automatic reduction, usually mean_pooling for instance was applied for you or not. This should be explained on the model's README).
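The widened `FeatureExtractionArgs` above means the OpenAI-compatible embedding options can now be passed to the task directly; since the provider payload spreads `params.args`, they end up in the request body. A sketch using the standalone task function, with both new fields shown; whether a given provider/model honors them is up to that provider:

```ts
import { featureExtraction } from "@huggingface/inference";

// encoding_format and dimensions are the new optional OpenAI-compatible
// fields added in this release; they are forwarded as-is in the request body.
const vectors = await featureExtraction({
	accessToken: process.env.HF_TOKEN, // placeholder token
	provider: "sambanova",
	model: "intfloat/e5-mistral-7b-instruct", // illustrative model ID
	inputs: "That is a happy person.",
	encoding_format: "float",
	dimensions: 1024, // only meaningful if the backing model supports reduced dimensions
});
```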
package/src/types.ts CHANGED
@@ -42,11 +42,14 @@ export const INFERENCE_PROVIDERS = [
42
42
  "cerebras",
43
43
  "cohere",
44
44
  "fal-ai",
45
+ "featherless-ai",
45
46
  "fireworks-ai",
47
+ "groq",
46
48
  "hf-inference",
47
49
  "hyperbolic",
48
50
  "nebius",
49
51
  "novita",
52
+ "nscale",
50
53
  "openai",
51
54
  "replicate",
52
55
  "sambanova",
@@ -1,2 +0,0 @@
1
- export {};
2
- //# sourceMappingURL=vcr.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"vcr.d.ts","sourceRoot":"","sources":["../../test/vcr.ts"],"names":[],"mappings":""}