@huggingface/inference 3.3.3 → 3.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/README.md +4 -0
  2. package/dist/index.cjs +131 -53
  3. package/dist/index.js +131 -53
  4. package/dist/src/lib/makeRequestOptions.d.ts +0 -2
  5. package/dist/src/lib/makeRequestOptions.d.ts.map +1 -1
  6. package/dist/src/providers/black-forest-labs.d.ts +18 -0
  7. package/dist/src/providers/black-forest-labs.d.ts.map +1 -0
  8. package/dist/src/providers/consts.d.ts.map +1 -1
  9. package/dist/src/providers/hyperbolic.d.ts +18 -0
  10. package/dist/src/providers/hyperbolic.d.ts.map +1 -0
  11. package/dist/src/providers/novita.d.ts +18 -0
  12. package/dist/src/providers/novita.d.ts.map +1 -0
  13. package/dist/src/tasks/cv/textToImage.d.ts +10 -1
  14. package/dist/src/tasks/cv/textToImage.d.ts.map +1 -1
  15. package/dist/src/tasks/nlp/featureExtraction.d.ts.map +1 -1
  16. package/dist/src/tasks/nlp/sentenceSimilarity.d.ts.map +1 -1
  17. package/dist/src/tasks/nlp/textGeneration.d.ts.map +1 -1
  18. package/dist/src/types.d.ts +1 -1
  19. package/dist/src/types.d.ts.map +1 -1
  20. package/dist/src/utils/delay.d.ts +2 -0
  21. package/dist/src/utils/delay.d.ts.map +1 -0
  22. package/dist/test/HfInference.spec.d.ts.map +1 -1
  23. package/package.json +1 -1
  24. package/src/lib/makeRequestOptions.ts +51 -16
  25. package/src/providers/black-forest-labs.ts +18 -0
  26. package/src/providers/consts.ts +3 -0
  27. package/src/providers/hyperbolic.ts +18 -0
  28. package/src/providers/novita.ts +18 -0
  29. package/src/tasks/cv/textToImage.ts +95 -6
  30. package/src/tasks/nlp/featureExtraction.ts +0 -4
  31. package/src/tasks/nlp/sentenceSimilarity.ts +0 -3
  32. package/src/tasks/nlp/textGeneration.ts +31 -0
  33. package/src/types.ts +5 -1
  34. package/src/utils/delay.ts +5 -0
package/dist/src/lib/makeRequestOptions.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EAAE,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAapE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,yFAAyF;IACzF,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,CAAC;IACnC,sCAAsC;IACtC,QAAQ,CAAC,EAAE,aAAa,CAAC;IACzB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CAwG7C"}
+ {"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EAAE,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAapE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,sCAAsC;IACtC,QAAQ,CAAC,EAAE,aAAa,CAAC;IACzB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CA6G7C"}
package/dist/src/providers/black-forest-labs.d.ts ADDED
@@ -0,0 +1,18 @@
+ export declare const BLACKFORESTLABS_AI_API_BASE_URL = "https://api.us1.bfl.ai/v1";
+ /**
+  * See the registered mapping of HF model ID => Black Forest Labs model ID here:
+  *
+  * https://huggingface.co/api/partners/blackforestlabs/models
+  *
+  * This is a publicly available mapping.
+  *
+  * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+  * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+  *
+  * - If you work at Black Forest Labs and want to update this mapping, please use the model mapping API we provide on huggingface.co
+  * - If you're a community member and want to add a new supported HF model to Black Forest Labs, please open an issue on the present repo
+  *   and we will tag Black Forest Labs team members.
+  *
+  * Thanks!
+  */
+ //# sourceMappingURL=black-forest-labs.d.ts.map
package/dist/src/providers/black-forest-labs.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"black-forest-labs.d.ts","sourceRoot":"","sources":["../../../src/providers/black-forest-labs.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,+BAA+B,8BAA8B,CAAC;AAE3E;;;;;;;;;;;;;;;GAeG"}
package/dist/src/providers/consts.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"consts.d.ts","sourceRoot":"","sources":["../../../src/providers/consts.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAClD,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,UAAU,CAAC;AAExC,KAAK,UAAU,GAAG,MAAM,CAAC;AACzB;;;;;;GAMG;AACH,eAAO,MAAM,0BAA0B,EAAE,MAAM,CAAC,iBAAiB,EAAE,MAAM,CAAC,OAAO,EAAE,UAAU,CAAC,CAc7F,CAAC"}
+ {"version":3,"file":"consts.d.ts","sourceRoot":"","sources":["../../../src/providers/consts.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAClD,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,UAAU,CAAC;AAExC,KAAK,UAAU,GAAG,MAAM,CAAC;AACzB;;;;;;GAMG;AACH,eAAO,MAAM,0BAA0B,EAAE,MAAM,CAAC,iBAAiB,EAAE,MAAM,CAAC,OAAO,EAAE,UAAU,CAAC,CAiB7F,CAAC"}
package/dist/src/providers/hyperbolic.d.ts ADDED
@@ -0,0 +1,18 @@
+ export declare const HYPERBOLIC_API_BASE_URL = "https://api.hyperbolic.xyz";
+ /**
+  * See the registered mapping of HF model ID => Hyperbolic model ID here:
+  *
+  * https://huggingface.co/api/partners/hyperbolic/models
+  *
+  * This is a publicly available mapping.
+  *
+  * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+  * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+  *
+  * - If you work at Hyperbolic and want to update this mapping, please use the model mapping API we provide on huggingface.co
+  * - If you're a community member and want to add a new supported HF model to Hyperbolic, please open an issue on the present repo
+  *   and we will tag Hyperbolic team members.
+  *
+  * Thanks!
+  */
+ //# sourceMappingURL=hyperbolic.d.ts.map
package/dist/src/providers/hyperbolic.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"hyperbolic.d.ts","sourceRoot":"","sources":["../../../src/providers/hyperbolic.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,uBAAuB,+BAA+B,CAAC;AAEpE;;;;;;;;;;;;;;;GAeG"}
package/dist/src/providers/novita.d.ts ADDED
@@ -0,0 +1,18 @@
+ export declare const NOVITA_API_BASE_URL = "https://api.novita.ai/v3/openai";
+ /**
+  * See the registered mapping of HF model ID => Novita model ID here:
+  *
+  * https://huggingface.co/api/partners/novita/models
+  *
+  * This is a publicly available mapping.
+  *
+  * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+  * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+  *
+  * - If you work at Novita and want to update this mapping, please use the model mapping API we provide on huggingface.co
+  * - If you're a community member and want to add a new supported HF model to Novita, please open an issue on the present repo
+  *   and we will tag Novita team members.
+  *
+  * Thanks!
+  */
+ //# sourceMappingURL=novita.d.ts.map
package/dist/src/providers/novita.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"novita.d.ts","sourceRoot":"","sources":["../../../src/providers/novita.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,mBAAmB,oCAAoC,CAAC;AAErE;;;;;;;;;;;;;;;GAeG"}
package/dist/src/tasks/cv/textToImage.d.ts CHANGED
@@ -1,9 +1,18 @@
  import type { TextToImageInput } from "@huggingface/tasks";
  import type { BaseArgs, Options } from "../../types";
  export type TextToImageArgs = BaseArgs & TextToImageInput;
+ interface TextToImageOptions extends Options {
+   outputType?: "url" | "blob";
+ }
  /**
   * This task reads some text input and outputs an image.
   * Recommended model: stabilityai/stable-diffusion-2
   */
- export declare function textToImage(args: TextToImageArgs, options?: Options): Promise<Blob>;
+ export declare function textToImage(args: TextToImageArgs, options?: TextToImageOptions & {
+   outputType: "url";
+ }): Promise<string>;
+ export declare function textToImage(args: TextToImageArgs, options?: TextToImageOptions & {
+   outputType?: undefined | "blob";
+ }): Promise<Blob>;
+ export {};
  //# sourceMappingURL=textToImage.d.ts.map
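Note on the overloads above: the return type is selected by the new outputType option. A minimal usage sketch (the token and model ID are placeholders, and it assumes the task-level textToImage export from the package root):

import { textToImage } from "@huggingface/inference";

// Resolves the first overload: Promise<string> (a URL or data: URI, depending on the provider).
const imageUrl = await textToImage(
  { accessToken: "hf_***", provider: "fal-ai", model: "org/some-text-to-image-model", inputs: "a red fox, watercolor" },
  { outputType: "url" }
);

// Omitting outputType (or passing "blob") resolves the second overload: Promise<Blob>.
const imageBlob = await textToImage({
  accessToken: "hf_***",
  provider: "fal-ai",
  model: "org/some-text-to-image-model",
  inputs: "a red fox, watercolor",
});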
package/dist/src/tasks/cv/textToImage.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"textToImage.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/textToImage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAqB,MAAM,oBAAoB,CAAC;AAE9E,OAAO,KAAK,EAAE,QAAQ,EAAqB,OAAO,EAAE,MAAM,aAAa,CAAC;AAIxE,MAAM,MAAM,eAAe,GAAG,QAAQ,GAAG,gBAAgB,CAAC;AA0B1D;;;GAGG;AACH,wBAAsB,WAAW,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,CAqCzF"}
+ {"version":3,"file":"textToImage.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/textToImage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAqB,MAAM,oBAAoB,CAAC;AAE9E,OAAO,KAAK,EAAE,QAAQ,EAAqB,OAAO,EAAE,MAAM,aAAa,CAAC;AAKxE,MAAM,MAAM,eAAe,GAAG,QAAQ,GAAG,gBAAgB,CAAC;AAmB1D,UAAU,kBAAmB,SAAQ,OAAO;IAC3C,UAAU,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CAC5B;AAiBD;;;GAGG;AACH,wBAAsB,WAAW,CAChC,IAAI,EAAE,eAAe,EACrB,OAAO,CAAC,EAAE,kBAAkB,GAAG;IAAE,UAAU,EAAE,KAAK,CAAA;CAAE,GAClD,OAAO,CAAC,MAAM,CAAC,CAAC;AACnB,wBAAsB,WAAW,CAChC,IAAI,EAAE,eAAe,EACrB,OAAO,CAAC,EAAE,kBAAkB,GAAG;IAAE,UAAU,CAAC,EAAE,SAAS,GAAG,MAAM,CAAA;CAAE,GAChE,OAAO,CAAC,IAAI,CAAC,CAAC"}
package/dist/src/tasks/nlp/featureExtraction.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"featureExtraction.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/featureExtraction.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,qBAAqB,GAAG,QAAQ,GAAG;IAC9C;;;;;OAKG;IACH,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CAC1B,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,uBAAuB,GAAG,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC;AAEzE;;GAEG;AACH,wBAAsB,iBAAiB,CACtC,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,uBAAuB,CAAC,CAyBlC"}
+ {"version":3,"file":"featureExtraction.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/featureExtraction.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,qBAAqB,GAAG,QAAQ,GAAG;IAC9C;;;;;OAKG;IACH,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CAC1B,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,uBAAuB,GAAG,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC;AAEzE;;GAEG;AACH,wBAAsB,iBAAiB,CACtC,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,uBAAuB,CAAC,CAsBlC"}
package/dist/src/tasks/nlp/sentenceSimilarity.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"sentenceSimilarity.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/sentenceSimilarity.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,uBAAuB,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAG5F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,sBAAsB,GAAG,QAAQ,GAAG,uBAAuB,CAAC;AAExE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,wBAAwB,CAAC,CAanC"}
+ {"version":3,"file":"sentenceSimilarity.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/sentenceSimilarity.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,uBAAuB,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAE5F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,sBAAsB,GAAG,QAAQ,GAAG,uBAAuB,CAAC;AAExE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,wBAAwB,CAAC,CAWnC"}
package/dist/src/tasks/nlp/textGeneration.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"textGeneration.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/textGeneration.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAEX,mBAAmB,EACnB,oBAAoB,EAEpB,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,YAAY,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,CAAC;AAY1D;;GAEG;AACH,wBAAsB,cAAc,CACnC,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,oBAAoB,CAAC,CA+B/B"}
+ {"version":3,"file":"textGeneration.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/textGeneration.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAEX,mBAAmB,EACnB,oBAAoB,EAEpB,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAKrD,YAAY,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,CAAC;AAkB1D;;GAEG;AACH,wBAAsB,cAAc,CACnC,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,oBAAoB,CAAC,CAuD/B"}
package/dist/src/types.d.ts CHANGED
@@ -22,7 +22,7 @@ export interface Options {
    includeCredentials?: string | boolean;
  }
  export type InferenceTask = Exclude<PipelineType, "other">;
- export declare const INFERENCE_PROVIDERS: readonly ["fal-ai", "fireworks-ai", "nebius", "hf-inference", "replicate", "sambanova", "together"];
+ export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "fal-ai", "fireworks-ai", "hf-inference", "hyperbolic", "nebius", "novita", "replicate", "sambanova", "together"];
  export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];
  export interface BaseArgs {
    /**
package/dist/src/types.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;CACtC;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;AAE3D,eAAO,MAAM,mBAAmB,qGAQtB,CAAC;AACX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACrC,WAAW,CAAC,EAAE,MAAM,CAAC;CACrB,CAAC"}
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;CACtC;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;AAE3D,eAAO,MAAM,mBAAmB,kJAWtB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACrC,WAAW,CAAC,EAAE,MAAM,CAAC;CACrB,CAAC"}
package/dist/src/utils/delay.d.ts ADDED
@@ -0,0 +1,2 @@
+ export declare function delay(ms: number): Promise<void>;
+ //# sourceMappingURL=delay.d.ts.map
package/dist/src/utils/delay.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../../src/utils/delay.ts"],"names":[],"mappings":"AAAA,wBAAgB,KAAK,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAI/C"}
package/dist/test/HfInference.spec.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"HfInference.spec.d.ts","sourceRoot":"","sources":["../../test/HfInference.spec.ts"],"names":[],"mappings":"AAOA,OAAO,OAAO,CAAC"}
+ {"version":3,"file":"HfInference.spec.d.ts","sourceRoot":"","sources":["../../test/HfInference.spec.ts"],"names":[],"mappings":"AAQA,OAAO,OAAO,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@huggingface/inference",
-   "version": "3.3.3",
+   "version": "3.3.5",
    "packageManager": "pnpm@8.10.5",
    "license": "MIT",
    "author": "Tim Mikeladze <tim.mikeladze@gmail.com>",
package/src/lib/makeRequestOptions.ts CHANGED
@@ -4,7 +4,10 @@ import { NEBIUS_API_BASE_URL } from "../providers/nebius";
  import { REPLICATE_API_BASE_URL } from "../providers/replicate";
  import { SAMBANOVA_API_BASE_URL } from "../providers/sambanova";
  import { TOGETHER_API_BASE_URL } from "../providers/together";
+ import { NOVITA_API_BASE_URL } from "../providers/novita";
  import { FIREWORKS_AI_API_BASE_URL } from "../providers/fireworks-ai";
+ import { HYPERBOLIC_API_BASE_URL } from "../providers/hyperbolic";
+ import { BLACKFORESTLABS_AI_API_BASE_URL } from "../providers/black-forest-labs";
  import type { InferenceProvider } from "../types";
  import type { InferenceTask, Options, RequestArgs } from "../types";
  import { isUrl } from "./isUrl";
@@ -28,8 +31,6 @@ export async function makeRequestOptions(
    stream?: boolean;
  },
  options?: Options & {
-   /** When a model can be used for multiple tasks, and we want to run a non-default task */
-   forceTask?: string | InferenceTask;
    /** To load default model if needed */
    taskHint?: InferenceTask;
    chatCompletion?: boolean;
@@ -39,14 +40,11 @@
  let otherArgs = remainingArgs;
  const provider = maybeProvider ?? "hf-inference";

- const { forceTask, includeCredentials, taskHint, chatCompletion } = options ?? {};
+ const { includeCredentials, taskHint, chatCompletion } = options ?? {};

  if (endpointUrl && provider !== "hf-inference") {
    throw new Error(`Cannot use endpointUrl with a third-party provider.`);
  }
- if (forceTask && provider !== "hf-inference") {
-   throw new Error(`Cannot use forceTask with a third-party provider.`);
- }
  if (maybeModel && isUrl(maybeModel)) {
    throw new Error(`Model URLs are no longer supported. Use endpointUrl instead.`);
  }
@@ -77,7 +75,6 @@
    : makeUrl({
        authMethod,
        chatCompletion: chatCompletion ?? false,
-       forceTask,
        model,
        provider: provider ?? "hf-inference",
        taskHint,
@@ -85,8 +82,13 @@

  const headers: Record<string, string> = {};
  if (accessToken) {
-   headers["Authorization"] =
-     provider === "fal-ai" && authMethod === "provider-key" ? `Key ${accessToken}` : `Bearer ${accessToken}`;
+   if (provider === "fal-ai" && authMethod === "provider-key") {
+     headers["Authorization"] = `Key ${accessToken}`;
+   } else if (provider === "black-forest-labs" && authMethod === "provider-key") {
+     headers["X-Key"] = accessToken;
+   } else {
+     headers["Authorization"] = `Bearer ${accessToken}`;
+   }
  }

  // e.g. @huggingface/inference/3.1.3
@@ -131,7 +133,11 @@
      ? args.data
      : JSON.stringify({
          ...otherArgs,
-         ...(chatCompletion || provider === "together" || provider === "nebius" ? { model } : undefined),
+         ...(taskHint === "text-to-image" && provider === "hyperbolic"
+           ? { model_name: model }
+           : chatCompletion || provider === "together" || provider === "nebius" || provider === "hyperbolic"
+             ? { model }
+             : undefined),
        }),
  ...(credentials ? { credentials } : undefined),
  signal: options?.signal,
@@ -146,7 +152,6 @@ function makeUrl(params: {
  model: string;
  provider: InferenceProvider;
  taskHint: InferenceTask | undefined;
- forceTask?: string | InferenceTask;
}): string {
  if (params.authMethod === "none" && params.provider !== "hf-inference") {
    throw new Error("Authentication is required when requesting a third-party provider. Please provide accessToken");
@@ -154,6 +159,12 @@

  const shouldProxy = params.provider !== "hf-inference" && params.authMethod !== "provider-key";
  switch (params.provider) {
+   case "black-forest-labs": {
+     const baseUrl = shouldProxy
+       ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", params.provider)
+       : BLACKFORESTLABS_AI_API_BASE_URL;
+     return `${baseUrl}/${params.model}`;
+   }
    case "fal-ai": {
      const baseUrl = shouldProxy
        ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", params.provider)
@@ -213,6 +224,7 @@ function makeUrl(params: {
      }
      return baseUrl;
    }
+
    case "fireworks-ai": {
      const baseUrl = shouldProxy
        ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", params.provider)
@@ -222,15 +234,38 @@ function makeUrl(params: {
      }
      return baseUrl;
    }
+   case "hyperbolic": {
+     const baseUrl = shouldProxy
+       ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", params.provider)
+       : HYPERBOLIC_API_BASE_URL;
+
+     if (params.taskHint === "text-to-image") {
+       return `${baseUrl}/v1/images/generations`;
+     }
+     return `${baseUrl}/v1/chat/completions`;
+   }
+   case "novita": {
+     const baseUrl = shouldProxy
+       ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", params.provider)
+       : NOVITA_API_BASE_URL;
+     if (params.taskHint === "text-generation") {
+       if (params.chatCompletion) {
+         return `${baseUrl}/chat/completions`;
+       }
+       return `${baseUrl}/completions`;
+     }
+     return baseUrl;
+   }
    default: {
      const baseUrl = HF_HUB_INFERENCE_PROXY_TEMPLATE.replaceAll("{{PROVIDER}}", "hf-inference");
-     const url = params.forceTask
-       ? `${baseUrl}/pipeline/${params.forceTask}/${params.model}`
-       : `${baseUrl}/models/${params.model}`;
+     if (params.taskHint && ["feature-extraction", "sentence-similarity"].includes(params.taskHint)) {
+       /// when deployed on hf-inference, those two tasks are automatically compatible with one another.
+       return `${baseUrl}/pipeline/${params.taskHint}/${params.model}`;
+     }
      if (params.taskHint === "text-generation" && params.chatCompletion) {
-       return url + `/v1/chat/completions`;
+       return `${baseUrl}/models/${params.model}/v1/chat/completions`;
      }
-     return url;
+     return `${baseUrl}/models/${params.model}`;
    }
  }
}
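For orientation, the new makeUrl branches above resolve to the following endpoints when a provider is called directly with a provider key (with any other auth method the request is routed through the Hugging Face proxy template instead). A sketch; the provider-side model ID below is a hypothetical example. Note also, per the headers change above, that a black-forest-labs provider key is sent as an X-Key header rather than Authorization: Bearer.

// Sketch of the direct (non-proxied) URLs produced by the new makeUrl() branches.
const providerModelId = "flux-dev"; // hypothetical provider-side model ID

const bflUrl = `https://api.us1.bfl.ai/v1/${providerModelId}`;               // black-forest-labs
const hyperbolicImages = "https://api.hyperbolic.xyz/v1/images/generations"; // hyperbolic, text-to-image
const hyperbolicChat = "https://api.hyperbolic.xyz/v1/chat/completions";     // hyperbolic, other tasks
const novitaChat = "https://api.novita.ai/v3/openai/chat/completions";       // novita, text-generation with chatCompletion
const novitaCompletions = "https://api.novita.ai/v3/openai/completions";     // novita, plain text-generation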
package/src/providers/black-forest-labs.ts ADDED
@@ -0,0 +1,18 @@
+ export const BLACKFORESTLABS_AI_API_BASE_URL = "https://api.us1.bfl.ai/v1";
+
+ /**
+  * See the registered mapping of HF model ID => Black Forest Labs model ID here:
+  *
+  * https://huggingface.co/api/partners/blackforestlabs/models
+  *
+  * This is a publicly available mapping.
+  *
+  * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+  * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+  *
+  * - If you work at Black Forest Labs and want to update this mapping, please use the model mapping API we provide on huggingface.co
+  * - If you're a community member and want to add a new supported HF model to Black Forest Labs, please open an issue on the present repo
+  *   and we will tag Black Forest Labs team members.
+  *
+  * Thanks!
+  */
package/src/providers/consts.ts CHANGED
@@ -16,11 +16,14 @@ export const HARDCODED_MODEL_ID_MAPPING: Record<InferenceProvider, Record<ModelI
   * Example:
   * "Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen2.5-Coder-32B-Instruct",
   */
+ "black-forest-labs": {},
  "fal-ai": {},
  "fireworks-ai": {},
  "hf-inference": {},
+ hyperbolic: {},
  nebius: {},
  replicate: {},
  sambanova: {},
  together: {},
+ novita: {},
};
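As the provider modules' comments note, a model that is not yet registered on huggingface.co can be exercised locally by filling in this dictionary. A dev-only sketch of what a patched consts.ts could look like; the HF model ID => provider model ID pair below is a hypothetical example, not a registered mapping:

type ModelId = string;
type ProviderId = string;
type InferenceProvider =
  | "black-forest-labs" | "fal-ai" | "fireworks-ai" | "hf-inference" | "hyperbolic"
  | "nebius" | "novita" | "replicate" | "sambanova" | "together";

// Dev-only: resolve an HF model ID to a provider-side model ID before it is registered.
export const HARDCODED_MODEL_ID_MAPPING: Record<InferenceProvider, Record<ModelId, ProviderId>> = {
  "black-forest-labs": { "black-forest-labs/FLUX.1-dev": "flux-dev" }, // hypothetical pair
  "fal-ai": {},
  "fireworks-ai": {},
  "hf-inference": {},
  hyperbolic: {},
  nebius: {},
  novita: {},
  replicate: {},
  sambanova: {},
  together: {},
};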
package/src/providers/hyperbolic.ts ADDED
@@ -0,0 +1,18 @@
+ export const HYPERBOLIC_API_BASE_URL = "https://api.hyperbolic.xyz";
+
+ /**
+  * See the registered mapping of HF model ID => Hyperbolic model ID here:
+  *
+  * https://huggingface.co/api/partners/hyperbolic/models
+  *
+  * This is a publicly available mapping.
+  *
+  * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+  * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+  *
+  * - If you work at Hyperbolic and want to update this mapping, please use the model mapping API we provide on huggingface.co
+  * - If you're a community member and want to add a new supported HF model to Hyperbolic, please open an issue on the present repo
+  *   and we will tag Hyperbolic team members.
+  *
+  * Thanks!
+  */
package/src/providers/novita.ts ADDED
@@ -0,0 +1,18 @@
+ export const NOVITA_API_BASE_URL = "https://api.novita.ai/v3/openai";
+
+ /**
+  * See the registered mapping of HF model ID => Novita model ID here:
+  *
+  * https://huggingface.co/api/partners/novita/models
+  *
+  * This is a publicly available mapping.
+  *
+  * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+  * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+  *
+  * - If you work at Novita and want to update this mapping, please use the model mapping API we provide on huggingface.co
+  * - If you're a community member and want to add a new supported HF model to Novita, please open an issue on the present repo
+  *   and we will tag Novita team members.
+  *
+  * Thanks!
+  */
package/src/tasks/cv/textToImage.ts CHANGED
@@ -3,6 +3,7 @@ import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, InferenceProvider, Options } from "../../types";
  import { omit } from "../../utils/omit";
  import { request } from "../custom/request";
+ import { delay } from "../../utils/delay";

  export type TextToImageArgs = BaseArgs & TextToImageInput;

@@ -14,6 +15,18 @@ interface Base64ImageGeneration {
  interface OutputUrlImageGeneration {
    output: string[];
  }
+ interface HyperbolicTextToImageOutput {
+   images: Array<{ image: string }>;
+ }
+
+ interface BlackForestLabsResponse {
+   id: string;
+   polling_url: string;
+ }
+
+ interface TextToImageOptions extends Options {
+   outputType?: "url" | "blob";
+ }

  function getResponseFormatArg(provider: InferenceProvider) {
    switch (provider) {
@@ -34,7 +47,15 @@ function getResponseFormatArg(provider: InferenceProvider) {
   * This task reads some text input and outputs an image.
   * Recommended model: stabilityai/stable-diffusion-2
   */
- export async function textToImage(args: TextToImageArgs, options?: Options): Promise<Blob> {
+ export async function textToImage(
+   args: TextToImageArgs,
+   options?: TextToImageOptions & { outputType: "url" }
+ ): Promise<string>;
+ export async function textToImage(
+   args: TextToImageArgs,
+   options?: TextToImageOptions & { outputType?: undefined | "blob" }
+ ): Promise<Blob>;
+ export async function textToImage(args: TextToImageArgs, options?: TextToImageOptions): Promise<Blob | string> {
    const payload =
      !args.provider || args.provider === "hf-inference" || args.provider === "sambanova"
        ? args
@@ -44,23 +65,54 @@ export async function textToImage(args: TextToImageArgs, options?: Options): Pro
          ...getResponseFormatArg(args.provider),
          prompt: args.inputs,
        };
-   const res = await request<TextToImageOutput | Base64ImageGeneration | OutputUrlImageGeneration>(payload, {
+   const res = await request<
+     | TextToImageOutput
+     | Base64ImageGeneration
+     | OutputUrlImageGeneration
+     | BlackForestLabsResponse
+     | HyperbolicTextToImageOutput
+   >(payload, {
      ...options,
      taskHint: "text-to-image",
    });

    if (res && typeof res === "object") {
+     if (args.provider === "black-forest-labs" && "polling_url" in res && typeof res.polling_url === "string") {
+       return await pollBflResponse(res.polling_url, options?.outputType);
+     }
      if (args.provider === "fal-ai" && "images" in res && Array.isArray(res.images) && res.images[0].url) {
-       const image = await fetch(res.images[0].url);
-       return await image.blob();
+       if (options?.outputType === "url") {
+         return res.images[0].url;
+       } else {
+         const image = await fetch(res.images[0].url);
+         return await image.blob();
+       }
+     }
+     if (
+       args.provider === "hyperbolic" &&
+       "images" in res &&
+       Array.isArray(res.images) &&
+       res.images[0] &&
+       typeof res.images[0].image === "string"
+     ) {
+       if (options?.outputType === "url") {
+         return `data:image/jpeg;base64,${res.images[0].image}`;
+       }
+       const base64Response = await fetch(`data:image/jpeg;base64,${res.images[0].image}`);
+       return await base64Response.blob();
      }
      if ("data" in res && Array.isArray(res.data) && res.data[0].b64_json) {
        const base64Data = res.data[0].b64_json;
+       if (options?.outputType === "url") {
+         return `data:image/jpeg;base64,${base64Data}`;
+       }
        const base64Response = await fetch(`data:image/jpeg;base64,${base64Data}`);
-       const blob = await base64Response.blob();
-       return blob;
+       return await base64Response.blob();
      }
      if ("output" in res && Array.isArray(res.output)) {
+       if (options?.outputType === "url") {
+         return res.output[0];
+       }
        const urlResponse = await fetch(res.output[0]);
        const blob = await urlResponse.blob();
        return blob;
@@ -70,5 +122,42 @@ export async function textToImage(args: TextToImageArgs, options?: Options): Pro
    if (!isValidOutput) {
      throw new InferenceOutputError("Expected Blob");
    }
+   if (options?.outputType === "url") {
+     const b64 = await res.arrayBuffer().then((buf) => Buffer.from(buf).toString("base64"));
+     return `data:image/jpeg;base64,${b64}`;
+   }
    return res;
  }
+
+ async function pollBflResponse(url: string, outputType?: "url" | "blob"): Promise<Blob> {
+   const urlObj = new URL(url);
+   for (let step = 0; step < 5; step++) {
+     await delay(1000);
+     console.debug(`Polling Black Forest Labs API for the result... ${step + 1}/5`);
+     urlObj.searchParams.set("attempt", step.toString(10));
+     const resp = await fetch(urlObj, { headers: { "Content-Type": "application/json" } });
+     if (!resp.ok) {
+       throw new InferenceOutputError("Failed to fetch result from black forest labs API");
+     }
+     const payload = await resp.json();
+     if (
+       typeof payload === "object" &&
+       payload &&
+       "status" in payload &&
+       typeof payload.status === "string" &&
+       payload.status === "Ready" &&
+       "result" in payload &&
+       typeof payload.result === "object" &&
+       payload.result &&
+       "sample" in payload.result &&
+       typeof payload.result.sample === "string"
+     ) {
+       if (outputType === "url") {
+         return payload.result.sample;
+       }
+       const image = await fetch(payload.result.sample);
+       return await image.blob();
+     }
+   }
+   throw new InferenceOutputError("Failed to fetch result from black forest labs API");
+ }
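Taken together, the new branches above give textToImage two notable behaviours: for black-forest-labs the client submits the job and then polls the returned polling_url (up to 5 attempts, 1 s apart) until the result is Ready, and for every provider outputType: "url" resolves to a URL or data: URI instead of a Blob. A usage sketch; the token and model IDs are placeholders:

import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_***");

// Black Forest Labs: polling is handled inside textToImage / pollBflResponse.
const blob = await hf.textToImage({
  provider: "black-forest-labs",
  model: "black-forest-labs/FLUX.1-dev",
  inputs: "an astronaut riding a horse on the moon",
});

// Hyperbolic with outputType "url": resolves to a base64 data: URI instead of a Blob.
const dataUri = await hf.textToImage(
  { provider: "hyperbolic", model: "org/some-diffusion-model", inputs: "a lighthouse at dawn" },
  { outputType: "url" }
);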
package/src/tasks/nlp/featureExtraction.ts CHANGED
@@ -1,5 +1,4 @@
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
- import { getDefaultTask } from "../../lib/getDefaultTask";
  import type { BaseArgs, Options } from "../../types";
  import { request } from "../custom/request";

@@ -25,12 +24,9 @@ export async function featureExtraction(
    args: FeatureExtractionArgs,
    options?: Options
  ): Promise<FeatureExtractionOutput> {
-   const defaultTask = args.model ? await getDefaultTask(args.model, args.accessToken, options) : undefined;
-
    const res = await request<FeatureExtractionOutput>(args, {
      ...options,
      taskHint: "feature-extraction",
-     ...(defaultTask === "sentence-similarity" && { forceTask: "feature-extraction" }),
    });
    let isValidOutput = true;

package/src/tasks/nlp/sentenceSimilarity.ts CHANGED
@@ -1,6 +1,5 @@
  import type { SentenceSimilarityInput, SentenceSimilarityOutput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
- import { getDefaultTask } from "../../lib/getDefaultTask";
  import type { BaseArgs, Options } from "../../types";
  import { request } from "../custom/request";
  import { omit } from "../../utils/omit";
@@ -14,11 +13,9 @@ export async function sentenceSimilarity(
    args: SentenceSimilarityArgs,
    options?: Options
  ): Promise<SentenceSimilarityOutput> {
-   const defaultTask = args.model ? await getDefaultTask(args.model, args.accessToken, options) : undefined;
    const res = await request<SentenceSimilarityOutput>(prepareInput(args), {
      ...options,
      taskHint: "sentence-similarity",
-     ...(defaultTask === "feature-extraction" && { forceTask: "sentence-similarity" }),
    });

    const isValidOutput = Array.isArray(res) && res.every((x) => typeof x === "number");
package/src/tasks/nlp/textGeneration.ts CHANGED
@@ -8,6 +8,7 @@ import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
  import { toArray } from "../../utils/toArray";
  import { request } from "../custom/request";
+ import { omit } from "../../utils/omit";

  export type { TextGenerationInput, TextGenerationOutput };

@@ -21,6 +22,12 @@ interface TogeteherTextCompletionOutput extends Omit<ChatCompletionOutput, "choi
    }>;
  }

+ interface HyperbolicTextCompletionOutput extends Omit<ChatCompletionOutput, "choices"> {
+   choices: Array<{
+     message: { content: string };
+   }>;
+ }
+
  /**
   * Use to continue text from a prompt. This is a very generic task. Recommended model: gpt2 (it’s a simple model, but fun to play with).
   */
@@ -43,6 +50,30 @@ export async function textGeneration(
      return {
        generated_text: completion.text,
      };
+   } else if (args.provider === "hyperbolic") {
+     const payload = {
+       messages: [{ content: args.inputs, role: "user" }],
+       ...(args.parameters
+         ? {
+             max_tokens: args.parameters.max_new_tokens,
+             ...omit(args.parameters, "max_new_tokens"),
+           }
+         : undefined),
+       ...omit(args, ["inputs", "parameters"]),
+     };
+     const raw = await request<HyperbolicTextCompletionOutput>(payload, {
+       ...options,
+       taskHint: "text-generation",
+     });
+     const isValidOutput =
+       typeof raw === "object" && "choices" in raw && Array.isArray(raw?.choices) && typeof raw?.model === "string";
+     if (!isValidOutput) {
+       throw new InferenceOutputError("Expected ChatCompletionOutput");
+     }
+     const completion = raw.choices[0];
+     return {
+       generated_text: completion.message.content,
+     };
    } else {
      const res = toArray(
        await request<TextGenerationOutput | TextGenerationOutput[]>(args, {
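The new Hyperbolic branch above wraps the prompt into a single user message, renames max_new_tokens to max_tokens before issuing the request, and maps the first chat choice back onto generated_text. A usage sketch with placeholder token and model ID:

import { textGeneration } from "@huggingface/inference";

const { generated_text } = await textGeneration({
  accessToken: "hf_***",
  provider: "hyperbolic",
  model: "meta-llama/Llama-3.1-8B-Instruct", // placeholder model ID
  inputs: "Write a haiku about inference providers.",
  parameters: { max_new_tokens: 64, temperature: 0.7 },
});

console.log(generated_text);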
package/src/types.ts CHANGED
@@ -29,14 +29,18 @@ export interface Options {
  export type InferenceTask = Exclude<PipelineType, "other">;

  export const INFERENCE_PROVIDERS = [
+   "black-forest-labs",
    "fal-ai",
    "fireworks-ai",
-   "nebius",
    "hf-inference",
+   "hyperbolic",
+   "nebius",
+   "novita",
    "replicate",
    "sambanova",
    "together",
  ] as const;
+
  export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];
  export interface BaseArgs {
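With the widened INFERENCE_PROVIDERS list above, the derived InferenceProvider union now accepts the three new provider names. A small sketch (assuming the type is re-exported from the package root, as the dist/src/types.d.ts diff above declares it):

import type { InferenceProvider } from "@huggingface/inference";

// All three now type-check as InferenceProvider values.
const newProviders: InferenceProvider[] = ["black-forest-labs", "hyperbolic", "novita"];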
package/src/utils/delay.ts ADDED
@@ -0,0 +1,5 @@
+ export function delay(ms: number): Promise<void> {
+   return new Promise((resolve) => {
+     setTimeout(() => resolve(), ms);
+   });
+ }
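A small sketch of how the new helper lends itself to fixed-interval polling, in the spirit of pollBflResponse above (the pollUntil wrapper is hypothetical, not part of the package):

import { delay } from "./delay";

async function pollUntil<T>(fn: () => Promise<T | undefined>, attempts = 5, intervalMs = 1000): Promise<T> {
  for (let step = 0; step < attempts; step++) {
    await delay(intervalMs);
    const result = await fn();
    if (result !== undefined) {
      return result;
    }
  }
  throw new Error(`No result after ${attempts} attempts`);
}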