@huggingface/inference 3.6.2 → 3.7.0

This diff shows the content of publicly released package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (72)
  1. package/README.md +0 -25
  2. package/dist/index.cjs +135 -114
  3. package/dist/index.js +135 -114
  4. package/dist/src/config.d.ts +1 -0
  5. package/dist/src/config.d.ts.map +1 -1
  6. package/dist/src/lib/makeRequestOptions.d.ts.map +1 -1
  7. package/dist/src/tasks/audio/automaticSpeechRecognition.d.ts.map +1 -1
  8. package/dist/src/tasks/custom/request.d.ts +1 -0
  9. package/dist/src/tasks/custom/request.d.ts.map +1 -1
  10. package/dist/src/tasks/custom/streamingRequest.d.ts +1 -0
  11. package/dist/src/tasks/custom/streamingRequest.d.ts.map +1 -1
  12. package/dist/src/tasks/cv/imageToText.d.ts.map +1 -1
  13. package/dist/src/tasks/cv/objectDetection.d.ts +1 -1
  14. package/dist/src/tasks/cv/objectDetection.d.ts.map +1 -1
  15. package/dist/src/tasks/cv/textToVideo.d.ts +1 -1
  16. package/dist/src/tasks/cv/textToVideo.d.ts.map +1 -1
  17. package/dist/src/tasks/cv/zeroShotImageClassification.d.ts +1 -1
  18. package/dist/src/tasks/cv/zeroShotImageClassification.d.ts.map +1 -1
  19. package/dist/src/tasks/multimodal/documentQuestionAnswering.d.ts +1 -1
  20. package/dist/src/tasks/multimodal/documentQuestionAnswering.d.ts.map +1 -1
  21. package/dist/src/tasks/multimodal/visualQuestionAnswering.d.ts.map +1 -1
  22. package/dist/src/tasks/nlp/chatCompletion.d.ts +1 -1
  23. package/dist/src/tasks/nlp/chatCompletion.d.ts.map +1 -1
  24. package/dist/src/tasks/nlp/chatCompletionStream.d.ts +1 -1
  25. package/dist/src/tasks/nlp/chatCompletionStream.d.ts.map +1 -1
  26. package/dist/src/tasks/nlp/questionAnswering.d.ts.map +1 -1
  27. package/dist/src/tasks/nlp/sentenceSimilarity.d.ts.map +1 -1
  28. package/dist/src/tasks/nlp/textClassification.d.ts.map +1 -1
  29. package/dist/src/tasks/nlp/tokenClassification.d.ts.map +1 -1
  30. package/dist/src/tasks/nlp/zeroShotClassification.d.ts.map +1 -1
  31. package/dist/src/types.d.ts +7 -0
  32. package/dist/src/types.d.ts.map +1 -1
  33. package/dist/src/utils/request.d.ts +27 -0
  34. package/dist/src/utils/request.d.ts.map +1 -0
  35. package/package.json +2 -2
  36. package/src/config.ts +1 -0
  37. package/src/lib/makeRequestOptions.ts +5 -2
  38. package/src/snippets/templates.exported.ts +1 -1
  39. package/src/tasks/audio/audioClassification.ts +2 -2
  40. package/src/tasks/audio/audioToAudio.ts +2 -2
  41. package/src/tasks/audio/automaticSpeechRecognition.ts +3 -3
  42. package/src/tasks/audio/textToSpeech.ts +2 -2
  43. package/src/tasks/custom/request.ts +7 -32
  44. package/src/tasks/custom/streamingRequest.ts +5 -85
  45. package/src/tasks/cv/imageClassification.ts +2 -2
  46. package/src/tasks/cv/imageSegmentation.ts +2 -2
  47. package/src/tasks/cv/imageToImage.ts +2 -2
  48. package/src/tasks/cv/imageToText.ts +7 -9
  49. package/src/tasks/cv/objectDetection.ts +4 -4
  50. package/src/tasks/cv/textToImage.ts +3 -3
  51. package/src/tasks/cv/textToVideo.ts +23 -20
  52. package/src/tasks/cv/zeroShotImageClassification.ts +4 -5
  53. package/src/tasks/multimodal/documentQuestionAnswering.ts +13 -13
  54. package/src/tasks/multimodal/visualQuestionAnswering.ts +4 -2
  55. package/src/tasks/nlp/chatCompletion.ts +3 -4
  56. package/src/tasks/nlp/chatCompletionStream.ts +3 -3
  57. package/src/tasks/nlp/featureExtraction.ts +2 -2
  58. package/src/tasks/nlp/fillMask.ts +2 -2
  59. package/src/tasks/nlp/questionAnswering.ts +3 -2
  60. package/src/tasks/nlp/sentenceSimilarity.ts +2 -11
  61. package/src/tasks/nlp/summarization.ts +2 -2
  62. package/src/tasks/nlp/tableQuestionAnswering.ts +2 -2
  63. package/src/tasks/nlp/textClassification.ts +8 -9
  64. package/src/tasks/nlp/textGeneration.ts +16 -16
  65. package/src/tasks/nlp/textGenerationStream.ts +2 -2
  66. package/src/tasks/nlp/tokenClassification.ts +9 -10
  67. package/src/tasks/nlp/translation.ts +2 -2
  68. package/src/tasks/nlp/zeroShotClassification.ts +9 -10
  69. package/src/tasks/tabular/tabularClassification.ts +2 -2
  70. package/src/tasks/tabular/tabularRegression.ts +2 -2
  71. package/src/types.ts +8 -0
  72. package/src/utils/request.ts +161 -0
@@ -1 +1 @@
- {"version":3,"file":"textToVideo.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/textToVideo.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAqB,OAAO,EAAE,MAAM,aAAa,CAAC;AACxE,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAS3D,MAAM,MAAM,eAAe,GAAG,QAAQ,GAAG,gBAAgB,CAAC;AAE1D,MAAM,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAcrC,wBAAsB,WAAW,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,iBAAiB,CAAC,CA4CtG"}
+ {"version":3,"file":"textToVideo.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/textToVideo.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAI3D,OAAO,KAAK,EAAE,QAAQ,EAAqB,OAAO,EAAE,MAAM,aAAa,CAAC;AAKxE,MAAM,MAAM,eAAe,GAAG,QAAQ,GAAG,gBAAgB,CAAC;AAE1D,MAAM,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAcrC,wBAAsB,WAAW,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAgDtG"}
@@ -1,5 +1,5 @@
- import type { BaseArgs, Options } from "../../types";
  import type { ZeroShotImageClassificationInput, ZeroShotImageClassificationOutput } from "@huggingface/tasks";
+ import type { BaseArgs, Options } from "../../types";
  /**
  * @deprecated
  */
@@ -1 +1 @@
- {"version":3,"file":"zeroShotImageClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/zeroShotImageClassification.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,OAAO,KAAK,EAAE,gCAAgC,EAAE,iCAAiC,EAAE,MAAM,oBAAoB,CAAC;AAE9G;;GAEG;AACH,UAAU,sCAAsC;IAC/C,MAAM,EAAE;QAAE,KAAK,EAAE,IAAI,GAAG,WAAW,CAAA;KAAE,CAAC;CACtC;AAED,MAAM,MAAM,+BAA+B,GAAG,QAAQ,GACrD,CAAC,gCAAgC,GAAG,sCAAsC,CAAC,CAAC;AAwB7E;;;GAGG;AACH,wBAAsB,2BAA2B,CAChD,IAAI,EAAE,+BAA+B,EACrC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,iCAAiC,CAAC,CAY5C"}
+ {"version":3,"file":"zeroShotImageClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/zeroShotImageClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gCAAgC,EAAE,iCAAiC,EAAE,MAAM,oBAAoB,CAAC;AAE9G,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAe,MAAM,aAAa,CAAC;AAIlE;;GAEG;AACH,UAAU,sCAAsC;IAC/C,MAAM,EAAE;QAAE,KAAK,EAAE,IAAI,GAAG,WAAW,CAAA;KAAE,CAAC;CACtC;AAED,MAAM,MAAM,+BAA+B,GAAG,QAAQ,GACrD,CAAC,gCAAgC,GAAG,sCAAsC,CAAC,CAAC;AAwB7E;;;GAGG;AACH,wBAAsB,2BAA2B,CAChD,IAAI,EAAE,+BAA+B,EACrC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,iCAAiC,CAAC,CAY5C"}
@@ -1,5 +1,5 @@
- import type { BaseArgs, Options } from "../../types";
  import type { DocumentQuestionAnsweringInput, DocumentQuestionAnsweringInputData, DocumentQuestionAnsweringOutput } from "@huggingface/tasks";
+ import type { BaseArgs, Options } from "../../types";
  export type DocumentQuestionAnsweringArgs = BaseArgs & DocumentQuestionAnsweringInput & {
  inputs: DocumentQuestionAnsweringInputData & {
  image: Blob;
@@ -1 +1 @@
- {"version":3,"file":"documentQuestionAnswering.d.ts","sourceRoot":"","sources":["../../../../src/tasks/multimodal/documentQuestionAnswering.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAKrD,OAAO,KAAK,EACX,8BAA8B,EAC9B,kCAAkC,EAClC,+BAA+B,EAC/B,MAAM,oBAAoB,CAAC;AAG5B,MAAM,MAAM,6BAA6B,GAAG,QAAQ,GACnD,8BAA8B,GAAG;IAAE,MAAM,EAAE,kCAAkC,GAAG;QAAE,KAAK,EAAE,IAAI,CAAA;KAAE,CAAA;CAAE,CAAC;AAEnG;;GAEG;AACH,wBAAsB,yBAAyB,CAC9C,IAAI,EAAE,6BAA6B,EACnC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,+BAA+B,CAAC,MAAM,CAAC,CAAC,CAgClD"}
+ {"version":3,"file":"documentQuestionAnswering.d.ts","sourceRoot":"","sources":["../../../../src/tasks/multimodal/documentQuestionAnswering.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,8BAA8B,EAC9B,kCAAkC,EAClC,+BAA+B,EAC/B,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAe,MAAM,aAAa,CAAC;AAMlE,MAAM,MAAM,6BAA6B,GAAG,QAAQ,GACnD,8BAA8B,GAAG;IAAE,MAAM,EAAE,kCAAkC,GAAG;QAAE,KAAK,EAAE,IAAI,CAAA;KAAE,CAAA;CAAE,CAAC;AAEnG;;GAEG;AACH,wBAAsB,yBAAyB,CAC9C,IAAI,EAAE,6BAA6B,EACnC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,+BAA+B,CAAC,MAAM,CAAC,CAAC,CAiClD"}
@@ -1 +1 @@
- {"version":3,"file":"visualQuestionAnswering.d.ts","sourceRoot":"","sources":["../../../../src/tasks/multimodal/visualQuestionAnswering.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,4BAA4B,EAC5B,gCAAgC,EAChC,6BAA6B,EAC7B,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAe,MAAM,aAAa,CAAC;AAKlE,MAAM,MAAM,2BAA2B,GAAG,QAAQ,GACjD,4BAA4B,GAAG;IAAE,MAAM,EAAE,gCAAgC,GAAG;QAAE,KAAK,EAAE,IAAI,CAAA;KAAE,CAAA;CAAE,CAAC;AAE/F;;GAEG;AACH,wBAAsB,uBAAuB,CAC5C,IAAI,EAAE,2BAA2B,EACjC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,6BAA6B,CAAC,MAAM,CAAC,CAAC,CAsBhD"}
+ {"version":3,"file":"visualQuestionAnswering.d.ts","sourceRoot":"","sources":["../../../../src/tasks/multimodal/visualQuestionAnswering.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,4BAA4B,EAC5B,gCAAgC,EAChC,6BAA6B,EAC7B,MAAM,oBAAoB,CAAC;AAE5B,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAe,MAAM,aAAa,CAAC;AAKlE,MAAM,MAAM,2BAA2B,GAAG,QAAQ,GACjD,4BAA4B,GAAG;IAAE,MAAM,EAAE,gCAAgC,GAAG;QAAE,KAAK,EAAE,IAAI,CAAA;KAAE,CAAA;CAAE,CAAC;AAE/F;;GAEG;AACH,wBAAsB,uBAAuB,CAC5C,IAAI,EAAE,2BAA2B,EACjC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,6BAA6B,CAAC,MAAM,CAAC,CAAC,CAwBhD"}
@@ -1,5 +1,5 @@
- import type { BaseArgs, Options } from "../../types";
  import type { ChatCompletionInput, ChatCompletionOutput } from "@huggingface/tasks";
+ import type { BaseArgs, Options } from "../../types";
  /**
  * Use the chat completion endpoint to generate a response to a prompt, using OpenAI message completion API no stream
  */
@@ -1 +1 @@
- {"version":3,"file":"chatCompletion.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/chatCompletion.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAErD,OAAO,KAAK,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAEpF;;GAEG;AACH,wBAAsB,cAAc,CACnC,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,oBAAoB,CAAC,CAuB/B"}
+ {"version":3,"file":"chatCompletion.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/chatCompletion.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAEpF,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD;;GAEG;AACH,wBAAsB,cAAc,CACnC,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,oBAAoB,CAAC,CAsB/B"}
@@ -1,5 +1,5 @@
- import type { BaseArgs, Options } from "../../types";
  import type { ChatCompletionInput, ChatCompletionStreamOutput } from "@huggingface/tasks";
+ import type { BaseArgs, Options } from "../../types";
  /**
  * Use to continue text from a prompt. Same as `textGeneration` but returns generator that can be read one token at a time
  */
@@ -1 +1 @@
- {"version":3,"file":"chatCompletionStream.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/chatCompletionStream.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAErD,OAAO,KAAK,EAAE,mBAAmB,EAAE,0BAA0B,EAAE,MAAM,oBAAoB,CAAC;AAE1F;;GAEG;AACH,wBAAuB,oBAAoB,CAC1C,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,cAAc,CAAC,0BAA0B,CAAC,CAM5C"}
+ {"version":3,"file":"chatCompletionStream.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/chatCompletionStream.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,0BAA0B,EAAE,MAAM,oBAAoB,CAAC;AAC1F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD;;GAEG;AACH,wBAAuB,oBAAoB,CAC1C,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,cAAc,CAAC,0BAA0B,CAAC,CAM5C"}
@@ -1 +1 @@
- {"version":3,"file":"questionAnswering.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/questionAnswering.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,oBAAoB,CAAC;AAE1F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,qBAAqB,GAAG,QAAQ,GAAG,sBAAsB,CAAC;AAEtE;;GAEG;AACH,wBAAsB,iBAAiB,CACtC,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CAyB1C"}
+ {"version":3,"file":"questionAnswering.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/questionAnswering.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,oBAAoB,CAAC;AAE1F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,qBAAqB,GAAG,QAAQ,GAAG,sBAAsB,CAAC;AAEtE;;GAEG;AACH,wBAAsB,iBAAiB,CACtC,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CA0B1C"}
@@ -1 +1 @@
- {"version":3,"file":"sentenceSimilarity.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/sentenceSimilarity.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,uBAAuB,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAE5F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,sBAAsB,GAAG,QAAQ,GAAG,uBAAuB,CAAC;AAExE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,wBAAwB,CAAC,CAWnC"}
+ {"version":3,"file":"sentenceSimilarity.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/sentenceSimilarity.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,uBAAuB,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAE5F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,sBAAsB,GAAG,QAAQ,GAAG,uBAAuB,CAAC;AAExE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,wBAAwB,CAAC,CAWnC"}
@@ -1 +1 @@
- {"version":3,"file":"textClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/textClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,uBAAuB,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAE5F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,sBAAsB,GAAG,QAAQ,GAAG,uBAAuB,CAAC;AAExE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,wBAAwB,CAAC,CAanC"}
+ {"version":3,"file":"textClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/textClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,uBAAuB,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAE5F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGrD,MAAM,MAAM,sBAAsB,GAAG,QAAQ,GAAG,uBAAuB,CAAC;AAExE;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,wBAAwB,CAAC,CAYnC"}
@@ -1 +1 @@
- {"version":3,"file":"tokenClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/tokenClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,wBAAwB,EAAE,yBAAyB,EAAE,MAAM,oBAAoB,CAAC;AAE9F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,uBAAuB,GAAG,QAAQ,GAAG,wBAAwB,CAAC;AAE1E;;GAEG;AACH,wBAAsB,mBAAmB,CACxC,IAAI,EAAE,uBAAuB,EAC7B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,yBAAyB,CAAC,CAuBpC"}
+ {"version":3,"file":"tokenClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/tokenClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,wBAAwB,EAAE,yBAAyB,EAAE,MAAM,oBAAoB,CAAC;AAE9F,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,uBAAuB,GAAG,QAAQ,GAAG,wBAAwB,CAAC;AAE1E;;GAEG;AACH,wBAAsB,mBAAmB,CACxC,IAAI,EAAE,uBAAuB,EAC7B,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,yBAAyB,CAAC,CAsBpC"}
@@ -1 +1 @@
- {"version":3,"file":"zeroShotClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/zeroShotClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,2BAA2B,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AAEpG,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,0BAA0B,GAAG,QAAQ,GAAG,2BAA2B,CAAC;AAEhF;;GAEG;AACH,wBAAsB,sBAAsB,CAC3C,IAAI,EAAE,0BAA0B,EAChC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,4BAA4B,CAAC,CAqBvC"}
+ {"version":3,"file":"zeroShotClassification.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/zeroShotClassification.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,2BAA2B,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AAEpG,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,MAAM,MAAM,0BAA0B,GAAG,QAAQ,GAAG,2BAA2B,CAAC;AAEhF;;GAEG;AACH,wBAAsB,sBAAsB,CAC3C,IAAI,EAAE,0BAA0B,EAChC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,4BAA4B,CAAC,CAoBvC"}
@@ -20,6 +20,13 @@ export interface Options {
  * (Default: "same-origin"). String | Boolean. Credentials to use for the request. If this is a string, it will be passed straight on. If it's a boolean, true will be "include" and false will not send credentials at all.
  */
  includeCredentials?: string | boolean;
+ /**
+ * The billing account to use for the requests.
+ *
+ * By default the requests are billed on the user's account.
+ * Requests can only be billed to an organization the user is a member of, and which has subscribed to Enterprise Hub.
+ */
+ billTo?: string;
  }
  export type InferenceTask = Exclude<PipelineType, "other">;
  export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cerebras", "cohere", "fal-ai", "fireworks-ai", "hf-inference", "hyperbolic", "nebius", "novita", "openai", "replicate", "sambanova", "together"];
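The new `billTo` option rides on the shared `Options` type, so it can be forwarded by any task helper in 3.7.0. A minimal sketch of how a caller might use it (the token, model, and organization name below are placeholders, not taken from this diff):

import { chatCompletion } from "@huggingface/inference";

// "my-org" must be an organization the caller belongs to, with an Enterprise Hub
// subscription; otherwise the request is billed to the user's own account.
const output = await chatCompletion(
	{
		accessToken: "hf_...",
		model: "meta-llama/Llama-3.1-8B-Instruct",
		messages: [{ role: "user", content: "Hello!" }],
	},
	{ billTo: "my-org" }
);
console.log(output.choices[0].message);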
@@ -1 +1 @@
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;CACtC;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;AAE3D,eAAO,MAAM,mBAAmB,kLActB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrC,CAAC;AAEH,MAAM,WAAW,cAAc;IAC9B,WAAW,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,aAAa,KAAK,MAAM,CAAC,GAAG,CAAC,MAAM,MAAM,CAAC,CAAC;IACjE,QAAQ,EAAE,CAAC,MAAM,EAAE,UAAU,KAAK,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC1D,WAAW,EAAE,CAAC,MAAM,EAAE,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9D,OAAO,EAAE,CAAC,MAAM,EAAE,SAAS,KAAK,MAAM,CAAC;IACvC,qBAAqB,CAAC,EAAE,OAAO,CAAC;CAChC;AAED,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG,UAAU,GAAG,qBAAqB,GAAG,cAAc,CAAC;AAEtF,MAAM,WAAW,YAAY;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,UAAU,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACzB,UAAU,EAAE,UAAU,CAAC;IACvB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB;AAED,MAAM,WAAW,UAAU;IAC1B,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC9B,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB"}
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC;AAE7B,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,KAAK,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,WAAW,CAAC;IAErB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;IAEtC;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;AAE3D,eAAO,MAAM,mBAAmB,kLActB,CAAC;AAEX,MAAM,MAAM,iBAAiB,GAAG,CAAC,OAAO,mBAAmB,CAAC,CAAC,MAAM,CAAC,CAAC;AAErE,MAAM,WAAW,QAAQ;IACxB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAEhB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,iBAAiB,CAAC;CAC7B;AAED,MAAM,MAAM,WAAW,GAAG,QAAQ,GACjC,CACG;IAAE,IAAI,EAAE,IAAI,GAAG,WAAW,CAAA;CAAE,GAC5B;IAAE,MAAM,EAAE,OAAO,CAAA;CAAE,GACnB;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,GAClB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,GACrB,mBAAmB,CACrB,GAAG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrC,CAAC;AAEH,MAAM,WAAW,cAAc;IAC9B,WAAW,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,aAAa,KAAK,MAAM,CAAC,GAAG,CAAC,MAAM,MAAM,CAAC,CAAC;IACjE,QAAQ,EAAE,CAAC,MAAM,EAAE,UAAU,KAAK,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC1D,WAAW,EAAE,CAAC,MAAM,EAAE,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9D,OAAO,EAAE,CAAC,MAAM,EAAE,SAAS,KAAK,MAAM,CAAC;IACvC,qBAAqB,CAAC,EAAE,OAAO,CAAC;CAChC;AAED,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG,UAAU,GAAG,qBAAqB,GAAG,cAAc,CAAC;AAEtF,MAAM,WAAW,YAAY;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,UAAU,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACzB,UAAU,EAAE,UAAU,CAAC;IACvB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB;AAED,MAAM,WAAW,UAAU;IAC1B,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC9B,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,aAAa,CAAC;CACrB"}
@@ -0,0 +1,27 @@
+ import type { InferenceTask, Options, RequestArgs } from "../types";
+ export interface ResponseWrapper<T> {
+ data: T;
+ requestContext: {
+ url: string;
+ info: RequestInit;
+ };
+ }
+ /**
+ * Primitive to make custom calls to the inference provider
+ */
+ export declare function innerRequest<T>(args: RequestArgs, options?: Options & {
+ /** In most cases (unless we pass a endpointUrl) we know the task */
+ task?: InferenceTask;
+ /** Is chat completion compatible */
+ chatCompletion?: boolean;
+ }): Promise<ResponseWrapper<T>>;
+ /**
+ * Primitive to make custom inference calls that expect server-sent events, and returns the response through a generator
+ */
+ export declare function innerStreamingRequest<T>(args: RequestArgs, options?: Options & {
+ /** In most cases (unless we pass a endpointUrl) we know the task */
+ task?: InferenceTask;
+ /** Is chat completion compatible */
+ chatCompletion?: boolean;
+ }): AsyncGenerator<T>;
+ //# sourceMappingURL=request.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"request.d.ts","sourceRoot":"","sources":["../../../src/utils/request.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAIpE,MAAM,WAAW,eAAe,CAAC,CAAC;IACjC,IAAI,EAAE,CAAC,CAAC;IACR,cAAc,EAAE;QACf,GAAG,EAAE,MAAM,CAAC;QACZ,IAAI,EAAE,WAAW,CAAC;KAClB,CAAC;CACF;AAED;;GAEG;AACH,wBAAsB,YAAY,CAAC,CAAC,EACnC,IAAI,EAAE,WAAW,EACjB,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,oEAAoE;IACpE,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,oCAAoC;IACpC,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAoC7B;AAED;;GAEG;AACH,wBAAuB,qBAAqB,CAAC,CAAC,EAC7C,IAAI,EAAE,WAAW,EACjB,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,oEAAoE;IACpE,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,oCAAoC;IACpC,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,cAAc,CAAC,CAAC,CAAC,CAuFnB"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@huggingface/inference",
- "version": "3.6.2",
+ "version": "3.7.0",
  "packageManager": "pnpm@8.10.5",
  "license": "MIT",
  "author": "Hugging Face and Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -40,7 +40,7 @@
  },
  "type": "module",
  "dependencies": {
- "@huggingface/tasks": "^0.18.4",
+ "@huggingface/tasks": "^0.18.6",
  "@huggingface/jinja": "^0.3.3"
  },
  "devDependencies": {
package/src/config.ts CHANGED
@@ -1,2 +1,3 @@
  export const HF_HUB_URL = "https://huggingface.co";
  export const HF_ROUTER_URL = "https://router.huggingface.co";
+ export const HF_HEADER_X_BILL_TO = "X-HF-Bill-To";
@@ -1,4 +1,4 @@
- import { HF_HUB_URL, HF_ROUTER_URL } from "../config";
+ import { HF_HUB_URL, HF_ROUTER_URL, HF_HEADER_X_BILL_TO } from "../config";
  import { BLACK_FOREST_LABS_CONFIG } from "../providers/black-forest-labs";
  import { CEREBRAS_CONFIG } from "../providers/cerebras";
  import { COHERE_CONFIG } from "../providers/cohere";
@@ -117,7 +117,7 @@ export function makeRequestOptionsFromResolvedModel(
  const provider = maybeProvider ?? "hf-inference";
  const providerConfig = providerConfigs[provider];

- const { includeCredentials, task, chatCompletion, signal } = options ?? {};
+ const { includeCredentials, task, chatCompletion, signal, billTo } = options ?? {};

  const authMethod = (() => {
  if (providerConfig.clientSideRoutingOnly) {
@@ -159,6 +159,9 @@ export function makeRequestOptionsFromResolvedModel(
  accessToken,
  authMethod,
  });
+ if (billTo) {
+ headers[HF_HEADER_X_BILL_TO] = billTo;
+ }

  // Add content-type to headers
  if (!binary) {
@@ -6,7 +6,7 @@ export const templates: Record<string, Record<string, Record<string, string>>> =
  "basicAudio": "async function query(data) {\n\tconst response = await fetch(\n\t\t\"{{ fullUrl }}\",\n\t\t{\n\t\t\theaders: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n\t\t\t\t\"Content-Type\": \"audio/flac\"\n\t\t\t},\n\t\t\tmethod: \"POST\",\n\t\t\tbody: JSON.stringify(data),\n\t\t}\n\t);\n\tconst result = await response.json();\n\treturn result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});",
  "basicImage": "async function query(data) {\n\tconst response = await fetch(\n\t\t\"{{ fullUrl }}\",\n\t\t{\n\t\t\theaders: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n\t\t\t\t\"Content-Type\": \"image/jpeg\"\n\t\t\t},\n\t\t\tmethod: \"POST\",\n\t\t\tbody: JSON.stringify(data),\n\t\t}\n\t);\n\tconst result = await response.json();\n\treturn result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});",
  "textToAudio": "{% if model.library_name == \"transformers\" %}\nasync function query(data) {\n\tconst response = await fetch(\n\t\t\"{{ fullUrl }}\",\n\t\t{\n\t\t\theaders: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t},\n\t\t\tmethod: \"POST\",\n\t\t\tbody: JSON.stringify(data),\n\t\t}\n\t);\n\tconst result = await response.blob();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n // Returns a byte object of the Audio wavform. Use it directly!\n});\n{% else %}\nasync function query(data) {\n\tconst response = await fetch(\n\t\t\"{{ fullUrl }}\",\n\t\t{\n\t\t\theaders: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t},\n\t\t\tmethod: \"POST\",\n\t\t\tbody: JSON.stringify(data),\n\t\t}\n\t);\n const result = await response.json();\n return result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n console.log(JSON.stringify(response));\n});\n{% endif %} ",
- "textToImage": "async function query(data) {\n\tconst response = await fetch(\n\t\t\"{{ fullUrl }}\",\n\t\t{\n\t\t\theaders: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t},\n\t\t\tmethod: \"POST\",\n\t\t\tbody: JSON.stringify(data),\n\t\t}\n\t);\n\tconst result = await response.blob();\n\treturn result;\n}\n\nquery({ inputs: {{ providerInputs.asObj.inputs }} }).then((response) => {\n // Use image\n});",
+ "textToImage": "async function query(data) {\n\tconst response = await fetch(\n\t\t\"{{ fullUrl }}\",\n\t\t{\n\t\t\theaders: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t},\n\t\t\tmethod: \"POST\",\n\t\t\tbody: JSON.stringify(data),\n\t\t}\n\t);\n\tconst result = await response.blob();\n\treturn result;\n}\n\n\nquery({ {{ providerInputs.asTsString }} }).then((response) => {\n // Use image\n});",
  "zeroShotClassification": "async function query(data) {\n const response = await fetch(\n\t\t\"{{ fullUrl }}\",\n {\n headers: {\n\t\t\t\tAuthorization: \"{{ authorizationHeader }}\",\n \"Content-Type\": \"application/json\",\n },\n method: \"POST\",\n body: JSON.stringify(data),\n }\n );\n const result = await response.json();\n return result;\n}\n\nquery({\n inputs: {{ providerInputs.asObj.inputs }},\n parameters: { candidate_labels: [\"refund\", \"legal\", \"faq\"] }\n}).then((response) => {\n console.log(JSON.stringify(response));\n});"
  },
  "huggingface.js": {
@@ -1,7 +1,7 @@
  import type { AudioClassificationInput, AudioClassificationOutput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";
  import type { LegacyAudioInput } from "./utils";
  import { preparePayload } from "./utils";

@@ -16,7 +16,7 @@ export async function audioClassification(
  options?: Options
  ): Promise<AudioClassificationOutput> {
  const payload = preparePayload(args);
- const res = await request<AudioClassificationOutput>(payload, {
+ const { data: res } = await innerRequest<AudioClassificationOutput>(payload, {
  ...options,
  task: "audio-classification",
  });
@@ -1,6 +1,6 @@
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";
  import type { LegacyAudioInput } from "./utils";
  import { preparePayload } from "./utils";

@@ -37,7 +37,7 @@ export interface AudioToAudioOutput {
  */
  export async function audioToAudio(args: AudioToAudioArgs, options?: Options): Promise<AudioToAudioOutput[]> {
  const payload = preparePayload(args);
- const res = await request<AudioToAudioOutput>(payload, {
+ const { data: res } = await innerRequest<AudioToAudioOutput>(payload, {
  ...options,
  task: "audio-to-audio",
  });
@@ -2,10 +2,10 @@ import type { AutomaticSpeechRecognitionInput, AutomaticSpeechRecognitionOutput
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options, RequestArgs } from "../../types";
  import { base64FromBytes } from "../../utils/base64FromBytes";
- import { request } from "../custom/request";
+ import { omit } from "../../utils/omit";
+ import { innerRequest } from "../../utils/request";
  import type { LegacyAudioInput } from "./utils";
  import { preparePayload } from "./utils";
- import { omit } from "../../utils/omit";

  export type AutomaticSpeechRecognitionArgs = BaseArgs & (AutomaticSpeechRecognitionInput | LegacyAudioInput);
  /**
@@ -17,7 +17,7 @@ export async function automaticSpeechRecognition(
  options?: Options
  ): Promise<AutomaticSpeechRecognitionOutput> {
  const payload = await buildPayload(args);
- const res = await request<AutomaticSpeechRecognitionOutput>(payload, {
+ const { data: res } = await innerRequest<AutomaticSpeechRecognitionOutput>(payload, {
  ...options,
  task: "automatic-speech-recognition",
  });
@@ -2,7 +2,7 @@ import type { TextToSpeechInput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
  import { omit } from "../../utils/omit";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";
  type TextToSpeechArgs = BaseArgs & TextToSpeechInput;
 
  interface OutputUrlTextToSpeechGeneration {
@@ -22,7 +22,7 @@ export async function textToSpeech(args: TextToSpeechArgs, options?: Options): P
  text: args.inputs,
  }
  : args;
- const res = await request<Blob | OutputUrlTextToSpeechGeneration>(payload, {
+ const { data: res } = await innerRequest<Blob | OutputUrlTextToSpeechGeneration>(payload, {
  ...options,
  task: "text-to-speech",
  });
@@ -1,8 +1,9 @@
  import type { InferenceTask, Options, RequestArgs } from "../../types";
- import { makeRequestOptions } from "../../lib/makeRequestOptions";
+ import { innerRequest } from "../../utils/request";

  /**
  * Primitive to make custom calls to the inference provider
+ * @deprecated Use specific task functions instead. This function will be removed in a future version.
  */
  export async function request<T>(
  args: RequestArgs,
@@ -13,35 +14,9 @@ export async function request<T>(
  chatCompletion?: boolean;
  }
  ): Promise<T> {
- const { url, info } = await makeRequestOptions(args, options);
- const response = await (options?.fetch ?? fetch)(url, info);
-
- if (options?.retry_on_error !== false && response.status === 503) {
- return request(args, options);
- }
-
- if (!response.ok) {
- const contentType = response.headers.get("Content-Type");
- if (["application/json", "application/problem+json"].some((ct) => contentType?.startsWith(ct))) {
- const output = await response.json();
- if ([400, 422, 404, 500].includes(response.status) && options?.chatCompletion) {
- throw new Error(
- `Server ${args.model} does not seem to support chat completion. Error: ${JSON.stringify(output.error)}`
- );
- }
- if (output.error || output.detail) {
- throw new Error(JSON.stringify(output.error ?? output.detail));
- } else {
- throw new Error(output);
- }
- }
- const message = contentType?.startsWith("text/plain;") ? await response.text() : undefined;
- throw new Error(message ?? "An error occurred while fetching the blob");
- }
-
- if (response.headers.get("Content-Type")?.startsWith("application/json")) {
- return await response.json();
- }
-
- return (await response.blob()) as T;
+ console.warn(
+ "The request method is deprecated and will be removed in a future version of huggingface.js. Use specific task functions instead."
+ );
+ const result = await innerRequest<T>(args, options);
+ return result.data;
  }
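The exported `request()` primitive is now a thin deprecation shim around `innerRequest`. A possible migration for callers that still use it is to switch to the dedicated task helper; a short sketch (token and model name are placeholders):

import { textClassification } from "@huggingface/inference";

// Replaces request(payload, { task: "text-classification" }) with the typed task helper.
const labels = await textClassification({
	accessToken: "hf_...",
	model: "distilbert-base-uncased-finetuned-sst-2-english",
	inputs: "This version bump looks safe to me.",
});
console.log(labels);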
@@ -1,10 +1,8 @@
  import type { InferenceTask, Options, RequestArgs } from "../../types";
- import { makeRequestOptions } from "../../lib/makeRequestOptions";
- import type { EventSourceMessage } from "../../vendor/fetch-event-source/parse";
- import { getLines, getMessages } from "../../vendor/fetch-event-source/parse";
-
+ import { innerStreamingRequest } from "../../utils/request";
  /**
  * Primitive to make custom inference calls that expect server-sent events, and returns the response through a generator
+ * @deprecated Use specific task functions instead. This function will be removed in a future version.
  */
  export async function* streamingRequest<T>(
  args: RequestArgs,
@@ -15,86 +13,8 @@ export async function* streamingRequest<T>(
  chatCompletion?: boolean;
  }
  ): AsyncGenerator<T> {
- const { url, info } = await makeRequestOptions({ ...args, stream: true }, options);
- const response = await (options?.fetch ?? fetch)(url, info);
-
- if (options?.retry_on_error !== false && response.status === 503) {
- return yield* streamingRequest(args, options);
- }
- if (!response.ok) {
- if (response.headers.get("Content-Type")?.startsWith("application/json")) {
- const output = await response.json();
- if ([400, 422, 404, 500].includes(response.status) && options?.chatCompletion) {
- throw new Error(`Server ${args.model} does not seem to support chat completion. Error: ${output.error}`);
- }
- if (typeof output.error === "string") {
- throw new Error(output.error);
- }
- if (output.error && "message" in output.error && typeof output.error.message === "string") {
- /// OpenAI errors
- throw new Error(output.error.message);
- }
- }
-
- throw new Error(`Server response contains error: ${response.status}`);
- }
- if (!response.headers.get("content-type")?.startsWith("text/event-stream")) {
- throw new Error(
- `Server does not support event stream content type, it returned ` + response.headers.get("content-type")
- );
- }
-
- if (!response.body) {
- return;
- }
-
- const reader = response.body.getReader();
- let events: EventSourceMessage[] = [];
-
- const onEvent = (event: EventSourceMessage) => {
- // accumulate events in array
- events.push(event);
- };
-
- const onChunk = getLines(
- getMessages(
- () => {},
- () => {},
- onEvent
- )
+ console.warn(
+ "The streamingRequest method is deprecated and will be removed in a future version of huggingface.js. Use specific task functions instead."
  );
-
- try {
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- return;
- }
- onChunk(value);
- for (const event of events) {
- if (event.data.length > 0) {
- if (event.data === "[DONE]") {
- return;
- }
- const data = JSON.parse(event.data);
- if (typeof data === "object" && data !== null && "error" in data) {
- const errorStr =
- typeof data.error === "string"
- ? data.error
- : typeof data.error === "object" &&
- data.error &&
- "message" in data.error &&
- typeof data.error.message === "string"
- ? data.error.message
- : JSON.stringify(data.error);
- throw new Error(`Error forwarded from backend: ` + errorStr);
- }
- yield data as T;
- }
- }
- events = [];
- }
- } finally {
- reader.releaseLock();
- }
+ yield* innerStreamingRequest(args, options);
  }
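Code that consumed `streamingRequest()` directly can move to a dedicated streaming task helper such as `chatCompletionStream`, which wraps the same server-sent-events handling; a small sketch (token and model name are placeholders):

import { chatCompletionStream } from "@huggingface/inference";

// Yields OpenAI-style chunks; print the delta of the first choice as it streams in.
for await (const chunk of chatCompletionStream({
	accessToken: "hf_...",
	model: "meta-llama/Llama-3.1-8B-Instruct",
	messages: [{ role: "user", content: "Summarize this release in one line." }],
})) {
	process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}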
@@ -1,7 +1,7 @@
  import type { ImageClassificationInput, ImageClassificationOutput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";
  import { preparePayload, type LegacyImageInput } from "./utils";

  export type ImageClassificationArgs = BaseArgs & (ImageClassificationInput | LegacyImageInput);
@@ -15,7 +15,7 @@ export async function imageClassification(
  options?: Options
  ): Promise<ImageClassificationOutput> {
  const payload = preparePayload(args);
- const res = await request<ImageClassificationOutput>(payload, {
+ const { data: res } = await innerRequest<ImageClassificationOutput>(payload, {
  ...options,
  task: "image-classification",
  });
@@ -1,7 +1,7 @@
  import type { ImageSegmentationInput, ImageSegmentationOutput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";
  import { preparePayload, type LegacyImageInput } from "./utils";

  export type ImageSegmentationArgs = BaseArgs & (ImageSegmentationInput | LegacyImageInput);
@@ -15,7 +15,7 @@ export async function imageSegmentation(
  options?: Options
  ): Promise<ImageSegmentationOutput> {
  const payload = preparePayload(args);
- const res = await request<ImageSegmentationOutput>(payload, {
+ const { data: res } = await innerRequest<ImageSegmentationOutput>(payload, {
  ...options,
  task: "image-segmentation",
  });
@@ -2,7 +2,7 @@ import type { ImageToImageInput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options, RequestArgs } from "../../types";
  import { base64FromBytes } from "../../utils/base64FromBytes";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";

  export type ImageToImageArgs = BaseArgs & ImageToImageInput;

@@ -26,7 +26,7 @@ export async function imageToImage(args: ImageToImageArgs, options?: Options): P
  ),
  };
  }
- const res = await request<Blob>(reqArgs, {
+ const { data: res } = await innerRequest<Blob>(reqArgs, {
  ...options,
  task: "image-to-image",
  });
@@ -1,7 +1,7 @@
  import type { ImageToTextInput, ImageToTextOutput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, Options } from "../../types";
- import { request } from "../custom/request";
+ import { innerRequest } from "../../utils/request";
  import type { LegacyImageInput } from "./utils";
  import { preparePayload } from "./utils";

@@ -11,16 +11,14 @@ export type ImageToTextArgs = BaseArgs & (ImageToTextInput | LegacyImageInput);
  */
  export async function imageToText(args: ImageToTextArgs, options?: Options): Promise<ImageToTextOutput> {
  const payload = preparePayload(args);
- const res = (
- await request<[ImageToTextOutput]>(payload, {
- ...options,
- task: "image-to-text",
- })
- )?.[0];
+ const { data: res } = await innerRequest<[ImageToTextOutput]>(payload, {
+ ...options,
+ task: "image-to-text",
+ });

- if (typeof res?.generated_text !== "string") {
+ if (typeof res?.[0]?.generated_text !== "string") {
  throw new InferenceOutputError("Expected {generated_text: string}");
  }

- return res;
+ return res?.[0];
  }
@@ -1,7 +1,7 @@
- import { request } from "../custom/request";
- import type { BaseArgs, Options } from "../../types";
- import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { ObjectDetectionInput, ObjectDetectionOutput } from "@huggingface/tasks";
+ import { InferenceOutputError } from "../../lib/InferenceOutputError";
+ import type { BaseArgs, Options } from "../../types";
+ import { innerRequest } from "../../utils/request";
  import { preparePayload, type LegacyImageInput } from "./utils";

  export type ObjectDetectionArgs = BaseArgs & (ObjectDetectionInput | LegacyImageInput);
@@ -12,7 +12,7 @@ export type ObjectDetectionArgs = BaseArgs & (ObjectDetectionInput | LegacyImage
  */
  export async function objectDetection(args: ObjectDetectionArgs, options?: Options): Promise<ObjectDetectionOutput> {
  const payload = preparePayload(args);
- const res = await request<ObjectDetectionOutput>(payload, {
+ const { data: res } = await innerRequest<ObjectDetectionOutput>(payload, {
  ...options,
  task: "object-detection",
  });
@@ -1,9 +1,9 @@
  import type { TextToImageInput, TextToImageOutput } from "@huggingface/tasks";
  import { InferenceOutputError } from "../../lib/InferenceOutputError";
  import type { BaseArgs, InferenceProvider, Options } from "../../types";
- import { omit } from "../../utils/omit";
- import { request } from "../custom/request";
  import { delay } from "../../utils/delay";
+ import { omit } from "../../utils/omit";
+ import { innerRequest } from "../../utils/request";

  export type TextToImageArgs = BaseArgs & TextToImageInput;

@@ -65,7 +65,7 @@ export async function textToImage(args: TextToImageArgs, options?: TextToImageOp
  ...getResponseFormatArg(args.provider),
  prompt: args.inputs,
  };
- const res = await request<
+ const { data: res } = await innerRequest<
  | TextToImageOutput
  | Base64ImageGeneration
  | OutputUrlImageGeneration