@huggingface/inference 3.5.1 → 3.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/README.md +11 -11
  2. package/dist/browser/index.cjs +1652 -0
  3. package/dist/browser/index.js +1652 -0
  4. package/dist/index.cjs +295 -930
  5. package/dist/index.js +284 -940
  6. package/dist/src/{HfInference.d.ts → InferenceClient.d.ts} +12 -7
  7. package/dist/src/InferenceClient.d.ts.map +1 -0
  8. package/dist/src/index.d.ts +1 -1
  9. package/dist/src/index.d.ts.map +1 -1
  10. package/dist/src/lib/makeRequestOptions.d.ts +16 -1
  11. package/dist/src/lib/makeRequestOptions.d.ts.map +1 -1
  12. package/dist/src/providers/cohere.d.ts.map +1 -1
  13. package/dist/src/providers/novita.d.ts.map +1 -1
  14. package/dist/src/snippets/getInferenceSnippets.d.ts +4 -0
  15. package/dist/src/snippets/getInferenceSnippets.d.ts.map +1 -0
  16. package/dist/src/snippets/index.d.ts +1 -4
  17. package/dist/src/snippets/index.d.ts.map +1 -1
  18. package/dist/src/tasks/cv/textToVideo.d.ts.map +1 -1
  19. package/dist/test/InferenceClient.spec.d.ts +2 -0
  20. package/dist/test/InferenceClient.spec.d.ts.map +1 -0
  21. package/package.json +17 -8
  22. package/src/{HfInference.ts → InferenceClient.ts} +12 -7
  23. package/src/index.ts +2 -2
  24. package/src/lib/makeRequestOptions.ts +37 -10
  25. package/src/providers/black-forest-labs.ts +2 -2
  26. package/src/providers/cohere.ts +0 -1
  27. package/src/providers/fireworks-ai.ts +4 -4
  28. package/src/providers/hf-inference.ts +1 -1
  29. package/src/providers/nebius.ts +3 -3
  30. package/src/providers/novita.ts +7 -6
  31. package/src/providers/sambanova.ts +1 -1
  32. package/src/providers/together.ts +3 -3
  33. package/src/snippets/getInferenceSnippets.ts +398 -0
  34. package/src/snippets/index.ts +1 -5
  35. package/src/snippets/templates/js/fetch/basic.jinja +19 -0
  36. package/src/snippets/templates/js/fetch/basicAudio.jinja +19 -0
  37. package/src/snippets/templates/js/fetch/basicImage.jinja +19 -0
  38. package/src/snippets/templates/js/fetch/textToAudio.jinja +41 -0
  39. package/src/snippets/templates/js/fetch/textToImage.jinja +19 -0
  40. package/src/snippets/templates/js/fetch/zeroShotClassification.jinja +22 -0
  41. package/src/snippets/templates/js/huggingface.js/basic.jinja +11 -0
  42. package/src/snippets/templates/js/huggingface.js/basicAudio.jinja +13 -0
  43. package/src/snippets/templates/js/huggingface.js/basicImage.jinja +13 -0
  44. package/src/snippets/templates/js/huggingface.js/conversational.jinja +11 -0
  45. package/src/snippets/templates/js/huggingface.js/conversationalStream.jinja +19 -0
  46. package/src/snippets/templates/js/huggingface.js/textToImage.jinja +11 -0
  47. package/src/snippets/templates/js/huggingface.js/textToVideo.jinja +10 -0
  48. package/src/snippets/templates/js/openai/conversational.jinja +13 -0
  49. package/src/snippets/templates/js/openai/conversationalStream.jinja +22 -0
  50. package/src/snippets/templates/python/fal_client/textToImage.jinja +11 -0
  51. package/src/snippets/templates/python/huggingface_hub/basic.jinja +4 -0
  52. package/src/snippets/templates/python/huggingface_hub/basicAudio.jinja +1 -0
  53. package/src/snippets/templates/python/huggingface_hub/basicImage.jinja +1 -0
  54. package/src/snippets/templates/python/huggingface_hub/conversational.jinja +6 -0
  55. package/src/snippets/templates/python/huggingface_hub/conversationalStream.jinja +8 -0
  56. package/src/snippets/templates/python/huggingface_hub/documentQuestionAnswering.jinja +5 -0
  57. package/src/snippets/templates/python/huggingface_hub/imageToImage.jinja +6 -0
  58. package/src/snippets/templates/python/huggingface_hub/importInferenceClient.jinja +6 -0
  59. package/src/snippets/templates/python/huggingface_hub/textToImage.jinja +5 -0
  60. package/src/snippets/templates/python/huggingface_hub/textToVideo.jinja +4 -0
  61. package/src/snippets/templates/python/openai/conversational.jinja +13 -0
  62. package/src/snippets/templates/python/openai/conversationalStream.jinja +15 -0
  63. package/src/snippets/templates/python/requests/basic.jinja +7 -0
  64. package/src/snippets/templates/python/requests/basicAudio.jinja +7 -0
  65. package/src/snippets/templates/python/requests/basicImage.jinja +7 -0
  66. package/src/snippets/templates/python/requests/conversational.jinja +9 -0
  67. package/src/snippets/templates/python/requests/conversationalStream.jinja +16 -0
  68. package/src/snippets/templates/python/requests/documentQuestionAnswering.jinja +13 -0
  69. package/src/snippets/templates/python/requests/imageToImage.jinja +15 -0
  70. package/src/snippets/templates/python/requests/importRequests.jinja +10 -0
  71. package/src/snippets/templates/python/requests/tabular.jinja +9 -0
  72. package/src/snippets/templates/python/requests/textToAudio.jinja +23 -0
  73. package/src/snippets/templates/python/requests/textToImage.jinja +14 -0
  74. package/src/snippets/templates/python/requests/zeroShotClassification.jinja +8 -0
  75. package/src/snippets/templates/python/requests/zeroShotImageClassification.jinja +14 -0
  76. package/src/snippets/templates/sh/curl/basic.jinja +7 -0
  77. package/src/snippets/templates/sh/curl/basicAudio.jinja +5 -0
  78. package/src/snippets/templates/sh/curl/basicImage.jinja +5 -0
  79. package/src/snippets/templates/sh/curl/conversational.jinja +7 -0
  80. package/src/snippets/templates/sh/curl/conversationalStream.jinja +7 -0
  81. package/src/snippets/templates/sh/curl/zeroShotClassification.jinja +5 -0
  82. package/src/tasks/cv/textToVideo.ts +25 -5
  83. package/src/vendor/fetch-event-source/LICENSE +21 -0
  84. package/dist/src/HfInference.d.ts.map +0 -1
  85. package/dist/src/snippets/curl.d.ts +0 -17
  86. package/dist/src/snippets/curl.d.ts.map +0 -1
  87. package/dist/src/snippets/js.d.ts +0 -21
  88. package/dist/src/snippets/js.d.ts.map +0 -1
  89. package/dist/src/snippets/python.d.ts +0 -23
  90. package/dist/src/snippets/python.d.ts.map +0 -1
  91. package/dist/test/HfInference.spec.d.ts +0 -2
  92. package/dist/test/HfInference.spec.d.ts.map +0 -1
  93. package/src/snippets/curl.ts +0 -177
  94. package/src/snippets/js.ts +0 -475
  95. package/src/snippets/python.ts +0 -487
package/README.md CHANGED
@@ -1,7 +1,7 @@
 # 🤗 Hugging Face Inference
 
-A Typescript powered wrapper for the HF Inference API (serverless), Inference Endpoints (dedicated), and third-party Inference Providers.
-It works with [Inference API (serverless)](https://huggingface.co/docs/api-inference/index) and [Inference Endpoints (dedicated)](https://huggingface.co/docs/inference-endpoints/index), and even with supported third-party Inference Providers.
+A Typescript powered wrapper for the HF Inference API (serverless), Inference Endpoints (dedicated), and all supported Inference Providers.
+It works with [Inference API (serverless)](https://huggingface.co/docs/api-inference/index) and [Inference Endpoints (dedicated)](https://huggingface.co/docs/inference-endpoints/index), and even with all supported third-party Inference Providers.
 
 Check out the [full documentation](https://huggingface.co/docs/huggingface.js/inference/README).
 
@@ -25,24 +25,24 @@ yarn add @huggingface/inference
 
 ```ts
 // esm.sh
-import { HfInference } from "https://esm.sh/@huggingface/inference"
+import { InferenceClient } from "https://esm.sh/@huggingface/inference"
 // or npm:
-import { HfInference } from "npm:@huggingface/inference"
+import { InferenceClient } from "npm:@huggingface/inference"
 ```
 
 ### Initialize
 
 ```typescript
-import { HfInference } from '@huggingface/inference'
+import { InferenceClient } from '@huggingface/inference'
 
-const hf = new HfInference('your access token')
+const hf = new InferenceClient('your access token')
 ```
 
 ❗**Important note:** Using an access token is optional to get started, however you will be rate limited eventually. Join [Hugging Face](https://huggingface.co/join) and then visit [access tokens](https://huggingface.co/settings/tokens) to generate your access token for **free**.
 
 Your access token should be kept private. If you need to protect it in front-end applications, we suggest setting up a proxy server that stores the access token.
 
-### Third-party inference providers
+### All supported inference providers
 
 You can send inference requests to third-party providers with the inference client.
 
@@ -63,7 +63,7 @@ To send requests to a third-party provider, you have to pass the `provider` para
 ```ts
 const accessToken = "hf_..."; // Either a HF access token, or an API key from the third-party provider (Replicate in this example)
 
-const client = new HfInference(accessToken);
+const client = new InferenceClient(accessToken);
 await client.textToImage({
   provider: "replicate",
   model:"black-forest-labs/Flux.1-dev",
@@ -93,7 +93,7 @@ This is not an issue for LLMs as everyone converged on the OpenAI API anyways, b
 
 ### Tree-shaking
 
-You can import the functions you need directly from the module instead of using the `HfInference` class.
+You can import the functions you need directly from the module instead of using the `InferenceClient` class.
 
 ```ts
 import { textGeneration } from "@huggingface/inference";
@@ -165,7 +165,7 @@ for await (const chunk of hf.chatCompletionStream({
 It's also possible to call Mistral or OpenAI endpoints directly:
 
 ```typescript
-const openai = new HfInference(OPENAI_TOKEN).endpoint("https://api.openai.com");
+const openai = new InferenceClient(OPENAI_TOKEN).endpoint("https://api.openai.com");
 
 let out = "";
 for await (const chunk of openai.chatCompletionStream({
@@ -602,7 +602,7 @@ You can use any Chat Completion API-compatible provider with the `chatCompletion
 ```typescript
 // Chat Completion Example
 const MISTRAL_KEY = process.env.MISTRAL_KEY;
-const hf = new HfInference(MISTRAL_KEY);
+const hf = new InferenceClient(MISTRAL_KEY);
 const ep = hf.endpoint("https://api.mistral.ai");
 const stream = ep.chatCompletionStream({
   model: "mistral-tiny",