huggingface-hub 0.34.4__py3-none-any.whl → 0.34.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of huggingface-hub has been flagged as possibly problematic.

huggingface_hub/__init__.py
@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING
 
 
-__version__ = "0.34.4"
+__version__ = "0.34.5"
 
 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when
huggingface_hub/inference/_client.py
@@ -133,7 +133,7 @@ class InferenceClient:
         Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
         arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
     provider (`str`, *optional*):
-        Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `"replicate"`, "sambanova"` or `"together"`.
+        Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
         Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
         If model is a URL or `base_url` is passed, then `provider` is not used.
     token (`str`, *optional*):
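
For context, a minimal usage sketch of the newly listed provider with the synchronous client, assuming a valid Hugging Face token and a chat model that is actually mapped to Scaleway (the API key and model id below are placeholders, not values taken from this diff):

    from huggingface_hub import InferenceClient

    # Route chat-completion requests through the newly added "scaleway" provider.
    client = InferenceClient(provider="scaleway", api_key="hf_xxx")

    response = client.chat_completion(
        model="meta-llama/Llama-3.1-8B-Instruct",
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
        max_tokens=32,
    )
    print(response.choices[0].message.content)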
huggingface_hub/inference/_generated/_async_client.py
@@ -121,7 +121,7 @@ class AsyncInferenceClient:
         Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
         arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
     provider (`str`, *optional*):
-        Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `"replicate"`, "sambanova"` or `"together"`.
+        Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
         Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
         If model is a URL or `base_url` is passed, then `provider` is not used.
     token (`str`, *optional*):
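
The same parameter is documented on the async client; an equivalent sketch under the same assumptions (placeholder key and model id):

    import asyncio

    from huggingface_hub import AsyncInferenceClient

    async def main() -> None:
        # Same provider selection as the sync client, but awaitable.
        client = AsyncInferenceClient(provider="scaleway", api_key="hf_xxx")
        response = await client.chat_completion(
            model="meta-llama/Llama-3.1-8B-Instruct",
            messages=[{"role": "user", "content": "Ping?"}],
            max_tokens=16,
        )
        print(response.choices[0].message.content)

    asyncio.run(main())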
huggingface_hub/inference/_providers/__init__.py
@@ -38,6 +38,7 @@ from .nscale import NscaleConversationalTask, NscaleTextToImageTask
 from .openai import OpenAIConversationalTask
 from .replicate import ReplicateImageToImageTask, ReplicateTask, ReplicateTextToImageTask, ReplicateTextToSpeechTask
 from .sambanova import SambanovaConversationalTask, SambanovaFeatureExtractionTask
+from .scaleway import ScalewayConversationalTask, ScalewayFeatureExtractionTask
 from .together import TogetherConversationalTask, TogetherTextGenerationTask, TogetherTextToImageTask
 
 
@@ -60,6 +61,7 @@ PROVIDER_T = Literal[
     "openai",
     "replicate",
     "sambanova",
+    "scaleway",
     "together",
 ]
 
@@ -154,6 +156,10 @@ PROVIDERS: Dict[PROVIDER_T, Dict[str, TaskProviderHelper]] = {
         "conversational": SambanovaConversationalTask(),
         "feature-extraction": SambanovaFeatureExtractionTask(),
     },
+    "scaleway": {
+        "conversational": ScalewayConversationalTask(),
+        "feature-extraction": ScalewayFeatureExtractionTask(),
+    },
     "together": {
         "text-to-image": TogetherTextToImageTask(),
         "conversational": TogetherConversationalTask(),
huggingface_hub/inference/_providers/_common.py
@@ -33,6 +33,7 @@ HARDCODED_MODEL_INFERENCE_MAPPING: Dict[str, Dict[str, InferenceProviderMapping]
     "nscale": {},
     "replicate": {},
     "sambanova": {},
+    "scaleway": {},
     "together": {},
 }
 
huggingface_hub/inference/_providers/scaleway.py (new file)
@@ -0,0 +1,28 @@
+from typing import Any, Dict, Optional, Union
+
+from huggingface_hub.inference._common import RequestParameters, _as_dict
+
+from ._common import BaseConversationalTask, InferenceProviderMapping, TaskProviderHelper, filter_none
+
+
+class ScalewayConversationalTask(BaseConversationalTask):
+    def __init__(self):
+        super().__init__(provider="scaleway", base_url="https://api.scaleway.ai")
+
+
+class ScalewayFeatureExtractionTask(TaskProviderHelper):
+    def __init__(self):
+        super().__init__(provider="scaleway", base_url="https://api.scaleway.ai", task="feature-extraction")
+
+    def _prepare_route(self, mapped_model: str, api_key: str) -> str:
+        return "/v1/embeddings"
+
+    def _prepare_payload_as_dict(
+        self, inputs: Any, parameters: Dict, provider_mapping_info: InferenceProviderMapping
+    ) -> Optional[Dict]:
+        parameters = filter_none(parameters)
+        return {"input": inputs, "model": provider_mapping_info.provider_id, **parameters}
+
+    def get_response(self, response: Union[bytes, Dict], request_params: Optional[RequestParameters] = None) -> Any:
+        embeddings = _as_dict(response)["data"]
+        return [embedding["embedding"] for embedding in embeddings]
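
A short usage sketch for the feature-extraction path added above, assuming a token with Inference Providers access and an embedding model mapped to Scaleway (the API key and model id are placeholders). The helper posts an OpenAI-style payload to /v1/embeddings and reads the vectors back from the "data" array:

    from huggingface_hub import InferenceClient

    client = InferenceClient(provider="scaleway", api_key="hf_xxx")

    # feature_extraction() is dispatched to ScalewayFeatureExtractionTask:
    # the text is sent as {"input": ..., "model": <provider model id>} and the
    # embedding vectors are extracted from response["data"][i]["embedding"].
    vector = client.feature_extraction(
        "Scaleway is now supported as an inference provider.",
        model="BAAI/bge-multilingual-gemma2",
    )
    print(len(vector))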
huggingface_hub-0.34.5.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: huggingface-hub
-Version: 0.34.4
+Version: 0.34.5
 Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
 Home-page: https://github.com/huggingface/huggingface_hub
 Author: Hugging Face, Inc.
huggingface_hub-0.34.5.dist-info/RECORD
@@ -1,4 +1,4 @@
-huggingface_hub/__init__.py,sha256=MsUqTc5DczrwOwtEhkM9Y7tkGAlHvDtNTDbWQcEEqTA,51837
+huggingface_hub/__init__.py,sha256=rv3FvQosoKR8MvPywT4KVkRA8MmMN0CmBPLTjmVp_LI,51837
 huggingface_hub/_commit_api.py,sha256=68HxFnJE2s-QmGZRHQav5kOMTseYV_ZQi04ADaQmZUk,38979
 huggingface_hub/_commit_scheduler.py,sha256=tfIoO1xWHjTJ6qy6VS6HIoymDycFPg0d6pBSZprrU2U,14679
 huggingface_hub/_inference_endpoints.py,sha256=ahmbPcEXsJ_JcMb9TDgdkD8Z2z9uytkFG3_1o6dTm8g,17598
@@ -57,10 +57,10 @@ huggingface_hub/commands/upload_large_folder.py,sha256=_1id84BFtbL8HgFRKZ-el_uPr
 huggingface_hub/commands/user.py,sha256=MjG1lwMq1p5QAlBolFnRX_pUxE3Kd3UiPl-nEEQSgXg,7537
 huggingface_hub/commands/version.py,sha256=rGpCbvxImY9eQqXrshYt609Iws27R75WARmKQrIo6Ok,1390
 huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_client.py,sha256=ggY3ebg_xjC9zuHUTpzG_rghFr65vDt7Btg3pATfJjE,165039
+huggingface_hub/inference/_client.py,sha256=__SCWGGXY6tivLdWJUiL5YZXYEyaOXNOMmwo_6UksZo,165054
 huggingface_hub/inference/_common.py,sha256=6qAIauugyl1eHk0FhWdjBNEXBNF33_VXC8lc1GR8t7s,15874
 huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_generated/_async_client.py,sha256=oklZPp7TBmCkUB5B3UpuivEXQWGgUxXJwA-Zs4YFDsM,171274
+huggingface_hub/inference/_generated/_async_client.py,sha256=7u6ZvDz6nDQ5fhs2qaBAVDw4QZgshGZiBeq-hdRZoL0,171289
 huggingface_hub/inference/_generated/types/__init__.py,sha256=9WvrGQ8aThtKSNzZF06j-CIE2ZuItne8FFnea1p1u38,6557
 huggingface_hub/inference/_generated/types/audio_classification.py,sha256=Jg3mzfGhCSH6CfvVvgJSiFpkz6v4nNA0G4LJXacEgNc,1573
 huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=2Ep4WkePL7oJwcp5nRJqApwviumGHbft9HhXE9XLHj4,891
@@ -103,8 +103,8 @@ huggingface_hub/inference/_mcp/constants.py,sha256=AnOp_oR5Vty0d5J3AynGGNK9i1I9K
 huggingface_hub/inference/_mcp/mcp_client.py,sha256=ndaTcZZPbU1ZTNUeB9-WdaOx7bHD3lsrXnKxCeiwpUg,15788
 huggingface_hub/inference/_mcp/types.py,sha256=ic8VSR9JY1d-vPWsBVXYtXtIU669-HbGQ3m12Szs7BQ,815
 huggingface_hub/inference/_mcp/utils.py,sha256=VsRWl0fuSZDS0zNT9n7FOMSlzA0UBbP8p8xWKWDt2Pc,4093
-huggingface_hub/inference/_providers/__init__.py,sha256=8i8Y2X_hEhm20ndFjEQxgddHVAEQpYn38UfLeH4V0Rg,8354
-huggingface_hub/inference/_providers/_common.py,sha256=dDtKWEUTcvO0llx96x5E3RwOSzkCIL8cKbiFk9RRh8o,11330
+huggingface_hub/inference/_providers/__init__.py,sha256=j3kMOwM1nopVGZeYAbw78sv0yvQiwB2Nzpm9pQqqnSg,8594
+huggingface_hub/inference/_providers/_common.py,sha256=mUL5T9v3iqM1d7cYIh0qEyVC8w7UliQz_LEni2srekI,11350
 huggingface_hub/inference/_providers/black_forest_labs.py,sha256=wO7qgRyNyrIKlZtvL3vJEbS4-D19kfoXZk6PDh1dTis,2842
 huggingface_hub/inference/_providers/cerebras.py,sha256=QOJ-1U-os7uE7p6eUnn_P_APq-yQhx28be7c3Tq2EuA,210
 huggingface_hub/inference/_providers/cohere.py,sha256=O3tC-qIUL91mx_mE8bOHCtDWcQuKOUauhUoXSUBUCZ8,1253
@@ -120,6 +120,7 @@ huggingface_hub/inference/_providers/nscale.py,sha256=qWUsWinQmUbNUqehyKn34tVoWe
 huggingface_hub/inference/_providers/openai.py,sha256=GCVYeNdjWIgpQQ7E_Xv8IebmdhTi0S6WfFosz3nLtps,1089
 huggingface_hub/inference/_providers/replicate.py,sha256=BuLb1x4nUlH5SfazBwvMiFwwcs-OS99U87m3QWdx2is,3810
 huggingface_hub/inference/_providers/sambanova.py,sha256=Unt3H3jr_kgI9vzRjmmW1DFyoEuPkKCcgIIloiOj3j8,2037
+huggingface_hub/inference/_providers/scaleway.py,sha256=Jy81kXWbXCHBpx6xmyzdEfXGSyhUfjKOLHuDSvhHWGo,1209
 huggingface_hub/inference/_providers/together.py,sha256=KHF19CS3qXS7G1-CwcMiD8Z5wzPKEKi4F2DzqAthbBE,3439
 huggingface_hub/serialization/__init__.py,sha256=kn-Fa-m4FzMnN8lNsF-SwFcfzug4CucexybGKyvZ8S0,1041
 huggingface_hub/serialization/_base.py,sha256=Df3GwGR9NzeK_SD75prXLucJAzPiNPgHbgXSw-_LTk8,8126
@@ -158,9 +159,9 @@ huggingface_hub/utils/insecure_hashlib.py,sha256=iAaepavFZ5Dhfa5n8KozRfQprKmvcjS
 huggingface_hub/utils/logging.py,sha256=0A8fF1yh3L9Ka_bCDX2ml4U5Ht0tY8Dr3JcbRvWFuwo,4909
 huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
 huggingface_hub/utils/tqdm.py,sha256=xAKcyfnNHsZ7L09WuEM5Ew5-MDhiahLACbbN2zMmcLs,10671
-huggingface_hub-0.34.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-huggingface_hub-0.34.4.dist-info/METADATA,sha256=sEy_vJu9uxm2AvxvoG49eqdJxzxDYRZPOpeBr4jKhkM,14699
-huggingface_hub-0.34.4.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-huggingface_hub-0.34.4.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
-huggingface_hub-0.34.4.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
-huggingface_hub-0.34.4.dist-info/RECORD,,
+huggingface_hub-0.34.5.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+huggingface_hub-0.34.5.dist-info/METADATA,sha256=67CHYP_fg-DqvhmXT82RONWfD5P-YOKwqc8INQsDWSY,14699
+huggingface_hub-0.34.5.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+huggingface_hub-0.34.5.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
+huggingface_hub-0.34.5.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
+huggingface_hub-0.34.5.dist-info/RECORD,,