huggingface-hub 0.33.0rc0__py3-none-any.whl → 0.33.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- huggingface_hub/__init__.py +1 -1
- huggingface_hub/_inference_endpoints.py +4 -1
- huggingface_hub/inference/_mcp/mcp_client.py +21 -2
- huggingface_hub/inference/_providers/_common.py +31 -5
- huggingface_hub/inference/_providers/hf_inference.py +13 -1
- {huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/METADATA +1 -1
- {huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/RECORD +11 -11
- {huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/top_level.txt +0 -0
huggingface_hub/__init__.py
CHANGED

huggingface_hub/_inference_endpoints.py
CHANGED
@@ -100,6 +100,7 @@ class InferenceEndpoint:
     namespace: str
     repository: str = field(init=False)
     status: InferenceEndpointStatus = field(init=False)
+    health_route: str = field(init=False)
     url: Optional[str] = field(init=False)

     # Other fields
@@ -220,7 +221,8 @@ class InferenceEndpoint:
            )
            if self.status == InferenceEndpointStatus.RUNNING and self.url is not None:
                # Verify the endpoint is actually reachable
-                response = get_session().get(self.url, headers=self._api._build_hf_headers(token=self._token))
+                _health_url = f"{self.url.rstrip('/')}/{self.health_route.lstrip('/')}"
+                response = get_session().get(_health_url, headers=self._api._build_hf_headers(token=self._token))
                if response.status_code == 200:
                    logger.info("Inference Endpoint is ready to be used.")
                    return self
@@ -400,6 +402,7 @@ class InferenceEndpoint:
        self.repository = self.raw["model"]["repository"]
        self.status = self.raw["status"]["state"]
        self.url = self.raw["status"].get("url")
+        self.health_route = self.raw["healthRoute"]

        # Other fields
        self.framework = self.raw["model"]["framework"]
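With the new health_route field, wait() probes a running endpoint at a dedicated health route by joining url and health_route while normalizing the slash between them. A minimal sketch of that joining logic; the helper name and endpoint values below are illustrative, not part of the library:

def build_health_url(url: str, health_route: str) -> str:
    # Strip the trailing "/" from the base URL and the leading "/" from the route
    # so the two parts always join with exactly one separator.
    return f"{url.rstrip('/')}/{health_route.lstrip('/')}"


# Hypothetical endpoint URL and route, not taken from a real deployment:
print(build_health_url("https://my-endpoint.endpoints.huggingface.cloud/", "/health"))
# https://my-endpoint.endpoints.huggingface.cloud/health
print(build_health_url("https://my-endpoint.endpoints.huggingface.cloud", "health"))
# https://my-endpoint.endpoints.huggingface.cloud/health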

huggingface_hub/inference/_mcp/mcp_client.py
CHANGED
@@ -266,7 +266,7 @@ class MCPClient:
            stream=True,
        )

-        message = {"role": "unknown", "content": ""}
+        message: Dict[str, Any] = {"role": "unknown", "content": ""}
        final_tool_calls: Dict[int, ChatCompletionStreamOutputDeltaToolCall] = {}
        num_of_chunks = 0

@@ -304,7 +304,26 @@ class MCPClient:
            # Yield each chunk to caller
            yield chunk

-        if
+        # Add the assistant message with tool calls (if any) to messages
+        if message["content"] or final_tool_calls:
+            # if the role is unknown, set it to assistant
+            if message.get("role") == "unknown":
+                message["role"] = "assistant"
+            # Convert final_tool_calls to the format expected by OpenAI
+            if final_tool_calls:
+                tool_calls_list: List[Dict[str, Any]] = []
+                for tc in final_tool_calls.values():
+                    tool_calls_list.append(
+                        {
+                            "id": tc.id,
+                            "type": "function",
+                            "function": {
+                                "name": tc.function.name,
+                                "arguments": tc.function.arguments or "{}",
+                            },
+                        }
+                    )
+                message["tool_calls"] = tool_calls_list
            messages.append(message)

        # Process tool calls one by one
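The added block records the tool calls accumulated from the stream as a regular assistant message in the OpenAI-style tool_calls layout before the tool results are processed. A hedged sketch of the message shape being appended; the values below are invented, and in the real client final_tool_calls holds ChatCompletionStreamOutputDeltaToolCall objects built from stream deltas:

from typing import Any, Dict

# Invented example of one accumulated tool call, mirroring the attributes the
# client reads (tc.id, tc.function.name, tc.function.arguments):
call_id, fn_name, fn_args = "call_0", "get_weather", '{"city": "Paris"}'

assistant_message: Dict[str, Any] = {
    "role": "assistant",  # "unknown" is rewritten to "assistant" before appending
    "content": "",        # may stay empty when the model only emitted tool calls
    "tool_calls": [
        {
            "id": call_id,
            "type": "function",
            "function": {
                "name": fn_name,
                "arguments": fn_args or "{}",  # fall back to "{}" when nothing was streamed
            },
        }
    ],
}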

huggingface_hub/inference/_providers/_common.py
CHANGED
@@ -1,9 +1,10 @@
 from functools import lru_cache
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union, overload

 from huggingface_hub import constants
 from huggingface_hub.hf_api import InferenceProviderMapping
 from huggingface_hub.inference._common import RequestParameters
+from huggingface_hub.inference._generated.types.chat_completion import ChatCompletionInputMessage
 from huggingface_hub.utils import build_hf_headers, get_token, logging


@@ -36,8 +37,30 @@ HARDCODED_MODEL_INFERENCE_MAPPING: Dict[str, Dict[str, InferenceProviderMapping]
 }


-
-
+@overload
+def filter_none(obj: Dict[str, Any]) -> Dict[str, Any]: ...
+@overload
+def filter_none(obj: List[Any]) -> List[Any]: ...
+
+
+def filter_none(obj: Union[Dict[str, Any], List[Any]]) -> Union[Dict[str, Any], List[Any]]:
+    if isinstance(obj, dict):
+        cleaned: Dict[str, Any] = {}
+        for k, v in obj.items():
+            if v is None:
+                continue
+            if isinstance(v, (dict, list)):
+                v = filter_none(v)
+                # remove empty nested dicts
+                if isinstance(v, dict) and not v:
+                    continue
+            cleaned[k] = v
+        return cleaned
+
+    if isinstance(obj, list):
+        return [filter_none(v) if isinstance(v, (dict, list)) else v for v in obj]
+
+    raise ValueError(f"Expected dict or list, got {type(obj)}")


 class TaskProviderHelper:
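filter_none is now recursive and overloaded for both dicts and lists: None values are dropped, nested containers are cleaned, and nested dicts that end up empty are removed entirely. A quick usage sketch with an invented payload (assuming huggingface_hub 0.33.1 is installed):

from huggingface_hub.inference._providers._common import filter_none

payload = {
    "messages": [{"role": "user", "content": "hi", "name": None}],
    "temperature": None,                 # dropped: value is None
    "response_format": {"type": None},   # becomes {} after cleaning, so it is dropped too
    "max_tokens": 16,
}

print(filter_none(payload))
# {'messages': [{'role': 'user', 'content': 'hi'}], 'max_tokens': 16}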
@@ -224,9 +247,12 @@ class BaseConversationalTask(TaskProviderHelper):
        return "/v1/chat/completions"

    def _prepare_payload_as_dict(
-        self,
+        self,
+        inputs: List[Union[Dict, ChatCompletionInputMessage]],
+        parameters: Dict,
+        provider_mapping_info: InferenceProviderMapping,
    ) -> Optional[Dict]:
-        return {"messages": inputs, **
+        return filter_none({"messages": inputs, **parameters, "model": provider_mapping_info.provider_id})


 class BaseTextGenerationTask(TaskProviderHelper):
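With the expanded signature, the chat-completion payload is assembled from the caller's messages, the request parameters, and the provider's model id, then passed through filter_none so unset options never reach the provider. A sketch of the resulting body; the provider id and parameter values below are made up:

from huggingface_hub.inference._providers._common import filter_none

messages = [{"role": "user", "content": "Hello!"}]
parameters = {"max_tokens": 64, "temperature": None, "seed": None}

# What _prepare_payload_as_dict now returns, conceptually:
payload = filter_none({"messages": messages, **parameters, "model": "some-provider/some-model"})
print(payload)
# {'messages': [{'role': 'user', 'content': 'Hello!'}], 'max_tokens': 64, 'model': 'some-provider/some-model'}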

huggingface_hub/inference/_providers/hf_inference.py
CHANGED
@@ -75,7 +75,7 @@ class HFInferenceBinaryInputTask(HFInferenceTask):
        provider_mapping_info: InferenceProviderMapping,
        extra_payload: Optional[Dict],
    ) -> Optional[bytes]:
-        parameters = filter_none(
+        parameters = filter_none(parameters)
        extra_payload = extra_payload or {}
        has_parameters = len(parameters) > 0 or len(extra_payload) > 0

@@ -194,6 +194,18 @@ class HFInferenceFeatureExtractionTask(HFInferenceTask):
    def __init__(self):
        super().__init__("feature-extraction")

+    def _prepare_payload_as_dict(
+        self, inputs: Any, parameters: Dict, provider_mapping_info: InferenceProviderMapping
+    ) -> Optional[Dict]:
+        if isinstance(inputs, bytes):
+            raise ValueError(f"Unexpected binary input for task {self.task}.")
+        if isinstance(inputs, Path):
+            raise ValueError(f"Unexpected path input for task {self.task} (got {inputs})")
+
+        # Parameters are sent at root-level for feature-extraction task
+        # See specs: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/src/tasks/feature-extraction/spec/input.json
+        return {"inputs": inputs, **filter_none(parameters)}
+
    def get_response(self, response: Union[bytes, Dict], request_params: Optional[RequestParameters] = None) -> Any:
        if isinstance(response, bytes):
            return _bytes_to_dict(response)
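For feature extraction, the new _prepare_payload_as_dict sends parameters at the root of the JSON body next to inputs, instead of nesting them under a parameters key, following the task spec linked in the code comment. A small sketch of the resulting body; parameter names follow that spec, values are illustrative:

from huggingface_hub.inference._providers._common import filter_none

inputs = "Today is a sunny day."
parameters = {"normalize": True, "truncate": None}

payload = {"inputs": inputs, **filter_none(parameters)}
print(payload)
# {'inputs': 'Today is a sunny day.', 'normalize': True}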

{huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: huggingface-hub
-Version: 0.33.0rc0
+Version: 0.33.1
 Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
 Home-page: https://github.com/huggingface/huggingface_hub
 Author: Hugging Face, Inc.

{huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
-huggingface_hub/__init__.py,sha256=
+huggingface_hub/__init__.py,sha256=sbPhQoWpwnOCDHF0vVlJeCZHNQMX_tTG3xUpHNMJ4Ds,50644
 huggingface_hub/_commit_api.py,sha256=ZbmuIhFdF8B3F_cvGtxorka7MmIQOk8oBkCtYltnCvI,39456
 huggingface_hub/_commit_scheduler.py,sha256=tfIoO1xWHjTJ6qy6VS6HIoymDycFPg0d6pBSZprrU2U,14679
-huggingface_hub/_inference_endpoints.py,sha256=
+huggingface_hub/_inference_endpoints.py,sha256=ahmbPcEXsJ_JcMb9TDgdkD8Z2z9uytkFG3_1o6dTm8g,17598
 huggingface_hub/_local_folder.py,sha256=9NkNGsyEfTtopfhXbicS2TFIcm9lAzLFqItzYy2h0D4,16915
 huggingface_hub/_login.py,sha256=ssf4viT5BhHI2ZidnSuAZcrwSxzaLOrf8xgRVKuvu_A,20298
 huggingface_hub/_oauth.py,sha256=YNbSSZCNZLiCqwMoYboSAfI3XjEsbyAADJcwgRAdhBc,18802
@@ -85,11 +85,11 @@ huggingface_hub/inference/_mcp/_cli_hacks.py,sha256=cMZirVFe4N0EM9Nzzs9aEmzUBUEB
 huggingface_hub/inference/_mcp/agent.py,sha256=azX9_lsFjNlgsEvRYdKgsmOmpNReWIcbuMeIVWc852k,4264
 huggingface_hub/inference/_mcp/cli.py,sha256=9IKItC1XJ4yzQAKP1iZwpYL1BA56bem2AQlKlB0SGdc,9251
 huggingface_hub/inference/_mcp/constants.py,sha256=tE_V6qcvsmvVoJa4eg04jhoTR2Cx1cNHieY2ENrm1_M,2511
-huggingface_hub/inference/_mcp/mcp_client.py,sha256=
+huggingface_hub/inference/_mcp/mcp_client.py,sha256=ndaTcZZPbU1ZTNUeB9-WdaOx7bHD3lsrXnKxCeiwpUg,15788
 huggingface_hub/inference/_mcp/types.py,sha256=JPK7rC9j-abot8pN3xw1UbSv9S2OBSRStjl_cidWs1Q,1247
 huggingface_hub/inference/_mcp/utils.py,sha256=VsRWl0fuSZDS0zNT9n7FOMSlzA0UBbP8p8xWKWDt2Pc,4093
 huggingface_hub/inference/_providers/__init__.py,sha256=rOaUL8zXKazYMgnPMDxEN7Y3nZwaKsA0gkILLWN1HLg,8116
-huggingface_hub/inference/_providers/_common.py,sha256=
+huggingface_hub/inference/_providers/_common.py,sha256=dlZMj20j_81klm-a1VdEbH5pPGjclIv1lLU2urh_Zzk,11448
 huggingface_hub/inference/_providers/black_forest_labs.py,sha256=wO7qgRyNyrIKlZtvL3vJEbS4-D19kfoXZk6PDh1dTis,2842
 huggingface_hub/inference/_providers/cerebras.py,sha256=QOJ-1U-os7uE7p6eUnn_P_APq-yQhx28be7c3Tq2EuA,210
 huggingface_hub/inference/_providers/cohere.py,sha256=O3tC-qIUL91mx_mE8bOHCtDWcQuKOUauhUoXSUBUCZ8,1253
@@ -97,7 +97,7 @@ huggingface_hub/inference/_providers/fal_ai.py,sha256=gGWPsvQIsuk3kTIXHwpOqA0R1Z
 huggingface_hub/inference/_providers/featherless_ai.py,sha256=QxBz-32O4PztxixrIjrfKuTOzvfqyUi-cVsw0Hf_zlY,1382
 huggingface_hub/inference/_providers/fireworks_ai.py,sha256=Id226ITfPkOcFMFzly3MW9l-dZl9l4qizL4JEHWkBFk,1215
 huggingface_hub/inference/_providers/groq.py,sha256=JTk2JV4ZOlaohho7zLAFQtk92kGVsPmLJ1hmzcwsqvQ,315
-huggingface_hub/inference/_providers/hf_inference.py,sha256=
+huggingface_hub/inference/_providers/hf_inference.py,sha256=PoHxjrQ9hs5KZ6iKp2SSum7uuoF_JoyurS4ymF_qhgI,9133
 huggingface_hub/inference/_providers/hyperbolic.py,sha256=OQIBi2j3aNvuaSQ8BUK1K1PVeRXdrxc80G-6YmBa-ns,1985
 huggingface_hub/inference/_providers/nebius.py,sha256=VJpTF2JZ58rznc9wxdk-57vwF8sV2vESw_WkXjXqCho,3580
 huggingface_hub/inference/_providers/novita.py,sha256=HGVC8wPraRQUuI5uBoye1Y4Wqe4X116B71GhhbWy5yM,2514
@@ -141,9 +141,9 @@ huggingface_hub/utils/insecure_hashlib.py,sha256=iAaepavFZ5Dhfa5n8KozRfQprKmvcjS
 huggingface_hub/utils/logging.py,sha256=0A8fF1yh3L9Ka_bCDX2ml4U5Ht0tY8Dr3JcbRvWFuwo,4909
 huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
 huggingface_hub/utils/tqdm.py,sha256=xAKcyfnNHsZ7L09WuEM5Ew5-MDhiahLACbbN2zMmcLs,10671
-huggingface_hub-0.33.
-huggingface_hub-0.33.
-huggingface_hub-0.33.
-huggingface_hub-0.33.
-huggingface_hub-0.33.
-huggingface_hub-0.33.
+huggingface_hub-0.33.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+huggingface_hub-0.33.1.dist-info/METADATA,sha256=MpsYngFKssnSYsYlGEGlmkFrG6GORUcZi7kibsqL73M,14777
+huggingface_hub-0.33.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+huggingface_hub-0.33.1.dist-info/entry_points.txt,sha256=uelw0-fu0kd-CxIuOsR1bsjLIFnAaMQ6AIqluJYDhQw,184
+huggingface_hub-0.33.1.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
+huggingface_hub-0.33.1.dist-info/RECORD,,

{huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/LICENSE
File without changes
{huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/WHEEL
File without changes
{huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/entry_points.txt
File without changes
{huggingface_hub-0.33.0rc0.dist-info → huggingface_hub-0.33.1.dist-info}/top_level.txt
File without changes