huggingface-hub 0.35.1__py3-none-any.whl → 0.35.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub has been flagged as potentially problematic; consult the registry's advisory page for more details.

@@ -46,7 +46,7 @@ import sys
46
46
  from typing import TYPE_CHECKING
47
47
 
48
48
 
49
- __version__ = "0.35.1"
49
+ __version__ = "0.35.3"
50
50
 
51
51
  # Alphabetical order of definitions is ensured in tests
52
52
  # WARNING: any comment added in this dictionary definition will be lost when
@@ -386,6 +386,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
386
386
  refresh: bool = False,
387
387
  revision: Optional[str] = None,
388
388
  expand_info: bool = False,
389
+ maxdepth: Optional[int] = None,
389
390
  ):
390
391
  resolved_path = self.resolve_path(path, revision=revision)
391
392
  path = resolved_path.unresolve()
@@ -405,19 +406,25 @@ class HfFileSystem(fsspec.AbstractFileSystem):
405
406
  if recursive:
406
407
  # Use BFS to traverse the cache and build the "recursive "output
407
408
  # (The Hub uses a so-called "tree first" strategy for the tree endpoint but we sort the output to follow the spec so the result is (eventually) the same)
409
+ depth = 2
408
410
  dirs_to_visit = deque(
409
- [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
411
+ [(depth, path_info) for path_info in cached_path_infos if path_info["type"] == "directory"]
410
412
  )
411
413
  while dirs_to_visit:
412
- dir_info = dirs_to_visit.popleft()
413
- if dir_info["name"] not in self.dircache:
414
- dirs_not_in_dircache.append(dir_info["name"])
415
- else:
416
- cached_path_infos = self.dircache[dir_info["name"]]
417
- out.extend(cached_path_infos)
418
- dirs_to_visit.extend(
419
- [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
420
- )
414
+ depth, dir_info = dirs_to_visit.popleft()
415
+ if maxdepth is None or depth <= maxdepth:
416
+ if dir_info["name"] not in self.dircache:
417
+ dirs_not_in_dircache.append(dir_info["name"])
418
+ else:
419
+ cached_path_infos = self.dircache[dir_info["name"]]
420
+ out.extend(cached_path_infos)
421
+ dirs_to_visit.extend(
422
+ [
423
+ (depth + 1, path_info)
424
+ for path_info in cached_path_infos
425
+ if path_info["type"] == "directory"
426
+ ]
427
+ )
421
428
 
422
429
  dirs_not_expanded = []
423
430
  if expand_info:
@@ -436,6 +443,9 @@ class HfFileSystem(fsspec.AbstractFileSystem):
436
443
  or common_prefix in chain(dirs_not_in_dircache, dirs_not_expanded)
437
444
  else self._parent(common_prefix)
438
445
  )
446
+ if maxdepth is not None:
447
+ common_path_depth = common_path[len(path) :].count("/")
448
+ maxdepth -= common_path_depth
439
449
  out = [o for o in out if not o["name"].startswith(common_path + "/")]
440
450
  for cached_path in self.dircache:
441
451
  if cached_path.startswith(common_path + "/"):
@@ -448,6 +458,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
448
458
  refresh=True,
449
459
  revision=revision,
450
460
  expand_info=expand_info,
461
+ maxdepth=maxdepth,
451
462
  )
452
463
  )
453
464
  else:
@@ -460,9 +471,10 @@ class HfFileSystem(fsspec.AbstractFileSystem):
460
471
  repo_type=resolved_path.repo_type,
461
472
  )
462
473
  for path_info in tree:
474
+ cache_path = root_path + "/" + path_info.path
463
475
  if isinstance(path_info, RepoFile):
464
476
  cache_path_info = {
465
- "name": root_path + "/" + path_info.path,
477
+ "name": cache_path,
466
478
  "size": path_info.size,
467
479
  "type": "file",
468
480
  "blob_id": path_info.blob_id,
@@ -472,7 +484,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
472
484
  }
473
485
  else:
474
486
  cache_path_info = {
475
- "name": root_path + "/" + path_info.path,
487
+ "name": cache_path,
476
488
  "size": 0,
477
489
  "type": "directory",
478
490
  "tree_id": path_info.tree_id,
@@ -480,7 +492,9 @@ class HfFileSystem(fsspec.AbstractFileSystem):
480
492
  }
481
493
  parent_path = self._parent(cache_path_info["name"])
482
494
  self.dircache.setdefault(parent_path, []).append(cache_path_info)
483
- out.append(cache_path_info)
495
+ depth = cache_path[len(path) :].count("/")
496
+ if maxdepth is None or depth <= maxdepth:
497
+ out.append(cache_path_info)
484
498
  return out
485
499
 
486
500
  def walk(self, path: str, *args, **kwargs) -> Iterator[Tuple[str, List[str], List[str]]]:
@@ -547,19 +561,22 @@ class HfFileSystem(fsspec.AbstractFileSystem):
547
561
  Returns:
548
562
  `Union[List[str], Dict[str, Dict[str, Any]]]`: List of paths or dict of file information.
549
563
  """
550
- if maxdepth:
551
- return super().find(
552
- path, maxdepth=maxdepth, withdirs=withdirs, detail=detail, refresh=refresh, revision=revision, **kwargs
553
- )
564
+ if maxdepth is not None and maxdepth < 1:
565
+ raise ValueError("maxdepth must be at least 1")
554
566
  resolved_path = self.resolve_path(path, revision=revision)
555
567
  path = resolved_path.unresolve()
556
568
  try:
557
- out = self._ls_tree(path, recursive=True, refresh=refresh, revision=resolved_path.revision, **kwargs)
569
+ out = self._ls_tree(
570
+ path, recursive=True, refresh=refresh, revision=resolved_path.revision, maxdepth=maxdepth, **kwargs
571
+ )
558
572
  except EntryNotFoundError:
559
573
  # Path could be a file
560
- if self.info(path, revision=revision, **kwargs)["type"] == "file":
561
- out = {path: {}}
562
- else:
574
+ try:
575
+ if self.info(path, revision=revision, **kwargs)["type"] == "file":
576
+ out = {path: {}}
577
+ else:
578
+ out = {}
579
+ except FileNotFoundError:
563
580
  out = {}
564
581
  else:
565
582
  if not withdirs:
@@ -130,7 +130,7 @@ class InferenceClient:
130
130
  Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
131
131
  arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
132
132
  provider (`str`, *optional*):
133
- Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
133
+ Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"`, `"together"` or `"zai-org"`.
134
134
  Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
135
135
  If model is a URL or `base_url` is passed, then `provider` is not used.
136
136
  token (`str`, *optional*):
@@ -1297,7 +1297,7 @@ class InferenceClient:
1297
1297
  The model to use for inference. Can be a model ID hosted on the Hugging Face Hub or a URL to a deployed
1298
1298
  Inference Endpoint. This parameter overrides the model defined at the instance level. Defaults to None.
1299
1299
  target_size (`ImageToImageTargetSize`, *optional*):
1300
- The size in pixel of the output image.
1300
+ The size in pixel of the output image. This parameter is only supported by some providers and for specific models. It will be ignored when unsupported.
1301
1301
 
1302
1302
  Returns:
1303
1303
  `Image`: The translated image.
@@ -118,7 +118,7 @@ class AsyncInferenceClient:
118
118
  Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
119
119
  arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
120
120
  provider (`str`, *optional*):
121
- Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
121
+ Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"`, `"together"` or `"zai-org"`.
122
122
  Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
123
123
  If model is a URL or `base_url` is passed, then `provider` is not used.
124
124
  token (`str`, *optional*):
@@ -1342,7 +1342,7 @@ class AsyncInferenceClient:
1342
1342
  The model to use for inference. Can be a model ID hosted on the Hugging Face Hub or a URL to a deployed
1343
1343
  Inference Endpoint. This parameter overrides the model defined at the instance level. Defaults to None.
1344
1344
  target_size (`ImageToImageTargetSize`, *optional*):
1345
- The size in pixel of the output image.
1345
+ The size in pixel of the output image. This parameter is only supported by some providers and for specific models. It will be ignored when unsupported.
1346
1346
 
1347
1347
  Returns:
1348
1348
  `Image`: The translated image.
@@ -33,7 +33,7 @@ class ImageToImageParameters(BaseInferenceType):
33
33
  prompt: Optional[str] = None
34
34
  """The text prompt to guide the image generation."""
35
35
  target_size: Optional[ImageToImageTargetSize] = None
36
- """The size in pixel of the output image."""
36
+ """The size in pixel of the output image. This parameter is only supported by some providers and for specific models. It will be ignored when unsupported."""
37
37
 
38
38
 
39
39
  @dataclass_with_extra
@@ -158,7 +158,7 @@ class MCPClient:
158
158
  from mcp import types as mcp_types
159
159
 
160
160
  # Extract allowed_tools configuration if provided
161
- allowed_tools = params.pop("allowed_tools", [])
161
+ allowed_tools = params.pop("allowed_tools", None)
162
162
 
163
163
  # Determine server type and create appropriate parameters
164
164
  if type == "stdio":
@@ -218,9 +218,10 @@ class MCPClient:
218
218
  logger.debug("Connected to server with tools:", [tool.name for tool in response.tools])
219
219
 
220
220
  # Filter tools based on allowed_tools configuration
221
- filtered_tools = [tool for tool in response.tools if tool.name in allowed_tools]
221
+ filtered_tools = response.tools
222
222
 
223
- if allowed_tools:
223
+ if allowed_tools is not None:
224
+ filtered_tools = [tool for tool in response.tools if tool.name in allowed_tools]
224
225
  logger.debug(
225
226
  f"Tool filtering applied. Using {len(filtered_tools)} of {len(response.tools)} available tools: {[tool.name for tool in filtered_tools]}"
226
227
  )
@@ -41,6 +41,7 @@ from .replicate import ReplicateImageToImageTask, ReplicateTask, ReplicateTextTo
41
41
  from .sambanova import SambanovaConversationalTask, SambanovaFeatureExtractionTask
42
42
  from .scaleway import ScalewayConversationalTask, ScalewayFeatureExtractionTask
43
43
  from .together import TogetherConversationalTask, TogetherTextGenerationTask, TogetherTextToImageTask
44
+ from .zai_org import ZaiConversationalTask
44
45
 
45
46
 
46
47
  logger = logging.get_logger(__name__)
@@ -65,6 +66,7 @@ PROVIDER_T = Literal[
65
66
  "sambanova",
66
67
  "scaleway",
67
68
  "together",
69
+ "zai-org",
68
70
  ]
69
71
 
70
72
  PROVIDER_OR_POLICY_T = Union[PROVIDER_T, Literal["auto"]]
@@ -170,6 +172,9 @@ PROVIDERS: Dict[PROVIDER_T, Dict[str, TaskProviderHelper]] = {
170
172
  "conversational": TogetherConversationalTask(),
171
173
  "text-generation": TogetherTextGenerationTask(),
172
174
  },
175
+ "zai-org": {
176
+ "conversational": ZaiConversationalTask(),
177
+ },
173
178
  }
174
179
 
175
180
 
@@ -35,6 +35,7 @@ HARDCODED_MODEL_INFERENCE_MAPPING: Dict[str, Dict[str, InferenceProviderMapping]
35
35
  "sambanova": {},
36
36
  "scaleway": {},
37
37
  "together": {},
38
+ "zai-org": {},
38
39
  }
39
40
 
40
41
 
@@ -191,6 +191,8 @@ class FalAIImageToImageTask(FalAIQueueTask):
191
191
  self, inputs: Any, parameters: Dict, provider_mapping_info: InferenceProviderMapping
192
192
  ) -> Optional[Dict]:
193
193
  image_url = _as_url(inputs, default_mime_type="image/jpeg")
194
+ if "target_size" in parameters:
195
+ parameters["image_size"] = parameters.pop("target_size")
194
196
  payload: Dict[str, Any] = {
195
197
  "image_url": image_url,
196
198
  **filter_none(parameters),
@@ -0,0 +1,17 @@
1
+ from typing import Any, Dict
2
+
3
+ from huggingface_hub.inference._providers._common import BaseConversationalTask
4
+
5
+
6
+ class ZaiConversationalTask(BaseConversationalTask):
7
+ def __init__(self):
8
+ super().__init__(provider="zai-org", base_url="https://api.z.ai")
9
+
10
+ def _prepare_headers(self, headers: Dict, api_key: str) -> Dict[str, Any]:
11
+ headers = super()._prepare_headers(headers, api_key)
12
+ headers["Accept-Language"] = "en-US,en"
13
+ headers["x-source-channel"] = "hugging_face"
14
+ return headers
15
+
16
+ def _prepare_route(self, mapped_model: str, api_key: str) -> str:
17
+ return "/api/paas/v4/chat/completions"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: huggingface-hub
3
- Version: 0.35.1
3
+ Version: 0.35.3
4
4
  Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
5
5
  Home-page: https://github.com/huggingface/huggingface_hub
6
6
  Author: Hugging Face, Inc.
@@ -1,4 +1,4 @@
1
- huggingface_hub/__init__.py,sha256=WwTfh8zb0XfDNzSuZD_Zb2C-UHx2jEidwqNNmB3QwcY,52476
1
+ huggingface_hub/__init__.py,sha256=4gQg-gGWyHu_9bLlxIg8jDYQzBz4B4IogNd2Sc64rtw,52476
2
2
  huggingface_hub/_commit_api.py,sha256=68HxFnJE2s-QmGZRHQav5kOMTseYV_ZQi04ADaQmZUk,38979
3
3
  huggingface_hub/_commit_scheduler.py,sha256=tfIoO1xWHjTJ6qy6VS6HIoymDycFPg0d6pBSZprrU2U,14679
4
4
  huggingface_hub/_inference_endpoints.py,sha256=ahmbPcEXsJ_JcMb9TDgdkD8Z2z9uytkFG3_1o6dTm8g,17598
@@ -19,7 +19,7 @@ huggingface_hub/errors.py,sha256=D7Lw0Jjrf8vfmD0B26LEvg-JWkU8Zq0KDPJOzFY4QLw,112
19
19
  huggingface_hub/fastai_utils.py,sha256=DpeH9d-6ut2k_nCAAwglM51XmRmgfbRe2SPifpVL5Yk,16745
20
20
  huggingface_hub/file_download.py,sha256=ohaAp9WR4PghMew2KEFlnRVhrvCiBYxs5I5wKOSw0e0,78926
21
21
  huggingface_hub/hf_api.py,sha256=Y0rA53vl0pz8SvRMBDKGuaM3ehUVfyCAa9m5ByNE830,483625
22
- huggingface_hub/hf_file_system.py,sha256=h-ZN-JGtdXfZiQXaNQWEWIkFtKtSKHdndIp_Lh72JdA,46798
22
+ huggingface_hub/hf_file_system.py,sha256=97XNsFaLv2WpLzFRghTl0SPRbySSljnxQuF_Pl4jvz0,47545
23
23
  huggingface_hub/hub_mixin.py,sha256=Ii3w9o7XgGbj6UNPnieW5IDfaCd8OEKpIH1hRkncRDQ,38208
24
24
  huggingface_hub/inference_api.py,sha256=b4-NhPSn9b44nYKV8tDKXodmE4JVdEymMWL4CVGkzlE,8323
25
25
  huggingface_hub/keras_mixin.py,sha256=WGNQZROdw6yjJ1DGTPZPwKAxf1UbkzAx1dRidkeb2fk,19553
@@ -57,10 +57,10 @@ huggingface_hub/commands/upload_large_folder.py,sha256=_1id84BFtbL8HgFRKZ-el_uPr
57
57
  huggingface_hub/commands/user.py,sha256=dDpi0mLYvTeYf0fhPVQyEJsn7Wrk6gWvR5YHC6RgebU,7516
58
58
  huggingface_hub/commands/version.py,sha256=rGpCbvxImY9eQqXrshYt609Iws27R75WARmKQrIo6Ok,1390
59
59
  huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
60
- huggingface_hub/inference/_client.py,sha256=DzuCZ4-hwTH68J70Irq9IdshCAsfdtHaGg69ZfDzsm0,157486
60
+ huggingface_hub/inference/_client.py,sha256=g2qnetdPztZZk_LslcyHvSZ-zcfo677ee5S7-uWKemk,157612
61
61
  huggingface_hub/inference/_common.py,sha256=dI3OPg0320OOB0FRy_kqftW9F3ghEnBVA5Gi4VaSctg,15778
62
62
  huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
63
- huggingface_hub/inference/_generated/_async_client.py,sha256=kqYLXuhvihMT5oyrTQdLZxNH8dxVQ_Nz6YNsHufAGJM,163395
63
+ huggingface_hub/inference/_generated/_async_client.py,sha256=6faC6FQeRP3fXawOcNvkG31_py_ehVSX2H7oHBqpi_0,163521
64
64
  huggingface_hub/inference/_generated/types/__init__.py,sha256=9WvrGQ8aThtKSNzZF06j-CIE2ZuItne8FFnea1p1u38,6557
65
65
  huggingface_hub/inference/_generated/types/audio_classification.py,sha256=Jg3mzfGhCSH6CfvVvgJSiFpkz6v4nNA0G4LJXacEgNc,1573
66
66
  huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=2Ep4WkePL7oJwcp5nRJqApwviumGHbft9HhXE9XLHj4,891
@@ -73,7 +73,7 @@ huggingface_hub/inference/_generated/types/feature_extraction.py,sha256=NMWVL_TL
73
73
  huggingface_hub/inference/_generated/types/fill_mask.py,sha256=OrTgQ7Ndn0_dWK5thQhZwTOHbQni8j0iJcx9llyhRds,1708
74
74
  huggingface_hub/inference/_generated/types/image_classification.py,sha256=A-Y024o8723_n8mGVos4TwdAkVL62McGeL1iIo4VzNs,1585
75
75
  huggingface_hub/inference/_generated/types/image_segmentation.py,sha256=vrkI4SuP1Iq_iLXc-2pQhYY3SHN4gzvFBoZqbUHxU7o,1950
76
- huggingface_hub/inference/_generated/types/image_to_image.py,sha256=HPz1uKXk_9xvgNUi3GV6n4lw-J3G6cdGTcW3Ou_N0l8,2044
76
+ huggingface_hub/inference/_generated/types/image_to_image.py,sha256=gGEUh9xAsyZOcQG82vZepbJAeMZ-7NxvcnE9mcX3AO0,2157
77
77
  huggingface_hub/inference/_generated/types/image_to_text.py,sha256=OaFEBAfgT-fOVzJ7xVermGf7VODhrc9-Jg38WrM7-2o,4810
78
78
  huggingface_hub/inference/_generated/types/image_to_video.py,sha256=bC-L_cNsDhk4s_IdSiprJ9d1NeMGePLcUp7UPpco21w,2240
79
79
  huggingface_hub/inference/_generated/types/object_detection.py,sha256=VuFlb1281qTXoSgJDmquGz-VNfEZLo2H0Rh_F6MF6ts,2000
@@ -100,15 +100,15 @@ huggingface_hub/inference/_mcp/_cli_hacks.py,sha256=KX9HZJPa1p8ngY3mtYGGlVUXfg4v
100
100
  huggingface_hub/inference/_mcp/agent.py,sha256=VahvSqldiC1R72CFH4T05l80uEXl5OjLwboWQFUJbsw,4281
101
101
  huggingface_hub/inference/_mcp/cli.py,sha256=AmSUT6wXlE6EWmI0SfQgTWYnL07322zGwwk2yMZZlBc,9640
102
102
  huggingface_hub/inference/_mcp/constants.py,sha256=kldRfaidXMdyMl_jLosaQomgWDv4shvnFe3dnQNwXSU,2511
103
- huggingface_hub/inference/_mcp/mcp_client.py,sha256=iKGOY6fD0cph8wp9rXlofJglDLcfXGxedsJ3GLV1z-g,16745
103
+ huggingface_hub/inference/_mcp/mcp_client.py,sha256=Cz4x8z3D8xfY5N7EIqrYzjn_Vd6ta_22vWe936UqKbM,16803
104
104
  huggingface_hub/inference/_mcp/types.py,sha256=3gq-P_mrmvPI6KWBqjCxavtMPiGz10YXog7wg4oJYAo,941
105
105
  huggingface_hub/inference/_mcp/utils.py,sha256=KFsGOC8dytS3VgaugBzibdteWasZ9CAnp83U2SyIlMw,4188
106
- huggingface_hub/inference/_providers/__init__.py,sha256=0tJIYaeg1vO0aiIsddvknZsB0Af_0AaN_qYtdk9hGSw,8740
107
- huggingface_hub/inference/_providers/_common.py,sha256=JwvxrQYjsRwYkD8aOCrxPXSv7Lmtv4Xfn0QjQbQdAF0,12334
106
+ huggingface_hub/inference/_providers/__init__.py,sha256=m-vZjvrWpiERWdeY_6kc56swXXFTYD0UX3j7M_PBBNk,8873
107
+ huggingface_hub/inference/_providers/_common.py,sha256=PIeQ3nKatRN9IrqJSWPcm8jGdc5AcHuYenNlx50WCGA,12353
108
108
  huggingface_hub/inference/_providers/black_forest_labs.py,sha256=FIukZoIFt_FDrTTDfpF-Vko5sXnmH0QvVIsMtV2Jzm8,2852
109
109
  huggingface_hub/inference/_providers/cerebras.py,sha256=QOJ-1U-os7uE7p6eUnn_P_APq-yQhx28be7c3Tq2EuA,210
110
110
  huggingface_hub/inference/_providers/cohere.py,sha256=O3tC-qIUL91mx_mE8bOHCtDWcQuKOUauhUoXSUBUCZ8,1253
111
- huggingface_hub/inference/_providers/fal_ai.py,sha256=CsMCHQ46tVU3ukrZ4qQXK2275HqjEMieCBtifC370eU,9880
111
+ huggingface_hub/inference/_providers/fal_ai.py,sha256=pCr5qP6R1W1CrEw-_nKdNuP3UqsUi58yL18w4r7mXRo,9989
112
112
  huggingface_hub/inference/_providers/featherless_ai.py,sha256=QxBz-32O4PztxixrIjrfKuTOzvfqyUi-cVsw0Hf_zlY,1382
113
113
  huggingface_hub/inference/_providers/fireworks_ai.py,sha256=Id226ITfPkOcFMFzly3MW9l-dZl9l4qizL4JEHWkBFk,1215
114
114
  huggingface_hub/inference/_providers/groq.py,sha256=JTk2JV4ZOlaohho7zLAFQtk92kGVsPmLJ1hmzcwsqvQ,315
@@ -123,6 +123,7 @@ huggingface_hub/inference/_providers/replicate.py,sha256=otVfPkfBtlWrpjQub4V__t7
123
123
  huggingface_hub/inference/_providers/sambanova.py,sha256=Unt3H3jr_kgI9vzRjmmW1DFyoEuPkKCcgIIloiOj3j8,2037
124
124
  huggingface_hub/inference/_providers/scaleway.py,sha256=Jy81kXWbXCHBpx6xmyzdEfXGSyhUfjKOLHuDSvhHWGo,1209
125
125
  huggingface_hub/inference/_providers/together.py,sha256=KHF19CS3qXS7G1-CwcMiD8Z5wzPKEKi4F2DzqAthbBE,3439
126
+ huggingface_hub/inference/_providers/zai_org.py,sha256=plGzMZuLrChZvgpS3CCPqI6ImotZZxNLgfxnR7v6tw8,646
126
127
  huggingface_hub/serialization/__init__.py,sha256=kn-Fa-m4FzMnN8lNsF-SwFcfzug4CucexybGKyvZ8S0,1041
127
128
  huggingface_hub/serialization/_base.py,sha256=Df3GwGR9NzeK_SD75prXLucJAzPiNPgHbgXSw-_LTk8,8126
128
129
  huggingface_hub/serialization/_dduf.py,sha256=s42239rLiHwaJE36QDEmS5GH7DSmQ__BffiHJO5RjIg,15424
@@ -160,9 +161,9 @@ huggingface_hub/utils/insecure_hashlib.py,sha256=iAaepavFZ5Dhfa5n8KozRfQprKmvcjS
160
161
  huggingface_hub/utils/logging.py,sha256=0A8fF1yh3L9Ka_bCDX2ml4U5Ht0tY8Dr3JcbRvWFuwo,4909
161
162
  huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
162
163
  huggingface_hub/utils/tqdm.py,sha256=xAKcyfnNHsZ7L09WuEM5Ew5-MDhiahLACbbN2zMmcLs,10671
163
- huggingface_hub-0.35.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
164
- huggingface_hub-0.35.1.dist-info/METADATA,sha256=DIJWsqgWnRM2pgGuMUEjfzUS6xBnEasjwIvUakgD-gE,14820
165
- huggingface_hub-0.35.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
166
- huggingface_hub-0.35.1.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
167
- huggingface_hub-0.35.1.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
168
- huggingface_hub-0.35.1.dist-info/RECORD,,
164
+ huggingface_hub-0.35.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
165
+ huggingface_hub-0.35.3.dist-info/METADATA,sha256=oSPd1weVzFDQEIIV1d7coTtN9uymtjyVlGeftVVqs24,14820
166
+ huggingface_hub-0.35.3.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
167
+ huggingface_hub-0.35.3.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
168
+ huggingface_hub-0.35.3.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
169
+ huggingface_hub-0.35.3.dist-info/RECORD,,