huggingface-hub 0.35.0rc1__py3-none-any.whl → 0.35.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of huggingface-hub has been flagged as potentially problematic.

huggingface_hub/__init__.py CHANGED
@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING
 
 
-__version__ = "0.35.0.rc1"
+__version__ = "0.35.2"
 
 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when

huggingface_hub/dataclasses.py CHANGED
@@ -5,6 +5,7 @@ from typing import (
     Any,
     Callable,
     Dict,
+    ForwardRef,
     List,
     Literal,
     Optional,
@@ -325,6 +326,8 @@ def type_validator(name: str, value: Any, expected_type: Any) -> None:
         validator(name, value, args)
     elif isinstance(expected_type, type):  # simple types
         _validate_simple_type(name, value, expected_type)
+    elif isinstance(expected_type, ForwardRef) or isinstance(expected_type, str):
+        return
     else:
         raise TypeError(f"Unsupported type for field '{name}': {expected_type}")
 
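
The new branch exists because annotations that reference not-yet-defined names surface as `ForwardRef` objects (or raw strings) rather than concrete types, so they cannot be checked structurally. A minimal standalone sketch of the situation (toy `Node` class, not huggingface_hub code):

from dataclasses import dataclass
from typing import ForwardRef, Optional, get_args

@dataclass
class Node:
    value: int
    parent: Optional["Node"] = None  # "Node" is stored as ForwardRef('Node')

hint = Node.__annotations__["parent"]
print(get_args(hint))  # (ForwardRef('Node'), <class 'NoneType'>)

# A validator that recurses into get_args() therefore meets a ForwardRef
# instance; before this patch, type_validator fell through to its `else`
# branch and raised "Unsupported type for field ...".
assert isinstance(get_args(hint)[0], ForwardRef)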

huggingface_hub/file_download.py CHANGED
@@ -267,7 +267,7 @@ def _request_wrapper(
    """Wrapper around requests methods to follow relative redirects if `follow_relative_redirects=True` even when
    `allow_redirection=False`.
 
-    A backoff mechanism retries the HTTP call on 429, 503 and 504 errors.
+    A backoff mechanism retries the HTTP call on 5xx errors and network errors.
 
    Args:
        method (`str`):
@@ -306,7 +306,7 @@ def _request_wrapper(
        return response
 
    # Perform request and return if status_code is not in the retry list.
-    response = http_backoff(method=method, url=url, **params, retry_on_exceptions=(), retry_on_status_codes=(429,))
+    response = http_backoff(method=method, url=url, **params)
    hf_raise_for_status(response)
    return response
 

huggingface_hub/hf_file_system.py CHANGED
@@ -386,6 +386,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
        refresh: bool = False,
        revision: Optional[str] = None,
        expand_info: bool = False,
+        maxdepth: Optional[int] = None,
    ):
        resolved_path = self.resolve_path(path, revision=revision)
        path = resolved_path.unresolve()
@@ -405,19 +406,25 @@ class HfFileSystem(fsspec.AbstractFileSystem):
        if recursive:
            # Use BFS to traverse the cache and build the "recursive" output
            # (The Hub uses a so-called "tree first" strategy for the tree endpoint but we sort the output to follow the spec so the result is (eventually) the same)
+            depth = 2
            dirs_to_visit = deque(
-                [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
+                [(depth, path_info) for path_info in cached_path_infos if path_info["type"] == "directory"]
            )
            while dirs_to_visit:
-                dir_info = dirs_to_visit.popleft()
-                if dir_info["name"] not in self.dircache:
-                    dirs_not_in_dircache.append(dir_info["name"])
-                else:
-                    cached_path_infos = self.dircache[dir_info["name"]]
-                    out.extend(cached_path_infos)
-                    dirs_to_visit.extend(
-                        [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
-                    )
+                depth, dir_info = dirs_to_visit.popleft()
+                if maxdepth is None or depth <= maxdepth:
+                    if dir_info["name"] not in self.dircache:
+                        dirs_not_in_dircache.append(dir_info["name"])
+                    else:
+                        cached_path_infos = self.dircache[dir_info["name"]]
+                        out.extend(cached_path_infos)
+                        dirs_to_visit.extend(
+                            [
+                                (depth + 1, path_info)
+                                for path_info in cached_path_infos
+                                if path_info["type"] == "directory"
+                            ]
+                        )
 
        dirs_not_expanded = []
        if expand_info:
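
The reworked loop is the classic depth-annotated BFS: every queued entry carries the depth at which it was discovered, and entries beyond `maxdepth` are neither emitted nor expanded. A self-contained toy version of the pattern (illustrative `tree` dict, not HfFileSystem code):

from collections import deque
from typing import Dict, List, Optional

# Adjacency map standing in for the dircache: directory -> children.
tree: Dict[str, List[str]] = {
    "repo": ["repo/a", "repo/b"],
    "repo/a": ["repo/a/x"],
    "repo/a/x": [],
    "repo/b": [],
}

def bfs(root: str, maxdepth: Optional[int] = None) -> List[str]:
    out: List[str] = []
    queue = deque([(1, child) for child in tree[root]])
    while queue:
        depth, name = queue.popleft()
        if maxdepth is None or depth <= maxdepth:
            out.append(name)
            queue.extend((depth + 1, child) for child in tree[name])
    return out

print(bfs("repo"))              # ['repo/a', 'repo/b', 'repo/a/x']
print(bfs("repo", maxdepth=1))  # ['repo/a', 'repo/b']
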
@@ -436,6 +443,9 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                or common_prefix in chain(dirs_not_in_dircache, dirs_not_expanded)
                else self._parent(common_prefix)
            )
+            if maxdepth is not None:
+                common_path_depth = common_path[len(path) :].count("/")
+                maxdepth -= common_path_depth
            out = [o for o in out if not o["name"].startswith(common_path + "/")]
            for cached_path in self.dircache:
                if cached_path.startswith(common_path + "/"):
@@ -448,6 +458,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                    refresh=True,
                    revision=revision,
                    expand_info=expand_info,
+                    maxdepth=maxdepth,
                )
            )
        else:
@@ -460,9 +471,10 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                repo_type=resolved_path.repo_type,
            )
            for path_info in tree:
+                cache_path = root_path + "/" + path_info.path
                if isinstance(path_info, RepoFile):
                    cache_path_info = {
-                        "name": root_path + "/" + path_info.path,
+                        "name": cache_path,
                        "size": path_info.size,
                        "type": "file",
                        "blob_id": path_info.blob_id,
@@ -472,7 +484,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                    }
                else:
                    cache_path_info = {
-                        "name": root_path + "/" + path_info.path,
+                        "name": cache_path,
                        "size": 0,
                        "type": "directory",
                        "tree_id": path_info.tree_id,
@@ -480,7 +492,9 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                    }
                parent_path = self._parent(cache_path_info["name"])
                self.dircache.setdefault(parent_path, []).append(cache_path_info)
-                out.append(cache_path_info)
+                depth = cache_path[len(path) :].count("/")
+                if maxdepth is None or depth <= maxdepth:
+                    out.append(cache_path_info)
        return out
 
    def walk(self, path: str, *args, **kwargs) -> Iterator[Tuple[str, List[str], List[str]]]:
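
Both depth checks above derive an entry's depth the same way: slice off the listed root and count the remaining `/` separators. A quick illustration with hypothetical paths:

path = "datasets/user/repo"                                 # listed root (illustrative)
cache_path = "datasets/user/repo/data/train/part0.parquet"  # entry under it
relative = cache_path[len(path):]                           # "/data/train/part0.parquet"
print(relative.count("/"))  # 3 -> direct children of the root have depth 1
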
@@ -547,19 +561,22 @@ class HfFileSystem(fsspec.AbstractFileSystem):
        Returns:
            `Union[List[str], Dict[str, Dict[str, Any]]]`: List of paths or dict of file information.
        """
-        if maxdepth:
-            return super().find(
-                path, maxdepth=maxdepth, withdirs=withdirs, detail=detail, refresh=refresh, revision=revision, **kwargs
-            )
+        if maxdepth is not None and maxdepth < 1:
+            raise ValueError("maxdepth must be at least 1")
        resolved_path = self.resolve_path(path, revision=revision)
        path = resolved_path.unresolve()
        try:
-            out = self._ls_tree(path, recursive=True, refresh=refresh, revision=resolved_path.revision, **kwargs)
+            out = self._ls_tree(
+                path, recursive=True, refresh=refresh, revision=resolved_path.revision, maxdepth=maxdepth, **kwargs
+            )
        except EntryNotFoundError:
            # Path could be a file
-            if self.info(path, revision=revision, **kwargs)["type"] == "file":
-                out = {path: {}}
-            else:
+            try:
+                if self.info(path, revision=revision, **kwargs)["type"] == "file":
+                    out = {path: {}}
+                else:
+                    out = {}
+            except FileNotFoundError:
                out = {}
        else:
            if not withdirs:
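
Taken together, `find()` now validates `maxdepth`, forwards it to `_ls_tree` instead of falling back to fsspec's generic `super().find()`, and tolerates paths that vanish between the listing and the `info()` fallback. A usage sketch (network access is assumed and the repo id is illustrative):

from huggingface_hub import HfFileSystem

fs = HfFileSystem()

# Depth-limited recursive listing, now served natively from the tree endpoint:
print(fs.find("datasets/user/repo", maxdepth=1, withdirs=True))

# Invalid depths are rejected up front:
try:
    fs.find("datasets/user/repo", maxdepth=0)
except ValueError as err:
    print(err)  # maxdepth must be at least 1
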
@@ -958,13 +975,7 @@ class HfFileSystemFile(fsspec.spec.AbstractBufferedFile):
            repo_type=self.resolved_path.repo_type,
            endpoint=self.fs.endpoint,
        )
-        r = http_backoff(
-            "GET",
-            url,
-            headers=headers,
-            retry_on_status_codes=(500, 502, 503, 504),
-            timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT,
-        )
+        r = http_backoff("GET", url, headers=headers, timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT)
        hf_raise_for_status(r)
        return r.content
 
@@ -1063,7 +1074,6 @@ class HfFileSystemStreamFile(fsspec.spec.AbstractBufferedFile):
            "GET",
            url,
            headers=self.fs._api._build_hf_headers(),
-            retry_on_status_codes=(500, 502, 503, 504),
            stream=True,
            timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT,
        )
@@ -1086,7 +1096,6 @@ class HfFileSystemStreamFile(fsspec.spec.AbstractBufferedFile):
            "GET",
            url,
            headers={"Range": "bytes=%d-" % self.loc, **self.fs._api._build_hf_headers()},
-            retry_on_status_codes=(500, 502, 503, 504),
            stream=True,
            timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT,
        )
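
For context, the `Range` header built above is the standard HTTP mechanism for resuming a stream mid-file: `bytes=N-` asks the server for everything from offset `N` onward. A bare-bones sketch outside of huggingface_hub (illustrative URL):

import requests

loc = 4096  # bytes already consumed before the stream broke
resp = requests.get(
    "https://example.com/large-file",      # illustrative URL
    headers={"Range": "bytes=%d-" % loc},  # same format string as above
    stream=True,
)
# A server that honors ranges answers 206 Partial Content with a body that
# resumes at byte `loc`; one that ignores them answers 200 from byte 0.
print(resp.status_code)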

huggingface_hub/inference/_client.py CHANGED
@@ -130,7 +130,7 @@ class InferenceClient:
            Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
            arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
        provider (`str`, *optional*):
-            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
+            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"`, `"together"` or `"zai-org"`.
            Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
            If model is a URL or `base_url` is passed, then `provider` is not used.
        token (`str`, *optional*):

huggingface_hub/inference/_generated/_async_client.py CHANGED
@@ -118,7 +118,7 @@ class AsyncInferenceClient:
            Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
            arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
        provider (`str`, *optional*):
-            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
+            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"`, `"together"` or `"zai-org"`.
            Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
            If model is a URL or `base_url` is passed, then `provider` is not used.
        token (`str`, *optional*):

huggingface_hub/inference/_providers/__init__.py CHANGED
@@ -41,6 +41,7 @@ from .replicate import ReplicateImageToImageTask, ReplicateTask, ReplicateTextTo
 from .sambanova import SambanovaConversationalTask, SambanovaFeatureExtractionTask
 from .scaleway import ScalewayConversationalTask, ScalewayFeatureExtractionTask
 from .together import TogetherConversationalTask, TogetherTextGenerationTask, TogetherTextToImageTask
+from .zai_org import ZaiConversationalTask
 
 
 logger = logging.get_logger(__name__)
@@ -65,6 +66,7 @@ PROVIDER_T = Literal[
    "sambanova",
    "scaleway",
    "together",
+    "zai-org",
 ]
 
 PROVIDER_OR_POLICY_T = Union[PROVIDER_T, Literal["auto"]]
@@ -170,6 +172,9 @@ PROVIDERS: Dict[PROVIDER_T, Dict[str, TaskProviderHelper]] = {
        "conversational": TogetherConversationalTask(),
        "text-generation": TogetherTextGenerationTask(),
    },
+    "zai-org": {
+        "conversational": ZaiConversationalTask(),
+    },
 }
 
 

huggingface_hub/inference/_providers/_common.py CHANGED
@@ -35,6 +35,7 @@ HARDCODED_MODEL_INFERENCE_MAPPING: Dict[str, Dict[str, InferenceProviderMapping]
    "sambanova": {},
    "scaleway": {},
    "together": {},
+    "zai-org": {},
 }
 
 

huggingface_hub/inference/_providers/zai_org.py ADDED
@@ -0,0 +1,17 @@
+from typing import Any, Dict
+
+from huggingface_hub.inference._providers._common import BaseConversationalTask
+
+
+class ZaiConversationalTask(BaseConversationalTask):
+    def __init__(self):
+        super().__init__(provider="zai-org", base_url="https://api.z.ai")
+
+    def _prepare_headers(self, headers: Dict, api_key: str) -> Dict[str, Any]:
+        headers = super()._prepare_headers(headers, api_key)
+        headers["Accept-Language"] = "en-US,en"
+        headers["x-source-channel"] = "hugging_face"
+        return headers
+
+    def _prepare_route(self, mapped_model: str, api_key: str) -> str:
+        return "/api/paas/v4/chat/completions"
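
With the helper registered (see the `PROVIDERS` hunk above), the new provider is selected like any other. A usage sketch; the model id is illustrative and a valid token is assumed:

from huggingface_hub import InferenceClient

client = InferenceClient(provider="zai-org", api_key="hf_***")
completion = client.chat_completion(
    messages=[{"role": "user", "content": "Hello!"}],
    model="zai-org/GLM-4.5",  # illustrative model id
)
print(completion.choices[0].message.content)
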
huggingface_hub/lfs.py CHANGED
@@ -316,7 +316,7 @@ def _upload_single_part(operation: "CommitOperationAdd", upload_url: str) -> Non
    """
    with operation.as_file(with_tqdm=True) as fileobj:
        # S3 might raise a transient 500 error -> let's retry if that happens
-        response = http_backoff("PUT", upload_url, data=fileobj, retry_on_status_codes=(500, 502, 503, 504))
+        response = http_backoff("PUT", upload_url, data=fileobj)
        hf_raise_for_status(response)
 
 
@@ -400,9 +400,7 @@ def _upload_parts_iteratively(
                read_limit=chunk_size,
            ) as fileobj_slice:
                # S3 might raise a transient 500 error -> let's retry if that happens
-                part_upload_res = http_backoff(
-                    "PUT", part_upload_url, data=fileobj_slice, retry_on_status_codes=(500, 502, 503, 504)
-                )
+                part_upload_res = http_backoff("PUT", part_upload_url, data=fileobj_slice)
                hf_raise_for_status(part_upload_res)
                headers.append(part_upload_res.headers)
    return headers  # type: ignore

huggingface_hub/utils/_http.py CHANGED
@@ -21,7 +21,6 @@ import threading
 import time
 import uuid
 from functools import lru_cache
-from http import HTTPStatus
 from shlex import quote
 from typing import Any, Callable, List, Optional, Tuple, Type, Union
 
@@ -221,7 +220,7 @@ def http_backoff(
        requests.Timeout,
        requests.ConnectionError,
    ),
-    retry_on_status_codes: Union[int, Tuple[int, ...]] = HTTPStatus.SERVICE_UNAVAILABLE,
+    retry_on_status_codes: Union[int, Tuple[int, ...]] = (500, 502, 503, 504),
    **kwargs,
 ) -> Response:
    """Wrapper around requests to retry calls on an endpoint, with exponential backoff.
@@ -250,9 +249,8 @@ def http_backoff(
        retry_on_exceptions (`Type[Exception]` or `Tuple[Type[Exception]]`, *optional*):
            Define which exceptions must be caught to retry the request. Can be a single type or a tuple of types.
            By default, retry on `requests.Timeout` and `requests.ConnectionError`.
-        retry_on_status_codes (`int` or `Tuple[int]`, *optional*, defaults to `503`):
-            Define on which status codes the request must be retried. By default, only
-            HTTP 503 Service Unavailable is retried.
+        retry_on_status_codes (`int` or `Tuple[int]`, *optional*, defaults to `(500, 502, 503, 504)`):
+            Define on which status codes the request must be retried. By default, 5xx errors are retried.
        **kwargs (`dict`, *optional*):
            kwargs to pass to `requests.request`.
 
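
The practical effect is that every call site that previously passed `retry_on_status_codes=(500, 502, 503, 504)` explicitly (see the lfs.py and hf_file_system.py hunks) now gets that behavior by default, and `_request_wrapper`'s former opt-out is gone. A minimal direct-use sketch (endpoint URL is illustrative):

from huggingface_hub.utils import http_backoff

# With the new defaults, transient 5xx responses and network errors are
# retried with exponential backoff; no extra arguments are needed.
response = http_backoff("GET", "https://huggingface.co/api/models?limit=1")
response.raise_for_status()
print(response.status_code)  # 200 once a non-retried status is returned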

huggingface_hub-{0.35.0rc1 → 0.35.2}.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: huggingface-hub
-Version: 0.35.0rc1
+Version: 0.35.2
 Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
 Home-page: https://github.com/huggingface/huggingface_hub
 Author: Hugging Face, Inc.

huggingface_hub-{0.35.0rc1 → 0.35.2}.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
-huggingface_hub/__init__.py,sha256=innBAxbG5iNWznViI29JNsPWGhBpFAeZYcqNEUggjJ8,52480
+huggingface_hub/__init__.py,sha256=L_aakAoySW3PJ5_qZ50aF2g3tT-W169OdAUzzSisK0E,52476
 huggingface_hub/_commit_api.py,sha256=68HxFnJE2s-QmGZRHQav5kOMTseYV_ZQi04ADaQmZUk,38979
 huggingface_hub/_commit_scheduler.py,sha256=tfIoO1xWHjTJ6qy6VS6HIoymDycFPg0d6pBSZprrU2U,14679
 huggingface_hub/_inference_endpoints.py,sha256=ahmbPcEXsJ_JcMb9TDgdkD8Z2z9uytkFG3_1o6dTm8g,17598
@@ -14,16 +14,16 @@ huggingface_hub/_webhooks_payload.py,sha256=Xm3KaK7tCOGBlXkuZvbym6zjHXrT1XCrbUFW
 huggingface_hub/_webhooks_server.py,sha256=5J63wk9MUGKBNJVsOD9i60mJ-VMp0YYmlf87vQsl-L8,15767
 huggingface_hub/community.py,sha256=4MtcoxEI9_0lmmilBEnvUEi8_O1Ivfa8p6eKxYU5-ts,12198
 huggingface_hub/constants.py,sha256=nILseAp4rqLu_KQTZDpPGOhepVAPanD7azbomAvovj0,10313
-huggingface_hub/dataclasses.py,sha256=sgPdEi2UDprhNPP2PPkiSlzsHdC1WcpwVTLwlHAEcr0,17224
+huggingface_hub/dataclasses.py,sha256=rjQfuX9MeTXZQrCQC8JvkjpARDehOiSluE7Kz1L7Ueg,17337
 huggingface_hub/errors.py,sha256=D7Lw0Jjrf8vfmD0B26LEvg-JWkU8Zq0KDPJOzFY4QLw,11201
 huggingface_hub/fastai_utils.py,sha256=DpeH9d-6ut2k_nCAAwglM51XmRmgfbRe2SPifpVL5Yk,16745
-huggingface_hub/file_download.py,sha256=E-NWON01pprbAsw7Kz477JX6f8HTWsdpEdQAtA37t5c,78974
+huggingface_hub/file_download.py,sha256=ohaAp9WR4PghMew2KEFlnRVhrvCiBYxs5I5wKOSw0e0,78926
 huggingface_hub/hf_api.py,sha256=Y0rA53vl0pz8SvRMBDKGuaM3ehUVfyCAa9m5ByNE830,483625
-huggingface_hub/hf_file_system.py,sha256=qgNfEKL4JVbGic4qBZdli1OnZXtt9ztaJQDhqDIRQm8,47033
+huggingface_hub/hf_file_system.py,sha256=97XNsFaLv2WpLzFRghTl0SPRbySSljnxQuF_Pl4jvz0,47545
 huggingface_hub/hub_mixin.py,sha256=Ii3w9o7XgGbj6UNPnieW5IDfaCd8OEKpIH1hRkncRDQ,38208
 huggingface_hub/inference_api.py,sha256=b4-NhPSn9b44nYKV8tDKXodmE4JVdEymMWL4CVGkzlE,8323
 huggingface_hub/keras_mixin.py,sha256=WGNQZROdw6yjJ1DGTPZPwKAxf1UbkzAx1dRidkeb2fk,19553
-huggingface_hub/lfs.py,sha256=n-TIjK7J7aXG3zi__0nkd6aNkE4djOf9CD6dYQOQ5P8,16649
+huggingface_hub/lfs.py,sha256=iSwoSDMN1AtiLP9DWz_ht8gx6ZDt2PbSC_0Yd166DF4,16523
 huggingface_hub/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 huggingface_hub/repocard.py,sha256=1gask5OzfyJDvnXBMRm49VT0SCiIspCYUukSOGNafHE,34857
 huggingface_hub/repocard_data.py,sha256=hr4ReFpEQMNdh_9Dx-L-IJoI1ElHyk-h-8ZRqwVYYOE,34082
@@ -57,10 +57,10 @@ huggingface_hub/commands/upload_large_folder.py,sha256=_1id84BFtbL8HgFRKZ-el_uPr
 huggingface_hub/commands/user.py,sha256=dDpi0mLYvTeYf0fhPVQyEJsn7Wrk6gWvR5YHC6RgebU,7516
 huggingface_hub/commands/version.py,sha256=rGpCbvxImY9eQqXrshYt609Iws27R75WARmKQrIo6Ok,1390
 huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_client.py,sha256=DzuCZ4-hwTH68J70Irq9IdshCAsfdtHaGg69ZfDzsm0,157486
+huggingface_hub/inference/_client.py,sha256=zDWISnMMS2PurpwcZRZHTKK6kB3Cm0w5R82LDtm4p14,157499
 huggingface_hub/inference/_common.py,sha256=dI3OPg0320OOB0FRy_kqftW9F3ghEnBVA5Gi4VaSctg,15778
 huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_generated/_async_client.py,sha256=kqYLXuhvihMT5oyrTQdLZxNH8dxVQ_Nz6YNsHufAGJM,163395
+huggingface_hub/inference/_generated/_async_client.py,sha256=GuZarDMyKrAKWBWlIjzuEOO8579BnWHM2xaGJbU6U94,163408
 huggingface_hub/inference/_generated/types/__init__.py,sha256=9WvrGQ8aThtKSNzZF06j-CIE2ZuItne8FFnea1p1u38,6557
 huggingface_hub/inference/_generated/types/audio_classification.py,sha256=Jg3mzfGhCSH6CfvVvgJSiFpkz6v4nNA0G4LJXacEgNc,1573
 huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=2Ep4WkePL7oJwcp5nRJqApwviumGHbft9HhXE9XLHj4,891
@@ -103,8 +103,8 @@ huggingface_hub/inference/_mcp/constants.py,sha256=kldRfaidXMdyMl_jLosaQomgWDv4s
 huggingface_hub/inference/_mcp/mcp_client.py,sha256=iKGOY6fD0cph8wp9rXlofJglDLcfXGxedsJ3GLV1z-g,16745
 huggingface_hub/inference/_mcp/types.py,sha256=3gq-P_mrmvPI6KWBqjCxavtMPiGz10YXog7wg4oJYAo,941
 huggingface_hub/inference/_mcp/utils.py,sha256=KFsGOC8dytS3VgaugBzibdteWasZ9CAnp83U2SyIlMw,4188
-huggingface_hub/inference/_providers/__init__.py,sha256=0tJIYaeg1vO0aiIsddvknZsB0Af_0AaN_qYtdk9hGSw,8740
-huggingface_hub/inference/_providers/_common.py,sha256=JwvxrQYjsRwYkD8aOCrxPXSv7Lmtv4Xfn0QjQbQdAF0,12334
+huggingface_hub/inference/_providers/__init__.py,sha256=m-vZjvrWpiERWdeY_6kc56swXXFTYD0UX3j7M_PBBNk,8873
+huggingface_hub/inference/_providers/_common.py,sha256=PIeQ3nKatRN9IrqJSWPcm8jGdc5AcHuYenNlx50WCGA,12353
 huggingface_hub/inference/_providers/black_forest_labs.py,sha256=FIukZoIFt_FDrTTDfpF-Vko5sXnmH0QvVIsMtV2Jzm8,2852
 huggingface_hub/inference/_providers/cerebras.py,sha256=QOJ-1U-os7uE7p6eUnn_P_APq-yQhx28be7c3Tq2EuA,210
 huggingface_hub/inference/_providers/cohere.py,sha256=O3tC-qIUL91mx_mE8bOHCtDWcQuKOUauhUoXSUBUCZ8,1253
@@ -123,6 +123,7 @@ huggingface_hub/inference/_providers/replicate.py,sha256=otVfPkfBtlWrpjQub4V__t7
 huggingface_hub/inference/_providers/sambanova.py,sha256=Unt3H3jr_kgI9vzRjmmW1DFyoEuPkKCcgIIloiOj3j8,2037
 huggingface_hub/inference/_providers/scaleway.py,sha256=Jy81kXWbXCHBpx6xmyzdEfXGSyhUfjKOLHuDSvhHWGo,1209
 huggingface_hub/inference/_providers/together.py,sha256=KHF19CS3qXS7G1-CwcMiD8Z5wzPKEKi4F2DzqAthbBE,3439
+huggingface_hub/inference/_providers/zai_org.py,sha256=plGzMZuLrChZvgpS3CCPqI6ImotZZxNLgfxnR7v6tw8,646
 huggingface_hub/serialization/__init__.py,sha256=kn-Fa-m4FzMnN8lNsF-SwFcfzug4CucexybGKyvZ8S0,1041
 huggingface_hub/serialization/_base.py,sha256=Df3GwGR9NzeK_SD75prXLucJAzPiNPgHbgXSw-_LTk8,8126
 huggingface_hub/serialization/_dduf.py,sha256=s42239rLiHwaJE36QDEmS5GH7DSmQ__BffiHJO5RjIg,15424
@@ -143,7 +144,7 @@ huggingface_hub/utils/_fixes.py,sha256=xQV1QkUn2WpLqLjtXNiyn9gh-454K6AF-Q3kwkYAQ
 huggingface_hub/utils/_git_credential.py,sha256=ao9rq-rVHn8lghSVZEjDAX4kIkNi7bayY361TDSgSpg,4619
 huggingface_hub/utils/_headers.py,sha256=w4ayq4hLGaZ3B7nwdEi5Zu23SmmDuOwv58It78wkakk,8868
 huggingface_hub/utils/_hf_folder.py,sha256=WNjTnu0Q7tqcSS9EsP4ssCJrrJMcCvAt8P_-LEtmOU8,2487
-huggingface_hub/utils/_http.py,sha256=her7UZ0KRo9WYDArpqVFyEXTusOGUECj5HNS8Eahqm8,25531
+huggingface_hub/utils/_http.py,sha256=nPgPzM0ujKjl9v3g8XZ0zGMC_B9eqWvf2Zsr3t7X1jA,25476
 huggingface_hub/utils/_lfs.py,sha256=EC0Oz6Wiwl8foRNkUOzrETXzAWlbgpnpxo5a410ovFY,3957
 huggingface_hub/utils/_pagination.py,sha256=EX5tRasSuQDaKbXuGYbInBK2odnSWNHgzw2tSgqeBRI,1906
 huggingface_hub/utils/_paths.py,sha256=w1ZhFmmD5ykWjp_hAvhjtOoa2ZUcOXJrF4a6O3QpAWo,5042
@@ -160,9 +161,9 @@ huggingface_hub/utils/insecure_hashlib.py,sha256=iAaepavFZ5Dhfa5n8KozRfQprKmvcjS
 huggingface_hub/utils/logging.py,sha256=0A8fF1yh3L9Ka_bCDX2ml4U5Ht0tY8Dr3JcbRvWFuwo,4909
 huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
 huggingface_hub/utils/tqdm.py,sha256=xAKcyfnNHsZ7L09WuEM5Ew5-MDhiahLACbbN2zMmcLs,10671
-huggingface_hub-0.35.0rc1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-huggingface_hub-0.35.0rc1.dist-info/METADATA,sha256=D8nwBnEXx1SAoHD_yYWJlF6t6A8ZpYaXFwy5tdmgoT8,14823
-huggingface_hub-0.35.0rc1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-huggingface_hub-0.35.0rc1.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
-huggingface_hub-0.35.0rc1.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
-huggingface_hub-0.35.0rc1.dist-info/RECORD,,
+huggingface_hub-0.35.2.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+huggingface_hub-0.35.2.dist-info/METADATA,sha256=YTZY8rd-itCYzOGItZY4irrWh_x9gRaZmOwfr5ya1zI,14820
+huggingface_hub-0.35.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+huggingface_hub-0.35.2.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
+huggingface_hub-0.35.2.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
+huggingface_hub-0.35.2.dist-info/RECORD,,