huggingface-hub 0.35.1__py3-none-any.whl → 0.35.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of huggingface-hub might be problematic.
- huggingface_hub/__init__.py +1 -1
- huggingface_hub/hf_file_system.py +38 -21
- huggingface_hub/inference/_client.py +1 -1
- huggingface_hub/inference/_generated/_async_client.py +1 -1
- huggingface_hub/inference/_providers/__init__.py +5 -0
- huggingface_hub/inference/_providers/_common.py +1 -0
- huggingface_hub/inference/_providers/zai_org.py +17 -0
- {huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/METADATA +1 -1
- {huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/RECORD +13 -12
- {huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/top_level.txt +0 -0
huggingface_hub/__init__.py
CHANGED

huggingface_hub/hf_file_system.py
CHANGED

@@ -386,6 +386,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
         refresh: bool = False,
         revision: Optional[str] = None,
         expand_info: bool = False,
+        maxdepth: Optional[int] = None,
     ):
         resolved_path = self.resolve_path(path, revision=revision)
         path = resolved_path.unresolve()
@@ -405,19 +406,25 @@ class HfFileSystem(fsspec.AbstractFileSystem):
             if recursive:
                 # Use BFS to traverse the cache and build the "recursive" output
                 # (The Hub uses a so-called "tree first" strategy for the tree endpoint but we sort the output to follow the spec so the result is (eventually) the same)
+                depth = 2
                 dirs_to_visit = deque(
-                    [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
+                    [(depth, path_info) for path_info in cached_path_infos if path_info["type"] == "directory"]
                 )
                 while dirs_to_visit:
-                    dir_info = dirs_to_visit.popleft()
-                    if dir_info["name"] not in self.dircache:
-                        dirs_not_in_dircache.append(dir_info["name"])
-                    else:
-                        cached_path_infos = self.dircache[dir_info["name"]]
-                        out.extend(cached_path_infos)
-                        dirs_to_visit.extend(
-                            [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
-                        )
+                    depth, dir_info = dirs_to_visit.popleft()
+                    if maxdepth is None or depth <= maxdepth:
+                        if dir_info["name"] not in self.dircache:
+                            dirs_not_in_dircache.append(dir_info["name"])
+                        else:
+                            cached_path_infos = self.dircache[dir_info["name"]]
+                            out.extend(cached_path_infos)
+                            dirs_to_visit.extend(
+                                [
+                                    (depth + 1, path_info)
+                                    for path_info in cached_path_infos
+                                    if path_info["type"] == "directory"
+                                ]
+                            )
 
             dirs_not_expanded = []
             if expand_info:
@@ -436,6 +443,9 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                     or common_prefix in chain(dirs_not_in_dircache, dirs_not_expanded)
                     else self._parent(common_prefix)
                 )
+                if maxdepth is not None:
+                    common_path_depth = common_path[len(path) :].count("/")
+                    maxdepth -= common_path_depth
                 out = [o for o in out if not o["name"].startswith(common_path + "/")]
                 for cached_path in self.dircache:
                     if cached_path.startswith(common_path + "/"):
@@ -448,6 +458,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                         refresh=True,
                         revision=revision,
                         expand_info=expand_info,
+                        maxdepth=maxdepth,
                     )
                 )
         else:
@@ -460,9 +471,10 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                 repo_type=resolved_path.repo_type,
             )
             for path_info in tree:
+                cache_path = root_path + "/" + path_info.path
                 if isinstance(path_info, RepoFile):
                     cache_path_info = {
-                        "name": root_path + "/" + path_info.path,
+                        "name": cache_path,
                         "size": path_info.size,
                         "type": "file",
                         "blob_id": path_info.blob_id,
@@ -472,7 +484,7 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                     }
                 else:
                     cache_path_info = {
-                        "name": root_path + "/" + path_info.path,
+                        "name": cache_path,
                         "size": 0,
                         "type": "directory",
                         "tree_id": path_info.tree_id,
@@ -480,7 +492,9 @@ class HfFileSystem(fsspec.AbstractFileSystem):
                     }
                 parent_path = self._parent(cache_path_info["name"])
                 self.dircache.setdefault(parent_path, []).append(cache_path_info)
-                out.append(cache_path_info)
+                depth = cache_path[len(path) :].count("/")
+                if maxdepth is None or depth <= maxdepth:
+                    out.append(cache_path_info)
         return out
 
     def walk(self, path: str, *args, **kwargs) -> Iterator[Tuple[str, List[str], List[str]]]:
@@ -547,19 +561,22 @@ class HfFileSystem(fsspec.AbstractFileSystem):
         Returns:
             `Union[List[str], Dict[str, Dict[str, Any]]]`: List of paths or dict of file information.
         """
-        if maxdepth:
-            return super().find(
-                path, maxdepth=maxdepth, withdirs=withdirs, detail=detail, refresh=refresh, revision=revision, **kwargs
-            )
+        if maxdepth is not None and maxdepth < 1:
+            raise ValueError("maxdepth must be at least 1")
         resolved_path = self.resolve_path(path, revision=revision)
         path = resolved_path.unresolve()
         try:
-            out = self._ls_tree(path, recursive=True, refresh=refresh, revision=resolved_path.revision, **kwargs)
+            out = self._ls_tree(
+                path, recursive=True, refresh=refresh, revision=resolved_path.revision, maxdepth=maxdepth, **kwargs
+            )
         except EntryNotFoundError:
             # Path could be a file
-            if self.info(path, revision=revision, **kwargs)["type"] == "file":
-                out = {path: {}}
-            else:
+            try:
+                if self.info(path, revision=revision, **kwargs)["type"] == "file":
+                    out = {path: {}}
+                else:
+                    out = {}
+            except FileNotFoundError:
                 out = {}
         else:
             if not withdirs:
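
For context, a minimal usage sketch of how the new maxdepth argument is reached through the public find() API. This is not taken from the release itself, and "gpt2" is only a placeholder repo id:

from huggingface_hub import HfFileSystem

fs = HfFileSystem()

# Limit traversal to one level below the repo root; deeper entries are skipped.
# "gpt2" is a placeholder repo id; any public repo works the same way.
top_level = fs.find("gpt2", maxdepth=1, withdirs=True)
print(top_level)

# With this release, maxdepth values below 1 raise ValueError("maxdepth must be at least 1").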

huggingface_hub/inference/_client.py
CHANGED

@@ -130,7 +130,7 @@ class InferenceClient:
             Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
             arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
         provider (`str`, *optional*):
-            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
+            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"`, `"together"` or `"zai-org"`.
             Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
             If model is a URL or `base_url` is passed, then `provider` is not used.
         token (`str`, *optional*):

huggingface_hub/inference/_generated/_async_client.py
CHANGED

@@ -118,7 +118,7 @@ class AsyncInferenceClient:
             Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
             arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
         provider (`str`, *optional*):
-            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"` or `"together"`.
+            Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `publicai`, `"replicate"`, `"sambanova"`, `"scaleway"`, `"together"` or `"zai-org"`.
             Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
             If model is a URL or `base_url` is passed, then `provider` is not used.
         token (`str`, *optional*):

huggingface_hub/inference/_providers/__init__.py
CHANGED

@@ -41,6 +41,7 @@ from .replicate import ReplicateImageToImageTask, ReplicateTask, ReplicateTextTo
 from .sambanova import SambanovaConversationalTask, SambanovaFeatureExtractionTask
 from .scaleway import ScalewayConversationalTask, ScalewayFeatureExtractionTask
 from .together import TogetherConversationalTask, TogetherTextGenerationTask, TogetherTextToImageTask
+from .zai_org import ZaiConversationalTask
 
 
 logger = logging.get_logger(__name__)
@@ -65,6 +66,7 @@ PROVIDER_T = Literal[
     "sambanova",
     "scaleway",
     "together",
+    "zai-org",
 ]
 
 PROVIDER_OR_POLICY_T = Union[PROVIDER_T, Literal["auto"]]
@@ -170,6 +172,9 @@ PROVIDERS: Dict[PROVIDER_T, Dict[str, TaskProviderHelper]] = {
         "conversational": TogetherConversationalTask(),
         "text-generation": TogetherTextGenerationTask(),
     },
+    "zai-org": {
+        "conversational": ZaiConversationalTask(),
+    },
 }
 
 

huggingface_hub/inference/_providers/zai_org.py
ADDED

@@ -0,0 +1,17 @@
+from typing import Any, Dict
+
+from huggingface_hub.inference._providers._common import BaseConversationalTask
+
+
+class ZaiConversationalTask(BaseConversationalTask):
+    def __init__(self):
+        super().__init__(provider="zai-org", base_url="https://api.z.ai")
+
+    def _prepare_headers(self, headers: Dict, api_key: str) -> Dict[str, Any]:
+        headers = super()._prepare_headers(headers, api_key)
+        headers["Accept-Language"] = "en-US,en"
+        headers["x-source-channel"] = "hugging_face"
+        return headers
+
+    def _prepare_route(self, mapped_model: str, api_key: str) -> str:
+        return "/api/paas/v4/chat/completions"
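
For reference, a minimal sketch of selecting the newly registered provider from the client side. This example is not part of the diff, and "zai-org/GLM-4.5" is only an illustrative model id:

from huggingface_hub import InferenceClient

# Route chat-completion requests through the "zai-org" provider added in this release.
client = InferenceClient(provider="zai-org")

response = client.chat_completion(
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    model="zai-org/GLM-4.5",  # illustrative model id; any model served by this provider works similarly
    max_tokens=64,
)
print(response.choices[0].message.content)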

{huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: huggingface-hub
-Version: 0.35.1
+Version: 0.35.2
 Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
 Home-page: https://github.com/huggingface/huggingface_hub
 Author: Hugging Face, Inc.

{huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-huggingface_hub/__init__.py,sha256=
+huggingface_hub/__init__.py,sha256=L_aakAoySW3PJ5_qZ50aF2g3tT-W169OdAUzzSisK0E,52476
 huggingface_hub/_commit_api.py,sha256=68HxFnJE2s-QmGZRHQav5kOMTseYV_ZQi04ADaQmZUk,38979
 huggingface_hub/_commit_scheduler.py,sha256=tfIoO1xWHjTJ6qy6VS6HIoymDycFPg0d6pBSZprrU2U,14679
 huggingface_hub/_inference_endpoints.py,sha256=ahmbPcEXsJ_JcMb9TDgdkD8Z2z9uytkFG3_1o6dTm8g,17598
@@ -19,7 +19,7 @@ huggingface_hub/errors.py,sha256=D7Lw0Jjrf8vfmD0B26LEvg-JWkU8Zq0KDPJOzFY4QLw,112
 huggingface_hub/fastai_utils.py,sha256=DpeH9d-6ut2k_nCAAwglM51XmRmgfbRe2SPifpVL5Yk,16745
 huggingface_hub/file_download.py,sha256=ohaAp9WR4PghMew2KEFlnRVhrvCiBYxs5I5wKOSw0e0,78926
 huggingface_hub/hf_api.py,sha256=Y0rA53vl0pz8SvRMBDKGuaM3ehUVfyCAa9m5ByNE830,483625
-huggingface_hub/hf_file_system.py,sha256=
+huggingface_hub/hf_file_system.py,sha256=97XNsFaLv2WpLzFRghTl0SPRbySSljnxQuF_Pl4jvz0,47545
 huggingface_hub/hub_mixin.py,sha256=Ii3w9o7XgGbj6UNPnieW5IDfaCd8OEKpIH1hRkncRDQ,38208
 huggingface_hub/inference_api.py,sha256=b4-NhPSn9b44nYKV8tDKXodmE4JVdEymMWL4CVGkzlE,8323
 huggingface_hub/keras_mixin.py,sha256=WGNQZROdw6yjJ1DGTPZPwKAxf1UbkzAx1dRidkeb2fk,19553
@@ -57,10 +57,10 @@ huggingface_hub/commands/upload_large_folder.py,sha256=_1id84BFtbL8HgFRKZ-el_uPr
 huggingface_hub/commands/user.py,sha256=dDpi0mLYvTeYf0fhPVQyEJsn7Wrk6gWvR5YHC6RgebU,7516
 huggingface_hub/commands/version.py,sha256=rGpCbvxImY9eQqXrshYt609Iws27R75WARmKQrIo6Ok,1390
 huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_client.py,sha256=
+huggingface_hub/inference/_client.py,sha256=zDWISnMMS2PurpwcZRZHTKK6kB3Cm0w5R82LDtm4p14,157499
 huggingface_hub/inference/_common.py,sha256=dI3OPg0320OOB0FRy_kqftW9F3ghEnBVA5Gi4VaSctg,15778
 huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_generated/_async_client.py,sha256=
+huggingface_hub/inference/_generated/_async_client.py,sha256=GuZarDMyKrAKWBWlIjzuEOO8579BnWHM2xaGJbU6U94,163408
 huggingface_hub/inference/_generated/types/__init__.py,sha256=9WvrGQ8aThtKSNzZF06j-CIE2ZuItne8FFnea1p1u38,6557
 huggingface_hub/inference/_generated/types/audio_classification.py,sha256=Jg3mzfGhCSH6CfvVvgJSiFpkz6v4nNA0G4LJXacEgNc,1573
 huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=2Ep4WkePL7oJwcp5nRJqApwviumGHbft9HhXE9XLHj4,891
@@ -103,8 +103,8 @@ huggingface_hub/inference/_mcp/constants.py,sha256=kldRfaidXMdyMl_jLosaQomgWDv4s
 huggingface_hub/inference/_mcp/mcp_client.py,sha256=iKGOY6fD0cph8wp9rXlofJglDLcfXGxedsJ3GLV1z-g,16745
 huggingface_hub/inference/_mcp/types.py,sha256=3gq-P_mrmvPI6KWBqjCxavtMPiGz10YXog7wg4oJYAo,941
 huggingface_hub/inference/_mcp/utils.py,sha256=KFsGOC8dytS3VgaugBzibdteWasZ9CAnp83U2SyIlMw,4188
-huggingface_hub/inference/_providers/__init__.py,sha256=
-huggingface_hub/inference/_providers/_common.py,sha256=
+huggingface_hub/inference/_providers/__init__.py,sha256=m-vZjvrWpiERWdeY_6kc56swXXFTYD0UX3j7M_PBBNk,8873
+huggingface_hub/inference/_providers/_common.py,sha256=PIeQ3nKatRN9IrqJSWPcm8jGdc5AcHuYenNlx50WCGA,12353
 huggingface_hub/inference/_providers/black_forest_labs.py,sha256=FIukZoIFt_FDrTTDfpF-Vko5sXnmH0QvVIsMtV2Jzm8,2852
 huggingface_hub/inference/_providers/cerebras.py,sha256=QOJ-1U-os7uE7p6eUnn_P_APq-yQhx28be7c3Tq2EuA,210
 huggingface_hub/inference/_providers/cohere.py,sha256=O3tC-qIUL91mx_mE8bOHCtDWcQuKOUauhUoXSUBUCZ8,1253
@@ -123,6 +123,7 @@ huggingface_hub/inference/_providers/replicate.py,sha256=otVfPkfBtlWrpjQub4V__t7
 huggingface_hub/inference/_providers/sambanova.py,sha256=Unt3H3jr_kgI9vzRjmmW1DFyoEuPkKCcgIIloiOj3j8,2037
 huggingface_hub/inference/_providers/scaleway.py,sha256=Jy81kXWbXCHBpx6xmyzdEfXGSyhUfjKOLHuDSvhHWGo,1209
 huggingface_hub/inference/_providers/together.py,sha256=KHF19CS3qXS7G1-CwcMiD8Z5wzPKEKi4F2DzqAthbBE,3439
+huggingface_hub/inference/_providers/zai_org.py,sha256=plGzMZuLrChZvgpS3CCPqI6ImotZZxNLgfxnR7v6tw8,646
 huggingface_hub/serialization/__init__.py,sha256=kn-Fa-m4FzMnN8lNsF-SwFcfzug4CucexybGKyvZ8S0,1041
 huggingface_hub/serialization/_base.py,sha256=Df3GwGR9NzeK_SD75prXLucJAzPiNPgHbgXSw-_LTk8,8126
 huggingface_hub/serialization/_dduf.py,sha256=s42239rLiHwaJE36QDEmS5GH7DSmQ__BffiHJO5RjIg,15424
@@ -160,9 +161,9 @@ huggingface_hub/utils/insecure_hashlib.py,sha256=iAaepavFZ5Dhfa5n8KozRfQprKmvcjS
 huggingface_hub/utils/logging.py,sha256=0A8fF1yh3L9Ka_bCDX2ml4U5Ht0tY8Dr3JcbRvWFuwo,4909
 huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
 huggingface_hub/utils/tqdm.py,sha256=xAKcyfnNHsZ7L09WuEM5Ew5-MDhiahLACbbN2zMmcLs,10671
-huggingface_hub-0.35.
-huggingface_hub-0.35.
-huggingface_hub-0.35.
-huggingface_hub-0.35.
-huggingface_hub-0.35.
-huggingface_hub-0.35.
+huggingface_hub-0.35.2.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+huggingface_hub-0.35.2.dist-info/METADATA,sha256=YTZY8rd-itCYzOGItZY4irrWh_x9gRaZmOwfr5ya1zI,14820
+huggingface_hub-0.35.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+huggingface_hub-0.35.2.dist-info/entry_points.txt,sha256=HIzLhjwPTO7U_ncpW4AkmzAuaadr1ajmYagW5mdb5TM,217
+huggingface_hub-0.35.2.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
+huggingface_hub-0.35.2.dist-info/RECORD,,

{huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/LICENSE
File without changes

{huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/WHEEL
File without changes

{huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/entry_points.txt
File without changes

{huggingface_hub-0.35.1.dist-info → huggingface_hub-0.35.2.dist-info}/top_level.txt
File without changes