huggingface-hub 0.24.6__py3-none-any.whl → 0.25.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between the two versions.
Potentially problematic release: this version of huggingface-hub has been flagged as possibly problematic.
- huggingface_hub/__init__.py +21 -1
- huggingface_hub/_commit_api.py +4 -4
- huggingface_hub/_inference_endpoints.py +13 -1
- huggingface_hub/_local_folder.py +191 -4
- huggingface_hub/_login.py +6 -6
- huggingface_hub/_snapshot_download.py +8 -17
- huggingface_hub/_space_api.py +5 -0
- huggingface_hub/_tensorboard_logger.py +29 -13
- huggingface_hub/_upload_large_folder.py +621 -0
- huggingface_hub/_webhooks_server.py +1 -1
- huggingface_hub/commands/_cli_utils.py +5 -0
- huggingface_hub/commands/download.py +8 -0
- huggingface_hub/commands/huggingface_cli.py +6 -1
- huggingface_hub/commands/lfs.py +2 -1
- huggingface_hub/commands/repo_files.py +2 -2
- huggingface_hub/commands/scan_cache.py +99 -57
- huggingface_hub/commands/tag.py +1 -1
- huggingface_hub/commands/upload.py +2 -1
- huggingface_hub/commands/upload_large_folder.py +129 -0
- huggingface_hub/commands/version.py +37 -0
- huggingface_hub/community.py +2 -2
- huggingface_hub/errors.py +218 -1
- huggingface_hub/fastai_utils.py +2 -3
- huggingface_hub/file_download.py +63 -63
- huggingface_hub/hf_api.py +783 -314
- huggingface_hub/hf_file_system.py +15 -23
- huggingface_hub/hub_mixin.py +27 -25
- huggingface_hub/inference/_client.py +78 -127
- huggingface_hub/inference/_generated/_async_client.py +169 -144
- huggingface_hub/inference/_generated/types/base.py +0 -9
- huggingface_hub/inference/_templating.py +2 -3
- huggingface_hub/inference_api.py +2 -2
- huggingface_hub/keras_mixin.py +2 -2
- huggingface_hub/lfs.py +7 -98
- huggingface_hub/repocard.py +6 -5
- huggingface_hub/repository.py +5 -5
- huggingface_hub/serialization/_torch.py +64 -11
- huggingface_hub/utils/__init__.py +13 -14
- huggingface_hub/utils/_cache_manager.py +97 -14
- huggingface_hub/utils/_fixes.py +18 -2
- huggingface_hub/utils/_http.py +228 -2
- huggingface_hub/utils/_lfs.py +110 -0
- huggingface_hub/utils/_runtime.py +7 -1
- huggingface_hub/utils/_token.py +3 -2
- {huggingface_hub-0.24.6.dist-info → huggingface_hub-0.25.0.dist-info}/METADATA +2 -2
- {huggingface_hub-0.24.6.dist-info → huggingface_hub-0.25.0.dist-info}/RECORD +50 -48
- huggingface_hub/inference/_types.py +0 -52
- huggingface_hub/utils/_errors.py +0 -397
- {huggingface_hub-0.24.6.dist-info → huggingface_hub-0.25.0.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.24.6.dist-info → huggingface_hub-0.25.0.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.24.6.dist-info → huggingface_hub-0.25.0.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.24.6.dist-info → huggingface_hub-0.25.0.dist-info}/top_level.txt +0 -0
huggingface_hub/hf_api.py
CHANGED
@@ -19,6 +19,7 @@ import json
 import re
 import struct
 import warnings
+from collections import defaultdict
 from concurrent.futures import Future, ThreadPoolExecutor
 from dataclasses import asdict, dataclass, field
 from datetime import datetime
@@ -47,6 +48,7 @@ from requests.exceptions import HTTPError
 from tqdm.auto import tqdm as base_tqdm
 from tqdm.contrib.concurrent import thread_map
 
+from . import constants
 from ._commit_api import (
     CommitOperation,
     CommitOperationAdd,
@@ -73,6 +75,7 @@ from ._multi_commits import (
     plan_multi_commits,
 )
 from ._space_api import SpaceHardware, SpaceRuntime, SpaceStorage, SpaceVariable
+from ._upload_large_folder import upload_large_folder_internal
 from .community import (
     Discussion,
     DiscussionComment,
@@ -82,39 +85,41 @@ from .community import (
     deserialize_event,
 )
 from .constants import (
-    DEFAULT_ETAG_TIMEOUT,
-    DEFAULT_REQUEST_TIMEOUT,
-    DEFAULT_REVISION,
-    DISCUSSION_STATUS,
-    DISCUSSION_TYPES,
-    ENDPOINT,
-    INFERENCE_ENDPOINTS_ENDPOINT,
-    REGEX_COMMIT_OID,
-    REPO_TYPE_MODEL,
-    REPO_TYPES,
-    REPO_TYPES_MAPPING,
-    REPO_TYPES_URL_PREFIXES,
-    SAFETENSORS_INDEX_FILE,
-    SAFETENSORS_MAX_HEADER_LENGTH,
-    SAFETENSORS_SINGLE_FILE,
-    SPACES_SDK_TYPES,
-    WEBHOOK_DOMAIN_T,
-    DiscussionStatusFilter,
-    DiscussionTypeFilter,
+    DEFAULT_ETAG_TIMEOUT,  # noqa: F401 # kept for backward compatibility
+    DEFAULT_REQUEST_TIMEOUT,  # noqa: F401 # kept for backward compatibility
+    DEFAULT_REVISION,  # noqa: F401 # kept for backward compatibility
+    DISCUSSION_STATUS,  # noqa: F401 # kept for backward compatibility
+    DISCUSSION_TYPES,  # noqa: F401 # kept for backward compatibility
+    ENDPOINT,  # noqa: F401 # kept for backward compatibility
+    INFERENCE_ENDPOINTS_ENDPOINT,  # noqa: F401 # kept for backward compatibility
+    REGEX_COMMIT_OID,  # noqa: F401 # kept for backward compatibility
+    REPO_TYPE_MODEL,  # noqa: F401 # kept for backward compatibility
+    REPO_TYPES,  # noqa: F401 # kept for backward compatibility
+    REPO_TYPES_MAPPING,  # noqa: F401 # kept for backward compatibility
+    REPO_TYPES_URL_PREFIXES,  # noqa: F401 # kept for backward compatibility
+    SAFETENSORS_INDEX_FILE,  # noqa: F401 # kept for backward compatibility
+    SAFETENSORS_MAX_HEADER_LENGTH,  # noqa: F401 # kept for backward compatibility
+    SAFETENSORS_SINGLE_FILE,  # noqa: F401 # kept for backward compatibility
+    SPACES_SDK_TYPES,  # noqa: F401 # kept for backward compatibility
+    WEBHOOK_DOMAIN_T,  # noqa: F401 # kept for backward compatibility
+    DiscussionStatusFilter,  # noqa: F401 # kept for backward compatibility
+    DiscussionTypeFilter,  # noqa: F401 # kept for backward compatibility
+)
+from .errors import (
+    BadRequestError,
+    EntryNotFoundError,
+    GatedRepoError,
+    HfHubHTTPError,
+    RepositoryNotFoundError,
+    RevisionNotFoundError,
 )
 from .file_download import HfFileMetadata, get_hf_file_metadata, hf_hub_url
 from .repocard_data import DatasetCardData, ModelCardData, SpaceCardData
 from .utils import (
     DEFAULT_IGNORE_PATTERNS,
-    BadRequestError,
-    EntryNotFoundError,
-    GatedRepoError,
     HfFolder,  # noqa: F401 # kept for backward compatibility
-    HfHubHTTPError,
     LocalTokenNotFoundError,
     NotASafetensorsRepoError,
-    RepositoryNotFoundError,
-    RevisionNotFoundError,
     SafetensorsFileMetadata,
     SafetensorsParsingError,
     SafetensorsRepoMetadata,
@@ -131,10 +136,9 @@ from .utils import (
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
+from .utils._deprecation import _deprecate_method
 from .utils._typing import CallableT
-from .utils.endpoint_helpers import (
-    _is_emission_within_threshold,
-)
+from .utils.endpoint_helpers import _is_emission_within_threshold
 
 
 R = TypeVar("R")  # Return type
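The `# noqa: F401` markers above exist so that legacy imports keep resolving after the move to the `constants` and `errors` modules. A minimal sketch of what remains valid (assuming huggingface_hub >= 0.25.0 is installed):

    from huggingface_hub import constants
    from huggingface_hub.hf_api import ENDPOINT  # legacy path, re-exported for backward compatibility

    # Both names point at the same value.
    assert ENDPOINT == constants.ENDPOINT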
@@ -142,13 +146,16 @@ CollectionItemType_T = Literal["model", "dataset", "space", "paper"]
 
 ExpandModelProperty_T = Literal[
     "author",
+    "baseModels",
     "cardData",
+    "childrenModelCount",
     "config",
     "createdAt",
     "disabled",
     "downloads",
     "downloadsAllTime",
     "gated",
+    "gguf",
     "inference",
     "lastModified",
     "library_name",
@@ -163,6 +170,7 @@ ExpandModelProperty_T = Literal[
     "spaces",
     "tags",
     "transformersInfo",
+    "trendingScore",
     "widgetData",
 ]
 
@@ -182,17 +190,19 @@ ExpandDatasetProperty_T = Literal[
     "private",
     "siblings",
     "sha",
+    "trendingScore",
     "tags",
 ]
 
 ExpandSpaceProperty_T = Literal[
     "author",
     "cardData",
+    "createdAt",
     "datasets",
     "disabled",
     "lastModified",
-    "createdAt",
     "likes",
+    "models",
     "private",
     "runtime",
     "sdk",
@@ -200,7 +210,7 @@ ExpandSpaceProperty_T = Literal[
     "sha",
     "subdomain",
     "tags",
-    "
+    "trendingScore",
 ]
 
 USERNAME_PLACEHOLDER = "hf_user"
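The `Expand*Property_T` literals above drive the `expand` parameter of the listing and info methods, so the new values become requestable fields. A sketch using the newly added model properties (repo id illustrative; requires network access):

    from huggingface_hub import HfApi

    api = HfApi()
    # Ask the API to return only the listed properties, including the new ones.
    model = api.model_info("gpt2", expand=["gguf", "trendingScore"])
    print(model.gguf)
    print(model.trending_score)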
@@ -210,7 +220,11 @@ _CREATE_COMMIT_NO_REPO_ERROR_MESSAGE = (
     "\nNote: Creating a commit assumes that the repo already exists on the"
     " Huggingface Hub. Please use `create_repo` if it's not the case."
 )
-
+_AUTH_CHECK_NO_REPO_ERROR_MESSAGE = (
+    "\nNote: The repository either does not exist or you do not have access rights."
+    " Please check the repository ID and your access permissions."
+    " If this is a private repository, ensure that your token is correct."
+)
 logger = logging.get_logger(__name__)
 
 
@@ -245,7 +259,7 @@ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tu
     """
     input_hf_id = hf_id
 
-    hub_url = re.sub(r"https?://", "", hub_url if hub_url is not None else ENDPOINT)
+    hub_url = re.sub(r"https?://", "", hub_url if hub_url is not None else constants.ENDPOINT)
     is_hf_url = hub_url in hf_id and "@" not in hf_id
 
     HFFS_PREFIX = "hf://"
@@ -262,9 +276,9 @@ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tu
         namespace = None
     if len(url_segments) > 2 and hub_url not in url_segments[-3]:
         repo_type = url_segments[-3]
-    elif namespace in REPO_TYPES_MAPPING:
+    elif namespace in constants.REPO_TYPES_MAPPING:
         # Mean canonical dataset or model
-        repo_type = REPO_TYPES_MAPPING[namespace]
+        repo_type = constants.REPO_TYPES_MAPPING[namespace]
         namespace = None
     else:
         repo_type = None
@@ -273,9 +287,9 @@ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tu
         # Passed <repo_type>/<user>/<model_id> or <repo_type>/<org>/<model_id>
         repo_type, namespace, repo_id = url_segments[-3:]
     elif len(url_segments) == 2:
-        if url_segments[0] in REPO_TYPES_MAPPING:
+        if url_segments[0] in constants.REPO_TYPES_MAPPING:
             # Passed '<model_id>' or 'datasets/<dataset_id>' for a canonical model or dataset
-            repo_type = REPO_TYPES_MAPPING[url_segments[0]]
+            repo_type = constants.REPO_TYPES_MAPPING[url_segments[0]]
             namespace = None
             repo_id = hf_id.split("/")[-1]
         else:
@@ -290,11 +304,11 @@ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tu
         raise ValueError(f"Unable to retrieve user and repo ID from the passed HF ID: {hf_id}")
 
     # Check if repo type is known (mapping "spaces" => "space" + empty value => `None`)
-    if repo_type in REPO_TYPES_MAPPING:
-        repo_type = REPO_TYPES_MAPPING[repo_type]
+    if repo_type in constants.REPO_TYPES_MAPPING:
+        repo_type = constants.REPO_TYPES_MAPPING[repo_type]
     if repo_type == "":
         repo_type = None
-    if repo_type not in REPO_TYPES:
+    if repo_type not in constants.REPO_TYPES:
         raise ValueError(f"Unknown `repo_type`: '{repo_type}' ('{input_hf_id}')")
 
     return repo_type, namespace, repo_id
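Behavior of `repo_type_and_id_from_hf_id` is unchanged; only the constant lookups moved to `constants`. A quick sketch of the mapping it performs:

    from huggingface_hub.hf_api import repo_type_and_id_from_hf_id

    # "datasets/..." is resolved through constants.REPO_TYPES_MAPPING.
    print(repo_type_and_id_from_hf_id("datasets/squad"))
    # ('dataset', None, 'squad')

    # Plain model URLs yield repo_type=None (the default repo type).
    print(repo_type_and_id_from_hf_id("https://huggingface.co/username/my-model"))
    # (None, 'username', 'my-model')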
@@ -385,6 +399,9 @@ class CommitInfo(str):
             `create_pr=True` is passed. Can be passed as `discussion_num` in
             [`get_discussion_details`]. Example: `1`.
 
+        repo_url (`RepoUrl`):
+            Repo URL of the commit containing info like repo_id, repo_type, etc.
+
         _url (`str`, *optional*):
             Legacy url for `str` compatibility. Can be the url to the uploaded file on the Hub (if returned by
             [`upload_file`]), to the uploaded folder on the Hub (if returned by [`upload_folder`]) or to the commit on
@@ -398,6 +415,9 @@ class CommitInfo(str):
     oid: str
     pr_url: Optional[str] = None
 
+    # Computed from `commit_url` in `__post_init__`
+    repo_url: RepoUrl = field(init=False)
+
     # Computed from `pr_url` in `__post_init__`
     pr_revision: Optional[str] = field(init=False)
     pr_num: Optional[str] = field(init=False)
@@ -413,6 +433,10 @@ class CommitInfo(str):
 
         See https://docs.python.org/3.10/library/dataclasses.html#post-init-processing.
         """
+        # Repo info
+        self.repo_url = RepoUrl(self.commit_url.split("/commit/")[0])
+
+        # PR info
         if self.pr_url is not None:
             self.pr_revision = _parse_revision_from_pr_url(self.pr_url)
             self.pr_num = int(self.pr_revision.split("/")[-1])
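Since `repo_url` is now derived from `commit_url` in `__post_init__`, every returned `CommitInfo` exposes the target repository. A minimal sketch (repo id illustrative; requires write access and a token):

    from huggingface_hub import HfApi

    api = HfApi()
    commit = api.upload_file(
        path_or_fileobj=b"hello",
        path_in_repo="hello.txt",
        repo_id="username/my-model",
    )
    print(commit.repo_url.repo_id)    # 'username/my-model'
    print(commit.repo_url.repo_type)  # 'model'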
@@ -488,7 +512,7 @@ class WebhookInfo:
     id: str
     url: str
     watched: List[WebhookWatchedItem]
-    domains: List[WEBHOOK_DOMAIN_T]
+    domains: List[constants.WEBHOOK_DOMAIN_T]
     secret: Optional[str]
     disabled: bool
 
@@ -540,14 +564,14 @@ class RepoUrl(str):
     def __init__(self, url: Any, endpoint: Optional[str] = None) -> None:
         super().__init__()
         # Parse URL
-        self.endpoint = endpoint or ENDPOINT
+        self.endpoint = endpoint or constants.ENDPOINT
         repo_type, namespace, repo_name = repo_type_and_id_from_hf_id(self, hub_url=self.endpoint)
 
         # Populate fields
         self.namespace = namespace
         self.repo_name = repo_name
         self.repo_id = repo_name if namespace is None else f"{namespace}/{repo_name}"
-        self.repo_type = repo_type or REPO_TYPE_MODEL
+        self.repo_type = repo_type or constants.REPO_TYPE_MODEL
         self.url = str(self)  # just in case it's needed
 
     def __repr__(self) -> str:
@@ -703,13 +727,17 @@ class ModelInfo:
             Is the repo private.
         disabled (`bool`, *optional*):
             Is the repo disabled.
-        gated (`Literal["auto", "manual", False]`, *optional*):
-            Is the repo gated.
-            If so, whether there is manual or automatic approval.
         downloads (`int`):
             Number of downloads of the model over the last 30 days.
         downloads_all_time (`int`):
             Cumulated number of downloads of the model since its creation.
+        gated (`Literal["auto", "manual", False]`, *optional*):
+            Is the repo gated.
+            If so, whether there is manual or automatic approval.
+        inference (`Literal["cold", "frozen", "warm"]`, *optional*):
+            Status of the model on the inference API.
+            Warm models are available for immediate use. Cold models will be loaded on first inference call.
+            Frozen models are not available in Inference API.
         likes (`int`):
             Number of likes of the model.
         library_name (`str`, *optional*):
@@ -729,6 +757,8 @@ class ModelInfo:
             Model configuration.
         transformers_info (`TransformersInfo`, *optional*):
             Transformers-specific info (auto class, processor, etc.) associated with the model.
+        trending_score (`int`, *optional*):
+            Trending score of the model.
         card_data (`ModelCardData`, *optional*):
             Model Card Metadata as a [`huggingface_hub.repocard_data.ModelCardData`] object.
         siblings (`List[RepoSibling]`):
@@ -745,10 +775,12 @@ class ModelInfo:
     created_at: Optional[datetime]
     last_modified: Optional[datetime]
     private: Optional[bool]
-    gated: Optional[Literal["auto", "manual", False]]
     disabled: Optional[bool]
     downloads: Optional[int]
     downloads_all_time: Optional[int]
+    gated: Optional[Literal["auto", "manual", False]]
+    gguf: Optional[Dict]
+    inference: Optional[Literal["warm", "cold", "frozen"]]
     likes: Optional[int]
     library_name: Optional[str]
     tags: Optional[List[str]]
@@ -759,6 +791,7 @@ class ModelInfo:
     model_index: Optional[Dict]
     config: Optional[Dict]
     transformers_info: Optional[TransformersInfo]
+    trending_score: Optional[int]
     siblings: Optional[List[RepoSibling]]
     spaces: Optional[List[str]]
     safetensors: Optional[SafeTensorsInfo]
@@ -778,9 +811,13 @@ class ModelInfo:
         self.downloads_all_time = kwargs.pop("downloadsAllTime", None)
         self.likes = kwargs.pop("likes", None)
         self.library_name = kwargs.pop("library_name", None)
+        self.gguf = kwargs.pop("gguf", None)
+        self.inference = kwargs.pop("inference", None)
         self.tags = kwargs.pop("tags", None)
         self.pipeline_tag = kwargs.pop("pipeline_tag", None)
         self.mask_token = kwargs.pop("mask_token", None)
+        self.trending_score = kwargs.pop("trendingScore", None)
+
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
         self.card_data = (
             ModelCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -810,7 +847,7 @@ class ModelInfo:
                 )
                 for sibling in siblings
             ]
-            if siblings
+            if siblings is not None
             else None
         )
         self.spaces = kwargs.pop("spaces", None)
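`ModelInfo.__init__` maps the raw payload keys (`gguf`, `inference`, `trendingScore`) onto the new attributes. A sketch constructing one directly from a made-up payload, something `model_info`/`list_models` normally does for you:

    from huggingface_hub.hf_api import ModelInfo

    payload = {"id": "username/my-model", "inference": "warm", "trendingScore": 42}  # values made up
    info = ModelInfo(**payload)
    print(info.inference)       # 'warm'
    print(info.trending_score)  # 42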
@@ -875,6 +912,10 @@ class DatasetInfo:
             Model Card Metadata as a [`huggingface_hub.repocard_data.DatasetCardData`] object.
         siblings (`List[RepoSibling]`):
             List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the dataset.
+        paperswithcode_id (`str`, *optional*):
+            Papers with code ID of the dataset.
+        trending_score (`int`, *optional*):
+            Trending score of the dataset.
     """
 
     id: str
@@ -890,6 +931,7 @@ class DatasetInfo:
     likes: Optional[int]
     paperswithcode_id: Optional[str]
     tags: Optional[List[str]]
+    trending_score: Optional[int]
     card_data: Optional[DatasetCardData]
     siblings: Optional[List[RepoSibling]]
 
@@ -909,6 +951,8 @@ class DatasetInfo:
         self.likes = kwargs.pop("likes", None)
         self.paperswithcode_id = kwargs.pop("paperswithcode_id", None)
         self.tags = kwargs.pop("tags", None)
+        self.trending_score = kwargs.pop("trendingScore", None)
+
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
         self.card_data = (
             DatasetCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -932,7 +976,7 @@ class DatasetInfo:
                 )
                 for sibling in siblings
             ]
-            if siblings
+            if siblings is not None
             else None
         )
 
@@ -994,6 +1038,8 @@ class SpaceInfo:
             List of models used by the Space.
         datasets (`List[str]`, *optional*):
             List of datasets used by the Space.
+        trending_score (`int`, *optional*):
+            Trending score of the Space.
     """
 
     id: str
@@ -1010,6 +1056,7 @@ class SpaceInfo:
     sdk: Optional[str]
     tags: Optional[List[str]]
     siblings: Optional[List[RepoSibling]]
+    trending_score: Optional[int]
     card_data: Optional[SpaceCardData]
     runtime: Optional[SpaceRuntime]
     models: Optional[List[str]]
@@ -1031,6 +1078,7 @@ class SpaceInfo:
         self.likes = kwargs.pop("likes", None)
        self.sdk = kwargs.pop("sdk", None)
         self.tags = kwargs.pop("tags", None)
+        self.trending_score = kwargs.pop("trendingScore", None)
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
         self.card_data = (
             SpaceCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -1054,7 +1102,7 @@ class SpaceInfo:
                 )
                 for sibling in siblings
             ]
-            if siblings
+            if siblings is not None
             else None
         )
         runtime = kwargs.pop("runtime", None)
@@ -1184,7 +1232,7 @@ class Collection:
         self.description = kwargs.pop("description", None)
         endpoint = kwargs.pop("endpoint", None)
         if endpoint is None:
-            endpoint = ENDPOINT
+            endpoint = constants.ENDPOINT
         self._url = f"{endpoint}/collections/{self.slug}"
 
     @property
@@ -1299,18 +1347,49 @@ class UserLikes:
     spaces: List[str]
 
 
+@dataclass
+class Organization:
+    """
+    Contains information about an organization on the Hub.
+
+    Attributes:
+        avatar_url (`str`):
+            URL of the organization's avatar.
+        name (`str`):
+            Name of the organization on the Hub (unique).
+        fullname (`str`):
+            Organization's full name.
+    """
+
+    avatar_url: str
+    name: str
+    fullname: str
+
+    def __init__(self, **kwargs) -> None:
+        self.avatar_url = kwargs.pop("avatarUrl", "")
+        self.name = kwargs.pop("name", "")
+        self.fullname = kwargs.pop("fullname", "")
+
+        # forward compatibility
+        self.__dict__.update(**kwargs)
+
+
 @dataclass
 class User:
     """
     Contains information about a user on the Hub.
 
     Attributes:
-        avatar_url (`str`):
-            URL of the user's avatar.
         username (`str`):
             Name of the user on the Hub (unique).
         fullname (`str`):
             User's full name.
+        avatar_url (`str`):
+            URL of the user's avatar.
+        details (`str`, *optional*):
+            User's details.
+        is_following (`bool`, *optional*):
+            Whether the authenticated user is following this user.
         is_pro (`bool`, *optional*):
             Whether the user is a pro user.
         num_models (`int`, *optional*):
@@ -1327,16 +1406,20 @@ class User:
             Number of upvotes received by the user.
         num_likes (`int`, *optional*):
             Number of likes given by the user.
-
-
-
-
+        num_following (`int`, *optional*):
+            Number of users this user is following.
+        num_followers (`int`, *optional*):
+            Number of users following this user.
+        orgs (list of [`Organization`]):
+            List of organizations the user is part of.
     """
 
     # Metadata
-    avatar_url: str
     username: str
     fullname: str
+    avatar_url: str
+    details: Optional[str] = None
+    is_following: Optional[bool] = None
     is_pro: Optional[bool] = None
     num_models: Optional[int] = None
     num_datasets: Optional[int] = None
@@ -1345,24 +1428,28 @@ class User:
     num_papers: Optional[int] = None
     num_upvotes: Optional[int] = None
     num_likes: Optional[int] = None
-
-
+    num_following: Optional[int] = None
+    num_followers: Optional[int] = None
+    orgs: List[Organization] = field(default_factory=list)
 
     def __init__(self, **kwargs) -> None:
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
-        self.
+        self.username = kwargs.pop("user", "")
+        self.fullname = kwargs.pop("fullname", "")
+        self.avatar_url = kwargs.pop("avatarUrl", "")
+        self.is_following = kwargs.pop("isFollowing", None)
+        self.is_pro = kwargs.pop("isPro", None)
+        self.details = kwargs.pop("details", None)
+        self.num_models = kwargs.pop("numModels", None)
+        self.num_datasets = kwargs.pop("numDatasets", None)
+        self.num_spaces = kwargs.pop("numSpaces", None)
+        self.num_discussions = kwargs.pop("numDiscussions", None)
+        self.num_papers = kwargs.pop("numPapers", None)
+        self.num_upvotes = kwargs.pop("numUpvotes", None)
+        self.num_likes = kwargs.pop("numLikes", None)
+        self.num_following = kwargs.pop("numFollowing", None)
+        self.num_followers = kwargs.pop("numFollowers", None)
+        self.user_type = kwargs.pop("type", None)
+        self.orgs = [Organization(**org) for org in kwargs.pop("orgs", [])]
 
         # forward compatibility
         self.__dict__.update(**kwargs)
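`User.__init__` now maps the camelCase payload onto snake_case attributes and parses nested organizations into `Organization` objects. A sketch with a made-up payload:

    from huggingface_hub.hf_api import User

    payload = {  # shape of an API response; values made up
        "user": "julien",
        "fullname": "Julien D.",
        "avatarUrl": "https://example.com/avatar.png",
        "numFollowers": 10,
        "orgs": [{"name": "my-org", "fullname": "My Org", "avatarUrl": ""}],
    }
    user = User(**payload)
    print(user.username, user.num_followers)  # julien 10
    print([org.name for org in user.orgs])    # ['my-org']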
@@ -1437,7 +1524,7 @@ class HfApi:
             Additional headers to be sent with each request. Example: `{"X-My-Header": "value"}`.
             Headers passed here are taking precedence over the default headers.
         """
-        self.endpoint = endpoint if endpoint is not None else ENDPOINT
+        self.endpoint = endpoint if endpoint is not None else constants.ENDPOINT
         self.token = token
         self.library_name = library_name
         self.library_version = library_version
@@ -1564,6 +1651,8 @@ class HfApi:
         # Search-query parameter
         filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
+        gated: Optional[bool] = None,
+        inference: Optional[Literal["cold", "frozen", "warm"]] = None,
         library: Optional[Union[str, List[str]]] = None,
         language: Optional[Union[str, List[str]]] = None,
         model_name: Optional[str] = None,
@@ -1592,7 +1681,15 @@ class HfApi:
                 A string or list of string to filter models on the Hub.
             author (`str`, *optional*):
                 A string which identify the author (user or organization) of the
-                returned models
+                returned models.
+            gated (`bool`, *optional*):
+                A boolean to filter models on the Hub that are gated or not. By default, all models are returned.
+                If `gated=True` is passed, only gated models are returned.
+                If `gated=False` is passed, only non-gated models are returned.
+            inference (`Literal["cold", "frozen", "warm"]`, *optional*):
+                A string to filter models on the Hub by their state on the Inference API.
+                Warm models are available for immediate use. Cold models will be loaded on first inference call.
+                Frozen models are not available in Inference API.
             library (`str` or `List`, *optional*):
                 A string or list of strings of foundational libraries models were
                 originally trained from, such as pytorch, tensorflow, or allennlp.
@@ -1630,7 +1727,7 @@ class HfApi:
             expand (`List[ExpandModelProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed.
-                Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"` and `"widgetData"`.
+                Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"` and `"widgetData"`.
             full (`bool`, *optional*):
                 Whether to fetch all model data, including the `last_modified`,
                 the `sha`, the files and the `tags`. This is set to `True` by
@@ -1718,6 +1815,10 @@ class HfApi:
         # Handle other query params
         if author:
             params["author"] = author
+        if gated is not None:
+            params["gated"] = gated
+        if inference is not None:
+            params["inference"] = inference
         if pipeline_tag:
             params["pipeline_tag"] = pipeline_tag
         search_list = []
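The new `gated` and `inference` arguments are forwarded verbatim as query parameters (see the `params` handling above). Sketch (requires network access):

    from huggingface_hub import HfApi

    api = HfApi()
    # Only non-gated models that are warm on the Inference API.
    for model in api.list_models(gated=False, inference="warm", limit=5):
        print(model.id)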
@@ -1764,6 +1865,7 @@ class HfApi:
         author: Optional[str] = None,
         benchmark: Optional[Union[str, List[str]]] = None,
         dataset_name: Optional[str] = None,
+        gated: Optional[bool] = None,
         language_creators: Optional[Union[str, List[str]]] = None,
         language: Optional[Union[str, List[str]]] = None,
         multilinguality: Optional[Union[str, List[str]]] = None,
@@ -1795,6 +1897,10 @@ class HfApi:
             dataset_name (`str`, *optional*):
                 A string or list of strings that can be used to identify datasets on
                 the Hub by its name, such as `SQAC` or `wikineural`
+            gated (`bool`, *optional*):
+                A boolean to filter datasets on the Hub that are gated or not. By default, all datasets are returned.
+                If `gated=True` is passed, only gated datasets are returned.
+                If `gated=False` is passed, only non-gated datasets are returned.
             language_creators (`str` or `List`, *optional*):
                 A string or list of strings that can be used to identify datasets on
                 the Hub with how the data was curated, such as `crowdsourced` or
@@ -1833,7 +1939,7 @@ class HfApi:
             expand (`List[ExpandDatasetProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `full` is passed.
-                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"` and `"
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"` and `"trendingScore"`.
             full (`bool`, *optional*):
                 Whether to fetch all dataset data, including the `last_modified`,
                 the `card_data` and the files. Can contain useful information such as the
@@ -1923,6 +2029,8 @@ class HfApi:
         # Handle other query params
         if author:
             params["author"] = author
+        if gated is not None:
+            params["gated"] = gated
         search_list = []
         if dataset_name:
             search_list.append(dataset_name)
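`list_datasets` gains the same `gated` filter. Sketch (author illustrative; requires network access):

    from huggingface_hub import HfApi

    api = HfApi()
    for ds in api.list_datasets(author="username", gated=True, limit=5):
        print(ds.id)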
@@ -2014,7 +2122,7 @@ class HfApi:
             expand (`List[ExpandSpaceProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `full` is passed.
-                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"
+                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"trendingScore"`.
             full (`bool`, *optional*):
                 Whether to fetch all Spaces data, including the `last_modified`, `siblings`
                 and `card_data` fields.
@@ -2110,7 +2218,7 @@ class HfApi:
         ```
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         response = get_session().post(
             url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/like",
             headers=self._build_hf_headers(token=token),
@@ -2161,7 +2269,7 @@ class HfApi:
         ```
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         response = get_session().delete(
             url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/like", headers=self._build_hf_headers(token=token)
         )
@@ -2250,7 +2358,7 @@ class HfApi:
         *,
         repo_type: Optional[str] = None,
         token: Union[bool, str, None] = None,
-    ) ->
+    ) -> Iterable[User]:
         """
         List all users who liked a given repo on the hugging Face Hub.
 
@@ -2272,29 +2380,15 @@ class HfApi:
             `None`.
 
         Returns:
-            `
+            `Iterable[User]`: an iterable of [`huggingface_hub.hf_api.User`] objects.
         """
 
         # Construct the API endpoint
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/likers"
-
-
-        # Make the request
-        response = get_session().get(path, headers=headers)
-        hf_raise_for_status(response)
-
-        # Parse the results into User objects
-        likers_data = response.json()
-        return [
-            User(
-                username=user_data["user"],
-                fullname=user_data["fullname"],
-                avatar_url=user_data["avatarUrl"],
-            )
-            for user_data in likers_data
-        ]
+        for liker in paginate(path, params={}, headers=self._build_hf_headers(token=token)):
+            yield User(username=liker["user"], fullname=liker["fullname"], avatar_url=liker["avatarUrl"])
 
     @validate_hf_hub_args
     def model_info(
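`list_repo_likers` now paginates lazily and yields `User` objects instead of materializing a list in one request. Sketch (repo id illustrative):

    from huggingface_hub import HfApi

    api = HfApi()
    for user in api.list_repo_likers("username/my-model"):  # iterator as of 0.25.0
        print(user.username)

    # To recover the old list behavior:
    likers = list(api.list_repo_likers("username/my-model"))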
@@ -2331,7 +2425,7 @@ class HfApi:
             expand (`List[ExpandModelProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
-                Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"` and `"widgetData"`.
+                Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"` and `"widgetData"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2405,7 +2499,7 @@ class HfApi:
             expand (`List[ExpandDatasetProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `files_metadata` is passed.
-                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"` and `"
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"` and `"trendingScore"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2478,7 +2572,7 @@ class HfApi:
             expand (`List[ExpandSpaceProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `full` is passed.
-                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"
+                Possible values are `"author"`, `"cardData"`, `"createdAt"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"trendingScore"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2903,8 +2997,8 @@ class HfApi:
         ]
         ```
         """
-        repo_type = repo_type or REPO_TYPE_MODEL
-        revision = quote(revision, safe="") if revision is not None else DEFAULT_REVISION
+        repo_type = repo_type or constants.REPO_TYPE_MODEL
+        revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
         headers = self._build_hf_headers(token=token)
 
         encoded_path_in_repo = "/" + quote(path_in_repo, safe="") if path_in_repo else ""
@@ -2963,7 +3057,7 @@ class HfApi:
         [`GitRefs`]: object containing all information about branches and tags for a
         repo on the Hub.
         """
-        repo_type = repo_type or REPO_TYPE_MODEL
+        repo_type = repo_type or constants.REPO_TYPE_MODEL
         response = get_session().get(
             f"{self.endpoint}/api/{repo_type}s/{repo_id}/refs",
             headers=self._build_hf_headers(token=token),
@@ -3049,8 +3143,8 @@ class HfApi:
             [`~utils.RevisionNotFoundError`]:
                 If revision is not found (error 404) on the repo.
         """
-        repo_type = repo_type or REPO_TYPE_MODEL
-        revision = quote(revision, safe="") if revision is not None else DEFAULT_REVISION
+        repo_type = repo_type or constants.REPO_TYPE_MODEL
+        revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
 
         # Paginate over results and return the list of commits.
         return [
@@ -3128,8 +3222,8 @@ class HfApi:
         ]
         ```
         """
-        repo_type = repo_type or REPO_TYPE_MODEL
-        revision = quote(revision, safe="") if revision is not None else DEFAULT_REVISION
+        repo_type = repo_type or constants.REPO_TYPE_MODEL
+        revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
         headers = self._build_hf_headers(token=token)
 
         response = get_session().post(
@@ -3218,11 +3312,11 @@ class HfApi:
         ```
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
-        if repo_type not in REPO_TYPES:
+            repo_type = constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
             raise ValueError("Invalid repo type")
         if branch is None:
-            branch = DEFAULT_REVISION
+            branch = constants.DEFAULT_REVISION
 
         # Prepare request
         url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/super-squash/{branch}"
@@ -3300,7 +3394,7 @@ class HfApi:
 
         path = f"{self.endpoint}/api/repos/create"
 
-        if repo_type not in REPO_TYPES:
+        if repo_type not in constants.REPO_TYPES:
             raise ValueError("Invalid repo type")
 
         json: Dict[str, Any] = {"name": name, "organization": organization, "private": private}
@@ -3310,10 +3404,10 @@ class HfApi:
             if space_sdk is None:
                 raise ValueError(
                     "No space_sdk provided. `create_repo` expects space_sdk to be one"
-                    f" of {SPACES_SDK_TYPES} when repo_type is 'space'`"
+                    f" of {constants.SPACES_SDK_TYPES} when repo_type is 'space'`"
                 )
-            if space_sdk not in SPACES_SDK_TYPES:
-                raise ValueError(f"Invalid space_sdk. Please choose one of {SPACES_SDK_TYPES}.")
+            if space_sdk not in constants.SPACES_SDK_TYPES:
+                raise ValueError(f"Invalid space_sdk. Please choose one of {constants.SPACES_SDK_TYPES}.")
             json["sdk"] = space_sdk
 
         if space_sdk is not None and repo_type != "space":
@@ -3369,7 +3463,7 @@ class HfApi:
             # No write permission on the namespace but repo might already exist
             try:
                 self.repo_info(repo_id=repo_id, repo_type=repo_type, token=token)
-                if repo_type is None or repo_type == REPO_TYPE_MODEL:
+                if repo_type is None or repo_type == constants.REPO_TYPE_MODEL:
                     return RepoUrl(f"{self.endpoint}/{repo_id}")
                 return RepoUrl(f"{self.endpoint}/{repo_type}/{repo_id}")
             except HfHubHTTPError:
@@ -3415,7 +3509,7 @@ class HfApi:
 
         path = f"{self.endpoint}/api/repos/delete"
 
-        if repo_type not in REPO_TYPES:
+        if repo_type not in constants.REPO_TYPES:
             raise ValueError("Invalid repo type")
 
         json = {"name": name, "organization": organization}
@@ -3469,10 +3563,10 @@ class HfApi:
 
         </Tip>
         """
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL  # default repo type
+            repo_type = constants.REPO_TYPE_MODEL  # default repo type
 
         r = get_session().put(
             url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
@@ -3482,6 +3576,62 @@ class HfApi:
         hf_raise_for_status(r)
         return r.json()
 
+    @validate_hf_hub_args
+    def update_repo_settings(
+        self,
+        repo_id: str,
+        *,
+        gated: Literal["auto", "manual", False] = False,
+        token: Union[str, bool, None] = None,
+        repo_type: Optional[str] = None,
+    ) -> None:
+        """
+        Update the gated settings of a repository.
+        To give more control over how repos are used, the Hub allows repo authors to enable **access requests** for their repos.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a /.
+            gated (`Literal["auto", "manual", False]`, *optional*):
+                The gated release status for the repository.
+                * "auto": The repository is gated, and access requests are automatically approved or denied based on predefined criteria.
+                * "manual": The repository is gated, and access requests require manual approval.
+                * False (default): The repository is not gated, and anyone can access it.
+            token (`Union[str, bool, None]`, *optional*):
+                A valid user access token (string). Defaults to the locally saved token,
+                which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass False.
+            repo_type (`str`, *optional*):
+                The type of the repository to update settings from (`"model"`, `"dataset"` or `"space"`.
+                Defaults to `"model"`.
+
+        Raises:
+            [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+                If gated is not one of "auto", "manual", or False.
+            [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+                If repo_type is not one of the values in constants.REPO_TYPES.
+            [`~utils.HfHubHTTPError`]:
+                If the request to the Hugging Face Hub API fails.
+        """
+        if gated not in ["auto", "manual", False]:
+            raise ValueError(f"Invalid gated status, must be one of 'auto', 'manual', or False. Got '{gated}'.")
+
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL  # default repo type
+
+        # Build headers
+        headers = self._build_hf_headers(token=token)
+
+        r = get_session().put(
+            url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
+            headers=headers,
+            json={"gated": gated},
+        )
+        hf_raise_for_status(r)
+
     def move_repo(
         self,
         from_id: str,
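A minimal usage sketch for the new method (repo id illustrative; requires a token with write access):

    from huggingface_hub import HfApi

    api = HfApi()
    # Gate the repo: access requests now need manual approval.
    api.update_repo_settings("username/my-model", gated="manual")
    # Turn gating off again.
    api.update_repo_settings("username/my-model", gated=False)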
@@ -3531,7 +3681,7 @@ class HfApi:
             raise ValueError(f"Invalid repo_id: {to_id}. It should have a namespace (:namespace:/:repo_name:)")
 
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL  # Hub won't accept `None`.
+            repo_type = constants.REPO_TYPE_MODEL  # Hub won't accept `None`.
 
         json = {"fromRepo": from_id, "toRepo": to_id, "type": repo_type}
 
@@ -3701,19 +3851,19 @@ class HfApi:
             If repository is not found (error 404): wrong repo_id/repo_type, private
             but not authenticated or repo does not exist.
         """
-        if parent_commit is not None and not REGEX_COMMIT_OID.fullmatch(parent_commit):
+        if parent_commit is not None and not constants.REGEX_COMMIT_OID.fullmatch(parent_commit):
             raise ValueError(
-                f"`parent_commit` is not a valid commit OID. It must match the following regex: {REGEX_COMMIT_OID}"
+                f"`parent_commit` is not a valid commit OID. It must match the following regex: {constants.REGEX_COMMIT_OID}"
             )
 
         if commit_message is None or len(commit_message) == 0:
             raise ValueError("`commit_message` can't be empty, please pass a value.")
 
         commit_description = commit_description if commit_description is not None else ""
-        repo_type = repo_type if repo_type is not None else REPO_TYPE_MODEL
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
-        unquoted_revision = revision or DEFAULT_REVISION
+        repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        unquoted_revision = revision or constants.DEFAULT_REVISION
         revision = quote(unquoted_revision, safe="")
         create_pr = create_pr if create_pr is not None else False
 
@@ -3733,6 +3883,15 @@ class HfApi:
                 " new CommitOperationAdd object if you want to create a new commit."
             )
 
+        if repo_type != "dataset":
+            for addition in additions:
+                if addition.path_in_repo.endswith((".arrow", ".parquet")):
+                    warnings.warn(
+                        f"It seems that you are about to commit a data file ({addition.path_in_repo}) to a {repo_type}"
+                        " repository. You are sure this is intended? If you are trying to upload a dataset, please"
+                        " set `repo_type='dataset'` or `--repo-type=dataset` in a CLI."
+                    )
+
         logger.debug(
             f"About to commit to the hub: {len(additions)} addition(s), {len(copies)} copie(s) and"
             f" {nb_deletions} deletion(s)."
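In practice the new warning fires when `.arrow`/`.parquet` files are committed to a non-dataset repo; passing `repo_type="dataset"` is the intended fix. Sketch (paths and repo id illustrative):

    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="train.parquet",   # local data file
        path_in_repo="train.parquet",
        repo_id="username/my-dataset",
        repo_type="dataset",               # avoids the new warning
    )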
@@ -3743,26 +3902,10 @@ class HfApi:
         for addition in additions:
             if addition.path_in_repo == "README.md":
                 with addition.as_file() as file:
-
-
-
-
-                    )
-                    # Handle warnings (example: empty metadata)
-                    response_content = response.json()
-                    message = "\n".join(
-                        [f"- {warning.get('message')}" for warning in response_content.get("warnings", [])]
-                    )
-                    if message:
-                        warnings.warn(f"Warnings while validating metadata in README.md:\n{message}")
-
-                    # Raise on errors
-                    try:
-                        hf_raise_for_status(response)
-                    except BadRequestError as e:
-                        errors = response_content.get("errors", [])
-                        message = "\n".join([f"- {error.get('message')}" for error in errors])
-                        raise ValueError(f"Invalid metadata in README.md.\n{message}") from e
+                    content = file.read().decode()
+                    self._validate_yaml(content, repo_type=repo_type, token=token)
+                    # Skip other additions after `README.md` has been processed
+                    break
 
         # If updating twice the same file or update then delete a file in a single commit
         _warn_on_overwriting_operations(operations)
@@ -3808,7 +3951,7 @@ class HfApi:
 
         # Return commit info based on latest commit
         url_prefix = self.endpoint
-        if repo_type is not None and repo_type != REPO_TYPE_MODEL:
+        if repo_type is not None and repo_type != constants.REPO_TYPE_MODEL:
             url_prefix = f"{url_prefix}/{repo_type}s"
         return CommitInfo(
             commit_url=f"{url_prefix}/{repo_id}/commit/{info.sha}",
@@ -3876,6 +4019,9 @@ class HfApi:
 
     @experimental
     @validate_hf_hub_args
+    @_deprecate_method(
+        version="0.27", message="This is an experimental feature. Please use `upload_large_folder` instead."
+    )
     def create_commits_on_pr(
         self,
         *,
@@ -4043,7 +4189,7 @@ class HfApi:
|
|
|
4043
4189
|
commits_on_main_branch = {
|
|
4044
4190
|
commit.commit_id
|
|
4045
4191
|
for commit in self.list_repo_commits(
|
|
4046
|
-
repo_id=repo_id, repo_type=repo_type, token=token, revision=DEFAULT_REVISION
|
|
4192
|
+
repo_id=repo_id, repo_type=repo_type, token=token, revision=constants.DEFAULT_REVISION
|
|
4047
4193
|
)
|
|
4048
4194
|
}
|
|
4049
4195
|
pr_commits = [
|
|
@@ -4262,10 +4408,10 @@ class HfApi:
|
|
|
4262
4408
|
>>> create_commit(repo_id, operations=operations, commit_message="Commit all shards")
|
|
4263
4409
|
```
|
|
4264
4410
|
"""
|
|
4265
|
-
repo_type = repo_type if repo_type is not None else REPO_TYPE_MODEL
|
|
4266
|
-
if repo_type not in REPO_TYPES:
|
|
4267
|
-
raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
|
|
4268
|
-
revision = quote(revision, safe="") if revision is not None else DEFAULT_REVISION
|
|
4411
|
+
repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL
|
|
4412
|
+
if repo_type not in constants.REPO_TYPES:
|
|
4413
|
+
raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
|
|
4414
|
+
revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
|
|
4269
4415
|
create_pr = create_pr if create_pr is not None else False
|
|
4270
4416
|
headers = self._build_hf_headers(token=token)
|
|
4271
4417
|
|
|
@@ -4493,8 +4639,8 @@ class HfApi:
|
|
|
4493
4639
|
"https://huggingface.co/username/my-model/blob/refs%2Fpr%2F1/remote/file/path.h5"
|
|
4494
4640
|
```
|
|
4495
4641
|
"""
|
|
4496
|
-
if repo_type not in REPO_TYPES:
|
|
4497
|
-
raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
|
|
4642
|
+
if repo_type not in constants.REPO_TYPES:
|
|
4643
|
+
raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
|
|
4498
4644
|
|
|
4499
4645
|
commit_message = (
|
|
4500
4646
|
commit_message if commit_message is not None else f"Upload {path_in_repo} with huggingface_hub"
|
|
@@ -4518,9 +4664,9 @@ class HfApi:
 
         if commit_info.pr_url is not None:
             revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
-        if repo_type in REPO_TYPES_URL_PREFIXES:
-            repo_id = REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
-        revision = revision if revision is not None else DEFAULT_REVISION
+        if repo_type in constants.REPO_TYPES_URL_PREFIXES:
+            repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
+        revision = revision if revision is not None else constants.DEFAULT_REVISION
 
         return CommitInfo(
             commit_url=commit_info.commit_url,
@@ -4714,8 +4860,10 @@ class HfApi:
                 new files. This is useful if you don't know which files have already been uploaded.
                 Note: to avoid discrepancies the `.gitattributes` file is not deleted even if it matches the pattern.
             multi_commits (`bool`):
+                Deprecated. For large uploads, use `upload_large_folder` instead.
                 If True, changes are pushed to a PR using a multi-commit process. Defaults to `False`.
             multi_commits_verbose (`bool`):
+                Deprecated. For large uploads, use `upload_large_folder` instead.
                 If True and `multi_commits` is used, more information will be displayed to the user.
             run_as_future (`bool`, *optional*):
                 Whether or not to run this method in the background. Background jobs are run sequentially without
@@ -4795,11 +4943,11 @@ class HfApi:
 
         ```
         """
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
 
         if multi_commits:
-            if revision is not None and revision != DEFAULT_REVISION:
+            if revision is not None and revision != constants.DEFAULT_REVISION:
                 raise ValueError("Cannot use `multi_commit` to commit changes other than the main branch.")
 
         # By default, upload folder to the root directory in repo.
@@ -4816,16 +4964,18 @@ class HfApi:
         delete_operations = self._prepare_folder_deletions(
             repo_id=repo_id,
             repo_type=repo_type,
-            revision=DEFAULT_REVISION if create_pr else revision,
+            revision=constants.DEFAULT_REVISION if create_pr else revision,
             token=token,
             path_in_repo=path_in_repo,
             delete_patterns=delete_patterns,
         )
-        add_operations = _prepare_upload_folder_additions(
+        add_operations = self._prepare_upload_folder_additions(
             folder_path,
             path_in_repo,
             allow_patterns=allow_patterns,
             ignore_patterns=ignore_patterns,
+            token=token,
+            repo_type=repo_type,
         )
 
         # Optimize operations: if some files will be overwritten, we don't need to delete them first
@@ -4869,9 +5019,9 @@ class HfApi:
         # Create url to uploaded folder (for legacy return value)
         if create_pr and commit_info.pr_url is not None:
             revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
-        if repo_type in REPO_TYPES_URL_PREFIXES:
-            repo_id = REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
-        revision = revision if revision is not None else DEFAULT_REVISION
+        if repo_type in constants.REPO_TYPES_URL_PREFIXES:
+            repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
+        revision = revision if revision is not None else constants.DEFAULT_REVISION
 
         return CommitInfo(
             commit_url=commit_info.commit_url,
@@ -5118,6 +5268,124 @@ class HfApi:
             parent_commit=parent_commit,
         )
 
+    def upload_large_folder(
+        self,
+        repo_id: str,
+        folder_path: Union[str, Path],
+        *,
+        repo_type: str,  # Repo type is required!
+        revision: Optional[str] = None,
+        private: bool = False,
+        allow_patterns: Optional[Union[List[str], str]] = None,
+        ignore_patterns: Optional[Union[List[str], str]] = None,
+        num_workers: Optional[int] = None,
+        print_report: bool = True,
+        print_report_every: int = 60,
+    ) -> None:
+        """Upload a large folder to the Hub in the most resilient way possible.
+
+        Several workers are started to upload files in an optimized way. Before being committed to a repo, files must be
+        hashed and be pre-uploaded if they are LFS files. Workers will perform these tasks for each file in the folder.
+        At each step, some metadata information about the upload process is saved in the folder under `.cache/.huggingface/`
+        to be able to resume the process if interrupted. The whole process might result in several commits.
+
+        Args:
+            repo_id (`str`):
+                The repository to which the file will be uploaded.
+                E.g. `"HuggingFaceTB/smollm-corpus"`.
+            folder_path (`str` or `Path`):
+                Path to the folder to upload on the local file system.
+            repo_type (`str`):
+                Type of the repository. Must be one of `"model"`, `"dataset"` or `"space"`.
+                Unlike in all other `HfApi` methods, `repo_type` is explicitly required here. This is to avoid
+                any mistake when uploading a large folder to the Hub, and therefore prevent from having to re-upload
+                everything.
+            revision (`str`, `optional`):
+                The branch to commit to. If not provided, the `main` branch will be used.
+            private (`bool`, `optional`):
+                Whether the repository should be private. Defaults to False.
+            allow_patterns (`List[str]` or `str`, *optional*):
+                If provided, only files matching at least one pattern are uploaded.
+            ignore_patterns (`List[str]` or `str`, *optional*):
+                If provided, files matching any of the patterns are not uploaded.
+            num_workers (`int`, *optional*):
+                Number of workers to start. Defaults to `os.cpu_count() - 2` (minimum 2).
+                A higher number of workers may speed up the process if your machine allows it. However, on machines with a
+                slower connection, it is recommended to keep the number of workers low to ensure better resumability.
+                Indeed, partially uploaded files will have to be completely re-uploaded if the process is interrupted.
+            print_report (`bool`, *optional*):
+                Whether to print a report of the upload progress. Defaults to True.
+                Report is printed to `sys.stdout` every X seconds (60 by default) and overwrites the previous report.
+            print_report_every (`int`, *optional*):
+                Frequency at which the report is printed. Defaults to 60 seconds.
+
+        <Tip>
+
+        A few things to keep in mind:
+        - Repository limits still apply: https://huggingface.co/docs/hub/repositories-recommendations
+        - Do not start several processes in parallel.
+        - You can interrupt and resume the process at any time.
+        - Do not upload the same folder to several repositories. If you need to do so, you must delete the local `.cache/.huggingface/` folder first.
+
+        </Tip>
+
+        <Tip warning={true}>
+
+        While being much more robust to upload large folders, `upload_large_folder` is more limited than [`upload_folder`] feature-wise. In practice:
+        - you cannot set a custom `path_in_repo`. If you want to upload to a subfolder, you need to set the proper structure locally.
+        - you cannot set a custom `commit_message` and `commit_description` since multiple commits are created.
+        - you cannot delete from the repo while uploading. Please make a separate commit first.
+        - you cannot create a PR directly. Please create a PR first (from the UI or using [`create_pull_request`]) and then commit to it by passing `revision`.
+
+        </Tip>
+
+        **Technical details:**
+
+        The `upload_large_folder` process is as follows:
+        1. (Check parameters and setup.)
+        2. Create repo if missing.
+        3. List local files to upload.
+        4. Start workers. Workers can perform the following tasks:
+            - Hash a file.
+            - Get upload mode (regular or LFS) for a list of files.
+            - Pre-upload an LFS file.
+            - Commit a bunch of files.
+           Once a worker finishes a task, it will move on to the next task based on the priority list (see below) until
+           all files are uploaded and committed.
+        5. While workers are up, regularly print a report to sys.stdout.
+
+        Order of priority:
+        1. Commit if more than 5 minutes since last commit attempt (and at least 1 file).
+        2. Commit if at least 150 files are ready to commit.
+        3. Get upload mode if at least 10 files have been hashed.
+        4. Pre-upload LFS file if at least 1 file and no worker is pre-uploading.
+        5. Hash file if at least 1 file and no worker is hashing.
+        6. Get upload mode if at least 1 file and no worker is getting upload mode.
+        7. Pre-upload LFS file if at least 1 file (exception: if hf_transfer is enabled, only 1 worker can preupload LFS at a time).
+        8. Hash file if at least 1 file to hash.
+        9. Get upload mode if at least 1 file to get upload mode.
+        10. Commit if at least 1 file to commit and at least 1 min since last commit attempt.
+        11. Commit if at least 1 file to commit and all other queues are empty.
+
+        Special rules:
+        - If `hf_transfer` is enabled, only 1 LFS uploader at a time. Otherwise the CPU would be bloated by `hf_transfer`.
+        - Only one worker can commit at a time.
+        - If no tasks are available, the worker waits for 10 seconds before checking again.
+        """
+        return upload_large_folder_internal(
+            self,
+            repo_id=repo_id,
+            folder_path=folder_path,
+            repo_type=repo_type,
+            revision=revision,
+            private=private,
+            allow_patterns=allow_patterns,
+            ignore_patterns=ignore_patterns,
+            num_workers=num_workers,
+            print_report=print_report,
+            print_report_every=print_report_every,
+        )
+
     @validate_hf_hub_args
     def get_hf_file_metadata(
         self,
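A minimal usage sketch of the new `upload_large_folder` method added above (the repo id and local path below are placeholders):

```python
from huggingface_hub import HfApi

api = HfApi()
# repo_type is deliberately required. Progress is checkpointed under
# <folder>/.cache/.huggingface/, so an interrupted upload can be resumed
# by simply re-running the same call.
api.upload_large_folder(
    repo_id="username/large-dataset",  # placeholder
    folder_path="path/to/local/folder",  # placeholder
    repo_type="dataset",
)
```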
@@ -5125,7 +5393,7 @@ class HfApi:
         url: str,
         token: Union[bool, str, None] = None,
         proxies: Optional[Dict] = None,
-        timeout: Optional[float] = DEFAULT_REQUEST_TIMEOUT,
+        timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT,
     ) -> HfFileMetadata:
         """Fetch metadata of a file versioned on the Hub for a given url.
 
@@ -5172,7 +5440,7 @@ class HfApi:
         local_dir: Union[str, Path, None] = None,
         force_download: bool = False,
         proxies: Optional[Dict] = None,
-        etag_timeout: float = DEFAULT_ETAG_TIMEOUT,
+        etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
         token: Union[bool, str, None] = None,
         local_files_only: bool = False,
         # Deprecated args
@@ -5311,7 +5579,7 @@ class HfApi:
         cache_dir: Union[str, Path, None] = None,
         local_dir: Union[str, Path, None] = None,
         proxies: Optional[Dict] = None,
-        etag_timeout: float = DEFAULT_ETAG_TIMEOUT,
+        etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
         force_download: bool = False,
         token: Union[bool, str, None] = None,
         local_files_only: bool = False,
@@ -5500,23 +5768,41 @@ class HfApi:
         ```
         """
         if self.file_exists(  # Single safetensors file => non-sharded model
-            repo_id=repo_id, filename=SAFETENSORS_SINGLE_FILE, repo_type=repo_type, revision=revision, token=token
+            repo_id=repo_id,
+            filename=constants.SAFETENSORS_SINGLE_FILE,
+            repo_type=repo_type,
+            revision=revision,
+            token=token,
         ):
             file_metadata = self.parse_safetensors_file_metadata(
-                repo_id=repo_id, filename=SAFETENSORS_SINGLE_FILE, repo_type=repo_type, revision=revision, token=token
+                repo_id=repo_id,
+                filename=constants.SAFETENSORS_SINGLE_FILE,
+                repo_type=repo_type,
+                revision=revision,
+                token=token,
             )
             return SafetensorsRepoMetadata(
                 metadata=None,
                 sharded=False,
-                weight_map={tensor_name: SAFETENSORS_SINGLE_FILE for tensor_name in file_metadata.tensors.keys()},
-                files_metadata={SAFETENSORS_SINGLE_FILE: file_metadata},
+                weight_map={
+                    tensor_name: constants.SAFETENSORS_SINGLE_FILE for tensor_name in file_metadata.tensors.keys()
+                },
+                files_metadata={constants.SAFETENSORS_SINGLE_FILE: file_metadata},
             )
         elif self.file_exists(  # Multiple safetensors files => sharded with index
-            repo_id=repo_id, filename=SAFETENSORS_INDEX_FILE, repo_type=repo_type, revision=revision, token=token
+            repo_id=repo_id,
+            filename=constants.SAFETENSORS_INDEX_FILE,
+            repo_type=repo_type,
+            revision=revision,
+            token=token,
        ):
             # Fetch index
             index_file = self.hf_hub_download(
-                repo_id=repo_id, filename=SAFETENSORS_INDEX_FILE, repo_type=repo_type, revision=revision, token=token
+                repo_id=repo_id,
+                filename=constants.SAFETENSORS_INDEX_FILE,
+                repo_type=repo_type,
+                revision=revision,
+                token=token,
             )
             with open(index_file) as f:
                 index = json.load(f)
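For reference, the index file fetched here is a plain JSON document. A minimal sketch of its shape, with placeholder tensor names, shard names, and size (the exact filename behind `constants.SAFETENSORS_INDEX_FILE` is not shown in this diff):

```python
import json

# Placeholder illustration of a sharded safetensors index: "weight_map"
# maps each tensor name to the shard file that stores it.
index = json.loads("""
{
  "metadata": {"total_size": 28966928384},
  "weight_map": {
    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
    "lm_head.weight": "model-00002-of-00002.safetensors"
  }
}
""")
shard_files = sorted(set(index["weight_map"].values()))
print(shard_files)
# ['model-00001-of-00002.safetensors', 'model-00002-of-00002.safetensors']
```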
@@ -5547,7 +5833,7 @@ class HfApi:
         else:
             # Not a safetensors repo
             raise NotASafetensorsRepoError(
-                f"'{repo_id}' is not a safetensors repo. Couldn't find '{SAFETENSORS_INDEX_FILE}' or '{SAFETENSORS_SINGLE_FILE}' files."
+                f"'{repo_id}' is not a safetensors repo. Couldn't find '{constants.SAFETENSORS_INDEX_FILE}' or '{constants.SAFETENSORS_SINGLE_FILE}' files."
             )
 
     def parse_safetensors_file_metadata(
@@ -5608,11 +5894,11 @@ class HfApi:
 
         # 2. Parse metadata size
         metadata_size = struct.unpack("<Q", response.content[:8])[0]
-        if metadata_size > SAFETENSORS_MAX_HEADER_LENGTH:
+        if metadata_size > constants.SAFETENSORS_MAX_HEADER_LENGTH:
             raise SafetensorsParsingError(
                 f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision "
-                f"'{revision or DEFAULT_REVISION}'): safetensors header is too big. Maximum supported size is "
-                f"{SAFETENSORS_MAX_HEADER_LENGTH} bytes (got {metadata_size})."
+                f"'{revision or constants.DEFAULT_REVISION}'): safetensors header is too big. Maximum supported size is "
+                f"{constants.SAFETENSORS_MAX_HEADER_LENGTH} bytes (got {metadata_size})."
             )
 
         # 3.a. Get metadata from payload
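The check above mirrors the safetensors on-disk format: the first 8 bytes are a little-endian `uint64` holding the length of the JSON header that follows. A self-contained sketch of the same parsing for a local file (the 25 MB cap below is an assumption standing in for `constants.SAFETENSORS_MAX_HEADER_LENGTH`, whose value is not shown in this diff):

```python
import json
import struct


def read_safetensors_header(path: str, max_header_len: int = 25_000_000) -> dict:
    """Parse the JSON header of a local .safetensors file."""
    with open(path, "rb") as f:
        # First 8 bytes: little-endian uint64 header length, as in the
        # struct.unpack("<Q", ...) call above.
        (header_len,) = struct.unpack("<Q", f.read(8))
        if header_len > max_header_len:
            raise ValueError(f"safetensors header too big ({header_len} bytes)")
        return json.loads(f.read(header_len))
```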
@@ -5629,7 +5915,7 @@ class HfApi:
         except json.JSONDecodeError as e:
             raise SafetensorsParsingError(
                 f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision "
-                f"'{revision or DEFAULT_REVISION}'): header is not json-encoded string. Please make sure this is a "
+                f"'{revision or constants.DEFAULT_REVISION}'): header is not json-encoded string. Please make sure this is a "
                 "correctly formatted safetensors file."
             ) from e
 
@@ -5649,7 +5935,7 @@ class HfApi:
         except (KeyError, IndexError) as e:
             raise SafetensorsParsingError(
                 f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision "
-                f"'{revision or DEFAULT_REVISION}'): header format not recognized. Please make sure this is a correctly"
+                f"'{revision or constants.DEFAULT_REVISION}'): header format not recognized. Please make sure this is a correctly"
                 " formatted safetensors file."
             ) from e
 
@@ -5705,7 +5991,7 @@ class HfApi:
             set to `False`.
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         branch = quote(branch, safe="")
 
         # Prepare request
@@ -5720,8 +6006,18 @@ class HfApi:
         try:
             hf_raise_for_status(response)
         except HfHubHTTPError as e:
-            if not (e.response.status_code == 409 and exist_ok):
-                raise
+            if exist_ok and e.response.status_code == 409:
+                return
+            elif exist_ok and e.response.status_code == 403:
+                # No write permission on the namespace but branch might already exist
+                try:
+                    refs = self.list_repo_refs(repo_id=repo_id, repo_type=repo_type, token=token)
+                    for branch_ref in refs.branches:
+                        if branch_ref.name == branch:
+                            return  # Branch already exists => do not raise
+                except HfHubHTTPError:
+                    pass  # We raise the original error if the branch does not exist
+            raise
 
     @validate_hf_hub_args
     def delete_branch(
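With the fallback added above, `exist_ok=True` now also covers the read-only case: a 409 means the branch already exists, and on a 403 the branch list is consulted before re-raising. For example (the repo id is a placeholder):

```python
from huggingface_hub import HfApi

api = HfApi()
# Safe to call repeatedly: an existing "dev" branch no longer raises,
# even when the token lacks write permission on the namespace.
api.create_branch("username/my-model", branch="dev", exist_ok=True)
```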
@@ -5764,7 +6060,7 @@ class HfApi:
 
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         branch = quote(branch, safe="")
 
         # Prepare request
@@ -5831,8 +6127,8 @@ class HfApi:
             set to `False`.
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
-        revision = quote(revision, safe="") if revision is not None else DEFAULT_REVISION
+            repo_type = constants.REPO_TYPE_MODEL
+        revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
 
         # Prepare request
         tag_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tag/{revision}"
@@ -5887,7 +6183,7 @@ class HfApi:
             If tag is not found.
         """
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         tag = quote(tag, safe="")
 
         # Prepare request
@@ -5942,8 +6238,8 @@ class HfApi:
         repo_id: str,
         *,
         author: Optional[str] = None,
-        discussion_type: Optional[DiscussionTypeFilter] = None,
-        discussion_status: Optional[DiscussionStatusFilter] = None,
+        discussion_type: Optional[constants.DiscussionTypeFilter] = None,
+        discussion_status: Optional[constants.DiscussionStatusFilter] = None,
         repo_type: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> Iterator[Discussion]:
@@ -5995,14 +6291,14 @@ class HfApi:
         ... print(discussion.num, discussion.title)
         ```
         """
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
-        if discussion_type is not None and discussion_type not in DISCUSSION_TYPES:
-            raise ValueError(f"Invalid discussion_type, must be one of {DISCUSSION_TYPES}")
-        if discussion_status is not None and discussion_status not in DISCUSSION_STATUS:
-            raise ValueError(f"Invalid discussion_status, must be one of {DISCUSSION_STATUS}")
+            repo_type = constants.REPO_TYPE_MODEL
+        if discussion_type is not None and discussion_type not in constants.DISCUSSION_TYPES:
+            raise ValueError(f"Invalid discussion_type, must be one of {constants.DISCUSSION_TYPES}")
+        if discussion_status is not None and discussion_status not in constants.DISCUSSION_STATUS:
+            raise ValueError(f"Invalid discussion_status, must be one of {constants.DISCUSSION_STATUS}")
 
         headers = self._build_hf_headers(token=token)
         path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions"
@@ -6089,10 +6385,10 @@ class HfApi:
         """
         if not isinstance(discussion_num, int) or discussion_num <= 0:
             raise ValueError("Invalid discussion_num, must be a positive integer")
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
 
         path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions/{discussion_num}"
         headers = self._build_hf_headers(token=token)
@@ -6179,10 +6475,10 @@ class HfApi:
             or because it is set to `private` and you do not have access.
 
         </Tip>"""
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
 
         if description is not None:
             description = description.strip()
@@ -6289,10 +6585,10 @@ class HfApi:
         """Internal utility to POST changes to a Discussion or Pull Request"""
         if not isinstance(discussion_num, int) or discussion_num <= 0:
             raise ValueError("Invalid discussion_num, must be a positive integer")
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
         repo_id = f"{repo_type}s/{repo_id}"
 
         path = f"{self.endpoint}/api/{repo_id}/discussions/{discussion_num}/{resource}"
@@ -7316,7 +7612,7 @@ class HfApi:
         namespace = namespace or self._get_namespace(token=token)
 
         response = get_session().get(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}",
            headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -7340,9 +7636,11 @@ class HfApi:
         account_id: Optional[str] = None,
         min_replica: int = 0,
         max_replica: int = 1,
+        scale_to_zero_timeout: int = 15,
         revision: Optional[str] = None,
         task: Optional[str] = None,
         custom_image: Optional[Dict] = None,
+        secrets: Optional[Dict[str, str]] = None,
         type: InferenceEndpointType = InferenceEndpointType.PROTECTED,
         namespace: Optional[str] = None,
         token: Union[bool, str, None] = None,
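A sketch of a call exercising both new parameters (the endpoint name and resource values below are illustrative placeholders, not prescribed values):

```python
from huggingface_hub import HfApi

api = HfApi()
endpoint = api.create_inference_endpoint(
    "my-endpoint-name",  # placeholder
    repository="gpt2",
    framework="pytorch",
    task="text-generation",
    accelerator="cpu",
    vendor="aws",
    region="us-east-1",
    type="protected",
    instance_size="x2",
    instance_type="intel-icl",
    scale_to_zero_timeout=30,  # minutes of inactivity before scaling to zero
    secrets={"MY_SECRET_KEY": "secret_value"},  # injected into the container env
)
```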
@@ -7372,6 +7670,8 @@ class HfApi:
                 The minimum number of replicas (instances) to keep running for the Inference Endpoint. Defaults to 0.
             max_replica (`int`, *optional*):
                 The maximum number of replicas (instances) to scale to for the Inference Endpoint. Defaults to 1.
+            scale_to_zero_timeout (`int`, *optional*):
+                The duration in minutes before an inactive endpoint is scaled to zero. Defaults to 15.
             revision (`str`, *optional*):
                 The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
             task (`str`, *optional*):
@@ -7379,6 +7679,8 @@ class HfApi:
             custom_image (`Dict`, *optional*):
                 A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
                 Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
+            secrets (`Dict[str, str]`, *optional*):
+                Secret values to inject in the container environment.
             type ([`InferenceEndpointType]`, *optional*):
                 The type of the Inference Endpoint, which can be `"protected"` (default), `"public"` or `"private"`.
             namespace (`str`, *optional*):
@@ -7441,6 +7743,7 @@ class HfApi:
         ...     },
         ...     "url": "ghcr.io/huggingface/text-generation-inference:1.1.0",
         ... },
+        ... secrets={"MY_SECRET_KEY": "secret_value"},
         ... )
 
         ```
@@ -7457,6 +7760,7 @@ class HfApi:
                 "scaling": {
                     "maxReplica": max_replica,
                     "minReplica": min_replica,
+                    "scaleToZeroTimeout": scale_to_zero_timeout,
                 },
             },
             "model": {
@@ -7473,9 +7777,10 @@ class HfApi:
             },
             "type": type,
         }
-
+        if secrets:
+            payload["model"]["secrets"] = secrets
         response = get_session().post(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}",
             headers=self._build_hf_headers(token=token),
             json=payload,
         )
@@ -7523,7 +7828,7 @@ class HfApi:
         namespace = namespace or self._get_namespace(token=token)
 
         response = get_session().get(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -7540,12 +7845,14 @@ class HfApi:
         instance_type: Optional[str] = None,
         min_replica: Optional[int] = None,
         max_replica: Optional[int] = None,
+        scale_to_zero_timeout: Optional[int] = None,
         # Model update
         repository: Optional[str] = None,
         framework: Optional[str] = None,
         revision: Optional[str] = None,
         task: Optional[str] = None,
         custom_image: Optional[Dict] = None,
+        secrets: Optional[Dict[str, str]] = None,
         # Other
         namespace: Optional[str] = None,
         token: Union[bool, str, None] = None,
@@ -7571,6 +7878,8 @@ class HfApi:
                 The minimum number of replicas (instances) to keep running for the Inference Endpoint.
             max_replica (`int`, *optional*):
                 The maximum number of replicas (instances) to scale to for the Inference Endpoint.
+            scale_to_zero_timeout (`int`, *optional*):
+                The duration in minutes before an inactive endpoint is scaled to zero.
 
             repository (`str`, *optional*):
                 The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`).
@@ -7583,7 +7892,8 @@ class HfApi:
             custom_image (`Dict`, *optional*):
                 A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
                 Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
-
+            secrets (`Dict[str, str]`, *optional*):
+                Secret values to inject in the container environment.
             namespace (`str`, *optional*):
                 The namespace where the Inference Endpoint will be updated. Defaults to the current user's namespace.
             token (Union[bool, str, None], optional):
@@ -7597,29 +7907,35 @@ class HfApi:
         """
         namespace = namespace or self._get_namespace(token=token)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-        payload["
-
-
-
-
-
-
+        # Populate only the fields that are not None
+        payload: Dict = defaultdict(lambda: defaultdict(dict))
+        if accelerator is not None:
+            payload["compute"]["accelerator"] = accelerator
+        if instance_size is not None:
+            payload["compute"]["instanceSize"] = instance_size
+        if instance_type is not None:
+            payload["compute"]["instanceType"] = instance_type
+        if max_replica is not None:
+            payload["compute"]["scaling"]["maxReplica"] = max_replica
+        if min_replica is not None:
+            payload["compute"]["scaling"]["minReplica"] = min_replica
+        if scale_to_zero_timeout is not None:
+            payload["compute"]["scaling"]["scaleToZeroTimeout"] = scale_to_zero_timeout
+        if repository is not None:
+            payload["model"]["repository"] = repository
+        if framework is not None:
+            payload["model"]["framework"] = framework
+        if revision is not None:
+            payload["model"]["revision"] = revision
+        if task is not None:
+            payload["model"]["task"] = task
+        if custom_image is not None:
+            payload["model"]["image"] = {"custom": custom_image}
+        if secrets is not None:
+            payload["model"]["secrets"] = secrets
 
         response = get_session().put(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
             headers=self._build_hf_headers(token=token),
             json=payload,
         )
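The nested-`defaultdict` pattern above lets each `if` branch assign two or three levels deep without pre-creating the intermediate dicts, and fields that were never set simply stay absent from the payload. A quick stand-alone illustration:

```python
import json
from collections import defaultdict

payload = defaultdict(lambda: defaultdict(dict))
payload["compute"]["scaling"]["maxReplica"] = 2  # intermediate dicts appear on demand
payload["model"]["repository"] = "gpt2"
print(json.dumps(payload))
# {"compute": {"scaling": {"maxReplica": 2}}, "model": {"repository": "gpt2"}}
```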
@@ -7650,7 +7966,7 @@ class HfApi:
         """
         namespace = namespace or self._get_namespace(token=token)
         response = get_session().delete(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -7683,7 +7999,7 @@ class HfApi:
         namespace = namespace or self._get_namespace(token=token)
 
         response = get_session().post(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/pause",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/pause",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -7722,7 +8038,7 @@ class HfApi:
         namespace = namespace or self._get_namespace(token=token)
 
         response = get_session().post(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/resume",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/resume",
             headers=self._build_hf_headers(token=token),
         )
         try:
@@ -7764,7 +8080,7 @@ class HfApi:
         namespace = namespace or self._get_namespace(token=token)
 
         response = get_session().post(
-            f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/scale-to-zero",
+            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/scale-to-zero",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -8446,13 +8762,13 @@ class HfApi:
         repo_type: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> List[AccessRequest]:
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
 
         response = get_session().get(
-            f"{ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/{status}",
+            f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/{status}",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -8601,13 +8917,13 @@ class HfApi:
         repo_type: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> None:
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
 
         response = get_session().post(
-            f"{ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/handle",
+            f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/handle",
             headers=self._build_hf_headers(token=token),
             json={"user": user, "status": status},
         )
@@ -8651,13 +8967,13 @@ class HfApi:
             [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user does not exist on the Hub.
         """
-        if repo_type not in REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
-            repo_type = REPO_TYPE_MODEL
+            repo_type = constants.REPO_TYPE_MODEL
 
         response = get_session().post(
-            f"{ENDPOINT}/api/models/{repo_id}/user-access-request/grant",
+            f"{constants.ENDPOINT}/api/models/{repo_id}/user-access-request/grant",
             headers=self._build_hf_headers(token=token),
             json={"user": user},
         )
@@ -8700,7 +9016,7 @@ class HfApi:
         ```
         """
         response = get_session().get(
-            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -8751,7 +9067,7 @@ class HfApi:
         ```
         """
         response = get_session().get(
-            f"{ENDPOINT}/api/settings/webhooks",
+            f"{constants.ENDPOINT}/api/settings/webhooks",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -8775,7 +9091,7 @@ class HfApi:
         *,
         url: str,
         watched: List[Union[Dict, WebhookWatchedItem]],
-        domains: Optional[List[WEBHOOK_DOMAIN_T]] = None,
+        domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None,
         secret: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> WebhookInfo:
@@ -8823,7 +9139,7 @@ class HfApi:
         watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
 
         response = get_session().post(
-            f"{ENDPOINT}/api/settings/webhooks",
+            f"{constants.ENDPOINT}/api/settings/webhooks",
             json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
             headers=self._build_hf_headers(token=token),
         )
@@ -8849,7 +9165,7 @@ class HfApi:
         *,
         url: Optional[str] = None,
         watched: Optional[List[Union[Dict, WebhookWatchedItem]]] = None,
-        domains: Optional[List[WEBHOOK_DOMAIN_T]] = None,
+        domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None,
         secret: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> WebhookInfo:
@@ -8901,7 +9217,7 @@ class HfApi:
         watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
 
         response = get_session().post(
-            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}",
             json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
             headers=self._build_hf_headers(token=token),
         )
@@ -8953,7 +9269,7 @@ class HfApi:
         ```
         """
         response = get_session().post(
-            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}/enable",
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}/enable",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -9004,7 +9320,7 @@ class HfApi:
         ```
         """
         response = get_session().post(
-            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}/disable",
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}/disable",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -9045,7 +9361,7 @@ class HfApi:
         ```
         """
         response = get_session().delete(
-            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}",
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
@@ -9118,13 +9434,123 @@ class HfApi:
             if relpath_to_abspath[relpath] != ".gitattributes"
         ]
 
-    def get_user_overview(self, username: str) -> User:
+    def _prepare_upload_folder_additions(
+        self,
+        folder_path: Union[str, Path],
+        path_in_repo: str,
+        allow_patterns: Optional[Union[List[str], str]] = None,
+        ignore_patterns: Optional[Union[List[str], str]] = None,
+        repo_type: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> List[CommitOperationAdd]:
+        """Generate the list of Add operations for a commit to upload a folder.
+
+        Files not matching the `allow_patterns` (allowlist) and `ignore_patterns` (denylist)
+        constraints are discarded.
+        """
+
+        folder_path = Path(folder_path).expanduser().resolve()
+        if not folder_path.is_dir():
+            raise ValueError(f"Provided path: '{folder_path}' is not a directory")
+
+        # List files from folder
+        relpath_to_abspath = {
+            path.relative_to(folder_path).as_posix(): path
+            for path in sorted(folder_path.glob("**/*"))  # sorted to be deterministic
+            if path.is_file()
+        }
+
+        # Filter files
+        # Patterns are applied on the path relative to `folder_path`. `path_in_repo` is prefixed after the filtering.
+        filtered_repo_objects = list(
+            filter_repo_objects(
+                relpath_to_abspath.keys(), allow_patterns=allow_patterns, ignore_patterns=ignore_patterns
+            )
+        )
+
+        prefix = f"{path_in_repo.strip('/')}/" if path_in_repo else ""
+
+        # If updating a README.md file, make sure the metadata format is valid
+        # It's better to fail early than to fail after all the files have been hashed.
+        if "README.md" in filtered_repo_objects:
+            self._validate_yaml(
+                content=relpath_to_abspath["README.md"].read_text(),
+                repo_type=repo_type,
+                token=token,
+            )
+        if len(filtered_repo_objects) > 30:
+            logger.info(
+                "It seems you are trying to upload a large folder at once. This might take some time and then fail if "
+                "the folder is too large. For such cases, it is recommended to upload in smaller batches or to use "
+                "`HfApi().upload_large_folder(...)`/`huggingface-cli upload-large-folder` instead. For more details, "
+                "check out https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#upload-a-large-folder."
+            )
+
+        logger.info(f"Start hashing {len(filtered_repo_objects)} files.")
+        operations = [
+            CommitOperationAdd(
+                path_or_fileobj=relpath_to_abspath[relpath],  # absolute path on disk
+                path_in_repo=prefix + relpath,  # "absolute" path in repo
+            )
+            for relpath in filtered_repo_objects
+        ]
+        logger.info(f"Finished hashing {len(filtered_repo_objects)} files.")
+        return operations
+
+    def _validate_yaml(self, content: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None):
+        """
+        Validate YAML from `README.md`, used before file hashing and upload.
+
+        Args:
+            content (`str`):
+                Content of `README.md` to validate.
+            repo_type (`str`, *optional*):
+                The type of the repo to grant access to. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+              if YAML is invalid
+        """
+        repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL
+        headers = self._build_hf_headers(token=token)
+
+        response = get_session().post(
+            f"{self.endpoint}/api/validate-yaml",
+            json={"content": content, "repoType": repo_type},
+            headers=headers,
+        )
+        # Handle warnings (example: empty metadata)
+        response_content = response.json()
+        message = "\n".join([f"- {warning.get('message')}" for warning in response_content.get("warnings", [])])
+        if message:
+            warnings.warn(f"Warnings while validating metadata in README.md:\n{message}")
+
+        # Raise on errors
+        try:
+            hf_raise_for_status(response)
+        except BadRequestError as e:
+            errors = response_content.get("errors", [])
+            message = "\n".join([f"- {error.get('message')}" for error in errors])
+            raise ValueError(f"Invalid metadata in README.md.\n{message}") from e
+
+    def get_user_overview(self, username: str, token: Union[bool, str, None] = None) -> User:
         """
         Get an overview of a user on the Hub.
 
         Args:
             username (`str`):
                 Username of the user to get an overview of.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
 
         Returns:
             `User`: A [`User`] object with the user's overview.
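`filter_repo_objects` (imported from `huggingface_hub.utils`) applies the allowlist/denylist rule described in the new method's docstring. A simplified, stand-alone sketch of the same semantics using `fnmatch` (this is an approximation, not the library's exact implementation):

```python
from fnmatch import fnmatch
from typing import Iterable, List, Optional


def filter_paths(
    paths: Iterable[str],
    allow_patterns: Optional[List[str]] = None,
    ignore_patterns: Optional[List[str]] = None,
) -> List[str]:
    # Keep a path if it matches at least one allow pattern (when provided)
    # and none of the ignore patterns.
    kept = []
    for path in paths:
        if allow_patterns is not None and not any(fnmatch(path, p) for p in allow_patterns):
            continue
        if ignore_patterns is not None and any(fnmatch(path, p) for p in ignore_patterns):
            continue
        kept.append(path)
    return kept


print(filter_paths(["a.txt", "logs/run.log"], allow_patterns=["*.txt", "logs/*"], ignore_patterns=["*.log"]))
# -> ['a.txt']
```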
@@ -9133,18 +9559,24 @@ class HfApi:
             [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 If the user does not exist on the Hub.
         """
-        r = get_session().get(
-
+        r = get_session().get(
+            f"{constants.ENDPOINT}/api/users/{username}/overview", headers=self._build_hf_headers(token=token)
+        )
         hf_raise_for_status(r)
         return User(**r.json())
 
-    def list_organization_members(self, organization: str) -> Iterable[User]:
+    def list_organization_members(self, organization: str, token: Union[bool, str, None] = None) -> Iterable[User]:
         """
         List of members of an organization on the Hub.
 
         Args:
             organization (`str`):
                 Name of the organization to get the members of.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
 
         Returns:
             `Iterable[User]`: A list of [`User`] objects with the members of the organization.
@@ -9154,21 +9586,25 @@ class HfApi:
                 HTTP 404 If the organization does not exist on the Hub.
 
         """
-
-
-
-
-
-        for member in r.json():
+        for member in paginate(
+            path=f"{constants.ENDPOINT}/api/organizations/{organization}/members",
+            params={},
+            headers=self._build_hf_headers(token=token),
+        ):
             yield User(**member)
 
-    def list_user_followers(self, username: str) -> Iterable[User]:
+    def list_user_followers(self, username: str, token: Union[bool, str, None] = None) -> Iterable[User]:
         """
         Get the list of followers of a user on the Hub.
 
         Args:
             username (`str`):
                 Username of the user to get the followers of.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
 
         Returns:
             `Iterable[User]`: A list of [`User`] objects with the followers of the user.
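These listing helpers now go through `paginate` from `huggingface_hub.utils`, which follows the Hub's `Link: <...>; rel="next"` response headers. A minimal sketch of that pattern with plain `requests` (retries and backoff, which the real helper may add, are omitted):

```python
import requests


def paginate(path: str, params: dict, headers: dict):
    # Yield items across pages by following the RFC 5988 `Link` header,
    # which `requests` exposes via `response.links`.
    session = requests.Session()
    response = session.get(path, params=params, headers=headers)
    while True:
        response.raise_for_status()
        yield from response.json()
        next_url = response.links.get("next", {}).get("url")
        if next_url is None:
            break
        response = session.get(next_url, headers=headers)
```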
@@ -9178,21 +9614,25 @@ class HfApi:
                 HTTP 404 If the user does not exist on the Hub.
 
         """
-
-
-
-
-
-        for follower in r.json():
+        for follower in paginate(
+            path=f"{constants.ENDPOINT}/api/users/{username}/followers",
+            params={},
+            headers=self._build_hf_headers(token=token),
+        ):
             yield User(**follower)
 
-    def list_user_following(self, username: str) -> Iterable[User]:
+    def list_user_following(self, username: str, token: Union[bool, str, None] = None) -> Iterable[User]:
         """
         Get the list of users followed by a user on the Hub.
 
         Args:
             username (`str`):
                 Username of the user to get the users followed by.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
 
         Returns:
             `Iterable[User]`: A list of [`User`] objects with the users followed by the user.
@@ -9202,49 +9642,76 @@ class HfApi:
                 HTTP 404 If the user does not exist on the Hub.
 
         """
+        for followed_user in paginate(
+            path=f"{constants.ENDPOINT}/api/users/{username}/following",
+            params={},
+            headers=self._build_hf_headers(token=token),
+        ):
+            yield User(**followed_user)
 
-
+    def auth_check(
+        self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> None:
+        """
+        Check if the provided user token has access to a specific repository on the Hugging Face Hub.
 
-
+        This method verifies whether the user, authenticated via the provided token, has access to the specified
+        repository. If the repository is not found or if the user lacks the required permissions to access it,
+        the method raises an appropriate exception.
 
-
-
+        Args:
+            repo_id (`str`):
+                The repository to check for access. Format should be `"user/repo_name"`.
+                Example: `"user/my-cool-model"`.
+
+            repo_type (`str`, *optional*):
+                The type of the repository. Should be one of `"model"`, `"dataset"`, or `"space"`.
+                If not specified, the default is `"model"`.
 
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
 
-
-
-
-
-
-    ) -> List[CommitOperationAdd]:
-        """Generate the list of Add operations for a commit to upload a folder.
+        Raises:
+            [`~utils.RepositoryNotFoundError`]:
+                Raised if the repository does not exist, is private, or the user does not have access. This can
+                occur if the `repo_id` or `repo_type` is incorrect or if the repository is private but the user
+                is not authenticated.
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            [`~utils.GatedRepoError`]:
+                Raised if the repository exists but is gated and the user is not authorized to access it.
+
+        Example:
+            Check if the user has access to a repository:
+
+            ```python
+            >>> from huggingface_hub import auth_check
+            >>> from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError
+
+            try:
+                auth_check("user/my-cool-model")
+            except GatedRepoError:
+                # Handle gated repository error
+                print("You do not have permission to access this gated repository.")
+            except RepositoryNotFoundError:
+                # Handle repository not found error
+                print("The repository was not found or you do not have access.")
+            ```
+
+            In this example:
+            - If the user has access, the method completes successfully.
+            - If the repository is gated or does not exist, appropriate exceptions are raised, allowing the user
+              to handle them accordingly.
+        """
+        headers = self._build_hf_headers(token=token)
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/auth-check"
+        r = get_session().get(path, headers=headers)
+        hf_raise_for_status(r)
 
 
 def _parse_revision_from_pr_url(pr_url: str) -> str:
@@ -9265,6 +9732,7 @@ def _parse_revision_from_pr_url(pr_url: str) -> str:
 api = HfApi()
 
 whoami = api.whoami
+auth_check = api.auth_check
 get_token_permission = api.get_token_permission
 
 list_models = api.list_models
@@ -9285,7 +9753,6 @@ list_repo_refs = api.list_repo_refs
 list_repo_commits = api.list_repo_commits
 list_repo_tree = api.list_repo_tree
 get_paths_info = api.get_paths_info
-
 list_metrics = api.list_metrics
 
 get_model_tags = api.get_model_tags
@@ -9295,6 +9762,7 @@ create_commit = api.create_commit
 create_repo = api.create_repo
 delete_repo = api.delete_repo
 update_repo_visibility = api.update_repo_visibility
+update_repo_settings = api.update_repo_settings
 super_squash_history = api.super_squash_history
 move_repo = api.move_repo
 upload_file = api.upload_file
@@ -9303,6 +9771,7 @@ delete_file = api.delete_file
 delete_folder = api.delete_folder
 delete_files = api.delete_files
 create_commits_on_pr = api.create_commits_on_pr
+upload_large_folder = api.upload_large_folder
 preupload_lfs_files = api.preupload_lfs_files
 create_branch = api.create_branch
 delete_branch = api.delete_branch