huggingface-hub 0.23.3__py3-none-any.whl → 0.24.0rc0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of huggingface-hub might be problematic.
- huggingface_hub/__init__.py +47 -15
- huggingface_hub/_commit_api.py +38 -8
- huggingface_hub/_inference_endpoints.py +11 -4
- huggingface_hub/_local_folder.py +22 -13
- huggingface_hub/_snapshot_download.py +12 -7
- huggingface_hub/_webhooks_server.py +3 -1
- huggingface_hub/commands/huggingface_cli.py +4 -3
- huggingface_hub/commands/repo_files.py +128 -0
- huggingface_hub/constants.py +12 -0
- huggingface_hub/file_download.py +127 -91
- huggingface_hub/hf_api.py +979 -341
- huggingface_hub/hf_file_system.py +30 -3
- huggingface_hub/hub_mixin.py +103 -41
- huggingface_hub/inference/_client.py +373 -42
- huggingface_hub/inference/_common.py +0 -2
- huggingface_hub/inference/_generated/_async_client.py +390 -48
- huggingface_hub/inference/_generated/types/__init__.py +4 -1
- huggingface_hub/inference/_generated/types/chat_completion.py +41 -21
- huggingface_hub/inference/_generated/types/feature_extraction.py +23 -5
- huggingface_hub/inference/_generated/types/text_generation.py +29 -0
- huggingface_hub/lfs.py +11 -6
- huggingface_hub/repocard_data.py +41 -29
- huggingface_hub/repository.py +6 -6
- huggingface_hub/serialization/__init__.py +8 -3
- huggingface_hub/serialization/_base.py +13 -16
- huggingface_hub/serialization/_tensorflow.py +4 -3
- huggingface_hub/serialization/_torch.py +399 -22
- huggingface_hub/utils/__init__.py +1 -2
- huggingface_hub/utils/_errors.py +1 -1
- huggingface_hub/utils/_fixes.py +14 -3
- huggingface_hub/utils/_paths.py +17 -6
- huggingface_hub/utils/_subprocess.py +0 -1
- huggingface_hub/utils/_telemetry.py +9 -1
- huggingface_hub/utils/_typing.py +26 -1
- huggingface_hub/utils/endpoint_helpers.py +2 -186
- huggingface_hub/utils/sha.py +36 -1
- huggingface_hub/utils/tqdm.py +0 -1
- {huggingface_hub-0.23.3.dist-info → huggingface_hub-0.24.0rc0.dist-info}/METADATA +12 -9
- {huggingface_hub-0.23.3.dist-info → huggingface_hub-0.24.0rc0.dist-info}/RECORD +43 -43
- huggingface_hub/serialization/_numpy.py +0 -68
- {huggingface_hub-0.23.3.dist-info → huggingface_hub-0.24.0rc0.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.23.3.dist-info → huggingface_hub-0.24.0rc0.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.23.3.dist-info → huggingface_hub-0.24.0rc0.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.23.3.dist-info → huggingface_hub-0.24.0rc0.dist-info}/top_level.txt +0 -0
huggingface_hub/hf_api.py
CHANGED
@@ -98,17 +98,18 @@ from .constants import (
     SAFETENSORS_MAX_HEADER_LENGTH,
     SAFETENSORS_SINGLE_FILE,
     SPACES_SDK_TYPES,
+    WEBHOOK_DOMAIN_T,
     DiscussionStatusFilter,
     DiscussionTypeFilter,
 )
 from .file_download import HfFileMetadata, get_hf_file_metadata, hf_hub_url
 from .repocard_data import DatasetCardData, ModelCardData, SpaceCardData
-from .utils import (  # noqa: F401 # imported for backward compatibility
+from .utils import (
     DEFAULT_IGNORE_PATTERNS,
     BadRequestError,
     EntryNotFoundError,
     GatedRepoError,
-    HfFolder,
+    HfFolder,  # noqa: F401 # kept for backward compatibility
     HfHubHTTPError,
     LocalTokenNotFoundError,
     NotASafetensorsRepoError,
@@ -130,18 +131,81 @@ from .utils import (  # noqa: F401 # imported for backward compatibility
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
-from .utils._deprecation import _deprecate_arguments
 from .utils._typing import CallableT
 from .utils.endpoint_helpers import (
-    DatasetFilter,
-    ModelFilter,
-    _is_emission_within_treshold,
+    _is_emission_within_threshold,
 )


 R = TypeVar("R")  # Return type
 CollectionItemType_T = Literal["model", "dataset", "space", "paper"]

+ExpandModelProperty_T = Literal[
+    "author",
+    "cardData",
+    "config",
+    "createdAt",
+    "disabled",
+    "downloads",
+    "downloadsAllTime",
+    "gated",
+    "gitalyUid",
+    "inference",
+    "lastModified",
+    "library_name",
+    "likes",
+    "mask_token",
+    "model-index",
+    "pipeline_tag",
+    "private",
+    "safetensors",
+    "sha",
+    "siblings",
+    "spaces",
+    "tags",
+    "transformersInfo",
+    "widgetData",
+]
+
+ExpandDatasetProperty_T = Literal[
+    "author",
+    "cardData",
+    "citation",
+    "createdAt",
+    "disabled",
+    "description",
+    "downloads",
+    "downloadsAllTime",
+    "gated",
+    "gitalyUid",
+    "lastModified",
+    "likes",
+    "paperswithcode_id",
+    "private",
+    "siblings",
+    "sha",
+    "tags",
+]
+
+ExpandSpaceProperty_T = Literal[
+    "author",
+    "cardData",
+    "datasets",
+    "disabled",
+    "gitalyUid",
+    "lastModified",
+    "createdAt",
+    "likes",
+    "private",
+    "runtime",
+    "sdk",
+    "siblings",
+    "sha",
+    "subdomain",
+    "tags",
+    "models",
+]
+
 USERNAME_PLACEHOLDER = "hf_user"
 _REGEX_DISCUSSION_URL = re.compile(r".*/discussions/(\d+)$")

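The three `Expand*Property_T` literals above define the allowed values for the new `expand` parameter added to the listing and info APIs further down in this diff. A minimal sketch of how it might be used (the filter value is illustrative; any property not requested is simply left unset on the returned object):

    from huggingface_hub import HfApi

    api = HfApi()
    # Fetch only the requested properties; everything else stays None.
    for model in api.list_models(filter="text-classification", expand=["downloadsAllTime", "likes"], limit=5):
        print(model.id, model.downloads_all_time, model.likes)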
@@ -177,9 +241,9 @@ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tu
         `None`) and repo_id (`str`).

     Raises:
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If URL cannot be parsed.
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If `repo_type` is unknown.
     """
     input_hf_id = hf_id
@@ -369,8 +433,9 @@ class AccessRequest:
             Username of the user who requested access.
         fullname (`str`):
             Fullname of the user who requested access.
-        email (`str`):
+        email (`Optional[str]`):
             Email of the user who requested access.
+            Can only be `None` in the /accepted list if the user was granted access manually.
         timestamp (`datetime`):
             Timestamp of the request.
         status (`Literal["pending", "accepted", "rejected"]`):
@@ -381,7 +446,7 @@ class AccessRequest:

     username: str
     fullname: str
-    email: str
+    email: Optional[str]
     timestamp: datetime
     status: Literal["pending", "accepted", "rejected"]

@@ -389,6 +454,48 @@ class AccessRequest:
     fields: Optional[Dict[str, Any]] = None


+@dataclass
+class WebhookWatchedItem:
+    """Data structure containing information about the items watched by a webhook.
+
+    Attributes:
+        type (`Literal["dataset", "model", "org", "space", "user"]`):
+            Type of the item to be watched. Can be one of `["dataset", "model", "org", "space", "user"]`.
+        name (`str`):
+            Name of the item to be watched. Can be the username, organization name, model name, dataset name or space name.
+    """
+
+    type: Literal["dataset", "model", "org", "space", "user"]
+    name: str
+
+
+@dataclass
+class WebhookInfo:
+    """Data structure containing information about a webhook.
+
+    Attributes:
+        id (`str`):
+            ID of the webhook.
+        url (`str`):
+            URL of the webhook.
+        watched (`List[WebhookWatchedItem]`):
+            List of items watched by the webhook, see [`WebhookWatchedItem`].
+        domains (`List[WEBHOOK_DOMAIN_T]`):
+            List of domains the webhook is watching. Can be one of `["repo", "discussions"]`.
+        secret (`str`, *optional*):
+            Secret of the webhook.
+        disabled (`bool`):
+            Whether the webhook is disabled or not.
+    """
+
+    id: str
+    url: str
+    watched: List[WebhookWatchedItem]
+    domains: List[WEBHOOK_DOMAIN_T]
+    secret: Optional[str]
+    disabled: bool
+
+
 class RepoUrl(str):
     """Subclass of `str` describing a repo URL on the Hub.

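These dataclasses describe the payloads of the webhook-management endpoints that ship alongside them in this release. A short sketch of building a watch list (the repo and org names are placeholders):

    from huggingface_hub.hf_api import WebhookWatchedItem

    # Watch one specific model repo and everything under an organization.
    watched = [
        WebhookWatchedItem(type="model", name="username/my-model"),
        WebhookWatchedItem(type="org", name="my-org"),
    ]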
@@ -423,9 +530,9 @@ class RepoUrl(str):
     ```

     Raises:
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If URL cannot be parsed.
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If `repo_type` is unknown.
     """

@@ -604,6 +711,8 @@ class ModelInfo:
             If so, whether there is manual or automatic approval.
         downloads (`int`):
             Number of downloads of the model over the last 30 days.
+        downloads_all_time (`int`):
+            Cumulated number of downloads of the model since its creation.
         likes (`int`):
             Number of likes of the model.
         library_name (`str`, *optional*):
@@ -638,13 +747,14 @@ class ModelInfo:
     sha: Optional[str]
     created_at: Optional[datetime]
     last_modified: Optional[datetime]
-    private: bool
+    private: Optional[bool]
     gated: Optional[Literal["auto", "manual", False]]
     disabled: Optional[bool]
-    downloads: int
-    likes: int
+    downloads: Optional[int]
+    downloads_all_time: Optional[int]
+    likes: Optional[int]
     library_name: Optional[str]
-    tags: List[str]
+    tags: Optional[List[str]]
     pipeline_tag: Optional[str]
     mask_token: Optional[str]
     card_data: Optional[ModelCardData]
@@ -664,13 +774,14 @@ class ModelInfo:
         self.last_modified = parse_datetime(last_modified) if last_modified else None
         created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None)
         self.created_at = parse_datetime(created_at) if created_at else None
-        self.private = kwargs.pop("private")
+        self.private = kwargs.pop("private", None)
         self.gated = kwargs.pop("gated", None)
         self.disabled = kwargs.pop("disabled", None)
-        self.downloads = kwargs.pop("downloads")
-        self.likes = kwargs.pop("likes")
+        self.downloads = kwargs.pop("downloads", None)
+        self.downloads_all_time = kwargs.pop("downloadsAllTime", None)
+        self.likes = kwargs.pop("likes", None)
         self.library_name = kwargs.pop("library_name", None)
-        self.tags = kwargs.pop("tags")
+        self.tags = kwargs.pop("tags", None)
         self.pipeline_tag = kwargs.pop("pipeline_tag", None)
         self.mask_token = kwargs.pop("mask_token", None)
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
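Because these fields are now `Optional`, code that consumes a `ModelInfo` built from an `expand` query should guard against `None`. An illustrative sketch (the model id is only an example):

    from huggingface_hub import HfApi

    api = HfApi()
    info = api.model_info("gpt2", expand=["downloads", "likes"])
    # Unrequested properties are absent from the response, so they come back as None.
    total = (info.downloads or 0) + (info.likes or 0)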
@@ -757,6 +868,8 @@ class DatasetInfo:
             If so, whether there is manual or automatic approval.
         downloads (`int`):
             Number of downloads of the dataset over the last 30 days.
+        downloads_all_time (`int`):
+            Cumulated number of downloads of the dataset since its creation.
         likes (`int`):
             Number of likes of the dataset.
         tags (`List[str]`):
@@ -772,13 +885,14 @@ class DatasetInfo:
     sha: Optional[str]
     created_at: Optional[datetime]
     last_modified: Optional[datetime]
-    private: bool
+    private: Optional[bool]
     gated: Optional[Literal["auto", "manual", False]]
     disabled: Optional[bool]
-    downloads: int
-    likes: int
+    downloads: Optional[int]
+    downloads_all_time: Optional[int]
+    likes: Optional[int]
     paperswithcode_id: Optional[str]
-    tags: List[str]
+    tags: Optional[List[str]]
     card_data: Optional[DatasetCardData]
     siblings: Optional[List[RepoSibling]]

@@ -790,13 +904,14 @@ class DatasetInfo:
         self.created_at = parse_datetime(created_at) if created_at else None
         last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None)
         self.last_modified = parse_datetime(last_modified) if last_modified else None
-        self.private = kwargs.pop("private")
+        self.private = kwargs.pop("private", None)
         self.gated = kwargs.pop("gated", None)
         self.disabled = kwargs.pop("disabled", None)
-        self.downloads = kwargs.pop("downloads")
-        self.likes = kwargs.pop("likes")
+        self.downloads = kwargs.pop("downloads", None)
+        self.downloads_all_time = kwargs.pop("downloadsAllTime", None)
+        self.likes = kwargs.pop("likes", None)
         self.paperswithcode_id = kwargs.pop("paperswithcode_id", None)
-        self.tags = kwargs.pop("tags")
+        self.tags = kwargs.pop("tags", None)
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
         self.card_data = (
             DatasetCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -889,14 +1004,14 @@ class SpaceInfo:
     sha: Optional[str]
     created_at: Optional[datetime]
     last_modified: Optional[datetime]
-    private: bool
+    private: Optional[bool]
     gated: Optional[Literal["auto", "manual", False]]
     disabled: Optional[bool]
     host: Optional[str]
     subdomain: Optional[str]
-    likes: int
+    likes: Optional[int]
     sdk: Optional[str]
-    tags: List[str]
+    tags: Optional[List[str]]
     siblings: Optional[List[RepoSibling]]
     card_data: Optional[SpaceCardData]
     runtime: Optional[SpaceRuntime]
@@ -911,14 +1026,14 @@ class SpaceInfo:
         self.created_at = parse_datetime(created_at) if created_at else None
         last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None)
         self.last_modified = parse_datetime(last_modified) if last_modified else None
-        self.private = kwargs.pop("private")
+        self.private = kwargs.pop("private", None)
         self.gated = kwargs.pop("gated", None)
         self.disabled = kwargs.pop("disabled", None)
         self.host = kwargs.pop("host", None)
         self.subdomain = kwargs.pop("subdomain", None)
-        self.likes = kwargs.pop("likes")
+        self.likes = kwargs.pop("likes", None)
         self.sdk = kwargs.pop("sdk", None)
-        self.tags = kwargs.pop("tags")
+        self.tags = kwargs.pop("tags", None)
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
         self.card_data = (
             SpaceCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -1449,7 +1564,8 @@ class HfApi:
     def list_models(
         self,
         *,
-        filter: Union[ModelFilter, str, Iterable[str], None] = None,
+        # Search-query parameter
+        filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
         library: Optional[Union[str, List[str]]] = None,
         language: Optional[Union[str, List[str]]] = None,
@@ -1458,23 +1574,25 @@ class HfApi:
         trained_dataset: Optional[Union[str, List[str]]] = None,
         tags: Optional[Union[str, List[str]]] = None,
         search: Optional[str] = None,
+        pipeline_tag: Optional[str] = None,
         emissions_thresholds: Optional[Tuple[float, float]] = None,
+        # Sorting and pagination parameters
         sort: Union[Literal["last_modified"], str, None] = None,
         direction: Optional[Literal[-1]] = None,
         limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandModelProperty_T]] = None,
         full: Optional[bool] = None,
         cardData: bool = False,
         fetch_config: bool = False,
         token: Union[bool, str, None] = None,
-        pipeline_tag: Optional[str] = None,
     ) -> Iterable[ModelInfo]:
         """
         List models hosted on the Huggingface Hub, given some filters.

         Args:
-            filter (
-            A string or
-            on the Hub.
+            filter (`str` or `Iterable[str]`, *optional*):
+                A string or list of string to filter models on the Hub.
             author (`str`, *optional*):
                 A string which identify the author (user or organization) of the
                 returned models
@@ -1498,6 +1616,8 @@ class HfApi:
                 as `text-generation` or `spacy`.
             search (`str`, *optional*):
                 A string that will be contained in the returned model ids.
+            pipeline_tag (`str`, *optional*):
+                A string pipeline tag to filter models on the Hub by, such as `summarization`.
             emissions_thresholds (`Tuple`, *optional*):
                 A tuple of two ints or floats representing a minimum and maximum
                 carbon footprint to filter the resulting models with in grams.
@@ -1510,6 +1630,10 @@ class HfApi:
             limit (`int`, *optional*):
                 The limit on the number of models fetched. Leaving this option
                 to `None` fetches all models.
+            expand (`List[ExpandModelProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed.
+                Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gitalyUid"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"` and `"widgetData"`.
             full (`bool`, *optional*):
                 Whether to fetch all model data, including the `last_modified`,
                 the `sha`, the files and the `tags`. This is set to `True` by
@@ -1526,8 +1650,6 @@ class HfApi:
                 token, which is the recommended method for authentication (see
                 https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                 To disable authentication, pass `False`.
-            pipeline_tag (`str`, *optional*):
-                A string pipeline tag to filter models on the Hub by, such as `summarization`


         Returns:
@@ -1540,13 +1662,13 @@ class HfApi:

         >>> api = HfApi()

-
+        # List all models
         >>> api.list_models()

-
+        # List only the text classification models
         >>> api.list_models(filter="text-classification")

-
+        # List only models from the AllenNLP library
         >>> api.list_models(filter="allennlp")
         ```

@@ -1557,40 +1679,33 @@ class HfApi:

         >>> api = HfApi()

-
+        # List all models with "bert" in their name
         >>> api.list_models(search="bert")

-
+        # List all models with "bert" in their name made by google
         >>> api.list_models(search="bert", author="google")
         ```
         """
+        if expand and (full or cardData or fetch_config):
+            raise ValueError("`expand` cannot be used if `full`, `cardData` or `fetch_config` are passed.")
+
         if emissions_thresholds is not None and cardData is None:
             raise ValueError("`emissions_thresholds` were passed without setting `cardData=True`.")

         path = f"{self.endpoint}/api/models"
         headers = self._build_hf_headers(token=token)
-        params = {}
-        filter_list = []
-
-        if filter is not None:
-            if isinstance(filter, ModelFilter):
-                params = self._unpack_model_filter(filter)
-            else:
-                params.update({"filter": filter})
-
-            params.update({"full": True})
+        params: Dict[str, Any] = {}

         # Build the filter list
-
-
-
-            params.update({"search": model_name})
+        filter_list: List[str] = []
+        if filter:
+            filter_list.extend([filter] if isinstance(filter, str) else filter)
         if library:
             filter_list.extend([library] if isinstance(library, str) else library)
         if task:
             filter_list.extend([task] if isinstance(task, str) else task)
         if trained_dataset:
-            if
+            if isinstance(trained_dataset, str):
                 trained_dataset = [trained_dataset]
             for dataset in trained_dataset:
                 if not dataset.startswith("dataset:"):
@@ -1600,31 +1715,37 @@ class HfApi:
             filter_list.extend([language] if isinstance(language, str) else language)
         if tags:
             filter_list.extend([tags] if isinstance(tags, str) else tags)
+        if len(filter_list) > 0:
+            params["filter"] = filter_list

+        # Handle other query params
+        if author:
+            params["author"] = author
+        if pipeline_tag:
+            params["pipeline_tag"] = pipeline_tag
+        search_list = []
+        if model_name:
+            search_list.append(model_name)
         if search:
-
+            search_list.append(search)
+        if len(search_list) > 0:
+            params["search"] = search_list
         if sort is not None:
-            params
+            params["sort"] = "lastModified" if sort == "last_modified" else sort
         if direction is not None:
-            params
+            params["direction"] = direction
         if limit is not None:
-            params
-
-
-
-
-            del params["full"]
+            params["limit"] = limit
+
+        # Request additional data
+        if full:
+            params["full"] = True
         if fetch_config:
-            params
+            params["config"] = True
         if cardData:
-            params
-            if
-            params
-
-            filter_value = params.get("filter", [])
-            if filter_value:
-                filter_list.extend([filter_value] if isinstance(filter_value, str) else list(filter_value))
-            params.update({"filter": filter_list})
+            params["cardData"] = True
+        if expand:
+            params["expand"] = expand

         # `items` is a generator
         items = paginate(path, params=params, headers=headers)
@@ -1634,63 +1755,15 @@ class HfApi:
         if "siblings" not in item:
             item["siblings"] = None
         model_info = ModelInfo(**item)
-        if emissions_thresholds is None or
+        if emissions_thresholds is None or _is_emission_within_threshold(model_info, *emissions_thresholds):
             yield model_info

-    def _unpack_model_filter(self, model_filter: ModelFilter):
-        """
-        Unpacks a [`ModelFilter`] into something readable for `list_models`
-        """
-        model_str = ""
-
-        # Handling author
-        if model_filter.author:
-            model_str = f"{model_filter.author}/"
-
-        # Handling model_name
-        if model_filter.model_name:
-            model_str += model_filter.model_name
-
-        filter_list: List[str] = []
-
-        # Handling tasks
-        if model_filter.task:
-            filter_list.extend([model_filter.task] if isinstance(model_filter.task, str) else model_filter.task)
-
-        # Handling dataset
-        if model_filter.trained_dataset:
-            if not isinstance(model_filter.trained_dataset, (list, tuple)):
-                model_filter.trained_dataset = [model_filter.trained_dataset]
-            for dataset in model_filter.trained_dataset:
-                if "dataset:" not in dataset:
-                    dataset = f"dataset:{dataset}"
-                filter_list.append(dataset)
-
-        # Handling library
-        if model_filter.library:
-            filter_list.extend(
-                [model_filter.library] if isinstance(model_filter.library, str) else model_filter.library
-            )
-
-        # Handling tags
-        if model_filter.tags:
-            filter_list.extend([model_filter.tags] if isinstance(model_filter.tags, str) else model_filter.tags)
-
-        query_dict: Dict[str, Any] = {}
-        if model_str:
-            query_dict["search"] = model_str
-        if isinstance(model_filter.language, list):
-            filter_list.extend(model_filter.language)
-        elif isinstance(model_filter.language, str):
-            filter_list.append(model_filter.language)
-        query_dict["filter"] = tuple(filter_list)
-        return query_dict
-
     @validate_hf_hub_args
     def list_datasets(
         self,
         *,
-        filter: Union[DatasetFilter, str, Iterable[str], None] = None,
+        # Search-query parameter
+        filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
         benchmark: Optional[Union[str, List[str]]] = None,
         dataset_name: Optional[str] = None,
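With `_unpack_model_filter` and `ModelFilter` gone, the same query is expressed directly through `list_models` keyword arguments. A hedged migration sketch (the filter values are illustrative):

    from huggingface_hub import HfApi

    api = HfApi()
    # 0.23.x (removed):  api.list_models(filter=ModelFilter(task="image-classification", library="pytorch"))
    # 0.24.x equivalent: pass the criteria as keyword arguments instead.
    models = api.list_models(task="image-classification", library="pytorch", limit=10)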
@@ -1698,12 +1771,16 @@ class HfApi:
         language: Optional[Union[str, List[str]]] = None,
         multilinguality: Optional[Union[str, List[str]]] = None,
         size_categories: Optional[Union[str, List[str]]] = None,
+        tags: Optional[Union[str, List[str]]] = None,
         task_categories: Optional[Union[str, List[str]]] = None,
         task_ids: Optional[Union[str, List[str]]] = None,
         search: Optional[str] = None,
+        # Sorting and pagination parameters
         sort: Optional[Union[Literal["last_modified"], str]] = None,
         direction: Optional[Literal[-1]] = None,
         limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandDatasetProperty_T]] = None,
         full: Optional[bool] = None,
         token: Union[bool, str, None] = None,
     ) -> Iterable[DatasetInfo]:
@@ -1711,9 +1788,8 @@ class HfApi:
         List datasets hosted on the Huggingface Hub, given some filters.

         Args:
-            filter (
-            A string or
-            datasets on the hub.
+            filter (`str` or `Iterable[str]`, *optional*):
+                A string or list of string to filter datasets on the hub.
             author (`str`, *optional*):
                 A string which identify the author of the returned datasets.
             benchmark (`str` or `List`, *optional*):
@@ -1736,6 +1812,8 @@ class HfApi:
                 A string or list of strings that can be used to identify datasets on
                 the Hub by the size of the dataset such as `100K<n<1M` or
                 `1M<n<10M`.
+            tags (`str` or `List`, *optional*):
+                A string tag or a list of tags to filter datasets on the Hub.
             task_categories (`str` or `List`, *optional*):
                 A string or list of strings that can be used to identify datasets on
                 the Hub by the designed task, such as `audio_classification` or
@@ -1755,6 +1833,10 @@ class HfApi:
             limit (`int`, *optional*):
                 The limit on the number of datasets fetched. Leaving this option
                 to `None` fetches all datasets.
+            expand (`List[ExpandDatasetProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gitalyUid"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"` and `"tags"`.
             full (`bool`, *optional*):
                 Whether to fetch all dataset data, including the `last_modified`,
                 the `card_data` and the files. Can contain useful information such as the
@@ -1775,20 +1857,21 @@ class HfApi:

         >>> api = HfApi()

-
+        # List all datasets
         >>> api.list_datasets()

-
+        # List only the text classification datasets
         >>> api.list_datasets(filter="task_categories:text-classification")

-
+        # List only the datasets in russian for language modeling
         >>> api.list_datasets(
         ...     filter=("language:ru", "task_ids:language-modeling")
         ... )

-
+        # List FiftyOne datasets (identified by the tag "fiftyone" in dataset card)
+        >>> api.list_datasets(tags="fiftyone")
         ```

         Example usage with the `search` argument:
@@ -1798,62 +1881,70 @@ class HfApi:

         >>> api = HfApi()

-
+        # List all datasets with "text" in their name
         >>> api.list_datasets(search="text")

-
+        # List all datasets with "text" in their name made by google
         >>> api.list_datasets(search="text", author="google")
         ```
         """
+        if expand and full:
+            raise ValueError("`expand` cannot be used if `full` is passed.")
+
         path = f"{self.endpoint}/api/datasets"
         headers = self._build_hf_headers(token=token)
-        params = {}
-        filter_list = []
+        params: Dict[str, Any] = {}

+        # Build `filter` list
+        filter_list = []
         if filter is not None:
-            if isinstance(filter, DatasetFilter):
-                params = self._unpack_dataset_filter(filter)
+            if isinstance(filter, str):
+                filter_list.append(filter)
             else:
-                params.update({"filter": filter})
-
-
-
-
-
-
-
-
-            benchmark,
-            language_creators,
-            language,
-            multilinguality,
-            size_categories,
-            task_categories,
-            task_ids,
+                filter_list.extend(filter)
+        for key, value in (
+            ("benchmark", benchmark),
+            ("language_creators", language_creators),
+            ("language", language),
+            ("multilinguality", multilinguality),
+            ("size_categories", size_categories),
+            ("task_categories", task_categories),
+            ("task_ids", task_ids),
         ):
-            if
-            if
-
-            for
-                if not
-                    data = f"{
+            if value:
+                if isinstance(value, str):
+                    value = [value]
+                for value_item in value:
+                    if not value_item.startswith(f"{key}:"):
+                        data = f"{key}:{value_item}"
                     filter_list.append(data)
+        if tags is not None:
+            filter_list.extend([tags] if isinstance(tags, str) else tags)
+        if len(filter_list) > 0:
+            params["filter"] = filter_list

+        # Handle other query params
+        if author:
+            params["author"] = author
+        search_list = []
+        if dataset_name:
+            search_list.append(dataset_name)
         if search:
-
+            search_list.append(search)
+        if len(search_list) > 0:
+            params["search"] = search_list
         if sort is not None:
-            params
+            params["sort"] = "lastModified" if sort == "last_modified" else sort
         if direction is not None:
-            params
+            params["direction"] = direction
         if limit is not None:
-            params
-        if full:
-            params.update({"full": True})
+            params["limit"] = limit

-
-        if
-
-
+        # Request additional data
+        if expand:
+            params["expand"] = expand
+        if full:
+            params["full"] = True

         items = paginate(path, params=params, headers=headers)
         if limit is not None:
@@ -1863,47 +1954,6 @@ class HfApi:
             item["siblings"] = None
         yield DatasetInfo(**item)

-    def _unpack_dataset_filter(self, dataset_filter: DatasetFilter):
-        """
-        Unpacks a [`DatasetFilter`] into something readable for `list_datasets`
-        """
-        dataset_str = ""
-
-        # Handling author
-        if dataset_filter.author:
-            dataset_str = f"{dataset_filter.author}/"
-
-        # Handling dataset_name
-        if dataset_filter.dataset_name:
-            dataset_str += dataset_filter.dataset_name
-
-        filter_list = []
-        data_attributes = [
-            "benchmark",
-            "language_creators",
-            "language",
-            "multilinguality",
-            "size_categories",
-            "task_categories",
-            "task_ids",
-        ]
-
-        for attr in data_attributes:
-            curr_attr = getattr(dataset_filter, attr)
-            if curr_attr is not None:
-                if not isinstance(curr_attr, (list, tuple)):
-                    curr_attr = [curr_attr]
-                for data in curr_attr:
-                    if f"{attr}:" not in data:
-                        data = f"{attr}:{data}"
-                    filter_list.append(data)
-
-        query_dict: Dict[str, Any] = {}
-        if dataset_str is not None:
-            query_dict["search"] = dataset_str
-        query_dict["filter"] = tuple(filter_list)
-        return query_dict
-
     def list_metrics(self) -> List[MetricInfo]:
         """
         Get the public list of all the metrics on huggingface.co
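`DatasetFilter` is removed in the same way; its fields map onto `list_datasets` keyword arguments, which build the same `key:value` filter strings as the inlined loop above. An illustrative sketch:

    from huggingface_hub import HfApi

    api = HfApi()
    # 0.23.x (removed):  api.list_datasets(filter=DatasetFilter(language="ru", task_ids="language-modeling"))
    # 0.24.x equivalent: each value is prefixed with its key, e.g. "language:ru".
    datasets = api.list_datasets(language="ru", task_ids="language-modeling", limit=10)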
@@ -1921,15 +1971,19 @@ class HfApi:
     def list_spaces(
         self,
         *,
+        # Search-query parameter
         filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
         search: Optional[str] = None,
-        sort: Union[Literal["last_modified"], str, None] = None,
-        direction: Optional[Literal[-1]] = None,
-        limit: Optional[int] = None,
         datasets: Union[str, Iterable[str], None] = None,
         models: Union[str, Iterable[str], None] = None,
         linked: bool = False,
+        # Sorting and pagination parameters
+        sort: Union[Literal["last_modified"], str, None] = None,
+        direction: Optional[Literal[-1]] = None,
+        limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandSpaceProperty_T]] = None,
         full: Optional[bool] = None,
         token: Union[bool, str, None] = None,
     ) -> Iterable[SpaceInfo]:
@@ -1943,6 +1997,14 @@ class HfApi:
             A string which identify the author of the returned Spaces.
         search (`str`, *optional*):
             A string that will be contained in the returned Spaces.
+        datasets (`str` or `Iterable`, *optional*):
+            Whether to return Spaces that make use of a dataset.
+            The name of a specific dataset can be passed as a string.
+        models (`str` or `Iterable`, *optional*):
+            Whether to return Spaces that make use of a model.
+            The name of a specific model can be passed as a string.
+        linked (`bool`, *optional*):
+            Whether to return Spaces that make use of either a model or a dataset.
         sort (`Literal["last_modified"]` or `str`, *optional*):
             The key with which to sort the resulting Spaces. Possible
             values are the properties of the [`huggingface_hub.hf_api.SpaceInfo`]` class.
@@ -1952,14 +2014,10 @@ class HfApi:
         limit (`int`, *optional*):
             The limit on the number of Spaces fetched. Leaving this option
             to `None` fetches all Spaces.
-        datasets (`str` or `Iterable`, *optional*):
-            Whether to return Spaces that make use of a dataset.
-            The name of a specific dataset can be passed as a string.
-        models (`str` or `Iterable`, *optional*):
-            Whether to return Spaces that make use of a model.
-            The name of a specific model can be passed as a string.
-        linked (`bool`, *optional*):
-            Whether to return Spaces that make use of either a model or a dataset.
+        expand (`List[ExpandSpaceProperty_T]`, *optional*):
+            List properties to return in the response. When used, only the properties in the list will be returned.
+            This parameter cannot be used if `full` is passed.
+            Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"gitalyUid"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"models"`.
         full (`bool`, *optional*):
             Whether to fetch all Spaces data, including the `last_modified`, `siblings`
             and `card_data` fields.
@@ -1972,29 +2030,36 @@ class HfApi:
         Returns:
             `Iterable[SpaceInfo]`: an iterable of [`huggingface_hub.hf_api.SpaceInfo`] objects.
         """
+        if expand and full:
+            raise ValueError("`expand` cannot be used if `full` is passed.")
+
         path = f"{self.endpoint}/api/spaces"
         headers = self._build_hf_headers(token=token)
         params: Dict[str, Any] = {}
         if filter is not None:
-            params
+            params["filter"] = filter
         if author is not None:
-            params
+            params["author"] = author
         if search is not None:
-            params
+            params["search"] = search
         if sort is not None:
-            params
+            params["sort"] = "lastModified" if sort == "last_modified" else sort
         if direction is not None:
-            params
+            params["direction"] = direction
         if limit is not None:
-            params
-        if full:
-            params.update({"full": True})
+            params["limit"] = limit
         if linked:
-            params
+            params["linked"] = True
         if datasets is not None:
-            params
+            params["datasets"] = datasets
         if models is not None:
-            params
+            params["models"] = models
+
+        # Request additional data
+        if expand:
+            params["expand"] = expand
+        if full:
+            params["full"] = True

         items = paginate(path, params=params, headers=headers)
         if limit is not None:
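A short sketch combining the reordered `models` filter with the new `expand` parameter (the model id is illustrative):

    from huggingface_hub import HfApi

    api = HfApi()
    # Spaces that use a given model, fetching only a couple of properties each.
    for space in api.list_spaces(models="stabilityai/stable-diffusion-2", expand=["likes", "sdk"], limit=5):
        print(space.id, space.likes, space.sdk)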
@@ -2243,6 +2308,7 @@ class HfApi:
         timeout: Optional[float] = None,
         securityStatus: Optional[bool] = None,
         files_metadata: bool = False,
+        expand: Optional[List[ExpandModelProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> ModelInfo:
         """
@@ -2265,6 +2331,10 @@ class HfApi:
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandModelProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
+                Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gitalyUid"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"` and `"widgetData"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2286,17 +2356,22 @@ class HfApi:

         </Tip>
         """
+        if expand and (securityStatus or files_metadata):
+            raise ValueError("`expand` cannot be used if `securityStatus` or `files_metadata` are set.")
+
         headers = self._build_hf_headers(token=token)
         path = (
             f"{self.endpoint}/api/models/{repo_id}"
             if revision is None
             else (f"{self.endpoint}/api/models/{repo_id}/revision/{quote(revision, safe='')}")
         )
-        params = {}
+        params: Dict = {}
         if securityStatus:
             params["securityStatus"] = True
         if files_metadata:
             params["blobs"] = True
+        if expand:
+            params["expand"] = expand
         r = get_session().get(path, headers=headers, timeout=timeout, params=params)
         hf_raise_for_status(r)
         data = r.json()
@@ -2310,6 +2385,7 @@ class HfApi:
         revision: Optional[str] = None,
         timeout: Optional[float] = None,
         files_metadata: bool = False,
+        expand: Optional[List[ExpandDatasetProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> DatasetInfo:
         """
@@ -2329,6 +2405,10 @@ class HfApi:
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandDatasetProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `files_metadata` is passed.
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gitalyUid"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"` and `"tags"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2350,15 +2430,20 @@ class HfApi:

         </Tip>
         """
+        if expand and files_metadata:
+            raise ValueError("`expand` cannot be used if `files_metadata` is set.")
+
         headers = self._build_hf_headers(token=token)
         path = (
             f"{self.endpoint}/api/datasets/{repo_id}"
             if revision is None
             else (f"{self.endpoint}/api/datasets/{repo_id}/revision/{quote(revision, safe='')}")
         )
-        params = {}
+        params: Dict = {}
         if files_metadata:
             params["blobs"] = True
+        if expand:
+            params["expand"] = expand

         r = get_session().get(path, headers=headers, timeout=timeout, params=params)
         hf_raise_for_status(r)
@@ -2373,6 +2458,7 @@ class HfApi:
         revision: Optional[str] = None,
         timeout: Optional[float] = None,
         files_metadata: bool = False,
+        expand: Optional[List[ExpandSpaceProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> SpaceInfo:
         """
@@ -2392,6 +2478,10 @@ class HfApi:
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandSpaceProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"gitalyUid"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"models"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2413,15 +2503,20 @@ class HfApi:

         </Tip>
         """
+        if expand and files_metadata:
+            raise ValueError("`expand` cannot be used if `files_metadata` is set.")
+
         headers = self._build_hf_headers(token=token)
         path = (
             f"{self.endpoint}/api/spaces/{repo_id}"
             if revision is None
             else (f"{self.endpoint}/api/spaces/{repo_id}/revision/{quote(revision, safe='')}")
         )
-        params = {}
+        params: Dict = {}
         if files_metadata:
             params["blobs"] = True
+        if expand:
+            params["expand"] = expand

         r = get_session().get(path, headers=headers, timeout=timeout, params=params)
         hf_raise_for_status(r)
@@ -2437,6 +2532,7 @@ class HfApi:
         repo_type: Optional[str] = None,
         timeout: Optional[float] = None,
         files_metadata: bool = False,
+        expand: Optional[Union[ExpandModelProperty_T, ExpandDatasetProperty_T, ExpandSpaceProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> Union[ModelInfo, DatasetInfo, SpaceInfo]:
         """
@@ -2454,6 +2550,10 @@ class HfApi:
             `None` or `"model"` if getting repository info from a model. Default is `None`.
         timeout (`float`, *optional*):
             Whether to set a timeout for the request to the Hub.
+        expand (`ExpandModelProperty_T` or `ExpandDatasetProperty_T` or `ExpandSpaceProperty_T`, *optional*):
+            List properties to return in the response. When used, only the properties in the list will be returned.
+            This parameter cannot be used if `files_metadata` is passed.
+            For an exhaustive list of available properties, check out [`model_info`], [`dataset_info`] or [`space_info`].
         files_metadata (`bool`, *optional*):
             Whether or not to retrieve metadata for files in the repository
             (size, LFS metadata, etc). Defaults to `False`.
@@ -2493,6 +2593,7 @@ class HfApi:
             revision=revision,
             token=token,
             timeout=timeout,
+            expand=expand,  # type: ignore[arg-type]
             files_metadata=files_metadata,
         )

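`repo_info` simply forwards `expand` to the type-specific method (hence the `type: ignore`). A sketch (the repo id is a placeholder):

    from huggingface_hub import HfApi

    api = HfApi()
    # Works for models, datasets and Spaces alike; here a dataset is used as an example.
    info = api.repo_info("username/my-dataset", repo_type="dataset", expand=["downloads", "tags"])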
@@ -3144,6 +3245,7 @@ class HfApi:
         private: bool = False,
         repo_type: Optional[str] = None,
         exist_ok: bool = False,
+        resource_group_id: Optional[str] = None,
         space_sdk: Optional[str] = None,
         space_hardware: Optional[SpaceHardware] = None,
         space_storage: Optional[SpaceStorage] = None,
@@ -3170,6 +3272,11 @@ class HfApi:
             `None`.
         exist_ok (`bool`, *optional*, defaults to `False`):
             If `True`, do not raise an error if repo already exists.
+        resource_group_id (`str`, *optional*):
+            Resource group in which to create the repo. Resource groups is only available for organizations and
+            allow to define which members of the organization can access the resource. The ID of a resource group
+            can be found in the URL of the resource's page on the Hub (e.g. `"66670e5163145ca562cb1988"`).
+            To learn more about resource groups, see https://huggingface.co/docs/hub/en/security-resource-groups.
         space_sdk (`str`, *optional*):
             Choice of SDK to use if repo_type is "space". Can be "streamlit", "gradio", "docker", or "static".
         space_hardware (`SpaceHardware` or `str`, *optional*):
@@ -3237,8 +3344,11 @@ class HfApi:
             # Testing purposes only.
             # See https://github.com/huggingface/huggingface_hub/pull/733/files#r820604472
             json["lfsmultipartthresh"] = self._lfsmultipartthresh  # type: ignore
-        headers = self._build_hf_headers(token=token)

+        if resource_group_id is not None:
+            json["resourceGroupId"] = resource_group_id
+
+        headers = self._build_hf_headers(token=token)
         while True:
             r = get_session().post(path, headers=headers, json=json)
             if r.status_code == 409 and "Cannot create repo: another conflicting operation is in progress" in r.text:
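A sketch of creating a repo inside a resource group (the org name is a placeholder; the group ID is the example value from the docstring above):

    from huggingface_hub import HfApi

    api = HfApi()
    # Only organization members with access to this resource group will see the repo.
    api.create_repo("my-org/private-model", private=True, resource_group_id="66670e5163145ca562cb1988")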
@@ -3301,7 +3411,7 @@ class HfApi:
             If `True`, do not raise an error if repo does not exist.

         Raises:
-
+            [`~utils.RepositoryNotFoundError`]
             If the repository to delete from cannot be found and `missing_ok` is set to False (default).
         """
         organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id)
@@ -3324,25 +3434,19 @@ class HfApi:
             raise

     @validate_hf_hub_args
-    @_deprecate_arguments(
-        version="0.24.0", deprecated_args=("organization", "name"), custom_message="Use `repo_id` instead."
-    )
     def update_repo_visibility(
         self,
         repo_id: str,
         private: bool = False,
         *,
         token: Union[str, bool, None] = None,
-        organization: Optional[str] = None,
         repo_type: Optional[str] = None,
-        name: Optional[str] = None,
     ) -> Dict[str, bool]:
         """Update the visibility setting of a repository.

         Args:
             repo_id (`str`, *optional*):
-                A namespace (user or an organization) and a repo name separated
-                by a `/`.
+                A namespace (user or an organization) and a repo name separated by a `/`.
             private (`bool`, *optional*, defaults to `False`):
                 Whether the model repo should be private.
             token (Union[bool, str, None], optional):
@@ -3369,20 +3473,12 @@ class HfApi:
         </Tip>
         """
         if repo_type not in REPO_TYPES:
-            raise ValueError("Invalid repo type")
-
-        organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id)
-
-        if organization is None:
-            namespace = self.whoami(token)["name"]
-        else:
-            namespace = organization
-
+            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
         if repo_type is None:
             repo_type = REPO_TYPE_MODEL  # default repo type

         r = get_session().put(
-            url=f"{self.endpoint}/api/{repo_type}s/{
+            url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
             headers=self._build_hf_headers(token=token),
             json={"private": private},
         )
@@ -3684,6 +3780,46 @@ class HfApi:
             num_threads=num_threads,
             free_memory=False,  # do not remove `CommitOperationAdd.path_or_fileobj` on LFS files for "normal" users
         )
+
+        # Remove no-op operations (files that have not changed)
+        operations_without_no_op = []
+        for operation in operations:
+            if (
+                isinstance(operation, CommitOperationAdd)
+                and operation._remote_oid is not None
+                and operation._remote_oid == operation._local_oid
+            ):
+                # File already exists on the Hub and has not changed: we can skip it.
+                logger.debug(f"Skipping upload for '{operation.path_in_repo}' as the file has not changed.")
+                continue
+            operations_without_no_op.append(operation)
+        if len(operations) != len(operations_without_no_op):
+            logger.info(
+                f"Removing {len(operations) - len(operations_without_no_op)} file(s) from commit that have not changed."
+            )
+
+        # Return early if empty commit
+        if len(operations_without_no_op) == 0:
+            logger.warning("No files have been modified since last commit. Skipping to prevent empty commit.")
+
+            # Get latest commit info
+            try:
+                info = self.repo_info(repo_id=repo_id, repo_type=repo_type, revision=revision, token=token)
+            except RepositoryNotFoundError as e:
+                e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE)
+                raise
+
+            # Return commit info based on latest commit
+            url_prefix = self.endpoint
+            if repo_type is not None and repo_type != REPO_TYPE_MODEL:
+                url_prefix = f"{url_prefix}/{repo_type}s"
+            return CommitInfo(
+                commit_url=f"{url_prefix}/{repo_id}/commit/{info.sha}",
+                commit_message=commit_message,
+                commit_description=commit_description,
+                oid=info.sha,  # type: ignore[arg-type]
+            )
+
         files_to_copy = _fetch_files_to_copy(
             copies=copies,
             repo_type=repo_type,
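
Note: with this change, `create_commit` drops additions whose local and remote OIDs already match, so re-uploading identical content no longer creates an empty commit. A minimal sketch of the observable behaviour (repo id and file name are placeholders):

```python
from huggingface_hub import HfApi

api = HfApi()
# Uploading the same bytes twice: the second call should skip the unchanged
# file and return a CommitInfo pointing at the latest existing commit.
api.upload_file(path_or_fileobj=b"same content", path_in_repo="data.txt", repo_id="username/my-model")
info = api.upload_file(path_or_fileobj=b"same content", path_in_repo="data.txt", repo_id="username/my-model")
print(info.oid)  # oid of the latest commit; no new commit was created
```
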
@@ -4680,7 +4816,7 @@ class HfApi:
             ignore_patterns = [ignore_patterns]
         ignore_patterns += DEFAULT_IGNORE_PATTERNS

-        delete_operations = self._prepare_upload_folder_deletions(
+        delete_operations = self._prepare_folder_deletions(
             repo_id=repo_id,
             repo_type=repo_type,
             revision=DEFAULT_REVISION if create_pr else revision,
@@ -4841,6 +4977,82 @@ class HfApi:
             parent_commit=parent_commit,
         )

+    @validate_hf_hub_args
+    def delete_files(
+        self,
+        repo_id: str,
+        delete_patterns: List[str],
+        *,
+        token: Union[bool, str, None] = None,
+        repo_type: Optional[str] = None,
+        revision: Optional[str] = None,
+        commit_message: Optional[str] = None,
+        commit_description: Optional[str] = None,
+        create_pr: Optional[bool] = None,
+        parent_commit: Optional[str] = None,
+    ) -> CommitInfo:
+        """
+        Delete files from a repository on the Hub.
+
+        If a folder path is provided, the entire folder is deleted as well as
+        all files it contained.
+
+        Args:
+            repo_id (`str`):
+                The repository from which the folder will be deleted, for example:
+                `"username/custom_transformers"`
+            delete_patterns (`List[str]`):
+                List of files or folders to delete. Each string can either be
+                a file path, a folder path or a Unix shell-style wildcard.
+                E.g. `["file.txt", "folder/", "data/*.parquet"]`
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+            repo_type (`str`, *optional*):
+                Type of the repo to delete files from. Can be `"model"`,
+                `"dataset"` or `"space"`. Defaults to `"model"`.
+            revision (`str`, *optional*):
+                The git revision to commit from. Defaults to the head of the `"main"` branch.
+            commit_message (`str`, *optional*):
+                The summary (first line) of the generated commit. Defaults to
+                `f"Delete files {' '.join(delete_patterns)} with huggingface_hub"`.
+            commit_description (`str`, *optional*):
+                The description of the generated commit.
+            create_pr (`boolean`, *optional*):
+                Whether or not to create a Pull Request with that commit. Defaults to `False`.
+                If `revision` is not set, PR is opened against the `"main"` branch. If
+                `revision` is set and is a branch, PR is opened against this branch. If
+                `revision` is set and is not a branch name (example: a commit oid), a
+                `RevisionNotFoundError` is returned by the server.
+            parent_commit (`str`, *optional*):
+                The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported.
+                If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`.
+                If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`.
+                Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be
+                especially useful if the repo is updated / committed to concurrently.
+        """
+        operations = self._prepare_folder_deletions(
+            repo_id=repo_id, repo_type=repo_type, delete_patterns=delete_patterns, path_in_repo="", revision=revision
+        )
+
+        if commit_message is None:
+            commit_message = f"Delete files {' '.join(delete_patterns)} with huggingface_hub"
+
+        return self.create_commit(
+            repo_id=repo_id,
+            repo_type=repo_type,
+            token=token,
+            operations=operations,
+            revision=revision,
+            commit_message=commit_message,
+            commit_description=commit_description,
+            create_pr=create_pr,
+            parent_commit=parent_commit,
+        )
+
     @validate_hf_hub_args
     def delete_folder(
         self,
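
Note: the new `delete_files` accepts exact paths, folder paths and Unix-style wildcards in a single commit. A minimal usage sketch (repo id and patterns are placeholders, taken from the docstring above):

```python
from huggingface_hub import HfApi

api = HfApi()
# Remove one file, a whole folder, and every parquet file under data/ at once.
api.delete_files(
    repo_id="username/my-dataset",
    delete_patterns=["file.txt", "folder/", "data/*.parquet"],
    repo_type="dataset",
)
```
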
@@ -5003,7 +5215,7 @@ class HfApi:
         ```

         If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this
-        option, the `cache_dir` will not be used and a `.huggingface/` folder will be created at the root of `local_dir`
+        option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir`
         to store some metadata related to the downloaded files. While this mechanism is not as robust as the main
         cache-system, it's optimized for regularly pulling the latest version of a repository.
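
Note: the metadata folder used for `local_dir` downloads moved from `.huggingface/` to `.cache/huggingface/` at the root of `local_dir`. A minimal sketch (repo id and filename are placeholders):

```python
from huggingface_hub import hf_hub_download

# After this call, ./my-model/.cache/huggingface/ holds the metadata used to
# keep local_dir in sync on later pulls.
path = hf_hub_download(repo_id="username/my-model", filename="config.json", local_dir="./my-model")
print(path)  # ./my-model/config.json
```
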
@@ -5046,21 +5258,21 @@ class HfApi:
             `str`: Local path of file or if networking is off, last version of file cached on disk.

         Raises:
-            - [`~utils.RepositoryNotFoundError`]
-              If the repository to download from cannot be found. This may be because it doesn't exist,
-              or because it is set to `private` and you do not have access.
-            - [`~utils.RevisionNotFoundError`]
-              If the revision to download from cannot be found.
-            - [`~utils.EntryNotFoundError`]
-              If the file to download cannot be found.
-            - [`~utils.LocalEntryNotFoundError`]
-              If network is disabled or unavailable and file is not found in cache.
-            - [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
-              If `token=True` but the token cannot be found.
-            - [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
-              If ETag cannot be determined.
-            - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
-              If some parameter value is invalid.
+            [`~utils.RepositoryNotFoundError`]
+                If the repository to download from cannot be found. This may be because it doesn't exist,
+                or because it is set to `private` and you do not have access.
+            [`~utils.RevisionNotFoundError`]
+                If the revision to download from cannot be found.
+            [`~utils.EntryNotFoundError`]
+                If the file to download cannot be found.
+            [`~utils.LocalEntryNotFoundError`]
+                If network is disabled or unavailable and file is not found in cache.
+            [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+                If `token=True` but the token cannot be found.
+            [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
+                If ETag cannot be determined.
+            [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+                If some parameter value is invalid.
         """
         from .file_download import hf_hub_download
@@ -5122,7 +5334,7 @@ class HfApi:
         `allow_patterns` and `ignore_patterns`.

         If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this
-        option, the `cache_dir` will not be used and a `.huggingface/` folder will be created at the root of `local_dir`
+        option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir`
         to store some metadata related to the downloaded files. While this mechanism is not as robust as the main
         cache-system, it's optimized for regularly pulling the latest version of a repository.
@@ -5176,12 +5388,17 @@ class HfApi:
             `str`: folder path of the repo snapshot.

         Raises:
-            - [`~utils.RepositoryNotFoundError`]
-              If the repository to download from cannot be found. This may be because it doesn't exist,
-              or because it is set to `private` and you do not have access.
-            - [`~utils.RevisionNotFoundError`]
-              If the revision to download from cannot be found.
-            - [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) if `token=True` and the token cannot be found.
+            [`~utils.RepositoryNotFoundError`]
+                If the repository to download from cannot be found. This may be because it doesn't exist,
+                or because it is set to `private` and you do not have access.
+            [`~utils.RevisionNotFoundError`]
+                If the revision to download from cannot be found.
+            [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+                If `token=True` and the token cannot be found.
+            [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
+                If ETag cannot be determined.
+            [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+                If some parameter value is invalid.
         """
         from ._snapshot_download import snapshot_download
@@ -5252,9 +5469,11 @@ class HfApi:
             [`SafetensorsRepoMetadata`]: information related to safetensors repo.

         Raises:
-            `NotASafetensorsRepoError`: if the repo is not a safetensors repo i.e. doesn't have either a
+            [`NotASafetensorsRepoError`]
+                If the repo is not a safetensors repo i.e. doesn't have either a
                 `model.safetensors` or a `model.safetensors.index.json` file.
-            `SafetensorsParsingError`: if a safetensors file header couldn't be parsed correctly.
+            [`SafetensorsParsingError`]
+                If a safetensors file header couldn't be parsed correctly.

         Example:
         ```py
@@ -5371,9 +5590,11 @@ class HfApi:
             [`SafetensorsFileMetadata`]: information related to a safetensors file.

         Raises:
-            `NotASafetensorsRepoError`: if the repo is not a safetensors repo i.e. doesn't have either a
+            [`NotASafetensorsRepoError`]:
+                If the repo is not a safetensors repo i.e. doesn't have either a
                 `model.safetensors` or a `model.safetensors.index.json` file.
-            `SafetensorsParsingError`: if a safetensors file header couldn't be parsed correctly.
+            [`SafetensorsParsingError`]:
+                If a safetensors file header couldn't be parsed correctly.
         """
         url = hf_hub_url(
             repo_id=repo_id, filename=filename, repo_type=repo_type, revision=revision, endpoint=self.endpoint
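
Note: both safetensors helpers now document the two dedicated exceptions. A minimal sketch of catching them (the repo id is a placeholder; attribute names follow [`SafetensorsRepoMetadata`]):

```python
from huggingface_hub import HfApi
from huggingface_hub.utils import NotASafetensorsRepoError, SafetensorsParsingError

api = HfApi()
try:
    metadata = api.get_safetensors_metadata("username/my-model")  # placeholder repo_id
    print(metadata.sharded, metadata.parameter_count)
except NotASafetensorsRepoError:
    print("Repo has neither model.safetensors nor model.safetensors.index.json")
except SafetensorsParsingError:
    print("A safetensors file header could not be parsed")
```
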
@@ -6914,11 +7135,11 @@ class HfApi:
         attributes like `endpoint`, `repo_type` and `repo_id`.

         Raises:
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
-              if the HuggingFace API returned an error
-            - [`~utils.RepositoryNotFoundError`]
+            [`~utils.RepositoryNotFoundError`]:
                 If one of `from_id` or `to_id` cannot be found. This may be because it doesn't exist,
                 or because it is set to `private` and you do not have access.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                If the HuggingFace API returned an error

         Example:
         ```python
@@ -6988,7 +7209,7 @@ class HfApi:

         Args:
             repo_id (`str`):
-                ID of the Space to update. Example: `"HuggingFaceH4/open_llm_leaderboard"`.
+                ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`.
             storage (`str` or [`SpaceStorage`]):
                 Storage tier. Either 'small', 'medium', or 'large'.
             token (Union[bool, str, None], optional):
@@ -7026,7 +7247,7 @@ class HfApi:

         Args:
             repo_id (`str`):
-                ID of the Space to update. Example: `"HuggingFaceH4/open_llm_leaderboard"`.
+                ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -7141,9 +7362,9 @@ class HfApi:
             accelerator (`str`):
                 The hardware accelerator to be used for inference (e.g. `"cpu"`).
             instance_size (`str`):
-                The size or type of the instance to be used for hosting the model (e.g. `"medium"`).
+                The size or type of the instance to be used for hosting the model (e.g. `"x4"`).
             instance_type (`str`):
-                The cloud instance type where the Inference Endpoint will be deployed (e.g. `"c6i"`).
+                The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`).
             region (`str`):
                 The cloud region in which the Inference Endpoint will be created (e.g. `"us-east-1"`).
             vendor (`str`):
@@ -7178,7 +7399,7 @@ class HfApi:
         ```python
         >>> from huggingface_hub import HfApi
         >>> api = HfApi()
-        >>> create_inference_endpoint(
+        >>> endpoint = api.create_inference_endpoint(
         ...     "my-endpoint-name",
         ...     repository="gpt2",
         ...     framework="pytorch",
@@ -7187,8 +7408,8 @@ class HfApi:
         ...     vendor="aws",
         ...     region="us-east-1",
         ...     type="protected",
-        ...     instance_size="medium",
-        ...     instance_type="c6i",
+        ...     instance_size="x2",
+        ...     instance_type="intel-icl",
         ... )
         >>> endpoint
         InferenceEndpoint(name='my-endpoint-name', status="pending",...)
@@ -7202,7 +7423,7 @@ class HfApi:
         # Start an Inference Endpoint running Zephyr-7b-beta on TGI
         >>> from huggingface_hub import HfApi
         >>> api = HfApi()
-        >>> create_inference_endpoint(
+        >>> endpoint = api.create_inference_endpoint(
         ...     "aws-zephyr-7b-beta-0486",
         ...     repository="HuggingFaceH4/zephyr-7b-beta",
         ...     framework="pytorch",
@@ -7211,8 +7432,8 @@ class HfApi:
         ...     vendor="aws",
         ...     region="us-east-1",
         ...     type="protected",
-        ...     instance_size="medium",
-        ...     instance_type="g5.2xlarge",
+        ...     instance_size="x1",
+        ...     instance_type="nvidia-a10g",
         ...     custom_image={
         ...         "health_route": "/health",
         ...         "env": {
@@ -7327,6 +7548,7 @@ class HfApi:
         framework: Optional[str] = None,
         revision: Optional[str] = None,
         task: Optional[str] = None,
+        custom_image: Optional[Dict] = None,
         # Other
         namespace: Optional[str] = None,
         token: Union[bool, str, None] = None,
@@ -7345,9 +7567,9 @@ class HfApi:
             accelerator (`str`, *optional*):
                 The hardware accelerator to be used for inference (e.g. `"cpu"`).
             instance_size (`str`, *optional*):
-                The size or type of the instance to be used for hosting the model (e.g. `"medium"`).
+                The size or type of the instance to be used for hosting the model (e.g. `"x4"`).
             instance_type (`str`, *optional*):
-                The cloud instance type where the Inference Endpoint will be deployed (e.g. `"c6i"`).
+                The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`).
             min_replica (`int`, *optional*):
                 The minimum number of replicas (instances) to keep running for the Inference Endpoint.
             max_replica (`int`, *optional*):
@@ -7361,6 +7583,9 @@ class HfApi:
                 The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
             task (`str`, *optional*):
                 The task on which to deploy the model (e.g. `"text-classification"`).
+            custom_image (`Dict`, *optional*):
+                A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
+                Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).

             namespace (`str`, *optional*):
                 The namespace where the Inference Endpoint will be updated. Defaults to the current user's namespace.
@@ -7386,13 +7611,14 @@ class HfApi:
                 "minReplica": min_replica,
             },
         }
-        if any(value is not None for value in (repository, framework, revision, task)):
+        if any(value is not None for value in (repository, framework, revision, task, custom_image)):
+            image = {"custom": custom_image} if custom_image is not None else {"huggingface": {}}
             payload["model"] = {
                 "framework": framework,
                 "repository": repository,
                 "revision": revision,
                 "task": task,
-                "image": {"huggingface": {}},
+                "image": image,
             }

         response = get_session().put(
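
Note: `update_inference_endpoint` now forwards a `custom_image` dict as `{"custom": custom_image}` in the `model.image` payload. A minimal sketch (endpoint name, image URL and env values are illustrative):

```python
from huggingface_hub import HfApi

api = HfApi()
# Switch an existing Inference Endpoint to a custom TGI image.
api.update_inference_endpoint(
    "my-endpoint-name",
    custom_image={
        "health_route": "/health",
        "url": "ghcr.io/huggingface/text-generation-inference:latest",
        "env": {"MAX_TOTAL_TOKENS": "4096", "MODEL_ID": "/repository"},
    },
)
```
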
@@ -7468,7 +7694,12 @@ class HfApi:
         return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token)

     def resume_inference_endpoint(
-        self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None
+        self,
+        name: str,
+        *,
+        namespace: Optional[str] = None,
+        running_ok: bool = True,
+        token: Union[bool, str, None] = None,
     ) -> InferenceEndpoint:
         """Resume an Inference Endpoint.
@@ -7479,6 +7710,9 @@ class HfApi:
                 The name of the Inference Endpoint to resume.
             namespace (`str`, *optional*):
                 The namespace in which the Inference Endpoint is located. Defaults to the current user.
+            running_ok (`bool`, *optional*):
+                If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults to
+                `True`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -7494,7 +7728,14 @@ class HfApi:
             f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/resume",
             headers=self._build_hf_headers(token=token),
         )
-        hf_raise_for_status(response)
+        try:
+            hf_raise_for_status(response)
+        except HfHubHTTPError as error:
+            # If already running (and it's ok), then fetch current status and return
+            if running_ok and error.response.status_code == 400 and "already running" in error.response.text:
+                return self.get_inference_endpoint(name, namespace=namespace, token=token)
+            # Otherwise, raise the error
+            raise

         return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token)
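
Note: resuming an endpoint that is already running now returns its current state instead of failing, unless `running_ok=False`. A minimal sketch (the endpoint name is a placeholder):

```python
from huggingface_hub import HfApi

api = HfApi()
# With running_ok=True (the default), resuming an already-running endpoint
# returns it as-is instead of raising on the server's HTTP 400.
endpoint = api.resume_inference_endpoint("my-endpoint-name")
# To restore the old strict behaviour:
# endpoint = api.resume_inference_endpoint("my-endpoint-name", running_ok=False)
print(endpoint.status)
```
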
@@ -7855,12 +8096,12 @@ class HfApi:
         Returns: [`Collection`]

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the item you try to add to the collection does not exist on the Hub.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 409 if the item you try to add to the collection is already in the collection (and exists_ok=False)

         Example:
@@ -8044,9 +8285,9 @@ class HfApi:
             be populated with user's answers.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
@@ -8110,9 +8351,9 @@ class HfApi:
             be populated with user's answers.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
@@ -8172,9 +8413,9 @@ class HfApi:
             be populated with user's answers.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
@@ -8222,7 +8463,7 @@ class HfApi:
                 AccessRequest(
                     username=request["user"]["user"],
                     fullname=request["user"]["fullname"],
-                    email=request["user"]["email"],
+                    email=request["user"].get("email"),
                     status=request["status"],
                     timestamp=parse_datetime(request["timestamp"]),
                     fields=request.get("fields"),  # only if custom fields in form
@@ -8256,16 +8497,16 @@ class HfApi:
                 To disable authentication, pass `False`.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user does not exist on the Hub.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user access request cannot be found.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user access request is already in the pending list.
         """
         self._handle_access_request(repo_id, user, "pending", repo_type=repo_type, token=token)
@@ -8298,16 +8539,16 @@ class HfApi:
                 To disable authentication, pass `False`.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user does not exist on the Hub.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user access request cannot be found.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user access request is already in the accepted list.
         """
         self._handle_access_request(repo_id, user, "accepted", repo_type=repo_type, token=token)
@@ -8340,16 +8581,16 @@ class HfApi:
                 To disable authentication, pass `False`.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user does not exist on the Hub.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user access request cannot be found.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user access request is already in the rejected list.
         """
         self._handle_access_request(repo_id, user, "rejected", repo_type=repo_type, token=token)
@@ -8403,14 +8644,14 @@ class HfApi:
                 To disable authentication, pass `False`.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the repo is not gated.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 400 if the user already has access to the repo.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 if the user does not exist on the Hub.
         """
         if repo_type not in REPO_TYPES:
@@ -8426,6 +8667,392 @@ class HfApi:
         hf_raise_for_status(response)
         return response.json()

+    ###################
+    # Manage webhooks #
+    ###################
+
+    @validate_hf_hub_args
+    def get_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
+        """Get a webhook by its id.
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to get.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import get_webhook
+        >>> webhook = get_webhook("654bbbc16f2ec14d77f109cc")
+        >>> print(webhook)
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            secret="my-secret",
+            domains=["repo", "discussion"],
+            disabled=False,
+        )
+        ```
+        """
+        response = get_session().get(
+            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data["url"],
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def list_webhooks(self, *, token: Union[bool, str, None] = None) -> List[WebhookInfo]:
+        """List all configured webhooks.
+
+        Args:
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `List[WebhookInfo]`:
+                List of webhook info objects.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import list_webhooks
+        >>> webhooks = list_webhooks()
+        >>> len(webhooks)
+        2
+        >>> webhooks[0]
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            secret="my-secret",
+            domains=["repo", "discussion"],
+            disabled=False,
+        )
+        ```
+        """
+        response = get_session().get(
+            f"{ENDPOINT}/api/settings/webhooks",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhooks_data = response.json()
+
+        return [
+            WebhookInfo(
+                id=webhook["id"],
+                url=webhook["url"],
+                watched=[WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook["watched"]],
+                domains=webhook["domains"],
+                secret=webhook.get("secret"),
+                disabled=webhook["disabled"],
+            )
+            for webhook in webhooks_data
+        ]
+
+    @validate_hf_hub_args
+    def create_webhook(
+        self,
+        *,
+        url: str,
+        watched: List[Union[Dict, WebhookWatchedItem]],
+        domains: Optional[List[WEBHOOK_DOMAIN_T]] = None,
+        secret: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> WebhookInfo:
+        """Create a new webhook.
+
+        Args:
+            url (`str`):
+                URL to send the payload to.
+            watched (`List[WebhookWatchedItem]`):
+                List of [`WebhookWatchedItem`] to be watched by the webhook. It can be users, orgs, models, datasets or spaces.
+                Watched items can also be provided as plain dictionaries.
+            domains (`List[Literal["repo", "discussion"]]`, optional):
+                List of domains to watch. It can be "repo", "discussion" or both.
+            secret (`str`, optional):
+                A secret to sign the payload with.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the newly created webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import create_webhook
+        >>> payload = create_webhook(
+        ...     watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}],
+        ...     url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+        ...     domains=["repo", "discussion"],
+        ...     secret="my-secret",
+        ... )
+        >>> print(payload)
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo", "discussion"],
+            secret="my-secret",
+            disabled=False,
+        )
+        ```
+        """
+        watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
+
+        response = get_session().post(
+            f"{ENDPOINT}/api/settings/webhooks",
+            json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data["url"],
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def update_webhook(
+        self,
+        webhook_id: str,
+        *,
+        url: Optional[str] = None,
+        watched: Optional[List[Union[Dict, WebhookWatchedItem]]] = None,
+        domains: Optional[List[WEBHOOK_DOMAIN_T]] = None,
+        secret: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> WebhookInfo:
+        """Update an existing webhook.
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to be updated.
+            url (`str`, optional):
+                The URL to which the payload will be sent.
+            watched (`List[WebhookWatchedItem]`, optional):
+                List of items to watch. It can be users, orgs, models, datasets, or spaces.
+                Refer to [`WebhookWatchedItem`] for more details. Watched items can also be provided as plain dictionaries.
+            domains (`List[Literal["repo", "discussion"]]`, optional):
+                The domains to watch. This can include "repo", "discussion", or both.
+            secret (`str`, optional):
+                A secret to sign the payload with, providing an additional layer of security.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the updated webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import update_webhook
+        >>> updated_payload = update_webhook(
+        ...     webhook_id="654bbbc16f2ec14d77f109cc",
+        ...     url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+        ...     watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}],
+        ...     domains=["repo"],
+        ...     secret="my-secret",
+        ... )
+        >>> print(updated_payload)
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo"],
+            secret="my-secret",
+            disabled=False,
+        )
+        ```
+        """
+        if watched is None:
+            watched = []
+        watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
+
+        response = get_session().post(
+            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data["url"],
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def enable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
+        """Enable a webhook (makes it "active").
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to enable.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the enabled webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import enable_webhook
+        >>> enabled_webhook = enable_webhook("654bbbc16f2ec14d77f109cc")
+        >>> enabled_webhook
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo", "discussion"],
+            secret="my-secret",
+            disabled=False,
+        )
+        ```
+        """
+        response = get_session().post(
+            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}/enable",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data["url"],
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def disable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
+        """Disable a webhook (makes it "disabled").
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to disable.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the disabled webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import disable_webhook
+        >>> disabled_webhook = disable_webhook("654bbbc16f2ec14d77f109cc")
+        >>> disabled_webhook
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo", "discussion"],
+            secret="my-secret",
+            disabled=True,
+        )
+        ```
+        """
+        response = get_session().post(
+            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}/disable",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data["url"],
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def delete_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> None:
+        """Delete a webhook.
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to delete.
+            token (Union[bool, str, None], optional):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `None`
+
+        Example:
+        ```python
+        >>> from huggingface_hub import delete_webhook
+        >>> delete_webhook("654bbbc16f2ec14d77f109cc")
+        ```
+        """
+        response = get_session().delete(
+            f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+
     #############
     # Internals #
     #############
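
Note: the new webhook methods share one CRUD surface. A minimal end-to-end sketch using the example URL and user from the docstrings above:

```python
from huggingface_hub import HfApi

api = HfApi()
# Watched items and URL are illustrative; the secret is optional.
webhook = api.create_webhook(
    url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
    watched=[{"type": "user", "name": "julien-c"}],
    domains=["repo", "discussion"],
    secret="my-secret",
)
api.disable_webhook(webhook.id)   # pause notifications
api.enable_webhook(webhook.id)    # resume them
api.delete_webhook(webhook.id)    # remove the webhook entirely
```
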
@@ -8454,7 +9081,7 @@ class HfApi:
             headers=self.headers,
         )

-    def _prepare_upload_folder_deletions(
+    def _prepare_folder_deletions(
         self,
         repo_id: str,
         repo_type: Optional[str],
@@ -8506,7 +9133,7 @@ class HfApi:
             `User`: A [`User`] object with the user's overview.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 If the user does not exist on the Hub.
         """
         r = get_session().get(f"{ENDPOINT}/api/users/{username}/overview")
@@ -8526,7 +9153,7 @@ class HfApi:
             `Iterable[User]`: A list of [`User`] objects with the members of the organization.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 If the organization does not exist on the Hub.

         """
@@ -8550,7 +9177,7 @@ class HfApi:
             `Iterable[User]`: A list of [`User`] objects with the followers of the user.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 If the user does not exist on the Hub.

         """
@@ -8574,7 +9201,7 @@ class HfApi:
             `Iterable[User]`: A list of [`User`] objects with the users followed by the user.

         Raises:
-            `HTTPError`:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                 HTTP 404 If the user does not exist on the Hub.

         """
@@ -8677,6 +9304,7 @@ upload_file = api.upload_file
 upload_folder = api.upload_folder
 delete_file = api.delete_file
 delete_folder = api.delete_folder
+delete_files = api.delete_files
 create_commits_on_pr = api.create_commits_on_pr
 preupload_lfs_files = api.preupload_lfs_files
 create_branch = api.create_branch
@@ -8754,6 +9382,16 @@ accept_access_request = api.accept_access_request
 reject_access_request = api.reject_access_request
 grant_access = api.grant_access

+# Webhooks API
+create_webhook = api.create_webhook
+disable_webhook = api.disable_webhook
+delete_webhook = api.delete_webhook
+enable_webhook = api.enable_webhook
+get_webhook = api.get_webhook
+list_webhooks = api.list_webhooks
+update_webhook = api.update_webhook
+
+
 # User API
 get_user_overview = api.get_user_overview
 list_organization_members = api.list_organization_members