huggingface-hub 0.23.4__py3-none-any.whl → 0.24.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- huggingface_hub/__init__.py +47 -15
- huggingface_hub/_commit_api.py +38 -8
- huggingface_hub/_inference_endpoints.py +11 -4
- huggingface_hub/_local_folder.py +22 -13
- huggingface_hub/_snapshot_download.py +12 -7
- huggingface_hub/_webhooks_server.py +3 -1
- huggingface_hub/commands/huggingface_cli.py +4 -3
- huggingface_hub/commands/repo_files.py +128 -0
- huggingface_hub/constants.py +12 -0
- huggingface_hub/file_download.py +127 -91
- huggingface_hub/hf_api.py +976 -341
- huggingface_hub/hf_file_system.py +30 -3
- huggingface_hub/hub_mixin.py +17 -6
- huggingface_hub/inference/_client.py +379 -43
- huggingface_hub/inference/_common.py +0 -2
- huggingface_hub/inference/_generated/_async_client.py +396 -49
- huggingface_hub/inference/_generated/types/__init__.py +4 -1
- huggingface_hub/inference/_generated/types/chat_completion.py +41 -21
- huggingface_hub/inference/_generated/types/feature_extraction.py +23 -5
- huggingface_hub/inference/_generated/types/text_generation.py +29 -0
- huggingface_hub/lfs.py +11 -6
- huggingface_hub/repocard_data.py +3 -3
- huggingface_hub/repository.py +6 -6
- huggingface_hub/serialization/__init__.py +8 -3
- huggingface_hub/serialization/_base.py +13 -16
- huggingface_hub/serialization/_tensorflow.py +4 -3
- huggingface_hub/serialization/_torch.py +399 -22
- huggingface_hub/utils/__init__.py +0 -1
- huggingface_hub/utils/_errors.py +1 -1
- huggingface_hub/utils/_fixes.py +14 -3
- huggingface_hub/utils/_paths.py +17 -6
- huggingface_hub/utils/_subprocess.py +0 -1
- huggingface_hub/utils/_telemetry.py +9 -1
- huggingface_hub/utils/endpoint_helpers.py +2 -186
- huggingface_hub/utils/sha.py +36 -1
- huggingface_hub/utils/tqdm.py +0 -1
- {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/METADATA +12 -9
- {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/RECORD +42 -42
- huggingface_hub/serialization/_numpy.py +0 -68
- {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/top_level.txt +0 -0
huggingface_hub/hf_api.py
CHANGED
@@ -98,17 +98,18 @@ from .constants import (
     SAFETENSORS_MAX_HEADER_LENGTH,
     SAFETENSORS_SINGLE_FILE,
     SPACES_SDK_TYPES,
+    WEBHOOK_DOMAIN_T,
     DiscussionStatusFilter,
     DiscussionTypeFilter,
 )
 from .file_download import HfFileMetadata, get_hf_file_metadata, hf_hub_url
 from .repocard_data import DatasetCardData, ModelCardData, SpaceCardData
-from .utils import (  # noqa: F401 # imported for backward compatibility
+from .utils import (
     DEFAULT_IGNORE_PATTERNS,
     BadRequestError,
     EntryNotFoundError,
     GatedRepoError,
-    HfFolder,
+    HfFolder,  # noqa: F401 # kept for backward compatibility
     HfHubHTTPError,
     LocalTokenNotFoundError,
     NotASafetensorsRepoError,
@@ -130,18 +131,78 @@ from .utils import ( # noqa: F401 # imported for backward compatibility
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
-from .utils._deprecation import _deprecate_arguments
 from .utils._typing import CallableT
 from .utils.endpoint_helpers import (
-    DatasetFilter,
-    ModelFilter,
-    _is_emission_within_treshold,
+    _is_emission_within_threshold,
 )
 
 
 R = TypeVar("R")  # Return type
 CollectionItemType_T = Literal["model", "dataset", "space", "paper"]
 
+ExpandModelProperty_T = Literal[
+    "author",
+    "cardData",
+    "config",
+    "createdAt",
+    "disabled",
+    "downloads",
+    "downloadsAllTime",
+    "gated",
+    "inference",
+    "lastModified",
+    "library_name",
+    "likes",
+    "mask_token",
+    "model-index",
+    "pipeline_tag",
+    "private",
+    "safetensors",
+    "sha",
+    "siblings",
+    "spaces",
+    "tags",
+    "transformersInfo",
+    "widgetData",
+]
+
+ExpandDatasetProperty_T = Literal[
+    "author",
+    "cardData",
+    "citation",
+    "createdAt",
+    "disabled",
+    "description",
+    "downloads",
+    "downloadsAllTime",
+    "gated",
+    "lastModified",
+    "likes",
+    "paperswithcode_id",
+    "private",
+    "siblings",
+    "sha",
+    "tags",
+]
+
+ExpandSpaceProperty_T = Literal[
+    "author",
+    "cardData",
+    "datasets",
+    "disabled",
+    "lastModified",
+    "createdAt",
+    "likes",
+    "private",
+    "runtime",
+    "sdk",
+    "siblings",
+    "sha",
+    "subdomain",
+    "tags",
+    "models",
+]
+
 USERNAME_PLACEHOLDER = "hf_user"
 _REGEX_DISCUSSION_URL = re.compile(r".*/discussions/(\d+)$")
 
@@ -177,9 +238,9 @@ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tu
         `None`) and repo_id (`str`).
 
     Raises:
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If URL cannot be parsed.
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If `repo_type` is unknown.
     """
     input_hf_id = hf_id
@@ -369,8 +430,9 @@ class AccessRequest:
            Username of the user who requested access.
        fullname (`str`):
            Fullname of the user who requested access.
-        email (`str`):
+        email (`Optional[str]`):
            Email of the user who requested access.
+            Can only be `None` in the /accepted list if the user was granted access manually.
        timestamp (`datetime`):
            Timestamp of the request.
        status (`Literal["pending", "accepted", "rejected"]`):
@@ -381,7 +443,7 @@ class AccessRequest:
 
    username: str
    fullname: str
-    email: str
+    email: Optional[str]
    timestamp: datetime
    status: Literal["pending", "accepted", "rejected"]
 
@@ -389,6 +451,48 @@ class AccessRequest:
     fields: Optional[Dict[str, Any]] = None
 
 
+@dataclass
+class WebhookWatchedItem:
+    """Data structure containing information about the items watched by a webhook.
+
+    Attributes:
+        type (`Literal["dataset", "model", "org", "space", "user"]`):
+            Type of the item to be watched. Can be one of `["dataset", "model", "org", "space", "user"]`.
+        name (`str`):
+            Name of the item to be watched. Can be the username, organization name, model name, dataset name or space name.
+    """
+
+    type: Literal["dataset", "model", "org", "space", "user"]
+    name: str
+
+
+@dataclass
+class WebhookInfo:
+    """Data structure containing information about a webhook.
+
+    Attributes:
+        id (`str`):
+            ID of the webhook.
+        url (`str`):
+            URL of the webhook.
+        watched (`List[WebhookWatchedItem]`):
+            List of items watched by the webhook, see [`WebhookWatchedItem`].
+        domains (`List[WEBHOOK_DOMAIN_T]`):
+            List of domains the webhook is watching. Can be one of `["repo", "discussions"]`.
+        secret (`str`, *optional*):
+            Secret of the webhook.
+        disabled (`bool`):
+            Whether the webhook is disabled or not.
+    """
+
+    id: str
+    url: str
+    watched: List[WebhookWatchedItem]
+    domains: List[WEBHOOK_DOMAIN_T]
+    secret: Optional[str]
+    disabled: bool
+
+
 class RepoUrl(str):
     """Subclass of `str` describing a repo URL on the Hub.
 
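The two dataclasses above are plain containers; they pair with the webhook management helpers introduced in this release, which are not part of this excerpt. A minimal sketch, assuming a `create_webhook` helper on `HfApi` and that `WebhookWatchedItem` is re-exported at the package root (the endpoint URL and repo name are illustrative):

>>> from huggingface_hub import HfApi, WebhookWatchedItem
>>> api = HfApi()
>>> webhook = api.create_webhook(
...     url="https://example.com/hf-webhook",  # hypothetical receiver endpoint
...     watched=[WebhookWatchedItem(type="model", name="my-org/my-model")],  # illustrative repo
...     domains=["repo"],
... )
>>> webhook.id, webhook.disabled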
@@ -423,9 +527,9 @@ class RepoUrl(str):
     ```
 
     Raises:
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If URL cannot be parsed.
-
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If `repo_type` is unknown.
    """
 
@@ -604,6 +708,8 @@ class ModelInfo:
            If so, whether there is manual or automatic approval.
        downloads (`int`):
            Number of downloads of the model over the last 30 days.
+        downloads_all_time (`int`):
+            Cumulated number of downloads of the model since its creation.
        likes (`int`):
            Number of likes of the model.
        library_name (`str`, *optional*):
@@ -638,13 +744,14 @@ class ModelInfo:
    sha: Optional[str]
    created_at: Optional[datetime]
    last_modified: Optional[datetime]
-    private: bool
+    private: Optional[bool]
    gated: Optional[Literal["auto", "manual", False]]
    disabled: Optional[bool]
-    downloads: int
-    likes: int
+    downloads: Optional[int]
+    downloads_all_time: Optional[int]
+    likes: Optional[int]
    library_name: Optional[str]
-    tags: List[str]
+    tags: Optional[List[str]]
    pipeline_tag: Optional[str]
    mask_token: Optional[str]
    card_data: Optional[ModelCardData]
@@ -664,13 +771,14 @@ class ModelInfo:
         self.last_modified = parse_datetime(last_modified) if last_modified else None
         created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None)
         self.created_at = parse_datetime(created_at) if created_at else None
-        self.private = kwargs.pop("private")
+        self.private = kwargs.pop("private", None)
         self.gated = kwargs.pop("gated", None)
         self.disabled = kwargs.pop("disabled", None)
-        self.downloads = kwargs.pop("downloads")
-        self.likes = kwargs.pop("likes")
+        self.downloads = kwargs.pop("downloads", None)
+        self.downloads_all_time = kwargs.pop("downloadsAllTime", None)
+        self.likes = kwargs.pop("likes", None)
         self.library_name = kwargs.pop("library_name", None)
-        self.tags = kwargs.pop("tags")
+        self.tags = kwargs.pop("tags", None)
         self.pipeline_tag = kwargs.pop("pipeline_tag", None)
         self.mask_token = kwargs.pop("mask_token", None)
         card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
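A practical consequence of the switch to `kwargs.pop(..., None)`: when a server response omits a field (for example because `expand` was used), the corresponding `ModelInfo` attribute is `None` instead of raising a `KeyError` at construction time. A small sketch of defensive iteration:

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> for model in api.list_models(expand=["downloads", "likes"], limit=3):
...     # Attributes not requested via `expand` come back as None
...     print(model.id, model.downloads or 0, model.likes or 0)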
@@ -757,6 +865,8 @@ class DatasetInfo:
             If so, whether there is manual or automatic approval.
         downloads (`int`):
             Number of downloads of the dataset over the last 30 days.
+        downloads_all_time (`int`):
+            Cumulated number of downloads of the model since its creation.
         likes (`int`):
             Number of likes of the dataset.
         tags (`List[str]`):
@@ -772,13 +882,14 @@ class DatasetInfo:
     sha: Optional[str]
     created_at: Optional[datetime]
     last_modified: Optional[datetime]
-    private: bool
+    private: Optional[bool]
     gated: Optional[Literal["auto", "manual", False]]
     disabled: Optional[bool]
-    downloads: int
-    likes: int
+    downloads: Optional[int]
+    downloads_all_time: Optional[int]
+    likes: Optional[int]
     paperswithcode_id: Optional[str]
-    tags: List[str]
+    tags: Optional[List[str]]
     card_data: Optional[DatasetCardData]
     siblings: Optional[List[RepoSibling]]
@@ -790,13 +901,14 @@ class DatasetInfo:
     self.created_at = parse_datetime(created_at) if created_at else None
     last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None)
     self.last_modified = parse_datetime(last_modified) if last_modified else None
-    self.private = kwargs.pop("private")
+    self.private = kwargs.pop("private", None)
     self.gated = kwargs.pop("gated", None)
     self.disabled = kwargs.pop("disabled", None)
-    self.downloads = kwargs.pop("downloads")
-    self.likes = kwargs.pop("likes")
+    self.downloads = kwargs.pop("downloads", None)
+    self.downloads_all_time = kwargs.pop("downloadsAllTime", None)
+    self.likes = kwargs.pop("likes", None)
     self.paperswithcode_id = kwargs.pop("paperswithcode_id", None)
-    self.tags = kwargs.pop("tags")
+    self.tags = kwargs.pop("tags", None)
     card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
     self.card_data = (
         DatasetCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -889,14 +1001,14 @@ class SpaceInfo:
     sha: Optional[str]
     created_at: Optional[datetime]
     last_modified: Optional[datetime]
-    private: bool
+    private: Optional[bool]
     gated: Optional[Literal["auto", "manual", False]]
     disabled: Optional[bool]
     host: Optional[str]
     subdomain: Optional[str]
-    likes: int
+    likes: Optional[int]
     sdk: Optional[str]
-    tags: List[str]
+    tags: Optional[List[str]]
     siblings: Optional[List[RepoSibling]]
     card_data: Optional[SpaceCardData]
     runtime: Optional[SpaceRuntime]
@@ -911,14 +1023,14 @@ class SpaceInfo:
     self.created_at = parse_datetime(created_at) if created_at else None
     last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None)
     self.last_modified = parse_datetime(last_modified) if last_modified else None
-    self.private = kwargs.pop("private")
+    self.private = kwargs.pop("private", None)
     self.gated = kwargs.pop("gated", None)
     self.disabled = kwargs.pop("disabled", None)
     self.host = kwargs.pop("host", None)
     self.subdomain = kwargs.pop("subdomain", None)
-    self.likes = kwargs.pop("likes")
+    self.likes = kwargs.pop("likes", None)
     self.sdk = kwargs.pop("sdk", None)
-    self.tags = kwargs.pop("tags")
+    self.tags = kwargs.pop("tags", None)
     card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
     self.card_data = (
         SpaceCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
@@ -1449,7 +1561,8 @@ class HfApi:
     def list_models(
         self,
         *,
-        filter: Union[ModelFilter, str, Iterable[str], None] = None,
+        # Search-query parameter
+        filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
         library: Optional[Union[str, List[str]]] = None,
         language: Optional[Union[str, List[str]]] = None,
@@ -1458,23 +1571,25 @@ class HfApi:
         trained_dataset: Optional[Union[str, List[str]]] = None,
         tags: Optional[Union[str, List[str]]] = None,
         search: Optional[str] = None,
+        pipeline_tag: Optional[str] = None,
         emissions_thresholds: Optional[Tuple[float, float]] = None,
+        # Sorting and pagination parameters
         sort: Union[Literal["last_modified"], str, None] = None,
         direction: Optional[Literal[-1]] = None,
         limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandModelProperty_T]] = None,
         full: Optional[bool] = None,
         cardData: bool = False,
         fetch_config: bool = False,
         token: Union[bool, str, None] = None,
-        pipeline_tag: Optional[str] = None,
     ) -> Iterable[ModelInfo]:
         """
         List models hosted on the Huggingface Hub, given some filters.
 
         Args:
-            filter (
-            A string or
-            on the Hub.
+            filter (`str` or `Iterable[str]`, *optional*):
+                A string or list of string to filter models on the Hub.
             author (`str`, *optional*):
                 A string which identify the author (user or organization) of the
                 returned models
@@ -1498,6 +1613,8 @@ class HfApi:
                 as `text-generation` or `spacy`.
             search (`str`, *optional*):
                 A string that will be contained in the returned model ids.
+            pipeline_tag (`str`, *optional*):
+                A string pipeline tag to filter models on the Hub by, such as `summarization`.
             emissions_thresholds (`Tuple`, *optional*):
                 A tuple of two ints or floats representing a minimum and maximum
                 carbon footprint to filter the resulting models with in grams.
@@ -1510,6 +1627,10 @@ class HfApi:
             limit (`int`, *optional*):
                 The limit on the number of models fetched. Leaving this option
                 to `None` fetches all models.
+            expand (`List[ExpandModelProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed.
+                Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"` and `"widgetData"`.
             full (`bool`, *optional*):
                 Whether to fetch all model data, including the `last_modified`,
                 the `sha`, the files and the `tags`. This is set to `True` by
@@ -1526,8 +1647,6 @@ class HfApi:
                 token, which is the recommended method for authentication (see
                 https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                 To disable authentication, pass `False`.
-            pipeline_tag (`str`, *optional*):
-                A string pipeline tag to filter models on the Hub by, such as `summarization`
 
 
         Returns:
@@ -1540,13 +1659,13 @@ class HfApi:
 
         >>> api = HfApi()
 
-
+        # List all models
         >>> api.list_models()
 
-
+        # List only the text classification models
         >>> api.list_models(filter="text-classification")
 
-
+        # List only models from the AllenNLP library
         >>> api.list_models(filter="allennlp")
         ```
 
@@ -1557,40 +1676,33 @@ class HfApi:
 
         >>> api = HfApi()
 
-
+        # List all models with "bert" in their name
         >>> api.list_models(search="bert")
 
-
+        # List all models with "bert" in their name made by google
         >>> api.list_models(search="bert", author="google")
         ```
         """
+        if expand and (full or cardData or fetch_config):
+            raise ValueError("`expand` cannot be used if `full`, `cardData` or `fetch_config` are passed.")
+
         if emissions_thresholds is not None and cardData is None:
             raise ValueError("`emissions_thresholds` were passed without setting `cardData=True`.")
 
         path = f"{self.endpoint}/api/models"
         headers = self._build_hf_headers(token=token)
-        params = {}
-        filter_list = []
-
-        if filter is not None:
-            if isinstance(filter, ModelFilter):
-                params = self._unpack_model_filter(filter)
-            else:
-                params.update({"filter": filter})
-
-            params.update({"full": True})
+        params: Dict[str, Any] = {}
 
         # Build the filter list
-
-
-
-            params.update({"search": model_name})
+        filter_list: List[str] = []
+        if filter:
+            filter_list.extend([filter] if isinstance(filter, str) else filter)
         if library:
             filter_list.extend([library] if isinstance(library, str) else library)
         if task:
             filter_list.extend([task] if isinstance(task, str) else task)
         if trained_dataset:
-            if
+            if isinstance(trained_dataset, str):
                 trained_dataset = [trained_dataset]
             for dataset in trained_dataset:
                 if not dataset.startswith("dataset:"):
@@ -1600,31 +1712,37 @@ class HfApi:
             filter_list.extend([language] if isinstance(language, str) else language)
         if tags:
             filter_list.extend([tags] if isinstance(tags, str) else tags)
+        if len(filter_list) > 0:
+            params["filter"] = filter_list
 
+        # Handle other query params
+        if author:
+            params["author"] = author
+        if pipeline_tag:
+            params["pipeline_tag"] = pipeline_tag
+        search_list = []
+        if model_name:
+            search_list.append(model_name)
         if search:
-
+            search_list.append(search)
+        if len(search_list) > 0:
+            params["search"] = search_list
         if sort is not None:
-            params
+            params["sort"] = "lastModified" if sort == "last_modified" else sort
         if direction is not None:
-            params
+            params["direction"] = direction
         if limit is not None:
-            params
-
-
-
-
-            del params["full"]
+            params["limit"] = limit
+
+        # Request additional data
+        if full:
+            params["full"] = True
         if fetch_config:
-            params
+            params["config"] = True
         if cardData:
-            params
-            if
-            params
-
-            filter_value = params.get("filter", [])
-            if filter_value:
-                filter_list.extend([filter_value] if isinstance(filter_value, str) else list(filter_value))
-            params.update({"filter": filter_list})
+            params["cardData"] = True
+        if expand:
+            params["expand"] = expand
 
         # `items` is a generator
         items = paginate(path, params=params, headers=headers)
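The rewritten query builder maps each keyword argument directly onto an API query parameter (`filter`, `author`, `pipeline_tag`, `search`, `sort`, `direction`, `limit`, `full`, `config`, `cardData`, `expand`). A sketch of the resulting call style (model ids and filter values are illustrative):

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> for model in api.list_models(
...     pipeline_tag="text-classification",  # now a first-class query parameter
...     library="pytorch",
...     expand=["downloadsAllTime", "tags"],
...     limit=10,
... ):
...     print(model.id, model.downloads_all_time)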
@@ -1634,63 +1752,15 @@ class HfApi:
             if "siblings" not in item:
                 item["siblings"] = None
             model_info = ModelInfo(**item)
-            if emissions_thresholds is None or
+            if emissions_thresholds is None or _is_emission_within_threshold(model_info, *emissions_thresholds):
                 yield model_info
 
-    def _unpack_model_filter(self, model_filter: ModelFilter):
-        """
-        Unpacks a [`ModelFilter`] into something readable for `list_models`
-        """
-        model_str = ""
-
-        # Handling author
-        if model_filter.author:
-            model_str = f"{model_filter.author}/"
-
-        # Handling model_name
-        if model_filter.model_name:
-            model_str += model_filter.model_name
-
-        filter_list: List[str] = []
-
-        # Handling tasks
-        if model_filter.task:
-            filter_list.extend([model_filter.task] if isinstance(model_filter.task, str) else model_filter.task)
-
-        # Handling dataset
-        if model_filter.trained_dataset:
-            if not isinstance(model_filter.trained_dataset, (list, tuple)):
-                model_filter.trained_dataset = [model_filter.trained_dataset]
-            for dataset in model_filter.trained_dataset:
-                if "dataset:" not in dataset:
-                    dataset = f"dataset:{dataset}"
-                filter_list.append(dataset)
-
-        # Handling library
-        if model_filter.library:
-            filter_list.extend(
-                [model_filter.library] if isinstance(model_filter.library, str) else model_filter.library
-            )
-
-        # Handling tags
-        if model_filter.tags:
-            filter_list.extend([model_filter.tags] if isinstance(model_filter.tags, str) else model_filter.tags)
-
-        query_dict: Dict[str, Any] = {}
-        if model_str:
-            query_dict["search"] = model_str
-        if isinstance(model_filter.language, list):
-            filter_list.extend(model_filter.language)
-        elif isinstance(model_filter.language, str):
-            filter_list.append(model_filter.language)
-        query_dict["filter"] = tuple(filter_list)
-        return query_dict
-
     @validate_hf_hub_args
     def list_datasets(
         self,
         *,
-        filter: Union[DatasetFilter, str, Iterable[str], None] = None,
+        # Search-query parameter
+        filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
         benchmark: Optional[Union[str, List[str]]] = None,
         dataset_name: Optional[str] = None,
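With `_unpack_model_filter` removed, `ModelFilter` objects are no longer accepted by `list_models`; the equivalent query is written with plain keyword arguments. A migration sketch, with the 0.23.x form shown commented out for comparison:

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> # 0.23.x: api.list_models(filter=ModelFilter(task="image-classification", library="pytorch"))
>>> api.list_models(task="image-classification", library="pytorch", author="google")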
@@ -1698,12 +1768,16 @@ class HfApi:
         language: Optional[Union[str, List[str]]] = None,
         multilinguality: Optional[Union[str, List[str]]] = None,
         size_categories: Optional[Union[str, List[str]]] = None,
+        tags: Optional[Union[str, List[str]]] = None,
         task_categories: Optional[Union[str, List[str]]] = None,
         task_ids: Optional[Union[str, List[str]]] = None,
         search: Optional[str] = None,
+        # Sorting and pagination parameters
         sort: Optional[Union[Literal["last_modified"], str]] = None,
         direction: Optional[Literal[-1]] = None,
         limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandDatasetProperty_T]] = None,
         full: Optional[bool] = None,
         token: Union[bool, str, None] = None,
     ) -> Iterable[DatasetInfo]:
@@ -1711,9 +1785,8 @@ class HfApi:
         List datasets hosted on the Huggingface Hub, given some filters.
 
         Args:
-            filter (
-            A string or
-            datasets on the hub.
+            filter (`str` or `Iterable[str]`, *optional*):
+                A string or list of string to filter datasets on the hub.
             author (`str`, *optional*):
                 A string which identify the author of the returned datasets.
             benchmark (`str` or `List`, *optional*):
@@ -1736,6 +1809,8 @@ class HfApi:
                 A string or list of strings that can be used to identify datasets on
                 the Hub by the size of the dataset such as `100K<n<1M` or
                 `1M<n<10M`.
+            tags (`str` or `List`, *optional*):
+                A string tag or a list of tags to filter datasets on the Hub.
             task_categories (`str` or `List`, *optional*):
                 A string or list of strings that can be used to identify datasets on
                 the Hub by the designed task, such as `audio_classification` or
@@ -1755,6 +1830,10 @@ class HfApi:
             limit (`int`, *optional*):
                 The limit on the number of datasets fetched. Leaving this option
                 to `None` fetches all datasets.
+            expand (`List[ExpandDatasetProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"` and `"tags"`.
             full (`bool`, *optional*):
                 Whether to fetch all dataset data, including the `last_modified`,
                 the `card_data` and the files. Can contain useful information such as the
@@ -1775,20 +1854,21 @@ class HfApi:
 
         >>> api = HfApi()
 
-
+        # List all datasets
         >>> api.list_datasets()
 
 
-
+        # List only the text classification datasets
         >>> api.list_datasets(filter="task_categories:text-classification")
 
 
-
+        # List only the datasets in russian for language modeling
         >>> api.list_datasets(
         ...     filter=("language:ru", "task_ids:language-modeling")
         ... )
 
-
+        # List FiftyOne datasets (identified by the tag "fiftyone" in dataset card)
+        >>> api.list_datasets(tags="fiftyone")
         ```
 
         Example usage with the `search` argument:
@@ -1798,62 +1878,70 @@ class HfApi:
 
         >>> api = HfApi()
 
-
+        # List all datasets with "text" in their name
         >>> api.list_datasets(search="text")
 
-
+        # List all datasets with "text" in their name made by google
         >>> api.list_datasets(search="text", author="google")
         ```
         """
+        if expand and full:
+            raise ValueError("`expand` cannot be used if `full` is passed.")
+
         path = f"{self.endpoint}/api/datasets"
         headers = self._build_hf_headers(token=token)
-        params = {}
-        filter_list = []
+        params: Dict[str, Any] = {}
 
+        # Build `filter` list
+        filter_list = []
         if filter is not None:
-            if isinstance(filter,
-
+            if isinstance(filter, str):
+                filter_list.append(filter)
             else:
-
-
-
-
-
-
-
-
-
-                benchmark,
-                language_creators,
-                language,
-                multilinguality,
-                size_categories,
-                task_categories,
-                task_ids,
+                filter_list.extend(filter)
+        for key, value in (
+            ("benchmark", benchmark),
+            ("language_creators", language_creators),
+            ("language", language),
+            ("multilinguality", multilinguality),
+            ("size_categories", size_categories),
+            ("task_categories", task_categories),
+            ("task_ids", task_ids),
         ):
-            if
-            if
-
-            for
-                if not
-                    data = f"{
+            if value:
+                if isinstance(value, str):
+                    value = [value]
+                for value_item in value:
+                    if not value_item.startswith(f"{key}:"):
+                        data = f"{key}:{value_item}"
                     filter_list.append(data)
+        if tags is not None:
+            filter_list.extend([tags] if isinstance(tags, str) else tags)
+        if len(filter_list) > 0:
+            params["filter"] = filter_list
 
+        # Handle other query params
+        if author:
+            params["author"] = author
+        search_list = []
+        if dataset_name:
+            search_list.append(dataset_name)
         if search:
-
+            search_list.append(search)
+        if len(search_list) > 0:
+            params["search"] = search_list
         if sort is not None:
-            params
+            params["sort"] = "lastModified" if sort == "last_modified" else sort
         if direction is not None:
-            params
+            params["direction"] = direction
         if limit is not None:
-            params
-        if full:
-            params.update({"full": True})
+            params["limit"] = limit
 
-
-        if
-
-
+        # Request additional data
+        if expand:
+            params["expand"] = expand
+        if full:
+            params["full"] = True
 
         items = paginate(path, params=params, headers=headers)
         if limit is not None:
@@ -1863,47 +1951,6 @@ class HfApi:
                 item["siblings"] = None
             yield DatasetInfo(**item)
 
-    def _unpack_dataset_filter(self, dataset_filter: DatasetFilter):
-        """
-        Unpacks a [`DatasetFilter`] into something readable for `list_datasets`
-        """
-        dataset_str = ""
-
-        # Handling author
-        if dataset_filter.author:
-            dataset_str = f"{dataset_filter.author}/"
-
-        # Handling dataset_name
-        if dataset_filter.dataset_name:
-            dataset_str += dataset_filter.dataset_name
-
-        filter_list = []
-        data_attributes = [
-            "benchmark",
-            "language_creators",
-            "language",
-            "multilinguality",
-            "size_categories",
-            "task_categories",
-            "task_ids",
-        ]
-
-        for attr in data_attributes:
-            curr_attr = getattr(dataset_filter, attr)
-            if curr_attr is not None:
-                if not isinstance(curr_attr, (list, tuple)):
-                    curr_attr = [curr_attr]
-                for data in curr_attr:
-                    if f"{attr}:" not in data:
-                        data = f"{attr}:{data}"
-                    filter_list.append(data)
-
-        query_dict: Dict[str, Any] = {}
-        if dataset_str is not None:
-            query_dict["search"] = dataset_str
-        query_dict["filter"] = tuple(filter_list)
-        return query_dict
-
     def list_metrics(self) -> List[MetricInfo]:
         """
         Get the public list of all the metrics on huggingface.co
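`_unpack_dataset_filter` disappears for the same reason: `DatasetFilter` is gone and `list_datasets` now takes the attributes directly. A migration sketch, with the 0.23.x form commented out:

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> # 0.23.x: api.list_datasets(filter=DatasetFilter(language="fr", size_categories="1M<n<10M"))
>>> api.list_datasets(language="fr", size_categories="1M<n<10M")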
@@ -1921,15 +1968,19 @@ class HfApi:
     def list_spaces(
         self,
         *,
+        # Search-query parameter
         filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
         search: Optional[str] = None,
-        sort: Union[Literal["last_modified"], str, None] = None,
-        direction: Optional[Literal[-1]] = None,
-        limit: Optional[int] = None,
         datasets: Union[str, Iterable[str], None] = None,
         models: Union[str, Iterable[str], None] = None,
         linked: bool = False,
+        # Sorting and pagination parameters
+        sort: Union[Literal["last_modified"], str, None] = None,
+        direction: Optional[Literal[-1]] = None,
+        limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandSpaceProperty_T]] = None,
         full: Optional[bool] = None,
         token: Union[bool, str, None] = None,
     ) -> Iterable[SpaceInfo]:
@@ -1943,6 +1994,14 @@ class HfApi:
                 A string which identify the author of the returned Spaces.
             search (`str`, *optional*):
                 A string that will be contained in the returned Spaces.
+            datasets (`str` or `Iterable`, *optional*):
+                Whether to return Spaces that make use of a dataset.
+                The name of a specific dataset can be passed as a string.
+            models (`str` or `Iterable`, *optional*):
+                Whether to return Spaces that make use of a model.
+                The name of a specific model can be passed as a string.
+            linked (`bool`, *optional*):
+                Whether to return Spaces that make use of either a model or a dataset.
             sort (`Literal["last_modified"]` or `str`, *optional*):
                 The key with which to sort the resulting Spaces. Possible
                 values are the properties of the [`huggingface_hub.hf_api.SpaceInfo`]` class.
@@ -1952,14 +2011,10 @@ class HfApi:
             limit (`int`, *optional*):
                 The limit on the number of Spaces fetched. Leaving this option
                 to `None` fetches all Spaces.
-
-
-
-
-                Whether to return Spaces that make use of a model.
-                The name of a specific model can be passed as a string.
-            linked (`bool`, *optional*):
-                Whether to return Spaces that make use of either a model or a dataset.
+            expand (`List[ExpandSpaceProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"models"`.
             full (`bool`, *optional*):
                 Whether to fetch all Spaces data, including the `last_modified`, `siblings`
                 and `card_data` fields.
@@ -1972,29 +2027,36 @@ class HfApi:
         Returns:
             `Iterable[SpaceInfo]`: an iterable of [`huggingface_hub.hf_api.SpaceInfo`] objects.
         """
+        if expand and full:
+            raise ValueError("`expand` cannot be used if `full` is passed.")
+
         path = f"{self.endpoint}/api/spaces"
         headers = self._build_hf_headers(token=token)
         params: Dict[str, Any] = {}
         if filter is not None:
-            params
+            params["filter"] = filter
         if author is not None:
-            params
+            params["author"] = author
         if search is not None:
-            params
+            params["search"] = search
         if sort is not None:
-            params
+            params["sort"] = "lastModified" if sort == "last_modified" else sort
         if direction is not None:
-            params
+            params["direction"] = direction
         if limit is not None:
-            params
-        if full:
-            params.update({"full": True})
+            params["limit"] = limit
         if linked:
-            params
+            params["linked"] = True
         if datasets is not None:
-            params
+            params["datasets"] = datasets
         if models is not None:
-            params
+            params["models"] = models
+
+        # Request additional data
+        if expand:
+            params["expand"] = expand
+        if full:
+            params["full"] = True
 
         items = paginate(path, params=params, headers=headers)
         if limit is not None:
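A sketch of the reorganized `list_spaces` call, combining the relocated `models` filter with the new `expand` parameter (the model id is illustrative):

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> for space in api.list_spaces(models="bert-base-uncased", expand=["likes", "runtime"], limit=5):
...     print(space.id, space.likes)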
@@ -2243,6 +2305,7 @@ class HfApi:
         timeout: Optional[float] = None,
         securityStatus: Optional[bool] = None,
         files_metadata: bool = False,
+        expand: Optional[List[ExpandModelProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> ModelInfo:
         """
@@ -2265,6 +2328,10 @@ class HfApi:
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandModelProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
+                Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"` and `"widgetData"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2286,17 +2353,22 @@ class HfApi:
 
         </Tip>
         """
+        if expand and (securityStatus or files_metadata):
+            raise ValueError("`expand` cannot be used if `securityStatus` or `files_metadata` are set.")
+
         headers = self._build_hf_headers(token=token)
         path = (
             f"{self.endpoint}/api/models/{repo_id}"
             if revision is None
             else (f"{self.endpoint}/api/models/{repo_id}/revision/{quote(revision, safe='')}")
         )
-        params = {}
+        params: Dict = {}
         if securityStatus:
             params["securityStatus"] = True
         if files_metadata:
             params["blobs"] = True
+        if expand:
+            params["expand"] = expand
         r = get_session().get(path, headers=headers, timeout=timeout, params=params)
         hf_raise_for_status(r)
         data = r.json()
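A sketch of `model_info` with `expand` (the repo id is illustrative); note that attributes left out of the `expand` list come back as `None`:

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> info = api.model_info("bert-base-uncased", expand=["downloadsAllTime", "safetensors"])
>>> info.downloads_all_time  # populated
>>> info.downloads           # None: not requested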
@@ -2310,6 +2382,7 @@ class HfApi:
         revision: Optional[str] = None,
         timeout: Optional[float] = None,
         files_metadata: bool = False,
+        expand: Optional[List[ExpandDatasetProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> DatasetInfo:
         """
@@ -2329,6 +2402,10 @@ class HfApi:
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandDatasetProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `files_metadata` is passed.
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"` and `"tags"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2350,15 +2427,20 @@ class HfApi:
 
         </Tip>
         """
+        if expand and files_metadata:
+            raise ValueError("`expand` cannot be used if `files_metadata` is set.")
+
         headers = self._build_hf_headers(token=token)
         path = (
             f"{self.endpoint}/api/datasets/{repo_id}"
             if revision is None
             else (f"{self.endpoint}/api/datasets/{repo_id}/revision/{quote(revision, safe='')}")
         )
-        params = {}
+        params: Dict = {}
         if files_metadata:
             params["blobs"] = True
+        if expand:
+            params["expand"] = expand
 
         r = get_session().get(path, headers=headers, timeout=timeout, params=params)
         hf_raise_for_status(r)
@@ -2373,6 +2455,7 @@ class HfApi:
         revision: Optional[str] = None,
         timeout: Optional[float] = None,
         files_metadata: bool = False,
+        expand: Optional[List[ExpandModelProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> SpaceInfo:
         """
@@ -2392,6 +2475,10 @@ class HfApi:
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandSpaceProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"models"`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -2413,15 +2500,20 @@ class HfApi:
 
         </Tip>
         """
+        if expand and files_metadata:
+            raise ValueError("`expand` cannot be used if `files_metadata` is set.")
+
         headers = self._build_hf_headers(token=token)
         path = (
             f"{self.endpoint}/api/spaces/{repo_id}"
             if revision is None
             else (f"{self.endpoint}/api/spaces/{repo_id}/revision/{quote(revision, safe='')}")
         )
-        params = {}
+        params: Dict = {}
         if files_metadata:
             params["blobs"] = True
+        if expand:
+            params["expand"] = expand
 
         r = get_session().get(path, headers=headers, timeout=timeout, params=params)
         hf_raise_for_status(r)
@@ -2437,6 +2529,7 @@ class HfApi:
         repo_type: Optional[str] = None,
         timeout: Optional[float] = None,
         files_metadata: bool = False,
+        expand: Optional[Union[ExpandModelProperty_T, ExpandDatasetProperty_T, ExpandSpaceProperty_T]] = None,
         token: Union[bool, str, None] = None,
     ) -> Union[ModelInfo, DatasetInfo, SpaceInfo]:
         """
@@ -2454,6 +2547,10 @@ class HfApi:
                 `None` or `"model"` if getting repository info from a model. Default is `None`.
             timeout (`float`, *optional*):
                 Whether to set a timeout for the request to the Hub.
+            expand (`ExpandModelProperty_T` or `ExpandDatasetProperty_T` or `ExpandSpaceProperty_T`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `files_metadata` is passed.
+                For an exhaustive list of available properties, check out [`model_info`], [`dataset_info`] or [`space_info`].
             files_metadata (`bool`, *optional*):
                 Whether or not to retrieve metadata for files in the repository
                 (size, LFS metadata, etc). Defaults to `False`.
@@ -2493,6 +2590,7 @@ class HfApi:
             revision=revision,
             token=token,
             timeout=timeout,
+            expand=expand,  # type: ignore[arg-type]
             files_metadata=files_metadata,
         )
 
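Since `repo_info` simply forwards `expand` to the type-specific method (hence the `# type: ignore`), the same property lists work there. A sketch (the dataset id is illustrative):

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> info = api.repo_info("squad", repo_type="dataset", expand=["downloadsAllTime"])
>>> info.downloads_all_time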
@@ -3144,6 +3242,7 @@ class HfApi:
         private: bool = False,
         repo_type: Optional[str] = None,
         exist_ok: bool = False,
+        resource_group_id: Optional[str] = None,
         space_sdk: Optional[str] = None,
         space_hardware: Optional[SpaceHardware] = None,
         space_storage: Optional[SpaceStorage] = None,
@@ -3170,6 +3269,11 @@ class HfApi:
                 `None`.
             exist_ok (`bool`, *optional*, defaults to `False`):
                 If `True`, do not raise an error if repo already exists.
+            resource_group_id (`str`, *optional*):
+                Resource group in which to create the repo. Resource groups is only available for organizations and
+                allow to define which members of the organization can access the resource. The ID of a resource group
+                can be found in the URL of the resource's page on the Hub (e.g. `"66670e5163145ca562cb1988"`).
+                To learn more about resource groups, see https://huggingface.co/docs/hub/en/security-resource-groups.
             space_sdk (`str`, *optional*):
                 Choice of SDK to use if repo_type is "space". Can be "streamlit", "gradio", "docker", or "static".
             space_hardware (`SpaceHardware` or `str`, *optional*):
@@ -3237,8 +3341,11 @@ class HfApi:
             # Testing purposes only.
             # See https://github.com/huggingface/huggingface_hub/pull/733/files#r820604472
             json["lfsmultipartthresh"] = self._lfsmultipartthresh  # type: ignore
-        headers = self._build_hf_headers(token=token)
 
+        if resource_group_id is not None:
+            json["resourceGroupId"] = resource_group_id
+
+        headers = self._build_hf_headers(token=token)
         while True:
             r = get_session().post(path, headers=headers, json=json)
             if r.status_code == 409 and "Cannot create repo: another conflicting operation is in progress" in r.text:
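A sketch of `create_repo` with the new parameter; the repo id and resource group ID are illustrative (the ID comes from the resource group's page on the Hub):

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> api.create_repo(
...     "my-org/private-model",
...     private=True,
...     resource_group_id="66670e5163145ca562cb1988",
... )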
@@ -3301,7 +3408,7 @@ class HfApi:
                 If `True`, do not raise an error if repo does not exist.
 
         Raises:
-
+            [`~utils.RepositoryNotFoundError`]
                 If the repository to delete from cannot be found and `missing_ok` is set to False (default).
         """
         organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id)
@@ -3324,25 +3431,19 @@ class HfApi:
             raise
 
     @validate_hf_hub_args
-    @_deprecate_arguments(
-        version="0.24.0", deprecated_args=("organization", "name"), custom_message="Use `repo_id` instead."
-    )
     def update_repo_visibility(
         self,
         repo_id: str,
         private: bool = False,
         *,
         token: Union[str, bool, None] = None,
-        organization: Optional[str] = None,
         repo_type: Optional[str] = None,
-        name: Optional[str] = None,
     ) -> Dict[str, bool]:
         """Update the visibility setting of a repository.
 
         Args:
             repo_id (`str`, *optional*):
-                A namespace (user or an organization) and a repo name separated
-                by a `/`.
+                A namespace (user or an organization) and a repo name separated by a `/`.
             private (`bool`, *optional*, defaults to `False`):
                 Whether the model repo should be private.
             token (Union[bool, str, None], optional):
@@ -3369,20 +3470,12 @@ class HfApi:
         </Tip>
         """
         if repo_type not in REPO_TYPES:
-            raise ValueError("Invalid repo type")
-
-        organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id)
-
-        if organization is None:
-            namespace = self.whoami(token)["name"]
-        else:
-            namespace = organization
-
+            raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
         if repo_type is None:
             repo_type = REPO_TYPE_MODEL  # default repo type
 
         r = get_session().put(
-            url=f"{self.endpoint}/api/{repo_type}s/{
+            url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
             headers=self._build_hf_headers(token=token),
             json={"private": private},
         )
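After the deprecation cycle announced for 0.24.0, only the `repo_id` form remains. A sketch (the repo id is illustrative):

>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> api.update_repo_visibility("username/my-model", private=True)  # `organization`/`name` kwargs are gone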
@@ -3684,6 +3777,46 @@ class HfApi:
3684 3777 | num_threads=num_threads,
3685 3778 | free_memory=False, # do not remove `CommitOperationAdd.path_or_fileobj` on LFS files for "normal" users
3686 3779 | )
3780 +
3781 + # Remove no-op operations (files that have not changed)
3782 + operations_without_no_op = []
3783 + for operation in operations:
3784 + if (
3785 + isinstance(operation, CommitOperationAdd)
3786 + and operation._remote_oid is not None
3787 + and operation._remote_oid == operation._local_oid
3788 + ):
3789 + # File already exists on the Hub and has not changed: we can skip it.
3790 + logger.debug(f"Skipping upload for '{operation.path_in_repo}' as the file has not changed.")
3791 + continue
3792 + operations_without_no_op.append(operation)
3793 + if len(operations) != len(operations_without_no_op):
3794 + logger.info(
3795 + f"Removing {len(operations) - len(operations_without_no_op)} file(s) from commit that have not changed."
3796 + )
3797 +
3798 + # Return early if empty commit
3799 + if len(operations_without_no_op) == 0:
3800 + logger.warning("No files have been modified since last commit. Skipping to prevent empty commit.")
3801 +
3802 + # Get latest commit info
3803 + try:
3804 + info = self.repo_info(repo_id=repo_id, repo_type=repo_type, revision=revision, token=token)
3805 + except RepositoryNotFoundError as e:
3806 + e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE)
3807 + raise
3808 +
3809 + # Return commit info based on latest commit
3810 + url_prefix = self.endpoint
3811 + if repo_type is not None and repo_type != REPO_TYPE_MODEL:
3812 + url_prefix = f"{url_prefix}/{repo_type}s"
3813 + return CommitInfo(
3814 + commit_url=f"{url_prefix}/{repo_id}/commit/{info.sha}",
3815 + commit_message=commit_message,
3816 + commit_description=commit_description,
3817 + oid=info.sha, # type: ignore[arg-type]
3818 + )
3819 +
3687 3820 | files_to_copy = _fetch_files_to_copy(
3688 3821 | copies=copies,
3689 3822 | repo_type=repo_type,
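Note: `create_commit` now filters out additions whose local content matches the remote oid and, when nothing is left to commit, returns a `CommitInfo` built from the latest commit instead of pushing an empty one. A small sketch of the observable behavior (repo id hypothetical):

```python
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(path_or_fileobj=b"hello", path_in_repo="hello.txt", repo_id="me/demo")
# Same content again: detected as a no-op, no new commit is created and the
# returned CommitInfo points at the latest existing commit.
info = api.upload_file(path_or_fileobj=b"hello", path_in_repo="hello.txt", repo_id="me/demo")
print(info.oid)
```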
@@ -4680,7 +4813,7 @@ class HfApi:
4680 4813 | ignore_patterns = [ignore_patterns]
4681 4814 | ignore_patterns += DEFAULT_IGNORE_PATTERNS
4682 4815 |
4683 - delete_operations = self.
4816 + delete_operations = self._prepare_folder_deletions(
4684 4817 | repo_id=repo_id,
4685 4818 | repo_type=repo_type,
4686 4819 | revision=DEFAULT_REVISION if create_pr else revision,
@@ -4841,6 +4974,82 @@ class HfApi:
4841 4974 | parent_commit=parent_commit,
4842 4975 | )
4843 4976 |
4977 + @validate_hf_hub_args
4978 + def delete_files(
4979 + self,
4980 + repo_id: str,
4981 + delete_patterns: List[str],
4982 + *,
4983 + token: Union[bool, str, None] = None,
4984 + repo_type: Optional[str] = None,
4985 + revision: Optional[str] = None,
4986 + commit_message: Optional[str] = None,
4987 + commit_description: Optional[str] = None,
4988 + create_pr: Optional[bool] = None,
4989 + parent_commit: Optional[str] = None,
4990 + ) -> CommitInfo:
4991 + """
4992 + Delete files from a repository on the Hub.
4993 +
4994 + If a folder path is provided, the entire folder is deleted as well as
4995 + all files it contained.
4996 +
4997 + Args:
4998 + repo_id (`str`):
4999 + The repository from which the folder will be deleted, for example:
5000 + `"username/custom_transformers"`
5001 + delete_patterns (`List[str]`):
5002 + List of files or folders to delete. Each string can either be
5003 + a file path, a folder path or a Unix shell-style wildcard.
5004 + E.g. `["file.txt", "folder/", "data/*.parquet"]`
5005 + token (Union[bool, str, None], optional):
5006 + A valid user access token (string). Defaults to the locally saved
5007 + token, which is the recommended method for authentication (see
5008 + https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
5009 + To disable authentication, pass `False`.
5010 + to the stored token.
5011 + repo_type (`str`, *optional*):
5012 + Type of the repo to delete files from. Can be `"model"`,
5013 + `"dataset"` or `"space"`. Defaults to `"model"`.
5014 + revision (`str`, *optional*):
5015 + The git revision to commit from. Defaults to the head of the `"main"` branch.
5016 + commit_message (`str`, *optional*):
5017 + The summary (first line) of the generated commit. Defaults to
5018 + `f"Delete files using huggingface_hub"`.
5019 + commit_description (`str` *optional*)
5020 + The description of the generated commit.
5021 + create_pr (`boolean`, *optional*):
5022 + Whether or not to create a Pull Request with that commit. Defaults to `False`.
5023 + If `revision` is not set, PR is opened against the `"main"` branch. If
5024 + `revision` is set and is a branch, PR is opened against this branch. If
5025 + `revision` is set and is not a branch name (example: a commit oid), an
5026 + `RevisionNotFoundError` is returned by the server.
5027 + parent_commit (`str`, *optional*):
5028 + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported.
5029 + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`.
5030 + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`.
5031 + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be
5032 + especially useful if the repo is updated / committed to concurrently.
5033 + """
5034 + operations = self._prepare_folder_deletions(
5035 + repo_id=repo_id, repo_type=repo_type, delete_patterns=delete_patterns, path_in_repo="", revision=revision
5036 + )
5037 +
5038 + if commit_message is None:
5039 + commit_message = f"Delete files {' '.join(delete_patterns)} with huggingface_hub"
5040 +
5041 + return self.create_commit(
5042 + repo_id=repo_id,
5043 + repo_type=repo_type,
5044 + token=token,
5045 + operations=operations,
5046 + revision=revision,
5047 + commit_message=commit_message,
5048 + commit_description=commit_description,
5049 + create_pr=create_pr,
5050 + parent_commit=parent_commit,
5051 + )
5052 +
4844 5053 | @validate_hf_hub_args
4845 5054 | def delete_folder(
4846 5055 | self,
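Note: `delete_files` builds delete operations from file paths, folder paths, or Unix shell-style wildcards and pushes them as a single commit. A short usage sketch (repo id and patterns hypothetical):

```python
from huggingface_hub import HfApi

api = HfApi()
# Delete one file, one folder and every parquet file under data/ in one commit.
api.delete_files(
    repo_id="me/demo",
    delete_patterns=["file.txt", "folder/", "data/*.parquet"],
    repo_type="dataset",
)
```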
@@ -5003,7 +5212,7 @@ class HfApi:
5003 5212 | ```
5004 5213 |
5005 5214 | If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this
5006 - option, the `cache_dir` will not be used and a `.huggingface/` folder will be created at the root of `local_dir`
5215 + option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir`
5007 5216 | to store some metadata related to the downloaded files. While this mechanism is not as robust as the main
5008 5217 | cache-system, it's optimized for regularly pulling the latest version of a repository.
@@ -5046,21 +5255,21 @@ class HfApi:
5046 5255 | `str`: Local path of file or if networking is off, last version of file cached on disk.
5047 5256 |
5048 5257 | Raises:
5049 -
5050 -
5051 -
5052 -
5053 -
5054 -
5055 -
5056 -
5057 -
5058 -
5059 -
5060 -
5061 -
5062 -
5063 -
5258 + [`~utils.RepositoryNotFoundError`]
5259 + If the repository to download from cannot be found. This may be because it doesn't exist,
5260 + or because it is set to `private` and you do not have access.
5261 + [`~utils.RevisionNotFoundError`]
5262 + If the revision to download from cannot be found.
5263 + [`~utils.EntryNotFoundError`]
5264 + If the file to download cannot be found.
5265 + [`~utils.LocalEntryNotFoundError`]
5266 + If network is disabled or unavailable and file is not found in cache.
5267 + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
5268 + If `token=True` but the token cannot be found.
5269 + [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
5270 + If ETag cannot be determined.
5271 + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
5272 + If some parameter value is invalid.
5064 5273 | """
5065 5274 | from .file_download import hf_hub_download
5066 5275 |
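Note: the `Raises` section above is now populated; a sketch of how a caller might branch on the documented errors (repo and filename illustrative):

```python
from huggingface_hub import HfApi
from huggingface_hub.utils import EntryNotFoundError, RepositoryNotFoundError

api = HfApi()
try:
    path = api.hf_hub_download(repo_id="gpt2", filename="config.json")
except RepositoryNotFoundError:
    ...  # repo doesn't exist, or it is private and you lack access
except EntryNotFoundError:
    ...  # the file is not present in the repo
```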
@@ -5122,7 +5331,7 @@ class HfApi:
5122 5331 | `allow_patterns` and `ignore_patterns`.
5123 5332 |
5124 5333 | If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this
5125 - option, the `cache_dir` will not be used and a `.huggingface/` folder will be created at the root of `local_dir`
5334 + option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir`
5126 5335 | to store some metadata related to the downloaded files. While this mechanism is not as robust as the main
5127 5336 | cache-system, it's optimized for regularly pulling the latest version of a repository.
@@ -5176,12 +5385,17 @@ class HfApi:
5176 5385 | `str`: folder path of the repo snapshot.
5177 5386 |
5178 5387 | Raises:
5179 -
5180 -
5181 -
5182 -
5183 -
5184 -
5388 + [`~utils.RepositoryNotFoundError`]
5389 + If the repository to download from cannot be found. This may be because it doesn't exist,
5390 + or because it is set to `private` and you do not have access.
5391 + [`~utils.RevisionNotFoundError`]
5392 + If the revision to download from cannot be found.
5393 + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
5394 + If `token=True` and the token cannot be found.
5395 + [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if
5396 + ETag cannot be determined.
5397 + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
5398 + if some parameter value is invalid.
5185 5399 | """
5186 5400 | from ._snapshot_download import snapshot_download
5187 5401 |
@@ -5252,9 +5466,11 @@ class HfApi:
5252 5466 | [`SafetensorsRepoMetadata`]: information related to safetensors repo.
5253 5467 |
5254 5468 | Raises:
5255 -
5469 + [`NotASafetensorsRepoError`]
5470 + If the repo is not a safetensors repo i.e. doesn't have either a
5256 5471 | `model.safetensors` or a `model.safetensors.index.json` file.
5257 -
5472 + [`SafetensorsParsingError`]
5473 + If a safetensors file header couldn't be parsed correctly.
5258 5474 |
5259 5475 | Example:
5260 5476 | ```py
@@ -5371,9 +5587,11 @@ class HfApi:
5371 5587 | [`SafetensorsFileMetadata`]: information related to a safetensors file.
5372 5588 |
5373 5589 | Raises:
5374 -
5590 + [`NotASafetensorsRepoError`]:
5591 + If the repo is not a safetensors repo i.e. doesn't have either a
5375 5592 | `model.safetensors` or a `model.safetensors.index.json` file.
5376 -
5593 + [`SafetensorsParsingError`]:
5594 + If a safetensors file header couldn't be parsed correctly.
5377 5595 | """
5378 5596 | url = hf_hub_url(
5379 5597 | repo_id=repo_id, filename=filename, repo_type=repo_type, revision=revision, endpoint=self.endpoint
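Note: both safetensors helpers now document the same two exceptions. A hedged sketch of handling them (repo id illustrative):

```python
from huggingface_hub import HfApi
from huggingface_hub.utils import NotASafetensorsRepoError, SafetensorsParsingError

api = HfApi()
try:
    metadata = api.get_safetensors_metadata("gpt2")
except NotASafetensorsRepoError:
    ...  # no model.safetensors or model.safetensors.index.json in the repo
except SafetensorsParsingError:
    ...  # a safetensors file header could not be parsed
```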
@@ -6914,11 +7132,11 @@ class HfApi:
6914 7132 | attributes like `endpoint`, `repo_type` and `repo_id`.
6915 7133 |
6916 7134 | Raises:
6917 -
6918 - if the HuggingFace API returned an error
6919 - - [`~utils.RepositoryNotFoundError`]
7135 + [`~utils.RepositoryNotFoundError`]:
6920 7136 | If one of `from_id` or `to_id` cannot be found. This may be because it doesn't exist,
6921 7137 | or because it is set to `private` and you do not have access.
7138 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
7139 + If the HuggingFace API returned an error
6922 7140 |
6923 7141 | Example:
6924 7142 | ```python
@@ -6988,7 +7206,7 @@ class HfApi:
6988 7206 |
6989 7207 | Args:
6990 7208 | repo_id (`str`):
6991 - ID of the Space to update. Example: `"
7209 + ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`.
6992 7210 | storage (`str` or [`SpaceStorage`]):
6993 7211 | Storage tier. Either 'small', 'medium', or 'large'.
6994 7212 | token (Union[bool, str, None], optional):
@@ -7026,7 +7244,7 @@ class HfApi:
7026 7244 |
7027 7245 | Args:
7028 7246 | repo_id (`str`):
7029 - ID of the Space to update. Example: `"
7247 + ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`.
7030 7248 | token (Union[bool, str, None], optional):
7031 7249 | A valid user access token (string). Defaults to the locally saved
7032 7250 | token, which is the recommended method for authentication (see
@@ -7141,9 +7359,9 @@ class HfApi:
7141 7359 | accelerator (`str`):
7142 7360 | The hardware accelerator to be used for inference (e.g. `"cpu"`).
7143 7361 | instance_size (`str`):
7144 - The size or type of the instance to be used for hosting the model (e.g. `"
7362 + The size or type of the instance to be used for hosting the model (e.g. `"x4"`).
7145 7363 | instance_type (`str`):
7146 - The cloud instance type where the Inference Endpoint will be deployed (e.g. `"
7364 + The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`).
7147 7365 | region (`str`):
7148 7366 | The cloud region in which the Inference Endpoint will be created (e.g. `"us-east-1"`).
7149 7367 | vendor (`str`):
@@ -7178,7 +7396,7 @@ class HfApi:
7178 7396 | ```python
7179 7397 | >>> from huggingface_hub import HfApi
7180 7398 | >>> api = HfApi()
7181 - >>> create_inference_endpoint(
7399 + >>> endpoint = api.create_inference_endpoint(
7182 7400 | ... "my-endpoint-name",
7183 7401 | ... repository="gpt2",
7184 7402 | ... framework="pytorch",
@@ -7187,8 +7405,8 @@ class HfApi:
7187 7405 | ... vendor="aws",
7188 7406 | ... region="us-east-1",
7189 7407 | ... type="protected",
7190 - ... instance_size="
7191 - ... instance_type="
7408 + ... instance_size="x2",
7409 + ... instance_type="intel-icl",
7192 7410 | ... )
7193 7411 | >>> endpoint
7194 7412 | InferenceEndpoint(name='my-endpoint-name', status="pending",...)
@@ -7202,7 +7420,7 @@ class HfApi:
7202 7420 | # Start an Inference Endpoint running Zephyr-7b-beta on TGI
7203 7421 | >>> from huggingface_hub import HfApi
7204 7422 | >>> api = HfApi()
7205 - >>> create_inference_endpoint(
7423 + >>> endpoint = api.create_inference_endpoint(
7206 7424 | ... "aws-zephyr-7b-beta-0486",
7207 7425 | ... repository="HuggingFaceH4/zephyr-7b-beta",
7208 7426 | ... framework="pytorch",
@@ -7211,8 +7429,8 @@ class HfApi:
7211 7429 | ... vendor="aws",
7212 7430 | ... region="us-east-1",
7213 7431 | ... type="protected",
7214 - ... instance_size="
7215 - ... instance_type="
7432 + ... instance_size="x1",
7433 + ... instance_type="nvidia-a10g",
7216 7434 | ... custom_image={
7217 7435 | ... "health_route": "/health",
7218 7436 | ... "env": {
@@ -7327,6 +7545,7 @@ class HfApi:
7327 7545 | framework: Optional[str] = None,
7328 7546 | revision: Optional[str] = None,
7329 7547 | task: Optional[str] = None,
7548 + custom_image: Optional[Dict] = None,
7330 7549 | # Other
7331 7550 | namespace: Optional[str] = None,
7332 7551 | token: Union[bool, str, None] = None,
@@ -7345,9 +7564,9 @@ class HfApi:
7345 7564 | accelerator (`str`, *optional*):
7346 7565 | The hardware accelerator to be used for inference (e.g. `"cpu"`).
7347 7566 | instance_size (`str`, *optional*):
7348 - The size or type of the instance to be used for hosting the model (e.g. `"
7567 + The size or type of the instance to be used for hosting the model (e.g. `"x4"`).
7349 7568 | instance_type (`str`, *optional*):
7350 - The cloud instance type where the Inference Endpoint will be deployed (e.g. `"
7569 + The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`).
7351 7570 | min_replica (`int`, *optional*):
7352 7571 | The minimum number of replicas (instances) to keep running for the Inference Endpoint.
7353 7572 | max_replica (`int`, *optional*):
@@ -7361,6 +7580,9 @@ class HfApi:
7361 7580 | The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
7362 7581 | task (`str`, *optional*):
7363 7582 | The task on which to deploy the model (e.g. `"text-classification"`).
7583 + custom_image (`Dict`, *optional*):
7584 + A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
7585 + Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
7364 7586 |
7365 7587 | namespace (`str`, *optional*):
7366 7588 | The namespace where the Inference Endpoint will be updated. Defaults to the current user's namespace.
@@ -7386,13 +7608,14 @@ class HfApi:
7386 7608 | "minReplica": min_replica,
7387 7609 | },
7388 7610 | }
7389 - if any(value is not None for value in (repository, framework, revision, task)):
7611 + if any(value is not None for value in (repository, framework, revision, task, custom_image)):
7612 + image = {"custom": custom_image} if custom_image is not None else {"huggingface": {}}
7390 7613 | payload["model"] = {
7391 7614 | "framework": framework,
7392 7615 | "repository": repository,
7393 7616 | "revision": revision,
7394 7617 | "task": task,
7395 - "image":
7618 + "image": image,
7396 7619 | }
7397 7620 |
7398 7621 | response = get_session().put(
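Note: `update_inference_endpoint` now accepts `custom_image`; when it is set, the payload's `"image"` key becomes `{"custom": custom_image}` instead of the default `{"huggingface": {}}`. A sketch (endpoint name and image config are illustrative, modeled on the TGI example above):

```python
from huggingface_hub import HfApi

api = HfApi()
api.update_inference_endpoint(
    "aws-zephyr-7b-beta-0486",
    custom_image={
        "health_route": "/health",
        "url": "ghcr.io/huggingface/text-generation-inference:latest",
        "env": {"MODEL_ID": "/repository"},
    },
)
```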
@@ -7468,7 +7691,12 @@ class HfApi:
7468 7691 | return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token)
7469 7692 |
7470 7693 | def resume_inference_endpoint(
7471 - self,
7694 + self,
7695 + name: str,
7696 + *,
7697 + namespace: Optional[str] = None,
7698 + running_ok: bool = True,
7699 + token: Union[bool, str, None] = None,
7472 7700 | ) -> InferenceEndpoint:
7473 7701 | """Resume an Inference Endpoint.
7474 7702 |
@@ -7479,6 +7707,9 @@ class HfApi:
7479 7707 | The name of the Inference Endpoint to resume.
7480 7708 | namespace (`str`, *optional*):
7481 7709 | The namespace in which the Inference Endpoint is located. Defaults to the current user.
7710 + running_ok (`bool`, *optional*):
7711 + If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults to
7712 + `True`.
7482 7713 | token (Union[bool, str, None], optional):
7483 7714 | A valid user access token (string). Defaults to the locally saved
7484 7715 | token, which is the recommended method for authentication (see
@@ -7494,7 +7725,14 @@ class HfApi:
7494 7725 | f"{INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/resume",
7495 7726 | headers=self._build_hf_headers(token=token),
7496 7727 | )
7497 -
7728 + try:
7729 + hf_raise_for_status(response)
7730 + except HfHubHTTPError as error:
7731 + # If already running (and it's ok), then fetch current status and return
7732 + if running_ok and error.response.status_code == 400 and "already running" in error.response.text:
7733 + return self.get_inference_endpoint(name, namespace=namespace, token=token)
7734 + # Otherwise, raise the error
7735 + raise
7498 7736 |
7499 7737 | return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token)
7500 7738 |
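Note: with `running_ok=True` (the default), resuming an already-running endpoint is no longer an error: the HTTP 400 is swallowed and the current endpoint state is fetched and returned instead. Sketch (endpoint name hypothetical):

```python
from huggingface_hub import HfApi

api = HfApi()
endpoint = api.resume_inference_endpoint("my-endpoint-name")  # idempotent by default
# Opt back into the old strict behavior:
endpoint = api.resume_inference_endpoint("my-endpoint-name", running_ok=False)
```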
@@ -7855,12 +8093,12 @@ class HfApi:
7855 8093 | Returns: [`Collection`]
7856 8094 |
7857 8095 | Raises:
7858 - `HTTPError
8096 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
7859 8097 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
7860 8098 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
7861 - `HTTPError
8099 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
7862 8100 | HTTP 404 if the item you try to add to the collection does not exist on the Hub.
7863 - `HTTPError
8101 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
7864 8102 | HTTP 409 if the item you try to add to the collection is already in the collection (and exists_ok=False)
7865 8103 |
7866 8104 | Example:
@@ -8044,9 +8282,9 @@ class HfApi:
8044 8282 | be populated with user's answers.
8045 8283 |
8046 8284 | Raises:
8047 - `HTTPError
8285 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8048 8286 | HTTP 400 if the repo is not gated.
8049 - `HTTPError
8287 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8050 8288 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8051 8289 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8052 8290 |
@@ -8110,9 +8348,9 @@ class HfApi:
8110 8348 | be populated with user's answers.
8111 8349 |
8112 8350 | Raises:
8113 - `HTTPError
8351 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8114 8352 | HTTP 400 if the repo is not gated.
8115 - `HTTPError
8353 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8116 8354 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8117 8355 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8118 8356 |
@@ -8172,9 +8410,9 @@ class HfApi:
8172 8410 | be populated with user's answers.
8173 8411 |
8174 8412 | Raises:
8175 - `HTTPError
8413 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8176 8414 | HTTP 400 if the repo is not gated.
8177 - `HTTPError
8415 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8178 8416 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8179 8417 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8180 8418 |
@@ -8222,7 +8460,7 @@ class HfApi:
8222 8460 | AccessRequest(
8223 8461 | username=request["user"]["user"],
8224 8462 | fullname=request["user"]["fullname"],
8225 - email=request["user"]
8463 + email=request["user"].get("email"),
8226 8464 | status=request["status"],
8227 8465 | timestamp=parse_datetime(request["timestamp"]),
8228 8466 | fields=request.get("fields"), # only if custom fields in form
@@ -8256,16 +8494,16 @@ class HfApi:
8256 8494 | To disable authentication, pass `False`.
8257 8495 |
8258 8496 | Raises:
8259 - `HTTPError
8497 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8260 8498 | HTTP 400 if the repo is not gated.
8261 - `HTTPError
8499 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8262 8500 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8263 8501 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8264 - `HTTPError
8502 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8265 8503 | HTTP 404 if the user does not exist on the Hub.
8266 - `HTTPError
8504 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8267 8505 | HTTP 404 if the user access request cannot be found.
8268 - `HTTPError
8506 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8269 8507 | HTTP 404 if the user access request is already in the pending list.
8270 8508 | """
8271 8509 | self._handle_access_request(repo_id, user, "pending", repo_type=repo_type, token=token)
@@ -8298,16 +8536,16 @@ class HfApi:
8298 8536 | To disable authentication, pass `False`.
8299 8537 |
8300 8538 | Raises:
8301 - `HTTPError
8539 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8302 8540 | HTTP 400 if the repo is not gated.
8303 - `HTTPError
8541 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8304 8542 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8305 8543 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8306 - `HTTPError
8544 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8307 8545 | HTTP 404 if the user does not exist on the Hub.
8308 - `HTTPError
8546 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8309 8547 | HTTP 404 if the user access request cannot be found.
8310 - `HTTPError
8548 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8311 8549 | HTTP 404 if the user access request is already in the accepted list.
8312 8550 | """
8313 8551 | self._handle_access_request(repo_id, user, "accepted", repo_type=repo_type, token=token)
@@ -8340,16 +8578,16 @@ class HfApi:
8340 8578 | To disable authentication, pass `False`.
8341 8579 |
8342 8580 | Raises:
8343 - `HTTPError
8581 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8344 8582 | HTTP 400 if the repo is not gated.
8345 - `HTTPError
8583 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8346 8584 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8347 8585 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8348 - `HTTPError
8586 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8349 8587 | HTTP 404 if the user does not exist on the Hub.
8350 - `HTTPError
8588 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8351 8589 | HTTP 404 if the user access request cannot be found.
8352 - `HTTPError
8590 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8353 8591 | HTTP 404 if the user access request is already in the rejected list.
8354 8592 | """
8355 8593 | self._handle_access_request(repo_id, user, "rejected", repo_type=repo_type, token=token)
@@ -8403,14 +8641,14 @@ class HfApi:
8403 8641 | To disable authentication, pass `False`.
8404 8642 |
8405 8643 | Raises:
8406 - `HTTPError
8644 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8407 8645 | HTTP 400 if the repo is not gated.
8408 - `HTTPError
8646 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8409 8647 | HTTP 400 if the user already has access to the repo.
8410 - `HTTPError
8648 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8411 8649 | HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8412 8650 | or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8413 - `HTTPError
8651 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8414 8652 | HTTP 404 if the user does not exist on the Hub.
8415 8653 | """
8416 8654 | if repo_type not in REPO_TYPES:
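Note: the gated-access helpers raise plain `requests.HTTPError` subclasses, so callers can branch on the status code. A hedged sketch (repo and user hypothetical):

```python
from requests import HTTPError

from huggingface_hub import HfApi

api = HfApi()
try:
    api.grant_access("me/gated-model", "some-user")
except HTTPError as e:
    if e.response.status_code == 400:
        ...  # repo not gated, or user already has access
    else:
        raise
```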
@@ -8426,6 +8664,392 @@ class HfApi:
8426 8664 | hf_raise_for_status(response)
8427 8665 | return response.json()
8428 8666 |
8667 + ###################
8668 + # Manage webhooks #
8669 + ###################
8670 +
8671 + @validate_hf_hub_args
8672 + def get_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
8673 + """Get a webhook by its id.
8674 +
8675 + Args:
8676 + webhook_id (`str`):
8677 + The unique identifier of the webhook to get.
8678 + token (Union[bool, str, None], optional):
8679 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
8680 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
8681 + To disable authentication, pass `False`.
8682 +
8683 + Returns:
8684 + [`WebhookInfo`]:
8685 + Info about the webhook.
8686 +
8687 + Example:
8688 + ```python
8689 + >>> from huggingface_hub import get_webhook
8690 + >>> webhook = get_webhook("654bbbc16f2ec14d77f109cc")
8691 + >>> print(webhook)
8692 + WebhookInfo(
8693 + id="654bbbc16f2ec14d77f109cc",
8694 + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
8695 + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8696 + secret="my-secret",
8697 + domains=["repo", "discussion"],
8698 + disabled=False,
8699 + )
8700 + ```
8701 + """
8702 + response = get_session().get(
8703 + f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
8704 + headers=self._build_hf_headers(token=token),
8705 + )
8706 + hf_raise_for_status(response)
8707 + webhook_data = response.json()["webhook"]
8708 +
8709 + watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
8710 +
8711 + webhook = WebhookInfo(
8712 + id=webhook_data["id"],
8713 + url=webhook_data["url"],
8714 + watched=watched_items,
8715 + domains=webhook_data["domains"],
8716 + secret=webhook_data.get("secret"),
8717 + disabled=webhook_data["disabled"],
8718 + )
8719 +
8720 + return webhook
8721 +
8722 + @validate_hf_hub_args
8723 + def list_webhooks(self, *, token: Union[bool, str, None] = None) -> List[WebhookInfo]:
8724 + """List all configured webhooks.
8725 +
8726 + Args:
8727 + token (Union[bool, str, None], optional):
8728 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
8729 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
8730 + To disable authentication, pass `False`.
8731 +
8732 + Returns:
8733 + `List[WebhookInfo]`:
8734 + List of webhook info objects.
8735 +
8736 + Example:
8737 + ```python
8738 + >>> from huggingface_hub import list_webhooks
8739 + >>> webhooks = list_webhooks()
8740 + >>> len(webhooks)
8741 + 2
8742 + >>> webhooks[0]
8743 + WebhookInfo(
8744 + id="654bbbc16f2ec14d77f109cc",
8745 + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
8746 + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8747 + secret="my-secret",
8748 + domains=["repo", "discussion"],
8749 + disabled=False,
8750 + )
8751 + ```
8752 + """
8753 + response = get_session().get(
8754 + f"{ENDPOINT}/api/settings/webhooks",
8755 + headers=self._build_hf_headers(token=token),
8756 + )
8757 + hf_raise_for_status(response)
8758 + webhooks_data = response.json()
8759 +
8760 + return [
8761 + WebhookInfo(
8762 + id=webhook["id"],
8763 + url=webhook["url"],
8764 + watched=[WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook["watched"]],
8765 + domains=webhook["domains"],
8766 + secret=webhook.get("secret"),
8767 + disabled=webhook["disabled"],
8768 + )
8769 + for webhook in webhooks_data
8770 + ]
8771 +
8772 + @validate_hf_hub_args
8773 + def create_webhook(
8774 + self,
8775 + *,
8776 + url: str,
8777 + watched: List[Union[Dict, WebhookWatchedItem]],
8778 + domains: Optional[List[WEBHOOK_DOMAIN_T]] = None,
8779 + secret: Optional[str] = None,
8780 + token: Union[bool, str, None] = None,
8781 + ) -> WebhookInfo:
8782 + """Create a new webhook.
8783 +
8784 + Args:
8785 + url (`str`):
8786 + URL to send the payload to.
8787 + watched (`List[WebhookWatchedItem]`):
8788 + List of [`WebhookWatchedItem`] to be watched by the webhook. It can be users, orgs, models, datasets or spaces.
8789 + Watched items can also be provided as plain dictionaries.
8790 + domains (`List[Literal["repo", "discussion"]]`, optional):
8791 + List of domains to watch. It can be "repo", "discussion" or both.
8792 + secret (`str`, optional):
8793 + A secret to sign the payload with.
8794 + token (Union[bool, str, None], optional):
8795 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
8796 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
8797 + To disable authentication, pass `False`.
8798 +
8799 + Returns:
8800 + [`WebhookInfo`]:
8801 + Info about the newly created webhook.
8802 +
8803 + Example:
8804 + ```python
8805 + >>> from huggingface_hub import create_webhook
8806 + >>> payload = create_webhook(
8807 + ... watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}],
8808 + ... url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8809 + ... domains=["repo", "discussion"],
8810 + ... secret="my-secret",
8811 + ... )
8812 + >>> print(payload)
8813 + WebhookInfo(
8814 + id="654bbbc16f2ec14d77f109cc",
8815 + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8816 + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
8817 + domains=["repo", "discussion"],
8818 + secret="my-secret",
8819 + disabled=False,
8820 + )
8821 + ```
8822 + """
8823 + watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
8824 +
8825 + response = get_session().post(
8826 + f"{ENDPOINT}/api/settings/webhooks",
8827 + json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
8828 + headers=self._build_hf_headers(token=token),
8829 + )
8830 + hf_raise_for_status(response)
8831 + webhook_data = response.json()["webhook"]
8832 + watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
8833 +
8834 + webhook = WebhookInfo(
8835 + id=webhook_data["id"],
8836 + url=webhook_data["url"],
8837 + watched=watched_items,
8838 + domains=webhook_data["domains"],
8839 + secret=webhook_data.get("secret"),
8840 + disabled=webhook_data["disabled"],
8841 + )
8842 +
8843 + return webhook
8844 +
8845 + @validate_hf_hub_args
8846 + def update_webhook(
8847 + self,
8848 + webhook_id: str,
8849 + *,
8850 + url: Optional[str] = None,
8851 + watched: Optional[List[Union[Dict, WebhookWatchedItem]]] = None,
8852 + domains: Optional[List[WEBHOOK_DOMAIN_T]] = None,
8853 + secret: Optional[str] = None,
8854 + token: Union[bool, str, None] = None,
8855 + ) -> WebhookInfo:
8856 + """Update an existing webhook.
8857 +
8858 + Args:
8859 + webhook_id (`str`):
8860 + The unique identifier of the webhook to be updated.
8861 + url (`str`, optional):
8862 + The URL to which the payload will be sent.
8863 + watched (`List[WebhookWatchedItem]`, optional):
8864 + List of items to watch. It can be users, orgs, models, datasets, or spaces.
8865 + Refer to [`WebhookWatchedItem`] for more details. Watched items can also be provided as plain dictionaries.
8866 + domains (`List[Literal["repo", "discussion"]]`, optional):
8867 + The domains to watch. This can include "repo", "discussion", or both.
8868 + secret (`str`, optional):
8869 + A secret to sign the payload with, providing an additional layer of security.
8870 + token (Union[bool, str, None], optional):
8871 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
8872 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
8873 + To disable authentication, pass `False`.
8874 +
8875 + Returns:
8876 + [`WebhookInfo`]:
8877 + Info about the updated webhook.
8878 +
8879 + Example:
8880 + ```python
8881 + >>> from huggingface_hub import update_webhook
8882 + >>> updated_payload = update_webhook(
8883 + ... webhook_id="654bbbc16f2ec14d77f109cc",
8884 + ... url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8885 + ... watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}],
8886 + ... domains=["repo"],
8887 + ... secret="my-secret",
8888 + ... )
8889 + >>> print(updated_payload)
8890 + WebhookInfo(
8891 + id="654bbbc16f2ec14d77f109cc",
8892 + url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8893 + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
8894 + domains=["repo"],
8895 + secret="my-secret",
8896 + disabled=False,
8897 + ```
8898 + """
8899 + if watched is None:
8900 + watched = []
8901 + watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
8902 +
8903 + response = get_session().post(
8904 + f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
8905 + json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
8906 + headers=self._build_hf_headers(token=token),
8907 + )
8908 + hf_raise_for_status(response)
8909 + webhook_data = response.json()["webhook"]
8910 +
8911 + watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
8912 +
8913 + webhook = WebhookInfo(
8914 + id=webhook_data["id"],
8915 + url=webhook_data["url"],
8916 + watched=watched_items,
8917 + domains=webhook_data["domains"],
8918 + secret=webhook_data.get("secret"),
8919 + disabled=webhook_data["disabled"],
8920 + )
8921 +
8922 + return webhook
8923 +
8924 + @validate_hf_hub_args
8925 + def enable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
8926 + """Enable a webhook (makes it "active").
8927 +
8928 + Args:
8929 + webhook_id (`str`):
8930 + The unique identifier of the webhook to enable.
8931 + token (Union[bool, str, None], optional):
8932 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
8933 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
8934 + To disable authentication, pass `False`.
8935 +
8936 + Returns:
8937 + [`WebhookInfo`]:
8938 + Info about the enabled webhook.
8939 +
8940 + Example:
8941 + ```python
8942 + >>> from huggingface_hub import enable_webhook
8943 + >>> enabled_webhook = enable_webhook("654bbbc16f2ec14d77f109cc")
8944 + >>> enabled_webhook
8945 + WebhookInfo(
8946 + id="654bbbc16f2ec14d77f109cc",
8947 + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8948 + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
8949 + domains=["repo", "discussion"],
8950 + secret="my-secret",
8951 + disabled=False,
8952 + )
8953 + ```
8954 + """
8955 + response = get_session().post(
8956 + f"{ENDPOINT}/api/settings/webhooks/{webhook_id}/enable",
8957 + headers=self._build_hf_headers(token=token),
8958 + )
8959 + hf_raise_for_status(response)
8960 + webhook_data = response.json()["webhook"]
8961 +
8962 + watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
8963 +
8964 + webhook = WebhookInfo(
8965 + id=webhook_data["id"],
8966 + url=webhook_data["url"],
8967 + watched=watched_items,
8968 + domains=webhook_data["domains"],
8969 + secret=webhook_data.get("secret"),
8970 + disabled=webhook_data["disabled"],
8971 + )
8972 +
8973 + return webhook
8974 +
8975 + @validate_hf_hub_args
8976 + def disable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
8977 + """Disable a webhook (makes it "disabled").
8978 +
8979 + Args:
8980 + webhook_id (`str`):
8981 + The unique identifier of the webhook to disable.
8982 + token (Union[bool, str, None], optional):
8983 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
8984 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
8985 + To disable authentication, pass `False`.
8986 +
8987 + Returns:
8988 + [`WebhookInfo`]:
8989 + Info about the disabled webhook.
8990 +
8991 + Example:
8992 + ```python
8993 + >>> from huggingface_hub import disable_webhook
8994 + >>> disabled_webhook = disable_webhook("654bbbc16f2ec14d77f109cc")
8995 + >>> disabled_webhook
8996 + WebhookInfo(
8997 + id="654bbbc16f2ec14d77f109cc",
8998 + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
8999 + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
9000 + domains=["repo", "discussion"],
9001 + secret="my-secret",
9002 + disabled=True,
9003 + )
9004 + ```
9005 + """
9006 + response = get_session().post(
9007 + f"{ENDPOINT}/api/settings/webhooks/{webhook_id}/disable",
9008 + headers=self._build_hf_headers(token=token),
9009 + )
9010 + hf_raise_for_status(response)
9011 + webhook_data = response.json()["webhook"]
9012 +
9013 + watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
9014 +
9015 + webhook = WebhookInfo(
9016 + id=webhook_data["id"],
9017 + url=webhook_data["url"],
9018 + watched=watched_items,
9019 + domains=webhook_data["domains"],
9020 + secret=webhook_data.get("secret"),
9021 + disabled=webhook_data["disabled"],
9022 + )
9023 +
9024 + return webhook
9025 +
9026 + @validate_hf_hub_args
9027 + def delete_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> None:
9028 + """Delete a webhook.
9029 +
9030 + Args:
9031 + webhook_id (`str`):
9032 + The unique identifier of the webhook to delete.
9033 + token (Union[bool, str, None], optional):
9034 + A valid user access token (string). Defaults to the locally saved token, which is the recommended
9035 + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
9036 + To disable authentication, pass `False`.
9037 +
9038 + Returns:
9039 + `None`
9040 +
9041 + Example:
9042 + ```python
9043 + >>> from huggingface_hub import delete_webhook
9044 + >>> delete_webhook("654bbbc16f2ec14d77f109cc")
9045 + ```
9046 + """
9047 + response = get_session().delete(
9048 + f"{ENDPOINT}/api/settings/webhooks/{webhook_id}",
9049 + headers=self._build_hf_headers(token=token),
9050 + )
9051 + hf_raise_for_status(response)
9052 +
8429 9053 | #############
8430 9054 | # Internals #
8431 9055 | #############
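Note: the five webhook endpoints above share the same `WebhookInfo` payload shape. A compact lifecycle sketch using the new API (url, watched items and secret are illustrative):

```python
from huggingface_hub import HfApi

api = HfApi()
webhook = api.create_webhook(
    url="https://example.com/hf-webhook",
    watched=[{"type": "user", "name": "julien-c"}],
    domains=["repo"],
    secret="my-secret",
)
api.disable_webhook(webhook.id)   # keep it configured but inactive
api.enable_webhook(webhook.id)
api.delete_webhook(webhook.id)
```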
@@ -8454,7 +9078,7 @@ class HfApi:
8454 9078 | headers=self.headers,
8455 9079 | )
8456 9080 |
8457 - def
9081 + def _prepare_folder_deletions(
8458 9082 | self,
8459 9083 | repo_id: str,
8460 9084 | repo_type: Optional[str],
@@ -8506,7 +9130,7 @@ class HfApi:
8506 9130 | `User`: A [`User`] object with the user's overview.
8507 9131 |
8508 9132 | Raises:
8509 - `HTTPError
9133 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8510 9134 | HTTP 404 If the user does not exist on the Hub.
8511 9135 | """
8512 9136 | r = get_session().get(f"{ENDPOINT}/api/users/{username}/overview")
@@ -8526,7 +9150,7 @@ class HfApi:
8526 9150 | `Iterable[User]`: A list of [`User`] objects with the members of the organization.
8527 9151 |
8528 9152 | Raises:
8529 - `HTTPError
9153 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8530 9154 | HTTP 404 If the organization does not exist on the Hub.
8531 9155 |
8532 9156 | """
@@ -8550,7 +9174,7 @@ class HfApi:
8550 9174 | `Iterable[User]`: A list of [`User`] objects with the followers of the user.
8551 9175 |
8552 9176 | Raises:
8553 - `HTTPError
9177 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8554 9178 | HTTP 404 If the user does not exist on the Hub.
8555 9179 |
8556 9180 | """
@@ -8574,7 +9198,7 @@ class HfApi:
8574 9198 | `Iterable[User]`: A list of [`User`] objects with the users followed by the user.
8575 9199 |
8576 9200 | Raises:
8577 - `HTTPError
9201 + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8578 9202 | HTTP 404 If the user does not exist on the Hub.
8579 9203 |
8580 9204 | """
@@ -8677,6 +9301,7 @@ upload_file = api.upload_file
8677 9301 | upload_folder = api.upload_folder
8678 9302 | delete_file = api.delete_file
8679 9303 | delete_folder = api.delete_folder
9304 + delete_files = api.delete_files
8680 9305 | create_commits_on_pr = api.create_commits_on_pr
8681 9306 | preupload_lfs_files = api.preupload_lfs_files
8682 9307 | create_branch = api.create_branch
@@ -8754,6 +9379,16 @@ accept_access_request = api.accept_access_request
8754 9379 | reject_access_request = api.reject_access_request
8755 9380 | grant_access = api.grant_access
8756 9381 |
9382 + # Webhooks API
9383 + create_webhook = api.create_webhook
9384 + disable_webhook = api.disable_webhook
9385 + delete_webhook = api.delete_webhook
9386 + enable_webhook = api.enable_webhook
9387 + get_webhook = api.get_webhook
9388 + list_webhooks = api.list_webhooks
9389 + update_webhook = api.update_webhook
9390 +
9391 +
8757 9392 | # User API
8758 9393 | get_user_overview = api.get_user_overview
8759 9394 | list_organization_members = api.list_organization_members