huggingface-hub 0.34.4__py3-none-any.whl → 1.0.0rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of huggingface-hub has been flagged for review; see the advisory on the registry page for details.
- huggingface_hub/__init__.py +46 -45
- huggingface_hub/_commit_api.py +28 -28
- huggingface_hub/_commit_scheduler.py +11 -8
- huggingface_hub/_inference_endpoints.py +8 -8
- huggingface_hub/_jobs_api.py +167 -10
- huggingface_hub/_login.py +13 -39
- huggingface_hub/_oauth.py +8 -8
- huggingface_hub/_snapshot_download.py +14 -28
- huggingface_hub/_space_api.py +4 -4
- huggingface_hub/_tensorboard_logger.py +13 -14
- huggingface_hub/_upload_large_folder.py +15 -15
- huggingface_hub/_webhooks_payload.py +3 -3
- huggingface_hub/_webhooks_server.py +2 -2
- huggingface_hub/cli/_cli_utils.py +2 -2
- huggingface_hub/cli/auth.py +5 -6
- huggingface_hub/cli/cache.py +14 -20
- huggingface_hub/cli/download.py +4 -4
- huggingface_hub/cli/jobs.py +560 -11
- huggingface_hub/cli/lfs.py +4 -4
- huggingface_hub/cli/repo.py +7 -7
- huggingface_hub/cli/repo_files.py +2 -2
- huggingface_hub/cli/upload.py +4 -4
- huggingface_hub/cli/upload_large_folder.py +3 -3
- huggingface_hub/commands/_cli_utils.py +2 -2
- huggingface_hub/commands/delete_cache.py +13 -13
- huggingface_hub/commands/download.py +4 -13
- huggingface_hub/commands/lfs.py +4 -4
- huggingface_hub/commands/repo_files.py +2 -2
- huggingface_hub/commands/scan_cache.py +1 -1
- huggingface_hub/commands/tag.py +1 -3
- huggingface_hub/commands/upload.py +4 -4
- huggingface_hub/commands/upload_large_folder.py +3 -3
- huggingface_hub/commands/user.py +5 -6
- huggingface_hub/community.py +5 -5
- huggingface_hub/constants.py +3 -41
- huggingface_hub/dataclasses.py +16 -19
- huggingface_hub/errors.py +42 -29
- huggingface_hub/fastai_utils.py +8 -9
- huggingface_hub/file_download.py +153 -252
- huggingface_hub/hf_api.py +815 -600
- huggingface_hub/hf_file_system.py +98 -62
- huggingface_hub/hub_mixin.py +37 -57
- huggingface_hub/inference/_client.py +177 -325
- huggingface_hub/inference/_common.py +110 -124
- huggingface_hub/inference/_generated/_async_client.py +226 -432
- huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
- huggingface_hub/inference/_generated/types/base.py +10 -7
- huggingface_hub/inference/_generated/types/chat_completion.py +18 -16
- huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
- huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
- huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
- huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
- huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
- huggingface_hub/inference/_generated/types/summarization.py +2 -2
- huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
- huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
- huggingface_hub/inference/_generated/types/text_generation.py +10 -10
- huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
- huggingface_hub/inference/_generated/types/token_classification.py +2 -2
- huggingface_hub/inference/_generated/types/translation.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
- huggingface_hub/inference/_mcp/_cli_hacks.py +3 -3
- huggingface_hub/inference/_mcp/agent.py +3 -3
- huggingface_hub/inference/_mcp/cli.py +1 -1
- huggingface_hub/inference/_mcp/constants.py +2 -3
- huggingface_hub/inference/_mcp/mcp_client.py +58 -30
- huggingface_hub/inference/_mcp/types.py +10 -7
- huggingface_hub/inference/_mcp/utils.py +11 -7
- huggingface_hub/inference/_providers/__init__.py +2 -2
- huggingface_hub/inference/_providers/_common.py +49 -25
- huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
- huggingface_hub/inference/_providers/cohere.py +3 -3
- huggingface_hub/inference/_providers/fal_ai.py +25 -25
- huggingface_hub/inference/_providers/featherless_ai.py +4 -4
- huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
- huggingface_hub/inference/_providers/hf_inference.py +28 -20
- huggingface_hub/inference/_providers/hyperbolic.py +4 -4
- huggingface_hub/inference/_providers/nebius.py +10 -10
- huggingface_hub/inference/_providers/novita.py +5 -5
- huggingface_hub/inference/_providers/nscale.py +4 -4
- huggingface_hub/inference/_providers/replicate.py +15 -15
- huggingface_hub/inference/_providers/sambanova.py +6 -6
- huggingface_hub/inference/_providers/together.py +7 -7
- huggingface_hub/lfs.py +20 -31
- huggingface_hub/repocard.py +18 -18
- huggingface_hub/repocard_data.py +56 -56
- huggingface_hub/serialization/__init__.py +0 -1
- huggingface_hub/serialization/_base.py +9 -9
- huggingface_hub/serialization/_dduf.py +7 -7
- huggingface_hub/serialization/_torch.py +28 -28
- huggingface_hub/utils/__init__.py +10 -4
- huggingface_hub/utils/_auth.py +5 -5
- huggingface_hub/utils/_cache_manager.py +31 -31
- huggingface_hub/utils/_deprecation.py +1 -1
- huggingface_hub/utils/_dotenv.py +3 -3
- huggingface_hub/utils/_fixes.py +0 -10
- huggingface_hub/utils/_git_credential.py +4 -4
- huggingface_hub/utils/_headers.py +7 -29
- huggingface_hub/utils/_http.py +366 -208
- huggingface_hub/utils/_pagination.py +4 -4
- huggingface_hub/utils/_paths.py +5 -5
- huggingface_hub/utils/_runtime.py +15 -13
- huggingface_hub/utils/_safetensors.py +21 -21
- huggingface_hub/utils/_subprocess.py +9 -9
- huggingface_hub/utils/_telemetry.py +3 -3
- huggingface_hub/utils/_typing.py +25 -5
- huggingface_hub/utils/_validators.py +53 -72
- huggingface_hub/utils/_xet.py +16 -16
- huggingface_hub/utils/_xet_progress_reporting.py +32 -11
- huggingface_hub/utils/insecure_hashlib.py +3 -9
- huggingface_hub/utils/tqdm.py +3 -3
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/METADATA +18 -29
- huggingface_hub-1.0.0rc0.dist-info/RECORD +161 -0
- huggingface_hub/inference_api.py +0 -217
- huggingface_hub/keras_mixin.py +0 -500
- huggingface_hub/repository.py +0 -1477
- huggingface_hub/serialization/_tensorflow.py +0 -95
- huggingface_hub/utils/_hf_folder.py +0 -68
- huggingface_hub-0.34.4.dist-info/RECORD +0 -166
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/top_level.txt +0 -0
huggingface_hub/hf_api.py
CHANGED
@@ -34,22 +34,17 @@ from typing import (
     Any,
     BinaryIO,
     Callable,
-    Dict,
     Iterable,
     Iterator,
-    List,
     Literal,
     Optional,
-    Tuple,
-    Type,
     TypeVar,
     Union,
     overload,
 )
 from urllib.parse import quote, unquote

-import requests
-from requests.exceptions import HTTPError
+import httpx
 from tqdm.auto import tqdm as base_tqdm
 from tqdm.contrib.concurrent import thread_map
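The headline change in this first hunk is the HTTP backend swap: `requests` is dropped in favor of `httpx`, and the deprecated `Dict`/`List`/`Tuple`/`Type` typing aliases give way to the built-in generics. For callers, the practical fallout is that `requests.exceptions.HTTPError` no longer escapes hub calls; the library's own `HfHubHTTPError` is the stable exception to catch. A minimal sketch (the token value is a placeholder):

```python
from huggingface_hub import whoami
from huggingface_hub.errors import HfHubHTTPError

# 0.x code often caught requests.exceptions.HTTPError; with the httpx
# backend the client raises HfHubHTTPError itself (see the whoami() hunk
# further down, which swaps HTTPError for HfHubHTTPError).
try:
    whoami(token="hf_placeholder_invalid_token")  # placeholder token
except HfHubHTTPError as e:
    print(e.response.status_code)  # e.g. 401 for a bad token
```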
@@ -67,7 +62,7 @@ from ._commit_api import (
     _warn_on_overwriting_operations,
 )
 from ._inference_endpoints import InferenceEndpoint, InferenceEndpointType
-from ._jobs_api import JobInfo
+from ._jobs_api import JobInfo, ScheduledJobInfo, _create_job_spec
 from ._space_api import SpaceHardware, SpaceRuntime, SpaceStorage, SpaceVariable
 from ._upload_large_folder import upload_large_folder_internal
 from .community import (
@@ -78,32 +73,11 @@ from .community import (
     DiscussionWithDetails,
     deserialize_event,
 )
-from .constants import (
-    DEFAULT_ETAG_TIMEOUT,  # noqa: F401 # kept for backward compatibility
-    DEFAULT_REQUEST_TIMEOUT,  # noqa: F401 # kept for backward compatibility
-    DEFAULT_REVISION,  # noqa: F401 # kept for backward compatibility
-    DISCUSSION_STATUS,  # noqa: F401 # kept for backward compatibility
-    DISCUSSION_TYPES,  # noqa: F401 # kept for backward compatibility
-    ENDPOINT,  # noqa: F401 # kept for backward compatibility
-    INFERENCE_ENDPOINTS_ENDPOINT,  # noqa: F401 # kept for backward compatibility
-    REGEX_COMMIT_OID,  # noqa: F401 # kept for backward compatibility
-    REPO_TYPE_MODEL,  # noqa: F401 # kept for backward compatibility
-    REPO_TYPES,  # noqa: F401 # kept for backward compatibility
-    REPO_TYPES_MAPPING,  # noqa: F401 # kept for backward compatibility
-    REPO_TYPES_URL_PREFIXES,  # noqa: F401 # kept for backward compatibility
-    SAFETENSORS_INDEX_FILE,  # noqa: F401 # kept for backward compatibility
-    SAFETENSORS_MAX_HEADER_LENGTH,  # noqa: F401 # kept for backward compatibility
-    SAFETENSORS_SINGLE_FILE,  # noqa: F401 # kept for backward compatibility
-    SPACES_SDK_TYPES,  # noqa: F401 # kept for backward compatibility
-    WEBHOOK_DOMAIN_T,  # noqa: F401 # kept for backward compatibility
-    DiscussionStatusFilter,  # noqa: F401 # kept for backward compatibility
-    DiscussionTypeFilter,  # noqa: F401 # kept for backward compatibility
-)
 from .errors import (
     BadRequestError,
-    EntryNotFoundError,
     GatedRepoError,
     HfHubHTTPError,
+    RemoteEntryNotFoundError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
 )
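Besides dropping the long list of backward-compatibility re-exports from `.constants`, this hunk renames the error callers see for missing remote files: `EntryNotFoundError` becomes `RemoteEntryNotFoundError`. A sketch of the corresponding catch-site migration (the folder path is made up):

```python
from huggingface_hub import HfApi
from huggingface_hub.errors import RemoteEntryNotFoundError  # was EntryNotFoundError in 0.x

api = HfApi()
try:
    # Listing a tree that does not exist raises a 404-backed error.
    for entry in api.list_repo_tree("gpt2", "no/such/folder"):  # made-up path
        print(entry.path)
except RemoteEntryNotFoundError:
    print("tree not found on the repo")
```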
@@ -111,8 +85,6 @@ from .file_download import HfFileMetadata, get_hf_file_metadata, hf_hub_url
 from .repocard_data import DatasetCardData, ModelCardData, SpaceCardData
 from .utils import (
     DEFAULT_IGNORE_PATTERNS,
-    HfFolder,  # noqa: F401 # kept for backward compatibility
-    LocalTokenNotFoundError,
     NotASafetensorsRepoError,
     SafetensorsFileMetadata,
     SafetensorsParsingError,
@@ -137,7 +109,7 @@ from .utils._auth import (
     _get_token_from_file,
     _get_token_from_google_colab,
 )
-from .utils._deprecation import _deprecate_method
+from .utils._deprecation import _deprecate_arguments
 from .utils._runtime import is_xet_available
 from .utils._typing import CallableT
 from .utils.endpoint_helpers import _is_emission_within_threshold
@@ -243,7 +215,7 @@ _AUTH_CHECK_NO_REPO_ERROR_MESSAGE = (
 logger = logging.get_logger(__name__)


-def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tuple[Optional[str], Optional[str], str]:
+def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> tuple[Optional[str], Optional[str], str]:
     """
     Returns the repo type and ID from a huggingface.co URL linking to a
     repository
@@ -353,8 +325,8 @@ class BlobLfsInfo(dict):
 class BlobSecurityInfo(dict):
     safe: bool  # duplicate information with "status" field, keeping it for backward compatibility
     status: str
-    av_scan: Optional[Dict]
-    pickle_import_scan: Optional[Dict]
+    av_scan: Optional[dict]
+    pickle_import_scan: Optional[dict]

     def __post_init__(self):  # hack to make BlogSecurityInfo backward compatible
         self.update(asdict(self))
@@ -374,7 +346,7 @@ class TransformersInfo(dict):

 @dataclass
 class SafeTensorsInfo(dict):
-    parameters: Dict[str, int]
+    parameters: dict[str, int]
     total: int

     def __post_init__(self):  # hack to make SafeTensorsInfo backward compatible
@@ -417,12 +389,6 @@ class CommitInfo(str):

         repo_url (`RepoUrl`):
             Repo URL of the commit containing info like repo_id, repo_type, etc.
-
-        _url (`str`, *optional*):
-            Legacy url for `str` compatibility. Can be the url to the uploaded file on the Hub (if returned by
-            [`upload_file`]), to the uploaded folder on the Hub (if returned by [`upload_folder`]) or to the commit on
-            the Hub (if returned by [`create_commit`]). Defaults to `commit_url`. It is deprecated to use this
-            attribute. Please use `commit_url` instead.
     """

     commit_url: str
@@ -438,11 +404,8 @@ class CommitInfo(str):
     pr_revision: Optional[str] = field(init=False)
     pr_num: Optional[str] = field(init=False)

-
-
-
-    def __new__(cls, *args, commit_url: str, _url: Optional[str] = None, **kwargs):
-        return str.__new__(cls, _url or commit_url)
+    def __new__(cls, *args, commit_url: str, **kwargs):
+        return str.__new__(cls, commit_url)

     def __post_init__(self):
         """Populate pr-related fields after initialization.
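With the legacy `_url` escape hatch gone, a `CommitInfo` now always stringifies to its `commit_url`; code that relied on `str(result)` pointing at an uploaded file or folder should read explicit attributes instead. A sketch (the repo id is hypothetical and needs write access):

```python
from huggingface_hub import HfApi

api = HfApi()
commit = api.upload_file(
    path_or_fileobj=b"hello",
    path_in_repo="hello.txt",
    repo_id="user/repo",  # hypothetical repo you can write to
)
# In 1.0, str(commit) == commit.commit_url; the `_url` override no longer exists.
print(commit.commit_url)
print(commit.oid)  # commit sha
```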
@@ -477,7 +440,7 @@ class AccessRequest:
             Timestamp of the request.
         status (`Literal["pending", "accepted", "rejected"]`):
             Status of the request. Can be one of `["pending", "accepted", "rejected"]`.
-        fields (`Dict[str, Any]`, *optional*):
+        fields (`dict[str, Any]`, *optional*):
             Additional fields filled by the user in the gate form.
     """
@@ -488,7 +451,7 @@ class AccessRequest:
     status: Literal["pending", "accepted", "rejected"]

     # Additional fields filled by the user in the gate form
-    fields: Optional[Dict[str, Any]] = None
+    fields: Optional[dict[str, Any]] = None


 @dataclass
@@ -515,9 +478,9 @@ class WebhookInfo:
             ID of the webhook.
         url (`str`):
             URL of the webhook.
-        watched (`List[WebhookWatchedItem]`):
+        watched (`list[WebhookWatchedItem]`):
             List of items watched by the webhook, see [`WebhookWatchedItem`].
-        domains (`List[WEBHOOK_DOMAIN_T]`):
+        domains (`list[WEBHOOK_DOMAIN_T]`):
             List of domains the webhook is watching. Can be one of `["repo", "discussions"]`.
         secret (`str`, *optional*):
             Secret of the webhook.
@@ -527,8 +490,8 @@ class WebhookInfo:

     id: str
     url: str
-    watched: List[WebhookWatchedItem]
-    domains: List[constants.WEBHOOK_DOMAIN_T]
+    watched: list[WebhookWatchedItem]
+    domains: list[constants.WEBHOOK_DOMAIN_T]
     secret: Optional[str]
     disabled: bool
@@ -779,17 +742,17 @@ class ModelInfo:
         gated (`Literal["auto", "manual", False]`, *optional*):
             Is the repo gated.
             If so, whether there is manual or automatic approval.
-        gguf (`Dict`, *optional*):
+        gguf (`dict`, *optional*):
             GGUF information of the model.
         inference (`Literal["warm"]`, *optional*):
             Status of the model on Inference Providers. Warm if the model is served by at least one provider.
-        inference_provider_mapping (`List[InferenceProviderMapping]`, *optional*):
+        inference_provider_mapping (`list[InferenceProviderMapping]`, *optional*):
             A list of [`InferenceProviderMapping`] ordered after the user's provider order.
         likes (`int`):
             Number of likes of the model.
         library_name (`str`, *optional*):
             Library associated with the model.
-        tags (`List[str]`):
+        tags (`list[str]`):
             List of tags of the model. Compared to `card_data.tags`, contains extra tags computed by the Hub
             (e.g. supported libraries, model's arXiv).
         pipeline_tag (`str`, *optional*):
@@ -798,9 +761,9 @@ class ModelInfo:
             Mask token used by the model.
         widget_data (`Any`, *optional*):
             Widget data associated with the model.
-        model_index (`Dict`, *optional*):
+        model_index (`dict`, *optional*):
             Model index for evaluation.
-        config (`Dict`, *optional*):
+        config (`dict`, *optional*):
             Model configuration.
         transformers_info (`TransformersInfo`, *optional*):
             Transformers-specific info (auto class, processor, etc.) associated with the model.
@@ -808,13 +771,13 @@ class ModelInfo:
             Trending score of the model.
         card_data (`ModelCardData`, *optional*):
             Model Card Metadata as a [`huggingface_hub.repocard_data.ModelCardData`] object.
-        siblings (`List[RepoSibling]`):
+        siblings (`list[RepoSibling]`):
             List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the model.
-        spaces (`List[str]`, *optional*):
+        spaces (`list[str]`, *optional*):
             List of spaces using the model.
         safetensors (`SafeTensorsInfo`, *optional*):
             Model's safetensors information.
-        security_repo_status (`Dict`, *optional*):
+        security_repo_status (`dict`, *optional*):
             Model's security scan status.
     """
@@ -828,24 +791,24 @@ class ModelInfo:
     downloads: Optional[int]
     downloads_all_time: Optional[int]
     gated: Optional[Literal["auto", "manual", False]]
-    gguf: Optional[Dict]
+    gguf: Optional[dict]
     inference: Optional[Literal["warm"]]
-    inference_provider_mapping: Optional[List[InferenceProviderMapping]]
+    inference_provider_mapping: Optional[list[InferenceProviderMapping]]
     likes: Optional[int]
     library_name: Optional[str]
-    tags: Optional[List[str]]
+    tags: Optional[list[str]]
     pipeline_tag: Optional[str]
     mask_token: Optional[str]
     card_data: Optional[ModelCardData]
     widget_data: Optional[Any]
-    model_index: Optional[Dict]
-    config: Optional[Dict]
+    model_index: Optional[dict]
+    config: Optional[dict]
     transformers_info: Optional[TransformersInfo]
     trending_score: Optional[int]
-    siblings: Optional[List[RepoSibling]]
-    spaces: Optional[List[str]]
+    siblings: Optional[list[RepoSibling]]
+    spaces: Optional[list[str]]
     safetensors: Optional[SafeTensorsInfo]
-    security_repo_status: Optional[Dict]
+    security_repo_status: Optional[dict]
     xet_enabled: Optional[bool]

     def __init__(self, **kwargs):
@@ -980,11 +943,11 @@ class DatasetInfo:
             Cumulated number of downloads of the model since its creation.
         likes (`int`):
             Number of likes of the dataset.
-        tags (`List[str]`):
+        tags (`list[str]`):
             List of tags of the dataset.
         card_data (`DatasetCardData`, *optional*):
             Model Card Metadata as a [`huggingface_hub.repocard_data.DatasetCardData`] object.
-        siblings (`List[RepoSibling]`):
+        siblings (`list[RepoSibling]`):
             List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the dataset.
         paperswithcode_id (`str`, *optional*):
             Papers with code ID of the dataset.
@@ -1004,10 +967,10 @@ class DatasetInfo:
     downloads_all_time: Optional[int]
     likes: Optional[int]
     paperswithcode_id: Optional[str]
-    tags: Optional[List[str]]
+    tags: Optional[list[str]]
     trending_score: Optional[int]
     card_data: Optional[DatasetCardData]
-    siblings: Optional[List[RepoSibling]]
+    siblings: Optional[list[RepoSibling]]
     xet_enabled: Optional[bool]

     def __init__(self, **kwargs):
@@ -1099,9 +1062,9 @@ class SpaceInfo:
             Subdomain of the Space.
         likes (`int`):
             Number of likes of the Space.
-        tags (`List[str]`):
+        tags (`list[str]`):
             List of tags of the Space.
-        siblings (`List[RepoSibling]`):
+        siblings (`list[RepoSibling]`):
             List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the Space.
         card_data (`SpaceCardData`, *optional*):
             Space Card Metadata as a [`huggingface_hub.repocard_data.SpaceCardData`] object.
@@ -1109,9 +1072,9 @@ class SpaceInfo:
             Space runtime information as a [`huggingface_hub.hf_api.SpaceRuntime`] object.
         sdk (`str`, *optional*):
             SDK used by the Space.
-        models (`List[str]`, *optional*):
+        models (`list[str]`, *optional*):
             List of models used by the Space.
-        datasets (`List[str]`, *optional*):
+        datasets (`list[str]`, *optional*):
             List of datasets used by the Space.
         trending_score (`int`, *optional*):
             Trending score of the Space.
@@ -1129,13 +1092,13 @@ class SpaceInfo:
     subdomain: Optional[str]
     likes: Optional[int]
     sdk: Optional[str]
-    tags: Optional[List[str]]
-    siblings: Optional[List[RepoSibling]]
+    tags: Optional[list[str]]
+    siblings: Optional[list[RepoSibling]]
     trending_score: Optional[int]
     card_data: Optional[SpaceCardData]
     runtime: Optional[SpaceRuntime]
-    models: Optional[List[str]]
-    datasets: Optional[List[str]]
+    models: Optional[list[str]]
+    datasets: Optional[list[str]]
     xet_enabled: Optional[bool]

     def __init__(self, **kwargs):
@@ -1223,7 +1186,7 @@ class CollectionItem:
         id: str,
         type: CollectionItemType_T,
         position: int,
-        note: Optional[Dict] = None,
+        note: Optional[dict] = None,
         **kwargs,
     ) -> None:
         self.item_object_id: str = _id  # id in database
@@ -1249,7 +1212,7 @@ class Collection:
         Title of the collection. E.g. `"Recent models"`.
     owner (`str`):
         Owner of the collection. E.g. `"TheBloke"`.
-    items (`List[CollectionItem]`):
+    items (`list[CollectionItem]`):
         List of items in the collection.
     last_updated (`datetime`):
         Date of the last update of the collection.
@@ -1270,7 +1233,7 @@ class Collection:
     slug: str
     title: str
     owner: str
-    items: List[CollectionItem]
+    items: list[CollectionItem]
     last_updated: datetime
     position: int
     private: bool
@@ -1327,22 +1290,22 @@ class GitRefs:
     Object is returned by [`list_repo_refs`].

     Attributes:
-        branches (`List[GitRefInfo]`):
+        branches (`list[GitRefInfo]`):
             A list of [`GitRefInfo`] containing information about branches on the repo.
-        converts (`List[GitRefInfo]`):
+        converts (`list[GitRefInfo]`):
             A list of [`GitRefInfo`] containing information about "convert" refs on the repo.
             Converts are refs used (internally) to push preprocessed data in Dataset repos.
-        tags (`List[GitRefInfo]`):
+        tags (`list[GitRefInfo]`):
             A list of [`GitRefInfo`] containing information about tags on the repo.
-        pull_requests (`List[GitRefInfo]`, *optional*):
+        pull_requests (`list[GitRefInfo]`, *optional*):
             A list of [`GitRefInfo`] containing information about pull requests on the repo.
             Only returned if `include_prs=True` is set.
     """

-    branches: List[GitRefInfo]
-    converts: List[GitRefInfo]
-    tags: List[GitRefInfo]
-    pull_requests: Optional[List[GitRefInfo]] = None
+    branches: list[GitRefInfo]
+    converts: list[GitRefInfo]
+    tags: list[GitRefInfo]
+    pull_requests: Optional[list[GitRefInfo]] = None


 @dataclass
@@ -1353,7 +1316,7 @@ class GitCommitInfo:
     Attributes:
         commit_id (`str`):
             OID of the commit (e.g. `"e7da7f221d5bf496a48136c0cd264e630fe9fcc8"`)
-        authors (`List[str]`):
+        authors (`list[str]`):
             List of authors of the commit.
         created_at (`datetime`):
             Datetime when the commit was created.
@@ -1369,7 +1332,7 @@ class GitCommitInfo:

     commit_id: str

-    authors: List[str]
+    authors: list[str]
     created_at: datetime
     title: str
     message: str
@@ -1388,11 +1351,11 @@ class UserLikes:
         Name of the user for which we fetched the likes.
     total (`int`):
         Total number of likes.
-    datasets (`List[str]`):
+    datasets (`list[str]`):
         List of datasets liked by the user (as repo_ids).
-    models (`List[str]`):
+    models (`list[str]`):
         List of models liked by the user (as repo_ids).
-    spaces (`List[str]`):
+    spaces (`list[str]`):
         List of spaces liked by the user (as repo_ids).
     """
@@ -1401,9 +1364,9 @@ class UserLikes:
     total: int

     # User likes
-    datasets: List[str]
-    models: List[str]
-    spaces: List[str]
+    datasets: list[str]
+    models: list[str]
+    spaces: list[str]


 @dataclass
@@ -1489,7 +1452,7 @@ class User:
     num_likes: Optional[int] = None
     num_following: Optional[int] = None
     num_followers: Optional[int] = None
-    orgs: List[Organization] = field(default_factory=list)
+    orgs: list[Organization] = field(default_factory=list)

     def __init__(self, **kwargs) -> None:
         self.username = kwargs.pop("user", "")
@@ -1522,7 +1485,7 @@ class PaperInfo:
     Attributes:
         id (`str`):
             arXiv paper ID.
-        authors (`List[str]`, **optional**):
+        authors (`list[str]`, **optional**):
             Names of paper authors
         published_at (`datetime`, **optional**):
             Date paper published.
@@ -1545,7 +1508,7 @@ class PaperInfo:
     """

     id: str
-    authors: Optional[List[str]]
+    authors: Optional[list[str]]
     published_at: Optional[datetime]
     title: Optional[str]
     summary: Optional[str]
@@ -1709,8 +1672,8 @@ class HfApi:
         token: Union[str, bool, None] = None,
         library_name: Optional[str] = None,
         library_version: Optional[str] = None,
-        user_agent: Union[Dict, str, None] = None,
-        headers: Optional[Dict[str, str]] = None,
+        user_agent: Union[dict, str, None] = None,
+        headers: Optional[dict[str, str]] = None,
     ) -> None:
         self.endpoint = endpoint if endpoint is not None else constants.ENDPOINT
         self.token = token
@@ -1761,7 +1724,7 @@ class HfApi:
         return self._thread_pool.submit(fn, *args, **kwargs)

     @validate_hf_hub_args
-    def whoami(self, token: Union[bool, str, None] = None) -> Dict:
+    def whoami(self, token: Union[bool, str, None] = None) -> dict:
         """
         Call HF API to know "whoami".
@@ -1780,7 +1743,7 @@ class HfApi:
         )
         try:
             hf_raise_for_status(r)
-        except HTTPError as e:
+        except HfHubHTTPError as e:
             if e.response.status_code == 401:
                 error_message = "Invalid user token."
                 # Check which token is the effective one and generate the error message accordingly
@@ -1793,51 +1756,11 @@ class HfApi:
                 )
             elif effective_token == _get_token_from_file():
                 error_message += " The token stored is invalid. Please run `hf auth login` to update it."
-                raise HTTPError(error_message, request=e.request, response=e.response) from e
+                raise HfHubHTTPError(error_message, response=e.response) from e
             raise
         return r.json()

-    @_deprecate_method(
-        version="1.0",
-        message=(
-            "Permissions are more complex than when `get_token_permission` was first introduced. "
-            "OAuth and fine-grain tokens allows for more detailed permissions. "
-            "If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key."
-        ),
-    )
-    def get_token_permission(
-        self, token: Union[bool, str, None] = None
-    ) -> Literal["read", "write", "fineGrained", None]:
-        """
-        Check if a given `token` is valid and return its permissions.
-
-        <Tip warning={true}>
-
-        This method is deprecated and will be removed in version 1.0. Permissions are more complex than when
-        `get_token_permission` was first introduced. OAuth and fine-grain tokens allows for more detailed permissions.
-        If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key.
-
-        </Tip>
-
-        For more details about tokens, please refer to https://huggingface.co/docs/hub/security-tokens#what-are-user-access-tokens.
-
-        Args:
-            token (Union[bool, str, None], optional):
-                A valid user access token (string). Defaults to the locally saved
-                token, which is the recommended method for authentication (see
-                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
-                To disable authentication, pass `False`.
-
-        Returns:
-            `Literal["read", "write", "fineGrained", None]`: Permission granted by the token ("read" or "write"). Returns `None` if no
-            token passed, if token is invalid or if role is not returned by the server. This typically happens when the token is an OAuth token.
-        """
-        try:
-            return self.whoami(token=token)["auth"]["accessToken"]["role"]
-        except (LocalTokenNotFoundError, HTTPError, KeyError):
-            return None
-
-    def get_model_tags(self) -> Dict:
+    def get_model_tags(self) -> dict:
         """
         List all valid model tags as a nested namespace object
         """
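`get_token_permission` is deleted outright; its removed deprecation message already pointed at `whoami` and the `'auth'` key. A sketch of the replacement; the nested key structure is an assumption carried over from the removed implementation (`["auth"]["accessToken"]["role"]`):

```python
from huggingface_hub import HfApi

me = HfApi().whoami()
# Assumed payload shape, mirroring the removed get_token_permission():
role = me.get("auth", {}).get("accessToken", {}).get("role")
print(role)  # "read", "write", "fineGrained", or None (e.g. OAuth tokens)
```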
@@ -1846,7 +1769,7 @@ class HfApi:
         hf_raise_for_status(r)
         return r.json()

-    def get_dataset_tags(self) -> Dict:
+    def get_dataset_tags(self) -> dict:
         """
         List all valid dataset tags as a nested namespace object.
         """
@@ -1862,24 +1785,21 @@ class HfApi:
         # Search-query parameter
         filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
+        apps: Optional[Union[str, list[str]]] = None,
         gated: Optional[bool] = None,
         inference: Optional[Literal["warm"]] = None,
-        inference_provider: Optional[Union[Literal["all"], "PROVIDER_T", List["PROVIDER_T"]]] = None,
-        library: Optional[Union[str, List[str]]] = None,
-        language: Optional[Union[str, List[str]]] = None,
+        inference_provider: Optional[Union[Literal["all"], "PROVIDER_T", list["PROVIDER_T"]]] = None,
         model_name: Optional[str] = None,
-        task: Optional[Union[str, List[str]]] = None,
-        trained_dataset: Optional[Union[str, List[str]]] = None,
-        tags: Optional[Union[str, List[str]]] = None,
+        trained_dataset: Optional[Union[str, list[str]]] = None,
         search: Optional[str] = None,
         pipeline_tag: Optional[str] = None,
-        emissions_thresholds: Optional[Tuple[float, float]] = None,
+        emissions_thresholds: Optional[tuple[float, float]] = None,
         # Sorting and pagination parameters
         sort: Union[Literal["last_modified"], str, None] = None,
         direction: Optional[Literal[-1]] = None,
         limit: Optional[int] = None,
         # Additional data to fetch
-        expand: Optional[List[ExpandModelProperty_T]] = None,
+        expand: Optional[list[ExpandModelProperty_T]] = None,
         full: Optional[bool] = None,
         cardData: bool = False,
         fetch_config: bool = False,
@@ -1891,9 +1811,13 @@ class HfApi:
         Args:
             filter (`str` or `Iterable[str]`, *optional*):
                 A string or list of string to filter models on the Hub.
+                Models can be filtered by library, language, task, tags, and more.
             author (`str`, *optional*):
                 A string which identify the author (user or organization) of the
                 returned models.
+            apps (`str` or `List`, *optional*):
+                A string or list of strings to filter models on the Hub that
+                support the specified apps. Example values include `"ollama"` or `["ollama", "vllm"]`.
             gated (`bool`, *optional*):
                 A boolean to filter models on the Hub that are gated or not. By default, all models are returned.
                 If `gated=True` is passed, only gated models are returned.
@@ -1903,24 +1827,12 @@ class HfApi:
             inference_provider (`Literal["all"]` or `str`, *optional*):
                 A string to filter models on the Hub that are served by a specific provider.
                 Pass `"all"` to get all models served by at least one provider.
-            library (`str` or `List`, *optional*):
-                A string or list of strings of foundational libraries models were
-                originally trained from, such as pytorch, tensorflow, or allennlp.
-            language (`str` or `List`, *optional*):
-                A string or list of strings of languages, both by name and country
-                code, such as "en" or "English"
             model_name (`str`, *optional*):
                 A string that contain complete or partial names for models on the
                 Hub, such as "bert" or "bert-base-cased"
-            task (`str` or `List`, *optional*):
-                A string or list of strings of tasks models were designed for, such
-                as: "fill-mask" or "automatic-speech-recognition"
             trained_dataset (`str` or `List`, *optional*):
                 A string tag or a list of string tags of the trained dataset for a
                 model on the Hub.
-            tags (`str` or `List`, *optional*):
-                A string tag or a list of tags to filter models on the Hub by, such
-                as `text-generation` or `spacy`.
             search (`str`, *optional*):
                 A string that will be contained in the returned model ids.
             pipeline_tag (`str`, *optional*):
@@ -1937,7 +1849,7 @@ class HfApi:
             limit (`int`, *optional*):
                 The limit on the number of models fetched. Leaving this option
                 to `None` fetches all models.
-            expand (`List[ExpandModelProperty_T]`, *optional*):
+            expand (`list[ExpandModelProperty_T]`, *optional*):
                 List properties to return in the response. When used, only the properties in the list will be returned.
                 This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed.
                 Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"resourceGroup"` and `"xetEnabled"`.
@@ -1991,38 +1903,30 @@ class HfApi:
         if expand and (full or cardData or fetch_config):
             raise ValueError("`expand` cannot be used if `full`, `cardData` or `fetch_config` are passed.")

-        if emissions_thresholds is not None and cardData is None:
+        if emissions_thresholds is not None and not cardData:
             raise ValueError("`emissions_thresholds` were passed without setting `cardData=True`.")

         path = f"{self.endpoint}/api/models"
         headers = self._build_hf_headers(token=token)
-        params: Dict[str, Any] = {}
+        params: dict[str, Any] = {}

         # Build the filter list
-        filter_list: List[str] = []
+        filter_list: list[str] = []
         if filter:
             filter_list.extend([filter] if isinstance(filter, str) else filter)
-        if library:
-            filter_list.extend([library] if isinstance(library, str) else library)
-        if task:
-            filter_list.extend([task] if isinstance(task, str) else task)
         if trained_dataset:
-            if isinstance(trained_dataset, str):
-                trained_dataset = [trained_dataset]
-            for dataset in trained_dataset:
-                if not dataset.startswith("dataset:"):
-                    dataset = f"dataset:{dataset}"
-                filter_list.append(dataset)
-        if language:
-            filter_list.extend([language] if isinstance(language, str) else language)
-        if tags:
-            filter_list.extend([tags] if isinstance(tags, str) else tags)
+            datasets = [trained_dataset] if isinstance(trained_dataset, str) else trained_dataset
+            filter_list.extend(f"dataset:{d}" if not d.startswith("dataset:") else d for d in datasets)
         if len(filter_list) > 0:
             params["filter"] = filter_list

         # Handle other query params
         if author:
             params["author"] = author
+        if apps:
+            if isinstance(apps, str):
+                apps = [apps]
+            params["apps"] = apps
         if gated is not None:
             params["gated"] = gated
         if inference is not None:
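`list_models` loses `library`, `language`, `task`, and `tags` as dedicated keyword arguments: as the removed code shows, they were simply appended to the same `filter` list, so 1.0 asks callers to pass them there directly and adds the new `apps` parameter. A hedged migration sketch:

```python
from huggingface_hub import HfApi

api = HfApi()
# 0.x: api.list_models(task="fill-mask", library="pytorch", tags="en", limit=5)
# 1.0: fold everything into `filter`; values are plain Hub tags.
models = api.list_models(
    filter=["fill-mask", "pytorch", "en"],
    apps="ollama",  # new in 1.0: only models supported by the given app(s)
    limit=5,
)
for m in models:
    print(m.id)
```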
@@ -2074,6 +1978,7 @@ class HfApi:
             if emissions_thresholds is None or _is_emission_within_threshold(model_info, *emissions_thresholds):
                 yield model_info

+    @_deprecate_arguments(version="1.0", deprecated_args=["tags"], custom_message="Use `filter` instead.")
     @validate_hf_hub_args
     def list_datasets(
         self,
@@ -2081,25 +1986,26 @@ class HfApi:
         # Search-query parameter
         filter: Union[str, Iterable[str], None] = None,
         author: Optional[str] = None,
-        benchmark: Optional[Union[str, List[str]]] = None,
+        benchmark: Optional[Union[str, list[str]]] = None,
         dataset_name: Optional[str] = None,
         gated: Optional[bool] = None,
-        language_creators: Optional[Union[str, List[str]]] = None,
-        language: Optional[Union[str, List[str]]] = None,
-        multilinguality: Optional[Union[str, List[str]]] = None,
-        size_categories: Optional[Union[str, List[str]]] = None,
-        tags: Optional[Union[str, List[str]]] = None,
-        task_categories: Optional[Union[str, List[str]]] = None,
-        task_ids: Optional[Union[str, List[str]]] = None,
+        language_creators: Optional[Union[str, list[str]]] = None,
+        language: Optional[Union[str, list[str]]] = None,
+        multilinguality: Optional[Union[str, list[str]]] = None,
+        size_categories: Optional[Union[str, list[str]]] = None,
+        task_categories: Optional[Union[str, list[str]]] = None,
+        task_ids: Optional[Union[str, list[str]]] = None,
         search: Optional[str] = None,
         # Sorting and pagination parameters
         sort: Optional[Union[Literal["last_modified"], str]] = None,
         direction: Optional[Literal[-1]] = None,
         limit: Optional[int] = None,
         # Additional data to fetch
-        expand: Optional[List[ExpandDatasetProperty_T]] = None,
+        expand: Optional[list[ExpandDatasetProperty_T]] = None,
         full: Optional[bool] = None,
         token: Union[bool, str, None] = None,
+        # Deprecated arguments - use `filter` instead
+        tags: Optional[Union[str, list[str]]] = None,
     ) -> Iterable[DatasetInfo]:
         """
         List datasets hosted on the Huggingface Hub, given some filters.
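`list_datasets` keeps `tags` for one more cycle but wraps it in `_deprecate_arguments`, so passing it now warns and the docstring redirects to `filter`. A sketch of the preferred form (the filter values are illustrative dataset tags):

```python
from huggingface_hub import HfApi

api = HfApi()
# 0.x: api.list_datasets(tags="legal")  -> emits a deprecation warning in 1.0
datasets = api.list_datasets(filter=["language:en", "legal"], limit=5)  # illustrative tags
for ds in datasets:
    print(ds.id)
```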
@@ -2134,7 +2040,7 @@ class HfApi:
|
|
|
2134
2040
|
the Hub by the size of the dataset such as `100K<n<1M` or
|
|
2135
2041
|
`1M<n<10M`.
|
|
2136
2042
|
tags (`str` or `List`, *optional*):
|
|
2137
|
-
|
|
2043
|
+
Deprecated. Pass tags in `filter` to filter datasets by tags.
|
|
2138
2044
|
task_categories (`str` or `List`, *optional*):
|
|
2139
2045
|
A string or list of strings that can be used to identify datasets on
|
|
2140
2046
|
the Hub by the designed task, such as `audio_classification` or
|
|
@@ -2154,7 +2060,7 @@ class HfApi:
|
|
|
2154
2060
|
limit (`int`, *optional*):
|
|
2155
2061
|
The limit on the number of datasets fetched. Leaving this option
|
|
2156
2062
|
to `None` fetches all datasets.
|
|
2157
|
-
expand (`
|
|
2063
|
+
expand (`list[ExpandDatasetProperty_T]`, *optional*):
|
|
2158
2064
|
List properties to return in the response. When used, only the properties in the list will be returned.
|
|
2159
2065
|
This parameter cannot be used if `full` is passed.
|
|
2160
2066
|
Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
|
|
@@ -2214,7 +2120,7 @@ class HfApi:
|
|
|
2214
2120
|
|
|
2215
2121
|
path = f"{self.endpoint}/api/datasets"
|
|
2216
2122
|
headers = self._build_hf_headers(token=token)
|
|
2217
|
-
params:
|
|
2123
|
+
params: dict[str, Any] = {}
|
|
2218
2124
|
|
|
2219
2125
|
# Build `filter` list
|
|
2220
2126
|
filter_list = []
|
|
@@ -2301,7 +2207,7 @@ class HfApi:
|
|
|
2301
2207
|
direction: Optional[Literal[-1]] = None,
|
|
2302
2208
|
limit: Optional[int] = None,
|
|
2303
2209
|
# Additional data to fetch
|
|
2304
|
-
expand: Optional[
|
|
2210
|
+
expand: Optional[list[ExpandSpaceProperty_T]] = None,
|
|
2305
2211
|
full: Optional[bool] = None,
|
|
2306
2212
|
token: Union[bool, str, None] = None,
|
|
2307
2213
|
) -> Iterable[SpaceInfo]:
|
|
@@ -2332,7 +2238,7 @@ class HfApi:
|
|
|
2332
2238
|
limit (`int`, *optional*):
|
|
2333
2239
|
The limit on the number of Spaces fetched. Leaving this option
|
|
2334
2240
|
to `None` fetches all Spaces.
|
|
2335
|
-
expand (`
|
|
2241
|
+
expand (`list[ExpandSpaceProperty_T]`, *optional*):
|
|
2336
2242
|
List properties to return in the response. When used, only the properties in the list will be returned.
|
|
2337
2243
|
This parameter cannot be used if `full` is passed.
|
|
2338
2244
|
Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
|
|
@@ -2353,7 +2259,7 @@ class HfApi:
|
|
|
2353
2259
|
|
|
2354
2260
|
path = f"{self.endpoint}/api/spaces"
|
|
2355
2261
|
headers = self._build_hf_headers(token=token)
|
|
2356
|
-
params:
|
|
2262
|
+
params: dict[str, Any] = {}
|
|
2357
2263
|
if filter is not None:
|
|
2358
2264
|
params["filter"] = filter
|
|
2359
2265
|
if author is not None:
|
|
@@ -2570,7 +2476,7 @@ class HfApi:
|
|
|
2570
2476
|
timeout: Optional[float] = None,
|
|
2571
2477
|
securityStatus: Optional[bool] = None,
|
|
2572
2478
|
files_metadata: bool = False,
|
|
2573
|
-
expand: Optional[
|
|
2479
|
+
expand: Optional[list[ExpandModelProperty_T]] = None,
|
|
2574
2480
|
token: Union[bool, str, None] = None,
|
|
2575
2481
|
) -> ModelInfo:
|
|
2576
2482
|
"""
|
|
@@ -2593,7 +2499,7 @@ class HfApi:
|
|
|
2593
2499
|
files_metadata (`bool`, *optional*):
|
|
2594
2500
|
Whether or not to retrieve metadata for files in the repository
|
|
2595
2501
|
(size, LFS metadata, etc). Defaults to `False`.
|
|
2596
|
-
expand (`
|
|
2502
|
+
expand (`list[ExpandModelProperty_T]`, *optional*):
|
|
2597
2503
|
List properties to return in the response. When used, only the properties in the list will be returned.
|
|
2598
2504
|
This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
|
|
2599
2505
|
Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
|
|
@@ -2627,7 +2533,7 @@ class HfApi:
|
|
|
2627
2533
|
if revision is None
|
|
2628
2534
|
else (f"{self.endpoint}/api/models/{repo_id}/revision/{quote(revision, safe='')}")
|
|
2629
2535
|
)
|
|
2630
|
-
params:
|
|
2536
|
+
params: dict = {}
|
|
2631
2537
|
if securityStatus:
|
|
2632
2538
|
params["securityStatus"] = True
|
|
2633
2539
|
if files_metadata:
|
|
@@ -2647,7 +2553,7 @@ class HfApi:
|
|
|
2647
2553
|
revision: Optional[str] = None,
|
|
2648
2554
|
timeout: Optional[float] = None,
|
|
2649
2555
|
files_metadata: bool = False,
|
|
2650
|
-
expand: Optional[
|
|
2556
|
+
expand: Optional[list[ExpandDatasetProperty_T]] = None,
|
|
2651
2557
|
token: Union[bool, str, None] = None,
|
|
2652
2558
|
) -> DatasetInfo:
|
|
2653
2559
|
"""
|
|
@@ -2667,7 +2573,7 @@ class HfApi:
|
|
|
2667
2573
|
files_metadata (`bool`, *optional*):
|
|
2668
2574
|
Whether or not to retrieve metadata for files in the repository
|
|
2669
2575
|
(size, LFS metadata, etc). Defaults to `False`.
|
|
2670
|
-
expand (`
|
|
2576
|
+
expand (`list[ExpandDatasetProperty_T]`, *optional*):
|
|
2671
2577
|
List properties to return in the response. When used, only the properties in the list will be returned.
|
|
2672
2578
|
This parameter cannot be used if `files_metadata` is passed.
|
|
2673
2579
|
Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`,`"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
|
|
@@ -2701,7 +2607,7 @@ class HfApi:
|
|
|
2701
2607
|
if revision is None
|
|
2702
2608
|
else (f"{self.endpoint}/api/datasets/{repo_id}/revision/{quote(revision, safe='')}")
|
|
2703
2609
|
)
|
|
2704
|
-
params:
|
|
2610
|
+
params: dict = {}
|
|
2705
2611
|
if files_metadata:
|
|
2706
2612
|
params["blobs"] = True
|
|
2707
2613
|
if expand:
|
|
@@ -2720,7 +2626,7 @@ class HfApi:
|
|
|
2720
2626
|
revision: Optional[str] = None,
|
|
2721
2627
|
timeout: Optional[float] = None,
|
|
2722
2628
|
files_metadata: bool = False,
|
|
2723
|
-
expand: Optional[
|
|
2629
|
+
expand: Optional[list[ExpandSpaceProperty_T]] = None,
|
|
2724
2630
|
token: Union[bool, str, None] = None,
|
|
2725
2631
|
) -> SpaceInfo:
|
|
2726
2632
|
"""
|
|
@@ -2740,7 +2646,7 @@ class HfApi:
|
|
|
2740
2646
|
files_metadata (`bool`, *optional*):
|
|
2741
2647
|
Whether or not to retrieve metadata for files in the repository
|
|
2742
2648
|
(size, LFS metadata, etc). Defaults to `False`.
|
|
2743
|
-
expand (`
|
|
2649
|
+
expand (`list[ExpandSpaceProperty_T]`, *optional*):
|
|
2744
2650
|
List properties to return in the response. When used, only the properties in the list will be returned.
|
|
2745
2651
|
This parameter cannot be used if `full` is passed.
|
|
2746
2652
|
Possible values are `"author"`, `"cardData"`, `"createdAt"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
|
|
@@ -2774,7 +2680,7 @@ class HfApi:
|
|
|
2774
2680
|
if revision is None
|
|
2775
2681
|
else (f"{self.endpoint}/api/spaces/{repo_id}/revision/{quote(revision, safe='')}")
|
|
2776
2682
|
)
|
|
2777
|
-
params:
|
|
2683
|
+
params: dict = {}
|
|
2778
2684
|
if files_metadata:
|
|
2779
2685
|
params["blobs"] = True
|
|
2780
2686
|
if expand:
|
|
@@ -3005,7 +2911,7 @@ class HfApi:
|
|
|
3005
2911
|
return True
|
|
3006
2912
|
except GatedRepoError: # raise specifically on gated repo
|
|
3007
2913
|
raise
|
|
3008
|
-
except (RepositoryNotFoundError,
|
|
2914
|
+
except (RepositoryNotFoundError, RemoteEntryNotFoundError, RevisionNotFoundError):
|
|
3009
2915
|
return False
|
|
3010
2916
|
|
|
3011
2917
|
@validate_hf_hub_args
|
|
@@ -3016,7 +2922,7 @@ class HfApi:
|
|
|
3016
2922
|
revision: Optional[str] = None,
|
|
3017
2923
|
repo_type: Optional[str] = None,
|
|
3018
2924
|
token: Union[str, bool, None] = None,
|
|
3019
|
-
) ->
|
|
2925
|
+
) -> list[str]:
|
|
3020
2926
|
"""
|
|
3021
2927
|
Get the list of files in a given repo.
|
|
3022
2928
|
|
|
@@ -3035,7 +2941,7 @@ class HfApi:
|
|
|
3035
2941
|
To disable authentication, pass `False`.
|
|
3036
2942
|
|
|
3037
2943
|
Returns:
|
|
3038
|
-
`
|
|
2944
|
+
`list[str]`: the list of files in a given repository.
|
|
3039
2945
|
"""
|
|
3040
2946
|
return [
|
|
3041
2947
|
f.rfilename
|
|
@@ -3095,7 +3001,7 @@ class HfApi:
|
|
|
3095
3001
|
does not exist.
|
|
3096
3002
|
[`~utils.RevisionNotFoundError`]:
|
|
3097
3003
|
If revision is not found (error 404) on the repo.
|
|
3098
|
-
[`~utils.
|
|
3004
|
+
[`~utils.RemoteEntryNotFoundError`]:
|
|
3099
3005
|
If the tree (folder) does not exist (error 404) on the repo.
|
|
3100
3006
|
|
|
3101
3007
|
Examples:
|
|
@@ -3237,7 +3143,7 @@ class HfApi:
|
|
|
3237
3143
|
hf_raise_for_status(response)
|
|
3238
3144
|
data = response.json()
|
|
3239
3145
|
|
|
3240
|
-
def _format_as_git_ref_info(item:
|
|
3146
|
+
def _format_as_git_ref_info(item: dict) -> GitRefInfo:
|
|
3241
3147
|
return GitRefInfo(name=item["name"], ref=item["ref"], target_commit=item["targetCommit"])
|
|
3242
3148
|
|
|
3243
3149
|
return GitRefs(
|
|
@@ -3258,7 +3164,7 @@ class HfApi:
|
|
|
3258
3164
|
token: Union[bool, str, None] = None,
|
|
3259
3165
|
revision: Optional[str] = None,
|
|
3260
3166
|
formatted: bool = False,
|
|
3261
|
-
) ->
|
|
3167
|
+
) -> list[GitCommitInfo]:
|
|
3262
3168
|
"""
|
|
3263
3169
|
Get the list of commits of a given revision for a repo on the Hub.
|
|
3264
3170
|
|
|
@@ -3305,7 +3211,7 @@ class HfApi:
|
|
|
3305
3211
|
```
|
|
3306
3212
|
|
|
3307
3213
|
Returns:
|
|
3308
|
-
|
|
3214
|
+
list[[`GitCommitInfo`]]: list of objects containing information about the commits for a repo on the Hub.
|
|
3309
3215
|
|
|
3310
3216
|
Raises:
|
|
3311
3217
|
[`~utils.RepositoryNotFoundError`]:
|
|
@@ -3339,20 +3245,20 @@ class HfApi:
|
|
|
3339
3245
|
def get_paths_info(
|
|
3340
3246
|
self,
|
|
3341
3247
|
repo_id: str,
|
|
3342
|
-
paths: Union[
|
|
3248
|
+
paths: Union[list[str], str],
|
|
3343
3249
|
*,
|
|
3344
3250
|
expand: bool = False,
|
|
3345
3251
|
revision: Optional[str] = None,
|
|
3346
3252
|
repo_type: Optional[str] = None,
|
|
3347
3253
|
token: Union[str, bool, None] = None,
|
|
3348
|
-
) ->
|
|
3254
|
+
) -> list[Union[RepoFile, RepoFolder]]:
|
|
3349
3255
|
"""
|
|
3350
3256
|
Get information about a repo's paths.
|
|
3351
3257
|
|
|
3352
3258
|
Args:
|
|
3353
3259
|
repo_id (`str`):
|
|
3354
3260
|
A namespace (user or an organization) and a repo name separated by a `/`.
|
|
3355
|
-
paths (`Union[
|
|
3261
|
+
paths (`Union[list[str], str]`, *optional*):
|
|
3356
3262
|
The paths to get information about. If a path do not exist, it is ignored without raising
|
|
3357
3263
|
an exception.
|
|
3358
3264
|
expand (`bool`, *optional*, defaults to `False`):
|
|
@@ -3372,7 +3278,7 @@ class HfApi:
|
|
|
3372
3278
|
To disable authentication, pass `False`.
|
|
3373
3279
|
|
|
3374
3280
|
Returns:
|
|
3375
|
-
`
|
|
3281
|
+
`list[Union[RepoFile, RepoFolder]]`:
|
|
3376
3282
|
The information about the paths, as a list of [`RepoFile`] and [`RepoFolder`] objects.
|
|
3377
3283
|
|
|
3378
3284
|
Raises:
|
|
@@ -3637,8 +3543,8 @@ class HfApi:
|
|
|
3637
3543
|
space_hardware: Optional[SpaceHardware] = None,
|
|
3638
3544
|
space_storage: Optional[SpaceStorage] = None,
|
|
3639
3545
|
space_sleep_time: Optional[int] = None,
|
|
3640
|
-
space_secrets: Optional[
|
|
3641
|
-
space_variables: Optional[
|
|
3546
|
+
space_secrets: Optional[list[dict[str, str]]] = None,
|
|
3547
|
+
space_variables: Optional[list[dict[str, str]]] = None,
|
|
3642
3548
|
) -> RepoUrl:
|
|
3643
3549
|
"""Create an empty repo on the HuggingFace Hub.
|
|
3644
3550
|
|
|
@@ -3675,10 +3581,10 @@ class HfApi:
|
|
|
3675
3581
|
your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure
|
|
3676
3582
|
the sleep time (value is fixed to 48 hours of inactivity).
|
|
3677
3583
|
See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details.
|
|
3678
|
-
space_secrets (`
|
|
3584
|
+
space_secrets (`list[dict[str, str]]`, *optional*):
|
|
3679
3585
|
A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
|
|
3680
3586
|
For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets.
|
|
3681
|
-
space_variables (`
|
|
3587
|
+
space_variables (`list[dict[str, str]]`, *optional*):
|
|
3682
3588
|
A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
|
|
3683
3589
|
For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables.
|
|
3684
3590
|
|
|
@@ -3693,7 +3599,7 @@ class HfApi:
         if repo_type not in constants.REPO_TYPES:
             raise ValueError("Invalid repo type")
 
-        json: Dict[str, Any] = {"name": name, "organization": organization}
+        json: dict[str, Any] = {"name": name, "organization": organization}
         if private is not None:
             json["private"] = private
         if repo_type is not None:
@@ -3753,7 +3659,7 @@ class HfApi:
 
         try:
             hf_raise_for_status(r)
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             if exist_ok and err.response.status_code == 409:
                 # Repo already exists and `exist_ok=True`
                 pass
@@ -3815,68 +3721,13 @@ class HfApi:
             json["type"] = repo_type
 
         headers = self._build_hf_headers(token=token)
-        r = get_session().delete(path, headers=headers, json=json)
+        r = get_session().request("DELETE", path, headers=headers, json=json)
         try:
             hf_raise_for_status(r)
         except RepositoryNotFoundError:
             if not missing_ok:
                 raise
 
-    @_deprecate_method(version="0.32", message="Please use `update_repo_settings` instead.")
-    @validate_hf_hub_args
-    def update_repo_visibility(
-        self,
-        repo_id: str,
-        private: bool = False,
-        *,
-        token: Union[str, bool, None] = None,
-        repo_type: Optional[str] = None,
-    ) -> Dict[str, bool]:
-        """Update the visibility setting of a repository.
-
-        Deprecated. Use `update_repo_settings` instead.
-
-        Args:
-            repo_id (`str`, *optional*):
-                A namespace (user or an organization) and a repo name separated by a `/`.
-            private (`bool`, *optional*, defaults to `False`):
-                Whether the repository should be private.
-            token (Union[bool, str, None], optional):
-                A valid user access token (string). Defaults to the locally saved
-                token, which is the recommended method for authentication (see
-                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
-                To disable authentication, pass `False`.
-            repo_type (`str`, *optional*):
-                Set to `"dataset"` or `"space"` if uploading to a dataset or
-                space, `None` or `"model"` if uploading to a model. Default is
-                `None`.
-
-        Returns:
-            The HTTP response in json.
-
-        <Tip>
-
-        Raises the following errors:
-
-            - [`~utils.RepositoryNotFoundError`]
-              If the repository to download from cannot be found. This may be because it doesn't exist,
-              or because it is set to `private` and you do not have access.
-
-        </Tip>
-        """
-        if repo_type not in constants.REPO_TYPES:
-            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
-        if repo_type is None:
-            repo_type = constants.REPO_TYPE_MODEL  # default repo type
-
-        r = get_session().put(
-            url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
-            headers=self._build_hf_headers(token=token),
-            json={"private": private},
-        )
-        hf_raise_for_status(r)
-        return r.json()
-
     @validate_hf_hub_args
     def update_repo_settings(
         self,
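Since the deprecated helper is deleted rather than re-routed, callers upgrading to 1.0 need a one-line migration; a sketch, assuming a repo you administer:

```python
from huggingface_hub import HfApi

api = HfApi()

# Before (<= 0.x, deprecated since 0.32):
# api.update_repo_visibility("my-username/my-model", private=True)

# After (1.x): `update_repo_settings` covers the same use case.
api.update_repo_settings("my-username/my-model", private=True)
```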
@@ -3932,7 +3783,7 @@ class HfApi:
             repo_type = constants.REPO_TYPE_MODEL  # default repo type
 
         # Prepare the JSON payload for the PUT request
-        payload: Dict = {}
+        payload: dict = {}
 
         if gated is not None:
             if gated not in ["auto", "manual", False]:
@@ -4327,12 +4178,12 @@ class HfApi:
         params = {"create_pr": "1"} if create_pr else None
 
         try:
-            commit_resp = get_session().post(url=commit_url, headers=headers, data=data, params=params)
+            commit_resp = get_session().post(url=commit_url, headers=headers, content=data, params=params)
             hf_raise_for_status(commit_resp, endpoint_name="commit")
         except RepositoryNotFoundError as e:
             e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE)
             raise
-        except EntryNotFoundError as e:
+        except RemoteEntryNotFoundError as e:
             if nb_deletions > 0 and "A file with this name doesn't exist" in str(e):
                 e.append_to_message(
                     "\nMake sure to differentiate file and folder paths in delete"
@@ -4642,7 +4493,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -4676,7 +4527,6 @@ class HfApi:
         ...     repo_type="dataset",
         ...     token="my_token",
         ... )
-        "https://huggingface.co/datasets/username/my-dataset/blob/main/remote/file/path.h5"
 
         >>> upload_file(
         ...     path_or_fileobj=".\\\\local\\\\file\\\\path",
@@ -4684,7 +4534,6 @@ class HfApi:
         ...     repo_id="username/my-model",
         ...     token="my_token",
         ... )
-        "https://huggingface.co/username/my-model/blob/main/remote/file/path.h5"
 
         >>> upload_file(
         ...     path_or_fileobj=".\\\\local\\\\file\\\\path",
@@ -4693,7 +4542,6 @@ class HfApi:
         ...     token="my_token",
         ...     create_pr=True,
         ... )
-        "https://huggingface.co/username/my-model/blob/refs%2Fpr%2F1/remote/file/path.h5"
         ```
         """
         if repo_type not in constants.REPO_TYPES:
@@ -4707,7 +4555,7 @@ class HfApi:
             path_in_repo=path_in_repo,
         )
 
-        commit_info = self.create_commit(
+        return self.create_commit(
             repo_id=repo_id,
             repo_type=repo_type,
             operations=[operation],
@@ -4719,23 +4567,6 @@ class HfApi:
             parent_commit=parent_commit,
         )
 
-        if commit_info.pr_url is not None:
-            revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
-        if repo_type in constants.REPO_TYPES_URL_PREFIXES:
-            repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
-        revision = revision if revision is not None else constants.DEFAULT_REVISION
-
-        return CommitInfo(
-            commit_url=commit_info.commit_url,
-            commit_message=commit_info.commit_message,
-            commit_description=commit_info.commit_description,
-            oid=commit_info.oid,
-            pr_url=commit_info.pr_url,
-            # Similar to `hf_hub_url` but it's "blob" instead of "resolve"
-            # TODO: remove this in v1.0
-            _url=f"{self.endpoint}/{repo_id}/blob/{revision}/{path_in_repo}",
-        )
-
     @overload
     def upload_folder(  # type: ignore
         self,
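With the legacy `CommitInfo(..., _url=...)` reconstruction gone, `upload_file` simply returns what `create_commit` returns, which is why the URL-like doctest outputs above were deleted. A sketch of reading the result; paths and repo id are hypothetical:

```python
from huggingface_hub import HfApi

api = HfApi()
commit = api.upload_file(
    path_or_fileobj="./weights.bin",
    path_in_repo="weights.bin",
    repo_id="my-username/my-model",
)
print(commit.commit_url)  # permalink of the created commit
print(commit.pr_url)      # set when create_pr=True, otherwise None
```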
@@ -4750,9 +4581,9 @@ class HfApi:
         revision: Optional[str] = None,
         create_pr: Optional[bool] = None,
         parent_commit: Optional[str] = None,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
-        delete_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
+        delete_patterns: Optional[Union[list[str], str]] = None,
         run_as_future: Literal[False] = ...,
     ) -> CommitInfo: ...
 
@@ -4770,9 +4601,9 @@ class HfApi:
         revision: Optional[str] = None,
         create_pr: Optional[bool] = None,
         parent_commit: Optional[str] = None,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
-        delete_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
+        delete_patterns: Optional[Union[list[str], str]] = None,
         run_as_future: Literal[True] = ...,
     ) -> Future[CommitInfo]: ...
 
@@ -4791,9 +4622,9 @@ class HfApi:
         revision: Optional[str] = None,
         create_pr: Optional[bool] = None,
         parent_commit: Optional[str] = None,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
-        delete_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
+        delete_patterns: Optional[Union[list[str], str]] = None,
         run_as_future: bool = False,
     ) -> Union[CommitInfo, Future[CommitInfo]]:
         """
@@ -4855,11 +4686,11 @@ class HfApi:
                 If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`.
                 Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be
                 especially useful if the repo is updated / committed to concurrently.
-            allow_patterns (`List[str]` or `str`, *optional*):
+            allow_patterns (`list[str]` or `str`, *optional*):
                 If provided, only files matching at least one pattern are uploaded.
-            ignore_patterns (`List[str]` or `str`, *optional*):
+            ignore_patterns (`list[str]` or `str`, *optional*):
                 If provided, files matching any of the patterns are not uploaded.
-            delete_patterns (`List[str]` or `str`, *optional*):
+            delete_patterns (`list[str]` or `str`, *optional*):
                 If provided, remote files matching any of the patterns will be deleted from the repo while committing
                 new files. This is useful if you don't know which files have already been uploaded.
                 Note: to avoid discrepancies the `.gitattributes` file is not deleted even if it matches the pattern.
@@ -4878,7 +4709,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -4911,7 +4742,6 @@ class HfApi:
         ...     token="my_token",
         ...     ignore_patterns="**/logs/*.txt",
         ... )
-        # "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints"
 
         # Upload checkpoints folder including logs while deleting existing logs from the repo
         # Useful if you don't know exactly which log files have already been pushed
@@ -4923,7 +4753,6 @@ class HfApi:
         ...     token="my_token",
         ...     delete_patterns="**/logs/*.txt",
         ... )
-        "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints"
 
         # Upload checkpoints folder while creating a PR
         >>> upload_folder(
@@ -4934,8 +4763,6 @@ class HfApi:
         ...     token="my_token",
         ...     create_pr=True,
         ... )
-        "https://huggingface.co/datasets/username/my-dataset/tree/refs%2Fpr%2F1/remote/experiment/checkpoints"
-
         ```
         """
         if repo_type not in constants.REPO_TYPES:
@@ -4979,7 +4806,7 @@ class HfApi:
 
         commit_message = commit_message or "Upload folder using huggingface_hub"
 
-        commit_info = self.create_commit(
+        return self.create_commit(
             repo_type=repo_type,
             repo_id=repo_id,
             operations=commit_operations,
@@ -4991,24 +4818,6 @@ class HfApi:
             parent_commit=parent_commit,
         )
 
-        # Create url to uploaded folder (for legacy return value)
-        if create_pr and commit_info.pr_url is not None:
-            revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
-            if repo_type in constants.REPO_TYPES_URL_PREFIXES:
-                repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
-            revision = revision if revision is not None else constants.DEFAULT_REVISION
-
-        return CommitInfo(
-            commit_url=commit_info.commit_url,
-            commit_message=commit_info.commit_message,
-            commit_description=commit_info.commit_description,
-            oid=commit_info.oid,
-            pr_url=commit_info.pr_url,
-            # Similar to `hf_hub_url` but it's "tree" instead of "resolve"
-            # TODO: remove this in v1.0
-            _url=f"{self.endpoint}/{repo_id}/tree/{revision}/{path_in_repo}",
-        )
-
     @validate_hf_hub_args
     def delete_file(
         self,
@@ -5066,7 +4875,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -5075,7 +4884,7 @@ class HfApi:
               or because it is set to `private` and you do not have access.
             - [`~utils.RevisionNotFoundError`]
               If the revision to download from cannot be found.
-            - [`~utils.EntryNotFoundError`]
+            - [`~utils.RemoteEntryNotFoundError`]
               If the file to download cannot be found.
 
         </Tip>
@@ -5103,7 +4912,7 @@ class HfApi:
     def delete_files(
         self,
         repo_id: str,
-        delete_patterns: List[str],
+        delete_patterns: list[str],
         *,
         token: Union[bool, str, None] = None,
         repo_type: Optional[str] = None,
@@ -5123,7 +4932,7 @@ class HfApi:
             repo_id (`str`):
                 The repository from which the folder will be deleted, for example:
                 `"username/custom_transformers"`
-            delete_patterns (`List[str]`):
+            delete_patterns (`list[str]`):
                 List of files or folders to delete. Each string can either be
                 a file path, a folder path or a Unix shell-style wildcard.
                 E.g. `["file.txt", "folder/", "data/*.parquet"]`
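A usage sketch for the pattern list described above; the repo id is hypothetical:

```python
from huggingface_hub import HfApi

api = HfApi()
# Each entry can be a file path, a folder path ("folder/") or a Unix-style wildcard.
api.delete_files(
    repo_id="my-username/my-dataset",
    repo_type="dataset",
    delete_patterns=["logs/*.txt", "old-checkpoints/"],
)
```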
@@ -5251,8 +5060,8 @@ class HfApi:
         repo_type: str,  # Repo type is required!
         revision: Optional[str] = None,
         private: Optional[bool] = None,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
         num_workers: Optional[int] = None,
         print_report: bool = True,
         print_report_every: int = 60,
@@ -5280,9 +5089,9 @@ class HfApi:
             private (`bool`, `optional`):
                 Whether the repository should be private.
                 If `None` (default), the repo will be public unless the organization's default is private.
-            allow_patterns (`List[str]` or `str`, *optional*):
+            allow_patterns (`list[str]` or `str`, *optional*):
                 If provided, only files matching at least one pattern are uploaded.
-            ignore_patterns (`List[str]` or `str`, *optional*):
+            ignore_patterns (`list[str]` or `str`, *optional*):
                 If provided, files matching any of the patterns are not uploaded.
             num_workers (`int`, *optional*):
                 Number of workers to start. Defaults to `os.cpu_count() - 2` (minimum 2).
@@ -5372,7 +5181,6 @@ class HfApi:
         *,
         url: str,
         token: Union[bool, str, None] = None,
-        proxies: Optional[Dict] = None,
         timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT,
     ) -> HfFileMetadata:
         """Fetch metadata of a file versioned on the Hub for a given url.
@@ -5385,8 +5193,6 @@ class HfApi:
                 token, which is the recommended method for authentication (see
                 https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                 To disable authentication, pass `False`.
-            proxies (`dict`, *optional*):
-                Dictionary mapping protocol to the URL of the proxy passed to `requests.request`.
             timeout (`float`, *optional*, defaults to 10):
                 How many seconds to wait for the server to send metadata before giving up.
 
@@ -5400,7 +5206,6 @@ class HfApi:
         return get_hf_file_metadata(
             url=url,
             token=token,
-            proxies=proxies,
             timeout=timeout,
             library_name=self.library_name,
             library_version=self.library_version,
@@ -5420,14 +5225,9 @@ class HfApi:
         cache_dir: Union[str, Path, None] = None,
         local_dir: Union[str, Path, None] = None,
         force_download: bool = False,
-        proxies: Optional[Dict] = None,
         etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
         token: Union[bool, str, None] = None,
         local_files_only: bool = False,
-        # Deprecated args
-        resume_download: Optional[bool] = None,
-        force_filename: Optional[str] = None,
-        local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
     ) -> str:
         """Download a given file if it's not already present in the local cache.
 
@@ -5484,12 +5284,9 @@ class HfApi:
             force_download (`bool`, *optional*, defaults to `False`):
                 Whether the file should be downloaded even if it already exists in
                 the local cache.
-            proxies (`dict`, *optional*):
-                Dictionary mapping protocol to the URL of the proxy passed to
-                `requests.request`.
             etag_timeout (`float`, *optional*, defaults to `10`):
                 When fetching ETag, how many seconds to wait for the server to send
-                data before giving up which is passed to `requests.request`.
+                data before giving up which is passed to `httpx.request`.
             token (Union[bool, str, None], optional):
                 A valid user access token (string). Defaults to the locally saved
                 token, which is the recommended method for authentication (see
@@ -5508,7 +5305,7 @@ class HfApi:
               or because it is set to `private` and you do not have access.
             [`~utils.RevisionNotFoundError`]
               If the revision to download from cannot be found.
-            [`~utils.EntryNotFoundError`]
+            [`~utils.RemoteEntryNotFoundError`]
               If the file to download cannot be found.
             [`~utils.LocalEntryNotFoundError`]
               If network is disabled or unavailable and file is not found in cache.
@@ -5536,13 +5333,9 @@ class HfApi:
             library_version=self.library_version,
             cache_dir=cache_dir,
             local_dir=local_dir,
-            local_dir_use_symlinks=local_dir_use_symlinks,
             user_agent=self.user_agent,
             force_download=force_download,
-            force_filename=force_filename,
-            proxies=proxies,
             etag_timeout=etag_timeout,
-            resume_download=resume_download,
             token=token,
             headers=self.headers,
             local_files_only=local_files_only,
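Callers of `hf_hub_download` upgrading past this change just drop the deleted keyword arguments; a minimal sketch of a 1.x-style call, using only the surviving parameters shown above:

```python
from huggingface_hub import HfApi

api = HfApi()
# The removed proxies / resume_download / force_filename / local_dir_use_symlinks
# arguments are simply gone; pass only what the new signature accepts.
path = api.hf_hub_download(
    repo_id="google-bert/bert-base-uncased",  # any public repo works here
    filename="config.json",
    etag_timeout=10,  # still forwarded to the HTTP backend (now httpx)
)
print(path)
```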
@@ -5557,18 +5350,14 @@ class HfApi:
         revision: Optional[str] = None,
         cache_dir: Union[str, Path, None] = None,
         local_dir: Union[str, Path, None] = None,
-        proxies: Optional[Dict] = None,
         etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
         force_download: bool = False,
         token: Union[bool, str, None] = None,
         local_files_only: bool = False,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
         max_workers: int = 8,
-        tqdm_class: Optional[base_tqdm] = None,
-        # Deprecated args
-        local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
-        resume_download: Optional[bool] = None,
+        tqdm_class: Optional[type[base_tqdm]] = None,
     ) -> str:
         """Download repo files.
 
@@ -5598,12 +5387,9 @@ class HfApi:
                 Path to the folder where cached files are stored.
             local_dir (`str` or `Path`, *optional*):
                 If provided, the downloaded files will be placed under this directory.
-            proxies (`dict`, *optional*):
-                Dictionary mapping protocol to the URL of the proxy passed to
-                `requests.request`.
             etag_timeout (`float`, *optional*, defaults to `10`):
                 When fetching ETag, how many seconds to wait for the server to send
-                data before giving up which is passed to `requests.request`.
+                data before giving up which is passed to `httpx.request`.
             force_download (`bool`, *optional*, defaults to `False`):
                 Whether the file should be downloaded even if it already exists in the local cache.
             token (Union[bool, str, None], optional):
@@ -5614,9 +5400,9 @@ class HfApi:
             local_files_only (`bool`, *optional*, defaults to `False`):
                 If `True`, avoid downloading the file and return the path to the
                 local cached file if it exists.
-            allow_patterns (`List[str]` or `str`, *optional*):
+            allow_patterns (`list[str]` or `str`, *optional*):
                 If provided, only files matching at least one pattern are downloaded.
-            ignore_patterns (`List[str]` or `str`, *optional*):
+            ignore_patterns (`list[str]` or `str`, *optional*):
                 If provided, files matching any of the patterns are not downloaded.
             max_workers (`int`, *optional*):
                 Number of concurrent threads to download files (1 thread = 1 file download).
@@ -5657,13 +5443,10 @@ class HfApi:
             endpoint=self.endpoint,
             cache_dir=cache_dir,
             local_dir=local_dir,
-            local_dir_use_symlinks=local_dir_use_symlinks,
             library_name=self.library_name,
             library_version=self.library_version,
             user_agent=self.user_agent,
-            proxies=proxies,
             etag_timeout=etag_timeout,
-            resume_download=resume_download,
             force_download=force_download,
             token=token,
             local_files_only=local_files_only,
@@ -6280,7 +6063,7 @@ class HfApi:
         headers = self._build_hf_headers(token=token)
         path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions"
 
-        params: Dict[str, Union[str, int]] = {}
+        params: dict[str, Union[str, int]] = {}
         if discussion_type is not None:
             params["type"] = discussion_type
         if discussion_status is not None:
@@ -6350,7 +6133,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6443,7 +6226,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6531,7 +6314,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6558,7 +6341,7 @@ class HfApi:
         body: Optional[dict] = None,
         token: Union[bool, str, None] = None,
         repo_type: Optional[str] = None,
-    ) -> requests.Response:
+    ) -> httpx.Response:
         """Internal utility to POST changes to a Discussion or Pull Request"""
         if not isinstance(discussion_num, int) or discussion_num <= 0:
             raise ValueError("Invalid discussion_num, must be a positive integer")
@@ -6571,7 +6354,7 @@ class HfApi:
         path = f"{self.endpoint}/api/{repo_id}/discussions/{discussion_num}/{resource}"
 
         headers = self._build_hf_headers(token=token)
-        resp = requests.post(path, headers=headers, json=body)
+        resp = get_session().post(path, headers=headers, json=body)
         hf_raise_for_status(resp)
         return resp
 
@@ -6634,7 +6417,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6704,7 +6487,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6777,7 +6560,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6789,7 +6572,7 @@ class HfApi:
         """
         if new_status not in ["open", "closed"]:
             raise ValueError("Invalid status, valid statuses are: 'open' and 'closed'")
-        body: Dict[str, str] = {"status": new_status}
+        body: dict[str, str] = {"status": new_status}
         if comment and comment.strip():
             body["comment"] = comment.strip()
         resp = self._post_discussion_changes(
@@ -6839,7 +6622,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6898,7 +6681,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -6959,7 +6742,7 @@ class HfApi:
 
         Raises the following errors:
 
-            - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+            - [`HfHubHTTPError`]
               if the HuggingFace API returned an error
             - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
               if some parameter value is invalid
@@ -7040,7 +6823,8 @@ class HfApi:
                 https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                 To disable authentication, pass `False`.
         """
-        r = get_session().delete(
+        r = get_session().request(
+            "DELETE",
             f"{self.endpoint}/api/spaces/{repo_id}/secrets",
             headers=self._build_hf_headers(token=token),
             json={"key": key},
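The switch from `.delete(...)` to `.request("DELETE", ...)` here (and in `delete_space_variable` below) follows from httpx's API: unlike `requests`, its `.delete()` shortcut accepts no request body, so a DELETE carrying a JSON payload must go through `.request`. A standalone httpx sketch (demo URL only):

```python
import httpx

with httpx.Client() as client:
    # httpx has no `json=` parameter on client.delete(); use .request instead.
    r = client.request("DELETE", "https://httpbin.org/delete", json={"key": "MY_SECRET"})
    r.raise_for_status()
```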
@@ -7048,7 +6832,7 @@ class HfApi:
         hf_raise_for_status(r)
 
     @validate_hf_hub_args
-    def get_space_variables(self, repo_id: str, *, token: Union[bool, str, None] = None) -> Dict[str, SpaceVariable]:
+    def get_space_variables(self, repo_id: str, *, token: Union[bool, str, None] = None) -> dict[str, SpaceVariable]:
         """Gets all variables from a Space.
 
         Variables allow you to set environment variables in a Space without hardcoding them.
@@ -7079,7 +6863,7 @@ class HfApi:
         *,
         description: Optional[str] = None,
         token: Union[bool, str, None] = None,
-    ) -> Dict[str, SpaceVariable]:
+    ) -> dict[str, SpaceVariable]:
         """Adds or updates a variable in a Space.
 
         Variables allow you to set environment variables in a Space without hardcoding them.
@@ -7114,7 +6898,7 @@ class HfApi:
     @validate_hf_hub_args
     def delete_space_variable(
         self, repo_id: str, key: str, *, token: Union[bool, str, None] = None
-    ) -> Dict[str, SpaceVariable]:
+    ) -> dict[str, SpaceVariable]:
         """Deletes a variable from a Space.
 
         Variables allow you to set environment variables in a Space without hardcoding them.
@@ -7131,7 +6915,8 @@ class HfApi:
                 https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                 To disable authentication, pass `False`.
         """
-        r = get_session().delete(
+        r = get_session().request(
+            "DELETE",
             f"{self.endpoint}/api/spaces/{repo_id}/variables",
             headers=self._build_hf_headers(token=token),
             json={"key": key},
@@ -7202,7 +6987,7 @@ class HfApi:
                 " you want to set a custom sleep time, you need to upgrade to a paid Hardware.",
                 UserWarning,
             )
-        payload: Dict[str, Any] = {"flavor": hardware}
+        payload: dict[str, Any] = {"flavor": hardware}
         if sleep_time is not None:
             payload["sleepTimeSeconds"] = sleep_time
         r = get_session().post(
@@ -7361,8 +7146,8 @@ class HfApi:
         hardware: Optional[SpaceHardware] = None,
         storage: Optional[SpaceStorage] = None,
         sleep_time: Optional[int] = None,
-        secrets: Optional[List[Dict[str, str]]] = None,
-        variables: Optional[List[Dict[str, str]]] = None,
+        secrets: Optional[list[dict[str, str]]] = None,
+        variables: Optional[list[dict[str, str]]] = None,
     ) -> RepoUrl:
         """Duplicate a Space.
 
@@ -7393,10 +7178,10 @@ class HfApi:
                 your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure
                 the sleep time (value is fixed to 48 hours of inactivity).
                 See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details.
-            secrets (`List[Dict[str, str]]`, *optional*):
+            secrets (`list[dict[str, str]]`, *optional*):
                 A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
                 For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets.
-            variables (`List[Dict[str, str]]`, *optional*):
+            variables (`list[dict[str, str]]`, *optional*):
                 A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
                 For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables.
 
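A usage sketch for `duplicate_space` with the secret/variable shape documented above; `to_id` and the values are hypothetical:

```python
from huggingface_hub import HfApi

api = HfApi()
api.duplicate_space(
    from_id="multimodalart/dreambooth-training",
    to_id="my-username/my-dreambooth",  # hypothetical target
    private=True,
    secrets=[{"key": "HF_TOKEN", "value": "hf_xxx"}],
    variables=[{"key": "MODEL_NAME", "value": "my-model"}],
)
```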
@@ -7408,7 +7193,7 @@ class HfApi:
             [`~utils.RepositoryNotFoundError`]:
                 If one of `from_id` or `to_id` cannot be found. This may be because it doesn't exist,
                 or because it is set to `private` and you do not have access.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 If the HuggingFace API returned an error
 
         Example:
@@ -7436,7 +7221,7 @@ class HfApi:
         to_repo_name = parsed_to_id.repo_name if to_id is not None else RepoUrl(from_id).repo_name  # type: ignore
 
         # repository must be a valid repo_id (namespace/repo_name).
-        payload: Dict[str, Any] = {"repository": f"{to_namespace}/{to_repo_name}"}
+        payload: dict[str, Any] = {"repository": f"{to_namespace}/{to_repo_name}"}
 
         keys = ["private", "hardware", "storageTier", "sleepTimeSeconds", "secrets", "variables"]
         values = [private, hardware, storage, sleep_time, secrets, variables]
@@ -7458,7 +7243,7 @@ class HfApi:
 
         try:
             hf_raise_for_status(r)
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             if exist_ok and err.response.status_code == 409:
                 # Repo already exists and `exist_ok=True`
                 pass
@@ -7497,7 +7282,7 @@ class HfApi:
 
         </Tip>
         """
-        payload: Dict[str, SpaceStorage] = {"tier": storage}
+        payload: dict[str, SpaceStorage] = {"tier": storage}
         r = get_session().post(
             f"{self.endpoint}/api/spaces/{repo_id}/storage",
             headers=self._build_hf_headers(token=token),
@@ -7543,7 +7328,7 @@ class HfApi:
 
     def list_inference_endpoints(
         self, namespace: Optional[str] = None, *, token: Union[bool, str, None] = None
-    ) -> List[InferenceEndpoint]:
+    ) -> list[InferenceEndpoint]:
         """Lists all inference endpoints for the given namespace.
 
         Args:
@@ -7557,7 +7342,7 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Returns:
-            List[`InferenceEndpoint`]: A list of all inference endpoints for the given namespace.
+            list[`InferenceEndpoint`]: A list of all inference endpoints for the given namespace.
 
         Example:
         ```python
@@ -7572,7 +7357,7 @@ class HfApi:
             user = self.whoami(token=token)
 
             # List personal endpoints first
-            endpoints: List[InferenceEndpoint] = list_inference_endpoints(namespace=self._get_namespace(token=token))
+            endpoints: list[InferenceEndpoint] = list_inference_endpoints(namespace=self._get_namespace(token=token))
 
             # Then list endpoints for all orgs the user belongs to and ignore 401 errors (no billing or no access)
             for org in user.get("orgs", []):
@@ -7616,14 +7401,14 @@ class HfApi:
         scale_to_zero_timeout: Optional[int] = None,
         revision: Optional[str] = None,
         task: Optional[str] = None,
-        custom_image: Optional[Dict] = None,
-        env: Optional[Dict[str, str]] = None,
-        secrets: Optional[Dict[str, str]] = None,
+        custom_image: Optional[dict] = None,
+        env: Optional[dict[str, str]] = None,
+        secrets: Optional[dict[str, str]] = None,
         type: InferenceEndpointType = InferenceEndpointType.PROTECTED,
         domain: Optional[str] = None,
         path: Optional[str] = None,
         cache_http_responses: Optional[bool] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
         namespace: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> InferenceEndpoint:
@@ -7660,12 +7445,12 @@ class HfApi:
                 The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
             task (`str`, *optional*):
                 The task on which to deploy the model (e.g. `"text-classification"`).
-            custom_image (`Dict`, *optional*):
+            custom_image (`dict`, *optional*):
                 A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
                 Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
-            env (`Dict[str, str]`, *optional*):
+            env (`dict[str, str]`, *optional*):
                 Non-secret environment variables to inject in the container environment.
-            secrets (`Dict[str, str]`, *optional*):
+            secrets (`dict[str, str]`, *optional*):
                 Secret values to inject in the container environment.
             type ([`InferenceEndpointType`], *optional*):
                 The type of the Inference Endpoint, which can be `"protected"` (default), `"public"` or `"private"`.
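A sketch of the kind of `custom_image` dict meant here, modeled on the TGI example in the library documentation; treat the exact image keys (`health_route`, `url`, nested `env`) and the instance values as assumptions to check against the current Inference Endpoints docs:

```python
from huggingface_hub import HfApi

api = HfApi()
endpoint = api.create_inference_endpoint(
    "my-endpoint-name",  # hypothetical endpoint name
    repository="gpt2",
    framework="pytorch",
    task="text-generation",
    accelerator="cpu",
    vendor="aws",
    region="us-east-1",
    instance_size="x4",        # assumed instance values, from the docs examples
    instance_type="intel-icl",
    custom_image={
        "health_route": "/health",
        "url": "ghcr.io/huggingface/text-generation-inference:latest",
        "env": {"MAX_TOTAL_TOKENS": "1024"},
    },
)
```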
@@ -7675,7 +7460,7 @@ class HfApi:
                 The custom path to the deployed model, should start with a `/` (e.g. `"/models/google-bert/bert-base-uncased"`).
             cache_http_responses (`bool`, *optional*):
                 Whether to cache HTTP responses from the Inference Endpoint. Defaults to `False`.
-            tags (`List[str]`, *optional*):
+            tags (`list[str]`, *optional*):
                 A list of tags to associate with the Inference Endpoint.
             namespace (`str`, *optional*):
                 The namespace where the Inference Endpoint will be created. Defaults to the current user's namespace.
@@ -7778,7 +7563,7 @@ class HfApi:
         else:
             image = {"huggingface": {}}
 
-        payload: Dict = {
+        payload: dict = {
             "accountId": account_id,
             "compute": {
                 "accelerator": accelerator,
@@ -7867,7 +7652,7 @@ class HfApi:
         </Tip>
         """
         token = token or self.token or get_token()
-        payload: Dict = {
+        payload: dict = {
             "namespace": namespace or self._get_namespace(token=token),
             "repoId": repo_id,
         }
@@ -7885,7 +7670,7 @@ class HfApi:
 
     @experimental
     @validate_hf_hub_args
-    def list_inference_catalog(self, *, token: Union[bool, str, None] = None) -> List[str]:
+    def list_inference_catalog(self, *, token: Union[bool, str, None] = None) -> list[str]:
        """List models available in the Hugging Face Inference Catalog.
 
         The goal of the Inference Catalog is to provide a curated list of models that are optimized for inference
@@ -7901,7 +7686,7 @@ class HfApi:
                 https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
 
         Returns:
-            List[`str`]: A list of model IDs available in the catalog.
+            list[`str`]: A list of model IDs available in the catalog.
 
         <Tip warning={true}>
 
         `list_inference_catalog` is experimental. Its API is subject to change in the future. Please provide feedback
@@ -7979,15 +7764,15 @@ class HfApi:
         framework: Optional[str] = None,
         revision: Optional[str] = None,
         task: Optional[str] = None,
-        custom_image: Optional[Dict] = None,
-        env: Optional[Dict[str, str]] = None,
-        secrets: Optional[Dict[str, str]] = None,
+        custom_image: Optional[dict] = None,
+        env: Optional[dict[str, str]] = None,
+        secrets: Optional[dict[str, str]] = None,
         # Route update
         domain: Optional[str] = None,
         path: Optional[str] = None,
         # Other
         cache_http_responses: Optional[bool] = None,
-        tags: Optional[List[str]] = None,
+        tags: Optional[list[str]] = None,
         namespace: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> InferenceEndpoint:
@@ -8023,12 +7808,12 @@ class HfApi:
                 The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
             task (`str`, *optional*):
                 The task on which to deploy the model (e.g. `"text-classification"`).
-            custom_image (`Dict`, *optional*):
+            custom_image (`dict`, *optional*):
                 A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
                 Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
-            env (`Dict[str, str]`, *optional*):
+            env (`dict[str, str]`, *optional*):
                 Non-secret environment variables to inject in the container environment.
-            secrets (`Dict[str, str]`, *optional*):
+            secrets (`dict[str, str]`, *optional*):
                 Secret values to inject in the container environment.
 
             domain (`str`, *optional*):
@@ -8038,7 +7823,7 @@ class HfApi:
 
             cache_http_responses (`bool`, *optional*):
                 Whether to cache HTTP responses from the Inference Endpoint.
-            tags (`List[str]`, *optional*):
+            tags (`list[str]`, *optional*):
                 A list of tags to associate with the Inference Endpoint.
 
             namespace (`str`, *optional*):
@@ -8055,7 +7840,7 @@ class HfApi:
         namespace = namespace or self._get_namespace(token=token)
 
         # Populate only the fields that are not None
-        payload: Dict = defaultdict(lambda: defaultdict(dict))
+        payload: dict = defaultdict(lambda: defaultdict(dict))
         if accelerator is not None:
             payload["compute"]["accelerator"] = accelerator
         if instance_size is not None:
@@ -8262,8 +8047,8 @@ class HfApi:
     def list_collections(
         self,
         *,
-        owner: Union[List[str], str, None] = None,
-        item: Union[List[str], str, None] = None,
+        owner: Union[list[str], str, None] = None,
+        item: Union[list[str], str, None] = None,
         sort: Optional[Literal["lastModified", "trending", "upvotes"]] = None,
         limit: Optional[int] = None,
         token: Union[bool, str, None] = None,
@@ -8278,9 +8063,9 @@ class HfApi:
         </Tip>
 
         Args:
-            owner (`List[str]` or `str`, *optional*):
+            owner (`list[str]` or `str`, *optional*):
                 Filter by owner's username.
-            item (`List[str]` or `str`, *optional*):
+            item (`list[str]` or `str`, *optional*):
                 Filter collections containing particular items. Example: `"models/teknium/OpenHermes-2.5-Mistral-7B"`, `"datasets/squad"` or `"papers/2311.12983"`.
             sort (`Literal["lastModified", "trending", "upvotes"]`, *optional*):
                 Sort collections by last modified, trending or upvotes.
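A usage sketch for these filters; the owner and sort values are arbitrary:

```python
from huggingface_hub import HfApi

api = HfApi()
# owner/item accept either a single string or a list of strings.
for collection in api.list_collections(owner="huggingface", sort="trending", limit=5):
    print(collection.slug)
```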
@@ -8298,7 +8083,7 @@ class HfApi:
         # Construct the API endpoint
         path = f"{self.endpoint}/api/collections"
         headers = self._build_hf_headers(token=token)
-        params: Dict = {}
+        params: dict = {}
         if owner is not None:
             params.update({"owner": owner})
         if item is not None:
@@ -8415,7 +8200,7 @@ class HfApi:
         )
         try:
             hf_raise_for_status(r)
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             if exists_ok and err.response.status_code == 409:
                 # Collection already exists and `exists_ok=True`
                 slug = r.json()["slug"]
@@ -8526,7 +8311,7 @@ class HfApi:
         )
         try:
             hf_raise_for_status(r)
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             if missing_ok and err.response.status_code == 404:
                 # Collection doesn't exist and `missing_ok=True`
                 return
@@ -8566,12 +8351,12 @@ class HfApi:
         Returns: [`Collection`]
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the item you try to add to the collection does not exist on the Hub.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 409 if the item you try to add to the collection is already in the collection (and exists_ok=False)
 
         Example:
@@ -8597,7 +8382,7 @@ class HfApi:
             (...)
         ```
         """
-        payload: Dict[str, Any] = {"item": {"id": item_id, "type": item_type}}
+        payload: dict[str, Any] = {"item": {"id": item_id, "type": item_type}}
         if note is not None:
             payload["note"] = note
         r = get_session().post(
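A usage sketch matching the payload built above; the collection slug is hypothetical, the item id comes from the library's own docstring example:

```python
from huggingface_hub import HfApi

api = HfApi()
api.add_collection_item(
    collection_slug="my-username/recipes-64f9a55bb3115b4f513ec026",  # hypothetical slug
    item_id="davanstrien/cosmochat",
    item_type="dataset",
    note="Dataset used to train the chat model",
    exists_ok=True,  # swallow the HTTP 409 if the item is already in the collection
)
```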
@@ -8607,7 +8392,7 @@ class HfApi:
         )
         try:
             hf_raise_for_status(r)
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             if exists_ok and err.response.status_code == 409:
                 # Item already exists and `exists_ok=True`
                 return self.get_collection(collection_slug, token=token)
@@ -8713,7 +8498,7 @@ class HfApi:
         )
         try:
             hf_raise_for_status(r)
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             if missing_ok and err.response.status_code == 404:
                 # Item already deleted and `missing_ok=True`
                 return
@@ -8727,7 +8512,7 @@ class HfApi:
     @validate_hf_hub_args
     def list_pending_access_requests(
         self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
-    ) -> List[AccessRequest]:
+    ) -> list[AccessRequest]:
         """
         Get pending access requests for a given gated repo.
 
@@ -8750,14 +8535,14 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Returns:
-            `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each entry contains a `username`, `email`,
+            `list[AccessRequest]`: A list of [`AccessRequest`] objects. Each entry contains a `username`, `email`,
                 `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
                 be populated with user's answers.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
 
@@ -8791,7 +8576,7 @@ class HfApi:
     @validate_hf_hub_args
     def list_accepted_access_requests(
         self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
-    ) -> List[AccessRequest]:
+    ) -> list[AccessRequest]:
         """
         Get accepted access requests for a given gated repo.
 
@@ -8816,14 +8601,14 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Returns:
-            `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each entry contains a `username`, `email`,
+            `list[AccessRequest]`: A list of [`AccessRequest`] objects. Each entry contains a `username`, `email`,
                 `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
                 be populated with user's answers.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
 
@@ -8853,7 +8638,7 @@ class HfApi:
     @validate_hf_hub_args
     def list_rejected_access_requests(
         self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
-    ) -> List[AccessRequest]:
+    ) -> list[AccessRequest]:
         """
         Get rejected access requests for a given gated repo.
 
@@ -8878,14 +8663,14 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Returns:
-            `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each entry contains a `username`, `email`,
+            `list[AccessRequest]`: A list of [`AccessRequest`] objects. Each entry contains a `username`, `email`,
                 `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
                 be populated with user's answers.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
 
@@ -8918,7 +8703,7 @@ class HfApi:
         status: Literal["accepted", "rejected", "pending"],
         repo_type: Optional[str] = None,
         token: Union[bool, str, None] = None,
-    ) -> List[AccessRequest]:
+    ) -> list[AccessRequest]:
         if repo_type not in constants.REPO_TYPES:
             raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
         if repo_type is None:
@@ -8967,16 +8752,16 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user does not exist on the Hub.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user access request cannot be found.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user access request is already in the pending list.
         """
         self._handle_access_request(repo_id, user, "pending", repo_type=repo_type, token=token)
@@ -9009,16 +8794,16 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user does not exist on the Hub.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user access request cannot be found.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user access request is already in the accepted list.
         """
         self._handle_access_request(repo_id, user, "accepted", repo_type=repo_type, token=token)
@@ -9059,16 +8844,16 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user does not exist on the Hub.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user access request cannot be found.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user access request is already in the rejected list.
         """
         self._handle_access_request(
@@ -9132,14 +8917,14 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Raises:
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the repo is not gated.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 400 if the user already has access to the repo.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
                 or `admin` role in the organization the repo belongs to or if you passed a `read` token.
-            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+            [`HfHubHTTPError`]:
                 HTTP 404 if the user does not exist on the Hub.
         """
         if repo_type not in constants.REPO_TYPES:
@@ -9211,7 +8996,7 @@ class HfApi:
         return webhook
 
     @validate_hf_hub_args
-    def list_webhooks(self, *, token: Union[bool, str, None] = None) -> List[WebhookInfo]:
+    def list_webhooks(self, *, token: Union[bool, str, None] = None) -> list[WebhookInfo]:
         """List all configured webhooks.
 
         Args:
@@ -9221,7 +9006,7 @@ class HfApi:
                 To disable authentication, pass `False`.
 
         Returns:
-            `List[WebhookInfo]`:
+            `list[WebhookInfo]`:
                 List of webhook info objects.
 
         Example:
@@ -9265,8 +9050,8 @@ class HfApi:
         self,
         *,
         url: str,
-        watched: List[Union[Dict, WebhookWatchedItem]],
-        domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None,
+        watched: list[Union[dict, WebhookWatchedItem]],
+        domains: Optional[list[constants.WEBHOOK_DOMAIN_T]] = None,
         secret: Optional[str] = None,
         token: Union[bool, str, None] = None,
     ) -> WebhookInfo:
@@ -9275,10 +9060,10 @@ class HfApi:
         Args:
             url (`str`):
                 URL to send the payload to.
-            watched (`List[WebhookWatchedItem]`):
+            watched (`list[WebhookWatchedItem]`):
                 List of [`WebhookWatchedItem`] to be watched by the webhook. It can be users, orgs, models, datasets or spaces.
                 Watched items can also be provided as plain dictionaries.
-            domains (`List[Literal["repo", "discussion"]]`, optional):
+            domains (`list[Literal["repo", "discussion"]]`, optional):
                 List of domains to watch. It can be "repo", "discussion" or both.
             secret (`str`, optional):
                 A secret to sign the payload with.
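A usage sketch with watched items passed as plain dicts, as the docstring allows; the URL and secret are hypothetical:

```python
from huggingface_hub import HfApi

api = HfApi()
webhook = api.create_webhook(
    url="https://webhook.site/my-hook",  # hypothetical receiver
    watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFace"}],
    domains=["repo", "discussion"],
    secret="my-secret",
)
print(webhook.id)
```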
@@ -9339,8 +9124,8 @@ class HfApi:
|
|
|
9339
9124
|
webhook_id: str,
|
|
9340
9125
|
*,
|
|
9341
9126
|
url: Optional[str] = None,
|
|
9342
|
-
watched: Optional[
|
|
9343
|
-
domains: Optional[
|
|
9127
|
+
watched: Optional[list[Union[dict, WebhookWatchedItem]]] = None,
|
|
9128
|
+
domains: Optional[list[constants.WEBHOOK_DOMAIN_T]] = None,
|
|
9344
9129
|
secret: Optional[str] = None,
|
|
9345
9130
|
token: Union[bool, str, None] = None,
|
|
9346
9131
|
) -> WebhookInfo:
|
|
@@ -9351,10 +9136,10 @@ class HfApi:
|
|
|
9351
9136
|
The unique identifier of the webhook to be updated.
|
|
9352
9137
|
url (`str`, optional):
|
|
9353
9138
|
The URL to which the payload will be sent.
|
|
9354
|
-
watched (`
|
|
9139
|
+
watched (`list[WebhookWatchedItem]`, optional):
|
|
9355
9140
|
List of items to watch. It can be users, orgs, models, datasets, or spaces.
|
|
9356
9141
|
Refer to [`WebhookWatchedItem`] for more details. Watched items can also be provided as plain dictionaries.
|
|
9357
|
-
domains (`
|
|
9142
|
+
domains (`list[Literal["repo", "discussion"]]`, optional):
|
|
9358
9143
|
The domains to watch. This can include "repo", "discussion", or both.
|
|
9359
9144
|
secret (`str`, optional):
|
|
9360
9145
|
A secret to sign the payload with, providing an additional layer of security.
|
|
@@ -9550,8 +9335,8 @@ class HfApi:
         token: Union[bool, str, None] = None,
         library_name: Optional[str] = None,
         library_version: Optional[str] = None,
-        user_agent: Union[
-    ) ->
+        user_agent: Union[dict, str, None] = None,
+    ) -> dict[str, str]:
         """
         Alias for [`build_hf_headers`] that uses the token from [`HfApi`] client
         when `token` is not provided.
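The `_build_hf_headers` change is again annotation-only (`Dict[str, str]` becomes `dict[str, str]`). Its public counterpart can be sketched like this; `token=False` skips authentication:

```python
# Sketch: build the headers dict the client attaches to Hub requests.
from huggingface_hub.utils import build_hf_headers

headers = build_hf_headers(token=False)  # no Authorization header
print(type(headers), sorted(headers))    # <class 'dict'> ['user-agent']
```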
@@ -9573,9 +9358,9 @@ class HfApi:
         repo_type: Optional[str],
         revision: Optional[str],
         path_in_repo: str,
-        delete_patterns: Optional[Union[
+        delete_patterns: Optional[Union[list[str], str]],
         token: Union[bool, str, None] = None,
-    ) ->
+    ) -> list[CommitOperationDelete]:
         """Generate the list of Delete operations for a commit to delete files from a repo.
 
         List remote files and match them against the `delete_patterns` constraints. Returns a list of [`CommitOperationDelete`]
@@ -9611,11 +9396,11 @@ class HfApi:
         self,
         folder_path: Union[str, Path],
         path_in_repo: str,
-        allow_patterns: Optional[Union[
-        ignore_patterns: Optional[Union[
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
         repo_type: Optional[str] = None,
         token: Union[bool, str, None] = None,
-    ) ->
+    ) -> list[CommitOperationAdd]:
         """Generate the list of Add operations for a commit to upload a folder.
 
         Files not matching the `allow_patterns` (allowlist) and `ignore_patterns` (denylist)
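The allowlist/denylist semantics mentioned in this docstring can be sketched with the library's own `filter_repo_objects` helper, which applies fnmatch-style patterns (an item must match the allowlist and miss the denylist):

```python
# Sketch: how allow_patterns/ignore_patterns select files for Add/Delete
# operations.
from huggingface_hub.utils import filter_repo_objects

paths = ["model.safetensors", "README.md", "logs/run-1.txt"]
kept = list(
    filter_repo_objects(
        paths,
        allow_patterns=["*.safetensors", "*.md"],
        ignore_patterns=["README.*"],
    )
)
print(kept)  # ['model.safetensors']
```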
@@ -9730,7 +9515,7 @@ class HfApi:
             `User`: A [`User`] object with the user's overview.
 
         Raises:
-            [`
+            [`HfHubHTTPError`]:
                 HTTP 404 If the user does not exist on the Hub.
         """
         r = get_session().get(
@@ -9756,7 +9541,7 @@ class HfApi:
             `Iterable[User]`: A list of [`User`] objects with the members of the organization.
 
         Raises:
-            [`
+            [`HfHubHTTPError`]:
                 HTTP 404 If the organization does not exist on the Hub.
 
         """
@@ -9784,7 +9569,7 @@ class HfApi:
             `Iterable[User]`: A list of [`User`] objects with the followers of the user.
 
         Raises:
-            [`
+            [`HfHubHTTPError`]:
                 HTTP 404 If the user does not exist on the Hub.
 
         """
@@ -9812,7 +9597,7 @@ class HfApi:
             `Iterable[User]`: A list of [`User`] objects with the users followed by the user.
 
         Raises:
-            [`
+            [`HfHubHTTPError`]:
                 HTTP 404 If the user does not exist on the Hub.
 
         """
@@ -9881,7 +9666,7 @@ class HfApi:
             `PaperInfo`: A `PaperInfo` object.
 
         Raises:
-            [`
+            [`HfHubHTTPError`]:
                 HTTP 404 If the paper does not exist on the Hub.
         """
         path = f"{self.endpoint}/api/papers/{id}"
@@ -9957,9 +9742,9 @@ class HfApi:
         self,
         *,
         image: str,
-        command:
-        env: Optional[
-        secrets: Optional[
+        command: list[str],
+        env: Optional[dict[str, Any]] = None,
+        secrets: Optional[dict[str, Any]] = None,
         flavor: Optional[SpaceHardware] = None,
         timeout: Optional[Union[int, float, str]] = None,
         namespace: Optional[str] = None,
@@ -9974,13 +9759,13 @@ class HfApi:
                 Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
                 Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
 
-            command (`
+            command (`list[str]`):
                 The command to run. Example: `["echo", "hello"]`.
 
-            env (`
+            env (`dict[str, Any]`, *optional*):
                 Defines the environment variables for the Job.
 
-            secrets (`
+            secrets (`dict[str, Any]`, *optional*):
                 Defines the secret environment variables for the Job.
 
             flavor (`str`, *optional*):
@@ -10004,7 +9789,7 @@ class HfApi:
 
             ```python
             >>> from huggingface_hub import run_job
-            >>> run_job("python:3.12", ["python", "-c" ,"print('Hello from HF compute!')"])
+            >>> run_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"])
             ```
 
             Run a GPU Job:
@@ -10013,47 +9798,23 @@ class HfApi:
             >>> from huggingface_hub import run_job
             >>> image = "pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"
             >>> command = ["python", "-c", "import torch; print(f"This code ran with the following GPU: {torch.cuda.get_device_name()}")"]
-            >>> run_job(image, command, flavor="a10g-small")
+            >>> run_job(image=image, command=command, flavor="a10g-small")
             ```
 
         """
-        if flavor is None:
-            flavor = SpaceHardware.CPU_BASIC
-
-        # prepare payload to send to HF Jobs API
-        input_json: Dict[str, Any] = {
-            "command": command,
-            "arguments": [],
-            "environment": env or {},
-            "flavor": flavor,
-        }
-        # secrets are optional
-        if secrets:
-            input_json["secrets"] = secrets
-        # timeout is optional
-        if timeout:
-            time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
-            if isinstance(timeout, str) and timeout[-1] in time_units_factors:
-                input_json["timeoutSeconds"] = int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
-            else:
-                input_json["timeoutSeconds"] = int(timeout)
-        # input is either from docker hub or from HF spaces
-        for prefix in (
-            "https://huggingface.co/spaces/",
-            "https://hf.co/spaces/",
-            "huggingface.co/spaces/",
-            "hf.co/spaces/",
-        ):
-            if image.startswith(prefix):
-                input_json["spaceId"] = image[len(prefix) :]
-                break
-        else:
-            input_json["dockerImage"] = image
         if namespace is None:
             namespace = self.whoami(token=token)["name"]
+        job_spec = _create_job_spec(
+            image=image,
+            command=command,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+        )
         response = get_session().post(
             f"https://huggingface.co/api/jobs/{namespace}",
-            json=
+            json=job_spec,
             headers=self._build_hf_headers(token=token),
         )
         hf_raise_for_status(response)
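The inline payload construction removed here moves into `_create_job_spec`. The timeout handling it performed is worth keeping in mind when passing `timeout`; a standalone sketch of that removed conversion:

```python
# Sketch of the removed timeout normalization: "s"/"m"/"h"/"d" suffixes are
# scaled to seconds, and bare numbers are treated as seconds already.
def timeout_to_seconds(timeout):
    time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
    if isinstance(timeout, str) and timeout[-1] in time_units_factors:
        return int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
    return int(timeout)

assert timeout_to_seconds("5m") == 300
assert timeout_to_seconds("1.5h") == 5400
assert timeout_to_seconds(300) == 300
```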
@@ -10086,8 +9847,8 @@ class HfApi:
 
             ```python
             >>> from huggingface_hub import fetch_job_logs, run_job
-            >>> job = run_job("python:3.12", ["python", "-c" ,"print('Hello from HF compute!')"])
-            >>> for log in fetch_job_logs(job.
+            >>> job = run_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"])
+            >>> for log in fetch_job_logs(job.id):
             ...     print(log)
             Hello from HF compute!
             ```
@@ -10113,29 +9874,28 @@ class HfApi:
             time.sleep(sleep_time)
             sleep_time = min(max_wait_time, max(min_wait_time, sleep_time * 2))
             try:
-
+                with get_session().stream(
+                    "GET",
                     f"https://huggingface.co/api/jobs/{namespace}/{job_id}/logs",
                     headers=self._build_hf_headers(token=token),
-                    stream=True,
                     timeout=120,
-                )
-
-
-
-
-
-
-
-
-
-
-            except requests.exceptions.ChunkedEncodingError:
+                ) as response:
+                    log = None
+                    for line in response.iter_lines():
+                        if line and line.startswith("data: {"):
+                            data = json.loads(line[len("data: ") :])
+                            # timestamp = data["timestamp"]
+                            if not data["data"].startswith("===== Job started"):
+                                logging_started = True
+                                log = data["data"]
+                                yield log
+                logging_finished = logging_started
+            except httpx.DecodingError:
                 # Response ended prematurely
                 break
             except KeyboardInterrupt:
                 break
-            except
+            except httpx.NetworkError as err:
                 is_timeout = err.__context__ and isinstance(getattr(err.__context__, "__cause__", None), TimeoutError)
                 if logging_started or not is_timeout:
                     raise
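The log-streaming rewrite swaps `requests` for `httpx` and parses server-sent `data: {...}` lines. The parsing step in isolation, as a sketch with synthetic input:

```python
# Sketch: extract log payloads from "data: {...}" lines, skipping the
# "===== Job started" banner, mirroring the loop added above.
import json

def parse_log_lines(lines):
    for line in lines:
        if line and line.startswith("data: {"):
            data = json.loads(line[len("data: "):])
            if not data["data"].startswith("===== Job started"):
                yield data["data"]

sample = [
    'data: {"data": "===== Job started at 2025-07-16 ====="}',
    'data: {"data": "Hello from HF compute!"}',
]
print(list(parse_log_lines(sample)))  # ['Hello from HF compute!']
```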
@@ -10158,7 +9918,7 @@ class HfApi:
         timeout: Optional[int] = None,
         namespace: Optional[str] = None,
         token: Union[bool, str, None] = None,
-    ) ->
+    ) -> list[JobInfo]:
         """
         List compute Jobs on Hugging Face infrastructure.
 
@@ -10210,8 +9970,8 @@ class HfApi:
 
             ```python
             >>> from huggingface_hub import inspect_job, run_job
-            >>> job = run_job("python:3.12", ["python", "-c" ,"print('Hello from HF compute!')"])
-            >>> inspect_job(job.
+            >>> job = run_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"])
+            >>> inspect_job(job.id)
             JobInfo(
                 id='68780d00bbe36d38803f645f',
                 created_at=datetime.datetime(2025, 7, 16, 20, 35, 12, 808000, tzinfo=datetime.timezone.utc),
@@ -10269,12 +10029,12 @@ class HfApi:
         self,
         script: str,
         *,
-        script_args: Optional[
-        dependencies: Optional[
+        script_args: Optional[list[str]] = None,
+        dependencies: Optional[list[str]] = None,
         python: Optional[str] = None,
         image: Optional[str] = None,
-        env: Optional[
-        secrets: Optional[
+        env: Optional[dict[str, Any]] = None,
+        secrets: Optional[dict[str, Any]] = None,
         flavor: Optional[SpaceHardware] = None,
         timeout: Optional[Union[int, float, str]] = None,
         namespace: Optional[str] = None,
@@ -10286,12 +10046,12 @@ class HfApi:
 
         Args:
             script (`str`):
-                Path or URL of the UV script.
+                Path or URL of the UV script, or a command.
 
-            script_args (`
-                Arguments to pass to the script.
+            script_args (`list[str]`, *optional*)
+                Arguments to pass to the script or command.
 
-            dependencies (`
+            dependencies (`list[str]`, *optional*)
                 Dependencies to use to run the UV script.
 
             python (`str`, *optional*)
@@ -10300,10 +10060,10 @@ class HfApi:
             image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm"):
                 Use a custom Docker image with `uv` installed.
 
-            env (`
+            env (`dict[str, Any]`, *optional*):
                 Defines the environment variables for the Job.
 
-            secrets (`
+            secrets (`dict[str, Any]`, *optional*):
                 Defines the secret environment variables for the Job.
 
             flavor (`str`, *optional*):
@@ -10324,16 +10084,477 @@ class HfApi:
 
         Example:
 
+            Run a script from a URL:
+
             ```python
             >>> from huggingface_hub import run_uv_job
             >>> script = "https://raw.githubusercontent.com/huggingface/trl/refs/heads/main/trl/scripts/sft.py"
-            >>>
+            >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"]
+            >>> run_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small")
+            ```
+
+            Run a local script:
+
+            ```python
+            >>> from huggingface_hub import run_uv_job
+            >>> script = "my_sft.py"
+            >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"]
+            >>> run_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small")
+            ```
+
+            Run a command:
+
+            ```python
+            >>> from huggingface_hub import run_uv_job
+            >>> script = "lighteval"
+            >>> script_args= ["endpoint", "inference-providers", "model_name=openai/gpt-oss-20b,provider=auto", "lighteval|gsm8k|0|0"]
+            >>> run_uv_job(script, script_args=script_args, dependencies=["lighteval"], flavor="a10g-small")
             ```
         """
         image = image or "ghcr.io/astral-sh/uv:python3.12-bookworm"
         env = env or {}
         secrets = secrets or {}
 
+        # Build command
+        command, env, secrets = self._create_uv_command_env_and_secrets(
+            script=script,
+            script_args=script_args,
+            dependencies=dependencies,
+            python=python,
+            env=env,
+            secrets=secrets,
+            namespace=namespace,
+            token=token,
+            _repo=_repo,
+        )
+        # Create RunCommand args
+        return self.run_job(
+            image=image,
+            command=command,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+            namespace=namespace,
+            token=token,
+        )
+
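`run_uv_job` now delegates command assembly to `_create_uv_command_env_and_secrets` (added later in this hunk). Based on the visible context lines, the non-upload path builds a `uv run` invocation roughly like this; the exact `uv` flags used for `dependencies` and `python` are not shown in the diff, so the ones below are assumptions:

```python
# Sketch (assumptions flagged): assemble the uv invocation for a URL or command.
def build_uv_command(script, script_args=None, dependencies=None, python=None):
    uv_args = []
    for dep in dependencies or []:
        uv_args += ["--with", dep]       # assumption: uv's --with flag
    if python:
        uv_args += ["--python", python]  # assumption: uv's --python flag
    # This final line is taken verbatim from the diff's context lines:
    return ["uv", "run"] + uv_args + [script] + (script_args or [])

print(build_uv_command("sft.py", ["--push_to_hub"], dependencies=["trl"]))
# ['uv', 'run', '--with', 'trl', 'sft.py', '--push_to_hub']
```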
+    def create_scheduled_job(
+        self,
+        *,
+        image: str,
+        command: list[str],
+        schedule: str,
+        suspend: Optional[bool] = None,
+        concurrency: Optional[bool] = None,
+        env: Optional[dict[str, Any]] = None,
+        secrets: Optional[dict[str, Any]] = None,
+        flavor: Optional[SpaceHardware] = None,
+        timeout: Optional[Union[int, float, str]] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> ScheduledJobInfo:
+        """
+        Create scheduled compute Jobs on Hugging Face infrastructure.
+
+        Args:
+            image (`str`):
+                The Docker image to use.
+                Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
+                Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
+
+            command (`list[str]`):
+                The command to run. Example: `["echo", "hello"]`.
+
+            schedule (`str`):
+                One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
+                CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
+
+            suspend (`bool`, *optional*):
+                If True, the scheduled Job is suspended (paused). Defaults to False.
+
+            concurrency (`bool`, *optional*):
+                If True, multiple instances of this Job can run concurrently. Defaults to False.
+
+            env (`dict[str, Any]`, *optional*):
+                Defines the environment variables for the Job.
+
+            secrets (`dict[str, Any]`, *optional*):
+                Defines the secret environment variables for the Job.
+
+            flavor (`str`, *optional*):
+                Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
+                Defaults to `"cpu-basic"`.
+
+            timeout (`Union[int, float, str]`, *optional*):
+                Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
+                Example: `300` or `"5m"` for 5 minutes.
+
+            namespace (`str`, *optional*):
+                The namespace where the Job will be created. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+
+        Example:
+            Create your first scheduled Job:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_job
+            >>> create_scheduled_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"], schedule="@hourly")
+            ```
+
+            Use a CRON schedule expression:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_job
+            >>> create_scheduled_job(image="python:3.12", command=["python", "-c" ,"print('this runs every 5min')"], schedule="*/5 * * * *")
+            ```
+
+            Create a scheduled GPU Job:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_job
+            >>> image = "pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"
+            >>> command = ["python", "-c", "import torch; print(f"This code ran with the following GPU: {torch.cuda.get_device_name()}")"]
+            >>> create_scheduled_job(image, command, flavor="a10g-small", schedule="@hourly")
+            ```
+
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+
+        # prepare payload to send to HF Jobs API
+        job_spec = _create_job_spec(
+            image=image,
+            command=command,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+        )
+        input_json: dict[str, Any] = {
+            "jobSpec": job_spec,
+            "schedule": schedule,
+        }
+        if concurrency is not None:
+            input_json["concurrency"] = concurrency
+        if suspend is not None:
+            input_json["suspend"] = suspend
+        response = get_session().post(
+            f"https://huggingface.co/api/scheduled-jobs/{namespace}",
+            json=input_json,
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        scheduled_job_info = response.json()
+        return ScheduledJobInfo(**scheduled_job_info)
+
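Per the body above, the scheduled-jobs endpoint receives the job spec wrapped with the schedule and the optional flags. A sketch of that request body; the inner spec keys mirror the ones the removed `run_job` code built (`command`, `arguments`, `environment`, `flavor`, `dockerImage`), on the assumption that `_create_job_spec` kept the same shape:

```python
# Sketch of the POST body for /api/scheduled-jobs/{namespace} (assumed spec keys).
input_json = {
    "jobSpec": {
        "command": ["python", "-c", "print('hi')"],
        "arguments": [],
        "environment": {},
        "flavor": "cpu-basic",
        "dockerImage": "python:3.12",
    },
    "schedule": "*/5 * * * *",  # every 5 minutes
    "suspend": False,           # only sent when explicitly set
}
print(sorted(input_json))
```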
+    def list_scheduled_jobs(
+        self,
+        *,
+        timeout: Optional[int] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> list[ScheduledJobInfo]:
+        """
+        List scheduled compute Jobs on Hugging Face infrastructure.
+
+        Args:
+            timeout (`float`, *optional*):
+                Whether to set a timeout for the request to the Hub.
+
+            namespace (`str`, *optional*):
+                The namespace from where it lists the jobs. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        response = get_session().get(
+            f"{self.endpoint}/api/scheduled-jobs/{namespace}",
+            headers=self._build_hf_headers(token=token),
+            timeout=timeout,
+        )
+        hf_raise_for_status(response)
+        return [ScheduledJobInfo(**scheduled_job_info) for scheduled_job_info in response.json()]
+
+    def inspect_scheduled_job(
+        self,
+        *,
+        scheduled_job_id: str,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> ScheduledJobInfo:
+        """
+        Inspect a scheduled compute Job on Hugging Face infrastructure.
+
+        Args:
+            scheduled_job_id (`str`):
+                ID of the scheduled Job.
+
+            namespace (`str`, *optional*):
+                The namespace where the scheduled Job is. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+
+        Example:
+
+            ```python
+            >>> from huggingface_hub import inspect_job, create_scheduled_job
+            >>> scheduled_job = create_scheduled_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"], schedule="@hourly")
+            >>> inspect_scheduled_job(scheduled_job.id)
+            ```
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        response = get_session().get(
+            f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        return ScheduledJobInfo(**response.json())
+
+    def delete_scheduled_job(
+        self,
+        *,
+        scheduled_job_id: str,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        """
+        Delete a scheduled compute Job on Hugging Face infrastructure.
+
+        Args:
+            scheduled_job_id (`str`):
+                ID of the scheduled Job.
+
+            namespace (`str`, *optional*):
+                The namespace where the scheduled Job is. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        response = get_session().delete(
+            f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+
+    def suspend_scheduled_job(
+        self,
+        *,
+        scheduled_job_id: str,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        """
+        Suspend (pause) a scheduled compute Job on Hugging Face infrastructure.
+
+        Args:
+            scheduled_job_id (`str`):
+                ID of the scheduled Job.
+
+            namespace (`str`, *optional*):
+                The namespace where the scheduled Job is. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        get_session().post(
+            f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}/suspend",
+            headers=self._build_hf_headers(token=token),
+        ).raise_for_status()
+
+    def resume_scheduled_job(
+        self,
+        *,
+        scheduled_job_id: str,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        """
+        Resume (unpause) a scheduled compute Job on Hugging Face infrastructure.
+
+        Args:
+            scheduled_job_id (`str`):
+                ID of the scheduled Job.
+
+            namespace (`str`, *optional*):
+                The namespace where the scheduled Job is. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        get_session().post(
+            f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}/resume",
+            headers=self._build_hf_headers(token=token),
+        ).raise_for_status()
+
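Suspend and resume are fire-and-forget POSTs; both take the id keyword-only. A usage sketch with a placeholder id:

```python
# Sketch: pause a schedule, then re-enable it (placeholder id).
from huggingface_hub import HfApi

api = HfApi()
api.suspend_scheduled_job(scheduled_job_id="scheduled-job-id-123")
api.resume_scheduled_job(scheduled_job_id="scheduled-job-id-123")
```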
+    @experimental
+    def create_scheduled_uv_job(
+        self,
+        script: str,
+        *,
+        script_args: Optional[list[str]] = None,
+        schedule: str,
+        suspend: Optional[bool] = None,
+        concurrency: Optional[bool] = None,
+        dependencies: Optional[list[str]] = None,
+        python: Optional[str] = None,
+        image: Optional[str] = None,
+        env: Optional[dict[str, Any]] = None,
+        secrets: Optional[dict[str, Any]] = None,
+        flavor: Optional[SpaceHardware] = None,
+        timeout: Optional[Union[int, float, str]] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+        _repo: Optional[str] = None,
+    ) -> ScheduledJobInfo:
+        """
+        Run a UV script Job on Hugging Face infrastructure.
+
+        Args:
+            script (`str`):
+                Path or URL of the UV script, or a command.
+
+            script_args (`list[str]`, *optional*)
+                Arguments to pass to the script, or a command.
+
+            schedule (`str`):
+                One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
+                CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
+
+            suspend (`bool`, *optional*):
+                If True, the scheduled Job is suspended (paused). Defaults to False.
+
+            concurrency (`bool`, *optional*):
+                If True, multiple instances of this Job can run concurrently. Defaults to False.
+
+            dependencies (`list[str]`, *optional*)
+                Dependencies to use to run the UV script.
+
+            python (`str`, *optional*)
+                Use a specific Python version. Default is 3.12.
+
+            image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm"):
+                Use a custom Docker image with `uv` installed.
+
+            env (`dict[str, Any]`, *optional*):
+                Defines the environment variables for the Job.
+
+            secrets (`dict[str, Any]`, *optional*):
+                Defines the secret environment variables for the Job.
+
+            flavor (`str`, *optional*):
+                Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
+                Defaults to `"cpu-basic"`.
+
+            timeout (`Union[int, float, str]`, *optional*):
+                Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
+                Example: `300` or `"5m"` for 5 minutes.
+
+            namespace (`str`, *optional*):
+                The namespace where the Job will be created. Defaults to the current user's namespace.
+
+            token `(Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+
+        Example:
+
+            Schedule a script from a URL:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_uv_job
+            >>> script = "https://raw.githubusercontent.com/huggingface/trl/refs/heads/main/trl/scripts/sft.py"
+            >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"]
+            >>> create_scheduled_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small", schedule="@weekly")
+            ```
+
+            Schedule a local script:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_uv_job
+            >>> script = "my_sft.py"
+            >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"]
+            >>> create_scheduled_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small", schedule="@weekly")
+            ```
+
+            Schedule a command:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_uv_job
+            >>> script = "lighteval"
+            >>> script_args= ["endpoint", "inference-providers", "model_name=openai/gpt-oss-20b,provider=auto", "lighteval|gsm8k|0|0"]
+            >>> create_scheduled_uv_job(script, script_args=script_args, dependencies=["lighteval"], flavor="a10g-small", schedule="@weekly")
+            ```
+        """
+        image = image or "ghcr.io/astral-sh/uv:python3.12-bookworm"
+        # Build command
+        command, env, secrets = self._create_uv_command_env_and_secrets(
+            script=script,
+            script_args=script_args,
+            dependencies=dependencies,
+            python=python,
+            env=env,
+            secrets=secrets,
+            namespace=namespace,
+            token=token,
+            _repo=_repo,
+        )
+        # Create RunCommand args
+        return self.create_scheduled_job(
+            image=image,
+            command=command,
+            schedule=schedule,
+            suspend=suspend,
+            concurrency=concurrency,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+            namespace=namespace,
+            token=token,
+        )
+
|
+
def _create_uv_command_env_and_secrets(
|
|
10543
|
+
self,
|
|
10544
|
+
*,
|
|
10545
|
+
script: str,
|
|
10546
|
+
script_args: Optional[list[str]],
|
|
10547
|
+
dependencies: Optional[list[str]],
|
|
10548
|
+
python: Optional[str],
|
|
10549
|
+
env: Optional[dict[str, Any]],
|
|
10550
|
+
secrets: Optional[dict[str, Any]],
|
|
10551
|
+
namespace: Optional[str],
|
|
10552
|
+
token: Union[bool, str, None],
|
|
10553
|
+
_repo: Optional[str],
|
|
10554
|
+
) -> tuple[list[str], dict[str, Any], dict[str, Any]]:
|
|
10555
|
+
env = env or {}
|
|
10556
|
+
secrets = secrets or {}
|
|
10557
|
+
|
|
10337
10558
|
# Build command
|
|
10338
10559
|
uv_args = []
|
|
10339
10560
|
if dependencies:
|
|
@@ -10346,8 +10567,9 @@ class HfApi:
         if namespace is None:
             namespace = self.whoami(token=token)["name"]
 
-
-
+        is_url = script.startswith("http://") or script.startswith("https://")
+        if is_url or not Path(script).is_file():
+            # Direct URL execution or command - no upload needed
             command = ["uv", "run"] + uv_args + [script] + script_args
         else:
             # Local file - upload to HF
@@ -10358,7 +10580,6 @@ class HfApi:
             repo_id = _repo
             if "/" not in repo_id:
                 repo_id = f"{namespace}/{repo_id}"
-            repo_id = _repo
         else:
             repo_id = f"{namespace}/hf-cli-jobs-uv-run-scripts"
 
@@ -10375,15 +10596,15 @@ class HfApi:
             with open(script_path, "r") as f:
                 script_content = f.read()
 
-            self.upload_file(
+            commit_hash = self.upload_file(
                 path_or_fileobj=script_content.encode(),
                 path_in_repo=filename,
                 repo_id=repo_id,
                 repo_type="dataset",
-            )
+            ).oid
 
-            script_url = f"
-            repo_url = f"
+            script_url = f"{self.endpoint}/datasets/{repo_id}/resolve/{commit_hash}/{filename}"
+            repo_url = f"{self.endpoint}/datasets/{repo_id}"
 
             logger.debug(f"✓ Script uploaded to: {repo_url}/blob/main/{filename}")
 
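Capturing `upload_file(...).oid` lets the script URL resolve at the exact commit that was just written, so later pushes to the scripts repo cannot change what a queued or scheduled job executes. The resulting URL shape, as a sketch with placeholder values:

```python
# Sketch: pin the uploaded script to an immutable revision instead of "main".
endpoint = "https://huggingface.co"
repo_id = "user/hf-cli-jobs-uv-run-scripts"   # placeholder repo
filename = "my_sft.py"                        # placeholder filename
commit_hash = "0123456789abcdef"              # CommitInfo.oid from upload_file
script_url = f"{endpoint}/datasets/{repo_id}/resolve/{commit_hash}/{filename}"
print(script_url)
```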
@@ -10440,18 +10661,7 @@ class HfApi:
             pre_command = ["python", "-c", '"' + "; ".join(pre_command) + '"']
             command = ["uv", "run"] + uv_args + ["/tmp/script.py"] + script_args
             command = ["bash", "-c", " ".join(pre_command) + " && " + " ".join(command)]
-
-        # Create RunCommand args
-        return self.run_job(
-            image=image,
-            command=command,
-            env=env,
-            secrets=secrets,
-            flavor=flavor,
-            timeout=timeout,
-            namespace=namespace,
-            token=token,
-        )
+        return command, env, secrets
 
 
 def _parse_revision_from_pr_url(pr_url: str) -> str:
@@ -10473,7 +10683,6 @@ api = HfApi()
 
 whoami = api.whoami
 auth_check = api.auth_check
-get_token_permission = api.get_token_permission
 
 list_models = api.list_models
 model_info = api.model_info
@@ -10503,7 +10712,6 @@ get_dataset_tags = api.get_dataset_tags
 create_commit = api.create_commit
 create_repo = api.create_repo
 delete_repo = api.delete_repo
-update_repo_visibility = api.update_repo_visibility
 update_repo_settings = api.update_repo_settings
 move_repo = api.move_repo
 upload_file = api.upload_file
@@ -10617,3 +10825,10 @@ list_jobs = api.list_jobs
 inspect_job = api.inspect_job
 cancel_job = api.cancel_job
 run_uv_job = api.run_uv_job
+create_scheduled_job = api.create_scheduled_job
+list_scheduled_jobs = api.list_scheduled_jobs
+inspect_scheduled_job = api.inspect_scheduled_job
+delete_scheduled_job = api.delete_scheduled_job
+suspend_scheduled_job = api.suspend_scheduled_job
+resume_scheduled_job = api.resume_scheduled_job
+create_scheduled_uv_job = api.create_scheduled_uv_job