huggingface-hub 0.36.0rc0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub might be problematic. Click here for more details.

Files changed (132)
  1. huggingface_hub/__init__.py +33 -45
  2. huggingface_hub/_commit_api.py +39 -43
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +20 -20
  6. huggingface_hub/_login.py +17 -43
  7. huggingface_hub/_oauth.py +8 -8
  8. huggingface_hub/_snapshot_download.py +135 -50
  9. huggingface_hub/_space_api.py +4 -4
  10. huggingface_hub/_tensorboard_logger.py +5 -5
  11. huggingface_hub/_upload_large_folder.py +18 -32
  12. huggingface_hub/_webhooks_payload.py +3 -3
  13. huggingface_hub/_webhooks_server.py +2 -2
  14. huggingface_hub/cli/__init__.py +0 -14
  15. huggingface_hub/cli/_cli_utils.py +143 -39
  16. huggingface_hub/cli/auth.py +105 -171
  17. huggingface_hub/cli/cache.py +594 -361
  18. huggingface_hub/cli/download.py +120 -112
  19. huggingface_hub/cli/hf.py +38 -41
  20. huggingface_hub/cli/jobs.py +689 -1017
  21. huggingface_hub/cli/lfs.py +120 -143
  22. huggingface_hub/cli/repo.py +282 -216
  23. huggingface_hub/cli/repo_files.py +50 -84
  24. huggingface_hub/cli/system.py +6 -25
  25. huggingface_hub/cli/upload.py +198 -220
  26. huggingface_hub/cli/upload_large_folder.py +91 -106
  27. huggingface_hub/community.py +5 -5
  28. huggingface_hub/constants.py +17 -52
  29. huggingface_hub/dataclasses.py +135 -21
  30. huggingface_hub/errors.py +47 -30
  31. huggingface_hub/fastai_utils.py +8 -9
  32. huggingface_hub/file_download.py +351 -303
  33. huggingface_hub/hf_api.py +398 -570
  34. huggingface_hub/hf_file_system.py +101 -66
  35. huggingface_hub/hub_mixin.py +32 -54
  36. huggingface_hub/inference/_client.py +177 -162
  37. huggingface_hub/inference/_common.py +38 -54
  38. huggingface_hub/inference/_generated/_async_client.py +218 -258
  39. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  40. huggingface_hub/inference/_generated/types/base.py +10 -7
  41. huggingface_hub/inference/_generated/types/chat_completion.py +16 -16
  42. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  43. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  44. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  45. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  46. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  47. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  48. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  49. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  50. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  51. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  52. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  53. huggingface_hub/inference/_generated/types/translation.py +2 -2
  54. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  55. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  56. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  57. huggingface_hub/inference/_mcp/agent.py +3 -3
  58. huggingface_hub/inference/_mcp/constants.py +1 -2
  59. huggingface_hub/inference/_mcp/mcp_client.py +33 -22
  60. huggingface_hub/inference/_mcp/types.py +10 -10
  61. huggingface_hub/inference/_mcp/utils.py +4 -4
  62. huggingface_hub/inference/_providers/__init__.py +12 -4
  63. huggingface_hub/inference/_providers/_common.py +62 -24
  64. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  65. huggingface_hub/inference/_providers/cohere.py +3 -3
  66. huggingface_hub/inference/_providers/fal_ai.py +25 -25
  67. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  68. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  69. huggingface_hub/inference/_providers/hf_inference.py +13 -13
  70. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  71. huggingface_hub/inference/_providers/nebius.py +10 -10
  72. huggingface_hub/inference/_providers/novita.py +5 -5
  73. huggingface_hub/inference/_providers/nscale.py +4 -4
  74. huggingface_hub/inference/_providers/replicate.py +15 -15
  75. huggingface_hub/inference/_providers/sambanova.py +6 -6
  76. huggingface_hub/inference/_providers/together.py +7 -7
  77. huggingface_hub/lfs.py +21 -94
  78. huggingface_hub/repocard.py +15 -16
  79. huggingface_hub/repocard_data.py +57 -57
  80. huggingface_hub/serialization/__init__.py +0 -1
  81. huggingface_hub/serialization/_base.py +9 -9
  82. huggingface_hub/serialization/_dduf.py +7 -7
  83. huggingface_hub/serialization/_torch.py +28 -28
  84. huggingface_hub/utils/__init__.py +11 -6
  85. huggingface_hub/utils/_auth.py +5 -5
  86. huggingface_hub/utils/_cache_manager.py +49 -74
  87. huggingface_hub/utils/_deprecation.py +1 -1
  88. huggingface_hub/utils/_dotenv.py +3 -3
  89. huggingface_hub/utils/_fixes.py +0 -10
  90. huggingface_hub/utils/_git_credential.py +3 -3
  91. huggingface_hub/utils/_headers.py +7 -29
  92. huggingface_hub/utils/_http.py +371 -208
  93. huggingface_hub/utils/_pagination.py +4 -4
  94. huggingface_hub/utils/_parsing.py +98 -0
  95. huggingface_hub/utils/_paths.py +5 -5
  96. huggingface_hub/utils/_runtime.py +59 -23
  97. huggingface_hub/utils/_safetensors.py +21 -21
  98. huggingface_hub/utils/_subprocess.py +9 -9
  99. huggingface_hub/utils/_telemetry.py +3 -3
  100. huggingface_hub/{commands/_cli_utils.py → utils/_terminal.py} +4 -9
  101. huggingface_hub/utils/_typing.py +3 -3
  102. huggingface_hub/utils/_validators.py +53 -72
  103. huggingface_hub/utils/_xet.py +16 -16
  104. huggingface_hub/utils/_xet_progress_reporting.py +1 -1
  105. huggingface_hub/utils/insecure_hashlib.py +3 -9
  106. huggingface_hub/utils/tqdm.py +3 -3
  107. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/METADATA +16 -35
  108. huggingface_hub-1.0.0.dist-info/RECORD +152 -0
  109. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/entry_points.txt +0 -1
  110. huggingface_hub/commands/__init__.py +0 -27
  111. huggingface_hub/commands/delete_cache.py +0 -476
  112. huggingface_hub/commands/download.py +0 -204
  113. huggingface_hub/commands/env.py +0 -39
  114. huggingface_hub/commands/huggingface_cli.py +0 -65
  115. huggingface_hub/commands/lfs.py +0 -200
  116. huggingface_hub/commands/repo.py +0 -151
  117. huggingface_hub/commands/repo_files.py +0 -132
  118. huggingface_hub/commands/scan_cache.py +0 -183
  119. huggingface_hub/commands/tag.py +0 -161
  120. huggingface_hub/commands/upload.py +0 -318
  121. huggingface_hub/commands/upload_large_folder.py +0 -131
  122. huggingface_hub/commands/user.py +0 -208
  123. huggingface_hub/commands/version.py +0 -40
  124. huggingface_hub/inference_api.py +0 -217
  125. huggingface_hub/keras_mixin.py +0 -497
  126. huggingface_hub/repository.py +0 -1471
  127. huggingface_hub/serialization/_tensorflow.py +0 -92
  128. huggingface_hub/utils/_hf_folder.py +0 -68
  129. huggingface_hub-0.36.0rc0.dist-info/RECORD +0 -170
  130. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/LICENSE +0 -0
  131. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/WHEEL +0 -0
  132. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/top_level.txt +0 -0
huggingface_hub/hf_api.py CHANGED
@@ -33,22 +33,17 @@ from typing import (
33
33
  Any,
34
34
  BinaryIO,
35
35
  Callable,
36
- Dict,
37
36
  Iterable,
38
37
  Iterator,
39
- List,
40
38
  Literal,
41
39
  Optional,
42
- Tuple,
43
- Type,
44
40
  TypeVar,
45
41
  Union,
46
42
  overload,
47
43
  )
48
44
  from urllib.parse import quote
49
45
 
50
- import requests
51
- from requests.exceptions import HTTPError
46
+ import httpx
52
47
  from tqdm.auto import tqdm as base_tqdm
53
48
  from tqdm.contrib.concurrent import thread_map
54
49
 
@@ -76,41 +71,18 @@ from .community import (
76
71
  DiscussionWithDetails,
77
72
  deserialize_event,
78
73
  )
79
- from .constants import (
80
- DEFAULT_ETAG_TIMEOUT, # noqa: F401 # kept for backward compatibility
81
- DEFAULT_REQUEST_TIMEOUT, # noqa: F401 # kept for backward compatibility
82
- DEFAULT_REVISION, # noqa: F401 # kept for backward compatibility
83
- DISCUSSION_STATUS, # noqa: F401 # kept for backward compatibility
84
- DISCUSSION_TYPES, # noqa: F401 # kept for backward compatibility
85
- ENDPOINT, # noqa: F401 # kept for backward compatibility
86
- INFERENCE_ENDPOINTS_ENDPOINT, # noqa: F401 # kept for backward compatibility
87
- REGEX_COMMIT_OID, # noqa: F401 # kept for backward compatibility
88
- REPO_TYPE_MODEL, # noqa: F401 # kept for backward compatibility
89
- REPO_TYPES, # noqa: F401 # kept for backward compatibility
90
- REPO_TYPES_MAPPING, # noqa: F401 # kept for backward compatibility
91
- REPO_TYPES_URL_PREFIXES, # noqa: F401 # kept for backward compatibility
92
- SAFETENSORS_INDEX_FILE, # noqa: F401 # kept for backward compatibility
93
- SAFETENSORS_MAX_HEADER_LENGTH, # noqa: F401 # kept for backward compatibility
94
- SAFETENSORS_SINGLE_FILE, # noqa: F401 # kept for backward compatibility
95
- SPACES_SDK_TYPES, # noqa: F401 # kept for backward compatibility
96
- WEBHOOK_DOMAIN_T, # noqa: F401 # kept for backward compatibility
97
- DiscussionStatusFilter, # noqa: F401 # kept for backward compatibility
98
- DiscussionTypeFilter, # noqa: F401 # kept for backward compatibility
99
- )
100
74
  from .errors import (
101
75
  BadRequestError,
102
- EntryNotFoundError,
103
76
  GatedRepoError,
104
77
  HfHubHTTPError,
78
+ RemoteEntryNotFoundError,
105
79
  RepositoryNotFoundError,
106
80
  RevisionNotFoundError,
107
81
  )
108
- from .file_download import HfFileMetadata, get_hf_file_metadata, hf_hub_url
82
+ from .file_download import DryRunFileInfo, HfFileMetadata, get_hf_file_metadata, hf_hub_url
109
83
  from .repocard_data import DatasetCardData, ModelCardData, SpaceCardData
110
84
  from .utils import (
111
85
  DEFAULT_IGNORE_PATTERNS,
112
- HfFolder, # noqa: F401 # kept for backward compatibility
113
- LocalTokenNotFoundError,
114
86
  NotASafetensorsRepoError,
115
87
  SafetensorsFileMetadata,
116
88
  SafetensorsParsingError,
@@ -131,7 +103,7 @@ from .utils import (
131
103
  )
132
104
  from .utils import tqdm as hf_tqdm
133
105
  from .utils._auth import _get_token_from_environment, _get_token_from_file, _get_token_from_google_colab
134
- from .utils._deprecation import _deprecate_arguments, _deprecate_method
106
+ from .utils._deprecation import _deprecate_arguments
135
107
  from .utils._typing import CallableT
136
108
  from .utils.endpoint_helpers import _is_emission_within_threshold
137
109
 
@@ -173,7 +145,6 @@ ExpandModelProperty_T = Literal[
173
145
  "trendingScore",
174
146
  "usedStorage",
175
147
  "widgetData",
176
- "xetEnabled",
177
148
  ]
178
149
 
179
150
  ExpandDatasetProperty_T = Literal[
@@ -196,7 +167,6 @@ ExpandDatasetProperty_T = Literal[
196
167
  "tags",
197
168
  "trendingScore",
198
169
  "usedStorage",
199
- "xetEnabled",
200
170
  ]
201
171
 
202
172
  ExpandSpaceProperty_T = Literal[
@@ -218,7 +188,6 @@ ExpandSpaceProperty_T = Literal[
218
188
  "tags",
219
189
  "trendingScore",
220
190
  "usedStorage",
221
- "xetEnabled",
222
191
  ]
223
192
 
224
193
  USERNAME_PLACEHOLDER = "hf_user"
@@ -236,7 +205,7 @@ _AUTH_CHECK_NO_REPO_ERROR_MESSAGE = (
236
205
  logger = logging.get_logger(__name__)
237
206
 
238
207
 
239
- def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tuple[Optional[str], Optional[str], str]:
208
+ def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> tuple[Optional[str], Optional[str], str]:
240
209
  """
241
210
  Returns the repo type and ID from a huggingface.co URL linking to a
242
211
  repository
@@ -346,8 +315,8 @@ class BlobLfsInfo(dict):
346
315
  class BlobSecurityInfo(dict):
347
316
  safe: bool # duplicate information with "status" field, keeping it for backward compatibility
348
317
  status: str
349
- av_scan: Optional[Dict]
350
- pickle_import_scan: Optional[Dict]
318
+ av_scan: Optional[dict]
319
+ pickle_import_scan: Optional[dict]
351
320
 
352
321
  def __post_init__(self): # hack to make BlogSecurityInfo backward compatible
353
322
  self.update(asdict(self))
@@ -367,7 +336,7 @@ class TransformersInfo(dict):
367
336
 
368
337
  @dataclass
369
338
  class SafeTensorsInfo(dict):
370
- parameters: Dict[str, int]
339
+ parameters: dict[str, int]
371
340
  total: int
372
341
 
373
342
  def __post_init__(self): # hack to make SafeTensorsInfo backward compatible
@@ -410,12 +379,6 @@ class CommitInfo(str):
410
379
 
411
380
  repo_url (`RepoUrl`):
412
381
  Repo URL of the commit containing info like repo_id, repo_type, etc.
413
-
414
- _url (`str`, *optional*):
415
- Legacy url for `str` compatibility. Can be the url to the uploaded file on the Hub (if returned by
416
- [`upload_file`]), to the uploaded folder on the Hub (if returned by [`upload_folder`]) or to the commit on
417
- the Hub (if returned by [`create_commit`]). Defaults to `commit_url`. It is deprecated to use this
418
- attribute. Please use `commit_url` instead.
419
382
  """
420
383
 
421
384
  commit_url: str
@@ -429,13 +392,10 @@ class CommitInfo(str):
429
392
 
430
393
  # Computed from `pr_url` in `__post_init__`
431
394
  pr_revision: Optional[str] = field(init=False)
432
- pr_num: Optional[str] = field(init=False)
433
-
434
- # legacy url for `str` compatibility (ex: url to uploaded file, url to uploaded folder, url to PR, etc.)
435
- _url: str = field(repr=False, default=None) # type: ignore # defaults to `commit_url`
395
+ pr_num: Optional[int] = field(init=False)
436
396
 
437
- def __new__(cls, *args, commit_url: str, _url: Optional[str] = None, **kwargs):
438
- return str.__new__(cls, _url or commit_url)
397
+ def __new__(cls, *args, commit_url: str, **kwargs):
398
+ return str.__new__(cls, commit_url)
439
399
 
440
400
  def __post_init__(self):
441
401
  """Populate pr-related fields after initialization.
@@ -470,7 +430,7 @@ class AccessRequest:
470
430
  Timestamp of the request.
471
431
  status (`Literal["pending", "accepted", "rejected"]`):
472
432
  Status of the request. Can be one of `["pending", "accepted", "rejected"]`.
473
- fields (`Dict[str, Any]`, *optional*):
433
+ fields (`dict[str, Any]`, *optional*):
474
434
  Additional fields filled by the user in the gate form.
475
435
  """
476
436
 
@@ -481,7 +441,7 @@ class AccessRequest:
481
441
  status: Literal["pending", "accepted", "rejected"]
482
442
 
483
443
  # Additional fields filled by the user in the gate form
484
- fields: Optional[Dict[str, Any]] = None
444
+ fields: Optional[dict[str, Any]] = None
485
445
 
486
446
 
487
447
  @dataclass
@@ -512,9 +472,9 @@ class WebhookInfo:
512
472
  URL of the webhook.
513
473
  job (`JobSpec`, *optional*):
514
474
  Specifications of the Job to trigger.
515
- watched (`List[WebhookWatchedItem]`):
475
+ watched (`list[WebhookWatchedItem]`):
516
476
  List of items watched by the webhook, see [`WebhookWatchedItem`].
517
- domains (`List[WEBHOOK_DOMAIN_T]`):
477
+ domains (`list[WEBHOOK_DOMAIN_T]`):
518
478
  List of domains the webhook is watching. Can be one of `["repo", "discussions"]`.
519
479
  secret (`str`, *optional*):
520
480
  Secret of the webhook.
@@ -525,8 +485,8 @@ class WebhookInfo:
525
485
  id: str
526
486
  url: Optional[str]
527
487
  job: Optional[JobSpec]
528
- watched: List[WebhookWatchedItem]
529
- domains: List[constants.WEBHOOK_DOMAIN_T]
488
+ watched: list[WebhookWatchedItem]
489
+ domains: list[constants.WEBHOOK_DOMAIN_T]
530
490
  secret: Optional[str]
531
491
  disabled: bool
532
492
 
@@ -771,17 +731,17 @@ class ModelInfo:
771
731
  gated (`Literal["auto", "manual", False]`, *optional*):
772
732
  Is the repo gated.
773
733
  If so, whether there is manual or automatic approval.
774
- gguf (`Dict`, *optional*):
734
+ gguf (`dict`, *optional*):
775
735
  GGUF information of the model.
776
736
  inference (`Literal["warm"]`, *optional*):
777
737
  Status of the model on Inference Providers. Warm if the model is served by at least one provider.
778
- inference_provider_mapping (`List[InferenceProviderMapping]`, *optional*):
738
+ inference_provider_mapping (`list[InferenceProviderMapping]`, *optional*):
779
739
  A list of [`InferenceProviderMapping`] ordered after the user's provider order.
780
740
  likes (`int`):
781
741
  Number of likes of the model.
782
742
  library_name (`str`, *optional*):
783
743
  Library associated with the model.
784
- tags (`List[str]`):
744
+ tags (`list[str]`):
785
745
  List of tags of the model. Compared to `card_data.tags`, contains extra tags computed by the Hub
786
746
  (e.g. supported libraries, model's arXiv).
787
747
  pipeline_tag (`str`, *optional*):
@@ -790,9 +750,9 @@ class ModelInfo:
790
750
  Mask token used by the model.
791
751
  widget_data (`Any`, *optional*):
792
752
  Widget data associated with the model.
793
- model_index (`Dict`, *optional*):
753
+ model_index (`dict`, *optional*):
794
754
  Model index for evaluation.
795
- config (`Dict`, *optional*):
755
+ config (`dict`, *optional*):
796
756
  Model configuration.
797
757
  transformers_info (`TransformersInfo`, *optional*):
798
758
  Transformers-specific info (auto class, processor, etc.) associated with the model.
@@ -800,13 +760,13 @@ class ModelInfo:
800
760
  Trending score of the model.
801
761
  card_data (`ModelCardData`, *optional*):
802
762
  Model Card Metadata as a [`huggingface_hub.repocard_data.ModelCardData`] object.
803
- siblings (`List[RepoSibling]`):
763
+ siblings (`list[RepoSibling]`):
804
764
  List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the model.
805
- spaces (`List[str]`, *optional*):
765
+ spaces (`list[str]`, *optional*):
806
766
  List of spaces using the model.
807
767
  safetensors (`SafeTensorsInfo`, *optional*):
808
768
  Model's safetensors information.
809
- security_repo_status (`Dict`, *optional*):
769
+ security_repo_status (`dict`, *optional*):
810
770
  Model's security scan status.
811
771
  """
812
772
 
@@ -820,25 +780,24 @@ class ModelInfo:
820
780
  downloads: Optional[int]
821
781
  downloads_all_time: Optional[int]
822
782
  gated: Optional[Literal["auto", "manual", False]]
823
- gguf: Optional[Dict]
783
+ gguf: Optional[dict]
824
784
  inference: Optional[Literal["warm"]]
825
- inference_provider_mapping: Optional[List[InferenceProviderMapping]]
785
+ inference_provider_mapping: Optional[list[InferenceProviderMapping]]
826
786
  likes: Optional[int]
827
787
  library_name: Optional[str]
828
- tags: Optional[List[str]]
788
+ tags: Optional[list[str]]
829
789
  pipeline_tag: Optional[str]
830
790
  mask_token: Optional[str]
831
791
  card_data: Optional[ModelCardData]
832
792
  widget_data: Optional[Any]
833
- model_index: Optional[Dict]
834
- config: Optional[Dict]
793
+ model_index: Optional[dict]
794
+ config: Optional[dict]
835
795
  transformers_info: Optional[TransformersInfo]
836
796
  trending_score: Optional[int]
837
- siblings: Optional[List[RepoSibling]]
838
- spaces: Optional[List[str]]
797
+ siblings: Optional[list[RepoSibling]]
798
+ spaces: Optional[list[str]]
839
799
  safetensors: Optional[SafeTensorsInfo]
840
- security_repo_status: Optional[Dict]
841
- xet_enabled: Optional[bool]
800
+ security_repo_status: Optional[dict]
842
801
 
843
802
  def __init__(self, **kwargs):
844
803
  self.id = kwargs.pop("id")
@@ -926,7 +885,6 @@ class ModelInfo:
926
885
  else None
927
886
  )
928
887
  self.security_repo_status = kwargs.pop("securityRepoStatus", None)
929
- self.xet_enabled = kwargs.pop("xetEnabled", None)
930
888
  # backwards compatibility
931
889
  self.lastModified = self.last_modified
932
890
  self.cardData = self.card_data
@@ -969,11 +927,11 @@ class DatasetInfo:
969
927
  Cumulated number of downloads of the model since its creation.
970
928
  likes (`int`):
971
929
  Number of likes of the dataset.
972
- tags (`List[str]`):
930
+ tags (`list[str]`):
973
931
  List of tags of the dataset.
974
932
  card_data (`DatasetCardData`, *optional*):
975
933
  Model Card Metadata as a [`huggingface_hub.repocard_data.DatasetCardData`] object.
976
- siblings (`List[RepoSibling]`):
934
+ siblings (`list[RepoSibling]`):
977
935
  List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the dataset.
978
936
  paperswithcode_id (`str`, *optional*):
979
937
  Papers with code ID of the dataset.
@@ -993,11 +951,10 @@ class DatasetInfo:
993
951
  downloads_all_time: Optional[int]
994
952
  likes: Optional[int]
995
953
  paperswithcode_id: Optional[str]
996
- tags: Optional[List[str]]
954
+ tags: Optional[list[str]]
997
955
  trending_score: Optional[int]
998
956
  card_data: Optional[DatasetCardData]
999
- siblings: Optional[List[RepoSibling]]
1000
- xet_enabled: Optional[bool]
957
+ siblings: Optional[list[RepoSibling]]
1001
958
 
1002
959
  def __init__(self, **kwargs):
1003
960
  self.id = kwargs.pop("id")
@@ -1043,7 +1000,6 @@ class DatasetInfo:
1043
1000
  if siblings is not None
1044
1001
  else None
1045
1002
  )
1046
- self.xet_enabled = kwargs.pop("xetEnabled", None)
1047
1003
  # backwards compatibility
1048
1004
  self.lastModified = self.last_modified
1049
1005
  self.cardData = self.card_data
@@ -1085,9 +1041,9 @@ class SpaceInfo:
1085
1041
  Subdomain of the Space.
1086
1042
  likes (`int`):
1087
1043
  Number of likes of the Space.
1088
- tags (`List[str]`):
1044
+ tags (`list[str]`):
1089
1045
  List of tags of the Space.
1090
- siblings (`List[RepoSibling]`):
1046
+ siblings (`list[RepoSibling]`):
1091
1047
  List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the Space.
1092
1048
  card_data (`SpaceCardData`, *optional*):
1093
1049
  Space Card Metadata as a [`huggingface_hub.repocard_data.SpaceCardData`] object.
@@ -1095,9 +1051,9 @@ class SpaceInfo:
1095
1051
  Space runtime information as a [`huggingface_hub.hf_api.SpaceRuntime`] object.
1096
1052
  sdk (`str`, *optional*):
1097
1053
  SDK used by the Space.
1098
- models (`List[str]`, *optional*):
1054
+ models (`list[str]`, *optional*):
1099
1055
  List of models used by the Space.
1100
- datasets (`List[str]`, *optional*):
1056
+ datasets (`list[str]`, *optional*):
1101
1057
  List of datasets used by the Space.
1102
1058
  trending_score (`int`, *optional*):
1103
1059
  Trending score of the Space.
@@ -1115,14 +1071,13 @@ class SpaceInfo:
1115
1071
  subdomain: Optional[str]
1116
1072
  likes: Optional[int]
1117
1073
  sdk: Optional[str]
1118
- tags: Optional[List[str]]
1119
- siblings: Optional[List[RepoSibling]]
1074
+ tags: Optional[list[str]]
1075
+ siblings: Optional[list[RepoSibling]]
1120
1076
  trending_score: Optional[int]
1121
1077
  card_data: Optional[SpaceCardData]
1122
1078
  runtime: Optional[SpaceRuntime]
1123
- models: Optional[List[str]]
1124
- datasets: Optional[List[str]]
1125
- xet_enabled: Optional[bool]
1079
+ models: Optional[list[str]]
1080
+ datasets: Optional[list[str]]
1126
1081
 
1127
1082
  def __init__(self, **kwargs):
1128
1083
  self.id = kwargs.pop("id")
@@ -1171,7 +1126,6 @@ class SpaceInfo:
1171
1126
  self.runtime = SpaceRuntime(runtime) if runtime else None
1172
1127
  self.models = kwargs.pop("models", None)
1173
1128
  self.datasets = kwargs.pop("datasets", None)
1174
- self.xet_enabled = kwargs.pop("xetEnabled", None)
1175
1129
  # backwards compatibility
1176
1130
  self.lastModified = self.last_modified
1177
1131
  self.cardData = self.card_data
@@ -1209,7 +1163,7 @@ class CollectionItem:
1209
1163
  id: str,
1210
1164
  type: CollectionItemType_T,
1211
1165
  position: int,
1212
- note: Optional[Dict] = None,
1166
+ note: Optional[dict] = None,
1213
1167
  **kwargs,
1214
1168
  ) -> None:
1215
1169
  self.item_object_id: str = _id # id in database
@@ -1235,7 +1189,7 @@ class Collection:
1235
1189
  Title of the collection. E.g. `"Recent models"`.
1236
1190
  owner (`str`):
1237
1191
  Owner of the collection. E.g. `"TheBloke"`.
1238
- items (`List[CollectionItem]`):
1192
+ items (`list[CollectionItem]`):
1239
1193
  List of items in the collection.
1240
1194
  last_updated (`datetime`):
1241
1195
  Date of the last update of the collection.
@@ -1256,7 +1210,7 @@ class Collection:
1256
1210
  slug: str
1257
1211
  title: str
1258
1212
  owner: str
1259
- items: List[CollectionItem]
1213
+ items: list[CollectionItem]
1260
1214
  last_updated: datetime
1261
1215
  position: int
1262
1216
  private: bool
@@ -1313,22 +1267,22 @@ class GitRefs:
1313
1267
  Object is returned by [`list_repo_refs`].
1314
1268
 
1315
1269
  Attributes:
1316
- branches (`List[GitRefInfo]`):
1270
+ branches (`list[GitRefInfo]`):
1317
1271
  A list of [`GitRefInfo`] containing information about branches on the repo.
1318
- converts (`List[GitRefInfo]`):
1272
+ converts (`list[GitRefInfo]`):
1319
1273
  A list of [`GitRefInfo`] containing information about "convert" refs on the repo.
1320
1274
  Converts are refs used (internally) to push preprocessed data in Dataset repos.
1321
- tags (`List[GitRefInfo]`):
1275
+ tags (`list[GitRefInfo]`):
1322
1276
  A list of [`GitRefInfo`] containing information about tags on the repo.
1323
- pull_requests (`List[GitRefInfo]`, *optional*):
1277
+ pull_requests (`list[GitRefInfo]`, *optional*):
1324
1278
  A list of [`GitRefInfo`] containing information about pull requests on the repo.
1325
1279
  Only returned if `include_prs=True` is set.
1326
1280
  """
1327
1281
 
1328
- branches: List[GitRefInfo]
1329
- converts: List[GitRefInfo]
1330
- tags: List[GitRefInfo]
1331
- pull_requests: Optional[List[GitRefInfo]] = None
1282
+ branches: list[GitRefInfo]
1283
+ converts: list[GitRefInfo]
1284
+ tags: list[GitRefInfo]
1285
+ pull_requests: Optional[list[GitRefInfo]] = None
1332
1286
 
1333
1287
 
1334
1288
  @dataclass
@@ -1339,7 +1293,7 @@ class GitCommitInfo:
1339
1293
  Attributes:
1340
1294
  commit_id (`str`):
1341
1295
  OID of the commit (e.g. `"e7da7f221d5bf496a48136c0cd264e630fe9fcc8"`)
1342
- authors (`List[str]`):
1296
+ authors (`list[str]`):
1343
1297
  List of authors of the commit.
1344
1298
  created_at (`datetime`):
1345
1299
  Datetime when the commit was created.
@@ -1355,7 +1309,7 @@ class GitCommitInfo:
1355
1309
 
1356
1310
  commit_id: str
1357
1311
 
1358
- authors: List[str]
1312
+ authors: list[str]
1359
1313
  created_at: datetime
1360
1314
  title: str
1361
1315
  message: str
@@ -1374,11 +1328,11 @@ class UserLikes:
1374
1328
  Name of the user for which we fetched the likes.
1375
1329
  total (`int`):
1376
1330
  Total number of likes.
1377
- datasets (`List[str]`):
1331
+ datasets (`list[str]`):
1378
1332
  List of datasets liked by the user (as repo_ids).
1379
- models (`List[str]`):
1333
+ models (`list[str]`):
1380
1334
  List of models liked by the user (as repo_ids).
1381
- spaces (`List[str]`):
1335
+ spaces (`list[str]`):
1382
1336
  List of spaces liked by the user (as repo_ids).
1383
1337
  """
1384
1338
 
@@ -1387,9 +1341,9 @@ class UserLikes:
1387
1341
  total: int
1388
1342
 
1389
1343
  # User likes
1390
- datasets: List[str]
1391
- models: List[str]
1392
- spaces: List[str]
1344
+ datasets: list[str]
1345
+ models: list[str]
1346
+ spaces: list[str]
1393
1347
 
1394
1348
 
1395
1349
  @dataclass
@@ -1507,7 +1461,7 @@ class User:
1507
1461
  num_likes: Optional[int] = None
1508
1462
  num_following: Optional[int] = None
1509
1463
  num_followers: Optional[int] = None
1510
- orgs: List[Organization] = field(default_factory=list)
1464
+ orgs: list[Organization] = field(default_factory=list)
1511
1465
 
1512
1466
  def __init__(self, **kwargs) -> None:
1513
1467
  self.username = kwargs.pop("user", "")
@@ -1540,7 +1494,7 @@ class PaperInfo:
1540
1494
  Attributes:
1541
1495
  id (`str`):
1542
1496
  arXiv paper ID.
1543
- authors (`List[str]`, **optional**):
1497
+ authors (`list[str]`, **optional**):
1544
1498
  Names of paper authors
1545
1499
  published_at (`datetime`, **optional**):
1546
1500
  Date paper published.
@@ -1563,7 +1517,7 @@ class PaperInfo:
1563
1517
  """
1564
1518
 
1565
1519
  id: str
1566
- authors: Optional[List[str]]
1520
+ authors: Optional[list[str]]
1567
1521
  published_at: Optional[datetime]
1568
1522
  title: Optional[str]
1569
1523
  summary: Optional[str]
@@ -1727,8 +1681,8 @@ class HfApi:
1727
1681
  token: Union[str, bool, None] = None,
1728
1682
  library_name: Optional[str] = None,
1729
1683
  library_version: Optional[str] = None,
1730
- user_agent: Union[Dict, str, None] = None,
1731
- headers: Optional[Dict[str, str]] = None,
1684
+ user_agent: Union[dict, str, None] = None,
1685
+ headers: Optional[dict[str, str]] = None,
1732
1686
  ) -> None:
1733
1687
  self.endpoint = endpoint if endpoint is not None else constants.ENDPOINT
1734
1688
  self.token = token
@@ -1779,7 +1733,7 @@ class HfApi:
1779
1733
  return self._thread_pool.submit(fn, *args, **kwargs)
1780
1734
 
1781
1735
  @validate_hf_hub_args
1782
- def whoami(self, token: Union[bool, str, None] = None) -> Dict:
1736
+ def whoami(self, token: Union[bool, str, None] = None) -> dict:
1783
1737
  """
1784
1738
  Call HF API to know "whoami".
1785
1739
 
@@ -1798,7 +1752,7 @@ class HfApi:
1798
1752
  )
1799
1753
  try:
1800
1754
  hf_raise_for_status(r)
1801
- except HTTPError as e:
1755
+ except HfHubHTTPError as e:
1802
1756
  if e.response.status_code == 401:
1803
1757
  error_message = "Invalid user token."
1804
1758
  # Check which token is the effective one and generate the error message accordingly
@@ -1811,48 +1765,11 @@ class HfApi:
1811
1765
  )
1812
1766
  elif effective_token == _get_token_from_file():
1813
1767
  error_message += " The token stored is invalid. Please run `hf auth login` to update it."
1814
- raise HTTPError(error_message, request=e.request, response=e.response) from e
1768
+ raise HfHubHTTPError(error_message, response=e.response) from e
1815
1769
  raise
1816
1770
  return r.json()
1817
1771
 
1818
- @_deprecate_method(
1819
- version="1.0",
1820
- message=(
1821
- "Permissions are more complex than when `get_token_permission` was first introduced. "
1822
- "OAuth and fine-grain tokens allows for more detailed permissions. "
1823
- "If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key."
1824
- ),
1825
- )
1826
- def get_token_permission(
1827
- self, token: Union[bool, str, None] = None
1828
- ) -> Literal["read", "write", "fineGrained", None]:
1829
- """
1830
- Check if a given `token` is valid and return its permissions.
1831
-
1832
- > [!WARNING]
1833
- > This method is deprecated and will be removed in version 1.0. Permissions are more complex than when
1834
- > `get_token_permission` was first introduced. OAuth and fine-grain tokens allows for more detailed permissions.
1835
- > If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key.
1836
-
1837
- For more details about tokens, please refer to https://huggingface.co/docs/hub/security-tokens#what-are-user-access-tokens.
1838
-
1839
- Args:
1840
- token (`bool` or `str`, *optional*):
1841
- A valid user access token (string). Defaults to the locally saved
1842
- token, which is the recommended method for authentication (see
1843
- https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
1844
- To disable authentication, pass `False`.
1845
-
1846
- Returns:
1847
- `Literal["read", "write", "fineGrained", None]`: Permission granted by the token ("read" or "write"). Returns `None` if no
1848
- token passed, if token is invalid or if role is not returned by the server. This typically happens when the token is an OAuth token.
1849
- """
1850
- try:
1851
- return self.whoami(token=token)["auth"]["accessToken"]["role"]
1852
- except (LocalTokenNotFoundError, HTTPError, KeyError):
1853
- return None
1854
-
1855
- def get_model_tags(self) -> Dict:
1772
+ def get_model_tags(self) -> dict:
1856
1773
  """
1857
1774
  List all valid model tags as a nested namespace object
1858
1775
  """
@@ -1861,7 +1778,7 @@ class HfApi:
1861
1778
  hf_raise_for_status(r)
1862
1779
  return r.json()
1863
1780
 
1864
- def get_dataset_tags(self) -> Dict:
1781
+ def get_dataset_tags(self) -> dict:
1865
1782
  """
1866
1783
  List all valid dataset tags as a nested namespace object.
1867
1784
  """
@@ -1870,9 +1787,6 @@ class HfApi:
1870
1787
  hf_raise_for_status(r)
1871
1788
  return r.json()
1872
1789
 
1873
- @_deprecate_arguments(
1874
- version="1.0", deprecated_args=["language", "library", "task", "tags"], custom_message="Use `filter` instead."
1875
- )
1876
1790
  @validate_hf_hub_args
1877
1791
  def list_models(
1878
1792
  self,
@@ -1880,30 +1794,25 @@ class HfApi:
1880
1794
  # Search-query parameter
1881
1795
  filter: Union[str, Iterable[str], None] = None,
1882
1796
  author: Optional[str] = None,
1883
- apps: Optional[Union[str, List[str]]] = None,
1797
+ apps: Optional[Union[str, list[str]]] = None,
1884
1798
  gated: Optional[bool] = None,
1885
1799
  inference: Optional[Literal["warm"]] = None,
1886
- inference_provider: Optional[Union[Literal["all"], "PROVIDER_T", List["PROVIDER_T"]]] = None,
1800
+ inference_provider: Optional[Union[Literal["all"], "PROVIDER_T", list["PROVIDER_T"]]] = None,
1887
1801
  model_name: Optional[str] = None,
1888
- trained_dataset: Optional[Union[str, List[str]]] = None,
1802
+ trained_dataset: Optional[Union[str, list[str]]] = None,
1889
1803
  search: Optional[str] = None,
1890
1804
  pipeline_tag: Optional[str] = None,
1891
- emissions_thresholds: Optional[Tuple[float, float]] = None,
1805
+ emissions_thresholds: Optional[tuple[float, float]] = None,
1892
1806
  # Sorting and pagination parameters
1893
1807
  sort: Union[Literal["last_modified"], str, None] = None,
1894
1808
  direction: Optional[Literal[-1]] = None,
1895
1809
  limit: Optional[int] = None,
1896
1810
  # Additional data to fetch
1897
- expand: Optional[List[ExpandModelProperty_T]] = None,
1811
+ expand: Optional[list[ExpandModelProperty_T]] = None,
1898
1812
  full: Optional[bool] = None,
1899
1813
  cardData: bool = False,
1900
1814
  fetch_config: bool = False,
1901
1815
  token: Union[bool, str, None] = None,
1902
- # Deprecated arguments - use `filter` instead
1903
- language: Optional[Union[str, List[str]]] = None,
1904
- library: Optional[Union[str, List[str]]] = None,
1905
- tags: Optional[Union[str, List[str]]] = None,
1906
- task: Optional[Union[str, List[str]]] = None,
1907
1816
  ) -> Iterable[ModelInfo]:
1908
1817
  """
1909
1818
  List models hosted on the Huggingface Hub, given some filters.
@@ -1927,20 +1836,12 @@ class HfApi:
1927
1836
  inference_provider (`Literal["all"]` or `str`, *optional*):
1928
1837
  A string to filter models on the Hub that are served by a specific provider.
1929
1838
  Pass `"all"` to get all models served by at least one provider.
1930
- library (`str` or `List`, *optional*):
1931
- Deprecated. Pass a library name in `filter` to filter models by library.
1932
- language (`str` or `List`, *optional*):
1933
- Deprecated. Pass a language in `filter` to filter models by language.
1934
1839
  model_name (`str`, *optional*):
1935
1840
  A string that contain complete or partial names for models on the
1936
1841
  Hub, such as "bert" or "bert-base-cased"
1937
- task (`str` or `List`, *optional*):
1938
- Deprecated. Pass a task in `filter` to filter models by task.
1939
1842
  trained_dataset (`str` or `List`, *optional*):
1940
1843
  A string tag or a list of string tags of the trained dataset for a
1941
1844
  model on the Hub.
1942
- tags (`str` or `List`, *optional*):
1943
- Deprecated. Pass tags in `filter` to filter models by tags.
1944
1845
  search (`str`, *optional*):
1945
1846
  A string that will be contained in the returned model ids.
1946
1847
  pipeline_tag (`str`, *optional*):
@@ -1957,10 +1858,10 @@ class HfApi:
1957
1858
  limit (`int`, *optional*):
1958
1859
  The limit on the number of models fetched. Leaving this option
1959
1860
  to `None` fetches all models.
1960
- expand (`List[ExpandModelProperty_T]`, *optional*):
1861
+ expand (`list[ExpandModelProperty_T]`, *optional*):
1961
1862
  List properties to return in the response. When used, only the properties in the list will be returned.
1962
1863
  This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed.
1963
- Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"resourceGroup"` and `"xetEnabled"`.
1864
+ Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, and `"resourceGroup"`.
1964
1865
  full (`bool`, *optional*):
1965
1866
  Whether to fetch all model data, including the `last_modified`,
1966
1867
  the `sha`, the files and the `tags`. This is set to `True` by
@@ -2016,27 +1917,15 @@ class HfApi:
2016
1917
 
2017
1918
  path = f"{self.endpoint}/api/models"
2018
1919
  headers = self._build_hf_headers(token=token)
2019
- params: Dict[str, Any] = {}
1920
+ params: dict[str, Any] = {}
2020
1921
 
2021
1922
  # Build the filter list
2022
- filter_list: List[str] = []
1923
+ filter_list: list[str] = []
2023
1924
  if filter:
2024
1925
  filter_list.extend([filter] if isinstance(filter, str) else filter)
2025
- if library:
2026
- filter_list.extend([library] if isinstance(library, str) else library)
2027
- if task:
2028
- filter_list.extend([task] if isinstance(task, str) else task)
2029
1926
  if trained_dataset:
2030
- if isinstance(trained_dataset, str):
2031
- trained_dataset = [trained_dataset]
2032
- for dataset in trained_dataset:
2033
- if not dataset.startswith("dataset:"):
2034
- dataset = f"dataset:{dataset}"
2035
- filter_list.append(dataset)
2036
- if language:
2037
- filter_list.extend([language] if isinstance(language, str) else language)
2038
- if tags:
2039
- filter_list.extend([tags] if isinstance(tags, str) else tags)
1927
+ datasets = [trained_dataset] if isinstance(trained_dataset, str) else trained_dataset
1928
+ filter_list.extend(f"dataset:{d}" if not d.startswith("dataset:") else d for d in datasets)
2040
1929
  if len(filter_list) > 0:
2041
1930
  params["filter"] = filter_list
2042
1931
 
@@ -2106,26 +1995,26 @@ class HfApi:
2106
1995
  # Search-query parameter
2107
1996
  filter: Union[str, Iterable[str], None] = None,
2108
1997
  author: Optional[str] = None,
2109
- benchmark: Optional[Union[str, List[str]]] = None,
1998
+ benchmark: Optional[Union[str, list[str]]] = None,
2110
1999
  dataset_name: Optional[str] = None,
2111
2000
  gated: Optional[bool] = None,
2112
- language_creators: Optional[Union[str, List[str]]] = None,
2113
- language: Optional[Union[str, List[str]]] = None,
2114
- multilinguality: Optional[Union[str, List[str]]] = None,
2115
- size_categories: Optional[Union[str, List[str]]] = None,
2116
- task_categories: Optional[Union[str, List[str]]] = None,
2117
- task_ids: Optional[Union[str, List[str]]] = None,
2001
+ language_creators: Optional[Union[str, list[str]]] = None,
2002
+ language: Optional[Union[str, list[str]]] = None,
2003
+ multilinguality: Optional[Union[str, list[str]]] = None,
2004
+ size_categories: Optional[Union[str, list[str]]] = None,
2005
+ task_categories: Optional[Union[str, list[str]]] = None,
2006
+ task_ids: Optional[Union[str, list[str]]] = None,
2118
2007
  search: Optional[str] = None,
2119
2008
  # Sorting and pagination parameters
2120
2009
  sort: Optional[Union[Literal["last_modified"], str]] = None,
2121
2010
  direction: Optional[Literal[-1]] = None,
2122
2011
  limit: Optional[int] = None,
2123
2012
  # Additional data to fetch
2124
- expand: Optional[List[ExpandDatasetProperty_T]] = None,
2013
+ expand: Optional[list[ExpandDatasetProperty_T]] = None,
2125
2014
  full: Optional[bool] = None,
2126
2015
  token: Union[bool, str, None] = None,
2127
2016
  # Deprecated arguments - use `filter` instead
2128
- tags: Optional[Union[str, List[str]]] = None,
2017
+ tags: Optional[Union[str, list[str]]] = None,
2129
2018
  ) -> Iterable[DatasetInfo]:
2130
2019
  """
2131
2020
  List datasets hosted on the Huggingface Hub, given some filters.
@@ -2180,10 +2069,10 @@ class HfApi:
2180
2069
  limit (`int`, *optional*):
2181
2070
  The limit on the number of datasets fetched. Leaving this option
2182
2071
  to `None` fetches all datasets.
2183
- expand (`List[ExpandDatasetProperty_T]`, *optional*):
2072
+ expand (`list[ExpandDatasetProperty_T]`, *optional*):
2184
2073
  List properties to return in the response. When used, only the properties in the list will be returned.
2185
2074
  This parameter cannot be used if `full` is passed.
2186
- Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
2075
+ Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, and `"resourceGroup"`.
2187
2076
  full (`bool`, *optional*):
2188
2077
  Whether to fetch all dataset data, including the `last_modified`,
2189
2078
  the `card_data` and the files. Can contain useful information such as the
@@ -2240,7 +2129,7 @@ class HfApi:
2240
2129
 
2241
2130
  path = f"{self.endpoint}/api/datasets"
2242
2131
  headers = self._build_hf_headers(token=token)
2243
- params: Dict[str, Any] = {}
2132
+ params: dict[str, Any] = {}
2244
2133
 
2245
2134
  # Build `filter` list
2246
2135
  filter_list = []
@@ -2327,7 +2216,7 @@ class HfApi:
2327
2216
  direction: Optional[Literal[-1]] = None,
2328
2217
  limit: Optional[int] = None,
2329
2218
  # Additional data to fetch
2330
- expand: Optional[List[ExpandSpaceProperty_T]] = None,
2219
+ expand: Optional[list[ExpandSpaceProperty_T]] = None,
2331
2220
  full: Optional[bool] = None,
2332
2221
  token: Union[bool, str, None] = None,
2333
2222
  ) -> Iterable[SpaceInfo]:
@@ -2358,10 +2247,10 @@ class HfApi:
2358
2247
  limit (`int`, *optional*):
2359
2248
  The limit on the number of Spaces fetched. Leaving this option
2360
2249
  to `None` fetches all Spaces.
2361
- expand (`List[ExpandSpaceProperty_T]`, *optional*):
2250
+ expand (`list[ExpandSpaceProperty_T]`, *optional*):
2362
2251
  List properties to return in the response. When used, only the properties in the list will be returned.
2363
2252
  This parameter cannot be used if `full` is passed.
2364
- Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
2253
+ Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, and `"resourceGroup"`.
2365
2254
  full (`bool`, *optional*):
2366
2255
  Whether to fetch all Spaces data, including the `last_modified`, `siblings`
2367
2256
  and `card_data` fields.
@@ -2379,7 +2268,7 @@ class HfApi:
2379
2268
 
2380
2269
  path = f"{self.endpoint}/api/spaces"
2381
2270
  headers = self._build_hf_headers(token=token)
2382
- params: Dict[str, Any] = {}
2271
+ params: dict[str, Any] = {}
2383
2272
  if filter is not None:
2384
2273
  params["filter"] = filter
2385
2274
  if author is not None:
@@ -2596,7 +2485,7 @@ class HfApi:
2596
2485
  timeout: Optional[float] = None,
2597
2486
  securityStatus: Optional[bool] = None,
2598
2487
  files_metadata: bool = False,
2599
- expand: Optional[List[ExpandModelProperty_T]] = None,
2488
+ expand: Optional[list[ExpandModelProperty_T]] = None,
2600
2489
  token: Union[bool, str, None] = None,
2601
2490
  ) -> ModelInfo:
2602
2491
  """
@@ -2619,10 +2508,10 @@ class HfApi:
2619
2508
  files_metadata (`bool`, *optional*):
2620
2509
  Whether or not to retrieve metadata for files in the repository
2621
2510
  (size, LFS metadata, etc). Defaults to `False`.
2622
- expand (`List[ExpandModelProperty_T]`, *optional*):
2511
+ expand (`list[ExpandModelProperty_T]`, *optional*):
2623
2512
  List properties to return in the response. When used, only the properties in the list will be returned.
2624
2513
  This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
2625
- Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
2514
+ Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"usedStorage"`, and `"resourceGroup"`.
2626
2515
  token (`bool` or `str`, *optional*):
2627
2516
  A valid user access token (string). Defaults to the locally saved
2628
2517
  token, which is the recommended method for authentication (see
@@ -2650,7 +2539,7 @@ class HfApi:
2650
2539
  if revision is None
2651
2540
  else (f"{self.endpoint}/api/models/{repo_id}/revision/{quote(revision, safe='')}")
2652
2541
  )
2653
- params: Dict = {}
2542
+ params: dict = {}
2654
2543
  if securityStatus:
2655
2544
  params["securityStatus"] = True
2656
2545
  if files_metadata:
@@ -2670,7 +2559,7 @@ class HfApi:
2670
2559
  revision: Optional[str] = None,
2671
2560
  timeout: Optional[float] = None,
2672
2561
  files_metadata: bool = False,
2673
- expand: Optional[List[ExpandDatasetProperty_T]] = None,
2562
+ expand: Optional[list[ExpandDatasetProperty_T]] = None,
2674
2563
  token: Union[bool, str, None] = None,
2675
2564
  ) -> DatasetInfo:
2676
2565
  """
@@ -2690,10 +2579,10 @@ class HfApi:
2690
2579
  files_metadata (`bool`, *optional*):
2691
2580
  Whether or not to retrieve metadata for files in the repository
2692
2581
  (size, LFS metadata, etc). Defaults to `False`.
2693
- expand (`List[ExpandDatasetProperty_T]`, *optional*):
2582
+ expand (`list[ExpandDatasetProperty_T]`, *optional*):
2694
2583
  List properties to return in the response. When used, only the properties in the list will be returned.
2695
2584
  This parameter cannot be used if `files_metadata` is passed.
2696
- Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`,`"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
2585
+ Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`,`"usedStorage"`, and `"resourceGroup"`.
2697
2586
  token (`bool` or `str`, *optional*):
2698
2587
  A valid user access token (string). Defaults to the locally saved
2699
2588
  token, which is the recommended method for authentication (see
@@ -2721,7 +2610,7 @@ class HfApi:
2721
2610
  if revision is None
2722
2611
  else (f"{self.endpoint}/api/datasets/{repo_id}/revision/{quote(revision, safe='')}")
2723
2612
  )
2724
- params: Dict = {}
2613
+ params: dict = {}
2725
2614
  if files_metadata:
2726
2615
  params["blobs"] = True
2727
2616
  if expand:
@@ -2740,7 +2629,7 @@ class HfApi:
2740
2629
  revision: Optional[str] = None,
2741
2630
  timeout: Optional[float] = None,
2742
2631
  files_metadata: bool = False,
2743
- expand: Optional[List[ExpandSpaceProperty_T]] = None,
2632
+ expand: Optional[list[ExpandSpaceProperty_T]] = None,
2744
2633
  token: Union[bool, str, None] = None,
2745
2634
  ) -> SpaceInfo:
2746
2635
  """
@@ -2760,10 +2649,10 @@ class HfApi:
2760
2649
  files_metadata (`bool`, *optional*):
2761
2650
  Whether or not to retrieve metadata for files in the repository
2762
2651
  (size, LFS metadata, etc). Defaults to `False`.
2763
- expand (`List[ExpandSpaceProperty_T]`, *optional*):
2652
+ expand (`list[ExpandSpaceProperty_T]`, *optional*):
2764
2653
  List properties to return in the response. When used, only the properties in the list will be returned.
2765
2654
  This parameter cannot be used if `full` is passed.
2766
- Possible values are `"author"`, `"cardData"`, `"createdAt"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
2655
+ Possible values are `"author"`, `"cardData"`, `"createdAt"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, and `"resourceGroup"`.
2767
2656
  token (`bool` or `str`, *optional*):
2768
2657
  A valid user access token (string). Defaults to the locally saved
2769
2658
  token, which is the recommended method for authentication (see
@@ -2791,7 +2680,7 @@ class HfApi:
2791
2680
  if revision is None
2792
2681
  else (f"{self.endpoint}/api/spaces/{repo_id}/revision/{quote(revision, safe='')}")
2793
2682
  )
2794
- params: Dict = {}
2683
+ params: dict = {}
2795
2684
  if files_metadata:
2796
2685
  params["blobs"] = True
2797
2686
  if expand:
@@ -3019,7 +2908,7 @@ class HfApi:
3019
2908
  return True
3020
2909
  except GatedRepoError: # raise specifically on gated repo
3021
2910
  raise
3022
- except (RepositoryNotFoundError, EntryNotFoundError, RevisionNotFoundError):
2911
+ except (RepositoryNotFoundError, RemoteEntryNotFoundError, RevisionNotFoundError):
3023
2912
  return False
3024
2913
 
3025
2914
  @validate_hf_hub_args
@@ -3030,7 +2919,7 @@ class HfApi:
3030
2919
  revision: Optional[str] = None,
3031
2920
  repo_type: Optional[str] = None,
3032
2921
  token: Union[str, bool, None] = None,
3033
- ) -> List[str]:
2922
+ ) -> list[str]:
3034
2923
  """
3035
2924
  Get the list of files in a given repo.
3036
2925
 
@@ -3049,7 +2938,7 @@ class HfApi:
3049
2938
  To disable authentication, pass `False`.
3050
2939
 
3051
2940
  Returns:
3052
- `List[str]`: the list of files in a given repository.
2941
+ `list[str]`: the list of files in a given repository.
3053
2942
  """
3054
2943
  return [
3055
2944
  f.rfilename
@@ -3109,7 +2998,7 @@ class HfApi:
3109
2998
  does not exist.
3110
2999
  [`~utils.RevisionNotFoundError`]:
3111
3000
  If revision is not found (error 404) on the repo.
3112
- [`~utils.EntryNotFoundError`]:
3001
+ [`~utils.RemoteEntryNotFoundError`]:
3113
3002
  If the tree (folder) does not exist (error 404) on the repo.
3114
3003
 
3115
3004
  Examples:
@@ -3251,7 +3140,7 @@ class HfApi:
3251
3140
  hf_raise_for_status(response)
3252
3141
  data = response.json()
3253
3142
 
3254
- def _format_as_git_ref_info(item: Dict) -> GitRefInfo:
3143
+ def _format_as_git_ref_info(item: dict) -> GitRefInfo:
3255
3144
  return GitRefInfo(name=item["name"], ref=item["ref"], target_commit=item["targetCommit"])
3256
3145
 
3257
3146
  return GitRefs(
@@ -3272,7 +3161,7 @@ class HfApi:
3272
3161
  token: Union[bool, str, None] = None,
3273
3162
  revision: Optional[str] = None,
3274
3163
  formatted: bool = False,
3275
- ) -> List[GitCommitInfo]:
3164
+ ) -> list[GitCommitInfo]:
3276
3165
  """
3277
3166
  Get the list of commits of a given revision for a repo on the Hub.
3278
3167
 
@@ -3319,7 +3208,7 @@ class HfApi:
3319
3208
  ```
3320
3209
 
3321
3210
  Returns:
3322
- List[[`GitCommitInfo`]]: list of objects containing information about the commits for a repo on the Hub.
3211
+ list[[`GitCommitInfo`]]: list of objects containing information about the commits for a repo on the Hub.
3323
3212
 
3324
3213
  Raises:
3325
3214
  [`~utils.RepositoryNotFoundError`]:
@@ -3353,20 +3242,20 @@ class HfApi:
3353
3242
  def get_paths_info(
3354
3243
  self,
3355
3244
  repo_id: str,
3356
- paths: Union[List[str], str],
3245
+ paths: Union[list[str], str],
3357
3246
  *,
3358
3247
  expand: bool = False,
3359
3248
  revision: Optional[str] = None,
3360
3249
  repo_type: Optional[str] = None,
3361
3250
  token: Union[str, bool, None] = None,
3362
- ) -> List[Union[RepoFile, RepoFolder]]:
3251
+ ) -> list[Union[RepoFile, RepoFolder]]:
3363
3252
  """
3364
3253
  Get information about a repo's paths.
3365
3254
 
3366
3255
  Args:
3367
3256
  repo_id (`str`):
3368
3257
  A namespace (user or an organization) and a repo name separated by a `/`.
3369
- paths (`Union[List[str], str]`, *optional*):
3258
+ paths (`Union[list[str], str]`, *optional*):
3370
3259
  The paths to get information about. If a path do not exist, it is ignored without raising
3371
3260
  an exception.
3372
3261
  expand (`bool`, *optional*, defaults to `False`):
@@ -3386,7 +3275,7 @@ class HfApi:
3386
3275
  To disable authentication, pass `False`.
3387
3276
 
3388
3277
  Returns:
3389
- `List[Union[RepoFile, RepoFolder]]`:
3278
+ `list[Union[RepoFile, RepoFolder]]`:
3390
3279
  The information about the paths, as a list of [`RepoFile`] and [`RepoFolder`] objects.
3391
3280
 
3392
3281
  Raises:
@@ -3642,8 +3531,8 @@ class HfApi:
3642
3531
  space_hardware: Optional[SpaceHardware] = None,
3643
3532
  space_storage: Optional[SpaceStorage] = None,
3644
3533
  space_sleep_time: Optional[int] = None,
3645
- space_secrets: Optional[List[Dict[str, str]]] = None,
3646
- space_variables: Optional[List[Dict[str, str]]] = None,
3534
+ space_secrets: Optional[list[dict[str, str]]] = None,
3535
+ space_variables: Optional[list[dict[str, str]]] = None,
3647
3536
  ) -> RepoUrl:
3648
3537
  """Create an empty repo on the HuggingFace Hub.
3649
3538
 
@@ -3680,10 +3569,10 @@ class HfApi:
3680
3569
  your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure
3681
3570
  the sleep time (value is fixed to 48 hours of inactivity).
3682
3571
  See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details.
3683
- space_secrets (`List[Dict[str, str]]`, *optional*):
3572
+ space_secrets (`list[dict[str, str]]`, *optional*):
3684
3573
  A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
3685
3574
  For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets.
3686
- space_variables (`List[Dict[str, str]]`, *optional*):
3575
+ space_variables (`list[dict[str, str]]`, *optional*):
3687
3576
  A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
3688
3577
  For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables.
3689
3578
 
@@ -3698,7 +3587,7 @@ class HfApi:
3698
3587
  if repo_type not in constants.REPO_TYPES:
3699
3588
  raise ValueError("Invalid repo type")
3700
3589
 
3701
- json: Dict[str, Any] = {"name": name, "organization": organization}
3590
+ json: dict[str, Any] = {"name": name, "organization": organization}
3702
3591
  if private is not None:
3703
3592
  json["private"] = private
3704
3593
  if repo_type is not None:
@@ -3734,11 +3623,6 @@ class HfApi:
3734
3623
  if provided_space_args:
3735
3624
  warnings.warn(f"Ignoring provided {', '.join(provided_space_args)} because repo_type is not 'space'.")
3736
3625
 
3737
- if getattr(self, "_lfsmultipartthresh", None):
3738
- # Testing purposes only.
3739
- # See https://github.com/huggingface/huggingface_hub/pull/733/files#r820604472
3740
- json["lfsmultipartthresh"] = self._lfsmultipartthresh # type: ignore
3741
-
3742
3626
  if resource_group_id is not None:
3743
3627
  json["resourceGroupId"] = resource_group_id
3744
3628
 
@@ -3758,7 +3642,7 @@ class HfApi:
3758
3642
 
3759
3643
  try:
3760
3644
  hf_raise_for_status(r)
3761
- except HTTPError as err:
3645
+ except HfHubHTTPError as err:
3762
3646
  if exist_ok and err.response.status_code == 409:
3763
3647
  # Repo already exists and `exist_ok=True`
3764
3648
  pass
@@ -3820,65 +3704,13 @@ class HfApi:
3820
3704
  json["type"] = repo_type
3821
3705
 
3822
3706
  headers = self._build_hf_headers(token=token)
3823
- r = get_session().delete(path, headers=headers, json=json)
3707
+ r = get_session().request("DELETE", path, headers=headers, json=json)
3824
3708
  try:
3825
3709
  hf_raise_for_status(r)
3826
3710
  except RepositoryNotFoundError:
3827
3711
  if not missing_ok:
3828
3712
  raise
3829
3713
 
3830
- @_deprecate_method(version="0.32", message="Please use `update_repo_settings` instead.")
3831
- @validate_hf_hub_args
3832
- def update_repo_visibility(
3833
- self,
3834
- repo_id: str,
3835
- private: bool = False,
3836
- *,
3837
- token: Union[str, bool, None] = None,
3838
- repo_type: Optional[str] = None,
3839
- ) -> Dict[str, bool]:
3840
- """Update the visibility setting of a repository.
3841
-
3842
- Deprecated. Use `update_repo_settings` instead.
3843
-
3844
- Args:
3845
- repo_id (`str`, *optional*):
3846
- A namespace (user or an organization) and a repo name separated by a `/`.
3847
- private (`bool`, *optional*, defaults to `False`):
3848
- Whether the repository should be private.
3849
- token (`bool` or `str`, *optional*):
3850
- A valid user access token (string). Defaults to the locally saved
3851
- token, which is the recommended method for authentication (see
3852
- https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
3853
- To disable authentication, pass `False`.
3854
- repo_type (`str`, *optional*):
3855
- Set to `"dataset"` or `"space"` if uploading to a dataset or
3856
- space, `None` or `"model"` if uploading to a model. Default is
3857
- `None`.
3858
-
3859
- Returns:
3860
- The HTTP response in json.
3861
-
3862
- > [!TIP]
3863
- > Raises the following errors:
3864
- >
3865
- > - [`~utils.RepositoryNotFoundError`]
3866
- > If the repository to download from cannot be found. This may be because it doesn't exist,
3867
- > or because it is set to `private` and you do not have access.
3868
- """
3869
- if repo_type not in constants.REPO_TYPES:
3870
- raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
3871
- if repo_type is None:
3872
- repo_type = constants.REPO_TYPE_MODEL # default repo type
3873
-
3874
- r = get_session().put(
3875
- url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
3876
- headers=self._build_hf_headers(token=token),
3877
- json={"private": private},
3878
- )
3879
- hf_raise_for_status(r)
3880
- return r.json()
3881
-
3882
3714
  @validate_hf_hub_args
3883
3715
  def update_repo_settings(
3884
3716
  self,
@@ -3888,7 +3720,6 @@ class HfApi:
3888
3720
  private: Optional[bool] = None,
3889
3721
  token: Union[str, bool, None] = None,
3890
3722
  repo_type: Optional[str] = None,
3891
- xet_enabled: Optional[bool] = None,
3892
3723
  ) -> None:
3893
3724
  """
3894
3725
  Update the settings of a repository, including gated access and visibility.
@@ -3914,8 +3745,6 @@ class HfApi:
3914
3745
  repo_type (`str`, *optional*):
3915
3746
  The type of the repository to update settings from (`"model"`, `"dataset"` or `"space"`).
3916
3747
  Defaults to `"model"`.
3917
- xet_enabled (`bool`, *optional*):
3918
- Whether the repository should be enabled for Xet Storage.
3919
3748
  Raises:
3920
3749
  [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
3921
3750
  If gated is not one of "auto", "manual", or False.
@@ -3934,7 +3763,7 @@ class HfApi:
3934
3763
  repo_type = constants.REPO_TYPE_MODEL # default repo type
3935
3764
 
3936
3765
  # Prepare the JSON payload for the PUT request
3937
- payload: Dict = {}
3766
+ payload: dict = {}
3938
3767
 
3939
3768
  if gated is not None:
3940
3769
  if gated not in ["auto", "manual", False]:
@@ -3944,9 +3773,6 @@ class HfApi:
3944
3773
  if private is not None:
3945
3774
  payload["private"] = private
3946
3775
 
3947
- if xet_enabled is not None:
3948
- payload["xetEnabled"] = xet_enabled
3949
-
3950
3776
  if len(payload) == 0:
3951
3777
  raise ValueError("At least one setting must be updated.")
3952
3778
 
@@ -4317,12 +4143,12 @@ class HfApi:
4317
4143
  params = {"create_pr": "1"} if create_pr else None
4318
4144
 
4319
4145
  try:
4320
- commit_resp = get_session().post(url=commit_url, headers=headers, data=data, params=params)
4146
+ commit_resp = get_session().post(url=commit_url, headers=headers, content=data, params=params)
4321
4147
  hf_raise_for_status(commit_resp, endpoint_name="commit")
4322
4148
  except RepositoryNotFoundError as e:
4323
4149
  e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE)
4324
4150
  raise
4325
- except EntryNotFoundError as e:
4151
+ except RemoteEntryNotFoundError as e:
4326
4152
  if nb_deletions > 0 and "A file with this name doesn't exist" in str(e):
4327
4153
  e.append_to_message(
4328
4154
  "\nMake sure to differentiate file and folder paths in delete"
@@ -4633,7 +4459,6 @@ class HfApi:
4633
4459
  ... repo_type="dataset",
4634
4460
  ... token="my_token",
4635
4461
  ... )
4636
- "https://huggingface.co/datasets/username/my-dataset/blob/main/remote/file/path.h5"
4637
4462
 
4638
4463
  >>> upload_file(
4639
4464
  ... path_or_fileobj=".\\\\local\\\\file\\\\path",
@@ -4641,7 +4466,6 @@ class HfApi:
4641
4466
  ... repo_id="username/my-model",
4642
4467
  ... token="my_token",
4643
4468
  ... )
4644
- "https://huggingface.co/username/my-model/blob/main/remote/file/path.h5"
4645
4469
 
4646
4470
  >>> upload_file(
4647
4471
  ... path_or_fileobj=".\\\\local\\\\file\\\\path",
@@ -4650,7 +4474,6 @@ class HfApi:
4650
4474
  ... token="my_token",
4651
4475
  ... create_pr=True,
4652
4476
  ... )
4653
- "https://huggingface.co/username/my-model/blob/refs%2Fpr%2F1/remote/file/path.h5"
4654
4477
  ```
4655
4478
  """
4656
4479
  if repo_type not in constants.REPO_TYPES:
@@ -4664,7 +4487,7 @@ class HfApi:
4664
4487
  path_in_repo=path_in_repo,
4665
4488
  )
4666
4489
 
4667
- commit_info = self.create_commit(
4490
+ return self.create_commit(
4668
4491
  repo_id=repo_id,
4669
4492
  repo_type=repo_type,
4670
4493
  operations=[operation],
@@ -4676,23 +4499,6 @@ class HfApi:
4676
4499
  parent_commit=parent_commit,
4677
4500
  )
4678
4501
 
4679
- if commit_info.pr_url is not None:
4680
- revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
4681
- if repo_type in constants.REPO_TYPES_URL_PREFIXES:
4682
- repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
4683
- revision = revision if revision is not None else constants.DEFAULT_REVISION
4684
-
4685
- return CommitInfo(
4686
- commit_url=commit_info.commit_url,
4687
- commit_message=commit_info.commit_message,
4688
- commit_description=commit_info.commit_description,
4689
- oid=commit_info.oid,
4690
- pr_url=commit_info.pr_url,
4691
- # Similar to `hf_hub_url` but it's "blob" instead of "resolve"
4692
- # TODO: remove this in v1.0
4693
- _url=f"{self.endpoint}/{repo_id}/blob/{revision}/{path_in_repo}",
4694
- )
4695
-
4696
4502
  @overload
4697
4503
  def upload_folder( # type: ignore
4698
4504
  self,
@@ -4707,9 +4513,9 @@ class HfApi:
4707
4513
  revision: Optional[str] = None,
4708
4514
  create_pr: Optional[bool] = None,
4709
4515
  parent_commit: Optional[str] = None,
4710
- allow_patterns: Optional[Union[List[str], str]] = None,
4711
- ignore_patterns: Optional[Union[List[str], str]] = None,
4712
- delete_patterns: Optional[Union[List[str], str]] = None,
4516
+ allow_patterns: Optional[Union[list[str], str]] = None,
4517
+ ignore_patterns: Optional[Union[list[str], str]] = None,
4518
+ delete_patterns: Optional[Union[list[str], str]] = None,
4713
4519
  run_as_future: Literal[False] = ...,
4714
4520
  ) -> CommitInfo: ...
4715
4521
 
@@ -4727,9 +4533,9 @@ class HfApi:
4727
4533
  revision: Optional[str] = None,
4728
4534
  create_pr: Optional[bool] = None,
4729
4535
  parent_commit: Optional[str] = None,
4730
- allow_patterns: Optional[Union[List[str], str]] = None,
4731
- ignore_patterns: Optional[Union[List[str], str]] = None,
4732
- delete_patterns: Optional[Union[List[str], str]] = None,
4536
+ allow_patterns: Optional[Union[list[str], str]] = None,
4537
+ ignore_patterns: Optional[Union[list[str], str]] = None,
4538
+ delete_patterns: Optional[Union[list[str], str]] = None,
4733
4539
  run_as_future: Literal[True] = ...,
4734
4540
  ) -> Future[CommitInfo]: ...
4735
4541
 
@@ -4748,9 +4554,9 @@ class HfApi:
4748
4554
  revision: Optional[str] = None,
4749
4555
  create_pr: Optional[bool] = None,
4750
4556
  parent_commit: Optional[str] = None,
4751
- allow_patterns: Optional[Union[List[str], str]] = None,
4752
- ignore_patterns: Optional[Union[List[str], str]] = None,
4753
- delete_patterns: Optional[Union[List[str], str]] = None,
4557
+ allow_patterns: Optional[Union[list[str], str]] = None,
4558
+ ignore_patterns: Optional[Union[list[str], str]] = None,
4559
+ delete_patterns: Optional[Union[list[str], str]] = None,
4754
4560
  run_as_future: bool = False,
4755
4561
  ) -> Union[CommitInfo, Future[CommitInfo]]:
4756
4562
  """
@@ -4812,11 +4618,11 @@ class HfApi:
4812
4618
  If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`.
4813
4619
  Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be
4814
4620
  especially useful if the repo is updated / committed to concurrently.
4815
- allow_patterns (`List[str]` or `str`, *optional*):
4621
+ allow_patterns (`list[str]` or `str`, *optional*):
4816
4622
  If provided, only files matching at least one pattern are uploaded.
4817
- ignore_patterns (`List[str]` or `str`, *optional*):
4623
+ ignore_patterns (`list[str]` or `str`, *optional*):
4818
4624
  If provided, files matching any of the patterns are not uploaded.
4819
- delete_patterns (`List[str]` or `str`, *optional*):
4625
+ delete_patterns (`list[str]` or `str`, *optional*):
4820
4626
  If provided, remote files matching any of the patterns will be deleted from the repo while committing
4821
4627
  new files. This is useful if you don't know which files have already been uploaded.
4822
4628
  Note: to avoid discrepancies the `.gitattributes` file is not deleted even if it matches the pattern.
@@ -4859,7 +4665,6 @@ class HfApi:
4859
4665
  ... token="my_token",
4860
4666
  ... ignore_patterns="**/logs/*.txt",
4861
4667
  ... )
4862
- # "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints"
4863
4668
 
4864
4669
  # Upload checkpoints folder including logs while deleting existing logs from the repo
4865
4670
  # Useful if you don't know exactly which log files have already being pushed
@@ -4871,7 +4676,6 @@ class HfApi:
4871
4676
  ... token="my_token",
4872
4677
  ... delete_patterns="**/logs/*.txt",
4873
4678
  ... )
4874
- "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints"
4875
4679
 
4876
4680
  # Upload checkpoints folder while creating a PR
4877
4681
  >>> upload_folder(
@@ -4882,8 +4686,6 @@ class HfApi:
4882
4686
  ... token="my_token",
4883
4687
  ... create_pr=True,
4884
4688
  ... )
4885
- "https://huggingface.co/datasets/username/my-dataset/tree/refs%2Fpr%2F1/remote/experiment/checkpoints"
4886
-
4887
4689
  ```
4888
4690
  """
4889
4691
  if repo_type not in constants.REPO_TYPES:
@@ -4927,7 +4729,7 @@ class HfApi:
4927
4729
 
4928
4730
  commit_message = commit_message or "Upload folder using huggingface_hub"
4929
4731
 
4930
- commit_info = self.create_commit(
4732
+ return self.create_commit(
4931
4733
  repo_type=repo_type,
4932
4734
  repo_id=repo_id,
4933
4735
  operations=commit_operations,
@@ -4939,24 +4741,6 @@ class HfApi:
4939
4741
  parent_commit=parent_commit,
4940
4742
  )
4941
4743
 
4942
- # Create url to uploaded folder (for legacy return value)
4943
- if create_pr and commit_info.pr_url is not None:
4944
- revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
4945
- if repo_type in constants.REPO_TYPES_URL_PREFIXES:
4946
- repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
4947
- revision = revision if revision is not None else constants.DEFAULT_REVISION
4948
-
4949
- return CommitInfo(
4950
- commit_url=commit_info.commit_url,
4951
- commit_message=commit_info.commit_message,
4952
- commit_description=commit_info.commit_description,
4953
- oid=commit_info.oid,
4954
- pr_url=commit_info.pr_url,
4955
- # Similar to `hf_hub_url` but it's "tree" instead of "resolve"
4956
- # TODO: remove this in v1.0
4957
- _url=f"{self.endpoint}/{repo_id}/tree/{revision}/{path_in_repo}",
4958
- )
4959
-
4960
4744
  @validate_hf_hub_args
4961
4745
  def delete_file(
4962
4746
  self,
@@ -5048,7 +4832,7 @@ class HfApi:
5048
4832
  def delete_files(
5049
4833
  self,
5050
4834
  repo_id: str,
5051
- delete_patterns: List[str],
4835
+ delete_patterns: list[str],
5052
4836
  *,
5053
4837
  token: Union[bool, str, None] = None,
5054
4838
  repo_type: Optional[str] = None,
@@ -5068,7 +4852,7 @@ class HfApi:
5068
4852
  repo_id (`str`):
5069
4853
  The repository from which the folder will be deleted, for example:
5070
4854
  `"username/custom_transformers"`
5071
- delete_patterns (`List[str]`):
4855
+ delete_patterns (`list[str]`):
5072
4856
  List of files or folders to delete. Each string can either be
5073
4857
  a file path, a folder path or a Unix shell-style wildcard.
5074
4858
  E.g. `["file.txt", "folder/", "data/*.parquet"]`
@@ -5196,8 +4980,8 @@ class HfApi:
5196
4980
  repo_type: str, # Repo type is required!
5197
4981
  revision: Optional[str] = None,
5198
4982
  private: Optional[bool] = None,
5199
- allow_patterns: Optional[Union[List[str], str]] = None,
5200
- ignore_patterns: Optional[Union[List[str], str]] = None,
4983
+ allow_patterns: Optional[Union[list[str], str]] = None,
4984
+ ignore_patterns: Optional[Union[list[str], str]] = None,
5201
4985
  num_workers: Optional[int] = None,
5202
4986
  print_report: bool = True,
5203
4987
  print_report_every: int = 60,
@@ -5225,9 +5009,9 @@ class HfApi:
5225
5009
  private (`bool`, `optional`):
5226
5010
  Whether the repository should be private.
5227
5011
  If `None` (default), the repo will be public unless the organization's default is private.
5228
- allow_patterns (`List[str]` or `str`, *optional*):
5012
+ allow_patterns (`list[str]` or `str`, *optional*):
5229
5013
  If provided, only files matching at least one pattern are uploaded.
5230
- ignore_patterns (`List[str]` or `str`, *optional*):
5014
+ ignore_patterns (`list[str]` or `str`, *optional*):
5231
5015
  If provided, files matching any of the patterns are not uploaded.
5232
5016
  num_workers (`int`, *optional*):
5233
5017
  Number of workers to start. Defaults to `os.cpu_count() - 2` (minimum 2).
@@ -5280,14 +5064,13 @@ class HfApi:
5280
5064
  4. Pre-upload LFS file if at least 1 file and no worker is pre-uploading.
5281
5065
  5. Hash file if at least 1 file and no worker is hashing.
5282
5066
  6. Get upload mode if at least 1 file and no worker is getting upload mode.
5283
- 7. Pre-upload LFS file if at least 1 file (exception: if hf_transfer is enabled, only 1 worker can preupload LFS at a time).
5067
+ 7. Pre-upload LFS file if at least 1 file.
5284
5068
  8. Hash file if at least 1 file to hash.
5285
5069
  9. Get upload mode if at least 1 file to get upload mode.
5286
5070
  10. Commit if at least 1 file to commit and at least 1 min since last commit attempt.
5287
5071
  11. Commit if at least 1 file to commit and all other queues are empty.
5288
5072
 
5289
5073
  Special rules:
5290
- - If `hf_transfer` is enabled, only 1 LFS uploader at a time. Otherwise the CPU would be bloated by `hf_transfer`.
5291
5074
  - Only one worker can commit at a time.
5292
5075
  - If no tasks are available, the worker waits for 10 seconds before checking again.
5293
5076
  """
@@ -5311,7 +5094,6 @@ class HfApi:
5311
5094
  *,
5312
5095
  url: str,
5313
5096
  token: Union[bool, str, None] = None,
5314
- proxies: Optional[Dict] = None,
5315
5097
  timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT,
5316
5098
  ) -> HfFileMetadata:
5317
5099
  """Fetch metadata of a file versioned on the Hub for a given url.
@@ -5324,8 +5106,6 @@ class HfApi:
5324
5106
  token, which is the recommended method for authentication (see
5325
5107
  https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
5326
5108
  To disable authentication, pass `False`.
5327
- proxies (`dict`, *optional*):
5328
- Dictionary mapping protocol to the URL of the proxy passed to `requests.request`.
5329
5109
  timeout (`float`, *optional*, defaults to 10):
5330
5110
  How many seconds to wait for the server to send metadata before giving up.
5331
5111
 
@@ -5339,7 +5119,6 @@ class HfApi:
5339
5119
  return get_hf_file_metadata(
5340
5120
  url=url,
5341
5121
  token=token,
5342
- proxies=proxies,
5343
5122
  timeout=timeout,
5344
5123
  library_name=self.library_name,
5345
5124
  library_version=self.library_version,
@@ -5347,6 +5126,42 @@ class HfApi:
5347
5126
  endpoint=self.endpoint,
5348
5127
  )
5349
5128
 
5129
+ @overload
5130
+ def hf_hub_download(
5131
+ self,
5132
+ repo_id: str,
5133
+ filename: str,
5134
+ *,
5135
+ subfolder: Optional[str] = None,
5136
+ repo_type: Optional[str] = None,
5137
+ revision: Optional[str] = None,
5138
+ cache_dir: Union[str, Path, None] = None,
5139
+ local_dir: Union[str, Path, None] = None,
5140
+ force_download: bool = False,
5141
+ etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
5142
+ token: Union[bool, str, None] = None,
5143
+ local_files_only: bool = False,
5144
+ dry_run: Literal[False] = False,
5145
+ ) -> str: ...
5146
+
5147
+ @overload
5148
+ def hf_hub_download(
5149
+ self,
5150
+ repo_id: str,
5151
+ filename: str,
5152
+ *,
5153
+ subfolder: Optional[str] = None,
5154
+ repo_type: Optional[str] = None,
5155
+ revision: Optional[str] = None,
5156
+ cache_dir: Union[str, Path, None] = None,
5157
+ local_dir: Union[str, Path, None] = None,
5158
+ force_download: bool = False,
5159
+ etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
5160
+ token: Union[bool, str, None] = None,
5161
+ local_files_only: bool = False,
5162
+ dry_run: Literal[True],
5163
+ ) -> DryRunFileInfo: ...
5164
+
5350
5165
  @validate_hf_hub_args
5351
5166
  def hf_hub_download(
5352
5167
  self,
@@ -5359,15 +5174,11 @@ class HfApi:
5359
5174
  cache_dir: Union[str, Path, None] = None,
5360
5175
  local_dir: Union[str, Path, None] = None,
5361
5176
  force_download: bool = False,
5362
- proxies: Optional[Dict] = None,
5363
5177
  etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
5364
5178
  token: Union[bool, str, None] = None,
5365
5179
  local_files_only: bool = False,
5366
- # Deprecated args
5367
- resume_download: Optional[bool] = None,
5368
- force_filename: Optional[str] = None,
5369
- local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
5370
- ) -> str:
5180
+ dry_run: bool = False,
5181
+ ) -> Union[str, DryRunFileInfo]:
5371
5182
  """Download a given file if it's not already present in the local cache.
5372
5183
 
5373
5184
  The new cache file layout looks like this:
@@ -5423,12 +5234,9 @@ class HfApi:
5423
5234
  force_download (`bool`, *optional*, defaults to `False`):
5424
5235
  Whether the file should be downloaded even if it already exists in
5425
5236
  the local cache.
5426
- proxies (`dict`, *optional*):
5427
- Dictionary mapping protocol to the URL of the proxy passed to
5428
- `requests.request`.
5429
5237
  etag_timeout (`float`, *optional*, defaults to `10`):
5430
5238
  When fetching ETag, how many seconds to wait for the server to send
5431
- data before giving up which is passed to `requests.request`.
5239
+ data before giving up which is passed to `httpx.request`.
5432
5240
  token (`bool` or `str`, *optional*):
5433
5241
  A valid user access token (string). Defaults to the locally saved
5434
5242
  token, which is the recommended method for authentication (see
@@ -5437,9 +5245,14 @@ class HfApi:
5437
5245
  local_files_only (`bool`, *optional*, defaults to `False`):
5438
5246
  If `True`, avoid downloading the file and return the path to the
5439
5247
  local cached file if it exists.
5248
+ dry_run (`bool`, *optional*, defaults to `False`):
5249
+ If `True`, perform a dry run without actually downloading the file. Returns a
5250
+ [`DryRunFileInfo`] object containing information about what would be downloaded.
5440
5251
 
5441
5252
  Returns:
5442
- `str`: Local path of file or if networking is off, last version of file cached on disk.
5253
+ `str` or [`DryRunFileInfo`]:
5254
+ - If `dry_run=False`: Local path of file or if networking is off, last version of file cached on disk.
5255
+ - If `dry_run=True`: A [`DryRunFileInfo`] object containing download information.
5443
5256
 
5444
5257
  Raises:
5445
5258
  [`~utils.RepositoryNotFoundError`]
@@ -5447,7 +5260,7 @@ class HfApi:
5447
5260
  or because it is set to `private` and you do not have access.
5448
5261
  [`~utils.RevisionNotFoundError`]
5449
5262
  If the revision to download from cannot be found.
5450
- [`~utils.EntryNotFoundError`]
5263
+ [`~utils.RemoteEntryNotFoundError`]
5451
5264
  If the file to download cannot be found.
5452
5265
  [`~utils.LocalEntryNotFoundError`]
5453
5266
  If network is disabled or unavailable and file is not found in cache.
@@ -5475,13 +5288,9 @@ class HfApi:
5475
5288
  library_version=self.library_version,
5476
5289
  cache_dir=cache_dir,
5477
5290
  local_dir=local_dir,
5478
- local_dir_use_symlinks=local_dir_use_symlinks,
5479
5291
  user_agent=self.user_agent,
5480
5292
  force_download=force_download,
5481
- force_filename=force_filename,
5482
- proxies=proxies,
5483
5293
  etag_timeout=etag_timeout,
5484
- resume_download=resume_download,
5485
5294
  token=token,
5486
5295
  headers=self.headers,
5487
5296
  local_files_only=local_files_only,
@@ -5496,18 +5305,14 @@ class HfApi:
5496
5305
  revision: Optional[str] = None,
5497
5306
  cache_dir: Union[str, Path, None] = None,
5498
5307
  local_dir: Union[str, Path, None] = None,
5499
- proxies: Optional[Dict] = None,
5500
5308
  etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
5501
5309
  force_download: bool = False,
5502
5310
  token: Union[bool, str, None] = None,
5503
5311
  local_files_only: bool = False,
5504
- allow_patterns: Optional[Union[List[str], str]] = None,
5505
- ignore_patterns: Optional[Union[List[str], str]] = None,
5312
+ allow_patterns: Optional[Union[list[str], str]] = None,
5313
+ ignore_patterns: Optional[Union[list[str], str]] = None,
5506
5314
  max_workers: int = 8,
5507
- tqdm_class: Optional[Type[base_tqdm]] = None,
5508
- # Deprecated args
5509
- local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
5510
- resume_download: Optional[bool] = None,
5315
+ tqdm_class: Optional[type[base_tqdm]] = None,
5511
5316
  ) -> str:
5512
5317
  """Download repo files.
5513
5318
 
@@ -5537,12 +5342,9 @@ class HfApi:
5537
5342
  Path to the folder where cached files are stored.
5538
5343
  local_dir (`str` or `Path`, *optional*):
5539
5344
  If provided, the downloaded files will be placed under this directory.
5540
- proxies (`dict`, *optional*):
5541
- Dictionary mapping protocol to the URL of the proxy passed to
5542
- `requests.request`.
5543
5345
  etag_timeout (`float`, *optional*, defaults to `10`):
5544
5346
  When fetching ETag, how many seconds to wait for the server to send
5545
- data before giving up which is passed to `requests.request`.
5347
+ data before giving up which is passed to `httpx.request`.
5546
5348
  force_download (`bool`, *optional*, defaults to `False`):
5547
5349
  Whether the file should be downloaded even if it already exists in the local cache.
5548
5350
  token (`bool` or `str`, *optional*):
@@ -5553,9 +5355,9 @@ class HfApi:
5553
5355
  local_files_only (`bool`, *optional*, defaults to `False`):
5554
5356
  If `True`, avoid downloading the file and return the path to the
5555
5357
  local cached file if it exists.
5556
- allow_patterns (`List[str]` or `str`, *optional*):
5358
+ allow_patterns (`list[str]` or `str`, *optional*):
5557
5359
  If provided, only files matching at least one pattern are downloaded.
5558
- ignore_patterns (`List[str]` or `str`, *optional*):
5360
+ ignore_patterns (`list[str]` or `str`, *optional*):
5559
5361
  If provided, files matching any of the patterns are not downloaded.
5560
5362
  max_workers (`int`, *optional*):
5561
5363
  Number of concurrent threads to download files (1 thread = 1 file download).
@@ -5596,13 +5398,10 @@ class HfApi:
5596
5398
  endpoint=self.endpoint,
5597
5399
  cache_dir=cache_dir,
5598
5400
  local_dir=local_dir,
5599
- local_dir_use_symlinks=local_dir_use_symlinks,
5600
5401
  library_name=self.library_name,
5601
5402
  library_version=self.library_version,
5602
5403
  user_agent=self.user_agent,
5603
- proxies=proxies,
5604
5404
  etag_timeout=etag_timeout,
5605
- resume_download=resume_download,
5606
5405
  force_download=force_download,
5607
5406
  token=token,
5608
5407
  local_files_only=local_files_only,
@@ -6219,7 +6018,7 @@ class HfApi:
6219
6018
  headers = self._build_hf_headers(token=token)
6220
6019
  path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions"
6221
6020
 
6222
- params: Dict[str, Union[str, int]] = {}
6021
+ params: dict[str, Union[str, int]] = {}
6223
6022
  if discussion_type is not None:
6224
6023
  params["type"] = discussion_type
6225
6024
  if discussion_status is not None:
@@ -6488,7 +6287,7 @@ class HfApi:
6488
6287
  body: Optional[dict] = None,
6489
6288
  token: Union[bool, str, None] = None,
6490
6289
  repo_type: Optional[str] = None,
6491
- ) -> requests.Response:
6290
+ ) -> httpx.Response:
6492
6291
  """Internal utility to POST changes to a Discussion or Pull Request"""
6493
6292
  if not isinstance(discussion_num, int) or discussion_num <= 0:
6494
6293
  raise ValueError("Invalid discussion_num, must be a positive integer")
@@ -6501,7 +6300,7 @@ class HfApi:
6501
6300
  path = f"{self.endpoint}/api/{repo_id}/discussions/{discussion_num}/{resource}"
6502
6301
 
6503
6302
  headers = self._build_hf_headers(token=token)
6504
- resp = requests.post(path, headers=headers, json=body)
6303
+ resp = get_session().post(path, headers=headers, json=body)
6505
6304
  hf_raise_for_status(resp)
6506
6305
  return resp
6507
6306
 
@@ -6710,7 +6509,7 @@ class HfApi:
6710
6509
  """
6711
6510
  if new_status not in ["open", "closed"]:
6712
6511
  raise ValueError("Invalid status, valid statuses are: 'open' and 'closed'")
6713
- body: Dict[str, str] = {"status": new_status}
6512
+ body: dict[str, str] = {"status": new_status}
6714
6513
  if comment and comment.strip():
6715
6514
  body["comment"] = comment.strip()
6716
6515
  resp = self._post_discussion_changes(
@@ -6951,7 +6750,8 @@ class HfApi:
6951
6750
  https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
6952
6751
  To disable authentication, pass `False`.
6953
6752
  """
6954
- r = get_session().delete(
6753
+ r = get_session().request(
6754
+ "DELETE",
6955
6755
  f"{self.endpoint}/api/spaces/{repo_id}/secrets",
6956
6756
  headers=self._build_hf_headers(token=token),
6957
6757
  json={"key": key},
@@ -6959,7 +6759,7 @@ class HfApi:
6959
6759
  hf_raise_for_status(r)
6960
6760
 
6961
6761
  @validate_hf_hub_args
6962
- def get_space_variables(self, repo_id: str, *, token: Union[bool, str, None] = None) -> Dict[str, SpaceVariable]:
6762
+ def get_space_variables(self, repo_id: str, *, token: Union[bool, str, None] = None) -> dict[str, SpaceVariable]:
6963
6763
  """Gets all variables from a Space.
6964
6764
 
6965
6765
  Variables allow to set environment variables to a Space without hardcoding them.
@@ -6990,7 +6790,7 @@ class HfApi:
6990
6790
  *,
6991
6791
  description: Optional[str] = None,
6992
6792
  token: Union[bool, str, None] = None,
6993
- ) -> Dict[str, SpaceVariable]:
6793
+ ) -> dict[str, SpaceVariable]:
6994
6794
  """Adds or updates a variable in a Space.
6995
6795
 
6996
6796
  Variables allow to set environment variables to a Space without hardcoding them.
@@ -7025,7 +6825,7 @@ class HfApi:
7025
6825
  @validate_hf_hub_args
7026
6826
  def delete_space_variable(
7027
6827
  self, repo_id: str, key: str, *, token: Union[bool, str, None] = None
7028
- ) -> Dict[str, SpaceVariable]:
6828
+ ) -> dict[str, SpaceVariable]:
7029
6829
  """Deletes a variable from a Space.
7030
6830
 
7031
6831
  Variables allow to set environment variables to a Space without hardcoding them.
@@ -7042,7 +6842,8 @@ class HfApi:
7042
6842
  https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
7043
6843
  To disable authentication, pass `False`.
7044
6844
  """
7045
- r = get_session().delete(
6845
+ r = get_session().request(
6846
+ "DELETE",
7046
6847
  f"{self.endpoint}/api/spaces/{repo_id}/variables",
7047
6848
  headers=self._build_hf_headers(token=token),
7048
6849
  json={"key": key},
@@ -7110,7 +6911,7 @@ class HfApi:
7110
6911
  " you want to set a custom sleep time, you need to upgrade to a paid Hardware.",
7111
6912
  UserWarning,
7112
6913
  )
7113
- payload: Dict[str, Any] = {"flavor": hardware}
6914
+ payload: dict[str, Any] = {"flavor": hardware}
7114
6915
  if sleep_time is not None:
7115
6916
  payload["sleepTimeSeconds"] = sleep_time
7116
6917
  r = get_session().post(
@@ -7266,8 +7067,8 @@ class HfApi:
7266
7067
  hardware: Optional[SpaceHardware] = None,
7267
7068
  storage: Optional[SpaceStorage] = None,
7268
7069
  sleep_time: Optional[int] = None,
7269
- secrets: Optional[List[Dict[str, str]]] = None,
7270
- variables: Optional[List[Dict[str, str]]] = None,
7070
+ secrets: Optional[list[dict[str, str]]] = None,
7071
+ variables: Optional[list[dict[str, str]]] = None,
7271
7072
  ) -> RepoUrl:
7272
7073
  """Duplicate a Space.
7273
7074
 
@@ -7298,10 +7099,10 @@ class HfApi:
7298
7099
  your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure
7299
7100
  the sleep time (value is fixed to 48 hours of inactivity).
7300
7101
  See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details.
7301
- secrets (`List[Dict[str, str]]`, *optional*):
7102
+ secrets (`list[dict[str, str]]`, *optional*):
7302
7103
  A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
7303
7104
  For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets.
7304
- variables (`List[Dict[str, str]]`, *optional*):
7105
+ variables (`list[dict[str, str]]`, *optional*):
7305
7106
  A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
7306
7107
  For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables.
7307
7108
 
@@ -7313,7 +7114,7 @@ class HfApi:
7313
7114
  [`~utils.RepositoryNotFoundError`]:
7314
7115
  If one of `from_id` or `to_id` cannot be found. This may be because it doesn't exist,
7315
7116
  or because it is set to `private` and you do not have access.
7316
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
7117
+ [`HfHubHTTPError`]:
7317
7118
  If the HuggingFace API returned an error
7318
7119
 
7319
7120
  Example:
@@ -7341,7 +7142,7 @@ class HfApi:
7341
7142
  to_repo_name = parsed_to_id.repo_name if to_id is not None else RepoUrl(from_id).repo_name # type: ignore
7342
7143
 
7343
7144
  # repository must be a valid repo_id (namespace/repo_name).
7344
- payload: Dict[str, Any] = {"repository": f"{to_namespace}/{to_repo_name}"}
7145
+ payload: dict[str, Any] = {"repository": f"{to_namespace}/{to_repo_name}"}
7345
7146
 
7346
7147
  keys = ["private", "hardware", "storageTier", "sleepTimeSeconds", "secrets", "variables"]
7347
7148
  values = [private, hardware, storage, sleep_time, secrets, variables]
@@ -7363,7 +7164,7 @@ class HfApi:
7363
7164
 
7364
7165
  try:
7365
7166
  hf_raise_for_status(r)
7366
- except HTTPError as err:
7167
+ except HfHubHTTPError as err:
7367
7168
  if exist_ok and err.response.status_code == 409:
7368
7169
  # Repo already exists and `exist_ok=True`
7369
7170
  pass
@@ -7399,7 +7200,7 @@ class HfApi:
7399
7200
  > It is not possible to decrease persistent storage after its granted. To do so, you must delete it
7400
7201
  > via [`delete_space_storage`].
7401
7202
  """
7402
- payload: Dict[str, SpaceStorage] = {"tier": storage}
7203
+ payload: dict[str, SpaceStorage] = {"tier": storage}
7403
7204
  r = get_session().post(
7404
7205
  f"{self.endpoint}/api/spaces/{repo_id}/storage",
7405
7206
  headers=self._build_hf_headers(token=token),
@@ -7445,7 +7246,7 @@ class HfApi:
7445
7246
 
7446
7247
  def list_inference_endpoints(
7447
7248
  self, namespace: Optional[str] = None, *, token: Union[bool, str, None] = None
7448
- ) -> List[InferenceEndpoint]:
7249
+ ) -> list[InferenceEndpoint]:
7449
7250
  """Lists all inference endpoints for the given namespace.
7450
7251
 
7451
7252
  Args:
@@ -7459,7 +7260,7 @@ class HfApi:
7459
7260
  To disable authentication, pass `False`.
7460
7261
 
7461
7262
  Returns:
7462
- List[`InferenceEndpoint`]: A list of all inference endpoints for the given namespace.
7263
+ list[`InferenceEndpoint`]: A list of all inference endpoints for the given namespace.
7463
7264
 
7464
7265
  Example:
7465
7266
  ```python
@@ -7474,7 +7275,7 @@ class HfApi:
7474
7275
  user = self.whoami(token=token)
7475
7276
 
7476
7277
  # List personal endpoints first
7477
- endpoints: List[InferenceEndpoint] = list_inference_endpoints(namespace=self._get_namespace(token=token))
7278
+ endpoints: list[InferenceEndpoint] = list_inference_endpoints(namespace=self._get_namespace(token=token))
7478
7279
 
7479
7280
  # Then list endpoints for all orgs the user belongs to and ignore 401 errors (no billing or no access)
7480
7281
  for org in user.get("orgs", []):
@@ -7518,14 +7319,14 @@ class HfApi:
7518
7319
  scale_to_zero_timeout: Optional[int] = None,
7519
7320
  revision: Optional[str] = None,
7520
7321
  task: Optional[str] = None,
7521
- custom_image: Optional[Dict] = None,
7522
- env: Optional[Dict[str, str]] = None,
7523
- secrets: Optional[Dict[str, str]] = None,
7322
+ custom_image: Optional[dict] = None,
7323
+ env: Optional[dict[str, str]] = None,
7324
+ secrets: Optional[dict[str, str]] = None,
7524
7325
  type: InferenceEndpointType = InferenceEndpointType.PROTECTED,
7525
7326
  domain: Optional[str] = None,
7526
7327
  path: Optional[str] = None,
7527
7328
  cache_http_responses: Optional[bool] = None,
7528
- tags: Optional[List[str]] = None,
7329
+ tags: Optional[list[str]] = None,
7529
7330
  namespace: Optional[str] = None,
7530
7331
  token: Union[bool, str, None] = None,
7531
7332
  ) -> InferenceEndpoint:
@@ -7562,12 +7363,12 @@ class HfApi:
7562
7363
  The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
7563
7364
  task (`str`, *optional*):
7564
7365
  The task on which to deploy the model (e.g. `"text-classification"`).
7565
- custom_image (`Dict`, *optional*):
7366
+ custom_image (`dict`, *optional*):
7566
7367
  A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
7567
7368
  Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
7568
- env (`Dict[str, str]`, *optional*):
7369
+ env (`dict[str, str]`, *optional*):
7569
7370
  Non-secret environment variables to inject in the container environment.
7570
- secrets (`Dict[str, str]`, *optional*):
7371
+ secrets (`dict[str, str]`, *optional*):
7571
7372
  Secret values to inject in the container environment.
7572
7373
  type ([`InferenceEndpointType]`, *optional*):
7573
7374
  The type of the Inference Endpoint, which can be `"protected"` (default), `"public"` or `"private"`.
@@ -7577,7 +7378,7 @@ class HfApi:
7577
7378
  The custom path to the deployed model, should start with a `/` (e.g. `"/models/google-bert/bert-base-uncased"`).
7578
7379
  cache_http_responses (`bool`, *optional*):
7579
7380
  Whether to cache HTTP responses from the Inference Endpoint. Defaults to `False`.
7580
- tags (`List[str]`, *optional*):
7381
+ tags (`list[str]`, *optional*):
7581
7382
  A list of tags to associate with the Inference Endpoint.
7582
7383
  namespace (`str`, *optional*):
7583
7384
  The namespace where the Inference Endpoint will be created. Defaults to the current user's namespace.
@@ -7680,7 +7481,7 @@ class HfApi:
7680
7481
  else:
7681
7482
  image = {"huggingface": {}}
7682
7483
 
7683
- payload: Dict = {
7484
+ payload: dict = {
7684
7485
  "accountId": account_id,
7685
7486
  "compute": {
7686
7487
  "accelerator": accelerator,
@@ -7766,7 +7567,7 @@ class HfApi:
7766
7567
  > if you have any suggestions or requests.
7767
7568
  """
7768
7569
  token = token or self.token or get_token()
7769
- payload: Dict = {
7570
+ payload: dict = {
7770
7571
  "namespace": namespace or self._get_namespace(token=token),
7771
7572
  "repoId": repo_id,
7772
7573
  }
@@ -7784,7 +7585,7 @@ class HfApi:
7784
7585
 
7785
7586
  @experimental
7786
7587
  @validate_hf_hub_args
7787
- def list_inference_catalog(self, *, token: Union[bool, str, None] = None) -> List[str]:
7588
+ def list_inference_catalog(self, *, token: Union[bool, str, None] = None) -> list[str]:
7788
7589
  """List models available in the Hugging Face Inference Catalog.
7789
7590
 
7790
7591
  The goal of the Inference Catalog is to provide a curated list of models that are optimized for inference
@@ -7875,15 +7676,15 @@ class HfApi:
7875
7676
  framework: Optional[str] = None,
7876
7677
  revision: Optional[str] = None,
7877
7678
  task: Optional[str] = None,
7878
- custom_image: Optional[Dict] = None,
7879
- env: Optional[Dict[str, str]] = None,
7880
- secrets: Optional[Dict[str, str]] = None,
7679
+ custom_image: Optional[dict] = None,
7680
+ env: Optional[dict[str, str]] = None,
7681
+ secrets: Optional[dict[str, str]] = None,
7881
7682
  # Route update
7882
7683
  domain: Optional[str] = None,
7883
7684
  path: Optional[str] = None,
7884
7685
  # Other
7885
7686
  cache_http_responses: Optional[bool] = None,
7886
- tags: Optional[List[str]] = None,
7687
+ tags: Optional[list[str]] = None,
7887
7688
  namespace: Optional[str] = None,
7888
7689
  token: Union[bool, str, None] = None,
7889
7690
  ) -> InferenceEndpoint:
@@ -7919,12 +7720,12 @@ class HfApi:
7919
7720
  The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
7920
7721
  task (`str`, *optional*):
7921
7722
  The task on which to deploy the model (e.g. `"text-classification"`).
7922
- custom_image (`Dict`, *optional*):
7723
+ custom_image (`dict`, *optional*):
7923
7724
  A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
7924
7725
  Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
7925
- env (`Dict[str, str]`, *optional*):
7726
+ env (`dict[str, str]`, *optional*):
7926
7727
  Non-secret environment variables to inject in the container environment
7927
- secrets (`Dict[str, str]`, *optional*):
7728
+ secrets (`dict[str, str]`, *optional*):
7928
7729
  Secret values to inject in the container environment.
7929
7730
 
7930
7731
  domain (`str`, *optional*):
@@ -7934,7 +7735,7 @@ class HfApi:
7934
7735
 
7935
7736
  cache_http_responses (`bool`, *optional*):
7936
7737
  Whether to cache HTTP responses from the Inference Endpoint.
7937
- tags (`List[str]`, *optional*):
7738
+ tags (`list[str]`, *optional*):
7938
7739
  A list of tags to associate with the Inference Endpoint.
7939
7740
 
7940
7741
  namespace (`str`, *optional*):
@@ -7951,7 +7752,7 @@ class HfApi:
7951
7752
  namespace = namespace or self._get_namespace(token=token)
7952
7753
 
7953
7754
  # Populate only the fields that are not None
7954
- payload: Dict = defaultdict(lambda: defaultdict(dict))
7755
+ payload: dict = defaultdict(lambda: defaultdict(dict))
7955
7756
  if accelerator is not None:
7956
7757
  payload["compute"]["accelerator"] = accelerator
7957
7758
  if instance_size is not None:
@@ -8158,8 +7959,8 @@ class HfApi:
8158
7959
  def list_collections(
8159
7960
  self,
8160
7961
  *,
8161
- owner: Union[List[str], str, None] = None,
8162
- item: Union[List[str], str, None] = None,
7962
+ owner: Union[list[str], str, None] = None,
7963
+ item: Union[list[str], str, None] = None,
8163
7964
  sort: Optional[Literal["lastModified", "trending", "upvotes"]] = None,
8164
7965
  limit: Optional[int] = None,
8165
7966
  token: Union[bool, str, None] = None,
@@ -8171,9 +7972,9 @@ class HfApi:
8171
7972
  > from a collection, you must use [`get_collection`].
8172
7973
 
8173
7974
  Args:
8174
- owner (`List[str]` or `str`, *optional*):
7975
+ owner (`list[str]` or `str`, *optional*):
8175
7976
  Filter by owner's username.
8176
- item (`List[str]` or `str`, *optional*):
7977
+ item (`list[str]` or `str`, *optional*):
8177
7978
  Filter collections containing a particular items. Example: `"models/teknium/OpenHermes-2.5-Mistral-7B"`, `"datasets/squad"` or `"papers/2311.12983"`.
8178
7979
  sort (`Literal["lastModified", "trending", "upvotes"]`, *optional*):
8179
7980
  Sort collections by last modified, trending or upvotes.
@@ -8191,7 +7992,7 @@ class HfApi:
8191
7992
  # Construct the API endpoint
8192
7993
  path = f"{self.endpoint}/api/collections"
8193
7994
  headers = self._build_hf_headers(token=token)
8194
- params: Dict = {}
7995
+ params: dict = {}
8195
7996
  if owner is not None:
8196
7997
  params.update({"owner": owner})
8197
7998
  if item is not None:
@@ -8308,7 +8109,7 @@ class HfApi:
8308
8109
  )
8309
8110
  try:
8310
8111
  hf_raise_for_status(r)
8311
- except HTTPError as err:
8112
+ except HfHubHTTPError as err:
8312
8113
  if exists_ok and err.response.status_code == 409:
8313
8114
  # Collection already exists and `exists_ok=True`
8314
8115
  slug = r.json()["slug"]
@@ -8416,7 +8217,7 @@ class HfApi:
8416
8217
  )
8417
8218
  try:
8418
8219
  hf_raise_for_status(r)
8419
- except HTTPError as err:
8220
+ except HfHubHTTPError as err:
8420
8221
  if missing_ok and err.response.status_code == 404:
8421
8222
  # Collection doesn't exists and `missing_ok=True`
8422
8223
  return
@@ -8456,12 +8257,12 @@ class HfApi:
8456
8257
  Returns: [`Collection`]
8457
8258
 
8458
8259
  Raises:
8459
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8260
+ [`HfHubHTTPError`]:
8460
8261
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8461
8262
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8462
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8263
+ [`HfHubHTTPError`]:
8463
8264
  HTTP 404 if the item you try to add to the collection does not exist on the Hub.
8464
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8265
+ [`HfHubHTTPError`]:
8465
8266
  HTTP 409 if the item you try to add to the collection is already in the collection (and exists_ok=False)
8466
8267
 
8467
8268
  Example:
@@ -8487,7 +8288,7 @@ class HfApi:
8487
8288
  (...)
8488
8289
  ```
8489
8290
  """
8490
- payload: Dict[str, Any] = {"item": {"id": item_id, "type": item_type}}
8291
+ payload: dict[str, Any] = {"item": {"id": item_id, "type": item_type}}
8491
8292
  if note is not None:
8492
8293
  payload["note"] = note
8493
8294
  r = get_session().post(
@@ -8497,7 +8298,7 @@ class HfApi:
8497
8298
  )
8498
8299
  try:
8499
8300
  hf_raise_for_status(r)
8500
- except HTTPError as err:
8301
+ except HfHubHTTPError as err:
8501
8302
  if exists_ok and err.response.status_code == 409:
8502
8303
  # Item already exists and `exists_ok=True`
8503
8304
  return self.get_collection(collection_slug, token=token)
@@ -8603,7 +8404,7 @@ class HfApi:
8603
8404
  )
8604
8405
  try:
8605
8406
  hf_raise_for_status(r)
8606
- except HTTPError as err:
8407
+ except HfHubHTTPError as err:
8607
8408
  if missing_ok and err.response.status_code == 404:
8608
8409
  # Item already deleted and `missing_ok=True`
8609
8410
  return
@@ -8617,7 +8418,7 @@ class HfApi:
8617
8418
  @validate_hf_hub_args
8618
8419
  def list_pending_access_requests(
8619
8420
  self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
8620
- ) -> List[AccessRequest]:
8421
+ ) -> list[AccessRequest]:
8621
8422
  """
8622
8423
  Get pending access requests for a given gated repo.
8623
8424
 
@@ -8640,14 +8441,14 @@ class HfApi:
8640
8441
  To disable authentication, pass `False`.
8641
8442
 
8642
8443
  Returns:
8643
- `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`,
8444
+ `list[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`,
8644
8445
  `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
8645
8446
  be populated with user's answers.
8646
8447
 
8647
8448
  Raises:
8648
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8449
+ [`HfHubHTTPError`]:
8649
8450
  HTTP 400 if the repo is not gated.
8650
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8451
+ [`HfHubHTTPError`]:
8651
8452
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8652
8453
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8653
8454
 
@@ -8681,7 +8482,7 @@ class HfApi:
8681
8482
  @validate_hf_hub_args
8682
8483
  def list_accepted_access_requests(
8683
8484
  self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
8684
- ) -> List[AccessRequest]:
8485
+ ) -> list[AccessRequest]:
8685
8486
  """
8686
8487
  Get accepted access requests for a given gated repo.
8687
8488
 
@@ -8706,14 +8507,14 @@ class HfApi:
8706
8507
  To disable authentication, pass `False`.
8707
8508
 
8708
8509
  Returns:
8709
- `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`,
8510
+ `list[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`,
8710
8511
  `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
8711
8512
  be populated with user's answers.
8712
8513
 
8713
8514
  Raises:
8714
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8515
+ [`HfHubHTTPError`]:
8715
8516
  HTTP 400 if the repo is not gated.
8716
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8517
+ [`HfHubHTTPError`]:
8717
8518
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8718
8519
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8719
8520
 
@@ -8743,7 +8544,7 @@ class HfApi:
8743
8544
  @validate_hf_hub_args
8744
8545
  def list_rejected_access_requests(
8745
8546
  self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
8746
- ) -> List[AccessRequest]:
8547
+ ) -> list[AccessRequest]:
8747
8548
  """
8748
8549
  Get rejected access requests for a given gated repo.
8749
8550
 
@@ -8768,14 +8569,14 @@ class HfApi:
8768
8569
  To disable authentication, pass `False`.
8769
8570
 
8770
8571
  Returns:
8771
- `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`,
8572
+ `list[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`,
8772
8573
  `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
8773
8574
  be populated with user's answers.
8774
8575
 
8775
8576
  Raises:
8776
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8577
+ [`HfHubHTTPError`]:
8777
8578
  HTTP 400 if the repo is not gated.
8778
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8579
+ [`HfHubHTTPError`]:
8779
8580
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8780
8581
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8781
8582
 
@@ -8808,7 +8609,7 @@ class HfApi:
8808
8609
  status: Literal["accepted", "rejected", "pending"],
8809
8610
  repo_type: Optional[str] = None,
8810
8611
  token: Union[bool, str, None] = None,
8811
- ) -> List[AccessRequest]:
8612
+ ) -> list[AccessRequest]:
8812
8613
  if repo_type not in constants.REPO_TYPES:
8813
8614
  raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
8814
8615
  if repo_type is None:
@@ -8857,16 +8658,16 @@ class HfApi:
8857
8658
  To disable authentication, pass `False`.
8858
8659
 
8859
8660
  Raises:
8860
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8661
+ [`HfHubHTTPError`]:
8861
8662
  HTTP 400 if the repo is not gated.
8862
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8663
+ [`HfHubHTTPError`]:
8863
8664
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8864
8665
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8865
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8666
+ [`HfHubHTTPError`]:
8866
8667
  HTTP 404 if the user does not exist on the Hub.
8867
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8668
+ [`HfHubHTTPError`]:
8868
8669
  HTTP 404 if the user access request cannot be found.
8869
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8670
+ [`HfHubHTTPError`]:
8870
8671
  HTTP 404 if the user access request is already in the pending list.
8871
8672
  """
8872
8673
  self._handle_access_request(repo_id, user, "pending", repo_type=repo_type, token=token)
@@ -8899,16 +8700,16 @@ class HfApi:
8899
8700
  To disable authentication, pass `False`.
8900
8701
 
8901
8702
  Raises:
8902
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8703
+ [`HfHubHTTPError`]:
8903
8704
  HTTP 400 if the repo is not gated.
8904
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8705
+ [`HfHubHTTPError`]:
8905
8706
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8906
8707
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8907
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8708
+ [`HfHubHTTPError`]:
8908
8709
  HTTP 404 if the user does not exist on the Hub.
8909
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8710
+ [`HfHubHTTPError`]:
8910
8711
  HTTP 404 if the user access request cannot be found.
8911
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8712
+ [`HfHubHTTPError`]:
8912
8713
  HTTP 404 if the user access request is already in the accepted list.
8913
8714
  """
8914
8715
  self._handle_access_request(repo_id, user, "accepted", repo_type=repo_type, token=token)
@@ -8949,16 +8750,16 @@ class HfApi:
8949
8750
  To disable authentication, pass `False`.
8950
8751
 
8951
8752
  Raises:
8952
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8753
+ [`HfHubHTTPError`]:
8953
8754
  HTTP 400 if the repo is not gated.
8954
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8755
+ [`HfHubHTTPError`]:
8955
8756
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
8956
8757
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
8957
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8758
+ [`HfHubHTTPError`]:
8958
8759
  HTTP 404 if the user does not exist on the Hub.
8959
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8760
+ [`HfHubHTTPError`]:
8960
8761
  HTTP 404 if the user access request cannot be found.
8961
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8762
+ [`HfHubHTTPError`]:
8962
8763
  HTTP 404 if the user access request is already in the rejected list.
8963
8764
  """
8964
8765
  self._handle_access_request(
@@ -9022,14 +8823,14 @@ class HfApi:
9022
8823
  To disable authentication, pass `False`.
9023
8824
 
9024
8825
  Raises:
9025
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8826
+ [`HfHubHTTPError`]:
9026
8827
  HTTP 400 if the repo is not gated.
9027
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8828
+ [`HfHubHTTPError`]:
9028
8829
  HTTP 400 if the user already has access to the repo.
9029
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8830
+ [`HfHubHTTPError`]:
9030
8831
  HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
9031
8832
  or `admin` role in the organization the repo belongs to or if you passed a `read` token.
9032
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
8833
+ [`HfHubHTTPError`]:
9033
8834
  HTTP 404 if the user does not exist on the Hub.
9034
8835
  """
9035
8836
  if repo_type not in constants.REPO_TYPES:
@@ -9103,7 +8904,7 @@ class HfApi:
9103
8904
  return webhook
9104
8905
 
9105
8906
  @validate_hf_hub_args
9106
- def list_webhooks(self, *, token: Union[bool, str, None] = None) -> List[WebhookInfo]:
8907
+ def list_webhooks(self, *, token: Union[bool, str, None] = None) -> list[WebhookInfo]:
9107
8908
  """List all configured webhooks.
9108
8909
 
9109
8910
  Args:
@@ -9113,7 +8914,7 @@ class HfApi:
9113
8914
  To disable authentication, pass `False`.
9114
8915
 
9115
8916
  Returns:
9116
- `List[WebhookInfo]`:
8917
+ `list[WebhookInfo]`:
9117
8918
  List of webhook info objects.
9118
8919
 
9119
8920
  Example:
@@ -9159,8 +8960,8 @@ class HfApi:
9159
8960
  *,
9160
8961
  url: Optional[str] = None,
9161
8962
  job_id: Optional[str] = None,
9162
- watched: List[Union[Dict, WebhookWatchedItem]],
9163
- domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None,
8963
+ watched: list[Union[dict, WebhookWatchedItem]],
8964
+ domains: Optional[list[constants.WEBHOOK_DOMAIN_T]] = None,
9164
8965
  secret: Optional[str] = None,
9165
8966
  token: Union[bool, str, None] = None,
9166
8967
  ) -> WebhookInfo:
@@ -9175,10 +8976,10 @@ class HfApi:
9175
8976
  job_id (`str`):
9176
8977
  ID of the source Job to trigger with the webhook payload in the environment variable WEBHOOK_PAYLOAD.
9177
8978
  Additional environment variables are available for convenience: WEBHOOK_REPO_ID, WEBHOOK_REPO_TYPE and WEBHOOK_SECRET.
9178
- watched (`List[WebhookWatchedItem]`):
8979
+ watched (`list[WebhookWatchedItem]`):
9179
8980
  List of [`WebhookWatchedItem`] to be watched by the webhook. It can be users, orgs, models, datasets or spaces.
9180
8981
  Watched items can also be provided as plain dictionaries.
9181
- domains (`List[Literal["repo", "discussion"]]`, optional):
8982
+ domains (`list[Literal["repo", "discussion"]]`, optional):
9182
8983
  List of domains to watch. It can be "repo", "discussion" or both.
9183
8984
  secret (`str`, optional):
9184
8985
  A secret to sign the payload with.
@@ -9289,8 +9090,8 @@ class HfApi:
9289
9090
  webhook_id: str,
9290
9091
  *,
9291
9092
  url: Optional[str] = None,
9292
- watched: Optional[List[Union[Dict, WebhookWatchedItem]]] = None,
9293
- domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None,
9093
+ watched: Optional[list[Union[dict, WebhookWatchedItem]]] = None,
9094
+ domains: Optional[list[constants.WEBHOOK_DOMAIN_T]] = None,
9294
9095
  secret: Optional[str] = None,
9295
9096
  token: Union[bool, str, None] = None,
9296
9097
  ) -> WebhookInfo:
@@ -9301,10 +9102,10 @@ class HfApi:
9301
9102
  The unique identifier of the webhook to be updated.
9302
9103
  url (`str`, optional):
9303
9104
  The URL to which the payload will be sent.
9304
- watched (`List[WebhookWatchedItem]`, optional):
9105
+ watched (`list[WebhookWatchedItem]`, optional):
9305
9106
  List of items to watch. It can be users, orgs, models, datasets, or spaces.
9306
9107
  Refer to [`WebhookWatchedItem`] for more details. Watched items can also be provided as plain dictionaries.
9307
- domains (`List[Literal["repo", "discussion"]]`, optional):
9108
+ domains (`list[Literal["repo", "discussion"]]`, optional):
9308
9109
  The domains to watch. This can include "repo", "discussion", or both.
9309
9110
  secret (`str`, optional):
9310
9111
  A secret to sign the payload with, providing an additional layer of security.
@@ -9506,8 +9307,8 @@ class HfApi:
9506
9307
  token: Union[bool, str, None] = None,
9507
9308
  library_name: Optional[str] = None,
9508
9309
  library_version: Optional[str] = None,
9509
- user_agent: Union[Dict, str, None] = None,
9510
- ) -> Dict[str, str]:
9310
+ user_agent: Union[dict, str, None] = None,
9311
+ ) -> dict[str, str]:
9511
9312
  """
9512
9313
  Alias for [`build_hf_headers`] that uses the token from [`HfApi`] client
9513
9314
  when `token` is not provided.
@@ -9529,9 +9330,9 @@ class HfApi:
9529
9330
  repo_type: Optional[str],
9530
9331
  revision: Optional[str],
9531
9332
  path_in_repo: str,
9532
- delete_patterns: Optional[Union[List[str], str]],
9333
+ delete_patterns: Optional[Union[list[str], str]],
9533
9334
  token: Union[bool, str, None] = None,
9534
- ) -> List[CommitOperationDelete]:
9335
+ ) -> list[CommitOperationDelete]:
9535
9336
  """Generate the list of Delete operations for a commit to delete files from a repo.
9536
9337
 
9537
9338
  List remote files and match them against the `delete_patterns` constraints. Returns a list of [`CommitOperationDelete`]
@@ -9567,11 +9368,11 @@ class HfApi:
9567
9368
  self,
9568
9369
  folder_path: Union[str, Path],
9569
9370
  path_in_repo: str,
9570
- allow_patterns: Optional[Union[List[str], str]] = None,
9571
- ignore_patterns: Optional[Union[List[str], str]] = None,
9371
+ allow_patterns: Optional[Union[list[str], str]] = None,
9372
+ ignore_patterns: Optional[Union[list[str], str]] = None,
9572
9373
  repo_type: Optional[str] = None,
9573
9374
  token: Union[bool, str, None] = None,
9574
- ) -> List[CommitOperationAdd]:
9375
+ ) -> list[CommitOperationAdd]:
9575
9376
  """Generate the list of Add operations for a commit to upload a folder.
9576
9377
 
9577
9378
  Files not matching the `allow_patterns` (allowlist) and `ignore_patterns` (denylist)
@@ -9686,7 +9487,7 @@ class HfApi:
9686
9487
  `User`: A [`User`] object with the user's overview.
9687
9488
 
9688
9489
  Raises:
9689
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
9490
+ [`HfHubHTTPError`]:
9690
9491
  HTTP 404 If the user does not exist on the Hub.
9691
9492
  """
9692
9493
  r = get_session().get(
@@ -9722,6 +9523,35 @@ class HfApi:
9722
9523
  hf_raise_for_status(r)
9723
9524
  return Organization(**r.json())
9724
9525
 
9526
+ @validate_hf_hub_args
9527
+ def list_organization_followers(self, organization: str, token: Union[bool, str, None] = None) -> Iterable[User]:
9528
+ """
9529
+ List followers of an organization on the Hub.
9530
+
9531
+ Args:
9532
+ organization (`str`):
9533
+ Name of the organization to get the followers of.
9534
+ token (`bool` or `str`, *optional*):
9535
+ A valid user access token (string). Defaults to the locally saved
9536
+ token, which is the recommended method for authentication (see
9537
+ https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
9538
+ To disable authentication, pass `False`.
9539
+
9540
+ Returns:
9541
+ `Iterable[User]`: A list of [`User`] objects with the followers of the organization.
9542
+
9543
+ Raises:
9544
+ [`HfHubHTTPError`]:
9545
+ HTTP 404 If the organization does not exist on the Hub.
9546
+
9547
+ """
9548
+ for follower in paginate(
9549
+ path=f"{constants.ENDPOINT}/api/organizations/{organization}/followers",
9550
+ params={},
9551
+ headers=self._build_hf_headers(token=token),
9552
+ ):
9553
+ yield User(**follower)
9554
+
9725
9555
  def list_organization_members(self, organization: str, token: Union[bool, str, None] = None) -> Iterable[User]:
9726
9556
  """
9727
9557
  List of members of an organization on the Hub.
@@ -9739,7 +9569,7 @@ class HfApi:
9739
9569
  `Iterable[User]`: A list of [`User`] objects with the members of the organization.
9740
9570
 
9741
9571
  Raises:
9742
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
9572
+ [`HfHubHTTPError`]:
9743
9573
  HTTP 404 If the organization does not exist on the Hub.
9744
9574
 
9745
9575
  """
@@ -9767,7 +9597,7 @@ class HfApi:
9767
9597
  `Iterable[User]`: A list of [`User`] objects with the followers of the user.
9768
9598
 
9769
9599
  Raises:
9770
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
9600
+ [`HfHubHTTPError`]:
9771
9601
  HTTP 404 If the user does not exist on the Hub.
9772
9602
 
9773
9603
  """
@@ -9795,7 +9625,7 @@ class HfApi:
9795
9625
  `Iterable[User]`: A list of [`User`] objects with the users followed by the user.
9796
9626
 
9797
9627
  Raises:
9798
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
9628
+ [`HfHubHTTPError`]:
9799
9629
  HTTP 404 If the user does not exist on the Hub.
9800
9630
 
9801
9631
  """
@@ -9864,7 +9694,7 @@ class HfApi:
9864
9694
  `PaperInfo`: A `PaperInfo` object.
9865
9695
 
9866
9696
  Raises:
9867
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
9697
+ [`HfHubHTTPError`]:
9868
9698
  HTTP 404 If the paper does not exist on the Hub.
9869
9699
  """
9870
9700
  path = f"{self.endpoint}/api/papers/{id}"
@@ -9940,9 +9770,9 @@ class HfApi:
9940
9770
  self,
9941
9771
  *,
9942
9772
  image: str,
9943
- command: List[str],
9944
- env: Optional[Dict[str, Any]] = None,
9945
- secrets: Optional[Dict[str, Any]] = None,
9773
+ command: list[str],
9774
+ env: Optional[dict[str, Any]] = None,
9775
+ secrets: Optional[dict[str, Any]] = None,
9946
9776
  flavor: Optional[SpaceHardware] = None,
9947
9777
  timeout: Optional[Union[int, float, str]] = None,
9948
9778
  namespace: Optional[str] = None,
@@ -9957,13 +9787,13 @@ class HfApi:
9957
9787
  Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
9958
9788
  Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
9959
9789
 
9960
- command (`List[str]`):
9790
+ command (`list[str]`):
9961
9791
  The command to run. Example: `["echo", "hello"]`.
9962
9792
 
9963
- env (`Dict[str, Any]`, *optional*):
9793
+ env (`dict[str, Any]`, *optional*):
9964
9794
  Defines the environment variables for the Job.
9965
9795
 
9966
- secrets (`Dict[str, Any]`, *optional*):
9796
+ secrets (`dict[str, Any]`, *optional*):
9967
9797
  Defines the secret environment variables for the Job.
9968
9798
 
9969
9799
  flavor (`str`, *optional*):
@@ -10072,29 +9902,28 @@ class HfApi:
10072
9902
  time.sleep(sleep_time)
10073
9903
  sleep_time = min(max_wait_time, max(min_wait_time, sleep_time * 2))
10074
9904
  try:
10075
- resp = get_session().get(
9905
+ with get_session().stream(
9906
+ "GET",
10076
9907
  f"https://huggingface.co/api/jobs/{namespace}/{job_id}/logs",
10077
9908
  headers=self._build_hf_headers(token=token),
10078
- stream=True,
10079
9909
  timeout=120,
10080
- )
10081
- log = None
10082
- for line in resp.iter_lines(chunk_size=1):
10083
- line = line.decode("utf-8")
10084
- if line and line.startswith("data: {"):
10085
- data = json.loads(line[len("data: ") :])
10086
- # timestamp = data["timestamp"]
10087
- if not data["data"].startswith("===== Job started"):
10088
- logging_started = True
10089
- log = data["data"]
10090
- yield log
10091
- logging_finished = logging_started
10092
- except requests.exceptions.ChunkedEncodingError:
9910
+ ) as response:
9911
+ log = None
9912
+ for line in response.iter_lines():
9913
+ if line and line.startswith("data: {"):
9914
+ data = json.loads(line[len("data: ") :])
9915
+ # timestamp = data["timestamp"]
9916
+ if not data["data"].startswith("===== Job started"):
9917
+ logging_started = True
9918
+ log = data["data"]
9919
+ yield log
9920
+ logging_finished = logging_started
9921
+ except httpx.DecodingError:
10093
9922
  # Response ended prematurely
10094
9923
  break
10095
9924
  except KeyboardInterrupt:
10096
9925
  break
10097
- except requests.exceptions.ConnectionError as err:
9926
+ except httpx.NetworkError as err:
10098
9927
  is_timeout = err.__context__ and isinstance(getattr(err.__context__, "__cause__", None), TimeoutError)
10099
9928
  if logging_started or not is_timeout:
10100
9929
  raise
@@ -10117,7 +9946,7 @@ class HfApi:
10117
9946
  timeout: Optional[int] = None,
10118
9947
  namespace: Optional[str] = None,
10119
9948
  token: Union[bool, str, None] = None,
10120
- ) -> List[JobInfo]:
9949
+ ) -> list[JobInfo]:
10121
9950
  """
10122
9951
  List compute Jobs on Hugging Face infrastructure.
10123
9952
 
@@ -10228,12 +10057,12 @@ class HfApi:
10228
10057
  self,
10229
10058
  script: str,
10230
10059
  *,
10231
- script_args: Optional[List[str]] = None,
10232
- dependencies: Optional[List[str]] = None,
10060
+ script_args: Optional[list[str]] = None,
10061
+ dependencies: Optional[list[str]] = None,
10233
10062
  python: Optional[str] = None,
10234
10063
  image: Optional[str] = None,
10235
- env: Optional[Dict[str, Any]] = None,
10236
- secrets: Optional[Dict[str, Any]] = None,
10064
+ env: Optional[dict[str, Any]] = None,
10065
+ secrets: Optional[dict[str, Any]] = None,
10237
10066
  flavor: Optional[SpaceHardware] = None,
10238
10067
  timeout: Optional[Union[int, float, str]] = None,
10239
10068
  namespace: Optional[str] = None,
@@ -10247,10 +10076,10 @@ class HfApi:
10247
10076
  script (`str`):
10248
10077
  Path or URL of the UV script, or a command.
10249
10078
 
10250
- script_args (`List[str]`, *optional*)
10079
+ script_args (`list[str]`, *optional*)
10251
10080
  Arguments to pass to the script or command.
10252
10081
 
10253
- dependencies (`List[str]`, *optional*)
10082
+ dependencies (`list[str]`, *optional*)
10254
10083
  Dependencies to use to run the UV script.
10255
10084
 
10256
10085
  python (`str`, *optional*)
@@ -10259,10 +10088,10 @@ class HfApi:
10259
10088
  image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm"):
10260
10089
  Use a custom Docker image with `uv` installed.
10261
10090
 
10262
- env (`Dict[str, Any]`, *optional*):
10091
+ env (`dict[str, Any]`, *optional*):
10263
10092
  Defines the environment variables for the Job.
10264
10093
 
10265
- secrets (`Dict[str, Any]`, *optional*):
10094
+ secrets (`dict[str, Any]`, *optional*):
10266
10095
  Defines the secret environment variables for the Job.
10267
10096
 
10268
10097
  flavor (`str`, *optional*):
@@ -10342,12 +10171,12 @@ class HfApi:
10342
10171
  self,
10343
10172
  *,
10344
10173
  image: str,
10345
- command: List[str],
10174
+ command: list[str],
10346
10175
  schedule: str,
10347
10176
  suspend: Optional[bool] = None,
10348
10177
  concurrency: Optional[bool] = None,
10349
- env: Optional[Dict[str, Any]] = None,
10350
- secrets: Optional[Dict[str, Any]] = None,
10178
+ env: Optional[dict[str, Any]] = None,
10179
+ secrets: Optional[dict[str, Any]] = None,
10351
10180
  flavor: Optional[SpaceHardware] = None,
10352
10181
  timeout: Optional[Union[int, float, str]] = None,
10353
10182
  namespace: Optional[str] = None,
@@ -10362,7 +10191,7 @@ class HfApi:
10362
10191
  Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
10363
10192
  Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
10364
10193
 
10365
- command (`List[str]`):
10194
+ command (`list[str]`):
10366
10195
  The command to run. Example: `["echo", "hello"]`.
10367
10196
 
10368
10197
  schedule (`str`):
@@ -10375,10 +10204,10 @@ class HfApi:
10375
10204
  concurrency (`bool`, *optional*):
10376
10205
  If True, multiple instances of this Job can run concurrently. Defaults to False.
10377
10206
 
10378
- env (`Dict[str, Any]`, *optional*):
10207
+ env (`dict[str, Any]`, *optional*):
10379
10208
  Defines the environment variables for the Job.
10380
10209
 
10381
- secrets (`Dict[str, Any]`, *optional*):
10210
+ secrets (`dict[str, Any]`, *optional*):
10382
10211
  Defines the secret environment variables for the Job.
10383
10212
 
10384
10213
  flavor (`str`, *optional*):
@@ -10434,7 +10263,7 @@ class HfApi:
10434
10263
  flavor=flavor,
10435
10264
  timeout=timeout,
10436
10265
  )
10437
- input_json: Dict[str, Any] = {
10266
+ input_json: dict[str, Any] = {
10438
10267
  "jobSpec": job_spec,
10439
10268
  "schedule": schedule,
10440
10269
  }
@@ -10457,7 +10286,7 @@ class HfApi:
10457
10286
  timeout: Optional[int] = None,
10458
10287
  namespace: Optional[str] = None,
10459
10288
  token: Union[bool, str, None] = None,
10460
- ) -> List[ScheduledJobInfo]:
10289
+ ) -> list[ScheduledJobInfo]:
10461
10290
  """
10462
10291
  List scheduled compute Jobs on Hugging Face infrastructure.
10463
10292
 
@@ -10615,15 +10444,15 @@ class HfApi:
10615
10444
  self,
10616
10445
  script: str,
10617
10446
  *,
10618
- script_args: Optional[List[str]] = None,
10447
+ script_args: Optional[list[str]] = None,
10619
10448
  schedule: str,
10620
10449
  suspend: Optional[bool] = None,
10621
10450
  concurrency: Optional[bool] = None,
10622
- dependencies: Optional[List[str]] = None,
10451
+ dependencies: Optional[list[str]] = None,
10623
10452
  python: Optional[str] = None,
10624
10453
  image: Optional[str] = None,
10625
- env: Optional[Dict[str, Any]] = None,
10626
- secrets: Optional[Dict[str, Any]] = None,
10454
+ env: Optional[dict[str, Any]] = None,
10455
+ secrets: Optional[dict[str, Any]] = None,
10627
10456
  flavor: Optional[SpaceHardware] = None,
10628
10457
  timeout: Optional[Union[int, float, str]] = None,
10629
10458
  namespace: Optional[str] = None,
@@ -10637,7 +10466,7 @@ class HfApi:
10637
10466
  script (`str`):
10638
10467
  Path or URL of the UV script, or a command.
10639
10468
 
10640
- script_args (`List[str]`, *optional*)
10469
+ script_args (`list[str]`, *optional*)
10641
10470
  Arguments to pass to the script, or a command.
10642
10471
 
10643
10472
  schedule (`str`):
@@ -10650,7 +10479,7 @@ class HfApi:
10650
10479
  concurrency (`bool`, *optional*):
10651
10480
  If True, multiple instances of this Job can run concurrently. Defaults to False.
10652
10481
 
10653
- dependencies (`List[str]`, *optional*)
10482
+ dependencies (`list[str]`, *optional*)
10654
10483
  Dependencies to use to run the UV script.
10655
10484
 
10656
10485
  python (`str`, *optional*)
@@ -10659,10 +10488,10 @@ class HfApi:
10659
10488
  image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm"):
10660
10489
  Use a custom Docker image with `uv` installed.
10661
10490
 
10662
- env (`Dict[str, Any]`, *optional*):
10491
+ env (`dict[str, Any]`, *optional*):
10663
10492
  Defines the environment variables for the Job.
10664
10493
 
10665
- secrets (`Dict[str, Any]`, *optional*):
10494
+ secrets (`dict[str, Any]`, *optional*):
10666
10495
  Defines the secret environment variables for the Job.
10667
10496
 
10668
10497
  flavor (`str`, *optional*):
@@ -10742,15 +10571,15 @@ class HfApi:
10742
10571
  self,
10743
10572
  *,
10744
10573
  script: str,
10745
- script_args: Optional[List[str]],
10746
- dependencies: Optional[List[str]],
10574
+ script_args: Optional[list[str]],
10575
+ dependencies: Optional[list[str]],
10747
10576
  python: Optional[str],
10748
- env: Optional[Dict[str, Any]],
10749
- secrets: Optional[Dict[str, Any]],
10577
+ env: Optional[dict[str, Any]],
10578
+ secrets: Optional[dict[str, Any]],
10750
10579
  namespace: Optional[str],
10751
10580
  token: Union[bool, str, None],
10752
10581
  _repo: Optional[str],
10753
- ) -> Tuple[List[str], Dict[str, Any], Dict[str, Any]]:
10582
+ ) -> tuple[list[str], dict[str, Any], dict[str, Any]]:
10754
10583
  env = env or {}
10755
10584
  secrets = secrets or {}
10756
10585
 
@@ -10882,7 +10711,6 @@ api = HfApi()
10882
10711
 
10883
10712
  whoami = api.whoami
10884
10713
  auth_check = api.auth_check
10885
- get_token_permission = api.get_token_permission
10886
10714
 
10887
10715
  list_models = api.list_models
10888
10716
  model_info = api.model_info
@@ -10912,7 +10740,6 @@ get_dataset_tags = api.get_dataset_tags
10912
10740
  create_commit = api.create_commit
10913
10741
  create_repo = api.create_repo
10914
10742
  delete_repo = api.delete_repo
10915
- update_repo_visibility = api.update_repo_visibility
10916
10743
  update_repo_settings = api.update_repo_settings
10917
10744
  move_repo = api.move_repo
10918
10745
  upload_file = api.upload_file
@@ -11016,6 +10843,7 @@ update_webhook = api.update_webhook
11016
10843
  # User API
11017
10844
  get_user_overview = api.get_user_overview
11018
10845
  get_organization_overview = api.get_organization_overview
10846
+ list_organization_followers = api.list_organization_followers
11019
10847
  list_organization_members = api.list_organization_members
11020
10848
  list_user_followers = api.list_user_followers
11021
10849
  list_user_following = api.list_user_following