huggingface_hub-0.36.0-py3-none-any.whl → huggingface_hub-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of huggingface-hub might be problematic.
Files changed (132):
  1. huggingface_hub/__init__.py +33 -45
  2. huggingface_hub/_commit_api.py +39 -43
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +20 -20
  6. huggingface_hub/_login.py +17 -43
  7. huggingface_hub/_oauth.py +8 -8
  8. huggingface_hub/_snapshot_download.py +135 -50
  9. huggingface_hub/_space_api.py +4 -4
  10. huggingface_hub/_tensorboard_logger.py +5 -5
  11. huggingface_hub/_upload_large_folder.py +18 -32
  12. huggingface_hub/_webhooks_payload.py +3 -3
  13. huggingface_hub/_webhooks_server.py +2 -2
  14. huggingface_hub/cli/__init__.py +0 -14
  15. huggingface_hub/cli/_cli_utils.py +143 -39
  16. huggingface_hub/cli/auth.py +105 -171
  17. huggingface_hub/cli/cache.py +594 -361
  18. huggingface_hub/cli/download.py +120 -112
  19. huggingface_hub/cli/hf.py +38 -41
  20. huggingface_hub/cli/jobs.py +689 -1017
  21. huggingface_hub/cli/lfs.py +120 -143
  22. huggingface_hub/cli/repo.py +282 -216
  23. huggingface_hub/cli/repo_files.py +50 -84
  24. huggingface_hub/cli/system.py +6 -25
  25. huggingface_hub/cli/upload.py +198 -220
  26. huggingface_hub/cli/upload_large_folder.py +91 -106
  27. huggingface_hub/community.py +5 -5
  28. huggingface_hub/constants.py +17 -52
  29. huggingface_hub/dataclasses.py +135 -21
  30. huggingface_hub/errors.py +47 -30
  31. huggingface_hub/fastai_utils.py +8 -9
  32. huggingface_hub/file_download.py +351 -303
  33. huggingface_hub/hf_api.py +398 -570
  34. huggingface_hub/hf_file_system.py +101 -66
  35. huggingface_hub/hub_mixin.py +32 -54
  36. huggingface_hub/inference/_client.py +177 -162
  37. huggingface_hub/inference/_common.py +38 -54
  38. huggingface_hub/inference/_generated/_async_client.py +218 -258
  39. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  40. huggingface_hub/inference/_generated/types/base.py +10 -7
  41. huggingface_hub/inference/_generated/types/chat_completion.py +16 -16
  42. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  43. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  44. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  45. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  46. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  47. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  48. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  49. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  50. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  51. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  52. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  53. huggingface_hub/inference/_generated/types/translation.py +2 -2
  54. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  55. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  56. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  57. huggingface_hub/inference/_mcp/agent.py +3 -3
  58. huggingface_hub/inference/_mcp/constants.py +1 -2
  59. huggingface_hub/inference/_mcp/mcp_client.py +33 -22
  60. huggingface_hub/inference/_mcp/types.py +10 -10
  61. huggingface_hub/inference/_mcp/utils.py +4 -4
  62. huggingface_hub/inference/_providers/__init__.py +12 -4
  63. huggingface_hub/inference/_providers/_common.py +62 -24
  64. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  65. huggingface_hub/inference/_providers/cohere.py +3 -3
  66. huggingface_hub/inference/_providers/fal_ai.py +25 -25
  67. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  68. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  69. huggingface_hub/inference/_providers/hf_inference.py +13 -13
  70. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  71. huggingface_hub/inference/_providers/nebius.py +10 -10
  72. huggingface_hub/inference/_providers/novita.py +5 -5
  73. huggingface_hub/inference/_providers/nscale.py +4 -4
  74. huggingface_hub/inference/_providers/replicate.py +15 -15
  75. huggingface_hub/inference/_providers/sambanova.py +6 -6
  76. huggingface_hub/inference/_providers/together.py +7 -7
  77. huggingface_hub/lfs.py +21 -94
  78. huggingface_hub/repocard.py +15 -16
  79. huggingface_hub/repocard_data.py +57 -57
  80. huggingface_hub/serialization/__init__.py +0 -1
  81. huggingface_hub/serialization/_base.py +9 -9
  82. huggingface_hub/serialization/_dduf.py +7 -7
  83. huggingface_hub/serialization/_torch.py +28 -28
  84. huggingface_hub/utils/__init__.py +11 -6
  85. huggingface_hub/utils/_auth.py +5 -5
  86. huggingface_hub/utils/_cache_manager.py +49 -74
  87. huggingface_hub/utils/_deprecation.py +1 -1
  88. huggingface_hub/utils/_dotenv.py +3 -3
  89. huggingface_hub/utils/_fixes.py +0 -10
  90. huggingface_hub/utils/_git_credential.py +3 -3
  91. huggingface_hub/utils/_headers.py +7 -29
  92. huggingface_hub/utils/_http.py +371 -208
  93. huggingface_hub/utils/_pagination.py +4 -4
  94. huggingface_hub/utils/_parsing.py +98 -0
  95. huggingface_hub/utils/_paths.py +5 -5
  96. huggingface_hub/utils/_runtime.py +59 -23
  97. huggingface_hub/utils/_safetensors.py +21 -21
  98. huggingface_hub/utils/_subprocess.py +9 -9
  99. huggingface_hub/utils/_telemetry.py +3 -3
  100. huggingface_hub/{commands/_cli_utils.py → utils/_terminal.py} +4 -9
  101. huggingface_hub/utils/_typing.py +3 -3
  102. huggingface_hub/utils/_validators.py +53 -72
  103. huggingface_hub/utils/_xet.py +16 -16
  104. huggingface_hub/utils/_xet_progress_reporting.py +1 -1
  105. huggingface_hub/utils/insecure_hashlib.py +3 -9
  106. huggingface_hub/utils/tqdm.py +3 -3
  107. {huggingface_hub-0.36.0.dist-info → huggingface_hub-1.0.0.dist-info}/METADATA +16 -35
  108. huggingface_hub-1.0.0.dist-info/RECORD +152 -0
  109. {huggingface_hub-0.36.0.dist-info → huggingface_hub-1.0.0.dist-info}/entry_points.txt +0 -1
  110. huggingface_hub/commands/__init__.py +0 -27
  111. huggingface_hub/commands/delete_cache.py +0 -476
  112. huggingface_hub/commands/download.py +0 -204
  113. huggingface_hub/commands/env.py +0 -39
  114. huggingface_hub/commands/huggingface_cli.py +0 -65
  115. huggingface_hub/commands/lfs.py +0 -200
  116. huggingface_hub/commands/repo.py +0 -151
  117. huggingface_hub/commands/repo_files.py +0 -132
  118. huggingface_hub/commands/scan_cache.py +0 -183
  119. huggingface_hub/commands/tag.py +0 -161
  120. huggingface_hub/commands/upload.py +0 -318
  121. huggingface_hub/commands/upload_large_folder.py +0 -131
  122. huggingface_hub/commands/user.py +0 -208
  123. huggingface_hub/commands/version.py +0 -40
  124. huggingface_hub/inference_api.py +0 -217
  125. huggingface_hub/keras_mixin.py +0 -497
  126. huggingface_hub/repository.py +0 -1471
  127. huggingface_hub/serialization/_tensorflow.py +0 -92
  128. huggingface_hub/utils/_hf_folder.py +0 -68
  129. huggingface_hub-0.36.0.dist-info/RECORD +0 -170
  130. {huggingface_hub-0.36.0.dist-info → huggingface_hub-1.0.0.dist-info}/LICENSE +0 -0
  131. {huggingface_hub-0.36.0.dist-info → huggingface_hub-1.0.0.dist-info}/WHEEL +0 -0
  132. {huggingface_hub-0.36.0.dist-info → huggingface_hub-1.0.0.dist-info}/top_level.txt +0 -0
huggingface_hub/_space_api.py

@@ -15,7 +15,7 @@
 from dataclasses import dataclass
 from datetime import datetime
 from enum import Enum
-from typing import Dict, Optional
+from typing import Optional

 from huggingface_hub.utils import parse_datetime

@@ -128,9 +128,9 @@ class SpaceRuntime:
     requested_hardware: Optional[SpaceHardware]
     sleep_time: Optional[int]
     storage: Optional[SpaceStorage]
-    raw: Dict
+    raw: dict

-    def __init__(self, data: Dict) -> None:
+    def __init__(self, data: dict) -> None:
         self.stage = data["stage"]
         self.hardware = data.get("hardware", {}).get("current")
         self.requested_hardware = data.get("hardware", {}).get("requested")
@@ -160,7 +160,7 @@ class SpaceVariable:
     description: Optional[str]
     updated_at: Optional[datetime]

-    def __init__(self, key: str, values: Dict) -> None:
+    def __init__(self, key: str, values: dict) -> None:
         self.key = key
         self.value = values["value"]
         self.description = values.get("description")
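
Most of the small per-file changes above (and in several hunks that follow) apply one pattern: the deprecated `typing.Dict`/`List`/`Tuple` aliases are replaced by the built-in generics `dict`/`list`/`tuple`, which are valid as annotations on Python 3.9 and newer. A minimal sketch of the pattern, using a hypothetical class name rather than anything from the package:

from typing import Optional


class RuntimeInfoExample:  # hypothetical, for illustration only
    raw: dict  # was `Dict` (imported from `typing`) before this release

    def __init__(self, data: dict, note: Optional[str] = None) -> None:
        # `dict`, `list` and `tuple` work directly as annotations on Python 3.9+
        self.raw = data
        self.note = note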
huggingface_hub/_tensorboard_logger.py

@@ -14,7 +14,7 @@
 """Contains a logger to push training logs to the Hub, using Tensorboard."""

 from pathlib import Path
-from typing import List, Optional, Union
+from typing import Optional, Union

 from ._commit_scheduler import CommitScheduler
 from .errors import EntryNotFoundError
@@ -74,10 +74,10 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
             Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
         path_in_repo (`str`, *optional*):
             The path to the folder in the repo where the logs will be pushed. Defaults to "tensorboard/".
-        repo_allow_patterns (`List[str]` or `str`, *optional*):
+        repo_allow_patterns (`list[str]` or `str`, *optional*):
             A list of patterns to include in the upload. Defaults to `"*.tfevents.*"`. Check out the
             [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
-        repo_ignore_patterns (`List[str]` or `str`, *optional*):
+        repo_ignore_patterns (`list[str]` or `str`, *optional*):
             A list of patterns to exclude in the upload. Check out the
             [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
         token (`str`, *optional*):
@@ -134,8 +134,8 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
         repo_revision: Optional[str] = None,
         repo_private: Optional[bool] = None,
         path_in_repo: Optional[str] = "tensorboard",
-        repo_allow_patterns: Optional[Union[List[str], str]] = "*.tfevents.*",
-        repo_ignore_patterns: Optional[Union[List[str], str]] = None,
+        repo_allow_patterns: Optional[Union[list[str], str]] = "*.tfevents.*",
+        repo_ignore_patterns: Optional[Union[list[str], str]] = None,
         token: Optional[str] = None,
         **kwargs,
     ):
huggingface_hub/_upload_large_folder.py

@@ -24,15 +24,13 @@ import traceback
 from datetime import datetime
 from pathlib import Path
 from threading import Lock
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
 from urllib.parse import quote

-from . import constants
 from ._commit_api import CommitOperationAdd, UploadInfo, _fetch_upload_modes
 from ._local_folder import LocalUploadFileMetadata, LocalUploadFilePaths, get_local_upload_paths, read_upload_metadata
 from .constants import DEFAULT_REVISION, REPO_TYPES
-from .utils import DEFAULT_IGNORE_PATTERNS, filter_repo_objects, tqdm
-from .utils._cache_manager import _format_size
+from .utils import DEFAULT_IGNORE_PATTERNS, _format_size, filter_repo_objects, tqdm
 from .utils._runtime import is_xet_available
 from .utils.sha import sha_fileobj

@@ -44,7 +42,7 @@ logger = logging.getLogger(__name__)

 WAITING_TIME_IF_NO_TASKS = 10  # seconds
 MAX_NB_FILES_FETCH_UPLOAD_MODE = 100
-COMMIT_SIZE_SCALE: List[int] = [20, 50, 75, 100, 125, 200, 250, 400, 600, 1000]
+COMMIT_SIZE_SCALE: list[int] = [20, 50, 75, 100, 125, 200, 250, 400, 600, 1000]

 UPLOAD_BATCH_SIZE_XET = 256  # Max 256 files per upload batch for XET-enabled repos
 UPLOAD_BATCH_SIZE_LFS = 1  # Otherwise, batches of 1 for regular LFS upload
@@ -56,7 +54,7 @@ MAX_FILE_SIZE_GB = 50  # Hard limit for individual file size
 RECOMMENDED_FILE_SIZE_GB = 20  # Recommended maximum for individual file size


-def _validate_upload_limits(paths_list: List[LocalUploadFilePaths]) -> None:
+def _validate_upload_limits(paths_list: list[LocalUploadFilePaths]) -> None:
     """
     Validate upload against repository limits and warn about potential issues.

@@ -85,7 +83,7 @@ def _validate_upload_limits(paths_list: List[LocalUploadFilePaths]) -> None:
     # Track immediate children (files and subdirs) for each folder
     from collections import defaultdict

-    entries_per_folder: Dict[str, Any] = defaultdict(lambda: {"files": 0, "subdirs": set()})
+    entries_per_folder: dict[str, Any] = defaultdict(lambda: {"files": 0, "subdirs": set()})

     for paths in paths_list:
         path = Path(paths.path_in_repo)
@@ -160,8 +158,8 @@ def upload_large_folder_internal(
     repo_type: str,  # Repo type is required!
     revision: Optional[str] = None,
     private: Optional[bool] = None,
-    allow_patterns: Optional[Union[List[str], str]] = None,
-    ignore_patterns: Optional[Union[List[str], str]] = None,
+    allow_patterns: Optional[Union[list[str], str]] = None,
+    ignore_patterns: Optional[Union[list[str], str]] = None,
     num_workers: Optional[int] = None,
     print_report: bool = True,
     print_report_every: int = 60,
@@ -200,16 +198,7 @@ def upload_large_folder_internal(
     logger.info(f"Repo created: {repo_url}")
     repo_id = repo_url.repo_id
     # 2.1 Check if xet is enabled to set batch file upload size
-    is_xet_enabled = (
-        is_xet_available()
-        and api.repo_info(
-            repo_id=repo_id,
-            repo_type=repo_type,
-            revision=revision,
-            expand="xetEnabled",
-        ).xet_enabled
-    )
-    upload_batch_size = UPLOAD_BATCH_SIZE_XET if is_xet_enabled else UPLOAD_BATCH_SIZE_LFS
+    upload_batch_size = UPLOAD_BATCH_SIZE_XET if is_xet_available() else UPLOAD_BATCH_SIZE_LFS

     # 3. List files to upload
     filtered_paths_list = filter_repo_objects(
@@ -284,13 +273,13 @@ class WorkerJob(enum.Enum):
     WAIT = enum.auto()  # if no tasks are available but we don't want to exit


-JOB_ITEM_T = Tuple[LocalUploadFilePaths, LocalUploadFileMetadata]
+JOB_ITEM_T = tuple[LocalUploadFilePaths, LocalUploadFileMetadata]


 class LargeUploadStatus:
     """Contains information, queues and tasks for a large upload process."""

-    def __init__(self, items: List[JOB_ITEM_T], upload_batch_size: int = 1):
+    def __init__(self, items: list[JOB_ITEM_T], upload_batch_size: int = 1):
         self.items = items
         self.queue_sha256: "queue.Queue[JOB_ITEM_T]" = queue.Queue()
         self.queue_get_upload_mode: "queue.Queue[JOB_ITEM_T]" = queue.Queue()
@@ -423,7 +412,7 @@ def _worker_job(
     Read `upload_large_folder` docstring for more information on how tasks are prioritized.
     """
     while True:
-        next_job: Optional[Tuple[WorkerJob, List[JOB_ITEM_T]]] = None
+        next_job: Optional[tuple[WorkerJob, list[JOB_ITEM_T]]] = None

         # Determine next task
         next_job = _determine_next_job(status)
@@ -516,7 +505,7 @@ def _worker_job(
             status.nb_workers_waiting -= 1


-def _determine_next_job(status: LargeUploadStatus) -> Optional[Tuple[WorkerJob, List[JOB_ITEM_T]]]:
+def _determine_next_job(status: LargeUploadStatus) -> Optional[tuple[WorkerJob, list[JOB_ITEM_T]]]:
     with status.lock:
         # 1. Commit if more than 5 minutes since last commit attempt (and at least 1 file)
         if (
@@ -560,10 +549,7 @@ def _determine_next_job(status: LargeUploadStatus) -> Optional[Tuple[WorkerJob,
             return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, MAX_NB_FILES_FETCH_UPLOAD_MODE))

         # 7. Preupload LFS file if at least `status.upload_batch_size` files
-        # Skip if hf_transfer is enabled and there is already a worker preuploading LFS
-        elif status.queue_preupload_lfs.qsize() >= status.upload_batch_size and (
-            status.nb_workers_preupload_lfs == 0 or not constants.HF_HUB_ENABLE_HF_TRANSFER
-        ):
+        elif status.queue_preupload_lfs.qsize() >= status.upload_batch_size:
             status.nb_workers_preupload_lfs += 1
             logger.debug("Job: preupload LFS")
             return (WorkerJob.PREUPLOAD_LFS, _get_n(status.queue_preupload_lfs, status.upload_batch_size))
@@ -639,7 +625,7 @@ def _compute_sha256(item: JOB_ITEM_T) -> None:
     metadata.save(paths)


-def _get_upload_mode(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
+def _get_upload_mode(items: list[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
     """Get upload mode for each file and update metadata.

     Also receive info if the file should be ignored.
@@ -661,7 +647,7 @@ def _get_upload_mode(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_t
     metadata.save(paths)


-def _preupload_lfs(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
+def _preupload_lfs(items: list[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
     """Preupload LFS files and update metadata."""
     additions = [_build_hacky_operation(item) for item in items]
     api.preupload_lfs_files(
@@ -676,7 +662,7 @@ def _preupload_lfs(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_typ
     metadata.save(paths)


-def _commit(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
+def _commit(items: list[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
     """Commit files to the repo."""
     additions = [_build_hacky_operation(item) for item in items]
     api.create_commit(
@@ -721,11 +707,11 @@ def _build_hacky_operation(item: JOB_ITEM_T) -> HackyCommitOperationAdd:
 ####################


-def _get_one(queue: "queue.Queue[JOB_ITEM_T]") -> List[JOB_ITEM_T]:
+def _get_one(queue: "queue.Queue[JOB_ITEM_T]") -> list[JOB_ITEM_T]:
     return [queue.get()]


-def _get_n(queue: "queue.Queue[JOB_ITEM_T]", n: int) -> List[JOB_ITEM_T]:
+def _get_n(queue: "queue.Queue[JOB_ITEM_T]", n: int) -> list[JOB_ITEM_T]:
     return [queue.get() for _ in range(min(queue.qsize(), n))]

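
The hunks above are internal: the worker logic now picks the upload batch size from `is_xet_available()` alone (the per-repo `xetEnabled` lookup and the hf_transfer special case are gone), while the entry point appears unchanged apart from the annotation style. A minimal usage sketch of the documented public API that wraps it, with placeholder repo and folder names:

from huggingface_hub import HfApi

api = HfApi()
# Upload a large local folder in resumable steps (placeholder names below).
api.upload_large_folder(
    repo_id="username/my-dataset",        # hypothetical repo
    folder_path="./path/to/local/folder",
    repo_type="dataset",                  # repo_type is required for this API
)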
huggingface_hub/_webhooks_payload.py

@@ -14,7 +14,7 @@
 # limitations under the License.
 """Contains data structures to parse the webhooks payload."""

-from typing import List, Literal, Optional
+from typing import Literal, Optional

 from .utils import is_pydantic_available

@@ -116,7 +116,7 @@ class WebhookPayloadRepo(ObjectId):
     name: str
     private: bool
     subdomain: Optional[str] = None
-    tags: Optional[List[str]] = None
+    tags: Optional[list[str]] = None
     type: Literal["dataset", "model", "space"]
     url: WebhookPayloadUrl

@@ -134,4 +134,4 @@ class WebhookPayload(BaseModel):
     comment: Optional[WebhookPayloadComment] = None
     webhook: WebhookPayloadWebhook
     movedTo: Optional[WebhookPayloadMovedTo] = None
-    updatedRefs: Optional[List[WebhookPayloadUpdatedRef]] = None
+    updatedRefs: Optional[list[WebhookPayloadUpdatedRef]] = None
huggingface_hub/_webhooks_server.py

@@ -18,7 +18,7 @@ import atexit
 import inspect
 import os
 from functools import wraps
-from typing import TYPE_CHECKING, Any, Callable, Dict, Optional
+from typing import TYPE_CHECKING, Any, Callable, Optional

 from .utils import experimental, is_fastapi_available, is_gradio_available

@@ -109,7 +109,7 @@ class WebhooksServer:
         self._ui = ui

         self.webhook_secret = webhook_secret or os.getenv("WEBHOOK_SECRET")
-        self.registered_webhooks: Dict[str, Callable] = {}
+        self.registered_webhooks: dict[str, Callable] = {}
         _warn_on_empty_secret(self.webhook_secret)

     def add_webhook(self, path: Optional[str] = None) -> Callable:
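
The `WebhooksServer` change is again a type-hint swap (`Dict[str, Callable]` → `dict[str, Callable]`); the decorator-based registration it stores in `registered_webhooks` works as before. A short sketch, assuming the documented `WebhooksServer`/`add_webhook` API and the `fastapi`/`gradio` extras being installed:

from huggingface_hub import WebhookPayload, WebhooksServer

app = WebhooksServer(webhook_secret="my-secret")  # placeholder secret

@app.add_webhook("/dataset_updated")
async def on_dataset_updated(payload: WebhookPayload):
    # Each registered path ends up in `app.registered_webhooks`.
    return {"processed": True, "event": payload.event.action}

app.launch()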
huggingface_hub/cli/__init__.py

@@ -11,17 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-from abc import ABC, abstractmethod
-from argparse import _SubParsersAction
-
-
-class BaseHuggingfaceCLICommand(ABC):
-    @staticmethod
-    @abstractmethod
-    def register_subcommand(parser: _SubParsersAction):
-        raise NotImplementedError()
-
-    @abstractmethod
-    def run(self):
-        raise NotImplementedError()
huggingface_hub/cli/_cli_utils.py

@@ -11,59 +11,163 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Contains a utility for good-looking prints."""
+"""Contains CLI utilities (styling, helpers)."""

+import importlib.metadata
 import os
-from typing import List, Union
+import time
+from enum import Enum
+from pathlib import Path
+from typing import TYPE_CHECKING, Annotated, Optional

+import click
+import typer

-class ANSI:
+from huggingface_hub import __version__, constants
+from huggingface_hub.utils import ANSI, get_session, hf_raise_for_status, installation_method, logging
+
+
+logger = logging.get_logger()
+
+
+if TYPE_CHECKING:
+    from huggingface_hub.hf_api import HfApi
+
+
+def get_hf_api(token: Optional[str] = None) -> "HfApi":
+    # Import here to avoid circular import
+    from huggingface_hub.hf_api import HfApi
+
+    return HfApi(token=token, library_name="huggingface-cli", library_version=__version__)
+
+
+#### TYPER UTILS
+
+
+class AlphabeticalMixedGroup(typer.core.TyperGroup):
     """
-    Helper for en.wikipedia.org/wiki/ANSI_escape_code
+    Typer Group that lists commands and sub-apps mixed and alphabetically.
     """

-    _bold = "\u001b[1m"
-    _gray = "\u001b[90m"
-    _red = "\u001b[31m"
-    _reset = "\u001b[0m"
-    _yellow = "\u001b[33m"
+    def list_commands(self, ctx: click.Context) -> list[str]:  # type: ignore[name-defined]
+        # click.Group stores both commands and sub-groups in `self.commands`
+        return sorted(self.commands.keys())
+
+
+def typer_factory(help: str) -> typer.Typer:
+    return typer.Typer(
+        help=help,
+        add_completion=True,
+        no_args_is_help=True,
+        cls=AlphabeticalMixedGroup,
+        # Disable rich completely for consistent experience
+        rich_markup_mode=None,
+        rich_help_panel=None,
+        pretty_exceptions_enable=False,
+    )
+

-    @classmethod
-    def bold(cls, s: str) -> str:
-        return cls._format(s, cls._bold)
+class RepoType(str, Enum):
+    model = "model"
+    dataset = "dataset"
+    space = "space"

-    @classmethod
-    def gray(cls, s: str) -> str:
-        return cls._format(s, cls._gray)

-    @classmethod
-    def red(cls, s: str) -> str:
-        return cls._format(s, cls._bold + cls._red)
+RepoIdArg = Annotated[
+    str,
+    typer.Argument(
+        help="The ID of the repo (e.g. `username/repo-name`).",
+    ),
+]

-    @classmethod
-    def yellow(cls, s: str) -> str:
-        return cls._format(s, cls._yellow)

-    @classmethod
-    def _format(cls, s: str, code: str) -> str:
-        if os.environ.get("NO_COLOR"):
-            # See https://no-color.org/
-            return s
-        return f"{code}{s}{cls._reset}"
+RepoTypeOpt = Annotated[
+    RepoType,
+    typer.Option(
+        help="The type of repository (model, dataset, or space).",
+    ),
+]

+TokenOpt = Annotated[
+    Optional[str],
+    typer.Option(
+        help="A User Access Token generated from https://huggingface.co/settings/tokens.",
+    ),
+]

-def tabulate(rows: List[List[Union[str, int]]], headers: List[str]) -> str:
+PrivateOpt = Annotated[
+    bool,
+    typer.Option(
+        help="Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already exists.",
+    ),
+]
+
+RevisionOpt = Annotated[
+    Optional[str],
+    typer.Option(
+        help="Git revision id which can be a branch name, a tag, or a commit hash.",
+    ),
+]
+
+
+### PyPI VERSION CHECKER
+
+
+def check_cli_update() -> None:
     """
-    Inspired by:
+    Check whether a newer version of `huggingface_hub` is available on PyPI.
+
+    If a newer version is found, notify the user and suggest updating.
+    If current version is a pre-release (e.g. `1.0.0.rc1`), or a dev version (e.g. `1.0.0.dev1`), no check is performed.

-    - stackoverflow.com/a/8356620/593036
-    - stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
+    This function is called at the entry point of the CLI. It only performs the check once every 24 hours, and any error
+    during the check is caught and logged, to avoid breaking the CLI.
     """
-    col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
-    row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
-    lines = []
-    lines.append(row_format.format(*headers))
-    lines.append(row_format.format(*["-" * w for w in col_widths]))
-    for row in rows:
-        lines.append(row_format.format(*row))
-    return "\n".join(lines)
+    try:
+        _check_cli_update()
+    except Exception:
+        # We don't want the CLI to fail on version checks, no matter the reason.
+        logger.debug("Error while checking for CLI update.", exc_info=True)
+
+
+def _check_cli_update() -> None:
+    current_version = importlib.metadata.version("huggingface_hub")
+
+    # Skip if current version is a pre-release or dev version
+    if any(tag in current_version for tag in ["rc", "dev"]):
+        return
+
+    # Skip if already checked in the last 24 hours
+    if os.path.exists(constants.CHECK_FOR_UPDATE_DONE_PATH):
+        mtime = os.path.getmtime(constants.CHECK_FOR_UPDATE_DONE_PATH)
+        if (time.time() - mtime) < 24 * 3600:
+            return
+
+    # Touch the file to mark that we did the check now
+    Path(constants.CHECK_FOR_UPDATE_DONE_PATH).touch()
+
+    # Check latest version from PyPI
+    response = get_session().get("https://pypi.org/pypi/huggingface_hub/json", timeout=2)
+    hf_raise_for_status(response)
+    data = response.json()
+    latest_version = data["info"]["version"]
+
+    # If latest version is different from current, notify user
+    if current_version != latest_version:
+        method = installation_method()
+        if method == "brew":
+            update_command = "brew upgrade huggingface-cli"
+        elif method == "hf_installer" and os.name == "nt":
+            update_command = 'powershell -NoProfile -Command "iwr -useb https://hf.co/cli/install.ps1 | iex"'
+        elif method == "hf_installer":
+            update_command = "curl -LsSf https://hf.co/cli/install.sh | sh -"
+        else:  # unknown => likely pip
+            update_command = "pip install -U huggingface_hub"
+
+        click.echo(
+            ANSI.yellow(
+                f"A new version of huggingface_hub ({latest_version}) is available! "
+                f"You are using version {current_version}.\n"
+                f"To update, run: {ANSI.bold(update_command)}\n",
+            )
+        )