huggingface-hub 1.0.0rc1__py3-none-any.whl → 1.0.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of huggingface-hub might be problematic.

Files changed (34)
  1. huggingface_hub/__init__.py +1 -1
  2. huggingface_hub/_commit_api.py +126 -66
  3. huggingface_hub/_commit_scheduler.py +4 -7
  4. huggingface_hub/_login.py +9 -15
  5. huggingface_hub/_tensorboard_logger.py +2 -5
  6. huggingface_hub/_webhooks_server.py +8 -20
  7. huggingface_hub/cli/repo.py +137 -5
  8. huggingface_hub/dataclasses.py +3 -12
  9. huggingface_hub/fastai_utils.py +22 -32
  10. huggingface_hub/file_download.py +18 -21
  11. huggingface_hub/hf_api.py +258 -410
  12. huggingface_hub/hf_file_system.py +17 -44
  13. huggingface_hub/inference/_client.py +25 -47
  14. huggingface_hub/inference/_generated/_async_client.py +25 -47
  15. huggingface_hub/inference/_mcp/agent.py +2 -5
  16. huggingface_hub/inference/_mcp/mcp_client.py +2 -5
  17. huggingface_hub/inference/_providers/__init__.py +11 -0
  18. huggingface_hub/inference/_providers/_common.py +1 -0
  19. huggingface_hub/inference/_providers/publicai.py +6 -0
  20. huggingface_hub/inference/_providers/scaleway.py +28 -0
  21. huggingface_hub/lfs.py +14 -8
  22. huggingface_hub/repocard.py +12 -16
  23. huggingface_hub/serialization/_base.py +3 -6
  24. huggingface_hub/serialization/_torch.py +16 -34
  25. huggingface_hub/utils/_cache_manager.py +41 -71
  26. huggingface_hub/utils/_chunk_utils.py +2 -3
  27. huggingface_hub/utils/_http.py +27 -30
  28. huggingface_hub/utils/logging.py +8 -11
  29. {huggingface_hub-1.0.0rc1.dist-info → huggingface_hub-1.0.0rc2.dist-info}/METADATA +2 -2
  30. {huggingface_hub-1.0.0rc1.dist-info → huggingface_hub-1.0.0rc2.dist-info}/RECORD +34 -32
  31. {huggingface_hub-1.0.0rc1.dist-info → huggingface_hub-1.0.0rc2.dist-info}/LICENSE +0 -0
  32. {huggingface_hub-1.0.0rc1.dist-info → huggingface_hub-1.0.0rc2.dist-info}/WHEEL +0 -0
  33. {huggingface_hub-1.0.0rc1.dist-info → huggingface_hub-1.0.0rc2.dist-info}/entry_points.txt +0 -0
  34. {huggingface_hub-1.0.0rc1.dist-info → huggingface_hub-1.0.0rc2.dist-info}/top_level.txt +0 -0

huggingface_hub/__init__.py CHANGED
@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING
 
 
-__version__ = "1.0.0.rc1"
+__version__ = "1.0.0.rc2"
 
 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when

huggingface_hub/_commit_api.py CHANGED
@@ -33,6 +33,7 @@ from .utils import (
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
+from .utils._runtime import is_xet_available
 
 
 if TYPE_CHECKING:
@@ -353,7 +354,7 @@ def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:
 
 
 @validate_hf_hub_args
-def _upload_lfs_files(
+def _upload_files(
     *,
     additions: list[CommitOperationAdd],
     repo_type: str,
@@ -362,6 +363,86 @@ def _upload_lfs_files(
     endpoint: Optional[str] = None,
     num_threads: int = 5,
     revision: Optional[str] = None,
+    create_pr: Optional[bool] = None,
+):
+    """
+    Negotiates per-file transfer (LFS vs Xet) and uploads in batches.
+    """
+    xet_additions: list[CommitOperationAdd] = []
+    lfs_actions: list[dict[str, Any]] = []
+    lfs_oid2addop: dict[str, CommitOperationAdd] = {}
+
+    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
+        chunk_list = [op for op in chunk]
+
+        transfers: list[str] = ["basic", "multipart"]
+        has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
+        if is_xet_available():
+            if not has_buffered_io_data:
+                transfers.append("xet")
+            else:
+                logger.warning(
+                    "Uploading files as a binary IO buffer is not supported by Xet Storage. "
+                    "Falling back to HTTP upload."
+                )
+
+        actions_chunk, errors_chunk, chosen_transfer = post_lfs_batch_info(
+            upload_infos=[op.upload_info for op in chunk_list],
+            repo_id=repo_id,
+            repo_type=repo_type,
+            revision=revision,
+            endpoint=endpoint,
+            headers=headers,
+            token=None,  # already passed in 'headers'
+            transfers=transfers,
+        )
+        if errors_chunk:
+            message = "\n".join(
+                [
+                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
+                    for err in errors_chunk
+                ]
+            )
+            raise ValueError(f"LFS batch API returned errors:\n{message}")
+
+        # If server returns a transfer we didn't offer (e.g "xet" while uploading from BytesIO),
+        # fall back to LFS for this chunk.
+        if chosen_transfer == "xet" and ("xet" in transfers):
+            xet_additions.extend(chunk_list)
+        else:
+            lfs_actions.extend(actions_chunk)
+            for op in chunk_list:
+                lfs_oid2addop[op.upload_info.sha256.hex()] = op
+
+    if len(lfs_actions) > 0:
+        _upload_lfs_files(
+            actions=lfs_actions,
+            oid2addop=lfs_oid2addop,
+            headers=headers,
+            endpoint=endpoint,
+            num_threads=num_threads,
+        )
+
+    if len(xet_additions) > 0:
+        _upload_xet_files(
+            additions=xet_additions,
+            repo_type=repo_type,
+            repo_id=repo_id,
+            headers=headers,
+            endpoint=endpoint,
+            revision=revision,
+            create_pr=create_pr,
+        )
+
+
+@validate_hf_hub_args
+def _upload_lfs_files(
+    *,
+    actions: list[dict[str, Any]],
+    oid2addop: dict[str, CommitOperationAdd],
+    headers: dict[str, str],
+    endpoint: Optional[str] = None,
+    num_threads: int = 5,
 ):
     """
     Uploads the content of `additions` to the Hub using the large file storage protocol.
@@ -370,9 +451,21 @@ def _upload_lfs_files(
     - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
 
     Args:
-        additions (`List` of `CommitOperationAdd`):
-            The files to be uploaded
-        repo_type (`str`):
+        actions (`list[dict[str, Any]]`):
+            LFS batch actions returned by the server.
+        oid2addop (`dict[str, CommitOperationAdd]`):
+            A dictionary mapping the OID of the file to the corresponding `CommitOperationAdd` object.
+        headers (`dict[str, str]`):
+            Headers to use for the request, including authorization headers and user agent.
+        endpoint (`str`, *optional*):
+            The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
+        num_threads (`int`, *optional*):
+            The number of concurrent threads to use when uploading. Defaults to 5.
+
+    Raises:
+        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+            If an upload failed for any reason
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
@@ -392,50 +485,17 @@ def _upload_lfs_files(
         [`HfHubHTTPError`]
             If the LFS batch endpoint returned an HTTP error.
     """
-    # Step 1: retrieve upload instructions from the LFS batch endpoint.
-    # Upload instructions are retrieved by chunk of 256 files to avoid reaching
-    # the payload limit.
-    batch_actions: list[dict] = []
-    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
-        batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
-            upload_infos=[op.upload_info for op in chunk],
-            repo_id=repo_id,
-            repo_type=repo_type,
-            revision=revision,
-            endpoint=endpoint,
-            headers=headers,
-            token=None,  # already passed in 'headers'
-        )
-
-        # If at least 1 error, we do not retrieve information for other chunks
-        if batch_errors_chunk:
-            message = "\n".join(
-                [
-                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
-                    for err in batch_errors_chunk
-                ]
-            )
-            raise ValueError(f"LFS batch endpoint returned errors:\n{message}")
-
-        batch_actions += batch_actions_chunk
-    oid2addop = {add_op.upload_info.sha256.hex(): add_op for add_op in additions}
-
-    # Step 2: ignore files that have already been uploaded
+    # Filter out files already present upstream
     filtered_actions = []
-    for action in batch_actions:
+    for action in actions:
         if action.get("actions") is None:
             logger.debug(
-                f"Content of file {oid2addop[action['oid']].path_in_repo} is already"
-                " present upstream - skipping upload."
+                f"Content of file {oid2addop[action['oid']].path_in_repo} is already present upstream - skipping upload."
             )
         else:
             filtered_actions.append(action)
 
-    if len(filtered_actions) == 0:
-        logger.debug("No LFS files to upload.")
-        return
-
-    # Step 3: upload files concurrently according to these instructions
+    # Upload according to server-provided actions
     def _wrapped_lfs_upload(batch_action) -> None:
         try:
             operation = oid2addop[batch_action["oid"]]
@@ -479,7 +539,7 @@ def _upload_xet_files(
     This chunks the files and deduplicates the chunks before uploading them to xetcas storage.
 
     Args:
-        additions (`List` of `CommitOperationAdd`):
+        additions (`list` of `CommitOperationAdd`):
             The files to be uploaded.
         repo_type (`str`):
             Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
@@ -576,30 +636,30 @@ def _upload_xet_files(
         progress, progress_callback = None, None
 
     try:
-        for i, chunk in enumerate(chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES)):
-            _chunk = [op for op in chunk]
-
-            bytes_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, bytes)]
-            paths_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, (str, Path))]
-
-            if len(paths_ops) > 0:
-                upload_files(
-                    [str(op.path_or_fileobj) for op in paths_ops],
-                    xet_endpoint,
-                    access_token_info,
-                    token_refresher,
-                    progress_callback,
-                    repo_type,
-                )
-            if len(bytes_ops) > 0:
-                upload_bytes(
-                    [op.path_or_fileobj for op in bytes_ops],
-                    xet_endpoint,
-                    access_token_info,
-                    token_refresher,
-                    progress_callback,
-                    repo_type,
-                )
+        all_bytes_ops = [op for op in additions if isinstance(op.path_or_fileobj, bytes)]
+        all_paths_ops = [op for op in additions if isinstance(op.path_or_fileobj, (str, Path))]
+
+        if len(all_paths_ops) > 0:
+            all_paths = [str(op.path_or_fileobj) for op in all_paths_ops]
+            upload_files(
+                all_paths,
+                xet_endpoint,
+                access_token_info,
+                token_refresher,
+                progress_callback,
+                repo_type,
+            )
+
+        if len(all_bytes_ops) > 0:
+            all_bytes = [op.path_or_fileobj for op in all_bytes_ops]
+            upload_bytes(
+                all_bytes,
+                xet_endpoint,
+                access_token_info,
+                token_refresher,
+                progress_callback,
+                repo_type,
+            )
 
     finally:
         if progress is not None:
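
The hunks above show that rc2 splits the old `_upload_lfs_files` entry point into `_upload_files` (which negotiates LFS vs Xet per batch) plus a slimmer LFS helper that only executes server-provided actions. These are internal helpers; user code still goes through `HfApi`. A minimal sketch of a call that exercises this path, with placeholder file and repo names:

```python
from huggingface_hub import HfApi

api = HfApi()

# Uploading from a local path (or raw bytes) lets the client advertise "xet"
# during the LFS batch negotiation when the xet runtime is available.
api.upload_file(
    path_or_fileobj="weights.safetensors",  # placeholder local file
    path_in_repo="weights.safetensors",
    repo_id="username/my-model",            # placeholder repo
    repo_type="model",
)

# Passing an open binary stream is still supported, but per the warning added
# above it falls back to the plain HTTP (LFS) upload path.
with open("weights.safetensors", "rb") as f:
    api.upload_file(
        path_or_fileobj=f,
        path_in_repo="weights-from-stream.safetensors",
        repo_id="username/my-model",
        repo_type="model",
    )
```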

huggingface_hub/_commit_scheduler.py CHANGED
@@ -205,13 +205,10 @@ class CommitScheduler:
         """
         Push folder to the Hub and return the commit info.
 
-        <Tip warning={true}>
-
-        This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
-        queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
-        issues.
-
-        </Tip>
+        > [!WARNING]
+        > This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
+        > queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
+        > issues.
 
         The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and
         uploads only changed files. If no changes are found, the method returns without committing anything. If you want
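
For context, the `push_to_hub` method warned about above is driven by the scheduler itself; a typical setup looks roughly like the sketch below (folder and repo names are placeholders):

```python
from huggingface_hub import CommitScheduler

# The scheduler commits the folder in the background every `every` minutes;
# push_to_hub() is invoked internally, so there is no need to call it directly.
scheduler = CommitScheduler(
    repo_id="username/my-dataset",  # placeholder repo
    repo_type="dataset",
    folder_path="data/",            # placeholder local folder
    every=5,                        # minutes between background commits
)
```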
huggingface_hub/_login.py CHANGED
@@ -70,21 +70,15 @@ def login(
     To log in from outside of a script, one can also use `hf auth login` which is
     a cli command that wraps [`login`].
 
-    <Tip>
-
-    [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
-    extends its capabilities.
-
-    </Tip>
-
-    <Tip>
-
-    When the token is not passed, [`login`] will automatically detect if the script runs
-    in a notebook or not. However, this detection might not be accurate due to the
-    variety of notebooks that exists nowadays. If that is the case, you can always force
-    the UI by using [`notebook_login`] or [`interpreter_login`].
-
-    </Tip>
+    > [!TIP]
+    > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
+    > extends its capabilities.
+
+    > [!TIP]
+    > When the token is not passed, [`login`] will automatically detect if the script runs
+    > in a notebook or not. However, this detection might not be accurate due to the
+    > variety of notebooks that exists nowadays. If that is the case, you can always force
+    > the UI by using [`notebook_login`] or [`interpreter_login`].
 
     Args:
         token (`str`, *optional*):
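
The tips rewritten above describe `login`'s token-detection behavior; a short illustration, assuming the token is supplied via an environment variable:

```python
import os

from huggingface_hub import login

# Explicit token: no prompt and no notebook detection needed.
login(token=os.environ.get("HF_TOKEN"))

# Without a token, login() prompts in a terminal or shows the notebook widget
# when it detects a notebook environment, as the tip above explains.
# login()
```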

huggingface_hub/_tensorboard_logger.py CHANGED
@@ -52,11 +52,8 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
     issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
     minutes (default to every 5 minutes).
 
-    <Tip warning={true}>
-
-    `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
-
-    </Tip>
+    > [!WARNING]
+    > `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
 
     Args:
         repo_id (`str`):
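
As a reminder of how the (still experimental) writer is used, a rough sketch assuming a tensorboard backend is installed and the repo id is a placeholder:

```python
from huggingface_hub import HFSummaryWriter

# Logged data is pushed to the Hub every `commit_every` minutes, as described above.
writer = HFSummaryWriter(repo_id="username/my-training-logs", commit_every=5)
writer.add_scalar("train/loss", 0.42, global_step=1)
writer.close()
```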

huggingface_hub/_webhooks_server.py CHANGED
@@ -53,17 +53,11 @@ class WebhooksServer:
     Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
     WebhooksServer and deploy it on a Space.
 
-    <Tip warning={true}>
+    > [!WARNING]
+    > `WebhooksServer` is experimental. Its API is subject to change in the future.
 
-    `WebhooksServer` is experimental. Its API is subject to change in the future.
-
-    </Tip>
-
-    <Tip warning={true}>
-
-    You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
-
-    </Tip>
+    > [!WARNING]
+    > You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
 
     Args:
         ui (`gradio.Blocks`, optional):
@@ -240,17 +234,11 @@ def webhook_endpoint(path: Optional[str] = None) -> Callable:
     Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
     server and deploy it on a Space.
 
-    <Tip warning={true}>
-
-    `webhook_endpoint` is experimental. Its API is subject to change in the future.
-
-    </Tip>
-
-    <Tip warning={true}>
-
-    You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
+    > [!WARNING]
+    > `webhook_endpoint` is experimental. Its API is subject to change in the future.
 
-    </Tip>
+    > [!WARNING]
+    > You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
 
     Args:
         path (`str`, optional):
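
For reference, the decorator documented above is typically used as in the following sketch (requires `gradio`; the payload filtering is illustrative):

```python
from huggingface_hub import WebhookPayload, webhook_endpoint


@webhook_endpoint  # registers the function as a POST route on a managed WebhooksServer
async def trigger_training(payload: WebhookPayload) -> None:
    # React only to dataset content updates, e.g. to kick off a retraining job.
    if payload.repo.type == "dataset" and payload.event.action == "update":
        print(f"Dataset {payload.repo.name} was updated")
```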

huggingface_hub/cli/repo.py CHANGED
@@ -21,6 +21,7 @@ Usage:
     hf repo create my-cool-model --private
 """
 
+import enum
 from typing import Annotated, Optional
 
 import typer
@@ -44,8 +45,16 @@ from ._cli_utils import (
 logger = logging.get_logger(__name__)
 
 repo_cli = typer_factory(help="Manage repos on the Hub.")
-tag_app = typer_factory(help="Manage tags for a repo on the Hub.")
-repo_cli.add_typer(tag_app, name="tag")
+tag_cli = typer_factory(help="Manage tags for a repo on the Hub.")
+branch_cli = typer_factory(help="Manage branches for a repo on the Hub.")
+repo_cli.add_typer(tag_cli, name="tag")
+repo_cli.add_typer(branch_cli, name="branch")
+
+
+class GatedChoices(str, enum.Enum):
+    auto = "auto"
+    manual = "manual"
+    false = "false"
 
 
 @repo_cli.command("create", help="Create a new repo on the Hub.")
@@ -87,7 +96,130 @@ def repo_create(
     print(f"Your repo is now available at {ANSI.bold(repo_url)}")
 
 
-@tag_app.command("create", help="Create a tag for a repo.")
+@repo_cli.command("delete", help="Delete a repo from the Hub. this is an irreversible operation.")
+def repo_delete(
+    repo_id: RepoIdArg,
+    repo_type: RepoTypeOpt = RepoType.model,
+    token: TokenOpt = None,
+    missing_ok: Annotated[
+        bool,
+        typer.Option(
+            help="If set to True, do not raise an error if repo does not exist.",
+        ),
+    ] = False,
+) -> None:
+    api = get_hf_api(token=token)
+    api.delete_repo(
+        repo_id=repo_id,
+        repo_type=repo_type.value,
+        missing_ok=missing_ok,
+    )
+    print(f"Successfully deleted {ANSI.bold(repo_id)} on the Hub.")
+
+
+@repo_cli.command("move", help="Move a repository from a namespace to another namespace.")
+def repo_move(
+    from_id: RepoIdArg,
+    to_id: RepoIdArg,
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+) -> None:
+    api = get_hf_api(token=token)
+    api.move_repo(
+        from_id=from_id,
+        to_id=to_id,
+        repo_type=repo_type.value,
+    )
+    print(f"Successfully moved {ANSI.bold(from_id)} to {ANSI.bold(to_id)} on the Hub.")
+
+
+@repo_cli.command("settings", help="Update the settings of a repository.")
+def repo_settings(
+    repo_id: RepoIdArg,
+    gated: Annotated[
+        Optional[GatedChoices],
+        typer.Option(
+            help="The gated status for the repository.",
+        ),
+    ] = None,
+    private: Annotated[
+        Optional[bool],
+        typer.Option(
+            help="Whether the repository should be private.",
+        ),
+    ] = None,
+    xet_enabled: Annotated[
+        Optional[bool],
+        typer.Option(
+            help=" Whether the repository should be enabled for Xet Storage.",
+        ),
+    ] = None,
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+) -> None:
+    api = get_hf_api(token=token)
+    api.update_repo_settings(
+        repo_id=repo_id,
+        gated=(gated.value if gated else None),  # type: ignore [arg-type]
+        private=private,
+        xet_enabled=xet_enabled,
+        repo_type=repo_type.value,
+    )
+    print(f"Successfully updated the settings of {ANSI.bold(repo_id)} on the Hub.")
+
+
+@branch_cli.command("create", help="Create a new branch for a repo on the Hub.")
+def branch_create(
+    repo_id: RepoIdArg,
+    branch: Annotated[
+        str,
+        typer.Argument(
+            help="The name of the branch to create.",
+        ),
+    ],
+    revision: RevisionOpt = None,
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+    exist_ok: Annotated[
+        bool,
+        typer.Option(
+            help="If set to True, do not raise an error if branch already exists.",
+        ),
+    ] = False,
+) -> None:
+    api = get_hf_api(token=token)
+    api.create_branch(
+        repo_id=repo_id,
+        branch=branch,
+        revision=revision,
+        repo_type=repo_type.value,
+        exist_ok=exist_ok,
+    )
+    print(f"Successfully created {ANSI.bold(branch)} branch on {repo_type.value} {ANSI.bold(repo_id)}")
+
+
+@branch_cli.command("delete", help="Delete a branch from a repo on the Hub.")
+def branch_delete(
+    repo_id: RepoIdArg,
+    branch: Annotated[
+        str,
+        typer.Argument(
+            help="The name of the branch to delete.",
+        ),
+    ],
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+) -> None:
+    api = get_hf_api(token=token)
+    api.delete_branch(
+        repo_id=repo_id,
+        branch=branch,
+        repo_type=repo_type.value,
+    )
+    print(f"Successfully deleted {ANSI.bold(branch)} branch on {repo_type.value} {ANSI.bold(repo_id)}")
+
+
+@tag_cli.command("create", help="Create a tag for a repo.")
 def tag_create(
     repo_id: RepoIdArg,
     tag: Annotated[
@@ -127,7 +259,7 @@ def tag_create(
     print(f"Tag {ANSI.bold(tag)} created on {ANSI.bold(repo_id)}")
 
 
-@tag_app.command("list", help="List tags for a repo.")
+@tag_cli.command("list", help="List tags for a repo.")
 def tag_list(
     repo_id: RepoIdArg,
     token: TokenOpt = None,
@@ -152,7 +284,7 @@ def tag_list(
         print(t.name)
 
 
-@tag_app.command("delete", help="Delete a tag for a repo.")
+@tag_cli.command("delete", help="Delete a tag for a repo.")
 def tag_delete(
     repo_id: RepoIdArg,
     tag: Annotated[
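
The new `hf repo delete/move/settings` and `hf repo branch create/delete` subcommands added above are thin wrappers around existing `HfApi` methods (they are called directly in the hunks). Their Python equivalents, with placeholder repo and branch names:

```python
from huggingface_hub import HfApi

api = HfApi()

api.delete_repo(repo_id="username/old-model", missing_ok=True)               # hf repo delete
api.move_repo(from_id="username/my-model", to_id="my-org/my-model")          # hf repo move
api.update_repo_settings(repo_id="my-org/my-model", private=True)            # hf repo settings
api.create_branch(repo_id="my-org/my-model", branch="dev", exist_ok=True)    # hf repo branch create
api.delete_branch(repo_id="my-org/my-model", branch="dev")                   # hf repo branch delete
```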

huggingface_hub/dataclasses.py CHANGED
@@ -1,18 +1,7 @@
 import inspect
 from dataclasses import _MISSING_TYPE, MISSING, Field, field, fields
 from functools import wraps
-from typing import (
-    Any,
-    Callable,
-    Literal,
-    Optional,
-    Type,
-    TypeVar,
-    Union,
-    get_args,
-    get_origin,
-    overload,
-)
+from typing import Any, Callable, ForwardRef, Literal, Optional, Type, TypeVar, Union, get_args, get_origin, overload
 
 from .errors import (
     StrictDataclassClassValidationError,
@@ -322,6 +311,8 @@ def type_validator(name: str, value: Any, expected_type: Any) -> None:
         validator(name, value, args)
     elif isinstance(expected_type, type):  # simple types
         _validate_simple_type(name, value, expected_type)
+    elif isinstance(expected_type, ForwardRef) or isinstance(expected_type, str):
+        return
     else:
         raise TypeError(f"Unsupported type for field '{name}': {expected_type}")
 
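With the new branch, the validator now silently accepts string and `ForwardRef` annotations instead of falling through to the `TypeError`. A small, purely illustrative check of that behavior:

```python
from typing import ForwardRef

from huggingface_hub.dataclasses import type_validator

# Previously these fell through to TypeError("Unsupported type ..."); now they are skipped.
type_validator("parent", value=None, expected_type="Node")               # string annotation
type_validator("parent", value=None, expected_type=ForwardRef("Node"))   # forward reference
```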

huggingface_hub/fastai_utils.py CHANGED
@@ -34,13 +34,11 @@ def _check_fastai_fastcore_versions(
         fastcore_min_version (`str`, *optional*):
             The minimum fastcore version supported.
 
-    <Tip>
-    Raises the following error:
-
-        - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
-          if the fastai or fastcore libraries are not available or are of an invalid version.
-
-    </Tip>
+    > [!TIP]
+    > Raises the following error:
+    >
+    > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
+    >   if the fastai or fastcore libraries are not available or are of an invalid version.
     """
 
     if (get_fastcore_version() or get_fastai_version()) == "N/A":
@@ -88,15 +86,13 @@ def _check_fastai_fastcore_pyproject_versions(
         fastcore_min_version (`str`, *optional*):
             The minimum fastcore version supported.
 
-    <Tip>
-    Raises the following errors:
-
-        - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
-          if the `toml` module is not installed.
-        - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
-          if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore.
-
-    </Tip>
+    > [!TIP]
+    > Raises the following errors:
+    >
+    > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
+    >   if the `toml` module is not installed.
+    > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
+    >   if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore.
     """
 
     try:
@@ -253,14 +249,11 @@ def _save_pretrained_fastai(
         config (`dict`, *optional*):
             Configuration object. Will be uploaded as a .json file. Example: 'https://huggingface.co/espejelomar/fastai-pet-breeds-classification/blob/main/config.json'.
 
-    <Tip>
-
-    Raises the following error:
-
-        - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError)
-          if the config file provided is not a dictionary.
-
-    </Tip>
+    > [!TIP]
+    > Raises the following error:
+    >
+    > - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError)
+    >   if the config file provided is not a dictionary.
     """
     _check_fastai_fastcore_versions()
 
@@ -394,14 +387,11 @@ def push_to_hub_fastai(
     Returns:
         The url of the commit of your model in the given repository.
 
-    <Tip>
-
-    Raises the following error:
-
-        - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
-          if the user is not log on to the Hugging Face Hub.
-
-    </Tip>
+    > [!TIP]
+    > Raises the following error:
+    >
+    > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+    >   if the user is not log on to the Hugging Face Hub.
     """
     _check_fastai_fastcore_versions()
     api = HfApi(endpoint=api_endpoint)