huggingface-hub 0.35.3__py3-none-any.whl → 0.36.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub might be problematic. See the registry's advisory page for more details.

Files changed (42)
  1. huggingface_hub/__init__.py +7 -1
  2. huggingface_hub/_commit_api.py +125 -65
  3. huggingface_hub/_commit_scheduler.py +4 -7
  4. huggingface_hub/_jobs_api.py +1 -1
  5. huggingface_hub/_login.py +9 -15
  6. huggingface_hub/_tensorboard_logger.py +2 -5
  7. huggingface_hub/_webhooks_server.py +9 -21
  8. huggingface_hub/cli/download.py +2 -2
  9. huggingface_hub/cli/repo_files.py +1 -1
  10. huggingface_hub/cli/upload.py +1 -1
  11. huggingface_hub/cli/upload_large_folder.py +1 -1
  12. huggingface_hub/community.py +16 -8
  13. huggingface_hub/fastai_utils.py +22 -32
  14. huggingface_hub/file_download.py +17 -20
  15. huggingface_hub/hf_api.py +514 -541
  16. huggingface_hub/hf_file_system.py +45 -40
  17. huggingface_hub/inference/_client.py +28 -49
  18. huggingface_hub/inference/_generated/_async_client.py +28 -49
  19. huggingface_hub/inference/_generated/types/image_to_image.py +6 -2
  20. huggingface_hub/inference/_mcp/agent.py +2 -5
  21. huggingface_hub/inference/_mcp/mcp_client.py +2 -5
  22. huggingface_hub/inference/_providers/__init__.py +5 -0
  23. huggingface_hub/inference/_providers/_common.py +1 -0
  24. huggingface_hub/inference/_providers/clarifai.py +13 -0
  25. huggingface_hub/keras_mixin.py +3 -6
  26. huggingface_hub/lfs.py +12 -4
  27. huggingface_hub/repocard.py +12 -16
  28. huggingface_hub/repository.py +15 -21
  29. huggingface_hub/serialization/_base.py +3 -6
  30. huggingface_hub/serialization/_tensorflow.py +3 -6
  31. huggingface_hub/serialization/_torch.py +17 -35
  32. huggingface_hub/utils/_cache_manager.py +41 -71
  33. huggingface_hub/utils/_chunk_utils.py +2 -3
  34. huggingface_hub/utils/_http.py +29 -34
  35. huggingface_hub/utils/_validators.py +2 -2
  36. huggingface_hub/utils/logging.py +8 -11
  37. {huggingface_hub-0.35.3.dist-info → huggingface_hub-0.36.0.dist-info}/METADATA +2 -2
  38. {huggingface_hub-0.35.3.dist-info → huggingface_hub-0.36.0.dist-info}/RECORD +42 -41
  39. {huggingface_hub-0.35.3.dist-info → huggingface_hub-0.36.0.dist-info}/LICENSE +0 -0
  40. {huggingface_hub-0.35.3.dist-info → huggingface_hub-0.36.0.dist-info}/WHEEL +0 -0
  41. {huggingface_hub-0.35.3.dist-info → huggingface_hub-0.36.0.dist-info}/entry_points.txt +0 -0
  42. {huggingface_hub-0.35.3.dist-info → huggingface_hub-0.36.0.dist-info}/top_level.txt +0 -0
@@ -46,7 +46,7 @@ import sys
46
46
  from typing import TYPE_CHECKING
47
47
 
48
48
 
49
- __version__ = "0.35.3"
49
+ __version__ = "0.36.0"
50
50
 
51
51
  # Alphabetical order of definitions is ensured in tests
52
52
  # WARNING: any comment added in this dictionary definition will be lost when
@@ -159,6 +159,7 @@ _SUBMOD_ATTRS = {
159
159
  "GitRefs",
160
160
  "HfApi",
161
161
  "ModelInfo",
162
+ "Organization",
162
163
  "RepoUrl",
163
164
  "SpaceInfo",
164
165
  "User",
@@ -212,6 +213,7 @@ _SUBMOD_ATTRS = {
212
213
  "get_full_repo_name",
213
214
  "get_inference_endpoint",
214
215
  "get_model_tags",
216
+ "get_organization_overview",
215
217
  "get_paths_info",
216
218
  "get_repo_discussions",
217
219
  "get_safetensors_metadata",
@@ -699,6 +701,7 @@ __all__ = [
699
701
  "ObjectDetectionInput",
700
702
  "ObjectDetectionOutputElement",
701
703
  "ObjectDetectionParameters",
704
+ "Organization",
702
705
  "PYTORCH_WEIGHTS_NAME",
703
706
  "Padding",
704
707
  "PyTorchModelHubMixin",
@@ -870,6 +873,7 @@ __all__ = [
870
873
  "get_hf_file_metadata",
871
874
  "get_inference_endpoint",
872
875
  "get_model_tags",
876
+ "get_organization_overview",
873
877
  "get_paths_info",
874
878
  "get_repo_discussions",
875
879
  "get_safetensors_metadata",
@@ -1179,6 +1183,7 @@ if TYPE_CHECKING: # pragma: no cover
1179
1183
  GitRefs, # noqa: F401
1180
1184
  HfApi, # noqa: F401
1181
1185
  ModelInfo, # noqa: F401
1186
+ Organization, # noqa: F401
1182
1187
  RepoUrl, # noqa: F401
1183
1188
  SpaceInfo, # noqa: F401
1184
1189
  User, # noqa: F401
@@ -1232,6 +1237,7 @@ if TYPE_CHECKING: # pragma: no cover
1232
1237
  get_full_repo_name, # noqa: F401
1233
1238
  get_inference_endpoint, # noqa: F401
1234
1239
  get_model_tags, # noqa: F401
1240
+ get_organization_overview, # noqa: F401
1235
1241
  get_paths_info, # noqa: F401
1236
1242
  get_repo_discussions, # noqa: F401
1237
1243
  get_safetensors_metadata, # noqa: F401
@@ -33,6 +33,7 @@ from .utils import (
33
33
  validate_hf_hub_args,
34
34
  )
35
35
  from .utils import tqdm as hf_tqdm
36
+ from .utils._runtime import is_xet_available
36
37
 
37
38
 
38
39
  if TYPE_CHECKING:
@@ -353,7 +354,7 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
353
354
 
354
355
 
355
356
  @validate_hf_hub_args
356
- def _upload_lfs_files(
357
+ def _upload_files(
357
358
  *,
358
359
  additions: List[CommitOperationAdd],
359
360
  repo_type: str,
@@ -362,6 +363,86 @@ def _upload_lfs_files(
362
363
  endpoint: Optional[str] = None,
363
364
  num_threads: int = 5,
364
365
  revision: Optional[str] = None,
366
+ create_pr: Optional[bool] = None,
367
+ ):
368
+ """
369
+ Negotiates per-file transfer (LFS vs Xet) and uploads in batches.
370
+ """
371
+ xet_additions: List[CommitOperationAdd] = []
372
+ lfs_actions: List[Dict] = []
373
+ lfs_oid2addop: Dict[str, CommitOperationAdd] = {}
374
+
375
+ for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
376
+ chunk_list = [op for op in chunk]
377
+
378
+ transfers: List[str] = ["basic", "multipart"]
379
+ has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
380
+ if is_xet_available():
381
+ if not has_buffered_io_data:
382
+ transfers.append("xet")
383
+ else:
384
+ logger.warning(
385
+ "Uploading files as a binary IO buffer is not supported by Xet Storage. "
386
+ "Falling back to HTTP upload."
387
+ )
388
+
389
+ actions_chunk, errors_chunk, chosen_transfer = post_lfs_batch_info(
390
+ upload_infos=[op.upload_info for op in chunk_list],
391
+ repo_id=repo_id,
392
+ repo_type=repo_type,
393
+ revision=revision,
394
+ endpoint=endpoint,
395
+ headers=headers,
396
+ token=None, # already passed in 'headers'
397
+ transfers=transfers,
398
+ )
399
+ if errors_chunk:
400
+ message = "\n".join(
401
+ [
402
+ f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
403
+ for err in errors_chunk
404
+ ]
405
+ )
406
+ raise ValueError(f"LFS batch API returned errors:\n{message}")
407
+
408
+ # If server returns a transfer we didn't offer (e.g "xet" while uploading from BytesIO),
409
+ # fall back to LFS for this chunk.
410
+ if chosen_transfer == "xet" and ("xet" in transfers):
411
+ xet_additions.extend(chunk_list)
412
+ else:
413
+ lfs_actions.extend(actions_chunk)
414
+ for op in chunk_list:
415
+ lfs_oid2addop[op.upload_info.sha256.hex()] = op
416
+
417
+ if len(lfs_actions) > 0:
418
+ _upload_lfs_files(
419
+ actions=lfs_actions,
420
+ oid2addop=lfs_oid2addop,
421
+ headers=headers,
422
+ endpoint=endpoint,
423
+ num_threads=num_threads,
424
+ )
425
+
426
+ if len(xet_additions) > 0:
427
+ _upload_xet_files(
428
+ additions=xet_additions,
429
+ repo_type=repo_type,
430
+ repo_id=repo_id,
431
+ headers=headers,
432
+ endpoint=endpoint,
433
+ revision=revision,
434
+ create_pr=create_pr,
435
+ )
436
+
437
+
438
+ @validate_hf_hub_args
439
+ def _upload_lfs_files(
440
+ *,
441
+ actions: List[Dict],
442
+ oid2addop: Dict[str, CommitOperationAdd],
443
+ headers: Dict[str, str],
444
+ endpoint: Optional[str] = None,
445
+ num_threads: int = 5,
365
446
  ):
366
447
  """
367
448
  Uploads the content of `additions` to the Hub using the large file storage protocol.
@@ -370,9 +451,21 @@ def _upload_lfs_files(
370
451
  - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
371
452
 
372
453
  Args:
373
- additions (`List` of `CommitOperationAdd`):
374
- The files to be uploaded
375
- repo_type (`str`):
454
+ actions (`List[Dict]`):
455
+ LFS batch actions returned by the server.
456
+ oid2addop (`Dict[str, CommitOperationAdd]`):
457
+ A dictionary mapping the OID of the file to the corresponding `CommitOperationAdd` object.
458
+ headers (`Dict[str, str]`):
459
+ Headers to use for the request, including authorization headers and user agent.
460
+ endpoint (`str`, *optional*):
461
+ The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
462
+ num_threads (`int`, *optional*):
463
+ The number of concurrent threads to use when uploading. Defaults to 5.
464
+
465
+ Raises:
466
+ [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
467
+ If an upload failed for any reason
468
+ [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
376
469
  Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
377
470
  repo_id (`str`):
378
471
  A namespace (user or an organization) and a repo name separated
@@ -392,50 +485,17 @@ def _upload_lfs_files(
392
485
  [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
393
486
  If the LFS batch endpoint returned an HTTP error.
394
487
  """
395
- # Step 1: retrieve upload instructions from the LFS batch endpoint.
396
- # Upload instructions are retrieved by chunk of 256 files to avoid reaching
397
- # the payload limit.
398
- batch_actions: List[Dict] = []
399
- for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
400
- batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
401
- upload_infos=[op.upload_info for op in chunk],
402
- repo_id=repo_id,
403
- repo_type=repo_type,
404
- revision=revision,
405
- endpoint=endpoint,
406
- headers=headers,
407
- token=None, # already passed in 'headers'
408
- )
409
-
410
- # If at least 1 error, we do not retrieve information for other chunks
411
- if batch_errors_chunk:
412
- message = "\n".join(
413
- [
414
- f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
415
- for err in batch_errors_chunk
416
- ]
417
- )
418
- raise ValueError(f"LFS batch endpoint returned errors:\n{message}")
419
-
420
- batch_actions += batch_actions_chunk
421
- oid2addop = {add_op.upload_info.sha256.hex(): add_op for add_op in additions}
422
-
423
- # Step 2: ignore files that have already been uploaded
488
+ # Filter out files already present upstream
424
489
  filtered_actions = []
425
- for action in batch_actions:
490
+ for action in actions:
426
491
  if action.get("actions") is None:
427
492
  logger.debug(
428
- f"Content of file {oid2addop[action['oid']].path_in_repo} is already"
429
- " present upstream - skipping upload."
493
+ f"Content of file {oid2addop[action['oid']].path_in_repo} is already present upstream - skipping upload."
430
494
  )
431
495
  else:
432
496
  filtered_actions.append(action)
433
497
 
434
- if len(filtered_actions) == 0:
435
- logger.debug("No LFS files to upload.")
436
- return
437
-
438
- # Step 3: upload files concurrently according to these instructions
498
+ # Upload according to server-provided actions
439
499
  def _wrapped_lfs_upload(batch_action) -> None:
440
500
  try:
441
501
  operation = oid2addop[batch_action["oid"]]
@@ -576,30 +636,30 @@ def _upload_xet_files(
576
636
  progress, progress_callback = None, None
577
637
 
578
638
  try:
579
- for i, chunk in enumerate(chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES)):
580
- _chunk = [op for op in chunk]
581
-
582
- bytes_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, bytes)]
583
- paths_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, (str, Path))]
584
-
585
- if len(paths_ops) > 0:
586
- upload_files(
587
- [str(op.path_or_fileobj) for op in paths_ops],
588
- xet_endpoint,
589
- access_token_info,
590
- token_refresher,
591
- progress_callback,
592
- repo_type,
593
- )
594
- if len(bytes_ops) > 0:
595
- upload_bytes(
596
- [op.path_or_fileobj for op in bytes_ops],
597
- xet_endpoint,
598
- access_token_info,
599
- token_refresher,
600
- progress_callback,
601
- repo_type,
602
- )
639
+ all_bytes_ops = [op for op in additions if isinstance(op.path_or_fileobj, bytes)]
640
+ all_paths_ops = [op for op in additions if isinstance(op.path_or_fileobj, (str, Path))]
641
+
642
+ if len(all_paths_ops) > 0:
643
+ all_paths = [str(op.path_or_fileobj) for op in all_paths_ops]
644
+ upload_files(
645
+ all_paths,
646
+ xet_endpoint,
647
+ access_token_info,
648
+ token_refresher,
649
+ progress_callback,
650
+ repo_type,
651
+ )
652
+
653
+ if len(all_bytes_ops) > 0:
654
+ all_bytes = [op.path_or_fileobj for op in all_bytes_ops]
655
+ upload_bytes(
656
+ all_bytes,
657
+ xet_endpoint,
658
+ access_token_info,
659
+ token_refresher,
660
+ progress_callback,
661
+ repo_type,
662
+ )
603
663
 
604
664
  finally:
605
665
  if progress is not None:
@@ -205,13 +205,10 @@ class CommitScheduler:
205
205
  """
206
206
  Push folder to the Hub and return the commit info.
207
207
 
208
- <Tip warning={true}>
209
-
210
- This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
211
- queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
212
- issues.
213
-
214
- </Tip>
208
+ > [!WARNING]
209
+ > This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
210
+ > queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
211
+ > issues.
215
212
 
216
213
  The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and
217
214
  uploads only changed files. If no changes are found, the method returns without committing anything. If you want
@@ -30,7 +30,7 @@ class JobStage(str, Enum):
30
30
  ```py
31
31
  assert JobStage.COMPLETED == "COMPLETED"
32
32
  ```
33
-
33
+ Possible values are: `COMPLETED`, `CANCELED`, `ERROR`, `DELETED`, `RUNNING`.
34
34
  Taken from https://github.com/huggingface/moon-landing/blob/main/server/job_types/JobInfo.ts#L61 (private url).
35
35
  """
36
36
 
huggingface_hub/_login.py CHANGED
@@ -78,21 +78,15 @@ def login(
78
78
  To log in from outside of a script, one can also use `hf auth login` which is
79
79
  a cli command that wraps [`login`].
80
80
 
81
- <Tip>
82
-
83
- [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
84
- extends its capabilities.
85
-
86
- </Tip>
87
-
88
- <Tip>
89
-
90
- When the token is not passed, [`login`] will automatically detect if the script runs
91
- in a notebook or not. However, this detection might not be accurate due to the
92
- variety of notebooks that exists nowadays. If that is the case, you can always force
93
- the UI by using [`notebook_login`] or [`interpreter_login`].
94
-
95
- </Tip>
81
+ > [!TIP]
82
+ > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
83
+ > extends its capabilities.
84
+
85
+ > [!TIP]
86
+ > When the token is not passed, [`login`] will automatically detect if the script runs
87
+ > in a notebook or not. However, this detection might not be accurate due to the
88
+ > variety of notebooks that exists nowadays. If that is the case, you can always force
89
+ > the UI by using [`notebook_login`] or [`interpreter_login`].
96
90
 
97
91
  Args:
98
92
  token (`str`, *optional*):
@@ -52,11 +52,8 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
52
52
  issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
53
53
  minutes (default to every 5 minutes).
54
54
 
55
- <Tip warning={true}>
56
-
57
- `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
58
-
59
- </Tip>
55
+ > [!WARNING]
56
+ > `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
60
57
 
61
58
  Args:
62
59
  repo_id (`str`):
@@ -32,7 +32,7 @@ if is_fastapi_available():
32
32
  from fastapi.responses import JSONResponse
33
33
  else:
34
34
  # Will fail at runtime if FastAPI is not available
35
- FastAPI = Request = JSONResponse = None # type: ignore [misc, assignment]
35
+ FastAPI = Request = JSONResponse = None # type: ignore
36
36
 
37
37
 
38
38
  _global_app: Optional["WebhooksServer"] = None
@@ -53,17 +53,11 @@ class WebhooksServer:
53
53
  Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
54
54
  WebhooksServer and deploy it on a Space.
55
55
 
56
- <Tip warning={true}>
56
+ > [!WARNING]
57
+ > `WebhooksServer` is experimental. Its API is subject to change in the future.
57
58
 
58
- `WebhooksServer` is experimental. Its API is subject to change in the future.
59
-
60
- </Tip>
61
-
62
- <Tip warning={true}>
63
-
64
- You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
65
-
66
- </Tip>
59
+ > [!WARNING]
60
+ > You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
67
61
 
68
62
  Args:
69
63
  ui (`gradio.Blocks`, optional):
@@ -240,17 +234,11 @@ def webhook_endpoint(path: Optional[str] = None) -> Callable:
240
234
  Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
241
235
  server and deploy it on a Space.
242
236
 
243
- <Tip warning={true}>
244
-
245
- `webhook_endpoint` is experimental. Its API is subject to change in the future.
246
-
247
- </Tip>
248
-
249
- <Tip warning={true}>
250
-
251
- You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
237
+ > [!WARNING]
238
+ > `webhook_endpoint` is experimental. Its API is subject to change in the future.
252
239
 
253
- </Tip>
240
+ > [!WARNING]
241
+ > You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
254
242
 
255
243
  Args:
256
244
  path (`str`, optional):
@@ -155,7 +155,7 @@ class DownloadCommand(BaseHuggingfaceCLICommand):
155
155
  force_download=self.force_download,
156
156
  token=self.token,
157
157
  local_dir=self.local_dir,
158
- library_name="hf",
158
+ library_name="huggingface-cli",
159
159
  )
160
160
 
161
161
  # Otherwise: use `snapshot_download` to ensure all files comes from same revision
@@ -176,6 +176,6 @@ class DownloadCommand(BaseHuggingfaceCLICommand):
176
176
  cache_dir=self.cache_dir,
177
177
  token=self.token,
178
178
  local_dir=self.local_dir,
179
- library_name="hf",
179
+ library_name="huggingface-cli",
180
180
  max_workers=self.max_workers,
181
181
  )
@@ -51,7 +51,7 @@ class DeleteFilesSubCommand:
51
51
  self.repo_id: str = args.repo_id
52
52
  self.repo_type: Optional[str] = args.repo_type
53
53
  self.revision: Optional[str] = args.revision
54
- self.api: HfApi = HfApi(token=args.token, library_name="hf")
54
+ self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
55
55
  self.patterns: List[str] = args.patterns
56
56
  self.commit_message: Optional[str] = args.commit_message
57
57
  self.commit_description: Optional[str] = args.commit_description
@@ -151,7 +151,7 @@ class UploadCommand(BaseHuggingfaceCLICommand):
151
151
  self.commit_message: Optional[str] = args.commit_message
152
152
  self.commit_description: Optional[str] = args.commit_description
153
153
  self.create_pr: bool = args.create_pr
154
- self.api: HfApi = HfApi(token=args.token, library_name="hf")
154
+ self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
155
155
  self.quiet: bool = args.quiet # disable warnings and progress bars
156
156
 
157
157
  # Check `--every` is valid
@@ -79,7 +79,7 @@ class UploadLargeFolderCommand(BaseHuggingfaceCLICommand):
79
79
  self.include: Optional[List[str]] = args.include
80
80
  self.exclude: Optional[List[str]] = args.exclude
81
81
 
82
- self.api: HfApi = HfApi(token=args.token, library_name="hf")
82
+ self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
83
83
 
84
84
  self.num_workers: Optional[int] = args.num_workers
85
85
  self.no_report: bool = args.no_report
@@ -7,7 +7,7 @@ for more information on Pull Requests, Discussions, and the community tab.
7
7
 
8
8
  from dataclasses import dataclass
9
9
  from datetime import datetime
10
- from typing import List, Literal, Optional, Union
10
+ from typing import List, Literal, Optional, TypedDict, Union
11
11
 
12
12
  from . import constants
13
13
  from .utils import parse_datetime
@@ -143,6 +143,14 @@ class DiscussionWithDetails(Discussion):
143
143
  diff: Optional[str]
144
144
 
145
145
 
146
+ class DiscussionEventArgs(TypedDict):
147
+ id: str
148
+ type: str
149
+ created_at: datetime
150
+ author: str
151
+ _event: dict
152
+
153
+
146
154
  @dataclass
147
155
  class DiscussionEvent:
148
156
  """
@@ -319,13 +327,13 @@ def deserialize_event(event: dict) -> DiscussionEvent:
319
327
  event_type: str = event["type"]
320
328
  created_at = parse_datetime(event["createdAt"])
321
329
 
322
- common_args = dict(
323
- id=event_id,
324
- type=event_type,
325
- created_at=created_at,
326
- author=event.get("author", {}).get("name", "deleted"),
327
- _event=event,
328
- )
330
+ common_args: DiscussionEventArgs = {
331
+ "id": event_id,
332
+ "type": event_type,
333
+ "created_at": created_at,
334
+ "author": event.get("author", {}).get("name", "deleted"),
335
+ "_event": event,
336
+ }
329
337
 
330
338
  if event_type == "comment":
331
339
  return DiscussionComment(
@@ -35,13 +35,11 @@ def _check_fastai_fastcore_versions(
35
35
  fastcore_min_version (`str`, *optional*):
36
36
  The minimum fastcore version supported.
37
37
 
38
- <Tip>
39
- Raises the following error:
40
-
41
- - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
42
- if the fastai or fastcore libraries are not available or are of an invalid version.
43
-
44
- </Tip>
38
+ > [!TIP]
39
+ > Raises the following error:
40
+ >
41
+ > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
42
+ > if the fastai or fastcore libraries are not available or are of an invalid version.
45
43
  """
46
44
 
47
45
  if (get_fastcore_version() or get_fastai_version()) == "N/A":
@@ -89,15 +87,13 @@ def _check_fastai_fastcore_pyproject_versions(
89
87
  fastcore_min_version (`str`, *optional*):
90
88
  The minimum fastcore version supported.
91
89
 
92
- <Tip>
93
- Raises the following errors:
94
-
95
- - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
96
- if the `toml` module is not installed.
97
- - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
98
- if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore.
99
-
100
- </Tip>
90
+ > [!TIP]
91
+ > Raises the following errors:
92
+ >
93
+ > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
94
+ > if the `toml` module is not installed.
95
+ > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
96
+ > if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore.
101
97
  """
102
98
 
103
99
  try:
@@ -254,14 +250,11 @@ def _save_pretrained_fastai(
254
250
  config (`dict`, *optional*):
255
251
  Configuration object. Will be uploaded as a .json file. Example: 'https://huggingface.co/espejelomar/fastai-pet-breeds-classification/blob/main/config.json'.
256
252
 
257
- <Tip>
258
-
259
- Raises the following error:
260
-
261
- - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError)
262
- if the config file provided is not a dictionary.
263
-
264
- </Tip>
253
+ > [!TIP]
254
+ > Raises the following error:
255
+ >
256
+ > - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError)
257
+ > if the config file provided is not a dictionary.
265
258
  """
266
259
  _check_fastai_fastcore_versions()
267
260
 
@@ -395,14 +388,11 @@ def push_to_hub_fastai(
395
388
  Returns:
396
389
  The url of the commit of your model in the given repository.
397
390
 
398
- <Tip>
399
-
400
- Raises the following error:
401
-
402
- - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
403
- if the user is not log on to the Hugging Face Hub.
404
-
405
- </Tip>
391
+ > [!TIP]
392
+ > Raises the following error:
393
+ >
394
+ > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
395
+ > if the user is not log on to the Hugging Face Hub.
406
396
  """
407
397
  _check_fastai_fastcore_versions()
408
398
  api = HfApi(endpoint=api_endpoint)
@@ -214,26 +214,23 @@ def hf_hub_url(
214
214
  'https://huggingface.co/julien-c/EsperBERTo-small/resolve/main/pytorch_model.bin'
215
215
  ```
216
216
 
217
- <Tip>
218
-
219
- Notes:
220
-
221
- Cloudfront is replicated over the globe so downloads are way faster for
222
- the end user (and it also lowers our bandwidth costs).
223
-
224
- Cloudfront aggressively caches files by default (default TTL is 24
225
- hours), however this is not an issue here because we implement a
226
- git-based versioning system on huggingface.co, which means that we store
227
- the files on S3/Cloudfront in a content-addressable way (i.e., the file
228
- name is its hash). Using content-addressable filenames means cache can't
229
- ever be stale.
230
-
231
- In terms of client-side caching from this library, we base our caching
232
- on the objects' entity tag (`ETag`), which is an identifier of a
233
- specific version of a resource [1]_. An object's ETag is: its git-sha1
234
- if stored in git, or its sha256 if stored in git-lfs.
235
-
236
- </Tip>
217
+ > [!TIP]
218
+ > Notes:
219
+ >
220
+ > Cloudfront is replicated over the globe so downloads are way faster for
221
+ > the end user (and it also lowers our bandwidth costs).
222
+ >
223
+ > Cloudfront aggressively caches files by default (default TTL is 24
224
+ > hours), however this is not an issue here because we implement a
225
+ > git-based versioning system on huggingface.co, which means that we store
226
+ > the files on S3/Cloudfront in a content-addressable way (i.e., the file
227
+ > name is its hash). Using content-addressable filenames means cache can't
228
+ > ever be stale.
229
+ >
230
+ > In terms of client-side caching from this library, we base our caching
231
+ > on the objects' entity tag (`ETag`), which is an identifier of a
232
+ > specific version of a resource [1]_. An object's ETag is: its git-sha1
233
+ > if stored in git, or its sha256 if stored in git-lfs.
237
234
 
238
235
  References:
239
236