huggingface-hub 0.35.1__py3-none-any.whl → 1.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of huggingface-hub might be problematic.

Files changed (127)
  1. huggingface_hub/__init__.py +28 -45
  2. huggingface_hub/_commit_api.py +28 -28
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +20 -20
  6. huggingface_hub/_login.py +13 -39
  7. huggingface_hub/_oauth.py +8 -8
  8. huggingface_hub/_snapshot_download.py +14 -28
  9. huggingface_hub/_space_api.py +4 -4
  10. huggingface_hub/_tensorboard_logger.py +5 -5
  11. huggingface_hub/_upload_large_folder.py +15 -15
  12. huggingface_hub/_webhooks_payload.py +3 -3
  13. huggingface_hub/_webhooks_server.py +2 -2
  14. huggingface_hub/cli/__init__.py +0 -14
  15. huggingface_hub/cli/_cli_utils.py +80 -3
  16. huggingface_hub/cli/auth.py +104 -150
  17. huggingface_hub/cli/cache.py +102 -126
  18. huggingface_hub/cli/download.py +93 -110
  19. huggingface_hub/cli/hf.py +37 -41
  20. huggingface_hub/cli/jobs.py +689 -1017
  21. huggingface_hub/cli/lfs.py +120 -143
  22. huggingface_hub/cli/repo.py +158 -216
  23. huggingface_hub/cli/repo_files.py +50 -84
  24. huggingface_hub/cli/system.py +6 -25
  25. huggingface_hub/cli/upload.py +198 -212
  26. huggingface_hub/cli/upload_large_folder.py +90 -105
  27. huggingface_hub/commands/_cli_utils.py +2 -2
  28. huggingface_hub/commands/delete_cache.py +11 -11
  29. huggingface_hub/commands/download.py +4 -13
  30. huggingface_hub/commands/lfs.py +4 -4
  31. huggingface_hub/commands/repo_files.py +2 -2
  32. huggingface_hub/commands/tag.py +1 -3
  33. huggingface_hub/commands/upload.py +4 -4
  34. huggingface_hub/commands/upload_large_folder.py +3 -3
  35. huggingface_hub/commands/user.py +4 -5
  36. huggingface_hub/community.py +5 -5
  37. huggingface_hub/constants.py +3 -41
  38. huggingface_hub/dataclasses.py +16 -22
  39. huggingface_hub/errors.py +43 -30
  40. huggingface_hub/fastai_utils.py +8 -9
  41. huggingface_hub/file_download.py +154 -253
  42. huggingface_hub/hf_api.py +329 -558
  43. huggingface_hub/hf_file_system.py +104 -62
  44. huggingface_hub/hub_mixin.py +32 -54
  45. huggingface_hub/inference/_client.py +178 -163
  46. huggingface_hub/inference/_common.py +38 -54
  47. huggingface_hub/inference/_generated/_async_client.py +219 -259
  48. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  49. huggingface_hub/inference/_generated/types/base.py +10 -7
  50. huggingface_hub/inference/_generated/types/chat_completion.py +16 -16
  51. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  52. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  53. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  54. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  55. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  56. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  57. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  58. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  59. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  60. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  61. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  62. huggingface_hub/inference/_generated/types/translation.py +2 -2
  63. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  64. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  65. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  66. huggingface_hub/inference/_mcp/agent.py +3 -3
  67. huggingface_hub/inference/_mcp/constants.py +1 -2
  68. huggingface_hub/inference/_mcp/mcp_client.py +33 -22
  69. huggingface_hub/inference/_mcp/types.py +10 -10
  70. huggingface_hub/inference/_mcp/utils.py +4 -4
  71. huggingface_hub/inference/_providers/__init__.py +2 -13
  72. huggingface_hub/inference/_providers/_common.py +24 -25
  73. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  74. huggingface_hub/inference/_providers/cohere.py +3 -3
  75. huggingface_hub/inference/_providers/fal_ai.py +25 -25
  76. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  77. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  78. huggingface_hub/inference/_providers/hf_inference.py +13 -13
  79. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  80. huggingface_hub/inference/_providers/nebius.py +10 -10
  81. huggingface_hub/inference/_providers/novita.py +5 -5
  82. huggingface_hub/inference/_providers/nscale.py +4 -4
  83. huggingface_hub/inference/_providers/replicate.py +15 -15
  84. huggingface_hub/inference/_providers/sambanova.py +6 -6
  85. huggingface_hub/inference/_providers/together.py +7 -7
  86. huggingface_hub/lfs.py +24 -33
  87. huggingface_hub/repocard.py +16 -17
  88. huggingface_hub/repocard_data.py +56 -56
  89. huggingface_hub/serialization/__init__.py +0 -1
  90. huggingface_hub/serialization/_base.py +9 -9
  91. huggingface_hub/serialization/_dduf.py +7 -7
  92. huggingface_hub/serialization/_torch.py +28 -28
  93. huggingface_hub/utils/__init__.py +10 -4
  94. huggingface_hub/utils/_auth.py +5 -5
  95. huggingface_hub/utils/_cache_manager.py +31 -31
  96. huggingface_hub/utils/_deprecation.py +1 -1
  97. huggingface_hub/utils/_dotenv.py +3 -3
  98. huggingface_hub/utils/_fixes.py +0 -10
  99. huggingface_hub/utils/_git_credential.py +3 -3
  100. huggingface_hub/utils/_headers.py +7 -29
  101. huggingface_hub/utils/_http.py +369 -209
  102. huggingface_hub/utils/_pagination.py +4 -4
  103. huggingface_hub/utils/_paths.py +5 -5
  104. huggingface_hub/utils/_runtime.py +15 -13
  105. huggingface_hub/utils/_safetensors.py +21 -21
  106. huggingface_hub/utils/_subprocess.py +9 -9
  107. huggingface_hub/utils/_telemetry.py +3 -3
  108. huggingface_hub/utils/_typing.py +3 -3
  109. huggingface_hub/utils/_validators.py +53 -72
  110. huggingface_hub/utils/_xet.py +16 -16
  111. huggingface_hub/utils/_xet_progress_reporting.py +1 -1
  112. huggingface_hub/utils/insecure_hashlib.py +3 -9
  113. huggingface_hub/utils/tqdm.py +3 -3
  114. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/METADATA +17 -26
  115. huggingface_hub-1.0.0rc1.dist-info/RECORD +161 -0
  116. huggingface_hub/inference/_providers/publicai.py +0 -6
  117. huggingface_hub/inference/_providers/scaleway.py +0 -28
  118. huggingface_hub/inference_api.py +0 -217
  119. huggingface_hub/keras_mixin.py +0 -500
  120. huggingface_hub/repository.py +0 -1477
  121. huggingface_hub/serialization/_tensorflow.py +0 -95
  122. huggingface_hub/utils/_hf_folder.py +0 -68
  123. huggingface_hub-0.35.1.dist-info/RECORD +0 -168
  124. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/LICENSE +0 -0
  125. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/WHEEL +0 -0
  126. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/entry_points.txt +0 -0
  127. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/top_level.txt +0 -0

huggingface_hub/__init__.py

@@ -46,7 +46,7 @@ import sys
  from typing import TYPE_CHECKING
 
 
- __version__ = "0.35.1"
+ __version__ = "1.0.0.rc1"
 
  # Alphabetical order of definitions is ensured in tests
  # WARNING: any comment added in this dictionary definition will be lost when
@@ -217,7 +217,6 @@ _SUBMOD_ATTRS = {
  "get_safetensors_metadata",
  "get_space_runtime",
  "get_space_variables",
- "get_token_permission",
  "get_user_overview",
  "get_webhook",
  "grant_access",
@@ -278,7 +277,6 @@ _SUBMOD_ATTRS = {
  "update_collection_metadata",
  "update_inference_endpoint",
  "update_repo_settings",
- "update_repo_visibility",
  "update_webhook",
  "upload_file",
  "upload_folder",
@@ -471,15 +469,6 @@ _SUBMOD_ATTRS = {
  "inference._mcp.mcp_client": [
  "MCPClient",
  ],
- "inference_api": [
- "InferenceApi",
- ],
- "keras_mixin": [
- "KerasModelHubMixin",
- "from_pretrained_keras",
- "push_to_hub_keras",
- "save_pretrained_keras",
- ],
  "repocard": [
  "DatasetCard",
  "ModelCard",
@@ -497,12 +486,8 @@ _SUBMOD_ATTRS = {
  "ModelCardData",
  "SpaceCardData",
  ],
- "repository": [
- "Repository",
- ],
  "serialization": [
  "StateDictSplit",
- "get_tf_storage_size",
  "get_torch_storage_id",
  "get_torch_storage_size",
  "load_state_dict_from_file",
@@ -510,7 +495,6 @@ _SUBMOD_ATTRS = {
  "save_torch_model",
  "save_torch_state_dict",
  "split_state_dict_into_shards_factory",
- "split_tf_state_dict_into_shards",
  "split_torch_state_dict_into_shards",
  ],
  "serialization._dduf": [
@@ -520,6 +504,8 @@ _SUBMOD_ATTRS = {
  "read_dduf_file",
  ],
  "utils": [
+ "ASYNC_CLIENT_FACTORY_T",
+ "CLIENT_FACTORY_T",
  "CacheNotFound",
  "CachedFileInfo",
  "CachedRepoInfo",
@@ -527,14 +513,19 @@ _SUBMOD_ATTRS = {
  "CorruptedCacheException",
  "DeleteCacheStrategy",
  "HFCacheInfo",
- "HfFolder",
+ "HfHubAsyncTransport",
+ "HfHubTransport",
  "cached_assets_path",
- "configure_http_backend",
+ "close_session",
  "dump_environment_info",
+ "get_async_session",
  "get_session",
  "get_token",
+ "hf_raise_for_status",
  "logging",
  "scan_cache_dir",
+ "set_async_client_factory",
+ "set_client_factory",
  ],
  }
 
@@ -550,6 +541,7 @@ _SUBMOD_ATTRS = {
  # ```
 
  __all__ = [
+ "ASYNC_CLIENT_FACTORY_T",
  "Agent",
  "AsyncInferenceClient",
  "AudioClassificationInput",
@@ -564,6 +556,7 @@ __all__ = [
  "AutomaticSpeechRecognitionOutput",
  "AutomaticSpeechRecognitionOutputChunk",
  "AutomaticSpeechRecognitionParameters",
+ "CLIENT_FACTORY_T",
  "CONFIG_NAME",
  "CacheNotFound",
  "CachedFileInfo",
@@ -652,7 +645,8 @@ __all__ = [
  "HfFileSystemFile",
  "HfFileSystemResolvedPath",
  "HfFileSystemStreamFile",
- "HfFolder",
+ "HfHubAsyncTransport",
+ "HfHubTransport",
  "ImageClassificationInput",
  "ImageClassificationOutputElement",
  "ImageClassificationOutputTransform",
@@ -674,7 +668,6 @@ __all__ = [
  "ImageToVideoOutput",
  "ImageToVideoParameters",
  "ImageToVideoTargetSize",
- "InferenceApi",
  "InferenceClient",
  "InferenceEndpoint",
  "InferenceEndpointError",
@@ -686,7 +679,6 @@ __all__ = [
  "JobOwner",
  "JobStage",
  "JobStatus",
- "KerasModelHubMixin",
  "MCPClient",
  "ModelCard",
  "ModelCardData",
@@ -711,7 +703,6 @@ __all__ = [
  "REPO_TYPE_SPACE",
  "RepoCard",
  "RepoUrl",
- "Repository",
  "SentenceSimilarityInput",
  "SentenceSimilarityInputData",
  "SpaceCard",
@@ -824,8 +815,8 @@ __all__ = [
  "cancel_access_request",
  "cancel_job",
  "change_discussion_status",
+ "close_session",
  "comment_discussion",
- "configure_http_backend",
  "create_branch",
  "create_collection",
  "create_commit",
@@ -862,7 +853,7 @@ __all__ = [
  "fetch_job_logs",
  "file_exists",
  "from_pretrained_fastai",
- "from_pretrained_keras",
+ "get_async_session",
  "get_collection",
  "get_dataset_tags",
  "get_discussion_details",
@@ -876,9 +867,7 @@ __all__ = [
  "get_session",
  "get_space_runtime",
  "get_space_variables",
- "get_tf_storage_size",
  "get_token",
- "get_token_permission",
  "get_torch_storage_id",
  "get_torch_storage_size",
  "get_user_overview",
@@ -886,6 +875,7 @@ __all__ = [
  "grant_access",
  "hf_hub_download",
  "hf_hub_url",
+ "hf_raise_for_status",
  "inspect_job",
  "inspect_scheduled_job",
  "interpreter_login",
@@ -932,7 +922,6 @@ __all__ = [
  "permanently_delete_lfs_files",
  "preupload_lfs_files",
  "push_to_hub_fastai",
- "push_to_hub_keras",
  "read_dduf_file",
  "reject_access_request",
  "rename_discussion",
@@ -948,16 +937,16 @@ __all__ = [
  "run_as_future",
  "run_job",
  "run_uv_job",
- "save_pretrained_keras",
  "save_torch_model",
  "save_torch_state_dict",
  "scale_to_zero_inference_endpoint",
  "scan_cache_dir",
+ "set_async_client_factory",
+ "set_client_factory",
  "set_space_sleep_time",
  "snapshot_download",
  "space_info",
  "split_state_dict_into_shards_factory",
- "split_tf_state_dict_into_shards",
  "split_torch_state_dict_into_shards",
  "super_squash_history",
  "suspend_scheduled_job",
@@ -967,7 +956,6 @@ __all__ = [
  "update_collection_metadata",
  "update_inference_endpoint",
  "update_repo_settings",
- "update_repo_visibility",
  "update_webhook",
  "upload_file",
  "upload_folder",
@@ -1237,7 +1225,6 @@ if TYPE_CHECKING: # pragma: no cover
  get_safetensors_metadata, # noqa: F401
  get_space_runtime, # noqa: F401
  get_space_variables, # noqa: F401
- get_token_permission, # noqa: F401
  get_user_overview, # noqa: F401
  get_webhook, # noqa: F401
  grant_access, # noqa: F401
@@ -1298,7 +1285,6 @@ if TYPE_CHECKING: # pragma: no cover
  update_collection_metadata, # noqa: F401
  update_inference_endpoint, # noqa: F401
  update_repo_settings, # noqa: F401
- update_repo_visibility, # noqa: F401
  update_webhook, # noqa: F401
  upload_file, # noqa: F401
  upload_folder, # noqa: F401
@@ -1485,13 +1471,6 @@ if TYPE_CHECKING: # pragma: no cover
  )
  from .inference._mcp.agent import Agent # noqa: F401
  from .inference._mcp.mcp_client import MCPClient # noqa: F401
- from .inference_api import InferenceApi # noqa: F401
- from .keras_mixin import (
- KerasModelHubMixin, # noqa: F401
- from_pretrained_keras, # noqa: F401
- push_to_hub_keras, # noqa: F401
- save_pretrained_keras, # noqa: F401
- )
  from .repocard import (
  DatasetCard, # noqa: F401
  ModelCard, # noqa: F401
@@ -1509,10 +1488,8 @@ if TYPE_CHECKING: # pragma: no cover
  ModelCardData, # noqa: F401
  SpaceCardData, # noqa: F401
  )
- from .repository import Repository # noqa: F401
  from .serialization import (
  StateDictSplit, # noqa: F401
- get_tf_storage_size, # noqa: F401
  get_torch_storage_id, # noqa: F401
  get_torch_storage_size, # noqa: F401
  load_state_dict_from_file, # noqa: F401
@@ -1520,7 +1497,6 @@ if TYPE_CHECKING: # pragma: no cover
  save_torch_model, # noqa: F401
  save_torch_state_dict, # noqa: F401
  split_state_dict_into_shards_factory, # noqa: F401
- split_tf_state_dict_into_shards, # noqa: F401
  split_torch_state_dict_into_shards, # noqa: F401
  )
  from .serialization._dduf import (
@@ -1530,6 +1506,8 @@ if TYPE_CHECKING: # pragma: no cover
  read_dduf_file, # noqa: F401
  )
  from .utils import (
+ ASYNC_CLIENT_FACTORY_T, # noqa: F401
+ CLIENT_FACTORY_T, # noqa: F401
  CachedFileInfo, # noqa: F401
  CachedRepoInfo, # noqa: F401
  CachedRevisionInfo, # noqa: F401
@@ -1537,12 +1515,17 @@ if TYPE_CHECKING: # pragma: no cover
  CorruptedCacheException, # noqa: F401
  DeleteCacheStrategy, # noqa: F401
  HFCacheInfo, # noqa: F401
- HfFolder, # noqa: F401
+ HfHubAsyncTransport, # noqa: F401
+ HfHubTransport, # noqa: F401
  cached_assets_path, # noqa: F401
- configure_http_backend, # noqa: F401
+ close_session, # noqa: F401
  dump_environment_info, # noqa: F401
+ get_async_session, # noqa: F401
  get_session, # noqa: F401
  get_token, # noqa: F401
+ hf_raise_for_status, # noqa: F401
  logging, # noqa: F401
  scan_cache_dir, # noqa: F401
+ set_async_client_factory, # noqa: F401
+ set_client_factory, # noqa: F401
  )
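
The `__init__.py` hunks above summarize the HTTP-stack migration in 1.0: `HfFolder`, `configure_http_backend`, `Repository`, `InferenceApi` and the Keras/TensorFlow helpers leave the public API, while httpx-oriented plumbing (`set_client_factory`, `set_async_client_factory`, `get_async_session`, `close_session`, `hf_raise_for_status`, `HfHubTransport`, `HfHubAsyncTransport`) is exported instead. A minimal migration sketch, assuming (based on the `CLIENT_FACTORY_T` type alias and the httpx references elsewhere in this diff, not confirmed by the diff alone) that the factory is a zero-argument callable returning an `httpx.Client`:

```python
import httpx

from huggingface_hub import close_session, get_session, set_client_factory


# Hypothetical factory: every client created by huggingface_hub would pick up
# these httpx settings (timeouts, proxies, extra headers, ...).
def client_factory() -> httpx.Client:
    return httpx.Client(timeout=30.0, headers={"X-Custom-Header": "value"})


set_client_factory(client_factory)  # replaces the removed configure_http_backend()
session = get_session()             # shared httpx.Client built by the factory
close_session()                     # dispose of the cached client when done
```

Code that passed a `requests.Session` factory to `configure_http_backend()` has no drop-in equivalent and needs to be ported to the httpx-based hooks.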

huggingface_hub/_commit_api.py

@@ -11,7 +11,7 @@ from contextlib import contextmanager
  from dataclasses import dataclass, field
  from itertools import groupby
  from pathlib import Path, PurePosixPath
- from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Literal, Optional, Tuple, Union
+ from typing import TYPE_CHECKING, Any, BinaryIO, Iterable, Iterator, Literal, Optional, Union
 
  from tqdm.contrib.concurrent import thread_map
 
@@ -235,7 +235,7 @@ class CommitOperationAdd:
  config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
 
  >>> with operation.as_file(with_tqdm=True) as file:
- ... requests.put(..., data=file)
+ ... httpx.put(..., data=file)
  config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
  ```
  """
@@ -306,7 +306,7 @@ def _validate_path_in_repo(path_in_repo: str) -> str:
  CommitOperation = Union[CommitOperationAdd, CommitOperationCopy, CommitOperationDelete]
 
 
- def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
+ def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:
  """
  Warn user when a list of operations is expected to overwrite itself in a single
  commit.
@@ -321,7 +321,7 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
  delete before upload) but can happen if a user deletes an entire folder and then
  add new files to it.
  """
- nb_additions_per_path: Dict[str, int] = defaultdict(int)
+ nb_additions_per_path: dict[str, int] = defaultdict(int)
  for operation in operations:
  path_in_repo = operation.path_in_repo
  if isinstance(operation, CommitOperationAdd):
@@ -355,10 +355,10 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
  @validate_hf_hub_args
  def _upload_lfs_files(
  *,
- additions: List[CommitOperationAdd],
+ additions: list[CommitOperationAdd],
  repo_type: str,
  repo_id: str,
- headers: Dict[str, str],
+ headers: dict[str, str],
  endpoint: Optional[str] = None,
  num_threads: int = 5,
  revision: Optional[str] = None,
@@ -377,7 +377,7 @@ def _upload_lfs_files(
  repo_id (`str`):
  A namespace (user or an organization) and a repo name separated
  by a `/`.
- headers (`Dict[str, str]`):
+ headers (`dict[str, str]`):
  Headers to use for the request, including authorization headers and user agent.
  num_threads (`int`, *optional*):
  The number of concurrent threads to use when uploading. Defaults to 5.
@@ -389,13 +389,13 @@ def _upload_lfs_files(
  If an upload failed for any reason
  [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
  If the server returns malformed responses
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+ [`HfHubHTTPError`]
  If the LFS batch endpoint returned an HTTP error.
  """
  # Step 1: retrieve upload instructions from the LFS batch endpoint.
  # Upload instructions are retrieved by chunk of 256 files to avoid reaching
  # the payload limit.
- batch_actions: List[Dict] = []
+ batch_actions: list[dict] = []
  for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
  batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
  upload_infos=[op.upload_info for op in chunk],
@@ -466,10 +466,10 @@ def _upload_lfs_files(
  @validate_hf_hub_args
  def _upload_xet_files(
  *,
- additions: List[CommitOperationAdd],
+ additions: list[CommitOperationAdd],
  repo_type: str,
  repo_id: str,
- headers: Dict[str, str],
+ headers: dict[str, str],
  endpoint: Optional[str] = None,
  revision: Optional[str] = None,
  create_pr: Optional[bool] = None,
@@ -486,7 +486,7 @@ def _upload_xet_files(
  repo_id (`str`):
  A namespace (user or an organization) and a repo name separated
  by a `/`.
- headers (`Dict[str, str]`):
+ headers (`dict[str, str]`):
  Headers to use for the request, including authorization headers and user agent.
  endpoint: (`str`, *optional*):
  The endpoint to use for the xetcas service. Defaults to `constants.ENDPOINT`.
@@ -500,7 +500,7 @@ def _upload_xet_files(
  If an upload failed for any reason.
  [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
  If the server returns malformed responses or if the user is unauthorized to upload to xet storage.
- [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+ [`HfHubHTTPError`]
  If the LFS batch endpoint returned an HTTP error.
 
  **How it works:**
@@ -555,7 +555,7 @@ def _upload_xet_files(
  xet_endpoint = xet_connection_info.endpoint
  access_token_info = (xet_connection_info.access_token, xet_connection_info.expiration_unix_epoch)
 
- def token_refresher() -> Tuple[str, int]:
+ def token_refresher() -> tuple[str, int]:
  new_xet_connection = fetch_xet_connection_info_from_repo_info(
  token_type=XetTokenType.WRITE,
  repo_id=repo_id,
@@ -628,7 +628,7 @@ def _fetch_upload_modes(
  additions: Iterable[CommitOperationAdd],
  repo_type: str,
  repo_id: str,
- headers: Dict[str, str],
+ headers: dict[str, str],
  revision: str,
  endpoint: Optional[str] = None,
  create_pr: bool = False,
@@ -647,7 +647,7 @@ def _fetch_upload_modes(
  repo_id (`str`):
  A namespace (user or an organization) and a repo name separated
  by a `/`.
- headers (`Dict[str, str]`):
+ headers (`dict[str, str]`):
  Headers to use for the request, including authorization headers and user agent.
  revision (`str`):
  The git revision to upload the files to. Can be any valid git revision.
@@ -665,12 +665,12 @@ def _fetch_upload_modes(
  endpoint = endpoint if endpoint is not None else constants.ENDPOINT
 
  # Fetch upload mode (LFS or regular) chunk by chunk.
- upload_modes: Dict[str, UploadMode] = {}
- should_ignore_info: Dict[str, bool] = {}
- oid_info: Dict[str, Optional[str]] = {}
+ upload_modes: dict[str, UploadMode] = {}
+ should_ignore_info: dict[str, bool] = {}
+ oid_info: dict[str, Optional[str]] = {}
 
  for chunk in chunk_iterable(additions, 256):
- payload: Dict = {
+ payload: dict = {
  "files": [
  {
  "path": op.path_in_repo,
@@ -713,10 +713,10 @@ def _fetch_files_to_copy(
  copies: Iterable[CommitOperationCopy],
  repo_type: str,
  repo_id: str,
- headers: Dict[str, str],
+ headers: dict[str, str],
  revision: str,
  endpoint: Optional[str] = None,
- ) -> Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]]:
+ ) -> dict[tuple[str, Optional[str]], Union["RepoFile", bytes]]:
  """
  Fetch information about the files to copy.
 
@@ -732,12 +732,12 @@ def _fetch_files_to_copy(
  repo_id (`str`):
  A namespace (user or an organization) and a repo name separated
  by a `/`.
- headers (`Dict[str, str]`):
+ headers (`dict[str, str]`):
  Headers to use for the request, including authorization headers and user agent.
  revision (`str`):
  The git revision to upload the files to. Can be any valid git revision.
 
- Returns: `Dict[Tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
+ Returns: `dict[tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
  Key is the file path and revision of the file to copy.
  Value is the raw content as bytes (for regular files) or the file information as a RepoFile (for LFS files).
 
@@ -750,9 +750,9 @@ def _fetch_files_to_copy(
  from .hf_api import HfApi, RepoFolder
 
  hf_api = HfApi(endpoint=endpoint, headers=headers)
- files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
+ files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
  # Store (path, revision) -> oid mapping
- oid_info: Dict[Tuple[str, Optional[str]], Optional[str]] = {}
+ oid_info: dict[tuple[str, Optional[str]], Optional[str]] = {}
  # 1. Fetch OIDs for destination paths in batches.
  dest_paths = [op.path_in_repo for op in copies]
  for offset in range(0, len(dest_paths), FETCH_LFS_BATCH_SIZE):
@@ -812,11 +812,11 @@ def _fetch_files_to_copy(
 
  def _prepare_commit_payload(
  operations: Iterable[CommitOperation],
- files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]],
+ files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]],
  commit_message: str,
  commit_description: Optional[str] = None,
  parent_commit: Optional[str] = None,
- ) -> Iterable[Dict[str, Any]]:
+ ) -> Iterable[dict[str, Any]]:
  """
  Builds the payload to POST to the `/commit` API of the Hub.
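
A user-visible consequence of the requests-to-httpx move shows up in the docstrings above: errors previously documented as `requests.HTTPError` are now referenced as [`HfHubHTTPError`], and `hf_raise_for_status` is re-exported at the top level. Callers that caught the requests exception can switch along these lines (illustrative repo id; `HfHubHTTPError` was already importable from `huggingface_hub.errors` in 0.x):

```python
from huggingface_hub import HfApi
from huggingface_hub.errors import HfHubHTTPError

api = HfApi()
try:
    api.upload_file(
        path_or_fileobj=b"hello",
        path_in_repo="hello.txt",
        repo_id="username/my-repo",  # illustrative repo id
    )
except HfHubHTTPError as err:  # previously surfaced as requests.HTTPError
    print(f"Upload failed: {err}")
```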

huggingface_hub/_commit_scheduler.py

@@ -7,7 +7,7 @@ from dataclasses import dataclass
  from io import SEEK_END, SEEK_SET, BytesIO
  from pathlib import Path
  from threading import Lock, Thread
- from typing import Dict, List, Optional, Union
+ from typing import Optional, Union
 
  from .hf_api import DEFAULT_IGNORE_PATTERNS, CommitInfo, CommitOperationAdd, HfApi
  from .utils import filter_repo_objects
@@ -53,9 +53,9 @@ class CommitScheduler:
  Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
  token (`str`, *optional*):
  The token to use to commit to the repo. Defaults to the token saved on the machine.
- allow_patterns (`List[str]` or `str`, *optional*):
+ allow_patterns (`list[str]` or `str`, *optional*):
  If provided, only files matching at least one pattern are uploaded.
- ignore_patterns (`List[str]` or `str`, *optional*):
+ ignore_patterns (`list[str]` or `str`, *optional*):
  If provided, files matching any of the patterns are not uploaded.
  squash_history (`bool`, *optional*):
  Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
@@ -108,8 +108,8 @@ class CommitScheduler:
  revision: Optional[str] = None,
  private: Optional[bool] = None,
  token: Optional[str] = None,
- allow_patterns: Optional[Union[List[str], str]] = None,
- ignore_patterns: Optional[Union[List[str], str]] = None,
+ allow_patterns: Optional[Union[list[str], str]] = None,
+ ignore_patterns: Optional[Union[list[str], str]] = None,
  squash_history: bool = False,
  hf_api: Optional["HfApi"] = None,
  ) -> None:
@@ -138,7 +138,7 @@ class CommitScheduler:
  self.token = token
 
  # Keep track of already uploaded files
- self.last_uploaded: Dict[Path, float] = {} # key is local path, value is timestamp
+ self.last_uploaded: dict[Path, float] = {} # key is local path, value is timestamp
 
  # Scheduler
  if not every > 0:
@@ -232,7 +232,7 @@ class CommitScheduler:
  prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else ""
 
  # Filter with pattern + filter out unchanged files + retrieve current file size
- files_to_upload: List[_FileToUpload] = []
+ files_to_upload: list[_FileToUpload] = []
  for relpath in filter_repo_objects(
  relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns
  ):
@@ -315,10 +315,13 @@ class PartialFileIO(BytesIO):
  return self._size_limit
 
  def __getattribute__(self, name: str):
- if name.startswith("_") or name in ("read", "tell", "seek"): # only 3 public methods supported
+ if name.startswith("_") or name in ("read", "tell", "seek", "fileno"): # only 4 public methods supported
  return super().__getattribute__(name)
  raise NotImplementedError(f"PartialFileIO does not support '{name}'.")
 
+ def fileno(self):
+ raise AttributeError("PartialFileIO does not have a fileno.")
+
  def tell(self) -> int:
  """Return the current file position."""
  return self._file.tell()
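
The `CommitScheduler` hunks are typing-only (`List[str]` → `list[str]`, `Dict` → `dict`) plus the `fileno` guard on `PartialFileIO`, so existing call sites keep working unchanged. For reference, a small usage sketch of the constructor arguments shown above (repo id and paths are illustrative):

```python
from huggingface_hub import CommitScheduler

# Uploads ./logs to a dataset repo every 5 minutes, keeping only *.jsonl files.
scheduler = CommitScheduler(
    repo_id="username/my-logs",   # illustrative repo id
    repo_type="dataset",
    folder_path="./logs",
    every=5,                      # minutes between scheduled commits
    allow_patterns=["*.jsonl"],
    ignore_patterns=["*.tmp"],
)
```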

huggingface_hub/_inference_endpoints.py

@@ -2,7 +2,7 @@ import time
  from dataclasses import dataclass, field
  from datetime import datetime
  from enum import Enum
- from typing import TYPE_CHECKING, Dict, Optional, Union
+ from typing import TYPE_CHECKING, Optional, Union
 
  from huggingface_hub.errors import InferenceEndpointError, InferenceEndpointTimeoutError
 
@@ -62,7 +62,7 @@ class InferenceEndpoint:
  The timestamp of the last update of the Inference Endpoint.
  type ([`InferenceEndpointType`]):
  The type of the Inference Endpoint (public, protected, private).
- raw (`Dict`):
+ raw (`dict`):
  The raw dictionary data returned from the API.
  token (`str` or `bool`, *optional*):
  Authentication token for the Inference Endpoint, if set when requesting the API. Will default to the
@@ -112,7 +112,7 @@ class InferenceEndpoint:
  type: InferenceEndpointType = field(repr=False, init=False)
 
  # Raw dict from the API
- raw: Dict = field(repr=False)
+ raw: dict = field(repr=False)
 
  # Internal fields
  _token: Union[str, bool, None] = field(repr=False, compare=False)
@@ -120,7 +120,7 @@ class InferenceEndpoint:
 
  @classmethod
  def from_raw(
- cls, raw: Dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None
+ cls, raw: dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None
  ) -> "InferenceEndpoint":
  """Initialize object from raw dictionary."""
  if api is None:
@@ -260,8 +260,8 @@ class InferenceEndpoint:
  framework: Optional[str] = None,
  revision: Optional[str] = None,
  task: Optional[str] = None,
- custom_image: Optional[Dict] = None,
- secrets: Optional[Dict[str, str]] = None,
+ custom_image: Optional[dict] = None,
+ secrets: Optional[dict[str, str]] = None,
  ) -> "InferenceEndpoint":
  """Update the Inference Endpoint.
 
@@ -293,10 +293,10 @@ class InferenceEndpoint:
  The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
  task (`str`, *optional*):
  The task on which to deploy the model (e.g. `"text-classification"`).
- custom_image (`Dict`, *optional*):
+ custom_image (`dict`, *optional*):
  A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
  Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
- secrets (`Dict[str, str]`, *optional*):
+ secrets (`dict[str, str]`, *optional*):
  Secret values to inject in the container environment.
  Returns:
  [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
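
Same pattern for `InferenceEndpoint.update()`: `custom_image` and `secrets` keep their semantics and are simply annotated as plain `dict`s now. A sketch with illustrative values (endpoint name, namespace and image settings are placeholders):

```python
from huggingface_hub import get_inference_endpoint

endpoint = get_inference_endpoint("my-endpoint", namespace="my-org")
endpoint.update(
    # dict describing a custom container, e.g. a TGI image
    custom_image={
        "url": "ghcr.io/huggingface/text-generation-inference:latest",
        "health_route": "/health",
        "env": {"MAX_TOTAL_TOKENS": "2048"},
    },
    # dict[str, str] of secrets injected into the container environment
    secrets={"MY_SECRET": "value"},
)
```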