huggingface-hub 0.36.0rc0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of huggingface-hub might be problematic.

Files changed (132)
  1. huggingface_hub/__init__.py +33 -45
  2. huggingface_hub/_commit_api.py +39 -43
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +20 -20
  6. huggingface_hub/_login.py +17 -43
  7. huggingface_hub/_oauth.py +8 -8
  8. huggingface_hub/_snapshot_download.py +135 -50
  9. huggingface_hub/_space_api.py +4 -4
  10. huggingface_hub/_tensorboard_logger.py +5 -5
  11. huggingface_hub/_upload_large_folder.py +18 -32
  12. huggingface_hub/_webhooks_payload.py +3 -3
  13. huggingface_hub/_webhooks_server.py +2 -2
  14. huggingface_hub/cli/__init__.py +0 -14
  15. huggingface_hub/cli/_cli_utils.py +143 -39
  16. huggingface_hub/cli/auth.py +105 -171
  17. huggingface_hub/cli/cache.py +594 -361
  18. huggingface_hub/cli/download.py +120 -112
  19. huggingface_hub/cli/hf.py +38 -41
  20. huggingface_hub/cli/jobs.py +689 -1017
  21. huggingface_hub/cli/lfs.py +120 -143
  22. huggingface_hub/cli/repo.py +282 -216
  23. huggingface_hub/cli/repo_files.py +50 -84
  24. huggingface_hub/cli/system.py +6 -25
  25. huggingface_hub/cli/upload.py +198 -220
  26. huggingface_hub/cli/upload_large_folder.py +91 -106
  27. huggingface_hub/community.py +5 -5
  28. huggingface_hub/constants.py +17 -52
  29. huggingface_hub/dataclasses.py +135 -21
  30. huggingface_hub/errors.py +47 -30
  31. huggingface_hub/fastai_utils.py +8 -9
  32. huggingface_hub/file_download.py +351 -303
  33. huggingface_hub/hf_api.py +398 -570
  34. huggingface_hub/hf_file_system.py +101 -66
  35. huggingface_hub/hub_mixin.py +32 -54
  36. huggingface_hub/inference/_client.py +177 -162
  37. huggingface_hub/inference/_common.py +38 -54
  38. huggingface_hub/inference/_generated/_async_client.py +218 -258
  39. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  40. huggingface_hub/inference/_generated/types/base.py +10 -7
  41. huggingface_hub/inference/_generated/types/chat_completion.py +16 -16
  42. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  43. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  44. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  45. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  46. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  47. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  48. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  49. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  50. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  51. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  52. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  53. huggingface_hub/inference/_generated/types/translation.py +2 -2
  54. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  55. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  56. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  57. huggingface_hub/inference/_mcp/agent.py +3 -3
  58. huggingface_hub/inference/_mcp/constants.py +1 -2
  59. huggingface_hub/inference/_mcp/mcp_client.py +33 -22
  60. huggingface_hub/inference/_mcp/types.py +10 -10
  61. huggingface_hub/inference/_mcp/utils.py +4 -4
  62. huggingface_hub/inference/_providers/__init__.py +12 -4
  63. huggingface_hub/inference/_providers/_common.py +62 -24
  64. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  65. huggingface_hub/inference/_providers/cohere.py +3 -3
  66. huggingface_hub/inference/_providers/fal_ai.py +25 -25
  67. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  68. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  69. huggingface_hub/inference/_providers/hf_inference.py +13 -13
  70. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  71. huggingface_hub/inference/_providers/nebius.py +10 -10
  72. huggingface_hub/inference/_providers/novita.py +5 -5
  73. huggingface_hub/inference/_providers/nscale.py +4 -4
  74. huggingface_hub/inference/_providers/replicate.py +15 -15
  75. huggingface_hub/inference/_providers/sambanova.py +6 -6
  76. huggingface_hub/inference/_providers/together.py +7 -7
  77. huggingface_hub/lfs.py +21 -94
  78. huggingface_hub/repocard.py +15 -16
  79. huggingface_hub/repocard_data.py +57 -57
  80. huggingface_hub/serialization/__init__.py +0 -1
  81. huggingface_hub/serialization/_base.py +9 -9
  82. huggingface_hub/serialization/_dduf.py +7 -7
  83. huggingface_hub/serialization/_torch.py +28 -28
  84. huggingface_hub/utils/__init__.py +11 -6
  85. huggingface_hub/utils/_auth.py +5 -5
  86. huggingface_hub/utils/_cache_manager.py +49 -74
  87. huggingface_hub/utils/_deprecation.py +1 -1
  88. huggingface_hub/utils/_dotenv.py +3 -3
  89. huggingface_hub/utils/_fixes.py +0 -10
  90. huggingface_hub/utils/_git_credential.py +3 -3
  91. huggingface_hub/utils/_headers.py +7 -29
  92. huggingface_hub/utils/_http.py +371 -208
  93. huggingface_hub/utils/_pagination.py +4 -4
  94. huggingface_hub/utils/_parsing.py +98 -0
  95. huggingface_hub/utils/_paths.py +5 -5
  96. huggingface_hub/utils/_runtime.py +59 -23
  97. huggingface_hub/utils/_safetensors.py +21 -21
  98. huggingface_hub/utils/_subprocess.py +9 -9
  99. huggingface_hub/utils/_telemetry.py +3 -3
  100. huggingface_hub/{commands/_cli_utils.py → utils/_terminal.py} +4 -9
  101. huggingface_hub/utils/_typing.py +3 -3
  102. huggingface_hub/utils/_validators.py +53 -72
  103. huggingface_hub/utils/_xet.py +16 -16
  104. huggingface_hub/utils/_xet_progress_reporting.py +1 -1
  105. huggingface_hub/utils/insecure_hashlib.py +3 -9
  106. huggingface_hub/utils/tqdm.py +3 -3
  107. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/METADATA +16 -35
  108. huggingface_hub-1.0.0.dist-info/RECORD +152 -0
  109. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/entry_points.txt +0 -1
  110. huggingface_hub/commands/__init__.py +0 -27
  111. huggingface_hub/commands/delete_cache.py +0 -476
  112. huggingface_hub/commands/download.py +0 -204
  113. huggingface_hub/commands/env.py +0 -39
  114. huggingface_hub/commands/huggingface_cli.py +0 -65
  115. huggingface_hub/commands/lfs.py +0 -200
  116. huggingface_hub/commands/repo.py +0 -151
  117. huggingface_hub/commands/repo_files.py +0 -132
  118. huggingface_hub/commands/scan_cache.py +0 -183
  119. huggingface_hub/commands/tag.py +0 -161
  120. huggingface_hub/commands/upload.py +0 -318
  121. huggingface_hub/commands/upload_large_folder.py +0 -131
  122. huggingface_hub/commands/user.py +0 -208
  123. huggingface_hub/commands/version.py +0 -40
  124. huggingface_hub/inference_api.py +0 -217
  125. huggingface_hub/keras_mixin.py +0 -497
  126. huggingface_hub/repository.py +0 -1471
  127. huggingface_hub/serialization/_tensorflow.py +0 -92
  128. huggingface_hub/utils/_hf_folder.py +0 -68
  129. huggingface_hub-0.36.0rc0.dist-info/RECORD +0 -170
  130. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/LICENSE +0 -0
  131. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/WHEEL +0 -0
  132. {huggingface_hub-0.36.0rc0.dist-info → huggingface_hub-1.0.0.dist-info}/top_level.txt +0 -0
huggingface_hub/__init__.py

@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING
 
 
-__version__ = "0.36.0.rc0"
+__version__ = "1.0.0"
 
 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when
@@ -111,6 +111,9 @@ _SUBMOD_ATTRS = {
         "WebhooksServer",
         "webhook_endpoint",
     ],
+    "cli._cli_utils": [
+        "typer_factory",
+    ],
     "community": [
         "Discussion",
         "DiscussionComment",
@@ -138,6 +141,7 @@ _SUBMOD_ATTRS = {
         "push_to_hub_fastai",
     ],
     "file_download": [
+        "DryRunFileInfo",
         "HfFileMetadata",
         "_CACHED_NO_EXIST",
         "get_hf_file_metadata",
@@ -219,7 +223,6 @@
         "get_safetensors_metadata",
         "get_space_runtime",
         "get_space_variables",
-        "get_token_permission",
         "get_user_overview",
         "get_webhook",
         "grant_access",
@@ -234,6 +237,7 @@
         "list_lfs_files",
         "list_liked_repos",
         "list_models",
+        "list_organization_followers",
         "list_organization_members",
         "list_papers",
         "list_pending_access_requests",
@@ -280,7 +284,6 @@
         "update_collection_metadata",
         "update_inference_endpoint",
         "update_repo_settings",
-        "update_repo_visibility",
         "update_webhook",
         "upload_file",
         "upload_folder",
@@ -473,15 +476,6 @@
     "inference._mcp.mcp_client": [
         "MCPClient",
     ],
-    "inference_api": [
-        "InferenceApi",
-    ],
-    "keras_mixin": [
-        "KerasModelHubMixin",
-        "from_pretrained_keras",
-        "push_to_hub_keras",
-        "save_pretrained_keras",
-    ],
     "repocard": [
         "DatasetCard",
         "ModelCard",
@@ -499,12 +493,8 @@
         "ModelCardData",
         "SpaceCardData",
     ],
-    "repository": [
-        "Repository",
-    ],
     "serialization": [
         "StateDictSplit",
-        "get_tf_storage_size",
         "get_torch_storage_id",
         "get_torch_storage_size",
         "load_state_dict_from_file",
@@ -512,7 +502,6 @@
         "save_torch_model",
         "save_torch_state_dict",
         "split_state_dict_into_shards_factory",
-        "split_tf_state_dict_into_shards",
         "split_torch_state_dict_into_shards",
     ],
     "serialization._dduf": [
@@ -522,6 +511,8 @@
         "read_dduf_file",
     ],
     "utils": [
+        "ASYNC_CLIENT_FACTORY_T",
+        "CLIENT_FACTORY_T",
         "CacheNotFound",
         "CachedFileInfo",
         "CachedRepoInfo",
@@ -529,14 +520,17 @@
         "CorruptedCacheException",
         "DeleteCacheStrategy",
         "HFCacheInfo",
-        "HfFolder",
         "cached_assets_path",
-        "configure_http_backend",
+        "close_session",
         "dump_environment_info",
+        "get_async_session",
         "get_session",
         "get_token",
+        "hf_raise_for_status",
         "logging",
         "scan_cache_dir",
+        "set_async_client_factory",
+        "set_client_factory",
     ],
 }
 
@@ -552,6 +546,7 @@ _SUBMOD_ATTRS = {
 # ```
 
 __all__ = [
+    "ASYNC_CLIENT_FACTORY_T",
     "Agent",
     "AsyncInferenceClient",
     "AudioClassificationInput",
@@ -566,6 +561,7 @@ __all__ = [
     "AutomaticSpeechRecognitionOutput",
     "AutomaticSpeechRecognitionOutputChunk",
     "AutomaticSpeechRecognitionParameters",
+    "CLIENT_FACTORY_T",
    "CONFIG_NAME",
     "CacheNotFound",
     "CachedFileInfo",
@@ -634,6 +630,7 @@ __all__ = [
     "DocumentQuestionAnsweringInputData",
     "DocumentQuestionAnsweringOutputElement",
     "DocumentQuestionAnsweringParameters",
+    "DryRunFileInfo",
     "EvalResult",
     "FLAX_WEIGHTS_NAME",
     "FeatureExtractionInput",
@@ -654,7 +651,6 @@ __all__ = [
     "HfFileSystemFile",
     "HfFileSystemResolvedPath",
     "HfFileSystemStreamFile",
-    "HfFolder",
     "ImageClassificationInput",
     "ImageClassificationOutputElement",
     "ImageClassificationOutputTransform",
@@ -676,7 +672,6 @@ __all__ = [
     "ImageToVideoOutput",
     "ImageToVideoParameters",
     "ImageToVideoTargetSize",
-    "InferenceApi",
     "InferenceClient",
     "InferenceEndpoint",
     "InferenceEndpointError",
@@ -688,7 +683,6 @@ __all__ = [
     "JobOwner",
     "JobStage",
     "JobStatus",
-    "KerasModelHubMixin",
     "MCPClient",
     "ModelCard",
     "ModelCardData",
@@ -714,7 +708,6 @@ __all__ = [
     "REPO_TYPE_SPACE",
     "RepoCard",
     "RepoUrl",
-    "Repository",
     "SentenceSimilarityInput",
     "SentenceSimilarityInputData",
     "SpaceCard",
@@ -827,8 +820,8 @@ __all__ = [
     "cancel_access_request",
     "cancel_job",
     "change_discussion_status",
+    "close_session",
     "comment_discussion",
-    "configure_http_backend",
     "create_branch",
     "create_collection",
     "create_commit",
@@ -865,7 +858,7 @@ __all__ = [
     "fetch_job_logs",
     "file_exists",
     "from_pretrained_fastai",
-    "from_pretrained_keras",
+    "get_async_session",
     "get_collection",
     "get_dataset_tags",
     "get_discussion_details",
@@ -880,9 +873,7 @@ __all__ = [
     "get_session",
     "get_space_runtime",
     "get_space_variables",
-    "get_tf_storage_size",
     "get_token",
-    "get_token_permission",
     "get_torch_storage_id",
     "get_torch_storage_size",
     "get_user_overview",
@@ -890,6 +881,7 @@ __all__ = [
     "grant_access",
     "hf_hub_download",
     "hf_hub_url",
+    "hf_raise_for_status",
     "inspect_job",
     "inspect_scheduled_job",
     "interpreter_login",
@@ -902,6 +894,7 @@ __all__ = [
     "list_lfs_files",
     "list_liked_repos",
     "list_models",
+    "list_organization_followers",
     "list_organization_members",
     "list_papers",
     "list_pending_access_requests",
@@ -936,7 +929,6 @@ __all__ = [
     "permanently_delete_lfs_files",
     "preupload_lfs_files",
     "push_to_hub_fastai",
-    "push_to_hub_keras",
     "read_dduf_file",
     "reject_access_request",
     "rename_discussion",
@@ -952,26 +944,26 @@ __all__ = [
     "run_as_future",
     "run_job",
     "run_uv_job",
-    "save_pretrained_keras",
     "save_torch_model",
     "save_torch_state_dict",
     "scale_to_zero_inference_endpoint",
     "scan_cache_dir",
+    "set_async_client_factory",
+    "set_client_factory",
     "set_space_sleep_time",
     "snapshot_download",
     "space_info",
     "split_state_dict_into_shards_factory",
-    "split_tf_state_dict_into_shards",
     "split_torch_state_dict_into_shards",
     "super_squash_history",
     "suspend_scheduled_job",
     "try_to_load_from_cache",
+    "typer_factory",
     "unlike",
     "update_collection_item",
     "update_collection_metadata",
     "update_inference_endpoint",
     "update_repo_settings",
-    "update_repo_visibility",
     "update_webhook",
     "upload_file",
     "upload_folder",
@@ -1135,6 +1127,7 @@ if TYPE_CHECKING: # pragma: no cover
         WebhooksServer,  # noqa: F401
         webhook_endpoint,  # noqa: F401
     )
+    from .cli._cli_utils import typer_factory  # noqa: F401
     from .community import (
         Discussion,  # noqa: F401
         DiscussionComment,  # noqa: F401
@@ -1163,6 +1156,7 @@ if TYPE_CHECKING: # pragma: no cover
     )
     from .file_download import (
         _CACHED_NO_EXIST,  # noqa: F401
+        DryRunFileInfo,  # noqa: F401
         HfFileMetadata,  # noqa: F401
         get_hf_file_metadata,  # noqa: F401
         hf_hub_download,  # noqa: F401
@@ -1243,7 +1237,6 @@ if TYPE_CHECKING: # pragma: no cover
         get_safetensors_metadata,  # noqa: F401
         get_space_runtime,  # noqa: F401
         get_space_variables,  # noqa: F401
-        get_token_permission,  # noqa: F401
         get_user_overview,  # noqa: F401
         get_webhook,  # noqa: F401
         grant_access,  # noqa: F401
@@ -1258,6 +1251,7 @@ if TYPE_CHECKING: # pragma: no cover
         list_lfs_files,  # noqa: F401
         list_liked_repos,  # noqa: F401
         list_models,  # noqa: F401
+        list_organization_followers,  # noqa: F401
         list_organization_members,  # noqa: F401
         list_papers,  # noqa: F401
         list_pending_access_requests,  # noqa: F401
@@ -1304,7 +1298,6 @@ if TYPE_CHECKING: # pragma: no cover
         update_collection_metadata,  # noqa: F401
         update_inference_endpoint,  # noqa: F401
         update_repo_settings,  # noqa: F401
-        update_repo_visibility,  # noqa: F401
         update_webhook,  # noqa: F401
         upload_file,  # noqa: F401
         upload_folder,  # noqa: F401
@@ -1491,13 +1484,6 @@ if TYPE_CHECKING: # pragma: no cover
     )
     from .inference._mcp.agent import Agent  # noqa: F401
     from .inference._mcp.mcp_client import MCPClient  # noqa: F401
-    from .inference_api import InferenceApi  # noqa: F401
-    from .keras_mixin import (
-        KerasModelHubMixin,  # noqa: F401
-        from_pretrained_keras,  # noqa: F401
-        push_to_hub_keras,  # noqa: F401
-        save_pretrained_keras,  # noqa: F401
-    )
     from .repocard import (
         DatasetCard,  # noqa: F401
         ModelCard,  # noqa: F401
@@ -1515,10 +1501,8 @@ if TYPE_CHECKING: # pragma: no cover
         ModelCardData,  # noqa: F401
         SpaceCardData,  # noqa: F401
     )
-    from .repository import Repository  # noqa: F401
     from .serialization import (
         StateDictSplit,  # noqa: F401
-        get_tf_storage_size,  # noqa: F401
         get_torch_storage_id,  # noqa: F401
         get_torch_storage_size,  # noqa: F401
         load_state_dict_from_file,  # noqa: F401
@@ -1526,7 +1510,6 @@ if TYPE_CHECKING: # pragma: no cover
         save_torch_model,  # noqa: F401
         save_torch_state_dict,  # noqa: F401
         split_state_dict_into_shards_factory,  # noqa: F401
-        split_tf_state_dict_into_shards,  # noqa: F401
         split_torch_state_dict_into_shards,  # noqa: F401
     )
     from .serialization._dduf import (
@@ -1536,6 +1519,8 @@ if TYPE_CHECKING: # pragma: no cover
         read_dduf_file,  # noqa: F401
     )
     from .utils import (
+        ASYNC_CLIENT_FACTORY_T,  # noqa: F401
+        CLIENT_FACTORY_T,  # noqa: F401
         CachedFileInfo,  # noqa: F401
         CachedRepoInfo,  # noqa: F401
         CachedRevisionInfo,  # noqa: F401
@@ -1543,12 +1528,15 @@ if TYPE_CHECKING: # pragma: no cover
         CorruptedCacheException,  # noqa: F401
         DeleteCacheStrategy,  # noqa: F401
         HFCacheInfo,  # noqa: F401
-        HfFolder,  # noqa: F401
         cached_assets_path,  # noqa: F401
-        configure_http_backend,  # noqa: F401
+        close_session,  # noqa: F401
         dump_environment_info,  # noqa: F401
+        get_async_session,  # noqa: F401
         get_session,  # noqa: F401
         get_token,  # noqa: F401
+        hf_raise_for_status,  # noqa: F401
         logging,  # noqa: F401
         scan_cache_dir,  # noqa: F401
+        set_async_client_factory,  # noqa: F401
+        set_client_factory,  # noqa: F401
     )
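
Note on the export changes above: `HfFolder`, `configure_http_backend`, `Repository`, `InferenceApi`, and the Keras/TensorFlow helpers are removed, while the new `utils` exports (`set_client_factory`, `set_async_client_factory`, `get_async_session`, `close_session`, `hf_raise_for_status`) point to an httpx-based HTTP backend. A minimal migration sketch, assuming `set_client_factory` takes a zero-argument callable returning an `httpx.Client` (inferred from the exported names, not confirmed by this diff):

    import httpx

    from huggingface_hub import close_session, get_session, set_client_factory

    def make_client() -> httpx.Client:
        # Hypothetical proxy URL; configure timeouts, transports, etc. here,
        # much like the old configure_http_backend() factory did for requests.
        return httpx.Client(proxy="http://localhost:3128", timeout=30.0)

    set_client_factory(make_client)
    session = get_session()  # the shared httpx.Client built by the factory
    close_session()          # dispose of the shared client on shutdown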
huggingface_hub/_commit_api.py

@@ -11,7 +11,7 @@ from contextlib import contextmanager
 from dataclasses import dataclass, field
 from itertools import groupby
 from pathlib import Path, PurePosixPath
-from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Literal, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, BinaryIO, Iterable, Iterator, Literal, Optional, Union
 
 from tqdm.contrib.concurrent import thread_map
 
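The shrinking `typing` import above is the pattern behind most hunks in this release: `List`, `Dict`, and `Tuple` give way to the PEP 585 built-in generics, which are subscriptable on Python 3.9+. A small, self-contained illustration of the equivalence:

    from typing import Optional

    def tally(paths: list[str]) -> dict[str, int]:  # was List[str] -> Dict[str, int]
        counts: dict[str, int] = {}
        for path in paths:
            counts[path] = counts.get(path, 0) + 1
        return counts

    revision: Optional[str] = None  # Optional and Union still come from typing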
 
@@ -236,7 +236,7 @@ class CommitOperationAdd:
         config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
 
         >>> with operation.as_file(with_tqdm=True) as file:
-        ...     requests.put(..., data=file)
+        ...     httpx.put(..., data=file)
         config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
         ```
         """
@@ -307,7 +307,7 @@ def _validate_path_in_repo(path_in_repo: str) -> str:
 CommitOperation = Union[CommitOperationAdd, CommitOperationCopy, CommitOperationDelete]
 
 
-def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
+def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:
     """
     Warn user when a list of operations is expected to overwrite itself in a single
     commit.
@@ -322,7 +322,7 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
     delete before upload) but can happen if a user deletes an entire folder and then
     add new files to it.
     """
-    nb_additions_per_path: Dict[str, int] = defaultdict(int)
+    nb_additions_per_path: dict[str, int] = defaultdict(int)
     for operation in operations:
         path_in_repo = operation.path_in_repo
         if isinstance(operation, CommitOperationAdd):
@@ -356,10 +356,10 @@
 @validate_hf_hub_args
 def _upload_files(
     *,
-    additions: List[CommitOperationAdd],
+    additions: list[CommitOperationAdd],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     endpoint: Optional[str] = None,
     num_threads: int = 5,
     revision: Optional[str] = None,
@@ -368,14 +368,14 @@
     """
     Negotiates per-file transfer (LFS vs Xet) and uploads in batches.
     """
-    xet_additions: List[CommitOperationAdd] = []
-    lfs_actions: List[Dict] = []
-    lfs_oid2addop: Dict[str, CommitOperationAdd] = {}
+    xet_additions: list[CommitOperationAdd] = []
+    lfs_actions: list[dict[str, Any]] = []
+    lfs_oid2addop: dict[str, CommitOperationAdd] = {}
 
     for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
         chunk_list = [op for op in chunk]
 
-        transfers: List[str] = ["basic", "multipart"]
+        transfers: list[str] = ["basic", "multipart"]
         has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
         if is_xet_available():
             if not has_buffered_io_data:
@@ -438,9 +438,9 @@
 @validate_hf_hub_args
 def _upload_lfs_files(
     *,
-    actions: List[Dict],
-    oid2addop: Dict[str, CommitOperationAdd],
-    headers: Dict[str, str],
+    actions: list[dict[str, Any]],
+    oid2addop: dict[str, CommitOperationAdd],
+    headers: dict[str, str],
     endpoint: Optional[str] = None,
     num_threads: int = 5,
 ):
@@ -451,11 +451,11 @@
     - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
 
     Args:
-        actions (`List[Dict]`):
+        actions (`list[dict[str, Any]]`):
             LFS batch actions returned by the server.
-        oid2addop (`Dict[str, CommitOperationAdd]`):
+        oid2addop (`dict[str, CommitOperationAdd]`):
             A dictionary mapping the OID of the file to the corresponding `CommitOperationAdd` object.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         endpoint (`str`, *optional*):
             The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
@@ -470,7 +470,7 @@
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         num_threads (`int`, *optional*):
             The number of concurrent threads to use when uploading. Defaults to 5.
@@ -482,7 +482,7 @@
             If an upload failed for any reason
         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If the server returns malformed responses
-        [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+        [`HfHubHTTPError`]
             If the LFS batch endpoint returned an HTTP error.
     """
     # Filter out files already present upstream
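
The docstring above now references `HfHubHTTPError` instead of `requests.HTTPError`, in line with the move away from `requests`. A hedged sketch of what calling code catches under 1.0 (`hf_raise_for_status` is newly re-exported at the top level per this diff; the repo id is hypothetical):

    from huggingface_hub import get_session, hf_raise_for_status
    from huggingface_hub.errors import HfHubHTTPError

    try:
        response = get_session().get("https://huggingface.co/api/models/some-user/some-model")
        hf_raise_for_status(response)  # raises HfHubHTTPError on 4xx/5xx
    except HfHubHTTPError as err:
        print(err)  # message carries the server's error details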
@@ -503,11 +503,7 @@
         except Exception as exc:
             raise RuntimeError(f"Error while uploading '{operation.path_in_repo}' to the Hub.") from exc
 
-    if constants.HF_HUB_ENABLE_HF_TRANSFER:
-        logger.debug(f"Uploading {len(filtered_actions)} LFS files to the Hub using `hf_transfer`.")
-        for action in hf_tqdm(filtered_actions, name="huggingface_hub.lfs_upload"):
-            _wrapped_lfs_upload(action)
-    elif len(filtered_actions) == 1:
+    if len(filtered_actions) == 1:
         logger.debug("Uploading 1 LFS file to the Hub")
         _wrapped_lfs_upload(filtered_actions[0])
     else:
@@ -526,10 +522,10 @@
 @validate_hf_hub_args
 def _upload_xet_files(
     *,
-    additions: List[CommitOperationAdd],
+    additions: list[CommitOperationAdd],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     endpoint: Optional[str] = None,
     revision: Optional[str] = None,
     create_pr: Optional[bool] = None,
@@ -539,14 +535,14 @@
     This chunks the files and deduplicates the chunks before uploading them to xetcas storage.
 
     Args:
-        additions (`List` of `CommitOperationAdd`):
+        additions (`list` of `CommitOperationAdd`):
            The files to be uploaded.
        repo_type (`str`):
            Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
            Headers to use for the request, including authorization headers and user agent.
        endpoint: (`str`, *optional*):
            The endpoint to use for the xetcas service. Defaults to `constants.ENDPOINT`.
@@ -560,7 +556,7 @@
             If an upload failed for any reason.
         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If the server returns malformed responses or if the user is unauthorized to upload to xet storage.
-        [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+        [`HfHubHTTPError`]
             If the LFS batch endpoint returned an HTTP error.
 
     **How it works:**
@@ -615,7 +611,7 @@
     xet_endpoint = xet_connection_info.endpoint
     access_token_info = (xet_connection_info.access_token, xet_connection_info.expiration_unix_epoch)
 
-    def token_refresher() -> Tuple[str, int]:
+    def token_refresher() -> tuple[str, int]:
         new_xet_connection = fetch_xet_connection_info_from_repo_info(
             token_type=XetTokenType.WRITE,
             repo_id=repo_id,
@@ -688,7 +684,7 @@ def _fetch_upload_modes(
     additions: Iterable[CommitOperationAdd],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     revision: str,
     endpoint: Optional[str] = None,
     create_pr: bool = False,
@@ -707,7 +703,7 @@
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         revision (`str`):
             The git revision to upload the files to. Can be any valid git revision.
@@ -725,12 +721,12 @@
     endpoint = endpoint if endpoint is not None else constants.ENDPOINT
 
     # Fetch upload mode (LFS or regular) chunk by chunk.
-    upload_modes: Dict[str, UploadMode] = {}
-    should_ignore_info: Dict[str, bool] = {}
-    oid_info: Dict[str, Optional[str]] = {}
+    upload_modes: dict[str, UploadMode] = {}
+    should_ignore_info: dict[str, bool] = {}
+    oid_info: dict[str, Optional[str]] = {}
 
     for chunk in chunk_iterable(additions, 256):
-        payload: Dict = {
+        payload: dict = {
             "files": [
                 {
                     "path": op.path_in_repo,
@@ -773,10 +769,10 @@
     copies: Iterable[CommitOperationCopy],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     revision: str,
     endpoint: Optional[str] = None,
-) -> Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]]:
+) -> dict[tuple[str, Optional[str]], Union["RepoFile", bytes]]:
     """
     Fetch information about the files to copy.
 
@@ -792,12 +788,12 @@
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         revision (`str`):
             The git revision to upload the files to. Can be any valid git revision.
 
-    Returns: `Dict[Tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
+    Returns: `dict[tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
         Key is the file path and revision of the file to copy.
         Value is the raw content as bytes (for regular files) or the file information as a RepoFile (for LFS files).
 
@@ -810,9 +806,9 @@
     from .hf_api import HfApi, RepoFolder
 
     hf_api = HfApi(endpoint=endpoint, headers=headers)
-    files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
+    files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
     # Store (path, revision) -> oid mapping
-    oid_info: Dict[Tuple[str, Optional[str]], Optional[str]] = {}
+    oid_info: dict[tuple[str, Optional[str]], Optional[str]] = {}
     # 1. Fetch OIDs for destination paths in batches.
     dest_paths = [op.path_in_repo for op in copies]
     for offset in range(0, len(dest_paths), FETCH_LFS_BATCH_SIZE):
@@ -872,11 +868,11 @@
 
 def _prepare_commit_payload(
     operations: Iterable[CommitOperation],
-    files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]],
+    files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]],
     commit_message: str,
     commit_description: Optional[str] = None,
     parent_commit: Optional[str] = None,
-) -> Iterable[Dict[str, Any]]:
+) -> Iterable[dict[str, Any]]:
     """
     Builds the payload to POST to the `/commit` API of the Hub.
 
huggingface_hub/_commit_scheduler.py

@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from io import SEEK_END, SEEK_SET, BytesIO
 from pathlib import Path
 from threading import Lock, Thread
-from typing import Dict, List, Optional, Union
+from typing import Optional, Union
 
 from .hf_api import DEFAULT_IGNORE_PATTERNS, CommitInfo, CommitOperationAdd, HfApi
 from .utils import filter_repo_objects
@@ -53,9 +53,9 @@ class CommitScheduler:
             Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
         token (`str`, *optional*):
             The token to use to commit to the repo. Defaults to the token saved on the machine.
-        allow_patterns (`List[str]` or `str`, *optional*):
+        allow_patterns (`list[str]` or `str`, *optional*):
             If provided, only files matching at least one pattern are uploaded.
-        ignore_patterns (`List[str]` or `str`, *optional*):
+        ignore_patterns (`list[str]` or `str`, *optional*):
             If provided, files matching any of the patterns are not uploaded.
         squash_history (`bool`, *optional*):
             Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
@@ -108,8 +108,8 @@
         revision: Optional[str] = None,
         private: Optional[bool] = None,
         token: Optional[str] = None,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
         squash_history: bool = False,
        hf_api: Optional["HfApi"] = None,
     ) -> None:
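
For context on the `allow_patterns`/`ignore_patterns` parameters retyped above, a minimal `CommitScheduler` usage sketch (repo id and folder are hypothetical):

    from huggingface_hub import CommitScheduler

    scheduler = CommitScheduler(
        repo_id="my-user/my-dataset",   # hypothetical repo
        repo_type="dataset",
        folder_path="data/",            # local folder to watch
        every=5,                        # minutes between background commits
        allow_patterns=["*.json"],      # upload only JSON files
        ignore_patterns=["*.tmp"],      # skip temporary files
    )
    # ... append to files under data/; uploads run in a background thread ...
    scheduler.stop()  # stop scheduling; no further commits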
@@ -138,7 +138,7 @@
         self.token = token
 
         # Keep track of already uploaded files
-        self.last_uploaded: Dict[Path, float] = {}  # key is local path, value is timestamp
+        self.last_uploaded: dict[Path, float] = {}  # key is local path, value is timestamp
 
         # Scheduler
         if not every > 0:
@@ -229,7 +229,7 @@
         prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else ""
 
         # Filter with pattern + filter out unchanged files + retrieve current file size
-        files_to_upload: List[_FileToUpload] = []
+        files_to_upload: list[_FileToUpload] = []
         for relpath in filter_repo_objects(
             relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns
         ):
@@ -312,10 +312,13 @@ class PartialFileIO(BytesIO):
         return self._size_limit
 
     def __getattribute__(self, name: str):
-        if name.startswith("_") or name in ("read", "tell", "seek"):  # only 3 public methods supported
+        if name.startswith("_") or name in ("read", "tell", "seek", "fileno"):  # only 4 public methods supported
             return super().__getattribute__(name)
         raise NotImplementedError(f"PartialFileIO does not support '{name}'.")
 
+    def fileno(self):
+        raise AttributeError("PartialFileIO does not have a fileno.")
+
     def tell(self) -> int:
         """Return the current file position."""
         return self._file.tell()
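
A note on the `fileno` addition above: the name is whitelisted in `__getattribute__`, but the method raises `AttributeError`, the same exception a caller sees on an object with no `fileno` at all. Presumably this makes HTTP clients that probe `fileno()` inside a `try/except AttributeError` fall back to generic streaming rather than `fstat`-ing a descriptor that does not exist. A standalone sketch of the pattern (the probing helper is illustrative, not from this codebase):

    import io
    import os

    class StreamOnly(io.RawIOBase):
        # A file-like object that explicitly has no OS-level descriptor.
        def fileno(self):
            raise AttributeError("no underlying file descriptor")

    def body_size(f) -> str:
        try:
            return f"{os.fstat(f.fileno()).st_size} bytes (via fstat)"
        except AttributeError:
            return "unknown; stream until EOF"

    print(body_size(StreamOnly()))            # -> unknown; stream until EOF
    print(body_size(open(os.devnull, "rb")))  # -> 0 bytes (via fstat)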