huggingface-hub 1.0.0rc1__tar.gz → 1.0.0rc3__tar.gz

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.

Potentially problematic release: this version of huggingface-hub might be problematic.

Files changed (171)
  1. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/PKG-INFO +2 -2
  2. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/README.md +1 -1
  3. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/setup.py +0 -1
  4. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/__init__.py +4 -7
  5. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_commit_api.py +126 -66
  6. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_commit_scheduler.py +4 -7
  7. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_login.py +10 -16
  8. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_snapshot_download.py +119 -21
  9. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_tensorboard_logger.py +2 -5
  10. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_upload_large_folder.py +1 -2
  11. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_webhooks_server.py +8 -20
  12. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/_cli_utils.py +12 -6
  13. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/download.py +32 -7
  14. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/repo.py +137 -5
  15. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/dataclasses.py +122 -2
  16. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/errors.py +4 -0
  17. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/fastai_utils.py +22 -32
  18. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/file_download.py +234 -38
  19. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/hf_api.py +385 -424
  20. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/hf_file_system.py +55 -65
  21. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_client.py +27 -48
  22. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/_async_client.py +27 -48
  23. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/image_to_image.py +6 -2
  24. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/agent.py +2 -5
  25. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/mcp_client.py +6 -8
  26. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/__init__.py +16 -0
  27. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/_common.py +2 -0
  28. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/fal_ai.py +2 -0
  29. huggingface_hub-1.0.0rc3/src/huggingface_hub/inference/_providers/publicai.py +6 -0
  30. huggingface_hub-1.0.0rc3/src/huggingface_hub/inference/_providers/scaleway.py +28 -0
  31. huggingface_hub-1.0.0rc3/src/huggingface_hub/inference/_providers/zai_org.py +17 -0
  32. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/lfs.py +14 -8
  33. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/repocard.py +12 -16
  34. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/serialization/_base.py +3 -6
  35. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/serialization/_torch.py +16 -34
  36. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/__init__.py +1 -2
  37. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_cache_manager.py +42 -72
  38. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_chunk_utils.py +2 -3
  39. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_http.py +37 -68
  40. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_validators.py +2 -2
  41. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/logging.py +8 -11
  42. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub.egg-info/PKG-INFO +2 -2
  43. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub.egg-info/SOURCES.txt +3 -15
  44. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub.egg-info/entry_points.txt +0 -1
  45. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/__init__.py +0 -27
  46. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/_cli_utils.py +0 -74
  47. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/delete_cache.py +0 -476
  48. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/download.py +0 -195
  49. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/env.py +0 -39
  50. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/huggingface_cli.py +0 -65
  51. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/lfs.py +0 -200
  52. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/repo.py +0 -151
  53. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/repo_files.py +0 -132
  54. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/scan_cache.py +0 -183
  55. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/tag.py +0 -159
  56. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/upload.py +0 -318
  57. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/upload_large_folder.py +0 -131
  58. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/user.py +0 -207
  59. huggingface_hub-1.0.0rc1/src/huggingface_hub/commands/version.py +0 -40
  60. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/LICENSE +0 -0
  61. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/MANIFEST.in +0 -0
  62. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/pyproject.toml +0 -0
  63. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/setup.cfg +0 -0
  64. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_inference_endpoints.py +0 -0
  65. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_jobs_api.py +0 -0
  66. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_local_folder.py +0 -0
  67. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_oauth.py +0 -0
  68. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_space_api.py +0 -0
  69. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/_webhooks_payload.py +0 -0
  70. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/__init__.py +0 -0
  71. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/auth.py +0 -0
  72. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/cache.py +0 -0
  73. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/hf.py +0 -0
  74. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/jobs.py +0 -0
  75. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/lfs.py +0 -0
  76. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/repo_files.py +0 -0
  77. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/system.py +0 -0
  78. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/upload.py +0 -0
  79. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/cli/upload_large_folder.py +0 -0
  80. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/community.py +0 -0
  81. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/constants.py +0 -0
  82. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/hub_mixin.py +0 -0
  83. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/__init__.py +0 -0
  84. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_common.py +0 -0
  85. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/__init__.py +0 -0
  86. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/__init__.py +0 -0
  87. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/audio_classification.py +0 -0
  88. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/audio_to_audio.py +0 -0
  89. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +0 -0
  90. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/base.py +0 -0
  91. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/chat_completion.py +0 -0
  92. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/depth_estimation.py +0 -0
  93. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/document_question_answering.py +0 -0
  94. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/feature_extraction.py +0 -0
  95. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/fill_mask.py +0 -0
  96. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/image_classification.py +0 -0
  97. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/image_segmentation.py +0 -0
  98. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/image_to_text.py +0 -0
  99. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/image_to_video.py +0 -0
  100. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/object_detection.py +0 -0
  101. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/question_answering.py +0 -0
  102. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/sentence_similarity.py +0 -0
  103. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/summarization.py +0 -0
  104. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/table_question_answering.py +0 -0
  105. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text2text_generation.py +0 -0
  106. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text_classification.py +0 -0
  107. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text_generation.py +0 -0
  108. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text_to_audio.py +0 -0
  109. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text_to_image.py +0 -0
  110. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text_to_speech.py +0 -0
  111. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/text_to_video.py +0 -0
  112. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/token_classification.py +0 -0
  113. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/translation.py +0 -0
  114. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/video_classification.py +0 -0
  115. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/visual_question_answering.py +0 -0
  116. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/zero_shot_classification.py +0 -0
  117. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +0 -0
  118. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +0 -0
  119. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/__init__.py +0 -0
  120. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/_cli_hacks.py +0 -0
  121. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/cli.py +0 -0
  122. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/constants.py +0 -0
  123. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/types.py +0 -0
  124. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_mcp/utils.py +0 -0
  125. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/black_forest_labs.py +0 -0
  126. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/cerebras.py +0 -0
  127. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/cohere.py +0 -0
  128. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/featherless_ai.py +0 -0
  129. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/fireworks_ai.py +0 -0
  130. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/groq.py +0 -0
  131. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/hf_inference.py +0 -0
  132. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/hyperbolic.py +0 -0
  133. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/nebius.py +0 -0
  134. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/novita.py +0 -0
  135. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/nscale.py +0 -0
  136. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/openai.py +0 -0
  137. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/replicate.py +0 -0
  138. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/sambanova.py +0 -0
  139. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/inference/_providers/together.py +0 -0
  140. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/py.typed +0 -0
  141. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/repocard_data.py +0 -0
  142. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/serialization/__init__.py +0 -0
  143. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/serialization/_dduf.py +0 -0
  144. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/templates/datasetcard_template.md +0 -0
  145. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/templates/modelcard_template.md +0 -0
  146. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_auth.py +0 -0
  147. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_cache_assets.py +0 -0
  148. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_datetime.py +0 -0
  149. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_deprecation.py +0 -0
  150. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_dotenv.py +0 -0
  151. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_experimental.py +0 -0
  152. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_fixes.py +0 -0
  153. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_git_credential.py +0 -0
  154. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_headers.py +0 -0
  155. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_lfs.py +0 -0
  156. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_pagination.py +0 -0
  157. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_paths.py +0 -0
  158. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_runtime.py +0 -0
  159. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_safetensors.py +0 -0
  160. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_subprocess.py +0 -0
  161. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_telemetry.py +0 -0
  162. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_typing.py +0 -0
  163. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_xet.py +0 -0
  164. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/_xet_progress_reporting.py +0 -0
  165. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/endpoint_helpers.py +0 -0
  166. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/insecure_hashlib.py +0 -0
  167. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/sha.py +0 -0
  168. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub/utils/tqdm.py +0 -0
  169. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub.egg-info/dependency_links.txt +0 -0
  170. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub.egg-info/requires.txt +0 -0
  171. {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc3}/src/huggingface_hub.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: huggingface_hub
- Version: 1.0.0rc1
+ Version: 1.0.0rc3
  Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
  Home-page: https://github.com/huggingface/huggingface_hub
  Author: Hugging Face, Inc.
@@ -105,7 +105,7 @@ If you prefer, you can also install it with [conda](https://huggingface.co/docs/
  In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want have a complete experience for Inference, run:

  ```bash
- pip install huggingface_hub[inference]
+ pip install "huggingface_hub[inference]"
  ```

  To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation).
@@ -65,7 +65,7 @@ If you prefer, you can also install it with [conda](https://huggingface.co/docs/
  In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want have a complete experience for Inference, run:

  ```bash
- pip install huggingface_hub[inference]
+ pip install "huggingface_hub[inference]"
  ```

  To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation).
@@ -131,7 +131,6 @@ setup(
      extras_require=extras,
      entry_points={
          "console_scripts": [
-             "huggingface-cli=huggingface_hub.commands.huggingface_cli:main",
              "hf=huggingface_hub.cli.hf:main",
              "tiny-agents=huggingface_hub.inference._mcp.cli:app",
          ],
@@ -46,7 +46,7 @@ import sys
  from typing import TYPE_CHECKING


- __version__ = "1.0.0.rc1"
+ __version__ = "1.0.0.rc3"

  # Alphabetical order of definitions is ensured in tests
  # WARNING: any comment added in this dictionary definition will be lost when
@@ -138,6 +138,7 @@ _SUBMOD_ATTRS = {
          "push_to_hub_fastai",
      ],
      "file_download": [
+         "DryRunFileInfo",
          "HfFileMetadata",
          "_CACHED_NO_EXIST",
          "get_hf_file_metadata",
@@ -513,8 +514,6 @@
          "CorruptedCacheException",
          "DeleteCacheStrategy",
          "HFCacheInfo",
-         "HfHubAsyncTransport",
-         "HfHubTransport",
          "cached_assets_path",
          "close_session",
          "dump_environment_info",
@@ -625,6 +624,7 @@ __all__ = [
      "DocumentQuestionAnsweringInputData",
      "DocumentQuestionAnsweringOutputElement",
      "DocumentQuestionAnsweringParameters",
+     "DryRunFileInfo",
      "EvalResult",
      "FLAX_WEIGHTS_NAME",
      "FeatureExtractionInput",
@@ -645,8 +645,6 @@
      "HfFileSystemFile",
      "HfFileSystemResolvedPath",
      "HfFileSystemStreamFile",
-     "HfHubAsyncTransport",
-     "HfHubTransport",
      "ImageClassificationInput",
      "ImageClassificationOutputElement",
      "ImageClassificationOutputTransform",
@@ -1147,6 +1145,7 @@ if TYPE_CHECKING: # pragma: no cover
      )
      from .file_download import (
          _CACHED_NO_EXIST, # noqa: F401
+         DryRunFileInfo, # noqa: F401
          HfFileMetadata, # noqa: F401
          get_hf_file_metadata, # noqa: F401
          hf_hub_download, # noqa: F401
@@ -1515,8 +1514,6 @@ if TYPE_CHECKING: # pragma: no cover
          CorruptedCacheException, # noqa: F401
          DeleteCacheStrategy, # noqa: F401
          HFCacheInfo, # noqa: F401
-         HfHubAsyncTransport, # noqa: F401
-         HfHubTransport, # noqa: F401
          cached_assets_path, # noqa: F401
          close_session, # noqa: F401
          dump_environment_info, # noqa: F401
@@ -33,6 +33,7 @@ from .utils import (
      validate_hf_hub_args,
  )
  from .utils import tqdm as hf_tqdm
+ from .utils._runtime import is_xet_available


  if TYPE_CHECKING:
@@ -353,7 +354,7 @@ def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:


  @validate_hf_hub_args
- def _upload_lfs_files(
+ def _upload_files(
      *,
      additions: list[CommitOperationAdd],
      repo_type: str,
@@ -362,6 +363,86 @@
      endpoint: Optional[str] = None,
      num_threads: int = 5,
      revision: Optional[str] = None,
+     create_pr: Optional[bool] = None,
+ ):
+     """
+     Negotiates per-file transfer (LFS vs Xet) and uploads in batches.
+     """
+     xet_additions: list[CommitOperationAdd] = []
+     lfs_actions: list[dict[str, Any]] = []
+     lfs_oid2addop: dict[str, CommitOperationAdd] = {}
+
+     for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
+         chunk_list = [op for op in chunk]
+
+         transfers: list[str] = ["basic", "multipart"]
+         has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
+         if is_xet_available():
+             if not has_buffered_io_data:
+                 transfers.append("xet")
+             else:
+                 logger.warning(
+                     "Uploading files as a binary IO buffer is not supported by Xet Storage. "
+                     "Falling back to HTTP upload."
+                 )
+
+         actions_chunk, errors_chunk, chosen_transfer = post_lfs_batch_info(
+             upload_infos=[op.upload_info for op in chunk_list],
+             repo_id=repo_id,
+             repo_type=repo_type,
+             revision=revision,
+             endpoint=endpoint,
+             headers=headers,
+             token=None, # already passed in 'headers'
+             transfers=transfers,
+         )
+         if errors_chunk:
+             message = "\n".join(
+                 [
+                     f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
+                     for err in errors_chunk
+                 ]
+             )
+             raise ValueError(f"LFS batch API returned errors:\n{message}")
+
+         # If server returns a transfer we didn't offer (e.g "xet" while uploading from BytesIO),
+         # fall back to LFS for this chunk.
+         if chosen_transfer == "xet" and ("xet" in transfers):
+             xet_additions.extend(chunk_list)
+         else:
+             lfs_actions.extend(actions_chunk)
+             for op in chunk_list:
+                 lfs_oid2addop[op.upload_info.sha256.hex()] = op
+
+     if len(lfs_actions) > 0:
+         _upload_lfs_files(
+             actions=lfs_actions,
+             oid2addop=lfs_oid2addop,
+             headers=headers,
+             endpoint=endpoint,
+             num_threads=num_threads,
+         )
+
+     if len(xet_additions) > 0:
+         _upload_xet_files(
+             additions=xet_additions,
+             repo_type=repo_type,
+             repo_id=repo_id,
+             headers=headers,
+             endpoint=endpoint,
+             revision=revision,
+             create_pr=create_pr,
+         )
+
+
+ @validate_hf_hub_args
+ def _upload_lfs_files(
+     *,
+     actions: list[dict[str, Any]],
+     oid2addop: dict[str, CommitOperationAdd],
+     headers: dict[str, str],
+     endpoint: Optional[str] = None,
+     num_threads: int = 5,
  ):
      """
      Uploads the content of `additions` to the Hub using the large file storage protocol.
@@ -370,9 +451,21 @@ def _upload_lfs_files(
          - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md

      Args:
-         additions (`List` of `CommitOperationAdd`):
-             The files to be uploaded
-         repo_type (`str`):
+         actions (`list[dict[str, Any]]`):
+             LFS batch actions returned by the server.
+         oid2addop (`dict[str, CommitOperationAdd]`):
+             A dictionary mapping the OID of the file to the corresponding `CommitOperationAdd` object.
+         headers (`dict[str, str]`):
+             Headers to use for the request, including authorization headers and user agent.
+         endpoint (`str`, *optional*):
+             The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
+         num_threads (`int`, *optional*):
+             The number of concurrent threads to use when uploading. Defaults to 5.
+
+     Raises:
+         [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+             If an upload failed for any reason
+         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
              Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
      repo_id (`str`):
          A namespace (user or an organization) and a repo name separated
@@ -392,50 +485,17 @@
      [`HfHubHTTPError`]
          If the LFS batch endpoint returned an HTTP error.
      """
-     # Step 1: retrieve upload instructions from the LFS batch endpoint.
-     # Upload instructions are retrieved by chunk of 256 files to avoid reaching
-     # the payload limit.
-     batch_actions: list[dict] = []
-     for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
-         batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
-             upload_infos=[op.upload_info for op in chunk],
-             repo_id=repo_id,
-             repo_type=repo_type,
-             revision=revision,
-             endpoint=endpoint,
-             headers=headers,
-             token=None, # already passed in 'headers'
-         )
-
-         # If at least 1 error, we do not retrieve information for other chunks
-         if batch_errors_chunk:
-             message = "\n".join(
-                 [
-                     f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
-                     for err in batch_errors_chunk
-                 ]
-             )
-             raise ValueError(f"LFS batch endpoint returned errors:\n{message}")
-
-         batch_actions += batch_actions_chunk
-     oid2addop = {add_op.upload_info.sha256.hex(): add_op for add_op in additions}
-
-     # Step 2: ignore files that have already been uploaded
+     # Filter out files already present upstream
      filtered_actions = []
-     for action in batch_actions:
+     for action in actions:
          if action.get("actions") is None:
              logger.debug(
-                 f"Content of file {oid2addop[action['oid']].path_in_repo} is already"
-                 " present upstream - skipping upload."
+                 f"Content of file {oid2addop[action['oid']].path_in_repo} is already present upstream - skipping upload."
              )
          else:
              filtered_actions.append(action)

-     if len(filtered_actions) == 0:
-         logger.debug("No LFS files to upload.")
-         return
-
-     # Step 3: upload files concurrently according to these instructions
+     # Upload according to server-provided actions
      def _wrapped_lfs_upload(batch_action) -> None:
          try:
              operation = oid2addop[batch_action["oid"]]
@@ -479,7 +539,7 @@ def _upload_xet_files(
      This chunks the files and deduplicates the chunks before uploading them to xetcas storage.

      Args:
-         additions (`List` of `CommitOperationAdd`):
+         additions (`list` of `CommitOperationAdd`):
              The files to be uploaded.
          repo_type (`str`):
              Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
@@ -576,30 +636,30 @@
      progress, progress_callback = None, None

      try:
-         for i, chunk in enumerate(chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES)):
-             _chunk = [op for op in chunk]
-
-             bytes_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, bytes)]
-             paths_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, (str, Path))]
-
-             if len(paths_ops) > 0:
-                 upload_files(
-                     [str(op.path_or_fileobj) for op in paths_ops],
-                     xet_endpoint,
-                     access_token_info,
-                     token_refresher,
-                     progress_callback,
-                     repo_type,
-                 )
-             if len(bytes_ops) > 0:
-                 upload_bytes(
-                     [op.path_or_fileobj for op in bytes_ops],
-                     xet_endpoint,
-                     access_token_info,
-                     token_refresher,
-                     progress_callback,
-                     repo_type,
-                 )
+         all_bytes_ops = [op for op in additions if isinstance(op.path_or_fileobj, bytes)]
+         all_paths_ops = [op for op in additions if isinstance(op.path_or_fileobj, (str, Path))]
+
+         if len(all_paths_ops) > 0:
+             all_paths = [str(op.path_or_fileobj) for op in all_paths_ops]
+             upload_files(
+                 all_paths,
+                 xet_endpoint,
+                 access_token_info,
+                 token_refresher,
+                 progress_callback,
+                 repo_type,
+             )
+
+         if len(all_bytes_ops) > 0:
+             all_bytes = [op.path_or_fileobj for op in all_bytes_ops]
+             upload_bytes(
+                 all_bytes,
+                 xet_endpoint,
+                 access_token_info,
+                 token_refresher,
+                 progress_callback,
+                 repo_type,
+             )

      finally:
          if progress is not None:
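The `_upload_files` function added above changes how transfers are negotiated: each batch of operations offers `basic` and `multipart` to the LFS batch endpoint, adds `xet` only when the xet backend is available and no operation in the chunk reads from a buffered binary stream, and then routes the chunk to `_upload_xet_files` or `_upload_lfs_files` depending on the transfer the server picked. Below is a minimal, self-contained sketch of the offer logic only, simplified to a per-file check; it is not the library's internals.

```python
import io
from pathlib import Path
from typing import Union

def offered_transfers(path_or_fileobj: Union[str, Path, bytes, io.BufferedIOBase], xet_available: bool) -> list[str]:
    """Sketch of the transfer-offer decision made per chunk in `_upload_files` above."""
    transfers = ["basic", "multipart"]  # always offered to the LFS batch endpoint
    # Buffered binary streams are not supported by Xet Storage, so "xet" is only
    # offered for paths or raw bytes when the xet backend is available.
    if xet_available and not isinstance(path_or_fileobj, io.BufferedIOBase):
        transfers.append("xet")
    return transfers

print(offered_transfers(Path("model.safetensors"), xet_available=True))  # ['basic', 'multipart', 'xet']
with open(__file__, "rb") as fh:  # any buffered binary stream
    print(offered_transfers(fh, xet_available=True))  # ['basic', 'multipart']
```

Even when `xet` is offered, the server's `chosen_transfer` has the final say; anything not routed to Xet falls back to the LFS flow via the reworked `_upload_lfs_files`.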
@@ -205,13 +205,10 @@ class CommitScheduler:
          """
          Push folder to the Hub and return the commit info.

-         <Tip warning={true}>
-
-         This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
-         queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
-         issues.
-
-         </Tip>
+         > [!WARNING]
+         > This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
+         > queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
+         > issues.

          The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and
          uploads only changed files. If no changes are found, the method returns without committing anything. If you want
@@ -20,7 +20,7 @@ from pathlib import Path
  from typing import Optional

  from . import constants
- from .commands._cli_utils import ANSI
+ from .cli._cli_utils import ANSI
  from .utils import (
      capture_output,
      get_token,
@@ -70,21 +70,15 @@ def login(
      To log in from outside of a script, one can also use `hf auth login` which is
      a cli command that wraps [`login`].

-     <Tip>
+     > [!TIP]
+     > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
+     > extends its capabilities.

-     [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
-     extends its capabilities.
-
-     </Tip>
-
-     <Tip>
-
-     When the token is not passed, [`login`] will automatically detect if the script runs
-     in a notebook or not. However, this detection might not be accurate due to the
-     variety of notebooks that exists nowadays. If that is the case, you can always force
-     the UI by using [`notebook_login`] or [`interpreter_login`].
-
-     </Tip>
+     > [!TIP]
+     > When the token is not passed, [`login`] will automatically detect if the script runs
+     > in a notebook or not. However, this detection might not be accurate due to the
+     > variety of notebooks that exists nowadays. If that is the case, you can always force
+     > the UI by using [`notebook_login`] or [`interpreter_login`].

      Args:
          token (`str`, *optional*):
@@ -250,7 +244,7 @@ def interpreter_login(*, skip_if_logged_in: bool = False) -> None:
          logger.info("User is already logged in.")
          return

-     from .commands.delete_cache import _ask_for_confirmation_no_tui
+     from .cli.cache import _ask_for_confirmation_no_tui

      print(_HF_LOGO_ASCII)
      if get_token() is not None:
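As a reminder of the public API documented above (the diff only touches the docstring formatting and internal import paths), here is a minimal usage sketch of [`login`]; the token value is a placeholder.

```python
from huggingface_hub import login

# Interactive: prompts for a token and auto-detects whether it runs in a
# notebook or a terminal, as the docstring above describes. The `hf auth login`
# CLI command mentioned there wraps this same function.
login()

# Non-interactive: pass a token explicitly (placeholder value).
login(token="hf_xxx")
```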
@@ -1,6 +1,6 @@
  import os
  from pathlib import Path
- from typing import Iterable, Optional, Union
+ from typing import Iterable, List, Literal, Optional, Union, overload

  import httpx
  from tqdm.auto import tqdm as base_tqdm
@@ -8,13 +8,14 @@ from tqdm.contrib.concurrent import thread_map

  from . import constants
  from .errors import (
+     DryRunError,
      GatedRepoError,
      HfHubHTTPError,
      LocalEntryNotFoundError,
      RepositoryNotFoundError,
      RevisionNotFoundError,
  )
- from .file_download import REGEX_COMMIT_HASH, hf_hub_download, repo_folder_name
+ from .file_download import REGEX_COMMIT_HASH, DryRunFileInfo, hf_hub_download, repo_folder_name
  from .hf_api import DatasetInfo, HfApi, ModelInfo, RepoFile, SpaceInfo
  from .utils import OfflineModeIsEnabled, filter_repo_objects, logging, validate_hf_hub_args
  from .utils import tqdm as hf_tqdm
@@ -25,6 +26,81 @@ logger = logging.get_logger(__name__)

  VERY_LARGE_REPO_THRESHOLD = 50000 # After this limit, we don't consider `repo_info.siblings` to be reliable enough

+ @overload
+ def snapshot_download(
+     repo_id: str,
+     *,
+     repo_type: Optional[str] = None,
+     revision: Optional[str] = None,
+     cache_dir: Union[str, Path, None] = None,
+     local_dir: Union[str, Path, None] = None,
+     library_name: Optional[str] = None,
+     library_version: Optional[str] = None,
+     user_agent: Optional[Union[dict, str]] = None,
+     etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
+     force_download: bool = False,
+     token: Optional[Union[bool, str]] = None,
+     local_files_only: bool = False,
+     allow_patterns: Optional[Union[list[str], str]] = None,
+     ignore_patterns: Optional[Union[list[str], str]] = None,
+     max_workers: int = 8,
+     tqdm_class: Optional[type[base_tqdm]] = None,
+     headers: Optional[dict[str, str]] = None,
+     endpoint: Optional[str] = None,
+     dry_run: Literal[False] = False,
+ ) -> str: ...
+
+
+ @overload
+ def snapshot_download(
+     repo_id: str,
+     *,
+     repo_type: Optional[str] = None,
+     revision: Optional[str] = None,
+     cache_dir: Union[str, Path, None] = None,
+     local_dir: Union[str, Path, None] = None,
+     library_name: Optional[str] = None,
+     library_version: Optional[str] = None,
+     user_agent: Optional[Union[dict, str]] = None,
+     etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
+     force_download: bool = False,
+     token: Optional[Union[bool, str]] = None,
+     local_files_only: bool = False,
+     allow_patterns: Optional[Union[list[str], str]] = None,
+     ignore_patterns: Optional[Union[list[str], str]] = None,
+     max_workers: int = 8,
+     tqdm_class: Optional[type[base_tqdm]] = None,
+     headers: Optional[dict[str, str]] = None,
+     endpoint: Optional[str] = None,
+     dry_run: Literal[True] = True,
+ ) -> list[DryRunFileInfo]: ...
+
+
+ @overload
+ def snapshot_download(
+     repo_id: str,
+     *,
+     repo_type: Optional[str] = None,
+     revision: Optional[str] = None,
+     cache_dir: Union[str, Path, None] = None,
+     local_dir: Union[str, Path, None] = None,
+     library_name: Optional[str] = None,
+     library_version: Optional[str] = None,
+     user_agent: Optional[Union[dict, str]] = None,
+     etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
+     force_download: bool = False,
+     token: Optional[Union[bool, str]] = None,
+     local_files_only: bool = False,
+     allow_patterns: Optional[Union[list[str], str]] = None,
+     ignore_patterns: Optional[Union[list[str], str]] = None,
+     max_workers: int = 8,
+     tqdm_class: Optional[type[base_tqdm]] = None,
+     headers: Optional[dict[str, str]] = None,
+     endpoint: Optional[str] = None,
+     dry_run: bool = False,
+ ) -> Union[str, list[DryRunFileInfo]]: ...
+
+
  @validate_hf_hub_args
  def snapshot_download(
      repo_id: str,
@@ -46,7 +122,8 @@ def snapshot_download(
      tqdm_class: Optional[type[base_tqdm]] = None,
      headers: Optional[dict[str, str]] = None,
      endpoint: Optional[str] = None,
- ) -> str:
+     dry_run: bool = False,
+ ) -> Union[str, list[DryRunFileInfo]]:
      """Download repo files.

      Download a whole snapshot of a repo's files at the specified revision. This is useful when you want all files from
@@ -109,9 +186,14 @@
              Note that the `tqdm_class` is not passed to each individual download.
              Defaults to the custom HF progress bar that can be disabled by setting
              `HF_HUB_DISABLE_PROGRESS_BARS` environment variable.
+         dry_run (`bool`, *optional*, defaults to `False`):
+             If `True`, perform a dry run without actually downloading the files. Returns a list of
+             [`DryRunFileInfo`] objects containing information about what would be downloaded.

      Returns:
-         `str`: folder path of the repo snapshot.
+         `str` or list of [`DryRunFileInfo`]:
+             - If `dry_run=False`: Local snapshot path.
+             - If `dry_run=True`: A list of [`DryRunFileInfo`] objects containing download information.

      Raises:
          [`~utils.RepositoryNotFoundError`]
@@ -187,6 +269,11 @@
      # - f the specified revision is a branch or tag, look inside "refs".
      # => if local_dir is not None, we will return the path to the local folder if it exists.
      if repo_info is None:
+         if dry_run:
+             raise DryRunError(
+                 "Dry run cannot be performed as the repository cannot be accessed. Please check your internet connection or authentication token."
+             ) from api_call_error
+
          # Try to get which commit hash corresponds to the specified revision
          commit_hash = None
          if REGEX_COMMIT_HASH.match(revision):
@@ -273,6 +360,8 @@
          tqdm_desc = f"Fetching {len(filtered_repo_files)} files"
      else:
          tqdm_desc = "Fetching ... files"
+     if dry_run:
+         tqdm_desc = "[dry-run] " + tqdm_desc

      commit_hash = repo_info.sha
      snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash)
@@ -288,28 +377,33 @@
      except OSError as e:
          logger.warning(f"Ignored error while writing commit hash to {ref_path}: {e}.")

+     results: List[Union[str, DryRunFileInfo]] = []
+
      # we pass the commit_hash to hf_hub_download
      # so no network call happens if we already
      # have the file locally.
-     def _inner_hf_hub_download(repo_file: str):
-         return hf_hub_download(
-             repo_id,
-             filename=repo_file,
-             repo_type=repo_type,
-             revision=commit_hash,
-             endpoint=endpoint,
-             cache_dir=cache_dir,
-             local_dir=local_dir,
-             library_name=library_name,
-             library_version=library_version,
-             user_agent=user_agent,
-             etag_timeout=etag_timeout,
-             force_download=force_download,
-             token=token,
-             headers=headers,
+     def _inner_hf_hub_download(repo_file: str) -> None:
+         results.append(
+             hf_hub_download( # type: ignore[no-matching-overload] # ty not happy, don't know why :/
+                 repo_id,
+                 filename=repo_file,
+                 repo_type=repo_type,
+                 revision=commit_hash,
+                 endpoint=endpoint,
+                 cache_dir=cache_dir,
+                 local_dir=local_dir,
+                 library_name=library_name,
+                 library_version=library_version,
+                 user_agent=user_agent,
+                 etag_timeout=etag_timeout,
+                 force_download=force_download,
+                 token=token,
+                 headers=headers,
+                 dry_run=dry_run,
+             )
          )

-     if constants.HF_HUB_ENABLE_HF_TRANSFER:
+     if constants.HF_HUB_ENABLE_HF_TRANSFER and not dry_run:
          # when using hf_transfer we don't want extra parallelism
          # from the one hf_transfer provides
          for file in filtered_repo_files:
@@ -324,6 +418,10 @@
              tqdm_class=tqdm_class or hf_tqdm,
          )

+     if dry_run:
+         assert all(isinstance(r, DryRunFileInfo) for r in results)
+         return results # type: ignore
+
      if local_dir is not None:
          return str(os.path.realpath(local_dir))
      return snapshot_folder
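Taken together, the `_snapshot_download.py` hunks above add a `dry_run` mode: with `dry_run=True`, `snapshot_download` resolves the repo and returns a list of [`DryRunFileInfo`] objects instead of downloading anything. A minimal usage sketch based on the signature shown above ("gpt2" is only an example repo id; the exact fields of `DryRunFileInfo` are not shown in this diff):

```python
from huggingface_hub import snapshot_download

# dry_run=True: files are not actually downloaded; a list of DryRunFileInfo
# objects describing what would be fetched is returned instead.
infos = snapshot_download("gpt2", dry_run=True)
for info in infos:
    print(info)

# Default behaviour is unchanged: the local snapshot folder path is returned.
local_path = snapshot_download("gpt2", allow_patterns=["*.json"])
print(local_path)
```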
@@ -52,11 +52,8 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
      issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
      minutes (default to every 5 minutes).

-     <Tip warning={true}>
-
-     `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
-
-     </Tip>
+     > [!WARNING]
+     > `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.

      Args:
          repo_id (`str`):
@@ -31,8 +31,7 @@ from . import constants
  from ._commit_api import CommitOperationAdd, UploadInfo, _fetch_upload_modes
  from ._local_folder import LocalUploadFileMetadata, LocalUploadFilePaths, get_local_upload_paths, read_upload_metadata
  from .constants import DEFAULT_REVISION, REPO_TYPES
- from .utils import DEFAULT_IGNORE_PATTERNS, filter_repo_objects, tqdm
- from .utils._cache_manager import _format_size
+ from .utils import DEFAULT_IGNORE_PATTERNS, _format_size, filter_repo_objects, tqdm
  from .utils._runtime import is_xet_available
  from .utils.sha import sha_fileobj
