huggingface-hub 1.0.0rc1__tar.gz → 1.0.0rc2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of huggingface-hub has been flagged as potentially problematic.
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/PKG-INFO +2 -2
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/README.md +1 -1
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/__init__.py +1 -1
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_commit_api.py +126 -66
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_commit_scheduler.py +4 -7
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_login.py +9 -15
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_tensorboard_logger.py +2 -5
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_webhooks_server.py +8 -20
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/repo.py +137 -5
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/dataclasses.py +3 -12
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/fastai_utils.py +22 -32
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/file_download.py +18 -21
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/hf_api.py +258 -410
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/hf_file_system.py +17 -44
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_client.py +25 -47
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/_async_client.py +25 -47
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/agent.py +2 -5
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/mcp_client.py +2 -5
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/__init__.py +11 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/_common.py +1 -0
- huggingface_hub-1.0.0rc2/src/huggingface_hub/inference/_providers/publicai.py +6 -0
- huggingface_hub-1.0.0rc2/src/huggingface_hub/inference/_providers/scaleway.py +28 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/lfs.py +14 -8
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/repocard.py +12 -16
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/serialization/_base.py +3 -6
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/serialization/_torch.py +16 -34
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_cache_manager.py +41 -71
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_chunk_utils.py +2 -3
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_http.py +27 -30
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/logging.py +8 -11
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub.egg-info/PKG-INFO +2 -2
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub.egg-info/SOURCES.txt +2 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/LICENSE +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/MANIFEST.in +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/pyproject.toml +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/setup.cfg +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/setup.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_inference_endpoints.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_jobs_api.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_local_folder.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_oauth.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_snapshot_download.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_space_api.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_upload_large_folder.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_webhooks_payload.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/_cli_utils.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/auth.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/cache.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/download.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/hf.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/jobs.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/lfs.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/repo_files.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/system.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/upload.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/upload_large_folder.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/_cli_utils.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/delete_cache.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/download.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/env.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/huggingface_cli.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/lfs.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/repo.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/repo_files.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/scan_cache.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/tag.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/upload.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/upload_large_folder.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/user.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/commands/version.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/community.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/constants.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/errors.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/hub_mixin.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_common.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/audio_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/audio_to_audio.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/base.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/chat_completion.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/depth_estimation.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/document_question_answering.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/feature_extraction.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/fill_mask.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/image_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/image_segmentation.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/image_to_image.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/image_to_text.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/image_to_video.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/object_detection.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/question_answering.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/sentence_similarity.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/summarization.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/table_question_answering.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text2text_generation.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text_generation.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text_to_audio.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text_to_image.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text_to_speech.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/text_to_video.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/token_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/translation.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/video_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/visual_question_answering.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/zero_shot_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/_cli_hacks.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/cli.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/constants.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/types.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_mcp/utils.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/black_forest_labs.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/cerebras.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/cohere.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/fal_ai.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/featherless_ai.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/fireworks_ai.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/groq.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/hf_inference.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/hyperbolic.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/nebius.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/novita.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/nscale.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/openai.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/replicate.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/sambanova.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/inference/_providers/together.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/py.typed +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/repocard_data.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/serialization/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/serialization/_dduf.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/templates/datasetcard_template.md +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/templates/modelcard_template.md +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/__init__.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_auth.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_cache_assets.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_datetime.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_deprecation.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_dotenv.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_experimental.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_fixes.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_git_credential.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_headers.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_lfs.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_pagination.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_paths.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_runtime.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_safetensors.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_subprocess.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_telemetry.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_typing.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_validators.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_xet.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/_xet_progress_reporting.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/endpoint_helpers.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/insecure_hashlib.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/sha.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/utils/tqdm.py +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub.egg-info/dependency_links.txt +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub.egg-info/entry_points.txt +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub.egg-info/requires.txt +0 -0
- {huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub.egg-info/top_level.txt +0 -0
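Two new inference provider modules, `publicai.py` and `scaleway.py`, are added under `inference/_providers/` and wired into `_providers/__init__.py`; their contents are not reproduced in the extracted hunks below. As a minimal sketch, assuming the new modules register under the provider ids `publicai` and `scaleway`, they would be reachable through the existing `provider` argument of `InferenceClient`:

```python
# Sketch only: the provider ids "publicai" and "scaleway" and the model name are
# assumptions based on the new module names, not confirmed by this diff.
from huggingface_hub import InferenceClient

client = InferenceClient(provider="scaleway", api_key="hf_xxx")  # or provider="publicai"
out = client.chat_completion(
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    model="meta-llama/Llama-3.1-8B-Instruct",  # any model served by the chosen provider
)
print(out.choices[0].message.content)
```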
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: huggingface_hub
-Version: 1.0.0rc1
+Version: 1.0.0rc2
 Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
 Home-page: https://github.com/huggingface/huggingface_hub
 Author: Hugging Face, Inc.
@@ -105,7 +105,7 @@ If you prefer, you can also install it with [conda](https://huggingface.co/docs/
 In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want have a complete experience for Inference, run:
 
 ```bash
-pip install huggingface_hub[inference]
+pip install "huggingface_hub[inference]"
 ```
 
 To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation).
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/README.md
RENAMED
@@ -65,7 +65,7 @@ If you prefer, you can also install it with [conda](https://huggingface.co/docs/
 In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want have a complete experience for Inference, run:
 
 ```bash
-pip install huggingface_hub[inference]
+pip install "huggingface_hub[inference]"
 ```
 
 To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation).
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_commit_api.py
RENAMED
@@ -33,6 +33,7 @@ from .utils import (
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
+from .utils._runtime import is_xet_available
 
 
 if TYPE_CHECKING:
@@ -353,7 +354,7 @@ def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:
 
 
 @validate_hf_hub_args
-def _upload_lfs_files(
+def _upload_files(
     *,
     additions: list[CommitOperationAdd],
     repo_type: str,
@@ -362,6 +363,86 @@ def _upload_lfs_files(
     endpoint: Optional[str] = None,
     num_threads: int = 5,
     revision: Optional[str] = None,
+    create_pr: Optional[bool] = None,
+):
+    """
+    Negotiates per-file transfer (LFS vs Xet) and uploads in batches.
+    """
+    xet_additions: list[CommitOperationAdd] = []
+    lfs_actions: list[dict[str, Any]] = []
+    lfs_oid2addop: dict[str, CommitOperationAdd] = {}
+
+    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
+        chunk_list = [op for op in chunk]
+
+        transfers: list[str] = ["basic", "multipart"]
+        has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
+        if is_xet_available():
+            if not has_buffered_io_data:
+                transfers.append("xet")
+            else:
+                logger.warning(
+                    "Uploading files as a binary IO buffer is not supported by Xet Storage. "
+                    "Falling back to HTTP upload."
+                )
+
+        actions_chunk, errors_chunk, chosen_transfer = post_lfs_batch_info(
+            upload_infos=[op.upload_info for op in chunk_list],
+            repo_id=repo_id,
+            repo_type=repo_type,
+            revision=revision,
+            endpoint=endpoint,
+            headers=headers,
+            token=None,  # already passed in 'headers'
+            transfers=transfers,
+        )
+        if errors_chunk:
+            message = "\n".join(
+                [
+                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
+                    for err in errors_chunk
+                ]
+            )
+            raise ValueError(f"LFS batch API returned errors:\n{message}")
+
+        # If server returns a transfer we didn't offer (e.g "xet" while uploading from BytesIO),
+        # fall back to LFS for this chunk.
+        if chosen_transfer == "xet" and ("xet" in transfers):
+            xet_additions.extend(chunk_list)
+        else:
+            lfs_actions.extend(actions_chunk)
+            for op in chunk_list:
+                lfs_oid2addop[op.upload_info.sha256.hex()] = op
+
+    if len(lfs_actions) > 0:
+        _upload_lfs_files(
+            actions=lfs_actions,
+            oid2addop=lfs_oid2addop,
+            headers=headers,
+            endpoint=endpoint,
+            num_threads=num_threads,
+        )
+
+    if len(xet_additions) > 0:
+        _upload_xet_files(
+            additions=xet_additions,
+            repo_type=repo_type,
+            repo_id=repo_id,
+            headers=headers,
+            endpoint=endpoint,
+            revision=revision,
+            create_pr=create_pr,
+        )
+
+
+@validate_hf_hub_args
+def _upload_lfs_files(
+    *,
+    actions: list[dict[str, Any]],
+    oid2addop: dict[str, CommitOperationAdd],
+    headers: dict[str, str],
+    endpoint: Optional[str] = None,
+    num_threads: int = 5,
 ):
     """
     Uploads the content of `additions` to the Hub using the large file storage protocol.
@@ -370,9 +451,21 @@ def _upload_lfs_files(
         - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
 
     Args:
-
-
-
+        actions (`list[dict[str, Any]]`):
+            LFS batch actions returned by the server.
+        oid2addop (`dict[str, CommitOperationAdd]`):
+            A dictionary mapping the OID of the file to the corresponding `CommitOperationAdd` object.
+        headers (`dict[str, str]`):
+            Headers to use for the request, including authorization headers and user agent.
+        endpoint (`str`, *optional*):
+            The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
+        num_threads (`int`, *optional*):
+            The number of concurrent threads to use when uploading. Defaults to 5.
+
+    Raises:
+        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+            If an upload failed for any reason
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
@@ -392,50 +485,17 @@ def _upload_lfs_files(
         [`HfHubHTTPError`]
             If the LFS batch endpoint returned an HTTP error.
     """
-    #
-    # Upload instructions are retrieved by chunk of 256 files to avoid reaching
-    # the payload limit.
-    batch_actions: list[dict] = []
-    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
-        batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
-            upload_infos=[op.upload_info for op in chunk],
-            repo_id=repo_id,
-            repo_type=repo_type,
-            revision=revision,
-            endpoint=endpoint,
-            headers=headers,
-            token=None,  # already passed in 'headers'
-        )
-
-        # If at least 1 error, we do not retrieve information for other chunks
-        if batch_errors_chunk:
-            message = "\n".join(
-                [
-                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
-                    for err in batch_errors_chunk
-                ]
-            )
-            raise ValueError(f"LFS batch endpoint returned errors:\n{message}")
-
-        batch_actions += batch_actions_chunk
-    oid2addop = {add_op.upload_info.sha256.hex(): add_op for add_op in additions}
-
-    # Step 2: ignore files that have already been uploaded
+    # Filter out files already present upstream
     filtered_actions = []
-    for action in batch_actions:
+    for action in actions:
         if action.get("actions") is None:
             logger.debug(
-                f"Content of file {oid2addop[action['oid']].path_in_repo} is already"
-                " present upstream - skipping upload."
+                f"Content of file {oid2addop[action['oid']].path_in_repo} is already present upstream - skipping upload."
             )
         else:
             filtered_actions.append(action)
 
-
-        logger.debug("No LFS files to upload.")
-        return
-
-    # Step 3: upload files concurrently according to these instructions
+    # Upload according to server-provided actions
     def _wrapped_lfs_upload(batch_action) -> None:
         try:
             operation = oid2addop[batch_action["oid"]]
@@ -479,7 +539,7 @@ def _upload_xet_files(
     This chunks the files and deduplicates the chunks before uploading them to xetcas storage.
 
     Args:
-        additions (
+        additions (`` of `CommitOperationAdd`):
             The files to be uploaded.
         repo_type (`str`):
             Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
@@ -576,30 +636,30 @@ def _upload_xet_files(
     progress, progress_callback = None, None
 
     try:
-        for
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        all_bytes_ops = [op for op in additions if isinstance(op.path_or_fileobj, bytes)]
+        all_paths_ops = [op for op in additions if isinstance(op.path_or_fileobj, (str, Path))]
+
+        if len(all_paths_ops) > 0:
+            all_paths = [str(op.path_or_fileobj) for op in all_paths_ops]
+            upload_files(
+                all_paths,
+                xet_endpoint,
+                access_token_info,
+                token_refresher,
+                progress_callback,
+                repo_type,
+            )
+
+        if len(all_bytes_ops) > 0:
+            all_bytes = [op.path_or_fileobj for op in all_bytes_ops]
+            upload_bytes(
+                all_bytes,
+                xet_endpoint,
+                access_token_info,
+                token_refresher,
+                progress_callback,
+                repo_type,
+            )
 
     finally:
         if progress is not None:
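The net effect of the `_commit_api.py` changes above is that the old monolithic `_upload_lfs_files` is split in two: a new `_upload_files` helper batches the additions, offers `basic`/`multipart` (plus `xet` when `hf_xet` is available and no raw IO buffers are involved) to the LFS batch endpoint, and routes each chunk to either the LFS or the Xet upload path. These are private helpers; user code reaches them through the public commit API. A minimal sketch of that public entry point, with placeholder repo and file names:

```python
# Placeholder repo id and file name; upload_file() internally builds a
# CommitOperationAdd and goes through the _upload_files dispatch shown above.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="model.safetensors",   # local path: eligible for Xet when hf_xet is installed
    path_in_repo="model.safetensors",
    repo_id="username/my-model",
    repo_type="model",
    commit_message="Upload weights",
)
```

Per the warning added in the diff, passing an open binary buffer (e.g. `io.BufferedIOBase`) instead of a path keeps the upload on the regular HTTP/LFS path.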
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_commit_scheduler.py
RENAMED
@@ -205,13 +205,10 @@ class CommitScheduler:
         """
         Push folder to the Hub and return the commit info.
 
-
-
-
-
-        issues.
-
-        </Tip>
+        > [!WARNING]
+        > This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
+        > queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
+        > issues.
 
         The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and
         uploads only changed files. If no changes are found, the method returns without committing anything. If you want
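The `_commit_scheduler.py` change only converts the docstring `<Tip>` block to a `> [!WARNING]` admonition; the behaviour is unchanged: `push_to_hub` is invoked by the scheduler in the background rather than called directly. A minimal usage sketch with placeholder names:

```python
# Placeholder repo id and folder; the scheduler calls push_to_hub() itself every
# `every` minutes, so user code never invokes it directly.
from pathlib import Path
from huggingface_hub import CommitScheduler

folder = Path("training_logs")
folder.mkdir(exist_ok=True)

scheduler = CommitScheduler(
    repo_id="username/my-logs",
    repo_type="dataset",
    folder_path=folder,
    every=5,  # minutes between background commits
)

# Write through the scheduler's lock so a background commit never sees a half-written file.
with scheduler.lock:
    with (folder / "metrics.jsonl").open("a") as f:
        f.write('{"step": 1, "loss": 0.42}\n')
```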
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_login.py
RENAMED
@@ -70,21 +70,15 @@ def login(
     To log in from outside of a script, one can also use `hf auth login` which is
     a cli command that wraps [`login`].
 
-
-
-
-
-
-
-
-
-
-    When the token is not passed, [`login`] will automatically detect if the script runs
-    in a notebook or not. However, this detection might not be accurate due to the
-    variety of notebooks that exists nowadays. If that is the case, you can always force
-    the UI by using [`notebook_login`] or [`interpreter_login`].
-
-    </Tip>
+    > [!TIP]
+    > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
+    > extends its capabilities.
+
+    > [!TIP]
+    > When the token is not passed, [`login`] will automatically detect if the script runs
+    > in a notebook or not. However, this detection might not be accurate due to the
+    > variety of notebooks that exists nowadays. If that is the case, you can always force
+    > the UI by using [`notebook_login`] or [`interpreter_login`].
 
     Args:
         token (`str`, *optional*):
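As with the other docstring changes in this release, the `<Tip>` blocks in `login()` become `> [!TIP]` admonitions; the API itself is untouched. A short sketch of the flows the docstring describes (the token value is a placeholder):

```python
from huggingface_hub import login, notebook_login, interpreter_login

# Explicit token (e.g. read from an environment variable or secret store): no prompt.
login(token="hf_xxx")

# Without a token, login() guesses whether it runs in a notebook and shows either the
# notebook widget or a terminal prompt. If the guess is wrong, force one explicitly:
# notebook_login()      # widget-based UI
# interpreter_login()   # terminal prompt
```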
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_tensorboard_logger.py
RENAMED
@@ -52,11 +52,8 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
     issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
     minutes (default to every 5 minutes).
 
-
-
-    `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
-
-    </Tip>
+    > [!WARNING]
+    > `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
 
     Args:
         repo_id (`str`):
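Again only the admonition syntax changes here. For context, a minimal sketch of the experimental `HFSummaryWriter` the warning refers to (placeholder repo id; `tensorboard` must be installed):

```python
from huggingface_hub import HFSummaryWriter

# Experimental API, per the warning above.
logger = HFSummaryWriter(repo_id="username/my-training-logs", commit_every=5)  # push every 5 minutes
for step, loss in enumerate([0.9, 0.6, 0.42]):
    logger.add_scalar("train/loss", loss, global_step=step)
```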
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/_webhooks_server.py
RENAMED
@@ -53,17 +53,11 @@ class WebhooksServer:
     Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
     WebhooksServer and deploy it on a Space.
 
-
+    > [!WARNING]
+    > `WebhooksServer` is experimental. Its API is subject to change in the future.
 
-
-
-    </Tip>
-
-    <Tip warning={true}>
-
-    You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
-
-    </Tip>
+    > [!WARNING]
+    > You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
 
     Args:
         ui (`gradio.Blocks`, optional):
@@ -240,17 +234,11 @@ def webhook_endpoint(path: Optional[str] = None) -> Callable:
     Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
     server and deploy it on a Space.
 
-
-
-    `webhook_endpoint` is experimental. Its API is subject to change in the future.
-
-    </Tip>
-
-    <Tip warning={true}>
-
-    You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
+    > [!WARNING]
+    > `webhook_endpoint` is experimental. Its API is subject to change in the future.
 
-
+    > [!WARNING]
+    > You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
 
     Args:
         path (`str`, optional):
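Both webhook docstrings switch to `> [!WARNING]` admonitions with the same content as before. A minimal sketch of the experimental `webhook_endpoint` decorator they document (requires `gradio`; the repo filter below is a placeholder):

```python
from huggingface_hub import WebhookPayload, webhook_endpoint

@webhook_endpoint
async def trigger_training(payload: WebhookPayload) -> None:
    # Called by the Hub for every event on the repos watched by the configured webhook.
    if payload.repo.type == "dataset" and payload.event.action == "update":
        print(f"Dataset {payload.repo.name} was updated - trigger a retraining job here.")
```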
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/cli/repo.py
RENAMED
@@ -21,6 +21,7 @@ Usage:
     hf repo create my-cool-model --private
 """
 
+import enum
 from typing import Annotated, Optional
 
 import typer
@@ -44,8 +45,16 @@ from ._cli_utils import (
 logger = logging.get_logger(__name__)
 
 repo_cli = typer_factory(help="Manage repos on the Hub.")
-
-
+tag_cli = typer_factory(help="Manage tags for a repo on the Hub.")
+branch_cli = typer_factory(help="Manage branches for a repo on the Hub.")
+repo_cli.add_typer(tag_cli, name="tag")
+repo_cli.add_typer(branch_cli, name="branch")
+
+
+class GatedChoices(str, enum.Enum):
+    auto = "auto"
+    manual = "manual"
+    false = "false"
 
 
 @repo_cli.command("create", help="Create a new repo on the Hub.")
@@ -87,7 +96,130 @@ def repo_create(
     print(f"Your repo is now available at {ANSI.bold(repo_url)}")
 
 
-@
+@repo_cli.command("delete", help="Delete a repo from the Hub. this is an irreversible operation.")
+def repo_delete(
+    repo_id: RepoIdArg,
+    repo_type: RepoTypeOpt = RepoType.model,
+    token: TokenOpt = None,
+    missing_ok: Annotated[
+        bool,
+        typer.Option(
+            help="If set to True, do not raise an error if repo does not exist.",
+        ),
+    ] = False,
+) -> None:
+    api = get_hf_api(token=token)
+    api.delete_repo(
+        repo_id=repo_id,
+        repo_type=repo_type.value,
+        missing_ok=missing_ok,
+    )
+    print(f"Successfully deleted {ANSI.bold(repo_id)} on the Hub.")
+
+
+@repo_cli.command("move", help="Move a repository from a namespace to another namespace.")
+def repo_move(
+    from_id: RepoIdArg,
+    to_id: RepoIdArg,
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+) -> None:
+    api = get_hf_api(token=token)
+    api.move_repo(
+        from_id=from_id,
+        to_id=to_id,
+        repo_type=repo_type.value,
+    )
+    print(f"Successfully moved {ANSI.bold(from_id)} to {ANSI.bold(to_id)} on the Hub.")
+
+
+@repo_cli.command("settings", help="Update the settings of a repository.")
+def repo_settings(
+    repo_id: RepoIdArg,
+    gated: Annotated[
+        Optional[GatedChoices],
+        typer.Option(
+            help="The gated status for the repository.",
+        ),
+    ] = None,
+    private: Annotated[
+        Optional[bool],
+        typer.Option(
+            help="Whether the repository should be private.",
+        ),
+    ] = None,
+    xet_enabled: Annotated[
+        Optional[bool],
+        typer.Option(
+            help=" Whether the repository should be enabled for Xet Storage.",
+        ),
+    ] = None,
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+) -> None:
+    api = get_hf_api(token=token)
+    api.update_repo_settings(
+        repo_id=repo_id,
+        gated=(gated.value if gated else None),  # type: ignore [arg-type]
+        private=private,
+        xet_enabled=xet_enabled,
+        repo_type=repo_type.value,
+    )
+    print(f"Successfully updated the settings of {ANSI.bold(repo_id)} on the Hub.")
+
+
+@branch_cli.command("create", help="Create a new branch for a repo on the Hub.")
+def branch_create(
+    repo_id: RepoIdArg,
+    branch: Annotated[
+        str,
+        typer.Argument(
+            help="The name of the branch to create.",
+        ),
+    ],
+    revision: RevisionOpt = None,
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+    exist_ok: Annotated[
+        bool,
+        typer.Option(
+            help="If set to True, do not raise an error if branch already exists.",
+        ),
+    ] = False,
+) -> None:
+    api = get_hf_api(token=token)
+    api.create_branch(
+        repo_id=repo_id,
+        branch=branch,
+        revision=revision,
+        repo_type=repo_type.value,
+        exist_ok=exist_ok,
+    )
+    print(f"Successfully created {ANSI.bold(branch)} branch on {repo_type.value} {ANSI.bold(repo_id)}")
+
+
+@branch_cli.command("delete", help="Delete a branch from a repo on the Hub.")
+def branch_delete(
+    repo_id: RepoIdArg,
+    branch: Annotated[
+        str,
+        typer.Argument(
+            help="The name of the branch to delete.",
+        ),
+    ],
+    token: TokenOpt = None,
+    repo_type: RepoTypeOpt = RepoType.model,
+) -> None:
+    api = get_hf_api(token=token)
+    api.delete_branch(
+        repo_id=repo_id,
+        branch=branch,
+        repo_type=repo_type.value,
+    )
+    print(f"Successfully deleted {ANSI.bold(branch)} branch on {repo_type.value} {ANSI.bold(repo_id)}")
+
+
+@tag_cli.command("create", help="Create a tag for a repo.")
 def tag_create(
     repo_id: RepoIdArg,
     tag: Annotated[
@@ -127,7 +259,7 @@ def tag_create(
     print(f"Tag {ANSI.bold(tag)} created on {ANSI.bold(repo_id)}")
 
 
-@
+@tag_cli.command("list", help="List tags for a repo.")
 def tag_list(
     repo_id: RepoIdArg,
     token: TokenOpt = None,
@@ -152,7 +284,7 @@ def tag_list(
         print(t.name)
 
 
-@
+@tag_cli.command("delete", help="Delete a tag for a repo.")
 def tag_delete(
     repo_id: RepoIdArg,
     tag: Annotated[
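The new `hf repo delete`, `hf repo move` and `hf repo settings` commands, plus the `hf repo branch ...` and `hf repo tag ...` sub-apps, are thin Typer wrappers over existing `HfApi` methods. A sketch of the equivalent Python calls, with placeholder repo ids and branch names:

```python
from huggingface_hub import HfApi

api = HfApi()  # token taken from the local login / HF_TOKEN environment variable
api.update_repo_settings(repo_id="username/my-model", private=True, gated="auto")
api.create_branch(repo_id="username/my-model", branch="experiment", exist_ok=True)
api.delete_branch(repo_id="username/my-model", branch="experiment")
api.move_repo(from_id="username/my-model", to_id="my-org/my-model")
api.delete_repo(repo_id="my-org/my-model", missing_ok=True)
```

The tag subcommands map to `create_tag`, `delete_tag` and `list_repo_refs` in the same way.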
{huggingface_hub-1.0.0rc1 → huggingface_hub-1.0.0rc2}/src/huggingface_hub/dataclasses.py
RENAMED
@@ -1,18 +1,7 @@
 import inspect
 from dataclasses import _MISSING_TYPE, MISSING, Field, field, fields
 from functools import wraps
-from typing import (
-    Any,
-    Callable,
-    Literal,
-    Optional,
-    Type,
-    TypeVar,
-    Union,
-    get_args,
-    get_origin,
-    overload,
-)
+from typing import Any, Callable, ForwardRef, Literal, Optional, Type, TypeVar, Union, get_args, get_origin, overload
 
 from .errors import (
     StrictDataclassClassValidationError,
@@ -322,6 +311,8 @@ def type_validator(name: str, value: Any, expected_type: Any) -> None:
         validator(name, value, args)
     elif isinstance(expected_type, type):  # simple types
         _validate_simple_type(name, value, expected_type)
+    elif isinstance(expected_type, ForwardRef) or isinstance(expected_type, str):
+        return
     else:
         raise TypeError(f"Unsupported type for field '{name}': {expected_type}")
 
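The `dataclasses.py` change flattens the `typing` import, adds `ForwardRef`, and makes `type_validator` accept fields annotated with a `ForwardRef` or a plain string instead of raising `TypeError: Unsupported type ...`. Such annotations (self-references, or anything under `from __future__ import annotations`) cannot be checked against a value without resolving them first, so they are now skipped. A self-contained sketch of that skip logic in isolation (not the library's full validator):

```python
from typing import Any, ForwardRef


def check_field(name: str, value: Any, expected_type: Any) -> None:
    # Unresolved annotations (a string like "Node", or a typing.ForwardRef) cannot be
    # compared against a runtime value, so they are accepted as-is - mirroring the
    # new branch added to type_validator above.
    if isinstance(expected_type, (ForwardRef, str)):
        return
    if isinstance(expected_type, type) and not isinstance(value, expected_type):
        raise TypeError(f"Field '{name}' expected {expected_type.__name__}, got {type(value).__name__}")


check_field("next_node", object(), "Node")  # skipped: string annotation
check_field("count", 3, int)                # passes: simple type check
```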