huggingface-hub 0.29.0rc2__py3-none-any.whl → 1.1.3__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
Files changed (153)
  1. huggingface_hub/__init__.py +160 -46
  2. huggingface_hub/_commit_api.py +277 -71
  3. huggingface_hub/_commit_scheduler.py +15 -15
  4. huggingface_hub/_inference_endpoints.py +33 -22
  5. huggingface_hub/_jobs_api.py +301 -0
  6. huggingface_hub/_local_folder.py +18 -3
  7. huggingface_hub/_login.py +31 -63
  8. huggingface_hub/_oauth.py +460 -0
  9. huggingface_hub/_snapshot_download.py +241 -81
  10. huggingface_hub/_space_api.py +18 -10
  11. huggingface_hub/_tensorboard_logger.py +15 -19
  12. huggingface_hub/_upload_large_folder.py +196 -76
  13. huggingface_hub/_webhooks_payload.py +3 -3
  14. huggingface_hub/_webhooks_server.py +15 -25
  15. huggingface_hub/{commands → cli}/__init__.py +1 -15
  16. huggingface_hub/cli/_cli_utils.py +173 -0
  17. huggingface_hub/cli/auth.py +147 -0
  18. huggingface_hub/cli/cache.py +841 -0
  19. huggingface_hub/cli/download.py +189 -0
  20. huggingface_hub/cli/hf.py +60 -0
  21. huggingface_hub/cli/inference_endpoints.py +377 -0
  22. huggingface_hub/cli/jobs.py +772 -0
  23. huggingface_hub/cli/lfs.py +175 -0
  24. huggingface_hub/cli/repo.py +315 -0
  25. huggingface_hub/cli/repo_files.py +94 -0
  26. huggingface_hub/{commands/env.py → cli/system.py} +10 -13
  27. huggingface_hub/cli/upload.py +294 -0
  28. huggingface_hub/cli/upload_large_folder.py +117 -0
  29. huggingface_hub/community.py +20 -12
  30. huggingface_hub/constants.py +83 -59
  31. huggingface_hub/dataclasses.py +609 -0
  32. huggingface_hub/errors.py +99 -30
  33. huggingface_hub/fastai_utils.py +30 -41
  34. huggingface_hub/file_download.py +606 -346
  35. huggingface_hub/hf_api.py +2445 -1132
  36. huggingface_hub/hf_file_system.py +269 -152
  37. huggingface_hub/hub_mixin.py +61 -66
  38. huggingface_hub/inference/_client.py +501 -630
  39. huggingface_hub/inference/_common.py +133 -121
  40. huggingface_hub/inference/_generated/_async_client.py +536 -722
  41. huggingface_hub/inference/_generated/types/__init__.py +6 -1
  42. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +5 -6
  43. huggingface_hub/inference/_generated/types/base.py +10 -7
  44. huggingface_hub/inference/_generated/types/chat_completion.py +77 -31
  45. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  46. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  47. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  48. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  49. huggingface_hub/inference/_generated/types/image_to_image.py +8 -2
  50. huggingface_hub/inference/_generated/types/image_to_text.py +2 -3
  51. huggingface_hub/inference/_generated/types/image_to_video.py +60 -0
  52. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  53. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  54. huggingface_hub/inference/_generated/types/table_question_answering.py +5 -5
  55. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  56. huggingface_hub/inference/_generated/types/text_generation.py +11 -11
  57. huggingface_hub/inference/_generated/types/text_to_audio.py +1 -2
  58. huggingface_hub/inference/_generated/types/text_to_speech.py +1 -2
  59. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  60. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  61. huggingface_hub/inference/_generated/types/translation.py +2 -2
  62. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  63. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  64. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  65. huggingface_hub/inference/_mcp/__init__.py +0 -0
  66. huggingface_hub/inference/_mcp/_cli_hacks.py +88 -0
  67. huggingface_hub/inference/_mcp/agent.py +100 -0
  68. huggingface_hub/inference/_mcp/cli.py +247 -0
  69. huggingface_hub/inference/_mcp/constants.py +81 -0
  70. huggingface_hub/inference/_mcp/mcp_client.py +395 -0
  71. huggingface_hub/inference/_mcp/types.py +45 -0
  72. huggingface_hub/inference/_mcp/utils.py +128 -0
  73. huggingface_hub/inference/_providers/__init__.py +149 -20
  74. huggingface_hub/inference/_providers/_common.py +160 -37
  75. huggingface_hub/inference/_providers/black_forest_labs.py +12 -9
  76. huggingface_hub/inference/_providers/cerebras.py +6 -0
  77. huggingface_hub/inference/_providers/clarifai.py +13 -0
  78. huggingface_hub/inference/_providers/cohere.py +32 -0
  79. huggingface_hub/inference/_providers/fal_ai.py +231 -22
  80. huggingface_hub/inference/_providers/featherless_ai.py +38 -0
  81. huggingface_hub/inference/_providers/fireworks_ai.py +22 -1
  82. huggingface_hub/inference/_providers/groq.py +9 -0
  83. huggingface_hub/inference/_providers/hf_inference.py +143 -33
  84. huggingface_hub/inference/_providers/hyperbolic.py +9 -5
  85. huggingface_hub/inference/_providers/nebius.py +47 -5
  86. huggingface_hub/inference/_providers/novita.py +48 -5
  87. huggingface_hub/inference/_providers/nscale.py +44 -0
  88. huggingface_hub/inference/_providers/openai.py +25 -0
  89. huggingface_hub/inference/_providers/publicai.py +6 -0
  90. huggingface_hub/inference/_providers/replicate.py +46 -9
  91. huggingface_hub/inference/_providers/sambanova.py +37 -1
  92. huggingface_hub/inference/_providers/scaleway.py +28 -0
  93. huggingface_hub/inference/_providers/together.py +34 -5
  94. huggingface_hub/inference/_providers/wavespeed.py +138 -0
  95. huggingface_hub/inference/_providers/zai_org.py +17 -0
  96. huggingface_hub/lfs.py +33 -100
  97. huggingface_hub/repocard.py +34 -38
  98. huggingface_hub/repocard_data.py +79 -59
  99. huggingface_hub/serialization/__init__.py +0 -1
  100. huggingface_hub/serialization/_base.py +12 -15
  101. huggingface_hub/serialization/_dduf.py +8 -8
  102. huggingface_hub/serialization/_torch.py +69 -69
  103. huggingface_hub/utils/__init__.py +27 -8
  104. huggingface_hub/utils/_auth.py +7 -7
  105. huggingface_hub/utils/_cache_manager.py +92 -147
  106. huggingface_hub/utils/_chunk_utils.py +2 -3
  107. huggingface_hub/utils/_deprecation.py +1 -1
  108. huggingface_hub/utils/_dotenv.py +55 -0
  109. huggingface_hub/utils/_experimental.py +7 -5
  110. huggingface_hub/utils/_fixes.py +0 -10
  111. huggingface_hub/utils/_git_credential.py +5 -5
  112. huggingface_hub/utils/_headers.py +8 -30
  113. huggingface_hub/utils/_http.py +399 -237
  114. huggingface_hub/utils/_pagination.py +6 -6
  115. huggingface_hub/utils/_parsing.py +98 -0
  116. huggingface_hub/utils/_paths.py +5 -5
  117. huggingface_hub/utils/_runtime.py +74 -22
  118. huggingface_hub/utils/_safetensors.py +21 -21
  119. huggingface_hub/utils/_subprocess.py +13 -11
  120. huggingface_hub/utils/_telemetry.py +4 -4
  121. huggingface_hub/{commands/_cli_utils.py → utils/_terminal.py} +4 -4
  122. huggingface_hub/utils/_typing.py +25 -5
  123. huggingface_hub/utils/_validators.py +55 -74
  124. huggingface_hub/utils/_verification.py +167 -0
  125. huggingface_hub/utils/_xet.py +235 -0
  126. huggingface_hub/utils/_xet_progress_reporting.py +162 -0
  127. huggingface_hub/utils/insecure_hashlib.py +3 -5
  128. huggingface_hub/utils/logging.py +8 -11
  129. huggingface_hub/utils/tqdm.py +33 -4
  130. {huggingface_hub-0.29.0rc2.dist-info → huggingface_hub-1.1.3.dist-info}/METADATA +94 -82
  131. huggingface_hub-1.1.3.dist-info/RECORD +155 -0
  132. {huggingface_hub-0.29.0rc2.dist-info → huggingface_hub-1.1.3.dist-info}/WHEEL +1 -1
  133. huggingface_hub-1.1.3.dist-info/entry_points.txt +6 -0
  134. huggingface_hub/commands/delete_cache.py +0 -428
  135. huggingface_hub/commands/download.py +0 -200
  136. huggingface_hub/commands/huggingface_cli.py +0 -61
  137. huggingface_hub/commands/lfs.py +0 -200
  138. huggingface_hub/commands/repo_files.py +0 -128
  139. huggingface_hub/commands/scan_cache.py +0 -181
  140. huggingface_hub/commands/tag.py +0 -159
  141. huggingface_hub/commands/upload.py +0 -299
  142. huggingface_hub/commands/upload_large_folder.py +0 -129
  143. huggingface_hub/commands/user.py +0 -304
  144. huggingface_hub/commands/version.py +0 -37
  145. huggingface_hub/inference_api.py +0 -217
  146. huggingface_hub/keras_mixin.py +0 -500
  147. huggingface_hub/repository.py +0 -1477
  148. huggingface_hub/serialization/_tensorflow.py +0 -95
  149. huggingface_hub/utils/_hf_folder.py +0 -68
  150. huggingface_hub-0.29.0rc2.dist-info/RECORD +0 -131
  151. huggingface_hub-0.29.0rc2.dist-info/entry_points.txt +0 -6
  152. {huggingface_hub-0.29.0rc2.dist-info → huggingface_hub-1.1.3.dist-info/licenses}/LICENSE +0 -0
  153. {huggingface_hub-0.29.0rc2.dist-info → huggingface_hub-1.1.3.dist-info}/top_level.txt +0 -0
--- huggingface_hub/commands/upload_large_folder.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# coding=utf-8
-# Copyright 2023-present, the HuggingFace Inc. team.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Contains command to upload a large folder with the CLI."""
-
-import os
-from argparse import Namespace, _SubParsersAction
-from typing import List, Optional
-
-from huggingface_hub import logging
-from huggingface_hub.commands import BaseHuggingfaceCLICommand
-from huggingface_hub.hf_api import HfApi
-from huggingface_hub.utils import disable_progress_bars
-
-from ._cli_utils import ANSI
-
-
-logger = logging.get_logger(__name__)
-
-
-class UploadLargeFolderCommand(BaseHuggingfaceCLICommand):
-    @staticmethod
-    def register_subcommand(parser: _SubParsersAction):
-        subparser = parser.add_parser("upload-large-folder", help="Upload a large folder to a repo on the Hub")
-        subparser.add_argument(
-            "repo_id", type=str, help="The ID of the repo to upload to (e.g. `username/repo-name`)."
-        )
-        subparser.add_argument("local_path", type=str, help="Local path to the file or folder to upload.")
-        subparser.add_argument(
-            "--repo-type",
-            choices=["model", "dataset", "space"],
-            help="Type of the repo to upload to (e.g. `dataset`).",
-        )
-        subparser.add_argument(
-            "--revision",
-            type=str,
-            help=("An optional Git revision to push to. It can be a branch name or a PR reference."),
-        )
-        subparser.add_argument(
-            "--private",
-            action="store_true",
-            help=(
-                "Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already exists."
-            ),
-        )
-        subparser.add_argument("--include", nargs="*", type=str, help="Glob patterns to match files to upload.")
-        subparser.add_argument("--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to upload.")
-        subparser.add_argument(
-            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
-        )
-        subparser.add_argument(
-            "--num-workers", type=int, help="Number of workers to use to hash, upload and commit files."
-        )
-        subparser.add_argument("--no-report", action="store_true", help="Whether to disable regular status report.")
-        subparser.add_argument("--no-bars", action="store_true", help="Whether to disable progress bars.")
-        subparser.set_defaults(func=UploadLargeFolderCommand)
-
-    def __init__(self, args: Namespace) -> None:
-        self.repo_id: str = args.repo_id
-        self.local_path: str = args.local_path
-        self.repo_type: str = args.repo_type
-        self.revision: Optional[str] = args.revision
-        self.private: bool = args.private
-
-        self.include: Optional[List[str]] = args.include
-        self.exclude: Optional[List[str]] = args.exclude
-
-        self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
-
-        self.num_workers: Optional[int] = args.num_workers
-        self.no_report: bool = args.no_report
-        self.no_bars: bool = args.no_bars
-
-        if not os.path.isdir(self.local_path):
-            raise ValueError("Large upload is only supported for folders.")
-
-    def run(self) -> None:
-        logging.set_verbosity_info()
-
-        print(
-            ANSI.yellow(
-                "You are about to upload a large folder to the Hub using `huggingface-cli upload-large-folder`. "
-                "This is a new feature so feedback is very welcome!\n"
-                "\n"
-                "A few things to keep in mind:\n"
-                " - Repository limits still apply: https://huggingface.co/docs/hub/repositories-recommendations\n"
-                " - Do not start several processes in parallel.\n"
-                " - You can interrupt and resume the process at any time. "
-                "The script will pick up where it left off except for partially uploaded files that would have to be entirely reuploaded.\n"
-                " - Do not upload the same folder to several repositories. If you need to do so, you must delete the `./.cache/huggingface/` folder first.\n"
-                "\n"
-                f"Some temporary metadata will be stored under `{self.local_path}/.cache/huggingface`.\n"
-                " - You must not modify those files manually.\n"
-                " - You must not delete the `./.cache/huggingface/` folder while a process is running.\n"
-                " - You can delete the `./.cache/huggingface/` folder to reinitialize the upload state when process is not running. Files will have to be hashed and preuploaded again, except for already committed files.\n"
-                "\n"
-                "If the process output is too verbose, you can disable the progress bars with `--no-bars`. "
-                "You can also entirely disable the status report with `--no-report`.\n"
-                "\n"
-                "For more details, run `huggingface-cli upload-large-folder --help` or check the documentation at "
-                "https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-large-folder."
-            )
-        )
-
-        if self.no_bars:
-            disable_progress_bars()
-
-        self.api.upload_large_folder(
-            repo_id=self.repo_id,
-            folder_path=self.local_path,
-            repo_type=self.repo_type,
-            revision=self.revision,
-            private=self.private,
-            allow_patterns=self.include,
-            ignore_patterns=self.exclude,
-            num_workers=self.num_workers,
-            print_report=not self.no_report,
-        )
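The command removed above is a thin wrapper around `HfApi.upload_large_folder()`, which it calls with the arguments parsed from the CLI. As a minimal sketch (not taken from the package; the repo id and folder path are placeholders), the equivalent programmatic call looks like this:

```python
# Minimal sketch of the call the deleted command wraps; repo_id and folder_path
# are placeholders, and the keyword arguments mirror those used in the code above.
from huggingface_hub import HfApi

api = HfApi()  # optionally HfApi(token="hf_***")
api.upload_large_folder(
    repo_id="username/my-dataset",   # placeholder repo id
    folder_path="./my-dataset",      # placeholder local folder
    repo_type="dataset",
    num_workers=4,
    print_report=True,
)
```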
--- huggingface_hub/commands/user.py
+++ /dev/null
@@ -1,304 +0,0 @@
-# Copyright 2020 The HuggingFace Team. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Contains commands to authenticate to the Hugging Face Hub and interact with your repositories.
-
-Usage:
-    # login and save token locally.
-    huggingface-cli login --token=hf_*** --add-to-git-credential
-
-    # switch between tokens
-    huggingface-cli auth switch
-
-    # list all tokens
-    huggingface-cli auth list
-
-    # logout from a specific token, if no token-name is provided, all tokens will be deleted from your machine.
-    huggingface-cli logout --token-name=your_token_name
-
-    # find out which huggingface.co account you are logged in as
-    huggingface-cli whoami
-
-    # create a new dataset repo on the Hub
-    huggingface-cli repo create mydataset --type=dataset
-
-"""
-
-import subprocess
-from argparse import _SubParsersAction
-from typing import List, Optional
-
-from requests.exceptions import HTTPError
-
-from huggingface_hub.commands import BaseHuggingfaceCLICommand
-from huggingface_hub.constants import ENDPOINT, REPO_TYPES, REPO_TYPES_URL_PREFIXES, SPACES_SDK_TYPES
-from huggingface_hub.hf_api import HfApi
-
-from .._login import ( # noqa: F401 # for backward compatibility # noqa: F401 # for backward compatibility
-    NOTEBOOK_LOGIN_PASSWORD_HTML,
-    NOTEBOOK_LOGIN_TOKEN_HTML_END,
-    NOTEBOOK_LOGIN_TOKEN_HTML_START,
-    auth_list,
-    auth_switch,
-    login,
-    logout,
-    notebook_login,
-)
-from ..utils import get_stored_tokens, get_token, logging
-from ._cli_utils import ANSI
-
-
-logger = logging.get_logger(__name__)
-
-try:
-    from InquirerPy import inquirer
-    from InquirerPy.base.control import Choice
-
-    _inquirer_py_available = True
-except ImportError:
-    _inquirer_py_available = False
-
-
-class UserCommands(BaseHuggingfaceCLICommand):
-    @staticmethod
-    def register_subcommand(parser: _SubParsersAction):
-        login_parser = parser.add_parser("login", help="Log in using a token from huggingface.co/settings/tokens")
-        login_parser.add_argument(
-            "--token",
-            type=str,
-            help="Token generated from https://huggingface.co/settings/tokens",
-        )
-        login_parser.add_argument(
-            "--add-to-git-credential",
-            action="store_true",
-            help="Optional: Save token to git credential helper.",
-        )
-        login_parser.set_defaults(func=lambda args: LoginCommand(args))
-        whoami_parser = parser.add_parser("whoami", help="Find out which huggingface.co account you are logged in as.")
-        whoami_parser.set_defaults(func=lambda args: WhoamiCommand(args))
-
-        logout_parser = parser.add_parser("logout", help="Log out")
-        logout_parser.add_argument(
-            "--token-name",
-            type=str,
-            help="Optional: Name of the access token to log out from.",
-        )
-        logout_parser.set_defaults(func=lambda args: LogoutCommand(args))
-
-        auth_parser = parser.add_parser("auth", help="Other authentication related commands")
-        auth_subparsers = auth_parser.add_subparsers(help="Authentication subcommands")
-        auth_switch_parser = auth_subparsers.add_parser("switch", help="Switch between access tokens")
-        auth_switch_parser.add_argument(
-            "--token-name",
-            type=str,
-            help="Optional: Name of the access token to switch to.",
-        )
-        auth_switch_parser.add_argument(
-            "--add-to-git-credential",
-            action="store_true",
-            help="Optional: Save token to git credential helper.",
-        )
-        auth_switch_parser.set_defaults(func=lambda args: AuthSwitchCommand(args))
-        auth_list_parser = auth_subparsers.add_parser("list", help="List all stored access tokens")
-        auth_list_parser.set_defaults(func=lambda args: AuthListCommand(args))
-        # new system: git-based repo system
-        repo_parser = parser.add_parser("repo", help="{create} Commands to interact with your huggingface.co repos.")
-        repo_subparsers = repo_parser.add_subparsers(help="huggingface.co repos related commands")
-        repo_create_parser = repo_subparsers.add_parser("create", help="Create a new repo on huggingface.co")
-        repo_create_parser.add_argument(
-            "name",
-            type=str,
-            help="Name for your repo. Will be namespaced under your username to build the repo id.",
-        )
-        repo_create_parser.add_argument(
-            "--type",
-            type=str,
-            help='Optional: repo_type: set to "dataset" or "space" if creating a dataset or space, default is model.',
-        )
-        repo_create_parser.add_argument("--organization", type=str, help="Optional: organization namespace.")
-        repo_create_parser.add_argument(
-            "--space_sdk",
-            type=str,
-            help='Optional: Hugging Face Spaces SDK type. Required when --type is set to "space".',
-            choices=SPACES_SDK_TYPES,
-        )
-        repo_create_parser.add_argument(
-            "-y",
-            "--yes",
-            action="store_true",
-            help="Optional: answer Yes to the prompt",
-        )
-        repo_create_parser.set_defaults(func=lambda args: RepoCreateCommand(args))
-
-
-class BaseUserCommand:
-    def __init__(self, args):
-        self.args = args
-        self._api = HfApi()
-
-
-class LoginCommand(BaseUserCommand):
-    def run(self):
-        logging.set_verbosity_info()
-        login(
-            token=self.args.token,
-            add_to_git_credential=self.args.add_to_git_credential,
-        )
-
-
-class LogoutCommand(BaseUserCommand):
-    def run(self):
-        logging.set_verbosity_info()
-        logout(token_name=self.args.token_name)
-
-
-class AuthSwitchCommand(BaseUserCommand):
-    def run(self):
-        logging.set_verbosity_info()
-        token_name = self.args.token_name
-        if token_name is None:
-            token_name = self._select_token_name()
-
-        if token_name is None:
-            print("No token name provided. Aborting.")
-            exit()
-        auth_switch(token_name, add_to_git_credential=self.args.add_to_git_credential)
-
-    def _select_token_name(self) -> Optional[str]:
-        token_names = list(get_stored_tokens().keys())
-
-        if not token_names:
-            logger.error("No stored tokens found. Please login first.")
-            return None
-
-        if _inquirer_py_available:
-            return self._select_token_name_tui(token_names)
-        # if inquirer is not available, use a simpler terminal UI
-        print("Available stored tokens:")
-        for i, token_name in enumerate(token_names, 1):
-            print(f"{i}. {token_name}")
-        while True:
-            try:
-                choice = input("Enter the number of the token to switch to (or 'q' to quit): ")
-                if choice.lower() == "q":
-                    return None
-                index = int(choice) - 1
-                if 0 <= index < len(token_names):
-                    return token_names[index]
-                else:
-                    print("Invalid selection. Please try again.")
-            except ValueError:
-                print("Invalid input. Please enter a number or 'q' to quit.")
-
-    def _select_token_name_tui(self, token_names: List[str]) -> Optional[str]:
-        choices = [Choice(token_name, name=token_name) for token_name in token_names]
-        try:
-            return inquirer.select(
-                message="Select a token to switch to:",
-                choices=choices,
-                default=None,
-            ).execute()
-        except KeyboardInterrupt:
-            logger.info("Token selection cancelled.")
-            return None
-
-
-class AuthListCommand(BaseUserCommand):
-    def run(self):
-        logging.set_verbosity_info()
-        auth_list()
-
-
-class WhoamiCommand(BaseUserCommand):
-    def run(self):
-        token = get_token()
-        if token is None:
-            print("Not logged in")
-            exit()
-        try:
-            info = self._api.whoami(token)
-            print(info["name"])
-            orgs = [org["name"] for org in info["orgs"]]
-            if orgs:
-                print(ANSI.bold("orgs: "), ",".join(orgs))
-
-            if ENDPOINT != "https://huggingface.co":
-                print(f"Authenticated through private endpoint: {ENDPOINT}")
-        except HTTPError as e:
-            print(e)
-            print(ANSI.red(e.response.text))
-            exit(1)
-
-
-class RepoCreateCommand(BaseUserCommand):
-    def run(self):
-        token = get_token()
-        if token is None:
-            print("Not logged in")
-            exit(1)
-        try:
-            stdout = subprocess.check_output(["git", "--version"]).decode("utf-8")
-            print(ANSI.gray(stdout.strip()))
-        except FileNotFoundError:
-            print("Looks like you do not have git installed, please install.")
-
-        try:
-            stdout = subprocess.check_output(["git-lfs", "--version"]).decode("utf-8")
-            print(ANSI.gray(stdout.strip()))
-        except FileNotFoundError:
-            print(
-                ANSI.red(
-                    "Looks like you do not have git-lfs installed, please install."
-                    " You can install from https://git-lfs.github.com/."
-                    " Then run `git lfs install` (you only have to do this once)."
-                )
-            )
-        print("")
-
-        user = self._api.whoami(token)["name"]
-        namespace = self.args.organization if self.args.organization is not None else user
-
-        repo_id = f"{namespace}/{self.args.name}"
-
-        if self.args.type not in REPO_TYPES:
-            print("Invalid repo --type")
-            exit(1)
-
-        if self.args.type in REPO_TYPES_URL_PREFIXES:
-            prefixed_repo_id = REPO_TYPES_URL_PREFIXES[self.args.type] + repo_id
-        else:
-            prefixed_repo_id = repo_id
-
-        print(f"You are about to create {ANSI.bold(prefixed_repo_id)}")
-
-        if not self.args.yes:
-            choice = input("Proceed? [Y/n] ").lower()
-            if not (choice == "" or choice == "y" or choice == "yes"):
-                print("Abort")
-                exit()
-        try:
-            url = self._api.create_repo(
-                repo_id=repo_id,
-                token=token,
-                repo_type=self.args.type,
-                space_sdk=self.args.space_sdk,
-            )
-        except HTTPError as e:
-            print(e)
-            print(ANSI.red(e.response.text))
-            exit(1)
-        print("\nYour repo now lives at:")
-        print(f" {ANSI.bold(url)}")
-        print("\nYou can clone it locally with the command below, and commit/push as usual.")
-        print(f"\n git clone {url}")
-        print("")
--- huggingface_hub/commands/version.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2022 The HuggingFace Team. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Contains command to print information about the version.
-
-Usage:
-    huggingface-cli version
-"""
-
-from argparse import _SubParsersAction
-
-from huggingface_hub import __version__
-
-from . import BaseHuggingfaceCLICommand
-
-
-class VersionCommand(BaseHuggingfaceCLICommand):
-    def __init__(self, args):
-        self.args = args
-
-    @staticmethod
-    def register_subcommand(parser: _SubParsersAction):
-        version_parser = parser.add_parser("version", help="Print information about the huggingface-cli version.")
-        version_parser.set_defaults(func=VersionCommand)
-
-    def run(self) -> None:
-        print(f"huggingface_hub version: {__version__}")
--- huggingface_hub/inference_api.py
+++ /dev/null
@@ -1,217 +0,0 @@
-import io
-from typing import Any, Dict, List, Optional, Union
-
-from . import constants
-from .hf_api import HfApi
-from .utils import build_hf_headers, get_session, is_pillow_available, logging, validate_hf_hub_args
-from .utils._deprecation import _deprecate_method
-
-
-logger = logging.get_logger(__name__)
-
-
-ALL_TASKS = [
-    # NLP
-    "text-classification",
-    "token-classification",
-    "table-question-answering",
-    "question-answering",
-    "zero-shot-classification",
-    "translation",
-    "summarization",
-    "conversational",
-    "feature-extraction",
-    "text-generation",
-    "text2text-generation",
-    "fill-mask",
-    "sentence-similarity",
-    # Audio
-    "text-to-speech",
-    "automatic-speech-recognition",
-    "audio-to-audio",
-    "audio-classification",
-    "voice-activity-detection",
-    # Computer vision
-    "image-classification",
-    "object-detection",
-    "image-segmentation",
-    "text-to-image",
-    "image-to-image",
-    # Others
-    "tabular-classification",
-    "tabular-regression",
-]
-
-
-class InferenceApi:
-    """Client to configure requests and make calls to the HuggingFace Inference API.
-
-    Example:
-
-    ```python
-    >>> from huggingface_hub.inference_api import InferenceApi
-
-    >>> # Mask-fill example
-    >>> inference = InferenceApi("bert-base-uncased")
-    >>> inference(inputs="The goal of life is [MASK].")
-    [{'sequence': 'the goal of life is life.', 'score': 0.10933292657136917, 'token': 2166, 'token_str': 'life'}]
-
-    >>> # Question Answering example
-    >>> inference = InferenceApi("deepset/roberta-base-squad2")
-    >>> inputs = {
-    ...     "question": "What's my name?",
-    ...     "context": "My name is Clara and I live in Berkeley.",
-    ... }
-    >>> inference(inputs)
-    {'score': 0.9326569437980652, 'start': 11, 'end': 16, 'answer': 'Clara'}
-
-    >>> # Zero-shot example
-    >>> inference = InferenceApi("typeform/distilbert-base-uncased-mnli")
-    >>> inputs = "Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!"
-    >>> params = {"candidate_labels": ["refund", "legal", "faq"]}
-    >>> inference(inputs, params)
-    {'sequence': 'Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!', 'labels': ['refund', 'faq', 'legal'], 'scores': [0.9378499388694763, 0.04914155602455139, 0.013008488342165947]}
-
-    >>> # Overriding configured task
-    >>> inference = InferenceApi("bert-base-uncased", task="feature-extraction")
-
-    >>> # Text-to-image
-    >>> inference = InferenceApi("stabilityai/stable-diffusion-2-1")
-    >>> inference("cat")
-    <PIL.PngImagePlugin.PngImageFile image (...)>
-
-    >>> # Return as raw response to parse the output yourself
-    >>> inference = InferenceApi("mio/amadeus")
-    >>> response = inference("hello world", raw_response=True)
-    >>> response.headers
-    {"Content-Type": "audio/flac", ...}
-    >>> response.content # raw bytes from server
-    b'(...)'
-    ```
-    """
-
-    @validate_hf_hub_args
-    @_deprecate_method(
-        version="1.0",
-        message=(
-            "`InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out"
-            " this guide to learn how to convert your script to use it:"
-            " https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client."
-        ),
-    )
-    def __init__(
-        self,
-        repo_id: str,
-        task: Optional[str] = None,
-        token: Optional[str] = None,
-        gpu: bool = False,
-    ):
-        """Inits headers and API call information.
-
-        Args:
-            repo_id (``str``):
-                Id of repository (e.g. `user/bert-base-uncased`).
-            task (``str``, `optional`, defaults ``None``):
-                Whether to force a task instead of using task specified in the
-                repository.
-            token (`str`, `optional`):
-                The API token to use as HTTP bearer authorization. This is not
-                the authentication token. You can find the token in
-                https://huggingface.co/settings/token. Alternatively, you can
-                find both your organizations and personal API tokens using
-                `HfApi().whoami(token)`.
-            gpu (`bool`, `optional`, defaults `False`):
-                Whether to use GPU instead of CPU for inference(requires Startup
-                plan at least).
-        """
-        self.options = {"wait_for_model": True, "use_gpu": gpu}
-        self.headers = build_hf_headers(token=token)
-
-        # Configure task
-        model_info = HfApi(token=token).model_info(repo_id=repo_id)
-        if not model_info.pipeline_tag and not task:
-            raise ValueError(
-                "Task not specified in the repository. Please add it to the model card"
-                " using pipeline_tag"
-                " (https://huggingface.co/docs#how-is-a-models-type-of-inference-api-and-widget-determined)"
-            )
-
-        if task and task != model_info.pipeline_tag:
-            if task not in ALL_TASKS:
-                raise ValueError(f"Invalid task {task}. Make sure it's valid.")
-
-            logger.warning(
-                "You're using a different task than the one specified in the"
-                " repository. Be sure to know what you're doing :)"
-            )
-            self.task = task
-        else:
-            assert model_info.pipeline_tag is not None, "Pipeline tag cannot be None"
-            self.task = model_info.pipeline_tag
-
-        self.api_url = f"{constants.INFERENCE_ENDPOINT}/pipeline/{self.task}/{repo_id}"
-
-    def __repr__(self):
-        # Do not add headers to repr to avoid leaking token.
-        return f"InferenceAPI(api_url='{self.api_url}', task='{self.task}', options={self.options})"
-
-    def __call__(
-        self,
-        inputs: Optional[Union[str, Dict, List[str], List[List[str]]]] = None,
-        params: Optional[Dict] = None,
-        data: Optional[bytes] = None,
-        raw_response: bool = False,
-    ) -> Any:
-        """Make a call to the Inference API.
-
-        Args:
-            inputs (`str` or `Dict` or `List[str]` or `List[List[str]]`, *optional*):
-                Inputs for the prediction.
-            params (`Dict`, *optional*):
-                Additional parameters for the models. Will be sent as `parameters` in the
-                payload.
-            data (`bytes`, *optional*):
-                Bytes content of the request. In this case, leave `inputs` and `params` empty.
-            raw_response (`bool`, defaults to `False`):
-                If `True`, the raw `Response` object is returned. You can parse its content
-                as preferred. By default, the content is parsed into a more practical format
-                (json dictionary or PIL Image for example).
-        """
-        # Build payload
-        payload: Dict[str, Any] = {
-            "options": self.options,
-        }
-        if inputs:
-            payload["inputs"] = inputs
-        if params:
-            payload["parameters"] = params
-
-        # Make API call
-        response = get_session().post(self.api_url, headers=self.headers, json=payload, data=data)
-
-        # Let the user handle the response
-        if raw_response:
-            return response
-
-        # By default, parse the response for the user.
-        content_type = response.headers.get("Content-Type") or ""
-        if content_type.startswith("image"):
-            if not is_pillow_available():
-                raise ImportError(
-                    f"Task '{self.task}' returned as image but Pillow is not installed."
-                    " Please install it (`pip install Pillow`) or pass"
-                    " `raw_response=True` to get the raw `Response` object and parse"
-                    " the image by yourself."
-                )
-
-            from PIL import Image
-
-            return Image.open(io.BytesIO(response.content))
-        elif content_type == "application/json":
-            return response.json()
-        else:
-            raise NotImplementedError(
-                f"{content_type} output type is not implemented yet. You can pass"
-                " `raw_response=True` to get the raw `Response` object and parse the"
-                " output by yourself."
-            )
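The removed `InferenceApi` client is the deprecated predecessor of `InferenceClient` (the deprecation message above links to the migration guide). As a hedged sketch, the first two docstring examples translate roughly as follows; the model ids are copied from the deleted docstring and may no longer be served:

```python
# Rough InferenceClient equivalents of the removed InferenceApi examples.
# Model ids come from the deleted docstring; availability is not guaranteed.
from huggingface_hub import InferenceClient

client = InferenceClient()

# Mask-fill example
print(client.fill_mask("The goal of life is [MASK].", model="bert-base-uncased"))

# Question-answering example
print(
    client.question_answering(
        question="What's my name?",
        context="My name is Clara and I live in Berkeley.",
        model="deepset/roberta-base-squad2",
    )
)
```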