huggingface-hub 0.34.4__py3-none-any.whl → 1.0.0rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub might be problematic. Click here for more details.

Files changed (125) hide show
  1. huggingface_hub/__init__.py +46 -45
  2. huggingface_hub/_commit_api.py +28 -28
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +167 -10
  6. huggingface_hub/_login.py +13 -39
  7. huggingface_hub/_oauth.py +8 -8
  8. huggingface_hub/_snapshot_download.py +14 -28
  9. huggingface_hub/_space_api.py +4 -4
  10. huggingface_hub/_tensorboard_logger.py +13 -14
  11. huggingface_hub/_upload_large_folder.py +15 -15
  12. huggingface_hub/_webhooks_payload.py +3 -3
  13. huggingface_hub/_webhooks_server.py +2 -2
  14. huggingface_hub/cli/_cli_utils.py +2 -2
  15. huggingface_hub/cli/auth.py +5 -6
  16. huggingface_hub/cli/cache.py +14 -20
  17. huggingface_hub/cli/download.py +4 -4
  18. huggingface_hub/cli/jobs.py +560 -11
  19. huggingface_hub/cli/lfs.py +4 -4
  20. huggingface_hub/cli/repo.py +7 -7
  21. huggingface_hub/cli/repo_files.py +2 -2
  22. huggingface_hub/cli/upload.py +4 -4
  23. huggingface_hub/cli/upload_large_folder.py +3 -3
  24. huggingface_hub/commands/_cli_utils.py +2 -2
  25. huggingface_hub/commands/delete_cache.py +13 -13
  26. huggingface_hub/commands/download.py +4 -13
  27. huggingface_hub/commands/lfs.py +4 -4
  28. huggingface_hub/commands/repo_files.py +2 -2
  29. huggingface_hub/commands/scan_cache.py +1 -1
  30. huggingface_hub/commands/tag.py +1 -3
  31. huggingface_hub/commands/upload.py +4 -4
  32. huggingface_hub/commands/upload_large_folder.py +3 -3
  33. huggingface_hub/commands/user.py +5 -6
  34. huggingface_hub/community.py +5 -5
  35. huggingface_hub/constants.py +3 -41
  36. huggingface_hub/dataclasses.py +16 -19
  37. huggingface_hub/errors.py +42 -29
  38. huggingface_hub/fastai_utils.py +8 -9
  39. huggingface_hub/file_download.py +153 -252
  40. huggingface_hub/hf_api.py +815 -600
  41. huggingface_hub/hf_file_system.py +98 -62
  42. huggingface_hub/hub_mixin.py +37 -57
  43. huggingface_hub/inference/_client.py +177 -325
  44. huggingface_hub/inference/_common.py +110 -124
  45. huggingface_hub/inference/_generated/_async_client.py +226 -432
  46. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  47. huggingface_hub/inference/_generated/types/base.py +10 -7
  48. huggingface_hub/inference/_generated/types/chat_completion.py +18 -16
  49. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  50. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  51. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  52. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  53. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  54. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  55. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  56. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  57. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  58. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  59. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  60. huggingface_hub/inference/_generated/types/translation.py +2 -2
  61. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  62. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  63. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  64. huggingface_hub/inference/_mcp/_cli_hacks.py +3 -3
  65. huggingface_hub/inference/_mcp/agent.py +3 -3
  66. huggingface_hub/inference/_mcp/cli.py +1 -1
  67. huggingface_hub/inference/_mcp/constants.py +2 -3
  68. huggingface_hub/inference/_mcp/mcp_client.py +58 -30
  69. huggingface_hub/inference/_mcp/types.py +10 -7
  70. huggingface_hub/inference/_mcp/utils.py +11 -7
  71. huggingface_hub/inference/_providers/__init__.py +2 -2
  72. huggingface_hub/inference/_providers/_common.py +49 -25
  73. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  74. huggingface_hub/inference/_providers/cohere.py +3 -3
  75. huggingface_hub/inference/_providers/fal_ai.py +25 -25
  76. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  77. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  78. huggingface_hub/inference/_providers/hf_inference.py +28 -20
  79. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  80. huggingface_hub/inference/_providers/nebius.py +10 -10
  81. huggingface_hub/inference/_providers/novita.py +5 -5
  82. huggingface_hub/inference/_providers/nscale.py +4 -4
  83. huggingface_hub/inference/_providers/replicate.py +15 -15
  84. huggingface_hub/inference/_providers/sambanova.py +6 -6
  85. huggingface_hub/inference/_providers/together.py +7 -7
  86. huggingface_hub/lfs.py +20 -31
  87. huggingface_hub/repocard.py +18 -18
  88. huggingface_hub/repocard_data.py +56 -56
  89. huggingface_hub/serialization/__init__.py +0 -1
  90. huggingface_hub/serialization/_base.py +9 -9
  91. huggingface_hub/serialization/_dduf.py +7 -7
  92. huggingface_hub/serialization/_torch.py +28 -28
  93. huggingface_hub/utils/__init__.py +10 -4
  94. huggingface_hub/utils/_auth.py +5 -5
  95. huggingface_hub/utils/_cache_manager.py +31 -31
  96. huggingface_hub/utils/_deprecation.py +1 -1
  97. huggingface_hub/utils/_dotenv.py +3 -3
  98. huggingface_hub/utils/_fixes.py +0 -10
  99. huggingface_hub/utils/_git_credential.py +4 -4
  100. huggingface_hub/utils/_headers.py +7 -29
  101. huggingface_hub/utils/_http.py +366 -208
  102. huggingface_hub/utils/_pagination.py +4 -4
  103. huggingface_hub/utils/_paths.py +5 -5
  104. huggingface_hub/utils/_runtime.py +15 -13
  105. huggingface_hub/utils/_safetensors.py +21 -21
  106. huggingface_hub/utils/_subprocess.py +9 -9
  107. huggingface_hub/utils/_telemetry.py +3 -3
  108. huggingface_hub/utils/_typing.py +25 -5
  109. huggingface_hub/utils/_validators.py +53 -72
  110. huggingface_hub/utils/_xet.py +16 -16
  111. huggingface_hub/utils/_xet_progress_reporting.py +32 -11
  112. huggingface_hub/utils/insecure_hashlib.py +3 -9
  113. huggingface_hub/utils/tqdm.py +3 -3
  114. {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/METADATA +18 -29
  115. huggingface_hub-1.0.0rc0.dist-info/RECORD +161 -0
  116. huggingface_hub/inference_api.py +0 -217
  117. huggingface_hub/keras_mixin.py +0 -500
  118. huggingface_hub/repository.py +0 -1477
  119. huggingface_hub/serialization/_tensorflow.py +0 -95
  120. huggingface_hub/utils/_hf_folder.py +0 -68
  121. huggingface_hub-0.34.4.dist-info/RECORD +0 -166
  122. {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/LICENSE +0 -0
  123. {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/WHEEL +0 -0
  124. {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/entry_points.txt +0 -0
  125. {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/top_level.txt +0 -0
@@ -2,7 +2,7 @@ import time
2
2
  from dataclasses import dataclass, field
3
3
  from datetime import datetime
4
4
  from enum import Enum
5
- from typing import TYPE_CHECKING, Dict, Optional, Union
5
+ from typing import TYPE_CHECKING, Optional, Union
6
6
 
7
7
  from huggingface_hub.errors import InferenceEndpointError, InferenceEndpointTimeoutError
8
8
 
@@ -62,7 +62,7 @@ class InferenceEndpoint:
62
62
  The timestamp of the last update of the Inference Endpoint.
63
63
  type ([`InferenceEndpointType`]):
64
64
  The type of the Inference Endpoint (public, protected, private).
65
- raw (`Dict`):
65
+ raw (`dict`):
66
66
  The raw dictionary data returned from the API.
67
67
  token (`str` or `bool`, *optional*):
68
68
  Authentication token for the Inference Endpoint, if set when requesting the API. Will default to the
@@ -112,7 +112,7 @@ class InferenceEndpoint:
112
112
  type: InferenceEndpointType = field(repr=False, init=False)
113
113
 
114
114
  # Raw dict from the API
115
- raw: Dict = field(repr=False)
115
+ raw: dict = field(repr=False)
116
116
 
117
117
  # Internal fields
118
118
  _token: Union[str, bool, None] = field(repr=False, compare=False)
@@ -120,7 +120,7 @@ class InferenceEndpoint:
120
120
 
121
121
  @classmethod
122
122
  def from_raw(
123
- cls, raw: Dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None
123
+ cls, raw: dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None
124
124
  ) -> "InferenceEndpoint":
125
125
  """Initialize object from raw dictionary."""
126
126
  if api is None:
@@ -260,8 +260,8 @@ class InferenceEndpoint:
260
260
  framework: Optional[str] = None,
261
261
  revision: Optional[str] = None,
262
262
  task: Optional[str] = None,
263
- custom_image: Optional[Dict] = None,
264
- secrets: Optional[Dict[str, str]] = None,
263
+ custom_image: Optional[dict] = None,
264
+ secrets: Optional[dict[str, str]] = None,
265
265
  ) -> "InferenceEndpoint":
266
266
  """Update the Inference Endpoint.
267
267
 
@@ -293,10 +293,10 @@ class InferenceEndpoint:
293
293
  The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
294
294
  task (`str`, *optional*):
295
295
  The task on which to deploy the model (e.g. `"text-classification"`).
296
- custom_image (`Dict`, *optional*):
296
+ custom_image (`dict`, *optional*):
297
297
  A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
298
298
  Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
299
- secrets (`Dict[str, str]`, *optional*):
299
+ secrets (`dict[str, str]`, *optional*):
300
300
  Secret values to inject in the container environment.
301
301
  Returns:
302
302
  [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
@@ -15,7 +15,7 @@
15
15
  from dataclasses import dataclass
16
16
  from datetime import datetime
17
17
  from enum import Enum
18
- from typing import Any, Dict, List, Optional
18
+ from typing import Any, Optional, Union
19
19
 
20
20
  from huggingface_hub import constants
21
21
  from huggingface_hub._space_api import SpaceHardware
@@ -71,13 +71,13 @@ class JobInfo:
71
71
  space_id (`str` or `None`):
72
72
  The Docker image from Hugging Face Spaces used for the Job.
73
73
  Can be None if docker_image is present instead.
74
- command (`List[str]` or `None`):
74
+ command (`list[str]` or `None`):
75
75
  Command of the Job, e.g. `["python", "-c", "print('hello world')"]`
76
- arguments (`List[str]` or `None`):
76
+ arguments (`list[str]` or `None`):
77
77
  Arguments passed to the command
78
- environment (`Dict[str]` or `None`):
78
+ environment (`dict[str, Any]` or `None`):
79
79
  Environment variables of the Job as a dictionary.
80
- secrets (`Dict[str]` or `None`):
80
+ secrets (`dict[str, Any]` or `None`):
81
81
  Secret environment variables of the Job (encrypted).
82
82
  flavor (`str` or `None`):
83
83
  Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
@@ -85,7 +85,7 @@ class JobInfo:
85
85
  status: (`JobStatus` or `None`):
86
86
  Status of the Job, e.g. `JobStatus(stage="RUNNING", message=None)`
87
87
  See [`JobStage`] for possible stage values.
88
- status: (`JobOwner` or `None`):
88
+ owner (`JobOwner` or `None`):
89
89
  Owner of the Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
90
90
 
91
91
  Example:
@@ -111,10 +111,10 @@ class JobInfo:
111
111
  created_at: Optional[datetime]
112
112
  docker_image: Optional[str]
113
113
  space_id: Optional[str]
114
- command: Optional[List[str]]
115
- arguments: Optional[List[str]]
116
- environment: Optional[Dict[str, Any]]
117
- secrets: Optional[Dict[str, Any]]
114
+ command: Optional[list[str]]
115
+ arguments: Optional[list[str]]
116
+ environment: Optional[dict[str, Any]]
117
+ secrets: Optional[dict[str, Any]]
118
118
  flavor: Optional[SpaceHardware]
119
119
  status: JobStatus
120
120
  owner: JobOwner
@@ -142,3 +142,160 @@ class JobInfo:
142
142
  # Inferred fields
143
143
  self.endpoint = kwargs.get("endpoint", constants.ENDPOINT)
144
144
  self.url = f"{self.endpoint}/jobs/{self.owner.name}/{self.id}"
145
+
146
+
147
+ @dataclass
148
+ class JobSpec:
149
+ docker_image: Optional[str]
150
+ space_id: Optional[str]
151
+ command: Optional[list[str]]
152
+ arguments: Optional[list[str]]
153
+ environment: Optional[dict[str, Any]]
154
+ secrets: Optional[dict[str, Any]]
155
+ flavor: Optional[SpaceHardware]
156
+ timeout: Optional[int]
157
+ tags: Optional[list[str]]
158
+ arch: Optional[str]
159
+
160
+ def __init__(self, **kwargs) -> None:
161
+ self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image")
162
+ self.space_id = kwargs.get("spaceId") or kwargs.get("space_id")
163
+ self.command = kwargs.get("command")
164
+ self.arguments = kwargs.get("arguments")
165
+ self.environment = kwargs.get("environment")
166
+ self.secrets = kwargs.get("secrets")
167
+ self.flavor = kwargs.get("flavor")
168
+ self.timeout = kwargs.get("timeout")
169
+ self.tags = kwargs.get("tags")
170
+ self.arch = kwargs.get("arch")
171
+
172
+
173
+ @dataclass
174
+ class LastJobInfo:
175
+ id: str
176
+ at: datetime
177
+
178
+ def __init__(self, **kwargs) -> None:
179
+ self.id = kwargs["id"]
180
+ self.at = parse_datetime(kwargs["at"])
181
+
182
+
183
+ @dataclass
184
+ class ScheduledJobStatus:
185
+ last_job: Optional[LastJobInfo]
186
+ next_job_run_at: Optional[datetime]
187
+
188
+ def __init__(self, **kwargs) -> None:
189
+ last_job = kwargs.get("lastJob") or kwargs.get("last_job")
190
+ self.last_job = LastJobInfo(**last_job) if last_job else None
191
+ next_job_run_at = kwargs.get("nextJobRunAt") or kwargs.get("next_job_run_at")
192
+ self.next_job_run_at = parse_datetime(str(next_job_run_at)) if next_job_run_at else None
193
+
194
+
195
+ @dataclass
196
+ class ScheduledJobInfo:
197
+ """
198
+ Contains information about a scheduled Job.
199
+
200
+ Args:
201
+ id (`str`):
202
+ Scheduled Job ID.
203
+ created_at (`datetime` or `None`):
204
+ When the scheduled Job was created.
205
+ tags (`list[str]` or `None`):
206
+ The tags of the scheduled Job.
207
+ schedule (`str` or `None`):
208
+ One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
209
+ CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
210
+ suspend (`bool` or `None`):
211
+ Whether the scheduled job is suspended (paused).
212
+ concurrency (`bool` or `None`):
213
+ Whether multiple instances of this Job can run concurrently.
214
+ status (`ScheduledJobStatus` or `None`):
215
+ Status of the scheduled Job.
216
+ owner: (`JobOwner` or `None`):
217
+ Owner of the scheduled Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
218
+ job_spec (`JobSpec` or `None`):
219
+ Specifications of the Job.
220
+
221
+ Example:
222
+
223
+ ```python
224
+ >>> from huggingface_hub import create_scheduled_job
225
+ >>> scheduled_job = create_scheduled_job(
226
+ ... image="python:3.12",
227
+ ... command=["python", "-c", "print('Hello from the cloud!')"],
228
+ ... schedule="@hourly",
229
+ ... )
230
+ >>> scheduled_job.id
231
+ '687fb701029421ae5549d999'
232
+ >>> scheduled_job.status.next_job_run_at
233
+ datetime.datetime(2025, 7, 22, 17, 6, 25, 79000, tzinfo=datetime.timezone.utc)
234
+ ```
235
+ """
236
+
237
+ id: str
238
+ created_at: Optional[datetime]
239
+ job_spec: JobSpec
240
+ schedule: Optional[str]
241
+ suspend: Optional[bool]
242
+ concurrency: Optional[bool]
243
+ status: ScheduledJobStatus
244
+ owner: JobOwner
245
+
246
+ def __init__(self, **kwargs) -> None:
247
+ self.id = kwargs["id"]
248
+ created_at = kwargs.get("createdAt") or kwargs.get("created_at")
249
+ self.created_at = parse_datetime(created_at) if created_at else None
250
+ self.job_spec = JobSpec(**(kwargs.get("job_spec") or kwargs.get("jobSpec", {})))
251
+ self.schedule = kwargs.get("schedule")
252
+ self.suspend = kwargs.get("suspend")
253
+ self.concurrency = kwargs.get("concurrency")
254
+ status = kwargs.get("status", {})
255
+ self.status = ScheduledJobStatus(
256
+ last_job=status.get("last_job") or status.get("lastJob"),
257
+ next_job_run_at=status.get("next_job_run_at") or status.get("nextJobRunAt"),
258
+ )
259
+ owner = kwargs.get("owner", {})
260
+ self.owner = JobOwner(id=owner["id"], name=owner["name"], type=owner["type"])
261
+
262
+
263
+ def _create_job_spec(
264
+ *,
265
+ image: str,
266
+ command: list[str],
267
+ env: Optional[dict[str, Any]],
268
+ secrets: Optional[dict[str, Any]],
269
+ flavor: Optional[SpaceHardware],
270
+ timeout: Optional[Union[int, float, str]],
271
+ ) -> dict[str, Any]:
272
+ # prepare job spec to send to HF Jobs API
273
+ job_spec: dict[str, Any] = {
274
+ "command": command,
275
+ "arguments": [],
276
+ "environment": env or {},
277
+ "flavor": flavor or SpaceHardware.CPU_BASIC,
278
+ }
279
+ # secrets are optional
280
+ if secrets:
281
+ job_spec["secrets"] = secrets
282
+ # timeout is optional
283
+ if timeout:
284
+ time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
285
+ if isinstance(timeout, str) and timeout[-1] in time_units_factors:
286
+ job_spec["timeoutSeconds"] = int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
287
+ else:
288
+ job_spec["timeoutSeconds"] = int(timeout)
289
+ # input is either from docker hub or from HF spaces
290
+ for prefix in (
291
+ "https://huggingface.co/spaces/",
292
+ "https://hf.co/spaces/",
293
+ "huggingface.co/spaces/",
294
+ "hf.co/spaces/",
295
+ ):
296
+ if image.startswith(prefix):
297
+ job_spec["spaceId"] = image[len(prefix) :]
298
+ break
299
+ else:
300
+ job_spec["dockerImage"] = image
301
+ return job_spec
huggingface_hub/_login.py CHANGED
@@ -41,7 +41,6 @@ from .utils._auth import (
41
41
  _save_token,
42
42
  get_stored_tokens,
43
43
  )
44
- from .utils._deprecation import _deprecate_arguments, _deprecate_positional_args
45
44
 
46
45
 
47
46
  logger = logging.get_logger(__name__)
@@ -55,18 +54,11 @@ _HF_LOGO_ASCII = """
55
54
  """
56
55
 
57
56
 
58
- @_deprecate_arguments(
59
- version="1.0",
60
- deprecated_args="write_permission",
61
- custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
62
- )
63
- @_deprecate_positional_args(version="1.0")
64
57
  def login(
65
58
  token: Optional[str] = None,
66
59
  *,
67
60
  add_to_git_credential: bool = False,
68
- new_session: bool = True,
69
- write_permission: bool = False,
61
+ skip_if_logged_in: bool = False,
70
62
  ) -> None:
71
63
  """Login the machine to access the Hub.
72
64
 
@@ -102,10 +94,8 @@ def login(
102
94
  is configured, a warning will be displayed to the user. If `token` is `None`,
103
95
  the value of `add_to_git_credential` is ignored and will be prompted again
104
96
  to the end user.
105
- new_session (`bool`, defaults to `True`):
106
- If `True`, will request a token even if one is already saved on the machine.
107
- write_permission (`bool`):
108
- Ignored and deprecated argument.
97
+ skip_if_logged_in (`bool`, defaults to `False`):
98
+ If `True`, do not prompt for token if user is already logged in.
109
99
  Raises:
110
100
  [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
111
101
  If an organization token is passed. Only personal account tokens are valid
@@ -125,9 +115,9 @@ def login(
125
115
  )
126
116
  _login(token, add_to_git_credential=add_to_git_credential)
127
117
  elif is_notebook():
128
- notebook_login(new_session=new_session)
118
+ notebook_login(skip_if_logged_in=skip_if_logged_in)
129
119
  else:
130
- interpreter_login(new_session=new_session)
120
+ interpreter_login(skip_if_logged_in=skip_if_logged_in)
131
121
 
132
122
 
133
123
  def logout(token_name: Optional[str] = None) -> None:
@@ -242,13 +232,7 @@ def auth_list() -> None:
242
232
  ###
243
233
 
244
234
 
245
- @_deprecate_arguments(
246
- version="1.0",
247
- deprecated_args="write_permission",
248
- custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
249
- )
250
- @_deprecate_positional_args(version="1.0")
251
- def interpreter_login(*, new_session: bool = True, write_permission: bool = False) -> None:
235
+ def interpreter_login(*, skip_if_logged_in: bool = False) -> None:
252
236
  """
253
237
  Displays a prompt to log in to the HF website and store the token.
254
238
 
@@ -259,12 +243,10 @@ def interpreter_login(*, new_session: bool = True, write_permission: bool = Fals
259
243
  For more details, see [`login`].
260
244
 
261
245
  Args:
262
- new_session (`bool`, defaults to `True`):
263
- If `True`, will request a token even if one is already saved on the machine.
264
- write_permission (`bool`):
265
- Ignored and deprecated argument.
246
+ skip_if_logged_in (`bool`, defaults to `False`):
247
+ If `True`, do not prompt for token if user is already logged in.
266
248
  """
267
- if not new_session and get_token() is not None:
249
+ if skip_if_logged_in and get_token() is not None:
268
250
  logger.info("User is already logged in.")
269
251
  return
270
252
 
@@ -314,13 +296,7 @@ NOTEBOOK_LOGIN_TOKEN_HTML_END = """
314
296
  notebooks. </center>"""
315
297
 
316
298
 
317
- @_deprecate_arguments(
318
- version="1.0",
319
- deprecated_args="write_permission",
320
- custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
321
- )
322
- @_deprecate_positional_args(version="1.0")
323
- def notebook_login(*, new_session: bool = True, write_permission: bool = False) -> None:
299
+ def notebook_login(*, skip_if_logged_in: bool = False) -> None:
324
300
  """
325
301
  Displays a widget to log in to the HF website and store the token.
326
302
 
@@ -331,10 +307,8 @@ def notebook_login(*, new_session: bool = True, write_permission: bool = False)
331
307
  For more details, see [`login`].
332
308
 
333
309
  Args:
334
- new_session (`bool`, defaults to `True`):
335
- If `True`, will request a token even if one is already saved on the machine.
336
- write_permission (`bool`):
337
- Ignored and deprecated argument.
310
+ skip_if_logged_in (`bool`, defaults to `False`):
311
+ If `True`, do not prompt for token if user is already logged in.
338
312
  """
339
313
  try:
340
314
  import ipywidgets.widgets as widgets # type: ignore
@@ -344,7 +318,7 @@ def notebook_login(*, new_session: bool = True, write_permission: bool = False)
344
318
  "The `notebook_login` function can only be used in a notebook (Jupyter or"
345
319
  " Colab) and you need the `ipywidgets` module: `pip install ipywidgets`."
346
320
  )
347
- if not new_session and get_token() is not None:
321
+ if skip_if_logged_in and get_token() is not None:
348
322
  logger.info("User is already logged in.")
349
323
  return
350
324
 
huggingface_hub/_oauth.py CHANGED
@@ -6,7 +6,7 @@ import time
6
6
  import urllib.parse
7
7
  import warnings
8
8
  from dataclasses import dataclass
9
- from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Tuple, Union
9
+ from typing import TYPE_CHECKING, Literal, Optional, Union
10
10
 
11
11
  from . import constants
12
12
  from .hf_api import whoami
@@ -39,7 +39,7 @@ class OAuthOrgInfo:
39
39
  Whether the org has a payment method set up. Hugging Face field.
40
40
  role_in_org (`Optional[str]`, *optional*):
41
41
  The user's role in the org. Hugging Face field.
42
- security_restrictions (`Optional[List[Literal["ip", "token-policy", "mfa", "sso"]]]`, *optional*):
42
+ security_restrictions (`Optional[list[Literal["ip", "token-policy", "mfa", "sso"]]]`, *optional*):
43
43
  Array of security restrictions that the user hasn't completed for this org. Possible values: "ip", "token-policy", "mfa", "sso". Hugging Face field.
44
44
  """
45
45
 
@@ -50,7 +50,7 @@ class OAuthOrgInfo:
50
50
  is_enterprise: bool
51
51
  can_pay: Optional[bool] = None
52
52
  role_in_org: Optional[str] = None
53
- security_restrictions: Optional[List[Literal["ip", "token-policy", "mfa", "sso"]]] = None
53
+ security_restrictions: Optional[list[Literal["ip", "token-policy", "mfa", "sso"]]] = None
54
54
 
55
55
 
56
56
  @dataclass
@@ -79,7 +79,7 @@ class OAuthUserInfo:
79
79
  Whether the user is a pro user. Hugging Face field.
80
80
  can_pay (`Optional[bool]`, *optional*):
81
81
  Whether the user has a payment method set up. Hugging Face field.
82
- orgs (`Optional[List[OrgInfo]]`, *optional*):
82
+ orgs (`Optional[list[OrgInfo]]`, *optional*):
83
83
  List of organizations the user is part of. Hugging Face field.
84
84
  """
85
85
 
@@ -93,7 +93,7 @@ class OAuthUserInfo:
93
93
  website: Optional[str]
94
94
  is_pro: bool
95
95
  can_pay: Optional[bool]
96
- orgs: Optional[List[OAuthOrgInfo]]
96
+ orgs: Optional[list[OAuthOrgInfo]]
97
97
 
98
98
 
99
99
  @dataclass
@@ -306,7 +306,7 @@ def _add_oauth_routes(app: "fastapi.FastAPI", route_prefix: str) -> None:
306
306
  target_url = request.query_params.get("_target_url")
307
307
 
308
308
  # Build redirect URI with the same query params as before and bump nb_redirects count
309
- query_params: Dict[str, Union[int, str]] = {"_nb_redirects": nb_redirects + 1}
309
+ query_params: dict[str, Union[int, str]] = {"_nb_redirects": nb_redirects + 1}
310
310
  if target_url:
311
311
  query_params["_target_url"] = target_url
312
312
 
@@ -406,7 +406,7 @@ def _get_redirect_target(request: "fastapi.Request", default_target: str = "/")
406
406
  return request.query_params.get("_target_url", default_target)
407
407
 
408
408
 
409
- def _get_mocked_oauth_info() -> Dict:
409
+ def _get_mocked_oauth_info() -> dict:
410
410
  token = get_token()
411
411
  if token is None:
412
412
  raise ValueError(
@@ -449,7 +449,7 @@ def _get_mocked_oauth_info() -> Dict:
449
449
  }
450
450
 
451
451
 
452
- def _get_oauth_uris(route_prefix: str = "/") -> Tuple[str, str, str]:
452
+ def _get_oauth_uris(route_prefix: str = "/") -> tuple[str, str, str]:
453
453
  route_prefix = route_prefix.strip("/")
454
454
  if route_prefix:
455
455
  route_prefix = f"/{route_prefix}"
@@ -1,8 +1,8 @@
1
1
  import os
2
2
  from pathlib import Path
3
- from typing import Dict, Iterable, List, Literal, Optional, Type, Union
3
+ from typing import Iterable, Optional, Union
4
4
 
5
- import requests
5
+ import httpx
6
6
  from tqdm.auto import tqdm as base_tqdm
7
7
  from tqdm.contrib.concurrent import thread_map
8
8
 
@@ -35,21 +35,17 @@ def snapshot_download(
35
35
  local_dir: Union[str, Path, None] = None,
36
36
  library_name: Optional[str] = None,
37
37
  library_version: Optional[str] = None,
38
- user_agent: Optional[Union[Dict, str]] = None,
39
- proxies: Optional[Dict] = None,
38
+ user_agent: Optional[Union[dict, str]] = None,
40
39
  etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
41
40
  force_download: bool = False,
42
41
  token: Optional[Union[bool, str]] = None,
43
42
  local_files_only: bool = False,
44
- allow_patterns: Optional[Union[List[str], str]] = None,
45
- ignore_patterns: Optional[Union[List[str], str]] = None,
43
+ allow_patterns: Optional[Union[list[str], str]] = None,
44
+ ignore_patterns: Optional[Union[list[str], str]] = None,
46
45
  max_workers: int = 8,
47
- tqdm_class: Optional[Type[base_tqdm]] = None,
48
- headers: Optional[Dict[str, str]] = None,
46
+ tqdm_class: Optional[type[base_tqdm]] = None,
47
+ headers: Optional[dict[str, str]] = None,
49
48
  endpoint: Optional[str] = None,
50
- # Deprecated args
51
- local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
52
- resume_download: Optional[bool] = None,
53
49
  ) -> str:
54
50
  """Download repo files.
55
51
 
@@ -85,12 +81,9 @@ def snapshot_download(
85
81
  The version of the library.
86
82
  user_agent (`str`, `dict`, *optional*):
87
83
  The user-agent info in the form of a dictionary or a string.
88
- proxies (`dict`, *optional*):
89
- Dictionary mapping protocol to the URL of the proxy passed to
90
- `requests.request`.
91
84
  etag_timeout (`float`, *optional*, defaults to `10`):
92
85
  When fetching ETag, how many seconds to wait for the server to send
93
- data before giving up which is passed to `requests.request`.
86
+ data before giving up which is passed to `httpx.request`.
94
87
  force_download (`bool`, *optional*, defaults to `False`):
95
88
  Whether the file should be downloaded even if it already exists in the local cache.
96
89
  token (`str`, `bool`, *optional*):
@@ -103,9 +96,9 @@ def snapshot_download(
103
96
  local_files_only (`bool`, *optional*, defaults to `False`):
104
97
  If `True`, avoid downloading the file and return the path to the
105
98
  local cached file if it exists.
106
- allow_patterns (`List[str]` or `str`, *optional*):
99
+ allow_patterns (`list[str]` or `str`, *optional*):
107
100
  If provided, only files matching at least one pattern are downloaded.
108
- ignore_patterns (`List[str]` or `str`, *optional*):
101
+ ignore_patterns (`list[str]` or `str`, *optional*):
109
102
  If provided, files matching any of the patterns are not downloaded.
110
103
  max_workers (`int`, *optional*):
111
104
  Number of concurrent threads to download files (1 thread = 1 file download).
@@ -163,14 +156,10 @@ def snapshot_download(
163
156
  try:
164
157
  # if we have internet connection we want to list files to download
165
158
  repo_info = api.repo_info(repo_id=repo_id, repo_type=repo_type, revision=revision)
166
- except (requests.exceptions.SSLError, requests.exceptions.ProxyError):
167
- # Actually raise for those subclasses of ConnectionError
159
+ except httpx.ProxyError:
160
+ # Actually raise on proxy error
168
161
  raise
169
- except (
170
- requests.exceptions.ConnectionError,
171
- requests.exceptions.Timeout,
172
- OfflineModeIsEnabled,
173
- ) as error:
162
+ except (httpx.ConnectError, httpx.TimeoutException, OfflineModeIsEnabled) as error:
174
163
  # Internet connection is down
175
164
  # => will try to use local files only
176
165
  api_call_error = error
@@ -178,7 +167,7 @@ def snapshot_download(
178
167
  except RevisionNotFoundError:
179
168
  # The repo was found but the revision doesn't exist on the Hub (never existed or got deleted)
180
169
  raise
181
- except requests.HTTPError as error:
170
+ except HfHubHTTPError as error:
182
171
  # Multiple reasons for an http error:
183
172
  # - Repository is private and invalid/missing token sent
184
173
  # - Repository is gated and invalid/missing token sent
@@ -311,13 +300,10 @@ def snapshot_download(
311
300
  endpoint=endpoint,
312
301
  cache_dir=cache_dir,
313
302
  local_dir=local_dir,
314
- local_dir_use_symlinks=local_dir_use_symlinks,
315
303
  library_name=library_name,
316
304
  library_version=library_version,
317
305
  user_agent=user_agent,
318
- proxies=proxies,
319
306
  etag_timeout=etag_timeout,
320
- resume_download=resume_download,
321
307
  force_download=force_download,
322
308
  token=token,
323
309
  headers=headers,
@@ -15,7 +15,7 @@
15
15
  from dataclasses import dataclass
16
16
  from datetime import datetime
17
17
  from enum import Enum
18
- from typing import Dict, Optional
18
+ from typing import Optional
19
19
 
20
20
  from huggingface_hub.utils import parse_datetime
21
21
 
@@ -128,9 +128,9 @@ class SpaceRuntime:
128
128
  requested_hardware: Optional[SpaceHardware]
129
129
  sleep_time: Optional[int]
130
130
  storage: Optional[SpaceStorage]
131
- raw: Dict
131
+ raw: dict
132
132
 
133
- def __init__(self, data: Dict) -> None:
133
+ def __init__(self, data: dict) -> None:
134
134
  self.stage = data["stage"]
135
135
  self.hardware = data.get("hardware", {}).get("current")
136
136
  self.requested_hardware = data.get("hardware", {}).get("requested")
@@ -160,7 +160,7 @@ class SpaceVariable:
160
160
  description: Optional[str]
161
161
  updated_at: Optional[datetime]
162
162
 
163
- def __init__(self, key: str, values: Dict) -> None:
163
+ def __init__(self, key: str, values: dict) -> None:
164
164
  self.key = key
165
165
  self.value = values["value"]
166
166
  self.description = values.get("description")
@@ -14,7 +14,7 @@
14
14
  """Contains a logger to push training logs to the Hub, using Tensorboard."""
15
15
 
16
16
  from pathlib import Path
17
- from typing import TYPE_CHECKING, List, Optional, Union
17
+ from typing import Optional, Union
18
18
 
19
19
  from ._commit_scheduler import CommitScheduler
20
20
  from .errors import EntryNotFoundError
@@ -26,25 +26,24 @@ from .utils import experimental
26
26
  # or from 'torch.utils.tensorboard'. Both are compatible so let's try to load
27
27
  # from either of them.
28
28
  try:
29
- from tensorboardX import SummaryWriter
29
+ from tensorboardX import SummaryWriter as _RuntimeSummaryWriter
30
30
 
31
31
  is_summary_writer_available = True
32
-
33
32
  except ImportError:
34
33
  try:
35
- from torch.utils.tensorboard import SummaryWriter
34
+ from torch.utils.tensorboard import SummaryWriter as _RuntimeSummaryWriter
36
35
 
37
- is_summary_writer_available = False
36
+ is_summary_writer_available = True
38
37
  except ImportError:
39
38
  # Dummy class to avoid failing at import. Will raise on instance creation.
40
- SummaryWriter = object
41
- is_summary_writer_available = False
39
+ class _DummySummaryWriter:
40
+ pass
42
41
 
43
- if TYPE_CHECKING:
44
- from tensorboardX import SummaryWriter
42
+ _RuntimeSummaryWriter = _DummySummaryWriter # type: ignore[assignment]
43
+ is_summary_writer_available = False
45
44
 
46
45
 
47
- class HFSummaryWriter(SummaryWriter):
46
+ class HFSummaryWriter(_RuntimeSummaryWriter):
48
47
  """
49
48
  Wrapper around the tensorboard's `SummaryWriter` to push training logs to the Hub.
50
49
 
@@ -78,10 +77,10 @@ class HFSummaryWriter(SummaryWriter):
78
77
  Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
79
78
  path_in_repo (`str`, *optional*):
80
79
  The path to the folder in the repo where the logs will be pushed. Defaults to "tensorboard/".
81
- repo_allow_patterns (`List[str]` or `str`, *optional*):
80
+ repo_allow_patterns (`list[str]` or `str`, *optional*):
82
81
  A list of patterns to include in the upload. Defaults to `"*.tfevents.*"`. Check out the
83
82
  [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
84
- repo_ignore_patterns (`List[str]` or `str`, *optional*):
83
+ repo_ignore_patterns (`list[str]` or `str`, *optional*):
85
84
  A list of patterns to exclude in the upload. Check out the
86
85
  [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
87
86
  token (`str`, *optional*):
@@ -138,8 +137,8 @@ class HFSummaryWriter(SummaryWriter):
138
137
  repo_revision: Optional[str] = None,
139
138
  repo_private: Optional[bool] = None,
140
139
  path_in_repo: Optional[str] = "tensorboard",
141
- repo_allow_patterns: Optional[Union[List[str], str]] = "*.tfevents.*",
142
- repo_ignore_patterns: Optional[Union[List[str], str]] = None,
140
+ repo_allow_patterns: Optional[Union[list[str], str]] = "*.tfevents.*",
141
+ repo_ignore_patterns: Optional[Union[list[str], str]] = None,
143
142
  token: Optional[str] = None,
144
143
  **kwargs,
145
144
  ):