huggingface-hub: huggingface_hub-0.12.0rc0-py3-none-any.whl → huggingface_hub-0.13.0rc0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. huggingface_hub/__init__.py +166 -126
  2. huggingface_hub/_commit_api.py +25 -51
  3. huggingface_hub/_login.py +4 -13
  4. huggingface_hub/_snapshot_download.py +45 -23
  5. huggingface_hub/_space_api.py +7 -0
  6. huggingface_hub/commands/delete_cache.py +13 -39
  7. huggingface_hub/commands/env.py +1 -3
  8. huggingface_hub/commands/huggingface_cli.py +1 -3
  9. huggingface_hub/commands/lfs.py +4 -8
  10. huggingface_hub/commands/scan_cache.py +5 -16
  11. huggingface_hub/commands/user.py +27 -45
  12. huggingface_hub/community.py +4 -4
  13. huggingface_hub/constants.py +22 -19
  14. huggingface_hub/fastai_utils.py +14 -23
  15. huggingface_hub/file_download.py +210 -121
  16. huggingface_hub/hf_api.py +500 -255
  17. huggingface_hub/hub_mixin.py +181 -176
  18. huggingface_hub/inference_api.py +4 -10
  19. huggingface_hub/keras_mixin.py +39 -71
  20. huggingface_hub/lfs.py +8 -24
  21. huggingface_hub/repocard.py +33 -48
  22. huggingface_hub/repocard_data.py +141 -30
  23. huggingface_hub/repository.py +41 -112
  24. huggingface_hub/templates/modelcard_template.md +39 -34
  25. huggingface_hub/utils/__init__.py +1 -0
  26. huggingface_hub/utils/_cache_assets.py +1 -4
  27. huggingface_hub/utils/_cache_manager.py +17 -39
  28. huggingface_hub/utils/_deprecation.py +8 -12
  29. huggingface_hub/utils/_errors.py +10 -57
  30. huggingface_hub/utils/_fixes.py +2 -6
  31. huggingface_hub/utils/_git_credential.py +5 -16
  32. huggingface_hub/utils/_headers.py +22 -11
  33. huggingface_hub/utils/_http.py +1 -4
  34. huggingface_hub/utils/_paths.py +5 -12
  35. huggingface_hub/utils/_runtime.py +2 -1
  36. huggingface_hub/utils/_telemetry.py +120 -0
  37. huggingface_hub/utils/_validators.py +5 -13
  38. huggingface_hub/utils/endpoint_helpers.py +1 -3
  39. huggingface_hub/utils/logging.py +10 -8
  40. {huggingface_hub-0.12.0rc0.dist-info → huggingface_hub-0.13.0rc0.dist-info}/METADATA +7 -14
  41. huggingface_hub-0.13.0rc0.dist-info/RECORD +56 -0
  42. huggingface_hub/py.typed +0 -0
  43. huggingface_hub-0.12.0rc0.dist-info/RECORD +0 -56
  44. {huggingface_hub-0.12.0rc0.dist-info → huggingface_hub-0.13.0rc0.dist-info}/LICENSE +0 -0
  45. {huggingface_hub-0.12.0rc0.dist-info → huggingface_hub-0.13.0rc0.dist-info}/WHEEL +0 -0
  46. {huggingface_hub-0.12.0rc0.dist-info → huggingface_hub-0.13.0rc0.dist-info}/entry_points.txt +0 -0
  47. {huggingface_hub-0.12.0rc0.dist-info → huggingface_hub-0.13.0rc0.dist-info}/top_level.txt +0 -0
@@ -46,7 +46,7 @@ import sys
46
46
  from typing import TYPE_CHECKING
47
47
 
48
48
 
49
- __version__ = "0.12.0.rc0"
49
+ __version__ = "0.13.0.rc0"
50
50
 
51
51
  # Alphabetical order of definitions is ensured in tests
52
52
  # WARNING: any comment added in this dictionary definition will be lost when
@@ -94,6 +94,7 @@ _SUBMOD_ATTRS = {
94
94
  ],
95
95
  "file_download": [
96
96
  "HfFileMetadata",
97
+ "_CACHED_NO_EXIST",
97
98
  "cached_download",
98
99
  "get_hf_file_metadata",
99
100
  "hf_hub_download",
@@ -106,6 +107,7 @@ _SUBMOD_ATTRS = {
106
107
  "CommitOperationAdd",
107
108
  "CommitOperationDelete",
108
109
  "DatasetSearchArguments",
110
+ "GitCommitInfo",
109
111
  "GitRefInfo",
110
112
  "GitRefs",
111
113
  "HfApi",
@@ -128,6 +130,7 @@ _SUBMOD_ATTRS = {
128
130
  "delete_repo",
129
131
  "delete_space_secret",
130
132
  "delete_tag",
133
+ "duplicate_space",
131
134
  "edit_discussion_comment",
132
135
  "get_dataset_tags",
133
136
  "get_discussion_details",
@@ -140,15 +143,18 @@ _SUBMOD_ATTRS = {
140
143
  "list_liked_repos",
141
144
  "list_metrics",
142
145
  "list_models",
146
+ "list_repo_commits",
143
147
  "list_repo_files",
144
148
  "list_repo_refs",
145
149
  "list_spaces",
146
150
  "merge_pull_request",
147
151
  "model_info",
148
152
  "move_repo",
153
+ "pause_space",
149
154
  "rename_discussion",
150
155
  "repo_type_and_id_from_hf_id",
151
156
  "request_space_hardware",
157
+ "restart_space",
152
158
  "set_access_token",
153
159
  "space_info",
154
160
  "unlike",
@@ -174,6 +180,8 @@ _SUBMOD_ATTRS = {
174
180
  "repocard": [
175
181
  "DatasetCard",
176
182
  "ModelCard",
183
+ "RepoCard",
184
+ "SpaceCard",
177
185
  "metadata_eval_result",
178
186
  "metadata_load",
179
187
  "metadata_save",
@@ -184,6 +192,7 @@ _SUBMOD_ATTRS = {
184
192
  "DatasetCardData",
185
193
  "EvalResult",
186
194
  "ModelCardData",
195
+ "SpaceCardData",
187
196
  ],
188
197
  "repository": [
189
198
  "Repository",
@@ -257,9 +266,7 @@ def _attach(package_name, submodules=None, submod_attrs=None):
257
266
  else:
258
267
  submodules = set(submodules)
259
268
 
260
- attr_to_modules = {
261
- attr: mod for mod, attrs in submod_attrs.items() for attr in attrs
262
- }
269
+ attr_to_modules = {attr: mod for mod, attrs in submod_attrs.items() for attr in attrs}
263
270
 
264
271
  __all__ = list(submodules | attr_to_modules.keys())
265
272
 
@@ -292,9 +299,7 @@ def _attach(package_name, submodules=None, submod_attrs=None):
292
299
  return __getattr__, __dir__, list(__all__)
293
300
 
294
301
 
295
- __getattr__, __dir__, __all__ = _attach(
296
- __name__, submodules=[], submod_attrs=_SUBMOD_ATTRS
297
- )
302
+ __getattr__, __dir__, __all__ = _attach(__name__, submodules=[], submod_attrs=_SUBMOD_ATTRS)
298
303
 
299
304
  # WARNING: any content below this statement is generated automatically. Any manual edit
300
305
  # will be lost when re-generating this file !
@@ -308,125 +313,160 @@ __getattr__, __dir__, __all__ = _attach(
308
313
  # make style
309
314
  # ```
310
315
  if TYPE_CHECKING: # pragma: no cover
311
- from ._login import interpreter_login # noqa: F401
312
- from ._login import login # noqa: F401
313
- from ._login import logout # noqa: F401
314
- from ._login import notebook_login # noqa: F401
316
+ from ._login import (
317
+ interpreter_login, # noqa: F401
318
+ login, # noqa: F401
319
+ logout, # noqa: F401
320
+ notebook_login, # noqa: F401
321
+ )
315
322
  from ._snapshot_download import snapshot_download # noqa: F401
316
- from ._space_api import SpaceHardware # noqa: F401
317
- from ._space_api import SpaceRuntime # noqa: F401
318
- from ._space_api import SpaceStage # noqa: F401
319
- from .community import Discussion # noqa: F401
320
- from .community import DiscussionComment # noqa: F401
321
- from .community import DiscussionCommit # noqa: F401
322
- from .community import DiscussionEvent # noqa: F401
323
- from .community import DiscussionStatusChange # noqa: F401
324
- from .community import DiscussionTitleChange # noqa: F401
325
- from .community import DiscussionWithDetails # noqa: F401
326
- from .constants import CONFIG_NAME # noqa: F401
327
- from .constants import FLAX_WEIGHTS_NAME # noqa: F401
328
- from .constants import HUGGINGFACE_CO_URL_HOME # noqa: F401
329
- from .constants import HUGGINGFACE_CO_URL_TEMPLATE # noqa: F401
330
- from .constants import PYTORCH_WEIGHTS_NAME # noqa: F401
331
- from .constants import REPO_TYPE_DATASET # noqa: F401
332
- from .constants import REPO_TYPE_MODEL # noqa: F401
333
- from .constants import REPO_TYPE_SPACE # noqa: F401
334
- from .constants import TF2_WEIGHTS_NAME # noqa: F401
335
- from .constants import TF_WEIGHTS_NAME # noqa: F401
336
- from .fastai_utils import _save_pretrained_fastai # noqa: F401
337
- from .fastai_utils import from_pretrained_fastai # noqa: F401
338
- from .fastai_utils import push_to_hub_fastai # noqa: F401
339
- from .file_download import HfFileMetadata # noqa: F401
340
- from .file_download import cached_download # noqa: F401
341
- from .file_download import get_hf_file_metadata # noqa: F401
342
- from .file_download import hf_hub_download # noqa: F401
343
- from .file_download import hf_hub_url # noqa: F401
344
- from .file_download import try_to_load_from_cache # noqa: F401
345
- from .hf_api import CommitInfo # noqa: F401
346
- from .hf_api import CommitOperation # noqa: F401
347
- from .hf_api import CommitOperationAdd # noqa: F401
348
- from .hf_api import CommitOperationDelete # noqa: F401
349
- from .hf_api import DatasetSearchArguments # noqa: F401
350
- from .hf_api import GitRefInfo # noqa: F401
351
- from .hf_api import GitRefs # noqa: F401
352
- from .hf_api import HfApi # noqa: F401
353
- from .hf_api import ModelSearchArguments # noqa: F401
354
- from .hf_api import RepoUrl # noqa: F401
355
- from .hf_api import UserLikes # noqa: F401
356
- from .hf_api import add_space_secret # noqa: F401
357
- from .hf_api import change_discussion_status # noqa: F401
358
- from .hf_api import comment_discussion # noqa: F401
359
- from .hf_api import create_branch # noqa: F401
360
- from .hf_api import create_commit # noqa: F401
361
- from .hf_api import create_discussion # noqa: F401
362
- from .hf_api import create_pull_request # noqa: F401
363
- from .hf_api import create_repo # noqa: F401
364
- from .hf_api import create_tag # noqa: F401
365
- from .hf_api import dataset_info # noqa: F401
366
- from .hf_api import delete_branch # noqa: F401
367
- from .hf_api import delete_file # noqa: F401
368
- from .hf_api import delete_folder # noqa: F401
369
- from .hf_api import delete_repo # noqa: F401
370
- from .hf_api import delete_space_secret # noqa: F401
371
- from .hf_api import delete_tag # noqa: F401
372
- from .hf_api import edit_discussion_comment # noqa: F401
373
- from .hf_api import get_dataset_tags # noqa: F401
374
- from .hf_api import get_discussion_details # noqa: F401
375
- from .hf_api import get_full_repo_name # noqa: F401
376
- from .hf_api import get_model_tags # noqa: F401
377
- from .hf_api import get_repo_discussions # noqa: F401
378
- from .hf_api import get_space_runtime # noqa: F401
379
- from .hf_api import like # noqa: F401
380
- from .hf_api import list_datasets # noqa: F401
381
- from .hf_api import list_liked_repos # noqa: F401
382
- from .hf_api import list_metrics # noqa: F401
383
- from .hf_api import list_models # noqa: F401
384
- from .hf_api import list_repo_files # noqa: F401
385
- from .hf_api import list_repo_refs # noqa: F401
386
- from .hf_api import list_spaces # noqa: F401
387
- from .hf_api import merge_pull_request # noqa: F401
388
- from .hf_api import model_info # noqa: F401
389
- from .hf_api import move_repo # noqa: F401
390
- from .hf_api import rename_discussion # noqa: F401
391
- from .hf_api import repo_type_and_id_from_hf_id # noqa: F401
392
- from .hf_api import request_space_hardware # noqa: F401
393
- from .hf_api import set_access_token # noqa: F401
394
- from .hf_api import space_info # noqa: F401
395
- from .hf_api import unlike # noqa: F401
396
- from .hf_api import unset_access_token # noqa: F401
397
- from .hf_api import update_repo_visibility # noqa: F401
398
- from .hf_api import upload_file # noqa: F401
399
- from .hf_api import upload_folder # noqa: F401
400
- from .hf_api import whoami # noqa: F401
401
- from .hub_mixin import ModelHubMixin # noqa: F401
402
- from .hub_mixin import PyTorchModelHubMixin # noqa: F401
323
+ from ._space_api import (
324
+ SpaceHardware, # noqa: F401
325
+ SpaceRuntime, # noqa: F401
326
+ SpaceStage, # noqa: F401
327
+ )
328
+ from .community import (
329
+ Discussion, # noqa: F401
330
+ DiscussionComment, # noqa: F401
331
+ DiscussionCommit, # noqa: F401
332
+ DiscussionEvent, # noqa: F401
333
+ DiscussionStatusChange, # noqa: F401
334
+ DiscussionTitleChange, # noqa: F401
335
+ DiscussionWithDetails, # noqa: F401
336
+ )
337
+ from .constants import (
338
+ CONFIG_NAME, # noqa: F401
339
+ FLAX_WEIGHTS_NAME, # noqa: F401
340
+ HUGGINGFACE_CO_URL_HOME, # noqa: F401
341
+ HUGGINGFACE_CO_URL_TEMPLATE, # noqa: F401
342
+ PYTORCH_WEIGHTS_NAME, # noqa: F401
343
+ REPO_TYPE_DATASET, # noqa: F401
344
+ REPO_TYPE_MODEL, # noqa: F401
345
+ REPO_TYPE_SPACE, # noqa: F401
346
+ TF2_WEIGHTS_NAME, # noqa: F401
347
+ TF_WEIGHTS_NAME, # noqa: F401
348
+ )
349
+ from .fastai_utils import (
350
+ _save_pretrained_fastai, # noqa: F401
351
+ from_pretrained_fastai, # noqa: F401
352
+ push_to_hub_fastai, # noqa: F401
353
+ )
354
+ from .file_download import (
355
+ _CACHED_NO_EXIST, # noqa: F401
356
+ HfFileMetadata, # noqa: F401
357
+ cached_download, # noqa: F401
358
+ get_hf_file_metadata, # noqa: F401
359
+ hf_hub_download, # noqa: F401
360
+ hf_hub_url, # noqa: F401
361
+ try_to_load_from_cache, # noqa: F401
362
+ )
363
+ from .hf_api import (
364
+ CommitInfo, # noqa: F401
365
+ CommitOperation, # noqa: F401
366
+ CommitOperationAdd, # noqa: F401
367
+ CommitOperationDelete, # noqa: F401
368
+ DatasetSearchArguments, # noqa: F401
369
+ GitCommitInfo, # noqa: F401
370
+ GitRefInfo, # noqa: F401
371
+ GitRefs, # noqa: F401
372
+ HfApi, # noqa: F401
373
+ ModelSearchArguments, # noqa: F401
374
+ RepoUrl, # noqa: F401
375
+ UserLikes, # noqa: F401
376
+ add_space_secret, # noqa: F401
377
+ change_discussion_status, # noqa: F401
378
+ comment_discussion, # noqa: F401
379
+ create_branch, # noqa: F401
380
+ create_commit, # noqa: F401
381
+ create_discussion, # noqa: F401
382
+ create_pull_request, # noqa: F401
383
+ create_repo, # noqa: F401
384
+ create_tag, # noqa: F401
385
+ dataset_info, # noqa: F401
386
+ delete_branch, # noqa: F401
387
+ delete_file, # noqa: F401
388
+ delete_folder, # noqa: F401
389
+ delete_repo, # noqa: F401
390
+ delete_space_secret, # noqa: F401
391
+ delete_tag, # noqa: F401
392
+ duplicate_space, # noqa: F401
393
+ edit_discussion_comment, # noqa: F401
394
+ get_dataset_tags, # noqa: F401
395
+ get_discussion_details, # noqa: F401
396
+ get_full_repo_name, # noqa: F401
397
+ get_model_tags, # noqa: F401
398
+ get_repo_discussions, # noqa: F401
399
+ get_space_runtime, # noqa: F401
400
+ like, # noqa: F401
401
+ list_datasets, # noqa: F401
402
+ list_liked_repos, # noqa: F401
403
+ list_metrics, # noqa: F401
404
+ list_models, # noqa: F401
405
+ list_repo_commits, # noqa: F401
406
+ list_repo_files, # noqa: F401
407
+ list_repo_refs, # noqa: F401
408
+ list_spaces, # noqa: F401
409
+ merge_pull_request, # noqa: F401
410
+ model_info, # noqa: F401
411
+ move_repo, # noqa: F401
412
+ pause_space, # noqa: F401
413
+ rename_discussion, # noqa: F401
414
+ repo_type_and_id_from_hf_id, # noqa: F401
415
+ request_space_hardware, # noqa: F401
416
+ restart_space, # noqa: F401
417
+ set_access_token, # noqa: F401
418
+ space_info, # noqa: F401
419
+ unlike, # noqa: F401
420
+ unset_access_token, # noqa: F401
421
+ update_repo_visibility, # noqa: F401
422
+ upload_file, # noqa: F401
423
+ upload_folder, # noqa: F401
424
+ whoami, # noqa: F401
425
+ )
426
+ from .hub_mixin import (
427
+ ModelHubMixin, # noqa: F401
428
+ PyTorchModelHubMixin, # noqa: F401
429
+ )
403
430
  from .inference_api import InferenceApi # noqa: F401
404
- from .keras_mixin import KerasModelHubMixin # noqa: F401
405
- from .keras_mixin import from_pretrained_keras # noqa: F401
406
- from .keras_mixin import push_to_hub_keras # noqa: F401
407
- from .keras_mixin import save_pretrained_keras # noqa: F401
408
- from .repocard import DatasetCard # noqa: F401
409
- from .repocard import ModelCard # noqa: F401
410
- from .repocard import metadata_eval_result # noqa: F401
411
- from .repocard import metadata_load # noqa: F401
412
- from .repocard import metadata_save # noqa: F401
413
- from .repocard import metadata_update # noqa: F401
414
- from .repocard_data import CardData # noqa: F401
415
- from .repocard_data import DatasetCardData # noqa: F401
416
- from .repocard_data import EvalResult # noqa: F401
417
- from .repocard_data import ModelCardData # noqa: F401
431
+ from .keras_mixin import (
432
+ KerasModelHubMixin, # noqa: F401
433
+ from_pretrained_keras, # noqa: F401
434
+ push_to_hub_keras, # noqa: F401
435
+ save_pretrained_keras, # noqa: F401
436
+ )
437
+ from .repocard import (
438
+ DatasetCard, # noqa: F401
439
+ ModelCard, # noqa: F401
440
+ RepoCard, # noqa: F401
441
+ SpaceCard, # noqa: F401
442
+ metadata_eval_result, # noqa: F401
443
+ metadata_load, # noqa: F401
444
+ metadata_save, # noqa: F401
445
+ metadata_update, # noqa: F401
446
+ )
447
+ from .repocard_data import (
448
+ CardData, # noqa: F401
449
+ DatasetCardData, # noqa: F401
450
+ EvalResult, # noqa: F401
451
+ ModelCardData, # noqa: F401
452
+ SpaceCardData, # noqa: F401
453
+ )
418
454
  from .repository import Repository # noqa: F401
419
- from .utils import CachedFileInfo # noqa: F401
420
- from .utils import CachedRepoInfo # noqa: F401
421
- from .utils import CachedRevisionInfo # noqa: F401
422
- from .utils import CacheNotFound # noqa: F401
423
- from .utils import CorruptedCacheException # noqa: F401
424
- from .utils import DeleteCacheStrategy # noqa: F401
425
- from .utils import HFCacheInfo # noqa: F401
426
- from .utils import HfFolder # noqa: F401
427
- from .utils import cached_assets_path # noqa: F401
428
- from .utils import dump_environment_info # noqa: F401
429
- from .utils import logging # noqa: F401
430
- from .utils import scan_cache_dir # noqa: F401
431
- from .utils.endpoint_helpers import DatasetFilter # noqa: F401
432
- from .utils.endpoint_helpers import ModelFilter # noqa: F401
455
+ from .utils import (
456
+ CachedFileInfo, # noqa: F401
457
+ CachedRepoInfo, # noqa: F401
458
+ CachedRevisionInfo, # noqa: F401
459
+ CacheNotFound, # noqa: F401
460
+ CorruptedCacheException, # noqa: F401
461
+ DeleteCacheStrategy, # noqa: F401
462
+ HFCacheInfo, # noqa: F401
463
+ HfFolder, # noqa: F401
464
+ cached_assets_path, # noqa: F401
465
+ dump_environment_info, # noqa: F401
466
+ logging, # noqa: F401
467
+ scan_cache_dir, # noqa: F401
468
+ )
469
+ from .utils.endpoint_helpers import (
470
+ DatasetFilter, # noqa: F401
471
+ ModelFilter, # noqa: F401
472
+ )
@@ -11,15 +11,20 @@ from dataclasses import dataclass, field
11
11
  from pathlib import Path, PurePosixPath
12
12
  from typing import Any, BinaryIO, Dict, Iterable, Iterator, List, Optional, Union
13
13
 
14
- from tqdm.contrib.concurrent import thread_map
15
-
16
14
  import requests
15
+ from tqdm.contrib.concurrent import thread_map
17
16
 
18
17
  from .constants import ENDPOINT
19
18
  from .lfs import UploadInfo, _validate_batch_actions, lfs_upload, post_lfs_batch_info
20
- from .utils import build_hf_headers, chunk_iterable, hf_raise_for_status, logging
19
+ from .utils import (
20
+ build_hf_headers,
21
+ chunk_iterable,
22
+ hf_raise_for_status,
23
+ logging,
24
+ tqdm_stream_file,
25
+ validate_hf_hub_args,
26
+ )
21
27
  from .utils import tqdm as hf_tqdm
22
- from .utils import tqdm_stream_file, validate_hf_hub_args
23
28
  from .utils._deprecation import _deprecate_method
24
29
  from .utils._typing import Literal
25
30
 
@@ -55,8 +60,7 @@ class CommitOperationDelete:
55
60
  self.is_folder = self.path_in_repo.endswith("/")
56
61
  if not isinstance(self.is_folder, bool):
57
62
  raise ValueError(
58
- "Wrong value for `is_folder`. Must be one of [`True`, `False`,"
59
- f" `'auto'`]. Got '{self.is_folder}'."
63
+ f"Wrong value for `is_folder`. Must be one of [`True`, `False`, `'auto'`]. Got '{self.is_folder}'."
60
64
  )
61
65
 
62
66
 
@@ -97,10 +101,7 @@ class CommitOperationAdd:
97
101
  if isinstance(self.path_or_fileobj, str):
98
102
  path_or_fileobj = os.path.normpath(os.path.expanduser(self.path_or_fileobj))
99
103
  if not os.path.isfile(path_or_fileobj):
100
- raise ValueError(
101
- f"Provided path: '{path_or_fileobj}' is not a file on the local"
102
- " file system"
103
- )
104
+ raise ValueError(f"Provided path: '{path_or_fileobj}' is not a file on the local file system")
104
105
  elif not isinstance(self.path_or_fileobj, (io.BufferedIOBase, bytes)):
105
106
  # ^^ Inspired from: https://stackoverflow.com/questions/44584829/how-to-determine-if-file-is-opened-in-binary-or-text-mode
106
107
  raise ValueError(
@@ -114,8 +115,7 @@ class CommitOperationAdd:
114
115
  self.path_or_fileobj.seek(0, os.SEEK_CUR)
115
116
  except (OSError, AttributeError) as exc:
116
117
  raise ValueError(
117
- "path_or_fileobj is a file-like object but does not implement"
118
- " seek() and tell()"
118
+ "path_or_fileobj is a file-like object but does not implement seek() and tell()"
119
119
  ) from exc
120
120
 
121
121
  # Compute "upload_info" attribute
@@ -126,9 +126,7 @@ class CommitOperationAdd:
126
126
  else:
127
127
  self.upload_info = UploadInfo.from_fileobj(self.path_or_fileobj)
128
128
 
129
- @_deprecate_method(
130
- version="0.14", message="Operation is validated at initialization."
131
- )
129
+ @_deprecate_method(version="0.14", message="Operation is validated at initialization.")
132
130
  def validate(self) -> None:
133
131
  pass
134
132
 
@@ -172,9 +170,7 @@ class CommitOperationAdd:
172
170
  config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
173
171
  ```
174
172
  """
175
- if isinstance(self.path_or_fileobj, str) or isinstance(
176
- self.path_or_fileobj, Path
177
- ):
173
+ if isinstance(self.path_or_fileobj, str) or isinstance(self.path_or_fileobj, Path):
178
174
  if with_tqdm:
179
175
  with tqdm_stream_file(self.path_or_fileobj) as file:
180
176
  yield file
@@ -302,8 +298,7 @@ def upload_lfs_files(
302
298
  if batch_errors_chunk:
303
299
  message = "\n".join(
304
300
  [
305
- f'Encountered error for file with OID {err.get("oid")}:'
306
- f' `{err.get("error", {}).get("message")}'
301
+ f'Encountered error for file with OID {err.get("oid")}: `{err.get("error", {}).get("message")}'
307
302
  for err in batch_errors_chunk
308
303
  ]
309
304
  )
@@ -331,17 +326,12 @@ def upload_lfs_files(
331
326
  def _inner_upload_lfs_object(batch_action):
332
327
  try:
333
328
  operation = oid2addop[batch_action["oid"]]
334
- return _upload_lfs_object(
335
- operation=operation, lfs_batch_action=batch_action, token=token
336
- )
329
+ return _upload_lfs_object(operation=operation, lfs_batch_action=batch_action, token=token)
337
330
  except Exception as exc:
338
- raise RuntimeError(
339
- f"Error while uploading '{operation.path_in_repo}' to the Hub."
340
- ) from exc
331
+ raise RuntimeError(f"Error while uploading '{operation.path_in_repo}' to the Hub.") from exc
341
332
 
342
333
  logger.debug(
343
- f"Uploading {len(filtered_actions)} LFS files to the Hub using up to"
344
- f" {num_threads} threads concurrently"
334
+ f"Uploading {len(filtered_actions)} LFS files to the Hub using up to {num_threads} threads concurrently"
345
335
  )
346
336
  thread_map(
347
337
  _inner_upload_lfs_object,
@@ -352,9 +342,7 @@ def upload_lfs_files(
352
342
  )
353
343
 
354
344
 
355
- def _upload_lfs_object(
356
- operation: CommitOperationAdd, lfs_batch_action: dict, token: Optional[str]
357
- ):
345
+ def _upload_lfs_object(operation: CommitOperationAdd, lfs_batch_action: dict, token: Optional[str]):
358
346
  """
359
347
  Handles uploading a given object to the Hub with the LFS protocol.
360
348
 
@@ -379,10 +367,7 @@ def _upload_lfs_object(
379
367
  actions = lfs_batch_action.get("actions")
380
368
  if actions is None:
381
369
  # The file was already uploaded
382
- logger.debug(
383
- f"Content of file {operation.path_in_repo} is already present upstream"
384
- " - skipping upload"
385
- )
370
+ logger.debug(f"Content of file {operation.path_in_repo} is already present upstream - skipping upload")
386
371
  return
387
372
  upload_action = lfs_batch_action["actions"].get("upload")
388
373
  verify_action = lfs_batch_action["actions"].get("verify")
@@ -424,7 +409,7 @@ def fetch_upload_modes(
424
409
  create_pr: bool = False,
425
410
  ) -> Dict[str, UploadMode]:
426
411
  """
427
- Requests the Hub "preupload" endpoint to determine wether each input file
412
+ Requests the Hub "preupload" endpoint to determine whether each input file
428
413
  should be uploaded as a regular git blob or as git LFS blob.
429
414
 
430
415
  Args:
@@ -476,9 +461,7 @@ def fetch_upload_modes(
476
461
  )
477
462
  hf_raise_for_status(resp)
478
463
  preupload_info = _validate_preupload_info(resp.json())
479
- upload_modes.update(
480
- **{file["path"]: file["uploadMode"] for file in preupload_info["files"]}
481
- )
464
+ upload_modes.update(**{file["path"]: file["uploadMode"] for file in preupload_info["files"]})
482
465
 
483
466
  # If a file is empty, it is most likely a mistake.
484
467
  # => a warning message is triggered to warn the user.
@@ -490,10 +473,7 @@ def fetch_upload_modes(
490
473
  if addition.upload_info.size == 0:
491
474
  path = addition.path_in_repo
492
475
  if not path.endswith(".gitkeep"):
493
- warnings.warn(
494
- f"About to commit an empty file: '{path}'. Are you sure this is"
495
- " intended?"
496
- )
476
+ warnings.warn(f"About to commit an empty file: '{path}'. Are you sure this is intended?")
497
477
  upload_modes[path] = "regular"
498
478
 
499
479
  return upload_modes
@@ -527,10 +507,7 @@ def prepare_commit_payload(
527
507
  # 2. Send operations, one per line
528
508
  for operation in operations:
529
509
  # 2.a. Case adding a regular file
530
- if (
531
- isinstance(operation, CommitOperationAdd)
532
- and upload_modes.get(operation.path_in_repo) == "regular"
533
- ):
510
+ if isinstance(operation, CommitOperationAdd) and upload_modes.get(operation.path_in_repo) == "regular":
534
511
  yield {
535
512
  "key": "file",
536
513
  "value": {
@@ -540,10 +517,7 @@ def prepare_commit_payload(
540
517
  },
541
518
  }
542
519
  # 2.b. Case adding an LFS file
543
- elif (
544
- isinstance(operation, CommitOperationAdd)
545
- and upload_modes.get(operation.path_in_repo) == "lfs"
546
- ):
520
+ elif isinstance(operation, CommitOperationAdd) and upload_modes.get(operation.path_in_repo) == "lfs":
547
521
  yield {
548
522
  "key": "lfsFile",
549
523
  "value": {
huggingface_hub/_login.py CHANGED
@@ -138,10 +138,7 @@ def interpreter_login() -> None:
138
138
  )
139
139
  print(" Setting a new token will erase the existing one.")
140
140
 
141
- print(
142
- " To login, `huggingface_hub` requires a token generated from"
143
- " https://huggingface.co/settings/tokens ."
144
- )
141
+ print(" To login, `huggingface_hub` requires a token generated from https://huggingface.co/settings/tokens .")
145
142
  if os.name == "nt":
146
143
  print("Token can be pasted using 'Right-Click'.")
147
144
  token = getpass("Token: ")
@@ -193,14 +190,10 @@ def notebook_login() -> None:
193
190
  " Colab) and you need the `ipywidgets` module: `pip install ipywidgets`."
194
191
  )
195
192
 
196
- box_layout = widgets.Layout(
197
- display="flex", flex_flow="column", align_items="center", width="50%"
198
- )
193
+ box_layout = widgets.Layout(display="flex", flex_flow="column", align_items="center", width="50%")
199
194
 
200
195
  token_widget = widgets.Password(description="Token:")
201
- git_checkbox_widget = widgets.Checkbox(
202
- value=True, description="Add token as git credential?"
203
- )
196
+ git_checkbox_widget = widgets.Checkbox(value=True, description="Add token as git credential?")
204
197
  token_finish_button = widgets.Button(description="Login")
205
198
 
206
199
  login_token_widget = widgets.VBox(
@@ -304,8 +297,6 @@ def _set_store_as_git_credential_helper_globally() -> None:
304
297
  raise EnvironmentError(exc.stderr)
305
298
 
306
299
 
307
- @_deprecate_method(
308
- version="0.14", message="Please use `list_credential_helpers` instead."
309
- )
300
+ @_deprecate_method(version="0.14", message="Please use `list_credential_helpers` instead.")
310
301
  def _currently_setup_credential_helpers(directory: Optional[str] = None) -> List[str]:
311
302
  return list_credential_helpers(directory)