huggingface-hub 1.0.0rc0__py3-none-any.whl → 1.0.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of huggingface-hub might be problematic.
- huggingface_hub/__init__.py +4 -4
- huggingface_hub/_commit_api.py +126 -66
- huggingface_hub/_commit_scheduler.py +4 -7
- huggingface_hub/_login.py +9 -15
- huggingface_hub/_tensorboard_logger.py +2 -5
- huggingface_hub/_webhooks_server.py +8 -20
- huggingface_hub/cli/__init__.py +0 -14
- huggingface_hub/cli/_cli_utils.py +79 -2
- huggingface_hub/cli/auth.py +104 -149
- huggingface_hub/cli/cache.py +97 -121
- huggingface_hub/cli/download.py +93 -110
- huggingface_hub/cli/hf.py +37 -41
- huggingface_hub/cli/jobs.py +687 -1014
- huggingface_hub/cli/lfs.py +116 -139
- huggingface_hub/cli/repo.py +290 -214
- huggingface_hub/cli/repo_files.py +50 -84
- huggingface_hub/cli/system.py +6 -25
- huggingface_hub/cli/upload.py +198 -212
- huggingface_hub/cli/upload_large_folder.py +90 -105
- huggingface_hub/dataclasses.py +3 -12
- huggingface_hub/errors.py +1 -1
- huggingface_hub/fastai_utils.py +22 -32
- huggingface_hub/file_download.py +18 -21
- huggingface_hub/hf_api.py +258 -410
- huggingface_hub/hf_file_system.py +17 -44
- huggingface_hub/inference/_client.py +25 -47
- huggingface_hub/inference/_generated/_async_client.py +25 -47
- huggingface_hub/inference/_mcp/agent.py +2 -5
- huggingface_hub/inference/_mcp/mcp_client.py +2 -5
- huggingface_hub/inference/_providers/__init__.py +11 -0
- huggingface_hub/inference/_providers/_common.py +1 -0
- huggingface_hub/inference/_providers/publicai.py +6 -0
- huggingface_hub/inference/_providers/scaleway.py +28 -0
- huggingface_hub/lfs.py +14 -8
- huggingface_hub/repocard.py +12 -16
- huggingface_hub/serialization/_base.py +3 -6
- huggingface_hub/serialization/_torch.py +16 -34
- huggingface_hub/utils/__init__.py +1 -1
- huggingface_hub/utils/_cache_manager.py +41 -71
- huggingface_hub/utils/_chunk_utils.py +2 -3
- huggingface_hub/utils/_http.py +32 -35
- huggingface_hub/utils/logging.py +8 -11
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc2.dist-info}/METADATA +7 -2
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc2.dist-info}/RECORD +48 -46
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc2.dist-info}/LICENSE +0 -0
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc2.dist-info}/WHEEL +0 -0
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc2.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc2.dist-info}/top_level.txt +0 -0
huggingface_hub/__init__.py
CHANGED

@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING


-__version__ = "1.0.0.rc0"
+__version__ = "1.0.0.rc2"

 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when
@@ -516,7 +516,7 @@ _SUBMOD_ATTRS = {
         "HfHubAsyncTransport",
         "HfHubTransport",
         "cached_assets_path",
-        "
+        "close_session",
         "dump_environment_info",
         "get_async_session",
         "get_session",
@@ -815,7 +815,7 @@ __all__ = [
     "cancel_access_request",
     "cancel_job",
     "change_discussion_status",
-    "
+    "close_session",
     "comment_discussion",
     "create_branch",
     "create_collection",
@@ -1518,7 +1518,7 @@ if TYPE_CHECKING:  # pragma: no cover
         HfHubAsyncTransport,  # noqa: F401
         HfHubTransport,  # noqa: F401
         cached_assets_path,  # noqa: F401
-
+        close_session,  # noqa: F401
         dump_environment_info,  # noqa: F401
         get_async_session,  # noqa: F401
         get_session,  # noqa: F401
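Besides the version bump, these hunks add `close_session` to the lazy-import table, `__all__`, and the `TYPE_CHECKING` imports. A minimal sketch of the new export in use, assuming `close_session()` tears down the shared HTTP session behind `get_session` (the diff confirms the exported names, not their signatures):

```python
from huggingface_hub import close_session, get_session

session = get_session()  # shared HTTP client, reused across Hub calls
# ... perform Hub operations ...
close_session()  # assumed teardown of the pooled client, e.g. at shutdown
```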
huggingface_hub/_commit_api.py
CHANGED

@@ -33,6 +33,7 @@ from .utils import (
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
+from .utils._runtime import is_xet_available


 if TYPE_CHECKING:
@@ -353,7 +354,7 @@ def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:


 @validate_hf_hub_args
-def _upload_lfs_files(
+def _upload_files(
     *,
     additions: list[CommitOperationAdd],
     repo_type: str,
@@ -362,6 +363,86 @@ def _upload_lfs_files(
     endpoint: Optional[str] = None,
     num_threads: int = 5,
     revision: Optional[str] = None,
+    create_pr: Optional[bool] = None,
+):
+    """
+    Negotiates per-file transfer (LFS vs Xet) and uploads in batches.
+    """
+    xet_additions: list[CommitOperationAdd] = []
+    lfs_actions: list[dict[str, Any]] = []
+    lfs_oid2addop: dict[str, CommitOperationAdd] = {}
+
+    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
+        chunk_list = [op for op in chunk]
+
+        transfers: list[str] = ["basic", "multipart"]
+        has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
+        if is_xet_available():
+            if not has_buffered_io_data:
+                transfers.append("xet")
+            else:
+                logger.warning(
+                    "Uploading files as a binary IO buffer is not supported by Xet Storage. "
+                    "Falling back to HTTP upload."
+                )
+
+        actions_chunk, errors_chunk, chosen_transfer = post_lfs_batch_info(
+            upload_infos=[op.upload_info for op in chunk_list],
+            repo_id=repo_id,
+            repo_type=repo_type,
+            revision=revision,
+            endpoint=endpoint,
+            headers=headers,
+            token=None,  # already passed in 'headers'
+            transfers=transfers,
+        )
+        if errors_chunk:
+            message = "\n".join(
+                [
+                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
+                    for err in errors_chunk
+                ]
+            )
+            raise ValueError(f"LFS batch API returned errors:\n{message}")
+
+        # If server returns a transfer we didn't offer (e.g "xet" while uploading from BytesIO),
+        # fall back to LFS for this chunk.
+        if chosen_transfer == "xet" and ("xet" in transfers):
+            xet_additions.extend(chunk_list)
+        else:
+            lfs_actions.extend(actions_chunk)
+            for op in chunk_list:
+                lfs_oid2addop[op.upload_info.sha256.hex()] = op

+    if len(lfs_actions) > 0:
+        _upload_lfs_files(
+            actions=lfs_actions,
+            oid2addop=lfs_oid2addop,
+            headers=headers,
+            endpoint=endpoint,
+            num_threads=num_threads,
+        )
+
+    if len(xet_additions) > 0:
+        _upload_xet_files(
+            additions=xet_additions,
+            repo_type=repo_type,
+            repo_id=repo_id,
+            headers=headers,
+            endpoint=endpoint,
+            revision=revision,
+            create_pr=create_pr,
+        )
+
+
+@validate_hf_hub_args
+def _upload_lfs_files(
+    *,
+    actions: list[dict[str, Any]],
+    oid2addop: dict[str, CommitOperationAdd],
+    headers: dict[str, str],
+    endpoint: Optional[str] = None,
+    num_threads: int = 5,
 ):
     """
     Uploads the content of `additions` to the Hub using the large file storage protocol.
@@ -370,9 +451,21 @@ def _upload_lfs_files(
     - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md

     Args:
-        additions (`list` of `CommitOperationAdd`):
-            The files to be uploaded
-        repo_type (`str`):
+        actions (`list[dict[str, Any]]`):
+            LFS batch actions returned by the server.
+        oid2addop (`dict[str, CommitOperationAdd]`):
+            A dictionary mapping the OID of the file to the corresponding `CommitOperationAdd` object.
+        headers (`dict[str, str]`):
+            Headers to use for the request, including authorization headers and user agent.
+        endpoint (`str`, *optional*):
+            The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
+        num_threads (`int`, *optional*):
+            The number of concurrent threads to use when uploading. Defaults to 5.
+
+    Raises:
+        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+            If an upload failed for any reason
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
@@ -392,50 +485,17 @@ def _upload_lfs_files(
     [`HfHubHTTPError`]
         If the LFS batch endpoint returned an HTTP error.
     """
-    # Step 1: retrieve upload instructions from the LFS batch endpoint.
-    # Upload instructions are retrieved by chunk of 256 files to avoid reaching
-    # the payload limit.
-    batch_actions: list[dict] = []
-    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
-        batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
-            upload_infos=[op.upload_info for op in chunk],
-            repo_id=repo_id,
-            repo_type=repo_type,
-            revision=revision,
-            endpoint=endpoint,
-            headers=headers,
-            token=None,  # already passed in 'headers'
-        )
-
-        # If at least 1 error, we do not retrieve information for other chunks
-        if batch_errors_chunk:
-            message = "\n".join(
-                [
-                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}"
-                    for err in batch_errors_chunk
-                ]
-            )
-            raise ValueError(f"LFS batch endpoint returned errors:\n{message}")
-
-        batch_actions += batch_actions_chunk
-    oid2addop = {add_op.upload_info.sha256.hex(): add_op for add_op in additions}
-
-    # Step 2: ignore files that have already been uploaded
+    # Filter out files already present upstream
     filtered_actions = []
-    for action in batch_actions:
+    for action in actions:
         if action.get("actions") is None:
             logger.debug(
-                f"Content of file {oid2addop[action['oid']].path_in_repo} is already"
-                " present upstream - skipping upload."
+                f"Content of file {oid2addop[action['oid']].path_in_repo} is already present upstream - skipping upload."
             )
         else:
             filtered_actions.append(action)

-    if len(filtered_actions) == 0:
-        logger.debug("No LFS files to upload.")
-        return
-
-    # Step 3: upload files concurrently according to these instructions
+    # Upload according to server-provided actions
     def _wrapped_lfs_upload(batch_action) -> None:
         try:
             operation = oid2addop[batch_action["oid"]]
@@ -479,7 +539,7 @@ def _upload_xet_files(
     This chunks the files and deduplicates the chunks before uploading them to xetcas storage.

     Args:
-        additions (
+        additions (`list` of `CommitOperationAdd`):
             The files to be uploaded.
         repo_type (`str`):
             Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
@@ -576,30 +636,30 @@ def _upload_xet_files(
     progress, progress_callback = None, None

     try:
-        for
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        all_bytes_ops = [op for op in additions if isinstance(op.path_or_fileobj, bytes)]
+        all_paths_ops = [op for op in additions if isinstance(op.path_or_fileobj, (str, Path))]
+
+        if len(all_paths_ops) > 0:
+            all_paths = [str(op.path_or_fileobj) for op in all_paths_ops]
+            upload_files(
+                all_paths,
+                xet_endpoint,
+                access_token_info,
+                token_refresher,
+                progress_callback,
+                repo_type,
+            )
+
+        if len(all_bytes_ops) > 0:
+            all_bytes = [op.path_or_fileobj for op in all_bytes_ops]
+            upload_bytes(
+                all_bytes,
+                xet_endpoint,
+                access_token_info,
+                token_refresher,
+                progress_callback,
+                repo_type,
+            )

     finally:
         if progress is not None:
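The heart of this refactor is the new `_upload_files` entry point: per 256-file chunk it advertises the transfer adapters the client supports ("basic", "multipart", plus "xet" when available and no operation wraps a buffered reader), then routes the chunk to Xet or LFS depending on the transfer the server picks. A standalone sketch of that routing logic; `negotiate_transfer` is a hypothetical stand-in for the `post_lfs_batch_info` call:

```python
import io
from typing import Any

UPLOAD_BATCH_MAX_NUM_FILES = 256  # batch limit, mirroring the constant used above


def negotiate_transfer(chunk: list[Any], transfers: list[str]) -> str:
    # Hypothetical stand-in for post_lfs_batch_info: the server returns one
    # of the transfer adapters offered by the client. Naively prefer xet here.
    return "xet" if "xet" in transfers else "basic"


def route_uploads(operations: list[Any], xet_available: bool) -> tuple[list[Any], list[Any]]:
    """Split operations into (xet_ops, lfs_ops) the way _upload_files does."""
    xet_ops: list[Any] = []
    lfs_ops: list[Any] = []
    for start in range(0, len(operations), UPLOAD_BATCH_MAX_NUM_FILES):
        chunk = operations[start : start + UPLOAD_BATCH_MAX_NUM_FILES]
        transfers = ["basic", "multipart"]
        # Xet cannot ingest buffered readers, so only offer it for bytes/path uploads.
        if xet_available and not any(
            isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk
        ):
            transfers.append("xet")
        if negotiate_transfer(chunk, transfers) == "xet" and "xet" in transfers:
            xet_ops.extend(chunk)
        else:
            lfs_ops.extend(chunk)
    return xet_ops, lfs_ops
```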
huggingface_hub/_commit_scheduler.py
CHANGED

@@ -205,13 +205,10 @@ class CommitScheduler:
         """
         Push folder to the Hub and return the commit info.

-        <Tip warning={true}>
-
-        This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
-        queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
-        issues.
-
-        </Tip>
+        > [!WARNING]
+        > This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
+        > queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
+        > issues.

         The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and
         uploads only changed files. If no changes are found, the method returns without committing anything. If you want
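For context, the scheduler is driven by constructing it and letting the background loop invoke `push_to_hub`; a minimal usage sketch (repo name hypothetical):

```python
from huggingface_hub import CommitScheduler

# The background loop calls push_to_hub() through a queue every `every` minutes;
# user code only writes files into the watched folder.
scheduler = CommitScheduler(
    repo_id="username/my-dataset",  # hypothetical repo
    repo_type="dataset",
    folder_path="data/",
    every=5,  # minutes between background commits
)
```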
huggingface_hub/_login.py
CHANGED

@@ -70,21 +70,15 @@ def login(
     To log in from outside of a script, one can also use `hf auth login` which is
     a cli command that wraps [`login`].

-    <Tip>
-
-    [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
-    extends its capabilities.
-
-    </Tip>
-
-    <Tip>
-
-    When the token is not passed, [`login`] will automatically detect if the script runs
-    in a notebook or not. However, this detection might not be accurate due to the
-    variety of notebooks that exists nowadays. If that is the case, you can always force
-    the UI by using [`notebook_login`] or [`interpreter_login`].
-
-    </Tip>
+    > [!TIP]
+    > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
+    > extends its capabilities.
+
+    > [!TIP]
+    > When the token is not passed, [`login`] will automatically detect if the script runs
+    > in a notebook or not. However, this detection might not be accurate due to the
+    > variety of notebooks that exists nowadays. If that is the case, you can always force
+    > the UI by using [`notebook_login`] or [`interpreter_login`].

     Args:
         token (`str`, *optional*):
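The admonition is converted to plain-markdown form; behavior is unchanged. For reference, the two call styles the docstring describes:

```python
from huggingface_hub import login

login()  # interactive: detects notebook vs. terminal and prompts accordingly
login(token="hf_xxx")  # non-interactive, e.g. in CI (placeholder token)
```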
huggingface_hub/_tensorboard_logger.py
CHANGED

@@ -52,11 +52,8 @@ class HFSummaryWriter(_RuntimeSummaryWriter):
     issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
     minutes (default to every 5 minutes).

-    <Tip warning={true}>
-
-    `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
-
-    </Tip>
+    > [!WARNING]
+    > `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.

     Args:
         repo_id (`str`):
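Doc-only change again. A minimal example of the experimental writer (requires `tensorboardX`; repo name hypothetical):

```python
from huggingface_hub import HFSummaryWriter

logger = HFSummaryWriter(repo_id="username/my-logs", commit_every=5)
logger.add_scalar("train/loss", 0.42, global_step=1)  # standard SummaryWriter API
```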
huggingface_hub/_webhooks_server.py
CHANGED

@@ -53,17 +53,11 @@ class WebhooksServer:
     Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
     WebhooksServer and deploy it on a Space.

-    <Tip warning={true}>
+    > [!WARNING]
+    > `WebhooksServer` is experimental. Its API is subject to change in the future.

-    `WebhooksServer` is experimental. Its API is subject to change in the future.
-
-    </Tip>
-
-    <Tip warning={true}>
-
-    You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
-
-    </Tip>
+    > [!WARNING]
+    > You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).

     Args:
         ui (`gradio.Blocks`, optional):
@@ -240,17 +234,11 @@ def webhook_endpoint(path: Optional[str] = None) -> Callable:
     Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
     server and deploy it on a Space.

-    <Tip warning={true}>
-
-    `webhook_endpoint` is experimental. Its API is subject to change in the future.
-
-    </Tip>
-
-    <Tip warning={true}>
-
-    You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
+    > [!WARNING]
+    > `webhook_endpoint` is experimental. Its API is subject to change in the future.

-    </Tip>
+    > [!WARNING]
+    > You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).

     Args:
         path (`str`, optional):
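The decorator itself is unchanged; it is used like this (handler body illustrative, adapted from the webhooks guide):

```python
from huggingface_hub import WebhookPayload, webhook_endpoint


@webhook_endpoint
async def trigger_training(payload: WebhookPayload) -> None:
    # React only to updates on dataset repos.
    if payload.repo.type == "dataset" and payload.event.action == "update":
        print(f"Dataset {payload.repo.name} updated - trigger a training job here")
```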
huggingface_hub/cli/__init__.py
CHANGED

@@ -11,17 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-from abc import ABC, abstractmethod
-from argparse import _SubParsersAction
-
-
-class BaseHuggingfaceCLICommand(ABC):
-    @staticmethod
-    @abstractmethod
-    def register_subcommand(parser: _SubParsersAction):
-        raise NotImplementedError()
-
-    @abstractmethod
-    def run(self):
-        raise NotImplementedError()
huggingface_hub/cli/_cli_utils.py
CHANGED

@@ -11,10 +11,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Contains a utility for good-looking prints."""
+"""Contains CLI utilities (styling, helpers)."""

 import os
-from typing import Union
+from enum import Enum
+from typing import Annotated, Optional, Union
+
+import click
+import typer
+
+from huggingface_hub import __version__
+from huggingface_hub.hf_api import HfApi


 class ANSI:
@@ -67,3 +74,73 @@ def tabulate(rows: list[list[Union[str, int]]], headers: list[str]) -> str:
     for row in rows:
         lines.append(row_format.format(*row))
     return "\n".join(lines)
+
+
+#### TYPER UTILS
+
+
+class AlphabeticalMixedGroup(typer.core.TyperGroup):
+    """
+    Typer Group that lists commands and sub-apps mixed and alphabetically.
+    """
+
+    def list_commands(self, ctx: click.Context) -> list[str]:  # type: ignore[name-defined]
+        # click.Group stores both commands and sub-groups in `self.commands`
+        return sorted(self.commands.keys())
+
+
+def typer_factory(help: str) -> typer.Typer:
+    return typer.Typer(
+        help=help,
+        add_completion=True,
+        rich_markup_mode=None,
+        no_args_is_help=True,
+        cls=AlphabeticalMixedGroup,
+    )
+
+
+class RepoType(str, Enum):
+    model = "model"
+    dataset = "dataset"
+    space = "space"
+
+
+RepoIdArg = Annotated[
+    str,
+    typer.Argument(
+        help="The ID of the repo (e.g. `username/repo-name`).",
+    ),
+]
+
+
+RepoTypeOpt = Annotated[
+    RepoType,
+    typer.Option(
+        help="The type of repository (model, dataset, or space).",
+    ),
+]
+
+TokenOpt = Annotated[
+    Optional[str],
+    typer.Option(
+        help="A User Access Token generated from https://huggingface.co/settings/tokens.",
+    ),
+]
+
+PrivateOpt = Annotated[
+    bool,
+    typer.Option(
+        help="Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already exists.",
+    ),
+]
+
+RevisionOpt = Annotated[
+    Optional[str],
+    typer.Option(
+        help="Git revision id which can be a branch name, a tag, or a commit hash.",
+    ),
+]
+
+
+def get_hf_api(token: Optional[str] = None) -> HfApi:
+    return HfApi(token=token, library_name="hf", library_version=__version__)