huggingface-hub 0.33.4__py3-none-any.whl → 0.34.0rc0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- huggingface_hub/__init__.py +47 -1
- huggingface_hub/_commit_api.py +21 -28
- huggingface_hub/_jobs_api.py +145 -0
- huggingface_hub/_local_folder.py +7 -1
- huggingface_hub/_login.py +5 -5
- huggingface_hub/_oauth.py +1 -1
- huggingface_hub/_snapshot_download.py +11 -6
- huggingface_hub/_upload_large_folder.py +46 -23
- huggingface_hub/cli/__init__.py +27 -0
- huggingface_hub/cli/_cli_utils.py +69 -0
- huggingface_hub/cli/auth.py +210 -0
- huggingface_hub/cli/cache.py +405 -0
- huggingface_hub/cli/download.py +181 -0
- huggingface_hub/cli/hf.py +66 -0
- huggingface_hub/cli/jobs.py +522 -0
- huggingface_hub/cli/lfs.py +198 -0
- huggingface_hub/cli/repo.py +243 -0
- huggingface_hub/cli/repo_files.py +128 -0
- huggingface_hub/cli/system.py +52 -0
- huggingface_hub/cli/upload.py +316 -0
- huggingface_hub/cli/upload_large_folder.py +132 -0
- huggingface_hub/commands/_cli_utils.py +5 -0
- huggingface_hub/commands/delete_cache.py +3 -1
- huggingface_hub/commands/download.py +4 -0
- huggingface_hub/commands/env.py +3 -0
- huggingface_hub/commands/huggingface_cli.py +2 -0
- huggingface_hub/commands/repo.py +4 -0
- huggingface_hub/commands/repo_files.py +4 -0
- huggingface_hub/commands/scan_cache.py +3 -1
- huggingface_hub/commands/tag.py +3 -1
- huggingface_hub/commands/upload.py +4 -0
- huggingface_hub/commands/upload_large_folder.py +3 -1
- huggingface_hub/commands/user.py +11 -1
- huggingface_hub/commands/version.py +3 -0
- huggingface_hub/constants.py +1 -0
- huggingface_hub/file_download.py +16 -5
- huggingface_hub/hf_api.py +519 -7
- huggingface_hub/hf_file_system.py +8 -16
- huggingface_hub/hub_mixin.py +3 -3
- huggingface_hub/inference/_client.py +38 -39
- huggingface_hub/inference/_common.py +44 -14
- huggingface_hub/inference/_generated/_async_client.py +50 -51
- huggingface_hub/inference/_generated/types/__init__.py +1 -0
- huggingface_hub/inference/_generated/types/image_to_video.py +60 -0
- huggingface_hub/inference/_mcp/cli.py +36 -18
- huggingface_hub/inference/_mcp/constants.py +8 -0
- huggingface_hub/inference/_mcp/types.py +3 -0
- huggingface_hub/inference/_providers/__init__.py +4 -1
- huggingface_hub/inference/_providers/_common.py +3 -6
- huggingface_hub/inference/_providers/fal_ai.py +85 -42
- huggingface_hub/inference/_providers/hf_inference.py +17 -9
- huggingface_hub/inference/_providers/replicate.py +19 -1
- huggingface_hub/keras_mixin.py +2 -2
- huggingface_hub/repocard.py +1 -1
- huggingface_hub/repository.py +2 -2
- huggingface_hub/utils/_auth.py +1 -1
- huggingface_hub/utils/_cache_manager.py +2 -2
- huggingface_hub/utils/_dotenv.py +51 -0
- huggingface_hub/utils/_headers.py +1 -1
- huggingface_hub/utils/_runtime.py +1 -1
- huggingface_hub/utils/_xet.py +6 -2
- huggingface_hub/utils/_xet_progress_reporting.py +141 -0
- {huggingface_hub-0.33.4.dist-info → huggingface_hub-0.34.0rc0.dist-info}/METADATA +7 -8
- {huggingface_hub-0.33.4.dist-info → huggingface_hub-0.34.0rc0.dist-info}/RECORD +68 -51
- {huggingface_hub-0.33.4.dist-info → huggingface_hub-0.34.0rc0.dist-info}/entry_points.txt +1 -0
- {huggingface_hub-0.33.4.dist-info → huggingface_hub-0.34.0rc0.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.33.4.dist-info → huggingface_hub-0.34.0rc0.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.33.4.dist-info → huggingface_hub-0.34.0rc0.dist-info}/top_level.txt +0 -0
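Taken together, the changed files show four headline items for 0.34.0rc0: a new Jobs API (`run_job`, `run_uv_job`, `list_jobs`, `inspect_job`, `fetch_job_logs`, `cancel_job`), a new `hf` CLI implemented under `huggingface_hub/cli/` with its own entry point (the legacy `huggingface_hub/commands/` modules are kept), a rewritten Xet upload progress reporter (`utils/_xet_progress_reporting.py`), and image-to-video inference support (`inference/_generated/types/image_to_video.py` plus fal_ai and replicate provider updates).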
huggingface_hub/__init__.py
CHANGED
@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING


-__version__ = "0.33.4"
+__version__ = "0.34.0.rc0"

 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when
@@ -62,6 +62,12 @@ _SUBMOD_ATTRS = {
         "InferenceEndpointTimeoutError",
         "InferenceEndpointType",
     ],
+    "_jobs_api": [
+        "JobInfo",
+        "JobOwner",
+        "JobStage",
+        "JobStatus",
+    ],
     "_login": [
         "auth_list",
         "auth_switch",
@@ -165,6 +171,7 @@ _SUBMOD_ATTRS = {
         "add_space_variable",
         "auth_check",
         "cancel_access_request",
+        "cancel_job",
         "change_discussion_status",
         "comment_discussion",
         "create_branch",
@@ -194,6 +201,7 @@ _SUBMOD_ATTRS = {
         "duplicate_space",
         "edit_discussion_comment",
         "enable_webhook",
+        "fetch_job_logs",
         "file_exists",
         "get_collection",
         "get_dataset_tags",
@@ -210,11 +218,13 @@ _SUBMOD_ATTRS = {
         "get_user_overview",
         "get_webhook",
         "grant_access",
+        "inspect_job",
         "list_accepted_access_requests",
         "list_collections",
         "list_datasets",
         "list_inference_catalog",
         "list_inference_endpoints",
+        "list_jobs",
         "list_lfs_files",
         "list_liked_repos",
         "list_models",
@@ -251,6 +261,8 @@ _SUBMOD_ATTRS = {
         "resume_inference_endpoint",
         "revision_exists",
         "run_as_future",
+        "run_job",
+        "run_uv_job",
         "scale_to_zero_inference_endpoint",
         "set_space_sleep_time",
         "space_info",
@@ -360,6 +372,10 @@ _SUBMOD_ATTRS = {
         "ImageToTextInput",
         "ImageToTextOutput",
         "ImageToTextParameters",
+        "ImageToVideoInput",
+        "ImageToVideoOutput",
+        "ImageToVideoParameters",
+        "ImageToVideoTargetSize",
         "ObjectDetectionBoundingBox",
         "ObjectDetectionInput",
         "ObjectDetectionOutputElement",
@@ -648,6 +664,10 @@ __all__ = [
     "ImageToTextInput",
     "ImageToTextOutput",
     "ImageToTextParameters",
+    "ImageToVideoInput",
+    "ImageToVideoOutput",
+    "ImageToVideoParameters",
+    "ImageToVideoTargetSize",
     "InferenceApi",
     "InferenceClient",
     "InferenceEndpoint",
@@ -656,6 +676,10 @@ __all__ = [
     "InferenceEndpointTimeoutError",
     "InferenceEndpointType",
     "InferenceTimeoutError",
+    "JobInfo",
+    "JobOwner",
+    "JobStage",
+    "JobStatus",
     "KerasModelHubMixin",
     "MCPClient",
     "ModelCard",
@@ -792,6 +816,7 @@ __all__ = [
     "auth_switch",
     "cached_assets_path",
     "cancel_access_request",
+    "cancel_job",
     "change_discussion_status",
     "comment_discussion",
     "configure_http_backend",
@@ -825,6 +850,7 @@ __all__ = [
     "enable_webhook",
     "export_entries_as_dduf",
     "export_folder_as_dduf",
+    "fetch_job_logs",
     "file_exists",
     "from_pretrained_fastai",
     "from_pretrained_keras",
@@ -851,12 +877,14 @@ __all__ = [
     "grant_access",
     "hf_hub_download",
     "hf_hub_url",
+    "inspect_job",
     "interpreter_login",
     "list_accepted_access_requests",
     "list_collections",
     "list_datasets",
     "list_inference_catalog",
     "list_inference_endpoints",
+    "list_jobs",
     "list_lfs_files",
     "list_liked_repos",
     "list_models",
@@ -907,6 +935,8 @@ __all__ = [
     "resume_inference_endpoint",
     "revision_exists",
     "run_as_future",
+    "run_job",
+    "run_uv_job",
     "save_pretrained_keras",
     "save_torch_model",
     "save_torch_state_dict",
@@ -1044,6 +1074,12 @@ if TYPE_CHECKING:  # pragma: no cover
         InferenceEndpointTimeoutError,  # noqa: F401
         InferenceEndpointType,  # noqa: F401
     )
+    from ._jobs_api import (
+        JobInfo,  # noqa: F401
+        JobOwner,  # noqa: F401
+        JobStage,  # noqa: F401
+        JobStatus,  # noqa: F401
+    )
     from ._login import (
         auth_list,  # noqa: F401
         auth_switch,  # noqa: F401
@@ -1143,6 +1179,7 @@ if TYPE_CHECKING:  # pragma: no cover
         add_space_variable,  # noqa: F401
         auth_check,  # noqa: F401
         cancel_access_request,  # noqa: F401
+        cancel_job,  # noqa: F401
         change_discussion_status,  # noqa: F401
         comment_discussion,  # noqa: F401
         create_branch,  # noqa: F401
@@ -1172,6 +1209,7 @@ if TYPE_CHECKING:  # pragma: no cover
         duplicate_space,  # noqa: F401
         edit_discussion_comment,  # noqa: F401
         enable_webhook,  # noqa: F401
+        fetch_job_logs,  # noqa: F401
         file_exists,  # noqa: F401
         get_collection,  # noqa: F401
         get_dataset_tags,  # noqa: F401
@@ -1188,11 +1226,13 @@ if TYPE_CHECKING:  # pragma: no cover
         get_user_overview,  # noqa: F401
         get_webhook,  # noqa: F401
         grant_access,  # noqa: F401
+        inspect_job,  # noqa: F401
         list_accepted_access_requests,  # noqa: F401
         list_collections,  # noqa: F401
         list_datasets,  # noqa: F401
         list_inference_catalog,  # noqa: F401
         list_inference_endpoints,  # noqa: F401
+        list_jobs,  # noqa: F401
         list_lfs_files,  # noqa: F401
         list_liked_repos,  # noqa: F401
         list_models,  # noqa: F401
@@ -1229,6 +1269,8 @@ if TYPE_CHECKING:  # pragma: no cover
         resume_inference_endpoint,  # noqa: F401
         revision_exists,  # noqa: F401
         run_as_future,  # noqa: F401
+        run_job,  # noqa: F401
+        run_uv_job,  # noqa: F401
         scale_to_zero_inference_endpoint,  # noqa: F401
         set_space_sleep_time,  # noqa: F401
         space_info,  # noqa: F401
@@ -1336,6 +1378,10 @@ if TYPE_CHECKING:  # pragma: no cover
         ImageToTextInput,  # noqa: F401
         ImageToTextOutput,  # noqa: F401
         ImageToTextParameters,  # noqa: F401
+        ImageToVideoInput,  # noqa: F401
+        ImageToVideoOutput,  # noqa: F401
+        ImageToVideoParameters,  # noqa: F401
+        ImageToVideoTargetSize,  # noqa: F401
         ObjectDetectionBoundingBox,  # noqa: F401
         ObjectDetectionInput,  # noqa: F401
         ObjectDetectionOutputElement,  # noqa: F401
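All of the new exports above come from `hf_api` and the new `_jobs_api` module. A minimal sketch of the Jobs workflow, based on the `run_job` example in the `JobInfo` docstring further down; treat the `fetch_job_logs(job_id=...)` signature as an assumption, since this diff only shows the exported name:

    from huggingface_hub import fetch_job_logs, run_job

    # Run a command in a Docker image on Hugging Face infrastructure
    # (parameters taken from the JobInfo docstring example below).
    job = run_job(
        image="python:3.12",
        command=["python", "-c", "print('Hello from the cloud!')"],
    )
    print(job.url, job.status.stage)

    # Assumed signature: stream the logs of the job by its id.
    for line in fetch_job_logs(job_id=job.id):
        print(line)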
huggingface_hub/_commit_api.py
CHANGED
@@ -4,7 +4,6 @@ Type definitions and utilities for the `create_commit` API

 import base64
 import io
-import math
 import os
 import warnings
 from collections import defaultdict
@@ -23,6 +22,7 @@ from .lfs import UploadInfo, lfs_upload, post_lfs_batch_info
 from .utils import (
     FORBIDDEN_FOLDERS,
     XetTokenType,
+    are_progress_bars_disabled,
     chunk_iterable,
     fetch_xet_connection_info_from_repo_info,
     get_session,
@@ -33,7 +33,6 @@ from .utils import (
     validate_hf_hub_args,
 )
 from .utils import tqdm as hf_tqdm
-from .utils.tqdm import _get_progress_bar_context


 if TYPE_CHECKING:
@@ -529,9 +528,12 @@ def _upload_xet_files(
     """
     if len(additions) == 0:
         return
+
     # at this point, we know that hf_xet is installed
     from hf_xet import upload_bytes, upload_files

+    from .utils._xet_progress_reporting import XetProgressReporter
+
     try:
         xet_connection_info = fetch_xet_connection_info_from_repo_info(
             token_type=XetTokenType.WRITE,
@@ -567,32 +569,18 @@ def _upload_xet_files(
             raise XetRefreshTokenError("Failed to refresh xet token")
         return new_xet_connection.access_token, new_xet_connection.expiration_unix_epoch

-    num_chunks = math.ceil(len(additions) / UPLOAD_BATCH_MAX_NUM_FILES)
-    num_chunks_num_digits = int(math.log10(num_chunks)) + 1
-    for i, chunk in enumerate(chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES)):
-        _chunk = [op for op in chunk]
-
-        bytes_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, bytes)]
-        paths_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, (str, Path))]
-        expected_size = sum(op.upload_info.size for op in bytes_ops + paths_ops)
+    if not are_progress_bars_disabled():
+        progress = XetProgressReporter()
+        progress_callback = progress.update_progress
+    else:
+        progress, progress_callback = None, None

-        if num_chunks > 1:
-            description = f"Uploading Batch [{str(i + 1).zfill(num_chunks_num_digits)}/{num_chunks}]..."
-        else:
-            description = "Uploading..."
-        progress_cm = _get_progress_bar_context(
-            desc=description,
-            total=expected_size,
-            initial=0,
-            unit="B",
-            unit_scale=True,
-            name="huggingface_hub.xet_put",
-            log_level=logger.getEffectiveLevel(),
-        )
-        with progress_cm as progress:
+    try:
+        for i, chunk in enumerate(chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES)):
+            _chunk = [op for op in chunk]

-            def update_progress(increment: int):
-                progress.update(increment)
+            bytes_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, bytes)]
+            paths_ops = [op for op in _chunk if isinstance(op.path_or_fileobj, (str, Path))]

             if len(paths_ops) > 0:
                 upload_files(
@@ -600,7 +588,7 @@ def _upload_xet_files(
                     xet_endpoint,
                     access_token_info,
                     token_refresher,
-                    update_progress,
+                    progress_callback,
                     repo_type,
                 )
             if len(bytes_ops) > 0:
@@ -609,9 +597,14 @@ def _upload_xet_files(
                     xet_endpoint,
                     access_token_info,
                     token_refresher,
-                    update_progress,
+                    progress_callback,
                     repo_type,
                 )
+
+    finally:
+        if progress is not None:
+            progress.close(False)
+
     return
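The per-batch tqdm bars are replaced by one long-lived reporter: its bound `update_progress` method is handed to `hf_xet`'s `upload_files`/`upload_bytes` as the progress callback (or `None` when progress bars are disabled), and it is closed exactly once via `try`/`finally`. A sketch of the interface the new code relies on; the payload passed to `update_progress` is an assumption, only the two method names appear in this diff:

    class NoOpXetProgress:
        # Minimal stand-in mirroring how _upload_xet_files drives the reporter.

        def update_progress(self, update):
            # Invoked by hf_xet during upload_files/upload_bytes with progress
            # updates (exact payload type is not shown in this diff).
            pass

        def close(self, success: bool) -> None:
            # Called exactly once in the `finally` block, even on failure;
            # the diff above calls progress.close(False).
            pass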
huggingface_hub/_jobs_api.py
ADDED
@@ -0,0 +1,145 @@
+# coding=utf-8
+# Copyright 2025-present, the HuggingFace Inc. team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from dataclasses import dataclass
+from datetime import datetime
+from enum import Enum
+from typing import Any, Dict, List, Optional
+
+from huggingface_hub import constants
+from huggingface_hub._space_api import SpaceHardware
+from huggingface_hub.utils._datetime import parse_datetime
+
+
+class JobStage(str, Enum):
+    """
+    Enumeration of the possible stages of a Job on the Hub.
+
+    Value can be compared to a string:
+    ```py
+    assert JobStage.COMPLETED == "COMPLETED"
+    ```
+
+    Taken from https://github.com/huggingface/moon-landing/blob/main/server/job_types/JobInfo.ts#L61 (private url).
+    """
+
+    # Copied from moon-landing > server > lib > Job.ts
+    COMPLETED = "COMPLETED"
+    CANCELED = "CANCELED"
+    ERROR = "ERROR"
+    DELETED = "DELETED"
+    RUNNING = "RUNNING"
+
+
+@dataclass
+class JobStatus:
+    stage: JobStage
+    message: Optional[str]
+
+    def __init__(self, **kwargs) -> None:
+        self.stage = kwargs["stage"]
+        self.message = kwargs.get("message")
+
+
+@dataclass
+class JobOwner:
+    id: str
+    name: str
+
+
+@dataclass
+class JobInfo:
+    """
+    Contains information about a Job.
+
+    Args:
+        id (`str`):
+            Job ID.
+        created_at (`datetime` or `None`):
+            When the Job was created.
+        docker_image (`str` or `None`):
+            The Docker image from Docker Hub used for the Job.
+            Can be None if space_id is present instead.
+        space_id (`str` or `None`):
+            The Docker image from Hugging Face Spaces used for the Job.
+            Can be None if docker_image is present instead.
+        command (`List[str]` or `None`):
+            Command of the Job, e.g. `["python", "-c", "print('hello world')"]`
+        arguments (`List[str]` or `None`):
+            Arguments passed to the command.
+        environment (`Dict[str, Any]` or `None`):
+            Environment variables of the Job as a dictionary.
+        secrets (`Dict[str, Any]` or `None`):
+            Secret environment variables of the Job (encrypted).
+        flavor (`str` or `None`):
+            Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
+            E.g. `"cpu-basic"`.
+        status (`JobStatus` or `None`):
+            Status of the Job, e.g. `JobStatus(stage="RUNNING", message=None)`.
+            See [`JobStage`] for possible stage values.
+        owner (`JobOwner` or `None`):
+            Owner of the Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq")`.
+
+    Example:
+
+    ```python
+    >>> from huggingface_hub import run_job
+    >>> job = run_job(
+    ...     image="python:3.12",
+    ...     command=["python", "-c", "print('Hello from the cloud!')"]
+    ... )
+    >>> job
+    JobInfo(id='687fb701029421ae5549d998', created_at=datetime.datetime(2025, 7, 22, 16, 6, 25, 79000, tzinfo=datetime.timezone.utc), docker_image='python:3.12', space_id=None, command=['python', '-c', "print('Hello from the cloud!')"], arguments=[], environment={}, secrets={}, flavor='cpu-basic', status=JobStatus(stage='RUNNING', message=None), owner=JobOwner(id='5e9ecfc04957053f60648a3e', name='lhoestq'), endpoint='https://huggingface.co', url='https://huggingface.co/jobs/lhoestq/687fb701029421ae5549d998')
+    >>> job.id
+    '687fb701029421ae5549d998'
+    >>> job.url
+    'https://huggingface.co/jobs/lhoestq/687fb701029421ae5549d998'
+    >>> job.status.stage
+    'RUNNING'
+    ```
+    """
+
+    id: str
+    created_at: Optional[datetime]
+    docker_image: Optional[str]
+    space_id: Optional[str]
+    command: Optional[List[str]]
+    arguments: Optional[List[str]]
+    environment: Optional[Dict[str, Any]]
+    secrets: Optional[Dict[str, Any]]
+    flavor: Optional[SpaceHardware]
+    status: Optional[JobStatus]
+    owner: Optional[JobOwner]
+
+    # Inferred fields
+    endpoint: str
+    url: str
+
+    def __init__(self, **kwargs) -> None:
+        self.id = kwargs["id"]
+        created_at = kwargs.get("createdAt") or kwargs.get("created_at")
+        self.created_at = parse_datetime(created_at) if created_at else None
+        self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image")
+        self.space_id = kwargs.get("spaceId") or kwargs.get("space_id")
+        self.owner = JobOwner(**(kwargs["owner"] if isinstance(kwargs.get("owner"), dict) else {}))
+        self.command = kwargs.get("command")
+        self.arguments = kwargs.get("arguments")
+        self.environment = kwargs.get("environment")
+        self.secrets = kwargs.get("secrets")
+        self.flavor = kwargs.get("flavor")
+        self.status = JobStatus(**(kwargs["status"] if isinstance(kwargs.get("status"), dict) else {}))
+
+        # Inferred fields
+        self.endpoint = kwargs.get("endpoint", constants.ENDPOINT)
+        self.url = f"{self.endpoint}/jobs/{self.owner.name}/{self.id}"
huggingface_hub/_local_folder.py
CHANGED
@@ -86,7 +86,13 @@ class LocalDownloadFilePaths:

     def incomplete_path(self, etag: str) -> Path:
         """Return the path where a file will be temporarily downloaded before being moved to `file_path`."""
-        return self.metadata_path.with_suffix(f".{etag}.incomplete")
+        path = self.metadata_path.parent / f"{_short_hash(self.metadata_path.name)}.{etag}.incomplete"
+        resolved_path = str(path.resolve())
+        # Some Windows versions do not allow for paths longer than 255 characters.
+        # In this case, we must specify it as an extended path by using the "\\?\" prefix.
+        if len(resolved_path) > 255 and not resolved_path.startswith("\\\\?\\"):
+            path = Path("\\\\?\\" + resolved_path)
+        return path


 @dataclass(frozen=True)
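The fix is transparent to callers. A short sketch of where the fixed path surfaces, via `get_local_download_paths` (defined elsewhere in this module):

    from pathlib import Path

    from huggingface_hub._local_folder import get_local_download_paths

    paths = get_local_download_paths(Path("./my-model"), "model.safetensors")
    tmp = paths.incomplete_path(etag="abc123")  # hypothetical etag value
    # On Windows, if the resolved path exceeds 255 characters, `tmp` now
    # carries the "\\?\" extended-path prefix instead of failing at open time.
    print(tmp)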
huggingface_hub/_login.py
CHANGED
@@ -75,7 +75,7 @@ def login(
     components. If `token` is not provided, it will be prompted to the user either with
     a widget (in a notebook) or via the terminal.

-    To log in from outside of a script, one can also use `huggingface-cli login` which is
+    To log in from outside of a script, one can also use `hf auth login` which is
     a cli command that wraps [`login`].

     <Tip>
@@ -120,7 +120,7 @@ def login(
            logger.info(
                "The token has not been saved to the git credentials helper. Pass "
                "`add_to_git_credential=True` in this function directly or "
-                "`--add-to-git-credential` if using via `huggingface-cli` if "
+                "`--add-to-git-credential` if using via `hf` CLI if "
                "you want to set the git credential as well."
            )
        _login(token, add_to_git_credential=add_to_git_credential)
@@ -233,7 +233,7 @@ def auth_list() -> None:
        )
    elif current_token_name is None:
        logger.warning(
-            "\nNote: No active token is set and no environment variable `HF_TOKEN` is found. Use `huggingface-cli login` to log in."
+            "\nNote: No active token is set and no environment variable `HF_TOKEN` is found. Use `hf auth login` to log in."
        )


@@ -273,8 +273,8 @@ def interpreter_login(*, new_session: bool = True, write_permission: bool = False) -> None:
    print(_HF_LOGO_ASCII)
    if get_token() is not None:
        logger.info(
-            " A token is already saved on your machine. Run `huggingface-cli"
-            " whoami` to get more information or `huggingface-cli logout` if you want"
+            " A token is already saved on your machine. Run `hf auth whoami`"
+            " to get more information or `hf auth logout` if you want"
            " to log out."
        )
        logger.info(" Setting a new token will erase the existing one.")
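Every user-facing message now points at the new `hf` CLI (`hf auth login`, `hf auth whoami`, `hf auth logout`) instead of `huggingface-cli`. The programmatic path is unchanged: per the docstring above, `hf auth login` still wraps the same [`login`] helper, e.g.:

    from huggingface_hub import login

    # Script/notebook equivalent of `hf auth login`; the token value is a placeholder.
    # add_to_git_credential=True mirrors the CLI's `--add-to-git-credential` flag.
    login(token="hf_xxx", add_to_git_credential=False)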
huggingface_hub/_oauth.py
CHANGED
@@ -415,7 +415,7 @@ def _get_mocked_oauth_info() -> Dict:
    if token is None:
        raise ValueError(
            "Your machine must be logged in to HF to debug an OAuth app locally. Please"
-            " run `huggingface-cli login` or set `HF_TOKEN` as environment variable "
+            " run `hf auth login` or set `HF_TOKEN` as environment variable "
            "with one of your access token. You can generate a new token in your "
            "settings page (https://huggingface.co/settings/tokens)."
        )
huggingface_hub/_snapshot_download.py
CHANGED
@@ -254,14 +254,19 @@
    # At this stage, internet connection is up and running
    # => let's download the files!
    assert repo_info.sha is not None, "Repo info returned from server must have a revision sha."
-    assert repo_info.siblings is not None, "Repo info returned from server must have a siblings list."

    # Corner case: on very large repos, the siblings list in `repo_info` might not contain all files.
    # In that case, we need to use the `list_repo_tree` method to prevent caching issues.
-    repo_files: Iterable[str] = [f.rfilename for f in repo_info.siblings]
-    has_many_files = len(repo_info.siblings) > VERY_LARGE_REPO_THRESHOLD
-    if has_many_files:
-        logger.info("The repo has a large number of files. Using `list_repo_tree` to ensure all files are listed.")
+    repo_files: Iterable[str] = [f.rfilename for f in repo_info.siblings] if repo_info.siblings is not None else []
+    unreliable_nb_files = (
+        repo_info.siblings is None
+        or len(repo_info.siblings) == 0
+        or len(repo_info.siblings) > VERY_LARGE_REPO_THRESHOLD
+    )
+    if unreliable_nb_files:
+        logger.info(
+            "Number of files in the repo is unreliable. Using `list_repo_tree` to ensure all files are listed."
+        )
        repo_files = (
            f.rfilename
            for f in api.list_repo_tree(repo_id=repo_id, recursive=True, revision=revision, repo_type=repo_type)
@@ -274,7 +279,7 @@ def snapshot_download(
        ignore_patterns=ignore_patterns,
    )

-    if not has_many_files:
+    if not unreliable_nb_files:
        filtered_repo_files = list(filtered_repo_files)
        tqdm_desc = f"Fetching {len(filtered_repo_files)} files"
    else: