huggingface-hub 0.31.0rc0__py3-none-any.whl → 1.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- huggingface_hub/__init__.py +145 -46
- huggingface_hub/_commit_api.py +168 -119
- huggingface_hub/_commit_scheduler.py +15 -15
- huggingface_hub/_inference_endpoints.py +15 -12
- huggingface_hub/_jobs_api.py +301 -0
- huggingface_hub/_local_folder.py +18 -3
- huggingface_hub/_login.py +31 -63
- huggingface_hub/_oauth.py +460 -0
- huggingface_hub/_snapshot_download.py +239 -80
- huggingface_hub/_space_api.py +5 -5
- huggingface_hub/_tensorboard_logger.py +15 -19
- huggingface_hub/_upload_large_folder.py +172 -76
- huggingface_hub/_webhooks_payload.py +3 -3
- huggingface_hub/_webhooks_server.py +13 -25
- huggingface_hub/{commands → cli}/__init__.py +1 -15
- huggingface_hub/cli/_cli_utils.py +173 -0
- huggingface_hub/cli/auth.py +147 -0
- huggingface_hub/cli/cache.py +841 -0
- huggingface_hub/cli/download.py +189 -0
- huggingface_hub/cli/hf.py +60 -0
- huggingface_hub/cli/inference_endpoints.py +377 -0
- huggingface_hub/cli/jobs.py +772 -0
- huggingface_hub/cli/lfs.py +175 -0
- huggingface_hub/cli/repo.py +315 -0
- huggingface_hub/cli/repo_files.py +94 -0
- huggingface_hub/{commands/env.py → cli/system.py} +10 -13
- huggingface_hub/cli/upload.py +294 -0
- huggingface_hub/cli/upload_large_folder.py +117 -0
- huggingface_hub/community.py +20 -12
- huggingface_hub/constants.py +38 -53
- huggingface_hub/dataclasses.py +609 -0
- huggingface_hub/errors.py +80 -30
- huggingface_hub/fastai_utils.py +30 -41
- huggingface_hub/file_download.py +435 -351
- huggingface_hub/hf_api.py +2050 -1124
- huggingface_hub/hf_file_system.py +269 -152
- huggingface_hub/hub_mixin.py +43 -63
- huggingface_hub/inference/_client.py +347 -434
- huggingface_hub/inference/_common.py +133 -121
- huggingface_hub/inference/_generated/_async_client.py +397 -541
- huggingface_hub/inference/_generated/types/__init__.py +5 -1
- huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
- huggingface_hub/inference/_generated/types/base.py +10 -7
- huggingface_hub/inference/_generated/types/chat_completion.py +59 -23
- huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
- huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
- huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
- huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
- huggingface_hub/inference/_generated/types/image_to_image.py +6 -2
- huggingface_hub/inference/_generated/types/image_to_video.py +60 -0
- huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
- huggingface_hub/inference/_generated/types/summarization.py +2 -2
- huggingface_hub/inference/_generated/types/table_question_answering.py +5 -5
- huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
- huggingface_hub/inference/_generated/types/text_generation.py +10 -10
- huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
- huggingface_hub/inference/_generated/types/token_classification.py +2 -2
- huggingface_hub/inference/_generated/types/translation.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
- huggingface_hub/inference/_mcp/__init__.py +0 -0
- huggingface_hub/inference/_mcp/_cli_hacks.py +88 -0
- huggingface_hub/inference/_mcp/agent.py +100 -0
- huggingface_hub/inference/_mcp/cli.py +247 -0
- huggingface_hub/inference/_mcp/constants.py +81 -0
- huggingface_hub/inference/_mcp/mcp_client.py +395 -0
- huggingface_hub/inference/_mcp/types.py +45 -0
- huggingface_hub/inference/_mcp/utils.py +128 -0
- huggingface_hub/inference/_providers/__init__.py +82 -7
- huggingface_hub/inference/_providers/_common.py +129 -27
- huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
- huggingface_hub/inference/_providers/cerebras.py +1 -1
- huggingface_hub/inference/_providers/clarifai.py +13 -0
- huggingface_hub/inference/_providers/cohere.py +20 -3
- huggingface_hub/inference/_providers/fal_ai.py +183 -56
- huggingface_hub/inference/_providers/featherless_ai.py +38 -0
- huggingface_hub/inference/_providers/fireworks_ai.py +18 -0
- huggingface_hub/inference/_providers/groq.py +9 -0
- huggingface_hub/inference/_providers/hf_inference.py +69 -30
- huggingface_hub/inference/_providers/hyperbolic.py +4 -4
- huggingface_hub/inference/_providers/nebius.py +33 -5
- huggingface_hub/inference/_providers/novita.py +5 -5
- huggingface_hub/inference/_providers/nscale.py +44 -0
- huggingface_hub/inference/_providers/openai.py +3 -1
- huggingface_hub/inference/_providers/publicai.py +6 -0
- huggingface_hub/inference/_providers/replicate.py +31 -13
- huggingface_hub/inference/_providers/sambanova.py +18 -4
- huggingface_hub/inference/_providers/scaleway.py +28 -0
- huggingface_hub/inference/_providers/together.py +20 -5
- huggingface_hub/inference/_providers/wavespeed.py +138 -0
- huggingface_hub/inference/_providers/zai_org.py +17 -0
- huggingface_hub/lfs.py +33 -100
- huggingface_hub/repocard.py +34 -38
- huggingface_hub/repocard_data.py +57 -57
- huggingface_hub/serialization/__init__.py +0 -1
- huggingface_hub/serialization/_base.py +12 -15
- huggingface_hub/serialization/_dduf.py +8 -8
- huggingface_hub/serialization/_torch.py +69 -69
- huggingface_hub/utils/__init__.py +19 -8
- huggingface_hub/utils/_auth.py +7 -7
- huggingface_hub/utils/_cache_manager.py +92 -147
- huggingface_hub/utils/_chunk_utils.py +2 -3
- huggingface_hub/utils/_deprecation.py +1 -1
- huggingface_hub/utils/_dotenv.py +55 -0
- huggingface_hub/utils/_experimental.py +7 -5
- huggingface_hub/utils/_fixes.py +0 -10
- huggingface_hub/utils/_git_credential.py +5 -5
- huggingface_hub/utils/_headers.py +8 -30
- huggingface_hub/utils/_http.py +398 -239
- huggingface_hub/utils/_pagination.py +4 -4
- huggingface_hub/utils/_parsing.py +98 -0
- huggingface_hub/utils/_paths.py +5 -5
- huggingface_hub/utils/_runtime.py +61 -24
- huggingface_hub/utils/_safetensors.py +21 -21
- huggingface_hub/utils/_subprocess.py +9 -9
- huggingface_hub/utils/_telemetry.py +4 -4
- huggingface_hub/{commands/_cli_utils.py → utils/_terminal.py} +4 -4
- huggingface_hub/utils/_typing.py +25 -5
- huggingface_hub/utils/_validators.py +55 -74
- huggingface_hub/utils/_verification.py +167 -0
- huggingface_hub/utils/_xet.py +64 -17
- huggingface_hub/utils/_xet_progress_reporting.py +162 -0
- huggingface_hub/utils/insecure_hashlib.py +3 -5
- huggingface_hub/utils/logging.py +8 -11
- huggingface_hub/utils/tqdm.py +5 -4
- {huggingface_hub-0.31.0rc0.dist-info → huggingface_hub-1.1.3.dist-info}/METADATA +94 -85
- huggingface_hub-1.1.3.dist-info/RECORD +155 -0
- {huggingface_hub-0.31.0rc0.dist-info → huggingface_hub-1.1.3.dist-info}/WHEEL +1 -1
- huggingface_hub-1.1.3.dist-info/entry_points.txt +6 -0
- huggingface_hub/commands/delete_cache.py +0 -474
- huggingface_hub/commands/download.py +0 -200
- huggingface_hub/commands/huggingface_cli.py +0 -61
- huggingface_hub/commands/lfs.py +0 -200
- huggingface_hub/commands/repo_files.py +0 -128
- huggingface_hub/commands/scan_cache.py +0 -181
- huggingface_hub/commands/tag.py +0 -159
- huggingface_hub/commands/upload.py +0 -314
- huggingface_hub/commands/upload_large_folder.py +0 -129
- huggingface_hub/commands/user.py +0 -304
- huggingface_hub/commands/version.py +0 -37
- huggingface_hub/inference_api.py +0 -217
- huggingface_hub/keras_mixin.py +0 -500
- huggingface_hub/repository.py +0 -1477
- huggingface_hub/serialization/_tensorflow.py +0 -95
- huggingface_hub/utils/_hf_folder.py +0 -68
- huggingface_hub-0.31.0rc0.dist-info/RECORD +0 -135
- huggingface_hub-0.31.0rc0.dist-info/entry_points.txt +0 -6
- {huggingface_hub-0.31.0rc0.dist-info → huggingface_hub-1.1.3.dist-info/licenses}/LICENSE +0 -0
- {huggingface_hub-0.31.0rc0.dist-info → huggingface_hub-1.1.3.dist-info}/top_level.txt +0 -0
huggingface_hub/_jobs_api.py
ADDED

````python
# coding=utf-8
# Copyright 2025-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Any, Optional, Union

from huggingface_hub import constants
from huggingface_hub._space_api import SpaceHardware
from huggingface_hub.utils._datetime import parse_datetime


class JobStage(str, Enum):
    """
    Enumeration of possible stage of a Job on the Hub.

    Value can be compared to a string:
    ```py
    assert JobStage.COMPLETED == "COMPLETED"
    ```
    Possible values are: `COMPLETED`, `CANCELED`, `ERROR`, `DELETED`, `RUNNING`.
    Taken from https://github.com/huggingface/moon-landing/blob/main/server/job_types/JobInfo.ts#L61 (private url).
    """

    # Copied from moon-landing > server > lib > Job.ts
    COMPLETED = "COMPLETED"
    CANCELED = "CANCELED"
    ERROR = "ERROR"
    DELETED = "DELETED"
    RUNNING = "RUNNING"


@dataclass
class JobStatus:
    stage: JobStage
    message: Optional[str]


@dataclass
class JobOwner:
    id: str
    name: str
    type: str


@dataclass
class JobInfo:
    """
    Contains information about a Job.

    Args:
        id (`str`):
            Job ID.
        created_at (`datetime` or `None`):
            When the Job was created.
        docker_image (`str` or `None`):
            The Docker image from Docker Hub used for the Job.
            Can be None if space_id is present instead.
        space_id (`str` or `None`):
            The Docker image from Hugging Face Spaces used for the Job.
            Can be None if docker_image is present instead.
        command (`list[str]` or `None`):
            Command of the Job, e.g. `["python", "-c", "print('hello world')"]`
        arguments (`list[str]` or `None`):
            Arguments passed to the command
        environment (`dict[str]` or `None`):
            Environment variables of the Job as a dictionary.
        secrets (`dict[str]` or `None`):
            Secret environment variables of the Job (encrypted).
        flavor (`str` or `None`):
            Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
            E.g. `"cpu-basic"`.
        status: (`JobStatus` or `None`):
            Status of the Job, e.g. `JobStatus(stage="RUNNING", message=None)`
            See [`JobStage`] for possible stage values.
        owner: (`JobOwner` or `None`):
            Owner of the Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`

    Example:

    ```python
    >>> from huggingface_hub import run_job
    >>> job = run_job(
    ...     image="python:3.12",
    ...     command=["python", "-c", "print('Hello from the cloud!')"]
    ... )
    >>> job
    JobInfo(id='687fb701029421ae5549d998', created_at=datetime.datetime(2025, 7, 22, 16, 6, 25, 79000, tzinfo=datetime.timezone.utc), docker_image='python:3.12', space_id=None, command=['python', '-c', "print('Hello from the cloud!')"], arguments=[], environment={}, secrets={}, flavor='cpu-basic', status=JobStatus(stage='RUNNING', message=None), owner=JobOwner(id='5e9ecfc04957053f60648a3e', name='lhoestq', type='user'), endpoint='https://huggingface.co', url='https://huggingface.co/jobs/lhoestq/687fb701029421ae5549d998')
    >>> job.id
    '687fb701029421ae5549d998'
    >>> job.url
    'https://huggingface.co/jobs/lhoestq/687fb701029421ae5549d998'
    >>> job.status.stage
    'RUNNING'
    ```
    """

    id: str
    created_at: Optional[datetime]
    docker_image: Optional[str]
    space_id: Optional[str]
    command: Optional[list[str]]
    arguments: Optional[list[str]]
    environment: Optional[dict[str, Any]]
    secrets: Optional[dict[str, Any]]
    flavor: Optional[SpaceHardware]
    status: JobStatus
    owner: JobOwner

    # Inferred fields
    endpoint: str
    url: str

    def __init__(self, **kwargs) -> None:
        self.id = kwargs["id"]
        created_at = kwargs.get("createdAt") or kwargs.get("created_at")
        self.created_at = parse_datetime(created_at) if created_at else None
        self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image")
        self.space_id = kwargs.get("spaceId") or kwargs.get("space_id")
        owner = kwargs.get("owner", {})
        self.owner = JobOwner(id=owner["id"], name=owner["name"], type=owner["type"])
        self.command = kwargs.get("command")
        self.arguments = kwargs.get("arguments")
        self.environment = kwargs.get("environment")
        self.secrets = kwargs.get("secrets")
        self.flavor = kwargs.get("flavor")
        status = kwargs.get("status", {})
        self.status = JobStatus(stage=status["stage"], message=status.get("message"))

        # Inferred fields
        self.endpoint = kwargs.get("endpoint", constants.ENDPOINT)
        self.url = f"{self.endpoint}/jobs/{self.owner.name}/{self.id}"


@dataclass
class JobSpec:
    docker_image: Optional[str]
    space_id: Optional[str]
    command: Optional[list[str]]
    arguments: Optional[list[str]]
    environment: Optional[dict[str, Any]]
    secrets: Optional[dict[str, Any]]
    flavor: Optional[SpaceHardware]
    timeout: Optional[int]
    tags: Optional[list[str]]
    arch: Optional[str]

    def __init__(self, **kwargs) -> None:
        self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image")
        self.space_id = kwargs.get("spaceId") or kwargs.get("space_id")
        self.command = kwargs.get("command")
        self.arguments = kwargs.get("arguments")
        self.environment = kwargs.get("environment")
        self.secrets = kwargs.get("secrets")
        self.flavor = kwargs.get("flavor")
        self.timeout = kwargs.get("timeout")
        self.tags = kwargs.get("tags")
        self.arch = kwargs.get("arch")


@dataclass
class LastJobInfo:
    id: str
    at: datetime

    def __init__(self, **kwargs) -> None:
        self.id = kwargs["id"]
        self.at = parse_datetime(kwargs["at"])


@dataclass
class ScheduledJobStatus:
    last_job: Optional[LastJobInfo]
    next_job_run_at: Optional[datetime]

    def __init__(self, **kwargs) -> None:
        last_job = kwargs.get("lastJob") or kwargs.get("last_job")
        self.last_job = LastJobInfo(**last_job) if last_job else None
        next_job_run_at = kwargs.get("nextJobRunAt") or kwargs.get("next_job_run_at")
        self.next_job_run_at = parse_datetime(str(next_job_run_at)) if next_job_run_at else None


@dataclass
class ScheduledJobInfo:
    """
    Contains information about a Job.

    Args:
        id (`str`):
            Scheduled Job ID.
        created_at (`datetime` or `None`):
            When the scheduled Job was created.
        tags (`list[str]` or `None`):
            The tags of the scheduled Job.
        schedule (`str` or `None`):
            One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
            CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
        suspend (`bool` or `None`):
            Whether the scheduled job is suspended (paused).
        concurrency (`bool` or `None`):
            Whether multiple instances of this Job can run concurrently.
        status (`ScheduledJobStatus` or `None`):
            Status of the scheduled Job.
        owner: (`JobOwner` or `None`):
            Owner of the scheduled Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
        job_spec: (`JobSpec` or `None`):
            Specifications of the Job.

    Example:

    ```python
    >>> from huggingface_hub import run_job
    >>> scheduled_job = create_scheduled_job(
    ...     image="python:3.12",
    ...     command=["python", "-c", "print('Hello from the cloud!')"],
    ...     schedule="@hourly",
    ... )
    >>> scheduled_job.id
    '687fb701029421ae5549d999'
    >>> scheduled_job.status.next_job_run_at
    datetime.datetime(2025, 7, 22, 17, 6, 25, 79000, tzinfo=datetime.timezone.utc)
    ```
    """

    id: str
    created_at: Optional[datetime]
    job_spec: JobSpec
    schedule: Optional[str]
    suspend: Optional[bool]
    concurrency: Optional[bool]
    status: ScheduledJobStatus
    owner: JobOwner

    def __init__(self, **kwargs) -> None:
        self.id = kwargs["id"]
        created_at = kwargs.get("createdAt") or kwargs.get("created_at")
        self.created_at = parse_datetime(created_at) if created_at else None
        self.job_spec = JobSpec(**(kwargs.get("job_spec") or kwargs.get("jobSpec", {})))
        self.schedule = kwargs.get("schedule")
        self.suspend = kwargs.get("suspend")
        self.concurrency = kwargs.get("concurrency")
        status = kwargs.get("status", {})
        self.status = ScheduledJobStatus(
            last_job=status.get("last_job") or status.get("lastJob"),
            next_job_run_at=status.get("next_job_run_at") or status.get("nextJobRunAt"),
        )
        owner = kwargs.get("owner", {})
        self.owner = JobOwner(id=owner["id"], name=owner["name"], type=owner["type"])


def _create_job_spec(
    *,
    image: str,
    command: list[str],
    env: Optional[dict[str, Any]],
    secrets: Optional[dict[str, Any]],
    flavor: Optional[SpaceHardware],
    timeout: Optional[Union[int, float, str]],
) -> dict[str, Any]:
    # prepare job spec to send to HF Jobs API
    job_spec: dict[str, Any] = {
        "command": command,
        "arguments": [],
        "environment": env or {},
        "flavor": flavor or SpaceHardware.CPU_BASIC,
    }
    # secrets are optional
    if secrets:
        job_spec["secrets"] = secrets
    # timeout is optional
    if timeout:
        time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
        if isinstance(timeout, str) and timeout[-1] in time_units_factors:
            job_spec["timeoutSeconds"] = int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
        else:
            job_spec["timeoutSeconds"] = int(timeout)
    # input is either from docker hub or from HF spaces
    for prefix in (
        "https://huggingface.co/spaces/",
        "https://hf.co/spaces/",
        "huggingface.co/spaces/",
        "hf.co/spaces/",
    ):
        if image.startswith(prefix):
            job_spec["spaceId"] = image[len(prefix) :]
            break
    else:
        job_spec["dockerImage"] = image
    return job_spec
````
huggingface_hub/_local_folder.py
CHANGED
```diff
@@ -86,7 +86,13 @@ class LocalDownloadFilePaths:
 
     def incomplete_path(self, etag: str) -> Path:
         """Return the path where a file will be temporarily downloaded before being moved to `file_path`."""
-
+        path = self.metadata_path.parent / f"{_short_hash(self.metadata_path.name)}.{etag}.incomplete"
+        resolved_path = str(path.resolve())
+        # Some Windows versions do not allow for paths longer than 255 characters.
+        # In this case, we must specify it as an extended path by using the "\\?\" prefix.
+        if os.name == "nt" and len(resolved_path) > 255 and not resolved_path.startswith("\\\\?\\"):
+            path = Path("\\\\?\\" + resolved_path)
+        return path
 
 
 @dataclass(frozen=True)
@@ -149,6 +155,7 @@ class LocalUploadFileMetadata:
     should_ignore: Optional[bool] = None
     sha256: Optional[str] = None
     upload_mode: Optional[str] = None
+    remote_oid: Optional[str] = None
     is_uploaded: bool = False
     is_committed: bool = False
 
@@ -174,6 +181,10 @@ class LocalUploadFileMetadata:
                 f.write(self.upload_mode)
             f.write("\n")
 
+            if self.remote_oid is not None:
+                f.write(self.remote_oid)
+            f.write("\n")
+
             f.write(str(int(self.is_uploaded)) + "\n")
             f.write(str(int(self.is_committed)) + "\n")
 
@@ -195,7 +206,7 @@ def get_local_download_paths(local_dir: Path, filename: str) -> LocalDownloadFil
         [`LocalDownloadFilePaths`]: the paths to the files (file_path, lock_path, metadata_path, incomplete_path).
     """
     # filename is the path in the Hub repository (separated by '/')
-    # make sure to have a cross
+    # make sure to have a cross-platform transcription
     sanitized_filename = os.path.join(*filename.split("/"))
     if os.name == "nt":
         if sanitized_filename.startswith("..\\") or "\\..\\" in sanitized_filename:
@@ -235,7 +246,7 @@ def get_local_upload_paths(local_dir: Path, filename: str) -> LocalUploadFilePat
         [`LocalUploadFilePaths`]: the paths to the files (file_path, lock_path, metadata_path).
     """
     # filename is the path in the Hub repository (separated by '/')
-    # make sure to have a cross
+    # make sure to have a cross-platform transcription
     sanitized_filename = os.path.join(*filename.split("/"))
     if os.name == "nt":
         if sanitized_filename.startswith("..\\") or "\\..\\" in sanitized_filename:
@@ -346,6 +357,9 @@ def read_upload_metadata(local_dir: Path, filename: str) -> LocalUploadFileMetad
             if upload_mode not in (None, "regular", "lfs"):
                 raise ValueError(f"Invalid upload mode in metadata {paths.path_in_repo}: {upload_mode}")
 
+            _remote_oid = f.readline().strip()
+            remote_oid = None if _remote_oid == "" else _remote_oid
+
             is_uploaded = bool(int(f.readline().strip()))
             is_committed = bool(int(f.readline().strip()))
 
@@ -355,6 +369,7 @@ def read_upload_metadata(local_dir: Path, filename: str) -> LocalUploadFileMetad
                 should_ignore=should_ignore,
                 sha256=sha256,
                 upload_mode=upload_mode,
+                remote_oid=remote_oid,
                 is_uploaded=is_uploaded,
                 is_committed=is_committed,
             )
```
huggingface_hub/_login.py
CHANGED
```diff
@@ -19,9 +19,11 @@ from getpass import getpass
 from pathlib import Path
 from typing import Optional
 
+import typer
+
 from . import constants
-from .commands._cli_utils import ANSI
 from .utils import (
+    ANSI,
     capture_output,
     get_token,
     is_google_colab,
@@ -41,7 +43,6 @@ from .utils._auth import (
     _save_token,
     get_stored_tokens,
 )
-from .utils._deprecation import _deprecate_arguments, _deprecate_positional_args
 
 
 logger = logging.get_logger(__name__)
@@ -55,18 +56,11 @@ _HF_LOGO_ASCII = """
 """
 
 
-@_deprecate_arguments(
-    version="1.0",
-    deprecated_args="write_permission",
-    custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
-)
-@_deprecate_positional_args(version="1.0")
 def login(
     token: Optional[str] = None,
     *,
     add_to_git_credential: bool = False,
-
-    write_permission: bool = False,
+    skip_if_logged_in: bool = False,
 ) -> None:
     """Login the machine to access the Hub.
 
@@ -75,24 +69,18 @@ def login(
     components. If `token` is not provided, it will be prompted to the user either with
     a widget (in a notebook) or via the terminal.
 
-    To log in from outside of a script, one can also use `
+    To log in from outside of a script, one can also use `hf auth login` which is
     a cli command that wraps [`login`].
 
-
-
-
-    extends its capabilities.
-
-    </Tip>
-
-    <Tip>
+    > [!TIP]
+    > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
+    > extends its capabilities.
 
-
-
-
-
-
-    </Tip>
+    > [!TIP]
+    > When the token is not passed, [`login`] will automatically detect if the script runs
+    > in a notebook or not. However, this detection might not be accurate due to the
+    > variety of notebooks that exists nowadays. If that is the case, you can always force
+    > the UI by using [`notebook_login`] or [`interpreter_login`].
 
     Args:
         token (`str`, *optional*):
@@ -102,10 +90,8 @@ def login(
             is configured, a warning will be displayed to the user. If `token` is `None`,
             the value of `add_to_git_credential` is ignored and will be prompted again
             to the end user.
-
-            If `True`,
-        write_permission (`bool`):
-            Ignored and deprecated argument.
+        skip_if_logged_in (`bool`, defaults to `False`):
+            If `True`, do not prompt for token if user is already logged in.
     Raises:
         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If an organization token is passed. Only personal account tokens are valid
@@ -120,14 +106,14 @@ def login(
         logger.info(
             "The token has not been saved to the git credentials helper. Pass "
             "`add_to_git_credential=True` in this function directly or "
-            "`--add-to-git-credential` if using via `
+            "`--add-to-git-credential` if using via `hf`CLI if "
             "you want to set the git credential as well."
         )
         _login(token, add_to_git_credential=add_to_git_credential)
     elif is_notebook():
-        notebook_login(
+        notebook_login(skip_if_logged_in=skip_if_logged_in)
     else:
-        interpreter_login(
+        interpreter_login(skip_if_logged_in=skip_if_logged_in)
 
 
 def logout(token_name: Optional[str] = None) -> None:
@@ -137,7 +123,7 @@ def logout(token_name: Optional[str] = None) -> None:
 
     Args:
         token_name (`str`, *optional*):
-            Name of the access token to logout from. If `None`, will
+            Name of the access token to logout from. If `None`, will log out from all saved access tokens.
     Raises:
         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError):
             If the access token name is not found.
@@ -233,7 +219,7 @@ def auth_list() -> None:
         )
     elif current_token_name is None:
         logger.warning(
-            "\nNote: No active token is set and no environment variable `HF_TOKEN` is found. Use `
+            "\nNote: No active token is set and no environment variable `HF_TOKEN` is found. Use `hf auth login` to log in."
         )
 
 
@@ -242,13 +228,7 @@ def auth_list() -> None:
 ###
 
 
-
-    version="1.0",
-    deprecated_args="write_permission",
-    custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
-)
-@_deprecate_positional_args(version="1.0")
-def interpreter_login(*, new_session: bool = True, write_permission: bool = False) -> None:
+def interpreter_login(*, skip_if_logged_in: bool = False) -> None:
     """
     Displays a prompt to log in to the HF website and store the token.
 
@@ -259,22 +239,18 @@ def interpreter_login(*, new_session: bool = True, write_permission: bool = Fals
     For more details, see [`login`].
 
     Args:
-
-            If `True`,
-        write_permission (`bool`):
-            Ignored and deprecated argument.
+        skip_if_logged_in (`bool`, defaults to `False`):
+            If `True`, do not prompt for token if user is already logged in.
     """
-    if not
+    if not skip_if_logged_in and get_token() is not None:
         logger.info("User is already logged in.")
         return
 
-    from .commands.delete_cache import _ask_for_confirmation_no_tui
-
     print(_HF_LOGO_ASCII)
     if get_token() is not None:
         logger.info(
-            " A token is already saved on your machine. Run `
-            "
+            " A token is already saved on your machine. Run `hf auth whoami`"
+            " to get more information or `hf auth logout` if you want"
             " to log out."
        )
         logger.info(" Setting a new token will erase the existing one.")
@@ -285,7 +261,7 @@ def interpreter_login(*, new_session: bool = True, write_permission: bool = Fals
     if os.name == "nt":
         logger.info("Token can be pasted using 'Right-Click'.")
     token = getpass("Enter your token (input will not be visible): ")
-    add_to_git_credential =
+    add_to_git_credential = typer.confirm("Add token as git credential?")
 
     _login(token=token, add_to_git_credential=add_to_git_credential)
 
@@ -314,13 +290,7 @@ NOTEBOOK_LOGIN_TOKEN_HTML_END = """
 notebooks. </center>"""
 
 
-
-    version="1.0",
-    deprecated_args="write_permission",
-    custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
-)
-@_deprecate_positional_args(version="1.0")
-def notebook_login(*, new_session: bool = True, write_permission: bool = False) -> None:
+def notebook_login(*, skip_if_logged_in: bool = False) -> None:
     """
     Displays a widget to log in to the HF website and store the token.
 
@@ -331,10 +301,8 @@ def notebook_login(*, new_session: bool = True, write_permission: bool = False)
     For more details, see [`login`].
 
     Args:
-
-            If `True`,
-        write_permission (`bool`):
-            Ignored and deprecated argument.
+        skip_if_logged_in (`bool`, defaults to `False`):
+            If `True`, do not prompt for token if user is already logged in.
     """
     try:
         import ipywidgets.widgets as widgets  # type: ignore
@@ -344,7 +312,7 @@ def notebook_login(*, new_session: bool = True, write_permission: bool = False)
             "The `notebook_login` function can only be used in a notebook (Jupyter or"
             " Colab) and you need the `ipywidgets` module: `pip install ipywidgets`."
         )
-    if not
+    if not skip_if_logged_in and get_token() is not None:
         logger.info("User is already logged in.")
         return
 
```
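A short usage sketch of the reworked login API, based only on the signatures and docstrings in the hunks above; the token value is a placeholder:

```python
from huggingface_hub import login, logout

# Non-interactive usage is unchanged: pass the token explicitly.
login(token="hf_xxx", add_to_git_credential=False)

# The deprecated `new_session` / `write_permission` arguments are gone.
# Per the new docstring, skip_if_logged_in=True avoids prompting again
# when a token is already saved on the machine.
login(skip_if_logged_in=True)

# Per the updated logout() docstring, calling it without a name
# logs out from all saved access tokens.
logout()
```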