huggingface-hub 1.0.0rc0__py3-none-any.whl → 1.0.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of huggingface-hub might be problematic. Click here for more details.
- huggingface_hub/__init__.py +4 -4
- huggingface_hub/cli/__init__.py +0 -14
- huggingface_hub/cli/_cli_utils.py +79 -2
- huggingface_hub/cli/auth.py +104 -149
- huggingface_hub/cli/cache.py +97 -121
- huggingface_hub/cli/download.py +93 -110
- huggingface_hub/cli/hf.py +37 -41
- huggingface_hub/cli/jobs.py +687 -1014
- huggingface_hub/cli/lfs.py +116 -139
- huggingface_hub/cli/repo.py +159 -215
- huggingface_hub/cli/repo_files.py +50 -84
- huggingface_hub/cli/system.py +6 -25
- huggingface_hub/cli/upload.py +198 -212
- huggingface_hub/cli/upload_large_folder.py +90 -105
- huggingface_hub/errors.py +1 -1
- huggingface_hub/utils/__init__.py +1 -1
- huggingface_hub/utils/_http.py +5 -5
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc1.dist-info}/METADATA +6 -1
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc1.dist-info}/RECORD +23 -23
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc1.dist-info}/LICENSE +0 -0
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc1.dist-info}/WHEEL +0 -0
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc1.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-1.0.0rc0.dist-info → huggingface_hub-1.0.0rc1.dist-info}/top_level.txt +0 -0
huggingface_hub/cli/jobs.py
CHANGED
|
@@ -28,1072 +28,745 @@ Usage:
|
|
|
28
28
|
|
|
29
29
|
# Cancel a running job
|
|
30
30
|
hf jobs cancel <job-id>
|
|
31
|
+
|
|
32
|
+
# Run a UV script
|
|
33
|
+
hf jobs uv run <script>
|
|
34
|
+
|
|
35
|
+
# Schedule a job
|
|
36
|
+
hf jobs scheduled run <schedule> <image> <command>
|
|
37
|
+
|
|
38
|
+
# List scheduled jobs
|
|
39
|
+
hf jobs scheduled ps [-a] [-f key=value] [--format TEMPLATE]
|
|
40
|
+
|
|
41
|
+
# Inspect a scheduled job
|
|
42
|
+
hf jobs scheduled inspect <scheduled_job_id>
|
|
43
|
+
|
|
44
|
+
# Suspend a scheduled job
|
|
45
|
+
hf jobs scheduled suspend <scheduled_job_id>
|
|
46
|
+
|
|
47
|
+
# Resume a scheduled job
|
|
48
|
+
hf jobs scheduled resume <scheduled_job_id>
|
|
49
|
+
|
|
50
|
+
# Delete a scheduled job
|
|
51
|
+
hf jobs scheduled delete <scheduled_job_id>
|
|
52
|
+
|
|
31
53
|
"""
|
|
32
54
|
|
|
33
55
|
import json
|
|
34
56
|
import os
|
|
35
57
|
import re
|
|
36
|
-
from argparse import Namespace, _SubParsersAction
|
|
37
58
|
from dataclasses import asdict
|
|
38
59
|
from pathlib import Path
|
|
39
|
-
from typing import Optional, Union
|
|
60
|
+
from typing import Annotated, Dict, Optional, Union
|
|
61
|
+
|
|
62
|
+
import typer
|
|
40
63
|
|
|
41
|
-
from huggingface_hub import
|
|
64
|
+
from huggingface_hub import SpaceHardware, get_token
|
|
42
65
|
from huggingface_hub.errors import HfHubHTTPError
|
|
43
66
|
from huggingface_hub.utils import logging
|
|
44
67
|
from huggingface_hub.utils._dotenv import load_dotenv
|
|
45
68
|
|
|
46
|
-
from . import
|
|
69
|
+
from ._cli_utils import TokenOpt, get_hf_api, typer_factory
|
|
47
70
|
|
|
48
71
|
|
|
49
72
|
logger = logging.get_logger(__name__)
|
|
50
73
|
|
|
51
74
|
SUGGESTED_FLAVORS = [item.value for item in SpaceHardware if item.value != "zero-a10g"]
|
|
52
75
|
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
76
|
+
# Common job-related options
|
|
77
|
+
ImageArg = Annotated[
|
|
78
|
+
str,
|
|
79
|
+
typer.Argument(
|
|
80
|
+
help="The Docker image to use.",
|
|
81
|
+
),
|
|
82
|
+
]
|
|
83
|
+
|
|
84
|
+
ImageOpt = Annotated[
|
|
85
|
+
Optional[str],
|
|
86
|
+
typer.Option(
|
|
87
|
+
help="Use a custom Docker image with `uv` installed.",
|
|
88
|
+
),
|
|
89
|
+
]
|
|
90
|
+
|
|
91
|
+
FlavorOpt = Annotated[
|
|
92
|
+
Optional[SpaceHardware],
|
|
93
|
+
typer.Option(
|
|
94
|
+
help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
|
|
95
|
+
),
|
|
96
|
+
]
|
|
97
|
+
|
|
98
|
+
EnvOpt = Annotated[
|
|
99
|
+
Optional[list[str]],
|
|
100
|
+
typer.Option(
|
|
101
|
+
"-e",
|
|
102
|
+
"--env",
|
|
103
|
+
help="Set environment variables. E.g. --env ENV=value",
|
|
104
|
+
),
|
|
105
|
+
]
|
|
106
|
+
|
|
107
|
+
SecretsOpt = Annotated[
|
|
108
|
+
Optional[list[str]],
|
|
109
|
+
typer.Option(
|
|
110
|
+
"-s",
|
|
111
|
+
"--secrets",
|
|
112
|
+
help="Set secret environment variables. E.g. --secrets SECRET=value or `--secrets HF_TOKEN` to pass your Hugging Face token.",
|
|
113
|
+
),
|
|
114
|
+
]
|
|
115
|
+
|
|
116
|
+
EnvFileOpt = Annotated[
|
|
117
|
+
Optional[str],
|
|
118
|
+
typer.Option(
|
|
119
|
+
"--env-file",
|
|
120
|
+
help="Read in a file of environment variables.",
|
|
121
|
+
),
|
|
122
|
+
]
|
|
123
|
+
|
|
124
|
+
SecretsFileOpt = Annotated[
|
|
125
|
+
Optional[str],
|
|
126
|
+
typer.Option(
|
|
127
|
+
help="Read in a file of secret environment variables.",
|
|
128
|
+
),
|
|
129
|
+
]
|
|
130
|
+
|
|
131
|
+
TimeoutOpt = Annotated[
|
|
132
|
+
Optional[str],
|
|
133
|
+
typer.Option(
|
|
134
|
+
help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
|
|
135
|
+
),
|
|
136
|
+
]
|
|
137
|
+
|
|
138
|
+
DetachOpt = Annotated[
|
|
139
|
+
bool,
|
|
140
|
+
typer.Option(
|
|
141
|
+
"-d",
|
|
142
|
+
"--detach",
|
|
143
|
+
help="Run the Job in the background and print the Job ID.",
|
|
144
|
+
),
|
|
145
|
+
]
|
|
146
|
+
|
|
147
|
+
NamespaceOpt = Annotated[
|
|
148
|
+
Optional[str],
|
|
149
|
+
typer.Option(
|
|
150
|
+
help="The namespace where the job will be running. Defaults to the current user's namespace.",
|
|
151
|
+
),
|
|
152
|
+
]
|
|
153
|
+
|
|
154
|
+
WithOpt = Annotated[
|
|
155
|
+
Optional[list[str]],
|
|
156
|
+
typer.Option(
|
|
157
|
+
"--with",
|
|
158
|
+
help="Run with the given packages installed",
|
|
159
|
+
),
|
|
160
|
+
]
|
|
161
|
+
|
|
162
|
+
PythonOpt = Annotated[
|
|
163
|
+
Optional[str],
|
|
164
|
+
typer.Option(
|
|
165
|
+
"-p",
|
|
166
|
+
"--python",
|
|
167
|
+
help="The Python interpreter to use for the run environment",
|
|
168
|
+
),
|
|
169
|
+
]
|
|
170
|
+
|
|
171
|
+
SuspendOpt = Annotated[
|
|
172
|
+
Optional[bool],
|
|
173
|
+
typer.Option(
|
|
174
|
+
help="Suspend (pause) the scheduled Job",
|
|
175
|
+
),
|
|
176
|
+
]
|
|
177
|
+
|
|
178
|
+
ConcurrencyOpt = Annotated[
|
|
179
|
+
Optional[bool],
|
|
180
|
+
typer.Option(
|
|
181
|
+
help="Allow multiple instances of this Job to run concurrently",
|
|
182
|
+
),
|
|
183
|
+
]
|
|
184
|
+
|
|
185
|
+
ScheduleArg = Annotated[
|
|
186
|
+
str,
|
|
187
|
+
typer.Argument(
|
|
188
|
+
help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
|
|
189
|
+
),
|
|
190
|
+
]
|
|
191
|
+
|
|
192
|
+
ScriptArg = Annotated[
|
|
193
|
+
str,
|
|
194
|
+
typer.Argument(
|
|
195
|
+
help="UV script to run (local file or URL)",
|
|
196
|
+
),
|
|
197
|
+
]
|
|
198
|
+
|
|
199
|
+
ScriptArgsArg = Annotated[
|
|
200
|
+
Optional[list[str]],
|
|
201
|
+
typer.Argument(
|
|
202
|
+
help="Arguments for the script",
|
|
203
|
+
),
|
|
204
|
+
]
|
|
205
|
+
|
|
206
|
+
CommandArg = Annotated[
|
|
207
|
+
list[str],
|
|
208
|
+
typer.Argument(
|
|
209
|
+
help="The command to run.",
|
|
210
|
+
),
|
|
211
|
+
]
|
|
212
|
+
|
|
213
|
+
JobIdArg = Annotated[
|
|
214
|
+
str,
|
|
215
|
+
typer.Argument(
|
|
216
|
+
help="Job ID",
|
|
217
|
+
),
|
|
218
|
+
]
|
|
219
|
+
|
|
220
|
+
ScheduledJobIdArg = Annotated[
|
|
221
|
+
str,
|
|
222
|
+
typer.Argument(
|
|
223
|
+
help="Scheduled Job ID",
|
|
224
|
+
),
|
|
225
|
+
]
|
|
226
|
+
|
|
227
|
+
RepoOpt = Annotated[
|
|
228
|
+
Optional[str],
|
|
229
|
+
typer.Option(
|
|
230
|
+
help="Repository name for the script (creates ephemeral if not specified)",
|
|
231
|
+
),
|
|
232
|
+
]
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
jobs_cli = typer_factory(help="Run and manage Jobs on the Hub.")
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
@jobs_cli.command("run", help="Run a Job")
|
|
239
|
+
def jobs_run(
|
|
240
|
+
image: ImageArg,
|
|
241
|
+
command: CommandArg,
|
|
242
|
+
env: EnvOpt = None,
|
|
243
|
+
secrets: SecretsOpt = None,
|
|
244
|
+
env_file: EnvFileOpt = None,
|
|
245
|
+
secrets_file: SecretsFileOpt = None,
|
|
246
|
+
flavor: FlavorOpt = None,
|
|
247
|
+
timeout: TimeoutOpt = None,
|
|
248
|
+
detach: DetachOpt = False,
|
|
249
|
+
namespace: NamespaceOpt = None,
|
|
250
|
+
token: TokenOpt = None,
|
|
251
|
+
) -> None:
|
|
252
|
+
env_map: dict[str, Optional[str]] = {}
|
|
253
|
+
if env_file:
|
|
254
|
+
env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
|
|
255
|
+
for env_value in env or []:
|
|
256
|
+
env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
|
|
257
|
+
|
|
258
|
+
secrets_map: dict[str, Optional[str]] = {}
|
|
259
|
+
extended_environ = _get_extended_environ()
|
|
260
|
+
if secrets_file:
|
|
261
|
+
secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
|
|
262
|
+
for secret in secrets or []:
|
|
263
|
+
secrets_map.update(load_dotenv(secret, environ=extended_environ))
|
|
264
|
+
|
|
265
|
+
api = get_hf_api(token=token)
|
|
266
|
+
job = api.run_job(
|
|
267
|
+
image=image,
|
|
268
|
+
command=command,
|
|
269
|
+
env=env_map,
|
|
270
|
+
secrets=secrets_map,
|
|
271
|
+
flavor=flavor,
|
|
272
|
+
timeout=timeout,
|
|
273
|
+
namespace=namespace,
|
|
274
|
+
)
|
|
275
|
+
# Always print the job ID to the user
|
|
276
|
+
print(f"Job started with ID: {job.id}")
|
|
277
|
+
print(f"View at: {job.url}")
|
|
278
|
+
|
|
279
|
+
if detach:
|
|
280
|
+
return
|
|
281
|
+
# Now let's stream the logs
|
|
282
|
+
for log in api.fetch_job_logs(job_id=job.id):
|
|
283
|
+
print(log)
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
@jobs_cli.command("logs", help="Fetch the logs of a Job")
|
|
287
|
+
def jobs_logs(
|
|
288
|
+
job_id: JobIdArg,
|
|
289
|
+
namespace: NamespaceOpt = None,
|
|
290
|
+
token: TokenOpt = None,
|
|
291
|
+
) -> None:
|
|
292
|
+
api = get_hf_api(token=token)
|
|
293
|
+
for log in api.fetch_job_logs(job_id=job_id, namespace=namespace):
|
|
294
|
+
print(log)
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def _matches_filters(job_properties: dict[str, str], filters: dict[str, str]) -> bool:
|
|
298
|
+
"""Check if scheduled job matches all specified filters."""
|
|
299
|
+
for key, pattern in filters.items():
|
|
300
|
+
# Check if property exists
|
|
301
|
+
if key not in job_properties:
|
|
302
|
+
return False
|
|
303
|
+
# Support pattern matching with wildcards
|
|
304
|
+
if "*" in pattern or "?" in pattern:
|
|
305
|
+
# Convert glob pattern to regex
|
|
306
|
+
regex_pattern = pattern.replace("*", ".*").replace("?", ".")
|
|
307
|
+
if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
|
|
308
|
+
return False
|
|
309
|
+
# Simple substring matching
|
|
310
|
+
elif pattern.lower() not in job_properties[key].lower():
|
|
311
|
+
return False
|
|
312
|
+
return True
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
def _print_output(rows: list[list[Union[str, int]]], headers: list[str], fmt: Optional[str]) -> None:
|
|
316
|
+
"""Print output according to the chosen format."""
|
|
317
|
+
if fmt:
|
|
318
|
+
# Use custom template if provided
|
|
319
|
+
template = fmt
|
|
320
|
+
for row in rows:
|
|
321
|
+
line = template
|
|
322
|
+
for i, field in enumerate(["id", "image", "command", "created", "status"]):
|
|
323
|
+
placeholder = f"{{{{.{field}}}}}"
|
|
324
|
+
if placeholder in line:
|
|
325
|
+
line = line.replace(placeholder, str(row[i]))
|
|
326
|
+
print(line)
|
|
327
|
+
else:
|
|
328
|
+
# Default tabular format
|
|
329
|
+
print(_tabulate(rows, headers=headers))
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
@jobs_cli.command("ps", help="List Jobs")
|
|
333
|
+
def jobs_ps(
|
|
334
|
+
all: Annotated[
|
|
335
|
+
bool,
|
|
336
|
+
typer.Option(
|
|
220
337
|
"-a",
|
|
221
338
|
"--all",
|
|
222
|
-
action="store_true",
|
|
223
339
|
help="Show all Jobs (default shows just running)",
|
|
224
|
-
)
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
"--token",
|
|
232
|
-
type=str,
|
|
233
|
-
help="A User Access Token generated from https://huggingface.co/settings/tokens",
|
|
234
|
-
)
|
|
235
|
-
# Add Docker-style filtering argument
|
|
236
|
-
run_parser.add_argument(
|
|
340
|
+
),
|
|
341
|
+
] = False,
|
|
342
|
+
namespace: NamespaceOpt = None,
|
|
343
|
+
token: TokenOpt = None,
|
|
344
|
+
filter: Annotated[
|
|
345
|
+
Optional[list[str]],
|
|
346
|
+
typer.Option(
|
|
237
347
|
"-f",
|
|
238
348
|
"--filter",
|
|
239
|
-
action="append",
|
|
240
|
-
default=[],
|
|
241
349
|
help="Filter output based on conditions provided (format: key=value)",
|
|
242
|
-
)
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
350
|
+
),
|
|
351
|
+
] = None,
|
|
352
|
+
format: Annotated[
|
|
353
|
+
Optional[str],
|
|
354
|
+
typer.Option(
|
|
247
355
|
help="Format output using a custom template",
|
|
248
|
-
)
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
356
|
+
),
|
|
357
|
+
] = None,
|
|
358
|
+
) -> None:
|
|
359
|
+
try:
|
|
360
|
+
api = get_hf_api(token=token)
|
|
361
|
+
# Fetch jobs data
|
|
362
|
+
jobs = api.list_jobs(namespace=namespace)
|
|
363
|
+
# Define table headers
|
|
364
|
+
table_headers = ["JOB ID", "IMAGE/SPACE", "COMMAND", "CREATED", "STATUS"]
|
|
365
|
+
rows: list[list[Union[str, int]]] = []
|
|
366
|
+
|
|
367
|
+
filters: dict[str, str] = {}
|
|
368
|
+
for f in filter or []:
|
|
260
369
|
if "=" in f:
|
|
261
370
|
key, value = f.split("=", 1)
|
|
262
|
-
|
|
371
|
+
filters[key.lower()] = value
|
|
263
372
|
else:
|
|
264
373
|
print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
|
|
374
|
+
# Process jobs data
|
|
375
|
+
for job in jobs:
|
|
376
|
+
# Extract job data for filtering
|
|
377
|
+
status = job.status.stage if job.status else "UNKNOWN"
|
|
378
|
+
if not all and status not in ("RUNNING", "UPDATING"):
|
|
379
|
+
# Skip job if not all jobs should be shown and status doesn't match criteria
|
|
380
|
+
continue
|
|
381
|
+
# Extract job data for output
|
|
382
|
+
job_id = job.id
|
|
265
383
|
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
Fetch and display job information for the current user.
|
|
269
|
-
Uses Docker-style filtering with -f/--filter flag and key=value pairs.
|
|
270
|
-
"""
|
|
271
|
-
try:
|
|
272
|
-
api = HfApi(token=self.token)
|
|
273
|
-
|
|
274
|
-
# Fetch jobs data
|
|
275
|
-
jobs = api.list_jobs(namespace=self.namespace)
|
|
276
|
-
|
|
277
|
-
# Define table headers
|
|
278
|
-
table_headers = ["JOB ID", "IMAGE/SPACE", "COMMAND", "CREATED", "STATUS"]
|
|
384
|
+
# Extract image or space information
|
|
385
|
+
image_or_space = job.docker_image or "N/A"
|
|
279
386
|
|
|
280
|
-
#
|
|
281
|
-
|
|
387
|
+
# Extract and format command
|
|
388
|
+
cmd = job.command or []
|
|
389
|
+
command_str = " ".join(cmd) if cmd else "N/A"
|
|
282
390
|
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
status = job.status.stage if job.status else "UNKNOWN"
|
|
391
|
+
# Extract creation time
|
|
392
|
+
created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
|
|
286
393
|
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
# Extract job ID
|
|
292
|
-
job_id = job.id
|
|
293
|
-
|
|
294
|
-
# Extract image or space information
|
|
295
|
-
image_or_space = job.docker_image or "N/A"
|
|
296
|
-
|
|
297
|
-
# Extract and format command
|
|
298
|
-
command = job.command or []
|
|
299
|
-
command_str = " ".join(command) if command else "N/A"
|
|
300
|
-
|
|
301
|
-
# Extract creation time
|
|
302
|
-
created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
|
|
303
|
-
|
|
304
|
-
# Create a dict with all job properties for filtering
|
|
305
|
-
job_properties = {
|
|
306
|
-
"id": job_id,
|
|
307
|
-
"image": image_or_space,
|
|
308
|
-
"status": status.lower(),
|
|
309
|
-
"command": command_str,
|
|
310
|
-
}
|
|
311
|
-
|
|
312
|
-
# Check if job matches all filters
|
|
313
|
-
if not self._matches_filters(job_properties):
|
|
314
|
-
continue
|
|
315
|
-
|
|
316
|
-
# Create row
|
|
317
|
-
rows.append([job_id, image_or_space, command_str, created_at, status])
|
|
318
|
-
|
|
319
|
-
# Handle empty results
|
|
320
|
-
if not rows:
|
|
321
|
-
filters_msg = ""
|
|
322
|
-
if self.filters:
|
|
323
|
-
filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
|
|
324
|
-
|
|
325
|
-
print(f"No jobs found{filters_msg}")
|
|
326
|
-
return
|
|
327
|
-
|
|
328
|
-
# Apply custom format if provided or use default tabular format
|
|
329
|
-
self._print_output(rows, table_headers)
|
|
330
|
-
|
|
331
|
-
except HfHubHTTPError as e:
|
|
332
|
-
print(f"Error fetching jobs data: {e}")
|
|
333
|
-
except (KeyError, ValueError, TypeError) as e:
|
|
334
|
-
print(f"Error processing jobs data: {e}")
|
|
335
|
-
except Exception as e:
|
|
336
|
-
print(f"Unexpected error - {type(e).__name__}: {e}")
|
|
337
|
-
|
|
338
|
-
def _matches_filters(self, job_properties: dict[str, str]) -> bool:
|
|
339
|
-
"""Check if job matches all specified filters."""
|
|
340
|
-
for key, pattern in self.filters.items():
|
|
341
|
-
# Check if property exists
|
|
342
|
-
if key not in job_properties:
|
|
343
|
-
return False
|
|
394
|
+
# Create a dict with all job properties for filtering
|
|
395
|
+
props = {"id": job_id, "image": image_or_space, "status": status.lower(), "command": command_str}
|
|
396
|
+
if not _matches_filters(props, filters):
|
|
397
|
+
continue
|
|
344
398
|
|
|
345
|
-
#
|
|
346
|
-
|
|
347
|
-
# Convert glob pattern to regex
|
|
348
|
-
regex_pattern = pattern.replace("*", ".*").replace("?", ".")
|
|
349
|
-
if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
|
|
350
|
-
return False
|
|
351
|
-
# Simple substring matching
|
|
352
|
-
elif pattern.lower() not in job_properties[key].lower():
|
|
353
|
-
return False
|
|
399
|
+
# Create row
|
|
400
|
+
rows.append([job_id, image_or_space, command_str, created_at, status])
|
|
354
401
|
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
if self.format:
|
|
360
|
-
# Custom template formatting (simplified)
|
|
361
|
-
template = self.format
|
|
362
|
-
for row in rows:
|
|
363
|
-
line = template
|
|
364
|
-
for i, field in enumerate(["id", "image", "command", "created", "status"]):
|
|
365
|
-
placeholder = f"{{{{.{field}}}}}"
|
|
366
|
-
if placeholder in line:
|
|
367
|
-
line = line.replace(placeholder, str(row[i]))
|
|
368
|
-
print(line)
|
|
369
|
-
else:
|
|
370
|
-
# Default tabular format
|
|
371
|
-
print(
|
|
372
|
-
_tabulate(
|
|
373
|
-
rows,
|
|
374
|
-
headers=headers,
|
|
375
|
-
)
|
|
402
|
+
# Handle empty results
|
|
403
|
+
if not rows:
|
|
404
|
+
filters_msg = (
|
|
405
|
+
f" matching filters: {', '.join([f'{k}={v}' for k, v in filters.items()])}" if filters else ""
|
|
376
406
|
)
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
class InspectCommand(BaseHuggingfaceCLICommand):
|
|
380
|
-
@staticmethod
|
|
381
|
-
def register_subcommand(parser: _SubParsersAction) -> None:
|
|
382
|
-
run_parser = parser.add_parser("inspect", help="Display detailed information on one or more Jobs")
|
|
383
|
-
run_parser.add_argument(
|
|
384
|
-
"--namespace",
|
|
385
|
-
type=str,
|
|
386
|
-
help="The namespace where the job is running. Defaults to the current user's namespace.",
|
|
387
|
-
)
|
|
388
|
-
run_parser.add_argument(
|
|
389
|
-
"--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
|
|
390
|
-
)
|
|
391
|
-
run_parser.add_argument("job_ids", nargs="...", help="The jobs to inspect")
|
|
392
|
-
run_parser.set_defaults(func=InspectCommand)
|
|
393
|
-
|
|
394
|
-
def __init__(self, args: Namespace) -> None:
|
|
395
|
-
self.namespace: Optional[str] = args.namespace
|
|
396
|
-
self.token: Optional[str] = args.token
|
|
397
|
-
self.job_ids: list[str] = args.job_ids
|
|
398
|
-
|
|
399
|
-
def run(self) -> None:
|
|
400
|
-
api = HfApi(token=self.token)
|
|
401
|
-
jobs = [api.inspect_job(job_id=job_id, namespace=self.namespace) for job_id in self.job_ids]
|
|
402
|
-
print(json.dumps([asdict(job) for job in jobs], indent=4, default=str))
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
class CancelCommand(BaseHuggingfaceCLICommand):
|
|
406
|
-
@staticmethod
|
|
407
|
-
def register_subcommand(parser: _SubParsersAction) -> None:
|
|
408
|
-
run_parser = parser.add_parser("cancel", help="Cancel a Job")
|
|
409
|
-
run_parser.add_argument("job_id", type=str, help="Job ID")
|
|
410
|
-
run_parser.add_argument(
|
|
411
|
-
"--namespace",
|
|
412
|
-
type=str,
|
|
413
|
-
help="The namespace where the job is running. Defaults to the current user's namespace.",
|
|
414
|
-
)
|
|
415
|
-
run_parser.add_argument(
|
|
416
|
-
"--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
|
|
417
|
-
)
|
|
418
|
-
run_parser.set_defaults(func=CancelCommand)
|
|
419
|
-
|
|
420
|
-
def __init__(self, args: Namespace) -> None:
|
|
421
|
-
self.job_id: str = args.job_id
|
|
422
|
-
self.namespace = args.namespace
|
|
423
|
-
self.token: Optional[str] = args.token
|
|
424
|
-
|
|
425
|
-
def run(self) -> None:
|
|
426
|
-
api = HfApi(token=self.token)
|
|
427
|
-
api.cancel_job(job_id=self.job_id, namespace=self.namespace)
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
class UvCommand(BaseHuggingfaceCLICommand):
|
|
431
|
-
"""Run UV scripts on Hugging Face infrastructure."""
|
|
432
|
-
|
|
433
|
-
@staticmethod
|
|
434
|
-
def register_subcommand(parser):
|
|
435
|
-
"""Register UV run subcommand."""
|
|
436
|
-
uv_parser = parser.add_parser(
|
|
437
|
-
"uv",
|
|
438
|
-
help="Run UV scripts (Python with inline dependencies) on HF infrastructure",
|
|
439
|
-
)
|
|
440
|
-
|
|
441
|
-
subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)
|
|
442
|
-
|
|
443
|
-
# Run command only
|
|
444
|
-
run_parser = subparsers.add_parser(
|
|
445
|
-
"run",
|
|
446
|
-
help="Run a UV script (local file or URL) on HF infrastructure",
|
|
447
|
-
)
|
|
448
|
-
run_parser.add_argument("script", help="UV script to run (local file or URL)")
|
|
449
|
-
run_parser.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
|
|
450
|
-
run_parser.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
|
|
451
|
-
run_parser.add_argument(
|
|
452
|
-
"--repo",
|
|
453
|
-
help="Repository name for the script (creates ephemeral if not specified)",
|
|
454
|
-
)
|
|
455
|
-
run_parser.add_argument(
|
|
456
|
-
"--flavor",
|
|
457
|
-
type=str,
|
|
458
|
-
help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
|
|
459
|
-
)
|
|
460
|
-
run_parser.add_argument("-e", "--env", action="append", help="Environment variables")
|
|
461
|
-
run_parser.add_argument(
|
|
462
|
-
"-s",
|
|
463
|
-
"--secrets",
|
|
464
|
-
action="append",
|
|
465
|
-
help=(
|
|
466
|
-
"Set secret environment variables. E.g. --secrets SECRET=value "
|
|
467
|
-
"or `--secrets HF_TOKEN` to pass your Hugging Face token."
|
|
468
|
-
),
|
|
469
|
-
)
|
|
470
|
-
run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
|
|
471
|
-
run_parser.add_argument(
|
|
472
|
-
"--secrets-file",
|
|
473
|
-
type=str,
|
|
474
|
-
help="Read in a file of secret environment variables.",
|
|
475
|
-
)
|
|
476
|
-
run_parser.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
|
|
477
|
-
run_parser.add_argument("-d", "--detach", action="store_true", help="Run in background")
|
|
478
|
-
run_parser.add_argument(
|
|
479
|
-
"--namespace",
|
|
480
|
-
type=str,
|
|
481
|
-
help="The namespace where the Job will be created. Defaults to the current user's namespace.",
|
|
482
|
-
)
|
|
483
|
-
run_parser.add_argument("--token", type=str, help="HF token")
|
|
484
|
-
# UV options
|
|
485
|
-
run_parser.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
|
|
486
|
-
run_parser.add_argument(
|
|
487
|
-
"-p", "--python", type=str, help="The Python interpreter to use for the run environment"
|
|
488
|
-
)
|
|
489
|
-
run_parser.set_defaults(func=UvCommand)
|
|
490
|
-
|
|
491
|
-
def __init__(self, args: Namespace) -> None:
|
|
492
|
-
"""Initialize the command with parsed arguments."""
|
|
493
|
-
self.script = args.script
|
|
494
|
-
self.script_args = args.script_args
|
|
495
|
-
self.dependencies = args.with_
|
|
496
|
-
self.python = args.python
|
|
497
|
-
self.image = args.image
|
|
498
|
-
self.env: dict[str, Optional[str]] = {}
|
|
499
|
-
if args.env_file:
|
|
500
|
-
self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
|
|
501
|
-
for env_value in args.env or []:
|
|
502
|
-
self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
|
|
503
|
-
self.secrets: dict[str, Optional[str]] = {}
|
|
504
|
-
extended_environ = _get_extended_environ()
|
|
505
|
-
if args.secrets_file:
|
|
506
|
-
self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
|
|
507
|
-
for secret in args.secrets or []:
|
|
508
|
-
self.secrets.update(load_dotenv(secret, environ=extended_environ))
|
|
509
|
-
self.flavor: Optional[SpaceHardware] = args.flavor
|
|
510
|
-
self.timeout: Optional[str] = args.timeout
|
|
511
|
-
self.detach: bool = args.detach
|
|
512
|
-
self.namespace: Optional[str] = args.namespace
|
|
513
|
-
self.token: Optional[str] = args.token
|
|
514
|
-
self._repo = args.repo
|
|
515
|
-
|
|
516
|
-
def run(self) -> None:
|
|
517
|
-
"""Execute UV command."""
|
|
518
|
-
logging.set_verbosity(logging.INFO)
|
|
519
|
-
api = HfApi(token=self.token)
|
|
520
|
-
job = api.run_uv_job(
|
|
521
|
-
script=self.script,
|
|
522
|
-
script_args=self.script_args,
|
|
523
|
-
dependencies=self.dependencies,
|
|
524
|
-
python=self.python,
|
|
525
|
-
image=self.image,
|
|
526
|
-
env=self.env,
|
|
527
|
-
secrets=self.secrets,
|
|
528
|
-
flavor=self.flavor,
|
|
529
|
-
timeout=self.timeout,
|
|
530
|
-
namespace=self.namespace,
|
|
531
|
-
_repo=self._repo,
|
|
532
|
-
)
|
|
533
|
-
|
|
534
|
-
# Always print the job ID to the user
|
|
535
|
-
print(f"Job started with ID: {job.id}")
|
|
536
|
-
print(f"View at: {job.url}")
|
|
537
|
-
|
|
538
|
-
if self.detach:
|
|
407
|
+
print(f"No jobs found{filters_msg}")
|
|
539
408
|
return
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
409
|
+
# Apply custom format if provided or use default tabular format
|
|
410
|
+
_print_output(rows, table_headers, format)
|
|
411
|
+
|
|
412
|
+
except HfHubHTTPError as e:
|
|
413
|
+
print(f"Error fetching jobs data: {e}")
|
|
414
|
+
except (KeyError, ValueError, TypeError) as e:
|
|
415
|
+
print(f"Error processing jobs data: {e}")
|
|
416
|
+
except Exception as e:
|
|
417
|
+
print(f"Unexpected error - {type(e).__name__}: {e}")
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
@jobs_cli.command("inspect", help="Display detailed information on one or more Jobs")
|
|
421
|
+
def jobs_inspect(
|
|
422
|
+
job_ids: Annotated[
|
|
423
|
+
list[str],
|
|
424
|
+
typer.Argument(
|
|
425
|
+
help="The jobs to inspect",
|
|
426
|
+
),
|
|
427
|
+
],
|
|
428
|
+
namespace: NamespaceOpt = None,
|
|
429
|
+
token: TokenOpt = None,
|
|
430
|
+
) -> None:
|
|
431
|
+
api = get_hf_api(token=token)
|
|
432
|
+
jobs = [api.inspect_job(job_id=job_id, namespace=namespace) for job_id in job_ids]
|
|
433
|
+
print(json.dumps([asdict(job) for job in jobs], indent=4, default=str))
|
|
434
|
+
|
|
435
|
+
|
|
436
|
+
@jobs_cli.command("cancel", help="Cancel a Job")
|
|
437
|
+
def jobs_cancel(
|
|
438
|
+
job_id: JobIdArg,
|
|
439
|
+
namespace: NamespaceOpt = None,
|
|
440
|
+
token: TokenOpt = None,
|
|
441
|
+
) -> None:
|
|
442
|
+
api = get_hf_api(token=token)
|
|
443
|
+
api.cancel_job(job_id=job_id, namespace=namespace)
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
uv_app = typer_factory(help="Run UV scripts (Python with inline dependencies) on HF infrastructure")
jobs_cli.add_typer(uv_app, name="uv")


@uv_app.command("run", help="Run a UV script (local file or URL) on HF infrastructure")
def jobs_uv_run(
    script: ScriptArg,
    script_args: ScriptArgsArg = None,
    image: ImageOpt = None,
    repo: RepoOpt = None,
    flavor: FlavorOpt = None,
    env: EnvOpt = None,
    secrets: SecretsOpt = None,
    env_file: EnvFileOpt = None,
    secrets_file: SecretsFileOpt = None,
    timeout: TimeoutOpt = None,
    detach: DetachOpt = False,
    namespace: NamespaceOpt = None,
    token: TokenOpt = None,
    with_: WithOpt = None,
    python: PythonOpt = None,
) -> None:
    """Run a UV script as a Job and stream its logs unless `--detach` is given."""
    # Collect plain environment variables: values from --env-file first, then
    # individual --env entries, so the latter override the former on key clash.
    env_map: dict[str, Optional[str]] = {}
    if env_file:
        env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
    for env_value in env or []:
        env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
    # Secrets are resolved against an extended environ that also carries the
    # stored HF token under HF_TOKEN (see _get_extended_environ below).
    secrets_map: dict[str, Optional[str]] = {}
    extended_environ = _get_extended_environ()
    if secrets_file:
        secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
    for secret in secrets or []:
        secrets_map.update(load_dotenv(secret, environ=extended_environ))

    api = get_hf_api(token=token)
    job = api.run_uv_job(
        script=script,
        script_args=script_args or [],
        dependencies=with_,
        python=python,
        image=image,
        env=env_map,
        secrets=secrets_map,
        flavor=flavor,  # type: ignore[arg-type]
        timeout=timeout,
        namespace=namespace,
        _repo=repo,
    )
    # Always print the job ID to the user
    print(f"Job started with ID: {job.id}")
    print(f"View at: {job.url}")
    if detach:
        return
    # Now let's stream the logs
    for log in api.fetch_job_logs(job_id=job.id):
        print(log)
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
scheduled_app = typer_factory(help="Create and manage scheduled Jobs on the Hub.")
jobs_cli.add_typer(scheduled_app, name="scheduled")


@scheduled_app.command("run", help="Schedule a Job")
def scheduled_run(
    schedule: ScheduleArg,
    image: ImageArg,
    command: CommandArg,
    suspend: SuspendOpt = None,
    concurrency: ConcurrencyOpt = None,
    env: EnvOpt = None,
    secrets: SecretsOpt = None,
    env_file: EnvFileOpt = None,
    secrets_file: SecretsFileOpt = None,
    flavor: FlavorOpt = None,
    timeout: TimeoutOpt = None,
    namespace: NamespaceOpt = None,
    token: TokenOpt = None,
) -> None:
    """Create a scheduled Job on the Hub from a Docker image and command."""
    # Collect plain environment variables: --env-file first, then --env
    # entries, so individual entries override the file on key clash.
    env_map: dict[str, Optional[str]] = {}
    if env_file:
        env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
    for env_value in env or []:
        env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
    # Secrets are resolved against an environ extended with the stored HF
    # token (see _get_extended_environ below).
    secrets_map: dict[str, Optional[str]] = {}
    extended_environ = _get_extended_environ()
    if secrets_file:
        secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
    for secret in secrets or []:
        secrets_map.update(load_dotenv(secret, environ=extended_environ))

    api = get_hf_api(token=token)
    scheduled_job = api.create_scheduled_job(
        image=image,
        command=command,
        schedule=schedule,
        suspend=suspend,
        concurrency=concurrency,
        env=env_map,
        secrets=secrets_map,
        flavor=flavor,
        timeout=timeout,
        namespace=namespace,
    )
    print(f"Scheduled Job created with ID: {scheduled_job.id}")
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
@scheduled_app.command("ps", help="List scheduled Jobs")
|
|
553
|
+
def scheduled_ps(
|
|
554
|
+
all: Annotated[
|
|
555
|
+
bool,
|
|
556
|
+
typer.Option(
|
|
676
557
|
"-a",
|
|
677
558
|
"--all",
|
|
678
|
-
action="store_true",
|
|
679
559
|
help="Show all scheduled Jobs (default hides suspended)",
|
|
680
|
-
)
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
"--token",
|
|
688
|
-
type=str,
|
|
689
|
-
help="A User Access Token generated from https://huggingface.co/settings/tokens",
|
|
690
|
-
)
|
|
691
|
-
# Add Docker-style filtering argument
|
|
692
|
-
run_parser.add_argument(
|
|
560
|
+
),
|
|
561
|
+
] = False,
|
|
562
|
+
namespace: NamespaceOpt = None,
|
|
563
|
+
token: TokenOpt = None,
|
|
564
|
+
filter: Annotated[
|
|
565
|
+
Optional[list[str]],
|
|
566
|
+
typer.Option(
|
|
693
567
|
"-f",
|
|
694
568
|
"--filter",
|
|
695
|
-
action="append",
|
|
696
|
-
default=[],
|
|
697
569
|
help="Filter output based on conditions provided (format: key=value)",
|
|
698
|
-
)
|
|
699
|
-
|
|
700
|
-
|
|
570
|
+
),
|
|
571
|
+
] = None,
|
|
572
|
+
format: Annotated[
|
|
573
|
+
Optional[str],
|
|
574
|
+
typer.Option(
|
|
701
575
|
"--format",
|
|
702
|
-
type=str,
|
|
703
576
|
help="Format output using a custom template",
|
|
704
|
-
)
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
# Parse filter arguments (key=value pairs)
|
|
715
|
-
for f in args.filter:
|
|
577
|
+
),
|
|
578
|
+
] = None,
|
|
579
|
+
) -> None:
|
|
580
|
+
try:
|
|
581
|
+
api = get_hf_api(token=token)
|
|
582
|
+
scheduled_jobs = api.list_scheduled_jobs(namespace=namespace)
|
|
583
|
+
table_headers = ["ID", "SCHEDULE", "IMAGE/SPACE", "COMMAND", "LAST RUN", "NEXT RUN", "SUSPEND"]
|
|
584
|
+
rows: list[list[Union[str, int]]] = []
|
|
585
|
+
filters: dict[str, str] = {}
|
|
586
|
+
for f in filter or []:
|
|
716
587
|
if "=" in f:
|
|
717
588
|
key, value = f.split("=", 1)
|
|
718
|
-
|
|
589
|
+
filters[key.lower()] = value
|
|
719
590
|
else:
|
|
720
591
|
print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
|
|
721
592
|
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
"
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
593
|
+
for scheduled_job in scheduled_jobs:
|
|
594
|
+
suspend = scheduled_job.suspend or False
|
|
595
|
+
if not all and suspend:
|
|
596
|
+
continue
|
|
597
|
+
sj_id = scheduled_job.id
|
|
598
|
+
schedule = scheduled_job.schedule or "N/A"
|
|
599
|
+
image_or_space = scheduled_job.job_spec.docker_image or "N/A"
|
|
600
|
+
cmd = scheduled_job.job_spec.command or []
|
|
601
|
+
command_str = " ".join(cmd) if cmd else "N/A"
|
|
602
|
+
last_job_at = (
|
|
603
|
+
scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
|
|
604
|
+
if scheduled_job.status.last_job
|
|
605
|
+
else "N/A"
|
|
606
|
+
)
|
|
607
|
+
next_job_run_at = (
|
|
608
|
+
scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
|
|
609
|
+
if scheduled_job.status.next_job_run_at
|
|
610
|
+
else "N/A"
|
|
611
|
+
)
|
|
612
|
+
props = {"id": sj_id, "image": image_or_space, "suspend": str(suspend), "command": command_str}
|
|
613
|
+
if not _matches_filters(props, filters):
|
|
614
|
+
continue
|
|
615
|
+
rows.append([sj_id, schedule, image_or_space, command_str, last_job_at, next_job_run_at, suspend])
|
|
616
|
+
|
|
617
|
+
if not rows:
|
|
618
|
+
filters_msg = (
|
|
619
|
+
f" matching filters: {', '.join([f'{k}={v}' for k, v in filters.items()])}" if filters else ""
|
|
620
|
+
)
|
|
621
|
+
print(f"No scheduled jobs found{filters_msg}")
|
|
622
|
+
return
|
|
623
|
+
_print_output(rows, table_headers, format)
|
|
624
|
+
|
|
625
|
+
except HfHubHTTPError as e:
|
|
626
|
+
print(f"Error fetching scheduled jobs data: {e}")
|
|
627
|
+
except (KeyError, ValueError, TypeError) as e:
|
|
628
|
+
print(f"Error processing scheduled jobs data: {e}")
|
|
629
|
+
except Exception as e:
|
|
630
|
+
print(f"Unexpected error - {type(e).__name__}: {e}")
|
|
631
|
+
|
|
632
|
+
|
|
633
|
+
@scheduled_app.command("inspect", help="Display detailed information on one or more scheduled Jobs")
|
|
634
|
+
def scheduled_inspect(
|
|
635
|
+
scheduled_job_ids: Annotated[
|
|
636
|
+
list[str],
|
|
637
|
+
typer.Argument(
|
|
638
|
+
help="The scheduled jobs to inspect",
|
|
639
|
+
),
|
|
640
|
+
],
|
|
641
|
+
namespace: NamespaceOpt = None,
|
|
642
|
+
token: TokenOpt = None,
|
|
643
|
+
) -> None:
|
|
644
|
+
api = get_hf_api(token=token)
|
|
645
|
+
scheduled_jobs = [
|
|
646
|
+
api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
|
|
647
|
+
for scheduled_job_id in scheduled_job_ids
|
|
648
|
+
]
|
|
649
|
+
print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
|
|
650
|
+
|
|
651
|
+
|
|
652
|
+
@scheduled_app.command("delete", help="Delete a scheduled Job")
|
|
653
|
+
def scheduled_delete(
|
|
654
|
+
scheduled_job_id: ScheduledJobIdArg,
|
|
655
|
+
namespace: NamespaceOpt = None,
|
|
656
|
+
token: TokenOpt = None,
|
|
657
|
+
) -> None:
|
|
658
|
+
api = get_hf_api(token=token)
|
|
659
|
+
api.delete_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
|
|
660
|
+
|
|
661
|
+
|
|
662
|
+
@scheduled_app.command("suspend", help="Suspend (pause) a scheduled Job")
|
|
663
|
+
def scheduled_suspend(
|
|
664
|
+
scheduled_job_id: ScheduledJobIdArg,
|
|
665
|
+
namespace: NamespaceOpt = None,
|
|
666
|
+
token: TokenOpt = None,
|
|
667
|
+
) -> None:
|
|
668
|
+
api = get_hf_api(token=token)
|
|
669
|
+
api.suspend_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
|
|
670
|
+
|
|
671
|
+
|
|
672
|
+
@scheduled_app.command("resume", help="Resume (unpause) a scheduled Job")
|
|
673
|
+
def scheduled_resume(
|
|
674
|
+
scheduled_job_id: ScheduledJobIdArg,
|
|
675
|
+
namespace: NamespaceOpt = None,
|
|
676
|
+
token: TokenOpt = None,
|
|
677
|
+
) -> None:
|
|
678
|
+
api = get_hf_api(token=token)
|
|
679
|
+
api.resume_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
|
|
680
|
+
|
|
681
|
+
|
|
682
|
+
scheduled_uv_app = typer_factory(help="Schedule UV scripts on HF infrastructure")
scheduled_app.add_typer(scheduled_uv_app, name="uv")


@scheduled_uv_app.command("run", help="Run a UV script (local file or URL) on HF infrastructure")
def scheduled_uv_run(
    schedule: ScheduleArg,
    script: ScriptArg,
    script_args: ScriptArgsArg = None,
    suspend: SuspendOpt = None,
    concurrency: ConcurrencyOpt = None,
    image: ImageOpt = None,
    repo: RepoOpt = None,
    flavor: FlavorOpt = None,
    env: EnvOpt = None,
    secrets: SecretsOpt = None,
    env_file: EnvFileOpt = None,
    secrets_file: SecretsFileOpt = None,
    timeout: TimeoutOpt = None,
    namespace: NamespaceOpt = None,
    token: TokenOpt = None,
    with_: WithOpt = None,
    python: PythonOpt = None,
) -> None:
    """Create a scheduled Job that runs a UV script (local file or URL)."""
    # Collect plain environment variables: --env-file first, then --env
    # entries, so individual entries override the file on key clash.
    env_map: dict[str, Optional[str]] = {}
    if env_file:
        env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
    for env_value in env or []:
        env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
    # Secrets are resolved against an environ extended with the stored HF
    # token (see _get_extended_environ below).
    secrets_map: dict[str, Optional[str]] = {}
    extended_environ = _get_extended_environ()
    if secrets_file:
        secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
    for secret in secrets or []:
        secrets_map.update(load_dotenv(secret, environ=extended_environ))

    api = get_hf_api(token=token)
    job = api.create_scheduled_uv_job(
        script=script,
        script_args=script_args or [],
        schedule=schedule,
        suspend=suspend,
        concurrency=concurrency,
        dependencies=with_,
        python=python,
        image=image,
        env=env_map,
        secrets=secrets_map,
        flavor=flavor,  # type: ignore[arg-type]
        timeout=timeout,
        namespace=namespace,
        _repo=repo,
    )
    print(f"Scheduled Job created with ID: {job.id}")
|
|
736
|
+
|
|
737
|
+
|
|
738
|
+
### UTILS
|
|
750
739
|
|
|
751
|
-
# Skip job if not all jobs should be shown and status doesn't match criteria
|
|
752
|
-
if not self.all and suspend:
|
|
753
|
-
continue
|
|
754
|
-
|
|
755
|
-
# Extract job ID
|
|
756
|
-
scheduled_job_id = scheduled_job.id
|
|
757
|
-
|
|
758
|
-
# Extract schedule
|
|
759
|
-
schedule = scheduled_job.schedule
|
|
760
|
-
|
|
761
|
-
# Extract image or space information
|
|
762
|
-
image_or_space = scheduled_job.job_spec.docker_image or "N/A"
|
|
763
|
-
|
|
764
|
-
# Extract and format command
|
|
765
|
-
command = scheduled_job.job_spec.command or []
|
|
766
|
-
command_str = " ".join(command) if command else "N/A"
|
|
767
|
-
|
|
768
|
-
# Extract status
|
|
769
|
-
last_job_at = (
|
|
770
|
-
scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
|
|
771
|
-
if scheduled_job.status.last_job
|
|
772
|
-
else "N/A"
|
|
773
|
-
)
|
|
774
|
-
next_job_run_at = (
|
|
775
|
-
scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
|
|
776
|
-
if scheduled_job.status.next_job_run_at
|
|
777
|
-
else "N/A"
|
|
778
|
-
)
|
|
779
|
-
|
|
780
|
-
# Create a dict with all job properties for filtering
|
|
781
|
-
job_properties = {
|
|
782
|
-
"id": scheduled_job_id,
|
|
783
|
-
"image": image_or_space,
|
|
784
|
-
"suspend": str(suspend),
|
|
785
|
-
"command": command_str,
|
|
786
|
-
}
|
|
787
|
-
|
|
788
|
-
# Check if job matches all filters
|
|
789
|
-
if not self._matches_filters(job_properties):
|
|
790
|
-
continue
|
|
791
|
-
|
|
792
|
-
# Create row
|
|
793
|
-
rows.append(
|
|
794
|
-
[
|
|
795
|
-
scheduled_job_id,
|
|
796
|
-
schedule,
|
|
797
|
-
image_or_space,
|
|
798
|
-
command_str,
|
|
799
|
-
last_job_at,
|
|
800
|
-
next_job_run_at,
|
|
801
|
-
suspend,
|
|
802
|
-
]
|
|
803
|
-
)
|
|
804
|
-
|
|
805
|
-
# Handle empty results
|
|
806
|
-
if not rows:
|
|
807
|
-
filters_msg = ""
|
|
808
|
-
if self.filters:
|
|
809
|
-
filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
|
|
810
|
-
|
|
811
|
-
print(f"No scheduled jobs found{filters_msg}")
|
|
812
|
-
return
|
|
813
|
-
|
|
814
|
-
# Apply custom format if provided or use default tabular format
|
|
815
|
-
self._print_output(rows, table_headers)
|
|
816
|
-
|
|
817
|
-
except HfHubHTTPError as e:
|
|
818
|
-
print(f"Error fetching scheduled jobs data: {e}")
|
|
819
|
-
except (KeyError, ValueError, TypeError) as e:
|
|
820
|
-
print(f"Error processing scheduled jobs data: {e}")
|
|
821
|
-
except Exception as e:
|
|
822
|
-
print(f"Unexpected error - {type(e).__name__}: {e}")
|
|
823
|
-
|
|
824
|
-
def _matches_filters(self, job_properties: dict[str, str]) -> bool:
|
|
825
|
-
"""Check if scheduled job matches all specified filters."""
|
|
826
|
-
for key, pattern in self.filters.items():
|
|
827
|
-
# Check if property exists
|
|
828
|
-
if key not in job_properties:
|
|
829
|
-
return False
|
|
830
740
|
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
834
|
-
regex_pattern = pattern.replace("*", ".*").replace("?", ".")
|
|
835
|
-
if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
|
|
836
|
-
return False
|
|
837
|
-
# Simple substring matching
|
|
838
|
-
elif pattern.lower() not in job_properties[key].lower():
|
|
839
|
-
return False
|
|
741
|
+
def _tabulate(rows: list[list[Union[str, int]]], headers: list[str]) -> str:
|
|
742
|
+
"""
|
|
743
|
+
Inspired by:
|
|
840
744
|
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
|
|
854
|
-
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
headers=headers,
|
|
863
|
-
)
|
|
864
|
-
)
|
|
745
|
+
- stackoverflow.com/a/8356620/593036
|
|
746
|
+
- stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
|
|
747
|
+
"""
|
|
748
|
+
col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
|
|
749
|
+
terminal_width = max(os.get_terminal_size().columns, len(headers) * 12)
|
|
750
|
+
while len(headers) + sum(col_widths) > terminal_width:
|
|
751
|
+
col_to_minimize = col_widths.index(max(col_widths))
|
|
752
|
+
col_widths[col_to_minimize] //= 2
|
|
753
|
+
if len(headers) + sum(col_widths) <= terminal_width:
|
|
754
|
+
col_widths[col_to_minimize] = terminal_width - sum(col_widths) - len(headers) + col_widths[col_to_minimize]
|
|
755
|
+
row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
|
|
756
|
+
lines = []
|
|
757
|
+
lines.append(row_format.format(*headers))
|
|
758
|
+
lines.append(row_format.format(*["-" * w for w in col_widths]))
|
|
759
|
+
for row in rows:
|
|
760
|
+
row_format_args = [
|
|
761
|
+
str(x)[: col_width - 3] + "..." if len(str(x)) > col_width else str(x)
|
|
762
|
+
for x, col_width in zip(row, col_widths)
|
|
763
|
+
]
|
|
764
|
+
lines.append(row_format.format(*row_format_args))
|
|
765
|
+
return "\n".join(lines)
|
|
865
766
|
|
|
866
767
|
|
|
867
|
-
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
|
|
871
|
-
|
|
872
|
-
"--namespace",
|
|
873
|
-
type=str,
|
|
874
|
-
help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
|
|
875
|
-
)
|
|
876
|
-
run_parser.add_argument(
|
|
877
|
-
"--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
|
|
878
|
-
)
|
|
879
|
-
run_parser.add_argument("scheduled_job_ids", nargs="...", help="The scheduled jobs to inspect")
|
|
880
|
-
run_parser.set_defaults(func=ScheduledInspectCommand)
|
|
881
|
-
|
|
882
|
-
def __init__(self, args: Namespace) -> None:
|
|
883
|
-
self.namespace: Optional[str] = args.namespace
|
|
884
|
-
self.token: Optional[str] = args.token
|
|
885
|
-
self.scheduled_job_ids: list[str] = args.scheduled_job_ids
|
|
886
|
-
|
|
887
|
-
def run(self) -> None:
|
|
888
|
-
api = HfApi(token=self.token)
|
|
889
|
-
scheduled_jobs = [
|
|
890
|
-
api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=self.namespace)
|
|
891
|
-
for scheduled_job_id in self.scheduled_job_ids
|
|
892
|
-
]
|
|
893
|
-
print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
|
|
894
|
-
|
|
895
|
-
|
|
896
|
-
class ScheduledDeleteCommand(BaseHuggingfaceCLICommand):
|
|
897
|
-
@staticmethod
|
|
898
|
-
def register_subcommand(parser: _SubParsersAction) -> None:
|
|
899
|
-
run_parser = parser.add_parser("delete", help="Delete a scheduled Job")
|
|
900
|
-
run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
|
|
901
|
-
run_parser.add_argument(
|
|
902
|
-
"--namespace",
|
|
903
|
-
type=str,
|
|
904
|
-
help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
|
|
905
|
-
)
|
|
906
|
-
run_parser.add_argument(
|
|
907
|
-
"--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
|
|
908
|
-
)
|
|
909
|
-
run_parser.set_defaults(func=ScheduledDeleteCommand)
|
|
910
|
-
|
|
911
|
-
def __init__(self, args: Namespace) -> None:
|
|
912
|
-
self.scheduled_job_id: str = args.scheduled_job_id
|
|
913
|
-
self.namespace = args.namespace
|
|
914
|
-
self.token: Optional[str] = args.token
|
|
915
|
-
|
|
916
|
-
def run(self) -> None:
|
|
917
|
-
api = HfApi(token=self.token)
|
|
918
|
-
api.delete_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
|
|
919
|
-
|
|
920
|
-
|
|
921
|
-
class ScheduledSuspendCommand(BaseHuggingfaceCLICommand):
|
|
922
|
-
@staticmethod
|
|
923
|
-
def register_subcommand(parser: _SubParsersAction) -> None:
|
|
924
|
-
run_parser = parser.add_parser("suspend", help="Suspend (pause) a scheduled Job")
|
|
925
|
-
run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
|
|
926
|
-
run_parser.add_argument(
|
|
927
|
-
"--namespace",
|
|
928
|
-
type=str,
|
|
929
|
-
help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
|
|
930
|
-
)
|
|
931
|
-
run_parser.add_argument(
|
|
932
|
-
"--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
|
|
933
|
-
)
|
|
934
|
-
run_parser.set_defaults(func=ScheduledSuspendCommand)
|
|
935
|
-
|
|
936
|
-
def __init__(self, args: Namespace) -> None:
|
|
937
|
-
self.scheduled_job_id: str = args.scheduled_job_id
|
|
938
|
-
self.namespace = args.namespace
|
|
939
|
-
self.token: Optional[str] = args.token
|
|
940
|
-
|
|
941
|
-
def run(self) -> None:
|
|
942
|
-
api = HfApi(token=self.token)
|
|
943
|
-
api.suspend_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
class ScheduledResumeCommand(BaseHuggingfaceCLICommand):
|
|
947
|
-
@staticmethod
|
|
948
|
-
def register_subcommand(parser: _SubParsersAction) -> None:
|
|
949
|
-
run_parser = parser.add_parser("resume", help="Resume (unpause) a scheduled Job")
|
|
950
|
-
run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
|
|
951
|
-
run_parser.add_argument(
|
|
952
|
-
"--namespace",
|
|
953
|
-
type=str,
|
|
954
|
-
help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
|
|
955
|
-
)
|
|
956
|
-
run_parser.add_argument(
|
|
957
|
-
"--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
|
|
958
|
-
)
|
|
959
|
-
run_parser.set_defaults(func=ScheduledResumeCommand)
|
|
960
|
-
|
|
961
|
-
def __init__(self, args: Namespace) -> None:
|
|
962
|
-
self.scheduled_job_id: str = args.scheduled_job_id
|
|
963
|
-
self.namespace = args.namespace
|
|
964
|
-
self.token: Optional[str] = args.token
|
|
965
|
-
|
|
966
|
-
def run(self) -> None:
|
|
967
|
-
api = HfApi(token=self.token)
|
|
968
|
-
api.resume_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
class ScheduledUvCommand(BaseHuggingfaceCLICommand):
|
|
972
|
-
"""Schedule UV scripts on Hugging Face infrastructure."""
|
|
973
|
-
|
|
974
|
-
@staticmethod
|
|
975
|
-
def register_subcommand(parser):
|
|
976
|
-
"""Register UV run subcommand."""
|
|
977
|
-
uv_parser = parser.add_parser(
|
|
978
|
-
"uv",
|
|
979
|
-
help="Schedule UV scripts (Python with inline dependencies) on HF infrastructure",
|
|
980
|
-
)
|
|
981
|
-
|
|
982
|
-
subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)
|
|
983
|
-
|
|
984
|
-
# Run command only
|
|
985
|
-
run_parser = subparsers.add_parser(
|
|
986
|
-
"run",
|
|
987
|
-
help="Run a UV script (local file or URL) on HF infrastructure",
|
|
988
|
-
)
|
|
989
|
-
run_parser.add_argument(
|
|
990
|
-
"schedule",
|
|
991
|
-
type=str,
|
|
992
|
-
help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
|
|
993
|
-
)
|
|
994
|
-
run_parser.add_argument("script", help="UV script to run (local file or URL)")
|
|
995
|
-
run_parser.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
|
|
996
|
-
run_parser.add_argument(
|
|
997
|
-
"--suspend",
|
|
998
|
-
action="store_true",
|
|
999
|
-
help="Suspend (pause) the scheduled Job",
|
|
1000
|
-
default=None,
|
|
1001
|
-
)
|
|
1002
|
-
run_parser.add_argument(
|
|
1003
|
-
"--concurrency",
|
|
1004
|
-
action="store_true",
|
|
1005
|
-
help="Allow multiple instances of this Job to run concurrently",
|
|
1006
|
-
default=None,
|
|
1007
|
-
)
|
|
1008
|
-
run_parser.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
|
|
1009
|
-
run_parser.add_argument(
|
|
1010
|
-
"--repo",
|
|
1011
|
-
help="Repository name for the script (creates ephemeral if not specified)",
|
|
1012
|
-
)
|
|
1013
|
-
run_parser.add_argument(
|
|
1014
|
-
"--flavor",
|
|
1015
|
-
type=str,
|
|
1016
|
-
help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
|
|
1017
|
-
)
|
|
1018
|
-
run_parser.add_argument("-e", "--env", action="append", help="Environment variables")
|
|
1019
|
-
run_parser.add_argument(
|
|
1020
|
-
"-s",
|
|
1021
|
-
"--secrets",
|
|
1022
|
-
action="append",
|
|
1023
|
-
help=(
|
|
1024
|
-
"Set secret environment variables. E.g. --secrets SECRET=value "
|
|
1025
|
-
"or `--secrets HF_TOKEN` to pass your Hugging Face token."
|
|
1026
|
-
),
|
|
1027
|
-
)
|
|
1028
|
-
run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
|
|
1029
|
-
run_parser.add_argument(
|
|
1030
|
-
"--secrets-file",
|
|
1031
|
-
type=str,
|
|
1032
|
-
help="Read in a file of secret environment variables.",
|
|
1033
|
-
)
|
|
1034
|
-
run_parser.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
|
|
1035
|
-
run_parser.add_argument("-d", "--detach", action="store_true", help="Run in background")
|
|
1036
|
-
run_parser.add_argument(
|
|
1037
|
-
"--namespace",
|
|
1038
|
-
type=str,
|
|
1039
|
-
help="The namespace where the Job will be created. Defaults to the current user's namespace.",
|
|
1040
|
-
)
|
|
1041
|
-
run_parser.add_argument("--token", type=str, help="HF token")
|
|
1042
|
-
# UV options
|
|
1043
|
-
run_parser.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
|
|
1044
|
-
run_parser.add_argument(
|
|
1045
|
-
"-p", "--python", type=str, help="The Python interpreter to use for the run environment"
|
|
1046
|
-
)
|
|
1047
|
-
run_parser.set_defaults(func=ScheduledUvCommand)
|
|
1048
|
-
|
|
1049
|
-
def __init__(self, args: Namespace) -> None:
|
|
1050
|
-
"""Initialize the command with parsed arguments."""
|
|
1051
|
-
self.schedule: str = args.schedule
|
|
1052
|
-
self.script = args.script
|
|
1053
|
-
self.script_args = args.script_args
|
|
1054
|
-
self.suspend: Optional[bool] = args.suspend
|
|
1055
|
-
self.concurrency: Optional[bool] = args.concurrency
|
|
1056
|
-
self.dependencies = args.with_
|
|
1057
|
-
self.python = args.python
|
|
1058
|
-
self.image = args.image
|
|
1059
|
-
self.env: dict[str, Optional[str]] = {}
|
|
1060
|
-
if args.env_file:
|
|
1061
|
-
self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
|
|
1062
|
-
for env_value in args.env or []:
|
|
1063
|
-
self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
|
|
1064
|
-
self.secrets: dict[str, Optional[str]] = {}
|
|
1065
|
-
extended_environ = _get_extended_environ()
|
|
1066
|
-
if args.secrets_file:
|
|
1067
|
-
self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
|
|
1068
|
-
for secret in args.secrets or []:
|
|
1069
|
-
self.secrets.update(load_dotenv(secret, environ=extended_environ))
|
|
1070
|
-
self.flavor: Optional[SpaceHardware] = args.flavor
|
|
1071
|
-
self.timeout: Optional[str] = args.timeout
|
|
1072
|
-
self.detach: bool = args.detach
|
|
1073
|
-
self.namespace: Optional[str] = args.namespace
|
|
1074
|
-
self.token: Optional[str] = args.token
|
|
1075
|
-
self._repo = args.repo
|
|
1076
|
-
|
|
1077
|
-
def run(self) -> None:
|
|
1078
|
-
"""Schedule UV command."""
|
|
1079
|
-
logging.set_verbosity(logging.INFO)
|
|
1080
|
-
api = HfApi(token=self.token)
|
|
1081
|
-
job = api.create_scheduled_uv_job(
|
|
1082
|
-
script=self.script,
|
|
1083
|
-
script_args=self.script_args,
|
|
1084
|
-
schedule=self.schedule,
|
|
1085
|
-
suspend=self.suspend,
|
|
1086
|
-
concurrency=self.concurrency,
|
|
1087
|
-
dependencies=self.dependencies,
|
|
1088
|
-
python=self.python,
|
|
1089
|
-
image=self.image,
|
|
1090
|
-
env=self.env,
|
|
1091
|
-
secrets=self.secrets,
|
|
1092
|
-
flavor=self.flavor,
|
|
1093
|
-
timeout=self.timeout,
|
|
1094
|
-
namespace=self.namespace,
|
|
1095
|
-
_repo=self._repo,
|
|
1096
|
-
)
|
|
1097
|
-
|
|
1098
|
-
# Always print the job ID to the user
|
|
1099
|
-
print(f"Scheduled Job created with ID: {job.id}")
|
|
768
|
+
def _get_extended_environ() -> Dict[str, str]:
    """Return a copy of ``os.environ``, adding ``HF_TOKEN`` from the stored token when available."""
    environ_copy = os.environ.copy()
    stored_token = get_token()
    if stored_token is not None:
        environ_copy["HF_TOKEN"] = stored_token
    return environ_copy
|