huggingface_hub-1.0.0rc0-py3-none-any.whl → huggingface_hub-1.0.0rc1-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

This version of huggingface-hub has been flagged as a potentially problematic release.

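The diff below replaces the argparse-based subcommand classes (JobsCommands, RunCommand, ScheduledJobsCommands, ...) with plain typer command functions that share reusable Annotated option types (TokenOpt, NamespaceOpt, FlavorOpt, ...). As a minimal, illustrative sketch (not the huggingface_hub source, and assuming typer >= 0.9 for Annotated support), the pattern looks like this:

    from typing import Annotated, Optional

    import typer

    app = typer.Typer(help="Toy jobs-style CLI.")

    # Hypothetical reusable option type, mirroring TokenOpt / NamespaceOpt in the diff.
    TokenOpt = Annotated[Optional[str], typer.Option(help="A User Access Token.")]


    @app.command("logs", help="Fetch the logs of a job")
    def logs(
        job_id: Annotated[str, typer.Argument(help="Job ID")],
        token: TokenOpt = None,
    ) -> None:
        # The real CLI would call the Hub API here; this sketch only echoes its inputs.
        print(f"job_id={job_id} token={'set' if token else None}")


    if __name__ == "__main__":
        app()

Defining option types once as Annotated aliases lets every subcommand reuse the same flag names and help text, which is what the new module does in place of repeating add_argument calls per parser.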
@@ -28,1072 +28,745 @@ Usage:
28
28
 
29
29
  # Cancel a running job
30
30
  hf jobs cancel <job-id>
31
+
32
+ # Run a UV script
33
+ hf jobs uv run <script>
34
+
35
+ # Schedule a job
36
+ hf jobs scheduled run <schedule> <image> <command>
37
+
38
+ # List scheduled jobs
39
+ hf jobs scheduled ps [-a] [-f key=value] [--format TEMPLATE]
40
+
41
+ # Inspect a scheduled job
42
+ hf jobs scheduled inspect <scheduled_job_id>
43
+
44
+ # Suspend a scheduled job
45
+ hf jobs scheduled suspend <scheduled_job_id>
46
+
47
+ # Resume a scheduled job
48
+ hf jobs scheduled resume <scheduled_job_id>
49
+
50
+ # Delete a scheduled job
51
+ hf jobs scheduled delete <scheduled_job_id>
52
+
31
53
  """
32
54
 
33
55
  import json
34
56
  import os
35
57
  import re
36
- from argparse import Namespace, _SubParsersAction
37
58
  from dataclasses import asdict
38
59
  from pathlib import Path
39
- from typing import Optional, Union
60
+ from typing import Annotated, Dict, Optional, Union
61
+
62
+ import typer
40
63
 
41
- from huggingface_hub import HfApi, SpaceHardware, get_token
64
+ from huggingface_hub import SpaceHardware, get_token
42
65
  from huggingface_hub.errors import HfHubHTTPError
43
66
  from huggingface_hub.utils import logging
44
67
  from huggingface_hub.utils._dotenv import load_dotenv
45
68
 
46
- from . import BaseHuggingfaceCLICommand
69
+ from ._cli_utils import TokenOpt, get_hf_api, typer_factory
47
70
 
48
71
 
49
72
  logger = logging.get_logger(__name__)
50
73
 
51
74
  SUGGESTED_FLAVORS = [item.value for item in SpaceHardware if item.value != "zero-a10g"]
52
75
 
53
-
54
- class JobsCommands(BaseHuggingfaceCLICommand):
55
- @staticmethod
56
- def register_subcommand(parser: _SubParsersAction):
57
- jobs_parser = parser.add_parser("jobs", help="Run and manage Jobs on the Hub.")
58
- jobs_subparsers = jobs_parser.add_subparsers(help="huggingface.co jobs related commands")
59
-
60
- # Show help if no subcommand is provided
61
- jobs_parser.set_defaults(func=lambda args: jobs_parser.print_help())
62
-
63
- # Register commands
64
- InspectCommand.register_subcommand(jobs_subparsers)
65
- LogsCommand.register_subcommand(jobs_subparsers)
66
- PsCommand.register_subcommand(jobs_subparsers)
67
- RunCommand.register_subcommand(jobs_subparsers)
68
- CancelCommand.register_subcommand(jobs_subparsers)
69
- UvCommand.register_subcommand(jobs_subparsers)
70
- ScheduledJobsCommands.register_subcommand(jobs_subparsers)
71
-
72
-
73
- class RunCommand(BaseHuggingfaceCLICommand):
74
- @staticmethod
75
- def register_subcommand(parser: _SubParsersAction) -> None:
76
- run_parser = parser.add_parser("run", help="Run a Job")
77
- run_parser.add_argument("image", type=str, help="The Docker image to use.")
78
- run_parser.add_argument("-e", "--env", action="append", help="Set environment variables. E.g. --env ENV=value")
79
- run_parser.add_argument(
80
- "-s",
81
- "--secrets",
82
- action="append",
83
- help=(
84
- "Set secret environment variables. E.g. --secrets SECRET=value "
85
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
86
- ),
87
- )
88
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
89
- run_parser.add_argument("--secrets-file", type=str, help="Read in a file of secret environment variables.")
90
- run_parser.add_argument(
91
- "--flavor",
92
- type=str,
93
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
94
- )
95
- run_parser.add_argument(
96
- "--timeout",
97
- type=str,
98
- help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
99
- )
100
- run_parser.add_argument(
101
- "-d",
102
- "--detach",
103
- action="store_true",
104
- help="Run the Job in the background and print the Job ID.",
105
- )
106
- run_parser.add_argument(
107
- "--namespace",
108
- type=str,
109
- help="The namespace where the Job will be created. Defaults to the current user's namespace.",
110
- )
111
- run_parser.add_argument(
112
- "--token",
113
- type=str,
114
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
115
- )
116
- run_parser.add_argument("command", nargs="...", help="The command to run.")
117
- run_parser.set_defaults(func=RunCommand)
118
-
119
- def __init__(self, args: Namespace) -> None:
120
- self.image: str = args.image
121
- self.command: list[str] = args.command
122
- self.env: dict[str, Optional[str]] = {}
123
- if args.env_file:
124
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
125
- for env_value in args.env or []:
126
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
127
- self.secrets: dict[str, Optional[str]] = {}
128
- extended_environ = _get_extended_environ()
129
- if args.secrets_file:
130
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
131
- for secret in args.secrets or []:
132
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
133
- self.flavor: Optional[SpaceHardware] = args.flavor
134
- self.timeout: Optional[str] = args.timeout
135
- self.detach: bool = args.detach
136
- self.namespace: Optional[str] = args.namespace
137
- self.token: Optional[str] = args.token
138
-
139
- def run(self) -> None:
140
- api = HfApi(token=self.token)
141
- job = api.run_job(
142
- image=self.image,
143
- command=self.command,
144
- env=self.env,
145
- secrets=self.secrets,
146
- flavor=self.flavor,
147
- timeout=self.timeout,
148
- namespace=self.namespace,
149
- )
150
- # Always print the job ID to the user
151
- print(f"Job started with ID: {job.id}")
152
- print(f"View at: {job.url}")
153
-
154
- if self.detach:
155
- return
156
-
157
- # Now let's stream the logs
158
- for log in api.fetch_job_logs(job_id=job.id):
159
- print(log)
160
-
161
-
162
- class LogsCommand(BaseHuggingfaceCLICommand):
163
- @staticmethod
164
- def register_subcommand(parser: _SubParsersAction) -> None:
165
- run_parser = parser.add_parser("logs", help="Fetch the logs of a Job")
166
- run_parser.add_argument("job_id", type=str, help="Job ID")
167
- run_parser.add_argument(
168
- "--namespace",
169
- type=str,
170
- help="The namespace where the job is running. Defaults to the current user's namespace.",
171
- )
172
- run_parser.add_argument(
173
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
174
- )
175
- run_parser.set_defaults(func=LogsCommand)
176
-
177
- def __init__(self, args: Namespace) -> None:
178
- self.job_id: str = args.job_id
179
- self.namespace: Optional[str] = args.namespace
180
- self.token: Optional[str] = args.token
181
-
182
- def run(self) -> None:
183
- api = HfApi(token=self.token)
184
- for log in api.fetch_job_logs(job_id=self.job_id, namespace=self.namespace):
185
- print(log)
186
-
187
-
188
- def _tabulate(rows: list[list[Union[str, int]]], headers: list[str]) -> str:
189
- """
190
- Inspired by:
191
-
192
- - stackoverflow.com/a/8356620/593036
193
- - stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
194
- """
195
- col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
196
- terminal_width = max(os.get_terminal_size().columns, len(headers) * 12)
197
- while len(headers) + sum(col_widths) > terminal_width:
198
- col_to_minimize = col_widths.index(max(col_widths))
199
- col_widths[col_to_minimize] //= 2
200
- if len(headers) + sum(col_widths) <= terminal_width:
201
- col_widths[col_to_minimize] = terminal_width - sum(col_widths) - len(headers) + col_widths[col_to_minimize]
202
- row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
203
- lines = []
204
- lines.append(row_format.format(*headers))
205
- lines.append(row_format.format(*["-" * w for w in col_widths]))
206
- for row in rows:
207
- row_format_args = [
208
- str(x)[: col_width - 3] + "..." if len(str(x)) > col_width else str(x)
209
- for x, col_width in zip(row, col_widths)
210
- ]
211
- lines.append(row_format.format(*row_format_args))
212
- return "\n".join(lines)
213
-
214
-
215
- class PsCommand(BaseHuggingfaceCLICommand):
216
- @staticmethod
217
- def register_subcommand(parser: _SubParsersAction) -> None:
218
- run_parser = parser.add_parser("ps", help="List Jobs")
219
- run_parser.add_argument(
76
+ # Common job-related options
77
+ ImageArg = Annotated[
78
+ str,
79
+ typer.Argument(
80
+ help="The Docker image to use.",
81
+ ),
82
+ ]
83
+
84
+ ImageOpt = Annotated[
85
+ Optional[str],
86
+ typer.Option(
87
+ help="Use a custom Docker image with `uv` installed.",
88
+ ),
89
+ ]
90
+
91
+ FlavorOpt = Annotated[
92
+ Optional[SpaceHardware],
93
+ typer.Option(
94
+ help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
95
+ ),
96
+ ]
97
+
98
+ EnvOpt = Annotated[
99
+ Optional[list[str]],
100
+ typer.Option(
101
+ "-e",
102
+ "--env",
103
+ help="Set environment variables. E.g. --env ENV=value",
104
+ ),
105
+ ]
106
+
107
+ SecretsOpt = Annotated[
108
+ Optional[list[str]],
109
+ typer.Option(
110
+ "-s",
111
+ "--secrets",
112
+ help="Set secret environment variables. E.g. --secrets SECRET=value or `--secrets HF_TOKEN` to pass your Hugging Face token.",
113
+ ),
114
+ ]
115
+
116
+ EnvFileOpt = Annotated[
117
+ Optional[str],
118
+ typer.Option(
119
+ "--env-file",
120
+ help="Read in a file of environment variables.",
121
+ ),
122
+ ]
123
+
124
+ SecretsFileOpt = Annotated[
125
+ Optional[str],
126
+ typer.Option(
127
+ help="Read in a file of secret environment variables.",
128
+ ),
129
+ ]
130
+
131
+ TimeoutOpt = Annotated[
132
+ Optional[str],
133
+ typer.Option(
134
+ help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
135
+ ),
136
+ ]
137
+
138
+ DetachOpt = Annotated[
139
+ bool,
140
+ typer.Option(
141
+ "-d",
142
+ "--detach",
143
+ help="Run the Job in the background and print the Job ID.",
144
+ ),
145
+ ]
146
+
147
+ NamespaceOpt = Annotated[
148
+ Optional[str],
149
+ typer.Option(
150
+ help="The namespace where the job will be running. Defaults to the current user's namespace.",
151
+ ),
152
+ ]
153
+
154
+ WithOpt = Annotated[
155
+ Optional[list[str]],
156
+ typer.Option(
157
+ "--with",
158
+ help="Run with the given packages installed",
159
+ ),
160
+ ]
161
+
162
+ PythonOpt = Annotated[
163
+ Optional[str],
164
+ typer.Option(
165
+ "-p",
166
+ "--python",
167
+ help="The Python interpreter to use for the run environment",
168
+ ),
169
+ ]
170
+
171
+ SuspendOpt = Annotated[
172
+ Optional[bool],
173
+ typer.Option(
174
+ help="Suspend (pause) the scheduled Job",
175
+ ),
176
+ ]
177
+
178
+ ConcurrencyOpt = Annotated[
179
+ Optional[bool],
180
+ typer.Option(
181
+ help="Allow multiple instances of this Job to run concurrently",
182
+ ),
183
+ ]
184
+
185
+ ScheduleArg = Annotated[
186
+ str,
187
+ typer.Argument(
188
+ help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
189
+ ),
190
+ ]
191
+
192
+ ScriptArg = Annotated[
193
+ str,
194
+ typer.Argument(
195
+ help="UV script to run (local file or URL)",
196
+ ),
197
+ ]
198
+
199
+ ScriptArgsArg = Annotated[
200
+ Optional[list[str]],
201
+ typer.Argument(
202
+ help="Arguments for the script",
203
+ ),
204
+ ]
205
+
206
+ CommandArg = Annotated[
207
+ list[str],
208
+ typer.Argument(
209
+ help="The command to run.",
210
+ ),
211
+ ]
212
+
213
+ JobIdArg = Annotated[
214
+ str,
215
+ typer.Argument(
216
+ help="Job ID",
217
+ ),
218
+ ]
219
+
220
+ ScheduledJobIdArg = Annotated[
221
+ str,
222
+ typer.Argument(
223
+ help="Scheduled Job ID",
224
+ ),
225
+ ]
226
+
227
+ RepoOpt = Annotated[
228
+ Optional[str],
229
+ typer.Option(
230
+ help="Repository name for the script (creates ephemeral if not specified)",
231
+ ),
232
+ ]
233
+
234
+
235
+ jobs_cli = typer_factory(help="Run and manage Jobs on the Hub.")
236
+
237
+
238
+ @jobs_cli.command("run", help="Run a Job")
239
+ def jobs_run(
240
+ image: ImageArg,
241
+ command: CommandArg,
242
+ env: EnvOpt = None,
243
+ secrets: SecretsOpt = None,
244
+ env_file: EnvFileOpt = None,
245
+ secrets_file: SecretsFileOpt = None,
246
+ flavor: FlavorOpt = None,
247
+ timeout: TimeoutOpt = None,
248
+ detach: DetachOpt = False,
249
+ namespace: NamespaceOpt = None,
250
+ token: TokenOpt = None,
251
+ ) -> None:
252
+ env_map: dict[str, Optional[str]] = {}
253
+ if env_file:
254
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
255
+ for env_value in env or []:
256
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
257
+
258
+ secrets_map: dict[str, Optional[str]] = {}
259
+ extended_environ = _get_extended_environ()
260
+ if secrets_file:
261
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
262
+ for secret in secrets or []:
263
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
264
+
265
+ api = get_hf_api(token=token)
266
+ job = api.run_job(
267
+ image=image,
268
+ command=command,
269
+ env=env_map,
270
+ secrets=secrets_map,
271
+ flavor=flavor,
272
+ timeout=timeout,
273
+ namespace=namespace,
274
+ )
275
+ # Always print the job ID to the user
276
+ print(f"Job started with ID: {job.id}")
277
+ print(f"View at: {job.url}")
278
+
279
+ if detach:
280
+ return
281
+ # Now let's stream the logs
282
+ for log in api.fetch_job_logs(job_id=job.id):
283
+ print(log)
284
+
285
+
286
+ @jobs_cli.command("logs", help="Fetch the logs of a Job")
287
+ def jobs_logs(
288
+ job_id: JobIdArg,
289
+ namespace: NamespaceOpt = None,
290
+ token: TokenOpt = None,
291
+ ) -> None:
292
+ api = get_hf_api(token=token)
293
+ for log in api.fetch_job_logs(job_id=job_id, namespace=namespace):
294
+ print(log)
295
+
296
+
297
+ def _matches_filters(job_properties: dict[str, str], filters: dict[str, str]) -> bool:
298
+ """Check if scheduled job matches all specified filters."""
299
+ for key, pattern in filters.items():
300
+ # Check if property exists
301
+ if key not in job_properties:
302
+ return False
303
+ # Support pattern matching with wildcards
304
+ if "*" in pattern or "?" in pattern:
305
+ # Convert glob pattern to regex
306
+ regex_pattern = pattern.replace("*", ".*").replace("?", ".")
307
+ if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
308
+ return False
309
+ # Simple substring matching
310
+ elif pattern.lower() not in job_properties[key].lower():
311
+ return False
312
+ return True
313
+
314
+
315
+ def _print_output(rows: list[list[Union[str, int]]], headers: list[str], fmt: Optional[str]) -> None:
316
+ """Print output according to the chosen format."""
317
+ if fmt:
318
+ # Use custom template if provided
319
+ template = fmt
320
+ for row in rows:
321
+ line = template
322
+ for i, field in enumerate(["id", "image", "command", "created", "status"]):
323
+ placeholder = f"{{{{.{field}}}}}"
324
+ if placeholder in line:
325
+ line = line.replace(placeholder, str(row[i]))
326
+ print(line)
327
+ else:
328
+ # Default tabular format
329
+ print(_tabulate(rows, headers=headers))
330
+
331
+
332
+ @jobs_cli.command("ps", help="List Jobs")
333
+ def jobs_ps(
334
+ all: Annotated[
335
+ bool,
336
+ typer.Option(
220
337
  "-a",
221
338
  "--all",
222
- action="store_true",
223
339
  help="Show all Jobs (default shows just running)",
224
- )
225
- run_parser.add_argument(
226
- "--namespace",
227
- type=str,
228
- help="The namespace from where it lists the jobs. Defaults to the current user's namespace.",
229
- )
230
- run_parser.add_argument(
231
- "--token",
232
- type=str,
233
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
234
- )
235
- # Add Docker-style filtering argument
236
- run_parser.add_argument(
340
+ ),
341
+ ] = False,
342
+ namespace: NamespaceOpt = None,
343
+ token: TokenOpt = None,
344
+ filter: Annotated[
345
+ Optional[list[str]],
346
+ typer.Option(
237
347
  "-f",
238
348
  "--filter",
239
- action="append",
240
- default=[],
241
349
  help="Filter output based on conditions provided (format: key=value)",
242
- )
243
- # Add option to format output
244
- run_parser.add_argument(
245
- "--format",
246
- type=str,
350
+ ),
351
+ ] = None,
352
+ format: Annotated[
353
+ Optional[str],
354
+ typer.Option(
247
355
  help="Format output using a custom template",
248
- )
249
- run_parser.set_defaults(func=PsCommand)
250
-
251
- def __init__(self, args: Namespace) -> None:
252
- self.all: bool = args.all
253
- self.namespace: Optional[str] = args.namespace
254
- self.token: Optional[str] = args.token
255
- self.format: Optional[str] = args.format
256
- self.filters: dict[str, str] = {}
257
-
258
- # Parse filter arguments (key=value pairs)
259
- for f in args.filter:
356
+ ),
357
+ ] = None,
358
+ ) -> None:
359
+ try:
360
+ api = get_hf_api(token=token)
361
+ # Fetch jobs data
362
+ jobs = api.list_jobs(namespace=namespace)
363
+ # Define table headers
364
+ table_headers = ["JOB ID", "IMAGE/SPACE", "COMMAND", "CREATED", "STATUS"]
365
+ rows: list[list[Union[str, int]]] = []
366
+
367
+ filters: dict[str, str] = {}
368
+ for f in filter or []:
260
369
  if "=" in f:
261
370
  key, value = f.split("=", 1)
262
- self.filters[key.lower()] = value
371
+ filters[key.lower()] = value
263
372
  else:
264
373
  print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
374
+ # Process jobs data
375
+ for job in jobs:
376
+ # Extract job data for filtering
377
+ status = job.status.stage if job.status else "UNKNOWN"
378
+ if not all and status not in ("RUNNING", "UPDATING"):
379
+ # Skip job if not all jobs should be shown and status doesn't match criteria
380
+ continue
381
+ # Extract job data for output
382
+ job_id = job.id
265
383
 
266
- def run(self) -> None:
267
- """
268
- Fetch and display job information for the current user.
269
- Uses Docker-style filtering with -f/--filter flag and key=value pairs.
270
- """
271
- try:
272
- api = HfApi(token=self.token)
273
-
274
- # Fetch jobs data
275
- jobs = api.list_jobs(namespace=self.namespace)
276
-
277
- # Define table headers
278
- table_headers = ["JOB ID", "IMAGE/SPACE", "COMMAND", "CREATED", "STATUS"]
384
+ # Extract image or space information
385
+ image_or_space = job.docker_image or "N/A"
279
386
 
280
- # Process jobs data
281
- rows = []
387
+ # Extract and format command
388
+ cmd = job.command or []
389
+ command_str = " ".join(cmd) if cmd else "N/A"
282
390
 
283
- for job in jobs:
284
- # Extract job data for filtering
285
- status = job.status.stage if job.status else "UNKNOWN"
391
+ # Extract creation time
392
+ created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
286
393
 
287
- # Skip job if not all jobs should be shown and status doesn't match criteria
288
- if not self.all and status not in ("RUNNING", "UPDATING"):
289
- continue
290
-
291
- # Extract job ID
292
- job_id = job.id
293
-
294
- # Extract image or space information
295
- image_or_space = job.docker_image or "N/A"
296
-
297
- # Extract and format command
298
- command = job.command or []
299
- command_str = " ".join(command) if command else "N/A"
300
-
301
- # Extract creation time
302
- created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
303
-
304
- # Create a dict with all job properties for filtering
305
- job_properties = {
306
- "id": job_id,
307
- "image": image_or_space,
308
- "status": status.lower(),
309
- "command": command_str,
310
- }
311
-
312
- # Check if job matches all filters
313
- if not self._matches_filters(job_properties):
314
- continue
315
-
316
- # Create row
317
- rows.append([job_id, image_or_space, command_str, created_at, status])
318
-
319
- # Handle empty results
320
- if not rows:
321
- filters_msg = ""
322
- if self.filters:
323
- filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
324
-
325
- print(f"No jobs found{filters_msg}")
326
- return
327
-
328
- # Apply custom format if provided or use default tabular format
329
- self._print_output(rows, table_headers)
330
-
331
- except HfHubHTTPError as e:
332
- print(f"Error fetching jobs data: {e}")
333
- except (KeyError, ValueError, TypeError) as e:
334
- print(f"Error processing jobs data: {e}")
335
- except Exception as e:
336
- print(f"Unexpected error - {type(e).__name__}: {e}")
337
-
338
- def _matches_filters(self, job_properties: dict[str, str]) -> bool:
339
- """Check if job matches all specified filters."""
340
- for key, pattern in self.filters.items():
341
- # Check if property exists
342
- if key not in job_properties:
343
- return False
394
+ # Create a dict with all job properties for filtering
395
+ props = {"id": job_id, "image": image_or_space, "status": status.lower(), "command": command_str}
396
+ if not _matches_filters(props, filters):
397
+ continue
344
398
 
345
- # Support pattern matching with wildcards
346
- if "*" in pattern or "?" in pattern:
347
- # Convert glob pattern to regex
348
- regex_pattern = pattern.replace("*", ".*").replace("?", ".")
349
- if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
350
- return False
351
- # Simple substring matching
352
- elif pattern.lower() not in job_properties[key].lower():
353
- return False
399
+ # Create row
400
+ rows.append([job_id, image_or_space, command_str, created_at, status])
354
401
 
355
- return True
356
-
357
- def _print_output(self, rows, headers):
358
- """Print output according to the chosen format."""
359
- if self.format:
360
- # Custom template formatting (simplified)
361
- template = self.format
362
- for row in rows:
363
- line = template
364
- for i, field in enumerate(["id", "image", "command", "created", "status"]):
365
- placeholder = f"{{{{.{field}}}}}"
366
- if placeholder in line:
367
- line = line.replace(placeholder, str(row[i]))
368
- print(line)
369
- else:
370
- # Default tabular format
371
- print(
372
- _tabulate(
373
- rows,
374
- headers=headers,
375
- )
402
+ # Handle empty results
403
+ if not rows:
404
+ filters_msg = (
405
+ f" matching filters: {', '.join([f'{k}={v}' for k, v in filters.items()])}" if filters else ""
376
406
  )
377
-
378
-
379
- class InspectCommand(BaseHuggingfaceCLICommand):
380
- @staticmethod
381
- def register_subcommand(parser: _SubParsersAction) -> None:
382
- run_parser = parser.add_parser("inspect", help="Display detailed information on one or more Jobs")
383
- run_parser.add_argument(
384
- "--namespace",
385
- type=str,
386
- help="The namespace where the job is running. Defaults to the current user's namespace.",
387
- )
388
- run_parser.add_argument(
389
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
390
- )
391
- run_parser.add_argument("job_ids", nargs="...", help="The jobs to inspect")
392
- run_parser.set_defaults(func=InspectCommand)
393
-
394
- def __init__(self, args: Namespace) -> None:
395
- self.namespace: Optional[str] = args.namespace
396
- self.token: Optional[str] = args.token
397
- self.job_ids: list[str] = args.job_ids
398
-
399
- def run(self) -> None:
400
- api = HfApi(token=self.token)
401
- jobs = [api.inspect_job(job_id=job_id, namespace=self.namespace) for job_id in self.job_ids]
402
- print(json.dumps([asdict(job) for job in jobs], indent=4, default=str))
403
-
404
-
405
- class CancelCommand(BaseHuggingfaceCLICommand):
406
- @staticmethod
407
- def register_subcommand(parser: _SubParsersAction) -> None:
408
- run_parser = parser.add_parser("cancel", help="Cancel a Job")
409
- run_parser.add_argument("job_id", type=str, help="Job ID")
410
- run_parser.add_argument(
411
- "--namespace",
412
- type=str,
413
- help="The namespace where the job is running. Defaults to the current user's namespace.",
414
- )
415
- run_parser.add_argument(
416
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
417
- )
418
- run_parser.set_defaults(func=CancelCommand)
419
-
420
- def __init__(self, args: Namespace) -> None:
421
- self.job_id: str = args.job_id
422
- self.namespace = args.namespace
423
- self.token: Optional[str] = args.token
424
-
425
- def run(self) -> None:
426
- api = HfApi(token=self.token)
427
- api.cancel_job(job_id=self.job_id, namespace=self.namespace)
428
-
429
-
430
- class UvCommand(BaseHuggingfaceCLICommand):
431
- """Run UV scripts on Hugging Face infrastructure."""
432
-
433
- @staticmethod
434
- def register_subcommand(parser):
435
- """Register UV run subcommand."""
436
- uv_parser = parser.add_parser(
437
- "uv",
438
- help="Run UV scripts (Python with inline dependencies) on HF infrastructure",
439
- )
440
-
441
- subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)
442
-
443
- # Run command only
444
- run_parser = subparsers.add_parser(
445
- "run",
446
- help="Run a UV script (local file or URL) on HF infrastructure",
447
- )
448
- run_parser.add_argument("script", help="UV script to run (local file or URL)")
449
- run_parser.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
450
- run_parser.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
451
- run_parser.add_argument(
452
- "--repo",
453
- help="Repository name for the script (creates ephemeral if not specified)",
454
- )
455
- run_parser.add_argument(
456
- "--flavor",
457
- type=str,
458
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
459
- )
460
- run_parser.add_argument("-e", "--env", action="append", help="Environment variables")
461
- run_parser.add_argument(
462
- "-s",
463
- "--secrets",
464
- action="append",
465
- help=(
466
- "Set secret environment variables. E.g. --secrets SECRET=value "
467
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
468
- ),
469
- )
470
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
471
- run_parser.add_argument(
472
- "--secrets-file",
473
- type=str,
474
- help="Read in a file of secret environment variables.",
475
- )
476
- run_parser.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
477
- run_parser.add_argument("-d", "--detach", action="store_true", help="Run in background")
478
- run_parser.add_argument(
479
- "--namespace",
480
- type=str,
481
- help="The namespace where the Job will be created. Defaults to the current user's namespace.",
482
- )
483
- run_parser.add_argument("--token", type=str, help="HF token")
484
- # UV options
485
- run_parser.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
486
- run_parser.add_argument(
487
- "-p", "--python", type=str, help="The Python interpreter to use for the run environment"
488
- )
489
- run_parser.set_defaults(func=UvCommand)
490
-
491
- def __init__(self, args: Namespace) -> None:
492
- """Initialize the command with parsed arguments."""
493
- self.script = args.script
494
- self.script_args = args.script_args
495
- self.dependencies = args.with_
496
- self.python = args.python
497
- self.image = args.image
498
- self.env: dict[str, Optional[str]] = {}
499
- if args.env_file:
500
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
501
- for env_value in args.env or []:
502
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
503
- self.secrets: dict[str, Optional[str]] = {}
504
- extended_environ = _get_extended_environ()
505
- if args.secrets_file:
506
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
507
- for secret in args.secrets or []:
508
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
509
- self.flavor: Optional[SpaceHardware] = args.flavor
510
- self.timeout: Optional[str] = args.timeout
511
- self.detach: bool = args.detach
512
- self.namespace: Optional[str] = args.namespace
513
- self.token: Optional[str] = args.token
514
- self._repo = args.repo
515
-
516
- def run(self) -> None:
517
- """Execute UV command."""
518
- logging.set_verbosity(logging.INFO)
519
- api = HfApi(token=self.token)
520
- job = api.run_uv_job(
521
- script=self.script,
522
- script_args=self.script_args,
523
- dependencies=self.dependencies,
524
- python=self.python,
525
- image=self.image,
526
- env=self.env,
527
- secrets=self.secrets,
528
- flavor=self.flavor,
529
- timeout=self.timeout,
530
- namespace=self.namespace,
531
- _repo=self._repo,
532
- )
533
-
534
- # Always print the job ID to the user
535
- print(f"Job started with ID: {job.id}")
536
- print(f"View at: {job.url}")
537
-
538
- if self.detach:
407
+ print(f"No jobs found{filters_msg}")
539
408
  return
540
-
541
- # Now let's stream the logs
542
- for log in api.fetch_job_logs(job_id=job.id):
543
- print(log)
544
-
545
-
546
- def _get_extended_environ() -> dict[str, str]:
547
- extended_environ = os.environ.copy()
548
- if (token := get_token()) is not None:
549
- extended_environ["HF_TOKEN"] = token
550
- return extended_environ
551
-
552
-
553
- class ScheduledJobsCommands(BaseHuggingfaceCLICommand):
554
- @staticmethod
555
- def register_subcommand(parser: _SubParsersAction):
556
- scheduled_jobs_parser = parser.add_parser("scheduled", help="Create and manage scheduled Jobs on the Hub.")
557
- scheduled_jobs_subparsers = scheduled_jobs_parser.add_subparsers(
558
- help="huggingface.co scheduled jobs related commands"
559
- )
560
-
561
- # Show help if no subcommand is provided
562
- scheduled_jobs_parser.set_defaults(func=lambda args: scheduled_jobs_subparsers.print_help())
563
-
564
- # Register commands
565
- ScheduledRunCommand.register_subcommand(scheduled_jobs_subparsers)
566
- ScheduledPsCommand.register_subcommand(scheduled_jobs_subparsers)
567
- ScheduledInspectCommand.register_subcommand(scheduled_jobs_subparsers)
568
- ScheduledDeleteCommand.register_subcommand(scheduled_jobs_subparsers)
569
- ScheduledSuspendCommand.register_subcommand(scheduled_jobs_subparsers)
570
- ScheduledResumeCommand.register_subcommand(scheduled_jobs_subparsers)
571
- ScheduledUvCommand.register_subcommand(scheduled_jobs_subparsers)
572
-
573
-
574
- class ScheduledRunCommand(BaseHuggingfaceCLICommand):
575
- @staticmethod
576
- def register_subcommand(parser: _SubParsersAction) -> None:
577
- run_parser = parser.add_parser("run", help="Schedule a Job")
578
- run_parser.add_argument(
579
- "schedule",
580
- type=str,
581
- help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
582
- )
583
- run_parser.add_argument("image", type=str, help="The Docker image to use.")
584
- run_parser.add_argument(
585
- "--suspend",
586
- action="store_true",
587
- help="Suspend (pause) the scheduled Job",
588
- default=None,
589
- )
590
- run_parser.add_argument(
591
- "--concurrency",
592
- action="store_true",
593
- help="Allow multiple instances of this Job to run concurrently",
594
- default=None,
595
- )
596
- run_parser.add_argument("-e", "--env", action="append", help="Set environment variables. E.g. --env ENV=value")
597
- run_parser.add_argument(
598
- "-s",
599
- "--secrets",
600
- action="append",
601
- help=(
602
- "Set secret environment variables. E.g. --secrets SECRET=value "
603
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
604
- ),
605
- )
606
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
607
- run_parser.add_argument("--secrets-file", type=str, help="Read in a file of secret environment variables.")
608
- run_parser.add_argument(
609
- "--flavor",
610
- type=str,
611
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
612
- )
613
- run_parser.add_argument(
614
- "--timeout",
615
- type=str,
616
- help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
617
- )
618
- run_parser.add_argument(
619
- "--namespace",
620
- type=str,
621
- help="The namespace where the scheduled Job will be created. Defaults to the current user's namespace.",
622
- )
623
- run_parser.add_argument(
624
- "--token",
625
- type=str,
626
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
627
- )
628
- run_parser.add_argument("command", nargs="...", help="The command to run.")
629
- run_parser.set_defaults(func=ScheduledRunCommand)
630
-
631
- def __init__(self, args: Namespace) -> None:
632
- self.schedule: str = args.schedule
633
- self.image: str = args.image
634
- self.command: list[str] = args.command
635
- self.suspend: Optional[bool] = args.suspend
636
- self.concurrency: Optional[bool] = args.concurrency
637
- self.env: dict[str, Optional[str]] = {}
638
- if args.env_file:
639
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
640
- for env_value in args.env or []:
641
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
642
- self.secrets: dict[str, Optional[str]] = {}
643
- extended_environ = _get_extended_environ()
644
- if args.secrets_file:
645
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
646
- for secret in args.secrets or []:
647
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
648
- self.flavor: Optional[SpaceHardware] = args.flavor
649
- self.timeout: Optional[str] = args.timeout
650
- self.namespace: Optional[str] = args.namespace
651
- self.token: Optional[str] = args.token
652
-
653
- def run(self) -> None:
654
- api = HfApi(token=self.token)
655
- scheduled_job = api.create_scheduled_job(
656
- image=self.image,
657
- command=self.command,
658
- schedule=self.schedule,
659
- suspend=self.suspend,
660
- concurrency=self.concurrency,
661
- env=self.env,
662
- secrets=self.secrets,
663
- flavor=self.flavor,
664
- timeout=self.timeout,
665
- namespace=self.namespace,
666
- )
667
- # Always print the scheduled job ID to the user
668
- print(f"Scheduled Job created with ID: {scheduled_job.id}")
669
-
670
-
671
- class ScheduledPsCommand(BaseHuggingfaceCLICommand):
672
- @staticmethod
673
- def register_subcommand(parser: _SubParsersAction) -> None:
674
- run_parser = parser.add_parser("ps", help="List scheduled Jobs")
675
- run_parser.add_argument(
409
+ # Apply custom format if provided or use default tabular format
410
+ _print_output(rows, table_headers, format)
411
+
412
+ except HfHubHTTPError as e:
413
+ print(f"Error fetching jobs data: {e}")
414
+ except (KeyError, ValueError, TypeError) as e:
415
+ print(f"Error processing jobs data: {e}")
416
+ except Exception as e:
417
+ print(f"Unexpected error - {type(e).__name__}: {e}")
418
+
419
+
420
+ @jobs_cli.command("inspect", help="Display detailed information on one or more Jobs")
421
+ def jobs_inspect(
422
+ job_ids: Annotated[
423
+ list[str],
424
+ typer.Argument(
425
+ help="The jobs to inspect",
426
+ ),
427
+ ],
428
+ namespace: NamespaceOpt = None,
429
+ token: TokenOpt = None,
430
+ ) -> None:
431
+ api = get_hf_api(token=token)
432
+ jobs = [api.inspect_job(job_id=job_id, namespace=namespace) for job_id in job_ids]
433
+ print(json.dumps([asdict(job) for job in jobs], indent=4, default=str))
434
+
435
+
436
+ @jobs_cli.command("cancel", help="Cancel a Job")
437
+ def jobs_cancel(
438
+ job_id: JobIdArg,
439
+ namespace: NamespaceOpt = None,
440
+ token: TokenOpt = None,
441
+ ) -> None:
442
+ api = get_hf_api(token=token)
443
+ api.cancel_job(job_id=job_id, namespace=namespace)
444
+
445
+
446
+ uv_app = typer_factory(help="Run UV scripts (Python with inline dependencies) on HF infrastructure")
447
+ jobs_cli.add_typer(uv_app, name="uv")
448
+
449
+
450
+ @uv_app.command("run", help="Run a UV script (local file or URL) on HF infrastructure")
451
+ def jobs_uv_run(
452
+ script: ScriptArg,
453
+ script_args: ScriptArgsArg = None,
454
+ image: ImageOpt = None,
455
+ repo: RepoOpt = None,
456
+ flavor: FlavorOpt = None,
457
+ env: EnvOpt = None,
458
+ secrets: SecretsOpt = None,
459
+ env_file: EnvFileOpt = None,
460
+ secrets_file: SecretsFileOpt = None,
461
+ timeout: TimeoutOpt = None,
462
+ detach: DetachOpt = False,
463
+ namespace: NamespaceOpt = None,
464
+ token: TokenOpt = None,
465
+ with_: WithOpt = None,
466
+ python: PythonOpt = None,
467
+ ) -> None:
468
+ env_map: dict[str, Optional[str]] = {}
469
+ if env_file:
470
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
471
+ for env_value in env or []:
472
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
473
+ secrets_map: dict[str, Optional[str]] = {}
474
+ extended_environ = _get_extended_environ()
475
+ if secrets_file:
476
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
477
+ for secret in secrets or []:
478
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
479
+
480
+ api = get_hf_api(token=token)
481
+ job = api.run_uv_job(
482
+ script=script,
483
+ script_args=script_args or [],
484
+ dependencies=with_,
485
+ python=python,
486
+ image=image,
487
+ env=env_map,
488
+ secrets=secrets_map,
489
+ flavor=flavor, # type: ignore[arg-type]
490
+ timeout=timeout,
491
+ namespace=namespace,
492
+ _repo=repo,
493
+ )
494
+ # Always print the job ID to the user
495
+ print(f"Job started with ID: {job.id}")
496
+ print(f"View at: {job.url}")
497
+ if detach:
498
+ return
499
+ # Now let's stream the logs
500
+ for log in api.fetch_job_logs(job_id=job.id):
501
+ print(log)
502
+
503
+
504
+ scheduled_app = typer_factory(help="Create and manage scheduled Jobs on the Hub.")
505
+ jobs_cli.add_typer(scheduled_app, name="scheduled")
506
+
507
+
508
+ @scheduled_app.command("run", help="Schedule a Job")
509
+ def scheduled_run(
510
+ schedule: ScheduleArg,
511
+ image: ImageArg,
512
+ command: CommandArg,
513
+ suspend: SuspendOpt = None,
514
+ concurrency: ConcurrencyOpt = None,
515
+ env: EnvOpt = None,
516
+ secrets: SecretsOpt = None,
517
+ env_file: EnvFileOpt = None,
518
+ secrets_file: SecretsFileOpt = None,
519
+ flavor: FlavorOpt = None,
520
+ timeout: TimeoutOpt = None,
521
+ namespace: NamespaceOpt = None,
522
+ token: TokenOpt = None,
523
+ ) -> None:
524
+ env_map: dict[str, Optional[str]] = {}
525
+ if env_file:
526
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
527
+ for env_value in env or []:
528
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
529
+ secrets_map: dict[str, Optional[str]] = {}
530
+ extended_environ = _get_extended_environ()
531
+ if secrets_file:
532
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
533
+ for secret in secrets or []:
534
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
535
+
536
+ api = get_hf_api(token=token)
537
+ scheduled_job = api.create_scheduled_job(
538
+ image=image,
539
+ command=command,
540
+ schedule=schedule,
541
+ suspend=suspend,
542
+ concurrency=concurrency,
543
+ env=env_map,
544
+ secrets=secrets_map,
545
+ flavor=flavor,
546
+ timeout=timeout,
547
+ namespace=namespace,
548
+ )
549
+ print(f"Scheduled Job created with ID: {scheduled_job.id}")
550
+
551
+
552
+ @scheduled_app.command("ps", help="List scheduled Jobs")
553
+ def scheduled_ps(
554
+ all: Annotated[
555
+ bool,
556
+ typer.Option(
676
557
  "-a",
677
558
  "--all",
678
- action="store_true",
679
559
  help="Show all scheduled Jobs (default hides suspended)",
680
- )
681
- run_parser.add_argument(
682
- "--namespace",
683
- type=str,
684
- help="The namespace from where it lists the jobs. Defaults to the current user's namespace.",
685
- )
686
- run_parser.add_argument(
687
- "--token",
688
- type=str,
689
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
690
- )
691
- # Add Docker-style filtering argument
692
- run_parser.add_argument(
560
+ ),
561
+ ] = False,
562
+ namespace: NamespaceOpt = None,
563
+ token: TokenOpt = None,
564
+ filter: Annotated[
565
+ Optional[list[str]],
566
+ typer.Option(
693
567
  "-f",
694
568
  "--filter",
695
- action="append",
696
- default=[],
697
569
  help="Filter output based on conditions provided (format: key=value)",
698
- )
699
- # Add option to format output
700
- run_parser.add_argument(
570
+ ),
571
+ ] = None,
572
+ format: Annotated[
573
+ Optional[str],
574
+ typer.Option(
701
575
  "--format",
702
- type=str,
703
576
  help="Format output using a custom template",
704
- )
705
- run_parser.set_defaults(func=ScheduledPsCommand)
706
-
707
- def __init__(self, args: Namespace) -> None:
708
- self.all: bool = args.all
709
- self.namespace: Optional[str] = args.namespace
710
- self.token: Optional[str] = args.token
711
- self.format: Optional[str] = args.format
712
- self.filters: dict[str, str] = {}
713
-
714
- # Parse filter arguments (key=value pairs)
715
- for f in args.filter:
577
+ ),
578
+ ] = None,
579
+ ) -> None:
580
+ try:
581
+ api = get_hf_api(token=token)
582
+ scheduled_jobs = api.list_scheduled_jobs(namespace=namespace)
583
+ table_headers = ["ID", "SCHEDULE", "IMAGE/SPACE", "COMMAND", "LAST RUN", "NEXT RUN", "SUSPEND"]
584
+ rows: list[list[Union[str, int]]] = []
585
+ filters: dict[str, str] = {}
586
+ for f in filter or []:
716
587
  if "=" in f:
717
588
  key, value = f.split("=", 1)
718
- self.filters[key.lower()] = value
589
+ filters[key.lower()] = value
719
590
  else:
720
591
  print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
721
592
 
722
- def run(self) -> None:
723
- """
724
- Fetch and display scheduked job information for the current user.
725
- Uses Docker-style filtering with -f/--filter flag and key=value pairs.
726
- """
727
- try:
728
- api = HfApi(token=self.token)
729
-
730
- # Fetch jobs data
731
- scheduled_jobs = api.list_scheduled_jobs(namespace=self.namespace)
732
-
733
- # Define table headers
734
- table_headers = [
735
- "ID",
736
- "SCHEDULE",
737
- "IMAGE/SPACE",
738
- "COMMAND",
739
- "LAST RUN",
740
- "NEXT RUN",
741
- "SUSPEND",
742
- ]
743
-
744
- # Process jobs data
745
- rows = []
746
-
747
- for scheduled_job in scheduled_jobs:
748
- # Extract job data for filtering
749
- suspend = scheduled_job.suspend
593
+ for scheduled_job in scheduled_jobs:
594
+ suspend = scheduled_job.suspend or False
595
+ if not all and suspend:
596
+ continue
597
+ sj_id = scheduled_job.id
598
+ schedule = scheduled_job.schedule or "N/A"
599
+ image_or_space = scheduled_job.job_spec.docker_image or "N/A"
600
+ cmd = scheduled_job.job_spec.command or []
601
+ command_str = " ".join(cmd) if cmd else "N/A"
602
+ last_job_at = (
603
+ scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
604
+ if scheduled_job.status.last_job
605
+ else "N/A"
606
+ )
607
+ next_job_run_at = (
608
+ scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
609
+ if scheduled_job.status.next_job_run_at
610
+ else "N/A"
611
+ )
612
+ props = {"id": sj_id, "image": image_or_space, "suspend": str(suspend), "command": command_str}
613
+ if not _matches_filters(props, filters):
614
+ continue
615
+ rows.append([sj_id, schedule, image_or_space, command_str, last_job_at, next_job_run_at, suspend])
616
+
617
+ if not rows:
618
+ filters_msg = (
619
+ f" matching filters: {', '.join([f'{k}={v}' for k, v in filters.items()])}" if filters else ""
620
+ )
621
+ print(f"No scheduled jobs found{filters_msg}")
622
+ return
623
+ _print_output(rows, table_headers, format)
624
+
625
+ except HfHubHTTPError as e:
626
+ print(f"Error fetching scheduled jobs data: {e}")
627
+ except (KeyError, ValueError, TypeError) as e:
628
+ print(f"Error processing scheduled jobs data: {e}")
629
+ except Exception as e:
630
+ print(f"Unexpected error - {type(e).__name__}: {e}")
631
+
632
+
633
+ @scheduled_app.command("inspect", help="Display detailed information on one or more scheduled Jobs")
634
+ def scheduled_inspect(
635
+ scheduled_job_ids: Annotated[
636
+ list[str],
637
+ typer.Argument(
638
+ help="The scheduled jobs to inspect",
639
+ ),
640
+ ],
641
+ namespace: NamespaceOpt = None,
642
+ token: TokenOpt = None,
643
+ ) -> None:
644
+ api = get_hf_api(token=token)
645
+ scheduled_jobs = [
646
+ api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
647
+ for scheduled_job_id in scheduled_job_ids
648
+ ]
649
+ print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
650
+
651
+
652
+ @scheduled_app.command("delete", help="Delete a scheduled Job")
653
+ def scheduled_delete(
654
+ scheduled_job_id: ScheduledJobIdArg,
655
+ namespace: NamespaceOpt = None,
656
+ token: TokenOpt = None,
657
+ ) -> None:
658
+ api = get_hf_api(token=token)
659
+ api.delete_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
660
+
661
+
662
+ @scheduled_app.command("suspend", help="Suspend (pause) a scheduled Job")
663
+ def scheduled_suspend(
664
+ scheduled_job_id: ScheduledJobIdArg,
665
+ namespace: NamespaceOpt = None,
666
+ token: TokenOpt = None,
667
+ ) -> None:
668
+ api = get_hf_api(token=token)
669
+ api.suspend_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
670
+
671
+
672
+ @scheduled_app.command("resume", help="Resume (unpause) a scheduled Job")
673
+ def scheduled_resume(
674
+ scheduled_job_id: ScheduledJobIdArg,
675
+ namespace: NamespaceOpt = None,
676
+ token: TokenOpt = None,
677
+ ) -> None:
678
+ api = get_hf_api(token=token)
679
+ api.resume_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
680
+
681
+
682
+ scheduled_uv_app = typer_factory(help="Schedule UV scripts on HF infrastructure")
683
+ scheduled_app.add_typer(scheduled_uv_app, name="uv")
684
+
685
+
686
+ @scheduled_uv_app.command("run", help="Run a UV script (local file or URL) on HF infrastructure")
687
+ def scheduled_uv_run(
688
+ schedule: ScheduleArg,
689
+ script: ScriptArg,
690
+ script_args: ScriptArgsArg = None,
691
+ suspend: SuspendOpt = None,
692
+ concurrency: ConcurrencyOpt = None,
693
+ image: ImageOpt = None,
694
+ repo: RepoOpt = None,
695
+ flavor: FlavorOpt = None,
696
+ env: EnvOpt = None,
697
+ secrets: SecretsOpt = None,
698
+ env_file: EnvFileOpt = None,
699
+ secrets_file: SecretsFileOpt = None,
700
+ timeout: TimeoutOpt = None,
701
+ namespace: NamespaceOpt = None,
702
+ token: TokenOpt = None,
703
+ with_: WithOpt = None,
704
+ python: PythonOpt = None,
705
+ ) -> None:
706
+ env_map: dict[str, Optional[str]] = {}
707
+ if env_file:
708
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
709
+ for env_value in env or []:
710
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
711
+ secrets_map: dict[str, Optional[str]] = {}
712
+ extended_environ = _get_extended_environ()
713
+ if secrets_file:
714
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
715
+ for secret in secrets or []:
716
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
717
+
718
+ api = get_hf_api(token=token)
719
+ job = api.create_scheduled_uv_job(
720
+ script=script,
721
+ script_args=script_args or [],
722
+ schedule=schedule,
723
+ suspend=suspend,
724
+ concurrency=concurrency,
725
+ dependencies=with_,
726
+ python=python,
727
+ image=image,
728
+ env=env_map,
729
+ secrets=secrets_map,
730
+ flavor=flavor, # type: ignore[arg-type]
731
+ timeout=timeout,
732
+ namespace=namespace,
733
+ _repo=repo,
734
+ )
735
+ print(f"Scheduled Job created with ID: {job.id}")
736
+
737
+
738
+ ### UTILS
750
739
 
751
- # Skip job if not all jobs should be shown and status doesn't match criteria
752
- if not self.all and suspend:
753
- continue
754
-
755
- # Extract job ID
756
- scheduled_job_id = scheduled_job.id
757
-
758
- # Extract schedule
759
- schedule = scheduled_job.schedule
760
-
761
- # Extract image or space information
762
- image_or_space = scheduled_job.job_spec.docker_image or "N/A"
763
-
764
- # Extract and format command
765
- command = scheduled_job.job_spec.command or []
766
- command_str = " ".join(command) if command else "N/A"
767
-
768
- # Extract status
769
- last_job_at = (
770
- scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
771
- if scheduled_job.status.last_job
772
- else "N/A"
773
- )
774
- next_job_run_at = (
775
- scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
776
- if scheduled_job.status.next_job_run_at
777
- else "N/A"
778
- )
779
-
780
- # Create a dict with all job properties for filtering
781
- job_properties = {
782
- "id": scheduled_job_id,
783
- "image": image_or_space,
784
- "suspend": str(suspend),
785
- "command": command_str,
786
- }
787
-
788
- # Check if job matches all filters
789
- if not self._matches_filters(job_properties):
790
- continue
791
-
792
- # Create row
793
- rows.append(
794
- [
795
- scheduled_job_id,
796
- schedule,
797
- image_or_space,
798
- command_str,
799
- last_job_at,
800
- next_job_run_at,
801
- suspend,
802
- ]
803
- )
804
-
805
- # Handle empty results
806
- if not rows:
807
- filters_msg = ""
808
- if self.filters:
809
- filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
810
-
811
- print(f"No scheduled jobs found{filters_msg}")
812
- return
813
-
814
- # Apply custom format if provided or use default tabular format
815
- self._print_output(rows, table_headers)
816
-
817
- except HfHubHTTPError as e:
818
- print(f"Error fetching scheduled jobs data: {e}")
819
- except (KeyError, ValueError, TypeError) as e:
820
- print(f"Error processing scheduled jobs data: {e}")
821
- except Exception as e:
822
- print(f"Unexpected error - {type(e).__name__}: {e}")
823
-
824
- def _matches_filters(self, job_properties: dict[str, str]) -> bool:
825
- """Check if scheduled job matches all specified filters."""
826
- for key, pattern in self.filters.items():
827
- # Check if property exists
828
- if key not in job_properties:
829
- return False
830
740
 
831
- # Support pattern matching with wildcards
832
- if "*" in pattern or "?" in pattern:
833
- # Convert glob pattern to regex
834
- regex_pattern = pattern.replace("*", ".*").replace("?", ".")
835
- if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
836
- return False
837
- # Simple substring matching
838
- elif pattern.lower() not in job_properties[key].lower():
839
- return False
741
+ def _tabulate(rows: list[list[Union[str, int]]], headers: list[str]) -> str:
742
+ """
743
+ Inspired by:
840
744
 
841
- return True
842
-
843
- def _print_output(self, rows, headers):
844
- """Print output according to the chosen format."""
845
- if self.format:
846
- # Custom template formatting (simplified)
847
- template = self.format
848
- for row in rows:
849
- line = template
850
- for i, field in enumerate(
851
- ["id", "schedule", "image", "command", "last_job_at", "next_job_run_at", "suspend"]
852
- ):
853
- placeholder = f"{{{{.{field}}}}}"
854
- if placeholder in line:
855
- line = line.replace(placeholder, str(row[i]))
856
- print(line)
857
- else:
858
- # Default tabular format
859
- print(
860
- _tabulate(
861
- rows,
862
- headers=headers,
863
- )
864
- )
745
+ - stackoverflow.com/a/8356620/593036
746
+ - stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
747
+ """
748
+ col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
749
+ terminal_width = max(os.get_terminal_size().columns, len(headers) * 12)
750
+ while len(headers) + sum(col_widths) > terminal_width:
751
+ col_to_minimize = col_widths.index(max(col_widths))
752
+ col_widths[col_to_minimize] //= 2
753
+ if len(headers) + sum(col_widths) <= terminal_width:
754
+ col_widths[col_to_minimize] = terminal_width - sum(col_widths) - len(headers) + col_widths[col_to_minimize]
755
+ row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
756
+ lines = []
757
+ lines.append(row_format.format(*headers))
758
+ lines.append(row_format.format(*["-" * w for w in col_widths]))
759
+ for row in rows:
760
+ row_format_args = [
761
+ str(x)[: col_width - 3] + "..." if len(str(x)) > col_width else str(x)
762
+ for x, col_width in zip(row, col_widths)
763
+ ]
764
+ lines.append(row_format.format(*row_format_args))
765
+ return "\n".join(lines)
865
766
 
866
767
 
867
- class ScheduledInspectCommand(BaseHuggingfaceCLICommand):
868
- @staticmethod
869
- def register_subcommand(parser: _SubParsersAction) -> None:
870
- run_parser = parser.add_parser("inspect", help="Display detailed information on one or more scheduled Jobs")
871
- run_parser.add_argument(
872
- "--namespace",
873
- type=str,
874
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
875
- )
876
- run_parser.add_argument(
877
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
878
- )
879
- run_parser.add_argument("scheduled_job_ids", nargs="...", help="The scheduled jobs to inspect")
880
- run_parser.set_defaults(func=ScheduledInspectCommand)
881
-
882
- def __init__(self, args: Namespace) -> None:
883
- self.namespace: Optional[str] = args.namespace
884
- self.token: Optional[str] = args.token
885
- self.scheduled_job_ids: list[str] = args.scheduled_job_ids
886
-
887
- def run(self) -> None:
888
- api = HfApi(token=self.token)
889
- scheduled_jobs = [
890
- api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=self.namespace)
891
- for scheduled_job_id in self.scheduled_job_ids
892
- ]
893
- print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
894
-
895
-
896
- class ScheduledDeleteCommand(BaseHuggingfaceCLICommand):
897
- @staticmethod
898
- def register_subcommand(parser: _SubParsersAction) -> None:
899
- run_parser = parser.add_parser("delete", help="Delete a scheduled Job")
900
- run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
901
- run_parser.add_argument(
902
- "--namespace",
903
- type=str,
904
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
905
- )
906
- run_parser.add_argument(
907
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
908
- )
909
- run_parser.set_defaults(func=ScheduledDeleteCommand)
910
-
911
- def __init__(self, args: Namespace) -> None:
912
- self.scheduled_job_id: str = args.scheduled_job_id
913
- self.namespace = args.namespace
914
- self.token: Optional[str] = args.token
915
-
916
- def run(self) -> None:
917
- api = HfApi(token=self.token)
918
- api.delete_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
919
-
920
-
921
- class ScheduledSuspendCommand(BaseHuggingfaceCLICommand):
922
- @staticmethod
923
- def register_subcommand(parser: _SubParsersAction) -> None:
924
- run_parser = parser.add_parser("suspend", help="Suspend (pause) a scheduled Job")
925
- run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
926
- run_parser.add_argument(
927
- "--namespace",
928
- type=str,
929
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
930
- )
931
- run_parser.add_argument(
932
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
933
- )
934
- run_parser.set_defaults(func=ScheduledSuspendCommand)
935
-
936
- def __init__(self, args: Namespace) -> None:
937
- self.scheduled_job_id: str = args.scheduled_job_id
938
- self.namespace = args.namespace
939
- self.token: Optional[str] = args.token
940
-
941
- def run(self) -> None:
942
- api = HfApi(token=self.token)
943
- api.suspend_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
944
-
945
-
946
- class ScheduledResumeCommand(BaseHuggingfaceCLICommand):
947
- @staticmethod
948
- def register_subcommand(parser: _SubParsersAction) -> None:
949
- run_parser = parser.add_parser("resume", help="Resume (unpause) a scheduled Job")
950
- run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
951
- run_parser.add_argument(
952
- "--namespace",
953
- type=str,
954
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
955
- )
956
- run_parser.add_argument(
957
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
958
- )
959
- run_parser.set_defaults(func=ScheduledResumeCommand)
960
-
961
- def __init__(self, args: Namespace) -> None:
962
- self.scheduled_job_id: str = args.scheduled_job_id
963
- self.namespace = args.namespace
964
- self.token: Optional[str] = args.token
965
-
966
- def run(self) -> None:
967
- api = HfApi(token=self.token)
968
- api.resume_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
969
-
970
-
971
- class ScheduledUvCommand(BaseHuggingfaceCLICommand):
972
- """Schedule UV scripts on Hugging Face infrastructure."""
973
-
974
- @staticmethod
975
- def register_subcommand(parser):
976
- """Register UV run subcommand."""
977
- uv_parser = parser.add_parser(
978
- "uv",
979
- help="Schedule UV scripts (Python with inline dependencies) on HF infrastructure",
980
- )
981
-
982
- subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)
983
-
984
- # Run command only
985
- run_parser = subparsers.add_parser(
986
- "run",
987
- help="Run a UV script (local file or URL) on HF infrastructure",
988
- )
989
- run_parser.add_argument(
990
- "schedule",
991
- type=str,
992
- help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
993
- )
994
- run_parser.add_argument("script", help="UV script to run (local file or URL)")
995
- run_parser.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
996
- run_parser.add_argument(
997
- "--suspend",
998
- action="store_true",
999
- help="Suspend (pause) the scheduled Job",
1000
- default=None,
1001
- )
1002
- run_parser.add_argument(
1003
- "--concurrency",
1004
- action="store_true",
1005
- help="Allow multiple instances of this Job to run concurrently",
1006
- default=None,
1007
- )
1008
- run_parser.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
1009
- run_parser.add_argument(
1010
- "--repo",
1011
- help="Repository name for the script (creates ephemeral if not specified)",
1012
- )
1013
- run_parser.add_argument(
1014
- "--flavor",
1015
- type=str,
1016
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
1017
- )
1018
- run_parser.add_argument("-e", "--env", action="append", help="Environment variables")
1019
- run_parser.add_argument(
1020
- "-s",
1021
- "--secrets",
1022
- action="append",
1023
- help=(
1024
- "Set secret environment variables. E.g. --secrets SECRET=value "
1025
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
1026
- ),
1027
- )
1028
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
1029
- run_parser.add_argument(
1030
- "--secrets-file",
1031
- type=str,
1032
- help="Read in a file of secret environment variables.",
1033
- )
1034
- run_parser.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
1035
- run_parser.add_argument("-d", "--detach", action="store_true", help="Run in background")
1036
- run_parser.add_argument(
1037
- "--namespace",
1038
- type=str,
1039
- help="The namespace where the Job will be created. Defaults to the current user's namespace.",
1040
- )
1041
- run_parser.add_argument("--token", type=str, help="HF token")
1042
- # UV options
1043
- run_parser.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
1044
- run_parser.add_argument(
1045
- "-p", "--python", type=str, help="The Python interpreter to use for the run environment"
1046
- )
1047
- run_parser.set_defaults(func=ScheduledUvCommand)
1048
-
1049
- def __init__(self, args: Namespace) -> None:
1050
- """Initialize the command with parsed arguments."""
1051
- self.schedule: str = args.schedule
1052
- self.script = args.script
1053
- self.script_args = args.script_args
1054
- self.suspend: Optional[bool] = args.suspend
1055
- self.concurrency: Optional[bool] = args.concurrency
1056
- self.dependencies = args.with_
1057
- self.python = args.python
1058
- self.image = args.image
1059
- self.env: dict[str, Optional[str]] = {}
1060
- if args.env_file:
1061
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
1062
- for env_value in args.env or []:
1063
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
1064
- self.secrets: dict[str, Optional[str]] = {}
1065
- extended_environ = _get_extended_environ()
1066
- if args.secrets_file:
1067
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
1068
- for secret in args.secrets or []:
1069
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
1070
- self.flavor: Optional[SpaceHardware] = args.flavor
1071
- self.timeout: Optional[str] = args.timeout
1072
- self.detach: bool = args.detach
1073
- self.namespace: Optional[str] = args.namespace
1074
- self.token: Optional[str] = args.token
1075
- self._repo = args.repo
1076
-
1077
- def run(self) -> None:
1078
- """Schedule UV command."""
1079
- logging.set_verbosity(logging.INFO)
1080
- api = HfApi(token=self.token)
1081
- job = api.create_scheduled_uv_job(
1082
- script=self.script,
1083
- script_args=self.script_args,
1084
- schedule=self.schedule,
1085
- suspend=self.suspend,
1086
- concurrency=self.concurrency,
1087
- dependencies=self.dependencies,
1088
- python=self.python,
1089
- image=self.image,
1090
- env=self.env,
1091
- secrets=self.secrets,
1092
- flavor=self.flavor,
1093
- timeout=self.timeout,
1094
- namespace=self.namespace,
1095
- _repo=self._repo,
1096
- )
1097
-
1098
- # Always print the job ID to the user
1099
- print(f"Scheduled Job created with ID: {job.id}")
768
+ def _get_extended_environ() -> Dict[str, str]:
769
+ extended_environ = os.environ.copy()
770
+ if (token := get_token()) is not None:
771
+ extended_environ["HF_TOKEN"] = token
772
+ return extended_environ