huggingface-hub 0.35.1__py3-none-any.whl → 1.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub might be problematic. Click here for more details.

Files changed (127) hide show
  1. huggingface_hub/__init__.py +28 -45
  2. huggingface_hub/_commit_api.py +28 -28
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +20 -20
  6. huggingface_hub/_login.py +13 -39
  7. huggingface_hub/_oauth.py +8 -8
  8. huggingface_hub/_snapshot_download.py +14 -28
  9. huggingface_hub/_space_api.py +4 -4
  10. huggingface_hub/_tensorboard_logger.py +5 -5
  11. huggingface_hub/_upload_large_folder.py +15 -15
  12. huggingface_hub/_webhooks_payload.py +3 -3
  13. huggingface_hub/_webhooks_server.py +2 -2
  14. huggingface_hub/cli/__init__.py +0 -14
  15. huggingface_hub/cli/_cli_utils.py +80 -3
  16. huggingface_hub/cli/auth.py +104 -150
  17. huggingface_hub/cli/cache.py +102 -126
  18. huggingface_hub/cli/download.py +93 -110
  19. huggingface_hub/cli/hf.py +37 -41
  20. huggingface_hub/cli/jobs.py +689 -1017
  21. huggingface_hub/cli/lfs.py +120 -143
  22. huggingface_hub/cli/repo.py +158 -216
  23. huggingface_hub/cli/repo_files.py +50 -84
  24. huggingface_hub/cli/system.py +6 -25
  25. huggingface_hub/cli/upload.py +198 -212
  26. huggingface_hub/cli/upload_large_folder.py +90 -105
  27. huggingface_hub/commands/_cli_utils.py +2 -2
  28. huggingface_hub/commands/delete_cache.py +11 -11
  29. huggingface_hub/commands/download.py +4 -13
  30. huggingface_hub/commands/lfs.py +4 -4
  31. huggingface_hub/commands/repo_files.py +2 -2
  32. huggingface_hub/commands/tag.py +1 -3
  33. huggingface_hub/commands/upload.py +4 -4
  34. huggingface_hub/commands/upload_large_folder.py +3 -3
  35. huggingface_hub/commands/user.py +4 -5
  36. huggingface_hub/community.py +5 -5
  37. huggingface_hub/constants.py +3 -41
  38. huggingface_hub/dataclasses.py +16 -22
  39. huggingface_hub/errors.py +43 -30
  40. huggingface_hub/fastai_utils.py +8 -9
  41. huggingface_hub/file_download.py +154 -253
  42. huggingface_hub/hf_api.py +329 -558
  43. huggingface_hub/hf_file_system.py +104 -62
  44. huggingface_hub/hub_mixin.py +32 -54
  45. huggingface_hub/inference/_client.py +178 -163
  46. huggingface_hub/inference/_common.py +38 -54
  47. huggingface_hub/inference/_generated/_async_client.py +219 -259
  48. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  49. huggingface_hub/inference/_generated/types/base.py +10 -7
  50. huggingface_hub/inference/_generated/types/chat_completion.py +16 -16
  51. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  52. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  53. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  54. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  55. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  56. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  57. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  58. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  59. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  60. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  61. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  62. huggingface_hub/inference/_generated/types/translation.py +2 -2
  63. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  64. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  65. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  66. huggingface_hub/inference/_mcp/agent.py +3 -3
  67. huggingface_hub/inference/_mcp/constants.py +1 -2
  68. huggingface_hub/inference/_mcp/mcp_client.py +33 -22
  69. huggingface_hub/inference/_mcp/types.py +10 -10
  70. huggingface_hub/inference/_mcp/utils.py +4 -4
  71. huggingface_hub/inference/_providers/__init__.py +2 -13
  72. huggingface_hub/inference/_providers/_common.py +24 -25
  73. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  74. huggingface_hub/inference/_providers/cohere.py +3 -3
  75. huggingface_hub/inference/_providers/fal_ai.py +25 -25
  76. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  77. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  78. huggingface_hub/inference/_providers/hf_inference.py +13 -13
  79. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  80. huggingface_hub/inference/_providers/nebius.py +10 -10
  81. huggingface_hub/inference/_providers/novita.py +5 -5
  82. huggingface_hub/inference/_providers/nscale.py +4 -4
  83. huggingface_hub/inference/_providers/replicate.py +15 -15
  84. huggingface_hub/inference/_providers/sambanova.py +6 -6
  85. huggingface_hub/inference/_providers/together.py +7 -7
  86. huggingface_hub/lfs.py +24 -33
  87. huggingface_hub/repocard.py +16 -17
  88. huggingface_hub/repocard_data.py +56 -56
  89. huggingface_hub/serialization/__init__.py +0 -1
  90. huggingface_hub/serialization/_base.py +9 -9
  91. huggingface_hub/serialization/_dduf.py +7 -7
  92. huggingface_hub/serialization/_torch.py +28 -28
  93. huggingface_hub/utils/__init__.py +10 -4
  94. huggingface_hub/utils/_auth.py +5 -5
  95. huggingface_hub/utils/_cache_manager.py +31 -31
  96. huggingface_hub/utils/_deprecation.py +1 -1
  97. huggingface_hub/utils/_dotenv.py +3 -3
  98. huggingface_hub/utils/_fixes.py +0 -10
  99. huggingface_hub/utils/_git_credential.py +3 -3
  100. huggingface_hub/utils/_headers.py +7 -29
  101. huggingface_hub/utils/_http.py +369 -209
  102. huggingface_hub/utils/_pagination.py +4 -4
  103. huggingface_hub/utils/_paths.py +5 -5
  104. huggingface_hub/utils/_runtime.py +15 -13
  105. huggingface_hub/utils/_safetensors.py +21 -21
  106. huggingface_hub/utils/_subprocess.py +9 -9
  107. huggingface_hub/utils/_telemetry.py +3 -3
  108. huggingface_hub/utils/_typing.py +3 -3
  109. huggingface_hub/utils/_validators.py +53 -72
  110. huggingface_hub/utils/_xet.py +16 -16
  111. huggingface_hub/utils/_xet_progress_reporting.py +1 -1
  112. huggingface_hub/utils/insecure_hashlib.py +3 -9
  113. huggingface_hub/utils/tqdm.py +3 -3
  114. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/METADATA +17 -26
  115. huggingface_hub-1.0.0rc1.dist-info/RECORD +161 -0
  116. huggingface_hub/inference/_providers/publicai.py +0 -6
  117. huggingface_hub/inference/_providers/scaleway.py +0 -28
  118. huggingface_hub/inference_api.py +0 -217
  119. huggingface_hub/keras_mixin.py +0 -500
  120. huggingface_hub/repository.py +0 -1477
  121. huggingface_hub/serialization/_tensorflow.py +0 -95
  122. huggingface_hub/utils/_hf_folder.py +0 -68
  123. huggingface_hub-0.35.1.dist-info/RECORD +0 -168
  124. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/LICENSE +0 -0
  125. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/WHEEL +0 -0
  126. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/entry_points.txt +0 -0
  127. {huggingface_hub-0.35.1.dist-info → huggingface_hub-1.0.0rc1.dist-info}/top_level.txt +0 -0
@@ -28,1073 +28,745 @@ Usage:
28
28
 
29
29
  # Cancel a running job
30
30
  hf jobs cancel <job-id>
31
+
32
+ # Run a UV script
33
+ hf jobs uv run <script>
34
+
35
+ # Schedule a job
36
+ hf jobs scheduled run <schedule> <image> <command>
37
+
38
+ # List scheduled jobs
39
+ hf jobs scheduled ps [-a] [-f key=value] [--format TEMPLATE]
40
+
41
+ # Inspect a scheduled job
42
+ hf jobs scheduled inspect <scheduled_job_id>
43
+
44
+ # Suspend a scheduled job
45
+ hf jobs scheduled suspend <scheduled_job_id>
46
+
47
+ # Resume a scheduled job
48
+ hf jobs scheduled resume <scheduled_job_id>
49
+
50
+ # Delete a scheduled job
51
+ hf jobs scheduled delete <scheduled_job_id>
52
+
31
53
  """
32
54
 
33
55
  import json
34
56
  import os
35
57
  import re
36
- from argparse import Namespace, _SubParsersAction
37
58
  from dataclasses import asdict
38
59
  from pathlib import Path
39
- from typing import Dict, List, Optional, Union
60
+ from typing import Annotated, Dict, Optional, Union
40
61
 
41
- import requests
62
+ import typer
42
63
 
43
- from huggingface_hub import HfApi, SpaceHardware, get_token
64
+ from huggingface_hub import SpaceHardware, get_token
65
+ from huggingface_hub.errors import HfHubHTTPError
44
66
  from huggingface_hub.utils import logging
45
67
  from huggingface_hub.utils._dotenv import load_dotenv
46
68
 
47
- from . import BaseHuggingfaceCLICommand
69
+ from ._cli_utils import TokenOpt, get_hf_api, typer_factory
48
70
 
49
71
 
50
72
  logger = logging.get_logger(__name__)
51
73
 
52
74
  SUGGESTED_FLAVORS = [item.value for item in SpaceHardware if item.value != "zero-a10g"]
53
75
 
54
-
55
- class JobsCommands(BaseHuggingfaceCLICommand):
56
- @staticmethod
57
- def register_subcommand(parser: _SubParsersAction):
58
- jobs_parser = parser.add_parser("jobs", help="Run and manage Jobs on the Hub.")
59
- jobs_subparsers = jobs_parser.add_subparsers(help="huggingface.co jobs related commands")
60
-
61
- # Show help if no subcommand is provided
62
- jobs_parser.set_defaults(func=lambda args: jobs_parser.print_help())
63
-
64
- # Register commands
65
- InspectCommand.register_subcommand(jobs_subparsers)
66
- LogsCommand.register_subcommand(jobs_subparsers)
67
- PsCommand.register_subcommand(jobs_subparsers)
68
- RunCommand.register_subcommand(jobs_subparsers)
69
- CancelCommand.register_subcommand(jobs_subparsers)
70
- UvCommand.register_subcommand(jobs_subparsers)
71
- ScheduledJobsCommands.register_subcommand(jobs_subparsers)
72
-
73
-
74
- class RunCommand(BaseHuggingfaceCLICommand):
75
- @staticmethod
76
- def register_subcommand(parser: _SubParsersAction) -> None:
77
- run_parser = parser.add_parser("run", help="Run a Job")
78
- run_parser.add_argument("image", type=str, help="The Docker image to use.")
79
- run_parser.add_argument("-e", "--env", action="append", help="Set environment variables. E.g. --env ENV=value")
80
- run_parser.add_argument(
81
- "-s",
82
- "--secrets",
83
- action="append",
84
- help=(
85
- "Set secret environment variables. E.g. --secrets SECRET=value "
86
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
87
- ),
88
- )
89
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
90
- run_parser.add_argument("--secrets-file", type=str, help="Read in a file of secret environment variables.")
91
- run_parser.add_argument(
92
- "--flavor",
93
- type=str,
94
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
95
- )
96
- run_parser.add_argument(
97
- "--timeout",
98
- type=str,
99
- help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
100
- )
101
- run_parser.add_argument(
102
- "-d",
103
- "--detach",
104
- action="store_true",
105
- help="Run the Job in the background and print the Job ID.",
106
- )
107
- run_parser.add_argument(
108
- "--namespace",
109
- type=str,
110
- help="The namespace where the Job will be created. Defaults to the current user's namespace.",
111
- )
112
- run_parser.add_argument(
113
- "--token",
114
- type=str,
115
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
116
- )
117
- run_parser.add_argument("command", nargs="...", help="The command to run.")
118
- run_parser.set_defaults(func=RunCommand)
119
-
120
- def __init__(self, args: Namespace) -> None:
121
- self.image: str = args.image
122
- self.command: List[str] = args.command
123
- self.env: dict[str, Optional[str]] = {}
124
- if args.env_file:
125
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
126
- for env_value in args.env or []:
127
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
128
- self.secrets: dict[str, Optional[str]] = {}
129
- extended_environ = _get_extended_environ()
130
- if args.secrets_file:
131
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
132
- for secret in args.secrets or []:
133
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
134
- self.flavor: Optional[SpaceHardware] = args.flavor
135
- self.timeout: Optional[str] = args.timeout
136
- self.detach: bool = args.detach
137
- self.namespace: Optional[str] = args.namespace
138
- self.token: Optional[str] = args.token
139
-
140
- def run(self) -> None:
141
- api = HfApi(token=self.token)
142
- job = api.run_job(
143
- image=self.image,
144
- command=self.command,
145
- env=self.env,
146
- secrets=self.secrets,
147
- flavor=self.flavor,
148
- timeout=self.timeout,
149
- namespace=self.namespace,
150
- )
151
- # Always print the job ID to the user
152
- print(f"Job started with ID: {job.id}")
153
- print(f"View at: {job.url}")
154
-
155
- if self.detach:
156
- return
157
-
158
- # Now let's stream the logs
159
- for log in api.fetch_job_logs(job_id=job.id):
160
- print(log)
161
-
162
-
163
- class LogsCommand(BaseHuggingfaceCLICommand):
164
- @staticmethod
165
- def register_subcommand(parser: _SubParsersAction) -> None:
166
- run_parser = parser.add_parser("logs", help="Fetch the logs of a Job")
167
- run_parser.add_argument("job_id", type=str, help="Job ID")
168
- run_parser.add_argument(
169
- "--namespace",
170
- type=str,
171
- help="The namespace where the job is running. Defaults to the current user's namespace.",
172
- )
173
- run_parser.add_argument(
174
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
175
- )
176
- run_parser.set_defaults(func=LogsCommand)
177
-
178
- def __init__(self, args: Namespace) -> None:
179
- self.job_id: str = args.job_id
180
- self.namespace: Optional[str] = args.namespace
181
- self.token: Optional[str] = args.token
182
-
183
- def run(self) -> None:
184
- api = HfApi(token=self.token)
185
- for log in api.fetch_job_logs(job_id=self.job_id, namespace=self.namespace):
186
- print(log)
187
-
188
-
189
- def _tabulate(rows: List[List[Union[str, int]]], headers: List[str]) -> str:
190
- """
191
- Inspired by:
192
-
193
- - stackoverflow.com/a/8356620/593036
194
- - stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
195
- """
196
- col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
197
- terminal_width = max(os.get_terminal_size().columns, len(headers) * 12)
198
- while len(headers) + sum(col_widths) > terminal_width:
199
- col_to_minimize = col_widths.index(max(col_widths))
200
- col_widths[col_to_minimize] //= 2
201
- if len(headers) + sum(col_widths) <= terminal_width:
202
- col_widths[col_to_minimize] = terminal_width - sum(col_widths) - len(headers) + col_widths[col_to_minimize]
203
- row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
204
- lines = []
205
- lines.append(row_format.format(*headers))
206
- lines.append(row_format.format(*["-" * w for w in col_widths]))
207
- for row in rows:
208
- row_format_args = [
209
- str(x)[: col_width - 3] + "..." if len(str(x)) > col_width else str(x)
210
- for x, col_width in zip(row, col_widths)
211
- ]
212
- lines.append(row_format.format(*row_format_args))
213
- return "\n".join(lines)
214
-
215
-
216
- class PsCommand(BaseHuggingfaceCLICommand):
217
- @staticmethod
218
- def register_subcommand(parser: _SubParsersAction) -> None:
219
- run_parser = parser.add_parser("ps", help="List Jobs")
220
- run_parser.add_argument(
76
+ # Common job-related options
77
+ ImageArg = Annotated[
78
+ str,
79
+ typer.Argument(
80
+ help="The Docker image to use.",
81
+ ),
82
+ ]
83
+
84
+ ImageOpt = Annotated[
85
+ Optional[str],
86
+ typer.Option(
87
+ help="Use a custom Docker image with `uv` installed.",
88
+ ),
89
+ ]
90
+
91
+ FlavorOpt = Annotated[
92
+ Optional[SpaceHardware],
93
+ typer.Option(
94
+ help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
95
+ ),
96
+ ]
97
+
98
+ EnvOpt = Annotated[
99
+ Optional[list[str]],
100
+ typer.Option(
101
+ "-e",
102
+ "--env",
103
+ help="Set environment variables. E.g. --env ENV=value",
104
+ ),
105
+ ]
106
+
107
+ SecretsOpt = Annotated[
108
+ Optional[list[str]],
109
+ typer.Option(
110
+ "-s",
111
+ "--secrets",
112
+ help="Set secret environment variables. E.g. --secrets SECRET=value or `--secrets HF_TOKEN` to pass your Hugging Face token.",
113
+ ),
114
+ ]
115
+
116
+ EnvFileOpt = Annotated[
117
+ Optional[str],
118
+ typer.Option(
119
+ "--env-file",
120
+ help="Read in a file of environment variables.",
121
+ ),
122
+ ]
123
+
124
+ SecretsFileOpt = Annotated[
125
+ Optional[str],
126
+ typer.Option(
127
+ help="Read in a file of secret environment variables.",
128
+ ),
129
+ ]
130
+
131
+ TimeoutOpt = Annotated[
132
+ Optional[str],
133
+ typer.Option(
134
+ help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
135
+ ),
136
+ ]
137
+
138
+ DetachOpt = Annotated[
139
+ bool,
140
+ typer.Option(
141
+ "-d",
142
+ "--detach",
143
+ help="Run the Job in the background and print the Job ID.",
144
+ ),
145
+ ]
146
+
147
+ NamespaceOpt = Annotated[
148
+ Optional[str],
149
+ typer.Option(
150
+ help="The namespace where the job will be running. Defaults to the current user's namespace.",
151
+ ),
152
+ ]
153
+
154
+ WithOpt = Annotated[
155
+ Optional[list[str]],
156
+ typer.Option(
157
+ "--with",
158
+ help="Run with the given packages installed",
159
+ ),
160
+ ]
161
+
162
+ PythonOpt = Annotated[
163
+ Optional[str],
164
+ typer.Option(
165
+ "-p",
166
+ "--python",
167
+ help="The Python interpreter to use for the run environment",
168
+ ),
169
+ ]
170
+
171
+ SuspendOpt = Annotated[
172
+ Optional[bool],
173
+ typer.Option(
174
+ help="Suspend (pause) the scheduled Job",
175
+ ),
176
+ ]
177
+
178
+ ConcurrencyOpt = Annotated[
179
+ Optional[bool],
180
+ typer.Option(
181
+ help="Allow multiple instances of this Job to run concurrently",
182
+ ),
183
+ ]
184
+
185
+ ScheduleArg = Annotated[
186
+ str,
187
+ typer.Argument(
188
+ help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
189
+ ),
190
+ ]
191
+
192
+ ScriptArg = Annotated[
193
+ str,
194
+ typer.Argument(
195
+ help="UV script to run (local file or URL)",
196
+ ),
197
+ ]
198
+
199
+ ScriptArgsArg = Annotated[
200
+ Optional[list[str]],
201
+ typer.Argument(
202
+ help="Arguments for the script",
203
+ ),
204
+ ]
205
+
206
+ CommandArg = Annotated[
207
+ list[str],
208
+ typer.Argument(
209
+ help="The command to run.",
210
+ ),
211
+ ]
212
+
213
+ JobIdArg = Annotated[
214
+ str,
215
+ typer.Argument(
216
+ help="Job ID",
217
+ ),
218
+ ]
219
+
220
+ ScheduledJobIdArg = Annotated[
221
+ str,
222
+ typer.Argument(
223
+ help="Scheduled Job ID",
224
+ ),
225
+ ]
226
+
227
+ RepoOpt = Annotated[
228
+ Optional[str],
229
+ typer.Option(
230
+ help="Repository name for the script (creates ephemeral if not specified)",
231
+ ),
232
+ ]
233
+
234
+
235
+ jobs_cli = typer_factory(help="Run and manage Jobs on the Hub.")
236
+
237
+
238
+ @jobs_cli.command("run", help="Run a Job")
239
+ def jobs_run(
240
+ image: ImageArg,
241
+ command: CommandArg,
242
+ env: EnvOpt = None,
243
+ secrets: SecretsOpt = None,
244
+ env_file: EnvFileOpt = None,
245
+ secrets_file: SecretsFileOpt = None,
246
+ flavor: FlavorOpt = None,
247
+ timeout: TimeoutOpt = None,
248
+ detach: DetachOpt = False,
249
+ namespace: NamespaceOpt = None,
250
+ token: TokenOpt = None,
251
+ ) -> None:
252
+ env_map: dict[str, Optional[str]] = {}
253
+ if env_file:
254
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
255
+ for env_value in env or []:
256
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
257
+
258
+ secrets_map: dict[str, Optional[str]] = {}
259
+ extended_environ = _get_extended_environ()
260
+ if secrets_file:
261
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
262
+ for secret in secrets or []:
263
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
264
+
265
+ api = get_hf_api(token=token)
266
+ job = api.run_job(
267
+ image=image,
268
+ command=command,
269
+ env=env_map,
270
+ secrets=secrets_map,
271
+ flavor=flavor,
272
+ timeout=timeout,
273
+ namespace=namespace,
274
+ )
275
+ # Always print the job ID to the user
276
+ print(f"Job started with ID: {job.id}")
277
+ print(f"View at: {job.url}")
278
+
279
+ if detach:
280
+ return
281
+ # Now let's stream the logs
282
+ for log in api.fetch_job_logs(job_id=job.id):
283
+ print(log)
284
+
285
+
286
+ @jobs_cli.command("logs", help="Fetch the logs of a Job")
287
+ def jobs_logs(
288
+ job_id: JobIdArg,
289
+ namespace: NamespaceOpt = None,
290
+ token: TokenOpt = None,
291
+ ) -> None:
292
+ api = get_hf_api(token=token)
293
+ for log in api.fetch_job_logs(job_id=job_id, namespace=namespace):
294
+ print(log)
295
+
296
+
297
+ def _matches_filters(job_properties: dict[str, str], filters: dict[str, str]) -> bool:
298
+ """Check if scheduled job matches all specified filters."""
299
+ for key, pattern in filters.items():
300
+ # Check if property exists
301
+ if key not in job_properties:
302
+ return False
303
+ # Support pattern matching with wildcards
304
+ if "*" in pattern or "?" in pattern:
305
+ # Convert glob pattern to regex
306
+ regex_pattern = pattern.replace("*", ".*").replace("?", ".")
307
+ if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
308
+ return False
309
+ # Simple substring matching
310
+ elif pattern.lower() not in job_properties[key].lower():
311
+ return False
312
+ return True
313
+
314
+
315
+ def _print_output(rows: list[list[Union[str, int]]], headers: list[str], fmt: Optional[str]) -> None:
316
+ """Print output according to the chosen format."""
317
+ if fmt:
318
+ # Use custom template if provided
319
+ template = fmt
320
+ for row in rows:
321
+ line = template
322
+ for i, field in enumerate(["id", "image", "command", "created", "status"]):
323
+ placeholder = f"{{{{.{field}}}}}"
324
+ if placeholder in line:
325
+ line = line.replace(placeholder, str(row[i]))
326
+ print(line)
327
+ else:
328
+ # Default tabular format
329
+ print(_tabulate(rows, headers=headers))
330
+
331
+
332
+ @jobs_cli.command("ps", help="List Jobs")
333
+ def jobs_ps(
334
+ all: Annotated[
335
+ bool,
336
+ typer.Option(
221
337
  "-a",
222
338
  "--all",
223
- action="store_true",
224
339
  help="Show all Jobs (default shows just running)",
225
- )
226
- run_parser.add_argument(
227
- "--namespace",
228
- type=str,
229
- help="The namespace from where it lists the jobs. Defaults to the current user's namespace.",
230
- )
231
- run_parser.add_argument(
232
- "--token",
233
- type=str,
234
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
235
- )
236
- # Add Docker-style filtering argument
237
- run_parser.add_argument(
340
+ ),
341
+ ] = False,
342
+ namespace: NamespaceOpt = None,
343
+ token: TokenOpt = None,
344
+ filter: Annotated[
345
+ Optional[list[str]],
346
+ typer.Option(
238
347
  "-f",
239
348
  "--filter",
240
- action="append",
241
- default=[],
242
349
  help="Filter output based on conditions provided (format: key=value)",
243
- )
244
- # Add option to format output
245
- run_parser.add_argument(
246
- "--format",
247
- type=str,
350
+ ),
351
+ ] = None,
352
+ format: Annotated[
353
+ Optional[str],
354
+ typer.Option(
248
355
  help="Format output using a custom template",
249
- )
250
- run_parser.set_defaults(func=PsCommand)
251
-
252
- def __init__(self, args: Namespace) -> None:
253
- self.all: bool = args.all
254
- self.namespace: Optional[str] = args.namespace
255
- self.token: Optional[str] = args.token
256
- self.format: Optional[str] = args.format
257
- self.filters: Dict[str, str] = {}
258
-
259
- # Parse filter arguments (key=value pairs)
260
- for f in args.filter:
356
+ ),
357
+ ] = None,
358
+ ) -> None:
359
+ try:
360
+ api = get_hf_api(token=token)
361
+ # Fetch jobs data
362
+ jobs = api.list_jobs(namespace=namespace)
363
+ # Define table headers
364
+ table_headers = ["JOB ID", "IMAGE/SPACE", "COMMAND", "CREATED", "STATUS"]
365
+ rows: list[list[Union[str, int]]] = []
366
+
367
+ filters: dict[str, str] = {}
368
+ for f in filter or []:
261
369
  if "=" in f:
262
370
  key, value = f.split("=", 1)
263
- self.filters[key.lower()] = value
371
+ filters[key.lower()] = value
264
372
  else:
265
373
  print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
374
+ # Process jobs data
375
+ for job in jobs:
376
+ # Extract job data for filtering
377
+ status = job.status.stage if job.status else "UNKNOWN"
378
+ if not all and status not in ("RUNNING", "UPDATING"):
379
+ # Skip job if not all jobs should be shown and status doesn't match criteria
380
+ continue
381
+ # Extract job data for output
382
+ job_id = job.id
266
383
 
267
- def run(self) -> None:
268
- """
269
- Fetch and display job information for the current user.
270
- Uses Docker-style filtering with -f/--filter flag and key=value pairs.
271
- """
272
- try:
273
- api = HfApi(token=self.token)
274
-
275
- # Fetch jobs data
276
- jobs = api.list_jobs(namespace=self.namespace)
277
-
278
- # Define table headers
279
- table_headers = ["JOB ID", "IMAGE/SPACE", "COMMAND", "CREATED", "STATUS"]
384
+ # Extract image or space information
385
+ image_or_space = job.docker_image or "N/A"
280
386
 
281
- # Process jobs data
282
- rows = []
387
+ # Extract and format command
388
+ cmd = job.command or []
389
+ command_str = " ".join(cmd) if cmd else "N/A"
283
390
 
284
- for job in jobs:
285
- # Extract job data for filtering
286
- status = job.status.stage if job.status else "UNKNOWN"
391
+ # Extract creation time
392
+ created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
287
393
 
288
- # Skip job if not all jobs should be shown and status doesn't match criteria
289
- if not self.all and status not in ("RUNNING", "UPDATING"):
290
- continue
291
-
292
- # Extract job ID
293
- job_id = job.id
294
-
295
- # Extract image or space information
296
- image_or_space = job.docker_image or "N/A"
297
-
298
- # Extract and format command
299
- command = job.command or []
300
- command_str = " ".join(command) if command else "N/A"
301
-
302
- # Extract creation time
303
- created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
304
-
305
- # Create a dict with all job properties for filtering
306
- job_properties = {
307
- "id": job_id,
308
- "image": image_or_space,
309
- "status": status.lower(),
310
- "command": command_str,
311
- }
312
-
313
- # Check if job matches all filters
314
- if not self._matches_filters(job_properties):
315
- continue
316
-
317
- # Create row
318
- rows.append([job_id, image_or_space, command_str, created_at, status])
319
-
320
- # Handle empty results
321
- if not rows:
322
- filters_msg = ""
323
- if self.filters:
324
- filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
325
-
326
- print(f"No jobs found{filters_msg}")
327
- return
328
-
329
- # Apply custom format if provided or use default tabular format
330
- self._print_output(rows, table_headers)
331
-
332
- except requests.RequestException as e:
333
- print(f"Error fetching jobs data: {e}")
334
- except (KeyError, ValueError, TypeError) as e:
335
- print(f"Error processing jobs data: {e}")
336
- except Exception as e:
337
- print(f"Unexpected error - {type(e).__name__}: {e}")
338
-
339
- def _matches_filters(self, job_properties: Dict[str, str]) -> bool:
340
- """Check if job matches all specified filters."""
341
- for key, pattern in self.filters.items():
342
- # Check if property exists
343
- if key not in job_properties:
344
- return False
394
+ # Create a dict with all job properties for filtering
395
+ props = {"id": job_id, "image": image_or_space, "status": status.lower(), "command": command_str}
396
+ if not _matches_filters(props, filters):
397
+ continue
345
398
 
346
- # Support pattern matching with wildcards
347
- if "*" in pattern or "?" in pattern:
348
- # Convert glob pattern to regex
349
- regex_pattern = pattern.replace("*", ".*").replace("?", ".")
350
- if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
351
- return False
352
- # Simple substring matching
353
- elif pattern.lower() not in job_properties[key].lower():
354
- return False
399
+ # Create row
400
+ rows.append([job_id, image_or_space, command_str, created_at, status])
355
401
 
356
- return True
357
-
358
- def _print_output(self, rows, headers):
359
- """Print output according to the chosen format."""
360
- if self.format:
361
- # Custom template formatting (simplified)
362
- template = self.format
363
- for row in rows:
364
- line = template
365
- for i, field in enumerate(["id", "image", "command", "created", "status"]):
366
- placeholder = f"{{{{.{field}}}}}"
367
- if placeholder in line:
368
- line = line.replace(placeholder, str(row[i]))
369
- print(line)
370
- else:
371
- # Default tabular format
372
- print(
373
- _tabulate(
374
- rows,
375
- headers=headers,
376
- )
402
+ # Handle empty results
403
+ if not rows:
404
+ filters_msg = (
405
+ f" matching filters: {', '.join([f'{k}={v}' for k, v in filters.items()])}" if filters else ""
377
406
  )
378
-
379
-
380
- class InspectCommand(BaseHuggingfaceCLICommand):
381
- @staticmethod
382
- def register_subcommand(parser: _SubParsersAction) -> None:
383
- run_parser = parser.add_parser("inspect", help="Display detailed information on one or more Jobs")
384
- run_parser.add_argument(
385
- "--namespace",
386
- type=str,
387
- help="The namespace where the job is running. Defaults to the current user's namespace.",
388
- )
389
- run_parser.add_argument(
390
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
391
- )
392
- run_parser.add_argument("job_ids", nargs="...", help="The jobs to inspect")
393
- run_parser.set_defaults(func=InspectCommand)
394
-
395
- def __init__(self, args: Namespace) -> None:
396
- self.namespace: Optional[str] = args.namespace
397
- self.token: Optional[str] = args.token
398
- self.job_ids: List[str] = args.job_ids
399
-
400
- def run(self) -> None:
401
- api = HfApi(token=self.token)
402
- jobs = [api.inspect_job(job_id=job_id, namespace=self.namespace) for job_id in self.job_ids]
403
- print(json.dumps([asdict(job) for job in jobs], indent=4, default=str))
404
-
405
-
406
- class CancelCommand(BaseHuggingfaceCLICommand):
407
- @staticmethod
408
- def register_subcommand(parser: _SubParsersAction) -> None:
409
- run_parser = parser.add_parser("cancel", help="Cancel a Job")
410
- run_parser.add_argument("job_id", type=str, help="Job ID")
411
- run_parser.add_argument(
412
- "--namespace",
413
- type=str,
414
- help="The namespace where the job is running. Defaults to the current user's namespace.",
415
- )
416
- run_parser.add_argument(
417
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
418
- )
419
- run_parser.set_defaults(func=CancelCommand)
420
-
421
- def __init__(self, args: Namespace) -> None:
422
- self.job_id: str = args.job_id
423
- self.namespace = args.namespace
424
- self.token: Optional[str] = args.token
425
-
426
- def run(self) -> None:
427
- api = HfApi(token=self.token)
428
- api.cancel_job(job_id=self.job_id, namespace=self.namespace)
429
-
430
-
431
- class UvCommand(BaseHuggingfaceCLICommand):
432
- """Run UV scripts on Hugging Face infrastructure."""
433
-
434
- @staticmethod
435
- def register_subcommand(parser):
436
- """Register UV run subcommand."""
437
- uv_parser = parser.add_parser(
438
- "uv",
439
- help="Run UV scripts (Python with inline dependencies) on HF infrastructure",
440
- )
441
-
442
- subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)
443
-
444
- # Run command only
445
- run_parser = subparsers.add_parser(
446
- "run",
447
- help="Run a UV script (local file or URL) on HF infrastructure",
448
- )
449
- run_parser.add_argument("script", help="UV script to run (local file or URL)")
450
- run_parser.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
451
- run_parser.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
452
- run_parser.add_argument(
453
- "--repo",
454
- help="Repository name for the script (creates ephemeral if not specified)",
455
- )
456
- run_parser.add_argument(
457
- "--flavor",
458
- type=str,
459
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
460
- )
461
- run_parser.add_argument("-e", "--env", action="append", help="Environment variables")
462
- run_parser.add_argument(
463
- "-s",
464
- "--secrets",
465
- action="append",
466
- help=(
467
- "Set secret environment variables. E.g. --secrets SECRET=value "
468
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
469
- ),
470
- )
471
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
472
- run_parser.add_argument(
473
- "--secrets-file",
474
- type=str,
475
- help="Read in a file of secret environment variables.",
476
- )
477
- run_parser.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
478
- run_parser.add_argument("-d", "--detach", action="store_true", help="Run in background")
479
- run_parser.add_argument(
480
- "--namespace",
481
- type=str,
482
- help="The namespace where the Job will be created. Defaults to the current user's namespace.",
483
- )
484
- run_parser.add_argument("--token", type=str, help="HF token")
485
- # UV options
486
- run_parser.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
487
- run_parser.add_argument(
488
- "-p", "--python", type=str, help="The Python interpreter to use for the run environment"
489
- )
490
- run_parser.set_defaults(func=UvCommand)
491
-
492
- def __init__(self, args: Namespace) -> None:
493
- """Initialize the command with parsed arguments."""
494
- self.script = args.script
495
- self.script_args = args.script_args
496
- self.dependencies = args.with_
497
- self.python = args.python
498
- self.image = args.image
499
- self.env: dict[str, Optional[str]] = {}
500
- if args.env_file:
501
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
502
- for env_value in args.env or []:
503
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
504
- self.secrets: dict[str, Optional[str]] = {}
505
- extended_environ = _get_extended_environ()
506
- if args.secrets_file:
507
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
508
- for secret in args.secrets or []:
509
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
510
- self.flavor: Optional[SpaceHardware] = args.flavor
511
- self.timeout: Optional[str] = args.timeout
512
- self.detach: bool = args.detach
513
- self.namespace: Optional[str] = args.namespace
514
- self.token: Optional[str] = args.token
515
- self._repo = args.repo
516
-
517
- def run(self) -> None:
518
- """Execute UV command."""
519
- logging.set_verbosity(logging.INFO)
520
- api = HfApi(token=self.token)
521
- job = api.run_uv_job(
522
- script=self.script,
523
- script_args=self.script_args,
524
- dependencies=self.dependencies,
525
- python=self.python,
526
- image=self.image,
527
- env=self.env,
528
- secrets=self.secrets,
529
- flavor=self.flavor,
530
- timeout=self.timeout,
531
- namespace=self.namespace,
532
- _repo=self._repo,
533
- )
534
-
535
- # Always print the job ID to the user
536
- print(f"Job started with ID: {job.id}")
537
- print(f"View at: {job.url}")
538
-
539
- if self.detach:
407
+ print(f"No jobs found{filters_msg}")
540
408
  return
541
-
542
- # Now let's stream the logs
543
- for log in api.fetch_job_logs(job_id=job.id):
544
- print(log)
545
-
546
-
547
- def _get_extended_environ() -> Dict[str, str]:
548
- extended_environ = os.environ.copy()
549
- if (token := get_token()) is not None:
550
- extended_environ["HF_TOKEN"] = token
551
- return extended_environ
552
-
553
-
554
- class ScheduledJobsCommands(BaseHuggingfaceCLICommand):
555
- @staticmethod
556
- def register_subcommand(parser: _SubParsersAction):
557
- scheduled_jobs_parser = parser.add_parser("scheduled", help="Create and manage scheduled Jobs on the Hub.")
558
- scheduled_jobs_subparsers = scheduled_jobs_parser.add_subparsers(
559
- help="huggingface.co scheduled jobs related commands"
560
- )
561
-
562
- # Show help if no subcommand is provided
563
- scheduled_jobs_parser.set_defaults(func=lambda args: scheduled_jobs_subparsers.print_help())
564
-
565
- # Register commands
566
- ScheduledRunCommand.register_subcommand(scheduled_jobs_subparsers)
567
- ScheduledPsCommand.register_subcommand(scheduled_jobs_subparsers)
568
- ScheduledInspectCommand.register_subcommand(scheduled_jobs_subparsers)
569
- ScheduledDeleteCommand.register_subcommand(scheduled_jobs_subparsers)
570
- ScheduledSuspendCommand.register_subcommand(scheduled_jobs_subparsers)
571
- ScheduledResumeCommand.register_subcommand(scheduled_jobs_subparsers)
572
- ScheduledUvCommand.register_subcommand(scheduled_jobs_subparsers)
573
-
574
-
575
- class ScheduledRunCommand(BaseHuggingfaceCLICommand):
576
- @staticmethod
577
- def register_subcommand(parser: _SubParsersAction) -> None:
578
- run_parser = parser.add_parser("run", help="Schedule a Job")
579
- run_parser.add_argument(
580
- "schedule",
581
- type=str,
582
- help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
583
- )
584
- run_parser.add_argument("image", type=str, help="The Docker image to use.")
585
- run_parser.add_argument(
586
- "--suspend",
587
- action="store_true",
588
- help="Suspend (pause) the scheduled Job",
589
- default=None,
590
- )
591
- run_parser.add_argument(
592
- "--concurrency",
593
- action="store_true",
594
- help="Allow multiple instances of this Job to run concurrently",
595
- default=None,
596
- )
597
- run_parser.add_argument("-e", "--env", action="append", help="Set environment variables. E.g. --env ENV=value")
598
- run_parser.add_argument(
599
- "-s",
600
- "--secrets",
601
- action="append",
602
- help=(
603
- "Set secret environment variables. E.g. --secrets SECRET=value "
604
- "or `--secrets HF_TOKEN` to pass your Hugging Face token."
605
- ),
606
- )
607
- run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
608
- run_parser.add_argument("--secrets-file", type=str, help="Read in a file of secret environment variables.")
609
- run_parser.add_argument(
610
- "--flavor",
611
- type=str,
612
- help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
613
- )
614
- run_parser.add_argument(
615
- "--timeout",
616
- type=str,
617
- help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
618
- )
619
- run_parser.add_argument(
620
- "--namespace",
621
- type=str,
622
- help="The namespace where the scheduled Job will be created. Defaults to the current user's namespace.",
623
- )
624
- run_parser.add_argument(
625
- "--token",
626
- type=str,
627
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
628
- )
629
- run_parser.add_argument("command", nargs="...", help="The command to run.")
630
- run_parser.set_defaults(func=ScheduledRunCommand)
631
-
632
- def __init__(self, args: Namespace) -> None:
633
- self.schedule: str = args.schedule
634
- self.image: str = args.image
635
- self.command: List[str] = args.command
636
- self.suspend: Optional[bool] = args.suspend
637
- self.concurrency: Optional[bool] = args.concurrency
638
- self.env: dict[str, Optional[str]] = {}
639
- if args.env_file:
640
- self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
641
- for env_value in args.env or []:
642
- self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
643
- self.secrets: dict[str, Optional[str]] = {}
644
- extended_environ = _get_extended_environ()
645
- if args.secrets_file:
646
- self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
647
- for secret in args.secrets or []:
648
- self.secrets.update(load_dotenv(secret, environ=extended_environ))
649
- self.flavor: Optional[SpaceHardware] = args.flavor
650
- self.timeout: Optional[str] = args.timeout
651
- self.namespace: Optional[str] = args.namespace
652
- self.token: Optional[str] = args.token
653
-
654
- def run(self) -> None:
655
- api = HfApi(token=self.token)
656
- scheduled_job = api.create_scheduled_job(
657
- image=self.image,
658
- command=self.command,
659
- schedule=self.schedule,
660
- suspend=self.suspend,
661
- concurrency=self.concurrency,
662
- env=self.env,
663
- secrets=self.secrets,
664
- flavor=self.flavor,
665
- timeout=self.timeout,
666
- namespace=self.namespace,
667
- )
668
- # Always print the scheduled job ID to the user
669
- print(f"Scheduled Job created with ID: {scheduled_job.id}")
670
-
671
-
672
- class ScheduledPsCommand(BaseHuggingfaceCLICommand):
673
- @staticmethod
674
- def register_subcommand(parser: _SubParsersAction) -> None:
675
- run_parser = parser.add_parser("ps", help="List scheduled Jobs")
676
- run_parser.add_argument(
409
+ # Apply custom format if provided or use default tabular format
410
+ _print_output(rows, table_headers, format)
411
+
412
+ except HfHubHTTPError as e:
413
+ print(f"Error fetching jobs data: {e}")
414
+ except (KeyError, ValueError, TypeError) as e:
415
+ print(f"Error processing jobs data: {e}")
416
+ except Exception as e:
417
+ print(f"Unexpected error - {type(e).__name__}: {e}")
418
+
419
+
420
+ @jobs_cli.command("inspect", help="Display detailed information on one or more Jobs")
421
+ def jobs_inspect(
422
+ job_ids: Annotated[
423
+ list[str],
424
+ typer.Argument(
425
+ help="The jobs to inspect",
426
+ ),
427
+ ],
428
+ namespace: NamespaceOpt = None,
429
+ token: TokenOpt = None,
430
+ ) -> None:
431
+ api = get_hf_api(token=token)
432
+ jobs = [api.inspect_job(job_id=job_id, namespace=namespace) for job_id in job_ids]
433
+ print(json.dumps([asdict(job) for job in jobs], indent=4, default=str))
434
+
435
+
436
+ @jobs_cli.command("cancel", help="Cancel a Job")
437
+ def jobs_cancel(
438
+ job_id: JobIdArg,
439
+ namespace: NamespaceOpt = None,
440
+ token: TokenOpt = None,
441
+ ) -> None:
442
+ api = get_hf_api(token=token)
443
+ api.cancel_job(job_id=job_id, namespace=namespace)
444
+
445
+
446
+ uv_app = typer_factory(help="Run UV scripts (Python with inline dependencies) on HF infrastructure")
447
+ jobs_cli.add_typer(uv_app, name="uv")
448
+
449
+
450
+ @uv_app.command("run", help="Run a UV script (local file or URL) on HF infrastructure")
451
+ def jobs_uv_run(
452
+ script: ScriptArg,
453
+ script_args: ScriptArgsArg = None,
454
+ image: ImageOpt = None,
455
+ repo: RepoOpt = None,
456
+ flavor: FlavorOpt = None,
457
+ env: EnvOpt = None,
458
+ secrets: SecretsOpt = None,
459
+ env_file: EnvFileOpt = None,
460
+ secrets_file: SecretsFileOpt = None,
461
+ timeout: TimeoutOpt = None,
462
+ detach: DetachOpt = False,
463
+ namespace: NamespaceOpt = None,
464
+ token: TokenOpt = None,
465
+ with_: WithOpt = None,
466
+ python: PythonOpt = None,
467
+ ) -> None:
468
+ env_map: dict[str, Optional[str]] = {}
469
+ if env_file:
470
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
471
+ for env_value in env or []:
472
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
473
+ secrets_map: dict[str, Optional[str]] = {}
474
+ extended_environ = _get_extended_environ()
475
+ if secrets_file:
476
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
477
+ for secret in secrets or []:
478
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
479
+
480
+ api = get_hf_api(token=token)
481
+ job = api.run_uv_job(
482
+ script=script,
483
+ script_args=script_args or [],
484
+ dependencies=with_,
485
+ python=python,
486
+ image=image,
487
+ env=env_map,
488
+ secrets=secrets_map,
489
+ flavor=flavor, # type: ignore[arg-type]
490
+ timeout=timeout,
491
+ namespace=namespace,
492
+ _repo=repo,
493
+ )
494
+ # Always print the job ID to the user
495
+ print(f"Job started with ID: {job.id}")
496
+ print(f"View at: {job.url}")
497
+ if detach:
498
+ return
499
+ # Now let's stream the logs
500
+ for log in api.fetch_job_logs(job_id=job.id):
501
+ print(log)
502
+
503
+
504
+ scheduled_app = typer_factory(help="Create and manage scheduled Jobs on the Hub.")
505
+ jobs_cli.add_typer(scheduled_app, name="scheduled")
506
+
507
+
508
+ @scheduled_app.command("run", help="Schedule a Job")
509
+ def scheduled_run(
510
+ schedule: ScheduleArg,
511
+ image: ImageArg,
512
+ command: CommandArg,
513
+ suspend: SuspendOpt = None,
514
+ concurrency: ConcurrencyOpt = None,
515
+ env: EnvOpt = None,
516
+ secrets: SecretsOpt = None,
517
+ env_file: EnvFileOpt = None,
518
+ secrets_file: SecretsFileOpt = None,
519
+ flavor: FlavorOpt = None,
520
+ timeout: TimeoutOpt = None,
521
+ namespace: NamespaceOpt = None,
522
+ token: TokenOpt = None,
523
+ ) -> None:
524
+ env_map: dict[str, Optional[str]] = {}
525
+ if env_file:
526
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
527
+ for env_value in env or []:
528
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
529
+ secrets_map: dict[str, Optional[str]] = {}
530
+ extended_environ = _get_extended_environ()
531
+ if secrets_file:
532
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
533
+ for secret in secrets or []:
534
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
535
+
536
+ api = get_hf_api(token=token)
537
+ scheduled_job = api.create_scheduled_job(
538
+ image=image,
539
+ command=command,
540
+ schedule=schedule,
541
+ suspend=suspend,
542
+ concurrency=concurrency,
543
+ env=env_map,
544
+ secrets=secrets_map,
545
+ flavor=flavor,
546
+ timeout=timeout,
547
+ namespace=namespace,
548
+ )
549
+ print(f"Scheduled Job created with ID: {scheduled_job.id}")
550
+
551
+
552
+ @scheduled_app.command("ps", help="List scheduled Jobs")
553
+ def scheduled_ps(
554
+ all: Annotated[
555
+ bool,
556
+ typer.Option(
677
557
  "-a",
678
558
  "--all",
679
- action="store_true",
680
559
  help="Show all scheduled Jobs (default hides suspended)",
681
- )
682
- run_parser.add_argument(
683
- "--namespace",
684
- type=str,
685
- help="The namespace from where it lists the jobs. Defaults to the current user's namespace.",
686
- )
687
- run_parser.add_argument(
688
- "--token",
689
- type=str,
690
- help="A User Access Token generated from https://huggingface.co/settings/tokens",
691
- )
692
- # Add Docker-style filtering argument
693
- run_parser.add_argument(
560
+ ),
561
+ ] = False,
562
+ namespace: NamespaceOpt = None,
563
+ token: TokenOpt = None,
564
+ filter: Annotated[
565
+ Optional[list[str]],
566
+ typer.Option(
694
567
  "-f",
695
568
  "--filter",
696
- action="append",
697
- default=[],
698
569
  help="Filter output based on conditions provided (format: key=value)",
699
- )
700
- # Add option to format output
701
- run_parser.add_argument(
570
+ ),
571
+ ] = None,
572
+ format: Annotated[
573
+ Optional[str],
574
+ typer.Option(
702
575
  "--format",
703
- type=str,
704
576
  help="Format output using a custom template",
705
- )
706
- run_parser.set_defaults(func=ScheduledPsCommand)
707
-
708
- def __init__(self, args: Namespace) -> None:
709
- self.all: bool = args.all
710
- self.namespace: Optional[str] = args.namespace
711
- self.token: Optional[str] = args.token
712
- self.format: Optional[str] = args.format
713
- self.filters: Dict[str, str] = {}
714
-
715
- # Parse filter arguments (key=value pairs)
716
- for f in args.filter:
577
+ ),
578
+ ] = None,
579
+ ) -> None:
580
+ try:
581
+ api = get_hf_api(token=token)
582
+ scheduled_jobs = api.list_scheduled_jobs(namespace=namespace)
583
+ table_headers = ["ID", "SCHEDULE", "IMAGE/SPACE", "COMMAND", "LAST RUN", "NEXT RUN", "SUSPEND"]
584
+ rows: list[list[Union[str, int]]] = []
585
+ filters: dict[str, str] = {}
586
+ for f in filter or []:
717
587
  if "=" in f:
718
588
  key, value = f.split("=", 1)
719
- self.filters[key.lower()] = value
589
+ filters[key.lower()] = value
720
590
  else:
721
591
  print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
722
592
 
723
- def run(self) -> None:
724
- """
725
- Fetch and display scheduked job information for the current user.
726
- Uses Docker-style filtering with -f/--filter flag and key=value pairs.
727
- """
728
- try:
729
- api = HfApi(token=self.token)
730
-
731
- # Fetch jobs data
732
- scheduled_jobs = api.list_scheduled_jobs(namespace=self.namespace)
733
-
734
- # Define table headers
735
- table_headers = [
736
- "ID",
737
- "SCHEDULE",
738
- "IMAGE/SPACE",
739
- "COMMAND",
740
- "LAST RUN",
741
- "NEXT RUN",
742
- "SUSPEND",
743
- ]
744
-
745
- # Process jobs data
746
- rows = []
747
-
748
- for scheduled_job in scheduled_jobs:
749
- # Extract job data for filtering
750
- suspend = scheduled_job.suspend
751
-
752
- # Skip job if not all jobs should be shown and status doesn't match criteria
753
- if not self.all and suspend:
754
- continue
755
-
756
- # Extract job ID
757
- scheduled_job_id = scheduled_job.id
758
-
759
- # Extract schedule
760
- schedule = scheduled_job.schedule
761
-
762
- # Extract image or space information
763
- image_or_space = scheduled_job.job_spec.docker_image or "N/A"
764
-
765
- # Extract and format command
766
- command = scheduled_job.job_spec.command or []
767
- command_str = " ".join(command) if command else "N/A"
768
-
769
- # Extract status
770
- last_job_at = (
771
- scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
772
- if scheduled_job.status.last_job
773
- else "N/A"
774
- )
775
- next_job_run_at = (
776
- scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
777
- if scheduled_job.status.next_job_run_at
778
- else "N/A"
779
- )
780
-
781
- # Create a dict with all job properties for filtering
782
- job_properties = {
783
- "id": scheduled_job_id,
784
- "image": image_or_space,
785
- "suspend": str(suspend),
786
- "command": command_str,
787
- }
788
-
789
- # Check if job matches all filters
790
- if not self._matches_filters(job_properties):
791
- continue
792
-
793
- # Create row
794
- rows.append(
795
- [
796
- scheduled_job_id,
797
- schedule,
798
- image_or_space,
799
- command_str,
800
- last_job_at,
801
- next_job_run_at,
802
- suspend,
803
- ]
804
- )
805
-
806
- # Handle empty results
807
- if not rows:
808
- filters_msg = ""
809
- if self.filters:
810
- filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
811
-
812
- print(f"No scheduled jobs found{filters_msg}")
813
- return
814
-
815
- # Apply custom format if provided or use default tabular format
816
- self._print_output(rows, table_headers)
817
-
818
- except requests.RequestException as e:
819
- print(f"Error fetching scheduled jobs data: {e}")
820
- except (KeyError, ValueError, TypeError) as e:
821
- print(f"Error processing scheduled jobs data: {e}")
822
- except Exception as e:
823
- print(f"Unexpected error - {type(e).__name__}: {e}")
824
-
825
- def _matches_filters(self, job_properties: Dict[str, str]) -> bool:
826
- """Check if scheduled job matches all specified filters."""
827
- for key, pattern in self.filters.items():
828
- # Check if property exists
829
- if key not in job_properties:
830
- return False
831
-
832
- # Support pattern matching with wildcards
833
- if "*" in pattern or "?" in pattern:
834
- # Convert glob pattern to regex
835
- regex_pattern = pattern.replace("*", ".*").replace("?", ".")
836
- if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
837
- return False
838
- # Simple substring matching
839
- elif pattern.lower() not in job_properties[key].lower():
840
- return False
841
-
842
- return True
843
-
844
- def _print_output(self, rows, headers):
845
- """Print output according to the chosen format."""
846
- if self.format:
847
- # Custom template formatting (simplified)
848
- template = self.format
849
- for row in rows:
850
- line = template
851
- for i, field in enumerate(
852
- ["id", "schedule", "image", "command", "last_job_at", "next_job_run_at", "suspend"]
853
- ):
854
- placeholder = f"{{{{.{field}}}}}"
855
- if placeholder in line:
856
- line = line.replace(placeholder, str(row[i]))
857
- print(line)
858
- else:
859
- # Default tabular format
860
- print(
861
- _tabulate(
862
- rows,
863
- headers=headers,
864
- )
593
+ for scheduled_job in scheduled_jobs:
594
+ suspend = scheduled_job.suspend or False
595
+ if not all and suspend:
596
+ continue
597
+ sj_id = scheduled_job.id
598
+ schedule = scheduled_job.schedule or "N/A"
599
+ image_or_space = scheduled_job.job_spec.docker_image or "N/A"
600
+ cmd = scheduled_job.job_spec.command or []
601
+ command_str = " ".join(cmd) if cmd else "N/A"
602
+ last_job_at = (
603
+ scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
604
+ if scheduled_job.status.last_job
605
+ else "N/A"
865
606
  )
607
+ next_job_run_at = (
608
+ scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
609
+ if scheduled_job.status.next_job_run_at
610
+ else "N/A"
611
+ )
612
+ props = {"id": sj_id, "image": image_or_space, "suspend": str(suspend), "command": command_str}
613
+ if not _matches_filters(props, filters):
614
+ continue
615
+ rows.append([sj_id, schedule, image_or_space, command_str, last_job_at, next_job_run_at, suspend])
616
+
617
+ if not rows:
618
+ filters_msg = (
619
+ f" matching filters: {', '.join([f'{k}={v}' for k, v in filters.items()])}" if filters else ""
620
+ )
621
+ print(f"No scheduled jobs found{filters_msg}")
622
+ return
623
+ _print_output(rows, table_headers, format)
624
+
625
+ except HfHubHTTPError as e:
626
+ print(f"Error fetching scheduled jobs data: {e}")
627
+ except (KeyError, ValueError, TypeError) as e:
628
+ print(f"Error processing scheduled jobs data: {e}")
629
+ except Exception as e:
630
+ print(f"Unexpected error - {type(e).__name__}: {e}")
631
+
632
+
633
+ @scheduled_app.command("inspect", help="Display detailed information on one or more scheduled Jobs")
634
+ def scheduled_inspect(
635
+ scheduled_job_ids: Annotated[
636
+ list[str],
637
+ typer.Argument(
638
+ help="The scheduled jobs to inspect",
639
+ ),
640
+ ],
641
+ namespace: NamespaceOpt = None,
642
+ token: TokenOpt = None,
643
+ ) -> None:
644
+ api = get_hf_api(token=token)
645
+ scheduled_jobs = [
646
+ api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
647
+ for scheduled_job_id in scheduled_job_ids
648
+ ]
649
+ print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
650
+
651
+
652
+ @scheduled_app.command("delete", help="Delete a scheduled Job")
653
+ def scheduled_delete(
654
+ scheduled_job_id: ScheduledJobIdArg,
655
+ namespace: NamespaceOpt = None,
656
+ token: TokenOpt = None,
657
+ ) -> None:
658
+ api = get_hf_api(token=token)
659
+ api.delete_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
660
+
661
+
662
+ @scheduled_app.command("suspend", help="Suspend (pause) a scheduled Job")
663
+ def scheduled_suspend(
664
+ scheduled_job_id: ScheduledJobIdArg,
665
+ namespace: NamespaceOpt = None,
666
+ token: TokenOpt = None,
667
+ ) -> None:
668
+ api = get_hf_api(token=token)
669
+ api.suspend_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
670
+
671
+
672
+ @scheduled_app.command("resume", help="Resume (unpause) a scheduled Job")
673
+ def scheduled_resume(
674
+ scheduled_job_id: ScheduledJobIdArg,
675
+ namespace: NamespaceOpt = None,
676
+ token: TokenOpt = None,
677
+ ) -> None:
678
+ api = get_hf_api(token=token)
679
+ api.resume_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=namespace)
680
+
681
+
682
+ scheduled_uv_app = typer_factory(help="Schedule UV scripts on HF infrastructure")
683
+ scheduled_app.add_typer(scheduled_uv_app, name="uv")
684
+
685
+
686
+ @scheduled_uv_app.command("run", help="Run a UV script (local file or URL) on HF infrastructure")
687
+ def scheduled_uv_run(
688
+ schedule: ScheduleArg,
689
+ script: ScriptArg,
690
+ script_args: ScriptArgsArg = None,
691
+ suspend: SuspendOpt = None,
692
+ concurrency: ConcurrencyOpt = None,
693
+ image: ImageOpt = None,
694
+ repo: RepoOpt = None,
695
+ flavor: FlavorOpt = None,
696
+ env: EnvOpt = None,
697
+ secrets: SecretsOpt = None,
698
+ env_file: EnvFileOpt = None,
699
+ secrets_file: SecretsFileOpt = None,
700
+ timeout: TimeoutOpt = None,
701
+ namespace: NamespaceOpt = None,
702
+ token: TokenOpt = None,
703
+ with_: WithOpt = None,
704
+ python: PythonOpt = None,
705
+ ) -> None:
706
+ env_map: dict[str, Optional[str]] = {}
707
+ if env_file:
708
+ env_map.update(load_dotenv(Path(env_file).read_text(), environ=os.environ.copy()))
709
+ for env_value in env or []:
710
+ env_map.update(load_dotenv(env_value, environ=os.environ.copy()))
711
+ secrets_map: dict[str, Optional[str]] = {}
712
+ extended_environ = _get_extended_environ()
713
+ if secrets_file:
714
+ secrets_map.update(load_dotenv(Path(secrets_file).read_text(), environ=extended_environ))
715
+ for secret in secrets or []:
716
+ secrets_map.update(load_dotenv(secret, environ=extended_environ))
717
+
718
+ api = get_hf_api(token=token)
719
+ job = api.create_scheduled_uv_job(
720
+ script=script,
721
+ script_args=script_args or [],
722
+ schedule=schedule,
723
+ suspend=suspend,
724
+ concurrency=concurrency,
725
+ dependencies=with_,
726
+ python=python,
727
+ image=image,
728
+ env=env_map,
729
+ secrets=secrets_map,
730
+ flavor=flavor, # type: ignore[arg-type]
731
+ timeout=timeout,
732
+ namespace=namespace,
733
+ _repo=repo,
734
+ )
735
+ print(f"Scheduled Job created with ID: {job.id}")
736
+
737
+
738
+ ### UTILS
739
+
740
+
741
+ def _tabulate(rows: list[list[Union[str, int]]], headers: list[str]) -> str:
742
+ """
743
+ Inspired by:
866
744
 
867
-
868
- class ScheduledInspectCommand(BaseHuggingfaceCLICommand):
869
- @staticmethod
870
- def register_subcommand(parser: _SubParsersAction) -> None:
871
- run_parser = parser.add_parser("inspect", help="Display detailed information on one or more scheduled Jobs")
872
- run_parser.add_argument(
873
- "--namespace",
874
- type=str,
875
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
876
- )
877
- run_parser.add_argument(
878
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
879
- )
880
- run_parser.add_argument("scheduled_job_ids", nargs="...", help="The scheduled jobs to inspect")
881
- run_parser.set_defaults(func=ScheduledInspectCommand)
882
-
883
- def __init__(self, args: Namespace) -> None:
884
- self.namespace: Optional[str] = args.namespace
885
- self.token: Optional[str] = args.token
886
- self.scheduled_job_ids: List[str] = args.scheduled_job_ids
887
-
888
- def run(self) -> None:
889
- api = HfApi(token=self.token)
890
- scheduled_jobs = [
891
- api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=self.namespace)
892
- for scheduled_job_id in self.scheduled_job_ids
745
+ - stackoverflow.com/a/8356620/593036
746
+ - stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
747
+ """
748
+ col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
749
+ terminal_width = max(os.get_terminal_size().columns, len(headers) * 12)
750
+ while len(headers) + sum(col_widths) > terminal_width:
751
+ col_to_minimize = col_widths.index(max(col_widths))
752
+ col_widths[col_to_minimize] //= 2
753
+ if len(headers) + sum(col_widths) <= terminal_width:
754
+ col_widths[col_to_minimize] = terminal_width - sum(col_widths) - len(headers) + col_widths[col_to_minimize]
755
+ row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
756
+ lines = []
757
+ lines.append(row_format.format(*headers))
758
+ lines.append(row_format.format(*["-" * w for w in col_widths]))
759
+ for row in rows:
760
+ row_format_args = [
761
+ str(x)[: col_width - 3] + "..." if len(str(x)) > col_width else str(x)
762
+ for x, col_width in zip(row, col_widths)
893
763
  ]
894
- print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
895
-
896
-
897
- class ScheduledDeleteCommand(BaseHuggingfaceCLICommand):
898
- @staticmethod
899
- def register_subcommand(parser: _SubParsersAction) -> None:
900
- run_parser = parser.add_parser("delete", help="Delete a scheduled Job")
901
- run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
902
- run_parser.add_argument(
903
- "--namespace",
904
- type=str,
905
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
906
- )
907
- run_parser.add_argument(
908
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
909
- )
910
- run_parser.set_defaults(func=ScheduledDeleteCommand)
911
-
912
- def __init__(self, args: Namespace) -> None:
913
- self.scheduled_job_id: str = args.scheduled_job_id
914
- self.namespace = args.namespace
915
- self.token: Optional[str] = args.token
916
-
917
- def run(self) -> None:
918
- api = HfApi(token=self.token)
919
- api.delete_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
920
-
921
-
922
- class ScheduledSuspendCommand(BaseHuggingfaceCLICommand):
923
- @staticmethod
924
- def register_subcommand(parser: _SubParsersAction) -> None:
925
- run_parser = parser.add_parser("suspend", help="Suspend (pause) a scheduled Job")
926
- run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
927
- run_parser.add_argument(
928
- "--namespace",
929
- type=str,
930
- help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
931
- )
932
- run_parser.add_argument(
933
- "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
934
- )
935
- run_parser.set_defaults(func=ScheduledSuspendCommand)
936
-
937
- def __init__(self, args: Namespace) -> None:
938
- self.scheduled_job_id: str = args.scheduled_job_id
939
- self.namespace = args.namespace
940
- self.token: Optional[str] = args.token
941
-
942
- def run(self) -> None:
943
- api = HfApi(token=self.token)
944
- api.suspend_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
945
-
946
-
947
class ScheduledResumeCommand(BaseHuggingfaceCLICommand):
    """CLI command that unpauses a previously suspended scheduled Job."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction) -> None:
        """Attach the `resume` subcommand and its arguments to the parser."""
        resume_parser = parser.add_parser("resume", help="Resume (unpause) a scheduled Job")
        resume_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
        resume_parser.add_argument(
            "--namespace",
            type=str,
            help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
        )
        resume_parser.add_argument(
            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
        )
        # argparse dispatches back to this class when the subcommand is selected.
        resume_parser.set_defaults(func=ScheduledResumeCommand)

    def __init__(self, args: Namespace) -> None:
        """Capture the parsed CLI arguments."""
        self.token: Optional[str] = args.token
        self.namespace = args.namespace
        self.scheduled_job_id: str = args.scheduled_job_id

    def run(self) -> None:
        """Resume the scheduled Job through the Hub API."""
        client = HfApi(token=self.token)
        client.resume_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
970
-
971
-
972
class ScheduledUvCommand(BaseHuggingfaceCLICommand):
    """Schedule UV scripts on Hugging Face infrastructure."""

    @staticmethod
    def register_subcommand(parser):
        """Register UV run subcommand."""
        uv_parser = parser.add_parser(
            "uv",
            help="Schedule UV scripts (Python with inline dependencies) on HF infrastructure",
        )
        uv_subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)

        # Only `run` is exposed for scheduled UV jobs.
        uv_run = uv_subparsers.add_parser(
            "run",
            help="Run a UV script (local file or URL) on HF infrastructure",
        )
        uv_run.add_argument(
            "schedule",
            type=str,
            help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
        )
        uv_run.add_argument("script", help="UV script to run (local file or URL)")
        # nargs="..." swallows everything after the script path so script flags are not parsed here.
        uv_run.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
        uv_run.add_argument(
            "--suspend",
            action="store_true",
            help="Suspend (pause) the scheduled Job",
            default=None,
        )
        uv_run.add_argument(
            "--concurrency",
            action="store_true",
            help="Allow multiple instances of this Job to run concurrently",
            default=None,
        )
        uv_run.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
        uv_run.add_argument(
            "--repo",
            help="Repository name for the script (creates ephemeral if not specified)",
        )
        uv_run.add_argument(
            "--flavor",
            type=str,
            help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
        )
        uv_run.add_argument("-e", "--env", action="append", help="Environment variables")
        uv_run.add_argument(
            "-s",
            "--secrets",
            action="append",
            help=(
                "Set secret environment variables. E.g. --secrets SECRET=value "
                "or `--secrets HF_TOKEN` to pass your Hugging Face token."
            ),
        )
        uv_run.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
        uv_run.add_argument(
            "--secrets-file",
            type=str,
            help="Read in a file of secret environment variables.",
        )
        uv_run.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
        uv_run.add_argument("-d", "--detach", action="store_true", help="Run in background")
        uv_run.add_argument(
            "--namespace",
            type=str,
            help="The namespace where the Job will be created. Defaults to the current user's namespace.",
        )
        uv_run.add_argument("--token", type=str, help="HF token")
        # UV options ("--with" clashes with the Python keyword, hence dest="with_").
        uv_run.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
        uv_run.add_argument("-p", "--python", type=str, help="The Python interpreter to use for the run environment")
        uv_run.set_defaults(func=ScheduledUvCommand)

    def __init__(self, args: Namespace) -> None:
        """Initialize the command with parsed arguments."""
        self.schedule: str = args.schedule
        self.script = args.script
        self.script_args = args.script_args
        self.suspend: Optional[bool] = args.suspend
        self.concurrency: Optional[bool] = args.concurrency
        self.dependencies = args.with_
        self.python = args.python
        self.image = args.image

        # Plain env vars: file first, then individual -e flags (later values win on key clash).
        self.env: dict[str, Optional[str]] = {}
        if args.env_file:
            self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
        for env_value in args.env or []:
            self.env.update(load_dotenv(env_value, environ=os.environ.copy()))

        # Secrets resolve against the extended environ (which may carry HF_TOKEN).
        self.secrets: dict[str, Optional[str]] = {}
        extended_environ = _get_extended_environ()
        if args.secrets_file:
            self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
        for secret in args.secrets or []:
            self.secrets.update(load_dotenv(secret, environ=extended_environ))

        self.flavor: Optional[SpaceHardware] = args.flavor
        self.timeout: Optional[str] = args.timeout
        self.detach: bool = args.detach
        self.namespace: Optional[str] = args.namespace
        self.token: Optional[str] = args.token
        self._repo = args.repo

    def run(self) -> None:
        """Schedule UV command."""
        logging.set_verbosity(logging.INFO)
        client = HfApi(token=self.token)
        scheduled_job = client.create_scheduled_uv_job(
            script=self.script,
            script_args=self.script_args,
            schedule=self.schedule,
            suspend=self.suspend,
            concurrency=self.concurrency,
            dependencies=self.dependencies,
            python=self.python,
            image=self.image,
            env=self.env,
            secrets=self.secrets,
            flavor=self.flavor,
            timeout=self.timeout,
            namespace=self.namespace,
            _repo=self._repo,
        )

        # Always print the job ID to the user
        print(f"Scheduled Job created with ID: {scheduled_job.id}")
764
+ lines.append(row_format.format(*row_format_args))
765
+ return "\n".join(lines)
766
+
767
+
768
def _get_extended_environ() -> Dict[str, str]:
    """Return a copy of the process environment, with HF_TOKEN injected when a token is available."""
    environ = dict(os.environ)
    token = get_token()
    if token is not None:
        environ["HF_TOKEN"] = token
    return environ