huggingface-hub 0.34.4__py3-none-any.whl → 1.0.0rc0__py3-none-any.whl
This diff compares the content of publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of huggingface-hub might be problematic.
- huggingface_hub/__init__.py +46 -45
- huggingface_hub/_commit_api.py +28 -28
- huggingface_hub/_commit_scheduler.py +11 -8
- huggingface_hub/_inference_endpoints.py +8 -8
- huggingface_hub/_jobs_api.py +167 -10
- huggingface_hub/_login.py +13 -39
- huggingface_hub/_oauth.py +8 -8
- huggingface_hub/_snapshot_download.py +14 -28
- huggingface_hub/_space_api.py +4 -4
- huggingface_hub/_tensorboard_logger.py +13 -14
- huggingface_hub/_upload_large_folder.py +15 -15
- huggingface_hub/_webhooks_payload.py +3 -3
- huggingface_hub/_webhooks_server.py +2 -2
- huggingface_hub/cli/_cli_utils.py +2 -2
- huggingface_hub/cli/auth.py +5 -6
- huggingface_hub/cli/cache.py +14 -20
- huggingface_hub/cli/download.py +4 -4
- huggingface_hub/cli/jobs.py +560 -11
- huggingface_hub/cli/lfs.py +4 -4
- huggingface_hub/cli/repo.py +7 -7
- huggingface_hub/cli/repo_files.py +2 -2
- huggingface_hub/cli/upload.py +4 -4
- huggingface_hub/cli/upload_large_folder.py +3 -3
- huggingface_hub/commands/_cli_utils.py +2 -2
- huggingface_hub/commands/delete_cache.py +13 -13
- huggingface_hub/commands/download.py +4 -13
- huggingface_hub/commands/lfs.py +4 -4
- huggingface_hub/commands/repo_files.py +2 -2
- huggingface_hub/commands/scan_cache.py +1 -1
- huggingface_hub/commands/tag.py +1 -3
- huggingface_hub/commands/upload.py +4 -4
- huggingface_hub/commands/upload_large_folder.py +3 -3
- huggingface_hub/commands/user.py +5 -6
- huggingface_hub/community.py +5 -5
- huggingface_hub/constants.py +3 -41
- huggingface_hub/dataclasses.py +16 -19
- huggingface_hub/errors.py +42 -29
- huggingface_hub/fastai_utils.py +8 -9
- huggingface_hub/file_download.py +153 -252
- huggingface_hub/hf_api.py +815 -600
- huggingface_hub/hf_file_system.py +98 -62
- huggingface_hub/hub_mixin.py +37 -57
- huggingface_hub/inference/_client.py +177 -325
- huggingface_hub/inference/_common.py +110 -124
- huggingface_hub/inference/_generated/_async_client.py +226 -432
- huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
- huggingface_hub/inference/_generated/types/base.py +10 -7
- huggingface_hub/inference/_generated/types/chat_completion.py +18 -16
- huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
- huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
- huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
- huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
- huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
- huggingface_hub/inference/_generated/types/summarization.py +2 -2
- huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
- huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
- huggingface_hub/inference/_generated/types/text_generation.py +10 -10
- huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
- huggingface_hub/inference/_generated/types/token_classification.py +2 -2
- huggingface_hub/inference/_generated/types/translation.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
- huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
- huggingface_hub/inference/_mcp/_cli_hacks.py +3 -3
- huggingface_hub/inference/_mcp/agent.py +3 -3
- huggingface_hub/inference/_mcp/cli.py +1 -1
- huggingface_hub/inference/_mcp/constants.py +2 -3
- huggingface_hub/inference/_mcp/mcp_client.py +58 -30
- huggingface_hub/inference/_mcp/types.py +10 -7
- huggingface_hub/inference/_mcp/utils.py +11 -7
- huggingface_hub/inference/_providers/__init__.py +2 -2
- huggingface_hub/inference/_providers/_common.py +49 -25
- huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
- huggingface_hub/inference/_providers/cohere.py +3 -3
- huggingface_hub/inference/_providers/fal_ai.py +25 -25
- huggingface_hub/inference/_providers/featherless_ai.py +4 -4
- huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
- huggingface_hub/inference/_providers/hf_inference.py +28 -20
- huggingface_hub/inference/_providers/hyperbolic.py +4 -4
- huggingface_hub/inference/_providers/nebius.py +10 -10
- huggingface_hub/inference/_providers/novita.py +5 -5
- huggingface_hub/inference/_providers/nscale.py +4 -4
- huggingface_hub/inference/_providers/replicate.py +15 -15
- huggingface_hub/inference/_providers/sambanova.py +6 -6
- huggingface_hub/inference/_providers/together.py +7 -7
- huggingface_hub/lfs.py +20 -31
- huggingface_hub/repocard.py +18 -18
- huggingface_hub/repocard_data.py +56 -56
- huggingface_hub/serialization/__init__.py +0 -1
- huggingface_hub/serialization/_base.py +9 -9
- huggingface_hub/serialization/_dduf.py +7 -7
- huggingface_hub/serialization/_torch.py +28 -28
- huggingface_hub/utils/__init__.py +10 -4
- huggingface_hub/utils/_auth.py +5 -5
- huggingface_hub/utils/_cache_manager.py +31 -31
- huggingface_hub/utils/_deprecation.py +1 -1
- huggingface_hub/utils/_dotenv.py +3 -3
- huggingface_hub/utils/_fixes.py +0 -10
- huggingface_hub/utils/_git_credential.py +4 -4
- huggingface_hub/utils/_headers.py +7 -29
- huggingface_hub/utils/_http.py +366 -208
- huggingface_hub/utils/_pagination.py +4 -4
- huggingface_hub/utils/_paths.py +5 -5
- huggingface_hub/utils/_runtime.py +15 -13
- huggingface_hub/utils/_safetensors.py +21 -21
- huggingface_hub/utils/_subprocess.py +9 -9
- huggingface_hub/utils/_telemetry.py +3 -3
- huggingface_hub/utils/_typing.py +25 -5
- huggingface_hub/utils/_validators.py +53 -72
- huggingface_hub/utils/_xet.py +16 -16
- huggingface_hub/utils/_xet_progress_reporting.py +32 -11
- huggingface_hub/utils/insecure_hashlib.py +3 -9
- huggingface_hub/utils/tqdm.py +3 -3
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/METADATA +18 -29
- huggingface_hub-1.0.0rc0.dist-info/RECORD +161 -0
- huggingface_hub/inference_api.py +0 -217
- huggingface_hub/keras_mixin.py +0 -500
- huggingface_hub/repository.py +0 -1477
- huggingface_hub/serialization/_tensorflow.py +0 -95
- huggingface_hub/utils/_hf_folder.py +0 -68
- huggingface_hub-0.34.4.dist-info/RECORD +0 -166
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/LICENSE +0 -0
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/WHEEL +0 -0
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/entry_points.txt +0 -0
- {huggingface_hub-0.34.4.dist-info → huggingface_hub-1.0.0rc0.dist-info}/top_level.txt +0 -0
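The deleted files near the bottom of this list (`inference_api.py`, `keras_mixin.py`, `repository.py`, `serialization/_tensorflow.py`, `utils/_hf_folder.py`) are the legacy APIs dropped for 1.0.0. As a rough migration sketch, assuming typical usage of the removed `Repository` and `InferenceApi` classes (the replacements shown are the long-standing `HfApi` and `InferenceClient` APIs; repo names are illustrative):

```python
from huggingface_hub import HfApi, InferenceClient

api = HfApi()

# Before (removed): Repository(local_dir="my-model", clone_from="user/my-model").push_to_hub()
# After: upload the directory over HTTP, no local git clone required.
api.upload_folder(
    folder_path="my-model",   # local directory (illustrative)
    repo_id="user/my-model",  # target repo (illustrative)
    repo_type="model",
)

# Before (removed): InferenceApi(repo_id="gpt2")(inputs="Hello")
# After: InferenceClient covers the same task endpoints.
client = InferenceClient(model="gpt2")
print(client.text_generation("Hello"))
```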
huggingface_hub/cli/jobs.py
CHANGED
@@ -36,11 +36,10 @@ import re
 from argparse import Namespace, _SubParsersAction
 from dataclasses import asdict
 from pathlib import Path
-from typing import
-
-import requests
+from typing import Optional, Union
 
 from huggingface_hub import HfApi, SpaceHardware, get_token
+from huggingface_hub.errors import HfHubHTTPError
 from huggingface_hub.utils import logging
 from huggingface_hub.utils._dotenv import load_dotenv
 
@@ -68,6 +67,7 @@ class JobsCommands(BaseHuggingfaceCLICommand):
         RunCommand.register_subcommand(jobs_subparsers)
         CancelCommand.register_subcommand(jobs_subparsers)
         UvCommand.register_subcommand(jobs_subparsers)
+        ScheduledJobsCommands.register_subcommand(jobs_subparsers)
 
 
 class RunCommand(BaseHuggingfaceCLICommand):
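The hunk above only registers the new `ScheduledJobsCommands` entry point; the mechanics behind it are plain argparse. A self-contained sketch of the same register-then-dispatch pattern (the class and command names here are illustrative, not the library's):

```python
import argparse


class PingCommand:
    """Illustrative stand-in for a BaseHuggingfaceCLICommand subclass."""

    @staticmethod
    def register_subcommand(subparsers: argparse._SubParsersAction) -> None:
        parser = subparsers.add_parser("ping", help="Reply with pong")
        # Binding the class lets the dispatcher instantiate it with the parsed args.
        parser.set_defaults(func=PingCommand)

    def __init__(self, args: argparse.Namespace) -> None:
        self.args = args

    def run(self) -> None:
        print("pong")


def main() -> None:
    root = argparse.ArgumentParser(prog="demo")
    subparsers = root.add_subparsers(dest="command", required=True)
    PingCommand.register_subcommand(subparsers)
    args = root.parse_args()
    # Same dispatch convention the huggingface_hub CLI uses: func(args).run()
    args.func(args).run()


if __name__ == "__main__":
    main()
```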
@@ -118,7 +118,7 @@ class RunCommand(BaseHuggingfaceCLICommand):
 
     def __init__(self, args: Namespace) -> None:
         self.image: str = args.image
-        self.command:
+        self.command: list[str] = args.command
         self.env: dict[str, Optional[str]] = {}
         if args.env_file:
             self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
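The `--env`/`--env-file` handling shown here funnels everything through the library's private `load_dotenv` helper; its call sites imply it parses `KEY=value` text into a dict and resolves bare `KEY` entries from the supplied `environ`. A simplified stand-in built only from that observed usage (not the actual implementation):

```python
import os
from typing import Optional


def parse_env_text(text: str, environ: dict[str, str]) -> dict[str, Optional[str]]:
    """Simplified stand-in for load_dotenv as used by these CLI commands:
    'KEY=value' lines set a value; a bare 'KEY' line copies it from `environ`."""
    result: dict[str, Optional[str]] = {}
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        if "=" in line:
            key, value = line.split("=", 1)
            result[key.strip()] = value
        else:
            result[line] = environ.get(line)
    return result


env: dict[str, Optional[str]] = {}
# File values load first; --env flags are applied afterwards, so they win on
# conflict, mirroring the update order in RunCommand.__init__ above.
env.update(parse_env_text("LOG_LEVEL=debug\nAPI_URL=http://localhost", environ=os.environ.copy()))
env.update(parse_env_text("LOG_LEVEL=info", environ=os.environ.copy()))
assert env["LOG_LEVEL"] == "info"
```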
@@ -185,7 +185,7 @@ class LogsCommand(BaseHuggingfaceCLICommand):
             print(log)
 
 
-def _tabulate(rows:
+def _tabulate(rows: list[list[Union[str, int]]], headers: list[str]) -> str:
     """
     Inspired by:
 
@@ -253,7 +253,7 @@ class PsCommand(BaseHuggingfaceCLICommand):
         self.namespace: Optional[str] = args.namespace
         self.token: Optional[str] = args.token
         self.format: Optional[str] = args.format
-        self.filters:
+        self.filters: dict[str, str] = {}
 
         # Parse filter arguments (key=value pairs)
         for f in args.filter:
@@ -299,7 +299,7 @@ class PsCommand(BaseHuggingfaceCLICommand):
                 command_str = " ".join(command) if command else "N/A"
 
                 # Extract creation time
-                created_at = job.created_at
+                created_at = job.created_at.strftime("%Y-%m-%d %H:%M:%S") if job.created_at else "N/A"
 
                 # Create a dict with all job properties for filtering
                 job_properties = {
@@ -328,14 +328,14 @@ class PsCommand(BaseHuggingfaceCLICommand):
             # Apply custom format if provided or use default tabular format
             self._print_output(rows, table_headers)
 
-        except
+        except HfHubHTTPError as e:
             print(f"Error fetching jobs data: {e}")
         except (KeyError, ValueError, TypeError) as e:
             print(f"Error processing jobs data: {e}")
         except Exception as e:
             print(f"Unexpected error - {type(e).__name__}: {e}")
 
-    def _matches_filters(self, job_properties:
+    def _matches_filters(self, job_properties: dict[str, str]) -> bool:
         """Check if job matches all specified filters."""
         for key, pattern in self.filters.items():
             # Check if property exists
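The filter logic referenced here (and reproduced in full in `ScheduledPsCommand._matches_filters` below) converts `*`/`?` globs to an anchored, case-insensitive regex and otherwise falls back to substring matching. Extracted as a standalone function with the same rules, for clarity:

```python
import re


def matches_filters(properties: dict[str, str], filters: dict[str, str]) -> bool:
    """Same matching rules as the CLI's _matches_filters helper."""
    for key, pattern in filters.items():
        if key not in properties:
            return False
        if "*" in pattern or "?" in pattern:
            # Glob-style pattern: translate to an anchored, case-insensitive regex.
            regex = pattern.replace("*", ".*").replace("?", ".")
            if not re.search(f"^{regex}$", properties[key], re.IGNORECASE):
                return False
        elif pattern.lower() not in properties[key].lower():
            return False
    return True


assert matches_filters({"image": "python:3.12"}, {"image": "python*"})
assert not matches_filters({"image": "ubuntu"}, {"image": "python*"})
```

Note that the glob branch leaves other regex metacharacters (such as `.`) unescaped, so a dot in a filter value can match more than intended; `fnmatch.translate` would be a stricter alternative.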
@@ -394,7 +394,7 @@ class InspectCommand(BaseHuggingfaceCLICommand):
     def __init__(self, args: Namespace) -> None:
         self.namespace: Optional[str] = args.namespace
         self.token: Optional[str] = args.token
-        self.job_ids:
+        self.job_ids: list[str] = args.job_ids
 
     def run(self) -> None:
         api = HfApi(token=self.token)
@@ -543,8 +543,557 @@ class UvCommand(BaseHuggingfaceCLICommand):
             print(log)
 
 
-def _get_extended_environ() ->
+def _get_extended_environ() -> dict[str, str]:
     extended_environ = os.environ.copy()
     if (token := get_token()) is not None:
         extended_environ["HF_TOKEN"] = token
     return extended_environ
+
+
+class ScheduledJobsCommands(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction):
+        scheduled_jobs_parser = parser.add_parser("scheduled", help="Create and manage scheduled Jobs on the Hub.")
+        scheduled_jobs_subparsers = scheduled_jobs_parser.add_subparsers(
+            help="huggingface.co scheduled jobs related commands"
+        )
+
+        # Show help if no subcommand is provided
+        scheduled_jobs_parser.set_defaults(func=lambda args: scheduled_jobs_subparsers.print_help())
+
+        # Register commands
+        ScheduledRunCommand.register_subcommand(scheduled_jobs_subparsers)
+        ScheduledPsCommand.register_subcommand(scheduled_jobs_subparsers)
+        ScheduledInspectCommand.register_subcommand(scheduled_jobs_subparsers)
+        ScheduledDeleteCommand.register_subcommand(scheduled_jobs_subparsers)
+        ScheduledSuspendCommand.register_subcommand(scheduled_jobs_subparsers)
+        ScheduledResumeCommand.register_subcommand(scheduled_jobs_subparsers)
+        ScheduledUvCommand.register_subcommand(scheduled_jobs_subparsers)
+
+
+class ScheduledRunCommand(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction) -> None:
+        run_parser = parser.add_parser("run", help="Schedule a Job")
+        run_parser.add_argument(
+            "schedule",
+            type=str,
+            help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
+        )
+        run_parser.add_argument("image", type=str, help="The Docker image to use.")
+        run_parser.add_argument(
+            "--suspend",
+            action="store_true",
+            help="Suspend (pause) the scheduled Job",
+            default=None,
+        )
+        run_parser.add_argument(
+            "--concurrency",
+            action="store_true",
+            help="Allow multiple instances of this Job to run concurrently",
+            default=None,
+        )
+        run_parser.add_argument("-e", "--env", action="append", help="Set environment variables. E.g. --env ENV=value")
+        run_parser.add_argument(
+            "-s",
+            "--secrets",
+            action="append",
+            help=(
+                "Set secret environment variables. E.g. --secrets SECRET=value "
+                "or `--secrets HF_TOKEN` to pass your Hugging Face token."
+            ),
+        )
+        run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
+        run_parser.add_argument("--secrets-file", type=str, help="Read in a file of secret environment variables.")
+        run_parser.add_argument(
+            "--flavor",
+            type=str,
+            help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
+        )
+        run_parser.add_argument(
+            "--timeout",
+            type=str,
+            help="Max duration: int/float with s (seconds, default), m (minutes), h (hours) or d (days).",
+        )
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace where the scheduled Job will be created. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument(
+            "--token",
+            type=str,
+            help="A User Access Token generated from https://huggingface.co/settings/tokens",
+        )
+        run_parser.add_argument("command", nargs="...", help="The command to run.")
+        run_parser.set_defaults(func=ScheduledRunCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        self.schedule: str = args.schedule
+        self.image: str = args.image
+        self.command: list[str] = args.command
+        self.suspend: Optional[bool] = args.suspend
+        self.concurrency: Optional[bool] = args.concurrency
+        self.env: dict[str, Optional[str]] = {}
+        if args.env_file:
+            self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
+        for env_value in args.env or []:
+            self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
+        self.secrets: dict[str, Optional[str]] = {}
+        extended_environ = _get_extended_environ()
+        if args.secrets_file:
+            self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
+        for secret in args.secrets or []:
+            self.secrets.update(load_dotenv(secret, environ=extended_environ))
+        self.flavor: Optional[SpaceHardware] = args.flavor
+        self.timeout: Optional[str] = args.timeout
+        self.namespace: Optional[str] = args.namespace
+        self.token: Optional[str] = args.token
+
+    def run(self) -> None:
+        api = HfApi(token=self.token)
+        scheduled_job = api.create_scheduled_job(
+            image=self.image,
+            command=self.command,
+            schedule=self.schedule,
+            suspend=self.suspend,
+            concurrency=self.concurrency,
+            env=self.env,
+            secrets=self.secrets,
+            flavor=self.flavor,
+            timeout=self.timeout,
+            namespace=self.namespace,
+        )
+        # Always print the scheduled job ID to the user
+        print(f"Scheduled Job created with ID: {scheduled_job.id}")
+
+
+class ScheduledPsCommand(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction) -> None:
+        run_parser = parser.add_parser("ps", help="List scheduled Jobs")
+        run_parser.add_argument(
+            "-a",
+            "--all",
+            action="store_true",
+            help="Show all scheduled Jobs (default hides suspended)",
+        )
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace from where it lists the jobs. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument(
+            "--token",
+            type=str,
+            help="A User Access Token generated from https://huggingface.co/settings/tokens",
+        )
+        # Add Docker-style filtering argument
+        run_parser.add_argument(
+            "-f",
+            "--filter",
+            action="append",
+            default=[],
+            help="Filter output based on conditions provided (format: key=value)",
+        )
+        # Add option to format output
+        run_parser.add_argument(
+            "--format",
+            type=str,
+            help="Format output using a custom template",
+        )
+        run_parser.set_defaults(func=ScheduledPsCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        self.all: bool = args.all
+        self.namespace: Optional[str] = args.namespace
+        self.token: Optional[str] = args.token
+        self.format: Optional[str] = args.format
+        self.filters: dict[str, str] = {}
+
+        # Parse filter arguments (key=value pairs)
+        for f in args.filter:
+            if "=" in f:
+                key, value = f.split("=", 1)
+                self.filters[key.lower()] = value
+            else:
+                print(f"Warning: Ignoring invalid filter format '{f}'. Use key=value format.")
+
+    def run(self) -> None:
+        """
+        Fetch and display scheduled job information for the current user.
+
+        Uses Docker-style filtering with -f/--filter flag and key=value pairs.
+        """
+        try:
+            api = HfApi(token=self.token)
+
+            # Fetch jobs data
+            scheduled_jobs = api.list_scheduled_jobs(namespace=self.namespace)
+
+            # Define table headers
+            table_headers = [
+                "ID",
+                "SCHEDULE",
+                "IMAGE/SPACE",
+                "COMMAND",
+                "LAST RUN",
+                "NEXT RUN",
+                "SUSPEND",
+            ]
+
+            # Process jobs data
+            rows = []
+
+            for scheduled_job in scheduled_jobs:
+                # Extract job data for filtering
+                suspend = scheduled_job.suspend
+
+                # Skip job if not all jobs should be shown and status doesn't match criteria
+                if not self.all and suspend:
+                    continue
+
+                # Extract job ID
+                scheduled_job_id = scheduled_job.id
+
+                # Extract schedule
+                schedule = scheduled_job.schedule
+
+                # Extract image or space information
+                image_or_space = scheduled_job.job_spec.docker_image or "N/A"
+
+                # Extract and format command
+                command = scheduled_job.job_spec.command or []
+                command_str = " ".join(command) if command else "N/A"
+
+                # Extract status
+                last_job_at = (
+                    scheduled_job.status.last_job.at.strftime("%Y-%m-%d %H:%M:%S")
+                    if scheduled_job.status.last_job
+                    else "N/A"
+                )
+                next_job_run_at = (
+                    scheduled_job.status.next_job_run_at.strftime("%Y-%m-%d %H:%M:%S")
+                    if scheduled_job.status.next_job_run_at
+                    else "N/A"
+                )
+
+                # Create a dict with all job properties for filtering
+                job_properties = {
+                    "id": scheduled_job_id,
+                    "image": image_or_space,
+                    "suspend": str(suspend),
+                    "command": command_str,
+                }
+
+                # Check if job matches all filters
+                if not self._matches_filters(job_properties):
+                    continue
+
+                # Create row
+                rows.append(
+                    [
+                        scheduled_job_id,
+                        schedule,
+                        image_or_space,
+                        command_str,
+                        last_job_at,
+                        next_job_run_at,
+                        suspend,
+                    ]
+                )
+
+            # Handle empty results
+            if not rows:
+                filters_msg = ""
+                if self.filters:
+                    filters_msg = f" matching filters: {', '.join([f'{k}={v}' for k, v in self.filters.items()])}"
+
+                print(f"No scheduled jobs found{filters_msg}")
+                return
+
+            # Apply custom format if provided or use default tabular format
+            self._print_output(rows, table_headers)
+
+        except HfHubHTTPError as e:
+            print(f"Error fetching scheduled jobs data: {e}")
+        except (KeyError, ValueError, TypeError) as e:
+            print(f"Error processing scheduled jobs data: {e}")
+        except Exception as e:
+            print(f"Unexpected error - {type(e).__name__}: {e}")
+
+    def _matches_filters(self, job_properties: dict[str, str]) -> bool:
+        """Check if scheduled job matches all specified filters."""
+        for key, pattern in self.filters.items():
+            # Check if property exists
+            if key not in job_properties:
+                return False
+
+            # Support pattern matching with wildcards
+            if "*" in pattern or "?" in pattern:
+                # Convert glob pattern to regex
+                regex_pattern = pattern.replace("*", ".*").replace("?", ".")
+                if not re.search(f"^{regex_pattern}$", job_properties[key], re.IGNORECASE):
+                    return False
+            # Simple substring matching
+            elif pattern.lower() not in job_properties[key].lower():
+                return False
+
+        return True
+
+    def _print_output(self, rows, headers):
+        """Print output according to the chosen format."""
+        if self.format:
+            # Custom template formatting (simplified)
+            template = self.format
+            for row in rows:
+                line = template
+                for i, field in enumerate(
+                    ["id", "schedule", "image", "command", "last_job_at", "next_job_run_at", "suspend"]
+                ):
+                    placeholder = f"{{{{.{field}}}}}"
+                    if placeholder in line:
+                        line = line.replace(placeholder, str(row[i]))
+                print(line)
+        else:
+            # Default tabular format
+            print(
+                _tabulate(
+                    rows,
+                    headers=headers,
+                )
+            )
+
+
+class ScheduledInspectCommand(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction) -> None:
+        run_parser = parser.add_parser("inspect", help="Display detailed information on one or more scheduled Jobs")
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument(
+            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
+        )
+        run_parser.add_argument("scheduled_job_ids", nargs="...", help="The scheduled jobs to inspect")
+        run_parser.set_defaults(func=ScheduledInspectCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        self.namespace: Optional[str] = args.namespace
+        self.token: Optional[str] = args.token
+        self.scheduled_job_ids: list[str] = args.scheduled_job_ids
+
+    def run(self) -> None:
+        api = HfApi(token=self.token)
+        scheduled_jobs = [
+            api.inspect_scheduled_job(scheduled_job_id=scheduled_job_id, namespace=self.namespace)
+            for scheduled_job_id in self.scheduled_job_ids
+        ]
+        print(json.dumps([asdict(scheduled_job) for scheduled_job in scheduled_jobs], indent=4, default=str))
+
+
+class ScheduledDeleteCommand(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction) -> None:
+        run_parser = parser.add_parser("delete", help="Delete a scheduled Job")
+        run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument(
+            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
+        )
+        run_parser.set_defaults(func=ScheduledDeleteCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        self.scheduled_job_id: str = args.scheduled_job_id
+        self.namespace = args.namespace
+        self.token: Optional[str] = args.token
+
+    def run(self) -> None:
+        api = HfApi(token=self.token)
+        api.delete_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
+
+
+class ScheduledSuspendCommand(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction) -> None:
+        run_parser = parser.add_parser("suspend", help="Suspend (pause) a scheduled Job")
+        run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument(
+            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
+        )
+        run_parser.set_defaults(func=ScheduledSuspendCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        self.scheduled_job_id: str = args.scheduled_job_id
+        self.namespace = args.namespace
+        self.token: Optional[str] = args.token
+
+    def run(self) -> None:
+        api = HfApi(token=self.token)
+        api.suspend_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
+
+
+class ScheduledResumeCommand(BaseHuggingfaceCLICommand):
+    @staticmethod
+    def register_subcommand(parser: _SubParsersAction) -> None:
+        run_parser = parser.add_parser("resume", help="Resume (unpause) a scheduled Job")
+        run_parser.add_argument("scheduled_job_id", type=str, help="Scheduled Job ID")
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace where the scheduled job is. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument(
+            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
+        )
+        run_parser.set_defaults(func=ScheduledResumeCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        self.scheduled_job_id: str = args.scheduled_job_id
+        self.namespace = args.namespace
+        self.token: Optional[str] = args.token
+
+    def run(self) -> None:
+        api = HfApi(token=self.token)
+        api.resume_scheduled_job(scheduled_job_id=self.scheduled_job_id, namespace=self.namespace)
+
+
+class ScheduledUvCommand(BaseHuggingfaceCLICommand):
+    """Schedule UV scripts on Hugging Face infrastructure."""
+
+    @staticmethod
+    def register_subcommand(parser):
+        """Register UV run subcommand."""
+        uv_parser = parser.add_parser(
+            "uv",
+            help="Schedule UV scripts (Python with inline dependencies) on HF infrastructure",
+        )
+
+        subparsers = uv_parser.add_subparsers(dest="uv_command", help="UV commands", required=True)
+
+        # Run command only
+        run_parser = subparsers.add_parser(
+            "run",
+            help="Run a UV script (local file or URL) on HF infrastructure",
+        )
+        run_parser.add_argument(
+            "schedule",
+            type=str,
+            help="One of annually, yearly, monthly, weekly, daily, hourly, or a CRON schedule expression.",
+        )
+        run_parser.add_argument("script", help="UV script to run (local file or URL)")
+        run_parser.add_argument("script_args", nargs="...", help="Arguments for the script", default=[])
+        run_parser.add_argument(
+            "--suspend",
+            action="store_true",
+            help="Suspend (pause) the scheduled Job",
+            default=None,
+        )
+        run_parser.add_argument(
+            "--concurrency",
+            action="store_true",
+            help="Allow multiple instances of this Job to run concurrently",
+            default=None,
+        )
+        run_parser.add_argument("--image", type=str, help="Use a custom Docker image with `uv` installed.")
+        run_parser.add_argument(
+            "--repo",
+            help="Repository name for the script (creates ephemeral if not specified)",
+        )
+        run_parser.add_argument(
+            "--flavor",
+            type=str,
+            help=f"Flavor for the hardware, as in HF Spaces. Defaults to `cpu-basic`. Possible values: {', '.join(SUGGESTED_FLAVORS)}.",
+        )
+        run_parser.add_argument("-e", "--env", action="append", help="Environment variables")
+        run_parser.add_argument(
+            "-s",
+            "--secrets",
+            action="append",
+            help=(
+                "Set secret environment variables. E.g. --secrets SECRET=value "
+                "or `--secrets HF_TOKEN` to pass your Hugging Face token."
+            ),
+        )
+        run_parser.add_argument("--env-file", type=str, help="Read in a file of environment variables.")
+        run_parser.add_argument(
+            "--secrets-file",
+            type=str,
+            help="Read in a file of secret environment variables.",
+        )
+        run_parser.add_argument("--timeout", type=str, help="Max duration (e.g., 30s, 5m, 1h)")
+        run_parser.add_argument("-d", "--detach", action="store_true", help="Run in background")
+        run_parser.add_argument(
+            "--namespace",
+            type=str,
+            help="The namespace where the Job will be created. Defaults to the current user's namespace.",
+        )
+        run_parser.add_argument("--token", type=str, help="HF token")
+        # UV options
+        run_parser.add_argument("--with", action="append", help="Run with the given packages installed", dest="with_")
+        run_parser.add_argument(
+            "-p", "--python", type=str, help="The Python interpreter to use for the run environment"
+        )
+        run_parser.set_defaults(func=ScheduledUvCommand)
+
+    def __init__(self, args: Namespace) -> None:
+        """Initialize the command with parsed arguments."""
+        self.schedule: str = args.schedule
+        self.script = args.script
+        self.script_args = args.script_args
+        self.suspend: Optional[bool] = args.suspend
+        self.concurrency: Optional[bool] = args.concurrency
+        self.dependencies = args.with_
+        self.python = args.python
+        self.image = args.image
+        self.env: dict[str, Optional[str]] = {}
+        if args.env_file:
+            self.env.update(load_dotenv(Path(args.env_file).read_text(), environ=os.environ.copy()))
+        for env_value in args.env or []:
+            self.env.update(load_dotenv(env_value, environ=os.environ.copy()))
+        self.secrets: dict[str, Optional[str]] = {}
+        extended_environ = _get_extended_environ()
+        if args.secrets_file:
+            self.secrets.update(load_dotenv(Path(args.secrets_file).read_text(), environ=extended_environ))
+        for secret in args.secrets or []:
+            self.secrets.update(load_dotenv(secret, environ=extended_environ))
+        self.flavor: Optional[SpaceHardware] = args.flavor
+        self.timeout: Optional[str] = args.timeout
+        self.detach: bool = args.detach
+        self.namespace: Optional[str] = args.namespace
+        self.token: Optional[str] = args.token
+        self._repo = args.repo
+
+    def run(self) -> None:
+        """Schedule UV command."""
+        logging.set_verbosity(logging.INFO)
+        api = HfApi(token=self.token)
+        job = api.create_scheduled_uv_job(
+            script=self.script,
+            script_args=self.script_args,
+            schedule=self.schedule,
+            suspend=self.suspend,
+            concurrency=self.concurrency,
+            dependencies=self.dependencies,
+            python=self.python,
+            image=self.image,
+            env=self.env,
+            secrets=self.secrets,
+            flavor=self.flavor,
+            timeout=self.timeout,
+            namespace=self.namespace,
+            _repo=self._repo,
+        )
+
+        # Always print the job ID to the user
+        print(f"Scheduled Job created with ID: {job.id}")
huggingface_hub/cli/lfs.py
CHANGED
@@ -21,7 +21,7 @@ import os
 import subprocess
 import sys
 from argparse import _SubParsersAction
-from typing import
+from typing import Optional
 
 from huggingface_hub.commands import BaseHuggingfaceCLICommand
 from huggingface_hub.lfs import LFS_MULTIPART_UPLOAD_COMMAND
@@ -87,14 +87,14 @@ class LfsEnableCommand:
         print("Local repo set up for largefiles")
 
 
-def write_msg(msg:
+def write_msg(msg: dict):
     """Write out the message in Line delimited JSON."""
     msg_str = json.dumps(msg) + "\n"
     sys.stdout.write(msg_str)
     sys.stdout.flush()
 
 
-def read_msg() -> Optional[
+def read_msg() -> Optional[dict]:
     """Read Line delimited JSON from stdin."""
     msg = json.loads(sys.stdin.readline().strip())
 
@@ -144,7 +144,7 @@ class LfsUploadCommand:
         completion_url = msg["action"]["href"]
         header = msg["action"]["header"]
         chunk_size = int(header.pop("chunk_size"))
-        presigned_urls:
+        presigned_urls: list[str] = list(header.values())
 
         # Send a "started" progress event to allow other workers to start.
         # Otherwise they're delayed until first "progress" event is reported,
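For context, `write_msg`/`read_msg` implement the line-delimited JSON side of git-lfs's custom transfer protocol: one JSON object per line on stdout/stdin. A toy round-trip showing the framing these helpers assume (the message fields follow the git-lfs custom-transfer spec; the `read_msg` here is simplified and the values are illustrative):

```python
import io
import json
import sys
from typing import Optional


def write_msg(msg: dict) -> None:
    """Write one message as line-delimited JSON, as the upload agent does."""
    sys.stdout.write(json.dumps(msg) + "\n")
    sys.stdout.flush()


def read_msg(stream=sys.stdin) -> Optional[dict]:
    """Read one line-delimited JSON message (simplified: no terminate handling)."""
    return json.loads(stream.readline().strip())


# Simulated "upload" request, shaped like the git-lfs custom-transfer spec:
fake_stdin = io.StringIO('{"event": "upload", "oid": "abc123", "size": 4}\n')
request = read_msg(stream=fake_stdin)
assert request["event"] == "upload"
# Progress events back to git-lfs use the same one-object-per-line framing:
write_msg({"event": "progress", "oid": request["oid"], "bytesSoFar": 4, "bytesSinceLast": 4})
```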
|