huggingface-hub 0.33.5__py3-none-any.whl → 0.34.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub might be problematic. Click here for more details.

Files changed (68) hide show
  1. huggingface_hub/__init__.py +487 -525
  2. huggingface_hub/_commit_api.py +21 -28
  3. huggingface_hub/_jobs_api.py +145 -0
  4. huggingface_hub/_local_folder.py +7 -1
  5. huggingface_hub/_login.py +5 -5
  6. huggingface_hub/_oauth.py +6 -10
  7. huggingface_hub/_snapshot_download.py +11 -6
  8. huggingface_hub/_upload_large_folder.py +46 -23
  9. huggingface_hub/cli/__init__.py +27 -0
  10. huggingface_hub/cli/_cli_utils.py +69 -0
  11. huggingface_hub/cli/auth.py +210 -0
  12. huggingface_hub/cli/cache.py +405 -0
  13. huggingface_hub/cli/download.py +181 -0
  14. huggingface_hub/cli/hf.py +66 -0
  15. huggingface_hub/cli/jobs.py +522 -0
  16. huggingface_hub/cli/lfs.py +198 -0
  17. huggingface_hub/cli/repo.py +243 -0
  18. huggingface_hub/cli/repo_files.py +128 -0
  19. huggingface_hub/cli/system.py +52 -0
  20. huggingface_hub/cli/upload.py +316 -0
  21. huggingface_hub/cli/upload_large_folder.py +132 -0
  22. huggingface_hub/commands/_cli_utils.py +5 -0
  23. huggingface_hub/commands/delete_cache.py +3 -1
  24. huggingface_hub/commands/download.py +4 -0
  25. huggingface_hub/commands/env.py +3 -0
  26. huggingface_hub/commands/huggingface_cli.py +2 -0
  27. huggingface_hub/commands/repo.py +4 -0
  28. huggingface_hub/commands/repo_files.py +4 -0
  29. huggingface_hub/commands/scan_cache.py +3 -1
  30. huggingface_hub/commands/tag.py +3 -1
  31. huggingface_hub/commands/upload.py +4 -0
  32. huggingface_hub/commands/upload_large_folder.py +3 -1
  33. huggingface_hub/commands/user.py +11 -1
  34. huggingface_hub/commands/version.py +3 -0
  35. huggingface_hub/constants.py +1 -0
  36. huggingface_hub/file_download.py +16 -5
  37. huggingface_hub/hf_api.py +519 -7
  38. huggingface_hub/hf_file_system.py +8 -16
  39. huggingface_hub/hub_mixin.py +3 -3
  40. huggingface_hub/inference/_client.py +38 -39
  41. huggingface_hub/inference/_common.py +38 -11
  42. huggingface_hub/inference/_generated/_async_client.py +50 -51
  43. huggingface_hub/inference/_generated/types/__init__.py +1 -0
  44. huggingface_hub/inference/_generated/types/image_to_video.py +60 -0
  45. huggingface_hub/inference/_mcp/cli.py +36 -18
  46. huggingface_hub/inference/_mcp/constants.py +8 -0
  47. huggingface_hub/inference/_mcp/types.py +3 -0
  48. huggingface_hub/inference/_providers/__init__.py +4 -1
  49. huggingface_hub/inference/_providers/_common.py +3 -6
  50. huggingface_hub/inference/_providers/fal_ai.py +85 -42
  51. huggingface_hub/inference/_providers/hf_inference.py +17 -9
  52. huggingface_hub/inference/_providers/replicate.py +19 -1
  53. huggingface_hub/keras_mixin.py +2 -2
  54. huggingface_hub/repocard.py +1 -1
  55. huggingface_hub/repository.py +2 -2
  56. huggingface_hub/utils/_auth.py +1 -1
  57. huggingface_hub/utils/_cache_manager.py +2 -2
  58. huggingface_hub/utils/_dotenv.py +51 -0
  59. huggingface_hub/utils/_headers.py +1 -1
  60. huggingface_hub/utils/_runtime.py +1 -1
  61. huggingface_hub/utils/_xet.py +6 -2
  62. huggingface_hub/utils/_xet_progress_reporting.py +141 -0
  63. {huggingface_hub-0.33.5.dist-info → huggingface_hub-0.34.0.dist-info}/METADATA +7 -8
  64. {huggingface_hub-0.33.5.dist-info → huggingface_hub-0.34.0.dist-info}/RECORD +68 -51
  65. {huggingface_hub-0.33.5.dist-info → huggingface_hub-0.34.0.dist-info}/entry_points.txt +1 -0
  66. {huggingface_hub-0.33.5.dist-info → huggingface_hub-0.34.0.dist-info}/LICENSE +0 -0
  67. {huggingface_hub-0.33.5.dist-info → huggingface_hub-0.34.0.dist-info}/WHEEL +0 -0
  68. {huggingface_hub-0.33.5.dist-info → huggingface_hub-0.34.0.dist-info}/top_level.txt +0 -0
huggingface_hub/hf_api.py CHANGED
@@ -19,6 +19,7 @@ import io
19
19
  import json
20
20
  import re
21
21
  import struct
22
+ import time
22
23
  import warnings
23
24
  from collections import defaultdict
24
25
  from concurrent.futures import Future, ThreadPoolExecutor
@@ -27,6 +28,7 @@ from datetime import datetime
27
28
  from functools import wraps
28
29
  from itertools import islice
29
30
  from pathlib import Path
31
+ from textwrap import dedent
30
32
  from typing import (
31
33
  TYPE_CHECKING,
32
34
  Any,
@@ -65,6 +67,7 @@ from ._commit_api import (
65
67
  _warn_on_overwriting_operations,
66
68
  )
67
69
  from ._inference_endpoints import InferenceEndpoint, InferenceEndpointType
70
+ from ._jobs_api import JobInfo
68
71
  from ._space_api import SpaceHardware, SpaceRuntime, SpaceStorage, SpaceVariable
69
72
  from ._upload_large_folder import upload_large_folder_internal
70
73
  from .community import (
@@ -739,7 +742,7 @@ class InferenceProviderMapping:
739
742
  @dataclass
740
743
  class ModelInfo:
741
744
  """
742
- Contains information about a model on the Hub.
745
+ Contains information about a model on the Hub. This object is returned by [`model_info`] and [`list_models`].
743
746
 
744
747
  <Tip>
745
748
 
@@ -938,7 +941,7 @@ class ModelInfo:
938
941
  @dataclass
939
942
  class DatasetInfo:
940
943
  """
941
- Contains information about a dataset on the Hub.
944
+ Contains information about a dataset on the Hub. This object is returned by [`dataset_info`] and [`list_datasets`].
942
945
 
943
946
  <Tip>
944
947
 
@@ -1057,7 +1060,7 @@ class DatasetInfo:
1057
1060
  @dataclass
1058
1061
  class SpaceInfo:
1059
1062
  """
1060
- Contains information about a Space on the Hub.
1063
+ Contains information about a Space on the Hub. This object is returned by [`space_info`] and [`list_spaces`].
1061
1064
 
1062
1065
  <Tip>
1063
1066
 
@@ -1781,10 +1784,10 @@ class HfApi:
1781
1784
  elif effective_token == _get_token_from_environment():
1782
1785
  error_message += (
1783
1786
  " The token from HF_TOKEN environment variable is invalid. "
1784
- "Note that HF_TOKEN takes precedence over `huggingface-cli login`."
1787
+ "Note that HF_TOKEN takes precedence over `hf auth login`."
1785
1788
  )
1786
1789
  elif effective_token == _get_token_from_file():
1787
- error_message += " The token stored is invalid. Please run `huggingface-cli login` to update it."
1790
+ error_message += " The token stored is invalid. Please run `hf auth login` to update it."
1788
1791
  raise HTTPError(error_message, request=e.request, response=e.response) from e
1789
1792
  return r.json()
1790
1793
 
@@ -1931,7 +1934,7 @@ class HfApi:
1931
1934
  expand (`List[ExpandModelProperty_T]`, *optional*):
1932
1935
  List properties to return in the response. When used, only the properties in the list will be returned.
1933
1936
  This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed.
1934
- Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
1937
+ Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"resourceGroup"` and `"xetEnabled"`.
1935
1938
  full (`bool`, *optional*):
1936
1939
  Whether to fetch all model data, including the `last_modified`,
1937
1940
  the `sha`, the files and the `tags`. This is set to `True` by
@@ -5392,6 +5395,7 @@ class HfApi:
5392
5395
  library_name=self.library_name,
5393
5396
  library_version=self.library_version,
5394
5397
  user_agent=self.user_agent,
5398
+ endpoint=self.endpoint,
5395
5399
  )
5396
5400
 
5397
5401
  @validate_hf_hub_args
@@ -9642,7 +9646,7 @@ class HfApi:
9642
9646
  log(
9643
9647
  "It seems you are trying to upload a large folder at once. This might take some time and then fail if "
9644
9648
  "the folder is too large. For such cases, it is recommended to upload in smaller batches or to use "
9645
- "`HfApi().upload_large_folder(...)`/`huggingface-cli upload-large-folder` instead. For more details, "
9649
+ "`HfApi().upload_large_folder(...)`/`hf upload-large-folder` instead. For more details, "
9646
9650
  "check out https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#upload-a-large-folder."
9647
9651
  )
9648
9652
 
@@ -9939,6 +9943,506 @@ class HfApi:
9939
9943
  r = get_session().get(path, headers=headers)
9940
9944
  hf_raise_for_status(r)
9941
9945
 
9946
+ def run_job(
9947
+ self,
9948
+ *,
9949
+ image: str,
9950
+ command: List[str],
9951
+ env: Optional[Dict[str, Any]] = None,
9952
+ secrets: Optional[Dict[str, Any]] = None,
9953
+ flavor: Optional[SpaceHardware] = None,
9954
+ timeout: Optional[Union[int, float, str]] = None,
9955
+ namespace: Optional[str] = None,
9956
+ token: Union[bool, str, None] = None,
9957
+ ) -> JobInfo:
9958
+ """
9959
+ Run compute Jobs on Hugging Face infrastructure.
9960
+
9961
+ Args:
9962
+ image (`str`):
9963
+ The Docker image to use.
9964
+ Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
9965
+ Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
9966
+
9967
+ command (`List[str]`):
9968
+ The command to run. Example: `["echo", "hello"]`.
9969
+
9970
+ env (`Dict[str, Any]`, *optional*):
9971
+ Defines the environment variables for the Job.
9972
+
9973
+ secrets (`Dict[str, Any]`, *optional*):
9974
+ Defines the secret environment variables for the Job.
9975
+
9976
+ flavor (`str`, *optional*):
9977
+ Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
9978
+ Defaults to `"cpu-basic"`.
9979
+
9980
+ timeout (`Union[int, float, str]`, *optional*):
9981
+ Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
9982
+ Example: `300` or `"5m"` for 5 minutes.
9983
+
9984
+ namespace (`str`, *optional*):
9985
+ The namespace where the Job will be created. Defaults to the current user's namespace.
9986
+
9987
+ token (`Union[bool, str, None]`, *optional*):
9988
+ A valid user access token. If not provided, the locally saved token will be used, which is the
9989
+ recommended authentication method. Set to `False` to disable authentication.
9990
+ Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
9991
+
9992
+ Example:
9993
+ Run your first Job:
9994
+
9995
+ ```python
9996
+ >>> from huggingface_hub import run_job
9997
+ >>> run_job(image="python:3.12", command=["python", "-c", "print('Hello from HF compute!')"])
9998
+ ```
9999
+
10000
+ Run a GPU Job:
10001
+
10002
+ ```python
10003
+ >>> from huggingface_hub import run_job
10004
+ >>> image = "pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"
10005
+ >>> command = ["python", "-c", "import torch; print(f'This code ran with the following GPU: {torch.cuda.get_device_name()}')"]
10006
+ >>> run_job(image=image, command=command, flavor="a10g-small")
10007
+ ```
10008
+
10009
+ """
10010
+ if flavor is None:
10011
+ flavor = SpaceHardware.CPU_BASIC
10012
+
10013
+ # prepare payload to send to HF Jobs API
10014
+ input_json: Dict[str, Any] = {
10015
+ "command": command,
10016
+ "arguments": [],
10017
+ "environment": env or {},
10018
+ "flavor": flavor,
10019
+ }
10020
+ # secrets are optional
10021
+ if secrets:
10022
+ input_json["secrets"] = secrets
10023
+ # timeout is optional
10024
+ if timeout:
10025
+ time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
10026
+ if isinstance(timeout, str) and timeout[-1] in time_units_factors:
10027
+ input_json["timeoutSeconds"] = int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
10028
+ else:
10029
+ input_json["timeoutSeconds"] = int(timeout)
10030
+ # input is either from docker hub or from HF spaces
10031
+ for prefix in (
10032
+ "https://huggingface.co/spaces/",
10033
+ "https://hf.co/spaces/",
10034
+ "huggingface.co/spaces/",
10035
+ "hf.co/spaces/",
10036
+ ):
10037
+ if image.startswith(prefix):
10038
+ input_json["spaceId"] = image[len(prefix) :]
10039
+ break
10040
+ else:
10041
+ input_json["dockerImage"] = image
10042
+ if namespace is None:
10043
+ namespace = self.whoami(token=token)["name"]
10044
+ response = get_session().post(
10045
+ f"{self.endpoint}/api/jobs/{namespace}",
10046
+ json=input_json,
10047
+ headers=self._build_hf_headers(token=token),
10048
+ )
10049
+ hf_raise_for_status(response)
10050
+ job_info = response.json()
10051
+ return JobInfo(**job_info, endpoint=self.endpoint)
10052
+
10053
+ def fetch_job_logs(
10054
+ self,
10055
+ *,
10056
+ job_id: str,
10057
+ namespace: Optional[str] = None,
10058
+ token: Union[bool, str, None] = None,
10059
+ ) -> Iterable[str]:
10060
+ """
10061
+ Fetch all the logs from a compute Job on Hugging Face infrastructure.
10062
+
10063
+ Args:
10064
+ job_id (`str`):
10065
+ ID of the Job.
10066
+
10067
+ namespace (`str`, *optional*):
10068
+ The namespace where the Job is running. Defaults to the current user's namespace.
10069
+
10070
+ token (`Union[bool, str, None]`, *optional*):
10071
+ A valid user access token. If not provided, the locally saved token will be used, which is the
10072
+ recommended authentication method. Set to `False` to disable authentication.
10073
+ Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
10074
+
10075
+ Example:
10076
+
10077
+ ```python
10078
+ >>> from huggingface_hub import fetch_job_logs, run_job
10079
+ >>> job = run_job(image="python:3.12", command=["python", "-c", "print('Hello from HF compute!')"])
10080
+ >>> for log in fetch_job_logs(job_id=job.job_id):
10081
+ ... print(log)
10082
+ Hello from HF compute!
10083
+ ```
10084
+ """
10085
+ if namespace is None:
10086
+ namespace = self.whoami(token=token)["name"]
10087
+ logging_finished = logging_started = False
10088
+ job_finished = False
10089
+ # - We need to retry because sometimes the /logs doesn't return logs when the job just started.
10090
+ # (for example it can return only two lines: one for "Job started" and one empty line)
10091
+ # - Timeouts can happen in case of build errors
10092
+ # - ChunkedEncodingError can happen in case of stopped logging in the middle of streaming
10093
+ # - Infinite empty log stream can happen in case of build error
10094
+ # (the logs stream is infinite and empty except for the Job started message)
10095
+ # - there is a ": keep-alive" every 30 seconds
10096
+
10097
+ # We don't use http_backoff since we need to check ourselves if ConnectionError.__context__ is a TimeoutError
10098
+ max_retries = 5
10099
+ min_wait_time = 1
10100
+ max_wait_time = 10
10101
+ sleep_time = 0
10102
+ for _ in range(max_retries):
10103
+ time.sleep(sleep_time)
10104
+ sleep_time = min(max_wait_time, max(min_wait_time, sleep_time * 2))
10105
+ try:
10106
+ resp = get_session().get(
10107
+ f"{self.endpoint}/api/jobs/{namespace}/{job_id}/logs",
10108
+ headers=self._build_hf_headers(token=token),
10109
+ stream=True,
10110
+ timeout=120,
10111
+ )
10112
+ log = None
10113
+ for line in resp.iter_lines(chunk_size=1):
10114
+ line = line.decode("utf-8")
10115
+ if line and line.startswith("data: {"):
10116
+ data = json.loads(line[len("data: ") :])
10117
+ # timestamp = data["timestamp"]
10118
+ if not data["data"].startswith("===== Job started"):
10119
+ logging_started = True
10120
+ log = data["data"]
10121
+ yield log
10122
+ logging_finished = logging_started
10123
+ except requests.exceptions.ChunkedEncodingError:
10124
+ # Response ended prematurely
10125
+ break
10126
+ except KeyboardInterrupt:
10127
+ break
10128
+ except requests.exceptions.ConnectionError as err:
10129
+ is_timeout = err.__context__ and isinstance(getattr(err.__context__, "__cause__", None), TimeoutError)
10130
+ if logging_started or not is_timeout:
10131
+ raise
10132
+ if logging_finished or job_finished:
10133
+ break
10134
+ job_status = (
10135
+ get_session()
10136
+ .get(
10137
+ f"{self.endpoint}/api/jobs/{namespace}/{job_id}",
10138
+ headers=self._build_hf_headers(token=token),
10139
+ )
10140
+ .json()
10141
+ )
10142
+ if "status" in job_status and job_status["status"]["stage"] not in ("RUNNING", "UPDATING"):
10143
+ job_finished = True
10144
+
10145
+ def list_jobs(
10146
+ self,
10147
+ *,
10148
+ timeout: Optional[int] = None,
10149
+ namespace: Optional[str] = None,
10150
+ token: Union[bool, str, None] = None,
10151
+ ) -> List[JobInfo]:
10152
+ """
10153
+ List compute Jobs on Hugging Face infrastructure.
10154
+
10155
+ Args:
10156
+ timeout (`float`, *optional*):
10157
+ Timeout in seconds for the request to the Hub.
10158
+
10159
+ namespace (`str`, *optional*):
10160
+ The namespace from where it lists the jobs. Defaults to the current user's namespace.
10161
+
10162
+ token (`Union[bool, str, None]`, *optional*):
10163
+ A valid user access token. If not provided, the locally saved token will be used, which is the
10164
+ recommended authentication method. Set to `False` to disable authentication.
10165
+ Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
10166
+ """
10167
+ if namespace is None:
10168
+ namespace = self.whoami(token=token)["name"]
10169
+ response = get_session().get(
10170
+ f"{self.endpoint}/api/jobs/{namespace}",
10171
+ headers=self._build_hf_headers(token=token),
10172
+ timeout=timeout,
10173
+ )
10174
+ response.raise_for_status()
10175
+ return [JobInfo(**job_info, endpoint=self.endpoint) for job_info in response.json()]
10176
+
10177
+ def inspect_job(
10178
+ self,
10179
+ *,
10180
+ job_id: str,
10181
+ namespace: Optional[str] = None,
10182
+ token: Union[bool, str, None] = None,
10183
+ ) -> JobInfo:
10184
+ """
10185
+ Inspect a compute Job on Hugging Face infrastructure.
10186
+
10187
+ Args:
10188
+ job_id (`str`):
10189
+ ID of the Job.
10190
+
10191
+ namespace (`str`, *optional*):
10192
+ The namespace where the Job is running. Defaults to the current user's namespace.
10193
+
10194
+ token (`Union[bool, str, None]`, *optional*):
10195
+ A valid user access token. If not provided, the locally saved token will be used, which is the
10196
+ recommended authentication method. Set to `False` to disable authentication.
10197
+ Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
10198
+
10199
+ Example:
10200
+
10201
+ ```python
10202
+ >>> from huggingface_hub import inspect_job, run_job
10203
+ >>> job = run_job("python:3.12", ["python", "-c" ,"print('Hello from HF compute!')"])
10204
+ >>> inspect_job(job.job_id)
10205
+ JobInfo(
10206
+ id='68780d00bbe36d38803f645f',
10207
+ created_at=datetime.datetime(2025, 7, 16, 20, 35, 12, 808000, tzinfo=datetime.timezone.utc),
10208
+ docker_image='python:3.12',
10209
+ space_id=None,
10210
+ command=['python', '-c', "print('Hello from HF compute!')"],
10211
+ arguments=[],
10212
+ environment={},
10213
+ secrets={},
10214
+ flavor='cpu-basic',
10215
+ status=JobStatus(stage='RUNNING', message=None)
10216
+ )
10217
+ ```
10218
+ """
10219
+ if namespace is None:
10220
+ namespace = self.whoami(token=token)["name"]
10221
+ response = get_session().get(
10222
+ f"{self.endpoint}/api/jobs/{namespace}/{job_id}",
10223
+ headers=self._build_hf_headers(token=token),
10224
+ )
10225
+ response.raise_for_status()
10226
+ return JobInfo(**response.json(), endpoint=self.endpoint)
10227
+
10228
+ def cancel_job(
10229
+ self,
10230
+ *,
10231
+ job_id: str,
10232
+ namespace: Optional[str] = None,
10233
+ token: Union[bool, str, None] = None,
10234
+ ) -> None:
10235
+ """
10236
+ Cancel a compute Job on Hugging Face infrastructure.
10237
+
10238
+ Args:
10239
+ job_id (`str`):
10240
+ ID of the Job.
10241
+
10242
+ namespace (`str`, *optional*):
10243
+ The namespace where the Job is running. Defaults to the current user's namespace.
10244
+
10245
+ token (`Union[bool, str, None]`, *optional*):
10246
+ A valid user access token. If not provided, the locally saved token will be used, which is the
10247
+ recommended authentication method. Set to `False` to disable authentication.
10248
+ Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
10249
+ """
10250
+ if namespace is None:
10251
+ namespace = self.whoami(token=token)["name"]
10252
+ get_session().post(
10253
+ f"{self.endpoint}/api/jobs/{namespace}/{job_id}/cancel",
10254
+ headers=self._build_hf_headers(token=token),
10255
+ ).raise_for_status()
10256
+
10257
+ @experimental
10258
+ def run_uv_job(
10259
+ self,
10260
+ script: str,
10261
+ *,
10262
+ script_args: Optional[List[str]] = None,
10263
+ dependencies: Optional[List[str]] = None,
10264
+ python: Optional[str] = None,
10265
+ image: Optional[str] = None,
10266
+ env: Optional[Dict[str, Any]] = None,
10267
+ secrets: Optional[Dict[str, Any]] = None,
10268
+ flavor: Optional[SpaceHardware] = None,
10269
+ timeout: Optional[Union[int, float, str]] = None,
10270
+ namespace: Optional[str] = None,
10271
+ token: Union[bool, str, None] = None,
10272
+ _repo: Optional[str] = None,
10273
+ ) -> JobInfo:
10274
+ """
10275
+ Run a UV script Job on Hugging Face infrastructure.
10276
+
10277
+ Args:
10278
+ script (`str`):
10279
+ Path or URL of the UV script.
10280
+
10281
+ script_args (`List[str]`, *optional*):
10282
+ Arguments to pass to the script.
10283
+
10284
+ dependencies (`List[str]`, *optional*):
10285
+ Dependencies to use to run the UV script.
10286
+
10287
+ python (`str`, *optional*):
10288
+ Use a specific Python version. Default is 3.12.
10289
+
10290
+ image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm-slim"):
10291
+ Use a custom Docker image with `uv` installed.
10292
+
10293
+ env (`Dict[str, Any]`, *optional*):
10294
+ Defines the environment variables for the Job.
10295
+
10296
+ secrets (`Dict[str, Any]`, *optional*):
10297
+ Defines the secret environment variables for the Job.
10298
+
10299
+ flavor (`str`, *optional*):
10300
+ Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
10301
+ Defaults to `"cpu-basic"`.
10302
+
10303
+ timeout (`Union[int, float, str]`, *optional*):
10304
+ Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
10305
+ Example: `300` or `"5m"` for 5 minutes.
10306
+
10307
+ namespace (`str`, *optional*):
10308
+ The namespace where the Job will be created. Defaults to the current user's namespace.
10309
+
10310
+ token (`Union[bool, str, None]`, *optional*):
10311
+ A valid user access token. If not provided, the locally saved token will be used, which is the
10312
+ recommended authentication method. Set to `False` to disable authentication.
10313
+ Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
10314
+
10315
+ Example:
10316
+
10317
+ ```python
10318
+ >>> from huggingface_hub import run_uv_job
10319
+ >>> script = "https://raw.githubusercontent.com/huggingface/trl/refs/heads/main/trl/scripts/sft.py"
10320
+ >>> run_uv_job(script, dependencies=["trl"], flavor="a10g-small")
10321
+ ```
10322
+ """
10323
+ image = image or "ghcr.io/astral-sh/uv:python3.12-bookworm-slim"
10324
+ env = env or {}
10325
+ secrets = secrets or {}
10326
+
10327
+ # Build command
10328
+ uv_args = []
10329
+ if dependencies:
10330
+ for dependency in dependencies:
10331
+ uv_args += ["--with", dependency]
10332
+ if python:
10333
+ uv_args += ["--python", python]
10334
+ script_args = script_args or []
10335
+
10336
+ if namespace is None:
10337
+ namespace = self.whoami(token=token)["name"]
10338
+
10339
+ if script.startswith("http://") or script.startswith("https://"):
10340
+ # Direct URL execution - no upload needed
10341
+ command = ["uv", "run"] + uv_args + [script] + script_args
10342
+ else:
10343
+ # Local file - upload to HF
10344
+ script_path = Path(script)
10345
+ filename = script_path.name
10346
+ # Parse repo
10347
+ if _repo:
10348
+ repo_id = _repo
10349
+ if "/" not in repo_id:
10350
+ repo_id = f"{namespace}/{repo_id}"
10351
+ repo_id = _repo
10352
+ else:
10353
+ repo_id = f"{namespace}/hf-cli-jobs-uv-run-scripts"
10354
+
10355
+ # Create repo if needed
10356
+ try:
10357
+ self.repo_info(repo_id, repo_type="dataset")
10358
+ logger.debug(f"Using existing repository: {repo_id}")
10359
+ except RepositoryNotFoundError:
10360
+ logger.info(f"Creating repository: {repo_id}")
10361
+ create_repo(repo_id, repo_type="dataset", private=True, exist_ok=True)
10362
+
10363
+ # Upload script
10364
+ logger.info(f"Uploading {script_path.name} to {repo_id}...")
10365
+ with open(script_path, "r") as f:
10366
+ script_content = f.read()
10367
+
10368
+ self.upload_file(
10369
+ path_or_fileobj=script_content.encode(),
10370
+ path_in_repo=filename,
10371
+ repo_id=repo_id,
10372
+ repo_type="dataset",
10373
+ )
10374
+
10375
+ script_url = f"https://huggingface.co/datasets/{repo_id}/resolve/main/{filename}"
10376
+ repo_url = f"https://huggingface.co/datasets/{repo_id}"
10377
+
10378
+ logger.debug(f"✓ Script uploaded to: {repo_url}/blob/main/{filename}")
10379
+
10380
+ # Create and upload minimal README
10381
+ timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S UTC")
10382
+ readme_content = dedent(
10383
+ f"""
10384
+ ---
10385
+ tags:
10386
+ - hf-cli-jobs-uv-script
10387
+ - ephemeral
10388
+ viewer: false
10389
+ ---
10390
+
10391
+ # UV Script: {filename}
10392
+
10393
+ Executed via `hf jobs uv run` on {timestamp}
10394
+
10395
+ ## Run this script
10396
+
10397
+ ```bash
10398
+ hf jobs uv run {filename}
10399
+ ```
10400
+
10401
+ ---
10402
+ *Created with [hf jobs](https://huggingface.co/docs/huggingface_hub/main/en/guides/jobs)*
10403
+ """
10404
+ )
10405
+ self.upload_file(
10406
+ path_or_fileobj=readme_content.encode(),
10407
+ path_in_repo="README.md",
10408
+ repo_id=repo_id,
10409
+ repo_type="dataset",
10410
+ )
10411
+
10412
+ secrets["UV_SCRIPT_HF_TOKEN"] = token or self.token or get_token()
10413
+ env["UV_SCRIPT_URL"] = script_url
10414
+
10415
+ pre_command = (
10416
+ dedent(
10417
+ """
10418
+ import urllib.request
10419
+ import os
10420
+ from pathlib import Path
10421
+ o = urllib.request.build_opener()
10422
+ o.addheaders = [("Authorization", "Bearer " + os.environ["UV_SCRIPT_HF_TOKEN"])]
10423
+ Path("/tmp/script.py").write_bytes(o.open(os.environ["UV_SCRIPT_URL"]).read())
10424
+ """
10425
+ )
10426
+ .strip()
10427
+ .replace('"', r"\"")
10428
+ .split("\n")
10429
+ )
10430
+ pre_command = ["python", "-c", '"' + "; ".join(pre_command) + '"']
10431
+ command = ["uv", "run"] + uv_args + ["/tmp/script.py"] + script_args
10432
+ command = ["bash", "-c", " ".join(pre_command) + " && " + " ".join(command)]
10433
+
10434
+ # Create RunCommand args
10435
+ return self.run_job(
10436
+ image=image,
10437
+ command=command,
10438
+ env=env,
10439
+ secrets=secrets,
10440
+ flavor=flavor,
10441
+ timeout=timeout,
10442
+ namespace=namespace,
10443
+ token=token,
10444
+ )
10445
+
9942
10446
 
9943
10447
  def _parse_revision_from_pr_url(pr_url: str) -> str:
9944
10448
  """Safely parse revision number from a PR url.
@@ -10095,3 +10599,11 @@ get_user_overview = api.get_user_overview
10095
10599
  list_organization_members = api.list_organization_members
10096
10600
  list_user_followers = api.list_user_followers
10097
10601
  list_user_following = api.list_user_following
10602
+
10603
+ # Jobs API
10604
+ run_job = api.run_job
10605
+ fetch_job_logs = api.fetch_job_logs
10606
+ list_jobs = api.list_jobs
10607
+ inspect_job = api.inspect_job
10608
+ cancel_job = api.cancel_job
10609
+ run_uv_job = api.run_uv_job