huggingface-hub 0.34.6__py3-none-any.whl → 0.35.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of huggingface-hub might be problematic.

Files changed (38)
  1. huggingface_hub/__init__.py +19 -1
  2. huggingface_hub/_jobs_api.py +159 -2
  3. huggingface_hub/_tensorboard_logger.py +9 -10
  4. huggingface_hub/cli/auth.py +1 -1
  5. huggingface_hub/cli/cache.py +3 -9
  6. huggingface_hub/cli/jobs.py +551 -1
  7. huggingface_hub/cli/repo.py +6 -4
  8. huggingface_hub/commands/delete_cache.py +2 -2
  9. huggingface_hub/commands/scan_cache.py +1 -1
  10. huggingface_hub/commands/user.py +1 -1
  11. huggingface_hub/hf_api.py +522 -78
  12. huggingface_hub/hf_file_system.py +3 -1
  13. huggingface_hub/hub_mixin.py +5 -3
  14. huggingface_hub/inference/_client.py +17 -180
  15. huggingface_hub/inference/_common.py +72 -70
  16. huggingface_hub/inference/_generated/_async_client.py +34 -200
  17. huggingface_hub/inference/_generated/types/chat_completion.py +2 -0
  18. huggingface_hub/inference/_mcp/_cli_hacks.py +3 -3
  19. huggingface_hub/inference/_mcp/cli.py +1 -1
  20. huggingface_hub/inference/_mcp/constants.py +1 -1
  21. huggingface_hub/inference/_mcp/mcp_client.py +28 -11
  22. huggingface_hub/inference/_mcp/types.py +3 -0
  23. huggingface_hub/inference/_mcp/utils.py +7 -3
  24. huggingface_hub/inference/_providers/_common.py +28 -4
  25. huggingface_hub/inference/_providers/black_forest_labs.py +1 -1
  26. huggingface_hub/inference/_providers/fal_ai.py +2 -2
  27. huggingface_hub/inference/_providers/hf_inference.py +15 -7
  28. huggingface_hub/inference/_providers/replicate.py +1 -1
  29. huggingface_hub/repocard.py +2 -1
  30. huggingface_hub/utils/_git_credential.py +1 -1
  31. huggingface_hub/utils/_typing.py +24 -4
  32. huggingface_hub/utils/_xet_progress_reporting.py +31 -10
  33. {huggingface_hub-0.34.6.dist-info → huggingface_hub-0.35.0rc1.dist-info}/METADATA +7 -4
  34. {huggingface_hub-0.34.6.dist-info → huggingface_hub-0.35.0rc1.dist-info}/RECORD +38 -38
  35. {huggingface_hub-0.34.6.dist-info → huggingface_hub-0.35.0rc1.dist-info}/LICENSE +0 -0
  36. {huggingface_hub-0.34.6.dist-info → huggingface_hub-0.35.0rc1.dist-info}/WHEEL +0 -0
  37. {huggingface_hub-0.34.6.dist-info → huggingface_hub-0.35.0rc1.dist-info}/entry_points.txt +0 -0
  38. {huggingface_hub-0.34.6.dist-info → huggingface_hub-0.35.0rc1.dist-info}/top_level.txt +0 -0
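
The headline change is a scheduled-Jobs API: `_jobs_api.py` gains `JobSpec`, `ScheduledJobStatus`, and `ScheduledJobInfo` (+159 lines), `hf_api.py` and `cli/jobs.py` grow the matching client methods and CLI commands, and `__init__.py` exports six new helpers. A hedged usage sketch follows: `create_scheduled_job(image=..., command=..., schedule=...)` is taken from the docstring example added in `_jobs_api.py`, while the assumption that the other helpers accept the scheduled job id should be verified against `hf_api.py`.

```python
from huggingface_hub import (
    create_scheduled_job,
    delete_scheduled_job,
    inspect_scheduled_job,
    resume_scheduled_job,
    suspend_scheduled_job,
)

# Schedule an hourly job (parameters taken from the new docstring example).
job = create_scheduled_job(
    image="python:3.12",
    command=["python", "-c", "print('Hello from the cloud!')"],
    schedule="@hourly",  # or a CRON expression such as "0 9 * * 1"
)
print(inspect_scheduled_job(job.id).status.next_job_run_at)  # assumed signature
suspend_scheduled_job(job.id)  # pause future runs
resume_scheduled_job(job.id)   # resume them
delete_scheduled_job(job.id)   # remove the schedule entirely
```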
huggingface_hub/__init__.py

@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING
 
 
-__version__ = "0.34.6"
+__version__ = "0.35.0.rc1"
 
 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when
@@ -182,6 +182,8 @@ _SUBMOD_ATTRS = {
         "create_inference_endpoint_from_catalog",
         "create_pull_request",
         "create_repo",
+        "create_scheduled_job",
+        "create_scheduled_uv_job",
         "create_tag",
         "create_webhook",
         "dataset_info",
@@ -192,6 +194,7 @@ _SUBMOD_ATTRS = {
         "delete_folder",
         "delete_inference_endpoint",
         "delete_repo",
+        "delete_scheduled_job",
         "delete_space_secret",
         "delete_space_storage",
         "delete_space_variable",
@@ -219,6 +222,7 @@ _SUBMOD_ATTRS = {
         "get_webhook",
         "grant_access",
         "inspect_job",
+        "inspect_scheduled_job",
         "list_accepted_access_requests",
         "list_collections",
         "list_datasets",
@@ -259,6 +263,7 @@ _SUBMOD_ATTRS = {
         "request_space_storage",
         "restart_space",
         "resume_inference_endpoint",
+        "resume_scheduled_job",
         "revision_exists",
         "run_as_future",
         "run_job",
@@ -267,6 +272,7 @@ _SUBMOD_ATTRS = {
         "set_space_sleep_time",
         "space_info",
         "super_squash_history",
+        "suspend_scheduled_job",
         "unlike",
         "update_collection_item",
         "update_collection_metadata",
@@ -828,6 +834,8 @@ __all__ = [
     "create_inference_endpoint_from_catalog",
     "create_pull_request",
     "create_repo",
+    "create_scheduled_job",
+    "create_scheduled_uv_job",
     "create_tag",
     "create_webhook",
     "dataset_info",
@@ -838,6 +846,7 @@ __all__ = [
     "delete_folder",
     "delete_inference_endpoint",
     "delete_repo",
+    "delete_scheduled_job",
     "delete_space_secret",
     "delete_space_storage",
     "delete_space_variable",
@@ -878,6 +887,7 @@ __all__ = [
     "hf_hub_download",
     "hf_hub_url",
     "inspect_job",
+    "inspect_scheduled_job",
     "interpreter_login",
     "list_accepted_access_requests",
     "list_collections",
@@ -933,6 +943,7 @@ __all__ = [
     "request_space_storage",
     "restart_space",
     "resume_inference_endpoint",
+    "resume_scheduled_job",
     "revision_exists",
     "run_as_future",
     "run_job",
@@ -949,6 +960,7 @@ __all__ = [
     "split_tf_state_dict_into_shards",
     "split_torch_state_dict_into_shards",
     "super_squash_history",
+    "suspend_scheduled_job",
     "try_to_load_from_cache",
     "unlike",
     "update_collection_item",
@@ -1190,6 +1202,8 @@ if TYPE_CHECKING: # pragma: no cover
         create_inference_endpoint_from_catalog,  # noqa: F401
         create_pull_request,  # noqa: F401
         create_repo,  # noqa: F401
+        create_scheduled_job,  # noqa: F401
+        create_scheduled_uv_job,  # noqa: F401
         create_tag,  # noqa: F401
         create_webhook,  # noqa: F401
         dataset_info,  # noqa: F401
@@ -1200,6 +1214,7 @@ if TYPE_CHECKING: # pragma: no cover
         delete_folder,  # noqa: F401
         delete_inference_endpoint,  # noqa: F401
         delete_repo,  # noqa: F401
+        delete_scheduled_job,  # noqa: F401
         delete_space_secret,  # noqa: F401
         delete_space_storage,  # noqa: F401
         delete_space_variable,  # noqa: F401
@@ -1227,6 +1242,7 @@ if TYPE_CHECKING: # pragma: no cover
         get_webhook,  # noqa: F401
         grant_access,  # noqa: F401
         inspect_job,  # noqa: F401
+        inspect_scheduled_job,  # noqa: F401
         list_accepted_access_requests,  # noqa: F401
         list_collections,  # noqa: F401
         list_datasets,  # noqa: F401
@@ -1267,6 +1283,7 @@ if TYPE_CHECKING: # pragma: no cover
         request_space_storage,  # noqa: F401
         restart_space,  # noqa: F401
         resume_inference_endpoint,  # noqa: F401
+        resume_scheduled_job,  # noqa: F401
         revision_exists,  # noqa: F401
         run_as_future,  # noqa: F401
         run_job,  # noqa: F401
@@ -1275,6 +1292,7 @@ if TYPE_CHECKING: # pragma: no cover
         set_space_sleep_time,  # noqa: F401
         space_info,  # noqa: F401
         super_squash_history,  # noqa: F401
+        suspend_scheduled_job,  # noqa: F401
         unlike,  # noqa: F401
         update_collection_item,  # noqa: F401
         update_collection_metadata,  # noqa: F401
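
All of the `__init__.py` edits above are the same mechanical change in three places: the new names are added to the `_SUBMOD_ATTRS` lazy-import table, to `__all__`, and to the `TYPE_CHECKING` import block. A simplified sketch of the PEP 562 pattern that `_SUBMOD_ATTRS` feeds (assumption: the real module also handles plain submodules and deprecation shims):

```python
import importlib

# Maps submodule -> attributes it provides, mirroring the table edited above.
_SUBMOD_ATTRS = {"hf_api": ["create_scheduled_job", "suspend_scheduled_job"]}
_attr_to_module = {attr: mod for mod, attrs in _SUBMOD_ATTRS.items() for attr in attrs}

def __getattr__(name: str):
    # Invoked only when `name` is not found normally, so hf_api is imported
    # on first access instead of at `import huggingface_hub` time.
    if name in _attr_to_module:
        module = importlib.import_module(f"huggingface_hub.{_attr_to_module[name]}")
        return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```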
huggingface_hub/_jobs_api.py

@@ -15,7 +15,7 @@
 from dataclasses import dataclass
 from datetime import datetime
 from enum import Enum
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union
 
 from huggingface_hub import constants
 from huggingface_hub._space_api import SpaceHardware
@@ -85,7 +85,7 @@ class JobInfo:
         status: (`JobStatus` or `None`):
             Status of the Job, e.g. `JobStatus(stage="RUNNING", message=None)`
             See [`JobStage`] for possible stage values.
-        status: (`JobOwner` or `None`):
+        owner: (`JobOwner` or `None`):
             Owner of the Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
 
     Example:
@@ -142,3 +142,160 @@ class JobInfo:
         # Inferred fields
         self.endpoint = kwargs.get("endpoint", constants.ENDPOINT)
         self.url = f"{self.endpoint}/jobs/{self.owner.name}/{self.id}"
+
+
+@dataclass
+class JobSpec:
+    docker_image: Optional[str]
+    space_id: Optional[str]
+    command: Optional[List[str]]
+    arguments: Optional[List[str]]
+    environment: Optional[Dict[str, Any]]
+    secrets: Optional[Dict[str, Any]]
+    flavor: Optional[SpaceHardware]
+    timeout: Optional[int]
+    tags: Optional[List[str]]
+    arch: Optional[str]
+
+    def __init__(self, **kwargs) -> None:
+        self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image")
+        self.space_id = kwargs.get("spaceId") or kwargs.get("space_id")
+        self.command = kwargs.get("command")
+        self.arguments = kwargs.get("arguments")
+        self.environment = kwargs.get("environment")
+        self.secrets = kwargs.get("secrets")
+        self.flavor = kwargs.get("flavor")
+        self.timeout = kwargs.get("timeout")
+        self.tags = kwargs.get("tags")
+        self.arch = kwargs.get("arch")
+
+
+@dataclass
+class LastJobInfo:
+    id: str
+    at: datetime
+
+    def __init__(self, **kwargs) -> None:
+        self.id = kwargs["id"]
+        self.at = parse_datetime(kwargs["at"])
+
+
+@dataclass
+class ScheduledJobStatus:
+    last_job: Optional[LastJobInfo]
+    next_job_run_at: Optional[datetime]
+
+    def __init__(self, **kwargs) -> None:
+        last_job = kwargs.get("lastJob") or kwargs.get("last_job")
+        self.last_job = LastJobInfo(**last_job) if last_job else None
+        next_job_run_at = kwargs.get("nextJobRunAt") or kwargs.get("next_job_run_at")
+        self.next_job_run_at = parse_datetime(str(next_job_run_at)) if next_job_run_at else None
+
+
+@dataclass
+class ScheduledJobInfo:
+    """
+    Contains information about a scheduled Job.
+
+    Args:
+        id (`str`):
+            Scheduled Job ID.
+        created_at (`datetime` or `None`):
+            When the scheduled Job was created.
+        tags (`List[str]` or `None`):
+            The tags of the scheduled Job.
+        schedule (`str` or `None`):
+            One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
+            CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
+        suspend (`bool` or `None`):
+            Whether the scheduled Job is suspended (paused).
+        concurrency (`bool` or `None`):
+            Whether multiple instances of this Job can run concurrently.
+        status (`ScheduledJobStatus` or `None`):
+            Status of the scheduled Job.
+        owner: (`JobOwner` or `None`):
+            Owner of the scheduled Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
+        job_spec: (`JobSpec` or `None`):
+            Specifications of the Job.
+
+    Example:
+
+    ```python
+    >>> from huggingface_hub import create_scheduled_job
+    >>> scheduled_job = create_scheduled_job(
+    ...     image="python:3.12",
+    ...     command=["python", "-c", "print('Hello from the cloud!')"],
+    ...     schedule="@hourly",
+    ... )
+    >>> scheduled_job.id
+    '687fb701029421ae5549d999'
+    >>> scheduled_job.status.next_job_run_at
+    datetime.datetime(2025, 7, 22, 17, 6, 25, 79000, tzinfo=datetime.timezone.utc)
+    ```
+    """
+
+    id: str
+    created_at: Optional[datetime]
+    job_spec: JobSpec
+    schedule: Optional[str]
+    suspend: Optional[bool]
+    concurrency: Optional[bool]
+    status: ScheduledJobStatus
+    owner: JobOwner
+
+    def __init__(self, **kwargs) -> None:
+        self.id = kwargs["id"]
+        created_at = kwargs.get("createdAt") or kwargs.get("created_at")
+        self.created_at = parse_datetime(created_at) if created_at else None
+        self.job_spec = JobSpec(**(kwargs.get("job_spec") or kwargs.get("jobSpec", {})))
+        self.schedule = kwargs.get("schedule")
+        self.suspend = kwargs.get("suspend")
+        self.concurrency = kwargs.get("concurrency")
+        status = kwargs.get("status", {})
+        self.status = ScheduledJobStatus(
+            last_job=status.get("last_job") or status.get("lastJob"),
+            next_job_run_at=status.get("next_job_run_at") or status.get("nextJobRunAt"),
+        )
+        owner = kwargs.get("owner", {})
+        self.owner = JobOwner(id=owner["id"], name=owner["name"], type=owner["type"])
+
+
+def _create_job_spec(
+    *,
+    image: str,
+    command: List[str],
+    env: Optional[Dict[str, Any]],
+    secrets: Optional[Dict[str, Any]],
+    flavor: Optional[SpaceHardware],
+    timeout: Optional[Union[int, float, str]],
+) -> Dict[str, Any]:
+    # prepare job spec to send to HF Jobs API
+    job_spec: Dict[str, Any] = {
+        "command": command,
+        "arguments": [],
+        "environment": env or {},
+        "flavor": flavor or SpaceHardware.CPU_BASIC,
+    }
+    # secrets are optional
+    if secrets:
+        job_spec["secrets"] = secrets
+    # timeout is optional
+    if timeout:
+        time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
+        if isinstance(timeout, str) and timeout[-1] in time_units_factors:
+            job_spec["timeoutSeconds"] = int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
+        else:
+            job_spec["timeoutSeconds"] = int(timeout)
+    # input is either from docker hub or from HF spaces
+    for prefix in (
+        "https://huggingface.co/spaces/",
+        "https://hf.co/spaces/",
+        "huggingface.co/spaces/",
+        "hf.co/spaces/",
+    ):
+        if image.startswith(prefix):
+            job_spec["spaceId"] = image[len(prefix) :]
+            break
+    else:
+        job_spec["dockerImage"] = image
+    return job_spec
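
Two details of `_create_job_spec` are worth calling out: `timeout` accepts either a bare number of seconds or a string with an `s`/`m`/`h`/`d` suffix, and `image` is routed to `spaceId` or `dockerImage` depending on whether it carries a Hugging Face Spaces prefix. A quick check traced from the code above (note the helper is private, and `user/space` is a placeholder Space id):

```python
from huggingface_hub._jobs_api import _create_job_spec

spec = _create_job_spec(
    image="python:3.12", command=["python", "-c", "print('hi')"],
    env=None, secrets=None, flavor=None, timeout="1.5h",
)
assert spec["timeoutSeconds"] == 5400        # 1.5 * 3600
assert spec["dockerImage"] == "python:3.12"  # no Spaces prefix, so a Docker image

spec = _create_job_spec(
    image="https://huggingface.co/spaces/user/space", command=["python", "run.py"],
    env=None, secrets=None, flavor=None, timeout=30,
)
assert spec["spaceId"] == "user/space"       # Spaces prefix stripped
assert spec["timeoutSeconds"] == 30
```

Note that `if timeout:` also means a falsy value such as `0` is silently dropped rather than rejected.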
huggingface_hub/_tensorboard_logger.py

@@ -14,7 +14,7 @@
 """Contains a logger to push training logs to the Hub, using Tensorboard."""
 
 from pathlib import Path
-from typing import TYPE_CHECKING, List, Optional, Union
+from typing import List, Optional, Union
 
 from ._commit_scheduler import CommitScheduler
 from .errors import EntryNotFoundError
@@ -26,25 +26,24 @@ from .utils import experimental
 # or from 'torch.utils.tensorboard'. Both are compatible so let's try to load
 # from either of them.
 try:
-    from tensorboardX import SummaryWriter
+    from tensorboardX import SummaryWriter as _RuntimeSummaryWriter
 
     is_summary_writer_available = True
-
 except ImportError:
     try:
-        from torch.utils.tensorboard import SummaryWriter
+        from torch.utils.tensorboard import SummaryWriter as _RuntimeSummaryWriter
 
-        is_summary_writer_available = False
+        is_summary_writer_available = True
     except ImportError:
         # Dummy class to avoid failing at import. Will raise on instance creation.
-        SummaryWriter = object
-        is_summary_writer_available = False
+        class _DummySummaryWriter:
+            pass
 
-if TYPE_CHECKING:
-    from tensorboardX import SummaryWriter
+        _RuntimeSummaryWriter = _DummySummaryWriter  # type: ignore[assignment]
+        is_summary_writer_available = False
 
 
-class HFSummaryWriter(SummaryWriter):
+class HFSummaryWriter(_RuntimeSummaryWriter):
     """
     Wrapper around the tensorboard's `SummaryWriter` to push training logs to the Hub.
 
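
The net effect of this refactor: `is_summary_writer_available` is now correctly `True` when the `torch.utils.tensorboard` fallback import succeeds (it was previously left `False` on that path), and the last-resort `SummaryWriter = object` rebinding, which made `HFSummaryWriter` silently inherit from `object`, is replaced by an explicit `_RuntimeSummaryWriter` alias. A condensed sketch of the pattern (tensorboardX branch only; the guard in `__init__` is an assumption about how the real class fails when no backend is installed):

```python
try:
    from tensorboardX import SummaryWriter as _RuntimeSummaryWriter
    is_summary_writer_available = True
except ImportError:
    class _DummySummaryWriter:  # placeholder base, never actually usable
        pass

    _RuntimeSummaryWriter = _DummySummaryWriter  # type: ignore[assignment]
    is_summary_writer_available = False


class HFSummaryWriter(_RuntimeSummaryWriter):
    def __init__(self, *args, **kwargs):
        # Hypothetical guard: fail loudly at instantiation, not at import.
        if not is_summary_writer_available:
            raise ImportError("Install tensorboardX or torch to use HFSummaryWriter.")
        super().__init__(*args, **kwargs)
```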
huggingface_hub/cli/auth.py

@@ -200,7 +200,7 @@ class AuthWhoami(BaseAuthCommand):
             exit()
         try:
             info = self._api.whoami(token)
-            print(info["name"])
+            print(ANSI.bold("user: "), info["name"])
             orgs = [org["name"] for org in info["orgs"]]
             if orgs:
                 print(ANSI.bold("orgs: "), ",".join(orgs))
huggingface_hub/cli/cache.py

@@ -21,13 +21,7 @@ from functools import wraps
 from tempfile import mkstemp
 from typing import Any, Callable, Iterable, List, Literal, Optional, Union
 
-from ..utils import (
-    CachedRepoInfo,
-    CachedRevisionInfo,
-    CacheNotFound,
-    HFCacheInfo,
-    scan_cache_dir,
-)
+from ..utils import CachedRepoInfo, CachedRevisionInfo, CacheNotFound, HFCacheInfo, scan_cache_dir
 from . import BaseHuggingfaceCLICommand
 from ._cli_utils import ANSI, tabulate
 
@@ -52,7 +46,7 @@ def require_inquirer_py(fn: Callable) -> Callable:
         if not _inquirer_py_available:
             raise ImportError(
                 "The 'cache delete' command requires extra dependencies for the TUI.\n"
-                "Please run 'pip install huggingface_hub[cli]' to install them.\n"
+                "Please run 'pip install \"huggingface_hub[cli]\"' to install them.\n"
                 "Otherwise, disable TUI using the '--disable-tui' flag."
             )
         return fn(*args, **kwargs)
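
The added inner quotes are not cosmetic: square brackets are glob characters in zsh, so an unquoted `pip install huggingface_hub[cli]` can abort with `zsh: no matches found: huggingface_hub[cli]`. Quoting the requirement specifier makes the suggested command copy-paste safe across shells.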
@@ -149,7 +143,7 @@ class CacheCommand(BaseHuggingfaceCLICommand):
         if self.verbosity >= 3:
             print(ANSI.gray(message))
             for warning in hf_cache_info.warnings:
-                print(ANSI.gray(warning))
+                print(ANSI.gray(str(warning)))
         else:
             print(ANSI.gray(message + " Use -vvv to print details."))
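
The `str(warning)` wrapper makes the conversion explicit: the entries in `hf_cache_info.warnings` are exception instances (`CorruptedCacheException`), not strings, so converting them before handing them to `ANSI.gray()` both documents the intent and keeps the styling helper's input type consistent.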