huggingface-hub 0.23.4__py3-none-any.whl → 0.24.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (43)
  1. huggingface_hub/__init__.py +47 -15
  2. huggingface_hub/_commit_api.py +38 -8
  3. huggingface_hub/_inference_endpoints.py +11 -4
  4. huggingface_hub/_local_folder.py +22 -13
  5. huggingface_hub/_snapshot_download.py +12 -7
  6. huggingface_hub/_webhooks_server.py +3 -1
  7. huggingface_hub/commands/huggingface_cli.py +4 -3
  8. huggingface_hub/commands/repo_files.py +128 -0
  9. huggingface_hub/constants.py +12 -0
  10. huggingface_hub/file_download.py +127 -91
  11. huggingface_hub/hf_api.py +976 -341
  12. huggingface_hub/hf_file_system.py +30 -3
  13. huggingface_hub/hub_mixin.py +17 -6
  14. huggingface_hub/inference/_client.py +379 -43
  15. huggingface_hub/inference/_common.py +0 -2
  16. huggingface_hub/inference/_generated/_async_client.py +396 -49
  17. huggingface_hub/inference/_generated/types/__init__.py +4 -1
  18. huggingface_hub/inference/_generated/types/chat_completion.py +41 -21
  19. huggingface_hub/inference/_generated/types/feature_extraction.py +23 -5
  20. huggingface_hub/inference/_generated/types/text_generation.py +29 -0
  21. huggingface_hub/lfs.py +11 -6
  22. huggingface_hub/repocard_data.py +3 -3
  23. huggingface_hub/repository.py +6 -6
  24. huggingface_hub/serialization/__init__.py +8 -3
  25. huggingface_hub/serialization/_base.py +13 -16
  26. huggingface_hub/serialization/_tensorflow.py +4 -3
  27. huggingface_hub/serialization/_torch.py +399 -22
  28. huggingface_hub/utils/__init__.py +0 -1
  29. huggingface_hub/utils/_errors.py +1 -1
  30. huggingface_hub/utils/_fixes.py +14 -3
  31. huggingface_hub/utils/_paths.py +17 -6
  32. huggingface_hub/utils/_subprocess.py +0 -1
  33. huggingface_hub/utils/_telemetry.py +9 -1
  34. huggingface_hub/utils/endpoint_helpers.py +2 -186
  35. huggingface_hub/utils/sha.py +36 -1
  36. huggingface_hub/utils/tqdm.py +0 -1
  37. {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/METADATA +12 -9
  38. {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/RECORD +42 -42
  39. huggingface_hub/serialization/_numpy.py +0 -68
  40. {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/LICENSE +0 -0
  41. {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/WHEEL +0 -0
  42. {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/entry_points.txt +0 -0
  43. {huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/top_level.txt +0 -0
huggingface_hub/utils/endpoint_helpers.py

@@ -16,9 +16,7 @@ with the aim for a user-friendly interface.
 
 import math
 import re
-import warnings
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, List, Optional, Union
+from typing import TYPE_CHECKING
 
 from ..repocard_data import ModelCardData
 
@@ -27,7 +25,7 @@ if TYPE_CHECKING:
     from ..hf_api import ModelInfo
 
 
-def _is_emission_within_treshold(model_info: "ModelInfo", minimum_threshold: float, maximum_threshold: float) -> bool:
+def _is_emission_within_threshold(model_info: "ModelInfo", minimum_threshold: float, maximum_threshold: float) -> bool:
     """Checks if a model's emission is within a given threshold.
 
     Args:
@@ -66,185 +64,3 @@ def _is_emission_within_treshold(model_info: "ModelInfo", minimum_threshold: flo
 
     emission_value = float(matched.group(0))
     return minimum_threshold <= emission_value <= maximum_threshold
-
-
-@dataclass
-class DatasetFilter:
-    """
-    A class that converts human-readable dataset search parameters into ones
-    compatible with the REST API. For all parameters capitalization does not
-    matter.
-
-    <Tip warning={true}>
-
-    The `DatasetFilter` class is deprecated and will be removed in huggingface_hub>=0.24. Please pass the filter parameters as keyword arguments directly to [`list_datasets`].
-
-    </Tip>
-
-    Args:
-        author (`str`, *optional*):
-            A string that can be used to identify datasets on
-            the Hub by the original uploader (author or organization), such as
-            `facebook` or `huggingface`.
-        benchmark (`str` or `List`, *optional*):
-            A string or list of strings that can be used to identify datasets on
-            the Hub by their official benchmark.
-        dataset_name (`str`, *optional*):
-            A string or list of strings that can be used to identify datasets on
-            the Hub by its name, such as `SQAC` or `wikineural`
-        language_creators (`str` or `List`, *optional*):
-            A string or list of strings that can be used to identify datasets on
-            the Hub with how the data was curated, such as `crowdsourced` or
-            `machine_generated`.
-        language (`str` or `List`, *optional*):
-            A string or list of strings representing a two-character language to
-            filter datasets by on the Hub.
-        multilinguality (`str` or `List`, *optional*):
-            A string or list of strings representing a filter for datasets that
-            contain multiple languages.
-        size_categories (`str` or `List`, *optional*):
-            A string or list of strings that can be used to identify datasets on
-            the Hub by the size of the dataset such as `100K<n<1M` or
-            `1M<n<10M`.
-        task_categories (`str` or `List`, *optional*):
-            A string or list of strings that can be used to identify datasets on
-            the Hub by the designed task, such as `audio_classification` or
-            `named_entity_recognition`.
-        task_ids (`str` or `List`, *optional*):
-            A string or list of strings that can be used to identify datasets on
-            the Hub by the specific task such as `speech_emotion_recognition` or
-            `paraphrase`.
-
-    Examples:
-
-    ```py
-    >>> from huggingface_hub import DatasetFilter
-
-    >>> # Using author
-    >>> new_filter = DatasetFilter(author="facebook")
-
-    >>> # Using benchmark
-    >>> new_filter = DatasetFilter(benchmark="raft")
-
-    >>> # Using dataset_name
-    >>> new_filter = DatasetFilter(dataset_name="wikineural")
-
-    >>> # Using language_creator
-    >>> new_filter = DatasetFilter(language_creator="crowdsourced")
-
-    >>> # Using language
-    >>> new_filter = DatasetFilter(language="en")
-
-    >>> # Using multilinguality
-    >>> new_filter = DatasetFilter(multilinguality="multilingual")
-
-    >>> # Using size_categories
-    >>> new_filter = DatasetFilter(size_categories="100K<n<1M")
-
-    >>> # Using task_categories
-    >>> new_filter = DatasetFilter(task_categories="audio_classification")
-
-    >>> # Using task_ids
-    >>> new_filter = DatasetFilter(task_ids="paraphrase")
-    ```
-    """
-
-    author: Optional[str] = None
-    benchmark: Optional[Union[str, List[str]]] = None
-    dataset_name: Optional[str] = None
-    language_creators: Optional[Union[str, List[str]]] = None
-    language: Optional[Union[str, List[str]]] = None
-    multilinguality: Optional[Union[str, List[str]]] = None
-    size_categories: Optional[Union[str, List[str]]] = None
-    task_categories: Optional[Union[str, List[str]]] = None
-    task_ids: Optional[Union[str, List[str]]] = None
-
-    def __post_init__(self):
-        warnings.warn(
-            "'DatasetFilter' is deprecated and will be removed in huggingface_hub>=0.24. Please pass the filter parameters as keyword arguments directly to the `list_datasets` method.",
-            category=FutureWarning,
-        )
-
-
-@dataclass
-class ModelFilter:
-    """
-    A class that converts human-readable model search parameters into ones
-    compatible with the REST API. For all parameters capitalization does not
-    matter.
-
-    <Tip warning={true}>
-
-    The `ModelFilter` class is deprecated and will be removed in huggingface_hub>=0.24. Please pass the filter parameters as keyword arguments directly to [`list_models`].
-
-    </Tip>
-
-    Args:
-        author (`str`, *optional*):
-            A string that can be used to identify models on the Hub by the
-            original uploader (author or organization), such as `facebook` or
-            `huggingface`.
-        library (`str` or `List`, *optional*):
-            A string or list of strings of foundational libraries models were
-            originally trained from, such as pytorch, tensorflow, or allennlp.
-        language (`str` or `List`, *optional*):
-            A string or list of strings of languages, both by name and country
-            code, such as "en" or "English"
-        model_name (`str`, *optional*):
-            A string that contain complete or partial names for models on the
-            Hub, such as "bert" or "bert-base-cased"
-        task (`str` or `List`, *optional*):
-            A string or list of strings of tasks models were designed for, such
-            as: "fill-mask" or "automatic-speech-recognition"
-        tags (`str` or `List`, *optional*):
-            A string tag or a list of tags to filter models on the Hub by, such
-            as `text-generation` or `spacy`.
-        trained_dataset (`str` or `List`, *optional*):
-            A string tag or a list of string tags of the trained dataset for a
-            model on the Hub.
-
-    Examples:
-
-    ```python
-    >>> from huggingface_hub import ModelFilter
-
-    >>> # For the author_or_organization
-    >>> new_filter = ModelFilter(author_or_organization="facebook")
-
-    >>> # For the library
-    >>> new_filter = ModelFilter(library="pytorch")
-
-    >>> # For the language
-    >>> new_filter = ModelFilter(language="french")
-
-    >>> # For the model_name
-    >>> new_filter = ModelFilter(model_name="bert")
-
-    >>> # For the task
-    >>> new_filter = ModelFilter(task="text-classification")
-
-    >>> from huggingface_hub import HfApi
-
-    >>> api = HfApi()
-    # To list model tags
-
-    >>> new_filter = ModelFilter(tags="benchmark:raft")
-
-    >>> # Related to the dataset
-    >>> new_filter = ModelFilter(trained_dataset="common_voice")
-    ```
-    """
-
-    author: Optional[str] = None
-    library: Optional[Union[str, List[str]]] = None
-    language: Optional[Union[str, List[str]]] = None
-    model_name: Optional[str] = None
-    task: Optional[Union[str, List[str]]] = None
-    trained_dataset: Optional[Union[str, List[str]]] = None
-    tags: Optional[Union[str, List[str]]] = None
-
-    def __post_init__(self):
-        warnings.warn(
-            "'ModelFilter' is deprecated and will be removed in huggingface_hub>=0.24. Please pass the filter parameters as keyword arguments directly to the `list_models` method.",
-            FutureWarning,
-        )
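
The two filter dataclasses removed above had been deprecated since 0.23 with exactly this migration path: pass the same fields as keyword arguments to `list_models` / `list_datasets`. A minimal migration sketch, assuming the keyword arguments keep the names of the removed dataclass fields (which the deprecation messages above indicate):

```python
from huggingface_hub import HfApi

api = HfApi()

# 0.23 and earlier (deprecated): api.list_models(filter=ModelFilter(author="facebook", task="text-classification"))
# 0.24: the same fields are plain keyword arguments.
models = api.list_models(author="facebook", task="text-classification", limit=5)

# Same move for datasets: DatasetFilter(benchmark="raft") becomes a keyword argument.
datasets = api.list_datasets(benchmark="raft", limit=5)

for model in models:
    print(model.id)
```
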

huggingface_hub/utils/sha.py

@@ -2,7 +2,7 @@
 
 from typing import BinaryIO, Optional
 
-from .insecure_hashlib import sha256
+from .insecure_hashlib import sha1, sha256
 
 
 def sha_fileobj(fileobj: BinaryIO, chunk_size: Optional[int] = None) -> bytes:
@@ -27,3 +27,38 @@ def sha_fileobj(fileobj: BinaryIO, chunk_size: Optional[int] = None) -> bytes:
         if not chunk:
             break
     return sha.digest()
+
+
+def git_hash(data: bytes) -> str:
+    """
+    Computes the git-sha1 hash of the given bytes, using the same algorithm as git.
+
+    This is equivalent to running `git hash-object`. See https://git-scm.com/docs/git-hash-object
+    for more details.
+
+    Note: this method is valid for regular files. For LFS files, the proper git hash is supposed to be computed on the
+    pointer file content, not the actual file content. However, for simplicity, we directly compare the sha256 of
+    the LFS file content when we want to compare LFS files.
+
+    Args:
+        data (`bytes`):
+            The data to compute the git-hash for.
+
+    Returns:
+        `str`: the git-hash of `data` as an hexadecimal string.
+
+    Example:
+    ```python
+    >>> from huggingface_hub.utils.sha import git_hash
+    >>> git_hash(b"Hello, World!")
+    'b45ef6fec89518d314f546fd6c3025367b721684'
+    ```
+    """
+    # Taken from https://gist.github.com/msabramo/763200
+    # Note: no need to optimize by reading the file in chunks as we're not supposed to hash huge files (5MB maximum).
+    sha = sha1()
+    sha.update(b"blob ")
+    sha.update(str(len(data)).encode())
+    sha.update(b"\0")
+    sha.update(data)
+    return sha.hexdigest()
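
The new `git_hash` helper reproduces git's blob-object hashing (sha1 over `"blob <size>\0" + content`), presumably so the client can compare local file content against blob ids reported by the Hub without shelling out to git. A self-contained sketch of the same construction using plain `hashlib`, checked against the example value from the docstring above:

```python
import hashlib


def git_blob_sha1(data: bytes) -> str:
    # git hashes a blob as sha1(b"blob <decimal size>\0" + content),
    # which is what the new git_hash() helper computes via insecure_hashlib.
    header = f"blob {len(data)}\0".encode()
    return hashlib.sha1(header + data).hexdigest()


# Matches the docstring example above and `printf 'Hello, World!' | git hash-object --stdin`.
print(git_blob_sha1(b"Hello, World!"))  # b45ef6fec89518d314f546fd6c3025367b721684
```
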

@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 # coding=utf-8
 # Copyright 2021 The HuggingFace Inc. team. All rights reserved.
 #

{huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: huggingface-hub
-Version: 0.23.4
+Version: 0.24.0
 Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
 Home-page: https://github.com/huggingface/huggingface_hub
 Author: Hugging Face, Inc.
@@ -36,20 +36,21 @@ Requires-Dist: aiohttp ; extra == 'all'
 Requires-Dist: minijinja >=1.0 ; extra == 'all'
 Requires-Dist: jedi ; extra == 'all'
 Requires-Dist: Jinja2 ; extra == 'all'
-Requires-Dist: pytest ; extra == 'all'
+Requires-Dist: pytest <8.2.2,>=8.1.1 ; extra == 'all'
 Requires-Dist: pytest-cov ; extra == 'all'
 Requires-Dist: pytest-env ; extra == 'all'
 Requires-Dist: pytest-xdist ; extra == 'all'
 Requires-Dist: pytest-vcr ; extra == 'all'
 Requires-Dist: pytest-asyncio ; extra == 'all'
 Requires-Dist: pytest-rerunfailures ; extra == 'all'
+Requires-Dist: pytest-mock ; extra == 'all'
 Requires-Dist: urllib3 <2.0 ; extra == 'all'
 Requires-Dist: soundfile ; extra == 'all'
 Requires-Dist: Pillow ; extra == 'all'
 Requires-Dist: gradio ; extra == 'all'
 Requires-Dist: numpy ; extra == 'all'
 Requires-Dist: fastapi ; extra == 'all'
-Requires-Dist: ruff >=0.3.0 ; extra == 'all'
+Requires-Dist: ruff >=0.5.0 ; extra == 'all'
 Requires-Dist: mypy ==1.5.1 ; extra == 'all'
 Requires-Dist: typing-extensions >=4.8.0 ; extra == 'all'
 Requires-Dist: types-PyYAML ; extra == 'all'
@@ -66,20 +67,21 @@ Requires-Dist: aiohttp ; extra == 'dev'
 Requires-Dist: minijinja >=1.0 ; extra == 'dev'
 Requires-Dist: jedi ; extra == 'dev'
 Requires-Dist: Jinja2 ; extra == 'dev'
-Requires-Dist: pytest ; extra == 'dev'
+Requires-Dist: pytest <8.2.2,>=8.1.1 ; extra == 'dev'
 Requires-Dist: pytest-cov ; extra == 'dev'
 Requires-Dist: pytest-env ; extra == 'dev'
 Requires-Dist: pytest-xdist ; extra == 'dev'
 Requires-Dist: pytest-vcr ; extra == 'dev'
 Requires-Dist: pytest-asyncio ; extra == 'dev'
 Requires-Dist: pytest-rerunfailures ; extra == 'dev'
+Requires-Dist: pytest-mock ; extra == 'dev'
 Requires-Dist: urllib3 <2.0 ; extra == 'dev'
 Requires-Dist: soundfile ; extra == 'dev'
 Requires-Dist: Pillow ; extra == 'dev'
 Requires-Dist: gradio ; extra == 'dev'
 Requires-Dist: numpy ; extra == 'dev'
 Requires-Dist: fastapi ; extra == 'dev'
-Requires-Dist: ruff >=0.3.0 ; extra == 'dev'
+Requires-Dist: ruff >=0.5.0 ; extra == 'dev'
 Requires-Dist: mypy ==1.5.1 ; extra == 'dev'
 Requires-Dist: typing-extensions >=4.8.0 ; extra == 'dev'
 Requires-Dist: types-PyYAML ; extra == 'dev'
@@ -98,7 +100,7 @@ Provides-Extra: inference
 Requires-Dist: aiohttp ; extra == 'inference'
 Requires-Dist: minijinja >=1.0 ; extra == 'inference'
 Provides-Extra: quality
-Requires-Dist: ruff >=0.3.0 ; extra == 'quality'
+Requires-Dist: ruff >=0.5.0 ; extra == 'quality'
 Requires-Dist: mypy ==1.5.1 ; extra == 'quality'
 Provides-Extra: tensorflow
 Requires-Dist: tensorflow ; extra == 'tensorflow'
@@ -113,13 +115,14 @@ Requires-Dist: aiohttp ; extra == 'testing'
 Requires-Dist: minijinja >=1.0 ; extra == 'testing'
 Requires-Dist: jedi ; extra == 'testing'
 Requires-Dist: Jinja2 ; extra == 'testing'
-Requires-Dist: pytest ; extra == 'testing'
+Requires-Dist: pytest <8.2.2,>=8.1.1 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-env ; extra == 'testing'
 Requires-Dist: pytest-xdist ; extra == 'testing'
 Requires-Dist: pytest-vcr ; extra == 'testing'
 Requires-Dist: pytest-asyncio ; extra == 'testing'
 Requires-Dist: pytest-rerunfailures ; extra == 'testing'
+Requires-Dist: pytest-mock ; extra == 'testing'
 Requires-Dist: urllib3 <2.0 ; extra == 'testing'
 Requires-Dist: soundfile ; extra == 'testing'
 Requires-Dist: Pillow ; extra == 'testing'
@@ -128,7 +131,7 @@ Requires-Dist: numpy ; extra == 'testing'
 Requires-Dist: fastapi ; extra == 'testing'
 Provides-Extra: torch
 Requires-Dist: torch ; extra == 'torch'
-Requires-Dist: safetensors ; extra == 'torch'
+Requires-Dist: safetensors[torch] ; extra == 'torch'
 Provides-Extra: typing
 Requires-Dist: typing-extensions >=4.8.0 ; extra == 'typing'
 Requires-Dist: types-PyYAML ; extra == 'typing'
@@ -152,7 +155,7 @@ Requires-Dist: types-urllib3 ; extra == 'typing'
 <a href="https://huggingface.co/docs/huggingface_hub/en/index"><img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/huggingface_hub/index.svg?down_color=red&down_message=offline&up_message=online&label=doc"></a>
 <a href="https://github.com/huggingface/huggingface_hub/releases"><img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/huggingface_hub.svg"></a>
 <a href="https://github.com/huggingface/huggingface_hub"><img alt="PyPi version" src="https://img.shields.io/pypi/pyversions/huggingface_hub.svg"></a>
-<a href="https://pypi.org/project/huggingface-hub"><img alt="downloads" src="https://static.pepy.tech/badge/huggingface_hub/month"></a>
+<a href="https://pypi.org/project/huggingface-hub"><img alt="PyPI - Downloads" src="https://img.shields.io/pypi/dm/huggingface_hub"></a>
 <a href="https://codecov.io/gh/huggingface/huggingface_hub"><img alt="Code coverage" src="https://codecov.io/gh/huggingface/huggingface_hub/branch/main/graph/badge.svg?token=RXP95LE2XL"></a>
 </p>
 

{huggingface_hub-0.23.4.dist-info → huggingface_hub-0.24.0.dist-info}/RECORD

@@ -1,56 +1,57 @@
-huggingface_hub/__init__.py,sha256=_QKxFRMVRJsIJt3WxUA8lExcJrRvo9plMZ8VfhlQuE4,32692
-huggingface_hub/_commit_api.py,sha256=Z1sQnJx1xWfspsX6vS8eGTmr-9QujIoItjbnJVVyyCQ,29299
+huggingface_hub/__init__.py,sha256=taJPTUHh112irc5PAZGGTB5Hhbykr6L4fwKBQI5KBhs,33897
+huggingface_hub/_commit_api.py,sha256=Yj1ft_WbsnqjSbiYHgdqGmLTF6BTA4E8kAGYW89t2sQ,31057
 huggingface_hub/_commit_scheduler.py,sha256=nlJS_vnLb8i92NLrRwJX8Mg9QZ7f3kfLbLlQuEd5YjU,13647
-huggingface_hub/_inference_endpoints.py,sha256=rBx6xgnSJq0JtntF1_zphj7NsCmduICqgZfmvscdE_w,15667
-huggingface_hub/_local_folder.py,sha256=ajjI3vRgV9kGrx2ZPeTnDm8lfGN1eyMshn5gxM_7Q38,8441
+huggingface_hub/_inference_endpoints.py,sha256=th6vlJ2vUg314x7uMLzQHfy4AuX5mFlJqNobVIz5yOY,15944
+huggingface_hub/_local_folder.py,sha256=rJlq1_45EEbThwVIfvmhJmmxbmNTBqbdQQBZKBbuiig,9033
 huggingface_hub/_login.py,sha256=E-3hbns3Jo0mjnyPWQVz9c0xPEXuQ-KQhZCQ9R1BE7o,15478
 huggingface_hub/_multi_commits.py,sha256=mFmCP_5hNsruEgDF6kOVyaFkpnbSdNxPWfGUlFbl5O8,12535
-huggingface_hub/_snapshot_download.py,sha256=pN7CEl8X_JJRdrFDeBk0nYecVM7rvULJty9vuxrHnMU,14039
+huggingface_hub/_snapshot_download.py,sha256=a-lfZaDe7a1l19rVbpbGV9S0EnAt3M7qKc3QkXDyL2g,14374
 huggingface_hub/_space_api.py,sha256=Mae_lqTRyTWyszI5mlObJ2fn9slPxkFPcFTEVADoNQM,5255
 huggingface_hub/_tensorboard_logger.py,sha256=x_56MOZiU2-9QQ1XHOWem39ySRLe29hkalxy2nRaRL4,7470
 huggingface_hub/_webhooks_payload.py,sha256=Xm3KaK7tCOGBlXkuZvbym6zjHXrT1XCrbUFWuXiBmNY,3617
-huggingface_hub/_webhooks_server.py,sha256=9RQ4AS5JVssJhM66FzlyOSQhKwrKG-dV_x6SA8GeOQw,15497
+huggingface_hub/_webhooks_server.py,sha256=niyE3SefO-7FqHs3YCxdQt29rIxpPYQahgNpPtDYA6Q,15620
 huggingface_hub/community.py,sha256=SBaOfI-3atCzRbO0gDS8BYxctbdvD4G0X6D0GfY8Fgc,12203
-huggingface_hub/constants.py,sha256=_xLHaNnAcA9KnENaABbsee3UctmaViE8AQ6njk17ni4,7591
+huggingface_hub/constants.py,sha256=BG3n2gl4JbxMw_JRvNTFyMcNnZIPzvT3KXSH-jm2J08,8005
 huggingface_hub/errors.py,sha256=IM0lNbExLzaYEs0HrrPvY4-kyj6DiP2Szu7Jy9slHOE,2083
 huggingface_hub/fastai_utils.py,sha256=5I7zAfgHJU_mZnxnf9wgWTHrCRu_EAV8VTangDVfE_o,16676
-huggingface_hub/file_download.py,sha256=n5ovYqh1-xe3ptRHuS-EXn6X_-3ZVI7C-pQrHD45DtA,82236
-huggingface_hub/hf_api.py,sha256=hyMkURhYXalCNG4Qqx3PhN7Ucru8m18ZidEok_T2504,375216
-huggingface_hub/hf_file_system.py,sha256=EHSWD6Pdm9ED-cgNh-ozoiz69pODssKrObKybVJPBQA,37830
-huggingface_hub/hub_mixin.py,sha256=G02KssorMoPEQChccHyL4uTH-wn539bSwwwsyrAdPTk,36712
+huggingface_hub/file_download.py,sha256=Lf1RhCMb9HkXPUy90O_zUc-fonmFTwE2xadbZpVoKrM,84243
+huggingface_hub/hf_api.py,sha256=YK4EcYD7vvGOjzAO_7pSrr2len7u4xa7yvwn6CojdIA,406692
+huggingface_hub/hf_file_system.py,sha256=HlYbWFhMrPWNqGUQfQrZR6H70QK0PgsxRvO4FantCNc,39160
+huggingface_hub/hub_mixin.py,sha256=bm5hZGeOHBSUBfiAXJv8cU05nAZr65TxnkUJLWLwAEg,37308
 huggingface_hub/inference_api.py,sha256=UXOKu_Ez2I3hDsjguqCcCrj03WFDndehpngYiIAucdg,8331
 huggingface_hub/keras_mixin.py,sha256=2DF-hNGdxJCxqvcw46id-ExH_865ZAXsJd2vmpAuWHQ,19484
-huggingface_hub/lfs.py,sha256=GNmKV_SURcGxMa3p_OyF8ttoq7fZhHjgpyxYzP4VTqU,19690
+huggingface_hub/lfs.py,sha256=4131E5p4HOWqe5JBNFePUKHoZ49LE8_y1vRp3y4sEe0,20073
 huggingface_hub/repocard.py,sha256=oUrGim27nCHkevPDZDbUp68uKTxB8xbdoyeqv24pexc,34605
-huggingface_hub/repocard_data.py,sha256=wmiDDFNY8oJzT31PB4RRQhM2lRdy_QtNebqSWCClYZw,32708
-huggingface_hub/repository.py,sha256=87QxXPTK9PCztFW69oD4RZsNMLL9yxoQDdn-F81wSdM,54548
+huggingface_hub/repocard_data.py,sha256=tATP6rp7MOlbPXVCMGhNMaDP7RmKPQkX_pXtBVOYmRQ,32726
+huggingface_hub/repository.py,sha256=Q2a90DiBKTJnfO4XDhH8ER7PMGAabNEwcdxcccRwXU0,54556
 huggingface_hub/commands/__init__.py,sha256=AkbM2a-iGh0Vq_xAWhK3mu3uZ44km8-X5uWjKcvcrUQ,928
 huggingface_hub/commands/_cli_utils.py,sha256=qRdl9opi3yJxIVNCnrmte-jFWmYbjVqd8gBlin8NNzY,1971
 huggingface_hub/commands/delete_cache.py,sha256=Rb1BtIltJPnQ-th7tcK_L4mFqfk785t3KXV77xXKBP4,16131
 huggingface_hub/commands/download.py,sha256=s0dSqUTWG26Q5F2rEFAr_jY2xW4yOvDbSM20vYCjD3I,7880
 huggingface_hub/commands/env.py,sha256=yYl4DSS14V8t244nAi0t77Izx5LIdgS_dy6xiV5VQME,1226
-huggingface_hub/commands/huggingface_cli.py,sha256=-MkVPxIKIhP1aTFcExz7krEEDdaVpG9cV7P70ZBJh-U,2030
+huggingface_hub/commands/huggingface_cli.py,sha256=704MgmavR8szQY66E6ci3WRdYB0SkvOILlaS_jVqirs,2131
 huggingface_hub/commands/lfs.py,sha256=6E769AoRxUDiIOapn1_QvTbNtdUnUiouu2F4Gopp4do,7318
+huggingface_hub/commands/repo_files.py,sha256=N7B8qDioq78U1dVDiu4i-jIRfpY0VCCih0aM6FFcaQU,4917
 huggingface_hub/commands/scan_cache.py,sha256=4o_jQsZloicRa-P8gncUBncVyWswpSF9T6KGlNrGodk,5183
 huggingface_hub/commands/tag.py,sha256=gCoR8G95lhHBzyVytTxT7MnqTmjKYtStDnHXcysOJwg,6287
 huggingface_hub/commands/upload.py,sha256=Mr69qO60otqCVw0sVSBPykUTkL9HO-pkCyulSD2mROM,13622
 huggingface_hub/commands/user.py,sha256=QApZJOCQEHADhjunM3hlQ72uqHsearCiCE4SdpzGdcc,6893
 huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_client.py,sha256=3KlFz4JFB6AuQT0mwHad4N42akkBvZDWS9bcyhwUT2g,117600
-huggingface_hub/inference/_common.py,sha256=L4b0A_raoWAUfl7d2vn6-rLfUcHcG5kjn_wUYIkx4uY,16362
+huggingface_hub/inference/_client.py,sha256=MTpnZ7-9289RGrGcFvqD54irbneYQkwlcXh_rbYp2eE,132771
+huggingface_hub/inference/_common.py,sha256=3xbeCOjLgSPRJcbtxKnv1DNXr_TOMivOeQyvg-Ma1HU,16306
 huggingface_hub/inference/_templating.py,sha256=LCy-U_25R-l5dhcEHsyRwiOrgvKQHXkdSmynWCfsPjI,3991
 huggingface_hub/inference/_types.py,sha256=C73l5-RO8P1UMBHF8OAO9CRUq7Xdv33pcADoJsGMPSU,1782
 huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-huggingface_hub/inference/_generated/_async_client.py,sha256=h0Zwdxfb9UgrqpzYsxv36TPOKGOy2LDM2AHSTrHQkdo,120732
-huggingface_hub/inference/_generated/types/__init__.py,sha256=Ro2qZb2STQz8V3bfElXY4DvmkxKuBaPjzY5BgH-1khI,5110
+huggingface_hub/inference/_generated/_async_client.py,sha256=bYY7CXLtO9zf3rDV7SsaUIThCF4KJq4U-POfLgww9_M,136498
+huggingface_hub/inference/_generated/types/__init__.py,sha256=uEsA0z8Gcu34q0gNAZVcqHFqJT5BPrhnM9qS_LQgN0Q,5215
 huggingface_hub/inference/_generated/types/audio_classification.py,sha256=wk4kUTLQZoXWLpiUOpKRHRRE-JYqqJlzGVe62VACR-0,1347
 huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=n7GeCepzt254yoSLsdjrI1j4fzYgjWzxoaKE5gZJc48,881
 huggingface_hub/inference/_generated/types/automatic_speech_recognition.py,sha256=-7UHu5QTGwSrJFnrbMgzeUFpJQOGyTmfK_QHgtnx6j8,5352
 huggingface_hub/inference/_generated/types/base.py,sha256=Cq4gUVtwwLmWyiIIq4NSL8kRk0EWk9QWWHc5Vup2LVg,6213
-huggingface_hub/inference/_generated/types/chat_completion.py,sha256=cPe_VAs-bfsUELY0fZtMnId6fdVoJnAcRmJItD5Otck,8185
+huggingface_hub/inference/_generated/types/chat_completion.py,sha256=k8VAiyauZDbSqxQuUQpMTVHBRUho49-ujedYBl3jy8w,8809
 huggingface_hub/inference/_generated/types/depth_estimation.py,sha256=lmLmd8S313ZMCG94RblwquL0UN_0hJmXAhWUqSIrtwc,898
 huggingface_hub/inference/_generated/types/document_question_answering.py,sha256=_hBzK4Pu9X_zXsgOO4JNSloIKuVfE5m7eGwEw5YTfZ4,3264
-huggingface_hub/inference/_generated/types/feature_extraction.py,sha256=KerTrRR5YR02X0qBDzrtK8953amCGf_adSUbfWOozD4,664
+huggingface_hub/inference/_generated/types/feature_extraction.py,sha256=FYjXh60gaRKCwwrb9DFEshAY_7axO88RifzHfig7RNI,1490
 huggingface_hub/inference/_generated/types/fill_mask.py,sha256=JcYIbTDXc4f7k2FNY3fCWtJ9ke3HUZFz2pDOOrDuxOs,1714
 huggingface_hub/inference/_generated/types/image_classification.py,sha256=W1QVfc0j7t6qbxjICUQDwygRx43yPPGZKyStogHkHqg,1359
 huggingface_hub/inference/_generated/types/image_segmentation.py,sha256=nVQc5Qhv37qqmTn_M8xegpNgk14ozKelsGIYC8hba_0,1803
@@ -63,7 +64,7 @@ huggingface_hub/inference/_generated/types/summarization.py,sha256=RWCXh7yftI_JW
 huggingface_hub/inference/_generated/types/table_question_answering.py,sha256=PuVZlR6dI6FEUK7pjMSVMtzkDgrcxdKjfcnDbVmPdSs,1569
 huggingface_hub/inference/_generated/types/text2text_generation.py,sha256=SZYfdhyraG5vZ2Jzm1C8k9w9IYLxMtm5UUu1tU2oOQk,1604
 huggingface_hub/inference/_generated/types/text_classification.py,sha256=vC7B1sBzZ4gdLjE2i2Y7w5cpdaFwQKK1dlWqW0asjIk,1347
-huggingface_hub/inference/_generated/types/text_generation.py,sha256=wR2DrDazFmeqIchkHXPUv17d4zWUmiUSPBdUFCDqJNY,4284
+huggingface_hub/inference/_generated/types/text_generation.py,sha256=OIU_Hwrn2Sra8pUbLOwal9gnYCe4d01m5iebWwzLwX8,5776
 huggingface_hub/inference/_generated/types/text_to_audio.py,sha256=cgvECsiwsycgP9Tfs_GU1CJfo9AngVn6x9s4fHCP-g4,4819
 huggingface_hub/inference/_generated/types/text_to_image.py,sha256=oBGeJ-S9WfsMxVQlvEOll9yaCyMXZ277wsYFD8bt87U,1931
 huggingface_hub/inference/_generated/types/token_classification.py,sha256=7oL8AZOTWtf2bYD2T3236GDNMtUl7FtydaB6We7wbfw,1890
@@ -73,43 +74,42 @@ huggingface_hub/inference/_generated/types/visual_question_answering.py,sha256=0
 huggingface_hub/inference/_generated/types/zero_shot_classification.py,sha256=u6jfFCqDv9XqeAN5E9_Xf7jqMZgqTRFF_S9PtWbiBUk,1963
 huggingface_hub/inference/_generated/types/zero_shot_image_classification.py,sha256=qVH6Ms0FjF8TraGy4BYiS8lmvGq9xiIDdXqGFynLHMA,1689
 huggingface_hub/inference/_generated/types/zero_shot_object_detection.py,sha256=PU4OOlQ2aAOosW2JlG2Z27MEQpmE6BxcygH_ns3w1KQ,1662
-huggingface_hub/serialization/__init__.py,sha256=W74TaCtYnMfpvGEQr1SS-OBmqPUFnM9AeWT9hTJCG9Y,910
-huggingface_hub/serialization/_base.py,sha256=2wxdid6ee8RASEKhCkpNdP8Kj9x4dRm6j8h72L1AtFQ,8239
-huggingface_hub/serialization/_numpy.py,sha256=E-boJoUuDdyMTaAMRIiHha7F9GH9YhFNQPEioG4UkaY,2690
-huggingface_hub/serialization/_tensorflow.py,sha256=4Wf_wzmLSzZua9hGGmArfngDzz3yw19PWJMdTT76uxc,3578
-huggingface_hub/serialization/_torch.py,sha256=t-pTq4O3NpAprVJIojtC8Rq-kNJ889IluJtJtoLoqVk,7705
+huggingface_hub/serialization/__init__.py,sha256=z5MLxMqz0Y2qST-3Lj0PZHUONL-SGRlc0g4Z6MdL6rw,988
+huggingface_hub/serialization/_base.py,sha256=JZneES-HgcRH9C2SQehIGRDtT7nS7emu-RRV4ZjB6xo,8124
+huggingface_hub/serialization/_tensorflow.py,sha256=zHOvEMg-JHC55Fm4roDT3LUCDO5zB9qtXZffG065RAM,3625
+huggingface_hub/serialization/_torch.py,sha256=tjFmv6WvMn4rYATXDF0H2SoDzg32-nJdbaNsQAO6Xo0,24208
 huggingface_hub/templates/datasetcard_template.md,sha256=W-EMqR6wndbrnZorkVv56URWPG49l7MATGeI015kTvs,5503
 huggingface_hub/templates/modelcard_template.md,sha256=4AqArS3cqdtbit5Bo-DhjcnDFR-pza5hErLLTPM4Yuc,6870
-huggingface_hub/utils/__init__.py,sha256=ljxHEvOAJlGtyepVLiTePBFR1CmOnmuvCrfsbMN3HqA,3643
+huggingface_hub/utils/__init__.py,sha256=piJsHZr4Bi2UbMMXKi6GRcakCl_uHYucvWB6uhV_WvE,3621
 huggingface_hub/utils/_cache_assets.py,sha256=kai77HPQMfYpROouMBQCr_gdBCaeTm996Sqj0dExbNg,5728
 huggingface_hub/utils/_cache_manager.py,sha256=Fs1XVP1UGzUTogMfMfEi_MfpURzHyW__djX0s2oLmrY,29307
 huggingface_hub/utils/_chunk_utils.py,sha256=kRCaj5228_vKcyLWspd8Xq01f17Jz6ds5Sr9ed5d_RU,2130
 huggingface_hub/utils/_datetime.py,sha256=DHnktKm1taeOe2XCBgNU4pVck5d70qu8FJ7nACD6C3k,2554
 huggingface_hub/utils/_deprecation.py,sha256=HZhRGGUX_QMKBBBwHHlffLtmCSK01TOpeXHefZbPfwI,4872
-huggingface_hub/utils/_errors.py,sha256=N5nUkCCaj8393wntazeTcKNrwDZfsDVHVMxxreHPfaE,15141
+huggingface_hub/utils/_errors.py,sha256=q5Y2kUOFnDiAb23_n48xUzy2ezKt73FjVeLSozUzY9Y,15142
 huggingface_hub/utils/_experimental.py,sha256=crCPH6k6-11wwH2GZuZzZzZbjUotay49ywV1SSJhMHM,2395
-huggingface_hub/utils/_fixes.py,sha256=wJ0FGewO6_zZFo65crJWcth9zODZz4TdyeDxkGNSeB0,2898
+huggingface_hub/utils/_fixes.py,sha256=F0_BDG2bHg8TSzEijYyQR_i2lygY3r0HxTQyebbRNGc,3207
 huggingface_hub/utils/_git_credential.py,sha256=SDdsiREr1TcAR2Ze2TB0E5cYzVJgvDZrs60od9lAsMc,4596
 huggingface_hub/utils/_headers.py,sha256=05sDPAi7-Fs3Z4YLbrTJTAbIT7yjSX9DEqotd6gHqhQ,9593
 huggingface_hub/utils/_hf_folder.py,sha256=gWH-TT9h_6X_CyrtLTtKNEawf9kKlCHraFiOu09BuLk,3613
 huggingface_hub/utils/_http.py,sha256=-Vuphx-pX9dvVBUf-AS2dECjO0HJBscXzith_FKOgO4,13458
 huggingface_hub/utils/_pagination.py,sha256=hzLFLd8i_DKkPRVYzOx2CxLt5lcocEiAxDJriQUjAjY,1841
-huggingface_hub/utils/_paths.py,sha256=bs6PlgsVdAINC9bAKivVOcOod1lIun0YgJbQ3VpmpPE,4646
+huggingface_hub/utils/_paths.py,sha256=w1ZhFmmD5ykWjp_hAvhjtOoa2ZUcOXJrF4a6O3QpAWo,5042
 huggingface_hub/utils/_runtime.py,sha256=QooW0cgJ349PX8x46KBluN01KMMvUm0ZQ9SsmidBH74,11041
 huggingface_hub/utils/_safetensors.py,sha256=GW3nyv7xQcuwObKYeYoT9VhURVzG1DZTbKBKho8Bbos,4458
-huggingface_hub/utils/_subprocess.py,sha256=34ETD8JvLzm16NRZHciaCLXdE9aRyxuDdOA5gdNvMJ8,4617
-huggingface_hub/utils/_telemetry.py,sha256=jHAdgWNcL9nVvMT3ec3i78O-cwL09GnlifuokzpQjMI,4641
+huggingface_hub/utils/_subprocess.py,sha256=6GpGD4qE9-Z1-Ocs3JuCLjR4NcRlknA-hAuQlqiprYY,4595
+huggingface_hub/utils/_telemetry.py,sha256=54LXeIJU5pEGghPAh06gqNAR-UoxOjVLvKqAQscwqZs,4890
 huggingface_hub/utils/_token.py,sha256=cxBZaafW2IsJ2dKWd55v7056zycW1ewp_nPk8dNcSO4,5476
 huggingface_hub/utils/_typing.py,sha256=UO0-GeTbiKFV9GqDh4YNRyScQSRAAZRoUeEYQX4P0rE,2882
 huggingface_hub/utils/_validators.py,sha256=dDsVG31iooTYrIyi5Vwr1DukL0fEmJwu3ceVNduhsuE,9204
-huggingface_hub/utils/endpoint_helpers.py,sha256=n_VguR_L2Vl6Mi_4PFO2iAd5xaPeQRiD8KRBpzs4nMw,9536
+huggingface_hub/utils/endpoint_helpers.py,sha256=9VtIAlxQ5H_4y30sjCAgbu7XCqAtNLC7aRYxaNn0hLI,2366
 huggingface_hub/utils/insecure_hashlib.py,sha256=OjxlvtSQHpbLp9PWSrXBDJ0wHjxCBU-SQJgucEEXDbU,1058
 huggingface_hub/utils/logging.py,sha256=Cp03s0uEl3kDM9XHQW9a8GAoExODQ-e7kEtgMt-_To8,4728
-huggingface_hub/utils/sha.py,sha256=QLlIwPCyz46MmUc_4L8xl87KfYoBks9kPgsMZ5JCz-o,902
-huggingface_hub/utils/tqdm.py,sha256=x35PqUA8bBBztPrqhv87Y_TGl5CdlfBs4pe6k1YyDJ8,9390
-huggingface_hub-0.23.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-huggingface_hub-0.23.4.dist-info/METADATA,sha256=Z6W3iTz87tQa5ZN0zoCH78vn-tS1qErB5JkzEI67V0s,12994
-huggingface_hub-0.23.4.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-huggingface_hub-0.23.4.dist-info/entry_points.txt,sha256=Y3Z2L02rBG7va_iE6RPXolIgwOdwUFONyRN3kXMxZ0g,131
-huggingface_hub-0.23.4.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
-huggingface_hub-0.23.4.dist-info/RECORD,,
+huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
+huggingface_hub/utils/tqdm.py,sha256=jQiVYwRG78HK4_54u0vTtz6Kt9IMGiHy3ixbIn3h2TU,9368
+huggingface_hub-0.24.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+huggingface_hub-0.24.0.dist-info/METADATA,sha256=AC5gX-fq3jcUR2QU1EqHrVKKO6cHh6HF6jBECkfglcQ,13183
+huggingface_hub-0.24.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+huggingface_hub-0.24.0.dist-info/entry_points.txt,sha256=Y3Z2L02rBG7va_iE6RPXolIgwOdwUFONyRN3kXMxZ0g,131
+huggingface_hub-0.24.0.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
+huggingface_hub-0.24.0.dist-info/RECORD,,

huggingface_hub/serialization/_numpy.py (deleted)

@@ -1,68 +0,0 @@
-# Copyright 2024 The HuggingFace Team. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Contains numpy-specific helpers."""
-
-from typing import TYPE_CHECKING, Dict, Union
-
-from ._base import FILENAME_PATTERN, MAX_SHARD_SIZE, StateDictSplit, split_state_dict_into_shards_factory
-
-
-if TYPE_CHECKING:
-    import numpy as np
-
-
-def split_numpy_state_dict_into_shards(
-    state_dict: Dict[str, "np.ndarray"],
-    *,
-    filename_pattern: str = FILENAME_PATTERN,
-    max_shard_size: Union[int, str] = MAX_SHARD_SIZE,
-) -> StateDictSplit:
-    """
-    Split a model state dictionary in shards so that each shard is smaller than a given size.
-
-    The shards are determined by iterating through the `state_dict` in the order of its keys. There is no optimization
-    made to make each shard as close as possible to the maximum size passed. For example, if the limit is 10GB and we
-    have tensors of sizes [6GB, 6GB, 2GB, 6GB, 2GB, 2GB] they will get sharded as [6GB], [6+2GB], [6+2+2GB] and not
-    [6+2+2GB], [6+2GB], [6GB].
-
-    <Tip warning={true}>
-
-    If one of the model's tensor is bigger than `max_shard_size`, it will end up in its own shard which will have a
-    size greater than `max_shard_size`.
-
-    </Tip>
-
-    Args:
-        state_dict (`Dict[str, np.ndarray]`):
-            The state dictionary to save.
-        filename_pattern (`str`, *optional*):
-            The pattern to generate the files names in which the model will be saved. Pattern must be a string that
-            can be formatted with `filename_pattern.format(suffix=...)` and must contain the keyword `suffix`
-            Defaults to `"model{suffix}.safetensors"`.
-        max_shard_size (`int` or `str`, *optional*):
-            The maximum size of each shard, in bytes. Defaults to 5GB.
-
-    Returns:
-        [`StateDictSplit`]: A `StateDictSplit` object containing the shards and the index to retrieve them.
-    """
-    return split_state_dict_into_shards_factory(
-        state_dict,
-        max_shard_size=max_shard_size,
-        filename_pattern=filename_pattern,
-        get_tensor_size=get_tensor_size,
-    )
-
-
-def get_tensor_size(tensor: "np.ndarray") -> int:
-    return tensor.nbytes
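
The deleted module is the numpy flavour of the shard-splitting helpers; its docstring (visible in the diff above) describes the greedy sharding policy: walk the state dict in key order and open a new shard whenever the next tensor would push the current one over `max_shard_size`, with a tensor larger than the limit landing in its own oversized shard. A small illustrative sketch of that policy, independent of the library's internal `split_state_dict_into_shards_factory`:

```python
from typing import Dict, List


def greedy_shards(sizes: Dict[str, int], max_shard_size: int) -> List[List[str]]:
    """Illustrative greedy sharding: iterate keys in order, start a new shard when
    adding the next tensor would exceed the limit. A single tensor larger than the
    limit still gets its own (oversized) shard."""
    shards: List[List[str]] = [[]]
    current = 0
    for name, size in sizes.items():
        if shards[-1] and current + size > max_shard_size:
            shards.append([])
            current = 0
        shards[-1].append(name)
        current += size
    return shards


# The docstring's example: with a 10GB limit, tensors of 6/6/2/6/2/2 GB shard as
# [6], [6+2], [6+2+2], not the tighter [6+2+2], [6+2], [6].
sizes = {"a": 6, "b": 6, "c": 2, "d": 6, "e": 2, "f": 2}
print(greedy_shards(sizes, max_shard_size=10))  # [['a'], ['b', 'c'], ['d', 'e', 'f']]
```
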