safetensors 0.4.6.dev0__tar.gz → 0.5.0rc0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of safetensors might be problematic. See the registry's release page for more details.

Files changed (53)
  1. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/PKG-INFO +2 -2
  2. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/Cargo.lock +33 -33
  3. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/Cargo.toml +2 -2
  4. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/src/lib.rs +73 -76
  5. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/pyproject.toml +1 -0
  6. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/Cargo.toml +1 -1
  7. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/src/tensor.rs +2 -2
  8. safetensors-0.4.6.dev0/bindings/python/README.md +0 -60
  9. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/.gitignore +0 -0
  10. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/MANIFEST.in +0 -0
  11. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/Makefile +0 -0
  12. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0/bindings/python}/README.md +0 -0
  13. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/benches/test_flax.py +0 -0
  14. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/benches/test_mlx.py +0 -0
  15. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/benches/test_paddle.py +0 -0
  16. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/benches/test_pt.py +0 -0
  17. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/benches/test_tf.py +0 -0
  18. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/convert.py +0 -0
  19. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/convert_all.py +0 -0
  20. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/fuzz.py +0 -0
  21. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/__init__.py +0 -0
  22. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/__init__.pyi +0 -0
  23. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/flax.py +0 -0
  24. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/mlx.py +0 -0
  25. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/numpy.py +0 -0
  26. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/paddle.py +0 -0
  27. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/py.typed +0 -0
  28. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/tensorflow.py +0 -0
  29. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/py_src/safetensors/torch.py +0 -0
  30. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/setup.cfg +0 -0
  31. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/stub.py +0 -0
  32. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/data/__init__.py +0 -0
  33. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_flax_comparison.py +0 -0
  34. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_mlx_comparison.py +0 -0
  35. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_paddle_comparison.py +0 -0
  36. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_pt_comparison.py +0 -0
  37. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_pt_model.py +0 -0
  38. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_simple.py +0 -0
  39. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/bindings/python/tests/test_tf_comparison.py +0 -0
  40. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/__init__.py +0 -0
  41. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/__init__.pyi +0 -0
  42. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/flax.py +0 -0
  43. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/mlx.py +0 -0
  44. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/numpy.py +0 -0
  45. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/paddle.py +0 -0
  46. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/py.typed +0 -0
  47. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/tensorflow.py +0 -0
  48. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/py_src/safetensors/torch.py +0 -0
  49. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/LICENSE +0 -0
  50. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/README.md +0 -0
  51. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/benches/benchmark.rs +0 -0
  52. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/src/lib.rs +0 -0
  53. {safetensors-0.4.6.dev0 → safetensors-0.5.0rc0}/safetensors/src/slice.rs +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: safetensors
3
- Version: 0.4.6.dev0
3
+ Version: 0.5.0rc0
4
4
  Classifier: Development Status :: 5 - Production/Stable
5
5
  Classifier: Intended Audience :: Developers
6
6
  Classifier: Intended Audience :: Education
@@ -1,6 +1,6 @@
1
1
  # This file is automatically @generated by Cargo.
2
2
  # It is not intended for manual editing.
3
- version = 3
3
+ version = 4
4
4
 
5
5
  [[package]]
6
6
  name = "autocfg"
@@ -28,15 +28,15 @@ checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"
28
28
 
29
29
  [[package]]
30
30
  name = "itoa"
31
- version = "1.0.11"
31
+ version = "1.0.14"
32
32
  source = "registry+https://github.com/rust-lang/crates.io-index"
33
- checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
33
+ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
34
34
 
35
35
  [[package]]
36
36
  name = "libc"
37
- version = "0.2.161"
37
+ version = "0.2.169"
38
38
  source = "registry+https://github.com/rust-lang/crates.io-index"
39
- checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
39
+ checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
40
40
 
41
41
  [[package]]
42
42
  name = "memchr"
@@ -70,24 +70,24 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
70
70
 
71
71
  [[package]]
72
72
  name = "portable-atomic"
73
- version = "1.9.0"
73
+ version = "1.10.0"
74
74
  source = "registry+https://github.com/rust-lang/crates.io-index"
75
- checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2"
75
+ checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6"
76
76
 
77
77
  [[package]]
78
78
  name = "proc-macro2"
79
- version = "1.0.89"
79
+ version = "1.0.92"
80
80
  source = "registry+https://github.com/rust-lang/crates.io-index"
81
- checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
81
+ checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
82
82
  dependencies = [
83
83
  "unicode-ident",
84
84
  ]
85
85
 
86
86
  [[package]]
87
87
  name = "pyo3"
88
- version = "0.22.6"
88
+ version = "0.23.3"
89
89
  source = "registry+https://github.com/rust-lang/crates.io-index"
90
- checksum = "f402062616ab18202ae8319da13fa4279883a2b8a9d9f83f20dbade813ce1884"
90
+ checksum = "e484fd2c8b4cb67ab05a318f1fd6fa8f199fcc30819f08f07d200809dba26c15"
91
91
  dependencies = [
92
92
  "cfg-if",
93
93
  "indoc",
@@ -103,9 +103,9 @@ dependencies = [
103
103
 
104
104
  [[package]]
105
105
  name = "pyo3-build-config"
106
- version = "0.22.6"
106
+ version = "0.23.3"
107
107
  source = "registry+https://github.com/rust-lang/crates.io-index"
108
- checksum = "b14b5775b5ff446dd1056212d778012cbe8a0fbffd368029fd9e25b514479c38"
108
+ checksum = "dc0e0469a84f208e20044b98965e1561028180219e35352a2afaf2b942beff3b"
109
109
  dependencies = [
110
110
  "once_cell",
111
111
  "target-lexicon",
@@ -113,9 +113,9 @@ dependencies = [
113
113
 
114
114
  [[package]]
115
115
  name = "pyo3-ffi"
116
- version = "0.22.6"
116
+ version = "0.23.3"
117
117
  source = "registry+https://github.com/rust-lang/crates.io-index"
118
- checksum = "9ab5bcf04a2cdcbb50c7d6105de943f543f9ed92af55818fd17b660390fc8636"
118
+ checksum = "eb1547a7f9966f6f1a0f0227564a9945fe36b90da5a93b3933fc3dc03fae372d"
119
119
  dependencies = [
120
120
  "libc",
121
121
  "pyo3-build-config",
@@ -123,9 +123,9 @@ dependencies = [
123
123
 
124
124
  [[package]]
125
125
  name = "pyo3-macros"
126
- version = "0.22.6"
126
+ version = "0.23.3"
127
127
  source = "registry+https://github.com/rust-lang/crates.io-index"
128
- checksum = "0fd24d897903a9e6d80b968368a34e1525aeb719d568dba8b3d4bfa5dc67d453"
128
+ checksum = "fdb6da8ec6fa5cedd1626c886fc8749bdcbb09424a86461eb8cdf096b7c33257"
129
129
  dependencies = [
130
130
  "proc-macro2",
131
131
  "pyo3-macros-backend",
@@ -135,9 +135,9 @@ dependencies = [
135
135
 
136
136
  [[package]]
137
137
  name = "pyo3-macros-backend"
138
- version = "0.22.6"
138
+ version = "0.23.3"
139
139
  source = "registry+https://github.com/rust-lang/crates.io-index"
140
- checksum = "36c011a03ba1e50152b4b394b479826cad97e7a21eb52df179cd91ac411cbfbe"
140
+ checksum = "38a385202ff5a92791168b1136afae5059d3ac118457bb7bc304c197c2d33e7d"
141
141
  dependencies = [
142
142
  "heck",
143
143
  "proc-macro2",
@@ -148,9 +148,9 @@ dependencies = [
148
148
 
149
149
  [[package]]
150
150
  name = "quote"
151
- version = "1.0.37"
151
+ version = "1.0.38"
152
152
  source = "registry+https://github.com/rust-lang/crates.io-index"
153
- checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
153
+ checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
154
154
  dependencies = [
155
155
  "proc-macro2",
156
156
  ]
@@ -163,7 +163,7 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
163
163
 
164
164
  [[package]]
165
165
  name = "safetensors"
166
- version = "0.4.6-dev.0"
166
+ version = "0.5.0-rc.0"
167
167
  dependencies = [
168
168
  "serde",
169
169
  "serde_json",
@@ -171,7 +171,7 @@ dependencies = [
171
171
 
172
172
  [[package]]
173
173
  name = "safetensors-python"
174
- version = "0.4.6-dev.0"
174
+ version = "0.5.0-rc.0"
175
175
  dependencies = [
176
176
  "memmap2",
177
177
  "pyo3",
@@ -181,18 +181,18 @@ dependencies = [
181
181
 
182
182
  [[package]]
183
183
  name = "serde"
184
- version = "1.0.214"
184
+ version = "1.0.217"
185
185
  source = "registry+https://github.com/rust-lang/crates.io-index"
186
- checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
186
+ checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
187
187
  dependencies = [
188
188
  "serde_derive",
189
189
  ]
190
190
 
191
191
  [[package]]
192
192
  name = "serde_derive"
193
- version = "1.0.214"
193
+ version = "1.0.217"
194
194
  source = "registry+https://github.com/rust-lang/crates.io-index"
195
- checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
195
+ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
196
196
  dependencies = [
197
197
  "proc-macro2",
198
198
  "quote",
@@ -201,9 +201,9 @@ dependencies = [
201
201
 
202
202
  [[package]]
203
203
  name = "serde_json"
204
- version = "1.0.132"
204
+ version = "1.0.134"
205
205
  source = "registry+https://github.com/rust-lang/crates.io-index"
206
- checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
206
+ checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d"
207
207
  dependencies = [
208
208
  "itoa",
209
209
  "memchr",
@@ -213,9 +213,9 @@ dependencies = [
213
213
 
214
214
  [[package]]
215
215
  name = "syn"
216
- version = "2.0.87"
216
+ version = "2.0.94"
217
217
  source = "registry+https://github.com/rust-lang/crates.io-index"
218
- checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
218
+ checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3"
219
219
  dependencies = [
220
220
  "proc-macro2",
221
221
  "quote",
@@ -230,9 +230,9 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
230
230
 
231
231
  [[package]]
232
232
  name = "unicode-ident"
233
- version = "1.0.13"
233
+ version = "1.0.14"
234
234
  source = "registry+https://github.com/rust-lang/crates.io-index"
235
- checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
235
+ checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
236
236
 
237
237
  [[package]]
238
238
  name = "unindent"
@@ -1,6 +1,6 @@
1
1
  [package]
2
2
  name = "safetensors-python"
3
- version = "0.4.6-dev.0"
3
+ version = "0.5.0-rc.0"
4
4
  edition = "2021"
5
5
 
6
6
  # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -9,7 +9,7 @@ name = "safetensors_rust"
9
9
  crate-type = ["cdylib"]
10
10
 
11
11
  [dependencies]
12
- pyo3 = { version = "0.22", features = ["abi3", "abi3-py38"] }
12
+ pyo3 = { version = "0.23", features = ["abi3", "abi3-py38"] }
13
13
  memmap2 = "0.9"
14
14
  serde_json = "1.0"
15
15
 
@@ -5,7 +5,7 @@ use pyo3::exceptions::{PyException, PyFileNotFoundError};
5
5
  use pyo3::prelude::*;
6
6
  use pyo3::sync::GILOnceCell;
7
7
  use pyo3::types::IntoPyDict;
8
- use pyo3::types::{PyByteArray, PyBytes, PyDict, PyList, PySlice};
8
+ use pyo3::types::{PyBool, PyByteArray, PyBytes, PyDict, PyList, PySlice};
9
9
  use pyo3::Bound as PyBound;
10
10
  use pyo3::{intern, PyErr};
11
11
  use safetensors::slice::TensorIndexer;
@@ -32,7 +32,7 @@ struct PyView<'a> {
32
32
  data_len: usize,
33
33
  }
34
34
 
35
- impl<'a> View for &PyView<'a> {
35
+ impl View for &PyView<'_> {
36
36
  fn data(&self) -> std::borrow::Cow<[u8]> {
37
37
  Cow::Borrowed(self.data.as_bytes())
38
38
  }
@@ -122,7 +122,7 @@ fn serialize<'b>(
122
122
  let metadata_map = metadata.map(HashMap::from_iter);
123
123
  let out = safetensors::tensor::serialize(&tensors, &metadata_map)
124
124
  .map_err(|e| SafetensorError::new_err(format!("Error while serializing: {e:?}")))?;
125
- let pybytes = PyBytes::new_bound(py, &out);
125
+ let pybytes = PyBytes::new(py, &out);
126
126
  Ok(pybytes)
127
127
  }
128
128
 
@@ -173,10 +173,10 @@ fn deserialize(py: Python, bytes: &[u8]) -> PyResult<Vec<(String, HashMap<String
173
173
  let mut items = Vec::with_capacity(tensors.len());
174
174
 
175
175
  for (tensor_name, tensor) in tensors {
176
- let pyshape: PyObject = PyList::new_bound(py, tensor.shape().iter()).into();
177
- let pydtype: PyObject = format!("{:?}", tensor.dtype()).into_py(py);
176
+ let pyshape: PyObject = PyList::new(py, tensor.shape().iter())?.into();
177
+ let pydtype: PyObject = format!("{:?}", tensor.dtype()).into_pyobject(py)?.into();
178
178
 
179
- let pydata: PyObject = PyByteArray::new_bound(py, tensor.data()).into();
179
+ let pydata: PyObject = PyByteArray::new(py, tensor.data()).into();
180
180
 
181
181
  let map = HashMap::from([
182
182
  ("shape".to_string(), pyshape),
@@ -266,6 +266,7 @@ enum Device {
266
266
  Npu(usize),
267
267
  Xpu(usize),
268
268
  Xla(usize),
269
+ Mlu(usize),
269
270
  /// User didn't specify acceletor, torch
270
271
  /// is responsible for choosing.
271
272
  Anonymous(usize),
@@ -294,10 +295,12 @@ impl<'source> FromPyObject<'source> for Device {
294
295
  "npu" => Ok(Device::Npu(0)),
295
296
  "xpu" => Ok(Device::Xpu(0)),
296
297
  "xla" => Ok(Device::Xla(0)),
298
+ "mlu" => Ok(Device::Mlu(0)),
297
299
  name if name.starts_with("cuda:") => parse_device(name).map(Device::Cuda),
298
300
  name if name.starts_with("npu:") => parse_device(name).map(Device::Npu),
299
301
  name if name.starts_with("xpu:") => parse_device(name).map(Device::Xpu),
300
302
  name if name.starts_with("xla:") => parse_device(name).map(Device::Xla),
303
+ name if name.starts_with("mlu:") => parse_device(name).map(Device::Mlu),
301
304
  name => Err(SafetensorError::new_err(format!(
302
305
  "device {name} is invalid"
303
306
  ))),
@@ -310,16 +313,21 @@ impl<'source> FromPyObject<'source> for Device {
310
313
  }
311
314
  }
312
315
 
313
- impl IntoPy<PyObject> for Device {
314
- fn into_py(self, py: Python<'_>) -> PyObject {
316
+ impl<'py> IntoPyObject<'py> for Device {
317
+ type Target = PyAny;
318
+ type Output = pyo3::Bound<'py, Self::Target>;
319
+ type Error = std::convert::Infallible;
320
+
321
+ fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
315
322
  match self {
316
- Device::Cpu => "cpu".into_py(py),
317
- Device::Cuda(n) => format!("cuda:{n}").into_py(py),
318
- Device::Mps => "mps".into_py(py),
319
- Device::Npu(n) => format!("npu:{n}").into_py(py),
320
- Device::Xpu(n) => format!("xpu:{n}").into_py(py),
321
- Device::Xla(n) => format!("xla:{n}").into_py(py),
322
- Device::Anonymous(n) => n.into_py(py),
323
+ Device::Cpu => "cpu".into_pyobject(py).map(|x| x.into_any()),
324
+ Device::Cuda(n) => format!("cuda:{n}").into_pyobject(py).map(|x| x.into_any()),
325
+ Device::Mps => "mps".into_pyobject(py).map(|x| x.into_any()),
326
+ Device::Npu(n) => format!("npu:{n}").into_pyobject(py).map(|x| x.into_any()),
327
+ Device::Xpu(n) => format!("xpu:{n}").into_pyobject(py).map(|x| x.into_any()),
328
+ Device::Xla(n) => format!("xla:{n}").into_pyobject(py).map(|x| x.into_any()),
329
+ Device::Mlu(n) => format!("mlu:{n}").into_pyobject(py).map(|x| x.into_any()),
330
+ Device::Anonymous(n) => n.into_pyobject(py).map(|x| x.into_any()),
323
331
  }
324
332
  }
325
333
  }
@@ -409,11 +417,11 @@ impl Open {
409
417
  Python::with_gil(|py| -> PyResult<()> {
410
418
  match framework {
411
419
  Framework::Pytorch => {
412
- let module = PyModule::import_bound(py, intern!(py, "torch"))?;
420
+ let module = PyModule::import(py, intern!(py, "torch"))?;
413
421
  TORCH_MODULE.get_or_init(py, || module.into())
414
422
  }
415
423
  _ => {
416
- let module = PyModule::import_bound(py, intern!(py, "numpy"))?;
424
+ let module = PyModule::import(py, intern!(py, "numpy"))?;
417
425
  NUMPY_MODULE.get_or_init(py, || module.into())
418
426
  }
419
427
  };
@@ -432,9 +440,9 @@ impl Open {
432
440
  // Same for torch.asarray which is necessary for zero-copy tensor
433
441
  if version >= Version::new(1, 11, 0) {
434
442
  // storage = torch.ByteStorage.from_file(filename, shared=False, size=size).untyped()
435
- let py_filename: PyObject = filename.into_py(py);
436
- let size: PyObject = buffer.len().into_py(py);
437
- let shared: PyObject = false.into_py(py);
443
+ let py_filename: PyObject = filename.into_pyobject(py)?.into();
444
+ let size: PyObject = buffer.len().into_pyobject(py)?.into();
445
+ let shared: PyObject = PyBool::new(py, false).to_owned().into();
438
446
  let (size_name, storage_name) = if version >= Version::new(2, 0, 0) {
439
447
  (intern!(py, "nbytes"), intern!(py, "UntypedStorage"))
440
448
  } else {
@@ -442,7 +450,7 @@ impl Open {
442
450
  };
443
451
 
444
452
  let kwargs =
445
- [(intern!(py, "shared"), shared), (size_name, size)].into_py_dict_bound(py);
453
+ [(intern!(py, "shared"), shared), (size_name, size)].into_py_dict(py)?;
446
454
  let storage = module
447
455
  .getattr(storage_name)?
448
456
  // .getattr(intern!(py, "from_file"))?
@@ -453,7 +461,7 @@ impl Open {
453
461
  Ok(untyped) => untyped,
454
462
  Err(_) => storage.getattr(intern!(py, "_untyped"))?,
455
463
  };
456
- let storage = untyped.call0()?.into_py(py);
464
+ let storage = untyped.call0()?.into_pyobject(py)?.into();
457
465
  let gil_storage = GILOnceCell::new();
458
466
  gil_storage.get_or_init(py, || storage);
459
467
 
@@ -528,7 +536,7 @@ impl Open {
528
536
  &mmap[info.data_offsets.0 + self.offset..info.data_offsets.1 + self.offset];
529
537
 
530
538
  let array: PyObject =
531
- Python::with_gil(|py| PyByteArray::new_bound(py, data).into_py(py));
539
+ Python::with_gil(|py| PyByteArray::new(py, data).into_any().into());
532
540
 
533
541
  create_tensor(
534
542
  &self.framework,
@@ -543,14 +551,14 @@ impl Open {
543
551
  let torch = get_module(py, &TORCH_MODULE)?;
544
552
  let dtype: PyObject = get_pydtype(torch, info.dtype, false)?;
545
553
  let torch_uint8: PyObject = get_pydtype(torch, Dtype::U8, false)?;
546
- let kwargs = [(intern!(py, "dtype"), torch_uint8)].into_py_dict_bound(py);
547
- let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
554
+ let kwargs = [(intern!(py, "dtype"), torch_uint8)].into_py_dict(py)?;
555
+ let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
548
556
  let shape = info.shape.to_vec();
549
- let shape: PyObject = shape.into_py(py);
557
+ let shape: PyObject = shape.into_pyobject(py)?.into();
550
558
 
551
559
  let start = (info.data_offsets.0 + self.offset) as isize;
552
560
  let stop = (info.data_offsets.1 + self.offset) as isize;
553
- let slice = PySlice::new_bound(py, start, stop, 1);
561
+ let slice = PySlice::new(py, start, stop, 1);
554
562
  let storage: &PyObject = storage
555
563
  .get(py)
556
564
  .ok_or_else(|| SafetensorError::new_err("Could not find storage"))?;
@@ -559,7 +567,7 @@ impl Open {
559
567
  .getattr(intern!(py, "__getitem__"))?
560
568
  .call1((slice,))?;
561
569
 
562
- let sys = PyModule::import_bound(py, intern!(py, "sys"))?;
570
+ let sys = PyModule::import(py, intern!(py, "sys"))?;
563
571
  let byteorder: String = sys.getattr(intern!(py, "byteorder"))?.extract()?;
564
572
 
565
573
  let mut tensor = torch
@@ -570,7 +578,7 @@ impl Open {
570
578
 
571
579
  if byteorder == "big" {
572
580
  let inplace_kwargs =
573
- [(intern!(py, "inplace"), false.into_py(py))].into_py_dict_bound(py);
581
+ [(intern!(py, "inplace"), PyBool::new(py, false))].into_py_dict(py)?;
574
582
 
575
583
  let intermediary_dtype = match info.dtype {
576
584
  Dtype::BF16 => Some(Dtype::F16),
@@ -581,8 +589,7 @@ impl Open {
581
589
  if let Some(intermediary_dtype) = intermediary_dtype {
582
590
  // Reinterpret to f16 for numpy compatibility.
583
591
  let dtype: PyObject = get_pydtype(torch, intermediary_dtype, false)?;
584
- let view_kwargs =
585
- [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
592
+ let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
586
593
  tensor = tensor
587
594
  .getattr(intern!(py, "view"))?
588
595
  .call((), Some(&view_kwargs))?;
@@ -596,8 +603,7 @@ impl Open {
596
603
  if intermediary_dtype.is_some() {
597
604
  // Reinterpret to f16 for numpy compatibility.
598
605
  let dtype: PyObject = get_pydtype(torch, info.dtype, false)?;
599
- let view_kwargs =
600
- [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
606
+ let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
601
607
  tensor = tensor
602
608
  .getattr(intern!(py, "view"))?
603
609
  .call((), Some(&view_kwargs))?;
@@ -606,11 +612,11 @@ impl Open {
606
612
 
607
613
  tensor = tensor.getattr(intern!(py, "reshape"))?.call1((shape,))?;
608
614
  if self.device != Device::Cpu {
609
- let device: PyObject = self.device.clone().into_py(py);
610
- let kwargs = PyDict::new_bound(py);
615
+ let device: PyObject = self.device.clone().into_pyobject(py)?.into();
616
+ let kwargs = PyDict::new(py);
611
617
  tensor = tensor.call_method("to", (device,), Some(&kwargs))?;
612
618
  }
613
- Ok(tensor.into_py(py))
619
+ Ok(tensor.into_pyobject(py)?.into())
614
620
  // torch.asarray(storage[start + n : stop + n], dtype=torch.uint8).view(dtype=dtype).reshape(shape)
615
621
  })
616
622
  }
@@ -814,7 +820,7 @@ impl PySafeSlice {
814
820
  /// ```
815
821
  pub fn get_shape(&self, py: Python) -> PyResult<PyObject> {
816
822
  let shape = self.info.shape.clone();
817
- let shape: PyObject = shape.into_py(py);
823
+ let shape: PyObject = shape.into_pyobject(py)?.into();
818
824
  Ok(shape)
819
825
  }
820
826
 
@@ -834,7 +840,7 @@ impl PySafeSlice {
834
840
  /// ```
835
841
  pub fn get_dtype(&self, py: Python) -> PyResult<PyObject> {
836
842
  let dtype = self.info.dtype;
837
- let dtype: PyObject = format!("{:?}", dtype).into_py(py);
843
+ let dtype: PyObject = format!("{:?}", dtype).into_pyobject(py)?.into();
838
844
  Ok(dtype)
839
845
  }
840
846
 
@@ -848,12 +854,7 @@ impl PySafeSlice {
848
854
  Slice::Slice(slice) => vec![slice],
849
855
  Slice::Slices(slices) => {
850
856
  if slices.is_empty() && is_list {
851
- vec![SliceIndex::Slice(PySlice::new_bound(
852
- pyslices.py(),
853
- 0,
854
- 0,
855
- 0,
856
- ))]
857
+ vec![SliceIndex::Slice(PySlice::new(pyslices.py(), 0, 0, 0))]
857
858
  } else if is_list {
858
859
  return Err(SafetensorError::new_err(
859
860
  "Non empty lists are not implemented",
@@ -893,7 +894,7 @@ impl PySafeSlice {
893
894
  let length = iterator.remaining_byte_len();
894
895
  Python::with_gil(|py| {
895
896
  let array: PyObject =
896
- PyByteArray::new_bound_with(py, length, |bytes: &mut [u8]| {
897
+ PyByteArray::new_with(py, length, |bytes: &mut [u8]| {
897
898
  for slice in iterator {
898
899
  let len = slice.len();
899
900
  bytes[offset..offset + slice.len()].copy_from_slice(slice);
@@ -901,7 +902,8 @@ impl PySafeSlice {
901
902
  }
902
903
  Ok(())
903
904
  })?
904
- .into_py(py);
905
+ .into_any()
906
+ .into();
905
907
  create_tensor(
906
908
  &self.framework,
907
909
  self.info.dtype,
@@ -915,14 +917,14 @@ impl PySafeSlice {
915
917
  let torch = get_module(py, &TORCH_MODULE)?;
916
918
  let dtype: PyObject = get_pydtype(torch, self.info.dtype, false)?;
917
919
  let torch_uint8: PyObject = get_pydtype(torch, Dtype::U8, false)?;
918
- let kwargs = [(intern!(py, "dtype"), torch_uint8)].into_py_dict_bound(py);
919
- let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
920
+ let kwargs = [(intern!(py, "dtype"), torch_uint8)].into_py_dict(py)?;
921
+ let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
920
922
  let shape = self.info.shape.to_vec();
921
- let shape: PyObject = shape.into_py(py);
923
+ let shape: PyObject = shape.into_pyobject(py)?.into();
922
924
 
923
925
  let start = (self.info.data_offsets.0 + self.offset) as isize;
924
926
  let stop = (self.info.data_offsets.1 + self.offset) as isize;
925
- let slice = PySlice::new_bound(py, start, stop, 1);
927
+ let slice = PySlice::new(py, start, stop, 1);
926
928
  let storage: &PyObject = storage
927
929
  .get(py)
928
930
  .ok_or_else(|| SafetensorError::new_err("Could not find storage"))?;
@@ -932,9 +934,9 @@ impl PySafeSlice {
932
934
  .getattr(intern!(py, "__getitem__"))?
933
935
  .call1((slice,))?;
934
936
 
935
- let slices = slices.into_py(py);
937
+ let slices = slices.into_pyobject(py)?;
936
938
 
937
- let sys = PyModule::import_bound(py, intern!(py, "sys"))?;
939
+ let sys = PyModule::import(py, intern!(py, "sys"))?;
938
940
  let byteorder: String = sys.getattr(intern!(py, "byteorder"))?.extract()?;
939
941
 
940
942
  let mut tensor = torch
@@ -946,7 +948,7 @@ impl PySafeSlice {
946
948
  // Important, do NOT use inplace otherwise the slice itself
947
949
  // is byteswapped, meaning multiple calls will fails
948
950
  let inplace_kwargs =
949
- [(intern!(py, "inplace"), false.into_py(py))].into_py_dict_bound(py);
951
+ [(intern!(py, "inplace"), PyBool::new(py, false))].into_py_dict(py)?;
950
952
 
951
953
  let intermediary_dtype = match self.info.dtype {
952
954
  Dtype::BF16 => Some(Dtype::F16),
@@ -957,7 +959,7 @@ impl PySafeSlice {
957
959
  if let Some(intermediary_dtype) = intermediary_dtype {
958
960
  // Reinterpret to f16 for numpy compatibility.
959
961
  let dtype: PyObject = get_pydtype(torch, intermediary_dtype, false)?;
960
- let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
962
+ let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
961
963
  tensor = tensor
962
964
  .getattr(intern!(py, "view"))?
963
965
  .call((), Some(&view_kwargs))?;
@@ -971,7 +973,7 @@ impl PySafeSlice {
971
973
  if intermediary_dtype.is_some() {
972
974
  // Reinterpret to f16 for numpy compatibility.
973
975
  let dtype: PyObject = get_pydtype(torch, self.info.dtype, false)?;
974
- let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
976
+ let view_kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
975
977
  tensor = tensor
976
978
  .getattr(intern!(py, "view"))?
977
979
  .call((), Some(&view_kwargs))?;
@@ -983,11 +985,11 @@ impl PySafeSlice {
983
985
  .getattr(intern!(py, "__getitem__"))?
984
986
  .call1((slices,))?;
985
987
  if self.device != Device::Cpu {
986
- let device: PyObject = self.device.clone().into_py(py);
987
- let kwargs = PyDict::new_bound(py);
988
+ let device: PyObject = self.device.clone().into_pyobject(py)?.into();
989
+ let kwargs = PyDict::new(py);
988
990
  tensor = tensor.call_method("to", (device,), Some(&kwargs))?;
989
991
  }
990
- Ok(tensor.into_py(py))
992
+ Ok(tensor.into())
991
993
  }),
992
994
  }
993
995
  }
@@ -1039,9 +1041,9 @@ fn create_tensor<'a>(
1039
1041
  // Torch==1.10 does not allow frombuffer on empty buffers so we create
1040
1042
  // the tensor manually.
1041
1043
  // let zeros = module.getattr(intern!(py, "zeros"))?;
1042
- let shape: PyObject = shape.clone().into_py(py);
1044
+ let shape: PyObject = shape.clone().into_pyobject(py)?.into();
1043
1045
  let args = (shape,);
1044
- let kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict_bound(py);
1046
+ let kwargs = [(intern!(py, "dtype"), dtype)].into_py_dict(py)?;
1045
1047
  module.call_method("zeros", args, Some(&kwargs))?
1046
1048
  } else {
1047
1049
  // let frombuffer = module.getattr(intern!(py, "frombuffer"))?;
@@ -1049,13 +1051,13 @@ fn create_tensor<'a>(
1049
1051
  (intern!(py, "buffer"), array),
1050
1052
  (intern!(py, "dtype"), dtype),
1051
1053
  ]
1052
- .into_py_dict_bound(py);
1054
+ .into_py_dict(py)?;
1053
1055
  let mut tensor = module.call_method("frombuffer", (), Some(&kwargs))?;
1054
- let sys = PyModule::import_bound(py, intern!(py, "sys"))?;
1056
+ let sys = PyModule::import(py, intern!(py, "sys"))?;
1055
1057
  let byteorder: String = sys.getattr(intern!(py, "byteorder"))?.extract()?;
1056
1058
  if byteorder == "big" {
1057
1059
  let inplace_kwargs =
1058
- [(intern!(py, "inplace"), false.into_py(py))].into_py_dict_bound(py);
1060
+ [(intern!(py, "inplace"), PyBool::new(py, false))].into_py_dict(py)?;
1059
1061
  tensor = tensor
1060
1062
  .getattr("byteswap")?
1061
1063
  .call((), Some(&inplace_kwargs))?;
@@ -1066,7 +1068,7 @@ fn create_tensor<'a>(
1066
1068
  let tensor = match framework {
1067
1069
  Framework::Flax => {
1068
1070
  let module = Python::with_gil(|py| -> PyResult<&Py<PyModule>> {
1069
- let module = PyModule::import_bound(py, intern!(py, "jax"))?;
1071
+ let module = PyModule::import(py, intern!(py, "jax"))?;
1070
1072
  Ok(FLAX_MODULE.get_or_init(py, || module.into()))
1071
1073
  })?
1072
1074
  .bind(py);
@@ -1077,7 +1079,7 @@ fn create_tensor<'a>(
1077
1079
  }
1078
1080
  Framework::Tensorflow => {
1079
1081
  let module = Python::with_gil(|py| -> PyResult<&Py<PyModule>> {
1080
- let module = PyModule::import_bound(py, intern!(py, "tensorflow"))?;
1082
+ let module = PyModule::import(py, intern!(py, "tensorflow"))?;
1081
1083
  Ok(TENSORFLOW_MODULE.get_or_init(py, || module.into()))
1082
1084
  })?
1083
1085
  .bind(py);
@@ -1087,7 +1089,7 @@ fn create_tensor<'a>(
1087
1089
  }
1088
1090
  Framework::Mlx => {
1089
1091
  let module = Python::with_gil(|py| -> PyResult<&Py<PyModule>> {
1090
- let module = PyModule::import_bound(py, intern!(py, "mlx"))?;
1092
+ let module = PyModule::import(py, intern!(py, "mlx"))?;
1091
1093
  Ok(MLX_MODULE.get_or_init(py, || module.into()))
1092
1094
  })?
1093
1095
  .bind(py);
@@ -1098,8 +1100,8 @@ fn create_tensor<'a>(
1098
1100
  }
1099
1101
  Framework::Pytorch => {
1100
1102
  if device != &Device::Cpu {
1101
- let device: PyObject = device.clone().into_py(py);
1102
- let kwargs = PyDict::new_bound(py);
1103
+ let device: PyObject = device.clone().into_pyobject(py)?.into();
1104
+ let kwargs = PyDict::new(py);
1103
1105
  tensor = tensor.call_method("to", (device,), Some(&kwargs))?;
1104
1106
  }
1105
1107
  tensor
@@ -1107,7 +1109,7 @@ fn create_tensor<'a>(
1107
1109
  Framework::Numpy => tensor,
1108
1110
  };
1109
1111
  // let tensor = tensor.into_py_bound(py);
1110
- Ok(tensor.into_py(py))
1112
+ Ok(tensor.into())
1111
1113
  })
1112
1114
  }
1113
1115
 
@@ -1137,9 +1139,7 @@ fn get_pydtype(module: &PyBound<'_, PyModule>, dtype: Dtype, is_numpy: bool) ->
1137
1139
  Dtype::I8 => module.getattr(intern!(py, "int8"))?.into(),
1138
1140
  Dtype::BOOL => {
1139
1141
  if is_numpy {
1140
- py.import_bound("builtins")?
1141
- .getattr(intern!(py, "bool"))?
1142
- .into()
1142
+ py.import("builtins")?.getattr(intern!(py, "bool"))?.into()
1143
1143
  } else {
1144
1144
  module.getattr(intern!(py, "bool"))?.into()
1145
1145
  }
@@ -1170,10 +1170,7 @@ fn _safetensors_rust(m: &PyBound<'_, PyModule>) -> PyResult<()> {
1170
1170
  m.add_function(wrap_pyfunction!(serialize_file, m)?)?;
1171
1171
  m.add_function(wrap_pyfunction!(deserialize, m)?)?;
1172
1172
  m.add_class::<safe_open>()?;
1173
- m.add(
1174
- "SafetensorError",
1175
- m.py().get_type_bound::<SafetensorError>(),
1176
- )?;
1173
+ m.add("SafetensorError", m.py().get_type::<SafetensorError>())?;
1177
1174
  m.add("__version__", env!("CARGO_PKG_VERSION"))?;
1178
1175
  Ok(())
1179
1176
  }
@@ -23,6 +23,7 @@ dynamic = [
23
23
  'description',
24
24
  'license',
25
25
  'readme',
26
+ 'version',
26
27
  ]
27
28
 
28
29
  [project.urls]
@@ -1,6 +1,6 @@
1
1
  [package]
2
2
  name = "safetensors"
3
- version = "0.4.6-dev.0"
3
+ version = "0.5.0-rc.0"
4
4
  edition = "2021"
5
5
  homepage = "https://github.com/huggingface/safetensors"
6
6
  repository = "https://github.com/huggingface/safetensors"
@@ -565,7 +565,7 @@ pub struct TensorView<'data> {
565
565
  data: &'data [u8],
566
566
  }
567
567
 
568
- impl<'data> View for &TensorView<'data> {
568
+ impl View for &TensorView<'_> {
569
569
  fn dtype(&self) -> Dtype {
570
570
  self.dtype
571
571
  }
@@ -583,7 +583,7 @@ impl<'data> View for &TensorView<'data> {
583
583
  }
584
584
  }
585
585
 
586
- impl<'data> View for TensorView<'data> {
586
+ impl View for TensorView<'_> {
587
587
  fn dtype(&self) -> Dtype {
588
588
  self.dtype
589
589
  }
@@ -1,60 +0,0 @@
1
- ## Installation
2
-
3
- ```
4
- pip install safetensors
5
- ```
6
-
7
-
8
- ## Usage
9
-
10
- ### Numpy
11
-
12
- ```python
13
- from safetensors.numpy import save_file, load_file
14
- import numpy as np
15
-
16
- tensors = {
17
- "a": np.zeros((2, 2)),
18
- "b": np.zeros((2, 3), dtype=np.uint8)
19
- }
20
-
21
- save_file(tensors, "./model.safetensors")
22
-
23
-
24
- # Now loading
25
- loaded = load_file("./model.safetensors")
26
- ```
27
-
28
- ### Torch
29
-
30
- ```python
31
- from safetensors.torch import save_file, load_file
32
- import torch
33
-
34
- tensors = {
35
- "a": torch.zeros((2, 2)),
36
- "b": torch.zeros((2, 3), dtype=torch.uint8)
37
- }
38
-
39
- save_file(tensors, "./model.safetensors")
40
-
41
-
42
- # Now loading
43
- loaded = load_file("./model.safetensors")
44
- ```
45
-
46
- ### Developing
47
-
48
- ```
49
- # inside ./safetensors/bindings/python
50
- pip install .[dev]
51
- ```
52
- Should be enough to install this library locally.
53
-
54
- ### Testing
55
-
56
- ```
57
- # inside ./safetensors/bindings/python
58
- pip install .[dev]
59
- pytest -sv tests/
60
- ```