legend-pydataobj 1.11.2.tar.gz → 1.11.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/PKG-INFO +2 -2
  2. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/PKG-INFO +2 -2
  3. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/SOURCES.txt +1 -0
  4. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/_version.py +2 -2
  5. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/write/array.py +5 -0
  6. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/exceptions.py +6 -0
  7. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/histogram.py +8 -3
  8. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/lgdo.py +6 -0
  9. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/table.py +6 -0
  10. legend_pydataobj-1.11.4/tests/lh5/test_exceptions.py +17 -0
  11. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_write.py +13 -0
  12. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_array.py +13 -0
  13. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_encoded.py +50 -0
  14. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_histogram.py +13 -1
  15. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_scalar.py +13 -0
  16. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_struct.py +14 -0
  17. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_table.py +18 -0
  18. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_vectorofvectors.py +17 -0
  19. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/LICENSE +0 -0
  20. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/README.md +0 -0
  21. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/pyproject.toml +0 -0
  22. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/setup.cfg +0 -0
  23. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/dependency_links.txt +0 -0
  24. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/entry_points.txt +0 -0
  25. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/not-zip-safe +0 -0
  26. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/requires.txt +0 -0
  27. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/top_level.txt +0 -0
  28. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/__init__.py +6 -6
  29. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/cli.py +0 -0
  30. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/__init__.py +3 -3
  31. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/base.py +0 -0
  32. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/generic.py +0 -0
  33. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/radware.py +0 -0
  34. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/utils.py +0 -0
  35. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/varlen.py +0 -0
  36. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lgdo_utils.py +0 -0
  37. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/__init__.py +2 -2
  38. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/__init__.py +10 -10
  39. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/__init__.py +0 -0
  40. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/array.py +0 -0
  41. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/composite.py +0 -0
  42. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/encoded.py +0 -0
  43. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/ndarray.py +0 -0
  44. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/scalar.py +0 -0
  45. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/utils.py +0 -0
  46. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/read/vector_of_vectors.py +0 -0
  47. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/write/__init__.py +0 -0
  48. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/write/composite.py +0 -0
  49. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/write/scalar.py +0 -0
  50. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/write/vector_of_vectors.py +0 -0
  51. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/core.py +0 -0
  52. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/datatype.py +0 -0
  53. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/iterator.py +0 -0
  54. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/store.py +0 -0
  55. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/tools.py +0 -0
  56. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/utils.py +0 -0
  57. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5_store.py +0 -0
  58. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/logging.py +0 -0
  59. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/__init__.py +3 -3
  60. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/array.py +0 -0
  61. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/arrayofequalsizedarrays.py +0 -0
  62. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/encoded.py +0 -0
  63. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/fixedsizearray.py +0 -0
  64. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/scalar.py +0 -0
  65. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/struct.py +0 -0
  66. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/vectorofvectors.py +0 -0
  67. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/vovutils.py +0 -0
  68. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/waveformtable.py +0 -0
  69. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/units.py +0 -0
  70. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/utils.py +0 -0
  71. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/conftest.py +0 -0
  72. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/sigcompress/LDQTA_r117_20200110T105115Z_cal_geds_raw-0.dat +0 -0
  73. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/sigcompress/special-wf-clipped.dat +0 -0
  74. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/test_compression.py +0 -0
  75. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/test_radware_sigcompress.py +0 -0
  76. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/test_str2wfcodec.py +0 -0
  77. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/compression/test_uleb128_zigzag_diff.py +0 -0
  78. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/conftest.py +0 -0
  79. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/conftest.py +0 -0
  80. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_core.py +0 -0
  81. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_datatype.py +0 -0
  82. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_iterator.py +0 -0
  83. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_store.py +0 -0
  84. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_tools.py +0 -0
  85. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_utils.py +0 -0
  86. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/test_cli.py +0 -0
  87. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/test_lgdo_utils.py +0 -0
  88. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_arrayofequalsizedarrays.py +0 -0
  89. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_fixedsizearray.py +0 -0
  90. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_representations.py +0 -0
  91. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_table_eval.py +0 -0
  92. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_vovutils.py +0 -0
  93. {legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_waveformtable.py +0 -0
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/PKG-INFO
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: legend_pydataobj
-Version: 1.11.2
+Version: 1.11.4
 Summary: LEGEND Python Data Objects
 Author: The LEGEND Collaboration
 Maintainer: The LEGEND Collaboration
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: legend_pydataobj
-Version: 1.11.2
+Version: 1.11.4
 Summary: LEGEND Python Data Objects
 Author: The LEGEND Collaboration
 Maintainer: The LEGEND Collaboration
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/legend_pydataobj.egg-info/SOURCES.txt
@@ -69,6 +69,7 @@ tests/compression/sigcompress/LDQTA_r117_20200110T105115Z_cal_geds_raw-0.dat
 tests/compression/sigcompress/special-wf-clipped.dat
 tests/lh5/conftest.py
 tests/lh5/test_core.py
+tests/lh5/test_exceptions.py
 tests/lh5/test_lh5_datatype.py
 tests/lh5/test_lh5_iterator.py
 tests/lh5/test_lh5_store.py
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/_version.py
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '1.11.2'
-__version_tuple__ = version_tuple = (1, 11, 2)
+__version__ = version = '1.11.4'
+__version_tuple__ = version_tuple = (1, 11, 4)
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/write/array.py
@@ -64,6 +64,11 @@ def _h5_write_array(
     if "hdf5_settings" in obj.attrs:
         h5py_kwargs |= obj.attrs["hdf5_settings"]

+    # HACK: a tuple is strictly requested for the "chunks" setting, but
+    # we'd like to pass a list too in some situations
+    if "chunks" in h5py_kwargs and isinstance(h5py_kwargs["chunks"], list):
+        h5py_kwargs["chunks"] = tuple(h5py_kwargs["chunks"])
+
     # create HDF5 dataset
     ds = group.create_dataset(name, data=nda, **h5py_kwargs)

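A note on the coercion above: as the in-line comment says, a tuple is strictly required for the "chunks" dataset-creation setting, while user-supplied settings sometimes arrive as lists, one plausible source being an hdf5_settings attribute that round-tripped through JSON, which has no tuple type. A minimal standalone sketch of the same normalization, outside lgdo:

import json

# settings deserialized from a JSON-like config come back with lists
h5py_kwargs = json.loads('{"chunks": [1, 10], "compression": null}')

# normalize before forwarding the kwargs to h5py's create_dataset()
if "chunks" in h5py_kwargs and isinstance(h5py_kwargs["chunks"], list):
    h5py_kwargs["chunks"] = tuple(h5py_kwargs["chunks"])

assert h5py_kwargs["chunks"] == (1, 10)

The new test in tests/lh5/test_lh5_write.py further down exercises this path by passing chunks=[1, 10] to store.write.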
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/exceptions.py
@@ -16,6 +16,9 @@ class LH5DecodeError(Exception):
             + super().__str__()
         )

+    def __reduce__(self) -> tuple:  # for pickling.
+        return self.__class__, (*self.args, self.file, self.obj)
+

 class LH5EncodeError(Exception):
     def __init__(
@@ -32,3 +35,6 @@ class LH5EncodeError(Exception):
             f"while writing object {self.group}/{self.name} to file {self.file}: "
             + super().__str__()
         )
+
+    def __reduce__(self) -> tuple:  # for pickling.
+        return self.__class__, (*self.args, self.file, self.group, self.name)
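Background on the two __reduce__ overrides: by default, pickle rebuilds an exception as cls(*exc.args), and args only contains the message passed to super().__init__(). Because both exception classes take extra positional arguments, the default round-trip breaks at load time, which matters when the exceptions have to cross process boundaries. A minimal sketch of the failure mode, using a stand-in class rather than lgdo code:

import pickle


class CustomError(Exception):
    # stand-in for LH5DecodeError: extra constructor args beyond the message
    def __init__(self, message: str, file: str) -> None:
        super().__init__(message)  # only `message` ends up in self.args
        self.file = file


try:
    pickle.loads(pickle.dumps(CustomError("bad data", "data.lh5")))
except TypeError as err:
    # default reconstruction calls CustomError("bad data") and misses `file`
    print("round-trip failed:", err)

Returning (self.__class__, (*self.args, self.file, self.obj)) hands the unpickler the full constructor signature instead.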
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/histogram.py
@@ -418,13 +418,18 @@ class Histogram(Struct):

     def __setitem__(self, name: str, obj: LGDO) -> None:
         # do not allow for new attributes on this
-        msg = "histogram fields cannot be mutated"
-        raise TypeError(msg)
+        known_keys = ("binning", "weights", "isdensity")
+        if name in known_keys and not dict.__contains__(self, name):
+            # but allow initialization while unpickling (after __init__() this is unreachable)
+            dict.__setitem__(self, name, obj)
+        else:
+            msg = "histogram fields cannot be mutated "
+            raise TypeError(msg)

     def __getattr__(self, name: str) -> None:
         # do not allow for new attributes on this
         msg = "histogram fields cannot be mutated"
-        raise TypeError(msg)
+        raise AttributeError(msg)

     def add_field(self, name: str | int, obj: LGDO) -> None:  # noqa: ARG002
         """
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/lgdo.py
@@ -11,6 +11,12 @@ import pandas as pd
 class LGDO(ABC):
     """Abstract base class representing a LEGEND Data Object (LGDO)."""

+    def __new__(cls, *_args, **_kwargs):
+        # allow for (un-)pickling LGDO objects.
+        obj = super().__new__(cls)
+        obj.attrs = {}
+        return obj
+
     @abstractmethod
     def __init__(self, attrs: dict[str, Any] | None = None) -> None:
         self.attrs = {} if attrs is None else dict(attrs)
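Why attrs is seeded in __new__: protocol-2 unpickling creates the instance with cls.__new__(cls) and never runs __init__, and for the dict-based LGDOs the stored items are replayed through __setitem__ before the instance __dict__ (which holds attrs) is restored, so anything __setitem__ touches must already exist at allocation time. That ordering is the likely motivation; the diff itself only says "allow for (un-)pickling". A minimal sketch with an illustrative class, not lgdo code:

import pickle


class Tagged(dict):
    def __new__(cls, *_args, **_kwargs):
        obj = super().__new__(cls)
        obj.attrs = {}  # must exist before items are replayed during unpickling
        return obj

    def __setitem__(self, key, value):
        self.attrs["last_set"] = key  # touches attrs, as an LGDO __setitem__ may
        dict.__setitem__(self, key, value)


t = Tagged()
t["a"] = 1
restored = pickle.loads(pickle.dumps(t))
assert restored.attrs["last_set"] == "a"

The Table.__new__ hook in the next hunk follows the same pattern, pre-seeding size instead of attrs.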
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/table.py
@@ -41,6 +41,12 @@ class Table(Struct):
     :meth:`__len__` to access valid data, which returns the ``size`` attribute.
     """

+    def __new__(cls, *args, **kwargs):
+        # allow for (un-)pickling LGDO objects.
+        obj = super().__new__(cls, *args, **kwargs)
+        obj.size = None
+        return obj
+
     def __init__(
         self,
         col_dict: Mapping[str, LGDO] | pd.DataFrame | ak.Array | None = None,
legend_pydataobj-1.11.4/tests/lh5/test_exceptions.py (new file)
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+import pickle
+
+from lgdo.lh5.exceptions import LH5DecodeError, LH5EncodeError
+
+
+def test_pickle():
+    # test (un-)pickling of LH5 exceptions; e.g. for multiprocessing use.
+
+    ex = LH5EncodeError("message", "file", "group", "name")
+    ex = pickle.loads(pickle.dumps(ex))
+    assert isinstance(ex, LH5EncodeError)
+
+    ex = LH5DecodeError("message", "file", "obj")
+    ex = pickle.loads(pickle.dumps(ex))
+    assert isinstance(ex, LH5DecodeError)
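The "multiprocessing use" in the test comment is the practical trigger: a worker process reports a failure to its parent by pickling the raised exception, so an exception that cannot round-trip surfaces as an opaque pickling error instead of the original message. A small sketch, assuming an lgdo release that includes this fix:

import multiprocessing as mp

from lgdo.lh5.exceptions import LH5DecodeError


def worker(_):
    # pretend an LH5 read failed inside the worker
    raise LH5DecodeError("message", "file.lh5", "obj")


if __name__ == "__main__":
    with mp.Pool(1) as pool:
        try:
            pool.map(worker, [None])
        except LH5DecodeError as err:
            print("propagated from worker:", err)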
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/lh5/test_lh5_write.py
@@ -44,6 +44,19 @@ def test_write_with_hdf5_compression(lgnd_file, tmptestdir):
         assert h5f["/geds/raw/waveform/values"].compression is None
         assert h5f["/geds/raw/waveform/values"].shuffle is False

+    store.write(
+        wft.values,
+        "/geds/raw/waveform/values",
+        f"{tmptestdir}/tmp-pygama-hdf5-compressed-wfs.lh5",
+        wo_mode="overwrite_file",
+        chunks=[1, 10],
+        compression=None,
+        shuffle=False,
+    )
+    with h5py.File(f"{tmptestdir}/tmp-pygama-hdf5-compressed-wfs.lh5") as h5f:
+        assert h5f["/geds/raw/waveform/values"].compression is None
+        assert h5f["/geds/raw/waveform/values"].shuffle is False
+

 def test_write_empty_vov(tmptestdir):
     vov = types.VectorOfVectors(flattened_data=[], cumulative_length=[])
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_array.py
@@ -1,5 +1,7 @@
 from __future__ import annotations

+import pickle
+
 import awkward as ak
 import numpy as np
 import pandas as pd
@@ -61,3 +63,14 @@ def test_view():

     with pytest.raises(ValueError):
         a.view_as("ak", with_units=True)
+
+
+def test_pickle():
+    obj = Array(nda=np.array([1, 2, 3, 4]))
+    obj.attrs["attr1"] = 1
+
+    ex = pickle.loads(pickle.dumps(obj))
+    assert isinstance(ex, Array)
+    assert ex.attrs["attr1"] == 1
+    assert ex.attrs["datatype"] == obj.attrs["datatype"]
+    assert np.all(ex.nda == np.array([1, 2, 3, 4]))
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_encoded.py
@@ -1,5 +1,7 @@
 from __future__ import annotations

+import pickle
+
 import awkward as ak
 import awkward_pandas as akpd
 import numpy as np
@@ -285,3 +287,51 @@ def test_aoeesa_view_as():

     with pytest.raises(TypeError):
         df = voev.view_as("np")
+
+
+def test_aoeesa_pickle():
+    obj = ArrayOfEncodedEqualSizedArrays(
+        encoded_data=VectorOfVectors(
+            flattened_data=Array(nda=np.array([1, 2, 3, 4, 5, 2, 4, 8, 9, 7, 5, 3, 1])),
+            cumulative_length=Array(nda=np.array([2, 5, 6, 10, 13])),
+        ),
+        decoded_size=99,
+    )
+
+    ex = pickle.loads(pickle.dumps(obj))
+
+    desired = [
+        [1, 2],
+        [3, 4, 5],
+        [2],
+        [4, 8, 9, 7],
+        [5, 3, 1],
+    ]
+
+    for i, v in enumerate(ex):
+        assert np.array_equal(v, desired[i])
+
+
+def test_voev_pickle():
+    obj = VectorOfEncodedVectors(
+        encoded_data=VectorOfVectors(
+            flattened_data=Array(nda=np.array([1, 2, 3, 4, 5, 2, 4, 8, 9, 7, 5, 3, 1])),
+            cumulative_length=Array(nda=np.array([2, 5, 6, 10, 13])),
+        ),
+        decoded_size=Array(shape=5, fill_val=6),
+        attrs={"units": "s"},
+    )
+
+    ex = pickle.loads(pickle.dumps(obj))
+
+    desired = [
+        [1, 2],
+        [3, 4, 5],
+        [2],
+        [4, 8, 9, 7],
+        [5, 3, 1],
+    ]
+
+    for i, (v, s) in enumerate(ex):
+        assert np.array_equal(v, desired[i])
+        assert s == 6
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_histogram.py
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import logging
+import pickle

 import hist
 import numpy as np
@@ -266,7 +267,7 @@ def test_view_as_np():
 def test_not_like_table():
     h = Histogram(np.array([1, 1]), (np.array([0, 1, 2]),))
     assert h.form_datatype() == "struct{binning,weights,isdensity}"
-    with pytest.raises(TypeError):
+    with pytest.raises(AttributeError):
         x = h.x  # noqa: F841
     with pytest.raises(TypeError):
         h["x"] = Scalar(1.0)
@@ -392,3 +393,14 @@ def test_histogram_fill():

     with pytest.raises(ValueError, match="data must be"):
         h.fill(np.ones(shape=(5, 5)))
+
+
+def test_pickle():
+    obj = Histogram(np.array([1, 1]), (Histogram.Axis.from_range_edges([0, 1, 2]),))
+    obj.attrs["attr1"] = 1
+
+    ex = pickle.loads(pickle.dumps(obj))
+    assert isinstance(ex, Histogram)
+    assert ex.attrs["attr1"] == 1
+    assert ex.attrs["datatype"] == obj.attrs["datatype"]
+    assert np.all(ex.weights == obj.weights)
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_scalar.py
@@ -1,5 +1,7 @@
 from __future__ import annotations

+import pickle
+
 import pytest

 import lgdo
@@ -33,3 +35,14 @@ def test_getattrs():

 def test_equality():
     assert lgdo.Scalar(value=42) == lgdo.Scalar(value=42)
+
+
+def test_pickle():
+    obj = lgdo.Scalar(value=10)
+    obj.attrs["attr1"] = 1
+
+    ex = pickle.loads(pickle.dumps(obj))
+    assert isinstance(ex, lgdo.Scalar)
+    assert ex.attrs["attr1"] == 1
+    assert ex.attrs["datatype"] == obj.attrs["datatype"]
+    assert ex.value == 10
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_struct.py
@@ -1,5 +1,7 @@
 from __future__ import annotations

+import pickle
+
 import pytest

 import lgdo
@@ -78,3 +80,15 @@ def test_remove_field():

     struct.remove_field("array1", delete=True)
     assert list(struct.keys()) == []
+
+
+def test_pickle():
+    obj_dict = {"scalar1": lgdo.Scalar(value=10)}
+    attrs = {"attr1": 1}
+    struct = lgdo.Struct(obj_dict=obj_dict, attrs=attrs)
+
+    ex = pickle.loads(pickle.dumps(struct))
+    assert isinstance(ex, lgdo.Struct)
+    assert ex.attrs["attr1"] == 1
+    assert ex.attrs["datatype"] == struct.attrs["datatype"]
+    assert ex["scalar1"].value == 10
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_table.py
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import pickle
 import warnings

 import awkward as ak
@@ -221,3 +222,20 @@ def test_remove_column():

     tbl.remove_column("c")
     assert list(tbl.keys()) == ["b"]
+
+
+def test_pickle():
+    col_dict = {
+        "a": lgdo.Array(nda=np.array([1, 2, 3, 4])),
+        "b": lgdo.Array(nda=np.array([5, 6, 7, 8])),
+        "c": lgdo.Array(nda=np.array([9, 10, 11, 12])),
+    }
+    obj = Table(col_dict=col_dict)
+    obj.attrs["attr1"] = 1
+
+    ex = pickle.loads(pickle.dumps(obj))
+    assert isinstance(ex, Table)
+    assert ex.attrs["attr1"] == 1
+    assert ex.attrs["datatype"] == obj.attrs["datatype"]
+    for key, val in col_dict.items():
+        assert ex[key] == val
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/tests/types/test_vectorofvectors.py
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import copy
+import pickle
 from collections import namedtuple

 import awkward as ak
@@ -441,3 +442,19 @@ def test_lh5_iterator_view_as(lgnd_test_data):

     for obj, _, _ in it:
         assert ak.is_valid(obj.view_as("ak"))
+
+
+def test_pickle(testvov):
+    obj = testvov.v2d
+    ex = pickle.loads(pickle.dumps(obj))
+
+    desired = [
+        np.array([1, 2]),
+        np.array([3, 4, 5]),
+        np.array([2]),
+        np.array([4, 8, 9, 7]),
+        np.array([5, 3, 1]),
+    ]
+
+    for i in range(len(desired)):
+        assert np.array_equal(desired[i], ex[i])
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/__init__.py
@@ -62,23 +62,23 @@ from .types import (
 )

 __all__ = [
+    "LGDO",
     "Array",
-    "ArrayOfEqualSizedArrays",
     "ArrayOfEncodedEqualSizedArrays",
+    "ArrayOfEqualSizedArrays",
     "FixedSizeArray",
     "Histogram",
-    "LGDO",
+    "LH5Iterator",
+    "LH5Store",
     "Scalar",
     "Struct",
     "Table",
-    "VectorOfVectors",
     "VectorOfEncodedVectors",
+    "VectorOfVectors",
     "WaveformTable",
+    "__version__",
     "load_dfs",
     "load_nda",
     "ls",
     "show",
-    "LH5Iterator",
-    "LH5Store",
-    "__version__",
 ]
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/compression/__init__.py
@@ -30,9 +30,9 @@ from .radware import RadwareSigcompress
 from .varlen import ULEB128ZigZagDiff

 __all__ = [
-    "WaveformCodec",
-    "encode",
-    "decode",
     "RadwareSigcompress",
     "ULEB128ZigZagDiff",
+    "WaveformCodec",
+    "decode",
+    "encode",
 ]
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/__init__.py
@@ -20,10 +20,10 @@ __all__ = [
     "LH5Store",
     "load_dfs",
     "load_nda",
+    "ls",
     "read",
-    "write",
     "read_as",
-    "ls",
     "read_n_rows",
     "show",
+    "write",
 ]
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/lh5/_serializers/__init__.py
@@ -24,22 +24,22 @@ from .write.scalar import _h5_write_scalar
 from .write.vector_of_vectors import _h5_write_vector_of_vectors

 __all__ = [
-    "_h5_read_lgdo",
-    "_h5_read_vector_of_vectors",
-    "_h5_read_ndarray",
     "_h5_read_array",
+    "_h5_read_array_of_encoded_equalsized_arrays",
+    "_h5_read_array_of_equalsized_arrays",
     "_h5_read_encoded_array",
     "_h5_read_fixedsize_array",
-    "_h5_read_array_of_equalsized_arrays",
-    "_h5_read_struct",
-    "_h5_read_table",
     "_h5_read_histogram",
+    "_h5_read_lgdo",
+    "_h5_read_ndarray",
     "_h5_read_scalar",
-    "_h5_read_array_of_encoded_equalsized_arrays",
+    "_h5_read_struct",
+    "_h5_read_table",
     "_h5_read_vector_of_encoded_vectors",
-    "_h5_write_scalar",
+    "_h5_read_vector_of_vectors",
     "_h5_write_array",
-    "_h5_write_vector_of_vectors",
-    "_h5_write_struct",
     "_h5_write_lgdo",
+    "_h5_write_scalar",
+    "_h5_write_struct",
+    "_h5_write_vector_of_vectors",
 ]
{legend_pydataobj-1.11.2 → legend_pydataobj-1.11.4}/src/lgdo/types/__init__.py
@@ -15,17 +15,17 @@ from .vectorofvectors import VectorOfVectors
 from .waveformtable import WaveformTable

 __all__ = [
+    "LGDO",
     "Array",
-    "ArrayOfEqualSizedArrays",
     "ArrayOfEncodedEqualSizedArrays",
+    "ArrayOfEqualSizedArrays",
     "FixedSizeArray",
     "Histogram",
-    "LGDO",
     "Scalar",
     "Struct",
     "Table",
-    "VectorOfVectors",
     "VectorOfEncodedVectors",
+    "VectorOfVectors",
     "WaveformTable",
 ]