legend-pydataobj 1.5.0a5__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {legend_pydataobj-1.5.0a5.dist-info → legend_pydataobj-1.6.0.dist-info}/METADATA +1 -1
- legend_pydataobj-1.6.0.dist-info/RECORD +54 -0
- {legend_pydataobj-1.5.0a5.dist-info → legend_pydataobj-1.6.0.dist-info}/WHEEL +1 -1
- {legend_pydataobj-1.5.0a5.dist-info → legend_pydataobj-1.6.0.dist-info}/entry_points.txt +1 -0
- lgdo/__init__.py +7 -4
- lgdo/_version.py +2 -2
- lgdo/cli.py +237 -12
- lgdo/compression/__init__.py +1 -0
- lgdo/lh5/__init__.py +9 -1
- lgdo/lh5/_serializers/__init__.py +43 -0
- lgdo/lh5/_serializers/read/__init__.py +0 -0
- lgdo/lh5/_serializers/read/array.py +34 -0
- lgdo/lh5/_serializers/read/composite.py +405 -0
- lgdo/lh5/_serializers/read/encoded.py +129 -0
- lgdo/lh5/_serializers/read/ndarray.py +104 -0
- lgdo/lh5/_serializers/read/scalar.py +34 -0
- lgdo/lh5/_serializers/read/utils.py +12 -0
- lgdo/lh5/_serializers/read/vector_of_vectors.py +195 -0
- lgdo/lh5/_serializers/write/__init__.py +0 -0
- lgdo/lh5/_serializers/write/array.py +92 -0
- lgdo/lh5/_serializers/write/composite.py +259 -0
- lgdo/lh5/_serializers/write/scalar.py +23 -0
- lgdo/lh5/_serializers/write/vector_of_vectors.py +95 -0
- lgdo/lh5/core.py +272 -0
- lgdo/lh5/datatype.py +46 -0
- lgdo/lh5/exceptions.py +34 -0
- lgdo/lh5/iterator.py +1 -1
- lgdo/lh5/store.py +69 -1160
- lgdo/lh5/tools.py +27 -53
- lgdo/lh5/utils.py +130 -27
- lgdo/lh5_store.py +59 -2
- lgdo/logging.py +4 -3
- lgdo/types/__init__.py +1 -0
- lgdo/types/array.py +3 -0
- lgdo/types/arrayofequalsizedarrays.py +1 -0
- lgdo/types/encoded.py +3 -8
- lgdo/types/fixedsizearray.py +1 -0
- lgdo/types/struct.py +1 -0
- lgdo/types/table.py +69 -26
- lgdo/types/vectorofvectors.py +314 -458
- lgdo/types/vovutils.py +320 -0
- lgdo/types/waveformtable.py +1 -0
- lgdo/utils.py +1 -32
- legend_pydataobj-1.5.0a5.dist-info/RECORD +0 -36
- {legend_pydataobj-1.5.0a5.dist-info → legend_pydataobj-1.6.0.dist-info}/LICENSE +0 -0
- {legend_pydataobj-1.5.0a5.dist-info → legend_pydataobj-1.6.0.dist-info}/top_level.txt +0 -0
lgdo/lh5/tools.py
CHANGED
@@ -2,11 +2,9 @@ from __future__ import annotations
 
 import fnmatch
 import glob
-import inspect
 import logging
 import os
-from collections.abc import Iterable
-from typing import Any
+from copy import copy
 from warnings import warn
 
 import h5py
@@ -20,7 +18,11 @@ from .store import LH5Store
 log = logging.getLogger(__name__)
 
 
-def ls(lh5_file: str | h5py.Group, lh5_group: str = "") -> list[str]:
+def ls(
+    lh5_file: str | h5py.Group,
+    lh5_group: str = "",
+    recursive: bool = False,
+) -> list[str]:
     """Return a list of LH5 groups in the input file and group, similar
     to ``ls`` or ``h5ls``. Supports wildcards in group names.
 
@@ -32,6 +34,8 @@ def ls(lh5_file: str | h5py.Group, lh5_group: str = "") -> list[str]:
     lh5_group
         group to search. add a ``/`` to the end of the group name if you want to
         list all objects inside that group.
+    recursive
+        if ``True``, recurse into subgroups.
     """
 
     log.debug(
@@ -49,15 +53,30 @@ def ls(lh5_file: str | h5py.Group, lh5_group: str = "") -> list[str]:
     if lh5_group == "":
         lh5_group = "*"
 
+    # get the first group in the group path
     splitpath = lh5_group.split("/", 1)
+    # filter out objects that don't match lh5_group pattern
     matchingkeys = fnmatch.filter(lh5_file.keys(), splitpath[0])
 
+    ret = []
+    # if there were no "/" in lh5_group just return the result
     if len(splitpath) == 1:
-        return matchingkeys
+        ret = matchingkeys
+
+    else:
+        for key in matchingkeys:
+            ret.extend([f"{key}/{path}" for path in ls(lh5_file[key], splitpath[1])])
+
+    if recursive:
+        rec_ret = copy(ret)
+        for obj in ret:
+            try:
+                rec_ret += ls(lh5_file, lh5_group=f"{obj}/", recursive=True)
+            except AttributeError:
+                continue
+
+        return rec_ret
 
-    ret = []
-    for key in matchingkeys:
-        ret.extend([f"{key}/{path}" for path in ls(lh5_file[key], splitpath[1])])
     return ret
 
 
@@ -169,51 +188,6 @@ def show(
             key = k_new
 
 
-def read_as(
-    name: str,
-    lh5_file: str | h5py.File | Iterable[str | h5py.File],
-    library: str,
-    **kwargs,
-) -> Any:
-    """Read LH5 data from disk straight into a third-party data format view.
-
-    This function is nothing more than a shortcut chained call to
-    :meth:`.LH5Store.read` and to :meth:`.LGDO.view_as`.
-
-    Parameters
-    ----------
-    name
-        LH5 object name on disk.
-    lh5_file
-        LH5 file name.
-    library
-        string ID of the third-party data format library (``np``, ``pd``,
-        ``ak``, etc).
-
-    See Also
-    --------
-    .LH5Store.read, .LGDO.view_as
-    """
-    # determine which keyword arguments should be forwarded to read() and which
-    # should be forwarded to view_as()
-    read_kwargs = inspect.signature(LH5Store.read).parameters.keys()
-
-    kwargs1 = {}
-    kwargs2 = {}
-    for k, v in kwargs.items():
-        if k in read_kwargs:
-            kwargs1[k] = v
-        else:
-            kwargs2[k] = v
-
-    # read the LGDO from disk
-    store = LH5Store()
-    obj, _ = store.read(name, lh5_file, **kwargs1)
-
-    # and finally return a view
-    return obj.view_as(library, **kwargs2)
-
-
 def load_nda(
     f_list: str | list[str],
     par_list: list[str],
lgdo/lh5/utils.py
CHANGED
@@ -1,46 +1,149 @@
 """Implements utilities for LEGEND Data Objects."""
+
 from __future__ import annotations
 
 import glob
 import logging
 import os
 import string
+from collections.abc import Mapping, Sequence
+from typing import Any
 
-
+import h5py
 
+from .. import types
+from . import _serializers, datatype
+from .exceptions import LH5DecodeError
 
-
-    """Parse datatype string and return type, dimensions and elements.
+log = logging.getLogger(__name__)
 
-    Parameters
-    ----------
-    datatype
-        a LGDO-formatted datatype string.
 
-
-
-
-
-
-
-
-
-
-        fields in the struct.
+def get_buffer(
+    name: str,
+    lh5_file: str | h5py.File | Sequence[str | h5py.File],
+    size: int | None = None,
+    field_mask: Mapping[str, bool] | Sequence[str] | None = None,
+) -> types.LGDO:
+    """Returns an LGDO appropriate for use as a pre-allocated buffer.
+
+    Sets size to `size` if object has a size.
     """
-
-
+    obj, n_rows = _serializers._h5_read_lgdo(
+        name, lh5_file, n_rows=0, field_mask=field_mask
+    )
 
-
-
+    if hasattr(obj, "resize") and size is not None:
+        obj.resize(new_size=size)
 
-
-    if datatype.endswith(">"):
-        datatype, dims = parse("{}<{}>", datatype)
-        dims = [int(i) for i in dims.split(",")]
-        return datatype, tuple(dims), element_description
+    return obj
 
-
+
+def read_n_rows(name: str, h5f: str | h5py.File) -> int | None:
+    """Look up the number of rows in an Array-like LGDO object on disk.
+
+    Return ``None`` if `name` is a :class:`.Scalar` or a :class:`.Struct`.
+    """
+    if not isinstance(h5f, h5py.File):
+        h5f = h5py.File(h5f, "r")
+
+    try:
+        attrs = h5f[name].attrs
+    except KeyError as e:
+        msg = "not found"
+        raise LH5DecodeError(msg, h5f, name) from e
+    except AttributeError as e:
+        msg = "missing 'datatype' attribute"
+        raise LH5DecodeError(msg, h5f, name) from e
+
+    lgdotype = datatype.datatype(attrs["datatype"])
+
+    # scalars are dim-0 datasets
+    if lgdotype is types.Scalar:
+        return None
+
+    # structs don't have rows
+    if lgdotype is types.Struct:
+        return None
+
+    # tables should have elements with all the same length
+    if lgdotype is types.Table:
+        # read out each of the fields
+        rows_read = None
+        for field in datatype.get_struct_fields(attrs["datatype"]):
+            n_rows_read = read_n_rows(name + "/" + field, h5f)
+            if not rows_read:
+                rows_read = n_rows_read
+            elif rows_read != n_rows_read:
+                log.warning(
+                    f"'{field}' field in table '{name}' has {rows_read} rows, "
+                    f"{n_rows_read} was expected"
+                )
+        return rows_read
+
+    # length of vector of vectors is the length of its cumulative_length
+    if lgdotype is types.VectorOfVectors:
+        return read_n_rows(f"{name}/cumulative_length", h5f)
+
+    # length of vector of encoded vectors is the length of its decoded_size
+    if lgdotype in (types.VectorOfEncodedVectors, types.ArrayOfEncodedEqualSizedArrays):
+        return read_n_rows(f"{name}/encoded_data", h5f)
+
+    # return array length (without reading the array!)
+    if issubclass(lgdotype, types.Array):
+        # compute the number of rows to read
+        return h5f[name].shape[0]
+
+    msg = f"don't know how to read rows of LGDO {lgdotype.__name__}"
+    raise LH5DecodeError(msg, h5f, name)
+
+
+def get_h5_group(
+    group: str | h5py.Group,
+    base_group: h5py.Group,
+    grp_attrs: Mapping[str, Any] | None = None,
+    overwrite: bool = False,
+) -> h5py.Group:
+    """
+    Returns an existing :mod:`h5py` group from a base group or creates a
+    new one. Can also set (or replace) group attributes.
+
+    Parameters
+    ----------
+    group
+        name of the HDF5 group.
+    base_group
+        HDF5 group to be used as a base.
+    grp_attrs
+        HDF5 group attributes.
+    overwrite
+        whether overwrite group attributes, ignored if `grp_attrs` is
+        ``None``.
+    """
+    if not isinstance(group, h5py.Group):
+        if group in base_group:
+            group = base_group[group]
+        else:
+            group = base_group.create_group(group)
+        if grp_attrs is not None:
+            group.attrs.update(grp_attrs)
+        return group
+    if (
+        grp_attrs is not None
+        and len(set(grp_attrs.items()) ^ set(group.attrs.items())) > 0
+    ):
+        if not overwrite:
+            msg = (
+                f"Provided {grp_attrs=} are different from "
+                f"existing ones {dict(group.attrs)=} but overwrite flag is not set"
+            )
+            raise RuntimeError(msg)
+
+        log.debug(f"overwriting {group}.attrs...")
+        for key in group.attrs:
+            group.attrs.pop(key)
+        group.attrs.update(grp_attrs)
+
+    return group
 
 
 def expand_vars(expr: str, substitute: dict[str, str] | None = None) -> str:
lgdo/lh5_store.py
CHANGED
@@ -1,3 +1,8 @@
+"""
+.. warning::
+    This subpackage is deprecated, use :mod:`lgdo.lh5`.
+"""
+
 from __future__ import annotations
 
 import sys
@@ -23,12 +28,16 @@ from .types import (
     WaveformTable,  # noqa: F401
 )
 
-DEFAULT_HDF5_COMPRESSION = None
 LGDO = Union[Array, Scalar, Struct, VectorOfVectors]
-DEFAULT_HDF5_SETTINGS: dict[str, ...] = {"shuffle": True, "compression": "gzip"}
 
 
 class LH5Iterator(lh5.LH5Iterator):
+    """
+    .. warning::
+        This class is deprecated, use :class:`lgdo.lh5.iterator.LH5Iterator`.
+
+    """
+
     def __init__(
         self,
         lh5_files: str | list[str],
@@ -70,6 +79,11 @@ class LH5Iterator(lh5.LH5Iterator):
         write_start: int = 0,
         **h5py_kwargs,
     ) -> None:
+        """
+        .. warning::
+            This method is deprecated, use :meth:`lgdo.lh5.iterator.LH5Iterator.write`.
+
+        """
         warn(
             "lgdo.lh5_store has moved to a subfolder lgdo.lh5 containing LH5Iterator. "
             "The object you are calling this function from uses the old LH5Iterator class."
@@ -102,6 +116,11 @@ class LH5Iterator(lh5.LH5Iterator):
         obj_buf_start: int = 0,
         decompress: bool = True,
     ) -> tuple[LGDO, int]:
+        """
+        .. warning::
+            This method is deprecated, use :meth:`lgdo.lh5.iterator.LH5Iterator.read`.
+
+        """
         warn(
             "lgdo.lh5_store has moved to a subfolder lgdo.lh5 containing LH5Iterator. "
             "The object you are calling this function from uses the old LH5Iterator class."
@@ -124,6 +143,12 @@ class LH5Iterator(lh5.LH5Iterator):
 
 
 class LH5Store(lh5.LH5Store):
+    """
+    .. warning::
+        This class is deprecated, use :class:`lgdo.lh5.iterator.LH5Store`.
+
+    """
+
     def __init__(self, base_path: str = "", keep_open: bool = False):
         warn(
             "lgdo.lh5_store has moved to a subfolder lgdo.lh5 containing LH5Store. "
@@ -140,6 +165,11 @@ class LH5Store(lh5.LH5Store):
         lh5_file: str | h5py.File | list[str | h5py.File],
         **kwargs,
     ) -> tuple[LGDO, int]:
+        """
+        .. warning::
+            This method is deprecated, use :meth:`lgdo.lh5.store.LH5Store.read`.
+
+        """
         warn(
             "LH5Store.read_object() has been renamed to LH5Store.read(), "
             "Please update your code."
@@ -156,6 +186,11 @@ class LH5Store(lh5.LH5Store):
         lh5_file: str | h5py.File,
         **kwargs,
     ) -> tuple[LGDO, int]:
+        """
+        .. warning::
+            This method is deprecated, use :meth:`lgdo.lh5.store.LH5Store.write`.
+
+        """
         warn(
             "LH5Store.write_object() has been renamed to LH5Store.write(), "
             "Please update your code."
@@ -172,6 +207,12 @@ def load_dfs(
     lh5_group: str = "",
     idx_list: list[np.ndarray | list | tuple] | None = None,
 ) -> pd.DataFrame:
+    """
+    .. warning::
+        This function is deprecated, use :meth:`lgdo.types.lgdo.LGDO.view_as` to
+        view LGDO data as a Pandas data structure.
+
+    """
     warn(
         "lgdo.lh5_store has moved to a subfolder lgdo.lh5. "
         "Please replace 'from lgdo.lh5_store import load_dfs' with 'from lgdo.lh5 import load_dfs'. "
@@ -188,6 +229,12 @@ def load_nda(
     lh5_group: str = "",
     idx_list: list[np.ndarray | list | tuple] | None = None,
 ) -> dict[str, np.ndarray]:
+    """
+    .. warning::
+        This function is deprecated, use :meth:`lgdo.types.lgdo.LGDO.view_as` to
+        view LGDO data as a NumPy data structure.
+
+    """
     warn(
         "lgdo.lh5_store has moved to a subfolder lgdo.lh5. "
         "Please replace 'from lgdo.lh5_store import load_nda' with 'from lgdo.lh5 import load_nda'. "
@@ -199,6 +246,11 @@ def load_nda(
 
 
 def ls(lh5_file: str | h5py.Group, lh5_group: str = "") -> list[str]:
+    """
+    .. warning::
+        This function is deprecated, import :func:`lgdo.lh5.tools.ls`.
+
+    """
     warn(
         "lgdo.lh5_store has moved to a subfolder lgdo.lh5. "
         "Please replace 'from lgdo.lh5_store import ls' with 'from lgdo.lh5 import ls'. "
@@ -216,6 +268,11 @@ def show(
     indent: str = "",
     header: bool = True,
 ) -> None:
+    """
+    .. warning::
+        This function is deprecated, import :func:`lgdo.lh5.tools.show`.
+
+    """
     warn(
         "lgdo.lh5_store has moved to a subfolder lgdo.lh5. "
        "Please replace 'from lgdo.lh5_store import show' with 'from lgdo.lh5 import show'. "
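
Every public entry point in the legacy `lgdo.lh5_store` module now carries an explicit deprecation notice in its docstring, on top of the existing runtime warnings. A migration sketch that follows the replacements spelled out in those warnings (old spelling in the comments, 1.6.0 spelling below); the keyword names follow the `lgdo.lh5.LH5Store` API as understood from this diff, and the file and group names are placeholders:

    # before (still works in 1.6.0, but emits DeprecationWarning):
    #   from lgdo.lh5_store import LH5Store, ls
    #   store = LH5Store()
    #   obj, n = store.read_object("geds/raw", "data.lh5")

    from lgdo.lh5 import LH5Store, ls

    store = LH5Store()
    obj, n_rows = store.read("geds/raw", "data.lh5")    # read_object() -> read()
    store.write(obj, "raw", "copy.lh5", group="geds")   # write_object() -> write()
    print(ls("copy.lh5"))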
lgdo/logging.py
CHANGED
@@ -1,4 +1,5 @@
 """This module implements some helpers for setting up logging."""
+
 from __future__ import annotations
 
 import logging
@@ -16,7 +17,7 @@ CRITICAL = logging.CRITICAL
 def setup(level: int = logging.INFO, logger: logging.Logger | None = None) -> None:
     """Setup a colorful logging output.
 
-    If `logger` is None, sets up only the ``
+    If `logger` is None, sets up only the ``lgdo`` logger.
 
     Parameters
     ----------
@@ -27,7 +28,7 @@ def setup(level: int = logging.INFO, logger: logging.Logger | None = None) -> No
 
     Examples
     --------
-    >>> from
+    >>> from lgdo import logging
     >>> logging.setup(level=logging.DEBUG)
     """
     handler = colorlog.StreamHandler()
@@ -36,7 +37,7 @@ def setup(level: int = logging.INFO, logger: logging.Logger | None = None) -> No
     )
 
     if logger is None:
-        logger = colorlog.getLogger("
+        logger = colorlog.getLogger("lgdo")
 
     logger.setLevel(level)
     logger.addHandler(handler)
lgdo/types/__init__.py
CHANGED
lgdo/types/array.py
CHANGED
@@ -2,6 +2,7 @@
 Implements a LEGEND Data Object representing an n-dimensional array and
 corresponding utilities.
 """
+
 from __future__ import annotations
 
 import logging
@@ -182,6 +183,7 @@ class Array(LGDO):
             if self.nda.ndim == 1:
                 return pd.Series(self.nda, copy=False)
 
+            # if array is multi-dim, use awkward
             return akpd.from_awkward(self.view_as("ak"))
 
         if library == "np":
@@ -195,6 +197,7 @@ class Array(LGDO):
                 msg = "Pint does not support Awkward yet, you must view the data with_units=False"
                 raise ValueError(msg)
 
+            # NOTE: this is zero-copy!
             return ak.Array(self.nda)
 
         msg = f"{library} is not a supported third-party format."
lgdo/types/encoded.py
CHANGED
@@ -179,8 +179,8 @@ class VectorOfEncodedVectors(LGDO):
 
     def __str__(self) -> str:
         string = ""
-        pos = 0
-        for vec, size in self:
+        for pos, res in enumerate(self):
+            vec, size = res[0], res[1]
             if pos != 0:
                 string += " "
 
@@ -200,8 +200,6 @@
             if pos < len(self.encoded_data.cumulative_length):
                 string += ",\n"
 
-            pos += 1
-
         string = f"[{string}]"
 
         attrs = self.getattrs()
@@ -400,8 +398,7 @@ class ArrayOfEncodedEqualSizedArrays(LGDO):
 
     def __str__(self) -> str:
         string = ""
-        pos = 0
-        for vec in self:
+        for pos, vec in enumerate(self):
             if pos != 0:
                 string += " "
 
@@ -418,8 +415,6 @@
             if pos < len(self.encoded_data.cumulative_length):
                 string += ",\n"
 
-            pos += 1
-
         string = f"[{string}] decoded_size={self.decoded_size}"
 
         attrs = self.getattrs()
lgdo/types/fixedsizearray.py
CHANGED