legend-pydataobj 1.14.0__py3-none-any.whl → 1.14.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/METADATA +1 -1
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/RECORD +13 -13
- lgdo/_version.py +2 -2
- lgdo/lh5/_serializers/write/composite.py +162 -149
- lgdo/lh5/core.py +29 -20
- lgdo/lh5/exceptions.py +28 -13
- lgdo/lh5/store.py +5 -1
- lgdo/lh5/tools.py +5 -1
- lgdo/lh5/utils.py +8 -2
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/WHEEL +0 -0
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/entry_points.txt +0 -0
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/licenses/LICENSE +0 -0
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/top_level.txt +0 -0
{legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
-legend_pydataobj-1.14.
+legend_pydataobj-1.14.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
 lgdo/__init__.py,sha256=fkRv79kdtBasw31gPVK9SdLQ2vEEajTV2t3UPDvFg9o,3206
-lgdo/_version.py,sha256=
+lgdo/_version.py,sha256=XJY_CvF8oIeHKlGNoyQktgBeMSHaZBYQy5zzYmcKBkI,513
 lgdo/cli.py,sha256=s_EWTBWW76l7zWb6gaTSTjiT-0RzzcYEmjeFEQCVxfk,4647
 lgdo/lgdo_utils.py,sha256=6a2YWEwpyEMXlAyTHZMO01aqxy6SxJzPZkGNWKNWuS0,2567
 lgdo/logging.py,sha256=82wIOj7l7xr3WYyeHdpSXbbjzHJsy-uRyKYUYx2vMfQ,1003
@@ -14,14 +14,14 @@ lgdo/compression/utils.py,sha256=W2RkBrxPpXlat84dnU9Ad7d_tTws0irtGl7O1dNWjnk,114
 lgdo/compression/varlen.py,sha256=bjyxhHzfpi6PIPy-Uc47W8_LrRbFoJLJ2kVeD5nhyqo,15125
 lgdo/lh5/__init__.py,sha256=smHTawINIiogHNfYJq3aPvtxleTnBMdPADRCdc1wea8,748
 lgdo/lh5/concat.py,sha256=BZCgK7TWPKK8fMmha8K83d3bC31FVO1b5LOW7x-Ru1s,6186
-lgdo/lh5/core.py,sha256=
+lgdo/lh5/core.py,sha256=vcYZBJ8QFcfzCl80wmgg9uD2bdYBBG8bytcFj-MZpJ0,14109
 lgdo/lh5/datatype.py,sha256=ry3twFaosuBoskiTKqtBYRMk9PQAf403593xKaItfog,1827
-lgdo/lh5/exceptions.py,sha256=
+lgdo/lh5/exceptions.py,sha256=Q374YeqajpptVCYfxJYrThiPZSnfpdbGV3qVwoUuEFo,1697
 lgdo/lh5/iterator.py,sha256=vuN98pa-xHDWXM2GMxvMxFEJGfHatMX6ajqnaP55PuY,20680
 lgdo/lh5/settings.py,sha256=cmPd6ZvneAF5sFMA1qf-9g_YSSygJcQSRmZDp1_sBEU,1001
-lgdo/lh5/store.py,sha256=
-lgdo/lh5/tools.py,sha256=
-lgdo/lh5/utils.py,sha256=
+lgdo/lh5/store.py,sha256=nW8hwamd5yfaskuYaMrnWT7I-H7UiwQTXWHRpnfd9l4,8837
+lgdo/lh5/tools.py,sha256=4OWRA6f__yeZSXgrc3JIpO597x-gtj-s9Y2ycnS6yLM,6663
+lgdo/lh5/utils.py,sha256=PPdOcqBCMqDsHy5vpQIoThoeFRtZHN4DL7IDKzucUxU,6905
 lgdo/lh5/_serializers/__init__.py,sha256=eZzxMp1SeZWG0PkEXUiCz3XyprQ8EmelHUmJogC8xYE,1263
 lgdo/lh5/_serializers/read/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lgdo/lh5/_serializers/read/array.py,sha256=uWfMCihfAmW2DE2ewip2qCK_kvQC_mb2zvOv26uzijc,1000
@@ -33,7 +33,7 @@ lgdo/lh5/_serializers/read/utils.py,sha256=YfSqPO-83A1XvhhuULxQ0Qz2A5ODa3sb7ApNx
 lgdo/lh5/_serializers/read/vector_of_vectors.py,sha256=765P8mElGArAaEPkHTAUXFQ47t1_3-3BQAete0LckBQ,7207
 lgdo/lh5/_serializers/write/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lgdo/lh5/_serializers/write/array.py,sha256=qzRNPQ4mtvc7HYPE3vUcM6bi7lWYnolNStdJVcDfzPU,3174
-lgdo/lh5/_serializers/write/composite.py,sha256=
+lgdo/lh5/_serializers/write/composite.py,sha256=LKRXRP5LVzrHUnFKDcKSn2nxfzdLwBWcFsp4HXT2vKo,12953
 lgdo/lh5/_serializers/write/scalar.py,sha256=JPt_fcdTKOSFp5hfJdcKIfK4hxhcD8vhOlvDF-7btQ8,763
 lgdo/lh5/_serializers/write/vector_of_vectors.py,sha256=puGQX9XF5P_5DVbm_Cc6TvPrsDywgBLSYtkqFNltbB4,3493
 lgdo/types/__init__.py,sha256=DNfOErPiAZg-7Gygkp6ZKAi20Yrm1mfderZHvKo1Y4s,821
@@ -49,8 +49,8 @@ lgdo/types/table.py,sha256=huhgpzdAUx0bRaEaitwnb-Ve7oAu5B6zxPK5EXPUfg0,20233
 lgdo/types/vectorofvectors.py,sha256=k1LwNnX3TcRAhOujj85kNkfZN0MXZYL9aaMUbr82JlE,26910
 lgdo/types/vovutils.py,sha256=LW3ZcwECxVYxxcFadAtY3nnK-9-rk8Xbg_m8hY30lo4,10708
 lgdo/types/waveformtable.py,sha256=9S_NMg894NZTGt2pLuskwH4-zQ5EbLnzWI6FVui6fXE,9827
-legend_pydataobj-1.14.
-legend_pydataobj-1.14.
-legend_pydataobj-1.14.
-legend_pydataobj-1.14.
-legend_pydataobj-1.14.
+legend_pydataobj-1.14.2.dist-info/METADATA,sha256=glxIddfCqz4ly2s46a-QYe_FWZYiSgaceXxkjmHfFTk,44443
+legend_pydataobj-1.14.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+legend_pydataobj-1.14.2.dist-info/entry_points.txt,sha256=0KWfnwbuwhNn0vPUqARukjp04Ca6lzfZBSirouRmk7I,76
+legend_pydataobj-1.14.2.dist-info/top_level.txt,sha256=KyR-EUloqiXcQ62IWnzBmtInDtvsHl4q2ZJAZgTcLXE,5
+legend_pydataobj-1.14.2.dist-info/RECORD,,
lgdo/_version.py
CHANGED
lgdo/lh5/_serializers/write/composite.py
CHANGED
@@ -52,140 +52,166 @@ def _h5_write_lgdo(
     # In hdf5, 'a' is really "modify" -- in addition to appending, you can
     # change any object in the file. So we use file:append for
     # write_object:overwrite.
+    opened_here = False
     if not isinstance(lh5_file, h5py.File):
         mode = "w" if wo_mode == "of" or not Path(lh5_file).exists() else "a"
-        lh5_file = h5py.File(lh5_file, mode=mode, **file_kwargs)
 
-
-
-
-
-    )
-
-    group = utils.get_h5_group(group, lh5_file)
+        try:
+            fh = h5py.File(lh5_file, mode=mode, **file_kwargs)
+        except OSError as oe:
+            raise LH5EncodeError(oe, lh5_file) from oe
 
-
-    if name in group or (
-        ("datatype" in group.attrs or group == "/")
-        and (len(name) <= 2 or "/" not in name[1:-1])
-    ):
-        pass
-    # group is in file but not struct or need to create nesting
+        opened_here = True
     else:
-
-        # if name is nested, iterate up from parent
-        # otherwise we just need to iterate the group
-        if len(name) > 2 and "/" in name[1:-1]:
-            group = utils.get_h5_group(
-                name[:-1].rsplit("/", 1)[0],
-                group,
-            )
-            curr_name = (
-                name.rsplit("/", 1)[1]
-                if name[-1] != "/"
-                else name[:-1].rsplit("/", 1)[1]
-            )
-        else:
-            curr_name = name
-        # initialize the object to be written
-        obj = types.Struct({curr_name.replace("/", ""): obj})
+        fh = lh5_file
 
-
-
-
-
-
-
-            if len(group) > 1:
-                break
-            curr_name = group.name
-            group = group.parent
-            if group.name != "/":
-                obj = types.Struct({curr_name[len(group.name) + 1 :]: obj})
-            else:
-                obj = types.Struct({curr_name[1:]: obj})
-        # if the group has more than one child, we need to append else we can overwrite
-        wo_mode = "ac" if len(group) > 1 else "o"
-
-        # set the new name
-        if group.name == "/":
-            name = "/"
-        elif group.parent.name == "/":
-            name = group.name[1:]
-        else:
-            name = group.name[len(group.parent.name) + 1 :]
-        # get the new group
-        group = utils.get_h5_group(group.parent if group.name != "/" else "/", lh5_file)
+    try:
+        log.debug(
+            f"writing {obj!r}[{start_row}:{n_rows}] as "
+            f"{fh.filename}:{group}/{name}[{write_start}:], "
+            f"mode = {wo_mode}, h5py_kwargs = {h5py_kwargs}"
+        )
 
-
-        msg = f"can't overwrite '{name}' in wo_mode 'write_safe'"
-        raise LH5EncodeError(msg, lh5_file, group, name)
+        group = utils.get_h5_group(group, fh)
 
-
-
-
-
-            and wo_mode not in ["w", "o", "of"]
-            and name in group
+        # name already in file
+        if name in group or (
+            ("datatype" in group.attrs or group == "/")
+            and (len(name) <= 2 or "/" not in name[1:-1])
         ):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            pass
+        # group is in file but not struct or need to create nesting
+        else:
+            # check if name is nested
+            # if name is nested, iterate up from parent
+            # otherwise we just need to iterate the group
+            if len(name) > 2 and "/" in name[1:-1]:
+                group = utils.get_h5_group(
+                    name[:-1].rsplit("/", 1)[0],
+                    group,
+                )
+                curr_name = (
+                    name.rsplit("/", 1)[1]
+                    if name[-1] != "/"
+                    else name[:-1].rsplit("/", 1)[1]
+                )
+            else:
+                curr_name = name
+            # initialize the object to be written
+            obj = types.Struct({curr_name.replace("/", ""): obj})
+
+            # if base group already has a child we just append
+            if len(group) >= 1:
+                wo_mode = "ac"
+            else:
+                # iterate up the group hierarchy until we reach the root or a group with more than one child
+                while group.name != "/":
+                    if len(group) > 1:
+                        break
+                    curr_name = group.name
+                    group = group.parent
+                    if group.name != "/":
+                        obj = types.Struct({curr_name[len(group.name) + 1 :]: obj})
+                    else:
+                        obj = types.Struct({curr_name[1:]: obj})
+                # if the group has more than one child, we need to append else we can overwrite
+                wo_mode = "ac" if len(group) > 1 else "o"
+
+                # set the new name
+                if group.name == "/":
+                    name = "/"
+                elif group.parent.name == "/":
+                    name = group.name[1:]
+                else:
+                    name = group.name[len(group.parent.name) + 1 :]
+                # get the new group
+                group = utils.get_h5_group(group.parent if group.name != "/" else "/", fh)
+
+        if wo_mode == "w" and name in group:
+            msg = f"can't overwrite '{name}' in wo_mode 'write_safe'"
+            raise LH5EncodeError(msg, fh, group, name)
+
+        # struct, table, waveform table or histogram.
+        if isinstance(obj, types.Struct):
+            if (
+                isinstance(obj, types.Histogram)
+                and wo_mode not in ["w", "o", "of"]
+                and name in group
+            ):
+                msg = f"can't append-write to histogram in wo_mode '{wo_mode}'"
+                raise LH5EncodeError(msg, fh, group, name)
+            if isinstance(obj, types.Histogram) and write_start != 0:
+                msg = f"can't write histogram in wo_mode '{wo_mode}' with write_start != 0"
+                raise LH5EncodeError(msg, fh, group, name)
+
+            return _h5_write_struct(
+                obj,
+                name,
+                fh,
+                group=group,
+                start_row=start_row,
+                n_rows=n_rows,  # if isinstance(obj, types.Table | types.Histogram) else None,
+                wo_mode=wo_mode,
+                write_start=write_start,
+                **h5py_kwargs,
+            )
 
-
-
-
-    ):
-        group = utils.get_h5_group(
-            name, group, grp_attrs=obj.attrs, overwrite=(wo_mode == "o")
-        )
+        # scalars
+        if isinstance(obj, types.Scalar):
+            return _h5_write_scalar(obj, name, fh, group, wo_mode)
 
-    #
-
+        # vector of encoded vectors
+        if isinstance(
+            obj, (types.VectorOfEncodedVectors, types.ArrayOfEncodedEqualSizedArrays)
+        ):
+            group = utils.get_h5_group(
+                name, group, grp_attrs=obj.attrs, overwrite=(wo_mode == "o")
+            )
 
-
-            obj.encoded_data
-            "encoded_data",
-            lh5_file,
-            group=group,
-            start_row=start_row,
-            n_rows=n_rows,
-            wo_mode=wo_mode,
-            write_start=write_start,
-            **h5py_kwargs,
-        )
+            # ask not to further compress flattened_data, it is already compressed!
+            obj.encoded_data.flattened_data.attrs["compression"] = None
 
-
-
-
-
-            lh5_file,
+            _h5_write_vector_of_vectors(
+                obj.encoded_data,
+                "encoded_data",
+                fh,
                 group=group,
+                start_row=start_row,
+                n_rows=n_rows,
                 wo_mode=wo_mode,
+                write_start=write_start,
+                **h5py_kwargs,
             )
-
-
-
-
-
+
+            if isinstance(obj.decoded_size, types.Scalar):
+                _h5_write_scalar(
+                    obj.decoded_size,
+                    "decoded_size",
+                    fh,
+                    group=group,
+                    wo_mode=wo_mode,
+                )
+            else:
+                _h5_write_array(
+                    obj.decoded_size,
+                    "decoded_size",
+                    fh,
+                    group=group,
+                    start_row=start_row,
+                    n_rows=n_rows,
+                    wo_mode=wo_mode,
+                    write_start=write_start,
+                    **h5py_kwargs,
+                )
+
+            return None
+
+        # vector of vectors
+        if isinstance(obj, types.VectorOfVectors):
+            return _h5_write_vector_of_vectors(
+                obj,
+                name,
+                fh,
                 group=group,
                 start_row=start_row,
                 n_rows=n_rows,
@@ -194,38 +220,25 @@ def _h5_write_lgdo(
                 **h5py_kwargs,
             )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-            **h5py_kwargs,
-        )
-
-    # if we get this far, must be one of the Array types
-    if isinstance(obj, types.Array):
-        return _h5_write_array(
-            obj,
-            name,
-            lh5_file,
-            group=group,
-            start_row=start_row,
-            n_rows=n_rows,
-            wo_mode=wo_mode,
-            write_start=write_start,
-            **h5py_kwargs,
-        )
+        # if we get this far, must be one of the Array types
+        if isinstance(obj, types.Array):
+            return _h5_write_array(
+                obj,
+                name,
+                fh,
+                group=group,
+                start_row=start_row,
+                n_rows=n_rows,
+                wo_mode=wo_mode,
+                write_start=write_start,
+                **h5py_kwargs,
+            )
 
-
-
+        msg = f"do not know how to write '{name}' of type '{type(obj).__name__}'"
+        raise LH5EncodeError(msg, fh, group, name)
+    finally:
+        if opened_here:
+            fh.close()
 
 
 def _h5_write_struct(
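The rewrite above changes who owns the HDF5 file handle: _h5_write_lgdo opens the file itself only when handed a path (re-raising OSError as LH5EncodeError), records that in opened_here, routes all dispatching through the open handle fh inside a try block, and closes the handle in finally only if it opened it, so caller-provided handles stay open. A minimal sketch of that ownership pattern with hypothetical names (WriteError, write_payload), not the library's internals:

from __future__ import annotations

from pathlib import Path

import h5py


class WriteError(Exception):
    """Stand-in for lgdo.lh5.exceptions.LH5EncodeError (assumption for this sketch)."""


def write_payload(data: dict, lh5_file: str | Path | h5py.File) -> None:
    # Open the file only if we were given a path, and remember that we did.
    opened_here = False
    if not isinstance(lh5_file, h5py.File):
        try:
            fh = h5py.File(lh5_file, mode="a")
        except OSError as oe:
            raise WriteError(oe) from oe
        opened_here = True
    else:
        fh = lh5_file

    try:
        for key, value in data.items():
            fh[key] = value  # the real code dispatches to per-type writers here
    finally:
        # Close only handles we own; never close a caller-provided one.
        if opened_here:
            fh.close()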
lgdo/lh5/core.py
CHANGED
@@ -14,6 +14,7 @@ from numpy.typing import ArrayLike
 
 from .. import types
 from . import _serializers
+from .exceptions import LH5DecodeError
 from .utils import read_n_rows
 
 
@@ -110,15 +111,20 @@ def read(
     object
         the read-out object
     """
+    close_after = False
     if isinstance(lh5_file, h5py.File):
         lh5_obj = lh5_file[name]
     elif isinstance(lh5_file, (str, Path)):
-
+        try:
+            lh5_file = h5py.File(str(Path(lh5_file)), mode="r", locking=locking)
+        except (OSError, FileExistsError) as oe:
+            raise LH5DecodeError(oe, lh5_file) from oe
+
+        close_after = True
     try:
         lh5_obj = lh5_file[name]
     except KeyError as ke:
-
-        raise KeyError(err) from ke
+        raise LH5DecodeError(str(ke), lh5_file, name) from ke
     else:
         if obj_buf is not None:
             obj_buf.resize(obj_buf_start)
@@ -173,23 +179,26 @@
     if isinstance(idx, np.ndarray) and idx.dtype == np.dtype("?"):
         idx = np.where(idx)[0]
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        obj, n_rows_read = _serializers._h5_read_lgdo(
+            lh5_obj.id,
+            lh5_obj.file.filename,
+            lh5_obj.name,
+            start_row=start_row,
+            n_rows=n_rows,
+            idx=idx,
+            use_h5idx=use_h5idx,
+            field_mask=field_mask,
+            obj_buf=obj_buf,
+            obj_buf_start=obj_buf_start,
+            decompress=decompress,
+        )
+        with suppress(AttributeError):
+            obj.resize(obj_buf_start + n_rows_read)
+        return obj
+    finally:
+        if close_after:
+            lh5_file.close()
 
 
 def write(
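For callers, the effect of the read() changes is that an unreadable file or a missing object name now surfaces as LH5DecodeError (carrying the file name and object path) instead of a bare OSError or KeyError, and a file that read() opened from a path is closed again in the finally block. A usage sketch, assuming the module-level read() exported by lgdo.lh5; file and object names are made up:

from lgdo import lh5
from lgdo.lh5.exceptions import LH5DecodeError

try:
    wf = lh5.read("geds/raw/waveform", "run0042.lh5")  # hypothetical names
except LH5DecodeError as exc:
    # exc.file and exc.obj record what could not be decoded
    print(f"read failed: {exc}")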
lgdo/lh5/exceptions.py
CHANGED
@@ -4,17 +4,21 @@ import h5py
 
 
 class LH5DecodeError(Exception):
-    def __init__(
+    def __init__(
+        self, message: str, file: str | h5py.File, oname: str | None = None
+    ) -> None:
         super().__init__(message)
 
-        self.file =
+        self.file = file.filename if isinstance(file, h5py.File) else file
         self.obj = oname
 
     def __str__(self) -> str:
-
-        f"while
-
-
+        if self.obj is None:
+            msg = f"while opening file {self.file} for decoding: "
+        else:
+            msg = f"while decoding object '{self.obj}' in file {self.file}: "
+
+        return msg + super().__str__()
 
     def __reduce__(self) -> tuple:  # for pickling.
         return self.__class__, (*self.args, self.file, self.obj)
@@ -22,19 +26,30 @@ class LH5DecodeError(Exception):
 
 class LH5EncodeError(Exception):
     def __init__(
-        self,
+        self,
+        message: str,
+        file: str | h5py.File,
+        group: str | h5py.Group | None = None,
+        name: str | None = None,
     ) -> None:
         super().__init__(message)
 
         self.file = file.filename if isinstance(file, h5py.File) else file
-        self.group = (
-
+        self.group = (
+            (group.name if isinstance(file, h5py.File) else group).rstrip("/")
+            if group is not None
+            else None
+        )
+        self.name = name.lstrip("/") if name is not None else None
 
     def __str__(self) -> str:
-
-        f"while
-
-
+        if self.name is None:
+            msg = f"while opening file {self.file} for encoding: "
+        else:
+            msg = (
+                f"while encoding object {self.group}/{self.name} to file {self.file}: "
+            )
+        return msg + super().__str__()
 
     def __reduce__(self) -> tuple:  # for pickling.
         return self.__class__, (*self.args, self.file, self.group, self.name)
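Both exception classes now accept either a path or an open h5py.File (storing file.filename in the latter case), normalize the group and object names, and build their message in __str__, distinguishing a failure to open the file from a failure on a specific object. Given the implementations above, the messages come out as follows (paths and object names are hypothetical):

from lgdo.lh5.exceptions import LH5DecodeError, LH5EncodeError

print(LH5DecodeError("unable to open file", "data.lh5"))
# while opening file data.lh5 for decoding: unable to open file

print(LH5DecodeError("object not found", "data.lh5", "geds/raw"))
# while decoding object 'geds/raw' in file data.lh5: object not found

print(LH5EncodeError("invalid write mode", "data.lh5", "/geds", "raw"))
# while encoding object /geds/raw to file data.lh5: invalid write mode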
lgdo/lh5/store.py
CHANGED
@@ -19,6 +19,7 @@ from numpy.typing import ArrayLike
 from .. import types
 from . import _serializers, utils
 from .core import read
+from .exceptions import LH5DecodeError
 
 log = logging.getLogger(__name__)
 
@@ -125,7 +126,10 @@ class LH5Store:
                     "fs_page_size": page_buffer,
                 }
             )
-
+        try:
+            h5f = h5py.File(full_path, mode, **file_kwargs)
+        except (OSError, FileExistsError) as oe:
+            raise LH5DecodeError(oe, full_path) from oe
 
         if self.keep_open:
             if isinstance(self.keep_open, int) and len(self.files) >= self.keep_open:
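LH5Store gets the same guard: failures from h5py.File while opening a file are re-raised as LH5DecodeError. A usage sketch; the keep_open flag and the read method are assumed to behave as in current releases, and the file/object names are made up:

from lgdo.lh5 import LH5Store
from lgdo.lh5.exceptions import LH5DecodeError

store = LH5Store(keep_open=True)  # assumed flag: cache and reuse open handles
try:
    obj = store.read("geds/raw", "does-not-exist.lh5")  # hypothetical names
except LH5DecodeError as exc:
    print(exc)  # "while opening file ... for decoding: ..."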
lgdo/lh5/tools.py
CHANGED
@@ -8,6 +8,7 @@ from pathlib import Path
 import h5py
 
 from . import utils
+from .exceptions import LH5DecodeError
 from .store import LH5Store
 
 log = logging.getLogger(__name__)
@@ -123,7 +124,10 @@ def show(
 
     # open file
     if isinstance(lh5_file, (str, Path)):
-
+        try:
+            lh5_file = h5py.File(utils.expand_path(Path(lh5_file)), "r", locking=False)
+        except (OSError, FileExistsError) as oe:
+            raise LH5DecodeError(oe, lh5_file) from oe
 
     # go to group
     if lh5_group != "/":
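show() follows the same pattern: a path that cannot be opened now raises LH5DecodeError instead of a bare OSError; for a readable file it still prints the LH5 object tree. A short sketch with a made-up file name:

from lgdo import lh5
from lgdo.lh5.exceptions import LH5DecodeError

try:
    lh5.show("run0042.lh5")  # prints the nested group/dataset tree
except LH5DecodeError as exc:
    print(exc)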
lgdo/lh5/utils.py
CHANGED
@@ -45,7 +45,10 @@ def read_n_rows(name: str, h5f: str | Path | h5py.File) -> int | None:
     Return ``None`` if `name` is a :class:`.Scalar` or a :class:`.Struct`.
     """
     if not isinstance(h5f, h5py.File):
-
+        try:
+            h5f = h5py.File(h5f, "r", locking=False)
+        except (OSError, FileExistsError) as oe:
+            raise LH5DecodeError(oe, h5f, None) from oe
 
     try:
         h5o = h5f[name].id
@@ -61,7 +64,10 @@ def read_size_in_bytes(name: str, h5f: str | Path | h5py.File) -> int | None:
     recursively through members of a Struct or Table
     """
     if not isinstance(h5f, h5py.File):
-
+        try:
+            h5f = h5py.File(h5f, "r", locking=False)
+        except (OSError, FileExistsError) as oe:
+            raise LH5DecodeError(oe, h5f) from oe
 
     try:
         h5o = h5f[name].id
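read_n_rows and read_size_in_bytes keep accepting either a path or an open h5py.File; only the path branch changes, reporting open failures as LH5DecodeError. A small sketch with made-up object and file names:

from lgdo.lh5.exceptions import LH5DecodeError
from lgdo.lh5.utils import read_n_rows

try:
    n = read_n_rows("geds/raw", "run0042.lh5")  # hypothetical names
    print(f"number of rows: {n}")
except LH5DecodeError as exc:
    print(exc)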
Files without changes:
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/WHEEL
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/entry_points.txt
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/licenses/LICENSE
- {legend_pydataobj-1.14.0.dist-info → legend_pydataobj-1.14.2.dist-info}/top_level.txt