legend_pydataobj-1.11.6-py3-none-any.whl → legend_pydataobj-1.11.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: legend_pydataobj
- Version: 1.11.6
+ Version: 1.11.8
  Summary: LEGEND Python Data Objects
  Author: The LEGEND Collaboration
  Maintainer: The LEGEND Collaboration
@@ -726,6 +726,7 @@ Requires-Dist: pylegendtestdata; extra == "test"
  Requires-Dist: pytest>=6.0; extra == "test"
  Requires-Dist: pytest-cov; extra == "test"
  Requires-Dist: dbetto; extra == "test"
+ Dynamic: license-file

  # legend-pydataobj

@@ -1,8 +1,8 @@
- lgdo/__init__.py,sha256=QMYK9HhoMi0pbahPN8mPD18gyTxscFgo7QKfCxVhy-0,3196
- lgdo/_version.py,sha256=Uoj5vQkNSraBmH8UOZgP0kMxvcHSlKlg1J7QJpv8_BM,413
- lgdo/cli.py,sha256=Qm2EPmoIVxENAR8BeW7oWpTdHT4GbV-owfzM5NkgjvM,9353
+ legend_pydataobj-1.11.8.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ lgdo/__init__.py,sha256=fkRv79kdtBasw31gPVK9SdLQ2vEEajTV2t3UPDvFg9o,3206
+ lgdo/_version.py,sha256=L3yqa8Aseny4agSopGdx4D8Sk3htDn12nBlc9W8wWes,513
+ lgdo/cli.py,sha256=s_EWTBWW76l7zWb6gaTSTjiT-0RzzcYEmjeFEQCVxfk,4647
  lgdo/lgdo_utils.py,sha256=6a2YWEwpyEMXlAyTHZMO01aqxy6SxJzPZkGNWKNWuS0,2567
- lgdo/lh5_store.py,sha256=5BzbJA9sLcqjp8bJDc2olwOiw0VS6rmfg3cfh1kQkRY,8512
  lgdo/logging.py,sha256=82wIOj7l7xr3WYyeHdpSXbbjzHJsy-uRyKYUYx2vMfQ,1003
  lgdo/units.py,sha256=VQYME86_ev9S7Fq8RyCOQNqYr29MphTTYemmEouZafk,161
  lgdo/utils.py,sha256=WRTmXnaQ-h2hVxwJ27qiOigdsD3DHcaDrdDjvupCuZU,3940
@@ -12,44 +12,44 @@ lgdo/compression/generic.py,sha256=tF3UhLJbUDcovLxpIzgQRxFSjZ5Fz3uDRy9kI4mFntQ,2
  lgdo/compression/radware.py,sha256=GcNTtjuyL7VBBqziUBmSqNXuhqy1bJJgvcyvyumPtrc,23839
  lgdo/compression/utils.py,sha256=W2RkBrxPpXlat84dnU9Ad7d_tTws0irtGl7O1dNWjnk,1140
  lgdo/compression/varlen.py,sha256=6ZZUItyoOfygDdE0DyoISeFZfqdbH6xl7T0eclfarzg,15127
- lgdo/lh5/__init__.py,sha256=FflSA1LF3RTom1YvlGmTleJcl1ytxwez8B_Hn_o10wU,811
- lgdo/lh5/core.py,sha256=__-A6Abctzfwfo4-xJi68xs2e4vfzONEQTJVrUCOw-I,13922
+ lgdo/lh5/__init__.py,sha256=UTzKGmpgFoHwVB_yNULvJsHD_uQQGl-R87l-3QBkh7w,773
+ lgdo/lh5/concat.py,sha256=BZCgK7TWPKK8fMmha8K83d3bC31FVO1b5LOW7x-Ru1s,6186
+ lgdo/lh5/core.py,sha256=3o6JsX6aNkMa3plX96a4vG7LWmfco33OuUzV_mMFStQ,13626
  lgdo/lh5/datatype.py,sha256=O_7BqOlX8PFMyG0ppkfUT5aps5HEqX0bpuKcJO3jhu0,1691
  lgdo/lh5/exceptions.py,sha256=3kj8avXl4eBGvebl3LG12gJEmw91W0T8PYR0AfvUAyM,1211
- lgdo/lh5/iterator.py,sha256=ZaBBnmuNIjinwO0JUY55wLxX8Om9rVRRzXBC5uHmSKM,19772
- lgdo/lh5/store.py,sha256=3wAaQDd1Zmo0_bQ9DbB-FbKS4Uy_Tb642qKHXtZpSw4,10643
- lgdo/lh5/tools.py,sha256=T9CgHA8A3_tVBMtiNJ6hATQKhdqI61m3cX4p2wGKc6c,9937
- lgdo/lh5/utils.py,sha256=ioz8DlyXZsejwnU2qYdIccdHcF12H62jgLkZsiDOLSM,6243
+ lgdo/lh5/iterator.py,sha256=1ob9B7Bf3ioGCtZkUZoL6ibTxAwLf4ld8_33ghVVEa4,20498
+ lgdo/lh5/store.py,sha256=qkBm3gPbr1R2UlQpUuDR5sGRMzpYJBWFL8fDIry6tmQ,8474
+ lgdo/lh5/tools.py,sha256=drtJWHY82wCFuFr6LVVnm2AQgs_wZuFmAvyOB4tcOHs,6431
+ lgdo/lh5/utils.py,sha256=f2H7H1D-RfDN3g_YrVDQEPaHevn5yDJFA-uznK9cgx8,6336
  lgdo/lh5/_serializers/__init__.py,sha256=eZzxMp1SeZWG0PkEXUiCz3XyprQ8EmelHUmJogC8xYE,1263
  lgdo/lh5/_serializers/read/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lgdo/lh5/_serializers/read/array.py,sha256=uWfMCihfAmW2DE2ewip2qCK_kvQC_mb2zvOv26uzijc,1000
- lgdo/lh5/_serializers/read/composite.py,sha256=yTm5dfTgkIL7eG9iZXxhdiRhG04cQLd_hybP4wmxCJE,11809
+ lgdo/lh5/_serializers/read/composite.py,sha256=UvkZHEhf0V7SFLxzF52eyP68hU0guGOLqosrfmIfeys,11729
  lgdo/lh5/_serializers/read/encoded.py,sha256=Q98c08d8LkZq2AlY4rThYECVaEqwbv4T2Urn7TGnsyE,4130
  lgdo/lh5/_serializers/read/ndarray.py,sha256=lFCXD6bSzmMOH7cVmvRYXakkfMCI8EoqTPNONRJ1F0s,3690
  lgdo/lh5/_serializers/read/scalar.py,sha256=kwhWm1T91pXf86CqtUUD8_qheSR92gXZrQVtssV5YCg,922
- lgdo/lh5/_serializers/read/utils.py,sha256=USacxDA0eY-u9lDOZDuJHcScoSVMNeAYljmRvW0T1Jk,7587
- lgdo/lh5/_serializers/read/vector_of_vectors.py,sha256=Fqh2gXFqeR2y0ofQn4GoSdSAATPvHiuBzzcgL16e6ss,7205
+ lgdo/lh5/_serializers/read/utils.py,sha256=YfSqPO-83A1XvhhuULxQ0Qz2A5ODa3sb7ApNxQVJXd0,7581
+ lgdo/lh5/_serializers/read/vector_of_vectors.py,sha256=765P8mElGArAaEPkHTAUXFQ47t1_3-3BQAete0LckBQ,7207
  lgdo/lh5/_serializers/write/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lgdo/lh5/_serializers/write/array.py,sha256=66DKnW2yqIBlUGNBPWcE-m4W0B2-nTKusDHGX9m6GY0,3223
- lgdo/lh5/_serializers/write/composite.py,sha256=I6lH0nWFIpAfZyG4-0rLxzg3mfazZ_FEhQVp1FZ0aA4,9254
+ lgdo/lh5/_serializers/write/composite.py,sha256=wiq9O3Cb08wrAm14L7Jz69ppL7SnYEDHgW6pJtY8aBI,9425
  lgdo/lh5/_serializers/write/scalar.py,sha256=JPt_fcdTKOSFp5hfJdcKIfK4hxhcD8vhOlvDF-7btQ8,763
  lgdo/lh5/_serializers/write/vector_of_vectors.py,sha256=puGQX9XF5P_5DVbm_Cc6TvPrsDywgBLSYtkqFNltbB4,3493
  lgdo/types/__init__.py,sha256=DNfOErPiAZg-7Gygkp6ZKAi20Yrm1mfderZHvKo1Y4s,821
- lgdo/types/array.py,sha256=sUxh1CNCaefrnybt5qdjmmMpVQa_RqFxUv1tJ_pyBbc,6537
+ lgdo/types/array.py,sha256=vxViJScqKw4zGUrrIOuuU_9Y0oTfOkEEhs0TOyUYjwI,9284
  lgdo/types/arrayofequalsizedarrays.py,sha256=DOGJiTmc1QCdm7vLbE6uIRXoMPtt8uuCfmwQawgWf5s,4949
- lgdo/types/encoded.py,sha256=JW4U5ow7KLMzhKnmhdnxbC3SZJAs4bOEDZWKG4KY1uU,15293
+ lgdo/types/encoded.py,sha256=_e8u_BPfpjJbLnEdyTo9QG3kbNsGj0BN4gjdj3L1ndw,15640
  lgdo/types/fixedsizearray.py,sha256=7RjUwTz1bW0pcrdy27JlfrXPAuOU89Kj7pOuSUCojK8,1527
- lgdo/types/histogram.py,sha256=y6j2VDuGYYnLy7WI4J90ApS0PAwic4kCpouZPX09Nus,19974
- lgdo/types/lgdo.py,sha256=RQ2P70N7IWMBDnLLuJI3sm6zQTIKyOMSsKZtBNzmE90,2928
+ lgdo/types/histogram.py,sha256=Jz1lLH56BfYnmcUhxUHK1h2wLDQ0Abgyd-6LznU-3-k,19979
+ lgdo/types/lgdo.py,sha256=21YNtJCHnSO3M60rjsAdbMO5crDjL_0BtuFpudZ2xvU,4500
  lgdo/types/scalar.py,sha256=c5Es2vyDqyWTPV6mujzfIzMpC1jNWkEIcvYyWQUxH3Q,1933
  lgdo/types/struct.py,sha256=Q0OWLVd4B0ciLb8t6VsxU3MPbmGLZ7WfQNno1lSQS0Q,4918
- lgdo/types/table.py,sha256=VIHQOPXJHJgiCjMMb_p7EdbcCqLFSObHMdHSxC1Dm5Y,19212
- lgdo/types/vectorofvectors.py,sha256=Al9FmY44M-vnzhPdQlFOzwm06LNGBI7RSLSdU5pl9us,24663
- lgdo/types/vovutils.py,sha256=7BWPP0BSj-92ifbCIUBcfqxG5-TS8uxujTyJJuDFI04,10302
- lgdo/types/waveformtable.py,sha256=f2tS4f1OEoYaTM5ldCX9zmw8iSISCT3t3wS1SrPdu_o,9901
- legend_pydataobj-1.11.6.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- legend_pydataobj-1.11.6.dist-info/METADATA,sha256=tZZ9ocWZZR9ECF5Hg8DcQCL6uzCthV8L1ApxIbOu6UY,44421
- legend_pydataobj-1.11.6.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- legend_pydataobj-1.11.6.dist-info/entry_points.txt,sha256=Uu5MTlppBZxB4QGlLv-oX8FqACWjAZDNii__TBDJwLQ,72
- legend_pydataobj-1.11.6.dist-info/top_level.txt,sha256=KyR-EUloqiXcQ62IWnzBmtInDtvsHl4q2ZJAZgTcLXE,5
- legend_pydataobj-1.11.6.dist-info/RECORD,,
+ lgdo/types/table.py,sha256=FkWesoEA9bmGGSW8Ewig1Zs77ffUoR_nggfYSmkWpjU,20079
+ lgdo/types/vectorofvectors.py,sha256=GbAKV_ehXN4XdWSwnmKS_ErCiudRetcH_3wo7iDrVjw,26854
+ lgdo/types/vovutils.py,sha256=LW3ZcwECxVYxxcFadAtY3nnK-9-rk8Xbg_m8hY30lo4,10708
+ lgdo/types/waveformtable.py,sha256=9S_NMg894NZTGt2pLuskwH4-zQ5EbLnzWI6FVui6fXE,9827
+ legend_pydataobj-1.11.8.dist-info/METADATA,sha256=qFXYWIsv8umqbHxK7ltDFwbPU_z5STG6E5CUAxZk6EY,44443
+ legend_pydataobj-1.11.8.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+ legend_pydataobj-1.11.8.dist-info/entry_points.txt,sha256=0KWfnwbuwhNn0vPUqARukjp04Ca6lzfZBSirouRmk7I,76
+ legend_pydataobj-1.11.8.dist-info/top_level.txt,sha256=KyR-EUloqiXcQ62IWnzBmtInDtvsHl4q2ZJAZgTcLXE,5
+ legend_pydataobj-1.11.8.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: setuptools (80.3.1)
  Root-Is-Purelib: true
  Tag: py3-none-any

@@ -1,3 +1,3 @@
  [console_scripts]
- lh5concat = lgdo.cli:lh5concat
+ lh5concat = lgdo.cli:lh5concat_cli
  lh5ls = lgdo.cli:lh5ls
lgdo/__init__.py CHANGED
@@ -45,7 +45,7 @@ browsed easily in python like any `HDF5 <https://www.hdfgroup.org>`_ file using
  from __future__ import annotations

  from ._version import version as __version__
- from .lh5_store import LH5Iterator, LH5Store, load_dfs, load_nda, ls, show
+ from .lh5 import LH5Iterator, ls, read, read_as, read_n_rows, show, write
  from .types import (
      LGDO,
      Array,
@@ -69,7 +69,6 @@ __all__ = [
      "FixedSizeArray",
      "Histogram",
      "LH5Iterator",
-     "LH5Store",
      "Scalar",
      "Struct",
      "Table",
@@ -77,8 +76,10 @@ __all__ = [
      "VectorOfVectors",
      "WaveformTable",
      "__version__",
-     "load_dfs",
-     "load_nda",
      "ls",
+     "read",
+     "read_as",
+     "read_n_rows",
      "show",
+     "write",
  ]
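
With this change the legacy `lgdo.lh5_store` module (the old home of `LH5Store`, `load_dfs` and `load_nda`, dropped from the wheel per the RECORD diff above) no longer feeds the top-level namespace; I/O is instead re-exported from `lgdo.lh5`. A minimal sketch of the new-style top-level API, assuming a file `data.lh5` containing a table at `geds/data` (all names illustrative):

import lgdo

# inspect the file layout and row count without reading the data
lgdo.show("data.lh5")
n = lgdo.read_n_rows("geds/data", "data.lh5")

# read an entire LGDO back from disk, then write it elsewhere
obj = lgdo.read("geds/data", "data.lh5")
lgdo.write(obj, "data", "copy.lh5", group="geds")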
lgdo/_version.py CHANGED
@@ -1,8 +1,13 @@
- # file generated by setuptools_scm
+ # file generated by setuptools-scm
  # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
  TYPE_CHECKING = False
  if TYPE_CHECKING:
-     from typing import Tuple, Union
+     from typing import Tuple
+     from typing import Union
+
      VERSION_TUPLE = Tuple[Union[int, str], ...]
  else:
      VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.11.6'
- __version_tuple__ = version_tuple = (1, 11, 6)
+ __version__ = version = '1.11.8'
+ __version_tuple__ = version_tuple = (1, 11, 8)
lgdo/cli.py CHANGED
@@ -3,12 +3,12 @@
  from __future__ import annotations

  import argparse
- import fnmatch
  import logging
  import sys

- from . import Array, Scalar, Struct, Table, VectorOfVectors, __version__, lh5
+ from . import __version__, lh5
  from . import logging as lgdogging  # eheheh
+ from .lh5.concat import lh5concat

  log = logging.getLogger(__name__)

@@ -80,7 +80,7 @@ def lh5ls(args=None):
      )


- def lh5concat(args=None):
+ def lh5concat_cli(args=None):
      """Command line interface for concatenating array-like LGDOs in LH5 files."""
      parser = argparse.ArgumentParser(
          prog="lh5concat",
@@ -174,155 +174,10 @@ Exclude the /data/table1/col1 Table column:
          print(__version__)  # noqa: T201
          sys.exit()

-     if len(args.lh5_file) < 2:
-         msg = "you must provide at least two input files"
-         raise RuntimeError(msg)
-
-     # determine list of objects by recursively ls'ing first file
-     file0 = args.lh5_file[0]
-     obj_list_full = set(lh5.ls(file0, recursive=True))
-
-     # let's remove objects with nested LGDOs inside
-     to_remove = set()
-     for name in obj_list_full:
-         if len(fnmatch.filter(obj_list_full, f"{name}/*")) > 1:
-             to_remove.add(name)
-     obj_list_full -= to_remove
-
-     obj_list = set()
-     # now first remove excluded stuff
-     if args.exclude is not None:
-         for exc in args.exclude:
-             obj_list_full -= set(fnmatch.filter(obj_list_full, exc.strip("/")))
-
-     # then make list of included, based on latest list
-     if args.include is not None:
-         for inc in args.include:
-             obj_list |= set(fnmatch.filter(obj_list_full, inc.strip("/")))
-     else:
-         obj_list = obj_list_full
-
-     # sort
-     obj_list = sorted(obj_list)
-
-     msg = f"objects matching include patterns {args.include} in {file0}: {obj_list}"
-     log.debug(msg)
-
-     # 1. read first valid lgdo from left to right
-     store = lh5.LH5Store()
-     h5f0 = store.gimme_file(file0)
-     lgdos = {}
-     lgdo_structs = {}
-     # loop over object list in the first file
-     for name in obj_list:
-         # now loop over groups starting from root
-         current = ""
-         for item in name.split("/"):
-             current = f"{current}/{item}".strip("/")
-
-             if current in lgdos:
-                 break
-
-             # not even an LGDO (i.e. a plain HDF5 group)!
-             if "datatype" not in h5f0[current].attrs:
-                 continue
-
-             # read as little as possible
-             obj, _ = store.read(current, h5f0, n_rows=1)
-             if isinstance(obj, (Table, Array, VectorOfVectors)):
-                 # read all!
-                 obj, _ = store.read(current, h5f0)
-                 lgdos[current] = obj
-             elif isinstance(obj, Struct):
-                 # structs might be used in a "group-like" fashion (i.e. they might only
-                 # contain array-like objects).
-                 # note: handle after handling tables, as tables also satisfy this check.
-                 lgdo_structs[current] = obj.attrs["datatype"]
-                 continue
-             elif isinstance(obj, Scalar):
-                 msg = f"cannot concat scalar field {current}"
-                 log.warning(msg)
-
-             break
-
-     msg = f"first-level, array-like objects: {lgdos.keys()}"
-     log.debug(msg)
-     msg = f"nested structs: {lgdo_structs.keys()}"
-     log.debug(msg)
-
-     h5f0.close()
-
-     if lgdos == {}:
-         msg = "did not find any field to concatenate, exit"
-         log.error(msg)
-         return
-
-     # 2. remove (nested) table fields based on obj_list
-
-     def _inplace_table_filter(name, table, obj_list):
-         # filter objects nested in this LGDO
-         skm = fnmatch.filter(obj_list, f"{name}/*")
-         kept = {it.removeprefix(name).strip("/").split("/")[0] for it in skm}
-
-         # now remove fields
-         for k in list(table.keys()):
-             if k not in kept:
-                 table.remove_column(k)
-
-         msg = f"fields left in table '{name}': {table.keys()}"
-         log.debug(msg)
-
-         # recurse!
-         for k2, v2 in table.items():
-             if not isinstance(v2, Table):
-                 continue
-
-             _inplace_table_filter(f"{name}/{k2}", v2, obj_list)
-
-     for key, val in lgdos.items():
-         if not isinstance(val, Table):
-             continue
-
-         _inplace_table_filter(key, val, obj_list)
-
-     # 3. write to output file
-     msg = f"creating output file {args.output}"
-     log.info(msg)
-
-     first_done = False
-     for name, obj in lgdos.items():
-         store.write(
-             obj,
-             name,
-             args.output,
-             wo_mode="overwrite_file"
-             if (args.overwrite and not first_done)
-             else "write_safe",
-         )
-
-         first_done = True
-
-     # 4. loop over rest of files/names and write-append
-
-     for file in args.lh5_file[1:]:
-         msg = f"appending file {file} to {args.output}"
-         log.info(msg)
-
-         for name in lgdos:
-             obj, _ = store.read(name, file)
-             # need to remove nested LGDOs from obj too before appending
-             if isinstance(obj, Table):
-                 _inplace_table_filter(name, obj, obj_list)
-
-             store.write(obj, name, args.output, wo_mode="append")
-
-     # 5. reset datatypes of the "group-like" structs
-
-     if lgdo_structs != {}:
-         output_file = store.gimme_file(args.output, mode="a")
-         for struct, struct_dtype in lgdo_structs.items():
-             msg = f"reset datatype of struct {struct} to {struct_dtype}"
-             log.debug(msg)
-
-             output_file[struct].attrs["datatype"] = struct_dtype
-         output_file.close()
+     lh5concat(
+         lh5_files=args.lh5_file,
+         overwrite=args.overwrite,
+         output=args.output,
+         include_list=args.include,
+         exclude_list=args.exclude,
+     )
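
The entire concatenation routine above moved out of the CLI module into `lgdo.lh5.concat.lh5concat`, leaving `lh5concat_cli` as a thin argument-parsing wrapper (which also explains the entry-point rename earlier in this diff). A sketch of the equivalent programmatic call, mirroring the keyword arguments in the new CLI body; file names and fnmatch-style patterns are illustrative:

from lgdo.lh5.concat import lh5concat

# concatenate array-like LGDOs from several input files into one output,
# keeping only objects that match the include patterns
lh5concat(
    lh5_files=["run0.lh5", "run1.lh5"],
    output="merged.lh5",
    overwrite=True,
    include_list=["geds/*"],
    exclude_list=None,
)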
lgdo/lh5/__init__.py CHANGED
@@ -11,15 +11,14 @@ from ._serializers.write.array import DEFAULT_HDF5_SETTINGS
  from .core import read, read_as, write
  from .iterator import LH5Iterator
  from .store import LH5Store
- from .tools import load_dfs, load_nda, ls, show
+ from .tools import ls, show
  from .utils import read_n_rows

  __all__ = [
      "DEFAULT_HDF5_SETTINGS",
      "LH5Iterator",
      "LH5Store",
-     "load_dfs",
-     "load_nda",
+     "concat",
      "ls",
      "read",
      "read_as",
lgdo/lh5/_serializers/read/composite.py CHANGED
@@ -353,15 +353,13 @@ def _h5_read_table(
          table = Table(col_dict=col_dict, attrs=attrs)

          # set (write) loc to end of tree
-         table.loc = n_rows_read
+         table.resize(do_warn=True)
          return table, n_rows_read

      # We have read all fields into the object buffer. Run
      # checks: All columns should be the same size. So update
      # table's size as necessary, warn if any mismatches are found
      obj_buf.resize(do_warn=True)
-     # set (write) loc to end of tree
-     obj_buf.loc = obj_buf_start + n_rows_read

      # check attributes
      utils.check_obj_buf_attrs(obj_buf.attrs, attrs, fname, oname)
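
These hunks drop the removed `loc` bookkeeping in favor of `resize(do_warn=True)`, which harmonizes a table's size with its columns and warns on any mismatch. A small sketch of that call on a freshly assembled table, with illustrative column names:

import numpy as np
import lgdo

# resize(do_warn=True) syncs the table's size to its columns,
# warning if the column lengths disagree
tbl = lgdo.Table(col_dict={"a": lgdo.Array(np.arange(4))})
tbl.resize(do_warn=True)
print(tbl.size)  # 4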
lgdo/lh5/_serializers/read/utils.py CHANGED
@@ -34,7 +34,7 @@ def build_field_mask(field_mask: Mapping[str, bool] | Collection[str]) -> defaul
          default = not field_mask[next(iter(field_mask.keys()))]
          return defaultdict(lambda: default, field_mask)
      if isinstance(field_mask, (list, tuple, set)):
-         return defaultdict(bool, {field: True for field in field_mask})
+         return defaultdict(bool, dict.fromkeys(field_mask, True))
      if isinstance(field_mask, defaultdict):
          return field_mask
      msg = "bad field_mask type"
lgdo/lh5/_serializers/read/vector_of_vectors.py CHANGED
@@ -123,7 +123,7 @@ def _h5_read_vector_of_vectors(
          )
          msg = (
              f"cumulative_length non-increasing between entries "
-             f"{start_row} and {start_row+n_rows_read}"
+             f"{start_row} and {start_row + n_rows_read}"
          )
          raise LH5DecodeError(msg, fname, oname)

lgdo/lh5/_serializers/write/composite.py CHANGED
@@ -1,8 +1,8 @@
  from __future__ import annotations

  import logging
- import os
  from inspect import signature
+ from pathlib import Path

  import h5py

@@ -53,7 +53,7 @@ def _h5_write_lgdo(
      # change any object in the file. So we use file:append for
      # write_object:overwrite.
      if not isinstance(lh5_file, h5py.File):
-         mode = "w" if wo_mode == "of" or not os.path.exists(lh5_file) else "a"
+         mode = "w" if wo_mode == "of" or not Path(lh5_file).exists() else "a"
          lh5_file = h5py.File(lh5_file, mode=mode, **file_kwargs)

      log.debug(
@@ -186,19 +186,20 @@ def _h5_write_struct(
      write_start=0,
      **h5py_kwargs,
  ):
+     # this works for structs and derived (tables)
      assert isinstance(obj, types.Struct)

      # In order to append a column, we need to update the
-     # `table{old_fields}` value in `group.attrs['datatype"]` to include
+     # `struct/table{old_fields}` value in `group.attrs['datatype"]` to include
      # the new fields. One way to do this is to override
      # `obj.attrs["datatype"]` to include old and new fields. Then we
-     # can write the fields to the table as normal.
+     # can write the fields to the struct/table as normal.
      if wo_mode == "ac":
          old_group = utils.get_h5_group(name, group)
          lgdotype = datatype.datatype(old_group.attrs["datatype"])
          fields = datatype.get_struct_fields(old_group.attrs["datatype"])
-         if not issubclass(lgdotype, types.Struct):
-             msg = f"Trying to append columns to an object of type {lgdotype.__name__}"
+         if lgdotype is not type(obj):
+             msg = f"Trying to append columns to an object of different type {lgdotype.__name__}!={type(obj)}"
              raise LH5EncodeError(msg, lh5_file, group, name)

          # If the mode is `append_column`, make sure we aren't appending
@@ -211,8 +212,12 @@
              "column(s) to a table with the same field(s)"
          )
          raise LH5EncodeError(msg, lh5_file, group, name)
+
          # It doesn't matter what key we access, as all fields in the old table have the same size
-         if old_group[next(iter(old_group.keys()))].size != obj.size:
+         if (
+             isinstance(obj, types.Table)
+             and old_group[next(iter(old_group.keys()))].size != obj.size
+         ):
              msg = (
                  f"Table sizes don't match. Trying to append column of size {obj.size} "
                  f"to a table of size {old_group[next(iter(old_group.keys()))].size}."
@@ -222,7 +227,8 @@
          # Now we can append the obj.keys() to the old fields, and then update obj.attrs.
          fields.extend(list(obj.keys()))
          obj.attrs.pop("datatype")
-         obj.attrs["datatype"] = "table" + "{" + ",".join(fields) + "}"
+
+         obj.attrs["datatype"] = obj.datatype_name() + "{" + ",".join(fields) + "}"

      group = utils.get_h5_group(
          name,