legend-daq2lh5 1.0.2__tar.gz → 1.2.0a1__tar.gz

Files changed (71)
  1. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/PKG-INFO +16 -3
  2. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/README.md +13 -0
  3. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/setup.cfg +2 -2
  4. legend_daq2lh5-1.2.0a1/src/daq2lh5/_version.py +16 -0
  5. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/buffer_processor/buffer_processor.py +23 -5
  6. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/buffer_processor/lh5_buffer_processor.py +10 -14
  7. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/build_raw.py +11 -9
  8. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/data_decoder.py +31 -33
  9. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/fc/fc_config_decoder.py +1 -1
  10. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_digitizers.py +2 -2
  11. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_flashcam.py +1 -0
  12. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_packet.py +14 -7
  13. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_streamer.py +126 -14
  14. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/raw_buffer.py +8 -10
  15. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/PKG-INFO +17 -4
  16. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/SOURCES.txt +1 -0
  17. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/requires.txt +2 -2
  18. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/buffer_processor/test_buffer_processor.py +138 -180
  19. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/buffer_processor/test_lh5_buffer_processor.py +160 -139
  20. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/compass/test_compass_header_decoder.py +24 -32
  21. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/conftest.py +1 -1
  22. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/fc/test_fc_event_decoder.py +1 -1
  23. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/orca/conftest.py +1 -1
  24. legend_daq2lh5-1.2.0a1/tests/orca/test_orca_fc.py +40 -0
  25. legend_daq2lh5-1.2.0a1/tests/orca/test_orca_packet.py +41 -0
  26. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/test_build_raw.py +97 -12
  27. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/test_daq_to_raw.py +5 -5
  28. legend_daq2lh5-1.0.2/src/daq2lh5/_version.py +0 -8
  29. legend_daq2lh5-1.0.2/tests/orca/test_orca_packet.py +0 -2
  30. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/LICENSE +0 -0
  31. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/pyproject.toml +0 -0
  32. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/setup.py +0 -0
  33. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/__init__.py +0 -0
  34. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/buffer_processor/__init__.py +0 -0
  35. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/cli.py +0 -0
  36. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/compass/__init__.py +0 -0
  37. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/compass/compass_config_parser.py +0 -0
  38. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/compass/compass_event_decoder.py +0 -0
  39. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/compass/compass_header_decoder.py +0 -0
  40. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/compass/compass_streamer.py +0 -0
  41. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/data_streamer.py +0 -0
  42. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/fc/__init__.py +0 -0
  43. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/fc/fc_event_decoder.py +0 -0
  44. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/fc/fc_status_decoder.py +0 -0
  45. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/fc/fc_streamer.py +0 -0
  46. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/logging.py +0 -0
  47. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/__init__.py +0 -0
  48. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_base.py +0 -0
  49. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_header.py +0 -0
  50. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_header_decoder.py +0 -0
  51. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_run_decoder.py +0 -0
  52. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/dependency_links.txt +0 -0
  53. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/entry_points.txt +0 -0
  54. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/not-zip-safe +0 -0
  55. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/legend_daq2lh5.egg-info/top_level.txt +0 -0
  56. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/buffer_processor/test_buffer_processor_configs/buffer_processor_config.json +0 -0
  57. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/buffer_processor/test_buffer_processor_configs/lh5_buffer_processor_config.json +0 -0
  58. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/buffer_processor/test_buffer_processor_configs/raw_out_spec_no_proc.json +0 -0
  59. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/compass/conftest.py +0 -0
  60. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/compass/test_compass_event_decoder.py +0 -0
  61. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/compass/test_compass_streamer.py +0 -0
  62. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/configs/fc-out-spec.json +0 -0
  63. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/configs/orca-out-spec-cli.json +0 -0
  64. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/configs/orca-out-spec.json +0 -0
  65. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/fc/conftest.py +0 -0
  66. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/fc/test_fc_config_decoder.py +0 -0
  67. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/fc/test_fc_status_decoder.py +0 -0
  68. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/fc/test_fc_streamer.py +0 -0
  69. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/orca/test_or_run_decoder_for_run.py +0 -0
  70. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/test_cli.py +0 -0
  71. {legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/tests/test_raw_buffer.py +0 -0
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: legend_daq2lh5
- Version: 1.0.2
+ Version: 1.2.0a1
  Summary: Convert digitizer data to LH5
  Home-page: https://github.com/legend-exp/legend-daq2lh5
  Author: Jason Detwiler
@@ -26,10 +26,10 @@ Classifier: Topic :: Software Development
  Requires-Python: >=3.9
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: dspeed~=1.1
+ Requires-Dist: dspeed>=1.3.0a4
  Requires-Dist: h5py>=3.2.0
  Requires-Dist: hdf5plugin
- Requires-Dist: legend-pydataobj~=1.1
+ Requires-Dist: legend-pydataobj>=1.5.0a1
  Requires-Dist: numpy>=1.21
  Requires-Dist: pyfcutils
  Requires-Dist: tqdm>=4.27
@@ -60,3 +60,16 @@ Requires-Dist: pytest-cov; extra == "test"
  ![GitHub pull requests](https://img.shields.io/github/issues-pr/legend-exp/legend-daq2lh5?logo=github)
  ![License](https://img.shields.io/github/license/legend-exp/legend-daq2lh5)
  [![Read the Docs](https://img.shields.io/readthedocs/legend-daq2lh5?logo=readthedocs)](https://legend-daq2lh5.readthedocs.io)
+
+ JSON-configurable conversion of digitized data into
+ [LEGEND HDF5](https://legend-exp.github.io/legend-data-format-specs/dev/hdf5/),
+ with optional data pre-processing via [dspeed](https://dspeed.readthedocs.io)
+ and data compression via [legend-pydataobj](https://legend-pydataobj.readthedocs.io).
+
+ Currently supported DAQ data formats:
+ * [FlashCam](https://www.mizzi-computer.de/home)
+ * [CoMPASS](https://www.caen.it/products/compass)
+ * [ORCA](https://github.com/unc-enap/Orca), reading out:
+   - FlashCam
+   - [Struck SIS3302](https://www.struck.de/sis3302.htm)
+   - [Struck SIS3316](https://www.struck.de/sis3316.html)
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/README.md

@@ -10,3 +10,16 @@
  ![GitHub pull requests](https://img.shields.io/github/issues-pr/legend-exp/legend-daq2lh5?logo=github)
  ![License](https://img.shields.io/github/license/legend-exp/legend-daq2lh5)
  [![Read the Docs](https://img.shields.io/readthedocs/legend-daq2lh5?logo=readthedocs)](https://legend-daq2lh5.readthedocs.io)
+
+ JSON-configurable conversion of digitized data into
+ [LEGEND HDF5](https://legend-exp.github.io/legend-data-format-specs/dev/hdf5/),
+ with optional data pre-processing via [dspeed](https://dspeed.readthedocs.io)
+ and data compression via [legend-pydataobj](https://legend-pydataobj.readthedocs.io).
+
+ Currently supported DAQ data formats:
+ * [FlashCam](https://www.mizzi-computer.de/home)
+ * [CoMPASS](https://www.caen.it/products/compass)
+ * [ORCA](https://github.com/unc-enap/Orca), reading out:
+   - FlashCam
+   - [Struck SIS3302](https://www.struck.de/sis3302.htm)
+   - [Struck SIS3316](https://www.struck.de/sis3316.html)
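
For orientation, a minimal conversion using the package's Python API might look like the sketch below. This is an assumption-laden example: the input file name is hypothetical, and `out_spec` is given as a plain output file name rather than a full JSON output specification.

    # hedged sketch: convert a (hypothetical) FlashCam file to the LH5 raw tier
    from daq2lh5 import build_raw

    build_raw(
        in_stream="daq_run.fcio",  # hypothetical input file
        out_spec="raw_run.lh5",    # output LH5 file name
        overwrite=True,
    )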
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/setup.cfg

@@ -31,10 +31,10 @@ classifiers =
  [options]
  packages = find:
  install_requires =
-     dspeed~=1.1
+     dspeed>=1.3.0a4
      h5py>=3.2.0
      hdf5plugin
-     legend-pydataobj~=1.1
+     legend-pydataobj>=1.5.0a1
      numpy>=1.21
      pyfcutils
      tqdm>=4.27
legend_daq2lh5-1.2.0a1/src/daq2lh5/_version.py

@@ -0,0 +1,16 @@
+ # file generated by setuptools_scm
+ # don't change, don't track in version control
+ TYPE_CHECKING = False
+ if TYPE_CHECKING:
+     from typing import Tuple, Union
+     VERSION_TUPLE = Tuple[Union[int, str], ...]
+ else:
+     VERSION_TUPLE = object
+
+ version: str
+ __version__: str
+ __version_tuple__: VERSION_TUPLE
+ version_tuple: VERSION_TUPLE
+
+ __version__ = version = '1.2.0a1'
+ __version_tuple__ = version_tuple = (1, 2, 0)
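
The generated module exposes the usual setuptools_scm attributes, so the installed version can be queried at runtime:

    from daq2lh5._version import __version__, version_tuple

    print(__version__)    # '1.2.0a1'
    print(version_tuple)  # (1, 2, 0)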
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/buffer_processor/buffer_processor.py

@@ -48,10 +48,17 @@ def buffer_processor(rb: RawBuffer) -> Table:
      ``"dtype_conv": {"lgdo": "dtype" [, ...]}`` `(dict)`
        Casts `lgdo` to the requested data type.

-     ``"compression": { "lgdo": "codec_name" [, ...]}`` `(dict)`
+     ``"compression": {"lgdo": "codec_name" [, ...]}`` `(dict)`
        Updates the `compression` attribute of `lgdo` to `codec_name`. The
        attribute sets the compression algorithm applied by
-       :func:`~.lgdo.lh5_store.LH5Store.read_object` before writing `lgdo` to
+       :func:`~lgdo.lh5.store.LH5Store.read` before writing `lgdo` to
+       disk. Can be used to apply custom waveform compression algorithms from
+       :mod:`lgdo.compression`.
+
+     ``"hdf5_settings": {"lgdo": { <HDF5 settings> }}`` `(dict)`
+       Updates the `hdf5_settings` attribute of `lgdo`. The attribute sets the
+       HDF5 dataset options applied by
+       :func:`~lgdo.lh5.store.LH5Store.read` before writing `lgdo` to
        disk.

      Parameters
@@ -102,7 +109,9 @@ def buffer_processor(rb: RawBuffer) -> Table:
          ,}
          "compression": {
              "windowed_waveform/values": RadwareSigcompress(codec_shift=-32768),
-             "presummed_waveform/values": ULEB128ZigZagDiff(),
+         }
+         "hdf5_settings": {
+             "presummed_waveform/values": {"shuffle": True, "compression": "lzf"},
          }
      }
  },
@@ -143,7 +152,7 @@ def buffer_processor(rb: RawBuffer) -> Table:
      if "drop" in rb.proc_spec.keys():
          process_drop(rb, tmp_table)

-     # at last, assign compression attributes
+     # assign compression attributes
      if "compression" in rb.proc_spec.keys():
          for name, codec in rb.proc_spec["compression"].items():
              ptr = tmp_table
@@ -154,6 +163,15 @@ def buffer_processor(rb: RawBuffer) -> Table:
                  codec if isinstance(codec, WaveformCodec) else str2wfcodec(codec)
              )

+     # and HDF5 settings
+     if "hdf5_settings" in rb.proc_spec.keys():
+         for name, settings in rb.proc_spec["hdf5_settings"].items():
+             ptr = tmp_table
+             for word in name.split("/"):
+                 ptr = ptr[word]
+
+             ptr.attrs["hdf5_settings"] = settings
+
      return tmp_table


@@ -277,7 +295,7 @@ def process_windowed_t0(t0s: Array, dts: Array, start_index: int) -> Array:


  def process_dsp(rb: RawBuffer, tmp_table: Table) -> None:
-     r"""Run a DSP processing chain.
+     r"""Run a DSP processing chain with :mod:`dspeed`.

      Run a provided DSP config from `rb.proc_spec` using
      :func:`.dsp.build_processing_chain`, and add specified outputs to the
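
The new ``"hdf5_settings"`` block walks a slash-separated path into the output table and attaches the settings dict as an attribute on the addressed LGDO. A self-contained toy version of that logic, with a plain class standing in for lgdo types:

    # toy model of the path-walking attribute assignment shown above
    class FakeLGDO:
        def __init__(self):
            self.attrs = {}

    proc_spec = {
        "hdf5_settings": {
            "presummed_waveform/values": {"shuffle": True, "compression": "lzf"},
        }
    }
    tmp_table = {"presummed_waveform": {"values": FakeLGDO()}}

    for name, settings in proc_spec["hdf5_settings"].items():
        ptr = tmp_table
        for word in name.split("/"):
            ptr = ptr[word]
        ptr.attrs["hdf5_settings"] = settings

    print(tmp_table["presummed_waveform"]["values"].attrs)
    # {'hdf5_settings': {'shuffle': True, 'compression': 'lzf'}}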
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/buffer_processor/lh5_buffer_processor.py

@@ -6,7 +6,7 @@ import os

  import h5py
  import lgdo
- from lgdo import LH5Store
+ from lgdo import lh5

  from ..buffer_processor.buffer_processor import buffer_processor
  from ..raw_buffer import RawBuffer, RawBufferLibrary
@@ -54,14 +54,14 @@ def lh5_buffer_processor(
      """

      # Initialize the input raw file
-     raw_store = LH5Store()
+     raw_store = lh5.LH5Store()
      lh5_file = raw_store.gimme_file(lh5_raw_file_in, "r")
      if lh5_file is None:
          raise ValueError(f"input file not found: {lh5_raw_file_in}")
          return

      # List the groups in the raw file
-     lh5_groups = lgdo.ls(lh5_raw_file_in)
+     lh5_groups = lh5.ls(lh5_raw_file_in)
      lh5_tables = []

      # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw
@@ -69,21 +69,19 @@ def lh5_buffer_processor(
          # Make sure that the upper level key isn't a dataset
          if isinstance(lh5_file[tb], h5py.Dataset):
              lh5_tables.append(f"{tb}")
-         elif "raw" not in tb and lgdo.ls(lh5_file, f"{tb}/raw"):
+         elif "raw" not in tb and lh5.ls(lh5_file, f"{tb}/raw"):
              lh5_tables.append(f"{tb}/raw")
          # Look one layer deeper for a :meth:`lgdo.Table` if necessary
-         elif lgdo.ls(lh5_file, f"{tb}"):
+         elif lh5.ls(lh5_file, f"{tb}"):
              # Check to make sure that this isn't a table itself
-             maybe_table, _ = raw_store.read_object(f"{tb}", lh5_file)
+             maybe_table, _ = raw_store.read(f"{tb}", lh5_file)
              if isinstance(maybe_table, lgdo.Table):
                  lh5_tables.append(f"{tb}")
                  del maybe_table
              # otherwise, go deeper
              else:
-                 for sub_table in lgdo.ls(lh5_file, f"{tb}"):
-                     maybe_table, _ = raw_store.read_object(
-                         f"{tb}/{sub_table}", lh5_file
-                     )
+                 for sub_table in lh5.ls(lh5_file, f"{tb}"):
+                     maybe_table, _ = raw_store.read(f"{tb}/{sub_table}", lh5_file)
                      if isinstance(maybe_table, lgdo.Table):
                          lh5_tables.append(f"{tb}/{sub_table}")
                      del maybe_table
@@ -114,7 +112,7 @@ def lh5_buffer_processor(

      # Write everything in the raw file to the new file, check for proc_spec under either the group name, out_name, or the name
      for tb in lh5_tables:
-         lgdo_obj, _ = raw_store.read_object(f"{tb}", lh5_file)
+         lgdo_obj, _ = raw_store.read(f"{tb}", lh5_file)

          # Find the out_name.
          # If the top level group has an lgdo table in it, then the out_name is group
@@ -198,6 +196,4 @@ def lh5_buffer_processor(
              pass

          # Write the (possibly processed) lgdo_obj to a file
-         raw_store.write_object(
-             lgdo_obj, out_name, lh5_file=proc_file_name, group=group_name
-         )
+         raw_store.write(lgdo_obj, out_name, lh5_file=proc_file_name, group=group_name)
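
These changes track the legend-pydataobj API rename (`LH5Store.read_object`/`write_object` → `read`/`write`, `lgdo.ls` → `lh5.ls`). A minimal before/after sketch with placeholder file and group names:

    from lgdo import lh5

    store = lh5.LH5Store()
    # old: obj, n_rows = store.read_object("ch0/raw", "file.lh5")
    obj, n_rows = store.read("ch0/raw", "file.lh5")
    # old: store.write_object(obj, "raw", lh5_file="out.lh5", group="ch0")
    store.write(obj, "raw", lh5_file="out.lh5", group="ch0")
    # old: lgdo.ls("file.lh5")
    print(lh5.ls("file.lh5"))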
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/build_raw.py

@@ -6,10 +6,8 @@ import logging
  import os
  import time

- import hdf5plugin
- import lgdo
  import numpy as np
- from lgdo.lh5_store import DEFAULT_HDF5_COMPRESSION
+ from lgdo import lh5
  from tqdm.auto import tqdm

  from .compass.compass_streamer import CompassStreamer
@@ -28,7 +26,7 @@ def build_raw(
      n_max: int = np.inf,
      overwrite: bool = False,
      compass_config_file: str = None,
-     hdf5_compression: str | dict | hdf5plugin.filters.Filter = DEFAULT_HDF5_COMPRESSION,
+     hdf5_settings: dict[str, ...] = None,
      **kwargs,
  ) -> None:
      """Convert data into LEGEND HDF5 raw-tier format.
@@ -77,12 +75,16 @@ def build_raw(
          json-shorthand for the output specification (see
          :mod:`.compass.compass_event_decoder`).

-     hdf5_compression
-         forwarded to :meth:`~.lgdo.lh5_store.LH5Store.write_object`.
+     hdf5_settings
+         keyword arguments (as a dict) forwarded to
+         :meth:`lgdo.lh5.store.LH5Store.write`.

      **kwargs
          sent to :class:`.RawBufferLibrary` generation as `kw_dict` argument.
      """
+     if hdf5_settings is None:
+         hdf5_settings = {}
+
      # convert any environment variables in in_stream so that we can check for readability
      in_stream = os.path.expandvars(in_stream)
      # later: fix if in_stream is not a file
@@ -222,8 +224,8 @@ def build_raw(
          os.remove(out_file_glob[0])

      # Write header data
-     lh5_store = lgdo.LH5Store(keep_open=True)
-     write_to_lh5_and_clear(header_data, lh5_store, hdf5_compression=hdf5_compression)
+     lh5_store = lh5.LH5Store(keep_open=True)
+     write_to_lh5_and_clear(header_data, lh5_store, **hdf5_settings)

      # Now loop through the data
      n_bytes_last = streamer.n_bytes_read
@@ -248,7 +250,7 @@ def build_raw(
          if log.getEffectiveLevel() <= logging.INFO and n_max < np.inf:
              progress_bar.update(n_read)

-         write_to_lh5_and_clear(chunk_list, lh5_store, hdf5_compression=hdf5_compression)
+         write_to_lh5_and_clear(chunk_list, lh5_store, **hdf5_settings)

          if n_max <= 0:
              log.info(f"Wrote {n_max} rows, exiting...")
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/data_decoder.py

@@ -3,13 +3,10 @@ Base classes for decoding data into raw LGDO Tables or files
  """
  from __future__ import annotations

- from typing import Union
-
  import lgdo
  import numpy as np
- from lgdo import LH5Store
-
- LGDO = Union[lgdo.Scalar, lgdo.Struct, lgdo.Array, lgdo.VectorOfVectors]
+ from lgdo import LGDO
+ from lgdo.lh5 import LH5Store


  class DataDecoder:
@@ -18,37 +15,39 @@ class DataDecoder:
      Most decoders will repeatedly decode the same set of values from each
      packet. The values that get decoded need to be described by a dict stored
      in `self.decoded_values` that helps determine how to set up the buffers and
-     write them to file as :class:`~.lgdo.LGDO`\ s. :class:`~.lgdo.table.Table`\ s
-     are made whose columns correspond to the elements of `decoded_values`, and
-     packet data gets pushed to the end of the table one row at a time.
+     write them to file as :class:`~lgdo.types.lgdo.LGDO`\ s.
+     :class:`~lgdo.types.table.Table`\ s are made whose columns correspond to
+     the elements of `decoded_values`, and packet data gets pushed to the end of
+     the table one row at a time.

      Any key-value entry in a configuration dictionary attached to an element
      of `decoded_values` is typically interpreted as an attribute to be attached
      to the corresponding LGDO. This feature can be for example exploited to
-     specify the data compression algorithm used by
-     :meth:`~.lgdo.lh5_store.LH5Store.write_object` to write LGDOs to disk.
+     specify HDF5 dataset settings used by
+     :meth:`~lgdo.lh5.store.LH5Store.write` to write LGDOs to disk.

      For example ::

          from lgdo.compression import RadwareSigcompress

          FCEventDecoder.decoded_values = {
-             "packet_id": {"dtype": "uint32", "compression": "gzip"},
+             "packet_id": {"dtype": "uint32", "hdf5_settings": {"compression": "gzip"}},
              # ...
              "waveform": {
                  "dtype": "uint16",
                  "datatype": "waveform",
                  # ...
                  "compression": {"values": RadwareSigcompress(codec_shift=-32768)},
+                 "hdf5_settings": {"t0": {"compression": "lzf", shuffle: True}},
              }
          }

-     LGDOs corresponding to ``packet_id`` and ``waveform`` will have their
-     `compression` attribute set as ``"gzip"`` and
-     ``RadwareSigcompress(codec_shift=-32768)``, respectively. Before being
-     written to disk, they will compressed with the HDF5 built-in Gzip filter
-     and with the :class:`~.lgdo.compression.radware.RadwareSigcompress`
-     waveform compressor.
+     The LGDO corresponding to ``packet_id`` will have its `hdf5_settings`
+     attribute set as ``{"compression": "gzip"}``, while ``waveform.values``
+     will have its `compression` attribute set to
+     ``RadwareSigcompress(codec_shift=-32768)``. Before being written to disk,
+     they will be compressed with the HDF5 built-in Gzip filter and with the
+     :class:`~lgdo.compression.radware.RadwareSigcompress` waveform compressor.

      Examples
      --------
@@ -118,7 +117,7 @@ class DataDecoder:
          """Make an LGDO for this :class:`DataDecoder` to fill.

          This default version of this function allocates a
-         :class:`~.lgdo.table.Table` using the `decoded_values` for key. If a
+         :class:`~lgdo.types.table.Table` using the `decoded_values` for key. If a
          different type of LGDO object is required for this decoder, overload
          this function.

@@ -178,7 +177,10 @@ class DataDecoder:
                  dt = attrs.pop("dt")
                  dt_units = attrs.pop("dt_units")
                  wf_len = attrs.pop("wf_len")
-                 compression = attrs.pop("compression", None)
+                 settings = {
+                     "compression": attrs.pop("compression", {}),
+                     "hdf5_settings": attrs.pop("hdf5_settings", {}),
+                 }

                  wf_table = lgdo.WaveformTable(
                      size=size,
@@ -190,24 +192,20 @@ class DataDecoder:
                      dtype=dtype,
                      attrs=attrs,
                  )
-                 if compression is not None:
-                     if not isinstance(compression, dict):
-                         raise RuntimeError(
-                             "waveform/compression attribute must be a dictionary"
-                         )
-
-                     if "values" in compression:
-                         wf_table.values.attrs["compression"] = compression["values"]
-                     if "t0" in compression:
-                         wf_table.t0.attrs["compression"] = compression["t0"]
-                     if "dt" in compression:
-                         wf_table.dt.attrs["compression"] = compression["dt"]
+
+                 # attach compression/hdf5_settings to sub-fields
+                 for el in ["values", "t0", "dt"]:
+                     for settings_name in ("hdf5_settings", "compression"):
+                         if el in settings[settings_name]:
+                             wf_table[el].attrs[settings_name] = settings[settings_name][
+                                 el
+                             ]

                  data_obj.add_field(field, wf_table)
                  continue

              # Parse datatype for remaining lgdos
-             datatype, shape, elements = lgdo.lgdo_utils.parse_datatype(datatype)
+             datatype, shape, elements = lgdo.lh5.utils.parse_datatype(datatype)

              # ArrayOfEqualSizedArrays
              if datatype == "array_of_equalsized_arrays":
@@ -258,7 +256,7 @@ class DataDecoder:
          n_rows = self.garbage_table.loc
          if n_rows == 0:
              return
-         lh5_store.write_object(
+         lh5_store.write(
              self.garbage_table, "garbage", filename, group, n_rows=n_rows, append=True
          )
          self.garbage_table.clear()
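
The rewritten block in `make_lgdo` routes per-field `compression` and `hdf5_settings` entries onto the matching `WaveformTable` sub-fields (`values`, `t0`, `dt`). A runnable toy model of that routing, with plain dicts standing in for the LGDO objects:

    settings = {
        "compression": {"values": "RadwareSigcompress(codec_shift=-32768)"},
        "hdf5_settings": {"t0": {"compression": "lzf", "shuffle": True}},
    }
    wf_table = {el: {"attrs": {}} for el in ("values", "t0", "dt")}

    for el in ["values", "t0", "dt"]:
        for settings_name in ("hdf5_settings", "compression"):
            if el in settings[settings_name]:
                wf_table[el]["attrs"][settings_name] = settings[settings_name][el]

    print(wf_table["values"]["attrs"])  # waveform codec only
    print(wf_table["t0"]["attrs"])      # HDF5 dataset options only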
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/fc/fc_config_decoder.py

@@ -28,7 +28,7 @@ class FCConfigDecoder(DataDecoder):
      >>> decoder = FCConfigDecoder()
      >>> config = decoder.decode_config(fc)
      >>> type(config)
-     lgdo.struct.Struct
+     lgdo.types.struct.Struct
      """

      def __init__(self, *args, **kwargs) -> None:
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_digitizers.py

@@ -98,11 +98,11 @@ class ORSIS3302DecoderForEnergy(OrcaDecoder):
              sys.exit()
          self.decoded_values[ccc]["waveform"]["wf_len"] = trace_length

-     def get_key_lists(self) -> list[list[str]]:
+     def get_key_lists(self) -> list[list[int]]:
          key_lists = []
          for key in self.decoded_values.keys():
              key_lists.append([key])
-         return [key_lists]
+         return key_lists

      def get_decoded_values(self, key: int = None) -> dict[str, Any]:
          if key is None:
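
A quick check of the return-shape fix: with `decoded_values` keyed by two hypothetical channel keys, the method now yields a flat list of single-key lists instead of the over-nested `[[[0], [1]]]` that the old `return [key_lists]` produced:

    decoded_values = {0: {}, 1: {}}  # hypothetical channel keys

    key_lists = []
    for key in decoded_values.keys():
        key_lists.append([key])

    assert key_lists == [[0], [1]]  # new behavior: return key_lists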
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_flashcam.py

@@ -326,6 +326,7 @@ class ORFlashCamListenerStatusDecoder(OrcaDecoder):
      def decode_packet(
          self, packet: OrcaPacket, packet_id: int, rbl: RawBufferLibrary
      ) -> bool:
+         return False  # FIXME: skip decoding until pyfcutils is updated
          """Decode the ORCA FlashCam Status packet."""
          # aliases for brevity
          if len(rbl) != 1:
{legend_daq2lh5-1.0.2 → legend_daq2lh5-1.2.0a1}/src/daq2lh5/orca/orca_packet.py

@@ -47,11 +47,9 @@ def hex_dump(
      as_short: bool = False,
      id_dict: dict = None,
      use_logging: bool = True,
+     return_output=False,
  ) -> None:
-     dump_cmd = print  # noqa: T202
-     if use_logging:
-         dump_cmd = log.debug
-
+     output = []
      data_id = get_data_id(packet, shift=shift_data_id)
      n_words = get_n_words(packet)
      if id_dict is not None:
@@ -62,9 +60,9 @@ def hex_dump(
      else:
          heading = f"data ID = {data_id}"
      if print_n_words:
-         dump_cmd(f"{heading}: {n_words} words")
+         output.append(f"{heading}: {n_words} words")
      else:
-         dump_cmd(f"{heading}:")
+         output.append(f"{heading}:")
      n_to_print = int(np.minimum(n_words, max_words))
      pad = int(np.ceil(np.log10(n_to_print)))
      for i in range(n_to_print):
@@ -76,4 +74,13 @@ def hex_dump(
              line += f" {packet[i]}"
          if as_short:
              line += f" {np.frombuffer(packet[i:i+1].tobytes(), dtype='uint16')}"
-         dump_cmd(line)
+         output.append(line)
+
+     dump_cmd = print  # noqa: T202
+     if use_logging:
+         dump_cmd = log.debug
+     for line in output:
+         dump_cmd(line)
+
+     if return_output:
+         return output
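
A usage sketch of the reworked `hex_dump`: lines are buffered first, then printed or logged, and optionally returned when `return_output=True`. The packet below is a fabricated five-word array, assuming the first word encodes the data ID in its upper bits and the word count in its lower bits, as in ORCA packets:

    import numpy as np
    from daq2lh5.orca import orca_packet

    # fake packet: data ID in the upper bits, n_words = 5 in the lower bits
    packet = np.array([0x20000005, 1, 2, 3, 4], dtype="uint32")
    lines = orca_packet.hex_dump(packet, use_logging=False, return_output=True)
    print(lines[0])  # heading, e.g. "data ID = 2048:"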