legend-daq2lh5 1.2.1__tar.gz → 1.3.0__tar.gz

Files changed (77)
  1. {legend_daq2lh5-1.2.1/src/legend_daq2lh5.egg-info → legend_daq2lh5-1.3.0}/PKG-INFO +3 -2
  2. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/setup.cfg +1 -0
  3. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/_version.py +2 -2
  4. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/build_raw.py +2 -1
  5. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/data_decoder.py +1 -1
  6. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/data_streamer.py +1 -1
  7. legend_daq2lh5-1.3.0/src/daq2lh5/llama/llama_base.py +14 -0
  8. legend_daq2lh5-1.3.0/src/daq2lh5/llama/llama_event_decoder.py +328 -0
  9. legend_daq2lh5-1.3.0/src/daq2lh5/llama/llama_header_decoder.py +149 -0
  10. legend_daq2lh5-1.3.0/src/daq2lh5/llama/llama_streamer.py +156 -0
  11. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/logging.py +2 -0
  12. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0/src/legend_daq2lh5.egg-info}/PKG-INFO +3 -2
  13. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/legend_daq2lh5.egg-info/SOURCES.txt +8 -0
  14. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/legend_daq2lh5.egg-info/requires.txt +1 -0
  15. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/conftest.py +1 -1
  16. legend_daq2lh5-1.3.0/tests/llama/conftest.py +7 -0
  17. legend_daq2lh5-1.3.0/tests/llama/test_llama_event_decoder.py +127 -0
  18. legend_daq2lh5-1.3.0/tests/llama/test_llama_header_decoder.py +16 -0
  19. legend_daq2lh5-1.3.0/tests/llama/test_llama_streamer.py +36 -0
  20. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/LICENSE +0 -0
  21. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/README.md +0 -0
  22. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/pyproject.toml +0 -0
  23. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/setup.py +0 -0
  24. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/__init__.py +0 -0
  25. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/buffer_processor/__init__.py +0 -0
  26. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/buffer_processor/buffer_processor.py +0 -0
  27. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/buffer_processor/lh5_buffer_processor.py +0 -0
  28. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/cli.py +0 -0
  29. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/compass/__init__.py +0 -0
  30. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/compass/compass_config_parser.py +0 -0
  31. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/compass/compass_event_decoder.py +0 -0
  32. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/compass/compass_header_decoder.py +0 -0
  33. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/compass/compass_streamer.py +0 -0
  34. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/fc/__init__.py +0 -0
  35. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/fc/fc_config_decoder.py +0 -0
  36. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/fc/fc_event_decoder.py +0 -0
  37. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/fc/fc_status_decoder.py +0 -0
  38. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/fc/fc_streamer.py +0 -0
  39. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/__init__.py +0 -0
  40. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_base.py +0 -0
  41. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_digitizers.py +0 -0
  42. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_flashcam.py +0 -0
  43. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_header.py +0 -0
  44. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_header_decoder.py +0 -0
  45. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_packet.py +0 -0
  46. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_run_decoder.py +0 -0
  47. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/orca/orca_streamer.py +0 -0
  48. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/daq2lh5/raw_buffer.py +0 -0
  49. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/legend_daq2lh5.egg-info/dependency_links.txt +0 -0
  50. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/legend_daq2lh5.egg-info/entry_points.txt +0 -0
  51. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/legend_daq2lh5.egg-info/not-zip-safe +0 -0
  52. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/src/legend_daq2lh5.egg-info/top_level.txt +0 -0
  53. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/buffer_processor/test_buffer_processor.py +0 -0
  54. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/buffer_processor/test_buffer_processor_configs/buffer_processor_config.json +0 -0
  55. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/buffer_processor/test_buffer_processor_configs/lh5_buffer_processor_config.json +0 -0
  56. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/buffer_processor/test_buffer_processor_configs/raw_out_spec_no_proc.json +0 -0
  57. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/buffer_processor/test_lh5_buffer_processor.py +0 -0
  58. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/compass/conftest.py +0 -0
  59. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/compass/test_compass_event_decoder.py +0 -0
  60. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/compass/test_compass_header_decoder.py +0 -0
  61. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/compass/test_compass_streamer.py +0 -0
  62. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/configs/fc-out-spec.json +0 -0
  63. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/configs/orca-out-spec-cli.json +0 -0
  64. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/configs/orca-out-spec.json +0 -0
  65. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/fc/conftest.py +0 -0
  66. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/fc/test_fc_config_decoder.py +0 -0
  67. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/fc/test_fc_event_decoder.py +0 -0
  68. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/fc/test_fc_status_decoder.py +0 -0
  69. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/fc/test_fc_streamer.py +0 -0
  70. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/orca/conftest.py +0 -0
  71. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/orca/test_or_run_decoder_for_run.py +0 -0
  72. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/orca/test_orca_fc.py +0 -0
  73. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/orca/test_orca_packet.py +0 -0
  74. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/test_build_raw.py +0 -0
  75. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/test_cli.py +0 -0
  76. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/test_daq_to_raw.py +0 -0
  77. {legend_daq2lh5-1.2.1 → legend_daq2lh5-1.3.0}/tests/test_raw_buffer.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: legend_daq2lh5
- Version: 1.2.1
+ Version: 1.3.0
  Summary: Convert digitizer data to LH5
  Home-page: https://github.com/legend-exp/legend-daq2lh5
  Author: Jason Detwiler
@@ -32,6 +32,7 @@ Requires-Dist: hdf5plugin
  Requires-Dist: legend-pydataobj>=1.6
  Requires-Dist: numpy>=1.21
  Requires-Dist: pyfcutils
+ Requires-Dist: pyyaml
  Requires-Dist: tqdm>=4.27
  Requires-Dist: xmltodict
  Provides-Extra: all
setup.cfg

@@ -37,6 +37,7 @@ install_requires =
      legend-pydataobj>=1.6
      numpy>=1.21
      pyfcutils
+     pyyaml
      tqdm>=4.27
      xmltodict
  python_requires = >=3.9
src/daq2lh5/_version.py

@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
 
- __version__ = version = '1.2.1'
- __version_tuple__ = version_tuple = (1, 2, 1)
+ __version__ = version = '1.3.0'
+ __version_tuple__ = version_tuple = (1, 3, 0)
src/daq2lh5/build_raw.py

@@ -12,6 +12,7 @@ from tqdm.auto import tqdm
 
  from .compass.compass_streamer import CompassStreamer
  from .fc.fc_streamer import FCStreamer
+ from .llama.llama_streamer import LLAMAStreamer
  from .orca.orca_streamer import OrcaStreamer
  from .raw_buffer import RawBufferLibrary, write_to_lh5_and_clear
 
@@ -180,7 +181,7 @@ def build_raw(
      elif in_stream_type == "Compass":
          streamer = CompassStreamer(compass_config_file)
      elif in_stream_type == "LlamaDaq":
-         raise NotImplementedError("LlamaDaq streaming not yet implemented")
+         streamer = LLAMAStreamer()
      elif in_stream_type == "MGDO":
          raise NotImplementedError("MGDO streaming not yet implemented")
      else:
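
Note: with this change, `build_raw` accepts llamaDAQ files directly. A minimal usage sketch (the input and output file names are illustrative, not from this release):

```python
from daq2lh5 import build_raw

# convert a llamaDAQ SIS3316 file to LH5; "LlamaDaq" selects the new LLAMAStreamer
build_raw(
    "20241218-150158-pulser.bin",               # llamaDAQ binary input (example name)
    in_stream_type="LlamaDaq",
    out_spec="20241218-150158-pulser_raw.lh5",  # LH5 output file (example name)
)
```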
src/daq2lh5/data_decoder.py

@@ -74,7 +74,7 @@ class DataDecoder:
      def __init__(
          self, garbage_length: int = 256, packet_size_guess: int = 1024
      ) -> None:
-         self.garbage_table = lgdo.Table(garbage_length)
+         self.garbage_table = lgdo.Table(size=garbage_length)
          shape_guess = (garbage_length, packet_size_guess)
          self.garbage_table.add_field(
              "packets", lgdo.VectorOfVectors(shape_guess=shape_guess, dtype="uint8")
src/daq2lh5/data_streamer.py

@@ -350,7 +350,7 @@ class DataStreamer(ABC):
              if len(key_list) == 1:
                  this_name = f"{dec_key}_{key_list[0]}"
              else:
-                 this_name = f"{dec_key}_{ii}"
+                 this_name = f"{dec_key}_{ii}"  # this can cause a name clash, e.g. for [[1], [2, 3]] ...
              rb = RawBuffer(
                  key_list=key_list, out_stream=out_stream, out_name=this_name
              )
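
Note: a standalone sketch of the clash the added comment warns about (the decoder key `"dec"` and the key lists are made up for illustration):

```python
# mirrors the naming logic above for key_lists = [[1], [2, 3]]
names = []
for ii, key_list in enumerate([[1], [2, 3]]):
    names.append(f"dec_{key_list[0]}" if len(key_list) == 1 else f"dec_{ii}")
print(names)  # ['dec_1', 'dec_1'] -- two buffers end up with the same out_name
```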
src/daq2lh5/llama/llama_base.py (new file)

@@ -0,0 +1,14 @@
+ """
+ General utilities for llamaDAQ data decoding
+ """
+
+ from __future__ import annotations
+
+ import logging
+
+ log = logging.getLogger(__name__)
+
+
+ # build a unique flat identifier for fadc and channel together
+ def join_fadcid_chid(fadcid: int, chid: int) -> int:
+     return (fadcid << 4) + chid
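
Note: the packing reserves the low 4 bits for the channel, so it is unambiguous only while chid < 16 (the SIS3316 has 16 channels per module). A quick illustration (the unpacking is shown for clarity only; the package defines just the forward direction):

```python
from daq2lh5.llama.llama_base import join_fadcid_chid

fch_id = join_fadcid_chid(2, 5)           # (2 << 4) + 5 = 37
fadcid, chid = fch_id >> 4, fch_id & 0xF  # hypothetical inverse
assert (fadcid, chid) == (2, 5)
```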
src/daq2lh5/llama/llama_event_decoder.py (new file)

@@ -0,0 +1,328 @@
+ from __future__ import annotations
+
+ import copy
+ import logging
+ from typing import Any
+
+ import lgdo
+ import numpy as np
+
+ from ..data_decoder import DataDecoder
+ from .llama_header_decoder import LLAMA_Channel_Configs_t
+
+ log = logging.getLogger(__name__)
+
+ # put decoded values here
+ llama_decoded_values_template = {
+     # packet index in file
+     "packet_id": {"dtype": "uint32"},
+     # combined index of FADC and channel
+     "fch_id": {"dtype": "uint32"},
+     # time since epoch
+     "timestamp": {"dtype": "uint64", "units": "clock_ticks"},
+     "status_flag": {"dtype": "uint32"},
+     # waveform data --> not always present
+     # "waveform": {
+     #     "dtype": "uint16",
+     #     "datatype": "waveform",
+     #     "wf_len": 65532,  # max value. override this before initializing buffers to save RAM
+     #     "dt": 8,  # override if a different clock rate is used
+     #     "dt_units": "ns",
+     #     "t0_units": "ns",
+     # }
+ }
+ # """Default llamaDAQ SIS3316 Event decoded values.
+ #
+ # Warning
+ # -------
+ # This configuration will be dynamically modified by the decoder at runtime.
+ # """
+
+
+ def check_dict_spec_equal(
+     d1: dict[str, Any], d2: dict[str, Any], specs: list[str]
+ ) -> bool:
+     for spec in specs:
+         if d1.get(spec) != d2.get(spec):
+             return False
+     return True
+
+
+ class LLAMAEventDecoder(DataDecoder):
+     """Decode llamaDAQ SIS3316 digitizer event data."""
+
+     def __init__(self, *args, **kwargs) -> None:
+         # these are read for every event (decode_event)
+         # one set of settings per fch, since settings can be different per channel group
+         self.decoded_values: dict[int, dict[str, Any]] = {}
+         super().__init__(*args, **kwargs)
+         self.skipped_channels = {}
+         self.channel_configs = None
+         self.dt_raw: dict[int, float] = (
+             {}
+         )  # need to buffer that to update t0 for avg waveforms per event
+         self.t0_raw: dict[int, float] = (
+             {}
+         )  # store when receiving channel configs and use for each waveform
+         self.t0_avg_const: dict[int, float] = (
+             {}
+         )  # constant part of the t0 of averaged waveforms
+
+     def set_channel_configs(self, channel_configs: LLAMA_Channel_Configs_t) -> None:
+         """Receive channel configurations from llama_streamer after the header was parsed.
+         Adapt the self.decoded_values dict based on the read configuration.
+         """
+         self.channel_configs = channel_configs
+         for fch, config in self.channel_configs.items():
+             self.decoded_values[fch] = copy.deepcopy(llama_decoded_values_template)
+             format_bits = config["format_bits"]
+             sample_clock_freq = config["sample_freq"]
+             avg_mode = config["avg_mode"]
+             dt_raw: float = 1 / sample_clock_freq * 1000
+             dt_avg: float = dt_raw * (1 << (avg_mode + 1))
+             # t0 generation functions from llamaDAQ -> EventConfig.hh
+             t0_raw: float = (
+                 float(config["sample_start_index"]) - float(config["sample_pretrigger"])
+             ) * dt_raw  # location of the trigger is at t = 0
+             t0_avg: float = (
+                 -float(config["sample_pretrigger"]) * float(dt_raw)
+                 - float(config["avg_sample_pretrigger"]) * dt_avg
+             )  # additional offset to be added independently for every event
+             self.dt_raw[fch] = dt_raw
+             self.t0_raw[fch] = t0_raw
+             self.t0_avg_const[fch] = t0_avg
+             if config["sample_length"] > 0:
+                 self.__add_waveform(
+                     self.decoded_values[fch], False, config["sample_length"], dt_raw
+                 )
+             if config["avg_sample_length"] > 0 and avg_mode > 0:
+                 self.__add_waveform(
+                     self.decoded_values[fch], True, config["avg_sample_length"], dt_avg
+                 )
+             if format_bits & 0x01:
+                 self.__add_accum1till6(self.decoded_values[fch])
+             if format_bits & 0x02:
+                 self.__add_accum7and8(self.decoded_values[fch])
+             if format_bits & 0x04:
+                 self.__add_maw(self.decoded_values[fch])
+             if format_bits & 0x08:
+                 self.__add_energy(self.decoded_values[fch])
+
+     def get_key_lists(self) -> list[list[int | str]]:
+         """
+         Return a list of lists of keys available for this decoder.
+         Each inner list holds the fch_ids which share the exact same settings
+         (trace lengths, avg mode, ...), so they can end up in the same buffer.
+         """
+         if self.channel_configs is None:
+             raise RuntimeError(
+                 "Identification of key lists requires channel configs to be set!"
+             )
+
+         params_for_equality = ["sample_length", "avg_sample_length", "avg_mode"]
+
+         def check_equal(c1, c2):
+             return check_dict_spec_equal(c1, c2, params_for_equality)
+
+         kll: list[list[int]] = []  # key-list-list
+         for fch_id, config in self.channel_configs.items():
+             for kl in kll:
+                 # use the 1st entry of a list of lists as "archetype"
+                 if check_equal(config, self.channel_configs[kl[0]]):
+                     kl.append(fch_id)
+                     break
+             else:
+                 kll.append([fch_id])
+         log.debug(f"key lists are: {repr(kll)}")
+         return kll
+
+     # copied from ORCA SIS3316
+     def get_decoded_values(self, key: int = None) -> dict[str, Any]:
+         if key is None:
+             raise RuntimeError("Key is None!")
+             dec_vals_list = self.decoded_values.values()
+             if len(dec_vals_list) == 0:
+                 raise RuntimeError("decoded_values not built yet!")
+
+             return dec_vals_list  # Get first thing we find
+         else:
+             dec_vals_list = self.decoded_values[key]
+             return dec_vals_list
+
+     def decode_packet(
+         self,
+         packet: bytes,
+         evt_rbkd: lgdo.Table | dict[int, lgdo.Table],
+         packet_id: int,
+         fch_id: int,
+         # header: lgdo.Table | dict[int, lgdo.Table]
+     ) -> bool:
+         """
+         Decode a single packet, which is a single SIS3316 event, as specified in the Struck manual.
+         A single packet corresponds to a single event and channel, and has a unique timestamp.
+         Packets of different channel groups can vary in size!
+         """
+
+         # check if this fch_id should be recorded.
+         if fch_id not in evt_rbkd:
+             if fch_id not in self.skipped_channels:
+                 self.skipped_channels[fch_id] = 0
+                 log.info(f"Skipping channel: {fch_id}")
+                 log.debug(f"evt_rbkd: {evt_rbkd.keys()}")
+             self.skipped_channels[fch_id] += 1
+             return False
+
+         tbl = evt_rbkd[fch_id].lgdo
+         ii = evt_rbkd[fch_id].loc
+
+         # parse the raw event data into numpy arrays of 16 and 32 bit ints
+         evt_data_32 = np.frombuffer(packet, dtype=np.uint32)
+         evt_data_16 = np.frombuffer(packet, dtype=np.uint16)
+
+         # and these big binaries, you just don't include them
+         # fch_id = (evt_data_32[0] >> 4) & 0x00000fff  --> read earlier by the streamer, since we need the size for chopping the event out of the stream
+         timestamp = ((evt_data_32[0] & 0xFFFF0000) << 16) + evt_data_32[1]
+         format_bits = (evt_data_32[0]) & 0x0000000F
+         tbl["fch_id"].nda[ii] = fch_id
+         tbl["packet_id"].nda[ii] = packet_id
+         tbl["timestamp"].nda[ii] = timestamp
+         offset = 2
+         if format_bits & 0x1:
+             tbl["peakHighValue"].nda[ii] = evt_data_16[4]
+             tbl["peakHighIndex"].nda[ii] = evt_data_16[5]
+             tbl["information"].nda[ii] = (evt_data_32[offset + 1] >> 24) & 0xFF
+             tbl["accSum1"].nda[ii] = evt_data_32[offset + 2]
+             tbl["accSum2"].nda[ii] = evt_data_32[offset + 3]
+             tbl["accSum3"].nda[ii] = evt_data_32[offset + 4]
+             tbl["accSum4"].nda[ii] = evt_data_32[offset + 5]
+             tbl["accSum5"].nda[ii] = evt_data_32[offset + 6]
+             tbl["accSum6"].nda[ii] = evt_data_32[offset + 7]
+             offset += 7
+         if format_bits & 0x2:
+             tbl["accSum7"].nda[ii] = evt_data_32[offset + 0]
+             tbl["accSum8"].nda[ii] = evt_data_32[offset + 1]
+             offset += 2
+         if format_bits & 0x4:
+             tbl["mawMax"].nda[ii] = evt_data_32[offset + 0]
+             tbl["mawBefore"].nda[ii] = evt_data_32[offset + 1]
+             tbl["mawAfter"].nda[ii] = evt_data_32[offset + 2]
+             offset += 3
+         if format_bits & 0x8:
+             tbl["startEnergy"].nda[ii] = evt_data_32[offset + 0]
+             tbl["maxEnergy"].nda[ii] = evt_data_32[offset + 1]
+             offset += 2
+
+         raw_length_32 = (evt_data_32[offset + 0]) & 0x03FFFFFF
+         tbl["status_flag"].nda[ii] = (
+             (evt_data_32[offset + 0]) & 0x04000000
+         ) >> 26  # bit 26
+         maw_test_flag = ((evt_data_32[offset + 0]) & 0x08000000) >> 27  # bit 27
+         avg_data_coming = False
+         if evt_data_32[offset + 0] & 0xF0000000 == 0xE0000000:
+             avg_data_coming = False
+         elif evt_data_32[offset + 0] & 0xF0000000 == 0xA0000000:
+             avg_data_coming = True
+         else:
+             raise RuntimeError("Data corruption 1!")
+         offset += 1
+         avg_length_32 = 0
+         if avg_data_coming:
+             avg_count_status = (
+                 evt_data_32[offset + 0] & 0x00FF0000
+             ) >> 16  # bits 23 - 16
+             avg_length_32 = evt_data_32[offset + 0] & 0x0000FFFF
+             if evt_data_32[offset + 0] & 0xF0000000 != 0xE0000000:
+                 raise RuntimeError("Data corruption 2!")
+             offset += 1
+
+         # --- now the offset points to the raw wf data ---
+
+         if maw_test_flag:
+             raise RuntimeError("Cannot handle data with MAW test data!")
+
+         # compute expected and actual array dimensions
+         raw_length_16 = 2 * raw_length_32
+         avg_length_16 = 2 * avg_length_32
+         header_length_16 = offset * 2
+         expected_wf_length = len(evt_data_16) - header_length_16
+
+         # error check: waveform size must match expectations
+         if raw_length_16 + avg_length_16 != expected_wf_length:
+             raise RuntimeError(
+                 f"Waveform sizes {raw_length_16} (raw) and {avg_length_16} (avg) don't match expected size {expected_wf_length}."
+             )
+
+         # store waveform if available:
+         if raw_length_16 > 0:
+             tbl["waveform"]["values"].nda[ii] = evt_data_16[
+                 offset * 2 : offset * 2 + raw_length_16
+             ]
+             offset += raw_length_32
+             tbl["waveform"]["t0"].nda[ii] = self.t0_raw[fch_id]
+
+         # store pre-averaged (avg) waveform if available:
+         if avg_length_16 > 0:
+             tbl["avgwaveform"]["values"].nda[ii] = evt_data_16[
+                 offset * 2 : offset * 2 + avg_length_16
+             ]
+             offset += avg_length_32
+             # need to update avg waveform t0 based on the offset I get per event
+             tbl["avgwaveform"]["t0"].nda[ii] = (
+                 self.t0_avg_const[fch_id]
+                 + float(avg_count_status) * self.dt_raw[fch_id]
+             )
+
+         if offset != len(evt_data_32):
+             raise RuntimeError("I messed up...")
+
+         evt_rbkd[fch_id].loc += 1
+
+         return evt_rbkd[fch_id].is_full()
+
+     def __add_waveform(
+         self,
+         decoded_values_fch: dict[str, Any],
+         is_avg: bool,
+         max_samples: int,
+         dt: float,
+     ) -> None:
+         """
+         Averaged samples are available from the 125 MHz (16 bit) variant of the SIS3316 and can be stored independently of raw samples.
+         I use waveform for raw samples (dt from the clock itself) and avgwaveform for averaged samples (dt from clock * avg number).
+
+         GERDA used to have the low-frequency (waveform) & the high-frequency (aux waveform); here: LF = avgwaveform & HF = waveform.
+         """
+         name: str = "avgwaveform" if is_avg else "waveform"
+         decoded_values_fch[name] = {
+             "dtype": "uint16",
+             "datatype": "waveform",
+             "wf_len": max_samples,  # max value. override this before initializing buffers to save RAM
+             "dt": dt,  # the sample pitch (inverse of clock speed)
+             # "t0": t0,  # adding t0 here does not work
+             "dt_units": "ns",
+             "t0_units": "ns",
+         }
+
+     def __add_accum1till6(self, decoded_values_fch: dict[str, Any]) -> None:
+         decoded_values_fch["peakHighValue"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["peakHighIndex"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["information"] = {"dtype": "uint32"}
+         decoded_values_fch["accSum1"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["accSum2"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["accSum3"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["accSum4"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["accSum5"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["accSum6"] = {"dtype": "uint32", "units": "adc"}
+
+     def __add_accum7and8(self, decoded_values_fch: dict[str, Any]) -> None:
+         decoded_values_fch["accSum7"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["accSum8"] = {"dtype": "uint32", "units": "adc"}
+
+     def __add_maw(self, decoded_values_fch: dict[str, Any]) -> None:
+         decoded_values_fch["mawMax"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["mawBefore"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["mawAfter"] = {"dtype": "uint32", "units": "adc"}
+
+     def __add_energy(self, decoded_values_fch: dict[str, Any]) -> None:
+         decoded_values_fch["startEnergy"] = {"dtype": "uint32", "units": "adc"}
+         decoded_values_fch["maxEnergy"] = {"dtype": "uint32", "units": "adc"}
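
Note: for orientation, a standalone sketch of what `decode_packet` reads from the first two 32-bit words of an event (the example words are invented; the masks and shifts mirror the code above):

```python
import numpy as np

words = np.array([0x1234ABC1, 0x00005678], dtype=np.uint32)  # made-up event header words

fch_id = (int(words[0]) >> 4) & 0x00000FFF  # 0xABC; read by the streamer to size the packet
format_bits = int(words[0]) & 0x0000000F    # 0x1: accumulator block 1-6 follows
timestamp = ((int(words[0]) & 0xFFFF0000) << 16) + int(words[1])  # 0x123400005678
```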
src/daq2lh5/llama/llama_header_decoder.py (new file)

@@ -0,0 +1,149 @@
+ from __future__ import annotations
+
+ import io
+ import logging
+ from typing import Any, Dict
+
+ import lgdo
+ import numpy as np
+
+ from ..data_decoder import DataDecoder
+ from .llama_base import join_fadcid_chid
+
+ log = logging.getLogger(__name__)
+
+ LLAMA_Channel_Configs_t = Dict[int, Dict[str, Any]]
+
+
+ class LLAMAHeaderDecoder(DataDecoder):  # DataDecoder currently unused
+     """
+     Decode llamaDAQ header data. Includes the file header as well as all available ("open") channel configurations.
+     """
+
+     @staticmethod
+     def magic_bytes() -> int:
+         return 0x4972414C
+
+     def __init__(self, *args, **kwargs) -> None:
+         super().__init__(*args, **kwargs)
+         self.config = lgdo.Struct()
+         self.channel_configs = None
+
+     def decode_header(self, f_in: io.BufferedReader) -> tuple[lgdo.Struct, int]:
+         n_bytes_read = 0
+
+         f_in.seek(0)  # should be there anyhow, but re-set if not
+         header = f_in.read(16)  # read 16 bytes
+         n_bytes_read += 16
+         evt_data_32 = np.frombuffer(header, dtype=np.uint32)
+         evt_data_16 = np.frombuffer(header, dtype=np.uint16)
+
+         # line 0: magic bytes
+         magic = evt_data_32[0]
+         # print(hex(magic))
+         if magic == self.magic_bytes():
+             log.info("Read in file as llamaDAQ-SIS3316, magic bytes correct.")
+         else:
+             log.error("Magic bytes not matching for llamaDAQ file!")
+             raise RuntimeError("wrong file type")
+
+         self.version_major = evt_data_16[4]
+         self.version_minor = evt_data_16[3]
+         self.version_patch = evt_data_16[2]
+         self.length_econf = evt_data_16[5]
+         self.number_chOpen = evt_data_32[3]
+
+         log.debug(
+             f"File version: {self.version_major}.{self.version_minor}.{self.version_patch}"
+         )
+         log.debug(
+             f"{self.number_chOpen} channels open, each config {self.length_econf} bytes long"
+         )
+
+         n_bytes_read += self.__decode_channel_configs(f_in)
+
+         # print(self.channel_configs[0]["MAW3_offset"])
+
+         # assemble LGDO struct:
+         self.config.add_field("version_major", lgdo.Scalar(self.version_major))
+         self.config.add_field("version_minor", lgdo.Scalar(self.version_minor))
+         self.config.add_field("version_patch", lgdo.Scalar(self.version_patch))
+         self.config.add_field("length_econf", lgdo.Scalar(self.length_econf))
+         self.config.add_field("number_chOpen", lgdo.Scalar(self.number_chOpen))
+
+         for fch_id, fch_content in self.channel_configs.items():
+             fch_lgdo = lgdo.Struct()
+             for key, value in fch_content.items():
+                 fch_lgdo.add_field(key, lgdo.Scalar(value))
+             self.config.add_field(f"fch_{fch_id:02d}", fch_lgdo)
+
+         return self.config, n_bytes_read
+
+     # override from DataDecoder
+     def make_lgdo(self, key: int = None, size: int = None) -> lgdo.Struct:
+         return self.config
+
+     def get_channel_configs(self) -> LLAMA_Channel_Configs_t:
+         return self.channel_configs
+
+     def __decode_channel_configs(self, f_in: io.BufferedReader) -> int:
+         """
+         Read the metadata from the beginning of the file (the "channel configuration" part, directly after the file header).
+         Create a dictionary of the metadata for each FADC/channel combination, which is returned.
+
+         FADC-ID and channel-ID are combined into a single id for flattening:
+         (fadcid << 4) + chid
+
+         Returns the number of bytes read.
+         """
+         # f_in.seek(16)  # should be after the file header anyhow, but re-set if not
+         n_bytes_read = 0
+         self.channel_configs = {}
+
+         if self.length_econf != 88:
+             raise RuntimeError("Invalid channel configuration format")
+
+         for _i in range(0, self.number_chOpen):
+             # print("reading in channel config {}".format(i))
+
+             channel = f_in.read(self.length_econf)
+             n_bytes_read += self.length_econf
+             ch_dpf = channel[16:32]
+             evt_data_32 = np.frombuffer(channel, dtype=np.uint32)
+             evt_data_dpf = np.frombuffer(ch_dpf, dtype=np.float64)
+
+             fadc_index = evt_data_32[0]
+             channel_index = evt_data_32[1]
+             fch_id = join_fadcid_chid(fadc_index, channel_index)
+
+             if fch_id in self.channel_configs:
+                 raise RuntimeError(
+                     f"duplicate channel configuration in file: FADCID: {fadc_index}, ChannelID: {channel_index}"
+                 )
+             else:
+                 self.channel_configs[fch_id] = {}
+
+             self.channel_configs[fch_id]["14BitFlag"] = evt_data_32[2] & 0x00000001
+             if evt_data_32[2] & 0x00000002 == 0:
+                 log.warning("Channel in configuration marked as non-open!")
+             self.channel_configs[fch_id]["ADC_offset"] = evt_data_32[3]
+             self.channel_configs[fch_id]["sample_freq"] = evt_data_dpf[
+                 0
+             ]  # 64 bit float
+             self.channel_configs[fch_id]["gain"] = evt_data_dpf[1]
+             self.channel_configs[fch_id]["format_bits"] = evt_data_32[8]
+             self.channel_configs[fch_id]["sample_start_index"] = evt_data_32[9]
+             self.channel_configs[fch_id]["sample_pretrigger"] = evt_data_32[10]
+             self.channel_configs[fch_id]["avg_sample_pretrigger"] = evt_data_32[11]
+             self.channel_configs[fch_id]["avg_mode"] = evt_data_32[12]
+             self.channel_configs[fch_id]["sample_length"] = evt_data_32[13]
+             self.channel_configs[fch_id]["avg_sample_length"] = evt_data_32[14]
+             self.channel_configs[fch_id]["MAW_buffer_length"] = evt_data_32[15]
+             self.channel_configs[fch_id]["event_length"] = evt_data_32[16]
+             self.channel_configs[fch_id]["event_header_length"] = evt_data_32[17]
+             self.channel_configs[fch_id]["accum6_offset"] = evt_data_32[18]
+             self.channel_configs[fch_id]["accum2_offset"] = evt_data_32[19]
+             self.channel_configs[fch_id]["MAW3_offset"] = evt_data_32[20]
+             self.channel_configs[fch_id]["energy_offset"] = evt_data_32[21]
+
+         return n_bytes_read
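
Note: a minimal sketch of reading the 16-byte file header by hand, following the layout decoded above (the file path is an example; the magic word 0x4972414C is the bytes b"LArI" read as a little-endian uint32):

```python
import numpy as np

with open("20241218-150158-pulser.bin", "rb") as f:  # example path
    hdr = f.read(16)

w32 = np.frombuffer(hdr, dtype=np.uint32)
w16 = np.frombuffer(hdr, dtype=np.uint16)
assert w32[0] == 0x4972414C                   # magic bytes check
major, minor, patch = w16[4], w16[3], w16[2]  # file format version
length_econf = w16[5]                         # bytes per channel-config record (88 expected)
n_ch_open = w32[3]                            # channel-config records that follow
```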
src/daq2lh5/llama/llama_streamer.py (new file)

@@ -0,0 +1,156 @@
+ from __future__ import annotations
+
+ import logging
+
+ import numpy as np
+
+ from ..data_decoder import DataDecoder
+ from ..data_streamer import DataStreamer
+ from ..raw_buffer import RawBuffer, RawBufferLibrary
+ from .llama_event_decoder import LLAMAEventDecoder
+ from .llama_header_decoder import LLAMAHeaderDecoder
+
+ log = logging.getLogger(__name__)
+
+
+ class LLAMAStreamer(DataStreamer):
+     """
+     Decode SIS3316 data acquired using llamaDAQ.
+     """
+
+     def __init__(self) -> None:
+         super().__init__()
+         self.in_stream = None
+         self.event_rbkd = None
+         self.header_decoder = LLAMAHeaderDecoder()
+         self.event_decoder = LLAMAEventDecoder()
+
+     def get_decoder_list(self) -> list[DataDecoder]:
+         dec_list = []
+         dec_list.append(self.header_decoder)
+         dec_list.append(self.event_decoder)
+         return dec_list
+
+     def open_stream(
+         self,
+         llama_filename: str,
+         rb_lib: RawBufferLibrary = None,
+         buffer_size: int = 8192,
+         chunk_mode: str = "any_full",
+         out_stream: str = "",
+     ) -> list[RawBuffer]:
+         """Initialize the LLAMA data stream.
+
+         Refer to the documentation for
+         :meth:`.data_streamer.DataStreamer.open_stream` for a description
+         of the parameters.
+         """
+
+         if self.in_stream is not None:
+             raise RuntimeError("tried to open stream while previous one still open")
+         self.in_stream = open(llama_filename.encode("utf-8"), "rb")
+         self.n_bytes_read = 0
+         self.packet_id = 0
+
+         # read header info here
+         header, n_bytes_hdr = self.header_decoder.decode_header(self.in_stream)
+         self.n_bytes_read += n_bytes_hdr
+
+         self.event_decoder.set_channel_configs(
+             self.header_decoder.get_channel_configs()
+         )
+
+         # as far as I can tell, this happens if a user does not specify output.
+         # Then I can still get a rb_lib, but it misses keys entirely, which I need since channels can have different setups.
+         # So I try to hack my own here in case there is none provided.
+         # if rb_lib is None:
+         #     rb_lib = self.__hack_rb_lib(self.header_decoder.get_channel_configs())
+
+         # initialize the buffers in rb_lib. Store them for fast lookup.
+         # The docs say to use initialize instead, but that does not exist (?)
+         super().open_stream(
+             llama_filename,
+             rb_lib,
+             buffer_size=buffer_size,
+             chunk_mode=chunk_mode,
+             out_stream=out_stream,
+         )
+         if rb_lib is None:
+             rb_lib = self.rb_lib
+
+         self.event_rbkd = (
+             rb_lib["LLAMAEventDecoder"].get_keyed_dict()
+             if "LLAMAEventDecoder" in rb_lib
+             else None
+         )
+
+         if "LLAMAHeaderDecoder" in rb_lib:
+             config_rb_list = rb_lib["LLAMAHeaderDecoder"]
+             if len(config_rb_list) != 1:
+                 log.warning(
+                     f"config_rb_list had length {len(config_rb_list)}, ignoring all but the first"
+                 )
+             rb = config_rb_list[0]
+         else:
+             rb = RawBuffer(lgdo=header)
+         rb.loc = 1  # we have filled this buffer
+         return [rb]
+
+     def close_stream(self) -> None:
+         if self.in_stream is None:
+             raise RuntimeError("tried to close an unopened stream")
+         self.in_stream.close()
+         self.in_stream = None
+
+     def read_packet(self) -> bool:
+         """Read a single packet's worth of data into the :class:`.RawBufferLibrary`.
+
+         Returns
+         -------
+         still_has_data
+             returns `True` while there is still data to read.
+         """
+
+         packet, fch_id = self.__read_bytes()
+         if packet is None:
+             return False  # EOF
+         self.packet_id += 1
+         self.n_bytes_read += len(packet)
+
+         self.any_full |= self.event_decoder.decode_packet(
+             packet, self.event_rbkd, self.packet_id, fch_id
+         )
+
+         return True
+
+     def __read_bytes(self) -> tuple[bytes | None, int]:
+         """
+         Return bytes if the read was successful or None on EOF.
+         The int is the fch_id (needs to be fetched to obtain the size of the event).
+         """
+         if self.in_stream is None:
+             raise RuntimeError("No stream open!")
+
+         position = self.in_stream.tell()  # save position of the event header's 1st byte
+         data1 = self.in_stream.read(
+             4
+         )  # read the first (32 bit) word of the event's header: channel ID & format bits
+         if len(data1) < 4:
+             return None, -1  # EOF, I guess
+         self.in_stream.seek(position)  # go back to 1st position of event header
+
+         header_data_32 = np.frombuffer(data1, dtype=np.uint32)
+         fch_id = (header_data_32[0] >> 4) & 0x00000FFF
+
+         event_length_32 = self.header_decoder.get_channel_configs()[fch_id][
+             "event_length"
+         ]
+         event_length_8 = event_length_32 * 4
+
+         packet = self.in_stream.read(event_length_8)
+         if len(packet) < event_length_8:
+             raise RuntimeError(
+                 f"Tried to read {event_length_8} bytes but got {len(packet)}"
+             )
+
+         return packet, fch_id
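
Note: a sketch of driving the streamer by hand, mirroring what the tests below do (normally `build_raw` handles this loop; the file path is an example):

```python
from daq2lh5.llama.llama_streamer import LLAMAStreamer

streamer = LLAMAStreamer()
streamer.open_stream("20241218-150158-pulser.bin")  # example path
while streamer.read_packet():  # returns False at EOF
    pass
print(f"{streamer.packet_id} packets, {streamer.n_bytes_read} bytes read")
streamer.close_stream()
```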
src/daq2lh5/logging.py

@@ -11,6 +11,8 @@ ERROR = logging.ERROR
  FATAL = logging.FATAL
  CRITICAL = logging.CRITICAL
 
+ root = logging.root
+
 
  def setup(level: int = logging.INFO, logger: logging.Logger = None) -> None:
      """Setup a colorful logging output.
src/legend_daq2lh5.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: legend_daq2lh5
- Version: 1.2.1
+ Version: 1.3.0
  Summary: Convert digitizer data to LH5
  Home-page: https://github.com/legend-exp/legend-daq2lh5
  Author: Jason Detwiler
@@ -32,6 +32,7 @@ Requires-Dist: hdf5plugin
  Requires-Dist: legend-pydataobj>=1.6
  Requires-Dist: numpy>=1.21
  Requires-Dist: pyfcutils
+ Requires-Dist: pyyaml
  Requires-Dist: tqdm>=4.27
  Requires-Dist: xmltodict
  Provides-Extra: all
src/legend_daq2lh5.egg-info/SOURCES.txt

@@ -24,6 +24,10 @@ src/daq2lh5/fc/fc_config_decoder.py
  src/daq2lh5/fc/fc_event_decoder.py
  src/daq2lh5/fc/fc_status_decoder.py
  src/daq2lh5/fc/fc_streamer.py
+ src/daq2lh5/llama/llama_base.py
+ src/daq2lh5/llama/llama_event_decoder.py
+ src/daq2lh5/llama/llama_header_decoder.py
+ src/daq2lh5/llama/llama_streamer.py
  src/daq2lh5/orca/__init__.py
  src/daq2lh5/orca/orca_base.py
  src/daq2lh5/orca/orca_digitizers.py
@@ -62,6 +66,10 @@ tests/fc/test_fc_config_decoder.py
  tests/fc/test_fc_event_decoder.py
  tests/fc/test_fc_status_decoder.py
  tests/fc/test_fc_streamer.py
+ tests/llama/conftest.py
+ tests/llama/test_llama_event_decoder.py
+ tests/llama/test_llama_header_decoder.py
+ tests/llama/test_llama_streamer.py
  tests/orca/conftest.py
  tests/orca/test_or_run_decoder_for_run.py
  tests/orca/test_orca_fc.py
src/legend_daq2lh5.egg-info/requires.txt

@@ -4,6 +4,7 @@ hdf5plugin
  legend-pydataobj>=1.6
  numpy>=1.21
  pyfcutils
+ pyyaml
  tqdm>=4.27
  xmltodict
 
tests/conftest.py

@@ -30,7 +30,7 @@ def pytest_sessionfinish(session, exitstatus):
  @pytest.fixture(scope="session")
  def lgnd_test_data():
      ldata = LegendTestData()
-     ldata.checkout("ecb370e")
+     ldata.checkout("88d8c01")
      return ldata
 
 
tests/llama/conftest.py (new file)

@@ -0,0 +1,7 @@
+ import pytest
+
+
+ # lgnd_test_data (LegendTestData) from fixture in root conftest.py
+ @pytest.fixture(scope="module")
+ def test_data_path(lgnd_test_data):
+     return lgnd_test_data.get_path("llamaDAQ/20241218-150158-pulser.bin")
tests/llama/test_llama_event_decoder.py (new file)

@@ -0,0 +1,127 @@
+ import lgdo
+ import pytest
+
+ from daq2lh5.llama.llama_event_decoder import LLAMAEventDecoder, check_dict_spec_equal
+ from daq2lh5.llama.llama_streamer import LLAMAStreamer
+
+
+ def test_check_dict_spec_equal():
+     d1 = {"X": "1", "Y": "2", "Z": "3"}
+     d2 = {"X": "2", "Y": "2", "Z": "3"}
+     assert check_dict_spec_equal(d1, d2, ["Y", "Z"])
+     assert not check_dict_spec_equal(d1, d2, ["X", "Y"])
+
+
+ @pytest.fixture(scope="module")
+ def open_stream(test_data_path):
+     streamer = LLAMAStreamer()
+     streamer.open_stream(test_data_path)
+     yield streamer
+     streamer.close_stream()
+
+
+ def test_get_key_lists(open_stream):
+     evt_dec: LLAMAEventDecoder = open_stream.event_decoder
+     assert evt_dec.get_key_lists() == [[0], [4]]
+
+
+ def test_get_decoded_values(open_stream):
+     evt_dec: LLAMAEventDecoder = open_stream.event_decoder
+     dec_vals_0 = evt_dec.get_decoded_values(0)
+     assert dec_vals_0["waveform"]["wf_len"] == 2000
+     assert dec_vals_0["avgwaveform"]["wf_len"] == 10000
+     dec_vals_4 = evt_dec.get_decoded_values(4)
+     assert dec_vals_4["waveform"]["wf_len"] == 2000
+     assert dec_vals_4["avgwaveform"]["wf_len"] == 500
+
+
+ def test_first_packet(open_stream):
+     good_packet = open_stream.read_packet()
+     assert good_packet
+     evt_dec: LLAMAEventDecoder = open_stream.event_decoder
+     assert evt_dec is not None
+     evt_rbkd = open_stream.event_rbkd
+     tbl = evt_rbkd[0].lgdo
+     assert isinstance(tbl, lgdo.Table)
+     ii = evt_rbkd[0].loc
+     assert ii == 1
+     ii = ii - 1  # use the last written entry (which is the only one, actually)
+     assert tbl["fch_id"].nda[ii] == 0
+     assert tbl["packet_id"].nda[ii] == 1
+     assert tbl["timestamp"].nda[ii] == 757530
+     assert tbl["peakHighValue"].nda[ii] == 9454
+     assert tbl["peakHighIndex"].nda[ii] == 1968
+     assert tbl["information"].nda[ii] == 0
+     assert tbl["accSum1"].nda[ii] == 7826
+     assert tbl["accSum2"].nda[ii] == 7826
+     assert tbl["accSum3"].nda[ii] == 7826
+     assert tbl["accSum4"].nda[ii] == 7826
+     assert tbl["accSum5"].nda[ii] == 7826
+     assert tbl["accSum6"].nda[ii] == 7826
+     assert tbl["accSum7"].nda[ii] == 7826
+     assert tbl["accSum8"].nda[ii] == 7826
+     assert (
+         tbl["waveform"]["dt"].nda[ii] > 3.999 and tbl["waveform"]["dt"].nda[ii] < 4.001
+     )
+     assert (
+         tbl["avgwaveform"]["dt"].nda[ii] > 15.999
+         and tbl["avgwaveform"]["dt"].nda[ii] < 16.001
+     )
+     assert (
+         tbl["waveform"]["t0"].nda[ii] > -4000.1
+         and tbl["waveform"]["t0"].nda[ii] < -3999.9
+     )
+     assert (
+         tbl["avgwaveform"]["t0"].nda[ii] > -8000.1
+         and tbl["avgwaveform"]["t0"].nda[ii] < -7999.9
+     )
+
+
+ def test_first_packet_ch4(open_stream):
+     evt_rbkd = open_stream.event_rbkd
+     while True:
+         good_packet = open_stream.read_packet()
+         if not good_packet:
+             break
+     tbl = evt_rbkd[4].lgdo
+     assert evt_rbkd[4].loc > 0, "Not a single event of channel 4"
+     ii = 0
+     assert tbl["fch_id"].nda[ii] == 4
+     assert tbl["packet_id"].nda[ii] == 10
+     assert tbl["timestamp"].nda[ii] == 757530
+     assert tbl["peakHighValue"].nda[ii] == 7923
+     assert tbl["peakHighIndex"].nda[ii] == 371
+     assert tbl["information"].nda[ii] == 0
+     assert tbl["accSum1"].nda[ii] == 7912
+     assert tbl["accSum2"].nda[ii] == 7912
+     assert tbl["accSum3"].nda[ii] == 7912
+     assert tbl["accSum4"].nda[ii] == 7912
+     assert tbl["accSum5"].nda[ii] == 7912
+     assert tbl["accSum6"].nda[ii] == 7912
+     assert tbl["accSum7"].nda[ii] == 7912
+     assert tbl["accSum8"].nda[ii] == 7912
+     assert (
+         tbl["waveform"]["dt"].nda[ii] > 3.999 and tbl["waveform"]["dt"].nda[ii] < 4.001
+     )
+     assert (
+         tbl["avgwaveform"]["dt"].nda[ii] > 31.999
+         and tbl["avgwaveform"]["dt"].nda[ii] < 32.001
+     )
+     assert (
+         tbl["waveform"]["t0"].nda[ii] > -4000.1
+         and tbl["waveform"]["t0"].nda[ii] < -3999.9
+     )
+     assert (
+         tbl["avgwaveform"]["t0"].nda[ii] > -4000.1
+         and tbl["avgwaveform"]["t0"].nda[ii] < -3999.9
+     )
+
+
+ def test_event_count(open_stream):
+     evt_rbkd = open_stream.event_rbkd
+     while True:
+         good_packet = open_stream.read_packet()
+         if not good_packet:
+             break
+     assert evt_rbkd[0].loc == 37
+     assert evt_rbkd[4].loc == 37
tests/llama/test_llama_header_decoder.py (new file)

@@ -0,0 +1,16 @@
+ from daq2lh5.llama.llama_header_decoder import LLAMAHeaderDecoder
+ from daq2lh5.llama.llama_streamer import LLAMAStreamer
+
+
+ def test_read_header(test_data_path):
+     streamer = LLAMAStreamer()
+     streamer.open_stream(test_data_path)
+     header = streamer.header_decoder
+     assert isinstance(header, LLAMAHeaderDecoder)
+     # following data is specific to the particular test file:
+     assert header.version_major == 2
+     assert header.version_minor == 0
+     assert header.version_patch == 0
+     assert header.length_econf == 88  # 22 words of 4 bytes
+     assert header.number_chOpen == 2
+     streamer.close_stream()
tests/llama/test_llama_streamer.py (new file)

@@ -0,0 +1,36 @@
+ from daq2lh5.llama.llama_event_decoder import LLAMAEventDecoder
+ from daq2lh5.llama.llama_header_decoder import LLAMAHeaderDecoder
+ from daq2lh5.llama.llama_streamer import LLAMAStreamer
+ from daq2lh5.raw_buffer import RawBuffer
+
+
+ def test_get_decoder_list():
+     streamer = LLAMAStreamer()
+     assert len(streamer.get_decoder_list()) == 2
+     assert isinstance(streamer.get_decoder_list()[0], LLAMAHeaderDecoder)
+     assert isinstance(streamer.get_decoder_list()[1], LLAMAEventDecoder)
+
+
+ # test_data_path (str) from fixture in ./conftest.py
+ def test_open_stream(test_data_path):
+     streamer = LLAMAStreamer()
+     rbl: list[RawBuffer] = streamer.open_stream(test_data_path)
+     assert len(rbl) == 1
+     assert isinstance(rbl[0], RawBuffer)
+     assert streamer.rb_lib is not None
+     nbytes_hdr = streamer.n_bytes_read
+     assert nbytes_hdr > 0
+     assert streamer.read_packet()  # there has to be at least a single good packet
+     assert streamer.packet_id == 1
+     assert streamer.n_bytes_read > nbytes_hdr
+     streamer.close_stream()
+
+
+ def test_open_stream_multiple(test_data_path):
+     streamer = LLAMAStreamer()
+     rbl: list[RawBuffer] = streamer.open_stream(test_data_path)
+     assert len(rbl) == 1
+     streamer.close_stream()
+     rbl: list[RawBuffer] = streamer.open_stream(test_data_path)
+     assert len(rbl) == 1
+     streamer.close_stream()