legend-daq2lh5 1.2.2-py3-none-any.whl → 1.3.0-py3-none-any.whl

daq2lh5/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.2.2'
-__version_tuple__ = version_tuple = (1, 2, 2)
+__version__ = version = '1.3.0'
+__version_tuple__ = version_tuple = (1, 3, 0)
daq2lh5/build_raw.py CHANGED
@@ -12,6 +12,7 @@ from tqdm.auto import tqdm
 
 from .compass.compass_streamer import CompassStreamer
 from .fc.fc_streamer import FCStreamer
+from .llama.llama_streamer import LLAMAStreamer
 from .orca.orca_streamer import OrcaStreamer
 from .raw_buffer import RawBufferLibrary, write_to_lh5_and_clear
 
@@ -180,7 +181,7 @@ def build_raw(
     elif in_stream_type == "Compass":
         streamer = CompassStreamer(compass_config_file)
     elif in_stream_type == "LlamaDaq":
-        raise NotImplementedError("LlamaDaq streaming not yet implemented")
+        streamer = LLAMAStreamer()
     elif in_stream_type == "MGDO":
         raise NotImplementedError("MGDO streaming not yet implemented")
     else:
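With this change, build_raw dispatches to the new LLAMA streamer when called with in_stream_type="LlamaDaq". A minimal invocation sketch follows; the file names are hypothetical and the out_spec keyword is assumed to behave as for the other stream types:

    # sketch: convert a llamaDAQ file to LH5 (file names are hypothetical)
    from daq2lh5 import build_raw

    build_raw(
        "run0001.bin",              # llamaDAQ input file
        in_stream_type="LlamaDaq",  # selects LLAMAStreamer (see diff above)
        out_spec="run0001.lh5",     # output LH5 file
    )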
daq2lh5/data_streamer.py CHANGED
@@ -350,7 +350,7 @@ class DataStreamer(ABC):
             if len(key_list) == 1:
                 this_name = f"{dec_key}_{key_list[0]}"
             else:
-                this_name = f"{dec_key}_{ii}"
+                this_name = f"{dec_key}_{ii}"  # this can cause a name clash e.g. for [[1],[2,3]] ...
             rb = RawBuffer(
                 key_list=key_list, out_stream=out_stream, out_name=this_name
             )
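The new comment flags a real pitfall: for key lists like [[1], [2, 3]], the singleton list is named after its key while the longer list is named after its index, and the two can collide. A standalone sketch of the clash, using the same naming logic as the diff above:

    dec_key = "LLAMAEventDecoder"
    key_lists = [[1], [2, 3]]
    names = []
    for ii, key_list in enumerate(key_lists):
        if len(key_list) == 1:
            names.append(f"{dec_key}_{key_list[0]}")  # "LLAMAEventDecoder_1" (from the key)
        else:
            names.append(f"{dec_key}_{ii}")           # "LLAMAEventDecoder_1" (from the index)
    assert names[0] == names[1]  # both buffers would get the same out_name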
daq2lh5/llama/llama_base.py ADDED
@@ -0,0 +1,14 @@
+"""
+General utilities for llamaDAQ data decoding
+"""
+
+from __future__ import annotations
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+# build a unique flat identifier for fadc and channel together
+def join_fadcid_chid(fadcid: int, chid: int) -> int:
+    return (fadcid << 4) + chid
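Since the channel id occupies the low 4 bits, the packing is invertible. A small sketch, assuming chid < 16 (the SIS3316 has 16 channels per module):

    fch_id = join_fadcid_chid(2, 5)            # (2 << 4) + 5 = 37
    fadcid, chid = fch_id >> 4, fch_id & 0xF   # unpack again
    assert (fadcid, chid) == (2, 5)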
daq2lh5/llama/llama_event_decoder.py ADDED
@@ -0,0 +1,328 @@
+from __future__ import annotations
+
+import copy
+import logging
+from typing import Any
+
+import lgdo
+import numpy as np
+
+from ..data_decoder import DataDecoder
+from .llama_header_decoder import LLAMA_Channel_Configs_t
+
+log = logging.getLogger(__name__)
+
+# put decoded values here
+llama_decoded_values_template = {
+    # packet index in file
+    "packet_id": {"dtype": "uint32"},
+    # combined index of FADC and channel
+    "fch_id": {"dtype": "uint32"},
+    # time since epoch
+    "timestamp": {"dtype": "uint64", "units": "clock_ticks"},
+    "status_flag": {"dtype": "uint32"},
+    # waveform data --> not always present
+    # "waveform": {
+    #     "dtype": "uint16",
+    #     "datatype": "waveform",
+    #     "wf_len": 65532,  # max value. override this before initializing buffers to save RAM
+    #     "dt": 8,  # override if a different clock rate is used
+    #     "dt_units": "ns",
+    #     "t0_units": "ns",
+    # }
+}
+# """Default llamaDAQ SIS3316 Event decoded values.
+#
+# Warning
+# -------
+# This configuration will be dynamically modified by the decoder at runtime.
+# """
+
+
+def check_dict_spec_equal(
+    d1: dict[str, Any], d2: dict[str, Any], specs: list[str]
+) -> bool:
+    for spec in specs:
+        if d1.get(spec) != d2.get(spec):
+            return False
+    return True
+
+
+class LLAMAEventDecoder(DataDecoder):
+    """Decode llamaDAQ SIS3316 digitizer event data."""
+
+    def __init__(self, *args, **kwargs) -> None:
+        # these are read for every event (decode_event)
+        # one set of settings per fch, since settings can be different per channel group
+        self.decoded_values: dict[int, dict[str, Any]] = {}
+        super().__init__(*args, **kwargs)
+        self.skipped_channels = {}
+        self.channel_configs = None
+        self.dt_raw: dict[int, float] = (
+            {}
+        )  # need to buffer that to update t0 for avg waveforms per event
+        self.t0_raw: dict[int, float] = (
+            {}
+        )  # store when receiving channel configs and use for each waveform
+        self.t0_avg_const: dict[int, float] = (
+            {}
+        )  # constant part of the t0 of averaged waveforms
+
+    def set_channel_configs(self, channel_configs: LLAMA_Channel_Configs_t) -> None:
+        """Receive channel configurations from llama_streamer after the header was parsed.
+        Adapt the self.decoded_values dict based on the configuration read.
+        """
+        self.channel_configs = channel_configs
+        for fch, config in self.channel_configs.items():
+            self.decoded_values[fch] = copy.deepcopy(llama_decoded_values_template)
+            format_bits = config["format_bits"]
+            sample_clock_freq = config["sample_freq"]
+            avg_mode = config["avg_mode"]
+            dt_raw: float = 1 / sample_clock_freq * 1000
+            dt_avg: float = dt_raw * (1 << (avg_mode + 1))
+            # t0 generation functions from llamaDAQ -> EventConfig.hh
+            t0_raw: float = (
+                float(config["sample_start_index"]) - float(config["sample_pretrigger"])
+            ) * dt_raw  # location of the trigger is at t = 0
+            t0_avg: float = (
+                -float(config["sample_pretrigger"]) * float(dt_raw)
+                - float(config["avg_sample_pretrigger"]) * dt_avg
+            )  # additional offset to be added independently for every event
+            self.dt_raw[fch] = dt_raw
+            self.t0_raw[fch] = t0_raw
+            self.t0_avg_const[fch] = t0_avg
+            if config["sample_length"] > 0:
+                self.__add_waveform(
+                    self.decoded_values[fch], False, config["sample_length"], dt_raw
+                )
+            if config["avg_sample_length"] > 0 and avg_mode > 0:
+                self.__add_waveform(
+                    self.decoded_values[fch], True, config["avg_sample_length"], dt_avg
+                )
+            if format_bits & 0x01:
+                self.__add_accum1till6(self.decoded_values[fch])
+            if format_bits & 0x02:
+                self.__add_accum7and8(self.decoded_values[fch])
+            if format_bits & 0x04:
+                self.__add_maw(self.decoded_values[fch])
+            if format_bits & 0x08:
+                self.__add_energy(self.decoded_values[fch])
+
+    def get_key_lists(self) -> list[list[int | str]]:
+        """
+        Return a list of lists of keys available for this decoder.
+        Each inner list holds the fch_ids which share the exact same settings (trace lengths, avg mode, ...),
+        so they can end up in the same buffer.
+        """
+        if self.channel_configs is None:
+            raise RuntimeError(
+                "Identification of key lists requires channel configs to be set!"
+            )
+
+        params_for_equality = ["sample_length", "avg_sample_length", "avg_mode"]
+
+        def check_equal(c1, c2):
+            return check_dict_spec_equal(c1, c2, params_for_equality)
+
+        kll: list[list[int]] = []  # key-list-list
+        for fch_id, config in self.channel_configs.items():
+            for kl in kll:
+                # use 1st entry of a list of lists as "archetype"
+                if check_equal(config, self.channel_configs[kl[0]]):
+                    kl.append(fch_id)
+                    break
+            else:
+                kll.append([fch_id])
+        log.debug(f"key lists are: {repr(kll)}")
+        return kll
+
+    # copied from ORCA SIS3316
+    def get_decoded_values(self, key: int = None) -> dict[str, Any]:
+        if key is None:
+            raise RuntimeError("Key is None!")
+            dec_vals_list = self.decoded_values.values()
+            if len(dec_vals_list) == 0:
+                raise RuntimeError("decoded_values not built yet!")
+
+            return dec_vals_list  # Get first thing we find
+        else:
+            dec_vals_list = self.decoded_values[key]
+            return dec_vals_list
+
+    def decode_packet(
+        self,
+        packet: bytes,
+        evt_rbkd: lgdo.Table | dict[int, lgdo.Table],
+        packet_id: int,
+        fch_id: int,
+        # header: lgdo.Table | dict[int, lgdo.Table]
+    ) -> bool:
+        """
+        Decode a single packet, which is a single SIS3316 event as specified in the Struck manual.
+        A single packet corresponds to a single event and channel, and has a unique timestamp.
+        Packets of different channel groups can vary in size!
+        """
+
+        # check if this fch_id should be recorded
+        if fch_id not in evt_rbkd:
+            if fch_id not in self.skipped_channels:
+                self.skipped_channels[fch_id] = 0
+                log.info(f"Skipping channel: {fch_id}")
+                log.debug(f"evt_rbkd: {evt_rbkd.keys()}")
+            self.skipped_channels[fch_id] += 1
+            return False
+
+        tbl = evt_rbkd[fch_id].lgdo
+        ii = evt_rbkd[fch_id].loc
+
+        # parse the raw event data into numpy arrays of 16 and 32 bit ints
+        evt_data_32 = np.frombuffer(packet, dtype=np.uint32)
+        evt_data_16 = np.frombuffer(packet, dtype=np.uint16)
+
+        # and these big binaries you just don't include
+        # fch_id = (evt_data_32[0] >> 4) & 0x00000fff --> to be read earlier, since we need size for chopping out the event from the stream
+        timestamp = ((evt_data_32[0] & 0xFFFF0000) << 16) + evt_data_32[1]
+        format_bits = (evt_data_32[0]) & 0x0000000F
+        tbl["fch_id"].nda[ii] = fch_id
+        tbl["packet_id"].nda[ii] = packet_id
+        tbl["timestamp"].nda[ii] = timestamp
+        offset = 2
+        if format_bits & 0x1:
+            tbl["peakHighValue"].nda[ii] = evt_data_16[4]
+            tbl["peakHighIndex"].nda[ii] = evt_data_16[5]
+            tbl["information"].nda[ii] = (evt_data_32[offset + 1] >> 24) & 0xFF
+            tbl["accSum1"].nda[ii] = evt_data_32[offset + 2]
+            tbl["accSum2"].nda[ii] = evt_data_32[offset + 3]
+            tbl["accSum3"].nda[ii] = evt_data_32[offset + 4]
+            tbl["accSum4"].nda[ii] = evt_data_32[offset + 5]
+            tbl["accSum5"].nda[ii] = evt_data_32[offset + 6]
+            tbl["accSum6"].nda[ii] = evt_data_32[offset + 7]
+            offset += 7
+        if format_bits & 0x2:
+            tbl["accSum7"].nda[ii] = evt_data_32[offset + 0]
+            tbl["accSum8"].nda[ii] = evt_data_32[offset + 1]
+            offset += 2
+        if format_bits & 0x4:
+            tbl["mawMax"].nda[ii] = evt_data_32[offset + 0]
+            tbl["mawBefore"].nda[ii] = evt_data_32[offset + 1]
+            tbl["mawAfter"].nda[ii] = evt_data_32[offset + 2]
+            offset += 3
+        if format_bits & 0x8:
+            tbl["startEnergy"].nda[ii] = evt_data_32[offset + 0]
+            tbl["maxEnergy"].nda[ii] = evt_data_32[offset + 1]
+            offset += 2
+
+        raw_length_32 = (evt_data_32[offset + 0]) & 0x03FFFFFF
+        tbl["status_flag"].nda[ii] = (
+            (evt_data_32[offset + 0]) & 0x04000000
+        ) >> 26  # bit 26
+        maw_test_flag = ((evt_data_32[offset + 0]) & 0x08000000) >> 27  # bit 27
+        avg_data_coming = False
+        if evt_data_32[offset + 0] & 0xF0000000 == 0xE0000000:
+            avg_data_coming = False
+        elif evt_data_32[offset + 0] & 0xF0000000 == 0xA0000000:
+            avg_data_coming = True
+        else:
+            raise RuntimeError("Data corruption 1!")
+        offset += 1
+        avg_length_32 = 0
+        if avg_data_coming:
+            avg_count_status = (
+                evt_data_32[offset + 0] & 0x00FF0000
+            ) >> 16  # bits 23 - 16
+            avg_length_32 = evt_data_32[offset + 0] & 0x0000FFFF
+            if evt_data_32[offset + 0] & 0xF0000000 != 0xE0000000:
+                raise RuntimeError("Data corruption 2!")
+            offset += 1
+
+        # --- now the offset points to the raw wf data ---
+
+        if maw_test_flag:
+            raise RuntimeError("Cannot handle data with MAW test data!")
+
+        # compute expected and actual array dimensions
+        raw_length_16 = 2 * raw_length_32
+        avg_length_16 = 2 * avg_length_32
+        header_length_16 = offset * 2
+        expected_wf_length = len(evt_data_16) - header_length_16
+
+        # error check: waveform size must match expectations
+        if raw_length_16 + avg_length_16 != expected_wf_length:
+            raise RuntimeError(
+                f"Waveform sizes {raw_length_16} (raw) and {avg_length_16} (avg) don't match expected size {expected_wf_length}."
+            )
+
+        # store waveform if available:
+        if raw_length_16 > 0:
+            tbl["waveform"]["values"].nda[ii] = evt_data_16[
+                offset * 2 : offset * 2 + raw_length_16
+            ]
+            offset += raw_length_32
+            tbl["waveform"]["t0"].nda[ii] = self.t0_raw[fch_id]
+
+        # store pre-averaged (avg) waveform if available:
+        if avg_length_16 > 0:
+            tbl["avgwaveform"]["values"].nda[ii] = evt_data_16[
+                offset * 2 : offset * 2 + avg_length_16
+            ]
+            offset += avg_length_32
+            # need to update avg waveform t0 based on the offset I get per event
+            tbl["avgwaveform"]["t0"].nda[ii] = (
+                self.t0_avg_const[fch_id]
+                + float(avg_count_status) * self.dt_raw[fch_id]
+            )
+
+        if offset != len(evt_data_32):
+            raise RuntimeError("I messed up...")
+
+        evt_rbkd[fch_id].loc += 1
+
+        return evt_rbkd[fch_id].is_full()
+
+    def __add_waveform(
+        self,
+        decoded_values_fch: dict[str, Any],
+        is_avg: bool,
+        max_samples: int,
+        dt: float,
+    ) -> None:
+        """
+        Averaged samples are available from the 125 MHz (16 bit) variant of the SIS3316 and can be stored independently of raw samples.
+        I use waveform for raw samples (dt from the clock itself) and avgwaveform for averaged samples (dt from clock * avg number).
+
+        GERDA used to have the low-frequency (waveform) & the high-frequency (aux waveform); here: LF = avgwaveform & HF = waveform.
+        """
+        name: str = "avgwaveform" if is_avg else "waveform"
+        decoded_values_fch[name] = {
+            "dtype": "uint16",
+            "datatype": "waveform",
+            "wf_len": max_samples,  # max value. override this before initializing buffers to save RAM
+            "dt": dt,  # the sample pitch (inverse of clock speed)
+            # "t0": t0,  # adding t0 here does not work
+            "dt_units": "ns",
+            "t0_units": "ns",
+        }
+
+    def __add_accum1till6(self, decoded_values_fch: dict[str, Any]) -> None:
+        decoded_values_fch["peakHighValue"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["peakHighIndex"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["information"] = {"dtype": "uint32"}
+        decoded_values_fch["accSum1"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["accSum2"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["accSum3"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["accSum4"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["accSum5"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["accSum6"] = {"dtype": "uint32", "units": "adc"}
+
+    def __add_accum7and8(self, decoded_values_fch: dict[str, Any]) -> None:
+        decoded_values_fch["accSum7"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["accSum8"] = {"dtype": "uint32", "units": "adc"}
+
+    def __add_maw(self, decoded_values_fch: dict[str, Any]) -> None:
+        decoded_values_fch["mawMax"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["mawBefore"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["mawAfter"] = {"dtype": "uint32", "units": "adc"}
+
+    def __add_energy(self, decoded_values_fch: dict[str, Any]) -> None:
+        decoded_values_fch["startEnergy"] = {"dtype": "uint32", "units": "adc"}
+        decoded_values_fch["maxEnergy"] = {"dtype": "uint32", "units": "adc"}
daq2lh5/llama/llama_header_decoder.py ADDED
@@ -0,0 +1,149 @@
+from __future__ import annotations
+
+import io
+import logging
+from typing import Any, Dict
+
+import lgdo
+import numpy as np
+
+from ..data_decoder import DataDecoder
+from .llama_base import join_fadcid_chid
+
+log = logging.getLogger(__name__)
+
+LLAMA_Channel_Configs_t = Dict[int, Dict[str, Any]]
+
+
+class LLAMAHeaderDecoder(DataDecoder):  # DataDecoder currently unused
+    """
+    Decode llamaDAQ header data. Includes the file header as well as all available ("open") channel configurations.
+    """
+
+    @staticmethod
+    def magic_bytes() -> int:
+        return 0x4972414C
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.config = lgdo.Struct()
+        self.channel_configs = None
+
+    def decode_header(self, f_in: io.BufferedReader) -> lgdo.Struct:
+        n_bytes_read = 0
+
+        f_in.seek(0)  # should be there anyhow, but re-set if not
+        header = f_in.read(16)  # read 16 bytes
+        n_bytes_read += 16
+        evt_data_32 = np.frombuffer(header, dtype=np.uint32)
+        evt_data_16 = np.frombuffer(header, dtype=np.uint16)
+
+        # line 0: magic bytes
+        magic = evt_data_32[0]
+        # print(hex(magic))
+        if magic == self.magic_bytes():
+            log.info("Read in file as llamaDAQ-SIS3316, magic bytes correct.")
+        else:
+            log.error("Magic bytes not matching for llamaDAQ file!")
+            raise RuntimeError("wrong file type")
+
+        self.version_major = evt_data_16[4]
+        self.version_minor = evt_data_16[3]
+        self.version_patch = evt_data_16[2]
+        self.length_econf = evt_data_16[5]
+        self.number_chOpen = evt_data_32[3]
+
+        log.debug(
+            f"File version: {self.version_major}.{self.version_minor}.{self.version_patch}"
+        )
+        log.debug(
+            f"{self.number_chOpen} channels open, each config {self.length_econf} bytes long"
+        )
+
+        n_bytes_read += self.__decode_channel_configs(f_in)
+
+        # print(self.channel_configs[0]["MAW3_offset"])
+
+        # assemble LGDO struct:
+        self.config.add_field("version_major", lgdo.Scalar(self.version_major))
+        self.config.add_field("version_minor", lgdo.Scalar(self.version_minor))
+        self.config.add_field("version_patch", lgdo.Scalar(self.version_patch))
+        self.config.add_field("length_econf", lgdo.Scalar(self.length_econf))
+        self.config.add_field("number_chOpen", lgdo.Scalar(self.number_chOpen))
+
+        for fch_id, fch_content in self.channel_configs.items():
+            fch_lgdo = lgdo.Struct()
+            for key, value in fch_content.items():
+                fch_lgdo.add_field(key, lgdo.Scalar(value))
+            self.config.add_field(f"fch_{fch_id:02d}", fch_lgdo)
+
+        return self.config, n_bytes_read
+
+    # override from DataDecoder
+    def make_lgdo(self, key: int = None, size: int = None) -> lgdo.Struct:
+        return self.config
+
+    def get_channel_configs(self) -> LLAMA_Channel_Configs_t:
+        return self.channel_configs
+
+    def __decode_channel_configs(self, f_in: io.BufferedReader) -> int:
+        """
+        Read the metadata from the beginning of the file (the "channel configuration" part, directly after the file header).
+        Creates a dictionary of the metadata for each FADC/channel combination, stored in self.channel_configs.
+
+        FADC-ID and channel-ID are combined into a single id for flattening:
+        (fadcid << 4) + chid
+
+        Returns the number of bytes read.
+        """
+        # f_in.seek(16)  # should be after the file header anyhow, but re-set if not
+        n_bytes_read = 0
+        self.channel_configs = {}
+
+        if self.length_econf != 88:
+            raise RuntimeError("Invalid channel configuration format")
+
+        for _i in range(0, self.number_chOpen):
+            # print("reading in channel config {}".format(i))
+
+            channel = f_in.read(self.length_econf)
+            n_bytes_read += self.length_econf
+            ch_dpf = channel[16:32]
+            evt_data_32 = np.frombuffer(channel, dtype=np.uint32)
+            evt_data_dpf = np.frombuffer(ch_dpf, dtype=np.float64)
+
+            fadc_index = evt_data_32[0]
+            channel_index = evt_data_32[1]
+            fch_id = join_fadcid_chid(fadc_index, channel_index)
+
+            if fch_id in self.channel_configs:
+                raise RuntimeError(
+                    f"duplicate channel configuration in file: FADCID: {fadc_index}, ChannelID: {channel_index}"
+                )
+            else:
+                self.channel_configs[fch_id] = {}
+
+            self.channel_configs[fch_id]["14BitFlag"] = evt_data_32[2] & 0x00000001
+            if evt_data_32[2] & 0x00000002 == 0:
+                log.warning("Channel in configuration marked as non-open!")
+            self.channel_configs[fch_id]["ADC_offset"] = evt_data_32[3]
+            self.channel_configs[fch_id]["sample_freq"] = evt_data_dpf[
+                0
+            ]  # 64 bit float
+            self.channel_configs[fch_id]["gain"] = evt_data_dpf[1]
+            self.channel_configs[fch_id]["format_bits"] = evt_data_32[8]
+            self.channel_configs[fch_id]["sample_start_index"] = evt_data_32[9]
+            self.channel_configs[fch_id]["sample_pretrigger"] = evt_data_32[10]
+            self.channel_configs[fch_id]["avg_sample_pretrigger"] = evt_data_32[11]
+            self.channel_configs[fch_id]["avg_mode"] = evt_data_32[12]
+            self.channel_configs[fch_id]["sample_length"] = evt_data_32[13]
+            self.channel_configs[fch_id]["avg_sample_length"] = evt_data_32[14]
+            self.channel_configs[fch_id]["MAW_buffer_length"] = evt_data_32[15]
+            self.channel_configs[fch_id]["event_length"] = evt_data_32[16]
+            self.channel_configs[fch_id]["event_header_length"] = evt_data_32[17]
+            self.channel_configs[fch_id]["accum6_offset"] = evt_data_32[18]
+            self.channel_configs[fch_id]["accum2_offset"] = evt_data_32[19]
+            self.channel_configs[fch_id]["MAW3_offset"] = evt_data_32[20]
+            self.channel_configs[fch_id]["energy_offset"] = evt_data_32[21]
+
+        return n_bytes_read
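To make the word layout of the 16-byte file header concrete, here is a standalone sketch that fabricates a header and reads it back exactly as decode_header does; all field values are made up:

    import numpy as np

    hdr = bytes(
        [0x4C, 0x41, 0x72, 0x49,   # magic 0x4972414C (b"LArI" on disk)
         0x02, 0x00,               # version_patch = 2  (16-bit word 2)
         0x01, 0x00,               # version_minor = 1  (16-bit word 3)
         0x02, 0x00,               # version_major = 2  (16-bit word 4)
         0x58, 0x00,               # length_econf = 88  (16-bit word 5)
         0x04, 0x00, 0x00, 0x00]   # number_chOpen = 4  (32-bit word 3)
    )
    u32 = np.frombuffer(hdr, dtype=np.uint32)
    u16 = np.frombuffer(hdr, dtype=np.uint16)
    assert u32[0] == 0x4972414C
    assert (u16[4], u16[3], u16[2]) == (2, 1, 2)  # major, minor, patch
    assert u16[5] == 88 and u32[3] == 4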
daq2lh5/llama/llama_streamer.py ADDED
@@ -0,0 +1,156 @@
+from __future__ import annotations
+
+import logging
+
+import numpy as np
+
+from ..data_decoder import DataDecoder
+from ..data_streamer import DataStreamer
+from ..raw_buffer import RawBuffer, RawBufferLibrary
+from .llama_event_decoder import LLAMAEventDecoder
+from .llama_header_decoder import LLAMAHeaderDecoder
+
+log = logging.getLogger(__name__)
+
+
+class LLAMAStreamer(DataStreamer):
+    """
+    Decode SIS3316 data acquired using llamaDAQ.
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.in_stream = None
+        self.event_rbkd = None
+        self.header_decoder = LLAMAHeaderDecoder()
+        self.event_decoder = LLAMAEventDecoder()
+
+    def get_decoder_list(self) -> list[DataDecoder]:
+        dec_list = []
+        dec_list.append(self.header_decoder)
+        dec_list.append(self.event_decoder)
+        return dec_list
+
+    def open_stream(
+        self,
+        llama_filename: str,
+        rb_lib: RawBufferLibrary = None,
+        buffer_size: int = 8192,
+        chunk_mode: str = "any_full",
+        out_stream: str = "",
+    ) -> list[RawBuffer]:
+        """Initialize the LLAMA data stream.
+
+        Refer to the documentation for
+        :meth:`.data_streamer.DataStreamer.open_stream` for a description
+        of the parameters.
+        """
+
+        if self.in_stream is not None:
+            raise RuntimeError("tried to open stream while previous one still open")
+        self.in_stream = open(llama_filename.encode("utf-8"), "rb")
+        self.n_bytes_read = 0
+        self.packet_id = 0
+
+        # read header info here
+        header, n_bytes_hdr = self.header_decoder.decode_header(self.in_stream)
+        self.n_bytes_read += n_bytes_hdr
+
+        self.event_decoder.set_channel_configs(
+            self.header_decoder.get_channel_configs()
+        )
+
+        # as far as I can tell, this happens if a user does not specify output.
+        # Then I can still get a rb_lib, but that misses keys entirely, which I
+        # need since channels can have different setups.
+        # So I try to hack my own here in case there is none provided.
+        # if rb_lib is None:
+        #     rb_lib = self.__hack_rb_lib(self.header_decoder.get_channel_configs())
+
+        # initialize the buffers in rb_lib. Store them for fast lookup.
+        # The docs tell me to use initialize instead, but that does not exist (?)
+        super().open_stream(
+            llama_filename,
+            rb_lib,
+            buffer_size=buffer_size,
+            chunk_mode=chunk_mode,
+            out_stream=out_stream,
+        )
+        if rb_lib is None:
+            rb_lib = self.rb_lib
+
+        self.event_rbkd = (
+            rb_lib["LLAMAEventDecoder"].get_keyed_dict()
+            if "LLAMAEventDecoder" in rb_lib
+            else None
+        )
+
+        if "LLAMAHeaderDecoder" in rb_lib:
+            config_rb_list = rb_lib["LLAMAHeaderDecoder"]
+            if len(config_rb_list) != 1:
+                log.warning(
+                    f"config_rb_list had length {len(config_rb_list)}, ignoring all but the first"
+                )
+            rb = config_rb_list[0]
+        else:
+            rb = RawBuffer(lgdo=header)
+        rb.loc = 1  # we have filled this buffer
+        return [rb]
+
+    def close_stream(self) -> None:
+        if self.in_stream is None:
+            raise RuntimeError("tried to close an unopened stream")
+        self.in_stream.close()
+        self.in_stream = None
+
+    def read_packet(self) -> bool:
+        """Read a single packet's worth of data into the :class:`.RawBufferLibrary`.
+
+        Returns
+        -------
+        still_has_data
+            returns `True` while there is still data to read.
+        """
+
+        packet, fch_id = self.__read_bytes()
+        if packet is None:
+            return False  # EOF
+        self.packet_id += 1
+        self.n_bytes_read += len(packet)
+
+        self.any_full |= self.event_decoder.decode_packet(
+            packet, self.event_rbkd, self.packet_id, fch_id
+        )
+
+        return True
+
+    def __read_bytes(self) -> tuple[bytes | None, int]:
+        """
+        Return bytes if the read was successful or None if EOF.
+        The int is the fch_id (needs to be fetched to obtain the size of the event).
+        """
+        if self.in_stream is None:
+            raise RuntimeError("No stream open!")
+
+        position = self.in_stream.tell()  # save position of the event header's 1st byte
+        data1 = self.in_stream.read(
+            4
+        )  # read the first (32 bit) word of the event's header: channelID & format bits
+        if len(data1) < 4:
+            return None, -1  # EOF, I guess
+        self.in_stream.seek(position)  # go back to 1st position of event header
+
+        header_data_32 = np.frombuffer(data1, dtype=np.uint32)
+        fch_id = (header_data_32[0] >> 4) & 0x00000FFF
+
+        event_length_32 = self.header_decoder.get_channel_configs()[fch_id][
+            "event_length"
+        ]
+        event_length_8 = event_length_32 * 4
+
+        packet = self.in_stream.read(event_length_8)
+        if len(packet) < event_length_8:
+            raise RuntimeError(
+                f"Tried to read {event_length_8} bytes but got {len(packet)}"
+            )
+
+        return packet, fch_id
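Putting the pieces together, a minimal read loop using only the methods defined above (the file name is hypothetical; real conversions would normally go through build_raw):

    streamer = LLAMAStreamer()
    rbs = streamer.open_stream("run0001.bin")  # returns the filled header buffer
    try:
        while streamer.read_packet():
            pass  # decoded events accumulate in the streamer.rb_lib raw buffers
    finally:
        streamer.close_stream()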
daq2lh5/logging.py CHANGED
@@ -11,6 +11,8 @@ ERROR = logging.ERROR
 FATAL = logging.FATAL
 CRITICAL = logging.CRITICAL
 
+root = logging.root
+
 
 def setup(level: int = logging.INFO, logger: logging.Logger = None) -> None:
     """Setup a colorful logging output.
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.3.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: legend_daq2lh5
-Version: 1.2.2
+Version: 1.3.0
 Summary: Convert digitizer data to LH5
 Home-page: https://github.com/legend-exp/legend-daq2lh5
 Author: Jason Detwiler
@@ -26,27 +26,28 @@ Classifier: Topic :: Software Development
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: dspeed >=1.3.0a4
-Requires-Dist: h5py >=3.2.0
+Requires-Dist: dspeed>=1.3.0a4
+Requires-Dist: h5py>=3.2.0
 Requires-Dist: hdf5plugin
-Requires-Dist: legend-pydataobj >=1.6
-Requires-Dist: numpy >=1.21
+Requires-Dist: legend-pydataobj>=1.6
+Requires-Dist: numpy>=1.21
 Requires-Dist: pyfcutils
-Requires-Dist: tqdm >=4.27
+Requires-Dist: pyyaml
+Requires-Dist: tqdm>=4.27
 Requires-Dist: xmltodict
 Provides-Extra: all
-Requires-Dist: legend-daq2lh5[docs,test] ; extra == 'all'
+Requires-Dist: legend-daq2lh5[docs,test]; extra == "all"
 Provides-Extra: docs
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: myst-parser ; extra == 'docs'
-Requires-Dist: sphinx ; extra == 'docs'
-Requires-Dist: sphinx-copybutton ; extra == 'docs'
-Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
+Requires-Dist: furo; extra == "docs"
+Requires-Dist: myst-parser; extra == "docs"
+Requires-Dist: sphinx; extra == "docs"
+Requires-Dist: sphinx-copybutton; extra == "docs"
+Requires-Dist: sphinx-inline-tabs; extra == "docs"
 Provides-Extra: test
-Requires-Dist: pre-commit ; extra == 'test'
-Requires-Dist: pylegendtestdata ; extra == 'test'
-Requires-Dist: pytest >=6.0 ; extra == 'test'
-Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pre-commit; extra == "test"
+Requires-Dist: pylegendtestdata; extra == "test"
+Requires-Dist: pytest>=6.0; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
 
 # legend-daq2lh5
 
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.3.0.dist-info}/RECORD RENAMED
@@ -1,10 +1,10 @@
 daq2lh5/__init__.py,sha256=VPmwKuZSA0icpce05ojhnsKWhR4_QUgD0oVXUoN9wks,975
-daq2lh5/_version.py,sha256=XEVwqOPlIChKtEnSO5v_SvghWXnn9WeQSoJ436w3v9Y,411
-daq2lh5/build_raw.py,sha256=JFXC5ln9u353TUZMksY3zydLiV2HlxqdI6_Y2_ZMCIE,10524
+daq2lh5/_version.py,sha256=HGwtpza1HCPtlyqElUvIyH97K44TO13CYiYVZNezQ1M,411
+daq2lh5/build_raw.py,sha256=w0sS8MEN-UhAHSjlPbqDlMBfzjeznIIrKxNxuRxPFqA,10531
 daq2lh5/cli.py,sha256=7bPfH1XbyAS48wZn_0unj4Y5MD5kF7V34Q5srn4jKVM,2913
 daq2lh5/data_decoder.py,sha256=Cn40fodfKs7pKa2odzG1j806iw9IyQVfbbWObNGmof8,10677
-daq2lh5/data_streamer.py,sha256=uv_pCFR45Yoi8GK7p_MYqGk_lzWspeg8cWZensCDqAQ,14131
-daq2lh5/logging.py,sha256=_1gq0S1PyzfQsSTiNm--ERJy-4FRfborTz0-vkMelyE,966
+daq2lh5/data_streamer.py,sha256=WuyqneVg8Kf7V072aUWYT5q3vmPZoobjAZ7oSw0sC9k,14187
+daq2lh5/logging.py,sha256=rYNeToaZBTCaIiC42a4CUroAo1PCOreTXbpEZyMO8Fo,987
 daq2lh5/raw_buffer.py,sha256=2jJvH7BbVkJCzF5A2nOIdcgwUs68eLnpvc0FvTjckaI,17688
 daq2lh5/buffer_processor/__init__.py,sha256=7k6v_KPximtv7805QnX4-xp_S3vqvqwDfdV3q95oZJo,84
 daq2lh5/buffer_processor/buffer_processor.py,sha256=GUxpNDbqGLuUEZmXjeratipbzmki12RFNYZkxgMtesg,14483
@@ -19,6 +19,10 @@ daq2lh5/fc/fc_config_decoder.py,sha256=RLRfUOZN0vYbAprqTymP7TGg641IiP9rgCGIOwWVK
 daq2lh5/fc/fc_event_decoder.py,sha256=JIRsySnxeuY3wmxjJOrTXo6wpelVup8WIvxU-fkPL-A,8131
 daq2lh5/fc/fc_status_decoder.py,sha256=o_3vTAgYXelZxIsreCYioVYid2mY-wqloYKlxoCqX5Q,3390
 daq2lh5/fc/fc_streamer.py,sha256=S0imXdVsiyolPvxI1uiBngpC58DporSNZPqx1HeVi5o,5737
+daq2lh5/llama/llama_base.py,sha256=B-NCBjE_FQE1WxijWi4Z1XBy4rqLHm2XhkC40s7Sdms,290
+daq2lh5/llama/llama_event_decoder.py,sha256=NGzUA1DLChcfnEUNqMAPWq4uqXwUd-FH2e-xXcj2lzM,13894
+daq2lh5/llama/llama_header_decoder.py,sha256=NB7wVH2r99hveQ2KJ-9YMkJMZ6ccNfFsjYa5HbuThAU,6114
+daq2lh5/llama/llama_streamer.py,sha256=Bmcj5Bs28KSV4y08TeJcntzUAkqz6HqlSNm7Kffgloc,5203
 daq2lh5/orca/__init__.py,sha256=Xf6uOIOzk_QkKH_7VizGlCo3iuiAgLtUE3A07x_HXC0,175
 daq2lh5/orca/orca_base.py,sha256=-XIolXsHj-1EdewaGxyvJTZvRGZsDyZe-5PzVOd-LFY,1333
 daq2lh5/orca/orca_digitizers.py,sha256=BsAA3OgQ13YIirDM8pd_xDY3F5FqEY4YjSHviflmov8,20867
@@ -28,9 +32,9 @@ daq2lh5/orca/orca_header_decoder.py,sha256=ORIIyfx22ybyKc-uyWy5ER49-dl3BGpHdfV8O
 daq2lh5/orca/orca_packet.py,sha256=nOHuBXsTI1SzTjHZtff0txSQYvkwo4XGx3fpk7XfYj8,2489
 daq2lh5/orca/orca_run_decoder.py,sha256=3atKXC6mDi8_PK6ICUBBJ-LyaTM8OU31kKWIpmttRr4,2065
 daq2lh5/orca/orca_streamer.py,sha256=VbD9PF-rx_Rk-rEy7XECPmgxr6kZSUf0tC7Qbol3Qeg,15693
-legend_daq2lh5-1.2.2.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-legend_daq2lh5-1.2.2.dist-info/METADATA,sha256=3fISxtb-PvR-D2YG03ZuQYi95ORmLDCPk0w2f0e-wDc,3950
-legend_daq2lh5-1.2.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-legend_daq2lh5-1.2.2.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
-legend_daq2lh5-1.2.2.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
-legend_daq2lh5-1.2.2.dist-info/RECORD,,
+legend_daq2lh5-1.3.0.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+legend_daq2lh5-1.3.0.dist-info/METADATA,sha256=3fR5Z-61fjaRaAEObVk9k8HpTGxiDF0wL4f6djIX8aQ,3956
+legend_daq2lh5-1.3.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+legend_daq2lh5-1.3.0.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
+legend_daq2lh5-1.3.0.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
+legend_daq2lh5-1.3.0.dist-info/RECORD,,
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.3.0.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: setuptools (75.8.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 