legend-daq2lh5 1.2.2__py3-none-any.whl → 1.4.0__py3-none-any.whl
- daq2lh5/_version.py +2 -2
- daq2lh5/buffer_processor/lh5_buffer_processor.py +7 -5
- daq2lh5/build_raw.py +19 -14
- daq2lh5/data_streamer.py +1 -1
- daq2lh5/llama/llama_base.py +14 -0
- daq2lh5/llama/llama_event_decoder.py +328 -0
- daq2lh5/llama/llama_header_decoder.py +149 -0
- daq2lh5/llama/llama_streamer.py +156 -0
- daq2lh5/logging.py +2 -0
- daq2lh5/raw_buffer.py +78 -82
- daq2lh5/utils.py +35 -0
- {legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/METADATA +18 -17
- {legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/RECORD +17 -12
- {legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/WHEEL +1 -1
- {legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/LICENSE +0 -0
- {legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/entry_points.txt +0 -0
- {legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/top_level.txt +0 -0
daq2lh5/_version.py
CHANGED
daq2lh5/buffer_processor/lh5_buffer_processor.py
CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations

-import json
 import logging
 import os

@@ -8,6 +7,7 @@ import h5py
 import lgdo
 from lgdo import lh5

+from .. import utils
 from ..buffer_processor.buffer_processor import buffer_processor
 from ..raw_buffer import RawBuffer, RawBufferLibrary

@@ -104,11 +104,13 @@ def lh5_buffer_processor(
     raw_store.gimme_file(proc_file_name, "a")

     # Do key expansion on the out_spec
-    ...
+    allowed_exts = [ext for exts in utils.__file_extensions__.values() for ext in exts]
+    if isinstance(out_spec, str) and any(
+        [out_spec.endswith(ext) for ext in allowed_exts]
+    ):
+        out_spec = utils.load_dict(out_spec)
     if isinstance(out_spec, dict):
-        RawBufferLibrary(
+        RawBufferLibrary(config=out_spec)

     # Write everything in the raw file to the new file, check for proc_spec under either the group name, out_name, or the name
     for tb in lh5_tables:
daq2lh5/build_raw.py
CHANGED
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import glob
-import json
 import logging
 import os
 import time
@@ -10,8 +9,10 @@ import numpy as np
 from lgdo import lh5
 from tqdm.auto import tqdm

+from . import utils
 from .compass.compass_streamer import CompassStreamer
 from .fc.fc_streamer import FCStreamer
+from .llama.llama_streamer import LLAMAStreamer
 from .orca.orca_streamer import OrcaStreamer
 from .raw_buffer import RawBufferLibrary, write_to_lh5_and_clear

@@ -48,12 +49,14 @@ def build_raw(
         Specification for the output stream.

         - if None, uses ``{in_stream}.lh5`` as the output filename.
-        - if a str not ending
-          ...
+        - if a str not ending with a config file extension, interpreted as the
+          output filename.
+        - if a str ending with a config file extension, interpreted as a
+          filename containing shorthand for the output specification (see
+          :mod:`.raw_buffer`).
+        - if a dict, should be a dict loaded from the shorthand notation for
+          RawBufferLibraries (see :mod:`.raw_buffer`), which is then used to
+          build a :class:`.RawBufferLibrary`.
         - if a :class:`.RawBufferLibrary`, the mapping of data to output file /
           group is taken from that.

@@ -71,8 +74,8 @@ def build_raw(

         - if None, CompassDecoder will sacrifice the first packet to determine
           waveform length
-        - if a str ending
-          ...
+        - if a str ending with a config file extension, interpreted as a
+          filename containing shorthand for the output specification (see
           :mod:`.compass.compass_event_decoder`).

     hdf5_settings
@@ -119,11 +122,13 @@ def build_raw(

     # process out_spec and setup rb_lib if specified
     rb_lib = None
-    ...
+    allowed_exts = [ext for exts in utils.__file_extensions__.values() for ext in exts]
+    if isinstance(out_spec, str) and any(
+        [out_spec.endswith(ext) for ext in allowed_exts]
+    ):
+        out_spec = utils.load_dict(out_spec)
     if isinstance(out_spec, dict):
-        out_spec = RawBufferLibrary(
+        out_spec = RawBufferLibrary(config=out_spec, kw_dict=kwargs)
     if isinstance(out_spec, RawBufferLibrary):
         rb_lib = out_spec
     # if no rb_lib, write all data to file
@@ -180,7 +185,7 @@ def build_raw(
     elif in_stream_type == "Compass":
         streamer = CompassStreamer(compass_config_file)
     elif in_stream_type == "LlamaDaq":
-        ...
+        streamer = LLAMAStreamer()
     elif in_stream_type == "MGDO":
         raise NotImplementedError("MGDO streaming not yet implemented")
     else:
daq2lh5/data_streamer.py
CHANGED
@@ -350,7 +350,7 @@ class DataStreamer(ABC):
             if len(key_list) == 1:
                 this_name = f"{dec_key}_{key_list[0]}"
             else:
-                this_name = f"{dec_key}_{ii}"
+                this_name = f"{dec_key}_{ii}"  # this can cause a name clash e.g. for [[1],[2,3]] ...
             rb = RawBuffer(
                 key_list=key_list, out_stream=out_stream, out_name=this_name
             )
daq2lh5/llama/llama_base.py
ADDED

"""
General utilities for llamaDAQ data decoding
"""

from __future__ import annotations

import logging

log = logging.getLogger(__name__)


# build a unique flat identifier for fadc and channel together
def join_fadcid_chid(fadcid: int, chid: int) -> int:
    return (fadcid << 4) + chid
daq2lh5/llama/llama_event_decoder.py
ADDED

from __future__ import annotations

import copy
import logging
from typing import Any

import lgdo
import numpy as np

from ..data_decoder import DataDecoder
from .llama_header_decoder import LLAMA_Channel_Configs_t

log = logging.getLogger(__name__)

# put decoded values here
llama_decoded_values_template = {
    # packet index in file
    "packet_id": {"dtype": "uint32"},
    # combined index of FADC and channel
    "fch_id": {"dtype": "uint32"},
    # time since epoch
    "timestamp": {"dtype": "uint64", "units": "clock_ticks"},
    "status_flag": {"dtype": "uint32"},
    # waveform data --> not always present
    # "waveform": {
    #     "dtype": "uint16",
    #     "datatype": "waveform",
    #     "wf_len": 65532,  # max value. override this before initializing buffers to save RAM
    #     "dt": 8,  # override if a different clock rate is used
    #     "dt_units": "ns",
    #     "t0_units": "ns",
    # }
}
# """Default llamaDAQ SIS3316 Event decoded values.
#
# Warning
# -------
# This configuration will be dynamically modified by the decoder at runtime.
# """


def check_dict_spec_equal(
    d1: dict[str, Any], d2: dict[str, Any], specs: list[str]
) -> bool:
    for spec in specs:
        if d1.get(spec) != d2.get(spec):
            return False
    return True


class LLAMAEventDecoder(DataDecoder):
    """Decode llamaDAQ SIS3316 digitizer event data."""

    def __init__(self, *args, **kwargs) -> None:
        # these are read for every event (decode_event)
        # One set of settings per fch, since settings can be different per channel group
        self.decoded_values: dict[int, dict[str, Any]] = {}
        super().__init__(*args, **kwargs)
        self.skipped_channels = {}
        self.channel_configs = None
        self.dt_raw: dict[int, float] = (
            {}
        )  # need to buffer that to update t0 for avg waveforms per event
        self.t0_raw: dict[int, float] = (
            {}
        )  # store when receiving channel configs and use for each waveform
        self.t0_avg_const: dict[int, float] = (
            {}
        )  # constant part of the t0 of averaged waveforms

    def set_channel_configs(self, channel_configs: LLAMA_Channel_Configs_t) -> None:
        """Receive channel configurations from llama_streamer after header was parsed
        Adapt self.decoded_values dict based on read configuration
        """
        self.channel_configs = channel_configs
        for fch, config in self.channel_configs.items():
            self.decoded_values[fch] = copy.deepcopy(llama_decoded_values_template)
            format_bits = config["format_bits"]
            sample_clock_freq = config["sample_freq"]
            avg_mode = config["avg_mode"]
            dt_raw: float = 1 / sample_clock_freq * 1000
            dt_avg: float = dt_raw * (1 << (avg_mode + 1))
            # t0 generation functions from llamaDAQ -> EventConfig.hh
            t0_raw: float = (
                float(config["sample_start_index"]) - float(config["sample_pretrigger"])
            ) * dt_raw  # location of the trigger is at t = 0
            t0_avg: float = (
                -float(config["sample_pretrigger"]) * float(dt_raw)
                - float(config["avg_sample_pretrigger"]) * dt_avg
            )  # additional offset to be added independently for every event
            self.dt_raw[fch] = dt_raw
            self.t0_raw[fch] = t0_raw
            self.t0_avg_const[fch] = t0_avg
            if config["sample_length"] > 0:
                self.__add_waveform(
                    self.decoded_values[fch], False, config["sample_length"], dt_raw
                )
            if config["avg_sample_length"] > 0 and avg_mode > 0:
                self.__add_waveform(
                    self.decoded_values[fch], True, config["avg_sample_length"], dt_avg
                )
            if format_bits & 0x01:
                self.__add_accum1till6(self.decoded_values[fch])
            if format_bits & 0x02:
                self.__add_accum7and8(self.decoded_values[fch])
            if format_bits & 0x04:
                self.__add_maw(self.decoded_values[fch])
            if format_bits & 0x08:
                self.__add_energy(self.decoded_values[fch])

    def get_key_lists(self) -> list[list[int | str]]:
        """
        Return a list of lists of keys available for this decoder.
        Each inner list are the fch_id's which share the exact same settings (trace lengths, avg mode, ...),
        so they can end up in the same buffer.
        """
        if self.channel_configs is None:
            raise RuntimeError(
                "Identification of key lists requires channel configs to be set!"
            )

        params_for_equality = ["sample_length", "avg_sample_length", "avg_mode"]

        def check_equal(c1, c2):
            return check_dict_spec_equal(c1, c2, params_for_equality)

        kll: list[list[int]] = []  # key-list-list
        for fch_id, config in self.channel_configs.items():
            for kl in kll:
                # use 1st entry of a list of list as "archetype"
                if check_equal(config, self.channel_configs[kl[0]]):
                    kl.append(fch_id)
                    break
            else:
                kll.append([fch_id])
        log.debug(f"key lists are: {repr(kll)}")
        return kll

    # copied from ORCA SIS3316
    def get_decoded_values(self, key: int = None) -> dict[str, Any]:
        if key is None:
            raise RuntimeError("Key is None!")
            dec_vals_list = self.decoded_values.values()
            if len(dec_vals_list) == 0:
                raise RuntimeError("decoded_values not built yet!")

            return dec_vals_list  # Get first thing we find
        else:
            dec_vals_list = self.decoded_values[key]
            return dec_vals_list

    def decode_packet(
        self,
        packet: bytes,
        evt_rbkd: lgdo.Table | dict[int, lgdo.Table],
        packet_id: int,
        fch_id: int,
        # header: lgdo.Table | dict[int, lgdo.Table]
    ) -> bool:
        """
        Decodes a single packet, which is a single SIS3316 event, as specified in the Struck manual.
        A single packet corresponds to a single event and channel, and has a unique timestamp.
        packets of different channel groups can vary in size!
        """

        # Check if this fch_id should be recorded.
        if fch_id not in evt_rbkd:
            if fch_id not in self.skipped_channels:
                self.skipped_channels[fch_id] = 0
                log.info(f"Skipping channel: {fch_id}")
                log.debug(f"evt_rbkd: {evt_rbkd.keys()}")
            self.skipped_channels[fch_id] += 1
            return False

        tbl = evt_rbkd[fch_id].lgdo
        ii = evt_rbkd[fch_id].loc

        # parse the raw event data into numpy arrays of 16 and 32 bit ints
        evt_data_32 = np.frombuffer(packet, dtype=np.uint32)
        evt_data_16 = np.frombuffer(packet, dtype=np.uint16)

        # e sti gran binaries non ce li metti
        # fch_id = (evt_data_32[0] >> 4) & 0x00000fff --> to be read earlier, since we need size for chopping out the event from the stream
        timestamp = ((evt_data_32[0] & 0xFFFF0000) << 16) + evt_data_32[1]
        format_bits = (evt_data_32[0]) & 0x0000000F
        tbl["fch_id"].nda[ii] = fch_id
        tbl["packet_id"].nda[ii] = packet_id
        tbl["timestamp"].nda[ii] = timestamp
        offset = 2
        if format_bits & 0x1:
            tbl["peakHighValue"].nda[ii] = evt_data_16[4]
            tbl["peakHighIndex"].nda[ii] = evt_data_16[5]
            tbl["information"].nda[ii] = (evt_data_32[offset + 1] >> 24) & 0xFF
            tbl["accSum1"].nda[ii] = evt_data_32[offset + 2]
            tbl["accSum2"].nda[ii] = evt_data_32[offset + 3]
            tbl["accSum3"].nda[ii] = evt_data_32[offset + 4]
            tbl["accSum4"].nda[ii] = evt_data_32[offset + 5]
            tbl["accSum5"].nda[ii] = evt_data_32[offset + 6]
            tbl["accSum6"].nda[ii] = evt_data_32[offset + 7]
            offset += 7
        if format_bits & 0x2:
            tbl["accSum7"].nda[ii] = evt_data_32[offset + 0]
            tbl["accSum8"].nda[ii] = evt_data_32[offset + 1]
            offset += 2
        if format_bits & 0x4:
            tbl["mawMax"].nda[ii] = evt_data_32[offset + 0]
            tbl["mawBefore"].nda[ii] = evt_data_32[offset + 1]
            tbl["mawAfter"].nda[ii] = evt_data_32[offset + 2]
            offset += 3
        if format_bits & 0x8:
            tbl["startEnergy"].nda[ii] = evt_data_32[offset + 0]
            tbl["maxEnergy"].nda[ii] = evt_data_32[offset + 1]
            offset += 2

        raw_length_32 = (evt_data_32[offset + 0]) & 0x03FFFFFF
        tbl["status_flag"].nda[ii] = (
            (evt_data_32[offset + 0]) & 0x04000000
        ) >> 26  # bit 26
        maw_test_flag = ((evt_data_32[offset + 0]) & 0x08000000) >> 27  # bit 27
        avg_data_coming = False
        if evt_data_32[offset + 0] & 0xF0000000 == 0xE0000000:
            avg_data_coming = False
        elif evt_data_32[offset + 0] & 0xF0000000 == 0xA0000000:
            avg_data_coming = True
        else:
            raise RuntimeError("Data corruption 1!")
        offset += 1
        avg_length_32 = 0
        if avg_data_coming:
            avg_count_status = (
                evt_data_32[offset + 0] & 0x00FF0000
            ) >> 16  # bits 23 - 16
            avg_length_32 = evt_data_32[offset + 0] & 0x0000FFFF
            if evt_data_32[offset + 0] & 0xF0000000 != 0xE0000000:
                raise RuntimeError("Data corruption 2!")
            offset += 1

        # --- now the offset points to the raw wf data ---

        if maw_test_flag:
            raise RuntimeError("Cannot handle data with MAW test data!")

        # compute expected and actual array dimensions
        raw_length_16 = 2 * raw_length_32
        avg_length_16 = 2 * avg_length_32
        header_length_16 = offset * 2
        expected_wf_length = len(evt_data_16) - header_length_16

        # error check: waveform size must match expectations
        if raw_length_16 + avg_length_16 != expected_wf_length:
            raise RuntimeError(
                f"Waveform sizes {raw_length_16} (raw) and {avg_length_16} (avg) doesn't match expected size {expected_wf_length}."
            )

        # store waveform if available:
        if raw_length_16 > 0:
            tbl["waveform"]["values"].nda[ii] = evt_data_16[
                offset * 2 : offset * 2 + raw_length_16
            ]
            offset += raw_length_32
            tbl["waveform"]["t0"].nda[ii] = self.t0_raw[fch_id]

        # store pre-averaged (avg) waveform if available:
        if avg_length_16 > 0:
            tbl["avgwaveform"]["values"].nda[ii] = evt_data_16[
                offset * 2 : offset * 2 + avg_length_16
            ]
            offset += avg_length_32
            # need to update avg waveform t0 based on the offset I get per event
            tbl["avgwaveform"]["t0"].nda[ii] = (
                self.t0_avg_const[fch_id]
                + float(avg_count_status) * self.dt_raw[fch_id]
            )

        if offset != len(evt_data_32):
            raise RuntimeError("I messed up...")

        evt_rbkd[fch_id].loc += 1

        return evt_rbkd[fch_id].is_full()

    def __add_waveform(
        self,
        decoded_values_fch: dict[str, Any],
        is_avg: bool,
        max_samples: int,
        dt: float,
    ) -> None:
        """
        Averaged samples are available from the 125 MHz (16 bit) variatnt of the SIS3316 and can be stored independently of raw samples.
        I use waveform for raw samples (dt from clock itself) and avgwaveform from averaged samples (dt from clock * avg number).

        GERDA used to have the low-frequency (waveform) & the high-frequency (aux waveform); here: LF = avgwaveform & HF = waveform.
        """
        name: str = "avgwaveform" if is_avg else "waveform"
        decoded_values_fch[name] = {
            "dtype": "uint16",
            "datatype": "waveform",
            "wf_len": max_samples,  # max value. override this before initializing buffers to save RAM
            "dt": dt,  # the sample pitch (inverse of clock speed)
            # "t0": t0,  # Adding t0 here does not work
            "dt_units": "ns",
            "t0_units": "ns",
        }

    def __add_accum1till6(self, decoded_values_fch: dict[str, Any]) -> None:
        decoded_values_fch["peakHighValue"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["peakHighIndex"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["information"] = {"dtype": "uint32"}
        decoded_values_fch["accSum1"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["accSum2"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["accSum3"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["accSum4"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["accSum5"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["accSum6"] = {"dtype": "uint32", "units": "adc"}

    def __add_accum7and8(self, decoded_values_fch: dict[str, Any]) -> None:
        decoded_values_fch["accSum7"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["accSum8"] = {"dtype": "uint32", "units": "adc"}

    def __add_maw(self, decoded_values_fch: dict[str, Any]) -> None:
        decoded_values_fch["mawMax"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["mawBefore"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["mawAfter"] = {"dtype": "uint32", "units": "adc"}

    def __add_energy(self, decoded_values_fch: dict[str, Any]) -> None:
        decoded_values_fch["startEnergy"] = {"dtype": "uint32", "units": "adc"}
        decoded_values_fch["maxEnergy"] = {"dtype": "uint32", "units": "adc"}
daq2lh5/llama/llama_header_decoder.py
ADDED

from __future__ import annotations

import io
import logging
from typing import Any, Dict

import lgdo
import numpy as np

from ..data_decoder import DataDecoder
from .llama_base import join_fadcid_chid

log = logging.getLogger(__name__)

LLAMA_Channel_Configs_t = Dict[int, Dict[str, Any]]


class LLAMAHeaderDecoder(DataDecoder):  # DataDecoder currently unused
    """
    Decode llamaDAQ header data. Includes the file header as well as all available ("open") channel configurations.
    """

    @staticmethod
    def magic_bytes() -> int:
        return 0x4972414C

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.config = lgdo.Struct()
        self.channel_configs = None

    def decode_header(self, f_in: io.BufferedReader) -> lgdo.Struct:
        n_bytes_read = 0

        f_in.seek(0)  # should be there anyhow, but re-set if not
        header = f_in.read(16)  # read 16 bytes
        n_bytes_read += 16
        evt_data_32 = np.frombuffer(header, dtype=np.uint32)
        evt_data_16 = np.frombuffer(header, dtype=np.uint16)

        # line0: magic bytes
        magic = evt_data_32[0]
        # print(hex(magic))
        if magic == self.magic_bytes():
            log.info("Read in file as llamaDAQ-SIS3316, magic bytes correct.")
        else:
            log.error("Magic bytes not matching for llamaDAQ file!")
            raise RuntimeError("wrong file type")

        self.version_major = evt_data_16[4]
        self.version_minor = evt_data_16[3]
        self.version_patch = evt_data_16[2]
        self.length_econf = evt_data_16[5]
        self.number_chOpen = evt_data_32[3]

        log.debug(
            f"File version: {self.version_major}.{self.version_minor}.{self.version_patch}"
        )
        log.debug(
            f"{self.number_chOpen} channels open, each config {self.length_econf} bytes long"
        )

        n_bytes_read += self.__decode_channel_configs(f_in)

        # print(self.channel_configs[0]["MAW3_offset"])

        # assemble LGDO struct:
        self.config.add_field("version_major", lgdo.Scalar(self.version_major))
        self.config.add_field("version_minor", lgdo.Scalar(self.version_minor))
        self.config.add_field("version_patch", lgdo.Scalar(self.version_patch))
        self.config.add_field("length_econf", lgdo.Scalar(self.length_econf))
        self.config.add_field("number_chOpen", lgdo.Scalar(self.number_chOpen))

        for fch_id, fch_content in self.channel_configs.items():
            fch_lgdo = lgdo.Struct()
            for key, value in fch_content.items():
                fch_lgdo.add_field(key, lgdo.Scalar(value))
            self.config.add_field(f"fch_{fch_id:02d}", fch_lgdo)

        return self.config, n_bytes_read

    # override from DataDecoder
    def make_lgdo(self, key: int = None, size: int = None) -> lgdo.Struct:
        return self.config

    def get_channel_configs(self) -> LLAMA_Channel_Configs_t:
        return self.channel_configs

    def __decode_channel_configs(self, f_in: io.BufferedReader) -> int:
        """
        Reads the metadata from the beginning of the file (the "channel configuration" part, directly after the file header).
        Creates a dictionary of the metadata for each FADC/channel combination, which is returned

        FADC-ID and channel-ID are combined into a single id for flattening:
        (fadcid << 4) + chid

        returns number of bytes read
        """
        # f_in.seek(16)  # should be after file header anyhow, but re-set if not
        n_bytes_read = 0
        self.channel_configs = {}

        if self.length_econf != 88:
            raise RuntimeError("Invalid channel configuration format")

        for _i in range(0, self.number_chOpen):
            # print("reading in channel config {}".format(i))

            channel = f_in.read(self.length_econf)
            n_bytes_read += self.length_econf
            ch_dpf = channel[16:32]
            evt_data_32 = np.frombuffer(channel, dtype=np.uint32)
            evt_data_dpf = np.frombuffer(ch_dpf, dtype=np.float64)

            fadc_index = evt_data_32[0]
            channel_index = evt_data_32[1]
            fch_id = join_fadcid_chid(fadc_index, channel_index)

            if fch_id in self.channel_configs:
                raise RuntimeError(
                    f"duplicate channel configuration in file: FADCID: {fadc_index}, ChannelID: {channel_index}"
                )
            else:
                self.channel_configs[fch_id] = {}

            self.channel_configs[fch_id]["14BitFlag"] = evt_data_32[2] & 0x00000001
            if evt_data_32[2] & 0x00000002 == 0:
                log.warning("Channel in configuration marked as non-open!")
            self.channel_configs[fch_id]["ADC_offset"] = evt_data_32[3]
            self.channel_configs[fch_id]["sample_freq"] = evt_data_dpf[
                0
            ]  # 64 bit float
            self.channel_configs[fch_id]["gain"] = evt_data_dpf[1]
            self.channel_configs[fch_id]["format_bits"] = evt_data_32[8]
            self.channel_configs[fch_id]["sample_start_index"] = evt_data_32[9]
            self.channel_configs[fch_id]["sample_pretrigger"] = evt_data_32[10]
            self.channel_configs[fch_id]["avg_sample_pretrigger"] = evt_data_32[11]
            self.channel_configs[fch_id]["avg_mode"] = evt_data_32[12]
            self.channel_configs[fch_id]["sample_length"] = evt_data_32[13]
            self.channel_configs[fch_id]["avg_sample_length"] = evt_data_32[14]
            self.channel_configs[fch_id]["MAW_buffer_length"] = evt_data_32[15]
            self.channel_configs[fch_id]["event_length"] = evt_data_32[16]
            self.channel_configs[fch_id]["event_header_length"] = evt_data_32[17]
            self.channel_configs[fch_id]["accum6_offset"] = evt_data_32[18]
            self.channel_configs[fch_id]["accum2_offset"] = evt_data_32[19]
            self.channel_configs[fch_id]["MAW3_offset"] = evt_data_32[20]
            self.channel_configs[fch_id]["energy_offset"] = evt_data_32[21]

        return n_bytes_read
daq2lh5/llama/llama_streamer.py
ADDED

from __future__ import annotations

import logging

import numpy as np

from ..data_decoder import DataDecoder
from ..data_streamer import DataStreamer
from ..raw_buffer import RawBuffer, RawBufferLibrary
from .llama_event_decoder import LLAMAEventDecoder
from .llama_header_decoder import LLAMAHeaderDecoder

log = logging.getLogger(__name__)


class LLAMAStreamer(DataStreamer):
    """
    Decode SIS3316 data acquired using llamaDAQ.
    """

    def __init__(self) -> None:
        super().__init__()
        self.in_stream = None
        self.event_rbkd = None
        self.header_decoder = LLAMAHeaderDecoder()
        self.event_decoder = LLAMAEventDecoder()

    def get_decoder_list(self) -> list[DataDecoder]:
        dec_list = []
        dec_list.append(self.header_decoder)
        dec_list.append(self.event_decoder)
        return dec_list

    def open_stream(
        self,
        llama_filename: str,
        rb_lib: RawBufferLibrary = None,
        buffer_size: int = 8192,
        chunk_mode: str = "any_full",
        out_stream: str = "",
    ) -> list[RawBuffer]:
        """Initialize the LLAMA data stream.

        Refer to the documentation for
        :meth:`.data_streamer.DataStreamer.open_stream` for a description
        of the parameters.
        """

        if self.in_stream is not None:
            raise RuntimeError("tried to open stream while previous one still open")
        self.in_stream = open(llama_filename.encode("utf-8"), "rb")
        self.n_bytes_read = 0
        self.packet_id = 0

        # read header info here
        header, n_bytes_hdr = self.header_decoder.decode_header(self.in_stream)
        self.n_bytes_read += n_bytes_hdr

        self.event_decoder.set_channel_configs(
            self.header_decoder.get_channel_configs()
        )

        # as far as I can tell, this happens if a user does not specify output.
        # Then I can still get a rb_lib, but that misses keys entirely, which I need since channels can have different setups.
        # So I try to hack my own here in case there is none provided.
        # if rb_lib is None:
        #     rb_lib = self.__hack_rb_lib(self.header_decoder.get_channel_configs())

        # initialize the buffers in rb_lib. Store them for fast lookup
        # Docu tells me to use initialize instead, but that does not exits (?)
        super().open_stream(
            llama_filename,
            rb_lib,
            buffer_size=buffer_size,
            chunk_mode=chunk_mode,
            out_stream=out_stream,
        )
        if rb_lib is None:
            rb_lib = self.rb_lib

        self.event_rbkd = (
            rb_lib["LLAMAEventDecoder"].get_keyed_dict()
            if "LLAMAEventDecoder" in rb_lib
            else None
        )

        if "LLAMAHeaderDecoder" in rb_lib:
            config_rb_list = rb_lib["LLAMAHeaderDecoder"]
            if len(config_rb_list) != 1:
                log.warning(
                    f"config_rb_list had length {len(config_rb_list)}, ignoring all but the first"
                )
            rb = config_rb_list[0]
        else:
            rb = RawBuffer(lgdo=header)
        rb.loc = 1  # we have filled this buffer
        return [rb]

    def close_stream(self) -> None:
        if self.in_stream is None:
            raise RuntimeError("tried to close an unopened stream")
        self.in_stream.close()
        self.in_stream = None

    def read_packet(self) -> bool:
        """Reads a single packet's worth of data in to the :class:`.RawBufferLibrary`.

        Returns
        -------
        still_has_data
            returns `True` while there is still data to read.
        """

        packet, fch_id = self.__read_bytes()
        if packet is None:
            return False  # EOF
        self.packet_id += 1
        self.n_bytes_read += len(packet)

        self.any_full |= self.event_decoder.decode_packet(
            packet, self.event_rbkd, self.packet_id, fch_id
        )

        return True

    def __read_bytes(self) -> tuple[bytes | None, int]:
        """
        return bytes if read successful or None if EOF.
        int is the fch_id (needs to be fetched to obtain the size of the event)
        """
        if self.in_stream is None:
            raise RuntimeError("No stream open!")

        position = self.in_stream.tell()  # save position of the event header's 1st byte
        data1 = self.in_stream.read(
            4
        )  # read the first (32 bit) word of the event's header: channelID & format bits
        if len(data1) < 4:
            return None, -1  # EOF, I guess
        self.in_stream.seek(position)  # go back to 1st position of event header

        header_data_32 = np.frombuffer(data1, dtype=np.uint32)
        fch_id = (header_data_32[0] >> 4) & 0x00000FFF

        event_length_32 = self.header_decoder.get_channel_configs()[fch_id][
            "event_length"
        ]
        event_length_8 = event_length_32 * 4

        packet = self.in_stream.read(event_length_8)
        if len(packet) < event_length_8:
            raise RuntimeError(
                f"Tried to read {event_length_8} bytes but got {len(packet)}"
            )

        return packet, fch_id
daq2lh5/logging.py
CHANGED
daq2lh5/raw_buffer.py
CHANGED
@@ -19,48 +19,49 @@ output to a :class:`.RawBufferList`.
 :class:`.RawBufferLibrary`: a dictionary of :class:`RawBufferList`\ s, e.g. one
 for each :class:`~.data_decoder.DataDecoder`. Keyed by the decoder name.

-:class:`.RawBuffer` supports a
-:meth:`.RawBufferLibrary.
-...
-Example
-example, the user would call
-kw_dict)`` with ``kw_dict``
-...
+:class:`.RawBuffer` supports a config file short-hand notation, see
+:meth:`.RawBufferLibrary.set_from_dict` for full specification.
+
+Example YAML yielding a valid :class:`.RawBufferLibrary` is below (other
+formats like JSON are also supported). In the example, the user would call
+``RawBufferLibrary.set_from_dict(config, kw_dict)`` with ``kw_dict``
+containing an entry for ``'file_key'``. The other keywords ``{key}`` and
+``{name}`` are understood by and filled in during
+:meth:`.RawBufferLibrary.set_from_dict` unless overloaded in ``kw_dict``.
 Note the use of the wildcard ``*``: this will match all other decoder names /
 keys.

-.. code-block ::
-...
+.. code-block :: yaml
+
+    FCEventDecoder:
+      "g{key:0>3d}":
+        key_list:
+          - [24, 64]
+        out_stream: "$DATADIR/{file_key}_geds.lh5:/geds"
+        proc_spec:
+          window:
+            - waveform
+            - 10
+            - 100
+            - windowed_waveform
+      spms:
+        key_list:
+          - [6, 23]
+        out_stream: "$DATADIR/{file_key}_spms.lh5:/spms"
+      puls:
+        key_list:
+          - 0
+        out_stream: "$DATADIR/{file_key}_auxs.lh5:/auxs"
+      muvt:
+        key_list:
+          - 1
+          - 5
+        out_stream: "$DATADIR/{file_key}_auxs.lh5:/auxs"
+
+    "*":
+      "{name}":
+        key_list: ["*"]
+        out_stream: "$DATADIR/{file_key}_{name}.lh5"
 """

 from __future__ import annotations
@@ -105,7 +106,7 @@ class RawBuffer:
         the name or identifier of the object in the output stream.
     proc_spec
         a dictionary containing the following:
-        - a DSP config file, passed as a dictionary, or as a path to a
+        - a DSP config file, passed as a dictionary, or as a path to a config file
         - an array containing: the name of an LGDO object stored in the :class:`.RawBuffer` to be sliced,
           the start and end indices of the slice, and the new name for the sliced object
         - a dictionary of fields to drop
@@ -185,29 +186,27 @@ class RawBufferList(list):
             self.keyed_dict[key] = rb
         return self.keyed_dict

-    def set_from_json_dict(...
-        ...
-        """Set up a :class:`.RawBufferList` from a dictionary written in JSON
-        shorthand. See :meth:`.RawBufferLibrary.set_from_json_dict` for details.
+    def set_from_dict(self, config: dict, kw_dict: dict[str, str] = None) -> None:
+        """Set up a :class:`.RawBufferList` from a dictionary. See
+        :meth:`.RawBufferLibrary.set_from_dict` for details.

         Notes
         -----
-        `...
+        `config` is changed by this function.
         """
-        ...
-        for name in ...
+        expand_rblist_dict(config, {} if kw_dict is None else kw_dict)
+        for name in config:
             rb = RawBuffer()
-            if "key_list" in ...
-                rb.key_list = ...
-            if "out_stream" in ...
-                rb.out_stream = ...
-            if "proc_spec" in ...
-                rb.proc_spec = ...
+            if "key_list" in config[name]:
+                rb.key_list = config[name]["key_list"]
+            if "out_stream" in config[name]:
+                rb.out_stream = config[name]["out_stream"]
+            if "proc_spec" in config[name]:
+                rb.proc_spec = config[name][
                     "proc_spec"
                 ]  # If you swap this with the next line, then key expansion doesn't work
-            if "out_name" in ...
-                rb.out_name = ...
+            if "out_name" in config[name]:
+                rb.out_name = config[name]["out_name"]
             else:
                 rb.out_name = name
             self.append(rb)
@@ -250,15 +249,12 @@ class RawBufferLibrary(dict):
     write to them.
     """

-    def __init__(self, ...
-        if ...
-            self. ...
+    def __init__(self, config: dict = None, kw_dict: dict[str, str] = None) -> None:
+        if config is not None:
+            self.set_from_dict(config, kw_dict)

-    def set_from_json_dict(
-        ...
-    ) -> None:
-        r"""Set up a :class:`.RawBufferLibrary` from a dictionary written in
-        JSON shorthand.
+    def set_from_dict(self, config: dict, kw_dict: dict[str, str] = None) -> None:
+        r"""Set up a :class:`.RawBufferLibrary` from a dictionary.

         Basic structure:

@@ -288,7 +284,7 @@ class RawBufferLibrary(dict):
           to the first and last integer keys in a contiguous range (e.g. of
          channels) that get stored to the same buffer. These simply get
          replaced with the explicit list of integers in the range. We use
-          lists not tuples for
+          lists not tuples for config file format compliance.
        * The ``name`` can include ``{key:xxx}`` format specifiers, indicating
          that each key in ``key_list`` should be given its own buffer with the
          corresponding name. The same specifier can appear in ``out_path`` to
@@ -307,18 +303,18 @@ class RawBufferLibrary(dict):

         Parameters
         ----------
-        json_dict
-            loaded from a ...
-            `...
+        config
+            loaded from a config file written in the allowed shorthand.
+            `config` is changed by this function.
         kw_dict
             dictionary of keyword-value pairs for substitutions into the
             ``out_stream`` and ``out_name`` fields.
         """
-        for list_name in ...
+        for list_name in config:
             if list_name not in self:
                 self[list_name] = RawBufferList()
-            self[list_name]. ...
-                ...
+            self[list_name].set_from_dict(
+                config[list_name], {} if kw_dict is None else kw_dict
             )

     def get_list_of(self, attribute: str, unique: bool = True) -> list:
@@ -352,26 +348,26 @@ class RawBufferLibrary(dict):
         rb_list.clear_full()


-def ...
-    """Expand shorthands in a
+def expand_rblist_dict(config: dict, kw_dict: dict[str, str]) -> None:
+    """Expand shorthands in a dictionary representing a
     :class:`.RawBufferList`.

-    See :meth:`.RawBufferLibrary. ...
+    See :meth:`.RawBufferLibrary.set_from_dict` for details.

     Notes
     -----
-    The input ...
+    The input dictionary is changed by this function.
     """
     # get the original list of groups because we are going to change the
-    # dict.keys() of ...
+    # dict.keys() of config inside the next list. Note: we have to convert
     # from dict_keys to list here otherwise the loop complains about changing
     # the dictionary during iteration
-    buffer_names = list( ...
+    buffer_names = list(config.keys())
     for name in buffer_names:
         if name == "":
             raise ValueError("buffer name can't be empty")

-        info = ...
+        info = config[name]  # changes to info will change config[name]
         # make sure we have a key list
         if "key_list" not in info:
             raise ValueError(f"'{name}' is missing key_list")
@@ -399,12 +395,12 @@ def expand_rblist_json_dict(json_dict: dict, kw_dict: dict[str, str]) -> None:
             continue  # will be handled later, once the key_list is known
         for key in info["key_list"]:
             expanded_name = name.format(key=key)
-            ...
-            ...
-        ...
+            config[expanded_name] = info.copy()
+            config[expanded_name]["key_list"] = [key]
+        config.pop(name)

     # now re-iterate and expand out_paths
-    for name, info in ...
+    for name, info in config.items():
         if len(info["key_list"]) == 1 and not (
             isinstance(info["key_list"][0], str) and "*" in info["key_list"][0]
         ):
daq2lh5/utils.py
ADDED
from __future__ import annotations

import json
import logging
from pathlib import Path

import yaml

log = logging.getLogger(__name__)


__file_extensions__ = {"json": [".json"], "yaml": [".yaml", ".yml"]}


def load_dict(fname: str, ftype: str | None = None) -> dict:
    """Load a text file as a Python dict."""
    fname = Path(fname)

    # determine file type from extension
    if ftype is None:
        for _ftype, exts in __file_extensions__.items():
            if fname.suffix in exts:
                ftype = _ftype

    msg = f"loading {ftype} dict from: {fname}"
    log.debug(msg)

    with fname.open() as f:
        if ftype == "json":
            return json.load(f)
        if ftype == "yaml":
            return yaml.safe_load(f)

    msg = f"unsupported file format {ftype}"
    raise NotImplementedError(msg)
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: legend_daq2lh5
-Version: 1.2.2
+Version: 1.4.0
 Summary: Convert digitizer data to LH5
 Home-page: https://github.com/legend-exp/legend-daq2lh5
 Author: Jason Detwiler
@@ -26,27 +26,28 @@ Classifier: Topic :: Software Development
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: dspeed
-Requires-Dist: h5py
+Requires-Dist: dspeed>=1.3.0a4
+Requires-Dist: h5py>=3.2.0
 Requires-Dist: hdf5plugin
-Requires-Dist: legend-pydataobj
-Requires-Dist: numpy
+Requires-Dist: legend-pydataobj>=1.6
+Requires-Dist: numpy>=1.21
 Requires-Dist: pyfcutils
-Requires-Dist: ...
+Requires-Dist: pyyaml
+Requires-Dist: tqdm>=4.27
 Requires-Dist: xmltodict
 Provides-Extra: all
-Requires-Dist: legend-daq2lh5[docs,test]
+Requires-Dist: legend-daq2lh5[docs,test]; extra == "all"
 Provides-Extra: docs
-Requires-Dist: furo
-Requires-Dist: myst-parser
-Requires-Dist: sphinx
-Requires-Dist: sphinx-copybutton
-Requires-Dist: sphinx-inline-tabs
+Requires-Dist: furo; extra == "docs"
+Requires-Dist: myst-parser; extra == "docs"
+Requires-Dist: sphinx; extra == "docs"
+Requires-Dist: sphinx-copybutton; extra == "docs"
+Requires-Dist: sphinx-inline-tabs; extra == "docs"
 Provides-Extra: test
-Requires-Dist: pre-commit
-Requires-Dist: pylegendtestdata
-Requires-Dist: pytest
-Requires-Dist: pytest-cov
+Requires-Dist: pre-commit; extra == "test"
+Requires-Dist: pylegendtestdata; extra == "test"
+Requires-Dist: pytest>=6.0; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"

 # legend-daq2lh5

{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/RECORD
CHANGED
@@ -1,14 +1,15 @@
 daq2lh5/__init__.py,sha256=VPmwKuZSA0icpce05ojhnsKWhR4_QUgD0oVXUoN9wks,975
-daq2lh5/_version.py,sha256=...
-daq2lh5/build_raw.py,sha256=...
+daq2lh5/_version.py,sha256=R8-T9fmURjcuoxYpHTAjyNAhgJPDtI2jogCjqYYkfCU,411
+daq2lh5/build_raw.py,sha256=SDpdOU8qpfzMtx8gtFu-RZYqxutQo1smJSkv-LrH9YE,10672
 daq2lh5/cli.py,sha256=7bPfH1XbyAS48wZn_0unj4Y5MD5kF7V34Q5srn4jKVM,2913
 daq2lh5/data_decoder.py,sha256=Cn40fodfKs7pKa2odzG1j806iw9IyQVfbbWObNGmof8,10677
-daq2lh5/data_streamer.py,sha256=...
-daq2lh5/logging.py,sha256=...
-daq2lh5/raw_buffer.py,sha256=...
+daq2lh5/data_streamer.py,sha256=WuyqneVg8Kf7V072aUWYT5q3vmPZoobjAZ7oSw0sC9k,14187
+daq2lh5/logging.py,sha256=rYNeToaZBTCaIiC42a4CUroAo1PCOreTXbpEZyMO8Fo,987
+daq2lh5/raw_buffer.py,sha256=LfkVAOZa4cWz227Ef22rKi57Shk7GbENoxUEkxC6IgU,17403
+daq2lh5/utils.py,sha256=Pc8Oh0ZVBqwwVehyhSJttHnX5tWbOyEZPk-rVg8mb0c,839
 daq2lh5/buffer_processor/__init__.py,sha256=7k6v_KPximtv7805QnX4-xp_S3vqvqwDfdV3q95oZJo,84
 daq2lh5/buffer_processor/buffer_processor.py,sha256=GUxpNDbqGLuUEZmXjeratipbzmki12RFNYZkxgMtesg,14483
-daq2lh5/buffer_processor/lh5_buffer_processor.py,sha256=...
+daq2lh5/buffer_processor/lh5_buffer_processor.py,sha256=einRDLI6EVR-U_TT2GdCZPctFnosJ774eMiUj-ahn6c,8316
 daq2lh5/compass/__init__.py,sha256=mOXHWp7kRDgNTPQty3E8k2KPSy_vAzjneKfAcCVaPyE,132
 daq2lh5/compass/compass_config_parser.py,sha256=zeAsOo1dOJPGLL8-zkAcdYRkqt8BodtOPi96n7fWsl4,12300
 daq2lh5/compass/compass_event_decoder.py,sha256=kiPOaEu8SgLD2wbSPbBahcbTBBRAIw35wtVLBcwPcXY,7386
@@ -19,6 +20,10 @@ daq2lh5/fc/fc_config_decoder.py,sha256=RLRfUOZN0vYbAprqTymP7TGg641IiP9rgCGIOwWVK
 daq2lh5/fc/fc_event_decoder.py,sha256=JIRsySnxeuY3wmxjJOrTXo6wpelVup8WIvxU-fkPL-A,8131
 daq2lh5/fc/fc_status_decoder.py,sha256=o_3vTAgYXelZxIsreCYioVYid2mY-wqloYKlxoCqX5Q,3390
 daq2lh5/fc/fc_streamer.py,sha256=S0imXdVsiyolPvxI1uiBngpC58DporSNZPqx1HeVi5o,5737
+daq2lh5/llama/llama_base.py,sha256=B-NCBjE_FQE1WxijWi4Z1XBy4rqLHm2XhkC40s7Sdms,290
+daq2lh5/llama/llama_event_decoder.py,sha256=NGzUA1DLChcfnEUNqMAPWq4uqXwUd-FH2e-xXcj2lzM,13894
+daq2lh5/llama/llama_header_decoder.py,sha256=NB7wVH2r99hveQ2KJ-9YMkJMZ6ccNfFsjYa5HbuThAU,6114
+daq2lh5/llama/llama_streamer.py,sha256=Bmcj5Bs28KSV4y08TeJcntzUAkqz6HqlSNm7Kffgloc,5203
 daq2lh5/orca/__init__.py,sha256=Xf6uOIOzk_QkKH_7VizGlCo3iuiAgLtUE3A07x_HXC0,175
 daq2lh5/orca/orca_base.py,sha256=-XIolXsHj-1EdewaGxyvJTZvRGZsDyZe-5PzVOd-LFY,1333
 daq2lh5/orca/orca_digitizers.py,sha256=BsAA3OgQ13YIirDM8pd_xDY3F5FqEY4YjSHviflmov8,20867
@@ -28,9 +33,9 @@ daq2lh5/orca/orca_header_decoder.py,sha256=ORIIyfx22ybyKc-uyWy5ER49-dl3BGpHdfV8O
 daq2lh5/orca/orca_packet.py,sha256=nOHuBXsTI1SzTjHZtff0txSQYvkwo4XGx3fpk7XfYj8,2489
 daq2lh5/orca/orca_run_decoder.py,sha256=3atKXC6mDi8_PK6ICUBBJ-LyaTM8OU31kKWIpmttRr4,2065
 daq2lh5/orca/orca_streamer.py,sha256=VbD9PF-rx_Rk-rEy7XECPmgxr6kZSUf0tC7Qbol3Qeg,15693
-legend_daq2lh5-1.2.2.dist-info/...
-legend_daq2lh5-1.2.2.dist-info/...
-legend_daq2lh5-1.2.2.dist-info/...
-legend_daq2lh5-1.2.2.dist-info/...
-legend_daq2lh5-1.2.2.dist-info/...
-legend_daq2lh5-1.2.2.dist-info/...
+legend_daq2lh5-1.4.0.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+legend_daq2lh5-1.4.0.dist-info/METADATA,sha256=SKEW8u7wRrCK5zTY01iMGDYGPmrmg1ED5g9fsbwLVVA,3956
+legend_daq2lh5-1.4.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+legend_daq2lh5-1.4.0.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
+legend_daq2lh5-1.4.0.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
+legend_daq2lh5-1.4.0.dist-info/RECORD,,
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/LICENSE: file without changes
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/entry_points.txt: file without changes
{legend_daq2lh5-1.2.2.dist-info → legend_daq2lh5-1.4.0.dist-info}/top_level.txt: file without changes