legend_daq2lh5-1.6.2-py3-none-any.whl → legend_daq2lh5-1.6.3-py3-none-any.whl
- daq2lh5/_version.py +2 -2
- daq2lh5/compass/compass_event_decoder.py +50 -42
- daq2lh5/compass/compass_header_decoder.py +191 -18
- daq2lh5/compass/compass_streamer.py +39 -33
- daq2lh5/fc/fc_event_decoder.py +35 -18
- daq2lh5/fc/fc_eventheader_decoder.py +6 -3
- daq2lh5/orca/orca_streamer.py +16 -7
- {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/METADATA +1 -1
- {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/RECORD +13 -13
- {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/WHEEL +0 -0
- {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/entry_points.txt +0 -0
- {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/licenses/LICENSE +0 -0
- {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/top_level.txt +0 -0
daq2lh5/_version.py
CHANGED
daq2lh5/compass/compass_event_decoder.py
CHANGED
@@ -25,8 +25,10 @@ compass_decoded_values = {
     "channel": {"dtype": "uint32"},
     # Timestamp of event
     "timestamp": {"dtype": "float64", "units": "ps"},
-    # Energy of event
+    # Energy of event in channels
    "energy": {"dtype": "uint32"},
+    # Energy of event, calibrated
+    "energy_calibrated": {"dtype": "float64"},
     # Energy short of event
     "energy_short": {"dtype": "uint32"},
     # Flags that the digitizer raised
@@ -153,50 +155,56 @@ class CompassEventDecoder(DataDecoder):
         # the time stamp also does not care about if we have an energy short present
         tbl["timestamp"].nda[ii] = np.frombuffer(packet[4:12], dtype=np.uint64)[0]

-        #
-
+        # stumble our way through the energy, depending on what the header says
+        bytes_read = 12
+        if int(header["energy_channels"].value) == 1:
             tbl["energy"].nda[ii] = np.frombuffer(packet[12:14], dtype=np.uint16)[0]
-
-
-
-
-
-
-            ]
-
-
-
-
-        ):  # make sure that the waveform we read in is the same length as in the config
-            raise RuntimeError(
-                f"Waveform size {tbl['num_samples'].nda[ii]} doesn't match expected size {self.decoded_values[bc]['waveform']['wf_len']}. "
-                "Skipping packet"
-            )
-
-        tbl["waveform"]["values"].nda[ii] = np.frombuffer(
-            packet[25:], dtype=np.uint16
-        )
-
+            bytes_read += 2
+            if int(header["energy_calibrated"].value) == 1:
+                tbl["energy_calibrated"].nda[ii] = None
+        elif (int(header["energy_calibrated"].value) == 1) and (
+            int(header["energy_channels"].value) == 0
+        ):
+            tbl["energy_calibrated"].nda[ii] = np.frombuffer(
+                packet[14:22], dtype=np.float64
+            )[0]
+            bytes_read += 8
+            tbl["energy"].nda[ii] = None
         else:
-            tbl["
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            tbl["energy_calibrated"].nda[ii] = np.frombuffer(
+                packet[12:20], dtype=np.float64
+            )[0]
+            bytes_read += 8
+
+        # now handle the energy short
+        if int(header["energy_short"].value) == 1:
+            tbl["energy_short"].nda[ii] = np.frombuffer(
+                packet[bytes_read : bytes_read + 2], dtype=np.uint16
+            )[0]
+            bytes_read += 2
+        else:
+            tbl["energy_short"].nda[ii] = 0
+
+        tbl["flags"].nda[ii] = np.frombuffer(
+            packet[bytes_read : bytes_read + 4], np.uint32
+        )[0]
+        bytes_read += 5  # skip over the waveform code
+        tbl["num_samples"].nda[ii] = np.frombuffer(
+            packet[bytes_read : bytes_read + 4], dtype=np.uint32
+        )[0]
+        bytes_read += 4
+
+        if (
+            tbl["num_samples"].nda[ii] != self.decoded_values[bc]["waveform"]["wf_len"]
+        ):  # make sure that the waveform we read in is the same length as in the config
+            raise RuntimeError(
+                f"Waveform size {tbl['num_samples'].nda[ii]} doesn't match expected size {self.decoded_values[bc]['waveform']['wf_len']}. "
+                "Skipping packet"
             )

+        tbl["waveform"]["values"].nda[ii] = np.frombuffer(
+            packet[bytes_read:], dtype=np.uint16
+        )
+
         evt_rbkd[bc].loc += 1
         return evt_rbkd[bc].is_full()
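The rewrite above replaces fixed byte offsets (such as `packet[25:]`) with a running `bytes_read` cursor, because the energy block is now variable: 2 bytes when the energy is recorded in ADC channels, 8 bytes when it is calibrated, 10 when both, plus an optional 2-byte energy short. A minimal standalone sketch of the same offset walk follows; the `flags` dict is a stand-in for the decoded header scalars, not the package API, and the handling of the both-energies case is simplified.

```python
import numpy as np

def parse_compass_packet(packet: bytes, flags: dict) -> dict:
    """Illustrative offset walk over one CoMPASS packet (not the package's decoder)."""
    out = {
        "board": np.frombuffer(packet[0:2], dtype=np.uint16)[0],
        "channel": np.frombuffer(packet[2:4], dtype=np.uint16)[0],
        "timestamp": np.frombuffer(packet[4:12], dtype=np.uint64)[0],
    }
    offset = 12  # board + channel + timestamp
    if flags["energy_channels"]:
        out["energy"] = np.frombuffer(packet[offset:offset + 2], dtype=np.uint16)[0]
        offset += 2
    if flags["energy_calibrated"]:
        out["energy_calibrated"] = np.frombuffer(packet[offset:offset + 8], dtype=np.float64)[0]
        offset += 8
    if flags["energy_short"]:
        out["energy_short"] = np.frombuffer(packet[offset:offset + 2], dtype=np.uint16)[0]
        offset += 2
    out["flags"] = np.frombuffer(packet[offset:offset + 4], dtype=np.uint32)[0]
    offset += 5  # 4-byte flags plus the 1-byte waveform code
    out["num_samples"] = np.frombuffer(packet[offset:offset + 4], dtype=np.uint32)[0]
    offset += 4
    out["waveform"] = np.frombuffer(packet[offset:], dtype=np.uint16)
    return out
```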
daq2lh5/compass/compass_header_decoder.py
CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 import logging

 import lgdo
+import numpy as np

 from ..data_decoder import DataDecoder
 from .compass_config_parser import compass_config_to_struct
@@ -19,9 +20,7 @@ class CompassHeaderDecoder(DataDecoder):
         super().__init__(*args, **kwargs)
         self.config = None  # initialize to none, because compass_config_to_struct always returns a struct

-    def decode_header(
-        self, in_stream: bytes, config_file: str = None, wf_len: int = None
-    ) -> dict:
+    def decode_header(self, in_stream: bytes, config_file: str = None) -> dict:
         """Decode the CoMPASS file header, and add CoMPASS config data to the header, if present.

         Parameters
@@ -30,8 +29,6 @@ class CompassHeaderDecoder(DataDecoder):
             The stream of data to have its header decoded
         config_file
             The config file for the CoMPASS data, if present
-        wf_len
-            The length of the first waveform in the file, only pre-calculated when the config_file is none

         Returns
         -------
@@ -39,27 +36,203 @@ class CompassHeaderDecoder(DataDecoder):
             A dict containing the header information, as well as the important config information
             of wf_len and num_enabled_channels
         """
-
-
+        wf_len = None
         config_names = [
             "energy_channels",  # energy is given in channels (0: false, 1: true)
             "energy_calibrated",  # energy is given in keV/MeV, according to the calibration (0: false, 1: true)
             "energy_short",  # energy short is present (0: false, 1: true)
             "waveform_samples",  # waveform samples are present (0: false, 1: true)
-
-
-
-
-
-
-
-        ]
-
-
+            "header_present",  # there is a 2 byte header present in the file (0: false, 1: true)
+        ]  # need to determine which of these are present in a file
+
+        # First need to check if the first two bytes are of the form 0xCAEx
+        # CoMPASS specs say that every file should start with this header, but if CoMPASS writes size-limited files, then this header may not be present in *all* files...
+        header_in_bytes = in_stream.read(2)
+
+        if header_in_bytes[-1] == int.from_bytes(b"\xca", byteorder="big"):
+            log.debug("header is present in file.")
+            header_in_binary = bin(int.from_bytes(header_in_bytes, byteorder="little"))
+            header_as_list = str(header_in_binary)[
+                ::-1
+            ]  # reverse it as we care about bit 0, bit 1, etc.
+            header_dict = dict(
+                {
+                    "energy_channels": int(header_as_list[0]) == 1,
+                    "energy_calibrated": int(header_as_list[1]) == 1,
+                    "energy_channels_calibrated": int(header_as_list[0])
+                    == 1 & int(header_as_list[1])
+                    == 1,
+                    "energy_short": int(header_as_list[2]) == 1,
+                    "waveform_samples": int(header_as_list[3]) == 1,
+                    "header_present": True,
+                }
+            )
+
+            # if we don't have the wf_len, get it now
+
+            if config_file is None:
+                if header_dict["waveform_samples"] == 0:
+                    wf_len = 0
+                else:
+                    wf_byte_len = 4
+                    bytes_to_read = (
+                        12  # covers 2-byte board, 2-byte channel, 8-byte time stamp
+                    )
+                    bytes_to_read += (
+                        2 * header_dict["energy_channels"]
+                        + 8 * header_dict["energy_calibrated"]
+                        + 2 * header_dict["energy_short"]
+                    )
+                    bytes_to_read += 4 + 1  # 4-byte flags, 1-byte waveform code
+                    first_bytes = in_stream.read(bytes_to_read + wf_byte_len)
+
+                    wf_len = np.frombuffer(
+                        first_bytes[bytes_to_read : bytes_to_read + wf_byte_len],
+                        dtype=np.uint32,
+                    )[0]
+
+        # if header is not present, we need to play some tricks
+        # either energy short is present or not
+        # and one of three options for energy (ADC, calibrated, both)
+        else:
+            # If the 2 byte header is not present, then we have read in the board by accident
+            header_in_bytes += in_stream.read(
+                10
+            )  # read in the 2-byte ch and 8-byte timestamp
+            bytes_read = 12
+            fixed_header_start_len = (
+                12  # always 12 bytes: 2-byte board, 2-byte channel, 8-byte timestamp
+            )
+            possible_energy_header_byte_lengths = [
+                2,
+                8,
+                10,
+            ]  # either ADC, Calibrated, or both
+            possible_energy_short_header_byte_lengths = [
+                0,
+                2,
+            ]  # energy short is present or not
+            fixed_header_part = 5  # 5 bytes from flags + code
+            wf_len_bytes = 4  # wf_len is 4 bytes long
+
+            for prefix in possible_energy_header_byte_lengths:
+                terminate = False
+                for suffix in possible_energy_short_header_byte_lengths:
+
+                    # ---- first packet -------
+                    # don't read more than we have to, check how many more bytes we need to read in
+                    difference = (
+                        fixed_header_start_len
+                        + prefix
+                        + suffix
+                        + fixed_header_part
+                        + wf_len_bytes
+                        - bytes_read
+                    )
+                    if difference > 0:
+                        # just read a bit more data
+                        header_in_bytes += in_stream.read(difference)
+                        bytes_read += difference
+
+                    wf_len_guess = np.frombuffer(
+                        header_in_bytes[
+                            fixed_header_start_len
+                            + prefix
+                            + suffix
+                            + fixed_header_part : fixed_header_start_len
+                            + prefix
+                            + suffix
+                            + fixed_header_part
+                            + wf_len_bytes
+                        ],
+                        dtype=np.uint32,
+                    )[0]
+
+                    # read in the first waveform data
+                    difference = (
+                        fixed_header_start_len
+                        + prefix
+                        + suffix
+                        + fixed_header_part
+                        + wf_len_bytes
+                        + 2 * wf_len_guess
+                        - bytes_read
+                    )
+                    if difference > 0:
+                        header_in_bytes += in_stream.read(2 * wf_len_guess)
+                        bytes_read += 2 * wf_len_guess
+
+                    # ------ second packet header ----------
+                    difference = (
+                        2
+                        * (
+                            fixed_header_start_len
+                            + prefix
+                            + suffix
+                            + fixed_header_part
+                            + wf_len_bytes
+                        )
+                        + 2 * wf_len_guess
+                        - bytes_read
+                    )
+                    if difference > 0:
+                        header_in_bytes += in_stream.read(difference)
+                        bytes_read += (
+                            fixed_header_start_len
+                            + prefix
+                            + suffix
+                            + fixed_header_part
+                            + wf_len_bytes
+                        )
+                    wf_len_guess_2 = np.frombuffer(
+                        header_in_bytes[
+                            2
+                            * (
+                                fixed_header_start_len
+                                + prefix
+                                + suffix
+                                + fixed_header_part
+                            )
+                            + wf_len_bytes
+                            + 2
+                            * wf_len_guess : 2
+                            * (
+                                fixed_header_start_len
+                                + prefix
+                                + suffix
+                                + fixed_header_part
+                                + wf_len_bytes
+                            )
+                            + 2 * wf_len_guess
+                        ],
+                        dtype=np.uint32,
+                    )[0]
+
+                    # if the waveform lengths agree, then we can stride packets correctly
+                    if wf_len_guess_2 == wf_len_guess:
+                        header_dict = dict(
+                            {
+                                "energy_channels": prefix == 2,
+                                "energy_calibrated": prefix == 8,
+                                "energy_channels_calibrated": prefix == 10,
+                                "energy_short": suffix == 2,
+                                "waveform_samples": wf_len != 0,
+                                "header_present": False,
+                            }
+                        )
+                        wf_len = wf_len_guess
+                        terminate = True
+                        break
+                if terminate:
+                    break
+
+        self.config = compass_config_to_struct(config_file, wf_len)
+
+        for name in config_names:
             if name in self.config:
                 log.warning(f"{name} already in self.config. skipping...")
                 continue
-            value = int(
+            value = int(header_dict[name])
             self.config.add_field(
                 str(name), lgdo.Scalar(value)
             )  # self.config is a struct
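The key idea of the new `decode_header` is that a CoMPASS file may or may not begin with a 2-byte 0xCAEx magic word: when it does, the low bits of that little-endian word say which optional fields each packet carries; when it does not, the decoder guesses the layout by checking that two consecutive packets yield the same waveform length. A hedged sketch of the bit interpretation only; the helper name and return keys are illustrative, not part of the package.

```python
def decode_compass_flag_word(first_two_bytes: bytes) -> dict:
    """Interpret the 16-bit CoMPASS file-header word (little-endian); illustrative only."""
    word = int.from_bytes(first_two_bytes, byteorder="little")
    return {
        "energy_channels": bool(word & 0b0001),    # bit 0: energy in ADC channels
        "energy_calibrated": bool(word & 0b0010),  # bit 1: calibrated energy present
        "energy_short": bool(word & 0b0100),       # bit 2: energy short present
        "waveform_samples": bool(word & 0b1000),   # bit 3: waveform samples present
        "header_present": (word >> 8) == 0xCA,     # high byte is the 0xCA magic
    }

# e.g. a file starting with b"\x0f\xca" advertises all four optional fields
assert decode_compass_flag_word(b"\x0f\xca")["waveform_samples"] is True
```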
daq2lh5/compass/compass_streamer.py
CHANGED
@@ -72,22 +72,6 @@ class CompassStreamer(DataStreamer):
         So, we must read this header once, and then proceed to read packets in.
         """
         # If a config file is not present, the wf_len can be determined by opening the first few bytes of the in_stream
-        wf_len = None
-        if self.compass_config_file is None:
-            self.set_in_stream(stream_name)
-
-            first_bytes = self.in_stream.read(27)
-
-            energy_short = str(
-                bin(int.from_bytes(first_bytes[:2], byteorder="little"))
-            )[::-1][2]
-
-            if int(energy_short) == 1:
-                [wf_len] = np.frombuffer(first_bytes[23:27], dtype=np.uint32)
-            else:
-                [wf_len] = np.frombuffer(first_bytes[21:25], dtype=np.uint32)
-
-            self.close_stream()

         # set the in_stream
         self.set_in_stream(stream_name)
@@ -95,11 +79,20 @@ class CompassStreamer(DataStreamer):

         # read in and decode the file header info, passing the compass_config_file, if present
         self.header = self.header_decoder.decode_header(
-            self.in_stream, self.compass_config_file
+            self.in_stream, self.compass_config_file
         )  # returns an lgdo.Struct
-        self.
-
-
+        self.close_stream()
+
+        # Now we are ready to read the data
+        self.set_in_stream(stream_name)
+        self.n_bytes_read = 0
+
+        if int(self.header["header_present"].value) == 1:
+            # read 2 bytes if we need to
+            self.in_stream.read(2)
+            self.n_bytes_read += (
+                2  # there are 2 bytes in the header, for a 16 bit number to read out
+            )

         # set up data loop variables
         self.packet_id = 0
@@ -171,16 +164,34 @@ class CompassStreamer(DataStreamer):
         if self.in_stream is None:
             raise RuntimeError("self.in_stream is None")

-        if (
+        if (
+            (self.packet_id == 0)
+            and (self.n_bytes_read != 2)
+            and (int(self.header["header_present"].value) == 1)
+        ):
             raise RuntimeError(
                 f"The 2 byte filer header was not converted, instead read in {self.n_bytes_read} for the file header"
             )
+        if (
+            (self.packet_id == 0)
+            and (self.n_bytes_read != 0)
+            and (int(self.header["header_present"].value) == 0)
+        ):
+            raise RuntimeError(
+                f"The header was not converted, instead read in {self.n_bytes_read} for the file header"
+            )

-        # packets have metadata of variable lengths, depending on
+        # packets have metadata of variable lengths, depending on what the header shows
+        header_length = 12  # header always starts with 2-bytes of board, 2-bytes of channel, and 8-bytes of timestamp
+        if int(self.header["energy_channels"].value) == 1:
+            header_length += 2  # if the energy is recorded in ADC channels, then there are an extra 2 bytes in the metadata
+        if int(self.header["energy_calibrated"].value) == 1:
+            header_length += 8  # if the energy is recorded in keV/MeV, then there are an extra 8 bytes in the metadata
         if int(self.header["energy_short"].value) == 1:
-            header_length
-
-
+            header_length += 2  # if the energy short is present, then there are an extra 2 bytes in the metadata
+        header_length += (
+            4 + 1 + 4
+        )  # the flags, the waveform code bytes, and the waveform length

         # read the packet's metadata into the buffer
         pkt_hdr = self.buffer[:header_length]
@@ -190,16 +201,11 @@ class CompassStreamer(DataStreamer):
         # return None once we run out of file
         if n_bytes_read == 0:
             return None
-        if
+        if n_bytes_read not in [23, 25, 29, 31, 33]:
             raise RuntimeError(f"only got {n_bytes_read} bytes for packet header")

-
-
-            [num_samples] = np.frombuffer(pkt_hdr[21:25], dtype=np.uint32)
-            pkt_length = header_length + 2 * num_samples
-        if n_bytes_read == 23:
-            [num_samples] = np.frombuffer(pkt_hdr[19:23], dtype=np.uint32)
-            pkt_length = header_length + 2 * num_samples
+        [num_samples] = np.frombuffer(pkt_hdr[-4:], dtype=np.uint32)
+        pkt_length = header_length + 2 * num_samples

         # load into buffer, resizing as necessary
         if len(self.buffer) < pkt_length:
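The packet-reading path now derives the metadata length from the header flags instead of assuming a fixed layout, which is why the sanity check accepts exactly 23, 25, 29, 31 or 33 bytes. A small sketch of that arithmetic, with an assertion that the accepted sizes are exactly the reachable ones; the function name is illustrative, not the package API.

```python
def compass_metadata_length(energy_channels: bool, energy_calibrated: bool,
                            energy_short: bool) -> int:
    """Byte count of one packet's metadata, mirroring the header_length arithmetic above."""
    length = 12                               # 2-byte board, 2-byte channel, 8-byte timestamp
    length += 2 if energy_channels else 0     # energy in ADC channels
    length += 8 if energy_calibrated else 0   # calibrated energy (float64)
    length += 2 if energy_short else 0        # optional energy short
    return length + 4 + 1 + 4                 # flags, waveform code, waveform length

# every layout with at least one energy field lands on an accepted size
assert {
    compass_metadata_length(ec, cal, es)
    for ec in (True, False)
    for cal in (True, False)
    for es in (True, False)
    if ec or cal
} == {23, 25, 29, 31, 33}
```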
daq2lh5/fc/fc_event_decoder.py
CHANGED
@@ -152,24 +152,41 @@ class FCEventDecoder(DataDecoder):
             tbl["board_id"].nda[loc] = fcio.event.card_address[ii]
             tbl["fc_input"].nda[loc] = fcio.event.card_channel[ii]
             tbl["event_type"].nda[loc] = fcio.event.type
-            tbl["eventnumber"].nda[loc] = fcio.event.timestamp[0]
             tbl["numtraces"].nda[loc] = fcio.event.num_traces
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+            # the order of names is crucial here!
+            timestamp_names = [
+                "eventnumber",
+                "ts_pps",
+                "ts_ticks",
+                "ts_maxticks",
+            ]
+            for name, value in zip(timestamp_names, fcio.event.timestamp):
+                tbl[name].nda[loc] = value
+
+            timeoffset_names = [
+                "mu_offset_sec",
+                "mu_offset_usec",
+                "to_master_sec",
+                "delta_mu_usec",
+                "abs_delta_mu_usec",
+                "to_start_sec",
+                "to_start_usec",
+            ]
+            for name, value in zip(timeoffset_names, fcio.event.timeoffset):
+                tbl[name].nda[loc] = value
+
+            deadregion_names = [
+                "dr_start_pps",
+                "dr_start_ticks",
+                "dr_stop_pps",
+                "dr_stop_ticks",
+                "dr_maxticks",
+            ]
+            for name, value in zip(deadregion_names, fcio.event.deadregion[:5]):
+                tbl[name].nda[loc] = value
+
+            # if event_type == 11: would provide the same check
             # however, the usage of deadregion[5]/[6] must never change
             # and it will always be present if deadregion[7..] is ever used
             if fcio.event.deadregion_size >= 7:
@@ -179,7 +196,7 @@ class FCEventDecoder(DataDecoder):
                 tbl["dr_ch_idx"].nda[loc] = 0
                 tbl["dr_ch_len"].nda[loc] = fcio.config.adcs

-            # The following values are calculated
+            # The following values are calculated by fcio-py, derived from fields above.
             tbl["timestamp"].nda[loc] = fcio.event.unix_time_utc_sec
             tbl["deadinterval_nsec"].nda[loc] = fcio.event.dead_interval_nsec[ii]
             tbl["deadtime"].nda[loc] = fcio.event.dead_time_sec[ii]
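fcio-py delivers `timestamp`, `timeoffset` and `deadregion` as positional arrays, so the decoder now fans each slot out into a named column and the "order of names is crucial" comment is load-bearing: reordering a name list would silently swap columns. A toy illustration of the fan-out with a plain dict and invented values.

```python
timestamp_names = ["eventnumber", "ts_pps", "ts_ticks", "ts_maxticks"]
fake_event_timestamp = [4217, 1699, 123456, 249999999]  # made-up values in the assumed fcio-py order

row = dict(zip(timestamp_names, fake_event_timestamp))
assert row["eventnumber"] == 4217 and row["ts_maxticks"] == 249999999
```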
daq2lh5/fc/fc_eventheader_decoder.py
CHANGED
@@ -14,9 +14,12 @@ log = logging.getLogger(__name__)

 def get_key(streamid: int, card_address: int, card_input: int, iwf: int = -1) -> int:
     if streamid > 0 or iwf < 0:
-        # For backwards compatibility only the lower 16-bit of the streamid are
-
-
+        # For backwards compatibility only the lower 16-bit of the streamid are used.
+        return (
+            (int(streamid) & 0xFFFF) * 1000000
+            + int(card_address) * 100
+            + int(card_input)
+        )
     else:
         return iwf

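`get_key` packs a FlashCam stream id, card address and card input into a single integer key; only the low 16 bits of the stream id survive the mask. The same arithmetic as in the diff, reproduced standalone with a worked example.

```python
def get_key_sketch(streamid: int, card_address: int, card_input: int, iwf: int = -1) -> int:
    """Standalone copy of the key arithmetic above, for illustration only."""
    if streamid > 0 or iwf < 0:
        return (int(streamid) & 0xFFFF) * 1000000 + int(card_address) * 100 + int(card_input)
    return iwf

# streamid 0x1F123 keeps only 0xF123 (= 61731): key = 61731 * 1000000 + 7 * 100 + 3
assert get_key_sketch(0x1F123, 7, 3) == 61731_000_703
```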
daq2lh5/orca/orca_streamer.py
CHANGED
@@ -8,7 +8,7 @@ import logging
 import numpy as np

 from ..data_streamer import DataStreamer
-from ..raw_buffer import RawBuffer, RawBufferLibrary
+from ..raw_buffer import RawBuffer, RawBufferLibrary, RawBufferList
 from . import orca_packet
 from .orca_base import OrcaDecoder
 from .orca_digitizers import (  # noqa: F401
@@ -351,6 +351,18 @@ class OrcaStreamer(DataStreamer):
         if rb_lib is not None and "*" not in rb_lib:
             keep_decoders = []
             for name in decoder_names:
+                # Decoding ORFCIO streams requires decoding ORFCIOConfig packets,
+                # as it opens the wrapped fcio stream (in orca_fcio.py) and decodes the fields
+                # required for the other FCIO packets.
+                # With `out_stream == ''` the lgdo buffer will be allocated, and the packet
+                # decoded, but not written to the out_stream the user defined.
+                if name == "ORFCIOConfigDecoder" and name not in rb_lib:
+                    rb_lib[name] = RawBufferList()
+                    rb_lib[name].append(
+                        RawBuffer(
+                            lgdo=None, key_list=["*"], out_name="{key}", out_stream=""
+                        )
+                    )
                 if name in rb_lib:
                     keep_decoders.append(name)
             decoder_names = keep_decoders
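The block above makes the streamer inject an ORFCIOConfigDecoder buffer whenever the user's rb_lib omits it: the config packets must still be decoded because they open the wrapped fcio stream, but `out_stream=""` keeps them out of the user's output file. Roughly what that amounts to, sketched with a plain dict in place of the real RawBufferLibrary and an illustrative event-decoder key (assumes legend-daq2lh5 ≥ 1.6.3 is installed).

```python
from daq2lh5.raw_buffer import RawBuffer, RawBufferList

# user asked only for event packets (decoder name shown for illustration)
rb_lib = {"ORFCIOEventDecoder": RawBufferList()}

if "ORFCIOConfigDecoder" not in rb_lib:
    rb_lib["ORFCIOConfigDecoder"] = RawBufferList()
    rb_lib["ORFCIOConfigDecoder"].append(
        # decoded and buffered, but never written to the user's out_stream
        RawBuffer(lgdo=None, key_list=["*"], out_name="{key}", out_stream="")
    )
```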
@@ -366,8 +378,7 @@ class OrcaStreamer(DataStreamer):
             shift_data_id=False
         )
         instantiated_decoders = {"OrcaHeaderDecoder": self.header_decoder}
-        for data_id in id_to_dec_name_dict.
-            name = id_to_dec_name_dict[data_id]
+        for data_id, name in id_to_dec_name_dict.items():
             if name not in instantiated_decoders:
                 if name not in globals():
                     self.missing_decoders.append(data_id)
@@ -384,8 +395,6 @@ class OrcaStreamer(DataStreamer):
                 chunk_mode=chunk_mode,
                 out_stream=out_stream,
             )
-        if rb_lib is None:
-            rb_lib = self.rb_lib
         good_buffers = []
         for data_id in self.decoder_id_dict.keys():
             name = id_to_dec_name_dict[data_id]
@@ -401,8 +410,8 @@ class OrcaStreamer(DataStreamer):
         log.debug(f"rb_lib = {self.rb_lib}")

         # return header raw buffer
-        if "OrcaHeaderDecoder" in rb_lib:
-            header_rb_list = rb_lib["OrcaHeaderDecoder"]
+        if "OrcaHeaderDecoder" in self.rb_lib:
+            header_rb_list = self.rb_lib["OrcaHeaderDecoder"]
             if len(header_rb_list) != 1:
                 log.warning(
                     f"header_rb_list had length {len(header_rb_list)}, ignoring all but the first"
{legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 daq2lh5/__init__.py,sha256=VPmwKuZSA0icpce05ojhnsKWhR4_QUgD0oVXUoN9wks,975
-daq2lh5/_version.py,sha256=
+daq2lh5/_version.py,sha256=RrM0JkMSEquWJIGj8RCqoST__VpnXFzYf3k-uS1FCko,511
 daq2lh5/build_raw.py,sha256=wehR1jADL_ponguOMfAsMTxsebE-qdztFw4Txm76fpw,10716
 daq2lh5/cli.py,sha256=yCLshhXZdXpeaVElqLYwGHR1uN7fcPDRnsrFQgGdwYM,3210
 daq2lh5/data_decoder.py,sha256=45ckhpfqKh6_FfPUn_iETZLjRFd4VtvQhet4l2KvldA,10606
@@ -12,13 +12,13 @@ daq2lh5/buffer_processor/buffer_processor.py,sha256=mG4kJXt8V9Jji_UTBfllRffIVqLP
 daq2lh5/buffer_processor/lh5_buffer_processor.py,sha256=dKND-18TTPDZH8VxdEaQNZPK7w_G-dC4fsBns1NIjMY,8409
 daq2lh5/compass/__init__.py,sha256=mOXHWp7kRDgNTPQty3E8k2KPSy_vAzjneKfAcCVaPyE,132
 daq2lh5/compass/compass_config_parser.py,sha256=zeAsOo1dOJPGLL8-zkAcdYRkqt8BodtOPi96n7fWsl4,12300
-daq2lh5/compass/compass_event_decoder.py,sha256=
-daq2lh5/compass/compass_header_decoder.py,sha256=
-daq2lh5/compass/compass_streamer.py,sha256=
+daq2lh5/compass/compass_event_decoder.py,sha256=hNEz4q45vDxsC6JnChW55rCsYaYtGaxZ4gXYrIEK8Iw,7463
+daq2lh5/compass/compass_header_decoder.py,sha256=e2uVnx6o36K-BzPTTpl2ot8r4rwZp-qLniTXee3q2aI,10251
+daq2lh5/compass/compass_streamer.py,sha256=JkR5HvX9_Vl0RgiTS26A4Ae0EelmrpbgG-GihDhUJ3w,9186
 daq2lh5/fc/__init__.py,sha256=bB1j6r-bDmylNi0iutQeAJGjsDSjLSoXMqFfXWwfb8I,141
 daq2lh5/fc/fc_config_decoder.py,sha256=f-WqmvfDDDnx4ho2q4w5Hmzu55yvweHsi1hgM2X4RbM,4654
-daq2lh5/fc/fc_event_decoder.py,sha256=
-daq2lh5/fc/fc_eventheader_decoder.py,sha256=
+daq2lh5/fc/fc_event_decoder.py,sha256=_g-bznhBHk8v-VSHYHJa5LlJYP69gWZ0LNXslYgEAlw,7897
+daq2lh5/fc/fc_eventheader_decoder.py,sha256=m9XLpLl52Set1ITpCLp9_0L6wS6AaAN8uDUdDdqYG5Y,12390
 daq2lh5/fc/fc_fsp_decoder.py,sha256=sCv3YfZ0QnN7LAS2msYo9XbMeEIFuKmyLp3SiB4fQ9M,35074
 daq2lh5/fc/fc_status_decoder.py,sha256=o3DjpXtx3VIk3hWrodWopYczfXiJr-ekSUx_HqW2cyc,8818
 daq2lh5/fc/fc_streamer.py,sha256=iJSd2OiOYAjFtBg-Yr9D64_kp3xO_9Jt5XpO9q0sFpE,9208
@@ -36,11 +36,11 @@ daq2lh5/orca/orca_header.py,sha256=2WlB8rFXwzD89lX51JXYiOlop0-C4pWEEsIWKq2ouDM,4
 daq2lh5/orca/orca_header_decoder.py,sha256=ORIIyfx22ybyKc-uyWy5ER49-dl3BGpHdfV8OCDmjIw,1632
 daq2lh5/orca/orca_packet.py,sha256=LtKEAcH2VGzY8AQEqAokZTHonShACsKisGdQR0AMDAM,2835
 daq2lh5/orca/orca_run_decoder.py,sha256=61gghgjqD1ovH3KoHFJuKJp0GLJn4X0MIuoYrsIzMfQ,2059
-daq2lh5/orca/orca_streamer.py,sha256=
+daq2lh5/orca/orca_streamer.py,sha256=a9iy1FfbN1niX4gcJQpPCwiv1fKLLEZMcaGs2lOnfFw,17284
 daq2lh5/orca/skim_orca_file.py,sha256=ESWfbv9yDRGaPf3Ptlw6Dxnc4uwhJoagb_aYlWNyrq0,1954
-legend_daq2lh5-1.6.
-legend_daq2lh5-1.6.
-legend_daq2lh5-1.6.
-legend_daq2lh5-1.6.
-legend_daq2lh5-1.6.
-legend_daq2lh5-1.6.
+legend_daq2lh5-1.6.3.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+legend_daq2lh5-1.6.3.dist-info/METADATA,sha256=NaUXE8unZmqIsT_akldDSofULeVX3L6TsQV4b9XIQfc,4005
+legend_daq2lh5-1.6.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+legend_daq2lh5-1.6.3.dist-info/entry_points.txt,sha256=RPm2GPw2YADil9tWgvZsIysmJz9KuktBWH_1P38PWoc,119
+legend_daq2lh5-1.6.3.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
+legend_daq2lh5-1.6.3.dist-info/RECORD,,
Files without changes: {legend_daq2lh5-1.6.2.dist-info → legend_daq2lh5-1.6.3.dist-info}/WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt