legend-daq2lh5 1.4.0__py3-none-any.whl → 1.4.2__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only.
daq2lh5/_version.py CHANGED
@@ -1,8 +1,13 @@
- # file generated by setuptools_scm
+ # file generated by setuptools-scm
  # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
  TYPE_CHECKING = False
  if TYPE_CHECKING:
- from typing import Tuple, Union
+ from typing import Tuple
+ from typing import Union
+
  VERSION_TUPLE = Tuple[Union[int, str], ...]
  else:
  VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.4.0'
- __version_tuple__ = version_tuple = (1, 4, 0)
+ __version__ = version = '1.4.2'
+ __version_tuple__ = version_tuple = (1, 4, 2)
File without changes
daq2lh5/llama/llama_event_decoder.py CHANGED
@@ -17,10 +17,10 @@ llama_decoded_values_template = {
  # packet index in file
  "packet_id": {"dtype": "uint32"},
  # combined index of FADC and channel
- "fch_id": {"dtype": "uint32"},
+ "fadc_channel_id": {"dtype": "uint32"},
  # time since epoch
- "timestamp": {"dtype": "uint64", "units": "clock_ticks"},
- "status_flag": {"dtype": "uint32"},
+ "time_since_run_start": {"dtype": "float64", "units": "s"},
+ "fadc_status_bits": {"dtype": "uint32"},
  # waveform data --> not always present
  # "waveform": {
  # "dtype": "uint16",
@@ -69,8 +69,9 @@ class LLAMAEventDecoder(DataDecoder):
  ) # constant part of the t0 of averaged waveforms

  def set_channel_configs(self, channel_configs: LLAMA_Channel_Configs_t) -> None:
- """Receive channel configurations from llama_streamer after header was parsed
- Adapt self.decoded_values dict based on read configuration
+ """Receive channel configurations from llama_streamer after header was parsed.
+
+ Adapt self.decoded_values dict based on read configuration.
  """
  self.channel_configs = channel_configs
  for fch, config in self.channel_configs.items():
@@ -109,10 +110,11 @@ class LLAMAEventDecoder(DataDecoder):
  self.__add_energy(self.decoded_values[fch])

  def get_key_lists(self) -> list[list[int | str]]:
- """
- Return a list of lists of keys available for this decoder.
- Each inner list are the fch_id's which share the exact same settings (trace lengths, avg mode, ...),
- so they can end up in the same buffer.
+ """Get list of keys.
+
+ Return a list of lists of keys available for this decoder. Each inner
+ list are the fadc_channel_id's which share the exact same settings
+ (trace lengths, avg mode, ...), so they can end up in the same buffer.
  """
  if self.channel_configs is None:
  raise RuntimeError(
@@ -125,14 +127,14 @@ class LLAMAEventDecoder(DataDecoder):
  return check_dict_spec_equal(c1, c2, params_for_equality)

  kll: list[list[int]] = [] # key-list-list
- for fch_id, config in self.channel_configs.items():
+ for fadc_channel_id, config in self.channel_configs.items():
  for kl in kll:
  # use 1st entry of a list of list as "archetype"
  if check_equal(config, self.channel_configs[kl[0]]):
- kl.append(fch_id)
+ kl.append(fadc_channel_id)
  break
  else:
- kll.append([fch_id])
+ kll.append([fadc_channel_id])
  log.debug(f"key lists are: {repr(kll)}")
  return kll

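Note: the grouping above is compact, so here is a standalone sketch of the same idea for readers skimming the diff. It is not the package's code; `configs` and `params_for_equality` are made-up stand-ins for the decoder's channel configurations and its equality parameters. Channels whose configurations agree on those parameters end up in the same key list and can therefore share a raw buffer.

# Standalone sketch of the key-list grouping idea (illustrative only; the
# real decoder compares full channel configs via check_dict_spec_equal).
configs = {
    0: {"sample_length": 1000, "avg_mode": 0},
    1: {"sample_length": 1000, "avg_mode": 0},
    5: {"sample_length": 2000, "avg_mode": 1},
}
params_for_equality = ["sample_length", "avg_mode"]

def check_equal(c1: dict, c2: dict) -> bool:
    # channels are buffer-compatible if they agree on the chosen parameters
    return all(c1.get(p) == c2.get(p) for p in params_for_equality)

kll: list[list[int]] = []  # key-list-list
for fadc_channel_id, config in configs.items():
    for kl in kll:
        # the first entry of each inner list acts as the "archetype"
        if check_equal(config, configs[kl[0]]):
            kl.append(fadc_channel_id)
            break
    else:
        kll.append([fadc_channel_id])

print(kll)  # [[0, 1], [5]]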
@@ -154,66 +156,78 @@ class LLAMAEventDecoder(DataDecoder):
  packet: bytes,
  evt_rbkd: lgdo.Table | dict[int, lgdo.Table],
  packet_id: int,
- fch_id: int,
+ fadc_channel_id: int,
  # header: lgdo.Table | dict[int, lgdo.Table]
  ) -> bool:
- """
- Decodes a single packet, which is a single SIS3316 event, as specified in the Struck manual.
- A single packet corresponds to a single event and channel, and has a unique timestamp.
- packets of different channel groups can vary in size!
+ """Decode packet.
+
+ Decodes a single packet, which is a single SIS3316 event, as specified
+ in the Struck manual. A single packet corresponds to a single event
+ and channel, and has a unique timestamp. packets of different channel
+ groups can vary in size!
  """

- # Check if this fch_id should be recorded.
- if fch_id not in evt_rbkd:
- if fch_id not in self.skipped_channels:
- self.skipped_channels[fch_id] = 0
- log.info(f"Skipping channel: {fch_id}")
+ # Check if this fadc_channel_id should be recorded.
+ if fadc_channel_id not in evt_rbkd:
+ if fadc_channel_id not in self.skipped_channels:
+ self.skipped_channels[fadc_channel_id] = 0
+ log.info(f"Skipping channel: {fadc_channel_id}")
  log.debug(f"evt_rbkd: {evt_rbkd.keys()}")
- self.skipped_channels[fch_id] += 1
+ self.skipped_channels[fadc_channel_id] += 1
  return False

- tbl = evt_rbkd[fch_id].lgdo
- ii = evt_rbkd[fch_id].loc
+ tbl = evt_rbkd[fadc_channel_id].lgdo
+ ii = evt_rbkd[fadc_channel_id].loc

  # parse the raw event data into numpy arrays of 16 and 32 bit ints
  evt_data_32 = np.frombuffer(packet, dtype=np.uint32)
  evt_data_16 = np.frombuffer(packet, dtype=np.uint16)

  # e sti gran binaries non ce li metti
- # fch_id = (evt_data_32[0] >> 4) & 0x00000fff --> to be read earlier, since we need size for chopping out the event from the stream
- timestamp = ((evt_data_32[0] & 0xFFFF0000) << 16) + evt_data_32[1]
+ # fadc_channel_id = (evt_data_32[0] >> 4) & 0x00000fff --> to be read earlier,
+ # since we need size for chopping out the event from the stream
+ timestamp_in_clock_ticks = ((evt_data_32[0] & 0xFFFF0000) << 16) + evt_data_32[
+ 1
+ ]
  format_bits = (evt_data_32[0]) & 0x0000000F
- tbl["fch_id"].nda[ii] = fch_id
+
+ timestamp = timestamp_in_clock_ticks * self.dt_raw[fadc_channel_id] * 1e-9
+
+ tbl["fadc_channel_id"].nda[ii] = fadc_channel_id
  tbl["packet_id"].nda[ii] = packet_id
- tbl["timestamp"].nda[ii] = timestamp
+ tbl["time_since_run_start"].nda[ii] = timestamp
+
  offset = 2
  if format_bits & 0x1:
- tbl["peakHighValue"].nda[ii] = evt_data_16[4]
- tbl["peakHighIndex"].nda[ii] = evt_data_16[5]
- tbl["information"].nda[ii] = (evt_data_32[offset + 1] >> 24) & 0xFF
- tbl["accSum1"].nda[ii] = evt_data_32[offset + 2]
- tbl["accSum2"].nda[ii] = evt_data_32[offset + 3]
- tbl["accSum3"].nda[ii] = evt_data_32[offset + 4]
- tbl["accSum4"].nda[ii] = evt_data_32[offset + 5]
- tbl["accSum5"].nda[ii] = evt_data_32[offset + 6]
- tbl["accSum6"].nda[ii] = evt_data_32[offset + 7]
+ tbl["wf_max_sample_value"].nda[ii] = evt_data_16[4]
+ tbl["wf_max_sample_idx"].nda[ii] = evt_data_16[5]
+ tbl["info_bits"].nda[ii] = (evt_data_32[offset + 1] >> 24) & 0xFF
+ tbl["cumsum_1"].nda[ii] = evt_data_32[offset + 2]
+ tbl["cumsum_2"].nda[ii] = evt_data_32[offset + 3]
+ tbl["cumsum_3"].nda[ii] = evt_data_32[offset + 4]
+ tbl["cumsum_4"].nda[ii] = evt_data_32[offset + 5]
+ tbl["cumsum_5"].nda[ii] = evt_data_32[offset + 6]
+ tbl["cumsum_6"].nda[ii] = evt_data_32[offset + 7]
  offset += 7
+
  if format_bits & 0x2:
- tbl["accSum7"].nda[ii] = evt_data_32[offset + 0]
- tbl["accSum8"].nda[ii] = evt_data_32[offset + 1]
+ tbl["cumsum_7"].nda[ii] = evt_data_32[offset + 0]
+ tbl["cumsum_8"].nda[ii] = evt_data_32[offset + 1]
  offset += 2
+
  if format_bits & 0x4:
- tbl["mawMax"].nda[ii] = evt_data_32[offset + 0]
- tbl["mawBefore"].nda[ii] = evt_data_32[offset + 1]
- tbl["mawAfter"].nda[ii] = evt_data_32[offset + 2]
+ tbl["maw_max"].nda[ii] = evt_data_32[offset + 0]
+ tbl["maw_before"].nda[ii] = evt_data_32[offset + 1]
+ tbl["maw_after"].nda[ii] = evt_data_32[offset + 2]
  offset += 3
+
  if format_bits & 0x8:
- tbl["startEnergy"].nda[ii] = evt_data_32[offset + 0]
- tbl["maxEnergy"].nda[ii] = evt_data_32[offset + 1]
+ tbl["start_energy"].nda[ii] = evt_data_32[offset + 0]
+ tbl["max_energy"].nda[ii] = evt_data_32[offset + 1]
  offset += 2

  raw_length_32 = (evt_data_32[offset + 0]) & 0x03FFFFFF
- tbl["status_flag"].nda[ii] = (
+ tbl["fadc_status_bits"].nda[ii] = (
  (evt_data_32[offset + 0]) & 0x04000000
  ) >> 26 # bit 26
  maw_test_flag = ((evt_data_32[offset + 0]) & 0x08000000) >> 27 # bit 27
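Note: the new timestamp handling above assembles a 48-bit tick counter from the upper 16 bits of the first 32-bit header word and the full second word, then converts it to seconds with the channel's raw sample period (the `* 1e-9` factor implies `dt_raw` is stored in nanoseconds). A small worked sketch with invented word values, not data from a real packet:

# Illustrative values only; a real packet header would supply word0/word1.
word0 = 0x0002_1234  # upper 16 bits carry timestamp bits 47..32 (here 0x0002)
word1 = 0x89AB_CDEF  # timestamp bits 31..0

ticks = ((word0 & 0xFFFF0000) << 16) + word1
assert ticks == (0x0002 << 32) + 0x89ABCDEF  # 48-bit tick counter

dt_raw_ns = 8.0  # assumed raw sample period in ns (e.g. a 125 MHz clock)
time_since_run_start_s = ticks * dt_raw_ns * 1e-9
print(f"{ticks} ticks -> {time_since_run_start_s:.6f} s")

# The low nibble of word0 selects which optional blocks follow the header:
format_bits = word0 & 0x0000000F
print(bool(format_bits & 0x1))  # peak/accumulator 1-6 block present?
print(bool(format_bits & 0x2))  # accumulator 7-8 block present?
print(bool(format_bits & 0x4))  # MAW values block present? (True here)
print(bool(format_bits & 0x8))  # energy block present?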
@@ -249,35 +263,36 @@ class LLAMAEventDecoder(DataDecoder):
  # error check: waveform size must match expectations
  if raw_length_16 + avg_length_16 != expected_wf_length:
  raise RuntimeError(
- f"Waveform sizes {raw_length_16} (raw) and {avg_length_16} (avg) doesn't match expected size {expected_wf_length}."
+ f"Waveform sizes {raw_length_16} (raw) and {avg_length_16} (avg) "
+ f"doesn't match expected size {expected_wf_length}."
  )

  # store waveform if available:
  if raw_length_16 > 0:
- tbl["waveform"]["values"].nda[ii] = evt_data_16[
+ tbl["waveform_windowed"]["values"].nda[ii] = evt_data_16[
  offset * 2 : offset * 2 + raw_length_16
  ]
  offset += raw_length_32
- tbl["waveform"]["t0"].nda[ii] = self.t0_raw[fch_id]
+ tbl["waveform_windowed"]["t0"].nda[ii] = self.t0_raw[fadc_channel_id]

  # store pre-averaged (avg) waveform if available:
  if avg_length_16 > 0:
- tbl["avgwaveform"]["values"].nda[ii] = evt_data_16[
+ tbl["waveform_presummed"]["values"].nda[ii] = evt_data_16[
  offset * 2 : offset * 2 + avg_length_16
  ]
  offset += avg_length_32
  # need to update avg waveform t0 based on the offset I get per event
- tbl["avgwaveform"]["t0"].nda[ii] = (
- self.t0_avg_const[fch_id]
- + float(avg_count_status) * self.dt_raw[fch_id]
+ tbl["waveform_presummed"]["t0"].nda[ii] = (
+ self.t0_avg_const[fadc_channel_id]
+ + float(avg_count_status) * self.dt_raw[fadc_channel_id]
  )

  if offset != len(evt_data_32):
  raise RuntimeError("I messed up...")

- evt_rbkd[fch_id].loc += 1
+ evt_rbkd[fadc_channel_id].loc += 1

- return evt_rbkd[fch_id].is_full()
+ return evt_rbkd[fadc_channel_id].is_full()

  def __add_waveform(
  self,
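Note: as the comment above says, the t0 of the presummed waveform is shifted per event: the channel's constant part plus the event's averaging offset times the raw sample period. A numeric sketch with invented values (the nanosecond units are an assumption, not taken from the package):

# Illustrative numbers only; the decoder reads these per channel and per event.
t0_avg_const = -4000.0   # constant part of t0 for this channel (assumed ns)
dt_raw = 8.0             # raw sample period (assumed ns)
avg_count_status = 12    # per-event averaging offset from the packet

t0_presummed = t0_avg_const + float(avg_count_status) * dt_raw
print(t0_presummed)  # -3904.0, i.e. shifted by 12 raw samples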
@@ -286,13 +301,17 @@ class LLAMAEventDecoder(DataDecoder):
  max_samples: int,
  dt: float,
  ) -> None:
- """
- Averaged samples are available from the 125 MHz (16 bit) variatnt of the SIS3316 and can be stored independently of raw samples.
- I use waveform for raw samples (dt from clock itself) and avgwaveform from averaged samples (dt from clock * avg number).
+ """Helper function to add waveform.
+
+ Averaged samples are available from the 125 MHz (16 bit) variatnt of
+ the SIS3316 and can be stored independently of raw samples. I use
+ waveform for raw samples (dt from clock itself) and waveform_presummed
+ from averaged samples (dt from clock * avg number).

- GERDA used to have the low-frequency (waveform) & the high-frequency (aux waveform); here: LF = avgwaveform & HF = waveform.
+ GERDA used to have the low-frequency (waveform) & the high-frequency
+ (aux waveform); here: LF = waveform_presummed & HF = waveform_windowed.
  """
- name: str = "avgwaveform" if is_avg else "waveform"
+ name: str = "waveform_presummed" if is_avg else "waveform_windowed"
  decoded_values_fch[name] = {
  "dtype": "uint16",
  "datatype": "waveform",
@@ -304,25 +323,25 @@ class LLAMAEventDecoder(DataDecoder):
  }

  def __add_accum1till6(self, decoded_values_fch: dict[str, Any]) -> None:
- decoded_values_fch["peakHighValue"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["peakHighIndex"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["information"] = {"dtype": "uint32"}
- decoded_values_fch["accSum1"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["accSum2"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["accSum3"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["accSum4"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["accSum5"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["accSum6"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["wf_max_sample_value"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["wf_max_sample_idx"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["info_bits"] = {"dtype": "uint32"}
+ decoded_values_fch["cumsum_1"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_2"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_3"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_4"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_5"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_6"] = {"dtype": "uint32", "units": "adc"}

  def __add_accum7and8(self, decoded_values_fch: dict[str, Any]) -> None:
- decoded_values_fch["accSum7"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["accSum8"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_7"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["cumsum_8"] = {"dtype": "uint32", "units": "adc"}

  def __add_maw(self, decoded_values_fch: dict[str, Any]) -> None:
- decoded_values_fch["mawMax"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["mawBefore"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["mawAfter"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["maw_max"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["maw_before"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["maw_after"] = {"dtype": "uint32", "units": "adc"}

  def __add_energy(self, decoded_values_fch: dict[str, Any]) -> None:
- decoded_values_fch["startEnergy"] = {"dtype": "uint32", "units": "adc"}
- decoded_values_fch["maxEnergy"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["start_energy"] = {"dtype": "uint32", "units": "adc"}
+ decoded_values_fch["max_energy"] = {"dtype": "uint32", "units": "adc"}
daq2lh5/llama/llama_header_decoder.py CHANGED
@@ -16,8 +16,10 @@ LLAMA_Channel_Configs_t = Dict[int, Dict[str, Any]]


  class LLAMAHeaderDecoder(DataDecoder): # DataDecoder currently unused
- """
- Decode llamaDAQ header data. Includes the file header as well as all available ("open") channel configurations.
+ """Decode llamaDAQ header data.
+
+ Includes the file header as well as all available ("open") channel
+ configurations.
  """

  @staticmethod
@@ -62,7 +64,7 @@ class LLAMAHeaderDecoder(DataDecoder): # DataDecoder currently unused

  n_bytes_read += self.__decode_channel_configs(f_in)

- # print(self.channel_configs[0]["MAW3_offset"])
+ # print(self.channel_configs[0]["maw3_offset"])

  # assemble LGDO struct:
  self.config.add_field("version_major", lgdo.Scalar(self.version_major))
@@ -87,14 +89,15 @@ class LLAMAHeaderDecoder(DataDecoder): # DataDecoder currently unused
  return self.channel_configs

  def __decode_channel_configs(self, f_in: io.BufferedReader) -> int:
- """
- Reads the metadata from the beginning of the file (the "channel configuration" part, directly after the file header).
- Creates a dictionary of the metadata for each FADC/channel combination, which is returned
+ """Reads the metadata.

- FADC-ID and channel-ID are combined into a single id for flattening:
- (fadcid << 4) + chid
+ Reads the metadata from the beginning of the file (the "channel
+ configuration" part, directly after the file header). Creates a
+ dictionary of the metadata for each FADC/channel combination, which is
+ returned. Returns number of bytes read.

- returns number of bytes read
+ FADC-ID and channel-ID are combined into a single id for flattening:
+ ``(fadcid << 4) + chid``.
  """
  # f_in.seek(16) #should be after file header anyhow, but re-set if not
  n_bytes_read = 0
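Note: the flattened key used throughout the header decoder follows the docstring's formula. A small sketch for illustration; combining is taken directly from the docstring, while splitting the id back with `>> 4` / `& 0xF` is an assumption implied by the 4-bit shift, not code from the package:

def combine(fadcid: int, chid: int) -> int:
    # the docstring's flattening formula
    return (fadcid << 4) + chid

def split(fch_id: int) -> tuple[int, int]:
    # assumed inverse: channel id occupies the low 4 bits
    return fch_id >> 4, fch_id & 0xF

fch_id = combine(fadcid=2, chid=5)
print(fch_id)         # 37
print(split(fch_id))  # (2, 5)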
@@ -118,15 +121,16 @@ class LLAMAHeaderDecoder(DataDecoder): # DataDecoder currently unused

  if fch_id in self.channel_configs:
  raise RuntimeError(
- f"duplicate channel configuration in file: FADCID: {fadc_index}, ChannelID: {channel_index}"
+ f"duplicate channel configuration in file: FADCID: "
+ f"{fadc_index}, ChannelID: {channel_index}"
  )
  else:
  self.channel_configs[fch_id] = {}

- self.channel_configs[fch_id]["14BitFlag"] = evt_data_32[2] & 0x00000001
+ self.channel_configs[fch_id]["14_bit_flag"] = evt_data_32[2] & 0x00000001
  if evt_data_32[2] & 0x00000002 == 0:
  log.warning("Channel in configuration marked as non-open!")
- self.channel_configs[fch_id]["ADC_offset"] = evt_data_32[3]
+ self.channel_configs[fch_id]["adc_offset"] = evt_data_32[3]
  self.channel_configs[fch_id]["sample_freq"] = evt_data_dpf[
  0
  ] # 64 bit float
@@ -138,12 +142,12 @@ class LLAMAHeaderDecoder(DataDecoder): # DataDecoder currently unused
  self.channel_configs[fch_id]["avg_mode"] = evt_data_32[12]
  self.channel_configs[fch_id]["sample_length"] = evt_data_32[13]
  self.channel_configs[fch_id]["avg_sample_length"] = evt_data_32[14]
- self.channel_configs[fch_id]["MAW_buffer_length"] = evt_data_32[15]
+ self.channel_configs[fch_id]["maw_buffer_length"] = evt_data_32[15]
  self.channel_configs[fch_id]["event_length"] = evt_data_32[16]
  self.channel_configs[fch_id]["event_header_length"] = evt_data_32[17]
  self.channel_configs[fch_id]["accum6_offset"] = evt_data_32[18]
  self.channel_configs[fch_id]["accum2_offset"] = evt_data_32[19]
- self.channel_configs[fch_id]["MAW3_offset"] = evt_data_32[20]
+ self.channel_configs[fch_id]["maw3_offset"] = evt_data_32[20]
  self.channel_configs[fch_id]["energy_offset"] = evt_data_32[21]

  return n_bytes_read
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: legend_daq2lh5
- Version: 1.4.0
+ Version: 1.4.2
  Summary: Convert digitizer data to LH5
  Home-page: https://github.com/legend-exp/legend-daq2lh5
  Author: Jason Detwiler
@@ -1,5 +1,5 @@
  daq2lh5/__init__.py,sha256=VPmwKuZSA0icpce05ojhnsKWhR4_QUgD0oVXUoN9wks,975
- daq2lh5/_version.py,sha256=R8-T9fmURjcuoxYpHTAjyNAhgJPDtI2jogCjqYYkfCU,411
+ daq2lh5/_version.py,sha256=Ls_J-pNiuTKX1KU6pZ6tPQXAMcmLAzW_HHHgGVQNEd0,511
  daq2lh5/build_raw.py,sha256=SDpdOU8qpfzMtx8gtFu-RZYqxutQo1smJSkv-LrH9YE,10672
  daq2lh5/cli.py,sha256=7bPfH1XbyAS48wZn_0unj4Y5MD5kF7V34Q5srn4jKVM,2913
  daq2lh5/data_decoder.py,sha256=Cn40fodfKs7pKa2odzG1j806iw9IyQVfbbWObNGmof8,10677
@@ -20,9 +20,10 @@ daq2lh5/fc/fc_config_decoder.py,sha256=RLRfUOZN0vYbAprqTymP7TGg641IiP9rgCGIOwWVK
  daq2lh5/fc/fc_event_decoder.py,sha256=JIRsySnxeuY3wmxjJOrTXo6wpelVup8WIvxU-fkPL-A,8131
  daq2lh5/fc/fc_status_decoder.py,sha256=o_3vTAgYXelZxIsreCYioVYid2mY-wqloYKlxoCqX5Q,3390
  daq2lh5/fc/fc_streamer.py,sha256=S0imXdVsiyolPvxI1uiBngpC58DporSNZPqx1HeVi5o,5737
+ daq2lh5/llama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  daq2lh5/llama/llama_base.py,sha256=B-NCBjE_FQE1WxijWi4Z1XBy4rqLHm2XhkC40s7Sdms,290
- daq2lh5/llama/llama_event_decoder.py,sha256=NGzUA1DLChcfnEUNqMAPWq4uqXwUd-FH2e-xXcj2lzM,13894
- daq2lh5/llama/llama_header_decoder.py,sha256=NB7wVH2r99hveQ2KJ-9YMkJMZ6ccNfFsjYa5HbuThAU,6114
+ daq2lh5/llama/llama_event_decoder.py,sha256=D7MRPmE-ucNCjeLJzY3RepmijjkvGHzu3vNV0zkcLho,14489
+ daq2lh5/llama/llama_header_decoder.py,sha256=xi_BMw4HLQc-PQ5wIZWSHSsBZvrCpS9_G03X7Mr6tGA,6175
  daq2lh5/llama/llama_streamer.py,sha256=Bmcj5Bs28KSV4y08TeJcntzUAkqz6HqlSNm7Kffgloc,5203
  daq2lh5/orca/__init__.py,sha256=Xf6uOIOzk_QkKH_7VizGlCo3iuiAgLtUE3A07x_HXC0,175
  daq2lh5/orca/orca_base.py,sha256=-XIolXsHj-1EdewaGxyvJTZvRGZsDyZe-5PzVOd-LFY,1333
@@ -33,9 +34,9 @@ daq2lh5/orca/orca_header_decoder.py,sha256=ORIIyfx22ybyKc-uyWy5ER49-dl3BGpHdfV8O
  daq2lh5/orca/orca_packet.py,sha256=nOHuBXsTI1SzTjHZtff0txSQYvkwo4XGx3fpk7XfYj8,2489
  daq2lh5/orca/orca_run_decoder.py,sha256=3atKXC6mDi8_PK6ICUBBJ-LyaTM8OU31kKWIpmttRr4,2065
  daq2lh5/orca/orca_streamer.py,sha256=VbD9PF-rx_Rk-rEy7XECPmgxr6kZSUf0tC7Qbol3Qeg,15693
- legend_daq2lh5-1.4.0.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- legend_daq2lh5-1.4.0.dist-info/METADATA,sha256=SKEW8u7wRrCK5zTY01iMGDYGPmrmg1ED5g9fsbwLVVA,3956
- legend_daq2lh5-1.4.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- legend_daq2lh5-1.4.0.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
- legend_daq2lh5-1.4.0.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
- legend_daq2lh5-1.4.0.dist-info/RECORD,,
+ legend_daq2lh5-1.4.2.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ legend_daq2lh5-1.4.2.dist-info/METADATA,sha256=a7rqLL0JYeSoxoD9lopkQN55eyrSC0Ten8M__syVX9A,3956
+ legend_daq2lh5-1.4.2.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ legend_daq2lh5-1.4.2.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
+ legend_daq2lh5-1.4.2.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
+ legend_daq2lh5-1.4.2.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: setuptools (75.8.2)
  Root-Is-Purelib: true
  Tag: py3-none-any