legend-daq2lh5 1.1.0__py3-none-any.whl → 1.2.0a1__py3-none-any.whl

daq2lh5/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.1.0'
-__version_tuple__ = version_tuple = (1, 1, 0)
+__version__ = version = '1.2.0a1'
+__version_tuple__ = version_tuple = (1, 2, 0)
daq2lh5/buffer_processor/buffer_processor.py CHANGED
@@ -51,14 +51,14 @@ def buffer_processor(rb: RawBuffer) -> Table:
     ``"compression": {"lgdo": "codec_name" [, ...]}`` `(dict)`
       Updates the `compression` attribute of `lgdo` to `codec_name`. The
       attribute sets the compression algorithm applied by
-      :func:`~.lgdo.lh5_store.LH5Store.read_object` before writing `lgdo` to
+      :func:`~lgdo.lh5.store.LH5Store.read` before writing `lgdo` to
       disk. Can be used to apply custom waveform compression algorithms from
       :mod:`lgdo.compression`.
 
     ``"hdf5_settings": {"lgdo": { <HDF5 settings> }}`` `(dict)`
       Updates the `hdf5_settings` attribute of `lgdo`. The attribute sets the
       HDF5 dataset options applied by
-      :func:`~.lgdo.lh5_store.LH5Store.read_object` before writing `lgdo` to
+      :func:`~lgdo.lh5.store.LH5Store.read` before writing `lgdo` to
       disk.
 
     Parameters
@@ -295,7 +295,7 @@ def process_windowed_t0(t0s: Array, dts: Array, start_index: int) -> Array:
 
 
 def process_dsp(rb: RawBuffer, tmp_table: Table) -> None:
-    r"""Run a DSP processing chain.
+    r"""Run a DSP processing chain with :mod:`dspeed`.
 
     Run a provided DSP config from `rb.proc_spec` using
     :func:`.dsp.build_processing_chain`, and add specified outputs to the
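
The two docstring keys above are consumed straight from a raw buffer's `proc_spec`. A minimal sketch of such a block, with placeholder field names ("waveform", "packet_id") and a placeholder codec string in the `"codec_name"` slot, not taken from a real config ::

    proc_spec = {
        # per-LGDO codec name, stored in the "compression" attribute
        "compression": {"waveform": "RadwareSigcompress(codec_shift=-32768)"},
        # per-LGDO h5py dataset options, stored in "hdf5_settings"
        "hdf5_settings": {"packet_id": {"compression": "gzip", "shuffle": True}},
    }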
daq2lh5/buffer_processor/lh5_buffer_processor.py CHANGED
@@ -6,7 +6,7 @@ import os
 
 import h5py
 import lgdo
-from lgdo import LH5Store
+from lgdo import lh5
 
 from ..buffer_processor.buffer_processor import buffer_processor
 from ..raw_buffer import RawBuffer, RawBufferLibrary
@@ -54,14 +54,14 @@ def lh5_buffer_processor(
     """
 
     # Initialize the input raw file
-    raw_store = LH5Store()
+    raw_store = lh5.LH5Store()
     lh5_file = raw_store.gimme_file(lh5_raw_file_in, "r")
     if lh5_file is None:
         raise ValueError(f"input file not found: {lh5_raw_file_in}")
         return
 
     # List the groups in the raw file
-    lh5_groups = lgdo.ls(lh5_raw_file_in)
+    lh5_groups = lh5.ls(lh5_raw_file_in)
     lh5_tables = []
 
     # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw
@@ -69,21 +69,19 @@ def lh5_buffer_processor(
         # Make sure that the upper level key isn't a dataset
         if isinstance(lh5_file[tb], h5py.Dataset):
             lh5_tables.append(f"{tb}")
-        elif "raw" not in tb and lgdo.ls(lh5_file, f"{tb}/raw"):
+        elif "raw" not in tb and lh5.ls(lh5_file, f"{tb}/raw"):
             lh5_tables.append(f"{tb}/raw")
         # Look one layer deeper for a :meth:`lgdo.Table` if necessary
-        elif lgdo.ls(lh5_file, f"{tb}"):
+        elif lh5.ls(lh5_file, f"{tb}"):
             # Check to make sure that this isn't a table itself
-            maybe_table, _ = raw_store.read_object(f"{tb}", lh5_file)
+            maybe_table, _ = raw_store.read(f"{tb}", lh5_file)
             if isinstance(maybe_table, lgdo.Table):
                 lh5_tables.append(f"{tb}")
             del maybe_table
         # otherwise, go deeper
         else:
-            for sub_table in lgdo.ls(lh5_file, f"{tb}"):
-                maybe_table, _ = raw_store.read_object(
-                    f"{tb}/{sub_table}", lh5_file
-                )
+            for sub_table in lh5.ls(lh5_file, f"{tb}"):
+                maybe_table, _ = raw_store.read(f"{tb}/{sub_table}", lh5_file)
                 if isinstance(maybe_table, lgdo.Table):
                     lh5_tables.append(f"{tb}/{sub_table}")
                 del maybe_table
@@ -114,7 +112,7 @@ def lh5_buffer_processor(
 
     # Write everything in the raw file to the new file, check for proc_spec under either the group name, out_name, or the name
     for tb in lh5_tables:
-        lgdo_obj, _ = raw_store.read_object(f"{tb}", lh5_file)
+        lgdo_obj, _ = raw_store.read(f"{tb}", lh5_file)
 
         # Find the out_name.
         # If the top level group has an lgdo table in it, then the out_name is group
@@ -198,6 +196,4 @@ def lh5_buffer_processor(
             pass
 
         # Write the (possibly processed) lgdo_obj to a file
-        raw_store.write_object(
-            lgdo_obj, out_name, lh5_file=proc_file_name, group=group_name
-        )
+        raw_store.write(lgdo_obj, out_name, lh5_file=proc_file_name, group=group_name)
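
Every call-site change in this file follows the same legend-pydataobj rename: `LH5Store.read_object`/`write_object` become `LH5Store.read`/`write`, and `lgdo.ls` moves to `lgdo.lh5.ls`. A minimal before/after sketch, with placeholder file and group names ::

    from lgdo import lh5

    store = lh5.LH5Store()
    # was: store.read_object("ch000/raw", "file_raw.lh5")
    obj, n_rows = store.read("ch000/raw", "file_raw.lh5")
    # was: store.write_object(obj, "raw", lh5_file="out.lh5", group="ch000")
    store.write(obj, "raw", lh5_file="out.lh5", group="ch000")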
daq2lh5/build_raw.py CHANGED
@@ -6,8 +6,8 @@ import logging
 import os
 import time
 
-import lgdo
 import numpy as np
+from lgdo import lh5
 from tqdm.auto import tqdm
 
 from .compass.compass_streamer import CompassStreamer
@@ -77,7 +77,7 @@ def build_raw(
 
     hdf5_settings
         keyword arguments (as a dict) forwarded to
-        :meth:`~.lgdo.lh5_store.LH5Store.write_object`.
+        :meth:`lgdo.lh5.store.LH5Store.write`.
 
     **kwargs
         sent to :class:`.RawBufferLibrary` generation as `kw_dict` argument.
@@ -224,7 +224,7 @@ def build_raw(
         os.remove(out_file_glob[0])
 
     # Write header data
-    lh5_store = lgdo.LH5Store(keep_open=True)
+    lh5_store = lh5.LH5Store(keep_open=True)
     write_to_lh5_and_clear(header_data, lh5_store, **hdf5_settings)
 
     # Now loop through the data
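
For context, a hedged sketch of how the `hdf5_settings` dict flows from `build_raw` down to `LH5Store.write`; the input stream name, the `out_spec` value, and the settings are placeholders, not taken from this diff ::

    from daq2lh5 import build_raw

    build_raw(
        "run0.fcio",              # placeholder DAQ input stream
        out_spec="run0_raw.lh5",  # placeholder output spec
        # forwarded as keyword arguments to LH5Store.write
        hdf5_settings={"compression": "gzip", "shuffle": True},
    )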
daq2lh5/data_decoder.py CHANGED
@@ -3,13 +3,10 @@ Base classes for decoding data into raw LGDO Tables or files
 """
 from __future__ import annotations
 
-from typing import Union
-
 import lgdo
 import numpy as np
-from lgdo import LH5Store
-
-LGDO = Union[lgdo.Scalar, lgdo.Struct, lgdo.Array, lgdo.VectorOfVectors]
+from lgdo import LGDO
+from lgdo.lh5 import LH5Store
 
 
 class DataDecoder:
@@ -18,15 +15,16 @@ class DataDecoder:
     Most decoders will repeatedly decode the same set of values from each
     packet. The values that get decoded need to be described by a dict stored
     in `self.decoded_values` that helps determine how to set up the buffers and
-    write them to file as :class:`~.lgdo.LGDO`\ s. :class:`~.lgdo.table.Table`\ s
-    are made whose columns correspond to the elements of `decoded_values`, and
-    packet data gets pushed to the end of the table one row at a time.
+    write them to file as :class:`~lgdo.types.lgdo.LGDO`\ s.
+    :class:`~lgdo.types.table.Table`\ s are made whose columns correspond to
+    the elements of `decoded_values`, and packet data gets pushed to the end of
+    the table one row at a time.
 
     Any key-value entry in a configuration dictionary attached to an element
     of `decoded_values` is typically interpreted as an attribute to be attached
     to the corresponding LGDO. This feature can be for example exploited to
     specify HDF5 dataset settings used by
-    :meth:`~.lgdo.lh5_store.LH5Store.write_object` to write LGDOs to disk.
+    :meth:`~lgdo.lh5.store.LH5Store.write` to write LGDOs to disk.
 
     For example ::
 
@@ -49,7 +47,7 @@ class DataDecoder:
     will have its `compression` attribute set to
     ``RadwareSigcompress(codec_shift=-32768)``. Before being written to disk,
     they will be compressed with the HDF5 built-in Gzip filter and with the
-    :class:`~.lgdo.compression.radware.RadwareSigcompress` waveform compressor.
+    :class:`~lgdo.compression.radware.RadwareSigcompress` waveform compressor.
 
     Examples
     --------
@@ -119,7 +117,7 @@ class DataDecoder:
         """Make an LGDO for this :class:`DataDecoder` to fill.
 
         This default version of this function allocates a
-        :class:`~.lgdo.table.Table` using the `decoded_values` for key. If a
+        :class:`~lgdo.types.table.Table` using the `decoded_values` for key. If a
         different type of LGDO object is required for this decoder, overload
         this function.
 
@@ -207,7 +205,7 @@ class DataDecoder:
             continue
 
         # Parse datatype for remaining lgdos
-        datatype, shape, elements = lgdo.lgdo_utils.parse_datatype(datatype)
+        datatype, shape, elements = lgdo.lh5.utils.parse_datatype(datatype)
 
         # ArrayOfEqualSizedArrays
         if datatype == "array_of_equalsized_arrays":
@@ -258,7 +256,7 @@ class DataDecoder:
         n_rows = self.garbage_table.loc
         if n_rows == 0:
             return
-        lh5_store.write_object(
+        lh5_store.write(
             self.garbage_table, "garbage", filename, group, n_rows=n_rows, append=True
         )
         self.garbage_table.clear()
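
To summarize the `decoded_values` convention documented above, a purely illustrative entry (field names and settings invented for the example): any extra key-value pair becomes an attribute of the corresponding LGDO, and an `hdf5_settings` entry is later consumed by `LH5Store.write` ::

    decoded_values = {
        "packet_id": {"dtype": "uint32"},
        "energy": {
            "dtype": "uint32",
            # becomes HDF5 dataset options when this column is written
            "hdf5_settings": {"compression": "gzip", "shuffle": True},
        },
    }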
daq2lh5/fc/fc_config_decoder.py CHANGED
@@ -28,7 +28,7 @@ class FCConfigDecoder(DataDecoder):
     >>> decoder = FCConfigDecoder()
     >>> config = decoder.decode_config(fc)
     >>> type(config)
-    lgdo.struct.Struct
+    lgdo.types.struct.Struct
     """
 
     def __init__(self, *args, **kwargs) -> None:
daq2lh5/orca/orca_digitizers.py CHANGED
@@ -98,11 +98,11 @@ class ORSIS3302DecoderForEnergy(OrcaDecoder):
             sys.exit()
         self.decoded_values[ccc]["waveform"]["wf_len"] = trace_length
 
-    def get_key_lists(self) -> list[list[str]]:
+    def get_key_lists(self) -> list[list[int]]:
         key_lists = []
         for key in self.decoded_values.keys():
             key_lists.append([key])
-        return [key_lists]
+        return key_lists
 
     def get_decoded_values(self, key: int = None) -> dict[str, Any]:
         if key is None:
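
Note that the `get_key_lists` change is a behavioral bug fix as well as an annotation fix: the old `return [key_lists]` wrapped the result in an extra list, so with integer channel keys ``0`` and ``1`` in `decoded_values` it produced the triply nested `[[[0], [1]]]`, while the method now returns the advertised `list[list[int]]` ::

    # assuming self.decoded_values = {0: {...}, 1: {...}}
    decoder.get_key_lists()  # [[0], [1]]  (previously [[[0], [1]]])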
daq2lh5/raw_buffer.py CHANGED
@@ -65,21 +65,19 @@ keys.
 from __future__ import annotations
 
 import os
-from typing import Union
 
 import lgdo
-from lgdo import LH5Store
+from lgdo import LGDO
+from lgdo.lh5 import LH5Store
 
 from .buffer_processor.buffer_processor import buffer_processor
 
-LGDO = Union[lgdo.Scalar, lgdo.Struct, lgdo.Array, lgdo.VectorOfVectors]
-
 
 class RawBuffer:
     r"""Base class to represent a buffer of raw data.
 
     A :class:`RawBuffer` is in essence a an LGDO object (typically a
-    :class:`~.lgdo.table.Table`) to which decoded data will be written, along
+    :class:`~lgdo.types.table.Table`) to which decoded data will be written, along
     with some meta-data distinguishing what data goes into it, and where the
     LGDO gets written out. Also holds on to the current location in the buffer
     for writing.
@@ -88,7 +86,7 @@ class RawBuffer:
     ----------
     lgdo
         the LGDO used as the actual buffer. Typically a
-        :class:`~.lgdo.table.Table`. Set to ``None`` upon creation so that the
+        :class:`~lgdo.types.table.Table`. Set to ``None`` upon creation so that the
         user or a decoder can initialize it later.
     key_list
         a list of keys (e.g. channel numbers) identifying data to be written
@@ -107,7 +105,7 @@ class RawBuffer:
     proc_spec
         a dictionary containing the following:
         - a DSP config file, passed as a dictionary, or as a path to a JSON file
-        - an array containing: the name of an :class:`~.lgdo` object stored in the :class:`.RawBuffer` to be sliced,
+        - an array containing: the name of an LGDO object stored in the :class:`.RawBuffer` to be sliced,
           the start and end indices of the slice, and the new name for the sliced object
         - a dictionary of fields to drop
        - a dictionary of new fields and their return datatype
@@ -440,11 +438,11 @@ def write_to_lh5_and_clear(
         files (saves some time opening / closing files).
     **kwargs
         keyword-arguments forwarded to
-        :meth:`.lgdo.lh5_store.LH5Store.write_object`.
+        :meth:`lgdo.lh5.store.LH5Store.write`.
 
     See Also
     --------
-    .lgdo.lh5_store.LH5Store.write_object
+    lgdo.lh5.store.LH5Store.write
     """
     if lh5_store is None:
         lh5_store = lgdo.LH5Store()
@@ -470,7 +468,7 @@ def write_to_lh5_and_clear(
 
     # write if requested...
     if filename != "":
-        lh5_store.write_object(
+        lh5_store.write(
             lgdo_to_write,
             rb.out_name,
             filename,
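
Usage of `write_to_lh5_and_clear` mirrors the call in `build_raw` above; a sketch assuming `rbs` is a list of filled :class:`RawBuffer` objects and using one example h5py dataset keyword (exactly which keywords `LH5Store.write` accepts is version dependent) ::

    from lgdo import lh5
    from daq2lh5.raw_buffer import write_to_lh5_and_clear

    store = lh5.LH5Store(keep_open=True)
    # writes each buffer's LGDO to its out_name, then resets its write location
    write_to_lh5_and_clear(rbs, store, compression="gzip")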
legend_daq2lh5-1.1.0.dist-info/METADATA → legend_daq2lh5-1.2.0a1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
-Name: legend-daq2lh5
-Version: 1.1.0
+Name: legend_daq2lh5
+Version: 1.2.0a1
 Summary: Convert digitizer data to LH5
 Home-page: https://github.com/legend-exp/legend-daq2lh5
 Author: Jason Detwiler
@@ -26,10 +26,10 @@ Classifier: Topic :: Software Development
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: dspeed >=1.1
+Requires-Dist: dspeed >=1.3.0a4
 Requires-Dist: h5py >=3.2.0
 Requires-Dist: hdf5plugin
-Requires-Dist: legend-pydataobj >=1.4.1
+Requires-Dist: legend-pydataobj >=1.5.0a1
 Requires-Dist: numpy >=1.21
 Requires-Dist: pyfcutils
 Requires-Dist: tqdm >=4.27
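
Note that 1.2.0a1 is a pre-release, as are the bumped `dspeed >=1.3.0a4` and `legend-pydataobj >=1.5.0a1` requirements, so a plain `pip install legend-daq2lh5` will not resolve to it; installing requires `pip install --pre legend-daq2lh5` or an explicit `pip install legend-daq2lh5==1.2.0a1`.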
legend_daq2lh5-1.1.0.dist-info/RECORD → legend_daq2lh5-1.2.0a1.dist-info/RECORD RENAMED
@@ -1,36 +1,36 @@
 daq2lh5/__init__.py,sha256=VPmwKuZSA0icpce05ojhnsKWhR4_QUgD0oVXUoN9wks,975
-daq2lh5/_version.py,sha256=CqDGE4B1ZqZ-56mxeOFcXRTmlxrdOh4ayrjbcPjziE4,411
-daq2lh5/build_raw.py,sha256=cheTM1H0NUTJ4TprGMLWMHnXsVRJk_EhWak7xwaH5P0,10525
+daq2lh5/_version.py,sha256=k5PS9p0a5Ey36DDxagN4mnTZow7bHSa0Oh_ycx0FrX4,413
+daq2lh5/build_raw.py,sha256=JFXC5ln9u353TUZMksY3zydLiV2HlxqdI6_Y2_ZMCIE,10524
 daq2lh5/cli.py,sha256=HCZ9Vyg-gqvairN9zJIpBjw5vLpp9ZUOOQYLFxloLL8,2912
-daq2lh5/data_decoder.py,sha256=40cRLJH25BgCj9Z-EDCD5A2XrKZRWnSDALNkr35QI3U,10708
+daq2lh5/data_decoder.py,sha256=ka2WIJuPvsG892__HCW1SagCEzyiZJ2kQP6zGDMtlr0,10641
 daq2lh5/data_streamer.py,sha256=6SEAekOHyfC4k3E0df0lW37ap6ZemVFbH8PYMl6UvCU,14130
 daq2lh5/logging.py,sha256=Nu3wgIoWN7cyUxuzPom5rMwFvTlBu8p8d9uONHDquRg,965
-daq2lh5/raw_buffer.py,sha256=yVLUYhxLe6KOjwxq_k91MAekx3ZUYmWL32GAhu3ahls,17784
+daq2lh5/raw_buffer.py,sha256=dyPUok0N3MP41oP9F8sO_PrH7-SWs9UdPh7dqCF729g,17687
 daq2lh5/buffer_processor/__init__.py,sha256=7k6v_KPximtv7805QnX4-xp_S3vqvqwDfdV3q95oZJo,84
-daq2lh5/buffer_processor/buffer_processor.py,sha256=dUgG1PRoxZgTX5XmxBKyEDnIx7Met_l02kLimsphjq8,14480
-daq2lh5/buffer_processor/lh5_buffer_processor.py,sha256=Jf-lVM6t9Ui-6-0fr9tXkMsX6ButHZB7I0gyJcRQqw8,8323
+daq2lh5/buffer_processor/buffer_processor.py,sha256=GUxpNDbqGLuUEZmXjeratipbzmki12RFNYZkxgMtesg,14483
+daq2lh5/buffer_processor/lh5_buffer_processor.py,sha256=yL1ru0_GTsZx099oi45sXL-FxPfdChtStd_IFtZNI_Q,8222
 daq2lh5/compass/__init__.py,sha256=mOXHWp7kRDgNTPQty3E8k2KPSy_vAzjneKfAcCVaPyE,132
 daq2lh5/compass/compass_config_parser.py,sha256=zeAsOo1dOJPGLL8-zkAcdYRkqt8BodtOPi96n7fWsl4,12300
 daq2lh5/compass/compass_event_decoder.py,sha256=kiPOaEu8SgLD2wbSPbBahcbTBBRAIw35wtVLBcwPcXY,7386
 daq2lh5/compass/compass_header_decoder.py,sha256=AA-Md2FIT3nD4mXX9CrWvbbfmKiA436-BTmzcU3_XOY,2823
 daq2lh5/compass/compass_streamer.py,sha256=zSl7IqO0ID0wcixkLE9QVEG3bF9hfGVITVPomCeOFTM,8841
 daq2lh5/fc/__init__.py,sha256=bB1j6r-bDmylNi0iutQeAJGjsDSjLSoXMqFfXWwfb8I,141
-daq2lh5/fc/fc_config_decoder.py,sha256=6PA4AGxfoI9S22lfoMFgb4L_tMloT1TF7HWi2r7OJaM,1990
+daq2lh5/fc/fc_config_decoder.py,sha256=RLRfUOZN0vYbAprqTymP7TGg641IiP9rgCGIOwWVKzU,1996
 daq2lh5/fc/fc_event_decoder.py,sha256=JIRsySnxeuY3wmxjJOrTXo6wpelVup8WIvxU-fkPL-A,8131
 daq2lh5/fc/fc_status_decoder.py,sha256=o_3vTAgYXelZxIsreCYioVYid2mY-wqloYKlxoCqX5Q,3390
 daq2lh5/fc/fc_streamer.py,sha256=S0imXdVsiyolPvxI1uiBngpC58DporSNZPqx1HeVi5o,5737
 daq2lh5/orca/__init__.py,sha256=Xf6uOIOzk_QkKH_7VizGlCo3iuiAgLtUE3A07x_HXC0,175
 daq2lh5/orca/orca_base.py,sha256=-XIolXsHj-1EdewaGxyvJTZvRGZsDyZe-5PzVOd-LFY,1333
-daq2lh5/orca/orca_digitizers.py,sha256=rpk2SSDQgE681FB_iaewAuTXCVEqhUTlhLAm0RGJDfo,20869
+daq2lh5/orca/orca_digitizers.py,sha256=BsAA3OgQ13YIirDM8pd_xDY3F5FqEY4YjSHviflmov8,20867
 daq2lh5/orca/orca_flashcam.py,sha256=gsvPorUXk1Jn-U93GsxXJ5z6pbTK2yjsYDqZFVCm57U,33088
 daq2lh5/orca/orca_header.py,sha256=1tDRG8l9Gqu4c0K4BjXBSC5eiLTzY_HaCsgNBiv5EgI,4283
 daq2lh5/orca/orca_header_decoder.py,sha256=ORIIyfx22ybyKc-uyWy5ER49-dl3BGpHdfV8OCDmjIw,1632
 daq2lh5/orca/orca_packet.py,sha256=TcdfuYN8_gcug_Xdjz98KqjHw1MqJ4J98zc7WI2xtf4,2488
 daq2lh5/orca/orca_run_decoder.py,sha256=3atKXC6mDi8_PK6ICUBBJ-LyaTM8OU31kKWIpmttRr4,2065
 daq2lh5/orca/orca_streamer.py,sha256=VbD9PF-rx_Rk-rEy7XECPmgxr6kZSUf0tC7Qbol3Qeg,15693
-legend_daq2lh5-1.1.0.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-legend_daq2lh5-1.1.0.dist-info/METADATA,sha256=-hWP-SmhkRHTo2nRZs1Hxi4bQ3Ao2lymLKz_Dz4av_Y,3747
-legend_daq2lh5-1.1.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-legend_daq2lh5-1.1.0.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
-legend_daq2lh5-1.1.0.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
-legend_daq2lh5-1.1.0.dist-info/RECORD,,
+legend_daq2lh5-1.2.0a1.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+legend_daq2lh5-1.2.0a1.dist-info/METADATA,sha256=QiBKAO0ycatdNK5W8HlhHXA28pUnqFr2iPnjyjr2RAE,3755
+legend_daq2lh5-1.2.0a1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+legend_daq2lh5-1.2.0a1.dist-info/entry_points.txt,sha256=R08R4NrHi0ab5MJN_qKqzePVzrLSsw5WpmbiwwduYjw,59
+legend_daq2lh5-1.2.0a1.dist-info/top_level.txt,sha256=MJQVLyLqMgMKBdVfNXFaCKCjHKakAs19VLbC9ctXZ7A,8
+legend_daq2lh5-1.2.0a1.dist-info/RECORD,,
legend_daq2lh5-1.1.0.dist-info/WHEEL → legend_daq2lh5-1.2.0a1.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.3)
+Generator: bdist_wheel (0.42.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 