reboost-0.2.4-py3-none-any.whl → reboost-0.2.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
reboost/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.2.4'
-__version_tuple__ = version_tuple = (0, 2, 4)
+__version__ = version = '0.2.6'
+__version_tuple__ = version_tuple = (0, 2, 6)
reboost/build_glm.py CHANGED
@@ -153,6 +153,10 @@ def get_stp_evtids(
     )
     evtids = lh5_obj.view_as("ak")
 
+    # pick the first evtid
+    if evtids.ndim > 1:
+        evtids = ak.fill_none(ak.firsts(evtids, axis=-1), -1)
+
     # if the evtids_proc is not set then this is the first valid chunk
     if evtids_proc is None:
         evtids_proc = evtids
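
A minimal sketch (toy data, not from the package) of the awkward-array pattern added above: ak.firsts keeps the first evtid of each jagged row and ak.fill_none pads empty rows with -1.

    import awkward as ak

    # toy jagged evtid array: one sub-list of event ids per step row
    evtids = ak.Array([[7, 7], [8], []])

    if evtids.ndim > 1:
        # keep only the first evtid per row, -1 if the row is empty
        evtids = ak.fill_none(ak.firsts(evtids, axis=-1), -1)

    print(evtids.to_list())  # [7, 8, -1]
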
reboost/build_hit.py CHANGED
@@ -166,6 +166,7 @@ import awkward as ak
 import dbetto
 from dbetto import AttrsDict
 from lgdo import lh5
+from lgdo.types import Struct
 
 from reboost.iterator import GLMIterator
 from reboost.profile import ProfileDict
@@ -187,6 +188,7 @@ def build_hit(
     in_field: str = "stp",
     out_field: str = "hit",
     buffer: int = int(5e6),
+    overwrite: bool = False,
 ) -> None | ak.Array:
     """Build the hit tier from the remage step files.
 
@@ -213,6 +215,8 @@ def build_hit(
         name of the output field
     buffer
         buffer size for use in the `LH5Iterator`.
+    overwrite
+        flag to overwrite the existing output.
     """
     # extract the config file
     if isinstance(config, str):
@@ -263,6 +267,7 @@ def build_hit(
                for mapping in proc_group.get("detector_mapping")
            ]
        )
+
        # loop over detectors
        for in_det_idx, (in_detector, out_detectors) in enumerate(detectors_mapping.items()):
            msg = f"... processing {in_detector} (to {out_detectors})"
@@ -341,22 +346,17 @@ def build_hit(
                # assign units in the output table
                hit_table = utils.assign_units(hit_table, attrs)
 
-                # get the IO mode
-
                new_hit_file = (file_idx == 0) or (
                    files.hit[file_idx] != files.hit[file_idx - 1]
                )
 
-                wo_mode = (
-                    "of"
-                    if (
-                        group_idx == 0
-                        and out_det_idx == 0
-                        and in_det_idx == 0
-                        and chunk_idx == 0
-                        and new_hit_file
-                    )
-                    else "append"
+                wo_mode = utils.get_wo_mode(
+                    group=group_idx,
+                    out_det=out_det_idx,
+                    in_det=in_det_idx,
+                    chunk=chunk_idx,
+                    new_hit_file=new_hit_file,
+                    overwrite=overwrite,
                )
 
                # now write
@@ -364,12 +364,20 @@ def build_hit(
                if time_dict is not None:
                    start_time = time.time()
 
-                lh5.write(
-                    hit_table,
-                    f"{out_detector}/{out_field}",
-                    files.hit[file_idx],
-                    wo_mode=wo_mode,
-                )
+                if wo_mode != "a":
+                    lh5.write(
+                        Struct({out_detector: hit_table}),
+                        out_field,
+                        files.hit[file_idx],
+                        wo_mode=wo_mode,
+                    )
+                else:
+                    lh5.write(
+                        hit_table,
+                        f"{out_field}/{out_detector}",
+                        files.hit[file_idx],
+                        wo_mode=wo_mode,
+                    )
                if time_dict is not None:
                    time_dict[proc_name].update_field("write", start_time)
 
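
A rough sketch of why the new write logic distinguishes the two branches above: the first write of a fresh file stores the table wrapped in a Struct, so that the enclosing `hit` group is created with its LGDO metadata, while later chunks append the table directly at `hit/<detector>`. File, detector and column names below are toy values, not taken from the package.

    import numpy as np
    from lgdo import lh5
    from lgdo.types import Array, Struct, Table

    hit_table = Table(col_dict={"energy": Array(np.array([1.0, 2.0, 3.0]))})

    # first chunk, fresh file: write the enclosing struct ("of" overwrites the file)
    lh5.write(Struct({"det001": hit_table}), "hit", "hit_file.lh5", wo_mode="of")

    # later chunks: append rows to the existing table
    lh5.write(hit_table, "hit/det001", "hit_file.lh5", wo_mode="a")
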
reboost/cli.py CHANGED
@@ -175,6 +175,8 @@ def cli(args=None) -> None:
         msg += f" in_field: {args.in_field}\n"
         msg += f" out_field: {args.out_field}\n"
         msg += f" buffer: {args.buffer}"
+        msg += f" overwrite: {args.overwrite}"
+
         log.info(msg)
 
         build_hit(
@@ -188,4 +190,5 @@ def cli(args=None) -> None:
             in_field=args.in_field,
             out_field=args.out_field,
             buffer=args.buffer,
+            overwrite=args.overwrite,
         )
reboost/hpge/psd.py CHANGED
@@ -3,29 +3,33 @@ from __future__ import annotations
 import logging
 
 import awkward as ak
+import numba
 import numpy as np
-from lgdo import Array
+import pint
+import pyg4ometry
+from lgdo import Array, VectorOfVectors
+from numpy.typing import ArrayLike, NDArray
+
+from .. import units
+from ..units import ureg as u
+from .utils import HPGeScalarRZField
 
 log = logging.getLogger(__name__)
 
 
 def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array:
-    """Computes R90 for each hit in a ged.
+    """R90 HPGe pulse shape heuristic.
 
     Parameters
     ----------
     edep
-        awkward array of energy
+        array of energy.
     xloc
-        awkward array of x coordinate position
+        array of x coordinate position.
     yloc
-        awkward array of y coordinate position
+        array of y coordinate position.
     zloc
-        awkward array of z coordinate position
-
-    Returns
-    -------
-    r90
+        array of z coordinate position.
     """
     tot_energy = ak.sum(edep, axis=-1, keepdims=True)
 
@@ -44,7 +48,7 @@ def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array
     sorted_dist = dist[sorted_indices]
     sorted_edep = edep[sorted_indices]
 
-    def cumsum(layout, **_kwargs):
+    def _ak_cumsum(layout, **_kwargs):
         if layout.is_numpy:
             return ak.contents.NumpyArray(np.cumsum(layout.data))
 
@@ -52,7 +56,7 @@ def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array
 
     # Calculate the cumulative sum of energies for each event
     cumsum_edep = ak.transform(
-        cumsum, sorted_edep
+        _ak_cumsum, sorted_edep
     )  # Implement cumulative sum over whole jagged array
     if len(edep) == 1:
         cumsum_edep_corrected = cumsum_edep
@@ -72,3 +76,163 @@ def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array
     r90 = sorted_dist[r90_indices]
 
     return Array(ak.flatten(r90).to_numpy())
+
+
+def drift_time(
+    xloc: ArrayLike,
+    yloc: ArrayLike,
+    zloc: ArrayLike,
+    dt_map: HPGeScalarRZField,
+    coord_offset: pint.Quantity | pyg4ometry.gdml.Position = (0, 0, 0) * u.m,
+) -> VectorOfVectors:
+    """Calculates drift times for each step (cluster) in an HPGe detector.
+
+    Parameters
+    ----------
+    xloc
+        awkward array of x coordinate position.
+    yloc
+        awkward array of y coordinate position.
+    zloc
+        awkward array of z coordinate position.
+    dt_map
+        the drift time map.
+    coord_offset
+        these `(x, y, z)` coordinates will be subtracted from `(xloc, yloc,
+        zloc)` before drift time computation. The length units must be the
+        same as `xloc`, `yloc` and `zloc`.
+    """
+    # sanitize coord_offset
+    coord_offset = units.pg4_to_pint(coord_offset)
+
+    # unit handling (for matching with drift time map units)
+    xu, yu = [units.units_convfact(data, dt_map.r_units) for data in (xloc, yloc)]
+    zu = units.units_convfact(zloc, dt_map.z_units)
+
+    # unwrap LGDOs
+    xloc, yloc, zloc = [units.unwrap_lgdo(data)[0] for data in (xloc, yloc, zloc)]
+
+    # awkward transform to apply the drift time map to the step coordinates
+    def _ak_dt_map(layouts, **_kwargs):
+        if layouts[0].is_numpy and layouts[1].is_numpy:
+            return ak.contents.NumpyArray(
+                dt_map.φ(np.stack([layouts[0].data, layouts[1].data], axis=1))
+            )
+
+        return None
+
+    # transform coordinates
+    xloc = xu * xloc - coord_offset[0].to(dt_map.r_units).m
+    yloc = yu * yloc - coord_offset[1].to(dt_map.r_units).m
+    zloc = zu * zloc - coord_offset[2].to(dt_map.z_units).m
+
+    # evaluate the drift time
+    dt_values = ak.transform(
+        _ak_dt_map,
+        np.sqrt(xloc**2 + yloc**2),
+        zloc,
+    )
+
+    return VectorOfVectors(
+        dt_values,
+        attrs={"units": units.unit_to_lh5_attr(dt_map.φ_units)},
+    )
+
+
+def drift_time_heuristic(
+    drift_time: ArrayLike,
+    edep: ArrayLike,
+) -> Array:
+    """HPGe drift-time-based pulse-shape heuristic.
+
+    See :func:`_drift_time_heuristic_impl` for a description of the algorithm.
+
+    Parameters
+    ----------
+    drift_time
+        drift time of charges originating from steps/clusters. Can be
+        calculated with :func:`drift_time`.
+    edep
+        energy deposited in step/cluster (same shape as `drift_time`).
+    """
+    # extract LGDO data and units
+    drift_time, t_units = units.unwrap_lgdo(drift_time)
+    edep, e_units = units.unwrap_lgdo(edep)
+
+    # we want to attach the right units to the dt heuristic, if possible
+    attrs = {}
+    if t_units is not None and e_units is not None:
+        attrs["units"] = units.unit_to_lh5_attr(t_units / e_units)
+
+    return Array(_drift_time_heuristic_impl(drift_time, edep), attrs=attrs)
+
+
+@numba.njit(cache=True)
+def _drift_time_heuristic_impl(
+    dt: ak.Array,
+    edep: ak.Array,
+) -> NDArray:
+    r"""Low-level implementation of the HPGe drift-time-based pulse-shape heuristic.
+
+    Accepts Awkward arrays and uses Numba to speed up the computation.
+
+    For each hit (collection of steps), the drift times and corresponding
+    energies are sorted in ascending order. The function finds the optimal
+    split point :math:`m` that maximizes the *identification metric*:
+
+    .. math::
+
+        I = \frac{|T_1 - T_2|}{E_\text{s}(E_1, E_2)}
+
+    where:
+
+    .. math::
+
+        T_1 = \frac{\sum_{i < m} t_i E_i}{\sum_{i < m} E_i}
+        \quad \text{and} \quad
+        T_2 = \frac{\sum_{i \geq m} t_i E_i}{\sum_{i \geq m} E_i}
+
+    are the energy-weighted mean drift times of the two groups.
+
+    .. math::
+
+        E_\text{s}(E_1, E_2) = \frac{1}{\sqrt{E_1 E_2}}
+
+    is the scaling factor.
+
+    The function iterates over all possible values of :math:`m` and selects the
+    maximum `I` as the drift time heuristic value.
+    """
+    dt_heu = np.zeros(len(dt))
+
+    # loop over hits
+    for i in range(len(dt)):
+        t = np.asarray(dt[i])
+        e = np.asarray(edep[i])
+
+        valid_idx = np.where(e > 0)[0]
+        if len(valid_idx) < 2:
+            continue
+
+        t = t[valid_idx]
+        e = e[valid_idx]
+
+        sort_idx = np.argsort(t)
+        t = t[sort_idx]
+        e = e[sort_idx]
+
+        max_id_metric = 0
+        for j in range(1, len(t)):
+            e1 = np.sum(e[:j])
+            e2 = np.sum(e[j:])
+
+            t1 = np.sum(t[:j] * e[:j]) / e1
+            t2 = np.sum(t[j:] * e[j:]) / e2
+
+            id_metric = abs(t1 - t2) * np.sqrt(e1 * e2)
+
+            max_id_metric = max(max_id_metric, id_metric)
+
+        dt_heu[i] = max_id_metric
+
+    return dt_heu
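
The identification metric implemented in `_drift_time_heuristic_impl` can be reproduced for a single hit with plain NumPy; a small sketch (helper name and numbers are illustrative only) of the sort, split and maximize logic described in the docstring above.

    import numpy as np

    def dt_heuristic_single_hit(t, e):
        # sort steps by drift time, then scan all split points m
        order = np.argsort(t)
        t, e = t[order], e[order]
        best = 0.0
        for m in range(1, len(t)):
            e1, e2 = e[:m].sum(), e[m:].sum()
            t1 = (t[:m] * e[:m]).sum() / e1  # energy-weighted mean drift time, group 1
            t2 = (t[m:] * e[m:]).sum() / e2  # energy-weighted mean drift time, group 2
            best = max(best, abs(t1 - t2) * np.sqrt(e1 * e2))
        return best

    # two deposits well separated in drift time give a large metric
    print(dt_heuristic_single_hit(np.array([100.0, 900.0]), np.array([50.0, 60.0])))
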
reboost/hpge/utils.py ADDED
@@ -0,0 +1,78 @@
+from __future__ import annotations
+
+from typing import Callable, NamedTuple
+
+import lgdo
+import numpy as np
+import pint
+from dbetto import AttrsDict
+from lgdo import lh5
+from scipy.interpolate import RegularGridInterpolator
+
+
+class HPGeScalarRZField(NamedTuple):
+    """A scalar field defined in the cylindrical-like (r, z) HPGe plane."""
+
+    φ: Callable
+    "Scalar field, function of the coordinates (r, z)."
+    r_units: pint.Unit
+    "Physical units of the coordinate `r`."
+    z_units: pint.Unit
+    "Physical units of the coordinate `z`."
+    φ_units: pint.Unit
+    "Physical units of the field."
+
+
+def get_hpge_scalar_rz_field(
+    filename: str, obj: str, field: str, out_of_bounds_val: int | float = np.nan, **kwargs
+) -> HPGeScalarRZField:
+    """Create an interpolator for a gridded scalar HPGe field defined on `(r, z)`.
+
+    Reads from disk the following data structure: ::
+
+        FILENAME/
+        └── OBJ · struct{r,z,FIELD}
+            ├── r · array<1>{real} ── {'units': 'UNITS'}
+            ├── z · array<1>{real} ── {'units': 'UNITS'}
+            └── FIELD · array<2>{real} ── {'units': 'UNITS'}
+
+    where ``FILENAME``, ``OBJ`` and ``FIELD`` are provided as
+    arguments to this function. `obj` is a :class:`~lgdo.types.struct.Struct`,
+    `r` and `z` are one dimensional arrays specifying the radial and z
+    coordinates of the rectangular grid — not the coordinates of each single
+    grid point. In this coordinate system, the center of the p+ contact surface
+    is at `(0, 0)`, with the p+ contact facing downwards. `field` is instead a
+    two-dimensional array specifying the field value at each grid point. The
+    first and second dimensions are `r` and `z`, respectively. NaN values are
+    interpreted as points outside the detector profile in the `(r, z)` plane.
+
+    Before returning a :class:`HPGeScalarRZField`, the gridded field is fed to
+    :class:`scipy.interpolate.RegularGridInterpolator`.
+
+    Parameters
+    ----------
+    filename
+        name of the LH5 file containing the gridded scalar field.
+    obj
+        name of the HDF5 dataset where the data is saved.
+    field
+        name of the HDF5 dataset holding the field values.
+    out_of_bounds_val
+        value to use to replace NaNs in the field values.
+    """
+    data = lh5.read(obj, filename)
+
+    if not isinstance(data, lgdo.Struct):
+        msg = f"{obj} in {filename} is not an LGDO Struct"
+        raise ValueError(msg)
+
+    data = AttrsDict(
+        {
+            k: np.nan_to_num(data[k].view_as("np", with_units=True), nan=out_of_bounds_val)
+            for k in ("r", "z", field)
+        }
+    )
+
+    interpolator = RegularGridInterpolator((data.r.m, data.z.m), data[field].m, **kwargs)
+
+    return HPGeScalarRZField(interpolator, data.r.u, data.z.u, data[field].u)
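
The object returned by `get_hpge_scalar_rz_field` is essentially a scipy `RegularGridInterpolator` over the `(r, z)` grid plus the three units; a self-contained sketch with a toy grid (shapes and values assumed, not taken from a real drift-time map).

    import numpy as np
    from scipy.interpolate import RegularGridInterpolator

    # toy (r, z) grid and field values; first axis is r, second is z
    r = np.linspace(0, 40, 5)
    z = np.linspace(0, 80, 9)
    field = np.add.outer(r, z)  # shape (len(r), len(z))

    phi = RegularGridInterpolator((r, z), field)

    # evaluate at a few (r, z) points, as drift_time() does through dt_map.φ
    points = np.array([[10.0, 20.0], [35.0, 70.0]])
    print(phi(points))
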
reboost/iterator.py CHANGED
@@ -129,7 +129,10 @@ class GLMIterator:
             time_start = time.time()
 
             stp_rows, n_steps = self.sto.read(
-                f"{self.stp_field}/{self.lh5_group}", self.stp_file, start_row=start, n_rows=n
+                f"{self.stp_field}/{self.lh5_group}",
+                self.stp_file,
+                start_row=int(start),
+                n_rows=int(n),
             )
 
             # save time
reboost/units.py ADDED
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+import pint
+import pyg4ometry as pg4
+from lgdo import LGDO
+
+log = logging.getLogger(__name__)
+
+ureg = pint.get_application_registry()
+"""The physical units registry."""
+
+# default pretty printing of physical units
+ureg.formatter.default_format = "~P"
+
+
+def pg4_to_pint(obj) -> pint.Quantity:
+    """Convert pyg4ometry object to pint Quantity."""
+    if isinstance(obj, pint.Quantity):
+        return obj
+    if isinstance(obj, pg4.gdml.Defines.VectorBase):
+        return [getattr(obj, field).eval() for field in ("x", "y", "z")] * ureg(obj.unit)
+    msg = f"I don't know how to convert object of type {type(obj)} to pint object"
+    raise ValueError(msg)
+
+
+def units_convfact(data: Any, target_units: pint.Unit) -> float:
+    """Calculate numeric conversion factor to reach `target_units`.
+
+    Parameters
+    ----------
+    data
+        starting data structure. If an LGDO, try to determine units by peeking
+        into its attributes. Otherwise, just return 1.
+    target_units
+        units you wish to convert data to.
+    """
+    if isinstance(data, LGDO) and "units" in data.attrs:
+        return ureg(data.attrs["units"]).to(target_units).magnitude
+    return 1
+
+
+def unwrap_lgdo(data: Any, library: str = "ak") -> tuple[Any, pint.Unit | None]:
+    """Return a view of the data held by the LGDO and its physical units.
+
+    Parameters
+    ----------
+    data
+        the data container. If not an LGDO, it will be returned as is with
+        ``None`` units.
+    library
+        forwarded to :func:`lgdo.view_as`.
+
+    Returns
+    -------
+    A tuple of the un-lgdo'd data and the data units.
+    """
+    ret_data = data
+    ret_units = None
+    if isinstance(data, LGDO):
+        ret_data = data.view_as(library)
+        if "units" in data.attrs:
+            ret_units = ureg(data.attrs["units"]).u
+
+    return ret_data, ret_units
+
+
+def unit_to_lh5_attr(unit: pint.Unit) -> str:
+    """Convert Pint unit to a string that can be used as attrs["units"] in an LGDO."""
+    # TODO: we should check if this can be always parsed by Unitful.jl
+    if isinstance(unit, pint.Unit):
+        return f"{unit:~C}"
+    return unit
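
A short pint sketch (toy units, no LGDO involved) of the two conversions these helpers perform: the numeric factor that `units_convfact` computes and the compact unit string that `unit_to_lh5_attr` produces.

    import pint

    ureg = pint.get_application_registry()

    # factor to convert data stored in mm into a map gridded in m (cf. units_convfact)
    convfact = ureg("mm").to("m").magnitude
    print(convfact)  # 0.001

    # compact string form, suitable for an LGDO "units" attribute (cf. unit_to_lh5_attr)
    print(f"{ureg.nanosecond:~C}")  # ns
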
reboost/utils.py CHANGED
@@ -9,11 +9,28 @@ from contextlib import contextmanager
 from pathlib import Path
 
 from dbetto import AttrsDict
-from lgdo.types import Table
+from lgdo.types import Table, VectorOfVectors
 
 log = logging.getLogger(__name__)
 
 
+def get_wo_mode(
+    group: int, out_det: int, in_det: int, chunk: int, new_hit_file: bool, overwrite: bool = False
+):
+    """Get the mode for lh5 file writing."""
+    indices = [group, out_det, in_det, chunk]
+
+    good_idx = all(i == 0 for i in indices)
+
+    if good_idx and new_hit_file:
+        return "of" if overwrite else "w"
+
+    # if this is not the first detector but is the first chunk, append a new column
+    if ((in_det > 0) or (out_det > 0)) and (chunk == 0):
+        return "ac"
+    return "a"
+
+
 def get_file_dict(
     stp_files: list[str] | str,
     glm_files: list[str] | str | None,
@@ -113,7 +130,11 @@ def assign_units(tab: Table, units: Mapping) -> Table:
     """
     for field in tab:
         if field in units:
-            tab[field].attrs["units"] = units[field]
+            if not isinstance(tab[field], VectorOfVectors):
+                tab[field].attrs["units"] = units[field]
+            else:
+                tab[field].flattened_data.attrs["units"] = units[field]
+
     return tab
 
 
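
Based on the implementation above, `get_wo_mode` maps the loop indices to LH5 write modes roughly as follows (illustrative calls only).

    from reboost import utils

    # first chunk of the first detector going into a fresh file: write (or overwrite)
    utils.get_wo_mode(group=0, out_det=0, in_det=0, chunk=0, new_hit_file=True)                  # 'w'
    utils.get_wo_mode(group=0, out_det=0, in_det=0, chunk=0, new_hit_file=True, overwrite=True)  # 'of'

    # first chunk of a later detector: append a new column to the existing table
    utils.get_wo_mode(group=0, out_det=1, in_det=0, chunk=0, new_hit_file=False)  # 'ac'

    # later chunks of the same detector: plain append
    utils.get_wo_mode(group=0, out_det=0, in_det=0, chunk=1, new_hit_file=False)  # 'a'
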
reboost-0.2.4.dist-info/METADATA → reboost-0.2.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reboost
-Version: 0.2.4
+Version: 0.2.6
 Summary: New LEGEND Monte-Carlo simulation post-processing
 Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
@@ -700,7 +700,7 @@ Requires-Dist: colorlog
 Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: numba
-Requires-Dist: legend-pydataobj>=1.11.6
+Requires-Dist: legend-pydataobj>=1.11.10
 Requires-Dist: legend-pygeom-optics>=0.6.5
 Requires-Dist: hist
 Requires-Dist: dbetto
@@ -708,6 +708,7 @@ Requires-Dist: particle
 Requires-Dist: pandas
 Requires-Dist: matplotlib
 Requires-Dist: pygama
+Requires-Dist: pyg4ometry
 Provides-Extra: all
 Requires-Dist: reboost[docs,test]; extra == "all"
 Provides-Extra: docs
@@ -722,7 +723,7 @@ Requires-Dist: pytest-cov; extra == "test"
 Requires-Dist: legend-pygeom-hpges; extra == "test"
 Requires-Dist: legend-pygeom-tools; extra == "test"
 Requires-Dist: pyg4ometry; extra == "test"
-Requires-Dist: pylegendtestdata; extra == "test"
+Requires-Dist: pylegendtestdata>=0.6; extra == "test"
 Dynamic: license-file
 
 # reboost
reboost-0.2.4.dist-info/RECORD → reboost-0.2.6.dist-info/RECORD CHANGED
@@ -1,18 +1,20 @@
 reboost/__init__.py,sha256=RVNl3Qgx_hTUeBGXaWYmiTcmXUDhTfvlAGGC8bo_jP8,316
-reboost/_version.py,sha256=1LUN_sRKOiFInoB6AlW6TYoQMCh1Z4KutwcHNvHcfB0,511
+reboost/_version.py,sha256=nObnONsicQ3YX6SG5MVBxmIp5dmRacXDauSqZijWQbY,511
 reboost/build_evt.py,sha256=zj3wG_kaV3EoRMQ33AkCNa_2Fv8cLtRuhyRyRmSrOYQ,4797
-reboost/build_glm.py,sha256=LQkM6x6mMOE92-c78uoclOvP9zp3vdMuLQCSP2f2Zk4,9263
-reboost/build_hit.py,sha256=_JDe7j2lYmvQD7ABoSgVhIw6CiCQHqnqFGvZbcRY-EU,14290
+reboost/build_glm.py,sha256=kSY9hQjEsOE-0PiblhdBy_SvFIlgXLX6CUlgpxW-_OI,9389
+reboost/build_hit.py,sha256=OyXkYdLIpSBmq3MLNXD_kLjVqxeSQcs7RGAOZSy1Bns,14695
 reboost/build_tcm.py,sha256=-PawBHoHj0zsm4XsZu5bco9d9a09STicZchduefSNfI,2951
-reboost/cli.py,sha256=HTZ05DRnDodcf_D6BJCCavx5HqhKDadJCgf-oh8HTJk,6365
+reboost/cli.py,sha256=swPJcYzvg18rSOMN-mpe0PCMf1-a9V7osIssX7JP7k0,6459
 reboost/core.py,sha256=7Nclc6RUCOSJ1CWVAX0rFNJGM1LEgqvc4tD04CxEAtg,10766
-reboost/iterator.py,sha256=72AyoRTgMpWghZt2UOqRj0RGiNzaiBAwgNIUZdduK2s,4698
+reboost/iterator.py,sha256=0KmrekpZwOYZJaP0nmp-SNrr2WmGUKeNUVcqO-OChhY,4757
 reboost/log_utils.py,sha256=VqS_9OC5NeNU3jcowVOBB0NJ6ssYvNWnirEY-JVduEA,766
 reboost/profile.py,sha256=EOTmjmS8Rm_nYgBWNh6Rntl2XDsxdyed7yEdWtsZEeg,2598
-reboost/utils.py,sha256=wzfW0_W5IF3sw3DcrLOLDNZV2LKfUoEi6Ks8Db5gQHg,7785
+reboost/units.py,sha256=3EH8XlpbsObdu5vLgxhm1600L6UNYD5jng4SjJT_1QE,2202
+reboost/utils.py,sha256=eBw0ZzwhlniTLbjz9tnstCXSYrjSeH4FJ0fkJ9-uqps,8450
 reboost/hpge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reboost/hpge/psd.py,sha256=vFs8Y5XVW261pB6aOvWmIDzqOaBg-gEOLhL9PbjlEKI,2113
+reboost/hpge/psd.py,sha256=868OUJzO9TNja0YSrZ3NDGeEAbUtpDZnmvBDm0jCC9E,6856
 reboost/hpge/surface.py,sha256=SZyTmOCTipf27jYaJhtdInzGF1RZ2wKpbtf6HlOQYwM,3662
+reboost/hpge/utils.py,sha256=0Rx4HubCOm8JMECjWcAJXfAch9OkSlRpUkdsSlzwZ2E,2830
 reboost/math/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/math/functions.py,sha256=OymiYTcA0NXxxm-MBDw5kqyNwHoLCmuv4J48AwnSrbU,5633
 reboost/math/stats.py,sha256=iiOEi87x93kqPWeSmlRiA5Oe-R8XR-plm6Z532PhC9M,1401
@@ -28,9 +30,9 @@ reboost/shape/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/shape/cluster.py,sha256=RIvBlhHzp88aaUZGofp5SD9bimnoiqIOddhQ84jiwoM,8135
 reboost/shape/group.py,sha256=Q3DhEPxbhw3p4bwvpswSd0A-p224l5vRZnfQIEkOVJE,4475
 reboost/shape/reduction.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reboost-0.2.4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-reboost-0.2.4.dist-info/METADATA,sha256=3TF-pB7slRO8Y7UuAqVHfZpUJdUKUzAW_UKshjJzYv4,44219
-reboost-0.2.4.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
-reboost-0.2.4.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
-reboost-0.2.4.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
-reboost-0.2.4.dist-info/RECORD,,
+reboost-0.2.6.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+reboost-0.2.6.dist-info/METADATA,sha256=Irdm3IrpsoXDPWyqeYSa4QHI6MdACh0ZYAnGMQwofbg,44251
+reboost-0.2.6.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
+reboost-0.2.6.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
+reboost-0.2.6.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
+reboost-0.2.6.dist-info/RECORD,,
reboost-0.2.4.dist-info/WHEEL → reboost-0.2.6.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.0.0)
+Generator: setuptools (80.7.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 