reboost-0.2.3-py3-none-any.whl → reboost-0.2.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reboost/_version.py +2 -2
- reboost/build_hit.py +14 -17
- reboost/cli.py +3 -0
- reboost/hpge/psd.py +176 -12
- reboost/hpge/utils.py +78 -0
- reboost/units.py +75 -0
- reboost/utils.py +17 -6
- {reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/METADATA +4 -3
- {reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/RECORD +13 -11
- {reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/WHEEL +1 -1
- {reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/entry_points.txt +0 -0
- {reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/licenses/LICENSE +0 -0
- {reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/top_level.txt +0 -0
reboost/_version.py
CHANGED
reboost/build_hit.py
CHANGED
@@ -166,6 +166,7 @@ import awkward as ak
 import dbetto
 from dbetto import AttrsDict
 from lgdo import lh5
+from lgdo.types import Struct
 
 from reboost.iterator import GLMIterator
 from reboost.profile import ProfileDict
@@ -187,6 +188,7 @@ def build_hit(
     in_field: str = "stp",
     out_field: str = "hit",
     buffer: int = int(5e6),
+    overwrite: bool = False,
 ) -> None | ak.Array:
     """Build the hit tier from the remage step files.
 
@@ -213,6 +215,8 @@ def build_hit(
         name of the output field
     buffer
         buffer size for use in the `LH5Iterator`.
+    overwrite
+        flag to overwrite the existing output.
     """
     # extract the config file
     if isinstance(config, str):
@@ -238,7 +242,7 @@
     for file_idx, (stp_file, glm_file) in enumerate(zip(files.stp, files.glm)):
         msg = (
             f"... starting post processing of {stp_file} to {files.hit[file_idx]} "
-            if files.hit is not None
+            if files.hit[file_idx] is not None
            else f"... starting post processing of {stp_file}"
         )
         log.info(msg)
@@ -263,6 +267,7 @@
                for mapping in proc_group.get("detector_mapping")
            ]
        )
+
        # loop over detectors
        for in_det_idx, (in_detector, out_detectors) in enumerate(detectors_mapping.items()):
            msg = f"... processing {in_detector} (to {out_detectors})"
@@ -299,7 +304,7 @@
            # produce the hit table
            for out_det_idx, out_detector in enumerate(out_detectors):
                # loop over the rows
-                if out_detector not in output_tables and files.hit is None:
+                if out_detector not in output_tables and files.hit[file_idx] is None:
                    output_tables[out_detector] = None
 
                # get the attributes
@@ -341,32 +346,24 @@
                    # assign units in the output table
                    hit_table = utils.assign_units(hit_table, attrs)
 
-                    # get the IO mode
-
                    new_hit_file = (file_idx == 0) or (
                        files.hit[file_idx] != files.hit[file_idx - 1]
                    )
 
-                    wo_mode = (
-
-
-
-                        and out_det_idx == 0
-                        and in_det_idx == 0
-                        and chunk_idx == 0
-                        and new_hit_file
-                        )
-                        else "append"
+                    wo_mode = utils.get_wo_mode(
+                        [group_idx, out_det_idx, in_det_idx, chunk_idx],
+                        new_hit_file,
+                        overwrite=overwrite,
                    )
 
                    # now write
-                    if files.hit is not None:
+                    if files.hit[file_idx] is not None:
                        if time_dict is not None:
                            start_time = time.time()
 
                        lh5.write(
-                            hit_table,
-
+                            Struct({out_detector: hit_table}),
+                            out_field,
                            files.hit[file_idx],
                            wo_mode=wo_mode,
                        )
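
The write call now nests each detector's hit table in an LGDO `Struct` keyed by the detector name, so tables land under `<out_field>/<detector>` in the output file, with the write mode chosen by the new `utils.get_wo_mode` helper. A minimal sketch of the resulting layout (the detector and file names below are illustrative, not taken from the diff):

    import numpy as np
    from lgdo import Array, Struct, Table, lh5

    # a stand-in hit table for one hypothetical detector
    hit_table = Table(col_dict={"energy": Array(np.array([1.0, 2.0, 3.0]))})

    # mirrors the new call in build_hit: the table is written under "hit/det001"
    lh5.write(Struct({"det001": hit_table}), "hit", "hit_file.lh5", wo_mode="of")

    # expected to list something like ['hit/det001']
    print(lh5.ls("hit_file.lh5", "hit/"))
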
reboost/cli.py
CHANGED
@@ -175,6 +175,8 @@ def cli(args=None) -> None:
         msg += f" in_field: {args.in_field}\n"
         msg += f" out_field: {args.out_field}\n"
         msg += f" buffer: {args.buffer}"
+        msg += f" overwrite: {args.overwrite}"
+
         log.info(msg)
 
         build_hit(
@@ -188,4 +190,5 @@
             in_field=args.in_field,
             out_field=args.out_field,
             buffer=args.buffer,
+            overwrite=args.overwrite,
         )
reboost/hpge/psd.py
CHANGED
@@ -3,29 +3,33 @@ from __future__ import annotations
 import logging
 
 import awkward as ak
+import numba
 import numpy as np
-
+import pint
+import pyg4ometry
+from lgdo import Array, VectorOfVectors
+from numpy.typing import ArrayLike, NDArray
+
+from .. import units
+from ..units import ureg as u
+from .utils import HPGeScalarRZField
 
 log = logging.getLogger(__name__)
 
 
 def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array:
-    """
+    """R90 HPGe pulse shape heuristic.
 
     Parameters
     ----------
     edep
-
+        array of energy.
     xloc
-
+        array of x coordinate position.
     yloc
-
+        array of y coordinate position.
     zloc
-
-
-    Returns
-    -------
-    r90
+        array of z coordinate position.
     """
     tot_energy = ak.sum(edep, axis=-1, keepdims=True)
 
@@ -44,7 +48,7 @@ def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array
     sorted_dist = dist[sorted_indices]
     sorted_edep = edep[sorted_indices]
 
-    def
+    def _ak_cumsum(layout, **_kwargs):
         if layout.is_numpy:
             return ak.contents.NumpyArray(np.cumsum(layout.data))
 
@@ -52,7 +56,7 @@ def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array
 
     # Calculate the cumulative sum of energies for each event
     cumsum_edep = ak.transform(
-
+        _ak_cumsum, sorted_edep
     ) # Implement cumulative sum over whole jagged array
     if len(edep) == 1:
         cumsum_edep_corrected = cumsum_edep
@@ -72,3 +76,163 @@ def r90(edep: ak.Array, xloc: ak.Array, yloc: ak.Array, zloc: ak.Array) -> Array
     r90 = sorted_dist[r90_indices]
 
     return Array(ak.flatten(r90).to_numpy())
+
+
+def drift_time(
+    xloc: ArrayLike,
+    yloc: ArrayLike,
+    zloc: ArrayLike,
+    dt_map: HPGeScalarRZField,
+    coord_offset: pint.Quantity | pyg4ometry.gdml.Position = (0, 0, 0) * u.m,
+) -> VectorOfVectors:
+    """Calculates drift times for each step (cluster) in an HPGe detector.
+
+    Parameters
+    ----------
+    xloc
+        awkward array of x coordinate position.
+    yloc
+        awkward array of y coordinate position.
+    zloc
+        awkward array of z coordinate position.
+    dt_map
+        the drift time map.
+    coord_offset
+        these `(x, y, z)` coordinates will be subtracted from `(xloc, yloc, zloc)`
+        before drift time computation. The length units must be the same as
+        `xloc`, `yloc` and `zloc`.
+    """
+    # sanitize coord_offset
+    coord_offset = units.pg4_to_pint(coord_offset)
+
+    # unit handling (for matching with drift time map units)
+    xu, yu = [units.units_convfact(data, dt_map.r_units) for data in (xloc, yloc)]
+    zu = units.units_convfact(zloc, dt_map.z_units)
+
+    # unwrap LGDOs
+    xloc, yloc, zloc = [units.unwrap_lgdo(data)[0] for data in (xloc, yloc, zloc)]
+
+    # awkward transform to apply the drift time map to the step coordinates
+    def _ak_dt_map(layouts, **_kwargs):
+        if layouts[0].is_numpy and layouts[1].is_numpy:
+            return ak.contents.NumpyArray(
+                dt_map.φ(np.stack([layouts[0].data, layouts[1].data], axis=1))
+            )
+
+        return None
+
+    # transform coordinates
+    xloc = xu * xloc - coord_offset[0].to(dt_map.r_units).m
+    yloc = yu * yloc - coord_offset[1].to(dt_map.r_units).m
+    zloc = zu * zloc - coord_offset[2].to(dt_map.z_units).m
+
+    # evaluate the drift time
+    dt_values = ak.transform(
+        _ak_dt_map,
+        np.sqrt(xloc**2 + yloc**2),
+        zloc,
+    )
+
+    return VectorOfVectors(
+        dt_values,
+        attrs={"units": units.unit_to_lh5_attr(dt_map.φ_units)},
+    )
+
+
+def drift_time_heuristic(
+    drift_time: ArrayLike,
+    edep: ArrayLike,
+) -> Array:
+    """HPGe drift-time-based pulse-shape heuristic.
+
+    See :func:`_drift_time_heuristic_impl` for a description of the algorithm.
+
+    Parameters
+    ----------
+    drift_time
+        drift time of charges originating from steps/clusters. Can be
+        calculated with :func:`drift_time`.
+    edep
+        energy deposited in step/cluster (same shape as `drift_time`).
+    """
+    # extract LGDO data and units
+    drift_time, t_units = units.unwrap_lgdo(drift_time)
+    edep, e_units = units.unwrap_lgdo(edep)
+
+    # we want to attach the right units to the dt heuristic, if possible
+    attrs = {}
+    if t_units is not None and e_units is not None:
+        attrs["units"] = units.unit_to_lh5_attr(t_units / e_units)
+
+    return Array(_drift_time_heuristic_impl(drift_time, edep), attrs=attrs)
+
+
+@numba.njit(cache=True)
+def _drift_time_heuristic_impl(
+    dt: ak.Array,
+    edep: ak.Array,
+) -> NDArray:
+    r"""Low-level implementation of the HPGe drift-time-based pulse-shape heuristic.
+
+    Accepts Awkward arrays and uses Numba to speed up the computation.
+
+    For each hit (collection of steps), the drift times and corresponding
+    energies are sorted in ascending order. The function finds the optimal
+    split point :math:`m` that maximizes the *identification metric*:
+
+    .. math::
+
+        I = \frac{|T_1 - T_2|}{E_\text{scale}(E_1, E_2)}
+
+    where:
+
+    .. math::
+
+        T_1 = \frac{\sum_{i < m} t_i E_i}{\sum_{i < m} E_i}
+        \quad \text{and} \quad
+        T_2 = \frac{\sum_{i \geq m} t_i E_i}{\sum_{i \geq m} E_i}
+
+    are the energy-weighted mean drift times of the two groups.
+
+    .. math::
+
+        E_\text{scale}(E_1, E_2) = \frac{1}{\sqrt{E_1 E_2}}
+
+    is the scaling factor.
+
+    The function iterates over all possible values of :math:`m` and selects the
+    maximum `I` as the drift time heuristic value.
+    """
+    dt_heu = np.zeros(len(dt))
+
+    # loop over hits
+    for i in range(len(dt)):
+        t = np.asarray(dt[i])
+        e = np.asarray(edep[i])
+
+        valid_idx = np.where(e > 0)[0]
+        if len(valid_idx) < 2:
+            continue
+
+        t = t[valid_idx]
+        e = e[valid_idx]
+
+        sort_idx = np.argsort(t)
+        t = t[sort_idx]
+        e = e[sort_idx]
+
+        max_id_metric = 0
+        for j in range(1, len(t)):
+            e1 = np.sum(e[:j])
+            e2 = np.sum(e[j:])
+
+            t1 = np.sum(t[:j] * e[:j]) / e1
+            t2 = np.sum(t[j:] * e[j:]) / e2
+
+            id_metric = abs(t1 - t2) * np.sqrt(e1 * e2)
+
+            max_id_metric = max(max_id_metric, id_metric)
+
+        dt_heu[i] = max_id_metric
+
+    return dt_heu
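
For intuition, the identification metric maximized by `_drift_time_heuristic_impl` can be reproduced for a single toy hit with plain NumPy; the numbers below are made up:

    import numpy as np

    # drift times and energies of the steps of one hit (zero-energy steps already removed)
    t = np.array([100.0, 120.0, 800.0, 850.0])
    e = np.array([50.0, 30.0, 200.0, 20.0])

    order = np.argsort(t)
    t, e = t[order], e[order]

    metrics = []
    for m in range(1, len(t)):
        e1, e2 = e[:m].sum(), e[m:].sum()
        t1 = (t[:m] * e[:m]).sum() / e1  # energy-weighted mean drift time, first group
        t2 = (t[m:] * e[m:]).sum() / e2  # energy-weighted mean drift time, second group
        metrics.append(abs(t1 - t2) * np.sqrt(e1 * e2))  # |T1 - T2| / E_scale(E1, E2)

    dt_heuristic = max(metrics)  # the value stored for this hit
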
reboost/hpge/utils.py
ADDED
@@ -0,0 +1,78 @@
+from __future__ import annotations
+
+from typing import Callable, NamedTuple
+
+import lgdo
+import numpy as np
+import pint
+from dbetto import AttrsDict
+from lgdo import lh5
+from scipy.interpolate import RegularGridInterpolator
+
+
+class HPGeScalarRZField(NamedTuple):
+    """A scalar field defined in the cylindrical-like (r, z) HPGe plane."""
+
+    φ: Callable
+    "Scalar field, function of the coordinates (r, z)."
+    r_units: pint.Unit
+    "Physical units of the coordinate `r`."
+    z_units: pint.Unit
+    "Physical units of the coordinate `z`."
+    φ_units: pint.Unit
+    "Physical units of the field."
+
+
+def get_hpge_scalar_rz_field(
+    filename: str, obj: str, field: str, out_of_bounds_val: int | float = np.nan, **kwargs
+) -> HPGeScalarRZField:
+    """Create an interpolator for a gridded scalar HPGe field defined on `(r, z)`.
+
+    Reads from disk the following data structure: ::
+
+        FILENAME/
+        └── OBJ · struct{r,z,FIELD}
+            ├── r · array<1>{real} ── {'units': 'UNITS'}
+            ├── z · array<1>{real} ── {'units': 'UNITS'}
+            └── FIELD · array<2>{real} ── {'units': 'UNITS'}
+
+    where ``FILENAME``, ``OBJ`` and ``FIELD`` are provided as
+    arguments to this function. `obj` is a :class:`~lgdo.types.struct.Struct`,
+    `r` and `z` are one dimensional arrays specifying the radial and z
+    coordinates of the rectangular grid — not the coordinates of each single
+    grid point. In this coordinate system, the center of the p+ contact surface
+    is at `(0, 0)`, with the p+ contact facing downwards. `field` is instead a
+    two-dimensional array specifying the field value at each grid point. The
+    first and second dimensions are `r` and `z`, respectively. NaN values are
+    interpreted as points outside the detector profile in the `(r, z)` plane.
+
+    Before returning a :class:`HPGeScalarRZField`, the gridded field is fed to
+    :class:`scipy.interpolate.RegularGridInterpolator`.
+
+    Parameters
+    ----------
+    filename
+        name of the LH5 file containing the gridded scalar field.
+    obj
+        name of the HDF5 dataset where the data is saved.
+    field
+        name of the HDF5 dataset holding the field values.
+    out_of_bounds_val
+        value to use to replace NaNs in the field values.
+    """
+    data = lh5.read(obj, filename)
+
+    if not isinstance(data, lgdo.Struct):
+        msg = f"{obj} in {filename} is not an LGDO Struct"
+        raise ValueError(msg)
+
+    data = AttrsDict(
+        {
+            k: np.nan_to_num(data[k].view_as("np", with_units=True), nan=out_of_bounds_val)
+            for k in ("r", "z", field)
+        }
+    )
+
+    interpolator = RegularGridInterpolator((data.r.m, data.z.m), data[field].m, **kwargs)
+
+    return HPGeScalarRZField(interpolator, data.r.u, data.z.u, data[field].u)
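
A sketch of how the expected on-disk layout could be produced and read back; the file, object and field names below are hypothetical, while the lgdo calls follow the same API used elsewhere in this diff:

    import numpy as np
    from lgdo import Array, Struct, lh5

    from reboost.hpge.utils import get_hpge_scalar_rz_field

    # toy (r, z) grid and field values (e.g. drift times in ns)
    r = np.linspace(0, 40, 41)
    z = np.linspace(0, 80, 81)
    vals = np.random.uniform(0, 1000, size=(r.size, z.size))

    lh5.write(
        Struct(
            {
                "r": Array(r, attrs={"units": "mm"}),
                "z": Array(z, attrs={"units": "mm"}),
                "drift_times": Array(vals, attrs={"units": "ns"}),
            }
        ),
        "drift_time_map",
        "dt_map.lh5",
        wo_mode="of",
    )

    field = get_hpge_scalar_rz_field("dt_map.lh5", "drift_time_map", "drift_times")
    # evaluate the interpolated field at (r, z) = (10 mm, 20 mm)
    print(field.φ(np.array([[10.0, 20.0]])), field.φ_units)
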
reboost/units.py
ADDED
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+import pint
+import pyg4ometry as pg4
+from lgdo import LGDO
+
+log = logging.getLogger(__name__)
+
+ureg = pint.get_application_registry()
+"""The physical units registry."""
+
+# default pretty printing of physical units
+ureg.formatter.default_format = "~P"
+
+
+def pg4_to_pint(obj) -> pint.Quantity:
+    """Convert pyg4ometry object to pint Quantity."""
+    if isinstance(obj, pint.Quantity):
+        return obj
+    if isinstance(obj, pg4.gdml.Defines.VectorBase):
+        return [getattr(obj, field).eval() for field in ("x", "y", "z")] * ureg(obj.unit)
+    msg = f"I don't know how to convert object of type {type(obj)} to pint object"
+    raise ValueError(msg)
+
+
+def units_convfact(data: Any, target_units: pint.Units) -> float:
+    """Calculate numeric conversion factor to reach `target_units`.
+
+    Parameters
+    ----------
+    data
+        starting data structure. If an LGDO, try to determine units by peeking
+        into its attributes. Otherwise, just return 1.
+    target_units
+        units you wish to convert data to.
+    """
+    if isinstance(data, LGDO) and "units" in data.attrs:
+        return ureg(data.attrs["units"]).to(target_units).magnitude
+    return 1
+
+
+def unwrap_lgdo(data: Any, library: str = "ak") -> tuple(Any, pint.Unit | None):
+    """Return a view of the data held by the LGDO and its physical units.
+
+    Parameters
+    ----------
+    data
+        the data container. If not an LGDO, it will be returned as is with
+        ``None`` units.
+    library
+        forwarded to :func:`lgdo.view_as`.
+
+    Returns
+    -------
+    A tuple of the un-lgdo'd data and the data units.
+    """
+    ret_data = data
+    ret_units = None
+    if isinstance(data, LGDO):
+        ret_data = data.view_as(library)
+        if "units" in data.attrs:
+            ret_units = ureg(data.attrs["units"]).u
+
+    return ret_data, ret_units
+
+
+def unit_to_lh5_attr(unit: pint.Unit) -> str:
+    """Convert Pint unit to a string that can be used as attrs["units"] in an LGDO."""
+    # TODO: we should check if this can be always parsed by Unitful.jl
+    if isinstance(unit, pint.Unit):
+        return f"{unit:~C}"
+    return unit
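
A quick sketch of how the new helpers behave on an LGDO that carries a `units` attribute (the array values are illustrative):

    import numpy as np
    from lgdo import Array

    from reboost import units

    x = Array(np.array([1.0, 2.0, 3.0]), attrs={"units": "mm"})

    units.units_convfact(x, units.ureg.m)  # 0.001, the factor taking mm to m
    data, xu = units.unwrap_lgdo(x)        # awkward view of the data and Unit('millimeter')
    units.unit_to_lh5_attr(xu)             # 'mm', ready to go back into attrs["units"]
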
reboost/utils.py
CHANGED
@@ -14,6 +14,14 @@ from lgdo.types import Table
 log = logging.getLogger(__name__)
 
 
+def get_wo_mode(indices: list[int], new_hit_file: bool, overwrite: bool = False):
+    """Get the mode for lh5 file writing."""
+    good_idx = all(i == 0 for i in indices)
+    if good_idx and new_hit_file:
+        return "of" if overwrite else "w"
+    return "ac"
+
+
 def get_file_dict(
     stp_files: list[str] | str,
     glm_files: list[str] | str | None,
@@ -37,6 +45,9 @@
         files will be created in memory.
     """
     # make a list of the right length
+    if isinstance(stp_files, str):
+        stp_files = [stp_files]
+
     glm_files_list = [None] * len(stp_files) if glm_files is None else glm_files
 
     # make a list of files in case
@@ -44,13 +55,13 @@
     # 2) hit_files and stp_files are both lists of different length
 
     hit_is_list = isinstance(hit_files, list)
-    stp_is_list = isinstance(stp_files, list)
 
-
-
-    )
-
-
+    if not hit_is_list:
+        hit_files_list = [hit_files] * len(stp_files)
+    elif hit_is_list and len(hit_files) == 1 and len(stp_files) > 1:
+        hit_files_list = [hit_files[0]] * len(stp_files)
+    else:
+        hit_files_list = hit_files
 
     files = {}
 
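
`get_wo_mode` condenses the write-mode logic previously inlined in `build_hit`: only the very first write to a new hit file gets a fresh-file mode, everything else appends. A short illustration, where the returned strings are the `wo_mode` codes passed to `lgdo.lh5.write`:

    from reboost.utils import get_wo_mode

    # first group, detector and chunk of a new hit file
    get_wo_mode([0, 0, 0, 0], True)                  # -> "w"
    get_wo_mode([0, 0, 0, 0], True, overwrite=True)  # -> "of"

    # any later group/detector/chunk, or a file already being written
    get_wo_mode([0, 1, 0, 0], True)                  # -> "ac"
    get_wo_mode([0, 0, 0, 0], False)                 # -> "ac"
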
{reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reboost
-Version: 0.2.3
+Version: 0.2.5
 Summary: New LEGEND Monte-Carlo simulation post-processing
 Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
@@ -700,7 +700,7 @@ Requires-Dist: colorlog
 Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: numba
-Requires-Dist: legend-pydataobj>=1.11.
+Requires-Dist: legend-pydataobj>=1.11.10
 Requires-Dist: legend-pygeom-optics>=0.6.5
 Requires-Dist: hist
 Requires-Dist: dbetto
@@ -708,6 +708,7 @@ Requires-Dist: particle
 Requires-Dist: pandas
 Requires-Dist: matplotlib
 Requires-Dist: pygama
+Requires-Dist: pyg4ometry
 Provides-Extra: all
 Requires-Dist: reboost[docs,test]; extra == "all"
 Provides-Extra: docs
@@ -722,7 +723,7 @@ Requires-Dist: pytest-cov; extra == "test"
 Requires-Dist: legend-pygeom-hpges; extra == "test"
 Requires-Dist: legend-pygeom-tools; extra == "test"
 Requires-Dist: pyg4ometry; extra == "test"
-Requires-Dist: pylegendtestdata; extra == "test"
+Requires-Dist: pylegendtestdata>=0.6; extra == "test"
 Dynamic: license-file
 
 # reboost
{reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/RECORD
CHANGED
@@ -1,18 +1,20 @@
 reboost/__init__.py,sha256=RVNl3Qgx_hTUeBGXaWYmiTcmXUDhTfvlAGGC8bo_jP8,316
-reboost/_version.py,sha256=
+reboost/_version.py,sha256=N3oBwJUFmS-AwCjqOcSlRW4GvSq-uJJMaBvoGfv1-hM,511
 reboost/build_evt.py,sha256=zj3wG_kaV3EoRMQ33AkCNa_2Fv8cLtRuhyRyRmSrOYQ,4797
 reboost/build_glm.py,sha256=LQkM6x6mMOE92-c78uoclOvP9zp3vdMuLQCSP2f2Zk4,9263
-reboost/build_hit.py,sha256=
+reboost/build_hit.py,sha256=yfOUzAaKFGrQ0ENgUvZUh9Q9EVK42kISNcpuRLOjlRg,14167
 reboost/build_tcm.py,sha256=-PawBHoHj0zsm4XsZu5bco9d9a09STicZchduefSNfI,2951
-reboost/cli.py,sha256=
+reboost/cli.py,sha256=swPJcYzvg18rSOMN-mpe0PCMf1-a9V7osIssX7JP7k0,6459
 reboost/core.py,sha256=7Nclc6RUCOSJ1CWVAX0rFNJGM1LEgqvc4tD04CxEAtg,10766
 reboost/iterator.py,sha256=72AyoRTgMpWghZt2UOqRj0RGiNzaiBAwgNIUZdduK2s,4698
 reboost/log_utils.py,sha256=VqS_9OC5NeNU3jcowVOBB0NJ6ssYvNWnirEY-JVduEA,766
 reboost/profile.py,sha256=EOTmjmS8Rm_nYgBWNh6Rntl2XDsxdyed7yEdWtsZEeg,2598
-reboost/
+reboost/units.py,sha256=3EH8XlpbsObdu5vLgxhm1600L6UNYD5jng4SjJT_1QE,2202
+reboost/utils.py,sha256=R9_JUOp_CqEn52iLQCKEs_rj9N_icHmMXf8DGettggs,8050
 reboost/hpge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reboost/hpge/psd.py,sha256=
+reboost/hpge/psd.py,sha256=868OUJzO9TNja0YSrZ3NDGeEAbUtpDZnmvBDm0jCC9E,6856
 reboost/hpge/surface.py,sha256=SZyTmOCTipf27jYaJhtdInzGF1RZ2wKpbtf6HlOQYwM,3662
+reboost/hpge/utils.py,sha256=0Rx4HubCOm8JMECjWcAJXfAch9OkSlRpUkdsSlzwZ2E,2830
 reboost/math/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/math/functions.py,sha256=OymiYTcA0NXxxm-MBDw5kqyNwHoLCmuv4J48AwnSrbU,5633
 reboost/math/stats.py,sha256=iiOEi87x93kqPWeSmlRiA5Oe-R8XR-plm6Z532PhC9M,1401
@@ -28,9 +30,9 @@ reboost/shape/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/shape/cluster.py,sha256=RIvBlhHzp88aaUZGofp5SD9bimnoiqIOddhQ84jiwoM,8135
 reboost/shape/group.py,sha256=Q3DhEPxbhw3p4bwvpswSd0A-p224l5vRZnfQIEkOVJE,4475
 reboost/shape/reduction.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reboost-0.2.
-reboost-0.2.
-reboost-0.2.
-reboost-0.2.
-reboost-0.2.
-reboost-0.2.
+reboost-0.2.5.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+reboost-0.2.5.dist-info/METADATA,sha256=ys5cRQSdNlDrzQW1qxwLuoFq1M8QOhdSC5DKE4meiGQ,44251
+reboost-0.2.5.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+reboost-0.2.5.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
+reboost-0.2.5.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
+reboost-0.2.5.dist-info/RECORD,,
{reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/entry_points.txt
File without changes
{reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/licenses/LICENSE
File without changes
{reboost-0.2.3.dist-info → reboost-0.2.5.dist-info}/top_level.txt
File without changes