reboost 0.7.0__py3-none-any.whl → 0.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reboost/_version.py +2 -2
- reboost/core.py +0 -2
- reboost/hpge/psd.py +11 -9
- reboost/optmap/cli.py +40 -101
- reboost/optmap/convolve.py +51 -367
- reboost/optmap/create.py +37 -124
- reboost/optmap/evt.py +5 -2
- reboost/optmap/mapview.py +9 -7
- reboost/optmap/optmap.py +11 -12
- reboost/spms/pe.py +83 -4
- reboost/utils.py +1 -1
- {reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/METADATA +5 -2
- {reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/RECORD +17 -17
- {reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/WHEEL +0 -0
- {reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/entry_points.txt +0 -0
- {reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/licenses/LICENSE +0 -0
- {reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/top_level.txt +0 -0
reboost/optmap/create.py
CHANGED

@@ -9,17 +9,14 @@ from pathlib import Path
 from typing import Literal
 
 import numpy as np
-import scipy.optimize
-from lgdo import Array, Histogram, Scalar, lh5
+from lgdo import Histogram, lh5
 from numba import njit
 from numpy.typing import NDArray
 
 from ..log_utils import setup_log
 from .evt import (
-    EVT_TABLE_NAME,
     generate_optmap_evt,
     get_optical_detectors_from_geom,
-    read_optmap_evt,
 )
 from .optmap import OpticalMap
 
@@ -27,23 +24,21 @@ log = logging.getLogger(__name__)
 
 
 def _optmaps_for_channels(
-    optmap_evt_columns,
+    all_det_ids: dict[int, str],
     settings,
     chfilter: tuple[str | int] | Literal["*"] = (),
     use_shmem: bool = False,
 ):
-    all_det_ids = [ch_id for ch_id in optmap_evt_columns if ch_id.isnumeric()]
-
     if chfilter != "*":
         chfilter = [str(ch) for ch in chfilter]  # normalize types
-        optmap_det_ids = [det for det in all_det_ids if det in chfilter]
+        optmap_det_ids = {det: name for det, name in all_det_ids.items() if str(det) in chfilter}
     else:
         optmap_det_ids = all_det_ids
 
     log.info("creating empty optmaps")
     optmap_count = len(optmap_det_ids) + 1
     optmaps = [
-        OpticalMap("all" if i == 0 else optmap_det_ids[i - 1], settings, use_shmem)
+        OpticalMap("all" if i == 0 else list(optmap_det_ids.values())[i - 1], settings, use_shmem)
         for i in range(optmap_count)
     ]
 
@@ -76,34 +71,6 @@ def _fill_hit_maps(optmaps: list[OpticalMap], loc, hitcounts: NDArray, ch_idx_to
         optmaps[i].fill_hits(locm)
 
 
-def _count_multi_ph_detection(hitcounts) -> NDArray:
-    hits_per_primary = hitcounts.sum(axis=1)
-    bins = np.arange(0, hits_per_primary.max() + 1.5) - 0.5
-    return np.histogram(hits_per_primary, bins)[0]
-
-
-def _fit_multi_ph_detection(hits_per_primary) -> float:
-    if len(hits_per_primary) <= 2:  # have only 0 and 1 hits, can't fit (and also don't need to).
-        return np.inf
-
-    x = np.arange(0, len(hits_per_primary))
-    popt, pcov = scipy.optimize.curve_fit(
-        lambda x, p0, k: p0 * np.exp(-k * x), x[1:], hits_per_primary[1:]
-    )
-    best_fit_exponent = popt[1]
-
-    log.info(
-        "p(> 1 detected photon)/p(1 detected photon) = %f",
-        sum(hits_per_primary[2:]) / hits_per_primary[1],
-    )
-    log.info(
-        "p(> 1 detected photon)/p(<=1 detected photon) = %f",
-        sum(hits_per_primary[2:]) / sum(hits_per_primary[0:2]),
-    )
-
-    return best_fit_exponent
-
-
 def _create_optical_maps_process_init(optmaps, log_level) -> None:
     # need to use shared global state. passing the shared memory arrays via "normal" arguments to
     # the worker function is not supported...
@@ -115,34 +82,29 @@ def _create_optical_maps_process_init(optmaps, log_level) -> None:
 
 
 def _create_optical_maps_process(
-    optmap_events_fn, buffer_len, is_stp_file, all_det_ids, ch_idx_to_map_idx
-) -> NDArray:
+    optmap_events_fn, buffer_len, all_det_ids, ch_idx_to_map_idx
+) -> bool:
     log.info("started worker task for %s", optmap_events_fn)
     x = _create_optical_maps_chunk(
         optmap_events_fn,
         buffer_len,
-        is_stp_file,
        all_det_ids,
         _shared_optmaps,
         ch_idx_to_map_idx,
     )
     log.info("finished worker task for %s", optmap_events_fn)
-    return
+    return x
 
 
 def _create_optical_maps_chunk(
-    optmap_events_fn, buffer_len, is_stp_file, all_det_ids, optmaps, ch_idx_to_map_idx
-) -> NDArray:
-    if not is_stp_file:
-        optmap_events_it = read_optmap_evt(optmap_events_fn, buffer_len)
-    else:
-        optmap_events_it = generate_optmap_evt(optmap_events_fn, all_det_ids, buffer_len)
+    optmap_events_fn, buffer_len, all_det_ids, optmaps, ch_idx_to_map_idx
+) -> bool:
+    cols = [str(c) for c in all_det_ids]
+    optmap_events_it = generate_optmap_evt(optmap_events_fn, cols, buffer_len)
 
-    hits_per_primary = np.zeros(10, dtype=np.int64)
-    hits_per_primary_len = 0
     for it_count, events_lgdo in enumerate(optmap_events_it):
         optmap_events = events_lgdo.view_as("pd")
-        hitcounts = optmap_events[all_det_ids].to_numpy()
+        hitcounts = optmap_events[cols].to_numpy()
         loc = optmap_events[["xloc", "yloc", "zloc"]].to_numpy()
 
         log.debug("filling vertex histogram (%d)", it_count)
@@ -150,23 +112,19 @@ def _create_optical_maps_chunk(
 
         log.debug("filling hits histogram (%d)", it_count)
         _fill_hit_maps(optmaps, loc, hitcounts, ch_idx_to_map_idx)
-        hpp = _count_multi_ph_detection(hitcounts)
-        hits_per_primary_len = max(hits_per_primary_len, len(hpp))
-        hits_per_primary[0 : len(hpp)] += hpp
 
     # commit the final part of the hits to the maps.
     for i in range(len(optmaps)):
         optmaps[i].fill_hits_flush()
     gc.collect()
 
-    return hits_per_primary[0:hits_per_primary_len]
+    return True
 
 
 def create_optical_maps(
     optmap_events_fn: list[str],
     settings,
     buffer_len: int = int(5e6),
-    is_stp_file: bool = True,
     chfilter: tuple[str | int] | Literal["*"] = (),
     output_lh5_fn: str | None = None,
     after_save: Callable[[int, str, OpticalMap]] | None = None,
@@ -182,8 +140,6 @@ def create_optical_maps(
         list of filenames to lh5 files, that can either be stp files from remage or "optmap-evt"
         files with a table ``/optmap_evt`` with columns ``{x,y,z}loc`` and one column (with numeric
         header) for each SiPM channel.
-    is_stp_file
-        if true, do convert a remage output file (stp file) on-the-fly to an optmap-evt file.
     chfilter
         tuple of detector ids that will be included in the resulting optmap. Those have to match
         the column names in ``optmap_events_fn``.
@@ -196,12 +152,7 @@ def create_optical_maps(
 
     use_shmem = n_procs is None or n_procs > 1
 
-    if not is_stp_file:
-        optmap_evt_columns = list(
-            lh5.read(EVT_TABLE_NAME, optmap_events_fn[0], start_row=0, n_rows=1).keys()
-        )  # peek into the (first) file to find column names.
-    else:
-        optmap_evt_columns = [str(i) for i in get_optical_detectors_from_geom(geom_fn)]
+    optmap_evt_columns = get_optical_detectors_from_geom(geom_fn)
 
     all_det_ids, optmaps, optmap_det_ids = _optmaps_for_channels(
         optmap_evt_columns, settings, chfilter=chfilter, use_shmem=use_shmem
@@ -209,11 +160,17 @@ def create_optical_maps(
 
     # indices for later use in _compute_hit_maps.
     ch_idx_to_map_idx = np.array(
-        [optmap_det_ids.index(d) + 1 if d in optmap_det_ids else -1 for d in all_det_ids]
+        [
+            list(optmap_det_ids.keys()).index(d) + 1 if d in optmap_det_ids else -1
+            for d in all_det_ids
+        ]
     )
     assert np.sum(ch_idx_to_map_idx > 0) == len(optmaps) - 1
 
-    log.info("creating optical map groups: %s", ", ".join(["all", *optmap_det_ids]))
+    log.info(
+        "creating optical map groups: %s",
+        ", ".join(["all", *[str(t) for t in optmap_det_ids.items()]]),
+    )
 
     q = []
 
@@ -221,9 +178,7 @@ def create_optical_maps(
     if not use_shmem:
         for fn in optmap_events_fn:
             q.append(
-                _create_optical_maps_chunk(
-                    fn, buffer_len, is_stp_file, all_det_ids, optmaps, ch_idx_to_map_idx
-                )
+                _create_optical_maps_chunk(fn, buffer_len, all_det_ids, optmaps, ch_idx_to_map_idx)
             )
     else:
         ctx = mp.get_context("forkserver")
@@ -243,14 +198,14 @@ def create_optical_maps(
         for fn in optmap_events_fn:
             r = pool.apply_async(
                 _create_optical_maps_process,
-                args=(fn, buffer_len, is_stp_file, all_det_ids, ch_idx_to_map_idx),
+                args=(fn, buffer_len, all_det_ids, ch_idx_to_map_idx),
             )
             pool_results.append((r, fn))
 
         pool.close()
         for r, fn in pool_results:
             try:
-                q.append(
+                q.append(r.get())
             except BaseException as e:
                 msg = f"error while processing file {fn}"
                 raise RuntimeError(msg) from e  # re-throw errors of workers.
@@ -258,17 +213,8 @@ def create_optical_maps(
         pool.join()
         log.info("joined worker process pool")
 
-    # merge hitcounts.
     if len(q) != len(optmap_events_fn):
         log.error("got %d results for %d files", len(q), len(optmap_events_fn))
-    hits_per_primary = np.zeros(10, dtype=np.int64)
-    hits_per_primary_len = 0
-    for hitcounts in q:
-        hits_per_primary[0 : len(hitcounts)] += hitcounts
-        hits_per_primary_len = max(hits_per_primary_len, len(hitcounts))
-
-    hits_per_primary = hits_per_primary[0:hits_per_primary_len]
-    hits_per_primary_exponent = _fit_multi_ph_detection(hits_per_primary)
 
     # all maps share the same vertex histogram.
     for i in range(1, len(optmaps)):
@@ -279,7 +225,7 @@ def create_optical_maps(
         optmaps[i].create_probability()
         if check_after_create:
             optmaps[i].check_histograms()
-        group = "all" if i == 0 else "
+        group = "all" if i == 0 else "channels/" + list(optmap_det_ids.values())[i - 1]
         if output_lh5_fn is not None:
             optmaps[i].write_lh5(lh5_file=output_lh5_fn, group=group)
 
@@ -288,14 +234,12 @@ def create_optical_maps(
 
         optmaps[i] = None  # clear some memory.
 
-    if output_lh5_fn is not None:
-        lh5.write(Array(hits_per_primary), "_hitcounts", lh5_file=output_lh5_fn)
-        lh5.write(Scalar(hits_per_primary_exponent), "_hitcounts_exp", lh5_file=output_lh5_fn)
-
 
 def list_optical_maps(lh5_file: str) -> list[str]:
-    maps = lh5.ls(lh5_file)
-    return [m for m in maps if not m.startswith("_")]
+    maps = list(lh5.ls(lh5_file, "/channels/"))
+    if "all" in lh5.ls(lh5_file):
+        maps.append("all")
+    return maps
 
 
 def _merge_optical_maps_process(
@@ -313,9 +257,9 @@ def _merge_optical_maps_process(
 
     all_edges = None
     for optmap_fn in map_l5_files:
-        nr_det = lh5.read(f"/{d}/nr_det", optmap_fn)
+        nr_det = lh5.read(f"/{d}/_nr_det", optmap_fn)
         assert isinstance(nr_det, Histogram)
-        nr_gen = lh5.read(f"/{d}/nr_gen", optmap_fn)
+        nr_gen = lh5.read(f"/{d}/_nr_gen", optmap_fn)
         assert isinstance(nr_gen, Histogram)
 
         assert OpticalMap._edges_eq(nr_det.binning, nr_gen.binning)
@@ -333,7 +277,8 @@ def _merge_optical_maps_process(
     merged_map.check_histograms(include_prefix=True)
 
     if write_part_file:
-        output_lh5_fn = f"{output_lh5_fn}_{d}.mappart.lh5"
+        d_for_tmp = d.replace("/", "_")
+        output_lh5_fn = f"{output_lh5_fn}_{d_for_tmp}.mappart.lh5"
     wo_mode = "overwrite_file" if write_part_file else "write_safe"
     merged_map.write_lh5(lh5_file=output_lh5_fn, group=d, wo_mode=wo_mode)
 
@@ -410,7 +355,7 @@ def merge_optical_maps(
     # transfer to actual output file.
     for d, part_fn in q:
         assert isinstance(part_fn, str)
-        for h_name in ("nr_det", "nr_gen", "p_det", "p_det_err"):
+        for h_name in ("_nr_det", "_nr_gen", "prob", "prob_unc"):
             obj = f"/{d}/{h_name}"
             log.info("transfer %s from %s", obj, part_fn)
             h = lh5.read(obj, part_fn)
@@ -418,41 +363,14 @@ def merge_optical_maps(
             lh5.write(h, obj, output_lh5_fn, wo_mode="write_safe")
         Path(part_fn).unlink()
 
-    # merge hitcounts.
-    hits_per_primary = np.zeros(10, dtype=np.int64)
-    hits_per_primary_len = 0
-    for optmap_fn in map_l5_files:
-        if "_hitcounts" not in lh5.ls(optmap_fn):
-            log.warning("skipping _hitcounts calculations, missing in file %s", optmap_fn)
-            return
-        hitcounts = lh5.read("/_hitcounts", optmap_fn)
-        assert isinstance(hitcounts, Array)
-        hits_per_primary[0 : len(hitcounts)] += hitcounts
-        hits_per_primary_len = max(hits_per_primary_len, len(hitcounts))
-
-    hits_per_primary = hits_per_primary[0:hits_per_primary_len]
-    lh5.write(Array(hits_per_primary), "_hitcounts", lh5_file=output_lh5_fn)
-
-    # re-calculate hitcounts exponent.
-    hits_per_primary_exponent = _fit_multi_ph_detection(hits_per_primary)
-    lh5.write(Scalar(hits_per_primary_exponent), "_hitcounts_exp", lh5_file=output_lh5_fn)
-
 
 def check_optical_map(map_l5_file: str):
    """Run a health check on the map file.
 
     This checks for consistency, and outputs details on map statistics.
     """
-    if "_hitcounts_exp" not in lh5.ls(map_l5_file):
-        log.info("no _hitcounts_exp found")
-    elif lh5.read("_hitcounts_exp", lh5_file=map_l5_file).value != np.inf:
-        log.error("unexpected _hitcounts_exp not equal to positive infinity")
-        return
-
-    if "_hitcounts" not in lh5.ls(map_l5_file):
-        log.info("no _hitcounts found")
-    elif lh5.read("_hitcounts", lh5_file=map_l5_file).nda.shape != (2,):
-        log.error("unexpected _hitcounts shape")
+    if "_hitcounts_exp" in lh5.ls(map_l5_file):
+        log.error("found _hitcounts_exp which is not supported any more")
         return
 
     all_binning = None
@@ -503,8 +421,3 @@ def rebin_optical_maps(map_l5_file: str, output_lh5_file: str, factor: int):
     om_new.h_hits = _rebin_map(om.h_hits, factor)
     om_new.create_probability()
     om_new.write_lh5(lh5_file=output_lh5_file, group=submap, wo_mode="write_safe")
-
-    # just copy hitcounts exponent.
-    for dset in ("_hitcounts_exp", "_hitcounts"):
-        if dset in lh5.ls(map_l5_file):
-            lh5.write(lh5.read(dset, lh5_file=map_l5_file), dset, lh5_file=output_lh5_file)
reboost/optmap/evt.py
CHANGED

@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import logging
+from collections import OrderedDict
 from collections.abc import Generator, Iterable
 from pathlib import Path
 
@@ -125,13 +126,15 @@ def build_optmap_evt(
     lh5_out_file_tmp.rename(lh5_out_file)
 
 
-def get_optical_detectors_from_geom(geom_fn) -> list[int]:
+def get_optical_detectors_from_geom(geom_fn) -> dict[int, str]:
     import pyg4ometry
     import pygeomtools
 
     geom_registry = pyg4ometry.gdml.Reader(geom_fn).getRegistry()
     detectors = pygeomtools.get_all_sensvols(geom_registry)
-    return [d.uid for d in detectors.values() if d.detector_type == "optical"]
+    return OrderedDict(
+        [(d.uid, name) for name, d in detectors.items() if d.detector_type == "optical"]
+    )
 
 
 def read_optmap_evt(lh5_file: str, buffer_len: int = int(5e6)) -> LH5Iterator:
reboost/optmap/mapview.py
CHANGED

@@ -92,14 +92,16 @@ def _channel_selector(fig) -> None:
 def _read_data(
     optmap_fn: str,
     detid: str = "all",
-    histogram_choice: str = "p_det",
+    histogram_choice: str = "prob",
 ) -> tuple[tuple[NDArray], NDArray]:
-    histogram = histogram_choice if histogram_choice != "p_det_err_rel" else "p_det"
+    histogram = histogram_choice if histogram_choice != "prob_unc_rel" else "prob"
+    detid = f"channels/{detid}" if detid != all and not detid.startswith("channels/") else detid
+
     optmap_all = lh5.read(f"/{detid}/{histogram}", optmap_fn)
     optmap_edges = tuple([b.edges for b in optmap_all.binning])
     optmap_weights = optmap_all.weights.nda.copy()
-    if histogram_choice == "p_det_err_rel":
-        optmap_err = lh5.read(f"/{detid}/p_det_err", optmap_fn)
+    if histogram_choice == "prob_unc_rel":
+        optmap_err = lh5.read(f"/{detid}/prob_unc", optmap_fn)
         divmask = optmap_weights > 0
         optmap_weights[divmask] = optmap_err.weights.nda[divmask] / optmap_weights[divmask]
         optmap_weights[~divmask] = -1
@@ -112,13 +114,13 @@ def _prepare_data(
     divide_fn: str | None = None,
     cmap_min: float | Literal["auto"] = 1e-4,
     cmap_max: float | Literal["auto"] = 1e-2,
-    histogram_choice: str = "p_det",
+    histogram_choice: str = "prob",
     detid: str = "all",
 ) -> tuple[tuple[NDArray], NDArray]:
     optmap_edges, optmap_weights = _read_data(optmap_fn, detid, histogram_choice)
 
     if divide_fn is not None:
-        …
+        _, divide_map = _read_data(divide_fn, detid, histogram_choice)
         divmask = divide_map > 0
         optmap_weights[divmask] = optmap_weights[divmask] / divide_map[divmask]
         optmap_weights[~divmask] = -1
@@ -158,7 +160,7 @@ def view_optmap(
     start_axis: int = 2,
     cmap_min: float | Literal["auto"] = 1e-4,
     cmap_max: float | Literal["auto"] = 1e-2,
-    histogram_choice: str = "p_det",
+    histogram_choice: str = "prob",
     title: str | None = None,
 ) -> None:
     available_dets = list_optical_maps(optmap_fn)
reboost/optmap/optmap.py
CHANGED

@@ -8,7 +8,7 @@ import multiprocessing as mp
 from collections.abc import Mapping
 
 import numpy as np
-from lgdo import Histogram, lh5
+from lgdo import Histogram, Struct, lh5
 from numpy.typing import NDArray
 
 log = logging.getLogger(__name__)
@@ -66,10 +66,10 @@ class OpticalMap:
                raise RuntimeError(msg)
            return h.weights.nda, h.binning
 
-        om.h_vertex, bin_nr_gen = read_hist("nr_gen", lh5_file, group=group)
-        om.h_hits, bin_nr_det = read_hist("nr_det", lh5_file, group=group)
-        om.h_prob, bin_p_det = read_hist("p_det", lh5_file, group=group)
-        om.h_prob_uncert, bin_p_det_err = read_hist("p_det_err", lh5_file, group=group)
+        om.h_vertex, bin_nr_gen = read_hist("_nr_gen", lh5_file, group=group)
+        om.h_hits, bin_nr_det = read_hist("_nr_det", lh5_file, group=group)
+        om.h_prob, bin_p_det = read_hist("prob", lh5_file, group=group)
+        om.h_prob_uncert, bin_p_det_err = read_hist("prob_unc", lh5_file, group=group)
 
         for bins in (bin_nr_det, bin_p_det, bin_p_det_err):
             if not OpticalMap._edges_eq(bin_nr_gen, bins):
@@ -227,18 +227,17 @@ class OpticalMap:
 
         def write_hist(h: NDArray, name: str, fn: str, group: str, wo_mode: str):
             lh5.write(
-                Histogram(self._nda(h), self.binning),
-                name,
+                Struct({name: Histogram(self._nda(h), self.binning)}),
+                group,
                 fn,
-                group=group,
                 wo_mode=wo_mode,
             )
 
         # only use the passed wo_mode for the first file.
-        write_hist(self.h_vertex, "nr_gen", lh5_file, group, wo_mode)
-        write_hist(self.h_hits, "nr_det", lh5_file, group, "append")
-        write_hist(self.h_prob, "p_det", lh5_file, group, "append")
-        write_hist(self.h_prob_uncert, "p_det_err", lh5_file, group, "append")
+        write_hist(self.h_vertex, "_nr_gen", lh5_file, group, wo_mode)
+        write_hist(self.h_hits, "_nr_det", lh5_file, group, "append_column")
+        write_hist(self.h_prob, "prob", lh5_file, group, "append_column")
+        write_hist(self.h_prob_uncert, "prob_unc", lh5_file, group, "append_column")
 
     def get_settings(self) -> dict:
         """Get the binning settings that were used to create this optical map instance."""
reboost/spms/pe.py
CHANGED

@@ -3,6 +3,7 @@ from __future__ import annotations
 import logging
 
 import awkward as ak
+import numpy as np
 from lgdo import VectorOfVectors
 
 from ..optmap import convolve
@@ -21,6 +22,80 @@ def load_optmap(map_file: str, spm_det_uid: int) -> convolve.OptmapForConvolve:
     return convolve.open_optmap_single(map_file, spm_det_uid)
 
 
+def _nested_unflatten(data: ak.Array, lengths: ak.Array):
+    return ak.unflatten(ak.unflatten(ak.flatten(data), ak.flatten(lengths)), ak.num(lengths))
+
+
+def corrected_photoelectrons(
+    simulated_pe: ak.Array,
+    simulated_uids: ak.Array,
+    data_pe: ak.Array,
+    data_uids: ak.Array,
+    *,
+    seed: int | None = None,
+) -> tuple[ak.Array, ak.Array]:
+    r"""Add a correction to the observed number of photoelectrons (p.e.) using forced trigger data.
+
+    For every simulated event a corresponding forced trigger event in data is chosen
+    and the resulting number of p.e. for each channel (i) is:
+
+    .. math::
+
+        n_i = n_{\text{sim},i} + n_{\text{data},i}
+
+    .. warning::
+        The number of supplied forced trigger events in data should ideally be
+        more than that in the simulations. If this is not the case and "allow_data_reuse"
+        is True then some data events will be used multiple times. This introduces
+        a small amount of correlation between the simulated events, but is probably acceptable
+        in most circumstances.
+
+    Parameters
+    ----------
+    simulated_pe
+        The number of number of detected pe per sipm channel.
+    simulated_uids
+        The unique identifier (uid) for each sipm hit.
+    data_pe
+        The collection of forced trigger pe.
+    data_uids
+        The uids for each forced trigger event.
+    seed
+        Seed for random number generator
+
+    Returns
+    -------
+    a tuple of the corrected pe and sipm uids.
+    """
+    rand = np.random.default_rng(seed=seed)
+    rand_ints = rand.integers(0, len(data_pe), size=len(simulated_pe))
+
+    selected_data_pe = data_pe[rand_ints]
+    selected_data_uids = data_uids[rand_ints]
+
+    # combine sims with data
+    pe_tot = ak.concatenate([simulated_pe, selected_data_pe], axis=1)
+    uid_tot = ak.concatenate([simulated_uids, selected_data_uids], axis=1)
+
+    # sort by uid
+    order = ak.argsort(uid_tot)
+    pe_tot = pe_tot[order]
+    uid_tot = uid_tot[order]
+
+    # add an extra axis
+    n = ak.run_lengths(uid_tot)
+
+    # add another dimension
+    pe_tot = _nested_unflatten(pe_tot, n)
+    uid_tot = _nested_unflatten(uid_tot, n)
+
+    # sum pe and take the first uid (should all be the same)
+    corrected_pe = ak.sum(pe_tot, axis=-1)
+    uid_tot = ak.fill_none(ak.firsts(uid_tot, axis=-1), np.nan)
+
+    return corrected_pe, uid_tot
+
+
 def detected_photoelectrons(
     num_scint_ph: ak.Array,
     particle: ak.Array,
@@ -30,8 +105,9 @@ def detected_photoelectrons(
     zloc: ak.Array,
     optmap: convolve.OptmapForConvolve,
     material: str,
-    spm_det_uid: int,
+    spm_detector: str,
     map_scaling: float = 1,
+    map_scaling_sigma: float = 0,
 ) -> VectorOfVectors:
     """Derive the number of detected photoelectrons (p.e.) from scintillator hits using an optical map.
 
@@ -54,10 +130,13 @@ def detected_photoelectrons(
         the optical map loaded via py:func:`load_optmap`.
     material
         scintillating material name.
-    spm_det_uid
-        SiPM detector
+    spm_detector
+        SiPM detector name as used in the optical map.
     map_scaling
         scale the detection probability in the map for this detector by this factor.
+    map_scaling_sigma
+        if larger than zero, sample the used scaling factor for each (reshaped) event
+        from a normal distribution with this standard deviation.
     """
     hits = ak.Array(
         {
@@ -72,7 +151,7 @@ def detected_photoelectrons(
 
     scint_mat_params = convolve._get_scint_params(material)
     pe = convolve.iterate_stepwise_depositions_pois(
-        hits, optmap, scint_mat_params, spm_det_uid, map_scaling
+        hits, optmap, scint_mat_params, spm_detector, map_scaling, map_scaling_sigma
     )
 
     return VectorOfVectors(pe, attrs={"units": "ns"})
reboost/utils.py
CHANGED

@@ -284,7 +284,7 @@ def get_function_string(expr: str, aliases: dict | None = None) -> tuple[str, di
         if "." not in func_call:
             continue
 
-        subpackage, func = func_call.rsplit(".", 1)
+        subpackage, _func = func_call.rsplit(".", 1)
         package = subpackage.split(".")[0]
 
         # import the subpackage
{reboost-0.7.0.dist-info → reboost-0.8.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reboost
-Version: 0.7.0
+Version: 0.8.1
 Summary: New LEGEND Monte-Carlo simulation post-processing
 Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
@@ -700,10 +700,11 @@ Requires-Dist: hdf5plugin
 Requires-Dist: colorlog
 Requires-Dist: numpy
 Requires-Dist: scipy
-Requires-Dist: numba
+Requires-Dist: numba>=0.60
 Requires-Dist: legend-pydataobj>=1.15.1
 Requires-Dist: legend-pygeom-optics>=0.12.0
 Requires-Dist: legend-pygeom-tools>=0.0.11
+Requires-Dist: legend-pygeom-hpges
 Requires-Dist: hist
 Requires-Dist: dbetto
 Requires-Dist: particle
@@ -728,6 +729,8 @@ Dynamic: license-file
 
 # reboost
 
+[](https://pypi.org/project/reboost/)
+[](https://anaconda.org/conda-forge/reboost)
 
 [](https://github.com/legend-exp/reboost/actions)
 [](https://github.com/pre-commit/pre-commit)
|