reboost-0.6.1-py3-none-any.whl → reboost-0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reboost/_version.py +16 -3
- reboost/build_hit.py +111 -64
- reboost/cli.py +1 -0
- reboost/core.py +18 -7
- reboost/daq/__init__.py +5 -0
- reboost/daq/core.py +262 -0
- reboost/daq/utils.py +28 -0
- reboost/hpge/psd.py +444 -94
- reboost/hpge/surface.py +34 -1
- reboost/hpge/utils.py +2 -1
- reboost/iterator.py +4 -1
- reboost/math/stats.py +2 -2
- reboost/optmap/cli.py +1 -1
- reboost/optmap/convolve.py +270 -24
- reboost/optmap/create.py +2 -1
- reboost/optmap/optmap.py +2 -2
- reboost/shape/cluster.py +4 -4
- reboost/spms/__init__.py +5 -0
- reboost/spms/pe.py +99 -0
- reboost/units.py +40 -8
- reboost/utils.py +110 -3
- {reboost-0.6.1.dist-info → reboost-0.7.0.dist-info}/METADATA +4 -4
- reboost-0.7.0.dist-info/RECORD +42 -0
- reboost-0.6.1.dist-info/RECORD +0 -37
- {reboost-0.6.1.dist-info → reboost-0.7.0.dist-info}/WHEEL +0 -0
- {reboost-0.6.1.dist-info → reboost-0.7.0.dist-info}/entry_points.txt +0 -0
- {reboost-0.6.1.dist-info → reboost-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {reboost-0.6.1.dist-info → reboost-0.7.0.dist-info}/top_level.txt +0 -0
reboost/_version.py
CHANGED
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control

-__all__ = [
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]

 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union

     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object

 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID

-__version__ = version = '0.6.1'
-__version_tuple__ = version_tuple = (0, 6, 1)
+__version__ = version = '0.7.0'
+__version_tuple__ = version_tuple = (0, 7, 0)
+
+__commit_id__ = commit_id = None
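
The regenerated `_version.py` follows the newer setuptools-scm template, which also records a VCS commit id. A minimal sketch of reading the new attributes, assuming only that the wheel is installed:

```python
from reboost import _version

# version string and parsed tuple: "0.7.0" and (0, 7, 0)
print(_version.__version__, _version.__version_tuple__)

# new in this template: the commit id; None for a clean release build
print(_version.__commit_id__)
```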
reboost/build_hit.py
CHANGED
@@ -7,11 +7,16 @@ A :func:`build_hit` to parse the following configuration file:
 # dictionary of objects useful for later computation. they are constructed with
 # auxiliary data (e.g. metadata). They can be accessed later as OBJECTS (all caps)
 objects:
-  lmeta: LegendMetadata(ARGS.legendmetadata)
+  lmeta: legendmeta.LegendMetadata(ARGS.legendmetadata)
   geometry: pyg4ometry.load(ARGS.gdml)
   user_pars: dbetto.TextDB(ARGS.par)
   dataprod_pars: dbetto.TextDB(ARGS.dataprod_cycle)

+  _spms: OBJECTS.lmeta.channelmap(on=ARGS.timestamp)
+    .group("system").spms
+    .map("name")
+  spms: "{name: spm.daq.rawid for name, spm in OBJECTS._spms.items()}"
+
 # processing chain is defined to act on a group of detectors
 processing_groups:

@@ -107,9 +112,10 @@ A :func:`build_hit` to parse the following configuration file:
     outputs:
       - evtid
       - tot_edep_wlsr
+      - num_scint_ph_lar

     operations:
-      tot_edep_wlsr: ak.sum(HITS[(HITS.
+      tot_edep_wlsr: ak.sum(HITS.edep[np.abs(HITS.zloc) < 3000], axis=-1)

   - name: spms

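
The rewritten `tot_edep_wlsr` expression masks steps by position before summing per event. A toy reproduction of the same awkward-array pattern, with invented values:

```python
import awkward as ak
import numpy as np

# per-event jagged arrays of step energies and z positions
edep = ak.Array([[10.0, 20.0], [5.0]])
zloc = ak.Array([[100.0, 4000.0], [-200.0]])

# keep steps with |zloc| < 3000, then sum the survivors per event
print(ak.sum(edep[np.abs(zloc) < 3000], axis=-1).tolist())  # [10.0, 5.0]
```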
@@ -117,11 +123,14 @@ A :func:`build_hit` to parse the following configuration file:
     # same name as the current detector. This can be overridden for special processors

     detector_mapping:
-      - output: OBJECTS.
-
-
-
-
+      - output: OBJECTS.spms.keys()
+        input: lar
+
+    hit_table_layout: reboost.shape.group_by_time(STEPS, window=10)
+
+    pre_operations:
+      num_scint_ph_lar: reboost.spms.emitted_scintillation_photons(HITS.edep, HITS.particle, "lar")
+      # num_scint_ph_pen: ...

     outputs:
       - t0
@@ -130,22 +139,23 @@ A :func:`build_hit` to parse the following configuration file:

     detector_objects:
       meta: pygeomtools.get_sensvol_metadata(OBJECTS.geometry, DETECTOR)
-
-
-
-    hit_table_layout: reboost.shape.group_by_time(STEPS, window=10)
+      spm_uid: OBJECTS.spms[DETECTOR]
+      optmap_lar: reboost.spms.load_optmap(ARGS.optmap_path_pen, DETECTOR_OBJECTS.spm_uid)
+      optmap_pen: reboost.spms.load_optmap(ARGS.optmap_path_lar, DETECTOR_OBJECTS.spm_uid)

     operations:
       pe_times_lar: reboost.spms.detected_photoelectrons(
-
+        HITS.num_scint_ph_lar, HITS.particle, HITS.time, HITS.xloc, HITS.yloc, HITS.zloc,
         DETECTOR_OBJECTS.optmap_lar,
-
+        "lar",
+        DETECTOR_OBJECTS.spm_uid
       )

       pe_times_pen: reboost.spms.detected_photoelectrons(
-
+        HITS.num_scint_ph_pen, HITS.particle, HITS.time, HITS.xloc, HITS.yloc, HITS.zloc,
         DETECTOR_OBJECTS.optmap_pen,
-
+        "pen",
+        DETECTOR_OBJECTS.spm_uid
       )

       pe_times: ak.concatenate([HITS.pe_times_lar, HITS.pe_times_pen], axis=-1)
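
`pe_times` merges the two photoelectron-time lists per hit with `ak.concatenate` on the innermost axis. A toy illustration with invented values:

```python
import awkward as ak

pe_lar = ak.Array([[1.0, 2.0], [], [5.0]])
pe_pen = ak.Array([[3.0], [4.0], []])

# axis=-1 joins the innermost lists element by element
print(ak.concatenate([pe_lar, pe_pen], axis=-1).tolist())
# [[1.0, 2.0, 3.0], [4.0], [5.0]]
```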
@@ -189,6 +199,7 @@ def build_hit(
     out_field: str = "hit",
     buffer: int = int(5e6),
     overwrite: bool = False,
+    allow_missing_inputs=True,
 ) -> None | ak.Array:
     """Build the hit tier from the remage step files.

@@ -215,6 +226,8 @@
         buffer size for use in the `LH5Iterator`.
     overwrite
         flag to overwrite the existing output.
+    allow_missing_inputs
+        Flag to allow an input table to be missing, generally when there were no events.
     """
     # extract the config file
     if isinstance(config, str):
@@ -235,9 +248,10 @@
     files = utils.get_file_dict(stp_files=stp_files, glm_files=glm_files, hit_files=hit_files)

     output_tables = {}
+    output_tables_names = set()

     # iterate over files
-    for file_idx, (stp_file, glm_file) in enumerate(zip(files.stp, files.glm)):
+    for file_idx, (stp_file, glm_file) in enumerate(zip(files.stp, files.glm, strict=True)):
         msg = (
             f"starting processing of {stp_file} to {files.hit[file_idx]} "
             if files.hit[file_idx] is not None
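
`zip(..., strict=True)` (Python 3.10+) turns a length mismatch between the step and GLM file lists into an immediate error instead of a silent truncation:

```python
stp_files = ["run1_stp.lh5", "run2_stp.lh5"]  # illustrative names
glm_files = ["run1_glm.lh5"]

try:
    list(zip(stp_files, glm_files, strict=True))
except ValueError as e:
    print(e)  # zip() argument 2 is shorter than argument 1
```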
@@ -256,7 +270,7 @@

         # extract the output detectors and the mapping to input detectors
         detectors_mapping = core.get_detector_mapping(
-            proc_group.get("detector_mapping"), global_objects
+            proc_group.get("detector_mapping"), global_objects, args
         )

         # loop over detectors
@@ -275,20 +289,30 @@

             lh5_group = proc_group.get("lh5_group", "stp")
             if lh5_group is None:
-                lh5_group = "
+                lh5_group = ""
+                table = in_detector
+            else:
+                table = f"{lh5_group}/{in_detector}"

             # begin iterating over the glm
-
-
-
-
-
-
-
-
-
-
-
+            # check if the in_detector is in the file
+            if table in lh5.ls(stp_file, lh5_group + "/"):
+                iterator = GLMIterator(
+                    glm_file,
+                    stp_file,
+                    lh5_group=in_detector,
+                    start_row=start_evtid,
+                    stp_field=lh5_group,
+                    n_rows=n_evtid,
+                    buffer=buffer,
+                    time_dict=time_dict[proc_name],
+                    reshaped_files="hit_table_layout" not in proc_group,
+                )
+            elif allow_missing_inputs:
+                continue
+            else:
+                msg = f"Requested input detector {in_detector} is not present in the group {lh5_group} and missing inputs were not allowed"
+                raise ValueError(msg)

             for stps, chunk_idx, _ in iterator:
                 # converting to awkward
@@ -304,6 +328,21 @@
             if time_dict is not None:
                 time_dict[proc_name].update_field("conv", start_time)

+            if "hit_table_layout" in proc_group:
+                hit_table_layouted = core.evaluate_hit_table_layout(
+                    copy.deepcopy(ak_obj),
+                    expression=proc_group["hit_table_layout"],
+                    time_dict=time_dict[proc_name],
+                )
+            else:
+                hit_table_layouted = copy.deepcopy(stps)
+
+            local_dict = {"OBJECTS": global_objects}
+            for field, info in proc_group.get("pre_operations", {}).items():
+                _evaluate_operation(
+                    hit_table_layouted, field, info, local_dict, time_dict[proc_name]
+                )
+
             # produce the hit table
             for out_det_idx, out_detector in enumerate(out_detectors):
                 # loop over the rows
@@ -313,14 +352,12 @@
                 # get the attributes
                 attrs = utils.copy_units(stps)

-                if
-
-
-
-
-
-                else:
-                    hit_table = copy.deepcopy(stps)
+                # if we have more than one output detector, make an independent copy.
+                hit_table = (
+                    copy.deepcopy(hit_table_layouted)
+                    if len(out_detectors) > 1
+                    else hit_table_layouted
+                )

                 local_dict = {
                     "DETECTOR_OBJECTS": det_objects[out_detector],
@@ -329,28 +366,10 @@
                 }
                 # add fields
                 for field, info in proc_group.get("operations", {}).items():
-
-
-                        units = None
-                    else:
-                        expression = info["expression"]
-                        units = info.get("units", None)
-
-                    # evaluate the expression
-                    col = core.evaluate_output_column(
-                        hit_table,
-                        table_name="HITS",
-                        expression=expression,
-                        local_dict=local_dict,
-                        time_dict=time_dict[proc_name],
-                        name=field,
+                    _evaluate_operation(
+                        hit_table, field, info, local_dict, time_dict[proc_name]
                     )

-                    if units is not None:
-                        col.attrs["units"] = units
-
-                    core.add_field_with_nesting(hit_table, field, col)
-
                 # remove unwanted fields
                 if "outputs" in proc_group:
                     hit_table = core.remove_columns(
@@ -363,16 +382,12 @@
                 # now write
                 if files.hit[file_idx] is not None:
                     # get modes to write with
-                    new_hit_file = (file_idx == 0) or (
-                        files.hit[file_idx] != files.hit[file_idx - 1]
-                    )
-
                     wo_mode = utils.get_wo_mode(
                         group=group_idx,
                         out_det=out_det_idx,
                         in_det=in_det_idx,
                         chunk=chunk_idx,
-                        new_hit_file=
+                        new_hit_file=utils.is_new_hit_file(files, file_idx),
                         overwrite=overwrite,
                     )
                     # write the file
@@ -390,6 +405,8 @@
                         hit_table, output_tables[out_detector]
                     )

+                output_tables_names.add(out_detector)
+
     # forward some data, if requested
     # possible improvement: iterate over data if it's a lot
     if "forward" in config and files.hit[file_idx] is not None:
@@ -400,20 +417,50 @@

         for obj in obj_list:
             try:
+                wo_mode = utils.get_wo_mode_forwarded(
+                    output_tables_names, utils.is_new_hit_file(files, file_idx), overwrite
+                )
                 lh5.write(
                     lh5.read(obj, stp_file),
                     obj,
                     files.hit[file_idx],
-                    wo_mode=
+                    wo_mode=wo_mode,
                 )
+                output_tables_names.add(obj)
             except LH5EncodeError as e:
                 msg = f"cannot forward object {obj} as it has been already processed by reboost"
                 raise RuntimeError(msg) from e

     # return output table or nothing
-    log.
+    log.info(time_dict)

     if output_tables == {}:
         output_tables = None

     return output_tables, time_dict
+
+
+def _evaluate_operation(
+    hit_table, field: str, info: str | dict, local_dict: dict, time_dict: ProfileDict
+) -> None:
+    if isinstance(info, str):
+        expression = info
+        units = None
+    else:
+        expression = info["expression"]
+        units = info.get("units", None)
+
+    # evaluate the expression
+    col = core.evaluate_output_column(
+        hit_table,
+        table_name="HITS",
+        expression=expression,
+        local_dict=local_dict,
+        time_dict=time_dict,
+        name=field,
+    )
+
+    if units is not None:
+        col.attrs["units"] = units
+
+    core.add_field_with_nesting(hit_table, field, col)
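
The new `_evaluate_operation` helper centralizes the dispatch the operations loop previously inlined: a config entry is either a bare expression string or a mapping with `expression` and optional `units`. A standalone sketch of just that dispatch:

```python
def parse_operation(info):
    # mirrors the isinstance(info, str) branch in _evaluate_operation
    if isinstance(info, str):
        return info, None  # bare expression, no units attribute
    return info["expression"], info.get("units", None)

print(parse_operation("ak.sum(HITS.edep, axis=-1)"))
print(parse_operation({"expression": "HITS.time - HITS.t0", "units": "ns"}))
```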
reboost/cli.py
CHANGED
reboost/core.py
CHANGED
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import contextlib
 import logging
 import time
 from typing import Any
@@ -145,11 +146,14 @@ def evaluate_output_column(
     if globals_dict == {}:
         globals_dict = None

+    ctx = contextlib.nullcontext()
     if globals_dict is not None and "pyg4ometry" in globals_dict:
-
-
-
-    res = hit_table.eval(
+        ctx = utils.filter_logging(logging.CRITICAL)
+
+    with ctx:
+        res = hit_table.eval(
+            func_call, local_dict, modules=globals_dict, library="ak", with_units=True
+        )

     # how long did it take
     if time_dict is not None:
@@ -227,7 +231,9 @@ def get_global_objects(
     return AttrsDict(res)


-def get_detector_mapping(
+def get_detector_mapping(
+    detector_mapping: dict, global_objects: AttrsDict, args: AttrsDict
+) -> dict:
     """Get all the detector mapping using :func:`get_one_detector_mapping`.

     Parameters
@@ -236,6 +242,8 @@ def get_detector_mapping(detector_mapping: dict, global_objects: AttrsDict) -> d
         dictionary of detector mapping
     global_objects
         dictionary of global objects to use in evaluating the mapping.
+    args
+        any arguments the expression can depend on, is passed as `locals` to `eval()`.
     """
     return utils.merge_dicts(
         [
@@ -243,6 +251,7 @@
                 mapping["output"],
                 input_detector_name=mapping.get("input", None),
                 objects=global_objects,
+                args=args,
             )
             for mapping in detector_mapping
         ]
@@ -253,6 +262,7 @@ def get_one_detector_mapping(
     output_detector_expression: str | list,
     objects: AttrsDict | None = None,
     input_detector_name: str | None = None,
+    args: AttrsDict | None = None,
 ) -> dict:
     """Extract the output detectors and the list of input to outputs by parsing the expressions.

@@ -282,7 +292,8 @@
         dictionary of objects that can be referenced in the expression.
     input_detector_name
         Optional input detector name for all the outputs.
-
+    args
+        any arguments the expression can depend on, is passed as `locals` to `eval()`.

     Returns
     -------
@@ -318,7 +329,7 @@

     # if no package was imported its just a name
     try:
-        objs = evaluate_object(expression_tmp, local_dict={"OBJECTS": objects})
+        objs = evaluate_object(expression_tmp, local_dict={"ARGS": args, "OBJECTS": objects})
         out_names.extend(objs)
     except Exception:
         out_names.append(expression_tmp)