reboost-0.8.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
reboost/build_hit.py ADDED
@@ -0,0 +1,466 @@
+ """Routines to build the `hit` tier from the `stp` tier.
+
+ The :func:`build_hit` function parses a configuration file like the following:
+
+ .. code-block:: yaml
+
+     # dictionary of objects useful for later computation. they are constructed with
+     # auxiliary data (e.g. metadata). They can be accessed later as OBJECTS (all caps)
+     objects:
+       lmeta: legendmeta.LegendMetadata(ARGS.legendmetadata)
+       geometry: pyg4ometry.load(ARGS.gdml)
+       user_pars: dbetto.TextDB(ARGS.par)
+       dataprod_pars: dbetto.TextDB(ARGS.dataprod_cycle)
+
+       _spms: OBJECTS.lmeta.channelmap(on=ARGS.timestamp)
+         .group("system").spms
+         .map("name")
+       spms: "{name: spm.daq.rawid for name, spm in OBJECTS._spms.items()}"
+
+     # a processing chain is defined to act on a group of detectors
+     processing_groups:
+
+       # start with the HPGe stuff, give it an optional name
+       - name: geds
+
+         # this is a list of included detectors (part of the processing group)
+         detector_mapping:
+           - output: OBJECTS.lmeta.channelmap(on=ARGS.timestamp)
+               .group('system').geds
+               .group('analysis.status').on
+               .map('name').keys()
+
+         # which columns we actually want to see in the output table
+         outputs:
+           - t0
+           - evtid
+           - energy
+           - r90
+           - drift_time
+
+         # in this section we define objects that will be instantiated at each
+         # iteration of the for loop over input tables (i.e. detectors)
+         detector_objects:
+           # The following assumes that the detector metadata is stored in the GDML file
+           pyobj: pygeomhpges.make_hpge(pygeomtools.get_sensvol_metadata(OBJECTS.geometry, DETECTOR))
+           phyvol: OBJECTS.geometry.physical_volume_dict[DETECTOR]
+           drift_time_map: lgdo.lh5.read(DETECTOR, ARGS.dtmap_file)
+
+         # finally, the processing chain
+         operations:
+
+           t0: ak.fill_none(ak.firsts(HITS.time, axis=-1), np.nan)
+
+           evtid: ak.fill_none(ak.firsts(HITS.__evtid, axis=-1), np.nan)
+
+           # distance to the nplus surface in mm
+           distance_to_nplus_surface_mm: reboost.hpge.distance_to_surface(
+             HITS.__xloc, HITS.__yloc, HITS.__zloc,
+             DETECTOR_OBJECTS.pyobj,
+             DETECTOR_OBJECTS.phyvol.position.eval(),
+             surface_type='nplus')
+
+           # activeness based on the FCCD (no transition layer)
+           activeness: ak.where(
+             HITS.distance_to_nplus_surface_mm <
+             OBJECTS.lmeta.hardware.detectors.germanium.diodes[DETECTOR].characterization.combined_0vbb_fccd_in_mm.value,
+             0,
+             1
+           )
+
+           activeness2: reboost.math.piecewise_linear(
+             HITS.distance_to_nplus_surface_mm,
+             PARS.tlayer[DETECTOR].start_in_mm,
+             PARS.fccd_in_mm,
+           )
+
+           # summed energy of the hit, accounting for activeness
+           energy_raw: ak.sum(HITS.__edep * HITS.activeness, axis=-1)
+
+           # energy with smearing
+           energy: reboost.math.sample_convolve(
+             scipy.stats.norm,  # resolution distribution
+             loc=HITS.energy_raw,  # parameters of the distribution (observable to convolve)
+             scale=np.sqrt(PARS.a + PARS.b * HITS.energy_raw)  # another parameter
+           )
+
+           # this is going to return "run lengths" (Awkward Array jargon)
+           clusters_lengths: reboost.shape.cluster.naive(
+             HITS,  # can also pass the exact fields (x, y, z)
+             size=1,
+             units="mm"
+           )
+
+           # example of a low-level reduction on clusters
+           energy_clustered: ak.sum(ak.unflatten(HITS.__edep, HITS.clusters_lengths), axis=-1)
+
+           # example of using a reboost helper
+           steps_clustered: reboost.shape.reduction.energy_weighted_average(HITS, HITS.clusters_lengths)
+
+           r90: reboost.hpge.psd.r90(HITS.steps_clustered)
+
+           drift_time: reboost.hpge.psd.drift_time(
+             HITS.steps_clustered,
+             DETECTOR_OBJECTS.drift_time_map
+           )
+
+       # example of basic processing of steps in scintillators
+       - name: lar
+         detector_mapping:
+           - output: scintillators
+
+         outputs:
+           - evtid
+           - tot_edep_wlsr
+           - num_scint_ph_lar
+
+         operations:
+           tot_edep_wlsr: ak.sum(HITS.edep[np.abs(HITS.zloc) < 3000], axis=-1)
+
+       - name: spms
+
+         # by default, reboost looks in the steps input for a table with the
+         # same name as the current detector. This can be overridden for special processors
+
+         detector_mapping:
+           - output: OBJECTS.spms.keys()
+             input: lar
+
+         hit_table_layout: reboost.shape.group_by_time(STEPS, window=10)
+
+         pre_operations:
+           num_scint_ph_lar: reboost.spms.emitted_scintillation_photons(HITS.edep, HITS.particle, "lar")
+           # num_scint_ph_pen: ...
+
+         outputs:
+           - t0
+           - evtid
+           - pe_times
+
+         detector_objects:
+           meta: pygeomtools.get_sensvol_metadata(OBJECTS.geometry, DETECTOR)
+           spm_uid: OBJECTS.spms[DETECTOR]
+           optmap_lar: reboost.spms.load_optmap(ARGS.optmap_path_lar, DETECTOR_OBJECTS.spm_uid)
+           optmap_pen: reboost.spms.load_optmap(ARGS.optmap_path_pen, DETECTOR_OBJECTS.spm_uid)
+
+         operations:
+           pe_times_lar: reboost.spms.detected_photoelectrons(
+             HITS.num_scint_ph_lar, HITS.particle, HITS.time, HITS.xloc, HITS.yloc, HITS.zloc,
+             DETECTOR_OBJECTS.optmap_lar,
+             "lar",
+             DETECTOR_OBJECTS.spm_uid
+           )
+
+           pe_times_pen: reboost.spms.detected_photoelectrons(
+             HITS.num_scint_ph_pen, HITS.particle, HITS.time, HITS.xloc, HITS.yloc, HITS.zloc,
+             DETECTOR_OBJECTS.optmap_pen,
+             "pen",
+             DETECTOR_OBJECTS.spm_uid
+           )
+
+           pe_times: ak.concatenate([HITS.pe_times_lar, HITS.pe_times_pen], axis=-1)
+
+     # can list here some lh5 objects that should just be forwarded to the
+     # output file, without any processing
+     forward:
+       - /vtx
+       - /some/dataset
+ """
+
+ from __future__ import annotations
+
+ import copy
+ import logging
+ import time
+ from collections.abc import Mapping
+
+ import awkward as ak
+ import dbetto
+ from dbetto import AttrsDict
+ from lgdo import lh5
+ from lgdo.lh5.exceptions import LH5EncodeError
+
+ from . import core, utils
+ from .iterator import GLMIterator
+ from .profile import ProfileDict
+
+ log = logging.getLogger(__name__)
+
+
+ def build_hit(
+     config: Mapping | str,
+     args: Mapping | AttrsDict,
+     stp_files: str | list[str],
+     glm_files: str | list[str] | None,
+     hit_files: str | list[str] | None,
+     *,
+     start_evtid: int = 0,
+     n_evtid: int | None = None,
+     out_field: str = "hit",
+     buffer: int = int(5e6),
+     overwrite: bool = False,
+     allow_missing_inputs: bool = True,
+ ) -> tuple[dict | None, ProfileDict]:
+     """Build the hit tier from the remage step files.
+
+     Parameters
+     ----------
+     config
+         dictionary or path to a YAML file containing the processing chain.
+     args
+         dictionary or :class:`dbetto.AttrsDict` of the global arguments.
+     stp_files
+         string or list of strings of stp file paths.
+     glm_files
+         string or list of strings of glm file paths; if `None`, the glm is built in memory.
+     hit_files
+         string or list of strings of hit file paths. Can also be `None`, in which
+         case the hits are returned as `ak.Array`s in memory.
+     start_evtid
+         first evtid to read.
+     n_evtid
+         number of evtids to read; if `None`, read all.
+     out_field
+         name of the output field.
+     buffer
+         buffer size for use in the `LH5Iterator`.
+     overwrite
+         flag to overwrite the existing output.
+     allow_missing_inputs
+         flag to allow an input table to be missing, generally when there were no events.
+     """
+     # extract the config file
+     if isinstance(config, str):
+         config = dbetto.utils.load_dict(config)
+
+     # get the arguments
+     if not isinstance(args, AttrsDict):
+         args = AttrsDict(args)
+
+     time_dict = ProfileDict()
+
+     # get the global objects
+     global_objects = core.get_global_objects(
+         expressions=config.get("objects", {}), local_dict={"ARGS": args}, time_dict=time_dict
+     )
+
+     # get the input files
+     files = utils.get_file_dict(stp_files=stp_files, glm_files=glm_files, hit_files=hit_files)
+
+     output_tables = {}
+     output_tables_names = set()
+
+     # iterate over files
+     for file_idx, (stp_file, glm_file) in enumerate(zip(files.stp, files.glm, strict=True)):
+         msg = (
+             f"starting processing of {stp_file} to {files.hit[file_idx]} "
+             if files.hit[file_idx] is not None
+             else f"starting processing of {stp_file}"
+         )
+         log.info(msg)
+
+         # loop over processing groups
+         for group_idx, proc_group in enumerate(config["processing_groups"]):
+             proc_name = proc_group.get("name", "default")
+             msg = f"starting group {proc_name}"
+             log.debug(msg)
+
+             if proc_name not in time_dict:
+                 time_dict[proc_name] = ProfileDict()
+
+             # extract the output detectors and the mapping to input detectors
+             detectors_mapping = core.get_detector_mapping(
+                 proc_group.get("detector_mapping"), global_objects, args
+             )
+
+             # loop over detectors
+             for in_det_idx, (in_detector, out_detectors) in enumerate(detectors_mapping.items()):
+                 msg = f"processing {in_detector} (to {out_detectors})"
+                 log.debug(msg)
+
+                 # get detector objects
+                 det_objects = core.get_detector_objects(
+                     output_detectors=out_detectors,
+                     args=args,
+                     global_objects=global_objects,
+                     expressions=proc_group.get("detector_objects", {}),
+                     time_dict=time_dict[proc_name],
+                 )
+
+                 lh5_group = proc_group.get("lh5_group", "stp")
+                 if lh5_group is None:
+                     lh5_group = ""
+                     table = in_detector
+                 else:
+                     table = f"{lh5_group}/{in_detector}"
+
+                 # begin iterating over the glm;
+                 # check first that the in_detector is in the file
+                 if table in lh5.ls(stp_file, lh5_group + "/"):
+                     iterator = GLMIterator(
+                         glm_file,
+                         stp_file,
+                         lh5_group=in_detector,
+                         start_row=start_evtid,
+                         stp_field=lh5_group,
+                         n_rows=n_evtid,
+                         buffer=buffer,
+                         time_dict=time_dict[proc_name],
+                         reshaped_files="hit_table_layout" not in proc_group,
+                     )
+                 elif allow_missing_inputs:
+                     continue
+                 else:
+                     msg = (
+                         f"requested input detector {in_detector} is not present in group "
+                         f"{lh5_group}, and missing inputs are not allowed"
+                     )
+                     raise ValueError(msg)
+
+                 for stps, chunk_idx, _ in iterator:
+                     # skip chunks with no steps
+                     if stps is None:
+                         continue
+
+                     # convert to awkward
+                     if time_dict is not None:
+                         start_time = time.time()
+
+                     ak_obj = stps.view_as("ak")
+
+                     if time_dict is not None:
+                         time_dict[proc_name].update_field("conv", start_time)
330
+
331
+ if "hit_table_layout" in proc_group:
332
+ hit_table_layouted = core.evaluate_hit_table_layout(
333
+ copy.deepcopy(ak_obj),
334
+ expression=proc_group["hit_table_layout"],
335
+ time_dict=time_dict[proc_name],
336
+ )
337
+ else:
338
+ hit_table_layouted = copy.deepcopy(stps)
339
+
340
+ local_dict = {"OBJECTS": global_objects}
341
+ for field, info in proc_group.get("pre_operations", {}).items():
342
+ _evaluate_operation(
343
+ hit_table_layouted, field, info, local_dict, time_dict[proc_name]
344
+ )
345
+
346
+ # produce the hit table
347
+ for out_det_idx, out_detector in enumerate(out_detectors):
348
+ # loop over the rows
349
+ if out_detector not in output_tables and files.hit[file_idx] is None:
350
+ output_tables[out_detector] = None
351
+
352
+ # get the attributes
353
+ attrs = utils.copy_units(stps)
354
+
355
+ # if we have more than one output detector, make an independent copy.
356
+ hit_table = (
357
+ copy.deepcopy(hit_table_layouted)
358
+ if len(out_detectors) > 1
359
+ else hit_table_layouted
360
+ )
361
+
362
+ local_dict = {
363
+ "DETECTOR_OBJECTS": det_objects[out_detector],
364
+ "OBJECTS": global_objects,
365
+ "DETECTOR": out_detector,
366
+ }
367
+ # add fields
368
+ for field, info in proc_group.get("operations", {}).items():
369
+ _evaluate_operation(
370
+ hit_table, field, info, local_dict, time_dict[proc_name]
371
+ )
372
+
373
+ # remove unwanted fields
374
+ if "outputs" in proc_group:
375
+ hit_table = core.remove_columns(
376
+ hit_table, outputs=proc_group["outputs"]
377
+ )
378
+
379
+ # assign units in the output table
380
+ hit_table = utils.assign_units(hit_table, attrs)
381
+
382
+ # now write
383
+ if files.hit[file_idx] is not None:
384
+ # get modes to write with
385
+ wo_mode = utils.get_wo_mode(
386
+ group=group_idx,
387
+ out_det=out_det_idx,
388
+ in_det=in_det_idx,
389
+ chunk=chunk_idx,
390
+ new_hit_file=utils.is_new_hit_file(files, file_idx),
391
+ overwrite=overwrite,
392
+ )
393
+ # write the file
394
+ utils.write_lh5(
395
+ hit_table,
396
+ files.hit[file_idx],
397
+ time_dict[proc_name],
398
+ out_field=out_field,
399
+ out_detector=out_detector,
400
+ wo_mode=wo_mode,
401
+ )
402
+
403
+ else:
404
+ output_tables[out_detector] = core.merge(
405
+ hit_table, output_tables[out_detector]
406
+ )
407
+
408
+ output_tables_names.add(out_detector)
409
+
410
+ # forward some data, if requested
411
+ # possible improvement: iterate over data if it's a lot
412
+ if "forward" in config and files.hit[file_idx] is not None:
413
+ obj_list = config["forward"]
414
+
415
+ if not isinstance(obj_list, list):
416
+ obj_list = [obj_list]
417
+
418
+ for obj in obj_list:
419
+ try:
420
+ wo_mode = utils.get_wo_mode_forwarded(
421
+ output_tables_names, utils.is_new_hit_file(files, file_idx), overwrite
422
+ )
423
+ lh5.write(
424
+ lh5.read(obj, stp_file),
425
+ obj,
426
+ files.hit[file_idx],
427
+ wo_mode=wo_mode,
428
+ )
429
+ output_tables_names.add(obj)
430
+ except LH5EncodeError as e:
431
+ msg = f"cannot forward object {obj} as it has been already processed by reboost"
432
+ raise RuntimeError(msg) from e
433
+
434
+ # return output table or nothing
435
+ log.info(time_dict)
436
+
437
+ if output_tables == {}:
438
+ output_tables = None
439
+
440
+ return output_tables, time_dict
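A minimal usage sketch; the file names and `args` keys below are placeholders that would have to match the `ARGS.*` references in the configuration. Passing `hit_files=None` keeps the hits in memory:

    from dbetto import AttrsDict

    from reboost.build_hit import build_hit

    # keys must mirror the ARGS.* lookups used in the config expressions
    args = AttrsDict({"gdml": "geometry.gdml", "par": "pars.yaml"})

    tables, profile = build_hit(
        "hit_config.yaml",        # processing chain (dict or path to YAML)
        args,                     # exposed as ARGS in config expressions
        stp_files="out_stp.lh5",
        glm_files=None,           # glm is built in memory
        hit_files=None,           # return the hit tables instead of writing them
    )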
+
+
+ def _evaluate_operation(
+     hit_table, field: str, info: str | dict, local_dict: dict, time_dict: ProfileDict
+ ) -> None:
+     if isinstance(info, str):
+         expression = info
+         units = None
+     else:
+         expression = info["expression"]
+         units = info.get("units", None)
+
+     # evaluate the expression
+     col = core.evaluate_output_column(
+         hit_table,
+         table_name="HITS",
+         expression=expression,
+         local_dict=local_dict,
+         time_dict=time_dict,
+         name=field,
+     )
+
+     if units is not None:
+         col.attrs["units"] = units
+
+     core.add_field_with_nesting(hit_table, field, col)
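As the branches above show, a config entry in `operations` or `pre_operations` is either a bare expression string or a mapping with an `expression` plus optional `units`, which ends up in the output column's `attrs["units"]`. The parsed-config equivalents, with an illustrative units value:

    # short form: just the expression
    info_short = "ak.sum(HITS.edep, axis=-1)"

    # long form: expression plus units for the output column
    info_with_units = {
        "expression": "ak.sum(HITS.edep, axis=-1)",
        "units": "keV",  # illustrative; written to the column's attrs["units"]
    }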
reboost/cli.py ADDED
@@ -0,0 +1,194 @@
+ from __future__ import annotations
+
+ import argparse
+ import logging
+
+ import dbetto
+
+ from .build_glm import build_glm
+ from .build_hit import build_hit
+ from .log_utils import setup_log
+ from .utils import _check_input_file, _check_output_file, get_file_list
+
+ log = logging.getLogger(__name__)
+
+
+ def cli(args=None) -> None:
+     parser = argparse.ArgumentParser(
+         prog="reboost",
+         description="%(prog)s command line interface",
+         formatter_class=argparse.RawTextHelpFormatter,
+     )
+
+     parser.add_argument(
+         "--verbose",
+         "-v",
+         action="count",
+         default=1,
+         help="""Increase the program verbosity""",
+     )
+
+     subparsers = parser.add_subparsers(dest="command", required=True)
+
+     # glm parser
+     glm_parser = subparsers.add_parser("build-glm", help="build a glm file from a remage stp file")
+
+     glm_parser.add_argument(
+         "--stp-file",
+         "-s",
+         required=True,
+         type=str,
+         help="Path to the stp file; if multithreaded, this will be appended with _t$idx.",
+     )
+     glm_parser.add_argument(
+         "--glm-file",
+         "-g",
+         required=True,
+         type=str,
+         help="Path to the glm file; if multithreaded, this will be appended with _t$idx.",
+     )
+
+     # optional args
+     glm_parser.add_argument(
+         "--out-table-name", "-n", type=str, default="glm", help="Output table name."
+     )
+     glm_parser.add_argument("--id-name", "-i", type=str, default="g4_evtid", help="ID column name.")
+     glm_parser.add_argument(
+         "--evtid-buffer", "-e", type=int, default=int(1e7), help="event id buffer size."
+     )
+     glm_parser.add_argument(
+         "--stp-buffer", "-b", type=int, default=int(1e7), help="stp buffer size."
+     )
+     glm_parser.add_argument(
+         "--overwrite", "-w", action="store_true", help="Overwrite the output file if it exists."
+     )
+     glm_parser.add_argument(
+         "--threads",
+         "-t",
+         required=False,
+         default=None,
+         type=int,
+         help="Number of threads used for remage.",
+     )
+
+     # hit parser
+     hit_parser = subparsers.add_parser("build-hit", help="build a hit file from a remage stp file")
+
+     hit_parser.add_argument(
+         "--config", type=str, required=True, help="Path to the configuration file."
+     )
+     hit_parser.add_argument("--args", type=str, required=True, help="Path to the args file.")
+     hit_parser.add_argument(
+         "--stp-file",
+         type=str,
+         required=True,
+         help="stp file to process; if multithreaded, this will be appended with _t$idx",
+     )
+     hit_parser.add_argument(
+         "--glm-file",
+         type=str,
+         required=False,
+         default=None,
+         help="glm file to process; if multithreaded, this will be appended with _t$idx",
+     )
+     hit_parser.add_argument(
+         "--hit-file",
+         type=str,
+         required=True,
+         help="hit file to produce; if multithreaded, this will be appended with _t$idx",
+     )
+
+     # optional args
+     hit_parser.add_argument("--start-evtid", type=int, default=0, help="Start event id.")
+     hit_parser.add_argument(
+         "--n-evtid", type=int, default=None, help="Number of event ids to process."
+     )
+     hit_parser.add_argument("--out-field", type=str, default="hit", help="Output field name.")
+     hit_parser.add_argument("--buffer", type=int, default=int(5e6), help="Buffer size.")
+
+     hit_parser.add_argument(
+         "--overwrite", "-w", action="store_true", help="Overwrite the output file if it exists."
+     )
+     hit_parser.add_argument(
+         "--threads",
+         "-t",
+         required=False,
+         default=None,
+         type=int,
+         help="Number of threads used for remage.",
+     )
+
+     args = parser.parse_args(args)
+
+     log_level = (None, logging.INFO, logging.DEBUG)[min(args.verbose, 2)]
+     setup_log(log_level)
+
+     if args.command == "build-glm":
+         # catch some cases
+         glm_files = get_file_list(args.glm_file, threads=args.threads)
+         stp_files = get_file_list(args.stp_file, threads=args.threads)
+
+         _check_input_file(parser, stp_files)
+
+         if args.overwrite is False:
+             _check_output_file(parser, glm_files)
+
+         msg = "Running build_glm with arguments:\n"
+         msg += f" glm file: {glm_files}\n"
+         msg += f" stp file: {stp_files}\n"
+         msg += f" out_table_name: {args.out_table_name}\n"
+         msg += f" id_name: {args.id_name}\n"
+         msg += f" evtid_buffer: {args.evtid_buffer}\n"
+         msg += f" stp_buffer: {args.stp_buffer}"
+
+         log.info(msg)
+
+         build_glm(
+             stp_files,
+             glm_files,
+             out_table_name=args.out_table_name,
+             id_name=args.id_name,
+             evtid_buffer=args.evtid_buffer,
+             stp_buffer=args.stp_buffer,
+         )
+
+     elif args.command == "build-hit":
+         glm_files = get_file_list(args.glm_file, threads=args.threads)
+         stp_files = get_file_list(args.stp_file, threads=args.threads)
+         hit_files = get_file_list(args.hit_file, threads=args.threads)
+
+         _check_input_file(parser, stp_files)
+
+         if args.glm_file is not None:
+             _check_input_file(parser, glm_files)
+
+         if args.overwrite is False:
+             _check_output_file(parser, hit_files)
+
+         msg = "Running build_hit with arguments:\n"
+         msg += f" config: {args.config}\n"
+         msg += f" args: {args.args}\n"
+         msg += f" glm files: {glm_files}\n"
+         msg += f" stp files: {stp_files}\n"
+         msg += f" hit files: {hit_files}\n"
+         msg += f" start_evtid: {args.start_evtid}\n"
+         msg += f" n_evtid: {args.n_evtid}\n"
+         msg += f" out_field: {args.out_field}\n"
+         msg += f" buffer: {args.buffer}\n"
+         msg += f" overwrite: {args.overwrite}"
+
+         log.info(msg)
+
+         build_hit(
+             config=args.config,
+             args=dbetto.AttrsDict(dbetto.utils.load_dict(args.args)),
+             stp_files=stp_files,
+             glm_files=glm_files,
+             hit_files=hit_files,
+             start_evtid=args.start_evtid,
+             n_evtid=args.n_evtid,
+             out_field=args.out_field,
+             buffer=args.buffer,
+             overwrite=args.overwrite,
+         )
+         log.info("successfully finished post-processing")