reboost 0.5.0-py3-none-any.whl → 0.5.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
reboost/__init__.py CHANGED
@@ -2,20 +2,12 @@ from __future__ import annotations
 
 from lgdo import lh5
 
-from . import build_hit, core, iterator, math, shape
 from ._version import version as __version__
+from .build_hit import build_hit
 
 __all__ = [
     "__version__",
-    "build_glm",
     "build_hit",
-    "build_hit",
-    "build_tcm",
-    "core",
-    "iterator",
-    "math",
-    "optmap",
-    "shape",
 ]
 
 lh5.settings.DEFAULT_HDF5_SETTINGS = {"shuffle": True, "compression": "lzf"}
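Only the `build_hit` function is re-exported from the package root in 0.5.2; the submodules previously listed in `__all__` are no longer imported eagerly. A minimal sketch of the resulting import patterns (assuming no other API changes):

    from reboost import build_hit   # the function, re-exported since 0.5.2

    # submodules remain importable, they are just not pre-imported at the package root
    import reboost.core
    import reboost.build_glm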
reboost/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.5.0'
-__version_tuple__ = version_tuple = (0, 5, 0)
+__version__ = version = '0.5.2'
+__version_tuple__ = version_tuple = (0, 5, 2)
reboost/build_evt.py CHANGED
@@ -78,9 +78,6 @@ def build_evt(
     -------
     ak.Array of the evt tier data (if the data is not saved to disk)
     """
-    msg = "... beginning the evt tier processing"
-    log.info(msg)
-
     # create the objects needed for evaluate expression
 
     file_info = {
reboost/build_glm.py CHANGED
@@ -224,8 +224,8 @@ def build_glm(
     glm_sum = {}
 
     for file_idx, stp_file in enumerate(files.stp):
-        msg = f"Start generating glm for {stp_file} "
-        log.info(msg)
+        msg = f"start generating glm for {stp_file} "
+        log.debug(msg)
 
         # loop over the lh5_tables
         lh5_table_list = [
@@ -298,8 +298,6 @@ def build_glm(
                 if glm_sum[lh5_subgroup] is None
                 else ak.concatenate((glm_sum[lh5_subgroup], glm))
             )
-        msg = f"Finished generating glm for {stp_file} "
-        log.info(msg)
 
     # return if it was requested to keep glm in memory
     if glm_sum is not None:
reboost/build_hit.py CHANGED
@@ -239,17 +239,17 @@ def build_hit(
     # iterate over files
     for file_idx, (stp_file, glm_file) in enumerate(zip(files.stp, files.glm)):
         msg = (
-            f"... starting post processing of {stp_file} to {files.hit[file_idx]} "
+            f"starting processing of {stp_file} to {files.hit[file_idx]} "
             if files.hit[file_idx] is not None
-            else f"... starting post processing of {stp_file}"
+            else f"starting processing of {stp_file}"
         )
         log.info(msg)
 
         # loop over processing groups
         for group_idx, proc_group in enumerate(config["processing_groups"]):
             proc_name = proc_group.get("name", "default")
-            msg = f"... starting group {proc_name}"
-            log.info(msg)
+            msg = f"starting group {proc_name}"
+            log.debug(msg)
 
             if proc_name not in time_dict:
                 time_dict[proc_name] = ProfileDict()
@@ -261,7 +261,7 @@ def build_hit(
 
             # loop over detectors
             for in_det_idx, (in_detector, out_detectors) in enumerate(detectors_mapping.items()):
-                msg = f"... processing {in_detector} (to {out_detectors})"
+                msg = f"processing {in_detector} (to {out_detectors})"
                 log.debug(msg)
 
                 # get detector objects
@@ -328,7 +328,14 @@ def build_hit(
328
328
  "DETECTOR": out_detector,
329
329
  }
330
330
  # add fields
331
- for field, expression in proc_group.get("operations", {}).items():
331
+ for field, info in proc_group.get("operations", {}).items():
332
+ if isinstance(info, str):
333
+ expression = info
334
+ units = None
335
+ else:
336
+ expression = info["expression"]
337
+ units = info.get("units", None)
338
+
332
339
  # evaluate the expression
333
340
  col = core.evaluate_output_column(
334
341
  hit_table,
@@ -338,6 +345,10 @@ def build_hit(
                             time_dict=time_dict[proc_name],
                             name=field,
                         )
+
+                        if units is not None:
+                            col.attrs["units"] = units
+
                         core.add_field_with_nesting(hit_table, field, col)
 
                     # remove unwanted fields
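The two hunks above let each entry under a processing group's `operations` be either a plain expression string or a mapping carrying the expression plus optional units, which end up in the output column's `attrs`. A hedged sketch of the two accepted forms as a Python dict (field names and expressions are invented for illustration; the real expression syntax is whatever `core.evaluate_output_column` accepts):

    operations = {
        # string form: expression only, no units attribute is written
        "evtid": "some_expression_string",
        # mapping form: expression plus units, stored as col.attrs["units"]
        "energy_sum": {
            "expression": "another_expression_string",
            "units": "keV",
        },
    }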
@@ -393,14 +404,14 @@ def build_hit(
                     lh5.read(obj, stp_file),
                     obj,
                     files.hit[file_idx],
-                    wo_mode="write_safe",
+                    wo_mode="write_safe" if file_idx == 0 else "append",
                 )
             except LH5EncodeError as e:
                 msg = f"cannot forward object {obj} as it has been already processed by reboost"
                 raise RuntimeError(msg) from e
 
     # return output table or nothing
-    log.info(time_dict)
+    log.debug(time_dict)
 
     if output_tables == {}:
         output_tables = None
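The `wo_mode` change means the forwarded (unprocessed) objects are written with "write_safe" only for the first input file and with "append" afterwards, presumably so that runs where several input files feed one output file no longer fail on the second write. A minimal sketch of the pattern using `lgdo.lh5` directly (file and object names are hypothetical):

    from lgdo import lh5

    stp_files = ["run0_stp.lh5", "run1_stp.lh5"]  # hypothetical inputs
    for file_idx, stp_file in enumerate(stp_files):
        obj = "vtx"  # hypothetical forwarded object
        lh5.write(
            lh5.read(obj, stp_file),
            obj,
            "run_hit.lh5",
            wo_mode="write_safe" if file_idx == 0 else "append",
        )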
reboost/core.py CHANGED
@@ -139,7 +139,7 @@ def get_global_objects(
     time_start = time.time()
 
     msg = f"Getting global objects with {expressions.keys()} and {local_dict}"
-    log.info(msg)
+    log.debug(msg)
     res = {}
 
     for obj_name, expression in expressions.items():
reboost/iterator.py CHANGED
@@ -113,7 +113,7 @@ class GLMIterator:
         if self.use_glm:
             self.buffer = int(buffer * glm_n_rows / stp_n_rows)
             msg = f"Number of stp rows {stp_n_rows}, number of glm rows {glm_n_rows} changing buffer from {buffer} to {self.buffer}"
-            log.info(msg)
+            log.debug(msg)
 
     def __iter__(self) -> typing.Iterator:
         self.current_i_entry = 0
reboost/optmap/convolve.py CHANGED
@@ -75,8 +75,14 @@ def iterate_stepwise_depositions(
     mode: str = "no-fano",
 ):
     # those np functions are not supported by numba, but needed for efficient array access below.
-    x0 = structured_to_unstructured(edep_df[["xloc_pre", "yloc_pre", "zloc_pre"]], np.float64)
-    x1 = structured_to_unstructured(edep_df[["xloc_post", "yloc_post", "zloc_post"]], np.float64)
+    if "xloc_pre" in edep_df.dtype.names:
+        x0 = structured_to_unstructured(edep_df[["xloc_pre", "yloc_pre", "zloc_pre"]], np.float64)
+        x1 = structured_to_unstructured(
+            edep_df[["xloc_post", "yloc_post", "zloc_post"]], np.float64
+        )
+    else:
+        x0 = structured_to_unstructured(edep_df[["xloc", "yloc", "zloc"]], np.float64)
+        x1 = None
 
     rng = np.random.default_rng() if rng is None else rng
     output_map, res = _iterate_stepwise_depositions(
@@ -157,12 +163,16 @@ def _iterate_stepwise_depositions(
 
         # do the scintillation.
         part, charge = pdgid_map[t.particle]
+
+        # if we have both pre and post step points use them
+        # else pass as None
+
         scint_times = sc.scintillate(
             scint_mat_params,
             x0[rowid],
-            x1[rowid],
-            t.v_pre,
-            t.v_post,
+            x1[rowid] if x1 is not None else None,
+            t.v_pre if x1 is not None else None,
+            t.v_post if x1 is not None else None,
             t.time,
             part,
             charge,
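Together, these two hunks let the optical convolution run on step tables that carry only a single position per step (xloc/yloc/zloc) as well as on tables with pre/post pairs; when the post-step columns are absent, `x1` and the pre/post velocities are passed to `scintillate` as `None`. A small sketch of the field detection on a NumPy structured array (the table layout here is invented for illustration):

    import numpy as np
    from numpy.lib.recfunctions import structured_to_unstructured

    # hypothetical single-point step table: no *_pre / *_post columns
    edep_df = np.zeros(4, dtype=[("xloc", "f8"), ("yloc", "f8"), ("zloc", "f8"), ("edep", "f8")])

    if "xloc_pre" in edep_df.dtype.names:
        x0 = structured_to_unstructured(edep_df[["xloc_pre", "yloc_pre", "zloc_pre"]], np.float64)
        x1 = structured_to_unstructured(edep_df[["xloc_post", "yloc_post", "zloc_post"]], np.float64)
    else:
        x0 = structured_to_unstructured(edep_df[["xloc", "yloc", "zloc"]], np.float64)
        x1 = None  # scintillate() downstream then receives a single step point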
@@ -358,7 +368,7 @@ def convolve(
     it = LH5Iterator(edep_file, edep_path, buffer_len=buffer_len)
 
     for it_count, edep_lgdo in enumerate(it):
-        edep_df = edep_lgdo.view_as("pd").iloc.to_records()
+        edep_df = edep_lgdo.view_as("pd").to_records()
 
         log.info("start event processing (%d)", it_count)
         output_map = iterate_stepwise_depositions(
reboost/optmap/create.py CHANGED
@@ -131,7 +131,7 @@ def _create_optical_maps_chunk(
     hits_per_primary = np.zeros(10, dtype=np.int64)
     hits_per_primary_len = 0
     for it_count, events_lgdo in enumerate(optmap_events_it):
-        optmap_events = events_lgdo.view_as("pd").iloc
+        optmap_events = events_lgdo.view_as("pd")
         hitcounts = optmap_events[all_det_ids].to_numpy()
         loc = optmap_events[["xloc", "yloc", "zloc"]].to_numpy()
 
reboost/optmap/evt.py CHANGED
@@ -25,7 +25,6 @@ def build_optmap_evt(
     if lh5_out_file_tmp.exists():
         msg = f"temporary output file {lh5_out_file_tmp} already exists"
         raise RuntimeError(msg)
-
     vert_it = LH5Iterator(lh5_in_file, "vtx", buffer_len=buffer_len)
     opti_it = LH5Iterator(lh5_in_file, "stp/optical", buffer_len=buffer_len)
 
@@ -80,8 +79,7 @@ def build_optmap_evt(
             _store_vert_df()
 
         # read the next vertex chunk into memory.
-        (vert_lgdo, vert_entry) = next(vert_it)
-        vert_df = vert_lgdo.view_as("pd").iloc
+        vert_df = next(vert_it).view_as("pd")
 
         # prepare vertex coordinates.
         vert_df = vert_df.set_index("evtid", drop=True).drop(["n_part", "time"], axis=1)
@@ -95,9 +93,10 @@ def build_optmap_evt(
     # use smaller integer type uint8 to spare RAM when storing types.
     hit_count_type = np.uint8
     for opti_it_count, opti_lgdo in enumerate(opti_it):
-        opti_df = opti_lgdo.view_as("pd").iloc
+        opti_df = opti_lgdo.view_as("pd")
 
         log.info("build evt table (%d)", opti_it_count)
+
        for t in opti_df[["evtid", "det_uid"]].itertuples(name=None, index=False):
             _ensure_vert_df(vert_it, t[0])
             vert_df.loc[t[0], str(t[1])] += 1
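The `.iloc` removals here (and in convolve.py and create.py) plus the simplified vertex read reflect how these call sites now consume `LH5Iterator`: iterating yields the LGDO chunk itself, and `view_as("pd")` already returns a pandas DataFrame. A hedged sketch of the pattern (file and group names are hypothetical):

    from lgdo.lh5 import LH5Iterator

    it = LH5Iterator("optical_stp.lh5", "stp/optical", buffer_len=10_000)
    for it_count, chunk in enumerate(it):
        df = chunk.view_as("pd")  # a DataFrame; no trailing .iloc needed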
reboost-0.5.0.dist-info/METADATA → reboost-0.5.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reboost
-Version: 0.5.0
+Version: 0.5.2
 Summary: New LEGEND Monte-Carlo simulation post-processing
 Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
@@ -701,7 +701,7 @@ Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: numba
 Requires-Dist: legend-pydataobj>=1.12.0
-Requires-Dist: legend-pygeom-optics>=0.6.5
+Requires-Dist: legend-pygeom-optics>=0.9.2
 Requires-Dist: hist
 Requires-Dist: dbetto
 Requires-Dist: particle
reboost-0.5.0.dist-info/RECORD → reboost-0.5.2.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
-reboost/__init__.py,sha256=0xtTxM_T8Sp6U3-EJRgFZ6WuqPk31QGQms7jhwouhvs,403
-reboost/_version.py,sha256=N2U3TRgLfYxjXfoF4Dy9PxAbZq24zjCym3P3cwuxKP8,511
-reboost/build_evt.py,sha256=TnJRiZ2O6eChorZeh-_SGgWa9jZQYAl_SsmJsM-wYrY,4779
-reboost/build_glm.py,sha256=uYoKl8U5gHy4oW1Neik5F_1jy5yhnFKS1ezVBA-vQaQ,9550
-reboost/build_hit.py,sha256=riij8oDF3Ajqfmxs52doA92LgdykF773hqDzsHe0eWw,15089
+reboost/__init__.py,sha256=3cYLf7XEyFRX5GK8f50gY4ecGR5O5HORITpDthOFpOg,265
+reboost/_version.py,sha256=066-gEgl89JZttvQIeF8xCmsrdAhgid_eoH5BBz7G58,511
+reboost/build_evt.py,sha256=yH0bf4bwbp4feWV3JgvSAD5RcvhOX6c9PhH8FAe3Xv4,4710
+reboost/build_glm.py,sha256=IerSLQfe51ZO7CQP2kmfPnOIVaDtcfw3byOM02Vaz6o,9472
+reboost/build_hit.py,sha256=23JL5B7qThdHZqAK_HWoytqcEOWDhGsk4n5UMtojJ1c,15513
 reboost/cli.py,sha256=HZgqUZK0tSmnlGqoXjrbmLitW_i001TzibxvDrRxLLg,6324
-reboost/core.py,sha256=B577b5KzAYpGI7c4eCxIRKgt9tq6hwVr2-DtYTml1e8,12826
-reboost/iterator.py,sha256=J--4f7ANW15Nb4kL_OQ0kbIw7U0ks73asefLd-uGoV4,6898
+reboost/core.py,sha256=pUco_IaTKf50PTVrtyFwoYveJVS58mqs9P3TUrtEyjs,12827
+reboost/iterator.py,sha256=o4xAkJmQY5Cn_2pJPY6HbShVWV02loWY9ZlCxFePMdg,6899
 reboost/log_utils.py,sha256=VqS_9OC5NeNU3jcowVOBB0NJ6ssYvNWnirEY-JVduEA,766
 reboost/profile.py,sha256=EOTmjmS8Rm_nYgBWNh6Rntl2XDsxdyed7yEdWtsZEeg,2598
 reboost/units.py,sha256=3EH8XlpbsObdu5vLgxhm1600L6UNYD5jng4SjJT_1QE,2202
@@ -19,9 +19,9 @@ reboost/math/functions.py,sha256=OymiYTcA0NXxxm-MBDw5kqyNwHoLCmuv4J48AwnSrbU,563
 reboost/math/stats.py,sha256=iiOEi87x93kqPWeSmlRiA5Oe-R8XR-plm6Z532PhC9M,1401
 reboost/optmap/__init__.py,sha256=imvuyld-GLw8qdwqW-lXCg2feptcTyQo3wIzPvDHwmY,93
 reboost/optmap/cli.py,sha256=SzbPRgsbR5Llm3aSJubH02Ym8FQyTH7kvuLjK7faLiY,9572
-reboost/optmap/convolve.py,sha256=vj5Wl2M5BeyvLbcnLo8xzaOAIKXUHUu023sXIP7xTCI,14034
-reboost/optmap/create.py,sha256=gd9VWceU0AOX_ABuLvmnAi2GkUdiugNF6CMUT2CbDbE,17053
-reboost/optmap/evt.py,sha256=CRhUXpgnV3nTA6gY-e6V2W2C4K6nJ8AEoW2ffa2ww9s,4571
+reboost/optmap/convolve.py,sha256=_volpLmhW5mOPA0KkzXRyHyqkj4_zDSnvfHv1Dtuxm8,14390
+reboost/optmap/create.py,sha256=B-MWurmnzl4Y62N2Pj7IwM1IaEEt2fydpZa_t0gmsxo,17048
+reboost/optmap/evt.py,sha256=UYESkMAwDbE_ap4Jb-a2n0uWxHRnYmHzQiXh0vexaPQ,4513
 reboost/optmap/mapview.py,sha256=73kpe0_SKDj9bIhEx1ybX1sBP8TyvufiLfps84A_ijA,6798
 reboost/optmap/numba_pdg.py,sha256=y8cXR5PWE2Liprp4ou7vl9do76dl84vXU52ZJD9_I7A,731
 reboost/optmap/optmap.py,sha256=j4rfbQ84PYSpE-BvP4Rdt96ZjPdwy8P4e4eZz1mATys,12817
@@ -29,9 +29,9 @@ reboost/shape/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/shape/cluster.py,sha256=RIvBlhHzp88aaUZGofp5SD9bimnoiqIOddhQ84jiwoM,8135
 reboost/shape/group.py,sha256=_z2qCOret3E-kj-nrp1-J5j2lEwQpgfYdQp2pgpDHR8,4449
 reboost/shape/reduction.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reboost-0.5.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-reboost-0.5.0.dist-info/METADATA,sha256=CpCyIjvfYUoGAC2Sh5b4JbtLLjx2QT281m8TKNmb0rs,44250
-reboost-0.5.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-reboost-0.5.0.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
-reboost-0.5.0.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
-reboost-0.5.0.dist-info/RECORD,,
+reboost-0.5.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+reboost-0.5.2.dist-info/METADATA,sha256=8RgyM0cqo8X_SpUghqaN1hd_SudMixVUIVxQPTVDJMk,44250
+reboost-0.5.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+reboost-0.5.2.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
+reboost-0.5.2.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
+reboost-0.5.2.dist-info/RECORD,,