reboost 0.10.0a0__tar.gz → 0.10.0a2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. {reboost-0.10.0a0/src/reboost.egg-info → reboost-0.10.0a2}/PKG-INFO +1 -1
  2. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/_version.py +3 -3
  3. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/build_hit.py +13 -6
  4. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/core.py +26 -10
  5. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/evt.py +0 -23
  6. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/units.py +18 -1
  7. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/utils.py +1 -47
  8. {reboost-0.10.0a0 → reboost-0.10.0a2/src/reboost.egg-info}/PKG-INFO +1 -1
  9. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/basic.yaml +5 -0
  10. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/test_build_hit.py +24 -2
  11. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_core.py +17 -4
  12. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_optmap.py +0 -11
  13. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_units.py +9 -1
  14. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_utils.py +1 -26
  15. {reboost-0.10.0a0 → reboost-0.10.0a2}/LICENSE +0 -0
  16. {reboost-0.10.0a0 → reboost-0.10.0a2}/README.md +0 -0
  17. {reboost-0.10.0a0 → reboost-0.10.0a2}/pyproject.toml +0 -0
  18. {reboost-0.10.0a0 → reboost-0.10.0a2}/setup.cfg +0 -0
  19. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/__init__.py +0 -0
  20. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/__main__.py +0 -0
  21. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/build_evt.py +0 -0
  22. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/build_glm.py +0 -0
  23. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/cli.py +0 -0
  24. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/daq/__init__.py +0 -0
  25. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/daq/core.py +0 -0
  26. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/daq/utils.py +0 -0
  27. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/hpge/__init__.py +0 -0
  28. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/hpge/psd.py +0 -0
  29. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/hpge/surface.py +0 -0
  30. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/hpge/utils.py +0 -0
  31. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/iterator.py +0 -0
  32. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/log_utils.py +0 -0
  33. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/math/__init__.py +0 -0
  34. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/math/functions.py +0 -0
  35. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/math/stats.py +0 -0
  36. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/__init__.py +0 -0
  37. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/__main__.py +0 -0
  38. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/cli.py +0 -0
  39. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/convolve.py +0 -0
  40. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/create.py +0 -0
  41. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/mapview.py +0 -0
  42. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/numba_pdg.py +0 -0
  43. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/optmap/optmap.py +0 -0
  44. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/profile.py +0 -0
  45. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/shape/__init__.py +0 -0
  46. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/shape/cluster.py +0 -0
  47. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/shape/group.py +0 -0
  48. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/shape/reduction.py +0 -0
  49. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/spms/__init__.py +0 -0
  50. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost/spms/pe.py +0 -0
  51. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost.egg-info/SOURCES.txt +0 -0
  52. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost.egg-info/dependency_links.txt +0 -0
  53. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost.egg-info/entry_points.txt +0 -0
  54. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost.egg-info/not-zip-safe +0 -0
  55. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost.egg-info/requires.txt +0 -0
  56. {reboost-0.10.0a0 → reboost-0.10.0a2}/src/reboost.egg-info/top_level.txt +0 -0
  57. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/conftest.py +0 -0
  58. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/evt/test_evt.py +0 -0
  59. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/glm/test_build_glm.py +0 -0
  60. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/args.yaml +0 -0
  61. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/foward_only.yaml +0 -0
  62. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/geom.gdml +0 -0
  63. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/hit_config.yaml +0 -0
  64. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/pars.yaml +0 -0
  65. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/reshape.yaml +0 -0
  66. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hit/configs/spms.yaml +0 -0
  67. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/simulation/gammas.mac +0 -0
  68. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/simulation/geometry.gdml +0 -0
  69. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/simulation/make_dt_map.jl +0 -0
  70. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/simulation/make_geom.py +0 -0
  71. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_current.py +0 -0
  72. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_dt_heuristic.py +0 -0
  73. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_files/drift_time_maps.lh5 +0 -0
  74. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_files/internal_electron.lh5 +0 -0
  75. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_hpge_map.py +0 -0
  76. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_r90.py +0 -0
  77. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/hpge/test_surface.py +0 -0
  78. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/spms/test_pe.py +0 -0
  79. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_cli.py +0 -0
  80. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_math.py +0 -0
  81. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_optmap_dets.gdml +0 -0
  82. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_profile.py +0 -0
  83. {reboost-0.10.0a0 → reboost-0.10.0a2}/tests/test_shape.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: reboost
3
- Version: 0.10.0a0
3
+ Version: 0.10.0a2
4
4
  Summary: New LEGEND Monte-Carlo simulation post-processing
5
5
  Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
6
6
  Maintainer: The LEGEND Collaboration
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.10.0a0'
32
- __version_tuple__ = version_tuple = (0, 10, 0, 'a0')
31
+ __version__ = version = '0.10.0a2'
32
+ __version_tuple__ = version_tuple = (0, 10, 0, 'a2')
33
33
 
34
- __commit_id__ = commit_id = 'gd31e91903'
34
+ __commit_id__ = commit_id = 'g8f2cabc6a'
@@ -51,14 +51,15 @@ A :func:`build_hit` to parse the following configuration file:
51
51
 
52
52
  t0: ak.fill_none(ak.firsts(HITS.time, axis=-1), np.nan)
53
53
 
54
- evtid: ak.fill_none(ak.firsts(HITS.__evtid, axis=-1), np.nan)
54
+ evtid: ak.fill_none(ak.firsts(HITS.evtid, axis=-1), np.nan)
55
55
 
56
56
  # distance to the nplus surface in mm
57
57
  distance_to_nplus_surface_mm: reboost.hpge.distance_to_surface(
58
- HITS.__xloc, HITS.__yloc, HITS.__zloc,
58
+ HITS.xloc, HITS.yloc, HITS.zloc,
59
59
  DETECTOR_OBJECTS.pyobj,
60
60
  DETECTOR_OBJECTS.phyvol.position.eval(),
61
- surface_type='nplus')
61
+ surface_type='nplus',
62
+ unit='m')
62
63
 
63
64
  # activness based on FCCD (no TL)
64
65
  activeness: ak.where(
@@ -75,7 +76,7 @@ A :func:`build_hit` to parse the following configuration file:
75
76
  )
76
77
 
77
78
  # summed energy of the hit accounting for activeness
78
- energy_raw: ak.sum(HITS.__edep * HITS.activeness, axis=-1)
79
+ energy_raw: ak.sum(HITS.edep * HITS.activeness, axis=-1)
79
80
 
80
81
  # energy with smearing
81
82
  energy: reboost.math.sample_convolve(
@@ -92,7 +93,7 @@ A :func:`build_hit` to parse the following configuration file:
92
93
  )
93
94
 
94
95
  # example of low level reduction on clusters
95
- energy_clustered: ak.sum(ak.unflatten(HITS.__edep, HITS.clusters_lengths), axis=-1)
96
+ energy_clustered: ak.sum(ak.unflatten(HITS.edep, HITS.clusters_lengths), axis=-1)
96
97
 
97
98
  # example of using a reboost helper
98
99
  steps_clustered: reboost.shape.reduction.energy_weighted_average(HITS, HITS.clusters_lengths)
@@ -115,7 +116,7 @@ A :func:`build_hit` to parse the following configuration file:
115
116
  - num_scint_ph_lar
116
117
 
117
118
  operations:
118
- tot_edep_wlsr: ak.sum(HITS.edep[np.abs(HITS.zloc) < 3000], axis=-1)
119
+ tot_edep_wlsr: ak.sum(HITS.edep[np.abs(HITS.zloc) < 3], axis=-1)
119
120
 
120
121
  - name: spms
121
122
 
@@ -180,6 +181,8 @@ from dbetto import AttrsDict
180
181
  from lgdo import lh5
181
182
  from lgdo.lh5.exceptions import LH5EncodeError
182
183
 
184
+ from reboost import units
185
+
183
186
  from . import core, utils
184
187
  from .iterator import GLMIterator
185
188
  from .profile import ProfileDict
@@ -334,6 +337,7 @@ def build_hit(
334
337
  expression=proc_group["hit_table_layout"],
335
338
  time_dict=time_dict[proc_name],
336
339
  )
340
+
337
341
  else:
338
342
  hit_table_layouted = copy.deepcopy(stps)
339
343
 
@@ -448,5 +452,8 @@ def _evaluate_operation(
448
452
  time_dict=time_dict,
449
453
  name=field,
450
454
  )
455
+ if not isinstance(info, str) and "units" in info:
456
+ col = units.attach_units(col, info["units"])
457
+ units.move_units_to_flattened_data(col)
451
458
 
452
459
  core.add_field_with_nesting(hit_table, field, col)
@@ -11,16 +11,25 @@ from dbetto import AttrsDict
11
11
  from lgdo import lh5
12
12
  from lgdo.types import LGDO, Table
13
13
 
14
- from . import utils
14
+ from . import units, utils
15
15
  from .profile import ProfileDict
16
16
 
17
17
  log = logging.getLogger(__name__)
18
18
 
19
19
 
20
20
  def read_data_at_channel_as_ak(
21
- channels: ak.Array, rows: ak.Array, file: str, field: str, group: str, tab_map: dict[int, str]
21
+ channels: ak.Array,
22
+ rows: ak.Array,
23
+ file: str,
24
+ field: str,
25
+ group: str,
26
+ tab_map: dict[int, str],
27
+ with_units: bool = False,
22
28
  ) -> ak.Array:
23
- r"""Read the data from a particular field to an awkward array. This replaces the TCM like object defined by the channels and rows with the corresponding data field.
29
+ r"""Read the data from a particular field to an Awkward array.
30
+
31
+ This replaces the TCM like object defined by the channels and rows with the
32
+ corresponding data field.
24
33
 
25
34
  Parameters
26
35
  ----------
@@ -68,7 +77,9 @@ def read_data_at_channel_as_ak(
68
77
  tcm_rows = np.where(ak.flatten(channels == key))[0]
69
78
 
70
79
  # read the data with sorted idx
71
- data_ch = lh5.read(f"{group}/{tab_name}/{field}", file, idx=idx[arg_idx]).view_as("ak")
80
+ data_ch = lh5.read(f"{group}/{tab_name}/{field}", file, idx=idx[arg_idx])
81
+ units = data_ch.attrs.get("units", None)
82
+ data_ch = data_ch.view_as("ak")
72
83
 
73
84
  # sort back to order for tcm
74
85
  data_ch = data_ch[np.argsort(arg_idx)]
@@ -85,8 +96,12 @@ def read_data_at_channel_as_ak(
85
96
 
86
97
  # sort the final data
87
98
  data_flat = data_flat[np.argsort(tcm_rows_full)]
99
+ data_unflat = ak.unflatten(data_flat, reorder)
100
+
101
+ if with_units and units is not None:
102
+ return ak.with_parameter(data_unflat, "units", units)
88
103
 
89
- return ak.unflatten(data_flat, reorder)
104
+ return data_unflat
90
105
 
91
106
 
92
107
  def evaluate_output_column(
@@ -441,6 +456,10 @@ def evaluate_hit_table_layout(
441
456
 
442
457
  res = eval(group_func, globs, locs)
443
458
 
459
+ if isinstance(res, Table):
460
+ for data in res.values():
461
+ units.move_units_to_flattened_data(data)
462
+
444
463
  if time_dict is not None:
445
464
  time_dict.update_field(name="hit_layout", time_start=time_start)
446
465
 
@@ -519,8 +538,5 @@ def remove_columns(tab: Table, outputs: list) -> Table:
519
538
 
520
539
  def merge(hit_table: Table, output_table: ak.Array | None):
521
540
  """Merge the table with the array."""
522
- return (
523
- hit_table.view_as("ak")
524
- if output_table is None
525
- else ak.concatenate((output_table, hit_table.view_as("ak")))
526
- )
541
+ hit_table = hit_table.view_as("ak", with_units=True)
542
+ return hit_table if output_table is None else ak.concatenate((output_table, hit_table))
@@ -3,10 +3,8 @@ from __future__ import annotations
3
3
  import logging
4
4
  from collections import OrderedDict
5
5
  from collections.abc import Generator, Iterable
6
- from pathlib import Path
7
6
 
8
7
  import numpy as np
9
- from lgdo import lh5
10
8
  from lgdo.lh5 import LH5Iterator
11
9
  from lgdo.types import Table
12
10
 
@@ -105,27 +103,6 @@ def generate_optmap_evt(
105
103
  assert had_last_chunk, "did not reach last chunk in optmap-evt building"
106
104
 
107
105
 
108
- def build_optmap_evt(
109
- lh5_in_file: str, lh5_out_file: str, detectors: Iterable[str | int], buffer_len: int = int(5e6)
110
- ) -> None:
111
- """Create a faster map for lookup of the hits in each detector, for each primary event."""
112
- lh5_out_file = Path(lh5_out_file)
113
- lh5_out_file_tmp = lh5_out_file.with_stem(".evt-tmp." + lh5_out_file.stem)
114
- if lh5_out_file_tmp.exists():
115
- msg = f"temporary output file {lh5_out_file_tmp} already exists"
116
- raise RuntimeError(msg)
117
-
118
- for vert_it_count, chunk in enumerate(generate_optmap_evt(lh5_in_file, detectors, buffer_len)):
119
- log.info("store evt file %s (%d)", lh5_out_file_tmp, vert_it_count - 1)
120
- lh5.write(Table(chunk), name=EVT_TABLE_NAME, lh5_file=lh5_out_file_tmp, wo_mode="append")
121
-
122
- # after finishing the output file, rename to the actual output file name.
123
- if lh5_out_file.exists():
124
- msg = f"output file {lh5_out_file_tmp} already exists after writing tmp output file"
125
- raise RuntimeError(msg)
126
- lh5_out_file_tmp.rename(lh5_out_file)
127
-
128
-
129
106
  def get_optical_detectors_from_geom(geom_fn) -> dict[int, str]:
130
107
  import pyg4ometry
131
108
  import pygeomtools
@@ -7,7 +7,7 @@ import awkward as ak
7
7
  import numpy as np
8
8
  import pint
9
9
  import pyg4ometry as pg4
10
- from lgdo import LGDO
10
+ from lgdo import LGDO, VectorOfVectors
11
11
 
12
12
  log = logging.getLogger(__name__)
13
13
 
@@ -69,6 +69,23 @@ def attach_units(data: ak.Array | LGDO, unit: str | None) -> ak.Array | LGDO:
69
69
  return data
70
70
 
71
71
 
72
+ def move_units_to_flattened_data(data: LGDO) -> None:
73
+ """If `data` is a VectorOfVectors move units from attrs to flattened data attrs.
74
+
75
+ Parameters
76
+ ----------
77
+ data
78
+ the nested data structure
79
+ """
80
+ if isinstance(data, VectorOfVectors) and ("units" in data.attrs):
81
+ unit = data.attrs.pop("units")
82
+ if isinstance(data.flattened_data, VectorOfVectors):
83
+ data.flattened_data.attrs |= {"units": unit}
84
+ move_units_to_flattened_data(data.flattened_data)
85
+ else:
86
+ data.flattened_data.attrs |= {"units": unit}
87
+
88
+
72
89
  def units_conv_ak(data: Any | LGDO | ak.Array, target_units: pint.Unit | str) -> Any | ak.Array:
73
90
  """Calculate numeric conversion factor to reach `target_units`, and apply to data converted to ak.
74
91
 
@@ -5,7 +5,7 @@ import itertools
5
5
  import logging
6
6
  import re
7
7
  import time
8
- from collections.abc import Iterable, Mapping
8
+ from collections.abc import Iterable
9
9
  from contextlib import contextmanager
10
10
  from pathlib import Path
11
11
 
@@ -182,52 +182,6 @@ def get_file_list(path: str | None, threads: int | None = None) -> list[str]:
182
182
  return [f"{(Path(path).with_suffix(''))}_t{idx}.lh5" for idx in range(threads)]
183
183
 
184
184
 
185
- def copy_units(tab: Table) -> dict:
186
- """Extract a dictionary of attributes (i.e. units).
187
-
188
- Parameters
189
- ----------
190
- tab
191
- Table to get the units from.
192
-
193
- Returns
194
- -------
195
- a dictionary with the units for each field
196
- in the table.
197
- """
198
- units = {}
199
-
200
- for field in tab:
201
- if "units" in tab[field].attrs:
202
- units[field] = tab[field].attrs["units"]
203
-
204
- return units
205
-
206
-
207
- def assign_units(tab: Table, units: Mapping) -> Table:
208
- """Copy the attributes from the map of attributes to the table.
209
-
210
- Parameters
211
- ----------
212
- tab
213
- Table to add attributes to.
214
- units
215
- mapping (dictionary like) of units of each field
216
-
217
- Returns
218
- -------
219
- an updated table with LGDO attributes.
220
- """
221
- for field in tab:
222
- if field in units:
223
- if not isinstance(tab[field], VectorOfVectors):
224
- tab[field].attrs["units"] = units[field]
225
- else:
226
- tab[field].flattened_data.attrs["units"] = units[field]
227
-
228
- return tab
229
-
230
-
231
185
  def _search_string(string: str):
232
186
  """Capture the characters matching the pattern for a function call."""
233
187
  pattern = r"\b([a-zA-Z_][a-zA-Z0-9_\.]*)\s*\("
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: reboost
3
- Version: 0.10.0a0
3
+ Version: 0.10.0a2
4
4
  Summary: New LEGEND Monte-Carlo simulation post-processing
5
5
  Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
6
6
  Maintainer: The LEGEND Collaboration
@@ -6,11 +6,16 @@ processing_groups:
6
6
 
7
7
  outputs:
8
8
  - t0
9
+ - t0_u
9
10
  - evtid
10
11
  - energy
12
+ - xloc
11
13
 
12
14
  operations:
13
15
  t0: ak.fill_none(ak.firsts(HITS.time, axis=-1), np.nan)
16
+ t0_u:
17
+ expression: ak.fill_none(ak.firsts(HITS.time, axis=-1), np.nan)
18
+ units: ns
14
19
  energy: ak.sum(HITS.edep, axis=-1)
15
20
 
16
21
  forward:
@@ -11,6 +11,7 @@ import pytest
11
11
  from lgdo import Array, Struct, Table, VectorOfVectors, lh5
12
12
 
13
13
  import reboost
14
+ from reboost import units
14
15
 
15
16
 
16
17
  @pytest.fixture(scope="module")
@@ -136,12 +137,24 @@ def test_reshape(test_gen_lh5_flat, tmptestdir):
136
137
  ) # m
137
138
  tab = Table(data)
138
139
 
140
+ for field in data.values():
141
+ units.move_units_to_flattened_data(field)
142
+
139
143
  output = lh5.read("stp/det1", outfile)
140
144
 
141
145
  # check the outputs
142
146
  assert output == tab
143
147
  assert lh5.read("vtx", outfile) == Table({"evtid": Array([0, 1])})
144
148
 
149
+ # test units
150
+ for field, unit in zip(
151
+ ["edep", "time", "xloc", "yloc", "zloc", "dist_to_surf"],
152
+ ["keV", "ns", "m", "m", "m", "m"],
153
+ strict=True,
154
+ ):
155
+ assert "units" not in output[field].attrs
156
+ assert output[field].flattened_data.attrs["units"] == unit
157
+
145
158
 
146
159
  def test_only_forward(test_gen_lh5_flat, tmptestdir):
147
160
  outfile = f"{tmptestdir}/basic_hit_foward_only.lh5"
@@ -181,15 +194,20 @@ def test_basic(test_gen_lh5, tmptestdir):
181
194
  == b"Zstandard compression: http://www.zstd.net"
182
195
  )
183
196
 
184
- hits = lh5.read("hit/det1", outfile).view_as("ak")
197
+ hits = lh5.read("hit/det1", outfile).view_as("ak", with_units=True)
185
198
 
186
199
  assert ak.all(hits.energy == [300, 330])
187
200
  assert ak.all(hits.t0 == [0, 0.1])
201
+
188
202
  assert hits.evtid[0] == 0
189
203
  assert hits.evtid[1] == 1
190
204
 
191
205
  assert len(hits) == 2
192
206
 
207
+ assert ak.parameters(hits.t0) == {}
208
+ assert ak.parameters(hits.t0_u)["units"] == "ns"
209
+ assert ak.parameters(hits.xloc)["units"] == "m"
210
+
193
211
  # test in memory
194
212
 
195
213
  hits, time_dict = reboost.build_hit(
@@ -205,6 +223,10 @@ def test_basic(test_gen_lh5, tmptestdir):
205
223
  assert ak.all(hits["det1"].evtid[0] == [0, 0])
206
224
  assert ak.all(hits["det1"].evtid[1] == [1, 1, 1])
207
225
 
226
+ assert ak.parameters(hits["det1"].t0) == {}
227
+ assert ak.parameters(hits["det1"].t0_u)["units"] == "ns"
228
+ assert ak.parameters(hits["det1"].xloc)["units"] == "m"
229
+
208
230
  assert set(time_dict.keys()) == {"global_objects", "geds"}
209
231
  assert set(time_dict["geds"].keys()) == {
210
232
  "detector_objects",
@@ -213,7 +235,7 @@ def test_basic(test_gen_lh5, tmptestdir):
213
235
  "expressions",
214
236
  }
215
237
  assert set(time_dict["geds"]["read"].keys()) == {"stp"}
216
- assert set(time_dict["geds"]["expressions"].keys()) == {"t0", "energy"}
238
+ assert set(time_dict["geds"]["expressions"].keys()) == {"t0", "t0_u", "energy"}
217
239
 
218
240
 
219
241
  def test_file_merging(test_gen_lh5, tmptestdir):
@@ -59,13 +59,13 @@ def hitfiles(tmptestdir):
59
59
  # make some hit tier files
60
60
  channel1 = Table(
61
61
  {
62
- "energy": Array([100, 200, 400, 300]),
62
+ "energy": Array([100, 200, 400, 300], attrs={"units": "keV"}),
63
63
  "times": VectorOfVectors([[0.1], [0.2, 0.3], [0.4, 98], [2]]),
64
64
  }
65
65
  )
66
66
  channel2 = Table(
67
67
  {
68
- "energy": Array([10, 70, 0, 56, 400, 400]),
68
+ "energy": Array([10, 70, 0, 56, 400, 400], attrs={"units": "keV"}),
69
69
  "times": VectorOfVectors([[12], [], [-0.4, 0.4], [89], [1], [2]]),
70
70
  }
71
71
  )
@@ -83,12 +83,13 @@ def hitfiles(tmptestdir):
83
83
 
84
84
  def test_read_data_at_channel(hitfiles):
85
85
  # make a TCM
86
- tcm_channels = ak.Array([[0], [0], [0, 1], [1], [1], [0, 1], [1], [1]])
87
- tcm_rows = ak.Array([[0], [1], [2, 0], [1], [2], [3, 3], [4], [5]])
86
+ tcm_channels = ak.Array([[0], [0], [0, 1], [1], [1], [0, 1], [1], [], [1]])
87
+ tcm_rows = ak.Array([[0], [1], [2, 0], [1], [2], [3, 3], [4], [], [5]])
88
88
 
89
89
  energy = reboost.core.read_data_at_channel_as_ak(
90
90
  tcm_channels, tcm_rows, hitfiles[2], "energy", "hit", {"det001": 0, "det002": 1}
91
91
  )
92
+ assert "units" not in ak.parameters(energy)
92
93
 
93
94
  # check the same
94
95
  assert len(energy) == len(tcm_channels)
@@ -105,6 +106,18 @@ def test_read_data_at_channel(hitfiles):
105
106
  )
106
107
  assert len(times) == len(tcm_channels)
107
108
 
109
+ energy = reboost.core.read_data_at_channel_as_ak(
110
+ tcm_channels,
111
+ tcm_rows,
112
+ hitfiles[2],
113
+ "energy",
114
+ "hit",
115
+ {"det001": 0, "det002": 1},
116
+ with_units=True,
117
+ )
118
+ assert "units" in ak.parameters(energy)
119
+ assert ak.parameters(energy)["units"] == "keV"
120
+
108
121
 
109
122
  def test_get_objects(test_data_configs, make_gdml):
110
123
  # check basic eval
@@ -13,7 +13,6 @@ from reboost.optmap.create import (
13
13
  merge_optical_maps,
14
14
  rebin_optical_maps,
15
15
  )
16
- from reboost.optmap.evt import build_optmap_evt
17
16
  from reboost.optmap.optmap import OpticalMap
18
17
 
19
18
 
@@ -53,16 +52,6 @@ def tbl_hits(tmptestdir):
53
52
  return (str(hit_file),)
54
53
 
55
54
 
56
- def test_optmap_evt(tbl_hits, tmptestdir):
57
- evt_out_file = tmptestdir / "evt-out.lh5"
58
- build_optmap_evt(
59
- tbl_hits[0],
60
- str(evt_out_file),
61
- detectors=("1", "002", "003"),
62
- buffer_len=20, # note: shorter window sizes (e.g. 10) do not work.
63
- )
64
-
65
-
66
55
  @pytest.fixture
67
56
  def tbl_evt_fns(tmptestdir) -> tuple[str]:
68
57
  evt_count = 100
@@ -4,7 +4,7 @@ import awkward as ak
4
4
  import pint
5
5
  import pyg4ometry as pg4
6
6
  import pytest
7
- from lgdo.types import Array
7
+ from lgdo.types import Array, VectorOfVectors
8
8
 
9
9
  from reboost import units
10
10
  from reboost.units import ureg as u
@@ -92,3 +92,11 @@ def test_attach_units():
92
92
  data_units = units.attach_units(Array([1, 2, 3]), "mm")
93
93
 
94
94
  assert units.get_unit_str(data_units) == "mm"
95
+
96
+
97
+ def test_move_units_to_flattened_data():
98
+ data = VectorOfVectors([[1, 2, 3]], attrs={"units": "mm"})
99
+ units.move_units_to_flattened_data(data)
100
+
101
+ assert data.attrs.get("units") is None
102
+ assert data.flattened_data.attrs.get("units") == "mm"
@@ -6,14 +6,11 @@ from pathlib import Path
6
6
 
7
7
  import pytest
8
8
  import yaml
9
- from lgdo.types import Array, Table, VectorOfVectors
9
+ from lgdo.types import VectorOfVectors
10
10
 
11
11
  import reboost
12
12
  from reboost import utils
13
- from reboost.shape import group
14
13
  from reboost.utils import (
15
- assign_units,
16
- copy_units,
17
14
  get_file_dict,
18
15
  get_function_string,
19
16
  get_table_names,
@@ -204,28 +201,6 @@ def test_get_files_dict():
204
201
  assert files.glm == ["glm1.lh5", "glm2.lh5"]
205
202
 
206
203
 
207
- def test_units():
208
- table = Table({"a": Array([1, 2, 3]), "b": Array([4, 5, 6]), "evtid": Array([0, 0, 1])})
209
-
210
- table.a.attrs = {"datatype": "array<1>{real}", "units": "ns"}
211
- table.b.attrs = {"datatype": "array<1>{real}", "units": "keV"}
212
-
213
- units = copy_units(table)
214
-
215
- assert units["a"] == "ns"
216
- assert units["b"] == "keV"
217
- reshaped = group.group_by_evtid(table.view_as("ak"))
218
-
219
- # also add an array field
220
- units["c"] = "keV"
221
- reshaped["c"] = Array([1, 2])
222
- reshaped = assign_units(reshaped, units)
223
-
224
- assert reshaped.a.flattened_data.attrs["units"] == "ns"
225
- assert reshaped.b.flattened_data.attrs["units"] == "keV"
226
- assert reshaped.c.attrs["units"] == "keV"
227
-
228
-
229
204
  def test_table_names():
230
205
  names = "['hit/det001','hit/det002']"
231
206
 
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes