reboost-0.1.1-py3-none-any.whl → reboost-0.2.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
reboost/__init__.py CHANGED
@@ -1,6 +1,17 @@
  from __future__ import annotations
 
- from reboost import build_hit, core, iterator, math, optmap, shape
+ from reboost import build_hit, core, iterator, math, shape
  from reboost._version import version as __version__
 
- __all__ = ["__version__", "build_hit", "core", "iterator", "math", "optmap", "shape"]
+ __all__ = [
+     "__version__",
+     "build_glm",
+     "build_hit",
+     "build_hit",
+     "build_tcm",
+     "core",
+     "iterator",
+     "math",
+     "optmap",
+     "shape",
+ ]
reboost/_version.py CHANGED
@@ -1,8 +1,13 @@
- # file generated by setuptools_scm
+ # file generated by setuptools-scm
  # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
  TYPE_CHECKING = False
  if TYPE_CHECKING:
-     from typing import Tuple, Union
+     from typing import Tuple
+     from typing import Union
+
      VERSION_TUPLE = Tuple[Union[int, str], ...]
  else:
      VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
 
- __version__ = version = '0.1.1'
- __version_tuple__ = version_tuple = (0, 1, 1)
+ __version__ = version = '0.2.1'
+ __version_tuple__ = version_tuple = (0, 2, 1)
reboost/build_evt.py CHANGED
@@ -1,9 +1,170 @@
+ """A program for combining the hits from various detectors, to build events.
+ 
+ Is able to parse a config file with the following format:
+ 
+ .. code-block:: yaml
+ 
+     channels:
+       geds_on:
+         - det001
+         - det002
+       geds_ac:
+         - det003
+ 
+     outputs:
+       - energy
+       - multiplicity
+ 
+     operations:
+       energy_id:
+         channels: geds_on
+         aggregation_mode: gather
+         query: "hit.energy > 25"
+         expression: tcm.channel_id
+ 
+       energy:
+         aggregation_mode: keep_at_ch:evt.energy_id
+         expression: "hit.energy > 25"
+         channels: geds_on
+ 
+       multiplicity:
+         channels: geds_on
+         aggregation_mode: sum
+         expression: "hit.energy > 25"
+         initial: 0
+ 
+ 
+ Must contain:
+ - "channels": dictionary of channel groupings,
+ - "outputs": fields for the output file,
+ - "operations": operations to perform; see :func:`pygama.evt.build_evt.evaluate_expression` for more details.
+ """
+ 
  from __future__ import annotations
 
  import logging
 
+ import awkward as ak
+ import numpy as np
+ from lgdo import Table
+ from lgdo.lh5 import LH5Iterator, write
+ from pygama.evt.build_evt import evaluate_expression
+ from pygama.evt.utils import TCMData
+ 
+ from . import utils
+ 
  log = logging.getLogger(__name__)
 
 
- def build_evt() -> None:
-     pass
+ def build_evt(
+     hit_file: str, tcm_file: str, evt_file: str | None, config: dict, buffer: int = int(5e6)
+ ) -> ak.Array | None:
+     """Generates the event tier from the hit and tcm tiers.
+ 
+     Parameters
+     ----------
+     hit_file
+         path to the hit tier file.
+     tcm_file
+         path to the tcm tier file.
+     evt_file
+         path to the evt tier (output) file; if `None` the :class:`Table` is returned in memory.
+     config
+         dictionary of the configuration.
+ 
+     buffer
+         number of events to process simultaneously.
+ 
+     Returns
+     -------
+     ak.Array of the evt tier data (if the data is not saved to disk).
+     """
+     msg = "... beginning the evt tier processing"
+     log.info(msg)
+ 
+     # create the objects needed for evaluate_expression
+ 
+     file_info = {
+         "hit": (hit_file, "hit", "det{:03}"),
+         "evt": (evt_file, "evt"),
+     }
+ 
+     # iterate through the TCM
+ 
+     out_ak = ak.Array([])
+     mode = "of"
+ 
+     # get channel groupings
+     channels = {}
+     for group, info in config["channels"].items():
+         if isinstance(info, str):
+             channels[group] = [info]
+ 
+         elif isinstance(info, list):
+             channels[group] = info
+ 
+     for tcm_lh5, _, n_rows_read in LH5Iterator(tcm_file, "tcm", buffer_len=buffer):
+         tcm_lh5_sel = tcm_lh5
+         tcm_ak = tcm_lh5_sel.view_as("ak")[:n_rows_read]
+ 
+         tcm = TCMData(
+             id=np.array(ak.flatten(tcm_ak.array_id)),
+             idx=np.array(ak.flatten(tcm_ak.array_idx)),
+             cumulative_length=np.array(np.cumsum(ak.num(tcm_ak.array_id, axis=-1))),
+         )
+ 
+         n_rows = len(tcm.cumulative_length)
+         out_tab = Table(size=n_rows)
+ 
+         for name, info in config["operations"].items():
+             msg = f"computing field {name}"
+             log.debug(msg)
+ 
+             defaultv = info.get("initial", np.nan)
+             if isinstance(defaultv, str) and (defaultv in ["np.nan", "np.inf", "-np.inf"]):
+                 defaultv = eval(defaultv)
+ 
+             channels_use = utils.get_channels_from_groups(info.get("channels", []), channels)
+             channels_exclude = utils.get_channels_from_groups(
+                 info.get("exclude_channels", []), channels
+             )
+ 
+             if "aggregation_mode" not in info:
+                 field = out_tab.eval(
+                     info["expression"].replace("evt.", ""), info.get("parameters", {})
+                 )
+             else:
+                 field = evaluate_expression(
+                     file_info,
+                     tcm,
+                     channels_use,
+                     table=out_tab,
+                     mode=info["aggregation_mode"],
+                     expr=info["expression"],
+                     query=info.get("query", None),
+                     sorter=info.get("sort", None),
+                     channels_skip=channels_exclude,
+                     default_value=defaultv,
+                     n_rows=n_rows,
+                 )
+ 
+             msg = f"field {field}"
+             log.debug(msg)
+             out_tab.add_field(name, field)
+ 
+         # remove fields if necessary
+         existing_cols = list(out_tab.keys())
+         for col in existing_cols:
+             if col not in config["outputs"]:
+                 out_tab.remove_column(col, delete=True)
+ 
+         # write
+         if evt_file is not None:
+             write(out_tab, "evt", evt_file, wo_mode=mode)
+             mode = "append"
+         else:
+             out_ak = ak.concatenate((out_ak, out_tab.view_as("ak")))
+ 
+     if evt_file is None:
+         return out_ak
+     return None
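The docstring above fully specifies the config schema. As a minimal sketch (not taken from the wheel; file paths are hypothetical), the documented YAML expressed as a Python dict can drive the new `build_evt` directly, with `evt_file=None` selecting the in-memory return path:

```python
from reboost.build_evt import build_evt

# the YAML config from the docstring, written as a Python dict
config = {
    "channels": {"geds_on": ["det001", "det002"], "geds_ac": ["det003"]},
    "outputs": ["energy", "multiplicity"],
    "operations": {
        "energy_id": {
            "channels": "geds_on",
            "aggregation_mode": "gather",
            "query": "hit.energy > 25",
            "expression": "tcm.channel_id",
        },
        "energy": {
            "aggregation_mode": "keep_at_ch:evt.energy_id",
            "expression": "hit.energy > 25",
            "channels": "geds_on",
        },
        "multiplicity": {
            "channels": "geds_on",
            "aggregation_mode": "sum",
            "expression": "hit.energy > 25",
            "initial": 0,
        },
    },
}

# with evt_file=None the evt tier is returned in memory as an ak.Array
evt = build_evt("hit.lh5", "tcm.lh5", None, config)
```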
reboost/build_glm.py CHANGED
@@ -221,7 +221,7 @@ def build_glm(
      log.info(msg)
 
      # loop over the lh5_tables
-     lh5_table_list = [table for table in lh5.ls(stp_file, "stp/") if table != "stp/vertices"]
+     lh5_table_list = list(lh5.ls(stp_file, "stp/"))
 
      # get rows in the table
      if files.glm is None:
reboost/build_hit.py CHANGED
@@ -195,7 +195,7 @@ def build_hit(
      config
          dictionary or path to YAML file containing the processing chain.
      args
-         dictionary or :class:`legendmeta.AttrsDict` of the global arguments.
+         dictionary or :class:`dbetto.AttrsDict` of the global arguments.
      stp_files
          list of strings or string of the stp file path.
      glm_files
@@ -246,7 +246,9 @@ def build_hit(
      # loop over processing groups
      for group_idx, proc_group in enumerate(config["processing_groups"]):
          proc_name = proc_group.get("name", "default")
-         time_dict[proc_name] = ProfileDict()
+ 
+         if proc_name not in time_dict:
+             time_dict[proc_name] = ProfileDict()
 
          # extract the output detectors and the mapping to input detectors
          detectors_mapping = utils.merge_dicts(
reboost/build_tcm.py ADDED
@@ -0,0 +1,111 @@
+ from __future__ import annotations
+ 
+ import logging
+ import re
+ 
+ import awkward as ak
+ from lgdo import Table, lh5
+ 
+ from reboost import shape
+ 
+ log = logging.getLogger(__name__)
+ 
+ 
+ def build_tcm(
+     hit_file: str,
+     out_file: str,
+     channels: list[str],
+     time_name: str = "t0",
+     idx_name: str = "global_evtid",
+     time_window_in_us: float = 10,
+ ) -> None:
+     """Build the time coincidence map (TCM) from the hit tier.
+ 
+     Parameters
+     ----------
+     hit_file
+         path to the hit tier file.
+     out_file
+         output path for the tcm.
+     channels
+         list of channel names to include.
+     time_name
+         name of the hit tier field used for time grouping.
+     idx_name
+         name of the hit tier field used for index grouping.
+     time_window_in_us
+         time window (in us) used to define the grouping.
+     """
+     hash_func = r"\d+"
+ 
+     msg = "start building time-coincidence map"
+     log.info(msg)
+ 
+     chan_ids = [re.search(hash_func, channel).group() for channel in channels]
+ 
+     hit_data = []
+     for channel in channels:
+         hit_data.append(
+             lh5.read(f"{channel}/hit", hit_file, field_mask=[idx_name, time_name]).view_as("ak")
+         )
+     tcm = get_tcm_from_ak(
+         hit_data, chan_ids, window=time_window_in_us, time_name=time_name, idx_name=idx_name
+     )
+ 
+     if tcm is not None:
+         lh5.write(tcm, "tcm", out_file, wo_mode="of")
+ 
+ 
+ def get_tcm_from_ak(
+     hit_data: list[ak.Array],
+     channels: list[int],
+     *,
+     window: float = 10,
+     time_name: str = "t0",
+     idx_name: str = "global_evtid",
+ ) -> Table:
+     """Builds a time-coincidence map from a list of hit data Tables.
+ 
+     - builds an ak.Array of the data, merging channels, with fields based on "time_name" and "idx_name", adding a field `array_id` from the channel index and the row index (`array_idx`),
+     - sorts this array by the "idx_name" then "time_name" fields,
+     - groups by "idx_name" and "time_name" based on the window parameter.
+ 
+     Parameters
+     ----------
+     hit_data
+         list of hit tier data for each channel.
+     channels
+         list of channel indices.
+     window
+         time window for selecting coincidences (in us).
+     time_name
+         name of the field for time information.
+     idx_name
+         name of the decay index field.
+ 
+     Returns
+     -------
+     a :class:`Table` containing the time-coincidence map.
+     """
+     # build ak_obj for sorting
+     sort_objs = []
+ 
+     for ch_idx, data_tmp in zip(channels, hit_data):
+         obj_tmp = ak.copy(data_tmp)
+         obj_tmp = obj_tmp[[time_name, idx_name]]
+         hit_idx = ak.local_index(obj_tmp)
+ 
+         obj_tmp = ak.with_field(obj_tmp, hit_idx, "array_idx")
+ 
+         obj_tmp["array_id"] = int(ch_idx)
+         sort_objs.append(obj_tmp)
+ 
+     obj_tot = ak.concatenate(sort_objs)
+ 
+     return shape.group.group_by_time(
+         obj_tot,
+         time_name=time_name,
+         evtid_name=idx_name,
+         window=window,
+         fields=["array_id", "array_idx"],
+     )
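A toy illustration of `get_tcm_from_ak`, with invented values; the field names match the defaults (`global_evtid`, `t0`), and the exact layout of the returned table is determined by `shape.group.group_by_time`:

```python
import awkward as ak
from reboost.build_tcm import get_tcm_from_ak

# hit data for two channels; one record per hit
ch0 = ak.Array({"global_evtid": [0, 0, 1], "t0": [0.0, 50_000.0, 100.0]})
ch1 = ak.Array({"global_evtid": [0, 2], "t0": [10.0, 40.0]})

# channel ids 1 and 2; hits with the same global_evtid that fall inside
# the time window are grouped into one coincidence
tcm = get_tcm_from_ak([ch0, ch1], [1, 2], window=10)
```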
reboost/core.py CHANGED
@@ -104,6 +104,9 @@ def evaluate_object(
      -------
      the evaluated object.
      """
+     msg = f"Evaluating object with expression {expression} and {local_dict}"
+     log.debug(msg)
+ 
      func_call, globals_dict = utils.get_function_string(expression)
 
      if "pyg4ometry" in globals_dict:
reboost/iterator.py CHANGED
@@ -131,5 +131,5 @@ class GLMIterator:
              vert_rows = None
              # vertex table should have same structure as glm
 
-             return (stp_rows, vert_rows, self.current_i_entry, n_steps)
+             return (stp_rows, vert_rows, self.current_i_entry - 1, n_steps)
          return (None, None, self.current_i_entry, 0)
reboost/optmap/__init__.py CHANGED
@@ -0,0 +1,5 @@
+ from __future__ import annotations
+ 
+ from .optmap import OpticalMap
+ 
+ __all__ = ["OpticalMap"]
reboost/optmap/cli.py CHANGED
@@ -82,9 +82,11 @@ def optical_cli() -> None:
          help="default: %(default)s",
      )
      mapview_parser.add_argument(
-         "--display-error",
-         action="store_true",
-         help="display error instead of magnitude. default: %(default)s",
+         "--hist",
+         choices=("nr_gen", "nr_det", "p_det", "p_det_err", "p_det_err_rel"),
+         action="store",
+         default="p_det",
+         help="select optical map histogram to show. default: %(default)s",
      )
      mapview_parser.add_argument(
          "--divide",
@@ -168,6 +170,12 @@ def optical_cli() -> None:
      )
      convolve_parser.add_argument("--output", help="output hit LH5 file", metavar="OUTPUT_HIT")
 
+     # STEP X: rebin maps
+     rebin_parser = subparsers.add_parser("rebin", help="rebin optical maps")
+     rebin_parser.add_argument("input", help="input map LH5 files", metavar="INPUT_MAP")
+     rebin_parser.add_argument("output", help="output map LH5 file", metavar="OUTPUT_MAP")
+     rebin_parser.add_argument("--factor", type=int, help="integer scale-down factor")
+ 
      args = parser.parse_args()
 
      log_level = (None, logging.INFO, logging.DEBUG)[min(args.verbose, 2)]
@@ -229,7 +237,7 @@ def optical_cli() -> None:
              cmap_min=args.min,
              cmap_max=args.max,
              title=args.title,
-             show_error=args.display_error,
+             histogram_choice=args.hist,
          )
 
      # STEP 2c: merge maps
@@ -261,3 +269,11 @@ def optical_cli() -> None:
          _check_input_file(parser, [args.map, args.edep])
          _check_output_file(parser, args.output)
          convolve(args.map, args.edep, args.edep_lgdo, args.material, args.output, args.bufsize)
+ 
+     # STEP X: rebin maps
+     if args.command == "rebin":
+         from reboost.optmap.create import rebin_optical_maps
+ 
+         _check_input_file(parser, args.input)
+         _check_output_file(parser, args.output)
+         rebin_optical_maps(args.input, args.output, args.factor)
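The new `rebin` subcommand is a thin dispatch onto `rebin_optical_maps`; the equivalent direct call (with hypothetical file names) looks like this:

```python
from reboost.optmap.create import rebin_optical_maps

# halve the number of bins on each axis; the factor must divide all bin counts
rebin_optical_maps("map.lh5", "map_rebinned.lh5", factor=2)
```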
reboost/optmap/create.py CHANGED
@@ -1,5 +1,6 @@
  from __future__ import annotations
 
+ import copy
  import gc
  import logging
  import multiprocessing as mp
@@ -430,7 +431,7 @@ def check_optical_map(map_l5_file: str):
      for submap in list_optical_maps(map_l5_file):
          try:
              om = OpticalMap.load_from_file(map_l5_file, submap)
-         except BaseException:
+         except Exception:
              log.exception("error while loading optical map %s", submap)
              continue
          om.check_histograms(include_prefix=True)
@@ -439,3 +440,42 @@ def check_optical_map(map_l5_file: str):
              log.error("edges of optical map %s differ", submap)
          else:
              all_binning = om.binning
+ 
+ 
+ def rebin_optical_maps(map_l5_file: str, output_lh5_file: str, factor: int):
+     """Rebin the optical map by an integral factor.
+ 
+     .. note ::
+ 
+         the factor has to divide the bin counts on all axes.
+     """
+     if not isinstance(factor, int) or factor <= 1:
+         msg = f"invalid rebin factor {factor}"
+         raise ValueError(msg)
+ 
+     def _rebin_map(large: NDArray, factor: int) -> NDArray:
+         factor = np.full(3, factor, dtype=int)
+         sh = np.column_stack([np.array(large.shape) // factor, factor]).ravel()
+         return large.reshape(sh).sum(axis=(1, 3, 5))
+ 
+     for submap in list_optical_maps(map_l5_file):
+         log.info("rebinning optical map group: %s", submap)
+ 
+         om = OpticalMap.load_from_file(map_l5_file, submap)
+ 
+         settings = om.get_settings()
+         if not all(b % factor == 0 for b in settings["bins"]):
+             msg = f"invalid factor {factor}, not a divisor"
+             raise ValueError(msg)
+         settings = copy.copy(settings)
+         settings["bins"] = [b // factor for b in settings["bins"]]
+ 
+         om_new = OpticalMap.create_empty(om.name, settings)
+         om_new.h_vertex = _rebin_map(om.h_vertex, factor)
+         om_new.h_hits = _rebin_map(om.h_hits, factor)
+         om_new.create_probability()
+         om_new.write_lh5(lh5_file=output_lh5_file, group=submap, wo_mode="write_safe")
+ 
+     # just copy the hitcounts and hitcounts exponent datasets.
+     for dset in ("_hitcounts_exp", "_hitcounts"):
+         lh5.write(lh5.read(dset, lh5_file=map_l5_file), dset, lh5_file=output_lh5_file)
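The `_rebin_map` helper uses a standard NumPy idiom: reshape each axis into `(new_bins, factor)` blocks and sum over the block axes. A self-contained sketch of the same idea, with an invented 4x4x4 histogram:

```python
import numpy as np

large = np.arange(4 * 4 * 4, dtype=float).reshape(4, 4, 4)
factor = 2

# interleave the reduced shape with the block size: (2, 2, 2, 2, 2, 2)
sh = np.column_stack([np.array(large.shape) // factor, np.full(3, factor)]).ravel()

# summing over the block axes merges each 2x2x2 group of bins into one
small = large.reshape(sh).sum(axis=(1, 3, 5))

assert small.shape == (2, 2, 2)
assert small.sum() == large.sum()  # rebinning preserves the total counts
```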
reboost/optmap/evt.py CHANGED
@@ -60,38 +60,40 @@ def build_optmap_evt(
      # This function follows the assumption, that the output event ids are at least "somewhat"
      # monotonic, i.e. later chunks do not contain lower evtids than the previous chunk(s).
      # Going back is not implemented.
-     def _ensure_vert_df(vert_it: LH5Iterator, evtid: int):
+     def _ensure_vert_df(vert_it: LH5Iterator, evtid: int) -> None:
          nonlocal vert_df, vert_df_bounds, vert_it_count, hits_expected
 
-         if vert_df_bounds is not None and vert_df is not None:
-             if evtid < vert_df_bounds[0]:
-                 msg = "non-monotonic evtid encountered, but cannot go back"
-                 raise KeyError(msg)
-             if evtid >= vert_df_bounds[0] and evtid <= vert_df_bounds[1]:
-                 return  # vert_df already contains the given evtid.
- 
-         # here, evtid > vert_df_bounds[1] (or vert_df_bounds is still None). We need to fetch
-         # the next event table chunk.
- 
-         vert_it_count += 1
-         # we might have filled a dataframe, save it to disk.
-         _store_vert_df()
- 
-         # read the next vertex chunk into memory.
-         (vert_lgdo, vert_entry, vert_n_rows) = next(vert_it)
-         vert_df = vert_lgdo.view_as("pd").iloc[0:vert_n_rows]
- 
-         # prepare vertex coordinates.
-         vert_df = vert_df.set_index("evtid", drop=True).drop(["n_part", "time"], axis=1)
-         vert_df_bounds = [vert_df.index.min(), vert_df.index.max()]
-         hits_expected = 0
-         # add columns for all detectors.
-         for d in detectors:
-             vert_df[d] = hit_count_type(0)
+         # skipping multiple chunks is possible in sparsely populated simulations.
+         while vert_df_bounds is None or evtid > vert_df_bounds[1] or evtid < vert_df_bounds[0]:
+             if vert_df_bounds is not None and vert_df is not None:
+                 if evtid < vert_df_bounds[0]:
+                     msg = "non-monotonic evtid encountered, but cannot go back"
+                     raise KeyError(msg)
+                 if evtid >= vert_df_bounds[0] and evtid <= vert_df_bounds[1]:
+                     return  # vert_df already contains the given evtid.
+ 
+             # here, evtid > vert_df_bounds[1] (or vert_df_bounds is still None). We need to fetch
+             # the next event table chunk.
+ 
+             vert_it_count += 1
+             # we might have filled a dataframe, save it to disk.
+             _store_vert_df()
+ 
+             # read the next vertex chunk into memory.
+             (vert_lgdo, vert_entry, vert_n_rows) = next(vert_it)
+             vert_df = vert_lgdo.view_as("pd").iloc[0:vert_n_rows]
+ 
+             # prepare vertex coordinates.
+             vert_df = vert_df.set_index("evtid", drop=True).drop(["n_part", "time"], axis=1)
+             vert_df_bounds = [vert_df.index.min(), vert_df.index.max()]
+             hits_expected = 0
+             # add columns for all detectors.
+             for d in detectors:
+                 vert_df[d] = hit_count_type(0)
 
      log.info("prepare evt table")
-     # use smaller integer type uint16 to spare RAM when storing types.
-     hit_count_type = np.uint16
+     # use smaller integer type uint8 to spare RAM when storing types.
+     hit_count_type = np.uint8
      for opti_it_count, (opti_lgdo, opti_entry, opti_n_rows) in enumerate(opti_it):
          assert (opti_it_count == 0) == (opti_entry == 0)
          opti_df = opti_lgdo.view_as("pd").iloc[0:opti_n_rows]
reboost/optmap/mapview.py CHANGED
@@ -92,12 +92,13 @@ def _channel_selector(fig) -> None:
  def _read_data(
      optmap_fn: str,
      detid: str = "all",
-     show_error: bool = False,
+     histogram_choice: str = "p_det",
  ) -> tuple[tuple[NDArray], NDArray]:
-     optmap_all = lh5.read(f"/{detid}/p_det", optmap_fn)
+     histogram = histogram_choice if histogram_choice != "p_det_err_rel" else "p_det"
+     optmap_all = lh5.read(f"/{detid}/{histogram}", optmap_fn)
      optmap_edges = tuple([b.edges for b in optmap_all.binning])
      optmap_weights = optmap_all.weights.nda.copy()
-     if show_error:
+     if histogram_choice == "p_det_err_rel":
          optmap_err = lh5.read(f"/{detid}/p_det_err", optmap_fn)
          divmask = optmap_weights > 0
          optmap_weights[divmask] = optmap_err.weights.nda[divmask] / optmap_weights[divmask]
@@ -111,13 +112,13 @@ def _prepare_data(
      divide_fn: str | None = None,
      cmap_min: float | Literal["auto"] = 1e-4,
      cmap_max: float | Literal["auto"] = 1e-2,
-     show_error: bool = False,
+     histogram_choice: str = "p_det",
      detid: str = "all",
  ) -> tuple[tuple[NDArray], NDArray]:
-     optmap_edges, optmap_weights = _read_data(optmap_fn, detid, show_error)
+     optmap_edges, optmap_weights = _read_data(optmap_fn, detid, histogram_choice)
 
      if divide_fn is not None:
-         divide_edges, divide_map = _read_data(divide_fn, detid, show_error)
+         divide_edges, divide_map = _read_data(divide_fn, detid, histogram_choice)
          divmask = divide_map > 0
          optmap_weights[divmask] = optmap_weights[divmask] / divide_map[divmask]
          optmap_weights[~divmask] = -1
@@ -157,12 +158,12 @@ def view_optmap(
      start_axis: int = 2,
      cmap_min: float | Literal["auto"] = 1e-4,
      cmap_max: float | Literal["auto"] = 1e-2,
-     show_error: bool = False,
+     histogram_choice: str = "p_det",
      title: str | None = None,
  ) -> None:
      available_dets = list_optical_maps(optmap_fn)
 
-     prepare_args = (optmap_fn, divide_fn, cmap_min, cmap_max, show_error)
+     prepare_args = (optmap_fn, divide_fn, cmap_min, cmap_max, histogram_choice)
      edges, weights, cmap_min, cmap_max = _prepare_data(*prepare_args, detid)
 
      fig = plt.figure(figsize=(10, 10))
reboost/optmap/optmap.py CHANGED
@@ -48,6 +48,7 @@ class OpticalMap:
              for i in range(3)
          ]
 
+     @staticmethod
      def create_empty(name: str, settings: Mapping[str, str]) -> OpticalMap:
          om = OpticalMap(name, settings)
          om.h_vertex = om._prepare_hist()
@@ -115,7 +116,7 @@ class OpticalMap:
          assert ax.is_range
          assert ax.closedleft
          oor_mask &= (ax.first <= col) & (col < ax.last)
-         idx_s = np.floor((col - ax.first).astype(np.float64) / ax.step).astype(np.int64)
+         idx_s = np.floor((col.astype(np.float64) - ax.first) / ax.step).astype(np.int64)
          assert np.all(idx_s[oor_mask] < self._single_shape[dim])
          idx += s * idx_s
 
@@ -215,9 +216,11 @@ class OpticalMap:
          return ratio_0, ratio_err_0
 
      def create_probability(self) -> None:
+         """Compute probability map (and map uncertainty) from vertex and hit map."""
          self.h_prob, self.h_prob_uncert = self._divide_hist(self.h_hits, self.h_vertex)
 
      def write_lh5(self, lh5_file: str, group: str = "all", wo_mode: str = "write_safe") -> None:
+         """Write this map to a LH5 file."""
          if wo_mode not in ("write_safe", "overwrite_file"):
              msg = f"invalid wo_mode {wo_mode} for optical map"
              raise ValueError(msg)
@@ -237,6 +240,25 @@ class OpticalMap:
          write_hist(self.h_prob, "p_det", lh5_file, group, "write_safe")
          write_hist(self.h_prob_uncert, "p_det_err", lh5_file, group, "write_safe")
 
+     def get_settings(self) -> dict:
+         """Get the binning settings that were used to create this optical map instance."""
+         if self.settings is not None:
+             return self.settings
+ 
+         range_in_m = []
+         bins = []
+         for b in self.binning:
+             if not b.is_range:
+                 msg = "cannot get binning settings for variable binning map"
+                 raise RuntimeError(msg)
+             if b.get_binedgeattrs().get("units") != "m":
+                 msg = "invalid units. can only work with optical maps in meter"
+                 raise RuntimeError(msg)
+             range_in_m.append([b.first, b.last])
+             bins.append(b.nbins)
+ 
+         return {"range_in_m": np.array(range_in_m), "bins": np.array(bins)}
+ 
      def check_histograms(self, include_prefix: bool = False) -> None:
          log_prefix = "" if not include_prefix else self.name + " - "
 
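Together with the now-static `create_empty`, `get_settings` allows a fresh map to be allocated with the binning of an existing one, which is exactly how `rebin_optical_maps` uses it. A hedged sketch (file and group names invented):

```python
from reboost.optmap.optmap import OpticalMap

# load an existing map
om = OpticalMap.load_from_file("maps.lh5", "all")

# recover the binning settings and allocate a fresh map with the same binning
settings = om.get_settings()  # {"range_in_m": ..., "bins": ...}
om_new = OpticalMap.create_empty(om.name, settings)
```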
reboost/shape/cluster.py CHANGED
@@ -3,47 +3,203 @@ from __future__ import annotations
  import logging
 
  import awkward as ak
+ import numba
  import numpy as np
  from lgdo import VectorOfVectors
 
  log = logging.getLogger(__name__)
 
 
- def cluster_by_sorted_field(
-     cluster_variable: ak.Array | VectorOfVectors, field: ak.Array | VectorOfVectors
+ def apply_cluster(
+     cluster_run_lengths: VectorOfVectors | ak.Array, field: ak.Array | VectorOfVectors
  ) -> VectorOfVectors:
-     """Extract the cumulative lengths of the clusters per event.
+     """Apply clustering to a field.
 
-     Example processing block:
+     Parameters
+     ----------
+     cluster_run_lengths
+         run lengths of each cluster.
+     field
+         the field to cluster.
+     """
+     if isinstance(cluster_run_lengths, VectorOfVectors):
+         cluster_run_lengths = cluster_run_lengths.view_as("ak")
 
-     .. code-block:: yaml
+     if isinstance(field, VectorOfVectors):
+         field = field.view_as("ak")
+ 
+     n_cluster = ak.num(cluster_run_lengths, axis=-1)
+     clusters = ak.unflatten(ak.flatten(field), ak.flatten(cluster_run_lengths))
+ 
+     # reshape into cluster oriented
+     return VectorOfVectors(ak.unflatten(clusters, n_cluster))
+ 
+ 
+ def cluster_by_step_length(
+     trackid: ak.Array | VectorOfVectors,
+     pos_x: ak.Array | VectorOfVectors,
+     pos_y: ak.Array | VectorOfVectors,
+     pos_z: ak.Array | VectorOfVectors,
+     dist: ak.Array | VectorOfVectors,
+     surf_cut: float = 2,
+     threshold: float = 0.1,
+     threshold_surf: float = 0.0,
+ ) -> VectorOfVectors:
+     """Perform clustering based on the step length.
 
-         trackid_cluster_edep: reboost.shape.cluster.cluster_by_sorted_field(HITS.trackid,HITS.edep)
+     Steps are clustered based on distance; a new cluster is started if either:
+ 
+     - a step is in a new track,
+     - a step moves from the surface to the bulk region (or vice versa),
+     - the distance between the first step in the cluster and the current step is above a threshold.
+ 
+     The surface region is defined as the volume less than `surf_cut` distance
+     from the surface. This allows for a fine tuning of the parameters to be
+     different for bulk and surface.
 
      Parameters
      ----------
      trackid
-         the id of the tracks.
-     field
-         another field to cluster.
+         index of the track.
+     pos_x
+         x position of the step.
+     pos_y
+         y position of the step.
+     pos_z
+         z position of the step.
+     dist
+         distance to the detector surface.
+     surf_cut
+         size of the surface region (in mm).
+     threshold
+         distance threshold in mm to combine steps in the bulk.
+     threshold_surf
+         distance threshold in mm to combine steps in the surface.
 
      Returns
      -------
-     A VectorOfVectors with the clustered field, an additional axis is present due to the clustering
+     Array of the run lengths of each cluster within a hit.
      """
-     if isinstance(cluster_variable, VectorOfVectors):
-         cluster_variable = cluster_variable.view_as("ak")
+     # type conversions
+     if isinstance(pos_x, VectorOfVectors):
+         pos_x = pos_x.view_as("ak")
+ 
+     if isinstance(pos_y, VectorOfVectors):
+         pos_y = pos_y.view_as("ak")
+ 
+     if isinstance(pos_z, VectorOfVectors):
+         pos_z = pos_z.view_as("ak")
+ 
+     if isinstance(trackid, VectorOfVectors):
+         trackid = trackid.view_as("ak")
+ 
+     if isinstance(dist, VectorOfVectors):
+         dist = dist.view_as("ak")
+ 
+     pos = np.vstack(
+         [
+             ak.flatten(pos_x).to_numpy(),
+             ak.flatten(pos_y).to_numpy(),
+             ak.flatten(pos_z).to_numpy(),
+         ]
+     ).T
+ 
+     indices_flat = cluster_by_distance_numba(
+         ak.flatten(ak.local_index(trackid)).to_numpy(),
+         ak.flatten(trackid).to_numpy(),
+         pos,
+         ak.flatten(dist).to_numpy(),
+         surf_cut=surf_cut,
+         threshold=threshold,
+         threshold_surf=threshold_surf,
+     )
+ 
+     # reshape into being event oriented
+     indices = ak.unflatten(indices_flat, ak.num(ak.local_index(trackid)))
+ 
+     # number of steps per cluster
+     counts = ak.run_lengths(indices)
+ 
+     return VectorOfVectors(counts)
+ 
+ 
+ @numba.njit
+ def cluster_by_distance_numba(
+     local_index: np.ndarray,
+     trackid: np.ndarray,
+     pos: np.ndarray,
+     dist_to_surf: np.ndarray,
+     surf_cut: float = 2,
+     threshold: float = 0.1,
+     threshold_surf: float = 0.0,
+ ) -> np.ndarray:
+     """Cluster steps by the distance between points in the same track.
+ 
+     This function gives the basic numerical calculations for
+     :func:`cluster_by_step_length`.
 
-     if isinstance(field, VectorOfVectors):
-         field = cluster_variable.view_as("ak")
+     Parameters
+     ----------
+     local_index
+         1D array of the local index within each hit (step group).
+     trackid
+         1D array of the index of the track.
+     pos
+         `(n, 3)` size array of the positions.
+     dist_to_surf
+         1D array of the distance to the detector surface.
+     surf_cut
+         size of the surface region (in mm).
+     threshold
+         distance threshold in mm to combine steps in the bulk.
+     threshold_surf
+         distance threshold in mm to combine steps in the surface.
+ 
+     Returns
+     -------
+     np.ndarray
+         1D array of cluster indices.
+     """
 
-     # run length of each cluster
-     counts = ak.run_lengths(cluster_variable)
+     def _dist(a, b):
+         return np.sqrt(np.sum((a - b) ** 2))
 
-     n_cluster = ak.num(counts, axis=-1)
-     clusters = ak.unflatten(ak.flatten(field), ak.flatten(counts))
+     n = len(local_index)
+     out = np.zeros(n, dtype=numba.int32)
 
-     return VectorOfVectors(ak.unflatten(clusters, n_cluster))
+     trackid_prev = -1
+     pos_prev = np.zeros(3, dtype=numba.float64)
+     cluster_idx = -1
+     is_surf_prev = False
+ 
+     for idx in range(n):
+         thr = threshold if dist_to_surf[idx] > surf_cut else threshold_surf
+ 
+         new_cluster = (
+             (trackid[idx] != trackid_prev)
+             or (is_surf_prev and (dist_to_surf[idx] > surf_cut))
+             or ((not is_surf_prev) and (dist_to_surf[idx] < surf_cut))
+             or (_dist(pos[idx, :], pos_prev) > thr)
+         )
+ 
+         # new hit, reset cluster index
+         if idx == 0 or local_index[idx] == 0:
+             cluster_idx = 0
+             pos_prev = pos[idx]
+ 
+         # either new track, moving from surface to bulk,
+         # moving from bulk to surface, or stepping more than
+         # the threshold. Start a new cluster.
+         elif new_cluster:
+             cluster_idx += 1
+             pos_prev = pos[idx, :]
+ 
+         out[idx] = cluster_idx
+ 
+         # update previous values
+         trackid_prev = trackid[idx]
+         is_surf_prev = dist_to_surf[idx] < surf_cut
+ 
+     return out
 
 
  def step_lengths(
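To make the run-length convention concrete, here is a small sketch with invented step data (positions in mm): the first two bulk steps lie within `threshold` of each other, the third does not, so the hit splits into two clusters, and `apply_cluster` reshapes a per-step field accordingly:

```python
import awkward as ak
from reboost.shape.cluster import apply_cluster, cluster_by_step_length

# one hit with three steps of the same track, all deep in the bulk (dist >> surf_cut)
trackid = ak.Array([[1, 1, 1]])
x = ak.Array([[0.00, 0.05, 5.00]])
y = ak.Array([[0.0, 0.0, 0.0]])
z = ak.Array([[0.0, 0.0, 0.0]])
dist = ak.Array([[10.0, 10.0, 10.0]])

run_lengths = cluster_by_step_length(trackid, x, y, z, dist, threshold=0.1)
# expected run lengths: [[2, 1]] -> steps 1-2 merge, step 3 starts a new cluster

edep = ak.Array([[10.0, 20.0, 5.0]])
clustered = apply_cluster(run_lengths, edep)
# expected: [[[10.0, 20.0], [5.0]]], i.e. energies grouped per cluster
```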
reboost/shape/group.py CHANGED
@@ -25,8 +25,11 @@ def _sort_data(obj: ak.Array, *, time_name: str = "time", evtid_name: str = "evt
      -------
      sorted awkward array
      """
-     indices = np.lexsort((obj[time_name], obj[evtid_name]))
-     return obj[indices]
+     obj_unflat = ak.unflatten(obj, ak.run_lengths(obj[evtid_name]))
+     indices = ak.argsort(obj_unflat[time_name], axis=-1)
+     sorted_obj = obj_unflat[indices]
+ 
+     return ak.flatten(sorted_obj)
 
 
  def group_by_evtid(data: Table | ak.Array, *, evtid_name: str = "evtid") -> Table:
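The reworked `_sort_data` assumes hits arrive contiguous in `evtid` (it groups with `ak.run_lengths`) and only reorders by time within each event group. A minimal sketch of that pattern with invented values:

```python
import awkward as ak

obj = ak.Array({"evtid": [0, 0, 1, 1, 1], "time": [5.0, 1.0, 3.0, 2.0, 4.0]})

# split into per-event groups of contiguous evtid values
obj_unflat = ak.unflatten(obj, ak.run_lengths(obj["evtid"]))

# sort by time inside each event, then flatten back
indices = ak.argsort(obj_unflat["time"], axis=-1)
flat = ak.flatten(obj_unflat[indices])
# flat["time"] -> [1.0, 5.0, 2.0, 3.0, 4.0]
```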
reboost-0.1.1.dist-info/METADATA → reboost-0.2.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: reboost
- Version: 0.1.1
+ Version: 0.2.1
  Summary: New LEGEND Monte-Carlo simulation post-processing
  Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
  Maintainer: The LEGEND Collaboration
@@ -707,6 +707,7 @@ Requires-Dist: dbetto
  Requires-Dist: particle
  Requires-Dist: pandas
  Requires-Dist: matplotlib
+ Requires-Dist: pygama
  Provides-Extra: all
  Requires-Dist: reboost[docs,test]; extra == "all"
  Provides-Extra: docs
@@ -722,6 +723,7 @@ Requires-Dist: legend-pygeom-hpges; extra == "test"
  Requires-Dist: legend-pygeom-tools; extra == "test"
  Requires-Dist: pyg4ometry; extra == "test"
  Requires-Dist: pylegendtestdata; extra == "test"
+ Dynamic: license-file
 
  # reboost
 
@@ -735,4 +737,17 @@ Requires-Dist: pylegendtestdata; extra == "test"
  ![License](https://img.shields.io/github/license/legend-exp/reboost)
  [![Read the Docs](https://img.shields.io/readthedocs/reboost?logo=readthedocs)](https://reboost.readthedocs.io)
 
- New LEGEND Monte-Carlo simulation post-processing [WIP]
+ _reboost_ is a package to post-process
+ [remage](https://remage.readthedocs.io/en/stable/) simulations. Post-processing
+ is the step of applying a detector response model to the (idealised) _remage_ /
+ _Geant4_ simulations to "boost" them, allowing comparison to data.
+ 
+ _reboost_ provides tools to:
+ 
+ - apply a HPGe detector response model to the simulations,
+ - generate optical maps,
+ - control the full post-processing chain with configuration
+   files.
+ 
+ For more information see our dedicated
+ [documentation](https://reboost.readthedocs.io/en/stable/)!
reboost-0.2.1.dist-info/RECORD ADDED
@@ -0,0 +1,36 @@
+ reboost/__init__.py,sha256=RVNl3Qgx_hTUeBGXaWYmiTcmXUDhTfvlAGGC8bo_jP8,316
+ reboost/_version.py,sha256=UoNvMtd4wCG76RwoSpNCUtaFyTwakGcZolfjXzNVSMY,511
+ reboost/build_evt.py,sha256=5Q3T0LCl8xMtyRRhcs6layC1xh4vp2f26PgB1yab2zs,4798
+ reboost/build_glm.py,sha256=kxQN6MYl-vfKnOHufPqf5ifEoaToqaR30iMXISxIhYQ,9253
+ reboost/build_hit.py,sha256=sJR2qXup-qP1IoDLRxquuTyeI7DP_1S80QZ-w94qaZY,13293
+ reboost/build_tcm.py,sha256=N1rZwht88ZaKWmURch1VrVUbQROXfP56D0aj_JLsRhU,2951
+ reboost/cli.py,sha256=HTZ05DRnDodcf_D6BJCCavx5HqhKDadJCgf-oh8HTJk,6365
+ reboost/core.py,sha256=AamREubQsAqJ-y10NRx18r-PuqlQj3iTl2PzOTWXGQI,10540
+ reboost/iterator.py,sha256=cqfh3c0uLP67S0YGaw05-McZQzdMb8BISULIm3PEbKA,3990
+ reboost/log_utils.py,sha256=VqS_9OC5NeNU3jcowVOBB0NJ6ssYvNWnirEY-JVduEA,766
+ reboost/profile.py,sha256=EOTmjmS8Rm_nYgBWNh6Rntl2XDsxdyed7yEdWtsZEeg,2598
+ reboost/utils.py,sha256=PMnHvSD5MpIzJyA3IQD_fLAK-O1RMY68DPGbQJp7Yww,4967
+ reboost/hpge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reboost/hpge/psd.py,sha256=vFs8Y5XVW261pB6aOvWmIDzqOaBg-gEOLhL9PbjlEKI,2113
+ reboost/hpge/surface.py,sha256=SZyTmOCTipf27jYaJhtdInzGF1RZ2wKpbtf6HlOQYwM,3662
+ reboost/math/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reboost/math/functions.py,sha256=ZgQpm87pGE0wH4Ekjm-8SbEmzfZ5MlAxS-fTw0RsNMc,1875
+ reboost/math/stats.py,sha256=iiOEi87x93kqPWeSmlRiA5Oe-R8XR-plm6Z532PhC9M,1401
+ reboost/optmap/__init__.py,sha256=imvuyld-GLw8qdwqW-lXCg2feptcTyQo3wIzPvDHwmY,93
+ reboost/optmap/cli.py,sha256=wBexh-zrr5ABherEyk9xigxdArvOAKiiRQwAYon9Sro,9408
+ reboost/optmap/convolve.py,sha256=5FksUrVIG8ysn42QbWBrAx8M1HfAVJtaJJyE8oJ1NGM,12043
+ reboost/optmap/create.py,sha256=Nm5-xEe8M9q2GFQnUv8oN8qpAz9nZArIrQcPboqRmCQ,17153
+ reboost/optmap/evt.py,sha256=m3NWuLEk4zDQJO5vXq_XLLnqmkQwmtdKI3fqmZQBBvc,4707
+ reboost/optmap/mapview.py,sha256=73kpe0_SKDj9bIhEx1ybX1sBP8TyvufiLfps84A_ijA,6798
+ reboost/optmap/numba_pdg.py,sha256=y8cXR5PWE2Liprp4ou7vl9do76dl84vXU52ZJD9_I7A,731
+ reboost/optmap/optmap.py,sha256=j4rfbQ84PYSpE-BvP4Rdt96ZjPdwy8P4e4eZz1mATys,12817
+ reboost/shape/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reboost/shape/cluster.py,sha256=Cj4V1maPR-q_w6rKwF_hLW3Zmsv6zHva_I5oA2mm3PY,7442
+ reboost/shape/group.py,sha256=bSmFCl_yi1hGaKudjiicDEJsiBNyAHiKYdr8ZuH4pSM,4406
+ reboost/shape/reduction.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reboost-0.2.1.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ reboost-0.2.1.dist-info/METADATA,sha256=zSFjGcEzPVcoBtwwbVyRdbI95BooP7swkYvojFfqNjU,44219
+ reboost-0.2.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ reboost-0.2.1.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
+ reboost-0.2.1.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
+ reboost-0.2.1.dist-info/RECORD,,
reboost-0.1.1.dist-info/WHEEL → reboost-0.2.1.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: setuptools (78.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
reboost-0.1.1.dist-info/RECORD DELETED
@@ -1,35 +0,0 @@
- reboost/__init__.py,sha256=yMYemxriXanK9QewWEIKVh5JbOzmnQhS_B4EjW494R0,242
- reboost/_version.py,sha256=PKIMyjdUACH4-ONvtunQCnYE2UhlMfp9su83e3HXl5E,411
- reboost/build_evt.py,sha256=jCmczvzk1aJ8-EWTz4WCcx0FyDqY6qtCwoLiEX-Yc0I,122
- reboost/build_glm.py,sha256=NQzdV6EFQkWjVcXbdMoOJQt6KmkJ6XWGW133xtUMtYA,9295
- reboost/build_hit.py,sha256=o7iR5SLA9fM3zrJ1M_GWSx6ONeM_nVXDYb5jK3HtxiU,13249
- reboost/cli.py,sha256=HTZ05DRnDodcf_D6BJCCavx5HqhKDadJCgf-oh8HTJk,6365
- reboost/core.py,sha256=6dxron2cUVqNr-UUh2MHQ4HVGWFVmmR_vAPhv-7XKHw,10443
- reboost/iterator.py,sha256=E0nn2Sg_zXe05affnoKr0-z3Y-eQl70tgT687UGgmy0,3986
- reboost/log_utils.py,sha256=VqS_9OC5NeNU3jcowVOBB0NJ6ssYvNWnirEY-JVduEA,766
- reboost/profile.py,sha256=EOTmjmS8Rm_nYgBWNh6Rntl2XDsxdyed7yEdWtsZEeg,2598
- reboost/utils.py,sha256=PMnHvSD5MpIzJyA3IQD_fLAK-O1RMY68DPGbQJp7Yww,4967
- reboost/hpge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reboost/hpge/psd.py,sha256=vFs8Y5XVW261pB6aOvWmIDzqOaBg-gEOLhL9PbjlEKI,2113
- reboost/hpge/surface.py,sha256=SZyTmOCTipf27jYaJhtdInzGF1RZ2wKpbtf6HlOQYwM,3662
- reboost/math/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reboost/math/functions.py,sha256=ZgQpm87pGE0wH4Ekjm-8SbEmzfZ5MlAxS-fTw0RsNMc,1875
- reboost/math/stats.py,sha256=iiOEi87x93kqPWeSmlRiA5Oe-R8XR-plm6Z532PhC9M,1401
- reboost/optmap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reboost/optmap/cli.py,sha256=GQTRrLO1fBxzpUPl560DWbxsUIYmuuStzNN2dTor97Y,8675
- reboost/optmap/convolve.py,sha256=5FksUrVIG8ysn42QbWBrAx8M1HfAVJtaJJyE8oJ1NGM,12043
- reboost/optmap/create.py,sha256=BJuAJ_U9iZ0V5-CCxmrR30JovqUwu_GAWkBSWL-N5bM,15616
- reboost/optmap/evt.py,sha256=RmEOap9gnclgT5O0ls097TO5HDkWXcS9JzY7MqseWbE,4439
- reboost/optmap/mapview.py,sha256=lSB8brbL3rQPik7gCCwkjXXv-qmEOTsVoCPaoWwWwCM,6643
- reboost/optmap/numba_pdg.py,sha256=y8cXR5PWE2Liprp4ou7vl9do76dl84vXU52ZJD9_I7A,731
- reboost/optmap/optmap.py,sha256=NvxIdu28qOSHemfg0lFYF7DcTZAse9mjZQyMq3a3feY,11913
- reboost/shape/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reboost/shape/cluster.py,sha256=T9YoP7Tx0uS1_nhE3PZ80LcutXw-GuWSFMCP_pXYvCA,3029
- reboost/shape/group.py,sha256=CSA2OG4rg5Qwea2qfpMRy5UtDDUFvNknXwfXNJeFn7c,4293
- reboost/shape/reduction.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reboost-0.1.1.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- reboost-0.1.1.dist-info/METADATA,sha256=SrHP-kabqtTe1kk_9-rz8mIU1oQmzMUqHRqSnFu4Olg,43645
- reboost-0.1.1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- reboost-0.1.1.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
- reboost-0.1.1.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
- reboost-0.1.1.dist-info/RECORD,,