reboost 0.5.5-py3-none-any.whl → 0.6.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
reboost/__init__.py CHANGED
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import hdf5plugin
 from lgdo import lh5
 
 from ._version import version as __version__
@@ -10,4 +11,4 @@ __all__ = [
     "build_hit",
 ]
 
-lh5.settings.DEFAULT_HDF5_SETTINGS = {"shuffle": True, "compression": "lzf"}
+lh5.settings.DEFAULT_HDF5_SETTINGS = {"compression": hdf5plugin.Zstd()}
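
Note: importing `hdf5plugin` registers the Zstandard filter with HDF5, so files written with the new default are only readable where `hdf5plugin` is importable. A minimal sketch (not from the package; file and dataset names are illustrative) of what the new default amounts to in plain `h5py`:

import hdf5plugin
import h5py
import numpy as np

with h5py.File("example.h5", "w") as f:
    # hdf5plugin.Zstd() expands to the compression/compression_opts kwargs
    f.create_dataset("data", data=np.arange(1000), **hdf5plugin.Zstd())
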
reboost/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.5.5'
-__version_tuple__ = version_tuple = (0, 5, 5)
+__version__ = version = '0.6.1'
+__version_tuple__ = version_tuple = (0, 6, 1)
reboost/build_evt.py CHANGED
@@ -1,166 +1,134 @@
-"""A program for combining the hits from various detectors, to build events.
+from __future__ import annotations
 
-Is able to parse a config file with the following format:
+import logging
 
-.. code-block:: yaml
+import awkward as ak
+import numpy as np
+from dbetto import AttrsDict
+from lgdo import Array, Table, VectorOfVectors, lh5
 
-    channels:
-      geds_on:
-      - det001
-      - det002
-      geds_ac:
-      - det003
+from . import core, math, shape, utils
+from .shape import group
 
-    outputs:
-    - energy
-    - multiplicity
+log = logging.getLogger(__name__)
 
-    operations:
-      energy_id:
-        channels: geds_on
-        aggregation_mode: gather
-        query: "hit.energy > 25"
-        expression: tcm.channel_id
 
-      energy:
-        aggregation_mode: keep_at_ch:evt.energy_id
-        expression: "hit.energy > 25"
-        channels: geds_on
+def build_evt(
+    tcm: VectorOfVectors,
+    hitfile: str,
+    outfile: str | None,
+    channel_groups: AttrsDict,
+    pars: AttrsDict,
+    run_part: AttrsDict,
+) -> Table | None:
+    """Build events out of a TCM.
 
-      multiplicity:
-        channels: geds_on
-        aggregation_mode: sum
-        expression: "hit.energy > 25"
-        initial: 0
+    Parameters
+    ----------
+    tcm
+        the time coincidence map.
+    hitfile
+        file with the hits.
+    outfile
+        the path to the output file; if `None`, the events are
+        returned in memory.
+    channel_groups
+        a dictionary of groups of channels. For example:
 
+        .. code-block:: python
 
-Must contain:
-- "channels": dictionary of channel groupings
-- "outputs": fields for the output file
-- "operations": operations to perform, see :func:`pygama.evt.build_evt.evaluate_expression` for more details.
-"""
+            {"det1": "on", "det2": "off", "det3": "ac"}
 
-from __future__ import annotations
+    pars
+        A dictionary of parameters. The first key should
+        be the run ID, followed by different sets of parameters
+        arranged in groups. Run numbers should be given in the
+        format `"p00-r001"`, etc.
 
-import logging
+        For example:
 
-import awkward as ak
-import numpy as np
-from lgdo import Table
-from lgdo.lh5 import LH5Iterator, write
-from pygama.evt.build_evt import evaluate_expression
-from pygama.evt.utils import TCMData
+        .. code-block:: python
 
-from . import utils
+            {"p03-r000": {"reso": {"det1": [1, 2], "det2": [0, 1]}}}
 
-log = logging.getLogger(__name__)
+    run_part
+        The run partitioning file giving the number of events
+        for each run. This should be organized as a dictionary
+        with the following format:
 
+        .. code-block:: python
 
-def build_evt(
-    hit_file: str, tcm_file: str, evt_file: str | None, config: dict, buffer: int = int(5e6)
-) -> ak.Array | None:
-    """Generates the event tier from the hit and tcm.
-
-    Parameters
-    ----------
-    hit_file
-        path to the hit tier file
-    tcm_file
-        path to the tcm tier file
-    evt_file
-        path to the evt tier (output) file, if `None` the :class:`Table` is returned in memory
-    config
-        dictionary of the configuration.
-    buffer
-        number of events to process simultaneously
+            {"p03-r000": 1000, "p03-r001": 2000}
 
     Returns
     -------
-    ak.Array of the evt tier data (if the data is not saved to disk)
+    the event file in memory as a table, if no output file is specified.
     """
-    # create the objects needed for evaluate expression
+    tcm_tables = utils.get_table_names(tcm)
+    tcm_ak = tcm.view_as("ak")
+
+    # loop over the runs
+    cum_sum = 0
+    tab = None
+
+    for idx, (run_full, n_event) in enumerate(run_part.items()):
+        period, run = run_full.split("-")
+        pars_tmp = pars[run_full]
 
-    file_info = {
-        "hit": (hit_file, "hit", "det{:03}"),
-        "evt": (evt_file, "evt"),
-    }
+        # create an output table
+        out_tab = Table(size=n_event)
 
-    # iterate through the TCM
+        tcm_tmp = tcm_ak[cum_sum : cum_sum + n_event]
 
-    out_ak = ak.Array([])
-    mode = "overwrite_file"
+        # usabilities
 
-    # get channel groupings
-    channels = {}
-    for group, info in config["channels"].items():
-        if isinstance(info, str):
-            channels[group] = [info]
+        is_off = shape.group.get_isin_group(
+            tcm_tmp.table_key, channel_groups, tcm_tables, group="off"
+        )
+
+        # filter out off channels
+        channels = tcm_tmp.table_key[~is_off]
+        rows = tcm_tmp.row_in_table[~is_off]
+        out_tab.add_field("channel", VectorOfVectors(channels))
+        out_tab.add_field("row_in_table", VectorOfVectors(rows))
+
+        out_tab.add_field("period", Array(np.ones(len(channels)) * int(period[1:])))
+        out_tab.add_field("run", Array(np.ones(len(channels)) * int(run[1:])))
 
-        elif isinstance(info, list):
-            channels[group] = info
+        # now check for channels in ac
+        is_good = group.get_isin_group(channels, channel_groups, tcm_tables, group="on")
 
-    for tcm_lh5 in LH5Iterator(tcm_file, "tcm", buffer_len=buffer):
-        tcm_lh5_sel = tcm_lh5
-        tcm_ak = tcm_lh5_sel.view_as("ak")
+        # get energy
+        energy_true = core.read_data_at_channel_as_ak(
+            channels, rows, hitfile, "energy", "hit", tcm_tables
+        )
 
-        tcm = TCMData(
-            id=np.array(ak.flatten(tcm_ak.array_id)),
-            idx=np.array(ak.flatten(tcm_ak.array_idx)),
-            cumulative_length=np.array(np.cumsum(ak.num(tcm_ak.array_id, axis=-1))),
+        energy = math.stats.apply_energy_resolution(
+            energy_true,
+            channels,
+            tcm_tables,
+            pars_tmp.reso,
+            lambda energy, sig0, sig1: np.sqrt(energy * sig1**2 + sig0**2),
         )
 
-        n_rows = len(tcm.cumulative_length)
-        out_tab = Table(size=n_rows)
+        out_tab.add_field("is_good", VectorOfVectors(is_good[energy > 25]))
 
-        for name, info in config["operations"].items():
-            msg = f"computing field {name}"
-            log.debug(msg)
+        out_tab.add_field("energy", VectorOfVectors(energy[energy > 25]))
+        out_tab.add_field("multiplicity", Array(ak.sum(energy > 25, axis=-1).to_numpy()))
 
-            defaultv = info.get("initial", np.nan)
-            if isinstance(defaultv, str) and (defaultv in ["np.nan", "np.inf", "-np.inf"]):
-                defaultv = eval(defaultv)
+        # write table
+        wo_mode = "of" if idx == 0 else "append"
 
-            channels_use = utils.get_channels_from_groups(info.get("channels", []), channels)
-            channels_exclude = utils.get_channels_from_groups(
-                info.get("exclude_channels", []), channels
-            )
+        # add attrs
+        out_tab.attrs["tables"] = tcm.attrs["tables"]
 
-            if "aggregation_mode" not in info:
-                field = out_tab.eval(
-                    info["expression"].replace("evt.", ""), info.get("parameters", {})
-                )
-            else:
-                field = evaluate_expression(
-                    file_info,
-                    tcm,
-                    channels_use,
-                    table=out_tab,
-                    mode=info["aggregation_mode"],
-                    expr=info["expression"],
-                    query=info.get("query", None),
-                    sorter=info.get("sort", None),
-                    channels_skip=channels_exclude,
-                    default_value=defaultv,
-                    n_rows=n_rows,
-                )
-
-            msg = f"field {field}"
-            log.debug(msg)
-            out_tab.add_field(name, field)
-
-        # remove fields if necessary
-        existing_cols = list(out_tab.keys())
-        for col in existing_cols:
-            if col not in config["outputs"]:
-                out_tab.remove_column(col, delete=True)
-
-        # write
-        if evt_file is not None:
-            write(out_tab, "evt", evt_file, wo_mode=mode)
-            mode = "append"
+        if outfile is not None:
+            lh5.write(out_tab, "evt", outfile, wo_mode=wo_mode)
         else:
-            out_ak = ak.concatenate((out_ak, out_tab.view_as("ak")))
+            tab = (
+                ak.concatenate((tab, out_tab.view_as("ak")))
+                if tab is not None
+                else out_tab.view_as("ak")
+            )
 
-    if evt_file is None:
-        return out_ak
-    return None
+    return Table(tab)
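
For orientation, a hedged sketch of driving the new `build_evt` signature; the file names, channel names, and parameter values below are illustrative, and the TCM is assumed to be stored under `tcm` and readable with `lh5.read`:

from dbetto import AttrsDict
from lgdo import lh5

from reboost.build_evt import build_evt

tcm = lh5.read("tcm", "tcm.lh5")  # TCM with table_key/row_in_table (illustrative path)
evt = build_evt(
    tcm,
    hitfile="hits.lh5",
    outfile=None,  # return the event table in memory
    channel_groups=AttrsDict({"det001": "on", "det002": "off", "det003": "ac"}),
    pars=AttrsDict({"p03-r000": {"reso": {"det001": [1.0, 0.1], "det003": [1.0, 0.1]}}}),
    run_part=AttrsDict({"p03-r000": 1000}),
)
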
reboost/core.py CHANGED
@@ -5,7 +5,9 @@ import time
 from typing import Any
 
 import awkward as ak
+import numpy as np
 from dbetto import AttrsDict
+from lgdo import lh5
 from lgdo.types import LGDO, Table
 
 from . import utils
@@ -14,6 +16,78 @@ from .profile import ProfileDict
 log = logging.getLogger(__name__)
 
 
+def read_data_at_channel_as_ak(
+    channels: ak.Array, rows: ak.Array, file: str, field: str, group: str, tab_map: dict[str, int]
+) -> ak.Array:
+    r"""Read the data from a particular field into an awkward array.
+
+    This replaces the TCM-like object defined by the channels and rows with the
+    corresponding data field.
+
+    Parameters
+    ----------
+    channels
+        Array of the channel indices (uids).
+    rows
+        Array of the rows in the files to gather data from.
+    file
+        File to read the data from.
+    field
+        the field to read.
+    group
+        the group to read data from (e.g. `hit` or `stp`).
+    tab_map
+        mapping between table names and indices, of the form:
+
+        .. code:: python
+
+            {NAME: UID}
+
+        For example:
+
+        .. code:: python
+
+            {"det001": 1, "det002": 2}
+
+    Returns
+    -------
+    an array with the data, of the same shape as the channels and rows.
+    """
+    # initialise the output
+    data_flat = None
+    tcm_rows_full = None
+
+    # save the unflattening
+    reorder = ak.num(rows)
+
+    for tab_name, key in tab_map.items():
+        # get the rows to read
+
+        idx = ak.flatten(rows[channels == key]).to_numpy()
+        arg_idx = np.argsort(idx)
+
+        # get the rows in the flattened data we want to append to
+        tcm_rows = np.where(ak.flatten(channels == key))[0]
+
+        # read the data with sorted idx
+        data_ch = lh5.read(f"{group}/{tab_name}/{field}", file, idx=idx[arg_idx]).view_as("ak")
+
+        # sort back to order for tcm
+        data_ch = data_ch[np.argsort(arg_idx)]
+
+        # append to output
+        data_flat = ak.concatenate((data_flat, data_ch)) if data_flat is not None else data_ch
+        tcm_rows_full = (
+            np.concatenate((tcm_rows_full, tcm_rows)) if tcm_rows_full is not None else tcm_rows
+        )
+
+    if len(data_flat) != len(tcm_rows_full):
+        msg = "every index in the tcm should have been read"
+        raise ValueError(msg)
+
+    # sort the final data
+    data_flat = data_flat[np.argsort(tcm_rows_full)]
+
+    return ak.unflatten(data_flat, reorder)
+
+
 def evaluate_output_column(
     hit_table: Table,
     expression: str,
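
A hedged usage sketch for the gather helper above; the file path and uids are made up, and `hits.lh5` is assumed to hold `hit/det001` and `hit/det002` tables with an `energy` field:

import awkward as ak

from reboost.core import read_data_at_channel_as_ak

# TCM-like input: two events, with hits in det001 (uid 1) and det002 (uid 2)
channels = ak.Array([[1, 2], [2]])
rows = ak.Array([[0, 0], [1]])

energies = read_data_at_channel_as_ak(
    channels, rows, "hits.lh5", field="energy", group="hit",
    tab_map={"det001": 1, "det002": 2},
)
# energies has the same jagged shape as channels/rows
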
reboost/math/stats.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import logging
+from typing import Callable
 
 import awkward as ak
 import numpy as np
@@ -10,7 +11,68 @@ from numpy.typing import ArrayLike
 log = logging.getLogger(__name__)
 
 
-def gaussian_sample(mu: ArrayLike, sigma: ArrayLike | float, *, seed: int = 999) -> Array:
+def get_resolution(
+    energies: ak.Array, channels: ak.Array, tcm_tables: dict, reso_pars: dict, reso_func: Callable
+) -> ak.Array:
+    """Get the resolution for each energy.
+
+    Parameters
+    ----------
+    energies
+        the energies to smear.
+    channels
+        the channel index for each energy.
+    tcm_tables
+        the mapping from indices to channel names.
+    reso_pars
+        the resolution parameters for each channel.
+    reso_func
+        the function to compute the resolution.
+    """
+    n_pars = len(reso_pars[next(iter(reso_pars))])
+
+    pars_shaped = []
+
+    for _ in range(n_pars):
+        pars_shaped.append(np.zeros(len(ak.flatten(channels))))
+
+    num = ak.num(channels, axis=-1)
+
+    for key, value in tcm_tables.items():
+        for i in range(n_pars):
+            pars_shaped[i][ak.flatten(channels) == value] = reso_pars[key][i]
+
+    ch_reso = reso_func(ak.flatten(energies), *pars_shaped)
+    return ak.unflatten(ch_reso, num)
+
+
+def apply_energy_resolution(
+    energies: ak.Array, channels: ak.Array, tcm_tables: dict, reso_pars: dict, reso_func: Callable
+):
+    """Apply the energy resolution sampling to an array with many channels.
+
+    Parameters
+    ----------
+    energies
+        the energies to smear.
+    channels
+        the channel index for each energy.
+    tcm_tables
+        the mapping from indices to channel names.
+    reso_pars
+        the resolution parameters for each channel.
+    reso_func
+        the function to compute the resolution.
+    """
+    num = ak.num(channels, axis=-1)
+
+    ch_reso = get_resolution(energies, channels, tcm_tables, reso_pars, reso_func)
+    energies_flat_smear = gaussian_sample(ak.flatten(energies), ak.flatten(ch_reso))
+
+    return ak.unflatten(energies_flat_smear, num)
+
+
+def gaussian_sample(mu: ArrayLike, sigma: ArrayLike | float, *, seed: int | None = None) -> Array:
     r"""Generate samples from a gaussian.
 
     Based on:
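
To make the expected shapes concrete, a small hedged example (detector names, uids, and resolution parameters are invented); the lambda mirrors the two-parameter form used in `build_evt`:

import awkward as ak
import numpy as np

from reboost.math.stats import apply_energy_resolution

energies = ak.Array([[100.0, 250.0], [1500.0]])  # jagged energies per event
channels = ak.Array([[1, 2], [1]])               # matching channel uids

smeared = apply_energy_resolution(
    energies,
    channels,
    tcm_tables={"det001": 1, "det002": 2},
    reso_pars={"det001": [1.0, 0.05], "det002": [1.2, 0.04]},
    reso_func=lambda energy, sig0, sig1: np.sqrt(energy * sig1**2 + sig0**2),
)
# smeared has the same jagged shape as energies
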
reboost/optmap/cli.py CHANGED
@@ -1,9 +1,9 @@
 from __future__ import annotations
 
 import argparse
-import json
 import logging
-from pathlib import Path
+
+import dbetto
 
 from ..log_utils import setup_log
 from ..utils import _check_input_file, _check_output_file
@@ -35,14 +35,18 @@ def optical_cli() -> None:
 
     subparsers = parser.add_subparsers(dest="command", required=True)
 
-    # STEP 1: build evt file from hit tier
-    evt_parser = subparsers.add_parser("evt", help="build evt file from remage hit file")
-    evt_parser.add_argument(
+    # STEP 1: build evt file from stp tier
+    evt_parser = subparsers.add_parser("evt", help="build optmap-evt file from remage stp file")
+    evt_parser_det_group = evt_parser.add_mutually_exclusive_group(required=True)
+    evt_parser_det_group.add_argument(
+        "--geom",
+        help="GDML geometry file",
+    )
+    evt_parser_det_group.add_argument(
         "--detectors",
-        help="file that contains a list of detector ids that are part of the input file",
-        required=True,
+        help="file with detector ids of all optical channels.",
     )
-    evt_parser.add_argument("input", help="input hit LH5 file", metavar="INPUT_HIT")
+    evt_parser.add_argument("input", help="input stp LH5 file", metavar="INPUT_STP")
    evt_parser.add_argument("output", help="output evt LH5 file", metavar="OUTPUT_EVT")
 
     # STEP 2a: build map file from evt tier
@@ -55,7 +59,20 @@ def optical_cli() -> None:
     )
     map_parser.add_argument(
         "--detectors",
-        help="file that contains a list of detector ids that will be produced as additional output maps.",
+        help=(
+            "file that contains a list of detector ids that will be produced as additional output maps. "
+            + "By default, all channels will be included."
+        ),
+    )
+    map_parser_det_group = map_parser.add_mutually_exclusive_group(required=True)
+    map_parser_det_group.add_argument(
+        "--geom",
+        help="GDML geometry file",
+    )
+    map_parser_det_group.add_argument(
+        "--evt",
+        action="store_true",
+        help="the input file is already an optmap-evt file.",
     )
     map_parser.add_argument(
         "--n-procs",
@@ -69,7 +86,9 @@ def optical_cli() -> None:
         action="store_true",
         help="""Check map statistics after creation. default: %(default)s""",
     )
-    map_parser.add_argument("input", help="input evt LH5 file", metavar="INPUT_EVT", nargs="+")
+    map_parser.add_argument(
+        "input", help="input stp or optmap-evt LH5 file", metavar="INPUT_EVT", nargs="+"
+    )
     map_parser.add_argument("output", help="output map LH5 file", metavar="OUTPUT_MAP")
 
     # STEP 2b: view maps
@@ -171,7 +190,7 @@ def optical_cli() -> None:
     convolve_parser.add_argument(
         "--dist-mode",
         action="store",
-        default="multinomial+no-fano",
+        default="poisson+no-fano",
     )
     convolve_parser.add_argument("--output", help="output hit LH5 file", metavar="OUTPUT_HIT")
 
@@ -188,15 +207,18 @@ def optical_cli() -> None:
 
     # STEP 1: build evt file from hit tier
     if args.command == "evt":
-        from .evt import build_optmap_evt
+        from .evt import build_optmap_evt, get_optical_detectors_from_geom
 
-        _check_input_file(parser, args.detectors)
         _check_input_file(parser, args.input)
         _check_output_file(parser, args.output)
 
-        # load detector ids from a JSON array
-        with Path.open(Path(args.detectors)) as detectors_f:
-            detectors = json.load(detectors_f)
+        # load detector ids from the geometry.
+        if args.geom is not None:
+            _check_input_file(parser, args.geom, "geometry")
+            detectors = get_optical_detectors_from_geom(args.geom)
+        else:
+            _check_input_file(parser, args.detectors, "detectors")
+            detectors = dbetto.utils.load_dict(args.detectors)
 
         build_optmap_evt(args.input, args.output, detectors, args.bufsize)
 
@@ -209,23 +231,23 @@ def optical_cli() -> None:
 
         # load settings for binning from config file.
         _check_input_file(parser, args.input, "settings")
-        with Path.open(Path(args.settings)) as settings_f:
-            settings = json.load(settings_f)
+        settings = dbetto.utils.load_dict(args.settings)
 
-        chfilter = ()
+        chfilter = "*"
         if args.detectors is not None:
-            # load detector ids from a JSON array
-            with Path.open(Path(args.detectors)) as detectors_f:
-                chfilter = json.load(detectors_f)
+            # load detector ids from a JSON/YAML array
+            chfilter = dbetto.utils.load_dict(args.detectors)
 
         create_optical_maps(
             args.input,
             settings,
             args.bufsize,
+            is_stp_file=(not args.evt),
             chfilter=chfilter,
             output_lh5_fn=args.output,
             check_after_create=args.check,
             n_procs=args.n_procs,
+            geom_fn=args.geom,
         )
 
     # STEP 2b: view maps
@@ -251,8 +273,7 @@ def optical_cli() -> None:
 
         # load settings for binning from config file.
         _check_input_file(parser, args.input, "settings")
-        with Path.open(Path(args.settings)) as settings_f:
-            settings = json.load(settings_f)
+        settings = dbetto.utils.load_dict(args.settings)
 
         _check_input_file(parser, args.input)
         _check_output_file(parser, args.output)
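
Illustrative invocations of the reworked `evt` step (assuming the package's `reboost-optical` entry point; file names are made up):

# take the optical channel list from a GDML geometry
reboost-optical evt --geom geometry.gdml input_stp.lh5 output_evt.lh5

# or pass an explicit detector-id list (JSON/YAML, loaded via dbetto)
reboost-optical evt --detectors detectors.yaml input_stp.lh5 output_evt.lh5
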
reboost/optmap/convolve.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import logging
+import re
 
 import legendoptics.scintillate as sc
 import numba
@@ -24,8 +25,8 @@ OPTMAP_SUM_CH = -2
 
 def open_optmap(optmap_fn: str):
     maps = lh5.ls(optmap_fn)
-    # TODO: rewrite logic to only accept _<number> instead of a blacklist
-    det_ntuples = [m for m in maps if m not in ("all", "_hitcounts", "_hitcounts_exp", "all_orig")]
+    # only accept _<number> (/all is read separately)
+    det_ntuples = [m for m in maps if re.match(r"_\d+$", m)]
     detids = np.array([int(m.lstrip("_")) for m in det_ntuples])
     detidx = np.arange(0, detids.shape[0])
 
@@ -53,17 +54,25 @@ def open_optmap(optmap_fn: str):
 
     # give this check some numerical slack.
     if np.any(
-        ow[OPTMAP_SUM_CH][ow[OPTMAP_ANY_CH] >= 0] - ow[OPTMAP_ANY_CH][ow[OPTMAP_ANY_CH] >= 0]
+        np.abs(
+            ow[OPTMAP_SUM_CH][ow[OPTMAP_ANY_CH] >= 0] - ow[OPTMAP_ANY_CH][ow[OPTMAP_ANY_CH] >= 0]
+        )
         < -1e-15
    ):
         msg = "optical map does not fulfill relation sum(p_i) >= p_any"
         raise ValueError(msg)
 
-    # get the exponent from the optical map file
-    optmap_multi_det_exp = lh5.read("/_hitcounts_exp", optmap_fn).value
-    assert isinstance(optmap_multi_det_exp, float)
+    try:
+        # check the exponent from the optical map file
+        optmap_multi_det_exp = lh5.read("/_hitcounts_exp", optmap_fn).value
+        assert isinstance(optmap_multi_det_exp, float)
+        if np.isfinite(optmap_multi_det_exp):
+            msg = f"found finite _hitcounts_exp {optmap_multi_det_exp} which is not supported any more"
+            raise RuntimeError(msg)
+    except KeyError:  # the _hitcounts_exp might not always be present.
+        pass
 
-    return detids, detidx, optmap_edges, ow, optmap_multi_det_exp
+    return detids, detidx, optmap_edges, ow
 
 
 def iterate_stepwise_depositions(
@@ -71,7 +80,7 @@ def iterate_stepwise_depositions(
     optmap_for_convolve,
     scint_mat_params: sc.ComputedScintParams,
     rng: np.random.Generator = None,
-    dist: str = "multinomial",
+    dist: str = "poisson",
     mode: str = "no-fano",
 ):
     # those np functions are not supported by numba, but needed for efficient array access below.
@@ -144,7 +153,6 @@ def _iterate_stepwise_depositions(
     detidx,
     optmap_edges,
     optmap_weights,
-    optmap_multi_det_exp,
     scint_mat_params: sc.ComputedScintParams,
     dist: str,
     mode: str,
@@ -223,8 +231,6 @@ def _iterate_stepwise_depositions(
             # we detect this energy deposition; we should at least get one photon out here!
 
             detsel_size = 1
-            if np.isfinite(optmap_multi_det_exp):
-                detsel_size = rng.geometric(1 - np.exp(-optmap_multi_det_exp))
 
             px_sum = optmap_weights[OPTMAP_SUM_CH, cur_bins[0], cur_bins[1], cur_bins[2]]
             assert px_sum >= 0.0  # should not be negative.
@@ -238,7 +244,7 @@ def _iterate_stepwise_depositions(
                     detp[d] = 0.0
             det_no_stats += had_det_no_stats
 
-            # should be equivalent to rng.choice(detidx, size=(detsel_size, p=detp)
+            # should be equivalent to rng.choice(detidx, size=detsel_size, p=detp)
             detsel = detidx[
                 np.searchsorted(np.cumsum(detp), rng.random(size=(detsel_size,)), side="right")
             ]
@@ -339,7 +345,7 @@ def convolve(
     material: str,
     output_file: str | None = None,
     buffer_len: int = int(1e6),
-    dist_mode: str = "multinomial+no-fano",
+    dist_mode: str = "poisson+no-fano",
 ):
     if material not in ["lar", "pen"]:
         msg = f"unknown material {material} for scintillation"
@@ -356,13 +362,18 @@ def convolve(
         (1 * pint.get_application_registry().ns),  # dummy!
     )
 
-    log.info("opening map %s", map_file)
-    optmap_for_convolve = open_optmap(map_file)
-
     # special handling of distributions and flags.
     dist, mode = dist_mode.split("+")
-    assert dist in ("multinomial", "poisson")
-    assert mode in ("", "no-fano")
+    if (
+        dist not in ("multinomial", "poisson")
+        or mode not in ("", "no-fano")
+        or (dist == "poisson" and mode != "no-fano")
+    ):
+        msg = f"unsupported statistical distribution {dist_mode} for scintillation emission"
+        raise ValueError(msg)
+
+    log.info("opening map %s", map_file)
+    optmap_for_convolve = open_optmap(map_file)
 
     log.info("opening energy deposition hit output %s", edep_file)
     it = LH5Iterator(edep_file, edep_path, buffer_len=buffer_len)
reboost/optmap/create.py CHANGED
@@ -14,7 +14,12 @@ from numba import njit
 from numpy.typing import NDArray
 
 from ..log_utils import setup_log
-from .evt import EVT_TABLE_NAME, read_optmap_evt
+from .evt import (
+    EVT_TABLE_NAME,
+    generate_optmap_evt,
+    get_optical_detectors_from_geom,
+    read_optmap_evt,
+)
 from .optmap import OpticalMap
 
 log = logging.getLogger(__name__)
@@ -109,12 +114,13 @@ def _create_optical_maps_process_init(optmaps, log_level) -> None:
 
 
 def _create_optical_maps_process(
-    optmap_events_fn, buffer_len, all_det_ids, ch_idx_to_map_idx
+    optmap_events_fn, buffer_len, is_stp_file, all_det_ids, ch_idx_to_map_idx
 ) -> None:
     log.info("started worker task for %s", optmap_events_fn)
     x = _create_optical_maps_chunk(
         optmap_events_fn,
         buffer_len,
+        is_stp_file,
         all_det_ids,
         _shared_optmaps,
         ch_idx_to_map_idx,
@@ -124,9 +130,12 @@ def _create_optical_maps_process(
 
 
 def _create_optical_maps_chunk(
-    optmap_events_fn, buffer_len, all_det_ids, optmaps, ch_idx_to_map_idx
+    optmap_events_fn, buffer_len, is_stp_file, all_det_ids, optmaps, ch_idx_to_map_idx
 ) -> None:
-    optmap_events_it = read_optmap_evt(optmap_events_fn, buffer_len)
+    if not is_stp_file:
+        optmap_events_it = read_optmap_evt(optmap_events_fn, buffer_len)
+    else:
+        optmap_events_it = generate_optmap_evt(optmap_events_fn, all_det_ids, buffer_len)
 
     hits_per_primary = np.zeros(10, dtype=np.int64)
     hits_per_primary_len = 0
@@ -156,19 +165,24 @@ def create_optical_maps(
     optmap_events_fn: list[str],
     settings,
     buffer_len: int = int(5e6),
+    is_stp_file: bool = True,
     chfilter: tuple[str | int] | Literal["*"] = (),
     output_lh5_fn: str | None = None,
     after_save: Callable[[int, str, OpticalMap]] | None = None,
     check_after_create: bool = False,
     n_procs: int | None = 1,
+    geom_fn: str | None = None,
 ) -> None:
     """Create optical maps.
 
     Parameters
     ----------
     optmap_events_fn
-        list of filenames to lh5 files with a table ``/optmap_evt`` with columns ``{x,y,z}loc``
-        and one column (with numeric header) for each SiPM channel.
+        list of filenames of lh5 files that can either be stp files from remage or "optmap-evt"
+        files with a table ``/optmap_evt`` with columns ``{x,y,z}loc`` and one column (with numeric
+        header) for each SiPM channel.
+    is_stp_file
+        if true, convert a remage output file (stp file) on the fly to an optmap-evt file.
     chfilter
         tuple of detector ids that will be included in the resulting optmap. Those have to match
         the column names in ``optmap_events_fn``.
@@ -181,9 +195,13 @@ def create_optical_maps(
 
     use_shmem = n_procs is None or n_procs > 1
 
-    optmap_evt_columns = list(
-        lh5.read(EVT_TABLE_NAME, optmap_events_fn[0], start_row=0, n_rows=1).keys()
-    )  # peek into the (first) file to find column names.
+    if not is_stp_file:
+        optmap_evt_columns = list(
+            lh5.read(EVT_TABLE_NAME, optmap_events_fn[0], start_row=0, n_rows=1).keys()
+        )  # peek into the (first) file to find column names.
+    else:
+        optmap_evt_columns = [str(i) for i in get_optical_detectors_from_geom(geom_fn)]
+
     all_det_ids, optmaps, optmap_det_ids = _optmaps_for_channels(
         optmap_evt_columns, settings, chfilter=chfilter, use_shmem=use_shmem
     )
@@ -202,7 +220,9 @@ def create_optical_maps(
     if not use_shmem:
         for fn in optmap_events_fn:
             q.append(
-                _create_optical_maps_chunk(fn, buffer_len, all_det_ids, optmaps, ch_idx_to_map_idx)
+                _create_optical_maps_chunk(
+                    fn, buffer_len, is_stp_file, all_det_ids, optmaps, ch_idx_to_map_idx
+                )
             )
     else:
         ctx = mp.get_context("forkserver")
@@ -222,7 +242,7 @@ def create_optical_maps(
         for fn in optmap_events_fn:
             r = pool.apply_async(
                 _create_optical_maps_process,
-                args=(fn, buffer_len, all_det_ids, ch_idx_to_map_idx),
+                args=(fn, buffer_len, is_stp_file, all_det_ids, ch_idx_to_map_idx),
             )
             pool_results.append((r, fn))
 
@@ -401,6 +421,9 @@ def merge_optical_maps(
     hits_per_primary = np.zeros(10, dtype=np.int64)
     hits_per_primary_len = 0
     for optmap_fn in map_l5_files:
+        if "_hitcounts" not in lh5.ls(optmap_fn):
+            log.warning("skipping _hitcounts calculations, missing in file %s", optmap_fn)
+            return
         hitcounts = lh5.read("/_hitcounts", optmap_fn)
         assert isinstance(hitcounts, Array)
         hits_per_primary[0 : len(hitcounts)] += hitcounts
@@ -417,13 +440,18 @@ def merge_optical_maps(
 def check_optical_map(map_l5_file: str):
     """Run a health check on the map file.
 
-    This checks for consistency, and output details on map statistics.
+    This checks for consistency, and outputs details on map statistics.
     """
-    if lh5.read("_hitcounts_exp", lh5_file=map_l5_file).value != np.inf:
-        log.error("unexpected hitcount exp not equal to positive infinity")
+    if "_hitcounts_exp" not in lh5.ls(map_l5_file):
+        log.info("no _hitcounts_exp found")
+    elif lh5.read("_hitcounts_exp", lh5_file=map_l5_file).value != np.inf:
+        log.error("unexpected _hitcounts_exp not equal to positive infinity")
        return
-    if lh5.read("_hitcounts", lh5_file=map_l5_file).nda.shape != (2,):
-        log.error("unexpected hitcount shape")
+
+    if "_hitcounts" not in lh5.ls(map_l5_file):
+        log.info("no _hitcounts found")
+    elif lh5.read("_hitcounts", lh5_file=map_l5_file).nda.shape != (2,):
+        log.error("unexpected _hitcounts shape")
        return
 
     all_binning = None
@@ -477,4 +505,5 @@ def rebin_optical_maps(map_l5_file: str, output_lh5_file: str, factor: int):
 
     # just copy hitcounts exponent.
     for dset in ("_hitcounts_exp", "_hitcounts"):
-        lh5.write(lh5.read(dset, lh5_file=map_l5_file), dset, lh5_file=output_lh5_file)
+        if dset in lh5.ls(map_l5_file):
+            lh5.write(lh5.read(dset, lh5_file=map_l5_file), dset, lh5_file=output_lh5_file)
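
A hedged sketch of the new stp-file path through `create_optical_maps`; file names are illustrative and the binning-settings schema is assumed to match what the CLI loads:

import dbetto

from reboost.optmap.create import create_optical_maps

# binning settings as loaded by the CLI; the schema comes from the optmap module
settings = dbetto.utils.load_dict("optmap-settings.yaml")

create_optical_maps(
    ["sim_stp.lh5"],          # remage stp files, converted on the fly
    settings,
    is_stp_file=True,         # new in 0.6: read stp input directly
    geom_fn="geometry.gdml",  # optical channel list taken from the geometry
    output_lh5_fn="optmap.lh5",
)
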
reboost/optmap/evt.py CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import logging
-from collections.abc import Iterable
+from collections.abc import Generator, Iterable
 from pathlib import Path
 
 import numpy as np
@@ -14,20 +14,18 @@ log = logging.getLogger(__name__)
 EVT_TABLE_NAME = "optmap_evt"
 
 
-def build_optmap_evt(
-    lh5_in_file: str, lh5_out_file: str, detectors: Iterable[str | int], buffer_len: int = int(5e6)
-) -> None:
+def generate_optmap_evt(
+    lh5_in_file: str, detectors: Iterable[str | int], buffer_len: int = int(5e6)
+) -> Generator[Table, None, None]:
     """Create a faster map for lookup of the hits in each detector, for each primary event."""
     log.info("reading file %s", lh5_in_file)
 
-    lh5_out_file = Path(lh5_out_file)
-    lh5_out_file_tmp = lh5_out_file.with_stem(".evt-tmp." + lh5_out_file.stem)
-    if lh5_out_file_tmp.exists():
-        msg = f"temporary output file {lh5_out_file_tmp} already exists"
-        raise RuntimeError(msg)
     vert_it = LH5Iterator(lh5_in_file, "vtx", buffer_len=buffer_len)
     opti_it = LH5Iterator(lh5_in_file, "stp/optical", buffer_len=buffer_len)
 
+    if len(detectors) == 0:
+        msg = "detector array cannot be empty for optmap-evt building"
+        raise ValueError(msg)
     detectors = [str(d) for d in detectors]
     for d in detectors:
         if not d.isnumeric():
@@ -35,11 +33,11 @@ def build_optmap_evt(
 
     vert_df = None
     vert_df_bounds = None
-    vert_it_count = 0
     hits_expected = 0
+    had_last_chunk = False
 
-    def _store_vert_df():
-        nonlocal vert_df
+    def _store_vert_df(last_chunk: bool) -> Generator[Table, None, None]:
+        nonlocal vert_df, had_last_chunk
         if vert_df is None:
             return
 
@@ -49,8 +47,8 @@ def build_optmap_evt(
            hits_sum += np.sum(vert_df[d])
        assert hits_sum == hits_expected
 
-        log.info("store evt file %s (%d)", lh5_out_file_tmp, vert_it_count - 1)
-        lh5.write(Table(vert_df), name=EVT_TABLE_NAME, lh5_file=lh5_out_file_tmp, wo_mode="append")
+        yield Table(vert_df)
+        had_last_chunk = last_chunk
         vert_df = None
 
     # helper function for "windowed join". while iterating the optical hits, we have to
@@ -59,8 +57,8 @@ def build_optmap_evt(
     # This function follows the assumption, that the output event ids are at least "somewhat"
     # monotonic, i.e. later chunks do not contain lower evtids than the previous chunk(s).
     # Going back is not implemented.
-    def _ensure_vert_df(vert_it: LH5Iterator, evtid: int) -> None:
-        nonlocal vert_df, vert_df_bounds, vert_it_count, hits_expected
+    def _ensure_vert_df(vert_it: LH5Iterator, evtid: int) -> Generator[Table, None, None]:
+        nonlocal vert_df, vert_df_bounds, hits_expected
 
         # skipping multiple chunks is possible in sparsely populated simulations.
         while vert_df_bounds is None or evtid > vert_df_bounds[1] or evtid < vert_df_bounds[0]:
@@ -74,9 +72,8 @@ def build_optmap_evt(
            # here, evtid > vert_df_bounds[1] (or vert_df_bounds is still None). We need to fetch
            # the next event table chunk.
 
-            vert_it_count += 1
            # we might have filled a dataframe, save it to disk.
-            _store_vert_df()
+            yield from _store_vert_df(last_chunk=False)
 
            # read the next vertex chunk into memory.
            vert_df = next(vert_it).view_as("pd")
@@ -98,11 +95,28 @@ def build_optmap_evt(
        log.info("build evt table (%d)", opti_it_count)
 
        for t in opti_df[["evtid", "det_uid"]].itertuples(name=None, index=False):
-            _ensure_vert_df(vert_it, t[0])
+            yield from _ensure_vert_df(vert_it, t[0])
            vert_df.loc[t[0], str(t[1])] += 1
            hits_expected += 1
 
-    _store_vert_df()  # store the last chunk.
+    yield from _store_vert_df(last_chunk=True)  # store the last chunk.
+
+    assert had_last_chunk, "did not reach last chunk in optmap-evt building"
+
+
+def build_optmap_evt(
+    lh5_in_file: str, lh5_out_file: str, detectors: Iterable[str | int], buffer_len: int = int(5e6)
+) -> None:
+    """Create a faster map for lookup of the hits in each detector, for each primary event."""
+    lh5_out_file = Path(lh5_out_file)
+    lh5_out_file_tmp = lh5_out_file.with_stem(".evt-tmp." + lh5_out_file.stem)
+    if lh5_out_file_tmp.exists():
+        msg = f"temporary output file {lh5_out_file_tmp} already exists"
+        raise RuntimeError(msg)
+
+    for vert_it_count, chunk in enumerate(generate_optmap_evt(lh5_in_file, detectors, buffer_len)):
+        log.info("store evt file %s (%d)", lh5_out_file_tmp, vert_it_count - 1)
+        lh5.write(Table(chunk), name=EVT_TABLE_NAME, lh5_file=lh5_out_file_tmp, wo_mode="append")
 
     # after finishing the output file, rename to the actual output file name.
     if lh5_out_file.exists():
@@ -111,5 +125,14 @@ def build_optmap_evt(
     lh5_out_file_tmp.rename(lh5_out_file)
 
 
+def get_optical_detectors_from_geom(geom_fn) -> list[int]:
+    import pyg4ometry
+    import pygeomtools
+
+    geom_registry = pyg4ometry.gdml.Reader(geom_fn).getRegistry()
+    detectors = pygeomtools.get_all_sensvols(geom_registry)
+    return [d.uid for d in detectors.values() if d.detector_type == "optical"]
+
+
 def read_optmap_evt(lh5_file: str, buffer_len: int = int(5e6)) -> LH5Iterator:
     return LH5Iterator(lh5_file, EVT_TABLE_NAME, buffer_len=buffer_len)
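
The generator refactor means optmap-evt chunks can also be consumed in memory; a hedged sketch (file name and detector ids are illustrative):

from reboost.optmap.evt import generate_optmap_evt

# lazily iterate optmap-evt chunks instead of writing them to disk
for chunk in generate_optmap_evt("sim_stp.lh5", detectors=[1, 2, 3]):
    df = chunk.view_as("pd")  # each chunk is an lgdo Table
    print(len(df), "events in this chunk")
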
reboost/shape/group.py CHANGED
@@ -4,11 +4,49 @@ import logging
 
 import awkward as ak
 import numpy as np
+from dbetto import AttrsDict
 from lgdo import Table, VectorOfVectors
+from numpy.typing import ArrayLike
 
 log = logging.getLogger(__name__)
 
 
+def isin(channels: ak.Array, chan_list: list):
+    """Check if each element of the awkward array channels is in the channel list."""
+    num_channels = ak.num(channels, axis=-1)
+    channels_flat = ak.flatten(channels)
+    isin = np.isin(channels_flat, chan_list)
+
+    # unflatten
+    return ak.unflatten(isin, num_channels)
+
+
+def get_isin_group(
+    channels: ArrayLike, groups: AttrsDict, tcm_tables: dict, group: str = "off"
+) -> ak.Array:
+    """For each channel check if it is in the group.
+
+    Parameters
+    ----------
+    channels
+        Array of the channel indices.
+    groups
+        A mapping of the group for every channel name.
+    tcm_tables
+        the mapping of indices to table names.
+    group
+        the group to select.
+
+    Returns
+    -------
+    an awkward array of booleans with the same shape as channels.
+    """
+    usability = {uid: groups[name] for name, uid in tcm_tables.items()}
+    group_idx = [key for key, item in usability.items() if item == group]
+
+    return isin(channels, group_idx)
+
+
 def _sort_data(obj: ak.Array, *, time_name: str = "time", evtid_name: str = "evtid") -> ak.Array:
     """Sort the data by evtid then time.
reboost/utils.py CHANGED
@@ -18,6 +18,16 @@ from .profile import ProfileDict
 log = logging.getLogger(__name__)
 
 
+def get_table_names(tcm: VectorOfVectors) -> dict:
+    """Extract table names from tcm.attrs['tables'] and return them as a dictionary."""
+    raw = tcm.attrs["tables"]
+    cleaned = raw.strip("[]").replace(" ", "").replace("'", "")
+    tables = cleaned.split(",")
+    tables = [tab.split("/")[-1] for tab in tables]
+
+    return {name: idx for idx, name in enumerate(tables)}
+
+
 def get_wo_mode(
     group: int, out_det: int, in_det: int, chunk: int, new_hit_file: bool, overwrite: bool = False
 ) -> str:
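
An illustrative round trip for the new helper, assuming the TCM's `tables` attribute is a stringified list of LH5 paths (as propagated by `build_evt` above):

from reboost.utils import get_table_names

# minimal stand-in with the attrs layout that get_table_names parses
class FakeTCM:
    attrs = {"tables": "['stp/det001', 'stp/det002']"}

print(get_table_names(FakeTCM()))  # {'det001': 0, 'det002': 1}
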
reboost-0.5.5.dist-info/METADATA → reboost-0.6.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: reboost
-Version: 0.5.5
+Version: 0.6.1
 Summary: New LEGEND Monte-Carlo simulation post-processing
 Author-email: Manuel Huber <info@manuelhu.de>, Toby Dixon <toby.dixon.23@ucl.ac.uk>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
@@ -696,12 +696,14 @@ Classifier: Topic :: Scientific/Engineering
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
+Requires-Dist: hdf5plugin
 Requires-Dist: colorlog
 Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: numba
 Requires-Dist: legend-pydataobj>=1.14
 Requires-Dist: legend-pygeom-optics>=0.9.2
+Requires-Dist: legend-pygeom-tools>=0.0.11
 Requires-Dist: hist
 Requires-Dist: dbetto
 Requires-Dist: particle
@@ -721,8 +723,6 @@ Requires-Dist: pre-commit; extra == "test"
 Requires-Dist: pytest>=6.0; extra == "test"
 Requires-Dist: pytest-cov; extra == "test"
 Requires-Dist: legend-pygeom-hpges; extra == "test"
-Requires-Dist: legend-pygeom-tools; extra == "test"
-Requires-Dist: pyg4ometry; extra == "test"
 Requires-Dist: pylegendtestdata>=0.6; extra == "test"
 Dynamic: license-file
 
reboost-0.5.5.dist-info/RECORD → reboost-0.6.1.dist-info/RECORD RENAMED
@@ -1,37 +1,37 @@
-reboost/__init__.py,sha256=3cYLf7XEyFRX5GK8f50gY4ecGR5O5HORITpDthOFpOg,265
-reboost/_version.py,sha256=tKngLgUb_iFMdwvy40ZSx3eTf2ZsHvVnKan3fsexL-g,511
-reboost/build_evt.py,sha256=yH0bf4bwbp4feWV3JgvSAD5RcvhOX6c9PhH8FAe3Xv4,4710
+reboost/__init__.py,sha256=VZz9uo7i2jgAx8Zi15SptLZnE_qcnGuNWwqkD3rYHFA,278
+reboost/_version.py,sha256=a3_WODLDfpmAw3pMw7qGqmRuXHTCC3STyQd2R1iEOgA,511
+reboost/build_evt.py,sha256=VXIfK_pfe_Cgym6gI8dESwONZi-v_4fll0Pn09vePQY,3767
 reboost/build_glm.py,sha256=IerSLQfe51ZO7CQP2kmfPnOIVaDtcfw3byOM02Vaz6o,9472
 reboost/build_hit.py,sha256=23JL5B7qThdHZqAK_HWoytqcEOWDhGsk4n5UMtojJ1c,15513
 reboost/cli.py,sha256=HZgqUZK0tSmnlGqoXjrbmLitW_i001TzibxvDrRxLLg,6324
-reboost/core.py,sha256=pUco_IaTKf50PTVrtyFwoYveJVS58mqs9P3TUrtEyjs,12827
+reboost/core.py,sha256=WGbWe2rcfMDEaehVyw7peqAHoTFWoCu5J6CdWHC5aWA,14974
 reboost/iterator.py,sha256=fATFDxu2PUc0e48OdJJujZo2kwykfRLH1oBtcB-s5pM,6905
 reboost/log_utils.py,sha256=VqS_9OC5NeNU3jcowVOBB0NJ6ssYvNWnirEY-JVduEA,766
 reboost/profile.py,sha256=EOTmjmS8Rm_nYgBWNh6Rntl2XDsxdyed7yEdWtsZEeg,2598
 reboost/units.py,sha256=3EH8XlpbsObdu5vLgxhm1600L6UNYD5jng4SjJT_1QE,2202
-reboost/utils.py,sha256=FXHX7n4YnaYK-MSUDfe9ssEewHjcajkjDb9zeqCzopU,10560
+reboost/utils.py,sha256=WT2jlyRT3LEMBEBRJtYIat3KIIZKocv4rHlTViBbhYM,10938
 reboost/hpge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/hpge/psd.py,sha256=jAUAoQ_PMz76wyA1NXYHNKtOwoCnRT3My8_LCFrKi-U,13860
 reboost/hpge/surface.py,sha256=lbWcFnFFWKxtFKs755GyM9US_IfyxaoM6MpOIZgIMM0,7478
 reboost/hpge/utils.py,sha256=0Rx4HubCOm8JMECjWcAJXfAch9OkSlRpUkdsSlzwZ2E,2830
 reboost/math/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/math/functions.py,sha256=OymiYTcA0NXxxm-MBDw5kqyNwHoLCmuv4J48AwnSrbU,5633
-reboost/math/stats.py,sha256=iiOEi87x93kqPWeSmlRiA5Oe-R8XR-plm6Z532PhC9M,1401
+reboost/math/stats.py,sha256=cG-6mQx33Dzpv3ABkHLEwC104WJ_PMgbWtmjg37SBj4,3164
 reboost/optmap/__init__.py,sha256=imvuyld-GLw8qdwqW-lXCg2feptcTyQo3wIzPvDHwmY,93
-reboost/optmap/cli.py,sha256=SzbPRgsbR5Llm3aSJubH02Ym8FQyTH7kvuLjK7faLiY,9572
-reboost/optmap/convolve.py,sha256=_volpLmhW5mOPA0KkzXRyHyqkj4_zDSnvfHv1Dtuxm8,14390
-reboost/optmap/create.py,sha256=B-MWurmnzl4Y62N2Pj7IwM1IaEEt2fydpZa_t0gmsxo,17048
-reboost/optmap/evt.py,sha256=UYESkMAwDbE_ap4Jb-a2n0uWxHRnYmHzQiXh0vexaPQ,4513
+reboost/optmap/cli.py,sha256=N8J2Hd8m_csYU9CtpAp7Rc3LHy6eNzZ26gWZgHCiUso,10250
+reboost/optmap/convolve.py,sha256=x7boLDcBJIsontoB0yemvzHSE2hlRpUomlDXc3jqdr4,14668
+reboost/optmap/create.py,sha256=_6GZbdRvmjDFs6DDbWC-THZxaNPUiLAOIDNaigMKJSQ,18139
+reboost/optmap/evt.py,sha256=UrjjNNeS7Uie4Ah9y_f5PyroFutLGo5aOFcwReOEy7o,5556
 reboost/optmap/mapview.py,sha256=73kpe0_SKDj9bIhEx1ybX1sBP8TyvufiLfps84A_ijA,6798
 reboost/optmap/numba_pdg.py,sha256=y8cXR5PWE2Liprp4ou7vl9do76dl84vXU52ZJD9_I7A,731
 reboost/optmap/optmap.py,sha256=j4rfbQ84PYSpE-BvP4Rdt96ZjPdwy8P4e4eZz1mATys,12817
 reboost/shape/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reboost/shape/cluster.py,sha256=RIvBlhHzp88aaUZGofp5SD9bimnoiqIOddhQ84jiwoM,8135
-reboost/shape/group.py,sha256=_z2qCOret3E-kj-nrp1-J5j2lEwQpgfYdQp2pgpDHR8,4449
+reboost/shape/group.py,sha256=gOCYgir2gZqmW1JXtbNRPlQqP0gmUcbe7RVb9CbY1pU,5540
 reboost/shape/reduction.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reboost-0.5.5.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-reboost-0.5.5.dist-info/METADATA,sha256=BgZAUrcCJTX0ZHssvfS3w-uFe7Zg9Gt5vJUdMNOp8pE,44248
-reboost-0.5.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-reboost-0.5.5.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
-reboost-0.5.5.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
-reboost-0.5.5.dist-info/RECORD,,
+reboost-0.6.1.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+reboost-0.6.1.dist-info/METADATA,sha256=T1TyULoQ01tNoZcuF1tfWdmG2WsV53DQuKCldpPEHRw,44222
+reboost-0.6.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+reboost-0.6.1.dist-info/entry_points.txt,sha256=DxhD6BidSWNot9BrejHJjQ7RRLmrMaBIl52T75oWTwM,93
+reboost-0.6.1.dist-info/top_level.txt,sha256=q-IBsDepaY_AbzbRmQoW8EZrITXRVawVnNrB-_zyXZs,8
+reboost-0.6.1.dist-info/RECORD,,