legend-dataflow-scripts 0.1.3__tar.gz → 0.1.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {legend_dataflow_scripts-0.1.3/src/legend_dataflow_scripts.egg-info → legend_dataflow_scripts-0.1.5}/PKG-INFO +1 -1
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5/src/legend_dataflow_scripts.egg-info}/PKG-INFO +1 -1
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/_version.py +2 -2
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/eopt.py +4 -1
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/aoe.py +1 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/ecal.py +36 -11
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/lq.py +4 -3
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/qc.py +23 -12
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/alias_table.py +5 -3
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/convert_np.py +1 -1
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/workflow/__init__.py +2 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/LICENSE +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/README.md +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/pyproject.toml +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/setup.cfg +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legend_dataflow_scripts.egg-info/SOURCES.txt +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legend_dataflow_scripts.egg-info/dependency_links.txt +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legend_dataflow_scripts.egg-info/entry_points.txt +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legend_dataflow_scripts.egg-info/not-zip-safe +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legend_dataflow_scripts.egg-info/requires.txt +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legend_dataflow_scripts.egg-info/top_level.txt +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/dplms.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/evtsel.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/nopt.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/pz.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/svm.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/svm_build.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/tier/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/tier/dsp.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/tier/hit.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/__init__.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/cfgtools.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/log.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/plot_dict.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/pulser_removal.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/workflow/execenv.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/workflow/filedb.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/workflow/pre_compile_catalog.py +0 -0
- {legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/workflow/utils.py +0 -0
{legend_dataflow_scripts-0.1.3/src/legend_dataflow_scripts.egg-info → legend_dataflow_scripts-0.1.5}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: legend-dataflow-scripts
-Version: 0.1.3
+Version: 0.1.5
 Summary: Python package for the processing scripts for LEGEND-200 data
 Author-email: George Marshall <ggmarsh@uw.edu>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5/src/legend_dataflow_scripts.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: legend-dataflow-scripts
-Version: 0.1.3
+Version: 0.1.5
 Summary: Python package for the processing scripts for LEGEND-200 data
 Author-email: George Marshall <ggmarsh@uw.edu>, Luigi Pertoldi <gipert@pm.me>
 Maintainer: The LEGEND Collaboration
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/dsp/eopt.py

@@ -23,7 +23,10 @@ from pygama.pargen.dsp_optimize import (
 from ....utils import build_log
 
 warnings.filterwarnings(action="ignore", category=RuntimeWarning)
-warnings.filterwarnings(action="ignore", category=np.RankWarning)
+try:
+    warnings.filterwarnings(action="ignore", category=np.exceptions.RankWarning)
+except AttributeError:  # np < 2
+    warnings.filterwarnings(action="ignore", category=np.RankWarning)
 
 
 def par_geds_dsp_eopt() -> None:
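Note: this is the standard NumPy 1.x/2.x shim; NumPy >= 2 exposes the warning only as np.exceptions.RankWarning and drops the top-level np.RankWarning alias. A minimal standalone sketch of the pattern (illustrative only, not package code; the same shim appears in ecal.py below):

```python
# Sketch of the RankWarning filter shim; nothing here is package code.
import warnings

import numpy as np

try:
    rank_warning = np.exceptions.RankWarning  # NumPy >= 1.25, including 2.x
except AttributeError:  # older NumPy only has the top-level alias
    rank_warning = np.RankWarning

warnings.filterwarnings(action="ignore", category=rank_warning)

# RankWarning is what np.polyfit emits for a rank-deficient fit, e.g.:
np.polyfit([1.0, 1.0, 1.0], [1.0, 2.0, 3.0], deg=2)  # silently ignored now
```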
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/ecal.py

@@ -33,7 +33,10 @@ mpl.use("agg")
 sto = lh5.LH5Store()
 
 warnings.filterwarnings(action="ignore", category=RuntimeWarning)
-warnings.filterwarnings(action="ignore", category=np.RankWarning)
+try:
+    warnings.filterwarnings(action="ignore", category=np.exceptions.RankWarning)
+except AttributeError:  # np < 2
+    warnings.filterwarnings(action="ignore", category=np.RankWarning)
 
 
 def plot_2614_timemap(
@@ -485,7 +488,7 @@ def par_geds_hit_ecal() -> None:
         msg = "invalid tier"
         raise ValueError(msg)
 
-    build_log(config_dict, args.log)
+    log = build_log(config_dict, args.log)
 
     chmap = LegendMetadata(args.metadata).channelmap(
         args.timestamp, system=args.datatype
@@ -575,14 +578,23 @@ def par_geds_hit_ecal() -> None:
     ):
         e_uncal = data.query(selection_string)[energy_param].to_numpy()
 
-        hist, bins, _ = pgh.get_hist(
-            e_uncal[
-                (e_uncal > np.nanpercentile(e_uncal, 95))
-                & (e_uncal < np.nanpercentile(e_uncal, 99.9))
-            ],
-            dx=1,
-            range=[np.nanpercentile(e_uncal, 95), np.nanpercentile(e_uncal, 99.9)],
-        )
+        if len(e_uncal) > 0:
+            if isinstance(e_uncal[0], np.ndarray | list):
+                e_uncal = np.concatenate([arr for arr in e_uncal if len(arr) > 0])
+            hist, bins, _ = pgh.get_hist(
+                e_uncal[
+                    (e_uncal > np.nanpercentile(e_uncal, 95))
+                    & (e_uncal < np.nanpercentile(e_uncal, 99.9))
+                ],
+                dx=1,
+                range=[
+                    np.nanpercentile(e_uncal, 95),
+                    np.nanpercentile(e_uncal, 99.9),
+                ],
+            )
+        else:
+            msg = f"e_uncal should not be empty! energy_param: {energy_param}"
+            raise ValueError(msg)
 
         guess = 2614.511 / bins[np.nanargmax(hist)]
         full_object_dict[cal_energy_param] = HPGeCalibration(
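Note: the new branch flattens a possibly ragged array-of-arrays energy column and refuses to histogram an empty selection. A self-contained sketch of the same logic on invented toy data, with np.histogram standing in for pgh.get_hist:

```python
import numpy as np

# Toy stand-in for the uncalibrated energy column: a ragged array-of-arrays
# with one empty entry, mimicking a per-event multi-value column.
rng = np.random.default_rng(1)
e_uncal = np.array(
    [rng.uniform(0, 9000, size=950), np.array([]), rng.normal(10000, 10, size=50)],
    dtype=object,
)

if len(e_uncal) == 0:
    raise ValueError("e_uncal should not be empty!")

# Flatten ragged entries, dropping the empty ones, before histogramming.
if isinstance(e_uncal[0], np.ndarray | list):
    e_uncal = np.concatenate([arr for arr in e_uncal if len(arr) > 0])

# Histogram only the 95th to 99.9th percentile window, where the 2614.5 keV
# line is expected to sit in uncalibrated units.
lo, hi = np.nanpercentile(e_uncal, 95), np.nanpercentile(e_uncal, 99.9)
hist, bins = np.histogram(
    e_uncal[(e_uncal > lo) & (e_uncal < hi)], bins=max(int(hi - lo), 1), range=(lo, hi)
)
guess = 2614.511 / bins[np.argmax(hist)]  # first guess at the keV-per-ADU scale
print(f"rough calibration guess: {guess:.4f} keV/ADU")
```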
@@ -644,8 +656,21 @@ def par_geds_hit_ecal() -> None:
             interp_energy_kev={"Qbb": 2039.0},
         )
 
+        energy = data[energy_param].to_numpy()
+        if isinstance(energy[0], np.ndarray | list):
+            energy = np.concatenate(energy)
+
+        if len(energy) < len(data):
+            log.warning("len(energy) and len(data) are not the same")
+            energy = np.pad(
+                energy, (0, len(data) - len(energy)), constant_values=np.nan
+            )
+
+        if len(data) < len(energy):
+            energy = energy[: len(data)]
+
         data[cal_energy_param] = nb_poly(
-            data[energy_param].to_numpy(), full_object_dict[cal_energy_param].pars
+            energy, full_object_dict[cal_energy_param].pars
         )
 
         results_dict[cal_energy_param] = get_results_dict(
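Note: the added block keeps the calibrated energy column the same length as the table, padding with NaN (and logging a warning) or truncating when the flattened energies and the DataFrame disagree. A sketch with invented column names and coefficients, using np.polynomial.polynomial.polyval in place of pygama's nb_poly:

```python
import numpy as np
import pandas as pd

# Invented example: three events, the middle one with no reconstructed energy,
# so the flattened energy array ends up shorter than the DataFrame.
data = pd.DataFrame(
    {"trapEmax": [np.array([4012.7]), np.array([]), np.array([9987.1])]}
)
pars = np.array([0.0, 0.2614])  # made-up [offset, slope] calibration coefficients

energy = data["trapEmax"].to_numpy()
if isinstance(energy[0], np.ndarray | list):
    energy = np.concatenate(energy)

# Pad with NaN (or truncate) so the calibrated column matches the table length;
# the patch above logs a warning here, since row alignment is no longer exact.
if len(energy) < len(data):
    energy = np.pad(energy, (0, len(data) - len(energy)), constant_values=np.nan)
if len(data) < len(energy):
    energy = energy[: len(data)]

# polyval plays the role of nb_poly here: evaluate the calibration polynomial.
data["trapEmax_cal"] = np.polynomial.polynomial.polyval(energy, pars)
print(data["trapEmax_cal"].to_numpy())  # roughly [1048.9, 2610.6, nan]
```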
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/lq.py

@@ -295,7 +295,7 @@ def par_geds_hit_lq() -> None:
 
         data["run_timestamp"] = args.timestamp
 
-        out_dicts, plot_dicts, lq_dict = run_lq_calibration(
+        out_dicts, results_dicts, plot_dicts, lq_dict = run_lq_calibration(
             data,
             cal_dicts={args.timestamp: cal_dict},
             results_dicts={args.timestamp: eres_dict},
@@ -305,12 +305,13 @@ def par_geds_hit_lq() -> None:
             debug_mode=args.debug,
         )
         cal_dict = out_dicts[args.timestamp]
-
+        results_dict = results_dicts[args.timestamp]
         plot_dict = plot_dicts[args.timestamp]
         lq = lq_dict[args.timestamp]
 
     else:
         lq = None
+        results_dict = {}
 
     if args.plot_file:
         Path(args.plot_file).parent.mkdir(parents=True, exist_ok=True)
@@ -320,7 +321,7 @@ def par_geds_hit_lq() -> None:
     final_hit_dict = convert_dict_np_to_float(
         {
             "pars": {"operations": cal_dict},
-            "results": dict(**ecal_dict["results"], lq=
+            "results": dict(**ecal_dict["results"], lq=results_dict),
         }
     )
     Path(args.hit_pars).parent.mkdir(parents=True, exist_ok=True)
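Note: the LQ calibration results are now propagated into the hit pars file under results/lq instead of being dropped. A toy sketch of the resulting structure (all keys and values invented; only the dict(**..., lq=...) merge mirrors the code above):

```python
# Invented placeholder dictionaries; only the merging pattern is the point here.
ecal_dict = {"results": {"ecal": {"eres_linear": {"Qbb_fwhm_in_kev": 2.5}}}}
results_dict = {"cal_energy_param": "cuspEmax_ctc_cal", "sf": 0.98}  # from run_lq_calibration
cal_dict = {"LQ_Classifier": {"expression": "lq / dt_eff", "parameters": {}}}

final_hit_dict = {
    "pars": {"operations": cal_dict},
    "results": dict(**ecal_dict["results"], lq=results_dict),
}
print(sorted(final_hit_dict["results"]))  # ['ecal', 'lq']
```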
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/par/geds/hit/qc.py

@@ -129,6 +129,7 @@ def build_qc(
     else:
         hit_dict_fft = {}
         plot_dict_fft = {}
+        fft_data = None
 
     if overwrite is not None:
         for name in kwarg_dict_fft["cut_parameters"]:
@@ -234,7 +235,7 @@ def build_qc(
             exp = info["expression"]
             for key in info.get("parameters", None):
                 exp = re.sub(f"(?<![a-zA-Z0-9]){key}(?![a-zA-Z0-9])", f"@{key}", exp)
-            if outname not in fft_data:
+            if fft_data is not None and outname not in fft_data:
                 fft_data[outname] = fft_data.eval(
                     exp, local_dict=info.get("parameters", None)
                 )
@@ -251,24 +252,34 @@ def build_qc(
                 ((sf_cal) * (1 - sf_cal))
                 / len(data.query("~is_pulser & ~is_recovering"))
             )
-            sf_fft = len(fft_data.query(f"{entry} & ~is_recovering")) / len(
-                fft_data.query("~is_recovering")
-            )
-            sf_fft_err = 100 * np.sqrt(
-                ((sf_fft) * (1 - sf_fft)) / len(fft_data.query("~is_recovering"))
-            )
             sf_cal *= 100
-            sf_fft *= 100
-
-            msg = f"{entry} cut applied: {sf_cal:.2f}% of events passed the cut for cal data, {sf_fft:.2f}% for fft data"
+            msg = f"{entry} cut applied: {sf_cal:.2f}% of events passed the cut for cal data"
             log.info(msg)
+
             qc_results[entry] = {
                 "sf_cal": sf_cal,
                 "sf_cal_err": sf_cal_err,
-                "sf_fft": sf_fft,
-                "sf_fft_err": sf_fft_err,
             }
 
+            if fft_data is not None:
+                sf_fft = len(fft_data.query(f"{entry} & ~is_recovering")) / len(
+                    fft_data.query("~is_recovering")
+                )
+                sf_fft_err = 100 * np.sqrt(
+                    ((sf_fft) * (1 - sf_fft)) / len(fft_data.query("~is_recovering"))
+                )
+                sf_fft *= 100
+                msg = f"{entry} cut applied: {sf_fft:.2f}% of events passed the cut for fft data"
+
+                log.info(msg)
+
+                qc_results[entry].update(
+                    {
+                        "sf_fft": sf_fft,
+                        "sf_fft_err": sf_fft_err,
+                    }
+                )
+
     out_dict = convert_dict_np_to_float(
         {"operations": hit_dict, "results": {"qc": qc_results}}
     )
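Note: the survival fractions use the usual binomial error, 100 * sqrt(sf * (1 - sf) / N), and the fft numbers are now computed only when a forced-trigger dataset exists. A toy sketch of that pattern with invented column and cut names:

```python
import numpy as np
import pandas as pd


def survival_fraction(df: pd.DataFrame, cut: str) -> tuple[float, float]:
    """Percentage of non-recovering events passing `cut`, with a binomial error."""
    n = len(df.query("~is_recovering"))
    sf = len(df.query(f"{cut} & ~is_recovering")) / n
    return 100 * sf, 100 * np.sqrt(sf * (1 - sf) / n)


rng = np.random.default_rng(0)
cal_data = pd.DataFrame(
    {"bl_mean_cut": rng.random(1000) > 0.05, "is_recovering": False}
)
fft_data = None  # no forced-trigger (fft) run available in this toy example

qc_results = {}
sf_cal, sf_cal_err = survival_fraction(cal_data, "bl_mean_cut")
qc_results["bl_mean_cut"] = {"sf_cal": sf_cal, "sf_cal_err": sf_cal_err}

# Only compute and attach the fft numbers when fft_data actually exists,
# mirroring the `if fft_data is not None` guard added above.
if fft_data is not None:
    sf_fft, sf_fft_err = survival_fraction(fft_data, "bl_mean_cut")
    qc_results["bl_mean_cut"].update({"sf_fft": sf_fft, "sf_fft_err": sf_fft_err})

print(qc_results)
```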
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/alias_table.py

@@ -1,11 +1,12 @@
 from __future__ import annotations
 
 import json
+from pathlib import Path
 
 import h5py
 
 
-def alias_table(file, mapping):
+def alias_table(file: str | Path, mapping: str):
     """
     Create an alias table for the given file and mapping.
 
@@ -13,11 +14,12 @@ def alias_table(file, mapping):
         file (str): Path to the input file.
         mapping (dict): Mapping of current table name and alias table name.
 
-    Returns:
-        dict: A dictionary containing the alias table.
     """
     if isinstance(mapping, str):
         mapping = json.loads(mapping)
+    if isinstance(mapping, list):
+        for m in mapping:
+            alias_table(file, m)
     with h5py.File(file, "a") as f:
         for raw_id, alias in mapping.items():
             if raw_id in f:
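Note: alias_table now also accepts a list of mappings and applies each one in turn. A hypothetical end-to-end sketch of that interface (the helper below is not the package implementation; the hard-link aliasing, file name, channel IDs and detector names are invented for illustration):

```python
import json
from pathlib import Path

import h5py
import numpy as np

path = Path("toy_aliases.lh5")  # invented file name
with h5py.File(path, "w") as f:
    f.create_dataset("ch1027200/raw/energy", data=np.arange(5.0))
    f.create_dataset("ch1027201/raw/energy", data=2 * np.arange(5.0))


def apply_aliases(file, mapping):
    """Toy re-implementation of the alias_table interface shown above."""
    if isinstance(mapping, str):
        mapping = json.loads(mapping)
    if isinstance(mapping, list):  # new in 0.1.5: a list of mappings is accepted
        for m in mapping:
            apply_aliases(file, m)
        return
    with h5py.File(file, "a") as f:
        for raw_id, alias in mapping.items():
            if raw_id in f:
                f[alias] = f[raw_id]  # hard-link the existing group under the alias


apply_aliases(path, [{"ch1027200": "V01234A"}, {"ch1027201": "V01234B"}])
with h5py.File(path) as f:
    print(sorted(f))  # ['V01234A', 'V01234B', 'ch1027200', 'ch1027201']
```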
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/utils/convert_np.py

@@ -24,7 +24,7 @@ def convert_dict_np_to_float(dic: dict) -> dict:
             convert_dict_np_to_float(value)
         elif isinstance(value, np.float32 | np.float64):
             dic[key] = float(value)
-        elif isinstance(dic[key], Sequence):
+        elif isinstance(dic[key], Sequence) and not isinstance(dic[key], str):
             dic[key] = [
                 float(x) if isinstance(x, np.float32 | np.float64) else x for x in value
             ]
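Note: the extra check matters because str is itself a collections.abc.Sequence, so without it string values would be rebuilt as lists of characters. A minimal illustration with a toy helper (not the package function):

```python
from collections.abc import Sequence

import numpy as np

print(isinstance("2614.5 keV", Sequence))  # True -- the reason for the guard


def to_builtin_floats(dic: dict) -> dict:
    """Toy version of the conversion above, with the str exclusion in place."""
    for key, value in dic.items():
        if isinstance(value, dict):
            to_builtin_floats(value)
        elif isinstance(value, np.float32 | np.float64):
            dic[key] = float(value)
        elif isinstance(value, Sequence) and not isinstance(value, str):
            dic[key] = [
                float(x) if isinstance(x, np.float32 | np.float64) else x
                for x in value
            ]
    return dic


print(to_builtin_floats({"pars": [np.float32(1.5), 2], "unit": "keV", "sub": {"a": np.float64(0.1)}}))
# {'pars': [1.5, 2], 'unit': 'keV', 'sub': {'a': 0.1}}
```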
{legend_dataflow_scripts-0.1.3 → legend_dataflow_scripts-0.1.5}/src/legenddataflowscripts/workflow/__init__.py

@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from .execenv import execenv_prefix, execenv_pyexe
+from .pre_compile_catalog import pre_compile_catalog
 from .utils import (
     as_ro,
     set_last_rule_name,
@@ -13,6 +14,7 @@ __all__ = [
     "as_ro",
     "execenv_prefix",
     "execenv_pyexe",
+    "pre_compile_catalog",
     "set_last_rule_name",
     "subst_vars",
     "subst_vars_impl",
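Note: with the re-export above, the catalog helper is importable straight from the subpackage namespace. Usage sketch:

```python
from legenddataflowscripts.workflow import pre_compile_catalog  # available as of 0.1.5
```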