legend-dataflow-scripts 0.2.1__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/METADATA +1 -2
- {legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/RECORD +13 -13
- legenddataflowscripts/_version.py +2 -2
- legenddataflowscripts/par/geds/dsp/dplms.py +2 -1
- legenddataflowscripts/par/geds/dsp/eopt.py +5 -5
- legenddataflowscripts/par/geds/dsp/evtsel.py +9 -11
- legenddataflowscripts/par/geds/dsp/nopt.py +1 -1
- legenddataflowscripts/par/geds/dsp/pz.py +0 -1
- legenddataflowscripts/par/geds/hit/ecal.py +4 -4
- legenddataflowscripts/workflow/filedb.py +7 -2
- {legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/WHEEL +0 -0
- {legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/entry_points.txt +0 -0
- {legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/top_level.txt +0 -0
{legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/METADATA
RENAMED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: legend-dataflow-scripts
|
|
3
|
-
Version: 0.2.1
|
|
3
|
+
Version: 0.2.2
|
|
4
4
|
Summary: Python package for the processing scripts for LEGEND-200 data
|
|
5
5
|
Author-email: George Marshall <ggmarsh@uw.edu>, Luigi Pertoldi <gipert@pm.me>
|
|
6
6
|
Maintainer: The LEGEND Collaboration
|
|
@@ -23,7 +23,6 @@ Requires-Dist: pygama>=2.1
|
|
|
23
23
|
Requires-Dist: dspeed>=1.6
|
|
24
24
|
Requires-Dist: pylegendmeta>=1.2.5
|
|
25
25
|
Requires-Dist: legend-pydataobj>=1.11
|
|
26
|
-
Requires-Dist: legend-daq2lh5>=1.6.1
|
|
27
26
|
Requires-Dist: pip
|
|
28
27
|
Provides-Extra: test
|
|
29
28
|
Requires-Dist: legend-dataflow-scripts; extra == "test"
|
|
@@ -1,18 +1,18 @@
|
|
|
1
1
|
legenddataflowscripts/__init__.py,sha256=hlpvTxSBjOyXlZUyOyYx3VwT5LS6zNzhAZnTmfT3NjU,303
|
|
2
|
-
legenddataflowscripts/_version.py,sha256=
|
|
2
|
+
legenddataflowscripts/_version.py,sha256=o3ZTescp-19Z9cvBGq9dQnbppljgzdUYUf98Nov0spY,704
|
|
3
3
|
legenddataflowscripts/par/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
4
|
legenddataflowscripts/par/geds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
5
|
legenddataflowscripts/par/geds/dsp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
|
-
legenddataflowscripts/par/geds/dsp/dplms.py,sha256=
|
|
7
|
-
legenddataflowscripts/par/geds/dsp/eopt.py,sha256=
|
|
8
|
-
legenddataflowscripts/par/geds/dsp/evtsel.py,sha256=
|
|
9
|
-
legenddataflowscripts/par/geds/dsp/nopt.py,sha256=
|
|
10
|
-
legenddataflowscripts/par/geds/dsp/pz.py,sha256=
|
|
6
|
+
legenddataflowscripts/par/geds/dsp/dplms.py,sha256=GIMQo_qiI-ga5zsdf3oDnxjDzw5T6gqVCQob-Mf2Pvw,5376
|
|
7
|
+
legenddataflowscripts/par/geds/dsp/eopt.py,sha256=epnkSddyzI0sXwPyA8gJxoRumGubv3tQpa2J_uRusV8,13615
|
|
8
|
+
legenddataflowscripts/par/geds/dsp/evtsel.py,sha256=tyF5sCovm22w0Rdd9vD8NyzM2ym8AGO8IR9oKQFYAWA,17296
|
|
9
|
+
legenddataflowscripts/par/geds/dsp/nopt.py,sha256=l3Z5RqPM4my6xSTG78zmjQIorCwGLMP-ipTbNOauWFY,3977
|
|
10
|
+
legenddataflowscripts/par/geds/dsp/pz.py,sha256=l9nc3MmSjghmYOR3LBbbGZMnHc52MFy5F7EOnYAPqvc,8062
|
|
11
11
|
legenddataflowscripts/par/geds/dsp/svm.py,sha256=eDneRB_PQZp8Q4n2VheTX3kbu4ufZQ-jnuCCjvtwFpk,826
|
|
12
12
|
legenddataflowscripts/par/geds/dsp/svm_build.py,sha256=w5-vT6rXmpl7V9rdkfc7_g6GTzn86i41tHkIT-3f5YI,1931
|
|
13
13
|
legenddataflowscripts/par/geds/hit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
14
14
|
legenddataflowscripts/par/geds/hit/aoe.py,sha256=jAH0Rh3JCnV67vhv9xUZtPdGB8ADJXgG9Lo8t8YQjqs,10841
|
|
15
|
-
legenddataflowscripts/par/geds/hit/ecal.py,sha256=
|
|
15
|
+
legenddataflowscripts/par/geds/hit/ecal.py,sha256=j8Z90r9UP5Hn-cawf2lvDL8tF2Uvfp4rDjlM4sGbM-M,27009
|
|
16
16
|
legenddataflowscripts/par/geds/hit/lq.py,sha256=vXgK83RlJJ4UUjQQJWfmFGIbT0AEP3EaLALM5LhvZ6s,11133
|
|
17
17
|
legenddataflowscripts/par/geds/hit/qc.py,sha256=vr6j5sRTvjmDmErW2uUteCPsj5qfBrpn7ssvLKzov4M,12408
|
|
18
18
|
legenddataflowscripts/tier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -27,11 +27,11 @@ legenddataflowscripts/utils/plot_dict.py,sha256=6f2ZB8J1GNAGfldQjgl1gkKMDcqPo1W7
|
|
|
27
27
|
legenddataflowscripts/utils/pulser_removal.py,sha256=kuARdp1jf-lsUWcb0_KRDp-ZXzkHNrDCXUc3h7TJm7Q,424
|
|
28
28
|
legenddataflowscripts/workflow/__init__.py,sha256=JhudKYhBT8bXtX4LCqxQCHzUiITpugAtFxePWEtphC4,474
|
|
29
29
|
legenddataflowscripts/workflow/execenv.py,sha256=qTG4N9ovEPxA0QtqG0wWUIuK50BZIcYvpVlpy-XgxPw,9257
|
|
30
|
-
legenddataflowscripts/workflow/filedb.py,sha256=
|
|
30
|
+
legenddataflowscripts/workflow/filedb.py,sha256=6scz8DjdvbXs8OXOy2-6BJWVSALQzmy5cLLn-OsdXsU,3656
|
|
31
31
|
legenddataflowscripts/workflow/pre_compile_catalog.py,sha256=cEK0KXh-ClSE2Bo9MK471o79XG22bMY5r-2tIihtCfk,790
|
|
32
32
|
legenddataflowscripts/workflow/utils.py,sha256=VVCsj7wNaqV6sw2Xnk_xykhVv3BKTX4hqQtKE4UUayg,3170
|
|
33
|
-
legend_dataflow_scripts-0.2.
|
|
34
|
-
legend_dataflow_scripts-0.2.
|
|
35
|
-
legend_dataflow_scripts-0.2.
|
|
36
|
-
legend_dataflow_scripts-0.2.
|
|
37
|
-
legend_dataflow_scripts-0.2.
|
|
33
|
+
legend_dataflow_scripts-0.2.2.dist-info/METADATA,sha256=9n2aNwe2pg_qGG5v3e7ly_yQNWELnCpLRe3PC2ygsEA,3085
|
|
34
|
+
legend_dataflow_scripts-0.2.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
35
|
+
legend_dataflow_scripts-0.2.2.dist-info/entry_points.txt,sha256=B197waSm-orA_ZS-9rkxNDsmOHdCn8CbWodnlqXQKRg,1313
|
|
36
|
+
legend_dataflow_scripts-0.2.2.dist-info/top_level.txt,sha256=s8E2chjJNYUbrN6whFG_VCsJKySFp1IOXLcUefA7DB0,22
|
|
37
|
+
legend_dataflow_scripts-0.2.2.dist-info/RECORD,,
|
|
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
|
|
|
28
28
|
commit_id: COMMIT_ID
|
|
29
29
|
__commit_id__: COMMIT_ID
|
|
30
30
|
|
|
31
|
-
__version__ = version = '0.2.1'
|
|
32
|
-
__version_tuple__ = version_tuple = (0, 2, 1)
|
|
31
|
+
__version__ = version = '0.2.2'
|
|
32
|
+
__version_tuple__ = version_tuple = (0, 2, 2)
|
|
33
33
|
|
|
34
34
|
__commit_id__ = commit_id = None
|
|
@@ -37,6 +37,7 @@ def par_geds_dsp_dplms() -> None:
|
|
|
37
37
|
"--config-file", help="Config file", type=str, nargs="*", required=True
|
|
38
38
|
)
|
|
39
39
|
|
|
40
|
+
argparser.add_argument("--channel", help="channel", type=str, required=True)
|
|
40
41
|
argparser.add_argument(
|
|
41
42
|
"--raw-table-name", help="raw table name", type=str, required=True
|
|
42
43
|
)
|
|
@@ -53,7 +54,7 @@ def par_geds_dsp_dplms() -> None:
|
|
|
53
54
|
t0 = time.time()
|
|
54
55
|
|
|
55
56
|
dplms_dict = Props.read_from(args.config_file)
|
|
56
|
-
db_dict = Props.read_from(args.
|
|
57
|
+
db_dict = Props.read_from(args.database)
|
|
57
58
|
|
|
58
59
|
if dplms_dict["run_dplms"] is True:
|
|
59
60
|
with Path(args.fft_raw_filelist).open() as f:
|
|
@@ -131,10 +131,10 @@ def par_geds_dsp_eopt() -> None:
|
|
|
131
131
|
full_dt = (init_data["tp_99"].nda - init_data["tp_0_est"].nda)[idx_list[-1]]
|
|
132
132
|
flat_val = np.ceil(1.1 * np.nanpercentile(full_dt, 99) / 100) / 10
|
|
133
133
|
|
|
134
|
-
if flat_val < 1:
|
|
135
|
-
flat_val = 1
|
|
136
|
-
elif flat_val > 4:
|
|
137
|
-
flat_val = 4
|
|
134
|
+
if flat_val < opt_dict.get("min_flat_value", 1):
|
|
135
|
+
flat_val = opt_dict.get("min_flat_value", 1)
|
|
136
|
+
elif flat_val > opt_dict.get("max_flat_value", 4):
|
|
137
|
+
flat_val = opt_dict.get("max_flat_value", 4)
|
|
138
138
|
flat_val = f"{flat_val}*us"
|
|
139
139
|
|
|
140
140
|
db_dict["cusp"] = {"flat": flat_val}
|
|
@@ -311,7 +311,7 @@ def par_geds_dsp_eopt() -> None:
|
|
|
311
311
|
|
|
312
312
|
optimisers = [bopt_cusp, bopt_zac, bopt_trap]
|
|
313
313
|
|
|
314
|
-
out_param_dict,
|
|
314
|
+
out_param_dict, _ = run_bayesian_optimisation(
|
|
315
315
|
tb_data,
|
|
316
316
|
dsp_config,
|
|
317
317
|
[fom],
|
|
@@ -133,9 +133,6 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
133
133
|
db_dict = Props.read_from(args.decay_const)
|
|
134
134
|
|
|
135
135
|
Path(args.peak_file).parent.mkdir(parents=True, exist_ok=True)
|
|
136
|
-
rng = np.random.default_rng()
|
|
137
|
-
rand_num = f"{rng.integers(0, 99999):05d}"
|
|
138
|
-
temp_output = f"{args.peak_file}.{rand_num}"
|
|
139
136
|
if peak_dict.pop("run_selection") is True:
|
|
140
137
|
log.debug("Starting peak selection")
|
|
141
138
|
|
|
@@ -166,8 +163,10 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
166
163
|
if lh5_path[-1] != "/":
|
|
167
164
|
lh5_path += "/"
|
|
168
165
|
|
|
166
|
+
energy_field = peak_dict.get("energy_param", "daqenergy")
|
|
167
|
+
|
|
169
168
|
tb = lh5.read(
|
|
170
|
-
lh5_path, raw_files, field_mask=[
|
|
169
|
+
lh5_path, raw_files, field_mask=[energy_field, "t_sat_lo", "timestamp"]
|
|
171
170
|
)
|
|
172
171
|
|
|
173
172
|
if args.no_pulse is False:
|
|
@@ -195,14 +194,14 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
195
194
|
"operations"
|
|
196
195
|
]
|
|
197
196
|
else:
|
|
198
|
-
E_uncal = tb.
|
|
197
|
+
E_uncal = tb[energy_field].nda
|
|
199
198
|
E_uncal = E_uncal[E_uncal > 200]
|
|
200
199
|
guess_keV = 2620 / np.nanpercentile(E_uncal, 99) # usual simple guess
|
|
201
200
|
|
|
202
201
|
# daqenergy is an int so use integer binning (dx used to be bugged as output so switched to nbins)
|
|
203
202
|
|
|
204
203
|
hpge_cal = pgc.HPGeCalibration(
|
|
205
|
-
|
|
204
|
+
energy_field,
|
|
206
205
|
peaks_kev,
|
|
207
206
|
guess_keV,
|
|
208
207
|
0,
|
|
@@ -213,7 +212,7 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
213
212
|
roughpars = hpge_cal.pars
|
|
214
213
|
raw_dict = {
|
|
215
214
|
"daqenergy_cal": {
|
|
216
|
-
"expression": "
|
|
215
|
+
"expression": f"{energy_field}*a",
|
|
217
216
|
"parameters": {"a": round(float(roughpars[1]), 5)},
|
|
218
217
|
}
|
|
219
218
|
}
|
|
@@ -385,7 +384,7 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
385
384
|
lh5.write(
|
|
386
385
|
out_tbl,
|
|
387
386
|
name=lh5_path,
|
|
388
|
-
lh5_file=
|
|
387
|
+
lh5_file=args.peak_file,
|
|
389
388
|
wo_mode="a",
|
|
390
389
|
)
|
|
391
390
|
peak_dict["obj_buf"] = None
|
|
@@ -416,7 +415,7 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
416
415
|
lh5.write(
|
|
417
416
|
out_tbl,
|
|
418
417
|
name=lh5_path,
|
|
419
|
-
lh5_file=
|
|
418
|
+
lh5_file=args.peak_file,
|
|
420
419
|
wo_mode="a",
|
|
421
420
|
)
|
|
422
421
|
peak_dict["obj_buf"] = None
|
|
@@ -431,7 +430,6 @@ def par_geds_dsp_evtsel() -> None:
|
|
|
431
430
|
log.debug(msg)
|
|
432
431
|
|
|
433
432
|
else:
|
|
434
|
-
Path(
|
|
433
|
+
Path(args.peak_file).touch()
|
|
435
434
|
msg = f"event selection completed in {time.time() - t0} seconds"
|
|
436
435
|
log.debug(msg)
|
|
437
|
-
Path(temp_output).rename(args.peak_file)
|
|
@@ -52,7 +52,7 @@ def par_geds_dsp_nopt() -> None:
|
|
|
52
52
|
t0 = time.time()
|
|
53
53
|
|
|
54
54
|
opt_dict = Props.read_from(args.config_file)
|
|
55
|
-
db_dict = Props.read_from(args.
|
|
55
|
+
db_dict = Props.read_from(args.database)
|
|
56
56
|
|
|
57
57
|
if opt_dict.pop("run_nopt") is True:
|
|
58
58
|
with Path(args.raw_filelist).open() as f:
|
|
@@ -21,7 +21,6 @@ from ....utils import (
|
|
|
21
21
|
|
|
22
22
|
def par_geds_dsp_pz() -> None:
|
|
23
23
|
argparser = argparse.ArgumentParser()
|
|
24
|
-
argparser.add_argument("--configs", help="configs path", type=str, required=True)
|
|
25
24
|
argparser.add_argument("--log", help="log file", type=str)
|
|
26
25
|
argparser.add_argument(
|
|
27
26
|
"-p", "--no-pulse", help="no pulser present", action="store_true"
|
|
@@ -71,7 +71,7 @@ def plot_2614_timemap(
|
|
|
71
71
|
norm=LogNorm(),
|
|
72
72
|
)
|
|
73
73
|
|
|
74
|
-
ticks,
|
|
74
|
+
ticks, _ = plt.xticks()
|
|
75
75
|
plt.xlabel(
|
|
76
76
|
f"Time starting : {datetime.utcfromtimestamp(ticks[0]).strftime('%d/%m/%y %H:%M')}"
|
|
77
77
|
)
|
|
@@ -125,7 +125,7 @@ def plot_pulser_timemap(
|
|
|
125
125
|
norm=LogNorm(),
|
|
126
126
|
)
|
|
127
127
|
plt.ylim([mean - n_spread * spread, mean + n_spread * spread])
|
|
128
|
-
ticks,
|
|
128
|
+
ticks, _ = plt.xticks()
|
|
129
129
|
plt.xlabel(
|
|
130
130
|
f"Time starting : {datetime.utcfromtimestamp(ticks[0]).strftime('%d/%m/%y %H:%M')}"
|
|
131
131
|
)
|
|
@@ -264,7 +264,7 @@ def bin_survival_fraction(
|
|
|
264
264
|
data.query(selection_string)[cal_energy_param],
|
|
265
265
|
bins=np.arange(erange[0], erange[1] + dx, dx),
|
|
266
266
|
)
|
|
267
|
-
counts_fail,
|
|
267
|
+
counts_fail, _, _ = pgh.get_hist(
|
|
268
268
|
data.query(f"(~{cut_field})&(~{pulser_field})")[cal_energy_param],
|
|
269
269
|
bins=np.arange(erange[0], erange[1] + dx, dx),
|
|
270
270
|
)
|
|
@@ -303,7 +303,7 @@ def plot_baseline_timemap(
|
|
|
303
303
|
norm=LogNorm(),
|
|
304
304
|
)
|
|
305
305
|
|
|
306
|
-
ticks,
|
|
306
|
+
ticks, _ = plt.xticks()
|
|
307
307
|
plt.xlabel(
|
|
308
308
|
f"Time starting : {datetime.utcfromtimestamp(ticks[0]).strftime('%d/%m/%y %H:%M')}"
|
|
309
309
|
)
|
|
@@ -42,6 +42,11 @@ def build_filedb() -> None:
|
|
|
42
42
|
ignore = []
|
|
43
43
|
|
|
44
44
|
fdb = FileDB(config, scan=False)
|
|
45
|
+
try:
|
|
46
|
+
fdb.scan_files([args.scan_path])
|
|
47
|
+
except Exception as e:
|
|
48
|
+
msg = f"error when building {args.output} from {args.scan_path}"
|
|
49
|
+
raise RuntimeError(msg) from e
|
|
45
50
|
fdb.scan_files([args.scan_path])
|
|
46
51
|
fdb.scan_tables_columns(dir_files_conform=True)
|
|
47
52
|
|
|
@@ -86,7 +91,7 @@ def build_filedb() -> None:
|
|
|
86
91
|
if (
|
|
87
92
|
(loc_timestamps == default).all() or not found
|
|
88
93
|
) and row.raw_file not in ignore:
|
|
89
|
-
msg = "something went wrong! no valid first timestamp found. Likely: the file is empty"
|
|
94
|
+
msg = "something went wrong! no valid first timestamp found. Likely: the file {row.raw_file} is empty"
|
|
90
95
|
raise RuntimeError(msg)
|
|
91
96
|
|
|
92
97
|
timestamps[i] = np.min(loc_timestamps)
|
|
@@ -97,7 +102,7 @@ def build_filedb() -> None:
|
|
|
97
102
|
if (
|
|
98
103
|
timestamps[i] < 0 or timestamps[i] > 4102444800
|
|
99
104
|
) and row.raw_file not in ignore:
|
|
100
|
-
msg = f"something went wrong! timestamp {timestamps[i]} does not make sense"
|
|
105
|
+
msg = f"something went wrong! timestamp {timestamps[i]} does not make sense in {row.raw_file}"
|
|
101
106
|
raise RuntimeError(msg)
|
|
102
107
|
|
|
103
108
|
fdb.df["first_timestamp"] = timestamps
|
|
File without changes
|
{legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/entry_points.txt
RENAMED
|
File without changes
|
{legend_dataflow_scripts-0.2.1.dist-info → legend_dataflow_scripts-0.2.2.dist-info}/top_level.txt
RENAMED
|
File without changes
|