lstosa 0.10.18__py3-none-any.whl → 0.10.19__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/METADATA +2 -2
- {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/RECORD +41 -40
- {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/WHEEL +1 -1
- {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/entry_points.txt +1 -0
- osa/_version.py +9 -4
- osa/configs/options.py +2 -0
- osa/configs/sequencer.cfg +12 -4
- osa/conftest.py +127 -3
- osa/high_level/significance.py +3 -3
- osa/high_level/tests/test_significance.py +3 -0
- osa/job.py +48 -25
- osa/nightsummary/extract.py +11 -2
- osa/nightsummary/tests/test_extract.py +3 -0
- osa/paths.py +102 -23
- osa/provenance/capture.py +1 -1
- osa/provenance/config/definition.yaml +7 -0
- osa/provenance/utils.py +22 -7
- osa/scripts/autocloser.py +0 -10
- osa/scripts/calibration_pipeline.py +4 -0
- osa/scripts/closer.py +132 -53
- osa/scripts/copy_datacheck.py +5 -3
- osa/scripts/datasequence.py +45 -71
- osa/scripts/provprocess.py +16 -7
- osa/scripts/sequencer.py +34 -26
- osa/scripts/sequencer_catB_tailcuts.py +223 -0
- osa/scripts/sequencer_webmaker.py +4 -0
- osa/scripts/simulate_processing.py +4 -7
- osa/scripts/tests/test_osa_scripts.py +64 -20
- osa/scripts/update_source_catalog.py +5 -2
- osa/tests/test_jobs.py +28 -11
- osa/tests/test_paths.py +6 -6
- osa/utils/cliopts.py +37 -32
- osa/utils/register.py +18 -13
- osa/utils/tests/test_utils.py +14 -0
- osa/utils/utils.py +173 -56
- osa/workflow/dl3.py +1 -2
- osa/workflow/stages.py +16 -11
- osa/workflow/tests/test_dl3.py +2 -1
- osa/workflow/tests/test_stages.py +7 -4
- {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/LICENSE +0 -0
- {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/top_level.txt +0 -0
osa/scripts/tests/test_osa_scripts.py CHANGED

@@ -30,7 +30,6 @@ ALL_SCRIPTS = [
 options.date = datetime.datetime.fromisoformat("2020-01-17")
 options.tel_id = "LST1"
 options.prod_id = "v0.1.0"
-options.dl1_prod_id = "tailcut84"
 options.directory = "test_osa/test_files0/running_analysis/20200117/v0.1.0/"
 
 
@@ -65,7 +64,11 @@ def test_simulate_processing(
     run_summary_file,
     r0_data,
     merged_run_summary,
-    drive_log
+    drive_log,
+    dl1b_config_files,
+    tailcuts_log_files,
+    rf_models,
+    tailcuts_finder_dir,
 ):
 
     for file in drs4_time_calibration_files:
@@ -80,13 +83,16 @@ def test_simulate_processing(
     assert run_summary_file.exists()
     assert merged_run_summary.exists()
     assert drive_log.exists()
+    assert rf_models[1].exists()
+    assert dl1b_config_files[0].exists()
+    assert tailcuts_log_files[0].exists()
 
     remove_provlog()
-    rc = run_program("simulate_processing", "-p", "--force")
+    rc = run_program("simulate_processing", "-p", "--force", "-d", "2020-01-17", "LST1")
     assert rc.returncode == 0
 
     prov_dl1_path = Path("./test_osa/test_files0/DL1/20200117/v0.1.0/tailcut84/log")
-    prov_dl2_path = Path("./test_osa/test_files0/DL2/20200117/v0.1.0/
+    prov_dl2_path = Path("./test_osa/test_files0/DL2/20200117/v0.1.0/tailcut84/nsb_tuning_0.14/log")
     prov_file_dl1 = prov_dl1_path / "calibration_to_dl1_01807_prov.log"
     prov_file_dl2 = prov_dl2_path / "calibration_to_dl2_01807_prov.log"
     json_file_dl1 = prov_dl1_path / "calibration_to_dl1_01807_prov.json"
@@ -101,23 +107,23 @@ def test_simulate_processing(
 
     with open(json_file_dl1) as file:
         dl1 = yaml.safe_load(file)
-    assert len(dl1["entity"]) ==
+    assert len(dl1["entity"]) == 42
     assert len(dl1["activity"]) == 5
     assert len(dl1["used"]) == 15
     assert len(dl1["wasGeneratedBy"]) == 10
 
     with open(json_file_dl2) as file:
         dl2 = yaml.safe_load(file)
-    assert len(dl2["entity"]) ==
-    assert len(dl2["activity"]) ==
-    assert len(dl2["used"]) ==
-    assert len(dl2["wasGeneratedBy"]) ==
+    assert len(dl2["entity"]) == 42
+    assert len(dl2["activity"]) == 5
+    assert len(dl2["used"]) == 15
+    assert len(dl2["wasGeneratedBy"]) == 10
 
-    rc = run_program("simulate_processing", "-p")
+    rc = run_program("simulate_processing", "-p", "-d", "2020-01-17", "LST1")
     assert rc.returncode == 0
 
     remove_provlog()
-    rc = run_program("simulate_processing", "-p")
+    rc = run_program("simulate_processing", "-p", "-d", "2020-01-17", "LST1")
     assert rc.returncode == 0
 
 
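Note on the change above: simulate_processing is now driven with an explicit observation date and telescope instead of relying on defaults. A minimal sketch of the equivalent call, assuming the console script installed through the wheel's entry points is on PATH (the tests drive it through their run_program helper):

import subprocess

# Illustrative only: mirrors the updated test invocation above.
result = subprocess.run(
    ["simulate_processing", "-p", "--force", "-d", "2020-01-17", "LST1"],
    capture_output=True,
    text=True,
    check=False,
)
print(result.returncode)  # the test expects 0
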
@@ -129,6 +135,10 @@ def test_simulated_sequencer(
     r0_data,
     merged_run_summary,
     gain_selection_flag_file,
+    dl1b_config_files,
+    tailcuts_log_files,
+    rf_models,
+    dl2_merged,
 ):
     assert run_summary_file.exists()
     assert run_catalog.exists()
@@ -143,7 +153,10 @@ def test_simulated_sequencer(
     for file in systematic_correction_files:
         assert file.exists()
 
-
+    for file in dl2_merged:
+        assert file.exists()
+
+    rc = run_program("sequencer", "-d", "2020-01-17", "--no-gainsel", "-s", "-t", "LST1")
 
     assert rc.returncode == 0
     now = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M")
@@ -152,8 +165,8 @@ def test_simulated_sequencer(
     =================================== Starting sequencer.py at {now} UTC for LST, Telescope: LST1, Date: 2020-01-17 ===================================
     Tel Seq Parent Type Run Subruns Source Action Tries JobID State CPU_time Exit DL1% MUONS% DL1AB% DATACHECK% DL2%
     LST1 1 None PEDCALIB 1809 5 None None None None None None None None None None None None
-    LST1 2 1 DATA 1807 11 Crab None None None None None None 0 0 0 0
-    LST1 3 1 DATA 1808 9 MadeUpSource None None None None None None 0 0 0 0
+    LST1 2 1 DATA 1807 11 Crab None None None None None None 0 0 0 0 100
+    LST1 3 1 DATA 1808 9 MadeUpSource None None None None None None 0 0 0 0 100
     """ # noqa: E501
     )
 
@@ -186,6 +199,9 @@ def test_closer(
     longterm_dir,
     longterm_link_latest_dir,
     daily_datacheck_dl1_files,
+    dl1b_config_files,
+    tailcuts_log_files,
+    rf_models,
 ):
     # First assure that the end of night flag is not set and remove it otherwise
     night_finished_flag = Path(
@@ -209,6 +225,7 @@ def test_closer(
     assert longterm_link_latest_dir.exists()
     for check_file in daily_datacheck_dl1_files:
         assert check_file.exists()
+    assert rf_models[2].exists()
 
     run_program("closer", "-y", "-v", "-t", "-d", "2020-01-17", "LST1")
     closed_seq_file = running_analysis_dir / "sequence_LST1_01809.closed"
@@ -227,9 +244,6 @@ def test_closer(
         "./test_osa/test_files0/DL1/20200117/v0.1.0/tailcut84/datacheck/"
         "datacheck_dl1_LST-1.Run01808.0011.h5"
     )
-    assert os.path.exists(
-        "./test_osa/test_files0/DL2/20200117/v0.1.0/model2/dl2_LST-1.Run01808.0011.h5"
-    )
     # Assert that the link to dl1 and muons files have been created
     assert os.path.islink(
         "./test_osa/test_files0/running_analysis/20200117/v0.1.0/muons_LST-1.Run01808.0011.fits"
@@ -242,7 +256,16 @@ def test_closer(
     assert closed_seq_file.exists()
 
 
-def test_datasequence(
+def test_datasequence(
+    running_analysis_dir,
+    run_catalog,
+    run_catalog_dir,
+    rf_models_base_dir,
+    rf_models,
+    catB_closed_file,
+    dl1b_config_files,
+    tailcuts_log_files,
+):
     drs4_file = "drs4_pedestal.Run00001.0000.fits"
     calib_file = "calibration.Run00002.0000.hdf5"
     timecalib_file = "time_calibration.Run00002.0000.hdf5"
@@ -250,9 +273,16 @@ def test_datasequence(running_analysis_dir):
     drive_file = "DrivePosition_20200117.txt"
     runsummary_file = "RunSummary_20200117.ecsv"
     prod_id = "v0.1.0"
-    run_number = "
+    run_number = "01807.0000"
     options.directory = running_analysis_dir
 
+    assert run_catalog_dir.exists()
+    assert run_catalog.exists()
+    assert rf_models_base_dir.exists()
+    assert rf_models[1].exists()
+    assert catB_closed_file.exists()
+    assert dl1b_config_files[0].exists()
+
     output = run_program(
         "datasequence",
         "--date=2020-01-17",
@@ -264,6 +294,8 @@ def test_datasequence(running_analysis_dir):
         f"--systematic-correction-file={systematic_correction_file}",
         f"--drive-file={drive_file}",
         f"--run-summary={runsummary_file}",
+        f"--dl1b-config={dl1b_config_files[0]}",
+        "--dl1-prod-id=tailcut84",
         run_number,
         "LST1",
     )
@@ -288,7 +320,15 @@ def test_calibration_pipeline(running_analysis_dir):
     assert output.returncode == 0
 
 
-def test_is_sequencer_successful(
+def test_is_sequencer_successful(
+    run_summary,
+    running_analysis_dir,
+    dl1b_config_files,
+    tailcuts_log_files,
+    rf_models,
+    merged_run_summary,
+):
+    assert merged_run_summary.exists()
     options.directory = running_analysis_dir
     options.test = True
     seq_tuple = is_finished_check(run_summary)
@@ -300,12 +340,14 @@ def test_drs4_pedestal_cmd(base_test_dir):
    from osa.scripts.calibration_pipeline import drs4_pedestal_command
 
     cmd = drs4_pedestal_command(drs4_pedestal_run_id="01804")
+    r0_dir = base_test_dir / "R0"
     expected_command = [
         cfg.get("lstchain", "drs4_baseline"),
         "-r",
         "01804",
         "-b",
         base_test_dir,
+        f"--r0-dir={r0_dir}",
         "--no-progress",
     ]
     assert cmd == expected_command
@@ -315,6 +357,7 @@ def test_calibration_file_cmd(base_test_dir):
     from osa.scripts.calibration_pipeline import calibration_file_command
 
     cmd = calibration_file_command(drs4_pedestal_run_id="01804", pedcal_run_id="01809")
+    r0_dir = base_test_dir / "R0"
     expected_command = [
         cfg.get("lstchain", "charge_calibration"),
         "-p",
@@ -323,6 +366,7 @@ def test_calibration_file_cmd(base_test_dir):
         "01809",
         "-b",
         base_test_dir,
+        f"--r0-dir={r0_dir}",
     ]
     assert cmd == expected_command
 
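The two calibration-command tests above now expect an explicit --r0-dir argument pointing at the R0 tree under the base directory. A minimal sketch of the asserted DRS4 baseline command list, assuming the same cfg keys and directory layout as the test (the base path here is a placeholder for the test fixture):

from pathlib import Path

from osa.configs.config import cfg

base_dir = Path("test_osa/test_files0")  # placeholder for the base_test_dir fixture
r0_dir = base_dir / "R0"

# Command list asserted by the updated test for the DRS4 baseline step.
expected_command = [
    cfg.get("lstchain", "drs4_baseline"),
    "-r", "01804",
    "-b", base_dir,
    f"--r0-dir={r0_dir}",
    "--no-progress",
]
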
osa/scripts/update_source_catalog.py CHANGED

@@ -12,7 +12,7 @@ from astropy.time import Time
 from lstchain.io.io import dl1_params_lstcam_key
 from lstchain.reco.utils import add_delta_t_key, get_effective_time
 
-from osa.paths import get_major_version
+from osa.paths import get_major_version, get_dl1_prod_id
 from osa.utils.utils import get_lstchain_version
 
 pd.set_option("display.float_format", "{:.1f}".format)
@@ -24,6 +24,7 @@ log = logging.getLogger(__name__)
 BASE_DL1 = Path("/fefs/aswg/data/real/DL1")
 BASE_MONITORING = Path("/fefs/aswg/data/real/monitoring")
 CATALOG_DIR = Path("/fefs/aswg/data/real/OSA/Catalog")
+TAILCUTS_DIR = Path("/fefs/aswg/data/real/auxiliary/TailCuts")
 
 
 def add_table_to_html(html_table):
@@ -125,7 +126,9 @@ def add_start_and_elapsed(table: Table, datedir: str, version: str) -> None:
 
     for run in table["run_id"]:
         major_version = get_major_version(version)
-
+        dl1b_config_file = TAILCUTS_DIR / f"dl1ab_Run{run:05d}.json"
+        dl1_prod_id = get_dl1_prod_id(dl1b_config_file)
+        file = BASE_DL1 / datedir / major_version / dl1_prod_id / f"dl1_LST-1.Run{run:05d}.h5"
         df = pd.read_hdf(file, key=dl1_params_lstcam_key)
 
         # Timestamp of the first event
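The catalog update above now resolves a per-run DL1b production ID from that run's dl1ab tailcuts config instead of using a fixed directory name. The real get_dl1_prod_id() lives in osa/paths.py (also changed in this release) and its body is not part of this diff; the following is only a hypothetical sketch of the idea, with assumed JSON key names:

import json
from pathlib import Path


def dl1_prod_id_sketch(dl1b_config_file: Path) -> str:
    """Hypothetical sketch: build an ID such as 'tailcut84' from the cleaning
    thresholds stored in the per-run dl1ab JSON config."""
    settings = json.loads(dl1b_config_file.read_text())
    picture = settings["picture_threshold"]    # assumed key name
    boundary = settings["boundary_threshold"]  # assumed key name
    return f"tailcut{picture}{boundary}"
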
osa/tests/test_jobs.py CHANGED

@@ -9,18 +9,21 @@ from osa.configs.config import cfg, DEFAULT_CFG
 
 extra_files = Path(os.getenv("OSA_TEST_DATA", "extra"))
 datasequence_history_file = extra_files / "history_files/sequence_LST1_04185.0010.history"
+datasequence_history_file2 = extra_files / "history_files/sequence_LST1_04185.0001.history"
 calibration_history_file = extra_files / "history_files/sequence_LST1_04183.history"
 options.date = "2020-01-17"
 options.tel_id = "LST1"
 options.prod_id = "v0.1.0"
 
 
-def test_historylevel(
+def test_historylevel(
+    run_catalog,
+    dl1b_config_files,
+    tailcuts_log_files,
+    rf_models,
+):
     from osa.job import historylevel
 
-    options.dl1_prod_id = "tailcut84"
-    options.dl2_prod_id = "model1"
-
     level, rc = historylevel(datasequence_history_file, "DATA")
     assert level == 0
     assert rc == 0
@@ -29,11 +32,8 @@ def test_historylevel():
     assert level == 0
     assert rc == 0
 
-
-
-
-    level, rc = historylevel(datasequence_history_file, "DATA")
-    assert level == 1
+    level, rc = historylevel(datasequence_history_file2, "DATA")
+    assert level == 2
     assert rc == 0
 
 
@@ -41,6 +41,7 @@ def test_preparejobs(running_analysis_dir, sequence_list):
     from osa.job import prepare_jobs
 
     options.simulate = False
+    options.test = True
     options.directory = running_analysis_dir
     prepare_jobs(sequence_list)
     expected_calib_script = os.path.join(running_analysis_dir, "sequence_LST1_01809.py")
@@ -94,6 +95,7 @@ def test_job_header_template(sequence_list, running_analysis_dir):
     from osa.job import job_header_template
 
     # Extract the first sequence
+    options.test = False
     first_sequence = sequence_list[0]
     header = job_header_template(first_sequence)
     output_string1 = dedent(
@@ -138,13 +140,17 @@ def test_create_job_template_scheduler(
     calibration_file,
     run_summary_file,
     pedestal_ids_file,
+    dl1b_config_files,
+    rf_models,
 ):
     from osa.job import data_sequence_job_template
 
     assert pedestal_ids_file.exists()
+    assert rf_models[1].exists()
 
     options.test = False
     options.simulate = False
+
     content1 = data_sequence_job_template(sequence_list[1])
     expected_content1 = dedent(
         f"""\
@@ -184,6 +190,8 @@ def test_create_job_template_scheduler(
         '--systematic-correction-file={Path.cwd()}/test_osa/test_files0/monitoring/PixelCalibration/Cat-A/ffactor_systematics/20200725/pro/ffactor_systematics_20200725.h5',
         '--drive-file={Path.cwd()}/test_osa/test_files0/monitoring/DrivePositioning/DrivePosition_log_20200117.txt',
         '--run-summary={run_summary_file}',
+        '--dl1b-config={dl1b_config_files[0]}',
+        '--dl1-prod-id=tailcut84',
         f'01807.{{subruns:04d}}',
         'LST1'
     ])
@@ -230,6 +238,8 @@ def test_create_job_template_scheduler(
         '--systematic-correction-file={Path.cwd()}/test_osa/test_files0/monitoring/PixelCalibration/Cat-A/ffactor_systematics/20200725/pro/ffactor_systematics_20200725.h5',
         '--drive-file={Path.cwd()}/test_osa/test_files0/monitoring/DrivePositioning/DrivePosition_log_20200117.txt',
         '--run-summary={run_summary_file}',
+        '--dl1b-config={dl1b_config_files[1]}',
+        '--dl1-prod-id=tailcut84',
         f'--pedestal-ids-file={Path.cwd()}/test_osa/test_files0/auxiliary/PedestalFinder/20200117/pedestal_ids_Run01808.{{subruns:04d}}.h5',
         f'01808.{{subruns:04d}}',
         'LST1'
@@ -252,6 +262,8 @@ def test_create_job_template_local(
     run_summary_file,
     pedestal_ids_file,
     r0_data,
+    dl1b_config_files,
+    rf_models,
 ):
     """Check the job file in local mode (assuming no scheduler)."""
     from osa.job import data_sequence_job_template
@@ -266,6 +278,7 @@ def test_create_job_template_local(
         assert file.exists()
 
     assert pedestal_ids_file.exists()
+    assert rf_models[0].exists()
 
     options.test = True
     options.simulate = False
@@ -296,6 +309,8 @@ def test_create_job_template_local(
         '--systematic-correction-file={Path.cwd()}/test_osa/test_files0/monitoring/PixelCalibration/Cat-A/ffactor_systematics/20200725/pro/ffactor_systematics_20200725.h5',
         '--drive-file={Path.cwd()}/test_osa/test_files0/monitoring/DrivePositioning/DrivePosition_log_20200117.txt',
         '--run-summary={run_summary_file}',
+        '--dl1b-config={dl1b_config_files[0]}',
+        '--dl1-prod-id=tailcut84',
         f'01807.{{subruns:04d}}',
         'LST1'
     ])
@@ -329,6 +344,8 @@ def test_create_job_template_local(
         '--systematic-correction-file={Path.cwd()}/test_osa/test_files0/monitoring/PixelCalibration/Cat-A/ffactor_systematics/20200725/pro/ffactor_systematics_20200725.h5',
         '--drive-file={Path.cwd()}/test_osa/test_files0/monitoring/DrivePositioning/DrivePosition_log_20200117.txt',
         '--run-summary={run_summary_file}',
+        '--dl1b-config={dl1b_config_files[1]}',
+        '--dl1-prod-id=tailcut84',
         f'--pedestal-ids-file={Path.cwd()}/test_osa/test_files0/auxiliary/PedestalFinder/20200117/pedestal_ids_Run01808.{{subruns:04d}}.h5',
         f'01808.{{subruns:04d}}',
         'LST1'
@@ -337,11 +354,11 @@ def test_create_job_template_local(
     sys.exit(proc.returncode)"""
     )
 
-    options.simulate = True
-
     assert content1 == expected_content1
     assert content2 == expected_content2
 
+    options.simulate = True
+
 
 def test_create_job_scheduler_calibration(sequence_list):
     """Check the pilot job file for the calibration pipeline."""
osa/tests/test_paths.py CHANGED

@@ -52,8 +52,8 @@ def test_destination_dir():
     from osa.paths import destination_dir
 
     datedir = date_to_dir(options.date)
-
-
+    dl1_prod_id = "tailcut84"
+    dl2_prod_id = "tailcut84/nsb_tuning_0.14"
     options.prod_id = cfg.get("LST1", "PROD_ID")
     base_directory = cfg.get("LST1", "BASE")
     base_path = Path(base_directory)
@@ -67,14 +67,14 @@ def test_destination_dir():
     }
 
     for concept, dst_dir in data_types.items():
-        directory = destination_dir(concept, create_dir=False)
+        directory = destination_dir(concept, create_dir=False, dl1_prod_id=dl1_prod_id, dl2_prod_id=dl2_prod_id)
         if concept == "DL1AB":
             expected_directory = (
-                base_path / dst_dir / datedir / options.prod_id /
+                base_path / dst_dir / datedir / options.prod_id / dl1_prod_id
             )
         elif concept == "DATACHECK":
             expected_directory = (
-                base_path / dst_dir / datedir / options.prod_id /
+                base_path / dst_dir / datedir / options.prod_id / dl1_prod_id / "datacheck"
             )
         elif concept == "MUON":
             expected_directory = (
@@ -86,7 +86,7 @@ def test_destination_dir():
             )
         elif concept == "DL2":
             expected_directory = (
-                base_path / dst_dir / datedir / options.prod_id /
+                base_path / dst_dir / datedir / options.prod_id / dl2_prod_id
             )
 
         assert directory == expected_directory
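As the test above shows, destination_dir() now receives the DL1b and DL2 production IDs explicitly instead of taking them from global options. A minimal usage sketch, assuming the same keyword names and directory layout as in the test:

from osa.paths import destination_dir

# Production IDs as used in the updated test above.
dl1_prod_id = "tailcut84"
dl2_prod_id = "tailcut84/nsb_tuning_0.14"

dl2_dir = destination_dir(
    "DL2",
    create_dir=False,
    dl1_prod_id=dl1_prod_id,
    dl2_prod_id=dl2_prod_id,
)
# Expected layout per the test: <BASE>/DL2/<YYYYMMDD>/<prod_id>/tailcut84/nsb_tuning_0.14
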
osa/utils/cliopts.py CHANGED

@@ -6,15 +6,11 @@ from argparse import ArgumentParser
 from pathlib import Path
 
 from osa.configs import options
-from osa.configs.config import cfg
 from osa.paths import analysis_path, DEFAULT_CFG
 from osa.utils.logging import myLogger
 from osa.utils.utils import (
-    get_dl1_prod_id,
-    get_dl2_prod_id,
     get_prod_id,
     is_defined,
-    set_prod_ids,
     YESTERDAY,
 )
 
@@ -32,8 +28,6 @@ __all__ = [
     "sequencer_webmaker_argparser",
     "valid_date",
     "get_prod_id",
-    "get_dl1_prod_id",
-    "get_dl2_prod_id",
     "calibration_pipeline_cliparsing",
     "calibration_pipeline_argparser",
     "autocloser_cli_parser",
@@ -130,7 +124,7 @@ def closercliparsing():
     # setting the default date and directory if needed
     options.date = set_default_date_if_needed()
     options.directory = analysis_path(options.tel_id)
-
+    options.prod_id = get_prod_id()
 
 
 def calibration_pipeline_argparser():
@@ -184,10 +178,10 @@ def data_sequence_argparser():
         help="Set the prod ID to define data directories",
     )
     parser.add_argument(
-        "--no-
+        "--no-dl1ab",
         action="store_true",
         default=False,
-        help="Do not
+        help="Do not launch the script lstchain_dl1ab (default False)",
     )
     parser.add_argument("--pedcal-file", type=Path, help="Path of the calibration file")
     parser.add_argument("--drs4-pedestal-file", type=Path, help="Path of the DRS4 pedestal file")
@@ -210,6 +204,18 @@ def data_sequence_argparser():
         type=Path,
         help="Path to a file containing the ids of the interleaved pedestal events",
     )
+    parser.add_argument(
+        "--dl1b-config",
+        type=Path,
+        default=None,
+        help="Configuration file for the production of DL1b files"
+    )
+    parser.add_argument(
+        "--dl1-prod-id",
+        type=str,
+        default=None,
+        help="Production id of the DL1b files"
+    )
     parser.add_argument("run_number", help="Number of the run to be processed")
     parser.add_argument("tel_id", choices=["ST", "LST1", "LST2"])
     return parser
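With the two options added above, datasequence can be told which dl1ab configuration file and which DL1b production ID to use for a run. A minimal sketch of an invocation using them, mirroring the updated test in osa/scripts/tests/test_osa_scripts.py; the calibration-file options required by the parser are omitted here for brevity and the config path is a placeholder:

import subprocess

subprocess.run(
    [
        "datasequence",
        "--date=2020-01-17",
        "--dl1b-config=/path/to/dl1ab_Run01807.json",  # placeholder path; option new in 0.10.19
        "--dl1-prod-id=tailcut84",                     # new in 0.10.19
        "01807.0000",
        "LST1",
    ],
    check=False,
)
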
@@ -226,7 +232,7 @@ def data_sequence_cli_parsing():
     options.verbose = opts.verbose
     options.simulate = opts.simulate
     options.prod_id = opts.prod_id
-    options.
+    options.no_dl1ab = opts.no_dl1ab
     options.tel_id = opts.tel_id
 
     log.debug(f"The options and arguments are {opts}")
@@ -234,8 +240,7 @@ def data_sequence_cli_parsing():
     # setting the default date and directory if needed
     options.date = set_default_date_if_needed()
     options.directory = analysis_path(options.tel_id)
-
-    set_prod_ids()
+    options.prod_id = get_prod_id()
 
     return (
         opts.pedcal_file,
@@ -246,6 +251,8 @@ def data_sequence_cli_parsing():
         opts.run_summary,
         opts.pedestal_ids_file,
         opts.run_number,
+        opts.dl1b_config,
+        opts.dl1_prod_id,
     )
 
 
@@ -269,10 +276,10 @@ def sequencer_argparser():
         "calibration products already produced (default False)",
     )
     parser.add_argument(
-        "--no-
+        "--no-dl1ab",
         action="store_true",
         default=False,
-        help="Do not
+        help="Do not launch the script lstchain_dl1ab (default False)",
     )
     parser.add_argument(
         "--no-gainsel",
@@ -304,13 +311,13 @@ def sequencer_cli_parsing():
     set_common_globals(opts)
     options.no_submit = opts.no_submit
     options.no_calib = opts.no_calib
-    options.
+    options.no_dl1ab = opts.no_dl1ab
     options.no_gainsel = opts.no_gainsel
     options.force_submit = opts.force_submit
 
     log.debug(f"the options are {opts}")
 
-
+    options.prod_id = get_prod_id()
 
     # setting the default date and directory if needed
     options.date = set_default_date_if_needed()
@@ -352,7 +359,7 @@ def provprocess_argparser():
     )
     parser.add_argument("pedcal_run_id", help="Number of the used pedcal used in the calibration")
     parser.add_argument("run", help="Number of the run whose provenance is to be extracted")
-    parser.add_argument("date", action="store", type=
+    parser.add_argument("date", action="store", type=valid_date, help="Date (YYYY-MM-DD) of the start of the night")
     parser.add_argument("prod_id", action="store", type=str, help="Production ID")
 
     return parser
@@ -376,7 +383,8 @@ def provprocessparsing():
     options.filter = opts.filter
     options.quit = opts.quit
     options.no_dl2 = opts.no_dl2
-
+    options.prod_id = get_prod_id()
+    options.tel_id = "LST1"
 
 
 def simproc_argparser():
@@ -402,15 +410,15 @@ def simproc_argparser():
         dest="append",
         help="append provenance capture to existing prov.log file",
    )
-
-
-
-
-
-
-
-
-
+    parser.add_argument(
+        "-d",
+        "--date",
+        action="store",
+        type=valid_date,
+        dest="date",
+        help="observation ending date YYYY-MM-DD [default today]",
+    )
+    parser.add_argument("tel_id", choices=["ST", "LST1", "LST2"])
 
     return parser
 
@@ -424,6 +432,8 @@ def simprocparsing():
     options.provenance = opts.provenance
     options.force = opts.force
     options.append = opts.append
+    options.date = opts.date
+    options.tel_id = opts.tel_id
 
 
 def copy_datacheck_argparser():
@@ -445,11 +455,6 @@ def copy_datacheck_parsing():
     options.directory = analysis_path(options.tel_id)
     options.prod_id = get_prod_id()
 
-    if cfg.get("LST1", "DL1_PROD_ID") is not None:
-        options.dl1_prod_id = get_dl1_prod_id()
-    else:
-        options.dl1_prod_id = options.prod_id
-
 
 def sequencer_webmaker_argparser():
     parser = ArgumentParser(