lstosa-0.10.14-py3-none-any.whl → lstosa-0.10.15-py3-none-any.whl
This diff shows the published contents of two package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
- {lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/METADATA +1 -1
- {lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/RECORD +16 -15
- {lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/WHEEL +1 -1
- {lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/entry_points.txt +1 -0
- osa/_version.py +2 -2
- osa/configs/sequencer.cfg +5 -1
- osa/job.py +17 -2
- osa/scripts/gain_selection.py +359 -185
- osa/scripts/gainsel_webmaker.py +157 -0
- osa/scripts/sequencer.py +47 -3
- osa/scripts/sequencer_webmaker.py +4 -4
- osa/scripts/tests/test_osa_scripts.py +27 -0
- osa/tests/test_jobs.py +9 -3
- osa/utils/cliopts.py +8 -0
- {lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/LICENSE +0 -0
- {lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/top_level.txt +0 -0
{lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lstosa
-Version: 0.10.14
+Version: 0.10.15
 Summary: Onsite analysis pipeline for the CTA LST-1
 Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
 Author-email: Daniel Morcuende <dmorcuen@ucm.es>
{lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/RECORD CHANGED

@@ -1,7 +1,7 @@
 osa/__init__.py,sha256=crotf1NMTfNdZuCua_5T_jk3kvZrAAwVw4FPrfxv994,193
-osa/_version.py,sha256=
+osa/_version.py,sha256=zLN3_5vILM2SpYp9rDAVJoNlgXr6RmLnoGoKrqnkdX8,415
 osa/conftest.py,sha256=NBeGqTUBRqCPirDSDPny4bf1e_OJXbiePazHwaoQPY4,20072
-osa/job.py,sha256=
+osa/job.py,sha256=vl7kBRM8Oe5YK2onUWGtGmHWX0AEktS2sbOuS5fsbgU,27074
 osa/osadb.py,sha256=pkCuYbEG-moHG0uQHxwB7giQAv2XTld4HJ5gdn1F1hA,2422
 osa/paths.py,sha256=R-LwBlyoIJ-PuTJ8vcSYWMwzQY7YrgXq7gan0zhVzPY,14399
 osa/raw.py,sha256=ZNIsuqfx5ljoz_hwhSuafdKf-wr8-cxRJmel-A2endg,1337
@@ -12,7 +12,7 @@ osa/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/configs/config.py,sha256=cX0Vr4sorBVQ2KRqPIRSEGENKH_uC5lrgVHx1hp6YTk,1148
 osa/configs/datamodel.py,sha256=L_WRM91PBlMrtuE30akh7YR-56P0g9D994qzKSfhNJc,1950
 osa/configs/options.py,sha256=CyL7WnHiC_pvB3mnjRF7Wg43uPzQgmwlbvIqkRzlDLA,524
-osa/configs/sequencer.cfg,sha256=
+osa/configs/sequencer.cfg,sha256=ZHgY1GvIHkdQIg0GtR5iSNXb1C0giDObxL1SH77G1Io,4991
 osa/high_level/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/high_level/selection_cuts.toml,sha256=ReSmcKtOPZY5JsZ9ExnxYdz7OrJEB8gghCbzHmeOyFg,128
 osa/high_level/significance.py,sha256=Y1jokkHCo-D_qSqxKiQzc6KJSmivznaJRS2xY-txNIo,9039
@@ -39,19 +39,20 @@ osa/scripts/calibration_pipeline.py,sha256=g9o1chqCRRSo7GNuQZRABjGnJYjZyfhkPgRLP
 osa/scripts/closer.py,sha256=p11tq2YJLwsRK_CBWXU1465ArzLJ0qWlUCnubGqLPUo,18277
 osa/scripts/copy_datacheck.py,sha256=tfDs6oTdPbii4BOXp6bTHuED0xNJeqaPFrv6Ed7ZnWc,3104
 osa/scripts/datasequence.py,sha256=gXAp8arbLPEK-sca9VnME6-2XfUzBFIoEFchlUZYrXI,9260
-osa/scripts/gain_selection.py,sha256=
+osa/scripts/gain_selection.py,sha256=YIxSSkROox9Xqdgtz9yTFzhWg5F0Qx1ZEJkauaQF2Y8,22871
+osa/scripts/gainsel_webmaker.py,sha256=40_DX7RUmImX-31iqRTFPEUvdVXXPSKp0xMEJWuAu80,5006
 osa/scripts/provprocess.py,sha256=mufkZe6_qwH3DGqTFxINIc01hciF5RMpw3n_Mp7vwXU,18629
 osa/scripts/reprocess_longterm.py,sha256=wMfc3UVwickkGFiviIhOlB9ebMIqQPWoUrgg8hQ78Lg,2138
 osa/scripts/reprocessing.py,sha256=A97kqX7QtD7ewoUvFhmSaQVVIfplViGhzTtOenTx37w,3630
-osa/scripts/sequencer.py,sha256=
-osa/scripts/sequencer_webmaker.py,sha256=
+osa/scripts/sequencer.py,sha256=zLG_lZ4Nh0oOhTr3TeAG3RsG-iDBOwvt68Vemn-KpGI,10572
+osa/scripts/sequencer_webmaker.py,sha256=gdBYxB85wZIH01ZZKAnlygMiqBEChR1gTHmCGdA08Xo,4792
 osa/scripts/show_run_summary.py,sha256=SoDLVKdQHOJkfenFguBOfXf10Gyv7heXSQAFnDVZqMs,2468
 osa/scripts/simulate_processing.py,sha256=NiRVYiwZENt_mnKncytgJT23_-tJMb1B5PswM12nnX4,6941
 osa/scripts/update_source_catalog.py,sha256=GHwWFc-y6S4KkUJxUVM5drdAnVDD0-n3D-Tv3CCmh4E,7218
 osa/scripts/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-osa/scripts/tests/test_osa_scripts.py,sha256=
+osa/scripts/tests/test_osa_scripts.py,sha256=8rOHbujP-eutTM4QOdDvKbQEPon3gSzteua1PAgKO1E,13790
 osa/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-osa/tests/test_jobs.py,sha256=
+osa/tests/test_jobs.py,sha256=tdgZE21jisCvtlB6oAqXbV4sM1KPMG5tD-Rmcaf5OWo,15488
 osa/tests/test_osa.py,sha256=QCOsjUgPuNMHoef3Ym2sDXVjun2LaBrfKyroAIH-os8,415
 osa/tests/test_osadb.py,sha256=pJHV1dxxblGH2sjS-JPDPTkMn-ew1MzbioCFyg7wbB8,1599
 osa/tests/test_paths.py,sha256=sFCxG5uPGLcto76E7X1I26-kRx5faxgHGh9z8LvHz2M,3173
@@ -59,7 +60,7 @@ osa/tests/test_raw.py,sha256=WkgwEc_vY0D6nREo-BSm6F-5xDpqidMC0DkS86pXlRU,1058
 osa/tests/test_report.py,sha256=OY-EsrXytoS6esfjUeLnIAmCMIw9EzoGD-elySafyhE,1365
 osa/tests/test_veto.py,sha256=UIsooji_5Z8TtAhc0UlD2VqheVd9DBufuMxinJ3e0w8,1066
 osa/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-osa/utils/cliopts.py,sha256=
+osa/utils/cliopts.py,sha256=4uBf4gfUDARWzhzlobPERGn6Pv1ONWdpAXOKxh_Dtm4,14533
 osa/utils/iofile.py,sha256=kJ7KB1suynhS2cTf7EeHwhMXq3delC_ls2HFpCzvsZo,2021
 osa/utils/logging.py,sha256=1WcNPjjslo3y25jcEY_fe0yXOeJ6frZrGLAy1GJpu_k,1491
 osa/utils/mail.py,sha256=uQfqPQdiOVHTvEAXr9H15a7-g9DtYVNKjMEb9GnI0oY,554
@@ -74,9 +75,9 @@ osa/workflow/dl3.py,sha256=kz7L5jcKHFJ--UdQ8HQKLzWO6nxc2LLOTz42ExcqzTk,9921
 osa/workflow/stages.py,sha256=WYgUM2XDIaUjCc4_Zs_VSGW6gk73EaKcHk6ZMnPds74,6692
 osa/workflow/tests/test_dl3.py,sha256=aY5bb-8OcZGAXG3JPCZihChzkA_GsWjRIa31BHZn3Dg,299
 osa/workflow/tests/test_stages.py,sha256=TmC00XFACWZp740TQeFaokWi3C50ovj_XGiySWrrdZk,3944
-lstosa-0.10.14.dist-info/LICENSE,sha256=
-lstosa-0.10.14.dist-info/METADATA,sha256=
-lstosa-0.10.14.dist-info/WHEEL,sha256=
-lstosa-0.10.14.dist-info/entry_points.txt,sha256=
-lstosa-0.10.14.dist-info/top_level.txt,sha256=
-lstosa-0.10.14.dist-info/RECORD,,
+lstosa-0.10.15.dist-info/LICENSE,sha256=h6iWot11EtMvaDaS_AvCHKLTNByO5wEbMyNj1c90y1c,1519
+lstosa-0.10.15.dist-info/METADATA,sha256=MMb8yy_4losD6R4MOaW8kHk0R5xz_C-q-kby_yVzKNk,7349
+lstosa-0.10.15.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+lstosa-0.10.15.dist-info/entry_points.txt,sha256=qfARj13Vqt3I-E0tXuJwyC79cv84bjCwEj5uK67vWts,981
+lstosa-0.10.15.dist-info/top_level.txt,sha256=_Tj8zVHdrOoWZuuWTHbDpNofxW0imUmKdlXhnxsXJek,4
+lstosa-0.10.15.dist-info/RECORD,,
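Each RECORD row has the form path,sha256=digest,size: the digest is the urlsafe base64 encoding of the file's SHA-256 hash, with the "=" padding stripped, per the wheel RECORD format. A minimal Python sketch for recomputing one of the digests listed above (the unpacked-wheel directory name is illustrative):

    import base64
    import hashlib
    from pathlib import Path

    def record_digest(path: Path) -> str:
        """Return the RECORD-style digest: urlsafe base64 of SHA-256, no padding."""
        digest = hashlib.sha256(path.read_bytes()).digest()
        return base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

    # Verify an entry against the RECORD table above
    path = Path("lstosa-0.10.15-unpacked/osa/osadb.py")
    assert record_digest(path) == "pkCuYbEG-moHG0uQHxwB7giQAv2XTld4HJ5gdn1F1hA"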
{lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/entry_points.txt CHANGED

@@ -6,6 +6,7 @@ copy_datacheck = osa.scripts.copy_datacheck:main
 datasequence = osa.scripts.datasequence:main
 dl3_stage = osa.workflow.dl3:main
 gain_selection = osa.scripts.gain_selection:main
+gainsel_webmaker = osa.scripts.gainsel_webmaker:main
 provprocess = osa.scripts.provprocess:main
 reprocess_longterm = osa.scripts.reprocess_longterm:main
 reprocessing = osa.scripts.reprocessing:main
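The new gainsel_webmaker console script resolves to osa.scripts.gainsel_webmaker:main at install time. A small sketch of how the mapping can be inspected and invoked programmatically, assuming the Python 3.10+ importlib.metadata selection API:

    from importlib.metadata import entry_points

    # Look up the console script registered by the wheel
    (ep,) = entry_points(group="console_scripts", name="gainsel_webmaker")
    print(ep.value)   # "osa.scripts.gainsel_webmaker:main"

    main = ep.load()  # imports osa.scripts.gainsel_webmaker and returns main
    main()            # equivalent to running gainsel_webmaker from the shell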
osa/_version.py CHANGED (version string bumped to 0.10.15; diff not shown)

osa/configs/sequencer.cfg CHANGED
@@ -29,6 +29,7 @@ LONGTERM_DIR: %(OSA_DIR)s/DL1DataCheck_LongTerm
 MERGED_SUMMARY: %(OSA_DIR)s/Catalog/merged_RunSummary.ecsv
 SEQUENCER_WEB_DIR: %(OSA_DIR)s/SequencerWeb
 GAIN_SELECTION_FLAG_DIR: %(OSA_DIR)s/GainSel
+GAIN_SELECTION_WEB_DIR: %(OSA_DIR)s/GainSelWeb
 
 # To be set by the user. Using PROD-ID will overcome the automatic
 # fetching of lstchain version. Otherwise leave it empty (and without the colon symbol).
@@ -54,6 +55,7 @@ dl1_to_dl2: lstchain_dl1_to_dl2
 dl1a_config: /software/lstchain/data/lstchain_standard_config.json
 store_image_dl1ab: True
 merge_dl1_datacheck: True
+use_ff_heuristic_gain_selection: False
 dl1b_config: /software/lstchain/data/lstchain_standard_config.json
 dl2_config: /software/lstchain/data/lstchain_standard_config.json
 rf_models: /data/models/prod5/zenith_20deg/20201023_v0.6.3
@@ -70,11 +72,13 @@ electron: /path/to/DL2/electron_mc_testing.h5
 PARTITION_PEDCALIB: short, long
 PARTITION_DATA: short, long
 MEMSIZE_PEDCALIB: 3GB
-MEMSIZE_DATA:
+MEMSIZE_DATA: 6GB
+MEMSIZE_GAINSEL: 2GB
 WALLTIME: 1:15:00
 # Days from current day up to which the jobs are fetched from the queue.
 # Default is None (left empty).
 STARTTIME_DAYS_SACCT:
+ACCOUNT: dpps
 
 [WEBSERVER]
 # Set the server address and port to transfer the datacheck plots
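These keys are consumed through the same cfg object used throughout the diff (cfg.get("SLURM", "ACCOUNT"), cfg.getboolean("lstchain", "use_ff_heuristic_gain_selection")). A minimal sketch of how such an interpolated .cfg file resolves, assuming a plain configparser backend as the cfg.get/getboolean calls suggest; the OSA_DIR value is illustrative:

    from configparser import ConfigParser

    cfg = ConfigParser()  # default BasicInterpolation resolves %(OSA_DIR)s
    cfg.read_string("""
    [LST1]
    OSA_DIR: /fefs/aswg/data/real/OSA
    GAIN_SELECTION_WEB_DIR: %(OSA_DIR)s/GainSelWeb

    [SLURM]
    MEMSIZE_GAINSEL: 2GB
    ACCOUNT: dpps

    [lstchain]
    use_ff_heuristic_gain_selection: False
    """)

    print(cfg.get("LST1", "GAIN_SELECTION_WEB_DIR"))  # .../OSA/GainSelWeb
    print(cfg.get("SLURM", "ACCOUNT"))                # dpps
    print(cfg.getboolean("lstchain", "use_ff_heuristic_gain_selection"))  # False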
osa/job.py CHANGED
@@ -342,6 +342,7 @@ def scheduler_env_variables(sequence, scheduler="slurm"):
 
     sbatch_parameters.append(f"--partition={cfg.get('SLURM', f'PARTITION_{sequence.type}')}")
     sbatch_parameters.append(f"--mem-per-cpu={cfg.get('SLURM', f'MEMSIZE_{sequence.type}')}")
+    sbatch_parameters.append(f"--account={cfg.get('SLURM', 'ACCOUNT')}")
 
     return ["#SBATCH " + line for line in sbatch_parameters]
 
@@ -652,7 +653,7 @@ def get_squeue_output(squeue_output: StringIO) -> pd.DataFrame:
     return df
 
 
-def run_sacct() -> StringIO:
+def run_sacct(job_id: str = None) -> StringIO:
     """Run sacct to obtain the job information."""
     if shutil.which("sacct") is None:
         log.warning("No job info available since sacct command is not available")
@@ -667,13 +668,18 @@ def run_sacct() -> StringIO:
         "-o",
         ",".join(FORMAT_SLURM),
     ]
+
+    if job_id:
+        sacct_cmd.append("--jobs")
+        sacct_cmd.append(job_id)
+
     if cfg.get("SLURM", "STARTTIME_DAYS_SACCT"):
         days = int(cfg.get("SLURM", "STARTTIME_DAYS_SACCT"))
         start_date = (datetime.date.today() - datetime.timedelta(days=days)).isoformat()
         sacct_cmd.extend(["--starttime", start_date])
 
     return StringIO(sp.check_output(sacct_cmd).decode())
 
 
 def get_sacct_output(sacct_output: StringIO) -> pd.DataFrame:
     """
@@ -808,3 +814,12 @@ def update_sequence_state(sequence, filtered_job_info: pd.DataFrame) -> None:
         sequence.exit = "0:15"
     elif any("RUNNING" in job for job in filtered_job_info.State):
         sequence.state = "RUNNING"
+
+
+def job_finished_in_timeout(job_id: str) -> bool:
+    """Return True if the input job_id finished in TIMEOUT state."""
+    job_status = get_sacct_output(run_sacct(job_id=job_id))["State"]
+    if job_id and job_status.item() == "TIMEOUT":
+        return True
+    else:
+        return False
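run_sacct() can now be narrowed to a single Slurm job, which is what the new job_finished_in_timeout() helper builds on. A minimal usage sketch based on the functions shown in the hunks above (the job id is illustrative; sacct must be on PATH):

    from osa.job import get_sacct_output, job_finished_in_timeout, run_sacct

    # Fetch the accounting record of one job and inspect its state
    job_info = get_sacct_output(run_sacct(job_id="1234567"))
    print(job_info["State"].item())  # e.g. "COMPLETED", "TIMEOUT", "FAILED"

    # Convenience wrapper used by gain_selection.py to decide on a relaunch
    if job_finished_in_timeout("1234567"):
        print("job hit the walltime limit; it can be resubmitted")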
osa/scripts/gain_selection.py CHANGED
@@ -1,24 +1,28 @@
 """Script to run the gain selection over a list of dates."""
 import logging
 import re
-import shutil
 import glob
 import pandas as pd
 import subprocess as sp
 from pathlib import Path
 from textwrap import dedent
-from io import StringIO
 import argparse
+import sys
 
 from astropy.table import Table
-from lstchain.paths import
+from lstchain.paths import parse_r0_filename
+from datetime import datetime
 
 from osa.scripts.reprocessing import get_list_of_dates, check_job_status_and_wait
-from osa.utils.utils import wait_for_daytime
+from osa.utils.utils import wait_for_daytime, date_to_dir, date_to_iso
 from osa.utils.logging import myLogger
-from osa.
+from osa.utils.iofile import append_to_file
+from osa.utils.cliopts import valid_date
+from osa.job import get_sacct_output, run_sacct, job_finished_in_timeout
 from osa.configs.config import cfg
 from osa.paths import DEFAULT_CFG
+from osa.nightsummary.nightsummary import run_summary_table
 
 
 log = myLogger(logging.getLogger(__name__))
 
@@ -49,23 +53,16 @@ parser.add_argument(
     "-d",
     "--date",
     default=None,
-    type=
-    help="Night to apply the gain selection in
+    type=valid_date,
+    help="Night to apply the gain selection in YYYY-MM-DD format",
 )
 parser.add_argument(
     "-l",
     "--dates-file",
     default=None,
     help="List of dates to apply the gain selection. The input file should list"
-    "the dates in the format
-)
-parser.add_argument(
-    "-o",
-    "--output-basedir",
-    type=Path,
-    default=Path("/fefs/aswg/data/real/R0G"),
-    help="Output directory of the gain selected files. Default is /fefs/aswg/data/real/R0G."
-)
+    "the dates in the format YYYY-MM-DD, one date per line.",
+)
 parser.add_argument(
     "-s",
     "--start-time",
@@ -86,67 +83,205 @@ parser.add_argument(
     type=str,
     default=None,
     help="Choose tool to apply the gain selection regardless the date. Possible options are: lst_dvr (by default used for dates "
-    "previous to
+    "previous to 2023-12-05) and lstchain_r0_to_r0g (by default used for dates later than 2023-12-05).",
+)
+parser.add_argument(
+    "--simulate",
+    action="store_true",
+    default=False,
+    help="Simulate launching of the gain selection script. Dry run.",
+)
+parser.add_argument(
+    "-v",
+    "--verbose",
+    action="store_true",
+    default=False,
+    help="Activate debugging mode.",
 )
 
 def get_sbatch_script(
-    run_id
+    run_id: str,
+    subrun: str,
+    input_file: Path,
+    output_dir: Path,
+    log_dir: Path,
+    log_file: Path,
+    ref_time: int,
+    ref_counter: int,
+    module: int,
+    ref_source: str,
+    tool: str
 ):
     """Build the sbatch job pilot script for running the gain selection."""
-
+    mem_per_job = cfg.get("SLURM", "MEMSIZE_GAINSEL")
+    sbatch_script = dedent(
         f"""\
         #!/bin/bash
 
         #SBATCH -D {log_dir}
         #SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
         #SBATCH --job-name "gain_selection_{run_id:05d}"
-        #SBATCH --export {PATH}
         #SBATCH --partition=short,long
+        #SBATCH --mem={mem_per_job}
+        """
+    )
+
+    if tool == "lst_dvr":
+        sbatch_script += dedent(
+            f"""
+            #SBATCH --export {PATH}
 
             lst_dvr {input_file} {output_dir} {ref_time} {ref_counter} {module} {ref_source}
             """
         )
+
     elif tool == "lstchain_r0_to_r0g":
-
-        #SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
-        #SBATCH --job-name "gain_selection_{run_id:05d}"
-        #SBATCH --mem=40GB
-        #SBATCH --partition=short,long
-
-        lstchain_r0_to_r0g --R0-file={input_file} --output-dir={output_dir} --log={log_file} --no-flatfield-heuristic
-        """
-        )
+        cmd = f"lstchain_r0_to_r0g --R0-file={input_file} --output-dir={output_dir} --log={log_file}"
+        if not cfg.getboolean("lstchain", "use_ff_heuristic_gain_selection"):
+            cmd += " --no-flatfield-heuristic"
+        sbatch_script += dedent(cmd)
+
+    return sbatch_script
 
 
-def
+def launch_gainsel_for_data_run(
+    date: datetime, run: Table, output_dir: Path, r0_dir: Path, log_dir: Path, tool: str, simulate: bool = False
+):
+    """
+    Create the gain selection sbatch script and launch it for a given run.
+
+    Runs from before 20231205 without UCTS or TIB info are directly copied to the final directory.
+    Subruns that do not have four streams are also directly copied.
+    """
+    run_id = run["run_id"]
+    ref_time = run["dragon_reference_time"]
+    ref_counter = run["dragon_reference_counter"]
+    module = run["dragon_reference_module_index"]
+    ref_source = run["dragon_reference_source"].upper()
+
+    files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.????.fits.fz")
+    subrun_numbers = [int(file[-12:-8]) for file in files]
+
+    if tool == "lst_dvr" and ref_source not in ["UCTS", "TIB"]:
+        input_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
+
+        if is_run_already_copied(date, run_id):
+            log.info(f"The R0 files corresponding to run {run_id} have already been copied to the R0G directory.")
+        else:
+            if not simulate:
+                for file in input_files:
+                    log.debug(
+                        f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
+                        f"be applied. Copying directly the R0 files to {output_dir}."
+                    )
+                    sp.run(["cp", file, output_dir])
+
+            else:
+                log.info(
+                    f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
+                    f"be applied. Simulate copy of the R0 files directly to {output_dir}."
+                )
+
+    else:
+        n_subruns = max(subrun_numbers)
+
+        for subrun in range(n_subruns + 1):
+
+            r0_files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.{subrun:04d}.fits.fz")
+
+            if len(r0_files) != 4:
+                if not simulate and not is_run_already_copied(date, run_id):
+                    log.debug(f"Run {run_id:05d}.{subrun:04d} does not have 4 streams of R0 files, so gain"
+                              f"selection cannot be applied. Copying directly the R0 files to {output_dir}.")
+                    for file in r0_files:
+                        sp.run(["cp", file, output_dir])
+                elif is_run_already_copied(date, run_id):
+                    log.debug(f"Run {run_id:05d}.{subrun:04d} does not have 4 streams of R0 files. The R0 files"
+                              f"have already been copied to {output_dir}.")
+                elif simulate:
+                    log.debug(f"Run {run_id:05d}.{subrun:04d} does not have 4 streams of R0 files, so gain"
+                              f"selection cannot be applied. Simulate copy of the R0 files directly to {output_dir}.")
+
+            else:
+                history_file = log_dir / f"gain_selection_{run_id:05d}.{subrun:04d}.history"
+                if history_file.exists():
+                    if not simulate:
+                        update_history_file(run_id, subrun, log_dir, history_file)
+
+                    if history_file.read_text() == "":  # history_file is empty
+                        log.debug(f"Gain selection is still running for run {run_id:05d}.{subrun:04d}")
+                        continue
+                    else:
+                        gainsel_rc = history_file.read_text().splitlines()[-1][-1]
+                        if gainsel_rc == "1":
+                            job_id = get_last_job_id(run_id, subrun, log_dir)
+                            if job_finished_in_timeout(job_id) and not simulate:
+                                # Relaunch the job that finished in TIMEOUT
+                                job_file = log_dir / f"gain_selection_{run_id:05d}.{subrun:04d}.sh"
+                                sp.run(["sbatch", job_file], stdout=sp.PIPE, stderr=sp.STDOUT, check=True)
+                            else:
+                                log.warning(f"Gain selection failed for run {run_id:05d}.{subrun:04d}")
+                        elif gainsel_rc == "0":
+                            log.debug(f"Gain selection finished successfully for run {run_id:05d}.{subrun:04d},"
+                                      "no additional jobs will be submitted for this subrun.")
+                else:
+                    log.debug("Creating and launching the gain selection sbatch script for subrun {run_id:05d}.{subrun:04d}")
+                    if not simulate:
+                        log_file = log_dir / f"r0_to_r0g_{run_id:05d}.{subrun:04d}.log"
+                        job_file = log_dir / f"gain_selection_{run_id:05d}.{subrun:04d}.sh"
+                        r0_files.sort()
+                        with open(job_file, "w") as f:
+                            f.write(
+                                get_sbatch_script(
+                                    run_id,
+                                    subrun,
+                                    r0_files[0],
+                                    output_dir,
+                                    log_dir,
+                                    log_file,
+                                    ref_time,
+                                    ref_counter,
+                                    module,
+                                    ref_source,
+                                    tool,
+                                )
+                            )
+
+                        #submit job
+                        history_file.touch()
+                        sp.run(["sbatch", job_file], stdout=sp.PIPE, stderr=sp.STDOUT, check=True)
+
+
+def apply_gain_selection(date: datetime, start: int, end: int, tool: str = None, no_queue_check: bool = False, simulate: bool = False):
     """
     Submit the jobs to apply the gain selection to the data for a given date
     on a subrun-by-subrun basis.
     """
 
     if not tool:
-        if date < "20231205":
+        if date_to_dir(date) < "20231205":
             tool = "lst_dvr"
         else:
             tool = "lstchain_r0_to_r0g"
 
-    summary_table
+    summary_table = run_summary_table(date)
+
+    if len(summary_table) == 0:
+        log.warning(f"No runs are found in the run summary of {date_to_iso(date)}. Nothing to do. Exiting.")
+        sys.exit(0)
+
     # Apply gain selection only to DATA runs
     data_runs = summary_table[summary_table["run_type"] == "DATA"]
     log.info(f"Found {len(data_runs)} DATA runs to which apply the gain selection")
 
-
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    date_str = date_to_dir(date)
+    r0_dir = base_dir / "R0" / date_str
+    output_dir = base_dir / f"R0G/{date_str}"
+    log_dir = base_dir / f"R0G/log/{date_str}"
+    if not simulate:
+        output_dir.mkdir(parents=True, exist_ok=True)
+        log_dir.mkdir(parents=True, exist_ok=True)
 
     for run in data_runs:
         if not no_queue_check:
@@ -156,171 +291,203 @@ def apply_gain_selection(date: str, start: int, end: int, output_basedir: Path =
             # Avoid running jobs while it is still night time
             wait_for_daytime(start, end)
 
-        files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.????.fits.fz")
-        subrun_numbers = [int(file[-12:-8]) for file in files]
-        input_files = []
-
-        if tool == "lst_dvr" and ref_source not in ["UCTS", "TIB"]:
-            input_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
-            log.info(
-                f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
-                f"be applied. Copying directly the R0 files to {output_dir}."
-            )
-            for file in input_files:
-                sp.run(["cp", file, output_dir])
+        if not is_closed(date, run["run_id"]):
+            launch_gainsel_for_data_run(date, run, output_dir, r0_dir, log_dir, tool, simulate)
+
+    calib_runs = summary_table[summary_table["run_type"] != "DATA"]
+    log.info(f"Found {len(calib_runs)} NO-DATA runs")
 
+    for run in calib_runs:
+        run_id = run["run_id"]
+
+        if is_run_already_copied(date, run_id):
+            log.info(f"The R0 files corresponding to run {run_id:05d} have already been copied, nothing to do.")
         else:
-
-            new_files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.{subrun:04d}.fits.fz")
-
-                f"selection cannot be applied. Copying directly the R0 files to {output_dir}."
-            )
-            for file in new_files:
-                sp.run(["cp", file, output_dir])
+            log.info(f"Copying R0 files corresponding to run {run_id} directly to {output_dir}")
+            if not simulate:
+                # Avoid copying files while it is still night time
+                wait_for_daytime(start, end)
 
-        else:
-            new_files.sort()
-            input_files.append(new_files[0])
-
-    log.info("Creating and launching the sbatch scripts for the rest of the runs to apply gain selection")
-    for file in input_files:
-        run_info = run_info_from_filename(file)
-        job_file = log_dir / f"gain_selection_{run_info.run:05d}.{run_info.subrun:04d}.sh"
-        with open(job_file, "w") as f:
-            f.write(
-                get_sbatch_script(
-                    run_id,
-                    run_info.subrun,
-                    file,
-                    output_dir,
-                    log_dir,
-                    log_file,
-                    ref_time,
-                    ref_counter,
-                    module,
-                    ref_source,
-                    tool,
-                )
-            )
-        sp.run(["sbatch", job_file], check=True)
+                r0_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
 
-    for run in calib_runs:
-        run_id = run["run_id"]
-        log.info(f"Copying R0 files corresponding to run {run_id} directly to {output_dir}")
-        # Avoid copying files while it is still night time
-        wait_for_daytime(start, end)
+                for file in r0_files:
+                    sp.run(["cp", file, output_dir])
 
-        for file in r0_files:
-            sp.run(["cp", file, output_dir])
 
-    def
-    """
+def get_last_job_id(run_id: str, subrun: str, log_dir: Path) -> int:
+    """Get job id of the last gain selection job that was launched for a given subrun."""
+    filenames = glob.glob(f"{log_dir}/gain_selection_{run_id:05d}_{subrun:04d}_*.log")
+    if filenames:
+        match = re.search(f'gain_selection_{run_id:05d}_{subrun:04d}_(\d+).log', sorted(filenames)[-1])
+        job_id = match.group(1)
+        return job_id
 
-    sacct_cmd = [
-        "sacct",
-        "-n",
-        "--parsable2",
-        "--delimiter=,",
-        "--units=G",
-        "-o",
-        ",".join(FORMAT_SLURM),
-        "-j",
-        job,
-    ]
 
-    for job in jobs:
-        output = run_sacct_j(job)
-        df = get_sacct_output(output)
+def update_history_file(run_id: str, subrun: str, log_dir: Path, history_file: Path):
+    """
+    Update the gain selection history file with the result
+    of the last job launched for a given subrun.
+    """
+    job_id = get_last_job_id(run_id, subrun, log_dir)
+    if not job_id:
+        log.debug(f"Cannot find a job_id for the run {run_id:05d}.{subrun:04d}")
+    else:
+        job_status = get_sacct_output(run_sacct(job_id=job_id))["State"]
+        if job_status.item() in ["RUNNING", "PENDING"]:
+            log.info(f"Job {job_id} is still running.")
+            return
+
+        elif job_status.item() == "COMPLETED":
+            log.debug(f"Job {job_id} finished successfully, updating history file.")
+            string_to_write = (
+                f"{run_id:05d}.{subrun:04d} gain_selection 0\n"
+            )
+            append_to_file(history_file, string_to_write)
+
+        else:
+            log.info(f"Job {job_id} failed, updating history file.")
+            string_to_write = (
+                f"{run_id:05d}.{subrun:04d} gain_selection 1\n"
+            )
+            append_to_file(history_file, string_to_write)
 
-    if failed_jobs:
-        log.warning(f"{date}: some jobs did not finish successfully")
 
-    run_summary_dir = Path("/fefs/aswg/data/real/monitoring/RunSummary")
-    run_summary_file = run_summary_dir / f"RunSummary_{date}.ecsv"
-    summary_table = Table.read(run_summary_file)
-    runs = summary_table["run_id"]
-    missing_runs = []
-
-    r0_files = glob.glob(f"/fefs/aswg/data/real/R0/{date}/LST-1.?.Run?????.????.fits.fz")
-    r0g_files = glob.glob(f"/fefs/aswg/data/real/R0G/{date}/LST-1.?.Run?????.????.fits.fz")
-    all_r0_runs = [parse_r0_filename(i).run for i in r0_files]
-    all_r0g_runs = [parse_r0_filename(i).run for i in r0g_files]
+def is_run_already_copied(date: datetime, run_id: int) -> bool:
+    """Check if the R0 files of a given run have already been copied to the R0G directory."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    r0_files = glob.glob(f"{base_dir}/R0/{date_to_dir(date)}/LST-1.?.Run{run_id:05d}.????.fits.fz")
+    r0g_files = glob.glob(f"{base_dir}/R0G/{date_to_dir(date)}/LST-1.?.Run{run_id:05d}.????.fits.fz")
+    return len(r0_files)==len(r0g_files)
 
-    for run in all_r0_runs:
-        if run not in runs:
-            if run not in all_r0g_runs:
-                missing_runs.append(run)
 
-    missing_runs.sort()
-    if missing_runs:
-        log.info(
-            f"Some runs are missing. Copying R0 files of runs {pd.Series(missing_runs).unique()} "
-            f"directly to /fefs/aswg/data/real/R0G/{date}"
-        )
+def is_closed(date: datetime, run_id: str) -> bool:
+    """Check if run is already closed."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    log_dir = base_dir / f"R0G/log/{date_to_dir(date)}"
+    closed_run_file = log_dir / f"gain_selection_{run_id:05d}.closed"
+    return closed_run_file.exists()
 
-        output_dir = Path(f"/fefs/aswg/data/real/R0G/{date}/")
-        files = glob.glob(f"/fefs/aswg/data/real/R0/{date}/LST-1.?.Run{run:05d}.????.fits.fz")
-        for file in files:
-            sp.run(["cp", file, output_dir])
 
-def GainSel_flag_file(date:
+def GainSel_flag_file(date: datetime) -> Path:
+    """Return the path to the file indicating the completion of the gain selection stage."""
     filename = cfg.get("LSTOSA", "gain_selection_check")
     GainSel_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
-    flagfile = GainSel_dir / date / filename
+    flagfile = GainSel_dir / date_to_dir(date) / filename
     return flagfile.resolve()
 
 
-def GainSel_finished(date:
+def GainSel_finished(date: datetime) -> bool:
     """Check if gain selection finished successfully."""
     flagfile = GainSel_flag_file(date)
     return flagfile.exists()
 
 
-def
+def check_gainsel_jobs_runwise(date: datetime, run_id: int) -> bool:
     """Search for failed jobs in the log directory."""
-
-    log_dir =
-
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    log_dir = base_dir / f"R0G/log/{date_to_dir(date)}"
+    history_files = log_dir.glob(f"gain_selection_{run_id:05d}.????.history")
+    failed_subruns = []
+    log.info(f"Checking all history files of run {run_id}")
+
+    for file in history_files:
+        match = re.search(f"gain_selection_{run_id:05d}.(\d+).history", str(file))
+        subrun = match.group(1)
+        if file.read_text() != "":
+            gainsel_rc = file.read_text().splitlines()[-1][-1]
+
+            if gainsel_rc == "1":
+                log.warning(f"Gain selection failed for run {run_id}.{subrun}")
+                failed_subruns.append(file)
+
+            elif gainsel_rc == "0":
+                log.debug(f"Gain selection finished successfully for run {run_id}.{subrun}")
+        else:
+            log.info(f"Gain selection is still running for run {run_id}.{subrun}")
+            return False
+
+    if failed_subruns:
+        log.warning(f"{date_to_iso(date)}: Some gain selection jobs did not finish successfully for run {run_id}")
+        return False
+    else:
+        log.info(f"{date_to_iso(date)}: All jobs finished successfully for run {run_id}, creating the corresponding .closed file")
+        closed_run_file = log_dir / f"gain_selection_{run_id:05d}.closed"
+        closed_run_file.touch()
+        return True
 
+
+def check_warnings_in_logs(date: datetime, run_id: int):
+    """Look for warnings in the log files created by lstchain_r0_to_r0g."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    log_dir = base_dir / f"R0G/log/{date_to_dir(date)}"
+    log_files = log_dir.glob(f"r0_to_r0g_{run_id:05d}.*.log")
+    for file in log_files:
+        content = file.read_text().splitlines()
+        for line in content:
+            if "FlatField(FF)-like events are not tagged as FF" in line:
+                log.warning(f"Warning for run {run_id}: {line}")
+
+
+def check_failed_jobs(date: datetime):
+    """Search for failed jobs in the log directory."""
+
+    summary_table = run_summary_table(date)
+
+    if len(summary_table) == 0:
+        log.warning(f"No runs are found in the run summary of {date_to_iso(date)}. Nothing to do. Exiting.")
+        sys.exit(0)
+
+    data_runs = summary_table[summary_table["run_type"] == "DATA"]
+    failed_runs = []
+
+    for run in data_runs:
+        run_id = run["run_id"]
+        check_warnings_in_logs(date, run_id)
+        if not is_closed(date, run_id):
+            if not check_gainsel_jobs_runwise(date, run_id):
+                log.warning(f"Gain selection did not finish successfully for run {run_id}.")
+                failed_runs.append(run)
+
+    if failed_runs:
+        log.warning(f"Gain selection did not finish successfully for {date_to_iso(date)}, cannot create the flag file.")
+        return
+
+    runs = summary_table["run_id"]
+    missing_runs = []
+
+    date_str = date_to_dir(date)
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    r0_files = glob.glob(f"{base_dir}/R0/{date_str}/LST-1.?.Run?????.????.fits.fz")
+    r0g_files = glob.glob(f"{base_dir}/R0G/{date_str}/LST-1.?.Run?????.????.fits.fz")
+    all_r0_runs = [parse_r0_filename(i).run for i in r0_files]
+    all_r0g_runs = [parse_r0_filename(i).run for i in r0g_files]
+
+    for run in all_r0_runs:
+        if run not in runs:
+            if run not in all_r0g_runs:
+                missing_runs.append(run)
+
+    missing_runs.sort()
+    if missing_runs:
+        output_dir = base_dir / f"R0G/{date_str}/"
+        log.info(
+            f"Some runs are missing. Copying R0 files of runs {pd.Series(missing_runs).unique()} "
+            f"directly to {output_dir}"
+        )
+
+        for run in missing_runs:
+
+            files = base_dir.glob(f"R0/{date_str}/LST-1.?.Run{run:05d}.????.fits.fz")
+            for file in files:
+                sp.run(["cp", file, output_dir])
+
+    GainSel_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
+    flagfile_dir = GainSel_dir / date_str
+    flagfile_dir.mkdir(parents=True, exist_ok=True)
+
+    flagfile = GainSel_flag_file(date)
+    log.info(f"Gain selection finished successfully, creating flag file for date {date_to_iso(date)} ({flagfile})")
+    flagfile.touch()
 
 
 def main():
@@ -329,22 +496,29 @@ def main():
     script for each of them. The input file should list the dates in the format
     YYYYMMDD one date per line.
     """
-    log.setLevel(logging.INFO)
     args = parser.parse_args()
+
+    if args.verbose:
+        log.setLevel(logging.DEBUG)
+    else:
+        log.setLevel(logging.INFO)
 
     if args.date:
-        if args.
-            log.
-
+        if GainSel_finished(args.date):
+            log.warning(f"Gain selection already done for date {date_to_iso(args.date)}. Exiting.")
+            sys.exit(0)
+        elif args.check:
+            log.info(f"Checking gain selection status for date {date_to_iso(args.date)}")
+            check_failed_jobs(args.date)
         else:
-            log.info(f"
+            log.info(f"\nApplying gain selection to date {date_to_iso(args.date)}")
             apply_gain_selection(
                 args.date,
                 args.start_time,
-                args.end_time,
-                args.output_basedir,
+                args.end_time,
                 args.tool,
                 no_queue_check=args.no_queue_check,
+                simulate=args.simulate,
             )
 
@@ -355,7 +529,7 @@ def main():
     if args.check:
         for date in list_of_dates:
             log.info(f"Checking gain selection status for date {date}")
-            check_failed_jobs(date
+            check_failed_jobs(date)
     else:
         for date in list_of_dates:
             log.info(f"Applying gain selection to date {date}")
@@ -363,9 +537,9 @@ def main():
                 date,
                 args.start_time,
-                args.end_time,
-                args.output_basedir,
+                args.end_time,
                 args.tool,
                 no_queue_check=args.no_queue_check,
+                simulate=args.simulate,
             )
     log.info("Done! No more dates to process.")
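The refactor turns the per-subrun history file into the bookkeeping protocol: gain_selection_RRRRR.SSSS.history is empty while the job is queued or running, and otherwise its last line ends with the job's return code ("0" for success, "1" for failure), which is also what gainsel_webmaker reads. A minimal sketch of that convention (file name and content are hypothetical, matching what update_history_file() writes above; the union return type needs Python 3.10+):

    from pathlib import Path

    def gainsel_return_code(history_file: Path) -> str | None:
        """None while the job is still running (empty file), else the last
        character of the last line: "0" on success, "1" on failure."""
        text = history_file.read_text()
        if text == "":
            return None
        return text.splitlines()[-1][-1]

    # Hypothetical content after a failed job: "01808.0003 gain_selection 1"
    rc = gainsel_return_code(Path("gain_selection_01808.0003.history"))
    if rc == "1":
        print("subrun failed; it is resubmitted only if the job ended in TIMEOUT")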
osa/scripts/gainsel_webmaker.py ADDED

@@ -0,0 +1,157 @@
+import logging
+from argparse import ArgumentParser
+from datetime import datetime, timedelta
+from pathlib import Path
+
+import pandas as pd
+from astropy.table import Table
+
+from osa.configs import options
+from osa.configs.config import cfg
+from osa.nightsummary.nightsummary import run_summary_table
+from osa.paths import DEFAULT_CFG
+from osa.scripts.sequencer_webmaker import html_content
+from osa.utils.utils import date_to_dir, date_to_iso
+
+log = logging.getLogger(__name__)
+
+
+def valid_date(string):
+    """Check if the string is a valid date and return a datetime object."""
+    return datetime.strptime(string, "%Y-%m-%d")
+
+
+common_parser = ArgumentParser(add_help=False)
+common_parser.add_argument(
+    "-c",
+    "--config",
+    type=Path,
+    default=DEFAULT_CFG,
+    help="Use specific config file [default configs/sequencer.cfg]",
+)
+common_parser.add_argument(
+    "-d",
+    "--date",
+    help="Date of the start of the night in ISO format (YYYY-MM-DD). Defaults to yesterday",
+    type=valid_date,
+)
+
+
+def check_gainsel_jobs_runwise(date: datetime, run_id: int) -> bool:
+    """Search for failed jobs in the log directory."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    flat_date = date_to_dir(date)
+    log_dir = base_dir / f"R0G/log/{flat_date}"
+    history_files = log_dir.glob(f"gain_selection_{run_id:05d}.????.history")
+
+    success_subruns = 0
+    failed_subruns = 0
+    pending_subruns = 0
+
+    for file in history_files:
+        if file.read_text() != "":
+            gainsel_rc = file.read_text().splitlines()[-1][-1]
+
+            if gainsel_rc == "1":
+                failed_subruns += 1
+
+            elif gainsel_rc == "0":
+                success_subruns += 1
+
+        else:
+            pending_subruns += 1
+
+    return {"pending": pending_subruns, "success": success_subruns, "failed": failed_subruns}
+
+
+def check_failed_jobs(date: datetime) -> pd.DataFrame:
+    """Search for failed jobs in the log directory."""
+    summary_table = run_summary_table(date)
+    data_runs = summary_table[summary_table["run_type"] == "DATA"]
+
+    gainsel_status_dict = {}
+    for run in data_runs:
+        run_id = run["run_id"]
+        gainsel_job_status = check_gainsel_jobs_runwise(date, run_id)
+        gainsel_status_dict[run_id] = gainsel_job_status
+
+    gainsel_df = pd.DataFrame(gainsel_status_dict.values(), index=gainsel_status_dict.keys())
+    gainsel_df.reset_index(inplace=True)
+    gainsel_df.rename(columns={"index": "run_id"}, inplace=True)
+    summary_table = summary_table.to_pandas()
+
+    final_table = pd.merge(summary_table, gainsel_df, on="run_id")[
+        [
+            "run_id",
+            "n_subruns",
+            "pending",
+            "success",
+            "failed",
+        ]
+    ]
+
+    def determine_status(row):
+        if row["failed"] > 0:
+            return "FAILED"
+        elif row["pending"] == row["n_subruns"]:
+            return "PENDING"
+        elif row["success"] == row["n_subruns"]:
+            return "COMPLETED"
+        elif row["pending"] > 0:
+            return "RUNNING"
+        else:
+            return "NOT STARTED"
+
+    final_table["GainSel%"] = round(final_table["success"] * 100 / final_table["n_subruns"])
+    final_table["GainSelStatus"] = final_table.apply(determine_status, axis=1)
+
+    return final_table
+
+
+def main():
+    """Produce the html file with the processing OSA Gain Selection status.
+
+    It creates an HTML file osa_gainsel_status_YYYY-MM-DD.html
+    """
+    args = ArgumentParser(
+        description=(
+            "Script to create an HTML file with the gain selection status "
+            "(osa_gainsel_status_YYYY-MM-DD.html)"
+        ),
+        parents=[common_parser],
+    ).parse_args()
+
+    if args.date:
+        flat_date = date_to_dir(args.date)
+        options.date = args.date
+
+    else:
+        # yesterday by default
+        yesterday = datetime.now() - timedelta(days=1)
+        options.date = yesterday
+        flat_date = date_to_dir(yesterday)
+
+    date = date_to_iso(options.date)
+    run_summary_directory = Path(cfg.get("LST1", "RUN_SUMMARY_DIR"))
+    run_summary_file = run_summary_directory / f"RunSummary_{flat_date}.ecsv"
+
+    gain_selection_web_directory = Path(cfg.get("LST1", "GAIN_SELECTION_WEB_DIR"))
+    gain_selection_web_directory.mkdir(parents=True, exist_ok=True)
+    html_file = gain_selection_web_directory / f"osa_gainsel_status_{date}.html"
+
+    # Create and save the HTML file
+    if not run_summary_file.is_file() or len(Table.read(run_summary_file)["run_id"]) == 0:
+        content = "<p>No data found</p>"
+        log.warning(f"No data found for date {date}, creating an empty HTML file.")
+
+    else:
+        # Get the table with the gain selection check report in HTML format:
+        table_gain_selection_jobs = check_failed_jobs(options.date)
+        content = table_gain_selection_jobs.to_html(justify="left")
+
+    html_file.write_text(html_content(content, date, "OSA Gain Selection"))
+    log.info(f"Created HTML file {html_file}")
+
+
+if __name__ == "__main__":
+    main()
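From the command line the new script is run as, e.g., gainsel_webmaker -d 2024-01-12, which writes osa_gainsel_status_2024-01-12.html into GAIN_SELECTION_WEB_DIR. The per-run status column follows the precedence in determine_status() above; a self-contained pandas demonstration of that logic (run ids and subrun counts are illustrative):

    import pandas as pd

    # Toy per-run counts in the shape assembled by check_failed_jobs()
    table = pd.DataFrame(
        {
            "run_id": [1807, 1808, 1809],
            "n_subruns": [10, 10, 4],
            "pending": [0, 10, 1],
            "success": [10, 0, 2],
            "failed": [0, 0, 1],
        }
    )

    def determine_status(row):
        # Same precedence as gainsel_webmaker: any failure wins, then
        # all-pending, then all-success, then partially pending.
        if row["failed"] > 0:
            return "FAILED"
        elif row["pending"] == row["n_subruns"]:
            return "PENDING"
        elif row["success"] == row["n_subruns"]:
            return "COMPLETED"
        elif row["pending"] > 0:
            return "RUNNING"
        return "NOT STARTED"

    table["GainSelStatus"] = table.apply(determine_status, axis=1)
    print(table[["run_id", "GainSelStatus"]])  # COMPLETED, PENDING, FAILED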
osa/scripts/sequencer.py CHANGED
@@ -9,6 +9,7 @@ import logging
 import os
 import sys
 from decimal import Decimal
+import datetime
 
 from osa import osadb
 from osa.configs import options
@@ -28,7 +29,7 @@ from osa.paths import analysis_path
 from osa.report import start
 from osa.utils.cliopts import sequencer_cli_parsing
 from osa.utils.logging import myLogger
 from osa.utils.utils import is_day_closed, gettag, date_to_iso
 from osa.veto import get_closed_list, get_veto_list
 from osa.scripts.gain_selection import GainSel_finished
 
@@ -98,9 +99,9 @@ def single_process(telescope):
         log.warning("No runs found for this date. Nothing to do. Exiting.")
         sys.exit(0)
 
-    if not options.no_gainsel and not GainSel_finished(
+    if not options.no_gainsel and not GainSel_finished(options.date):
         log.info(
-            f"Gain selection did not finish successfully for date {options.date}."
+            f"Gain selection did not finish successfully for date {date_to_iso(options.date)}. "
             "Try again later, once gain selection has finished."
         )
         sys.exit()
@@ -109,6 +110,15 @@ def single_process(telescope):
         log.info(f"Date {date_to_iso(options.date)} is already closed for {options.tel_id}")
         return sequence_list
 
+    if not options.test and not options.simulate:
+        if is_sequencer_running(options.date):
+            log.info(f"Sequencer is still running for date {date_to_iso(options.date)}. Try again later.")
+            sys.exit(0)
+
+        elif is_sequencer_completed(options.date) and not options.force_submit:
+            log.info(f"Sequencer already finished for date {date_to_iso(options.date)}. Exiting")
+            sys.exit(0)
+
     # Build the sequences
     sequence_list = build_sequences(options.date)
 
@@ -306,5 +316,39 @@ def output_matrix(matrix: list, padding_space: int):
         log.info(stringrow)
 
 
+def is_sequencer_running(date: datetime.datetime) -> bool:
+    """Check if the jobs launched by sequencer are running or pending for the given date."""
+    summary_table = run_summary_table(date)
+    sacct_output = run_sacct()
+    sacct_info = get_sacct_output(sacct_output)
+
+    for run in summary_table["run_id"]:
+        jobs_run = sacct_info[sacct_info["JobName"]==f"LST1_{run:05d}"]
+        queued_jobs = jobs_run[(jobs_run["State"] == "RUNNING") | (jobs_run["State"] == "PENDING")]
+        if len(queued_jobs) != 0:
+            return True
+
+    return False
+
+
+def is_sequencer_completed(date: datetime.datetime) -> bool:
+    """Check if the jobs launched by sequencer are running or pending for the given date."""
+    summary_table = run_summary_table(date)
+    data_runs = summary_table[summary_table["run_type"] == "DATA"]
+    sacct_output = run_sacct()
+    sacct_info = get_sacct_output(sacct_output)
+
+    for run in data_runs["run_id"]:
+        jobs_run = sacct_info[sacct_info["JobName"]==f"LST1_{run:05d}"]
+        if len(jobs_run["JobID"].unique())>1:
+            last_job_id = sorted(jobs_run["JobID"].unique())[-1]
+            jobs_run = sacct_info[sacct_info["JobID"]==last_job_id]
+        incomplete_jobs = jobs_run[(jobs_run["State"] != "COMPLETED")]
+        if len(jobs_run) == 0 or len(incomplete_jobs) != 0:
+            return False
+
+    return True
+
+
 if __name__ == "__main__":
     main()
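The new guard matches accounting records by job name, LST1_<run> zero-padded to five digits, and treats any RUNNING or PENDING entry as "still running". A toy sketch of the filtering on a DataFrame shaped like get_sacct_output()'s result (column values are illustrative):

    import pandas as pd

    sacct_info = pd.DataFrame(
        {
            "JobID": ["100_1", "100_2", "101_1"],
            "JobName": ["LST1_01807", "LST1_01807", "LST1_01808"],
            "State": ["COMPLETED", "RUNNING", "PENDING"],
        }
    )

    run = 1807
    jobs_run = sacct_info[sacct_info["JobName"] == f"LST1_{run:05d}"]
    queued = jobs_run[(jobs_run["State"] == "RUNNING") | (jobs_run["State"] == "PENDING")]
    print(len(queued) != 0)  # True: sequencer would exit and ask to retry later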
osa/scripts/sequencer_webmaker.py CHANGED

@@ -20,7 +20,7 @@ from osa.utils.utils import is_day_closed, date_to_iso, date_to_dir
 log = myLogger(logging.getLogger())
 
 
-def html_content(body: str, date: str) -> str:
+def html_content(body: str, date: str, title: str) -> str:
     """Build the HTML content.
 
     Parameters
@@ -43,11 +43,11 @@ def html_content(body: str, date: str) -> str:
     <html xmlns="http://www.w3.org/1999/xhtml">
     <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>
+    <title>{title} status</title><link href="osa.css" rel="stylesheet"
     type="text/css" /><style>table{{width:152ex;}}</style>
     </head>
     <body>
-    <h1>
+    <h1>{title} processing status</h1>
     <p>Processing data from: {date}. Last updated: {time_update} UTC</p>
     {body}
     </body>
@@ -159,7 +159,7 @@ def main():
     directory.mkdir(parents=True, exist_ok=True)
 
     html_file = directory / Path(f"osa_status_{flat_date}.html")
-    html_file.write_text(html_content(html_table, date), encoding="utf-8")
+    html_file.write_text(html_content(html_table, date, "OSA Sequencer"), encoding="utf-8")
 
     log.info("Done")
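With the new title parameter, html_content() is shared between the sequencer and gain-selection status pages. A short usage sketch based on the template in the hunk above:

    from osa.scripts.sequencer_webmaker import html_content

    # Same helper, two consumers: only the title differs
    page = html_content("<p>No data found</p>", "2024-01-12", "OSA Gain Selection")
    assert "<h1>OSA Gain Selection processing status</h1>" in page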
osa/scripts/tests/test_osa_scripts.py CHANGED

@@ -23,6 +23,7 @@ ALL_SCRIPTS = [
     "theta2_significance",
     "source_coordinates",
     "sequencer_webmaker",
+    "gainsel_webmaker",
 ]
 
 options.date = datetime.datetime.fromisoformat("2020-01-17")
@@ -397,3 +398,29 @@ def test_sequencer_webmaker(
     # Running without test option will make the script fail
     output = sp.run(["sequencer_webmaker", "-d", "2020-01-17"])
     assert output.returncode != 0
+
+
+def test_gainsel_webmaker(
+    base_test_dir,
+):
+
+    output = sp.run(["gainsel_webmaker", "-d", "2020-01-17"])
+    assert output.returncode == 0
+    directory = base_test_dir / "OSA" / "GainSelWeb"
+    expected_file = directory / "osa_gainsel_status_2020-01-17.html"
+    assert expected_file.exists()
+
+    # Test a date with non-existing run summary
+    output = sp.run(["gainsel_webmaker", "-d", "2024-01-12"])
+    assert output.returncode == 0
+    directory = base_test_dir / "OSA" / "GainSelWeb"
+    expected_file = directory / "osa_gainsel_status_2024-01-12.html"
+    assert expected_file.exists()
+
+
+def test_gainsel_web_content():
+    from osa.scripts.gainsel_webmaker import check_failed_jobs
+
+    table = check_failed_jobs(options.date)
+    assert table["GainSelStatus"][0] == "NOT STARTED"
+    assert table["GainSel%"][0] == 0.0
osa/tests/test_jobs.py CHANGED
@@ -71,6 +71,7 @@ def test_scheduler_env_variables(sequence_list, running_analysis_dir):
     "#SBATCH --error=log/Run01809.%4a_jobid_%A.err",
     f'#SBATCH --partition={cfg.get("SLURM", "PARTITION_PEDCALIB")}',
     "#SBATCH --mem-per-cpu=3GB",
+    "#SBATCH --account=dpps",
 ]
 # Extract the second sequence
 second_sequence = sequence_list[1]
@@ -83,7 +84,8 @@ def test_scheduler_env_variables(sequence_list, running_analysis_dir):
     "#SBATCH --error=log/Run01807.%4a_jobid_%A.err",
     "#SBATCH --array=0-10",
     f'#SBATCH --partition={cfg.get("SLURM", "PARTITION_DATA")}',
-    "#SBATCH --mem-per-cpu=
+    "#SBATCH --mem-per-cpu=6GB",
+    "#SBATCH --account=dpps",
 ]
 
 
@@ -104,7 +106,8 @@ def test_job_header_template(sequence_list, running_analysis_dir):
     #SBATCH --output=log/Run01809.%4a_jobid_%A.out
     #SBATCH --error=log/Run01809.%4a_jobid_%A.err
     #SBATCH --partition={cfg.get('SLURM', 'PARTITION_PEDCALIB')}
-    #SBATCH --mem-per-cpu=3GB
+    #SBATCH --mem-per-cpu=3GB
+    #SBATCH --account=dpps"""
     )
     assert header == output_string1
 
@@ -122,7 +125,8 @@ def test_job_header_template(sequence_list, running_analysis_dir):
     #SBATCH --error=log/Run01807.%4a_jobid_%A.err
     #SBATCH --array=0-10
     #SBATCH --partition={cfg.get('SLURM', 'PARTITION_DATA')}
-    #SBATCH --mem-per-cpu=
+    #SBATCH --mem-per-cpu=6GB
+    #SBATCH --account=dpps"""
     )
     assert header == output_string2
 
@@ -154,6 +158,7 @@ def test_create_job_template_scheduler(
     #SBATCH --array=0-10
     #SBATCH --partition={cfg.get('SLURM', 'PARTITION_DATA')}
     #SBATCH --mem-per-cpu={cfg.get('SLURM', 'MEMSIZE_DATA')}
+    #SBATCH --account={cfg.get('SLURM', 'ACCOUNT')}
 
     import os
     import subprocess
@@ -199,6 +204,7 @@ def test_create_job_template_scheduler(
     #SBATCH --array=0-8
     #SBATCH --partition={cfg.get('SLURM', 'PARTITION_DATA')}
     #SBATCH --mem-per-cpu={cfg.get('SLURM', 'MEMSIZE_DATA')}
+    #SBATCH --account={cfg.get('SLURM', 'ACCOUNT')}
 
     import os
     import subprocess
osa/utils/cliopts.py CHANGED
@@ -280,6 +280,13 @@ def sequencer_argparser():
         default=False,
         help="Do not check if the gain selection finished correctly (default False)",
     )
+    parser.add_argument(
+        "-f",
+        "--force-submit",
+        action="store_true",
+        default=False,
+        help="Force sequencer to submit jobs"
+    )
     parser.add_argument(
         "tel_id",
         choices=["ST", "LST1", "LST2", "all"],
@@ -299,6 +306,7 @@ def sequencer_cli_parsing():
     options.no_calib = opts.no_calib
     options.no_dl2 = opts.no_dl2
     options.no_gainsel = opts.no_gainsel
+    options.force_submit = opts.force_submit
 
     log.debug(f"the options are {opts}")
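The new -f/--force-submit flag flows from the parser into the shared options module and is honored by the is_sequencer_completed() guard in sequencer.py. A minimal sketch, assuming sequencer_argparser() returns the configured ArgumentParser as the hunk suggests and that tel_id is the only required positional:

    from osa.utils.cliopts import sequencer_argparser

    # Parse a hypothetical command line: force resubmission for LST1
    opts = sequencer_argparser().parse_args(["--force-submit", "LST1"])
    print(opts.force_submit)  # True
    print(opts.tel_id)        # "LST1"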
{lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/LICENSE: file without changes

{lstosa-0.10.14.dist-info → lstosa-0.10.15.dist-info}/top_level.txt: file without changes