lstosa 0.10.13__tar.gz → 0.10.15__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lstosa-0.10.13 → lstosa-0.10.15}/PKG-INFO +1 -1
- {lstosa-0.10.13 → lstosa-0.10.15}/pyproject.toml +1 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/PKG-INFO +1 -1
- {lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/SOURCES.txt +1 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/entry_points.txt +1 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/_version.py +2 -2
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/configs/sequencer.cfg +5 -1
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/job.py +17 -2
- lstosa-0.10.15/src/osa/scripts/gain_selection.py +548 -0
- lstosa-0.10.15/src/osa/scripts/gainsel_webmaker.py +157 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/reprocessing.py +7 -2
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/sequencer.py +47 -3
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/sequencer_webmaker.py +4 -4
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/tests/test_osa_scripts.py +27 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_jobs.py +9 -3
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/cliopts.py +8 -0
- lstosa-0.10.13/src/osa/scripts/gain_selection.py +0 -350
- {lstosa-0.10.13 → lstosa-0.10.15}/.coveragerc +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/.gitignore +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/.mailmap +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/.pre-commit-config.yaml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/.readthedocs.yml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/LICENSE +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/MANIFEST.in +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/README.md +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/codemeta.json +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/crontab/crontab.txt +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/dev/mysql.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/Makefile +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/_static/logo_lstosa.png +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/authors.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/components/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/conf.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/configuration.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/contribute.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/documents/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/howto/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/introduction/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/introduction/reduction_steps_lstchain.png +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/jobs.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/make.bat +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/nightsummary.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/provenance.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/references.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/reports.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/scripts/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/troubleshooting/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/utils.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/veto.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/workflow/LSTOSA_flow.png +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/docs/workflow/index.rst +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/environment.yml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/example_sequencer.txt +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/history_files/sequence_LST1_04183.history +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/history_files/sequence_LST1_04183_failed.history +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/history_files/sequence_LST1_04183_oneline.history +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/history_files/sequence_LST1_04185.0010.history +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/sacct_output.csv +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/extra/squeue_output.csv +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/setup.cfg +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/dependency_links.txt +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/requires.txt +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/top_level.txt +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/configs/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/configs/config.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/configs/datamodel.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/configs/options.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/conftest.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/high_level/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/high_level/selection_cuts.toml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/high_level/significance.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/high_level/tests/test_significance.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/database.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/extract.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/nightsummary.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/set_source_coordinates.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/tests/test_database.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/tests/test_extract.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/tests/test_nightsummary.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/nightsummary/tests/test_source_coordinates.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/osadb.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/paths.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/capture.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/config/definition.yaml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/config/environment.yaml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/config/logger.yaml +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/io.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/provenance/utils.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/raw.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/report.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/autocloser.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/calibration_pipeline.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/closer.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/copy_datacheck.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/datasequence.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/provprocess.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/reprocess_longterm.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/show_run_summary.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/simulate_processing.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/tests/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/scripts/update_source_catalog.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_osa.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_osadb.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_paths.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_raw.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_report.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/tests/test_veto.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/iofile.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/logging.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/mail.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/register.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/tests/test_iofile.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/tests/test_utils.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/utils/utils.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/version.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/veto.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/webserver/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/webserver/utils.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/workflow/__init__.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/workflow/dl3.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/workflow/stages.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/workflow/tests/test_dl3.py +0 -0
- {lstosa-0.10.13 → lstosa-0.10.15}/src/osa/workflow/tests/test_stages.py +0 -0
{lstosa-0.10.13 → lstosa-0.10.15}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lstosa
-Version: 0.10.13
+Version: 0.10.15
 Summary: Onsite analysis pipeline for the CTA LST-1
 Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
 Author-email: Daniel Morcuende <dmorcuen@ucm.es>
{lstosa-0.10.13 → lstosa-0.10.15}/pyproject.toml

@@ -75,6 +75,7 @@ reprocessing = "osa.scripts.reprocessing:main"
 reprocess_longterm = "osa.scripts.reprocess_longterm:main"
 gain_selection = "osa.scripts.gain_selection:main"
 update_source_catalog = "osa.scripts.update_source_catalog:main"
+gainsel_webmaker = "osa.scripts.gainsel_webmaker:main"
 
 [tool.setuptools.packages.find]
 where = ["src"]
{lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lstosa
-Version: 0.10.13
+Version: 0.10.15
 Summary: Onsite analysis pipeline for the CTA LST-1
 Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
 Author-email: Daniel Morcuende <dmorcuen@ucm.es>
{lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/SOURCES.txt

@@ -90,6 +90,7 @@ src/osa/scripts/closer.py
 src/osa/scripts/copy_datacheck.py
 src/osa/scripts/datasequence.py
 src/osa/scripts/gain_selection.py
+src/osa/scripts/gainsel_webmaker.py
 src/osa/scripts/provprocess.py
 src/osa/scripts/reprocess_longterm.py
 src/osa/scripts/reprocessing.py
{lstosa-0.10.13 → lstosa-0.10.15}/src/lstosa.egg-info/entry_points.txt

@@ -6,6 +6,7 @@ copy_datacheck = osa.scripts.copy_datacheck:main
 datasequence = osa.scripts.datasequence:main
 dl3_stage = osa.workflow.dl3:main
 gain_selection = osa.scripts.gain_selection:main
+gainsel_webmaker = osa.scripts.gainsel_webmaker:main
 provprocess = osa.scripts.provprocess:main
 reprocess_longterm = osa.scripts.reprocess_longterm:main
 reprocessing = osa.scripts.reprocessing:main
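Both of the entry-point hunks above register the same console script: once the package is installed, running `gainsel_webmaker` from the shell dispatches to `osa.scripts.gainsel_webmaker.main`. A minimal sketch of what the generated wrapper amounts to (the module itself is new in 0.10.15 and its body is not shown in this diff):

```python
# Equivalent of invoking the `gainsel_webmaker` console script after install;
# the setuptools-generated wrapper just imports and calls the declared main().
from osa.scripts.gainsel_webmaker import main

if __name__ == "__main__":
    main()
```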
{lstosa-0.10.13 → lstosa-0.10.15}/src/osa/configs/sequencer.cfg

@@ -29,6 +29,7 @@ LONGTERM_DIR: %(OSA_DIR)s/DL1DataCheck_LongTerm
 MERGED_SUMMARY: %(OSA_DIR)s/Catalog/merged_RunSummary.ecsv
 SEQUENCER_WEB_DIR: %(OSA_DIR)s/SequencerWeb
 GAIN_SELECTION_FLAG_DIR: %(OSA_DIR)s/GainSel
+GAIN_SELECTION_WEB_DIR: %(OSA_DIR)s/GainSelWeb
 
 # To be set by the user. Using PROD-ID will overcome the automatic
 # fetching of lstchain version. Otherwise leave it empty (and without the colon symbol).
@@ -54,6 +55,7 @@ dl1_to_dl2: lstchain_dl1_to_dl2
 dl1a_config: /software/lstchain/data/lstchain_standard_config.json
 store_image_dl1ab: True
 merge_dl1_datacheck: True
+use_ff_heuristic_gain_selection: False
 dl1b_config: /software/lstchain/data/lstchain_standard_config.json
 dl2_config: /software/lstchain/data/lstchain_standard_config.json
 rf_models: /data/models/prod5/zenith_20deg/20201023_v0.6.3
@@ -70,11 +72,13 @@ electron: /path/to/DL2/electron_mc_testing.h5
 PARTITION_PEDCALIB: short, long
 PARTITION_DATA: short, long
 MEMSIZE_PEDCALIB: 3GB
-MEMSIZE_DATA:
+MEMSIZE_DATA: 6GB
+MEMSIZE_GAINSEL: 2GB
 WALLTIME: 1:15:00
 # Days from current day up to which the jobs are fetched from the queue.
 # Default is None (left empty).
 STARTTIME_DAYS_SACCT:
+ACCOUNT: dpps
 
 [WEBSERVER]
 # Set the server address and port to transfer the datacheck plots
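A minimal sketch of how the new keys are read back through lstosa's configparser-backed `cfg` object. The `[SLURM]` and `[lstchain]` placements are confirmed by the job.py and gain_selection.py hunks below; placing `GAIN_SELECTION_WEB_DIR` in the same section as `GAIN_SELECTION_FLAG_DIR` (read from `[LST1]` in the new script) is an assumption:

```python
# Sketch, assuming the sections indicated above; values come from sequencer.cfg.
from osa.configs.config import cfg

mem_gainsel = cfg.get("SLURM", "MEMSIZE_GAINSEL")   # "2GB", memory for gain-selection jobs
account = cfg.get("SLURM", "ACCOUNT")               # "dpps", appended as --account (see job.py below)
use_ff = cfg.getboolean("lstchain", "use_ff_heuristic_gain_selection")  # False by default
web_dir = cfg.get("LST1", "GAIN_SELECTION_WEB_DIR")  # assumed section; %(OSA_DIR)s/GainSelWeb after interpolation
```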
{lstosa-0.10.13 → lstosa-0.10.15}/src/osa/job.py

@@ -342,6 +342,7 @@ def scheduler_env_variables(sequence, scheduler="slurm"):
 
     sbatch_parameters.append(f"--partition={cfg.get('SLURM', f'PARTITION_{sequence.type}')}")
     sbatch_parameters.append(f"--mem-per-cpu={cfg.get('SLURM', f'MEMSIZE_{sequence.type}')}")
+    sbatch_parameters.append(f"--account={cfg.get('SLURM', 'ACCOUNT')}")
 
     return ["#SBATCH " + line for line in sbatch_parameters]
 
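With `ACCOUNT: dpps` set in sequencer.cfg above, every sequence submitted by the sequencer now carries an explicit Slurm account. An illustrative sketch of the directive block this produces for a DATA sequence (partition and memory values taken from the sequencer.cfg hunk; the surrounding `sbatch_parameters` entries are abbreviated):

```python
# Hypothetical rendering for sequence.type == "DATA" with the cfg values above.
sbatch_parameters = [
    "--partition=short, long",  # PARTITION_DATA
    "--mem-per-cpu=6GB",        # MEMSIZE_DATA
    "--account=dpps",           # new in 0.10.15
]
print("\n".join("#SBATCH " + line for line in sbatch_parameters))
# #SBATCH --partition=short, long
# #SBATCH --mem-per-cpu=6GB
# #SBATCH --account=dpps
```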
@@ -652,7 +653,7 @@ def get_squeue_output(squeue_output: StringIO) -> pd.DataFrame:
     return df
 
 
-def run_sacct() -> StringIO:
+def run_sacct(job_id: str = None) -> StringIO:
     """Run sacct to obtain the job information."""
     if shutil.which("sacct") is None:
         log.warning("No job info available since sacct command is not available")
@@ -667,13 +668,18 @@ def run_sacct() -> StringIO:
         "-o",
         ",".join(FORMAT_SLURM),
     ]
+
+    if job_id:
+        sacct_cmd.append("--jobs")
+        sacct_cmd.append(job_id)
+
     if cfg.get("SLURM", "STARTTIME_DAYS_SACCT"):
         days = int(cfg.get("SLURM", "STARTTIME_DAYS_SACCT"))
         start_date = (datetime.date.today() - datetime.timedelta(days=days)).isoformat()
         sacct_cmd.extend(["--starttime", start_date])
 
     return StringIO(sp.check_output(sacct_cmd).decode())
-
+
 
 def get_sacct_output(sacct_output: StringIO) -> pd.DataFrame:
     """
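The new optional `job_id` narrows the `sacct` query to a single job by appending `--jobs <id>` ahead of the `--starttime` window. A usage sketch (the job id is made up), mirroring how the new gain-selection script reads back a job's state:

```python
# Hypothetical job id; get_sacct_output parses the sacct output into a DataFrame.
from osa.job import get_sacct_output, run_sacct

state = get_sacct_output(run_sacct(job_id="12345678"))["State"]
print(state.item())  # e.g. "COMPLETED" or "TIMEOUT" for a single-job query
```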
@@ -808,3 +814,12 @@ def update_sequence_state(sequence, filtered_job_info: pd.DataFrame) -> None:
         sequence.exit = "0:15"
     elif any("RUNNING" in job for job in filtered_job_info.State):
         sequence.state = "RUNNING"
+
+
+def job_finished_in_timeout(job_id: str) -> bool:
+    """Return True if the input job_id finished in TIMEOUT state."""
+    job_status = get_sacct_output(run_sacct(job_id=job_id))["State"]
+    if job_id and job_status.item() == "TIMEOUT":
+        return True
+    else:
+        return False
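This helper is consumed by the new gain_selection.py below: a subrun whose last job hit the Slurm time limit gets its pilot script resubmitted, while any other failure is only logged. A condensed sketch of that retry pattern (the job id and script path are placeholders):

```python
# Sketch of the retry logic used in gain_selection.py; job_id and job_file
# are hypothetical placeholders, not values from this diff.
import subprocess as sp
from osa.job import job_finished_in_timeout

job_id = "12345678"                        # last job launched for the subrun
job_file = "gain_selection_01234.0001.sh"  # its sbatch pilot script
if job_finished_in_timeout(job_id):
    sp.run(["sbatch", job_file], check=True)  # relaunch only TIMEOUT jobs
else:
    print("gain selection failed for another reason; not resubmitting")
```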
lstosa-0.10.15/src/osa/scripts/gain_selection.py

@@ -0,0 +1,548 @@
+"""Script to run the gain selection over a list of dates."""
+import logging
+import re
+import glob
+import pandas as pd
+import subprocess as sp
+from pathlib import Path
+from textwrap import dedent
+import argparse
+import sys
+
+from astropy.table import Table
+from lstchain.paths import parse_r0_filename
+from datetime import datetime
+
+from osa.scripts.reprocessing import get_list_of_dates, check_job_status_and_wait
+from osa.utils.utils import wait_for_daytime, date_to_dir, date_to_iso
+from osa.utils.logging import myLogger
+from osa.utils.iofile import append_to_file
+from osa.utils.cliopts import valid_date
+from osa.job import get_sacct_output, run_sacct, job_finished_in_timeout
+from osa.configs.config import cfg
+from osa.paths import DEFAULT_CFG
+from osa.nightsummary.nightsummary import run_summary_table
+
+
+log = myLogger(logging.getLogger(__name__))
+
+PATH = "PATH=/fefs/aswg/software/offline_dvr/bin:$PATH"
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    "--check",
+    action="store_true",
+    default=False,
+    help="Check if any job failed",
+)
+parser.add_argument(
+    "--no-queue-check",
+    action="store_true",
+    default=False,
+    help="Do not wait until the number of jobs in the slurm queue is < 1500",
+)
+parser.add_argument(
+    "-c",
+    "--config",
+    action="store",
+    type=Path,
+    default=DEFAULT_CFG,
+    help="Configuration file",
+)
+parser.add_argument(
+    "-d",
+    "--date",
+    default=None,
+    type=valid_date,
+    help="Night to apply the gain selection in YYYY-MM-DD format",
+)
+parser.add_argument(
+    "-l",
+    "--dates-file",
+    default=None,
+    help="List of dates to apply the gain selection. The input file should list"
+    "the dates in the format YYYY-MM-DD, one date per line.",
+)
+parser.add_argument(
+    "-s",
+    "--start-time",
+    type=int,
+    default=10,
+    help="Time to (re)start gain selection in HH format. Default is 10.",
+)
+parser.add_argument(
+    "-e",
+    "--end-time",
+    type=int,
+    default=18,
+    help="Time to stop gain selection in HH format. Default is 18.",
+)
+parser.add_argument(
+    "-t",
+    "--tool",
+    type=str,
+    default=None,
+    help="Choose tool to apply the gain selection regardless the date. Possible options are: lst_dvr (by default used for dates "
+    "previous to 2023-12-05) and lstchain_r0_to_r0g (by default used for dates later than 2023-12-05).",
+)
+parser.add_argument(
+    "--simulate",
+    action="store_true",
+    default=False,
+    help="Simulate launching of the gain selection script. Dry run.",
+)
+parser.add_argument(
+    "-v",
+    "--verbose",
+    action="store_true",
+    default=False,
+    help="Activate debugging mode.",
+)
+
+def get_sbatch_script(
+    run_id: str,
+    subrun: str,
+    input_file: Path,
+    output_dir: Path,
+    log_dir: Path,
+    log_file: Path,
+    ref_time: int,
+    ref_counter: int,
+    module: int,
+    ref_source: str,
+    tool: str
+):
+    """Build the sbatch job pilot script for running the gain selection."""
+    mem_per_job = cfg.get("SLURM", "MEMSIZE_GAINSEL")
+    sbatch_script = dedent(
+        f"""\
+        #!/bin/bash
+
+        #SBATCH -D {log_dir}
+        #SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
+        #SBATCH --job-name "gain_selection_{run_id:05d}"
+        #SBATCH --partition=short,long
+        #SBATCH --mem={mem_per_job}
+        """
+    )
+
+    if tool == "lst_dvr":
+        sbatch_script += dedent(
+            f"""
+            #SBATCH --export {PATH}
+
+            lst_dvr {input_file} {output_dir} {ref_time} {ref_counter} {module} {ref_source}
+            """
+        )
+
+    elif tool == "lstchain_r0_to_r0g":
+        cmd = f"lstchain_r0_to_r0g --R0-file={input_file} --output-dir={output_dir} --log={log_file}"
+        if not cfg.getboolean("lstchain", "use_ff_heuristic_gain_selection"):
+            cmd += " --no-flatfield-heuristic"
+        sbatch_script += dedent(cmd)
+
+    return sbatch_script
+
+
+def launch_gainsel_for_data_run(
+    date: datetime, run: Table, output_dir: Path, r0_dir: Path, log_dir: Path, tool: str, simulate: bool = False
+):
+    """
+    Create the gain selection sbatch script and launch it for a given run.
+
+    Runs from before 20231205 without UCTS or TIB info are directly copied to the final directory.
+    Subruns that do not have four streams are also directly copied.
+    """
+    run_id = run["run_id"]
+    ref_time = run["dragon_reference_time"]
+    ref_counter = run["dragon_reference_counter"]
+    module = run["dragon_reference_module_index"]
+    ref_source = run["dragon_reference_source"].upper()
+
+    files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.????.fits.fz")
+    subrun_numbers = [int(file[-12:-8]) for file in files]
+
+    if tool == "lst_dvr" and ref_source not in ["UCTS", "TIB"]:
+        input_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
+
+        if is_run_already_copied(date, run_id):
+            log.info(f"The R0 files corresponding to run {run_id} have already been copied to the R0G directory.")
+        else:
+            if not simulate:
+                for file in input_files:
+                    log.debug(
+                        f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
+                        f"be applied. Copying directly the R0 files to {output_dir}."
+                    )
+                    sp.run(["cp", file, output_dir])
+
+            else:
+                log.info(
+                    f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
+                    f"be applied. Simulate copy of the R0 files directly to {output_dir}."
+                )
+
+    else:
+        n_subruns = max(subrun_numbers)
+
+        for subrun in range(n_subruns + 1):
+
+            r0_files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.{subrun:04d}.fits.fz")
+
+            if len(r0_files) != 4:
+                if not simulate and not is_run_already_copied(date, run_id):
+                    log.debug(f"Run {run_id:05d}.{subrun:04d} does not have 4 streams of R0 files, so gain"
+                              f"selection cannot be applied. Copying directly the R0 files to {output_dir}.")
+                    for file in r0_files:
+                        sp.run(["cp", file, output_dir])
+                elif is_run_already_copied(date, run_id):
+                    log.debug(f"Run {run_id:05d}.{subrun:04d} does not have 4 streams of R0 files. The R0 files"
+                              f"have already been copied to {output_dir}.")
+                elif simulate:
+                    log.debug(f"Run {run_id:05d}.{subrun:04d} does not have 4 streams of R0 files, so gain"
+                              f"selection cannot be applied. Simulate copy of the R0 files directly to {output_dir}.")
+
+            else:
+                history_file = log_dir / f"gain_selection_{run_id:05d}.{subrun:04d}.history"
+                if history_file.exists():
+                    if not simulate:
+                        update_history_file(run_id, subrun, log_dir, history_file)
+
+                    if history_file.read_text() == "":  # history_file is empty
+                        log.debug(f"Gain selection is still running for run {run_id:05d}.{subrun:04d}")
+                        continue
+                    else:
+                        gainsel_rc = history_file.read_text().splitlines()[-1][-1]
+                        if gainsel_rc == "1":
+                            job_id = get_last_job_id(run_id, subrun, log_dir)
+                            if job_finished_in_timeout(job_id) and not simulate:
+                                # Relaunch the job that finished in TIMEOUT
+                                job_file = log_dir / f"gain_selection_{run_id:05d}.{subrun:04d}.sh"
+                                sp.run(["sbatch", job_file], stdout=sp.PIPE, stderr=sp.STDOUT, check=True)
+                            else:
+                                log.warning(f"Gain selection failed for run {run_id:05d}.{subrun:04d}")
+                        elif gainsel_rc == "0":
+                            log.debug(f"Gain selection finished successfully for run {run_id:05d}.{subrun:04d},"
+                                      "no additional jobs will be submitted for this subrun.")
+                else:
+                    log.debug("Creating and launching the gain selection sbatch script for subrun {run_id:05d}.{subrun:04d}")
+                    if not simulate:
+                        log_file = log_dir / f"r0_to_r0g_{run_id:05d}.{subrun:04d}.log"
+                        job_file = log_dir / f"gain_selection_{run_id:05d}.{subrun:04d}.sh"
+                        r0_files.sort()
+                        with open(job_file, "w") as f:
+                            f.write(
+                                get_sbatch_script(
+                                    run_id,
+                                    subrun,
+                                    r0_files[0],
+                                    output_dir,
+                                    log_dir,
+                                    log_file,
+                                    ref_time,
+                                    ref_counter,
+                                    module,
+                                    ref_source,
+                                    tool,
+                                )
+                            )
+
+                        #submit job
+                        history_file.touch()
+                        sp.run(["sbatch", job_file], stdout=sp.PIPE, stderr=sp.STDOUT, check=True)
+
+
+def apply_gain_selection(date: datetime, start: int, end: int, tool: str = None, no_queue_check: bool = False, simulate: bool = False):
+    """
+    Submit the jobs to apply the gain selection to the data for a given date
+    on a subrun-by-subrun basis.
+    """
+
+    if not tool:
+        if date_to_dir(date) < "20231205":
+            tool = "lst_dvr"
+        else:
+            tool = "lstchain_r0_to_r0g"
+
+    summary_table = run_summary_table(date)
+
+    if len(summary_table) == 0:
+        log.warning(f"No runs are found in the run summary of {date_to_iso(date)}. Nothing to do. Exiting.")
+        sys.exit(0)
+
+    # Apply gain selection only to DATA runs
+    data_runs = summary_table[summary_table["run_type"] == "DATA"]
+    log.info(f"Found {len(data_runs)} DATA runs to which apply the gain selection")
+
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    date_str = date_to_dir(date)
+    r0_dir = base_dir / "R0" / date_str
+    output_dir = base_dir / f"R0G/{date_str}"
+    log_dir = base_dir / f"R0G/log/{date_str}"
+    if not simulate:
+        output_dir.mkdir(parents=True, exist_ok=True)
+        log_dir.mkdir(parents=True, exist_ok=True)
+
+    for run in data_runs:
+        if not no_queue_check:
+            # Check slurm queue status and sleep for a while to avoid overwhelming the queue
+            check_job_status_and_wait(max_jobs=1500)
+
+        # Avoid running jobs while it is still night time
+        wait_for_daytime(start, end)
+
+        if not is_closed(date, run["run_id"]):
+            launch_gainsel_for_data_run(date, run, output_dir, r0_dir, log_dir, tool, simulate)
+
+    calib_runs = summary_table[summary_table["run_type"] != "DATA"]
+    log.info(f"Found {len(calib_runs)} NO-DATA runs")
+
+    for run in calib_runs:
+        run_id = run["run_id"]
+
+        if is_run_already_copied(date, run_id):
+            log.info(f"The R0 files corresponding to run {run_id:05d} have already been copied, nothing to do.")
+        else:
+            log.info(f"Copying R0 files corresponding to run {run_id} directly to {output_dir}")
+            if not simulate:
+                # Avoid copying files while it is still night time
+                wait_for_daytime(start, end)
+
+                r0_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
+
+                for file in r0_files:
+                    sp.run(["cp", file, output_dir])
+
+
+def get_last_job_id(run_id: str, subrun: str, log_dir: Path) -> int:
+    """Get job id of the last gain selection job that was launched for a given subrun."""
+    filenames = glob.glob(f"{log_dir}/gain_selection_{run_id:05d}_{subrun:04d}_*.log")
+    if filenames:
+        match = re.search(f'gain_selection_{run_id:05d}_{subrun:04d}_(\d+).log', sorted(filenames)[-1])
+        job_id = match.group(1)
+        return job_id
+
+
+def update_history_file(run_id: str, subrun: str, log_dir: Path, history_file: Path):
+    """
+    Update the gain selection history file with the result
+    of the last job launched for a given subrun.
+    """
+    job_id = get_last_job_id(run_id, subrun, log_dir)
+    if not job_id:
+        log.debug(f"Cannot find a job_id for the run {run_id:05d}.{subrun:04d}")
+    else:
+        job_status = get_sacct_output(run_sacct(job_id=job_id))["State"]
+        if job_status.item() in ["RUNNING", "PENDING"]:
+            log.info(f"Job {job_id} is still running.")
+            return
+
+        elif job_status.item() == "COMPLETED":
+            log.debug(f"Job {job_id} finished successfully, updating history file.")
+            string_to_write = (
+                f"{run_id:05d}.{subrun:04d} gain_selection 0\n"
+            )
+            append_to_file(history_file, string_to_write)
+
+        else:
+            log.info(f"Job {job_id} failed, updating history file.")
+            string_to_write = (
+                f"{run_id:05d}.{subrun:04d} gain_selection 1\n"
+            )
+            append_to_file(history_file, string_to_write)
+
+
+def is_run_already_copied(date: datetime, run_id: int) -> bool:
+    """Check if the R0 files of a given run have already been copied to the R0G directory."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    r0_files = glob.glob(f"{base_dir}/R0/{date_to_dir(date)}/LST-1.?.Run{run_id:05d}.????.fits.fz")
+    r0g_files = glob.glob(f"{base_dir}/R0G/{date_to_dir(date)}/LST-1.?.Run{run_id:05d}.????.fits.fz")
+    return len(r0_files)==len(r0g_files)
+
+
+def is_closed(date: datetime, run_id: str) -> bool:
+    """Check if run is already closed."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    log_dir = base_dir / f"R0G/log/{date_to_dir(date)}"
+    closed_run_file = log_dir / f"gain_selection_{run_id:05d}.closed"
+    return closed_run_file.exists()
+
+
+def GainSel_flag_file(date: datetime) -> Path:
+    """Return the path to the file indicating the completion of the gain selection stage."""
+    filename = cfg.get("LSTOSA", "gain_selection_check")
+    GainSel_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
+    flagfile = GainSel_dir / date_to_dir(date) / filename
+    return flagfile.resolve()
+
+
+def GainSel_finished(date: datetime) -> bool:
+    """Check if gain selection finished successfully."""
+    flagfile = GainSel_flag_file(date)
+    return flagfile.exists()
+
+
+def check_gainsel_jobs_runwise(date: datetime, run_id: int) -> bool:
+    """Search for failed jobs in the log directory."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    log_dir = base_dir / f"R0G/log/{date_to_dir(date)}"
+    history_files = log_dir.glob(f"gain_selection_{run_id:05d}.????.history")
+    failed_subruns = []
+    log.info(f"Checking all history files of run {run_id}")
+
+    for file in history_files:
+        match = re.search(f"gain_selection_{run_id:05d}.(\d+).history", str(file))
+        subrun = match.group(1)
+        if file.read_text() != "":
+            gainsel_rc = file.read_text().splitlines()[-1][-1]
+
+            if gainsel_rc == "1":
+                log.warning(f"Gain selection failed for run {run_id}.{subrun}")
+                failed_subruns.append(file)
+
+            elif gainsel_rc == "0":
+                log.debug(f"Gain selection finished successfully for run {run_id}.{subrun}")
+        else:
+            log.info(f"Gain selection is still running for run {run_id}.{subrun}")
+            return False
+
+    if failed_subruns:
+        log.warning(f"{date_to_iso(date)}: Some gain selection jobs did not finish successfully for run {run_id}")
+        return False
+    else:
+        log.info(f"{date_to_iso(date)}: All jobs finished successfully for run {run_id}, creating the corresponding .closed file")
+        closed_run_file = log_dir / f"gain_selection_{run_id:05d}.closed"
+        closed_run_file.touch()
+        return True
+
+
+def check_warnings_in_logs(date: datetime, run_id: int):
+    """Look for warnings in the log files created by lstchain_r0_to_r0g."""
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    log_dir = base_dir / f"R0G/log/{date_to_dir(date)}"
+    log_files = log_dir.glob(f"r0_to_r0g_{run_id:05d}.*.log")
+    for file in log_files:
+        content = file.read_text().splitlines()
+        for line in content:
+            if "FlatField(FF)-like events are not tagged as FF" in line:
+                log.warning(f"Warning for run {run_id}: {line}")
+
+
+def check_failed_jobs(date: datetime):
+    """Search for failed jobs in the log directory."""
+
+    summary_table = run_summary_table(date)
+
+    if len(summary_table) == 0:
+        log.warning(f"No runs are found in the run summary of {date_to_iso(date)}. Nothing to do. Exiting.")
+        sys.exit(0)
+
+    data_runs = summary_table[summary_table["run_type"] == "DATA"]
+    failed_runs = []
+
+    for run in data_runs:
+        run_id = run["run_id"]
+        check_warnings_in_logs(date, run_id)
+        if not is_closed(date, run_id):
+            if not check_gainsel_jobs_runwise(date, run_id):
+                log.warning(f"Gain selection did not finish successfully for run {run_id}.")
+                failed_runs.append(run)
+
+    if failed_runs:
+        log.warning(f"Gain selection did not finish successfully for {date_to_iso(date)}, cannot create the flag file.")
+        return
+
+    runs = summary_table["run_id"]
+    missing_runs = []
+
+    date_str = date_to_dir(date)
+    base_dir = Path(cfg.get("LST1", "BASE"))
+    r0_files = glob.glob(f"{base_dir}/R0/{date_str}/LST-1.?.Run?????.????.fits.fz")
+    r0g_files = glob.glob(f"{base_dir}/R0G/{date_str}/LST-1.?.Run?????.????.fits.fz")
+    all_r0_runs = [parse_r0_filename(i).run for i in r0_files]
+    all_r0g_runs = [parse_r0_filename(i).run for i in r0g_files]
+
+    for run in all_r0_runs:
+        if run not in runs:
+            if run not in all_r0g_runs:
+                missing_runs.append(run)
+
+    missing_runs.sort()
+    if missing_runs:
+        output_dir = base_dir / f"R0G/{date_str}/"
+        log.info(
+            f"Some runs are missing. Copying R0 files of runs {pd.Series(missing_runs).unique()} "
+            f"directly to {output_dir}"
+        )
+
+        for run in missing_runs:
+
+            files = base_dir.glob(f"R0/{date_str}/LST-1.?.Run{run:05d}.????.fits.fz")
+            for file in files:
+                sp.run(["cp", file, output_dir])
+
+    GainSel_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
+    flagfile_dir = GainSel_dir / date_str
+    flagfile_dir.mkdir(parents=True, exist_ok=True)
+
+    flagfile = GainSel_flag_file(date)
+    log.info(f"Gain selection finished successfully, creating flag file for date {date_to_iso(date)} ({flagfile})")
+    flagfile.touch()
+
+
+def main():
+    """
+    Loop over the dates listed in the input file and launch the gain selection
+    script for each of them. The input file should list the dates in the format
+    YYYYMMDD one date per line.
+    """
+    args = parser.parse_args()
+
+    if args.verbose:
+        log.setLevel(logging.DEBUG)
+    else:
+        log.setLevel(logging.INFO)
+
+    if args.date:
+        if GainSel_finished(args.date):
+            log.warning(f"Gain selection already done for date {date_to_iso(args.date)}. Exiting.")
+            sys.exit(0)
+        elif args.check:
+            log.info(f"Checking gain selection status for date {date_to_iso(args.date)}")
+            check_failed_jobs(args.date)
+        else:
+            log.info(f"\nApplying gain selection to date {date_to_iso(args.date)}")
+            apply_gain_selection(
+                args.date,
+                args.start_time,
+                args.end_time,
+                args.tool,
+                no_queue_check=args.no_queue_check,
+                simulate=args.simulate,
+            )
+
+    elif args.dates_file:
+        list_of_dates = get_list_of_dates(args.dates_file)
+        log.info(f"Found {len(list_of_dates)} dates to apply or check gain selection")
+
+        if args.check:
+            for date in list_of_dates:
+                log.info(f"Checking gain selection status for date {date}")
+                check_failed_jobs(date)
+        else:
+            for date in list_of_dates:
+                log.info(f"Applying gain selection to date {date}")
+                apply_gain_selection(
+                    date,
+                    args.start_time,
+                    args.end_time,
+                    args.tool,
+                    no_queue_check=args.no_queue_check,
+                    simulate=args.simulate,
+                )
+        log.info("Done! No more dates to process.")
+
+
+if __name__ == "__main__":
+    main()
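For reference, the rewritten script's command-line surface follows from the argparse block above. A dry-run sketch driving `main()` programmatically (the date and file name are examples, and an lstosa installation with a valid sequencer.cfg is assumed; `--simulate` keeps it from submitting anything):

```python
# Equivalent to: gain_selection --date 2024-01-15 --simulate --no-queue-check
# and:           gain_selection --dates-file dates.txt --check
import sys
from osa.scripts import gain_selection

sys.argv = ["gain_selection", "--date", "2024-01-15", "--simulate", "--no-queue-check"]
gain_selection.main()
```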