lstosa 0.10.11__tar.gz → 0.10.13__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lstosa-0.10.11 → lstosa-0.10.13}/PKG-INFO +1 -1
- lstosa-0.10.13/crontab/crontab.txt +101 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/lstosa.egg-info/PKG-INFO +1 -1
- {lstosa-0.10.11 → lstosa-0.10.13}/src/lstosa.egg-info/SOURCES.txt +1 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/_version.py +2 -2
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/configs/sequencer.cfg +2 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/conftest.py +22 -3
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/autocloser.py +7 -3
- lstosa-0.10.13/src/osa/scripts/gain_selection.py +350 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/sequencer.py +16 -1
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/sequencer_webmaker.py +5 -2
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/tests/test_osa_scripts.py +2 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/cliopts.py +19 -0
- lstosa-0.10.11/src/osa/scripts/gain_selection.py +0 -237
- {lstosa-0.10.11 → lstosa-0.10.13}/.coveragerc +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/.gitignore +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/.mailmap +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/.pre-commit-config.yaml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/.readthedocs.yml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/LICENSE +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/MANIFEST.in +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/README.md +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/codemeta.json +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/dev/mysql.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/Makefile +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/_static/logo_lstosa.png +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/authors.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/components/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/conf.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/configuration.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/contribute.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/documents/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/howto/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/introduction/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/introduction/reduction_steps_lstchain.png +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/jobs.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/make.bat +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/nightsummary.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/provenance.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/references.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/reports.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/scripts/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/troubleshooting/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/utils.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/veto.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/workflow/LSTOSA_flow.png +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/docs/workflow/index.rst +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/environment.yml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/example_sequencer.txt +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/history_files/sequence_LST1_04183.history +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/history_files/sequence_LST1_04183_failed.history +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/history_files/sequence_LST1_04183_oneline.history +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/history_files/sequence_LST1_04185.0010.history +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/sacct_output.csv +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/extra/squeue_output.csv +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/pyproject.toml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/setup.cfg +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/lstosa.egg-info/dependency_links.txt +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/lstosa.egg-info/entry_points.txt +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/lstosa.egg-info/requires.txt +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/lstosa.egg-info/top_level.txt +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/configs/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/configs/config.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/configs/datamodel.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/configs/options.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/high_level/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/high_level/selection_cuts.toml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/high_level/significance.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/high_level/tests/test_significance.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/job.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/database.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/extract.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/nightsummary.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/set_source_coordinates.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/tests/test_database.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/tests/test_extract.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/tests/test_nightsummary.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/nightsummary/tests/test_source_coordinates.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/osadb.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/paths.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/capture.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/config/definition.yaml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/config/environment.yaml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/config/logger.yaml +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/io.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/provenance/utils.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/raw.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/report.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/calibration_pipeline.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/closer.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/copy_datacheck.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/datasequence.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/provprocess.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/reprocess_longterm.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/reprocessing.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/show_run_summary.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/simulate_processing.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/tests/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/scripts/update_source_catalog.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_jobs.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_osa.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_osadb.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_paths.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_raw.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_report.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/tests/test_veto.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/iofile.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/logging.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/mail.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/register.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/tests/test_iofile.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/tests/test_utils.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/utils/utils.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/version.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/veto.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/webserver/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/webserver/utils.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/workflow/__init__.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/workflow/dl3.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/workflow/stages.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/workflow/tests/test_dl3.py +0 -0
- {lstosa-0.10.11 → lstosa-0.10.13}/src/osa/workflow/tests/test_stages.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: lstosa
|
|
3
|
-
Version: 0.10.
|
|
3
|
+
Version: 0.10.13
|
|
4
4
|
Summary: Onsite analysis pipeline for the CTA LST-1
|
|
5
5
|
Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
|
|
6
6
|
Author-email: Daniel Morcuende <dmorcuen@ucm.es>
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
#SHELL=/usr/bin/bash
|
|
2
|
+
#BASH_ENV="/local/home/lstanalyzer/.bashrc"
|
|
3
|
+
#
|
|
4
|
+
#-Datacheck jobs
|
|
5
|
+
#
|
|
6
|
+
#30 5 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Data_Check_daily_script_v2.sh
|
|
7
|
+
#05 00-07 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_v7.sh
|
|
8
|
+
#05 00-07 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v7.sh
|
|
9
|
+
#05 00-07 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_v9.sh
|
|
10
|
+
# EDIT Daniel Morcuende 2023-01-30: Running the data check during the night might be causing busy states of the camera during
|
|
11
|
+
# data taking. I modified the cron above to launch only during the day.
|
|
12
|
+
05 08-13 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_v9.sh
|
|
13
|
+
#00 08 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v7.sh
|
|
14
|
+
#00 09 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v7.sh
|
|
15
|
+
00 08 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v9.sh
|
|
16
|
+
#05 12 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v7.sh
|
|
17
|
+
#05 12 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_no_nightsummary_v7.sh
|
|
18
|
+
05 12 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_no_nightsummary_v9.sh
|
|
19
|
+
#05 12 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v9.sh
|
|
20
|
+
#00 14,16,18,20,21,22,23 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_v8.sh
|
|
21
|
+
#00 14,16,18,20,21,22,23 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_close_night_v8.sh
|
|
22
|
+
#00 14,16,18,20,21,22,23 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_v10.sh
|
|
23
|
+
# EDIT Daniel Morcuende 2023-01-30: Running the data check during the night might be causing busy states of the camera during
|
|
24
|
+
# data taking. I modified the cron above to launch only during the day up to 18:00 UTC.
|
|
25
|
+
00 14,16,18 * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/Process_Run_incremental_script_v10.sh
|
|
26
|
+
#40 * * * * /usr/bin/bash /fefs/onsite/pipeline/data-check/test_cron.sh
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
SHELL=/usr/bin/bash
|
|
30
|
+
BASH_ENV="/local/home/lstanalyzer/.bashrc"
|
|
31
|
+
CRON_TZ=UTC
|
|
32
|
+
# Update astropy auxiliary data (e.g. IERS) every week
|
|
33
|
+
0 0 * * SUN source /fefs/aswg/lstosa/utils/osa-conda; python /fefs/aswg/data/aux/astropy/get_iers_data.py > /dev/null 2>&1
|
|
34
|
+
#
|
|
35
|
+
# lstosa jobs
|
|
36
|
+
#
|
|
37
|
+
# [COPY OBS OVERVIEW] Copy observations overview and daily camera checks to lst1 datacheck webserver
|
|
38
|
+
#
|
|
39
|
+
00 09 * * * /usr/bin/bash /fefs/aswg/lstosa/misc/copy_camera_daily_check_to_www.sh >/dev/null 2>&1
|
|
40
|
+
#
|
|
41
|
+
# [RUN SUMMARY] Produce the run summary file at the end of the observations to make sure it contains all the runs taken
|
|
42
|
+
#
|
|
43
|
+
05 07 * * * obsdate=`date +\%Y\%m\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; lstchain_create_run_summary -d $obsdate --overwrite > /dev/null 2>&1
|
|
44
|
+
#
|
|
45
|
+
# [MERGED RUN SUMMARY] Produce the merged run summary file containing all the runs taken by the LST.
|
|
46
|
+
# This only appends the runs of the given date.
|
|
47
|
+
#
|
|
48
|
+
15 07 * * * obsdate=`date +\%Y\%m\%d -d yesterday`; source /fefs/aswg/lstosa/utils/osa-conda; lstchain_merge_run_summaries -d $obsdate /fefs/aswg/data/real/OSA/Catalog/merged_RunSummary.ecsv > /dev/null 2>&1
|
|
49
|
+
#
|
|
50
|
+
# [GAIN SELECTION]
|
|
51
|
+
#
|
|
52
|
+
#50 07 * * * obsdate=`date +\%Y\%m\%d -d yesterday`; export obsdate; conda activate /fefs/aswg/software/conda/envs/osa-dev; gain_selection --no-queue-check -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg -d $obsdate -o /fefs/aswg/data/real/R0G -s 7 >> /fefs/aswg/data/real/OSA/GainSel_log/${obsdate}_LST1.log 2>&1
|
|
53
|
+
#
|
|
54
|
+
# [GAIN SELECTION CHECK]
|
|
55
|
+
#
|
|
56
|
+
#20 08 * * * obsdate=`date +\%Y\%m\%d -d yesterday`; export obsdate; conda activate /fefs/aswg/software/conda/envs/osa-dev; gain_selection --check -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg -d $obsdate -o /fefs/aswg/data/real/R0G -s 7 >> /fefs/aswg/data/real/OSA/GainSel_log/${obsdate}_check_LST1.log 2>&1
|
|
57
|
+
#
|
|
58
|
+
# [SIMULATED SEQUENCER] Launch sequencer once in the morning simulated. It also produces the run summary file if not found.
|
|
59
|
+
#
|
|
60
|
+
10 08 * * * obsdate=`date +\%Y-\%m-\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; sequencer -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg -s -d $obsdate LST1 >> /fefs/aswg/data/real/OSA/Sequencer_log/${obsdate}_LST1.log 2>&1
|
|
61
|
+
#
|
|
62
|
+
# [SEQUENCER] Launch sequencer once in the morning
|
|
63
|
+
#
|
|
64
|
+
50 07 * * * obsdate=`date +\%Y-\%m-\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; sequencer -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg -d $obsdate --no-gainsel --no-dl2 LST1 > /dev/null 2>&1
|
|
65
|
+
#
|
|
66
|
+
# [WEB SEQUENCER] Make sequencer xhtml table and copy it to the lst1 webserver
|
|
67
|
+
#
|
|
68
|
+
*/15 08-18 * * * obsdate=`date +\%Y\%m\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; sequencer_webmaker -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg; if [ $? = 0 ]; then scp /fefs/aswg/data/real/OSA/SequencerWeb/osa_status_$obsdate.html datacheck:/home/www/html/datacheck/lstosa/sequencer.xhtml; fi > /dev/null 2>&1
|
|
69
|
+
#
|
|
70
|
+
# [AUTOCLOSER] Launch the closer without forcing it (no -f option).
|
|
71
|
+
#
|
|
72
|
+
0 10-18/2 * * * obsdate=`date +\%Y-\%m-\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; autocloser -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg -d $obsdate --no-gainsel --no-dl2 LST1 >> /fefs/aswg/data/real/OSA/Autocloser_log/${obsdate}_LST1.log 2>&1
|
|
73
|
+
#
|
|
74
|
+
# [COPY DATACHECK] Copy the available calibration and DL1 datacheck to the LST1 webserver.
|
|
75
|
+
#
|
|
76
|
+
05,20,40 08-18 * * * obsdate=`date +\%Y-\%m-\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; copy_datacheck -c /fefs/aswg/lstosa/cfg/sequencer_v0.10.cfg -d $obsdate LST1 > /dev/null 2>&1
|
|
77
|
+
#
|
|
78
|
+
# [SOURCE CATALOG]
|
|
79
|
+
#
|
|
80
|
+
00 11-19 * * * obsdate=`date +\%Y-\%m-\%d -d yesterday`; export obsdate; source /fefs/aswg/lstosa/utils/osa-conda; update_source_catalog $obsdate > /dev/null 2>&1
|
|
81
|
+
#
|
|
82
|
+
# [COPY CUMULATIVE LST-1 OBSERVATION TIME]
|
|
83
|
+
# (added by Daniel Morcuende on 2024-04-05)
|
|
84
|
+
#
|
|
85
|
+
0 11 * * FRI /usr/bin/bash /fefs/aswg/lstosa/misc/update_cumulative_obs_time.sh >/dev/null 2>&1
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# Daniel Morcuende (2023-11-08) periodically remove leftover jobs kept in the SLURM as pending after job dependency not satisfied because of failure (Reason in squeue: 'DependencyNeverSatisfied').
|
|
89
|
+
# It does only look for lstosa jobs launched by sequencer with the prefix "LST1_" and also the merging jobs launched by autocloser like "longterm_dl1_datacheck".
|
|
90
|
+
*/30 * * * * /local/home/lstanalyzer/remove_dependency_never_satisfied_jobs.sh > /dev/null 2>&1
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
SHELL=/usr/bin/bash
|
|
96
|
+
BASH_ENV="/local/home/lstanalyzer/.bashrc"
|
|
97
|
+
CRON_TZ=UTC
|
|
98
|
+
#
|
|
99
|
+
# Restart camera processing of calibration runs in realtime in case it was killed for whatever reason
|
|
100
|
+
#
|
|
101
|
+
04 * * * * cd /local/home/lstanalyzer/camera/lst-camera-watchers;./lst1_watcher_restarter
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: lstosa
|
|
3
|
-
Version: 0.10.
|
|
3
|
+
Version: 0.10.13
|
|
4
4
|
Summary: Onsite analysis pipeline for the CTA LST-1
|
|
5
5
|
Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
|
|
6
6
|
Author-email: Daniel Morcuende <dmorcuen@ucm.es>
|
|
@@ -28,6 +28,7 @@ HIGH_LEVEL_DIR: %(OSA_DIR)s/HighLevel
|
|
|
28
28
|
LONGTERM_DIR: %(OSA_DIR)s/DL1DataCheck_LongTerm
|
|
29
29
|
MERGED_SUMMARY: %(OSA_DIR)s/Catalog/merged_RunSummary.ecsv
|
|
30
30
|
SEQUENCER_WEB_DIR: %(OSA_DIR)s/SequencerWeb
|
|
31
|
+
GAIN_SELECTION_FLAG_DIR: %(OSA_DIR)s/GainSel
|
|
31
32
|
|
|
32
33
|
# To be set by the user. Using PROD-ID will overcome the automatic
|
|
33
34
|
# fetching of lstchain version. Otherwise leave it empty (and without the colon symbol).
|
|
@@ -138,6 +139,7 @@ WORKFLOWPREFIX: Workflow
|
|
|
138
139
|
GRAPHSUFFIX: .dot
|
|
139
140
|
SVGSUFFIX: .svg
|
|
140
141
|
end_of_activity: NightFinished.txt
|
|
142
|
+
gain_selection_check: GainSelFinished.txt
|
|
141
143
|
|
|
142
144
|
[OUTPUT]
|
|
143
145
|
# REPORTWIDTH is the width in characters of the heading frame for the output
|
|
@@ -48,6 +48,13 @@ def monitoring_dir(base_test_dir):
|
|
|
48
48
|
return monitoring_dir
|
|
49
49
|
|
|
50
50
|
|
|
51
|
+
@pytest.fixture(scope="session")
|
|
52
|
+
def osa_dir(base_test_dir):
|
|
53
|
+
osa_dir = base_test_dir / "OSA"
|
|
54
|
+
osa_dir.mkdir(parents=True, exist_ok=True)
|
|
55
|
+
return osa_dir
|
|
56
|
+
|
|
57
|
+
|
|
51
58
|
@pytest.fixture(scope="session")
|
|
52
59
|
def run_summary_dir(monitoring_dir):
|
|
53
60
|
summary_dir = monitoring_dir / "RunSummary"
|
|
@@ -436,6 +443,8 @@ def sequence_file_list(
|
|
|
436
443
|
drs4_time_calibration_files,
|
|
437
444
|
systematic_correction_files,
|
|
438
445
|
r0_data,
|
|
446
|
+
gain_selection_flag_file,
|
|
447
|
+
merged_run_summary,
|
|
439
448
|
):
|
|
440
449
|
for r0_file in r0_data:
|
|
441
450
|
assert r0_file.exists()
|
|
@@ -448,6 +457,8 @@ def sequence_file_list(
|
|
|
448
457
|
|
|
449
458
|
assert run_summary_file.exists()
|
|
450
459
|
assert run_catalog.exists()
|
|
460
|
+
assert gain_selection_flag_file.exists()
|
|
461
|
+
assert merged_run_summary.exists()
|
|
451
462
|
|
|
452
463
|
run_program("sequencer", "-d", "2020-01-17", "--no-submit", "-t", "LST1")
|
|
453
464
|
# First sequence in the list corresponds to the calibration run 1809
|
|
@@ -548,11 +559,9 @@ def run_catalog(run_catalog_dir):
|
|
|
548
559
|
|
|
549
560
|
|
|
550
561
|
@pytest.fixture(scope="session")
|
|
551
|
-
def database(
|
|
562
|
+
def database(osa_dir):
|
|
552
563
|
import sqlite3
|
|
553
564
|
|
|
554
|
-
osa_dir = base_test_dir / "OSA"
|
|
555
|
-
osa_dir.mkdir(parents=True, exist_ok=True)
|
|
556
565
|
db_file = osa_dir / "osa.db"
|
|
557
566
|
with sqlite3.connect(db_file) as connection:
|
|
558
567
|
cursor = connection.cursor()
|
|
@@ -562,3 +571,13 @@ def database(base_test_dir):
|
|
|
562
571
|
)
|
|
563
572
|
cursor.connection.commit()
|
|
564
573
|
yield cursor
|
|
574
|
+
|
|
575
|
+
|
|
576
|
+
@pytest.fixture(scope="session")
|
|
577
|
+
def gain_selection_flag_file(osa_dir):
|
|
578
|
+
|
|
579
|
+
GainSel_dir = osa_dir / "GainSel" / "20200117"
|
|
580
|
+
GainSel_dir.mkdir(parents=True, exist_ok=True)
|
|
581
|
+
file = GainSel_dir / "GainSelFinished.txt"
|
|
582
|
+
file.touch()
|
|
583
|
+
return file
|
|
@@ -41,6 +41,7 @@ class Telescope:
|
|
|
41
41
|
config_file: Path,
|
|
42
42
|
ignore_cronlock: bool = False,
|
|
43
43
|
test: bool = False,
|
|
44
|
+
no_gainsel: bool = False,
|
|
44
45
|
):
|
|
45
46
|
"""
|
|
46
47
|
Parameters
|
|
@@ -82,7 +83,7 @@ class Telescope:
|
|
|
82
83
|
if not self.lock_automatic_sequencer() and not ignore_cronlock:
|
|
83
84
|
log.warning(f"{self.telescope} already locked! Ignoring {self.telescope}")
|
|
84
85
|
return
|
|
85
|
-
if not self.simulate_sequencer(date, config_file, test):
|
|
86
|
+
if not self.simulate_sequencer(date, config_file, test, no_gainsel):
|
|
86
87
|
log.warning(
|
|
87
88
|
f"Simulation of the sequencer failed "
|
|
88
89
|
f"for {self.telescope}! Ignoring {self.telescope}"
|
|
@@ -121,7 +122,7 @@ class Telescope:
|
|
|
121
122
|
self.locked = True
|
|
122
123
|
return True
|
|
123
124
|
|
|
124
|
-
def simulate_sequencer(self, date: str, config_file: Path, test: bool):
|
|
125
|
+
def simulate_sequencer(self, date: str, config_file: Path, test: bool, no_gainsel: bool):
|
|
125
126
|
"""Launch the sequencer in simulation mode."""
|
|
126
127
|
if test:
|
|
127
128
|
self.read_file()
|
|
@@ -135,6 +136,9 @@ class Telescope:
|
|
|
135
136
|
date,
|
|
136
137
|
self.telescope,
|
|
137
138
|
]
|
|
139
|
+
if no_gainsel:
|
|
140
|
+
sequencer_cmd.insert(1, "--no-gainsel")
|
|
141
|
+
|
|
138
142
|
log.debug(f"Executing {' '.join(sequencer_cmd)}")
|
|
139
143
|
sequencer = subprocess.Popen(
|
|
140
144
|
sequencer_cmd,
|
|
@@ -445,7 +449,7 @@ def main():
|
|
|
445
449
|
# create telescope and sequence objects
|
|
446
450
|
log.info("Simulating sequencer...")
|
|
447
451
|
|
|
448
|
-
telescope = Telescope(args.tel_id, date, args.config)
|
|
452
|
+
telescope = Telescope(args.tel_id, date, args.config, no_gainsel=args.no_gainsel)
|
|
449
453
|
|
|
450
454
|
log.info(f"Processing {args.tel_id}...")
|
|
451
455
|
|
|
@@ -0,0 +1,350 @@
|
|
|
1
|
+
"""Script to run the gain selection over a list of dates."""
|
|
2
|
+
import logging
|
|
3
|
+
import re
|
|
4
|
+
import shutil
|
|
5
|
+
import glob
|
|
6
|
+
import pandas as pd
|
|
7
|
+
import subprocess as sp
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from textwrap import dedent
|
|
10
|
+
from io import StringIO
|
|
11
|
+
import argparse
|
|
12
|
+
|
|
13
|
+
from astropy.table import Table
|
|
14
|
+
from lstchain.paths import run_info_from_filename, parse_r0_filename
|
|
15
|
+
|
|
16
|
+
from osa.scripts.reprocessing import get_list_of_dates, check_job_status_and_wait
|
|
17
|
+
from osa.utils.utils import wait_for_daytime
|
|
18
|
+
from osa.utils.logging import myLogger
|
|
19
|
+
from osa.job import get_sacct_output, FORMAT_SLURM
|
|
20
|
+
from osa.configs.config import cfg
|
|
21
|
+
from osa.paths import DEFAULT_CFG
|
|
22
|
+
|
|
23
|
+
log = myLogger(logging.getLogger(__name__))
|
|
24
|
+
|
|
25
|
+
PATH = "PATH=/fefs/aswg/software/offline_dvr/bin:$PATH"
|
|
26
|
+
|
|
27
|
+
parser = argparse.ArgumentParser(add_help=False)
|
|
28
|
+
parser.add_argument(
|
|
29
|
+
"--check",
|
|
30
|
+
action="store_true",
|
|
31
|
+
default=False,
|
|
32
|
+
help="Check if any job failed",
|
|
33
|
+
)
|
|
34
|
+
parser.add_argument(
|
|
35
|
+
"--no-queue-check",
|
|
36
|
+
action="store_true",
|
|
37
|
+
default=False,
|
|
38
|
+
help="Do not wait until the number of jobs in the slurm queue is < 1500",
|
|
39
|
+
)
|
|
40
|
+
parser.add_argument(
|
|
41
|
+
"-c",
|
|
42
|
+
"--config",
|
|
43
|
+
action="store",
|
|
44
|
+
type=Path,
|
|
45
|
+
default=DEFAULT_CFG,
|
|
46
|
+
help="Configuration file",
|
|
47
|
+
)
|
|
48
|
+
parser.add_argument(
|
|
49
|
+
"-d",
|
|
50
|
+
"--date",
|
|
51
|
+
default=None,
|
|
52
|
+
type=str,
|
|
53
|
+
help="Night to apply the gain selection",
|
|
54
|
+
)
|
|
55
|
+
parser.add_argument(
|
|
56
|
+
"-l",
|
|
57
|
+
"--dates-file",
|
|
58
|
+
default=None,
|
|
59
|
+
help="List of dates to apply the gain selection",
|
|
60
|
+
)
|
|
61
|
+
parser.add_argument(
|
|
62
|
+
"-o",
|
|
63
|
+
"--output-basedir",
|
|
64
|
+
type=Path,
|
|
65
|
+
default=Path("/fefs/aswg/data/real/R0G"),
|
|
66
|
+
help="Output directory of the gain selected files. Default is /fefs/aswg/data/real/R0G."
|
|
67
|
+
)
|
|
68
|
+
parser.add_argument(
|
|
69
|
+
"-s",
|
|
70
|
+
"--start-time",
|
|
71
|
+
type=int,
|
|
72
|
+
default=10,
|
|
73
|
+
help="Time to (re)start gain selection in HH format. Default is 10.",
|
|
74
|
+
)
|
|
75
|
+
parser.add_argument(
|
|
76
|
+
"-e",
|
|
77
|
+
"--end-time",
|
|
78
|
+
type=int,
|
|
79
|
+
default=18,
|
|
80
|
+
help="Time to stop gain selection in HH format. Default is 18.",
|
|
81
|
+
)
|
|
82
|
+
|
|
83
|
+
def get_sbatch_script(
|
|
84
|
+
run_id, subrun, input_file, output_dir, log_dir, log_file, ref_time, ref_counter, module, ref_source, script
|
|
85
|
+
):
|
|
86
|
+
"""Build the sbatch job pilot script for running the gain selection."""
|
|
87
|
+
if script=="old":
|
|
88
|
+
return dedent(
|
|
89
|
+
f"""\
|
|
90
|
+
#!/bin/bash
|
|
91
|
+
|
|
92
|
+
#SBATCH -D {log_dir}
|
|
93
|
+
#SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
|
|
94
|
+
#SBATCH --job-name "gain_selection_{run_id:05d}"
|
|
95
|
+
#SBATCH --export {PATH}
|
|
96
|
+
#SBATCH --partition=short,long
|
|
97
|
+
|
|
98
|
+
lst_dvr {input_file} {output_dir} {ref_time} {ref_counter} {module} {ref_source}
|
|
99
|
+
"""
|
|
100
|
+
)
|
|
101
|
+
elif script=="new":
|
|
102
|
+
return dedent(
|
|
103
|
+
f"""\
|
|
104
|
+
#!/bin/bash
|
|
105
|
+
|
|
106
|
+
#SBATCH -D {log_dir}
|
|
107
|
+
#SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
|
|
108
|
+
#SBATCH --job-name "gain_selection_{run_id:05d}"
|
|
109
|
+
#SBATCH --mem=40GB
|
|
110
|
+
#SBATCH --partition=short,long
|
|
111
|
+
|
|
112
|
+
lstchain_r0_to_r0g --R0-file={input_file} --output-dir={output_dir} --log={log_file} --no-flatfield-heuristic
|
|
113
|
+
"""
|
|
114
|
+
)
|
|
115
|
+
|
|
116
|
+
def apply_gain_selection(date: str, start: int, end: int, output_basedir: Path = None, no_queue_check: bool = False):
|
|
117
|
+
"""
|
|
118
|
+
Submit the jobs to apply the gain selection to the data for a given date
|
|
119
|
+
on a subrun-by-subrun basis.
|
|
120
|
+
"""
|
|
121
|
+
|
|
122
|
+
if date < "20231205":
|
|
123
|
+
script = "old"
|
|
124
|
+
else:
|
|
125
|
+
script = "new"
|
|
126
|
+
|
|
127
|
+
run_summary_dir = Path("/fefs/aswg/data/real/monitoring/RunSummary")
|
|
128
|
+
run_summary_file = run_summary_dir / f"RunSummary_{date}.ecsv"
|
|
129
|
+
summary_table = Table.read(run_summary_file)
|
|
130
|
+
# Apply gain selection only to DATA runs
|
|
131
|
+
data_runs = summary_table[summary_table["run_type"] == "DATA"]
|
|
132
|
+
log.info(f"Found {len(data_runs)} DATA runs to which apply the gain selection")
|
|
133
|
+
|
|
134
|
+
output_dir = output_basedir / date
|
|
135
|
+
log_dir = output_basedir / "log" / date
|
|
136
|
+
output_dir.mkdir(parents=True, exist_ok=True)
|
|
137
|
+
log_dir.mkdir(parents=True, exist_ok=True)
|
|
138
|
+
log_file = log_dir / f"r0_to_r0g_{date}.log"
|
|
139
|
+
r0_dir = Path(f"/fefs/aswg/data/real/R0/{date}")
|
|
140
|
+
|
|
141
|
+
for run in data_runs:
|
|
142
|
+
if not no_queue_check:
|
|
143
|
+
# Check slurm queue status and sleep for a while to avoid overwhelming the queue
|
|
144
|
+
check_job_status_and_wait(max_jobs=1500)
|
|
145
|
+
|
|
146
|
+
# Avoid running jobs while it is still night time
|
|
147
|
+
wait_for_daytime(start, end)
|
|
148
|
+
|
|
149
|
+
run_id = run["run_id"]
|
|
150
|
+
ref_time = run["dragon_reference_time"]
|
|
151
|
+
ref_counter = run["dragon_reference_counter"]
|
|
152
|
+
module = run["dragon_reference_module_index"]
|
|
153
|
+
ref_source = run["dragon_reference_source"].upper()
|
|
154
|
+
|
|
155
|
+
files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.????.fits.fz")
|
|
156
|
+
subrun_numbers = [int(file[-12:-8]) for file in files]
|
|
157
|
+
input_files = []
|
|
158
|
+
|
|
159
|
+
if date < "20231205" and ref_source not in ["UCTS", "TIB"]:
|
|
160
|
+
input_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
|
|
161
|
+
log.info(
|
|
162
|
+
f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
|
|
163
|
+
f"be applied. Copying directly the R0 files to {output_dir}."
|
|
164
|
+
)
|
|
165
|
+
for file in input_files:
|
|
166
|
+
sp.run(["cp", file, output_dir])
|
|
167
|
+
|
|
168
|
+
else:
|
|
169
|
+
n_subruns = max(subrun_numbers)
|
|
170
|
+
|
|
171
|
+
for subrun in range(n_subruns + 1):
|
|
172
|
+
new_files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.{subrun:04d}.fits.fz")
|
|
173
|
+
|
|
174
|
+
if len(new_files) != 4:
|
|
175
|
+
log.info(f"Run {run_id}.{subrun:05d} does not have 4 streams of R0 files, so gain"
|
|
176
|
+
f"selection cannot be applied. Copying directly the R0 files to {output_dir}."
|
|
177
|
+
)
|
|
178
|
+
for file in new_files:
|
|
179
|
+
sp.run(["cp", file, output_dir])
|
|
180
|
+
|
|
181
|
+
else:
|
|
182
|
+
new_files.sort()
|
|
183
|
+
input_files.append(new_files[0])
|
|
184
|
+
|
|
185
|
+
log.info("Creating and launching the sbatch scripts for the rest of the runs to apply gain selection")
|
|
186
|
+
for file in input_files:
|
|
187
|
+
run_info = run_info_from_filename(file)
|
|
188
|
+
job_file = log_dir / f"gain_selection_{run_info.run:05d}.{run_info.subrun:04d}.sh"
|
|
189
|
+
with open(job_file, "w") as f:
|
|
190
|
+
f.write(
|
|
191
|
+
get_sbatch_script(
|
|
192
|
+
run_id,
|
|
193
|
+
run_info.subrun,
|
|
194
|
+
file,
|
|
195
|
+
output_dir,
|
|
196
|
+
log_dir,
|
|
197
|
+
log_file,
|
|
198
|
+
ref_time,
|
|
199
|
+
ref_counter,
|
|
200
|
+
module,
|
|
201
|
+
ref_source,
|
|
202
|
+
script,
|
|
203
|
+
)
|
|
204
|
+
)
|
|
205
|
+
sp.run(["sbatch", job_file], check=True)
|
|
206
|
+
|
|
207
|
+
calib_runs = summary_table[summary_table["run_type"] != "DATA"]
|
|
208
|
+
log.info(f"Found {len(calib_runs)} NO-DATA runs")
|
|
209
|
+
|
|
210
|
+
for run in calib_runs:
|
|
211
|
+
run_id = run["run_id"]
|
|
212
|
+
log.info(f"Copying R0 files corresponding to run {run_id} directly to {output_dir}")
|
|
213
|
+
# Avoid copying files while it is still night time
|
|
214
|
+
wait_for_daytime(start, end)
|
|
215
|
+
|
|
216
|
+
run_id = run["run_id"]
|
|
217
|
+
r0_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
|
|
218
|
+
|
|
219
|
+
for file in r0_files:
|
|
220
|
+
sp.run(["cp", file, output_dir])
|
|
221
|
+
|
|
222
|
+
def run_sacct_j(job) -> StringIO:
    """Fetch SLURM accounting information for *job* via ``sacct``.

    Returns the raw ``--parsable2`` CSV output wrapped in a ``StringIO``.
    An empty stream is returned on hosts where the ``sacct`` command is
    not available, so callers always get a file-like object.
    """
    # Bail out gracefully on machines without the SLURM client tools.
    if shutil.which("sacct") is None:
        log.warning("No job info available since sacct command is not available")
        return StringIO()

    fields = ",".join(FORMAT_SLURM)
    command = [
        "sacct", "-n", "--parsable2", "--delimiter=,", "--units=G",
        "-o", fields, "-j", job,
    ]
    return StringIO(sp.check_output(command).decode())
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
def GainSel_flag_file(date: str) -> Path:
    """Return the resolved path of the gain-selection flag file for *date*."""
    base_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
    flag_name = cfg.get("LSTOSA", "gain_selection_check")
    return (base_dir / date / flag_name).resolve()
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def GainSel_finished(date: str) -> bool:
    """Check if gain selection finished successfully."""
    # The flag file is only touched once the whole night was processed.
    return GainSel_flag_file(date).exists()
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def _get_failed_jobs(date: str, output_basedir: Path) -> list:
    """Return the IDs of gain-selection SLURM jobs for *date* that did not complete."""
    failed_jobs = []
    log_dir = output_basedir / "log" / date
    filenames = glob.glob(f"{log_dir}/gain_selection*.log")
    # Extract the job id embedded in each log-file name; skip files whose
    # name does not match instead of crashing on a None match object.
    matches = (re.search(r'(?<=_)(.[0-9.]+?)(?=\.log)', i) for i in filenames)
    jobs = [m.group(0) for m in matches if m is not None]

    for job in jobs:
        df = get_sacct_output(run_sacct_j(job))
        # An empty frame means sacct gave no information (e.g. the command
        # is unavailable): treat the state as not-completed rather than
        # raising IndexError on df.iloc[0].
        if df.empty or df.iloc[0]["State"] != "COMPLETED":
            log.warning(f"Job {job} did not finish successfully")
            failed_jobs.append(job)

    if failed_jobs:
        log.warning(f"{date}: some jobs did not finish successfully")
    else:
        log.info(f"{date}: all jobs finished successfully")

    return failed_jobs


def _copy_missing_runs(date: str):
    """Copy R0 files of runs absent from the run summary directly to R0G."""
    run_summary_dir = Path("/fefs/aswg/data/real/monitoring/RunSummary")
    run_summary_file = run_summary_dir / f"RunSummary_{date}.ecsv"
    summary_table = Table.read(run_summary_file)
    runs = summary_table["run_id"]

    r0_files = glob.glob(f"/fefs/aswg/data/real/R0/{date}/LST-1.?.Run?????.????.fits.fz")
    r0g_files = glob.glob(f"/fefs/aswg/data/real/R0G/{date}/LST-1.?.Run?????.????.fits.fz")
    all_r0_runs = [parse_r0_filename(i).run for i in r0_files]
    # Runs already present in R0G were gain-selected (or copied) before.
    all_r0g_runs = {parse_r0_filename(i).run for i in r0g_files}

    missing_runs = sorted(
        run for run in all_r0_runs if run not in runs and run not in all_r0g_runs
    )

    if missing_runs:
        log.info(
            f"Some runs are missing. Copying R0 files of runs {pd.Series(missing_runs).unique()} "
            f"directly to /fefs/aswg/data/real/R0G/{date}"
        )
        output_dir = Path(f"/fefs/aswg/data/real/R0G/{date}/")
        for run in missing_runs:
            files = glob.glob(f"/fefs/aswg/data/real/R0/{date}/LST-1.?.Run{run:05d}.????.fits.fz")
            for file in files:
                sp.run(["cp", file, output_dir])


def check_failed_jobs(date: str, output_basedir: Path = None):
    """Search for failed jobs in the log directory and finalize the night.

    Checks every ``gain_selection*.log`` job of *date* against the SLURM
    accounting database, copies the R0 files of runs missing from the run
    summary straight to the R0G directory, and creates the flag file that
    marks gain selection as finished for *date*.

    Parameters
    ----------
    date : str
        Night directory name (YYYYMMDD).
    output_basedir : Path
        Base directory containing the ``log/<date>`` subdirectory.
    """
    failed_jobs = _get_failed_jobs(date, output_basedir)
    _copy_missing_runs(date)

    if failed_jobs:
        # Do not create the flag file when jobs failed: GainSel_finished()
        # must keep reporting False so the processing is retried later.
        log.warning(f"{date}: flag file not created since some jobs did not finish successfully")
        return

    GainSel_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
    flagfile_dir = GainSel_dir / date
    flagfile_dir.mkdir(parents=True, exist_ok=True)

    flagfile = GainSel_flag_file(date)
    log.info(f"Gain selection finished successfully, creating flag file for date {date} ({flagfile})")
    flagfile.touch()
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def _process_date(date, args):
    """Check or apply gain selection for a single date, depending on --check."""
    if args.check:
        log.info(f"Checking gain selection status for date {date}")
        check_failed_jobs(date, args.output_basedir)
    else:
        log.info(f"Applying gain selection to date {date}")
        apply_gain_selection(
            date,
            args.start_time,
            args.end_time,
            args.output_basedir,
            no_queue_check=args.no_queue_check,
        )


def main():
    """
    Loop over the dates listed in the input file and launch the gain selection
    script for each of them. The input file should list the dates in the format
    YYYYMMDD one date per line.

    Alternatively, a single date can be given with ``--date``. With ``--check``
    the status of previously launched jobs is verified instead of launching
    new ones.
    """
    log.setLevel(logging.INFO)
    args = parser.parse_args()

    if args.date:
        _process_date(args.date, args)

    elif args.dates_file:
        list_of_dates = get_list_of_dates(args.dates_file)
        log.info(f"Found {len(list_of_dates)} dates to apply or check gain selection")

        for date in list_of_dates:
            _process_date(date, args)

        log.info("Done! No more dates to process.")


if __name__ == "__main__":
    main()
|
|
@@ -7,6 +7,7 @@ prepares a SLURM job array which launches the data sequences for every subrun.
|
|
|
7
7
|
|
|
8
8
|
import logging
|
|
9
9
|
import os
|
|
10
|
+
import sys
|
|
10
11
|
from decimal import Decimal
|
|
11
12
|
|
|
12
13
|
from osa import osadb
|
|
@@ -22,12 +23,14 @@ from osa.job import (
|
|
|
22
23
|
run_squeue,
|
|
23
24
|
)
|
|
24
25
|
from osa.nightsummary.extract import build_sequences
|
|
26
|
+
from osa.nightsummary.nightsummary import run_summary_table
|
|
25
27
|
from osa.paths import analysis_path
|
|
26
28
|
from osa.report import start
|
|
27
29
|
from osa.utils.cliopts import sequencer_cli_parsing
|
|
28
30
|
from osa.utils.logging import myLogger
|
|
29
|
-
from osa.utils.utils import is_day_closed, gettag, date_to_iso
|
|
31
|
+
from osa.utils.utils import is_day_closed, gettag, date_to_iso, date_to_dir
|
|
30
32
|
from osa.veto import get_closed_list, get_veto_list
|
|
33
|
+
from osa.scripts.gain_selection import GainSel_finished
|
|
31
34
|
|
|
32
35
|
__all__ = [
|
|
33
36
|
"single_process",
|
|
@@ -90,6 +93,18 @@ def single_process(telescope):
|
|
|
90
93
|
if not options.simulate:
|
|
91
94
|
os.makedirs(options.log_directory, exist_ok=True)
|
|
92
95
|
|
|
96
|
+
summary_table = run_summary_table(options.date)
|
|
97
|
+
if len(summary_table) == 0:
|
|
98
|
+
log.warning("No runs found for this date. Nothing to do. Exiting.")
|
|
99
|
+
sys.exit(0)
|
|
100
|
+
|
|
101
|
+
if not options.no_gainsel and not GainSel_finished(date_to_dir(options.date)):
|
|
102
|
+
log.info(
|
|
103
|
+
f"Gain selection did not finish successfully for date {options.date}."
|
|
104
|
+
"Try again later, once gain selection has finished."
|
|
105
|
+
)
|
|
106
|
+
sys.exit()
|
|
107
|
+
|
|
93
108
|
if is_day_closed():
|
|
94
109
|
log.info(f"Date {date_to_iso(options.date)} is already closed for {options.tel_id}")
|
|
95
110
|
return sequence_list
|