lstosa 0.10.10__tar.gz → 0.10.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129) hide show
  1. {lstosa-0.10.10 → lstosa-0.10.12}/PKG-INFO +1 -1
  2. {lstosa-0.10.10 → lstosa-0.10.12}/src/lstosa.egg-info/PKG-INFO +1 -1
  3. {lstosa-0.10.10 → lstosa-0.10.12}/src/lstosa.egg-info/SOURCES.txt +0 -1
  4. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/_version.py +2 -2
  5. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/configs/sequencer.cfg +2 -0
  6. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/conftest.py +22 -3
  7. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/paths.py +40 -7
  8. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/autocloser.py +7 -3
  9. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/closer.py +41 -23
  10. lstosa-0.10.12/src/osa/scripts/gain_selection.py +349 -0
  11. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/sequencer.py +16 -1
  12. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/tests/test_osa_scripts.py +2 -0
  13. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/cliopts.py +13 -0
  14. lstosa-0.10.10/src/osa/scripts/gain_selection.py +0 -237
  15. lstosa-0.10.10/src/osa/scripts/show_run_summary.py +0 -180
  16. {lstosa-0.10.10 → lstosa-0.10.12}/.coveragerc +0 -0
  17. {lstosa-0.10.10 → lstosa-0.10.12}/.gitignore +0 -0
  18. {lstosa-0.10.10 → lstosa-0.10.12}/.mailmap +0 -0
  19. {lstosa-0.10.10 → lstosa-0.10.12}/.pre-commit-config.yaml +0 -0
  20. {lstosa-0.10.10 → lstosa-0.10.12}/.readthedocs.yml +0 -0
  21. {lstosa-0.10.10 → lstosa-0.10.12}/LICENSE +0 -0
  22. {lstosa-0.10.10 → lstosa-0.10.12}/MANIFEST.in +0 -0
  23. {lstosa-0.10.10 → lstosa-0.10.12}/README.md +0 -0
  24. {lstosa-0.10.10 → lstosa-0.10.12}/codemeta.json +0 -0
  25. {lstosa-0.10.10 → lstosa-0.10.12}/dev/mysql.py +0 -0
  26. {lstosa-0.10.10 → lstosa-0.10.12}/docs/Makefile +0 -0
  27. {lstosa-0.10.10 → lstosa-0.10.12}/docs/_static/logo_lstosa.png +0 -0
  28. {lstosa-0.10.10 → lstosa-0.10.12}/docs/authors.rst +0 -0
  29. {lstosa-0.10.10 → lstosa-0.10.12}/docs/components/index.rst +0 -0
  30. {lstosa-0.10.10 → lstosa-0.10.12}/docs/conf.py +0 -0
  31. {lstosa-0.10.10 → lstosa-0.10.12}/docs/configuration.rst +0 -0
  32. {lstosa-0.10.10 → lstosa-0.10.12}/docs/contribute.rst +0 -0
  33. {lstosa-0.10.10 → lstosa-0.10.12}/docs/documents/index.rst +0 -0
  34. {lstosa-0.10.10 → lstosa-0.10.12}/docs/howto/index.rst +0 -0
  35. {lstosa-0.10.10 → lstosa-0.10.12}/docs/index.rst +0 -0
  36. {lstosa-0.10.10 → lstosa-0.10.12}/docs/introduction/index.rst +0 -0
  37. {lstosa-0.10.10 → lstosa-0.10.12}/docs/introduction/reduction_steps_lstchain.png +0 -0
  38. {lstosa-0.10.10 → lstosa-0.10.12}/docs/jobs.rst +0 -0
  39. {lstosa-0.10.10 → lstosa-0.10.12}/docs/make.bat +0 -0
  40. {lstosa-0.10.10 → lstosa-0.10.12}/docs/nightsummary.rst +0 -0
  41. {lstosa-0.10.10 → lstosa-0.10.12}/docs/provenance.rst +0 -0
  42. {lstosa-0.10.10 → lstosa-0.10.12}/docs/references.rst +0 -0
  43. {lstosa-0.10.10 → lstosa-0.10.12}/docs/reports.rst +0 -0
  44. {lstosa-0.10.10 → lstosa-0.10.12}/docs/scripts/index.rst +0 -0
  45. {lstosa-0.10.10 → lstosa-0.10.12}/docs/troubleshooting/index.rst +0 -0
  46. {lstosa-0.10.10 → lstosa-0.10.12}/docs/utils.rst +0 -0
  47. {lstosa-0.10.10 → lstosa-0.10.12}/docs/veto.rst +0 -0
  48. {lstosa-0.10.10 → lstosa-0.10.12}/docs/workflow/LSTOSA_flow.png +0 -0
  49. {lstosa-0.10.10 → lstosa-0.10.12}/docs/workflow/index.rst +0 -0
  50. {lstosa-0.10.10 → lstosa-0.10.12}/environment.yml +0 -0
  51. {lstosa-0.10.10 → lstosa-0.10.12}/extra/example_sequencer.txt +0 -0
  52. {lstosa-0.10.10 → lstosa-0.10.12}/extra/history_files/sequence_LST1_04183.history +0 -0
  53. {lstosa-0.10.10 → lstosa-0.10.12}/extra/history_files/sequence_LST1_04183_failed.history +0 -0
  54. {lstosa-0.10.10 → lstosa-0.10.12}/extra/history_files/sequence_LST1_04183_oneline.history +0 -0
  55. {lstosa-0.10.10 → lstosa-0.10.12}/extra/history_files/sequence_LST1_04185.0010.history +0 -0
  56. {lstosa-0.10.10 → lstosa-0.10.12}/extra/sacct_output.csv +0 -0
  57. {lstosa-0.10.10 → lstosa-0.10.12}/extra/squeue_output.csv +0 -0
  58. {lstosa-0.10.10 → lstosa-0.10.12}/pyproject.toml +0 -0
  59. {lstosa-0.10.10 → lstosa-0.10.12}/setup.cfg +0 -0
  60. {lstosa-0.10.10 → lstosa-0.10.12}/src/lstosa.egg-info/dependency_links.txt +0 -0
  61. {lstosa-0.10.10 → lstosa-0.10.12}/src/lstosa.egg-info/entry_points.txt +0 -0
  62. {lstosa-0.10.10 → lstosa-0.10.12}/src/lstosa.egg-info/requires.txt +0 -0
  63. {lstosa-0.10.10 → lstosa-0.10.12}/src/lstosa.egg-info/top_level.txt +0 -0
  64. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/__init__.py +0 -0
  65. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/configs/__init__.py +0 -0
  66. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/configs/config.py +0 -0
  67. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/configs/datamodel.py +0 -0
  68. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/configs/options.py +0 -0
  69. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/high_level/__init__.py +0 -0
  70. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/high_level/selection_cuts.toml +0 -0
  71. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/high_level/significance.py +0 -0
  72. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/high_level/tests/test_significance.py +0 -0
  73. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/job.py +0 -0
  74. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/__init__.py +0 -0
  75. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/database.py +0 -0
  76. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/extract.py +0 -0
  77. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/nightsummary.py +0 -0
  78. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/set_source_coordinates.py +0 -0
  79. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/tests/test_database.py +0 -0
  80. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/tests/test_extract.py +0 -0
  81. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/tests/test_nightsummary.py +0 -0
  82. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/nightsummary/tests/test_source_coordinates.py +0 -0
  83. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/osadb.py +0 -0
  84. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/__init__.py +0 -0
  85. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/capture.py +0 -0
  86. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/config/definition.yaml +0 -0
  87. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/config/environment.yaml +0 -0
  88. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/config/logger.yaml +0 -0
  89. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/io.py +0 -0
  90. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/provenance/utils.py +0 -0
  91. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/raw.py +0 -0
  92. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/report.py +0 -0
  93. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/__init__.py +0 -0
  94. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/calibration_pipeline.py +0 -0
  95. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/copy_datacheck.py +0 -0
  96. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/datasequence.py +0 -0
  97. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/provprocess.py +0 -0
  98. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/reprocess_longterm.py +0 -0
  99. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/reprocessing.py +0 -0
  100. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/sequencer_webmaker.py +0 -0
  101. /lstosa-0.10.10/src/osa/scripts/show_run_summary_tcu.py → /lstosa-0.10.12/src/osa/scripts/show_run_summary.py +0 -0
  102. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/simulate_processing.py +0 -0
  103. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/tests/__init__.py +0 -0
  104. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/scripts/update_source_catalog.py +0 -0
  105. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/__init__.py +0 -0
  106. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_jobs.py +0 -0
  107. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_osa.py +0 -0
  108. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_osadb.py +0 -0
  109. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_paths.py +0 -0
  110. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_raw.py +0 -0
  111. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_report.py +0 -0
  112. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/tests/test_veto.py +0 -0
  113. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/__init__.py +0 -0
  114. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/iofile.py +0 -0
  115. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/logging.py +0 -0
  116. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/mail.py +0 -0
  117. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/register.py +0 -0
  118. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/tests/test_iofile.py +0 -0
  119. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/tests/test_utils.py +0 -0
  120. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/utils/utils.py +0 -0
  121. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/version.py +0 -0
  122. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/veto.py +0 -0
  123. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/webserver/__init__.py +0 -0
  124. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/webserver/utils.py +0 -0
  125. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/workflow/__init__.py +0 -0
  126. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/workflow/dl3.py +0 -0
  127. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/workflow/stages.py +0 -0
  128. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/workflow/tests/test_dl3.py +0 -0
  129. {lstosa-0.10.10 → lstosa-0.10.12}/src/osa/workflow/tests/test_stages.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: lstosa
3
- Version: 0.10.10
3
+ Version: 0.10.12
4
4
  Summary: Onsite analysis pipeline for the CTA LST-1
5
5
  Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
6
6
  Author-email: Daniel Morcuende <dmorcuen@ucm.es>
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: lstosa
3
- Version: 0.10.10
3
+ Version: 0.10.12
4
4
  Summary: Onsite analysis pipeline for the CTA LST-1
5
5
  Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
6
6
  Author-email: Daniel Morcuende <dmorcuen@ucm.es>
@@ -95,7 +95,6 @@ src/osa/scripts/reprocessing.py
95
95
  src/osa/scripts/sequencer.py
96
96
  src/osa/scripts/sequencer_webmaker.py
97
97
  src/osa/scripts/show_run_summary.py
98
- src/osa/scripts/show_run_summary_tcu.py
99
98
  src/osa/scripts/simulate_processing.py
100
99
  src/osa/scripts/update_source_catalog.py
101
100
  src/osa/scripts/tests/__init__.py
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.10.10'
16
- __version_tuple__ = version_tuple = (0, 10, 10)
15
+ __version__ = version = '0.10.12'
16
+ __version_tuple__ = version_tuple = (0, 10, 12)
@@ -28,6 +28,7 @@ HIGH_LEVEL_DIR: %(OSA_DIR)s/HighLevel
28
28
  LONGTERM_DIR: %(OSA_DIR)s/DL1DataCheck_LongTerm
29
29
  MERGED_SUMMARY: %(OSA_DIR)s/Catalog/merged_RunSummary.ecsv
30
30
  SEQUENCER_WEB_DIR: %(OSA_DIR)s/SequencerWeb
31
+ GAIN_SELECTION_FLAG_DIR: %(OSA_DIR)s/GainSel
31
32
 
32
33
  # To be set by the user. Using PROD-ID will overcome the automatic
33
34
  # fetching of lstchain version. Otherwise leave it empty (and without the colon symbol).
@@ -138,6 +139,7 @@ WORKFLOWPREFIX: Workflow
138
139
  GRAPHSUFFIX: .dot
139
140
  SVGSUFFIX: .svg
140
141
  end_of_activity: NightFinished.txt
142
+ gain_selection_check: GainSelFinished.txt
141
143
 
142
144
  [OUTPUT]
143
145
  # REPORTWIDTH is the width in characters of the heading frame for the output
@@ -48,6 +48,13 @@ def monitoring_dir(base_test_dir):
48
48
  return monitoring_dir
49
49
 
50
50
 
51
+ @pytest.fixture(scope="session")
52
+ def osa_dir(base_test_dir):
53
+ osa_dir = base_test_dir / "OSA"
54
+ osa_dir.mkdir(parents=True, exist_ok=True)
55
+ return osa_dir
56
+
57
+
51
58
  @pytest.fixture(scope="session")
52
59
  def run_summary_dir(monitoring_dir):
53
60
  summary_dir = monitoring_dir / "RunSummary"
@@ -436,6 +443,8 @@ def sequence_file_list(
436
443
  drs4_time_calibration_files,
437
444
  systematic_correction_files,
438
445
  r0_data,
446
+ gain_selection_flag_file,
447
+ merged_run_summary,
439
448
  ):
440
449
  for r0_file in r0_data:
441
450
  assert r0_file.exists()
@@ -448,6 +457,8 @@ def sequence_file_list(
448
457
 
449
458
  assert run_summary_file.exists()
450
459
  assert run_catalog.exists()
460
+ assert gain_selection_flag_file.exists()
461
+ assert merged_run_summary.exists()
451
462
 
452
463
  run_program("sequencer", "-d", "2020-01-17", "--no-submit", "-t", "LST1")
453
464
  # First sequence in the list corresponds to the calibration run 1809
@@ -548,11 +559,9 @@ def run_catalog(run_catalog_dir):
548
559
 
549
560
 
550
561
  @pytest.fixture(scope="session")
551
- def database(base_test_dir):
562
+ def database(osa_dir):
552
563
  import sqlite3
553
564
 
554
- osa_dir = base_test_dir / "OSA"
555
- osa_dir.mkdir(parents=True, exist_ok=True)
556
565
  db_file = osa_dir / "osa.db"
557
566
  with sqlite3.connect(db_file) as connection:
558
567
  cursor = connection.cursor()
@@ -562,3 +571,13 @@ def database(base_test_dir):
562
571
  )
563
572
  cursor.connection.commit()
564
573
  yield cursor
574
+
575
+
576
+ @pytest.fixture(scope="session")
577
+ def gain_selection_flag_file(osa_dir):
578
+
579
+ GainSel_dir = osa_dir / "GainSel" / "20200117"
580
+ GainSel_dir.mkdir(parents=True, exist_ok=True)
581
+ file = GainSel_dir / "GainSelFinished.txt"
582
+ file.touch()
583
+ return file
@@ -5,6 +5,8 @@ import re
5
5
  from datetime import datetime
6
6
  from pathlib import Path
7
7
  from typing import List
8
+ import subprocess
9
+ import time
8
10
 
9
11
  import lstchain
10
12
  from astropy.table import Table
@@ -359,15 +361,46 @@ def get_latest_version_file(longterm_files: List[str]) -> Path:
359
361
  )
360
362
 
361
363
 
362
- def create_longterm_symlink():
364
def is_job_completed(job_id: str) -> bool:
    """
    Check whether the SLURM job `job_id` has finished successfully.

    Polls `sacct` up to 10 times, waiting 10 minutes between checks
    (i.e. up to roughly 100 minutes in total).

    Parameters
    ----------
    job_id : str
        SLURM job identifier.

    Returns
    -------
    bool
        True if the job reached the COMPLETED state, False otherwise.
    """
    n_max = 10
    for n in range(n_max):
        # Check if the status of the SLURM job is "COMPLETED".
        # "COMPLETED" anywhere in the sacct output means the job
        # (or one of its steps) finished successfully.
        status = subprocess.run(
            ["sacct", "--format=state", "--jobs", job_id],
            capture_output=True,
            text=True,
        )
        if "COMPLETED" in status.stdout:
            log.debug(f"Job {job_id} finished successfully!")
            return True
        # Do not sleep after the final check: there is no further retry.
        if n < n_max - 1:
            log.debug(f"Job {job_id} is not completed yet, checking again in 10 minutes...")
            time.sleep(600)  # wait 10 minutes to check again
    log.info(
        f"The maximum number of checks of job {job_id} was reached, "
        f"job {job_id} did not finish successfully yet."
    )
    return False
383
+
384
+
385
def create_longterm_symlink(cherenkov_job_id: str = None):
    """If the created longterm DL1 datacheck file corresponds to the latest
    version available, make symlink to it in the "all" common directory.

    Parameters
    ----------
    cherenkov_job_id : str, optional
        SLURM job id of the lstchain_cherenkov_transparency job. When given,
        the symlink is only created if that job reaches the COMPLETED state
        (polled via `is_job_completed`, which may wait up to ~100 minutes);
        when None, the link is created unconditionally.
    """
    # Either no job to wait for, or the cherenkov_transparency job completed.
    if not cherenkov_job_id or is_job_completed(cherenkov_job_id):
        nightdir = utils.date_to_dir(options.date)
        longterm_dir = Path(cfg.get("LST1", "LONGTERM_DIR"))
        linked_longterm_file = longterm_dir / f"night_wise/all/DL1_datacheck_{nightdir}.h5"
        # All versioned copies of this night's datacheck file (v*/<night>/...).
        all_longterm_files = longterm_dir.rglob(f"v*/{nightdir}/DL1_datacheck_{nightdir}.h5")
        latest_version_file = get_latest_version_file(all_longterm_files)
        log.info("Symlink the latest version longterm DL1 datacheck file in the common directory.")
        # Remove any pre-existing link so it always points at the latest version.
        linked_longterm_file.unlink(missing_ok=True)
        linked_longterm_file.symlink_to(latest_version_file)
    else:
        log.warning(f"Job {cherenkov_job_id} (lstchain_cherenkov_transparency) did not finish successfully.")
399
+
400
+ def dl1_datacheck_longterm_file_exits() -> bool:
401
+ """Return true if the longterm DL1 datacheck file was already produced."""
365
402
  nightdir = utils.date_to_dir(options.date)
366
403
  longterm_dir = Path(cfg.get("LST1", "LONGTERM_DIR"))
367
- linked_longterm_file = longterm_dir / f"night_wise/all/DL1_datacheck_{nightdir}.h5"
368
- all_longterm_files = longterm_dir.rglob(f"v*/{nightdir}/DL1_datacheck_{nightdir}.h5")
369
- latest_version_file = get_latest_version_file(all_longterm_files)
404
+ longterm_file = longterm_dir / options.prod_id / nightdir / f"DL1_datacheck_{nightdir}.h5"
405
+ return longterm_file.exists()
370
406
 
371
- log.info("Symlink the latest version longterm DL1 datacheck file in the common directory.")
372
- linked_longterm_file.unlink(missing_ok=True)
373
- linked_longterm_file.symlink_to(latest_version_file)
@@ -41,6 +41,7 @@ class Telescope:
41
41
  config_file: Path,
42
42
  ignore_cronlock: bool = False,
43
43
  test: bool = False,
44
+ no_gainsel: bool = False,
44
45
  ):
45
46
  """
46
47
  Parameters
@@ -82,7 +83,7 @@ class Telescope:
82
83
  if not self.lock_automatic_sequencer() and not ignore_cronlock:
83
84
  log.warning(f"{self.telescope} already locked! Ignoring {self.telescope}")
84
85
  return
85
- if not self.simulate_sequencer(date, config_file, test):
86
+ if not self.simulate_sequencer(date, config_file, test, no_gainsel):
86
87
  log.warning(
87
88
  f"Simulation of the sequencer failed "
88
89
  f"for {self.telescope}! Ignoring {self.telescope}"
@@ -121,7 +122,7 @@ class Telescope:
121
122
  self.locked = True
122
123
  return True
123
124
 
124
- def simulate_sequencer(self, date: str, config_file: Path, test: bool):
125
+ def simulate_sequencer(self, date: str, config_file: Path, test: bool, no_gainsel: bool):
125
126
  """Launch the sequencer in simulation mode."""
126
127
  if test:
127
128
  self.read_file()
@@ -135,6 +136,9 @@ class Telescope:
135
136
  date,
136
137
  self.telescope,
137
138
  ]
139
+ if no_gainsel:
140
+ sequencer_cmd.insert(1, "--no-gainsel")
141
+
138
142
  log.debug(f"Executing {' '.join(sequencer_cmd)}")
139
143
  sequencer = subprocess.Popen(
140
144
  sequencer_cmd,
@@ -445,7 +449,7 @@ def main():
445
449
  # create telescope and sequence objects
446
450
  log.info("Simulating sequencer...")
447
451
 
448
- telescope = Telescope(args.tel_id, date, args.config)
452
+ telescope = Telescope(args.tel_id, date, args.config, no_gainsel=args.no_gainsel)
449
453
 
450
454
  log.info(f"Processing {args.tel_id}...")
451
455
 
@@ -24,7 +24,11 @@ from osa.job import (
24
24
  )
25
25
  from osa.nightsummary.extract import extract_runs, extract_sequences
26
26
  from osa.nightsummary.nightsummary import run_summary_table
27
- from osa.paths import destination_dir, create_longterm_symlink
27
+ from osa.paths import (
28
+ destination_dir,
29
+ create_longterm_symlink,
30
+ dl1_datacheck_longterm_file_exits
31
+ )
28
32
  from osa.raw import is_raw_data_available
29
33
  from osa.report import start
30
34
  from osa.utils.cliopts import closercliparsing
@@ -154,38 +158,42 @@ def ask_for_closing():
154
158
  def post_process(seq_tuple):
155
159
  """Set of last instructions."""
156
160
  seq_list = seq_tuple[1]
161
+
162
+ if dl1_datacheck_longterm_file_exits() and not options.test:
163
+ create_longterm_symlink()
157
164
 
158
- # Close the sequences
159
- post_process_files(seq_list)
165
+ else:
166
+ # Close the sequences
167
+ post_process_files(seq_list)
160
168
 
161
- # Merge DL1 datacheck files and produce PDFs. It also produces
162
- # the daily datacheck report using the longterm script, and updates
163
- # the longterm DL1 datacheck file with the cherenkov_transparency script.
164
- if cfg.getboolean("lstchain", "merge_dl1_datacheck"):
165
- list_job_id = merge_dl1_datacheck(seq_list)
166
- longterm_job_id = daily_datacheck(daily_longterm_cmd(list_job_id))
167
- cherenkov_transparency(cherenkov_transparency_cmd(longterm_job_id))
168
- create_longterm_symlink()
169
+ # Extract the provenance info
170
+ extract_provenance(seq_list)
169
171
 
170
- # Extract the provenance info
171
- extract_provenance(seq_list)
172
+ # Merge DL1b files run-wise
173
+ merge_files(seq_list, data_level="DL1AB")
172
174
 
173
- # Merge DL1b files run-wise
174
- merge_files(seq_list, data_level="DL1AB")
175
+ merge_muon_files(seq_list)
175
176
 
176
- merge_muon_files(seq_list)
177
+ # Merge DL2 files run-wise
178
+ if not options.no_dl2:
179
+ merge_files(seq_list, data_level="DL2")
177
180
 
178
- # Merge DL2 files run-wise
179
- if not options.no_dl2:
180
- merge_files(seq_list, data_level="DL2")
181
+ # Merge DL1 datacheck files and produce PDFs. It also produces
182
+ # the daily datacheck report using the longterm script, and updates
183
+ # the longterm DL1 datacheck file with the cherenkov_transparency script.
184
+ if cfg.getboolean("lstchain", "merge_dl1_datacheck"):
185
+ list_job_id = merge_dl1_datacheck(seq_list)
186
+ longterm_job_id = daily_datacheck(daily_longterm_cmd(list_job_id))
187
+ cherenkov_job_id = cherenkov_transparency(cherenkov_transparency_cmd(longterm_job_id))
188
+ create_longterm_symlink(cherenkov_job_id)
181
189
 
182
- time.sleep(600)
190
+ time.sleep(600)
183
191
 
184
192
  # Check if all jobs launched by autocloser finished correctly
185
193
  # before creating the NightFinished.txt file
186
194
  n_max = 6
187
195
  n = 0
188
- while not all_closer_jobs_finished_correctly() & n <= n_max:
196
+ while not all_closer_jobs_finished_correctly() and n <= n_max:
189
197
  log.info(
190
198
  "All jobs launched by autocloser did not finished correctly yet. "
191
199
  "Checking again in 10 minutes..."
@@ -194,7 +202,7 @@ def post_process(seq_tuple):
194
202
  n += 1
195
203
 
196
204
  if n > n_max:
197
- send_warning_mail(date=options.date)
205
+ send_warning_mail(date=date_to_iso(options.date))
198
206
  return False
199
207
 
200
208
  if options.seqtoclose is None:
@@ -536,6 +544,7 @@ def cherenkov_transparency_cmd(longterm_job_id: str) -> List[str]:
536
544
 
537
545
  return [
538
546
  "sbatch",
547
+ "--parsable",
539
548
  "-D",
540
549
  options.directory,
541
550
  "-o",
@@ -553,7 +562,16 @@ def cherenkov_transparency(cmd: List[str]):
553
562
  log.debug(f"Executing {stringify(cmd)}")
554
563
 
555
564
  if not options.simulate and not options.test and shutil.which("sbatch") is not None:
556
- subprocess.run(cmd, check=True)
565
+ job = subprocess.run(
566
+ cmd,
567
+ encoding="utf-8",
568
+ capture_output=True,
569
+ text=True,
570
+ check=True,
571
+ )
572
+ job_id = job.stdout.strip()
573
+ return job_id
574
+
557
575
  else:
558
576
  log.debug("Simulate launching scripts")
559
577
 
@@ -0,0 +1,349 @@
1
+ """Script to run the gain selection over a list of dates."""
2
+ import logging
3
+ import re
4
+ import shutil
5
+ import glob
6
+ import pandas as pd
7
+ import subprocess as sp
8
+ from pathlib import Path
9
+ from textwrap import dedent
10
+ from io import StringIO
11
+ import argparse
12
+
13
+ from astropy.table import Table
14
+ from lstchain.paths import run_info_from_filename, parse_r0_filename
15
+
16
+ from osa.scripts.reprocessing import get_list_of_dates, check_job_status_and_wait
17
+ from osa.utils.utils import wait_for_daytime
18
+ from osa.utils.logging import myLogger
19
+ from osa.job import get_sacct_output, FORMAT_SLURM
20
+ from osa.configs.config import cfg
21
+ from osa.paths import DEFAULT_CFG
22
+
23
log = myLogger(logging.getLogger(__name__))

# Prepend the offline data-volume-reduction tools to PATH inside the sbatch jobs.
PATH = "PATH=/fefs/aswg/software/offline_dvr/bin:$PATH"

# Command-line interface of the gain selection script.
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
    "--check", action="store_true", default=False, help="Check if any job failed"
)
parser.add_argument(
    "--no-queue-check",
    action="store_true",
    default=False,
    help="Do not wait until the number of jobs in the slurm queue is < 1500",
)
parser.add_argument(
    "-c", "--config", action="store", type=Path, default=DEFAULT_CFG, help="Configuration file"
)
parser.add_argument(
    "-d", "--date", default=None, type=str, help="Night to apply the gain selection"
)
parser.add_argument(
    "-l", "--dates-file", default=None, help="List of dates to apply the gain selection"
)
parser.add_argument(
    "-o",
    "--output-basedir",
    type=Path,
    default=Path("/fefs/aswg/data/real/R0G"),
    help="Output directory of the gain selected files. Default is /fefs/aswg/data/real/R0G."
)
parser.add_argument(
    "-s",
    "--start-time",
    type=int,
    default=10,
    help="Time to (re)start gain selection in HH format. Default is 10.",
)
parser.add_argument(
    "-e",
    "--end-time",
    type=int,
    default=18,
    help="Time to stop gain selection in HH format. Default is 18.",
)
83
def get_sbatch_script(
    run_id, subrun, input_file, output_dir, log_dir, log_file, ref_time, ref_counter, module, ref_source, script
):
    """Build the sbatch job pilot script for running the gain selection.

    Parameters
    ----------
    run_id, subrun : int
        Run and subrun numbers, used for job/log naming.
    input_file, output_dir, log_dir, log_file :
        Paths interpolated into the pilot script.
    ref_time, ref_counter, module, ref_source :
        Dragon reference information (only used by the "old" lst_dvr tool).
    script : str
        Either "old" (lst_dvr) or "new" (lstchain_r0_to_r0g).

    Returns
    -------
    str
        The content of the sbatch pilot script.

    Raises
    ------
    ValueError
        If `script` is neither "old" nor "new" (previously this silently
        returned None, producing an empty job file).
    """
    if script == "old":
        return dedent(
            f"""\
            #!/bin/bash

            #SBATCH -D {log_dir}
            #SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
            #SBATCH --job-name "gain_selection_{run_id:05d}"
            #SBATCH --export {PATH}
            #SBATCH --partition=short,long

            lst_dvr {input_file} {output_dir} {ref_time} {ref_counter} {module} {ref_source}
            """
        )
    if script == "new":
        return dedent(
            f"""\
            #!/bin/bash

            #SBATCH -D {log_dir}
            #SBATCH -o "gain_selection_{run_id:05d}_{subrun:04d}_%j.log"
            #SBATCH --job-name "gain_selection_{run_id:05d}"
            #SBATCH --mem=40GB
            #SBATCH --partition=short,long

            lstchain_r0_to_r0g --R0-file={input_file} --output-dir={output_dir} --log={log_file} --no-flatfield-heuristic
            """
        )
    raise ValueError(f"Unknown gain selection script flavour: {script!r}")
116
def apply_gain_selection(date: str, start: int, end: int, output_basedir: Path = None, no_queue_check: bool = False):
    """
    Submit the jobs to apply the gain selection to the data for a given date
    on a subrun-by-subrun basis.

    Parameters
    ----------
    date : str
        Night to process, in YYYYMMDD format (compared lexicographically,
        which is valid for this fixed-width format).
    start, end : int
        Daytime window (hours, HH) during which jobs may be submitted;
        enforced through `wait_for_daytime`.
    output_basedir : Path
        Base directory for gain-selected output and its `log` subtree.
    no_queue_check : bool
        If True, skip waiting for the SLURM queue to drain below 1500 jobs.
    """
    # Data taken before 2023-12-05 are processed with the old lst_dvr tool;
    # later data use lstchain_r0_to_r0g.
    if date < "20231205":
        script = "old"
    else:
        script = "new"

    run_summary_dir = Path("/fefs/aswg/data/real/monitoring/RunSummary")
    run_summary_file = run_summary_dir / f"RunSummary_{date}.ecsv"
    summary_table = Table.read(run_summary_file)
    # Apply gain selection only to DATA runs
    data_runs = summary_table[summary_table["run_type"] == "DATA"]
    log.info(f"Found {len(data_runs)} DATA runs to which apply the gain selection")

    output_dir = output_basedir / date
    log_dir = output_basedir / "log" / date
    output_dir.mkdir(parents=True, exist_ok=True)
    log_dir.mkdir(parents=True, exist_ok=True)
    log_file = log_dir / f"r0_to_r0g_{date}.log"
    r0_dir = Path(f"/fefs/aswg/data/real/R0/{date}")

    for run in data_runs:
        if not no_queue_check:
            # Check slurm queue status and sleep for a while to avoid overwhelming the queue
            check_job_status_and_wait(max_jobs=1500)

        # Avoid running jobs while it is still night time
        wait_for_daytime(start, end)

        run_id = run["run_id"]
        # NOTE(review): assumes the run summary table provides these dragon
        # reference columns for every DATA run — confirm against RunSummary schema.
        ref_time = run["dragon_reference_time"]
        ref_counter = run["dragon_reference_counter"]
        module = run["dragon_reference_module_index"]
        ref_source = run["dragon_reference_source"].upper()

        # Subruns are encoded in characters [-12:-8] of the R0 file name.
        files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.????.fits.fz")
        subrun_numbers = [int(file[-12:-8]) for file in files]
        input_files = []

        if date < "20231205" and ref_source not in ["UCTS", "TIB"]:
            # Without UCTS/TIB timing info the old tool cannot run: copy R0 as-is.
            # `input_files` is a generator here and is exhausted by the copy loop
            # below, so no sbatch job is created for this run.
            input_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")
            log.info(
                f"Run {run_id} does not have UCTS or TIB info, so gain selection cannot"
                f"be applied. Copying directly the R0 files to {output_dir}."
            )
            for file in input_files:
                sp.run(["cp", file, output_dir])

        else:
            n_subruns = max(subrun_numbers)

            for subrun in range(n_subruns + 1):
                new_files = glob.glob(f"{r0_dir}/LST-1.?.Run{run_id:05d}.{subrun:04d}.fits.fz")

                # A complete subrun has exactly 4 R0 streams; otherwise copy as-is.
                if len(new_files) != 4:
                    log.info(f"Run {run_id}.{subrun:05d} does not have 4 streams of R0 files, so gain"
                        f"selection cannot be applied. Copying directly the R0 files to {output_dir}."
                    )
                    for file in new_files:
                        sp.run(["cp", file, output_dir])

                else:
                    # Only the first stream is passed to the tool; it locates the others.
                    new_files.sort()
                    input_files.append(new_files[0])

        log.info("Creating and launching the sbatch scripts for the rest of the runs to apply gain selection")
        for file in input_files:
            run_info = run_info_from_filename(file)
            job_file = log_dir / f"gain_selection_{run_info.run:05d}.{run_info.subrun:04d}.sh"
            with open(job_file, "w") as f:
                f.write(
                    get_sbatch_script(
                        run_id,
                        run_info.subrun,
                        file,
                        output_dir,
                        log_dir,
                        log_file,
                        ref_time,
                        ref_counter,
                        module,
                        ref_source,
                        script,
                    )
                )
            sp.run(["sbatch", job_file], check=True)

    # Non-DATA (calibration etc.) runs are never gain-selected: copy them as-is.
    calib_runs = summary_table[summary_table["run_type"] != "DATA"]
    log.info(f"Found {len(calib_runs)} NO-DATA runs")

    for run in calib_runs:
        log.info(f"Copying R0 files corresponding to run {run} directly to {output_dir}")
        # Avoid copying files while it is still night time
        wait_for_daytime(start, end)

        run_id = run["run_id"]
        r0_files = r0_dir.glob(f"LST-1.?.Run{run_id:05d}.????.fits.fz")

        for file in r0_files:
            sp.run(["cp", file, output_dir])
221
def run_sacct_j(job) -> StringIO:
    """Query SLURM accounting for one job and return its raw CSV output.

    Returns an empty stream when the `sacct` executable is unavailable
    (e.g. when running outside the cluster).
    """
    if shutil.which("sacct") is None:
        log.warning("No job info available since sacct command is not available")
        return StringIO()

    fields = ",".join(FORMAT_SLURM)
    cmd = [
        "sacct",
        "-n",            # no header line
        "--parsable2",   # machine-readable, no trailing delimiter
        "--delimiter=,",
        "--units=G",
        "-o",
        fields,
        "-j",
        job,
    ]
    raw_output = sp.check_output(cmd)
    return StringIO(raw_output.decode())
241
+
242
def GainSel_flag_file(date: str) -> Path:
    """Return the absolute path of the gain-selection flag file for `date`."""
    filename = cfg.get("LSTOSA", "gain_selection_check")
    base_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
    return (base_dir / date / filename).resolve()
247
+
248
+
249
def GainSel_finished(date: str) -> bool:
    """Check if gain selection finished successfully (flag file present)."""
    return GainSel_flag_file(date).exists()
253
+
254
+
255
def check_failed_jobs(date: str, output_basedir: Path = None):
    """Search for failed gain-selection jobs in the log directory.

    Inspects the SLURM accounting state of every gain_selection*.log job for
    the given date. R0 runs absent both from the run summary and from the
    R0G tree are copied over unmodified. The GainSelFinished flag file is
    only created when no job failed (previously it was created regardless,
    falsely signalling success to downstream steps).

    Parameters
    ----------
    date : str
        Night to check, in YYYYMMDD format.
    output_basedir : Path
        Base directory holding the gain-selected output and its `log` tree.
    """
    failed_jobs = []
    log_dir = output_basedir / "log" / date
    filenames = glob.glob(f"{log_dir}/gain_selection*.log")
    # Job ids are embedded in the log file names
    # (gain_selection_<run>_<subrun>_<jobid>.log).
    jobs = [re.search(r'(?<=_)(.[0-9.]+?)(?=.log)', i).group(0) for i in filenames]

    for job in jobs:
        output = run_sacct_j(job)
        df = get_sacct_output(output)

        if df.iloc[0]["State"] != "COMPLETED":
            log.warning(f"Job {job} did not finish successfully")
            failed_jobs.append(job)

    if failed_jobs:
        log.warning(f"{date}: some jobs did not finish successfully")
    else:
        log.info(f"{date}: all jobs finished successfully")

    run_summary_dir = Path("/fefs/aswg/data/real/monitoring/RunSummary")
    run_summary_file = run_summary_dir / f"RunSummary_{date}.ecsv"
    summary_table = Table.read(run_summary_file)
    runs = summary_table["run_id"]
    missing_runs = []

    r0_files = glob.glob(f"/fefs/aswg/data/real/R0/{date}/LST-1.?.Run?????.????.fits.fz")
    r0g_files = glob.glob(f"/fefs/aswg/data/real/R0G/{date}/LST-1.?.Run?????.????.fits.fz")
    all_r0_runs = [parse_r0_filename(i).run for i in r0_files]
    all_r0g_runs = [parse_r0_filename(i).run for i in r0g_files]

    # Runs present in R0 but listed neither in the run summary nor in R0G
    # were never handled by the gain selection: copy them over unmodified.
    for run in all_r0_runs:
        if run not in runs and run not in all_r0g_runs:
            missing_runs.append(run)

    missing_runs.sort()
    if missing_runs:
        log.info(
            f"Some runs are missing. Copying R0 files of runs {pd.Series(missing_runs).unique()} "
            f"directly to /fefs/aswg/data/real/R0G/{date}"
        )

        for run in missing_runs:
            output_dir = Path(f"/fefs/aswg/data/real/R0G/{date}/")
            files = glob.glob(f"/fefs/aswg/data/real/R0/{date}/LST-1.?.Run{run:05d}.????.fits.fz")
            for file in files:
                sp.run(["cp", file, output_dir])

    if failed_jobs:
        # Bug fix: do not create the "finished" flag file when jobs failed,
        # otherwise GainSel_finished() would report success to the pipeline.
        log.warning(f"Not creating the flag file for date {date} since some jobs failed.")
        return

    GainSel_dir = Path(cfg.get("LST1", "GAIN_SELECTION_FLAG_DIR"))
    flagfile_dir = GainSel_dir / date
    flagfile_dir.mkdir(parents=True, exist_ok=True)

    flagfile = GainSel_flag_file(date)
    log.info(f"Gain selection finished successfully, creating flag file for date {date} ({flagfile})")
    flagfile.touch()
313
+
314
+
315
def main():
    """
    Loop over the dates listed in the input file and launch the gain selection
    script for each of them. The input file should list the dates in the format
    YYYYMMDD one date per line.
    """
    log.setLevel(logging.INFO)
    args = parser.parse_args()

    if args.date:
        # Single-date mode: either check the finished jobs or submit new ones.
        if args.check:
            log.info(f"Checking gain selection status for date {args.date}")
            check_failed_jobs(args.date, args.output_basedir)
        else:
            log.info(f"Applying gain selection to date {args.date}")
            apply_gain_selection(
                args.date,
                args.start_time,
                args.end_time,
                args.output_basedir,
                no_queue_check=args.no_queue_check,
            )

    elif args.dates_file:
        # Batch mode: one date per line in the given file.
        list_of_dates = get_list_of_dates(args.dates_file)
        log.info(f"Found {len(list_of_dates)} dates to apply or check gain selection")

        if args.check:
            for date in list_of_dates:
                log.info(f"Checking gain selection status for date {date}")
                check_failed_jobs(date, args.output_basedir)
        else:
            for date in list_of_dates:
                log.info(f"Applying gain selection to date {date}")
                apply_gain_selection(
                    date,
                    args.start_time,
                    args.end_time,
                    args.output_basedir,
                    no_queue_check=args.no_queue_check,
                )
        log.info("Done! No more dates to process.")

    else:
        # Bug fix: previously the script exited silently when neither option
        # was given, which looked like a successful no-op run.
        log.error("Please provide either a date (-d/--date) or a dates file (-l/--dates-file).")


if __name__ == "__main__":
    main()