lstosa 0.10.18__py3-none-any.whl → 0.10.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/METADATA +2 -2
  2. {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/RECORD +41 -40
  3. {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/WHEEL +1 -1
  4. {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/entry_points.txt +1 -0
  5. osa/_version.py +9 -4
  6. osa/configs/options.py +2 -0
  7. osa/configs/sequencer.cfg +12 -4
  8. osa/conftest.py +127 -3
  9. osa/high_level/significance.py +3 -3
  10. osa/high_level/tests/test_significance.py +3 -0
  11. osa/job.py +48 -25
  12. osa/nightsummary/extract.py +11 -2
  13. osa/nightsummary/tests/test_extract.py +3 -0
  14. osa/paths.py +102 -23
  15. osa/provenance/capture.py +1 -1
  16. osa/provenance/config/definition.yaml +7 -0
  17. osa/provenance/utils.py +22 -7
  18. osa/scripts/autocloser.py +0 -10
  19. osa/scripts/calibration_pipeline.py +4 -0
  20. osa/scripts/closer.py +132 -53
  21. osa/scripts/copy_datacheck.py +5 -3
  22. osa/scripts/datasequence.py +45 -71
  23. osa/scripts/provprocess.py +16 -7
  24. osa/scripts/sequencer.py +34 -26
  25. osa/scripts/sequencer_catB_tailcuts.py +223 -0
  26. osa/scripts/sequencer_webmaker.py +4 -0
  27. osa/scripts/simulate_processing.py +4 -7
  28. osa/scripts/tests/test_osa_scripts.py +64 -20
  29. osa/scripts/update_source_catalog.py +5 -2
  30. osa/tests/test_jobs.py +28 -11
  31. osa/tests/test_paths.py +6 -6
  32. osa/utils/cliopts.py +37 -32
  33. osa/utils/register.py +18 -13
  34. osa/utils/tests/test_utils.py +14 -0
  35. osa/utils/utils.py +173 -56
  36. osa/workflow/dl3.py +1 -2
  37. osa/workflow/stages.py +16 -11
  38. osa/workflow/tests/test_dl3.py +2 -1
  39. osa/workflow/tests/test_stages.py +7 -4
  40. {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/LICENSE +0 -0
  41. {lstosa-0.10.18.dist-info → lstosa-0.10.19.dist-info}/top_level.txt +0 -0
osa/utils/register.py CHANGED
@@ -7,7 +7,11 @@ from pathlib import Path
 
 from osa.configs import options
 from osa.configs.config import cfg
-from osa.paths import destination_dir
+from osa.paths import (
+    destination_dir,
+    get_dl1_prod_id_and_config,
+    get_dl2_prod_id
+)
 from osa.utils.logging import myLogger
 from osa.veto import set_closed_sequence
 
@@ -70,11 +74,9 @@ def create_symlinks(input_file, output_file, prefix, suffix):
     """
 
     analysis_dir = Path(options.directory)
-    dl1ab_dir = analysis_dir / options.dl1_prod_id
 
     if prefix == "dl1_LST-1" and suffix == ".h5":
         dl1_filepath_analysis_dir = analysis_dir / input_file.name
-        dl1_filepath_dl1_dir = dl1ab_dir / input_file.name
         # Remove the original DL1 files pre DL1ab stage and keep only symlinks
         if dl1_filepath_analysis_dir.is_file() and not dl1_filepath_analysis_dir.is_symlink():
             dl1_filepath_analysis_dir.unlink()
@@ -83,8 +85,8 @@ def create_symlinks(input_file, output_file, prefix, suffix):
             dl1_filepath_analysis_dir.symlink_to(output_file.resolve())
 
         # Also set the symlink in the DL1ab subdirectory
-        if not dl1_filepath_dl1_dir.is_symlink():
-            dl1_filepath_dl1_dir.symlink_to(output_file.resolve())
+        if not input_file.is_symlink():
+            input_file.symlink_to(output_file.resolve())
 
     if prefix == "muons_LST-1" and suffix == ".fits":
         input_file.symlink_to(output_file.resolve())
@@ -93,7 +95,7 @@ def create_symlinks(input_file, output_file, prefix, suffix):
         input_file.symlink_to(output_file.resolve())
 
 
-def register_run_concept_files(run_string, concept):
+def register_run_concept_files(run_string, sequence_type, concept):
     """
     Prepare files to be moved to final destination directories
     from the running_analysis original directory.
@@ -107,17 +109,20 @@ def register_run_concept_files(run_string, concept):
     initial_dir = Path(options.directory) # running_analysis
 
     # For MUON and INTERLEAVED data products, the initial directory is running_analysis
+    if sequence_type=="DATA":
+        dl1_prod_id = get_dl1_prod_id_and_config(int(run_string))[0]
+        dl2_prod_id = get_dl2_prod_id(int(run_string))
 
     if concept == "DL2":
-        initial_dir = initial_dir / options.dl2_prod_id
+        initial_dir = initial_dir / dl2_prod_id
 
     elif concept == "DL1AB":
-        initial_dir = initial_dir / options.dl1_prod_id
+        initial_dir = initial_dir / dl1_prod_id
 
     elif concept == "DATACHECK":
-        initial_dir = initial_dir / options.dl1_prod_id
+        initial_dir = initial_dir / dl1_prod_id
 
-    output_dir = destination_dir(concept, create_dir=False)
+    output_dir = destination_dir(concept, create_dir=False, dl1_prod_id=dl1_prod_id, dl2_prod_id=dl2_prod_id)
     data_level = cfg.get("PATTERN", f"{concept}TYPE")
     prefix = cfg.get("PATTERN", f"{concept}PREFIX")
     suffix = cfg.get("PATTERN", f"{concept}SUFFIX")
@@ -167,7 +172,7 @@ def register_non_existing_file(file_path, concept, seq_list):
 
         if run_str_found is not None:
             log.debug(f"Registering file {run_str_found}")
-            register_run_concept_files(sequence.run_str, concept)
+            register_run_concept_files(sequence.run_str, sequence.type, concept)
             if options.seqtoclose is None and not file_path.exists():
                 log.debug("File does not exists")
 
@@ -177,13 +182,13 @@ def register_non_existing_file(file_path, concept, seq_list):
 
         if calib_run_str_found is not None:
             log.debug(f"Registering file {calib_run_str_found}")
-            register_run_concept_files(str(sequence.run), concept)
+            register_run_concept_files(str(sequence.run), sequence.type, concept)
             if options.seqtoclose is None and not file_path.exists():
                 log.debug("File does not exists")
 
         if drs4_run_str_found is not None:
             log.debug(f"Registering file {drs4_run_str_found}")
-            register_run_concept_files(str(sequence.previousrun), concept)
+            register_run_concept_files(str(sequence.previousrun), sequence.type, concept)
             if options.seqtoclose is None and not file_path.exists():
                 log.debug("File does not exists")
 
osa/utils/tests/test_utils.py CHANGED
@@ -75,3 +75,17 @@ def test_create_lock(base_test_dir):
     lock_path = base_test_dir / "test_lock.closed"
     is_closed = create_lock(lock_path)
     assert is_closed is False
+
+
+def test_get_RF_model(
+    run_catalog_dir,
+    run_catalog,
+    rf_models,
+    dl1b_config_files,
+    tailcuts_log_files,
+):
+    from osa.utils.utils import get_RF_model
+    from pathlib import Path
+
+    expected_model = Path("test_osa/test_files0/models/AllSky/20240918_v0.10.12_allsky_nsb_tuning_0.14/dec_2276")
+    assert get_RF_model(1807) == expected_model.resolve()
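Note: with the change above, DATA sequences resolve their DL1/DL2 production IDs per run instead of reading the global options.dl1_prod_id / options.dl2_prod_id. A minimal sketch of the new call pattern (not part of the diff), assuming an initialised OSA configuration and the run 01807 used by the test suite:

# Sketch only: assumes cfg/options are already set up as in the tests.
from osa.paths import get_dl1_prod_id_and_config, get_dl2_prod_id
from osa.utils.register import register_run_concept_files

run_string = "01807"

# Only DATA sequences perform the per-run lookup; the IDs select the
# running_analysis subdirectory (DL1AB/DATACHECK use dl1_prod_id, DL2 uses dl2_prod_id).
dl1_prod_id = get_dl1_prod_id_and_config(int(run_string))[0]
dl2_prod_id = get_dl2_prod_id(int(run_string))

# The sequence type is now passed explicitly alongside the run string and concept.
register_run_concept_files(run_string, "DATA", "DL1AB")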
osa/utils/utils.py CHANGED
@@ -4,10 +4,17 @@
 import inspect
 import logging
 import os
+import re
+import json
 import time
+import numpy as np
 from datetime import datetime, timedelta
 from pathlib import Path
 from socket import gethostname
+from gammapy.data import observatory_locations
+from astropy import units as u
+from astropy.table import Table
+from lstchain.onsite import find_filter_wheels
 
 import osa.paths
 from osa.configs import options
@@ -15,6 +22,7 @@ from osa.configs.config import cfg
 from osa.utils.iofile import write_to_file
 from osa.utils.logging import myLogger
 
+
 __all__ = [
     "get_lstchain_version",
     "date_to_dir",
@@ -26,12 +34,9 @@ __all__ = [
     "create_lock",
     "stringify",
     "gettag",
-    "get_dl1_prod_id",
-    "get_dl2_prod_id",
     "time_to_seconds",
     "DATACHECK_FILE_PATTERNS",
     "YESTERDAY",
-    "set_prod_ids",
     "is_night_time",
     "cron_lock",
     "example_seq",
@@ -85,44 +90,6 @@ def get_prod_id():
     return options.prod_id
 
 
-def get_dl1_prod_id():
-    """
-    Get the prod ID for the dl1 products provided
-    it is defined in the configuration file.
-
-    Returns
-    -------
-    dl1_prod_id: string
-    """
-    if not options.dl1_prod_id:
-        if cfg.get("LST1", "DL1_PROD_ID") is not None:
-            options.dl1_prod_id = cfg.get("LST1", "DL1_PROD_ID")
-        else:
-            options.dl1_prod_id = get_lstchain_version()
-
-    log.debug(f"Getting prod ID for DL1 products: {options.dl1_prod_id}")
-
-    return options.dl1_prod_id
-
-
-def get_dl2_prod_id():
-    """
-
-    Returns
-    -------
-
-    """
-    if not options.dl2_prod_id:
-        if cfg.get("LST1", "DL2_PROD_ID") is not None:
-            options.dl2_prod_id = cfg.get("LST1", "DL2_PROD_ID")
-        else:
-            options.dl2_prod_id = get_lstchain_version()
-
-    log.debug(f"Getting prod ID for DL2 products: {options.dl2_prod_id}")
-
-    return options.dl2_prod_id
-
-
 def create_lock(lockfile) -> bool:
     """
     Create a lock file to prevent multiple instances of the same analysis.
@@ -244,21 +211,6 @@ def time_to_seconds(timestring):
     return int(hours) * 3600 + int(minutes) * 60 + int(seconds)
 
 
-def set_prod_ids():
-    """Set the product IDs."""
-    options.prod_id = get_prod_id()
-
-    if cfg.get("LST1", "DL1_PROD_ID") is not None:
-        options.dl1_prod_id = get_dl1_prod_id()
-    else:
-        options.dl1_prod_id = options.prod_id
-
-    if cfg.get("LST1", "DL2_PROD_ID") is not None:
-        options.dl2_prod_id = get_dl2_prod_id()
-    else:
-        options.dl2_prod_id = options.prod_id
-
-
 def is_night_time(hour):
     """Check if it is nighttime."""
     if 8 <= hour <= 18:
@@ -285,3 +237,168 @@ def wait_for_daytime(start=8, end=18):
     while time.localtime().tm_hour <= start or time.localtime().tm_hour >= end:
         log.info("Waiting for sunrise to not interfere with the data-taking. Sleeping.")
         time.sleep(3600)
+
+
+def get_calib_filters(run_id):
+    """Get the filters used for the calibration."""
+    if options.test: # Run tests avoiding the access to the database
+        return 52
+
+    else:
+        mongodb = cfg.get("database", "caco_db")
+        try:
+            # Cast run_id to int to avoid problems with numpy int64 encoding in MongoDB
+            return find_filter_wheels(int(run_id), mongodb)
+        except IOError:
+            log.warning("No filter information found in database. Assuming positions 52.")
+            return 52
+
+
+def culmination_angle(dec: u.Quantity) -> u.Quantity:
+    """
+    Calculate culmination angle for a given declination.
+
+    Parameters
+    ----------
+    dec: Quantity
+        declination coordinate in degrees
+
+    Returns
+    -------
+    Culmination angle in degrees
+    """
+    location = observatory_locations["cta_north"]
+    Lat = location.lat # latitude of the LST1 site
+    return abs(Lat - dec)
+
+
+def convert_dec_string(dec_str: str) -> u.Quantity:
+    """Return the declination angle in degrees corresponding to a
+    given string of the form "dec_XXXX" or "dec_min_XXXX"."""
+
+    # Check if dec_str has a valid format
+    pattern = r'^dec_(\d{3,4})$|^dec_min_(\d{3,4})$'
+    if re.match(pattern, dec_str):
+
+        # Split the string into parts
+        parts = dec_str.split('_')
+
+        # Extract the sign, degrees, and minutes
+        sign = 1 if 'min' not in parts else -1
+        degrees = int(parts[-1])
+
+        # Calculate the numerical value
+        dec_value = sign * (degrees / 100)
+
+        return dec_value*u.deg
+
+
+def get_declinations_dict(list1: list, list2: list) -> dict:
+    """Return a dictionary created from two given lists."""
+    corresponding_dict = {}
+    for index, element in enumerate(list2):
+        corresponding_dict[element] = list1[index]
+    return corresponding_dict
+
+
+def get_nsb_dict(rf_models_dir: Path, rf_models_prefix: str) -> dict:
+    """Return a dictionary with the NSB level of the RF models and the path to each model."""
+    rf_models = sorted(rf_models_dir.glob(f"{rf_models_prefix}*"))
+    pattern = r"nsb_tuning_([\d.]+)"
+    nsb_dict = {
+        float(re.search(pattern, str(rf_model)).group(1)): rf_model
+        for rf_model in rf_models if re.search(pattern, str(rf_model))
+    }
+    return nsb_dict
+
+
+def get_mc_nsb_dir(run_id: int, rf_models_dir: Path) -> Path:
+    """
+    Return the path of the RF models directory with the NSB level
+    closest to that of the data for a given run.
+    """
+    additional_nsb = get_nsb_level(run_id)
+    rf_models_prefix = cfg.get("lstchain", "mc_prod")
+    nsb_dict = get_nsb_dict(rf_models_dir, rf_models_prefix)
+    closest_nsb_value = min(nsb_dict.keys(), key=lambda x: abs(float(x) - additional_nsb))
+
+    return nsb_dict[closest_nsb_value]
+
+
+def get_nsb_level(run_id):
+    """Choose the closest NSB among those that are processed with the same cleaning level."""
+    tailcuts_finder_dir = Path(cfg.get(options.tel_id, "TAILCUTS_FINDER_DIR"))
+    log_file = tailcuts_finder_dir / f"log_find_tailcuts_Run{run_id:05d}.log"
+    with open(log_file, "r") as file:
+        log_content = file.read()
+    match = re.search(r"Additional NSB rate \(over dark MC\): ([\d.]+)", log_content)
+    nsb = float(match.group(1))
+
+    dl1b_config_filename = tailcuts_finder_dir / f"dl1ab_Run{run_id:05d}.json"
+    with open(dl1b_config_filename) as json_file:
+        dl1b_config = json.load(json_file)
+    picture_th = dl1b_config["tailcuts_clean_with_pedestal_threshold"]["picture_thresh"]
+
+    nsb_levels = np.array([0.00, 0.07, 0.14, 0.22, 0.38, 0.50, 0.81, 1.25, 1.76, 2.34])
+    pth = np.array([8, 8, 8, 8, 10, 10, 12, 14, 16, 18])
+    candidate_nsbs = nsb_levels[pth==picture_th]
+
+    diff = abs(candidate_nsbs - nsb)
+    return candidate_nsbs[np.argsort(diff)][0]
+
+
+def get_RF_model(run_id: int) -> Path:
+    """Get the path of the RF models to be used in the DL2 production for a given run.
+
+    The choice of the models is based on the adequate additional NSB level
+    and the proper declination line of the MC used for the training.
+    """
+    run_catalog_dir = Path(cfg.get(options.tel_id, "RUN_CATALOG"))
+    run_catalog_file = run_catalog_dir / f"RunCatalog_{date_to_dir(options.date)}.ecsv"
+    run_catalog = Table.read(run_catalog_file)
+    pointing_dec = run_catalog[run_catalog["run_id"]==run_id]["source_dec"]*u.deg
+    # the "source_dec" given in the run catalogs is not actually the source declination, but the pointing declination
+    pointing_culmination = culmination_angle(pointing_dec)
+
+    rf_models_base_dir = Path(cfg.get(options.tel_id, "RF_MODELS"))
+    rf_models_dir = get_mc_nsb_dir(run_id, rf_models_base_dir)
+    dec_list = os.listdir(rf_models_dir)
+    dec_list = [i for i in dec_list if i.startswith("dec")]
+
+    # Convert each string in the list to numerical values
+    dec_values = [convert_dec_string(dec) for dec in dec_list]
+    dec_values = [dec for dec in dec_values if dec is not None]
+
+    closest_declination = min(dec_values, key=lambda x: abs(x - pointing_dec))
+    closest_dec_culmination = culmination_angle(closest_declination)
+
+    lst_location = observatory_locations["cta_north"]
+    lst_latitude = lst_location.lat # latitude of the LST1 site
+    closest_lines = sorted(sorted(dec_values, key=lambda x: abs(x - lst_latitude))[:2])
+
+    if pointing_dec < closest_lines[0] or pointing_dec > closest_lines[1]:
+        # If the pointing declination is between the two MC lines closest to the latitude of
+        # the LST1 site, this check is not necessary.
+        log.debug(
+            f"The declination closest to {pointing_dec} is: {closest_declination}."
+            "Checking if the culmination angle is larger than the one of the pointing."
+        )
+        while closest_dec_culmination > pointing_culmination:
+            # If the culmination angle of the closest declination line is larger than for
+            # the pointing declination, remove it from the declination lines list and
+            # look for the second closest declination line.
+            declinations_dict = get_declinations_dict(dec_list, dec_values)
+            declination_str = declinations_dict[closest_declination]
+            dec_values.remove(closest_declination)
+            dec_list.remove(declination_str)
+            closest_declination = min(dec_values, key=lambda x: abs(x - pointing_dec))
+            closest_dec_culmination = culmination_angle(closest_declination)
+
+    log.debug(f"The declination line to use for the DL2 production is: {closest_declination}")
+
+    declinations_dict = get_declinations_dict(dec_list, dec_values)
+    declination_str = declinations_dict[closest_declination]
+
+    rf_model_path = rf_models_dir / declination_str
+
+    return rf_model_path.resolve()
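For reference, a small sketch (not part of the diff) of how the new declination helpers behave. The dec_XXXX / dec_min_XXXX names follow the RF-model directory naming used above, and the quoted site latitude (about 28.8 deg for the ORM, via gammapy's observatory_locations["cta_north"]) is an assumption:

import astropy.units as u
from osa.utils.utils import convert_dec_string, culmination_angle

# "dec_2276" encodes +22.76 deg, "dec_min_413" encodes -4.13 deg
print(convert_dec_string("dec_2276"))    # 22.76 deg
print(convert_dec_string("dec_min_413")) # -4.13 deg

# Culmination angle = |site latitude - declination|; get_RF_model discards
# candidate declination lines whose culmination angle exceeds that of the pointing.
print(culmination_angle(22.76 * u.deg))  # roughly 6 deg for the LST-1 site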
osa/workflow/dl3.py CHANGED
@@ -21,7 +21,7 @@ from osa.configs import options
 from osa.configs.config import cfg
 from osa.nightsummary.extract import build_sequences, get_source_list
 from osa.paths import destination_dir, DEFAULT_CFG, create_source_directories, analysis_path
-from osa.utils.cliopts import get_prod_id, get_dl2_prod_id
+from osa.utils.cliopts import get_prod_id
 from osa.utils.logging import myLogger
 from osa.utils.utils import stringify, YESTERDAY
 
@@ -250,7 +250,6 @@ def setup_global_options(date_obs, telescope):
     options.date = date_obs
     options.tel_id = telescope
     options.prod_id = get_prod_id()
-    options.dl2_prod_id = get_dl2_prod_id()
     options.directory = analysis_path(options.tel_id)
 
 
osa/workflow/stages.py CHANGED
@@ -18,7 +18,7 @@ from osa.configs.config import cfg
 from osa.report import history
 from osa.utils.logging import myLogger
 from osa.utils.utils import stringify, date_to_dir
-from osa.paths import get_run_date
+from osa.paths import get_run_date, get_dl1_prod_id_and_config
 
 log = myLogger(logging.getLogger(__name__))
 
@@ -87,7 +87,7 @@ class AnalysisStage:
             self._remove_drs4_baseline()
 
     def _remove_drs4_baseline(self):
-        drs4_pedestal_basedir = Path(cfg.get("LST1", "PEDESTAL_DIR"))
+        drs4_pedestal_basedir = Path(cfg.get("LST1", "CAT_A_PEDESTAL_DIR"))
         date = date_to_dir(get_run_date(self.run))
         drs4_pedestal_dir = drs4_pedestal_basedir / date / lstchain.__version__
         file = drs4_pedestal_dir / "drs4_pedestal.Run{self.run}.0000.h5"
@@ -97,7 +97,7 @@ class AnalysisStage:
         drs4_pedestal_dir_pro.unlink(missing_ok=True)
 
     def _remove_calibration(self):
-        calib_basedir = Path(cfg.get("LST1", "CALIB_DIR"))
+        calib_basedir = Path(cfg.get("LST1", "CAT_A_CALIB_DIR"))
         date = date_to_dir(get_run_date(self.run))
         calib_dir = file = calib_basedir / date / lstchain.__version__
         file = calib_dir / f"calibration_filters_{options.filters}.Run{self.run}.0000.h5"
@@ -115,19 +115,24 @@ class AnalysisStage:
         interleaved_output_file.unlink(missing_ok=True)
 
     def _remove_dl1b_output(self, file_prefix):
-        dl1ab_subdirectory = options.directory / options.dl1_prod_id
+        dl1_prod_id = get_dl1_prod_id_and_config(int(self.run[:5]))[0]
+        dl1ab_subdirectory = options.directory / dl1_prod_id
         output_file = dl1ab_subdirectory / f"{file_prefix}{self.run}.h5"
         output_file.unlink(missing_ok=True)
 
     def _write_checkpoint(self):
         """Write the checkpoint in the history file."""
-        command_to_prod_id = {
-            cfg.get("lstchain", "r0_to_dl1"): options.prod_id,
-            cfg.get("lstchain", "dl1ab"): options.dl1_prod_id,
-            cfg.get("lstchain", "check_dl1"): options.dl1_prod_id,
-            cfg.get("lstchain", "dl1_to_dl2"): options.dl2_prod_id
-        }
-        prod_id = command_to_prod_id.get(self.command)
+        if self.command==cfg.get("lstchain", "r0_to_dl1"):
+            prod_id = options.prod_id
+        elif self.command==cfg.get("lstchain", "dl1ab"):
+            dl1_prod_id = get_dl1_prod_id_and_config(int(self.run[:5]))[0]
+            prod_id = dl1_prod_id
+        elif self.command==cfg.get("lstchain", "check_dl1"):
+            dl1_prod_id = get_dl1_prod_id_and_config(int(self.run[:5]))[0]
+            prod_id = dl1_prod_id
+        #elif self.command==cfg.get("lstchain", "dl1_to_dl2"):
+        #    dl2_prod_id = get_dl2_prod_id(int(self.run[:5]))
+        #    prod_id = dl2_prod_id
         history(
             run=self.run,
             prod_id=prod_id,
osa/workflow/tests/test_dl3.py CHANGED
@@ -1,6 +1,7 @@
 import subprocess as sp
+import pytest
 
-
+@pytest.mark.skip(reason="Currently the DL3 production is not working")
 def test_dl3_stage():
     output = sp.run(
         ["dl3_stage", "-d", "2020-01-17", "-s", "LST1"], text=True, stdout=sp.PIPE, stderr=sp.PIPE
osa/workflow/tests/test_stages.py CHANGED
@@ -4,7 +4,10 @@ import tenacity
 from osa.configs import options
 
 
-def test_analysis_stage(running_analysis_dir):
+def test_analysis_stage(
+    running_analysis_dir,
+    dl1b_config_files,
+):
     from osa.workflow.stages import AnalysisStage
 
     options.simulate = False
@@ -23,7 +26,7 @@ def test_analysis_stage(running_analysis_dir):
         f"--calibration-file={calibration_file}",
         f"--drive-file={drive_file}",
     ]
-    stage = AnalysisStage(run="01000.0001", command_args=cmd)
+    stage = AnalysisStage(run="01807.0001", command_args=cmd)
     assert stage.rc is None
     assert stage.show_command() == " ".join(cmd)
     with pytest.raises(tenacity.RetryError):
@@ -44,7 +47,7 @@ def test_analysis_stage(running_analysis_dir):
         "--input-file=dl1a_file.h5",
         "--output-file=dl1b_file.h5",
     ]
-    stage = AnalysisStage(run="01000.0001", command_args=cmd)
+    stage = AnalysisStage(run="01807.0001", command_args=cmd)
     assert stage.rc is None
     assert stage.show_command() == " ".join(cmd)
     with pytest.raises(tenacity.RetryError):
@@ -61,7 +64,7 @@ def test_analysis_stage(running_analysis_dir):
 
     # Third step
     cmd = ["lstchain_check_dl1", "--input-file=dl1_file.h5", "--batch"]
-    stage = AnalysisStage(run="01000.0001", command_args=cmd)
+    stage = AnalysisStage(run="01807.0001", command_args=cmd)
     assert stage.rc is None
     assert stage.show_command() == " ".join(cmd)
     with pytest.raises(tenacity.RetryError):