lstosa-0.11.6-py3-none-any.whl → lstosa-0.11.7-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- {lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/METADATA +1 -1
- {lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/RECORD +17 -16
- {lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/WHEEL +1 -1
- osa/_version.py +2 -2
- osa/configs/sequencer.cfg +2 -1
- osa/conftest.py +4 -4
- osa/paths.py +55 -7
- osa/scripts/closer.py +5 -5
- osa/scripts/gain_selection.py +6 -4
- osa/scripts/gainsel_webmaker.py +4 -0
- osa/scripts/interleaved_date.py +212 -0
- osa/scripts/reprocess_longterm.py +1 -1
- osa/scripts/tests/test_osa_scripts.py +4 -4
- osa/webserver/utils.py +2 -2
- {lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/LICENSE +0 -0
- {lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/entry_points.txt +0 -0
- {lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/top_level.txt +0 -0
{lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lstosa
-Version: 0.11.6
+Version: 0.11.7
 Summary: Onsite analysis pipeline for the CTA LST-1
 Author: María Láinez, José Enrique Ruiz, Lab Saha, Andrés Baquero, José Luis Contreras, Maximilian Linhoff
 Author-email: Daniel Morcuende <dmorcuen@ucm.es>
{lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/RECORD
CHANGED
@@ -1,9 +1,9 @@
 osa/__init__.py,sha256=crotf1NMTfNdZuCua_5T_jk3kvZrAAwVw4FPrfxv994,193
-osa/_version.py,sha256=
-osa/conftest.py,sha256=
+osa/_version.py,sha256=KU0yNPBwd_J_8Kv0fRgFC7mC3xqwbIvT7rTRw-5uF-A,706
+osa/conftest.py,sha256=ziqbE03yLFiBw4bEQv-1jEYz0rNpTLwfsvxBYEri0TE,24608
 osa/job.py,sha256=CunwW7xA4mWEocS6KkDZ1K6h_LYh_ePZMyGHyCa6CKg,27863
 osa/osadb.py,sha256=pkCuYbEG-moHG0uQHxwB7giQAv2XTld4HJ5gdn1F1hA,2422
-osa/paths.py,sha256=
+osa/paths.py,sha256=tgFXTMyDKVLMLuoXGYFcfeTmOcVcnZXLw1GnMOruvhc,19963
 osa/raw.py,sha256=ZNIsuqfx5ljoz_hwhSuafdKf-wr8-cxRJmel-A2endg,1337
 osa/report.py,sha256=sL2V7n8Y_UUaSDbWJY2o4UxDb4FU5AaFIRR8R25DB8o,4634
 osa/version.py,sha256=9T2TtuGBQeOy5PJDxMCeGlqx5baxLaq47VmFTDc09z8,796
@@ -12,7 +12,7 @@ osa/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/configs/config.py,sha256=cX0Vr4sorBVQ2KRqPIRSEGENKH_uC5lrgVHx1hp6YTk,1148
 osa/configs/datamodel.py,sha256=L_WRM91PBlMrtuE30akh7YR-56P0g9D994qzKSfhNJc,1950
 osa/configs/options.py,sha256=WbsyKhOs1Ud4Yt21O8KcISUvJYmhipa8vl_pxD_TZf4,558
-osa/configs/sequencer.cfg,sha256=
+osa/configs/sequencer.cfg,sha256=CEqegb_Irbx0fM6KcQgbvoMXIAI1TyYTKjR8AToSr8k,5690
 osa/high_level/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/high_level/selection_cuts.toml,sha256=ReSmcKtOPZY5JsZ9ExnxYdz7OrJEB8gghCbzHmeOyFg,128
 osa/high_level/significance.py,sha256=mMeG_y2wDnt0O2lSosYkSjaGZQl0V4GnuFrqKwSKSbE,9066
@@ -36,13 +36,14 @@ osa/provenance/config/logger.yaml,sha256=hy_lH3DfbRFh2VM_iawI-c-3wE0cjTRHy465C2e
 osa/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/scripts/autocloser.py,sha256=yjROBHDMbKHu0HAb0UW_3pZ-K9R20gQJTlMzKCA9dvU,14401
 osa/scripts/calibration_pipeline.py,sha256=oWX2L9WIofVHp5qYsX3YMfwoslfiUgnXtWA2EI8KuG4,6056
-osa/scripts/closer.py,sha256=
+osa/scripts/closer.py,sha256=lvaNH2l_fO8PvzThtJ0SDcf9i05vnc-ZfKp4xtj4s2A,21722
 osa/scripts/copy_datacheck.py,sha256=rAJu5-5oJxspynjYqVWLDKnXTF7Ib9xtzSf3_DiIHsk,3212
 osa/scripts/datasequence.py,sha256=0EZYlkX7ouaD66Ia2a5SWKbL0Fhlk8p4knKh0TG-2Gw,8592
-osa/scripts/gain_selection.py,sha256=
-osa/scripts/gainsel_webmaker.py,sha256=
+osa/scripts/gain_selection.py,sha256=hgA1o7l8wi6SiGNiG7gQlR2kJ50c3_56ZtaN6DvZeBw,23296
+osa/scripts/gainsel_webmaker.py,sha256=nCu4x7O3xnsSIOBSQt5j5J-fxlSrBKUlb2A8hI2dWec,5253
+osa/scripts/interleaved_date.py,sha256=J5aKbPlUdx-dRhYE48SPgYv7B0AxTibUwXVuwyWY8FY,8019
 osa/scripts/provprocess.py,sha256=Zv8sHTOgGNuEzu1QPUF142VQbHyfOsrxO073-xA6KG8,19107
-osa/scripts/reprocess_longterm.py,sha256=
+osa/scripts/reprocess_longterm.py,sha256=a8gc7iiBB16T2sxwYlQrqp5Kgda-WZcKsqYq7dYC6Fc,2137
 osa/scripts/reprocessing.py,sha256=IL30e2McFA0EGbzvkTXdzmKJGBTbafYlCA-PsafyzyY,4571
 osa/scripts/sequencer.py,sha256=hqkTXuYKvHHD6-s_wbzBulb2qpMF-pTLif0UOcIXu70,13150
 osa/scripts/sequencer_catB_tailcuts.py,sha256=zVBuCBiiIoO3HKHbJ0zqaDtuwxkxcziR4T5i-2WyDX8,10476
@@ -51,7 +52,7 @@ osa/scripts/show_run_summary.py,sha256=ofznLdoFHKQB-TELD6g5zMqD_TE9M9BuJR12zHvNd
 osa/scripts/simulate_processing.py,sha256=D3b0kjyUS6Lm7pkQNJ-tJzQMveTwoPabfBBW80RMqN0,6828
 osa/scripts/update_source_catalog.py,sha256=Po4KSBOQCAT2Do1DUu5wnKV48Dq_pONfvtD47hh8fYI,8277
 osa/scripts/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-osa/scripts/tests/test_osa_scripts.py,sha256=
+osa/scripts/tests/test_osa_scripts.py,sha256=tKcsjkV7y_YXPCqLir_gXuyAwrqLwoIJxeJ8IpobMZI,15098
 osa/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/tests/test_jobs.py,sha256=ozrMBjHujDhEMzEvJE7WynChqjbBTcORHsHmFXozeWk,16112
 osa/tests/test_osa.py,sha256=QCOsjUgPuNMHoef3Ym2sDXVjun2LaBrfKyroAIH-os8,415
@@ -70,15 +71,15 @@ osa/utils/utils.py,sha256=a--RvA0P2JHVkuyG8uBy1BSFINLi33Ek-rNZ9jg3Fs8,13514
 osa/utils/tests/test_iofile.py,sha256=e35_EqJerp-dEOrOqwXEUZCc5P_9llf2QfveltagfIk,399
 osa/utils/tests/test_utils.py,sha256=ybngpeUyzHgr96Gcx8r2g5oHFTo3aDQIwkC5aE9Ztic,2357
 osa/webserver/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-osa/webserver/utils.py,sha256=
+osa/webserver/utils.py,sha256=NcxGQ3fDVuQvZ9d99E4LmIrY3KK49e2qJsTad6jjvGc,2135
 osa/workflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 osa/workflow/dl3.py,sha256=ZMXPrdJA0WOxDiHEW9sdM2vmYix8R3eSMTGc6o42yxg,9860
 osa/workflow/stages.py,sha256=ILg91VyNXcy--si7CpDa1UFRiugqIH6nKl10Ru2zZVc,7422
 osa/workflow/tests/test_dl3.py,sha256=81Vt4lNxNjdKi_ehzymqfFWFjncw7GuQcix9S0Yigaw,384
 osa/workflow/tests/test_stages.py,sha256=aslfOIjf-rvf3r9N7NtimaOKkVB6K1M3bidgHOzRkMs,3985
-lstosa-0.11.6.dist-info/LICENSE,sha256=
-lstosa-0.11.6.dist-info/METADATA,sha256=
-lstosa-0.11.6.dist-info/WHEEL,sha256=
-lstosa-0.11.6.dist-info/entry_points.txt,sha256=
-lstosa-0.11.6.dist-info/top_level.txt,sha256=
-lstosa-0.11.6.dist-info/RECORD,,
+lstosa-0.11.7.dist-info/LICENSE,sha256=h6iWot11EtMvaDaS_AvCHKLTNByO5wEbMyNj1c90y1c,1519
+lstosa-0.11.7.dist-info/METADATA,sha256=NPpGT5dYeuo0baO6qjNuzqE0PCLzFdpdBa4OsqkREgk,7302
+lstosa-0.11.7.dist-info/WHEEL,sha256=WnJ8fYhv8N4SYVK2lLYNI6N0kVATA7b0piVUNvqIIJE,91
+lstosa-0.11.7.dist-info/entry_points.txt,sha256=CzDKpLjZZQm8jJBxOVpMR8Czpgg_Yh-k6IPETp30VZE,1048
+lstosa-0.11.7.dist-info/top_level.txt,sha256=_Tj8zVHdrOoWZuuWTHbDpNofxW0imUmKdlXhnxsXJek,4
+lstosa-0.11.7.dist-info/RECORD,,
osa/_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.11.6'
-__version_tuple__ = version_tuple = (0, 11, 6)
+__version__ = version = '0.11.7'
+__version_tuple__ = version_tuple = (0, 11, 7)
 
 __commit_id__ = commit_id = None
osa/configs/sequencer.cfg
CHANGED
@@ -26,11 +26,12 @@ DL1_DIR: %(BASE)s/DL1
 DL1AB_DIR: %(BASE)s/DL1
 DL2_DIR: %(BASE)s/DL2
 DL3_DIR: %(BASE)s/DL3
+DATACHECK_DIR: %(DL1_DIR)s/datacheck_files
 RF_MODELS: %(BASE)s/models/AllSky
 OSA_DIR: %(BASE)s/OSA
 CLOSER_DIR: %(OSA_DIR)s/Closer
 HIGH_LEVEL_DIR: %(OSA_DIR)s/HighLevel
-LONGTERM_DIR: %(
+LONGTERM_DIR: %(DATACHECK_DIR)s/night_wise
 MERGED_SUMMARY: %(OSA_DIR)s/Catalog/merged_RunSummary.ecsv
 SOURCE_CATALOG: %(OSA_DIR)s/Catalog
 SEQUENCER_WEB_DIR: %(OSA_DIR)s/SequencerWeb
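The new DATACHECK_DIR key is defined in terms of DL1_DIR, and LONGTERM_DIR now chains off DATACHECK_DIR. A minimal sketch of how these `%(...)s` references resolve with Python's configparser; the `[LST1]` section name is taken from the `cfg.get("LST1", ...)` calls in osa/paths.py below, while the BASE value and parser setup are illustrative, not OSA's actual config loader:

```python
# Sketch only: how DATACHECK_DIR and LONGTERM_DIR expand via configparser's
# %(...)s basic interpolation (BASE value is a placeholder).
from configparser import ConfigParser

cfg_text = """
[LST1]
BASE: /fefs/aswg/data/real
DL1_DIR: %(BASE)s/DL1
DATACHECK_DIR: %(DL1_DIR)s/datacheck_files
LONGTERM_DIR: %(DATACHECK_DIR)s/night_wise
"""

parser = ConfigParser()  # BasicInterpolation is the default
parser.read_string(cfg_text)

# LONGTERM_DIR expands through two levels of interpolation:
assert parser.get("LST1", "LONGTERM_DIR") == (
    "/fefs/aswg/data/real/DL1/datacheck_files/night_wise"
)
```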
osa/conftest.py
CHANGED
@@ -511,15 +511,15 @@ def datacheck_dl1_files(base_test_dir):
 
 
 @pytest.fixture(scope="session")
-def longterm_dir(
-    directory =
+def longterm_dir(datacheck_dir):
+    directory = datacheck_dir / prod_id / date_to_dir(date)
     directory.mkdir(parents=True, exist_ok=True)
     return directory
 
 
 @pytest.fixture(scope="session")
-def
-    directory = base_test_dir / "
+def datacheck_dir(base_test_dir):
+    directory = base_test_dir / "DL1" / "datacheck_files" / "night_wise"
     directory.mkdir(parents=True, exist_ok=True)
     return directory
 
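The longterm_dir fixture now depends on the new datacheck_dir fixture, mirroring the DATACHECK_DIR/LONGTERM_DIR layout from sequencer.cfg. A standalone sketch of the same session-scoped fixture chaining, with simplified bodies and placeholder prod_id/date values (not the actual conftest.py fixtures):

```python
# Sketch of session-scoped fixture chaining, as used in osa/conftest.py above.
import pytest

@pytest.fixture(scope="session")
def datacheck_dir(tmp_path_factory):
    # Stands in for base_test_dir / "DL1" / "datacheck_files" / "night_wise"
    directory = tmp_path_factory.mktemp("DL1") / "datacheck_files" / "night_wise"
    directory.mkdir(parents=True, exist_ok=True)
    return directory

@pytest.fixture(scope="session")
def longterm_dir(datacheck_dir):
    # Placeholder prod_id and night directory
    directory = datacheck_dir / "v0.1.0" / "20200117"
    directory.mkdir(parents=True, exist_ok=True)
    return directory

def test_layout(longterm_dir, datacheck_dir):
    # longterm_dir is nested inside datacheck_dir
    assert datacheck_dir in longterm_dir.parents
```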
osa/paths.py
CHANGED
@@ -388,18 +388,66 @@ def is_job_completed(job_id: str):
     return False
 
 
-def create_longterm_symlink(
+def create_longterm_symlink():
     """If the created longterm DL1 datacheck file corresponds to the latest
     version available, make symlink to it in the "all" common directory."""
-
-
-
-
-
+
+    nightdir = utils.date_to_dir(options.date)
+    longterm_dir = Path(cfg.get("LST1", "LONGTERM_DIR"))
+    output_dir = Path(cfg.get("LST1", "DATACHECK_DIR"))
+
+    extensions = ["h5", "log", "html"]
+
+    for ext in extensions:
+        linked_longterm_file = output_dir / f"night_wise/DL1_datacheck_{nightdir}.{ext}"
+        all_longterm_files = longterm_dir.rglob(f"v*/{nightdir}/DL1_datacheck_{nightdir}.{ext}")
         latest_version_file = get_latest_version_file(all_longterm_files)
-        log.info("Symlink the latest version longterm DL1 datacheck file in the common directory.")
         linked_longterm_file.unlink(missing_ok=True)
         linked_longterm_file.symlink_to(latest_version_file)
+
+
+def create_runwise_datacheck_symlinks():
+    """Create symlinks of the run-wise datacheck files in the "datacheck" directory."""
+    nightdir = utils.date_to_dir(options.date)
+    dl1_dir = Path(cfg.get("LST1", "DL1_DIR")) / nightdir / options.prod_id
+    output_dir = Path(cfg.get("LST1", "DATACHECK_DIR")) / nightdir
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    patterns = [
+        "tailcut*/datacheck/datacheck_dl1_LST-1.Run?????.pdf",
+        "tailcut*/datacheck/datacheck_dl1_LST-1.Run?????.h5",
+    ]
+
+    for pattern in patterns:
+        for input_file in dl1_dir.rglob(pattern):
+            output_file = output_dir / input_file.name
+            if not output_file.is_symlink():
+                output_file.symlink_to(input_file.resolve())
+
+
+def create_muons_symlinks():
+    """Create symlinks of the muon files in the "datacheck" directory."""
+    nightdir = utils.date_to_dir(options.date)
+    muons_dir = destination_dir("MUON", create_dir=False)
+    muons_file_list = muons_dir.rglob("muons_LST-1*.fits")
+    output_dir = Path(cfg.get("LST1", "DATACHECK_DIR")) / nightdir / "muons"
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    for input_file in muons_file_list:
+        output_file = output_dir / input_file.name
+        if not output_file.is_symlink():
+            print(f"input file exists: {input_file.exists()}")
+            output_file.symlink_to(input_file.resolve())
+
+
+def create_datacheck_symlinks(cherenkov_job_id: str=None):
+    """Once all steps of autocloser have finished successfully, create symlinks of the run-wise
+    and night-wise datacheck files, and of the muon files in the "datacheck" directory."""
+    if not cherenkov_job_id or is_job_completed(cherenkov_job_id):
+        log.info("Creating symlinks of the datacheck and muon files in the common directory.")
+        create_longterm_symlink()
+        create_runwise_datacheck_symlinks()
+        create_muons_symlinks()
     else:
         log.warning(f"Job {cherenkov_job_id} (lstchain_cherenkov_transparency) did not finish successfully.")
 
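Two symlink idioms appear above: create_longterm_symlink refreshes an existing link (unlink, then symlink_to), while the new run-wise and muon helpers only create a link when none exists yet. A standalone sketch of both patterns under hypothetical /tmp paths (POSIX-only; not OSA code):

```python
# Sketch of the pathlib symlink patterns used in osa/paths.py above.
from pathlib import Path

target = Path("/tmp/datacheck_demo/longterm/v0.1.0/20200117/DL1_datacheck_20200117.h5")
link = Path("/tmp/datacheck_demo/night_wise/DL1_datacheck_20200117.h5")

target.parent.mkdir(parents=True, exist_ok=True)
target.touch()
link.parent.mkdir(parents=True, exist_ok=True)

# Refresh pattern: drop any stale link first, then point at the latest target.
# unlink(missing_ok=True) (Python 3.8+) makes the refresh idempotent.
link.unlink(missing_ok=True)
link.symlink_to(target.resolve())

# Skip pattern: only create the link if it is not already there.
if not link.is_symlink():
    link.symlink_to(target.resolve())

assert link.resolve() == target.resolve()
```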
osa/scripts/closer.py
CHANGED
@@ -26,7 +26,7 @@ from osa.nightsummary.extract import extract_runs, extract_sequences
 from osa.nightsummary.nightsummary import run_summary_table
 from osa.paths import (
     destination_dir,
-
+    create_datacheck_symlinks,
     dl1_datacheck_longterm_file_exits
 )
 from osa.raw import is_raw_data_available
@@ -161,7 +161,7 @@ def post_process(seq_tuple):
 
     if dl1_datacheck_longterm_file_exits() and not options.test:
         if cfg.getboolean("lstchain", "create_longterm_symlink"):
-
+            create_datacheck_symlinks()
 
     else:
         # Close the sequences
@@ -187,7 +187,7 @@ def post_process(seq_tuple):
     longterm_job_id = daily_datacheck(daily_longterm_cmd(list_job_id))
     cherenkov_job_id = cherenkov_transparency(cherenkov_transparency_cmd(longterm_job_id))
     if cfg.getboolean("lstchain", "create_longterm_symlink"):
-
+        create_datacheck_symlinks(cherenkov_job_id)
 
     time.sleep(600)
 
@@ -584,7 +584,7 @@ def merge_muon_files(sequence_list):
 def daily_longterm_cmd(parent_job_ids: List[str]) -> List[str]:
     """Build the daily longterm command."""
     nightdir = date_to_dir(options.date)
-    datacheck_dir = destination_dir("DATACHECK", create_dir=False, dl1_prod_id="
+    datacheck_dir = destination_dir("DATACHECK", create_dir=False, dl1_prod_id="tailcut*")
     muons_dir = destination_dir("MUON", create_dir=False)
     longterm_dir = Path(cfg.get("LST1", "LONGTERM_DIR")) / options.prod_id / nightdir
     longterm_output_file = longterm_dir / f"DL1_datacheck_{nightdir}.h5"
@@ -629,7 +629,7 @@ def daily_datacheck(cmd: List[str]):
 def cherenkov_transparency_cmd(longterm_job_id: str) -> List[str]:
     """Build the cherenkov transparency command."""
     nightdir = date_to_dir(options.date)
-    datacheck_dir = destination_dir("DATACHECK", create_dir=False, dl1_prod_id="
+    datacheck_dir = destination_dir("DATACHECK", create_dir=False, dl1_prod_id="tailcut*")
     longterm_dir = Path(cfg.get("LST1", "LONGTERM_DIR")) / options.prod_id / nightdir
     longterm_datacheck_file = longterm_dir / f"DL1_datacheck_{nightdir}.h5"
     slurm_account = cfg.get("SLURM", "ACCOUNT")
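daily_longterm_cmd assembles an argument list for the night-wise datacheck job. The expected shape can be seen in test_daily_longterm_cmd further down; a simplified sketch of building such a list is below. The flag names (--input-dir, --output-file, --muons-dir, --batch), the lstchain_longterm_dl1_check entry point, the log pattern, and the afterok dependency format come from that test; the sbatch invocation and the -o option are assumptions:

```python
# Hypothetical sketch, not closer.py's actual daily_longterm_cmd.
from typing import List

def longterm_cmd_sketch(parent_job_ids: List[str], input_dir: str,
                        output_file: str, muons_dir: str) -> List[str]:
    return [
        "sbatch",                               # assumed submission command
        "-o", "log/longterm_daily_%j.log",      # log pattern from the test below
        f"--dependency=afterok:{','.join(parent_job_ids)}",
        "lstchain_longterm_dl1_check",
        f"--input-dir={input_dir}",
        f"--output-file={output_file}",
        f"--muons-dir={muons_dir}",
        "--batch",
    ]

print(longterm_cmd_sketch(
    ["12345", "54321"],
    "DL1/20200117/v0.1.0/tailcut*/datacheck",
    "DL1_datacheck_20200117.h5",
    "DL1/20200117/v0.1.0/muons",
))
```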
osa/scripts/gain_selection.py
CHANGED
@@ -334,21 +334,23 @@ def update_history_file(run_id: str, subrun: str, log_dir: Path, history_file: Path
         log.debug(f"Cannot find a job_id for the run {run_id:05d}.{subrun:04d}")
     else:
         job_status = get_sacct_output(run_sacct(job_id=job_id))["State"]
+        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
         if job_status.item() in ["RUNNING", "PENDING"]:
             log.info(f"Job {job_id} is still running.")
             return
-
+
         elif job_status.item() == "COMPLETED":
             log.debug(f"Job {job_id} finished successfully, updating history file.")
             string_to_write = (
-                f"{run_id:05d}.{subrun:04d} gain_selection 0\n"
+                f"{now} | {run_id:05d}.{subrun:04d} gain_selection 0\n"
             )
             append_to_file(history_file, string_to_write)
-
+
         else:
             log.info(f"Job {job_id} failed, updating history file.")
             string_to_write = (
-                f"{run_id:05d}.{subrun:04d} gain_selection 1\n"
+                f"{now} | {run_id:05d}.{subrun:04d} gain_selection 1\n"
             )
             append_to_file(history_file, string_to_write)
 
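The history-file entries now carry a wall-clock timestamp ahead of the run/subrun and exit-code fields. A standalone sketch of the new line format (run, subrun, and exit-code values are illustrative):

```python
# Sketch of the timestamped history line introduced above.
from datetime import datetime

run_id, subrun, exit_code = 12345, 1, 0  # illustrative values
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
line = f"{now} | {run_id:05d}.{subrun:04d} gain_selection {exit_code}\n"
print(line, end="")
# e.g. "2024-03-15 04:12:09 | 12345.0001 gain_selection 0"
```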
osa/scripts/gainsel_webmaker.py
CHANGED
@@ -144,6 +144,10 @@ def main():
         content = "<p>No data found</p>"
         log.warning(f"No data found for date {date}, creating an empty HTML file.")
 
+    elif len(Table.read(run_summary_file)[Table.read(run_summary_file)["run_type"] == "DATA"]) == 0:
+        content = "<p>Only calibration events were taken</p>"
+        log.warning(f"No DATA runs for date {date}, creating an empty HTML file.")
+
     else:
         # Get the table with the gain selection check report in HTML format:
         table_gain_selection_jobs = check_failed_jobs(options.date)
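The new elif branch filters the run summary with an astropy Table boolean mask. The same check, reading the summary once into a variable instead of twice (the file name is illustrative):

```python
# Sketch of the run_type filter used in the new branch above.
from astropy.table import Table

run_summary_file = "RunSummary_20200117.ecsv"  # illustrative path
summary = Table.read(run_summary_file)
data_runs = summary[summary["run_type"] == "DATA"]  # boolean-mask selection
if len(data_runs) == 0:
    print("Only calibration events were taken")
```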
osa/scripts/interleaved_date.py
ADDED
@@ -0,0 +1,212 @@
+"""
+Remove interleaved directories for data runs that have already been processed.
+
+Given an input date (<YYYYMMDD>), the script searches for interleaved directories
+within the last month. For each date in that period, it retrieves the runs taken
+and identifies the corresponding observed sources.
+It creates a shell file to remove all of the interleaved files identified
+(those that do not correspond to Crab observations). Then, it have to be removed
+using SLURM command sbatch.
+
+At the moment, this shell files to be removed are saved in my workspace:
+fefs/aswg/workspace/maria.rivero/remove_sh
+"""
+
+import os
+import sys
+import csv
+from datetime import datetime
+from dateutil.relativedelta import relativedelta
+import glob
+
+# Data directories to look for links of interleaved directories
+base_dirs = [
+    "/fefs/onsite/data/lst-pipe/LSTN-01/running_analysis",
+    "/fefs/aswg/data/real/running_analysis"
+]
+
+# Directory of RunSummary files
+summary_dir = "/fefs/aswg/data/real/monitoring/RunSummary"
+backup_summary_dir = "/fefs/onsite/data/lst-pipe/LSTN-01/monitoring/RunSummary"
+
+# Directories to check run's sources in RunCatalogs
+catalog_dir = "/fefs/aswg/data/real/monitoring/RunCatalog"
+backup_catalog_dir = "/fefs/onsite/data/lst-pipe/LSTN-01/monitoring/RunCatalog"
+
+
+def find_interleaved(target_date_str):
+    """Look into base directories to find interleaved directories to be removed.
+    Args:
+        target_date_str (str): Last date to search within a month.
+    Returns:
+        interleaved_paths (list): Paths to interleaved directory.
+        interleaved_dates (list): Observation dates of each interleaved.
+    """
+
+    # Change date format (YYYYMMDD to python date)
+    try:
+        target_date = datetime.strptime(target_date_str, "%Y%m%d").date()
+    except ValueError:
+        print("Invalid format. Use YYYYMMDD")
+        sys.exit(1)
+
+    interleaved_paths = []
+    interleaved_dates = []
+    # Let's look into both dirs and check if both exist
+    for base_dir in base_dirs:
+        if not os.path.isdir(base_dir):
+            print(f"Path not found: {base_dir}")
+            continue
+        # Look in each date directory
+        for date_dir in sorted(os.listdir(base_dir)):
+            date_path = os.path.join(base_dir, date_dir)  # given date path
+            if not os.path.isdir(date_path):
+                continue
+
+            if not (len(date_dir) == 8 and date_dir.isdigit()):
+                continue
+
+            try:
+                date_obj = datetime.strptime(date_dir, "%Y%m%d").date()  # save it as date python object
+            except ValueError:
+                continue
+            # search only in the last month to the input date
+            if date_obj > target_date or date_obj < (target_date - relativedelta(months=1)):
+                continue
+
+            # look for interleaved directory (save path and date)
+            for root, dirs, _ in os.walk(date_path):
+                for d in dirs:
+                    if d == "interleaved":
+                        interleaved_paths.append(os.path.join(root, d))
+                        interleaved_dates.append(date_dir)
+    return interleaved_paths, interleaved_dates
+
+def info_dates(date, catalog_dir, runs_id):
+    """Given an observation date, it classifies runs in RunCatalog by their sources (Crab or not).
+    Args:
+        date (str): <YYYYMMDD> format.
+        catalog_dir (str): path to RunCatalog files.
+        runs_id (list): DATA runs taken from RunSummary
+    Returns:
+        entry (dict): stores runs by source (Crab or other) and saves other sources' names.
+    """
+
+    entry = {"crab": [], "other_source": [], "others_names": []}
+    filename = f"RunCatalog_{date}.ecsv"
+    catalog_file = os.path.join(catalog_dir,filename)
+    if not os.path.isfile(catalog_file):
+        print(f"File not found: {catalog_file}")
+        return entry
+
+    else:
+        with open(catalog_file, "r") as f:
+            reader = csv.reader(f)
+            for row in reader:
+                if not row or row[0].startswith("#") or row[0].startswith("run"):
+                    continue  # skip comment and header lines
+                if int(row[0]) in runs_id:
+                    if "Crab" in row[1]:
+                        entry["crab"].append(int(row[0]))
+                    elif "crab" in row[1]:
+                        entry["crab"].append(int(row[0]))
+                    else:
+                        entry["other_source"].append(int(row[0]))
+                        entry["others_names"].append(row[1])
+                else:
+                    continue
+    return(entry)
+
+
+def summary_dates(date, summary_dir):
+    """Given an observation date, it stores run_ids and types.
+    Args:
+        date (str): <YYYYMMDD> format.
+        summary_dir (str): path to RunSummary files.
+    Returns:
+        entry (dict): stores run_id and run_type.
+    """
+
+    entry = {"run_id": [], "run_type": []}
+    filename = f"RunSummary_{date}.ecsv"
+    summary_file = os.path.join(summary_dir, filename)
+    if not os.path.isfile(summary_file):
+        print(f"File not found: {summary_file}")
+        return entry
+    else:
+        with open(summary_file, "r") as f:
+            reader = csv.reader(f)
+            for row in reader:
+                if not row or row[0].startswith("#"):
+                    continue  # skip comment lines
+                try:
+                    entry["run_id"].append(int(row[0]))
+                    entry["run_type"].append(row[2])
+
+                except ValueError:
+                    continue
+    return(entry)
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        print("Use: python interleaved_date.py <YYYYMMDD>")
+        sys.exit(1)
+
+    date_arg = sys.argv[1]
+    found_paths, found_dates = find_interleaved(date_arg)
+    month = date_arg[:6]
+    recordfile = f'/fefs/aswg/workspace/maria.rivero/remove_sh/entries_rm{month}.sh'
+    with open(recordfile, 'w') as file:
+        file.write('#!/bin/bash \n')
+
+    dl1_paths = [
+        p.replace("/running_analysis/", "/DL1/")
+        for p in found_paths
+    ]
+    print('Interleaved path: ' , dl1_paths)
+
+    for path,link_path, date in zip(dl1_paths, found_paths, found_dates):
+        summary = summary_dates(date, summary_dir)
+        if not summary['run_id']:
+            summary = summary_dates(date, backup_summary_dir)
+            if not summary['run_id']:
+                continue
+
+        data_runs = [
+            run_id
+            for run_id, run_type in zip(summary["run_id"], summary["run_type"])
+            if run_type == "DATA"]
+
+        entry = info_dates(date, catalog_dir, data_runs)
+        if entry["crab"] == [] and entry["other_source"] == []:
+            entry = info_dates(date, backup_catalog_dir, data_runs)
+        if not entry["other_source"]:
+            continue
+
+        print('\n Dates with interleaved: ' , date)
+
+        print('RunSummary info:')
+        print(summary)
+        print('Run info (Crab or not):')
+        print(entry)
+
+        found_dataruns = sorted(entry["crab"] + entry["other_source"])
+        if not entry["crab"] and len(found_dataruns) == len(data_runs):
+            print(f"rm -r {path}")
+            with open(recordfile, 'a') as file:
+                file.write(f"rm -r {path} \n")
+                file.write(f"rm -r {link_path} \n")
+        else:
+            for runid in entry["other_source"]:
+                run_str = f"{runid:05d}"  # run_ids must be always five digits
+                filename = f"interleaved_LST-1.Run{run_str}.*.h5"
+                filepath = os.path.join(path, filename)
+                link_filepath = os.path.join(link_path, filename)
+                matching_files = glob.glob(filepath)  # check that exist files with run_id
+                if matching_files:
+                    print(f"rm {filepath}")
+                    with open(recordfile, 'a') as file:
+                        file.write(f"rm {filepath} \n")
+                        file.write(f"rm {link_filepath} \n")
+                else:
+                    continue
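find_interleaved keeps only night directories inside a one-month window ending at the input date, using dateutil's relativedelta. A standalone sketch of that window test, with illustrative dates:

```python
# Sketch of the one-month window check from find_interleaved above.
from datetime import datetime
from dateutil.relativedelta import relativedelta

target_date = datetime.strptime("20240315", "%Y%m%d").date()  # input date
night = datetime.strptime("20240220", "%Y%m%d").date()        # candidate night dir

# A night is kept only if it falls in [target_date - 1 month, target_date]:
in_window = not (night > target_date or night < target_date - relativedelta(months=1))
assert in_window
```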
osa/scripts/reprocess_longterm.py
CHANGED
@@ -30,7 +30,7 @@ def run_longterm(date: str, prod_id: str, new_prod_id: str, log_dir: Path):
     log_dir : Path
        Path to the directory where job logs will be stored.
    """
-    dl1_dir = ANALYSIS_PATH / "DL1" / date / prod_id / "
+    dl1_dir = ANALYSIS_PATH / "DL1" / date / prod_id / "tailcut*" / "datacheck"
    muons_dir = ANALYSIS_PATH / "DL1" / date / prod_id / "muons"
    new_longterm_dir = LONGTERM_PATH / new_prod_id / date
    longterm_output_file = new_longterm_dir / f"DL1_datacheck_{date}.h5"
osa/scripts/tests/test_osa_scripts.py
CHANGED
@@ -198,7 +198,7 @@ def test_closer(
     systematic_correction_files,
     merged_run_summary,
     longterm_dir,
-
+    datacheck_dir,
     daily_datacheck_dl1_files,
     dl1b_config_files,
     tailcuts_log_files,
@@ -223,7 +223,7 @@ def test_closer(
     assert obs_file.exists()
     assert merged_run_summary.exists()
     assert longterm_dir.exists()
-    assert
+    assert datacheck_dir.exists()
     for check_file in daily_datacheck_dl1_files:
         assert check_file.exists()
     assert rf_models[2].exists()
@@ -389,8 +389,8 @@ def test_daily_longterm_cmd():
         "log/longterm_daily_%j.log",
         "--dependency=afterok:12345,54321",
         "lstchain_longterm_dl1_check",
-        "--input-dir=test_osa/test_files0/DL1/20200117/v0.1.0/
-        "--output-file=test_osa/test_files0/
+        "--input-dir=test_osa/test_files0/DL1/20200117/v0.1.0/tailcut*/datacheck",
+        "--output-file=test_osa/test_files0/DL1/datacheck_files/night_wise/v0.1.0/20200117/DL1_datacheck_20200117.h5",
         "--muons-dir=test_osa/test_files0/DL1/20200117/v0.1.0/muons",
         "--batch",
     ]
osa/webserver/utils.py
CHANGED
@@ -37,8 +37,8 @@ def directory_in_webserver(host: str, datacheck_type: str, date: str, prod_id: str
     DATACHECK_WEB_DIRS = {
         "PEDESTAL": f"drs4/{prod_id}/{date}",
         "CALIB": f"enf_calibration/{prod_id}/{date}",
-        "DL1AB": f"dl1/{
-        "LONGTERM": f"dl1/{
+        "DL1AB": f"dl1/{date}/pdf",
+        "LONGTERM": f"dl1/{date}",
         "HIGH_LEVEL": f"high_level/{prod_id}/{date}",
     }
 
{lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/LICENSE
File without changes
{lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/entry_points.txt
File without changes
{lstosa-0.11.6.dist-info → lstosa-0.11.7.dist-info}/top_level.txt
File without changes