looper 1.7.0__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- looper/__main__.py +1 -1
- looper/_version.py +2 -1
- looper/cli_divvy.py +10 -6
- looper/cli_pydantic.py +413 -0
- looper/command_models/DEVELOPER.md +85 -0
- looper/command_models/README.md +4 -0
- looper/command_models/__init__.py +6 -0
- looper/command_models/arguments.py +293 -0
- looper/command_models/commands.py +335 -0
- looper/conductor.py +147 -28
- looper/const.py +9 -0
- looper/divvy.py +56 -47
- looper/exceptions.py +9 -1
- looper/looper.py +196 -169
- looper/pipeline_interface.py +2 -12
- looper/project.py +154 -176
- looper/schemas/pipeline_interface_schema_generic.yaml +14 -6
- looper/utils.py +450 -78
- {looper-1.7.0.dist-info → looper-2.0.0.dist-info}/METADATA +24 -14
- {looper-1.7.0.dist-info → looper-2.0.0.dist-info}/RECORD +24 -19
- {looper-1.7.0.dist-info → looper-2.0.0.dist-info}/WHEEL +1 -1
- {looper-1.7.0.dist-info → looper-2.0.0.dist-info}/entry_points.txt +1 -1
- looper/cli_looper.py +0 -796
- {looper-1.7.0.dist-info → looper-2.0.0.dist-info}/LICENSE.txt +0 -0
- {looper-1.7.0.dist-info → looper-2.0.0.dist-info}/top_level.txt +0 -0
looper/conductor.py
CHANGED
@@ -4,10 +4,12 @@ import importlib
 import logging
 import os
 import subprocess
+import signal
+import psutil
+import sys
 import time
 import yaml
 from math import ceil
-from copy import copy, deepcopy
 from json import loads
 from subprocess import check_output
 from typing import *
@@ -19,14 +21,19 @@ from jinja2.exceptions import UndefinedError
 from peppy.const import CONFIG_KEY, SAMPLE_NAME_ATTR, SAMPLE_YAML_EXT
 from peppy.exceptions import RemoteYAMLError
 from pipestat import PipestatError
-from ubiquerg import expandpath
+from ubiquerg import expandpath
 from yaml import dump
-from yacman import YAMLConfigManager
+from yacman import FutureYAMLConfigManager as YAMLConfigManager

 from .const import *
-from .exceptions import JobSubmissionException
+from .exceptions import JobSubmissionException
 from .processed_project import populate_sample_paths
-from .utils import
+from .utils import (
+    fetch_sample_flags,
+    jinja_render_template_strictly,
+    expand_nested_var_templates,
+)
+from .const import PipelineLevel


 _LOGGER = logging.getLogger(__name__)
@@ -85,11 +92,23 @@ def _get_yaml_path(namespaces, template_key, default_name_appendix="", filename=

 def write_pipestat_config(looper_pipestat_config_path, pipestat_config_dict):
     """
-    This
+    This writes a combined configuration file to be passed to a PipestatManager.
+    :param str looper_pipestat_config_path: path to the created pipestat configuration file
+    :param dict pipestat_config_dict: the dict containing key value pairs to be written to the pipestat configutation
+    return bool
     """
+
+    if not os.path.exists(os.path.dirname(looper_pipestat_config_path)):
+        try:
+            os.makedirs(os.path.dirname(looper_pipestat_config_path))
+        except FileExistsError:
+            pass
+
     with open(looper_pipestat_config_path, "w") as f:
         yaml.dump(pipestat_config_dict, f)
-
+    _LOGGER.debug(
+        msg=f"Initialized pipestat config file: {looper_pipestat_config_path}"
+    )

     return True

@@ -176,8 +195,12 @@ class SubmissionConductor(object):
             the project level, rather that on the sample level)
         """
         super(SubmissionConductor, self).__init__()
+
         self.collate = collate
         self.section_key = PROJECT_PL_KEY if self.collate else SAMPLE_PL_KEY
+        self.pipeline_interface_type = (
+            "project_interface" if self.collate else "sample_interface"
+        )
         self.pl_iface = pipeline_interface
         self.pl_name = self.pl_iface.pipeline_name
         self.prj = prj
@@ -197,6 +220,7 @@ class SubmissionConductor(object):
         self._curr_size = 0
         self._failed_sample_names = []
         self._curr_skip_pool = []
+        self.process_id = None  # this is used for currently submitted subprocess

         if self.extra_pipe_args:
             _LOGGER.debug(
@@ -261,8 +285,12 @@ class SubmissionConductor(object):

         :param bool frorce: whether to force the project submission (ignore status/flags)
         """
+        psms = {}
         if self.prj.pipestat_configured_project:
-
+            for piface in self.prj.project_pipeline_interfaces:
+                if piface.psm.pipeline_type == PipelineLevel.PROJECT.value:
+                    psms[piface.psm.pipeline_name] = piface.psm
+            psm = psms[self.pl_name]
             status = psm.get_status()
             if not force and status is not None:
                 _LOGGER.info(f"> Skipping project. Determined status: {status}")
@@ -288,12 +316,11 @@ class SubmissionConductor(object):
             )
         )
         if self.prj.pipestat_configured:
-
-            sample_statuses = psms[self.pl_name].get_status(
+            sample_statuses = self.pl_iface.psm.get_status(
                 record_identifier=sample.sample_name
             )
             if sample_statuses == "failed" and rerun is True:
-
+                self.pl_iface.psm.set_status(
                     record_identifier=sample.sample_name, status_identifier="waiting"
                 )
                 sample_statuses = "waiting"
@@ -303,23 +330,27 @@ class SubmissionConductor(object):

         use_this_sample = True  # default to running this sample
         msg = None
+        if rerun and sample_statuses == []:
+            msg = f"> Skipping sample because rerun requested, but no failed or waiting flag found."
+            use_this_sample = False
         if sample_statuses:
             status_str = ", ".join(sample_statuses)
             failed_flag = any("failed" in x for x in sample_statuses)
+            waiting_flag = any("waiting" in x for x in sample_statuses)
             if self.ignore_flags:
                 msg = f"> Found existing status: {status_str}. Ignoring."
             else:  # this pipeline already has a status
                 msg = f"> Found existing status: {status_str}. Skipping sample."
-                if failed_flag:
+                if failed_flag and not rerun:
                     msg += " Use rerun to ignore failed status."  # help guidance
                 use_this_sample = False
             if rerun:
                 # Rescue the sample if rerun requested, and failed flag is found
-                if failed_flag:
-                    msg = f"> Re-running
+                if failed_flag or waiting_flag:
+                    msg = f"> Re-running sample. Status: {status_str}"
                     use_this_sample = True
                 else:
-                    msg = f"> Skipping sample because rerun requested, but no failed flag found. Status: {status_str}"
+                    msg = f"> Skipping sample because rerun requested, but no failed or waiting flag found. Status: {status_str}"
                     use_this_sample = False
         if msg:
             _LOGGER.info(msg)
@@ -372,6 +403,10 @@ class SubmissionConductor(object):
             not for dry run)
         """
         submitted = False
+
+        # Override signal handler so that Ctrl+C can be used to gracefully terminate child process
+        signal.signal(signal.SIGINT, self._signal_int_handler)
+
         if not self._pool:
             _LOGGER.debug("No submission (no pooled samples): %s", self.pl_name)
             # submitted = False
@@ -400,9 +435,10 @@ class SubmissionConductor(object):
             submission_command = "{} {}".format(sub_cmd, script)
             # Capture submission command return value so that we can
             # intercept and report basic submission failures; #167
-
-
-
+            process = subprocess.Popen(submission_command, shell=True)
+            self.process_id = process.pid
+            process.wait()
+            if process.returncode != 0:
                 fails = (
                     "" if self.collate else [s.sample_name for s in self._samples]
                 )
@@ -469,6 +505,87 @@ class SubmissionConductor(object):
         # name concordant with 1-based, not 0-based indexing.
         return "lump{}".format(self._num_total_job_submissions + 1)

+    def _signal_int_handler(self, signal, frame):
+        """
+        For catching interrupt (Ctrl +C) signals. Fails gracefully.
+        """
+        signal_type = "SIGINT"
+        self._generic_signal_handler(signal_type)
+
+    def _generic_signal_handler(self, signal_type):
+        """
+        Function for handling both SIGTERM and SIGINT
+        """
+        message = "Received " + signal_type + ". Failing gracefully..."
+        _LOGGER.warning(msg=message)
+
+        self._terminate_current_subprocess()
+
+        sys.exit(1)
+
+    def _terminate_current_subprocess(self):
+        """This terminates the current sub process associated with self.process_id"""
+
+        def pskill(proc_pid, sig=signal.SIGINT):
+            parent_process = psutil.Process(proc_pid)
+            for child_proc in parent_process.children(recursive=True):
+                child_proc.send_signal(sig)
+            parent_process.send_signal(sig)
+
+        if self.process_id is None:
+            return
+
+        # Gently wait for the subprocess before attempting to kill it
+        sys.stdout.flush()
+        still_running = self._attend_process(psutil.Process(self.process_id), 0)
+        sleeptime = 0.25
+        time_waiting = 0
+
+        while still_running and time_waiting < 3:
+            try:
+                if time_waiting > 2:
+                    pskill(self.process_id, signal.SIGKILL)
+                elif time_waiting > 1:
+                    pskill(self.process_id, signal.SIGTERM)
+                else:
+                    pskill(self.process_id, signal.SIGINT)
+
+            except OSError:
+                # This would happen if the child process ended between the check
+                # and the next kill step
+                still_running = False
+                time_waiting = time_waiting + sleeptime
+
+            # Now see if it's still running
+            time_waiting = time_waiting + sleeptime
+            if not self._attend_process(psutil.Process(self.process_id), sleeptime):
+                still_running = False
+
+        if still_running:
+            _LOGGER.warning(f"Unable to halt child process: {self.process_id}")
+        else:
+            if time_waiting > 0:
+                note = f"terminated after {time_waiting} sec"
+            else:
+                note = "was already terminated"
+            _LOGGER.warning(msg=f"Child process {self.process_id} {note}.")
+
+    def _attend_process(self, proc, sleeptime):
+        """
+        Waits on a process for a given time to see if it finishes, returns True
+        if it's still running after the given time or False as soon as it
+        returns.
+
+        :param psutil.Process proc: Process object opened by psutil.Popen()
+        :param float sleeptime: Time to wait
+        :return bool: True if process is still running; otherwise false
+        """
+        try:
+            proc.wait(timeout=int(sleeptime))
+        except psutil.TimeoutExpired:
+            return True
+        return False
+
     def _jobname(self, pool):
         """Create the name for a job submission."""
         return "{}_{}".format(self.pl_iface.pipeline_name, self._sample_lump_name(pool))
@@ -528,12 +645,7 @@ class SubmissionConductor(object):
         :return yacman.YAMLConfigManager: pipestat namespace
         """
         try:
-
-                self.prj.get_pipestat_managers(sample_name)
-                if sample_name
-                else self.prj.get_pipestat_managers(project_level=True)
-            )
-            psm = psms[self.pl_iface.pipeline_name]
+            psm = self.pl_iface.psm
         except (PipestatError, AttributeError) as e:
             # pipestat section faulty or not found in project.looper or sample
             # or project is missing required pipestat attributes
@@ -548,6 +660,8 @@ class SubmissionConductor(object):
             "results_file": psm.file,
             "record_identifier": psm.record_identifier,
             "config_file": psm.config_path,
+            "output_schema": psm.cfg["_schema_path"],
+            "pephub_path": psm.cfg["pephub_path"],
         }
         filtered_namespace = {k: v for k, v in full_namespace.items() if v}
         return YAMLConfigManager(filtered_namespace)
@@ -571,7 +685,11 @@ class SubmissionConductor(object):
             pipeline=self.pl_iface,
             compute=self.prj.dcc.compute,
         )
-
+
+        if self.pipeline_interface_type is None:
+            templ = self.pl_iface["command_template"]
+        else:
+            templ = self.pl_iface[self.pipeline_interface_type]["command_template"]
         if not self.override_extra:
             extras_template = (
                 EXTRA_PROJECT_CMD_TEMPLATE
@@ -611,8 +729,10 @@ class SubmissionConductor(object):
         _LOGGER.debug(f"namespace pipelines: { pl_iface }")

         namespaces["pipeline"]["var_templates"] = pl_iface[VAR_TEMPL_KEY] or {}
-
-
+
+        namespaces["pipeline"]["var_templates"] = expand_nested_var_templates(
+            namespaces["pipeline"]["var_templates"], namespaces
+        )

         # pre_submit hook namespace updates
         namespaces = _exec_pre_submit(pl_iface, namespaces)
@@ -621,7 +741,6 @@ class SubmissionConductor(object):
             argstring = jinja_render_template_strictly(
                 template=templ, namespaces=namespaces
             )
-            print(argstring)
         except UndefinedError as jinja_exception:
             _LOGGER.warning(NOT_SUB_MSG.format(str(jinja_exception)))
         except KeyError as e:
looper/const.py
CHANGED
@@ -1,6 +1,7 @@
 """ Shared project constants """

 import os
+from enum import Enum

 __author__ = "Databio lab"
 __email__ = "nathan@code.databio.org"
@@ -92,6 +93,7 @@ __all__ = [
     "DEBUG_EIDO_VALIDATION",
     "LOOPER_GENERIC_OUTPUT_SCHEMA",
     "LOOPER_GENERIC_COUNT_LINES",
+    "PipelineLevel",
 ]

 FLAGS = ["completed", "running", "failed", "waiting", "partial"]
@@ -268,3 +270,10 @@ MESSAGE_BY_SUBCOMMAND = {
     "init-piface": "Initialize generic pipeline interface.",
     "link": "Create directory of symlinks for reported results.",
 }
+
+# Add project/sample enum
+
+
+class PipelineLevel(Enum):
+    SAMPLE = "sample"
+    PROJECT = "project"
looper/divvy.py
CHANGED
@@ -1,16 +1,14 @@
 """ Computing configuration representation """

 import logging
-import logmuse
 import os
-import sys
 import shutil
-
-
+
+
 from shutil import copytree
+from yacman import FutureYAMLConfigManager as YAMLConfigManager
+from yacman import write_lock, FILEPATH_KEY, load_yaml, select_config

-from ubiquerg import is_writable, VersionInHelpParser
-import yacman

 from .const import (
     COMPUTE_SETTINGS_VARNAME,
@@ -21,14 +19,13 @@ from .const import (
 )
 from .utils import write_submit_script

-# from . import __version__

 _LOGGER = logging.getLogger(__name__)

 # This is the divvy.py submodule from divvy


-class ComputingConfiguration(
+class ComputingConfiguration(YAMLConfigManager):
     """
     Represents computing configuration objects.

@@ -44,36 +41,31 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
         `DIVCFG` file)
     """

-    def __init__(
-
-
-
-
-
-
-
-
-        validate_on_write
+    def __init__(
+        self,
+        entries=None,
+        wait_max=None,
+        strict_ro_locks=False,
+        schema_source=None,
+        validate_on_write=False,
+    ):
+        super().__init__(
+            entries, wait_max, strict_ro_locks, schema_source, validate_on_write
         )

-        if
-            raise Exception(
-                "Your divvy config file is not in divvy config format "
-                "(it lacks a compute_packages section): '{}'".format(filepath)
-            )
-        # We require that compute_packages be present, even if empty
+        if "compute_packages" not in self:
             self["compute_packages"] = {}
-
         # Initialize default compute settings.
         _LOGGER.debug("Establishing project compute settings")
         self.compute = None
         self.setdefault("adapters", None)
         self.activate_package(DEFAULT_COMPUTE_RESOURCES_NAME)
-        self.config_file = self.filepath

     def write(self, filename=None):
-
-
+        with write_lock(self) as locked_ym:
+            locked_ym.rebase()
+            locked_ym.write()
+        filename = filename or getattr(self, FILEPATH_KEY)
         filedir = os.path.dirname(filename)
         # For this object, we *also* have to write the template files
         for pkg_name, pkg in self["compute_packages"].items():
@@ -119,9 +111,12 @@ class ComputingConfiguration(yacman.YAMLConfigManager):

         :return str: path to folder with default submission templates
         """
-
-            os.path.dirname(
-
+        if self.filepath:
+            return os.path.join(os.path.dirname(self.filepath), "divvy_templates")
+        else:
+            return os.path.join(
+                os.path.dirname(__file__), "default_config", "divvy_templates"
+            )

     def activate_package(self, package_name):
         """
@@ -151,23 +146,30 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
         # Augment compute, creating it if needed.
         if self.compute is None:
             _LOGGER.debug("Creating Project compute")
-            self.compute =
+            self.compute = YAMLConfigManager()
             _LOGGER.debug(
                 "Adding entries for package_name '{}'".format(package_name)
             )

-        self.compute.
+        self.compute.update_from_obj(self["compute_packages"][package_name])

         # Ensure submission template is absolute. This *used to be* handled
         # at update (so the paths were stored as absolutes in the packages),
         # but now, it makes more sense to do it here so we can piggyback on
         # the default update() method and not even have to do that.
         if not os.path.isabs(self.compute["submission_template"]):
+
             try:
-                self.
-                    os.path.
-
-
+                if self.filepath:
+                    self.compute["submission_template"] = os.path.join(
+                        os.path.dirname(self.filepath),
+                        self.compute["submission_template"],
+                    )
+                else:
+                    self.compute["submission_template"] = os.path.join(
+                        os.path.dirname(self.default_config_file),
+                        self.compute["submission_template"],
+                    )
             except AttributeError as e:
                 # Environment and environment compute should at least have been
                 # set as null-valued attributes, so execution here is an error.
@@ -200,14 +202,19 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
         self.reset_active_settings()
         return self.activate_package(package_name)

-    def get_active_package(self):
+    def get_active_package(self) -> YAMLConfigManager:
         """
         Returns settings for the currently active compute package

-        :return
+        :return YAMLConfigManager: data defining the active compute package
         """
         return self.compute

+    @property
+    def compute_packages(self):
+
+        return self["compute_packages"]
+
     def list_compute_packages(self):
         """
         Returns a list of available compute packages.
@@ -222,7 +229,7 @@ class ComputingConfiguration(yacman.YAMLConfigManager):

         :return bool: success flag
         """
-        self.compute =
+        self.compute = YAMLConfigManager()
         return True

     def update_packages(self, config_file):
@@ -235,11 +242,11 @@ class ComputingConfiguration(yacman.YAMLConfigManager):

         :param str config_file: path to file with new divvy configuration data
         """
-        entries =
+        entries = load_yaml(config_file)
         self.update(entries)
         return True

-    def get_adapters(self):
+    def get_adapters(self) -> YAMLConfigManager:
         """
         Get current adapters, if defined.

@@ -248,9 +255,9 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
         package-specific set of adapters, if any defined in 'adapters' section
         under currently active compute package.

-        :return
+        :return YAMLConfigManager: current adapters mapping
         """
-        adapters =
+        adapters = YAMLConfigManager()
         if "adapters" in self and self["adapters"] is not None:
             adapters.update(self["adapters"])
         if "compute" in self and "adapters" in self.compute:
@@ -376,7 +383,7 @@ def select_divvy_config(filepath):
     :param str | NoneType filepath: direct file path specification
     :return str: path to the config file to read
     """
-    divcfg =
+    divcfg = select_config(
         config_filepath=filepath,
         config_env_vars=COMPUTE_SETTINGS_VARNAME,
         default_config_filepath=DEFAULT_CONFIG_FILEPATH,
@@ -404,11 +411,13 @@ def divvy_init(config_path, template_config_path):
         _LOGGER.error("You must specify a template config file path.")
         return

+    if not os.path.isabs(config_path):
+        config_path = os.path.abspath(config_path)
+
     if config_path and not os.path.exists(config_path):
-        # dcc.write(config_path)
         # Init should *also* write the templates.
         dest_folder = os.path.dirname(config_path)
-        copytree(os.path.dirname(template_config_path), dest_folder)
+        copytree(os.path.dirname(template_config_path), dest_folder, dirs_exist_ok=True)
         template_subfolder = os.path.join(dest_folder, "divvy_templates")
         _LOGGER.info("Wrote divvy templates to folder: {}".format(template_subfolder))
         new_template = os.path.join(
looper/exceptions.py
CHANGED
@@ -15,6 +15,7 @@ _all__ = [
     "PipelineInterfaceConfigError",
     "PipelineInterfaceRequirementsError",
     "MisconfigurationException",
+    "LooperReportError",
 ]


@@ -31,7 +32,7 @@ class SampleFailedException(LooperError):


 class MisconfigurationException(LooperError):
-    """
+    """Looper not properly configured"""

     def __init__(self, key):
         super(MisconfigurationException, self).__init__(key)
@@ -109,3 +110,10 @@ class PipelineInterfaceRequirementsError(LooperError):
             )
         )
         self.error_specs = typename_by_requirement
+
+
+class LooperReportError(LooperError):
+    """Looper reporting errors"""
+
+    def __init__(self, reason):
+        super(LooperReportError, self).__init__(reason)