looper 1.8.0__py3-none-any.whl → 1.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- looper/_version.py +2 -1
- looper/cli_pydantic.py +20 -9
- looper/command_models/commands.py +1 -1
- looper/conductor.py +106 -9
- looper/const.py +1 -0
- looper/divvy.py +0 -6
- looper/looper.py +4 -6
- looper/pipeline_interface.py +2 -3
- looper/project.py +2 -65
- looper/utils.py +133 -9
- {looper-1.8.0.dist-info → looper-1.9.0.dist-info}/METADATA +4 -3
- {looper-1.8.0.dist-info → looper-1.9.0.dist-info}/RECORD +16 -16
- {looper-1.8.0.dist-info → looper-1.9.0.dist-info}/WHEEL +1 -1
- {looper-1.8.0.dist-info → looper-1.9.0.dist-info}/LICENSE.txt +0 -0
- {looper-1.8.0.dist-info → looper-1.9.0.dist-info}/entry_points.txt +0 -0
- {looper-1.8.0.dist-info → looper-1.9.0.dist-info}/top_level.txt +0 -0
looper/_version.py
CHANGED
@@ -1 +1,2 @@
-__version__ = "1.8.0"
+__version__ = "1.9.0"
+# You must change the version in parser = pydantic2_argparse.ArgumentParser in cli_pydantic.py!!!
looper/cli_pydantic.py
CHANGED
@@ -17,19 +17,17 @@ It is well possible that this script will be removed again.
 # with types.
 from __future__ import annotations
 
-import os
 import sys
 
 import logmuse
-import pydantic2_argparse
+import pydantic_argparse
 import yaml
 from eido import inspect_project
 from pephubclient import PEPHubClient
-from pydantic2_argparse.argparse.parser import ArgumentParser
+from pydantic_argparse.argparse.parser import ArgumentParser
 
 from divvy import select_divvy_config
 
-from .const import PipelineLevel
 from . import __version__
 
 from .command_models.arguments import ArgumentEnum
@@ -151,8 +149,12 @@ def run_looper(args: TopLevelParser, parser: ArgumentParser, test_args=None):
         looper_config_dict = read_looper_dotfile()
         _LOGGER.info(f"Using looper config ({looper_cfg_path}).")
 
+        cli_modifiers_dict = None
        for looper_config_key, looper_config_item in looper_config_dict.items():
-            setattr(subcommand_args, looper_config_key, looper_config_item)
+            if looper_config_key == CLI_KEY:
+                cli_modifiers_dict = looper_config_item
+            else:
+                setattr(subcommand_args, looper_config_key, looper_config_item)
 
     except OSError:
         parser.print_help(sys.stderr)
@@ -168,7 +170,11 @@ def run_looper(args: TopLevelParser, parser: ArgumentParser, test_args=None):
     )
 
     subcommand_args = enrich_args_via_cfg(
-        subcommand_name, subcommand_args, parser, test_args=test_args
+        subcommand_name,
+        subcommand_args,
+        parser,
+        test_args=test_args,
+        cli_modifiers=cli_modifiers_dict,
     )
 
     # If project pipeline interface defined in the cli, change name to: "pipeline_interface"
@@ -246,11 +252,15 @@ def run_looper(args: TopLevelParser, parser: ArgumentParser, test_args=None):
     # Check at the beginning if user wants to use pipestat and pipestat is configurable
     is_pipestat_configured = (
         prj._check_if_pipestat_configured(pipeline_type=PipelineLevel.PROJECT.value)
-        if getattr(args, "project", None)
+        if getattr(subcommand_args, "project", None)
         else prj._check_if_pipestat_configured()
     )
 
     if subcommand_name in ["run", "rerun"]:
+        if getattr(subcommand_args, "project", None):
+            _LOGGER.warning(
+                "Project flag set but 'run' command was used. Please use 'runp' to run at project-level."
+            )
         rerun = subcommand_name == "rerun"
         run = Runner(prj)
         try:
@@ -321,11 +331,12 @@ def run_looper(args: TopLevelParser, parser: ArgumentParser, test_args=None):
 
 
 def main(test_args=None) -> None:
-    parser = pydantic2_argparse.ArgumentParser(
+    parser = pydantic_argparse.ArgumentParser(
         model=TopLevelParser,
         prog="looper",
-        description="Looper…
+        description="Looper: A job submitter for Portable Encapsulated Projects",
         add_help=True,
+        version="1.9.0",
     )
 
     parser = add_short_arguments(parser, ArgumentEnum)
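The `main()` hunk above is the visible edge of the parser migration: `pydantic2-argparse` is replaced by its `pydantic-argparse` successor, and the CLI now reports an explicit `version` — the value the new `_version.py` comment warns must be kept in sync by hand. A minimal sketch of the same model-driven parser pattern; the `Demo` model and its fields are hypothetical stand-ins for looper's actual `TopLevelParser`:

```python
# Sketch only: "Demo" is a hypothetical stand-in for looper's TopLevelParser.
from pydantic import BaseModel, Field
import pydantic_argparse


class Demo(BaseModel):
    config_file: str = Field(description="path to a looper config file")
    dry_run: bool = Field(False, description="parse and render, but do not submit")


parser = pydantic_argparse.ArgumentParser(
    model=Demo,
    prog="demo",
    description="Demo: a model-driven CLI",
    add_help=True,
    version="1.9.0",  # per the new _version.py comment, keep in sync by hand
)

if __name__ == "__main__":
    args = parser.parse_typed_args()  # returns a validated Demo instance
    print(args.config_file, args.dry_run)
```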
looper/conductor.py
CHANGED
@@ -4,10 +4,12 @@ import importlib
 import logging
 import os
 import subprocess
+import signal
+import psutil
+import sys
 import time
 import yaml
 from math import ceil
-from copy import copy, deepcopy
 from json import loads
 from subprocess import check_output
 from typing import *
@@ -19,14 +21,18 @@ from jinja2.exceptions import UndefinedError
 from peppy.const import CONFIG_KEY, SAMPLE_NAME_ATTR, SAMPLE_YAML_EXT
 from peppy.exceptions import RemoteYAMLError
 from pipestat import PipestatError
-from ubiquerg import expandpath
+from ubiquerg import expandpath
 from yaml import dump
 from yacman import FutureYAMLConfigManager as YAMLConfigManager
 
 from .const import *
-from .exceptions import JobSubmissionException
+from .exceptions import JobSubmissionException
 from .processed_project import populate_sample_paths
-from .utils import …
+from .utils import (
+    fetch_sample_flags,
+    jinja_render_template_strictly,
+    expand_nested_var_templates,
+)
 from .const import PipelineLevel
 
 
@@ -189,6 +195,7 @@ class SubmissionConductor(object):
             the project level, rather that on the sample level)
         """
         super(SubmissionConductor, self).__init__()
+
         self.collate = collate
         self.section_key = PROJECT_PL_KEY if self.collate else SAMPLE_PL_KEY
         self.pl_iface = pipeline_interface
@@ -210,6 +217,7 @@ class SubmissionConductor(object):
         self._curr_size = 0
         self._failed_sample_names = []
         self._curr_skip_pool = []
+        self.process_id = None  # this is used for currently submitted subprocess
 
         if self.extra_pipe_args:
             _LOGGER.debug(
@@ -392,6 +400,10 @@ class SubmissionConductor(object):
             not for dry run)
         """
         submitted = False
+
+        # Override signal handler so that Ctrl+C can be used to gracefully terminate child process
+        signal.signal(signal.SIGINT, self._signal_int_handler)
+
         if not self._pool:
             _LOGGER.debug("No submission (no pooled samples): %s", self.pl_name)
             # submitted = False
@@ -420,9 +432,10 @@ class SubmissionConductor(object):
             submission_command = "{} {}".format(sub_cmd, script)
             # Capture submission command return value so that we can
             # intercept and report basic submission failures; #167
-…
-…
-…
+            process = subprocess.Popen(submission_command, shell=True)
+            self.process_id = process.pid
+            process.wait()
+            if process.returncode != 0:
                 fails = (
                     "" if self.collate else [s.sample_name for s in self._samples]
                 )
@@ -489,6 +502,87 @@ class SubmissionConductor(object):
         # name concordant with 1-based, not 0-based indexing.
         return "lump{}".format(self._num_total_job_submissions + 1)
 
+    def _signal_int_handler(self, signal, frame):
+        """
+        For catching interrupt (Ctrl +C) signals. Fails gracefully.
+        """
+        signal_type = "SIGINT"
+        self._generic_signal_handler(signal_type)
+
+    def _generic_signal_handler(self, signal_type):
+        """
+        Function for handling both SIGTERM and SIGINT
+        """
+        message = "Received " + signal_type + ". Failing gracefully..."
+        _LOGGER.warning(msg=message)
+
+        self._terminate_current_subprocess()
+
+        sys.exit(1)
+
+    def _terminate_current_subprocess(self):
+        """This terminates the current sub process associated with self.process_id"""
+
+        def pskill(proc_pid, sig=signal.SIGINT):
+            parent_process = psutil.Process(proc_pid)
+            for child_proc in parent_process.children(recursive=True):
+                child_proc.send_signal(sig)
+            parent_process.send_signal(sig)
+
+        if self.process_id is None:
+            return
+
+        # Gently wait for the subprocess before attempting to kill it
+        sys.stdout.flush()
+        still_running = self._attend_process(psutil.Process(self.process_id), 0)
+        sleeptime = 0.25
+        time_waiting = 0
+
+        while still_running and time_waiting < 3:
+            try:
+                if time_waiting > 2:
+                    pskill(self.process_id, signal.SIGKILL)
+                elif time_waiting > 1:
+                    pskill(self.process_id, signal.SIGTERM)
+                else:
+                    pskill(self.process_id, signal.SIGINT)
+
+            except OSError:
+                # This would happen if the child process ended between the check
+                # and the next kill step
+                still_running = False
+                time_waiting = time_waiting + sleeptime
+
+            # Now see if it's still running
+            time_waiting = time_waiting + sleeptime
+            if not self._attend_process(psutil.Process(self.process_id), sleeptime):
+                still_running = False
+
+        if still_running:
+            _LOGGER.warning(f"Unable to halt child process: {self.process_id}")
+        else:
+            if time_waiting > 0:
+                note = f"terminated after {time_waiting} sec"
+            else:
+                note = "was already terminated"
+            _LOGGER.warning(msg=f"Child process {self.process_id} {note}.")
+
+    def _attend_process(self, proc, sleeptime):
+        """
+        Waits on a process for a given time to see if it finishes, returns True
+        if it's still running after the given time or False as soon as it
+        returns.
+
+        :param psutil.Process proc: Process object opened by psutil.Popen()
+        :param float sleeptime: Time to wait
+        :return bool: True if process is still running; otherwise false
+        """
+        try:
+            proc.wait(timeout=int(sleeptime))
+        except psutil.TimeoutExpired:
+            return True
+        return False
+
     def _jobname(self, pool):
         """Create the name for a job submission."""
         return "{}_{}".format(self.pl_iface.pipeline_name, self._sample_lump_name(pool))
@@ -563,6 +657,7 @@ class SubmissionConductor(object):
             "results_file": psm.file,
             "record_identifier": psm.record_identifier,
             "config_file": psm.config_path,
+            "output_schema": psm.cfg["_schema_path"],
         }
         filtered_namespace = {k: v for k, v in full_namespace.items() if v}
         return YAMLConfigManager(filtered_namespace)
@@ -626,8 +721,10 @@ class SubmissionConductor(object):
         _LOGGER.debug(f"namespace pipelines: { pl_iface }")
 
         namespaces["pipeline"]["var_templates"] = pl_iface[VAR_TEMPL_KEY] or {}
-…
-…
+
+        namespaces["pipeline"]["var_templates"] = expand_nested_var_templates(
+            namespaces["pipeline"]["var_templates"], namespaces
+        )
 
         # pre_submit hook namespace updates
         namespaces = _exec_pre_submit(pl_iface, namespaces)
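Taken together, the conductor hunks replace a fire-and-forget submission with a supervised one: the child PID is recorded, a SIGINT handler is installed before each submission, and `_terminate_current_subprocess` walks the child's process tree with `psutil`, escalating SIGINT → SIGTERM → SIGKILL. A self-contained sketch of that escalation pattern (`sleep 30` stands in for a submitted job; names here are illustrative, not looper's):

```python
# Illustrative sketch of the Ctrl+C handling pattern above; not looper's code.
import signal
import subprocess
import sys

import psutil

child = subprocess.Popen("sleep 30", shell=True)  # stand-in for a submission


def kill_tree(pid, sig):
    """Signal a process and all of its descendants, children first."""
    parent = psutil.Process(pid)
    for proc in parent.children(recursive=True):
        proc.send_signal(sig)
    parent.send_signal(sig)


def on_sigint(signum, frame):
    print("Received SIGINT. Failing gracefully...")
    # Escalate: polite interrupt first, hard kill only as a last resort.
    for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGKILL):
        try:
            kill_tree(child.pid, sig)
            psutil.Process(child.pid).wait(timeout=1)
            break  # child exited
        except psutil.TimeoutExpired:
            continue  # still running; try the next, stronger signal
        except (psutil.NoSuchProcess, OSError):
            break  # child already gone
    sys.exit(1)


signal.signal(signal.SIGINT, on_sigint)
child.wait()
```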
looper/const.py
CHANGED
looper/divvy.py
CHANGED
@@ -1,18 +1,13 @@
 """ Computing configuration representation """
 
 import logging
-import logmuse
 import os
-import sys
 import shutil
-import yaml
 
 
 from shutil import copytree
 from yacman import FutureYAMLConfigManager as YAMLConfigManager
 from yacman import write_lock, FILEPATH_KEY, load_yaml, select_config
-from yaml import SafeLoader
-from ubiquerg import is_writable, VersionInHelpParser
 
 
 from .const import (
@@ -24,7 +19,6 @@ from .const import (
 )
 from .utils import write_submit_script
 
-# from . import __version__
 
 _LOGGER = logging.getLogger(__name__)
 
looper/looper.py
CHANGED
@@ -33,14 +33,12 @@ from rich.color import Color
 from rich.console import Console
 from rich.table import Table
 from ubiquerg.cli_tools import query_yes_no
-from ubiquerg.collection import uniqify
 
 
 from .conductor import SubmissionConductor
 
 from .exceptions import *
 from .const import *
-from .pipeline_interface import PipelineInterface
 from .project import Project
 from .utils import (
     desired_samples_range_skipped,
@@ -94,7 +92,7 @@ class Checker(Executor):
         psms = {}
         if getattr(args, "project", None):
 
-            for piface in self.prj.…
+            for piface in self.prj.project_pipeline_interfaces:
                 if piface.psm.pipeline_type == PipelineLevel.PROJECT.value:
                     psms[piface.psm.pipeline_name] = piface.psm
                     s = piface.psm.get_status() or "unknown"
@@ -565,7 +563,7 @@ class Reporter(Executor):
 
         if project_level:
 
-            for piface in self.prj.…
+            for piface in self.prj.project_pipeline_interfaces:
                 if piface.psm.pipeline_type == PipelineLevel.PROJECT.value:
                     psms[piface.psm.pipeline_name] = piface.psm
                     report_directory = piface.psm.summarize(
@@ -598,7 +596,7 @@ class Linker(Executor):
         psms = {}
 
         if project_level:
-            for piface in self.prj.…
+            for piface in self.prj.project_pipeline_interfaces:
                 if piface.psm.pipeline_type == PipelineLevel.PROJECT.value:
                     psms[piface.psm.pipeline_name] = piface.psm
                     linked_results_path = piface.psm.link(link_dir=link_dir)
@@ -623,7 +621,7 @@ class Tabulator(Executor):
         results = []
         psms = {}
         if project_level:
-            for piface in self.prj.…
+            for piface in self.prj.project_pipeline_interfaces:
                 if piface.psm.pipeline_type == PipelineLevel.PROJECT.value:
                     psms[piface.psm.pipeline_name] = piface.psm
                     results = piface.psm.table()
looper/pipeline_interface.py
CHANGED
@@ -17,7 +17,7 @@ from .exceptions import (
     InvalidResourceSpecificationException,
     PipelineInterfaceConfigError,
 )
-from .utils import jinja_render_template_strictly
+from .utils import render_nested_var_templates
 
 __author__ = "Michal Stolarczyk"
 __email__ = "michal@virginia.edu"
@@ -89,8 +89,7 @@ class PipelineInterface(YAMLConfigManager):
         var_templates = {}
         if curr_data:
             var_templates.update(curr_data)
-        for k, v in var_templates.items():
-            var_templates[k] = jinja_render_template_strictly(v, namespaces)
+        var_templates = render_nested_var_templates(var_templates, namespaces)
         return var_templates
 
     def get_pipeline_schemas(self, schema_key=INPUT_SCHEMA_KEY):
looper/project.py
CHANGED
@@ -10,18 +10,14 @@ try:
 except ImportError:
     # cached_property was introduced in python 3.8
     cached_property = property
-from logging import getLogger
 
 from .divvy import ComputingConfiguration
 from eido import PathAttrNotFoundError, read_schema
 from jsonschema import ValidationError
 from pandas.core.common import flatten
-from peppy import CONFIG_KEY, OUTDIR_KEY
-from peppy import Project as peppyProject
 from peppy.utils import make_abs_via_cfg
-from pipestat import …
-…
-from yacman import YAMLConfigManager
+from pipestat import PipestatManager
+
 from .conductor import write_pipestat_config
 
 from .exceptions import *
@@ -374,65 +370,6 @@
         except KeyError:
             return None
 
-    def build_submission_bundles(self, protocol, priority=True):
-        """
-        Create pipelines to submit for each sample of a particular protocol.
-
-        With the argument (flag) to the priority parameter, there's control
-        over whether to submit pipeline(s) from only one of the project's
-        known pipeline locations with a match for the protocol, or whether to
-        submit pipelines created from all locations with a match for the
-        protocol.
-
-        :param str protocol: name of the protocol/library for which to
-            create pipeline(s)
-        :param bool priority: to only submit pipeline(s) from the first of the
-            pipelines location(s) (indicated in the project config file) that
-            has a match for the given protocol; optional, default True
-        :return Iterable[(PipelineInterface, type, str, str)]:
-        :raises AssertionError: if there's a failure in the attempt to
-            partition an interface's pipeline scripts into disjoint subsets of
-            those already mapped and those not yet mapped
-        """
-
-        if not priority:
-            raise NotImplementedError(
-                "Currently, only prioritized protocol mapping is supported "
-                "(i.e., pipeline interfaces collection is a prioritized list, "
-                "so only the first interface with a protocol match is used.)"
-            )
-
-        # Pull out the collection of interfaces (potentially one from each of
-        # the locations indicated in the project configuration file) as a
-        # sort of pool of information about possible ways in which to submit
-        # pipeline(s) for sample(s) of the indicated protocol.
-        pifaces = self.interfaces.get_pipeline_interface(protocol)
-        if not pifaces:
-            raise PipelineInterfaceConfigError(
-                "No interfaces for protocol: {}".format(protocol)
-            )
-
-        # coonvert to a list, in the future we might allow to match multiple
-        pifaces = pifaces if isinstance(pifaces, str) else [pifaces]
-
-        job_submission_bundles = []
-        new_jobs = []
-
-        _LOGGER.debug("Building pipelines matched by protocol: {}".format(protocol))
-
-        for pipe_iface in pifaces:
-            # Determine how to reference the pipeline and where it is.
-            path = pipe_iface["path"]
-            if not (os.path.exists(path) or is_command_callable(path)):
-                _LOGGER.warning("Missing pipeline script: {}".format(path))
-                continue
-
-            # Add this bundle to the collection of ones relevant for the
-            # current PipelineInterface.
-            new_jobs.append(pipe_iface)
-        job_submission_bundles.append(new_jobs)
-        return list(itertools.chain(*job_submission_bundles))
-
     @staticmethod
     def get_schemas(pifaces, schema_key=INPUT_SCHEMA_KEY):
         """
looper/utils.py
CHANGED
@@ -1,12 +1,11 @@
 """ Helpers without an obvious logical home. """
 
 import argparse
-from collections import defaultdict
+from collections import defaultdict
 import glob
 import itertools
 from logging import getLogger
 import os
-import sys
 from typing import *
 import re
 
@@ -14,13 +13,14 @@ import jinja2
 import yaml
 from peppy import Project as peppyProject
 from peppy.const import *
-from ubiquerg import convert_value, expandpath, parse_registry_path
+from ubiquerg import convert_value, expandpath, parse_registry_path, deep_update
 from pephubclient.constants import RegistryPath
 from pydantic import ValidationError
+from yacman import load_yaml
 
 from .const import *
 from .command_models.commands import SUPPORTED_COMMANDS
-from .exceptions import MisconfigurationException
+from .exceptions import MisconfigurationException
 
 _LOGGER = getLogger(__name__)
 
@@ -253,7 +253,13 @@ def read_yaml_file(filepath):
     return data
 
 
-def enrich_args_via_cfg(subcommand_name, parser_args, aux_parser, test_args=None):
+def enrich_args_via_cfg(
+    subcommand_name,
+    parser_args,
+    aux_parser,
+    test_args=None,
+    cli_modifiers=None,
+):
     """
     Read in a looper dotfile and set arguments.
 
@@ -270,6 +276,33 @@ def enrich_args_via_cfg(subcommand_name, parser_args, aux_parser, test_args=None
         if os.path.exists(parser_args.config_file)
         else dict()
     )
+
+    # If user provided project-level modifiers in the looper config, they are prioritized
+    if cfg_args_all:
+        for key, value in cfg_args_all.items():
+            if getattr(parser_args, key, None):
+                new_value = getattr(parser_args, key)
+                cfg_args_all[key] = new_value
+    else:
+        cfg_args_all = {}
+
+    looper_config_cli_modifiers = None
+    if cli_modifiers:
+        if str(subcommand_name) in cli_modifiers:
+            looper_config_cli_modifiers = cli_modifiers[subcommand_name]
+            looper_config_cli_modifiers = (
+                {k.replace("-", "_"): v for k, v in looper_config_cli_modifiers.items()}
+                if looper_config_cli_modifiers
+                else None
+            )
+
+    if looper_config_cli_modifiers:
+        _LOGGER.warning(
+            "CLI modifiers were provided in Looper Config and in PEP Project Config. Merging..."
+        )
+        deep_update(cfg_args_all, looper_config_cli_modifiers)
+        _LOGGER.debug(msg=f"Merged CLI modifiers: {cfg_args_all}")
+
     result = argparse.Namespace()
     if test_args:
         cli_args, _ = aux_parser.parse_known_args(args=test_args)
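The rewritten `enrich_args_via_cfg` layers three sources in a fixed order: values from the looper config file form the base, values already present on the parsed CLI namespace override them, and any per-subcommand block from the config's `cli` section (passed through from `run_looper` as `cli_modifiers`) is deep-merged on top, with `-` normalized to `_` in option names. A sketch of that precedence using `ubiquerg.deep_update`; the config snippet and option names are hypothetical:

```python
# Sketch of the merge order in enrich_args_via_cfg; values are hypothetical.
from ubiquerg import deep_update

# base layer: settings read from the looper config file
cfg_args_all = {"output_dir": "results", "limit": 5}

# middle layer: anything the user already set on the command line wins
cli_values = {"limit": 10}  # simplified stand-in for the argparse namespace
for key, value in cli_values.items():
    if value:
        cfg_args_all[key] = value

# top layer: per-subcommand modifiers from the config's "cli" section, e.g.
#   cli:
#     run:
#       dry-run: true
cli_modifiers = {"run": {"dry-run": True}}
subcommand = "run"
if subcommand in cli_modifiers:
    modifiers = {k.replace("-", "_"): v for k, v in cli_modifiers[subcommand].items()}
    deep_update(cfg_args_all, modifiers)  # mutates cfg_args_all in place

print(cfg_args_all)  # {'output_dir': 'results', 'limit': 10, 'dry_run': True}
```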
@@ -503,6 +536,33 @@ def initiate_looper_config(
     return True
 
 
+def determine_pipeline_type(piface_path: str, looper_config_path: str):
+    """
+    Read pipeline interface from disk and determine if pipeline type is sample or project-level
+
+
+    :param str piface_path: path to pipeline_interface
+    :param str looper_config_path: path to looper config file
+    :return Tuple[Union[str,None],Union[str,None]] : (pipeline type, resolved path) or (None, None)
+    """
+
+    if piface_path is None:
+        return None, None
+    piface_path = expandpath(piface_path)
+    if not os.path.isabs(piface_path):
+        piface_path = os.path.realpath(
+            os.path.join(os.path.dirname(looper_config_path), piface_path)
+        )
+    try:
+        piface_dict = load_yaml(piface_path)
+    except FileNotFoundError:
+        return None, None
+
+    pipeline_type = piface_dict.get("pipeline_type", None)
+
+    return pipeline_type, piface_path
+
+
 def read_looper_config_file(looper_config_path: str) -> dict:
     """
     Read Looper config file which includes:
@@ -543,12 +603,46 @@ def read_looper_config_file(looper_config_path: str) -> dict:
     if PIPESTAT_KEY in dp_data:
         return_dict[PIPESTAT_KEY] = dp_data[PIPESTAT_KEY]
 
+    if SAMPLE_MODS_KEY in dp_data:
+        return_dict[SAMPLE_MODS_KEY] = dp_data[SAMPLE_MODS_KEY]
+
+    if CLI_KEY in dp_data:
+        return_dict[CLI_KEY] = dp_data[CLI_KEY]
+
     if PIPELINE_INTERFACES_KEY in dp_data:
+
         dp_data.setdefault(PIPELINE_INTERFACES_KEY, {})
-…
-…
-            "…
-…
+
+        if isinstance(dp_data.get(PIPELINE_INTERFACES_KEY), dict) and (
+            dp_data.get(PIPELINE_INTERFACES_KEY).get("sample")
+            or dp_data.get(PIPELINE_INTERFACES_KEY).get("project")
+        ):
+            # Support original nesting of pipeline interfaces under "sample" and "project"
+            return_dict[SAMPLE_PL_ARG] = dp_data.get(PIPELINE_INTERFACES_KEY).get(
+                "sample"
+            )
+            return_dict[PROJECT_PL_ARG] = dp_data.get(PIPELINE_INTERFACES_KEY).get(
+                "project"
+            )
+        else:
+            # infer pipeline type based from interface instead of nested keys: https://github.com/pepkit/looper/issues/465
+            all_pipeline_interfaces = dp_data.get(PIPELINE_INTERFACES_KEY)
+            sample_pifaces = []
+            project_pifaces = []
+            if isinstance(all_pipeline_interfaces, str):
+                all_pipeline_interfaces = [all_pipeline_interfaces]
+            for piface in all_pipeline_interfaces:
+                pipeline_type, piface_path = determine_pipeline_type(
+                    piface, looper_config_path
+                )
+                if pipeline_type == PipelineLevel.SAMPLE.value:
+                    sample_pifaces.append(piface_path)
+                elif pipeline_type == PipelineLevel.PROJECT.value:
+                    project_pifaces.append(piface_path)
+            if len(sample_pifaces) > 0:
+                return_dict[SAMPLE_PL_ARG] = sample_pifaces
+            if len(project_pifaces) > 0:
+                return_dict[PROJECT_PL_ARG] = project_pifaces
 
     else:
         _LOGGER.warning(
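The two hunks above are the heart of pepkit/looper#465: a looper config may now list pipeline interfaces in one flat list, and `read_looper_config_file` buckets each one as sample- or project-level by opening the interface YAML and reading its `pipeline_type` field, falling back to the legacy `sample:`/`project:` nesting when present. A sketch of that lookup with hypothetical file paths (`yaml.safe_load` stands in for yacman's `load_yaml`):

```python
# Sketch of interface-type inference; the file paths below are hypothetical.
import os

import yaml


def pipeline_type_of(piface_path, looper_config_path):
    """Return (pipeline_type, resolved_path), or (None, None) if unreadable."""
    if piface_path is None:
        return None, None
    piface_path = os.path.expandvars(os.path.expanduser(piface_path))
    if not os.path.isabs(piface_path):
        # resolve relative paths against the looper config's directory
        piface_path = os.path.realpath(
            os.path.join(os.path.dirname(looper_config_path), piface_path)
        )
    try:
        with open(piface_path) as f:
            piface = yaml.safe_load(f) or {}
    except FileNotFoundError:
        return None, None
    return piface.get("pipeline_type"), piface_path


sample_pifaces, project_pifaces = [], []
for piface in ["piface_count.yaml", "piface_summarize.yaml"]:
    ptype, path = pipeline_type_of(piface, ".looper.yaml")
    if ptype == "sample":
        sample_pifaces.append(path)
    elif ptype == "project":
        project_pifaces.append(path)
```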
@@ -819,3 +913,33 @@ def inspect_looper_config_file(looper_config_dict) -> None:
     print("LOOPER INSPECT")
     for key, value in looper_config_dict.items():
         print(f"{key} {value}")
+
+
+def expand_nested_var_templates(var_templates_dict, namespaces):
+
+    "Takes all var_templates as a dict and recursively expands any paths."
+
+    result = {}
+
+    for k, v in var_templates_dict.items():
+        if isinstance(v, dict):
+            result[k] = expand_nested_var_templates(v, namespaces)
+        else:
+            result[k] = expandpath(v)
+
+    return result
+
+
+def render_nested_var_templates(var_templates_dict, namespaces):
+
+    "Takes all var_templates as a dict and recursively renders the jinja templates."
+
+    result = {}
+
+    for k, v in var_templates_dict.items():
+        if isinstance(v, dict):
+            result[k] = expand_nested_var_templates(v, namespaces)
+        else:
+            result[k] = jinja_render_template_strictly(v, namespaces)
+
+    return result
{looper-1.8.0.dist-info → looper-1.9.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: looper
-Version: 1.8.0
+Version: 1.9.0
 Summary: A pipeline submission engine that parses sample inputs and submits pipelines for each sample.
 Home-page: https://github.com/pepkit/looper
 Author: Nathan Sheffield, Vince Reuter, Michal Stolarczyk, Johanna Klughammer, Andre Rendeiro
@@ -26,9 +26,10 @@ Requires-Dist: pipestat >=0.9.2
 Requires-Dist: peppy >=0.40.2
 Requires-Dist: pyyaml >=3.12
 Requires-Dist: rich >=9.10.0
-Requires-Dist: ubiquerg >=0.…
+Requires-Dist: ubiquerg >=0.8.1a1
 Requires-Dist: yacman ==0.9.3
-Requires-Dist: …
+Requires-Dist: pydantic-argparse >=0.9.0
+Requires-Dist: psutil
 
 # <img src="docs/img/looper_logo.svg" alt="looper logo" height="70">
 
{looper-1.8.0.dist-info → looper-1.9.0.dist-info}/RECORD
CHANGED
@@ -1,24 +1,24 @@
 looper/__init__.py,sha256=f_z9YY4ibOk7eyWoaViH_VaCXMlPQeiftbnibSFj-3E,1333
 looper/__main__.py,sha256=OOCmI-dPUvInnJHkHNMf54cblNJ3Yl9ELOwZcfOXmD8,240
-looper/_version.py,sha256=…
+looper/_version.py,sha256=qQzG7GXbJTUF2ruA1lKA3GEc5yhNqC4Eil6teqYTSTY,120
 looper/cli_divvy.py,sha256=J07x83sqC4jJeu3_yS6KOARPWmwKGAV7JvN33T5zDac,5907
-looper/cli_pydantic.py,sha256=…
-looper/conductor.py,sha256=…
-looper/const.py,sha256=…
-looper/divvy.py,sha256=…
+looper/cli_pydantic.py,sha256=bSnaCNKMgAg_PsYuguAzkf8JYuBTzLRI4QvnGdl_JE8,14017
+looper/conductor.py,sha256=DSpil080IYYqu-76ms25jrNSTmog0449tPXn0nK38Dw,34786
+looper/const.py,sha256=OscEELQsyLKlSrmwuXfyLRwpAUJUEpGD2UxBeLJDXgw,8703
+looper/divvy.py,sha256=5x8hV1lT5tEQdAUtVjn0rNwYnJroNij0RyDn-wHf4QE,15251
 looper/exceptions.py,sha256=r6SKKt-m8CXQnXGDnuiwoA6zBJhIZflygBKjX4RCloI,3419
-looper/looper.py,sha256=…
+looper/looper.py,sha256=ZWTulMz6NobnYFUjev513TJwXqknrb4_gZrV-a_fT9g,30041
 looper/parser_types.py,sha256=d3FHt54f9jo9VZMr5SQkbghcAdABqiYZW2JBGO5EBnw,2327
-looper/pipeline_interface.py,sha256=…
+looper/pipeline_interface.py,sha256=dBXwsU59vR4qmUC59Bt3iM2187mXSDdysMNOhf63pPw,14922
 looper/plugins.py,sha256=MaMdPmK9U_4FkNJE5kccohBbY1i2qj1NTEucubFOJek,5747
 looper/processed_project.py,sha256=jZxoMYafvr-OHFxylc5ivGty1VwXBZhl0kgoFkY-174,9837
-looper/project.py,sha256=…
-looper/utils.py,sha256=…
+looper/project.py,sha256=svkCChwpbFBSJZdYXWcOol0GZnWNWaw32yyye2ajkXw,34279
+looper/utils.py,sha256=KkXQ6igvuuWBhb-q3TzCUYf39aoWD9CGJ06f5zhVAyw,33799
 looper/command_models/DEVELOPER.md,sha256=eRxnrO-vqNJjExzamXKEq5wr_-Zw6PQEwkS9RPinYrk,2775
 looper/command_models/README.md,sha256=3RGegeZlTZYnhcHXRu6bdI_81WZom2q7QYMV-KGYY7U,588
 looper/command_models/__init__.py,sha256=6QWC2TewowEL7dATli5YpMmFWuXaLEPktofJCXkYUBI,187
 looper/command_models/arguments.py,sha256=emK7gc_fVgrSPHE2cShxJX05VrgOEn4H7szU8DBev7Q,8808
-looper/command_models/commands.py,sha256=…
+looper/command_models/commands.py,sha256=WieHeBGkZQlKFqUph6GEpd12dIUmJNJ4lLMgN2xeZJA,9723
 looper/default_config/divvy_config.yaml,sha256=wK5kLDGBV2wwoyqg2rl3X8SXjds4x0mwBUjUzF1Ln7g,1705
 looper/default_config/divvy_templates/localhost_bulker_template.sub,sha256=yn5VB9Brt7Hck9LT17hD2o8Kn-76gYJQk_A-8C1Gr4k,164
 looper/default_config/divvy_templates/localhost_docker_template.sub,sha256=XRr7AlR7-TP1L3hyBMfka_RgWRL9vzOlS5Kd1xSNwT0,183
@@ -60,9 +60,9 @@ looper/schemas/divvy_config_schema.yaml,sha256=7GJfKLc3VX4RGjHnOE1zxwsHXhj_ur9za…
 looper/schemas/pipeline_interface_schema_generic.yaml,sha256=D16Rkpj03H9WnvA_N18iNU-hH_HwOuyESJ8Hk5hZSXc,1518
 looper/schemas/pipeline_interface_schema_project.yaml,sha256=-ZWyA0lKXWik3obuLNVk3IsAZYfbLVbCDvJnD-Fcluo,1567
 looper/schemas/pipeline_interface_schema_sample.yaml,sha256=x0OwVnijJpvm50DscvvJujdK4UAI7d71pqVemQS-D-0,1564
-looper-1.8.0.dist-info/…
-looper-1.8.0.dist-info/…
-looper-1.8.0.dist-info/…
-looper-1.8.0.dist-info/…
-looper-1.8.0.dist-info/…
-looper-1.8.0.dist-info/…
+looper-1.9.0.dist-info/LICENSE.txt,sha256=oB6ZGDa4kcznznJKJsLLFFcOZyi8Y6e2Jv0rJozgp-I,1269
+looper-1.9.0.dist-info/METADATA,sha256=h9Pdu_tGy4pTsX6m88wWWCF3eT5RsS96Sw9Kdq5wjLc,1798
+looper-1.9.0.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
+looper-1.9.0.dist-info/entry_points.txt,sha256=ejZpghZG3OoTK69u9rTW-yLyI6SC63bBTUb-Vw26HG4,87
+looper-1.9.0.dist-info/top_level.txt,sha256=I0Yf7djsoQAMzwHBbDiQi9hGtq4Z41_Ma5CX8qXG8Y8,7
+looper-1.9.0.dist-info/RECORD,,
{looper-1.8.0.dist-info → looper-1.9.0.dist-info}/LICENSE.txt
File without changes
{looper-1.8.0.dist-info → looper-1.9.0.dist-info}/entry_points.txt
File without changes
{looper-1.8.0.dist-info → looper-1.9.0.dist-info}/top_level.txt
File without changes