looper 1.7.0a1__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
looper/conductor.py CHANGED
@@ -4,9 +4,12 @@ import importlib
4
4
  import logging
5
5
  import os
6
6
  import subprocess
7
+ import signal
8
+ import psutil
9
+ import sys
7
10
  import time
8
11
  import yaml
9
- from copy import copy, deepcopy
12
+ from math import ceil
10
13
  from json import loads
11
14
  from subprocess import check_output
12
15
  from typing import *
@@ -18,14 +21,19 @@ from jinja2.exceptions import UndefinedError
18
21
  from peppy.const import CONFIG_KEY, SAMPLE_NAME_ATTR, SAMPLE_YAML_EXT
19
22
  from peppy.exceptions import RemoteYAMLError
20
23
  from pipestat import PipestatError
21
- from ubiquerg import expandpath, is_command_callable
24
+ from ubiquerg import expandpath
22
25
  from yaml import dump
23
- from yacman import YAMLConfigManager
26
+ from yacman import FutureYAMLConfigManager as YAMLConfigManager
24
27
 
25
28
  from .const import *
26
- from .exceptions import JobSubmissionException, SampleFailedException
29
+ from .exceptions import JobSubmissionException
27
30
  from .processed_project import populate_sample_paths
28
- from .utils import fetch_sample_flags, jinja_render_template_strictly
31
+ from .utils import (
32
+ fetch_sample_flags,
33
+ jinja_render_template_strictly,
34
+ expand_nested_var_templates,
35
+ )
36
+ from .const import PipelineLevel
29
37
 
30
38
 
31
39
  _LOGGER = logging.getLogger(__name__)
@@ -84,11 +92,23 @@ def _get_yaml_path(namespaces, template_key, default_name_appendix="", filename=
84
92
 
85
93
  def write_pipestat_config(looper_pipestat_config_path, pipestat_config_dict):
86
94
  """
87
- This is run at the project level, not at the sample level.
95
+ This writes a combined configuration file to be passed to a PipestatManager.
96
+ :param str looper_pipestat_config_path: path to the created pipestat configuration file
97
+ :param dict pipestat_config_dict: the dict containing key value pairs to be written to the pipestat configuration
98
+ return bool
88
99
  """
100
+
101
+ if not os.path.exists(os.path.dirname(looper_pipestat_config_path)):
102
+ try:
103
+ os.makedirs(os.path.dirname(looper_pipestat_config_path))
104
+ except FileExistsError:
105
+ pass
106
+
89
107
  with open(looper_pipestat_config_path, "w") as f:
90
108
  yaml.dump(pipestat_config_dict, f)
91
- print(f"Initialized looper config file: {looper_pipestat_config_path}")
109
+ _LOGGER.debug(
110
+ msg=f"Initialized pipestat config file: {looper_pipestat_config_path}"
111
+ )
92
112
 
93
113
  return True
94
114
 
@@ -132,6 +152,7 @@ class SubmissionConductor(object):
132
152
  compute_variables=None,
133
153
  max_cmds=None,
134
154
  max_size=None,
155
+ max_jobs=None,
135
156
  automatic=True,
136
157
  collate=False,
137
158
  ):
@@ -166,14 +187,20 @@ class SubmissionConductor(object):
166
187
  include in a single job script.
167
188
  :param int | float | NoneType max_size: Upper bound on total file
168
189
  size of inputs used by the commands lumped into single job script.
190
+ :param int | float | NoneType max_jobs: Upper bound on total number of jobs to
191
+ group samples for submission.
169
192
  :param bool automatic: Whether the submission should be automatic once
170
193
  the pool reaches capacity.
171
194
  :param bool collate: Whether a collate job is to be submitted (runs on
172
195
  the project level, rather than on the sample level)
173
196
  """
174
197
  super(SubmissionConductor, self).__init__()
198
+
175
199
  self.collate = collate
176
200
  self.section_key = PROJECT_PL_KEY if self.collate else SAMPLE_PL_KEY
201
+ self.pipeline_interface_type = (
202
+ "project_interface" if self.collate else "sample_interface"
203
+ )
177
204
  self.pl_iface = pipeline_interface
178
205
  self.pl_name = self.pl_iface.pipeline_name
179
206
  self.prj = prj
@@ -193,6 +220,7 @@ class SubmissionConductor(object):
193
220
  self._curr_size = 0
194
221
  self._failed_sample_names = []
195
222
  self._curr_skip_pool = []
223
+ self.process_id = None # this is used for currently submitted subprocess
196
224
 
197
225
  if self.extra_pipe_args:
198
226
  _LOGGER.debug(
@@ -200,6 +228,16 @@ class SubmissionConductor(object):
200
228
  "{}".format(self.extra_pipe_args)
201
229
  )
202
230
 
231
+ if max_jobs:
232
+ if max_jobs == 0 or max_jobs < 0:
233
+ raise ValueError(
234
+ "If specified, max job command count must be a positive integer, greater than zero."
235
+ )
236
+
237
+ num_samples = len(self.prj.samples)
238
+ samples_per_job = num_samples / max_jobs
239
+ max_cmds = ceil(samples_per_job)
240
+
203
241
  if not self.collate:
204
242
  self.automatic = automatic
205
243
  if max_cmds is None and max_size is None:
@@ -247,8 +285,12 @@ class SubmissionConductor(object):
247
285
 
248
286
  :param bool force: whether to force the project submission (ignore status/flags)
249
287
  """
288
+ psms = {}
250
289
  if self.prj.pipestat_configured_project:
251
- psm = self.prj.get_pipestat_managers(project_level=True)[self.pl_name]
290
+ for piface in self.prj.project_pipeline_interfaces:
291
+ if piface.psm.pipeline_type == PipelineLevel.PROJECT.value:
292
+ psms[piface.psm.pipeline_name] = piface.psm
293
+ psm = psms[self.pl_name]
252
294
  status = psm.get_status()
253
295
  if not force and status is not None:
254
296
  _LOGGER.info(f"> Skipping project. Determined status: {status}")
@@ -274,12 +316,11 @@ class SubmissionConductor(object):
274
316
  )
275
317
  )
276
318
  if self.prj.pipestat_configured:
277
- psms = self.prj.get_pipestat_managers(sample_name=sample.sample_name)
278
- sample_statuses = psms[self.pl_name].get_status(
319
+ sample_statuses = self.pl_iface.psm.get_status(
279
320
  record_identifier=sample.sample_name
280
321
  )
281
322
  if sample_statuses == "failed" and rerun is True:
282
- psms[self.pl_name].set_status(
323
+ self.pl_iface.psm.set_status(
283
324
  record_identifier=sample.sample_name, status_identifier="waiting"
284
325
  )
285
326
  sample_statuses = "waiting"
@@ -289,23 +330,27 @@ class SubmissionConductor(object):
289
330
 
290
331
  use_this_sample = True # default to running this sample
291
332
  msg = None
333
+ if rerun and sample_statuses == []:
334
+ msg = f"> Skipping sample because rerun requested, but no failed or waiting flag found."
335
+ use_this_sample = False
292
336
  if sample_statuses:
293
337
  status_str = ", ".join(sample_statuses)
294
338
  failed_flag = any("failed" in x for x in sample_statuses)
339
+ waiting_flag = any("waiting" in x for x in sample_statuses)
295
340
  if self.ignore_flags:
296
341
  msg = f"> Found existing status: {status_str}. Ignoring."
297
342
  else: # this pipeline already has a status
298
343
  msg = f"> Found existing status: {status_str}. Skipping sample."
299
- if failed_flag:
344
+ if failed_flag and not rerun:
300
345
  msg += " Use rerun to ignore failed status." # help guidance
301
346
  use_this_sample = False
302
347
  if rerun:
303
348
  # Rescue the sample if rerun requested, and failed flag is found
304
- if failed_flag:
305
- msg = f"> Re-running failed sample. Status: {status_str}"
349
+ if failed_flag or waiting_flag:
350
+ msg = f"> Re-running sample. Status: {status_str}"
306
351
  use_this_sample = True
307
352
  else:
308
- msg = f"> Skipping sample because rerun requested, but no failed flag found. Status: {status_str}"
353
+ msg = f"> Skipping sample because rerun requested, but no failed or waiting flag found. Status: {status_str}"
309
354
  use_this_sample = False
310
355
  if msg:
311
356
  _LOGGER.info(msg)
@@ -358,6 +403,10 @@ class SubmissionConductor(object):
358
403
  not for dry run)
359
404
  """
360
405
  submitted = False
406
+
407
+ # Override signal handler so that Ctrl+C can be used to gracefully terminate child process
408
+ signal.signal(signal.SIGINT, self._signal_int_handler)
409
+
361
410
  if not self._pool:
362
411
  _LOGGER.debug("No submission (no pooled samples): %s", self.pl_name)
363
412
  # submitted = False
@@ -386,9 +435,10 @@ class SubmissionConductor(object):
386
435
  submission_command = "{} {}".format(sub_cmd, script)
387
436
  # Capture submission command return value so that we can
388
437
  # intercept and report basic submission failures; #167
389
- try:
390
- subprocess.check_call(submission_command, shell=True)
391
- except subprocess.CalledProcessError:
438
+ process = subprocess.Popen(submission_command, shell=True)
439
+ self.process_id = process.pid
440
+ process.wait()
441
+ if process.returncode != 0:
392
442
  fails = (
393
443
  "" if self.collate else [s.sample_name for s in self._samples]
394
444
  )
@@ -455,6 +505,87 @@ class SubmissionConductor(object):
455
505
  # name concordant with 1-based, not 0-based indexing.
456
506
  return "lump{}".format(self._num_total_job_submissions + 1)
457
507
 
508
+ def _signal_int_handler(self, signal, frame):
509
+ """
510
+ For catching interrupt (Ctrl +C) signals. Fails gracefully.
511
+ """
512
+ signal_type = "SIGINT"
513
+ self._generic_signal_handler(signal_type)
514
+
515
+ def _generic_signal_handler(self, signal_type):
516
+ """
517
+ Function for handling both SIGTERM and SIGINT
518
+ """
519
+ message = "Received " + signal_type + ". Failing gracefully..."
520
+ _LOGGER.warning(msg=message)
521
+
522
+ self._terminate_current_subprocess()
523
+
524
+ sys.exit(1)
525
+
526
+ def _terminate_current_subprocess(self):
527
+ """This terminates the current sub process associated with self.process_id"""
528
+
529
+ def pskill(proc_pid, sig=signal.SIGINT):
530
+ parent_process = psutil.Process(proc_pid)
531
+ for child_proc in parent_process.children(recursive=True):
532
+ child_proc.send_signal(sig)
533
+ parent_process.send_signal(sig)
534
+
535
+ if self.process_id is None:
536
+ return
537
+
538
+ # Gently wait for the subprocess before attempting to kill it
539
+ sys.stdout.flush()
540
+ still_running = self._attend_process(psutil.Process(self.process_id), 0)
541
+ sleeptime = 0.25
542
+ time_waiting = 0
543
+
544
+ while still_running and time_waiting < 3:
545
+ try:
546
+ if time_waiting > 2:
547
+ pskill(self.process_id, signal.SIGKILL)
548
+ elif time_waiting > 1:
549
+ pskill(self.process_id, signal.SIGTERM)
550
+ else:
551
+ pskill(self.process_id, signal.SIGINT)
552
+
553
+ except OSError:
554
+ # This would happen if the child process ended between the check
555
+ # and the next kill step
556
+ still_running = False
557
+ time_waiting = time_waiting + sleeptime
558
+
559
+ # Now see if it's still running
560
+ time_waiting = time_waiting + sleeptime
561
+ if not self._attend_process(psutil.Process(self.process_id), sleeptime):
562
+ still_running = False
563
+
564
+ if still_running:
565
+ _LOGGER.warning(f"Unable to halt child process: {self.process_id}")
566
+ else:
567
+ if time_waiting > 0:
568
+ note = f"terminated after {time_waiting} sec"
569
+ else:
570
+ note = "was already terminated"
571
+ _LOGGER.warning(msg=f"Child process {self.process_id} {note}.")
572
+
573
+ def _attend_process(self, proc, sleeptime):
574
+ """
575
+ Waits on a process for a given time to see if it finishes, returns True
576
+ if it's still running after the given time or False as soon as it
577
+ returns.
578
+
579
+ :param psutil.Process proc: Process object opened by psutil.Popen()
580
+ :param float sleeptime: Time to wait
581
+ :return bool: True if process is still running; otherwise false
582
+ """
583
+ try:
584
+ proc.wait(timeout=int(sleeptime))
585
+ except psutil.TimeoutExpired:
586
+ return True
587
+ return False
588
+
458
589
  def _jobname(self, pool):
459
590
  """Create the name for a job submission."""
460
591
  return "{}_{}".format(self.pl_iface.pipeline_name, self._sample_lump_name(pool))
@@ -514,12 +645,7 @@ class SubmissionConductor(object):
514
645
  :return yacman.YAMLConfigManager: pipestat namespace
515
646
  """
516
647
  try:
517
- psms = (
518
- self.prj.get_pipestat_managers(sample_name)
519
- if sample_name
520
- else self.prj.get_pipestat_managers(project_level=True)
521
- )
522
- psm = psms[self.pl_iface.pipeline_name]
648
+ psm = self.pl_iface.psm
523
649
  except (PipestatError, AttributeError) as e:
524
650
  # pipestat section faulty or not found in project.looper or sample
525
651
  # or project is missing required pipestat attributes
@@ -534,6 +660,8 @@ class SubmissionConductor(object):
534
660
  "results_file": psm.file,
535
661
  "record_identifier": psm.record_identifier,
536
662
  "config_file": psm.config_path,
663
+ "output_schema": psm.cfg["_schema_path"],
664
+ "pephub_path": psm.cfg["pephub_path"],
537
665
  }
538
666
  filtered_namespace = {k: v for k, v in full_namespace.items() if v}
539
667
  return YAMLConfigManager(filtered_namespace)
@@ -557,7 +685,11 @@ class SubmissionConductor(object):
557
685
  pipeline=self.pl_iface,
558
686
  compute=self.prj.dcc.compute,
559
687
  )
560
- templ = self.pl_iface["command_template"]
688
+
689
+ if self.pipeline_interface_type is None:
690
+ templ = self.pl_iface["command_template"]
691
+ else:
692
+ templ = self.pl_iface[self.pipeline_interface_type]["command_template"]
561
693
  if not self.override_extra:
562
694
  extras_template = (
563
695
  EXTRA_PROJECT_CMD_TEMPLATE
@@ -597,8 +729,10 @@ class SubmissionConductor(object):
597
729
  _LOGGER.debug(f"namespace pipelines: { pl_iface }")
598
730
 
599
731
  namespaces["pipeline"]["var_templates"] = pl_iface[VAR_TEMPL_KEY] or {}
600
- for k, v in namespaces["pipeline"]["var_templates"].items():
601
- namespaces["pipeline"]["var_templates"][k] = expandpath(v)
732
+
733
+ namespaces["pipeline"]["var_templates"] = expand_nested_var_templates(
734
+ namespaces["pipeline"]["var_templates"], namespaces
735
+ )
602
736
 
603
737
  # pre_submit hook namespace updates
604
738
  namespaces = _exec_pre_submit(pl_iface, namespaces)
@@ -607,7 +741,6 @@ class SubmissionConductor(object):
607
741
  argstring = jinja_render_template_strictly(
608
742
  template=templ, namespaces=namespaces
609
743
  )
610
- print(argstring)
611
744
  except UndefinedError as jinja_exception:
612
745
  _LOGGER.warning(NOT_SUB_MSG.format(str(jinja_exception)))
613
746
  except KeyError as e:
looper/const.py CHANGED
@@ -1,6 +1,7 @@
1
1
  """ Shared project constants """
2
2
 
3
3
  import os
4
+ from enum import Enum
4
5
 
5
6
  __author__ = "Databio lab"
6
7
  __email__ = "nathan@code.databio.org"
@@ -92,6 +93,7 @@ __all__ = [
92
93
  "DEBUG_EIDO_VALIDATION",
93
94
  "LOOPER_GENERIC_OUTPUT_SCHEMA",
94
95
  "LOOPER_GENERIC_COUNT_LINES",
96
+ "PipelineLevel",
95
97
  ]
96
98
 
97
99
  FLAGS = ["completed", "running", "failed", "waiting", "partial"]
@@ -268,3 +270,10 @@ MESSAGE_BY_SUBCOMMAND = {
268
270
  "init-piface": "Initialize generic pipeline interface.",
269
271
  "link": "Create directory of symlinks for reported results.",
270
272
  }
273
+
274
+ # Add project/sample enum
275
+
276
+
277
+ class PipelineLevel(Enum):
278
+ SAMPLE = "sample"
279
+ PROJECT = "project"
looper/divvy.py CHANGED
@@ -1,16 +1,14 @@
1
1
  """ Computing configuration representation """
2
2
 
3
3
  import logging
4
- import logmuse
5
4
  import os
6
- import sys
7
5
  import shutil
8
- import yaml
9
- from yaml import SafeLoader
6
+
7
+
10
8
  from shutil import copytree
9
+ from yacman import FutureYAMLConfigManager as YAMLConfigManager
10
+ from yacman import write_lock, FILEPATH_KEY, load_yaml, select_config
11
11
 
12
- from ubiquerg import is_writable, VersionInHelpParser
13
- import yacman
14
12
 
15
13
  from .const import (
16
14
  COMPUTE_SETTINGS_VARNAME,
@@ -21,14 +19,13 @@ from .const import (
21
19
  )
22
20
  from .utils import write_submit_script
23
21
 
24
- # from . import __version__
25
22
 
26
23
  _LOGGER = logging.getLogger(__name__)
27
24
 
28
25
  # This is the divvy.py submodule from divvy
29
26
 
30
27
 
31
- class ComputingConfiguration(yacman.YAMLConfigManager):
28
+ class ComputingConfiguration(YAMLConfigManager):
32
29
  """
33
30
  Represents computing configuration objects.
34
31
 
@@ -44,36 +41,31 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
44
41
  `DIVCFG` file)
45
42
  """
46
43
 
47
- def __init__(self, entries=None, filepath=None):
48
- if not entries and not filepath:
49
- # Handle the case of an empty one, when we'll use the default
50
- filepath = select_divvy_config(None)
51
-
52
- super(ComputingConfiguration, self).__init__(
53
- entries=entries,
54
- filepath=filepath,
55
- schema_source=DEFAULT_CONFIG_SCHEMA,
56
- validate_on_write=True,
44
+ def __init__(
45
+ self,
46
+ entries=None,
47
+ wait_max=None,
48
+ strict_ro_locks=False,
49
+ schema_source=None,
50
+ validate_on_write=False,
51
+ ):
52
+ super().__init__(
53
+ entries, wait_max, strict_ro_locks, schema_source, validate_on_write
57
54
  )
58
55
 
59
- if not "compute_packages" in self:
60
- raise Exception(
61
- "Your divvy config file is not in divvy config format "
62
- "(it lacks a compute_packages section): '{}'".format(filepath)
63
- )
64
- # We require that compute_packages be present, even if empty
56
+ if "compute_packages" not in self:
65
57
  self["compute_packages"] = {}
66
-
67
58
  # Initialize default compute settings.
68
59
  _LOGGER.debug("Establishing project compute settings")
69
60
  self.compute = None
70
61
  self.setdefault("adapters", None)
71
62
  self.activate_package(DEFAULT_COMPUTE_RESOURCES_NAME)
72
- self.config_file = self.filepath
73
63
 
74
64
  def write(self, filename=None):
75
- super(ComputingConfiguration, self).write(filepath=filename, exclude_case=True)
76
- filename = filename or getattr(self, yacman.FILEPATH_KEY)
65
+ with write_lock(self) as locked_ym:
66
+ locked_ym.rebase()
67
+ locked_ym.write()
68
+ filename = filename or getattr(self, FILEPATH_KEY)
77
69
  filedir = os.path.dirname(filename)
78
70
  # For this object, we *also* have to write the template files
79
71
  for pkg_name, pkg in self["compute_packages"].items():
@@ -119,9 +111,12 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
119
111
 
120
112
  :return str: path to folder with default submission templates
121
113
  """
122
- return os.path.join(
123
- os.path.dirname(__file__), "default_config", "divvy_templates"
124
- )
114
+ if self.filepath:
115
+ return os.path.join(os.path.dirname(self.filepath), "divvy_templates")
116
+ else:
117
+ return os.path.join(
118
+ os.path.dirname(__file__), "default_config", "divvy_templates"
119
+ )
125
120
 
126
121
  def activate_package(self, package_name):
127
122
  """
@@ -151,23 +146,30 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
151
146
  # Augment compute, creating it if needed.
152
147
  if self.compute is None:
153
148
  _LOGGER.debug("Creating Project compute")
154
- self.compute = yacman.YAMLConfigManager()
149
+ self.compute = YAMLConfigManager()
155
150
  _LOGGER.debug(
156
151
  "Adding entries for package_name '{}'".format(package_name)
157
152
  )
158
153
 
159
- self.compute.update(self["compute_packages"][package_name])
154
+ self.compute.update_from_obj(self["compute_packages"][package_name])
160
155
 
161
156
  # Ensure submission template is absolute. This *used to be* handled
162
157
  # at update (so the paths were stored as absolutes in the packages),
163
158
  # but now, it makes more sense to do it here so we can piggyback on
164
159
  # the default update() method and not even have to do that.
165
160
  if not os.path.isabs(self.compute["submission_template"]):
161
+
166
162
  try:
167
- self.compute["submission_template"] = os.path.join(
168
- os.path.dirname(self.filepath),
169
- self.compute["submission_template"],
170
- )
163
+ if self.filepath:
164
+ self.compute["submission_template"] = os.path.join(
165
+ os.path.dirname(self.filepath),
166
+ self.compute["submission_template"],
167
+ )
168
+ else:
169
+ self.compute["submission_template"] = os.path.join(
170
+ os.path.dirname(self.default_config_file),
171
+ self.compute["submission_template"],
172
+ )
171
173
  except AttributeError as e:
172
174
  # Environment and environment compute should at least have been
173
175
  # set as null-valued attributes, so execution here is an error.
@@ -200,14 +202,19 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
200
202
  self.reset_active_settings()
201
203
  return self.activate_package(package_name)
202
204
 
203
- def get_active_package(self):
205
+ def get_active_package(self) -> YAMLConfigManager:
204
206
  """
205
207
  Returns settings for the currently active compute package
206
208
 
207
- :return yacman.YacAttMap: data defining the active compute package
209
+ :return YAMLConfigManager: data defining the active compute package
208
210
  """
209
211
  return self.compute
210
212
 
213
+ @property
214
+ def compute_packages(self):
215
+
216
+ return self["compute_packages"]
217
+
211
218
  def list_compute_packages(self):
212
219
  """
213
220
  Returns a list of available compute packages.
@@ -222,7 +229,7 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
222
229
 
223
230
  :return bool: success flag
224
231
  """
225
- self.compute = yacman.YacAttMap()
232
+ self.compute = YAMLConfigManager()
226
233
  return True
227
234
 
228
235
  def update_packages(self, config_file):
@@ -235,11 +242,11 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
235
242
 
236
243
  :param str config_file: path to file with new divvy configuration data
237
244
  """
238
- entries = yacman.load_yaml(config_file)
245
+ entries = load_yaml(config_file)
239
246
  self.update(entries)
240
247
  return True
241
248
 
242
- def get_adapters(self):
249
+ def get_adapters(self) -> YAMLConfigManager:
243
250
  """
244
251
  Get current adapters, if defined.
245
252
 
@@ -248,9 +255,9 @@ class ComputingConfiguration(yacman.YAMLConfigManager):
248
255
  package-specific set of adapters, if any defined in 'adapters' section
249
256
  under currently active compute package.
250
257
 
251
- :return yacman.YAMLConfigManager: current adapters mapping
258
+ :return YAMLConfigManager: current adapters mapping
252
259
  """
253
- adapters = yacman.YAMLConfigManager()
260
+ adapters = YAMLConfigManager()
254
261
  if "adapters" in self and self["adapters"] is not None:
255
262
  adapters.update(self["adapters"])
256
263
  if "compute" in self and "adapters" in self.compute:
@@ -376,7 +383,7 @@ def select_divvy_config(filepath):
376
383
  :param str | NoneType filepath: direct file path specification
377
384
  :return str: path to the config file to read
378
385
  """
379
- divcfg = yacman.select_config(
386
+ divcfg = select_config(
380
387
  config_filepath=filepath,
381
388
  config_env_vars=COMPUTE_SETTINGS_VARNAME,
382
389
  default_config_filepath=DEFAULT_CONFIG_FILEPATH,
@@ -404,11 +411,13 @@ def divvy_init(config_path, template_config_path):
404
411
  _LOGGER.error("You must specify a template config file path.")
405
412
  return
406
413
 
414
+ if not os.path.isabs(config_path):
415
+ config_path = os.path.abspath(config_path)
416
+
407
417
  if config_path and not os.path.exists(config_path):
408
- # dcc.write(config_path)
409
418
  # Init should *also* write the templates.
410
419
  dest_folder = os.path.dirname(config_path)
411
- copytree(os.path.dirname(template_config_path), dest_folder)
420
+ copytree(os.path.dirname(template_config_path), dest_folder, dirs_exist_ok=True)
412
421
  template_subfolder = os.path.join(dest_folder, "divvy_templates")
413
422
  _LOGGER.info("Wrote divvy templates to folder: {}".format(template_subfolder))
414
423
  new_template = os.path.join(
looper/exceptions.py CHANGED
@@ -15,6 +15,7 @@ _all__ = [
15
15
  "PipelineInterfaceConfigError",
16
16
  "PipelineInterfaceRequirementsError",
17
17
  "MisconfigurationException",
18
+ "LooperReportError",
18
19
  ]
19
20
 
20
21
 
@@ -31,7 +32,7 @@ class SampleFailedException(LooperError):
31
32
 
32
33
 
33
34
  class MisconfigurationException(LooperError):
34
- """Duplication of pipeline identifier precludes unique pipeline ref."""
35
+ """Looper not properly configured"""
35
36
 
36
37
  def __init__(self, key):
37
38
  super(MisconfigurationException, self).__init__(key)
@@ -109,3 +110,10 @@ class PipelineInterfaceRequirementsError(LooperError):
109
110
  )
110
111
  )
111
112
  self.error_specs = typename_by_requirement
113
+
114
+
115
+ class LooperReportError(LooperError):
116
+ """Looper reporting errors"""
117
+
118
+ def __init__(self, reason):
119
+ super(LooperReportError, self).__init__(reason)