toil 6.0.0__py3-none-any.whl → 6.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. toil/batchSystems/abstractBatchSystem.py +19 -4
  2. toil/batchSystems/abstractGridEngineBatchSystem.py +22 -22
  3. toil/batchSystems/cleanup_support.py +7 -3
  4. toil/batchSystems/lsf.py +7 -7
  5. toil/batchSystems/slurm.py +85 -14
  6. toil/bus.py +38 -0
  7. toil/common.py +20 -18
  8. toil/cwl/cwltoil.py +81 -63
  9. toil/exceptions.py +1 -1
  10. toil/fileStores/abstractFileStore.py +53 -4
  11. toil/fileStores/cachingFileStore.py +4 -20
  12. toil/fileStores/nonCachingFileStore.py +5 -14
  13. toil/job.py +46 -30
  14. toil/jobStores/abstractJobStore.py +21 -23
  15. toil/jobStores/aws/utils.py +5 -4
  16. toil/jobStores/fileJobStore.py +1 -1
  17. toil/leader.py +17 -14
  18. toil/lib/conversions.py +19 -0
  19. toil/lib/generatedEC2Lists.py +8 -8
  20. toil/lib/io.py +28 -2
  21. toil/lib/resources.py +8 -1
  22. toil/lib/threading.py +27 -12
  23. toil/options/common.py +5 -7
  24. toil/options/wdl.py +5 -0
  25. toil/provisioners/abstractProvisioner.py +8 -0
  26. toil/statsAndLogging.py +36 -8
  27. toil/test/batchSystems/test_slurm.py +21 -6
  28. toil/test/cactus/__init__.py +0 -0
  29. toil/test/cactus/test_cactus_integration.py +58 -0
  30. toil/test/cwl/cwlTest.py +243 -151
  31. toil/test/docs/scriptsTest.py +2 -2
  32. toil/test/jobStores/jobStoreTest.py +7 -5
  33. toil/test/lib/test_ec2.py +1 -1
  34. toil/test/options/__init__.py +13 -0
  35. toil/test/options/options.py +37 -0
  36. toil/test/provisioners/clusterTest.py +9 -8
  37. toil/test/utils/toilDebugTest.py +1 -1
  38. toil/test/utils/utilsTest.py +3 -3
  39. toil/test/wdl/wdltoil_test.py +91 -16
  40. toil/utils/toilDebugFile.py +1 -1
  41. toil/utils/toilStats.py +309 -266
  42. toil/utils/toilStatus.py +1 -1
  43. toil/version.py +9 -9
  44. toil/wdl/wdltoil.py +341 -189
  45. toil/worker.py +31 -16
  46. {toil-6.0.0.dist-info → toil-6.1.0.dist-info}/METADATA +6 -7
  47. {toil-6.0.0.dist-info → toil-6.1.0.dist-info}/RECORD +51 -47
  48. {toil-6.0.0.dist-info → toil-6.1.0.dist-info}/LICENSE +0 -0
  49. {toil-6.0.0.dist-info → toil-6.1.0.dist-info}/WHEEL +0 -0
  50. {toil-6.0.0.dist-info → toil-6.1.0.dist-info}/entry_points.txt +0 -0
  51. {toil-6.0.0.dist-info → toil-6.1.0.dist-info}/top_level.txt +0 -0
@@ -69,8 +69,8 @@ class ToilDocumentationTest(ToilTest):
69
69
  def testCwlexample(self):
70
70
  self.checkExitCode("tutorial_cwlexample.py")
71
71
 
72
- def testDiscoverfiles(self):
73
- self.checkExitCode("tutorial_discoverfiles.py")
72
+ def testStats(self):
73
+ self.checkExitCode("tutorial_stats.py")
74
74
 
75
75
  def testDynamic(self):
76
76
  self.checkExitCode("tutorial_dynamic.py")
@@ -40,7 +40,6 @@ from toil.job import Job, JobDescription, TemporaryID
40
40
  from toil.jobStores.abstractJobStore import (NoSuchFileException,
41
41
  NoSuchJobException)
42
42
  from toil.jobStores.fileJobStore import FileJobStore
43
- from toil.lib.aws.utils import create_s3_bucket, get_object_for_url
44
43
  from toil.lib.io import mkdtemp
45
44
  from toil.lib.memoize import memoize
46
45
  from toil.lib.retry import retry
@@ -548,14 +547,16 @@ class AbstractJobStoreTest:
548
547
  jobNames = ['testStatsAndLogging_writeLogFiles']
549
548
  jobLogList = ['string', b'bytes', '', b'newline\n']
550
549
  config = self._createConfig()
551
- setattr(config, 'writeLogs', '.')
550
+ setattr(config, 'writeLogs', self._createTempDir())
552
551
  setattr(config, 'writeLogsGzip', None)
553
552
  StatsAndLogging.writeLogFiles(jobNames, jobLogList, config)
554
- jobLogFile = os.path.join(config.writeLogs, jobNames[0] + '000.log')
553
+ jobLogFile = os.path.join(config.writeLogs, jobNames[0] + '_000.log')
554
+ # The log directory should get exactly one file, named after this
555
+ # easy job name with no replacements needed.
556
+ self.assertEqual(os.listdir(config.writeLogs), [os.path.basename(jobLogFile)])
555
557
  self.assertTrue(os.path.isfile(jobLogFile))
556
558
  with open(jobLogFile) as f:
557
559
  self.assertEqual(f.read(), 'string\nbytes\n\nnewline\n')
558
- os.remove(jobLogFile)
559
560
 
560
561
  def testBatchCreate(self):
561
562
  """Test creation of many jobs."""
@@ -1463,6 +1464,7 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1463
1464
 
1464
1465
  def _hashTestFile(self, url: str) -> str:
1465
1466
  from toil.jobStores.aws.jobStore import AWSJobStore
1467
+ from toil.lib.aws.utils import get_object_for_url
1466
1468
  str(AWSJobStore) # to prevent removal of that import
1467
1469
  key = get_object_for_url(urlparse.urlparse(url), existing=True)
1468
1470
  contents = key.get().get('Body').read()
@@ -1471,7 +1473,7 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1471
1473
  def _createExternalStore(self):
1472
1474
  """A S3.Bucket instance is returned"""
1473
1475
  from toil.jobStores.aws.jobStore import establish_boto3_session
1474
- from toil.lib.aws.utils import retry_s3
1476
+ from toil.lib.aws.utils import retry_s3, create_s3_bucket
1475
1477
 
1476
1478
  resource = establish_boto3_session().resource(
1477
1479
  "s3", region_name=self.awsRegion()
toil/test/lib/test_ec2.py CHANGED
@@ -20,7 +20,6 @@ from toil.lib.aws.ami import (aws_marketplace_flatcar_ami_search,
20
20
  feed_flatcar_ami_release,
21
21
  flatcar_release_feed_amis,
22
22
  get_flatcar_ami)
23
- from toil.lib.aws.session import establish_boto3_session
24
23
  from toil.test import ToilTest, needs_aws_ec2, needs_online
25
24
 
26
25
  logger = logging.getLogger(__name__)
@@ -59,6 +58,7 @@ class FlatcarFeedTest(ToilTest):
59
58
  class AMITest(ToilTest):
60
59
  @classmethod
61
60
  def setUpClass(cls):
61
+ from toil.lib.aws.session import establish_boto3_session
62
62
  session = establish_boto3_session(region_name='us-west-2')
63
63
  cls.ec2_client = session.client('ec2')
64
64
 
@@ -0,0 +1,13 @@
1
+ # Copyright (C) 2015-2021 Regents of the University of California
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
@@ -0,0 +1,37 @@
1
+ from configargparse import ArgParser
2
+
3
+ from toil.common import addOptions, Toil
4
+ from toil.test import ToilTest
5
+
6
+
7
+ class OptionsTest(ToilTest):
8
+ """
9
+ Class to test functionality of all Toil options
10
+ """
11
+ def test_default_caching_slurm(self):
12
+ """
13
+ Test to ensure that caching will be set to false when running on Slurm
14
+ :return:
15
+ """
16
+ parser = ArgParser()
17
+ addOptions(parser, jobstore_as_flag=True, wdl=False, cwl=False)
18
+ test_args = ["--jobstore=example-jobstore", "--batchSystem=slurm"]
19
+ options = parser.parse_args(test_args)
20
+ with Toil(options) as toil:
21
+ caching_value = toil.config.caching
22
+ self.assertEqual(caching_value, False)
23
+
24
+ def test_caching_option_priority(self):
25
+ """
26
+ Test to ensure that the --caching option takes priority over the default_caching() return value
27
+ :return:
28
+ """
29
+ parser = ArgParser()
30
+ addOptions(parser, jobstore_as_flag=True, wdl=False, cwl=False)
31
+ # the kubernetes batchsystem (and I think all batchsystems including singlemachine) return False
32
+ # for default_caching
33
+ test_args = ["--jobstore=example-jobstore", "--batchSystem=kubernetes", "--caching=True"]
34
+ options = parser.parse_args(test_args)
35
+ with Toil(options) as toil:
36
+ caching_value = toil.config.caching
37
+ self.assertEqual(caching_value, True)
@@ -17,6 +17,7 @@ import os
17
17
  import subprocess
18
18
  import time
19
19
  from uuid import uuid4
20
+ from typing import Optional, List
20
21
 
21
22
  from toil.lib.aws import zone_to_region
22
23
  from toil.lib.retry import retry
@@ -28,7 +29,7 @@ log = logging.getLogger(__name__)
28
29
  @needs_aws_ec2
29
30
  @needs_fetchable_appliance
30
31
  class AbstractClusterTest(ToilTest):
31
- def __init__(self, methodName):
32
+ def __init__(self, methodName: str) -> None:
32
33
  super().__init__(methodName=methodName)
33
34
  self.keyName = os.getenv('TOIL_AWS_KEYNAME').strip() or 'id_rsa'
34
35
  self.clusterName = 'aws-provisioner-test-' + str(uuid4())
@@ -43,13 +44,13 @@ class AbstractClusterTest(ToilTest):
43
44
  # Where should we put our virtualenv?
44
45
  self.venvDir = '/tmp/venv'
45
46
 
46
- def python(self):
47
+ def python(self) -> str:
47
48
  """
48
49
  Return the full path to the venv Python on the leader.
49
50
  """
50
51
  return os.path.join(self.venvDir, 'bin/python')
51
52
 
52
- def pip(self):
53
+ def pip(self) -> str:
53
54
  """
54
55
  Return the full path to the venv pip on the leader.
55
56
  """
@@ -63,7 +64,7 @@ class AbstractClusterTest(ToilTest):
63
64
  """
64
65
  subprocess.check_call(['toil', 'destroy-cluster', '-p=aws', '-z', self.zone, self.clusterName])
65
66
 
66
- def setUp(self):
67
+ def setUp(self) -> None:
67
68
  """
68
69
  Set up for the test.
69
70
  Must be overridden to call this method and set self.jobStore.
@@ -73,13 +74,13 @@ class AbstractClusterTest(ToilTest):
73
74
  # If this fails, no tests will run.
74
75
  self.destroyCluster()
75
76
 
76
- def tearDown(self):
77
+ def tearDown(self) -> None:
77
78
  # Note that teardown will run even if the test crashes.
78
79
  super().tearDown()
79
80
  self.destroyCluster()
80
81
  subprocess.check_call(['toil', 'clean', self.jobStore])
81
82
 
82
- def sshUtil(self, command):
83
+ def sshUtil(self, command: List[str]) -> None:
83
84
  """
84
85
  Run the given command on the cluster.
85
86
  Raise subprocess.CalledProcessError if it fails.
@@ -155,7 +156,7 @@ class AbstractClusterTest(ToilTest):
155
156
  subprocess.check_call(cmd)
156
157
 
157
158
  @retry(errors=[subprocess.CalledProcessError], intervals=[1, 1])
158
- def createClusterUtil(self, args=None):
159
+ def createClusterUtil(self, args: Optional[List[str]]=None) -> None:
159
160
  args = [] if args is None else args
160
161
 
161
162
  command = ['toil', 'launch-cluster', '-p=aws', '-z', self.zone, f'--keyPairName={self.keyName}',
@@ -167,5 +168,5 @@ class AbstractClusterTest(ToilTest):
167
168
  subprocess.check_call(command)
168
169
  # If we fail, tearDown will destroy the cluster.
169
170
 
170
- def launchCluster(self):
171
+ def launchCluster(self) -> None:
171
172
  self.createClusterUtil()
@@ -142,7 +142,7 @@ class DebugJobTest(ToilTest):
142
142
  os.path.abspath("src/toil/test/docs/scripts/example_alwaysfail.py"),
143
143
  "--retryCount=0",
144
144
  "--logCritical",
145
- "--disableProgress=True",
145
+ "--disableProgress",
146
146
  job_store
147
147
  ], stderr=subprocess.DEVNULL)
148
148
  raise RuntimeError("Failing workflow succeeded!")
@@ -39,7 +39,7 @@ from toil.test import (ToilTest,
39
39
  needs_rsync3,
40
40
  slow)
41
41
  from toil.test.sort.sortTest import makeFileToSort
42
- from toil.utils.toilStats import getStats, processData
42
+ from toil.utils.toilStats import get_stats, process_data
43
43
  from toil.utils.toilStatus import ToilStatus
44
44
  from toil.version import python
45
45
 
@@ -298,8 +298,8 @@ class UtilsTest(ToilTest):
298
298
  config = Config()
299
299
  config.setOptions(options)
300
300
  jobStore = Toil.resumeJobStore(config.jobStore)
301
- stats = getStats(jobStore)
302
- collatedStats = processData(jobStore.config, stats)
301
+ stats = get_stats(jobStore)
302
+ collatedStats = process_data(jobStore.config, stats)
303
303
  self.assertTrue(len(collatedStats.job_types) == 2, "Some jobs are not represented in the stats.")
304
304
 
305
305
  def check_status(self, status, status_fn, seconds=20):
@@ -3,27 +3,31 @@ import os
3
3
  import shutil
4
4
  import subprocess
5
5
  import unittest
6
- import uuid
7
- from typing import Any, Dict, List, Optional, Set
8
- from unittest.mock import patch
6
+ from uuid import uuid4
7
+ from typing import Optional
9
8
 
10
9
  from unittest.mock import patch
11
10
  from typing import Any, Dict, List, Set
12
11
 
12
+ import pytest
13
+
14
+ from toil.provisioners import cluster_factory
13
15
  from toil.test import (ToilTest,
14
16
  needs_docker_cuda,
15
17
  needs_google_storage,
16
18
  needs_singularity_or_docker,
17
- slow)
19
+ slow, integrative)
20
+ from toil.test.provisioners.clusterTest import AbstractClusterTest
18
21
  from toil.version import exactPython
19
22
  from toil.wdl.wdltoil import WDLSectionJob, WDLWorkflowGraph
20
23
 
21
24
 
22
25
  class BaseWDLTest(ToilTest):
23
26
  """Base test class for WDL tests."""
27
+
24
28
  def setUp(self) -> None:
25
29
  """Runs anew before each test to create farm fresh temp dirs."""
26
- self.output_dir = os.path.join('/tmp/', 'toil-wdl-test-' + str(uuid.uuid4()))
30
+ self.output_dir = os.path.join('/tmp/', 'toil-wdl-test-' + str(uuid4()))
27
31
  os.makedirs(self.output_dir)
28
32
 
29
33
  def tearDown(self) -> None:
@@ -36,6 +40,7 @@ class WDLConformanceTests(BaseWDLTest):
36
40
  WDL conformance tests for Toil.
37
41
  """
38
42
  wdl_dir = "wdl-conformance-tests"
43
+
39
44
  @classmethod
40
45
  def setUpClass(cls) -> None:
41
46
 
@@ -87,6 +92,7 @@ class WDLConformanceTests(BaseWDLTest):
87
92
 
88
93
  class WDLTests(BaseWDLTest):
89
94
  """Tests for Toil's MiniWDL-based implementation."""
95
+
90
96
  @classmethod
91
97
  def setUpClass(cls) -> None:
92
98
  """Runs once for all tests."""
@@ -101,7 +107,8 @@ class WDLTests(BaseWDLTest):
101
107
  wdl = os.path.abspath('src/toil/test/wdl/md5sum/md5sum.1.0.wdl')
102
108
  json_file = os.path.abspath('src/toil/test/wdl/md5sum/md5sum.json')
103
109
 
104
- result_json = subprocess.check_output(self.base_command + [wdl, json_file, '-o', self.output_dir, '--logDebug', '--retryCount=0'])
110
+ result_json = subprocess.check_output(
111
+ self.base_command + [wdl, json_file, '-o', self.output_dir, '--logDebug', '--retryCount=0'])
105
112
  result = json.loads(result_json)
106
113
 
107
114
  assert 'ga4ghMd5.value' in result
@@ -115,7 +122,9 @@ class WDLTests(BaseWDLTest):
115
122
  wdl_file = os.path.abspath('src/toil/test/wdl/miniwdl_self_test/self_test.wdl')
116
123
  json_file = os.path.abspath('src/toil/test/wdl/miniwdl_self_test/inputs.json')
117
124
 
118
- result_json = subprocess.check_output(self.base_command + [wdl_file, json_file, '--logDebug', '-o', self.output_dir, '--outputDialect', 'miniwdl'] + (extra_args or []))
125
+ result_json = subprocess.check_output(
126
+ self.base_command + [wdl_file, json_file, '--logDebug', '-o', self.output_dir, '--outputDialect',
127
+ 'miniwdl'] + (extra_args or []))
119
128
  result = json.loads(result_json)
120
129
 
121
130
  # Expect MiniWDL-style output with a designated "dir"
@@ -172,7 +181,8 @@ class WDLTests(BaseWDLTest):
172
181
  "GiraffeDeepVariant.runDeepVariantCallVariants.in_dv_gpu_container": "google/deepvariant:1.3.0-gpu"
173
182
  })
174
183
 
175
- result_json = subprocess.check_output(self.base_command + [wdl_file, json_file, '-o', self.output_dir, '--outputDialect', 'miniwdl'])
184
+ result_json = subprocess.check_output(
185
+ self.base_command + [wdl_file, json_file, '-o', self.output_dir, '--outputDialect', 'miniwdl'])
176
186
  result = json.loads(result_json)
177
187
 
178
188
  # Expect MiniWDL-style output with a designated "dir"
@@ -200,7 +210,9 @@ class WDLTests(BaseWDLTest):
200
210
  wdl_file = f"{base_uri}/workflows/giraffe.wdl"
201
211
  json_file = f"{base_uri}/params/giraffe.json"
202
212
 
203
- result_json = subprocess.check_output(self.base_command + [wdl_file, json_file, '-o', self.output_dir, '--outputDialect', 'miniwdl', '--scale', '0.1'])
213
+ result_json = subprocess.check_output(
214
+ self.base_command + [wdl_file, json_file, '-o', self.output_dir, '--outputDialect', 'miniwdl', '--scale',
215
+ '0.1'])
204
216
  result = json.loads(result_json)
205
217
 
206
218
  # Expect MiniWDL-style output with a designated "dir"
@@ -265,7 +277,6 @@ class WDLTests(BaseWDLTest):
265
277
  # worth extracting a base type for this interface.
266
278
  with patch.object(WDLWorkflowGraph, 'is_decl', mock_is_decl):
267
279
  with patch.object(WDLWorkflowGraph, 'get_transitive_dependencies', mock_get_transitive_dependencies):
268
-
269
280
  with self.subTest(msg="Two unrelated decls can coalesce"):
270
281
  # Set up two unrelated decls
271
282
  all_decls = {"decl1", "decl2"}
@@ -275,7 +286,7 @@ class WDLTests(BaseWDLTest):
275
286
  }
276
287
 
277
288
  result = WDLSectionJob.coalesce_nodes(["decl1", "decl2"], WDLWorkflowGraph([]))
278
-
289
+
279
290
  # Make sure they coalesced
280
291
  assert len(result) == 1
281
292
  assert "decl1" in result[0]
@@ -289,12 +300,11 @@ class WDLTests(BaseWDLTest):
289
300
  }
290
301
 
291
302
  result = WDLSectionJob.coalesce_nodes(["decl", "nondecl"], WDLWorkflowGraph([]))
292
-
303
+
293
304
  assert len(result) == 2
294
305
  assert len(result[0]) == 1
295
306
  assert len(result[1]) == 1
296
307
 
297
-
298
308
  with self.subTest(msg="Two adjacent decls with a common dependency can coalesce"):
299
309
  all_decls = {"decl1", "decl2"}
300
310
  all_deps = {
@@ -304,7 +314,7 @@ class WDLTests(BaseWDLTest):
304
314
  }
305
315
 
306
316
  result = WDLSectionJob.coalesce_nodes(["base", "decl1", "decl2"], WDLWorkflowGraph([]))
307
-
317
+
308
318
  assert len(result) == 2
309
319
  assert "base" in result[0]
310
320
  assert "decl1" in result[1]
@@ -319,7 +329,7 @@ class WDLTests(BaseWDLTest):
319
329
  }
320
330
 
321
331
  result = WDLSectionJob.coalesce_nodes(["base", "decl1", "decl2"], WDLWorkflowGraph([]))
322
-
332
+
323
333
  assert len(result) == 3
324
334
  assert "base" in result[0]
325
335
 
@@ -332,12 +342,77 @@ class WDLTests(BaseWDLTest):
332
342
  }
333
343
 
334
344
  result = WDLSectionJob.coalesce_nodes(["decl1", "decl2", "successor"], WDLWorkflowGraph([]))
335
-
345
+
336
346
  assert len(result) == 2
337
347
  assert "decl1" in result[0]
338
348
  assert "decl2" in result[0]
339
349
  assert "successor" in result[1]
340
350
 
341
351
 
352
+ @integrative
353
+ @slow
354
+ @pytest.mark.timeout(600)
355
+ class WDLKubernetesClusterTest(AbstractClusterTest):
356
+ """
357
+ Ensure WDL works on the Kubernetes batchsystem.
358
+ """
359
+
360
+ def __init__(self, name):
361
+ super().__init__(name)
362
+ self.clusterName = 'wdl-integration-test-' + str(uuid4())
363
+ # t2.medium is the minimum t2 instance that permits Kubernetes
364
+ self.leaderNodeType = "t2.medium"
365
+ self.instanceTypes = ["t2.medium"]
366
+ self.clusterType = "kubernetes"
367
+
368
+ def setUp(self) -> None:
369
+ super().setUp()
370
+ self.jobStore = f'aws:{self.awsRegion()}:wdl-test-{uuid4()}'
371
+
372
+ def launchCluster(self) -> None:
373
+ self.createClusterUtil(args=['--leaderStorage', str(self.requestedLeaderStorage),
374
+ '--nodeTypes', ",".join(self.instanceTypes),
375
+ '-w', ",".join(self.numWorkers),
376
+ '--nodeStorage', str(self.requestedLeaderStorage)])
377
+
378
+ def test_wdl_kubernetes_cluster(self):
379
+ """
380
+ Test that a wdl workflow works on a kubernetes cluster. Launches a cluster with 1 worker. This runs a wdl
381
+ workflow that performs an image pull on the worker.
382
+ :return:
383
+ """
384
+ self.numWorkers = "1"
385
+ self.requestedLeaderStorage = 30
386
+ # create the cluster
387
+ self.launchCluster()
388
+ # get leader
389
+ self.cluster = cluster_factory(
390
+ provisioner="aws", zone=self.zone, clusterName=self.clusterName
391
+ )
392
+ self.leader = self.cluster.getLeader()
393
+
394
+ url = "https://github.com/DataBiosphere/wdl-conformance-tests.git"
395
+ commit = "09b9659cd01473e836738a2e0dd205df0adb49c5"
396
+ wdl_dir = "wdl_conformance_tests"
397
+
398
+ # get the wdl-conformance-tests repo to get WDL tasks to run
399
+ self.sshUtil([
400
+ "bash",
401
+ "-c",
402
+ f"git clone {url} {wdl_dir} && cd {wdl_dir} && git checkout {commit}"
403
+ ])
404
+
405
+ # run on kubernetes batchsystem
406
+ toil_options = ['--batchSystem=kubernetes',
407
+ f"--jobstore={self.jobStore}"]
408
+
409
+ # run WDL workflow that will run singularity
410
+ test_options = [f"tests/md5sum/md5sum.wdl", f"tests/md5sum/md5sum.json"]
411
+ self.sshUtil([
412
+ "bash",
413
+ "-c",
414
+ f"cd {wdl_dir} && toil-wdl-runner {' '.join(test_options)} {' '.join(toil_options)}"])
415
+
416
+
342
417
  if __name__ == "__main__":
343
418
  unittest.main() # run all tests
@@ -17,11 +17,11 @@ import logging
17
17
  import os.path
18
18
  import sys
19
19
  from typing import Optional
20
- from distutils.util import strtobool
21
20
 
22
21
  from toil.common import Config, Toil, parser_with_common_options
23
22
  from toil.jobStores.fileJobStore import FileJobStore
24
23
  from toil.lib.resources import glob
24
+ from toil.lib.conversions import strtobool
25
25
  from toil.statsAndLogging import set_logging_from_options
26
26
 
27
27
  logger = logging.getLogger(__name__)