toil 6.1.0a1__py3-none-any.whl → 8.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (193)
  1. toil/__init__.py +122 -315
  2. toil/batchSystems/__init__.py +1 -0
  3. toil/batchSystems/abstractBatchSystem.py +173 -89
  4. toil/batchSystems/abstractGridEngineBatchSystem.py +272 -148
  5. toil/batchSystems/awsBatch.py +244 -135
  6. toil/batchSystems/cleanup_support.py +26 -16
  7. toil/batchSystems/contained_executor.py +31 -28
  8. toil/batchSystems/gridengine.py +86 -50
  9. toil/batchSystems/htcondor.py +166 -89
  10. toil/batchSystems/kubernetes.py +632 -382
  11. toil/batchSystems/local_support.py +20 -15
  12. toil/batchSystems/lsf.py +134 -81
  13. toil/batchSystems/lsfHelper.py +13 -11
  14. toil/batchSystems/mesos/__init__.py +41 -29
  15. toil/batchSystems/mesos/batchSystem.py +290 -151
  16. toil/batchSystems/mesos/executor.py +79 -50
  17. toil/batchSystems/mesos/test/__init__.py +31 -23
  18. toil/batchSystems/options.py +46 -28
  19. toil/batchSystems/registry.py +53 -19
  20. toil/batchSystems/singleMachine.py +296 -125
  21. toil/batchSystems/slurm.py +603 -138
  22. toil/batchSystems/torque.py +47 -33
  23. toil/bus.py +186 -76
  24. toil/common.py +664 -368
  25. toil/cwl/__init__.py +1 -1
  26. toil/cwl/cwltoil.py +1136 -483
  27. toil/cwl/utils.py +17 -22
  28. toil/deferred.py +63 -42
  29. toil/exceptions.py +5 -3
  30. toil/fileStores/__init__.py +5 -5
  31. toil/fileStores/abstractFileStore.py +140 -60
  32. toil/fileStores/cachingFileStore.py +717 -269
  33. toil/fileStores/nonCachingFileStore.py +116 -87
  34. toil/job.py +1225 -368
  35. toil/jobStores/abstractJobStore.py +416 -266
  36. toil/jobStores/aws/jobStore.py +863 -477
  37. toil/jobStores/aws/utils.py +201 -120
  38. toil/jobStores/conftest.py +3 -2
  39. toil/jobStores/fileJobStore.py +292 -154
  40. toil/jobStores/googleJobStore.py +140 -74
  41. toil/jobStores/utils.py +36 -15
  42. toil/leader.py +668 -272
  43. toil/lib/accelerators.py +115 -18
  44. toil/lib/aws/__init__.py +74 -31
  45. toil/lib/aws/ami.py +122 -87
  46. toil/lib/aws/iam.py +284 -108
  47. toil/lib/aws/s3.py +31 -0
  48. toil/lib/aws/session.py +214 -39
  49. toil/lib/aws/utils.py +287 -231
  50. toil/lib/bioio.py +13 -5
  51. toil/lib/compatibility.py +11 -6
  52. toil/lib/conversions.py +104 -47
  53. toil/lib/docker.py +131 -103
  54. toil/lib/ec2.py +361 -199
  55. toil/lib/ec2nodes.py +174 -106
  56. toil/lib/encryption/_dummy.py +5 -3
  57. toil/lib/encryption/_nacl.py +10 -6
  58. toil/lib/encryption/conftest.py +1 -0
  59. toil/lib/exceptions.py +26 -7
  60. toil/lib/expando.py +5 -3
  61. toil/lib/ftp_utils.py +217 -0
  62. toil/lib/generatedEC2Lists.py +127 -19
  63. toil/lib/humanize.py +6 -2
  64. toil/lib/integration.py +341 -0
  65. toil/lib/io.py +141 -15
  66. toil/lib/iterables.py +4 -2
  67. toil/lib/memoize.py +12 -8
  68. toil/lib/misc.py +66 -21
  69. toil/lib/objects.py +2 -2
  70. toil/lib/resources.py +68 -15
  71. toil/lib/retry.py +126 -81
  72. toil/lib/threading.py +299 -82
  73. toil/lib/throttle.py +16 -15
  74. toil/options/common.py +843 -409
  75. toil/options/cwl.py +175 -90
  76. toil/options/runner.py +50 -0
  77. toil/options/wdl.py +73 -17
  78. toil/provisioners/__init__.py +117 -46
  79. toil/provisioners/abstractProvisioner.py +332 -157
  80. toil/provisioners/aws/__init__.py +70 -33
  81. toil/provisioners/aws/awsProvisioner.py +1145 -715
  82. toil/provisioners/clusterScaler.py +541 -279
  83. toil/provisioners/gceProvisioner.py +282 -179
  84. toil/provisioners/node.py +155 -79
  85. toil/realtimeLogger.py +34 -22
  86. toil/resource.py +137 -75
  87. toil/server/app.py +128 -62
  88. toil/server/celery_app.py +3 -1
  89. toil/server/cli/wes_cwl_runner.py +82 -53
  90. toil/server/utils.py +54 -28
  91. toil/server/wes/abstract_backend.py +64 -26
  92. toil/server/wes/amazon_wes_utils.py +21 -15
  93. toil/server/wes/tasks.py +121 -63
  94. toil/server/wes/toil_backend.py +142 -107
  95. toil/server/wsgi_app.py +4 -3
  96. toil/serviceManager.py +58 -22
  97. toil/statsAndLogging.py +224 -70
  98. toil/test/__init__.py +282 -183
  99. toil/test/batchSystems/batchSystemTest.py +460 -210
  100. toil/test/batchSystems/batch_system_plugin_test.py +90 -0
  101. toil/test/batchSystems/test_gridengine.py +173 -0
  102. toil/test/batchSystems/test_lsf_helper.py +67 -58
  103. toil/test/batchSystems/test_slurm.py +110 -49
  104. toil/test/cactus/__init__.py +0 -0
  105. toil/test/cactus/test_cactus_integration.py +56 -0
  106. toil/test/cwl/cwlTest.py +496 -287
  107. toil/test/cwl/measure_default_memory.cwl +12 -0
  108. toil/test/cwl/not_run_required_input.cwl +29 -0
  109. toil/test/cwl/scatter_duplicate_outputs.cwl +40 -0
  110. toil/test/cwl/seqtk_seq.cwl +1 -1
  111. toil/test/docs/scriptsTest.py +69 -46
  112. toil/test/jobStores/jobStoreTest.py +427 -264
  113. toil/test/lib/aws/test_iam.py +118 -50
  114. toil/test/lib/aws/test_s3.py +16 -9
  115. toil/test/lib/aws/test_utils.py +5 -6
  116. toil/test/lib/dockerTest.py +118 -141
  117. toil/test/lib/test_conversions.py +113 -115
  118. toil/test/lib/test_ec2.py +58 -50
  119. toil/test/lib/test_integration.py +104 -0
  120. toil/test/lib/test_misc.py +12 -5
  121. toil/test/mesos/MesosDataStructuresTest.py +23 -10
  122. toil/test/mesos/helloWorld.py +7 -6
  123. toil/test/mesos/stress.py +25 -20
  124. toil/test/options/__init__.py +13 -0
  125. toil/test/options/options.py +42 -0
  126. toil/test/provisioners/aws/awsProvisionerTest.py +320 -150
  127. toil/test/provisioners/clusterScalerTest.py +440 -250
  128. toil/test/provisioners/clusterTest.py +166 -44
  129. toil/test/provisioners/gceProvisionerTest.py +174 -100
  130. toil/test/provisioners/provisionerTest.py +25 -13
  131. toil/test/provisioners/restartScript.py +5 -4
  132. toil/test/server/serverTest.py +188 -141
  133. toil/test/sort/restart_sort.py +137 -68
  134. toil/test/sort/sort.py +134 -66
  135. toil/test/sort/sortTest.py +91 -49
  136. toil/test/src/autoDeploymentTest.py +141 -101
  137. toil/test/src/busTest.py +20 -18
  138. toil/test/src/checkpointTest.py +8 -2
  139. toil/test/src/deferredFunctionTest.py +49 -35
  140. toil/test/src/dockerCheckTest.py +32 -24
  141. toil/test/src/environmentTest.py +135 -0
  142. toil/test/src/fileStoreTest.py +539 -272
  143. toil/test/src/helloWorldTest.py +7 -4
  144. toil/test/src/importExportFileTest.py +61 -31
  145. toil/test/src/jobDescriptionTest.py +46 -21
  146. toil/test/src/jobEncapsulationTest.py +2 -0
  147. toil/test/src/jobFileStoreTest.py +74 -50
  148. toil/test/src/jobServiceTest.py +187 -73
  149. toil/test/src/jobTest.py +121 -71
  150. toil/test/src/miscTests.py +19 -18
  151. toil/test/src/promisedRequirementTest.py +82 -36
  152. toil/test/src/promisesTest.py +7 -6
  153. toil/test/src/realtimeLoggerTest.py +10 -6
  154. toil/test/src/regularLogTest.py +71 -37
  155. toil/test/src/resourceTest.py +80 -49
  156. toil/test/src/restartDAGTest.py +36 -22
  157. toil/test/src/resumabilityTest.py +9 -2
  158. toil/test/src/retainTempDirTest.py +45 -14
  159. toil/test/src/systemTest.py +12 -8
  160. toil/test/src/threadingTest.py +44 -25
  161. toil/test/src/toilContextManagerTest.py +10 -7
  162. toil/test/src/userDefinedJobArgTypeTest.py +8 -5
  163. toil/test/src/workerTest.py +73 -23
  164. toil/test/utils/toilDebugTest.py +103 -33
  165. toil/test/utils/toilKillTest.py +4 -5
  166. toil/test/utils/utilsTest.py +245 -106
  167. toil/test/wdl/wdltoil_test.py +818 -149
  168. toil/test/wdl/wdltoil_test_kubernetes.py +91 -0
  169. toil/toilState.py +120 -35
  170. toil/utils/toilConfig.py +13 -4
  171. toil/utils/toilDebugFile.py +44 -27
  172. toil/utils/toilDebugJob.py +214 -27
  173. toil/utils/toilDestroyCluster.py +11 -6
  174. toil/utils/toilKill.py +8 -3
  175. toil/utils/toilLaunchCluster.py +256 -140
  176. toil/utils/toilMain.py +37 -16
  177. toil/utils/toilRsyncCluster.py +32 -14
  178. toil/utils/toilSshCluster.py +49 -22
  179. toil/utils/toilStats.py +356 -273
  180. toil/utils/toilStatus.py +292 -139
  181. toil/utils/toilUpdateEC2Instances.py +3 -1
  182. toil/version.py +12 -12
  183. toil/wdl/utils.py +5 -5
  184. toil/wdl/wdltoil.py +3913 -1033
  185. toil/worker.py +367 -184
  186. {toil-6.1.0a1.dist-info → toil-8.0.0.dist-info}/LICENSE +25 -0
  187. toil-8.0.0.dist-info/METADATA +173 -0
  188. toil-8.0.0.dist-info/RECORD +253 -0
  189. {toil-6.1.0a1.dist-info → toil-8.0.0.dist-info}/WHEEL +1 -1
  190. toil-6.1.0a1.dist-info/METADATA +0 -125
  191. toil-6.1.0a1.dist-info/RECORD +0 -237
  192. {toil-6.1.0a1.dist-info → toil-8.0.0.dist-info}/entry_points.txt +0 -0
  193. {toil-6.1.0a1.dist-info → toil-8.0.0.dist-info}/top_level.txt +0 -0
@@ -22,27 +22,30 @@ class HelloWorldTest(ToilTest):
         options.logLevel = "INFO"
         Job.Runner.startToil(HelloWorld(), options)
 
+
 class HelloWorld(Job):
     def __init__(self):
-        Job.__init__(self, memory=100000, cores=1, disk="3G")
+        Job.__init__(self, memory=100000, cores=1, disk="3G")
 
     def run(self, fileStore):
         fileID = self.addChildJobFn(childFn, cores=1, memory="1M", disk="3G").rv()
         self.addFollowOn(FollowOn(fileID))
 
+
 def childFn(job):
     with job.fileStore.writeGlobalFileStream() as (fH, fileID):
         fH.write(b"Hello, World!")
         return fileID
 
+
 class FollowOn(Job):
-    def __init__(self,fileId):
+    def __init__(self, fileId):
         Job.__init__(self)
-        self.fileId=fileId
+        self.fileId = fileId
 
     def run(self, fileStore):
         tempDir = fileStore.getLocalTempDir()
-        tempFilePath = "/".join([tempDir,"LocalCopy"])
+        tempFilePath = "/".join([tempDir, "LocalCopy"])
         with fileStore.readGlobalFileStream(self.fileId) as globalFile:
             with open(tempFilePath, "wb") as localFile:
                 localFile.write(globalFile.read())
@@ -28,14 +28,16 @@ class ImportExportFileTest(ToilTest):
     def setUp(self):
         super().setUp()
         self.tmp_dir = self._createTempDir()
-        self.output_file_path = f'{self.tmp_dir}/out'
-        self.message_portion_1 = 'What do you get when you cross a seal and a polar bear?'
-        self.message_portion_2 = ' A polar bear.'
+        self.output_file_path = f"{self.tmp_dir}/out"
+        self.message_portion_1 = (
+            "What do you get when you cross a seal and a polar bear?"
+        )
+        self.message_portion_2 = " A polar bear."
 
     def create_file(self, content, executable=False):
-        file_path = f'{self.tmp_dir}/{uuid.uuid4()}'
+        file_path = f"{self.tmp_dir}/{uuid.uuid4()}"
 
-        with open(file_path, 'w') as f:
+        with open(file_path, "w") as f:
             f.write(content)
 
         if executable:
@@ -48,26 +50,44 @@ class ImportExportFileTest(ToilTest):
         with Toil(options) as toil:
             if not options.restart:
                 msg_portion_file_path = self.create_file(content=self.message_portion_1)
-                msg_portion_file_id = toil.importFile(f'file://{msg_portion_file_path}')
+                msg_portion_file_id = toil.importFile(f"file://{msg_portion_file_path}")
                 self.assertIsInstance(msg_portion_file_id, FileID)
-                self.assertEqual(os.stat(msg_portion_file_path).st_size, msg_portion_file_id.size)
+                self.assertEqual(
+                    os.stat(msg_portion_file_path).st_size, msg_portion_file_id.size
+                )
 
                 file_that_can_trigger_failure_when_job_starts = self.create_file(
-                    content='Time to freak out!' if fail else 'Keep calm and carry on.')
-                self.trigger_file_id = toil.importFile(f'file://{file_that_can_trigger_failure_when_job_starts}')
+                    content="Time to freak out!" if fail else "Keep calm and carry on."
+                )
+                self.trigger_file_id = toil.importFile(
+                    f"file://{file_that_can_trigger_failure_when_job_starts}"
+                )
                 workflow_final_output_file_id = toil.start(
-                    RestartingJob(msg_portion_file_id, self.trigger_file_id, self.message_portion_2))
+                    RestartingJob(
+                        msg_portion_file_id,
+                        self.trigger_file_id,
+                        self.message_portion_2,
+                    )
+                )
             else:
                 # TODO: We're hackily updating this file without using the
                 # correct FileStore interface. User code should not do this!
                 with toil._jobStore.update_file_stream(self.trigger_file_id) as f:
-                    f.write(('Time to freak out!' if fail else 'Keep calm and carry on.').encode('utf-8'))
+                    f.write(
+                        (
+                            "Time to freak out!" if fail else "Keep calm and carry on."
+                        ).encode("utf-8")
+                    )
 
                 workflow_final_output_file_id = toil.restart()
 
-            toil.exportFile(workflow_final_output_file_id, f'file://{self.output_file_path}')
+            toil.exportFile(
+                workflow_final_output_file_id, f"file://{self.output_file_path}"
+            )
             with open(self.output_file_path) as f:
-                self.assertEqual(f.read(), f'{self.message_portion_1}{self.message_portion_2}')
+                self.assertEqual(
+                    f.read(), f"{self.message_portion_1}{self.message_portion_2}"
+                )
 
     def _run_import_export_workflow(self, restart):
         options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
@@ -100,17 +120,21 @@ class ImportExportFileTest(ToilTest):
         with Toil(options) as toil:
             # TODO: test this with non-local (AWS, Google)
             # Note: this is somewhat done in src/toil/test/src/fileStoreTest.py
-            with self.subTest('Testing permissions are preserved for local importFile/exportFile'):
+            with self.subTest(
+                "Testing permissions are preserved for local importFile/exportFile"
+            ):
                 for executable in True, False:
-                    file_path = self.create_file(content='Hello', executable=executable)
+                    file_path = self.create_file(content="Hello", executable=executable)
                     initial_permissions = os.stat(file_path).st_mode & stat.S_IXUSR
-                    file_id = toil.importFile(f'file://{file_path}')
-                    toil.exportFile(file_id, f'file://{self.output_file_path}')
-                    current_permissions = os.stat(self.output_file_path).st_mode & stat.S_IXUSR
+                    file_id = toil.importFile(f"file://{file_path}")
+                    toil.exportFile(file_id, f"file://{self.output_file_path}")
+                    current_permissions = (
+                        os.stat(self.output_file_path).st_mode & stat.S_IXUSR
+                    )
                     assert initial_permissions == current_permissions
 
-            with self.subTest('Testing relative paths without the file:// schema.'):
-                relative_path_data = 'Everything is relative.'
+            with self.subTest("Testing relative paths without the file:// schema."):
+                relative_path_data = "Everything is relative."
                 file_path = self.create_file(content=relative_path_data)
 
                 file_id = toil.importFile(os.path.relpath(file_path))
@@ -118,31 +142,37 @@ class ImportExportFileTest(ToilTest):
                 with open(self.output_file_path) as f:
                     self.assertEqual(f.read(), relative_path_data)
 
-            with self.subTest('Test local importFile accepts a shared_file_name.'):
+            with self.subTest("Test local importFile accepts a shared_file_name."):
                 # TODO: whyyyy do we allow this? shared file names are not unique and can overwrite each other
                 # ...not only that... we can't use exportFile on them afterwards!?
-                file_path = self.create_file(content='why')
-                shared_file_name = 'users_should_probably_not_be_allowed_to_make_shared_files.bad'
-                toil.importFile(f'file://{file_path}', sharedFileName=shared_file_name)
-                with toil._jobStore.read_shared_file_stream(shared_file_name, encoding='utf-8') as f:
-                    self.assertEqual(f.read(), 'why')
+                file_path = self.create_file(content="why")
+                shared_file_name = (
+                    "users_should_probably_not_be_allowed_to_make_shared_files.bad"
+                )
+                toil.importFile(f"file://{file_path}", sharedFileName=shared_file_name)
+                with toil._jobStore.read_shared_file_stream(
+                    shared_file_name, encoding="utf-8"
+                ) as f:
+                    self.assertEqual(f.read(), "why")
 
 
 class RestartingJob(Job):
     def __init__(self, msg_portion_file_id, trigger_file_id, message_portion_2):
-        Job.__init__(self, memory=100000, cores=1, disk="1M")
+        Job.__init__(self, memory=100000, cores=1, disk="1M")
         self.msg_portion_file_id = msg_portion_file_id
         self.trigger_file_id = trigger_file_id
         self.message_portion_2 = message_portion_2
 
     def run(self, file_store):
         with file_store.readGlobalFileStream(self.trigger_file_id) as readable:
-            if readable.read() == b'Time to freak out!':
-                raise RuntimeError('D:')
+            if readable.read() == b"Time to freak out!":
+                raise RuntimeError("D:")
 
         with file_store.writeGlobalFileStream() as (writable, output_file_id):
-            with file_store.readGlobalFileStream(self.msg_portion_file_id, encoding='utf-8') as readable:
+            with file_store.readGlobalFileStream(
+                self.msg_portion_file_id, encoding="utf-8"
+            ) as readable:
                 # combine readable.read() (the original message 1) with message 2
                 # this will be the final output of the workflow
-                writable.write(f'{readable.read()}{self.message_portion_2}'.encode())
+                writable.write(f"{readable.read()}{self.message_portion_2}".encode())
                 return output_file_id
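
Note: the import/export hunks above (apparently toil/test/src/importExportFileTest.py, going by the file list) only change quoting and line wrapping; the Toil.importFile / Toil.exportFile calls themselves are unchanged. A rough standalone sketch of the round trip these tests exercise; the job store and local paths here are illustrative placeholders, not taken from the package:

    import os
    from toil.common import Toil
    from toil.job import Job

    options = Job.Runner.getDefaultOptions("./jobstore")  # hypothetical job store location
    with Toil(options) as toil:
        file_id = toil.importFile("file:///tmp/input.txt")         # returns a FileID
        assert file_id.size == os.stat("/tmp/input.txt").st_size   # the FileID carries the file size
        toil.exportFile(file_id, "file:///tmp/output.txt")         # copy it back out of the job store
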
@@ -18,6 +18,7 @@ from configargparse import ArgumentParser
 
 from toil.common import Toil
 from toil.job import Job, JobDescription, TemporaryID
+from toil.resource import ModuleDescriptor
 from toil.test import ToilTest
 
 
@@ -30,7 +31,7 @@ class JobDescriptionTest(ToilTest):
         Job.Runner.addToilOptions(parser)
         options = parser.parse_args(args=[self.jobStorePath])
         self.toil = Toil(options)
-        self.assertEqual( self.toil, self.toil.__enter__() )
+        self.assertEqual(self.toil, self.toil.__enter__())
 
     def tearDown(self):
         self.toil.__exit__(None, None, None)
@@ -43,17 +44,31 @@ class JobDescriptionTest(ToilTest):
         Tests the public interface of a JobDescription.
         """
 
-        command = "by your command"
-        memory = 2^32
-        disk = 2^32
+        memory = 2 ^ 32
+        disk = 2 ^ 32
         cores = "1"
         preemptible = 1
 
-        j = JobDescription(command=command, requirements={"memory": memory, "cores": cores, "disk": disk, "preemptible": preemptible},
-                           jobName='testJobGraph', unitName='noName')
+        j = JobDescription(
+            requirements={
+                "memory": memory,
+                "cores": cores,
+                "disk": disk,
+                "preemptible": preemptible,
+            },
+            jobName="testJobGraph",
+            unitName="noName",
+        )
+
+        # Without a body, and with nothing to run, nextSuccessors will be None
+        self.assertEqual(j.has_body(), False)
+        self.assertEqual(j.nextSuccessors(), None)
+
+        # Attach a body so the job has something to do itself.
+        j.attach_body("fake", ModuleDescriptor.forModule("toil"))
+        self.assertEqual(j.has_body(), True)
 
-        #Check attributes
-        self.assertEqual(j.command, command)
+        # Check attributes
         self.assertEqual(j.memory, memory)
         self.assertEqual(j.disk, disk)
         self.assertEqual(j.cores, int(cores))
@@ -67,30 +82,40 @@ class JobDescriptionTest(ToilTest):
         self.assertEqual(j.predecessorsFinished, set())
         self.assertEqual(j.logJobStoreFileID, None)
 
-        #Check equals function (should be based on object identity and not contents)
-        j2 = JobDescription(command=command, requirements={"memory": memory, "cores": cores, "disk": disk, "preemptible": preemptible},
-                            jobName='testJobGraph', unitName='noName')
+        # Check equals function (should be based on object identity and not contents)
+        j2 = JobDescription(
+            requirements={
+                "memory": memory,
+                "cores": cores,
+                "disk": disk,
+                "preemptible": preemptible,
+            },
+            jobName="testJobGraph",
+            unitName="noName",
+        )
+        j2.attach_body("fake", ModuleDescriptor.forModule("toil"))
         self.assertNotEqual(j, j2)
         ###TODO test other functionality
 
     def testJobDescriptionSequencing(self):
-        j = JobDescription(command='command', requirements={}, jobName='unimportant')
+        j = JobDescription(requirements={}, jobName="unimportant")
 
-        j.addChild('child')
-        j.addFollowOn('followOn')
+        j.addChild("child")
+        j.addFollowOn("followOn")
 
-        # With a command, nothing should be ready to run
+        # With a body, nothing should be ready to run
+        j.attach_body("fake", ModuleDescriptor.forModule("toil"))
         self.assertEqual(list(j.nextSuccessors()), [])
 
-        # With command cleared, child should be ready to run
-        j.command = None
-        self.assertEqual(list(j.nextSuccessors()), ['child'])
+        # With body cleared, child should be ready to run
+        j.detach_body()
+        self.assertEqual(list(j.nextSuccessors()), ["child"])
 
         # Without the child, the follow-on should be ready to run
-        j.filterSuccessors(lambda jID: jID != 'child')
-        self.assertEqual(list(j.nextSuccessors()), ['followOn'])
+        j.filterSuccessors(lambda jID: jID != "child")
+        self.assertEqual(list(j.nextSuccessors()), ["followOn"])
 
         # Without the follow-on, we should return None, to be distinct from an
         # empty list. Nothing left to do!
-        j.filterSuccessors(lambda jID: jID != 'followOn')
+        j.filterSuccessors(lambda jID: jID != "followOn")
         self.assertEqual(j.nextSuccessors(), None)
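
Note: unlike the purely cosmetic changes elsewhere, the JobDescriptionTest hunks above track a real API change: JobDescription no longer takes a command= argument or exposes j.command, and the job's own work is instead represented by a "body" managed through attach_body(), detach_body(), and has_body(). A minimal sketch distilled from the test above; the "fake" body string and ModuleDescriptor.forModule("toil") are just the placeholders the test itself uses:

    from toil.job import JobDescription
    from toil.resource import ModuleDescriptor

    j = JobDescription(requirements={}, jobName="unimportant")
    j.addChild("child")
    j.addFollowOn("followOn")

    j.attach_body("fake", ModuleDescriptor.forModule("toil"))
    assert list(j.nextSuccessors()) == []          # a pending body blocks successors, as a command used to
    j.detach_body()                                # replaces the old `j.command = None`
    assert list(j.nextSuccessors()) == ["child"]   # with the body gone, the child is ready to run
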
@@ -20,6 +20,7 @@ from toil.test.src.jobTest import fn1Test
 
 class JobEncapsulationTest(ToilTest):
     """Tests testing the EncapsulationJob class."""
+
     def testEncapsulation(self):
         """
         Tests the Job.encapsulation method, which uses the EncapsulationJob
@@ -61,6 +62,7 @@ class JobEncapsulationTest(ToilTest):
 def noOp():
     pass
 
+
 def encapsulatedJobFn(job, string, outFile):
     a = job.addChildFn(fn1Test, string, outFile, name="inner-a")
     b = a.addFollowOnFn(fn1Test, a.rv(), outFile, name="inner-b")
@@ -23,7 +23,7 @@ from toil.test import ToilTest, slow
 
 logger = logging.getLogger(__name__)
 
-PREFIX_LENGTH=200
+PREFIX_LENGTH = 200
 
 
 # TODO: This test is ancient and while similar tests exist in `fileStoreTest.py`, none of them look
@@ -32,6 +32,7 @@ class JobFileStoreTest(ToilTest):
     """
     Tests testing the methods defined in :class:toil.fileStores.abstractFileStore.AbstractFileStore.
     """
+
    def testCachingFileStore(self):
         options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
         with Toil(options) as workflow:
@@ -43,38 +44,42 @@ class JobFileStoreTest(ToilTest):
         with Toil(options) as workflow:
             workflow.start(Job.wrapJobFn(simpleFileStoreJob))
 
-    def _testJobFileStore(self, retryCount=0, badWorker=0.0, stringNo=1, stringLength=1000000,
-                          testNo=2):
+    def _testJobFileStore(
+        self, retryCount=0, badWorker=0.0, stringNo=1, stringLength=1000000, testNo=2
+    ):
         """
         Creates a chain of jobs, each reading and writing files using the
         toil.fileStores.abstractFileStore.AbstractFileStore interface. Verifies the files written are always what we
         expect.
         """
         for test in range(testNo):
-            #Make a list of random strings, each of 100k chars and hash the first 200
-            #base prefix to the string
+            # Make a list of random strings, each of 100k chars and hash the first 200
+            # base prefix to the string
             def randomString():
                 chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                 s = "".join([random.choice(chars) for i in range(stringLength)])
                 return s[:PREFIX_LENGTH], s
-            #Total length is 2 million characters (20 strings of length 100K each)
+
+            # Total length is 2 million characters (20 strings of length 100K each)
             testStrings = dict([randomString() for i in range(stringNo)])
             options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
             options.logLevel = "DEBUG"
-            options.retryCount=retryCount
-            options.badWorker=badWorker
+            options.retryCount = retryCount
+            options.badWorker = badWorker
             options.badWorkerFailInterval = 1.0
             chainLength = 10
             # Run the workflow, the return value being the number of failed jobs
-            Job.Runner.startToil(Job.wrapJobFn(fileTestJob, [],
-                                               testStrings, chainLength),
-                                 options)
+            Job.Runner.startToil(
+                Job.wrapJobFn(fileTestJob, [], testStrings, chainLength), options
+            )
 
     def testJobFileStore(self):
         """
         Tests case that about half the files are cached
         """
-        self._testJobFileStore(retryCount=0, badWorker=0.0, stringNo=5, stringLength=1000000)
+        self._testJobFileStore(
+            retryCount=0, badWorker=0.0, stringNo=5, stringLength=1000000
+        )
 
     @slow
     def testJobFileStoreWithBadWorker(self):
@@ -82,79 +87,98 @@ class JobFileStoreTest(ToilTest):
         Tests case that about half the files are cached and the worker is randomly
         failing.
         """
-        self._testJobFileStore(retryCount=100, badWorker=0.5, stringNo=5, stringLength=1000000)
+        self._testJobFileStore(
+            retryCount=100, badWorker=0.5, stringNo=5, stringLength=1000000
+        )
 
 
 def fileTestJob(job, inputFileStoreIDs, testStrings, chainLength):
     """
     Test job exercises toil.fileStores.abstractFileStore.AbstractFileStore functions
     """
-    outputFileStoreIds = [] #Strings passed to the next job in the chain
+    outputFileStoreIds = []  # Strings passed to the next job in the chain
 
-    #Load the input jobStoreFileIDs and check that they map to the
-    #same set of random input strings, exercising the different functions in the fileStore interface
+    # Load the input jobStoreFileIDs and check that they map to the
+    # same set of random input strings, exercising the different functions in the fileStore interface
    for fileStoreID in inputFileStoreIDs:
         if random.random() > 0.5:
-            #Read the file for the fileStoreID, randomly picking a way to invoke readGlobalFile
+            # Read the file for the fileStoreID, randomly picking a way to invoke readGlobalFile
             if random.random() > 0.5:
-                local_path = job.fileStore.getLocalTempFileName() if random.random() > 0.5 else None
+                local_path = (
+                    job.fileStore.getLocalTempFileName()
+                    if random.random() > 0.5
+                    else None
+                )
                 cache = random.random() > 0.5
 
-                tempFile = job.fileStore.readGlobalFile(fileStoreID,
-                                                        local_path,
-                                                        cache=cache)
+                tempFile = job.fileStore.readGlobalFile(
+                    fileStoreID, local_path, cache=cache
+                )
                 with open(tempFile) as fH:
                     string = fH.readline()
-                logging.info("Downloaded %s to local path %s with cache %s and got %s with %d letters",
-                             fileStoreID, local_path, cache, tempFile, len(string))
+                logging.info(
+                    "Downloaded %s to local path %s with cache %s and got %s with %d letters",
+                    fileStoreID,
+                    local_path,
+                    cache,
+                    tempFile,
+                    len(string),
+                )
             else:
-                #Check the local file is as we expect
-                with job.fileStore.readGlobalFileStream(fileStoreID, 'utf-8') as fH:
+                # Check the local file is as we expect
+                with job.fileStore.readGlobalFileStream(fileStoreID, "utf-8") as fH:
                     string = fH.readline()
                     logging.info("Streamed %s and got %d letters", fileStoreID, len(string))
-            #Check the string we get back is what we expect
-            assert string[:PREFIX_LENGTH] in testStrings, f"Could not find string: {string[:PREFIX_LENGTH]}"
-            assert testStrings[string[:PREFIX_LENGTH]] == string, f"Mismatch in string: {string[:PREFIX_LENGTH]}"
-
-            #This allows the file to be passed to the next job
+            # Check the string we get back is what we expect
+            assert (
+                string[:PREFIX_LENGTH] in testStrings
+            ), f"Could not find string: {string[:PREFIX_LENGTH]}"
+            assert (
+                testStrings[string[:PREFIX_LENGTH]] == string
+            ), f"Mismatch in string: {string[:PREFIX_LENGTH]}"
+
+            # This allows the file to be passed to the next job
             outputFileStoreIds.append(fileStoreID)
         else:
-            #This tests deletion
+            # This tests deletion
             logging.info("Deleted %s", fileStoreID)
             job.fileStore.deleteGlobalFile(fileStoreID)
 
-    #Fill out the output strings until we have the same number as the input strings
-    #exercising different ways of writing files to the file store
+    # Fill out the output strings until we have the same number as the input strings
+    # exercising different ways of writing files to the file store
     while len(outputFileStoreIds) < len(testStrings):
-        #Pick a string and write it into a file
+        # Pick a string and write it into a file
         testString = random.choice(list(testStrings.values()))
         if random.random() > 0.5:
-            #Make a local copy of the file
-            tempFile = job.fileStore.getLocalTempFile() if random.random() > 0.5 \
-                else os.path.join(job.fileStore.getLocalTempDir(), "temp.txt")
-            with open(tempFile, 'w') as fH:
+            # Make a local copy of the file
+            tempFile = (
+                job.fileStore.getLocalTempFile()
+                if random.random() > 0.5
+                else os.path.join(job.fileStore.getLocalTempDir(), "temp.txt")
+            )
+            with open(tempFile, "w") as fH:
                 fH.write(testString)
-            #Write a local copy of the file using the local file
+            # Write a local copy of the file using the local file
             fileStoreID = job.fileStore.writeGlobalFile(tempFile)
 
             # Make sure it returned a valid and correct FileID with the right size
             assert isinstance(fileStoreID, FileID)
-            assert fileStoreID.size == len(testString.encode('utf-8'))
+            assert fileStoreID.size == len(testString.encode("utf-8"))
 
             outputFileStoreIds.append(fileStoreID)
         else:
-            #Use the writeGlobalFileStream method to write the file
+            # Use the writeGlobalFileStream method to write the file
             with job.fileStore.writeGlobalFileStream() as (fH, fileStoreID):
-                fH.write(testString.encode('utf-8'))
+                fH.write(testString.encode("utf-8"))
                 outputFileStoreIds.append(fileStoreID)
 
-            #Make sure it returned a valid and correct FileID with the right size
+            # Make sure it returned a valid and correct FileID with the right size
             assert isinstance(fileStoreID, FileID)
-            assert fileStoreID.size == len(testString.encode('utf-8'))
+            assert fileStoreID.size == len(testString.encode("utf-8"))
 
     if chainLength > 0:
-        #Make a child that will read these files and check it gets the same results
-        job.addChildJobFn(fileTestJob, outputFileStoreIds, testStrings, chainLength-1)
+        # Make a child that will read these files and check it gets the same results
+        job.addChildJobFn(fileTestJob, outputFileStoreIds, testStrings, chainLength - 1)
 
 
 fileStoreString = "Testing writeGlobalFile"
@@ -163,13 +187,13 @@ streamingFileStoreString = "Testing writeGlobalFileStream"
 
 def simpleFileStoreJob(job):
     localFilePath = os.path.join(job.fileStore.getLocalTempDir(), "parentTemp.txt")
-    with open(localFilePath, 'w') as f:
+    with open(localFilePath, "w") as f:
         f.write(fileStoreString)
     testID1 = job.fileStore.writeGlobalFile(localFilePath)
 
     testID2 = None
     with job.fileStore.writeGlobalFileStream() as (f, fileID):
-        f.write(streamingFileStoreString.encode('utf-8'))
+        f.write(streamingFileStoreString.encode("utf-8"))
         testID2 = fileID
 
     job.addChildJobFn(fileStoreChild, testID1, testID2)
@@ -177,12 +201,12 @@ def simpleFileStoreJob(job):
 
 def fileStoreChild(job, testID1, testID2):
     with job.fileStore.readGlobalFileStream(testID1) as f:
-        assert(f.read().decode('utf-8') == fileStoreString)
+        assert f.read().decode("utf-8") == fileStoreString
 
     localFilePath = os.path.join(job.fileStore.getLocalTempDir(), "childTemp.txt")
     job.fileStore.readGlobalFile(testID2, localFilePath)
     with open(localFilePath) as f:
-        assert(f.read() == streamingFileStoreString)
+        assert f.read() == streamingFileStoreString
 
     job.fileStore.deleteLocalFile(testID2)
     try: