toil 7.0.0__py3-none-any.whl → 8.1.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (197) hide show
  1. toil/__init__.py +124 -86
  2. toil/batchSystems/__init__.py +1 -0
  3. toil/batchSystems/abstractBatchSystem.py +137 -77
  4. toil/batchSystems/abstractGridEngineBatchSystem.py +211 -101
  5. toil/batchSystems/awsBatch.py +237 -128
  6. toil/batchSystems/cleanup_support.py +22 -16
  7. toil/batchSystems/contained_executor.py +30 -26
  8. toil/batchSystems/gridengine.py +85 -49
  9. toil/batchSystems/htcondor.py +164 -87
  10. toil/batchSystems/kubernetes.py +622 -386
  11. toil/batchSystems/local_support.py +17 -12
  12. toil/batchSystems/lsf.py +132 -79
  13. toil/batchSystems/lsfHelper.py +13 -11
  14. toil/batchSystems/mesos/__init__.py +41 -29
  15. toil/batchSystems/mesos/batchSystem.py +288 -149
  16. toil/batchSystems/mesos/executor.py +77 -49
  17. toil/batchSystems/mesos/test/__init__.py +31 -23
  18. toil/batchSystems/options.py +39 -29
  19. toil/batchSystems/registry.py +53 -19
  20. toil/batchSystems/singleMachine.py +293 -123
  21. toil/batchSystems/slurm.py +651 -155
  22. toil/batchSystems/torque.py +46 -32
  23. toil/bus.py +141 -73
  24. toil/common.py +784 -397
  25. toil/cwl/__init__.py +1 -1
  26. toil/cwl/cwltoil.py +1137 -534
  27. toil/cwl/utils.py +17 -22
  28. toil/deferred.py +62 -41
  29. toil/exceptions.py +5 -3
  30. toil/fileStores/__init__.py +5 -5
  31. toil/fileStores/abstractFileStore.py +88 -57
  32. toil/fileStores/cachingFileStore.py +711 -247
  33. toil/fileStores/nonCachingFileStore.py +113 -75
  34. toil/job.py +1031 -349
  35. toil/jobStores/abstractJobStore.py +387 -243
  36. toil/jobStores/aws/jobStore.py +772 -412
  37. toil/jobStores/aws/utils.py +161 -109
  38. toil/jobStores/conftest.py +1 -0
  39. toil/jobStores/fileJobStore.py +289 -151
  40. toil/jobStores/googleJobStore.py +137 -70
  41. toil/jobStores/utils.py +36 -15
  42. toil/leader.py +614 -269
  43. toil/lib/accelerators.py +115 -18
  44. toil/lib/aws/__init__.py +55 -28
  45. toil/lib/aws/ami.py +122 -87
  46. toil/lib/aws/iam.py +284 -108
  47. toil/lib/aws/s3.py +31 -0
  48. toil/lib/aws/session.py +204 -58
  49. toil/lib/aws/utils.py +290 -213
  50. toil/lib/bioio.py +13 -5
  51. toil/lib/compatibility.py +11 -6
  52. toil/lib/conversions.py +83 -49
  53. toil/lib/docker.py +131 -103
  54. toil/lib/dockstore.py +379 -0
  55. toil/lib/ec2.py +322 -209
  56. toil/lib/ec2nodes.py +174 -105
  57. toil/lib/encryption/_dummy.py +5 -3
  58. toil/lib/encryption/_nacl.py +10 -6
  59. toil/lib/encryption/conftest.py +1 -0
  60. toil/lib/exceptions.py +26 -7
  61. toil/lib/expando.py +4 -2
  62. toil/lib/ftp_utils.py +217 -0
  63. toil/lib/generatedEC2Lists.py +127 -19
  64. toil/lib/history.py +1271 -0
  65. toil/lib/history_submission.py +681 -0
  66. toil/lib/humanize.py +6 -2
  67. toil/lib/io.py +121 -12
  68. toil/lib/iterables.py +4 -2
  69. toil/lib/memoize.py +12 -8
  70. toil/lib/misc.py +83 -18
  71. toil/lib/objects.py +2 -2
  72. toil/lib/resources.py +19 -7
  73. toil/lib/retry.py +125 -87
  74. toil/lib/threading.py +282 -80
  75. toil/lib/throttle.py +15 -14
  76. toil/lib/trs.py +390 -0
  77. toil/lib/web.py +38 -0
  78. toil/options/common.py +850 -402
  79. toil/options/cwl.py +185 -90
  80. toil/options/runner.py +50 -0
  81. toil/options/wdl.py +70 -19
  82. toil/provisioners/__init__.py +111 -46
  83. toil/provisioners/abstractProvisioner.py +322 -157
  84. toil/provisioners/aws/__init__.py +62 -30
  85. toil/provisioners/aws/awsProvisioner.py +980 -627
  86. toil/provisioners/clusterScaler.py +541 -279
  87. toil/provisioners/gceProvisioner.py +283 -180
  88. toil/provisioners/node.py +147 -79
  89. toil/realtimeLogger.py +34 -22
  90. toil/resource.py +137 -75
  91. toil/server/app.py +127 -61
  92. toil/server/celery_app.py +3 -1
  93. toil/server/cli/wes_cwl_runner.py +84 -55
  94. toil/server/utils.py +56 -31
  95. toil/server/wes/abstract_backend.py +64 -26
  96. toil/server/wes/amazon_wes_utils.py +21 -15
  97. toil/server/wes/tasks.py +121 -63
  98. toil/server/wes/toil_backend.py +142 -107
  99. toil/server/wsgi_app.py +4 -3
  100. toil/serviceManager.py +58 -22
  101. toil/statsAndLogging.py +183 -65
  102. toil/test/__init__.py +263 -179
  103. toil/test/batchSystems/batchSystemTest.py +438 -195
  104. toil/test/batchSystems/batch_system_plugin_test.py +18 -7
  105. toil/test/batchSystems/test_gridengine.py +173 -0
  106. toil/test/batchSystems/test_lsf_helper.py +67 -58
  107. toil/test/batchSystems/test_slurm.py +265 -49
  108. toil/test/cactus/test_cactus_integration.py +20 -22
  109. toil/test/cwl/conftest.py +39 -0
  110. toil/test/cwl/cwlTest.py +375 -72
  111. toil/test/cwl/measure_default_memory.cwl +12 -0
  112. toil/test/cwl/not_run_required_input.cwl +29 -0
  113. toil/test/cwl/optional-file.cwl +18 -0
  114. toil/test/cwl/scatter_duplicate_outputs.cwl +40 -0
  115. toil/test/docs/scriptsTest.py +60 -34
  116. toil/test/jobStores/jobStoreTest.py +412 -235
  117. toil/test/lib/aws/test_iam.py +116 -48
  118. toil/test/lib/aws/test_s3.py +16 -9
  119. toil/test/lib/aws/test_utils.py +5 -6
  120. toil/test/lib/dockerTest.py +118 -141
  121. toil/test/lib/test_conversions.py +113 -115
  122. toil/test/lib/test_ec2.py +57 -49
  123. toil/test/lib/test_history.py +212 -0
  124. toil/test/lib/test_misc.py +12 -5
  125. toil/test/lib/test_trs.py +161 -0
  126. toil/test/mesos/MesosDataStructuresTest.py +23 -10
  127. toil/test/mesos/helloWorld.py +7 -6
  128. toil/test/mesos/stress.py +25 -20
  129. toil/test/options/options.py +7 -2
  130. toil/test/provisioners/aws/awsProvisionerTest.py +293 -140
  131. toil/test/provisioners/clusterScalerTest.py +440 -250
  132. toil/test/provisioners/clusterTest.py +81 -42
  133. toil/test/provisioners/gceProvisionerTest.py +174 -100
  134. toil/test/provisioners/provisionerTest.py +25 -13
  135. toil/test/provisioners/restartScript.py +5 -4
  136. toil/test/server/serverTest.py +188 -141
  137. toil/test/sort/restart_sort.py +137 -68
  138. toil/test/sort/sort.py +134 -66
  139. toil/test/sort/sortTest.py +91 -49
  140. toil/test/src/autoDeploymentTest.py +140 -100
  141. toil/test/src/busTest.py +20 -18
  142. toil/test/src/checkpointTest.py +8 -2
  143. toil/test/src/deferredFunctionTest.py +49 -35
  144. toil/test/src/dockerCheckTest.py +33 -26
  145. toil/test/src/environmentTest.py +20 -10
  146. toil/test/src/fileStoreTest.py +538 -271
  147. toil/test/src/helloWorldTest.py +7 -4
  148. toil/test/src/importExportFileTest.py +61 -31
  149. toil/test/src/jobDescriptionTest.py +32 -17
  150. toil/test/src/jobEncapsulationTest.py +2 -0
  151. toil/test/src/jobFileStoreTest.py +74 -50
  152. toil/test/src/jobServiceTest.py +187 -73
  153. toil/test/src/jobTest.py +120 -70
  154. toil/test/src/miscTests.py +19 -18
  155. toil/test/src/promisedRequirementTest.py +82 -36
  156. toil/test/src/promisesTest.py +7 -6
  157. toil/test/src/realtimeLoggerTest.py +6 -6
  158. toil/test/src/regularLogTest.py +71 -37
  159. toil/test/src/resourceTest.py +80 -49
  160. toil/test/src/restartDAGTest.py +36 -22
  161. toil/test/src/resumabilityTest.py +9 -2
  162. toil/test/src/retainTempDirTest.py +45 -14
  163. toil/test/src/systemTest.py +12 -8
  164. toil/test/src/threadingTest.py +44 -25
  165. toil/test/src/toilContextManagerTest.py +10 -7
  166. toil/test/src/userDefinedJobArgTypeTest.py +8 -5
  167. toil/test/src/workerTest.py +33 -16
  168. toil/test/utils/toilDebugTest.py +70 -58
  169. toil/test/utils/toilKillTest.py +4 -5
  170. toil/test/utils/utilsTest.py +239 -102
  171. toil/test/wdl/wdltoil_test.py +789 -148
  172. toil/test/wdl/wdltoil_test_kubernetes.py +37 -23
  173. toil/toilState.py +52 -26
  174. toil/utils/toilConfig.py +13 -4
  175. toil/utils/toilDebugFile.py +44 -27
  176. toil/utils/toilDebugJob.py +85 -25
  177. toil/utils/toilDestroyCluster.py +11 -6
  178. toil/utils/toilKill.py +8 -3
  179. toil/utils/toilLaunchCluster.py +251 -145
  180. toil/utils/toilMain.py +37 -16
  181. toil/utils/toilRsyncCluster.py +27 -14
  182. toil/utils/toilSshCluster.py +45 -22
  183. toil/utils/toilStats.py +75 -36
  184. toil/utils/toilStatus.py +226 -119
  185. toil/utils/toilUpdateEC2Instances.py +3 -1
  186. toil/version.py +6 -6
  187. toil/wdl/utils.py +5 -5
  188. toil/wdl/wdltoil.py +3528 -1053
  189. toil/worker.py +370 -149
  190. toil-8.1.0b1.dist-info/METADATA +178 -0
  191. toil-8.1.0b1.dist-info/RECORD +259 -0
  192. {toil-7.0.0.dist-info → toil-8.1.0b1.dist-info}/WHEEL +1 -1
  193. toil-7.0.0.dist-info/METADATA +0 -158
  194. toil-7.0.0.dist-info/RECORD +0 -244
  195. {toil-7.0.0.dist-info → toil-8.1.0b1.dist-info}/LICENSE +0 -0
  196. {toil-7.0.0.dist-info → toil-8.1.0b1.dist-info}/entry_points.txt +0 -0
  197. {toil-7.0.0.dist-info → toil-8.1.0b1.dist-info}/top_level.txt +0 -0
@@ -28,7 +28,7 @@ from itertools import chain, islice
28
28
  from queue import Queue
29
29
  from tempfile import mkstemp
30
30
  from threading import Thread
31
- from typing import Any, Tuple
31
+ from typing import Any
32
32
  from urllib.request import Request, urlopen
33
33
 
34
34
  import pytest
@@ -37,20 +37,21 @@ from stubserver import FTPStubServer
37
37
  from toil.common import Config, Toil
38
38
  from toil.fileStores import FileID
39
39
  from toil.job import Job, JobDescription, TemporaryID
40
- from toil.jobStores.abstractJobStore import (NoSuchFileException,
41
- NoSuchJobException)
40
+ from toil.jobStores.abstractJobStore import NoSuchFileException, NoSuchJobException
42
41
  from toil.jobStores.fileJobStore import FileJobStore
43
42
  from toil.lib.io import mkdtemp
44
43
  from toil.lib.memoize import memoize
45
44
  from toil.lib.retry import retry
46
45
  from toil.statsAndLogging import StatsAndLogging
47
- from toil.test import (ToilTest,
48
- make_tests,
49
- needs_aws_s3,
50
- needs_encryption,
51
- needs_google_project,
52
- needs_google_storage,
53
- slow)
46
+ from toil.test import (
47
+ ToilTest,
48
+ make_tests,
49
+ needs_aws_s3,
50
+ needs_encryption,
51
+ needs_google_project,
52
+ needs_google_storage,
53
+ slow,
54
+ )
54
55
 
55
56
  # noinspection PyPackageRequirements
56
57
  # (installed by `make prepare`)
@@ -82,11 +83,11 @@ class AbstractJobStoreTest:
82
83
  def setUpClass(cls):
83
84
  super().setUpClass()
84
85
  logging.basicConfig(level=logging.DEBUG)
85
- logging.getLogger('boto').setLevel(logging.CRITICAL)
86
- logging.getLogger('boto').setLevel(logging.WARNING)
87
- logging.getLogger('boto3.resources').setLevel(logging.WARNING)
88
- logging.getLogger('botocore.auth').setLevel(logging.WARNING)
89
- logging.getLogger('botocore.hooks').setLevel(logging.WARNING)
86
+ logging.getLogger("boto").setLevel(logging.CRITICAL)
87
+ logging.getLogger("boto").setLevel(logging.WARNING)
88
+ logging.getLogger("boto3.resources").setLevel(logging.WARNING)
89
+ logging.getLogger("botocore.auth").setLevel(logging.WARNING)
90
+ logging.getLogger("botocore.hooks").setLevel(logging.WARNING)
90
91
 
91
92
  # The use of @memoize ensures that we only have one instance of per class even with the
92
93
  # generative import/export tests attempts to instantiate more. This in turn enables us to
@@ -113,7 +114,7 @@ class AbstractJobStoreTest:
113
114
 
114
115
  def setUp(self):
115
116
  super().setUp()
116
- self.namePrefix = 'jobstore-test-' + str(uuid.uuid4())
117
+ self.namePrefix = "jobstore-test-" + str(uuid.uuid4())
117
118
  self.config = self._createConfig()
118
119
 
119
120
  # Jobstores to be used in testing.
@@ -127,10 +128,16 @@ class AbstractJobStoreTest:
127
128
  self.jobstore_resumed_noconfig.resume()
128
129
 
129
130
  # Requirements for jobs to be created.
130
- self.arbitraryRequirements = {'memory': 1, 'disk': 2, 'cores': 1, 'preemptible': False}
131
+ self.arbitraryRequirements = {
132
+ "memory": 1,
133
+ "disk": 2,
134
+ "cores": 1,
135
+ "preemptible": False,
136
+ }
131
137
  # Function to make an arbitrary new job
132
- self.arbitraryJob = lambda: JobDescription(jobName='arbitrary',
133
- requirements=self.arbitraryRequirements)
138
+ self.arbitraryJob = lambda: JobDescription(
139
+ jobName="arbitrary", requirements=self.arbitraryRequirements
140
+ )
134
141
 
135
142
  self.parentJobReqs = dict(memory=12, cores=34, disk=35, preemptible=True)
136
143
  self.childJobReqs1 = dict(memory=23, cores=45, disk=46, preemptible=True)
@@ -143,8 +150,12 @@ class AbstractJobStoreTest:
143
150
 
144
151
  def testInitialState(self):
145
152
  """Ensure proper handling of nonexistent files."""
146
- self.assertFalse(self.jobstore_initialized.job_exists('nonexistentFile'))
147
- self.assertRaises(NoSuchJobException, self.jobstore_initialized.load_job, 'nonexistentFile')
153
+ self.assertFalse(self.jobstore_initialized.job_exists("nonexistentFile"))
154
+ self.assertRaises(
155
+ NoSuchJobException,
156
+ self.jobstore_initialized.load_job,
157
+ "nonexistentFile",
158
+ )
148
159
 
149
160
  def testJobCreation(self):
150
161
  """
@@ -157,8 +168,9 @@ class AbstractJobStoreTest:
157
168
  jobstore = self.jobstore_initialized
158
169
 
159
170
  # Create a job and verify its existence/properties
160
- job = JobDescription(requirements=self.parentJobReqs,
161
- jobName='test1', unitName='onParent')
171
+ job = JobDescription(
172
+ requirements=self.parentJobReqs, jobName="test1", unitName="onParent"
173
+ )
162
174
  self.assertTrue(isinstance(job.jobStoreID, TemporaryID))
163
175
  jobstore.assign_job_id(job)
164
176
  self.assertFalse(isinstance(job.jobStoreID, TemporaryID))
@@ -167,12 +179,12 @@ class AbstractJobStoreTest:
167
179
  self.assertEqual(created, job)
168
180
 
169
181
  self.assertTrue(jobstore.job_exists(job.jobStoreID))
170
- self.assertEqual(job.memory, self.parentJobReqs['memory'])
171
- self.assertEqual(job.cores, self.parentJobReqs['cores'])
172
- self.assertEqual(job.disk, self.parentJobReqs['disk'])
173
- self.assertEqual(job.preemptible, self.parentJobReqs['preemptible'])
174
- self.assertEqual(job.jobName, 'test1')
175
- self.assertEqual(job.unitName, 'onParent')
182
+ self.assertEqual(job.memory, self.parentJobReqs["memory"])
183
+ self.assertEqual(job.cores, self.parentJobReqs["cores"])
184
+ self.assertEqual(job.disk, self.parentJobReqs["disk"])
185
+ self.assertEqual(job.preemptible, self.parentJobReqs["preemptible"])
186
+ self.assertEqual(job.jobName, "test1")
187
+ self.assertEqual(job.unitName, "onParent")
176
188
 
177
189
  def testConfigEquality(self):
178
190
  """
@@ -191,8 +203,9 @@ class AbstractJobStoreTest:
191
203
  """Tests that a job created via one JobStore instance can be loaded from another."""
192
204
 
193
205
  # Create a job on the first jobstore.
194
- jobDesc1 = JobDescription(requirements=self.parentJobReqs,
195
- jobName='test1', unitName='onJS1')
206
+ jobDesc1 = JobDescription(
207
+ requirements=self.parentJobReqs, jobName="test1", unitName="onJS1"
208
+ )
196
209
  self.jobstore_initialized.assign_job_id(jobDesc1)
197
210
  self.jobstore_initialized.create_job(jobDesc1)
198
211
 
@@ -203,11 +216,13 @@ class AbstractJobStoreTest:
203
216
 
204
217
  def testChildLoadingEquality(self):
205
218
  """Test that loading a child job operates as expected."""
206
- job = JobDescription(requirements=self.parentJobReqs,
207
- jobName='test1', unitName='onParent')
219
+ job = JobDescription(
220
+ requirements=self.parentJobReqs, jobName="test1", unitName="onParent"
221
+ )
208
222
 
209
- childJob = JobDescription(requirements=self.childJobReqs1,
210
- jobName='test2', unitName='onChild1')
223
+ childJob = JobDescription(
224
+ requirements=self.childJobReqs1, jobName="test2", unitName="onChild1"
225
+ )
211
226
  self.jobstore_initialized.assign_job_id(job)
212
227
  self.jobstore_initialized.assign_job_id(childJob)
213
228
  self.jobstore_initialized.create_job(job)
@@ -215,7 +230,10 @@ class AbstractJobStoreTest:
215
230
  job.addChild(childJob.jobStoreID)
216
231
  self.jobstore_initialized.update_job(job)
217
232
 
218
- self.assertEqual(self.jobstore_initialized.load_job(list(job.allSuccessors())[0])._body, childJob._body)
233
+ self.assertEqual(
234
+ self.jobstore_initialized.load_job(list(job.allSuccessors())[0])._body,
235
+ childJob._body,
236
+ )
219
237
 
220
238
  def testPersistantFilesToDelete(self):
221
239
  """
@@ -230,28 +248,35 @@ class AbstractJobStoreTest:
230
248
  """
231
249
 
232
250
  # Create a job.
233
- job = JobDescription(requirements=self.parentJobReqs,
234
- jobName='test1', unitName='onJS1')
251
+ job = JobDescription(
252
+ requirements=self.parentJobReqs, jobName="test1", unitName="onJS1"
253
+ )
235
254
 
236
255
  self.jobstore_initialized.assign_job_id(job)
237
256
  self.jobstore_initialized.create_job(job)
238
- job.filesToDelete = ['1', '2']
257
+ job.filesToDelete = ["1", "2"]
239
258
  self.jobstore_initialized.update_job(job)
240
- self.assertEqual(self.jobstore_initialized.load_job(job.jobStoreID).filesToDelete, ['1', '2'])
259
+ self.assertEqual(
260
+ self.jobstore_initialized.load_job(job.jobStoreID).filesToDelete,
261
+ ["1", "2"],
262
+ )
241
263
 
242
264
  def testUpdateBehavior(self):
243
265
  """Tests the proper behavior during updating jobs."""
244
266
  jobstore1 = self.jobstore_initialized
245
267
  jobstore2 = self.jobstore_resumed_noconfig
246
268
 
247
- job1 = JobDescription(requirements=self.parentJobReqs,
248
- jobName='test1', unitName='onParent')
269
+ job1 = JobDescription(
270
+ requirements=self.parentJobReqs, jobName="test1", unitName="onParent"
271
+ )
249
272
 
250
- childJob1 = JobDescription(requirements=self.childJobReqs1,
251
- jobName='test2', unitName='onChild1')
273
+ childJob1 = JobDescription(
274
+ requirements=self.childJobReqs1, jobName="test2", unitName="onChild1"
275
+ )
252
276
 
253
- childJob2 = JobDescription(requirements=self.childJobReqs2,
254
- jobName='test3', unitName='onChild2')
277
+ childJob2 = JobDescription(
278
+ requirements=self.childJobReqs2, jobName="test3", unitName="onChild2"
279
+ )
255
280
 
256
281
  jobstore1.assign_job_id(job1)
257
282
  jobstore1.create_job(job1)
@@ -270,7 +295,9 @@ class AbstractJobStoreTest:
270
295
 
271
296
  # Check equivalence between jobstore1 and jobstore2.
272
297
  # While job1 and job2 share a jobStoreID, job1 has not been "refreshed" to show the newly added child jobs.
273
- self.assertNotEqual(sorted(job2.allSuccessors()), sorted(job1.allSuccessors()))
298
+ self.assertNotEqual(
299
+ sorted(job2.allSuccessors()), sorted(job1.allSuccessors())
300
+ )
274
301
 
275
302
  # Reload parent job on jobstore, "refreshing" the job.
276
303
  job1 = jobstore1.load_job(job1.jobStoreID)
@@ -287,18 +314,21 @@ class AbstractJobStoreTest:
287
314
  """Tests the consequences of deleting jobs."""
288
315
  # A local jobstore object for testing.
289
316
  jobstore = self.jobstore_initialized
290
- job = JobDescription(requirements=self.parentJobReqs,
291
- jobName='test1', unitName='onJob')
317
+ job = JobDescription(
318
+ requirements=self.parentJobReqs, jobName="test1", unitName="onJob"
319
+ )
292
320
  # Create job
293
321
  jobstore.assign_job_id(job)
294
322
  jobstore.create_job(job)
295
323
 
296
324
  # Create child Jobs
297
- child1 = JobDescription(requirements=self.childJobReqs1,
298
- jobName='test2', unitName='onChild1')
325
+ child1 = JobDescription(
326
+ requirements=self.childJobReqs1, jobName="test2", unitName="onChild1"
327
+ )
299
328
 
300
- child2 = JobDescription(requirements=self.childJobReqs2,
301
- jobName='test3', unitName='onChild2')
329
+ child2 = JobDescription(
330
+ requirements=self.childJobReqs2, jobName="test3", unitName="onChild2"
331
+ )
302
332
 
303
333
  # Add children to parent.
304
334
  jobstore.assign_job_id(child1)
@@ -320,7 +350,10 @@ class AbstractJobStoreTest:
320
350
  # jobs that show up are a subset of all existing jobs. If we had deleted jobs before
321
351
  # this we would have to worry about ghost jobs appearing and this assertion would not
322
352
  # be valid
323
- self.assertTrue({j.jobStoreID for j in (childJobs + [job])} >= {j.jobStoreID for j in jobstore.jobs()})
353
+ self.assertTrue(
354
+ {j.jobStoreID for j in (childJobs + [job])}
355
+ >= {j.jobStoreID for j in jobstore.jobs()}
356
+ )
324
357
 
325
358
  # Test job deletions
326
359
  # First delete parent, this should have no effect on the children
@@ -333,12 +366,14 @@ class AbstractJobStoreTest:
333
366
  self.assertTrue(jobstore.job_exists(childJob.jobStoreID))
334
367
  jobstore.delete_job(childJob.jobStoreID)
335
368
  self.assertFalse(jobstore.job_exists(childJob.jobStoreID))
336
- self.assertRaises(NoSuchJobException, jobstore.load_job, childJob.jobStoreID)
369
+ self.assertRaises(
370
+ NoSuchJobException, jobstore.load_job, childJob.jobStoreID
371
+ )
337
372
 
338
373
  try:
339
- with jobstore.read_shared_file_stream('missing') as _:
374
+ with jobstore.read_shared_file_stream("missing") as _:
340
375
  pass
341
- self.fail('Expecting NoSuchFileException')
376
+ self.fail("Expecting NoSuchFileException")
342
377
  except NoSuchFileException:
343
378
  pass
344
379
 
@@ -347,36 +382,40 @@ class AbstractJobStoreTest:
347
382
  jobstore1 = self.jobstore_initialized
348
383
  jobstore2 = self.jobstore_resumed_noconfig
349
384
 
350
- bar = b'bar'
385
+ bar = b"bar"
351
386
 
352
- with jobstore1.write_shared_file_stream('foo') as f:
387
+ with jobstore1.write_shared_file_stream("foo") as f:
353
388
  f.write(bar)
354
389
  # ... read that file on worker, ...
355
- with jobstore2.read_shared_file_stream('foo') as f:
390
+ with jobstore2.read_shared_file_stream("foo") as f:
356
391
  self.assertEqual(bar, f.read())
357
392
  # ... and read it again on jobstore1.
358
- with jobstore1.read_shared_file_stream('foo') as f:
393
+ with jobstore1.read_shared_file_stream("foo") as f:
359
394
  self.assertEqual(bar, f.read())
360
395
 
361
- with jobstore1.write_shared_file_stream('nonEncrypted', encrypted=False) as f:
396
+ with jobstore1.write_shared_file_stream(
397
+ "nonEncrypted", encrypted=False
398
+ ) as f:
362
399
  f.write(bar)
363
- self.assertUrl(jobstore1.get_shared_public_url('nonEncrypted'))
364
- self.assertRaises(NoSuchFileException, jobstore1.get_shared_public_url, 'missing')
400
+ self.assertUrl(jobstore1.get_shared_public_url("nonEncrypted"))
401
+ self.assertRaises(
402
+ NoSuchFileException, jobstore1.get_shared_public_url, "missing"
403
+ )
365
404
 
366
405
  def testReadWriteSharedFilesTextMode(self):
367
406
  """Checks if text mode is compatible for shared file streams."""
368
407
  jobstore1 = self.jobstore_initialized
369
408
  jobstore2 = self.jobstore_resumed_noconfig
370
409
 
371
- bar = 'bar'
410
+ bar = "bar"
372
411
 
373
- with jobstore1.write_shared_file_stream('foo', encoding='utf-8') as f:
412
+ with jobstore1.write_shared_file_stream("foo", encoding="utf-8") as f:
374
413
  f.write(bar)
375
414
 
376
- with jobstore2.read_shared_file_stream('foo', encoding='utf-8') as f:
415
+ with jobstore2.read_shared_file_stream("foo", encoding="utf-8") as f:
377
416
  self.assertEqual(bar, f.read())
378
417
 
379
- with jobstore1.read_shared_file_stream('foo', encoding='utf-8') as f:
418
+ with jobstore1.read_shared_file_stream("foo", encoding="utf-8") as f:
380
419
  self.assertEqual(bar, f.read())
381
420
 
382
421
  def testReadWriteFileStreamTextMode(self):
@@ -386,19 +425,22 @@ class AbstractJobStoreTest:
386
425
  jobstore.assign_job_id(job)
387
426
  jobstore.create_job(job)
388
427
 
389
- foo = 'foo'
390
- bar = 'bar'
428
+ foo = "foo"
429
+ bar = "bar"
391
430
 
392
- with jobstore.write_file_stream(job.jobStoreID, encoding='utf-8') as (f, fileID):
431
+ with jobstore.write_file_stream(job.jobStoreID, encoding="utf-8") as (
432
+ f,
433
+ fileID,
434
+ ):
393
435
  f.write(foo)
394
436
 
395
- with jobstore.read_file_stream(fileID, encoding='utf-8') as f:
437
+ with jobstore.read_file_stream(fileID, encoding="utf-8") as f:
396
438
  self.assertEqual(foo, f.read())
397
439
 
398
- with jobstore.update_file_stream(fileID, encoding='utf-8') as f:
440
+ with jobstore.update_file_stream(fileID, encoding="utf-8") as f:
399
441
  f.write(bar)
400
442
 
401
- with jobstore.read_file_stream(fileID, encoding='utf-8') as f:
443
+ with jobstore.read_file_stream(fileID, encoding="utf-8") as f:
402
444
  self.assertEqual(bar, f.read())
403
445
 
404
446
  def testPerJobFiles(self):
@@ -407,19 +449,22 @@ class AbstractJobStoreTest:
407
449
  jobstore2 = self.jobstore_resumed_noconfig
408
450
 
409
451
  # Create jobNodeOnJS1
410
- jobOnJobStore1 = JobDescription(requirements=self.parentJobReqs,
411
- jobName='test1', unitName='onJobStore1')
452
+ jobOnJobStore1 = JobDescription(
453
+ requirements=self.parentJobReqs, jobName="test1", unitName="onJobStore1"
454
+ )
412
455
 
413
456
  # First recreate job
414
457
  jobstore1.assign_job_id(jobOnJobStore1)
415
458
  jobstore1.create_job(jobOnJobStore1)
416
- fileOne = jobstore2.get_empty_file_store_id(jobOnJobStore1.jobStoreID, cleanup=True)
459
+ fileOne = jobstore2.get_empty_file_store_id(
460
+ jobOnJobStore1.jobStoreID, cleanup=True
461
+ )
417
462
  # Check file exists
418
463
  self.assertTrue(jobstore2.file_exists(fileOne))
419
464
  self.assertTrue(jobstore1.file_exists(fileOne))
420
- one = b'one'
421
- two = b'two'
422
- three = b'three'
465
+ one = b"one"
466
+ two = b"two"
467
+ three = b"three"
423
468
  # ... write to the file on jobstore2, ...
424
469
  with jobstore2.update_file_stream(fileOne) as f:
425
470
  f.write(one)
@@ -431,20 +476,22 @@ class AbstractJobStoreTest:
431
476
  fh, path = mkstemp()
432
477
  try:
433
478
  os.close(fh)
434
- tmpPath = path + '.read-only'
479
+ tmpPath = path + ".read-only"
435
480
  jobstore1.read_file(fileOne, tmpPath)
436
481
  try:
437
482
  shutil.copyfile(tmpPath, path)
438
483
  finally:
439
484
  os.unlink(tmpPath)
440
- with open(path, 'rb+') as f:
485
+ with open(path, "rb+") as f:
441
486
  self.assertEqual(f.read(), one)
442
487
  # Write a different string to the local file ...
443
488
  f.seek(0)
444
489
  f.truncate(0)
445
490
  f.write(two)
446
491
  # ... and create a second file from the local file.
447
- fileTwo = jobstore1.write_file(path, jobOnJobStore1.jobStoreID, cleanup=True)
492
+ fileTwo = jobstore1.write_file(
493
+ path, jobOnJobStore1.jobStoreID, cleanup=True
494
+ )
448
495
  with jobstore2.read_file_stream(fileTwo) as f:
449
496
  self.assertEqual(f.read(), two)
450
497
  # Now update the first file from the local file ...
@@ -454,7 +501,9 @@ class AbstractJobStoreTest:
454
501
  finally:
455
502
  os.unlink(path)
456
503
  # Create a third file to test the last remaining method.
457
- with jobstore2.write_file_stream(jobOnJobStore1.jobStoreID, cleanup=True) as (f, fileThree):
504
+ with jobstore2.write_file_stream(
505
+ jobOnJobStore1.jobStoreID, cleanup=True
506
+ ) as (f, fileThree):
458
507
  f.write(three)
459
508
  with jobstore1.read_file_stream(fileThree) as f:
460
509
  self.assertEqual(f.read(), three)
@@ -465,11 +514,11 @@ class AbstractJobStoreTest:
465
514
  #
466
515
  for store in jobstore2, jobstore1:
467
516
  self.assertFalse(store.file_exists(fileOne))
468
- self.assertRaises(NoSuchFileException, store.read_file, fileOne, '')
517
+ self.assertRaises(NoSuchFileException, store.read_file, fileOne, "")
469
518
  try:
470
519
  with store.read_file_stream(fileOne) as _:
471
520
  pass
472
- self.fail('Expecting NoSuchFileException')
521
+ self.fail("Expecting NoSuchFileException")
473
522
  except NoSuchFileException:
474
523
  pass
475
524
 
@@ -478,16 +527,17 @@ class AbstractJobStoreTest:
478
527
  jobstore1 = self.jobstore_initialized
479
528
  jobstore2 = self.jobstore_resumed_noconfig
480
529
 
481
- jobOnJobStore1 = JobDescription(requirements=self.parentJobReqs,
482
- jobName='test1', unitName='onJobStore1')
530
+ jobOnJobStore1 = JobDescription(
531
+ requirements=self.parentJobReqs, jobName="test1", unitName="onJobStore1"
532
+ )
483
533
 
484
534
  jobstore1.assign_job_id(jobOnJobStore1)
485
535
  jobstore1.create_job(jobOnJobStore1)
486
536
 
487
537
  # Test stats and logging
488
538
  stats = None
489
- one = b'one'
490
- two = b'two'
539
+ one = b"one"
540
+ two = b"two"
491
541
 
492
542
  # Allows stats to be read/written to/from in read/writeStatsAndLogging.
493
543
  def callback(f2):
@@ -504,7 +554,9 @@ class AbstractJobStoreTest:
504
554
  jobstore2.write_logs(one)
505
555
  self.assertEqual(1, jobstore1.read_logs(callback))
506
556
  self.assertEqual({one}, stats)
507
- self.assertEqual(0, jobstore1.read_logs(callback)) # read_logs purges saved stats etc
557
+ self.assertEqual(
558
+ 0, jobstore1.read_logs(callback)
559
+ ) # read_logs purges saved stats etc
508
560
 
509
561
  jobstore2.write_logs(one)
510
562
  jobstore2.write_logs(two)
@@ -528,19 +580,21 @@ class AbstractJobStoreTest:
528
580
 
529
581
  def testWriteLogFiles(self):
530
582
  """Test writing log files."""
531
- jobNames = ['testStatsAndLogging_writeLogFiles']
532
- jobLogList = ['string', b'bytes', '', b'newline\n']
583
+ jobNames = ["testStatsAndLogging_writeLogFiles"]
584
+ jobLogList = ["string", b"bytes", "", b"newline\n"]
533
585
  config = self._createConfig()
534
- setattr(config, 'writeLogs', self._createTempDir())
535
- setattr(config, 'writeLogsGzip', None)
586
+ setattr(config, "writeLogs", self._createTempDir())
587
+ setattr(config, "writeLogsGzip", None)
536
588
  StatsAndLogging.writeLogFiles(jobNames, jobLogList, config)
537
- jobLogFile = os.path.join(config.writeLogs, jobNames[0] + '_000.log')
589
+ jobLogFile = os.path.join(config.writeLogs, jobNames[0] + "_000.log")
538
590
  # The log directory should get exactly one file, names after this
539
591
  # easy job name with no replacements needed.
540
- self.assertEqual(os.listdir(config.writeLogs), [os.path.basename(jobLogFile)])
592
+ self.assertEqual(
593
+ os.listdir(config.writeLogs), [os.path.basename(jobLogFile)]
594
+ )
541
595
  self.assertTrue(os.path.isfile(jobLogFile))
542
596
  with open(jobLogFile) as f:
543
- self.assertEqual(f.read(), 'string\nbytes\n\nnewline\n')
597
+ self.assertEqual(f.read(), "string\nbytes\n\nnewline\n")
544
598
 
545
599
  def testBatchCreate(self):
546
600
  """Test creation of many jobs."""
@@ -549,8 +603,11 @@ class AbstractJobStoreTest:
549
603
  jobs = []
550
604
  with jobstore.batch():
551
605
  for i in range(100):
552
- overlargeJob = JobDescription(requirements=jobRequirements,
553
- jobName='test-overlarge', unitName='onJobStore')
606
+ overlargeJob = JobDescription(
607
+ requirements=jobRequirements,
608
+ jobName="test-overlarge",
609
+ unitName="onJobStore",
610
+ )
554
611
  jobstore.assign_job_id(overlargeJob)
555
612
  jobstore.create_job(overlargeJob)
556
613
  jobs.append(overlargeJob)
@@ -618,16 +675,16 @@ class AbstractJobStoreTest:
618
675
  try:
619
676
  store = self.externalStoreCache[self]
620
677
  except KeyError:
621
- logger.debug('Creating new external store for %s', self)
678
+ logger.debug("Creating new external store for %s", self)
622
679
  store = self.externalStoreCache[self] = self._createExternalStore()
623
680
  else:
624
- logger.debug('Reusing external store for %s', self)
681
+ logger.debug("Reusing external store for %s", self)
625
682
  return store
626
683
 
627
684
  @classmethod
628
685
  def cleanUpExternalStores(cls):
629
686
  for test, store in cls.externalStoreCache.items():
630
- logger.debug('Cleaning up external store for %s.', test)
687
+ logger.debug("Cleaning up external store for %s.", test)
631
688
  test._cleanUpExternalStore(store)
632
689
 
633
690
  mpTestPartSize = 5 << 20
@@ -637,9 +694,11 @@ class AbstractJobStoreTest:
637
694
 
638
695
  testClasses = [FileJobStoreTest, AWSJobStoreTest, GoogleJobStoreTest]
639
696
 
640
- activeTestClassesByName = {testCls.__name__: testCls
641
- for testCls in testClasses
642
- if not getattr(testCls, '__unittest_skip__', False)}
697
+ activeTestClassesByName = {
698
+ testCls.__name__: testCls
699
+ for testCls in testClasses
700
+ if not getattr(testCls, "__unittest_skip__", False)
701
+ }
643
702
 
644
703
  def testImportExportFile(self, otherCls, size, moveExports):
645
704
  """
@@ -659,7 +718,7 @@ class AbstractJobStoreTest:
659
718
 
660
719
  # The string in otherCls() is arbitrary as long as it returns a class that has access
661
720
  # to ._externalStore() and ._prepareTestFile()
662
- other = otherCls('testSharedFiles')
721
+ other = otherCls("testSharedFiles")
663
722
  store = other._externalStore()
664
723
 
665
724
  srcUrl, srcMd5 = other._prepareTestFile(store, size)
@@ -674,9 +733,11 @@ class AbstractJobStoreTest:
674
733
  self.jobstore_initialized.export_file(jobStoreFileID, dstUrl)
675
734
  self.assertEqual(fileMD5, other._hashTestFile(dstUrl))
676
735
 
677
- if otherCls.__name__ == 'FileJobStoreTest':
736
+ if otherCls.__name__ == "FileJobStoreTest":
678
737
  if isinstance(self.jobstore_initialized, FileJobStore):
679
- jobStorePath = self.jobstore_initialized._get_file_path_from_id(jobStoreFileID)
738
+ jobStorePath = self.jobstore_initialized._get_file_path_from_id(
739
+ jobStoreFileID
740
+ )
680
741
  jobStoreHasLink = os.path.islink(jobStorePath)
681
742
  if self.jobstore_initialized.moveExports:
682
743
  # Ensure the export performed a move / link
@@ -690,14 +751,20 @@ class AbstractJobStoreTest:
690
751
  os.remove(srcUrl[7:])
691
752
  os.remove(dstUrl[7:])
692
753
 
693
- make_tests(testImportExportFile, cls, otherCls=activeTestClassesByName,
694
- size=dict(zero=0,
695
- one=1,
696
- oneMiB=2 ** 20,
697
- partSizeMinusOne=cls.mpTestPartSize - 1,
698
- partSize=cls.mpTestPartSize,
699
- partSizePlusOne=cls.mpTestPartSize + 1),
700
- moveExports={'deactivated': None, 'activated': True})
754
+ make_tests(
755
+ testImportExportFile,
756
+ cls,
757
+ otherCls=activeTestClassesByName,
758
+ size=dict(
759
+ zero=0,
760
+ one=1,
761
+ oneMiB=2**20,
762
+ partSizeMinusOne=cls.mpTestPartSize - 1,
763
+ partSize=cls.mpTestPartSize,
764
+ partSizePlusOne=cls.mpTestPartSize + 1,
765
+ ),
766
+ moveExports={"deactivated": None, "activated": True},
767
+ )
701
768
 
702
769
  def testImportSharedFile(self, otherCls):
703
770
  """
@@ -708,33 +775,36 @@ class AbstractJobStoreTest:
708
775
  """
709
776
  # Prepare test file in other job store
710
777
  self.jobstore_initialized.part_size = cls.mpTestPartSize
711
- other = otherCls('testSharedFiles')
778
+ other = otherCls("testSharedFiles")
712
779
  store = other._externalStore()
713
780
 
714
781
  srcUrl, srcMd5 = other._prepareTestFile(store, 42)
715
782
  # Import into job store under test
716
- self.assertIsNone(self.jobstore_initialized.import_file(srcUrl, shared_file_name='foo'))
717
- with self.jobstore_initialized.read_shared_file_stream('foo') as f:
783
+ self.assertIsNone(
784
+ self.jobstore_initialized.import_file(
785
+ srcUrl, shared_file_name="foo"
786
+ )
787
+ )
788
+ with self.jobstore_initialized.read_shared_file_stream("foo") as f:
718
789
  fileMD5 = hashlib.md5(f.read()).hexdigest()
719
790
  self.assertEqual(fileMD5, srcMd5)
720
- if otherCls.__name__ == 'FileJobStoreTest': # Remove local Files
791
+ if otherCls.__name__ == "FileJobStoreTest": # Remove local Files
721
792
  os.remove(srcUrl[7:])
722
793
 
723
- make_tests(testImportSharedFile,
724
- cls,
725
- otherCls=activeTestClassesByName)
794
+ make_tests(testImportSharedFile, cls, otherCls=activeTestClassesByName)
726
795
 
727
796
  def testImportHttpFile(self):
728
- '''Test importing a file over HTTP.'''
729
- http = socketserver.TCPServer(('', 0), StubHttpRequestHandler)
797
+ """Test importing a file over HTTP."""
798
+ http = socketserver.TCPServer(("", 0), StubHttpRequestHandler)
730
799
  try:
731
800
  httpThread = threading.Thread(target=http.serve_forever)
732
801
  httpThread.start()
733
802
  try:
734
803
  assignedPort = http.server_address[1]
735
- url = 'http://localhost:%d' % assignedPort
804
+ url = "http://localhost:%d" % assignedPort
736
805
  with self.jobstore_initialized.read_file_stream(
737
- self.jobstore_initialized.import_file(url)) as readable:
806
+ self.jobstore_initialized.import_file(url)
807
+ ) as readable:
738
808
  f1 = readable.read()
739
809
  f2 = StubHttpRequestHandler.fileContents
740
810
  if isinstance(f1, bytes) and not isinstance(f2, bytes):
@@ -749,20 +819,25 @@ class AbstractJobStoreTest:
749
819
  http.server_close()
750
820
 
751
821
  def testImportFtpFile(self):
752
- '''Test importing a file over FTP'''
753
- ftpfile = {'name': 'foo', 'content': 'foo bar baz qux'}
822
+ """Test importing a file over FTP"""
823
+ ftpfile = {"name": "foo", "content": "foo bar baz qux"}
754
824
  ftp = FTPStubServer(0)
755
825
  ftp.run()
756
826
  try:
757
827
  ftp.add_file(**ftpfile)
758
828
  assignedPort = ftp.server.server_address[1]
759
- url = 'ftp://user1:passwd@localhost:%d/%s' % (assignedPort, ftpfile['name'])
760
- with self.jobstore_initialized.read_file_stream(self.jobstore_initialized.import_file(url)) as readable:
829
+ url = "ftp://user1:passwd@localhost:%d/%s" % (
830
+ assignedPort,
831
+ ftpfile["name"],
832
+ )
833
+ with self.jobstore_initialized.read_file_stream(
834
+ self.jobstore_initialized.import_file(url)
835
+ ) as readable:
761
836
  imported_content = readable.read()
762
837
  # python 2/3 string/bytestring compat
763
838
  if isinstance(imported_content, bytes):
764
- imported_content = imported_content.decode('utf-8')
765
- self.assertEqual(imported_content, ftpfile['content'])
839
+ imported_content = imported_content.decode("utf-8")
840
+ self.assertEqual(imported_content, ftpfile["content"])
766
841
  finally:
767
842
  ftp.stop()
768
843
 
@@ -778,12 +853,19 @@ class AbstractJobStoreTest:
778
853
  job = self.arbitraryJob()
779
854
  self.jobstore_initialized.assign_job_id(job)
780
855
  self.jobstore_initialized.create_job(job)
781
- fileIDs = [self.jobstore_initialized.get_empty_file_store_id(job.jobStoreID, cleanup=True) for _ in
782
- range(0, numFiles)]
856
+ fileIDs = [
857
+ self.jobstore_initialized.get_empty_file_store_id(
858
+ job.jobStoreID, cleanup=True
859
+ )
860
+ for _ in range(0, numFiles)
861
+ ]
783
862
  self.jobstore_initialized.delete_job(job.jobStoreID)
784
863
  for fileID in fileIDs:
785
864
  # NB: the fooStream() methods return context managers
786
- self.assertRaises(NoSuchFileException, self.jobstore_initialized.read_file_stream(fileID).__enter__)
865
+ self.assertRaises(
866
+ NoSuchFileException,
867
+ self.jobstore_initialized.read_file_stream(fileID).__enter__,
868
+ )
787
869
 
788
870
  @slow
789
871
  def testMultipartUploads(self):
@@ -818,9 +900,10 @@ class AbstractJobStoreTest:
818
900
  checksumThread.start()
819
901
  try:
820
902
  # Should not block. On Linux, /dev/random blocks when it's running low on entropy
821
- with open('/dev/urandom', 'rb') as readable:
822
- with self.jobstore_initialized.write_file_stream(job.jobStoreID, cleanup=True) as (
823
- writable, fileId):
903
+ with open("/dev/urandom", "rb") as readable:
904
+ with self.jobstore_initialized.write_file_stream(
905
+ job.jobStoreID, cleanup=True
906
+ ) as (writable, fileId):
824
907
  for i in range(int(partSize * partsPerFile / bufSize)):
825
908
  buf = readable.read(bufSize)
826
909
  checksumQueue.put(buf)
@@ -845,13 +928,15 @@ class AbstractJobStoreTest:
845
928
  checksum = hashlib.md5()
846
929
  fh, path = mkstemp()
847
930
  try:
848
- with os.fdopen(fh, 'wb+') as writable:
849
- with open('/dev/urandom', 'rb') as readable:
931
+ with os.fdopen(fh, "wb+") as writable:
932
+ with open("/dev/urandom", "rb") as readable:
850
933
  for i in range(int(partSize * partsPerFile / bufSize)):
851
934
  buf = readable.read(bufSize)
852
935
  writable.write(buf)
853
936
  checksum.update(buf)
854
- fileId = self.jobstore_initialized.write_file(path, job.jobStoreID, cleanup=True)
937
+ fileId = self.jobstore_initialized.write_file(
938
+ path, job.jobStoreID, cleanup=True
939
+ )
855
940
  finally:
856
941
  os.unlink(path)
857
942
  before = checksum.hexdigest()
@@ -869,14 +954,18 @@ class AbstractJobStoreTest:
869
954
  self.jobstore_initialized.delete_job(job.jobStoreID)
870
955
 
871
956
  def testZeroLengthFiles(self):
872
- '''Test reading and writing of empty files.'''
957
+ """Test reading and writing of empty files."""
873
958
  job = self.arbitraryJob()
874
959
  self.jobstore_initialized.assign_job_id(job)
875
960
  self.jobstore_initialized.create_job(job)
876
- nullFile = self.jobstore_initialized.write_file('/dev/null', job.jobStoreID, cleanup=True)
961
+ nullFile = self.jobstore_initialized.write_file(
962
+ "/dev/null", job.jobStoreID, cleanup=True
963
+ )
877
964
  with self.jobstore_initialized.read_file_stream(nullFile) as f:
878
965
  assert not f.read()
879
- with self.jobstore_initialized.write_file_stream(job.jobStoreID, cleanup=True) as (f, nullStream):
966
+ with self.jobstore_initialized.write_file_stream(
967
+ job.jobStoreID, cleanup=True
968
+ ) as (f, nullStream):
880
969
  pass
881
970
  with self.jobstore_initialized.read_file_stream(nullStream) as f:
882
971
  assert not f.read()
@@ -884,12 +973,12 @@ class AbstractJobStoreTest:
884
973
 
885
974
  @slow
886
975
  def testLargeFile(self):
887
- '''Test the reading and writing of large files.'''
976
+ """Test the reading and writing of large files."""
888
977
  # Write a large file.
889
978
  dirPath = self._createTempDir()
890
- filePath = os.path.join(dirPath, 'large')
979
+ filePath = os.path.join(dirPath, "large")
891
980
  hashIn = hashlib.md5()
892
- with open(filePath, 'wb') as f:
981
+ with open(filePath, "wb") as f:
893
982
  for i in range(0, 10):
894
983
  buf = os.urandom(self._partSize())
895
984
  f.write(buf)
@@ -899,7 +988,9 @@ class AbstractJobStoreTest:
899
988
  job = self.arbitraryJob()
900
989
  self.jobstore_initialized.assign_job_id(job)
901
990
  self.jobstore_initialized.create_job(job)
902
- jobStoreFileID = self.jobstore_initialized.write_file(filePath, job.jobStoreID, cleanup=True)
991
+ jobStoreFileID = self.jobstore_initialized.write_file(
992
+ filePath, job.jobStoreID, cleanup=True
993
+ )
903
994
 
904
995
  # Remove the local file.
905
996
  os.unlink(filePath)
@@ -909,7 +1000,7 @@ class AbstractJobStoreTest:
909
1000
 
910
1001
  # Reread the file to confirm success.
911
1002
  hashOut = hashlib.md5()
912
- with open(filePath, 'rb') as f:
1003
+ with open(filePath, "rb") as f:
913
1004
  while True:
914
1005
  buf = f.read(self._partSize())
915
1006
  if not buf:
@@ -927,8 +1018,8 @@ class AbstractJobStoreTest:
927
1018
 
928
1019
  def assertUrl(self, url):
929
1020
 
930
- prefix, path = url.split(':', 1)
931
- if prefix == 'file':
1021
+ prefix, path = url.split(":", 1)
1022
+ if prefix == "file":
932
1023
  self.assertTrue(os.path.exists(path))
933
1024
  else:
934
1025
  try:
@@ -966,8 +1057,7 @@ class AbstractJobStoreTest:
966
1057
  self.assertEqual(len(list(jobstore.jobs())), 101)
967
1058
 
968
1059
  # See how long it takes to clean with cache
969
- jobCache = {job.jobStoreID: job
970
- for job in jobstore.jobs()}
1060
+ jobCache = {job.jobStoreID: job for job in jobstore.jobs()}
971
1061
  cacheStart = time.time()
972
1062
  jobstore.clean(jobCache)
973
1063
  cacheEnd = time.time()
@@ -982,13 +1072,15 @@ class AbstractJobStoreTest:
982
1072
  # NB: the 'thread' method seems to be needed here to actually
983
1073
  # ensure the timeout is raised, probably because the only
984
1074
  # "live" thread doesn't hold the GIL.
985
- @pytest.mark.timeout(45, method='thread')
1075
+ @pytest.mark.timeout(45, method="thread")
986
1076
  def testPartialReadFromStream(self):
987
1077
  """Test whether readFileStream will deadlock on a partial read."""
988
1078
  job = self.arbitraryJob()
989
1079
  self.jobstore_initialized.assign_job_id(job)
990
1080
  self.jobstore_initialized.create_job(job)
991
- with self.jobstore_initialized.write_file_stream(job.jobStoreID, cleanup=True) as (f, fileID):
1081
+ with self.jobstore_initialized.write_file_stream(
1082
+ job.jobStoreID, cleanup=True
1083
+ ) as (f, fileID):
992
1084
  # Write enough data to make sure the writer thread
993
1085
  # will get blocked on the write. Technically anything
994
1086
  # greater than the pipe buffer size plus the libc
@@ -996,7 +1088,7 @@ class AbstractJobStoreTest:
996
1088
  # but this gives us a lot of extra room just to be sure.
997
1089
 
998
1090
  # python 3 requires self.fileContents to be a bytestring
999
- a = b'a'
1091
+ a = b"a"
1000
1092
  f.write(a * 300000)
1001
1093
  with self.jobstore_initialized.read_file_stream(fileID) as f:
1002
1094
  self.assertEqual(f.read(1), a)
@@ -1069,9 +1161,9 @@ class AbstractEncryptedJobStoreTest:
1069
1161
 
1070
1162
  def _createConfig(self):
1071
1163
  config = super()._createConfig()
1072
- sseKeyFile = os.path.join(self.sseKeyDir, 'keyFile')
1073
- with open(sseKeyFile, 'w') as f:
1074
- f.write('01234567890123456789012345678901')
1164
+ sseKeyFile = os.path.join(self.sseKeyDir, "keyFile")
1165
+ with open(sseKeyFile, "w") as f:
1166
+ f.write("01234567890123456789012345678901")
1075
1167
  config.sseKey = sseKeyFile
1076
1168
  # config.attrib['sse_key'] = sseKeyFile
1077
1169
  return config
@@ -1081,9 +1173,11 @@ class AbstractEncryptedJobStoreTest:
1081
1173
  Create an encrypted file. Read it in encrypted mode then try with encryption off
1082
1174
  to ensure that it fails.
1083
1175
  """
1084
- phrase = b'This file is encrypted.'
1085
- fileName = 'foo'
1086
- with self.jobstore_initialized.write_shared_file_stream(fileName, encrypted=True) as f:
1176
+ phrase = b"This file is encrypted."
1177
+ fileName = "foo"
1178
+ with self.jobstore_initialized.write_shared_file_stream(
1179
+ fileName, encrypted=True
1180
+ ) as f:
1087
1181
  f.write(phrase)
1088
1182
  with self.jobstore_initialized.read_shared_file_stream(fileName) as f:
1089
1183
  self.assertEqual(phrase, f.read())
@@ -1094,7 +1188,9 @@ class AbstractEncryptedJobStoreTest:
1094
1188
  with self.jobstore_initialized.read_shared_file_stream(fileName) as f:
1095
1189
  self.assertEqual(phrase, f.read())
1096
1190
  except AssertionError as e:
1097
- self.assertEqual("Content is encrypted but no key was provided.", e.args[0])
1191
+ self.assertEqual(
1192
+ "Content is encrypted but no key was provided.", e.args[0]
1193
+ )
1098
1194
  else:
1099
1195
  self.fail("Read encryption content with encryption off.")
1100
1196
 
@@ -1110,21 +1206,21 @@ class FileJobStoreTest(AbstractJobStoreTest.Test):
1110
1206
  shutil.rmtree(self.jobstore_initialized.jobStoreDir)
1111
1207
 
1112
1208
  def _prepareTestFile(self, dirPath, size=None):
1113
- fileName = 'testfile_%s' % uuid.uuid4()
1209
+ fileName = "testfile_%s" % uuid.uuid4()
1114
1210
  localFilePath = dirPath + fileName
1115
- url = 'file://%s' % localFilePath
1211
+ url = "file://%s" % localFilePath
1116
1212
  if size is None:
1117
1213
  return url
1118
1214
  else:
1119
1215
  content = os.urandom(size)
1120
- with open(localFilePath, 'wb') as writable:
1216
+ with open(localFilePath, "wb") as writable:
1121
1217
  writable.write(content)
1122
1218
 
1123
1219
  return url, hashlib.md5(content).hexdigest()
1124
1220
 
1125
1221
  def _hashTestFile(self, url):
1126
1222
  localFilePath = FileJobStore._extract_path_from_url(urlparse.urlparse(url))
1127
- with open(localFilePath, 'rb') as f:
1223
+ with open(localFilePath, "rb") as f:
1128
1224
  return hashlib.md5(f.read()).hexdigest()
1129
1225
 
1130
1226
  def _createExternalStore(self):
@@ -1141,7 +1237,9 @@ class FileJobStoreTest(AbstractJobStoreTest.Test):
1141
1237
  job = self.arbitraryJob()
1142
1238
  self.jobstore_initialized.assign_job_id(job)
1143
1239
  self.jobstore_initialized.create_job(job)
1144
- fileID = self.jobstore_initialized.write_file(path, job.jobStoreID, cleanup=True)
1240
+ fileID = self.jobstore_initialized.write_file(
1241
+ path, job.jobStoreID, cleanup=True
1242
+ )
1145
1243
  self.assertTrue(fileID.endswith(os.path.basename(path)))
1146
1244
  finally:
1147
1245
  os.unlink(path)
@@ -1152,12 +1250,19 @@ class FileJobStoreTest(AbstractJobStoreTest.Test):
1152
1250
  original_filestore = None
1153
1251
  try:
1154
1252
  original_filestore = self._createExternalStore()
1155
- dir_symlinked_to_original_filestore = f'{original_filestore}-am-i-real'
1253
+ dir_symlinked_to_original_filestore = f"{original_filestore}-am-i-real"
1156
1254
  os.symlink(original_filestore, dir_symlinked_to_original_filestore)
1157
- filejobstore_using_symlink = FileJobStore(dir_symlinked_to_original_filestore, fanOut=2)
1158
- self.assertEqual(dir_symlinked_to_original_filestore, filejobstore_using_symlink.jobStoreDir)
1255
+ filejobstore_using_symlink = FileJobStore(
1256
+ dir_symlinked_to_original_filestore, fanOut=2
1257
+ )
1258
+ self.assertEqual(
1259
+ dir_symlinked_to_original_filestore,
1260
+ filejobstore_using_symlink.jobStoreDir,
1261
+ )
1159
1262
  finally:
1160
- if dir_symlinked_to_original_filestore and os.path.exists(dir_symlinked_to_original_filestore):
1263
+ if dir_symlinked_to_original_filestore and os.path.exists(
1264
+ dir_symlinked_to_original_filestore
1265
+ ):
1161
1266
  os.unlink(dir_symlinked_to_original_filestore)
1162
1267
  if original_filestore and os.path.exists(original_filestore):
1163
1268
  shutil.rmtree(original_filestore)
@@ -1171,21 +1276,23 @@ class FileJobStoreTest(AbstractJobStoreTest.Test):
1171
1276
  try:
1172
1277
  # Grab a temp directory to make files in. Make sure it's on the
1173
1278
  # same device as everything else.
1174
- temp_dir = os.path.abspath(self.namePrefix + '-import')
1279
+ temp_dir = os.path.abspath(self.namePrefix + "-import")
1175
1280
  os.mkdir(temp_dir)
1176
- to_import = os.path.join(temp_dir, 'import-me')
1177
- with open(to_import, 'w') as f:
1178
- f.write('test')
1281
+ to_import = os.path.join(temp_dir, "import-me")
1282
+ with open(to_import, "w") as f:
1283
+ f.write("test")
1179
1284
 
1180
1285
  # And a temp directory next to the job store to download to
1181
- download_dir = os.path.abspath(self.namePrefix + '-dl')
1286
+ download_dir = os.path.abspath(self.namePrefix + "-dl")
1182
1287
  os.mkdir(download_dir)
1183
1288
 
1184
1289
  # Import it as a symlink
1185
- file_id = self.jobstore_initialized.import_file('file://' + to_import, symlink=True)
1290
+ file_id = self.jobstore_initialized.import_file(
1291
+ "file://" + to_import, symlink=True
1292
+ )
1186
1293
 
1187
1294
  # Take it out as a hard link or copy
1188
- download_to = os.path.join(download_dir, 'downloaded')
1295
+ download_to = os.path.join(download_dir, "downloaded")
1189
1296
  self.jobstore_initialized.read_file(file_id, download_to)
1190
1297
 
1191
1298
  # Make sure it isn't a symlink
@@ -1210,7 +1317,9 @@ class FileJobStoreTest(AbstractJobStoreTest.Test):
1210
1317
  for link_imports in [True, False]:
1211
1318
  self.jobstore_initialized.linkImports = link_imports
1212
1319
  # Import into job store under test
1213
- jobStoreFileID = self.jobstore_initialized.import_file(srcUrl, symlink=symlink)
1320
+ jobStoreFileID = self.jobstore_initialized.import_file(
1321
+ srcUrl, symlink=symlink
1322
+ )
1214
1323
  self.assertTrue(isinstance(jobStoreFileID, FileID))
1215
1324
  with self.jobstore_initialized.read_file_stream(jobStoreFileID) as f:
1216
1325
  # gets abs path
@@ -1226,16 +1335,44 @@ class FileJobStoreTest(AbstractJobStoreTest.Test):
1226
1335
  os.remove(filename)
1227
1336
  os.remove(srcUrl[7:])
1228
1337
 
1338
+ def test_symlink_read_control(self):
1339
+ """
1340
+ Test that files are read by symlink when expected
1341
+ """
1342
+
1343
+ for should_link in (False, True):
1344
+ # Configure a jobstore to symlink out reads or not, as appropriate
1345
+ config = self._createConfig()
1346
+ config.symlink_job_store_reads = should_link
1347
+ store = FileJobStore(
1348
+ self.namePrefix + ("-link" if should_link else "-nolink")
1349
+ )
1350
+ store.initialize(config)
1351
+
1352
+ # Put something in the job store
1353
+ src_url, _ = self._prepareTestFile(self._externalStore(), 1)
1354
+ file_id = store.import_file(src_url, symlink=False)
1355
+
1356
+ # Read it out, accepting a symlink
1357
+ dest_dir = self._createTempDir()
1358
+ dest_path = os.path.join(dest_dir, "file.dat")
1359
+ store.read_file(file_id, dest_path, symlink=True)
1360
+
1361
+ # Make sure we get a symlink exactly when configured to
1362
+ assert os.path.exists(dest_path)
1363
+ assert os.path.islink(dest_path) == should_link
1364
+
1229
1365
 
1230
1366
  @needs_google_project
1231
1367
  @needs_google_storage
1232
1368
  @pytest.mark.xfail
1233
1369
  class GoogleJobStoreTest(AbstractJobStoreTest.Test):
1234
- projectID = os.getenv('TOIL_GOOGLE_PROJECTID')
1370
+ projectID = os.getenv("TOIL_GOOGLE_PROJECTID")
1235
1371
  headers = {"x-goog-project-id": projectID}
1236
1372
 
1237
1373
  def _createJobStore(self):
1238
1374
  from toil.jobStores.googleJobStore import GoogleJobStore
1375
+
1239
1376
  return GoogleJobStore(GoogleJobStoreTest.projectID + ":" + self.namePrefix)
1240
1377
 
1241
1378
  def _corruptJobStore(self):
@@ -1245,24 +1382,31 @@ class GoogleJobStoreTest(AbstractJobStoreTest.Test):
1245
1382
 
1246
1383
  def _prepareTestFile(self, bucket, size=None):
1247
1384
  from toil.jobStores.googleJobStore import GoogleJobStore
1248
- fileName = 'testfile_%s' % uuid.uuid4()
1249
- url = f'gs://{bucket.name}/{fileName}'
1385
+
1386
+ fileName = "testfile_%s" % uuid.uuid4()
1387
+ url = f"gs://{bucket.name}/{fileName}"
1250
1388
  if size is None:
1251
1389
  return url
1252
- with open('/dev/urandom', 'rb') as readable:
1390
+ with open("/dev/urandom", "rb") as readable:
1253
1391
  contents = str(readable.read(size))
1254
- GoogleJobStore._write_to_url(BytesIO(bytes(contents, 'utf-8')), urlparse.urlparse(url))
1392
+ GoogleJobStore._write_to_url(
1393
+ BytesIO(bytes(contents, "utf-8")), urlparse.urlparse(url)
1394
+ )
1255
1395
  return url, hashlib.md5(contents.encode()).hexdigest()
1256
1396
 
1257
1397
  def _hashTestFile(self, url):
1258
1398
  from toil.jobStores.googleJobStore import GoogleJobStore
1259
- contents = GoogleJobStore._get_blob_from_url(urlparse.urlparse(url)).download_as_string()
1399
+
1400
+ contents = GoogleJobStore._get_blob_from_url(
1401
+ urlparse.urlparse(url)
1402
+ ).download_as_string()
1260
1403
  return hashlib.md5(contents).hexdigest()
1261
1404
 
1262
1405
  @google_retry
1263
1406
  def _createExternalStore(self):
1264
1407
  from google.cloud import storage
1265
- bucketName = ("import-export-test-" + str(uuid.uuid4()))
1408
+
1409
+ bucketName = "import-export-test-" + str(uuid.uuid4())
1266
1410
  storageClient = storage.Client()
1267
1411
  return storageClient.create_bucket(bucketName)
1268
1412
 
@@ -1281,11 +1425,13 @@ class GoogleJobStoreTest(AbstractJobStoreTest.Test):
1281
1425
  class AWSJobStoreTest(AbstractJobStoreTest.Test):
1282
1426
  def _createJobStore(self):
1283
1427
  from toil.jobStores.aws.jobStore import AWSJobStore
1428
+
1284
1429
  partSize = self._partSize()
1285
- return AWSJobStore(self.awsRegion() + ':' + self.namePrefix, partSize=partSize)
1430
+ return AWSJobStore(self.awsRegion() + ":" + self.namePrefix, partSize=partSize)
1286
1431
 
1287
1432
  def _corruptJobStore(self):
1288
1433
  from toil.jobStores.aws.jobStore import AWSJobStore
1434
+
1289
1435
  assert isinstance(self.jobstore_initialized, AWSJobStore) # type hinting
1290
1436
  self.jobstore_initialized.destroy()
1291
1437
 
@@ -1301,47 +1447,63 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1301
1447
  from toil.lib.aws.session import establish_boto3_session
1302
1448
  from toil.lib.aws.utils import retry_s3
1303
1449
 
1304
- externalAWSLocation = 'us-west-1'
1305
- for testRegion in 'us-east-1', 'us-west-2':
1450
+ externalAWSLocation = "us-west-1"
1451
+ for testRegion in "us-east-1", "us-west-2":
1306
1452
  # We run this test twice, once with the default s3 server us-east-1 as the test region
1307
1453
  # and once with another server (us-west-2). The external server is always us-west-1.
1308
1454
  # This incidentally tests that the BucketLocationConflictException is thrown when using
1309
1455
  # both the default, and a non-default server.
1310
1456
  testJobStoreUUID = str(uuid.uuid4())
1311
1457
  # Create the bucket at the external region
1312
- bucketName = 'domain-test-' + testJobStoreUUID + '--files'
1313
- client = establish_boto3_session().client('s3', region_name=externalAWSLocation)
1314
- resource = establish_boto3_session().resource('s3', region_name=externalAWSLocation)
1458
+ bucketName = "domain-test-" + testJobStoreUUID + "--files"
1459
+ client = establish_boto3_session().client(
1460
+ "s3", region_name=externalAWSLocation
1461
+ )
1462
+ resource = establish_boto3_session().resource(
1463
+ "s3", region_name=externalAWSLocation
1464
+ )
1315
1465
 
1316
1466
  for attempt in retry_s3(delays=(2, 5, 10, 30, 60), timeout=600):
1317
1467
  with attempt:
1318
1468
  # Create the bucket at the home region
1319
- client.create_bucket(Bucket=bucketName,
1320
- CreateBucketConfiguration={'LocationConstraint': externalAWSLocation})
1321
-
1322
- owner_tag = os.environ.get('TOIL_OWNER_TAG')
1469
+ client.create_bucket(
1470
+ Bucket=bucketName,
1471
+ CreateBucketConfiguration={
1472
+ "LocationConstraint": externalAWSLocation
1473
+ },
1474
+ )
1475
+
1476
+ owner_tag = os.environ.get("TOIL_OWNER_TAG")
1323
1477
  if owner_tag:
1324
1478
  for attempt in retry_s3(delays=(1, 1, 2, 4, 8, 16), timeout=33):
1325
1479
  with attempt:
1326
1480
  bucket_tagging = resource.BucketTagging(bucketName)
1327
- bucket_tagging.put(Tagging={'TagSet': [{'Key': 'Owner', 'Value': owner_tag}]})
1328
-
1329
- options = Job.Runner.getDefaultOptions('aws:' + testRegion + ':domain-test-' + testJobStoreUUID)
1330
- options.logLevel = 'DEBUG'
1481
+ bucket_tagging.put(
1482
+ Tagging={"TagSet": [{"Key": "Owner", "Value": owner_tag}]}
1483
+ )
1484
+
1485
+ options = Job.Runner.getDefaultOptions(
1486
+ "aws:" + testRegion + ":domain-test-" + testJobStoreUUID
1487
+ )
1488
+ options.logLevel = "DEBUG"
1331
1489
  try:
1332
1490
  with Toil(options) as toil:
1333
1491
  pass
1334
1492
  except BucketLocationConflictException:
1335
1493
  # Catch the expected BucketLocationConflictException and ensure that the bound
1336
1494
  # domains don't exist in SDB.
1337
- sdb = establish_boto3_session().client(region_name=self.awsRegion(), service_name="sdb")
1495
+ sdb = establish_boto3_session().client(
1496
+ region_name=self.awsRegion(), service_name="sdb"
1497
+ )
1338
1498
  next_token = None
1339
1499
  allDomainNames = []
1340
1500
  while True:
1341
1501
  if next_token is None:
1342
1502
  domains = sdb.list_domains(MaxNumberOfDomains=100)
1343
1503
  else:
1344
- domains = sdb.list_domains(MaxNumberOfDomains=100, NextToken=next_token)
1504
+ domains = sdb.list_domains(
1505
+ MaxNumberOfDomains=100, NextToken=next_token
1506
+ )
1345
1507
  allDomainNames.extend(domains["DomainNames"])
1346
1508
  next_token = domains.get("NextToken")
1347
1509
  if next_token is None:
@@ -1356,7 +1518,10 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1356
1518
  client.delete_bucket(Bucket=bucketName)
1357
1519
  except ClientError as e:
1358
1520
  # The actual HTTP code of the error is in status.
1359
- if e.response.get('ResponseMetadata', {}).get('HTTPStatusCode') == 404:
1521
+ if (
1522
+ e.response.get("ResponseMetadata", {}).get("HTTPStatusCode")
1523
+ == 404
1524
+ ):
1360
1525
  # The bucket doesn't exist; maybe a failed delete actually succeeded.
1361
1526
  pass
1362
1527
  else:
@@ -1365,25 +1530,29 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1365
1530
  @slow
1366
1531
  def testInlinedFiles(self):
1367
1532
  from toil.jobStores.aws.jobStore import AWSJobStore
1533
+
1368
1534
  jobstore = self.jobstore_initialized
1369
1535
  for encrypted in (True, False):
1370
1536
  n = AWSJobStore.FileInfo.maxInlinedSize()
1371
1537
  sizes = (1, n // 2, n - 1, n, n + 1, 2 * n)
1372
1538
  for size in chain(sizes, islice(reversed(sizes), 1)):
1373
1539
  s = os.urandom(size)
1374
- with jobstore.write_shared_file_stream('foo') as f:
1540
+ with jobstore.write_shared_file_stream("foo") as f:
1375
1541
  f.write(s)
1376
- with jobstore.read_shared_file_stream('foo') as f:
1542
+ with jobstore.read_shared_file_stream("foo") as f:
1377
1543
  self.assertEqual(s, f.read())
1378
1544
 
1379
1545
  def testOverlargeJob(self):
1380
1546
  jobstore = self.jobstore_initialized
1381
1547
  jobRequirements = dict(memory=12, cores=34, disk=35, preemptible=True)
1382
- overlargeJob = JobDescription(requirements=jobRequirements,
1383
- jobName='test-overlarge', unitName='onJobStore')
1548
+ overlargeJob = JobDescription(
1549
+ requirements=jobRequirements,
1550
+ jobName="test-overlarge",
1551
+ unitName="onJobStore",
1552
+ )
1384
1553
 
1385
1554
  # Make the pickled size of the job larger than 256K
1386
- with open("/dev/urandom", 'rb') as random:
1555
+ with open("/dev/urandom", "rb") as random:
1387
1556
  overlargeJob.jobName = str(random.read(512 * 1024))
1388
1557
  jobstore.assign_job_id(overlargeJob)
1389
1558
  jobstore.create_job(overlargeJob)
@@ -1395,33 +1564,39 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1395
1564
  jobstore.delete_job(overlargeJob.jobStoreID)
1396
1565
 
1397
1566
  def testMultiThreadImportFile(self) -> None:
1398
- """ Tests that importFile is thread-safe."""
1567
+ """Tests that importFile is thread-safe."""
1399
1568
 
1400
1569
  from concurrent.futures.thread import ThreadPoolExecutor
1401
1570
 
1402
1571
  from toil.lib.threading import cpu_count
1403
1572
 
1404
- threads: Tuple[int, ...] = (2, cpu_count()) if cpu_count() > 2 else (2, )
1573
+ threads: tuple[int, ...] = (2, cpu_count()) if cpu_count() > 2 else (2,)
1405
1574
  num_of_files: int = 5
1406
1575
  size: int = 1 << 16 + 1
1407
1576
 
1408
1577
  # The string in otherCls() is arbitrary as long as it returns a class that has access
1409
1578
  # to ._externalStore() and ._prepareTestFile()
1410
- other: AbstractJobStoreTest.Test = AWSJobStoreTest('testSharedFiles')
1579
+ other: AbstractJobStoreTest.Test = AWSJobStoreTest("testSharedFiles")
1411
1580
  store: Any = other._externalStore()
1412
1581
 
1413
1582
  # prepare test files to import
1414
- logger.debug(f'Preparing {num_of_files} test files for testMultiThreadImportFile().')
1583
+ logger.debug(
1584
+ f"Preparing {num_of_files} test files for testMultiThreadImportFile()."
1585
+ )
1415
1586
  test_files = [other._prepareTestFile(store, size) for _ in range(num_of_files)]
1416
1587
 
1417
1588
  for thread_count in threads:
1418
- with self.subTest(f'Testing threaded importFile with "{thread_count}" threads.'):
1589
+ with self.subTest(
1590
+ f'Testing threaded importFile with "{thread_count}" threads.'
1591
+ ):
1419
1592
  results = []
1420
1593
 
1421
1594
  with ThreadPoolExecutor(max_workers=thread_count) as executor:
1422
1595
  for url, expected_md5 in test_files:
1423
1596
  # run jobStore.importFile() asynchronously
1424
- future = executor.submit(self.jobstore_initialized.import_file, url)
1597
+ future = executor.submit(
1598
+ self.jobstore_initialized.import_file, url
1599
+ )
1425
1600
  results.append((future, expected_md5))
1426
1601
 
1427
1602
  self.assertEqual(len(results), num_of_files)
@@ -1431,33 +1606,39 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1431
1606
  self.assertIsInstance(file_id, FileID)
1432
1607
 
1433
1608
  with self.jobstore_initialized.read_file_stream(file_id) as f:
1434
- self.assertEqual(hashlib.md5(f.read()).hexdigest(), expected_md5)
1609
+ self.assertEqual(
1610
+ hashlib.md5(f.read()).hexdigest(), expected_md5
1611
+ )
1435
1612
 
1436
1613
  def _prepareTestFile(self, bucket, size=None):
1437
1614
  from toil.lib.aws.utils import retry_s3
1438
1615
 
1439
- file_name = 'testfile_%s' % uuid.uuid4()
1440
- url = f's3://{bucket.name}/{file_name}'
1616
+ file_name = "testfile_%s" % uuid.uuid4()
1617
+ url = f"s3://{bucket.name}/{file_name}"
1441
1618
  if size is None:
1442
1619
  return url
1443
- with open('/dev/urandom', 'rb') as readable:
1620
+ with open("/dev/urandom", "rb") as readable:
1444
1621
  for attempt in retry_s3():
1445
1622
  with attempt:
1446
1623
  bucket.put_object(Key=file_name, Body=str(readable.read(size)))
1447
- return url, hashlib.md5(bucket.Object(file_name).get().get('Body').read()).hexdigest()
1624
+ return (
1625
+ url,
1626
+ hashlib.md5(bucket.Object(file_name).get().get("Body").read()).hexdigest(),
1627
+ )
1448
1628
 
1449
1629
  def _hashTestFile(self, url: str) -> str:
1450
1630
  from toil.jobStores.aws.jobStore import AWSJobStore
1451
1631
  from toil.lib.aws.utils import get_object_for_url
1632
+
1452
1633
  str(AWSJobStore) # to prevent removal of that import
1453
1634
  key = get_object_for_url(urlparse.urlparse(url), existing=True)
1454
- contents = key.get().get('Body').read()
1635
+ contents = key.get().get("Body").read()
1455
1636
  return hashlib.md5(contents).hexdigest()
1456
1637
 
1457
1638
  def _createExternalStore(self):
1458
1639
  """A S3.Bucket instance is returned"""
1459
1640
  from toil.jobStores.aws.jobStore import establish_boto3_session
1460
- from toil.lib.aws.utils import retry_s3, create_s3_bucket
1641
+ from toil.lib.aws.utils import create_s3_bucket, retry_s3
1461
1642
 
1462
1643
  resource = establish_boto3_session().resource(
1463
1644
  "s3", region_name=self.awsRegion()
@@ -1488,6 +1669,7 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1488
1669
 
1489
1670
  def _batchDeletionSize(self):
1490
1671
  from toil.jobStores.aws.jobStore import AWSJobStore
1672
+
1491
1673
  return AWSJobStore.itemsPerBatchDelete
1492
1674
 
1493
1675
 
@@ -1495,15 +1677,10 @@ class AWSJobStoreTest(AbstractJobStoreTest.Test):
1495
1677
class InvalidAWSJobStoreTest(ToilTest):
    def testInvalidJobStoreName(self):
        """Locators that violate AWS job store naming rules raise ValueError."""
        from toil.jobStores.aws.jobStore import AWSJobStore

        bad_locators = (
            "us-west-2:a--b",
            "us-west-2:" + ("a" * 100),
            "us-west-2:a_b",
        )
        for locator in bad_locators:
            with self.assertRaises(ValueError):
                AWSJobStore(locator)
1507
1684
 
1508
1685
 
1509
1686
  @needs_aws_s3
@@ -1514,14 +1691,14 @@ class EncryptedAWSJobStoreTest(AWSJobStoreTest, AbstractEncryptedJobStoreTest.Te
1514
1691
 
1515
1692
 
1516
1693
class StubHttpRequestHandler(http.server.SimpleHTTPRequestHandler):
    """Stub handler that answers every GET with a fixed plain-text payload."""

    # Payload served for every GET; kept as str at class level.
    fileContents = "A good programmer looks both ways before crossing a one-way street"

    def do_GET(self):
        """Respond 200 with the fixed payload and a correct Content-length."""
        # Encode into a local instead of rebinding self.fileContents: the old
        # code made the instance attribute bytes, so a second GET on the same
        # handler would fail (bytes has no .encode), and it computed
        # Content-length from the str length rather than the encoded byte
        # length (equal only while the payload stays ASCII).
        body = self.fileContents.encode("utf-8")
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", len(body))
        self.end_headers()
        self.wfile.write(body)
1526
1703
 
1527
1704