toil 8.1.0b1__py3-none-any.whl → 9.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (275)
  1. toil/__init__.py +0 -35
  2. toil/batchSystems/abstractBatchSystem.py +1 -1
  3. toil/batchSystems/abstractGridEngineBatchSystem.py +1 -1
  4. toil/batchSystems/awsBatch.py +1 -1
  5. toil/batchSystems/cleanup_support.py +1 -1
  6. toil/batchSystems/kubernetes.py +53 -7
  7. toil/batchSystems/local_support.py +1 -1
  8. toil/batchSystems/mesos/batchSystem.py +13 -8
  9. toil/batchSystems/mesos/test/__init__.py +3 -2
  10. toil/batchSystems/registry.py +15 -118
  11. toil/batchSystems/singleMachine.py +1 -1
  12. toil/batchSystems/slurm.py +27 -26
  13. toil/bus.py +5 -3
  14. toil/common.py +59 -12
  15. toil/cwl/cwltoil.py +81 -38
  16. toil/cwl/utils.py +103 -3
  17. toil/job.py +64 -49
  18. toil/jobStores/abstractJobStore.py +35 -239
  19. toil/jobStores/aws/jobStore.py +2 -1
  20. toil/jobStores/fileJobStore.py +27 -2
  21. toil/jobStores/googleJobStore.py +110 -33
  22. toil/leader.py +9 -0
  23. toil/lib/accelerators.py +4 -2
  24. toil/lib/aws/utils.py.orig +504 -0
  25. toil/lib/bioio.py +1 -1
  26. toil/lib/docker.py +252 -91
  27. toil/lib/dockstore.py +11 -3
  28. toil/lib/exceptions.py +5 -3
  29. toil/lib/generatedEC2Lists.py +81 -19
  30. toil/lib/history.py +87 -13
  31. toil/lib/history_submission.py +23 -9
  32. toil/lib/io.py +34 -22
  33. toil/lib/misc.py +8 -2
  34. toil/lib/plugins.py +106 -0
  35. toil/lib/resources.py +2 -1
  36. toil/lib/threading.py +11 -10
  37. toil/lib/url.py +320 -0
  38. toil/options/common.py +8 -0
  39. toil/options/cwl.py +13 -1
  40. toil/options/runner.py +17 -10
  41. toil/options/wdl.py +22 -0
  42. toil/provisioners/aws/awsProvisioner.py +25 -2
  43. toil/server/api_spec/LICENSE +201 -0
  44. toil/server/api_spec/README.rst +5 -0
  45. toil/server/app.py +12 -6
  46. toil/server/cli/wes_cwl_runner.py +3 -2
  47. toil/server/wes/abstract_backend.py +21 -43
  48. toil/server/wes/toil_backend.py +2 -2
  49. toil/test/__init__.py +275 -115
  50. toil/test/batchSystems/batchSystemTest.py +228 -213
  51. toil/test/batchSystems/batch_system_plugin_test.py +7 -0
  52. toil/test/batchSystems/test_slurm.py +27 -0
  53. toil/test/cactus/pestis.tar.gz +0 -0
  54. toil/test/conftest.py +7 -0
  55. toil/test/cwl/2.fasta +11 -0
  56. toil/test/cwl/2.fastq +12 -0
  57. toil/test/cwl/conftest.py +1 -1
  58. toil/test/cwl/cwlTest.py +1175 -870
  59. toil/test/cwl/directory/directory/file.txt +15 -0
  60. toil/test/cwl/download_directory_file.json +4 -0
  61. toil/test/cwl/download_directory_s3.json +4 -0
  62. toil/test/cwl/download_file.json +6 -0
  63. toil/test/cwl/download_http.json +6 -0
  64. toil/test/cwl/download_https.json +6 -0
  65. toil/test/cwl/download_s3.json +6 -0
  66. toil/test/cwl/download_subdirectory_file.json +5 -0
  67. toil/test/cwl/download_subdirectory_s3.json +5 -0
  68. toil/test/cwl/empty.json +1 -0
  69. toil/test/cwl/mock_mpi/fake_mpi.yml +8 -0
  70. toil/test/cwl/mock_mpi/fake_mpi_run.py +42 -0
  71. toil/test/cwl/optional-file-exists.json +6 -0
  72. toil/test/cwl/optional-file-missing.json +6 -0
  73. toil/test/cwl/preemptible_expression.json +1 -0
  74. toil/test/cwl/revsort-job-missing.json +6 -0
  75. toil/test/cwl/revsort-job.json +6 -0
  76. toil/test/cwl/s3_secondary_file.json +16 -0
  77. toil/test/cwl/seqtk_seq_job.json +6 -0
  78. toil/test/cwl/stream.json +6 -0
  79. toil/test/cwl/test_filename_conflict_resolution.ms/table.dat +0 -0
  80. toil/test/cwl/test_filename_conflict_resolution.ms/table.f0 +0 -0
  81. toil/test/cwl/test_filename_conflict_resolution.ms/table.f1 +0 -0
  82. toil/test/cwl/test_filename_conflict_resolution.ms/table.f1i +0 -0
  83. toil/test/cwl/test_filename_conflict_resolution.ms/table.f2 +0 -0
  84. toil/test/cwl/test_filename_conflict_resolution.ms/table.f2_TSM0 +0 -0
  85. toil/test/cwl/test_filename_conflict_resolution.ms/table.f3 +0 -0
  86. toil/test/cwl/test_filename_conflict_resolution.ms/table.f3_TSM0 +0 -0
  87. toil/test/cwl/test_filename_conflict_resolution.ms/table.f4 +0 -0
  88. toil/test/cwl/test_filename_conflict_resolution.ms/table.f4_TSM0 +0 -0
  89. toil/test/cwl/test_filename_conflict_resolution.ms/table.f5 +0 -0
  90. toil/test/cwl/test_filename_conflict_resolution.ms/table.info +0 -0
  91. toil/test/cwl/test_filename_conflict_resolution.ms/table.lock +0 -0
  92. toil/test/cwl/whale.txt +16 -0
  93. toil/test/docs/scripts/example_alwaysfail.py +38 -0
  94. toil/test/docs/scripts/example_alwaysfail_with_files.wdl +33 -0
  95. toil/test/docs/scripts/example_cachingbenchmark.py +117 -0
  96. toil/test/docs/scripts/stagingExampleFiles/in.txt +1 -0
  97. toil/test/docs/scripts/stagingExampleFiles/out.txt +2 -0
  98. toil/test/docs/scripts/tutorial_arguments.py +23 -0
  99. toil/test/docs/scripts/tutorial_debugging.patch +12 -0
  100. toil/test/docs/scripts/tutorial_debugging_hangs.wdl +126 -0
  101. toil/test/docs/scripts/tutorial_debugging_works.wdl +129 -0
  102. toil/test/docs/scripts/tutorial_docker.py +20 -0
  103. toil/test/docs/scripts/tutorial_dynamic.py +24 -0
  104. toil/test/docs/scripts/tutorial_encapsulation.py +28 -0
  105. toil/test/docs/scripts/tutorial_encapsulation2.py +29 -0
  106. toil/test/docs/scripts/tutorial_helloworld.py +15 -0
  107. toil/test/docs/scripts/tutorial_invokeworkflow.py +27 -0
  108. toil/test/docs/scripts/tutorial_invokeworkflow2.py +30 -0
  109. toil/test/docs/scripts/tutorial_jobfunctions.py +22 -0
  110. toil/test/docs/scripts/tutorial_managing.py +29 -0
  111. toil/test/docs/scripts/tutorial_managing2.py +56 -0
  112. toil/test/docs/scripts/tutorial_multiplejobs.py +25 -0
  113. toil/test/docs/scripts/tutorial_multiplejobs2.py +21 -0
  114. toil/test/docs/scripts/tutorial_multiplejobs3.py +22 -0
  115. toil/test/docs/scripts/tutorial_promises.py +25 -0
  116. toil/test/docs/scripts/tutorial_promises2.py +30 -0
  117. toil/test/docs/scripts/tutorial_quickstart.py +22 -0
  118. toil/test/docs/scripts/tutorial_requirements.py +44 -0
  119. toil/test/docs/scripts/tutorial_services.py +45 -0
  120. toil/test/docs/scripts/tutorial_staging.py +45 -0
  121. toil/test/docs/scripts/tutorial_stats.py +64 -0
  122. toil/test/docs/scriptsTest.py +2 -1
  123. toil/test/lib/aws/test_iam.py +3 -1
  124. toil/test/lib/dockerTest.py +205 -122
  125. toil/test/lib/test_history.py +101 -77
  126. toil/test/lib/test_url.py +69 -0
  127. toil/test/lib/url_plugin_test.py +105 -0
  128. toil/test/provisioners/aws/awsProvisionerTest.py +13 -10
  129. toil/test/provisioners/clusterTest.py +17 -4
  130. toil/test/provisioners/gceProvisionerTest.py +17 -15
  131. toil/test/server/serverTest.py +78 -36
  132. toil/test/sort/sort.py +4 -1
  133. toil/test/src/busTest.py +17 -17
  134. toil/test/src/deferredFunctionTest.py +145 -132
  135. toil/test/src/importExportFileTest.py +71 -63
  136. toil/test/src/jobEncapsulationTest.py +27 -28
  137. toil/test/src/jobServiceTest.py +149 -133
  138. toil/test/src/jobTest.py +219 -211
  139. toil/test/src/miscTests.py +66 -60
  140. toil/test/src/promisedRequirementTest.py +163 -169
  141. toil/test/src/regularLogTest.py +24 -24
  142. toil/test/src/resourceTest.py +82 -76
  143. toil/test/src/restartDAGTest.py +51 -47
  144. toil/test/src/resumabilityTest.py +24 -19
  145. toil/test/src/retainTempDirTest.py +60 -57
  146. toil/test/src/systemTest.py +17 -13
  147. toil/test/src/threadingTest.py +29 -32
  148. toil/test/utils/ABCWorkflowDebug/B_file.txt +1 -0
  149. toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +204 -0
  150. toil/test/utils/ABCWorkflowDebug/mkFile.py +16 -0
  151. toil/test/utils/ABCWorkflowDebug/sleep.cwl +12 -0
  152. toil/test/utils/ABCWorkflowDebug/sleep.yaml +1 -0
  153. toil/test/utils/toilDebugTest.py +117 -102
  154. toil/test/utils/toilKillTest.py +54 -53
  155. toil/test/utils/utilsTest.py +303 -229
  156. toil/test/wdl/lint_error.wdl +9 -0
  157. toil/test/wdl/md5sum/empty_file.json +1 -0
  158. toil/test/wdl/md5sum/md5sum-gs.json +1 -0
  159. toil/test/wdl/md5sum/md5sum.1.0.wdl +32 -0
  160. toil/test/wdl/md5sum/md5sum.input +1 -0
  161. toil/test/wdl/md5sum/md5sum.json +1 -0
  162. toil/test/wdl/md5sum/md5sum.wdl +25 -0
  163. toil/test/wdl/miniwdl_self_test/inputs-namespaced.json +1 -0
  164. toil/test/wdl/miniwdl_self_test/inputs.json +1 -0
  165. toil/test/wdl/miniwdl_self_test/self_test.wdl +40 -0
  166. toil/test/wdl/standard_library/as_map.json +16 -0
  167. toil/test/wdl/standard_library/as_map_as_input.wdl +23 -0
  168. toil/test/wdl/standard_library/as_pairs.json +7 -0
  169. toil/test/wdl/standard_library/as_pairs_as_input.wdl +23 -0
  170. toil/test/wdl/standard_library/ceil.json +3 -0
  171. toil/test/wdl/standard_library/ceil_as_command.wdl +16 -0
  172. toil/test/wdl/standard_library/ceil_as_input.wdl +16 -0
  173. toil/test/wdl/standard_library/collect_by_key.json +1 -0
  174. toil/test/wdl/standard_library/collect_by_key_as_input.wdl +23 -0
  175. toil/test/wdl/standard_library/cross.json +11 -0
  176. toil/test/wdl/standard_library/cross_as_input.wdl +19 -0
  177. toil/test/wdl/standard_library/flatten.json +7 -0
  178. toil/test/wdl/standard_library/flatten_as_input.wdl +18 -0
  179. toil/test/wdl/standard_library/floor.json +3 -0
  180. toil/test/wdl/standard_library/floor_as_command.wdl +16 -0
  181. toil/test/wdl/standard_library/floor_as_input.wdl +16 -0
  182. toil/test/wdl/standard_library/keys.json +8 -0
  183. toil/test/wdl/standard_library/keys_as_input.wdl +24 -0
  184. toil/test/wdl/standard_library/length.json +7 -0
  185. toil/test/wdl/standard_library/length_as_input.wdl +16 -0
  186. toil/test/wdl/standard_library/length_as_input_with_map.json +7 -0
  187. toil/test/wdl/standard_library/length_as_input_with_map.wdl +17 -0
  188. toil/test/wdl/standard_library/length_invalid.json +3 -0
  189. toil/test/wdl/standard_library/range.json +3 -0
  190. toil/test/wdl/standard_library/range_0.json +3 -0
  191. toil/test/wdl/standard_library/range_as_input.wdl +17 -0
  192. toil/test/wdl/standard_library/range_invalid.json +3 -0
  193. toil/test/wdl/standard_library/read_boolean.json +3 -0
  194. toil/test/wdl/standard_library/read_boolean_as_command.wdl +17 -0
  195. toil/test/wdl/standard_library/read_float.json +3 -0
  196. toil/test/wdl/standard_library/read_float_as_command.wdl +17 -0
  197. toil/test/wdl/standard_library/read_int.json +3 -0
  198. toil/test/wdl/standard_library/read_int_as_command.wdl +17 -0
  199. toil/test/wdl/standard_library/read_json.json +3 -0
  200. toil/test/wdl/standard_library/read_json_as_output.wdl +31 -0
  201. toil/test/wdl/standard_library/read_lines.json +3 -0
  202. toil/test/wdl/standard_library/read_lines_as_output.wdl +31 -0
  203. toil/test/wdl/standard_library/read_map.json +3 -0
  204. toil/test/wdl/standard_library/read_map_as_output.wdl +31 -0
  205. toil/test/wdl/standard_library/read_string.json +3 -0
  206. toil/test/wdl/standard_library/read_string_as_command.wdl +17 -0
  207. toil/test/wdl/standard_library/read_tsv.json +3 -0
  208. toil/test/wdl/standard_library/read_tsv_as_output.wdl +31 -0
  209. toil/test/wdl/standard_library/round.json +3 -0
  210. toil/test/wdl/standard_library/round_as_command.wdl +16 -0
  211. toil/test/wdl/standard_library/round_as_input.wdl +16 -0
  212. toil/test/wdl/standard_library/size.json +3 -0
  213. toil/test/wdl/standard_library/size_as_command.wdl +17 -0
  214. toil/test/wdl/standard_library/size_as_output.wdl +36 -0
  215. toil/test/wdl/standard_library/stderr.json +3 -0
  216. toil/test/wdl/standard_library/stderr_as_output.wdl +30 -0
  217. toil/test/wdl/standard_library/stdout.json +3 -0
  218. toil/test/wdl/standard_library/stdout_as_output.wdl +30 -0
  219. toil/test/wdl/standard_library/sub.json +3 -0
  220. toil/test/wdl/standard_library/sub_as_input.wdl +17 -0
  221. toil/test/wdl/standard_library/sub_as_input_with_file.wdl +17 -0
  222. toil/test/wdl/standard_library/transpose.json +6 -0
  223. toil/test/wdl/standard_library/transpose_as_input.wdl +18 -0
  224. toil/test/wdl/standard_library/write_json.json +6 -0
  225. toil/test/wdl/standard_library/write_json_as_command.wdl +17 -0
  226. toil/test/wdl/standard_library/write_lines.json +7 -0
  227. toil/test/wdl/standard_library/write_lines_as_command.wdl +17 -0
  228. toil/test/wdl/standard_library/write_map.json +6 -0
  229. toil/test/wdl/standard_library/write_map_as_command.wdl +17 -0
  230. toil/test/wdl/standard_library/write_tsv.json +6 -0
  231. toil/test/wdl/standard_library/write_tsv_as_command.wdl +17 -0
  232. toil/test/wdl/standard_library/zip.json +12 -0
  233. toil/test/wdl/standard_library/zip_as_input.wdl +19 -0
  234. toil/test/wdl/test.csv +3 -0
  235. toil/test/wdl/test.tsv +3 -0
  236. toil/test/wdl/testfiles/croo.wdl +38 -0
  237. toil/test/wdl/testfiles/drop_files.wdl +62 -0
  238. toil/test/wdl/testfiles/drop_files_subworkflow.wdl +13 -0
  239. toil/test/wdl/testfiles/empty.txt +0 -0
  240. toil/test/wdl/testfiles/not_enough_outputs.wdl +33 -0
  241. toil/test/wdl/testfiles/random.wdl +66 -0
  242. toil/test/wdl/testfiles/read_file.wdl +18 -0
  243. toil/test/wdl/testfiles/string_file_coercion.json +1 -0
  244. toil/test/wdl/testfiles/string_file_coercion.wdl +35 -0
  245. toil/test/wdl/testfiles/test.json +4 -0
  246. toil/test/wdl/testfiles/test_boolean.txt +1 -0
  247. toil/test/wdl/testfiles/test_float.txt +1 -0
  248. toil/test/wdl/testfiles/test_int.txt +1 -0
  249. toil/test/wdl/testfiles/test_lines.txt +5 -0
  250. toil/test/wdl/testfiles/test_map.txt +2 -0
  251. toil/test/wdl/testfiles/test_string.txt +1 -0
  252. toil/test/wdl/testfiles/url_to_file.wdl +13 -0
  253. toil/test/wdl/testfiles/url_to_optional_file.wdl +14 -0
  254. toil/test/wdl/testfiles/vocab.json +1 -0
  255. toil/test/wdl/testfiles/vocab.wdl +66 -0
  256. toil/test/wdl/testfiles/wait.wdl +34 -0
  257. toil/test/wdl/wdl_specification/type_pair.json +23 -0
  258. toil/test/wdl/wdl_specification/type_pair_basic.wdl +36 -0
  259. toil/test/wdl/wdl_specification/type_pair_with_files.wdl +36 -0
  260. toil/test/wdl/wdl_specification/v1_spec.json +1 -0
  261. toil/test/wdl/wdl_specification/v1_spec_declaration.wdl +39 -0
  262. toil/test/wdl/wdltoil_test.py +751 -529
  263. toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
  264. toil/utils/toilSshCluster.py +23 -0
  265. toil/utils/toilUpdateEC2Instances.py +1 -0
  266. toil/version.py +5 -5
  267. toil/wdl/wdltoil.py +518 -437
  268. toil/worker.py +11 -6
  269. {toil-8.1.0b1.dist-info → toil-9.0.0.dist-info}/METADATA +25 -24
  270. toil-9.0.0.dist-info/RECORD +444 -0
  271. {toil-8.1.0b1.dist-info → toil-9.0.0.dist-info}/WHEEL +1 -1
  272. toil-8.1.0b1.dist-info/RECORD +0 -259
  273. {toil-8.1.0b1.dist-info → toil-9.0.0.dist-info}/entry_points.txt +0 -0
  274. {toil-8.1.0b1.dist-info → toil-9.0.0.dist-info/licenses}/LICENSE +0 -0
  275. {toil-8.1.0b1.dist-info → toil-9.0.0.dist-info}/top_level.txt +0 -0
toil/__init__.py CHANGED
@@ -480,38 +480,3 @@ def logProcessContext(config: "Config") -> None:
     log.info("Running Toil version %s on host %s.", version, socket.gethostname())
     log.debug("Configuration: %s", config.__dict__)

-
-try:
-    cache_path = "~/.cache/aws/cached_temporary_credentials"
-    datetime_format = (
-        "%Y-%m-%dT%H:%M:%SZ"  # incidentally the same as the format used by AWS
-    )
-    log = logging.getLogger(__name__)
-
-    # But in addition to our manual cache, we also are going to turn on boto3's
-    # new built-in caching layer.
-
-    def datetime_to_str(dt):
-        """
-        Convert a naive (implicitly UTC) datetime object into a string, explicitly UTC.
-
-        >>> datetime_to_str(datetime(1970, 1, 1, 0, 0, 0))
-        '1970-01-01T00:00:00Z'
-        """
-        return dt.strftime(datetime_format)
-
-    def str_to_datetime(s):
-        """
-        Convert a string, explicitly UTC into a naive (implicitly UTC) datetime object.
-
-        >>> str_to_datetime( '1970-01-01T00:00:00Z' )
-        datetime.datetime(1970, 1, 1, 0, 0)
-
-        Just to show that the constructor args for seconds and microseconds are optional:
-        >>> datetime(1970, 1, 1, 0, 0, 0)
-        datetime.datetime(1970, 1, 1, 0, 0)
-        """
-        return datetime.strptime(s, datetime_format)
-
-except ImportError:
-    pass
toil/batchSystems/abstractBatchSystem.py CHANGED
@@ -310,7 +310,7 @@ class BatchSystemSupport(AbstractBatchSystem):
     """Partial implementation of AbstractBatchSystem, support methods."""

     def __init__(
-        self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
     ) -> None:
         """
         Initialize initial state of the object.
toil/batchSystems/abstractGridEngineBatchSystem.py CHANGED
@@ -421,7 +421,7 @@ class AbstractGridEngineBatchSystem(BatchSystemCleanupSupport):
         raise NotImplementedError()

     def __init__(
-        self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: float
     ) -> None:
         super().__init__(config, maxCores, maxMemory, maxDisk)
         self.config = config
toil/batchSystems/awsBatch.py CHANGED
@@ -83,7 +83,7 @@ class AWSBatchBatchSystem(BatchSystemCleanupSupport):
         return True

     def __init__(
-        self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
     ) -> None:
         super().__init__(config, maxCores, maxMemory, maxDisk)

toil/batchSystems/cleanup_support.py CHANGED
@@ -45,7 +45,7 @@ class BatchSystemCleanupSupport(BatchSystemLocalSupport):
         return contexts

     def __init__(
-        self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
     ) -> None:
         super().__init__(config, maxCores, maxMemory, maxDisk)

toil/batchSystems/kubernetes.py CHANGED
@@ -21,6 +21,7 @@ cannot yet be launched. That functionality will need to wait for user-mode
 Docker
 """
 import datetime
+import io
 import logging
 import math
 import os
@@ -50,7 +51,8 @@ else:
     from typing import Protocol, TypedDict, runtime_checkable

 import urllib3
-import yaml
+import ruamel.yaml as yaml
+import json

 # The Right Way to use the Kubernetes module is to `import kubernetes` and then you get all your stuff as like ApiClient. But this doesn't work for the stubs: the stubs seem to only support importing things from the internal modules in `kubernetes` where they are actually defined. See for example <https://github.com/MaterializeInc/kubernetes-stubs/issues/9 and <https://github.com/MaterializeInc/kubernetes-stubs/issues/10>. So we just import all the things we use into our global namespace here.
 from kubernetes.client import (
@@ -77,6 +79,7 @@ from kubernetes.client import (
     V1ResourceRequirements,
     V1SecretVolumeSource,
     V1SecurityContext,
+    V1PodSecurityContext,
     V1Toleration,
     V1Volume,
     V1VolumeMount,
@@ -148,7 +151,7 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
         customObjects: NotRequired[CustomObjectsApi]

     def __init__(
-        self, config: Config, maxCores: int, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: int, maxMemory: float, maxDisk: int
     ) -> None:
         super().__init__(config, maxCores, maxMemory, maxDisk)

@@ -314,7 +317,10 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
                 del here[k]

         drop_boring(root_dict)
-        return yaml.dump(root_dict)
+        s = io.StringIO()
+        YAML = yaml.YAML(typ='safe')
+        YAML.dump(root_dict, s)
+        return s.getvalue()

     @overload
     def _api(
@@ -803,6 +809,26 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
             ],
         )

+    class FakeResponse:
+        data: str
+
+    T = TypeVar('T')
+    def _load_kubernetes_object(self, file: str, cls: type[T]) -> T:
+        """
+        Deserialize a YAML representation into a Kubernetes object
+        :param file: Path to YAML file
+        :param cls: Kubernetes API model type for deserialized object
+        :return: Deserialized object
+        """
+        YAML = yaml.YAML(typ='safe')
+        object_def = YAML.load(open('container.yaml').read())
+        # The kubernetes API does not have an actual deserializer, so this is a workaround
+        # See: https://github.com/kubernetes-client/python/issues/977
+        faked_response = self.FakeResponse()
+        faked_response.data = json.dumps(object_def)
+        return ApiClient().deserialize(faked_response, cls)
+
+
     def _create_pod_spec(
         self,
         command: str,
@@ -946,17 +972,24 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
             volume_mounts=mounts,
         )

+        if self.config.kubernetes_security_context:
+            container.security_context = self._load_kubernetes_object(self.config.kubernetes_security_context, V1SecurityContext)
+
         # In case security context rules are not allowed to be set, we only apply
         # a security context at all if we need to turn on privileged mode.
         if self.config.kubernetes_privileged:
-            container.security_context = V1SecurityContext(
-                privileged=self.config.kubernetes_privileged
-            )
+            if container.security_context is None:
+                container.security_context = V1SecurityContext()
+            container.security_context.privileged = self.config.kubernetes_privileged

         # Wrap the container in a spec
         pod_spec = V1PodSpec(
             containers=[container], volumes=volumes, restart_policy="Never"
         )
+
+        if self.config.kubernetes_pod_security_context:
+            pod_spec.security_context = self._load_kubernetes_object(self.config.kubernetes_pod_security_context, V1PodSecurityContext)
+
         # Tell the spec where to land
         placement.apply(pod_spec)

@@ -2126,7 +2159,18 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
             "privileged operations, such as FUSE. On Toil-managed clusters with --enableFuse, "
             "this is set to True. (default: %(default)s)",
         )
-
+        parser.add_argument("--kubernetesPodSecurityContext",
+                            dest='kubernetes_pod_security_context',
+                            type=str,
+                            env_var="TOIL_KUBERNETES_POD_SECURITY_CONTEXT",
+                            default=None,
+                            help="Path to a YAML defining a pod security context to apply to all pods.")
+        parser.add_argument("--kubernetesSecurityContext",
+                            dest='kubernetes_security_context',
+                            type=str,
+                            env_var="TOIL_KUBERNETES_SECURITY_CONTEXT",
+                            default=None,
+                            help="Path to a YAML defining a security context to apply to all containers.")
     OptionType = TypeVar("OptionType")

     @classmethod
@@ -2138,3 +2182,5 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
         )
         setOption("kubernetes_pod_timeout")
         setOption("kubernetes_privileged")
+        setOption("kubernetes_pod_security_context")
+        setOption("kubernetes_security_context")
toil/batchSystems/local_support.py CHANGED
@@ -30,7 +30,7 @@ class BatchSystemLocalSupport(BatchSystemSupport):
    """Adds a local queue for helper jobs, useful for CWL & others."""

    def __init__(
-        self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
    ) -> None:
        super().__init__(config, maxCores, maxMemory, maxDisk)
        max_local_jobs = (
toil/batchSystems/mesos/batchSystem.py CHANGED
@@ -40,6 +40,7 @@ from toil.batchSystems.abstractBatchSystem import (
 from toil.batchSystems.local_support import BatchSystemLocalSupport
 from toil.batchSystems.mesos import JobQueue, MesosShape, TaskData, ToilJob
 from toil.batchSystems.options import OptionSetter
+from toil.common import Config
 from toil.job import JobDescription
 from toil.lib.conversions import b_to_mib, mib_to_b
 from toil.lib.memoize import strict_bool
@@ -61,22 +62,26 @@ class MesosBatchSystem(BatchSystemLocalSupport, AbstractScalableBatchSystem, Sch
     """

     @classmethod
-    def supportsAutoDeployment(cls):
+    def supportsAutoDeployment(cls) -> bool:
         return True

     @classmethod
-    def supportsWorkerCleanup(cls):
+    def supportsWorkerCleanup(cls) -> bool:
         return True

     class ExecutorInfo:
-        def __init__(self, nodeAddress, agentId, nodeInfo, lastSeen):
+        def __init__(
+            self, nodeAddress: str, agentId: str, nodeInfo: str, lastSeen: str
+        ) -> None:
             super().__init__()
             self.nodeAddress = nodeAddress
             self.agentId = agentId
             self.nodeInfo = nodeInfo
             self.lastSeen = lastSeen

-    def __init__(self, config, maxCores, maxMemory, maxDisk):
+    def __init__(
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
+    ) -> None:
         super().__init__(config, maxCores, maxMemory, maxDisk)

         # The auto-deployed resource representing the user script. Will be passed along in every
@@ -165,13 +170,13 @@ class MesosBatchSystem(BatchSystemLocalSupport, AbstractScalableBatchSystem, Sch

         self._startDriver(config)

-    def setUserScript(self, userScript):
+    def setUserScript(self, userScript: str) -> None:
         self.userScript = userScript

-    def ignoreNode(self, nodeAddress):
+    def ignoreNode(self, nodeAddress: str) -> None:
         self.ignoredNodes.add(nodeAddress)

-    def unignoreNode(self, nodeAddress):
+    def unignoreNode(self, nodeAddress: str) -> None:
         self.ignoredNodes.remove(nodeAddress)

     def issueBatchJob(
@@ -179,7 +184,7 @@ class MesosBatchSystem(BatchSystemLocalSupport, AbstractScalableBatchSystem, Sch
         command: str,
         jobNode: JobDescription,
         job_environment: Optional[dict[str, str]] = None,
-    ):
+    ) -> str:
         """
         Issues the following command returning a unique jobID. Command is the string to run, memory
         is an int giving the number of bytes the job needs to run in and cores is the number of cpus
toil/batchSystems/mesos/test/__init__.py CHANGED
@@ -7,6 +7,7 @@ from abc import ABCMeta, abstractmethod
 from contextlib import closing
 from shutil import which
 from urllib.request import urlopen
+from typing import Optional

 from toil.lib.retry import retry
 from toil.lib.threading import ExceptionalThread, cpu_count
@@ -25,7 +26,7 @@ class MesosTestSupport:
         with closing(urlopen("http://127.0.0.1:5050/version")) as content:
             content.read()

-    def _startMesos(self, numCores=None):
+    def _startMesos(self, numCores: Optional[int] = None) -> None:
         if numCores is None:
             numCores = cpu_count()
         shutil.rmtree("/tmp/mesos", ignore_errors=True)
@@ -52,7 +53,7 @@ class MesosTestSupport:
             log.warning("Forcibly killing child which ignored SIGTERM")
             process.kill()

-    def _stopMesos(self):
+    def _stopMesos(self) -> None:
         self._stopProcess(self.agent.popen)
         self.agent.join()
         self._stopProcess(self.master.popen)
toil/batchSystems/registry.py CHANGED
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import importlib
 import logging
 import pkgutil
 import warnings
@@ -21,6 +20,7 @@ from typing import TYPE_CHECKING, Callable

 from toil.lib.compatibility import deprecated
 from toil.lib.memoize import memoize
+import toil.lib.plugins

 if TYPE_CHECKING:
     from toil.batchSystems.abstractBatchSystem import AbstractBatchSystem
@@ -40,17 +40,14 @@ def add_batch_system_factory(

     :param class_factory: A function that returns a batch system class (NOT an instance), which implements :class:`toil.batchSystems.abstractBatchSystem.AbstractBatchSystem`.
     """
-    _registry_keys.append(key)
-    _registry[key] = class_factory
+    toil.lib.plugins.register_plugin("batch_system", key, class_factory)


 def get_batch_systems() -> Sequence[str]:
     """
-    Get the names of all the availsble batch systems.
+    Get the names of all the available batch systems.
     """
-    _load_all_plugins()
-
-    return _registry_keys
+    return toil.lib.plugins.get_plugin_names("batch_system")


 def get_batch_system(key: str) -> type["AbstractBatchSystem"]:
@@ -60,8 +57,7 @@ def get_batch_system(key: str) -> type["AbstractBatchSystem"]:
     :raises: KeyError if the key is not the name of a batch system, and
         ImportError if the batch system's class cannot be loaded.
     """
-
-    return _registry[key]()
+    return toil.lib.plugins.get_plugin("batch_system", key)()


 DEFAULT_BATCH_SYSTEM = "single_machine"
@@ -126,114 +122,15 @@ def kubernetes_batch_system_factory():


 #####
-# Registry implementation
-#####
-
-_registry: dict[str, Callable[[], type["AbstractBatchSystem"]]] = {
-    "aws_batch": aws_batch_batch_system_factory,
-    "single_machine": single_machine_batch_system_factory,
-    "grid_engine": gridengine_batch_system_factory,
-    "lsf": lsf_batch_system_factory,
-    "mesos": mesos_batch_system_factory,
-    "slurm": slurm_batch_system_factory,
-    "torque": torque_batch_system_factory,
-    "htcondor": htcondor_batch_system_factory,
-    "kubernetes": kubernetes_batch_system_factory,
-}
-_registry_keys = list(_registry.keys())
-
-# We will load any packages starting with this prefix and let them call
-# add_batch_system_factory()
-_PLUGIN_NAME_PREFIX = "toil_batch_system_"
-
-
-@memoize
-def _load_all_plugins() -> None:
-    """
-    Load all the batch system plugins that are installed.
-    """
-
-    for finder, name, is_pkg in pkgutil.iter_modules():
-        # For all installed packages
-        if name.startswith(_PLUGIN_NAME_PREFIX):
-            # If it is a Toil batch system plugin, import it
-            importlib.import_module(name)
-
-
-#####
-# Deprecated API
+# Registers all built-in batch system
 #####

-# We used to directly access these constants, but now the Right Way to use this
-# module is add_batch_system_factory() to register and get_batch_systems() to
-# get the list/get_batch_system() to get a class by name.
-
-
-def __getattr__(name):
-    """
-    Implement a fallback attribute getter to handle deprecated constants.
-
-    See <https://stackoverflow.com/a/48242860>.
-    """
-    if name == "BATCH_SYSTEM_FACTORY_REGISTRY":
-        warnings.warn(
-            "BATCH_SYSTEM_FACTORY_REGISTRY is deprecated; use get_batch_system() or add_batch_system_factory()",
-            DeprecationWarning,
-        )
-        return _registry
-    elif name == "BATCH_SYSTEMS":
-        warnings.warn(
-            "BATCH_SYSTEMS is deprecated; use get_batch_systems()", DeprecationWarning
-        )
-        return _registry_keys
-    else:
-        raise AttributeError(f"Module {__name__} ahs no attribute {name}")
-
-
-@deprecated(new_function_name="add_batch_system_factory")
-def addBatchSystemFactory(
-    key: str, batchSystemFactory: Callable[[], type["AbstractBatchSystem"]]
-):
-    """
-    Deprecated method to add a batch system.
-    """
-    return add_batch_system_factory(key, batchSystemFactory)
-
-
-#####
-# Testing utilities
-#####
-
-# We need a snapshot save/restore system for testing. We can't just tamper with
-# the globals because module-level globals are their own references, so we
-# can't touch this module's global name bindings from a client module.
-
-
-def save_batch_system_plugin_state() -> (
-    tuple[list[str], dict[str, Callable[[], type["AbstractBatchSystem"]]]]
-):
-    """
-    Return a snapshot of the plugin registry that can be restored to remove
-    added plugins. Useful for testing the plugin system in-process with other
-    tests.
-    """
-
-    snapshot = (list(_registry_keys), dict(_registry))
-    return snapshot
-
-
-def restore_batch_system_plugin_state(
-    snapshot: tuple[list[str], dict[str, Callable[[], type["AbstractBatchSystem"]]]]
-):
-    """
-    Restore the batch system registry state to a snapshot from
-    save_batch_system_plugin_state().
-    """
-
-    # We need to apply the snapshot without rebinding the names, because that
-    # won't affect modules that imported the names.
-    wanted_batch_systems, wanted_registry = snapshot
-    _registry_keys.clear()
-    _registry_keys.extend(wanted_batch_systems)
-    _registry.clear()
-    _registry.update(wanted_registry)
+add_batch_system_factory("aws_batch", aws_batch_batch_system_factory)
+add_batch_system_factory("single_machine", single_machine_batch_system_factory)
+add_batch_system_factory("grid_engine", gridengine_batch_system_factory)
+add_batch_system_factory("lsf", lsf_batch_system_factory)
+add_batch_system_factory("mesos", mesos_batch_system_factory)
+add_batch_system_factory("slurm", slurm_batch_system_factory)
+add_batch_system_factory("torque", torque_batch_system_factory)
+add_batch_system_factory("htcondor", htcondor_batch_system_factory)
+add_batch_system_factory("kubernetes", kubernetes_batch_system_factory)
toil/batchSystems/singleMachine.py CHANGED
@@ -94,7 +94,7 @@ class SingleMachineBatchSystem(BatchSystemSupport):
         self,
         config: Config,
         maxCores: float,
-        maxMemory: int,
+        maxMemory: float,
         maxDisk: int,
         max_jobs: Optional[int] = None,
     ) -> None:
toil/batchSystems/slurm.py CHANGED
@@ -101,6 +101,32 @@ def parse_slurm_time(slurm_time: str) -> int:
         total_seconds += multiplier * int(elapsed_split[index])
     return total_seconds

+# For parsing user-provided option overrides (or self-generated
+# options) for sbatch, we need a way to recognize long, long-with-equals, and
+# short forms.
+def option_detector(long: str, short: str | None = None) -> Callable[[str], bool]:
+    """
+    Get a function that returns true if it sees the long or short
+    option.
+    """
+    def is_match(option: str) -> bool:
+        return option == f"--{long}" or option.startswith(f"--{long}=") or (short is not None and option == f"-{short}")
+    return is_match
+
+def any_option_detector(options: list[str | tuple[str, str]]) -> Callable[[str], bool]:
+    """
+    Get a function that returns true if it sees any of the long
+    options or long or short option pairs.
+    """
+    detectors = [option_detector(o) if isinstance(o, str) else option_detector(*o) for o in options]
+    def is_match(option: str) -> bool:
+        for detector in detectors:
+            if detector(option):
+                return True
+        return False
+    return is_match
+
+

 class SlurmBatchSystem(AbstractGridEngineBatchSystem):
     class PartitionInfo(NamedTuple):
@@ -647,31 +673,6 @@ class SlurmBatchSystem(AbstractGridEngineBatchSystem):
         # Also any extra arguments from --slurmArgs or TOIL_SLURM_ARGS
         nativeConfig: str = self.boss.config.slurm_args  # type: ignore[attr-defined]

-        # For parsing user-provided option overrides (or self-generated
-        # options) we need a way to recognize long, long-with-equals, and
-        # short forms.
-        def option_detector(long: str, short: str | None = None) -> Callable[[str], bool]:
-            """
-            Get a function that returns true if it sees the long or short
-            option.
-            """
-            def is_match(option: str) -> bool:
-                return option == f"--{long}" or option.startswith(f"--{long}=") or (short is not None and option == f"-{short}")
-            return is_match
-
-        def any_option_detector(options: list[str | tuple[str, str]]) -> Callable[[str], bool]:
-            """
-            Get a function that returns true if it sees any of the long
-            options or long or short option pairs.
-            """
-            detectors = [option_detector(o) if isinstance(o, str) else option_detector(*o) for o in options]
-            def is_match(option: str) -> bool:
-                for detector in detectors:
-                    if detector(option):
-                        return True
-                return False
-            return is_match
-
         is_any_mem_option = any_option_detector(["mem", "mem-per-cpu", "mem-per-gpu"])
         is_any_cpus_option = any_option_detector([("cpus-per-task", "c"), "cpus-per-gpu"])
         is_export_option = option_detector("export")
@@ -842,7 +843,7 @@ class SlurmBatchSystem(AbstractGridEngineBatchSystem):
         return sbatch_line

     def __init__(
-        self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+        self, config: Config, maxCores: float, maxMemory: float, maxDisk: float
     ) -> None:
         super().__init__(config, maxCores, maxMemory, maxDisk)
         self.partitions = SlurmBatchSystem.PartitionSet()
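The option_detector/any_option_detector helpers above were moved from inside the sbatch-building method to module level; they classify sbatch options given in long, long-with-equals, or short form. A small usage sketch, assuming the helpers remain importable from toil.batchSystems.slurm as shown in the diff:

from toil.batchSystems.slurm import any_option_detector, option_detector

# Detectors mirroring the ones constructed in the diff above.
is_any_mem_option = any_option_detector(["mem", "mem-per-cpu", "mem-per-gpu"])
is_any_cpus_option = any_option_detector([("cpus-per-task", "c"), "cpus-per-gpu"])
is_export_option = option_detector("export")

assert is_any_mem_option("--mem=4G")         # long form with '='
assert is_any_cpus_option("-c")              # short form
assert is_export_option("--export")          # plain long form
assert not is_any_mem_option("--partition")  # unrelated sbatch option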
toil/bus.py CHANGED
@@ -69,13 +69,15 @@ import tempfile
 import threading
 from collections.abc import Iterator
 from dataclasses import dataclass
-from typing import IO, Any, Callable, NamedTuple, Optional, TypeVar, cast
+from typing import IO, Any, Callable, NamedTuple, Optional, TypeVar, TYPE_CHECKING, cast

 from pubsub.core import Publisher
 from pubsub.core.listener import Listener
 from pubsub.core.topicobj import Topic
 from pubsub.core.topicutils import ALL_TOPICS

+from toil.lib.misc import FileDescriptorOrPath
+
 logger = logging.getLogger(__name__)

 # We define some ways to talk about jobs.
@@ -434,7 +436,7 @@ class MessageBus:
         connection._set_bus(self)
         return connection

-    def connect_output_file(self, file_path: str) -> Any:
+    def connect_output_file(self, file_path: FileDescriptorOrPath) -> Any:
         """
         Send copies of all messages to the given output file.

@@ -736,7 +738,7 @@ class JobStatus:
         )  # if the exit code is -1 and the job id is specified, we assume the job is running


-def replay_message_bus(path: str) -> dict[str, JobStatus]:
+def replay_message_bus(path: FileDescriptorOrPath) -> dict[str, JobStatus]:
     """
     Replay all the messages and work out what they mean for jobs.