toil 8.1.0b1__py3-none-any.whl → 8.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (254)
  1. toil/__init__.py +0 -35
  2. toil/batchSystems/abstractBatchSystem.py +1 -1
  3. toil/batchSystems/abstractGridEngineBatchSystem.py +1 -1
  4. toil/batchSystems/awsBatch.py +1 -1
  5. toil/batchSystems/cleanup_support.py +1 -1
  6. toil/batchSystems/kubernetes.py +53 -7
  7. toil/batchSystems/local_support.py +1 -1
  8. toil/batchSystems/mesos/batchSystem.py +13 -8
  9. toil/batchSystems/mesos/test/__init__.py +3 -2
  10. toil/batchSystems/singleMachine.py +1 -1
  11. toil/batchSystems/slurm.py +27 -26
  12. toil/bus.py +5 -3
  13. toil/common.py +39 -11
  14. toil/cwl/cwltoil.py +1 -1
  15. toil/job.py +64 -49
  16. toil/jobStores/abstractJobStore.py +24 -3
  17. toil/jobStores/fileJobStore.py +25 -1
  18. toil/jobStores/googleJobStore.py +104 -30
  19. toil/leader.py +9 -0
  20. toil/lib/accelerators.py +3 -1
  21. toil/lib/aws/utils.py.orig +504 -0
  22. toil/lib/bioio.py +1 -1
  23. toil/lib/docker.py +252 -91
  24. toil/lib/dockstore.py +11 -3
  25. toil/lib/exceptions.py +5 -3
  26. toil/lib/history.py +87 -13
  27. toil/lib/history_submission.py +23 -9
  28. toil/lib/io.py +34 -22
  29. toil/lib/misc.py +7 -1
  30. toil/lib/resources.py +2 -1
  31. toil/lib/threading.py +11 -10
  32. toil/options/common.py +8 -0
  33. toil/options/wdl.py +11 -0
  34. toil/server/api_spec/LICENSE +201 -0
  35. toil/server/api_spec/README.rst +5 -0
  36. toil/server/cli/wes_cwl_runner.py +2 -1
  37. toil/test/__init__.py +275 -115
  38. toil/test/batchSystems/batchSystemTest.py +227 -205
  39. toil/test/batchSystems/test_slurm.py +27 -0
  40. toil/test/cactus/pestis.tar.gz +0 -0
  41. toil/test/conftest.py +7 -0
  42. toil/test/cwl/2.fasta +11 -0
  43. toil/test/cwl/2.fastq +12 -0
  44. toil/test/cwl/conftest.py +1 -1
  45. toil/test/cwl/cwlTest.py +999 -867
  46. toil/test/cwl/directory/directory/file.txt +15 -0
  47. toil/test/cwl/download_directory_file.json +4 -0
  48. toil/test/cwl/download_directory_s3.json +4 -0
  49. toil/test/cwl/download_file.json +6 -0
  50. toil/test/cwl/download_http.json +6 -0
  51. toil/test/cwl/download_https.json +6 -0
  52. toil/test/cwl/download_s3.json +6 -0
  53. toil/test/cwl/download_subdirectory_file.json +5 -0
  54. toil/test/cwl/download_subdirectory_s3.json +5 -0
  55. toil/test/cwl/empty.json +1 -0
  56. toil/test/cwl/mock_mpi/fake_mpi.yml +8 -0
  57. toil/test/cwl/mock_mpi/fake_mpi_run.py +42 -0
  58. toil/test/cwl/optional-file-exists.json +6 -0
  59. toil/test/cwl/optional-file-missing.json +6 -0
  60. toil/test/cwl/preemptible_expression.json +1 -0
  61. toil/test/cwl/revsort-job-missing.json +6 -0
  62. toil/test/cwl/revsort-job.json +6 -0
  63. toil/test/cwl/s3_secondary_file.json +16 -0
  64. toil/test/cwl/seqtk_seq_job.json +6 -0
  65. toil/test/cwl/stream.json +6 -0
  66. toil/test/cwl/test_filename_conflict_resolution.ms/table.dat +0 -0
  67. toil/test/cwl/test_filename_conflict_resolution.ms/table.f0 +0 -0
  68. toil/test/cwl/test_filename_conflict_resolution.ms/table.f1 +0 -0
  69. toil/test/cwl/test_filename_conflict_resolution.ms/table.f1i +0 -0
  70. toil/test/cwl/test_filename_conflict_resolution.ms/table.f2 +0 -0
  71. toil/test/cwl/test_filename_conflict_resolution.ms/table.f2_TSM0 +0 -0
  72. toil/test/cwl/test_filename_conflict_resolution.ms/table.f3 +0 -0
  73. toil/test/cwl/test_filename_conflict_resolution.ms/table.f3_TSM0 +0 -0
  74. toil/test/cwl/test_filename_conflict_resolution.ms/table.f4 +0 -0
  75. toil/test/cwl/test_filename_conflict_resolution.ms/table.f4_TSM0 +0 -0
  76. toil/test/cwl/test_filename_conflict_resolution.ms/table.f5 +0 -0
  77. toil/test/cwl/test_filename_conflict_resolution.ms/table.info +0 -0
  78. toil/test/cwl/test_filename_conflict_resolution.ms/table.lock +0 -0
  79. toil/test/cwl/whale.txt +16 -0
  80. toil/test/docs/scripts/example_alwaysfail.py +38 -0
  81. toil/test/docs/scripts/example_alwaysfail_with_files.wdl +33 -0
  82. toil/test/docs/scripts/example_cachingbenchmark.py +117 -0
  83. toil/test/docs/scripts/stagingExampleFiles/in.txt +1 -0
  84. toil/test/docs/scripts/stagingExampleFiles/out.txt +2 -0
  85. toil/test/docs/scripts/tutorial_arguments.py +23 -0
  86. toil/test/docs/scripts/tutorial_debugging.patch +12 -0
  87. toil/test/docs/scripts/tutorial_debugging_hangs.wdl +126 -0
  88. toil/test/docs/scripts/tutorial_debugging_works.wdl +129 -0
  89. toil/test/docs/scripts/tutorial_docker.py +20 -0
  90. toil/test/docs/scripts/tutorial_dynamic.py +24 -0
  91. toil/test/docs/scripts/tutorial_encapsulation.py +28 -0
  92. toil/test/docs/scripts/tutorial_encapsulation2.py +29 -0
  93. toil/test/docs/scripts/tutorial_helloworld.py +15 -0
  94. toil/test/docs/scripts/tutorial_invokeworkflow.py +27 -0
  95. toil/test/docs/scripts/tutorial_invokeworkflow2.py +30 -0
  96. toil/test/docs/scripts/tutorial_jobfunctions.py +22 -0
  97. toil/test/docs/scripts/tutorial_managing.py +29 -0
  98. toil/test/docs/scripts/tutorial_managing2.py +56 -0
  99. toil/test/docs/scripts/tutorial_multiplejobs.py +25 -0
  100. toil/test/docs/scripts/tutorial_multiplejobs2.py +21 -0
  101. toil/test/docs/scripts/tutorial_multiplejobs3.py +22 -0
  102. toil/test/docs/scripts/tutorial_promises.py +25 -0
  103. toil/test/docs/scripts/tutorial_promises2.py +30 -0
  104. toil/test/docs/scripts/tutorial_quickstart.py +22 -0
  105. toil/test/docs/scripts/tutorial_requirements.py +44 -0
  106. toil/test/docs/scripts/tutorial_services.py +45 -0
  107. toil/test/docs/scripts/tutorial_staging.py +45 -0
  108. toil/test/docs/scripts/tutorial_stats.py +64 -0
  109. toil/test/lib/aws/test_iam.py +3 -1
  110. toil/test/lib/dockerTest.py +205 -122
  111. toil/test/lib/test_history.py +101 -77
  112. toil/test/provisioners/aws/awsProvisionerTest.py +12 -9
  113. toil/test/provisioners/clusterTest.py +4 -4
  114. toil/test/provisioners/gceProvisionerTest.py +16 -14
  115. toil/test/sort/sort.py +4 -1
  116. toil/test/src/busTest.py +17 -17
  117. toil/test/src/deferredFunctionTest.py +145 -132
  118. toil/test/src/importExportFileTest.py +71 -63
  119. toil/test/src/jobEncapsulationTest.py +27 -28
  120. toil/test/src/jobServiceTest.py +149 -133
  121. toil/test/src/jobTest.py +219 -211
  122. toil/test/src/miscTests.py +66 -60
  123. toil/test/src/promisedRequirementTest.py +163 -169
  124. toil/test/src/regularLogTest.py +24 -24
  125. toil/test/src/resourceTest.py +82 -76
  126. toil/test/src/restartDAGTest.py +51 -47
  127. toil/test/src/resumabilityTest.py +24 -19
  128. toil/test/src/retainTempDirTest.py +60 -57
  129. toil/test/src/systemTest.py +17 -13
  130. toil/test/src/threadingTest.py +29 -32
  131. toil/test/utils/ABCWorkflowDebug/B_file.txt +1 -0
  132. toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +204 -0
  133. toil/test/utils/ABCWorkflowDebug/mkFile.py +16 -0
  134. toil/test/utils/ABCWorkflowDebug/sleep.cwl +12 -0
  135. toil/test/utils/ABCWorkflowDebug/sleep.yaml +1 -0
  136. toil/test/utils/toilDebugTest.py +117 -102
  137. toil/test/utils/toilKillTest.py +54 -53
  138. toil/test/utils/utilsTest.py +303 -229
  139. toil/test/wdl/lint_error.wdl +9 -0
  140. toil/test/wdl/md5sum/empty_file.json +1 -0
  141. toil/test/wdl/md5sum/md5sum-gs.json +1 -0
  142. toil/test/wdl/md5sum/md5sum.1.0.wdl +32 -0
  143. toil/test/wdl/md5sum/md5sum.input +1 -0
  144. toil/test/wdl/md5sum/md5sum.json +1 -0
  145. toil/test/wdl/md5sum/md5sum.wdl +25 -0
  146. toil/test/wdl/miniwdl_self_test/inputs-namespaced.json +1 -0
  147. toil/test/wdl/miniwdl_self_test/inputs.json +1 -0
  148. toil/test/wdl/miniwdl_self_test/self_test.wdl +40 -0
  149. toil/test/wdl/standard_library/as_map.json +16 -0
  150. toil/test/wdl/standard_library/as_map_as_input.wdl +23 -0
  151. toil/test/wdl/standard_library/as_pairs.json +7 -0
  152. toil/test/wdl/standard_library/as_pairs_as_input.wdl +23 -0
  153. toil/test/wdl/standard_library/ceil.json +3 -0
  154. toil/test/wdl/standard_library/ceil_as_command.wdl +16 -0
  155. toil/test/wdl/standard_library/ceil_as_input.wdl +16 -0
  156. toil/test/wdl/standard_library/collect_by_key.json +1 -0
  157. toil/test/wdl/standard_library/collect_by_key_as_input.wdl +23 -0
  158. toil/test/wdl/standard_library/cross.json +11 -0
  159. toil/test/wdl/standard_library/cross_as_input.wdl +19 -0
  160. toil/test/wdl/standard_library/flatten.json +7 -0
  161. toil/test/wdl/standard_library/flatten_as_input.wdl +18 -0
  162. toil/test/wdl/standard_library/floor.json +3 -0
  163. toil/test/wdl/standard_library/floor_as_command.wdl +16 -0
  164. toil/test/wdl/standard_library/floor_as_input.wdl +16 -0
  165. toil/test/wdl/standard_library/keys.json +8 -0
  166. toil/test/wdl/standard_library/keys_as_input.wdl +24 -0
  167. toil/test/wdl/standard_library/length.json +7 -0
  168. toil/test/wdl/standard_library/length_as_input.wdl +16 -0
  169. toil/test/wdl/standard_library/length_as_input_with_map.json +7 -0
  170. toil/test/wdl/standard_library/length_as_input_with_map.wdl +17 -0
  171. toil/test/wdl/standard_library/length_invalid.json +3 -0
  172. toil/test/wdl/standard_library/range.json +3 -0
  173. toil/test/wdl/standard_library/range_0.json +3 -0
  174. toil/test/wdl/standard_library/range_as_input.wdl +17 -0
  175. toil/test/wdl/standard_library/range_invalid.json +3 -0
  176. toil/test/wdl/standard_library/read_boolean.json +3 -0
  177. toil/test/wdl/standard_library/read_boolean_as_command.wdl +17 -0
  178. toil/test/wdl/standard_library/read_float.json +3 -0
  179. toil/test/wdl/standard_library/read_float_as_command.wdl +17 -0
  180. toil/test/wdl/standard_library/read_int.json +3 -0
  181. toil/test/wdl/standard_library/read_int_as_command.wdl +17 -0
  182. toil/test/wdl/standard_library/read_json.json +3 -0
  183. toil/test/wdl/standard_library/read_json_as_output.wdl +31 -0
  184. toil/test/wdl/standard_library/read_lines.json +3 -0
  185. toil/test/wdl/standard_library/read_lines_as_output.wdl +31 -0
  186. toil/test/wdl/standard_library/read_map.json +3 -0
  187. toil/test/wdl/standard_library/read_map_as_output.wdl +31 -0
  188. toil/test/wdl/standard_library/read_string.json +3 -0
  189. toil/test/wdl/standard_library/read_string_as_command.wdl +17 -0
  190. toil/test/wdl/standard_library/read_tsv.json +3 -0
  191. toil/test/wdl/standard_library/read_tsv_as_output.wdl +31 -0
  192. toil/test/wdl/standard_library/round.json +3 -0
  193. toil/test/wdl/standard_library/round_as_command.wdl +16 -0
  194. toil/test/wdl/standard_library/round_as_input.wdl +16 -0
  195. toil/test/wdl/standard_library/size.json +3 -0
  196. toil/test/wdl/standard_library/size_as_command.wdl +17 -0
  197. toil/test/wdl/standard_library/size_as_output.wdl +36 -0
  198. toil/test/wdl/standard_library/stderr.json +3 -0
  199. toil/test/wdl/standard_library/stderr_as_output.wdl +30 -0
  200. toil/test/wdl/standard_library/stdout.json +3 -0
  201. toil/test/wdl/standard_library/stdout_as_output.wdl +30 -0
  202. toil/test/wdl/standard_library/sub.json +3 -0
  203. toil/test/wdl/standard_library/sub_as_input.wdl +17 -0
  204. toil/test/wdl/standard_library/sub_as_input_with_file.wdl +17 -0
  205. toil/test/wdl/standard_library/transpose.json +6 -0
  206. toil/test/wdl/standard_library/transpose_as_input.wdl +18 -0
  207. toil/test/wdl/standard_library/write_json.json +6 -0
  208. toil/test/wdl/standard_library/write_json_as_command.wdl +17 -0
  209. toil/test/wdl/standard_library/write_lines.json +7 -0
  210. toil/test/wdl/standard_library/write_lines_as_command.wdl +17 -0
  211. toil/test/wdl/standard_library/write_map.json +6 -0
  212. toil/test/wdl/standard_library/write_map_as_command.wdl +17 -0
  213. toil/test/wdl/standard_library/write_tsv.json +6 -0
  214. toil/test/wdl/standard_library/write_tsv_as_command.wdl +17 -0
  215. toil/test/wdl/standard_library/zip.json +12 -0
  216. toil/test/wdl/standard_library/zip_as_input.wdl +19 -0
  217. toil/test/wdl/test.csv +3 -0
  218. toil/test/wdl/test.tsv +3 -0
  219. toil/test/wdl/testfiles/croo.wdl +38 -0
  220. toil/test/wdl/testfiles/drop_files.wdl +62 -0
  221. toil/test/wdl/testfiles/drop_files_subworkflow.wdl +13 -0
  222. toil/test/wdl/testfiles/empty.txt +0 -0
  223. toil/test/wdl/testfiles/not_enough_outputs.wdl +33 -0
  224. toil/test/wdl/testfiles/random.wdl +66 -0
  225. toil/test/wdl/testfiles/string_file_coercion.json +1 -0
  226. toil/test/wdl/testfiles/string_file_coercion.wdl +35 -0
  227. toil/test/wdl/testfiles/test.json +4 -0
  228. toil/test/wdl/testfiles/test_boolean.txt +1 -0
  229. toil/test/wdl/testfiles/test_float.txt +1 -0
  230. toil/test/wdl/testfiles/test_int.txt +1 -0
  231. toil/test/wdl/testfiles/test_lines.txt +5 -0
  232. toil/test/wdl/testfiles/test_map.txt +2 -0
  233. toil/test/wdl/testfiles/test_string.txt +1 -0
  234. toil/test/wdl/testfiles/url_to_file.wdl +13 -0
  235. toil/test/wdl/testfiles/url_to_optional_file.wdl +13 -0
  236. toil/test/wdl/testfiles/vocab.json +1 -0
  237. toil/test/wdl/testfiles/vocab.wdl +66 -0
  238. toil/test/wdl/testfiles/wait.wdl +34 -0
  239. toil/test/wdl/wdl_specification/type_pair.json +23 -0
  240. toil/test/wdl/wdl_specification/type_pair_basic.wdl +36 -0
  241. toil/test/wdl/wdl_specification/type_pair_with_files.wdl +36 -0
  242. toil/test/wdl/wdl_specification/v1_spec.json +1 -0
  243. toil/test/wdl/wdl_specification/v1_spec_declaration.wdl +39 -0
  244. toil/test/wdl/wdltoil_test.py +680 -407
  245. toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
  246. toil/version.py +9 -9
  247. toil/wdl/wdltoil.py +336 -123
  248. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/METADATA +5 -4
  249. toil-8.2.0.dist-info/RECORD +439 -0
  250. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/WHEEL +1 -1
  251. toil-8.1.0b1.dist-info/RECORD +0 -259
  252. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/entry_points.txt +0 -0
  253. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info/licenses}/LICENSE +0 -0
  254. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/top_level.txt +0 -0
toil/__init__.py CHANGED
@@ -480,38 +480,3 @@ def logProcessContext(config: "Config") -> None:
  log.info("Running Toil version %s on host %s.", version, socket.gethostname())
  log.debug("Configuration: %s", config.__dict__)
 
-
- try:
- cache_path = "~/.cache/aws/cached_temporary_credentials"
- datetime_format = (
- "%Y-%m-%dT%H:%M:%SZ" # incidentally the same as the format used by AWS
- )
- log = logging.getLogger(__name__)
-
- # But in addition to our manual cache, we also are going to turn on boto3's
- # new built-in caching layer.
-
- def datetime_to_str(dt):
- """
- Convert a naive (implicitly UTC) datetime object into a string, explicitly UTC.
-
- >>> datetime_to_str(datetime(1970, 1, 1, 0, 0, 0))
- '1970-01-01T00:00:00Z'
- """
- return dt.strftime(datetime_format)
-
- def str_to_datetime(s):
- """
- Convert a string, explicitly UTC into a naive (implicitly UTC) datetime object.
-
- >>> str_to_datetime( '1970-01-01T00:00:00Z' )
- datetime.datetime(1970, 1, 1, 0, 0)
-
- Just to show that the constructor args for seconds and microseconds are optional:
- >>> datetime(1970, 1, 1, 0, 0, 0)
- datetime.datetime(1970, 1, 1, 0, 0)
- """
- return datetime.strptime(s, datetime_format)
-
- except ImportError:
- pass
toil/batchSystems/abstractBatchSystem.py CHANGED
@@ -310,7 +310,7 @@ class BatchSystemSupport(AbstractBatchSystem):
  """Partial implementation of AbstractBatchSystem, support methods."""
 
  def __init__(
- self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
  ) -> None:
  """
  Initialize initial state of the object.
toil/batchSystems/abstractGridEngineBatchSystem.py CHANGED
@@ -421,7 +421,7 @@ class AbstractGridEngineBatchSystem(BatchSystemCleanupSupport):
  raise NotImplementedError()
 
  def __init__(
- self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: float
  ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
  self.config = config
toil/batchSystems/awsBatch.py CHANGED
@@ -83,7 +83,7 @@ class AWSBatchBatchSystem(BatchSystemCleanupSupport):
  return True
 
  def __init__(
- self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
  ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
 
toil/batchSystems/cleanup_support.py CHANGED
@@ -45,7 +45,7 @@ class BatchSystemCleanupSupport(BatchSystemLocalSupport):
  return contexts
 
  def __init__(
- self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
  ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
 
toil/batchSystems/kubernetes.py CHANGED
@@ -21,6 +21,7 @@ cannot yet be launched. That functionality will need to wait for user-mode
  Docker
  """
  import datetime
+ import io
  import logging
  import math
  import os
@@ -50,7 +51,8 @@ else:
  from typing import Protocol, TypedDict, runtime_checkable
 
  import urllib3
- import yaml
+ import ruamel.yaml as yaml
+ import json
 
  # The Right Way to use the Kubernetes module is to `import kubernetes` and then you get all your stuff as like ApiClient. But this doesn't work for the stubs: the stubs seem to only support importing things from the internal modules in `kubernetes` where they are actually defined. See for example <https://github.com/MaterializeInc/kubernetes-stubs/issues/9 and <https://github.com/MaterializeInc/kubernetes-stubs/issues/10>. So we just import all the things we use into our global namespace here.
  from kubernetes.client import (
@@ -77,6 +79,7 @@ from kubernetes.client import (
  V1ResourceRequirements,
  V1SecretVolumeSource,
  V1SecurityContext,
+ V1PodSecurityContext,
  V1Toleration,
  V1Volume,
  V1VolumeMount,
@@ -148,7 +151,7 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
  customObjects: NotRequired[CustomObjectsApi]
 
  def __init__(
- self, config: Config, maxCores: int, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: int, maxMemory: float, maxDisk: int
  ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
 
@@ -314,7 +317,10 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
  del here[k]
 
  drop_boring(root_dict)
- return yaml.dump(root_dict)
+ s = io.StringIO()
+ YAML = yaml.YAML(typ='safe')
+ YAML.dump(root_dict, s)
+ return s.getvalue()
 
  @overload
  def _api(
@@ -803,6 +809,26 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
  ],
  )
 
+ class FakeResponse:
+ data: str
+
+ T = TypeVar('T')
+ def _load_kubernetes_object(self, file: str, cls: type[T]) -> T:
+ """
+ Deserialize a YAML representation into a Kubernetes object
+ :param file: Path to YAML file
+ :param cls: Kubernetes API model type for deserialized object
+ :return: Deserialized object
+ """
+ YAML = yaml.YAML(typ='safe')
+ object_def = YAML.load(open('container.yaml').read())
+ # The kubernetes API does not have an actual deserializer, so this is a workaround
+ # See: https://github.com/kubernetes-client/python/issues/977
+ faked_response = self.FakeResponse()
+ faked_response.data = json.dumps(object_def)
+ return ApiClient().deserialize(faked_response, cls)
+
+
  def _create_pod_spec(
  self,
  command: str,
@@ -946,17 +972,24 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
  volume_mounts=mounts,
  )
 
+ if self.config.kubernetes_security_context:
+ container.security_context = self._load_kubernetes_object(self.config.kubernetes_security_context, V1SecurityContext)
+
  # In case security context rules are not allowed to be set, we only apply
  # a security context at all if we need to turn on privileged mode.
  if self.config.kubernetes_privileged:
- container.security_context = V1SecurityContext(
- privileged=self.config.kubernetes_privileged
- )
+ if container.security_context is None:
+ container.security_context = V1SecurityContext()
+ container.security_context.privileged = self.config.kubernetes_privileged
 
  # Wrap the container in a spec
  pod_spec = V1PodSpec(
  containers=[container], volumes=volumes, restart_policy="Never"
  )
+
+ if self.config.kubernetes_pod_security_context:
+ pod_spec.security_context = self._load_kubernetes_object(self.config.kubernetes_pod_security_context, V1PodSecurityContext)
+
  # Tell the spec where to land
  placement.apply(pod_spec)
 
@@ -2126,7 +2159,18 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
  "privileged operations, such as FUSE. On Toil-managed clusters with --enableFuse, "
  "this is set to True. (default: %(default)s)",
  )
-
+ parser.add_argument("--kubernetesPodSecurityContext",
+ dest='kubernetes_pod_security_context',
+ type=str,
+ env_var="TOIL_KUBERNETES_POD_SECURITY_CONTEXT",
+ default=None,
+ help="Path to a YAML defining a pod security context to apply to all pods.")
+ parser.add_argument("--kubernetesSecurityContext",
+ dest='kubernetes_security_context',
+ type=str,
+ env_var="TOIL_KUBERNETES_SECURITY_CONTEXT",
+ default=None,
+ help="Path to a YAML defining a security context to apply to all containers.")
  OptionType = TypeVar("OptionType")
 
  @classmethod
@@ -2138,3 +2182,5 @@ class KubernetesBatchSystem(BatchSystemCleanupSupport):
  )
  setOption("kubernetes_pod_timeout")
  setOption("kubernetes_privileged")
+ setOption("kubernetes_pod_security_context")
+ setOption("kubernetes_security_context")
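The new --kubernetesSecurityContext and --kubernetesPodSecurityContext options point at YAML files that are turned into client objects through the FakeResponse/ApiClient().deserialize() workaround added in the hunks above. A minimal standalone sketch of that pattern, assuming only the kubernetes and ruamel.yaml packages (the helper name and file path are illustrative, not part of the release):

    import json
    from kubernetes.client import ApiClient, V1PodSecurityContext
    from ruamel.yaml import YAML

    class _FakeResponse:
        # ApiClient.deserialize() only reads a .data attribute holding JSON text,
        # so a tiny stand-in for a REST response is enough
        # (see kubernetes-client/python#977).
        def __init__(self, data: str) -> None:
            self.data = data

    def load_pod_security_context(path: str) -> V1PodSecurityContext:
        # Parse the user-supplied YAML into plain Python structures.
        with open(path) as stream:
            obj = YAML(typ="safe").load(stream)
        # Round-trip through JSON inside the fake response, since the client
        # offers no public dict/YAML deserializer.
        return ApiClient().deserialize(_FakeResponse(json.dumps(obj)), V1PodSecurityContext)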
toil/batchSystems/local_support.py CHANGED
@@ -30,7 +30,7 @@ class BatchSystemLocalSupport(BatchSystemSupport):
  """Adds a local queue for helper jobs, useful for CWL & others."""
 
  def __init__(
- self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
  ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
  max_local_jobs = (
toil/batchSystems/mesos/batchSystem.py CHANGED
@@ -40,6 +40,7 @@ from toil.batchSystems.abstractBatchSystem import (
  from toil.batchSystems.local_support import BatchSystemLocalSupport
  from toil.batchSystems.mesos import JobQueue, MesosShape, TaskData, ToilJob
  from toil.batchSystems.options import OptionSetter
+ from toil.common import Config
  from toil.job import JobDescription
  from toil.lib.conversions import b_to_mib, mib_to_b
  from toil.lib.memoize import strict_bool
@@ -61,22 +62,26 @@ class MesosBatchSystem(BatchSystemLocalSupport, AbstractScalableBatchSystem, Sch
  """
 
  @classmethod
- def supportsAutoDeployment(cls):
+ def supportsAutoDeployment(cls) -> bool:
  return True
 
  @classmethod
- def supportsWorkerCleanup(cls):
+ def supportsWorkerCleanup(cls) -> bool:
  return True
 
  class ExecutorInfo:
- def __init__(self, nodeAddress, agentId, nodeInfo, lastSeen):
+ def __init__(
+ self, nodeAddress: str, agentId: str, nodeInfo: str, lastSeen: str
+ ) -> None:
  super().__init__()
  self.nodeAddress = nodeAddress
  self.agentId = agentId
  self.nodeInfo = nodeInfo
  self.lastSeen = lastSeen
 
- def __init__(self, config, maxCores, maxMemory, maxDisk):
+ def __init__(
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: int
+ ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
 
  # The auto-deployed resource representing the user script. Will be passed along in every
@@ -165,13 +170,13 @@ class MesosBatchSystem(BatchSystemLocalSupport, AbstractScalableBatchSystem, Sch
 
  self._startDriver(config)
 
- def setUserScript(self, userScript):
+ def setUserScript(self, userScript: str) -> None:
  self.userScript = userScript
 
- def ignoreNode(self, nodeAddress):
+ def ignoreNode(self, nodeAddress: str) -> None:
  self.ignoredNodes.add(nodeAddress)
 
- def unignoreNode(self, nodeAddress):
+ def unignoreNode(self, nodeAddress: str) -> None:
  self.ignoredNodes.remove(nodeAddress)
 
  def issueBatchJob(
@@ -179,7 +184,7 @@ class MesosBatchSystem(BatchSystemLocalSupport, AbstractScalableBatchSystem, Sch
  command: str,
  jobNode: JobDescription,
  job_environment: Optional[dict[str, str]] = None,
- ):
+ ) -> str:
  """
  Issues the following command returning a unique jobID. Command is the string to run, memory
  is an int giving the number of bytes the job needs to run in and cores is the number of cpus
toil/batchSystems/mesos/test/__init__.py CHANGED
@@ -7,6 +7,7 @@ from abc import ABCMeta, abstractmethod
  from contextlib import closing
  from shutil import which
  from urllib.request import urlopen
+ from typing import Optional
 
  from toil.lib.retry import retry
  from toil.lib.threading import ExceptionalThread, cpu_count
@@ -25,7 +26,7 @@ class MesosTestSupport:
  with closing(urlopen("http://127.0.0.1:5050/version")) as content:
  content.read()
 
- def _startMesos(self, numCores=None):
+ def _startMesos(self, numCores: Optional[int] = None) -> None:
  if numCores is None:
  numCores = cpu_count()
  shutil.rmtree("/tmp/mesos", ignore_errors=True)
@@ -52,7 +53,7 @@ class MesosTestSupport:
  log.warning("Forcibly killing child which ignored SIGTERM")
  process.kill()
 
- def _stopMesos(self):
+ def _stopMesos(self) -> None:
  self._stopProcess(self.agent.popen)
  self.agent.join()
  self._stopProcess(self.master.popen)
toil/batchSystems/singleMachine.py CHANGED
@@ -94,7 +94,7 @@ class SingleMachineBatchSystem(BatchSystemSupport):
  self,
  config: Config,
  maxCores: float,
- maxMemory: int,
+ maxMemory: float,
  maxDisk: int,
  max_jobs: Optional[int] = None,
  ) -> None:
toil/batchSystems/slurm.py CHANGED
@@ -101,6 +101,32 @@ def parse_slurm_time(slurm_time: str) -> int:
  total_seconds += multiplier * int(elapsed_split[index])
  return total_seconds
 
+ # For parsing user-provided option overrides (or self-generated
+ # options) for sbatch, we need a way to recognize long, long-with-equals, and
+ # short forms.
+ def option_detector(long: str, short: str | None = None) -> Callable[[str], bool]:
+ """
+ Get a function that returns true if it sees the long or short
+ option.
+ """
+ def is_match(option: str) -> bool:
+ return option == f"--{long}" or option.startswith(f"--{long}=") or (short is not None and option == f"-{short}")
+ return is_match
+
+ def any_option_detector(options: list[str | tuple[str, str]]) -> Callable[[str], bool]:
+ """
+ Get a function that returns true if it sees any of the long
+ options or long or short option pairs.
+ """
+ detectors = [option_detector(o) if isinstance(o, str) else option_detector(*o) for o in options]
+ def is_match(option: str) -> bool:
+ for detector in detectors:
+ if detector(option):
+ return True
+ return False
+ return is_match
+
+
 
  class SlurmBatchSystem(AbstractGridEngineBatchSystem):
  class PartitionInfo(NamedTuple):
@@ -647,31 +673,6 @@ class SlurmBatchSystem(AbstractGridEngineBatchSystem):
  # Also any extra arguments from --slurmArgs or TOIL_SLURM_ARGS
  nativeConfig: str = self.boss.config.slurm_args # type: ignore[attr-defined]
 
- # For parsing user-provided option overrides (or self-generated
- # options) we need a way to recognize long, long-with-equals, and
- # short forms.
- def option_detector(long: str, short: str | None = None) -> Callable[[str], bool]:
- """
- Get a function that returns true if it sees the long or short
- option.
- """
- def is_match(option: str) -> bool:
- return option == f"--{long}" or option.startswith(f"--{long}=") or (short is not None and option == f"-{short}")
- return is_match
-
- def any_option_detector(options: list[str | tuple[str, str]]) -> Callable[[str], bool]:
- """
- Get a function that returns true if it sees any of the long
- options or long or short option pairs.
- """
- detectors = [option_detector(o) if isinstance(o, str) else option_detector(*o) for o in options]
- def is_match(option: str) -> bool:
- for detector in detectors:
- if detector(option):
- return True
- return False
- return is_match
-
  is_any_mem_option = any_option_detector(["mem", "mem-per-cpu", "mem-per-gpu"])
  is_any_cpus_option = any_option_detector([("cpus-per-task", "c"), "cpus-per-gpu"])
  is_export_option = option_detector("export")
@@ -842,7 +843,7 @@ class SlurmBatchSystem(AbstractGridEngineBatchSystem):
  return sbatch_line
 
  def __init__(
- self, config: Config, maxCores: float, maxMemory: int, maxDisk: int
+ self, config: Config, maxCores: float, maxMemory: float, maxDisk: float
  ) -> None:
  super().__init__(config, maxCores, maxMemory, maxDisk)
  self.partitions = SlurmBatchSystem.PartitionSet()
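The option detectors moved to module scope above can be exercised directly; a small usage sketch based only on the code shown in this hunk (the sample sbatch options are illustrative):

    is_any_mem_option = any_option_detector(["mem", "mem-per-cpu", "mem-per-gpu"])
    assert is_any_mem_option("--mem=4G")           # long form with '='
    assert is_any_mem_option("--mem-per-cpu")      # bare long form
    assert not is_any_mem_option("--nodes=2")      # unrelated option

    is_any_cpus_option = any_option_detector([("cpus-per-task", "c"), "cpus-per-gpu"])
    assert is_any_cpus_option("-c")                # short form from the pair
    assert is_any_cpus_option("--cpus-per-task=8")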
toil/bus.py CHANGED
@@ -69,13 +69,15 @@ import tempfile
  import threading
  from collections.abc import Iterator
  from dataclasses import dataclass
- from typing import IO, Any, Callable, NamedTuple, Optional, TypeVar, cast
+ from typing import IO, Any, Callable, NamedTuple, Optional, TypeVar, TYPE_CHECKING, cast
 
  from pubsub.core import Publisher
  from pubsub.core.listener import Listener
  from pubsub.core.topicobj import Topic
  from pubsub.core.topicutils import ALL_TOPICS
 
+ from toil.lib.misc import FileDescriptorOrPath
+
  logger = logging.getLogger(__name__)
 
  # We define some ways to talk about jobs.
@@ -434,7 +436,7 @@ class MessageBus:
  connection._set_bus(self)
  return connection
 
- def connect_output_file(self, file_path: str) -> Any:
+ def connect_output_file(self, file_path: FileDescriptorOrPath) -> Any:
  """
  Send copies of all messages to the given output file.
 
@@ -736,7 +738,7 @@ class JobStatus:
  ) # if the exit code is -1 and the job id is specified, we assume the job is running
 
 
- def replay_message_bus(path: str) -> dict[str, JobStatus]:
+ def replay_message_bus(path: FileDescriptorOrPath) -> dict[str, JobStatus]:
  """
  Replay all the messages and work out what they mean for jobs.
 
toil/common.py CHANGED
@@ -74,6 +74,7 @@ from toil.lib.compatibility import deprecated
  from toil.lib.history import HistoryManager
  from toil.lib.history_submission import ask_user_about_publishing_metrics, create_history_submission, create_current_submission
  from toil.lib.io import AtomicFileCreate, try_path, get_toil_home
+ from toil.lib.misc import StrPath
  from toil.lib.memoize import memoize
  from toil.lib.retry import retry
  from toil.lib.threading import ensure_filesystem_lockable
@@ -126,6 +127,8 @@ class Config:
  kubernetes_service_account: Optional[str]
  kubernetes_pod_timeout: float
  kubernetes_privileged: bool
+ kubernetes_pod_security_context: Optional[str]
+ kubernetes_security_context: Optional[str]
  tes_endpoint: str
  tes_user: str
  tes_password: str
@@ -138,7 +141,7 @@ class Config:
  batch_logs_dir: Optional[str]
  """The backing scheduler will be instructed, if possible, to save logs
  to this directory, where the leader can read them."""
- statePollingWait: int
+ statePollingWait: float
  state_polling_timeout: int
  disableAutoDeployment: bool
 
@@ -208,6 +211,7 @@ class Config:
 
  # Retrying/rescuing jobs
  retryCount: int
+ stop_on_first_failure: bool
  enableUnlimitedPreemptibleRetries: bool
  doubleMem: bool
  maxJobDuration: int
@@ -386,6 +390,7 @@ class Config:
 
  # Retrying/rescuing jobs
  set_option("retryCount")
+ set_option("stop_on_first_failure")
  set_option("enableUnlimitedPreemptibleRetries")
  set_option("doubleMem")
  set_option("maxJobDuration")
@@ -398,7 +403,7 @@ class Config:
  set_option("writeLogsGzip")
  set_option("writeLogsFromAllJobs")
  set_option("write_messages")
-
+
  # Data Publishing Options
  set_option("publish_workflow_metrics")
 
@@ -653,7 +658,7 @@ def update_config(filepath: str, key: str, new_value: Union[str, bool, int, floa
  :param key: Setting to set. Must be the command-line option name, not the
  destination variable name.
  """
-
+
  yaml = YAML(typ="rt")
  data = yaml.load(open(filepath))
 
@@ -678,6 +683,17 @@ def parser_with_common_options(
  prog: Optional[str] = None,
  default_log_level: Optional[int] = None,
  ) -> ArgParser:
+ """
+ Get a command-line option parser for a Toil subcommand.
+
+ The returned parser just has basic options (like version reporting and
+ logging) used by all Toil subcommands.
+
+ Toil Python workflows should use
+ :meth:`toil.job.Job.Runner.getDefaultArgumentParser` instead, which makes
+ sure to add all the important options for actually running a workflow.
+ """
+
  parser = ArgParser(
  prog=prog or "Toil", formatter_class=ArgumentDefaultsHelpFormatter
  )
@@ -781,7 +797,7 @@ def addOptions(
  :param typ: string of either "cwl" or "wdl" to specify which runner to check against
  :return: None, raise parser error if option is found
  """
- check_parser = ArgParser()
+ check_parser = ArgParser(allow_abbrev=False)
  if typ == "wdl":
  add_cwl_options(check_parser)
  if typ == "cwl":
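Passing allow_abbrev=False disables argparse's prefix matching, so the runner-option check above only reacts to exact option names. A standalone illustration of the default behavior being turned off (the --outdir option is hypothetical):

    from argparse import ArgumentParser

    lenient = ArgumentParser(allow_abbrev=True)
    lenient.add_argument("--outdir")
    print(lenient.parse_known_args(["--out", "x"]))
    # (Namespace(outdir='x'), [])  <- "--out" silently abbreviates --outdir

    strict = ArgumentParser(allow_abbrev=False)
    strict.add_argument("--outdir")
    print(strict.parse_known_args(["--out", "x"]))
    # (Namespace(outdir=None), ['--out', 'x'])  <- left unrecognized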
@@ -1489,21 +1505,33 @@ class Toil(ContextManager["Toil"]):
  self._jobStore.export_file(file_id, dst_uri)
 
  @staticmethod
- def normalize_uri(uri: str, check_existence: bool = False) -> str:
+ def normalize_uri(uri: str, check_existence: bool = False, dir_path: Optional[str] = None) -> str:
  """
- Given a URI, if it has no scheme, prepend "file:".
+ Given a URI, if it has no scheme, make it a properly quoted file: URI.
 
  :param check_existence: If set, raise FileNotFoundError if a URI points to
  a local file that does not exist.
+
+ :param dir_path: If specified, interpret relative paths relative to the
+ given directory path instead of the current one.
  """
- if urlparse(uri).scheme == "file":
+
+ parsed = urlparse(uri)
+ if parsed.scheme == "file":
  uri = unquote(
- urlparse(uri).path
+ parsed.path
  ) # this should strip off the local file scheme; it will be added back
+ parsed = urlparse(uri)
 
  # account for the scheme-less case, which should be coerced to a local absolute path
- if urlparse(uri).scheme == "":
- abs_path = os.path.abspath(uri)
+ if parsed.scheme == "":
+ if dir_path is not None:
+ # To support relative paths from a particular directory, join
+ # the directory on. If uri is already an abs path, join() will
+ # not do anything
+ abs_path = os.path.join(dir_path, uri)
+ else:
+ abs_path = os.path.abspath(uri)
  if not os.path.exists(abs_path) and check_existence:
  raise FileNotFoundError(
  f'Could not find local file "{abs_path}" when importing "{uri}".\n'
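The comment in the new dir_path branch relies on a standard os.path.join property: the join is a no-op when the second argument is already absolute (directory names below are made up):

    import os

    os.path.join("/workflows/run1", "data/input.txt")  # '/workflows/run1/data/input.txt'
    os.path.join("/workflows/run1", "/tmp/input.txt")  # '/tmp/input.txt' (dir_path ignored)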
@@ -2019,7 +2047,7 @@ def cacheDirName(workflowID: str) -> str:
  return f"cache-{workflowID}"
 
 
- def getDirSizeRecursively(dirPath: str) -> int:
+ def getDirSizeRecursively(dirPath: StrPath) -> int:
  """
  This method will return the cumulative number of bytes occupied by the files
  on disk in the directory and its subdirectories.
toil/cwl/cwltoil.py CHANGED
@@ -2979,7 +2979,7 @@ def makeRootJob(
  # This will consist of files that we were not able to get a file size for
  leader_metadata = dict()
  for filename, file_data in metadata.items():
- if file_data.size is None:
+ if file_data[2] is None: # size
  leader_metadata[filename] = file_data
  else:
  worker_metadata[filename] = file_data