toil 8.1.0b1__py3-none-any.whl → 8.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (254)
  1. toil/__init__.py +0 -35
  2. toil/batchSystems/abstractBatchSystem.py +1 -1
  3. toil/batchSystems/abstractGridEngineBatchSystem.py +1 -1
  4. toil/batchSystems/awsBatch.py +1 -1
  5. toil/batchSystems/cleanup_support.py +1 -1
  6. toil/batchSystems/kubernetes.py +53 -7
  7. toil/batchSystems/local_support.py +1 -1
  8. toil/batchSystems/mesos/batchSystem.py +13 -8
  9. toil/batchSystems/mesos/test/__init__.py +3 -2
  10. toil/batchSystems/singleMachine.py +1 -1
  11. toil/batchSystems/slurm.py +27 -26
  12. toil/bus.py +5 -3
  13. toil/common.py +39 -11
  14. toil/cwl/cwltoil.py +1 -1
  15. toil/job.py +64 -49
  16. toil/jobStores/abstractJobStore.py +24 -3
  17. toil/jobStores/fileJobStore.py +25 -1
  18. toil/jobStores/googleJobStore.py +104 -30
  19. toil/leader.py +9 -0
  20. toil/lib/accelerators.py +3 -1
  21. toil/lib/aws/utils.py.orig +504 -0
  22. toil/lib/bioio.py +1 -1
  23. toil/lib/docker.py +252 -91
  24. toil/lib/dockstore.py +11 -3
  25. toil/lib/exceptions.py +5 -3
  26. toil/lib/history.py +87 -13
  27. toil/lib/history_submission.py +23 -9
  28. toil/lib/io.py +34 -22
  29. toil/lib/misc.py +7 -1
  30. toil/lib/resources.py +2 -1
  31. toil/lib/threading.py +11 -10
  32. toil/options/common.py +8 -0
  33. toil/options/wdl.py +11 -0
  34. toil/server/api_spec/LICENSE +201 -0
  35. toil/server/api_spec/README.rst +5 -0
  36. toil/server/cli/wes_cwl_runner.py +2 -1
  37. toil/test/__init__.py +275 -115
  38. toil/test/batchSystems/batchSystemTest.py +227 -205
  39. toil/test/batchSystems/test_slurm.py +27 -0
  40. toil/test/cactus/pestis.tar.gz +0 -0
  41. toil/test/conftest.py +7 -0
  42. toil/test/cwl/2.fasta +11 -0
  43. toil/test/cwl/2.fastq +12 -0
  44. toil/test/cwl/conftest.py +1 -1
  45. toil/test/cwl/cwlTest.py +999 -867
  46. toil/test/cwl/directory/directory/file.txt +15 -0
  47. toil/test/cwl/download_directory_file.json +4 -0
  48. toil/test/cwl/download_directory_s3.json +4 -0
  49. toil/test/cwl/download_file.json +6 -0
  50. toil/test/cwl/download_http.json +6 -0
  51. toil/test/cwl/download_https.json +6 -0
  52. toil/test/cwl/download_s3.json +6 -0
  53. toil/test/cwl/download_subdirectory_file.json +5 -0
  54. toil/test/cwl/download_subdirectory_s3.json +5 -0
  55. toil/test/cwl/empty.json +1 -0
  56. toil/test/cwl/mock_mpi/fake_mpi.yml +8 -0
  57. toil/test/cwl/mock_mpi/fake_mpi_run.py +42 -0
  58. toil/test/cwl/optional-file-exists.json +6 -0
  59. toil/test/cwl/optional-file-missing.json +6 -0
  60. toil/test/cwl/preemptible_expression.json +1 -0
  61. toil/test/cwl/revsort-job-missing.json +6 -0
  62. toil/test/cwl/revsort-job.json +6 -0
  63. toil/test/cwl/s3_secondary_file.json +16 -0
  64. toil/test/cwl/seqtk_seq_job.json +6 -0
  65. toil/test/cwl/stream.json +6 -0
  66. toil/test/cwl/test_filename_conflict_resolution.ms/table.dat +0 -0
  67. toil/test/cwl/test_filename_conflict_resolution.ms/table.f0 +0 -0
  68. toil/test/cwl/test_filename_conflict_resolution.ms/table.f1 +0 -0
  69. toil/test/cwl/test_filename_conflict_resolution.ms/table.f1i +0 -0
  70. toil/test/cwl/test_filename_conflict_resolution.ms/table.f2 +0 -0
  71. toil/test/cwl/test_filename_conflict_resolution.ms/table.f2_TSM0 +0 -0
  72. toil/test/cwl/test_filename_conflict_resolution.ms/table.f3 +0 -0
  73. toil/test/cwl/test_filename_conflict_resolution.ms/table.f3_TSM0 +0 -0
  74. toil/test/cwl/test_filename_conflict_resolution.ms/table.f4 +0 -0
  75. toil/test/cwl/test_filename_conflict_resolution.ms/table.f4_TSM0 +0 -0
  76. toil/test/cwl/test_filename_conflict_resolution.ms/table.f5 +0 -0
  77. toil/test/cwl/test_filename_conflict_resolution.ms/table.info +0 -0
  78. toil/test/cwl/test_filename_conflict_resolution.ms/table.lock +0 -0
  79. toil/test/cwl/whale.txt +16 -0
  80. toil/test/docs/scripts/example_alwaysfail.py +38 -0
  81. toil/test/docs/scripts/example_alwaysfail_with_files.wdl +33 -0
  82. toil/test/docs/scripts/example_cachingbenchmark.py +117 -0
  83. toil/test/docs/scripts/stagingExampleFiles/in.txt +1 -0
  84. toil/test/docs/scripts/stagingExampleFiles/out.txt +2 -0
  85. toil/test/docs/scripts/tutorial_arguments.py +23 -0
  86. toil/test/docs/scripts/tutorial_debugging.patch +12 -0
  87. toil/test/docs/scripts/tutorial_debugging_hangs.wdl +126 -0
  88. toil/test/docs/scripts/tutorial_debugging_works.wdl +129 -0
  89. toil/test/docs/scripts/tutorial_docker.py +20 -0
  90. toil/test/docs/scripts/tutorial_dynamic.py +24 -0
  91. toil/test/docs/scripts/tutorial_encapsulation.py +28 -0
  92. toil/test/docs/scripts/tutorial_encapsulation2.py +29 -0
  93. toil/test/docs/scripts/tutorial_helloworld.py +15 -0
  94. toil/test/docs/scripts/tutorial_invokeworkflow.py +27 -0
  95. toil/test/docs/scripts/tutorial_invokeworkflow2.py +30 -0
  96. toil/test/docs/scripts/tutorial_jobfunctions.py +22 -0
  97. toil/test/docs/scripts/tutorial_managing.py +29 -0
  98. toil/test/docs/scripts/tutorial_managing2.py +56 -0
  99. toil/test/docs/scripts/tutorial_multiplejobs.py +25 -0
  100. toil/test/docs/scripts/tutorial_multiplejobs2.py +21 -0
  101. toil/test/docs/scripts/tutorial_multiplejobs3.py +22 -0
  102. toil/test/docs/scripts/tutorial_promises.py +25 -0
  103. toil/test/docs/scripts/tutorial_promises2.py +30 -0
  104. toil/test/docs/scripts/tutorial_quickstart.py +22 -0
  105. toil/test/docs/scripts/tutorial_requirements.py +44 -0
  106. toil/test/docs/scripts/tutorial_services.py +45 -0
  107. toil/test/docs/scripts/tutorial_staging.py +45 -0
  108. toil/test/docs/scripts/tutorial_stats.py +64 -0
  109. toil/test/lib/aws/test_iam.py +3 -1
  110. toil/test/lib/dockerTest.py +205 -122
  111. toil/test/lib/test_history.py +101 -77
  112. toil/test/provisioners/aws/awsProvisionerTest.py +12 -9
  113. toil/test/provisioners/clusterTest.py +4 -4
  114. toil/test/provisioners/gceProvisionerTest.py +16 -14
  115. toil/test/sort/sort.py +4 -1
  116. toil/test/src/busTest.py +17 -17
  117. toil/test/src/deferredFunctionTest.py +145 -132
  118. toil/test/src/importExportFileTest.py +71 -63
  119. toil/test/src/jobEncapsulationTest.py +27 -28
  120. toil/test/src/jobServiceTest.py +149 -133
  121. toil/test/src/jobTest.py +219 -211
  122. toil/test/src/miscTests.py +66 -60
  123. toil/test/src/promisedRequirementTest.py +163 -169
  124. toil/test/src/regularLogTest.py +24 -24
  125. toil/test/src/resourceTest.py +82 -76
  126. toil/test/src/restartDAGTest.py +51 -47
  127. toil/test/src/resumabilityTest.py +24 -19
  128. toil/test/src/retainTempDirTest.py +60 -57
  129. toil/test/src/systemTest.py +17 -13
  130. toil/test/src/threadingTest.py +29 -32
  131. toil/test/utils/ABCWorkflowDebug/B_file.txt +1 -0
  132. toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +204 -0
  133. toil/test/utils/ABCWorkflowDebug/mkFile.py +16 -0
  134. toil/test/utils/ABCWorkflowDebug/sleep.cwl +12 -0
  135. toil/test/utils/ABCWorkflowDebug/sleep.yaml +1 -0
  136. toil/test/utils/toilDebugTest.py +117 -102
  137. toil/test/utils/toilKillTest.py +54 -53
  138. toil/test/utils/utilsTest.py +303 -229
  139. toil/test/wdl/lint_error.wdl +9 -0
  140. toil/test/wdl/md5sum/empty_file.json +1 -0
  141. toil/test/wdl/md5sum/md5sum-gs.json +1 -0
  142. toil/test/wdl/md5sum/md5sum.1.0.wdl +32 -0
  143. toil/test/wdl/md5sum/md5sum.input +1 -0
  144. toil/test/wdl/md5sum/md5sum.json +1 -0
  145. toil/test/wdl/md5sum/md5sum.wdl +25 -0
  146. toil/test/wdl/miniwdl_self_test/inputs-namespaced.json +1 -0
  147. toil/test/wdl/miniwdl_self_test/inputs.json +1 -0
  148. toil/test/wdl/miniwdl_self_test/self_test.wdl +40 -0
  149. toil/test/wdl/standard_library/as_map.json +16 -0
  150. toil/test/wdl/standard_library/as_map_as_input.wdl +23 -0
  151. toil/test/wdl/standard_library/as_pairs.json +7 -0
  152. toil/test/wdl/standard_library/as_pairs_as_input.wdl +23 -0
  153. toil/test/wdl/standard_library/ceil.json +3 -0
  154. toil/test/wdl/standard_library/ceil_as_command.wdl +16 -0
  155. toil/test/wdl/standard_library/ceil_as_input.wdl +16 -0
  156. toil/test/wdl/standard_library/collect_by_key.json +1 -0
  157. toil/test/wdl/standard_library/collect_by_key_as_input.wdl +23 -0
  158. toil/test/wdl/standard_library/cross.json +11 -0
  159. toil/test/wdl/standard_library/cross_as_input.wdl +19 -0
  160. toil/test/wdl/standard_library/flatten.json +7 -0
  161. toil/test/wdl/standard_library/flatten_as_input.wdl +18 -0
  162. toil/test/wdl/standard_library/floor.json +3 -0
  163. toil/test/wdl/standard_library/floor_as_command.wdl +16 -0
  164. toil/test/wdl/standard_library/floor_as_input.wdl +16 -0
  165. toil/test/wdl/standard_library/keys.json +8 -0
  166. toil/test/wdl/standard_library/keys_as_input.wdl +24 -0
  167. toil/test/wdl/standard_library/length.json +7 -0
  168. toil/test/wdl/standard_library/length_as_input.wdl +16 -0
  169. toil/test/wdl/standard_library/length_as_input_with_map.json +7 -0
  170. toil/test/wdl/standard_library/length_as_input_with_map.wdl +17 -0
  171. toil/test/wdl/standard_library/length_invalid.json +3 -0
  172. toil/test/wdl/standard_library/range.json +3 -0
  173. toil/test/wdl/standard_library/range_0.json +3 -0
  174. toil/test/wdl/standard_library/range_as_input.wdl +17 -0
  175. toil/test/wdl/standard_library/range_invalid.json +3 -0
  176. toil/test/wdl/standard_library/read_boolean.json +3 -0
  177. toil/test/wdl/standard_library/read_boolean_as_command.wdl +17 -0
  178. toil/test/wdl/standard_library/read_float.json +3 -0
  179. toil/test/wdl/standard_library/read_float_as_command.wdl +17 -0
  180. toil/test/wdl/standard_library/read_int.json +3 -0
  181. toil/test/wdl/standard_library/read_int_as_command.wdl +17 -0
  182. toil/test/wdl/standard_library/read_json.json +3 -0
  183. toil/test/wdl/standard_library/read_json_as_output.wdl +31 -0
  184. toil/test/wdl/standard_library/read_lines.json +3 -0
  185. toil/test/wdl/standard_library/read_lines_as_output.wdl +31 -0
  186. toil/test/wdl/standard_library/read_map.json +3 -0
  187. toil/test/wdl/standard_library/read_map_as_output.wdl +31 -0
  188. toil/test/wdl/standard_library/read_string.json +3 -0
  189. toil/test/wdl/standard_library/read_string_as_command.wdl +17 -0
  190. toil/test/wdl/standard_library/read_tsv.json +3 -0
  191. toil/test/wdl/standard_library/read_tsv_as_output.wdl +31 -0
  192. toil/test/wdl/standard_library/round.json +3 -0
  193. toil/test/wdl/standard_library/round_as_command.wdl +16 -0
  194. toil/test/wdl/standard_library/round_as_input.wdl +16 -0
  195. toil/test/wdl/standard_library/size.json +3 -0
  196. toil/test/wdl/standard_library/size_as_command.wdl +17 -0
  197. toil/test/wdl/standard_library/size_as_output.wdl +36 -0
  198. toil/test/wdl/standard_library/stderr.json +3 -0
  199. toil/test/wdl/standard_library/stderr_as_output.wdl +30 -0
  200. toil/test/wdl/standard_library/stdout.json +3 -0
  201. toil/test/wdl/standard_library/stdout_as_output.wdl +30 -0
  202. toil/test/wdl/standard_library/sub.json +3 -0
  203. toil/test/wdl/standard_library/sub_as_input.wdl +17 -0
  204. toil/test/wdl/standard_library/sub_as_input_with_file.wdl +17 -0
  205. toil/test/wdl/standard_library/transpose.json +6 -0
  206. toil/test/wdl/standard_library/transpose_as_input.wdl +18 -0
  207. toil/test/wdl/standard_library/write_json.json +6 -0
  208. toil/test/wdl/standard_library/write_json_as_command.wdl +17 -0
  209. toil/test/wdl/standard_library/write_lines.json +7 -0
  210. toil/test/wdl/standard_library/write_lines_as_command.wdl +17 -0
  211. toil/test/wdl/standard_library/write_map.json +6 -0
  212. toil/test/wdl/standard_library/write_map_as_command.wdl +17 -0
  213. toil/test/wdl/standard_library/write_tsv.json +6 -0
  214. toil/test/wdl/standard_library/write_tsv_as_command.wdl +17 -0
  215. toil/test/wdl/standard_library/zip.json +12 -0
  216. toil/test/wdl/standard_library/zip_as_input.wdl +19 -0
  217. toil/test/wdl/test.csv +3 -0
  218. toil/test/wdl/test.tsv +3 -0
  219. toil/test/wdl/testfiles/croo.wdl +38 -0
  220. toil/test/wdl/testfiles/drop_files.wdl +62 -0
  221. toil/test/wdl/testfiles/drop_files_subworkflow.wdl +13 -0
  222. toil/test/wdl/testfiles/empty.txt +0 -0
  223. toil/test/wdl/testfiles/not_enough_outputs.wdl +33 -0
  224. toil/test/wdl/testfiles/random.wdl +66 -0
  225. toil/test/wdl/testfiles/string_file_coercion.json +1 -0
  226. toil/test/wdl/testfiles/string_file_coercion.wdl +35 -0
  227. toil/test/wdl/testfiles/test.json +4 -0
  228. toil/test/wdl/testfiles/test_boolean.txt +1 -0
  229. toil/test/wdl/testfiles/test_float.txt +1 -0
  230. toil/test/wdl/testfiles/test_int.txt +1 -0
  231. toil/test/wdl/testfiles/test_lines.txt +5 -0
  232. toil/test/wdl/testfiles/test_map.txt +2 -0
  233. toil/test/wdl/testfiles/test_string.txt +1 -0
  234. toil/test/wdl/testfiles/url_to_file.wdl +13 -0
  235. toil/test/wdl/testfiles/url_to_optional_file.wdl +13 -0
  236. toil/test/wdl/testfiles/vocab.json +1 -0
  237. toil/test/wdl/testfiles/vocab.wdl +66 -0
  238. toil/test/wdl/testfiles/wait.wdl +34 -0
  239. toil/test/wdl/wdl_specification/type_pair.json +23 -0
  240. toil/test/wdl/wdl_specification/type_pair_basic.wdl +36 -0
  241. toil/test/wdl/wdl_specification/type_pair_with_files.wdl +36 -0
  242. toil/test/wdl/wdl_specification/v1_spec.json +1 -0
  243. toil/test/wdl/wdl_specification/v1_spec_declaration.wdl +39 -0
  244. toil/test/wdl/wdltoil_test.py +680 -407
  245. toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
  246. toil/version.py +9 -9
  247. toil/wdl/wdltoil.py +336 -123
  248. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/METADATA +5 -4
  249. toil-8.2.0.dist-info/RECORD +439 -0
  250. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/WHEEL +1 -1
  251. toil-8.1.0b1.dist-info/RECORD +0 -259
  252. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/entry_points.txt +0 -0
  253. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info/licenses}/LICENSE +0 -0
  254. {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/top_level.txt +0 -0
toil/lib/history.py CHANGED
@@ -27,6 +27,7 @@ import uuid
  from dataclasses import dataclass
  from typing import Any, Iterable, Iterator, Optional, TypeVar, Callable

+ from toil.lib.conversions import strtobool
  from toil.lib.io import get_toil_home
  from toil.lib.retry import ErrorCondition, retry

@@ -126,15 +127,26 @@ class HistoryManager:
  Class responsible for managing the history of Toil runs.
  """

- # Should workflow run history be recorded?
- WORKFLOW_HISTORY_ENABLED = True
- # Should job history be recorded? Can only be true if
- # WORKFLOW_HISTORY_ENABLED is also true.
- #
- # TODO: When Dockstore can take job metrics alongside whole-workflow
- # metrics, and we've tested to make sure history recording doesn't slow
- # down our leader job processing rate, turn on actual job history logging.
- JOB_HISTORY_ENABLED = False
+ @classmethod
+ def enabled(cls) -> bool:
+ """
+ Return True if history should be read from and written to the database.
+
+ If False, no access at all shoulf be made to the database.
+ """
+ return strtobool(os.environ.get("TOIL_HISTORY", 'True'))
+
+ @classmethod
+ def enabled_job(cls) -> bool:
+ """
+ Return True if job history should be read from and written to the database.
+
+ Always returns False if enabled() returns False.
+ """
+ # TODO: When Dockstore can take job metrics alongside whole-workflow
+ # metrics, and we've tested to make sure history recording doesn't slow
+ # down our leader job processing rate, turn on actual job history logging.
+ return cls.enabled() and strtobool(os.environ.get("TOIL_JOB_HISTORY", 'False'))

  # For testing, we can move the database path for the class.
  database_path_override: Optional[str] = None
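
The hard-coded WORKFLOW_HISTORY_ENABLED and JOB_HISTORY_ENABLED class constants are replaced by environment-variable checks, so history recording can now be toggled per run without touching the code. A minimal sketch of the resulting behavior, assuming only the two variables shown in the diff (TOIL_HISTORY and TOIL_JOB_HISTORY); the values below are illustrative:

    import os
    from toil.lib.history import HistoryManager

    os.environ["TOIL_HISTORY"] = "False"     # disable all history database access
    print(HistoryManager.enabled())          # False
    print(HistoryManager.enabled_job())      # also False: job history requires enabled()

    os.environ["TOIL_HISTORY"] = "True"      # the default
    os.environ["TOIL_JOB_HISTORY"] = "True"  # job-level history is opt-in and off by default
    print(HistoryManager.enabled_job())      # True

Because every public HistoryManager entry point now starts with an enabled() or enabled_job() guard (see the hunks below), disabling history short-circuits reads and writes before any SQLite connection is opened.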
@@ -164,6 +176,12 @@ class HistoryManager:
  on Python versions that support it. In order to run any commands
  outside of a transaction use the no_transaction context manager.
  """
+
+ if not cls.enabled():
+ # Make sure we're not missing an enabled check along any codepath
+ # that wants to access the database.
+ raise RuntimeError("Attempting to connect to database when HistoryManager is disabled!")
+
  if not os.path.exists(cls.database_path()):
  # Make the database and protect it from snoopers and busybodies
  con = sqlite3.connect(cls.database_path())
@@ -345,7 +363,7 @@ class HistoryManager:
  updated.
  """

- if not cls.WORKFLOW_HISTORY_ENABLED:
+ if not cls.enabled():
  return

  logger.info("Recording workflow creation of %s in %s", workflow_id, job_store_spec)
@@ -375,7 +393,7 @@

  # TODO: Make name of this function less general?

- if not cls.WORKFLOW_HISTORY_ENABLED:
+ if not cls.enabled():
  return

  logger.info("Workflow %s is a run of %s", workflow_id, workflow_name)
@@ -431,7 +449,7 @@
  :param disk_bytes: Observed job disk usage.
  """

- if not cls.WORKFLOW_HISTORY_ENABLED or not cls.JOB_HISTORY_ENABLED:
+ if not cls.enabled_job():
  return

  logger.debug("Workflow %s ran job %s", workflow_id, job_name)
@@ -506,7 +524,7 @@
  :param platform_machine: CPU type ("AMD64", etc.) used to run the workflow leader.
  """

- if not cls.WORKFLOW_HISTORY_ENABLED:
+ if not cls.enabled():
  return

  logger.info("Workflow %s stopped. Success: %s", workflow_id, succeeded)
@@ -577,6 +595,9 @@
  List all known workflows and their summary statistics.
  """

+ if not cls.enabled():
+ return []
+
  workflows = []

  con = cls.connection()
@@ -632,6 +653,9 @@
  :param limit: Get no more than this many.
  """

+ if not cls.enabled():
+ return []
+
  attempts = []

  con = cls.connection()
@@ -706,6 +730,9 @@
  :param limit: Get no more than this many.
  """

+ if not cls.enabled_job():
+ return []
+
  attempts = []

  con = cls.connection()
@@ -783,6 +810,9 @@

  # TODO: Consolidate with the other 2 ways to query workflow attempts!

+ if not cls.enabled():
+ return None
+
  attempts = []

  con = cls.connection()
@@ -857,6 +887,9 @@
  Doesn't check to make sure the workflow has a TRS ID.
  """

+ if not cls.enabled_job():
+ return []
+
  attempts = []

  con = cls.connection()
@@ -922,6 +955,9 @@
  Does not mark the workflow attempt's job attempts as submitted.
  """

+ if not cls.enabled():
+ return
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -945,6 +981,9 @@
  Mark a collection of job attempts as submitted to Dockstore in a single transaction.
  """

+ if not cls.enabled_job():
+ return
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -969,6 +1008,10 @@
  """
  Count workflows in the database.
  """
+
+ if not cls.enabled():
+ return 0
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -994,6 +1037,10 @@
  """
  Count workflow attempts in the database.
  """
+
+ if not cls.enabled():
+ return 0
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -1019,6 +1066,10 @@
  """
  Count job attempts in the database.
  """
+
+ if not cls.enabled_job():
+ return 0
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -1044,6 +1095,10 @@
  """
  Get workflows that have a successful attempt and no unsubmitted attempts or job attempts.
  """
+
+ if not cls.enabled():
+ return []
+
  ids = []

  con = cls.connection()
@@ -1105,6 +1160,9 @@
  Get workflows that are old.
  """

+ if not cls.enabled():
+ return []
+
  ids = []

  con = cls.connection()
@@ -1150,6 +1208,9 @@
  Succeeds if the workflow does not exist.
  """

+ if not cls.enabled():
+ return
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -1173,6 +1234,9 @@
  Get the total number of bytes used by the database.
  """

+ if not cls.enabled():
+ return 0
+
  con = cls.connection()
  cur = con.cursor()
  try:
@@ -1202,6 +1266,9 @@
  """
  Shrink the database to remove unused space.
  """
+
+ if not cls.enabled():
+ return

  con = cls.connection()
  cur = con.cursor()
@@ -1226,6 +1293,9 @@
  important.
  """

+ if not cls.enabled():
+ return
+
  db_size = cls.get_database_byte_size()

  if db_size < limit:
@@ -1264,6 +1334,10 @@

  For debugging tests.
  """
+
+ if not cls.enabled():
+ return []
+
  return cls.connection().iterdump()

toil/lib/history_submission.py CHANGED
@@ -82,7 +82,7 @@ def job_execution_id(job_attempt: JobAttemptSummary) -> str:
  def get_parsed_trs_spec(workflow_attempt: WorkflowAttemptSummary) -> tuple[str, str]:
  """
  Get the TRS ID and version of the workflow, or raise an error.
-
+
  :returns: The TRS ID and the TRS version of the wrokflow run.
  :raises: ValueError if the workflow does not have a TRS spec or if the spec
  does not contain a version.
@@ -259,7 +259,7 @@ class Submission:
  return all_submitted_and_marked


- def create_history_submission(batch_size: int = 10, desired_tasks: int = 0) -> Submission:
+ def create_history_submission(batch_size: Optional[int] = None, desired_tasks: Optional[int] = None) -> Submission:
  """
  Make a package of data about recent workflow runs to send in.

@@ -271,6 +271,12 @@ def create_history_submission(batch_size: int = 10, desired_tasks: int = 0) -> S
  batch. Use 0 to not submit any task information.
  """

+ # By default, include the things we are set to track history for.
+ if batch_size is None:
+ batch_size = 10 if HistoryManager.enabled() else 0
+ if desired_tasks is None:
+ desired_tasks = 50 if HistoryManager.enabled_job() else 0
+
  # Collect together some workflows and some lists of tasks into a submission.
  submission = Submission()

@@ -325,15 +331,16 @@ def create_current_submission(workflow_id: str, attempt_number: int) -> Submissi
  submission = Submission()
  try:
  workflow_attempt = HistoryManager.get_workflow_attempt(workflow_id, attempt_number)
- if workflow_attempt is not None:
+ if workflow_attempt is not None and HistoryManager.enabled():
  if not workflow_attempt.submitted_to_dockstore:
  submission.add_workflow_attempt(workflow_attempt)
- try:
- job_attempts = HistoryManager.get_unsubmitted_job_attempts(workflow_attempt.workflow_id, workflow_attempt.attempt_number)
- submission.add_job_attempts(workflow_attempt, job_attempts)
- except:
- logger.exception("Could not compose metrics report for workflow task set")
- # Keep going with just the workflow.
+ if HistoryManager.enabled_job():
+ try:
+ job_attempts = HistoryManager.get_unsubmitted_job_attempts(workflow_attempt.workflow_id, workflow_attempt.attempt_number)
+ submission.add_job_attempts(workflow_attempt, job_attempts)
+ except:
+ logger.exception("Could not compose metrics report for workflow task set")
+ # Keep going with just the workflow.
  except:
  logger.exception("Could not compose metrics report for workflow execution")
  # Keep going with an empty submission.
@@ -493,6 +500,13 @@ def display_dialog_tkinter(title: str, text: str, options: dict[KeyType, str], t
  # If we run out of time, hide the window and move on without a choice.
  root.after(int(timeout * 1000), close_root)

+ # To make the dialog pop up over the terminal instead of behind it, we
+ # lift it and temporarily make it topmost. We don't keep it topmost
+ # because we want to let the user switch away from it.
+ root.attributes('-topmost', True)
+ root.after(10, lambda: root.attributes('-topmost', False))
+ root.lift()
+
  # Run the window's main loop
  root.mainloop()

toil/lib/io.py CHANGED
@@ -6,15 +6,17 @@ import stat
  import sys
  import tempfile
  import uuid
- from collections.abc import Iterator
+ from collections.abc import Iterator, Iterable
  from contextlib import contextmanager
  from io import BytesIO
  from typing import IO, Any, Callable, Optional, Protocol, Union

  from toil.lib.memoize import memoize
+ from toil.lib.misc import StrPath

  logger = logging.getLogger(__name__)

+
  @memoize
  def get_toil_home() -> str:
  """
@@ -43,6 +45,9 @@ REMOTE_SCHEMES = STANDARD_SCHEMES + [TOIL_URI_SCHEME]
  ALL_SCHEMES = REMOTE_SCHEMES + ["file:"]

  def is_standard_url(filename: str) -> bool:
+ """
+ Return True if the given URL is a non-Toil, non-file: URL.
+ """
  return is_url_with_scheme(filename, STANDARD_SCHEMES)

  def is_remote_url(filename: str) -> bool:
@@ -70,16 +75,23 @@ def is_url_with_scheme(filename: str, schemes: list[str]) -> bool:
  return False

  def is_toil_url(filename: str) -> bool:
+ """
+ Return True if a URL is a toilfile: URL.
+ """
  return is_url_with_scheme(filename, [TOIL_URI_SCHEME])

  def is_file_url(filename: str) -> bool:
- return is_url_with_scheme(filename, ["file:"])
+ """
+ Return True if a URL is a file: URL.

+ Will return False for bare paths.
+ """
+ return is_url_with_scheme(filename, ["file:"])

  def mkdtemp(
  suffix: Optional[str] = None,
  prefix: Optional[str] = None,
- dir: Optional[str] = None,
+ dir: Optional[StrPath] = None,
  ) -> str:
  """
  Make a temporary directory like tempfile.mkdtemp, but with relaxed permissions.
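
The newly documented URL helpers classify purely by scheme prefix, so a bare filesystem path is not a file: URL. A small illustration of the documented behavior; the https case assumes http/https are among STANDARD_SCHEMES, and the toilfile: payload is a placeholder, neither of which the diff shows:

    from toil.lib.io import is_file_url, is_standard_url, is_toil_url

    print(is_file_url("file:///data/reads.fastq"))   # True: file: scheme
    print(is_file_url("/data/reads.fastq"))          # False: bare path, per the new docstring
    print(is_toil_url("toilfile:abc123"))            # True: Toil's internal toilfile: scheme
    print(is_standard_url("https://example.org/x"))  # True, assuming https is a standard scheme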
@@ -174,28 +186,28 @@ def robust_rmtree(path: Union[str, bytes]) -> None:
  raise


- def atomic_tmp_file(final_path: str) -> str:
+ def atomic_tmp_file(final_path: StrPath) -> str:
  """Return a tmp file name to use with atomic_install. This will be in the
  same directory as final_path. The temporary file will have the same extension
  as finalPath. It the final path is in /dev (/dev/null, /dev/stdout), it is
  returned unchanged and atomic_tmp_install will do nothing."""
  final_dir = os.path.dirname(os.path.normpath(final_path)) # can be empty
  if final_dir == "/dev":
- return final_path
+ return str(final_path)
  final_basename = os.path.basename(final_path)
  final_ext = os.path.splitext(final_path)[1]
- base_name = f"{final_basename}.{uuid.uuid4()}.tmp{final_ext}"
+ base_name = f"{final_basename}.{str(uuid.uuid4())}.tmp{final_ext}"
  return os.path.join(final_dir, base_name)


- def atomic_install(tmp_path, final_path) -> None:
+ def atomic_install(tmp_path: StrPath, final_path: StrPath) -> None:
  """atomic install of tmp_path as final_path"""
  if os.path.dirname(os.path.normpath(final_path)) != "/dev":
  os.rename(tmp_path, final_path)


  @contextmanager
- def AtomicFileCreate(final_path: str, keep: bool = False) -> Iterator[str]:
+ def AtomicFileCreate(final_path: StrPath, keep: bool = False) -> Iterator[str]:
  """Context manager to create a temporary file. Entering returns path to
  the temporary file in the same directory as finalPath. If the code in
  context succeeds, the file renamed to its actual name. If an error
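
With these signatures widened to StrPath, the atomic-file helpers accept os.PathLike objects as well as plain strings. A minimal hedged usage sketch; the destination path is illustrative:

    from pathlib import Path
    from toil.lib.io import AtomicFileCreate

    final = Path("/tmp/report.txt")            # a pathlib.Path now satisfies StrPath
    with AtomicFileCreate(final) as tmp_path:
        # Work happens against the temporary sibling file; on a clean exit it is
        # renamed over final_path, so readers never see a partially written file.
        with open(tmp_path, "w") as out:
            out.write("done\n")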
@@ -250,23 +262,23 @@ def make_public_dir(in_directory: str, suggested_name: Optional[str] = None) ->
  our old default.
  """
  if suggested_name is not None:
- generated_dir_path: str = os.path.join(in_directory, suggested_name)
+ generated_dir_path1 = os.path.join(in_directory, suggested_name)
  try:
- os.mkdir(generated_dir_path)
- os.chmod(generated_dir_path, 0o777)
- return generated_dir_path
+ os.mkdir(generated_dir_path1)
+ os.chmod(generated_dir_path1, 0o777)
+ return generated_dir_path1
  except FileExistsError:
  pass
  for i in range(
  4, 32 + 1
  ): # make random uuids and truncate to lengths starting at 4 and working up to max 32
  for _ in range(10): # make 10 attempts for each length
- truncated_uuid: str = str(uuid.uuid4()).replace("-", "")[:i]
- generated_dir_path: str = os.path.join(in_directory, truncated_uuid)
+ truncated_uuid = str(uuid.uuid4()).replace("-", "")[:i]
+ generated_dir_path2 = os.path.join(in_directory, truncated_uuid)
  try:
- os.mkdir(generated_dir_path)
- os.chmod(generated_dir_path, 0o777)
- return generated_dir_path
+ os.mkdir(generated_dir_path2)
+ os.chmod(generated_dir_path2, 0o777)
+ return generated_dir_path2
  except FileExistsError:
  pass
  this_should_never_happen: str = os.path.join(in_directory, str(uuid.uuid4()))
@@ -328,7 +340,7 @@ class WriteWatchingStream:

  self.backingStream = backingStream
  # We have no write listeners yet
- self.writeListeners = []
+ self.writeListeners: list[Callable[[int], None]] = []

  def onWrite(self, listener: Callable[[int], None]) -> None:
  """
@@ -339,7 +351,7 @@ class WriteWatchingStream:

  # Implement the file API from https://docs.python.org/2.4/lib/bltin-file-objects.html

- def write(self, data):
+ def write(self, data: bytes) -> None:
  """
  Write the given data to the file.
  """
@@ -351,7 +363,7 @@ class WriteWatchingStream:
  # Send out notifications
  listener(len(data))

- def writelines(self, datas):
+ def writelines(self, datas: Iterable[bytes]) -> None:
  """
  Write each string from the given iterable, without newlines.
  """
@@ -359,14 +371,14 @@
  for data in datas:
  self.write(data)

- def flush(self):
+ def flush(self) -> None:
  """
  Flush the backing stream.
  """

  self.backingStream.flush()

- def close(self):
+ def close(self) -> None:
  """
  Close the backing stream.
  """
toil/lib/misc.py CHANGED
@@ -9,10 +9,16 @@ import sys
  import time
  from collections.abc import Iterator
  from contextlib import closing
- from typing import Optional
+ from typing import Optional, Union
+ if sys.version_info >= (3, 10):
+ from typing import TypeAlias
+ else:
+ from typing_extensions import TypeAlias

  logger = logging.getLogger(__name__)

+ StrPath: TypeAlias = Union[str, os.PathLike[str]]
+ FileDescriptorOrPath: TypeAlias = Union[int, bytes, os.PathLike[bytes], StrPath]

  def get_public_ip() -> str:
  """Get the IP that this machine uses to contact the internet.
toil/lib/resources.py CHANGED
@@ -17,6 +17,7 @@ import os
  import resource
  import sys

+ from toil.lib.misc import StrPath

  class ResourceMonitor:
  """
@@ -89,7 +90,7 @@ class ResourceMonitor:
  )


- def glob(glob_pattern: str, directoryname: str) -> list[str]:
+ def glob(glob_pattern: str, directoryname: StrPath) -> list[str]:
  """
  Walks through a directory and its subdirectories looking for files matching
  the glob_pattern and returns a list=[].
toil/lib/threading.py CHANGED
@@ -36,12 +36,13 @@ import psutil

  from toil.lib.exceptions import raise_
  from toil.lib.io import robust_rmtree
+ from toil.lib.misc import StrPath

  logger = logging.getLogger(__name__)


  def ensure_filesystem_lockable(
- path: str, timeout: float = 30, hint: Optional[str] = None
+ path: StrPath, timeout: float = 30, hint: Optional[str] = None
  ) -> None:
  """
  Make sure that the filesystem used at the given path is one where locks are safe to use.
@@ -71,7 +72,7 @@ def ensure_filesystem_lockable(
  # Start a child process to stat the path. See <https://unix.stackexchange.com/a/402236>.
  # We really should call statfs but no bindings for it are in PyPI.
  completed = subprocess.run(
- ["stat", "-f", "-c", "%T", path],
+ ["stat", "-f", "-c", "%T", str(path)],
  check=True,
  capture_output=True,
  timeout=timeout,
@@ -85,7 +86,7 @@
  # Stat didn't work. Maybe we don't have the right version of stat installed?
  logger.warning(
  "Could not determine filesystem type at %s because of: %s",
- path,
+ str(path),
  e.stderr.decode("utf-8", errors="replace").strip(),
  )
  # If we don't know the filesystem type, keep going anyway.
@@ -107,7 +108,7 @@
  # flaky with regard to locks actually locking anything).
  logger.debug(
  "Detected that %s has lockable filesystem type: %s",
- path,
+ str(path),
  filesystem_type,
  )

@@ -518,7 +519,7 @@ def process_name_exists(base_dir: str, name: str) -> bool:
  # Similar to the process naming system above, we define a global mutex system
  # for critical sections, based just around file locks.
  @contextmanager
- def global_mutex(base_dir: str, mutex: str) -> Iterator[None]:
+ def global_mutex(base_dir: StrPath, mutex: str) -> Iterator[None]:
  """
  Context manager that locks a mutex. The mutex is identified by the given
  name, and scoped to the given directory. Works across all containers that
@@ -527,7 +528,7 @@ def global_mutex(base_dir: str, mutex: str) -> Iterator[None]:

  Only works between processes, NOT between threads.

- :param str base_dir: Base directory to work in. Defines the shared namespace.
+ :param base_dir: Base directory to work in. Defines the shared namespace.
  :param str mutex: Mutex to lock. Must be a permissible path component.
  """

@@ -674,7 +675,7 @@ class LastProcessStandingArena:
  Consider using a try/finally; this class is not a context manager.
  """

- def __init__(self, base_dir: str, name: str) -> None:
+ def __init__(self, base_dir: StrPath, name: str) -> None:
  """
  Connect to the arena specified by the given base_dir and name.

@@ -683,12 +684,12 @@

  Doesn't enter or leave the arena.

- :param str base_dir: Base directory to work in. Defines the shared namespace.
- :param str name: Name of the arena. Must be a permissible path component.
+ :param base_dir: Base directory to work in. Defines the shared namespace.
+ :param name: Name of the arena. Must be a permissible path component.
  """

  # Save the base_dir which namespaces everything
- self.base_dir = base_dir
+ self.base_dir = str(base_dir)

  # We need a mutex name to allow only one process to be entering or
  # leaving at a time.
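
With base_dir widened to StrPath throughout this module, the mutex and arena helpers can be handed pathlib.Path directories directly. A minimal hedged sketch of global_mutex; the directory and mutex name are illustrative:

    from pathlib import Path
    from toil.lib.threading import global_mutex

    work_dir = Path("/tmp/toil-shared")            # must be visible to all cooperating processes
    work_dir.mkdir(parents=True, exist_ok=True)

    # Only one process at a time may hold the named mutex scoped to work_dir.
    with global_mutex(work_dir, "cache-rebuild"):
        pass  # critical section; per the docstring this guards processes, not threads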
toil/options/common.py CHANGED
@@ -860,6 +860,14 @@ def add_base_toil_options(
  help=f"Number of times to retry a failing job before giving up and "
  f"labeling job failed. default={1}",
  )
+ job_options.add_argument(
+ "--stopOnFirstFailure",
+ dest="stop_on_first_failure",
+ type=strtobool,
+ default=False,
+ metavar="BOOL",
+ help="Stop the workflow at the first complete job failure.",
+ )
  job_options.add_argument(
  "--enableUnlimitedPreemptibleRetries",
  "--enableUnlimitedPreemptableRetries",
toil/options/wdl.py CHANGED
@@ -86,3 +86,14 @@ def add_wdl_options(parser: ArgumentParser, suppress: bool = True) -> None:
  default=None,
  help=suppress_help or "Keep and return all call outputs as workflow outputs"
  )
+
+ strict_arguments = ["--wdlStrict"] + (
+ ["--strict"] if not suppress else []
+ )
+ parser.add_argument(
+ *strict_arguments,
+ dest="strict",
+ type=strtobool,
+ default=False,
+ help=suppress_help or "Exit runner if workflow has any lint warnings"
+ )
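
--wdlStrict (plus a --strict alias when the WDL options are not suppressed) is likewise a strtobool-typed flag stored under dest="strict". A minimal sketch of how it parses, assuming add_wdl_options only registers optional arguments:

    from argparse import ArgumentParser
    from toil.options.wdl import add_wdl_options

    parser = ArgumentParser()
    add_wdl_options(parser, suppress=False)        # suppress=False also exposes --strict
    args = parser.parse_args(["--wdlStrict", "True"])
    print(args.strict)                             # truthy: the runner exits on lint warnings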