toil 8.1.0b1__py3-none-any.whl → 8.2.0__py3-none-any.whl
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registry.
- toil/__init__.py +0 -35
- toil/batchSystems/abstractBatchSystem.py +1 -1
- toil/batchSystems/abstractGridEngineBatchSystem.py +1 -1
- toil/batchSystems/awsBatch.py +1 -1
- toil/batchSystems/cleanup_support.py +1 -1
- toil/batchSystems/kubernetes.py +53 -7
- toil/batchSystems/local_support.py +1 -1
- toil/batchSystems/mesos/batchSystem.py +13 -8
- toil/batchSystems/mesos/test/__init__.py +3 -2
- toil/batchSystems/singleMachine.py +1 -1
- toil/batchSystems/slurm.py +27 -26
- toil/bus.py +5 -3
- toil/common.py +39 -11
- toil/cwl/cwltoil.py +1 -1
- toil/job.py +64 -49
- toil/jobStores/abstractJobStore.py +24 -3
- toil/jobStores/fileJobStore.py +25 -1
- toil/jobStores/googleJobStore.py +104 -30
- toil/leader.py +9 -0
- toil/lib/accelerators.py +3 -1
- toil/lib/aws/utils.py.orig +504 -0
- toil/lib/bioio.py +1 -1
- toil/lib/docker.py +252 -91
- toil/lib/dockstore.py +11 -3
- toil/lib/exceptions.py +5 -3
- toil/lib/history.py +87 -13
- toil/lib/history_submission.py +23 -9
- toil/lib/io.py +34 -22
- toil/lib/misc.py +7 -1
- toil/lib/resources.py +2 -1
- toil/lib/threading.py +11 -10
- toil/options/common.py +8 -0
- toil/options/wdl.py +11 -0
- toil/server/api_spec/LICENSE +201 -0
- toil/server/api_spec/README.rst +5 -0
- toil/server/cli/wes_cwl_runner.py +2 -1
- toil/test/__init__.py +275 -115
- toil/test/batchSystems/batchSystemTest.py +227 -205
- toil/test/batchSystems/test_slurm.py +27 -0
- toil/test/cactus/pestis.tar.gz +0 -0
- toil/test/conftest.py +7 -0
- toil/test/cwl/2.fasta +11 -0
- toil/test/cwl/2.fastq +12 -0
- toil/test/cwl/conftest.py +1 -1
- toil/test/cwl/cwlTest.py +999 -867
- toil/test/cwl/directory/directory/file.txt +15 -0
- toil/test/cwl/download_directory_file.json +4 -0
- toil/test/cwl/download_directory_s3.json +4 -0
- toil/test/cwl/download_file.json +6 -0
- toil/test/cwl/download_http.json +6 -0
- toil/test/cwl/download_https.json +6 -0
- toil/test/cwl/download_s3.json +6 -0
- toil/test/cwl/download_subdirectory_file.json +5 -0
- toil/test/cwl/download_subdirectory_s3.json +5 -0
- toil/test/cwl/empty.json +1 -0
- toil/test/cwl/mock_mpi/fake_mpi.yml +8 -0
- toil/test/cwl/mock_mpi/fake_mpi_run.py +42 -0
- toil/test/cwl/optional-file-exists.json +6 -0
- toil/test/cwl/optional-file-missing.json +6 -0
- toil/test/cwl/preemptible_expression.json +1 -0
- toil/test/cwl/revsort-job-missing.json +6 -0
- toil/test/cwl/revsort-job.json +6 -0
- toil/test/cwl/s3_secondary_file.json +16 -0
- toil/test/cwl/seqtk_seq_job.json +6 -0
- toil/test/cwl/stream.json +6 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.dat +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f1 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f1i +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f2 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f2_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f3 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f3_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f4 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f4_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f5 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.info +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.lock +0 -0
- toil/test/cwl/whale.txt +16 -0
- toil/test/docs/scripts/example_alwaysfail.py +38 -0
- toil/test/docs/scripts/example_alwaysfail_with_files.wdl +33 -0
- toil/test/docs/scripts/example_cachingbenchmark.py +117 -0
- toil/test/docs/scripts/stagingExampleFiles/in.txt +1 -0
- toil/test/docs/scripts/stagingExampleFiles/out.txt +2 -0
- toil/test/docs/scripts/tutorial_arguments.py +23 -0
- toil/test/docs/scripts/tutorial_debugging.patch +12 -0
- toil/test/docs/scripts/tutorial_debugging_hangs.wdl +126 -0
- toil/test/docs/scripts/tutorial_debugging_works.wdl +129 -0
- toil/test/docs/scripts/tutorial_docker.py +20 -0
- toil/test/docs/scripts/tutorial_dynamic.py +24 -0
- toil/test/docs/scripts/tutorial_encapsulation.py +28 -0
- toil/test/docs/scripts/tutorial_encapsulation2.py +29 -0
- toil/test/docs/scripts/tutorial_helloworld.py +15 -0
- toil/test/docs/scripts/tutorial_invokeworkflow.py +27 -0
- toil/test/docs/scripts/tutorial_invokeworkflow2.py +30 -0
- toil/test/docs/scripts/tutorial_jobfunctions.py +22 -0
- toil/test/docs/scripts/tutorial_managing.py +29 -0
- toil/test/docs/scripts/tutorial_managing2.py +56 -0
- toil/test/docs/scripts/tutorial_multiplejobs.py +25 -0
- toil/test/docs/scripts/tutorial_multiplejobs2.py +21 -0
- toil/test/docs/scripts/tutorial_multiplejobs3.py +22 -0
- toil/test/docs/scripts/tutorial_promises.py +25 -0
- toil/test/docs/scripts/tutorial_promises2.py +30 -0
- toil/test/docs/scripts/tutorial_quickstart.py +22 -0
- toil/test/docs/scripts/tutorial_requirements.py +44 -0
- toil/test/docs/scripts/tutorial_services.py +45 -0
- toil/test/docs/scripts/tutorial_staging.py +45 -0
- toil/test/docs/scripts/tutorial_stats.py +64 -0
- toil/test/lib/aws/test_iam.py +3 -1
- toil/test/lib/dockerTest.py +205 -122
- toil/test/lib/test_history.py +101 -77
- toil/test/provisioners/aws/awsProvisionerTest.py +12 -9
- toil/test/provisioners/clusterTest.py +4 -4
- toil/test/provisioners/gceProvisionerTest.py +16 -14
- toil/test/sort/sort.py +4 -1
- toil/test/src/busTest.py +17 -17
- toil/test/src/deferredFunctionTest.py +145 -132
- toil/test/src/importExportFileTest.py +71 -63
- toil/test/src/jobEncapsulationTest.py +27 -28
- toil/test/src/jobServiceTest.py +149 -133
- toil/test/src/jobTest.py +219 -211
- toil/test/src/miscTests.py +66 -60
- toil/test/src/promisedRequirementTest.py +163 -169
- toil/test/src/regularLogTest.py +24 -24
- toil/test/src/resourceTest.py +82 -76
- toil/test/src/restartDAGTest.py +51 -47
- toil/test/src/resumabilityTest.py +24 -19
- toil/test/src/retainTempDirTest.py +60 -57
- toil/test/src/systemTest.py +17 -13
- toil/test/src/threadingTest.py +29 -32
- toil/test/utils/ABCWorkflowDebug/B_file.txt +1 -0
- toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +204 -0
- toil/test/utils/ABCWorkflowDebug/mkFile.py +16 -0
- toil/test/utils/ABCWorkflowDebug/sleep.cwl +12 -0
- toil/test/utils/ABCWorkflowDebug/sleep.yaml +1 -0
- toil/test/utils/toilDebugTest.py +117 -102
- toil/test/utils/toilKillTest.py +54 -53
- toil/test/utils/utilsTest.py +303 -229
- toil/test/wdl/lint_error.wdl +9 -0
- toil/test/wdl/md5sum/empty_file.json +1 -0
- toil/test/wdl/md5sum/md5sum-gs.json +1 -0
- toil/test/wdl/md5sum/md5sum.1.0.wdl +32 -0
- toil/test/wdl/md5sum/md5sum.input +1 -0
- toil/test/wdl/md5sum/md5sum.json +1 -0
- toil/test/wdl/md5sum/md5sum.wdl +25 -0
- toil/test/wdl/miniwdl_self_test/inputs-namespaced.json +1 -0
- toil/test/wdl/miniwdl_self_test/inputs.json +1 -0
- toil/test/wdl/miniwdl_self_test/self_test.wdl +40 -0
- toil/test/wdl/standard_library/as_map.json +16 -0
- toil/test/wdl/standard_library/as_map_as_input.wdl +23 -0
- toil/test/wdl/standard_library/as_pairs.json +7 -0
- toil/test/wdl/standard_library/as_pairs_as_input.wdl +23 -0
- toil/test/wdl/standard_library/ceil.json +3 -0
- toil/test/wdl/standard_library/ceil_as_command.wdl +16 -0
- toil/test/wdl/standard_library/ceil_as_input.wdl +16 -0
- toil/test/wdl/standard_library/collect_by_key.json +1 -0
- toil/test/wdl/standard_library/collect_by_key_as_input.wdl +23 -0
- toil/test/wdl/standard_library/cross.json +11 -0
- toil/test/wdl/standard_library/cross_as_input.wdl +19 -0
- toil/test/wdl/standard_library/flatten.json +7 -0
- toil/test/wdl/standard_library/flatten_as_input.wdl +18 -0
- toil/test/wdl/standard_library/floor.json +3 -0
- toil/test/wdl/standard_library/floor_as_command.wdl +16 -0
- toil/test/wdl/standard_library/floor_as_input.wdl +16 -0
- toil/test/wdl/standard_library/keys.json +8 -0
- toil/test/wdl/standard_library/keys_as_input.wdl +24 -0
- toil/test/wdl/standard_library/length.json +7 -0
- toil/test/wdl/standard_library/length_as_input.wdl +16 -0
- toil/test/wdl/standard_library/length_as_input_with_map.json +7 -0
- toil/test/wdl/standard_library/length_as_input_with_map.wdl +17 -0
- toil/test/wdl/standard_library/length_invalid.json +3 -0
- toil/test/wdl/standard_library/range.json +3 -0
- toil/test/wdl/standard_library/range_0.json +3 -0
- toil/test/wdl/standard_library/range_as_input.wdl +17 -0
- toil/test/wdl/standard_library/range_invalid.json +3 -0
- toil/test/wdl/standard_library/read_boolean.json +3 -0
- toil/test/wdl/standard_library/read_boolean_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_float.json +3 -0
- toil/test/wdl/standard_library/read_float_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_int.json +3 -0
- toil/test/wdl/standard_library/read_int_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_json.json +3 -0
- toil/test/wdl/standard_library/read_json_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_lines.json +3 -0
- toil/test/wdl/standard_library/read_lines_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_map.json +3 -0
- toil/test/wdl/standard_library/read_map_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_string.json +3 -0
- toil/test/wdl/standard_library/read_string_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_tsv.json +3 -0
- toil/test/wdl/standard_library/read_tsv_as_output.wdl +31 -0
- toil/test/wdl/standard_library/round.json +3 -0
- toil/test/wdl/standard_library/round_as_command.wdl +16 -0
- toil/test/wdl/standard_library/round_as_input.wdl +16 -0
- toil/test/wdl/standard_library/size.json +3 -0
- toil/test/wdl/standard_library/size_as_command.wdl +17 -0
- toil/test/wdl/standard_library/size_as_output.wdl +36 -0
- toil/test/wdl/standard_library/stderr.json +3 -0
- toil/test/wdl/standard_library/stderr_as_output.wdl +30 -0
- toil/test/wdl/standard_library/stdout.json +3 -0
- toil/test/wdl/standard_library/stdout_as_output.wdl +30 -0
- toil/test/wdl/standard_library/sub.json +3 -0
- toil/test/wdl/standard_library/sub_as_input.wdl +17 -0
- toil/test/wdl/standard_library/sub_as_input_with_file.wdl +17 -0
- toil/test/wdl/standard_library/transpose.json +6 -0
- toil/test/wdl/standard_library/transpose_as_input.wdl +18 -0
- toil/test/wdl/standard_library/write_json.json +6 -0
- toil/test/wdl/standard_library/write_json_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_lines.json +7 -0
- toil/test/wdl/standard_library/write_lines_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_map.json +6 -0
- toil/test/wdl/standard_library/write_map_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_tsv.json +6 -0
- toil/test/wdl/standard_library/write_tsv_as_command.wdl +17 -0
- toil/test/wdl/standard_library/zip.json +12 -0
- toil/test/wdl/standard_library/zip_as_input.wdl +19 -0
- toil/test/wdl/test.csv +3 -0
- toil/test/wdl/test.tsv +3 -0
- toil/test/wdl/testfiles/croo.wdl +38 -0
- toil/test/wdl/testfiles/drop_files.wdl +62 -0
- toil/test/wdl/testfiles/drop_files_subworkflow.wdl +13 -0
- toil/test/wdl/testfiles/empty.txt +0 -0
- toil/test/wdl/testfiles/not_enough_outputs.wdl +33 -0
- toil/test/wdl/testfiles/random.wdl +66 -0
- toil/test/wdl/testfiles/string_file_coercion.json +1 -0
- toil/test/wdl/testfiles/string_file_coercion.wdl +35 -0
- toil/test/wdl/testfiles/test.json +4 -0
- toil/test/wdl/testfiles/test_boolean.txt +1 -0
- toil/test/wdl/testfiles/test_float.txt +1 -0
- toil/test/wdl/testfiles/test_int.txt +1 -0
- toil/test/wdl/testfiles/test_lines.txt +5 -0
- toil/test/wdl/testfiles/test_map.txt +2 -0
- toil/test/wdl/testfiles/test_string.txt +1 -0
- toil/test/wdl/testfiles/url_to_file.wdl +13 -0
- toil/test/wdl/testfiles/url_to_optional_file.wdl +13 -0
- toil/test/wdl/testfiles/vocab.json +1 -0
- toil/test/wdl/testfiles/vocab.wdl +66 -0
- toil/test/wdl/testfiles/wait.wdl +34 -0
- toil/test/wdl/wdl_specification/type_pair.json +23 -0
- toil/test/wdl/wdl_specification/type_pair_basic.wdl +36 -0
- toil/test/wdl/wdl_specification/type_pair_with_files.wdl +36 -0
- toil/test/wdl/wdl_specification/v1_spec.json +1 -0
- toil/test/wdl/wdl_specification/v1_spec_declaration.wdl +39 -0
- toil/test/wdl/wdltoil_test.py +680 -407
- toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
- toil/version.py +9 -9
- toil/wdl/wdltoil.py +336 -123
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/METADATA +5 -4
- toil-8.2.0.dist-info/RECORD +439 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/WHEEL +1 -1
- toil-8.1.0b1.dist-info/RECORD +0 -259
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/entry_points.txt +0 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info/licenses}/LICENSE +0 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/top_level.txt +0 -0
toil/job.py
CHANGED

@@ -51,11 +51,11 @@ from typing import (
 from urllib.error import HTTPError
 from urllib.parse import urlsplit, unquote, urljoin

-from toil import memoize
-
 import dill
 from configargparse import ArgParser

+from toil.lib.memoize import memoize
+from toil.lib.misc import StrPath
 from toil.lib.io import is_remote_url

 if sys.version_info < (3, 11):

@@ -140,8 +140,8 @@ class FilesDownloadedStoppingPointReached(DebugStoppingPointReached):
     """

     def __init__(
-        self, message, host_and_job_paths: Optional[list[tuple[str, str]]] = None
-    ):
+        self, message: str, host_and_job_paths: Optional[list[tuple[str, str]]] = None
+    ) -> None:
         super().__init__(message)

         # Save the host and user-code-visible paths of files, in case we're

@@ -307,7 +307,7 @@ def parse_accelerator(
         parsed["model"] = possible_description
     elif isinstance(spec, dict):
         # It's a dict, so merge with the defaults.
-        parsed.update(spec)
+        parsed.update(cast(AcceleratorRequirement, spec))
         # TODO: make sure they didn't misspell keys or something
     else:
         raise TypeError(
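The cast() added in the parse_accelerator hunk above works around a TypedDict limitation in static type checkers: dict.update() on a TypedDict-typed value only accepts another value of that same TypedDict type. A minimal, self-contained sketch of the pattern (the fields here are illustrative, not Toil's actual AcceleratorRequirement definition):

from typing import TypedDict, cast

class AcceleratorRequirement(TypedDict, total=False):
    # Hypothetical fields for illustration only.
    count: int
    kind: str
    model: str

def parse_spec(spec: object) -> AcceleratorRequirement:
    # Start from defaults, then merge a user-supplied dict on top.
    parsed: AcceleratorRequirement = {"count": 1, "kind": "gpu"}
    if isinstance(spec, dict):
        # Without the cast, type checkers reject updating a TypedDict
        # from a plain dict, even if the keys happen to be valid.
        parsed.update(cast(AcceleratorRequirement, spec))
    return parsed

print(parse_spec({"model": "nvidia-tesla-k80"}))
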
@@ -816,7 +816,7 @@ class JobDescription(Requirer):

     def __init__(
         self,
-        requirements: Mapping[str, Union[int, str, bool]],
+        requirements: Mapping[str, Union[int, str, float, bool, list]],
         jobName: str,
         unitName: Optional[str] = "",
         displayName: Optional[str] = "",

@@ -1767,7 +1767,7 @@ class Job:
         # Holds flags set by set_debug_flag()
         self._debug_flags: set[str] = set()

-    def __str__(self):
+    def __str__(self) -> str:
         """
         Produce a useful logging string to identify this Job and distinguish it
         from its JobDescription.

@@ -1812,16 +1812,16 @@ class Job:
         return self.description.disk

     @disk.setter
-    def disk(self, val):
+    def disk(self, val: int) -> None:
         self.description.disk = val

     @property
-    def memory(self):
+    def memory(self) -> int:
         """The maximum number of bytes of memory the job will require to run."""
         return self.description.memory

     @memory.setter
-    def memory(self, val):
+    def memory(self, val: int) -> None:
         self.description.memory = val

     @property

@@ -1830,7 +1830,7 @@ class Job:
         return self.description.cores

     @cores.setter
-    def cores(self, val):
+    def cores(self, val: int) -> None:
         self.description.cores = val

     @property

@@ -1848,11 +1848,11 @@ class Job:
         return self.description.preemptible

     @deprecated(new_function_name="preemptible")
-    def preemptable(self):
+    def preemptable(self) -> bool:
         return self.description.preemptible

     @preemptible.setter
-    def preemptible(self, val):
+    def preemptible(self, val: bool) -> None:
         self.description.preemptible = val

     @property

@@ -1865,13 +1865,13 @@ class Job:
         return self.description.files_to_use

     @files_to_use.setter
-    def files_to_use(self, val: set[FileID]):
+    def files_to_use(self, val: set[FileID]) -> None:
         self.description.files_to_use = val

-    def add_to_files_to_use(self, val: FileID):
+    def add_to_files_to_use(self, val: FileID) -> None:
         self.description.files_to_use.add(val)

-    def remove_from_files_to_use(self, val: FileID):
+    def remove_from_files_to_use(self, val: FileID) -> None:
         self.description.files_to_use.remove(val)

     def assignConfig(self, config: Config) -> None:

@@ -2296,7 +2296,7 @@ class Job:

         return {self._registry[jid] for jid in roots}

-    def checkJobGraphConnected(self):
+    def checkJobGraphConnected(self) -> None:
         """
         :raises toil.job.JobGraphDeadlockException: if :func:`toil.job.Job.getRootJobs` does \
         not contain exactly one root job.

@@ -2312,7 +2312,7 @@ class Job:
                 "Graph does not contain exactly one" " root job: %s" % rootJobs
             )

-    def checkJobGraphAcylic(self):
+    def checkJobGraphAcylic(self) -> None:
         """
         :raises toil.job.JobGraphDeadlockException: if the connected component \
         of jobs containing this job contains any cycles of child/followOn dependencies \

@@ -2486,10 +2486,13 @@ class Job:
         """Used to setup and run Toil workflow."""

         @staticmethod
-        def getDefaultArgumentParser(jobstore_as_flag: bool = False) ->
+        def getDefaultArgumentParser(jobstore_as_flag: bool = False) -> ArgParser:
             """
             Get argument parser with added toil workflow options.

+            This is the Right Way to get an argument parser in a Toil Python
+            workflow.
+
             :param jobstore_as_flag: make the job store option a --jobStore flag instead of a required jobStore positional argument.
             :returns: The argument parser used by a toil workflow with added Toil options.
             """

@@ -2499,7 +2502,7 @@ class Job:

         @staticmethod
         def getDefaultOptions(
-            jobStore: Optional[
+            jobStore: Optional[StrPath] = None, jobstore_as_flag: bool = False
         ) -> Namespace:
             """
             Get default options for a toil workflow.

@@ -2520,9 +2523,9 @@ class Job:
             )
             arguments = []
             if jobstore_as_flag and jobStore is not None:
-                arguments = ["--jobstore", jobStore]
+                arguments = ["--jobstore", str(jobStore)]
             if not jobstore_as_flag and jobStore is not None:
-                arguments = [jobStore]
+                arguments = [str(jobStore)]
             return parser.parse_args(args=arguments)

         @staticmethod

@@ -2534,6 +2537,13 @@ class Job:
             Adds the default toil options to an :mod:`optparse` or :mod:`argparse`
             parser object.

+            Consider using :meth:`getDefaultArgumentParser` instead, which will
+            produce a parser of the correct class to use Toil's config file and
+            environment variables. If ther parser passed here is just an
+            :class:`argparse.ArgumentParser` and not a
+            :class:`configargparse.ArgParser`, the Toil config file and
+            environment variables will not be respected.
+
             :param parser: Options object to add toil options to.
             :param jobstore_as_flag: make the job store option a --jobStore flag instead of a required jobStore positional argument.
             """
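The docstrings added above steer users toward Job.Runner.getDefaultArgumentParser(), since a plain argparse.ArgumentParser passed to addToilOptions() loses config-file and environment-variable support. A hedged sketch of the recommended pattern; the workflow function is invented for illustration, and exact option behaviour may differ by Toil version:

from toil.common import Toil
from toil.job import Job

def count_chars(job, text):
    # Illustrative job function, not part of Toil.
    job.log(f"counting characters in {text!r}")
    return len(text)

if __name__ == "__main__":
    # Preferred: a configargparse-based parser, so Toil's config file and
    # environment variables are honoured when options are parsed.
    parser = Job.Runner.getDefaultArgumentParser(jobstore_as_flag=True)
    options = parser.parse_args()

    with Toil(options) as toil:
        root = Job.wrapJobFn(count_chars, "hello world")
        print(toil.start(root))
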
@@ -2571,13 +2581,13 @@ class Job:

         def __init__(
             self,
-            memory=None,
-            cores=None,
-            disk=None,
-            accelerators=None,
-            preemptible=None,
-            unitName=
-        ):
+            memory: Optional[ParseableIndivisibleResource] = None,
+            cores: Optional[ParseableDivisibleResource] = None,
+            disk: Optional[ParseableIndivisibleResource] = None,
+            accelerators: Optional[ParseableAcceleratorRequirement] = None,
+            preemptible: Optional[ParseableFlag] = None,
+            unitName: Optional[str] = "",
+        ) -> None:
             """
             Memory, core and disk requirements are specified identically to as in \
             :func:`toil.job.Job.__init__`.

@@ -2603,7 +2613,7 @@ class Job:
             self.hostID = None

         @abstractmethod
-        def start(self, job: "
+        def start(self, job: "ServiceHostJob") -> Any:
             """
             Start the service.

@@ -2616,7 +2626,7 @@ class Job:
             """

         @abstractmethod
-        def stop(self, job: "
+        def stop(self, job: "ServiceHostJob") -> None:
             """
             Stops the service. Function can block until complete.

@@ -3140,7 +3150,7 @@ class Job:
         startTime = time.time()
         startClock = ResourceMonitor.get_total_cpu_time()
         baseDir = os.getcwd()
-
+
         succeeded = False
         try:
             yield

@@ -3303,7 +3313,9 @@ class FunctionWrappingJob(Job):
     Job used to wrap a function. In its `run` method the wrapped function is called.
     """

-    def __init__(
+    def __init__(
+        self, userFunction: Callable[[...], Any], *args: Any, **kwargs: Any
+    ) -> None:
         """
         :param callable userFunction: The function to wrap. It will be called with ``*args`` and
             ``**kwargs`` as arguments.

@@ -3326,7 +3338,9 @@ class FunctionWrappingJob(Job):
             list(zip(argSpec.args[-len(argSpec.defaults) :], argSpec.defaults))
         )

-        def resolve(
+        def resolve(
+            key, default: Optional[Any] = None, dehumanize: bool = False
+        ) -> Any:
             try:
                 # First, try constructor arguments, ...
                 value = kwargs.pop(key)

@@ -3360,7 +3374,7 @@ class FunctionWrappingJob(Job):
         self._args = args
         self._kwargs = kwargs

-    def _getUserFunction(self):
+    def _getUserFunction(self) -> Callable[..., Any]:
         logger.debug(
             "Loading user function %s from module %s.",
             self.userFunctionName,

@@ -3369,14 +3383,14 @@ class FunctionWrappingJob(Job):
         userFunctionModule = self._loadUserModule(self.userFunctionModule)
         return getattr(userFunctionModule, self.userFunctionName)

-    def run(self, fileStore):
+    def run(self, fileStore: "AbstractFileStore") -> Any:
         userFunction = self._getUserFunction()
         return userFunction(*self._args, **self._kwargs)

-    def getUserScript(self):
+    def getUserScript(self) -> str:
         return self.userFunctionModule

-    def _jobName(self):
+    def _jobName(self) -> str:
         return ".".join(
             (
                 self.__class__.__name__,

@@ -3414,10 +3428,10 @@ class JobFunctionWrappingJob(FunctionWrappingJob):
     """

     @property
-    def fileStore(self):
+    def fileStore(self) -> "AbstractFileStore":
         return self._fileStore

-    def run(self, fileStore):
+    def run(self, fileStore: "AbstractFileStore") -> Any:
         userFunction = self._getUserFunction()
         rValue = userFunction(*((self,) + tuple(self._args)), **self._kwargs)
         return rValue

@@ -3513,7 +3527,7 @@ class EncapsulatedJob(Job):
     the same value after A or A.encapsulate() has been run.
     """

-    def __init__(self, job, unitName=None):
+    def __init__(self, job: Optional[Job], unitName: Optional[str] = None) -> None:
         """
         :param toil.job.Job job: the job to encapsulate.
         :param str unitName: human-readable name to identify this job instance.

@@ -3547,7 +3561,7 @@ class EncapsulatedJob(Job):
             self.encapsulatedJob = None
             self.encapsulatedFollowOn = None

-    def addChild(self, childJob):
+    def addChild(self, childJob: Job) -> Job:
         if self.encapsulatedFollowOn is None:
             raise RuntimeError(
                 "Children cannot be added to EncapsulatedJob while it is running"

@@ -3563,7 +3577,7 @@ class EncapsulatedJob(Job):
             self.encapsulatedFollowOn, service, parentService=parentService
         )

-    def addFollowOn(self, followOnJob):
+    def addFollowOn(self, followOnJob: Job) -> Job:
         if self.encapsulatedFollowOn is None:
             raise RuntimeError(
                 "Follow-ons cannot be added to EncapsulatedJob while it is running"

@@ -3986,7 +4000,7 @@ class CombineImportsJob(Job):
         self._d = d
         super().__init__(**kwargs)

-    def run(self, file_store: AbstractFileStore) -> Promised[Dict[str, FileID]]:
+    def run(self, file_store: "AbstractFileStore") -> Promised[Dict[str, FileID]]:
         """
         Merge the dicts
         """

@@ -4041,7 +4055,7 @@ class WorkerImportJob(Job):
             path_to_fileid[file] = imported
         return path_to_fileid

-    def run(self, file_store: AbstractFileStore) -> Promised[Dict[str, FileID]]:
+    def run(self, file_store: "AbstractFileStore") -> Promised[Dict[str, FileID]]:
         """
         Import the workflow inputs and then create and run the workflow.
         :return: Promise of workflow outputs

@@ -4077,7 +4091,7 @@ class ImportsJob(Job):
         self._import_worker_disk = import_worker_disk

     def run(
-        self, file_store: AbstractFileStore
+        self, file_store: "AbstractFileStore"
     ) -> Tuple[Promised[Dict[str, FileID]], Dict[str, FileMetadata]]:
         """
         Import the workflow inputs and then create and run the workflow.

@@ -4107,7 +4121,8 @@ class ImportsJob(Job):
                 # schedule the individual file
                 per_batch_files.append(filename)
                 file_batches.append(per_batch_files)
-                # reset
+                # reset batch to empty
+                per_batch_files = []
                 per_batch_size = 0
             else:
                 per_batch_size += filesize
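The added per_batch_files = [] line above fixes the batching loop in ImportsJob.run: without the reset, every file already flushed to a finished batch would also be carried into all later batches. A standalone sketch of the corrected pattern (names and structure are illustrative; Toil's actual loop differs in detail):

def batch_files(files: list[tuple[str, int]], max_batch_size: int) -> list[list[str]]:
    batches: list[list[str]] = []
    current: list[str] = []
    current_size = 0
    for name, size in files:
        if current and current_size + size > max_batch_size:
            batches.append(current)
            current = []      # without this reset, earlier files leak into later batches
            current_size = 0
        current.append(name)
        current_size += size
    if current:
        batches.append(current)
    return batches

print(batch_files([("a", 40), ("b", 70), ("c", 20)], max_batch_size=100))
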
@@ -4274,7 +4289,7 @@ class PromisedRequirement:
         C = B.addChildFn(h, cores=PromisedRequirement(lambda x: 2*x, B.rv()))
     """

-    def __init__(self, valueOrCallable, *args):
+    def __init__(self, valueOrCallable: Any, *args: Any) -> None:
         """
         Initialize this Promised Requirement.

@@ -4298,7 +4313,7 @@ class PromisedRequirement:
         self._func = dill.dumps(func)
         self._args = list(args)

-    def getValue(self):
+    def getValue(self) -> Any:
         """Return PromisedRequirement value."""
         func = dill.loads(self._func)
         return func(*self._args)
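PromisedRequirement, whose constructor and getValue() gain annotations above, lets a job's resource requirement be computed from an upstream job's promised return value; the callable and its arguments are serialized with dill and resolved only when the requirement is needed. A hedged usage sketch based on the class's own docstring example (the job functions here are invented):

from toil.job import Job, PromisedRequirement

def measure(job):
    # Illustrative: pretend we inspected an input and report its size in bytes.
    return 2 * 1024**3

def crunch(job, payload):
    job.log(f"running with {job.disk} bytes of disk")

def build_workflow() -> Job:
    a = Job.wrapJobFn(measure)
    # The child's disk requirement is twice whatever `measure` returns,
    # resolved lazily once the promise is available.
    a.addChildJobFn(
        crunch, "data", disk=PromisedRequirement(lambda x: 2 * x, a.rv())
    )
    return a
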
toil/jobStores/abstractJobStore.py
CHANGED

@@ -1686,17 +1686,39 @@ class AbstractJobStore(ABC):
         sharedFileName: str,
         encoding: Optional[str] = None,
         errors: Optional[str] = None,
-    ) -> ContextManager[IO[bytes]]:
+    ) -> Union[ContextManager[IO[str]], ContextManager[IO[bytes]]]:
         return self.read_shared_file_stream(sharedFileName, encoding, errors)

+    @overload
     @abstractmethod
     @contextmanager
     def read_shared_file_stream(
         self,
         shared_file_name: str,
-        encoding:
+        encoding: str,
+        errors: Optional[str] = None,
+    ) -> Iterator[IO[str]]:
+        """If encoding is specified, then a text file handle is provided."""
+
+    @overload
+    @abstractmethod
+    @contextmanager
+    def read_shared_file_stream(
+        self,
+        shared_file_name: str,
+        encoding: Literal[None] = None,
         errors: Optional[str] = None,
     ) -> Iterator[IO[bytes]]:
+        """If no encoding is provided, then a bytest file handle is provided."""
+
+    @abstractmethod
+    @contextmanager
+    def read_shared_file_stream(
+        self,
+        shared_file_name: str,
+        encoding: Optional[str] = None,
+        errors: Optional[str] = None,
+    ) -> Union[Iterator[IO[str]], Iterator[IO[bytes]]]:
         """
         Returns a context manager yielding a readable file handle to the global file referenced
         by the given name.

@@ -1711,7 +1733,6 @@ class AbstractJobStore(ABC):
         are the same as for open(). Defaults to 'strict' when an encoding is specified.

         :return: a context manager yielding a readable file handle
-        :rtype: Iterator[IO[bytes]]
         """
         raise NotImplementedError()

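For callers, the overloads above mean the static type of the yielded handle follows the encoding argument: pass an encoding and you get a text (IO[str]) stream, omit it and you get bytes. A small sketch against a FileJobStore (the shared file name here is hypothetical):

from toil.jobStores.fileJobStore import FileJobStore

def read_both_ways(store: FileJobStore) -> None:
    # Text mode: an encoding is given, so the handle yields str.
    with store.read_shared_file_stream("notes.txt", encoding="utf-8") as text_handle:
        first_line: str = text_handle.readline()

    # Binary mode: no encoding, so the handle yields bytes.
    with store.read_shared_file_stream("notes.txt") as byte_handle:
        header: bytes = byte_handle.read(16)
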
toil/jobStores/fileJobStore.py
CHANGED

@@ -395,6 +395,7 @@ class FileJobStore(AbstractJobStore):

     @classmethod
     def _url_exists(cls, url: ParseResult) -> bool:
+        # Note that broken symlinks will not be shown to exist.
         return os.path.exists(cls._extract_path_from_url(url))

     @classmethod

@@ -771,8 +772,31 @@ class FileJobStore(AbstractJobStore):
         ) as f:
             yield f

+    @overload
     @contextmanager
-    def read_shared_file_stream(
+    def read_shared_file_stream(
+        self,
+        shared_file_name: str,
+        encoding: str,
+        errors: Optional[str] = None,
+    ) -> Iterator[IO[str]]: ...
+
+    @overload
+    @contextmanager
+    def read_shared_file_stream(
+        self,
+        shared_file_name: str,
+        encoding: Literal[None] = None,
+        errors: Optional[str] = None,
+    ) -> Iterator[IO[bytes]]: ...
+
+    @contextmanager
+    def read_shared_file_stream(
+        self,
+        shared_file_name: str,
+        encoding: Optional[str] = None,
+        errors: Optional[str] = None,
+    ) -> Union[Iterator[IO[bytes]], Iterator[IO[str]]]:
         self._requireValidSharedFileName(shared_file_name)
         try:
             with open(