toil 8.1.0b1__py3-none-any.whl → 8.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- toil/__init__.py +0 -35
- toil/batchSystems/abstractBatchSystem.py +1 -1
- toil/batchSystems/abstractGridEngineBatchSystem.py +1 -1
- toil/batchSystems/awsBatch.py +1 -1
- toil/batchSystems/cleanup_support.py +1 -1
- toil/batchSystems/kubernetes.py +53 -7
- toil/batchSystems/local_support.py +1 -1
- toil/batchSystems/mesos/batchSystem.py +13 -8
- toil/batchSystems/mesos/test/__init__.py +3 -2
- toil/batchSystems/singleMachine.py +1 -1
- toil/batchSystems/slurm.py +27 -26
- toil/bus.py +5 -3
- toil/common.py +39 -11
- toil/cwl/cwltoil.py +1 -1
- toil/job.py +64 -49
- toil/jobStores/abstractJobStore.py +24 -3
- toil/jobStores/fileJobStore.py +25 -1
- toil/jobStores/googleJobStore.py +104 -30
- toil/leader.py +9 -0
- toil/lib/accelerators.py +3 -1
- toil/lib/aws/utils.py.orig +504 -0
- toil/lib/bioio.py +1 -1
- toil/lib/docker.py +252 -91
- toil/lib/dockstore.py +11 -3
- toil/lib/exceptions.py +5 -3
- toil/lib/history.py +87 -13
- toil/lib/history_submission.py +23 -9
- toil/lib/io.py +34 -22
- toil/lib/misc.py +7 -1
- toil/lib/resources.py +2 -1
- toil/lib/threading.py +11 -10
- toil/options/common.py +8 -0
- toil/options/wdl.py +11 -0
- toil/server/api_spec/LICENSE +201 -0
- toil/server/api_spec/README.rst +5 -0
- toil/server/cli/wes_cwl_runner.py +2 -1
- toil/test/__init__.py +275 -115
- toil/test/batchSystems/batchSystemTest.py +227 -205
- toil/test/batchSystems/test_slurm.py +27 -0
- toil/test/cactus/pestis.tar.gz +0 -0
- toil/test/conftest.py +7 -0
- toil/test/cwl/2.fasta +11 -0
- toil/test/cwl/2.fastq +12 -0
- toil/test/cwl/conftest.py +1 -1
- toil/test/cwl/cwlTest.py +999 -867
- toil/test/cwl/directory/directory/file.txt +15 -0
- toil/test/cwl/download_directory_file.json +4 -0
- toil/test/cwl/download_directory_s3.json +4 -0
- toil/test/cwl/download_file.json +6 -0
- toil/test/cwl/download_http.json +6 -0
- toil/test/cwl/download_https.json +6 -0
- toil/test/cwl/download_s3.json +6 -0
- toil/test/cwl/download_subdirectory_file.json +5 -0
- toil/test/cwl/download_subdirectory_s3.json +5 -0
- toil/test/cwl/empty.json +1 -0
- toil/test/cwl/mock_mpi/fake_mpi.yml +8 -0
- toil/test/cwl/mock_mpi/fake_mpi_run.py +42 -0
- toil/test/cwl/optional-file-exists.json +6 -0
- toil/test/cwl/optional-file-missing.json +6 -0
- toil/test/cwl/preemptible_expression.json +1 -0
- toil/test/cwl/revsort-job-missing.json +6 -0
- toil/test/cwl/revsort-job.json +6 -0
- toil/test/cwl/s3_secondary_file.json +16 -0
- toil/test/cwl/seqtk_seq_job.json +6 -0
- toil/test/cwl/stream.json +6 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.dat +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f1 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f1i +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f2 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f2_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f3 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f3_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f4 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f4_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f5 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.info +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.lock +0 -0
- toil/test/cwl/whale.txt +16 -0
- toil/test/docs/scripts/example_alwaysfail.py +38 -0
- toil/test/docs/scripts/example_alwaysfail_with_files.wdl +33 -0
- toil/test/docs/scripts/example_cachingbenchmark.py +117 -0
- toil/test/docs/scripts/stagingExampleFiles/in.txt +1 -0
- toil/test/docs/scripts/stagingExampleFiles/out.txt +2 -0
- toil/test/docs/scripts/tutorial_arguments.py +23 -0
- toil/test/docs/scripts/tutorial_debugging.patch +12 -0
- toil/test/docs/scripts/tutorial_debugging_hangs.wdl +126 -0
- toil/test/docs/scripts/tutorial_debugging_works.wdl +129 -0
- toil/test/docs/scripts/tutorial_docker.py +20 -0
- toil/test/docs/scripts/tutorial_dynamic.py +24 -0
- toil/test/docs/scripts/tutorial_encapsulation.py +28 -0
- toil/test/docs/scripts/tutorial_encapsulation2.py +29 -0
- toil/test/docs/scripts/tutorial_helloworld.py +15 -0
- toil/test/docs/scripts/tutorial_invokeworkflow.py +27 -0
- toil/test/docs/scripts/tutorial_invokeworkflow2.py +30 -0
- toil/test/docs/scripts/tutorial_jobfunctions.py +22 -0
- toil/test/docs/scripts/tutorial_managing.py +29 -0
- toil/test/docs/scripts/tutorial_managing2.py +56 -0
- toil/test/docs/scripts/tutorial_multiplejobs.py +25 -0
- toil/test/docs/scripts/tutorial_multiplejobs2.py +21 -0
- toil/test/docs/scripts/tutorial_multiplejobs3.py +22 -0
- toil/test/docs/scripts/tutorial_promises.py +25 -0
- toil/test/docs/scripts/tutorial_promises2.py +30 -0
- toil/test/docs/scripts/tutorial_quickstart.py +22 -0
- toil/test/docs/scripts/tutorial_requirements.py +44 -0
- toil/test/docs/scripts/tutorial_services.py +45 -0
- toil/test/docs/scripts/tutorial_staging.py +45 -0
- toil/test/docs/scripts/tutorial_stats.py +64 -0
- toil/test/lib/aws/test_iam.py +3 -1
- toil/test/lib/dockerTest.py +205 -122
- toil/test/lib/test_history.py +101 -77
- toil/test/provisioners/aws/awsProvisionerTest.py +12 -9
- toil/test/provisioners/clusterTest.py +4 -4
- toil/test/provisioners/gceProvisionerTest.py +16 -14
- toil/test/sort/sort.py +4 -1
- toil/test/src/busTest.py +17 -17
- toil/test/src/deferredFunctionTest.py +145 -132
- toil/test/src/importExportFileTest.py +71 -63
- toil/test/src/jobEncapsulationTest.py +27 -28
- toil/test/src/jobServiceTest.py +149 -133
- toil/test/src/jobTest.py +219 -211
- toil/test/src/miscTests.py +66 -60
- toil/test/src/promisedRequirementTest.py +163 -169
- toil/test/src/regularLogTest.py +24 -24
- toil/test/src/resourceTest.py +82 -76
- toil/test/src/restartDAGTest.py +51 -47
- toil/test/src/resumabilityTest.py +24 -19
- toil/test/src/retainTempDirTest.py +60 -57
- toil/test/src/systemTest.py +17 -13
- toil/test/src/threadingTest.py +29 -32
- toil/test/utils/ABCWorkflowDebug/B_file.txt +1 -0
- toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +204 -0
- toil/test/utils/ABCWorkflowDebug/mkFile.py +16 -0
- toil/test/utils/ABCWorkflowDebug/sleep.cwl +12 -0
- toil/test/utils/ABCWorkflowDebug/sleep.yaml +1 -0
- toil/test/utils/toilDebugTest.py +117 -102
- toil/test/utils/toilKillTest.py +54 -53
- toil/test/utils/utilsTest.py +303 -229
- toil/test/wdl/lint_error.wdl +9 -0
- toil/test/wdl/md5sum/empty_file.json +1 -0
- toil/test/wdl/md5sum/md5sum-gs.json +1 -0
- toil/test/wdl/md5sum/md5sum.1.0.wdl +32 -0
- toil/test/wdl/md5sum/md5sum.input +1 -0
- toil/test/wdl/md5sum/md5sum.json +1 -0
- toil/test/wdl/md5sum/md5sum.wdl +25 -0
- toil/test/wdl/miniwdl_self_test/inputs-namespaced.json +1 -0
- toil/test/wdl/miniwdl_self_test/inputs.json +1 -0
- toil/test/wdl/miniwdl_self_test/self_test.wdl +40 -0
- toil/test/wdl/standard_library/as_map.json +16 -0
- toil/test/wdl/standard_library/as_map_as_input.wdl +23 -0
- toil/test/wdl/standard_library/as_pairs.json +7 -0
- toil/test/wdl/standard_library/as_pairs_as_input.wdl +23 -0
- toil/test/wdl/standard_library/ceil.json +3 -0
- toil/test/wdl/standard_library/ceil_as_command.wdl +16 -0
- toil/test/wdl/standard_library/ceil_as_input.wdl +16 -0
- toil/test/wdl/standard_library/collect_by_key.json +1 -0
- toil/test/wdl/standard_library/collect_by_key_as_input.wdl +23 -0
- toil/test/wdl/standard_library/cross.json +11 -0
- toil/test/wdl/standard_library/cross_as_input.wdl +19 -0
- toil/test/wdl/standard_library/flatten.json +7 -0
- toil/test/wdl/standard_library/flatten_as_input.wdl +18 -0
- toil/test/wdl/standard_library/floor.json +3 -0
- toil/test/wdl/standard_library/floor_as_command.wdl +16 -0
- toil/test/wdl/standard_library/floor_as_input.wdl +16 -0
- toil/test/wdl/standard_library/keys.json +8 -0
- toil/test/wdl/standard_library/keys_as_input.wdl +24 -0
- toil/test/wdl/standard_library/length.json +7 -0
- toil/test/wdl/standard_library/length_as_input.wdl +16 -0
- toil/test/wdl/standard_library/length_as_input_with_map.json +7 -0
- toil/test/wdl/standard_library/length_as_input_with_map.wdl +17 -0
- toil/test/wdl/standard_library/length_invalid.json +3 -0
- toil/test/wdl/standard_library/range.json +3 -0
- toil/test/wdl/standard_library/range_0.json +3 -0
- toil/test/wdl/standard_library/range_as_input.wdl +17 -0
- toil/test/wdl/standard_library/range_invalid.json +3 -0
- toil/test/wdl/standard_library/read_boolean.json +3 -0
- toil/test/wdl/standard_library/read_boolean_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_float.json +3 -0
- toil/test/wdl/standard_library/read_float_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_int.json +3 -0
- toil/test/wdl/standard_library/read_int_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_json.json +3 -0
- toil/test/wdl/standard_library/read_json_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_lines.json +3 -0
- toil/test/wdl/standard_library/read_lines_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_map.json +3 -0
- toil/test/wdl/standard_library/read_map_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_string.json +3 -0
- toil/test/wdl/standard_library/read_string_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_tsv.json +3 -0
- toil/test/wdl/standard_library/read_tsv_as_output.wdl +31 -0
- toil/test/wdl/standard_library/round.json +3 -0
- toil/test/wdl/standard_library/round_as_command.wdl +16 -0
- toil/test/wdl/standard_library/round_as_input.wdl +16 -0
- toil/test/wdl/standard_library/size.json +3 -0
- toil/test/wdl/standard_library/size_as_command.wdl +17 -0
- toil/test/wdl/standard_library/size_as_output.wdl +36 -0
- toil/test/wdl/standard_library/stderr.json +3 -0
- toil/test/wdl/standard_library/stderr_as_output.wdl +30 -0
- toil/test/wdl/standard_library/stdout.json +3 -0
- toil/test/wdl/standard_library/stdout_as_output.wdl +30 -0
- toil/test/wdl/standard_library/sub.json +3 -0
- toil/test/wdl/standard_library/sub_as_input.wdl +17 -0
- toil/test/wdl/standard_library/sub_as_input_with_file.wdl +17 -0
- toil/test/wdl/standard_library/transpose.json +6 -0
- toil/test/wdl/standard_library/transpose_as_input.wdl +18 -0
- toil/test/wdl/standard_library/write_json.json +6 -0
- toil/test/wdl/standard_library/write_json_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_lines.json +7 -0
- toil/test/wdl/standard_library/write_lines_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_map.json +6 -0
- toil/test/wdl/standard_library/write_map_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_tsv.json +6 -0
- toil/test/wdl/standard_library/write_tsv_as_command.wdl +17 -0
- toil/test/wdl/standard_library/zip.json +12 -0
- toil/test/wdl/standard_library/zip_as_input.wdl +19 -0
- toil/test/wdl/test.csv +3 -0
- toil/test/wdl/test.tsv +3 -0
- toil/test/wdl/testfiles/croo.wdl +38 -0
- toil/test/wdl/testfiles/drop_files.wdl +62 -0
- toil/test/wdl/testfiles/drop_files_subworkflow.wdl +13 -0
- toil/test/wdl/testfiles/empty.txt +0 -0
- toil/test/wdl/testfiles/not_enough_outputs.wdl +33 -0
- toil/test/wdl/testfiles/random.wdl +66 -0
- toil/test/wdl/testfiles/string_file_coercion.json +1 -0
- toil/test/wdl/testfiles/string_file_coercion.wdl +35 -0
- toil/test/wdl/testfiles/test.json +4 -0
- toil/test/wdl/testfiles/test_boolean.txt +1 -0
- toil/test/wdl/testfiles/test_float.txt +1 -0
- toil/test/wdl/testfiles/test_int.txt +1 -0
- toil/test/wdl/testfiles/test_lines.txt +5 -0
- toil/test/wdl/testfiles/test_map.txt +2 -0
- toil/test/wdl/testfiles/test_string.txt +1 -0
- toil/test/wdl/testfiles/url_to_file.wdl +13 -0
- toil/test/wdl/testfiles/url_to_optional_file.wdl +13 -0
- toil/test/wdl/testfiles/vocab.json +1 -0
- toil/test/wdl/testfiles/vocab.wdl +66 -0
- toil/test/wdl/testfiles/wait.wdl +34 -0
- toil/test/wdl/wdl_specification/type_pair.json +23 -0
- toil/test/wdl/wdl_specification/type_pair_basic.wdl +36 -0
- toil/test/wdl/wdl_specification/type_pair_with_files.wdl +36 -0
- toil/test/wdl/wdl_specification/v1_spec.json +1 -0
- toil/test/wdl/wdl_specification/v1_spec_declaration.wdl +39 -0
- toil/test/wdl/wdltoil_test.py +680 -407
- toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
- toil/version.py +9 -9
- toil/wdl/wdltoil.py +336 -123
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/METADATA +5 -4
- toil-8.2.0.dist-info/RECORD +439 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/WHEEL +1 -1
- toil-8.1.0b1.dist-info/RECORD +0 -259
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/entry_points.txt +0 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info/licenses}/LICENSE +0 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/top_level.txt +0 -0
toil/lib/docker.py
CHANGED
@@ -11,26 +11,40 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from collections.abc import Generator
 import base64
 import logging
 import os
 import re
 import struct
 from shlex import quote
-from typing import Optional
+from typing import (
+    cast,
+    overload,
+    Any,
+    Literal,
+    NoReturn,
+    Optional,
+    Union,
+    TYPE_CHECKING,
+)

 import requests

 import docker
-from docker.errors import (
+from docker.errors import (  # type: ignore[import-not-found]
     ContainerError,
     ImageNotFound,
     NotFound,
     create_api_error_from_http_exception,
 )
-from docker.utils.socket import consume_socket_output, demux_adaptor
+from docker.utils.socket import consume_socket_output, demux_adaptor  # type: ignore[import-not-found]
 from toil.lib.accelerators import get_host_accelerator_numbers

+if TYPE_CHECKING:
+    from docker.models.containers import Container  # type: ignore[import-not-found]
+    from toil.job import Job
+
 logger = logging.getLogger(__name__)

 FORGO = 0
@@ -38,50 +52,193 @@ STOP = 1
 RM = 2


-def dockerCheckOutput(*args, **kwargs):
+def dockerCheckOutput(*args: Any, **kwargs: Any) -> NoReturn:
     raise RuntimeError(
         "dockerCheckOutput() using subprocess.check_output() has been removed, "
         "please switch to apiDockerCall()."
     )


-def dockerCall(*args, **kwargs):
+def dockerCall(*args: Any, **kwargs: Any) -> NoReturn:
     raise RuntimeError(
         "dockerCall() using subprocess.check_output() has been removed, "
         "please switch to apiDockerCall()."
     )


-def subprocessDockerCall(*args, **kwargs):
+def subprocessDockerCall(*args: Any, **kwargs: Any) -> NoReturn:
     raise RuntimeError(
         "subprocessDockerCall() has been removed, " "please switch to apiDockerCall()."
     )


+@overload
+def apiDockerCall(
+    job: "Job",
+    image: str,
+    parameters: Optional[Union[list[str], list[list[str]]]] = None,
+    deferParam: Optional[int] = None,
+    volumes: Optional[dict[str, dict[str, str]]] = None,
+    working_dir: Optional[str] = None,
+    containerName: Optional[str] = None,
+    entrypoint: Optional[Union[str, list[str]]] = None,
+    detach: Literal[False] = False,
+    log_config: Optional[dict[str, str]] = None,
+    auto_remove: Optional[bool] = None,
+    remove: Optional[bool] = False,
+    user: Optional[str] = None,
+    environment: Optional[dict[str, str]] = None,
+    stdout: Optional[bool] = None,
+    stderr: bool = False,
+    stream: bool = False,
+    demux: Literal[False] = False,
+    streamfile: Optional[str] = None,
+    accelerators: Optional[list[int]] = None,
+    timeout: int = 365 * 24 * 60 * 60,
+    **kwargs: Any,
+) -> str:
+    """detach=False + demux=False → str"""
+
+
+@overload
+def apiDockerCall(
+    job: "Job",
+    image: str,
+    parameters: Optional[Union[list[str], list[list[str]]]] = None,
+    deferParam: Optional[int] = None,
+    volumes: Optional[dict[str, dict[str, str]]] = None,
+    working_dir: Optional[str] = None,
+    containerName: Optional[str] = None,
+    entrypoint: Optional[Union[str, list[str]]] = None,
+    detach: Literal[False] = False,
+    log_config: Optional[dict[str, str]] = None,
+    auto_remove: Optional[bool] = None,
+    remove: Optional[bool] = False,
+    user: Optional[str] = None,
+    environment: Optional[dict[str, str]] = None,
+    stdout: Optional[bool] = None,
+    stderr: bool = False,
+    stream: Literal[True] = True,
+    demux: Literal[True] = True,
+    streamfile: Optional[str] = None,
+    accelerators: Optional[list[int]] = None,
+    timeout: int = 365 * 24 * 60 * 60,
+    **kwargs: Any,
+) -> Generator[tuple[str, str]]:
+    """detach=False + demux=True + stream=True → log generator"""
+
+
+@overload
+def apiDockerCall(
+    job: "Job",
+    image: str,
+    parameters: Optional[Union[list[str], list[list[str]]]] = None,
+    deferParam: Optional[int] = None,
+    volumes: Optional[dict[str, dict[str, str]]] = None,
+    working_dir: Optional[str] = None,
+    containerName: Optional[str] = None,
+    entrypoint: Optional[Union[str, list[str]]] = None,
+    detach: Literal[False] = False,
+    log_config: Optional[dict[str, str]] = None,
+    auto_remove: Optional[bool] = None,
+    remove: Optional[bool] = False,
+    user: Optional[str] = None,
+    environment: Optional[dict[str, str]] = None,
+    stdout: Optional[bool] = None,
+    stderr: bool = False,
+    stream: Literal[False] = False,
+    demux: Literal[True] = True,
+    streamfile: Optional[str] = None,
+    accelerators: Optional[list[int]] = None,
+    timeout: int = 365 * 24 * 60 * 60,
+    **kwargs: Any,
+) -> tuple[str, str]:
+    """detach=False + demux=True + stream=False → string"""
+
+
+@overload
 def apiDockerCall(
-    job,
-    image,
-    parameters=None,
-    deferParam=None,
-    volumes=None,
-    working_dir=None,
-    containerName=None,
-    entrypoint=None,
-    detach=False,
-    log_config=None,
-    auto_remove=None,
-    remove=False,
-    user=None,
-    environment=None,
-    stdout=None,
-    stderr=False,
-    stream=False,
-    demux=False,
-    streamfile=None,
+    job: "Job",
+    image: str,
+    parameters: Optional[Union[list[str], list[list[str]]]] = None,
+    deferParam: Optional[int] = None,
+    volumes: Optional[dict[str, dict[str, str]]] = None,
+    working_dir: Optional[str] = None,
+    containerName: Optional[str] = None,
+    entrypoint: Optional[Union[str, list[str]]] = None,
+    detach: Literal[True] = True,
+    log_config: Optional[dict[str, str]] = None,
+    auto_remove: Optional[bool] = None,
+    remove: Optional[bool] = False,
+    user: Optional[str] = None,
+    environment: Optional[dict[str, str]] = None,
+    stdout: Optional[bool] = None,
+    stderr: bool = False,
+    stream: bool = False,
+    demux: bool = False,
+    streamfile: Optional[str] = None,
     accelerators: Optional[list[int]] = None,
-    timeout=365 * 24 * 60 * 60,
-    **kwargs,
-):
+    timeout: int = 365 * 24 * 60 * 60,
+    **kwargs: Any,
+) -> "Container":
+    """detach=True → Container"""
+
+
+@overload
+def apiDockerCall(
+    job: "Job",
+    image: str,
+    parameters: Optional[Union[list[str], list[list[str]]]] = None,
+    deferParam: Optional[int] = None,
+    volumes: Optional[dict[str, dict[str, str]]] = None,
+    working_dir: Optional[str] = None,
+    containerName: Optional[str] = None,
+    entrypoint: Optional[Union[str, list[str]]] = None,
+    detach: bool = False,
+    log_config: Optional[dict[str, str]] = None,
+    auto_remove: Optional[bool] = None,
+    remove: Optional[bool] = False,
+    user: Optional[str] = None,
+    environment: Optional[dict[str, str]] = None,
+    stdout: Optional[bool] = None,
+    stderr: bool = False,
+    stream: bool = False,
+    demux: bool = False,
+    streamfile: Optional[str] = None,
+    accelerators: Optional[list[int]] = None,
+    timeout: int = 365 * 24 * 60 * 60,
+    **kwargs: Any,
+) -> Union[
+    str, "Container", Generator[tuple[str, str]], Generator[str], tuple[str, str]
+]: ...
+
+
+def apiDockerCall(
+    job: "Job",
+    image: str,
+    parameters: Optional[Union[list[str], list[list[str]]]] = None,
+    deferParam: Optional[int] = None,
+    volumes: Optional[dict[str, dict[str, str]]] = None,
+    working_dir: Optional[str] = None,
+    containerName: Optional[str] = None,
+    entrypoint: Optional[Union[str, list[str]]] = None,
+    detach: bool = False,
+    log_config: Optional[dict[str, str]] = None,
+    auto_remove: Optional[bool] = None,
+    remove: Optional[bool] = False,
+    user: Optional[str] = None,
+    environment: Optional[dict[str, str]] = None,
+    stdout: Optional[bool] = None,
+    stderr: bool = False,
+    stream: bool = False,
+    demux: bool = False,
+    streamfile: Optional[str] = None,
+    accelerators: Optional[list[int]] = None,
+    timeout: int = 365 * 24 * 60 * 60,
+    **kwargs: Any,
+) -> Union[
+    str, "Container", Generator[tuple[str, str]], Generator[str], tuple[str, str]
+]:
     """
     A toil wrapper for the python docker API.

@@ -109,66 +266,67 @@ def apiDockerCall(
     without output capture, the container is started and returned without
     waiting for it to finish.

-    :param
-    :param
-    :param
-    :param
-    :param int deferParam: Action to take on the container upon job completion.
+    :param job: The Job instance for the calling function.
+    :param image: Name of the Docker image to be used.
+                  (e.g. 'quay.io/ucsc_cgl/samtools:latest')
+    :param parameters: A list of string elements. If there are
+                       multiple elements, these will be joined with
+                       spaces. This handling of multiple elements
+                       provides backwards compatibility with previous
+                       versions which called docker using
+                       subprocess.check_call().
+                       If list of lists: list[list[str]], then treat
+                       as successive commands chained with pipe.
+    :param deferParam: Action to take on the container upon job completion.
            FORGO (0) leaves the container untouched and running.
            STOP (1) Sends SIGTERM, then SIGKILL if necessary to the container.
            RM (2) Immediately send SIGKILL to the container. This is the default
               behavior if deferParam is set to None.
-    :param
-    :param
-    :param
-    :param
-                  (stdout, stderr). Ignored if detach=True. (default: False).
-    :param str streamfile: Collect container output to this file if detach=True and
-                           stderr and/or stdout are True. Defaults to "output.log".
-    :param dict log_config: Specify the logs to return from the container. See:
-                            https://docker-py.readthedocs.io/en/stable/containers.html
-    :param bool remove: Remove the container on exit or not.
-    :param str user: The container will be run with the privileges of
-                     the user specified. Can be an actual name, such
-                     as 'root' or 'lifeisaboutfishtacos', or it can be
-                     the uid or gid of the user ('0' is root; '1000' is
-                     an example of a less privileged uid or gid), or a
-                     complement of the uid:gid (RECOMMENDED), such as
-                     '0:0' (root user : root group) or '1000:1000'
-                     (some other user : some other user group).
+    :param volumes: A dictionary of volume locations to volume options.
+    :param working_dir: The working directory.
+    :param containerName: The name/ID of the container.
+    :param entrypoint: Prepends commands sent to the container. See:
+                       https://docker-py.readthedocs.io/en/stable/containers.html
+    :param detach: Run the container in detached mode. (equivalent to '-d')
+    :param log_config: Specify the logs to return from the container. See:
+                       https://docker-py.readthedocs.io/en/stable/containers.html
+    :param remove: Remove the container on exit or not.
+    :param user: The container will be run with the privileges of
+                 the user specified. Can be an actual name, such
+                 as 'root' or 'lifeisaboutfishtacos', or it can be
+                 the uid or gid of the user ('0' is root; '1000' is
+                 an example of a less privileged uid or gid), or a
+                 complement of the uid:gid (RECOMMENDED), such as
+                 '0:0' (root user : root group) or '1000:1000'
+                 (some other user : some other user group).
     :param environment: Allows one to set environment variables inside of the
-                        container
-    :param
+                        container.
+    :param stdout: Return logs from STDOUT when detach=False (default: True).
+                   Block and capture stdout to a file when detach=True
+                   (default: False). Output capture defaults to output.log,
+                   and can be specified with the "streamfile" kwarg.
+    :param stderr: Return logs from STDERR when detach=False (default: False).
+                   Block and capture stderr to a file when detach=True
+                   (default: False). Output capture defaults to output.log,
+                   and can be specified with the "streamfile" kwarg.
+    :param stream: If True and detach=False, return a log generator instead
+                   of a string. Ignored if detach=True. (default: False).
+    :param demux: Similar to `demux` in container.exec_run(). If True and
+                  detach=False, returns a tuple of (stdout, stderr). If
+                  stream=True, returns a log generator with tuples of
+                  (stdout, stderr). Ignored if detach=True. (default: False).
+    :param streamfile: Collect container output to this file if detach=True and
+                       stderr and/or stdout are True. Defaults to "output.log".
     :param accelerators: Toil accelerator numbers (usually GPUs) to forward to
                          the container. These are interpreted in the current
                          Python process's environment. See
                          toil.lib.accelerators.get_individual_local_accelerators()
                          for the menu of available accelerators.
+    :param timeout: Use the given timeout in seconds for interactions with
+                    the Docker daemon. Note that the underlying docker module is
+                    not always able to abort ongoing reads and writes in order
+                    to respect the timeout. Defaults to 1 year (i.e. wait
+                    essentially indefinitely).
     :param kwargs: Additional keyword arguments supplied to the docker API's
                    run command. The list is 75 keywords total, for examples
                    and full documentation see:
@@ -210,9 +368,8 @@ def apiDockerCall(
         chain_params = [
             " ".join(quote(arg) for arg in command) for command in parameters
         ]
-        command = " | ".join(chain_params)
         pipe_prefix = "set -eo pipefail && "
-        command = [pipe_prefix + command]
+        command: Union[None, str, list[str]] = [pipe_prefix + " | ".join(chain_params)]
         logger.debug("Calling docker with: " + repr(command))

     # If 'parameters' is a normal list, join all elements into a single string
@@ -221,8 +378,8 @@ def apiDockerCall(
     # Note that this is still a list, and the docker API prefers this as best
     # practice:
     # http://docker-py.readthedocs.io/en/stable/containers.html
-    elif len(parameters) > 0 and type(parameters) is list:
-        command = " ".join(quote(arg) for arg in parameters)
+    elif len(parameters) > 0 and isinstance(parameters, list):
+        command = " ".join(quote(arg) for arg in cast(list[str], parameters))
     logger.debug("Calling docker with: " + repr(command))

     # If the 'parameters' lists are empty, they are respecified as None, which
@@ -237,7 +394,7 @@ def apiDockerCall(
     if deferParam not in (None, FORGO, STOP, RM):
         raise RuntimeError("Please provide a valid value for deferParam.")

-    client = docker.from_env(version="auto", timeout=timeout)
+    client = docker.from_env(version="auto", timeout=timeout)  # type: ignore[attr-defined]

     if deferParam is None:
         deferParam = RM
@@ -259,7 +416,7 @@ def apiDockerCall(
     device_requests = []
     if accelerators:
         # Map accelerator numbers to host numbers
-        host_accelerators = []
+        host_accelerators: list[int] = []
         accelerator_mapping = get_host_accelerator_numbers()
         for our_number in accelerators:
             if our_number >= len(accelerator_mapping):
@@ -272,7 +429,8 @@ def apiDockerCall(
         # TODO: Here we assume that the host accelerators are all GPUs
         device_requests.append(
             docker.types.DeviceRequest(
-                device_ids=[",".join(host_accelerators)],
+                device_ids=[",".join(str(x) for x in host_accelerators)],
+                capabilities=[["gpu"]],
             )
         )

@@ -399,7 +557,7 @@ def dockerKill(
                     to respect the timeout. Defaults to 1 year (i.e. wait
                     essentially indefinitely).
     """
-    client = docker.from_env(version="auto", timeout=timeout)
+    client = docker.from_env(version="auto", timeout=timeout)  # type: ignore[attr-defined]
     try:
         this_container = client.containers.get(container_name)
         while this_container.status == "running":
@@ -432,7 +590,9 @@ def dockerStop(container_name: str, remove: bool = False) -> None:
     dockerKill(container_name, gentleKill=True, remove=remove)


-def containerIsRunning(container_name: str, timeout: int = 365 * 24 * 60 * 60):
+def containerIsRunning(
+    container_name: str, timeout: int = 365 * 24 * 60 * 60
+) -> Optional[bool]:
     """
     Checks whether the container is running or not.

@@ -444,7 +604,7 @@ def containerIsRunning(container_name: str, timeout: int = 365 * 24 * 60 * 60):
     :returns: True if status is 'running', False if status is anything else,
               and None if the container does not exist.
     """
-    client = docker.from_env(version="auto", timeout=timeout)
+    client = docker.from_env(version="auto", timeout=timeout)  # type: ignore[attr-defined]
     try:
         this_container = client.containers.get(container_name)
         if this_container.status == "running":
@@ -459,7 +619,7 @@ def containerIsRunning(container_name: str, timeout: int = 365 * 24 * 60 * 60):
         raise create_api_error_from_http_exception(e)


-def getContainerName(job):
+def getContainerName(job: "Job") -> str:
     """
     Create a random string including the job name, and return it. Name will
     match ``[a-zA-Z0-9][a-zA-Z0-9_.-]``.
@@ -473,12 +633,13 @@ def getContainerName(job):
     return name


-def _multiplexed_response_stream_helper(response):
+def _multiplexed_response_stream_helper(
+    response: requests.Response,
+) -> Generator[tuple[Any, Union[bytes, Any]]]:
     """
     A generator of multiplexed data blocks coming from a response stream modified from:
     https://github.com/docker/docker-py/blob/4.3.1-release/docker/api/client.py#L370

-    :param response: requests.Response
     :return: a generator with tuples of (stream_type, data)
     """
     while True:
toil/lib/dockstore.py
CHANGED
@@ -45,8 +45,15 @@ logger = logging.getLogger(__name__)

 # We assume TRS_ROOT is actually a Dockstore instance.

+# This is a publish-able token for production Dockstore for Toil to use.
+# This is NOT a secret value.
+DEFAULT_DOCKSTORE_TOKEN = "2bff46294daddef6df185452b04db6143ea8a59f52ee3c325d3e1df418511b7d"
+
 # How should we authenticate our Dockstore requests?
-DOCKSTORE_TOKEN = os.environ.get("TOIL_DOCKSTORE_TOKEN")
+DOCKSTORE_TOKEN = os.environ.get("TOIL_DOCKSTORE_TOKEN", DEFAULT_DOCKSTORE_TOKEN)
+
+# What platform should we report metrics as?
+DOCKSTORE_PLATFORM = "TOIL"


 # This is a https://schema.org/CompletedActionStatus
@@ -346,7 +353,7 @@ def send_metrics(trs_workflow_id: str, trs_version: str, workflow_runs: list[Run

     # Set the submission query string metadata
     submission_params = {
-        "platform": "TOIL",
+        "platform": DOCKSTORE_PLATFORM,
         "description": "Workflow status from Toil"
     }

@@ -366,6 +373,7 @@ def send_metrics(trs_workflow_id: str, trs_version: str, workflow_runs: list[Run
     try:
         result = web_session.post(endpoint_url, params=submission_params, json=to_post, headers=headers)
         result.raise_for_status()
+        logger.debug("Workflow metrics were accepted by Dockstore. Dockstore response code: %s", result.status_code)
     except requests.HTTPError as e:
         logger.warning("Workflow metrics were not accepted by Dockstore. Dockstore complained: %s", e.response.text)
         raise
@@ -375,5 +383,5 @@ def get_metrics_url(trs_workflow_id: str, trs_version: str, execution_id: str) -
     Get the URL where a workflow metrics object (for a workflow, or for a set of tasks) can be fetched back from.
     """

-    return f"{TRS_ROOT}/api/api/ga4gh/v2/extended/{quote(trs_workflow_id, safe='')}/versions/{quote(trs_version, safe='')}/execution?platform=TOIL&executionId={quote(execution_id, safe='')}"
+    return f"{TRS_ROOT}/api/api/ga4gh/v2/extended/{quote(trs_workflow_id, safe='')}/versions/{quote(trs_version, safe='')}/execution?platform={DOCKSTORE_PLATFORM}&executionId={quote(execution_id, safe='')}"
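
In practice, these constants mean Dockstore metrics submissions are now authenticated by default and tagged with a fixed platform name. A small sketch of the resulting behaviour, assuming toil.lib.dockstore has not already been imported in the process (the environment variable is read once at import time) and using a made-up token value:

import os

# A personal token still wins, but it must be in the environment before
# toil.lib.dockstore is first imported.
os.environ["TOIL_DOCKSTORE_TOKEN"] = "my-personal-dockstore-token"  # placeholder value

from toil.lib.dockstore import DOCKSTORE_PLATFORM, DOCKSTORE_TOKEN

print(DOCKSTORE_PLATFORM)  # "TOIL", the platform now reported with metrics
print(DOCKSTORE_TOKEN)     # the env value; without it, DEFAULT_DOCKSTORE_TOKEN is used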
toil/lib/exceptions.py
CHANGED
@@ -15,6 +15,8 @@
 # 5.14.2018: copied into Toil from https://github.com/BD2KGenomics/bd2k-python-lib

 import sys
+from typing import Optional
+import logging
 from urllib.parse import ParseResult


@@ -39,15 +41,15 @@ class panic:
     the primary exception will be reraised.
     """

-    def __init__(self, log=None):
+    def __init__(self, log: Optional[logging.Logger] = None) -> None:
         super().__init__()
         self.log = log
         self.exc_info = None

-    def __enter__(self):
+    def __enter__(self) -> None:
         self.exc_info = sys.exc_info()

-    def __exit__(self, *exc_info):
+    def __exit__(self, *exc_info) -> None:
         if self.log is not None and exc_info and exc_info[0]:
             self.log.warning("Exception during panic", exc_info=exc_info)
         exc_type, exc_value, traceback = self.exc_info