toil 8.1.0b1__py3-none-any.whl → 8.2.0__py3-none-any.whl
This diff reflects the changes between two publicly released package versions, as they appear in their respective public registries, and is provided for informational purposes only.
- toil/__init__.py +0 -35
- toil/batchSystems/abstractBatchSystem.py +1 -1
- toil/batchSystems/abstractGridEngineBatchSystem.py +1 -1
- toil/batchSystems/awsBatch.py +1 -1
- toil/batchSystems/cleanup_support.py +1 -1
- toil/batchSystems/kubernetes.py +53 -7
- toil/batchSystems/local_support.py +1 -1
- toil/batchSystems/mesos/batchSystem.py +13 -8
- toil/batchSystems/mesos/test/__init__.py +3 -2
- toil/batchSystems/singleMachine.py +1 -1
- toil/batchSystems/slurm.py +27 -26
- toil/bus.py +5 -3
- toil/common.py +39 -11
- toil/cwl/cwltoil.py +1 -1
- toil/job.py +64 -49
- toil/jobStores/abstractJobStore.py +24 -3
- toil/jobStores/fileJobStore.py +25 -1
- toil/jobStores/googleJobStore.py +104 -30
- toil/leader.py +9 -0
- toil/lib/accelerators.py +3 -1
- toil/lib/aws/utils.py.orig +504 -0
- toil/lib/bioio.py +1 -1
- toil/lib/docker.py +252 -91
- toil/lib/dockstore.py +11 -3
- toil/lib/exceptions.py +5 -3
- toil/lib/history.py +87 -13
- toil/lib/history_submission.py +23 -9
- toil/lib/io.py +34 -22
- toil/lib/misc.py +7 -1
- toil/lib/resources.py +2 -1
- toil/lib/threading.py +11 -10
- toil/options/common.py +8 -0
- toil/options/wdl.py +11 -0
- toil/server/api_spec/LICENSE +201 -0
- toil/server/api_spec/README.rst +5 -0
- toil/server/cli/wes_cwl_runner.py +2 -1
- toil/test/__init__.py +275 -115
- toil/test/batchSystems/batchSystemTest.py +227 -205
- toil/test/batchSystems/test_slurm.py +27 -0
- toil/test/cactus/pestis.tar.gz +0 -0
- toil/test/conftest.py +7 -0
- toil/test/cwl/2.fasta +11 -0
- toil/test/cwl/2.fastq +12 -0
- toil/test/cwl/conftest.py +1 -1
- toil/test/cwl/cwlTest.py +999 -867
- toil/test/cwl/directory/directory/file.txt +15 -0
- toil/test/cwl/download_directory_file.json +4 -0
- toil/test/cwl/download_directory_s3.json +4 -0
- toil/test/cwl/download_file.json +6 -0
- toil/test/cwl/download_http.json +6 -0
- toil/test/cwl/download_https.json +6 -0
- toil/test/cwl/download_s3.json +6 -0
- toil/test/cwl/download_subdirectory_file.json +5 -0
- toil/test/cwl/download_subdirectory_s3.json +5 -0
- toil/test/cwl/empty.json +1 -0
- toil/test/cwl/mock_mpi/fake_mpi.yml +8 -0
- toil/test/cwl/mock_mpi/fake_mpi_run.py +42 -0
- toil/test/cwl/optional-file-exists.json +6 -0
- toil/test/cwl/optional-file-missing.json +6 -0
- toil/test/cwl/preemptible_expression.json +1 -0
- toil/test/cwl/revsort-job-missing.json +6 -0
- toil/test/cwl/revsort-job.json +6 -0
- toil/test/cwl/s3_secondary_file.json +16 -0
- toil/test/cwl/seqtk_seq_job.json +6 -0
- toil/test/cwl/stream.json +6 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.dat +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f1 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f1i +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f2 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f2_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f3 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f3_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f4 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f4_TSM0 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.f5 +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.info +0 -0
- toil/test/cwl/test_filename_conflict_resolution.ms/table.lock +0 -0
- toil/test/cwl/whale.txt +16 -0
- toil/test/docs/scripts/example_alwaysfail.py +38 -0
- toil/test/docs/scripts/example_alwaysfail_with_files.wdl +33 -0
- toil/test/docs/scripts/example_cachingbenchmark.py +117 -0
- toil/test/docs/scripts/stagingExampleFiles/in.txt +1 -0
- toil/test/docs/scripts/stagingExampleFiles/out.txt +2 -0
- toil/test/docs/scripts/tutorial_arguments.py +23 -0
- toil/test/docs/scripts/tutorial_debugging.patch +12 -0
- toil/test/docs/scripts/tutorial_debugging_hangs.wdl +126 -0
- toil/test/docs/scripts/tutorial_debugging_works.wdl +129 -0
- toil/test/docs/scripts/tutorial_docker.py +20 -0
- toil/test/docs/scripts/tutorial_dynamic.py +24 -0
- toil/test/docs/scripts/tutorial_encapsulation.py +28 -0
- toil/test/docs/scripts/tutorial_encapsulation2.py +29 -0
- toil/test/docs/scripts/tutorial_helloworld.py +15 -0
- toil/test/docs/scripts/tutorial_invokeworkflow.py +27 -0
- toil/test/docs/scripts/tutorial_invokeworkflow2.py +30 -0
- toil/test/docs/scripts/tutorial_jobfunctions.py +22 -0
- toil/test/docs/scripts/tutorial_managing.py +29 -0
- toil/test/docs/scripts/tutorial_managing2.py +56 -0
- toil/test/docs/scripts/tutorial_multiplejobs.py +25 -0
- toil/test/docs/scripts/tutorial_multiplejobs2.py +21 -0
- toil/test/docs/scripts/tutorial_multiplejobs3.py +22 -0
- toil/test/docs/scripts/tutorial_promises.py +25 -0
- toil/test/docs/scripts/tutorial_promises2.py +30 -0
- toil/test/docs/scripts/tutorial_quickstart.py +22 -0
- toil/test/docs/scripts/tutorial_requirements.py +44 -0
- toil/test/docs/scripts/tutorial_services.py +45 -0
- toil/test/docs/scripts/tutorial_staging.py +45 -0
- toil/test/docs/scripts/tutorial_stats.py +64 -0
- toil/test/lib/aws/test_iam.py +3 -1
- toil/test/lib/dockerTest.py +205 -122
- toil/test/lib/test_history.py +101 -77
- toil/test/provisioners/aws/awsProvisionerTest.py +12 -9
- toil/test/provisioners/clusterTest.py +4 -4
- toil/test/provisioners/gceProvisionerTest.py +16 -14
- toil/test/sort/sort.py +4 -1
- toil/test/src/busTest.py +17 -17
- toil/test/src/deferredFunctionTest.py +145 -132
- toil/test/src/importExportFileTest.py +71 -63
- toil/test/src/jobEncapsulationTest.py +27 -28
- toil/test/src/jobServiceTest.py +149 -133
- toil/test/src/jobTest.py +219 -211
- toil/test/src/miscTests.py +66 -60
- toil/test/src/promisedRequirementTest.py +163 -169
- toil/test/src/regularLogTest.py +24 -24
- toil/test/src/resourceTest.py +82 -76
- toil/test/src/restartDAGTest.py +51 -47
- toil/test/src/resumabilityTest.py +24 -19
- toil/test/src/retainTempDirTest.py +60 -57
- toil/test/src/systemTest.py +17 -13
- toil/test/src/threadingTest.py +29 -32
- toil/test/utils/ABCWorkflowDebug/B_file.txt +1 -0
- toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +204 -0
- toil/test/utils/ABCWorkflowDebug/mkFile.py +16 -0
- toil/test/utils/ABCWorkflowDebug/sleep.cwl +12 -0
- toil/test/utils/ABCWorkflowDebug/sleep.yaml +1 -0
- toil/test/utils/toilDebugTest.py +117 -102
- toil/test/utils/toilKillTest.py +54 -53
- toil/test/utils/utilsTest.py +303 -229
- toil/test/wdl/lint_error.wdl +9 -0
- toil/test/wdl/md5sum/empty_file.json +1 -0
- toil/test/wdl/md5sum/md5sum-gs.json +1 -0
- toil/test/wdl/md5sum/md5sum.1.0.wdl +32 -0
- toil/test/wdl/md5sum/md5sum.input +1 -0
- toil/test/wdl/md5sum/md5sum.json +1 -0
- toil/test/wdl/md5sum/md5sum.wdl +25 -0
- toil/test/wdl/miniwdl_self_test/inputs-namespaced.json +1 -0
- toil/test/wdl/miniwdl_self_test/inputs.json +1 -0
- toil/test/wdl/miniwdl_self_test/self_test.wdl +40 -0
- toil/test/wdl/standard_library/as_map.json +16 -0
- toil/test/wdl/standard_library/as_map_as_input.wdl +23 -0
- toil/test/wdl/standard_library/as_pairs.json +7 -0
- toil/test/wdl/standard_library/as_pairs_as_input.wdl +23 -0
- toil/test/wdl/standard_library/ceil.json +3 -0
- toil/test/wdl/standard_library/ceil_as_command.wdl +16 -0
- toil/test/wdl/standard_library/ceil_as_input.wdl +16 -0
- toil/test/wdl/standard_library/collect_by_key.json +1 -0
- toil/test/wdl/standard_library/collect_by_key_as_input.wdl +23 -0
- toil/test/wdl/standard_library/cross.json +11 -0
- toil/test/wdl/standard_library/cross_as_input.wdl +19 -0
- toil/test/wdl/standard_library/flatten.json +7 -0
- toil/test/wdl/standard_library/flatten_as_input.wdl +18 -0
- toil/test/wdl/standard_library/floor.json +3 -0
- toil/test/wdl/standard_library/floor_as_command.wdl +16 -0
- toil/test/wdl/standard_library/floor_as_input.wdl +16 -0
- toil/test/wdl/standard_library/keys.json +8 -0
- toil/test/wdl/standard_library/keys_as_input.wdl +24 -0
- toil/test/wdl/standard_library/length.json +7 -0
- toil/test/wdl/standard_library/length_as_input.wdl +16 -0
- toil/test/wdl/standard_library/length_as_input_with_map.json +7 -0
- toil/test/wdl/standard_library/length_as_input_with_map.wdl +17 -0
- toil/test/wdl/standard_library/length_invalid.json +3 -0
- toil/test/wdl/standard_library/range.json +3 -0
- toil/test/wdl/standard_library/range_0.json +3 -0
- toil/test/wdl/standard_library/range_as_input.wdl +17 -0
- toil/test/wdl/standard_library/range_invalid.json +3 -0
- toil/test/wdl/standard_library/read_boolean.json +3 -0
- toil/test/wdl/standard_library/read_boolean_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_float.json +3 -0
- toil/test/wdl/standard_library/read_float_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_int.json +3 -0
- toil/test/wdl/standard_library/read_int_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_json.json +3 -0
- toil/test/wdl/standard_library/read_json_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_lines.json +3 -0
- toil/test/wdl/standard_library/read_lines_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_map.json +3 -0
- toil/test/wdl/standard_library/read_map_as_output.wdl +31 -0
- toil/test/wdl/standard_library/read_string.json +3 -0
- toil/test/wdl/standard_library/read_string_as_command.wdl +17 -0
- toil/test/wdl/standard_library/read_tsv.json +3 -0
- toil/test/wdl/standard_library/read_tsv_as_output.wdl +31 -0
- toil/test/wdl/standard_library/round.json +3 -0
- toil/test/wdl/standard_library/round_as_command.wdl +16 -0
- toil/test/wdl/standard_library/round_as_input.wdl +16 -0
- toil/test/wdl/standard_library/size.json +3 -0
- toil/test/wdl/standard_library/size_as_command.wdl +17 -0
- toil/test/wdl/standard_library/size_as_output.wdl +36 -0
- toil/test/wdl/standard_library/stderr.json +3 -0
- toil/test/wdl/standard_library/stderr_as_output.wdl +30 -0
- toil/test/wdl/standard_library/stdout.json +3 -0
- toil/test/wdl/standard_library/stdout_as_output.wdl +30 -0
- toil/test/wdl/standard_library/sub.json +3 -0
- toil/test/wdl/standard_library/sub_as_input.wdl +17 -0
- toil/test/wdl/standard_library/sub_as_input_with_file.wdl +17 -0
- toil/test/wdl/standard_library/transpose.json +6 -0
- toil/test/wdl/standard_library/transpose_as_input.wdl +18 -0
- toil/test/wdl/standard_library/write_json.json +6 -0
- toil/test/wdl/standard_library/write_json_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_lines.json +7 -0
- toil/test/wdl/standard_library/write_lines_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_map.json +6 -0
- toil/test/wdl/standard_library/write_map_as_command.wdl +17 -0
- toil/test/wdl/standard_library/write_tsv.json +6 -0
- toil/test/wdl/standard_library/write_tsv_as_command.wdl +17 -0
- toil/test/wdl/standard_library/zip.json +12 -0
- toil/test/wdl/standard_library/zip_as_input.wdl +19 -0
- toil/test/wdl/test.csv +3 -0
- toil/test/wdl/test.tsv +3 -0
- toil/test/wdl/testfiles/croo.wdl +38 -0
- toil/test/wdl/testfiles/drop_files.wdl +62 -0
- toil/test/wdl/testfiles/drop_files_subworkflow.wdl +13 -0
- toil/test/wdl/testfiles/empty.txt +0 -0
- toil/test/wdl/testfiles/not_enough_outputs.wdl +33 -0
- toil/test/wdl/testfiles/random.wdl +66 -0
- toil/test/wdl/testfiles/string_file_coercion.json +1 -0
- toil/test/wdl/testfiles/string_file_coercion.wdl +35 -0
- toil/test/wdl/testfiles/test.json +4 -0
- toil/test/wdl/testfiles/test_boolean.txt +1 -0
- toil/test/wdl/testfiles/test_float.txt +1 -0
- toil/test/wdl/testfiles/test_int.txt +1 -0
- toil/test/wdl/testfiles/test_lines.txt +5 -0
- toil/test/wdl/testfiles/test_map.txt +2 -0
- toil/test/wdl/testfiles/test_string.txt +1 -0
- toil/test/wdl/testfiles/url_to_file.wdl +13 -0
- toil/test/wdl/testfiles/url_to_optional_file.wdl +13 -0
- toil/test/wdl/testfiles/vocab.json +1 -0
- toil/test/wdl/testfiles/vocab.wdl +66 -0
- toil/test/wdl/testfiles/wait.wdl +34 -0
- toil/test/wdl/wdl_specification/type_pair.json +23 -0
- toil/test/wdl/wdl_specification/type_pair_basic.wdl +36 -0
- toil/test/wdl/wdl_specification/type_pair_with_files.wdl +36 -0
- toil/test/wdl/wdl_specification/v1_spec.json +1 -0
- toil/test/wdl/wdl_specification/v1_spec_declaration.wdl +39 -0
- toil/test/wdl/wdltoil_test.py +680 -407
- toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
- toil/version.py +9 -9
- toil/wdl/wdltoil.py +336 -123
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/METADATA +5 -4
- toil-8.2.0.dist-info/RECORD +439 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/WHEEL +1 -1
- toil-8.1.0b1.dist-info/RECORD +0 -259
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/entry_points.txt +0 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info/licenses}/LICENSE +0 -0
- {toil-8.1.0b1.dist-info → toil-8.2.0.dist-info}/top_level.txt +0 -0
toil/test/utils/ABCWorkflowDebug/debugWorkflow.py ADDED
@@ -0,0 +1,204 @@
+import logging
+import os
+import subprocess
+import sys
+
+from toil.common import Toil
+from toil.job import Job
+from toil.lib.io import mkdtemp
+from toil.test import get_data
+from toil.version import python
+
+logger = logging.getLogger(__name__)
+
+"""
+This workflow's purpose is to create files and jobs for viewing using stats,
+status, and printDot() in toilDebugTest.py. It's intended for future use in a
+debugging tutorial containing a broken job. It is also a minor integration test.
+"""
+
+
+def initialize_jobs(job):
+    """
+    Stub function used to start a toil workflow since toil workflows can only
+    start with one job (but afterwards can run many in parallel).
+    """
+    job.fileStore.log_to_leader("""initialize_jobs""")
+
+
+def writeA(job, mkFile):
+    """Runs a program, and writes a string 'A' into A.txt using mkFile.py."""
+    job.fileStore.log_to_leader("""writeA""")
+
+    # temp folder for the run
+    tempDir = job.fileStore.getLocalTempDir()
+
+    # import files
+    mkFile_fs = job.fileStore.readGlobalFile(
+        mkFile[0], userPath=os.path.join(tempDir, mkFile[1])
+    )
+
+    # make a file (A.txt) and writes a string 'A' into it using 'mkFile.py'
+    content = "A"
+    cmd = python + " " + mkFile_fs + " " + "A.txt" + " " + content
+    this_process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+    this_process.wait()
+
+    # get the output file and return it as a tuple of location + name
+    output_filename = "A.txt"
+    output_file = job.fileStore.writeGlobalFile(output_filename)
+    A1 = (output_file, output_filename)
+    rvDict = {"A1": A1}
+    return rvDict
+
+
+def writeB(job, mkFile, B_file):
+    """
+    Runs a program, extracts a string 'B' from an existing file, B_file.txt, and
+    writes it into B.txt using mkFile.py.
+    """
+    job.fileStore.log_to_leader("""writeB""")
+
+    # temp folder for the run
+    tempDir = job.fileStore.getLocalTempDir()
+
+    # import files
+    mkFile_fs = job.fileStore.readGlobalFile(
+        mkFile[0], userPath=os.path.join(tempDir, mkFile[1])
+    )
+    B_file_fs = job.fileStore.readGlobalFile(
+        B_file[0], userPath=os.path.join(tempDir, B_file[1])
+    )
+
+    # make a file (B.txt) and write the contents of 'B_file.txt' into it using 'mkFile.py'
+    with open(B_file_fs) as f:
+        file_contents = ""
+        for line in f:
+            file_contents = file_contents + line
+
+    cmd = python + " " + mkFile_fs + " " + "B.txt" + " " + file_contents
+    this_process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+    this_process.wait()
+
+    # get the output file and return it as a tuple of location + name
+    output_filename = "B.txt"
+    output_file = job.fileStore.writeGlobalFile(output_filename)
+    B1 = (output_file, output_filename)
+    rvDict = {"B1": B1}
+    return rvDict
+
+
+def writeC(job):
+    """Creates/writes a file, C.txt, containing the string 'C'."""
+    job.fileStore.log_to_leader("""writeC""")
+
+    # temp folder for the run
+    tempDir = job.fileStore.getLocalTempDir()
+
+    # get the output file and return it as a tuple of location + name
+    output_filename = os.path.join(tempDir, "C.txt")
+    with open(output_filename, "w") as f:
+        f.write("C")
+    output_file = job.fileStore.writeGlobalFile(output_filename)
+    C1 = (output_file, "C.txt")
+    rvDict = {"C1": C1}
+    return rvDict
+
+
+def writeABC(job, A_dict, B_dict, C_dict, filepath: str) -> None:
+    """Takes 3 files (specified as dictionaries) and writes their contents to ABC.txt."""
+    job.fileStore.log_to_leader("""writeABC""")
+
+    # temp folder for the run
+    tempDir = job.fileStore.getLocalTempDir()
+
+    # import files
+    A_fs = job.fileStore.readGlobalFile(
+        A_dict["A1"][0], userPath=os.path.join(tempDir, A_dict["A1"][1])
+    )
+    B_fs = job.fileStore.readGlobalFile(
+        B_dict["B1"][0], userPath=os.path.join(tempDir, B_dict["B1"][1])
+    )
+    C_fs = job.fileStore.readGlobalFile(
+        C_dict["C1"][0], userPath=os.path.join(tempDir, C_dict["C1"][1])
+    )
+
+    file_contents = ""
+    with open(A_fs) as f:
+        for line in f:
+            file_contents = file_contents + line
+
+    with open(B_fs) as f:
+        for line in f:
+            file_contents = file_contents + line
+
+    with open(C_fs) as f:
+        for line in f:
+            file_contents = file_contents + line
+
+    with open(os.path.join(tempDir, "ABC.txt"), "w") as f:
+        f.write(file_contents)
+
+    # get the output file and return it as a tuple of location + name
+    output_filename = os.path.join(tempDir, "ABC.txt")
+    output_file = job.fileStore.writeGlobalFile(output_filename)
+    job.fileStore.export_file(output_file, "file://" + filepath)
+
+
+def finalize_jobs(job, num):
+    """Does nothing but should be recorded in stats, status, and printDot()."""
+    job.fileStore.log_to_leader("""finalize_jobs""")
+
+
+def broken_job(job, num):
+    """A job that will always fail. To be used for a tutorial."""
+    job.fileStore.log_to_leader("""broken_job""")
+    file = toil.importFile(None)
+
+
+if __name__ == "__main__":
+    jobStorePath = sys.argv[1] if len(sys.argv) > 1 else mkdtemp("debugWorkflow")
+    options = Job.Runner.getDefaultOptions(jobStorePath)
+    options.clean = "never"
+    options.stats = True
+    options.logLevel = "INFO"
+    with Toil(options) as toil:
+
+        with get_data("test/utils/ABCWorkflowDebug/B_file.txt") as B_file_path:
+            B_file0 = toil.importFile(B_file_path.as_uri())
+        B_file0_preserveThisFilename = "B_file.txt"
+        B_file = (B_file0, B_file0_preserveThisFilename)
+
+        with get_data("test/utils/ABCWorkflowDebug/mkFile.py") as mkFile_path:
+            file_maker0 = toil.importFile(mkFile_path.as_uri())
+        file_maker0_preserveThisFilename = "mkFile.py"
+        file_maker = (file_maker0, file_maker0_preserveThisFilename)
+
+        job0 = Job.wrapJobFn(initialize_jobs)
+        job1 = Job.wrapJobFn(writeA, file_maker)
+        job2 = Job.wrapJobFn(writeB, file_maker, B_file)
+        job3 = Job.wrapJobFn(writeC)
+        with get_data("test/utils/ABCWorkflowDebug/ABC.txt") as filepath:
+            job4 = Job.wrapJobFn(
+                writeABC, job1.rv(), job2.rv(), job3.rv(), str(filepath)
+            )
+        job5 = Job.wrapJobFn(finalize_jobs, 1)
+        job6 = Job.wrapJobFn(finalize_jobs, 2)
+        job7 = Job.wrapJobFn(finalize_jobs, 3)
+        job8 = Job.wrapJobFn(finalize_jobs, 4)
+
+        # write files 'A.txt', 'B.txt', and 'C.txt'
+        job0.addChild(job1)
+        job1.addChild(job2)
+        job2.addChild(job3)
+
+        # finally use 'A.txt', 'B.txt', and 'C.txt' to write ABC.txt
+        job0.addFollowOn(job4)
+
+        # these jobs do nothing, but should display in status
+        job4.addChild(job5)
+        job4.addChild(job6)
+        job4.addChild(job7)
+        job4.addChild(job8)
+
+        toil.start(job0)
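The updated toilDebugTest.py further down drives this script through subprocess and then inspects the resulting job store. As a minimal standalone sketch of the same pattern (the scratch path is arbitrary; it assumes a source checkout with this script on disk and the toil CLI installed):

import subprocess
import sys
from pathlib import Path

# Path to the script inside a checkout, and an arbitrary scratch job store.
script = Path("toil/test/utils/ABCWorkflowDebug/debugWorkflow.py")
job_store = Path("/tmp/toilWorkflowRun")

# The script sets clean="never" and stats=True itself, so the finished
# job store stays on disk and can be inspected afterwards.
subprocess.check_call([sys.executable, str(script), str(job_store)])

# Inspect the finished workflow with the toil CLI.
subprocess.check_call(["toil", "stats", str(job_store)])
subprocess.check_call(["toil", "status", str(job_store)])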
toil/test/utils/ABCWorkflowDebug/mkFile.py ADDED
@@ -0,0 +1,16 @@
+from configargparse import ArgumentParser
+
+
+def main():
+    parser = ArgumentParser(description="Creates a file and writes into it.")
+    parser.add_argument("file_name", help="File name to be written to.")
+    parser.add_argument("contents", help="A string to be written into the file.")
+
+    args, unknown_args = parser.parse_known_args()
+
+    with open(args.file_name, "w") as f:
+        f.write(args.contents)
+
+
+if __name__ == "__main__":
+    main()
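mkFile.py is the two-argument helper that writeA() and writeB() above shell out to. A hedged example of invoking it directly from Python (file name and contents are arbitrary):

import subprocess
import sys

# Writes the string "A" into A.txt in the current directory, mirroring
# what writeA() does inside its per-job temp directory.
subprocess.check_call([sys.executable, "mkFile.py", "A.txt", "A"])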
toil/test/utils/ABCWorkflowDebug/sleep.yaml ADDED
@@ -0,0 +1 @@
+message: 100
toil/test/utils/toilDebugTest.py CHANGED
@@ -13,30 +13,34 @@
 # limitations under the License.
 import logging
 import os
+from pathlib import Path
 import subprocess
-import tempfile
 
 from toil.lib.resources import glob
-from toil.test import
+from toil.test import get_data, pneeds_wdl as needs_wdl, pslow as slow
 from toil.version import python
 
+import pytest
+
 logger = logging.getLogger(__name__)
 
 
-def workflow_debug_jobstore() ->
-    job_store_path =
-
-
-
-
-
-
+def workflow_debug_jobstore(tmp_path: Path) -> Path:
+    job_store_path = tmp_path / "toilWorkflowRun"
+    with get_data("test/utils/ABCWorkflowDebug/debugWorkflow.py") as debugWorkflow_py:
+        subprocess.check_call(
+            [
+                python,
+                str(debugWorkflow_py),
+                str(job_store_path),
+            ]
+        )
     return job_store_path
 
 
 @slow
-
+@pytest.mark.slow
+def testJobStoreContents(tmp_path: Path) -> None:
     """
     Test toilDebugFile.printContentsOfJobStore().
 
@@ -46,19 +50,22 @@ def testJobStoreContents():
     """
     contents = ["A.txt", "B.txt", "C.txt", "ABC.txt", "mkFile.py"]
 
-
-
-
-
-
-
-
-
-
-
+    original_path = os.getcwd()
+    os.chdir(tmp_path)
+    with get_data("utils/toilDebugFile.py") as toilDebugFile:
+        subprocess.check_call(
+            [
+                python,
+                str(toilDebugFile),
+                str(workflow_debug_jobstore(tmp_path)),
+                "--logDebug",
+                "--listFilesInJobStore=True",
+            ]
+        )
+    jobstoreFileContents = tmp_path / "jobstore_files.txt"
     files = []
     match = 0
-    with open(
+    with jobstoreFileContents.open() as f:
         for line in f:
             files.append(line.strip())
     for xfile in files:
@@ -70,10 +77,10 @@ def testJobStoreContents():
     logger.debug(match)
     # C.txt will match twice (once with 'C.txt', and once with 'ABC.txt')
     assert match == 6
-    os.
+    os.chdir(original_path)
 
 
-def fetchFiles(symLink: bool, jobStoreDir:
+def fetchFiles(symLink: bool, jobStoreDir: Path, outputDir: Path) -> None:
     """
     Fn for testFetchJobStoreFiles() and testFetchJobStoreFilesWSymlinks().
 
@@ -84,21 +91,22 @@ def fetchFiles(symLink: bool, jobStoreDir: str, outputDir: str):
     then delete them.
     """
     contents = ["A.txt", "B.txt", "C.txt", "ABC.txt", "mkFile.py"]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    with get_data("utils/toilDebugFile.py") as toilDebugFile:
+        cmd = [
+            python,
+            str(toilDebugFile),
+            str(jobStoreDir),
+            "--fetch",
+            "*A.txt",
+            "*B.txt",
+            "*C.txt",
+            "*ABC.txt",
+            "*mkFile.py",
+            f"--localFilePath={outputDir}",
+            f"--useSymlinks={symLink}",
+        ]
+        print(cmd)
+        subprocess.check_call(cmd)
     for xfile in contents:
         matchingFilesFound = glob(glob_pattern="*" + xfile, directoryname=outputDir)
         assert len(matchingFilesFound) >= 1
@@ -109,42 +117,45 @@ def fetchFiles(symLink: bool, jobStoreDir: str, outputDir: str):
 
 
 # expected run time = 4s
-def testFetchJobStoreFiles() -> None:
+def testFetchJobStoreFiles(tmp_path: Path) -> None:
     """Test toilDebugFile.fetchJobStoreFiles() symlinks."""
-    job_store_dir = workflow_debug_jobstore()
-    output_dir =
-
+    job_store_dir = workflow_debug_jobstore(tmp_path)
+    output_dir = tmp_path / "testoutput"
+    output_dir.mkdir()
     for symlink in (True, False):
         fetchFiles(symLink=symlink, jobStoreDir=job_store_dir, outputDir=output_dir)
 
 
-class
+class TestDebugJob:
     """
     Test the toil debug-job command.
     """
 
-    def _get_job_store_and_job_id(self):
+    def _get_job_store_and_job_id(self, tmp_path: Path) -> tuple[Path, str]:
         """
         Get a job store and the ID of a failing job within it.
         """
 
         # First make a job store.
-        job_store =
+        job_store = tmp_path / "tree"
 
         logger.info("Running workflow that always fails")
         try:
             # Run an always-failing workflow
-
-
-
-
-
-
-
-
-
-
-
+            with get_data(
+                "test/docs/scripts/example_alwaysfail.py"
+            ) as example_alwaysfail_py:
+                subprocess.check_call(
+                    [
+                        python,
+                        str(example_alwaysfail_py),
+                        "--retryCount=0",
+                        "--logCritical",
+                        "--disableProgress",
+                        str(job_store),
+                    ],
+                    stderr=subprocess.DEVNULL,
+                )
             raise RuntimeError("Failing workflow succeeded!")
         except subprocess.CalledProcessError:
             # Should fail to run
@@ -152,40 +163,38 @@ class DebugJobTest(ToilTest):
 
         # Get the job ID.
         # TODO: This assumes a lot about the FileJobStore. Use the MessageBus instead?
-        job_id = (
-            "kind-explode/"
-            + os.listdir(os.path.join(job_store, "jobs/kind-explode"))[0]
-        )
+        job_id = "kind-explode/" + os.listdir(job_store / "jobs/kind-explode")[0]
 
         return job_store, job_id
 
-    def _get_wdl_job_store_and_job_name(self):
+    def _get_wdl_job_store_and_job_name(self, tmp_path: Path) -> tuple[Path, str]:
         """
         Get a job store and the name of a failed job in it that actually wanted to use some files.
         """
 
         # First make a job store.
-        job_store =
+        job_store = tmp_path / "tree"
 
         logger.info("Running workflow that always fails")
         # Run an always-failing workflow
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        with get_data(
+            "test/docs/scripts/example_alwaysfail_with_files.wdl"
+        ) as wdl_file:
+            wf_result = subprocess.run(
+                [
+                    "toil-wdl-runner",
+                    str(wdl_file),
+                    "--retryCount=0",
+                    "--logDebug",
+                    "--disableProgress",
+                    "--jobStore",
+                    str(job_store),
+                ],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                encoding="utf-8",
+                errors="replace",
+            )
         logger.debug("Always-failing workflow output: %s", wf_result.stdout)
         if wf_result.returncode == 0:
             raise RuntimeError("Failing workflow succeeded!")
@@ -195,58 +204,66 @@ class DebugJobTest(ToilTest):
         # Make sure that the job store we created actually has its job store
         # root job ID file. If it doesn't, we failed during workflow setup and
        # not because of a real failing job.
-        assert
-
-        ), "Failed workflow still needs a root job"
+        assert (
+            job_store / "files/shared/rootJobStoreID"
+        ).exists(), "Failed workflow still needs a root job"
 
         # Get a job name for a job that fails
         job_name = "WDLTaskJob"
 
         return job_store, job_name
 
-    def test_run_job(self):
+    def test_run_job(self, tmp_path: Path) -> None:
         """
         Make sure that we can use toil debug-job to try and run a job in-process.
         """
 
-        job_store, job_id = self._get_job_store_and_job_id()
+        job_store, job_id = self._get_job_store_and_job_id(tmp_path)
 
         logger.info("Trying to rerun job %s", job_id)
 
         # Rerun the job, which should fail again
         output = subprocess.check_output(
-            ["toil", "debug-job", "--logDebug", job_store, job_id],
+            ["toil", "debug-job", "--logDebug", str(job_store), job_id],
             stderr=subprocess.STDOUT,
         )
         # Even if the job fails, the attempt to run it will succeed.
         log = output.decode("utf-8")
         assert "Boom!" in log, f"Did not find the expected exception message in: {log}"
 
-    def test_print_job_info(self):
+    def test_print_job_info(self, tmp_path: Path) -> None:
         """
         Make sure that we can use --printJobInfo to get information on a job from a job store.
         """
 
-        job_store, job_id = self._get_job_store_and_job_id()
+        job_store, job_id = self._get_job_store_and_job_id(tmp_path)
 
         logger.info("Trying to print job info for job %s", job_id)
 
         # Print the job info and make sure that doesn't crash.
         subprocess.check_call(
-            [
+            [
+                "toil",
+                "debug-job",
+                "--logDebug",
+                str(job_store),
+                "--printJobInfo",
+                job_id,
+            ]
        )
 
     @needs_wdl
-
+    @pytest.mark.wdl
+    def test_retrieve_task_directory(self, tmp_path: Path) -> None:
         """
         Make sure that we can use --retrieveTaskDirectory to get the input files for a job.
         """
 
-        job_store, job_name = self._get_wdl_job_store_and_job_name()
+        job_store, job_name = self._get_wdl_job_store_and_job_name(tmp_path)
 
         logger.info("Trying to retrieve task dorectory for job %s", job_name)
 
-        dest_dir =
+        dest_dir = tmp_path / "dump"
 
         # Print the job info and make sure that doesn't crash.
         subprocess.check_call(
@@ -254,18 +271,16 @@ class DebugJobTest(ToilTest):
                 "toil",
                 "debug-job",
                 "--logDebug",
-                job_store,
+                str(job_store),
                 job_name,
                 "--retrieveTaskDirectory",
-                dest_dir,
+                str(dest_dir),
             ]
         )
 
-        first_file =
-            dest_dir
-            "inside/mnt/miniwdl_task_container/work/_miniwdl_inputs/0/test.txt"
+        first_file = (
+            dest_dir
+            / "inside/mnt/miniwdl_task_container/work/_miniwdl_inputs/0/test.txt"
         )
-        assert
-
-        ), "Input file not found in fake container environment"
-        self.assertEqual(open(first_file).read(), "These are the contents\n")
+        assert first_file.exists(), "Input file not found in fake container environment"
+        assert first_file.read_text() == "These are the contents\n"
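The recurring pattern in this file is the move from a unittest-style ToilTest class to plain pytest tests: temporary directories come from the built-in tmp_path fixture, self.assertEqual becomes a bare assert, and test selection moves from custom decorators to pytest marks. A minimal sketch of that shape (names are illustrative, not taken from the diff):

from pathlib import Path


def test_example_uses_tmp_path(tmp_path: Path) -> None:
    # pytest injects a unique, pre-created temporary directory per test,
    # replacing the tempfile plumbing removed in this diff.
    out = tmp_path / "example.txt"
    out.write_text("hello")
    assert out.read_text() == "hello"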