toil 9.1.1__py3-none-any.whl → 9.2.0__py3-none-any.whl
This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- toil/__init__.py +5 -9
- toil/batchSystems/abstractBatchSystem.py +23 -22
- toil/batchSystems/abstractGridEngineBatchSystem.py +17 -12
- toil/batchSystems/awsBatch.py +8 -8
- toil/batchSystems/cleanup_support.py +4 -4
- toil/batchSystems/contained_executor.py +3 -3
- toil/batchSystems/gridengine.py +3 -4
- toil/batchSystems/htcondor.py +5 -5
- toil/batchSystems/kubernetes.py +65 -63
- toil/batchSystems/local_support.py +2 -3
- toil/batchSystems/lsf.py +6 -7
- toil/batchSystems/mesos/batchSystem.py +11 -7
- toil/batchSystems/mesos/test/__init__.py +1 -2
- toil/batchSystems/options.py +9 -10
- toil/batchSystems/registry.py +3 -7
- toil/batchSystems/singleMachine.py +8 -11
- toil/batchSystems/slurm.py +49 -38
- toil/batchSystems/torque.py +3 -4
- toil/bus.py +36 -34
- toil/common.py +129 -89
- toil/cwl/cwltoil.py +857 -729
- toil/cwl/utils.py +44 -35
- toil/fileStores/__init__.py +3 -1
- toil/fileStores/abstractFileStore.py +28 -30
- toil/fileStores/cachingFileStore.py +8 -8
- toil/fileStores/nonCachingFileStore.py +10 -21
- toil/job.py +159 -158
- toil/jobStores/abstractJobStore.py +68 -69
- toil/jobStores/aws/jobStore.py +249 -213
- toil/jobStores/aws/utils.py +13 -24
- toil/jobStores/fileJobStore.py +28 -22
- toil/jobStores/googleJobStore.py +21 -17
- toil/jobStores/utils.py +3 -7
- toil/leader.py +17 -22
- toil/lib/accelerators.py +6 -4
- toil/lib/aws/__init__.py +9 -10
- toil/lib/aws/ami.py +33 -19
- toil/lib/aws/iam.py +6 -6
- toil/lib/aws/s3.py +259 -157
- toil/lib/aws/session.py +76 -76
- toil/lib/aws/utils.py +51 -43
- toil/lib/checksum.py +19 -15
- toil/lib/compatibility.py +3 -2
- toil/lib/conversions.py +45 -18
- toil/lib/directory.py +29 -26
- toil/lib/docker.py +93 -99
- toil/lib/dockstore.py +77 -50
- toil/lib/ec2.py +39 -38
- toil/lib/ec2nodes.py +11 -4
- toil/lib/exceptions.py +8 -5
- toil/lib/ftp_utils.py +9 -14
- toil/lib/generatedEC2Lists.py +161 -20
- toil/lib/history.py +141 -97
- toil/lib/history_submission.py +163 -72
- toil/lib/io.py +27 -17
- toil/lib/memoize.py +2 -1
- toil/lib/misc.py +15 -11
- toil/lib/pipes.py +40 -25
- toil/lib/plugins.py +12 -8
- toil/lib/resources.py +1 -0
- toil/lib/retry.py +32 -38
- toil/lib/threading.py +12 -12
- toil/lib/throttle.py +1 -2
- toil/lib/trs.py +113 -51
- toil/lib/url.py +14 -23
- toil/lib/web.py +7 -2
- toil/options/common.py +18 -15
- toil/options/cwl.py +2 -2
- toil/options/runner.py +9 -5
- toil/options/wdl.py +1 -3
- toil/provisioners/__init__.py +9 -9
- toil/provisioners/abstractProvisioner.py +22 -20
- toil/provisioners/aws/__init__.py +20 -14
- toil/provisioners/aws/awsProvisioner.py +10 -8
- toil/provisioners/clusterScaler.py +19 -18
- toil/provisioners/gceProvisioner.py +2 -3
- toil/provisioners/node.py +11 -13
- toil/realtimeLogger.py +4 -4
- toil/resource.py +5 -5
- toil/server/app.py +2 -2
- toil/server/cli/wes_cwl_runner.py +11 -11
- toil/server/utils.py +18 -21
- toil/server/wes/abstract_backend.py +9 -8
- toil/server/wes/amazon_wes_utils.py +3 -3
- toil/server/wes/tasks.py +3 -5
- toil/server/wes/toil_backend.py +17 -21
- toil/server/wsgi_app.py +3 -3
- toil/serviceManager.py +3 -4
- toil/statsAndLogging.py +12 -13
- toil/test/__init__.py +33 -24
- toil/test/batchSystems/batchSystemTest.py +12 -11
- toil/test/batchSystems/batch_system_plugin_test.py +3 -5
- toil/test/batchSystems/test_slurm.py +38 -24
- toil/test/cwl/conftest.py +5 -6
- toil/test/cwl/cwlTest.py +194 -78
- toil/test/cwl/download_file_uri.json +6 -0
- toil/test/cwl/download_file_uri_no_hostname.json +6 -0
- toil/test/docs/scripts/tutorial_staging.py +1 -0
- toil/test/jobStores/jobStoreTest.py +9 -7
- toil/test/lib/aws/test_iam.py +1 -3
- toil/test/lib/aws/test_s3.py +1 -1
- toil/test/lib/dockerTest.py +9 -9
- toil/test/lib/test_ec2.py +12 -11
- toil/test/lib/test_history.py +4 -4
- toil/test/lib/test_trs.py +16 -14
- toil/test/lib/test_url.py +7 -6
- toil/test/lib/url_plugin_test.py +12 -18
- toil/test/provisioners/aws/awsProvisionerTest.py +10 -8
- toil/test/provisioners/clusterScalerTest.py +2 -5
- toil/test/provisioners/clusterTest.py +1 -3
- toil/test/server/serverTest.py +13 -4
- toil/test/sort/restart_sort.py +2 -6
- toil/test/sort/sort.py +3 -8
- toil/test/src/deferredFunctionTest.py +7 -7
- toil/test/src/environmentTest.py +1 -2
- toil/test/src/fileStoreTest.py +5 -5
- toil/test/src/importExportFileTest.py +5 -6
- toil/test/src/jobServiceTest.py +22 -14
- toil/test/src/jobTest.py +121 -25
- toil/test/src/miscTests.py +5 -7
- toil/test/src/promisedRequirementTest.py +8 -7
- toil/test/src/regularLogTest.py +2 -3
- toil/test/src/resourceTest.py +5 -8
- toil/test/src/restartDAGTest.py +5 -6
- toil/test/src/resumabilityTest.py +2 -2
- toil/test/src/retainTempDirTest.py +3 -3
- toil/test/src/systemTest.py +3 -3
- toil/test/src/threadingTest.py +1 -1
- toil/test/src/workerTest.py +1 -2
- toil/test/utils/toilDebugTest.py +6 -4
- toil/test/utils/toilKillTest.py +1 -1
- toil/test/utils/utilsTest.py +15 -14
- toil/test/wdl/wdltoil_test.py +247 -124
- toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
- toil/toilState.py +2 -3
- toil/utils/toilDebugFile.py +3 -8
- toil/utils/toilDebugJob.py +1 -2
- toil/utils/toilLaunchCluster.py +1 -2
- toil/utils/toilSshCluster.py +2 -0
- toil/utils/toilStats.py +19 -24
- toil/utils/toilStatus.py +11 -14
- toil/version.py +10 -10
- toil/wdl/wdltoil.py +313 -209
- toil/worker.py +18 -12
- {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/METADATA +11 -14
- {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/RECORD +150 -153
- {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/WHEEL +1 -1
- toil/test/cwl/staging_cat.cwl +0 -27
- toil/test/cwl/staging_make_file.cwl +0 -25
- toil/test/cwl/staging_workflow.cwl +0 -43
- toil/test/cwl/zero_default.cwl +0 -61
- toil/test/utils/ABCWorkflowDebug/ABC.txt +0 -1
- {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/entry_points.txt +0 -0
- {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/licenses/LICENSE +0 -0
- {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/top_level.txt +0 -0
toil/provisioners/node.py
CHANGED
@@ -18,7 +18,7 @@ import subprocess
 import time
 from itertools import count
 from shlex import quote
-from typing import Any
+from typing import Any

 from toil.lib.memoize import parse_iso_utc

@@ -35,11 +35,11 @@ class Node:
         publicIP: str,
         privateIP: str,
         name: str,
-        launchTime:
-        nodeType:
+        launchTime: datetime.datetime | str,
+        nodeType: str | None,
         preemptible: bool,
-        tags:
-        use_private_ip:
+        tags: dict[str, str] | None = None,
+        use_private_ip: bool | None = None,
     ) -> None:
         """
         Create a new node.
@@ -65,9 +65,9 @@ class Node:
         self.name = name
         # Typing should prevent an empty launch time, but just to make sure,
         # check it at runtime.
-        assert
-
-        )
+        assert (
+            launchTime is not None
+        ), f"Attempted to create a Node {name} without a launch time"
         if isinstance(launchTime, datetime.datetime):
             self.launchTime = launchTime
         else:
@@ -79,9 +79,7 @@ class Node:
             self.launchTime = datetime.datetime.fromisoformat(launchTime)
             if self.launchTime.tzinfo is None:
                 # Read naive datatimes as in UTC
-                self.launchTime = self.launchTime.replace(
-                    tzinfo=datetime.timezone.utc
-                )
+                self.launchTime = self.launchTime.replace(tzinfo=datetime.timezone.utc)
         self.nodeType = nodeType
         self.preemptible = preemptible
         self.tags = tags
@@ -123,13 +121,13 @@ class Node:
         True

         >>> node = Node("127.0.0.1", "127.0.0.1", "localhost",
-        ...             datetime.datetime.now(datetime.timezone.utc) -
+        ...             datetime.datetime.now(datetime.timezone.utc) -
         ...             datetime.timedelta(minutes=5), None, False)
         >>> node.remainingBillingInterval() < 0.99
         True
         >>> node.remainingBillingInterval() > 0.9
         True
-
+
         """
         now = datetime.datetime.now(datetime.timezone.utc)
         delta = now - self.launchTime

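The change above is the pattern that repeats through most of the files in this release: typing.Optional and typing.Union annotations are rewritten as PEP 604 unions (str | None, datetime.datetime | str), which Python 3.10+ accepts directly in signatures. A minimal illustrative sketch of the same style, using a hypothetical describe() helper rather than the real Node class:

import datetime

# Old style (pre-9.2.0), shown only for comparison:
#   from typing import Optional, Union
#   def describe(launch_time: Union[datetime.datetime, str],
#                node_type: Optional[str] = None) -> Optional[str]: ...

def describe(
    launch_time: datetime.datetime | str,
    node_type: str | None = None,
) -> str | None:
    """Describe a node, or return None if its type is unknown."""
    if node_type is None:
        return None
    if isinstance(launch_time, str):
        # Mirror Node.__init__: parse ISO strings and treat naive values as UTC.
        launch_time = datetime.datetime.fromisoformat(launch_time)
        if launch_time.tzinfo is None:
            launch_time = launch_time.replace(tzinfo=datetime.timezone.utc)
    return f"{node_type} launched at {launch_time.isoformat()}"

print(describe("2024-01-01T00:00:00", "t2.micro"))
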
toil/realtimeLogger.py
CHANGED
@@ -20,7 +20,7 @@ import os.path
 import socketserver as SocketServer
 import threading
 from types import TracebackType
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any

 from toil.lib.misc import get_public_ip
 from toil.statsAndLogging import set_log_level
@@ -246,8 +246,8 @@ class RealtimeLogger(metaclass=RealtimeLoggerMetaclass):
     # noinspection PyUnusedLocal
     def __exit__(
         self,
-        exc_type:
-        exc_val:
-        exc_tb:
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         RealtimeLogger._stopLeader()

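The new __exit__ annotations are the standard context-manager protocol types written as unions. A small self-contained sketch of the same signature on a hypothetical Timer class (not part of toil):

import time
from types import TracebackType


class Timer:
    """Measure wall-clock time spent inside a with-block."""

    def __enter__(self) -> "Timer":
        self.start = time.monotonic()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        # Returning None (falsy) lets any exception propagate normally.
        self.elapsed = time.monotonic() - self.start


with Timer() as t:
    sum(range(100000))
print(f"took {t.elapsed:.6f}s")
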
toil/resource.py
CHANGED
@@ -20,12 +20,12 @@ import os
 import shutil
 import sys
 from collections import namedtuple
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from contextlib import closing
 from io import BytesIO
 from pydoc import locate
 from types import ModuleType
-from typing import IO, TYPE_CHECKING, BinaryIO,
+from typing import IO, TYPE_CHECKING, BinaryIO, Optional
 from urllib.error import HTTPError
 from urllib.request import urlopen
 from zipfile import ZipFile
@@ -157,7 +157,7 @@ class Resource(namedtuple("Resource", ("name", "pathHash", "url", "contentHash")

         return self

-    def download(self, callback:
+    def download(self, callback: Callable[[str], None] | None = None) -> None:
         """
         Download this resource from its URL to a file on the local system.

@@ -625,7 +625,7 @@ class ModuleDescriptor(
         return self.dirPath

     @classmethod
-    def _initModuleName(cls, dirPath: str) ->
+    def _initModuleName(cls, dirPath: str) -> str | None:
         for name in ("__init__.py", "__init__.pyc", "__init__.pyo"):
             if os.path.exists(os.path.join(dirPath, name)):
                 return name
@@ -656,7 +656,7 @@ class ModuleDescriptor(
             sys.path.append(module.dirPath)
             return module

-    def load(self) ->
+    def load(self) -> ModuleType | None:
         module = self.makeLoadable()
         try:
             return importlib.import_module(module.name)

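Alongside the union rewrites, this hunk moves Callable from typing to collections.abc, as in the new download() signature. A short sketch under the same convention, with a hypothetical fetch() standing in for Resource.download():

from collections.abc import Callable


def fetch(url: str, callback: Callable[[str], None] | None = None) -> None:
    # Pretend to download the URL, then notify the caller if a callback was given.
    local_path = "/tmp/" + url.rsplit("/", 1)[-1]
    if callback is not None:
        callback(local_path)


fetch("https://example.com/data.bin", callback=print)
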
toil/server/app.py
CHANGED
@@ -16,8 +16,8 @@ import logging
 import os

 import connexion  # type: ignore
-from connexion.options import SwaggerUIOptions  # type: ignore[import-untyped]
 from configargparse import ArgumentParser
+from connexion.options import SwaggerUIOptions  # type: ignore[import-untyped]

 from toil.lib.aws import get_current_aws_region, running_on_ec2, running_on_ecs
 from toil.server.wes.toil_backend import ToilBackend
@@ -210,6 +210,6 @@ def start_server(args: argparse.Namespace) -> None:
             "workers": args.workers,
             # The uvicorn worker class must be specified for gunicorn to work on connexion 3
             # https://github.com/spec-first/connexion/issues/1755#issuecomment-1778522142
-            "worker_class": "uvicorn.workers.UvicornWorker"
+            "worker_class": "uvicorn.workers.UvicornWorker",
         },
     )

toil/server/cli/wes_cwl_runner.py
CHANGED

@@ -7,7 +7,7 @@ import time
 from base64 import b64encode
 from collections.abc import Iterable
 from io import BytesIO
-from typing import Any,
+from typing import Any, cast
 from urllib.parse import urldefrag, urljoin, urlparse

 import ruamel.yaml
@@ -107,7 +107,7 @@ class WESClientWithWorkflowEngineParameters(WESClient):  # type: ignore
     TODO: Propose a PR in wes-service to include workflow_engine_params.
     """

-    def __init__(self, endpoint: str, auth:
+    def __init__(self, endpoint: str, auth: tuple[str, str] | None = None) -> None:
         """
         :param endpoint: The http(s) URL of the WES server. Must include the
             protocol.
@@ -212,9 +212,9 @@ class WESClientWithWorkflowEngineParameters(WESClient):  # type: ignore
     def build_wes_request(
         self,
         workflow_file: str,
-        workflow_params_file:
-        attachments:
-        workflow_engine_parameters:
+        workflow_params_file: str | None,
+        attachments: list[str] | None,
+        workflow_engine_parameters: list[str] | None = None,
     ) -> tuple[dict[str, str], Iterable[tuple[str, tuple[str, BytesIO]]]]:
         """
         Build the workflow run request to submit to WES.
@@ -295,9 +295,9 @@ class WESClientWithWorkflowEngineParameters(WESClient):  # type: ignore
     def run_with_engine_options(
         self,
         workflow_file: str,
-        workflow_params_file:
-        attachments:
-        workflow_engine_parameters:
+        workflow_params_file: str | None,
+        attachments: list[str] | None,
+        workflow_engine_parameters: list[str] | None,
     ) -> dict[str, Any]:
         """
         Composes and sends a post request that signals the WES server to run a
@@ -325,7 +325,7 @@ class WESClientWithWorkflowEngineParameters(WESClient):  # type: ignore
         return cast(dict[str, Any], wes_response(post_result))


-def get_deps_from_cwltool(cwl_file: str, input_file:
+def get_deps_from_cwltool(cwl_file: str, input_file: str | None = None) -> list[str]:
    """
    Return a list of dependencies of the given workflow from cwltool.

@@ -387,8 +387,8 @@ def get_deps_from_cwltool(cwl_file: str, input_file: Optional[str] = None) -> li
 def submit_run(
     client: WESClientWithWorkflowEngineParameters,
     cwl_file: str,
-    input_file:
-    engine_options:
+    input_file: str | None = None,
+    engine_options: list[str] | None = None,
 ) -> str:
    """
    Given a CWL file, its input files, and an optional list of engine options,

toil/server/utils.py
CHANGED
@@ -16,7 +16,6 @@ import logging
 import os
 from abc import abstractmethod
 from datetime import datetime
-from typing import Optional
 from urllib.parse import urlparse

 from toil.lib.io import AtomicFileCreate
@@ -57,7 +56,7 @@ def link_file(src: str, dest: str) -> None:


 def download_file_from_internet(
-    src: str, dest: str, content_type:
+    src: str, dest: str, content_type: str | None = None
 ) -> None:
    """
    Download a file from the Internet and write it to dest.
@@ -77,9 +76,7 @@ def download_file_from_internet(
        f.write(response.content)


-def download_file_from_s3(
-    src: str, dest: str, content_type: Optional[str] = None
-) -> None:
+def download_file_from_s3(src: str, dest: str, content_type: str | None = None) -> None:
    """
    Download a file from Amazon S3 and write it to dest.
    """
@@ -108,7 +105,7 @@ def get_file_class(path: str) -> str:


 @retry(errors=[OSError, BlockingIOError])
-def safe_read_file(file: str) ->
+def safe_read_file(file: str) -> str | None:
    """
    Safely read a file by acquiring a shared lock to prevent other processes
    from writing to it while reading.
@@ -175,15 +172,15 @@ class MemoryStateCache:
         """

         super().__init__()
-        self._data: dict[tuple[str, str],
+        self._data: dict[tuple[str, str], str | None] = {}

-    def get(self, workflow_id: str, key: str) ->
+    def get(self, workflow_id: str, key: str) -> str | None:
         """
         Get a key value from memory.
         """
         return self._data.get((workflow_id, key))

-    def set(self, workflow_id: str, key: str, value:
+    def set(self, workflow_id: str, key: str, value: str | None) -> None:
         """
         Set or clear a key value in memory.
         """
@@ -234,7 +231,7 @@ class AbstractStateStore:
         self._cache = MemoryStateCache()

     @abstractmethod
-    def get(self, workflow_id: str, key: str) ->
+    def get(self, workflow_id: str, key: str) -> str | None:
         """
         Get the value of the given key for the given workflow, or None if the
         key is not set for the workflow.
@@ -242,21 +239,21 @@ class AbstractStateStore:
         raise NotImplementedError

     @abstractmethod
-    def set(self, workflow_id: str, key: str, value:
+    def set(self, workflow_id: str, key: str, value: str | None) -> None:
         """
         Set the value of the given key for the given workflow. If the value is
         None, clear the key.
         """
         raise NotImplementedError

-    def read_cache(self, workflow_id: str, key: str) ->
+    def read_cache(self, workflow_id: str, key: str) -> str | None:
         """
         Read a value from a local cache, without checking the actual backend.
         """

         return self._cache.get(workflow_id, key)

-    def write_cache(self, workflow_id: str, key: str, value:
+    def write_cache(self, workflow_id: str, key: str, value: str | None) -> None:
         """
         Write a value to a local cache, without modifying the actual backend.
         """
@@ -298,13 +295,13 @@ class FileStateStore(AbstractStateStore):
         logger.debug("Connected to FileStateStore at %s", url)
         self._base_dir = parse.path

-    def get(self, workflow_id: str, key: str) ->
+    def get(self, workflow_id: str, key: str) -> str | None:
         """
         Get a key value from the filesystem.
         """
         return safe_read_file(os.path.join(self._base_dir, workflow_id, key))

-    def set(self, workflow_id: str, key: str, value:
+    def set(self, workflow_id: str, key: str, value: str | None) -> None:
         """
         Set or clear a key value on the filesystem.
         """
@@ -365,7 +362,7 @@ if HAVE_S3:
             path = os.path.join(self._base_path, workflow_id, key)
             return self._bucket, path

-        def get(self, workflow_id: str, key: str) ->
+        def get(self, workflow_id: str, key: str) -> str | None:
             """
             Get a key value from S3.
             """
@@ -378,7 +375,7 @@ if HAVE_S3:
             except self._client.exceptions.NoSuchKey:
                 return None

-        def set(self, workflow_id: str, key: str, value:
+        def set(self, workflow_id: str, key: str, value: str | None) -> None:
             """
             Set or clear a key value on S3.
             """
@@ -451,26 +448,26 @@ class WorkflowStateStore:
         self._state_store = state_store
         self._workflow_id = workflow_id

-    def get(self, key: str) ->
+    def get(self, key: str) -> str | None:
         """
         Get the given item of workflow state.
         """
         return self._state_store.get(self._workflow_id, key)

-    def set(self, key: str, value:
+    def set(self, key: str, value: str | None) -> None:
         """
         Set the given item of workflow state.
         """
         self._state_store.set(self._workflow_id, key, value)

-    def read_cache(self, key: str) ->
+    def read_cache(self, key: str) -> str | None:
         """
         Read a value from a local cache, without checking the actual backend.
         """

         return self._state_store.read_cache(self._workflow_id, key)

-    def write_cache(self, key: str, value:
+    def write_cache(self, key: str, value: str | None) -> None:
         """
         Write a value to a local cache, without modifying the actual backend.
         """

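The state-store methods re-typed above all share the same contract: values are str | None, and setting None clears the key. A hypothetical re-implementation sketch of the in-memory variant (for illustration only, not the toil code):

class InMemoryStateCache:
    """Keyed by (workflow_id, key); setting a value of None clears the key."""

    def __init__(self) -> None:
        self._data: dict[tuple[str, str], str | None] = {}

    def get(self, workflow_id: str, key: str) -> str | None:
        return self._data.get((workflow_id, key))

    def set(self, workflow_id: str, key: str, value: str | None) -> None:
        if value is None:
            self._data.pop((workflow_id, key), None)
        else:
            self._data[(workflow_id, key)] = value


cache = InMemoryStateCache()
cache.set("wf-123", "state", "RUNNING")
print(cache.get("wf-123", "state"))  # RUNNING
cache.set("wf-123", "state", None)   # clears the key
print(cache.get("wf-123", "state"))  # None
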
toil/server/wes/abstract_backend.py
CHANGED

@@ -4,7 +4,8 @@ import json
 import logging
 import os
 from abc import abstractmethod
-from
+from collections.abc import Callable
+from typing import Any, Union
 from urllib.parse import urldefrag

 import connexion  # type: ignore
@@ -27,8 +28,8 @@ class VersionNotImplementedException(Exception):
     def __init__(
         self,
         wf_type: str,
-        version:
-        supported_versions:
+        version: str | None = None,
+        supported_versions: list[str] | None = None,
     ) -> None:
         if version:
             message = (
@@ -159,7 +160,7 @@ class WESBackend:

     @abstractmethod
     def list_runs(
-        self, page_size:
+        self, page_size: int | None = None, page_token: str | None = None
     ) -> dict[str, Any]:
         """
         List the workflow runs.
@@ -207,7 +208,7 @@ class WESBackend:
         raise NotImplementedError

     @staticmethod
-    def log_for_run(run_id:
+    def log_for_run(run_id: str | None, message: str) -> None:
         if run_id:
             logging.info("Workflow %s: %s", run_id, message)
         else:
@@ -224,7 +225,7 @@ class WESBackend:
         )

     def collect_attachments(
-        self, args: dict[str, Any], run_id:
+        self, args: dict[str, Any], run_id: str | None, temp_dir: str | None
     ) -> tuple[str, dict[str, Any]]:
         """
         Collect attachments from the current request by staging uploaded files
@@ -240,7 +241,7 @@ class WESBackend:
         has_attachments = False
         for k, v in args.items():
             if k == "workflow_attachment":
-                for file in
+                for file in v or []:
                     dest = os.path.join(temp_dir, self.secure_path(file.filename))
                     if not os.path.isdir(os.path.dirname(dest)):
                         os.makedirs(os.path.dirname(dest))
@@ -251,7 +252,7 @@ class WESBackend:
                 file.save(dest)
                 has_attachments = True
                 body["workflow_attachment"] = (
-
+                    "file://%s" % temp_dir
                 )  # Reference to temp working dir.
             elif k in ("workflow_params", "tags", "workflow_engine_parameters"):
                 if v is not None:

toil/server/wes/amazon_wes_utils.py
CHANGED

@@ -22,7 +22,7 @@ import json
 import logging
 import zipfile
 from os import path
-from typing import IO,
+from typing import IO, TypedDict, cast
 from urllib.parse import ParseResult, urlparse

 from toil.bus import JobStatus
@@ -237,7 +237,7 @@ def parse_workflow_manifest_file(manifest_file: str) -> WorkflowPlan:


 def workflow_manifest_url_to_path(
-    url: ParseResult, parent_dir:
+    url: ParseResult, parent_dir: str | None = None
 ) -> str:
    """
    Interpret a possibly-relative parsed URL, relative to the given parent directory.
@@ -249,7 +249,7 @@ def workflow_manifest_url_to_path(


 # This one is all UCSC code
-def task_filter(task: TaskLog, job_status: JobStatus) ->
+def task_filter(task: TaskLog, job_status: JobStatus) -> TaskLog | None:
    """
    AGC requires task names to be annotated with an AWS Batch job ID that they
    were run under. If it encounters an un-annotated task name, it will crash.

toil/server/wes/tasks.py
CHANGED
@@ -20,7 +20,7 @@ import subprocess
 import sys
 import tempfile
 import zipfile
-from typing import Any
+from typing import Any
 from urllib.parse import urldefrag

 from celery.exceptions import SoftTimeLimitExceeded  # type: ignore
@@ -143,7 +143,7 @@ class ToilWorkflowRunner:
         return dest

     def sort_options(
-        self, workflow_engine_parameters:
+        self, workflow_engine_parameters: dict[str, str | None] | None = None
     ) -> list[str]:
         """
         Sort the command line arguments in the order that can be recognized by
@@ -316,9 +316,7 @@ class ToilWorkflowRunner:

         return command_args

-    def call_cmd(
-        self, cmd: Union[list[str], str], cwd: str
-    ) -> "subprocess.Popen[bytes]":
+    def call_cmd(self, cmd: list[str] | str, cwd: str) -> "subprocess.Popen[bytes]":
         """
         Calls a command with Popen. Writes stdout, stderr, and the command to
         separate files.

toil/server/wes/toil_backend.py
CHANGED
@@ -17,9 +17,9 @@ import os
 import shutil
 import uuid
 from collections import Counter
-from collections.abc import Generator
+from collections.abc import Callable, Generator
 from contextlib import contextmanager
-from typing import Any,
+from typing import Any, TextIO, overload

 from flask import send_from_directory
 from werkzeug.utils import redirect
@@ -83,9 +83,9 @@ class ToilWorkflow:
     @overload
     def fetch_state(self, key: str, default: str) -> str: ...
     @overload
-    def fetch_state(self, key: str, default: None = None) ->
+    def fetch_state(self, key: str, default: None = None) -> str | None: ...

-    def fetch_state(self, key: str, default:
+    def fetch_state(self, key: str, default: str | None = None) -> str | None:
         """
         Return the contents of the given key in the workflow's state
         store. If the key does not exist, the default value is returned.
@@ -96,7 +96,7 @@ class ToilWorkflow:
         return value

     @contextmanager
-    def fetch_scratch(self, filename: str) -> Generator[
+    def fetch_scratch(self, filename: str) -> Generator[TextIO | None, None, None]:
         """
         Get a context manager for either a stream for the given file from the
         workflow's scratch directory, or None if it isn't there.
@@ -181,7 +181,7 @@ class ToilWorkflow:
             # Stream in the file
             return json.load(f)

-    def _get_scratch_file_path(self, path: str) ->
+    def _get_scratch_file_path(self, path: str) -> str | None:
         """
         Return the given relative path from self.scratch_dir, if it is a file,
         and None otherwise.
@@ -190,21 +190,21 @@ class ToilWorkflow:
             return None
         return path

-    def get_stdout_path(self) ->
+    def get_stdout_path(self) -> str | None:
         """
         Return the path to the standard output log, relative to the run's
         scratch_dir, or None if it doesn't exist.
         """
         return self._get_scratch_file_path("stdout")

-    def get_stderr_path(self) ->
+    def get_stderr_path(self) -> str | None:
         """
         Return the path to the standard output log, relative to the run's
         scratch_dir, or None if it doesn't exist.
         """
         return self._get_scratch_file_path("stderr")

-    def get_messages_path(self) ->
+    def get_messages_path(self) -> str | None:
         """
         Return the path to the bus message log, relative to the run's
         scratch_dir, or None if it doesn't exist.
@@ -213,10 +213,8 @@ class ToilWorkflow:

     def get_task_logs(
         self,
-        filter_function:
-
-        ] = None,
-    ) -> list[dict[str, Union[str, int, None]]]:
+        filter_function: None | (Callable[[TaskLog, JobStatus], TaskLog | None]) = None,
+    ) -> list[dict[str, str | int | None]]:
         """
         Return all the task log objects for the individual tasks in the workflow.

@@ -243,7 +241,7 @@ class ToilWorkflow:
         # Compose log objects from recovered job info.
         logs: list[TaskLog] = []
         for job_status in job_statuses.values():
-            task:
+            task: TaskLog | None = {
                 "name": job_status.name,
                 "exit_code": job_status.exit_code,
             }
@@ -268,9 +266,9 @@ class ToilBackend(WESBackend):
     def __init__(
         self,
         work_dir: str,
-        state_store:
+        state_store: str | None,
         options: list[str],
-        dest_bucket_base:
+        dest_bucket_base: str | None,
         bypass_celery: bool = False,
         wes_dialect: str = "standard",
     ) -> None:
@@ -384,14 +382,12 @@ class ToilBackend(WESBackend):
         logger.info("Using server ID: %s", self.server_id)

         self.supported_versions = {
-            "py": ["3.
+            "py": ["3.10", "3.11", "3.12", "3.13", "3.14"],
             "cwl": ["v1.0", "v1.1", "v1.2"],
             "wdl": ["draft-2", "1.0"],
         }

-    def _get_run(
-        self, run_id: str, should_exists: Optional[bool] = None
-    ) -> ToilWorkflow:
+    def _get_run(self, run_id: str, should_exists: bool | None = None) -> ToilWorkflow:
         """
         Helper method to instantiate a ToilWorkflow object.

@@ -490,7 +486,7 @@ class ToilBackend(WESBackend):

     @handle_errors
     def list_runs(
-        self, page_size:
+        self, page_size: int | None = None, page_token: str | None = None
     ) -> dict[str, Any]:
         """List the workflow runs."""
         # TODO: implement pagination

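fetch_state() above pairs typing.overload with a union return so that callers who pass a str default are guaranteed a str, while the no-default form may return None. A self-contained sketch of the same pattern, with a hypothetical get_setting() in place of fetch_state():

from typing import overload

_settings: dict[str, str] = {"log_level": "INFO"}


@overload
def get_setting(key: str, default: str) -> str: ...
@overload
def get_setting(key: str, default: None = None) -> str | None: ...


def get_setting(key: str, default: str | None = None) -> str | None:
    """Return the stored value, or the default if the key is missing."""
    value = _settings.get(key)
    return value if value is not None else default


print(get_setting("log_level"))            # "INFO"
print(get_setting("missing", "fallback"))  # statically known to be str
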
toil/server/wsgi_app.py
CHANGED
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Any
+from typing import Any

 from gunicorn.app.base import BaseApplication  # type: ignore

@@ -30,7 +30,7 @@ class GunicornApplication(BaseApplication):  # type: ignore
     For more details, see: https://docs.gunicorn.org/en/latest/custom.html
     """

-    def __init__(self, app: object, options:
+    def __init__(self, app: object, options: dict[str, Any] | None = None):
         self.options = options or {}
         self.application = app
         super().__init__()
@@ -52,7 +52,7 @@ class GunicornApplication(BaseApplication):  # type: ignore
         return self.application


-def run_app(app: object, options:
+def run_app(app: object, options: dict[str, Any] | None = None) -> None:
    """
    Run a Gunicorn WSGI server.
    """

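run_app() as typed above is a thin wrapper that feeds an options dict to GunicornApplication. A hypothetical usage sketch (assuming gunicorn is installed; bind and workers are standard gunicorn settings, while toil's own start_server() additionally sets worker_class for its connexion app):

from toil.server.wsgi_app import run_app


def hello_app(environ, start_response):
    # Minimal WSGI callable: always answer 200 with a fixed body.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello from gunicorn\n"]


if __name__ == "__main__":
    # Blocks and serves until interrupted.
    run_app(hello_app, options={"bind": "127.0.0.1:8080", "workers": 2})
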
toil/serviceManager.py
CHANGED
@@ -18,7 +18,6 @@ import time
 from collections.abc import Iterable
 from queue import Empty, Queue
 from threading import Event, Thread
-from typing import Optional

 from toil.job import ServiceJobDescription
 from toil.jobStores.abstractJobStore import AbstractJobStore
@@ -112,7 +111,7 @@ class ServiceManager:
         # Asynchronously schedule the services
         self.__clients_in.put(client_id)

-    def get_ready_client(self, maxWait: float) ->
+    def get_ready_client(self, maxWait: float) -> str | None:
         """
         Fetch a ready client, waiting as needed.

@@ -132,7 +131,7 @@ class ServiceManager:
         except Empty:
             return None

-    def get_unservable_client(self, maxWait: float) ->
+    def get_unservable_client(self, maxWait: float) -> str | None:
         """
         Fetch a client whos services failed to start.

@@ -152,7 +151,7 @@ class ServiceManager:
         except Empty:
             return None

-    def get_startable_service(self, maxWait: float) ->
+    def get_startable_service(self, maxWait: float) -> str | None:
         """
         Fetch a service job that is ready to start.
