toil 8.2.0__py3-none-any.whl → 9.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- toil/batchSystems/registry.py +15 -118
- toil/common.py +20 -1
- toil/cwl/cwltoil.py +80 -37
- toil/cwl/utils.py +103 -3
- toil/jobStores/abstractJobStore.py +11 -236
- toil/jobStores/aws/jobStore.py +2 -1
- toil/jobStores/fileJobStore.py +2 -1
- toil/jobStores/googleJobStore.py +7 -4
- toil/lib/accelerators.py +1 -1
- toil/lib/generatedEC2Lists.py +81 -19
- toil/lib/misc.py +1 -1
- toil/lib/plugins.py +106 -0
- toil/lib/url.py +320 -0
- toil/options/cwl.py +13 -1
- toil/options/runner.py +17 -10
- toil/options/wdl.py +12 -1
- toil/provisioners/aws/awsProvisioner.py +25 -2
- toil/server/app.py +12 -6
- toil/server/cli/wes_cwl_runner.py +2 -2
- toil/server/wes/abstract_backend.py +21 -43
- toil/server/wes/toil_backend.py +2 -2
- toil/test/__init__.py +2 -2
- toil/test/batchSystems/batchSystemTest.py +2 -9
- toil/test/batchSystems/batch_system_plugin_test.py +7 -0
- toil/test/cwl/cwlTest.py +181 -8
- toil/test/docs/scriptsTest.py +2 -1
- toil/test/lib/test_url.py +69 -0
- toil/test/lib/url_plugin_test.py +105 -0
- toil/test/provisioners/aws/awsProvisionerTest.py +1 -1
- toil/test/provisioners/clusterTest.py +15 -2
- toil/test/provisioners/gceProvisionerTest.py +1 -1
- toil/test/server/serverTest.py +78 -36
- toil/test/wdl/md5sum/md5sum-gs.json +1 -1
- toil/test/wdl/testfiles/read_file.wdl +18 -0
- toil/test/wdl/testfiles/url_to_optional_file.wdl +2 -1
- toil/test/wdl/wdltoil_test.py +74 -125
- toil/utils/toilSshCluster.py +23 -0
- toil/utils/toilUpdateEC2Instances.py +1 -0
- toil/version.py +9 -9
- toil/wdl/wdltoil.py +182 -314
- toil/worker.py +11 -6
- {toil-8.2.0.dist-info → toil-9.0.0.dist-info}/METADATA +23 -23
- {toil-8.2.0.dist-info → toil-9.0.0.dist-info}/RECORD +47 -42
- {toil-8.2.0.dist-info → toil-9.0.0.dist-info}/WHEEL +1 -1
- {toil-8.2.0.dist-info → toil-9.0.0.dist-info}/entry_points.txt +0 -0
- {toil-8.2.0.dist-info → toil-9.0.0.dist-info}/licenses/LICENSE +0 -0
- {toil-8.2.0.dist-info → toil-9.0.0.dist-info}/top_level.txt +0 -0
toil/lib/misc.py
CHANGED
|
@@ -27,7 +27,7 @@ def get_public_ip() -> str:
|
|
|
27
27
|
try:
|
|
28
28
|
# Try to get the internet-facing IP by attempting a connection
|
|
29
29
|
# to a non-existent server and reading what IP was used.
|
|
30
|
-
ip = "127.0.0.1"
|
|
30
|
+
ip: str = "127.0.0.1"
|
|
31
31
|
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock:
|
|
32
32
|
# 203.0.113.0/24 is reserved as TEST-NET-3 by RFC 5737, so
|
|
33
33
|
# there is guaranteed to be no one listening on the other
|
toil/lib/plugins.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
# Copyright (C) 2015-2025 Regents of the University of California
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""
|
|
16
|
+
Generic plugin system for Toil plugins.
|
|
17
|
+
|
|
18
|
+
Plugins come in Python packages named::
|
|
19
|
+
|
|
20
|
+
toil_{PLUGIN_TYPE}_{WHATEVER}
|
|
21
|
+
|
|
22
|
+
When looking for plugins, Toil will list all the Python packages with the right
|
|
23
|
+
name prefix for the given type of plugin, and load them. The plugin modules
|
|
24
|
+
then have an opportunity to import :meth:`register_plugin` and register
|
|
25
|
+
themselves.
|
|
26
|
+
"""
|
|
27
|
+
|
|
28
|
+
import importlib
|
|
29
|
+
from typing import Any, Literal, Union
|
|
30
|
+
import pkgutil
|
|
31
|
+
from toil.lib.memoize import memoize
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
PluginType = Union[Literal["batch_system"], Literal["url_access"]]
|
|
35
|
+
plugin_types: list[PluginType] = ["batch_system", "url_access"]
|
|
36
|
+
|
|
37
|
+
_registry: dict[str, dict[str, Any]] = {k: {} for k in plugin_types}
|
|
38
|
+
|
|
39
|
+
def register_plugin(
|
|
40
|
+
plugin_type: PluginType, plugin_name: str, plugin_being_registered: Any
|
|
41
|
+
) -> None:
|
|
42
|
+
"""
|
|
43
|
+
Adds a plugin to the registry for the given type of plugin.
|
|
44
|
+
|
|
45
|
+
:param plugin_name: For batch systems, this is the string the user will use
|
|
46
|
+
to select the batch system on the command line with ``--batchSystem``.
|
|
47
|
+
For URL access plugins, this is the URL scheme that the plugin
|
|
48
|
+
implements.
|
|
49
|
+
:param plugin_being_registered: This is a function that, when called,
|
|
50
|
+
imports and returns a plugin-provided class type. For batch systems,
|
|
51
|
+
the resulting type must extend
|
|
52
|
+
:class:`toil.batchSystems.abstractBatchSystem.AbstractBatchSystem`. For
|
|
53
|
+
URL access plugins, it must extend :class:`toil.lib.url.URLAccess`.
|
|
54
|
+
Note that the function used here should return the class itself; it
|
|
55
|
+
should not construct an instance of the class.
|
|
56
|
+
"""
|
|
57
|
+
_registry[plugin_type][plugin_name] = plugin_being_registered
|
|
58
|
+
|
|
59
|
+
def remove_plugin(
|
|
60
|
+
plugin_type: PluginType, plugin_name: str) -> None:
|
|
61
|
+
"""
|
|
62
|
+
Removes a plugin from the registry for the given type of plugin.
|
|
63
|
+
"""
|
|
64
|
+
try:
|
|
65
|
+
del _registry[plugin_type][plugin_name]
|
|
66
|
+
except KeyError:
|
|
67
|
+
# If the plugin does not exist, it can be ignored
|
|
68
|
+
pass
|
|
69
|
+
|
|
70
|
+
def get_plugin_names(plugin_type:PluginType) -> list[str]:
|
|
71
|
+
"""
|
|
72
|
+
Get the names of all the available plugins of the given type.
|
|
73
|
+
"""
|
|
74
|
+
_load_all_plugins(plugin_type)
|
|
75
|
+
return list(_registry[plugin_type].keys())
|
|
76
|
+
|
|
77
|
+
def get_plugin(plugin_type: PluginType, plugin_name: str) -> Any:
|
|
78
|
+
"""
|
|
79
|
+
Get a plugin class factory function by name.
|
|
80
|
+
|
|
81
|
+
:raises: KeyError if plugin_name is not the name of a plugin of the given
|
|
82
|
+
type.
|
|
83
|
+
"""
|
|
84
|
+
_load_all_plugins(plugin_type)
|
|
85
|
+
return _registry[plugin_type][plugin_name]
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def _plugin_name_prefix(plugin_type: PluginType) -> str:
|
|
89
|
+
"""
|
|
90
|
+
Get prefix for plugin type.
|
|
91
|
+
|
|
92
|
+
Any packages with prefix will count as toil plugins of that type.
|
|
93
|
+
"""
|
|
94
|
+
return f"toil_{plugin_type}_"
|
|
95
|
+
|
|
96
|
+
@memoize
|
|
97
|
+
def _load_all_plugins(plugin_type: PluginType) -> None:
|
|
98
|
+
"""
|
|
99
|
+
Load all the plugins of the given type that are installed.
|
|
100
|
+
"""
|
|
101
|
+
prefix = _plugin_name_prefix(plugin_type)
|
|
102
|
+
for finder, name, is_pkg in pkgutil.iter_modules():
|
|
103
|
+
# For all installed packages
|
|
104
|
+
if name.startswith(prefix):
|
|
105
|
+
# If it is a Toil batch system plugin, import it
|
|
106
|
+
importlib.import_module(name)
|
toil/lib/url.py
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
# Copyright (C) 2015-2025 Regents of the University of California
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
import logging
|
|
15
|
+
import os
|
|
16
|
+
from abc import ABC, ABCMeta, abstractmethod
|
|
17
|
+
from typing import (
|
|
18
|
+
IO,
|
|
19
|
+
TYPE_CHECKING,
|
|
20
|
+
Any,
|
|
21
|
+
Callable,
|
|
22
|
+
ContextManager,
|
|
23
|
+
Literal,
|
|
24
|
+
Optional,
|
|
25
|
+
Union,
|
|
26
|
+
cast,
|
|
27
|
+
overload,
|
|
28
|
+
Type,
|
|
29
|
+
)
|
|
30
|
+
from urllib.parse import ParseResult, urlparse
|
|
31
|
+
|
|
32
|
+
from toil.lib.exceptions import UnimplementedURLException
|
|
33
|
+
from toil.lib.memoize import memoize
|
|
34
|
+
from toil.lib.plugins import register_plugin, get_plugin
|
|
35
|
+
|
|
36
|
+
try:
|
|
37
|
+
from botocore.exceptions import ProxyConnectionError
|
|
38
|
+
except ImportError:
|
|
39
|
+
|
|
40
|
+
class ProxyConnectionError(BaseException): # type: ignore
|
|
41
|
+
"""Dummy class."""
|
|
42
|
+
|
|
43
|
+
logger = logging.getLogger(__name__)
|
|
44
|
+
|
|
45
|
+
class URLAccess:
|
|
46
|
+
"""
|
|
47
|
+
Widget for accessing external storage (URLs).
|
|
48
|
+
"""
|
|
49
|
+
|
|
50
|
+
@classmethod
|
|
51
|
+
def url_exists(cls, src_uri: str) -> bool:
|
|
52
|
+
"""
|
|
53
|
+
Return True if the file at the given URI exists, and False otherwise.
|
|
54
|
+
|
|
55
|
+
May raise an error if file existence cannot be determined.
|
|
56
|
+
|
|
57
|
+
:param src_uri: URL that points to a file or object in the storage
|
|
58
|
+
mechanism of a supported URL scheme e.g. a blob in an AWS s3 bucket.
|
|
59
|
+
"""
|
|
60
|
+
parseResult = urlparse(src_uri)
|
|
61
|
+
otherCls = cls._find_url_implementation(parseResult)
|
|
62
|
+
return otherCls._url_exists(parseResult)
|
|
63
|
+
|
|
64
|
+
@classmethod
|
|
65
|
+
def get_size(cls, src_uri: str) -> Optional[int]:
|
|
66
|
+
"""
|
|
67
|
+
Get the size in bytes of the file at the given URL, or None if it cannot be obtained.
|
|
68
|
+
|
|
69
|
+
:param src_uri: URL that points to a file or object in the storage
|
|
70
|
+
mechanism of a supported URL scheme e.g. a blob in an AWS s3 bucket.
|
|
71
|
+
"""
|
|
72
|
+
parseResult = urlparse(src_uri)
|
|
73
|
+
otherCls = cls._find_url_implementation(parseResult)
|
|
74
|
+
return otherCls._get_size(parseResult)
|
|
75
|
+
|
|
76
|
+
@classmethod
|
|
77
|
+
def get_is_directory(cls, src_uri: str) -> bool:
|
|
78
|
+
"""
|
|
79
|
+
Return True if the thing at the given URL is a directory, and False if
|
|
80
|
+
it is a file. The URL may or may not end in '/'.
|
|
81
|
+
"""
|
|
82
|
+
parseResult = urlparse(src_uri)
|
|
83
|
+
otherCls = cls._find_url_implementation(parseResult)
|
|
84
|
+
return otherCls._get_is_directory(parseResult)
|
|
85
|
+
|
|
86
|
+
@classmethod
|
|
87
|
+
def list_url(cls, src_uri: str) -> list[str]:
|
|
88
|
+
"""
|
|
89
|
+
List the directory at the given URL. Returned path components can be
|
|
90
|
+
joined with '/' onto the passed URL to form new URLs. Those that end in
|
|
91
|
+
'/' correspond to directories. The provided URL may or may not end with
|
|
92
|
+
'/'.
|
|
93
|
+
|
|
94
|
+
Currently supported schemes are:
|
|
95
|
+
|
|
96
|
+
- 's3' for objects in Amazon S3
|
|
97
|
+
e.g. s3://bucket/prefix/
|
|
98
|
+
|
|
99
|
+
- 'file' for local files
|
|
100
|
+
e.g. file:///local/dir/path/
|
|
101
|
+
|
|
102
|
+
:param str src_uri: URL that points to a directory or prefix in the storage mechanism of a
|
|
103
|
+
supported URL scheme e.g. a prefix in an AWS s3 bucket.
|
|
104
|
+
|
|
105
|
+
:return: A list of URL components in the given directory, already URL-encoded.
|
|
106
|
+
"""
|
|
107
|
+
parseResult = urlparse(src_uri)
|
|
108
|
+
otherCls = cls._find_url_implementation(parseResult)
|
|
109
|
+
return otherCls._list_url(parseResult)
|
|
110
|
+
|
|
111
|
+
@classmethod
|
|
112
|
+
def read_from_url(cls, src_uri: str, writable: IO[bytes]) -> tuple[int, bool]:
|
|
113
|
+
"""
|
|
114
|
+
Read the given URL and write its content into the given writable stream.
|
|
115
|
+
|
|
116
|
+
Raises FileNotFoundError if the URL doesn't exist.
|
|
117
|
+
|
|
118
|
+
:return: The size of the file in bytes and whether the executable permission bit is set
|
|
119
|
+
"""
|
|
120
|
+
parseResult = urlparse(src_uri)
|
|
121
|
+
otherCls = cls._find_url_implementation(parseResult)
|
|
122
|
+
return otherCls._read_from_url(parseResult, writable)
|
|
123
|
+
|
|
124
|
+
@classmethod
|
|
125
|
+
def open_url(cls, src_uri: str) -> IO[bytes]:
|
|
126
|
+
"""
|
|
127
|
+
Read from the given URI.
|
|
128
|
+
|
|
129
|
+
Raises FileNotFoundError if the URL doesn't exist.
|
|
130
|
+
|
|
131
|
+
Has a readable stream interface, unlike :meth:`read_from_url` which
|
|
132
|
+
takes a writable stream.
|
|
133
|
+
"""
|
|
134
|
+
parseResult = urlparse(src_uri)
|
|
135
|
+
otherCls = cls._find_url_implementation(parseResult)
|
|
136
|
+
return otherCls._open_url(parseResult)
|
|
137
|
+
|
|
138
|
+
@classmethod
|
|
139
|
+
@abstractmethod
|
|
140
|
+
def _url_exists(cls, url: ParseResult) -> bool:
|
|
141
|
+
"""
|
|
142
|
+
Return True if the item at the given URL exists, and False otherwise.
|
|
143
|
+
|
|
144
|
+
May raise an error if file existence cannot be determined.
|
|
145
|
+
"""
|
|
146
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
147
|
+
|
|
148
|
+
@classmethod
|
|
149
|
+
@abstractmethod
|
|
150
|
+
def _get_size(cls, url: ParseResult) -> Optional[int]:
|
|
151
|
+
"""
|
|
152
|
+
Get the size of the object at the given URL, or None if it cannot be obtained.
|
|
153
|
+
"""
|
|
154
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
155
|
+
|
|
156
|
+
@classmethod
|
|
157
|
+
@abstractmethod
|
|
158
|
+
def _get_is_directory(cls, url: ParseResult) -> bool:
|
|
159
|
+
"""
|
|
160
|
+
Return True if the thing at the given URL is a directory, and False if
|
|
161
|
+
it is a file or it is known not to exist. The URL may or may not end in
|
|
162
|
+
'/'.
|
|
163
|
+
|
|
164
|
+
:param url: URL that points to a file or object, or directory or prefix,
|
|
165
|
+
in the storage mechanism of a supported URL scheme e.g. a blob
|
|
166
|
+
in an AWS s3 bucket.
|
|
167
|
+
"""
|
|
168
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
169
|
+
|
|
170
|
+
@classmethod
|
|
171
|
+
@abstractmethod
|
|
172
|
+
def _read_from_url(cls, url: ParseResult, writable: IO[bytes]) -> tuple[int, bool]:
|
|
173
|
+
"""
|
|
174
|
+
Reads the contents of the object at the specified location and writes it to the given
|
|
175
|
+
writable stream.
|
|
176
|
+
|
|
177
|
+
Raises FileNotFoundError if the thing at the URL is not found.
|
|
178
|
+
|
|
179
|
+
:param ParseResult url: URL that points to a file or object in the storage
|
|
180
|
+
mechanism of a supported URL scheme e.g. a blob in an AWS s3 bucket.
|
|
181
|
+
|
|
182
|
+
:param IO[bytes] writable: a writable stream
|
|
183
|
+
|
|
184
|
+
:return: The size of the file in bytes and whether the executable permission bit is set
|
|
185
|
+
"""
|
|
186
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
187
|
+
|
|
188
|
+
@classmethod
|
|
189
|
+
@abstractmethod
|
|
190
|
+
def _list_url(cls, url: ParseResult) -> list[str]:
|
|
191
|
+
"""
|
|
192
|
+
List the contents of the given URL, which may or may not end in '/'
|
|
193
|
+
|
|
194
|
+
Returns a list of URL components. Those that end in '/' are meant to be
|
|
195
|
+
directories, while those that do not are meant to be files.
|
|
196
|
+
|
|
197
|
+
:param ParseResult url: URL that points to a directory or prefix in the
|
|
198
|
+
storage mechanism of a supported URL scheme e.g. a prefix in an AWS s3
|
|
199
|
+
bucket.
|
|
200
|
+
|
|
201
|
+
:return: The children of the given URL, already URL-encoded if
|
|
202
|
+
appropriate. (If the URL is a bare path, no encoding is done.)
|
|
203
|
+
"""
|
|
204
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
205
|
+
|
|
206
|
+
@classmethod
|
|
207
|
+
@abstractmethod
|
|
208
|
+
def _open_url(cls, url: ParseResult) -> IO[bytes]:
|
|
209
|
+
"""
|
|
210
|
+
Get a stream of the object at the specified location.
|
|
211
|
+
|
|
212
|
+
Raises FileNotFoundError if the thing at the URL is not found.
|
|
213
|
+
"""
|
|
214
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
215
|
+
|
|
216
|
+
@classmethod
|
|
217
|
+
@abstractmethod
|
|
218
|
+
def _write_to_url(
|
|
219
|
+
cls,
|
|
220
|
+
readable: Union[IO[bytes], IO[str]],
|
|
221
|
+
url: ParseResult,
|
|
222
|
+
executable: bool = False,
|
|
223
|
+
) -> None:
|
|
224
|
+
"""
|
|
225
|
+
Reads the contents of the given readable stream and writes it to the object at the
|
|
226
|
+
specified location. Raises FileNotFoundError if the URL doesn't exist.
|
|
227
|
+
|
|
228
|
+
:param Union[IO[bytes], IO[str]] readable: a readable stream
|
|
229
|
+
|
|
230
|
+
:param ParseResult url: URL that points to a file or object in the storage
|
|
231
|
+
mechanism of a supported URL scheme e.g. a blob in an AWS s3 bucket.
|
|
232
|
+
|
|
233
|
+
:param bool executable: determines if the file has executable permissions
|
|
234
|
+
"""
|
|
235
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
236
|
+
|
|
237
|
+
@classmethod
|
|
238
|
+
@abstractmethod
|
|
239
|
+
def _supports_url(cls, url: ParseResult, export: bool = False) -> bool:
|
|
240
|
+
"""
|
|
241
|
+
Returns True if the url access implementation supports the URL's scheme.
|
|
242
|
+
|
|
243
|
+
:param ParseResult url: a parsed URL that may be supported
|
|
244
|
+
|
|
245
|
+
:param bool export: Determines if the url is supported for exporting
|
|
246
|
+
|
|
247
|
+
:return bool: returns true if the cls supports the URL
|
|
248
|
+
"""
|
|
249
|
+
raise NotImplementedError(f"No implementation for {url}")
|
|
250
|
+
|
|
251
|
+
@classmethod
|
|
252
|
+
def _find_url_implementation(
|
|
253
|
+
cls, url: ParseResult, export: bool = False
|
|
254
|
+
) -> type["URLAccess"]:
|
|
255
|
+
"""
|
|
256
|
+
Returns the URLAccess subclass that supports the given URL.
|
|
257
|
+
|
|
258
|
+
:param ParseResult url: The given URL
|
|
259
|
+
|
|
260
|
+
:param bool export: Determines if the url is supported for exporting
|
|
261
|
+
|
|
262
|
+
"""
|
|
263
|
+
try:
|
|
264
|
+
implementation_factory = get_plugin("url_access", url.scheme.lower())
|
|
265
|
+
except KeyError:
|
|
266
|
+
raise UnimplementedURLException(url, "export" if export else "import")
|
|
267
|
+
|
|
268
|
+
try:
|
|
269
|
+
implementation = cast(Type[URLAccess], implementation_factory())
|
|
270
|
+
except (ImportError, ProxyConnectionError):
|
|
271
|
+
logger.debug(
|
|
272
|
+
"Unable to import implementation for scheme '%s', as is expected if the corresponding extra was "
|
|
273
|
+
"omitted at installation time.",
|
|
274
|
+
url.scheme.lower(),
|
|
275
|
+
)
|
|
276
|
+
raise UnimplementedURLException(url, "export" if export else "import")
|
|
277
|
+
|
|
278
|
+
if implementation._supports_url(url, export):
|
|
279
|
+
return implementation
|
|
280
|
+
raise UnimplementedURLException(url, "export" if export else "import")
|
|
281
|
+
|
|
282
|
+
#####
|
|
283
|
+
# Built-in url access
|
|
284
|
+
#####
|
|
285
|
+
|
|
286
|
+
def file_job_store_factory() -> type[URLAccess]:
|
|
287
|
+
from toil.jobStores.fileJobStore import FileJobStore
|
|
288
|
+
|
|
289
|
+
return FileJobStore
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
def google_job_store_factory() -> type[URLAccess]:
|
|
293
|
+
from toil.jobStores.googleJobStore import GoogleJobStore
|
|
294
|
+
|
|
295
|
+
return GoogleJobStore
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def aws_job_store_factory() -> type[URLAccess]:
|
|
299
|
+
from toil.jobStores.aws.jobStore import AWSJobStore
|
|
300
|
+
|
|
301
|
+
return AWSJobStore
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
def job_store_support_factory() -> type[URLAccess]:
|
|
305
|
+
from toil.jobStores.abstractJobStore import JobStoreSupport
|
|
306
|
+
|
|
307
|
+
return JobStoreSupport
|
|
308
|
+
|
|
309
|
+
# TODO: make sure mypy still works and the tests pass
|
|
310
|
+
# can then get rid of _url_access_classes method
|
|
311
|
+
|
|
312
|
+
#####
|
|
313
|
+
# Registers all built-in urls
|
|
314
|
+
#####
|
|
315
|
+
register_plugin("url_access", "file", file_job_store_factory)
|
|
316
|
+
register_plugin("url_access", "gs", google_job_store_factory)
|
|
317
|
+
register_plugin("url_access", "s3", aws_job_store_factory)
|
|
318
|
+
register_plugin("url_access", "http", job_store_support_factory)
|
|
319
|
+
register_plugin("url_access", "https", job_store_support_factory)
|
|
320
|
+
register_plugin("url_access", "ftp", job_store_support_factory)
|
toil/options/cwl.py
CHANGED
|
@@ -3,6 +3,8 @@ from argparse import ArgumentParser
|
|
|
3
3
|
|
|
4
4
|
from configargparse import SUPPRESS
|
|
5
5
|
|
|
6
|
+
from toil.lib.conversions import human2bytes
|
|
7
|
+
from toil.options.common import make_open_interval_action
|
|
6
8
|
from toil.version import baseVersion
|
|
7
9
|
|
|
8
10
|
|
|
@@ -411,9 +413,19 @@ def add_cwl_options(parser: ArgumentParser, suppress: bool = True) -> None:
|
|
|
411
413
|
"--no-cwl-default-ram",
|
|
412
414
|
action="store_false",
|
|
413
415
|
help=suppress_help
|
|
414
|
-
or "Do not apply CWL specification default ramMin, so that Toil --defaultMemory applies.",
|
|
416
|
+
or "Do not apply CWL specification default ramMin, so that Toil --defaultMemory applies. This can help jobs get to Slurm with no memory limit assigned.",
|
|
415
417
|
dest="cwl_default_ram",
|
|
416
418
|
)
|
|
419
|
+
parser.add_argument(
|
|
420
|
+
"--cwl-min-ram",
|
|
421
|
+
type=human2bytes,
|
|
422
|
+
action=make_open_interval_action(1),
|
|
423
|
+
help=suppress_help
|
|
424
|
+
or "Specify a minimum memory allocation for all tasks. "
|
|
425
|
+
"If --no-cwl-default-ram is passed, this does not apply to tools that do not "
|
|
426
|
+
"specify a memory requirement; --defaultMemory is used for those tools "
|
|
427
|
+
"in that case."
|
|
428
|
+
)
|
|
417
429
|
parser.add_argument(
|
|
418
430
|
"--destBucket",
|
|
419
431
|
type=str,
|
toil/options/runner.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from argparse import ArgumentParser
|
|
1
|
+
from argparse import ArgumentParser, SUPPRESS
|
|
2
2
|
|
|
3
3
|
from toil.lib.conversions import human2bytes
|
|
4
4
|
|
|
@@ -25,17 +25,23 @@ def add_runner_options(
|
|
|
25
25
|
help="Run the file imports on a worker instead of the leader. This is useful if the leader is not optimized for high network performance. "
|
|
26
26
|
"If set to true, the argument --importWorkersDisk must also be set."
|
|
27
27
|
)
|
|
28
|
-
|
|
28
|
+
import_workers_batchsize_argument = ["--importWorkersBatchSize"]
|
|
29
29
|
if cwl:
|
|
30
|
-
|
|
30
|
+
import_workers_batchsize_argument.append("--import-workers-batch-size")
|
|
31
31
|
parser.add_argument(
|
|
32
|
-
*
|
|
33
|
-
dest="
|
|
32
|
+
*import_workers_batchsize_argument,
|
|
33
|
+
dest="import_workers_batchsize",
|
|
34
34
|
type=lambda x: human2bytes(str(x)),
|
|
35
35
|
default="1 GiB",
|
|
36
|
-
help="Specify the file size
|
|
37
|
-
|
|
36
|
+
help="Specify the target total file size for file import batches. "
|
|
37
|
+
"As many files as can fit will go into each batch import job. This should be set in conjunction with the argument --runImportsOnWorkers."
|
|
38
38
|
)
|
|
39
|
+
|
|
40
|
+
# Deprecated
|
|
41
|
+
parser.add_argument(
|
|
42
|
+
"--importWorkersThreshold", "--import-workers-threshold", dest="import_workers_batchsize",type=lambda x: human2bytes(str(x)), help=SUPPRESS
|
|
43
|
+
)
|
|
44
|
+
|
|
39
45
|
import_workers_disk_argument = ["--importWorkersDisk"]
|
|
40
46
|
if cwl:
|
|
41
47
|
import_workers_disk_argument.append("--import-workers-disk")
|
|
@@ -44,7 +50,8 @@ def add_runner_options(
|
|
|
44
50
|
dest="import_workers_disk",
|
|
45
51
|
type=lambda x: human2bytes(str(x)),
|
|
46
52
|
default="1 MiB",
|
|
47
|
-
help="Specify the disk size each import worker will get. This
|
|
48
|
-
"
|
|
49
|
-
"
|
|
53
|
+
help="Specify the disk size each import worker will get. This usually will not need to be set as Toil will attempt to use file streaming when downloading files. "
|
|
54
|
+
"If not possible, for example, when downloading from AWS to a GCE job store, "
|
|
55
|
+
"this should be set to the largest file size of all files to import. This should be set in conjunction with the arguments "
|
|
56
|
+
"--runImportsOnWorkers and --importWorkersBatchSize."
|
|
50
57
|
)
|
toil/options/wdl.py
CHANGED
|
@@ -96,4 +96,15 @@ def add_wdl_options(parser: ArgumentParser, suppress: bool = True) -> None:
|
|
|
96
96
|
type=strtobool,
|
|
97
97
|
default=False,
|
|
98
98
|
help=suppress_help or "Exit runner if workflow has any lint warnings"
|
|
99
|
-
)
|
|
99
|
+
)
|
|
100
|
+
|
|
101
|
+
quant_check_arguments = ["--wdlQuantCheck"] + (
|
|
102
|
+
["--quantCheck"] if not suppress else []
|
|
103
|
+
)
|
|
104
|
+
parser.add_argument(
|
|
105
|
+
*quant_check_arguments,
|
|
106
|
+
dest="quant_check",
|
|
107
|
+
type=strtobool,
|
|
108
|
+
default=True,
|
|
109
|
+
help=suppress_help or "Whether to relax quantifier validation rules"
|
|
110
|
+
)
|
|
@@ -109,6 +109,11 @@ _INSTANCE_PROFILE_ROLE_NAME = "toil"
|
|
|
109
109
|
_TAG_KEY_TOIL_NODE_TYPE = "ToilNodeType"
|
|
110
110
|
# The tag that specifies the cluster name on all nodes
|
|
111
111
|
_TAG_KEY_TOIL_CLUSTER_NAME = "clusterName"
|
|
112
|
+
# The tag we use to store the SSH key name.
|
|
113
|
+
# TODO: Get rid of this once
|
|
114
|
+
# <https://github.com/adamchainz/ec2-metadata/pull/562> is merged and we can
|
|
115
|
+
# get the SSH key name from the instance metadata.
|
|
116
|
+
_TAG_KEY_TOIL_SSH_KEY = "sshKeyName"
|
|
112
117
|
# How much storage on the root volume is expected to go to overhead and be
|
|
113
118
|
# unavailable to jobs when the node comes up?
|
|
114
119
|
# TODO: measure
|
|
@@ -309,11 +314,28 @@ class AWSProvisioner(AbstractProvisioner):
|
|
|
309
314
|
for tag in instance["Tags"]:
|
|
310
315
|
if tag.get("Key") == "Name":
|
|
311
316
|
self.clusterName = tag["Value"]
|
|
317
|
+
elif tag.get("Key") == _TAG_KEY_TOIL_SSH_KEY:
|
|
318
|
+
# If we can't get an SSH key from the instance metadata, we
|
|
319
|
+
# might be able to use this one from the tags.
|
|
320
|
+
self._keyName = tag["Value"]
|
|
312
321
|
# Determine what subnet we, the leader, are in
|
|
313
322
|
self._leader_subnet = instance["SubnetId"]
|
|
314
323
|
# Determine where to deploy workers.
|
|
315
324
|
self._worker_subnets_by_zone = self._get_good_subnets_like(self._leader_subnet)
|
|
316
325
|
|
|
326
|
+
# Find the SSH key name to use to start instances
|
|
327
|
+
if hasattr(ec2_metadata, 'public_keys') and isinstance(ec2_metadata.public_keys, dict):
|
|
328
|
+
key_names = list(ec2_metadata.public_keys.keys())
|
|
329
|
+
if len(key_names) > 0 and isinstance(key_names[0], str):
|
|
330
|
+
# We have a key name from the EC2 metadata. This should always
|
|
331
|
+
# be the case once
|
|
332
|
+
# <https://github.com/adamchainz/ec2-metadata/pull/562> is
|
|
333
|
+
# merged. Override anything from the tags.
|
|
334
|
+
self._keyName = key_names[0]
|
|
335
|
+
|
|
336
|
+
if not hasattr(self, '_keyName'):
|
|
337
|
+
raise RuntimeError("Unable to determine the SSH key name the cluster is using")
|
|
338
|
+
|
|
317
339
|
self._leaderPrivateIP = ec2_metadata.private_ipv4 # this is PRIVATE IP
|
|
318
340
|
self._tags = {
|
|
319
341
|
k: v
|
|
@@ -495,6 +517,7 @@ class AWSProvisioner(AbstractProvisioner):
|
|
|
495
517
|
# Make tags for the leader specifically
|
|
496
518
|
leader_tags = dict(self._tags)
|
|
497
519
|
leader_tags[_TAG_KEY_TOIL_NODE_TYPE] = "leader"
|
|
520
|
+
leader_tags[_TAG_KEY_TOIL_SSH_KEY] = self._keyName
|
|
498
521
|
logger.debug("Launching leader with tags: %s", leader_tags)
|
|
499
522
|
|
|
500
523
|
instances: list[Instance] = create_instances(
|
|
@@ -1144,7 +1167,7 @@ class AWSProvisioner(AbstractProvisioner):
|
|
|
1144
1167
|
workerInstances = [
|
|
1145
1168
|
i
|
|
1146
1169
|
for i in workerInstances
|
|
1147
|
-
if preemptible == (i
|
|
1170
|
+
if preemptible == (i.get("SpotInstanceRequestId") is not None)
|
|
1148
1171
|
]
|
|
1149
1172
|
logger.debug(
|
|
1150
1173
|
"%spreemptible workers found in cluster: %s",
|
|
@@ -1161,7 +1184,7 @@ class AWSProvisioner(AbstractProvisioner):
|
|
|
1161
1184
|
name=i["InstanceId"],
|
|
1162
1185
|
launchTime=i["LaunchTime"],
|
|
1163
1186
|
nodeType=i["InstanceType"],
|
|
1164
|
-
preemptible=i
|
|
1187
|
+
preemptible=i.get("SpotInstanceRequestId") is not None,
|
|
1165
1188
|
tags=collapse_tags(i["Tags"]),
|
|
1166
1189
|
)
|
|
1167
1190
|
for i in workerInstances
|
toil/server/app.py
CHANGED
|
@@ -16,6 +16,7 @@ import logging
|
|
|
16
16
|
import os
|
|
17
17
|
|
|
18
18
|
import connexion # type: ignore
|
|
19
|
+
from connexion.options import SwaggerUIOptions # type: ignore[import-untyped]
|
|
19
20
|
from configargparse import ArgumentParser
|
|
20
21
|
|
|
21
22
|
from toil.lib.aws import get_current_aws_region, running_on_ec2, running_on_ecs
|
|
@@ -133,8 +134,10 @@ def create_app(args: argparse.Namespace) -> "connexion.FlaskApp":
|
|
|
133
134
|
"""
|
|
134
135
|
Create a "connexion.FlaskApp" instance with Toil server configurations.
|
|
135
136
|
"""
|
|
137
|
+
swagger_ui_options = SwaggerUIOptions(swagger_ui=args.swagger_ui)
|
|
138
|
+
|
|
136
139
|
flask_app = connexion.FlaskApp(
|
|
137
|
-
__name__, specification_dir="api_spec/",
|
|
140
|
+
__name__, specification_dir="api_spec/", swagger_ui_options=swagger_ui_options
|
|
138
141
|
)
|
|
139
142
|
|
|
140
143
|
flask_app.app.config["JSON_SORT_KEYS"] = False
|
|
@@ -164,16 +167,16 @@ def create_app(args: argparse.Namespace) -> "connexion.FlaskApp":
|
|
|
164
167
|
if isinstance(backend, ToilBackend):
|
|
165
168
|
# We extend the WES API to allow presenting log data
|
|
166
169
|
base_url = "/toil/wes/v1"
|
|
167
|
-
flask_app.
|
|
170
|
+
flask_app.add_url_rule(
|
|
168
171
|
f"{base_url}/logs/<run_id>/stdout", view_func=backend.get_stdout
|
|
169
172
|
)
|
|
170
|
-
flask_app.
|
|
173
|
+
flask_app.add_url_rule(
|
|
171
174
|
f"{base_url}/logs/<run_id>/stderr", view_func=backend.get_stderr
|
|
172
175
|
)
|
|
173
176
|
# To be a well-behaved AGC engine we can implement the default status check endpoint
|
|
174
|
-
flask_app.
|
|
177
|
+
flask_app.add_url_rule("/engine/v1/status", view_func=backend.get_health)
|
|
175
178
|
# And we can provide lost humans some information on what they are looking at
|
|
176
|
-
flask_app.
|
|
179
|
+
flask_app.add_url_rule("/", view_func=backend.get_homepage)
|
|
177
180
|
|
|
178
181
|
return flask_app
|
|
179
182
|
|
|
@@ -201,9 +204,12 @@ def start_server(args: argparse.Namespace) -> None:
|
|
|
201
204
|
else:
|
|
202
205
|
# start a production WSGI server
|
|
203
206
|
run_app(
|
|
204
|
-
flask_app
|
|
207
|
+
flask_app,
|
|
205
208
|
options={
|
|
206
209
|
"bind": f"{host}:{port}",
|
|
207
210
|
"workers": args.workers,
|
|
211
|
+
# The uvicorn worker class must be specified for gunicorn to work on connexion 3
|
|
212
|
+
# https://github.com/spec-first/connexion/issues/1755#issuecomment-1778522142
|
|
213
|
+
"worker_class": "uvicorn.workers.UvicornWorker"
|
|
208
214
|
},
|
|
209
215
|
)
|
|
@@ -14,7 +14,7 @@ import ruamel.yaml
|
|
|
14
14
|
import schema_salad
|
|
15
15
|
from configargparse import ArgumentParser
|
|
16
16
|
from wes_client.util import WESClient # type: ignore
|
|
17
|
-
from wes_client.util import
|
|
17
|
+
from wes_client.util import wes_response as wes_response
|
|
18
18
|
|
|
19
19
|
from toil.lib.web import web_session
|
|
20
20
|
from toil.wdl.utils import get_version as get_wdl_version
|
|
@@ -144,7 +144,7 @@ class WESClientWithWorkflowEngineParameters(WESClient): # type: ignore
|
|
|
144
144
|
return "3.8"
|
|
145
145
|
elif extension == "cwl":
|
|
146
146
|
with open(workflow_file) as f:
|
|
147
|
-
yaml = ruamel.yaml.YAML(typ=
|
|
147
|
+
yaml = ruamel.yaml.YAML(typ="safe", pure=True)
|
|
148
148
|
return str(yaml.load(f)["cwlVersion"])
|
|
149
149
|
elif extension == "wdl":
|
|
150
150
|
with open(workflow_file) as f:
|