toil 6.1.0a1__py3-none-any.whl → 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. toil/__init__.py +1 -232
  2. toil/batchSystems/abstractBatchSystem.py +41 -17
  3. toil/batchSystems/abstractGridEngineBatchSystem.py +79 -65
  4. toil/batchSystems/awsBatch.py +8 -8
  5. toil/batchSystems/cleanup_support.py +7 -3
  6. toil/batchSystems/contained_executor.py +4 -5
  7. toil/batchSystems/gridengine.py +1 -1
  8. toil/batchSystems/htcondor.py +5 -5
  9. toil/batchSystems/kubernetes.py +25 -11
  10. toil/batchSystems/local_support.py +3 -3
  11. toil/batchSystems/lsf.py +9 -9
  12. toil/batchSystems/mesos/batchSystem.py +4 -4
  13. toil/batchSystems/mesos/executor.py +3 -2
  14. toil/batchSystems/options.py +9 -0
  15. toil/batchSystems/singleMachine.py +11 -10
  16. toil/batchSystems/slurm.py +129 -16
  17. toil/batchSystems/torque.py +1 -1
  18. toil/bus.py +45 -3
  19. toil/common.py +56 -31
  20. toil/cwl/cwltoil.py +442 -371
  21. toil/deferred.py +1 -1
  22. toil/exceptions.py +1 -1
  23. toil/fileStores/abstractFileStore.py +69 -20
  24. toil/fileStores/cachingFileStore.py +6 -22
  25. toil/fileStores/nonCachingFileStore.py +6 -15
  26. toil/job.py +270 -86
  27. toil/jobStores/abstractJobStore.py +37 -31
  28. toil/jobStores/aws/jobStore.py +280 -218
  29. toil/jobStores/aws/utils.py +60 -31
  30. toil/jobStores/conftest.py +2 -2
  31. toil/jobStores/fileJobStore.py +3 -3
  32. toil/jobStores/googleJobStore.py +3 -4
  33. toil/leader.py +89 -38
  34. toil/lib/aws/__init__.py +26 -10
  35. toil/lib/aws/iam.py +2 -2
  36. toil/lib/aws/session.py +62 -22
  37. toil/lib/aws/utils.py +73 -37
  38. toil/lib/conversions.py +24 -1
  39. toil/lib/ec2.py +118 -69
  40. toil/lib/expando.py +1 -1
  41. toil/lib/generatedEC2Lists.py +8 -8
  42. toil/lib/io.py +42 -4
  43. toil/lib/misc.py +1 -3
  44. toil/lib/resources.py +57 -16
  45. toil/lib/retry.py +12 -5
  46. toil/lib/threading.py +29 -14
  47. toil/lib/throttle.py +1 -1
  48. toil/options/common.py +31 -30
  49. toil/options/wdl.py +5 -0
  50. toil/provisioners/__init__.py +9 -3
  51. toil/provisioners/abstractProvisioner.py +12 -2
  52. toil/provisioners/aws/__init__.py +20 -15
  53. toil/provisioners/aws/awsProvisioner.py +406 -329
  54. toil/provisioners/gceProvisioner.py +2 -2
  55. toil/provisioners/node.py +13 -5
  56. toil/server/app.py +1 -1
  57. toil/statsAndLogging.py +93 -23
  58. toil/test/__init__.py +27 -12
  59. toil/test/batchSystems/batchSystemTest.py +40 -33
  60. toil/test/batchSystems/batch_system_plugin_test.py +79 -0
  61. toil/test/batchSystems/test_slurm.py +22 -7
  62. toil/test/cactus/__init__.py +0 -0
  63. toil/test/cactus/test_cactus_integration.py +58 -0
  64. toil/test/cwl/cwlTest.py +245 -236
  65. toil/test/cwl/seqtk_seq.cwl +1 -1
  66. toil/test/docs/scriptsTest.py +11 -14
  67. toil/test/jobStores/jobStoreTest.py +40 -54
  68. toil/test/lib/aws/test_iam.py +2 -2
  69. toil/test/lib/test_ec2.py +1 -1
  70. toil/test/options/__init__.py +13 -0
  71. toil/test/options/options.py +37 -0
  72. toil/test/provisioners/aws/awsProvisionerTest.py +51 -34
  73. toil/test/provisioners/clusterTest.py +99 -16
  74. toil/test/server/serverTest.py +2 -2
  75. toil/test/src/autoDeploymentTest.py +1 -1
  76. toil/test/src/dockerCheckTest.py +2 -1
  77. toil/test/src/environmentTest.py +125 -0
  78. toil/test/src/fileStoreTest.py +1 -1
  79. toil/test/src/jobDescriptionTest.py +18 -8
  80. toil/test/src/jobTest.py +1 -1
  81. toil/test/src/realtimeLoggerTest.py +4 -0
  82. toil/test/src/workerTest.py +52 -19
  83. toil/test/utils/toilDebugTest.py +62 -4
  84. toil/test/utils/utilsTest.py +23 -21
  85. toil/test/wdl/wdltoil_test.py +49 -21
  86. toil/test/wdl/wdltoil_test_kubernetes.py +77 -0
  87. toil/toilState.py +68 -9
  88. toil/utils/toilDebugFile.py +1 -1
  89. toil/utils/toilDebugJob.py +153 -26
  90. toil/utils/toilLaunchCluster.py +12 -2
  91. toil/utils/toilRsyncCluster.py +7 -2
  92. toil/utils/toilSshCluster.py +7 -3
  93. toil/utils/toilStats.py +310 -266
  94. toil/utils/toilStatus.py +98 -52
  95. toil/version.py +11 -11
  96. toil/wdl/wdltoil.py +644 -225
  97. toil/worker.py +125 -83
  98. {toil-6.1.0a1.dist-info → toil-7.0.0.dist-info}/LICENSE +25 -0
  99. toil-7.0.0.dist-info/METADATA +158 -0
  100. {toil-6.1.0a1.dist-info → toil-7.0.0.dist-info}/RECORD +103 -96
  101. {toil-6.1.0a1.dist-info → toil-7.0.0.dist-info}/WHEEL +1 -1
  102. toil-6.1.0a1.dist-info/METADATA +0 -125
  103. {toil-6.1.0a1.dist-info → toil-7.0.0.dist-info}/entry_points.txt +0 -0
  104. {toil-6.1.0a1.dist-info → toil-7.0.0.dist-info}/top_level.txt +0 -0
toil/provisioners/gceProvisioner.py CHANGED
@@ -42,13 +42,13 @@ class GCEProvisioner(AbstractProvisioner):
      NODE_BOTO_PATH = "/root/.boto" # boto file path on instances
      SOURCE_IMAGE = b'projects/kinvolk-public/global/images/family/flatcar-stable'

-     def __init__(self, clusterName, clusterType, zone, nodeStorage, nodeStorageOverrides, sseKey):
+     def __init__(self, clusterName, clusterType, zone, nodeStorage, nodeStorageOverrides, sseKey, enable_fuse):
          self.cloud = 'gce'
          self._sseKey = sseKey

          # Call base class constructor, which will call createClusterSettings()
          # or readClusterSettings()
-         super().__init__(clusterName, clusterType, zone, nodeStorage, nodeStorageOverrides)
+         super().__init__(clusterName, clusterType, zone, nodeStorage, nodeStorageOverrides, enable_fuse)

      def supportedClusterTypes(self):
          return {'mesos'}
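Every provisioner constructor in 7.0.0 grows this trailing `enable_fuse` flag. A minimal sketch of what that means for a downstream provisioner subclass, assuming only the base-class signature visible in this hunk (`MyProvisioner` is hypothetical, and a real subclass must also implement the abstract provisioner methods):

```python
# Sketch only: forwarding the new enable_fuse flag introduced in 7.0.0.
from toil.provisioners.abstractProvisioner import AbstractProvisioner


class MyProvisioner(AbstractProvisioner):  # hypothetical subclass
    def __init__(self, clusterName, clusterType, zone, nodeStorage,
                 nodeStorageOverrides, enable_fuse):
        # The flag must now be forwarded; calling the 6.x five-argument
        # form of super().__init__() would raise a TypeError.
        super().__init__(clusterName, clusterType, zone, nodeStorage,
                         nodeStorageOverrides, enable_fuse)
```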
toil/provisioners/node.py CHANGED
@@ -18,6 +18,7 @@ import socket
  import subprocess
  import time
  from itertools import count
+ from typing import Union, Dict, Optional, List, Any

  from toil.lib.memoize import parse_iso_utc

@@ -29,7 +30,8 @@ logger = logging.getLogger(__name__)
  class Node:
      maxWaitTime = 7 * 60

-     def __init__(self, publicIP, privateIP, name, launchTime, nodeType, preemptible, tags=None, use_private_ip=None):
+     def __init__(self, publicIP: str, privateIP: str, name: str, launchTime: Union[datetime.datetime, str],
+                  nodeType: Optional[str], preemptible: bool, tags: Optional[Dict[str, str]] = None, use_private_ip: Optional[bool] = None) -> None:
          self.publicIP = publicIP
          self.privateIP = privateIP
          if use_private_ip:
@@ -37,7 +39,13 @@ class Node:
          else:
              self.effectiveIP = self.publicIP or self.privateIP
          self.name = name
-         self.launchTime = launchTime
+         if isinstance(launchTime, datetime.datetime):
+             self.launchTime = launchTime
+         else:
+             try:
+                 self.launchTime = parse_iso_utc(launchTime)
+             except ValueError:
+                 self.launchTime = datetime.datetime.fromisoformat(launchTime)
          self.nodeType = nodeType
          self.preemptible = preemptible
          self.tags = tags
@@ -65,12 +73,12 @@ class Node:
          """
          if self.launchTime:
              now = datetime.datetime.utcnow()
-             delta = now - parse_iso_utc(self.launchTime)
+             delta = now - self.launchTime
              return 1 - delta.total_seconds() / 3600.0 % 1.0
          else:
              return 1

-     def waitForNode(self, role, keyName='core'):
+     def waitForNode(self, role: str, keyName: str = 'core') -> None:
          self._waitForSSHPort()
          # wait here so docker commands can be used reliably afterwards
          self._waitForSSHKeys(keyName=keyName)
@@ -288,7 +296,7 @@ class Node:
                        % (' '.join(args), exit_code, stdout, stderr))
          return stdout

-     def coreRsync(self, args, applianceName='toil_leader', **kwargs):
+     def coreRsync(self, args: List[str], applianceName: str = 'toil_leader', **kwargs: Any) -> int:
          remoteRsync = "docker exec -i %s rsync -v" % applianceName # Access rsync inside appliance
          parsedArgs = []
          sshCommand = "ssh"
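The launchTime handling above means Node now accepts either a datetime or an ISO 8601 string, trying Toil's strict parse_iso_utc first and falling back to the stdlib parser. A self-contained sketch of that fallback, with a strict strptime standing in for parse_iso_utc:

```python
import datetime
from typing import Union


def to_launch_time(value: Union[datetime.datetime, str]) -> datetime.datetime:
    """Mirror Node.__init__'s launchTime handling from the diff above."""
    if isinstance(value, datetime.datetime):
        return value
    try:
        # Stand-in for toil.lib.memoize.parse_iso_utc, which only accepts
        # a strict Zulu-suffixed UTC form.
        return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
    except ValueError:
        # Anything else (e.g. an explicit UTC offset) goes through the
        # stdlib ISO parser, as in the except ValueError branch above.
        return datetime.datetime.fromisoformat(value)


print(to_launch_time("2024-05-01T12:00:00Z"))       # strict path
print(to_launch_time("2024-05-01T12:00:00+00:00"))  # fromisoformat fallback
```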
toil/server/app.py CHANGED
@@ -87,7 +87,7 @@ def create_app(args: argparse.Namespace) -> "connexion.FlaskApp":

      if args.cors:
          # enable cross origin resource sharing
-         from flask_cors import CORS # type: ignore
+         from flask_cors import CORS
          CORS(flask_app.app, resources={r"/ga4gh/*": {"origins": args.cors_origins}})

      # add workflow execution service (WES) API endpoints
toil/statsAndLogging.py CHANGED
@@ -12,6 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  import gzip
+ import io
  import json
  import logging
  import os
@@ -21,8 +22,9 @@ from logging.handlers import RotatingFileHandler
  from threading import Event, Thread
  from typing import IO, TYPE_CHECKING, Any, Callable, List, Optional, Union

+ from toil.lib.conversions import strtobool
  from toil.lib.expando import Expando
- from toil.lib.resources import get_total_cpu_time
+ from toil.lib.resources import ResourceMonitor

  if TYPE_CHECKING:
      from toil.common import Config
@@ -38,6 +40,7 @@ __loggingFiles = []

  class StatsAndLogging:
      """A thread to aggregate statistics and logging."""
+
      def __init__(self, jobStore: 'AbstractJobStore', config: 'Config') -> None:
          self._stop = Event()
          self._worker = Thread(target=self.statsAndLoggingAggregator,
@@ -49,7 +52,7 @@ class StatsAndLogging:
          self._worker.start()

      @classmethod
-     def formatLogStream(cls, stream: Union[IO[str], IO[bytes]], job_name: Optional[str] = None) -> str:
+     def formatLogStream(cls, stream: Union[IO[str], IO[bytes]], stream_name: str) -> str:
          """
          Given a stream of text or bytes, and the job name, job itself, or some
          other optional stringifyable identity info for the job, return a big
@@ -62,7 +65,7 @@ class StatsAndLogging:

          :param stream: The stream of text or bytes to print for the user.
          """
-         lines = [f'Log from job "{job_name}" follows:', '=========>']
+         lines = [f'{stream_name} follows:', '=========>']

          for line in stream:
              if isinstance(line, bytes):
@@ -73,15 +76,15 @@ class StatsAndLogging:

          return '\n'.join(lines)

-
      @classmethod
-     def logWithFormatting(cls, jobStoreID: str, jobLogs: Union[IO[str], IO[bytes]], method: Callable[[str], None] = logger.debug,
-                           message: Optional[str] = None) -> None:
+     def logWithFormatting(cls, stream_name: str, jobLogs: Union[IO[str], IO[bytes]],
+                           method: Callable[[str], None] = logger.debug,
+                           message: Optional[str] = None) -> None:
          if message is not None:
              method(message)

-         # Format and log the logs, identifying the job with its job store ID.
-         method(cls.formatLogStream(jobLogs, jobStoreID))
+         # Format and log the logs, identifying the stream with the given name.
+         method(cls.formatLogStream(jobLogs, stream_name))

      @classmethod
      def writeLogFiles(cls, jobNames: List[str], jobLogList: List[str], config: 'Config', failed: bool = False) -> None:
@@ -95,11 +98,11 @@ class StatsAndLogging:
              logName = ('failed_' if failed else '') + logName
              counter = 0
              while True:
-                 suffix = str(counter).zfill(3) + logExtension
+                 suffix = '_' + str(counter).zfill(3) + logExtension
                  fullName = os.path.join(logPath, logName + suffix)
                  # The maximum file name size in the default HFS+ file system is 255 UTF-16 encoding units, so basically 255 characters
                  if len(fullName) >= 255:
-                     return fullName[:(255-len(suffix))] + suffix
+                     return fullName[:(255 - len(suffix))] + suffix
                  if not os.path.exists(fullName):
                      return fullName
                  counter += 1
@@ -118,6 +121,9 @@ class StatsAndLogging:
              # we don't have anywhere to write the logs, return now
              return

+         # Make sure the destination exists
+         os.makedirs(path, exist_ok=True)
+
          fullName = createName(path, mainFileName, extension, failed)
          with writeFn(fullName, 'wb') as f:
              for l in jobLogList:
@@ -141,7 +147,7 @@ class StatsAndLogging:
          """
          # Overall timing
          startTime = time.time()
-         startClock = get_total_cpu_time()
+         startClock = ResourceMonitor.get_total_cpu_time()

          def callback(fileHandle: Union[IO[bytes], IO[str]]) -> None:
              statsStr = fileHandle.read()
@@ -150,8 +156,10 @@ class StatsAndLogging:
              stats = json.loads(statsStr, object_hook=Expando)
              if not stats:
                  return
+
              try:
-                 logs = stats.workers.logsToMaster
+                 # Handle all the log_to_leader messages
+                 logs = stats.workers.logs_to_leader
              except AttributeError:
                  # To be expected if there were no calls to log_to_leader()
                  pass
@@ -160,6 +168,28 @@ class StatsAndLogging:
                      logger.log(int(message.level),
                                 'Got message from job at time %s: %s',
                                 time.strftime('%m-%d-%Y %H:%M:%S'), message.text)
+
+             try:
+                 # Handle all the user-level text streams reported back (command output, etc.)
+                 user_logs = stats.workers.logging_user_streams
+             except AttributeError:
+                 # To be expected if there were no calls to log_user_stream()
+                 pass
+             else:
+                 for stream_entry in user_logs:
+                     try:
+                         # Unpack the stream name and text.
+                         name, text = stream_entry.name, stream_entry.text
+                     except AttributeError:
+                         # Doesn't have a user-provided stream name and stream
+                         # text, so skip it.
+                         continue
+                     # Since this is sent as inline text we need to pretend to stream it.
+                     # TODO: Save these as individual files if they start to get too big?
+                     cls.logWithFormatting(name, io.StringIO(text), logger.info)
+                     # Save it as a log file, as if it were a Toil-level job.
+                     cls.writeLogFiles([name], [text], config=config)
+
              try:
                  logs = stats.logs
              except AttributeError:
@@ -168,7 +198,7 @@ class StatsAndLogging:
                  # we may have multiple jobs per worker
                  jobNames = logs.names
                  messages = logs.messages
-                 cls.logWithFormatting(jobNames[0], messages,
+                 cls.logWithFormatting(f'Log from job "{jobNames[0]}"', messages,
                                        message='Received Toil worker log. Disable debug level logging to hide this output')
                  cls.writeLogFiles(jobNames, messages, config=config)

@@ -182,7 +212,7 @@ class StatsAndLogging:

          # Finish the stats file
          text = json.dumps(dict(total_time=str(time.time() - startTime),
-                                total_clock=str(get_total_cpu_time() - startClock)), ensure_ascii=True)
+                                total_clock=str(ResourceMonitor.get_total_cpu_time() - startClock)), ensure_ascii=True)
          jobStore.write_logs(text)

      def check(self) -> None:
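The new `logging_user_streams` handling above receives each user stream as inline text, so the aggregator wraps it in `io.StringIO` before handing it to the line-oriented formatter. A rough illustration of that flow, with a hypothetical entry object standing in for the Expando records the worker reports (the exact rendering of `formatLogStream` is approximated from the header lines visible in this diff):

```python
import io


class StreamEntry:
    """Hypothetical stand-in for one stats.workers.logging_user_streams record."""
    def __init__(self, name: str, text: str) -> None:
        self.name = name
        self.text = text


entry = StreamEntry("stdout from 'align' task", "read 100 sequences\ndone\n")

# The formatter consumes streams, so the inline text is re-wrapped;
# io.StringIO iterates line by line just like a real log file would.
lines = [f"{entry.name} follows:", "=========>"]
for line in io.StringIO(entry.text):
    lines.append(line.rstrip("\n"))
print("\n".join(lines))
```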
@@ -208,32 +238,70 @@ def set_log_level(level: str, set_logger: Optional[logging.Logger] = None) -> None:
      level = "CRITICAL" if level.upper() == "OFF" else level.upper()
      set_logger = set_logger if set_logger else root_logger
      set_logger.setLevel(level)
-
      # Suppress any random loggers introduced by libraries we use.
      # Especially boto/boto3. They print too much. -__-
      suppress_exotic_logging(__name__)


- def add_logging_options(parser: ArgumentParser) -> None:
-     """Add logging options to set the global log level."""
+ def install_log_color(set_logger: Optional[logging.Logger] = None) -> None:
+     """Make logs colored."""
+     # Most of this code is taken from miniwdl
+     # delayed import
+     import coloredlogs  # type: ignore[import-untyped]
+
+     level_styles = dict(coloredlogs.DEFAULT_LEVEL_STYLES)
+     level_styles["debug"]["color"] = 242
+     level_styles["notice"] = {"color": "green", "bold": True}
+     level_styles["error"]["bold"] = True
+     level_styles["warning"]["bold"] = True
+     level_styles["info"] = {}
+     field_styles = dict(coloredlogs.DEFAULT_FIELD_STYLES)
+     field_styles["asctime"] = {"color": "blue"}
+     field_styles["name"] = {"color": "magenta"}
+     field_styles["levelname"] = {"color": "blue"}
+     field_styles["threadName"] = {"color": "blue"}
+     fmt = "[%(asctime)s] [%(threadName)s] [%(levelname).1s] [%(name)s] %(message)s"  # mimic old toil logging format
+     set_logger = set_logger if set_logger else root_logger
+     coloredlogs.install(
+         level=set_logger.getEffectiveLevel(),
+         logger=set_logger,
+         level_styles=level_styles,
+         field_styles=field_styles,
+         datefmt="%Y-%m-%dT%H:%M:%S%z",  # mimic old toil date format
+         fmt=fmt,
+     )
+
+
+ def add_logging_options(parser: ArgumentParser, default_level: Optional[int] = None) -> None:
+     """
+     Add logging options to set the global log level.
+
+     :param default_level: A logging level, like logging.INFO, to use as the default.
+     """
+     if default_level is None:
+         # Make sure we have a log levle to make the default
+         default_level = DEFAULT_LOGLEVEL
+     default_level_name = logging.getLevelName(default_level)
+
      group = parser.add_argument_group("Logging Options")
-     default_loglevel = logging.getLevelName(DEFAULT_LOGLEVEL)

      levels = ['Critical', 'Error', 'Warning', 'Debug', 'Info']
      for level in levels:
-         group.add_argument(f"--log{level}", dest="logLevel", default=default_loglevel, action="store_const",
-                            const=level, help=f"Turn on loglevel {level}. Default: {default_loglevel}.")
+         group.add_argument(f"--log{level}", dest="logLevel", default=default_level_name, action="store_const",
+                            const=level, help=f"Turn on loglevel {level}. Default: {default_level_name}.")

      levels += [l.lower() for l in levels] + [l.upper() for l in levels]
-     group.add_argument("--logOff", dest="logLevel", default=default_loglevel,
+     group.add_argument("--logOff", dest="logLevel", default=default_level_name,
                         action="store_const", const="CRITICAL", help="Same as --logCRITICAL.")
      # Maybe deprecate the above in favor of --logLevel?

-     group.add_argument("--logLevel", dest="logLevel", default=default_loglevel, choices=levels,
-                        help=f"Set the log level. Default: {default_loglevel}. Options: {levels}.")
+     group.add_argument("--logLevel", dest="logLevel", default=default_level_name, choices=levels,
+                        help=f"Set the log level. Default: {default_level_name}. Options: {levels}.")
      group.add_argument("--logFile", dest="logFile", help="File to log in.")
      group.add_argument("--rotatingLogging", dest="logRotating", action="store_true", default=False,
                         help="Turn on rotating logging, which prevents log files from getting too big.")
+     group.add_argument("--logColors", dest="colored_logs", default=True, type=strtobool, metavar="BOOL",
+                        help="Enable or disable colored logging. Default: %(default)s")


  def configure_root_logger() -> None:
@@ -264,6 +332,8 @@ def set_logging_from_options(options: Union["Config", Namespace]) -> None:
      configure_root_logger()
      options.logLevel = options.logLevel or logging.getLevelName(root_logger.getEffectiveLevel())
      set_log_level(options.logLevel)
+     if options.colored_logs:
+         install_log_color()
      logger.debug(f"Root logger is at level '{logging.getLevelName(root_logger.getEffectiveLevel())}', "
                   f"'toil' logger at level '{logging.getLevelName(toil_logger.getEffectiveLevel())}'.")

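Taken together, a caller-facing sketch of the new logging options, using only the APIs visible in this file's diff (`add_logging_options`'s new `default_level` parameter and the `--logColors` flag parsed by `strtobool`):

```python
import logging
from argparse import ArgumentParser

from toil.statsAndLogging import add_logging_options, set_logging_from_options

parser = ArgumentParser()
# New in 7.0.0: the caller can pick the default level instead of
# inheriting the module-wide DEFAULT_LOGLEVEL.
add_logging_options(parser, default_level=logging.INFO)

# --logColors takes an explicit boolean; strtobool accepts "true"/"false" etc.
options = parser.parse_args(["--logLevel", "debug", "--logColors", "false"])

# Applies the level and, because colored_logs is false here, skips the
# coloredlogs installation added in this release.
set_logging_from_options(options)
```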
toil/test/__init__.py CHANGED
@@ -36,6 +36,7 @@ from typing import (Any,
                      Dict,
                      Generator,
                      List,
+                     Literal,
                      Optional,
                      Tuple,
                      Type,
@@ -46,17 +47,15 @@ from unittest.util import strclass
  from urllib.error import HTTPError, URLError
  from urllib.request import urlopen

- import pytz

- if sys.version_info >= (3, 8):
-     from typing import Literal
+ if sys.version_info >= (3, 9):
+     import zoneinfo
  else:
-     from typing_extensions import Literal
+     from backports import zoneinfo

  from toil import ApplianceImageNotFound, applianceSelf, toilPackageDirPath
  from toil.lib.accelerators import (have_working_nvidia_docker_runtime,
                                     have_working_nvidia_smi)
- from toil.lib.aws import running_on_ec2
  from toil.lib.io import mkdtemp
  from toil.lib.iterables import concat
  from toil.lib.memoize import memoize
@@ -86,8 +85,8 @@ class ToilTest(unittest.TestCase):
      _tempDirs: List[str] = []

      def setup_method(self, method: Any) -> None:
-         western = pytz.timezone('America/Los_Angeles')
-         california_time = western.localize(datetime.datetime.now())
+         western = zoneinfo.ZoneInfo("America/Los_Angeles")
+         california_time = datetime.datetime.now(tz=western)
          timestamp = california_time.strftime("%b %d %Y %H:%M:%S:%f %Z")
          print(f"\n\n[TEST] {strclass(self.__class__)}:{self._testMethodName} ({timestamp})\n\n")
@@ -127,6 +126,7 @@ class ToilTest(unittest.TestCase):
          Use us-west-2 unless running on EC2, in which case use the region in which
          the instance is located
          """
+         from toil.lib.aws import running_on_ec2
          return cls._region() if running_on_ec2() else 'us-west-2'

      @classmethod
@@ -372,14 +372,15 @@ def needs_aws_s3(test_item: MT) -> MT:
      # TODO: we just check for generic access to the AWS account
      test_item = _mark_test('aws-s3', needs_online(test_item))
      try:
-         from boto import config
-         boto_credentials = config.get('Credentials', 'aws_access_key_id')
+         from boto3 import Session
+         session = Session()
+         boto3_credentials = session.get_credentials()
      except ImportError:
          return unittest.skip("Install Toil with the 'aws' extra to include this test.")(
              test_item
          )
-
-     if not (boto_credentials or os.path.exists(os.path.expanduser('~/.aws/credentials')) or running_on_ec2()):
+     from toil.lib.aws import running_on_ec2
+     if not (boto3_credentials or os.path.exists(os.path.expanduser('~/.aws/credentials')) or running_on_ec2()):
          return unittest.skip("Configure AWS credentials to include this test.")(test_item)
      return test_item

@@ -632,6 +633,20 @@ def needs_cwl(test_item: MT) -> MT:
      else:
          return test_item

+ def needs_wdl(test_item: MT) -> MT:
+     """
+     Use as a decorator before test classes or methods to only run them if miniwdl is installed
+     and configured.
+     """
+     test_item = _mark_test('wdl', test_item)
+     try:
+         # noinspection PyUnresolvedReferences
+         import WDL  # noqa
+     except ImportError:
+         return unittest.skip("Install Toil with the 'wdl' extra to include this test.")(test_item)
+     else:
+         return test_item
+

  def needs_server(test_item: MT) -> MT:
      """
@@ -747,7 +762,7 @@ def integrative(test_item: MT) -> MT:
          return test_item
      else:
          return unittest.skip(
-             'Set TOIL_TEST_INTEGRATIVE="True" to include this integration test, '
+             'Set TOIL_TEST_INTEGRATIVE=True to include this integration test, '
              "or run `make integration_test_local` to run all integration tests."
          )(test_item)

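The pytz removal above follows the standard migration to the stdlib: instead of `localize()`, the zone object is passed directly as `tzinfo`. A self-contained before/after sketch (stdlib `zoneinfo` requires Python 3.9+, hence the `backports.zoneinfo` branch in the diff):

```python
import datetime
import zoneinfo

# Old (pytz):   western.localize(datetime.datetime.now())
# New (stdlib): pass the zone as tzinfo when constructing the datetime.
western = zoneinfo.ZoneInfo("America/Los_Angeles")
california_time = datetime.datetime.now(tz=western)
print(california_time.strftime("%b %d %Y %H:%M:%S:%f %Z"))
```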
toil/test/batchSystems/batchSystemTest.py CHANGED
@@ -140,19 +140,16 @@ class hidden:
          """
          return self.createConfig()

-     def _mockJobDescription(self, jobStoreID=None, command=None, **kwargs):
+     def _mockJobDescription(self, jobStoreID=None, **kwargs):
          """
-         Create a mock-up JobDescription with the given ID, command, and other parameters.
+         Create a mock-up JobDescription with the given ID and other parameters.
          """

          # TODO: Use a real unittest.Mock? For now we make a real instance and just hack it up.

          desc = JobDescription(**kwargs)
-         # Normally we can't pass in a command or ID, and the job
-         # serialization logic takes care of filling them in. We set them
-         # here.
-         if command is not None:
-             desc.command = command
+         # Normally we can't pass in an ID, and the job serialization logic
+         # takes care of filling it in. We set it here.
          if jobStoreID is not None:
              desc.jobStoreID = jobStoreID

@@ -185,12 +182,12 @@ class hidden:

      @retry_flaky_test(prepare=[tearDown, setUp])
      def test_run_jobs(self):
-         jobDesc1 = self._mockJobDescription(command='sleep 1000', jobName='test1', unitName=None,
+         jobDesc1 = self._mockJobDescription(jobName='test1', unitName=None,
                                              jobStoreID='1', requirements=defaultRequirements)
-         jobDesc2 = self._mockJobDescription(command='sleep 1000', jobName='test2', unitName=None,
+         jobDesc2 = self._mockJobDescription(jobName='test2', unitName=None,
                                              jobStoreID='2', requirements=defaultRequirements)
-         job1 = self.batchSystem.issueBatchJob(jobDesc1)
-         job2 = self.batchSystem.issueBatchJob(jobDesc2)
+         job1 = self.batchSystem.issueBatchJob('sleep 1000', jobDesc1)
+         job2 = self.batchSystem.issueBatchJob('sleep 1000', jobDesc2)

          issuedIDs = self._waitForJobsToIssue(2)
          self.assertEqual(set(issuedIDs), {job1, job2})
@@ -219,9 +216,9 @@ class hidden:
          # then check for it having happened, but we can't guarantee that
          # the batch system will run against the same filesystem we are
          # looking at.
-         jobDesc3 = self._mockJobDescription(command="mktemp -d", jobName='test3', unitName=None,
+         jobDesc3 = self._mockJobDescription(jobName='test3', unitName=None,
                                              jobStoreID='3', requirements=defaultRequirements)
-         job3 = self.batchSystem.issueBatchJob(jobDesc3)
+         job3 = self.batchSystem.issueBatchJob("mktemp -d", jobDesc3)

          jobUpdateInfo = self.batchSystem.getUpdatedBatchJob(maxWait=1000)
          jobID, exitStatus, wallTime = jobUpdateInfo.jobID, jobUpdateInfo.exitStatus, jobUpdateInfo.wallTime
@@ -252,18 +249,18 @@ class hidden:

          # Turn into a string which convinces bash to take all args and paste them back together and run them
          command = "bash -c \"\\${@}\" bash eval " + script_protected
-         jobDesc4 = self._mockJobDescription(command=command, jobName='test4', unitName=None,
+         jobDesc4 = self._mockJobDescription(jobName='test4', unitName=None,
                                              jobStoreID='4', requirements=defaultRequirements)
-         job4 = self.batchSystem.issueBatchJob(jobDesc4)
+         job4 = self.batchSystem.issueBatchJob(command, jobDesc4)
          jobUpdateInfo = self.batchSystem.getUpdatedBatchJob(maxWait=1000)
          jobID, exitStatus, wallTime = jobUpdateInfo.jobID, jobUpdateInfo.exitStatus, jobUpdateInfo.wallTime
          self.assertEqual(exitStatus, 42)
          self.assertEqual(jobID, job4)
          # Now set the variable and ensure that it is present
          self.batchSystem.setEnv('FOO', 'bar')
-         jobDesc5 = self._mockJobDescription(command=command, jobName='test5', unitName=None,
+         jobDesc5 = self._mockJobDescription(jobName='test5', unitName=None,
                                              jobStoreID='5', requirements=defaultRequirements)
-         job5 = self.batchSystem.issueBatchJob(jobDesc5)
+         job5 = self.batchSystem.issueBatchJob(command, jobDesc5)
          jobUpdateInfo = self.batchSystem.getUpdatedBatchJob(maxWait=1000)
          self.assertEqual(jobUpdateInfo.exitStatus, 23)
          self.assertEqual(jobUpdateInfo.jobID, job5)
@@ -274,18 +271,18 @@ class hidden:
          command = "bash -c \"\\${@}\" bash eval " + script.replace(';', r'\;')

          # Issue a job with a job environment variable
-         job_desc_6 = self._mockJobDescription(command=command, jobName='test6', unitName=None,
+         job_desc_6 = self._mockJobDescription(jobName='test6', unitName=None,
                                                jobStoreID='6', requirements=defaultRequirements)
-         job6 = self.batchSystem.issueBatchJob(job_desc_6, job_environment={
+         job6 = self.batchSystem.issueBatchJob(command, job_desc_6, job_environment={
              'FOO': 'bar'
          })
          job_update_info = self.batchSystem.getUpdatedBatchJob(maxWait=1000)
          self.assertEqual(job_update_info.exitStatus, 23)  # this should succeed
          self.assertEqual(job_update_info.jobID, job6)
          # Now check that the environment variable doesn't exist for other jobs
-         job_desc_7 = self._mockJobDescription(command=command, jobName='test7', unitName=None,
+         job_desc_7 = self._mockJobDescription(jobName='test7', unitName=None,
                                                jobStoreID='7', requirements=defaultRequirements)
-         job7 = self.batchSystem.issueBatchJob(job_desc_7)
+         job7 = self.batchSystem.issueBatchJob(command, job_desc_7)
          job_update_info = self.batchSystem.getUpdatedBatchJob(maxWait=1000)
          self.assertEqual(job_update_info.exitStatus, 42)
          self.assertEqual(job_update_info.jobID, job7)
@@ -619,9 +616,9 @@ class MesosBatchSystemTest(hidden.AbstractBatchSystemTest, MesosTestSupport):

      def testIgnoreNode(self):
          self.batchSystem.ignoreNode('localhost')
-         jobDesc = self._mockJobDescription(command='sleep 1000', jobName='test2', unitName=None,
+         jobDesc = self._mockJobDescription(jobName='test2', unitName=None,
                                             jobStoreID='1', requirements=defaultRequirements)
-         job = self.batchSystem.issueBatchJob(jobDesc)
+         job = self.batchSystem.issueBatchJob('sleep 1000', jobDesc)

          issuedID = self._waitForJobsToIssue(1)
          self.assertEqual(set(issuedID), {job})
@@ -731,8 +728,13 @@ class SingleMachineBatchSystemTest(hidden.AbstractBatchSystemTest):
          command += ' hide'

          # Start the job
-         self.batchSystem.issueBatchJob(self._mockJobDescription(command=command, jobName='fork',
-                                        jobStoreID='1', requirements=defaultRequirements))
+         self.batchSystem.issueBatchJob(
+             command,
+             self._mockJobDescription(
+                 jobName='fork',
+                 jobStoreID='1',
+                 requirements=defaultRequirements)
+         )
          # Wait
          time.sleep(10)

@@ -863,13 +865,18 @@ class MaxCoresSingleMachineBatchSystemTest(ToilTest):
          try:
              jobIds = set()
              for i in range(0, int(jobs)):
-                 jobIds.add(bs.issueBatchJob(JobDescription(command=self.scriptCommand(),
-                                                            requirements=dict(
-                                                                cores=float(coresPerJob),
-                                                                memory=1, disk=1,
-                                                                accelerators=[],
-                                                                preemptible=preemptible),
-                                                            jobName=str(i), unitName='')))
+                 desc = JobDescription(
+                     requirements=dict(
+                         cores=float(coresPerJob),
+                         memory=1,
+                         disk=1,
+                         accelerators=[],
+                         preemptible=preemptible
+                     ),
+                     jobName=str(i),
+                     unitName=''
+                 )
+                 jobIds.add(bs.issueBatchJob(self.scriptCommand(), desc))
              self.assertEqual(len(jobIds), jobs)
              while jobIds:
                  job = bs.getUpdatedBatchJob(maxWait=10)
@@ -894,7 +901,7 @@ class MaxCoresSingleMachineBatchSystemTest(ToilTest):
      @skipIf(SingleMachineBatchSystem.numCores < 3, 'Need at least three cores to run this test')
      def testServices(self):
          options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
-         options.logDebug = True
+         options.logLevel = "DEBUG"
          options.maxCores = 3
          self.assertTrue(options.maxCores <= SingleMachineBatchSystem.numCores)
          Job.Runner.startToil(Job.wrapJobFn(parentJob, self.scriptCommand()), options)
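Every edit in this file traces back to one 7.0.0 API change: the command string no longer travels on the JobDescription and is instead the first argument to issueBatchJob. A before/after sketch, with `batch_system` standing in for any concrete batch system instance and requirements shaped like the tests' defaultRequirements:

```python
from toil.job import JobDescription

requirements = dict(cores=1, memory=32 * 1024 * 1024, disk=32 * 1024 * 1024,
                    accelerators=[], preemptible=False)
desc = JobDescription(requirements=requirements, jobName='example', unitName='')

# Toil 6.x (no longer valid): the command was attached to the description.
#   desc.command = 'sleep 1000'
#   job_id = batch_system.issueBatchJob(desc)

# Toil 7.0.0: the command is passed explicitly, alongside the description.
#   job_id = batch_system.issueBatchJob('sleep 1000', desc)
#   job_id = batch_system.issueBatchJob('sleep 1000', desc,
#                                       job_environment={'FOO': 'bar'})
```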
toil/test/batchSystems/batch_system_plugin_test.py ADDED
@@ -0,0 +1,79 @@
+ # Copyright (C) 2015-2021 Regents of the University of California
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ import logging
+ from typing import Optional, Dict, List, Type
+ from configargparse import ArgParser, ArgumentParser
+
+ from toil.batchSystems.abstractBatchSystem import (AbstractBatchSystem, UpdatedBatchJobInfo)
+ from toil.batchSystems.cleanup_support import BatchSystemCleanupSupport
+ from toil.batchSystems.options import OptionSetter
+ from toil.batchSystems.registry import add_batch_system_factory
+ from toil.common import Toil, addOptions
+ from toil.job import JobDescription
+ from toil.test import ToilTest
+
+ logger = logging.getLogger(__name__)
+
+
+ class FakeBatchSystem(BatchSystemCleanupSupport):
+     @classmethod
+     def supportsAutoDeployment(cls) -> bool:
+         pass
+
+     def issueBatchJob(self, command: str, job_desc: JobDescription, job_environment: Optional[Dict[str, str]] = None) -> int:
+         pass
+
+     def killBatchJobs(self, jobIDs: List[int]) -> None:
+         pass
+
+     def getIssuedBatchJobIDs(self) -> List[int]:
+         pass
+
+     def getRunningBatchJobIDs(self) -> Dict[int, float]:
+         pass
+
+     def getUpdatedBatchJob(self, maxWait: int) -> Optional[UpdatedBatchJobInfo]:
+         pass
+
+     def shutdown(self) -> None:
+         pass
+
+     @classmethod
+     def add_options(cls, parser: ArgumentParser) -> None:
+         parser.add_argument("--fake_argument", default="exists")
+
+     @classmethod
+     def setOptions(cls, setOption: OptionSetter) -> None:
+         setOption("fake_argument")
+
+ class BatchSystemPluginTest(ToilTest):
+     def test_batchsystem_plugin_installable(self):
+         """
+         Test that installing a batch system plugin works.
+         :return:
+         """
+         def fake_batch_system_factory() -> Type[AbstractBatchSystem]:
+             return FakeBatchSystem
+
+         add_batch_system_factory("fake", fake_batch_system_factory)
+
+         parser = ArgParser()
+         addOptions(parser)
+
+         options = parser.parse_args(["test-jobstore", "--clean=always"])
+
+         # try to install a batchsystem plugin with some arguments
+         # if the arguments exists, the values should also exist in the config
+         with Toil(options) as toil:
+             self.assertEqual(toil.config.fake_argument == "exists", True)
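This new test also doubles as a recipe for third-party batch system plugins: implement the AbstractBatchSystem interface, register a factory under a name, and any options declared in add_options() land on the Toil config. A sketch of out-of-tree registration under those assumptions (the `mypackage` module and `MyBatchSystem` class are hypothetical):

```python
from typing import Type

from toil.batchSystems.abstractBatchSystem import AbstractBatchSystem
from toil.batchSystems.registry import add_batch_system_factory


def my_batch_system_factory() -> Type[AbstractBatchSystem]:
    # Import lazily so Toil can parse options without the plugin's
    # dependencies installed.
    from mypackage.batch_system import MyBatchSystem  # hypothetical
    return MyBatchSystem


# After this call, the plugin should be selectable like the built-in
# batch systems (e.g. via --batchSystem mybatchsystem).
add_batch_system_factory("mybatchsystem", my_batch_system_factory)
```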