toil 5.12.0__py3-none-any.whl → 6.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (164)
  1. toil/__init__.py +18 -13
  2. toil/batchSystems/abstractBatchSystem.py +39 -13
  3. toil/batchSystems/abstractGridEngineBatchSystem.py +24 -24
  4. toil/batchSystems/awsBatch.py +14 -14
  5. toil/batchSystems/cleanup_support.py +7 -3
  6. toil/batchSystems/contained_executor.py +3 -3
  7. toil/batchSystems/htcondor.py +0 -1
  8. toil/batchSystems/kubernetes.py +34 -31
  9. toil/batchSystems/local_support.py +3 -1
  10. toil/batchSystems/lsf.py +7 -7
  11. toil/batchSystems/mesos/batchSystem.py +7 -7
  12. toil/batchSystems/options.py +32 -83
  13. toil/batchSystems/registry.py +104 -23
  14. toil/batchSystems/singleMachine.py +16 -13
  15. toil/batchSystems/slurm.py +87 -16
  16. toil/batchSystems/torque.py +0 -1
  17. toil/bus.py +44 -8
  18. toil/common.py +544 -753
  19. toil/cwl/__init__.py +28 -32
  20. toil/cwl/cwltoil.py +595 -574
  21. toil/cwl/utils.py +55 -10
  22. toil/exceptions.py +1 -1
  23. toil/fileStores/__init__.py +2 -2
  24. toil/fileStores/abstractFileStore.py +88 -14
  25. toil/fileStores/cachingFileStore.py +610 -549
  26. toil/fileStores/nonCachingFileStore.py +46 -22
  27. toil/job.py +182 -101
  28. toil/jobStores/abstractJobStore.py +161 -95
  29. toil/jobStores/aws/jobStore.py +23 -9
  30. toil/jobStores/aws/utils.py +6 -6
  31. toil/jobStores/fileJobStore.py +116 -18
  32. toil/jobStores/googleJobStore.py +16 -7
  33. toil/jobStores/utils.py +5 -6
  34. toil/leader.py +87 -56
  35. toil/lib/accelerators.py +10 -5
  36. toil/lib/aws/__init__.py +3 -14
  37. toil/lib/aws/ami.py +22 -9
  38. toil/lib/aws/iam.py +21 -13
  39. toil/lib/aws/session.py +2 -16
  40. toil/lib/aws/utils.py +4 -5
  41. toil/lib/compatibility.py +1 -1
  42. toil/lib/conversions.py +26 -3
  43. toil/lib/docker.py +22 -23
  44. toil/lib/ec2.py +10 -6
  45. toil/lib/ec2nodes.py +106 -100
  46. toil/lib/encryption/_nacl.py +2 -1
  47. toil/lib/generatedEC2Lists.py +325 -18
  48. toil/lib/io.py +49 -2
  49. toil/lib/misc.py +1 -1
  50. toil/lib/resources.py +9 -2
  51. toil/lib/threading.py +101 -38
  52. toil/options/common.py +736 -0
  53. toil/options/cwl.py +336 -0
  54. toil/options/wdl.py +37 -0
  55. toil/provisioners/abstractProvisioner.py +9 -4
  56. toil/provisioners/aws/__init__.py +3 -6
  57. toil/provisioners/aws/awsProvisioner.py +6 -0
  58. toil/provisioners/clusterScaler.py +3 -2
  59. toil/provisioners/gceProvisioner.py +2 -2
  60. toil/realtimeLogger.py +2 -1
  61. toil/resource.py +24 -18
  62. toil/server/app.py +2 -3
  63. toil/server/cli/wes_cwl_runner.py +4 -4
  64. toil/server/utils.py +1 -1
  65. toil/server/wes/abstract_backend.py +3 -2
  66. toil/server/wes/amazon_wes_utils.py +5 -4
  67. toil/server/wes/tasks.py +2 -3
  68. toil/server/wes/toil_backend.py +2 -10
  69. toil/server/wsgi_app.py +2 -0
  70. toil/serviceManager.py +12 -10
  71. toil/statsAndLogging.py +41 -9
  72. toil/test/__init__.py +29 -54
  73. toil/test/batchSystems/batchSystemTest.py +11 -111
  74. toil/test/batchSystems/test_slurm.py +24 -8
  75. toil/test/cactus/__init__.py +0 -0
  76. toil/test/cactus/test_cactus_integration.py +58 -0
  77. toil/test/cwl/cwlTest.py +438 -223
  78. toil/test/cwl/glob_dir.cwl +15 -0
  79. toil/test/cwl/preemptible.cwl +21 -0
  80. toil/test/cwl/preemptible_expression.cwl +28 -0
  81. toil/test/cwl/revsort.cwl +1 -1
  82. toil/test/cwl/revsort2.cwl +1 -1
  83. toil/test/docs/scriptsTest.py +2 -3
  84. toil/test/jobStores/jobStoreTest.py +34 -21
  85. toil/test/lib/aws/test_iam.py +4 -14
  86. toil/test/lib/aws/test_utils.py +0 -3
  87. toil/test/lib/dockerTest.py +4 -4
  88. toil/test/lib/test_ec2.py +12 -17
  89. toil/test/mesos/helloWorld.py +4 -5
  90. toil/test/mesos/stress.py +1 -1
  91. toil/test/{wdl/conftest.py → options/__init__.py} +0 -10
  92. toil/test/options/options.py +37 -0
  93. toil/test/provisioners/aws/awsProvisionerTest.py +9 -5
  94. toil/test/provisioners/clusterScalerTest.py +6 -4
  95. toil/test/provisioners/clusterTest.py +23 -11
  96. toil/test/provisioners/gceProvisionerTest.py +0 -6
  97. toil/test/provisioners/restartScript.py +3 -2
  98. toil/test/server/serverTest.py +1 -1
  99. toil/test/sort/restart_sort.py +2 -1
  100. toil/test/sort/sort.py +2 -1
  101. toil/test/sort/sortTest.py +2 -13
  102. toil/test/src/autoDeploymentTest.py +45 -45
  103. toil/test/src/busTest.py +5 -5
  104. toil/test/src/checkpointTest.py +2 -2
  105. toil/test/src/deferredFunctionTest.py +1 -1
  106. toil/test/src/fileStoreTest.py +32 -16
  107. toil/test/src/helloWorldTest.py +1 -1
  108. toil/test/src/importExportFileTest.py +1 -1
  109. toil/test/src/jobDescriptionTest.py +2 -1
  110. toil/test/src/jobServiceTest.py +1 -1
  111. toil/test/src/jobTest.py +18 -18
  112. toil/test/src/miscTests.py +5 -3
  113. toil/test/src/promisedRequirementTest.py +3 -3
  114. toil/test/src/realtimeLoggerTest.py +1 -1
  115. toil/test/src/resourceTest.py +2 -2
  116. toil/test/src/restartDAGTest.py +1 -1
  117. toil/test/src/resumabilityTest.py +36 -2
  118. toil/test/src/retainTempDirTest.py +1 -1
  119. toil/test/src/systemTest.py +2 -2
  120. toil/test/src/toilContextManagerTest.py +2 -2
  121. toil/test/src/userDefinedJobArgTypeTest.py +1 -1
  122. toil/test/utils/toilDebugTest.py +98 -32
  123. toil/test/utils/toilKillTest.py +2 -2
  124. toil/test/utils/utilsTest.py +23 -3
  125. toil/test/wdl/wdltoil_test.py +223 -45
  126. toil/toilState.py +7 -6
  127. toil/utils/toilClean.py +1 -1
  128. toil/utils/toilConfig.py +36 -0
  129. toil/utils/toilDebugFile.py +60 -33
  130. toil/utils/toilDebugJob.py +39 -12
  131. toil/utils/toilDestroyCluster.py +1 -1
  132. toil/utils/toilKill.py +1 -1
  133. toil/utils/toilLaunchCluster.py +13 -2
  134. toil/utils/toilMain.py +3 -2
  135. toil/utils/toilRsyncCluster.py +1 -1
  136. toil/utils/toilSshCluster.py +1 -1
  137. toil/utils/toilStats.py +445 -305
  138. toil/utils/toilStatus.py +2 -5
  139. toil/version.py +10 -10
  140. toil/wdl/utils.py +2 -122
  141. toil/wdl/wdltoil.py +1257 -492
  142. toil/worker.py +55 -46
  143. toil-6.1.0.dist-info/METADATA +124 -0
  144. toil-6.1.0.dist-info/RECORD +241 -0
  145. {toil-5.12.0.dist-info → toil-6.1.0.dist-info}/WHEEL +1 -1
  146. {toil-5.12.0.dist-info → toil-6.1.0.dist-info}/entry_points.txt +0 -1
  147. toil/batchSystems/parasol.py +0 -379
  148. toil/batchSystems/tes.py +0 -459
  149. toil/test/batchSystems/parasolTestSupport.py +0 -117
  150. toil/test/wdl/builtinTest.py +0 -506
  151. toil/test/wdl/toilwdlTest.py +0 -522
  152. toil/wdl/toilwdl.py +0 -141
  153. toil/wdl/versions/dev.py +0 -107
  154. toil/wdl/versions/draft2.py +0 -980
  155. toil/wdl/versions/v1.py +0 -794
  156. toil/wdl/wdl_analysis.py +0 -116
  157. toil/wdl/wdl_functions.py +0 -997
  158. toil/wdl/wdl_synthesis.py +0 -1011
  159. toil/wdl/wdl_types.py +0 -243
  160. toil-5.12.0.dist-info/METADATA +0 -118
  161. toil-5.12.0.dist-info/RECORD +0 -244
  162. /toil/{wdl/versions → options}/__init__.py +0 -0
  163. {toil-5.12.0.dist-info → toil-6.1.0.dist-info}/LICENSE +0 -0
  164. {toil-5.12.0.dist-info → toil-6.1.0.dist-info}/top_level.txt +0 -0
toil/server/utils.py CHANGED
@@ -193,7 +193,7 @@ class AbstractStateStore:
193
193
  This is a key-value store, with keys namespaced by workflow ID. Concurrent
194
194
  access from multiple threads or processes is safe and globally consistent.
195
195
 
196
- Keys and workflow IDs are restricted to [-a-zA-Z0-9_], because backends may
196
+ Keys and workflow IDs are restricted to ``[-a-zA-Z0-9_]``, because backends may
197
197
  use them as path or URL components.
198
198
 
199
199
  Key values are either a string, or None if the key is not set.
@@ -3,7 +3,6 @@ import functools
3
3
  import json
4
4
  import logging
5
5
  import os
6
- import tempfile
7
6
  from abc import abstractmethod
8
7
  from typing import Any, Callable, Dict, List, Optional, Tuple, Union
9
8
  from urllib.parse import urldefrag
@@ -11,6 +10,8 @@ from urllib.parse import urldefrag
11
10
  import connexion # type: ignore
12
11
  from werkzeug.utils import secure_filename
13
12
 
13
+ from toil.lib.io import mkdtemp
14
+
14
15
  logger = logging.getLogger(__name__)
15
16
 
16
17
  # Define a type for WES task log entries in responses
@@ -210,7 +211,7 @@ class WESBackend:
210
211
  If None, a temporary directory is created.
211
212
  """
212
213
  if not temp_dir:
213
- temp_dir = tempfile.mkdtemp()
214
+ temp_dir = mkdtemp()
214
215
  body: Dict[str, Any] = {}
215
216
  has_attachments = False
216
217
  for key, ls in connexion.request.files.lists():
@@ -20,11 +20,10 @@
20
20
 
21
21
  import json
22
22
  import logging
23
- import os
24
23
  import sys
25
24
  import zipfile
26
25
  from os import path
27
- from typing import IO, Any, Dict, List, Optional, Union, cast
26
+ from typing import IO, List, Optional, cast
28
27
 
29
28
  if sys.version_info >= (3, 8):
30
29
  from typing import TypedDict
@@ -164,15 +163,17 @@ def parse_workflow_manifest_file(manifest_file: str) -> WorkflowPlan:
164
163
  :rtype: dict of `data` and `files`
165
164
 
166
165
  MANIFEST.json is expected to be formatted like:
166
+
167
167
  .. code-block:: json
168
+
168
169
  {
169
170
  "mainWorkflowURL": "relpath/to/workflow",
170
171
  "inputFileURLs": [
171
172
  "relpath/to/input-file-1",
172
173
  "relpath/to/input-file-2",
173
- ...
174
+ "relpath/to/input-file-3"
174
175
  ],
175
- "optionsFileURL" "relpath/to/option-file
176
+ "optionsFileURL": "relpath/to/option-file"
176
177
  }
177
178
 
178
179
  The `mainWorkflowURL` property that provides a relative file path in the zip to a workflow file, which will be set as `workflowSource`
toil/server/wes/tasks.py CHANGED
@@ -11,7 +11,6 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
- import fcntl
15
14
  import json
16
15
  import logging
17
16
  import multiprocessing
@@ -134,8 +133,8 @@ class ToilWorkflowRunner:
134
133
  the workflow execution engine.
135
134
 
136
135
  :param workflow_engine_parameters: User-specified parameters for this
137
- particular workflow. Keys are command-line options, and values are
138
- option arguments, or None for options that are flags.
136
+ particular workflow. Keys are command-line options, and values are
137
+ option arguments, or None for options that are flags.
139
138
  """
140
139
  options = []
141
140
 
@@ -16,9 +16,8 @@ import logging
16
16
  import os
17
17
  import shutil
18
18
  import uuid
19
- from collections import Counter, defaultdict
19
+ from collections import Counter
20
20
  from contextlib import contextmanager
21
- from tempfile import NamedTemporaryFile
22
21
  from typing import (Any,
23
22
  Callable,
24
23
  Dict,
@@ -32,18 +31,11 @@ from typing import (Any,
32
31
  overload)
33
32
 
34
33
  from flask import send_from_directory
35
- from flask.globals import request as flask_request
36
34
  from werkzeug.utils import redirect
37
35
  from werkzeug.wrappers.response import Response
38
36
 
39
37
  import toil.server.wes.amazon_wes_utils as amazon_wes_utils
40
- from toil.bus import (JobAnnotationMessage,
41
- JobCompletedMessage,
42
- JobFailedMessage,
43
- JobIssuedMessage,
44
- JobUpdatedMessage,
45
- MessageBus,
46
- replay_message_bus, JobStatus)
38
+ from toil.bus import JobStatus, replay_message_bus
47
39
  from toil.lib.io import AtomicFileCreate
48
40
  from toil.lib.threading import global_mutex
49
41
  from toil.server.utils import (WorkflowStateMachine,
toil/server/wsgi_app.py CHANGED
@@ -21,6 +21,8 @@ class GunicornApplication(BaseApplication): # type: ignore
21
21
  An entry point to integrate a Gunicorn WSGI server in Python. To start a
22
22
  WSGI application with callable `app`, run the following code:
23
23
 
24
+ .. code-block:: python
25
+
24
26
  WSGIApplication(app, options={
25
27
  ...
26
28
  }).run()
toil/serviceManager.py CHANGED
@@ -122,7 +122,8 @@ class ServiceManager:
122
122
  try:
123
123
  client_id = self.__clients_out.get(timeout=maxWait)
124
124
  self.__waiting_clients.remove(client_id)
125
- assert self.__service_manager_jobs >= 0
125
+ if self.__service_manager_jobs < 0:
126
+ raise RuntimeError("The number of jobs scheduled by the service manager cannot be negative.")
126
127
  self.__service_manager_jobs -= 1
127
128
  return client_id
128
129
  except Empty:
@@ -139,7 +140,8 @@ class ServiceManager:
139
140
  try:
140
141
  client_id = self.__failed_clients_out.get(timeout=maxWait)
141
142
  self.__waiting_clients.remove(client_id)
142
- assert self.__service_manager_jobs >= 0
143
+ if self.__service_manager_jobs < 0:
144
+ raise RuntimeError("The number of jobs scheduled by the service manager cannot be negative.")
143
145
  self.__service_manager_jobs -= 1
144
146
  return client_id
145
147
  except Empty:
@@ -154,7 +156,8 @@ class ServiceManager:
154
156
  """
155
157
  try:
156
158
  service_id = self.__services_out.get(timeout=maxWait)
157
- assert self.__service_manager_jobs >= 0
159
+ if self.__service_manager_jobs < 0:
160
+ raise RuntimeError("The number of jobs scheduled by the service manager cannot be negative.")
158
161
  self.__service_manager_jobs -= 1
159
162
  return service_id
160
163
  except Empty:
@@ -304,7 +307,8 @@ class ServiceManager:
304
307
  starting_services.remove(service_id)
305
308
  client_id = service_to_client[service_id]
306
309
  remaining_services_by_client[client_id] -= 1
307
- assert remaining_services_by_client[client_id] >= 0
310
+ if remaining_services_by_client[client_id] < 0:
311
+ raise RuntimeError("The number of remaining services cannot be negative.")
308
312
  del service_to_client[service_id]
309
313
  if not self.__job_store.file_exists(service_job_desc.errorJobStoreID):
310
314
  logger.error(
@@ -356,12 +360,10 @@ class ServiceManager:
356
360
  service_job_desc,
357
361
  service_job_desc.startJobStoreID,
358
362
  )
359
- assert self.__job_store.file_exists(
360
- service_job_desc.startJobStoreID
361
- ), f"Service manager attempted to start service {service_job_desc} that has already started"
362
- assert self.__toil_state.job_exists(
363
- str(service_job_desc.jobStoreID)
364
- ), f"Service manager attempted to start service {service_job_desc} that is not in the job store"
363
+ if not self.__job_store.file_exists(service_job_desc.startJobStoreID):
364
+ raise RuntimeError(f"Service manager attempted to start service {service_job_desc} that has already started")
365
+ if not self.__toil_state.job_exists(str(service_job_desc.jobStoreID)):
366
+ raise RuntimeError(f"Service manager attempted to start service {service_job_desc} that is not in the job store")
365
367
  # At this point the terminateJobStoreID and errorJobStoreID
366
368
  # could have been deleted, since the service can be killed at
367
369
  # any time! So we can't assert their presence here.
toil/statsAndLogging.py CHANGED
@@ -12,6 +12,7 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  import gzip
15
+ import io
15
16
  import json
16
17
  import logging
17
18
  import os
@@ -49,7 +50,7 @@ class StatsAndLogging:
49
50
  self._worker.start()
50
51
 
51
52
  @classmethod
52
- def formatLogStream(cls, stream: Union[IO[str], IO[bytes]], job_name: Optional[str] = None) -> str:
53
+ def formatLogStream(cls, stream: Union[IO[str], IO[bytes]], stream_name: str) -> str:
53
54
  """
54
55
  Given a stream of text or bytes, and the job name, job itself, or some
55
56
  other optional stringifyable identity info for the job, return a big
@@ -62,7 +63,7 @@ class StatsAndLogging:
62
63
 
63
64
  :param stream: The stream of text or bytes to print for the user.
64
65
  """
65
- lines = [f'Log from job "{job_name}" follows:', '=========>']
66
+ lines = [f'{stream_name} follows:', '=========>']
66
67
 
67
68
  for line in stream:
68
69
  if isinstance(line, bytes):
@@ -75,13 +76,13 @@ class StatsAndLogging:
75
76
 
76
77
 
77
78
  @classmethod
78
- def logWithFormatting(cls, jobStoreID: str, jobLogs: Union[IO[str], IO[bytes]], method: Callable[[str], None] = logger.debug,
79
+ def logWithFormatting(cls, stream_name: str, jobLogs: Union[IO[str], IO[bytes]], method: Callable[[str], None] = logger.debug,
79
80
  message: Optional[str] = None) -> None:
80
81
  if message is not None:
81
82
  method(message)
82
83
 
83
- # Format and log the logs, identifying the job with its job store ID.
84
- method(cls.formatLogStream(jobLogs, jobStoreID))
84
+ # Format and log the logs, identifying the stream with the given name.
85
+ method(cls.formatLogStream(jobLogs, stream_name))
85
86
 
86
87
  @classmethod
87
88
  def writeLogFiles(cls, jobNames: List[str], jobLogList: List[str], config: 'Config', failed: bool = False) -> None:
@@ -95,7 +96,7 @@ class StatsAndLogging:
95
96
  logName = ('failed_' if failed else '') + logName
96
97
  counter = 0
97
98
  while True:
98
- suffix = str(counter).zfill(3) + logExtension
99
+ suffix = '_' + str(counter).zfill(3) + logExtension
99
100
  fullName = os.path.join(logPath, logName + suffix)
100
101
  # The maximum file name size in the default HFS+ file system is 255 UTF-16 encoding units, so basically 255 characters
101
102
  if len(fullName) >= 255:
@@ -118,6 +119,9 @@ class StatsAndLogging:
118
119
  # we don't have anywhere to write the logs, return now
119
120
  return
120
121
 
122
+ # Make sure the destination exists
123
+ os.makedirs(path, exist_ok=True)
124
+
121
125
  fullName = createName(path, mainFileName, extension, failed)
122
126
  with writeFn(fullName, 'wb') as f:
123
127
  for l in jobLogList:
@@ -148,16 +152,42 @@ class StatsAndLogging:
148
152
  if not isinstance(statsStr, str):
149
153
  statsStr = statsStr.decode()
150
154
  stats = json.loads(statsStr, object_hook=Expando)
155
+ if not stats:
156
+ return
157
+
151
158
  try:
152
- logs = stats.workers.logsToMaster
159
+ # Handle all the log_to_leader messages
160
+ logs = stats.workers.logs_to_leader
153
161
  except AttributeError:
154
- # To be expected if there were no calls to logToMaster()
162
+ # To be expected if there were no calls to log_to_leader()
155
163
  pass
156
164
  else:
157
165
  for message in logs:
158
166
  logger.log(int(message.level),
159
167
  'Got message from job at time %s: %s',
160
168
  time.strftime('%m-%d-%Y %H:%M:%S'), message.text)
169
+
170
+ try:
171
+ # Handle all the user-level text streams reported back (command output, etc.)
172
+ user_logs = stats.workers.logging_user_streams
173
+ except AttributeError:
174
+ # To be expected if there were no calls to log_user_stream()
175
+ pass
176
+ else:
177
+ for stream_entry in user_logs:
178
+ try:
179
+ # Unpack the stream name and text.
180
+ name, text = stream_entry.name, stream_entry.text
181
+ except AttributeError:
182
+ # Doesn't have a user-provided stream name and stream
183
+ # text, so skip it.
184
+ continue
185
+ # Since this is sent as inline text we need to pretend to stream it.
186
+ # TODO: Save these as individual files if they start to get too big?
187
+ cls.logWithFormatting(name, io.StringIO(text), logger.info)
188
+ # Save it as a log file, as if it were a Toil-level job.
189
+ cls.writeLogFiles([name], [text], config=config)
190
+
161
191
  try:
162
192
  logs = stats.logs
163
193
  except AttributeError:
@@ -166,7 +196,7 @@ class StatsAndLogging:
166
196
  # we may have multiple jobs per worker
167
197
  jobNames = logs.names
168
198
  messages = logs.messages
169
- cls.logWithFormatting(jobNames[0], messages,
199
+ cls.logWithFormatting(f'Log from job "{jobNames[0]}"', messages,
170
200
  message='Received Toil worker log. Disable debug level logging to hide this output')
171
201
  cls.writeLogFiles(jobNames, messages, config=config)
172
202
 
@@ -225,6 +255,8 @@ def add_logging_options(parser: ArgumentParser) -> None:
225
255
  levels += [l.lower() for l in levels] + [l.upper() for l in levels]
226
256
  group.add_argument("--logOff", dest="logLevel", default=default_loglevel,
227
257
  action="store_const", const="CRITICAL", help="Same as --logCRITICAL.")
258
+ # Maybe deprecate the above in favor of --logLevel?
259
+
228
260
  group.add_argument("--logLevel", dest="logLevel", default=default_loglevel, choices=levels,
229
261
  help=f"Set the log level. Default: {default_loglevel}. Options: {levels}.")
230
262
  group.add_argument("--logFile", dest="logFile", help="File to log in.")
toil/test/__init__.py CHANGED
@@ -21,7 +21,6 @@ import shutil
21
21
  import signal
22
22
  import subprocess
23
23
  import sys
24
- import tempfile
25
24
  import threading
26
25
  import time
27
26
  import unittest
@@ -30,6 +29,7 @@ from abc import ABCMeta, abstractmethod
30
29
  from contextlib import contextmanager
31
30
  from inspect import getsource
32
31
  from shutil import which
32
+ from tempfile import mkstemp
33
33
  from textwrap import dedent
34
34
  from typing import (Any,
35
35
  Callable,
@@ -57,6 +57,7 @@ from toil import ApplianceImageNotFound, applianceSelf, toilPackageDirPath
57
57
  from toil.lib.accelerators import (have_working_nvidia_docker_runtime,
58
58
  have_working_nvidia_smi)
59
59
  from toil.lib.aws import running_on_ec2
60
+ from toil.lib.io import mkdtemp
60
61
  from toil.lib.iterables import concat
61
62
  from toil.lib.memoize import memoize
62
63
  from toil.lib.threading import ExceptionalThread, cpu_count
@@ -188,7 +189,7 @@ class ToilTest(unittest.TestCase):
188
189
  prefix.extend([_f for _f in names if _f])
189
190
  prefix.append('')
190
191
  temp_dir_path = os.path.realpath(
191
- tempfile.mkdtemp(dir=cls._tempBaseDir, prefix="-".join(prefix))
192
+ mkdtemp(dir=cls._tempBaseDir, prefix="-".join(prefix))
192
193
  )
193
194
  cls._tempDirs.append(temp_dir_path)
194
195
  return temp_dir_path
@@ -314,7 +315,7 @@ else:
314
315
  def get_temp_file(suffix: str = "", rootDir: Optional[str] = None) -> str:
315
316
  """Return a string representing a temporary file, that must be manually deleted."""
316
317
  if rootDir is None:
317
- handle, tmp_file = tempfile.mkstemp(suffix)
318
+ handle, tmp_file = mkstemp(suffix)
318
319
  os.close(handle)
319
320
  return tmp_file
320
321
  else:
@@ -359,10 +360,17 @@ def needs_rsync3(test_item: MT) -> MT:
359
360
  return test_item
360
361
 
361
362
 
363
+ def needs_online(test_item: MT) -> MT:
364
+ """Use as a decorator before test classes or methods to run only if we are meant to talk to the Internet."""
365
+ test_item = _mark_test('online', test_item)
366
+ if os.getenv('TOIL_SKIP_ONLINE', '').lower() == 'true':
367
+ return unittest.skip('Skipping online test.')(test_item)
368
+ return test_item
369
+
362
370
  def needs_aws_s3(test_item: MT) -> MT:
363
371
  """Use as a decorator before test classes or methods to run only if AWS S3 is usable."""
364
372
  # TODO: we just check for generic access to the AWS account
365
- test_item = _mark_test('aws-s3', test_item)
373
+ test_item = _mark_test('aws-s3', needs_online(test_item))
366
374
  try:
367
375
  from boto import config
368
376
  boto_credentials = config.get('Credentials', 'aws_access_key_id')
@@ -415,7 +423,7 @@ def needs_google_storage(test_item: MT) -> MT:
415
423
  Cloud is installed and we ought to be able to access public Google Storage
416
424
  URIs.
417
425
  """
418
- test_item = _mark_test('google-storage', test_item)
426
+ test_item = _mark_test('google-storage', needs_online(test_item))
419
427
  try:
420
428
  from google.cloud import storage # noqa
421
429
  except ImportError:
@@ -427,7 +435,7 @@ def needs_google_project(test_item: MT) -> MT:
427
435
  """
428
436
  Use as a decorator before test classes or methods to run only if we have a Google Cloud project set.
429
437
  """
430
- test_item = _mark_test('google-project', test_item)
438
+ test_item = _mark_test('google-project', needs_online(test_item))
431
439
  test_item = needs_env_var('TOIL_GOOGLE_PROJECTID', "a Google project ID")(test_item)
432
440
  return test_item
433
441
 
@@ -447,44 +455,19 @@ def needs_torque(test_item: MT) -> MT:
447
455
  return test_item
448
456
  return unittest.skip("Install PBS/Torque to include this test.")(test_item)
449
457
 
450
-
451
- def needs_tes(test_item: MT) -> MT:
452
- """Use as a decorator before test classes or methods to run only if TES is available."""
453
- test_item = _mark_test('tes', test_item)
454
-
455
- try:
456
- from toil.batchSystems.tes import TESBatchSystem
457
- except ImportError:
458
- return unittest.skip("Install py-tes to include this test")(test_item)
459
-
460
- tes_url = os.environ.get('TOIL_TES_ENDPOINT', TESBatchSystem.get_default_tes_endpoint())
461
- try:
462
- urlopen(tes_url)
463
- except HTTPError:
464
- # Funnel happens to 404 if TES is working. But any HTTPError means we
465
- # dialed somebody who picked up.
466
- pass
467
- except URLError:
468
- # Will give connection refused if we can't connect because the server's
469
- # not there. We can also get a "cannot assign requested address" if
470
- # we're on Kubernetes dialing localhost and !!creative things!! have
471
- # been done to the network stack.
472
- return unittest.skip(f"Run a TES server on {tes_url} to include this test")(test_item)
473
- return test_item
474
-
475
-
476
458
  def needs_kubernetes_installed(test_item: MT) -> MT:
477
459
  """Use as a decorator before test classes or methods to run only if Kubernetes is installed."""
478
460
  test_item = _mark_test('kubernetes', test_item)
479
461
  try:
480
462
  import kubernetes
463
+ str(kubernetes) # to prevent removal of this import
481
464
  except ImportError:
482
465
  return unittest.skip("Install Toil with the 'kubernetes' extra to include this test.")(test_item)
483
466
  return test_item
484
467
 
485
468
  def needs_kubernetes(test_item: MT) -> MT:
486
469
  """Use as a decorator before test classes or methods to run only if Kubernetes is installed and configured."""
487
- test_item = needs_kubernetes_installed(test_item)
470
+ test_item = needs_kubernetes_installed(needs_online(test_item))
488
471
  try:
489
472
  import kubernetes
490
473
  try:
@@ -514,14 +497,6 @@ def needs_mesos(test_item: MT) -> MT:
514
497
  return test_item
515
498
 
516
499
 
517
- def needs_parasol(test_item: MT) -> MT:
518
- """Use as decorator so tests are only run if Parasol is installed."""
519
- test_item = _mark_test('parasol', test_item)
520
- if which('parasol'):
521
- return test_item
522
- return unittest.skip("Install Parasol to include this test.")(test_item)
523
-
524
-
525
500
  def needs_slurm(test_item: MT) -> MT:
526
501
  """Use as a decorator before test classes or methods to run only if Slurm is installed."""
527
502
  test_item = _mark_test('slurm', test_item)
@@ -571,20 +546,20 @@ def needs_docker(test_item: MT) -> MT:
571
546
  Use as a decorator before test classes or methods to only run them if
572
547
  docker is installed and docker-based tests are enabled.
573
548
  """
574
- test_item = _mark_test('docker', test_item)
549
+ test_item = _mark_test('docker', needs_online(test_item))
575
550
  if os.getenv('TOIL_SKIP_DOCKER', '').lower() == 'true':
576
551
  return unittest.skip('Skipping docker test.')(test_item)
577
552
  if which('docker'):
578
553
  return test_item
579
554
  else:
580
555
  return unittest.skip("Install docker to include this test.")(test_item)
581
-
556
+
582
557
  def needs_singularity(test_item: MT) -> MT:
583
558
  """
584
559
  Use as a decorator before test classes or methods to only run them if
585
560
  singularity is installed.
586
561
  """
587
- test_item = _mark_test('singularity', test_item)
562
+ test_item = _mark_test('singularity', needs_online(test_item))
588
563
  if which('singularity'):
589
564
  return test_item
590
565
  else:
@@ -621,7 +596,7 @@ def needs_docker_cuda(test_item: MT) -> MT:
621
596
  Use as a decorator before test classes or methods to only run them if
622
597
  a CUDA setup is available through Docker.
623
598
  """
624
- test_item = _mark_test('docker_cuda', test_item)
599
+ test_item = _mark_test('docker_cuda', needs_online(test_item))
625
600
  if have_working_nvidia_docker_runtime():
626
601
  return test_item
627
602
  else:
@@ -677,7 +652,7 @@ def needs_celery_broker(test_item: MT) -> MT:
677
652
  """
678
653
  Use as a decorator before test classes or methods to run only if RabbitMQ is set up to take Celery jobs.
679
654
  """
680
- test_item = _mark_test('celery', test_item)
655
+ test_item = _mark_test('celery', needs_online(test_item))
681
656
  test_item = needs_env_var('TOIL_WES_BROKER_URL', "a URL to a RabbitMQ broker for Celery")(test_item)
682
657
  return test_item
683
658
 
@@ -686,7 +661,7 @@ def needs_wes_server(test_item: MT) -> MT:
686
661
  Use as a decorator before test classes or methods to run only if a WES
687
662
  server is available to run against.
688
663
  """
689
- test_item = _mark_test('wes_server', test_item)
664
+ test_item = _mark_test('wes_server', needs_online(test_item))
690
665
 
691
666
  wes_url = os.environ.get('TOIL_WES_ENDPOINT')
692
667
  if not wes_url:
@@ -744,7 +719,7 @@ def needs_fetchable_appliance(test_item: MT) -> MT:
744
719
  the Toil appliance Docker image is able to be downloaded from the Internet.
745
720
  """
746
721
 
747
- test_item = _mark_test('fetchable_appliance', test_item)
722
+ test_item = _mark_test('fetchable_appliance', needs_online(test_item))
748
723
  if os.getenv('TOIL_SKIP_DOCKER', '').lower() == 'true':
749
724
  return unittest.skip('Skipping docker test.')(test_item)
750
725
  try:
@@ -765,9 +740,7 @@ def integrative(test_item: MT) -> MT:
765
740
  Use this to decorate integration tests so as to skip them during regular builds.
766
741
 
767
742
  We define integration tests as A) involving other, non-Toil software components
768
- that we develop and/or B) having a higher cost (time or money). Note that brittleness
769
- does not qualify a test for being integrative. Neither does involvement of external
770
- services such as AWS, since that would cover most of Toil's test.
743
+ that we develop and/or B) having a higher cost (time or money).
771
744
  """
772
745
  test_item = _mark_test('integrative', test_item)
773
746
  if os.getenv('TOIL_TEST_INTEGRATIVE', '').lower() == 'true':
@@ -797,11 +770,13 @@ methodNamePartRegex = re.compile('^[a-zA-Z_0-9]+$')
797
770
  @contextmanager
798
771
  def timeLimit(seconds: int) -> Generator[None, None, None]:
799
772
  """
800
- http://stackoverflow.com/a/601168
801
- Use to limit the execution time of a function. Raises an exception if the execution of the
802
- function takes more than the specified amount of time.
773
+ Use to limit the execution time of a function.
774
+
775
+ Raises an exception if the execution of the function takes more than the
776
+ specified amount of time. See <http://stackoverflow.com/a/601168>.
803
777
 
804
778
  :param seconds: maximum allowable time, in seconds
779
+
805
780
  >>> import time
806
781
  >>> with timeLimit(2):
807
782
  ... time.sleep(1)