toil 9.1.1__py3-none-any.whl → 9.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (155)
  1. toil/__init__.py +5 -9
  2. toil/batchSystems/abstractBatchSystem.py +23 -22
  3. toil/batchSystems/abstractGridEngineBatchSystem.py +17 -12
  4. toil/batchSystems/awsBatch.py +8 -8
  5. toil/batchSystems/cleanup_support.py +4 -4
  6. toil/batchSystems/contained_executor.py +3 -3
  7. toil/batchSystems/gridengine.py +3 -4
  8. toil/batchSystems/htcondor.py +5 -5
  9. toil/batchSystems/kubernetes.py +65 -63
  10. toil/batchSystems/local_support.py +2 -3
  11. toil/batchSystems/lsf.py +6 -7
  12. toil/batchSystems/mesos/batchSystem.py +11 -7
  13. toil/batchSystems/mesos/test/__init__.py +1 -2
  14. toil/batchSystems/options.py +9 -10
  15. toil/batchSystems/registry.py +3 -7
  16. toil/batchSystems/singleMachine.py +8 -11
  17. toil/batchSystems/slurm.py +49 -38
  18. toil/batchSystems/torque.py +3 -4
  19. toil/bus.py +36 -34
  20. toil/common.py +129 -89
  21. toil/cwl/cwltoil.py +857 -729
  22. toil/cwl/utils.py +44 -35
  23. toil/fileStores/__init__.py +3 -1
  24. toil/fileStores/abstractFileStore.py +28 -30
  25. toil/fileStores/cachingFileStore.py +8 -8
  26. toil/fileStores/nonCachingFileStore.py +10 -21
  27. toil/job.py +159 -158
  28. toil/jobStores/abstractJobStore.py +68 -69
  29. toil/jobStores/aws/jobStore.py +249 -213
  30. toil/jobStores/aws/utils.py +13 -24
  31. toil/jobStores/fileJobStore.py +28 -22
  32. toil/jobStores/googleJobStore.py +21 -17
  33. toil/jobStores/utils.py +3 -7
  34. toil/leader.py +17 -22
  35. toil/lib/accelerators.py +6 -4
  36. toil/lib/aws/__init__.py +9 -10
  37. toil/lib/aws/ami.py +33 -19
  38. toil/lib/aws/iam.py +6 -6
  39. toil/lib/aws/s3.py +259 -157
  40. toil/lib/aws/session.py +76 -76
  41. toil/lib/aws/utils.py +51 -43
  42. toil/lib/checksum.py +19 -15
  43. toil/lib/compatibility.py +3 -2
  44. toil/lib/conversions.py +45 -18
  45. toil/lib/directory.py +29 -26
  46. toil/lib/docker.py +93 -99
  47. toil/lib/dockstore.py +77 -50
  48. toil/lib/ec2.py +39 -38
  49. toil/lib/ec2nodes.py +11 -4
  50. toil/lib/exceptions.py +8 -5
  51. toil/lib/ftp_utils.py +9 -14
  52. toil/lib/generatedEC2Lists.py +161 -20
  53. toil/lib/history.py +141 -97
  54. toil/lib/history_submission.py +163 -72
  55. toil/lib/io.py +27 -17
  56. toil/lib/memoize.py +2 -1
  57. toil/lib/misc.py +15 -11
  58. toil/lib/pipes.py +40 -25
  59. toil/lib/plugins.py +12 -8
  60. toil/lib/resources.py +1 -0
  61. toil/lib/retry.py +32 -38
  62. toil/lib/threading.py +12 -12
  63. toil/lib/throttle.py +1 -2
  64. toil/lib/trs.py +113 -51
  65. toil/lib/url.py +14 -23
  66. toil/lib/web.py +7 -2
  67. toil/options/common.py +18 -15
  68. toil/options/cwl.py +2 -2
  69. toil/options/runner.py +9 -5
  70. toil/options/wdl.py +1 -3
  71. toil/provisioners/__init__.py +9 -9
  72. toil/provisioners/abstractProvisioner.py +22 -20
  73. toil/provisioners/aws/__init__.py +20 -14
  74. toil/provisioners/aws/awsProvisioner.py +10 -8
  75. toil/provisioners/clusterScaler.py +19 -18
  76. toil/provisioners/gceProvisioner.py +2 -3
  77. toil/provisioners/node.py +11 -13
  78. toil/realtimeLogger.py +4 -4
  79. toil/resource.py +5 -5
  80. toil/server/app.py +2 -2
  81. toil/server/cli/wes_cwl_runner.py +11 -11
  82. toil/server/utils.py +18 -21
  83. toil/server/wes/abstract_backend.py +9 -8
  84. toil/server/wes/amazon_wes_utils.py +3 -3
  85. toil/server/wes/tasks.py +3 -5
  86. toil/server/wes/toil_backend.py +17 -21
  87. toil/server/wsgi_app.py +3 -3
  88. toil/serviceManager.py +3 -4
  89. toil/statsAndLogging.py +12 -13
  90. toil/test/__init__.py +33 -24
  91. toil/test/batchSystems/batchSystemTest.py +12 -11
  92. toil/test/batchSystems/batch_system_plugin_test.py +3 -5
  93. toil/test/batchSystems/test_slurm.py +38 -24
  94. toil/test/cwl/conftest.py +5 -6
  95. toil/test/cwl/cwlTest.py +194 -78
  96. toil/test/cwl/download_file_uri.json +6 -0
  97. toil/test/cwl/download_file_uri_no_hostname.json +6 -0
  98. toil/test/docs/scripts/tutorial_staging.py +1 -0
  99. toil/test/jobStores/jobStoreTest.py +9 -7
  100. toil/test/lib/aws/test_iam.py +1 -3
  101. toil/test/lib/aws/test_s3.py +1 -1
  102. toil/test/lib/dockerTest.py +9 -9
  103. toil/test/lib/test_ec2.py +12 -11
  104. toil/test/lib/test_history.py +4 -4
  105. toil/test/lib/test_trs.py +16 -14
  106. toil/test/lib/test_url.py +7 -6
  107. toil/test/lib/url_plugin_test.py +12 -18
  108. toil/test/provisioners/aws/awsProvisionerTest.py +10 -8
  109. toil/test/provisioners/clusterScalerTest.py +2 -5
  110. toil/test/provisioners/clusterTest.py +1 -3
  111. toil/test/server/serverTest.py +13 -4
  112. toil/test/sort/restart_sort.py +2 -6
  113. toil/test/sort/sort.py +3 -8
  114. toil/test/src/deferredFunctionTest.py +7 -7
  115. toil/test/src/environmentTest.py +1 -2
  116. toil/test/src/fileStoreTest.py +5 -5
  117. toil/test/src/importExportFileTest.py +5 -6
  118. toil/test/src/jobServiceTest.py +22 -14
  119. toil/test/src/jobTest.py +121 -25
  120. toil/test/src/miscTests.py +5 -7
  121. toil/test/src/promisedRequirementTest.py +8 -7
  122. toil/test/src/regularLogTest.py +2 -3
  123. toil/test/src/resourceTest.py +5 -8
  124. toil/test/src/restartDAGTest.py +5 -6
  125. toil/test/src/resumabilityTest.py +2 -2
  126. toil/test/src/retainTempDirTest.py +3 -3
  127. toil/test/src/systemTest.py +3 -3
  128. toil/test/src/threadingTest.py +1 -1
  129. toil/test/src/workerTest.py +1 -2
  130. toil/test/utils/toilDebugTest.py +6 -4
  131. toil/test/utils/toilKillTest.py +1 -1
  132. toil/test/utils/utilsTest.py +15 -14
  133. toil/test/wdl/wdltoil_test.py +247 -124
  134. toil/test/wdl/wdltoil_test_kubernetes.py +2 -2
  135. toil/toilState.py +2 -3
  136. toil/utils/toilDebugFile.py +3 -8
  137. toil/utils/toilDebugJob.py +1 -2
  138. toil/utils/toilLaunchCluster.py +1 -2
  139. toil/utils/toilSshCluster.py +2 -0
  140. toil/utils/toilStats.py +19 -24
  141. toil/utils/toilStatus.py +11 -14
  142. toil/version.py +10 -10
  143. toil/wdl/wdltoil.py +313 -209
  144. toil/worker.py +18 -12
  145. {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/METADATA +11 -14
  146. {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/RECORD +150 -153
  147. {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/WHEEL +1 -1
  148. toil/test/cwl/staging_cat.cwl +0 -27
  149. toil/test/cwl/staging_make_file.cwl +0 -25
  150. toil/test/cwl/staging_workflow.cwl +0 -43
  151. toil/test/cwl/zero_default.cwl +0 -61
  152. toil/test/utils/ABCWorkflowDebug/ABC.txt +0 -1
  153. {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/entry_points.txt +0 -0
  154. {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/licenses/LICENSE +0 -0
  155. {toil-9.1.1.dist-info → toil-9.2.0.dist-info}/top_level.txt +0 -0
toil/common.py CHANGED
@@ -14,8 +14,8 @@
14
14
  import json
15
15
  import logging
16
16
  import os
17
- import platform
18
17
  import pickle
18
+ import platform
19
19
  import re
20
20
  import signal
21
21
  import subprocess
@@ -33,13 +33,13 @@ from argparse import (
33
33
  _StoreFalseAction,
34
34
  _StoreTrueAction,
35
35
  )
36
+ from collections.abc import Callable
36
37
  from functools import lru_cache
37
38
  from types import TracebackType
38
39
  from typing import (
39
40
  IO,
40
41
  TYPE_CHECKING,
41
42
  Any,
42
- Callable,
43
43
  ContextManager,
44
44
  Literal,
45
45
  Optional,
@@ -72,12 +72,17 @@ from toil.bus import (
72
72
  from toil.fileStores import FileID
73
73
  from toil.lib.compatibility import deprecated
74
74
  from toil.lib.history import HistoryManager
75
- from toil.lib.history_submission import ask_user_about_publishing_metrics, create_history_submission, create_current_submission
76
- from toil.lib.io import AtomicFileCreate, try_path, get_toil_home
77
- from toil.lib.misc import StrPath
75
+ from toil.lib.history_submission import (
76
+ ask_user_about_publishing_metrics,
77
+ create_current_submission,
78
+ create_history_submission,
79
+ )
80
+ from toil.lib.io import AtomicFileCreate, get_toil_home, try_path
78
81
  from toil.lib.memoize import memoize
82
+ from toil.lib.misc import StrPath
79
83
  from toil.lib.retry import retry
80
84
  from toil.lib.threading import ensure_filesystem_lockable
85
+ from toil.lib.url import URLAccess
81
86
  from toil.options.common import JOBSTORE_HELP, add_base_toil_options
82
87
  from toil.options.cwl import add_cwl_options
83
88
  from toil.options.runner import add_runner_options
@@ -85,8 +90,7 @@ from toil.options.wdl import add_wdl_options
85
90
  from toil.provisioners import add_provisioner_options, cluster_factory
86
91
  from toil.realtimeLogger import RealtimeLogger
87
92
  from toil.statsAndLogging import add_logging_options, set_logging_from_options
88
- from toil.version import dockerRegistry, dockerTag, version, baseVersion
89
- from toil.lib.url import URLAccess
93
+ from toil.version import baseVersion, dockerRegistry, dockerTag, version
90
94
 
91
95
  if TYPE_CHECKING:
92
96
  from toil.batchSystems.abstractBatchSystem import AbstractBatchSystem
@@ -99,6 +103,7 @@ if TYPE_CHECKING:
99
103
  UUID_LENGTH = 32
100
104
  logger = logging.getLogger(__name__)
101
105
 
106
+
102
107
  @memoize
103
108
  def get_default_config_path() -> str:
104
109
  """
@@ -108,10 +113,11 @@ def get_default_config_path() -> str:
108
113
  """
109
114
  return os.path.join(get_toil_home(), "default.yaml")
110
115
 
116
+
111
117
  class Config:
112
118
  """Class to represent configuration operations for a toil workflow run."""
113
119
 
114
- logFile: Optional[str]
120
+ logFile: str | None
115
121
  logRotating: bool
116
122
  cleanWorkDir: str
117
123
  max_jobs: int
@@ -119,27 +125,27 @@ class Config:
119
125
  manualMemArgs: bool
120
126
  run_local_jobs_on_workers: bool
121
127
  coalesceStatusCalls: bool
122
- mesos_endpoint: Optional[str]
123
- mesos_framework_id: Optional[str]
124
- mesos_role: Optional[str]
128
+ mesos_endpoint: str | None
129
+ mesos_framework_id: str | None
130
+ mesos_role: str | None
125
131
  mesos_name: str
126
- kubernetes_host_path: Optional[str]
127
- kubernetes_owner: Optional[str]
128
- kubernetes_service_account: Optional[str]
132
+ kubernetes_host_path: str | None
133
+ kubernetes_owner: str | None
134
+ kubernetes_service_account: str | None
129
135
  kubernetes_pod_timeout: float
130
136
  kubernetes_privileged: bool
131
- kubernetes_pod_security_context: Optional[str]
132
- kubernetes_security_context: Optional[str]
137
+ kubernetes_pod_security_context: str | None
138
+ kubernetes_security_context: str | None
133
139
  tes_endpoint: str
134
140
  tes_user: str
135
141
  tes_password: str
136
142
  tes_bearer_token: str
137
- aws_batch_region: Optional[str]
138
- aws_batch_queue: Optional[str]
139
- aws_batch_job_role_arn: Optional[str]
143
+ aws_batch_region: str | None
144
+ aws_batch_queue: str | None
145
+ aws_batch_job_role_arn: str | None
140
146
  scale: float
141
147
  batchSystem: str
142
- batch_logs_dir: Optional[str]
148
+ batch_logs_dir: str | None
143
149
  """The backing scheduler will be instructed, if possible, to save logs
144
150
  to this directory, where the leader can read them."""
145
151
  statePollingWait: float
@@ -147,7 +153,7 @@ class Config:
147
153
  disableAutoDeployment: bool
148
154
 
149
155
  # Core options
150
- workflowID: Optional[str]
156
+ workflowID: str | None
151
157
  """This attribute uniquely identifies the job store and therefore the workflow. It is
152
158
  necessary in order to distinguish between two consecutive workflows for which
153
159
  self.jobStore is the same, e.g. when a job store name is reused after a previous run has
@@ -156,14 +162,14 @@ class Config:
156
162
  jobStore: str
157
163
  logLevel: str
158
164
  colored_logs: bool
159
- workDir: Optional[str]
160
- coordination_dir: Optional[str]
165
+ workDir: str | None
166
+ coordination_dir: str | None
161
167
  noStdOutErr: bool
162
168
  stats: bool
163
169
 
164
170
  # Because the stats option needs the jobStore to persist past the end of the run,
165
171
  # the clean default value depends the specified stats option and is determined in setOptions
166
- clean: Optional[str]
172
+ clean: str | None
167
173
  clusterStats: str
168
174
 
169
175
  # Restarting the workflow options
@@ -172,14 +178,14 @@ class Config:
172
178
  # Batch system options
173
179
 
174
180
  # File store options
175
- caching: Optional[bool]
181
+ caching: bool | None
176
182
  symlinkImports: bool
177
183
  moveOutputs: bool
178
184
  symlink_job_store_reads: bool
179
185
 
180
186
  # Autoscaling options
181
- provisioner: Optional[str]
182
- nodeTypes: list[tuple[set[str], Optional[float]]]
187
+ provisioner: str | None
188
+ nodeTypes: list[tuple[set[str], float | None]]
183
189
  minNodes: list[int]
184
190
  maxNodes: list[int]
185
191
  targetTime: float
@@ -194,12 +200,12 @@ class Config:
194
200
  # Parameters to limit service jobs, so preventing deadlock scheduling scenarios
195
201
  maxPreemptibleServiceJobs: int
196
202
  maxServiceJobs: int
197
- deadlockWait: Union[float, int]
198
- deadlockCheckInterval: Union[float, int]
203
+ deadlockWait: float | int
204
+ deadlockCheckInterval: float | int
199
205
 
200
206
  # Resource requirements
201
207
  defaultMemory: int
202
- defaultCores: Union[float, int]
208
+ defaultCores: float | int
203
209
  defaultDisk: int
204
210
  defaultPreemptible: bool
205
211
  # TODO: These names are generated programmatically in
@@ -224,17 +230,17 @@ class Config:
224
230
  writeLogs: str
225
231
  writeLogsGzip: str
226
232
  writeLogsFromAllJobs: bool
227
- write_messages: Optional[str]
233
+ write_messages: str | None
228
234
  realTimeLogging: bool
229
235
 
230
236
  # Data publishing
231
- publish_workflow_metrics: Union[Literal["all"], Literal["current"], Literal["no"], None]
237
+ publish_workflow_metrics: Literal["all"] | Literal["current"] | Literal["no"] | None
232
238
 
233
239
  # Misc
234
240
  environment: dict[str, str]
235
241
  disableChaining: bool
236
242
  disableJobStoreChecksumVerification: bool
237
- sseKey: Optional[str]
243
+ sseKey: str | None
238
244
  servicePollingInterval: int
239
245
  useAsync: bool
240
246
  forceDockerAppliance: bool
@@ -291,7 +297,7 @@ class Config:
291
297
  def setOptions(self, options: Namespace) -> None:
292
298
  """Creates a config object from the options object."""
293
299
 
294
- def set_option(option_name: str, old_names: Optional[list[str]] = None) -> None:
300
+ def set_option(option_name: str, old_names: list[str] | None = None) -> None:
295
301
  """
296
302
  Determine the correct value for the given option.
297
303
 
@@ -453,7 +459,9 @@ class Config:
453
459
  # Check for deprecated Toil built-in autoscaling
454
460
  # --provisioner is guaranteed to be set
455
461
  if self.provisioner is not None and self.batchSystem == "mesos":
456
- logger.warning("Toil built-in autoscaling with Mesos is deprecated as Mesos is no longer active. Please use Kubernetes-based autoscaling instead.")
462
+ logger.warning(
463
+ "Toil built-in autoscaling with Mesos is deprecated as Mesos is no longer active. Please use Kubernetes-based autoscaling instead."
464
+ )
457
465
 
458
466
  def check_configuration_consistency(self) -> None:
459
467
  """Old checks that cannot be fit into an action class for argparse"""
@@ -488,6 +496,7 @@ class Config:
488
496
  def __hash__(self) -> int:
489
497
  return self.__dict__.__hash__() # type: ignore
490
498
 
499
+
491
500
  def ensure_config(filepath: str) -> None:
492
501
  """
493
502
  If the config file at the filepath does not exist, create it.
@@ -553,8 +562,7 @@ def generate_config(filepath: str) -> None:
553
562
  "version",
554
563
  # Toil built-in autoscaling with mesos is deprecated as mesos has not been updated since Python 3.10
555
564
  "provisioner",
556
- "nodeTypes"
557
- "minNodes",
565
+ "nodeTypes" "minNodes",
558
566
  "maxNodes",
559
567
  "targetTime",
560
568
  "betaInertia",
@@ -563,7 +571,7 @@ def generate_config(filepath: str) -> None:
563
571
  "nodeStorage",
564
572
  "nodeStorageOverrides",
565
573
  "metrics",
566
- "assumeZeroOverhead"
574
+ "assumeZeroOverhead",
567
575
  )
568
576
 
569
577
  def create_config_dict_from_parser(parser: ArgumentParser) -> CommentedMap:
@@ -666,7 +674,8 @@ def generate_config(filepath: str) -> None:
666
674
  transform=lambda s: re.sub(r"^(.)", r"#\1", s, flags=re.MULTILINE),
667
675
  )
668
676
 
669
- def update_config(filepath: str, key: str, new_value: Union[str, bool, int, float]) -> None:
677
+
678
+ def update_config(filepath: str, key: str, new_value: str | bool | int | float) -> None:
670
679
  """
671
680
  Set the given top-level key to the given value in the given YAML config
672
681
  file.
@@ -681,7 +690,12 @@ def update_config(filepath: str, key: str, new_value: Union[str, bool, int, floa
681
690
  yaml = YAML(typ="rt")
682
691
  data = yaml.load(open(filepath))
683
692
 
684
- logger.info("Change config field %s from %s to %s", key, repr(data.get(key, None)), repr(new_value))
693
+ logger.info(
694
+ "Change config field %s from %s to %s",
695
+ key,
696
+ repr(data.get(key, None)),
697
+ repr(new_value),
698
+ )
685
699
 
686
700
  if isinstance(new_value, str):
687
701
  # Strings with some values (no, yes) will be interpreted as booleans on
@@ -696,11 +710,12 @@ def update_config(filepath: str, key: str, new_value: Union[str, bool, int, floa
696
710
  with open(temp_path, "w") as f:
697
711
  yaml.dump(data, f)
698
712
 
713
+
699
714
  def parser_with_common_options(
700
715
  provisioner_options: bool = False,
701
716
  jobstore_option: bool = True,
702
- prog: Optional[str] = None,
703
- default_log_level: Optional[int] = None,
717
+ prog: str | None = None,
718
+ default_log_level: int | None = None,
704
719
  ) -> ArgParser:
705
720
  """
706
721
  Get a command-line option parser for a Toil subcommand.
@@ -866,7 +881,11 @@ def addOptions(
866
881
  # So we make them accumulate to the same list.
867
882
  # Note that we will get a None in the list when there's no positional inputs.
868
883
  parser.add_argument(
869
- "inputs_uri", type=str, nargs='?', action="append", help="WDL input JSON URI"
884
+ "inputs_uri",
885
+ type=str,
886
+ nargs="?",
887
+ action="append",
888
+ help="WDL input JSON URI",
870
889
  )
871
890
  parser.add_argument(
872
891
  "--input",
@@ -961,7 +980,12 @@ class Toil(ContextManager["Toil"]):
961
980
  _provisioner: Optional["AbstractProvisioner"]
962
981
  _start_time: float
963
982
 
964
- def __init__(self, options: Namespace, workflow_name: Optional[str] = None, trs_spec: Optional[str] = None) -> None:
983
+ def __init__(
984
+ self,
985
+ options: Namespace,
986
+ workflow_name: str | None = None,
987
+ trs_spec: str | None = None,
988
+ ) -> None:
965
989
  """
966
990
  Initialize a Toil object from the given options.
967
991
 
@@ -985,7 +1009,8 @@ class Toil(ContextManager["Toil"]):
985
1009
  if workflow_name is None:
986
1010
  # Try to use the entrypoint file.
987
1011
  import __main__
988
- if hasattr(__main__, '__file__'):
1012
+
1013
+ if hasattr(__main__, "__file__"):
989
1014
  workflow_name = __main__.__file__
990
1015
  if workflow_name is None:
991
1016
  # If there's no file, say this is an interactive usage of Toil.
@@ -1021,7 +1046,9 @@ class Toil(ContextManager["Toil"]):
1021
1046
  jobStore.initialize(config)
1022
1047
  assert config.workflowID is not None
1023
1048
  # Record that there is a workflow being run
1024
- HistoryManager.record_workflow_creation(config.workflowID, self.canonical_locator(config.jobStore))
1049
+ HistoryManager.record_workflow_creation(
1050
+ config.workflowID, self.canonical_locator(config.jobStore)
1051
+ )
1025
1052
  else:
1026
1053
  jobStore.resume()
1027
1054
  # Merge configuration from job store with command line options
@@ -1041,9 +1068,9 @@ class Toil(ContextManager["Toil"]):
1041
1068
 
1042
1069
  def __exit__(
1043
1070
  self,
1044
- exc_type: Optional[type[BaseException]],
1045
- exc_val: Optional[BaseException],
1046
- exc_tb: Optional[TracebackType],
1071
+ exc_type: type[BaseException] | None,
1072
+ exc_val: BaseException | None,
1073
+ exc_tb: TracebackType | None,
1047
1074
  ) -> Literal[False]:
1048
1075
  """
1049
1076
  Clean up after a workflow invocation.
@@ -1055,9 +1082,13 @@ class Toil(ContextManager["Toil"]):
1055
1082
  # Record that this attempt to run the workflow succeeded or failed.
1056
1083
  # TODO: Get ahold of the timing from statsAndLogging instead of redoing it here!
1057
1084
  # To record the batch system, we need to avoid capturing typos/random text the user types instead of a real batch system.
1058
- batch_system_type="<Not Initialized>"
1085
+ batch_system_type = "<Not Initialized>"
1059
1086
  if hasattr(self, "_batchSystem"):
1060
- batch_system_type = type(self._batchSystem).__module__ + "." + type(self._batchSystem).__qualname__
1087
+ batch_system_type = (
1088
+ type(self._batchSystem).__module__
1089
+ + "."
1090
+ + type(self._batchSystem).__qualname__
1091
+ )
1061
1092
  HistoryManager.record_workflow_attempt(
1062
1093
  self.config.workflowID,
1063
1094
  self.config.workflowAttemptNumber,
@@ -1071,7 +1102,7 @@ class Toil(ContextManager["Toil"]):
1071
1102
  # This should always be major.minor.patch.
1072
1103
  python_version=platform.python_version(),
1073
1104
  platform_system=platform.system(),
1074
- platform_machine=platform.machine()
1105
+ platform_machine=platform.machine(),
1075
1106
  )
1076
1107
 
1077
1108
  if self.config.publish_workflow_metrics == "all":
@@ -1087,14 +1118,18 @@ class Toil(ContextManager["Toil"]):
1087
1118
  # history or something goes wrong.
1088
1119
  submission = create_history_submission()
1089
1120
 
1090
- elif self.config.publish_workflow_metrics == "current" and self.config.workflowID is not None:
1121
+ elif (
1122
+ self.config.publish_workflow_metrics == "current"
1123
+ and self.config.workflowID is not None
1124
+ ):
1091
1125
  # Publish metrics for this run only. Might be empty if we had no TRS ID.
1092
- create_current_submission(self.config.workflowID, self.config.workflowAttemptNumber).submit()
1126
+ create_current_submission(
1127
+ self.config.workflowID, self.config.workflowAttemptNumber
1128
+ ).submit()
1093
1129
 
1094
1130
  # Make sure the history doesn't stay too big
1095
1131
  HistoryManager.enforce_byte_size_limit()
1096
1132
 
1097
-
1098
1133
  if (
1099
1134
  exc_type is not None
1100
1135
  and self.config.clean == "onError"
@@ -1140,7 +1175,9 @@ class Toil(ContextManager["Toil"]):
1140
1175
  self._assertContextManagerUsed()
1141
1176
 
1142
1177
  assert self.config.workflowID is not None
1143
- HistoryManager.record_workflow_metadata(self.config.workflowID, self._workflow_name, self._trs_spec)
1178
+ HistoryManager.record_workflow_metadata(
1179
+ self.config.workflowID, self._workflow_name, self._trs_spec
1180
+ )
1144
1181
 
1145
1182
  from toil.job import Job
1146
1183
 
@@ -1251,21 +1288,22 @@ class Toil(ContextManager["Toil"]):
1251
1288
 
1252
1289
  :return: an instance of a concrete subclass of AbstractJobStore
1253
1290
  """
1254
- name, rest = cls.parseLocator(locator)
1255
- if name == "file":
1256
- from toil.jobStores.fileJobStore import FileJobStore
1291
+ match cls.parseLocator(locator):
1292
+ case ("file", rest):
1293
+ from toil.jobStores.fileJobStore import FileJobStore
1257
1294
 
1258
- return FileJobStore(rest)
1259
- elif name == "aws":
1260
- from toil.jobStores.aws.jobStore import AWSJobStore
1295
+ return FileJobStore(rest)
1296
+ case ("aws", rest):
1297
+ from toil.jobStores.aws.jobStore import AWSJobStore
1261
1298
 
1262
- return AWSJobStore(rest)
1263
- elif name == "google":
1264
- from toil.jobStores.googleJobStore import GoogleJobStore
1299
+ return AWSJobStore(rest)
1300
+ case ("google", rest):
1301
+ from toil.jobStores.googleJobStore import GoogleJobStore
1265
1302
 
1266
- return GoogleJobStore(rest)
1267
- else:
1268
- raise RuntimeError("Unknown job store implementation '%s'" % name)
1303
+ return GoogleJobStore(rest)
1304
+ raise RuntimeError(
1305
+ "Unknown job store implementation " "{cls.parseLocator(locator)[0]!r}"
1306
+ )
1269
1307
 
1270
1308
  @staticmethod
1271
1309
  def parseLocator(locator: str) -> tuple[str, str]:
@@ -1275,7 +1313,7 @@ class Toil(ContextManager["Toil"]):
1275
1313
 
1276
1314
  Does not validate the set of possible job store types.
1277
1315
 
1278
- :raises RuntimeError: if the locator is not in the approproate syntax.
1316
+ :raises RuntimeError: if the locator is not in the appropriate syntax.
1279
1317
  """
1280
1318
  if locator[0] in "/." or ":" not in locator:
1281
1319
  return "file", locator
@@ -1433,8 +1471,8 @@ class Toil(ContextManager["Toil"]):
1433
1471
 
1434
1472
  @deprecated(new_function_name="import_file")
1435
1473
  def importFile(
1436
- self, srcUrl: str, sharedFileName: Optional[str] = None, symlink: bool = True
1437
- ) -> Optional[FileID]:
1474
+ self, srcUrl: str, sharedFileName: str | None = None, symlink: bool = True
1475
+ ) -> FileID | None:
1438
1476
  return self.import_file(srcUrl, sharedFileName, symlink)
1439
1477
 
1440
1478
  @overload
@@ -1452,7 +1490,7 @@ class Toil(ContextManager["Toil"]):
1452
1490
  src_uri: str,
1453
1491
  shared_file_name: None = None,
1454
1492
  symlink: bool = True,
1455
- check_existence: Literal[True] = True
1493
+ check_existence: Literal[True] = True,
1456
1494
  ) -> FileID: ...
1457
1495
 
1458
1496
  @overload
@@ -1461,16 +1499,16 @@ class Toil(ContextManager["Toil"]):
1461
1499
  src_uri: str,
1462
1500
  shared_file_name: None = None,
1463
1501
  symlink: bool = True,
1464
- check_existence: bool = True
1465
- ) -> Optional[FileID]: ...
1502
+ check_existence: bool = True,
1503
+ ) -> FileID | None: ...
1466
1504
 
1467
1505
  def import_file(
1468
- self,
1469
- src_uri: str,
1470
- shared_file_name: Optional[str] = None,
1471
- symlink: bool = True,
1472
- check_existence: bool = True
1473
- ) -> Optional[FileID]:
1506
+ self,
1507
+ src_uri: str,
1508
+ shared_file_name: str | None = None,
1509
+ symlink: bool = True,
1510
+ check_existence: bool = True,
1511
+ ) -> FileID | None:
1474
1512
  """
1475
1513
  Import the file at the given URL into the job store.
1476
1514
 
@@ -1524,7 +1562,9 @@ class Toil(ContextManager["Toil"]):
1524
1562
  self._jobStore.export_file(file_id, dst_uri)
1525
1563
 
1526
1564
  @staticmethod
1527
- def normalize_uri(uri: str, check_existence: bool = False, dir_path: Optional[str] = None) -> str:
1565
+ def normalize_uri(
1566
+ uri: str, check_existence: bool = False, dir_path: str | None = None
1567
+ ) -> str:
1528
1568
  """
1529
1569
  Given a URI, if it has no scheme, make it a properly quoted file: URI.
1530
1570
 
@@ -1592,7 +1632,7 @@ class Toil(ContextManager["Toil"]):
1592
1632
  self._jobCache[job.jobStoreID] = job
1593
1633
 
1594
1634
  @staticmethod
1595
- def getToilWorkDir(configWorkDir: Optional[str] = None) -> str:
1635
+ def getToilWorkDir(configWorkDir: str | None = None) -> str:
1596
1636
  """
1597
1637
  Return a path to a writable directory under which per-workflow directories exist.
1598
1638
 
@@ -1617,7 +1657,7 @@ class Toil(ContextManager["Toil"]):
1617
1657
 
1618
1658
  @classmethod
1619
1659
  def get_toil_coordination_dir(
1620
- cls, config_work_dir: Optional[str], config_coordination_dir: Optional[str]
1660
+ cls, config_work_dir: str | None, config_coordination_dir: str | None
1621
1661
  ) -> str:
1622
1662
  """
1623
1663
  Return a path to a writable directory, which will be in memory if
@@ -1638,7 +1678,7 @@ class Toil(ContextManager["Toil"]):
1638
1678
  # Go get a coordination directory, using a lot of short-circuiting of
1639
1679
  # or and the fact that and returns its second argument when it
1640
1680
  # succeeds.
1641
- coordination_dir: Optional[str] = (
1681
+ coordination_dir: str | None = (
1642
1682
  # First try an override env var
1643
1683
  os.getenv("TOIL_COORDINATION_DIR_OVERRIDE")
1644
1684
  or
@@ -1696,7 +1736,7 @@ class Toil(ContextManager["Toil"]):
1696
1736
 
1697
1737
  @classmethod
1698
1738
  def getLocalWorkflowDir(
1699
- cls, workflowID: str, configWorkDir: Optional[str] = None
1739
+ cls, workflowID: str, configWorkDir: str | None = None
1700
1740
  ) -> str:
1701
1741
  """
1702
1742
  Return the directory where worker directories and the cache will be located for this workflow on this machine.
@@ -1729,8 +1769,8 @@ class Toil(ContextManager["Toil"]):
1729
1769
  def get_local_workflow_coordination_dir(
1730
1770
  cls,
1731
1771
  workflow_id: str,
1732
- config_work_dir: Optional[str],
1733
- config_coordination_dir: Optional[str],
1772
+ config_work_dir: str | None,
1773
+ config_coordination_dir: str | None,
1734
1774
  ) -> str:
1735
1775
  """
1736
1776
  Return the directory where coordination files should be located for
@@ -1859,7 +1899,7 @@ class ToilMetrics:
1859
1899
  pass
1860
1900
 
1861
1901
  try:
1862
- self.mtailProc: Optional[subprocess.Popen[bytes]] = subprocess.Popen(
1902
+ self.mtailProc: subprocess.Popen[bytes] | None = subprocess.Popen(
1863
1903
  [
1864
1904
  "docker",
1865
1905
  "run",
@@ -1883,7 +1923,7 @@ class ToilMetrics:
1883
1923
 
1884
1924
  # On single machine, launch a node exporter instance to monitor CPU/RAM usage.
1885
1925
  # On AWS this is handled by the EC2 init script
1886
- self.nodeExporterProc: Optional[subprocess.Popen[bytes]] = None
1926
+ self.nodeExporterProc: subprocess.Popen[bytes] | None = None
1887
1927
  if not provisioner:
1888
1928
  try:
1889
1929
  self.nodeExporterProc = subprocess.Popen(