toil 8.0.0__py3-none-any.whl → 8.1.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. toil/__init__.py +4 -4
  2. toil/batchSystems/options.py +1 -0
  3. toil/batchSystems/slurm.py +227 -83
  4. toil/common.py +161 -45
  5. toil/cwl/cwltoil.py +31 -10
  6. toil/job.py +47 -38
  7. toil/jobStores/aws/jobStore.py +46 -10
  8. toil/lib/aws/session.py +14 -3
  9. toil/lib/aws/utils.py +92 -35
  10. toil/lib/dockstore.py +379 -0
  11. toil/lib/ec2nodes.py +3 -2
  12. toil/lib/history.py +1271 -0
  13. toil/lib/history_submission.py +681 -0
  14. toil/lib/io.py +22 -1
  15. toil/lib/misc.py +18 -0
  16. toil/lib/retry.py +10 -10
  17. toil/lib/{integration.py → trs.py} +95 -46
  18. toil/lib/web.py +38 -0
  19. toil/options/common.py +17 -2
  20. toil/options/cwl.py +10 -0
  21. toil/provisioners/gceProvisioner.py +4 -4
  22. toil/server/cli/wes_cwl_runner.py +3 -3
  23. toil/server/utils.py +2 -3
  24. toil/statsAndLogging.py +35 -1
  25. toil/test/batchSystems/test_slurm.py +172 -2
  26. toil/test/cwl/conftest.py +39 -0
  27. toil/test/cwl/cwlTest.py +105 -2
  28. toil/test/cwl/optional-file.cwl +18 -0
  29. toil/test/lib/test_history.py +212 -0
  30. toil/test/lib/test_trs.py +161 -0
  31. toil/test/wdl/wdltoil_test.py +1 -1
  32. toil/version.py +10 -10
  33. toil/wdl/wdltoil.py +23 -9
  34. toil/worker.py +113 -33
  35. {toil-8.0.0.dist-info → toil-8.1.0b1.dist-info}/METADATA +9 -4
  36. {toil-8.0.0.dist-info → toil-8.1.0b1.dist-info}/RECORD +40 -34
  37. {toil-8.0.0.dist-info → toil-8.1.0b1.dist-info}/WHEEL +1 -1
  38. toil/test/lib/test_integration.py +0 -104
  39. {toil-8.0.0.dist-info → toil-8.1.0b1.dist-info}/LICENSE +0 -0
  40. {toil-8.0.0.dist-info → toil-8.1.0b1.dist-info}/entry_points.txt +0 -0
  41. {toil-8.0.0.dist-info → toil-8.1.0b1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,212 @@
1
+ # Copyright (C) 2015-2025 Regents of the University of California
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import os
16
+ import logging
17
+ import pytest
18
+ import time
19
+ from toil.test import ToilTest
20
+
21
+ from toil.lib.history import HistoryManager
22
+
23
+ logger = logging.getLogger(__name__)
24
+ logging.basicConfig(level=logging.DEBUG)
25
+
26
+ class HistoryTest(ToilTest):
27
+ """
28
+ Tests for Toil history tracking.
29
+
30
+ Each test gets its own history database.
31
+ """
32
+
33
+ def setUp(self) -> None:
34
+ super().setUp()
35
+
36
+ # Apply a temp dir override to history tracking
37
+ temp_dir = self._createTempDir()
38
+ HistoryManager.database_path_override = os.path.join(temp_dir, "test-db.sqlite")
39
+
40
+ # Save the current job history tracking flag and enable tracking
41
+ self.original_flag = HistoryManager.JOB_HISTORY_ENABLED
42
+ HistoryManager.JOB_HISTORY_ENABLED = True
43
+
44
+
45
+ def tearDown(self) -> None:
46
+ # Remove the temp dir override from history tracking
47
+ HistoryManager.database_path_override = None
48
+
49
+ # Restore job history tracking flag
50
+ HistoryManager.JOB_HISTORY_ENABLED = self.original_flag
51
+
52
+ super().tearDown()
53
+
54
+ def make_fake_workflow(self, workflow_id: str) -> None:
55
+ # Make a fake workflow
56
+ workflow_jobstore_spec = "file:/tmp/tree"
57
+ HistoryManager.record_workflow_creation(workflow_id, workflow_jobstore_spec)
58
+ workflow_name = "SuperCoolWF"
59
+ workflow_trs_spec = "#wf:v1"
60
+ HistoryManager.record_workflow_metadata(workflow_id, workflow_name, workflow_trs_spec)
61
+
62
+ # Give it a job
63
+ workflow_attempt_number = 1
64
+ job_name = "DoThing"
65
+ succeeded = True
66
+ start_time = time.time()
67
+ runtime = 0.1
68
+ HistoryManager.record_job_attempt(
69
+ workflow_id,
70
+ workflow_attempt_number,
71
+ job_name,
72
+ succeeded,
73
+ start_time,
74
+ runtime,
75
+ )
76
+
77
+ # Give it a workflow attempt with the same details.
78
+ HistoryManager.record_workflow_attempt(
79
+ workflow_id,
80
+ workflow_attempt_number,
81
+ succeeded,
82
+ start_time,
83
+ runtime,
84
+ )
85
+
86
+ def test_history_submittable_detection(self) -> None:
87
+ """
88
+ Make sure that a submittable workflow shows up as such before
89
+ submission and doesn't afterward.
90
+ """
91
+ workflow_id = "123"
92
+ self.make_fake_workflow(workflow_id)
93
+ workflow_attempt_number = 1
94
+
95
+ # Make sure we have data
96
+ self.assertEqual(HistoryManager.count_workflows(), 1)
97
+ self.assertEqual(HistoryManager.count_workflow_attempts(), 1)
98
+ self.assertEqual(HistoryManager.count_job_attempts(), 1)
99
+
100
+ # Make sure we see it as submittable
101
+ submittable_workflow_attempts = HistoryManager.get_submittable_workflow_attempts()
102
+ self.assertEqual(len(submittable_workflow_attempts), 1)
103
+
104
+ # Make sure we see its jobs as submittable
105
+ with_submittable_job_attempts = HistoryManager.get_workflow_attempts_with_submittable_job_attempts()
106
+ self.assertEqual(len(with_submittable_job_attempts), 1)
107
+
108
+ # Make sure we actually see the job
109
+ submittable_job_attempts = HistoryManager.get_unsubmitted_job_attempts(workflow_id, workflow_attempt_number)
110
+ self.assertEqual(len(submittable_job_attempts), 1)
111
+
112
+ # Pretend we submitted them.
113
+ HistoryManager.mark_job_attempts_submitted([j.id for j in submittable_job_attempts])
114
+ HistoryManager.mark_workflow_attempt_submitted(workflow_id, workflow_attempt_number)
115
+
116
+ # Make sure they are no longer matching
117
+ self.assertEqual(len(HistoryManager.get_submittable_workflow_attempts()), 0)
118
+ self.assertEqual(len(HistoryManager.get_workflow_attempts_with_submittable_job_attempts()), 0)
119
+ self.assertEqual(len(HistoryManager.get_unsubmitted_job_attempts(workflow_id, workflow_attempt_number)), 0)
120
+
121
+ # Make sure we still have data
122
+ self.assertEqual(HistoryManager.count_workflows(), 1)
123
+ self.assertEqual(HistoryManager.count_workflow_attempts(), 1)
124
+ self.assertEqual(HistoryManager.count_job_attempts(), 1)
125
+
126
+ def test_history_deletion(self) -> None:
127
+ workflow_id = "123"
128
+ self.make_fake_workflow(workflow_id)
129
+ workflow_attempt_number = 1
130
+
131
+ # Make sure we can see the workflow for deletion by age but not by done-ness
132
+ self.assertEqual(len(HistoryManager.get_oldest_workflow_ids()), 1)
133
+ self.assertEqual(len(HistoryManager.get_fully_submitted_workflow_ids()), 0)
134
+
135
+ # Pretend we submitted the workflow.
136
+ HistoryManager.mark_job_attempts_submitted([j.id for j in HistoryManager.get_unsubmitted_job_attempts(workflow_id, workflow_attempt_number)])
137
+ HistoryManager.mark_workflow_attempt_submitted(workflow_id, workflow_attempt_number)
138
+
139
+ # Make sure we can see the workflow for deletion by done-ness
140
+ self.assertEqual(len(HistoryManager.get_fully_submitted_workflow_ids()), 1)
141
+
142
+ # Add a new workflow
143
+ other_workflow_id = "456"
144
+ self.make_fake_workflow(other_workflow_id)
145
+
146
+ # Make sure we can see both for deletion by age but only one by done-ness
147
+ self.assertEqual(len(HistoryManager.get_oldest_workflow_ids()), 2)
148
+ self.assertEqual(len(HistoryManager.get_fully_submitted_workflow_ids()), 1)
149
+
150
+ # Make sure the older workflow is first.
151
+ self.assertEqual(HistoryManager.get_oldest_workflow_ids(), [workflow_id, other_workflow_id])
152
+
153
+ # Delete the new workflow
154
+ HistoryManager.delete_workflow(other_workflow_id)
155
+
156
+ # Make sure we can see the old one
157
+ self.assertEqual(HistoryManager.get_oldest_workflow_ids(), [workflow_id])
158
+ self.assertEqual(HistoryManager.get_fully_submitted_workflow_ids(), [workflow_id])
159
+
160
+ # Delete the old workflow
161
+ HistoryManager.delete_workflow(workflow_id)
162
+
163
+ # Make sure we have no data
164
+ self.assertEqual(HistoryManager.count_workflows(), 0)
165
+ self.assertEqual(HistoryManager.count_workflow_attempts(), 0)
166
+ self.assertEqual(HistoryManager.count_job_attempts(), 0)
167
+
168
+
169
+ def test_history_size_limit(self) -> None:
170
+ """
171
+ Make sure the database size can be controlled.
172
+ """
173
+
174
+ for workflow_id in ("WorkflowThatTakesUpSomeSpace,ActuallyMoreThanTheLaterOnesTake" + str(i) for i in range(10)):
175
+ self.make_fake_workflow(workflow_id)
176
+
177
+ # We should see the workflows.
178
+ self.assertEqual(HistoryManager.count_workflows(), 10)
179
+ # And they take up space.
180
+ small_size = HistoryManager.get_database_byte_size()
181
+ self.assertGreater(small_size, 0)
182
+
183
+ # Add a bunch more
184
+ for workflow_id in ("WorkflowThatTakesUpSpace" + str(i) for i in range(50)):
185
+ self.make_fake_workflow(workflow_id)
186
+
187
+ # We should see that this is now a much larger database
188
+ large_size = HistoryManager.get_database_byte_size()
189
+ logger.info("Increased database size from %s to %s", small_size, large_size)
190
+ self.assertGreater(large_size, small_size)
191
+
192
+ # We should be able to shrink it back down
193
+ HistoryManager.enforce_byte_size_limit(small_size)
194
+
195
+ reduced_size = HistoryManager.get_database_byte_size()
196
+ logger.info("Decreased database size from %s to %s", large_size, reduced_size)
197
+ # The database should be small enough
198
+ self.assertLessEqual(reduced_size, small_size)
199
+ # There should still be some workflow attempts left in the smaller database (though probably not the first ones)
200
+ remaining_workflows = HistoryManager.count_workflows()
201
+ logger.info("Still have %s workflows", remaining_workflows)
202
+ self.assertGreater(remaining_workflows, 0)
203
+
204
+
205
+
206
+
207
+
208
+
209
+
210
+
211
+
212
+
@@ -0,0 +1,161 @@
1
+ # Copyright (C) 2015-2024 Regents of the University of California
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import io
16
+ import logging
17
+ import pytest
18
+ from typing import IO
19
+ import urllib.request
20
+ from urllib.error import URLError
21
+
22
+ from toil.lib.retry import retry
23
+ from toil.lib.trs import find_workflow, fetch_workflow
24
+ from toil.test import ToilTest, needs_online
25
+
26
+ logger = logging.getLogger(__name__)
27
+ logging.basicConfig(level=logging.DEBUG)
28
+
29
+ @pytest.mark.integrative
30
+ @needs_online
31
+ class DockstoreLookupTest(ToilTest):
32
+ """
33
+ Make sure we can look up workflows on Dockstore.
34
+ """
35
+
36
+ @retry(errors=[URLError, RuntimeError])
37
+ def read_result(self, url_or_path: str) -> IO[bytes]:
38
+ """
39
+ Read a file or URL.
40
+
41
+ Binary mode to allow testing for binary file support.
42
+
43
+ This lets us test that we have the right workflow contents and not care
44
+ how we are being shown them.
45
+ """
46
+ if url_or_path.startswith("http://") or url_or_path.startswith("https://"):
47
+ response = urllib.request.urlopen(url_or_path)
48
+ if response.status != 200:
49
+ raise RuntimeError(f"HTTP error response: {response}")
50
+ return response
51
+ else:
52
+ return open(url_or_path, "rb")
53
+
54
+ # TODO: Tests that definitely test a clear cache
55
+
56
+ def test_lookup_from_page_url(self) -> None:
57
+ PAGE_URL = "https://dockstore.org/workflows/github.com/dockstore/bcc2020-training/HelloWorld:master?tab=info"
58
+ trs_id, trs_version, language = find_workflow(PAGE_URL)
59
+
60
+ self.assertEqual(trs_id, "#workflow/github.com/dockstore/bcc2020-training/HelloWorld")
61
+ self.assertEqual(trs_version, "master")
62
+ self.assertEqual(language, "WDL")
63
+
64
+ def test_lookup_from_trs_with_version(self) -> None:
65
+ TRS_ID = "#workflow/github.com/dockstore-testing/md5sum-checker"
66
+ TRS_VERSION = "master"
67
+ trs_id, trs_version, language = find_workflow(f"{TRS_ID}:{TRS_VERSION}")
68
+
69
+ self.assertEqual(trs_id, TRS_ID)
70
+ self.assertEqual(trs_version, TRS_VERSION)
71
+ self.assertEqual(language, "CWL")
72
+
73
+ def test_lookup_from_trs_no_version(self) -> None:
74
+ TRS_ID = "#workflow/github.com/dockstore-testing/md5sum-checker"
75
+ with pytest.raises(ValueError):
76
+ # We don't yet have a way to read Dockstore's default version info,
77
+ # so it's not safe to apply any default version when multiple
78
+ # versions exist.
79
+ trs_id, trs_version, language = find_workflow(TRS_ID)
80
+
81
+ # TODO: Add a test with a workflow that we know has and will only ever
82
+ # have one version, to test version auto-detection in that case.
83
+
84
+ def test_get(self) -> None:
85
+ TRS_ID = "#workflow/github.com/dockstore-testing/md5sum-checker"
86
+ TRS_VERSION = "master"
87
+ LANGUAGE = "CWL"
88
+ # Despite "-checker" in the ID, this actually refers to the base md5sum
89
+ # workflow that just happens to have a checker *available*, not to the
90
+ # checker workflow itself.
91
+ WORKFLOW_URL = "https://raw.githubusercontent.com/dockstore-testing/md5sum-checker/master/md5sum/md5sum-workflow.cwl"
92
+ looked_up = fetch_workflow(TRS_ID, TRS_VERSION, LANGUAGE)
93
+
94
+ data_from_lookup = self.read_result(looked_up).read()
95
+ data_from_source = self.read_result(WORKFLOW_URL).read()
96
+ self.assertEqual(data_from_lookup, data_from_source)
97
+
98
+ def test_get_from_trs_cached(self) -> None:
99
+ TRS_ID = "#workflow/github.com/dockstore-testing/md5sum-checker"
100
+ TRS_VERSION = "master"
101
+ LANGUAGE = "CWL"
102
+ WORKFLOW_URL = "https://raw.githubusercontent.com/dockstore-testing/md5sum-checker/master/md5sum/md5sum-workflow.cwl"
103
+ # This lookup may or may not be cached
104
+ fetch_workflow(TRS_ID, TRS_VERSION, LANGUAGE)
105
+ # This lookup is definitely cached
106
+ looked_up = fetch_workflow(TRS_ID, TRS_VERSION, LANGUAGE)
107
+
108
+ data_from_lookup = self.read_result(looked_up).read()
109
+ data_from_source = self.read_result(WORKFLOW_URL).read()
110
+ self.assertEqual(data_from_lookup, data_from_source)
111
+
112
+ def test_lookup_from_trs_with_version(self) -> None:
113
+ TRS_VERSIONED_ID = "#workflow/github.com/dockstore-testing/md5sum-checker:workflowWithHTTPImport"
114
+ trs_id, trs_version, language = find_workflow(TRS_VERSIONED_ID)
115
+
116
+ parts = TRS_VERSIONED_ID.split(":")
117
+
118
+ self.assertEqual(trs_id, parts[0])
119
+ self.assertEqual(trs_version, parts[1])
120
+ self.assertEqual(language, "CWL")
121
+
122
+ def test_lookup_from_trs_nonexistent_workflow(self) -> None:
123
+ TRS_VERSIONED_ID = "#workflow/github.com/adamnovak/veryfakerepo:notARealVersion"
124
+ with self.assertRaises(FileNotFoundError):
125
+ looked_up = find_workflow(TRS_VERSIONED_ID)
126
+
127
+ def test_lookup_from_trs_nonexistent_workflow_bad_format(self) -> None:
128
+ TRS_VERSIONED_ID = "#workflow/AbsoluteGarbage:notARealVersion"
129
+ with self.assertRaises(FileNotFoundError):
130
+ looked_up = find_workflow(TRS_VERSIONED_ID)
131
+
132
+ def test_lookup_from_trs_nonexistent_version(self) -> None:
133
+ TRS_VERSIONED_ID = "#workflow/github.com/dockstore-testing/md5sum-checker:notARealVersion"
134
+ with self.assertRaises(FileNotFoundError):
135
+ looked_up = find_workflow(TRS_VERSIONED_ID)
136
+
137
+ def test_get_nonexistent_workflow(self) -> None:
138
+ TRS_ID = "#workflow/github.com/adamnovak/veryfakerepo"
139
+ TRS_VERSION = "notARealVersion"
140
+ LANGUAGE = "CWL"
141
+ with self.assertRaises(FileNotFoundError):
142
+ looked_up = fetch_workflow(TRS_ID, TRS_VERSION, LANGUAGE)
143
+
144
+ def test_get_nonexistent_version(self) -> None:
145
+ TRS_ID = "#workflow/github.com/dockstore-testing/md5sum-checker"
146
+ TRS_VERSION = "notARealVersion"
147
+ LANGUAGE = "CWL"
148
+ with self.assertRaises(FileNotFoundError):
149
+ looked_up = fetch_workflow(TRS_ID, TRS_VERSION, LANGUAGE)
150
+
151
+ def test_get_nonexistent_workflow_bad_format(self) -> None:
152
+ # Dockstore enforces an ID pattern and blames your request if you ask
153
+ # about something that doesn't follow it. So don't follow it.
154
+ TRS_ID = "#workflow/AbsoluteGarbage"
155
+ TRS_VERSION = "notARealVersion"
156
+ LANGUAGE = "CWL"
157
+ with self.assertRaises(FileNotFoundError):
158
+ looked_up = fetch_workflow(TRS_ID, TRS_VERSION, LANGUAGE)
159
+
160
+
161
+
@@ -933,7 +933,7 @@ class WDLToilBenchTests(ToilTest):
933
933
  )
934
934
  self.assertEqual(same_id, first_chosen)
935
935
 
936
- # If we use a different ID we shoudl get a different result still obeying the constraints
936
+ # If we use a different ID we should get a different result still obeying the constraints
937
937
  diff_id = choose_human_readable_directory(
938
938
  "root", "taskname", "222-333-444", state
939
939
  )
toil/version.py CHANGED
@@ -1,14 +1,14 @@
1
- baseVersion = '8.0.0'
1
+ baseVersion = '8.1.0b1'
2
2
  cgcloudVersion = '1.6.0a1.dev393'
3
- version = '8.0.0-d2ae0ea9ab49f238670dbf6aafd20de7afdd8514'
4
- cacheTag = 'cache-local-py3.13'
5
- mainCacheTag = 'cache-master-py3.13'
6
- distVersion = '8.0.0'
7
- exactPython = 'python3.13'
8
- python = 'python3.13'
9
- dockerTag = '8.0.0-d2ae0ea9ab49f238670dbf6aafd20de7afdd8514-py3.13'
10
- currentCommit = 'd2ae0ea9ab49f238670dbf6aafd20de7afdd8514'
11
- dockerRegistry = 'quay.io/stxue'
3
+ version = '8.1.0b1-4bb05349c027096ab4785259e39b2648118b5dd7'
4
+ cacheTag = 'cache-local-py3.9'
5
+ mainCacheTag = 'cache-master-py3.9'
6
+ distVersion = '8.1.0b1'
7
+ exactPython = 'python3.9'
8
+ python = 'python3.9'
9
+ dockerTag = '8.1.0b1-4bb05349c027096ab4785259e39b2648118b5dd7-py3.9'
10
+ currentCommit = '4bb05349c027096ab4785259e39b2648118b5dd7'
11
+ dockerRegistry = 'quay.io/ucsc_cgl'
12
12
  dockerName = 'toil'
13
13
  dirty = False
14
14
  cwltool_version = '3.1.20250110105449'
toil/wdl/wdltoil.py CHANGED
@@ -103,8 +103,8 @@ from toil.jobStores.abstractJobStore import (
103
103
  from toil.lib.exceptions import UnimplementedURLException
104
104
  from toil.lib.accelerators import get_individual_local_accelerators
105
105
  from toil.lib.conversions import VALID_PREFIXES, convert_units, human2bytes
106
+ from toil.lib.trs import resolve_workflow
106
107
  from toil.lib.io import mkdtemp, is_any_url, is_file_url, TOIL_URI_SCHEME, is_standard_url, is_toil_url, is_remote_url
107
- from toil.lib.integration import resolve_workflow
108
108
  from toil.lib.memoize import memoize
109
109
  from toil.lib.misc import get_user_name
110
110
  from toil.lib.resources import ResourceMonitor
@@ -515,10 +515,14 @@ async def toil_read_source(
515
515
  # TODO: this is probably sync work that would be better as async work here
516
516
  AbstractJobStore.read_from_url(candidate_uri, destination_buffer)
517
517
  except Exception as e:
518
- # TODO: we need to assume any error is just a not-found,
519
- # because the exceptions thrown by read_from_url()
518
+ if isinstance(e, SyntaxError) or isinstance(e, NameError):
519
+ # These are probably actual problems with the code and not
520
+ # failures in reading the URL.
521
+ raise
522
+ # TODO: we need to assume in general that an error is just a
523
+ # not-found, because the exceptions thrown by read_from_url()
520
524
  # implementations are not specified.
521
- logger.debug("Tried to fetch %s from %s but got %s", uri, candidate_uri, e)
525
+ logger.debug("Tried to fetch %s from %s but got %s: %s", uri, candidate_uri, type(e), e)
522
526
  continue
523
527
  # If we get here, we got it probably.
524
528
  try:
@@ -5438,17 +5442,25 @@ def main() -> None:
5438
5442
  )
5439
5443
 
5440
5444
  try:
5441
- with Toil(options) as toil:
5445
+ wdl_uri, trs_spec = resolve_workflow(options.wdl_uri, supported_languages={"WDL"})
5446
+
5447
+ with Toil(options, workflow_name=trs_spec or wdl_uri, trs_spec=trs_spec) as toil:
5442
5448
  if options.restart:
5443
5449
  output_bindings = toil.restart()
5444
5450
  else:
5445
5451
  # TODO: Move all the input parsing outside the Toil context
5446
5452
  # manager to avoid leaving a job store behind if the workflow
5447
5453
  # can't start.
5448
-
5449
- # Load the WDL document
5454
+
5455
+ # MiniWDL load code internally uses asyncio.get_event_loop()
5456
+ # which might not get an event loop if somebody has ever called
5457
+ # set_event_loop. So we need to make sure an event loop is
5458
+ # available.
5459
+ asyncio.set_event_loop(asyncio.new_event_loop())
5460
+
5461
+ # Load the WDL document.
5450
5462
  document: WDL.Tree.Document = WDL.load(
5451
- resolve_workflow(options.wdl_uri, supported_languages={"WDL"}),
5463
+ wdl_uri,
5452
5464
  read_source=toil_read_source,
5453
5465
  )
5454
5466
 
@@ -5564,12 +5576,14 @@ def main() -> None:
5564
5576
  inputs_search_path.append(input_source_uri)
5565
5577
 
5566
5578
  match = re.match(
5567
- r"https://raw\.githubusercontent\.com/[^/]*/[^/]*/[^/]*/",
5579
+ r"https://raw\.githubusercontent\.com/[^/]*/[^/]*/(refs/heads/)?[^/]*/",
5568
5580
  input_source_uri,
5569
5581
  )
5570
5582
  if match:
5571
5583
  # Special magic for Github repos to make e.g.
5572
5584
  # https://raw.githubusercontent.com/vgteam/vg_wdl/44a03d9664db3f6d041a2f4a69bbc4f65c79533f/params/giraffe.json
5585
+ # or
5586
+ # https://raw.githubusercontent.com/vgteam/vg_wdl/refs/heads/giraffedv/params/giraffe.json
5573
5587
  # work when it references things relative to repo root.
5574
5588
  logger.info(
5575
5589
  "Inputs appear to come from a Github repository; adding repository root to file search path"