virtool-workflow 6.0.0a8__py3-none-any.whl → 6.0.0a9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- virtool_workflow/analysis/fastqc.py +15 -18
- virtool_workflow/analysis/skewer.py +14 -21
- virtool_workflow/analysis/trimming.py +3 -6
- virtool_workflow/api/acquire.py +4 -6
- virtool_workflow/api/client.py +11 -15
- virtool_workflow/api/utils.py +9 -12
- virtool_workflow/data/analyses.py +4 -9
- virtool_workflow/data/hmms.py +5 -10
- virtool_workflow/data/indexes.py +12 -21
- virtool_workflow/data/jobs.py +2 -2
- virtool_workflow/data/ml.py +16 -8
- virtool_workflow/data/samples.py +8 -12
- virtool_workflow/data/subtractions.py +11 -17
- virtool_workflow/data/uploads.py +2 -3
- virtool_workflow/decorators.py +2 -5
- virtool_workflow/errors.py +1 -9
- virtool_workflow/hooks.py +2 -4
- virtool_workflow/pytest_plugin/data.py +3 -3
- virtool_workflow/pytest_plugin/subprocess.py +1 -1
- virtool_workflow/runtime/discover.py +5 -9
- virtool_workflow/runtime/hook.py +4 -7
- virtool_workflow/runtime/path.py +1 -1
- virtool_workflow/runtime/ping.py +1 -2
- virtool_workflow/runtime/run.py +13 -16
- virtool_workflow/runtime/run_subprocess.py +7 -10
- virtool_workflow/runtime/sentry.py +2 -3
- virtool_workflow/utils.py +12 -1
- virtool_workflow/workflow.py +6 -10
- {virtool_workflow-6.0.0a8.dist-info → virtool_workflow-6.0.0a9.dist-info}/METADATA +1 -1
- virtool_workflow-6.0.0a9.dist-info/RECORD +46 -0
- virtool_workflow-6.0.0a8.dist-info/RECORD +0 -46
- {virtool_workflow-6.0.0a8.dist-info → virtool_workflow-6.0.0a9.dist-info}/LICENSE +0 -0
- {virtool_workflow-6.0.0a8.dist-info → virtool_workflow-6.0.0a9.dist-info}/WHEEL +0 -0
- {virtool_workflow-6.0.0a8.dist-info → virtool_workflow-6.0.0a9.dist-info}/entry_points.txt +0 -0
virtool_workflow/data/ml.py
CHANGED
@@ -1,4 +1,6 @@
 """A fixture and dataclass for working with machine learning models in workflows."""
+import asyncio
+import shutil
 from dataclasses import dataclass
 from pathlib import Path

@@ -8,15 +10,14 @@ from virtool_core.models.ml import MLModelRelease

 from virtool_workflow.api.client import APIClient
 from virtool_workflow.data.analyses import WFAnalysis
-from virtool_workflow.utils import make_directory
+from virtool_workflow.utils import make_directory, move_all_model_files, untar

 logger = get_logger("api")


 @dataclass
 class WFMLModelRelease:
-    """
-    A machine learning model.
+    """A machine learning model.

     This class represents a machine learning model and the selected release of that
     model in the workflow.
@@ -39,7 +40,9 @@ class WFMLModelRelease:

 @fixture
 async def ml(
-    _api: APIClient,
+    _api: APIClient,
+    analysis: WFAnalysis,
+    work_path: Path,
 ) -> WFMLModelRelease | None:
     if analysis.ml is None:
         return None
@@ -50,11 +53,11 @@ async def ml(
     log = logger.bind(model_id=analysis.ml.id, model_release_id=model_release_id)

     model_release_json = await _api.get_json(
-        f"/ml/{model_id}/releases/{model_release_id}"
+        f"/ml/{model_id}/releases/{model_release_id}",
     )
     model_release = MLModelRelease(**model_release_json)

-    log.info("
+    log.info("fetched ml model release json")

     release = WFMLModelRelease(
         id=model_release.id,
@@ -65,9 +68,14 @@ async def ml(
     await make_directory(release.path)

     await _api.get_file(
-        f"/ml/{model_id}/releases/{model_release_id}/model.tar.gz",
+        f"/ml/{model_id}/releases/{model_release_id}/model.tar.gz",
+        release.file_path,
     )

-
+    await asyncio.to_thread(untar, release.file_path, release.path)
+    await asyncio.to_thread(move_all_model_files, release.path / "model", release.path)
+    await asyncio.to_thread(shutil.rmtree, release.path / "model")
+
+    log.info("downloaded ml model release file")

     return release
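The new steps in the `ml` fixture download the release tarball, extract it, flatten the archive's `model/` directory into the release path, and delete the emptied directory, with the blocking filesystem work moved off the event loop via `asyncio.to_thread`. A rough standard-library sketch of that extract-and-flatten pattern follows; the real code uses the `untar` and `move_all_model_files` helpers added to `virtool_workflow/utils.py` in this release, whose exact behaviour is assumed here:

    import shutil
    import tarfile
    from pathlib import Path


    def extract_and_flatten(archive: Path, dest: Path) -> None:
        """Extract a model.tar.gz into dest and hoist files out of the top-level 'model' directory."""
        with tarfile.open(archive, "r:gz") as tar:
            tar.extractall(dest)

        model_dir = dest / "model"

        # Move every extracted entry up one level, then drop the emptied directory.
        for path in model_dir.iterdir():
            shutil.move(str(path), dest / path.name)

        shutil.rmtree(model_dir, ignore_errors=True)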
virtool_workflow/data/samples.py
CHANGED
@@ -7,13 +7,13 @@ from pyfixtures import fixture
 from structlog import get_logger
 from virtool_core.models.enums import LibraryType
 from virtool_core.models.job import Job
-from virtool_core.models.samples import
+from virtool_core.models.samples import Quality, Sample

 from virtool_workflow.analysis.utils import ReadPaths
 from virtool_workflow.api.client import APIClient
-from virtool_workflow.files import VirtoolFileFormat
 from virtool_workflow.data.uploads import WFUploads
 from virtool_workflow.errors import JobsAPINotFound
+from virtool_workflow.files import VirtoolFileFormat

 logger = get_logger("api")

@@ -42,8 +42,7 @@ class WFSample:

     @property
     def min_length(self) -> int | None:
-        """
-        The minimum observed read length in the sample sequencing data.
+        """The minimum observed read length in the sample sequencing data.

         Returns ``None`` if the sample is still being created and no quality data is available.

@@ -52,8 +51,7 @@ class WFSample:

     @property
     def max_length(self) -> int | None:
-        """
-        The maximum observed read length in the sample sequencing data.
+        """The maximum observed read length in the sample sequencing data.

         Returns ``None`` if the sample is still being created and no quality data is available.

@@ -85,10 +83,9 @@ class WFNewSample:

 @fixture
 async def sample(
-    _api: APIClient, job: Job, uploads: WFUploads, work_path: Path
+    _api: APIClient, job: Job, uploads: WFUploads, work_path: Path,
 ) -> WFSample:
     """The sample associated with the current job."""
-
     id_ = job.args["sample_id"]

     base_url_path = f"/samples/{id_}"
@@ -104,7 +101,7 @@ async def sample(
     await asyncio.to_thread(reads_path.mkdir, exist_ok=True, parents=True)

     await _api.get_file(
-        f"{base_url_path}/reads/reads_1.fq.gz", reads_path / "reads_1.fq.gz"
+        f"{base_url_path}/reads/reads_1.fq.gz", reads_path / "reads_1.fq.gz",
     )

     if sample.paired:
@@ -113,7 +110,7 @@ async def sample(
             reads_path / "reads_2.fq.gz",
         )
         await _api.get_file(
-            f"{base_url_path}/reads/reads_2.fq.gz", reads_path / "reads_2.fq.gz"
+            f"{base_url_path}/reads/reads_2.fq.gz", reads_path / "reads_2.fq.gz",
         )
     else:
         read_paths = (reads_path / "reads_1.fq.gz",)
@@ -130,10 +127,9 @@ async def sample(

 @fixture
 async def new_sample(
-    _api: APIClient, job: Job, uploads: WFUploads, work_path: Path
+    _api: APIClient, job: Job, uploads: WFUploads, work_path: Path,
 ) -> WFNewSample:
     """The sample associated with the current job."""
-
     id_ = job.args["sample_id"]

     log = logger.bind(resource="sample", id=id_)
virtool_workflow/data/subtractions.py
CHANGED
@@ -7,8 +7,8 @@ from pyfixtures import fixture
 from structlog import get_logger
 from virtool_core.models.job import Job
 from virtool_core.models.subtraction import (
-    Subtraction,
     NucleotideComposition,
+    Subtraction,
     SubtractionFile,
 )

@@ -22,8 +22,7 @@ logger = get_logger("api")

 @dataclass
 class WFSubtraction:
-    """
-    A Virtool subtraction that has been loaded into the workflow environment.
+    """A Virtool subtraction that has been loaded into the workflow environment.

     The subtraction files are downloaded to the workflow's local work path so they can
     be used for analysis.
@@ -47,7 +46,7 @@ class WFSubtraction:
     path: Path
     """
     The path to the subtraction directory.
-
+
     The subtraction directory contains the FASTA and Bowtie2 files for the subtraction.
     """

@@ -58,8 +57,7 @@ class WFSubtraction:

     @property
     def bowtie2_index_path(self) -> Path:
-        """
-        The path to Bowtie2 prefix in the running workflow's work_path
+        """The path to Bowtie2 prefix in the running workflow's work_path

         For example, ``/work/subtractions/<id>/subtraction`` refers to the Bowtie2
         index that comprises the files:
@@ -85,7 +83,7 @@ class WFNewSubtraction:
     A callable that deletes the subtraction from Virtool.

     This should be called if the subtraction creation fails before the subtraction is
-    finalized.
+    finalized.
     """

     finalize: Callable[[dict[str, int | float], int], Coroutine[None, None, None]]
@@ -120,10 +118,9 @@ class WFNewSubtraction:

 @fixture
 async def subtractions(
-    _api: APIClient, analysis: WFAnalysis, work_path: Path
+    _api: APIClient, analysis: WFAnalysis, work_path: Path,
 ) -> list[WFSubtraction]:
     """The subtractions to be used for the current analysis job."""
-
     subtraction_work_path = work_path / "subtractions"
     await asyncio.to_thread(subtraction_work_path.mkdir)

@@ -161,10 +158,9 @@ async def subtractions(

 @fixture
 async def new_subtraction(
-    _api: APIClient, job: Job, uploads: WFUploads, work_path: Path
+    _api: APIClient, job: Job, uploads: WFUploads, work_path: Path,
 ) -> WFNewSubtraction:
-    """
-    A new subtraction that will be created during the current job.
+    """A new subtraction that will be created during the current job.

     Currently only used for the `create-subtraction` workflow.
     """
@@ -193,8 +189,7 @@ async def new_subtraction(
     await _api.delete(f"/subtractions/{subtraction_.id}")

     async def finalize(gc: dict[str, int | float], count: int):
-        """
-        Finalize the subtraction by setting the gc.
+        """Finalize the subtraction by setting the gc.

         :param gc: the nucleotide composition of the subtraction
         :param count: the number of sequences in the FASTA file
@@ -205,8 +200,7 @@ async def new_subtraction(
         await _api.patch_json(url_path, {"gc": gc_.dict(), "count": count})

     async def upload(path: Path):
-        """
-        Upload a file relating to this subtraction.
+        """Upload a file relating to this subtraction.

         Filenames must be one of:
         - subtraction.fa.gz
@@ -227,7 +221,7 @@ async def new_subtraction(
         log.info("Uploading subtraction file")

         await _api.put_file(
-            f"/subtractions/{subtraction_.id}/files/{filename}", path, "unknown"
+            f"/subtractions/{subtraction_.id}/files/{filename}", path, "unknown",
         )

         log.info("Finished uploading subtraction file")
virtool_workflow/data/uploads.py
CHANGED
@@ -18,14 +18,13 @@ class WFUploads:

 @fixture
 async def uploads(_api: APIClient) -> WFUploads:
-    """
-    Provides access to files that have been uploaded to the Virtool instance.
+    """Provides access to files that have been uploaded to the Virtool instance.

     Files can be downloaded into the workflow environment be calling
     :meth:`.WFUploads.download`.

     Example:
-
+    -------
     .. code-block:: python

     @step
virtool_workflow/decorators.py
CHANGED
@@ -6,8 +6,7 @@ from virtool_workflow.workflow import Workflow


 def step(func: Callable = None, *, name: str | None = None) -> Callable:
-    """
-    Mark a function as a workflow step function.
+    """Mark a function as a workflow step function.

     :param func: the workflow step function
     :param name: the display name of the workflow step. A name
@@ -23,13 +22,11 @@ def step(func: Callable = None, *, name: str | None = None) -> Callable:


 def collect(module: ModuleType) -> Workflow:
-    """
-    Build a :class:`.Workflow` object from a workflow module.
+    """Build a :class:`.Workflow` object from a workflow module.

     :param module: A workflow module
     :return: A workflow object
     """
-
     workflow = Workflow()

     markers = [
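For context, the two functions shown above work together: `step` marks coroutine functions in a workflow module, and `collect` scans a module for those markers and assembles a `Workflow`. A minimal sketch; the module contents are hypothetical and fixture parameters are omitted:

    import sys

    from virtool_workflow.decorators import collect, step


    @step(name="Trim Reads")
    async def trim_reads():
        """Trim the raw sample reads."""


    @step
    async def run_analysis():
        """Run the analysis on the trimmed reads."""


    # Build a Workflow from the @step-marked functions in this module.
    workflow = collect(sys.modules[__name__])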
virtool_workflow/errors.py
CHANGED
@@ -4,7 +4,6 @@ from subprocess import SubprocessError
 class IllegalJobArguments(ValueError):
     """The `job.args` dict is in an illegal state."""

-    ...


 class InsufficientJobRights(Exception):
@@ -14,7 +13,7 @@ class InsufficientJobRights(Exception):
 class JobAlreadyAcquired(Exception):
     def __init__(self, job_id: str):
         super(JobAlreadyAcquired, self).__init__(
-            f"Job {job_id} is has already been acquired."
+            f"Job {job_id} is has already been acquired.",
         )


@@ -25,40 +24,33 @@ class JobAlreadyFinalized(Exception):
 class JobsAPIError(Exception):
     """A base exception for errors due to HTTP errors from the jobs API."""

-    ...


 class JobsAPIBadRequest(JobsAPIError):
     """A ``400 Bad Request`` response from the jobs API."""

     status = 400
-    ...


 class JobsAPIForbidden(JobsAPIError):
     status = 403
-    ...


 class JobsAPINotFound(JobsAPIError):
     status = 404
-    ...


 class JobsAPIConflict(JobsAPIError):
     status = 409
-    ...


 class JobsAPIServerError(JobsAPIError):
     status = 500
-    ...


 class MissingJobArgument(ValueError):
     """The `job.args` dict is missing a required key for some funcionality."""

-    ...


 class NotFound(KeyError):
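Each `JobsAPIError` subclass above carries its HTTP status as a class attribute, with the redundant `...` bodies removed in this release. A hedged sketch of how those attributes can be used to turn a response code into the matching exception; the `raise_for_status` helper is illustrative and not part of the package:

    from virtool_workflow.errors import (
        JobsAPIBadRequest,
        JobsAPIConflict,
        JobsAPIError,
        JobsAPIForbidden,
        JobsAPINotFound,
        JobsAPIServerError,
    )

    # Map status codes to the exception classes defined in errors.py.
    _ERRORS_BY_STATUS = {
        cls.status: cls
        for cls in (
            JobsAPIBadRequest,
            JobsAPIForbidden,
            JobsAPINotFound,
            JobsAPIConflict,
            JobsAPIServerError,
        )
    }


    def raise_for_status(status: int) -> None:
        """Raise the JobsAPIError subclass matching an error status code."""
        if status >= 400:
            raise _ERRORS_BY_STATUS.get(status, JobsAPIError)(f"jobs API returned {status}")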
virtool_workflow/hooks.py
CHANGED
@@ -1,5 +1,4 @@
-"""
-Hooks provide a way to do things when events happen during the workflow lifecycle.
+"""Hooks provide a way to do things when events happen during the workflow lifecycle.
 """

 from virtool_workflow.runtime.hook import Hook
@@ -126,8 +125,7 @@ __all__ = [


 def cleanup_builtin_status_hooks():
-    """
-    Clear callbacks for built-in status hooks.
+    """Clear callbacks for built-in status hooks.

     This prevents carryover of hooks between tests. Carryover won't be encountered in
     production because workflow processes exit after one run.
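The hooks exported by this module are `Hook` instances that accept callbacks through decorator-style registration (see `Hook.__call__` in `runtime/hook.py`, which also accepts `until` and `once` arguments). A minimal sketch of registering callbacks on two of the built-in hooks; the callback bodies are illustrative only:

    from virtool_workflow.hooks import on_failure, on_step_start


    @on_step_start
    async def announce_step():
        print("starting a workflow step")


    @on_failure
    async def report_failure():
        print("the workflow failed")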
virtool_workflow/pytest_plugin/data.py
CHANGED
@@ -15,8 +15,8 @@ from virtool_core.models.samples import (
 )
 from virtool_core.models.subtraction import (
     Subtraction,
-    SubtractionNested,
     SubtractionFile,
+    SubtractionNested,
 )

 from virtool_workflow.pytest_plugin.utils import SUBTRACTION_FILENAMES
@@ -55,9 +55,9 @@ class Data:
     """An un-finalized subtraction for testing subtraction creation workflows."""


-@pytest.fixture
+@pytest.fixture()
 def data(
-    virtool_workflow_example_path: Path, static_datetime: datetime.datetime
+    virtool_workflow_example_path: Path, static_datetime: datetime.datetime,
 ) -> Data:
     class AnalysisFactory(ModelFactory):
         __model__ = Analysis
virtool_workflow/runtime/discover.py
CHANGED
@@ -1,6 +1,6 @@
 import sys
 from importlib import import_module
-from importlib.util import
+from importlib.util import module_from_spec, spec_from_file_location
 from pathlib import Path
 from types import ModuleType

@@ -13,8 +13,7 @@ logger = get_logger("runtime")


 def discover_workflow(path: Path) -> Workflow:
-    """
-    Find an instance of :class:`.Workflow` in the python module located at the given
+    """Find an instance of :class:`.Workflow` in the python module located at the given
     path.

     :param path: The path to a Python module.
@@ -32,8 +31,7 @@ def discover_workflow(path: Path) -> Workflow:


 def load_builtin_fixtures():
-    """
-    Load built-in fixtures.
+    """Load built-in fixtures.

     This function is called before any fixtures defined in a workflow's
     ``fixtures.py`` file. It is used to provide built-in fixtures that are
@@ -59,8 +57,7 @@ def load_custom_fixtures():


 def load_workflow_from_file() -> Workflow:
-    """
-    Load a workflow from a Python file at ``./workflow.py`` and return a :class:`.Workflow` object.
+    """Load a workflow from a Python file at ``./workflow.py`` and return a :class:`.Workflow` object.

     :raises FileNotFoundError: If no workflow.py file is found.
     :return: The workflow.
@@ -75,8 +72,7 @@ def load_workflow_from_file() -> Workflow:


 def import_module_from_file(module_name: str, path: Path) -> ModuleType:
-    """
-    Import a module from a file.
+    """Import a module from a file.

     The parent directory of `path` will also be added to `sys.path` prior to importing.
     This ensures that modules and packages defined in that directory can be properly
virtool_workflow/runtime/hook.py
CHANGED
@@ -16,8 +16,7 @@ class Hook:
     """Used to hook into the workflow lifecycle."""

     def __init__(self, hook_name: str):
-        """
-        A set of functions to be called as a group upon a particular event.
+        """A set of functions to be called as a group upon a particular event.

         The signature of any functions added via :func:`Hook.callback` or
         :func:`Hook.__call__` are validated to match the types provided.
@@ -31,8 +30,7 @@ class Hook:
         self.clear = self.callbacks.clear

     def __call__(self, callback_: Callable = None, until=None, once=False):
-        """
-        Add a callback function to this Hook that will be called when the hook is
+        """Add a callback function to this Hook that will be called when the hook is
         triggered.

         :param callback_: The callback function to register.
@@ -75,8 +73,7 @@ class Hook:
         return _temporary_callback

     async def trigger(self, scope: FixtureScope, suppress=False, **kwargs) -> list[Any]:
-        """
-        Trigger the hook.
+        """Trigger the hook.

         Bind fixtures from `scope` to each callback function and invoke them.

@@ -122,7 +119,7 @@ class Hook:
             return await callback(*args, **kwargs)

         results = await gather(
-            *[call_callback(callback) for callback in callbacks], return_exceptions=True
+            *[call_callback(callback) for callback in callbacks], return_exceptions=True,
         )

         for error in results:
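A sketch of the full `Hook` lifecycle described above: construct a hook, register a callback, then trigger it with fixtures bound from a scope. The `FixtureScope` import from `pyfixtures` and the zero-argument callback are assumptions made for illustration:

    import asyncio

    from pyfixtures import FixtureScope

    from virtool_workflow.runtime.hook import Hook

    on_example_event = Hook("on_example_event")


    @on_example_event
    async def handle_event():
        print("example event fired")


    async def main():
        async with FixtureScope() as scope:
            # trigger() binds fixtures from the scope to each callback and awaits them all.
            await on_example_event.trigger(scope)


    asyncio.run(main())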
virtool_workflow/runtime/path.py
CHANGED
virtool_workflow/runtime/ping.py
CHANGED
@@ -35,8 +35,7 @@ async def _ping_periodically(api: APIClient, job_id: str):

 @asynccontextmanager
 async def ping_periodically(api: APIClient, job_id: str):
-    """
-    Ping the API to keep the job alive.
+    """Ping the API to keep the job alive.

     While the context manager is open, a task runs that pings the API every 5 seconds.
     When the context manager is closed, the task is cleanly cancelled.
virtool_workflow/runtime/run.py
CHANGED
@@ -18,20 +18,20 @@ from virtool_workflow.api.client import api_client
 from virtool_workflow.hooks import (
     cleanup_builtin_status_hooks,
     on_cancelled,
-    on_success,
-    on_step_start,
-    on_terminated,
     on_error,
     on_failure,
-    on_result,
     on_finish,
-
+    on_result,
     on_step_finish,
+    on_step_start,
+    on_success,
+    on_terminated,
+    on_workflow_start,
 )
 from virtool_workflow.runtime.config import RunConfig
 from virtool_workflow.runtime.discover import (
-    load_custom_fixtures,
     load_builtin_fixtures,
+    load_custom_fixtures,
     load_workflow_from_file,
 )
 from virtool_workflow.runtime.events import Events
@@ -48,8 +48,7 @@ logger = get_logger("runtime")


 def configure_status_hooks():
-    """
-    Configure built-in job status hooks.
+    """Configure built-in job status hooks.

     Push status updates to API when various lifecycle hooks are triggered.

@@ -77,8 +76,7 @@ def configure_status_hooks():


 async def execute(workflow: Workflow, scope: FixtureScope, events: Events):
-    """
-    Execute a workflow.
+    """Execute a workflow.

     :param workflow: The workflow to execute
     :param scope: The :class:`FixtureScope` to use for fixture injection
@@ -118,7 +116,7 @@ async def execute(workflow: Workflow, scope: FixtureScope, events: Events):

     if not events.terminated.is_set():
         logger.warning(
-            "workflow terminated without sigterm. this should not happen."
+            "workflow terminated without sigterm. this should not happen.",
         )

     await asyncio.gather(
@@ -162,7 +160,7 @@ async def run_workflow(
     job = await acquire_job_by_id(config.jobs_api_connection_string, job_id)

     async with api_client(
-        config.jobs_api_connection_string, job.id, job.key
+        config.jobs_api_connection_string, job.id, job.key,
     ) as api, FixtureScope() as scope:
         # These fixtures should not be used directly by the workflow. They are used
         # by other built-in fixtures.
@@ -200,8 +198,7 @@ async def start_runtime(
     work_path: Path,
     workflow_loader: Callable[[], Workflow] = load_workflow_from_file,
 ):
-    """
-    Start the workflow runtime.
+    """Start the workflow runtime.

     The runtime loads the workflow and fixtures. It then waits for a job ID to be pushed
     to the configured Redis list.
@@ -261,7 +258,7 @@ async def start_runtime(
             job_id,
             workflow,
             events,
-        )
+        ),
     )

     def terminate_workflow(*_):
@@ -278,7 +275,7 @@ async def start_runtime(

     async with configure_redis(redis_connection_string) as redis:
         cancellation_task = asyncio.create_task(
-            wait_for_cancellation(redis, job_id, cancel_workflow)
+            wait_for_cancellation(redis, job_id, cancel_workflow),
        )

         await run_workflow_task
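The `start_runtime` changes above keep the existing shutdown pattern: the workflow runs as an asyncio task, a signal handler flags termination, and a separate task watches Redis for cancellation. A generic, self-contained sketch of that SIGTERM-then-cancel pattern; the names here are illustrative and not the module's own:

    import asyncio
    import signal


    async def fake_workflow(terminated: asyncio.Event):
        try:
            await asyncio.sleep(60)  # stand-in for the real workflow steps
        except asyncio.CancelledError:
            if terminated.is_set():
                print("terminated by SIGTERM")
            raise


    async def main():
        terminated = asyncio.Event()
        task = asyncio.create_task(fake_workflow(terminated))

        def terminate(*_):
            terminated.set()
            task.cancel()

        asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, terminate)

        try:
            await task
        except asyncio.CancelledError:
            pass


    asyncio.run(main())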
virtool_workflow/runtime/run_subprocess.py
CHANGED
@@ -15,12 +15,11 @@ logger = get_logger("subprocess")

 class LineOutputHandler(Protocol):
     async def __call__(self, line: str):
-        """
-        Handle input from stdin, or stderr, line by line.
+        """Handle input from stdin, or stderr, line by line.

         :param line: A line of output from the stream.
         """
-        raise NotImplementedError
+        raise NotImplementedError


 class RunSubprocess(Protocol):
@@ -32,8 +31,7 @@ class RunSubprocess(Protocol):
         stderr_handler: LineOutputHandler | None = None,
         stdout_handler: LineOutputHandler | None = None,
     ) -> Process:
-        """
-        Run a shell command in a subprocess.
+        """Run a shell command in a subprocess.

         :param command: A shell command
         :param stdout_handler: A function to handle stdout output line by line
@@ -43,14 +41,13 @@ class RunSubprocess(Protocol):
         :raise SubprocessFailed: The subprocess has exited with a non-zero exit code
         :return: An :class:`.Process` instance
         """
-        raise NotImplementedError
+        raise NotImplementedError


 async def watch_pipe(
-    stream: asyncio.StreamReader, handler: Callable[[bytes], Awaitable[None]]
+    stream: asyncio.StreamReader, handler: Callable[[bytes], Awaitable[None]],
 ):
-    """
-    Watch the stdout or stderr stream and pass lines to the `handler` callback function.
+    """Watch the stdout or stderr stream and pass lines to the `handler` callback function.

     :param stream: a stdout or stderr file object
     :param handler: a handler coroutine for output lines
@@ -127,7 +124,7 @@ async def _run_subprocess(
     if process.returncode not in [0, 15, -15]:
         raise SubprocessFailed(
             f"{command[0]} failed with exit code {process.returncode}\n"
-            f"arguments: {command}\n"
+            f"arguments: {command}\n",
         )

     log.info(
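A sketch of how a workflow step might consume the `RunSubprocess` protocol above, assuming the runtime injects a fixture named `run_subprocess` that implements it; the command and handler are illustrative:

    from virtool_workflow.decorators import step


    @step
    async def count_reads(run_subprocess):
        lines: list[str] = []

        async def handle_stdout(line: str):
            # LineOutputHandler: receive one line of output at a time.
            lines.append(line.strip())

        await run_subprocess(["wc", "-l", "reads_1.fq"], stdout_handler=handle_stdout)

        print("line count output:", lines)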
virtool_workflow/runtime/sentry.py
CHANGED
@@ -9,8 +9,7 @@ logger = get_logger("runtime")


 def configure_sentry(dsn: str):
-    """
-    Initialize Sentry for log aggregation.
+    """Initialize Sentry for log aggregation.
     """
     if dsn:
         logger.info("initializing sentry", dsn=f"{dsn[:15]}...")
@@ -20,7 +19,7 @@ def configure_sentry(dsn: str):
             integrations=[
                 LoggingIntegration(
                     event_level=logging.WARNING,
-                )
+                ),
             ],
             release=pkg_resources.get_distribution("virtool-workflow").version,
             traces_sample_rate=0.2,