cg 76.0.0__py3-none-any.whl → 83.14.0__py3-none-any.whl
This diff shows the changes between two package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- cg/__init__.py +1 -1
- cg/apps/housekeeper/hk.py +18 -1
- cg/apps/tb/api.py +42 -5
- cg/cli/transfer.py +13 -2
- cg/cli/upload/mutacc.py +16 -3
- cg/cli/upload/scout.py +2 -2
- cg/cli/upload/utils.py +10 -1
- cg/cli/workflow/balsamic/base.py +86 -172
- cg/cli/workflow/balsamic/options.py +3 -48
- cg/cli/workflow/balsamic/umi.py +210 -15
- cg/cli/workflow/microsalt/base.py +4 -2
- cg/cli/workflow/mip_dna/base.py +1 -1
- cg/cli/workflow/nallo/base.py +73 -23
- cg/cli/workflow/nf_analysis.py +5 -207
- cg/cli/workflow/raredisease/base.py +41 -54
- cg/cli/workflow/rnafusion/base.py +38 -8
- cg/cli/workflow/taxprofiler/base.py +31 -18
- cg/cli/workflow/tomte/base.py +83 -10
- cg/constants/constants.py +25 -30
- cg/constants/devices.py +6 -1
- cg/constants/gene_panel.py +3 -1
- cg/constants/housekeeper_tags.py +28 -28
- cg/constants/lims.py +4 -0
- cg/constants/nf_analysis.py +0 -1
- cg/constants/observations.py +21 -5
- cg/constants/orderforms.py +3 -3
- cg/constants/pacbio.py +1 -0
- cg/constants/priority.py +1 -1
- cg/constants/report.py +1 -0
- cg/constants/scout.py +12 -9
- cg/constants/sequencing.py +2 -2
- cg/constants/tb.py +5 -5
- cg/exc.py +27 -5
- cg/meta/compress/compress.py +7 -2
- cg/meta/delivery_report/balsamic.py +3 -1
- cg/meta/delivery_report/delivery_report_api.py +4 -3
- cg/meta/delivery_report/nallo.py +11 -11
- cg/meta/delivery_report/raredisease.py +7 -3
- cg/meta/delivery_report/templates/macros/data_analysis/qc_metrics/balsamic_qc_metrics.html +1 -0
- cg/meta/delivery_report/templates/macros/ticket_system.html +1 -1
- cg/meta/observations/balsamic_observations_api.py +110 -14
- cg/meta/observations/mip_dna_observations_api.py +1 -1
- cg/meta/observations/nallo_observations_api.py +1 -1
- cg/meta/observations/observations_api.py +23 -32
- cg/meta/observations/raredisease_observations_api.py +1 -1
- cg/meta/tar/tar.py +5 -2
- cg/meta/transfer/lims.py +32 -3
- cg/meta/upload/balsamic/balsamic.py +1 -8
- cg/meta/upload/coverage.py +5 -5
- cg/meta/upload/raredisease/raredisease.py +3 -0
- cg/meta/upload/scout/hk_tags.py +1 -0
- cg/meta/upload/scout/nallo_config_builder.py +31 -7
- cg/meta/workflow/balsamic.py +70 -36
- cg/meta/workflow/fastq.py +8 -0
- cg/meta/workflow/microsalt/quality_controller/models.py +0 -2
- cg/meta/workflow/microsalt/quality_controller/quality_controller.py +8 -16
- cg/meta/workflow/microsalt/quality_controller/result_logger.py +3 -6
- cg/meta/workflow/microsalt/quality_controller/utils.py +2 -45
- cg/meta/workflow/nallo.py +21 -99
- cg/meta/workflow/nf_analysis.py +12 -263
- cg/meta/workflow/raredisease.py +3 -112
- cg/meta/workflow/rnafusion.py +2 -34
- cg/meta/workflow/taxprofiler.py +2 -38
- cg/meta/workflow/tomte.py +2 -42
- cg/models/balsamic/config.py +0 -24
- cg/models/balsamic/metrics.py +5 -3
- cg/models/cg_config.py +39 -16
- cg/models/deliverables/metric_deliverables.py +1 -1
- cg/models/delivery_report/metadata.py +2 -1
- cg/models/nallo/nallo.py +14 -64
- cg/models/nf_analysis.py +1 -41
- cg/models/raredisease/raredisease.py +1 -63
- cg/models/rnafusion/rnafusion.py +0 -26
- cg/models/scout/scout_load_config.py +5 -2
- cg/models/taxprofiler/taxprofiler.py +0 -42
- cg/models/tomte/tomte.py +0 -69
- cg/resources/nallo_bundle_filenames.yaml +292 -22
- cg/resources/raredisease_bundle_filenames.yaml +11 -1
- cg/resources/taxprofiler_bundle_filenames.yaml +20 -0
- cg/server/admin.py +106 -25
- cg/server/app.py +15 -4
- cg/server/endpoints/sequencing_run/dtos.py +21 -3
- cg/server/endpoints/sequencing_run/pacbio_sequencing_run.py +29 -10
- cg/server/endpoints/sequencing_run/pacbio_smrt_cell_metrics.py +20 -0
- cg/services/analysis_starter/{service.py → analysis_starter.py} +11 -9
- cg/services/analysis_starter/configurator/abstract_model.py +8 -0
- cg/services/analysis_starter/configurator/configurator.py +1 -1
- cg/services/analysis_starter/configurator/extensions/nallo.py +27 -0
- cg/services/analysis_starter/configurator/extensions/{abstract.py → pipeline_extension.py} +1 -1
- cg/services/analysis_starter/configurator/extensions/raredisease.py +3 -1
- cg/services/analysis_starter/configurator/extensions/tomte_extension.py +28 -0
- cg/services/analysis_starter/configurator/file_creators/balsamic_config.py +240 -0
- cg/services/analysis_starter/configurator/file_creators/gene_panel.py +10 -5
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/abstract.py +2 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/models.py +40 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/nallo.py +37 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/raredisease.py +8 -5
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/tomte_params_file_creator.py +64 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/creator.py +1 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/nallo_sample_sheet_creator.py +65 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/protocol.py +12 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{raredisease.py → raredisease_sample_sheet_creator.py} +2 -2
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{rnafusion.py → rnafusion_sample_sheet_creator.py} +2 -2
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{taxprofiler.py → taxprofiler_sample_sheet_creator.py} +2 -2
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/tomte_sample_sheet_creator.py +36 -0
- cg/services/analysis_starter/configurator/implementations/balsamic.py +68 -0
- cg/services/analysis_starter/configurator/implementations/nextflow.py +22 -5
- cg/services/analysis_starter/configurator/models/balsamic.py +152 -0
- cg/services/analysis_starter/configurator/models/mip_dna.py +6 -8
- cg/services/analysis_starter/configurator/models/nextflow.py +9 -0
- cg/services/analysis_starter/constants.py +2 -0
- cg/services/analysis_starter/factories/configurator_factory.py +131 -51
- cg/services/analysis_starter/factories/starter_factory.py +36 -7
- cg/services/analysis_starter/input_fetcher/implementations/bam_fetcher.py +57 -0
- cg/services/analysis_starter/input_fetcher/implementations/fastq_fetcher.py +3 -3
- cg/services/analysis_starter/submitters/seqera_platform/{client.py → seqera_platform_client.py} +19 -3
- cg/services/analysis_starter/submitters/seqera_platform/seqera_platform_submitter.py +73 -0
- cg/services/analysis_starter/submitters/submitter.py +1 -1
- cg/services/analysis_starter/submitters/subprocess/submitter.py +2 -1
- cg/services/analysis_starter/tracker/implementations/balsamic.py +22 -0
- cg/services/analysis_starter/tracker/implementations/microsalt.py +4 -4
- cg/services/analysis_starter/tracker/implementations/mip_dna.py +4 -1
- cg/services/analysis_starter/tracker/implementations/{nextflow.py → nextflow_tracker.py} +6 -4
- cg/services/analysis_starter/tracker/tracker.py +19 -15
- cg/services/deliver_files/factory.py +1 -1
- cg/services/delivery_message/messages/__init__.py +24 -14
- cg/services/delivery_message/messages/{microsalt_mwr_message.py → microsalt_message.py} +1 -1
- cg/services/delivery_message/utils.py +4 -40
- cg/services/illumina/backup/backup_service.py +29 -7
- cg/services/orders/validation/constants.py +3 -0
- cg/services/orders/validation/index_sequences.py +558 -0
- cg/services/orders/validation/order_types/microsalt/models/sample.py +2 -3
- cg/services/run_devices/pacbio/data_storage_service/pacbio_store_service.py +39 -18
- cg/services/run_devices/pacbio/data_transfer_service/data_transfer_service.py +8 -2
- cg/services/run_devices/pacbio/data_transfer_service/dto.py +9 -3
- cg/services/run_devices/pacbio/data_transfer_service/utils.py +14 -7
- cg/services/run_devices/pacbio/metrics_parser/models.py +1 -0
- cg/services/run_devices/pacbio/sequencing_runs_service.py +35 -7
- cg/services/sequencing_qc_service/quality_checks/checks.py +18 -16
- cg/services/sequencing_qc_service/quality_checks/utils.py +82 -18
- cg/services/sequencing_qc_service/sequencing_qc_service.py +12 -10
- cg/store/crud/create.py +73 -42
- cg/store/crud/read.py +73 -7
- cg/store/crud/update.py +14 -3
- cg/store/models.py +98 -35
- cg/store/store.py +8 -1
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/METADATA +1 -1
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/RECORD +150 -138
- cg/services/analysis_starter/submitters/seqera_platform/submitter.py +0 -39
- cg/services/delivery_message/messages/microsalt_mwx_message.py +0 -18
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/WHEEL +0 -0
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/entry_points.txt +0 -0
cg/services/analysis_starter/factories/starter_factory.py
CHANGED

@@ -5,21 +5,25 @@ from cg.constants import Workflow
 from cg.meta.archive.archive import SpringArchiveAPI
 from cg.meta.compress import CompressAPI
 from cg.models.cg_config import CGConfig
+from cg.services.analysis_starter.analysis_starter import AnalysisStarter
 from cg.services.analysis_starter.configurator.configurator import Configurator
 from cg.services.analysis_starter.constants import IMPLEMENTED_FASTQ_WORKFLOWS
 from cg.services.analysis_starter.factories.configurator_factory import ConfiguratorFactory
+from cg.services.analysis_starter.input_fetcher.implementations.bam_fetcher import BamFetcher
 from cg.services.analysis_starter.input_fetcher.implementations.fastq_fetcher import FastqFetcher
 from cg.services.analysis_starter.input_fetcher.input_fetcher import InputFetcher
-from cg.services.analysis_starter.…
-…
-…
+from cg.services.analysis_starter.submitters.seqera_platform.seqera_platform_client import (
+    SeqeraPlatformClient,
+)
+from cg.services.analysis_starter.submitters.seqera_platform.seqera_platform_submitter import (
     SeqeraPlatformSubmitter,
 )
 from cg.services.analysis_starter.submitters.submitter import Submitter
 from cg.services.analysis_starter.submitters.subprocess.submitter import SubprocessSubmitter
+from cg.services.analysis_starter.tracker.implementations.balsamic import BalsamicTracker
 from cg.services.analysis_starter.tracker.implementations.microsalt import MicrosaltTracker
 from cg.services.analysis_starter.tracker.implementations.mip_dna import MIPDNATracker
-from cg.services.analysis_starter.tracker.implementations.nextflow import NextflowTracker
+from cg.services.analysis_starter.tracker.implementations.nextflow_tracker import NextflowTracker
 from cg.services.analysis_starter.tracker.tracker import Tracker
 from cg.store.store import Store

@@ -71,10 +75,18 @@ class AnalysisStarterFactory:
                 spring_archive_api=spring_archive_api,
                 status_db=self.store,
             )
-…
+        elif workflow == Workflow.NALLO:
+            return BamFetcher(housekeeper_api=self.housekeeper_api, status_db=self.store)
+        raise NotImplementedError(f"No input fetcher for workflow {workflow}")

     def _get_submitter(self, workflow: Workflow) -> Submitter:
-        if workflow in […
+        if workflow in [
+            Workflow.NALLO,
+            Workflow.RAREDISEASE,
+            Workflow.RNAFUSION,
+            Workflow.TAXPROFILER,
+            Workflow.TOMTE,
+        ]:
             return self._get_seqera_platform_submitter()
         else:
             return SubprocessSubmitter()

@@ -87,7 +99,13 @@ class AnalysisStarterFactory:
         )

     def _get_tracker(self, workflow: Workflow) -> Tracker:
-        if workflow in […
+        if workflow in [
+            Workflow.NALLO,
+            Workflow.RAREDISEASE,
+            Workflow.RNAFUSION,
+            Workflow.TAXPROFILER,
+            Workflow.TOMTE,
+        ]:
             return NextflowTracker(
                 store=self.store,
                 trailblazer_api=self.cg_config.trailblazer_api,

@@ -100,9 +118,20 @@
                 trailblazer_api=self.cg_config.trailblazer_api,
                 workflow_root=self.cg_config.microsalt.root,
             )
+        elif workflow in [
+            Workflow.BALSAMIC,
+            Workflow.BALSAMIC_UMI,
+        ]:
+            return BalsamicTracker(
+                store=self.store,
+                trailblazer_api=self.cg_config.trailblazer_api,
+                workflow_root=str(self.cg_config.balsamic.root),
+            )
         elif workflow == Workflow.MIP_DNA:
             return MIPDNATracker(
                 store=self.store,
                 trailblazer_api=self.cg_config.trailblazer_api,
                 workflow_root=self.cg_config.mip_rd_dna.root,
             )
+        else:
+            raise NotImplementedError(f"No {workflow} tracker")
cg/services/analysis_starter/input_fetcher/implementations/bam_fetcher.py
ADDED

@@ -0,0 +1,57 @@
+from pathlib import Path
+
+from housekeeper.store.models import File
+
+from cg.apps.housekeeper.hk import HousekeeperAPI
+from cg.exc import AnalysisNotReadyError
+from cg.services.analysis_starter.input_fetcher.input_fetcher import InputFetcher
+from cg.store.models import Case
+from cg.store.store import Store
+
+
+class BamFetcher(InputFetcher):
+    def __init__(self, housekeeper_api: HousekeeperAPI, status_db: Store) -> None:
+        self.housekeeper_api = housekeeper_api
+        self.status_db = status_db
+
+    def ensure_files_are_ready(self, case_id: str) -> None:
+        case: Case = self.status_db.get_case_by_internal_id_strict(case_id)
+        samples_without_files: list[str] = self._get_samples_without_files(case)
+        missing_files: list[str] = self._get_missing_file_paths(case)
+        if samples_without_files or missing_files:
+            self._raise_error(
+                missing_files=missing_files, samples_without_files=samples_without_files
+            )
+
+    def _get_samples_without_files(self, case: Case) -> list[str]:
+        samples_without_files: list[str] = []
+        for sample in case.samples:
+            sample_files: list[File] = self.housekeeper_api.files(
+                bundle=sample.internal_id, tags={"bam"}
+            ).all()
+            if not sample_files:
+                samples_without_files.append(sample.internal_id)
+        return samples_without_files
+
+    def _get_missing_file_paths(self, case: Case) -> list[str]:
+        missing_files: list[str] = []
+        for sample in case.samples:
+            sample_files: list[File] = self.housekeeper_api.files(
+                bundle=sample.internal_id, tags={"bam"}
+            ).all()
+            missing_sample_files: list[str] = [
+                file.full_path for file in sample_files if not Path(file.full_path).is_file()
+            ]
+            missing_files.extend(missing_sample_files)
+        return missing_files
+
+    @staticmethod
+    def _raise_error(missing_files: list[str], samples_without_files: list[str]) -> None:
+        error_message = ""
+        if samples_without_files:
+            samples_without_files_str = "\n".join(samples_without_files)
+            error_message += f"The following samples are missing BAM files in Housekeeper: \n{samples_without_files_str}"
+        if missing_files:
+            missing_files_str = "\n".join(missing_files)
+            error_message += f"\nThe following BAM files are missing on disk: \n{missing_files_str}"
+        raise AnalysisNotReadyError(error_message)
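A minimal usage sketch of the new BamFetcher. It assumes `housekeeper_api` and `status_db` are already-configured `HousekeeperAPI` and `Store` instances (in cg itself the AnalysisStarterFactory builds the fetcher for `Workflow.NALLO`); the case id below is a placeholder.

```python
# Sketch only: housekeeper_api and status_db are assumed to be configured instances.
from cg.exc import AnalysisNotReadyError
from cg.services.analysis_starter.input_fetcher.implementations.bam_fetcher import BamFetcher

fetcher = BamFetcher(housekeeper_api=housekeeper_api, status_db=status_db)
try:
    # Checks that every sample in the case has BAM files tagged in Housekeeper
    # and that each registered file actually exists on disk.
    fetcher.ensure_files_are_ready(case_id="some_case_internal_id")
except AnalysisNotReadyError as error:
    print(f"Case is not ready for analysis: {error}")
```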
cg/services/analysis_starter/input_fetcher/implementations/fastq_fetcher.py
CHANGED

@@ -96,7 +96,7 @@ class FastqFetcher(InputFetcher):
             else:
                 self._decompress_case(case_id)
         elif case_compression_data.is_spring_decompression_running():
-            self.status_db.…
+            self.status_db.update_case_action(case_internal_id=case_id, action=CaseActions.ANALYZE)
             return

         self._add_decompressed_fastq_files_to_housekeeper(case_id)

@@ -109,7 +109,7 @@ class FastqFetcher(InputFetcher):
         except DecompressionCouldNotStartError:
             LOG.warning(f"Decompression failed to start for {case_id}")
             return
-        self.status_db.…
+        self.status_db.update_case_action(case_internal_id=case_id, action=CaseActions.ANALYZE)
         LOG.info(f"Decompression started for {case_id}")

     def _are_fastq_files_ready_for_analysis(self, case_id: str) -> bool:

@@ -155,7 +155,7 @@ class FastqFetcher(InputFetcher):
         LOG.info(
             f"Decompression is running for {case_id}, analysis will be started when decompression is done"
         )
-        self.status_db.…
+        self.status_db.update_case_action(case_internal_id=case_id, action=CaseActions.ANALYZE)

     @staticmethod
     def _should_skip_sample(case: Case, sample: Sample) -> bool:
cg/services/analysis_starter/submitters/seqera_platform/{client.py → seqera_platform_client.py}
RENAMED

@@ -1,4 +1,5 @@
 import logging
+from typing import Any

 import requests

@@ -17,7 +18,7 @@ class SeqeraPlatformClient:
         self.compute_environment_ids: dict[SlurmQos, str] = config.compute_environments
         self.workspace_id: int = config.workspace_id

-    def …
+    def launch_workflow(self, request: WorkflowLaunchRequest) -> dict:
         """Launches a case from the request and returns the workflow ID."""
         url = f"{self.base_url}/workflow/launch"
         params: dict = {"workspaceId": self.workspace_id}

@@ -25,10 +26,25 @@
             f"Sending request body {request.model_dump()} \n Headers: {self.auth_headers} \n Params: {params}"
         )
         response: requests.Response = requests.post(
+            headers=self.auth_headers,
+            json=request.model_dump(),
+            params=params,
             url=url,
+        )
+        response.raise_for_status()
+        return response.json()
+
+    def get_workflow(self, workflow_id: str) -> dict[str, Any]:
+        """Gets information about the specified workflow (the Seqera equivalent of an analysis) in Seqera."""
+        url = f"{self.base_url}/workflow/{workflow_id}"
+        params: dict = {"workspaceId": self.workspace_id}
+        LOG.debug(
+            f"Get seqera workflow with id: {workflow_id} \n Headers: {self.auth_headers} \n Params: {params}"
+        )
+        response: requests.Response = requests.get(
             headers=self.auth_headers,
             params=params,
-…
+            url=url,
         )
         response.raise_for_status()
-        return response.json()
+        return response.json()
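The renamed client now exposes both calls used by the submitter. A short sketch of the two-step interaction, assuming `client` is a configured SeqeraPlatformClient and `launch_request` a WorkflowLaunchRequest built elsewhere (see the submitter below); the response keys shown are the ones the diff relies on.

```python
# Sketch: client and launch_request are assumed to exist already.
launch_response: dict = client.launch_workflow(launch_request)  # POST {base_url}/workflow/launch
workflow_id: str | None = launch_response.get("workflowId")

if workflow_id:
    # GET {base_url}/workflow/{id}; a Seqera "workflow" is the platform's analysis object.
    workflow_info = client.get_workflow(workflow_id)
    session_id = workflow_info.get("workflow", {}).get("sessionId")
```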
cg/services/analysis_starter/submitters/seqera_platform/seqera_platform_submitter.py
ADDED

@@ -0,0 +1,73 @@
+from datetime import datetime
+from pathlib import Path
+from typing import Any, cast
+
+from cg.constants.priority import SlurmQos
+from cg.exc import SeqeraError
+from cg.io.yaml import read_yaml, write_yaml_stream
+from cg.services.analysis_starter.configurator.models.nextflow import NextflowCaseConfig
+from cg.services.analysis_starter.submitters.seqera_platform.dtos import (
+    LaunchRequest,
+    WorkflowLaunchRequest,
+)
+from cg.services.analysis_starter.submitters.seqera_platform.seqera_platform_client import (
+    SeqeraPlatformClient,
+)
+from cg.services.analysis_starter.submitters.submitter import Submitter
+
+
+class SeqeraPlatformSubmitter(Submitter):
+
+    def __init__(self, client: SeqeraPlatformClient, compute_environment_ids: dict[SlurmQos, str]):
+        self.client: SeqeraPlatformClient = client
+        self.compute_environment_ids: dict[str, str] = compute_environment_ids
+
+    def submit(self, case_config: NextflowCaseConfig) -> NextflowCaseConfig:
+        """Starts a case and returns the workflow id for the job."""
+        new_case_config: NextflowCaseConfig = case_config.model_copy()
+        run_request: WorkflowLaunchRequest = self._create_launch_request(new_case_config)
+
+        response: dict = self.client.launch_workflow(run_request)
+        if not (workflow_id := response.get("workflowId")):
+            raise SeqeraError(f"workflowId missing from response: {response}")
+        new_case_config.workflow_id = workflow_id
+
+        workflow_response: dict = self.client.get_workflow(cast(str, new_case_config.workflow_id))
+        if not (session_id := self._get_session_id(workflow_response)):
+            raise SeqeraError(f"No sessionId found in response: {workflow_response}")
+        new_case_config.session_id = session_id
+
+        return new_case_config
+
+    @staticmethod
+    def _get_session_id(workflow_response: dict[str, Any]) -> str | None:
+        workflow: dict | None = workflow_response.get("workflow")
+        if workflow and (session_id := workflow.get("sessionId")):
+            return session_id
+        else:
+            return None
+
+    def _create_launch_request(self, case_config: NextflowCaseConfig) -> WorkflowLaunchRequest:
+        parameters: dict = read_yaml(Path(case_config.params_file))
+        parameter_stream: str = write_yaml_stream(parameters)
+        launch_request = LaunchRequest(
+            computeEnvId=self.compute_environment_ids[case_config.case_priority],
+            configProfiles=case_config.config_profiles,
+            configText=f"includeConfig '{case_config.nextflow_config_file}'",
+            paramsText=parameter_stream,
+            pipeline=case_config.pipeline_repository,
+            preRunScript=case_config.pre_run_script,
+            resume=case_config.resume,
+            revision=case_config.revision,
+            runName=self._create_run_name(case_id=case_config.case_id, resume=case_config.resume),
+            sessionId=case_config.session_id,
+            workDir=case_config.work_dir,
+        )
+        return WorkflowLaunchRequest(launch=launch_request)
+
+    @staticmethod
+    def _create_run_name(case_id: str, resume: bool) -> str:
+        if resume:
+            return f"{case_id}_resumed_{datetime.now().strftime('%Y-%m-%d_%H-%M')}"
+        else:
+            return case_id
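A short usage sketch of the new submitter, assuming a configured client and a NextflowCaseConfig produced by the workflow's configurator; the compute-environment ID value is a placeholder.

```python
# Sketch: client and case_config are assumed to exist; the ID string is a placeholder.
from cg.constants.priority import SlurmQos
from cg.services.analysis_starter.submitters.seqera_platform.seqera_platform_submitter import (
    SeqeraPlatformSubmitter,
)

submitter = SeqeraPlatformSubmitter(
    client=client,
    compute_environment_ids={SlurmQos.NORMAL: "compute-env-id"},
)

# submit() returns a copy of the config enriched with the Seqera workflow and session IDs,
# which the tracker later reads via get_workflow_id() / get_session_id().
submitted_config = submitter.submit(case_config)
print(submitted_config.workflow_id, submitted_config.session_id)
```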
cg/services/analysis_starter/submitters/submitter.py
CHANGED

@@ -4,7 +4,7 @@ from cg.services.analysis_starter.configurator.abstract_model import CaseConfig


 class Submitter(ABC):
-    def submit(self, case_config: CaseConfig) -> …
+    def submit(self, case_config: CaseConfig) -> CaseConfig:
         """Submits the case to be run either as a SubProcess or using the Seqera Platform.
         Returns the workflowId if run via the Seqera Platform."""
         pass

cg/services/analysis_starter/submitters/subprocess/submitter.py
CHANGED

@@ -12,7 +12,7 @@ SubprocessCaseConfig = MicrosaltCaseConfig | MIPDNACaseConfig


 class SubprocessSubmitter(Submitter):
-    def submit(self, case_config: SubprocessCaseConfig) -> …
+    def submit(self, case_config: SubprocessCaseConfig) -> SubprocessCaseConfig:
         command: str = case_config.get_start_command()
         LOG.info(f"Running: {command}")
         subprocess.run(

@@ -22,6 +22,7 @@ class SubprocessSubmitter(Submitter):
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
         )
+        return case_config

     @staticmethod
     def get_workflow_version(case_config: SubprocessCaseConfig) -> str:
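Both submitter types now return the case config from `submit()`, so callers can hand the same object straight to the tracker. One plausible composition, with `case_config` assumed to come from the workflow's configurator and `submitter`/`tracker` from the factories; the exact orchestration inside cg's AnalysisStarter may differ.

```python
# Sketch of how the pieces can compose now that submit() returns the case config.
submitted_config = submitter.submit(case_config)  # unchanged for subprocess, enriched for Seqera
tracker.track(case_config=submitted_config)       # tracker reads workflow/session IDs from the config
```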
cg/services/analysis_starter/tracker/implementations/balsamic.py
ADDED

@@ -0,0 +1,22 @@
+from pathlib import Path
+
+from cg.constants.constants import WorkflowManager
+from cg.io.json import read_json
+from cg.services.analysis_starter.configurator.models.balsamic import BalsamicCaseConfig
+from cg.services.analysis_starter.tracker.tracker import Tracker
+
+
+class BalsamicTracker(Tracker):
+
+    def _workflow_manager(self):
+        return WorkflowManager.Slurm
+
+    def _get_job_ids_path(self, case_id: str) -> Path:
+        return Path(self._get_out_dir_path(case_id), "slurm_jobids.yaml")
+
+    def _get_out_dir_path(self, case_id: str) -> Path:
+        return Path(self.workflow_root, case_id, "analysis")
+
+    def _get_workflow_version(self, case_config: BalsamicCaseConfig) -> str:
+        config_data: dict = read_json(case_config.sample_config)
+        return config_data["analysis"]["BALSAMIC_version"]
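The new tracker reads the workflow version straight out of the BALSAMIC sample config JSON. A standalone sketch of the same lookup using only the standard library; the path is a placeholder.

```python
import json
from pathlib import Path

# Placeholder path; in cg this comes from BalsamicCaseConfig.sample_config.
sample_config = Path("/path/to/case_id.json")

# The sample config is expected to contain {"analysis": {"BALSAMIC_version": "..."}},
# which is exactly what BalsamicTracker._get_workflow_version returns.
config_data: dict = json.loads(sample_config.read_text())
balsamic_version: str = config_data["analysis"]["BALSAMIC_version"]
print(balsamic_version)
```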
cg/services/analysis_starter/tracker/implementations/microsalt.py
CHANGED

@@ -27,13 +27,13 @@ class MicrosaltTracker(Tracker):
     def _get_job_ids_path(self, case_id: str) -> Path:
         project_id: str = self._get_file_name_start(case_id)
         return Path(
-            self.…
-            "results",
-            "reports",
-            "trailblazer",
+            self._get_out_dir_path(case_id),
             f"{project_id}_slurm_ids{FileExtensions.YAML}",
         )

+    def _get_out_dir_path(self, case_id: str) -> Path:
+        return Path(self.workflow_root, "results", "reports", "trailblazer")
+
     def _get_file_name_start(self, case_id: str) -> str:
         """Returns the LIMS project id if the case contains multiple samples, else the sample id."""
         case: Case = self.store.get_case_by_internal_id(case_id)
cg/services/analysis_starter/tracker/implementations/mip_dna.py
CHANGED

@@ -12,7 +12,10 @@ class MIPDNATracker(Tracker):
         return WorkflowManager.Slurm

     def _get_job_ids_path(self, case_id: str) -> Path:
-        return Path(self.…
+        return Path(self._get_out_dir_path(case_id), "slurm_job_ids.yaml")
+
+    def _get_out_dir_path(self, case_id: str) -> Path:
+        return Path(self.workflow_root, case_id, "analysis")

     def _get_sample_info_path(self, case_id: str) -> Path:
         return Path(self.workflow_root, case_id, "analysis", f"{case_id}_qc_sample_info.yaml")
cg/services/analysis_starter/tracker/implementations/{nextflow.py → nextflow_tracker.py}
RENAMED

@@ -1,6 +1,6 @@
 from pathlib import Path

-from cg.constants.constants import …
+from cg.constants.constants import WorkflowManager
 from cg.services.analysis_starter.configurator.models.nextflow import NextflowCaseConfig
 from cg.services.analysis_starter.tracker.tracker import Tracker


@@ -9,9 +9,11 @@ class NextflowTracker(Tracker):
     def _workflow_manager(self) -> WorkflowManager:
         return WorkflowManager.Tower

-    def _get_job_ids_path(self, case_id: str) -> Path:
-…
-…
+    def _get_job_ids_path(self, case_id: str) -> Path | None:
+        return None
+
+    def _get_out_dir_path(self, case_id: str) -> Path:
+        return Path(self.workflow_root, case_id)

     def _get_workflow_version(self, case_config: NextflowCaseConfig) -> str:
         return case_config.revision
cg/services/analysis_starter/tracker/tracker.py
CHANGED

@@ -6,7 +6,7 @@ from pathlib import Path
 from cg.apps.environ import environ_email
 from cg.apps.tb import TrailblazerAPI
 from cg.apps.tb.models import TrailblazerAnalysis
-from cg.constants.constants import CaseActions, CustomerId, Workflow
+from cg.constants.constants import CaseActions, CustomerId, Workflow, WorkflowManager
 from cg.constants.priority import TrailblazerPriority
 from cg.constants.sequencing import SeqLibraryPrepCategory
 from cg.constants.tb import AnalysisType

@@ -35,10 +35,9 @@ class Tracker(ABC):
     def track(
         self,
         case_config: CaseConfig,
-        tower_workflow_id: str | None = None,
     ) -> None:
         tb_analysis: TrailblazerAnalysis = self._track_in_trailblazer(
-            case_id=case_config.case_id, tower_workflow_id=…
+            case_id=case_config.case_id, tower_workflow_id=case_config.get_workflow_id()
         )
         LOG.info(
             f"Analysis entry for case {case_config.case_id} created in Trailblazer with id {tb_analysis.id}"

@@ -59,20 +58,20 @@
         self.store.update_case_action(case_internal_id=case_id, action=None)

     def _track_in_trailblazer(
-        self, case_id: str, tower_workflow_id: …
+        self, case_id: str, tower_workflow_id: str | None
     ) -> TrailblazerAnalysis:
         analysis_type: str = self._get_analysis_type(case_id)
-        config_path: Path = self._get_job_ids_path(case_id)
+        config_path: Path | None = self._get_job_ids_path(case_id)
         email: str = environ_email()
-        order_id: int = self.store.…
-        out_dir: str = …
+        order_id: int = self.store.get_case_by_internal_id_strict(case_id).latest_order.id
+        out_dir: str = self._get_out_dir_path(case_id).as_posix()
         priority: TrailblazerPriority = self._get_trailblazer_priority(case_id)
         ticket: str = self.store.get_latest_ticket_from_case(case_id)
         is_case_for_development: bool = self._is_case_for_development(case_id)
         return self.trailblazer_api.add_pending_analysis(
             analysis_type=analysis_type,
             case_id=case_id,
-            config_path=config_path.as_posix(),
+            config_path=config_path.as_posix() if config_path else None,
             email=email,
             order_id=order_id,
             out_dir=out_dir,

@@ -90,19 +89,20 @@
         """Storing an analysis bundle in StatusDB for a provided case."""
         case_id: str = case_config.case_id
         LOG.info(f"Storing analysis in StatusDB for {case_id}")
-        case: Case = self.store.…
+        case: Case = self.store.get_case_by_internal_id_strict(case_id)
         is_primary: bool = len(case.analyses) == 0
         analysis_start: datetime = datetime.now()
         workflow_version: str = self._get_workflow_version(case_config)
         new_analysis: Analysis = self.store.add_analysis(
+            case=case,
             workflow=Workflow(case.data_analysis),
             version=workflow_version,
             completed_at=None,
             primary=is_primary,
+            session_id=case_config.get_session_id(),
             started_at=analysis_start,
             trailblazer_id=trailblazer_id,
         )
-        new_analysis.case = case
         self.store.add_item_to_store(new_analysis)
         self.store.commit_to_store()
         LOG.info(f"Analysis successfully stored in StatusDB: {case_id} : {analysis_start}")

@@ -112,7 +112,7 @@
         Return the analysis type for sample.
         Only analysis types supported by Trailblazer are valid outputs.
         """
-        sample: Sample = self.store.…
+        sample: Sample = self.store.get_case_by_internal_id_strict(case_id).samples[0]
         prep_category: str = sample.prep_category
         if prep_category and prep_category.lower() in {
             SeqLibraryPrepCategory.TARGETED_GENOME_SEQUENCING,

@@ -125,19 +125,23 @@

     def _get_trailblazer_priority(self, case_id: str) -> TrailblazerPriority:
         """Get the priority for the case in Trailblazer."""
-        case: Case = self.store.…
+        case: Case = self.store.get_case_by_internal_id_strict(internal_id=case_id)
         return MAP_TO_TRAILBLAZER_PRIORITY[case.priority]

     def _is_case_for_development(self, case_id: str) -> bool:
-        case: Case = self.store.…
+        case: Case = self.store.get_case_by_internal_id_strict(case_id)
         return case.customer.internal_id == CustomerId.CG_INTERNAL_CUSTOMER

     @abstractmethod
-    def _workflow_manager(self):
+    def _workflow_manager(self) -> WorkflowManager:
         pass

     @abstractmethod
-    def _get_job_ids_path(self, case_id: str):
+    def _get_job_ids_path(self, case_id: str) -> Path | None:
+        pass
+
+    @abstractmethod
+    def _get_out_dir_path(self, case_id: str) -> Path:
         pass

     @abstractmethod
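With the updated abstract methods, a concrete tracker now has to provide a workflow manager, an optional job-ids file, and an output directory, plus the workflow version used when the analysis is stored. A minimal skeleton under those assumptions (paths and version are placeholders; other abstract hooks not shown in these hunks may also be required):

```python
from pathlib import Path

from cg.constants.constants import WorkflowManager
from cg.services.analysis_starter.configurator.abstract_model import CaseConfig
from cg.services.analysis_starter.tracker.tracker import Tracker


class ExampleTracker(Tracker):
    """Illustrative subclass, not a real cg workflow."""

    def _workflow_manager(self) -> WorkflowManager:
        return WorkflowManager.Slurm

    def _get_job_ids_path(self, case_id: str) -> Path | None:
        # Return None when the workflow manager (e.g. Tower) tracks jobs itself.
        return Path(self._get_out_dir_path(case_id), "slurm_job_ids.yaml")

    def _get_out_dir_path(self, case_id: str) -> Path:
        return Path(self.workflow_root, case_id, "analysis")

    def _get_workflow_version(self, case_config: CaseConfig) -> str:
        return "0.0.0"  # placeholder; real trackers read this from the workflow's own config
```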
cg/services/deliver_files/factory.py
CHANGED

@@ -180,7 +180,7 @@ class DeliveryServiceFactory:
         tag: str = case.samples[0].application_version.application.tag
         microbial_tags: list[str] = [
             application.tag
-            for application in self.store.…
+            for application in self.store.get_applications_by_prep_category(
                 prep_category=SeqLibraryPrepCategory.MICROBIAL
             )
         ]
cg/services/delivery_message/messages/__init__.py
CHANGED

@@ -1,25 +1,35 @@
-from cg.services.delivery_message.messages.analysis_scout_message import (
-    AnalysisScoutMessage,
-)
+from cg.services.delivery_message.messages.analysis_scout_message import AnalysisScoutMessage
 from cg.services.delivery_message.messages.covid_message import CovidMessage
 from cg.services.delivery_message.messages.fastq_analysis_scout_message import (
     FastqAnalysisScoutMessage,
 )
 from cg.services.delivery_message.messages.fastq_message import FastqMessage
 from cg.services.delivery_message.messages.fastq_scout_message import FastqScoutMessage
-from cg.services.delivery_message.messages.microsalt_mwr_message import (
-    MicrosaltMwrMessage,
-)
-from cg.services.delivery_message.messages.microsalt_mwx_message import (
-    MicrosaltMwxMessage,
-)
-from cg.services.delivery_message.messages.scout_message import ScoutMessage
-from cg.services.delivery_message.messages.statina_message import StatinaMessage
+from cg.services.delivery_message.messages.microsalt_message import MicrosaltMessage
 from cg.services.delivery_message.messages.rna_delivery_message import (
-    RNAScoutStrategy,
-    RNAFastqStrategy,
     RNAAnalysisStrategy,
+    RNADeliveryMessage,
     RNAFastqAnalysisStrategy,
+    RNAFastqStrategy,
+    RNAScoutStrategy,
     RNAUploadMessageStrategy,
-    RNADeliveryMessage,
 )
+from cg.services.delivery_message.messages.scout_message import ScoutMessage
+from cg.services.delivery_message.messages.statina_message import StatinaMessage
+
+__all__ = [
+    "AnalysisScoutMessage",
+    "CovidMessage",
+    "FastqAnalysisScoutMessage",
+    "FastqMessage",
+    "FastqScoutMessage",
+    "MicrosaltMessage",
+    "RNAAnalysisStrategy",
+    "RNADeliveryMessage",
+    "RNAFastqAnalysisStrategy",
+    "RNAFastqStrategy",
+    "RNAScoutStrategy",
+    "RNAUploadMessageStrategy",
+    "ScoutMessage",
+    "StatinaMessage",
+]
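With the flattened re-exports and the explicit `__all__`, consumers can import every message class from the package root rather than from individual modules, for example:

```python
from cg.services.delivery_message.messages import (
    MicrosaltMessage,
    RNADeliveryMessage,
    ScoutMessage,
)
```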
cg/services/delivery_message/messages/{microsalt_mwr_message.py → microsalt_message.py}
RENAMED

@@ -6,7 +6,7 @@ from cg.services.delivery_message.messages.utils import (
 from cg.store.models import Case


-class MicrosaltMwrMessage(DeliveryMessage):
+class MicrosaltMessage(DeliveryMessage):
     def create_message(self, cases: list[Case]) -> str:
         delivery_path: str = get_caesar_delivery_path(cases[0])
         return (
cg/services/delivery_message/utils.py
CHANGED

@@ -1,4 +1,4 @@
-from cg.constants.constants import DataDelivery, …
+from cg.constants.constants import DataDelivery, Workflow
 from cg.exc import CaseNotFoundError, OrderMismatchError
 from cg.services.delivery_message.messages import (
     AnalysisScoutMessage,

@@ -6,7 +6,7 @@ from cg.services.delivery_message.messages import (
     FastqAnalysisScoutMessage,
     FastqMessage,
     FastqScoutMessage,
-…
+    MicrosaltMessage,
     ScoutMessage,
     StatinaMessage,
 )

@@ -14,7 +14,6 @@ from cg.services.delivery_message.messages.analysis_message import AnalysisMessage
 from cg.services.delivery_message.messages.bam_message import BamMessage
 from cg.services.delivery_message.messages.delivery_message import DeliveryMessage
 from cg.services.delivery_message.messages.fastq_analysis_message import FastqAnalysisMessage
-from cg.services.delivery_message.messages.microsalt_mwx_message import MicrosaltMwxMessage
 from cg.services.delivery_message.messages.order_message import TaxprofilerDeliveryMessage
 from cg.services.delivery_message.messages.raw_data_analysis_message import RawDataAnalysisMessage
 from cg.services.delivery_message.messages.raw_data_analysis_scout_message import (

@@ -29,7 +28,7 @@ from cg.services.delivery_message.messages.rna_delivery_message import (
     RNAScoutStrategy,
     RNAUploadMessageStrategy,
 )
-from cg.store.models import Case, Sample
+from cg.store.models import Case
 from cg.store.store import Store

 MESSAGE_MAP = {

@@ -64,7 +63,7 @@ def get_message(cases: list[Case], store: Store) -> str:

 def get_message_strategy(case: Case, store: Store) -> DeliveryMessage:
     if case.data_analysis == Workflow.MICROSALT:
-        return …
+        return MicrosaltMessage()

     if case.data_analysis == Workflow.MUTANT:
         return CovidMessage()

@@ -96,41 +95,6 @@ def get_rna_message_strategy_from_data_delivery(
         return MESSAGE_MAP[case.data_delivery]()


-def get_microsalt_message_strategy(case: Case) -> DeliveryMessage:
-    if has_mwx_samples(case) or has_vwg_samples(case):
-        return MicrosaltMwxMessage()
-
-    if has_mwr_samples(case):
-        return MicrosaltMwrMessage()
-
-    app_tag: str = get_case_app_tag(case)
-    raise NotImplementedError(f"Microsalt apptag {app_tag} not supported.")
-
-
-def has_mwx_samples(case: Case) -> bool:
-    case_app_tag: str = get_case_app_tag(case)
-    return case_app_tag == MicrosaltAppTags.MWXNXTR003
-
-
-def has_mwr_samples(case: Case) -> bool:
-    case_app_tag: str = get_case_app_tag(case)
-    return case_app_tag == MicrosaltAppTags.MWRNXTR003
-
-
-def has_vwg_samples(case: Case) -> bool:
-    case_app_tag: str = get_case_app_tag(case)
-    return case_app_tag == MicrosaltAppTags.VWGNXTR001
-
-
-def get_case_app_tag(case: Case) -> str:
-    sample: Sample = case.samples[0]
-    return get_sample_app_tag(sample)
-
-
-def get_sample_app_tag(sample: Sample) -> str:
-    return sample.application_version.application.tag
-
-
 def validate_cases(cases: list[Case], case_ids: list[str]) -> None:
     if set(case_ids) != set(case.internal_id for case in cases):
         raise CaseNotFoundError("Internal id not found in the database")