cg 80.1.0__py3-none-any.whl → 83.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cg/__init__.py +1 -1
- cg/apps/housekeeper/hk.py +1 -1
- cg/apps/tb/api.py +1 -1
- cg/cli/upload/mutacc.py +16 -3
- cg/cli/upload/scout.py +2 -2
- cg/cli/upload/utils.py +10 -1
- cg/cli/workflow/balsamic/base.py +29 -4
- cg/cli/workflow/microsalt/base.py +3 -1
- cg/cli/workflow/nallo/base.py +18 -38
- cg/cli/workflow/nf_analysis.py +2 -203
- cg/cli/workflow/raredisease/base.py +33 -51
- cg/cli/workflow/rnafusion/base.py +28 -3
- cg/cli/workflow/taxprofiler/base.py +21 -13
- cg/cli/workflow/tomte/base.py +17 -19
- cg/constants/constants.py +3 -3
- cg/constants/devices.py +6 -1
- cg/constants/gene_panel.py +3 -1
- cg/constants/lims.py +4 -0
- cg/constants/orderforms.py +1 -1
- cg/constants/pacbio.py +1 -0
- cg/constants/scout.py +6 -4
- cg/exc.py +12 -4
- cg/meta/compress/compress.py +7 -2
- cg/meta/delivery_report/nallo.py +1 -1
- cg/meta/delivery_report/templates/macros/ticket_system.html +1 -1
- cg/meta/observations/balsamic_observations_api.py +1 -1
- cg/meta/observations/mip_dna_observations_api.py +1 -1
- cg/meta/observations/nallo_observations_api.py +1 -1
- cg/meta/observations/observations_api.py +1 -1
- cg/meta/observations/raredisease_observations_api.py +1 -1
- cg/meta/tar/tar.py +5 -2
- cg/meta/upload/coverage.py +5 -5
- cg/meta/upload/raredisease/raredisease.py +3 -0
- cg/meta/upload/scout/nallo_config_builder.py +14 -0
- cg/meta/workflow/nallo.py +22 -95
- cg/meta/workflow/nf_analysis.py +11 -262
- cg/meta/workflow/raredisease.py +3 -112
- cg/meta/workflow/rnafusion.py +2 -34
- cg/meta/workflow/taxprofiler.py +2 -38
- cg/meta/workflow/tomte.py +2 -42
- cg/models/deliverables/metric_deliverables.py +1 -1
- cg/models/nallo/nallo.py +14 -64
- cg/models/nf_analysis.py +1 -41
- cg/models/raredisease/raredisease.py +0 -62
- cg/models/rnafusion/rnafusion.py +0 -26
- cg/models/scout/scout_load_config.py +1 -0
- cg/models/taxprofiler/taxprofiler.py +0 -42
- cg/models/tomte/tomte.py +0 -69
- cg/resources/nallo_bundle_filenames.yaml +282 -22
- cg/resources/raredisease_bundle_filenames.yaml +11 -1
- cg/resources/taxprofiler_bundle_filenames.yaml +20 -0
- cg/server/admin.py +51 -24
- cg/server/app.py +15 -4
- cg/server/endpoints/sequencing_run/dtos.py +21 -3
- cg/server/endpoints/sequencing_run/pacbio_sequencing_run.py +29 -10
- cg/server/endpoints/sequencing_run/pacbio_smrt_cell_metrics.py +20 -0
- cg/services/analysis_starter/configurator/configurator.py +1 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/models.py +40 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/nallo.py +3 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/tomte_params_file_creator.py +3 -1
- cg/services/analysis_starter/factories/configurator_factory.py +4 -4
- cg/services/analysis_starter/tracker/implementations/balsamic.py +4 -1
- cg/services/analysis_starter/tracker/implementations/microsalt.py +4 -4
- cg/services/analysis_starter/tracker/implementations/mip_dna.py +4 -1
- cg/services/analysis_starter/tracker/implementations/nextflow_tracker.py +6 -4
- cg/services/analysis_starter/tracker/tracker.py +10 -6
- cg/services/illumina/backup/backup_service.py +29 -7
- cg/services/orders/validation/constants.py +3 -0
- cg/services/orders/validation/index_sequences.py +558 -0
- cg/services/run_devices/pacbio/data_storage_service/pacbio_store_service.py +39 -18
- cg/services/run_devices/pacbio/data_transfer_service/data_transfer_service.py +8 -2
- cg/services/run_devices/pacbio/data_transfer_service/dto.py +9 -3
- cg/services/run_devices/pacbio/data_transfer_service/utils.py +14 -7
- cg/services/run_devices/pacbio/metrics_parser/models.py +1 -0
- cg/services/run_devices/pacbio/sequencing_runs_service.py +35 -7
- cg/services/sequencing_qc_service/quality_checks/checks.py +18 -16
- cg/services/sequencing_qc_service/quality_checks/utils.py +82 -18
- cg/services/sequencing_qc_service/sequencing_qc_service.py +12 -10
- cg/store/crud/create.py +73 -42
- cg/store/crud/read.py +50 -2
- cg/store/crud/update.py +14 -3
- cg/store/models.py +88 -31
- cg/store/store.py +8 -1
- {cg-80.1.0.dist-info → cg-83.14.0.dist-info}/METADATA +1 -1
- {cg-80.1.0.dist-info → cg-83.14.0.dist-info}/RECORD +91 -90
- /cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{nallo.py → nallo_sample_sheet_creator.py} +0 -0
- /cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{raredisease.py → raredisease_sample_sheet_creator.py} +0 -0
- /cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{rnafusion.py → rnafusion_sample_sheet_creator.py} +0 -0
- /cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{taxprofiler.py → taxprofiler_sample_sheet_creator.py} +0 -0
- {cg-80.1.0.dist-info → cg-83.14.0.dist-info}/WHEEL +0 -0
- {cg-80.1.0.dist-info → cg-83.14.0.dist-info}/entry_points.txt +0 -0
cg/__init__.py
CHANGED
@@ -1,2 +1,2 @@
 __title__ = "cg"
-__version__ = "80.1.0"
+__version__ = "83.14.0"
cg/apps/housekeeper/hk.py
CHANGED
cg/apps/tb/api.py
CHANGED
cg/cli/upload/mutacc.py
CHANGED
@@ -7,7 +7,7 @@ import rich_click as click
 from cg.apps.mutacc_auto import MutaccAutoAPI
 from cg.apps.scout.scout_export import ScoutExportCase
 from cg.apps.scout.scoutapi import ScoutAPI
-from cg.cli.upload.utils import
+from cg.cli.upload.utils import get_scout_api_by_genome_build
 from cg.constants.cli_options import DRY_RUN
 from cg.meta.upload.mutacc import UploadToMutaccAPI
 from cg.models.cg_config import CGConfig
@@ -19,17 +19,30 @@ LOG = logging.getLogger(__name__)
 @click.option("-c", "--case-id", help="internal case id, leave empty to process all")
 @click.option("-d", "--days-ago", type=int, default=1, help="days since solved")
 @click.option("-C", "--customers", type=str, multiple=True, help="Filter on customers")
+@click.option(
+    "--scout-instance",
+    type=click.Choice(["hg19", "hg38"]),
+    help="Which scout instance to fetch cases from",
+    required=True,
+)
 @DRY_RUN
 @click.pass_obj
 def process_solved(
-    context: CGConfig,
+    context: CGConfig,
+    scout_instance: str,
+    case_id: str | None,
+    days_ago: int,
+    customers: tuple[str],
+    dry_run: bool,
 ):
     """Process cases with mutacc that has been marked as solved in scout.
     This prepares them to be uploaded to the mutacc database"""

     LOG.info("----------------- PROCESS-SOLVED ----------------")

-    scout_api: ScoutAPI =
+    scout_api: ScoutAPI = get_scout_api_by_genome_build(
+        cg_config=context, genome_build=scout_instance
+    )
     mutacc_auto_api: MutaccAutoAPI = context.mutacc_auto_api
     mutacc_upload_api = UploadToMutaccAPI(scout_api=scout_api, mutacc_auto_api=mutacc_auto_api)
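
The upload command now requires an explicit Scout instance. As a rough, stand-alone illustration of how a required click.Choice option like the one added above behaves, here is a minimal sketch using plain click instead of rich_click, with a dummy command body; the names and messages are illustrative only and this is not the cg command itself:

import click
from click.testing import CliRunner


@click.command("process-solved")
@click.option(
    "--scout-instance",
    type=click.Choice(["hg19", "hg38"]),
    help="Which scout instance to fetch cases from",
    required=True,
)
def process_solved(scout_instance: str) -> None:
    # Dummy body: the real command routes to the matching Scout instance.
    click.echo(f"Would fetch solved cases from the {scout_instance} Scout instance")


runner = CliRunner()
# Omitting the required option makes click exit with an error before the body runs.
assert runner.invoke(process_solved, []).exit_code != 0
# Passing a valid choice succeeds and the chosen build is visible in the output.
assert "hg38" in runner.invoke(process_solved, ["--scout-instance", "hg38"]).output
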
cg/cli/upload/scout.py
CHANGED
@@ -8,7 +8,7 @@ from housekeeper.store.models import File, Version

 from cg.apps.housekeeper.hk import HousekeeperAPI
 from cg.apps.scout.scoutapi import ScoutAPI
-from cg.cli.upload.utils import
+from cg.cli.upload.utils import get_scout_api_by_case, suggest_cases_to_upload
 from cg.constants import Workflow
 from cg.constants.cli_options import DRY_RUN
 from cg.constants.constants import FileFormat
@@ -145,7 +145,7 @@ def upload_case_to_scout(context: CGConfig, re_upload: bool, dry_run: bool, case
     LOG.info("----------------- UPLOAD -----------------------")

     housekeeper_api: HousekeeperAPI = context.housekeeper_api
-    scout_api: ScoutAPI =
+    scout_api: ScoutAPI = get_scout_api_by_case(cg_config=context, case_id=case_id)

     tag_name: str = UploadScoutAPI.get_load_config_tag()
     version: Version = housekeeper_api.last_version(bundle=case_id)
cg/cli/upload/utils.py
CHANGED
@@ -24,6 +24,15 @@ def suggest_cases_to_upload(status_db: Store, workflow: Workflow | None = None)
         click.echo(case_obj)


-def
+def get_scout_api_by_case(cg_config: CGConfig, case_id: str) -> ScoutAPI:
     workflow = cg_config.status_db.get_case_by_internal_id(case_id).data_analysis
     return cg_config.scout_api_38 if workflow == Workflow.NALLO else cg_config.scout_api_37
+
+
+def get_scout_api_by_genome_build(cg_config: CGConfig, genome_build: str) -> ScoutAPI:
+    """Return the appropriate ScoutAPI based on the genome build."""
+    if genome_build == "hg38":
+        return cg_config.scout_api_38
+    if genome_build == "hg19":
+        return cg_config.scout_api_37
+    raise ValueError(f"Unsupported genome build: {genome_build}")
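
Both new helpers encode the same routing rule: Nallo cases and explicit hg38 requests resolve to the build-38 Scout API, everything else to build 37, and unknown builds fail loudly. A minimal self-contained sketch of that rule, using a stub in place of CGConfig (the attribute names scout_api_37 and scout_api_38 come from the diff above; the stub class and string values are illustrative only):

from dataclasses import dataclass


@dataclass
class StubConfig:
    # Stand-in for CGConfig exposing only the two attributes the helpers read.
    scout_api_37: str = "scout-37"
    scout_api_38: str = "scout-38"


def pick_scout_api(cg_config: StubConfig, genome_build: str) -> str:
    """Mirrors get_scout_api_by_genome_build from the diff above."""
    if genome_build == "hg38":
        return cg_config.scout_api_38
    if genome_build == "hg19":
        return cg_config.scout_api_37
    raise ValueError(f"Unsupported genome build: {genome_build}")


config = StubConfig()
assert pick_scout_api(config, "hg38") == "scout-38"
assert pick_scout_api(config, "hg19") == "scout-37"
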
cg/cli/workflow/balsamic/base.py
CHANGED
@@ -140,7 +140,12 @@ def store_available(context: click.Context, dry_run: bool) -> None:
 @ARGUMENT_CASE_ID
 @click.pass_obj
 def config_case(cg_config: CGConfig, case_id: str, panel_bed: str | None):
-    """Configure a Balsamic case so that it is ready to be run.
+    """Configure a Balsamic case so that it is ready to be run.
+
+    \b
+    Creates the case config file:
+    - CASE_ID.json
+    """
     factory = ConfiguratorFactory(cg_config)
     configurator = cast(BalsamicConfigurator, factory.get_configurator(Workflow.BALSAMIC))
     configurator.configure(case_id=case_id, panel_bed=panel_bed)
@@ -151,7 +156,13 @@ def config_case(cg_config: CGConfig, case_id: str, panel_bed: str | None):
 @ARGUMENT_CASE_ID
 @click.pass_obj
 def run(cg_config: CGConfig, case_id: str, workflow_profile: click.Path | None):
-    """
+    """
+    Run a preconfigured Balsamic case.
+
+    \b
+    Assumes that case config file exist in the case run directory:
+    - CASE_ID.json
+    """
     factory = AnalysisStarterFactory(cg_config)
     analysis_starter: AnalysisStarter = factory.get_analysis_starter_for_workflow(Workflow.BALSAMIC)
     analysis_starter.run(case_id=case_id, workflow_profile=workflow_profile)
@@ -168,7 +179,14 @@ def start(
     panel_bed: str | None,
     workflow_profile: click.Path | None,
 ):
-    """
+    """
+    Starts a Balsamic cases.
+
+    \b
+    Configures the case and creates the case config file:
+    - CASE_ID.json
+    and submits the job to slurm.
+    """
     factory = AnalysisStarterFactory(cg_config)
     analysis_starter: AnalysisStarter = factory.get_analysis_starter_for_workflow(Workflow.BALSAMIC)
     analysis_starter.start(case_id=case_id, workflow_profile=workflow_profile, panel_bed=panel_bed)
@@ -177,7 +195,14 @@
 @balsamic.command("start-available")
 @click.pass_obj
 def start_available(cg_config: CGConfig):
-    """
+    """
+    Starts all available Balsamic cases.
+
+    \b
+    Configures the individual case and creates its case config file:
+    - CASE_ID.json
+    and submits the job to slurm.
+    """
     LOG.info("Starting Balsamic workflow for all available cases.")
     factory = AnalysisStarterFactory(cg_config)
     analysis_starter = factory.get_analysis_starter_for_workflow(Workflow.BALSAMIC)
cg/cli/workflow/microsalt/base.py
CHANGED
@@ -41,7 +41,9 @@ microsalt.add_command(resolve_compression)
 @ARGUMENT_CASE_ID
 @click.pass_obj
 def config_case(cg_config: CGConfig, case_id: str) -> None:
-    """
+    """
+    Configure a microSALT case so that it is ready to be run.
+    """
     factory = ConfiguratorFactory(cg_config)
     configurator = cast(MicrosaltConfigurator, factory.get_configurator(Workflow.MICROSALT))
     configurator.configure(case_id=case_id)
cg/cli/workflow/nallo/base.py
CHANGED
@@ -5,22 +5,17 @@ from typing import cast

 import rich_click as click

-from cg.cli.utils import CLICK_CONTEXT_SETTINGS
+from cg.cli.utils import CLICK_CONTEXT_SETTINGS
 from cg.cli.workflow.commands import ARGUMENT_CASE_ID
 from cg.cli.workflow.nf_analysis import (
     OPTION_RESUME,
     OPTION_REVISION,
-    config_case,
     metrics_deliver,
     report_deliver,
-    run,
-    start,
-    start_available,
     store,
     store_available,
     store_housekeeper,
 )
-from cg.constants.cli_options import DRY_RUN
 from cg.constants.constants import MetaApis, Workflow
 from cg.meta.workflow.analysis import AnalysisAPI
 from cg.meta.workflow.nallo import NalloAnalysisAPI
@@ -41,40 +36,21 @@ def nallo(context: click.Context) -> None:
     context.obj.meta_apis[MetaApis.ANALYSIS_API] = NalloAnalysisAPI(config=context.obj)


-nallo.add_command(config_case)
 nallo.add_command(report_deliver)
-nallo.add_command(run)
-nallo.add_command(start)
-nallo.add_command(start_available)
 nallo.add_command(store)
 nallo.add_command(store_available)
 nallo.add_command(store_housekeeper)
 nallo.add_command(metrics_deliver)


-@nallo.command("
-@DRY_RUN
+@nallo.command("config-case")
 @ARGUMENT_CASE_ID
 @click.pass_obj
-def
-    """Write aggregated gene panel file exported from Scout."""
-
-    analysis_api: NalloAnalysisAPI = context.meta_apis["analysis_api"]
-    analysis_api.status_db.verify_case_exists(case_internal_id=case_id)
-
-    bed_lines: list[str] = analysis_api.get_gene_panel(case_id=case_id)
-    if dry_run:
-        echo_lines(lines=bed_lines)
-        return
-    analysis_api.write_panel_as_tsv(case_id=case_id, content=bed_lines)
-
-
-@nallo.command("dev-config-case")
-@ARGUMENT_CASE_ID
-@click.pass_obj
-def dev_config_case(cg_config: CGConfig, case_id: str):
+def config_case(cg_config: CGConfig, case_id: str):
     """
-    Configure a Nallo case so that it is ready to be run.
+    Configure a Nallo case so that it is ready to be run.
+
+    \b
     Creates the following files in the case run directory:
     - CASE_ID_params_file.yaml
     - CASE_ID_nextflow_config.json
@@ -85,14 +61,16 @@ def dev_config_case(cg_config: CGConfig, case_id: str):
     configurator.configure(case_id=case_id)


-@nallo.command("
+@nallo.command("run")
 @OPTION_REVISION
 @OPTION_RESUME
 @ARGUMENT_CASE_ID
 @click.pass_obj
-def
+def run(cg_config: CGConfig, case_id: str, resume: bool, revision: str | None) -> None:
     """
-    Run a preconfigured Nallo case.
+    Run a preconfigured Nallo case.
+
+    \b
     Assumes that the following files exist in the case run directory:
     - CASE_ID_params_file.yaml
     - CASE_ID_nextflow_config.json
@@ -103,13 +81,15 @@ def dev_run(cg_config: CGConfig, case_id: str, resume: bool, revision: str | Non
     analysis_starter.run(case_id=case_id, resume=resume, revision=revision)


-@nallo.command("
+@nallo.command("start")
 @OPTION_REVISION
 @ARGUMENT_CASE_ID
 @click.pass_obj
-def
+def start(cg_config: CGConfig, case_id: str, revision: str | None):
     """
-    Start a Nallo case.
+    Start a Nallo case.
+
+    \b
     Configures the case and writes the following files:
     - CASE_ID_params_file.yaml
     - CASE_ID_nextflow_config.json
@@ -121,9 +101,9 @@ def dev_start(cg_config: CGConfig, case_id: str, revision: str | None):
     analysis_starter.start(case_id=case_id, revision=revision)


-@nallo.command("
+@nallo.command("start-available")
 @click.pass_obj
-def
+def start_available(cg_config: CGConfig):
     """Starts all available Nallo cases."""
     LOG.info("Starting Nallo workflow for all available cases.")
     factory = AnalysisStarterFactory(cg_config)
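
With this change the Nallo module registers its own config-case, run, start and start-available commands instead of reusing the shared nf_analysis implementations. A toy click group sketching that composition pattern, with the same command names but dummy bodies (the group wiring, arguments and messages are illustrative only, not the cg implementation):

import click
from click.testing import CliRunner


@click.group("nallo")
def nallo() -> None:
    """Toy group mirroring the workflow-owned command layout."""


@nallo.command("config-case")
@click.argument("case_id")
def config_case(case_id: str) -> None:
    click.echo(f"configure {case_id}")


@nallo.command("start")
@click.argument("case_id")
@click.option("--revision", default=None, help="Workflow revision to run")
def start(case_id: str, revision: str | None) -> None:
    click.echo(f"start {case_id} (revision={revision})")


runner = CliRunner()
assert "configure case1" in runner.invoke(nallo, ["config-case", "case1"]).output
assert "start case1" in runner.invoke(nallo, ["start", "case1", "--revision", "dev"]).output
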
cg/cli/workflow/nf_analysis.py
CHANGED
@@ -3,25 +3,18 @@
 import logging

 import rich_click as click
-from pydantic import ValidationError

 from cg.cli.workflow.commands import ARGUMENT_CASE_ID
 from cg.cli.workflow.utils import validate_force_store_option
-from cg.constants import EXIT_FAIL, EXIT_SUCCESS, Workflow
 from cg.constants.cli_options import COMMENT, DRY_RUN, FORCE
 from cg.constants.constants import MetaApis
-from cg.exc import
+from cg.exc import CgError, HousekeeperStoreError
 from cg.meta.workflow.nf_analysis import NfAnalysisAPI
 from cg.models.cg_config import CGConfig
-from cg.store.models import Case

 LOG = logging.getLogger(__name__)

-OPTION_WORKDIR = click.option(
-    "--work-dir",
-    type=click.Path(),
-    help="Directory where intermediate result files are stored",
-)
+
 OPTION_RESUME = click.option(
     "--resume",
     default=True,
@@ -29,205 +22,11 @@ OPTION_RESUME = click.option(
     help="Execute the script using the cached results, useful to continue "
     "executions that were stopped by an error",
 )
-OPTION_PROFILE = click.option(
-    "--profile",
-    type=str,
-    show_default=True,
-    help="Choose a configuration profile",
-)
-
-OPTION_CONFIG = click.option(
-    "--config",
-    type=click.Path(),
-    help="Nextflow config file path",
-)
-
-OPTION_PARAMS_FILE = click.option(
-    "--params-file",
-    type=click.Path(),
-    help="Nextflow workflow-specific parameter file path",
-)
-
-OPTION_USE_NEXTFLOW = click.option(
-    "--use-nextflow",
-    type=bool,
-    is_flag=True,
-    default=False,
-    show_default=True,
-    help="Execute workflow using nextflow",
-)
-
 OPTION_REVISION = click.option(
     "--revision",
     type=str,
     help="Revision of workflow to run (either a git branch, tag or commit SHA number)",
 )
-OPTION_COMPUTE_ENV = click.option(
-    "--compute-env",
-    type=str,
-    help="Compute environment name. If not specified the primary compute environment will be used.",
-)
-OPTION_TOWER_RUN_ID = click.option(
-    "--nf-tower-id",
-    type=str,
-    is_flag=False,
-    default=None,
-    help="NF-Tower ID of run to relaunch. If not provided the latest NF-Tower ID for a case will be used.",
-)
-OPTION_FROM_START = click.option(
-    "--from-start",
-    is_flag=True,
-    default=False,
-    show_default=True,
-    help="Start workflow from start without resuming execution",
-)
-OPTION_STUB = click.option(
-    "--stub-run",
-    is_flag=True,
-    default=False,
-    show_default=True,
-    help="Start a stub workflow",
-)
-
-
-@click.command("config-case")
-@ARGUMENT_CASE_ID
-@DRY_RUN
-@click.pass_obj
-def config_case(context: CGConfig, case_id: str, dry_run: bool) -> None:
-    """Create config files required by a workflow for a case."""
-    analysis_api: NfAnalysisAPI = context.meta_apis[MetaApis.ANALYSIS_API]
-    try:
-        analysis_api.config_case(case_id=case_id, dry_run=dry_run)
-    except (CgError, ValidationError) as error:
-        LOG.error(f"Could not create config files for {case_id}: {error}")
-        raise click.Abort() from error
-
-
-@click.command("run")
-@ARGUMENT_CASE_ID
-@OPTION_WORKDIR
-@OPTION_FROM_START
-@OPTION_PROFILE
-@OPTION_CONFIG
-@OPTION_PARAMS_FILE
-@OPTION_REVISION
-@OPTION_COMPUTE_ENV
-@OPTION_USE_NEXTFLOW
-@OPTION_TOWER_RUN_ID
-@OPTION_STUB
-@DRY_RUN
-@click.pass_obj
-def run(
-    context: CGConfig,
-    case_id: str,
-    work_dir: str,
-    from_start: bool,
-    profile: str,
-    config: str,
-    params_file: str,
-    revision: str,
-    compute_env: str,
-    use_nextflow: bool,
-    nf_tower_id: str | None,
-    stub_run: bool,
-    dry_run: bool,
-) -> None:
-    """Run analysis for a case."""
-    analysis_api: NfAnalysisAPI = context.meta_apis[MetaApis.ANALYSIS_API]
-    try:
-        analysis_api.run_nextflow_analysis(
-            case_id=case_id,
-            dry_run=dry_run,
-            work_dir=work_dir,
-            from_start=from_start,
-            profile=profile,
-            config=config,
-            params_file=params_file,
-            revision=revision,
-            compute_env=compute_env,
-            use_nextflow=use_nextflow,
-            nf_tower_id=nf_tower_id,
-            stub_run=stub_run,
-        )
-    except Exception as error:
-        LOG.error(f"Unspecified error occurred: {error}")
-        raise click.Abort() from error
-
-
-@click.command("start")
-@ARGUMENT_CASE_ID
-@OPTION_WORKDIR
-@OPTION_PROFILE
-@OPTION_CONFIG
-@OPTION_PARAMS_FILE
-@OPTION_REVISION
-@OPTION_COMPUTE_ENV
-@OPTION_USE_NEXTFLOW
-@OPTION_STUB
-@DRY_RUN
-@click.pass_obj
-def start(
-    context: CGConfig,
-    case_id: str,
-    work_dir: str,
-    profile: str,
-    config: str,
-    params_file: str,
-    revision: str,
-    compute_env: str,
-    use_nextflow: bool,
-    stub_run: bool,
-    dry_run: bool,
-) -> None:
-    """Start workflow for a case."""
-    LOG.info(f"Starting analysis for {case_id}")
-    analysis_api: NfAnalysisAPI = context.meta_apis[MetaApis.ANALYSIS_API]
-    try:
-        analysis_api.status_db.verify_case_exists(case_internal_id=case_id)
-        case: Case = analysis_api.status_db.get_case_by_internal_id(case_id)
-        if case.data_analysis != Workflow.NALLO:
-            analysis_api.prepare_fastq_files(case_id=case_id, dry_run=dry_run)
-        analysis_api.config_case(case_id=case_id, dry_run=dry_run)
-        analysis_api.run_nextflow_analysis(
-            case_id=case_id,
-            dry_run=dry_run,
-            work_dir=work_dir,
-            from_start=True,
-            profile=profile,
-            config=config,
-            params_file=params_file,
-            revision=revision,
-            compute_env=compute_env,
-            use_nextflow=use_nextflow,
-            stub_run=stub_run,
-        )
-    except Exception as error:
-        LOG.error(f"Unexpected error occurred: {error}")
-        raise click.Abort from error
-
-
-@click.command("start-available")
-@DRY_RUN
-@click.pass_context
-def start_available(context: click.Context, dry_run: bool = False) -> None:
-    """Start workflow for all cases ready for analysis."""
-    analysis_api: NfAnalysisAPI = context.obj.meta_apis[MetaApis.ANALYSIS_API]
-
-    cases: list[Case] = analysis_api.get_cases_to_analyze()
-    LOG.info(f"Starting {len(cases)} available {analysis_api.workflow} cases")
-
-    exit_code: int = EXIT_SUCCESS
-    for case in cases:
-        try:
-            context.invoke(start, case_id=case.internal_id, dry_run=dry_run)
-        except AnalysisNotReadyError as error:
-            LOG.error(error)
-        except Exception as error:
-            LOG.error(error)
-            exit_code = EXIT_FAIL
-    if exit_code:
-        raise click.Abort


 @click.command("metrics-deliver")
cg/cli/workflow/raredisease/base.py
CHANGED
@@ -5,22 +5,17 @@ from typing import cast

 import rich_click as click

-from cg.cli.utils import CLICK_CONTEXT_SETTINGS
+from cg.cli.utils import CLICK_CONTEXT_SETTINGS
 from cg.cli.workflow.commands import ARGUMENT_CASE_ID, resolve_compression
 from cg.cli.workflow.nf_analysis import (
     OPTION_RESUME,
     OPTION_REVISION,
-    config_case,
     metrics_deliver,
     report_deliver,
-    run,
-    start,
-    start_available,
     store,
     store_available,
     store_housekeeper,
 )
-from cg.constants.cli_options import DRY_RUN
 from cg.constants.constants import MetaApis, Workflow
 from cg.meta.workflow.analysis import AnalysisAPI
 from cg.meta.workflow.raredisease import RarediseaseAnalysisAPI
@@ -43,11 +38,7 @@ def raredisease(context: click.Context) -> None:

 raredisease.add_command(metrics_deliver)
 raredisease.add_command(resolve_compression)
-raredisease.add_command(config_case)
 raredisease.add_command(report_deliver)
-raredisease.add_command(run)
-raredisease.add_command(start)
-raredisease.add_command(start_available)
 raredisease.add_command(store)
 raredisease.add_command(store_available)
 raredisease.add_command(store_housekeeper)
@@ -56,8 +47,16 @@ raredisease.add_command(store_housekeeper)
 @raredisease.command()
 @ARGUMENT_CASE_ID
 @click.pass_obj
-def
-    """
+def config_case(cg_config: CGConfig, case_id: str):
+    """
+    Configure a raredisease case so that it is ready to be run.
+
+    \b
+    Creates the following files in the case run directory:
+    - CASE_ID_params_file.yaml
+    - CASE_ID_nextflow_config.json
+    - CASE_ID_samplesheet.csv
+    """
     factory = ConfiguratorFactory(cg_config)
     configurator = cast(NextflowConfigurator, factory.get_configurator(Workflow.RAREDISEASE))
     configurator.configure(case_id=case_id)
@@ -68,8 +67,16 @@ def dev_config_case(cg_config: CGConfig, case_id: str):
 @OPTION_RESUME
 @ARGUMENT_CASE_ID
 @click.pass_obj
-def
-    """
+def run(cg_config: CGConfig, case_id: str, resume: bool, revision: str | None):
+    """
+    Run a preconfigured raredisease case.
+
+    \b
+    Assumes that the following files exist in the case run directory:
+    - CASE_ID_params_file.yaml
+    - CASE_ID_nextflow_config.json
+    - CASE_ID_samplesheet.csv
+    """
     factory = AnalysisStarterFactory(cg_config)
     analysis_starter: AnalysisStarter = factory.get_analysis_starter_for_workflow(
         Workflow.RAREDISEASE
@@ -81,8 +88,17 @@ def dev_run(cg_config: CGConfig, case_id: str, resume: bool, revision: str | Non
 @OPTION_REVISION
 @ARGUMENT_CASE_ID
 @click.pass_obj
-def
-    """
+def start(cg_config: CGConfig, case_id: str, revision: str | None):
+    """
+    Start a raredisease case.
+
+    \b
+    Configures the case and writes the following files:
+    - CASE_ID_params_file.yaml
+    - CASE_ID_nextflow_config.json
+    - CASE_ID_samplesheet.csv
+    and submits the job to the Seqera Platform.
+    """
     factory = AnalysisStarterFactory(cg_config)
     analysis_starter: AnalysisStarter = factory.get_analysis_starter_for_workflow(
         Workflow.RAREDISEASE
@@ -92,7 +108,7 @@ def dev_start(cg_config: CGConfig, case_id: str, revision: str | None):

 @raredisease.command()
 @click.pass_obj
-def
+def start_available(cg_config: CGConfig) -> None:
     """Starts all available raredisease cases."""
     LOG.info("Starting raredisease workflow for all available cases.")
     analysis_starter = AnalysisStarterFactory(cg_config).get_analysis_starter_for_workflow(
@@ -101,37 +117,3 @@ def dev_start_available(cg_config: CGConfig) -> None:
     succeeded: bool = analysis_starter.start_available()
     if not succeeded:
         raise click.Abort
-
-
-@raredisease.command("panel")
-@DRY_RUN
-@ARGUMENT_CASE_ID
-@click.pass_obj
-def panel(context: CGConfig, case_id: str, dry_run: bool) -> None:
-    """Write aggregated gene panel file exported from Scout."""
-
-    analysis_api: RarediseaseAnalysisAPI = context.meta_apis["analysis_api"]
-    analysis_api.status_db.verify_case_exists(case_internal_id=case_id)
-
-    bed_lines: list[str] = analysis_api.get_gene_panel(case_id=case_id)
-    if dry_run:
-        echo_lines(lines=bed_lines)
-        return
-    analysis_api.write_panel(case_id=case_id, content=bed_lines)
-
-
-@raredisease.command("managed-variants")
-@DRY_RUN
-@ARGUMENT_CASE_ID
-@click.pass_obj
-def managed_variants(context: CGConfig, case_id: str, dry_run: bool) -> None:
-    """Write managed variants file exported from Scout."""
-
-    analysis_api: RarediseaseAnalysisAPI = context.meta_apis["analysis_api"]
-    analysis_api.status_db.verify_case_exists(case_internal_id=case_id)
-
-    vcf_lines: list[str] = analysis_api.get_managed_variants(case_id=case_id)
-    if dry_run:
-        echo_lines(lines=vcf_lines)
-        return
-    analysis_api.write_managed_variants(case_id=case_id, content=vcf_lines)