cg-83.16.2-py3-none-any.whl → cg-83.16.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cg/__init__.py CHANGED
@@ -1,2 +1,2 @@
  __title__ = "cg"
- __version__ = "83.16.2"
+ __version__ = "83.16.4"
@@ -6,13 +6,7 @@ from typing import cast
  import rich_click as click

  from cg.cli.utils import CLICK_CONTEXT_SETTINGS
- from cg.cli.workflow.commands import (
-     ensure_illumina_runs_on_disk,
-     link,
-     resolve_compression,
-     store,
-     store_available,
- )
+ from cg.cli.workflow.commands import ensure_illumina_runs_on_disk, store, store_available
  from cg.cli.workflow.mip.options import (
      ARGUMENT_CASE_ID,
      OPTION_BWA_MEM,
@@ -49,8 +43,6 @@ def mip_dna(

  for sub_cmd in [
      ensure_illumina_runs_on_disk,
-     link,
-     resolve_compression,
      store,
      store_available,
  ]:
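
The hunk above drops the link and resolve_compression sub-commands from the mip-dna registration loop. As a minimal, self-contained sketch of the underlying click pattern (plain click instead of rich_click, with placeholder command bodies that are not taken from the package):

    import click


    @click.group()
    def mip_dna():
        """Stand-in for the mip-dna workflow group."""


    @click.command()
    def store():
        """Placeholder sub-command."""
        click.echo("storing results")


    @click.command()
    def store_available():
        """Placeholder sub-command."""
        click.echo("storing all available cases")


    # Register sub-commands in a loop, as the module above does after the change.
    for sub_cmd in [store, store_available]:
        mip_dna.add_command(sub_cmd)


    if __name__ == "__main__":
        mip_dna()
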
cg/constants/nextflow.py CHANGED
@@ -1,29 +1,10 @@
  """Nextflow related constants."""

- from enum import IntEnum
  from cg.constants import Workflow

- NFX_WORK_DIR = "work"
- NFX_SAMPLE_HEADER = "sample"
- NFX_READ1_HEADER = "fastq_1"
- NFX_READ2_HEADER = "fastq_2"
- NFX_SAMPLESHEET_READS_HEADERS = [NFX_READ1_HEADER, NFX_READ2_HEADER]
- NFX_SAMPLESHEET_HEADERS = [NFX_SAMPLE_HEADER] + NFX_SAMPLESHEET_READS_HEADERS
- DELIVER_FILE_HEADERS = ["format", "id", "path", "path_index", "step", "tag"]
- NXF_PID_FILE_ENV = "NXF_PID_FILE"
- NXF_JVM_ARGS_ENV = "NXF_JVM_ARGS"
- JAVA_MEMORY_HEADJOB = "-Xmx5g"
  NEXTFLOW_WORKFLOWS: list = [
      Workflow.RAREDISEASE,
      Workflow.RNAFUSION,
      Workflow.TAXPROFILER,
      Workflow.TOMTE,
  ]
-
-
- class SlurmHeadJobDefaults(IntEnum):
-     """Default parameters for slurm head jobs."""
-
-     HOURS: int = 96
-     MEMORY: int = 10
-     NUMBER_TASKS: int = 1
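
The constants and the SlurmHeadJobDefaults enum removed above fed the Nextflow head-job path (see the executor-handler module deleted at the end of this diff, where they supply sbatch defaults and JVM arguments). A minimal sketch of that pattern, using an IntEnum as a source of default keyword values; the option string built here is illustrative only:

    from enum import IntEnum


    class SlurmHeadJobDefaults(IntEnum):
        """Default parameters for SLURM head jobs (values as in the removed module)."""

        HOURS = 96
        MEMORY = 10
        NUMBER_TASKS = 1


    def head_job_sbatch_options(
        hours: int = SlurmHeadJobDefaults.HOURS, memory: int = SlurmHeadJobDefaults.MEMORY
    ) -> str:
        # IntEnum members behave as ints, so they can be used directly as defaults.
        return f"--time={int(hours)}:00:00 --mem={int(memory)}G"


    print(head_job_sbatch_options())  # --time=96:00:00 --mem=10G
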
@@ -1,20 +1,7 @@
  """Nf-tower related constants."""

- from enum import StrEnum
  from typing import Any

-
- class NfTowerStatus(StrEnum):
-     """NF-Tower job submission status."""
-
-     SUBMITTED: str = "SUBMITTED"
-     RUNNING: str = "RUNNING"
-     SUCCEEDED: str = "SUCCEEDED"
-     FAILED: str = "FAILED"
-     CANCELLED: str = "CANCELLED"
-     UNKNOWN: str = "UNKNOWN"
-
-
  NALLO_GENERAL_METRIC_CONDITIONS: dict[str, dict[str, Any]] = {
      "median_coverage": {"norm": "gt", "threshold": 20},
  }
@@ -68,17 +55,6 @@ TOMTE_METRIC_CONDITIONS: dict[str, dict[str, Any]] = {
      "pct_duplication": {"norm": "lt", "threshold": 70},
  }

-
- MULTIQC_NEXFLOW_CONFIG = """process {
-     withName:'MULTIQC' {
-         memory = { 4.GB * task.attempt }
-         time = { 4.h * task.attempt }
-         cpus = 2
-         ext.args = ' --data-format json --cl-config "max_table_rows: 10000" '
-     }
- }
- """
-
  NALLO_COVERAGE_FILE_TAGS: list[str] = ["d4"]
  NALLO_COVERAGE_INTERVAL_TYPE: str = "genes"
  NALLO_COVERAGE_THRESHOLD: int = 10
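
The metric-condition dictionaries kept in this file map a QC metric name to a comparison ("norm") and a threshold. A small sketch of how such a condition can be evaluated; the metric_passes helper is illustrative and not the package's own code:

    import operator
    from typing import Any

    # Same shape as NALLO_GENERAL_METRIC_CONDITIONS / TOMTE_METRIC_CONDITIONS above.
    CONDITIONS: dict[str, dict[str, Any]] = {
        "median_coverage": {"norm": "gt", "threshold": 20},
        "pct_duplication": {"norm": "lt", "threshold": 70},
    }

    OPERATORS = {"gt": operator.gt, "lt": operator.lt, "ge": operator.ge, "le": operator.le}


    def metric_passes(name: str, value: float) -> bool:
        """Return True if a metric value satisfies its configured condition."""
        condition = CONDITIONS[name]
        return OPERATORS[condition["norm"]](value, condition["threshold"])


    print(metric_passes("median_coverage", 27.5))  # True: 27.5 > 20
    print(metric_passes("pct_duplication", 82.0))  # False: 82.0 is not < 70
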
cg/meta/workflow/nallo.py CHANGED
@@ -60,9 +60,7 @@ class NalloAnalysisAPI(NfAnalysisAPI):
          self.tower_workflow: str = config.nallo.tower_workflow
          self.account: str = config.nallo.slurm.account
          self.email: str = config.nallo.slurm.mail_user
-         self.compute_env_base: str = config.nallo.compute_env
          self.revision: str = config.nallo.revision
-         self.nextflow_binary_path: str = config.nallo.binary_path

      def get_genome_build(self, case_id: str) -> GenomeVersion:
          """Return reference genome for a Nallo case. Currently fixed for hg38."""
@@ -8,21 +8,12 @@ from pydantic import TypeAdapter
  from pydantic.v1 import ValidationError

  from cg.constants import Workflow
- from cg.constants.constants import (
-     FileExtensions,
-     FileFormat,
-     GenomeVersion,
-     MultiQC,
-     WorkflowManager,
- )
- from cg.constants.nextflow import NFX_WORK_DIR
- from cg.constants.nf_analysis import NfTowerStatus
+ from cg.constants.constants import FileExtensions, FileFormat, GenomeVersion, MultiQC
  from cg.constants.tb import AnalysisStatus
  from cg.exc import CgError, HousekeeperStoreError, MetricsQCError
  from cg.io.controller import ReadFile, WriteFile
  from cg.io.json import read_json
  from cg.meta.workflow.analysis import AnalysisAPI
- from cg.meta.workflow.nf_handlers import NextflowHandler, NfTowerHandler
  from cg.models.analysis import NextflowAnalysis
  from cg.models.cg_config import CGConfig
  from cg.models.deliverables.metric_deliverables import (
@@ -30,10 +21,9 @@ from cg.models.deliverables.metric_deliverables import (
      MetricsDeliverablesCondition,
      MultiqcDataJson,
  )
- from cg.models.nf_analysis import FileDeliverable, NfCommandArgs, WorkflowDeliverables
+ from cg.models.nf_analysis import FileDeliverable, WorkflowDeliverables
  from cg.models.qc_metrics import QCMetrics
  from cg.store.models import Analysis, Case, Sample
- from cg.utils import Process

  LOG = logging.getLogger(__name__)

@@ -58,72 +48,22 @@ class NfAnalysisAPI(AnalysisAPI):
          self.tower_workflow: str | None = None
          self.account: str | None = None
          self.email: str | None = None
-         self.compute_env_base: str | None = None
          self.revision: str | None = None
-         self.nextflow_binary_path: str | None = None

      @property
      def root(self) -> str:
          return self.root_dir

-     @property
-     def process(self):
-         if not self._process:
-             self._process = Process(
-                 binary=self.tower_binary_path,
-             )
-         return self._process
-
-     @process.setter
-     def process(self, process: Process):
-         self._process = process
-
      @property
      def is_multiqc_pattern_search_exact(self) -> bool:
          """Return True if only exact pattern search is allowed to collect metrics information from MultiQC file.
          If false, pattern must be present but does not need to be exact."""
          return False

-     def get_profile(self, profile: str | None = None) -> str:
-         """Get NF profiles."""
-         return profile or self.profile
-
-     def get_workflow_manager(self) -> str:
-         """Get workflow manager from Tower."""
-         return WorkflowManager.Tower.value
-
-     def get_workflow_version(self, case_id: str) -> str:
-         """Get workflow version from config."""
-         return self.revision
-
      def get_case_path(self, case_id: str) -> Path:
          """Path to case working directory."""
          return Path(self.root_dir, case_id)

-     def get_sample_sheet_path(self, case_id: str) -> Path:
-         """Path to sample sheet."""
-         return Path(self.get_case_path(case_id), f"{case_id}_samplesheet").with_suffix(
-             FileExtensions.CSV
-         )
-
-     def get_compute_env(self, case_id: str) -> str:
-         """Get the compute environment for the head job based on the case priority."""
-         return f"{self.compute_env_base}-{self.get_slurm_qos_for_case(case_id=case_id)}"
-
-     def get_nextflow_config_path(
-         self, case_id: str, nextflow_config: Path | str | None = None
-     ) -> Path:
-         """Path to nextflow config file."""
-         if nextflow_config:
-             return Path(nextflow_config).absolute()
-         return Path((self.get_case_path(case_id)), f"{case_id}_nextflow_config").with_suffix(
-             FileExtensions.JSON
-         )
-
-     def get_job_ids_path(self, case_id: str) -> Path:
-         """Return the path to a Trailblazer config file containing Tower IDs."""
-         return Path(self.root_dir, case_id, "tower_ids").with_suffix(FileExtensions.YAML)
-
      def get_deliverables_file_path(self, case_id: str) -> Path:
          """Path to deliverables file for a case."""
          return Path(self.get_case_path(case_id), f"{case_id}_deliverables").with_suffix(
@@ -136,33 +76,6 @@ class NfAnalysisAPI(AnalysisAPI):
              FileExtensions.YAML
          )

-     def get_params_file_path(self, case_id: str, params_file: Path | None = None) -> Path:
-         """Return parameters file or a path where the default parameters file for a case id should be located."""
-         if params_file:
-             return Path(params_file).absolute()
-         return Path((self.get_case_path(case_id)), f"{case_id}_params_file").with_suffix(
-             FileExtensions.YAML
-         )
-
-     def get_log_path(self, case_id: str, workflow: str) -> Path:
-         """Path to NF log."""
-         launch_time: str = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
-         return Path(
-             self.get_case_path(case_id),
-             f"{case_id}_{workflow}_nextflow_{launch_time}",
-         ).with_suffix(FileExtensions.LOG)
-
-     def get_workdir_path(self, case_id: str, work_dir: Path | None = None) -> Path:
-         """Path to NF work directory."""
-         if work_dir:
-             return work_dir.absolute()
-         return Path(self.get_case_path(case_id), NFX_WORK_DIR)
-
-     def verify_sample_sheet_exists(self, case_id: str, dry_run: bool = False) -> None:
-         """Raise an error if sample sheet file is not found."""
-         if not dry_run and not Path(self.get_sample_sheet_path(case_id=case_id)).exists():
-             raise ValueError(f"No config file found for case {case_id}")
-
      def verify_deliverables_file_exists(self, case_id: str) -> None:
          """Raise an error if a deliverable file is not found."""
          if not Path(self.get_deliverables_file_path(case_id=case_id)).exists():
@@ -177,181 +90,6 @@ class NfAnalysisAPI(AnalysisAPI):
              content=deliverables_content, file_format=file_format, file_path=file_path
          )

-     def write_trailblazer_config(self, case_id: str, tower_id: str) -> None:
-         """Write Tower IDs to a file used as the Trailblazer config."""
-         config_path: Path = self.get_job_ids_path(case_id=case_id)
-         LOG.info(f"Writing Tower ID to {config_path.as_posix()}")
-         WriteFile.write_file_from_content(
-             content={case_id: [tower_id]},
-             file_format=FileFormat.YAML,
-             file_path=config_path,
-         )
-
-     def _run_analysis_with_nextflow(
-         self, case_id: str, command_args: NfCommandArgs, dry_run: bool
-     ) -> None:
-         """Run analysis with given options using Nextflow."""
-         self.process = Process(
-             binary=self.nextflow_binary_path,
-             environment=self.conda_env,
-             conda_binary=self.conda_binary,
-             launch_directory=self.get_case_path(case_id=case_id),
-         )
-         LOG.info("Workflow will be executed using Nextflow")
-         parameters: list[str] = NextflowHandler.get_nextflow_run_parameters(
-             case_id=case_id,
-             workflow_bin_path=self.workflow_bin_path,
-             root_dir=self.root_dir,
-             command_args=command_args.dict(),
-         )
-         self.process.export_variables(
-             export=NextflowHandler.get_variables_to_export(),
-         )
-         command: str = self.process.get_command(parameters=parameters)
-         LOG.info(f"{command}")
-         sbatch_number: int = NextflowHandler.execute_head_job(
-             case_id=case_id,
-             case_directory=self.get_case_path(case_id=case_id),
-             slurm_account=self.account,
-             email=self.email,
-             qos=self.get_slurm_qos_for_case(case_id=case_id),
-             commands=command,
-             dry_run=dry_run,
-         )
-         LOG.info(f"Nextflow head job running as job: {sbatch_number}")
-
-     def _run_analysis_with_tower(
-         self, case_id: str, command_args: NfCommandArgs, dry_run: bool
-     ) -> str | None:
-         """Run analysis with given options using NF-Tower."""
-         LOG.info("Workflow will be executed using Tower")
-         if command_args.resume:
-             from_tower_id: int = command_args.id or NfTowerHandler.get_last_tower_id(
-                 case_id=case_id,
-                 trailblazer_config=self.get_job_ids_path(case_id=case_id),
-             )
-             LOG.info(f"Workflow will be resumed from run with Tower id: {from_tower_id}.")
-             parameters: list[str] = NfTowerHandler.get_tower_relaunch_parameters(
-                 from_tower_id=from_tower_id, command_args=command_args.dict()
-             )
-         else:
-             parameters: list[str] = NfTowerHandler.get_tower_launch_parameters(
-                 tower_workflow=self.tower_workflow, command_args=command_args.dict()
-             )
-         self.process.run_command(parameters=parameters, dry_run=dry_run)
-         if self.process.stderr:
-             LOG.error(self.process.stderr)
-         if not dry_run:
-             tower_id = NfTowerHandler.get_tower_id(stdout_lines=self.process.stdout_lines())
-             self.write_trailblazer_config(case_id=case_id, tower_id=tower_id)
-             return tower_id
-         LOG.info(self.process.stdout)
-
-     def get_command_args(
-         self,
-         case_id: str,
-         work_dir: str,
-         from_start: bool,
-         profile: str,
-         config: str,
-         params_file: str | None,
-         revision: str,
-         compute_env: str,
-         nf_tower_id: str | None,
-         stub_run: bool,
-     ) -> NfCommandArgs:
-         command_args: NfCommandArgs = NfCommandArgs(
-             **{
-                 "log": self.get_log_path(case_id=case_id, workflow=self.workflow),
-                 "work_dir": self.get_workdir_path(case_id=case_id, work_dir=work_dir),
-                 "resume": not from_start,
-                 "profile": self.get_profile(profile=profile),
-                 "config": self.get_nextflow_config_path(case_id=case_id, nextflow_config=config),
-                 "params_file": self.get_params_file_path(case_id=case_id, params_file=params_file),
-                 "name": case_id,
-                 "compute_env": compute_env or self.get_compute_env(case_id=case_id),
-                 "revision": revision or self.revision,
-                 "wait": NfTowerStatus.SUBMITTED,
-                 "id": nf_tower_id,
-                 "stub_run": stub_run,
-             }
-         )
-         return command_args
-
-     def run_nextflow_analysis(
-         self,
-         case_id: str,
-         use_nextflow: bool,
-         work_dir: str,
-         from_start: bool,
-         profile: str,
-         config: str,
-         params_file: str | None,
-         revision: str,
-         compute_env: str,
-         stub_run: bool,
-         nf_tower_id: str | None = None,
-         dry_run: bool = False,
-     ) -> None:
-         """Prepare and start run analysis: check existence of all input files generated by config-case and sync with trailblazer."""
-         self.status_db.verify_case_exists(case_internal_id=case_id)
-
-         command_args = self.get_command_args(
-             case_id=case_id,
-             work_dir=work_dir,
-             from_start=from_start,
-             profile=profile,
-             config=config,
-             params_file=params_file,
-             revision=revision,
-             compute_env=compute_env,
-             nf_tower_id=nf_tower_id,
-             stub_run=stub_run,
-         )
-
-         try:
-             self.verify_sample_sheet_exists(case_id=case_id, dry_run=dry_run)
-             self.check_analysis_ongoing(case_id=case_id)
-             LOG.info(f"Running analysis for {case_id}")
-             tower_workflow_id: str | None = self.run_analysis(
-                 case_id=case_id,
-                 command_args=command_args,
-                 use_nextflow=use_nextflow,
-                 dry_run=dry_run,
-             )
-             if not dry_run:
-                 self.on_analysis_started(case_id=case_id, tower_workflow_id=tower_workflow_id)
-         except FileNotFoundError as error:
-             LOG.error(f"Could not resume analysis: {error}")
-             raise FileNotFoundError
-         except ValueError as error:
-             LOG.error(f"Could not run analysis: {error}")
-             raise ValueError
-         except CgError as error:
-             LOG.error(f"Could not run analysis: {error}")
-             raise CgError
-
-     def run_analysis(
-         self,
-         case_id: str,
-         command_args: NfCommandArgs,
-         use_nextflow: bool,
-         dry_run: bool = False,
-     ) -> str | None:
-         """Execute run analysis with given options."""
-         if use_nextflow:
-             self._run_analysis_with_nextflow(
-                 case_id=case_id,
-                 command_args=command_args,
-                 dry_run=dry_run,
-             )
-         else:
-             return self._run_analysis_with_tower(
-                 case_id=case_id,
-                 command_args=command_args,
-                 dry_run=dry_run,
-             )
-
      def get_deliverables_template_content(self) -> list[dict[str, str]]:
          """Return deliverables file template content."""
          LOG.debug("Getting deliverables file template content")
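
The block removed above contained the whole launch path: get_command_args assembled an NfCommandArgs object, run_nextflow_analysis validated the case, and run_analysis dispatched either to a local Nextflow head job or to NF-Tower. A stripped-down sketch of that dispatch shape, with hypothetical stand-in types rather than the removed implementation:

    from dataclasses import dataclass


    @dataclass
    class CommandArgs:
        """Hypothetical stand-in for the removed NfCommandArgs model."""

        case_id: str
        resume: bool = False
        dry_run: bool = False


    def run_with_nextflow(args: CommandArgs) -> None:
        print(f"submitting head job for {args.case_id} (dry_run={args.dry_run})")


    def run_with_tower(args: CommandArgs) -> str:
        print(f"launching on Tower for {args.case_id} (resume={args.resume})")
        return "1uxZE9JM7Tl58r"  # a Tower workflow id, as in the parsing example further down


    def run_analysis(args: CommandArgs, use_nextflow: bool) -> str | None:
        # Mirrors the removed dispatch: the Nextflow path returns nothing,
        # the Tower path returns a workflow id that was written to the Trailblazer config.
        if use_nextflow:
            run_with_nextflow(args)
            return None
        return run_with_tower(args)


    workflow_id = run_analysis(CommandArgs(case_id="case_x"), use_nextflow=False)
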
@@ -59,9 +59,7 @@ class RarediseaseAnalysisAPI(NfAnalysisAPI):
          self.tower_workflow: str = config.raredisease.tower_workflow
          self.account: str = config.raredisease.slurm.account
          self.email: str = config.raredisease.slurm.mail_user
-         self.compute_env_base: str = config.raredisease.compute_env
          self.revision: str = config.raredisease.revision
-         self.nextflow_binary_path: str = config.raredisease.binary_path

      @staticmethod
      def get_bundle_filenames_path() -> Path:
@@ -41,9 +41,7 @@ class RnafusionAnalysisAPI(NfAnalysisAPI):
          self.tower_workflow: str = config.rnafusion.tower_workflow
          self.account: str = config.rnafusion.slurm.account
          self.email: str = config.rnafusion.slurm.mail_user
-         self.compute_env_base: str = config.rnafusion.compute_env
          self.revision: str = config.rnafusion.revision
-         self.nextflow_binary_path: str = config.rnafusion.binary_path

      @property
      def is_multiple_samples_allowed(self) -> bool:
@@ -40,8 +40,6 @@ class TaxprofilerAnalysisAPI(NfAnalysisAPI):
          self.tower_workflow: str = config.taxprofiler.tower_workflow
          self.account: str = config.taxprofiler.slurm.account
          self.email: str = config.taxprofiler.slurm.mail_user
-         self.nextflow_binary_path: str = config.taxprofiler.binary_path
-         self.compute_env_base: str = config.taxprofiler.compute_env

      @property
      def is_multiqc_pattern_search_exact(self) -> bool:
cg/meta/workflow/tomte.py CHANGED
@@ -38,9 +38,7 @@ class TomteAnalysisAPI(NfAnalysisAPI):
          self.tower_workflow: str = config.tomte.tower_workflow
          self.account: str = config.tomte.slurm.account
          self.email: str = config.tomte.slurm.mail_user
-         self.compute_env_base: str = config.tomte.compute_env
          self.revision: str = config.tomte.revision
-         self.nextflow_binary_path: str = config.tomte.binary_path

      @staticmethod
      def get_bundle_filenames_path() -> Path:
cg/models/cg_config.py CHANGED
@@ -232,7 +232,6 @@ class MipConfig(BaseModel):

  class NalloConfig(CommonAppConfig):
      binary_path: str | None = None
-     compute_env: str
      conda_binary: str | None = None
      conda_env: str
      platform: str
@@ -252,7 +251,6 @@ class NalloConfig(CommonAppConfig):

  class RarediseaseConfig(CommonAppConfig):
      binary_path: str | None = None
-     compute_env: str
      conda_binary: str | None = None
      conda_env: str
      platform: str
@@ -272,7 +270,6 @@ class RarediseaseConfig(CommonAppConfig):

  class TomteConfig(CommonAppConfig):
      binary_path: str | None = None
-     compute_env: str
      conda_binary: str | None = None
      conda_env: str
      platform: str
@@ -291,7 +288,6 @@ class TomteConfig(CommonAppConfig):

  class RnafusionConfig(CommonAppConfig):
      binary_path: str
-     compute_env: str
      conda_binary: str | None = None
      conda_env: str
      platform: str
@@ -313,7 +309,6 @@ class TaxprofilerConfig(CommonAppConfig):
      binary_path: str
      conda_binary: str | None = None
      conda_env: str
-     compute_env: str
      platform: str
      params: str
      config: str
cg/models/nf_analysis.py CHANGED
@@ -3,25 +3,6 @@ from pathlib import Path
  from pydantic import BaseModel, field_validator


- class NfCommandArgs(BaseModel):
-     """Model for arguments and options supported."""
-
-     log: str | Path | None = None
-     resume: bool | None = None
-     profile: str | None = None
-     stub_run: bool | None = None
-     config: str | Path | None = None
-     name: str | None = None
-     revision: str | None = None
-     wait: str | None = None
-     id: str | None = None
-     with_tower: bool | None = None
-     use_nextflow: bool | None = None
-     compute_env: str | None = None
-     work_dir: str | Path | None = None
-     params_file: str | Path | None = None
-
-
  class FileDeliverable(BaseModel):
      """Specification for a general deliverables file."""

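
The removed NfCommandArgs model was a plain pydantic container of optional launch options. A minimal sketch of the same pattern in pydantic v2, using a reduced, hypothetical field set and model_dump instead of the deprecated .dict() seen at the removed call sites:

    from pathlib import Path

    from pydantic import BaseModel


    class LaunchArgs(BaseModel):
        """Hypothetical reduced version of the removed NfCommandArgs model."""

        log: str | Path | None = None
        resume: bool | None = None
        profile: str | None = None
        work_dir: str | Path | None = None
        compute_env: str | None = None


    args = LaunchArgs(profile="singularity", work_dir=Path("work"), resume=True)
    # Unset fields stay None; dumping without them yields the options a handler
    # would turn into CLI flags.
    print(args.model_dump(exclude_none=True))
    # e.g. {'resume': True, 'profile': 'singularity', 'work_dir': PosixPath('work')}
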
cg/server/admin.py CHANGED
@@ -31,6 +31,11 @@ class BaseView(ModelView):
          return redirect(url_for("google.login", next=request.url))


+ def view_hifi_yield_in_gb(unused1, unused2, model, unused3):
+     del unused1, unused2, unused3
+     return Markup(f"{round(model.hifi_yield/1E9, 1)} Gb") if model.hifi_yield else ""
+
+
  def view_priority(unused1, unused2, model, unused3):
      """column formatter for priority"""
      del unused1, unused2, unused3
@@ -780,6 +785,7 @@ class SampleView(BaseView):
      column_formatters = {
          "application_version": view_application_link_via_application_version,
          "customer": view_customer_link,
+         "hifi_yield": view_hifi_yield_in_gb,
          "internal_id": view_case_sample_link,
          "invoice": InvoiceView.view_invoice_link,
          "original_ticket": view_ticket_link,
cg/utils/commands.py CHANGED
@@ -58,13 +58,6 @@ class Process:
          self._stdout = ""
          self._stderr = ""

-     def export_variables(self, export: dict[str, str]) -> None:
-         """Export variables prior to execution."""
-         if export:
-             self.base_call.insert(
-                 0, " ".join([f"export {variable}={value};" for variable, value in export.items()])
-             )
-
      def run_command(self, parameters: list = None, dry_run: bool = False) -> int:
          """Execute a command in the shell.
          If environment is supplied - shell=True has to be supplied to enable passing as a string for executing multiple
@@ -107,15 +100,6 @@

          return res.returncode

-     def get_command(self, parameters: list = None) -> str:
-         """Returns a command string given a list of parameters."""
-
-         command: list[str] = copy.deepcopy(self.base_call)
-         if parameters:
-             command.extend(parameters)
-
-         return " ".join(command)
-
      @property
      def stdout(self):
          """Fetch stdout"""
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cg
- Version: 83.16.2
+ Version: 83.16.4
  Summary: Clinical Genomics command center
  Requires-Python: >=3.11,<3.13
  Classifier: Programming Language :: Python
@@ -1,4 +1,4 @@
- cg/__init__.py,sha256=cwaalPaK8wOIsf67PDYF8-DG6vrfh9SlTWBYNVi9TuE,41
+ cg/__init__.py,sha256=fHsf7E2iBNtyrOiJBDtalgvzOQSE9J5bZSELyUzX-Bw,41
  cg/apps/__init__.py,sha256=pYf0vxo4iYQqURzFRYzqpOCdV8Cm9MWx0GHvJOz0EMg,315
  cg/apps/coverage/__init__.py,sha256=dJtsmNf8tODE2-VEomMIoYA7ugLYZAk_upsfOQCZeF8,27
  cg/apps/coverage/api.py,sha256=e_ozC3QeNKoEfpjjMaL-XjeBLtz-JySWccrtw0E9mLM,2940
@@ -151,7 +151,7 @@ cg/cli/workflow/mip/__init__.py,sha256=tAp9BLY37Gsg-jvTdgUEoP8lnaHW4I61tXfHsBFkL
  cg/cli/workflow/mip/base.py,sha256=upsDouj2iQbq37xQXHR2gVl2aqGozVRDZTMjLPbtX8Q,6767
  cg/cli/workflow/mip/options.py,sha256=Yoo9yq2quE52zV3lzp2-OjWdON2jJfjCUrTY82hrq30,1139
  cg/cli/workflow/mip_dna/__init__.py,sha256=tAp9BLY37Gsg-jvTdgUEoP8lnaHW4I61tXfHsBFkLpk,22
- cg/cli/workflow/mip_dna/base.py,sha256=dw1GlXVDr_gcIzMoLjH5nACNqiZbnwbIlvzgK0-sl5Y,4305
+ cg/cli/workflow/mip_dna/base.py,sha256=OKKTOsypeNeUdlUUuCJEz0PxuXWCbB92k15eUsx4_go,4218
  cg/cli/workflow/mip_rna/__init__.py,sha256=tAp9BLY37Gsg-jvTdgUEoP8lnaHW4I61tXfHsBFkLpk,22
  cg/cli/workflow/mip_rna/base.py,sha256=G6O77Y4DB5eV3Oc5Ktd9vkwH_aMeehnmGT-EDQUX4bk,1019
  cg/cli/workflow/mutant/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -206,8 +206,8 @@ cg/constants/housekeeper_tags.py,sha256=742ZAo6YdYhdZwRFOLN3HoPmLviuvfqutGlSscza
  cg/constants/invoice.py,sha256=Tdd3PY_Z7wz4eSfKaTapSFlkmk_FWM1bwWMOj5D5F5Y,95
  cg/constants/lims.py,sha256=vhATu1UtIXdfPZWEu8pne3uq4ZhCAdPNJcjSotD8AE8,6559
  cg/constants/metrics.py,sha256=jBLp8buIG_3fKFraerFvpfSJ6s-0H42jI1qysjjAXFc,847
- cg/constants/nextflow.py,sha256=bDoyqqt8yj2P49mGfDrA0LEHVn1MkZOe5bDhEiYpPZI,811
- cg/constants/nf_analysis.py,sha256=C-sgUS8eRxkjN-ncizHaWRvME0PDG4rw65jgqZ-c_og,3080
+ cg/constants/nextflow.py,sha256=o5VxtsyZNJaA1bh_AyKQQ0Iov6grZIeik_hhE8zO268,197
+ cg/constants/nf_analysis.py,sha256=r3bfzzIAL3fU3dxMSwIUmN_jYLhcF70L0NXymGLTj5M,2541
  cg/constants/nipt.py,sha256=KGy7Y08jIUmT9FKKdjGzfRwvklAkOHRLKXqDOQx4aPc,19
  cg/constants/observations.py,sha256=Sjxo3R7DCkcMkSWNSeg2Sznkjr3werFRRsSREjd6CCU,3887
  cg/constants/orderforms.py,sha256=pLD_ufw11aE-Fn4x87el3F_KRITi2lbrgpBYU7eJgpM,2362
@@ -383,14 +383,13 @@ cg/meta/workflow/mutant/quality_controller/quality_controller.py,sha256=JMMCZHBj
  cg/meta/workflow/mutant/quality_controller/report_generator_utils.py,sha256=bM5GSRBoheOzxR3lzATSp-ETvQ1uS8obAw2onP1Q2RE,1247
  cg/meta/workflow/mutant/quality_controller/result_logger_utils.py,sha256=LqdP8yxf-V4VQcmtt2smX7p6ljoAaL4GnfJRQfIbdqc,2822
  cg/meta/workflow/mutant/quality_controller/utils.py,sha256=8zbn210O5RIKRIsaZUkVvjvGnUjRDULW5pseyHEOViQ,627
- cg/meta/workflow/nallo.py,sha256=PFwG1gvA7pNqFzDA79AdOwk0udxknVdyG-1oGZwaAgI,10163
- cg/meta/workflow/nf_analysis.py,sha256=5M8IzSp0jXrBUDX46Tp1YFDefZtCNDhW9hDCyNmhVpw,30787
- cg/meta/workflow/nf_handlers.py,sha256=DJ0Xkk3tWbIh7uxxvoJ3615gqeKDtPpNtWMrOYh1VHM,6248
+ cg/meta/workflow/nallo.py,sha256=V0yBQLIlRvWS-gtOfJ6OgxGXTTEjx5Yp-KObyBMCNjs,10035
+ cg/meta/workflow/nf_analysis.py,sha256=eXl_1AxwwdzVC_k3ujiONH5q1FCw9wacGoVpVKx2_PI,20466
  cg/meta/workflow/prepare_fastq.py,sha256=HbkrsuqlZgb_7kQPf_j_Mqowh6nEms9jl7043ybxK4U,6149
- cg/meta/workflow/raredisease.py,sha256=XhlgCPSx-MmzDdpXrLY__vKl2f8np_1JuIIwwQKI-7c,9038
- cg/meta/workflow/rnafusion.py,sha256=S8Pxq9eKN4Bm0U5pPh8Qi3I3eTBYRySdR7S8M6SVb-g,3782
- cg/meta/workflow/taxprofiler.py,sha256=SojjfA9dKsfzf_PNpPf3tG2z8M0cO5Fe4b45l4fXsgo,3185
- cg/meta/workflow/tomte.py,sha256=VstaJTpe-mCTitzvXcxAVXJ_JpH-icfPK1qN7zMsfHE,2473
+ cg/meta/workflow/raredisease.py,sha256=-bENb0f1f9bptMsP_PBy4niMmkxDnIxCOF32OBgDxts,8898
+ cg/meta/workflow/rnafusion.py,sha256=Mcmn_HTAHpH-M6Pl_bOG2oSfIgZe11GqtTKrk6lFpPU,3646
+ cg/meta/workflow/taxprofiler.py,sha256=pphhUfe-CguO1PIwR3orRoaNnVWi_8V23MkYORHcAh0,3045
+ cg/meta/workflow/tomte.py,sha256=-WtxossQMfQjYR9CBdxS1V_gTix3gAVqjDR5tIBTHcE,2345
  cg/meta/workflow/utils/utils.py,sha256=eNTKbyggGavp4BXD1Lq1alMHL7Qpm_YRdJzmyrGmXAM,393
  cg/models/__init__.py,sha256=5mHdugMi0PtYuj-EP1N-M5LtnCY6CGpiP2XqgLFpQBI,67
  cg/models/analysis.py,sha256=o_0SGLl_Y8uQ1nt61OaG3N4D0t7C9Z9Wwq6FxU0Qlj0,775
@@ -398,7 +397,7 @@ cg/models/balsamic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  cg/models/balsamic/analysis.py,sha256=XSQDClACnH3tkff66yRdHWJVgsBQcUNuztYb7ICOV68,477
  cg/models/balsamic/config.py,sha256=uf5Vz_Ro4adM98absZZYm4W4vco2sJziEeO0f5UTtBE,4108
  cg/models/balsamic/metrics.py,sha256=wtPulyNkfEvKBo742kIsfqRP88-_TBfeWNesh4ezsVI,2279
- cg/models/cg_config.py,sha256=LG6gNvXm4a_Gg7nDnwucb8kerbqvoFckXOcz7CsCpas,25839
+ cg/models/cg_config.py,sha256=fLqES5j4gPeuKB_K0eDDO6a7XgqRLuTYS1OqHaF_JTk,25734
  cg/models/compression_data.py,sha256=io1XjIsvlqsVpEZy_jN2XveEYzApPdBbAMs2xcld4Ao,11741
  cg/models/deliverables/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cg/models/deliverables/metric_deliverables.py,sha256=nWNhlN0I_xbjp43522Uv8yfN0sCCcEqa7S4infIh8HA,5488
@@ -428,7 +427,7 @@ cg/models/mip/mip_metrics_deliverables.py,sha256=WfnRQkNj8CIsI4EBXnPW2OuOd4394aR
  cg/models/mip/mip_sample_info.py,sha256=DM6tSwajQ-Cxf0peROiryYifFMCMHuD_jeZu39Q0-18,2550
  cg/models/nallo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cg/models/nallo/nallo.py,sha256=7oUCkDQyuRxvbsct1IItzfEtQnHvZdpTRiwt41EOoIo,727
- cg/models/nf_analysis.py,sha256=ZWloZxpyyMrwsDVbawEWTh28ywfL143WGX86gDeCebw,1158
+ cg/models/nf_analysis.py,sha256=4iXBSiJJLap_H2zYJtTnRj8t24syWc6dwemFQAcPlbY,602
  cg/models/observations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cg/models/observations/input_files.py,sha256=oXDc4LFY3FXkxg6u4wASby4DjnNaKZ-zElf7-olezRk,1062
  cg/models/orders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -467,7 +466,7 @@ cg/resources/rnafusion_bundle_filenames.yaml,sha256=hoXuTobKbjH7W7dld87rSF7X0r4K
  cg/resources/taxprofiler_bundle_filenames.yaml,sha256=AULMEAYkMzADYUtVtuSmBj7UaAIlLGRDyBMEOO0xWz8,2871
  cg/resources/tomte_bundle_filenames.yaml,sha256=lFxk9GssmCyKBUn5lbRBtBS9FS9cABaoVzb-e2zrJac,4144
  cg/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- cg/server/admin.py,sha256=cC2PZXGguuCtodhajx5mLe223-WNsJ24iK99SpAc138,29913
+ cg/server/admin.py,sha256=XFr_wxXLyyUB6orQ1mFUHto4NFgJAMBgfEoR2p-RbeQ,30141
  cg/server/app.py,sha256=WEULzx5JeC0aJHag3Vltzopd8r7tgHPr_1nbytS-AI8,6975
  cg/server/app_config.py,sha256=8DaNtYtdp6A29FAuTFX-Lk7SLv7Zx-_f57ZU0-T_3ZI,1403
  cg/server/auto.py,sha256=5DqokNScv483imZTvOYrJo6wV9-P7ZGnJaaHnfnNJLs,57
@@ -912,7 +911,7 @@ cg/utils/checksum/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
  cg/utils/checksum/checksum.py,sha256=jRvL1f6TPV2QyhUBR-WUK7i67wB7a2FKcwrs6kr8GXI,1992
  cg/utils/click/EnumChoice.py,sha256=ogTT49nEAsu60YQeFLp1cCI9t6xzuxmVFA17za53cko,1669
  cg/utils/click/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- cg/utils/commands.py,sha256=0RWc0t9PDsntS3uvVH2swMuNOD7VtbFmQGEAg7MbmAc,4910
+ cg/utils/commands.py,sha256=D6tfGBGp3wQMXkNEhquSQ9ZyKzkMn8PvSGAllBVaZWQ,4344
  cg/utils/date.py,sha256=4aX7ZyyRfHHiNsA8yNYw2nzTON-yq3gQSypAEXHK4Wk,2192
  cg/utils/dict.py,sha256=Zr67gvr5_W1Lpe6_MUZDAegWa2oakXVW-JrZgiRcaAw,758
  cg/utils/email.py,sha256=VH15Ezbc_FgSOGJr9KvSiMaEU0TwNaZHVUXlRAaBzxs,1251
@@ -923,7 +922,7 @@ cg/utils/flask/enum.py,sha256=xwNVtFPkSzoloJctLHu7obRyxcng1GJrhkeYkqwf9tw,1052
  cg/utils/mapping.py,sha256=oZpZW2kgsbtAP2FZ7RtRPELiEE1zZk_nAGisHGtCOUo,491
  cg/utils/time.py,sha256=_VOglhrFEZ5cwHK1U1g36SdwzB7UvV-Nvlt4ymuZUho,1501
  cg/utils/utils.py,sha256=RciI_UhWcnG_pMZrmQZ1ZYb-O1N0DweTYMmhE0SIRgQ,1410
- cg-83.16.2.dist-info/METADATA,sha256=_QCdGEcZtR-yUxHQqEz8ecQ8KbpJIxe7kYBpXJEDnAs,4940
- cg-83.16.2.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
- cg-83.16.2.dist-info/entry_points.txt,sha256=q5f47YQQGltzK_xnIq1mDopRXXEItr85Xe1BCtG-Wts,39
- cg-83.16.2.dist-info/RECORD,,
+ cg-83.16.4.dist-info/METADATA,sha256=o4bGotxkz_SJ-MVCg5JZnXTx65FWbMp6YpGxpvIqEqE,4940
+ cg-83.16.4.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
+ cg-83.16.4.dist-info/entry_points.txt,sha256=q5f47YQQGltzK_xnIq1mDopRXXEItr85Xe1BCtG-Wts,39
+ cg-83.16.4.dist-info/RECORD,,
@@ -1,179 +0,0 @@
- """Module to handle NF executors."""
-
- from datetime import datetime
- from pathlib import Path
- from typing import Iterable
-
- from cg.apps.slurm.slurm_api import SlurmAPI
- from cg.constants.constants import FileExtensions, FileFormat
- from cg.constants.nextflow import JAVA_MEMORY_HEADJOB, NXF_JVM_ARGS_ENV, SlurmHeadJobDefaults
- from cg.io.controller import ReadFile
- from cg.models.slurm.sbatch import Sbatch
- from cg.utils.utils import build_command_from_dict
-
-
- class NfBaseHandler:
-     """
-     Parent class for handling the interaction with NF executors that are common to both Nextflow and NF-Tower.
-     """
-
-     pass
-
-
- class NfTowerHandler(NfBaseHandler):
-     """
-     Parent class for handling the interaction with NF-Tower.
-     """
-
-     @classmethod
-     def get_tower_launch_parameters(cls, tower_workflow: str, command_args: dict) -> list[str]:
-         """Returns a tower launch command given a dictionary with arguments."""
-
-         tower_options: list[str] = build_command_from_dict(
-             options={
-                 f"--{arg.replace('_', '-')}": command_args.get(arg, None)
-                 for arg in (
-                     "work_dir",
-                     "profile",
-                     "params_file",
-                     "config",
-                     "name",
-                     "revision",
-                     "compute_env",
-                     "stub_run",
-                 )
-             },
-             exclude_true=True,
-         )
-         return ["launch"] + tower_options + [tower_workflow]
-
-     @classmethod
-     def get_tower_relaunch_parameters(cls, from_tower_id: int, command_args: dict) -> list[str]:
-         """Returns a tower relaunch command given a dictionary with arguments."""
-
-         tower_options: list[str] = build_command_from_dict(
-             options={
-                 f"--{arg.replace('_', '-')}": command_args.get(arg, None)
-                 for arg in (
-                     "profile",
-                     "params_file",
-                     "config",
-                     "compute_env",
-                     "revision",
-                     "stub_run",
-                 )
-             },
-             exclude_true=True,
-         )
-         now: str = datetime.now().strftime("%Y%m%d%H%M%S")
-         return [
-             "runs",
-             "relaunch",
-             "--id",
-             str(from_tower_id),
-             "--name",
-             f"{command_args.get('name')}_from{str(from_tower_id)}_{now}",
-         ] + tower_options
-
-     @staticmethod
-     def get_tower_id(stdout_lines: Iterable) -> str:
-         """Parse the stdout and return a workflow id. An example of the output to parse is:
-         Case <CASE_ID> exists in status db
-         Running RNAFUSION analysis for <CASE_ID>
-         Workflow will be executed using tower
-         Running command <COMMAND>
-
-         Workflow 1uxZE9JM7Tl58r submitted at [<WORKSPACE>] workspace.
-
-         https://<URL_TO_TOWER_CASE>
-         Action running set for case <CASE_ID>"""
-         for line in stdout_lines:
-             if line.strip().startswith("Workflow"):
-                 return line.strip().split()[1]
-
-     @classmethod
-     def get_last_tower_id(cls, case_id: str, trailblazer_config: Path) -> int:
-         """Return the previously-stored NF-Tower ID."""
-         if not trailblazer_config.exists():
-             raise FileNotFoundError(f"No NF-Tower ID found for case {case_id}.")
-         return ReadFile.get_content_from_file(
-             file_format=FileFormat.YAML, file_path=trailblazer_config
-         ).get(case_id)[-1]
-
-
- class NextflowHandler(NfBaseHandler):
-     """
-     Parent class for handling the interaction with Nextflow.
-     """
-
-     @staticmethod
-     def get_variables_to_export() -> dict[str, str]:
-         """Dictionary with required environment variables to be exported."""
-         return {NXF_JVM_ARGS_ENV: f"'{JAVA_MEMORY_HEADJOB}'"}
-
-     @classmethod
-     def get_nextflow_run_parameters(
-         cls, case_id: str, workflow_bin_path: str, root_dir: str, command_args: dict
-     ) -> list[str]:
-         """Returns a Nextflow run command given a dictionary with arguments."""
-
-         nextflow_options: list[str] = build_command_from_dict(
-             options=dict((f"-{arg}", command_args.get(arg, True)) for arg in ("log", "config")),
-             exclude_true=True,
-         )
-         run_options: list[str] = build_command_from_dict(
-             options=dict(
-                 (f"-{arg.replace('_', '-')}", command_args.get(arg, None))
-                 for arg in (
-                     "work_dir",
-                     "resume",
-                     "profile",
-                     "with_tower",
-                     "params_file",
-                 )
-             ),
-             exclude_true=True,
-         )
-         return nextflow_options + ["run", workflow_bin_path] + run_options
-
-     @staticmethod
-     def get_head_job_sbatch_path(case_directory: Path) -> Path:
-         """Path to Nextflow sbatch for the head job."""
-         return Path(case_directory, "nextflow_head_job").with_suffix(FileExtensions.SBATCH)
-
-     @classmethod
-     def execute_head_job(
-         cls,
-         case_id: str,
-         case_directory: Path,
-         slurm_account: str,
-         email: str,
-         qos: str,
-         commands: str,
-         hours: int = SlurmHeadJobDefaults.HOURS,
-         memory: int = SlurmHeadJobDefaults.MEMORY,
-         number_tasks: int = SlurmHeadJobDefaults.NUMBER_TASKS,
-         dry_run: bool = False,
-     ) -> int:
-         """Executes Nextflow head job command."""
-
-         slurm_api: SlurmAPI = SlurmAPI()
-         slurm_api.set_dry_run(dry_run=dry_run)
-         sbatch_parameters: Sbatch = Sbatch(
-             account=slurm_account,
-             commands=commands,
-             email=email,
-             hours=hours,
-             job_name=f"{case_id}.%j",
-             log_dir=case_directory.as_posix(),
-             memory=memory,
-             number_tasks=number_tasks,
-             quality_of_service=qos,
-         )
-
-         sbatch_content: str = slurm_api.generate_sbatch_content(sbatch_parameters=sbatch_parameters)
-         sbatch_path: Path = cls.get_head_job_sbatch_path(case_directory=case_directory)
-         sbatch_number: int = slurm_api.submit_sbatch(
-             sbatch_content=sbatch_content, sbatch_path=sbatch_path
-         )
-         return sbatch_number
File without changes
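
The deleted executor-handler module above also carried NfTowerHandler.get_tower_id, which scans Tower CLI stdout for the first line starting with "Workflow" and takes the second token as the run id. That parsing logic in isolation, with made-up output lines matching the docstring's example:

    from typing import Iterable


    def get_tower_id(stdout_lines: Iterable[str]) -> str | None:
        """Return the workflow id from Tower CLI output, as the removed handler did."""
        for line in stdout_lines:
            if line.strip().startswith("Workflow"):
                return line.strip().split()[1]
        return None


    output = [
        "Running command tw launch ...",
        "",
        "  Workflow 1uxZE9JM7Tl58r submitted at [workspace] workspace.",
        "",
    ]
    print(get_tower_id(output))  # 1uxZE9JM7Tl58r
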