cg 76.0.0__py3-none-any.whl → 83.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cg/__init__.py +1 -1
- cg/apps/housekeeper/hk.py +18 -1
- cg/apps/tb/api.py +42 -5
- cg/cli/transfer.py +13 -2
- cg/cli/upload/mutacc.py +16 -3
- cg/cli/upload/scout.py +2 -2
- cg/cli/upload/utils.py +10 -1
- cg/cli/workflow/balsamic/base.py +86 -172
- cg/cli/workflow/balsamic/options.py +3 -48
- cg/cli/workflow/balsamic/umi.py +210 -15
- cg/cli/workflow/microsalt/base.py +4 -2
- cg/cli/workflow/mip_dna/base.py +1 -1
- cg/cli/workflow/nallo/base.py +73 -23
- cg/cli/workflow/nf_analysis.py +5 -207
- cg/cli/workflow/raredisease/base.py +41 -54
- cg/cli/workflow/rnafusion/base.py +38 -8
- cg/cli/workflow/taxprofiler/base.py +31 -18
- cg/cli/workflow/tomte/base.py +83 -10
- cg/constants/constants.py +25 -30
- cg/constants/devices.py +6 -1
- cg/constants/gene_panel.py +3 -1
- cg/constants/housekeeper_tags.py +28 -28
- cg/constants/lims.py +4 -0
- cg/constants/nf_analysis.py +0 -1
- cg/constants/observations.py +21 -5
- cg/constants/orderforms.py +3 -3
- cg/constants/pacbio.py +1 -0
- cg/constants/priority.py +1 -1
- cg/constants/report.py +1 -0
- cg/constants/scout.py +12 -9
- cg/constants/sequencing.py +2 -2
- cg/constants/tb.py +5 -5
- cg/exc.py +27 -5
- cg/meta/compress/compress.py +7 -2
- cg/meta/delivery_report/balsamic.py +3 -1
- cg/meta/delivery_report/delivery_report_api.py +4 -3
- cg/meta/delivery_report/nallo.py +11 -11
- cg/meta/delivery_report/raredisease.py +7 -3
- cg/meta/delivery_report/templates/macros/data_analysis/qc_metrics/balsamic_qc_metrics.html +1 -0
- cg/meta/delivery_report/templates/macros/ticket_system.html +1 -1
- cg/meta/observations/balsamic_observations_api.py +110 -14
- cg/meta/observations/mip_dna_observations_api.py +1 -1
- cg/meta/observations/nallo_observations_api.py +1 -1
- cg/meta/observations/observations_api.py +23 -32
- cg/meta/observations/raredisease_observations_api.py +1 -1
- cg/meta/tar/tar.py +5 -2
- cg/meta/transfer/lims.py +32 -3
- cg/meta/upload/balsamic/balsamic.py +1 -8
- cg/meta/upload/coverage.py +5 -5
- cg/meta/upload/raredisease/raredisease.py +3 -0
- cg/meta/upload/scout/hk_tags.py +1 -0
- cg/meta/upload/scout/nallo_config_builder.py +31 -7
- cg/meta/workflow/balsamic.py +70 -36
- cg/meta/workflow/fastq.py +8 -0
- cg/meta/workflow/microsalt/quality_controller/models.py +0 -2
- cg/meta/workflow/microsalt/quality_controller/quality_controller.py +8 -16
- cg/meta/workflow/microsalt/quality_controller/result_logger.py +3 -6
- cg/meta/workflow/microsalt/quality_controller/utils.py +2 -45
- cg/meta/workflow/nallo.py +21 -99
- cg/meta/workflow/nf_analysis.py +12 -263
- cg/meta/workflow/raredisease.py +3 -112
- cg/meta/workflow/rnafusion.py +2 -34
- cg/meta/workflow/taxprofiler.py +2 -38
- cg/meta/workflow/tomte.py +2 -42
- cg/models/balsamic/config.py +0 -24
- cg/models/balsamic/metrics.py +5 -3
- cg/models/cg_config.py +39 -16
- cg/models/deliverables/metric_deliverables.py +1 -1
- cg/models/delivery_report/metadata.py +2 -1
- cg/models/nallo/nallo.py +14 -64
- cg/models/nf_analysis.py +1 -41
- cg/models/raredisease/raredisease.py +1 -63
- cg/models/rnafusion/rnafusion.py +0 -26
- cg/models/scout/scout_load_config.py +5 -2
- cg/models/taxprofiler/taxprofiler.py +0 -42
- cg/models/tomte/tomte.py +0 -69
- cg/resources/nallo_bundle_filenames.yaml +292 -22
- cg/resources/raredisease_bundle_filenames.yaml +11 -1
- cg/resources/taxprofiler_bundle_filenames.yaml +20 -0
- cg/server/admin.py +106 -25
- cg/server/app.py +15 -4
- cg/server/endpoints/sequencing_run/dtos.py +21 -3
- cg/server/endpoints/sequencing_run/pacbio_sequencing_run.py +29 -10
- cg/server/endpoints/sequencing_run/pacbio_smrt_cell_metrics.py +20 -0
- cg/services/analysis_starter/{service.py → analysis_starter.py} +11 -9
- cg/services/analysis_starter/configurator/abstract_model.py +8 -0
- cg/services/analysis_starter/configurator/configurator.py +1 -1
- cg/services/analysis_starter/configurator/extensions/nallo.py +27 -0
- cg/services/analysis_starter/configurator/extensions/{abstract.py → pipeline_extension.py} +1 -1
- cg/services/analysis_starter/configurator/extensions/raredisease.py +3 -1
- cg/services/analysis_starter/configurator/extensions/tomte_extension.py +28 -0
- cg/services/analysis_starter/configurator/file_creators/balsamic_config.py +240 -0
- cg/services/analysis_starter/configurator/file_creators/gene_panel.py +10 -5
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/abstract.py +2 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/models.py +40 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/nallo.py +37 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/raredisease.py +8 -5
- cg/services/analysis_starter/configurator/file_creators/nextflow/params_file/tomte_params_file_creator.py +64 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/creator.py +1 -1
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/nallo_sample_sheet_creator.py +65 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/protocol.py +12 -0
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{raredisease.py → raredisease_sample_sheet_creator.py} +2 -2
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{rnafusion.py → rnafusion_sample_sheet_creator.py} +2 -2
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/{taxprofiler.py → taxprofiler_sample_sheet_creator.py} +2 -2
- cg/services/analysis_starter/configurator/file_creators/nextflow/sample_sheet/tomte_sample_sheet_creator.py +36 -0
- cg/services/analysis_starter/configurator/implementations/balsamic.py +68 -0
- cg/services/analysis_starter/configurator/implementations/nextflow.py +22 -5
- cg/services/analysis_starter/configurator/models/balsamic.py +152 -0
- cg/services/analysis_starter/configurator/models/mip_dna.py +6 -8
- cg/services/analysis_starter/configurator/models/nextflow.py +9 -0
- cg/services/analysis_starter/constants.py +2 -0
- cg/services/analysis_starter/factories/configurator_factory.py +131 -51
- cg/services/analysis_starter/factories/starter_factory.py +36 -7
- cg/services/analysis_starter/input_fetcher/implementations/bam_fetcher.py +57 -0
- cg/services/analysis_starter/input_fetcher/implementations/fastq_fetcher.py +3 -3
- cg/services/analysis_starter/submitters/seqera_platform/{client.py → seqera_platform_client.py} +19 -3
- cg/services/analysis_starter/submitters/seqera_platform/seqera_platform_submitter.py +73 -0
- cg/services/analysis_starter/submitters/submitter.py +1 -1
- cg/services/analysis_starter/submitters/subprocess/submitter.py +2 -1
- cg/services/analysis_starter/tracker/implementations/balsamic.py +22 -0
- cg/services/analysis_starter/tracker/implementations/microsalt.py +4 -4
- cg/services/analysis_starter/tracker/implementations/mip_dna.py +4 -1
- cg/services/analysis_starter/tracker/implementations/{nextflow.py → nextflow_tracker.py} +6 -4
- cg/services/analysis_starter/tracker/tracker.py +19 -15
- cg/services/deliver_files/factory.py +1 -1
- cg/services/delivery_message/messages/__init__.py +24 -14
- cg/services/delivery_message/messages/{microsalt_mwr_message.py → microsalt_message.py} +1 -1
- cg/services/delivery_message/utils.py +4 -40
- cg/services/illumina/backup/backup_service.py +29 -7
- cg/services/orders/validation/constants.py +3 -0
- cg/services/orders/validation/index_sequences.py +558 -0
- cg/services/orders/validation/order_types/microsalt/models/sample.py +2 -3
- cg/services/run_devices/pacbio/data_storage_service/pacbio_store_service.py +39 -18
- cg/services/run_devices/pacbio/data_transfer_service/data_transfer_service.py +8 -2
- cg/services/run_devices/pacbio/data_transfer_service/dto.py +9 -3
- cg/services/run_devices/pacbio/data_transfer_service/utils.py +14 -7
- cg/services/run_devices/pacbio/metrics_parser/models.py +1 -0
- cg/services/run_devices/pacbio/sequencing_runs_service.py +35 -7
- cg/services/sequencing_qc_service/quality_checks/checks.py +18 -16
- cg/services/sequencing_qc_service/quality_checks/utils.py +82 -18
- cg/services/sequencing_qc_service/sequencing_qc_service.py +12 -10
- cg/store/crud/create.py +73 -42
- cg/store/crud/read.py +73 -7
- cg/store/crud/update.py +14 -3
- cg/store/models.py +98 -35
- cg/store/store.py +8 -1
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/METADATA +1 -1
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/RECORD +150 -138
- cg/services/analysis_starter/submitters/seqera_platform/submitter.py +0 -39
- cg/services/delivery_message/messages/microsalt_mwx_message.py +0 -18
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/WHEEL +0 -0
- {cg-76.0.0.dist-info → cg-83.14.0.dist-info}/entry_points.txt +0 -0
cg/store/crud/create.py
CHANGED

@@ -9,6 +9,7 @@ from sqlalchemy.orm import Session

 from cg.constants import DataDelivery, Priority, Workflow
 from cg.constants.archiving import PDC_ARCHIVE_LOCATION
+from cg.exc import PacbioSequencingRunAlreadyExistsError
 from cg.models.orders.constants import OrderType
 from cg.services.illumina.data_transfer.models import (
     IlluminaFlowCellDTO,
@@ -19,6 +20,7 @@ from cg.services.run_devices.pacbio.data_transfer_service.dto import (
     PacBioSampleSequencingMetricsDTO,
     PacBioSequencingRunDTO,
     PacBioSMRTCellDTO,
+    PacBioSMRTCellMetricsDTO,
 )
 from cg.store.crud.read import ReadHandler
 from cg.store.database import get_session
@@ -44,6 +46,7 @@ from cg.store.models import (
     PacbioSampleSequencingMetrics,
     PacbioSequencingRun,
     PacbioSMRTCell,
+    PacbioSMRTCellMetrics,
     Panel,
     Pool,
     Sample,
@@ -500,47 +503,75 @@ class CreateMixin(ReadHandler):
         self.add_item_to_store(new_smrt_cell)
         return new_smrt_cell

-    def
-        self,
+    def create_pacbio_sequencing_run(
+        self, pacbio_sequencing_run_dto: PacBioSequencingRunDTO
     ) -> PacbioSequencingRun:
+        """Create a new PacBio sequencing run
+
+        Raises PacbioSequencingRunAlreadyExistsError:
+            - When run name already exists in the database
+        """
+        if (
+            self._get_query(table=PacbioSequencingRun)
+            .filter(PacbioSequencingRun.run_name == pacbio_sequencing_run_dto.run_name)
+            .first()
+        ):
+            raise PacbioSequencingRunAlreadyExistsError(
+                message=f"{pacbio_sequencing_run_dto.run_name} already exists."
+            )
+        else:
+            LOG.debug(f"Creating Pacbio Sequencing Run for {pacbio_sequencing_run_dto.run_name}")
+            sequencing_run = PacbioSequencingRun(
+                instrument_name=pacbio_sequencing_run_dto.instrument_name,
+                run_name=pacbio_sequencing_run_dto.run_name,
+            )
+            self.add_item_to_store(sequencing_run)
+            return sequencing_run
+
+    def create_pacbio_smrt_cell_metrics(
+        self,
+        sequencing_run: PacbioSequencingRun,
+        smrt_cell_metrics_dto: PacBioSMRTCellMetricsDTO,
+        smrt_cell: PacbioSMRTCell,
+    ) -> PacbioSMRTCellMetrics:
+        LOG.debug(f"Creating Pacbio SMRT cell metrics for SMRT cell {smrt_cell.internal_id}")
+        new_sequencing_run = PacbioSMRTCellMetrics(
+            barcoded_hifi_mean_read_length=smrt_cell_metrics_dto.barcoded_hifi_mean_read_length,
+            barcoded_hifi_reads=smrt_cell_metrics_dto.barcoded_hifi_reads,
+            barcoded_hifi_reads_percentage=smrt_cell_metrics_dto.barcoded_hifi_reads_percentage,
+            barcoded_hifi_yield=smrt_cell_metrics_dto.barcoded_hifi_yield,
+            barcoded_hifi_yield_percentage=smrt_cell_metrics_dto.barcoded_hifi_yield_percentage,
+            completed_at=smrt_cell_metrics_dto.completed_at,
+            control_mean_read_concordance=smrt_cell_metrics_dto.control_mean_read_concordance,
+            control_mean_read_length=smrt_cell_metrics_dto.control_mean_read_length,
+            control_mode_read_concordance=smrt_cell_metrics_dto.control_mode_read_concordance,
+            control_reads=smrt_cell_metrics_dto.control_reads,
             device=smrt_cell,
+            failed_mean_read_length=smrt_cell_metrics_dto.failed_mean_read_length,
+            failed_reads=smrt_cell_metrics_dto.failed_reads,
+            failed_yield=smrt_cell_metrics_dto.failed_yield,
+            hifi_mean_read_length=smrt_cell_metrics_dto.hifi_mean_read_length,
+            hifi_median_read_quality=smrt_cell_metrics_dto.hifi_median_read_quality,
+            hifi_reads=smrt_cell_metrics_dto.hifi_reads,
+            hifi_yield=smrt_cell_metrics_dto.hifi_yield,
+            movie_name=smrt_cell_metrics_dto.movie_name,
+            p0_percent=smrt_cell_metrics_dto.p0_percent,
+            p1_percent=smrt_cell_metrics_dto.p1_percent,
+            p2_percent=smrt_cell_metrics_dto.p2_percent,
+            percent_reads_passing_q30=smrt_cell_metrics_dto.percent_reads_passing_q30,
+            plate=smrt_cell_metrics_dto.plate,
+            polymerase_longest_subread_n50=smrt_cell_metrics_dto.polymerase_longest_subread_n50,
+            polymerase_mean_longest_subread=smrt_cell_metrics_dto.polymerase_mean_longest_subread,
+            polymerase_mean_read_length=smrt_cell_metrics_dto.polymerase_mean_read_length,
+            polymerase_read_length_n50=smrt_cell_metrics_dto.polymerase_read_length_n50,
+            productive_zmws=smrt_cell_metrics_dto.productive_zmws,
+            sequencing_run=sequencing_run,
+            started_at=smrt_cell_metrics_dto.started_at,
+            type=smrt_cell_metrics_dto.type,
+            unbarcoded_hifi_mean_read_length=smrt_cell_metrics_dto.unbarcoded_hifi_mean_read_length,
+            unbarcoded_hifi_reads=smrt_cell_metrics_dto.unbarcoded_hifi_reads,
+            unbarcoded_hifi_yield=smrt_cell_metrics_dto.unbarcoded_hifi_yield,
+            well=smrt_cell_metrics_dto.well,
         )
         self.add_item_to_store(new_sequencing_run)
         return new_sequencing_run
@@ -548,11 +579,11 @@ class CreateMixin(ReadHandler):
     def create_pac_bio_sample_sequencing_run(
         self,
         sample_run_metrics_dto: PacBioSampleSequencingMetricsDTO,
+        smrt_cell_metrics: PacbioSMRTCellMetrics,
     ) -> PacbioSampleSequencingMetrics:
         sample_id: str = sample_run_metrics_dto.sample_internal_id
         LOG.debug(f"Creating Pacbio sample sequencing metric for sample {sample_id}")
-        sample: Sample = self.
+        sample: Sample = self.get_sample_by_internal_id_strict(sample_id)
         if not sample:
             self.rollback()
             raise EntryNotFoundError(f"Sample not found: {sample_id}")
@@ -562,7 +593,7 @@ class CreateMixin(ReadHandler):
             hifi_yield=sample_run_metrics_dto.hifi_yield,
             hifi_mean_read_length=sample_run_metrics_dto.hifi_mean_read_length,
             hifi_median_read_quality=sample_run_metrics_dto.hifi_median_read_quality,
-            instrument_run=
+            instrument_run=smrt_cell_metrics,
             polymerase_mean_read_length=sample_run_metrics_dto.polymerase_mean_read_length,
         )
         self.add_item_to_store(new_sample_sequencing_run)
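The new `create_pacbio_sequencing_run` rejects duplicate run names by raising `PacbioSequencingRunAlreadyExistsError`, so callers have to decide what to do when a run is already stored. A minimal caller sketch, assuming a configured `Store` connected to the status database; only the `Store` methods and the exception come from the diff above, the helper function itself is illustrative:

```python
from cg.exc import PacbioSequencingRunAlreadyExistsError
from cg.services.run_devices.pacbio.data_transfer_service.dto import PacBioSequencingRunDTO
from cg.store.store import Store


def get_or_create_pacbio_run(store: Store, run_dto: PacBioSequencingRunDTO):
    """Create the run if it is new, otherwise fall back to the stored record."""
    try:
        return store.create_pacbio_sequencing_run(pacbio_sequencing_run_dto=run_dto)
    except PacbioSequencingRunAlreadyExistsError:
        # Run names are unique, so a duplicate means the run was stored earlier.
        return store.get_pacbio_sequencing_run_by_run_name(run_name=run_dto.run_name)
```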
cg/store/crud/read.py
CHANGED

@@ -20,6 +20,7 @@ from cg.exc import (
     CgDataError,
     CgError,
     OrderNotFoundError,
+    PacbioSequencingRunNotFoundError,
     SampleNotFoundError,
 )
 from cg.models.orders.constants import OrderType
@@ -97,6 +98,7 @@ from cg.store.models import (
     PacbioSampleSequencingMetrics,
     PacbioSequencingRun,
     PacbioSMRTCell,
+    PacbioSMRTCellMetrics,
     Panel,
     Pool,
     RunDevice,
@@ -903,20 +905,19 @@ class ReadHandler(BaseHandler):
             valid_from=dt.datetime.now(),
         ).first()

-    def
+    def get_applications_by_prep_category(
         self, prep_category: SeqLibraryPrepCategory
     ) -> list[Application]:
-        """Return all
+        """Return all applications by prep category."""
         return apply_application_filter(
             applications=self._get_query(table=Application),
             filter_functions=[
                 ApplicationFilter.BY_PREP_CATEGORIES,
-                ApplicationFilter.IS_NOT_ARCHIVED,
             ],
             prep_categories=[prep_category],
         ).all()

-    def get_bed_version_by_file_name(self, bed_version_file_name: str) -> BedVersion:
+    def get_bed_version_by_file_name(self, bed_version_file_name: str) -> BedVersion | None:
         """Return bed version with file name."""
         return apply_bed_version_filter(
             bed_versions=self._get_query(table=BedVersion),
@@ -1113,7 +1114,9 @@ class ReadHandler(BaseHandler):
         """Return all cases in the database with samples."""
         return self._get_join_cases_with_samples_query()

-    def get_cases_to_analyze(
+    def get_cases_to_analyze(
+        self, workflow: Workflow = None, limit: int | None = None
+    ) -> list[Case]:
         """Returns a list if cases ready to be analyzed or set to be reanalyzed.
         1. Get cases to be analyzed using BE query
         2. Use the latest analysis for case to determine if the case is to be analyzed"""
@@ -1165,6 +1168,23 @@ class ReadHandler(BaseHandler):
             internal_id=internal_id,
         ).first()

+    def get_sample_by_internal_id_strict(self, internal_id: str) -> Sample:
+        """
+        Return a sample by lims id.
+        Raises:
+            SampleNotFoundError: If no sample is found with the given internal id.
+        """
+        try:
+            return apply_sample_filter(
+                filter_functions=[SampleFilter.BY_INTERNAL_ID],
+                samples=self._get_query(table=Sample),
+                internal_id=internal_id,
+            ).one()
+        except sqlalchemy.orm.exc.NoResultFound:
+            raise SampleNotFoundError(
+                f"Sample with internal id {internal_id} was not found in the database."
+            )
+
     def get_samples_by_identifier(self, object_type: str, identifier: str) -> list[Sample]:
         """Return all samples from a flow cell, case or sample id"""
         object_to_filter: dict[str, Callable] = {
@@ -1788,18 +1808,34 @@ class ReadHandler(BaseHandler):
         sequencing_metrics = sequencing_metrics.filter(RunDevice.internal_id.in_(smrt_cell_ids))
         return sequencing_metrics.all()

-    def
+    def get_pacbio_smrt_cell_metrics_by_run_name(
+        self, run_name: str
+    ) -> list[PacbioSMRTCellMetrics]:
         """
         Fetches data from PacbioSequencingRunDTO filtered on run name.
         Raises:
             EntryNotFoundError if no sequencing runs are found for the run name
         """
-        runs: Query = self._get_query(table=
+        runs: Query = self._get_query(table=PacbioSMRTCellMetrics).join(
+            PacbioSMRTCellMetrics.sequencing_run
+        )
         runs = runs.filter(PacbioSequencingRun.run_name == run_name)
         if runs.count() == 0:
             raise EntryNotFoundError(f"Could not find any sequencing runs for {run_name}")
         return runs.all()

+    def get_pacbio_sequencing_runs(
+        self, page: int = 0, page_size: int = 0
+    ) -> tuple[list[PacbioSequencingRun], int]:
+        query = self._get_query(PacbioSequencingRun).order_by(PacbioSequencingRun.id.desc())
+
+        if page and page_size:
+            query = query.limit(page_size).offset((page - 1) * page_size)
+
+        total_count: int = self._get_query(table=PacbioSequencingRun).count()
+
+        return query.all(), total_count
+
     def get_case_priority(self, case_id: str) -> SlurmQos:
         """Get case priority."""
         case: Case = self.get_case_by_internal_id(case_id)
@@ -1817,3 +1853,33 @@ class ReadHandler(BaseHandler):
         ):
             return True
         return False
+
+    def get_pacbio_sequencing_run_by_id(self, id: int):
+        """
+        Get Pacbio Sequencing run by id.
+        Raises:
+            PacbioSequencingRunNotFoundError: If no Pacbio sequencing run is found with the given id.
+        """
+        try:
+            return (
+                self._get_query(table=PacbioSequencingRun)
+                .filter(PacbioSequencingRun.id == id)
+                .one()
+            )
+        except sqlalchemy.orm.exc.NoResultFound:
+            raise PacbioSequencingRunNotFoundError(
+                f"Pacbio Sequencing run with id {id} was not found in the database."
+            )
+
+    def get_pacbio_sequencing_run_by_run_name(self, run_name: str) -> PacbioSequencingRun:
+        """
+        Get Pacbio Sequencing run by run name.
+        Raises:
+            PacbioSequencingRunNotFoundError: If no Pacbio sequencing run is found with the given run name.
+        """
+        try:
+            return self._get_query(table=PacbioSequencingRun).filter_by(run_name=run_name).one()
+        except sqlalchemy.orm.exc.NoResultFound:
+            raise PacbioSequencingRunNotFoundError(
+                f"Pacbio Sequencing run with run_name {run_name} was not found in the database."
+            )
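The new `get_pacbio_sequencing_runs` returns both the requested page and the total row count, and paging only applies when both `page` and `page_size` are non-zero (with `page` being 1-based, given the `(page - 1) * page_size` offset). A small usage sketch, assuming a configured `Store`; the wrapper function and printed fields are illustrative:

```python
from cg.store.store import Store


def print_latest_pacbio_runs(store: Store, page: int = 1, page_size: int = 10) -> int:
    """List one page of PacBio runs, newest first, and return the total run count."""
    runs, total_count = store.get_pacbio_sequencing_runs(page=page, page_size=page_size)
    for run in runs:
        # run_name, instrument_name and processed are columns on PacbioSequencingRun.
        print(f"{run.run_name}\t{run.instrument_name}\tprocessed={run.processed}")
    return total_count
```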
cg/store/crud/update.py
CHANGED

@@ -13,6 +13,7 @@ from cg.store.models import (
     IlluminaSampleSequencingMetrics,
     IlluminaSequencingRun,
     Order,
+    PacbioSequencingRun,
     Sample,
 )

@@ -78,10 +79,10 @@ class UpdateMixin(ReadHandler):
         sample.reads = total_reads_for_sample
         self.commit_to_store()

-    def
+    def update_sample_reads_pacbio(self, internal_id: str, reads: int):
         """Add reads to the current reads for a sample."""
-        sample: Sample = self.
-        sample.reads
+        sample: Sample = self.get_sample_by_internal_id_strict(internal_id)
+        sample.reads = reads
         self.commit_to_store()

     def update_sample_sequenced_at(self, internal_id: str, date: datetime):
@@ -132,3 +133,13 @@ class UpdateMixin(ReadHandler):
         analysis: Analysis = self.get_analysis_by_entry_id(analysis_id)
         analysis.delivery_report_created_at = delivery_report_date
         self.commit_to_store()
+
+    def update_pacbio_sequencing_run_comment(self, id: int, comment: str):
+        sequencing_run: PacbioSequencingRun = self.get_pacbio_sequencing_run_by_id(id)
+        sequencing_run.comment = comment
+        self.commit_to_store()
+
+    def update_pacbio_sequencing_run_processed(self, id: int, processed: bool):
+        sequencing_run: PacbioSequencingRun = self.get_pacbio_sequencing_run_by_id(id)
+        sequencing_run.processed = processed
+        self.commit_to_store()
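Both update helpers resolve the run through `get_pacbio_sequencing_run_by_id`, which raises `PacbioSequencingRunNotFoundError` for unknown ids, and commit immediately. A hedged caller sketch, assuming a configured `Store`; the wrapper function is illustrative:

```python
from cg.exc import PacbioSequencingRunNotFoundError
from cg.store.store import Store


def mark_pacbio_run_processed(store: Store, run_id: int, comment: str = "") -> bool:
    """Flag a run as processed, optionally attach a comment; return False if the id is unknown."""
    try:
        if comment:
            store.update_pacbio_sequencing_run_comment(id=run_id, comment=comment)
        store.update_pacbio_sequencing_run_processed(id=run_id, processed=True)
        return True
    except PacbioSequencingRunNotFoundError:
        return False
```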
cg/store/models.py
CHANGED

@@ -1,6 +1,6 @@
 from datetime import datetime
 from enum import Enum
-from typing import Annotated
+from typing import Annotated, cast

 import sqlalchemy
 from sqlalchemy import (
@@ -28,7 +28,7 @@ from cg.constants.constants import (
     SexOptions,
     StatusOptions,
 )
-from cg.constants.devices import DeviceType
+from cg.constants.devices import DeviceType, RevioNames
 from cg.constants.priority import SlurmQos
 from cg.constants.sequencing import SeqLibraryPrepCategory
 from cg.constants.symbols import EMPTY_STRING
@@ -147,6 +147,8 @@ class Application(Base):
     min_sequencing_depth: Mapped[int] = mapped_column(default=0)
     target_reads: Mapped[BigInt | None] = mapped_column(default=0)
     percent_reads_guaranteed: Mapped[int]
+    target_hifi_yield: Mapped[BigInt | None] = mapped_column(default=None)
+    percent_hifi_yield_guaranteed: Mapped[int | None] = mapped_column(default=None)
     sample_amount: Mapped[int | None]
     sample_volume: Mapped[Text | None]
     sample_concentration: Mapped[Text | None]
@@ -191,9 +193,20 @@ class Application(Base):
         return self.tag

     @property
-    def expected_reads(self):
+    def expected_reads(self) -> float:
         return self.target_reads * self.percent_reads_guaranteed / 100

+    @property
+    def expected_hifi_yield(self) -> int | None:
+        if self.target_hifi_yield and self.percent_hifi_yield_guaranteed:
+            return round(self.target_hifi_yield * self.percent_hifi_yield_guaranteed / 100)
+        else:
+            return None
+
+    @property
+    def expected_express_hifi_yield(self) -> int | None:
+        return round(self.target_hifi_yield * 0.5) if self.target_hifi_yield else None
+
     @property
     def analysis_type(self) -> str:
         if self.prep_category == SeqLibraryPrepCategory.WHOLE_TRANSCRIPTOME_SEQUENCING.value:
@@ -283,6 +296,7 @@ class Analysis(Base):
     case: Mapped["Case"] = orm.relationship(back_populates="analyses")
     trailblazer_id: Mapped[int | None]
     housekeeper_version_id: Mapped[int | None]
+    session_id: Mapped[str | None]

     def __str__(self):
         return f"{self.case.internal_id} | {self.completed_at.date()}"
@@ -324,7 +338,7 @@ class BedVersion(Base):
     __table_args__ = (UniqueConstraint("bed_id", "version", name="_app_version_uc"),)

     id: Mapped[PrimaryKeyInt]
-    shortname: Mapped[Str64
+    shortname: Mapped[Str64]
     version: Mapped[int]
     filename: Mapped[Str256]
     checksum: Mapped[Str32 | None]
@@ -338,6 +352,10 @@ class BedVersion(Base):

     bed: Mapped[Bed] = orm.relationship(back_populates="versions")

+    @property
+    def bed_name(self) -> str:
+        return self.bed.name
+
     def __str__(self) -> str:
         return f"{self.bed.name} ({self.version})"

@@ -441,7 +459,7 @@ class Case(Base, PriorityMixin):
     created_at: Mapped[datetime | None] = mapped_column(default=datetime.now)
     customer_id: Mapped[int] = mapped_column(ForeignKey("customer.id", ondelete="CASCADE"))
     customer: Mapped["Customer"] = orm.relationship(foreign_keys=[customer_id])
-    data_analysis: Mapped[
+    data_analysis: Mapped[Workflow] = mapped_column(
         types.Enum(*(workflow.value for workflow in Workflow))
     )
     data_delivery: Mapped[str | None] = mapped_column(
@@ -548,11 +566,11 @@ class Case(Base, PriorityMixin):
         return [link.sample for link in self.links if link.sample.loqusdb_id]

     @property
-    def slurm_priority(self) ->
+    def slurm_priority(self) -> SlurmQos:
         """Get Quality of service (SLURM QOS) for the case."""
         if self.are_all_samples_control():
             return SlurmQos.EXPRESS
-        return Priority.priority_to_slurm_qos().get(self.priority)
+        return cast(SlurmQos, Priority.priority_to_slurm_qos().get(self.priority))

     def to_dict(self, links: bool = False, analyses: bool = False) -> dict:
         """Represent as dictionary."""
@@ -724,7 +742,7 @@ class Sample(Base, PriorityMixin):
     prepared_at: Mapped[datetime | None]

     priority: Mapped[Priority] = mapped_column(default=Priority.standard)
-    reads: Mapped[BigInt
+    reads: Mapped[BigInt] = mapped_column(default=0)
     last_sequenced_at: Mapped[datetime | None]
     received_at: Mapped[datetime | None]
     reference_genome: Mapped[Str255 | None]
@@ -754,16 +772,28 @@ class Sample(Base, PriorityMixin):
     def __str__(self) -> str:
         return f"{self.internal_id} ({self.name})"

+    @property
+    def hifi_yield(self) -> int | None:
+        if self._sample_run_metrics and getattr(self._sample_run_metrics[0], "hifi_yield", False):
+            return sum(metric.hifi_yield for metric in self._sample_run_metrics)  # type: ignore
+        else:
+            return None
+
     @property
     def archive_location(self) -> str:
         """Returns the data_archive_location if the customer linked to the sample."""
         return self.customer.data_archive_location

     @property
-    def expected_reads_for_sample(self) ->
+    def expected_reads_for_sample(self) -> float | None:
         """Return the expected reads of the sample."""
         return self.application_version.application.expected_reads

+    @property
+    def expected_hifi_yield(self) -> int | None:
+        """Return the expected HiFi yield of the sample."""
+        return self.application_version.application.expected_hifi_yield
+
     @property
     def has_reads(self) -> bool:
         return bool(self.reads)
@@ -1018,49 +1048,63 @@ class IlluminaSequencingRun(InstrumentRun):
         return data


-class
-    __tablename__ = "
+class PacbioSMRTCellMetrics(InstrumentRun):
+    __tablename__ = "pacbio_smrt_cell_metrics"

     id: Mapped[int] = mapped_column(
         ForeignKey("instrument_run.id", ondelete="CASCADE"), primary_key=True
     )
+    barcoded_hifi_mean_read_length: Mapped[BigInt]
+    barcoded_hifi_reads_percentage: Mapped[Num_6_2]
+    barcoded_hifi_reads: Mapped[BigInt]
+    barcoded_hifi_yield_percentage: Mapped[Num_6_2]
+    barcoded_hifi_yield: Mapped[BigInt]
     completed_at: Mapped[datetime]
+    control_mean_read_concordance: Mapped[Num_6_2]
+    control_mean_read_length: Mapped[BigInt]
+    control_mode_read_concordance: Mapped[Num_6_2]
+    control_reads: Mapped[BigInt]
+    failed_mean_read_length: Mapped[BigInt]
+    failed_reads: Mapped[BigInt]
+    failed_yield: Mapped[BigInt]
     hifi_mean_read_length: Mapped[BigInt]
     hifi_median_read_quality: Mapped[Str32]
+    hifi_reads: Mapped[BigInt]
+    hifi_yield: Mapped[BigInt]
+    movie_name: Mapped[Str32]
     p0_percent: Mapped[Num_6_2]
     p1_percent: Mapped[Num_6_2]
     p2_percent: Mapped[Num_6_2]
+    pacbio_sequencing_run_id: Mapped[int] = mapped_column(
+        ForeignKey(
+            "pacbio_sequencing_run.id",
+            ondelete="CASCADE",
+            name="pacbio_smrt_cell_metrics_pacbio_sequencing_run_fk",
+        )
+    )
+    percent_reads_passing_q30: Mapped[Num_6_2]
+    plate: Mapped[int]
+    polymerase_longest_subread_n50: Mapped[BigInt]
+    polymerase_mean_longest_subread: Mapped[BigInt]
     polymerase_mean_read_length: Mapped[BigInt]
     polymerase_read_length_n50: Mapped[BigInt]
-    control_mean_read_length: Mapped[BigInt]
-    control_mean_read_concordance: Mapped[Num_6_2]
-    control_mode_read_concordance: Mapped[Num_6_2]
-    failed_reads: Mapped[BigInt]
-    failed_yield: Mapped[BigInt]
-    failed_mean_read_length: Mapped[BigInt]
-    barcoded_hifi_reads: Mapped[BigInt]
-    barcoded_hifi_reads_percentage: Mapped[Num_6_2]
-    barcoded_hifi_yield: Mapped[BigInt]
-    barcoded_hifi_yield_percentage: Mapped[Num_6_2]
-    barcoded_hifi_mean_read_length: Mapped[BigInt]
+    productive_zmws: Mapped[BigInt]
+    started_at: Mapped[datetime]
+    unbarcoded_hifi_mean_read_length: Mapped[BigInt]
     unbarcoded_hifi_reads: Mapped[BigInt]
     unbarcoded_hifi_yield: Mapped[BigInt]
+    well: Mapped[Str32]
+
+    sequencing_run: Mapped["PacbioSequencingRun"] = orm.relationship(
+        back_populates="smrt_cell_metrics"
+    )

     __mapper_args__ = {"polymorphic_identity": DeviceType.PACBIO}

+    @property
+    def run_name(self) -> str:
+        return self.sequencing_run.run_name
+
     def to_dict(self):
         return to_dict(self)

@@ -1117,12 +1161,31 @@ class PacbioSampleSequencingMetrics(SampleRunMetrics):
     polymerase_mean_read_length: Mapped[BigInt]

     __mapper_args__ = {"polymorphic_identity": DeviceType.PACBIO}
+    instrument_run = orm.relationship(PacbioSMRTCellMetrics, back_populates="sample_metrics")

     def to_dict(self) -> dict:
         """Represent as dictionary"""
         return to_dict(self)


+class PacbioSequencingRun(Base):
+    """PacBio sequencing run, consisting of a set of SMRT-cells sequenced simultaneously."""
+
+    __tablename__ = "pacbio_sequencing_run"
+
+    id: Mapped[PrimaryKeyInt]
+    run_name: Mapped[Str64] = mapped_column(unique=True)
+    processed: Mapped[bool] = mapped_column(default=False)
+    comment: Mapped[Text] = mapped_column(default="")
+    instrument_name: Mapped[RevioNames] = mapped_column(
+        types.Enum(*(revio_name.value for revio_name in RevioNames))
+    )
+
+    smrt_cell_metrics: Mapped[list[PacbioSMRTCellMetrics]] = orm.relationship(
+        back_populates="sequencing_run"
+    )
+
+
 class OrderTypeApplication(Base):
     """Maps an order type to its allowed applications"""

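The new `Application.expected_hifi_yield` and `expected_express_hifi_yield` properties mirror the existing `expected_reads` arithmetic: the guaranteed fraction of the target yield, and a flat 50% of the target for express runs. A worked example with made-up numbers; only the formulas are taken from the diff above:

```python
# Hypothetical application values for illustration.
target_hifi_yield = 90_000_000_000          # target HiFi yield in bases
percent_hifi_yield_guaranteed = 75          # guaranteed percentage of the target

expected_hifi_yield = round(target_hifi_yield * percent_hifi_yield_guaranteed / 100)
expected_express_hifi_yield = round(target_hifi_yield * 0.5)

assert expected_hifi_yield == 67_500_000_000
assert expected_express_hifi_yield == 45_000_000_000
```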
cg/store/store.py
CHANGED

@@ -10,4 +10,11 @@ class Store(
     DeleteMixin,
     UpdateMixin,
 ):
+    def recalculate_sample_reads_pacbio(self, sample_id: str) -> None:
+        reads: int = sum(
+            metric.hifi_reads
+            for metric in self.get_pacbio_sample_sequencing_metrics(
+                sample_id=sample_id, smrt_cell_ids=None
+            )
+        )
+        self.update_sample_reads_pacbio(internal_id=sample_id, reads=reads)