@sjcrh/proteinpaint-server 2.153.1-3 → 2.155.1-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sjcrh/proteinpaint-server",
3
- "version": "2.153.1-3",
3
+ "version": "2.155.1-0",
4
4
  "type": "module",
5
5
  "description": "a genomics visualization tool for exploring a cohort's genotype and phenotype data",
6
6
  "main": "src/app.js",
@@ -0,0 +1,124 @@
1
+ import { gdcGRIN2listPayload } from "#types";
2
+ import ky from "ky";
3
+ import { joinUrl } from "#shared/joinUrl.js";
4
+ import serverconfig from "#src/serverconfig.js";
5
// Cap on the number of file hits requested from the GDC /files endpoint (sent as `size`)
const maxFileNumber = 1e3;
// Only MAF files produced by this GDC analysis workflow are listed
const allowedWorkflowType = "Aliquot Ensemble Somatic Variant Merging and Masking";
// Per-file size cap in bytes; hits with file_size >= this are skipped in listMafFiles
const maxFileSizeAllowed = 1e6;
// Returned to the client in the listing payload; presumably the total compressed
// download budget enforced client-side — confirm against client code.
// Configurable via serverconfig.features.gdcMafMaxFileSize, defaults to 4e8.
const maxTotalSizeCompressed = serverconfig.features.gdcMafMaxFileSize || 4e8;
9
// Route descriptor for the GDC GRIN2 file-listing endpoint.
// GET and POST share the same payload definition and handler factory;
// each method gets its own config object.
const api = {
	endpoint: "gdc/GRIN2list",
	methods: {
		get: Object.assign({}, gdcGRIN2listPayload, { init }),
		post: Object.assign({}, gdcGRIN2listPayload, { init })
	}
};
22
// Builds the Express handler for the GRIN2list endpoint.
// Resolves the hg38 GDC dataset, then responds with the MAF file listing;
// any failure is reported as { status: 'error', error }.
function init({ genomes }) {
	return async (req, res) => {
		try {
			const genome = genomes.hg38;
			if (!genome) throw "hg38 missing";
			const dataset = genome.datasets.GDC;
			if (!dataset) throw "hg38 GDC missing";
			const payload = await listMafFiles(req.query, dataset);
			res.send(payload);
		} catch (e) {
			res.send({ status: "error", error: e.message || e });
		}
	};
}
38
// Queries the GDC REST /files endpoint and returns open-access MAF files
// eligible for GRIN2 analysis.
// q:  client request query; q.experimentalStrategy narrows the search,
//     q.filter0 (optional) is a GDC cohort filter forwarded as case_filters
// ds: GDC dataset object; ds.getHostHeaders(q) supplies the REST host
// Returns { files, filesTotal, maxTotalSizeCompressed }; files[] is sorted
// by descending file_size. Throws (strings) on malformed GDC responses.
async function listMafFiles(q, ds) {
	// restrict hits to MAF-format files
	const dataFormatFilter = {
		op: "and",
		content: [{ op: "=", content: { field: "data_format", value: "MAF" } }]
	};
	// full file filter: MAF format + requested strategy + allowed workflow + open access
	const filters = {
		op: "and",
		content: [
			dataFormatFilter,
			{ op: "=", content: { field: "experimental_strategy", value: q.experimentalStrategy } },
			{ op: "=", content: { field: "analysis.workflow_type", value: allowedWorkflowType } },
			{ op: "=", content: { field: "access", value: "open" } }
		]
	};
	// cohort filter from the client is sent separately via the case_filters param
	const case_filters = { op: "and", content: [] };
	if (q.filter0) {
		case_filters.content.push(q.filter0);
	}
	const { host } = ds.getHostHeaders(q);
	const body = {
		filters,
		// cap the number of hits returned
		size: maxFileNumber,
		fields: [
			"id",
			"file_size",
			"cases.project.project_id",
			// for display only
			"cases.case_id",
			// case uuid for making case url link to portal
			"cases.submitter_id",
			// used when listing all cases & files
			"cases.samples.tissue_type",
			"cases.samples.tumor_descriptor"
		].join(",")
	};
	if (case_filters.content.length)
		body.case_filters = case_filters;
	// no timeout: large cohort queries against GDC can be slow
	const response = await ky.post(joinUrl(host.rest, "files"), { timeout: false, json: body });
	if (!response.ok)
		throw `HTTP Error: ${response.status} ${response.statusText}`;
	const re = await response.json();
	// validate the response shape before iterating
	if (!Number.isInteger(re.data?.pagination?.total))
		throw "re.data.pagination.total is not int";
	if (!Array.isArray(re.data?.hits))
		throw "re.data.hits[] not array";
	const files = [];
	for (const h of re.data.hits) {
		const c = h.cases?.[0];
		if (!c)
			throw "h.cases[0] missing";
		// skip files at or above the per-file size cap
		if (h.file_size >= maxFileSizeAllowed)
			continue;
		const file = {
			id: h.id,
			project_id: c.project.project_id,
			file_size: h.file_size,
			case_submitter_id: c.submitter_id,
			case_uuid: c.case_id,
			sample_types: []
		};
		if (c.samples) {
			// tumor sample labels are collected in encounter order; the normal
			// sample's label (at most the last Normal seen) is appended after them
			let normalTypeName;
			for (const { tumor_descriptor, tissue_type } of c.samples) {
				if (tissue_type == "Normal") {
					// omit the descriptor when it is "Not Applicable"
					normalTypeName = (tumor_descriptor == "Not Applicable" ? "" : tumor_descriptor + " ") + tissue_type;
					continue;
				}
				file.sample_types.push(tumor_descriptor + " " + tissue_type);
			}
			if (normalTypeName)
				file.sample_types.push(normalTypeName);
		}
		// dedupe while preserving insertion order
		file.sample_types = [...new Set(file.sample_types)];
		files.push(file);
	}
	// largest files first
	files.sort((a, b) => b.file_size - a.file_size);
	const result = {
		files,
		// total hits reported by GDC, before the per-file size cap is applied
		filesTotal: re.data.pagination.total,
		maxTotalSizeCompressed
	};
	return result;
}
121
+ export {
122
+ api,
123
+ maxTotalSizeCompressed
124
+ };
@@ -0,0 +1,86 @@
1
+ import { runGRIN2Payload } from "#types/checkers";
2
+ import { run_rust } from "@sjcrh/proteinpaint-rust";
3
+ import { run_R } from "@sjcrh/proteinpaint-r";
4
+ import serverconfig from "#src/serverconfig.js";
5
+ import path from "path";
6
// Route descriptor for the GDC GRIN2 analysis endpoint.
// GET and POST share the same payload definition and handler factory;
// each method gets its own config object.
const api = {
	endpoint: "gdc/runGRIN2",
	methods: {
		get: Object.assign({}, runGRIN2Payload, { init }),
		post: Object.assign({}, runGRIN2Payload, { init })
	}
};
19
// Builds the Express handler for the gdc/runGRIN2 endpoint.
// Pipeline: validate hg38 GDC config -> run the Rust "gdcGRIN2" step on the
// requested case files -> forward its output as the lesion data to
// gdcGRIN2.R -> respond with { pngImg } taken from the R output.
// On any failure the handler responds with HTTP 500 and
// { status: 'error', error } instead of leaving the request without a reply.
function init({ genomes }) {
	return async (req, res) => {
		try {
			console.log("[GRIN2] Validating genome configuration");
			const g = genomes.hg38;
			if (!g)
				throw "hg38 missing";
			const ds = g.datasets.GDC;
			if (!ds)
				throw "hg38 GDC missing";
			const caseFiles = req.query;
			console.log(`[GRIN2] Request received: ${JSON.stringify(caseFiles)}`);
			if (!caseFiles) {
				throw "Missing or invalid cases data";
			}
			console.log("[GRIN2] Calling Rust for file processing...");
			const rustInput = JSON.stringify(caseFiles);
			console.log("[GRIN2] Executing Rust function...");
			const rustResult = await run_rust("gdcGRIN2", rustInput);
			console.log("[GRIN2] Rust execution completed");
			if (!rustResult) {
				throw new Error("Failed to process MAF files: No result from Rust");
			}
			// Parse only to log a preview; the raw rustResult is what gets forwarded
			// to R below. A parse failure is logged but non-fatal, preserving the
			// original best-effort behavior. (Binding scoped to the try: the parsed
			// value was never used outside it.)
			try {
				const parsedRustResult = typeof rustResult === "string" ? JSON.parse(rustResult) : rustResult;
				console.log(`[GRIN2] Parsed Rust result: ${JSON.stringify(parsedRustResult).substring(0, 200)}...`);
			} catch (parseError) {
				console.error("[GRIN2] Error parsing Rust result:", parseError);
			}
			const genedbfile = path.join(serverconfig.tpmasterdir, g.genedb.dbfile);
			// unique per-request output path for the plot rendered by R
			const imagefile = path.join(serverconfig.cachedir, `grin2_${Date.now()}_${Math.floor(Math.random() * 1e9)}.png`);
			const rInput = JSON.stringify({
				genedb: genedbfile,
				chromosomelist: g.majorchr,
				imagefile,
				lesion: rustResult
				// The mutation string from Rust
			});
			console.log(`R input: ${rInput}`);
			console.log("[GRIN2] Executing R script...");
			const rResult = await run_R("gdcGRIN2.R", rInput, []);
			console.log(`[GRIN2] R execution completed, result: ${rResult}`);
			let resultData;
			try {
				resultData = JSON.parse(rResult);
			} catch (parseError) {
				console.error("[GRIN2] Error parsing R result:", parseError);
				console.log("[GRIN2] Raw R result:", rResult);
				// FIX: this catch previously only logged, so the client never received
				// a response; rethrow so the outer catch sends the error payload.
				throw new Error("Failed to parse R output", { cause: parseError });
			}
			console.log("[GRIN2] Finished R analysis");
			// resultData.png[0] is assumed to hold the rendered image; if the shape
			// differs, the resulting TypeError is reported via the outer catch
			// (previously it was swallowed by the parse catch and the request hung).
			const pngImg = resultData.png[0];
			return res.json({ pngImg });
		} catch (e) {
			console.error("[GRIN2] Error running analysis:", e);
			console.error("[GRIN2] Error stack:", e.stack);
			const errorResponse = {
				status: "error",
				error: e.message || String(e)
			};
			console.log(`[GRIN2] Sending error response: ${JSON.stringify(errorResponse)}`);
			res.status(500).send(errorResponse);
		}
	};
}
84
+ export {
85
+ api
86
+ };
@@ -1,6 +1,8 @@
1
1
  import { FilterTermValuesPayload } from "#types/checkers";
2
- import { getData, getSamplesPerFilter } from "../src/termdb.matrix.js";
3
- import { authApi } from "../src/auth.js";
2
+ import { getData } from "../src/termdb.matrix.js";
3
+ import { authApi } from "#src/auth.js";
4
+ import { filterJoin } from "#shared/filter.js";
5
+ import { get_samples } from "../src/termdb.sql.js";
4
6
  const api = {
5
7
  endpoint: "termdb/filterTermValues",
6
8
  methods: {
@@ -28,7 +30,6 @@ function init({ genomes }) {
28
30
  };
29
31
  }
30
32
  async function getFilters(query, ds) {
31
- if (!query.filterByUserSites) authApi.mayAdjustFilter(query, ds, query.terms);
32
33
  const samplesPerFilter = await getSamplesPerFilter(query, ds);
33
34
  const filtersData = await getData(
34
35
  {
@@ -43,6 +44,23 @@ async function getFilters(query, ds) {
43
44
  }
44
45
  return { ...tw2List };
45
46
  }
47
// For each term filter in q.filters, resolves the deduplicated list of sample
// ids matching that filter joined with the user's active filter.
// NOTE(review): mutates the caller's q (sets q.ds, may push into
// q.__protected__.ignoredTermIds, and authApi.mayAdjustFilter may adjust
// q.filter) — callers should not reuse q assuming it is unchanged.
// Returns { [termId]: sampleId[] }.
async function getSamplesPerFilter(q, ds) {
	q.ds = ds;
	const samples = {};
	// Snapshot the user's filter BEFORE mayAdjustFilter can modify q.filter;
	// presumably so the facility term is evaluated against the unadjusted
	// user filter — confirm with authApi.mayAdjustFilter semantics.
	const userFilter = q.filter ? structuredClone(q.filter) : null;
	if (!q.filterByUserSites) {
		// exclude the facility term from auth-driven filter adjustment
		q.__protected__.ignoredTermIds.push(q.facilityTW.term.id);
		authApi.mayAdjustFilter(q, ds, q.terms);
	}
	for (const id in q.filters) {
		// the facility term uses the pre-adjustment snapshot; all others use
		// the (possibly adjusted) current q.filter
		const termfilter = id == q.facilityTW.term.id ? userFilter : q.filter;
		let filter = q.filters[id];
		// join with the active filter only when one exists
		if (q.filter) filter = filterJoin([termfilter, q.filters[id]]);
		const result = (await get_samples({ filter, __protected__: q.__protected__ }, q.ds)).map((i) => i.id);
		// dedupe sample ids
		samples[id] = Array.from(new Set(result));
	}
	return samples;
}
46
64
  function getList(samplesPerFilter, filtersData, tw, showAll) {
47
65
  const values = Object.values(tw.term.values);
48
66
  values.sort((v1, v2) => v1.label.localeCompare(v2.label));