@sjcrh/proteinpaint-server 2.142.0 → 2.143.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dataset/protected.test.js +1 -2
  2. package/dataset/termdb.test.js +3 -2
  3. package/package.json +9 -8
  4. package/routes/aiProjectAdmin.js +46 -60
  5. package/routes/aiProjectSelectedWSImages.js +161 -0
  6. package/routes/brainImaging.js +9 -18
  7. package/routes/brainImagingSamples.js +2 -4
  8. package/routes/burden.js +13 -26
  9. package/routes/correlationVolcano.js +18 -36
  10. package/routes/dataset.js +6 -12
  11. package/routes/deleteWSIAnnotation.js +75 -0
  12. package/routes/dsdata.js +7 -14
  13. package/routes/dzimages.js +4 -8
  14. package/routes/gdc.grin2.list.js +13 -26
  15. package/routes/gdc.grin2.run.js +3 -6
  16. package/routes/gdc.maf.js +8 -16
  17. package/routes/gdc.mafBuild.js +14 -28
  18. package/routes/gene2canonicalisoform.js +4 -8
  19. package/routes/genelookup.js +2 -4
  20. package/routes/genesetEnrichment.js +6 -12
  21. package/routes/genesetOverrepresentation.js +1 -2
  22. package/routes/genomes.js +1 -2
  23. package/routes/grin2.js +13 -17
  24. package/routes/healthcheck.js +3 -6
  25. package/routes/hicdata.js +4 -8
  26. package/routes/hicgenome.js +4 -8
  27. package/routes/hicstat.js +2 -4
  28. package/routes/img.js +1 -2
  29. package/routes/isoformlst.js +6 -12
  30. package/routes/ntseq.js +4 -8
  31. package/routes/pdomain.js +5 -10
  32. package/routes/sampledzimages.js +2 -4
  33. package/routes/samplewsimages.js +3 -67
  34. package/routes/saveWSIAnnotation.js +100 -0
  35. package/routes/snp.js +9 -18
  36. package/routes/termdb.DE.js +23 -46
  37. package/routes/termdb.boxplot.js +84 -84
  38. package/routes/termdb.categories.js +9 -18
  39. package/routes/termdb.cluster.js +23 -46
  40. package/routes/termdb.cohort.summary.js +3 -6
  41. package/routes/termdb.cohorts.js +4 -8
  42. package/routes/termdb.config.js +32 -64
  43. package/routes/termdb.descrstats.js +6 -12
  44. package/routes/termdb.filterTermValues.js +4 -8
  45. package/routes/termdb.numericcategories.js +5 -10
  46. package/routes/termdb.percentile.js +6 -12
  47. package/routes/termdb.profileFormScores.js +12 -24
  48. package/routes/termdb.profileScores.js +7 -14
  49. package/routes/termdb.rootterm.js +4 -8
  50. package/routes/termdb.sampleImages.js +4 -8
  51. package/routes/termdb.singleSampleMutation.js +9 -18
  52. package/routes/termdb.singlecellDEgenes.js +4 -8
  53. package/routes/termdb.singlecellData.js +4 -8
  54. package/routes/termdb.singlecellSamples.js +28 -56
  55. package/routes/termdb.termchildren.js +5 -10
  56. package/routes/termdb.termsbyids.js +4 -8
  57. package/routes/termdb.topMutatedGenes.js +15 -30
  58. package/routes/termdb.topTermsByType.js +9 -18
  59. package/routes/termdb.topVariablyExpressedGenes.js +13 -26
  60. package/routes/termdb.violin.js +124 -135
  61. package/routes/tileserver.js +14 -15
  62. package/routes/wsimages.js +42 -46
  63. package/routes/wsisamples.js +3 -6
  64. package/src/app.js +4345 -6708
  65. package/routes/sampleWsiAiApi.js +0 -33
@@ -2,8 +2,7 @@ import termdbTestInit from "./termdb.test.js";
2
2
  const minSampleSize = 10;
3
3
  function protected_test_default() {
4
4
  const ds = termdbTestInit();
5
- if (!ds.cohort)
6
- ds.cohort = { termdb: {} };
5
+ if (!ds.cohort) ds.cohort = { termdb: {} };
7
6
  ds.cohort.termdb.checkAccessToSampleData = (_, data) => {
8
7
  return {
9
8
  minSampleSize,
@@ -186,8 +186,9 @@ function termdb_test_default() {
186
186
  {
187
187
  name: "TermdbTest TSNE",
188
188
  dimension: 2,
189
- file: "files/hg38/TermdbTest/tnse.txt",
190
- colorTW: { id: "diaggrp" }
189
+ file: "files/hg38/TermdbTest/tsne.txt",
190
+ colorTW: { id: "diaggrp" },
191
+ shapeTW: { id: "sex" }
191
192
  }
192
193
  ]
193
194
  },
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sjcrh/proteinpaint-server",
3
- "version": "2.142.0",
3
+ "version": "2.143.0",
4
4
  "type": "module",
5
5
  "description": "a genomics visualization tool for exploring a cohort's genotype and phenotype data",
6
6
  "main": "src/app.js",
@@ -41,16 +41,17 @@
41
41
  "@babel/preset-env": "^7.9.6",
42
42
  "@babel/preset-typescript": "^7.21.4",
43
43
  "@babel/register": "^7.14.5",
44
+ "@types/better-sqlite3": "^7.6.13",
44
45
  "@types/node": "^20.11.24",
45
46
  "@types/tough-cookie": "^4.0.5",
46
47
  "@typescript-eslint/eslint-plugin": "^8.13.0",
47
48
  "babel-loader": "^8.2.2",
48
49
  "c8": "^10.1.3",
49
- "esbuild": "^0.19.12",
50
+ "esbuild": "^0.25.9",
50
51
  "monocart-coverage-reports": "^2.12.1",
51
52
  "node-notifier": "^9.0.1",
52
53
  "node-watch": "^0.7.1",
53
- "nodemon": "^2.0.19",
54
+ "nodemon": "^3.1.10",
54
55
  "prettier": "^2.8.8",
55
56
  "tape": "^5.2.2",
56
57
  "ts-node": "^10.9.1",
@@ -59,12 +60,12 @@
59
60
  "typescript": "^5.6.3"
60
61
  },
61
62
  "dependencies": {
62
- "@sjcrh/augen": "2.136.0",
63
- "@sjcrh/proteinpaint-python": "2.139.1",
63
+ "@sjcrh/augen": "2.143.0",
64
+ "@sjcrh/proteinpaint-python": "2.143.0",
64
65
  "@sjcrh/proteinpaint-r": "2.137.2-0",
65
- "@sjcrh/proteinpaint-rust": "2.142.0",
66
- "@sjcrh/proteinpaint-shared": "2.142.0",
67
- "@sjcrh/proteinpaint-types": "2.142.0",
66
+ "@sjcrh/proteinpaint-rust": "2.143.0",
67
+ "@sjcrh/proteinpaint-shared": "2.143.0",
68
+ "@sjcrh/proteinpaint-types": "2.143.0",
68
69
  "@types/express": "^5.0.0",
69
70
  "@types/express-session": "^1.18.1",
70
71
  "better-sqlite3": "^9.4.1",
@@ -1,8 +1,8 @@
1
1
  import { aiProjectAdminPayload } from "#types/checkers";
2
- import { connect_db } from "../src/utils.js";
3
- const routePath = "aiProjectAdmin";
2
+ import { getDbConnection } from "#src/aiHistoDBConnection.js";
3
+ import { runMultiStmtSQL, runSQL } from "#src/runSQLHelpers.ts";
4
4
  const api = {
5
- endpoint: `${routePath}`,
5
+ endpoint: "aiProjectAdmin",
6
6
  methods: {
7
7
  get: {
8
8
  //all requests
@@ -35,33 +35,39 @@ function init({ genomes }) {
35
35
  }
36
36
  const g = genomes[query.genome];
37
37
  const ds = g.datasets[query.dslabel];
38
- if (!ds.queries?.WSImages?.db)
39
- throw new Error("WSImages database not found.");
40
- const db = ds.queries.WSImages.db;
41
- db.connection = connect_db(db.file, { readonly: false, fileMustExist: true });
38
+ if (!ds.queries?.WSImages?.db) throw new Error("WSImages database not found.");
39
+ const connection = getDbConnection(ds);
42
40
  if (query.for === "list") {
43
- const projects = getProjects(db.connection);
41
+ const projects = getProjects(connection);
44
42
  res.send(projects);
45
43
  } else if (query.for === "admin") {
46
- if (req.method === "PUT" || query.project.type === "new")
47
- addProject(db.connection, query.project);
48
- else if (req.method === "POST")
49
- editProject(db.connection, query.project);
50
- else if (req.method === "DELETE")
51
- deleteProject(db.connection, query.project.id);
52
- else
53
- throw new Error('Invalid request method for="admin" in aiProjectAdmin route.');
44
+ if (req.method === "PUT" || query.project.type === "new") addProject(connection, query.project);
45
+ else if (req.method === "POST") editProject(connection, query.project);
46
+ else if (req.method === "DELETE") deleteProject(connection, query.project.id);
47
+ else throw new Error('Invalid request method for="admin" in aiProjectAdmin route.');
48
+ let projectId = query.project.id;
49
+ if (!projectId) {
50
+ const row = connection.prepare(`SELECT id FROM project WHERE name = ?`).get(query.project.name);
51
+ if (!row) {
52
+ throw new Error(`Project not found: ${query.project.name}`);
53
+ }
54
+ projectId = row.id;
55
+ }
54
56
  res.status(200).send({
55
57
  status: "ok",
58
+ projectId,
56
59
  message: `Project ${query.project.name} processed successfully`
57
60
  });
58
- } else if (query.for === "images") {
61
+ } else if (query.for === "filterImages") {
59
62
  const q = ds.cohort.termdb.q;
60
63
  const data = await q.getFilteredImages(query.project.filter);
61
64
  res.status(200).send({
62
65
  status: "ok",
63
66
  data
64
67
  });
68
+ } else if (query.for === "images") {
69
+ const images = getImages(connection, query.project);
70
+ res.send({ images });
65
71
  } else {
66
72
  res.send({
67
73
  status: "error",
@@ -78,9 +84,18 @@ function init({ genomes }) {
78
84
  };
79
85
  }
80
86
  function getProjects(connection) {
81
- const sql = "SELECT project.name as value, id FROM project";
87
+ const sql = "SELECT name, id FROM project";
82
88
  return runSQL(connection, sql);
83
89
  }
90
+ function getImages(connection, project) {
91
+ if (!project.id) {
92
+ const res = connection.prepare(`SELECT id FROM project WHERE name = ?`).get(project.name);
93
+ if (!res) throw new Error(`Project not found for name: ${project.name}`);
94
+ project.id = res.id;
95
+ }
96
+ const imageRows = connection.prepare(`SELECT image_path FROM project_images WHERE project_id = ? ORDER BY id ASC`).all(project.id);
97
+ return imageRows.map((r) => r.image_path);
98
+ }
84
99
  function editProject(connection, project) {
85
100
  const stmts = [];
86
101
  if (!project.id) {
@@ -89,18 +104,17 @@ function editProject(connection, project) {
89
104
  }
90
105
  if (project.images) {
91
106
  stmts.push({
92
- sql: `DELETE FROM project_images WHERE project_id = ? AND image NOT IN (${project.images.map(() => "?").join(",") || "''"})`,
107
+ sql: `DELETE FROM project_images WHERE project_id = ? AND image_path NOT IN (${project.images.map(() => "?").join(",") || "''"})`,
93
108
  params: [[project.id, ...project.images]]
94
109
  });
95
- const existingImg = connection.prepare(`SELECT 1 FROM project_images WHERE project_id = ? AND image = ?`);
110
+ const existingImg = connection.prepare(`SELECT 1 FROM project_images WHERE project_id = ? AND image_path = ?`);
96
111
  const multiParams = [];
97
112
  for (const img of project.images) {
98
113
  const exists = existingImg.get(project.id, img);
99
- if (!exists)
100
- multiParams.push([project.id, img]);
114
+ if (!exists) multiParams.push([project.id, img]);
101
115
  }
102
116
  if (multiParams.length > 0) {
103
- const insertImg = `INSERT INTO project_images (project_id, image) VALUES (?, ?)`;
117
+ const insertImg = `INSERT INTO project_images (project_id, image_path) VALUES (?, ?)`;
104
118
  stmts.push({ sql: insertImg, params: multiParams });
105
119
  }
106
120
  }
@@ -119,8 +133,7 @@ function editProject(connection, project) {
119
133
  const multiParams = [];
120
134
  for (const cls of project.classes) {
121
135
  const exists = existingClasses.get(project.id, cls.name);
122
- if (!exists)
123
- multiParams.push([project.id, cls.name, cls.color, cls.key_shortcut || ""]);
136
+ if (!exists) multiParams.push([project.id, cls.name, cls.color, cls.key_shortcut || ""]);
124
137
  }
125
138
  if (multiParams.length > 0) {
126
139
  const insertClass = `INSERT INTO project_classes (project_id, name, color, key_shortcut) VALUES (?, ?, ?, ?)`;
@@ -130,8 +143,7 @@ function editProject(connection, project) {
130
143
  runMultiStmtSQL(connection, stmts, "add");
131
144
  }
132
145
  function deleteProject(connection, projectId) {
133
- if (!projectId)
134
- throw new Error("Invalid project ID [aiProjectAdmin route deleteProject()]");
146
+ if (!projectId) throw new Error("Invalid project ID [aiProjectAdmin route deleteProject()]");
135
147
  const stmts = [
136
148
  { sql: "DELETE FROM project_annotations WHERE project_id = ?", params: [[projectId]] },
137
149
  { sql: "DELETE FROM project_classes WHERE project_id = ?", params: [[projectId]] },
@@ -144,39 +156,13 @@ function deleteProject(connection, projectId) {
144
156
  function addProject(connection, project) {
145
157
  const projectSql = `INSERT INTO project (name, filter) VALUES (?, ?)`;
146
158
  const projectParams = [project.name, JSON.stringify(project.filter)];
147
- const rows = runSQL(connection, projectSql, projectParams, "add");
148
- const classSql = `INSERT INTO project_classes (project_id, name, color, key_shortcut) VALUES (?, ?, ?, ?)`;
149
- const classParams = project.classes.map((c) => [rows.lastInsertRowid, c.label, c.color, c.key_shortcut || ""]);
150
- for (const params of classParams) {
151
- runSQL(connection, classSql, params, "add");
152
- }
153
- }
154
- function runSQL(connection, sql, params = [], errorText = "fetch") {
155
- try {
156
- if (!params.length) {
157
- return connection.prepare(sql).all();
158
- }
159
- return connection.prepare(sql).run(params);
160
- } catch (e) {
161
- console.error(`Error executing SQL for ${errorText}: ${e.message || e}`);
162
- throw new Error(`Failed to ${errorText} projects`);
163
- }
164
- }
165
- function runMultiStmtSQL(connection, stmts, errorText = "execute") {
166
- const transaction = connection.transaction((batch) => {
167
- for (const { sql, params = [] } of batch) {
168
- const sqlStmt = connection.prepare(sql);
169
- for (const item of params) {
170
- sqlStmt.run(item);
171
- }
172
- }
173
- });
174
- try {
175
- transaction(stmts);
176
- } catch (e) {
177
- console.error(`Error executing transaction for ${errorText}: ${e.message || e}`);
178
- throw new Error(`Failed to ${errorText} projects`);
179
- }
159
+ const row = runSQL(connection, projectSql, projectParams, "add");
160
+ const userSql = `INSERT INTO project_users (project_id, email) VALUES (?, ?)`;
161
+ const userParams = [row.lastInsertRowid, "user@domain.com"];
162
+ runSQL(connection, userSql, userParams, "add");
163
+ const classSql = `INSERT INTO project_classes (project_id, label, color, key_shortcut) VALUES (?, ?, ?, ?)`;
164
+ const classParams = project.classes.map((c) => [row.lastInsertRowid, c.label, c.color, c.key_shortcut || ""]);
165
+ runMultiStmtSQL(connection, [{ sql: classSql, params: classParams }], "add");
180
166
  }
181
167
  export {
182
168
  api
@@ -0,0 +1,161 @@
1
+ import path from "path";
2
+ import fs from "fs";
3
+ import serverconfig from "#src/serverconfig.js";
4
+ import { aiProjectSelectedWSImagesResponsePayload } from "#types/checkers";
5
+ import { getDbConnection } from "#src/aiHistoDBConnection.ts";
6
+ const api = {
7
+ endpoint: "aiProjectSelectedWSImages",
8
+ methods: {
9
+ get: {
10
+ ...aiProjectSelectedWSImagesResponsePayload,
11
+ init
12
+ },
13
+ post: {
14
+ ...aiProjectSelectedWSImagesResponsePayload,
15
+ init
16
+ }
17
+ }
18
+ };
19
+ function init({ genomes }) {
20
+ return async (req, res) => {
21
+ try {
22
+ const query = req.query;
23
+ const g = genomes[query.genome];
24
+ if (!g) throw "invalid genome name";
25
+ const ds = g.datasets[query.dslabel];
26
+ if (!ds) throw "invalid dataset name";
27
+ const projectId = query.projectId;
28
+ const wsimagesFilenames = query.wsimagesFilenames;
29
+ const wsimages = [];
30
+ if (ds.queries.WSImages.getWSIAnnotations) {
31
+ for (const wsimageFilename of wsimagesFilenames) {
32
+ const wsimage = {
33
+ filename: wsimageFilename
34
+ };
35
+ wsimage.annotations = await ds.queries.WSImages.getWSIAnnotations(projectId, wsimageFilename);
36
+ wsimage.classes = await ds.queries.WSImages.getAnnotationClasses(projectId);
37
+ wsimage.uncertainty = ds.queries?.WSImages?.uncertainty;
38
+ wsimage.activePatchColor = ds.queries?.WSImages?.activePatchColor;
39
+ if (ds.queries.WSImages.getWSIPredictionPatches) {
40
+ const predictionsFile = await ds.queries.WSImages.getWSIPredictionPatches(projectId, wsimageFilename);
41
+ const mount = serverconfig.features?.tileserver?.mount;
42
+ if (!mount) throw new Error("No mount available for TileServer");
43
+ const predictionsFilePath = path.join(mount, ds.queries.WSImages.aiToolImageFolder, predictionsFile[0]);
44
+ const predictionsData = JSON.parse(fs.readFileSync(predictionsFilePath, "utf8"));
45
+ wsimage.predictions = predictionsData.features.map((d) => {
46
+ const featClass = wsimage.classes?.find((f) => f.id == d.properties.class)?.label;
47
+ return {
48
+ zoomCoordinates: d.properties.zoomCoordinates,
49
+ uncertainty: d.properties.uncertainty,
50
+ class: featClass
51
+ };
52
+ });
53
+ }
54
+ if (ds.queries.WSImages.makeGeoJson) {
55
+ await ds.queries.WSImages.makeGeoJson(projectId, wsimageFilename);
56
+ }
57
+ wsimages.push(wsimage);
58
+ }
59
+ }
60
+ if (ds.queries.WSImages.getWSIPredictionOverlay) {
61
+ for (const wsimage of wsimages) {
62
+ const predictionOverlay = await ds.queries.WSImages.getWSIPredictionOverlay(wsimage.filename);
63
+ if (predictionOverlay) {
64
+ wsimage.predictionLayers = [predictionOverlay];
65
+ }
66
+ }
67
+ }
68
+ res.send({ wsimages });
69
+ } catch (e) {
70
+ console.log(e);
71
+ res.status(404).send("Sample images not found");
72
+ }
73
+ };
74
+ }
75
+ async function validate_query_getWSIAnnotations(ds) {
76
+ if (!ds.queries?.WSImages?.db) return;
77
+ const connection = getDbConnection(ds);
78
+ if (!connection) return;
79
+ validateWSIAnnotationsQuery(ds, connection);
80
+ }
81
+ async function validate_query_getWSIClassesQuery(ds) {
82
+ if (!ds.queries?.WSImages?.db) return;
83
+ const connection = getDbConnection(ds);
84
+ if (!connection) return;
85
+ validateWSIClassesQuery(ds, connection);
86
+ }
87
+ function validateWSIAnnotationsQuery(ds, connection) {
88
+ if (!ds.queries?.WSImages?.db) return;
89
+ const GET_ANNOTATIONS_SQL = `
90
+ SELECT
91
+ pa.id,
92
+ pa.project_id,
93
+ pa.user_id,
94
+ pa.coordinates,
95
+ pa.timestamp,
96
+ pa.status,
97
+ pc.label AS label
98
+ FROM project_annotations pa
99
+ INNER JOIN project_images pi
100
+ ON pi.id = pa.image_id
101
+ LEFT JOIN project_classes pc
102
+ ON pc.id = pa.class_id
103
+ WHERE pa.project_id = ?
104
+ AND pi.image_path = ?
105
+ AND pa.status = 1
106
+ ORDER BY pa.timestamp DESC, pa.id DESC
107
+ `;
108
+ if (!ds.queries) ds.queries = {};
109
+ if (!ds.queries.WSImages) ds.queries.WSImages = {};
110
+ ds.queries.WSImages.getWSIAnnotations = async (projectId, filename) => {
111
+ try {
112
+ const stmt = connection.prepare(GET_ANNOTATIONS_SQL);
113
+ const rows = stmt.all(projectId, filename);
114
+ return rows.map((r) => {
115
+ let coords = [NaN, NaN];
116
+ try {
117
+ const parsed = typeof r.coordinates === "string" ? JSON.parse(r.coordinates) : r.coordinates;
118
+ if (Array.isArray(parsed) && parsed.length >= 2) {
119
+ const x = Number(parsed[0]);
120
+ const y = Number(parsed[1]);
121
+ if (!Number.isNaN(x) && !Number.isNaN(y)) coords = [x, y];
122
+ }
123
+ } catch {
124
+ }
125
+ return {
126
+ zoomCoordinates: coords,
127
+ class: r.label ?? ""
128
+ };
129
+ });
130
+ } catch (error) {
131
+ console.error("Error loading annotations:", error);
132
+ return [];
133
+ }
134
+ };
135
+ }
136
+ function validateWSIClassesQuery(ds, connection) {
137
+ if (!ds.queries) ds.queries = {};
138
+ if (!ds.queries.WSImages) ds.queries.WSImages = {};
139
+ const GET_CLASSES_SQL = `
140
+ SELECT id, project_id, label, color, key_shortcut
141
+ FROM project_classes
142
+ WHERE project_id = ?
143
+ ORDER BY id
144
+ `;
145
+ ds.queries.WSImages.getAnnotationClasses = async (projectId) => {
146
+ try {
147
+ const stmt = connection.prepare(GET_CLASSES_SQL);
148
+ return stmt.all(projectId);
149
+ } catch (error) {
150
+ console.error("Error loading project classes:", error);
151
+ return [];
152
+ }
153
+ };
154
+ }
155
+ export {
156
+ api,
157
+ validateWSIAnnotationsQuery,
158
+ validateWSIClassesQuery,
159
+ validate_query_getWSIAnnotations,
160
+ validate_query_getWSIClassesQuery
161
+ };
@@ -23,11 +23,9 @@ function init({ genomes }) {
23
23
  try {
24
24
  const query = req.query;
25
25
  const g = genomes[query.genome];
26
- if (!g)
27
- throw "invalid genome name";
26
+ if (!g) throw "invalid genome name";
28
27
  const ds = g.datasets[query.dslabel];
29
- if (!ds)
30
- throw "invalid dataset name";
28
+ if (!ds) throw "invalid dataset name";
31
29
  let plane, index;
32
30
  if (query.l) {
33
31
  plane = "L";
@@ -57,10 +55,8 @@ async function getBrainImage(query, genomes, plane, index) {
57
55
  const terms = [];
58
56
  const divideByTW = query.divideByTW;
59
57
  const overlayTW = query.overlayTW;
60
- if (divideByTW)
61
- terms.push(divideByTW);
62
- if (overlayTW)
63
- terms.push(overlayTW);
58
+ if (divideByTW) terms.push(divideByTW);
59
+ if (overlayTW) terms.push(overlayTW);
64
60
  const selectedSampleNames = query.selectedSampleFileNames.map((s) => s.split(".nii")[0]);
65
61
  const data = await getData({ terms }, ds);
66
62
  const divideByCat = {};
@@ -73,8 +69,7 @@ async function getBrainImage(query, genomes, plane, index) {
73
69
  let overlayCategory = "default";
74
70
  if (divideByTW) {
75
71
  const value = sampleData[divideByTW.$id];
76
- if (value)
77
- divideCategory = divideByTW.term.values?.[value.key]?.label || value.key;
72
+ if (value) divideCategory = divideByTW.term.values?.[value.key]?.label || value.key;
78
73
  }
79
74
  if (overlayTW) {
80
75
  const value = sampleData[overlayTW.$id];
@@ -83,8 +78,7 @@ async function getBrainImage(query, genomes, plane, index) {
83
78
  uniqueOverlayTwCats.add(overlayCategory);
84
79
  }
85
80
  }
86
- if (!divideByCat[divideCategory])
87
- divideByCat[divideCategory] = {};
81
+ if (!divideByCat[divideCategory]) divideByCat[divideCategory] = {};
88
82
  if (!query.legendFilter?.includes(overlayCategory)) {
89
83
  if (!divideByCat[divideCategory][overlayCategory]) {
90
84
  let color = overlayTW?.term?.values?.[overlayCategory]?.color;
@@ -107,8 +101,7 @@ async function getBrainImage(query, genomes, plane, index) {
107
101
  const overlayCat = divideByCat[dcategory][category];
108
102
  const samples = overlayCat.samples;
109
103
  lengths.push(samples.length);
110
- if (!overlayCat.color)
111
- overlayCat.color = category == "default" ? "red" : k2c(category);
104
+ if (!overlayCat.color) overlayCat.color = category == "default" ? "red" : k2c(category);
112
105
  }
113
106
  }
114
107
  const maxLength = Math.max(...lengths);
@@ -118,11 +111,9 @@ async function getBrainImage(query, genomes, plane, index) {
118
111
  let catNum = 0;
119
112
  const filesByCat = divideByCat[dcategory];
120
113
  for (const category in filesByCat) {
121
- if (filesByCat[category].samples.length < 1)
122
- continue;
114
+ if (filesByCat[category].samples.length < 1) continue;
123
115
  catNum += filesByCat[category].samples.length;
124
- if (!legend[category])
125
- legend[category] = { color: filesByCat[category].color, maxLength };
116
+ if (!legend[category]) legend[category] = { color: filesByCat[category].color, maxLength };
126
117
  }
127
118
  const arg = {
128
119
  refFile,
@@ -20,11 +20,9 @@ function init({ genomes }) {
20
20
  try {
21
21
  const query = req.query;
22
22
  const g = genomes[query.genome];
23
- if (!g)
24
- throw "invalid genome name";
23
+ if (!g) throw "invalid genome name";
25
24
  const ds = g.datasets[query.dslabel];
26
- if (!ds)
27
- throw "invalid dataset name";
25
+ if (!ds) throw "invalid dataset name";
28
26
  const samples = await getBrainImageSamples(query, genomes);
29
27
  res.send({ samples });
30
28
  } catch (e) {
package/routes/burden.js CHANGED
@@ -19,18 +19,13 @@ function init({ genomes }) {
19
19
  return async function handler(req, res) {
20
20
  try {
21
21
  const genome = genomes[req.query.genome];
22
- if (!genome)
23
- throw `invalid q.genome=${req.query.genome}`;
22
+ if (!genome) throw `invalid q.genome=${req.query.genome}`;
24
23
  const q = req.query;
25
24
  const ds = genome.datasets[q.dslabel];
26
- if (!ds)
27
- throw `invalid q.dslabel=${req.query.dslabel}`;
28
- if (!ds.cohort.cumburden?.files)
29
- throw `missing ds.cohort.cumburden.files`;
30
- if (!ds.cohort?.cumburden?.db)
31
- throw `missing ds.cohort.cumburden.db`;
32
- if (!ds.cohort?.cumburden?.bootsubdir)
33
- throw `missing ds.cohort.cumburden.bootsubdir`;
25
+ if (!ds) throw `invalid q.dslabel=${req.query.dslabel}`;
26
+ if (!ds.cohort.cumburden?.files) throw `missing ds.cohort.cumburden.files`;
27
+ if (!ds.cohort?.cumburden?.db) throw `missing ds.cohort.cumburden.db`;
28
+ if (!ds.cohort?.cumburden?.bootsubdir) throw `missing ds.cohort.cumburden.bootsubdir`;
34
29
  const result = await getBurdenResult(q, ds.cohort.cumburden);
35
30
  if (!q.showCI) {
36
31
  res.send({
@@ -39,8 +34,7 @@ function init({ genomes }) {
39
34
  ...formatPayload(result.estimate)
40
35
  });
41
36
  } else {
42
- if (!result.ci95)
43
- await compute95ci(result, ds.cohort.cumburden);
37
+ if (!result.ci95) await compute95ci(result, ds.cohort.cumburden);
44
38
  res.send({
45
39
  status: "ok",
46
40
  /*ci95: result.ci95,*/
@@ -63,8 +57,7 @@ async function getBurdenResult(q, cumburden) {
63
57
  const overall = { chc: 0 };
64
58
  for (const age of ages) {
65
59
  overall[age] = [0];
66
- for (const est of estimate)
67
- overall[age][0] += est[age];
60
+ for (const est of estimate) overall[age][0] += est[age];
68
61
  }
69
62
  estimate.push(overall);
70
63
  const burden = {};
@@ -76,8 +69,7 @@ async function getBurdenResult(q, cumburden) {
76
69
  result.estimate = burden;
77
70
  }
78
71
  for (const [k, v] of Object.entries(result)) {
79
- if (k !== "id" && typeof v == "string")
80
- result[k] = JSON.parse(v);
72
+ if (k !== "id" && typeof v == "string") result[k] = JSON.parse(v);
81
73
  }
82
74
  return result;
83
75
  }
@@ -85,8 +77,7 @@ function normalizeInput(q, cumburden) {
85
77
  const keys = Object.keys(q).filter((k) => k in defaultInputValues).sort();
86
78
  const id = keys.map((k) => q[k]).join("-");
87
79
  const normalized = {};
88
- for (const k of keys)
89
- normalized[k] = q[k];
80
+ for (const k of keys) normalized[k] = q[k];
90
81
  normalized.datafiles = {
91
82
  dir: path.join(serverconfig.tpmasterdir, cumburden.dir),
92
83
  files: cumburden.files,
@@ -97,21 +88,18 @@ function normalizeInput(q, cumburden) {
97
88
  }
98
89
  async function compute95ci(result, cumburden) {
99
90
  try {
100
- if (!result.input)
101
- throw "result{} does not have .input";
91
+ if (!result.input) throw "result{} does not have .input";
102
92
  const input = structuredClone(result.input);
103
93
  input.burden = Object.values(result.estimate).filter((est) => est.chc !== 0);
104
94
  const lowup = await run_R("burden-ci95.R", JSON.stringify(input), []);
105
95
  const { low, up, overall } = JSON.parse(lowup);
106
96
  const ci95 = { 0: {} };
107
97
  for (const est of Object.values(result.estimate)) {
108
- if (!ci95[est.chc])
109
- ci95[est.chc] = {};
98
+ if (!ci95[est.chc]) ci95[est.chc] = {};
110
99
  const lower = low.find((l) => l.chc === est.chc);
111
100
  const upper = up.find((u) => u.chc === est.chc);
112
101
  for (const [age, val] of Object.entries(est)) {
113
- if (!age.startsWith("["))
114
- continue;
102
+ if (!age.startsWith("[")) continue;
115
103
  const burden = est.chc === 0 ? overall[0][age] : val;
116
104
  ci95[est.chc][age] = [burden, lower[age], upper[age]];
117
105
  }
@@ -129,8 +117,7 @@ function formatPayload(estimates) {
129
117
  const rows = [];
130
118
  for (const [chc, burdenByAge] of Object.entries(estimates)) {
131
119
  const arr = [chc];
132
- for (const age of rawKeys)
133
- arr.push(Array.isArray(burdenByAge[age]) ? burdenByAge[age] : [burdenByAge[age]]);
120
+ for (const age of rawKeys) arr.push(Array.isArray(burdenByAge[age]) ? burdenByAge[age] : [burdenByAge[age]]);
134
121
  rows.push(arr);
135
122
  }
136
123
  return { keys: outKeys, rows };