@sjcrh/proteinpaint-server 2.83.0 → 2.85.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/package.json +8 -9
  2. package/routes/_template_.js +14 -11
  3. package/routes/brainImaging.js +100 -57
  4. package/routes/brainImagingSamples.js +120 -0
  5. package/routes/burden.js +27 -59
  6. package/routes/dataset.js +9 -17
  7. package/routes/dsdata.js +11 -14
  8. package/routes/dzimages.js +11 -16
  9. package/routes/gdc.maf.js +8 -23
  10. package/routes/gdc.mafBuild.js +9 -9
  11. package/routes/gdc.topMutatedGenes.js +7 -7
  12. package/routes/genelookup.js +16 -34
  13. package/routes/genesetEnrichment.js +18 -14
  14. package/routes/genesetOverrepresentation.js +9 -14
  15. package/routes/healthcheck.js +26 -32
  16. package/routes/hicdata.js +7 -28
  17. package/routes/hicgenome.js +6 -27
  18. package/routes/hicstat.js +4 -22
  19. package/routes/isoformlst.js +8 -11
  20. package/routes/ntseq.js +8 -11
  21. package/routes/pdomain.js +8 -11
  22. package/routes/sampledzimages.js +12 -12
  23. package/routes/samplewsimages.js +6 -10
  24. package/routes/snp.js +8 -10
  25. package/routes/termdb.DE.js +8 -10
  26. package/routes/termdb.boxplot.js +37 -39
  27. package/routes/termdb.categories.js +4 -45
  28. package/routes/termdb.cluster.js +9 -9
  29. package/routes/termdb.cohort.summary.js +5 -8
  30. package/routes/termdb.cohorts.js +3 -7
  31. package/routes/{termdb.getdescrstats.js → termdb.descrstats.js} +8 -45
  32. package/routes/termdb.numericcategories.js +51 -0
  33. package/routes/{termdb.getpercentile.js → termdb.percentile.js} +4 -46
  34. package/routes/{termdb.getrootterm.js → termdb.rootterm.js} +4 -24
  35. package/routes/{termdb.getSampleImages.js → termdb.sampleImages.js} +9 -9
  36. package/routes/termdb.singleSampleMutation.js +3 -7
  37. package/routes/termdb.singlecellDEgenes.js +8 -8
  38. package/routes/termdb.singlecellData.js +4 -8
  39. package/routes/termdb.singlecellSamples.js +8 -8
  40. package/routes/{termdb.gettermchildren.js → termdb.termchildren.js} +8 -28
  41. package/routes/termdb.termsbyids.js +9 -16
  42. package/routes/{termdb.getTopTermsByType.js → termdb.topTermsByType.js} +9 -10
  43. package/routes/termdb.topVariablyExpressedGenes.js +8 -8
  44. package/routes/termdb.violin.js +8 -46
  45. package/routes/tileserver.js +5 -10
  46. package/routes/wsimages.js +10 -9
  47. package/src/app.js +2409 -2954
  48. package/src/serverconfig.js +9 -0
  49. package/routes/termdb.getnumericcategories.js +0 -91
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sjcrh/proteinpaint-server",
3
- "version": "2.83.0",
3
+ "version": "2.85.0",
4
4
  "type": "module",
5
5
  "description": "a genomics visualization tool for exploring a cohort's genotype and phenotype data",
6
6
  "main": "src/app.js",
@@ -28,7 +28,6 @@
28
28
  "dedup": "./dedupjs.sh",
29
29
  "//todo": "refactor or deprecate the scripts below",
30
30
  "pretest": "tsc && ./test/pretest.js",
31
- "prepare": "ts-patch install",
32
31
  "pretest:type": "npm run checkers",
33
32
  "pretest:integration": "tsc",
34
33
  "test:integration": "echo 'TODO: server integration tests'",
@@ -43,7 +42,7 @@
43
42
  "@babel/preset-env": "^7.9.6",
44
43
  "@babel/preset-typescript": "^7.21.4",
45
44
  "@babel/register": "^7.14.5",
46
- "@sjcrh/proteinpaint-types": "2.83.0",
45
+ "@sjcrh/proteinpaint-types": "2.85.0",
47
46
  "@types/node": "^20.11.24",
48
47
  "@types/tough-cookie": "^4.0.5",
49
48
  "@typescript-eslint/eslint-plugin": "^5.60.0",
@@ -58,13 +57,14 @@
58
57
  "ts-patch": "^3.0.2",
59
58
  "tsx": "^4.7.1",
60
59
  "typedoc": "^0.24.8",
61
- "typescript": "^5.0.3",
60
+ "typedoc-plugin-missing-exports": "^2.0.1",
61
+ "typescript": "^5.6.3",
62
62
  "typia": "^4.1.14"
63
63
  },
64
64
  "dependencies": {
65
- "@sjcrh/augen": "2.46.0",
66
- "@sjcrh/proteinpaint-rust": "2.81.5",
67
- "@sjcrh/proteinpaint-shared": "2.83.0",
65
+ "@sjcrh/augen": "2.85.0",
66
+ "@sjcrh/proteinpaint-rust": "2.84.0",
67
+ "@sjcrh/proteinpaint-shared": "2.85.0",
68
68
  "better-sqlite3": "^9.4.1",
69
69
  "body-parser": "^1.15.2",
70
70
  "canvas": "~2.11.2",
@@ -85,8 +85,7 @@
85
85
  "node-fetch": "^2.6.1",
86
86
  "partjson": "^0.58.2",
87
87
  "tiny-async-pool": "^1.2.0",
88
- "tough-cookie": "^4.1.4",
89
- "typedoc-plugin-missing-exports": "^2.0.1"
88
+ "tough-cookie": "^4.1.4"
90
89
  },
91
90
  "repository": {
92
91
  "type": "git",
@@ -1,28 +1,31 @@
1
+ import { snpPayload } from "#types";
1
2
  const api = {
2
3
  // route endpoint
3
4
  // - no need for trailing slash
4
- // - should be a noun (method is based on HTTP GET, POST, etc)
5
+ // - should be a noun
5
6
  // - don't add 'Data' as response is assumed to be data
6
- endpoint: "...",
7
+ // - don't prefix with `get`, such as `/getmyroute`, the method is already indicated via HTTP GET
8
+ endpoint: "/myroute",
7
9
  methods: {
8
10
  get: {
9
- init,
10
- request: {
11
- typeId: "any"
12
- },
13
- response: {
14
- typeId: "any"
15
- }
11
+ ...snpPayload,
12
+ // this would "spread"/copy-over payload key-values from snpPayload, such as {request: {typeId}, examples: []}
13
+ init
16
14
  },
17
15
  post: {
18
- alternativeFor: "get",
16
+ ...snpPayload,
17
+ // repeat for post method, since PP routes are mostly read-only and POST can handle bigger payloads
19
18
  init
20
19
  }
20
+ // !!! DO NOT USE expressjs 'all' method shortcut !!!
21
+ // it will initialize 20+ methods includng HEAD which can break expected HTTP response
21
22
  }
22
23
  };
23
24
  function init({ genomes }) {
24
25
  return async function(req, res) {
25
- console.log(genomes, req, res);
26
+ const q = req.query;
27
+ console.log(genomes[q.genome]);
28
+ res.send({});
26
29
  };
27
30
  }
28
31
  export {
@@ -1,18 +1,19 @@
1
1
  import fs from "fs";
2
2
  import path from "path";
3
3
  import serverconfig from "#src/serverconfig.js";
4
+ import { brainImagingPayload } from "#types";
4
5
  import { spawn } from "child_process";
6
+ import { getData } from "../src/termdb.matrix.js";
5
7
  const api = {
6
8
  endpoint: "brainImaging",
7
9
  methods: {
8
10
  get: {
9
- init,
10
- request: {
11
- typeId: "GetBrainImagingRequest"
12
- },
13
- response: {
14
- typeId: "GetBrainImagingResponse"
15
- }
11
+ ...brainImagingPayload,
12
+ init
13
+ },
14
+ post: {
15
+ ...brainImagingPayload,
16
+ init
16
17
  }
17
18
  }
18
19
  };
@@ -26,15 +27,26 @@ function init({ genomes }) {
26
27
  const ds = g.datasets[query.dslabel];
27
28
  if (!ds)
28
29
  throw "invalid dataset name";
29
- const brainImage = await getBrainImage(query, genomes);
30
- res.send({ brainImage });
30
+ let plane, index;
31
+ if (query.l) {
32
+ plane = "L";
33
+ index = query.l;
34
+ } else if (query.f) {
35
+ plane = "F";
36
+ index = query.f;
37
+ } else {
38
+ plane = "T";
39
+ index = query.t;
40
+ }
41
+ const brainImage = await getBrainImage(query, genomes, plane, index);
42
+ res.send({ brainImage, plane });
31
43
  } catch (e) {
32
44
  console.log(e);
33
45
  res.status(404).send("Sample brain image not found");
34
46
  }
35
47
  };
36
48
  }
37
- async function getBrainImage(query, genomes) {
49
+ async function getBrainImage(query, genomes, plane, index) {
38
50
  const ds = genomes[query.genome].datasets[query.dslabel];
39
51
  const q = ds.queries.NIdata;
40
52
  const key = query.refKey;
@@ -42,58 +54,89 @@ async function getBrainImage(query, genomes) {
42
54
  const refFile = path.join(serverconfig.tpmasterdir, q[key].referenceFile);
43
55
  const dirPath = path.join(serverconfig.tpmasterdir, q[key].samples);
44
56
  const files = fs.readdirSync(dirPath).filter((file) => file.endsWith(".nii") && fs.statSync(path.join(dirPath, file)).isFile());
45
- if (query.samplesOnly) {
46
- const sampleNames = files.map((name) => name.split(".nii")[0]);
47
- if (q[key].sampleColumns) {
48
- const samples = {};
49
- for (const s of sampleNames) {
50
- const annoForOneS = { sample: s };
51
- const sid = ds.cohort.termdb.q.sampleName2id(s);
52
- for (const term of q[key].sampleColumns) {
53
- const v = ds.cohort.termdb.q.getSample2value(term.termid, sid);
54
- if (v[0]) {
55
- annoForOneS[term.termid] = v[0].value;
56
- }
57
- }
58
- samples[s] = annoForOneS;
59
- }
60
- return Object.values(samples);
57
+ const terms = [];
58
+ const divideByTW = query.divideByTW;
59
+ const overlayTW = query.overlayTW;
60
+ if (divideByTW)
61
+ terms.push(divideByTW);
62
+ if (overlayTW)
63
+ terms.push(overlayTW);
64
+ const selectedSampleNames = query.selectedSampleFileNames.map((s) => s.split(".nii")[0]);
65
+ const data = await getData({ terms }, ds, q.genome);
66
+ const divideByCat = {};
67
+ for (const sampleName of selectedSampleNames) {
68
+ const sampleId = ds.sampleName2Id.get(sampleName);
69
+ const sampleData = data.samples[sampleId];
70
+ const samplePath = path.join(dirPath, sampleName) + ".nii";
71
+ const divideCategory = divideByTW ? sampleData[divideByTW.$id].value : "default";
72
+ const overlayCategory = overlayTW ? sampleData[overlayTW.$id].value : "default";
73
+ if (!divideByCat[divideCategory])
74
+ divideByCat[divideCategory] = {};
75
+ if (!divideByCat[divideCategory][overlayCategory])
76
+ divideByCat[divideCategory][overlayCategory] = {
77
+ samples: [],
78
+ color: overlayTW?.term?.values?.[overlayCategory]?.color || "red"
79
+ };
80
+ divideByCat[divideCategory][overlayCategory].samples.push(samplePath);
81
+ }
82
+ const lengths = [];
83
+ for (const dcategory in divideByCat)
84
+ for (const category in divideByCat[dcategory]) {
85
+ const samples = divideByCat[dcategory][category].samples;
86
+ lengths.push(samples.length);
61
87
  }
62
- return sampleNames;
88
+ const maxLength = Math.max(...lengths);
89
+ const brainImageDict = {};
90
+ for (const dcategory in divideByCat) {
91
+ let catNum = 0;
92
+ const filesByCat = divideByCat[dcategory];
93
+ for (const category in filesByCat)
94
+ catNum += filesByCat[category].samples.length;
95
+ const url = await generateBrainImage(refFile, plane, index, maxLength, JSON.stringify(filesByCat));
96
+ brainImageDict[dcategory] = { url, catNum };
63
97
  }
64
- return new Promise((resolve, reject) => {
65
- const filePaths = query.selectedSampleFileNames.map((file) => path.join(dirPath, file));
66
- const cmd = [
67
- `${serverconfig.binpath}/../python/src/plotBrainImaging.py`,
68
- refFile,
69
- query.l,
70
- query.f,
71
- query.t,
72
- ...filePaths
73
- ];
74
- const ps = spawn(serverconfig.python, cmd);
75
- const imgData = [];
76
- ps.stdout.on("data", (data) => {
77
- imgData.push(data);
78
- });
79
- ps.stderr.on("data", (data) => {
80
- console.error(`stderr: ${data}`);
81
- reject(new Error(`Python script filed: ${data}`));
82
- });
83
- ps.on("close", (code) => {
84
- if (code === 0) {
85
- const imageBuffer = Buffer.concat(imgData);
86
- const base64Data = imageBuffer.toString("base64");
87
- const imgUrl = `data:image/png;base64,${base64Data}`;
88
- resolve(imgUrl);
89
- } else {
90
- reject(new Error(`Python script exited with code ${code}`));
91
- }
92
- });
93
- });
98
+ return brainImageDict;
94
99
  } else {
95
100
  throw "no reference or sample files";
96
101
  }
102
+ function getFilesByCat(tw) {
103
+ const filesByCat = {};
104
+ for (const [key2, value] of Object.entries(tw.term.values)) {
105
+ filesByCat[key2] = { samples: [], color: value.color || "red" };
106
+ }
107
+ return filesByCat;
108
+ }
109
+ }
110
+ async function generateBrainImage(refFile, plane, index, maxLength, filesJson) {
111
+ return new Promise((resolve, reject) => {
112
+ const cmd = [
113
+ `${serverconfig.binpath}/../python/src/plotBrainImaging.py`,
114
+ refFile,
115
+ plane,
116
+ index,
117
+ maxLength,
118
+ filesJson
119
+ ];
120
+ const ps = spawn(serverconfig.python, cmd);
121
+ const imgData = [];
122
+ ps.stdout.on("data", (data) => {
123
+ imgData.push(data);
124
+ });
125
+ ps.stderr.on("data", (data) => {
126
+ console.error(`stderr: ${data}`);
127
+ reject(new Error(`Python script filed: ${data}`));
128
+ });
129
+ ps.on("close", (code) => {
130
+ if (code === 0) {
131
+ const imageBuffer = Buffer.concat(imgData);
132
+ const base64Data = imageBuffer.toString("base64");
133
+ const imgUrl = `data:image/png;base64,${base64Data}`;
134
+ resolve(imgUrl);
135
+ } else {
136
+ reject(new Error(`Python script exited with code ${code}`));
137
+ }
138
+ });
139
+ });
97
140
  }
98
141
  export {
99
142
  api
@@ -0,0 +1,120 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+ import serverconfig from "#src/serverconfig.js";
4
+ import { spawn } from "child_process";
5
+ const api = {
6
+ endpoint: "brainImagingSamples",
7
+ methods: {
8
+ get: {
9
+ init,
10
+ request: {
11
+ typeId: "BrainImagingSamplesRequest"
12
+ },
13
+ response: {
14
+ typeId: "BrainImagingSamplesResponse"
15
+ }
16
+ }
17
+ }
18
+ };
19
+ function init({ genomes }) {
20
+ return async (req, res) => {
21
+ try {
22
+ const query = req.query;
23
+ const g = genomes[query.genome];
24
+ if (!g)
25
+ throw "invalid genome name";
26
+ const ds = g.datasets[query.dslabel];
27
+ if (!ds)
28
+ throw "invalid dataset name";
29
+ const samples = await getBrainImageSamples(query, genomes);
30
+ res.send({ samples });
31
+ } catch (e) {
32
+ console.log(e);
33
+ res.status(404).send("Sample brain image not found");
34
+ }
35
+ };
36
+ }
37
+ async function getBrainImageSamples(query, genomes) {
38
+ const ds = genomes[query.genome].datasets[query.dslabel];
39
+ const q = ds.queries.NIdata;
40
+ const key = query.refKey;
41
+ if (q[key].referenceFile && q[key].samples) {
42
+ const dirPath = path.join(serverconfig.tpmasterdir, q[key].samples);
43
+ const files = fs.readdirSync(dirPath).filter((file) => file.endsWith(".nii") && fs.statSync(path.join(dirPath, file)).isFile());
44
+ const sampleNames = files.map((name) => name.split(".nii")[0]);
45
+ if (q[key].sampleColumns) {
46
+ const samples = {};
47
+ for (const s of sampleNames) {
48
+ const annoForOneS = { sample: s };
49
+ const sid = ds.cohort.termdb.q.sampleName2id(s);
50
+ for (const term of q[key].sampleColumns) {
51
+ const v = ds.cohort.termdb.q.getSample2value(term.termid, sid);
52
+ if (v[0]) {
53
+ annoForOneS[term.termid] = v[0].value;
54
+ }
55
+ }
56
+ samples[s] = annoForOneS;
57
+ }
58
+ return Object.values(samples);
59
+ }
60
+ return sampleNames.map((name) => ({ sample: name }));
61
+ } else {
62
+ throw "no reference or sample files";
63
+ }
64
+ }
65
+ async function validate_query_NIdata(ds) {
66
+ const q = ds.queries.NIdata;
67
+ if (!q || !serverconfig.features?.showBrainImaging)
68
+ return;
69
+ for (const key in q) {
70
+ if (q[key].referenceFile && q[key].samples) {
71
+ q[key].get = async (sampleName, l, f, t) => {
72
+ const refFile = path.join(serverconfig.tpmasterdir, q[key].referenceFile);
73
+ const sampleFile = path.join(serverconfig.tpmasterdir, q[key].samples, sampleName);
74
+ try {
75
+ await fs.promises.stat(sampleFile);
76
+ } catch (e) {
77
+ if (e.code == "EACCES")
78
+ throw "cannot read file, permission denied";
79
+ if (e.code == "ENOENT")
80
+ throw "no data for this sample";
81
+ throw "failed to load data";
82
+ }
83
+ return new Promise((resolve, reject) => {
84
+ const ps = spawn(serverconfig.python, [
85
+ `${serverconfig.binpath}/utils/plotBrainImaging.py`,
86
+ refFile,
87
+ sampleFile,
88
+ l,
89
+ f,
90
+ t
91
+ ]);
92
+ const imgData = [];
93
+ ps.stdout.on("data", (data) => {
94
+ imgData.push(data);
95
+ });
96
+ ps.stderr.on("data", (data) => {
97
+ console.error(`stderr: ${data}`);
98
+ reject(new Error(`Python script filed: ${data}`));
99
+ });
100
+ ps.on("close", (code) => {
101
+ if (code === 0) {
102
+ const imageBuffer = Buffer.concat(imgData);
103
+ const base64Data = imageBuffer.toString("base64");
104
+ const imgUrl = `data:image/png;base64,${base64Data}`;
105
+ resolve(imgUrl);
106
+ } else {
107
+ reject(new Error(`Python script exited with code ${code}`));
108
+ }
109
+ });
110
+ });
111
+ };
112
+ } else {
113
+ throw "no reference or sample files";
114
+ }
115
+ }
116
+ }
117
+ export {
118
+ api,
119
+ validate_query_NIdata
120
+ };
package/routes/burden.js CHANGED
@@ -1,3 +1,4 @@
1
+ import { burdenPayload } from "#types";
1
2
  import run_R from "#src/run_R.js";
2
3
  import path from "path";
3
4
  import serverconfig from "#src/serverconfig.js";
@@ -5,68 +6,35 @@ const api = {
5
6
  endpoint: "burden",
6
7
  methods: {
7
8
  get: {
8
- init({ genomes }) {
9
- return async (req, res) => {
10
- try {
11
- const genome = genomes[req.query.genome];
12
- if (!genome)
13
- throw `invalid q.genome=${req.query.genome}`;
14
- const q = req.query;
15
- const ds = genome.datasets[q.dslabel];
16
- if (!ds)
17
- throw `invalid q.genome=${req.query.dslabel}`;
18
- if (!ds.cohort.cumburden?.files)
19
- throw `missing ds.cohort.cumburden.files`;
20
- const estimates = await getBurdenEstimates(req, ds);
21
- const { keys, rows } = formatPayload(estimates);
22
- res.send({ status: "ok", keys, rows });
23
- } catch (e) {
24
- res.send({ status: "error", error: e.message || e });
25
- }
26
- };
27
- },
28
- request: {
29
- typeId: "BurdenRequest"
30
- },
31
- response: {
32
- typeId: "BurdenResponse"
33
- },
34
- examples: [
35
- {
36
- request: {
37
- body: {
38
- genome: "hg38",
39
- // TODO: !!! use hg38-test and TermdbTest !!!
40
- dslabel: "SJLife",
41
- diaggrp: 5,
42
- sex: 1,
43
- white: 1,
44
- agedx: 1,
45
- bleo: 0,
46
- etop: 0,
47
- cisp: 0,
48
- carbo: 0,
49
- steriod: 0,
50
- vcr: 0,
51
- hdmtx: 0,
52
- itmt: 0,
53
- ced: 0,
54
- dox: 0,
55
- heart: 0,
56
- brain: 0,
57
- abd: 0,
58
- pelvis: 0,
59
- chest: 0
60
- }
61
- },
62
- response: {
63
- header: { status: 200 }
64
- }
65
- }
66
- ]
9
+ init,
10
+ ...burdenPayload
11
+ },
12
+ post: {
13
+ init,
14
+ ...burdenPayload
67
15
  }
68
16
  }
69
17
  };
18
+ function init({ genomes }) {
19
+ return async function handler(req, res) {
20
+ try {
21
+ const genome = genomes[req.query.genome];
22
+ if (!genome)
23
+ throw `invalid q.genome=${req.query.genome}`;
24
+ const q = req.query;
25
+ const ds = genome.datasets[q.dslabel];
26
+ if (!ds)
27
+ throw `invalid q.genome=${req.query.dslabel}`;
28
+ if (!ds.cohort.cumburden?.files)
29
+ throw `missing ds.cohort.cumburden.files`;
30
+ const estimates = await getBurdenEstimates(req, ds);
31
+ const { keys, rows } = formatPayload(estimates);
32
+ res.send({ status: "ok", keys, rows });
33
+ } catch (e) {
34
+ res.send({ status: "error", error: e.message || e });
35
+ }
36
+ };
37
+ }
70
38
  async function getBurdenEstimates(q, ds) {
71
39
  for (const k in q.query) {
72
40
  q.query[k] = Number(q.query[k]);
package/routes/dataset.js CHANGED
@@ -1,49 +1,41 @@
1
1
  import * as mds2_init from "#src/mds2.init.js";
2
2
  import * as mds3_init from "#src/mds3.init.js";
3
3
  import * as common from "#shared/common.js";
4
+ import { datasetPayload } from "#types";
4
5
  const api = {
5
6
  endpoint: "getDataset",
6
7
  // should rename to simply 'dataset', method is based on HTTP method
7
8
  methods: {
8
9
  get: {
9
10
  init,
10
- request: {
11
- typeId: "any"
12
- },
13
- response: {
14
- typeId: "any"
15
- }
11
+ ...datasetPayload
16
12
  },
17
13
  post: {
18
- alternativeFor: "get",
19
- init
14
+ init,
15
+ ...datasetPayload
20
16
  }
21
17
  }
22
18
  };
23
19
  function init({ genomes }) {
24
20
  return function(req, res) {
25
21
  try {
26
- const genome = genomes[req.query.genome];
22
+ const q = req.query;
23
+ const genome = genomes[q.genome];
27
24
  if (!genome)
28
25
  throw "unknown genome";
29
26
  if (!genome.datasets)
30
27
  throw "genomeobj.datasets{} missing";
31
28
  let ds;
32
29
  for (const k in genome.datasets) {
33
- if (k.toLowerCase() == req.query.dsname.toLowerCase()) {
30
+ if (k.toLowerCase() == q.dsname.toLowerCase()) {
34
31
  ds = genome.datasets[k];
35
32
  break;
36
33
  }
37
34
  }
38
35
  if (!ds)
39
36
  throw "invalid dsname";
40
- if (ds.isMds3) {
41
- return res.send({ ds: mds3_init.client_copy(ds) });
42
- }
43
- if (ds.isMds) {
44
- return res.send({ ds: mds_clientcopy(ds) });
45
- }
46
- return res.send({ ds: copy_legacyDataset(ds) });
37
+ const copy = ds.isMds3 ? mds3_init.client_copy(ds) : ds.isMds ? mds_clientcopy(ds) : copy_legacyDataset(ds);
38
+ return res.send({ ds: copy });
47
39
  } catch (e) {
48
40
  res.send({ error: e.message || e });
49
41
  }
package/routes/dsdata.js CHANGED
@@ -2,6 +2,7 @@ import path from "path";
2
2
  import { spawn } from "child_process";
3
3
  import serverconfig from "#src/serverconfig.js";
4
4
  import * as common from "#shared/common.js";
5
+ import { dsDataPayload } from "#types";
5
6
  const api = {
6
7
  // route endpoint
7
8
  // - no need for trailing slash
@@ -10,16 +11,11 @@ const api = {
10
11
  endpoint: "dsdata",
11
12
  methods: {
12
13
  get: {
13
- init,
14
- request: {
15
- typeId: "any"
16
- },
17
- response: {
18
- typeId: "any"
19
- }
14
+ ...dsDataPayload,
15
+ init
20
16
  },
21
17
  post: {
22
- alternativeFor: "get",
18
+ ...dsDataPayload,
23
19
  init
24
20
  }
25
21
  }
@@ -27,19 +23,20 @@ const api = {
27
23
  function init({ genomes }) {
28
24
  return async function handle_dsdata(req, res) {
29
25
  try {
30
- if (!genomes[req.query.genome])
26
+ const q = req.query;
27
+ if (!genomes[q.genome])
31
28
  throw "invalid genome";
32
- if (!req.query.dsname)
29
+ if (!q.dsname)
33
30
  throw ".dsname missing";
34
- const ds = genomes[req.query.genome].datasets[req.query.dsname];
31
+ const ds = genomes[q.genome].datasets[q.dsname];
35
32
  if (!ds)
36
33
  throw "invalid dsname";
37
34
  const data = [];
38
35
  for (const query of ds.queries) {
39
- if (req.query.expressiononly && !query.isgeneexpression) {
36
+ if (q.expressiononly && !query.isgeneexpression) {
40
37
  continue;
41
38
  }
42
- if (req.query.noexpression && query.isgeneexpression) {
39
+ if (q.noexpression && query.isgeneexpression) {
43
40
  continue;
44
41
  }
45
42
  if (query.dsblocktracklst) {
@@ -110,7 +107,7 @@ function handle_dsdata_vcf(query, req) {
110
107
  const out = [], out2 = [];
111
108
  ps.stdout.on("data", (i) => out.push(i));
112
109
  ps.stderr.on("data", (i) => out2.push(i));
113
- ps.on("close", (code) => {
110
+ ps.on("close", () => {
114
111
  const e = out2.join("").trim();
115
112
  if (e != "")
116
113
  reject("error querying vcf file");