@openneuro/server 4.21.0-alpha.9 → 4.21.1

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.21.0-alpha.9",
+  "version": "4.21.1",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -21,7 +21,7 @@
   "@elastic/elasticsearch": "7.15.0",
   "@graphql-tools/schema": "^10.0.0",
   "@keyv/redis": "^2.7.0",
-  "@openneuro/search": "^4.21.0-alpha.9",
+  "@openneuro/search": "^4.21.1",
   "@passport-next/passport-google-oauth2": "^1.0.0",
   "@sentry/node": "^4.5.3",
   "base64url": "^3.0.0",
@@ -30,7 +30,7 @@
   "date-fns": "^2.16.1",
   "draft-js": "^0.11.7",
   "draft-js-export-html": "^1.4.1",
-  "elastic-apm-node": "3.49.1",
+  "elastic-apm-node": "^4.3.0",
   "express": "4.18.2",
   "graphql": "16.8.1",
   "graphql-bigint": "^1.0.0",
@@ -85,5 +85,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "34ede9ff1af124c428a426bf98334b2f6fa9a908"
+  "gitHead": "17fab5325ea133c2a19779472a289ec2b30fa333"
 }
package/src/config.ts CHANGED
@@ -63,6 +63,8 @@ const config = {
   },
   elasticsearch: {
     connection: process.env.ELASTICSEARCH_CONNECTION,
+    apmServerUrl: process.env.ELASTIC_APM_SERVER_URL,
+    apmApiKey: process.env.ELASTIC_APM_API_KEY,
   },
   github: {
     token: process.env.DATALAD_GITHUB_TOKEN,
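
Note: ELASTIC_APM_SERVER_URL and ELASTIC_APM_API_KEY are the same names elastic-apm-node reads natively, but routing them through config lets server.ts (below) decide whether to start the agent at all. A minimal sketch, assuming only what the diff shows, of what the new keys resolve to:

import config from "./config"

// Both keys are plain pass-throughs of the environment; when the
// variables are unset they resolve to undefined and APM stays off.
console.log({
  apmServerUrl: config.elasticsearch.apmServerUrl,
  apmApiKey: config.elasticsearch.apmApiKey ? "(set)" : undefined, // avoid printing the key
})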
@@ -1,4 +1,3 @@
-import request from "superagent"
 import { redis } from "../libs/redis"
 import CacheItem, { CacheType } from "../cache/item"
 import { getDatasetWorker } from "../libs/datalad-service"
@@ -86,36 +85,33 @@ export const computeTotalSize = (files: [DatasetFile]): number =>
  * @param {string} datasetId - Dataset accession number
  * @param {string} treeish - Git treeish hexsha
  */
-export const getFiles = (datasetId, treeish): Promise<[DatasetFile]> => {
+export const getFiles = (datasetId, treeish): Promise<[DatasetFile?]> => {
   const cache = new CacheItem(redis, CacheType.commitFiles, [
     datasetId,
     treeish.substring(0, 7),
   ])
   return cache.get(
-    (doNotCache) =>
-      request
-        .get(
-          `${
-            getDatasetWorker(
-              datasetId,
-            )
-          }/datasets/${datasetId}/tree/${treeish}`,
+    async (doNotCache): Promise<[DatasetFile?]> => {
+      const response = await fetch(`http://${
+        getDatasetWorker(
+          datasetId,
+        )
         )
-        .set("Accept", "application/json")
-        .then((response) => {
-          if (response.status === 200) {
-            const {
-              body: { files },
-            } = response
-            for (const f of files) {
-              // Skip caching this tree if it doesn't contain S3 URLs - likely still exporting
-              if (!f.directory && !f.urls[0].includes("s3.amazonaws.com")) {
-                doNotCache(true)
-                break
-              }
-            }
-            return files as [DatasetFile]
+      }/datasets/${datasetId}/tree/${treeish}`)
+      const body = await response.json()
+      const files = body?.files
+      if (files) {
+        for (const f of files) {
+          // Skip caching this tree if it doesn't contain S3 URLs - likely still exporting
+          if (!f.directory && !f.urls[0].includes("s3.amazonaws.com")) {
+            doNotCache(true)
+            break
           }
-        }) as Promise<[DatasetFile]>,
+        }
+        return files
+      } else {
+        // Possible to have zero files here, return an empty array
+        return []
+      }
+    },
   )
 }
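
The rewrite above drops superagent for Node's built-in fetch (note the explicit http:// scheme now prepended to the worker host) and returns an empty array when the worker response has no files. A hedged usage sketch of the new contract, where the import path and arguments are illustrative, not taken from the diff:

import { getFiles } from "./files" // hypothetical path to this module

const files = await getFiles("ds000001", "0000000deadbeef") // illustrative arguments
if (files.length === 0) {
  console.log("empty tree (or export still in progress)")
} else {
  for (const f of files) {
    // directory and urls are the fields the cache check above relies on
    if (f && !f.directory) console.log(f.urls[0])
  }
}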
@@ -44,12 +44,13 @@ export const getFile = async (req, res) => {
     .then((r) => {
       // Set the content length (allow clients to catch HTTP issues better)
       res.setHeader("Content-Length", Number(r.headers.get("content-length")))
-      return r.body
+      if (r.status === 404) {
+        res.status(404).send("Requested dataset or file cannot be found")
+      } else {
+        // @ts-expect-error
+        Readable.fromWeb(r.body, { highWaterMark: 4194304 }).pipe(res)
+      }
     })
-    .then((stream) =>
-      // @ts-expect-error
-      Readable.fromWeb(stream, { highWaterMark: 4194304 }).pipe(res)
-    )
     .catch((err) => {
       console.error(err)
       res.status(500).send("Internal error transferring requested file")
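
The handler now surfaces upstream 404s instead of piping an error body through to the client, and bridges the fetch stream into a Node stream inside the same .then. A minimal self-contained sketch of the pattern; proxyFile and fileUrl are illustrative names, not from the diff:

import { Readable } from "node:stream"
import type { Response } from "express"

async function proxyFile(fileUrl: string, res: Response): Promise<void> {
  const r = await fetch(fileUrl)
  // Forward the upstream length so clients can detect truncated transfers
  res.setHeader("Content-Length", Number(r.headers.get("content-length")))
  if (r.status === 404 || r.body === null) {
    res.status(404).send("Requested dataset or file cannot be found")
  } else {
    // Readable.fromWeb (Node 17+) bridges the web ReadableStream into a
    // Node stream; the cast papers over the DOM vs node:stream/web type
    // mismatch the diff handles with @ts-expect-error. 4 MiB buffer.
    Readable.fromWeb(r.body as any, { highWaterMark: 4194304 }).pipe(res)
  }
}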
package/src/server.ts CHANGED
@@ -1,14 +1,17 @@
-/** Needs to run before the other imports in Node */
 import apm from "elastic-apm-node"
-apm.start({
-  serviceName: "openneuro-server",
-  cloudProvider: "none",
-})
-
+import config from "./config"
+/** Needs to run before the other imports in Node */
+if (config.elasticsearch.apmServerUrl) {
+  apm.start({
+    serverUrl: config.elasticsearch.apmServerUrl,
+    apiKey: config.elasticsearch.apmApiKey,
+    serviceName: "openneuro-server",
+    cloudProvider: "none",
+  })
+}
 import { createServer } from "http"
 import mongoose from "mongoose"
 import { connect as redisConnect } from "./libs/redis"
-import config from "./config"
 import { expressApolloSetup } from "./app"
 
 const redisConnectionSetup = async () => {
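
Net effect: APM reporting becomes opt-in. With ELASTIC_APM_SERVER_URL unset, apm.start() is never called and the agent stays inactive. A standalone sketch of the same behavior, assuming only the elastic-apm-node public API:

import apm from "elastic-apm-node"

// Opt-in start, mirroring server.ts above: no server URL, no agent.
if (process.env.ELASTIC_APM_SERVER_URL) {
  apm.start({
    serverUrl: process.env.ELASTIC_APM_SERVER_URL,
    apiKey: process.env.ELASTIC_APM_API_KEY,
    serviceName: "openneuro-server",
    cloudProvider: "none",
  })
}
console.log("APM active:", apm.isStarted())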