@openneuro/app 4.26.0 → 4.27.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -6
- package/src/client.jsx +1 -1
- package/src/scripts/common/content/download-agreement.ts +9 -0
- package/src/scripts/common/content/terms.tsx +39 -0
- package/src/scripts/components/agreement.tsx +2 -9
- package/src/scripts/config.ts +8 -4
- package/src/scripts/dataset/dataset-query.jsx +2 -6
- package/src/scripts/dataset/download/download-native.js +8 -16
- package/src/scripts/dataset/mutations/admin-exports.jsx +0 -10
- package/src/scripts/errors/errorBoundary.jsx +2 -3
- package/src/scripts/pages/__tests__/terms.spec.tsx +11 -0
- package/src/scripts/pages/front-page/front-page.tsx +3 -3
- package/src/scripts/pages/terms.tsx +38 -0
- package/src/scripts/routes.tsx +2 -0
- package/src/scripts/sentry.ts +20 -0
- package/src/scripts/uploader/upload-disclaimer-input.tsx +2 -30
- package/src/scripts/uploader/uploader.jsx +4 -4
- package/src/scripts/utils/schema-validator.js +94278 -5451
- package/vite.config.js +3 -1
- package/src/scripts/apm.js +0 -21
- package/src/scripts/resources/__tests__/kibana.spec.js +0 -25
- package/src/scripts/resources/kibana.js +0 -24
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/app",
-  "version": "4.26.0",
+  "version": "4.27.0-alpha.0",
   "description": "React JS web frontend for the OpenNeuro platform.",
   "license": "MIT",
   "main": "public/client.js",
@@ -16,14 +16,15 @@
   "dependencies": {
     "@apollo/client": "3.7.2",
     "@artsy/fresnel": "^1.3.1",
-    "@elastic/apm-rum": "5.16.0",
     "@emotion/react": "11.11.1",
     "@emotion/styled": "11.11.0",
     "@niivue/niivue": "0.34.0",
-    "@openneuro/client": "^4.
-    "@openneuro/components": "^4.
+    "@openneuro/client": "^4.27.0-alpha.0",
+    "@openneuro/components": "^4.27.0-alpha.0",
+    "@sentry/react": "^8.25.0",
     "@tanstack/react-table": "^8.9.3",
-    "bids-validator": "1.14.
+    "bids-validator": "1.14.8",
+    "buffer": "^6.0.3",
     "bytes": "^3.0.0",
     "comlink": "^4.0.5",
     "date-fns": "^2.16.1",
@@ -74,5 +75,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "7577739ccee193d927d81806f0eeffc285a28800"
 }
package/src/client.jsx
CHANGED

@@ -2,7 +2,7 @@
  * Browser client entrypoint - see server.tsx for SSR entrypoint
  */
 import "./scripts/utils/global-polyfill"
-import "./scripts/
+import "./scripts/sentry"
 import { ApolloProvider, InMemoryCache } from "@apollo/client"
 import { createClient } from "@openneuro/client"
 import React from "react"
package/src/scripts/common/content/download-agreement.ts
ADDED

@@ -0,0 +1,9 @@
+export const DownloadAgreement = "I affirm that I have the appropriate \
+institutional permissions to receive de-identified data for \
+secondary data analysis, and that neither I nor my collaborators \
+will attempt to reidentify individuals whose data are contained \
+in downloads from OpenNeuro. Further, if for any reason the \
+identity of participants contained in downloads from OpenNeuro \
+become known to me I will make no effort to recontact such \
+participants and will provide immediate notice to OpenNeuro \
+staff."
package/src/scripts/common/content/terms.tsx
ADDED

@@ -0,0 +1,39 @@
+import React, { ReactElement } from "react"
+
+/** Terms and conditions content. */
+export function Terms(): ReactElement {
+  return (
+    <>
+      <p>
+        I am the owner of this dataset and have any necessary ethics permissions
+        to share the data publicly. This dataset does not include any
+        identifiable personal health information as defined by the{" "}
+        <a href="https://www.hhs.gov/hipaa/for-professionals/privacy/laws-regulations/">
+          Health Insurance Portability and Accountability Act of 1996
+        </a>{" "}
+        (including names, zip codes, dates of birth, acquisition dates, etc). I
+        agree to destroy any key linking the personal identity of research
+        participants to the subject codes used in the dataset.
+      </p>
+      <p>
+        I agree that this dataset will become publicly available under a{" "}
+        <a href="https://wiki.creativecommons.org/wiki/CC0">
+          Creative Commons CC0
+        </a>{" "}
+        license after a grace period of 36 months counted from the date of the
+        first snapshot creation for this dataset. You will be able to apply for
+        up to two 6 month extensions to increase the grace period in case the
+        publication of a corresponding paper takes longer than expected. See
+        {" "}
+        <a href="/faq">FAQ</a> for details.
+      </p>
+      <p>This dataset is not subject to GDPR protections.</p>
+      <p>
+        Generally, data should only be uploaded to a single data archive. In the
+        rare cases where it is necessary to upload the data to two databases
+        (such as the NIMH Data Archive), I agree to ensure that the datasets are
+        harmonized across archives.
+      </p>
+    </>
+  )
+}
package/src/scripts/components/agreement.tsx
CHANGED

@@ -1,5 +1,6 @@
 import React from "react"
 import { useLocalStorage } from "../utils/local-storage"
+import { DownloadAgreement } from "../common/content/download-agreement"
 import styled from "@emotion/styled"

 export const STORAGE_KEY = "agreement"
@@ -46,15 +47,7 @@ export const Agreement = () => {
       <div className="grid grid-between">
         <div className="col col-lg col-11">
           <p>
-            By clicking "I Agree", I affirm that I have the appropriate
-            institutional permissions to receive de-identified data for
-            secondary data analysis, and that neither I nor my collaborators
-            will attempt to reidentify individuals whose data are contained
-            in downloads from OpenNeuro. Further, if for any reason the
-            identity of participants contained in downloads from OpenNeuro
-            become known to me I will make no effort to recontact such
-            participants and will provide immediate notice to OpenNeuro
-            staff.
+            By clicking "I Agree", {DownloadAgreement}
           </p>
         </div>
         <div className="col col-lg col-1">
package/src/scripts/config.ts
CHANGED

@@ -20,13 +20,15 @@ export interface OpenNeuroConfig {
     }
   }
   analytics?: { trackingIds: string }
-  sentry?: {
+  sentry?: {
+    environment: string
+    dsn: string
+  }
   support?: {
     url: string
   }
   github?: string
   publicBucket?: string
-  ELASTIC_APM_SERVER_URL?: string
 }

 export const config: OpenNeuroConfig = {
@@ -54,11 +56,13 @@ export const config: OpenNeuroConfig = {
       },
     ),
   },
-  sentry: {
+  sentry: {
+    environment: globalThis.OpenNeuroConfig.ENVIRONMENT,
+    dsn: globalThis.OpenNeuroConfig.SENTRY_DSN,
+  },
   support: { url: globalThis.OpenNeuroConfig.SUPPORT_URL },
   github: globalThis.OpenNeuroConfig.DATALAD_GITHUB_ORG,
   publicBucket: globalThis.OpenNeuroConfig.AWS_S3_PUBLIC_BUCKET,
-  ELASTIC_APM_SERVER_URL: globalThis.OpenNeuroConfig.ELASTIC_APM_SERVER_URL,
 }

 export const getConfig = (): OpenNeuroConfig => config
package/src/scripts/dataset/dataset-query.jsx
CHANGED

@@ -1,5 +1,5 @@
-import { apm } from "../apm"
 import React from "react"
+import * as Sentry from "@sentry/react"
 import PropTypes from "prop-types"
 import { useNavigate, useParams } from "react-router-dom"
 import { gql, useApolloClient, useQuery } from "@apollo/client"
@@ -48,11 +48,7 @@ export const DatasetQueryHook = ({ datasetId, draft }) => {
       }
       return <FourOFourPage message={error.message} />
     } else {
-      try {
-        apm.captureError(error)
-      } catch (err) {
-        // Ignore failure to write to APM
-      }
+      Sentry.captureException(error)
       return <FourOFourPage />
     }
   } else {
package/src/scripts/dataset/download/download-native.js
CHANGED

@@ -1,3 +1,4 @@
+import * as Sentry from "@sentry/react"
 import { trackDownload } from "./track-download.js"
 import {
   downloadAbortToast,
@@ -9,7 +10,6 @@ import {
   permissionsToast,
   requestFailureToast,
 } from "./native-file-toast.jsx"
-import { apm } from "../../apm.js"
 import { downloadDataset } from "./download-query"

 /**
@@ -46,7 +46,7 @@ let downloadCanceled
  * Recursive download for file trees via browser file access API
  */
 const downloadTree = async (
-  { datasetId, snapshotTag, client,
+  { datasetId, snapshotTag, client, dirHandle, toastId },
   path = "",
   tree = null,
 ) => {
@@ -64,7 +64,6 @@ const downloadTree = async (
       datasetId,
       snapshotTag,
       client,
-      apmTransaction,
       dirHandle,
       toastId,
     },
@@ -100,7 +99,7 @@ const downloadTree = async (
     if (status === 200) {
       await body.pipeThrough(progress).pipeTo(writable)
     } else {
-
+      Sentry.captureException(statusText)
       return requestFailureToast(file.filename)
     }
   }
@@ -117,18 +116,13 @@ export const downloadNative = (datasetId, snapshotTag, client) => async () => {
   try {
     trackDownload(client, datasetId, snapshotTag)
   } catch (err) {
-
-  }
-  const apmTransaction = apm &&
-    apm.startTransaction(`download:${datasetId}`, "download")
-  if (apmTransaction) {
-    apmTransaction.addLabels({ datasetId, snapshot: snapshotTag })
+    Sentry.captureException(err)
   }
+  const scope = new Sentry.Scope()
+  scope.setContext("dataset", { datasetId, snapshot: snapshotTag })
   downloadCanceled = false
   let toastId
   try {
-    const apmSelect = apmTransaction &&
-      apmTransaction.startSpan("showDirectoryPicker")
     // Open user selected directory
     const dirHandle = await window.showDirectoryPicker()
     toastId = downloadToast(
@@ -137,12 +131,10 @@ export const downloadNative = (datasetId, snapshotTag, client) => async () => {
       snapshotTag,
       () => (downloadCanceled = true),
     )
-    apmSelect && apmSelect.end()
     await downloadTree({
       datasetId,
       snapshotTag,
       client,
-      apmTransaction,
       dirHandle,
       toastId,
     })
@@ -158,9 +150,9 @@ export const downloadNative = (datasetId, snapshotTag, client) => async () => {
       // Some unknown issue occurred (out of disk space, disk caught fire, etc...)
       nativeErrorToast()
     }
-
+    Sentry.captureException(err)
   } finally {
-    if (apmTransaction) apmTransaction.end()
     downloadToastDone(toastId)
+    Sentry.getCurrentScope().clear()
   }
 }
package/src/scripts/dataset/mutations/admin-exports.jsx
CHANGED

@@ -1,7 +1,6 @@
 import React from "react"
 import { gql, useMutation } from "@apollo/client"
 import PropTypes from "prop-types"
-import { reexporterLogsURL } from "../../resources/kibana"
 import { Button } from "@openneuro/components/button"
 import styled from "@emotion/styled"

@@ -50,15 +49,6 @@ const AdminExports = ({ dataset }) => {
           reexportRemotes({ variables: { datasetId: dataset.id } })
         }}
       />
-      <Button
-        icon="fa fa-file-text"
-        label="View Export Logs"
-        secondary={true}
-        size="small"
-        onClick={() => {
-          window.open(reexporterLogsURL, "_blank")
-        }}
-      />
     </ButtonRow>
   </div>
 )
package/src/scripts/errors/errorBoundary.jsx
CHANGED

@@ -1,6 +1,6 @@
-import { apm } from "../apm"
 import React from "react"
 import PropTypes from "prop-types"
+import * as Sentry from "@sentry/react"
 import FreshdeskInterface from "./freshdeskInterface.jsx"

 // raises error if catchErrorIf returns true
@@ -36,8 +36,7 @@ class ErrorBoundary extends React.Component {
   componentDidCatch(error, { componentStack }) {
     const message = String(error)
     error.componentStack = componentStack
-
-    apm.captureError(error)
+    Sentry.captureException(error)
     this.setState({
       message,
     })
package/src/scripts/pages/__tests__/terms.spec.tsx
ADDED

@@ -0,0 +1,11 @@
+import React from "react"
+import { render, screen } from "@testing-library/react"
+import { TermsPage } from "../terms"
+
+describe("TermsPage", () => {
+  it("renders the terms and conditions", () => {
+    render(<TermsPage />)
+    expect(screen.getByText("OpenNeuro Terms and Conditions"))
+      .toBeInTheDocument()
+  })
+})
package/src/scripts/pages/front-page/front-page.tsx
CHANGED

@@ -1,5 +1,5 @@
-import { apm } from "../../apm"
 import React from "react"
+import * as Sentry from "@sentry/react"
 import { gql, useQuery } from "@apollo/client"
 import { Mutation } from "@apollo/client/react/components"
 import styled from "@emotion/styled"
@@ -111,7 +111,7 @@ export const FrontPageTopQuery = ({ query }) => {
     )
   } else if (result.error || result.data.datasets == null) {
     if (result?.error) {
-
+      Sentry.captureException(result?.error)
     }
     return <div>Failed to load top datasets, please try again later.</div>
   } else {
@@ -141,7 +141,7 @@ export const FrontPageNewQuery = ({ query }) => {
     return <Loading />
   } else if (result.error || result.data.datasets == null) {
     if (result?.error) {
-
+      Sentry.captureException(result?.error)
     }
     return <div>Failed to load top datasets, please try again later.</div>
   } else {
package/src/scripts/pages/terms.tsx
ADDED

@@ -0,0 +1,38 @@
+import React, { ReactElement } from "react"
+import { Terms } from "../common/content/terms"
+import Helmet from "react-helmet"
+import { frontPage } from "./front-page/front-page-content"
+import { DownloadAgreement } from "../common/content/download-agreement"
+import styled from "@emotion/styled"
+
+const TermsPageStyle = styled.div`
+  background: white;
+
+  .container {
+    max-width: 60em;
+  }
+`
+
+export function TermsPage(): ReactElement {
+  return (
+    <TermsPageStyle>
+      <Helmet>
+        <title>Terms and Conditions - {frontPage.pageTitle}</title>
+        <meta
+          name="description"
+          content={`Terms and conditions of the ${frontPage.pageTitle} data archive`}
+        />
+      </Helmet>
+      <div className="container">
+        <h2>OpenNeuro Terms and Conditions</h2>
+        <h3>
+          By uploading to the {frontPage.pageTitle}{" "}
+          data archive I agree to the following conditions:
+        </h3>
+        <Terms />
+        <h3>Downloading Data:</h3>
+        <p>{DownloadAgreement}</p>
+      </div>
+    </TermsPageStyle>
+  )
+}
package/src/scripts/routes.tsx
CHANGED

@@ -16,6 +16,7 @@ import Citation from "./pages/citation-page"
 import FourOFourPage from "./errors/404page"
 import { ImportDataset } from "./pages/import-dataset"
 import { DatasetMetadata } from "./pages/metadata/dataset-metadata"
+import { TermsPage } from "./pages/terms"

 const AppRoutes: React.VoidFunctionComponent = () => (
   <Routes>
@@ -28,6 +29,7 @@ const AppRoutes: React.VoidFunctionComponent = () => (
     <Route path="/error/*" element={<ErrorRoute />} />
     <Route path="/pet" element={<PETRedirect />} />
     <Route path="/cite" element={<Citation />} />
+    <Route path="/terms" element={<TermsPage />} />
     <Route path="/import" element={<ImportDataset />} />
     <Route path="/metadata" element={<DatasetMetadata />} />
     <Route path="/public" element={<Navigate to="/search" replace />} />
package/src/scripts/sentry.ts
ADDED

@@ -0,0 +1,20 @@
+import * as Sentry from "@sentry/react"
+import { config } from "./config"
+import { version } from "../lerna.json"
+
+Sentry.init({
+  dsn: config.sentry.dsn,
+  integrations: [
+    Sentry.browserTracingIntegration(),
+    Sentry.replayIntegration(),
+  ],
+  // Performance Monitoring
+  tracesSampleRate: 1.0, // Capture 100% of the transactions
+  // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+  tracePropagationTargets: [config.url],
+  // Session Replay
+  replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
+  replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
+  environment: config.sentry.environment,
+  release: `openneuro-app@${version}`,
+})
package/src/scripts/uploader/upload-disclaimer-input.tsx
CHANGED

@@ -1,5 +1,6 @@
 import React from "react"
 import styled from "@emotion/styled"
+import { Terms } from "../common/content/terms"

 interface UploadDisclaimerInputProps {
   affirmedDefaced: boolean
@@ -30,36 +31,7 @@ export const UploadDisclaimerInput: React.FunctionComponent<
         By uploading this dataset to OpenNeuro I agree to the following
         conditions:
       </h4>
-      <p>
-        I am the owner of this dataset and have any necessary ethics permissions
-        to share the data publicly. This dataset does not include any
-        identifiable personal health information as defined by the{" "}
-        <a href="https://www.hhs.gov/hipaa/for-professionals/privacy/laws-regulations/">
-          Health Insurance Portability and Accountability Act of 1996
-        </a>{" "}
-        (including names, zip codes, dates of birth, acquisition dates, etc). I
-        agree to destroy any key linking the personal identity of research
-        participants to the subject codes used in the dataset.
-      </p>
-      <p>
-        I agree that this dataset will become publicly available under a{" "}
-        <a href="https://wiki.creativecommons.org/wiki/CC0">
-          Creative Commons CC0
-        </a>{" "}
-        license after a grace period of 36 months counted from the date of the
-        first snapshot creation for this dataset. You will be able to apply for
-        up to two 6 month extensions to increase the grace period in case the
-        publication of a corresponding paper takes longer than expected. See
-        {" "}
-        <a href="/faq">FAQ</a> for details.
-      </p>
-      <p>This dataset is not subject to GDPR protections.</p>
-      <p>
-        Generally, data should only be uploaded to a single data archive. In the
-        rare cases where it is necessary to upload the data to two databases
-        (such as the NIMH Data Archive), I agree to ensure that the datasets are
-        harmonized across archives.
-      </p>
+      <Terms />
       <p>Please affirm one of the following:</p>
       <DisclaimerLabel>
         <input
package/src/scripts/uploader/uploader.jsx
CHANGED

@@ -1,4 +1,4 @@
-import
+import * as Sentry from "@sentry/react"
 import { toast } from "react-toastify"
 import ToastContent from "../common/partials/toast-content.jsx"
 import React from "react"
@@ -224,7 +224,7 @@ export class UploadClient extends React.Component {
       })
     })
     .catch((error) => {
-
+      Sentry.captureException(error)
       toast.error(
         <ToastContent
           title="Dataset creation failed"
@@ -302,7 +302,7 @@ export class UploadClient extends React.Component {
       this.uploadCompleteAction()
     }
   } catch (error) {
-
+    Sentry.captureException(error)
     const toastId = toast.error(
       <ToastContent
         title="Dataset upload failed"
@@ -327,7 +327,7 @@ export class UploadClient extends React.Component {
     try {
       this.state.xhr.abort()
     } catch (e) {
-
+      Sentry.captureException(e)
    }
  }
 }