@openneuro/server 4.47.6 → 5.0.0-alpha.0
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/package.json +10 -7
- package/src/app.ts +1 -1
- package/src/cache/__tests__/tree.spec.ts +212 -0
- package/src/cache/tree.ts +148 -0
- package/src/datalad/__tests__/dataRetentionNotifications.spec.ts +11 -0
- package/src/datalad/__tests__/files.spec.ts +249 -0
- package/src/datalad/dataRetentionNotifications.ts +5 -0
- package/src/datalad/dataset.ts +29 -1
- package/src/datalad/files.ts +362 -39
- package/src/datalad/snapshots.ts +29 -54
- package/src/graphql/resolvers/__tests__/response-status.spec.ts +42 -0
- package/src/graphql/resolvers/__tests__/user.spec.ts +55 -1
- package/src/graphql/resolvers/build-search-query.ts +391 -0
- package/src/graphql/resolvers/cache.ts +5 -1
- package/src/graphql/resolvers/dataset-search.ts +40 -23
- package/src/graphql/resolvers/datasetEvents.ts +48 -78
- package/src/graphql/resolvers/draft.ts +5 -2
- package/src/graphql/resolvers/holdDeletion.ts +21 -0
- package/src/graphql/resolvers/index.ts +6 -0
- package/src/graphql/resolvers/mutation.ts +2 -0
- package/src/graphql/resolvers/response-status.ts +43 -0
- package/src/graphql/resolvers/snapshots.ts +9 -18
- package/src/graphql/resolvers/summary.ts +17 -0
- package/src/graphql/resolvers/user.ts +1 -1
- package/src/graphql/schema.ts +54 -14
- package/src/handlers/datalad.ts +4 -0
- package/src/handlers/doi.ts +32 -36
- package/src/libs/doi/__tests__/doi.spec.ts +50 -12
- package/src/libs/doi/__tests__/validate.spec.ts +110 -0
- package/src/libs/doi/index.ts +108 -71
- package/src/libs/doi/metadata.ts +101 -0
- package/src/libs/doi/validate.ts +59 -0
- package/src/libs/presign.ts +137 -0
- package/src/models/dataset.ts +2 -0
- package/src/models/doi.ts +7 -0
- package/src/queues/producer-methods.ts +9 -5
- package/src/queues/queue-schedule.ts +1 -1
- package/src/queues/queues.ts +2 -2
- package/src/routes.ts +10 -2
- package/src/types/datacite/LICENSE +37 -0
- package/src/types/datacite/README.md +3 -0
- package/src/types/datacite/datacite-v4.5.json +643 -0
- package/src/types/datacite/datacite-v4.5.ts +281 -0
- package/src/types/datacite.ts +53 -63
- package/src/utils/datacite-mapper.ts +7 -3
- package/src/utils/datacite-utils.ts +12 -15
- package/src/libs/doi/__tests__/__snapshots__/doi.spec.ts.snap +0 -17
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.47.6",
+  "version": "5.0.0-alpha.0",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -16,12 +16,13 @@
   "author": "Squishymedia",
   "dependencies": {
     "@apollo/client": "3.13.8",
-    "@apollo/server": "~
+    "@apollo/server": "~5.5.0",
     "@apollo/utils.keyvadapter": "3.0.0",
+    "@as-integrations/express5": "^1.1.2",
     "@elastic/elasticsearch": "8.13.1",
     "@graphql-tools/schema": "^10.0.0",
     "@keyv/redis": "^4.5.0",
-    "@openneuro/search": "^
+    "@openneuro/search": "^5.0.0-alpha.0",
     "@sentry/node": "^10.37.0",
     "@sentry/profiling-node": "^10.37.0",
     "base64url": "^3.0.0",
@@ -30,12 +31,13 @@
     "date-fns": "^2.16.1",
     "draft-js": "^0.11.7",
     "draft-js-export-html": "^1.4.1",
-    "express": "
+    "express": "5",
     "graphql": "16.8.1",
     "graphql-bigint": "^1.0.0",
     "graphql-compose": "9.0.10",
     "graphql-iso-date": "^3.6.1",
     "graphql-tools": "9.0.0",
+    "hash-wasm": "^4.12.0",
     "immutable": "^4.3.8",
     "ioredis": "^5.6.1",
     "js-yaml": "^4.1.0",
@@ -46,6 +48,7 @@
     "mongodb-memory-server": "^9.2.0",
     "mongoose": "^8.9.5",
     "morgan": "^1.6.1",
+    "msgpackr": "^1.11.9",
     "node-mailjet": "^3.3.5",
     "object-hash": "2.1.1",
     "passport": "0.7.0",
@@ -72,8 +75,8 @@
   "devDependencies": {
     "@types/cors": "^2",
     "@types/draft-js": "^0.10.43",
-    "@types/express": "^
-    "@types/express-serve-static-core": "^
+    "@types/express": "^5.0.6",
+    "@types/express-serve-static-core": "^5.1.1",
     "@types/ioredis": "^4.17.1",
     "@types/ioredis-mock": "^8.2.2",
     "@types/js-yaml": "^4",
@@ -89,5 +92,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "ee5b58a1623626ef79a439918e6c065a1c7d5e2e"
 }
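Among the new runtime dependencies, msgpackr backs the Redis tree cache added in src/cache/tree.ts below (hash-wasm is also new but is not exercised in the files shown here). A minimal sketch of the pack/unpack round-trip the cache relies on; the entry shape mirrors the TreeEntry interface added in this release, and the values are illustrative:

import { pack, unpack } from "msgpackr"

// A compact tree entry as cached by src/cache/tree.ts (illustrative values)
const entries = [
  { n: "README.md", h: "abc123", s: 42, k: "", v: "", b: "", p: false, d: false },
]
const packed: Buffer = pack(entries) // binary Buffer, denser than JSON.stringify
const restored = unpack(packed) // structurally equal to `entries`
console.log(restored[0].n) // "README.md"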
package/src/app.ts
CHANGED
@@ -12,7 +12,7 @@ import morgan from "morgan"
 import schema from "./graphql/schema"
 import { ApolloServer } from "@apollo/server"
 import { ApolloServerPluginLandingPageLocalDefault } from "@apollo/server/plugin/landingPage/default"
-import { expressMiddleware } from "@
+import { expressMiddleware } from "@as-integrations/express5"
 import { ApolloServerPluginDrainHttpServer } from "@apollo/server/plugin/drainHttpServer"
 import { KeyvAdapter } from "@apollo/utils.keyvadapter"
 import Keyv from "keyv"
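The only change to app.ts is the middleware import: under Apollo Server 5 the Express adapter ships separately, and this release pairs it with Express 5 via @as-integrations/express5. A minimal sketch of the mount, assuming an ESM context with top-level await (the real app.ts also wires the Keyv cache adapter, the drain and landing-page plugins, and the rest of the middleware stack):

import express from "express"
import { ApolloServer } from "@apollo/server"
import { expressMiddleware } from "@as-integrations/express5"

const app = express()
const apollo = new ApolloServer({
  typeDefs: `type Query { hello: String }`,
  resolvers: { Query: { hello: () => "world" } },
})
await apollo.start() // must complete before mounting the middleware
// expressMiddleware expects a parsed JSON body
app.use("/graphql", express.json(), expressMiddleware(apollo))
app.listen(4000)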
package/src/cache/__tests__/tree.spec.ts
ADDED
@@ -0,0 +1,212 @@
+import type { Redis } from "ioredis"
+import {
+  addDatasetTree,
+  addDatasetTrees,
+  clearDatasetTrees,
+  getCommitTrees,
+  getDatasetTrees,
+  getTree,
+  getTreesBulk,
+  setCommitTrees,
+  setTree,
+  type TreeEntry,
+} from "../tree"
+
+function makeEntry(overrides: Partial<TreeEntry> = {}): TreeEntry {
+  return {
+    n: "README.md",
+    h: "abc123",
+    s: 42,
+    k: "datasets/ds000001/README.md",
+    v: "v1",
+    b: "",
+    p: false,
+    d: false,
+    ...overrides,
+  }
+}
+
+function createRedisMock() {
+  const store = new Map<string, Buffer>()
+  const sets = new Map<string, Set<string>>()
+
+  const pipelineOps: (() => [Error | null, unknown])[] = []
+  const pipelineMock = {
+    getBuffer(key: string) {
+      pipelineOps.push(() => {
+        const val = store.get(key)
+        return [null, val ?? null]
+      })
+      return pipelineMock
+    },
+    del(key: string) {
+      pipelineOps.push(() => {
+        store.delete(key)
+        sets.delete(key)
+        return [null, 1]
+      })
+      return pipelineMock
+    },
+    exec: vi.fn(async () => {
+      const results = pipelineOps.map((op) => op())
+      pipelineOps.length = 0
+      return results
+    }),
+  }
+
+  return {
+    getBuffer: vi.fn(async (key: string) => store.get(key) ?? null),
+    set: vi.fn(async (key: string, value: Buffer) => {
+      store.set(key, value)
+    }),
+    setex: vi.fn(async (key: string, _ttl: number, value: Buffer) => {
+      store.set(key, value)
+    }),
+    sadd: vi.fn(async (key: string, ...members: string[]) => {
+      if (!sets.has(key)) sets.set(key, new Set())
+      for (const m of members) sets.get(key)!.add(m)
+    }),
+    smembers: vi.fn(async (key: string) => [...(sets.get(key) ?? [])]),
+    pipeline: vi.fn(() => {
+      pipelineOps.length = 0
+      return pipelineMock
+    }),
+    // expose internals for assertions
+    _store: store,
+    _sets: sets,
+  } as unknown as Redis
+}
+
+describe("tree cache", () => {
+  describe("getTree / setTree", () => {
+    it("returns null on cache miss", async () => {
+      const redis = createRedisMock()
+      expect(await getTree(redis, "nonexistent")).toBeNull()
+    })
+
+    it("round-trips entries through msgpack", async () => {
+      const redis = createRedisMock()
+      const entries = [makeEntry(), makeEntry({ n: "sub-01", d: true, s: 0 })]
+      await setTree(redis, "hash1", entries)
+      const result = await getTree(redis, "hash1")
+      expect(result).toEqual(entries)
+    })
+
+    it("stores with TTL when provided", async () => {
+      const redis = createRedisMock()
+      await setTree(redis, "hash2", [makeEntry()], 3600)
+      expect(redis.setex).toHaveBeenCalledWith(
+        "tree:hash2",
+        3600,
+        expect.any(Buffer),
+      )
+      expect(redis.set).not.toHaveBeenCalled()
+    })
+
+    it("stores without TTL when omitted", async () => {
+      const redis = createRedisMock()
+      await setTree(redis, "hash3", [makeEntry()])
+      expect(redis.set).toHaveBeenCalledWith("tree:hash3", expect.any(Buffer))
+      expect(redis.setex).not.toHaveBeenCalled()
+    })
+  })
+
+  describe("getTreesBulk", () => {
+    it("returns empty map for empty input", async () => {
+      const redis = createRedisMock()
+      const result = await getTreesBulk(redis, [])
+      expect(result.size).toBe(0)
+      expect(redis.pipeline).not.toHaveBeenCalled()
+    })
+
+    it("fetches multiple trees via pipeline", async () => {
+      const redis = createRedisMock()
+      const e1 = [makeEntry({ n: "file1" })]
+      const e2 = [makeEntry({ n: "file2" })]
+      await setTree(redis, "a", e1)
+      await setTree(redis, "b", e2)
+
+      const result = await getTreesBulk(redis, ["a", "b", "missing"])
+      expect(result.size).toBe(2)
+      expect(result.get("a")).toEqual(e1)
+      expect(result.get("b")).toEqual(e2)
+      expect(result.has("missing")).toBe(false)
+    })
+  })
+
+  describe("setCommitTrees / getCommitTrees", () => {
+    it("returns null on cache miss", async () => {
+      const redis = createRedisMock()
+      expect(await getCommitTrees(redis, "nonexistent")).toBeNull()
+    })
+
+    it("round-trips tree hash lists", async () => {
+      const redis = createRedisMock()
+      const hashes = ["hash1", "hash2", "hash3"]
+      await setCommitTrees(redis, "commit1", hashes)
+      const result = await getCommitTrees(redis, "commit1")
+      expect(result).toEqual(hashes)
+    })
+
+    it("uses correct key prefix and TTL", async () => {
+      const redis = createRedisMock()
+      await setCommitTrees(redis, "abc", ["h1"])
+      expect(redis.set).toHaveBeenCalledWith(
+        "ct:abc",
+        expect.any(Buffer),
+      )
+    })
+  })
+
+  describe("dataset tree index", () => {
+    it("addDatasetTree adds a single hash", async () => {
+      const redis = createRedisMock()
+      await addDatasetTree(redis, "ds000001", "hash1")
+      expect(redis.sadd).toHaveBeenCalledWith("dt:ds000001", "hash1")
+      const members = await getDatasetTrees(redis, "ds000001")
+      expect(members).toEqual(["hash1"])
+    })
+
+    it("addDatasetTrees adds multiple hashes", async () => {
+      const redis = createRedisMock()
+      await addDatasetTrees(redis, "ds000001", ["h1", "h2", "h3"])
+      expect(redis.sadd).toHaveBeenCalledWith("dt:ds000001", "h1", "h2", "h3")
+    })
+
+    it("addDatasetTrees skips empty array", async () => {
+      const redis = createRedisMock()
+      await addDatasetTrees(redis, "ds000001", [])
+      expect(redis.sadd).not.toHaveBeenCalled()
+    })
+
+    it("getDatasetTrees returns empty array for unknown dataset", async () => {
+      const redis = createRedisMock()
+      const result = await getDatasetTrees(redis, "ds999999")
+      expect(result).toEqual([])
+    })
+  })
+
+  describe("clearDatasetTrees", () => {
+    it("deletes all cached trees and the index", async () => {
+      const redis = createRedisMock()
+      // Populate tree data and index
+      await setTree(redis, "t1", [makeEntry({ n: "a" })])
+      await setTree(redis, "t2", [makeEntry({ n: "b" })])
+      await addDatasetTrees(redis, "ds000001", ["t1", "t2"])
+
+      await clearDatasetTrees(redis, "ds000001")
+
+      // Individual trees should be gone
+      expect(await getTree(redis, "t1")).toBeNull()
+      expect(await getTree(redis, "t2")).toBeNull()
+      // Index should be gone
+      expect(await getDatasetTrees(redis, "ds000001")).toEqual([])
+    })
+
+    it("does nothing when dataset has no trees", async () => {
+      const redis = createRedisMock()
+      await clearDatasetTrees(redis, "ds000001")
+      expect(redis.pipeline).not.toHaveBeenCalled()
+    })
+  })
+})
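The pipeline mock above mirrors real ioredis semantics: pipeline() queues commands and exec() resolves to an array of [error, result] tuples in command order, which is exactly what getTreesBulk destructures. For reference, the same calls against a real client (assuming a reachable Redis instance):

import Redis from "ioredis"

const redis = new Redis() // defaults to localhost:6379
const results = await redis
  .pipeline()
  .getBuffer("tree:abc")
  .getBuffer("tree:def")
  .exec()
// results: [[Error | null, Buffer | null], ...] in command order
for (const [err, data] of results ?? []) {
  if (!err && data) console.log((data as Buffer).length)
}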
package/src/cache/tree.ts
ADDED
@@ -0,0 +1,148 @@
+import type { Redis } from "ioredis"
+import { pack, unpack } from "msgpackr"
+
+/** Compact tree entry stored in Redis */
+export interface TreeEntry {
+  /** filename */
+  n: string
+  /** child hash (tree hash for dirs, blob/annex key for files) */
+  h: string
+  /** size in bytes (0 for directories) */
+  s: number
+  /** S3 object key without bucket prefix (empty for directories) */
+  k: string
+  /** S3 versionId (empty for directories) */
+  v: string
+  /** S3 bucket override (empty string = default AWS_S3_PUBLIC_BUCKET) */
+  b: string
+  /** needs presigned URL */
+  p: boolean
+  /** is directory */
+  d: boolean
+}
+
+function treeKey(hash: string): string {
+  return `tree:${hash}`
+}
+
+function commitTreesKey(commitHash: string): string {
+  return `ct:${commitHash}`
+}
+
+function datasetTreesKey(datasetId: string): string {
+  return `dt:${datasetId}`
+}
+
+/** Read and decode a single tree from cache */
+export async function getTree(
+  redis: Redis,
+  treeHash: string,
+): Promise<TreeEntry[] | null> {
+  const data = await redis.getBuffer(treeKey(treeHash))
+  if (data) {
+    return unpack(data) as TreeEntry[]
+  }
+  return null
+}
+
+/** Encode and write a tree to cache. Optional TTL for unexported trees. */
+export async function setTree(
+  redis: Redis,
+  treeHash: string,
+  entries: TreeEntry[],
+  ttl?: number,
+): Promise<void> {
+  const packed = pack(entries)
+  if (ttl) {
+    await redis.setex(treeKey(treeHash), ttl, packed)
+  } else {
+    await redis.set(treeKey(treeHash), packed)
+  }
+}
+
+/** Pipeline-fetch multiple trees by hash */
+export async function getTreesBulk(
+  redis: Redis,
+  treeHashes: string[],
+): Promise<Map<string, TreeEntry[]>> {
+  if (treeHashes.length === 0) return new Map()
+  const pipeline = redis.pipeline()
+  for (const hash of treeHashes) {
+    pipeline.getBuffer(treeKey(hash))
+  }
+  const results = await pipeline.exec()
+  const trees = new Map<string, TreeEntry[]>()
+  for (let i = 0; i < treeHashes.length; i++) {
+    const [err, data] = results[i]
+    if (!err && data) {
+      trees.set(treeHashes[i], unpack(data as Buffer) as TreeEntry[])
+    }
+  }
+  return trees
+}
+
+/** Store the set of tree hashes belonging to a commit */
+export async function setCommitTrees(
+  redis: Redis,
+  commitHash: string,
+  treeHashes: string[],
+): Promise<void> {
+  const packed = pack(treeHashes)
+  await redis.set(commitTreesKey(commitHash), packed)
+}
+
+/** Read the set of tree hashes for a commit */
+export async function getCommitTrees(
+  redis: Redis,
+  commitHash: string,
+): Promise<string[] | null> {
+  const data = await redis.getBuffer(commitTreesKey(commitHash))
+  if (data) {
+    return unpack(data) as string[]
+  }
+  return null
+}
+
+/** Add a tree hash to the dataset's reverse index */
+export async function addDatasetTree(
+  redis: Redis,
+  datasetId: string,
+  treeHash: string,
+): Promise<void> {
+  await redis.sadd(datasetTreesKey(datasetId), treeHash)
+}
+
+/** Add multiple tree hashes to the dataset's reverse index */
+export async function addDatasetTrees(
+  redis: Redis,
+  datasetId: string,
+  treeHashes: string[],
+): Promise<void> {
+  if (treeHashes.length > 0) {
+    await redis.sadd(datasetTreesKey(datasetId), ...treeHashes)
+  }
+}
+
+/** Get all tree hashes for a dataset (for cache clearing) */
+export async function getDatasetTrees(
+  redis: Redis,
+  datasetId: string,
+): Promise<string[]> {
+  return redis.smembers(datasetTreesKey(datasetId))
+}
+
+/** Delete all cached trees for a dataset and clean up the index */
+export async function clearDatasetTrees(
+  redis: Redis,
+  datasetId: string,
+): Promise<void> {
+  const treeHashes = await getDatasetTrees(redis, datasetId)
+  if (treeHashes.length > 0) {
+    const pipeline = redis.pipeline()
+    for (const hash of treeHashes) {
+      pipeline.del(treeKey(hash))
+    }
+    pipeline.del(datasetTreesKey(datasetId))
+    await pipeline.exec()
+  }
+}
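Taken together, the module is a three-part cache: tree:<hash> holds packed entries, ct:<commit> maps a commit to its tree hashes, and dt:<dataset> is a reverse index used only for invalidation. A hypothetical consumer flow built from the module's own exports; the dataset and commit values are illustrative, the entries array is left empty for brevity, and the import path assumes a sibling module:

import Redis from "ioredis"
import {
  addDatasetTrees,
  clearDatasetTrees,
  getCommitTrees,
  getTreesBulk,
  setCommitTrees,
  setTree,
} from "./tree"

const redis = new Redis()

// Populate: cache each tree (TTL for trees not yet exported),
// record the commit's tree set, and index hashes for invalidation.
await setTree(redis, "treehash1", [], 3600)
await setCommitTrees(redis, "commithash", ["treehash1"])
await addDatasetTrees(redis, "ds000001", ["treehash1"])

// Read path: resolve a commit to its tree hashes, then bulk-fetch them.
const hashes = await getCommitTrees(redis, "commithash")
const trees = hashes ? await getTreesBulk(redis, hashes) : new Map()

// On dataset change: drop every cached tree indexed under the dataset.
await clearDatasetTrees(redis, "ds000001")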
package/src/datalad/__tests__/dataRetentionNotifications.spec.ts
CHANGED
@@ -22,6 +22,7 @@ import notifications from "../../libs/notifications"
 import DataRetention from "../../models/dataRetention"
 import Permission from "../../models/permission"
 import User from "../../models/user"
+import Dataset from "../../models/dataset"
 import Deletion from "../../models/deletion"
 import { checkDataRetentionNotifications } from "../dataRetentionNotifications"
 import * as draftModule from "../draft"
@@ -55,6 +56,7 @@ describe("checkDataRetentionNotifications", () => {
   })
 
   beforeEach(async () => {
+    await Dataset.deleteMany({})
     await DataRetention.deleteMany({})
     await Deletion.deleteMany({})
     await Permission.deleteMany({})
@@ -96,6 +98,15 @@ describe("checkDataRetentionNotifications", () => {
     expect(notifications.send).not.toHaveBeenCalled()
   })
 
+  it("skips notifications for datasets with holdDeletion flag", async () => {
+    await Dataset.create({ id: TEST_DATASET, holdDeletion: true })
+    mockDraft(daysAgo(15))
+    mockSnapshots([{ hexsha: "other" }])
+
+    await checkDataRetentionNotifications(TEST_DATASET)
+    expect(notifications.send).not.toHaveBeenCalled()
+  })
+
   it("does nothing when draft matches the latest snapshot", async () => {
     mockDraft(daysAgo(30), TEST_HEXSHA)
     mockSnapshots([{ hexsha: TEST_HEXSHA }])