@xyo-network/archivist-lmdb 5.1.22 → 5.1.23
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/package.json +25 -22
- package/dist/browser/spec/Archivist.full.spec.d.ts +0 -2
- package/dist/browser/spec/Archivist.full.spec.d.ts.map +0 -1
- package/dist/browser/spec/Archivist.spec.d.ts +0 -2
- package/dist/browser/spec/Archivist.spec.d.ts.map +0 -1
- package/src/spec/Archivist.full.spec.ts +0 -427
- package/src/spec/Archivist.spec.ts +0 -163
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@xyo-network/archivist-lmdb",
-  "version": "5.1.22",
+  "version": "5.1.23",
   "description": "Primary SDK for using XYO Protocol 2.0",
   "homepage": "https://xyo.network",
   "bugs": {
@@ -30,36 +30,39 @@
   "types": "dist/browser/index.d.ts",
   "files": [
     "dist",
-    "src"
+    "src",
+    "!**/*.bench.*",
+    "!**/*.spec.*",
+    "!**/*.test.*"
   ],
   "dependencies": {
-    "@xylabs/assert": "~5.0.
-    "@xylabs/exists": "~5.0.
-    "@xylabs/hex": "~5.0.
-    "@xylabs/promise": "~5.0.
-    "@xyo-network/archivist-abstract": "~5.1.
-    "@xyo-network/archivist-model": "~5.1.
-    "@xyo-network/boundwitness-model": "~5.1.
-    "@xyo-network/module-model": "~5.1.
-    "@xyo-network/payload-builder": "~5.1.
-    "@xyo-network/payload-model": "~5.1.
+    "@xylabs/assert": "~5.0.33",
+    "@xylabs/exists": "~5.0.33",
+    "@xylabs/hex": "~5.0.33",
+    "@xylabs/promise": "~5.0.33",
+    "@xyo-network/archivist-abstract": "~5.1.23",
+    "@xyo-network/archivist-model": "~5.1.23",
+    "@xyo-network/boundwitness-model": "~5.1.23",
+    "@xyo-network/module-model": "~5.1.23",
+    "@xyo-network/payload-builder": "~5.1.23",
+    "@xyo-network/payload-model": "~5.1.23",
     "lmdb": "~3.4.4"
   },
   "devDependencies": {
-    "@xylabs/delay": "~5.0.
-    "@xylabs/object": "~5.0.
+    "@xylabs/delay": "~5.0.33",
+    "@xylabs/object": "~5.0.33",
     "@xylabs/ts-scripts-yarn3": "~7.2.8",
     "@xylabs/tsconfig": "~7.2.8",
-    "@xylabs/vitest-extended": "~5.0.
-    "@xyo-network/account": "~5.1.
-    "@xyo-network/account-model": "~5.1.
-    "@xyo-network/archivist-acceptance-tests": "~5.1.
-    "@xyo-network/id-payload-plugin": "~5.1.
-    "@xyo-network/payload-wrapper": "~5.1.
-    "@xyo-network/wallet": "~5.1.
+    "@xylabs/vitest-extended": "~5.0.33",
+    "@xyo-network/account": "~5.1.23",
+    "@xyo-network/account-model": "~5.1.23",
+    "@xyo-network/archivist-acceptance-tests": "~5.1.23",
+    "@xyo-network/id-payload-plugin": "~5.1.23",
+    "@xyo-network/payload-wrapper": "~5.1.23",
+    "@xyo-network/wallet": "~5.1.23",
     "typescript": "~5.9.3",
     "uuid": "~13.0.0",
-    "vitest": "~4.0.
+    "vitest": "~4.0.9"
   },
   "publishConfig": {
     "access": "public"
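The notable packaging change above is the `files` allowlist: npm treats `!`-prefixed globs as exclusions, so benchmark and test sources under `src/` are no longer shipped in the published tarball (consistent with the spec-file deletions below). A rough TypeScript sketch of the effect, using the `minimatch` package as a stand-in for npm's real selection logic (npm itself uses `npm-packlist`); the file paths are illustrative:

// Rough illustration of the new `files` negation globs.
// Assumes the `minimatch` package is installed; npm's actual behavior
// is implemented by npm-packlist, which these globs feed into.
import { minimatch } from 'minimatch'

const negations = ['**/*.bench.*', '**/*.spec.*', '**/*.test.*']
const candidates = [
  'src/Archivist.ts', // still packed
  'src/spec/Archivist.spec.ts', // now excluded
  'src/spec/Archivist.full.spec.ts', // now excluded
]
for (const file of candidates) {
  const excluded = negations.some(glob => minimatch(file, glob))
  console.log(`${file}: ${excluded ? 'excluded' : 'packed'}`)
}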
package/dist/browser/spec/Archivist.full.spec.d.ts.map
REMOVED
@@ -1 +0,0 @@
-{"version":3,"file":"Archivist.full.spec.d.ts","sourceRoot":"","sources":["../../../src/spec/Archivist.full.spec.ts"],"names":[],"mappings":""}
package/dist/browser/spec/Archivist.spec.d.ts.map
REMOVED
@@ -1 +0,0 @@
-{"version":3,"file":"Archivist.spec.d.ts","sourceRoot":"","sources":["../../../src/spec/Archivist.spec.ts"],"names":[],"mappings":"AACA,OAAO,yBAAyB,CAAA"}
package/src/spec/Archivist.full.spec.ts
REMOVED
@@ -1,427 +0,0 @@
-/* eslint-disable complexity */
-/* eslint-disable max-statements */
-
-import { tmpdir } from 'node:os'
-
-import { delay } from '@xylabs/delay'
-import type { Hash } from '@xylabs/hex'
-import type { AnyObject } from '@xylabs/object'
-import { Account } from '@xyo-network/account'
-import type { AccountInstance } from '@xyo-network/account-model'
-import { generateArchivistNextTests } from '@xyo-network/archivist-acceptance-tests'
-import type { ArchivistInstance } from '@xyo-network/archivist-model'
-import { isArchivistInstance, isArchivistModule } from '@xyo-network/archivist-model'
-import { IdSchema } from '@xyo-network/id-payload-plugin'
-import { PayloadBuilder } from '@xyo-network/payload-builder'
-import type { Payload, WithStorageMeta } from '@xyo-network/payload-model'
-import { PayloadWrapper } from '@xyo-network/payload-wrapper'
-import { v4 } from 'uuid'
-import {
-  beforeAll, describe, expect, it,
-} from 'vitest'
-
-import { LmdbArchivist } from '../Archivist.ts'
-import { LmdbArchivistConfigSchema } from '../Config.ts'
-
-/**
- * @group module
- * @group archivist
- */
-describe('LmdbArchivist [full]', () => {
-  type TestPayload = Payload<{ salt: string; schema: string }>
-
-  const fillDb = async (db: ArchivistInstance, count: number = 10): Promise<TestPayload[]> => {
-    const sources = Array.from({ length: count }).map((_, i) => {
-      return { salt: `${i}`, schema: IdSchema }
-    })
-    for (const source of sources) {
-      await db.insert([source])
-    }
-    return sources
-  }
-
-  const shuffleArray = <T>(original: Array<T>) => {
-    const shuffled = [...original]
-    for (let i = shuffled.length - 1; i > 0; i--) {
-      // Generate a random index between 0 and i
-      const j = Math.floor(Math.random() * (i + 1))
-      // Swap elements at indices i and j
-      ;[shuffled[i], shuffled[j]] = [shuffled[j], shuffled[i]]
-    }
-    return shuffled
-  }
-  let account: AccountInstance
-  beforeAll(async () => {
-    account = await Account.random()
-  })
-  describe('config', () => {
-    describe('dbName', () => {
-      it('supplied via config uses config value', async () => {
-        const dbName = 'testDbName'
-        const archivist = await LmdbArchivist.create({
-          account,
-          config: {
-            dbName, schema: LmdbArchivistConfigSchema, storeName: 'payloads', location: tmpdir(), clearStoreOnStart: true,
-          },
-        })
-        expect(archivist.dbName).toBe(dbName)
-      })
-    })
-    describe('dbStore', () => {
-      it('supplied via config uses config value', async () => {
-        const dbName = 'testDbName'
-        const storeName = 'testStoreName'
-        const archivist = await LmdbArchivist.create({
-          account,
-          config: {
-            dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-          },
-        })
-        expect(archivist.storeName).toBe(storeName)
-      })
-      it('allows for multiple dbStores within the same dbName', async () => {
-        const dbName = 'testDbName'
-        const storeName1 = 'testStoreName1'
-        const storeName2 = 'testStoreName2'
-        const archivist1 = await LmdbArchivist.create({
-          account,
-          config: {
-            dbName, schema: LmdbArchivistConfigSchema, storeName: storeName1, location: tmpdir(), clearStoreOnStart: true,
-          },
-        })
-        const archivist2 = await LmdbArchivist.create({
-          account,
-          config: {
-            dbName, schema: LmdbArchivistConfigSchema, storeName: storeName2, location: tmpdir(), clearStoreOnStart: true,
-          },
-        })
-
-        expect(isArchivistInstance(archivist1)).toBeTruthy()
-        expect(isArchivistModule(archivist1)).toBeTruthy()
-
-        // TODO: This test is not testing the end state of indexedDB, but rather the
-        // state of the Archivist instance and therefore isn't valid. We'd want to actually
-        // open indexedDB and check the state of the stores matches what we want (which it doesn't).
-        expect(archivist1.storeName).toBe(storeName1)
-        expect(archivist2.storeName).toBe(storeName2)
-      })
-    })
-  })
-  describe('all', () => {
-    const dbName = 'e926a178-9c6a-4604-b65c-d1fccd97f1de'
-    const storeName = '27fcea19-c30f-415a-a7f9-0b0514705cb1'
-    let sources: Payload[] = []
-    let archivistModule: ArchivistInstance
-    beforeAll(async () => {
-      archivistModule = await LmdbArchivist.create({
-        account,
-        config: {
-          dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-        },
-      })
-      sources = await fillDb(archivistModule)
-    })
-    it('returns all data', async () => {
-      const getResult = await archivistModule.all?.()
-      expect(getResult).toBeDefined()
-      expect(getResult?.length).toBe(sources.length)
-      expect(PayloadBuilder.omitStorageMeta(getResult)).toEqual(sources)
-    })
-  })
-
-  describe('delete', () => {
-    const dbName = '6e3fcd65-f24f-4ebc-b314-f597b385fb8e'
-    const storeName = 'c0872f52-32b9-415e-8ca9-af78713cee28'
-    let sources: Payload[] = []
-    let archivistModule: ArchivistInstance
-    beforeAll(async () => {
-      archivistModule = await LmdbArchivist.create({
-        account,
-        config: {
-          dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-        },
-      })
-      sources = await fillDb(archivistModule)
-    })
-    it('deletes data', async () => {
-      const getResult = (await archivistModule.all?.()) ?? []
-      expect(getResult).toBeDefined()
-      expect(getResult?.length).toBe(sources.length)
-      const dataHashes = (await PayloadBuilder.dataHashes(getResult)) ?? []
-      const deleteResult = await archivistModule.delete?.(dataHashes)
-      expect(deleteResult.length).toBe(dataHashes.length)
-      expect((await archivistModule.all?.()).length).toBe(0)
-    })
-  })
-  describe('get', () => {
-    const dbName = 'b4379714-73d1-42c6-88e7-1a363b7ed86f'
-    const storeName = '3dbdb153-79d0-45d0-b2f7-9f06cdd74b1e'
-    let sources: TestPayload[] = []
-    let archivistModule: ArchivistInstance
-    beforeAll(async () => {
-      archivistModule = await LmdbArchivist.create({
-        account,
-        config: {
-          dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-        },
-      })
-      sources = await fillDb(archivistModule)
-    })
-    it('gets existing data', async () => {
-      for (const source of sources) {
-        const sourceHash = await PayloadBuilder.dataHash(source)
-        const getResult = await archivistModule.get([sourceHash])
-        expect(getResult).toBeDefined()
-        expect(getResult.length).toBe(1)
-        const resultHash = await PayloadWrapper.wrap(getResult[0]).dataHash()
-        expect(resultHash).toBe(sourceHash)
-      }
-    })
-    it('returned by order of insertion', async () => {
-      const shuffled = shuffleArray(sources)
-      const sourceHashes = await Promise.all(shuffled.map(source => PayloadBuilder.dataHash(source)))
-      const getResult = (await archivistModule.get(sourceHashes)) as WithStorageMeta<TestPayload>[]
-      expect(getResult).toBeDefined()
-      expect(getResult.length).toBe(sourceHashes.length)
-      const salts = sources.map(source => source.salt)
-      const resultSalts = getResult.map(result => result?.salt)
-      expect(resultSalts).toEqual(salts)
-    })
-    it('returns nothing for non-existing hashes', async () => {
-      const hashThatDoesNotExist = '0000000000000000000000000000000000000000000000000000000000000000' as Hash
-      const getResult = await archivistModule.get([hashThatDoesNotExist])
-      expect(getResult).toBeDefined()
-      expect(getResult.length).toBe(0)
-    })
-    describe('by hash', () => {
-      let payload1: Payload<AnyObject>
-      let payload2: Payload<AnyObject>
-      let dataHash1: Hash
-      let dataHash2: Hash
-      let rootHash1: Hash
-      let rootHash2: Hash
-      beforeAll(async () => {
-        const salt = '650123f6-191e-4cc4-a813-f7a29dcbfb0e'
-        payload1 = {
-          $some: [
-            '12bed6aa884f5b7ffc08e19790b5db0da724b8b7471138dcbec090a0798861db0da8255f0d9297ba981b2cbbea65d9eadabac6632124f10f22c709d333a1f285',
-          ],
-          salt,
-          schema: IdSchema,
-        }
-        payload2 = {
-          $some: [
-            '22bed6aa884f5b7ffc08e19790b5db0da724b8b7471138dcbec090a0798861db0da8255f0d9297ba981b2cbbea65d9eadabac6632124f10f22c709d333a1f285',
-          ],
-          salt,
-          schema: IdSchema,
-        }
-        dataHash1 = await PayloadBuilder.dataHash(payload1)
-        dataHash2 = await PayloadBuilder.dataHash(payload2)
-        rootHash1 = await PayloadBuilder.hash(payload1)
-        rootHash2 = await PayloadBuilder.hash(payload2)
-        expect(dataHash1).toBe(dataHash2)
-        expect(rootHash1).not.toBe(rootHash2)
-        await archivistModule.insert([payload1])
-        await archivistModule.insert([payload2])
-      })
-      describe('data hash', () => {
-        it('returns value using hash', async () => {
-          const result = await archivistModule.get([dataHash1])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-        it('deduplicates multiple hashes', async () => {
-          const result = await archivistModule.get([dataHash1, dataHash2])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-        it('returns the first occurrence of the hash', async () => {
-          // Same data hash contained by multiple root hashes
-          const result = await archivistModule.get([dataHash2])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-          // Returns the first occurrence of the data hash
-          // expect(PayloadBuilder.omitStorageMeta(result[0])).toEqual(payload1)
-        })
-      })
-      describe('root hash', () => {
-        it('returns value using hash', async () => {
-          const result = await archivistModule.get([rootHash1])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-        it('deduplicates multiple hashes', async () => {
-          const result = await archivistModule.get([rootHash1, rootHash1])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-      })
-    })
-  })
-  describe('insert', () => {
-    describe('with unique data', () => {
-      const dbName = 'bd86d2dd-dc48-4621-8c1f-105ba2e90287'
-      const storeName = 'f8d14049-2966-4198-a2ab-1c096a949315'
-      let sources: Payload[] = []
-      let archivistModule: ArchivistInstance
-      beforeAll(async () => {
-        archivistModule = await LmdbArchivist.create({
-          account,
-          config: {
-            dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-          },
-        })
-        sources = await fillDb(archivistModule)
-      })
-      it('can round trip data using data hash', async () => {
-        await Promise.all(
-          sources.map(async (source) => {
-            const sourceHash = await PayloadBuilder.dataHash(source)
-            const getResult = await archivistModule.get([sourceHash])
-            expect(getResult).toBeDefined()
-            expect(getResult.length).toBe(1)
-            const [result] = getResult
-            expect(PayloadBuilder.omitStorageMeta(result)).toEqual(PayloadBuilder.omitStorageMeta(source))
-            const resultHash = await PayloadBuilder.dataHash(result)
-            expect(resultHash).toBe(sourceHash)
-          }),
-        )
-      })
-      it('can round trip data using root hash', async () => {
-        await Promise.all(
-          sources.map(async (source) => {
-            const sourceHash = await PayloadBuilder.hash(source)
-            const getResult = await archivistModule.get([sourceHash])
-            expect(getResult).toBeDefined()
-            expect(getResult.length).toBe(1)
-            const [result] = getResult
-            expect(PayloadBuilder.omitStorageMeta(result)).toEqual(PayloadBuilder.omitStorageMeta(source))
-            const resultHash = await PayloadBuilder.hash(result)
-            expect(resultHash).toBe(sourceHash)
-          }),
-        )
-      })
-    })
-    describe('with duplicate data', () => {
-      const dbName = 'bb43b6fe-2f9e-4bda-8177-f94336353f98'
-      const storeName = '91c6b87d-3ac8-4cfd-8aee-d509f3de0299'
-      let archivistModule: ArchivistInstance
-      beforeAll(async () => {
-        archivistModule = await LmdbArchivist.create({
-          account,
-          config: {
-            dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-          },
-        })
-      })
-      it('handles duplicate insertions', async () => {
-        // Insert same payload twice
-        const source = { salt: '2d515e1d-d82c-4545-9903-3eded7fefa7c', schema: IdSchema }
-        // First insertion should succeed and return the inserted payload
-        expect((await archivistModule.insert([source]))[0]._hash).toEqual(await PayloadBuilder.hash(source))
-        // Second insertion should succeed but return empty array since no new data was inserted
-        expect(await archivistModule.insert([source])).toEqual([])
-        // Ensure we can get the inserted payload
-        const sourceHash = await PayloadBuilder.dataHash(source)
-        const getResult = await archivistModule.get([sourceHash])
-        expect(getResult).toBeDefined()
-        expect(getResult.length).toBe(1)
-        const resultHash = await PayloadBuilder.dataHash(getResult[0])
-        expect(resultHash).toBe(sourceHash)
-        // Ensure the DB has only one instance of the payload written to it
-        const allResult = await archivistModule.all?.()
-        expect(allResult).toBeDefined()
-        expect(allResult.length).toBe(1)
-      })
-    })
-  })
-
-  describe('next', () => {
-    const dbName = 'bd86d2dd-dc48-4621-8c1f-105ba2e90288'
-    const storeName = 'f8d14049-2966-4198-a2ab-1c096a949316'
-    it('next', async () => {
-      const archivist = await LmdbArchivist.create({
-        account: 'random',
-        config: {
-          dbName, schema: LmdbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
-        },
-      })
-      const account = await Account.random()
-
-      const payloads1 = [
-        { schema: 'network.xyo.test', value: 1 },
-      ]
-
-      const payloads2 = [
-        { schema: 'network.xyo.test', value: 2 },
-      ]
-
-      const payloads3 = [
-        { schema: 'network.xyo.test', value: 3 },
-      ]
-
-      const payloads4 = [
-        { schema: 'network.xyo.test', value: 4 },
-      ]
-
-      await archivist.insert(payloads1)
-      await delay(1)
-      const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account)
-      await delay(1)
-      await archivist.insert(payloads3)
-      await delay(1)
-      await archivist.insert(payloads4)
-      await delay(1)
-      expect(bw).toBeDefined()
-      expect(payloads).toBeDefined()
-      expect(errors).toBeDefined()
-
-      const batch1 = await archivist.next?.({ limit: 2 })
-      expect(batch1.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-
-      const batch2 = await archivist.next?.({ limit: 2, cursor: batch1?.[1]._sequence })
-      expect(batch2.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch2?.[1])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
-
-      const batch3 = await archivist.next?.({ limit: 20 })
-      expect(batch3.length).toBe(4)
-      expect(await PayloadBuilder.dataHash(batch3?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-
-      const batch4 = await archivist.next?.({ limit: 20, cursor: batch1?.[0]._sequence })
-      expect(batch4.length).toBe(3)
-      expect(await PayloadBuilder.dataHash(batch4?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
-
-      // desc
-      const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
-      expect(batch1Desc.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch1Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
-
-      const batch2Desc = await archivist.next?.({
-        limit: 2, cursor: batch1Desc?.[1]._sequence, order: 'desc',
-      })
-      expect(batch2Desc.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch2Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-
-      const batch3Desc = await archivist.next?.({
-        limit: 20, cursor: batch1Desc?.[1]._sequence, order: 'desc',
-      })
-      expect(batch3Desc.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch3Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-    })
-  })
-  generateArchivistNextTests(async () => {
-    const dbName = v4()
-    const storeName = v4()
-    const location = tmpdir()
-    const clearStoreOnStart = true
-    const schema = LmdbArchivistConfigSchema
-    return await LmdbArchivist.create({
-      account: 'random',
-      config: {
-        dbName, schema, storeName, location, clearStoreOnStart,
-      },
-    })
-  })
-})
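For reference, the deleted spec above exercised cursor-based paging via `archivist.next({ limit, cursor, order })`, where the cursor is the `_sequence` storage-meta value of the last payload seen. A minimal sketch of that pattern, assuming the package root re-exports `LmdbArchivist` and `LmdbArchivistConfigSchema` (the specs import them via relative paths) and using the same config shape as the deleted tests:

// Minimal sketch: drain an LmdbArchivist two payloads at a time, resuming
// each call from the previous batch's last _sequence value.
import { tmpdir } from 'node:os'

import { LmdbArchivist, LmdbArchivistConfigSchema } from '@xyo-network/archivist-lmdb'

const archivist = await LmdbArchivist.create({
  account: 'random',
  config: {
    schema: LmdbArchivistConfigSchema,
    location: tmpdir(),
    dbName: 'example.db', // hypothetical names, mirroring the spec's config
    storeName: 'payloads',
    clearStoreOnStart: true,
  },
})

await archivist.insert([{ schema: 'network.xyo.test', value: 1 }])
await archivist.insert([{ schema: 'network.xyo.test', value: 2 }])
await archivist.insert([{ schema: 'network.xyo.test', value: 3 }])

// Page forward; an empty batch means the store is exhausted.
let batch = (await archivist.next?.({ limit: 2 })) ?? []
while (batch.length > 0) {
  console.log(batch.map(payload => payload._sequence))
  const cursor = batch.at(-1)?._sequence
  batch = (await archivist.next?.({ limit: 2, cursor })) ?? []
}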
package/src/spec/Archivist.spec.ts
REMOVED
@@ -1,163 +0,0 @@
-/* eslint-disable max-statements */
-import '@xylabs/vitest-extended'
-
-import { tmpdir } from 'node:os'
-
-import { delay } from '@xylabs/delay'
-import { toSafeJsonString } from '@xylabs/object'
-import { isArchivistInstance, isArchivistModule } from '@xyo-network/archivist-model'
-import type { Id } from '@xyo-network/id-payload-plugin'
-import {
-  asId,
-  IdSchema, isId,
-} from '@xyo-network/id-payload-plugin'
-import { PayloadBuilder } from '@xyo-network/payload-builder'
-import { HDWallet } from '@xyo-network/wallet'
-import {
-  describe, expect, it,
-} from 'vitest'
-
-import { LmdbArchivist } from '../Archivist.ts'
-import { LmdbArchivistConfigSchema } from '../Config.ts'
-
-/**
- * @group module
- * @group archivist
- */
-describe('LmdbArchivist', () => {
-  it('should listen to cleared events', async () => {
-    const archivist = await LmdbArchivist.create({
-      account: 'random',
-      config: {
-        schema: LmdbArchivistConfigSchema, location: tmpdir(), dbName: 'test1.db', storeName: 'payloads', clearStoreOnStart: true,
-      },
-    })
-
-    expect(isArchivistInstance(archivist)).toBe(true)
-    expect(isArchivistModule(archivist)).toBe(true)
-
-    // Create a new promise and resolve it when the event fires
-    const eventPromise = new Promise<void>((resolve) => {
-      archivist.on('cleared', () => {
-        expect(true).toBe(true) // Confirm event fired
-        resolve() // Resolve the promise
-      })
-    })
-    await archivist.clear()
-    return eventPromise
-  })
-
-  it('should return items inserted in the order they were provided in', async () => {
-    const archivist = await LmdbArchivist.create({
-      account: 'random',
-      config: {
-        schema: LmdbArchivistConfigSchema, location: tmpdir(), dbName: 'test2.db', storeName: 'payloads', clearStoreOnStart: true,
-      },
-    })
-    const payloads: Id[] = Array.from({ length: 100 }, (_, i) => new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: `${i}` }).build())
-    // Ensure payload was create in order provided
-    for (const [index, id] of payloads.entries()) {
-      expect(id?.salt).toBe(`${index}`)
-    }
-
-    const withStorageMeta = await PayloadBuilder.addStorageMeta(payloads)
-
-    // Ensure payload was returned in order provided
-    for (const [index, result] of withStorageMeta.entries()) {
-      expect(isId(result)).toBe(true)
-      const id = asId(result)
-      expect(id).toBeDefined()
-      expect(id?.salt).toBe(`${index}`)
-      expect(await PayloadBuilder.dataHash(result)).toEqual(await PayloadBuilder.dataHash(payloads[index]))
-    }
-
-    const results = await archivist.insert(payloads)
-    expect(results.length).toBe(payloads.length)
-
-    // Ensure payload was inserted in order provided
-    for (const [index, result] of results.entries()) {
-      expect(isId(result)).toBe(true)
-      const id = asId(result)
-      expect(id).toBeDefined()
-      if (index > 0) {
-        expect(result._sequence > results[index - 1]._sequence).toBeTrue()
-      }
-      if (index < 99) {
-        expect(result._sequence < results[index + 1]._sequence).toBeTrue()
-      }
-      if (id?.salt !== `${index}`) {
-        console.warn('result-', results[index - 1])
-        console.warn('result', result)
-        console.warn('result+', results[index + 1])
-      }
-      expect(id?.salt).toBe(`${index}`)
-      expect(await PayloadBuilder.dataHash(result)).toEqual(await PayloadBuilder.dataHash(payloads[index]))
-    }
-  })
-
-  it('next', async () => {
-    const archivist = await LmdbArchivist.create({
-      account: await HDWallet.random(),
-      config: {
-        schema: LmdbArchivistConfigSchema, location: tmpdir(), dbName: 'test3.db', storeName: 'payloads', clearStoreOnStart: true,
-      },
-    })
-    const account = await HDWallet.random()
-
-    const payloads1 = [
-      { schema: 'network.xyo.test', value: 1 },
-    ]
-
-    const payloads2 = [
-      { schema: 'network.xyo.test', value: 2 },
-    ]
-
-    const payloads3 = [
-      { schema: 'network.xyo.test', value: 3 },
-    ]
-
-    const payloads4 = [
-      { schema: 'network.xyo.test', value: 4 },
-    ]
-
-    const insertedPayloads1 = await archivist.insert(payloads1)
-    expect(insertedPayloads1[0]._hash).toBe(await PayloadBuilder.hash(payloads1[0]))
-    expect(insertedPayloads1[0]._dataHash).toBe(await PayloadBuilder.dataHash(payloads1[0]))
-    expect(insertedPayloads1[0]._sequence).toBeDefined()
-    await delay(1)
-    console.log(toSafeJsonString(payloads1, 10))
-    const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account)
-    expect(bw).toBeDefined()
-    expect(payloads).toBeDefined()
-    expect(errors).toBeDefined()
-    await delay(1)
-    await archivist.insert(payloads3)
-    await delay(1)
-    await archivist.insert(payloads4)
-
-    console.log('bw', toSafeJsonString([bw, payloads, errors], 10))
-
-    const batch1 = await archivist.next?.({ limit: 2 })
-    expect(batch1).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-    expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(insertedPayloads1[0]))
-
-    const batch2 = await archivist.next?.({ limit: 2, cursor: batch1?.[0]._sequence })
-    expect(batch2).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch2?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
-    expect(await PayloadBuilder.dataHash(batch2?.[1])).toEqual(await PayloadBuilder.dataHash(payloads3[0]))
-
-    // desc
-    const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
-    expect(batch1Desc).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch1Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
-    expect(await PayloadBuilder.dataHash(batch1Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads3[0]))
-
-    const batch2Desc = await archivist.next?.({
-      limit: 2, cursor: batch1Desc[1]._sequence, order: 'desc',
-    })
-    expect(batch2Desc).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch2Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
-    expect(await PayloadBuilder.dataHash(batch2Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-  })
-})
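The deleted tests also encode the distinction between the package's two hash forms: `PayloadBuilder.dataHash` excludes `$`-prefixed meta fields while `PayloadBuilder.hash` (the root hash) includes them, which is why the 'by hash' spec expects two payloads with identical data but different `$some` values to share a data hash yet differ by root hash. A minimal sketch of that expectation (the payload values are illustrative):

// Sketch of the hash semantics implied by the deleted 'by hash' tests:
// data hash ignores '$'-prefixed meta fields, root hash does not.
import { PayloadBuilder } from '@xyo-network/payload-builder'

const salt = 'example'
const payload1 = { $some: ['1111'], salt, schema: 'network.xyo.id' }
const payload2 = { $some: ['2222'], salt, schema: 'network.xyo.id' }

// Same data hash: '$some' is excluded from data hashing
console.log((await PayloadBuilder.dataHash(payload1)) === (await PayloadBuilder.dataHash(payload2))) // true
// Different root hash: meta fields are included
console.log((await PayloadBuilder.hash(payload1)) === (await PayloadBuilder.hash(payload2))) // false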