@xyo-network/archivist-memory 5.1.21 → 5.1.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +28 -25
- package/dist/neutral/spec/Archivist.full.spec.d.ts +0 -2
- package/dist/neutral/spec/Archivist.full.spec.d.ts.map +0 -1
- package/dist/neutral/spec/MemoryArchivist.spec.d.ts +0 -2
- package/dist/neutral/spec/MemoryArchivist.spec.d.ts.map +0 -1
- package/src/spec/Archivist.full.spec.ts +0 -346
- package/src/spec/MemoryArchivist.spec.ts +0 -137
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@xyo-network/archivist-memory",
-  "version": "5.1.21",
+  "version": "5.1.23",
   "description": "Primary SDK for using XYO Protocol 2.0",
   "homepage": "https://xyo.network",
   "bugs": {
@@ -30,38 +30,41 @@
   "types": "dist/neutral/index.d.ts",
   "files": [
     "dist",
-    "src"
+    "src",
+    "!**/*.bench.*",
+    "!**/*.spec.*",
+    "!**/*.test.*"
   ],
   "dependencies": {
-    "@xylabs/assert": "~5.0.
-    "@xylabs/creatable": "~5.0.
-    "@xylabs/exists": "~5.0.
-    "@xylabs/object": "~5.0.
-    "@xylabs/typeof": "~5.0.
-    "@xyo-network/archivist-abstract": "~5.1.
-    "@xyo-network/archivist-generic": "~5.1.
-    "@xyo-network/archivist-model": "~5.1.
-    "@xyo-network/module-model": "~5.1.
-    "@xyo-network/payload-builder": "~5.1.
-    "@xyo-network/payload-model": "~5.1.
+    "@xylabs/assert": "~5.0.33",
+    "@xylabs/creatable": "~5.0.33",
+    "@xylabs/exists": "~5.0.33",
+    "@xylabs/object": "~5.0.33",
+    "@xylabs/typeof": "~5.0.33",
+    "@xyo-network/archivist-abstract": "~5.1.23",
+    "@xyo-network/archivist-generic": "~5.1.23",
+    "@xyo-network/archivist-model": "~5.1.23",
+    "@xyo-network/module-model": "~5.1.23",
+    "@xyo-network/payload-builder": "~5.1.23",
+    "@xyo-network/payload-model": "~5.1.23",
     "lru-cache": "~11.2.2"
   },
   "devDependencies": {
-    "@xylabs/delay": "~5.0.
-    "@xylabs/hex": "~5.0.
-    "@xylabs/promise": "~5.0.
+    "@xylabs/delay": "~5.0.33",
+    "@xylabs/hex": "~5.0.33",
+    "@xylabs/promise": "~5.0.33",
     "@xylabs/ts-scripts-yarn3": "~7.2.8",
     "@xylabs/tsconfig": "~7.2.8",
-    "@xylabs/vitest-extended": "~5.0.
-    "@xyo-network/account": "~5.1.
-    "@xyo-network/account-model": "~5.1.
-    "@xyo-network/archivist-acceptance-tests": "~5.1.
-    "@xyo-network/boundwitness-model": "~5.1.
-    "@xyo-network/id-payload-plugin": "~5.1.
-    "@xyo-network/payload-wrapper": "~5.1.
-    "@xyo-network/wallet": "~5.1.
+    "@xylabs/vitest-extended": "~5.0.33",
+    "@xyo-network/account": "~5.1.23",
+    "@xyo-network/account-model": "~5.1.23",
+    "@xyo-network/archivist-acceptance-tests": "~5.1.23",
+    "@xyo-network/boundwitness-model": "~5.1.23",
+    "@xyo-network/id-payload-plugin": "~5.1.23",
+    "@xyo-network/payload-wrapper": "~5.1.23",
+    "@xyo-network/wallet": "~5.1.23",
     "typescript": "~5.9.3",
-    "vitest": "~4.0.
+    "vitest": "~4.0.9"
   },
   "publishConfig": {
     "access": "public"

package/dist/neutral/spec/Archivist.full.spec.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"Archivist.full.spec.d.ts","sourceRoot":"","sources":["../../../src/spec/Archivist.full.spec.ts"],"names":[],"mappings":""}

package/dist/neutral/spec/MemoryArchivist.spec.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"MemoryArchivist.spec.d.ts","sourceRoot":"","sources":["../../../src/spec/MemoryArchivist.spec.ts"],"names":[],"mappings":"AACA,OAAO,yBAAyB,CAAA"}

package/src/spec/Archivist.full.spec.ts
DELETED

@@ -1,346 +0,0 @@
-/* eslint-disable complexity */
-/* eslint-disable max-statements */
-
-import { delay } from '@xylabs/delay'
-import type { Hash } from '@xylabs/hex'
-import type { AnyObject } from '@xylabs/object'
-import { Account } from '@xyo-network/account'
-import type { AccountInstance } from '@xyo-network/account-model'
-import { generateArchivistNextTests } from '@xyo-network/archivist-acceptance-tests'
-import type { ArchivistInstance } from '@xyo-network/archivist-model'
-import { IdSchema } from '@xyo-network/id-payload-plugin'
-import { PayloadBuilder } from '@xyo-network/payload-builder'
-import type { Payload, WithStorageMeta } from '@xyo-network/payload-model'
-import { PayloadWrapper } from '@xyo-network/payload-wrapper'
-import {
-  beforeAll, describe, expect, it,
-} from 'vitest'
-
-import { MemoryArchivist } from '../Archivist.ts'
-import { MemoryArchivistConfigSchema } from '../Config.ts'
-
-/**
- * @group module
- * @group archivist
- */
-describe('MemoryArchivist [full]', () => {
-  type TestPayload = Payload<{ salt: string; schema: string }>
-
-  const fillDb = async (db: ArchivistInstance, count: number = 10): Promise<TestPayload[]> => {
-    const sources = Array.from({ length: count }).map((_, i) => {
-      return { salt: `${i}`, schema: IdSchema }
-    })
-    await db.insert(sources)
-    return sources
-  }
-
-  const shuffleArray = <T>(original: Array<T>) => {
-    const shuffled = [...original]
-    for (let i = shuffled.length - 1; i > 0; i--) {
-      // Generate a random index between 0 and i
-      const j = Math.floor(Math.random() * (i + 1))
-      // Swap elements at indices i and j
-      ;[shuffled[i], shuffled[j]] = [shuffled[j], shuffled[i]]
-    }
-    return shuffled
-  }
-  let account: AccountInstance
-  beforeAll(async () => {
-    account = await Account.random()
-  })
-  describe('config', () => {
-    describe('dbName', () => {
-      it('supplied via config uses config value', async () => {
-        const archivist = await MemoryArchivist.create({
-          account,
-          config: { schema: MemoryArchivistConfigSchema },
-        })
-        expect(archivist).toBeDefined()
-      })
-    })
-  })
-  describe('all', () => {
-    let sources: Payload[] = []
-    let archivistModule: ArchivistInstance
-    beforeAll(async () => {
-      archivistModule = await MemoryArchivist.create({
-        account,
-        config: { schema: MemoryArchivistConfigSchema },
-      })
-      sources = await fillDb(archivistModule)
-    })
-    it('returns all data', async () => {
-      const getResult = await archivistModule.all?.()
-      expect(getResult).toBeDefined()
-      expect(getResult?.length).toBe(sources.length)
-      expect(PayloadBuilder.omitStorageMeta(getResult)).toEqual(sources)
-    })
-  })
-
-  describe('delete', () => {
-    let sources: Payload[] = []
-    let archivistModule: ArchivistInstance
-    beforeAll(async () => {
-      archivistModule = await MemoryArchivist.create({
-        account,
-        config: { schema: MemoryArchivistConfigSchema },
-      })
-      sources = await fillDb(archivistModule)
-    })
-    it('deletes data', async () => {
-      const getResult = (await archivistModule.all?.()) ?? []
-      expect(getResult).toBeDefined()
-      expect(getResult?.length).toBe(sources.length)
-      const dataHashes = (await PayloadBuilder.dataHashes(getResult)) ?? []
-      const deleteResult = await archivistModule.delete?.(dataHashes)
-      expect(deleteResult.length).toBe(dataHashes.length)
-      expect((await archivistModule.all?.()).length).toBe(0)
-    })
-  })
-  describe('get', () => {
-    let sources: TestPayload[] = []
-    let archivistModule: ArchivistInstance
-    beforeAll(async () => {
-      archivistModule = await MemoryArchivist.create({
-        account,
-        config: { schema: MemoryArchivistConfigSchema },
-      })
-      sources = await fillDb(archivistModule)
-    })
-    it('gets existing data', async () => {
-      for (const source of sources) {
-        const sourceHash = await PayloadBuilder.dataHash(source)
-        const getResult = await archivistModule.get([sourceHash])
-        expect(getResult).toBeDefined()
-        expect(getResult.length).toBe(1)
-        const resultHash = await PayloadWrapper.wrap(getResult[0]).dataHash()
-        expect(resultHash).toBe(sourceHash)
-      }
-    })
-    it('returned by order of insertion', async () => {
-      const shuffled = shuffleArray(sources)
-      const sourceHashes = await Promise.all(shuffled.map(source => PayloadBuilder.dataHash(source)))
-      const getResult = (await archivistModule.get(sourceHashes)) as WithStorageMeta<TestPayload>[]
-      expect(getResult).toBeDefined()
-      expect(getResult.length).toBe(sourceHashes.length)
-      const salts = sources.map(source => source.salt)
-      const resultSalts = getResult.map(result => result?.salt)
-      expect(resultSalts).toEqual(salts)
-    })
-    it('returns nothing for non-existing hashes', async () => {
-      const hashThatDoesNotExist = '0000000000000000000000000000000000000000000000000000000000000000' as Hash
-      const getResult = await archivistModule.get([hashThatDoesNotExist])
-      expect(getResult).toBeDefined()
-      expect(getResult.length).toBe(0)
-    })
-    describe('by hash', () => {
-      let payload1: Payload<AnyObject>
-      let payload2: Payload<AnyObject>
-      let dataHash1: Hash
-      let dataHash2: Hash
-      let rootHash1: Hash
-      let rootHash2: Hash
-      beforeAll(async () => {
-        const salt = '650123f6-191e-4cc4-a813-f7a29dcbfb0e'
-        payload1 = {
-          $some: [
-            '12bed6aa884f5b7ffc08e19790b5db0da724b8b7471138dcbec090a0798861db0da8255f0d9297ba981b2cbbea65d9eadabac6632124f10f22c709d333a1f285',
-          ],
-          salt,
-          schema: IdSchema,
-        }
-        payload2 = {
-          $some: [
-            '22bed6aa884f5b7ffc08e19790b5db0da724b8b7471138dcbec090a0798861db0da8255f0d9297ba981b2cbbea65d9eadabac6632124f10f22c709d333a1f285',
-          ],
-          salt,
-          schema: IdSchema,
-        }
-        dataHash1 = await PayloadBuilder.dataHash(payload1)
-        dataHash2 = await PayloadBuilder.dataHash(payload2)
-        rootHash1 = await PayloadBuilder.hash(payload1)
-        rootHash2 = await PayloadBuilder.hash(payload2)
-        expect(dataHash1).toBe(dataHash2)
-        expect(rootHash1).not.toBe(rootHash2)
-        await archivistModule.insert([payload1])
-        await archivistModule.insert([payload2])
-      })
-      describe('data hash', () => {
-        it('returns value using hash', async () => {
-          const result = await archivistModule.get([dataHash1])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-        it('deduplicates multiple hashes', async () => {
-          const result = await archivistModule.get([dataHash1, dataHash2])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-        it('returns the first occurrence of the hash', async () => {
-          // Same data hash contained by multiple root hashes
-          const result = await archivistModule.get([dataHash2])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-          // Returns the first occurrence of the data hash
-          // expect(PayloadBuilder.omitStorageMeta(result[0])).toEqual(payload1)
-        })
-      })
-      describe('root hash', () => {
-        it('returns value using hash', async () => {
-          const result = await archivistModule.get([rootHash1])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-        it('deduplicates multiple hashes', async () => {
-          const result = await archivistModule.get([rootHash1, rootHash1])
-          expect(result).toBeDefined()
-          expect(result.length).toBe(1)
-        })
-      })
-    })
-  })
-  describe('insert', () => {
-    describe('with unique data', () => {
-      let sources: Payload[] = []
-      let archivistModule: ArchivistInstance
-      beforeAll(async () => {
-        archivistModule = await MemoryArchivist.create({
-          account,
-          config: { schema: MemoryArchivistConfigSchema },
-        })
-        sources = await fillDb(archivistModule)
-      })
-      it('can round trip data using data hash', async () => {
-        await Promise.all(
-          sources.map(async (source) => {
-            const sourceHash = await PayloadBuilder.dataHash(source)
-            const getResult = await archivistModule.get([sourceHash])
-            expect(getResult).toBeDefined()
-            expect(getResult.length).toBe(1)
-            const [result] = getResult
-            expect(PayloadBuilder.omitStorageMeta(result)).toEqual(PayloadBuilder.omitStorageMeta(source))
-            const resultHash = await PayloadBuilder.dataHash(result)
-            expect(resultHash).toBe(sourceHash)
-          }),
-        )
-      })
-      it('can round trip data using root hash', async () => {
-        await Promise.all(
-          sources.map(async (source) => {
-            const sourceHash = await PayloadBuilder.hash(source)
-            const getResult = await archivistModule.get([sourceHash])
-            expect(getResult).toBeDefined()
-            expect(getResult.length).toBe(1)
-            const [result] = getResult
-            expect(PayloadBuilder.omitStorageMeta(result)).toEqual(PayloadBuilder.omitStorageMeta(source))
-            const resultHash = await PayloadBuilder.hash(result)
-            expect(resultHash).toBe(sourceHash)
-          }),
-        )
-      })
-    })
-    describe('with duplicate data', () => {
-      let archivistModule: ArchivistInstance
-      beforeAll(async () => {
-        archivistModule = await MemoryArchivist.create({
-          account,
-          config: { schema: MemoryArchivistConfigSchema },
-        })
-      })
-      it('handles duplicate insertions', async () => {
-        // Insert same payload twice
-        const source = { salt: '2d515e1d-d82c-4545-9903-3eded7fefa7c', schema: IdSchema }
-        // First insertion should succeed and return the inserted payload
-        expect((await archivistModule.insert([source]))[0]._hash).toEqual(await PayloadBuilder.hash(source))
-        // Second insertion should succeed but return empty array since no new data was inserted
-        expect(await archivistModule.insert([source])).toEqual([])
-        // Ensure we can get the inserted payload
-        const sourceHash = await PayloadBuilder.dataHash(source)
-        const getResult = await archivistModule.get([sourceHash])
-        expect(getResult).toBeDefined()
-        expect(getResult.length).toBe(1)
-        const resultHash = await PayloadBuilder.dataHash(getResult[0])
-        expect(resultHash).toBe(sourceHash)
-        // Ensure the DB has only one instance of the payload written to it
-        const allResult = await archivistModule.all?.()
-        expect(allResult).toBeDefined()
-        expect(allResult.length).toBe(1)
-      })
-    })
-  })
-
-  describe('next', () => {
-    it('next', async () => {
-      const archivist = await MemoryArchivist.create({
-        account: 'random',
-        config: { schema: MemoryArchivistConfigSchema },
-      })
-      const account = await Account.random()
-
-      const payloads1 = [
-        { schema: 'network.xyo.test', value: 1 },
-      ]
-
-      const payloads2 = [
-        { schema: 'network.xyo.test', value: 2 },
-      ]
-
-      const payloads3 = [
-        { schema: 'network.xyo.test', value: 3 },
-      ]
-
-      const payloads4 = [
-        { schema: 'network.xyo.test', value: 4 },
-      ]
-
-      await archivist.insert(payloads1)
-      await delay(2)
-      const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account)
-      await delay(2)
-      await archivist.insert(payloads3)
-      await delay(2)
-      await archivist.insert(payloads4)
-      await delay(2)
-      expect(bw).toBeDefined()
-      expect(payloads).toBeDefined()
-      expect(errors).toBeDefined()
-
-      const batch1 = await archivist.next?.({ limit: 2 })
-      expect(batch1.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-
-      const batch2 = await archivist.next?.({ limit: 2, cursor: batch1?.[1]._sequence })
-      expect(batch2.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch2?.[1])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
-
-      const batch3 = await archivist.next?.({ limit: 20 })
-      expect(batch3.length).toBe(4)
-      expect(await PayloadBuilder.dataHash(batch3?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-
-      const batch4 = await archivist.next?.({ limit: 20, cursor: batch1?.[0]._sequence })
-      expect(batch4.length).toBe(3)
-      expect(PayloadBuilder.omitStorageMeta(batch4?.[0])).toEqual(payloads2[0])
-      expect(await PayloadBuilder.dataHash(batch4?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
-
-      // desc
-      const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
-      expect(batch1Desc.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch1Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
-
-      const batch2Desc = await archivist.next?.({
-        limit: 2, cursor: batch1Desc?.[1]._sequence, order: 'desc',
-      })
-      expect(batch2Desc.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch2Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-
-      const batch3Desc = await archivist.next?.({
-        limit: 20, cursor: batch1Desc?.[1]._sequence, order: 'desc',
-      })
-      expect(batch3Desc.length).toBe(2)
-      expect(await PayloadBuilder.dataHash(batch3Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-    })
-  })
-  generateArchivistNextTests(async () => {
-    return await MemoryArchivist.create({ account: 'random' })
-  })
-})

package/src/spec/MemoryArchivist.spec.ts
DELETED

@@ -1,137 +0,0 @@
-/* eslint-disable max-statements */
-import '@xylabs/vitest-extended'
-
-import { delay } from '@xylabs/delay'
-import { isArchivistInstance, isArchivistModule } from '@xyo-network/archivist-model'
-import type { Id } from '@xyo-network/id-payload-plugin'
-import {
-  asId,
-  IdSchema, isId,
-} from '@xyo-network/id-payload-plugin'
-import { PayloadBuilder } from '@xyo-network/payload-builder'
-import { HDWallet } from '@xyo-network/wallet'
-import {
-  describe, expect, it,
-} from 'vitest'
-
-import { MemoryArchivist } from '../Archivist.ts'
-
-/**
- * @group module
- * @group archivist
- */
-describe('MemoryArchivist', () => {
-  it('should emit cleared events', async () => {
-    const archivist = await MemoryArchivist.create({ account: 'random' })
-
-    expect(isArchivistInstance(archivist)).toBe(true)
-    expect(isArchivistModule(archivist)).toBe(true)
-
-    const clearedEventEmitted = new Promise<void>((resolve) => {
-      archivist.on('cleared', () => resolve())
-    })
-
-    await archivist.clear()
-    return clearedEventEmitted
-  })
-
-  it('should return items inserted in the order they were provided in', async () => {
-    const archivist = await MemoryArchivist.create({ account: 'random' })
-    const payloads: Id[] = Array.from({ length: 100 }, (_, i) => new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: `${i}` }).build())
-    // Ensure payload was create in order provided
-    for (const [index, id] of payloads.entries()) {
-      expect(id?.salt).toBe(`${index}`)
-    }
-
-    const withStorageMeta = await PayloadBuilder.addStorageMeta(payloads)
-
-    // Ensure payload was returned in order provided
-    for (const [index, result] of withStorageMeta.entries()) {
-      expect(isId(result)).toBe(true)
-      const id = asId(result)
-      expect(id).toBeDefined()
-      expect(id?.salt).toBe(`${index}`)
-      expect(await PayloadBuilder.dataHash(result)).toEqual(await PayloadBuilder.dataHash(payloads[index]))
-    }
-
-    const results = await archivist.insert(payloads)
-
-    // Ensure payload was inserted in order provided
-    for (const [index, result] of results.entries()) {
-      expect(isId(result)).toBe(true)
-      const id = asId(result)
-      expect(id).toBeDefined()
-      if (index > 0) {
-        expect(result._sequence > results[index - 1]._sequence).toBeTrue()
-      }
-      if (index < 99) {
-        expect(result._sequence < results[index + 1]._sequence).toBeTrue()
-      }
-      if (id?.salt !== `${index}`) {
-        console.warn('result-', results[index - 1])
-        console.warn('result', result)
-        console.warn('result+', results[index + 1])
-      }
-      expect(id?.salt).toBe(`${index}`)
-      expect(await PayloadBuilder.dataHash(result)).toEqual(await PayloadBuilder.dataHash(payloads[index]))
-    }
-  })
-
-  it('next', async () => {
-    const archivist = await MemoryArchivist.create({ account: await HDWallet.random() })
-    const account = await HDWallet.random()
-
-    const payloads1 = [
-      { schema: 'network.xyo.test', value: 1 },
-    ]
-
-    const payloads2 = [
-      { schema: 'network.xyo.test', value: 2 },
-    ]
-
-    const payloads3 = [
-      { schema: 'network.xyo.test', value: 3 },
-    ]
-
-    const payloads4 = [
-      { schema: 'network.xyo.test', value: 4 },
-    ]
-
-    const insertedPayloads1 = await archivist.insert(payloads1)
-    expect(insertedPayloads1[0]._hash).toBe(await PayloadBuilder.hash(payloads1[0]))
-    expect(insertedPayloads1[0]._dataHash).toBe(await PayloadBuilder.dataHash(payloads1[0]))
-    expect(insertedPayloads1[0]._sequence).toBeDefined()
-    await delay(1)
-    const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account)
-    expect(bw).toBeDefined()
-    expect(payloads).toBeDefined()
-    expect(errors).toBeDefined()
-    await delay(1)
-    await archivist.insert(payloads3)
-    await delay(1)
-    await archivist.insert(payloads4)
-
-    const batch1 = await archivist.next?.({ limit: 2 })
-    expect(batch1).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-    expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(insertedPayloads1[0]))
-
-    const batch2 = await archivist.next?.({ limit: 2, cursor: batch1?.[0]._sequence })
-    expect(batch2).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch2?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
-    expect(await PayloadBuilder.dataHash(batch2?.[1])).toEqual(await PayloadBuilder.dataHash(payloads3[0]))
-
-    // desc
-    const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
-    expect(batch1Desc).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch1Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
-    expect(await PayloadBuilder.dataHash(batch1Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads3[0]))
-
-    const batch2Desc = await archivist.next?.({
-      limit: 2, cursor: batch1Desc[1]._sequence, order: 'desc',
-    })
-    expect(batch2Desc).toBeArrayOfSize(2)
-    expect(await PayloadBuilder.dataHash(batch2Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
-    expect(await PayloadBuilder.dataHash(batch2Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
-  })
-})