@fireproof/vendor 2.0.1 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -59
- package/src/p-limit/index.d.ts +81 -0
- package/src/p-limit/index.js +104 -0
- package/src/p-limit/license +9 -0
- package/src/p-limit/readme.md +129 -0
- package/src/@web3-storage/pail/LICENSE.md +0 -232
- package/src/@web3-storage/pail/README.md +0 -84
- package/src/@web3-storage/pail/cli.js +0 -218
- package/src/@web3-storage/pail/dist/src/api.d.ts +0 -59
- package/src/@web3-storage/pail/dist/src/api.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/batch/api.d.ts +0 -31
- package/src/@web3-storage/pail/dist/src/batch/api.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/batch/index.d.ts +0 -23
- package/src/@web3-storage/pail/dist/src/batch/index.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/batch/shard.d.ts +0 -3
- package/src/@web3-storage/pail/dist/src/batch/shard.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/block.d.ts +0 -35
- package/src/@web3-storage/pail/dist/src/block.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/clock/api.d.ts +0 -10
- package/src/@web3-storage/pail/dist/src/clock/api.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/clock/index.d.ts +0 -48
- package/src/@web3-storage/pail/dist/src/clock/index.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/crdt/api.d.ts +0 -26
- package/src/@web3-storage/pail/dist/src/crdt/api.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/crdt/batch/api.d.ts +0 -11
- package/src/@web3-storage/pail/dist/src/crdt/batch/api.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/crdt/batch/index.d.ts +0 -5
- package/src/@web3-storage/pail/dist/src/crdt/batch/index.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/crdt/index.d.ts +0 -9
- package/src/@web3-storage/pail/dist/src/crdt/index.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/diff.d.ts +0 -13
- package/src/@web3-storage/pail/dist/src/diff.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/index.d.ts +0 -10
- package/src/@web3-storage/pail/dist/src/index.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/merge.d.ts +0 -5
- package/src/@web3-storage/pail/dist/src/merge.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/src/shard.d.ts +0 -43
- package/src/@web3-storage/pail/dist/src/shard.d.ts.map +0 -1
- package/src/@web3-storage/pail/dist/tsconfig.tsbuildinfo +0 -1
- package/src/@web3-storage/pail/src/api.js +0 -1
- package/src/@web3-storage/pail/src/api.ts +0 -90
- package/src/@web3-storage/pail/src/batch/api.js +0 -1
- package/src/@web3-storage/pail/src/batch/api.ts +0 -59
- package/src/@web3-storage/pail/src/batch/index.js +0 -258
- package/src/@web3-storage/pail/src/batch/shard.js +0 -13
- package/src/@web3-storage/pail/src/block.js +0 -75
- package/src/@web3-storage/pail/src/clock/api.js +0 -1
- package/src/@web3-storage/pail/src/clock/api.ts +0 -12
- package/src/@web3-storage/pail/src/clock/index.js +0 -182
- package/src/@web3-storage/pail/src/crdt/api.js +0 -1
- package/src/@web3-storage/pail/src/crdt/api.ts +0 -33
- package/src/@web3-storage/pail/src/crdt/batch/api.js +0 -1
- package/src/@web3-storage/pail/src/crdt/batch/api.ts +0 -30
- package/src/@web3-storage/pail/src/crdt/batch/index.js +0 -155
- package/src/@web3-storage/pail/src/crdt/index.js +0 -354
- package/src/@web3-storage/pail/src/diff.js +0 -151
- package/src/@web3-storage/pail/src/index.js +0 -406
- package/src/@web3-storage/pail/src/merge.js +0 -43
- package/src/@web3-storage/pail/src/shard.js +0 -180
|
@@ -1,354 +0,0 @@
|
|
|
1
|
-
// eslint-disable-next-line no-unused-vars
|
|
2
|
-
import * as API from './api.js'
|
|
3
|
-
import * as Clock from '../clock/index.js'
|
|
4
|
-
import { EventFetcher, EventBlock } from '../clock/index.js'
|
|
5
|
-
import * as Pail from '../index.js'
|
|
6
|
-
import { ShardBlock } from '../shard.js'
|
|
7
|
-
import { MemoryBlockstore, MultiBlockFetcher } from '../block.js'
|
|
8
|
-
import * as Batch from '../batch/index.js'
|
|
9
|
-
|
|
10
|
-
/**
 * Put a value (a CID) for the given key. If the key exists its value is
 * overwritten.
 *
 * This is the merkle-clock aware put: it replays any concurrent operations
 * since the common ancestor of `head` onto the pail, applies this put, then
 * advances the clock with a new 'put' event.
 *
 * @param {API.BlockFetcher} blocks Bucket block storage.
 * @param {API.EventLink<API.Operation>[]} head Merkle clock head.
 * @param {string} key The key of the value to put.
 * @param {API.UnknownLink} value The value to put.
 * @returns {Promise<API.Result>}
 */
export const put = async (blocks, head, key, value) => {
  // Layer an in-memory store over the caller's store so newly created blocks
  // are readable by subsequent fetches within this call.
  const mblocks = new MemoryBlockstore()
  blocks = new MultiBlockFetcher(mblocks, blocks)

  // Headless clock: create a fresh root shard, put into it and emit the
  // first event.
  if (!head.length) {
    const shard = await ShardBlock.create()
    mblocks.putSync(shard.cid, shard.bytes)
    const result = await Pail.put(blocks, shard.cid, key, value)
    /** @type {API.Operation} */
    const data = { type: 'put', root: result.root, key, value }
    const event = await EventBlock.create(data, head)
    // NOTE(review): unlike the multi-head path below, the event block is not
    // putSync'd into mblocks before advancing — presumably Clock.advance does
    // not need to fetch the event when head is empty; TODO confirm.
    head = await Clock.advance(blocks, head, event.cid)
    return {
      root: result.root,
      additions: [shard, ...result.additions],
      removals: result.removals,
      head,
      event
    }
  }

  /** @type {EventFetcher<API.Operation>} */
  const events = new EventFetcher(blocks)
  const ancestor = await findCommonAncestor(events, head)
  if (!ancestor) throw new Error('failed to find common ancestor event')

  // Start from the pail root recorded in the common ancestor event.
  const aevent = await events.get(ancestor)
  let { root } = aevent.value.data

  // Replay, in deterministic order, every event between the ancestor and the
  // current head(s), accumulating the shard blocks they add/remove.
  const sorted = await findSortedEvents(events, head, ancestor)
  /** @type {Map<string, API.ShardBlockView>} */
  const additions = new Map()
  /** @type {Map<string, API.ShardBlockView>} */
  const removals = new Map()

  for (const { value: event } of sorted) {
    let result
    if (event.data.type === 'put') {
      result = await Pail.put(blocks, root, event.data.key, event.data.value)
    } else if (event.data.type === 'del') {
      result = await Pail.del(blocks, root, event.data.key)
    } else if (event.data.type === 'batch') {
      const batch = await Batch.create(blocks, root)
      for (const op of event.data.ops) {
        if (op.type !== 'put') throw new Error(`unsupported batch operation: ${op.type}`)
        await batch.put(op.key, op.value)
      }
      result = await batch.commit()
    } else {
      // @ts-expect-error type does not exist on never
      throw new Error(`unknown operation: ${event.data.type}`)
    }

    root = result.root
    for (const a of result.additions) {
      // Make replayed shards fetchable for the next iteration.
      mblocks.putSync(a.cid, a.bytes)
      additions.set(a.cid.toString(), a)
    }
    for (const r of result.removals) {
      removals.set(r.cid.toString(), r)
    }
  }

  // Apply this call's put on top of the replayed root.
  const result = await Pail.put(blocks, root, key, value)
  // if we didn't change the pail we're done
  if (result.root.toString() === root.toString()) {
    return { root, additions: [], removals: [], head }
  }

  for (const a of result.additions) {
    mblocks.putSync(a.cid, a.bytes)
    additions.set(a.cid.toString(), a)
  }
  for (const r of result.removals) {
    removals.set(r.cid.toString(), r)
  }

  // Record the operation as a new clock event and advance the head.
  /** @type {API.Operation} */
  const data = { type: 'put', root: result.root, key, value }
  const event = await EventBlock.create(data, head)
  mblocks.putSync(event.cid, event.bytes)
  head = await Clock.advance(blocks, head, event.cid)

  // filter blocks that were added _and_ removed
  for (const k of removals.keys()) {
    if (additions.has(k)) {
      additions.delete(k)
      removals.delete(k)
    }
  }

  return {
    root: result.root,
    additions: [...additions.values()],
    removals: [...removals.values()],
    head,
    event
  }
}
|
|
119
|
-
|
|
120
|
-
/**
 * Delete the value for the given key from the bucket. If the key is not found
 * no operation occurs.
 *
 * @param {API.BlockFetcher} blocks Bucket block storage.
 * @param {API.EventLink<API.Operation>[]} head Merkle clock head.
 * @param {string} key The key of the value to delete.
 * @param {object} [options]
 * @returns {Promise<API.Result>}
 */
export const del = async (blocks, head, key, options) => {
  // Deliberately unimplemented stub - always rejects.
  throw new Error('not implemented')
}
|
|
133
|
-
|
|
134
|
-
/**
 * Determine the effective pail root given the current merkle clock head.
 *
 * Clocks with multiple head events may return blocks that were added or
 * removed while playing forward events from their common ancestor.
 *
 * @param {API.BlockFetcher} blocks Bucket block storage.
 * @param {API.EventLink<API.Operation>[]} head Merkle clock head.
 * @returns {Promise<{ root: API.ShardLink } & API.ShardDiff>}
 */
export const root = async (blocks, head) => {
  if (!head.length) throw new Error('cannot determine root of headless clock')

  // Layer an in-memory store over the caller's store so shards created during
  // replay are fetchable within this call.
  const mblocks = new MemoryBlockstore()
  blocks = new MultiBlockFetcher(mblocks, blocks)

  /** @type {EventFetcher<API.Operation>} */
  const events = new EventFetcher(blocks)

  // Single head: the root recorded in that event is authoritative, no replay.
  if (head.length === 1) {
    const event = await events.get(head[0])
    const { root } = event.value.data
    return { root, additions: [], removals: [] }
  }

  const ancestor = await findCommonAncestor(events, head)
  if (!ancestor) throw new Error('failed to find common ancestor event')

  // Start from the pail root recorded in the common ancestor event.
  const aevent = await events.get(ancestor)
  let { root } = aevent.value.data

  // Replay events between ancestor and head(s) in deterministic order,
  // tracking the shard blocks added/removed along the way.
  const sorted = await findSortedEvents(events, head, ancestor)
  /** @type {Map<string, API.ShardBlockView>} */
  const additions = new Map()
  /** @type {Map<string, API.ShardBlockView>} */
  const removals = new Map()

  for (const { value: event } of sorted) {
    let result
    if (event.data.type === 'put') {
      result = await Pail.put(blocks, root, event.data.key, event.data.value)
    } else if (event.data.type === 'del') {
      result = await Pail.del(blocks, root, event.data.key)
    } else if (event.data.type === 'batch') {
      const batch = await Batch.create(blocks, root)
      for (const op of event.data.ops) {
        if (op.type !== 'put') throw new Error(`unsupported batch operation: ${op.type}`)
        await batch.put(op.key, op.value)
      }
      result = await batch.commit()
    } else {
      // @ts-expect-error type does not exist on never
      throw new Error(`unknown operation: ${event.data.type}`)
    }

    root = result.root
    for (const a of result.additions) {
      // Make replayed shards fetchable for the next iteration.
      mblocks.putSync(a.cid, a.bytes)
      additions.set(a.cid.toString(), a)
    }
    for (const r of result.removals) {
      removals.set(r.cid.toString(), r)
    }
  }

  // filter blocks that were added _and_ removed
  for (const k of removals.keys()) {
    if (additions.has(k)) {
      additions.delete(k)
      removals.delete(k)
    }
  }

  return {
    root,
    additions: [...additions.values()],
    removals: [...removals.values()]
  }
}
|
|
213
|
-
|
|
214
|
-
/**
 * Retrieve the stored value for the given key, resolving the effective pail
 * root from the clock head first. Returns undefined for a headless clock.
 *
 * @param {API.BlockFetcher} blocks Bucket block storage.
 * @param {API.EventLink<API.Operation>[]} head Merkle clock head.
 * @param {string} key The key of the value to retrieve.
 */
export const get = async (blocks, head, key) => {
  if (!head.length) return
  const res = await root(blocks, head)
  // Blocks created while resolving the root must be readable during the get.
  const fetcher = res.additions.length
    ? new MultiBlockFetcher(new MemoryBlockstore(res.additions), blocks)
    : blocks
  return Pail.get(fetcher, res.root, key)
}
|
|
227
|
-
|
|
228
|
-
/**
 * Iterate entries of the pail identified by the clock head. Yields nothing
 * for a headless clock.
 *
 * @param {API.BlockFetcher} blocks Bucket block storage.
 * @param {API.EventLink<API.Operation>[]} head Merkle clock head.
 * @param {API.EntriesOptions} [options]
 */
export const entries = async function * (blocks, head, options) {
  if (!head.length) return
  const res = await root(blocks, head)
  // Blocks created while resolving the root must be readable during listing.
  const fetcher = res.additions.length
    ? new MultiBlockFetcher(new MemoryBlockstore(res.additions), blocks)
    : blocks
  yield * Pail.entries(fetcher, res.root, options)
}
|
|
241
|
-
|
|
242
|
-
/**
 * Find the common ancestor event of the passed children. A common ancestor is
 * the first single event in the DAG that _all_ paths from children lead to.
 *
 * @param {EventFetcher<API.Operation>} events
 * @param {API.EventLink<API.Operation>[]} children
 */
const findCommonAncestor = async (events, children) => {
  if (!children.length) return
  // One growing chain of ancestors per child event.
  const chains = children.map((child) => [child])
  for (;;) {
    let advanced = false
    for (const chain of chains) {
      const next = await findAncestorCandidate(events, chain[chain.length - 1])
      if (!next) continue
      advanced = true
      chain.push(next)
      // As soon as one event appears in every chain, it is the ancestor.
      const common = findCommonString(chains)
      if (common) return common
    }
    // No chain could be extended - no common ancestor exists.
    if (!advanced) return
  }
}
|
|
265
|
-
|
|
266
|
-
/**
 * Step one level up the event DAG from `root`: the event itself if it has no
 * parents, its sole parent, or the common ancestor of multiple parents.
 *
 * @param {EventFetcher<API.Operation>} events
 * @param {API.EventLink<API.Operation>} root
 */
const findAncestorCandidate = async (events, root) => {
  const block = await events.get(root)
  const { parents } = block.value
  if (parents.length === 0) return root
  if (parents.length === 1) return parents[0]
  return findCommonAncestor(events, parents)
}
|
|
277
|
-
|
|
278
|
-
/**
 * Return the first item (scanning arrays and items in order) whose string
 * form occurs in every other array, or undefined when none is shared.
 *
 * @template {{ toString: () => string }} T
 * @param {Array<T[]>} arrays
 */
const findCommonString = (arrays) => {
  // Work on shallow copies so caller arrays are never touched.
  const copies = arrays.map((a) => [...a])
  for (const current of copies) {
    for (const item of current) {
      const everywhere = copies.every(
        (other) => other === current || other.some((i) => String(i) === String(item))
      )
      if (everywhere) return item
    }
  }
}
|
|
296
|
-
|
|
297
|
-
/**
 * Find and sort events between the head(s) and the tail.
 * @param {EventFetcher<API.Operation>} events
 * @param {API.EventLink<API.Operation>[]} head
 * @param {API.EventLink<API.Operation>} tail
 */
const findSortedEvents = async (events, head, tail) => {
  // Nothing to replay when the tail IS the single head.
  if (head.length === 1 && head[0].toString() === tail.toString()) {
    return []
  }

  // Weight each event - deeper (older) events accumulate more weight.
  const traversals = await Promise.all(head.map((h) => findEvents(events, h, tail)))
  /** @type {Map<string, { event: API.EventBlockView<API.Operation>, weight: number }>} */
  const weights = new Map()
  for (const found of traversals) {
    for (const { event, depth } of found) {
      const id = event.cid.toString()
      const existing = weights.get(id)
      if (existing) {
        existing.weight += depth
      } else {
        weights.set(id, { event, weight: depth })
      }
    }
  }

  // Bucket events that share the same weight.
  /** @type {Map<number, API.EventBlockView<API.Operation>[]>} */
  const buckets = new Map()
  for (const { event, weight } of weights.values()) {
    const group = buckets.get(weight)
    if (group) {
      group.push(event)
    } else {
      buckets.set(weight, [event])
    }
  }

  // Heaviest bucket first; ties within a bucket ordered by CID string.
  return [...buckets]
    .sort(([wa], [wb]) => wb - wa)
    .flatMap(([, group]) => group.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)))
}
|
|
340
|
-
|
|
341
|
-
/**
 * Collect all events reachable from `start` down to (but excluding) `end`,
 * tagging each with its depth from `start`.
 *
 * @param {EventFetcher<API.Operation>} events
 * @param {API.EventLink<API.Operation>} start
 * @param {API.EventLink<API.Operation>} end
 * @returns {Promise<Array<{ event: API.EventBlockView<API.Operation>, depth: number }>>}
 */
const findEvents = async (events, start, end, depth = 0) => {
  const block = await events.get(start)
  const found = [{ event: block, depth }]
  const { parents } = block.value
  // Stop descending when the sole parent is the search boundary.
  if (parents.length === 1 && String(parents[0]) === String(end)) return found
  const below = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)))
  return found.concat(...below)
}
|
|
@@ -1,151 +0,0 @@
|
|
|
1
|
-
// eslint-disable-next-line no-unused-vars
|
|
2
|
-
import * as API from './api.js'
|
|
3
|
-
import { ShardFetcher } from './shard.js'
|
|
4
|
-
|
|
5
|
-
/**
|
|
6
|
-
* @typedef {string} K
|
|
7
|
-
* @typedef {[before: null, after: API.UnknownLink]} AddV
|
|
8
|
-
* @typedef {[before: API.UnknownLink, after: API.UnknownLink]} UpdateV
|
|
9
|
-
* @typedef {[before: API.UnknownLink, after: null]} DeleteV
|
|
10
|
-
* @typedef {[key: K, value: AddV|UpdateV|DeleteV]} KV
|
|
11
|
-
* @typedef {KV[]} KeysDiff
|
|
12
|
-
* @typedef {{ keys: KeysDiff, shards: API.ShardDiff }} CombinedDiff
|
|
13
|
-
*/
|
|
14
|
-
|
|
15
|
-
/**
 * Compute the key and shard-block differences between two pail DAGs.
 *
 * @param {API.BlockFetcher} blocks Bucket block storage.
 * @param {API.ShardLink} a Base DAG.
 * @param {API.ShardLink} b Comparison DAG.
 * @returns {Promise<CombinedDiff>}
 */
export const difference = async (blocks, a, b) => {
  // Identical roots means identical DAGs - nothing to diff.
  if (isEqual(a, b)) return { keys: [], shards: { additions: [], removals: [] } }

  const shards = new ShardFetcher(blocks)
  const [ashard, bshard] = await Promise.all([shards.get(a), shards.get(b)])

  const aents = new Map(ashard.value.entries)
  const bents = new Map(bshard.value.entries)

  // Entry values are either a plain link (a value) or an array
  // [shardLink, value?] (a link to a child shard, optionally with a value).
  const keys = /** @type {Map<K, AddV|UpdateV|DeleteV>} */(new Map())
  // The two roots always differ at this point, so b's root is an addition and
  // a's root a removal (cancelled out later if they reappear on both sides).
  const additions = new Map([[bshard.cid.toString(), bshard]])
  const removals = new Map([[ashard.cid.toString(), ashard]])

  // find shards removed in B
  for (const [akey, aval] of ashard.value.entries) {
    const bval = bents.get(akey)
    if (bval) continue
    if (!Array.isArray(aval)) {
      // Plain value deleted in B.
      keys.set(`${ashard.value.prefix}${akey}`, [aval, null])
      continue
    }
    // if shard link _with_ value
    if (aval[1] != null) {
      keys.set(`${ashard.value.prefix}${akey}`, [aval[1], null])
    }
    // Every key in the removed subtree is a deletion.
    for await (const s of collect(shards, aval[0])) {
      for (const [k, v] of s.value.entries) {
        if (!Array.isArray(v)) {
          keys.set(`${s.value.prefix}${k}`, [v, null])
        } else if (v[1] != null) {
          keys.set(`${s.value.prefix}${k}`, [v[1], null])
        }
      }
      removals.set(s.cid.toString(), s)
    }
  }

  // find shards added or updated in B
  for (const [bkey, bval] of bshard.value.entries) {
    const aval = aents.get(bkey)
    if (!Array.isArray(bval)) {
      // B holds a plain value for this key: added, collapsed from a shard
      // link, or updated.
      if (!aval) {
        keys.set(`${bshard.value.prefix}${bkey}`, [null, bval])
      } else if (Array.isArray(aval)) {
        keys.set(`${bshard.value.prefix}${bkey}`, [aval[1] ?? null, bval])
      } else if (!isEqual(aval, bval)) {
        keys.set(`${bshard.value.prefix}${bkey}`, [aval, bval])
      }
      continue
    }
    if (aval && Array.isArray(aval)) { // updated in B
      if (isEqual(aval[0], bval[0])) {
        // Same child shard - only the attached value may have changed.
        if (bval[1] != null && (aval[1] == null || !isEqual(aval[1], bval[1]))) {
          keys.set(`${bshard.value.prefix}${bkey}`, [aval[1] ?? null, bval[1]])
        }
        continue // updated value?
      }
      // Child shards differ - recurse and merge the nested diff.
      const res = await difference(blocks, aval[0], bval[0])
      for (const shard of res.shards.additions) {
        additions.set(shard.cid.toString(), shard)
      }
      for (const shard of res.shards.removals) {
        removals.set(shard.cid.toString(), shard)
      }
      for (const [k, v] of res.keys) {
        keys.set(k, v)
      }
    } else if (aval) { // updated in B value => link+value
      if (bval[1] == null) {
        keys.set(`${bshard.value.prefix}${bkey}`, [aval, null])
      } else if (!isEqual(aval, bval[1])) {
        keys.set(`${bshard.value.prefix}${bkey}`, [aval, bval[1]])
      }
      // Every key under the new child shard is an addition.
      for await (const s of collect(shards, bval[0])) {
        for (const [k, v] of s.value.entries) {
          if (!Array.isArray(v)) {
            keys.set(`${s.value.prefix}${k}`, [null, v])
          } else if (v[1] != null) {
            keys.set(`${s.value.prefix}${k}`, [null, v[1]])
          }
        }
        additions.set(s.cid.toString(), s)
      }
    } else { // added in B
      // NOTE(review): this records the child shard link bval[0] as the added
      // value, unlike the sibling branches which record bval[1]; looks
      // intentional for a bare link but verify against callers/tests.
      keys.set(`${bshard.value.prefix}${bkey}`, [null, bval[0]])
      for await (const s of collect(shards, bval[0])) {
        for (const [k, v] of s.value.entries) {
          if (!Array.isArray(v)) {
            keys.set(`${s.value.prefix}${k}`, [null, v])
          } else if (v[1] != null) {
            keys.set(`${s.value.prefix}${k}`, [null, v[1]])
          }
        }
        additions.set(s.cid.toString(), s)
      }
    }
  }

  // filter blocks that were added _and_ removed from B
  for (const k of removals.keys()) {
    if (additions.has(k)) {
      additions.delete(k)
      removals.delete(k)
    }
  }

  return {
    keys: [...keys.entries()].sort((a, b) => a[0] < b[0] ? -1 : 1),
    shards: { additions: [...additions.values()], removals: [...removals.values()] }
  }
}
|
|
132
|
-
|
|
133
|
-
/**
 * Compare two links by their canonical string form.
 *
 * @param {API.UnknownLink} a
 * @param {API.UnknownLink} b
 */
const isEqual = (a, b) => {
  return a.toString() === b.toString()
}
|
|
138
|
-
|
|
139
|
-
/**
 * Depth-first traversal yielding the shard at `root` and every shard linked
 * (transitively) from its entries.
 *
 * @param {import('./shard.js').ShardFetcher} shards
 * @param {API.ShardLink} root
 * @returns {AsyncIterableIterator<API.ShardBlockView>}
 */
async function * collect (shards, root) {
  const shard = await shards.get(root)
  yield shard
  for (const [, value] of shard.value.entries) {
    // Array-valued entries are [shardLink, value?] - follow the link.
    if (Array.isArray(value)) yield * collect(shards, value[0])
  }
}
|