@fireproof/core 0.5.7 → 0.5.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fireproof/core",
-  "version": "0.5.7",
+  "version": "0.5.8",
   "description": "Cloudless database for apps, the browser, and IPFS",
   "main": "dist/src/fireproof.js",
   "module": "dist/src/fireproof.mjs",
package/src/blockstore.js CHANGED
@@ -163,7 +163,10 @@ export class TransactionBlockstore {
     await this.doCommit(innerBlockstore)
     if (doSync) {
       // const all =
-      await Promise.all([...this.syncs].map(async sync => sync.sendUpdate(innerBlockstore)))
+      await Promise.all([...this.syncs].map(async sync => sync.sendUpdate(innerBlockstore).catch(e => {
+        console.error('sync error', e)
+        this.syncs.delete(sync)
+      })))
     }
   }
 
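The blockstore change above isolates per-peer failures: each sync's sendUpdate gets its own catch, and a sync that throws is logged and removed from the set instead of rejecting the whole Promise.all. A minimal standalone sketch of that pattern (helper name hypothetical, not part of the package):

const notifyAll = async (syncs, payload) => {
  // fan out to every registered sync; a failing peer is logged and dropped,
  // so the remaining sends still settle and the caller's await resolves
  await Promise.all([...syncs].map(sync =>
    sync.sendUpdate(payload).catch(e => {
      console.error('sync error', e)
      syncs.delete(sync)
    })
  ))
}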
package/src/clock.js CHANGED
@@ -190,7 +190,11 @@ async function contains (events, a, b) {
  */
 export async function * vis (blocks, head, options = {}) {
   // @ts-ignore
-  const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value)
+  const renderNodeLabel = options.renderNodeLabel ?? ((b) => {
+    // @ts-ignore
+    const { key, root, type } = b.value.data
+    return b.cid.toString() + '\n' + JSON.stringify({ key, root: root.cid.toString(), type }, null, 2).replace(/"/g, '\'')
+  })
   const events = new EventFetcher(blocks)
   yield 'digraph clock {'
   yield ' node [shape=point fontname="Courier"]; head;'
@@ -231,6 +235,7 @@ export async function findEventsToSync (blocks, head) {
   // console.time(callTag + '.contains')
   const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)))
   // console.timeEnd(callTag + '.contains')
+  // console.log('toSync.contains', toSync.length)
 
   return { cids: events, events: toSync }
 }
@@ -238,17 +243,21 @@ export async function findEventsToSync (blocks, head) {
 const asyncFilter = async (arr, predicate) =>
   Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))
 
-export async function findCommonAncestorWithSortedEvents (events, children) {
+export async function findCommonAncestorWithSortedEvents (events, children, doFull = false) {
+  // console.trace('findCommonAncestorWithSortedEvents')
   // const callTag = Math.random().toString(36).substring(7)
+  // console.log(callTag + '.children', children.map((c) => c.toString()))
   // console.time(callTag + '.findCommonAncestor')
   const ancestor = await findCommonAncestor(events, children)
   // console.timeEnd(callTag + '.findCommonAncestor')
+  // console.log('ancestor', ancestor.toString())
   if (!ancestor) {
     throw new Error('failed to find common ancestor event')
   }
   // console.time(callTag + '.findSortedEvents')
-  const sorted = await findSortedEvents(events, children, ancestor)
+  const sorted = await findSortedEvents(events, children, ancestor, doFull)
   // console.timeEnd(callTag + '.findSortedEvents')
+  // console.log('sorted', sorted.length)
   return { ancestor, sorted }
 }
 
@@ -261,6 +270,7 @@ export async function findCommonAncestorWithSortedEvents (events, children) {
  */
 async function findCommonAncestor (events, children) {
   if (!children.length) return
+  if (children.length === 1) return children[0]
   const candidates = children.map((c) => [c])
   while (true) {
     let changed = false
@@ -281,7 +291,7 @@ async function findCommonAncestor (events, children) {
  * @param {import('./clock').EventLink<EventData>} root
  */
 async function findAncestorCandidate (events, root) {
-  const { value: event } = await events.get(root)
+  const { value: event } = await events.get(root)// .catch(() => ({ value: { parents: [] } }))
   if (!event.parents.length) return root
   return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
 }
@@ -291,6 +301,7 @@ async function findAncestorCandidate (events, root) {
  * @param {Array<T[]>} arrays
  */
 function findCommonString (arrays) {
+  // console.log('findCommonString', arrays.map((a) => a.map((i) => String(i))))
   arrays = arrays.map((a) => [...a])
   for (const arr of arrays) {
     for (const item of arr) {
@@ -308,15 +319,33 @@ function findCommonString (arrays) {
 /**
  * Find and sort events between the head(s) and the tail.
  * @param {import('./clock').EventFetcher} events
- * @param {import('./clock').EventLink<EventData>[]} head
+ * @param {any[]} head
  * @param {import('./clock').EventLink<EventData>} tail
  */
-async function findSortedEvents (events, head, tail) {
+async function findSortedEvents (events, head, tail, doFull) {
   // const callTag = Math.random().toString(36).substring(7)
   // get weighted events - heavier events happened first
+  // const callTag = Math.random().toString(36).substring(7)
+
   /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
   const weights = new Map()
+  head = [...new Set([...head.map((h) => h.toString())])]
+  // console.log(callTag + '.head', head.length)
+
+  const allEvents = new Set([tail.toString(), ...head])
+  if (!doFull && allEvents.size === 1) {
+    // console.log('head contains tail', tail.toString())
+    return []
+    // const event = await events.get(tail)
+    // return [event]
+  }
+
+  // console.log('finding events')
+  // console.log(callTag + '.head', head.length, [...head.map((h) => h.toString())], tail.toString())
+
+  // console.time(callTag + '.findEvents')
   const all = await Promise.all(head.map((h) => findEvents(events, h, tail)))
+  // console.timeEnd(callTag + '.findEvents')
   for (const arr of all) {
     for (const { event, depth } of arr) {
       // console.log('event value', event.value.data.value)
@@ -345,7 +374,7 @@ async function findSortedEvents (events, head, tail) {
   const sorted = Array.from(buckets)
     .sort((a, b) => b[0] - a[0])
     .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)))
-  // console.log('sorted', sorted.map(s => s.value.data.value))
+  // console.log('sorted', sorted.map(s => s.cid))
 
   return sorted
 }
@@ -357,11 +386,14 @@ async function findSortedEvents (events, head, tail) {
  * @returns {Promise<Array<{ event: EventBlockView<EventData>, depth: number }>>}
  */
 async function findEvents (events, start, end, depth = 0) {
-  // console.log('findEvents', start)
+  // console.log('findEvents', start.toString(), end.toString(), depth)
   const event = await events.get(start)
+  const send = String(end)
   const acc = [{ event, depth }]
   const { parents } = event.value
-  if (parents.length === 1 && String(parents[0]) === String(end)) return acc
+  // if (parents.length === 1 && String(parents[0]) === send) return acc
+  if (parents.findIndex((p) => String(p) === send) !== -1) return acc
+  // if (parents.length === 1) return acc
   const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)))
   return acc.concat(...rest)
 }
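The findSortedEvents changes above add two shortcuts: the head is deduplicated by CID string, and when the deduplicated head is exactly the tail the function returns early because there is nothing to sort (unless doFull forces the full walk). A standalone sketch of that check, with a hypothetical helper name:

const nothingToSort = (head, tail, doFull = false) => {
  // dedupe the head by string form, then test whether the tail is the only member
  const heads = [...new Set(head.map((h) => h.toString()))]
  const all = new Set([tail.toString(), ...heads])
  return !doFull && all.size === 1
}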
package/src/database.js CHANGED
@@ -26,15 +26,16 @@ export const parseCID = cid => (typeof cid === 'string' ? CID.parse(cid) : cid)
  */
 export class Database {
   listeners = new Set()
+  indexes = new Map()
+  rootCache = null
+  eventsCache = new Map()
 
-  // todo refactor this for the next version
   constructor (blocks, clock, config = {}) {
     this.name = config.name
     this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`
     this.blocks = blocks
     this.clock = clock
     this.config = config
-    this.indexes = new Map()
   }
 
   /**
@@ -101,11 +102,22 @@ export class Database {
    * @instance
    */
   async changesSince (event) {
+    // console.log('events for', this.instanceId, event.constructor.name)
     // console.log('changesSince', this.instanceId, event, this.clock)
     let rows, dataCIDs, clockCIDs
     // if (!event) event = []
     if (event) {
-      const resp = await eventsSince(this.blocks, this.clock, event)
+      event = event.map((cid) => cid.toString())
+      const eventKey = JSON.stringify([...event, ...this.clockToJSON()])
+
+      let resp
+      if (this.eventsCache.has(eventKey)) {
+        console.log('events from cache')
+        resp = this.eventsCache.get(eventKey)
+      } else {
+        resp = await eventsSince(this.blocks, this.clock, event)
+        this.eventsCache.set(eventKey, resp)
+      }
       const docsMap = new Map()
       for (const { key, type, value } of resp.result.map(decodeEvent)) {
         if (type === 'del') {
@@ -118,7 +130,9 @@ export class Database {
       clockCIDs = resp.clockCIDs
       // console.log('change rows', this.instanceId, rows)
     } else {
-      const allResp = await getAll(this.blocks, this.clock)
+      const allResp = await getAll(this.blocks, this.clock, this.rootCache)
+      this.rootCache = { root: allResp.root, clockCIDs: allResp.clockCIDs }
+
       rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value }))
       dataCIDs = allResp.cids
       // console.log('dbdoc rows', this.instanceId, rows)
@@ -131,7 +145,9 @@ export class Database {
   }
 
   async allDocuments () {
-    const allResp = await getAll(this.blocks, this.clock)
+    const allResp = await getAll(this.blocks, this.clock, this.rootCache)
+    this.rootCache = { root: allResp.root, clockCIDs: allResp.clockCIDs }
+
     const rows = allResp.result
       .map(({ key, value }) => decodeEvent({ key, value }))
       .map(({ key, value }) => ({ key, value: { _id: key, ...value } }))
@@ -143,7 +159,8 @@ export class Database {
   }
 
   async allCIDs () {
-    const allResp = await getAll(this.blocks, this.clock)
+    const allResp = await getAll(this.blocks, this.clock, this.rootCache, true)
+    this.rootCache = { root: allResp.root, clockCIDs: allResp.clockCIDs }
     // console.log('allcids', allResp.cids, allResp.clockCIDs)
     const cids = await cidsToProof(allResp.cids)
     const clockCids = await cidsToProof(allResp.clockCIDs)
@@ -189,13 +206,13 @@ export class Database {
    */
   async get (key, opts = {}) {
     const clock = opts.clock || this.clock
-    const resp = await get(this.blocks, clock, charwise.encode(key))
-
+    const resp = await get(this.blocks, clock, charwise.encode(key), this.rootCache)
+    this.rootCache = { root: resp.root, clockCIDs: resp.clockCIDs }
     // this tombstone is temporary until we can get the prolly tree to delete
     if (!resp || resp.result === null) {
       throw new Error('Not found')
     }
-    const doc = resp.result
+    const doc = { ...resp.result }
     if (opts.mvcc === true) {
       doc._clock = this.clockToJSON()
     }
@@ -289,7 +306,9 @@ export class Database {
   }
 
   applyClock (prevClock, newClock) {
-    // console.log('applyClock', prevClock, newClock, this.clock)
+    // console.log('prevClock', prevClock.length, prevClock.map((cid) => cid.toString()))
+    // console.log('newClock', newClock.length, newClock.map((cid) => cid.toString()))
+    // console.log('this.clock', this.clock.length, this.clockToJSON())
     const stPrev = prevClock.map(cid => cid.toString())
     const keptPrevClock = this.clock.filter(cid => stPrev.indexOf(cid.toString()) === -1)
     const merged = keptPrevClock.concat(newClock)
@@ -297,8 +316,10 @@ export class Database {
     for (const cid of merged) {
       uniquebyCid.set(cid.toString(), cid)
     }
-    this.clock = Array.from(uniquebyCid.values())
-    // console.log('afterClock', this.clock)
+    this.clock = Array.from(uniquebyCid.values()).sort((a, b) => a.toString().localeCompare(b.toString()))
+    this.rootCache = null
+    this.eventsCache.clear()
+    // console.log('afterClock', this.clock.length, this.clockToJSON())
   }
 
   // /**
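Taken together, the Database changes above introduce two read caches: rootCache keeps the last resolved prolly root for get, allDocuments and allCIDs, and eventsCache memoizes changesSince responses keyed by the requested event plus the current clock; applyClock clears both whenever the clock advances. A standalone sketch of that invalidation pattern (class and method names hypothetical, not the Fireproof API):

class CachedReads {
  rootCache = null
  eventsCache = new Map()

  async withRoot (computeRoot) {
    // reuse the cached root until the clock moves
    if (!this.rootCache) this.rootCache = await computeRoot()
    return this.rootCache
  }

  applyClock () {
    // any clock change invalidates both caches, as Database.applyClock does above
    this.rootCache = null
    this.eventsCache.clear()
  }
}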
package/src/db-index.js CHANGED
@@ -7,7 +7,7 @@ import { sha256 as hasher } from 'multiformats/hashes/sha2'
 import { nocache as cache } from 'prolly-trees/cache'
 // @ts-ignore
 import { bf, simpleCompare } from 'prolly-trees/utils'
-import { makeGetBlock } from './prolly.js'
+import { makeGetBlock, visMerkleTree } from './prolly.js'
 // eslint-disable-next-line no-unused-vars
 import { Database, cidsToProof } from './database.js'
 
@@ -35,8 +35,8 @@ const refCompare = (aRef, bRef) => {
   return simpleCompare(aRef, bRef)
 }
 
-const dbIndexOpts = { cache, chunker: bf(3), codec, hasher, compare }
-const idIndexOpts = { cache, chunker: bf(3), codec, hasher, compare: simpleCompare }
+const dbIndexOpts = { cache, chunker: bf(30), codec, hasher, compare }
+const idIndexOpts = { cache, chunker: bf(30), codec, hasher, compare: simpleCompare }
 
 const makeDoc = ({ key, value }) => ({ _id: key, ...value })
 
@@ -93,6 +93,9 @@ const indexEntriesForChanges = (changes, mapFn) => {
  *
  */
 export class DbIndex {
+  /**
+   * @param {Database} database
+   */
   constructor (database, name, mapFn, clock = null, opts = {}) {
     this.database = database
     if (!database.indexBlocks) {
@@ -164,6 +167,14 @@ export class DbIndex {
     return new DbIndex(database, name, code, clock)
   }
 
+  async visKeyTree () {
+    return await visMerkleTree(this.database.indexBlocks, this.indexById.cid)
+  }
+
+  async visIdTree () {
+    return await visMerkleTree(this.database.indexBlocks, this.indexByKey.cid)
+  }
+
   /**
    * JSDoc for Query type.
    * @typedef {Object} DbQuery
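The new visKeyTree and visIdTree methods expose the index's two prolly trees through visMerkleTree, which (per the prolly.js change below) returns the tree's vis() lines joined into a single string under the vis key. Hypothetical usage, assuming an already-built and updated DbIndex instance named index:

// `index` is an existing DbIndex; construction and index updates are elided
const keyTree = await index.visKeyTree()
const idTree = await index.visIdTree()
console.log(keyTree.vis) // text rendering of one prolly tree
console.log(idTree.vis) // and of the other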
package/src/prolly.js CHANGED
@@ -17,23 +17,24 @@ import * as codec from '@ipld/dag-cbor'
 import { sha256 as hasher } from 'multiformats/hashes/sha2'
 import { doTransaction } from './blockstore.js'
 import { create as createBlock } from 'multiformats/block'
-const blockOpts = { cache, chunker: bf(3), codec, hasher, compare }
+const blockOpts = { cache, chunker: bf(30), codec, hasher, compare }
 
 /**
  * @typedef {import('./blockstore.js').TransactionBlockstore} TransactionBlockstore
  */
 
-const withLog = async (label, fn) => {
-  const resp = await fn()
-  // console.log('withLog', label, !!resp)
-  return resp
-}
+// const withLog = async (label, fn) => {
+//   const resp = await fn()
+//   // console.log('withLog', label, !!resp)
+//   return resp
+// }
 
 // should also return a CIDCounter
-export const makeGetBlock = (blocks) => {
+export const makeGetBlock = blocks => {
   // const cids = new CIDCounter() // this could be used for proofs of mutations
-  const getBlockFn = async (address) => {
-    const { cid, bytes } = await withLog(address, () => blocks.get(address))
+  const getBlockFn = async address => {
+    // const { cid, bytes } = await withLog(address, () => blocks.get(address))
+    const { cid, bytes } = await blocks.get(address)
     // cids.add({ address: cid })
     return createBlock({ cid, bytes, hasher, codec })
   }
@@ -48,25 +49,17 @@ export const makeGetBlock = (blocks) => {
  * @param {*} param0
  * @returns
  */
-async function createAndSaveNewEvent ({
-  inBlocks,
-  bigPut,
-  root,
-  event: inEvent,
-  head,
-  additions,
-  removals = []
-}) {
+async function createAndSaveNewEvent ({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
   let cids
   const { key, value, del } = inEvent
   const data = {
-    root: (root
+    root: root
       ? {
           cid: root.cid,
           bytes: root.bytes, // can we remove this?
          value: root.value // can we remove this?
        }
-      : null),
+      : null,
     key
   }
   // import('./clock').EventLink<import('./clock').EventData>
@@ -94,7 +87,7 @@ async function createAndSaveNewEvent ({
   }
 }
 
-const makeGetAndPutBlock = (inBlocks) => {
+const makeGetAndPutBlock = inBlocks => {
   // const mblocks = new MemoryBlockstore()
   // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
   const { getBlock, cids } = makeGetBlock(inBlocks)
@@ -153,20 +146,24 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
   }
 }
 
-const doProllyBulk = async (inBlocks, head, event) => {
+const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
   const { getBlock, blocks } = makeGetAndPutBlock(inBlocks)
   let bulkSorted = []
   let prollyRootNode = null
   const events = new EventFetcher(blocks)
   if (head.length) {
-    // Otherwise, we find the common ancestor and update the root and other blocks
+    if (!doFull && head.length === 1) {
+      prollyRootNode = await prollyRootFromAncestor(events, head[0], getBlock)
+    } else {
+      // Otherwise, we find the common ancestor and update the root and other blocks
     // todo this is returning more events than necessary, lets define the desired semantics from the top down
     // good semantics mean we can cache the results of this call
-    const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
-    bulkSorted = sorted
-    // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
-    prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
+      const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head, doFull)
+      bulkSorted = sorted
+      // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
+      prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
     // console.log('event', event)
+    }
   }
 
   const bulkOperations = bulkFromEvents(bulkSorted, event)
@@ -176,7 +173,7 @@ const doProllyBulk = async (inBlocks, head, event) => {
   let root
   const newBlocks = []
   // if all operations are deletes, we can just return an empty root
-  if (bulkOperations.every((op) => op.del)) {
+  if (bulkOperations.every(op => op.del)) {
     return { root: null, blocks: [], clockCIDs: await events.all() }
   }
   for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
@@ -196,7 +193,7 @@ const doProllyBulk = async (inBlocks, head, event) => {
  *
  * @param {import('./blockstore.js').Blockstore} inBlocks Bucket block storage.
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
- * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
+ * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
  * @param {object} [options]
  * @returns {Promise<any>}
  */
@@ -248,18 +245,24 @@ export async function put (inBlocks, head, event, options) {
  * @param {TransactionBlockstore} inBlocks Bucket block storage.
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
  */
-export async function root (inBlocks, head) {
+export async function root (inBlocks, head, doFull = false) {
   if (!head.length) {
     throw new Error('no head')
   }
-  const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head)
+  // console.log('root', head.map(h => h.toString()))
+  const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull)
   // todo maybe these should go to a temp blockstore?
-  await doTransaction('root', inBlocks, async (transactionBlocks) => {
-    const { bigPut } = makeGetAndPutBlock(transactionBlocks)
-    for (const nb of newBlocks) {
-      bigPut(nb)
-    }
-  }, false)
+  await doTransaction(
+    'root',
+    inBlocks,
+    async transactionBlocks => {
+      const { bigPut } = makeGetAndPutBlock(transactionBlocks)
+      for (const nb of newBlocks) {
+        bigPut(nb)
+      }
+    },
+    false
+  )
   return { clockCIDs, node: newProllyRootNode }
 }
 
@@ -286,22 +289,40 @@ export async function eventsSince (blocks, head, since) {
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 *
- * @returns {Promise<{cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
+ * @returns {Promise<{root: any, cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
 *
 */
-export async function getAll (blocks, head) {
+export async function getAll (blocks, head, rootCache = null, doFull = false) {
   // todo use the root node left around from put, etc
   // move load to a central place
   if (!head.length) {
-    return { clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] }
+    return { root: null, clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] }
   }
-  const { node: prollyRootNode, clockCIDs } = await root(blocks, head)
+  const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache, doFull)
 
   if (!prollyRootNode) {
-    return { clockCIDs, cids: new CIDCounter(), result: [] }
+    return { root: null, clockCIDs, cids: new CIDCounter(), result: [] }
   }
   const { result, cids } = await prollyRootNode.getAllEntries() // todo params
-  return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
+  return { root: prollyRootNode, clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
+}
+
+async function rootOrCache (blocks, head, rootCache, doFull = false) {
+  let node
+  let clockCIDs
+  if (!doFull && rootCache && rootCache.root) {
+    // console.log('get root from cache', rootCache)
+    node = rootCache.root
+    clockCIDs = rootCache.clockCIDs
+  } else {
+    // console.log('finding root')
+    // const callTag = Math.random().toString(36).substring(7)
+    // console.time(callTag + '.root')
+    ;({ node, clockCIDs } = await root(blocks, head, doFull))
+    // console.timeEnd(callTag + '.root')
+    // console.log('found root')
+  }
+  return { node, clockCIDs }
 }
 
 /**
@@ -309,17 +330,19 @@ export async function getAll (blocks, head) {
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
  * @param {string} key The key of the value to retrieve.
  */
-export async function get (blocks, head, key) {
+export async function get (blocks, head, key, rootCache = null) {
   // instead pass root from db? and always update on change
   if (!head.length) {
     return { cids: new CIDCounter(), result: null }
   }
-  const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head)
+
+  const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache)
+
   if (!prollyRootNode) {
     return { clockCIDs, cids: new CIDCounter(), result: null }
   }
   const { result, cids } = await prollyRootNode.get(key)
-  return { result, cids, clockCIDs }
+  return { result, cids, clockCIDs, root: prollyRootNode }
 }
 
 export async function * vis (blocks, head) {
@@ -336,8 +359,19 @@ export async function * vis (blocks, head) {
 }
 
 export async function visMerkleTree (blocks, head) {
-  if (!head.length) {
-    return { cids: new CIDCounter(), result: null }
+  // if (!head) return
+  if (head && !Array.isArray(head)) {
+    const getBl = makeGetBlock(blocks)
+    const prollyRootNode = await load({
+      cid: head,
+      get: getBl.getBlock,
+      ...blockOpts
+    })
+    const lines = []
+    for await (const line of prollyRootNode.vis()) {
+      lines.push(line)
+    }
+    return { vis: lines.join('\n'), cids: new CIDCounter() }
   }
   const { node: prollyRootNode, cids } = await root(blocks, head)
   const lines = []
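With the new signatures above, getAll and get accept an optional rootCache and return the resolved root so callers can thread it back in on the next read, which is how Database uses them in database.js. A hedged sketch of that calling pattern (blocks and head stand in for a TransactionBlockstore and a Merkle clock head):

import { getAll } from './prolly.js'

async function readTwice (blocks, head) {
  let rootCache = null
  const first = await getAll(blocks, head, rootCache) // resolves the prolly root from the clock
  rootCache = { root: first.root, clockCIDs: first.clockCIDs }
  const second = await getAll(blocks, head, rootCache) // reuses the cached prolly root
  return [first.result, second.result]
}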
package/src/sync.js CHANGED
@@ -1,5 +1,6 @@
 import SimplePeer from 'simple-peer'
 import { parseCID } from './database.js'
+import { decodeEventBlock } from './clock.js'
 import { blocksToCarBlock, blocksToEncryptedCarBlock } from './valet.js'
 import { CarReader } from '@ipld/car'
 
@@ -15,7 +16,7 @@ export class Sync {
    */
   constructor (database, PeerClass = SimplePeer) {
     this.database = database
-    this.database.blocks.syncs.add(this)
+    this.database.blocks.syncs.add(this) // should this happen during setup?
     this.PeerClass = PeerClass
     this.pushBacklog = new Promise((resolve, reject) => {
       this.pushBacklogResolve = resolve
@@ -63,8 +64,13 @@ export class Sync {
 
   async gotData (data) {
     // console.log('got data', data.toString())
+    let reader = null
     try {
-      const reader = await CarReader.fromBytes(data)
+      reader = await CarReader.fromBytes(data)
+    } catch (e) {
+      // console.log('not a car', data.toString())
+    }
+    if (reader) {
       const blz = new Set()
       for await (const block of reader.blocks()) {
         blz.add(block)
@@ -87,17 +93,18 @@ export class Sync {
         get: async cid => await reader.get(cid),
         lastCid: [...blz][0].cid // doesn't matter
       }, false)
-      this.database.applyClock([], roots)
+      // first arg could be the roots parents?
+      // get the roots parents
+      const parents = await Promise.all(roots.map(async (cid) => {
+        const rbl = await reader.get(cid)
+        const block = await decodeEventBlock(rbl.bytes)
+        return block.value.parents
+      }))
+      this.database.applyClock(parents.flat(), roots)
       this.database.notifyReset()
       // console.log('after', this.database.clockToJSON())
       this.pushBacklogResolve({ ok: true })
-    } catch (e) {
-      // console.error(e)
-      // if e.message matche 'CBOR' we can ignore it
-      if (!e.message.match(/CBOR|fromBytes/)) {
-        throw e
-      }
-
+    } else {
       // data is a json string, parse it
       const message = JSON.parse(data.toString())
       // console.log('got message', message)
@@ -123,7 +130,7 @@ export class Sync {
   }
 
   async sendUpdate (blockstore) {
-    console.log('send update from', this.database.instanceId)
+    // console.log('send update from', this.database.instanceId)
     // todo should send updates since last sync
     const newCar = await blocksToCarBlock(blockstore.lastCid, blockstore)
     this.peer.send(newCar.bytes)
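gotData now probes the payload instead of relying on the old catch-and-rethrow: it tries to parse a CAR file first and falls back to treating the bytes as a JSON control message. A standalone sketch of that branching (function name hypothetical):

import { CarReader } from '@ipld/car'

async function classifyPayload (data) {
  let reader = null
  try {
    reader = await CarReader.fromBytes(data)
  } catch (e) {
    // not a CAR file, fall through to JSON
  }
  if (reader) return { kind: 'car', reader }
  return { kind: 'message', message: JSON.parse(data.toString()) }
}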
package/src/valet.js CHANGED
@@ -16,7 +16,7 @@ import { Buffer } from 'buffer'
 // @ts-ignore
 import * as codec from 'encrypted-block'
 import { rawSha1 as sha1sync } from './sha1.js'
-const chunker = bf(3)
+const chunker = bf(30)
 
 const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT
 // ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT