@fireproof/core 0.0.6 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/coverage/blockstore.js.html +254 -242
  2. package/coverage/clock.js.html +203 -266
  3. package/coverage/db-index.js.html +426 -213
  4. package/coverage/fireproof.js.html +424 -238
  5. package/coverage/index.html +69 -84
  6. package/coverage/listener.js.html +13 -25
  7. package/coverage/prolly.js.html +267 -219
  8. package/coverage/tmp/coverage-28490-1680373005621-0.json +1 -0
  9. package/coverage/tmp/coverage-28494-1680373004502-0.json +1 -0
  10. package/coverage/tmp/coverage-28500-1680373005593-0.json +1 -0
  11. package/coverage/tmp/coverage-28504-1680373005559-0.json +1 -0
  12. package/coverage/valet.js.html +96 -246
  13. package/hooks/use-fireproof.ts +9 -8
  14. package/package.json +5 -5
  15. package/src/blockstore.js +16 -14
  16. package/src/clock.js +27 -5
  17. package/src/db-index.js +139 -96
  18. package/src/fireproof.js +48 -16
  19. package/src/hydrator.js +10 -0
  20. package/src/listener.js +0 -6
  21. package/src/prolly.js +43 -32
  22. package/src/valet.js +6 -62
  23. package/{src → test}/block.js +6 -6
  24. package/test/clock.test.js +0 -5
  25. package/test/db-index.test.js +11 -10
  26. package/test/fireproof.test.js +74 -19
  27. package/test/helpers.js +1 -1
  28. package/test/hydrator.test.js +75 -0
  29. package/test/prolly.test.js +2 -2
  30. package/test/proofs.test.js +5 -5
  31. package/test/reproduce-fixture-bug.test.js +2 -2
  32. package/coverage/tmp/coverage-42191-1678146904346-0.json +0 -1
  33. package/coverage/tmp/coverage-42193-1678146903521-0.json +0 -1
  34. package/coverage/tmp/coverage-42196-1678146904322-0.json +0 -1
  35. package/coverage/tmp/coverage-42197-1678146904292-0.json +0 -1
  36. package/scripts/propernames/gen.sh +0 -3
  37. package/scripts/randomcid.js +0 -12
  38. package/scripts/words/gen.js +0 -55
package/src/clock.js CHANGED
@@ -22,7 +22,7 @@ import { CIDCounter } from 'prolly-trees/utils'
  * Advance the clock by adding an event.
  *
  * @template T
- * @param {import('./block').BlockFetcher} blocks Block storage.
+ * @param {import('../test/block').BlockFetcher} blocks Block storage.
  * @param {EventLink<T>[]} head The head of the clock.
  * @param {EventLink<T>} event The event to add.
  * @returns {Promise<EventLink<T>[]>} The new head of the clock.
@@ -89,11 +89,12 @@ export class EventBlock extends Block {
 
 /** @template T */
 export class EventFetcher {
-  /** @param {import('./block').BlockFetcher} blocks */
+  /** @param {import('../test/block').BlockFetcher} blocks */
   constructor (blocks) {
     /** @private */
     this._blocks = blocks
     this._cids = new CIDCounter()
+    this._cache = new Map()
   }
 
   /**
@@ -101,10 +102,15 @@ export class EventFetcher {
    * @returns {Promise<EventBlockView<T>>}
    */
   async get (link) {
+    const slink = link.toString()
+    // console.log('get', link.toString())
+    if (this._cache.has(slink)) return this._cache.get(slink)
     const block = await this._blocks.get(link)
     this._cids.add({ address: link })
     if (!block) throw new Error(`missing block: ${link}`)
-    return decodeEventBlock(block.bytes)
+    const got = decodeEventBlock(block.bytes)
+    this._cache.set(slink, got)
+    return got
   }
 
   async all () {
@@ -162,7 +168,7 @@ async function contains (events, a, b) {
 
 /**
  * @template T
- * @param {import('./block').BlockFetcher} blocks Block storage.
+ * @param {import('../test/block').BlockFetcher} blocks Block storage.
  * @param {EventLink<T>[]} head
  * @param {object} [options]
  * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
@@ -200,22 +206,34 @@ export async function * vis (blocks, head, options = {}) {
 }
 
 export async function findEventsToSync (blocks, head) {
+  // const callTag = Math.random().toString(36).substring(7)
   const events = new EventFetcher(blocks)
+  // console.time(callTag + '.findCommonAncestorWithSortedEvents')
   const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
+  // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
+  // console.log('sorted', sorted.length)
+  // console.time(callTag + '.contains')
   const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)))
+  // console.timeEnd(callTag + '.contains')
+
   return { cids: events.cids, events: toSync }
 }
 
 const asyncFilter = async (arr, predicate) =>
+
   Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))
 
 export async function findCommonAncestorWithSortedEvents (events, children) {
+  // const callTag = Math.random().toString(36).substring(7)
+  // console.time(callTag + '.findCommonAncestor')
   const ancestor = await findCommonAncestor(events, children)
+  // console.timeEnd(callTag + '.findCommonAncestor')
   if (!ancestor) {
     throw new Error('failed to find common ancestor event')
   }
-  // Sort the events by their sequence number
+  // console.time(callTag + '.findSortedEvents')
   const sorted = await findSortedEvents(events, children, ancestor)
+  // console.timeEnd(callTag + '.findSortedEvents')
   return { ancestor, sorted }
 }
 
@@ -279,12 +297,14 @@ function findCommonString (arrays) {
  * @param {import('./clock').EventLink<EventData>} tail
  */
 async function findSortedEvents (events, head, tail) {
+  // const callTag = Math.random().toString(36).substring(7)
   // get weighted events - heavier events happened first
   /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
   const weights = new Map()
   const all = await Promise.all(head.map((h) => findEvents(events, h, tail)))
   for (const arr of all) {
     for (const { event, depth } of arr) {
+      // console.log('event value', event.value.data.value)
       const info = weights.get(event.cid.toString())
       if (info) {
         info.weight += depth
@@ -311,6 +331,7 @@ async function findSortedEvents (events, head, tail) {
     .sort((a, b) => b[0] - a[0])
     .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)))
   // console.log('sorted', sorted.map(s => s.value.data.value))
+
   return sorted
 }
 
@@ -321,6 +342,7 @@ async function findSortedEvents (events, head, tail) {
  * @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
  */
 async function findEvents (events, start, end, depth = 0) {
+  // console.log('findEvents', start)
  const event = await events.get(start)
  const acc = [{ event, depth }]
  const { parents } = event.value
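Note: the main change to clock.js is a per-instance cache in EventFetcher, so repeated clock traversals decode each event block only once. Below is a minimal standalone sketch of that memoization pattern; `fetchAndDecode` is a hypothetical stand-in for the blockstore lookup plus decodeEventBlock, not an API from this package.

// Sketch: memoize async block decoding by CID string, as EventFetcher.get now does.
class CachingFetcher {
  constructor (fetchAndDecode) {
    this._fetch = fetchAndDecode
    this._cache = new Map() // CID string -> pending or resolved decode
  }

  async get (link) {
    const slink = link.toString()
    if (this._cache.has(slink)) return this._cache.get(slink)
    const got = this._fetch(link) // cache the promise so concurrent gets share one fetch
    this._cache.set(slink, got)
    return got
  }
}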
package/src/db-index.js CHANGED
@@ -1,32 +1,53 @@
-import { create, load } from 'prolly-trees/db-index'
+// import { create, load } from 'prolly-trees/db-index'
+import { create, load } from '../../../../prolly-trees/src/db-index.js'
+
 import { sha256 as hasher } from 'multiformats/hashes/sha2'
 import { nocache as cache } from 'prolly-trees/cache'
 import { bf, simpleCompare } from 'prolly-trees/utils'
 import { makeGetBlock } from './prolly.js'
 import { cidsToProof } from './fireproof.js'
+import { CID } from 'multiformats'
+
 import * as codec from '@ipld/dag-cbor'
 // import { create as createBlock } from 'multiformats/block'
 import { doTransaction } from './blockstore.js'
 import charwise from 'charwise'
 
-const ALWAYS_REBUILD = true // todo: remove this
+const ALWAYS_REBUILD = false // todo: make false
 
-const arrayCompare = (a, b) => {
-  if (Array.isArray(a) && Array.isArray(b)) {
-    const len = Math.min(a.length, b.length)
-    for (let i = 0; i < len; i++) {
-      const comp = simpleCompare(a[i], b[i])
-      if (comp !== 0) {
-        return comp
-      }
-    }
-    return simpleCompare(a.length, b.length)
-  } else {
-    return simpleCompare(a, b)
-  }
+// const arrayCompare = (a, b) => {
+//   if (Array.isArray(a) && Array.isArray(b)) {
+//     const len = Math.min(a.length, b.length)
+//     for (let i = 0; i < len; i++) {
+//       const comp = simpleCompare(a[i], b[i])
+//       if (comp !== 0) {
+//         return comp
+//       }
+//     }
+//     return simpleCompare(a.length, b.length)
+//   } else {
+//     return simpleCompare(a, b)
+//   }
+// }
+
+const compare = (a, b) => {
+  const [aKey, aRef] = a
+  const [bKey, bRef] = b
+  const comp = simpleCompare(aKey, bKey)
+  if (comp !== 0) return comp
+  return refCompare(aRef, bRef)
+}
+
+const refCompare = (aRef, bRef) => {
+  if (Number.isNaN(aRef)) return -1
+  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
+  if (aRef === Infinity) return 1 // need to test this on equal docids!
+  // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
+  return simpleCompare(aRef, bRef)
 }
 
-const opts = { cache, chunker: bf(3), codec, hasher, compare: arrayCompare }
+const dbIndexOpts = { cache, chunker: bf(3), codec, hasher, compare }
+const idIndexOpts = { cache, chunker: bf(3), codec, hasher, compare: simpleCompare }
 
 const makeDoc = ({ key, value }) => ({ _id: key, ...value })
 
@@ -71,14 +92,6 @@ const indexEntriesForChanges = (changes, mapFun) => {
   return indexEntries
 }
 
-const indexEntriesForOldChanges = async (blocks, byIDindexRoot, ids, mapFun) => {
-  const { getBlock } = makeGetBlock(blocks)
-  const byIDindex = await load({ cid: byIDindexRoot.cid, get: getBlock, ...opts })
-
-  const result = await byIDindex.getMany(ids)
-  return result
-}
-
 /**
  * Represents an DbIndex for a Fireproof database.
  *
@@ -102,11 +115,31 @@ export default class DbIndex {
      */
     this.mapFun = mapFun
 
-    this.dbIndexRoot = null
-    this.dbIndex = null
+    this.database.indexes.set(mapFun.toString(), this)
+
+    this.indexById = { root: null, cid: null }
+    this.indexByKey = { root: null, cid: null }
 
-    this.byIDindexRoot = null
     this.dbHead = null
+
+    this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`
+
+    this.updateIndexPromise = null
+  }
+
+  toJSON () {
+    return { code: this.mapFun?.toString(), clock: { db: this.dbHead?.map(cid => cid.toString()), byId: this.indexById.cid?.toString(), byKey: this.indexByKey.cid?.toString() } }
+  }
+
+  static fromJSON (database, { code, clock: { byId, byKey, db } }) {
+    let mapFun
+    // eslint-disable-next-line
+    eval("mapFun = "+ code)
+    const index = new DbIndex(database, mapFun)
+    index.indexById.cid = CID.parse(byId)
+    index.indexByKey.cid = CID.parse(byKey)
+    index.dbHead = db.map(cid => CID.parse(cid))
+    return index
  }
 
   /**
@@ -124,17 +157,25 @@ export default class DbIndex {
    * @instance
    */
   async query (query) {
+    // const callId = Math.random().toString(36).substring(2, 7)
    // if (!root) {
    // pass a root to query a snapshot
-    await doTransaction('#updateIndex', this.database.blocks, async (blocks) => {
-      await this.#updateIndex(blocks)
-    })
+    // console.time(callId + '.#updateIndex')
+    await this.#updateIndex(this.database.blocks)
+    // console.timeEnd(callId + '.#updateIndex')
+
    // }
-    const response = await doIndexQuery(this.database.blocks, this.dbIndexRoot, this.dbIndex, query)
+    // console.time(callId + '.doIndexQuery')
+    const response = await doIndexQuery(this.database.blocks, this.indexByKey, query)
+    // console.timeEnd(callId + '.doIndexQuery')
+
    return {
      proof: { index: await cidsToProof(response.cids) },
      // TODO fix this naming upstream in prolly/db-DbIndex?
-      rows: response.result.map(({ id, key, row }) => ({ id: key, key: charwise.decode(id), value: row }))
+      rows: response.result.map(({ id, key, row }) => {
+        // console.log('query', id, key, row)
+        return ({ id, key: charwise.decode(key), value: row })
+      })
    }
  }
 
@@ -143,56 +184,55 @@
   * @private
   * @returns {Promise<void>}
   */
+
  async #updateIndex (blocks) {
+    // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
+    // what would it do in a world where all indexes provide a database snapshot to query?
+    if (this.updateIndexPromise) return this.updateIndexPromise
+    this.updateIndexPromise = this.#innerUpdateIndex(blocks)
+    this.updateIndexPromise.finally(() => { this.updateIndexPromise = null })
+    return this.updateIndexPromise
+  }
+
+  async #innerUpdateIndex (inBlocks) {
+    // const callTag = Math.random().toString(36).substring(4)
+    // console.log(`#updateIndex ${callTag} >`, this.instanceId, this.dbHead?.toString(), this.dbIndexRoot?.cid.toString(), this.indexByIdRoot?.cid.toString())
    // todo remove this hack
    if (ALWAYS_REBUILD) {
      this.dbHead = null // hack
-      this.dbIndex = null // hack
+      this.indexByKey = null // hack
      this.dbIndexRoot = null
    }
+    // console.log('dbHead', this.dbHead)
+    // console.time(callTag + '.changesSince')
    const result = await this.database.changesSince(this.dbHead) // {key, value, del}
-    if (this.dbHead) {
-      const oldChangeEntries = await indexEntriesForOldChanges(
-        blocks,
-        this.byIDindexRoot,
-        result.rows.map(({ key }) => key),
-        this.mapFun
-      )
-      const oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
-      const removalResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, oldIndexEntries, opts)
-      this.dbIndexRoot = removalResult.root
-      this.dbIndex = removalResult.dbIndex
-
-      const removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
-      const purgedRemovalResults = await bulkIndex(
-        blocks,
-        this.byIDindexRoot,
-        this.byIDIndex,
-        removeByIdIndexEntries,
-        opts
-      )
-      this.byIDindexRoot = purgedRemovalResults.root
-      this.byIDIndex = purgedRemovalResults.dbIndex
-    }
-    const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
-    const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
-    const addFutureRemovalsResult = await bulkIndex(blocks, this.byIDindexRoot, this.byIDIndex, byIdIndexEntries, opts)
-    this.byIDindexRoot = addFutureRemovalsResult.root
-    this.byIDIndex = addFutureRemovalsResult.dbIndex
+    // console.timeEnd(callTag + '.changesSince')
+    // console.log('result.rows.length', result.rows.length)
 
-    // console.log('indexEntries', indexEntries)
+    // console.time(callTag + '.doTransaction#updateIndex')
 
-    const updateIndexResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, indexEntries, opts)
-    this.dbIndexRoot = updateIndexResult.root
-    this.dbIndex = updateIndexResult.dbIndex
-
-    this.dbHead = result.clock
+    if (result.rows.length === 0) {
+      // console.log('#updateIndex < no changes')
+      this.dbHead = result.clock
+      return
+    }
+    await doTransaction('#updateIndex', inBlocks, async (blocks) => {
+      let oldIndexEntries = []
+      let removeByIdIndexEntries = []
+      if (this.dbHead) { // need a maybe load
+        const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key))
+        oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
+        removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
+      }
+      const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
+      const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
+      this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts)
+      this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts)
+      this.dbHead = result.clock
+    })
+    // console.timeEnd(callTag + '.doTransaction#updateIndex')
+    // console.log(`#updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.dbIndexRoot?.cid.toString(), this.indexByIdRoot?.cid.toString())
  }
-
-  // todo use the DbIndex from other peers?
-  // we might need to add CRDT logic to it for that
-  // it would only be a performance improvement, but might add a lot of complexity
-  // advanceIndex ()) {}
 }
 
 /**
@@ -202,44 +242,47 @@
 * @param {DbIndexEntry[]} indexEntries
 * @private
 */
-async function bulkIndex (blocks, inRoot, inDBindex, indexEntries) {
-  if (!indexEntries.length) return { dbIndex: inDBindex, root: inRoot }
+async function bulkIndex (blocks, inIndex, indexEntries, opts) {
+  if (!indexEntries.length) return inIndex
  const putBlock = blocks.put.bind(blocks)
  const { getBlock } = makeGetBlock(blocks)
  let returnRootBlock
  let returnNode
-  if (!inDBindex) {
-    for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
-      const block = await node.block
-      await putBlock(block.cid, block.bytes)
-      returnRootBlock = block
-      returnNode = node
-    }
-  } else {
-    // const dbIndex = await load({ cid: inRoot.cid, get: getBlock, ...opts }) // todo load from root on refresh
-    const { root, blocks } = await inDBindex.bulk(indexEntries)
-    returnRootBlock = await root.block
-    returnNode = root
-    for await (const block of blocks) {
-      await putBlock(block.cid, block.bytes)
+  if (!inIndex.root) {
+    const cid = inIndex.cid
+    if (!cid) {
+      for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
+        const block = await node.block
+        await putBlock(block.cid, block.bytes)
+        returnRootBlock = block
+        returnNode = node
+      }
+      return { root: returnNode, cid: returnRootBlock.cid }
    }
-    await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
+    inIndex.root = await load({ cid, get: getBlock, ...dbIndexOpts })
+  }
+  const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries)
+  returnRootBlock = await root.block
+  returnNode = root
+  for await (const block of newBlocks) {
+    await putBlock(block.cid, block.bytes)
  }
-  return { dbIndex: returnNode, root: returnRootBlock }
+  await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
+  return { root: returnNode, cid: returnRootBlock.cid }
 }
 
-async function doIndexQuery (blocks, dbIndexRoot, dbIndex, query) {
-  if (!dbIndex) {
-    const cid = dbIndexRoot && dbIndexRoot.cid
+async function doIndexQuery (blocks, indexByKey, query) {
+  if (!indexByKey.root) {
+    const cid = indexByKey.cid
    if (!cid) return { result: [] }
    const { getBlock } = makeGetBlock(blocks)
-    dbIndex = await load({ cid, get: getBlock, ...opts })
+    indexByKey.root = await load({ cid, get: getBlock, ...dbIndexOpts })
  }
  if (query.range) {
    const encodedRange = query.range.map((key) => charwise.encode(key))
-    return dbIndex.range(...encodedRange)
+    return indexByKey.root.range(...encodedRange)
  } else if (query.key) {
    const encodedKey = charwise.encode(query.key)
-    return dbIndex.get(encodedKey)
+    return indexByKey.root.get(encodedKey)
  }
 }
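Note: the rewritten index stores its by-key entries under composite `[key, ref]` keys and orders them with the new compare/refCompare pair, where NaN and Infinity act as open-ended range sentinels for a given key. A small self-contained sketch of that ordering follows; simpleCompare is re-implemented locally here as an assumption (the real code imports it from prolly-trees/utils), and plain values stand in for charwise-encoded keys and CID refs.

// Sketch of the [key, ref] ordering used by the new by-key index.
const simpleCompare = (a, b) => (a < b ? -1 : a > b ? 1 : 0)

const refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1 // NaN sorts before any ref: start-of-range sentinel
  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
  if (aRef === Infinity) return 1 // Infinity sorts after any ref: end-of-range sentinel
  return simpleCompare(aRef, bRef)
}

const compare = ([aKey, aRef], [bKey, bRef]) => {
  const comp = simpleCompare(aKey, bKey)
  return comp !== 0 ? comp : refCompare(aRef, bRef)
}

// Entries with the same key sort by ref; different keys sort by key.
console.log([['b', 2], ['a', 9], ['a', 1]].sort(compare)) // [['a', 1], ['a', 9], ['b', 2]]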
package/src/fireproof.js CHANGED
@@ -1,5 +1,6 @@
-import { put, get, getAll, eventsSince } from './prolly.js'
-import Blockstore, { doTransaction } from './blockstore.js'
+import { vis, put, get, getAll, eventsSince } from './prolly.js'
+import TransactionBlockstore, { doTransaction } from './blockstore.js'
+import charwise from 'charwise'
 
 // const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
 
@@ -26,16 +27,18 @@ export default class Fireproof {
   * @static
   * @returns {Fireproof} - a new Fireproof instance
   */
-  static storage = () => {
-    return new Fireproof(new Blockstore(), [])
+  static storage = (name) => {
+    return new Fireproof(new TransactionBlockstore(name), [], { name })
  }
 
-  constructor (blocks, clock, config = {}, authCtx = {}) {
+  constructor (blocks, clock, config, authCtx = {}) {
+    this.name = config?.name || 'global'
+    this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`
    this.blocks = blocks
    this.clock = clock
    this.config = config
    this.authCtx = authCtx
-    this.instanceId = 'fp.' + Math.random().toString(36).substring(2, 7)
+    this.indexes = new Map()
  }
 
  /**
@@ -49,7 +52,7 @@
   */
  snapshot (clock) {
    // how to handle listeners, views, and config?
-    // todo needs a test for that
+    // todo needs a test for listeners, views, and config
    return new Fireproof(this.blocks, clock || this.clock)
  }
 
@@ -76,7 +79,11 @@
   */
  toJSON () {
    // todo this also needs to return the index roots...
-    return { clock: this.clock }
+    return {
+      clock: this.clock.map(cid => cid.toString()),
+      name: this.name,
+      indexes: [...this.indexes.values()].map((index) => index.toJSON())
+    }
  }
 
  /**
@@ -93,7 +100,7 @@
    if (event) {
      const resp = await eventsSince(this.blocks, this.clock, event)
      const docsMap = new Map()
-      for (const { key, type, value } of resp.result) {
+      for (const { key, type, value } of resp.result.map(decodeEvent)) {
        if (type === 'del') {
          docsMap.set(key, { key, del: true })
        } else {
@@ -105,11 +112,15 @@
      // console.log('change rows', this.instanceId, rows)
    } else {
      const allResp = await getAll(this.blocks, this.clock)
-      rows = allResp.result.map(({ key, value }) => ({ key, value }))
+      rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })))
      dataCIDs = allResp.cids
      // console.log('dbdoc rows', this.instanceId, rows)
    }
-    return { rows, clock: this.clock, proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) } }
+    return {
+      rows,
+      clock: this.clock,
+      proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
+    }
  }
 
  /**
@@ -127,7 +138,7 @@
  }
 
  async #notifyListeners (changes) {
-    // await sleep(0)
+    // await sleep(10)
    for (const listener of this.#listeners) {
      await listener(changes)
    }
@@ -195,7 +206,8 @@
   * @param {Object<{key : string, value: any}>} event - the event to add
   * @returns {Object<{ id: string, clock: CID[] }>} - The result of adding the event to storage
   */
-  async #putToProllyTree (event, clock = null) {
+  async #putToProllyTree (decodedEvent, clock = null) {
+    const event = encodeEvent(decodedEvent)
    if (clock && JSON.stringify(clock) !== JSON.stringify(this.clock)) {
      // we need to check and see what version of the document exists at the clock specified
      // if it is the same as the one we are trying to put, then we can proceed
@@ -214,9 +226,14 @@
      console.error('failed', event)
      throw new Error('failed to put at storage layer')
    }
+    // console.log('new clock head', this.instanceId, result.head.toString())
    this.clock = result.head // do we want to do this as a finally block
-    await this.#notifyListeners([event])
-    return { id: event.key, clock: this.clock, proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) } }
+    await this.#notifyListeners([decodedEvent]) // this type is odd
+    return {
+      id: decodedEvent.key,
+      clock: this.clock,
+      proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
+    }
    // todo should include additions (or split clock)
  }
 
@@ -255,7 +272,7 @@
   */
  async get (key, opts = {}) {
    const clock = opts.clock || this.clock
-    const resp = await get(this.blocks, clock, key)
+    const resp = await get(this.blocks, clock, charwise.encode(key))
 
    // this tombstone is temporary until we can get the prolly tree to delete
    if (!resp || resp.result === null) {
@@ -273,6 +290,10 @@
    return doc
  }
 
+  async * vis () {
+    return yield * vis(this.blocks, this.clock)
+  }
+
  setCarUploader (carUploaderFn) {
    console.log('registering car uploader')
    // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
@@ -290,3 +311,14 @@ export async function cidsToProof (cids) {
  const all = await cids.all()
  return [...all].map((cid) => cid.toString())
 }
+
+function decodeEvent (event) {
+  const decodedKey = charwise.decode(event.key)
+  return { ...event, key: decodedKey }
+}
+
+function encodeEvent (event) {
+  if (!(event && event.key)) return
+  const encodedKey = charwise.encode(event.key)
+  return { ...event, key: encodedKey }
+}
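Note: fireproof.js now runs document keys through charwise before they reach the prolly tree (see encodeEvent/decodeEvent and the charwise.encode call in get), so keys of mixed types get a consistent sort order. A short illustration of what that buys, assuming the published charwise package's documented encode/decode behavior:

// charwise encodes JS values as strings whose lexicographic order matches the
// natural, typewise order of the values, so one index can hold mixed key types.
import charwise from 'charwise'

const keys = [10, 2, 'apple', 'Apple']
const encoded = keys.map(k => charwise.encode(k)).sort()
console.log(encoded.map(e => charwise.decode(e))) // numbers first in numeric order, then strings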
package/src/hydrator.js ADDED
@@ -0,0 +1,10 @@
+import Fireproof from './fireproof.js'
+import DbIndex from './db-index.js'
+
+export function fromJSON (json, blocks) {
+  const fp = new Fireproof(blocks, json.clock, { name: json.name })
+  for (const index of json.indexes) {
+    DbIndex.fromJSON(fp, index)
+  }
+  return fp
+}
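Note: the new hydrator.js rebuilds a Fireproof instance and its indexes from the JSON produced by Fireproof.prototype.toJSON and DbIndex.prototype.toJSON. A hypothetical round-trip sketch follows; the internal module paths and the reuse of db.blocks as the blockstore argument are assumptions for illustration, only fromJSON, toJSON, and the clock/index fields come from this diff.

// Hypothetical round-trip: serialize a database's clock and index roots, then rehydrate.
import Fireproof from './fireproof.js'
import { fromJSON } from './hydrator.js'

const db = Fireproof.storage('my-db')
// ... put documents and attach DbIndex instances to db here ...

const snapshot = db.toJSON()
// => { name, clock: [/* CID strings */], indexes: [{ code, clock: { db, byId, byKey } }] }

// Later, against the same underlying blockstore, rebuild the database and its indexes.
const restored = fromJSON(snapshot, db.blocks)
console.log(restored.name, restored.indexes.size)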
package/src/listener.js CHANGED
@@ -11,12 +11,6 @@
 
 export default class Listener {
  #subcribers = new Map()
-
-  // todo code review if there is a better way that doesn't create a circular reference
-  // because otherwise we need to document that the user must call stopListening
-  // or else the listener will never be garbage collected
-  // maybe we can use WeakRef on the db side
-  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakRef
  #doStopListening = null
 
  constructor (database, routingFn) {