@fireproof/core 0.0.5 → 0.0.7

This diff compares publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between those versions as published.
@@ -15,8 +15,6 @@ export const FireproofCtx = createContext<FireproofCtxValue>({
  ready: false,
  })

-
-
  const inboundSubscriberQueue = new Map()
  const database = Fireproof.storage()
  const listener = new Listener(database)
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@fireproof/core",
- "version": "0.0.5",
+ "version": "0.0.7",
  "description": "Realtime database for IPFS",
  "main": "index.js",
  "type": "module",
@@ -37,12 +37,13 @@
  "cli-color": "^2.0.3",
  "idb": "^7.1.1",
  "multiformats": "^11.0.1",
- "prolly-trees": "^0.2.2",
+ "prolly-trees": "1.0.2",
  "sade": "^1.8.1"
  },
  "devDependencies": {
  "c8": "^7.12.0",
  "fake-indexeddb": "^4.0.1",
+ "flexsearch": "^0.7.31",
  "mocha": "^10.2.0",
  "nanoid": "^4.0.0",
  "standard": "^17.0.0"
@@ -61,12 +62,12 @@
  },
  "repository": {
  "type": "git",
- "url": "git+https://github.com/jchris/fireproof.git"
+ "url": "git+https://github.com/fireproof-storage/fireproof.git"
  },
  "bugs": {
- "url": "https://github.com/jchris/fireproof/issues"
+ "url": "https://github.com/fireproof-storage/fireproof/issues"
  },
- "homepage": "https://github.com/jchris/fireproof#readme",
+ "homepage": "https://github.com/fireproof-storage/fireproof#readme",
  "workspaces": [
  "examples/todomvc"
  ]
package/src/blockstore.js CHANGED
@@ -36,11 +36,15 @@ export default class TransactionBlockstore {
  /** @type {Map<string, Uint8Array>} */
  #oldBlocks = new Map()

- valet = new Valet() // cars by cid
+ valet = null

  #instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
  #inflightTransactions = new Set()

+ constructor (name) {
+ this.valet = new Valet(name)
+ }
+
  /**
  * Get a block from the store.
  *
@@ -111,15 +115,15 @@ export default class TransactionBlockstore {
  * @yields {AnyBlock}
  * @returns {AsyncGenerator<AnyBlock>}
  */
- * entries () {
- // todo needs transaction blocks?
- // for (const [str, bytes] of this.#blocks) {
- // yield { cid: parse(str), bytes }
- // }
- for (const [str, bytes] of this.#oldBlocks) {
- yield { cid: parse(str), bytes }
- }
- }
+ // * entries () {
+ // // needs transaction blocks?
+ // // for (const [str, bytes] of this.#blocks) {
+ // // yield { cid: parse(str), bytes }
+ // // }
+ // for (const [str, bytes] of this.#oldBlocks) {
+ // yield { cid: parse(str), bytes }
+ // }
+ // }

  /**
  * Begin a transaction. Ensures the uncommited blocks are empty at the begining.
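
Note: the main change above is that TransactionBlockstore no longer initializes a shared default Valet; it now takes a database name in its constructor and passes it to Valet, and the entries () generator is commented out. A minimal sketch of the new construction path, assuming the class can be imported directly from the package's src/blockstore.js (the import path and the 'my-db' name are illustrative):

  import TransactionBlockstore from '@fireproof/core/src/blockstore.js' // assumed import path

  // 0.0.5: the class field was `valet = new Valet()`, shared default
  // 0.0.7: the valet is created per instance, keyed by the database name
  const blocks = new TransactionBlockstore('my-db')
  // blocks.valet is now a Valet('my-db') instance
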
package/src/clock.js CHANGED
@@ -1,6 +1,7 @@
  import { Block, encode, decode } from 'multiformats/block'
  import { sha256 } from 'multiformats/hashes/sha2'
  import * as cbor from '@ipld/dag-cbor'
+ import { CIDCounter } from 'prolly-trees/utils'

  /**
  * @template T
@@ -21,7 +22,7 @@ import * as cbor from '@ipld/dag-cbor'
  * Advance the clock by adding an event.
  *
  * @template T
- * @param {import('./block').BlockFetcher} blocks Block storage.
+ * @param {import('../test/block').BlockFetcher} blocks Block storage.
  * @param {EventLink<T>[]} head The head of the clock.
  * @param {EventLink<T>} event The event to add.
  * @returns {Promise<EventLink<T>[]>} The new head of the clock.
@@ -29,10 +30,10 @@ import * as cbor from '@ipld/dag-cbor'
  export async function advance (blocks, head, event) {
  /** @type {EventFetcher<T>} */
  const events = new EventFetcher(blocks)
- const headmap = new Map(head.map(cid => [cid.toString(), cid]))
+ const headmap = new Map(head.map((cid) => [cid.toString(), cid]))

  // Check if the headmap already includes the event, return head if it does
- if (headmap.has(event.toString())) return head
+ if (headmap.has(event.toString())) return { head, cids: events.cids }

  // Does event contain the clock?
  let changed = false
@@ -46,18 +47,18 @@ export async function advance (blocks, head, event) {

  // If the headmap has been changed, return the new headmap values
  if (changed) {
- return [...headmap.values()]
+ return { head: [...headmap.values()], cids: events.cids }
  }

  // Does clock contain the event?
  for (const p of head) {
  if (await contains(events, p, event)) {
- return head
+ return { head, cids: events.cids }
  }
  }

  // Return the head concatenated with the new event if it passes both checks
- return head.concat(event)
+ return { head: head.concat(event), cids: events.cids }
  }

  /**
@@ -88,10 +89,11 @@ export class EventBlock extends Block {

  /** @template T */
  export class EventFetcher {
- /** @param {import('./block').BlockFetcher} blocks */
+ /** @param {import('../test/block').BlockFetcher} blocks */
  constructor (blocks) {
  /** @private */
  this._blocks = blocks
+ this._cids = new CIDCounter()
  }

  /**
@@ -100,9 +102,15 @@ export class EventFetcher {
  */
  async get (link) {
  const block = await this._blocks.get(link)
+ this._cids.add({ address: link })
  if (!block) throw new Error(`missing block: ${link}`)
  return decodeEventBlock(block.bytes)
  }
+
+ async all () {
+ await Promise.all([...this._cids])
+ return this._cids
+ }
  }

  /**
@@ -145,7 +153,7 @@ async function contains (events, a, b) {
  if (link.toString() === b.toString()) return true
  // if any of b's parents are this link, then b cannot exist in any of the
  // tree below, since that would create a cycle.
- if (bevent.parents.some(p => link.toString() === p.toString())) continue
+ if (bevent.parents.some((p) => link.toString() === p.toString())) continue
  const { value: event } = await events.get(link)
  links.push(...event.parents)
  }
@@ -154,17 +162,17 @@ async function contains (events, a, b) {

  /**
  * @template T
- * @param {import('./block').BlockFetcher} blocks Block storage.
+ * @param {import('../test/block').BlockFetcher} blocks Block storage.
  * @param {EventLink<T>[]} head
  * @param {object} [options]
  * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
  */
  export async function * vis (blocks, head, options = {}) {
- const renderNodeLabel = options.renderNodeLabel ?? (b => (b.value.data.value))
+ const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value)
  const events = new EventFetcher(blocks)
  yield 'digraph clock {'
  yield ' node [shape=point fontname="Courier"]; head;'
- const hevents = await Promise.all(head.map(link => events.get(link)))
+ const hevents = await Promise.all(head.map((link) => events.get(link)))
  const links = []
  const nodes = new Set()
  for (const e of hevents) {
@@ -192,46 +200,22 @@ export async function * vis (blocks, head, options = {}) {
  }

  export async function findEventsToSync (blocks, head) {
- const toSync = await findUnknownSortedEvents(blocks, head, await findCommonAncestorWithSortedEvents(blocks, head))
- return toSync
- }
-
- export async function findUnknownSortedEvents (blocks, children, { ancestor, sorted }) {
  const events = new EventFetcher(blocks)
- // const childrenCids = children.map(c => c.toString())
- // const lowerEvent = sorted.find(({ cid }) => childrenCids.includes(cid.toString()))
- // const knownAncestor = await findCommonAncestor(events, [lowerEvent.cid]) // should this be [lowerEvent.cid] ?
- // const knownAncestor = await findCommonAncestor(events, [...children]) // should this be [lowerEvent.cid] ?
- // console.x('already knownAncestor', knownAncestor.toString() === ancestor.toString(),
- // (await (await decodeEventBlock((await blocks.get(knownAncestor)).bytes)).value.data?.value), knownAncestor
- // )
-
- const matchHead = [ancestor]
- const unknownSorted = await asyncFilter(sorted, async (uks) => {
- for (const ev of matchHead) {
- const isIn = await contains(events, ev, uks.cid)
- if (isIn) return false
- }
- return true
- })
- // console.x('unknownSorted contains', unknownSorted.length, sorted.length)
- return unknownSorted
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
+ const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)))
+ return { cids: events.cids, events: toSync }
  }

- const asyncFilter = async (arr, predicate) => Promise.all(arr.map(predicate))
- .then((results) => arr.filter((_v, index) => results[index]))
-
- export async function findCommonAncestorWithSortedEvents (blocks, children) {
- const events = new EventFetcher(blocks)
+ const asyncFilter = async (arr, predicate) =>
+ Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))

+ export async function findCommonAncestorWithSortedEvents (events, children) {
  const ancestor = await findCommonAncestor(events, children)
  if (!ancestor) {
  throw new Error('failed to find common ancestor event')
  }
  // Sort the events by their sequence number
  const sorted = await findSortedEvents(events, children, ancestor)
- // console.x('ancstor', ancestor, (await decodeEventBlock((await blocks.get(ancestor)).bytes)).value.data?.value)
- // sorted.forEach(({ cid, value }) => console.x('xsorted', cid, value.data.value))
  return { ancestor, sorted }
  }

@@ -266,9 +250,7 @@ async function findCommonAncestor (events, children) {
  async function findAncestorCandidate (events, root) {
  const { value: event } = await events.get(root)
  if (!event.parents.length) return root
- return event.parents.length === 1
- ? event.parents[0]
- : findCommonAncestor(events, event.parents)
+ return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
  }

  /**
@@ -303,6 +285,7 @@ async function findSortedEvents (events, head, tail) {
  const all = await Promise.all(head.map((h) => findEvents(events, h, tail)))
  for (const arr of all) {
  for (const { event, depth } of arr) {
+ // console.log('event value', event.value.data.value)
  const info = weights.get(event.cid.toString())
  if (info) {
  info.weight += depth
@@ -327,9 +310,7 @@ async function findSortedEvents (events, head, tail) {
  // sort by weight, and by CID within weight
  const sorted = Array.from(buckets)
  .sort((a, b) => b[0] - a[0])
- .flatMap(([, es]) =>
- es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1))
- )
+ .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)))
  // console.log('sorted', sorted.map(s => s.value.data.value))
  return sorted
  }
@@ -345,8 +326,6 @@ async function findEvents (events, start, end, depth = 0) {
  const acc = [{ event, depth }]
  const { parents } = event.value
  if (parents.length === 1 && String(parents[0]) === String(end)) return acc
- const rest = await Promise.all(
- parents.map((p) => findEvents(events, p, end, depth + 1))
- )
+ const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)))
  return acc.concat(...rest)
  }
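
Note: the clock functions above change their return shapes. advance () now returns { head, cids } instead of a bare head array (EventFetcher counts the CIDs it reads via CIDCounter), and findEventsToSync () builds its own EventFetcher and returns { cids, events } rather than the sorted event list directly. A hedged sketch of how a 0.0.5 caller would adapt (the import path is assumed; blocks, head and event stand in for a caller's existing values):

  import { advance, findEventsToSync } from '@fireproof/core/src/clock.js' // assumed import path

  // 0.0.5: const newHead = await advance(blocks, head, event)
  // 0.0.7: the head is wrapped in an object along with the CIDs that were read
  const { head: newHead, cids } = await advance(blocks, head, event)

  // 0.0.5: const toSync = await findEventsToSync(blocks, newHead)
  // 0.0.7: destructure the sorted events and the CIDs touched while syncing
  const { events: toSync, cids: syncCids } = await findEventsToSync(blocks, newHead)
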
package/src/db-index.js CHANGED
@@ -2,34 +2,48 @@ import { create, load } from 'prolly-trees/db-index'
  import { sha256 as hasher } from 'multiformats/hashes/sha2'
  import { nocache as cache } from 'prolly-trees/cache'
  import { bf, simpleCompare } from 'prolly-trees/utils'
+ import { makeGetBlock } from './prolly.js'
+ import { cidsToProof } from './fireproof.js'
  import * as codec from '@ipld/dag-cbor'
- import { create as createBlock } from 'multiformats/block'
+ // import { create as createBlock } from 'multiformats/block'
  import { doTransaction } from './blockstore.js'
  import charwise from 'charwise'

- const arrayCompare = (a, b) => {
- if (Array.isArray(a) && Array.isArray(b)) {
- const len = Math.min(a.length, b.length)
- for (let i = 0; i < len; i++) {
- const comp = simpleCompare(a[i], b[i])
- if (comp !== 0) {
- return comp
- }
- }
- return simpleCompare(a.length, b.length)
- } else {
- return simpleCompare(a, b)
- }
- }
+ const ALWAYS_REBUILD = true // todo: remove this

- const opts = { cache, chunker: bf(3), codec, hasher, compare: arrayCompare }
+ // const arrayCompare = (a, b) => {
+ // if (Array.isArray(a) && Array.isArray(b)) {
+ // const len = Math.min(a.length, b.length)
+ // for (let i = 0; i < len; i++) {
+ // const comp = simpleCompare(a[i], b[i])
+ // if (comp !== 0) {
+ // return comp
+ // }
+ // }
+ // return simpleCompare(a.length, b.length)
+ // } else {
+ // return simpleCompare(a, b)
+ // }
+ // }

- const ALWAYS_REBUILD = false // todo: remove this
+ const compare = (a, b) => {
+ const [aKey, aRef] = a
+ const [bKey, bRef] = b
+ const comp = simpleCompare(aKey, bKey)
+ if (comp !== 0) return comp
+ return refCompare(aRef, bRef)
+ }

- const makeGetBlock = (blocks) => async (address) => {
- const { cid, bytes } = await blocks.get(address)
- return createBlock({ cid, bytes, hasher, codec })
+ const refCompare = (aRef, bRef) => {
+ if (Number.isNaN(aRef)) return -1
+ if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
+ if (!Number.isFinite(aRef)) return 1
+ // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
+ return simpleCompare(aRef, bRef)
  }
+
+ const opts = { cache, chunker: bf(3), codec, hasher, compare }
+
  const makeDoc = ({ key, value }) => ({ _id: key, ...value })

  /**
@@ -74,11 +88,11 @@ const indexEntriesForChanges = (changes, mapFun) => {
  }

  const indexEntriesForOldChanges = async (blocks, byIDindexRoot, ids, mapFun) => {
- const getBlock = makeGetBlock(blocks)
+ const { getBlock } = makeGetBlock(blocks)
  const byIDindex = await load({ cid: byIDindexRoot.cid, get: getBlock, ...opts })

  const result = await byIDindex.getMany(ids)
- return result.result
+ return result
  }

  /**
@@ -103,7 +117,10 @@ export default class DbIndex {
  * @type {Function}
  */
  this.mapFun = mapFun
- this.indexRoot = null
+
+ this.dbIndexRoot = null
+ this.dbIndex = null
+
  this.byIDindexRoot = null
  this.dbHead = null
  }
@@ -118,23 +135,25 @@ export default class DbIndex {
  /**
  * Query object can have {range}
  * @param {DbQuery} query - the query range to use
- * @param {CID} [root] - an optional root to query a snapshot
  * @returns {Promise<{rows: Array<{id: string, key: string, value: any}>}>}
  * @memberof DbIndex
  * @instance
  */
- async query (query, root = null) {
- if (!root) {
- // pass a root to query a snapshot
- await doTransaction('#updateIndex', this.database.blocks, async (blocks) => {
- await this.#updateIndex(blocks)
- })
- }
- const response = await doIndexQuery(this.database.blocks, root || this.indexRoot, query)
+ async query (query) {
+ // if (!root) {
+ // pass a root to query a snapshot
+ await doTransaction('#updateIndex', this.database.blocks, async (blocks) => {
+ await this.#updateIndex(blocks)
+ })
+ // }
+ const response = await doIndexQuery(this.database.blocks, this.dbIndexRoot, this.dbIndex, query)
  return {
- // TODO fix this naming upstream in prolly/db-DbIndex
- // todo maybe this is a hint about why deletes arent working?
- rows: response.result.map(({ id, key, row }) => ({ id: key, key: charwise.decode(id), value: row }))
+ proof: { index: await cidsToProof(response.cids) },
+ // TODO fix this naming upstream in prolly/db-DbIndex?
+ rows: response.result.map(({ id, key, row }) => {
+ // console.log('query', id, key, row)
+ return ({ id, key: charwise.decode(key), value: row })
+ })
  }
  }

@@ -147,34 +166,47 @@ export default class DbIndex {
  // todo remove this hack
  if (ALWAYS_REBUILD) {
  this.dbHead = null // hack
- this.indexRoot = null // hack
+ this.dbIndex = null // hack
+ this.dbIndexRoot = null
  }
  const result = await this.database.changesSince(this.dbHead) // {key, value, del}
  if (this.dbHead) {
- const oldIndexEntries = (
- await indexEntriesForOldChanges(
- blocks,
- this.byIDindexRoot,
- result.rows.map(({ key }) => key),
- this.mapFun
- )
- ).map((key) => ({ key, del: true })) // should be this
- this.indexRoot = await bulkIndex(blocks, this.indexRoot, oldIndexEntries, opts)
+ const oldChangeEntries = await indexEntriesForOldChanges(
+ blocks,
+ this.byIDindexRoot,
+ result.rows.map(({ key }) => key),
+ this.mapFun
+ )
+ const oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
+ const removalResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, oldIndexEntries, opts)
+ this.dbIndexRoot = removalResult.root
+ this.dbIndex = removalResult.dbIndex
+
  const removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
- this.byIDindexRoot = await bulkIndex(blocks, this.byIDindexRoot, removeByIdIndexEntries, opts)
+ const purgedRemovalResults = await bulkIndex(
+ blocks,
+ this.byIDindexRoot,
+ this.byIDIndex,
+ removeByIdIndexEntries,
+ opts
+ )
+ this.byIDindexRoot = purgedRemovalResults.root
+ this.byIDIndex = purgedRemovalResults.dbIndex
  }
  const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
  const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
- this.byIDindexRoot = await bulkIndex(blocks, this.byIDindexRoot, byIdIndexEntries, opts)
+ const addFutureRemovalsResult = await bulkIndex(blocks, this.byIDindexRoot, this.byIDIndex, byIdIndexEntries, opts)
+ this.byIDindexRoot = addFutureRemovalsResult.root
+ this.byIDIndex = addFutureRemovalsResult.dbIndex
+
  // console.log('indexEntries', indexEntries)
- this.indexRoot = await bulkIndex(blocks, this.indexRoot, indexEntries, opts)
+
+ const updateIndexResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, indexEntries, opts)
+ this.dbIndexRoot = updateIndexResult.root
+ this.dbIndex = updateIndexResult.dbIndex
+
  this.dbHead = result.clock
  }
-
- // todo use the DbIndex from other peers?
- // we might need to add CRDT logic to it for that
- // it would only be a performance improvement, but might add a lot of complexity
- // advanceIndex ()) {}
  }

  /**
@@ -184,37 +216,45 @@ export default class DbIndex {
  * @param {DbIndexEntry[]} indexEntries
  * @private
  */
- async function bulkIndex (blocks, inRoot, indexEntries) {
- if (!indexEntries.length) return inRoot
+ async function bulkIndex (blocks, inRoot, inDBindex, indexEntries) {
+ if (!indexEntries.length) return { dbIndex: inDBindex, root: inRoot }
  const putBlock = blocks.put.bind(blocks)
- const getBlock = makeGetBlock(blocks)
- if (!inRoot) {
+ const { getBlock } = makeGetBlock(blocks)
+ let returnRootBlock
+ let returnNode
+ if (!inDBindex) {
  for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
  const block = await node.block
  await putBlock(block.cid, block.bytes)
- inRoot = block
+ returnRootBlock = block
+ returnNode = node
  }
- return inRoot
  } else {
- const DbIndex = await load({ cid: inRoot.cid, get: getBlock, ...opts })
- const { root, blocks } = await DbIndex.bulk(indexEntries)
+ // const dbIndex = await load({ cid: inRoot.cid, get: getBlock, ...opts }) // todo load from root on refresh
+ const { root, blocks } = await inDBindex.bulk(indexEntries)
+ returnRootBlock = await root.block
+ returnNode = root
  for await (const block of blocks) {
  await putBlock(block.cid, block.bytes)
  }
- return await root.block // if we hold the root we won't have to load every time
+ await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
  }
+ return { dbIndex: returnNode, root: returnRootBlock }
  }

- async function doIndexQuery (blocks, root, query) {
- const cid = root && root.cid
- if (!cid) return { result: [] }
- const getBlock = makeGetBlock(blocks)
- const DbIndex = await load({ cid, get: getBlock, ...opts })
+ async function doIndexQuery (blocks, dbIndexRoot, dbIndex, query) {
+ if (!dbIndex) {
+ const cid = dbIndexRoot && dbIndexRoot.cid
+ if (!cid) return { result: [] }
+ const { getBlock } = makeGetBlock(blocks)
+ dbIndex = await load({ cid, get: getBlock, ...opts })
+ }
  if (query.range) {
  const encodedRange = query.range.map((key) => charwise.encode(key))
- return DbIndex.range(...encodedRange)
+ return dbIndex.range(...encodedRange)
  } else if (query.key) {
  const encodedKey = charwise.encode(query.key)
- return DbIndex.get(encodedKey)
+ console.log('getting key', encodedKey)
+ return dbIndex.get(encodedKey)
  }
  }
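
Note: DbIndex.query () above drops its optional snapshot-root argument and now returns a proof alongside the rows; the index root and node are tracked on the instance as dbIndexRoot and dbIndex. A hedged sketch of the new call shape (the index instance and the range values are illustrative):

  // 0.0.5: const { rows } = await index.query({ range: ['a', 'z'] }, optionalSnapshotRoot)
  // 0.0.7: no snapshot root; the result also carries the index CIDs that were read
  const { rows, proof } = await index.query({ range: ['a', 'z'] })
  // rows: [{ id, key, value }] with keys decoded via charwise
  // proof.index: cidsToProof(...) over the index blocks touched by this query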