@fireproof/core 0.0.7 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
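The headline change in 0.0.8 is serialization: Fireproof.toJSON() now emits the clock, name, and index roots as strings, DbIndex gains toJSON()/fromJSON(), and a new fromJSON() helper rebuilds a database (and its registered indexes) against an existing blockstore. A minimal sketch of that round trip, based only on the API exercised by the tests in this diff (import paths are illustrative):

import Fireproof from './src/fireproof.js'
import DbIndex from './src/db-index.js'
import { fromJSON } from './src/hydrator.js'

const database = Fireproof.storage()
await database.put({ _id: 'doc1', name: 'alice', age: 40 })

// Creating an index also registers it on database.indexes (new in 0.0.8)
const byAge = new DbIndex(database, function (doc, map) { map(doc.age, doc.name) })
await byAge.query({ range: [0, 54] })

// Serializing turns clock CIDs and index roots into strings
const serialized = JSON.parse(JSON.stringify(database))

// Rehydrate against the same blockstore; index roots are lazily re-loaded on first query
const restored = fromJSON(serialized, database.blocks)
const restoredIndex = [...restored.indexes.values()][0]
console.log((await restoredIndex.query({ range: [0, 54] })).rows)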
package/src/db-index.js CHANGED
@@ -1,15 +1,19 @@
- import { create, load } from 'prolly-trees/db-index'
+ // import { create, load } from 'prolly-trees/db-index'
+ import { create, load } from '../../../../prolly-trees/src/db-index.js'
+
  import { sha256 as hasher } from 'multiformats/hashes/sha2'
  import { nocache as cache } from 'prolly-trees/cache'
  import { bf, simpleCompare } from 'prolly-trees/utils'
  import { makeGetBlock } from './prolly.js'
  import { cidsToProof } from './fireproof.js'
+ import { CID } from 'multiformats'
+
  import * as codec from '@ipld/dag-cbor'
  // import { create as createBlock } from 'multiformats/block'
  import { doTransaction } from './blockstore.js'
  import charwise from 'charwise'

- const ALWAYS_REBUILD = true // todo: remove this
+ const ALWAYS_REBUILD = false // todo: make false

  // const arrayCompare = (a, b) => {
  // if (Array.isArray(a) && Array.isArray(b)) {
@@ -37,12 +41,13 @@ const compare = (a, b) => {
  const refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1
  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
- if (!Number.isFinite(aRef)) return 1
+ if (aRef === Infinity) return 1 // need to test this on equal docids!
  // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
  return simpleCompare(aRef, bRef)
  }

- const opts = { cache, chunker: bf(3), codec, hasher, compare }
+ const dbIndexOpts = { cache, chunker: bf(3), codec, hasher, compare }
+ const idIndexOpts = { cache, chunker: bf(3), codec, hasher, compare: simpleCompare }

  const makeDoc = ({ key, value }) => ({ _id: key, ...value })

@@ -87,14 +92,6 @@ const indexEntriesForChanges = (changes, mapFun) => {
  return indexEntries
  }

- const indexEntriesForOldChanges = async (blocks, byIDindexRoot, ids, mapFun) => {
- const { getBlock } = makeGetBlock(blocks)
- const byIDindex = await load({ cid: byIDindexRoot.cid, get: getBlock, ...opts })
-
- const result = await byIDindex.getMany(ids)
- return result
- }
-
  /**
  * Represents an DbIndex for a Fireproof database.
  *
@@ -118,11 +115,31 @@ export default class DbIndex {
  */
  this.mapFun = mapFun

- this.dbIndexRoot = null
- this.dbIndex = null
+ this.database.indexes.set(mapFun.toString(), this)
+
+ this.indexById = { root: null, cid: null }
+ this.indexByKey = { root: null, cid: null }

- this.byIDindexRoot = null
  this.dbHead = null
+
+ this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`
+
+ this.updateIndexPromise = null
+ }
+
+ toJSON () {
+ return { code: this.mapFun?.toString(), clock: { db: this.dbHead?.map(cid => cid.toString()), byId: this.indexById.cid?.toString(), byKey: this.indexByKey.cid?.toString() } }
+ }
+
+ static fromJSON (database, { code, clock: { byId, byKey, db } }) {
+ let mapFun
+ // eslint-disable-next-line
+ eval("mapFun = "+ code)
+ const index = new DbIndex(database, mapFun)
+ index.indexById.cid = CID.parse(byId)
+ index.indexByKey.cid = CID.parse(byKey)
+ index.dbHead = db.map(cid => CID.parse(cid))
+ return index
  }

  /**
@@ -140,13 +157,18 @@ export default class DbIndex {
  * @instance
  */
  async query (query) {
+ // const callId = Math.random().toString(36).substring(2, 7)
  // if (!root) {
  // pass a root to query a snapshot
- await doTransaction('#updateIndex', this.database.blocks, async (blocks) => {
- await this.#updateIndex(blocks)
- })
+ // console.time(callId + '.#updateIndex')
+ await this.#updateIndex(this.database.blocks)
+ // console.timeEnd(callId + '.#updateIndex')
+
  // }
- const response = await doIndexQuery(this.database.blocks, this.dbIndexRoot, this.dbIndex, query)
+ // console.time(callId + '.doIndexQuery')
+ const response = await doIndexQuery(this.database.blocks, this.indexByKey, query)
+ // console.timeEnd(callId + '.doIndexQuery')
+
  return {
  proof: { index: await cidsToProof(response.cids) },
  // TODO fix this naming upstream in prolly/db-DbIndex?
@@ -162,50 +184,54 @@ export default class DbIndex {
  * @private
  * @returns {Promise<void>}
  */
+
  async #updateIndex (blocks) {
+ // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
+ // what would it do in a world where all indexes provide a database snapshot to query?
+ if (this.updateIndexPromise) return this.updateIndexPromise
+ this.updateIndexPromise = this.#innerUpdateIndex(blocks)
+ this.updateIndexPromise.finally(() => { this.updateIndexPromise = null })
+ return this.updateIndexPromise
+ }
+
+ async #innerUpdateIndex (inBlocks) {
+ // const callTag = Math.random().toString(36).substring(4)
+ // console.log(`#updateIndex ${callTag} >`, this.instanceId, this.dbHead?.toString(), this.dbIndexRoot?.cid.toString(), this.indexByIdRoot?.cid.toString())
  // todo remove this hack
  if (ALWAYS_REBUILD) {
  this.dbHead = null // hack
- this.dbIndex = null // hack
+ this.indexByKey = null // hack
  this.dbIndexRoot = null
  }
+ // console.log('dbHead', this.dbHead)
+ // console.time(callTag + '.changesSince')
  const result = await this.database.changesSince(this.dbHead) // {key, value, del}
- if (this.dbHead) {
- const oldChangeEntries = await indexEntriesForOldChanges(
- blocks,
- this.byIDindexRoot,
- result.rows.map(({ key }) => key),
- this.mapFun
- )
- const oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
- const removalResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, oldIndexEntries, opts)
- this.dbIndexRoot = removalResult.root
- this.dbIndex = removalResult.dbIndex
+ // console.timeEnd(callTag + '.changesSince')
+ // console.log('result.rows.length', result.rows.length)

- const removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
- const purgedRemovalResults = await bulkIndex(
- blocks,
- this.byIDindexRoot,
- this.byIDIndex,
- removeByIdIndexEntries,
- opts
- )
- this.byIDindexRoot = purgedRemovalResults.root
- this.byIDIndex = purgedRemovalResults.dbIndex
- }
- const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
- const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
- const addFutureRemovalsResult = await bulkIndex(blocks, this.byIDindexRoot, this.byIDIndex, byIdIndexEntries, opts)
- this.byIDindexRoot = addFutureRemovalsResult.root
- this.byIDIndex = addFutureRemovalsResult.dbIndex
+ // console.time(callTag + '.doTransaction#updateIndex')

- // console.log('indexEntries', indexEntries)
-
- const updateIndexResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, indexEntries, opts)
- this.dbIndexRoot = updateIndexResult.root
- this.dbIndex = updateIndexResult.dbIndex
-
- this.dbHead = result.clock
+ if (result.rows.length === 0) {
+ // console.log('#updateIndex < no changes')
+ this.dbHead = result.clock
+ return
+ }
+ await doTransaction('#updateIndex', inBlocks, async (blocks) => {
+ let oldIndexEntries = []
+ let removeByIdIndexEntries = []
+ if (this.dbHead) { // need a maybe load
+ const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key))
+ oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
+ removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
+ }
+ const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
+ const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
+ this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts)
+ this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts)
+ this.dbHead = result.clock
+ })
+ // console.timeEnd(callTag + '.doTransaction#updateIndex')
+ // console.log(`#updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.dbIndexRoot?.cid.toString(), this.indexByIdRoot?.cid.toString())
  }
  }

@@ -216,45 +242,47 @@ export default class DbIndex {
  * @param {DbIndexEntry[]} indexEntries
  * @private
  */
- async function bulkIndex (blocks, inRoot, inDBindex, indexEntries) {
- if (!indexEntries.length) return { dbIndex: inDBindex, root: inRoot }
+ async function bulkIndex (blocks, inIndex, indexEntries, opts) {
+ if (!indexEntries.length) return inIndex
  const putBlock = blocks.put.bind(blocks)
  const { getBlock } = makeGetBlock(blocks)
  let returnRootBlock
  let returnNode
- if (!inDBindex) {
- for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
- const block = await node.block
- await putBlock(block.cid, block.bytes)
- returnRootBlock = block
- returnNode = node
- }
- } else {
- // const dbIndex = await load({ cid: inRoot.cid, get: getBlock, ...opts }) // todo load from root on refresh
- const { root, blocks } = await inDBindex.bulk(indexEntries)
- returnRootBlock = await root.block
- returnNode = root
- for await (const block of blocks) {
- await putBlock(block.cid, block.bytes)
+ if (!inIndex.root) {
+ const cid = inIndex.cid
+ if (!cid) {
+ for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
+ const block = await node.block
+ await putBlock(block.cid, block.bytes)
+ returnRootBlock = block
+ returnNode = node
+ }
+ return { root: returnNode, cid: returnRootBlock.cid }
  }
- await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
+ inIndex.root = await load({ cid, get: getBlock, ...dbIndexOpts })
+ }
+ const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries)
+ returnRootBlock = await root.block
+ returnNode = root
+ for await (const block of newBlocks) {
+ await putBlock(block.cid, block.bytes)
  }
- return { dbIndex: returnNode, root: returnRootBlock }
+ await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
+ return { root: returnNode, cid: returnRootBlock.cid }
  }

- async function doIndexQuery (blocks, dbIndexRoot, dbIndex, query) {
- if (!dbIndex) {
- const cid = dbIndexRoot && dbIndexRoot.cid
+ async function doIndexQuery (blocks, indexByKey, query) {
+ if (!indexByKey.root) {
+ const cid = indexByKey.cid
  if (!cid) return { result: [] }
  const { getBlock } = makeGetBlock(blocks)
- dbIndex = await load({ cid, get: getBlock, ...opts })
+ indexByKey.root = await load({ cid, get: getBlock, ...dbIndexOpts })
  }
  if (query.range) {
  const encodedRange = query.range.map((key) => charwise.encode(key))
- return dbIndex.range(...encodedRange)
+ return indexByKey.root.range(...encodedRange)
  } else if (query.key) {
  const encodedKey = charwise.encode(query.key)
- console.log('getting key', encodedKey)
- return dbIndex.get(encodedKey)
+ return indexByKey.root.get(encodedKey)
  }
  }
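Alongside the index restructuring above, #updateIndex now guards against concurrent refreshes by handing every caller the same in-flight promise and clearing it in a finally. A standalone sketch of that pattern, with illustrative names that are not part of the package API:

// Concurrent callers share one running refresh instead of each starting their own.
class SharedRefresh {
  constructor (doRefresh) {
    this.doRefresh = doRefresh // async () => {}, supplied by the caller
    this.inFlight = null
  }

  async update () {
    if (this.inFlight) return this.inFlight // second comers get the promise already in flight
    this.inFlight = this.doRefresh()
    this.inFlight.finally(() => { this.inFlight = null }) // allow a fresh run afterwards
    return this.inFlight
  }
}

// Ten concurrent calls trigger a single refresh.
const refresh = new SharedRefresh(async () => { /* rebuild the index here */ })
await Promise.all(Array.from({ length: 10 }, () => refresh.update()))

As the in-diff todo notes, callers that arrive while a refresh is running get that already-started (possibly stale) update rather than a fresh one.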
package/src/fireproof.js CHANGED
@@ -38,6 +38,7 @@ export default class Fireproof {
  this.clock = clock
  this.config = config
  this.authCtx = authCtx
+ this.indexes = new Map()
  }

  /**
@@ -78,7 +79,11 @@ export default class Fireproof {
  */
  toJSON () {
  // todo this also needs to return the index roots...
- return { clock: this.clock }
+ return {
+ clock: this.clock.map(cid => cid.toString()),
+ name: this.name,
+ indexes: [...this.indexes.values()].map((index) => index.toJSON())
+ }
  }

  /**
@@ -133,7 +138,7 @@ export default class Fireproof {
  }

  async #notifyListeners (changes) {
- // await sleep(0)
+ // await sleep(10)
  for (const listener of this.#listeners) {
  await listener(changes)
  }
@@ -221,6 +226,7 @@ export default class Fireproof {
  console.error('failed', event)
  throw new Error('failed to put at storage layer')
  }
+ // console.log('new clock head', this.instanceId, result.head.toString())
  this.clock = result.head // do we want to do this as a finally block
  await this.#notifyListeners([decodedEvent]) // this type is odd
  return {
package/src/hydrator.js ADDED
@@ -0,0 +1,10 @@
+ import Fireproof from './fireproof.js'
+ import DbIndex from './db-index.js'
+
+ export function fromJSON (json, blocks) {
+ const fp = new Fireproof(blocks, json.clock, { name: json.name })
+ for (const index of json.indexes) {
+ DbIndex.fromJSON(fp, index)
+ }
+ return fp
+ }
package/src/prolly.js CHANGED
@@ -164,7 +164,8 @@ export async function put (inBlocks, head, event, options) {

  // Otherwise, we find the common ancestor and update the root and other blocks
  const events = new EventFetcher(blocks)
- // this is returning more events than necessary
+ // todo this is returning more events than necessary, lets define the desired semantics from the top down
+ // good semantics mean we can cache the results of this call
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
  // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
  const prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
@@ -113,7 +113,7 @@ describe('DbIndex query', () => {
  await index.query({ range: [51, 54] })

  console.x('--- make Xander 53')
- const DOCID = 'xxxx-3c3a-4b5e-9c1c-8c5c0c5c0c5c'
+ const DOCID = 'xander-doc'
  const r1 = await database.put({ _id: DOCID, name: 'Xander', age: 53 })
  assert(r1.id, 'should have id')

@@ -2,7 +2,7 @@ import { describe, it, beforeEach } from 'mocha'
  import assert from 'node:assert'
  import Blockstore from '../src/blockstore.js'
  import Fireproof from '../src/fireproof.js'
- import * as codec from '@ipld/dag-cbor'
+ // import * as codec from '@ipld/dag-cbor'

  let database, resp0

@@ -335,21 +335,21 @@ describe('Fireproof', () => {
  assert.equal(res9.rows.length, 0)
  })

- it.skip('docs since repeated changes', async () => {
+ it('docs since repeated changes', async () => {
  assert.equal((await database.changesSince()).rows.length, 1)
  let resp, doc, changes
- for (let index = 0; index < 200; index++) {
+ for (let index = 0; index < 30; index++) {
  const id = '1' + (301 - index).toString()
- console.log(`Putting id: ${id}, index: ${index}`)
+ // console.log(`Putting id: ${id}, index: ${index}`)
  resp = await database.put({ index, _id: id }).catch(e => {
  assert.fail(`put failed on _id: ${id}, error: ${e.message}`)
  })
  assert(resp.id, `Failed to obtain resp.id for _id: ${id}`)

- console.log(`vis for update id: ${id}, index:`, index)
- for await (const line of database.vis()) {
- console.log(line)
- }
+ // console.log(`vis for update id: ${id}, index:`, index)
+ // for await (const line of database.vis()) {
+ // console.log(line)
+ // }

  doc = await database.get(resp.id).catch(e => {
  console.log('failed', e)
@@ -366,20 +366,11 @@ describe('Fireproof', () => {
  assert(!/^bafy/.test(value), `Unexpected "bafy..." value found at index ${index} in row ${JSON.stringify(row)}`)
  }
  })
- if (index > 3) {
- const stored = await database.blocks.get('bafyreicumn7tvssch4xslbe4jjq55c6w3jt4yxyjagkr2tengsudato7vi').catch((e) => {
- console.log(`Error getting block for index ${index}: ${e.message}`)
- })
- if (stored) {
- const doc = codec.decode(await stored.bytes)
- // console.log('stored', JSON.stringify(dec))
- assert.equal(doc.closed, false)
- }
- }
- console.log('changes: ', index, changes.rows.length, JSON.stringify(changes.rows))
+
+ // console.log('changes: ', index, changes.rows.length, JSON.stringify(changes.rows))
  assert.equal(changes.rows.length, index + 2, `failed on ${index}, with ${changes.rows.length} ${id}`)
  }
- }).timeout(20000)
+ }).timeout(30000)

  it('concurrent transactions', async () => {
  assert.equal((await database.changesSince()).rows.length, 1)
@@ -413,4 +404,11 @@ describe('Fireproof', () => {
  // await sleep(1000)
  assert.equal((await database.changesSince()).rows.length, 2)
  }).timeout(20000)
+ it('serialize database', async () => {
+ await database.put({ _id: 'rehy', name: 'drate' })
+ assert.equal((await database.changesSince()).rows.length, 2)
+ const serialized = JSON.parse(JSON.stringify(database))
+ assert.equal(serialized.name, 'helloName')
+ assert.equal(serialized.clock.length, 1)
+ })
  })
@@ -0,0 +1,75 @@
+ import { describe, it, beforeEach } from 'mocha'
+ import assert from 'node:assert'
+ import Fireproof from '../src/fireproof.js'
+ import DbIndex from '../src/db-index.js'
+ import { fromJSON } from '../src/hydrator.js'
+ console.x = function () {}
+
+ describe('DbIndex query', () => {
+ let database, index
+ beforeEach(async () => {
+ database = Fireproof.storage()
+ const docs = [
+ { _id: 'a1s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'alice', age: 40 },
+ { _id: 'b2s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'bob', age: 40 },
+ { _id: 'c3s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'carol', age: 43 },
+ { _id: 'd4s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'dave', age: 48 },
+ { _id: 'e4s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'emily', age: 4 },
+ { _id: 'f4s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'frank', age: 7 }
+ ]
+ for (const doc of docs) {
+ const id = doc._id
+ const response = await database.put(doc)
+ assert(response)
+ assert(response.id, 'should have id')
+ assert.equal(response.id, id)
+ }
+ index = new DbIndex(database, function (doc, map) {
+ map(doc.age, doc.name)
+ })
+ })
+ it('serialize database with index', async () => {
+ await database.put({ _id: 'rehy', name: 'drate', age: 1 })
+ assert.equal((await database.changesSince()).rows.length, 7)
+ const result = await index.query({ range: [0, 54] })
+ assert.equal(result.rows[0].value, 'drate')
+ const serialized = database.toJSON()
+ // console.log('serialized', serialized)
+ assert.equal(serialized.name, 'global')
+ assert.equal(serialized.clock.length, 1)
+ assert.equal(serialized.clock[0].constructor.name, 'String')
+ assert.equal(serialized.indexes.length, 1)
+ assert.equal(serialized.indexes[0].code, `function (doc, map) {
+ map(doc.age, doc.name)
+ }`)
+ assert.equal(serialized.indexes[0].clock.byId.constructor.name, 'String')
+ assert.equal(serialized.indexes[0].clock.byKey.constructor.name, 'String')
+ assert.equal(serialized.indexes[0].clock.db[0].constructor.name, 'String')
+ })
+ it('rehydrate database', async () => {
+ await database.put({ _id: 'rehy', name: 'drate', age: 1 })
+ assert.equal((await database.changesSince()).rows.length, 7)
+ const result = await index.query({ range: [0, 54] })
+ assert.equal(result.rows[0].value, 'drate')
+
+ const serialized = JSON.parse(JSON.stringify(database))
+ // console.log('serialized', JSON.stringify(serialized))
+ // connect it to the same blockstore for testing
+ const newDb = fromJSON(serialized, database.blocks)
+ assert.equal(newDb.name, 'global')
+ assert.equal(newDb.clock.length, 1)
+ assert.equal((await newDb.changesSince()).rows.length, 7)
+ const newIndex = [...newDb.indexes.values()][0]
+ assert.equal(newIndex.mapFun, `function (doc, map) {
+ map(doc.age, doc.name)
+ }`)
+ assert.equal(newIndex.indexById.cid, 'bafyreifuz54ugnq77fur47vwv3dwab7p3gpnf5to6hlnbhv5p4kwo7auoi')
+ assert.equal(newIndex.indexById.root, null)
+
+ assert.equal(newIndex.indexByKey.cid, 'bafyreicr5rpvsxnqchcwk5rxlmdvd3fah2vexmbsp2dvr4cfdxd2q2ycgu')
+ assert.equal(newIndex.indexByKey.root, null)
+
+ const newResult = await newIndex.query({ range: [0, 54] })
+ assert.equal(newResult.rows[0].value, 'drate')
+ })
+ })