hyperbee2 2.7.1 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -7,16 +7,525 @@ npm install hyperbee2
7
7
  Next major version for [hyperbee](https://github.com/holepunchto/hyperbee).
8
8
  Will be merged in there and released a new major when fully done.
9
9
 
10
+ An append-only B-tree on top of a [Hypercore][hypercore].
11
+
10
12
  ## Usage
11
13
 
12
14
  ```js
13
- const Hyperbee2 = require('hyperbee2')
15
+ import Hyperbee2 from 'hyperbee2'
16
+ import Corestore from 'corestore'
17
+
18
+ // Create/open tree
19
+ const tree = new Hyperbee2(new Corestore('./mystore'))
20
+
21
+ // Wait for tree to become available
22
+ await tree.ready()
14
23
 
15
- const db = new Hyperbee2(store)
24
+ // Write some values to tree
25
+ const batch = tree.write()
26
+ batch.tryPut(Buffer.from('name'), Buffer.from('example'))
27
+ batch.tryPut(Buffer.from('email'), Buffer.from('example@example.com'))
28
+ await batch.flush()
16
29
 
17
- // see tests for more
30
+ // Read values from tree
31
+ const name = await tree.get(Buffer.from('name'))
32
+ const email = await tree.get(Buffer.from('email'))
33
+
34
+ // Print values
35
+ console.log(name.value.toString()) // example
36
+ console.log(email.value.toString()) // example@example.com
18
37
  ```
19
38
 
20
39
  ## License
21
40
 
22
41
  Apache-2.0
42
+
43
+ ## API
44
+
45
+ ### Hyperbee
46
+
47
+ #### `const db = new Hyperbee(store, [options])`
48
+
49
+ Make a new Hyperbee2 instance. The `store` argument expects a
50
+ [Corestore][corestore].
51
+
52
+ Options include:
53
+
54
+ ```js
55
+ {
56
+ key: null, // Buffer or String. Key of Hypercore to load via Corestore
57
+ maxCacheSize: 4096, // Max number of nodes to keep in NodeCache
58
+ core: Hypercore(...), // Hypercore within the Corestore to use (defaults to loading key, or using name='bee')
59
+ view: false, // Is this a view of an open Hyperbee? (i.e. do not close underlying store)
60
+ writable: true, // Is append / truncate allowed on the underlying Hypercore?
61
+ unbatch: 0, // Number of write batches to rollback during bootstrap
62
+ autoUpdate: false, // Reload root node when underlying Hypercore is appended to?
63
+ preload: null, // A function called by ready() after the Hypercore is ready. Can be async.
64
+ wait: true, // Wait for Hypercore to download blocks
65
+ }
66
+ ```
67
+
68
+ #### `await db.ready()`
69
+
70
+ Ensures the underlying [Hypercore][hypercore] is ready and prepares the Hyperbee
71
+ for use. If `autoUpdate` was set to true in the constructor's options,
72
+ this will start watching the [Hypercore][hypercore] for new writes.
73
+
74
+ Calling `get()` will call this automatically for you.
75
+
76
+ #### `await db.close()`
77
+
78
+ Fully close the Hyperbee. If it is not a view on another Hyperbee,
79
+ this will also close its [Hypercore][hypercore].
80
+
81
+ #### `db.head()`
82
+
83
+ Returns an object with the following properties:
84
+
85
+ ```
86
+ {
87
+ length, // Number of blocks from the start of Hypercore that apply to this tree.
88
+ key, // Buffer or null. The key of the underlying Hypercore.
89
+ }
90
+ ```
91
+
92
+ If the Hyperbee is not ready, this will return null.
93
+
94
+ #### `db.cache`
95
+
96
+ Read only. The NodeCache used by this Hyperbee.
97
+
98
+ #### `db.core`
99
+
100
+ Read only. The [Hypercore][hypercore] used by this Hyperbee.
101
+
102
+ #### `db.opening`
103
+
104
+ Read only. A Promise that resolves to `undefined` once the [Corestore][corestore]
105
+ is ready.
106
+
107
+ #### `db.closing`
108
+
109
+ Read only. Initially null. When `db.close()` is called, this is set to
110
+ a Promise that resolves to `undefined` when the close completes.
111
+
112
+ #### `db.opened`
113
+
114
+ Read only. Boolean indicating whether the [Corestore][corestore] has opened.
115
+
116
+ #### `db.closed`
117
+
118
+ Read only. Boolean indicating whether the [Corestore][corestore] has closed.
119
+
120
+ #### `db.replicate()`
121
+
122
+ Calls `replicate` on underlying [Corestore][corestore].
123
+
124
+ #### `db.checkout([options])`
125
+
126
+ Returns a new Hyperbee as a view on the underlying [Hypercore][hypercore].
127
+
128
+ Options:
129
+
130
+ ```js
131
+ {
132
+ writable: false, // Boolean. Will the new tree be writable?
133
+ length: this.core.length, // Number. Length of blocks used from the Hypercore
134
+ key: null, // Buffer or null. Key of the Hypercore
135
+ }
136
+ ```
137
+
138
+ #### `db.move([options])`
139
+
140
+ Replaces the root of the tree with a new entry, potentially using a new
141
+ [Hypercore][hypercore].
142
+
143
+ Options:
144
+
145
+ ```js
146
+ {
147
+ writable: this.writable, // Boolean. Is this tree writable after the move?
148
+ length: this.core.length, // Number. Length of blocks used from the Hypercore
149
+ key: null, // Buffer or null. Key of the Hypercore
150
+ }
151
+ ```
152
+
153
+ #### `db.snapshot()`
154
+
155
+ Returns a new Hyperbee that is a read only view of the current tree.
156
+
157
+ #### `db.undo(n)`
158
+
159
+ Returns a new Hyperbee that is a writable view of the current tree,
160
+ with the last `n` write batches ignored.
161
+
162
+ #### `db.write([options])`
163
+
164
+ Returns a [WriteBatch](#writebatch) object through which the tree can be updated.
165
+
166
+ Options:
167
+
168
+ ```js
169
+ {
170
+ length: -1, // Length of blocks used from the Hypercore
171
+ // (i.e. what point in the hypercore is the write
172
+ // going to extend when flush() is called?).
173
+ // If -1, the length will be calculated on flush()
174
+ // using root.seq + 1.
175
+ key: null, // Buffer or null. Key of the Hypercore.
176
+ autoUpdate: true, // Boolean. Should Hyperbee automatically reflect updates
177
+ // after each flush()?
178
+ compat: false, // Boolean. Write blocks compatible with Hyperbee 1?
179
+ type: encoding.TYPE_LATEST, // Number. Block format to use.
180
+ inlineValueSize: 1024, // Values smaller than this byte length are
181
+ // written inline in the node. Larger values
182
+ // are referenced via a pointer into the block.
183
+ preferredBlockSize: 4096, // Try to write blocks of approximately this size
184
+ // when flushing updates.
185
+ }
186
+ ```
187
+
188
+ Errors:
189
+
190
+ - If the tree is not writable this will throw an Error
191
+
192
+ #### `db.createReadStream([options])`
193
+
194
+ Returns a [streamx][streamx] Readable Stream. This is async iterable.
195
+
196
+ Options:
197
+
198
+ ```js
199
+ {
200
+ prefetch: true, // Prefetch future blocks after yielding an entry?
201
+ reverse: false, // Iterate backwards over keys?
202
+ limit: -1, // Max number of entries to yield (-1 means no limit)
203
+ gte: undefined, // Buffer. Key lower bound (inclusive)
204
+ gt: undefined, // Buffer. Key lower bound (exclusive)
205
+ lte: undefined, // Buffer. Key upper bound (inclusive)
206
+ lt: undefined, // Buffer. Key upper bound (exclusive)
207
+ highWaterMark: 16384, // Size of read ahead buffer calculated
208
+ // as: number of entries * 1024
209
+ }
210
+ ```
211
+
212
+ Iterating over the stream will yield the following properties:
213
+
214
+ ```js
215
+ {
216
+ core, // Hypercore: the hypercore the entry is stored in
217
+ offset, // Number: the index of the entry in the block
218
+ seq, // Number: the sequence number of the block in the hypercore
219
+ key, // Buffer: the key of the entry
220
+ value, // Buffer: the value of the entry
221
+ }
222
+ ```
223
+
224
+ Example:
225
+
226
+ ```js
227
+ for await (const data of b.createReadStream()) {
228
+ console.log(data.key, '-->', data.value)
229
+ }
230
+ ```
231
+
232
+ #### `db.createDiffStream(right, [options])`
233
+
234
+ Returns a [streamx][streamx] Readable Stream that provides
235
+ synchronized iteration over two trees. This is async iterable.
236
+
237
+ Options: Same as options for `createReadStream()`.
238
+
239
+ Iterating over the stream will yield the same properties as
240
+ `createReadStream()` split into left (`db` in this case) and right
241
+ (`right` parameter).
242
+
243
+ ```js
244
+ {
245
+ left: {core, offset, seq, key, value}, // see createReadStream()
246
+ right: {core, offset, seq, key, value}, // see createReadStream()
247
+ }
248
+ ```
249
+
250
+ Example:
251
+
252
+ ```js
253
+ import Hyperbee from './index.js'
254
+ import Corestore from 'corestore'
255
+
256
+ const b1 = new Hyperbee(new Corestore('./store1'))
257
+ const b2 = new Hyperbee(new Corestore('./store2'))
258
+
259
+ await b1.ready()
260
+ await b2.ready()
261
+
262
+ const w1 = b1.write()
263
+ w1.tryPut(Buffer.from('A'), Buffer.from('A'))
264
+ w1.tryPut(Buffer.from('B'), Buffer.from('B'))
265
+ w1.tryPut(Buffer.from('C'), Buffer.from('C'))
266
+ await w1.flush()
267
+
268
+ const w2 = b2.write()
269
+ w2.tryPut(Buffer.from('A'), Buffer.from('A'))
270
+ w2.tryPut(Buffer.from('C'), Buffer.from('C'))
271
+ w2.tryPut(Buffer.from('E'), Buffer.from('E'))
272
+ await w2.flush()
273
+
274
+ for await (const data of b1.createDiffStream(b2)) {
275
+ console.log(data.left?.key.toString(), data.right?.key.toString())
276
+ }
277
+ ```
278
+
279
+ Example output:
280
+
281
+ ```js
282
+ A A
283
+ B undefined
284
+ C C
285
+ undefined E
286
+ ```
287
+
288
+ #### `db.createChangesStream([options])`
289
+
290
+ Returns a [streamx][streamx] Readable Stream for iterating over all
291
+ batches previously written to the tree. This is async iterable.
292
+
293
+ Options:
294
+
295
+ ```js
296
+ {
297
+ head: null, // null means use this.tree.head()
298
+ highWaterMark: 16384, // Size of read ahead buffer calculated
299
+ // as: number of entries * 1024
300
+ timeout: 0, // Wait at most this many milliseconds (0 means no timeout).
301
+ // Defaults to the value of the Hyperbee's timeout option.
302
+ wait: true, // Wait for Hypercore to download blocks
303
+ // Defaults to the value of the Hyperbee's wait option.
304
+ }
305
+ ```
306
+
307
+ Iterating over the stream will yield:
308
+
309
+ ```js
310
+ {
311
+ head: {
312
+ length, // Number: number of blocks from the start of Hypercore
313
+ // that apply to this version of the tree
314
+ key, // Buffer or null: the key of the Hypercore for this version
315
+ },
316
+ tail: {
317
+ length, // Number: number of blocks from the start of Hypercore
318
+ // that apply to the previous version of the tree
319
+ key, // Buffer or null: the key of the Hypercore for the previous
320
+ // version
321
+ },
322
+ batch: [ // Blocks written in this batch
323
+ {tree, keys, values, cohorts, metadata, ...},
324
+ ...
325
+ ],
326
+ }
327
+ ```
328
+
329
+ #### `await db.peek([range])`
330
+
331
+ Attempts to get the first entry within the given range.
332
+
333
+ Returns `null` if no entry exists in the range, or an object with
334
+ the following properties on success:
335
+
336
+ ```js
337
+ {
338
+ core, // Hypercore: the hypercore the entry is stored in
339
+ offset, // Number: the index of the entry in the block
340
+ seq, // Number: the sequence number of the block in the hypercore
341
+ key, // Buffer: the key of the entry
342
+ value, // Buffer: the value of the entry
343
+ }
344
+ ```
345
+
346
+ The `range` argument accepts the same properties as the options for
347
+ [`createReadStream()`](#dbcreatereadstreamoptions).
348
+
349
+ #### `await db.download([range])`
350
+
351
+ Fetches all entries in the given range. The promise resolves once
352
+ all matching entries have been fetched.
353
+
354
+ The `range` argument accepts the same properties as the options for
355
+ [`createReadStream()`](#dbcreatereadstreamoptions).
356
+
357
+ #### `await db.get(key, [options])`
358
+
359
+ Attempt to find an entry by its key.
360
+
361
+ Returns `null` if no entry exists in the range, or an object with
362
+ the following properties on success:
363
+
364
+ ```js
365
+ {
366
+ core, // Hypercore: the hypercore the entry is stored in
367
+ offset, // Number: the index of the entry in the block
368
+ seq, // Number: the sequence number of the block in the hypercore
369
+ key, // Buffer: the key of the entry
370
+ value, // Buffer: the value of the entry
371
+ }
372
+ ```
373
+
374
+ Options:
375
+
376
+ ```js
377
+ {
378
+ timeout, // Number: wait at most this many milliseconds (0 means no timeout)
379
+ wait, // Boolean: wait for Hypercore to download blocks
380
+ }
381
+ ```
382
+
383
+ #### `b.on('ready', listener)`
384
+
385
+ Emitted once the Hyperbee is ready for use.
386
+
387
+ #### `b.on('update', listener)`
388
+
389
+ Emitted in the following scenarios:
390
+
391
+ - When a `WriteBatch` is flushed and its `autoUpdate` option is `true`
392
+ (the default).
393
+ - When the underlying core is appended to (locally or remotely) and the
394
+ `Hyperbee`'s `autoUpdate` option is `true`.
395
+ - After a `move()` call on the `Hyperbee`.
396
+ - After a rollback completes because the `unbatch` option to the
397
+ `Hyperbee` constructor was greater than `0`.
398
+
399
+ Warning: this event may be emitted multiple times for the same
400
+ update if `autoUpdate` is set on the `Hyperbee` and on `WriteBatch`:
401
+
402
+ ```js
403
+ const b = new Hyperbee(store, { autoUpdate: true })
404
+ b.on('update', () => console.log('New update'))
405
+
406
+ const w = b.write(/* autoUpdate is true by default */)
407
+ w.tryPut(Buffer.from('key'), Buffer.from('value'))
408
+ await w.flush()
409
+
410
+ // The following will be printed:
411
+ // New update
412
+ // New update
413
+ ```
414
+
415
+ #### `b.on('close', listener)`
416
+
417
+ Emitted once the `close()` method completes.
418
+
419
+ ### WriteBatch
420
+
421
+ A WriteBatch can be constructed via Hyperbee's [write()](#dbwriteoptions)
422
+ method. Using a batch, multiple updates can be queued then applied together.
423
+
424
+ ```js
425
+ const batch = b.write();
426
+ batch.tryPut(Buffer.from('key'), Buffer.from('value'));
427
+ await batch.flush();
428
+ ```
429
+
430
+ **Warning:** WriteBatch does not hold an exclusive lock on the database
431
+ while queuing operations unless you call `lock()`. By default, the
432
+ lock is only acquired when the operations are flushed. So be careful
433
+ about building concurrent batches:
434
+
435
+ ```js
436
+ import Hyperbee from './index.js'
437
+ import Corestore from 'corestore'
438
+
439
+ const b = new Hyperbee(new Corestore('./store'))
440
+ await b.ready()
441
+
442
+ const w1 = b.write()
443
+ w1.tryPut(Buffer.from('name'), Buffer.from('Sneezy'))
444
+
445
+ const w2 = b.write()
446
+ w2.tryPut(Buffer.from('name'), Buffer.from('Sleepy'))
447
+ w2.tryPut(Buffer.from('email'), Buffer.from('sleepy@example.com'))
448
+
449
+ // Be careful about application order vs. order the operations
450
+ // were queued when building batches concurrently.
451
+ await w2.flush()
452
+ await w1.flush()
453
+
454
+ for await (const data of b.createReadStream(b)) {
455
+ console.log(data.key.toString(), '-->', data.value.toString())
456
+ }
457
+
458
+ // Output: (email and name mismatch)
459
+ //
460
+ // email --> sleepy@example.com
461
+ // name --> Sneezy
462
+ ```
463
+
464
+ #### `await batch.lock()`
465
+
466
+ Acquires an exclusive write lock now instead of waiting for `flush()`
467
+ to be called. No writes will occur until this batch is closed allowing
468
+ you to keep a consistent view of the database while building the batch.
469
+
470
+ #### `batch.tryPut(key, value)`
471
+
472
+ Queues an operation to associate `key` with `value`. Any existing entry
473
+ for `key` will be overwritten.
474
+
475
+ #### `batch.tryDelete(key)`
476
+
477
+ Queues an operation to remove the entry with `key`, if it exists. If it
478
+ does not exist, this method does nothing (and will not throw).
479
+
480
+ #### `batch.tryClear()`
481
+
482
+ Queues an operation to clear all entries from the tree.
483
+
484
+ #### `await batch.flush()`
485
+
486
+ Acquires an exclusive write lock and applies the operations queued in this
487
+ batch to the tree, clearing the queue.
488
+
489
+ **Warning:** continuing to use the batch after flushing can cause unpredictable
490
+ behavior. Batches applied after the first flush will be 'unapplied' if you
491
+ flush again later. This can lead to accidentally removing data from the tree.
492
+
493
+ ```js
494
+ import Hyperbee from './index.js'
495
+ import Corestore from 'corestore'
496
+
497
+ const b = new Hyperbee(new Corestore('./store'))
498
+ await b.ready()
499
+
500
+ const w1 = b.write()
501
+ w1.tryPut(Buffer.from('name'), Buffer.from('Sneezy'))
502
+
503
+ const w2 = b.write()
504
+ w2.tryPut(Buffer.from('name'), Buffer.from('Sleepy'))
505
+ w2.tryPut(Buffer.from('email'), Buffer.from('sleepy@example.com'))
506
+
507
+ await w1.flush()
508
+ await w2.flush()
509
+
510
+ // Warning: flushing w1 again will unapply w2!
511
+ w1.tryPut(Buffer.from('active'), Buffer.from('false'))
512
+ await w1.flush()
513
+
514
+ for await (const data of b.createReadStream(b)) {
515
+ console.log(data.key.toString(), '-->', data.value.toString())
516
+ }
517
+
518
+ // Output: (name has reverted to 'Sneezy', email is missing)
519
+ //
520
+ // active --> false
521
+ // name --> Sneezy
522
+ ```
523
+
524
+ #### `batch.close()`
525
+
526
+ Closes the batch without flushing operations. Subsequent attempts
527
+ to flush the batch will result in an error.
528
+
529
+ [hypercore]: https://github.com/holepunchto/hypercore
530
+ [corestore]: https://github.com/holepunchto/corestore
531
+ [streamx]: https://github.com/mafintosh/streamx
package/index.js CHANGED
@@ -8,36 +8,47 @@ const NodeCache = require('./lib/cache.js')
8
8
  const WriteBatch = require('./lib/write.js')
9
9
  const CoreContext = require('./lib/context.js')
10
10
  const SessionConfig = require('./lib/session-config.js')
11
- const { Pointer, KeyPointer, ValuePointer, TreeNode, EMPTY } = require('./lib/tree.js')
12
- const { DeltaOp, DeltaCohort, OP_COHORT } = require('./lib/compression.js')
11
+ const { inflate, inflateValue } = require('./lib/inflate.js')
12
+ const { EMPTY } = require('./lib/tree.js')
13
13
 
14
14
  class Hyperbee extends EventEmitter {
15
- constructor(store, options = {}) {
15
+ constructor(store, opts = {}) {
16
16
  super()
17
17
 
18
18
  const {
19
19
  t = 128, // legacy number for now, should be 128 now
20
20
  key = null,
21
21
  encryption = null,
22
+ getEncryptionProvider = toEncryptionProvider(encryption),
22
23
  maxCacheSize = 4096,
23
- config = new SessionConfig([], 0, true),
24
+ config = new SessionConfig([], 0, true, null),
24
25
  activeRequests = config.activeRequests,
25
26
  timeout = config.timeout,
26
27
  wait = config.wait,
27
- core = key ? store.get(key) : store.get({ key, name: 'bee', encryption }),
28
- context = new CoreContext(store, core, new NodeCache(maxCacheSize), core, encryption, t),
28
+ trace = config.trace,
29
+ core = key
30
+ ? store.get({ key, encryption: getEncryptionProvider(key) })
31
+ : store.get({ key, name: 'bee', encryption: getEncryptionProvider(key) }),
32
+ context = new CoreContext(
33
+ store,
34
+ core,
35
+ new NodeCache(maxCacheSize),
36
+ core,
37
+ getEncryptionProvider,
38
+ t
39
+ ),
29
40
  root = null,
30
41
  view = false,
31
42
  writable = true,
32
43
  unbatch = 0,
33
- autoUpdate = false,
44
+ autoUpdate = !writable && !view,
34
45
  preload = null
35
- } = options
46
+ } = opts
36
47
 
37
48
  this.store = store
38
49
  this.root = root
39
50
  this.context = context
40
- this.config = config.sub(activeRequests, timeout, wait)
51
+ this.config = config.sub(activeRequests, timeout, wait, trace)
41
52
  this.view = view
42
53
  this.writable = writable
43
54
  this.unbatch = unbatch
@@ -107,12 +118,9 @@ class Hyperbee extends EventEmitter {
107
118
  }
108
119
 
109
120
  move({ length = this.core.length, key = null, writable = this.writable } = {}) {
110
- const context = key ? this.context.getContextByKey(key) : this.context
111
- const root = length === 0 ? EMPTY : context.createTreeNode(0, length - 1, 0, false, null)
112
- this.context = context
121
+ this.context = key ? this.context.getContextByKey(key) : this.context
113
122
  this.writable = writable
114
- this.root = root
115
- this.emit('update')
123
+ this._setRoot(this._nodeAtSeq(length - 1), true)
116
124
  }
117
125
 
118
126
  snapshot() {
@@ -128,21 +136,23 @@ class Hyperbee extends EventEmitter {
128
136
  return new WriteBatch(this, opts)
129
137
  }
130
138
 
139
+ _lastNodeInCore() {
140
+ return this._nodeAtSeq(this.context.core.length - 1)
141
+ }
142
+
143
+ _nodeAtSeq(seq) {
144
+ return seq < 0 ? EMPTY : this.context.createTreeNode(0, seq, 0, false, null)
145
+ }
146
+
131
147
  async ready() {
132
148
  if (!this.core.opened) await this.core.ready()
133
149
  if (this.root) return
134
150
  if (this.preload) await this.preload()
135
151
  if (this.root) return
136
152
 
137
- this.root =
138
- this.context.core.length === 0
139
- ? EMPTY
140
- : this.context.createTreeNode(0, this.core.length - 1, 0, false, null)
141
-
153
+ this._setRoot(this._lastNodeInCore(), false)
142
154
  if (this.autoUpdate) {
143
- this.core.on('append', () => {
144
- this.update()
145
- })
155
+ this.core.on('append', () => this._setRoot(this._lastNodeInCore(), true))
146
156
  }
147
157
 
148
158
  this.emit('ready')
@@ -192,45 +202,16 @@ class Hyperbee extends EventEmitter {
192
202
  return ptr.value
193
203
  }
194
204
 
195
- // TODO: unslab these
196
205
  async inflate(ptr, config) {
197
- if (ptr.value) {
198
- this.bump(ptr)
199
- return ptr.value
206
+ if (!ptr.value) {
207
+ await inflate(ptr, config)
200
208
  }
201
-
202
- const [block, context] = await Promise.all([
203
- ptr.context.getBlock(ptr.seq, ptr.core, config),
204
- ptr.context.getContext(ptr.core, config)
205
- ])
206
-
207
- const tree = block.tree[ptr.offset]
208
-
209
- const keys = new Array(tree.keys.length)
210
- const children = new Array(tree.children.length)
211
-
212
- for (let i = 0; i < keys.length; i++) {
213
- const d = tree.keys[i]
214
- keys[i] = inflateKey(context, d, ptr, block, config)
215
- }
216
-
217
- for (let i = 0; i < children.length; i++) {
218
- const d = tree.children[i]
219
- children[i] = inflateChild(context, d, ptr, block, config)
220
- }
221
-
222
- const [k, c] = await Promise.all([Promise.all(keys), Promise.all(children)])
223
-
224
- const value = new TreeNode(k, c)
225
- if (!ptr.value) ptr.value = value
226
-
227
209
  this.bump(ptr)
228
-
229
210
  return ptr.value
230
211
  }
231
212
 
232
213
  async finalizeKeyPointer(key, config) {
233
- const value = key.value || (await this.inflateValue(key, config))
214
+ const value = key.value || (await inflateValue(key, config))
234
215
 
235
216
  return {
236
217
  core: key.context.getCore(key.core),
@@ -241,30 +222,6 @@ class Hyperbee extends EventEmitter {
241
222
  }
242
223
  }
243
224
 
244
- async inflateValue(key, config) {
245
- if (key.value) return key.value
246
- if (!key.valuePointer) return null
247
-
248
- const ptr = key.valuePointer
249
-
250
- if (ptr.split === 0) {
251
- const block = await ptr.context.getBlock(ptr.seq, ptr.core, config)
252
- return block.values[ptr.offset]
253
- }
254
-
255
- const blockPromises = new Array(ptr.split + 1)
256
- for (let i = 0; i < blockPromises.length; i++) {
257
- blockPromises[i] = ptr.context.getBlock(ptr.seq - ptr.split + i, ptr.core, config)
258
- }
259
- const blocks = await Promise.all(blockPromises)
260
- const splitValue = new Array(blockPromises.length)
261
- for (let i = 0; i < splitValue.length - 1; i++) {
262
- splitValue[i] = blocks[i].values[0]
263
- }
264
- splitValue[splitValue.length - 1] = blocks[blocks.length - 1].buffer[ptr.offset]
265
- return b4a.concat(splitValue)
266
- }
267
-
268
225
  async bootstrap(config) {
269
226
  if (!this.root) await this.ready()
270
227
  if (this.unbatch) await this._rollback(config)
@@ -294,16 +251,16 @@ class Hyperbee extends EventEmitter {
294
251
 
295
252
  if (expected === this.unbatch) {
296
253
  this.context = context
297
- this.root = length === 0 ? EMPTY : context.createTreeNode(0, length - 1, 0, false, null)
298
- this.unbatch = 0
299
- this.emit('update')
254
+ this._setRoot(this._nodeAtSeq(length - 1), true)
300
255
  }
301
256
  }
302
257
 
303
- update(root = null) {
304
- this.root = root
258
+ _setRoot(root, emit) {
259
+ if (!root.equivalentTo(this.root)) {
260
+ this.root = root
261
+ if (emit) this.emit('update')
262
+ }
305
263
  this.unbatch = 0
306
- this.emit('update')
307
264
  }
308
265
 
309
266
  async get(key, opts) {
@@ -343,85 +300,7 @@ module.exports = Hyperbee
343
300
 
344
301
  function noop() {}
345
302
 
346
- function inflateKey(context, d, ptr, block, config) {
347
- if (d.type === OP_COHORT) return inflateKeyCohort(context, d, ptr, block, config)
348
- return inflateKeyDelta(context, d, ptr, block, config)
349
- }
350
-
351
- async function inflateKeyDelta(context, d, ptr, block, config) {
352
- const k = d.pointer
353
-
354
- if (!k) return new DeltaOp(false, d.type, d.index, null)
355
-
356
- const blk =
357
- k.seq === ptr.seq && k.core === 0 && ptr.core === 0
358
- ? block
359
- : await context.getBlock(k.seq, k.core, config)
360
-
361
- const bk = blk.keys[k.offset]
362
-
363
- let vp = null
364
-
365
- if (bk.valuePointer) {
366
- const p = bk.valuePointer
367
- const ctx = await context.getContext(k.core, config)
368
- vp = new ValuePointer(ctx, p.core, p.seq, p.offset, p.split)
369
- }
370
-
371
- const kp = new KeyPointer(context, k.core, k.seq, k.offset, false, bk.key, bk.value, vp)
372
- return new DeltaOp(false, d.type, d.index, kp)
373
- }
374
-
375
- async function inflateKeyCohort(context, d, ptr, block, config) {
376
- const co = d.pointer
377
-
378
- const blk =
379
- co.seq === ptr.seq && co.core === 0 && ptr.core === 0
380
- ? block
381
- : await context.getBlock(co.seq, co.core, config)
382
-
383
- const cohort = blk.cohorts[co.offset]
384
- const promises = new Array(cohort.length)
385
-
386
- for (let i = 0; i < cohort.length; i++) {
387
- const p = cohort[i]
388
- const k = inflateKeyDelta(context, p, co, blk, config)
389
- promises[i] = k
390
- }
391
-
392
- const p = new Pointer(context, co.core, co.seq, co.offset)
393
- return new DeltaCohort(false, p, await Promise.all(promises))
394
- }
395
-
396
- async function inflateChild(context, d, ptr, block, config) {
397
- if (d.type === OP_COHORT) return inflateChildCohort(context, d, ptr, block, config)
398
- if (d.pointer && !context.hasCore(d.pointer.core)) await context.update(config)
399
- return inflateChildDelta(context, d, ptr, block, config)
400
- }
401
-
402
- function inflateChildDelta(context, d, ptr, block, config) {
403
- const p = d.pointer
404
- const c = p && context.createTreeNode(p.core, p.seq, p.offset, false, null)
405
- return new DeltaOp(false, d.type, d.index, c)
406
- }
407
-
408
- async function inflateChildCohort(context, d, ptr, block, config) {
409
- const co = d.pointer
410
-
411
- const blk =
412
- co.seq === ptr.seq && co.core === 0 && ptr.core === 0
413
- ? block
414
- : await context.getBlock(co.seq, co.core, config)
415
-
416
- const cohort = blk.cohorts[co.offset]
417
- const deltas = new Array(cohort.length)
418
-
419
- for (let i = 0; i < cohort.length; i++) {
420
- const c = cohort[i]
421
- if (c.pointer && !context.hasCore(c.pointer.core)) await context.update(config)
422
- deltas[i] = inflateChildDelta(context, c, co, blk, config)
423
- }
424
-
425
- const p = new Pointer(context, co.core, co.seq, co.offset)
426
- return new DeltaCohort(false, p, deltas)
303
+ function toEncryptionProvider(encryption) {
304
+ if (encryption) return (key) => encryption
305
+ return () => null
427
306
  }
package/lib/context.js CHANGED
@@ -5,7 +5,16 @@ const { TreeNodePointer } = require('./tree.js')
5
5
  const { decodeBlock } = require('./encoding.js')
6
6
 
7
7
  class CoreContext {
8
- constructor(store, local, cache, core, encryption, t, lock = new ScopeLock(), other = new Map()) {
8
+ constructor(
9
+ store,
10
+ local,
11
+ cache,
12
+ core,
13
+ getEncryptionProvider,
14
+ t,
15
+ lock = new ScopeLock(),
16
+ other = new Map()
17
+ ) {
9
18
  this.store = store
10
19
  this.local = local
11
20
  this.cache = cache
@@ -13,7 +22,7 @@ class CoreContext {
13
22
  this.t = t
14
23
  this.minKeys = t - 1
15
24
  this.maxKeys = 2 * t - 1
16
- this.encryption = encryption
25
+ this.getEncryptionProvider = getEncryptionProvider
17
26
  this.lock = lock
18
27
  this.other = other
19
28
  this.length = 0
@@ -132,7 +141,7 @@ class CoreContext {
132
141
  if (index > this.cores.length) throw new Error('Bad core index: ' + index)
133
142
  if (this.opened[index - 1] === null) {
134
143
  const key = this.cores[index - 1].key
135
- this.opened[index - 1] = this.store.get({ key, encryption: this.encryption })
144
+ this.opened[index - 1] = this.store.get({ key, encryption: this.getEncryptionProvider(key) })
136
145
  }
137
146
  return this.opened[index - 1]
138
147
  }
@@ -152,6 +161,9 @@ class CoreContext {
152
161
  const hc = this.getCore(core)
153
162
  const buffer = await hc.get(seq, config)
154
163
  if (buffer === null) throw BLOCK_NOT_AVAILABLE()
164
+
165
+ if (config.trace !== null) config.trace(core, seq)
166
+
155
167
  const block = decodeBlock(buffer, seq)
156
168
  return block
157
169
  }
@@ -166,7 +178,9 @@ class CoreContext {
166
178
  const hex = b4a.toString(key, 'hex')
167
179
  if (this.other.has(hex)) return this.other.get(hex)
168
180
 
169
- const ctx = this._createContext(this.store.get({ key, encryption: this.encryption }))
181
+ const ctx = this._createContext(
182
+ this.store.get({ key, encryption: this.getEncryptionProvider(key) })
183
+ )
170
184
  this.other.set(hex, ctx)
171
185
  return ctx
172
186
  }
@@ -195,11 +209,11 @@ class CoreContext {
195
209
  const store = this.store
196
210
  const local = this.local
197
211
  const cache = this.cache
198
- const encryption = this.encryption
212
+ const getEncryptionProvider = this.getEncryptionProvider
199
213
  const t = this.t
200
214
  const lock = this.lock
201
215
  const other = this.other
202
- return new CoreContext(store, local, cache, core, encryption, t, lock, other)
216
+ return new CoreContext(store, local, cache, core, getEncryptionProvider, t, lock, other)
203
217
  }
204
218
  }
205
219
 
package/lib/inflate.js ADDED
@@ -0,0 +1,140 @@
1
+ const b4a = require('b4a')
2
+ const { Pointer, KeyPointer, ValuePointer, TreeNode } = require('./tree.js')
3
+ const { DeltaOp, DeltaCohort, OP_COHORT } = require('./compression.js')
4
+
5
+ exports.inflate = async function inflate(ptr, config) {
6
+ if (ptr.value) return ptr.value
7
+
8
+ const [block, context] = await Promise.all([
9
+ ptr.context.getBlock(ptr.seq, ptr.core, config),
10
+ ptr.context.getContext(ptr.core, config)
11
+ ])
12
+
13
+ const tree = block.tree[ptr.offset]
14
+
15
+ const keys = new Array(tree.keys.length)
16
+ const children = new Array(tree.children.length)
17
+
18
+ for (let i = 0; i < keys.length; i++) {
19
+ const d = tree.keys[i]
20
+ keys[i] = inflateKey(context, d, ptr, block, config)
21
+ }
22
+
23
+ for (let i = 0; i < children.length; i++) {
24
+ const d = tree.children[i]
25
+ children[i] = inflateChild(context, d, ptr, block, config)
26
+ }
27
+
28
+ const [k, c] = await Promise.all([Promise.all(keys), Promise.all(children)])
29
+
30
+ const value = new TreeNode(k, c)
31
+ if (!ptr.value) ptr.value = value
32
+ return ptr.value
33
+ }
34
+
35
+ function inflateKey(context, d, ptr, block, config) {
36
+ if (d.type === OP_COHORT) return inflateKeyCohort(context, d, ptr, block, config)
37
+ return inflateKeyDelta(context, d, ptr, block, config)
38
+ }
39
+
40
+ async function inflateKeyDelta(context, d, ptr, block, config) {
41
+ const k = d.pointer
42
+
43
+ if (!k) return new DeltaOp(false, d.type, d.index, null)
44
+
45
+ const blk =
46
+ k.seq === ptr.seq && k.core === 0 && ptr.core === 0
47
+ ? block
48
+ : await context.getBlock(k.seq, k.core, config)
49
+
50
+ const bk = blk.keys[k.offset]
51
+
52
+ let vp = null
53
+
54
+ if (bk.valuePointer) {
55
+ const p = bk.valuePointer
56
+ const ctx = await context.getContext(k.core, config)
57
+ vp = new ValuePointer(ctx, p.core, p.seq, p.offset, p.split)
58
+ }
59
+
60
+ const kp = new KeyPointer(context, k.core, k.seq, k.offset, false, bk.key, bk.value, vp)
61
+ return new DeltaOp(false, d.type, d.index, kp)
62
+ }
63
+
64
+ exports.inflateValue = async function inflateValue(key, config) {
65
+ if (key.value) return key.value
66
+ if (!key.valuePointer) return null
67
+
68
+ const ptr = key.valuePointer
69
+
70
+ if (ptr.split === 0) {
71
+ const block = await ptr.context.getBlock(ptr.seq, ptr.core, config)
72
+ return block.values[ptr.offset]
73
+ }
74
+
75
+ const blockPromises = new Array(ptr.split + 1)
76
+ for (let i = 0; i < blockPromises.length; i++) {
77
+ blockPromises[i] = ptr.context.getBlock(ptr.seq - ptr.split + i, ptr.core, config)
78
+ }
79
+ const blocks = await Promise.all(blockPromises)
80
+ const splitValue = new Array(blockPromises.length)
81
+ for (let i = 0; i < splitValue.length - 1; i++) {
82
+ splitValue[i] = blocks[i].values[0]
83
+ }
84
+ splitValue[splitValue.length - 1] = blocks[blocks.length - 1].buffer[ptr.offset]
85
+ return b4a.concat(splitValue)
86
+ }
87
+
88
+ async function inflateKeyCohort(context, d, ptr, block, config) {
89
+ const co = d.pointer
90
+
91
+ const blk =
92
+ co.seq === ptr.seq && co.core === 0 && ptr.core === 0
93
+ ? block
94
+ : await context.getBlock(co.seq, co.core, config)
95
+
96
+ const cohort = blk.cohorts[co.offset]
97
+ const promises = new Array(cohort.length)
98
+
99
+ for (let i = 0; i < cohort.length; i++) {
100
+ const p = cohort[i]
101
+ const k = inflateKeyDelta(context, p, co, blk, config)
102
+ promises[i] = k
103
+ }
104
+
105
+ const p = new Pointer(context, co.core, co.seq, co.offset)
106
+ return new DeltaCohort(false, p, await Promise.all(promises))
107
+ }
108
+
109
+ async function inflateChild(context, d, ptr, block, config) {
110
+ if (d.type === OP_COHORT) return inflateChildCohort(context, d, ptr, block, config)
111
+ if (d.pointer && !context.hasCore(d.pointer.core)) await context.update(config)
112
+ return inflateChildDelta(context, d, ptr, block, config)
113
+ }
114
+
115
+ function inflateChildDelta(context, d, ptr, block, config) {
116
+ const p = d.pointer
117
+ const c = p && context.createTreeNode(p.core, p.seq, p.offset, false, null)
118
+ return new DeltaOp(false, d.type, d.index, c)
119
+ }
120
+
121
+ async function inflateChildCohort(context, d, ptr, block, config) {
122
+ const co = d.pointer
123
+
124
+ const blk =
125
+ co.seq === ptr.seq && co.core === 0 && ptr.core === 0
126
+ ? block
127
+ : await context.getBlock(co.seq, co.core, config)
128
+
129
+ const cohort = blk.cohorts[co.offset]
130
+ const deltas = new Array(cohort.length)
131
+
132
+ for (let i = 0; i < cohort.length; i++) {
133
+ const c = cohort[i]
134
+ if (c.pointer && !context.hasCore(c.pointer.core)) await context.update(config)
135
+ deltas[i] = inflateChildDelta(context, c, co, blk, config)
136
+ }
137
+
138
+ const p = new Pointer(context, co.core, co.seq, co.offset)
139
+ return new DeltaCohort(false, p, deltas)
140
+ }
package/lib/ranges.js CHANGED
@@ -119,6 +119,7 @@ class RangeIterator {
119
119
  // TODO: dbl check this for off-by-ones with the offset and keys and children
120
120
  let limit = this.limit
121
121
 
122
+ // TODO: if limit === -1, don't return early here
122
123
  if (limit < this.tree.context.minKeys) return
123
124
 
124
125
  const parent = this.stack[this.stack.length - 1]
@@ -128,17 +129,21 @@ class RangeIterator {
128
129
  this.prefetching = pv
129
130
 
130
131
  for (let i = parent.offset >> 1; i < pv.children.length; i++) {
131
- const k = pv.keys.get(i)
132
+ // If the preceding key in parent is beyond upper bound,
133
+ // stop fetching child nodes.
134
+ if (i > 0) {
135
+ const k = pv.keys.get(i - 1)
132
136
 
133
- const cmp = this.reverse
134
- ? this.start
135
- ? b4a.compare(this.start, k.key)
136
- : -1
137
- : this.end
138
- ? b4a.compare(k.key, this.end)
139
- : -1
137
+ const cmp = this.reverse
138
+ ? this.start
139
+ ? b4a.compare(this.start, k.key)
140
+ : -1
141
+ : this.end
142
+ ? b4a.compare(k.key, this.end)
143
+ : -1
140
144
 
141
- if (cmp > this.compare) break
145
+ if (cmp > this.compare) return
146
+ }
142
147
 
143
148
  const c = pv.children.get(i)
144
149
  if (!c.value) this.tree.inflate(c, this.config).catch(noop)
@@ -1,22 +1,33 @@
1
1
  class SessionConfig {
2
- constructor(activeRequests, timeout, wait) {
2
+ constructor(activeRequests, timeout, wait, trace) {
3
3
  this.activeRequests = activeRequests
4
4
  this.timeout = timeout
5
5
  this.wait = wait
6
+ this.trace = trace
6
7
  }
7
8
 
8
- sub(activeRequests, timeout, wait) {
9
- if (this.activeRequests === activeRequests && this.timeout === timeout && this.wait === wait) {
9
+ sub(activeRequests, timeout, wait, trace) {
10
+ if (
11
+ this.activeRequests === activeRequests &&
12
+ this.timeout === timeout &&
13
+ this.wait === wait &&
14
+ this.trace === trace
15
+ ) {
10
16
  return this
11
17
  }
12
18
 
13
- return new SessionConfig(activeRequests, timeout, wait)
19
+ return new SessionConfig(activeRequests, timeout, wait, trace)
14
20
  }
15
21
 
16
22
  options(opts) {
17
23
  if (!opts) return this
18
- const { activeRequests = this.activeRequests, timeout = this.timeout, wait = this.wait } = opts
19
- return this.sub(activeRequests, timeout, wait)
24
+ const {
25
+ activeRequests = this.activeRequests,
26
+ timeout = this.timeout,
27
+ wait = this.wait,
28
+ trace = this.trace
29
+ } = opts
30
+ return this.sub(activeRequests, timeout, wait, trace)
20
31
  }
21
32
  }
22
33
 
package/lib/tree.js CHANGED
@@ -28,6 +28,22 @@ class Pointer {
28
28
  this.changedBy = null
29
29
  }
30
30
 
31
+ // Compare two pointers to see if they point to equivalent positions
32
+ equivalentTo(other) {
33
+ // EMPTY is a special case that can look equivalent to a first entry
34
+ // in a hypercore but actually contains different data.
35
+ if (other === exports.EMPTY) return this === exports.EMPTY
36
+ if (!other) return false
37
+
38
+ return (
39
+ this.seq === other.seq &&
40
+ this.offset === other.offset &&
41
+ this.changed === other.changed &&
42
+ this.context === other.context &&
43
+ this.core === other.core
44
+ )
45
+ }
46
+
31
47
  retain() {
32
48
  this.retained = this.context.cache.retained + 1
33
49
  }
package/lib/write.js CHANGED
@@ -1,8 +1,8 @@
1
1
  const b4a = require('b4a')
2
- const c = require('compact-encoding')
3
2
  const { OP_COHORT } = require('./compression.js')
4
3
  const { encodeBlock, TYPE_COMPAT, TYPE_LATEST } = require('./encoding.js')
5
4
  const { Pointer, KeyPointer, ValuePointer, TreeNode, INSERTED, NEEDS_SPLIT } = require('./tree.js')
5
+ const { inflateValue } = require('./inflate.js')
6
6
 
7
7
  const PREFERRED_BLOCK_SIZE = 4096
8
8
  const INLINE_VALUE_SIZE = 1024
@@ -33,6 +33,7 @@ module.exports = class WriteBatch {
33
33
  this.length = length
34
34
  this.key = key
35
35
  this.type = type
36
+ this.hasLock = false
36
37
  this.closed = false
37
38
  this.applied = 0
38
39
  this.root = null
@@ -40,14 +41,17 @@ module.exports = class WriteBatch {
40
41
  }
41
42
 
42
43
  tryPut(key, value) {
44
+ this.checkIfClosed()
43
45
  this.ops.push({ put: true, applied: false, key, value })
44
46
  }
45
47
 
46
48
  tryDelete(key) {
49
+ this.checkIfClosed()
47
50
  this.ops.push({ put: false, applied: false, key, value: null })
48
51
  }
49
52
 
50
53
  tryClear() {
54
+ this.checkIfClosed()
51
55
  this.ops = []
52
56
  this.length = 0
53
57
  }
@@ -62,11 +66,25 @@ module.exports = class WriteBatch {
62
66
  return root ? root.seq + 1 : 0
63
67
  }
64
68
 
69
+ async lock() {
70
+ if (this.hasLock) return
71
+ await this.tree.context.lock.lock()
72
+ this.hasLock = true
73
+ }
74
+
75
+ _unlock() {
76
+ if (!this.hasLock) return
77
+ this.tree.context.lock.unlock()
78
+ this.hasLock = false
79
+ }
80
+
65
81
  async flush() {
66
- const lock = this.tree.context.lock
67
- await lock.lock()
82
+ this.checkIfClosed()
83
+ await this.lock()
68
84
 
69
85
  try {
86
+ this.checkIfClosed()
87
+
70
88
  const ops = this.ops
71
89
 
72
90
  const root = await this.tree.bootstrap(this.config)
@@ -88,13 +106,13 @@ module.exports = class WriteBatch {
88
106
  }
89
107
 
90
108
  await this._flush()
91
- await this.snapshot.close()
109
+ await this.close()
92
110
 
93
111
  if (this.autoUpdate) {
94
- this.tree.update(this.root)
112
+ this.tree._setRoot(this.root, true)
95
113
  }
96
114
  } finally {
97
- lock.unlock()
115
+ this._unlock()
98
116
  }
99
117
  }
100
118
 
@@ -128,7 +146,7 @@ module.exports = class WriteBatch {
128
146
  c = b4a.compare(target, m.key)
129
147
 
130
148
  if (c === 0) {
131
- const existing = await snap.inflateValue(m, conf)
149
+ const existing = await inflateValue(m, conf)
132
150
  if (b4a.equals(existing, value)) return false
133
151
  v.setValue(this.tree.context, mid, value)
134
152
  for (let i = 0; i < stack.length; i++) stack[i].changed = true
@@ -149,7 +167,7 @@ module.exports = class WriteBatch {
149
167
  if (status >= 0) {
150
168
  // already exists, upsert if changed
151
169
  const m = v.keys.uget(status)
152
- const existing = await snap.inflateValue(m, conf)
170
+ const existing = await inflateValue(m, conf)
153
171
  if (b4a.equals(existing, value)) return false
154
172
  v.setValue(this.tree.context, status, value)
155
173
  }
@@ -505,9 +523,7 @@ module.exports = class WriteBatch {
505
523
  buffers[i] = encodeBlock(blocks[i])
506
524
  }
507
525
 
508
- if (this.closed) {
509
- throw new Error('Write batch is closed')
510
- }
526
+ this.checkIfClosed()
511
527
 
512
528
  await context.core.append(buffers)
513
529
 
@@ -523,6 +539,12 @@ module.exports = class WriteBatch {
523
539
  context.cache.retained++
524
540
  context.cache.gc()
525
541
  }
542
+
543
+ checkIfClosed() {
544
+ if (this.closed) {
545
+ throw new Error('Write batch is closed')
546
+ }
547
+ }
526
548
  }
527
549
 
528
550
  async function toCompatType(context, batch, ops) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "hyperbee2",
3
- "version": "2.7.1",
3
+ "version": "2.9.0",
4
4
  "description": "Scalable P2P BTree",
5
5
  "main": "index.js",
6
6
  "files": [