hypercore 9.12.0 → 10.0.0-alpha.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/.github/workflows/test-node.yml +3 -4
  2. package/README.md +131 -404
  3. package/__snapshots__/test/storage.js.snapshot.cjs +15 -0
  4. package/examples/announce.js +19 -0
  5. package/examples/basic.js +10 -0
  6. package/examples/http.js +123 -0
  7. package/examples/lookup.js +20 -0
  8. package/index.js +365 -1600
  9. package/lib/bitfield.js +113 -285
  10. package/lib/block-encryption.js +68 -0
  11. package/lib/block-store.js +58 -0
  12. package/lib/core.js +468 -0
  13. package/lib/extensions.js +76 -0
  14. package/lib/merkle-tree.js +1110 -0
  15. package/lib/messages.js +571 -0
  16. package/lib/mutex.js +39 -0
  17. package/lib/oplog.js +224 -0
  18. package/lib/protocol.js +525 -0
  19. package/lib/random-iterator.js +46 -0
  20. package/lib/remote-bitfield.js +24 -0
  21. package/lib/replicator.js +857 -0
  22. package/lib/streams.js +39 -0
  23. package/package.json +44 -45
  24. package/test/basic.js +59 -471
  25. package/test/bitfield.js +48 -133
  26. package/test/core.js +290 -0
  27. package/test/encodings.js +18 -0
  28. package/test/encryption.js +123 -0
  29. package/test/extension.js +71 -0
  30. package/test/helpers/index.js +23 -0
  31. package/test/merkle-tree.js +518 -0
  32. package/test/mutex.js +137 -0
  33. package/test/oplog.js +399 -0
  34. package/test/preload.js +72 -0
  35. package/test/replicate.js +227 -824
  36. package/test/sessions.js +173 -0
  37. package/test/storage.js +31 -0
  38. package/test/streams.js +39 -146
  39. package/test/user-data.js +47 -0
  40. package/bench/all.sh +0 -65
  41. package/bench/copy-64kb-blocks.js +0 -51
  42. package/bench/helpers/read-throttled.js +0 -27
  43. package/bench/helpers/read.js +0 -47
  44. package/bench/helpers/write.js +0 -29
  45. package/bench/read-16kb-blocks-proof-throttled.js +0 -1
  46. package/bench/read-16kb-blocks-proof.js +0 -1
  47. package/bench/read-16kb-blocks-throttled.js +0 -1
  48. package/bench/read-16kb-blocks.js +0 -1
  49. package/bench/read-512b-blocks.js +0 -1
  50. package/bench/read-64kb-blocks-linear-batch.js +0 -18
  51. package/bench/read-64kb-blocks-linear.js +0 -18
  52. package/bench/read-64kb-blocks-proof.js +0 -1
  53. package/bench/read-64kb-blocks.js +0 -1
  54. package/bench/replicate-16kb-blocks.js +0 -19
  55. package/bench/replicate-64kb-blocks.js +0 -19
  56. package/bench/write-16kb-blocks.js +0 -1
  57. package/bench/write-512b-blocks.js +0 -1
  58. package/bench/write-64kb-blocks-static.js +0 -1
  59. package/bench/write-64kb-blocks.js +0 -1
  60. package/example.js +0 -23
  61. package/lib/cache.js +0 -26
  62. package/lib/crypto.js +0 -5
  63. package/lib/replicate.js +0 -829
  64. package/lib/safe-buffer-equals.js +0 -6
  65. package/lib/storage.js +0 -421
  66. package/lib/tree-index.js +0 -183
  67. package/test/ack.js +0 -306
  68. package/test/audit.js +0 -36
  69. package/test/cache.js +0 -93
  70. package/test/compat.js +0 -209
  71. package/test/copy.js +0 -377
  72. package/test/default-storage.js +0 -51
  73. package/test/extensions.js +0 -137
  74. package/test/get.js +0 -64
  75. package/test/head.js +0 -65
  76. package/test/helpers/create-tracking-ram.js +0 -27
  77. package/test/helpers/create.js +0 -6
  78. package/test/helpers/replicate.js +0 -4
  79. package/test/seek.js +0 -234
  80. package/test/selections.js +0 -95
  81. package/test/set-uploading-downloading.js +0 -91
  82. package/test/stats.js +0 -77
  83. package/test/timeouts.js +0 -22
  84. package/test/tree-index.js +0 -841
  85. package/test/update.js +0 -156
  86. package/test/value-encoding.js +0 -52
package/test/replicate.js CHANGED
@@ -1,969 +1,372 @@
- var create = require('./helpers/create')
- var replicate = require('./helpers/replicate')
- var tape = require('tape')
- var Protocol = require('hypercore-protocol')
+ const test = require('brittle')
+ const NoiseSecretStream = require('@hyperswarm/secret-stream')
+ const { create, replicate, eventFlush } = require('./helpers')

- tape('replicate', function (t) {
- t.plan(10)
+ test('basic replication', async function (t) {
+ const a = await create()

- var feed = create()
+ await a.append(['a', 'b', 'c', 'd', 'e'])

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ const b = await create(a.key)

- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- clone.get(3, same(t, 'd'))
- clone.get(4, same(t, 'e'))
+ let d = 0
+ b.on('download', () => d++)

- replicate(feed, clone, { live: true })
- })
- })
-
- tape('replicate twice', function (t) {
- t.plan(20)
-
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
-
- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- clone.get(3, same(t, 'd'))
- clone.get(4, same(t, 'e'))
-
- replicate(feed, clone).on('end', function () {
- feed.append(['f', 'g', 'h', 'i', 'j'], function () {
- replicate(feed, clone).on('end', function () {
- clone.get(5, same(t, 'f'))
- clone.get(6, same(t, 'g'))
- clone.get(7, same(t, 'h'))
- clone.get(8, same(t, 'i'))
- clone.get(9, same(t, 'j'))
- })
- })
- })
- })
- })
+ replicate(a, b, t)

- tape('replicate live', function (t) {
- t.plan(6)
+ const r = b.download({ start: 0, end: a.length })

- var feed = create()
+ await r.downloaded()

- feed.ready(function () {
- var clone = create(feed.key)
-
- replicate(feed, clone, { live: true })
-
- feed.append('a')
- feed.append('b')
- feed.append('c')
-
- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- })
+ t.is(d, 5)
  })

- tape('download while get', function (t) {
- t.plan(10)
-
- var feed = create()
+ test('basic replication from fork', async function (t) {
+ const a = await create()

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ await a.append(['a', 'b', 'c', 'd', 'e'])
+ await a.truncate(4)
+ await a.append('e')

- // add 5 so this never finished
- clone.download({ start: 0, end: 6 }, function () {
- t.fail('should never happen')
- })
+ t.is(a.fork, 1)

- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- clone.get(3, same(t, 'd'))
- clone.get(4, same(t, 'e'))
+ const b = await create(a.key)

- replicate(feed, clone, { live: true })
- })
- })
+ replicate(a, b, t)

- tape('non live', function (t) {
- t.plan(10)
+ let d = 0
+ b.on('download', () => d++)

- var feed = create()
+ const r = b.download({ start: 0, end: a.length })

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ await r.downloaded()

- replicate(clone, feed).on('end', function () {
- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- clone.get(3, same(t, 'd'))
- clone.get(4, same(t, 'e'))
- })
- })
+ t.is(d, 5)
+ t.is(a.fork, b.fork)
  })

- tape('non live, two way', function (t) {
- t.plan(20)
-
- var feed = create()
+ test('eager replication from bigger fork', async function (t) {
+ const a = await create()
+ const b = await create(a.key)

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
-
- replicate(clone, feed).on('end', function () {
- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- clone.get(3, same(t, 'd'))
- clone.get(4, same(t, 'e'))
-
- var clone2 = create(feed.key)
-
- replicate(clone, clone2).on('end', function () {
- clone2.get(0, same(t, 'a'))
- clone2.get(1, same(t, 'b'))
- clone2.get(2, same(t, 'c'))
- clone2.get(3, same(t, 'd'))
- clone2.get(4, same(t, 'e'))
- })
- })
- })
- })
+ replicate(a, b, t)

- tape('non-live empty', function (t) {
- var feed = create()
+ await a.append(['a', 'b', 'c', 'd', 'e', 'g', 'h', 'i', 'j', 'k'])
+ await a.truncate(4)
+ await a.append(['FORKED', 'g', 'h', 'i', 'j', 'k'])

- feed.ready(function () {
- var clone = create(feed.key)
+ t.is(a.fork, 1)

- replicate(feed, clone).on('end', function () {
- t.same(clone.length, 0)
- t.end()
- })
+ let d = 0
+ b.on('download', (index) => {
+ d++
  })
- })

- tape('basic 3-way replication', function (t) {
- var feed = create()
+ const r = b.download({ start: 0, end: a.length })

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone1 = create(feed.key)
- var clone2 = create(feed.key)
+ await r.downloaded()

- replicate(feed, clone1, { live: true })
- replicate(clone1, clone2, { live: true })
-
- clone1.get(0, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('a'))
-
- clone2.get(0, function (err) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('a'))
- t.end()
- })
- })
- })
+ t.is(d, a.length)
+ t.is(a.fork, b.fork)
  })

- tape('basic 3-way replication sparse and not sparse', function (t) {
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone1 = create(feed.key, { sparse: true })
- var clone2 = create(feed.key)
-
- replicate(feed, clone1, { live: true })
+ test('eager replication of updates per default', async function (t) {
+ const a = await create()
+ const b = await create(a.key)

- clone1.get(0, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('a'))
+ replicate(a, b, t)

- replicate(clone1, clone2, { live: true })
-
- clone2.get(0, function (err) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('a'))
- var inflight = clone2.peers[0].inflightRequests
- if (inflight.length === 1 && inflight[0].index === 0) inflight = [] // just has not been cleared yet
- t.same(inflight, [], 'no additional requests')
- t.end()
- })
+ const appended = new Promise(resolve => {
+ b.on('append', function () {
+ t.pass('appended')
+ resolve()
  })
  })
- })

- tape('extra data + factor of two', function (t) {
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'], function () {
- var clone1 = create(feed.key)
-
- replicate(feed, clone1, { live: true })
-
- clone1.get(1, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
- t.end()
- })
- })
+ await a.append(['a', 'b', 'c', 'd', 'e', 'g', 'h', 'i', 'j', 'k'])
+ await appended
  })

- tape('3-way another index', function (t) {
- var feed = create()
+ test('bigger download range', async function (t) {
+ const a = await create()
+ const b = await create(a.key)

- feed.append(['a', 'b'], function () {
- var clone1 = create(feed.key)
- var clone2 = create(feed.key)
+ replicate(a, b, t)

- replicate(feed, clone1, { live: true })
- replicate(clone1, clone2, { live: true })
+ for (let i = 0; i < 20; i++) await a.append('data')

- clone1.get(1, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
+ const downloaded = new Set()

- clone2.get(1, function (err) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
- t.end()
- })
- })
+ b.on('download', function (index) {
+ downloaded.add(index)
  })
- })
-
- tape('3-way another index + extra data', function (t) {
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone1 = create(feed.key)
- var clone2 = create(feed.key)

- replicate(feed, clone1, { live: true })
- replicate(clone1, clone2, { live: true })
+ const r = b.download({ start: 0, end: a.length })
+ await r.downloaded()

- clone1.get(1, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
+ t.is(b.length, a.length, 'same length')
+ t.is(downloaded.size, a.length, 'downloaded all')

- clone2.get(1, function (err) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
- t.end()
- })
- })
- })
+ t.end()
  })

- tape('3-way another index + extra data + factor of two', function (t) {
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'], function () {
- var clone1 = create(feed.key)
- var clone2 = create(feed.key)
-
- replicate(feed, clone1, { live: true })
- replicate(clone1, clone2, { live: true })
+ test('high latency reorg', async function (t) {
+ const a = await create()
+ const b = await create(a.key)

- clone1.get(1, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
+ const s = replicate(a, b, t)

- clone2.get(1, function (err) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
- t.end()
- })
- })
- })
- })
+ for (let i = 0; i < 50; i++) await a.append('data')

- tape('3-way another index + extra data + factor of two + static', function (t) {
- var feed = create({ live: false })
-
- feed.append(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'], function () {
- feed.finalize(function () {
- var clone1 = create(feed.key)
- var clone2 = create(feed.key)
-
- replicate(feed, clone1, { live: true })
- replicate(clone1, clone2, { live: true })
+ {
+ const r = b.download({ start: 0, end: a.length })
+ await r.downloaded()
+ }

- clone1.get(1, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
+ s[0].destroy()
+ s[1].destroy()

- clone2.get(1, function (err) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('b'))
- t.end()
- })
- })
- })
- })
- })
+ await a.truncate(30)

- tape('seek while replicating', function (t) {
- t.plan(6)
+ for (let i = 0; i < 50; i++) await a.append('fork')

- var feed = create()
+ replicate(a, b, t)

- feed.ready(function () {
- var clone = create(feed.key)
+ {
+ const r = b.download({ start: 0, end: a.length })
+ await r.downloaded()
+ }

- clone.seek(9, function (err, index, offset) {
- t.error(err, 'no error')
- t.same(index, 2)
- t.same(offset, 1)
- })
+ let same = 0

- clone.seek(16, function (err, index, offset) {
- t.error(err, 'no error')
- t.same(index, 4)
- t.same(offset, 2)
- })
+ for (let i = 0; i < a.length; i++) {
+ const ba = await a.get(i)
+ const bb = await b.get(i)
+ if (ba.equals(bb)) same++
+ }

- feed.append(['hello'], function () {
- feed.append(['how', 'are', 'you', 'doing', '?'], function () {
- replicate(feed, clone, { live: true })
- })
- })
- })
+ t.is(a.fork, 1)
+ t.is(a.fork, b.fork)
+ t.is(same, 80)
  })

- tape('non spare live replication', function (t) {
- var feed = create()
+ test('invalid signature fails', async function (t) {
+ t.plan(2)

- feed.on('ready', function () {
- feed.append(['a', 'b', 'c'], function () {
- var clone = create(feed.key)
+ const a = await create()
+ const b = await create() // not the same key

- clone.get(0, function () {
- clone.get(1, function () {
- clone.get(2, function () {
- clone.once('download', function () {
- t.pass('downloaded new block')
- t.end()
- })
+ b.discoveryKey = a.discoveryKey // haxx to make them swarm

- feed.append('a')
- })
- })
- })
+ await a.append(['a', 'b', 'c', 'd', 'e'])

- replicate(feed, clone, { live: true })
- })
- })
- })
+ const [s1, s2] = replicate(a, b, t)

- tape('can wait for updates', function (t) {
- var feed = create()
-
- feed.on('ready', function () {
- var clone = create(feed.key)
-
- clone.update(function (err) {
- t.error(err, 'no error')
- t.same(clone.length, 3)
- t.end()
- })
-
- replicate(feed, clone, { live: true }).once('duplex-channel', function () {
- feed.append(['a', 'b', 'c'])
- })
+ s1.on('error', (err) => {
+ t.ok(err, 'stream closed')
  })
- })

- tape('replicate while clearing', function (t) {
- var feed = create()
-
- feed.on('ready', function () {
- var clone = create(feed.key, { sparse: true })
-
- clone.get(1, function (err) {
- t.error(err, 'no error')
- feed.clear(2, function (err) {
- t.error(err, 'no error')
- clone.get(2, { timeout: 50 }, function (err) {
- t.ok(err, 'had timeout error')
- t.end()
- })
- })
- })
-
- replicate(feed, clone, { live: true }).once('duplex-channel', function () {
- feed.append(['a', 'b', 'c'])
- })
+ s2.on('error', (err) => {
+ t.is(err.message, 'Remote signature does not match')
  })
- })

- tape('replicate while cancelling', function (t) {
- t.plan(2)
+ return new Promise((resolve) => {
+ let missing = 2

- var feed = create()
+ s1.on('close', onclose)
+ s2.on('close', onclose)

- feed.on('ready', function () {
- var clone = create(feed.key, { sparse: true })
-
- clone.on('download', function () {
- t.fail('should not download')
- })
-
- feed.on('upload', function () {
- t.pass('should upload')
- clone.cancel(0)
- })
-
- clone.get(0, function (err) {
- t.ok(err, 'expected error')
- })
-
- feed.append(['a', 'b', 'c'])
-
- replicate(feed, clone, { live: true })
+ function onclose () {
+ if (--missing === 0) resolve()
+ }
  })
  })

- tape('allow push', function (t) {
- t.plan(3)
-
- var feed = create()
+ test('update with zero length', async function (t) {
+ const a = await create()
+ const b = await create(a.key)

- feed.on('ready', function () {
- var clone = create(feed.key, { sparse: true, allowPush: true })
+ replicate(a, b, t)

- clone.on('download', function () {
- t.pass('push allowed')
- })
-
- feed.on('upload', function () {
- t.pass('should upload')
- clone.cancel(0)
- })
-
- clone.get(0, function (err) {
- t.ok(err, 'expected error')
- })
-
- feed.append(['a', 'b', 'c'])
-
- replicate(feed, clone, { live: true })
- })
+ await b.update() // should not hang
+ t.is(b.length, 0)
  })

- tape('shared stream, non live', function (t) {
- var a = create()
- var b = create()
+ test('basic multiplexing', async function (t) {
+ const a1 = await create()
+ const a2 = await create()

- a.append(['a', 'b'], function () {
- b.append(['c', 'd'], function () {
- var a1 = create(a.key)
- var b1 = create(b.key)
+ const b1 = await create(a1.key)
+ const b2 = await create(a2.key)

- a1.ready(function () {
- var s = a.replicate(true)
- b1.replicate(s)
+ const a = a1.replicate(a2.replicate(true))
+ const b = b1.replicate(b2.replicate(false))

- var s1 = a1.replicate(false)
- b.replicate(s1)
+ a.pipe(b).pipe(a)

- s.pipe(s1).pipe(s)
+ await a1.append('hi')
+ t.alike(await b1.get(0), Buffer.from('hi'))

- s.on('end', function () {
- t.ok(a1.has(0))
- t.ok(a1.has(1))
- t.ok(b1.has(0))
- t.ok(b1.has(1))
- t.end()
- })
- })
- })
- })
+ await a2.append('ho')
+ t.alike(await b2.get(0), Buffer.from('ho'))
  })

- tape('get total downloaded chunks', function (t) {
- var feed = create()
- feed.append(['a', 'b', 'c', 'e'])
- feed.on('ready', function () {
- var clone = create(feed.key, { sparse: true })
- clone.get(1, function (err) {
- t.error(err, 'no error')
- t.same(clone.downloaded(), 1)
- t.same(clone.downloaded(0), 1)
- t.same(clone.downloaded(2), 0)
- t.same(clone.downloaded(0, 1), 0)
- t.same(clone.downloaded(2, 4), 0)
- clone.get(3, function (err) {
- t.error(err, 'no error')
- t.same(clone.downloaded(), 2)
- t.same(clone.downloaded(0), 2)
- t.same(clone.downloaded(2), 1)
- t.same(clone.downloaded(0, 3), 1)
- t.same(clone.downloaded(2, 4), 1)
- t.end()
- })
- })
- replicate(feed, clone, { live: true })
- })
- })
+ test('async multiplexing', async function (t) {
+ const a1 = await create()
+ const b1 = await create(a1.key)

- tape('feed has a range of chuncks', function (t) {
- var feed = create()
- feed.append(['a', 'b', 'c', 'e'])
- feed.on('ready', function () {
- var clone = create(feed.key, { sparse: true })
- clone.get(0, function (err) {
- t.error(err, 'no error')
- clone.get(1, function (err) {
- t.error(err, 'no error')
- clone.get(2, function (err) {
- t.error(err, 'no error')
- t.ok(clone.has(1))
- t.notOk(clone.has(3))
- t.ok(clone.has(0, clone.length - 1))
- t.notOk(clone.has(0, clone.length))
- t.ok(clone.has(1, 3))
- t.notOk(clone.has(3, 4))
- t.end()
- })
- })
- })
- replicate(feed, clone, { live: true })
- })
- })
+ const a = a1.replicate(true)
+ const b = b1.replicate(false)

- tape('feed has a large range', function (t) {
- var feed = create()
- feed.append(['a', 'b', 'c', 'e', 'd', 'e', 'f', 'g'])
- feed.append(['a', 'b', 'c', 'e', 'd', 'e', 'f', 'g'])
- feed.append(['a', 'b', 'c', 'e', 'd', 'e', 'f', 'g'])
- feed.on('ready', function () {
- var clone = create(feed.key, { sparse: true })
- var count = 20
- var gotten = 20
- function got () {
- gotten--
- if (gotten === 0) {
- t.same(clone.downloaded(), 20)
- t.notOk(clone.has(5, 24))
- t.notOk(clone.has(12, 24))
- t.notOk(clone.has(20, 24))
- t.ok(clone.has(0, 20))
- t.ok(clone.has(3, 20))
- t.ok(clone.has(8, 20))
- t.ok(clone.has(19, 20))
- t.ok(clone.has(0, 16))
- t.ok(clone.has(3, 16))
- t.ok(clone.has(8, 16))
- t.end()
- }
- }
- for (var i = 0; i < count; i++) {
- clone.get(i, got)
- }
- replicate(feed, clone, { live: true })
- })
- })
+ a.pipe(b).pipe(a)

- tape('replicate no download', function (t) {
- var feed = create()
+ const a2 = await create()
+ await a2.append('ho')

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ const b2 = await create(a2.key)

- clone.get(0, function () {
- t.fail('Data was received')
- })
+ // b2 doesn't replicate immediately.
+ a2.replicate(a)
+ await eventFlush()
+ b2.replicate(b)

- replicate(feed, clone, { live: true }, { live: true, download: false })
+ await new Promise(resolve => b2.once('peer-add', resolve))

- setTimeout(function () {
- t.pass('No data was received')
- t.end()
- }, 300)
- })
+ t.is(b2.peers.length, 1)
+ t.alike(await b2.get(0), Buffer.from('ho'))
  })

- tape('replicate no upload', function (t) {
- var feed = create()
+ test('multiplexing with external noise stream', async function (t) {
+ const a1 = await create()
+ const a2 = await create()

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ const b1 = await create(a1.key)
+ const b2 = await create(a2.key)

- clone.get(0, function () {
- t.fail('Data was received')
- })
+ const n1 = new NoiseSecretStream(true)
+ const n2 = new NoiseSecretStream(false)
+ n1.rawStream.pipe(n2.rawStream).pipe(n1.rawStream)

- replicate(feed, clone, { live: true, upload: false }, { live: true })
+ a1.replicate(n1)
+ a2.replicate(n1)
+ b1.replicate(n2)
+ b2.replicate(n2)

- setTimeout(function () {
- t.pass('No data was received')
- t.end()
- }, 300)
- })
- })
+ await a1.append('hi')
+ t.alike(await b1.get(0), Buffer.from('hi'))

- tape('sparse mode, two downloads', function (t) {
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key, { sparse: true })
-
- replicate(feed, clone)
- clone.update(function () {
- clone.download({ start: 0, end: 4 }, function (err) {
- t.error(err, 'no error')
- // next tick so selection is cleared
- process.nextTick(function () {
- clone.download(4, function (err) {
- t.error(err, 'no error')
- t.end()
- })
- })
- })
- })
- })
+ await a2.append('ho')
+ t.alike(await b2.get(0), Buffer.from('ho'))
  })

- tape('peer-add and peer-remove are emitted', function (t) {
- t.plan(5)
+ test('seeking while replicating', async function (t) {
+ const a = await create()
+ const b = await create(a.key)

- var feed = create()
+ replicate(a, b, t)

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ await a.append(['hello', 'this', 'is', 'test', 'data'])

- feed.on('peer-add', function (peer) {
- t.notEquals(peer.remoteId, null)
- t.pass('peer-add1')
- })
- clone.on('peer-add', function (peer) {
- t.pass('peer-add2')
- })
- feed.on('peer-remove', function (peer) {
- t.pass('peer-remove1')
- })
- clone.on('peer-remove', function (peer) {
- t.pass('peer-remove2')
- })
-
- replicate(clone, feed)
- })
+ t.alike(await b.seek(6), [1, 1])
  })

- tape('replicate with onwrite', function (t) {
- var feed = create()
+ test('multiplexing multiple times over the same stream', async function (t) {
+ const a1 = await create()

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var expected = ['a', 'b', 'c', 'd', 'e']
+ await a1.append('hi')

- var clone = create(feed.key, {
- onwrite: function (index, data, peer, cb) {
- t.ok(peer, 'has peer')
- t.same(expected[index], data.toString())
- expected[index] = null
- cb()
- }
- })
-
- clone.on('sync', function () {
- t.same(expected, [null, null, null, null, null])
- t.end()
- })
-
- replicate(feed, clone, { live: true })
- })
- })
-
- tape('replicate from very sparse', function (t) {
- t.plan(3)
-
- var feed = create()
- var arr = new Array(1e3)
+ const b1 = await create(a1.key)

- arr.fill('a')
- feed.append(arr, function loop (err) {
- if (feed.length < 1e6) return feed.append(arr, loop)
+ const n1 = new NoiseSecretStream(true)
+ const n2 = new NoiseSecretStream(false)

- t.error(err, 'no error')
- t.pass('appended ' + arr.length + ' blocks')
+ n1.rawStream.pipe(n2.rawStream).pipe(n1.rawStream)

- var clone1 = create(feed.key, { sparse: true })
- var clone2 = create(feed.key)
- var missing = 30
- var then = 0
+ a1.replicate(n1)

- replicate(feed, clone1, { live: true })
-
- clone2.on('download', function () {
- if (--missing <= 0) {
- t.pass('downloaded all in ' + (Date.now() - then) + 'ms')
- }
- })
+ b1.replicate(n2)
+ b1.replicate(n2)

- clone1.download({ start: feed.length - 30, end: feed.length }, function () {
- then = Date.now()
- replicate(clone2, clone1, { live: true })
- })
- })
- })
+ t.ok(await b1.update(), 'update once')
+ t.absent(await a1.update(), 'writer up to date')
+ t.absent(await b1.update(), 'update again')

- tape('first get hash, then get block', function (t) {
- var feed = create()
- feed.append(['a', 'b', 'c'], function () {
- var clone = create(feed.key, { sparse: true })
- replicate(feed, clone, { live: true })
-
- // fetches the hash for block #2
- clone.seek(2, function (err) {
- t.error(err, 'no error')
- clone.get(2, function (err, data) {
- t.error(err, 'no error')
- t.same(data, Buffer.from('c'))
- t.end()
- })
- })
- })
+ t.is(b1.length, a1.length, 'same length')
+ t.end()
  })

- tape('destroy replication stream before handshake', function (t) {
- var feed = create()
- feed.append(['a', 'b', 'c'], function () {
- var stream = feed.replicate(true)
- stream.destroy()
- var anotherStream = feed.replicate(true)
- setImmediate(function () {
- anotherStream.destroy()
- feed.ifAvailable.ready(function () {
- t.pass('ifAvailable still triggers')
- t.same(feed.peers.length, 0)
- t.end()
- })
- })
- })
- })
+ test('destroying a stream and re-replicating works', async function (t) {
+ const core = await create()

- tape('request timeouts', function (t) {
- t.plan(4)
+ while (core.length < 33) await core.append(Buffer.from('#' + core.length))

- var feed = create()
- var stream = new Protocol(false, {
- timeout: 100
- })
+ const clone = await create(core.key)

- feed.ready(function () {
- var ch = stream.open(feed.key, {
- onwant (want) {
- t.pass('got want')
- ch.have({ start: 0, length: 1 })
- },
- onrequest (request) {
- t.same(request.index, 0, 'got request for #0')
- }
- })
+ let s1 = core.replicate(true)
+ let s2 = clone.replicate(false)

- t.same(typeof stream.timeout.ms, 'number', 'can read timeout ms from protocol stream')
+ s1.pipe(s2).pipe(s1)

- var timeout = setTimeout(() => t.fail('request should have timed out'), stream.timeout.ms * 2)
- var feedStream = feed.replicate(true, { download: true, timeout: 100 })
- stream.pipe(feedStream).pipe(stream)
+ await s2.opened

- feedStream.on('error', function (err) {
- clearTimeout(timeout)
- t.ok(err, 'stream had timeout error')
- })
+ const all = []
+ for (let i = 0; i < 33; i++) {
+ all.push(clone.get(i))
+ }

- stream.on('error', () => {})
+ clone.once('download', function () {
+ // simulate stream failure in the middle of bulk downloading
+ s1.destroy()
  })
- })

- tape('double replicate', function (t) {
- var feed = create()
+ await new Promise((resolve) => s1.once('close', resolve))

- feed.append('hi', function () {
- var clone = create(feed.key)
+ // retry
+ s1 = core.replicate(true)
+ s2 = clone.replicate(false)

- var a = feed.replicate(true)
- var b = clone.replicate(false)
- var missing = 2
+ s1.pipe(s2).pipe(s1)

- a.pipe(b).pipe(a)
- feed.replicate(a) // replicate twice
+ const blocks = await Promise.all(all)

- b.on('end', done)
- a.on('end', done)
-
- function done () {
- if (!--missing) return
- feed.ifAvailable.ready(function () {
- clone.ifAvailable.ready(function () {
- t.pass('no lingering state')
- t.end()
- })
- })
- }
- })
+ t.is(blocks.length, 33, 'downloaded 33 blocks')
  })

- tape('events: replicating events fired (normal)', function (t) {
- var feed = create()
- t.plan(1)
- feed.on('replicating', () => {
- t.pass('replicating')
- })
- feed.replicate(true).on('end', function () {
- t.end()
- })
- })
+ test('replicate discrete range', async function (t) {
+ const a = await create()

- tape('events: replicating events once for multiple feeds', function (t) {
- var feed = create()
- var stream = new Protocol(true)
- t.plan(1)
- feed.on('replicating', () => {
- t.pass('replicating')
- })
- feed.replicate(stream)
- feed.replicate(stream)
- feed.on('end', function () {
- t.end()
- })
- })
-
- tape('events: replicating events not fired if canceled before ready', function (t) {
- var feed = create()
- var stream = new Protocol(true)
- t.plan(0)
- feed.on('replicating', () => {
- t.pass('replicating')
- })
- stream.destroy()
- feed.replicate(stream, { live: true })
- t.end()
- })
+ await a.append(['a', 'b', 'c', 'd', 'e'])

- tape('regression: replicate without timeout', function (t) {
- t.plan(10)
+ const b = await create(a.key)

- var feed = create()
+ let d = 0
+ b.on('download', () => d++)

- feed.append(['a', 'b', 'c', 'd', 'e'], function () {
- var clone = create(feed.key)
+ replicate(a, b, t)

- clone.get(0, same(t, 'a'))
- clone.get(1, same(t, 'b'))
- clone.get(2, same(t, 'c'))
- clone.get(3, same(t, 'd'))
- clone.get(4, same(t, 'e'))
+ const r = b.download({ blocks: [0, 2, 3] })
+ await r.downloaded()

- replicate(feed, clone, { live: true, timeout: false })
- })
+ t.is(d, 3)
+ t.alike(await b.get(0), Buffer.from('a'))
+ t.alike(await b.get(2), Buffer.from('c'))
+ t.alike(await b.get(3), Buffer.from('d'))
  })

- tape('replicate with NOISE disabled', function (t) {
- var feed = create()
- feed.append(['a', 'b', 'c'], function () {
- var clone = create(feed.key)
- const stream = replicate(feed, clone, { live: false, noise: false, encrypted: false })
- clone.get(2, (err, data) => {
- t.error(err, 'no error')
- t.same(data.toString(), 'c')
- t.same(stream.remoteVerified(feed.key), false, 'remote is not verified')
- t.end()
- })
- })
- })
+ test('replicate discrete empty range', async function (t) {
+ const a = await create()

- tape('replicate with feed authentication', function (t) {
- var feed = create()
- const keysA = Protocol.keyPair()
- const keysB = Protocol.keyPair()
- t.plan(5)
- feed.ready(function (err) {
- t.error(err)
- var clone = create(feed.key)
- const stream = feed.replicate(true, {
- keyPair: keysA,
- onfeedauthenticate: function (authFeed, publicKey, cb) {
- t.equals(authFeed, feed)
- t.same(publicKey, keysB.publicKey)
- cb()
- }
- })
- stream.pipe(clone.replicate(false, {
- keyPair: keysB,
- onfeedauthenticate: function (authFeed, publicKey, cb) {
- t.equals(authFeed, clone)
- t.same(publicKey, keysA.publicKey)
- cb()
- }
- })).pipe(stream)
- .on('end', function () {
- t.end()
- })
- })
- })
+ await a.append(['a', 'b', 'c', 'd', 'e'])

- tape('replicate and close through stream', function (t) {
- var feed = create()
- var streams
- var clone
-
- feed.append(['a', 'b', 'c'], function () {
- clone = create(feed.key)
- streams = [feed.replicate(true, { live: true }), clone.replicate(false, { live: true })]
- streams[0].pipe(streams[1]).pipe(streams[0])
- streams[0].on('error', () => {})
- streams[1].on('error', () => {})
- })
+ const b = await create(a.key)

- feed.once('peer-open', function () {
- streams[0].close(feed.discoveryKey)
- streams[0].destroy()
- streams[0].on('close', function () {
- t.same(feed.peers.length, 0)
- t.end()
- })
- })
- })
+ let d = 0
+ b.on('download', () => d++)

- tape('download blocks', function (t) {
- var feed = create()
-
- feed.append(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'], function () {
- var clone = create(feed.key, { sparse: true })
-
- clone.download({ start: 0, end: 10, blocks: [0, 3, 4, 9] }, function (err) {
- t.error(err, 'no error')
- t.same(clone.length, 10)
- t.ok(clone.has(0))
- t.notOk(clone.has(1))
- t.notOk(clone.has(2))
- t.ok(clone.has(3))
- t.ok(clone.has(4))
- t.notOk(clone.has(5))
- t.notOk(clone.has(6))
- t.notOk(clone.has(7))
- t.notOk(clone.has(8))
- t.ok(clone.has(9))
- t.end()
- })
+ replicate(a, b, t)

- replicate(feed, clone, { live: true })
- })
- })
+ const r = b.download({ blocks: [] })
+ await r.downloaded()

- function same (t, val) {
- return function (err, data) {
- t.error(err, 'no error')
- t.same(data.toString(), val)
- }
- }
+ t.is(d, 0)
+ })