hypercore 10.0.0-alpha.4 → 10.0.0-alpha.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +58 -3
- package/index.js +504 -166
- package/lib/bitfield.js +9 -5
- package/lib/block-encryption.js +68 -0
- package/lib/block-store.js +3 -1
- package/lib/caps.js +34 -0
- package/lib/core.js +84 -27
- package/lib/errors.js +42 -0
- package/lib/merkle-tree.js +186 -113
- package/lib/messages.js +249 -168
- package/lib/oplog.js +4 -3
- package/lib/replicator.js +1288 -548
- package/lib/streams.js +56 -0
- package/package.json +18 -9
- package/.github/workflows/test-node.yml +0 -23
- package/CHANGELOG.md +0 -37
- package/UPGRADE.md +0 -9
- package/examples/announce.js +0 -19
- package/examples/basic.js +0 -10
- package/examples/http.js +0 -123
- package/examples/lookup.js +0 -20
- package/lib/extensions.js +0 -76
- package/lib/protocol.js +0 -524
- package/lib/random-iterator.js +0 -46
- package/test/basic.js +0 -78
- package/test/bitfield.js +0 -71
- package/test/core.js +0 -290
- package/test/encodings.js +0 -18
- package/test/extension.js +0 -71
- package/test/helpers/index.js +0 -23
- package/test/merkle-tree.js +0 -518
- package/test/mutex.js +0 -137
- package/test/oplog.js +0 -399
- package/test/preload.js +0 -72
- package/test/replicate.js +0 -333
- package/test/sessions.js +0 -173
- package/test/user-data.js +0 -47
package/lib/bitfield.js
CHANGED

@@ -1,6 +1,7 @@
 // TODO: needs massive improvements obvs
 
 const BigSparseArray = require('big-sparse-array')
+const b4a = require('b4a')
 
 class FixedBitfield {
   constructor (index, bitfield) {

@@ -40,8 +41,9 @@ module.exports = class Bitfield {
     this.pages = new BigSparseArray()
     this.unflushed = []
     this.storage = storage
+    this.resumed = !!(buf && buf.byteLength >= 4)
 
-    const all =
+    const all = this.resumed
       ? new Uint32Array(buf.buffer, buf.byteOffset, Math.floor(buf.byteLength / 4))
       : new Uint32Array(1024)
 

@@ -92,8 +94,10 @@ module.exports = class Bitfield {
   clear () {
     return new Promise((resolve, reject) => {
       this.storage.del(0, Infinity, (err) => {
-        if (err) reject(err)
-
+        if (err) return reject(err)
+        this.pages = new BigSparseArray()
+        this.unflushed = []
+        resolve()
       })
     })
   }

@@ -116,9 +120,9 @@ module.exports = class Bitfield {
     let error = null
 
     for (const page of this.unflushed) {
-      const
+      const buf = b4a.from(page.bitfield.buffer, page.bitfield.byteOffset, page.bitfield.byteLength)
       page.dirty = false
-      this.storage.write(page.index * 4096,
+      this.storage.write(page.index * 4096, buf, done)
     }
 
     function done (err) {
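The flush() change above writes each dirty page by wrapping its Uint32Array in a byte view before handing it to storage.write. A small sketch of that zero-copy wrap (not from the package; values are made up):

const b4a = require('b4a')

// One 1024-word page, i.e. the 4096-byte unit that flush() writes to storage.
const page = new Uint32Array(1024)
page[0] = 0xffffffff // first 32 bits set

// b4a.from(buffer, byteOffset, byteLength) views the same memory, no copy.
const view = b4a.from(page.buffer, page.byteOffset, page.byteLength)
console.log(view.byteLength)                    // 4096
console.log(view[0], view[1], view[2], view[3]) // 255 255 255 255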
package/lib/block-encryption.js
ADDED

@@ -0,0 +1,68 @@
+const sodium = require('sodium-universal')
+const c = require('compact-encoding')
+const b4a = require('b4a')
+
+const nonce = b4a.alloc(sodium.crypto_stream_NONCEBYTES)
+
+module.exports = class BlockEncryption {
+  constructor (encryptionKey, hypercoreKey) {
+    const subKeys = b4a.alloc(2 * sodium.crypto_stream_KEYBYTES)
+
+    this.key = encryptionKey
+    this.blockKey = subKeys.subarray(0, sodium.crypto_stream_KEYBYTES)
+    this.blindingKey = subKeys.subarray(sodium.crypto_stream_KEYBYTES)
+    this.padding = 8
+
+    sodium.crypto_generichash(this.blockKey, encryptionKey, hypercoreKey)
+    sodium.crypto_generichash(this.blindingKey, this.blockKey)
+  }
+
+  encrypt (index, block, fork) {
+    const padding = block.subarray(0, this.padding)
+    block = block.subarray(this.padding)
+
+    c.uint64.encode({ start: 0, end: 8, buffer: padding }, fork)
+    c.uint64.encode({ start: 0, end: 8, buffer: nonce }, index)
+
+    // Zero out any previous padding.
+    nonce.fill(0, 8, 8 + padding.byteLength)
+
+    // Blind the fork ID, possibly risking reusing the nonce on a reorg of the
+    // Hypercore. This is fine as the blinding is best-effort and the latest
+    // fork ID shared on replication anyway.
+    sodium.crypto_stream_xor(
+      padding,
+      padding,
+      nonce,
+      this.blindingKey
+    )
+
+    nonce.set(padding, 8)
+
+    // The combination of a (blinded) fork ID and a block index is unique for a
+    // given Hypercore and is therefore a valid nonce for encrypting the block.
+    sodium.crypto_stream_xor(
+      block,
+      block,
+      nonce,
+      this.blockKey
+    )
+  }
+
+  decrypt (index, block) {
+    const padding = block.subarray(0, this.padding)
+    block = block.subarray(this.padding)
+
+    c.uint64.encode({ start: 0, end: 8, buffer: nonce }, index)
+
+    nonce.set(padding, 8)
+
+    // Decrypt the block using the blinded fork ID.
+    sodium.crypto_stream_xor(
+      block,
+      block,
+      nonce,
+      this.blockKey
+    )
+  }
+}
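A minimal usage sketch of the BlockEncryption class added above (illustrative only; the keys are random placeholders and the require path assumes the package root). The caller reserves padding (8) bytes in front of each block: encrypt() stores the blinded fork ID there, so decrypt() only needs the block index.

const sodium = require('sodium-universal')
const b4a = require('b4a')
const BlockEncryption = require('./lib/block-encryption')

const encryptionKey = b4a.alloc(sodium.crypto_stream_KEYBYTES)
const hypercoreKey = b4a.alloc(32) // stand-in for the core's public key
sodium.randombytes_buf(encryptionKey)
sodium.randombytes_buf(hypercoreKey)

const enc = new BlockEncryption(encryptionKey, hypercoreKey)

// Reserve enc.padding (8) bytes in front of the plaintext.
const block = b4a.concat([b4a.alloc(enc.padding), b4a.from('hello world')])

enc.encrypt(42, block, 0) // block index 42, fork 0; encrypts in place
enc.decrypt(42, block)    // decrypts in place using the stored, blinded fork ID

console.log(b4a.toString(block.subarray(enc.padding))) // 'hello world'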
package/lib/block-store.js
CHANGED

@@ -1,3 +1,5 @@
+const b4a = require('b4a')
+
 module.exports = class BlockStore {
   constructor (storage, tree) {
     this.storage = storage

@@ -15,7 +17,7 @@ module.exports = class BlockStore {
 
   putBatch (i, batch, offset) {
     if (batch.length === 0) return Promise.resolve()
-    return this.put(i, batch.length === 1 ? batch[0] :
+    return this.put(i, batch.length === 1 ? batch[0] : b4a.concat(batch), offset)
   }
 
   clear () {
package/lib/caps.js
ADDED

@@ -0,0 +1,34 @@
+const crypto = require('hypercore-crypto')
+const sodium = require('sodium-universal')
+const b4a = require('b4a')
+const c = require('compact-encoding')
+
+// TODO: rename this to "crypto" and move everything hashing related etc in here
+// Also lets move the tree stuff from hypercore-crypto here, and loose the types
+// from the hashes there - they are not needed since we lock the indexes in the tree
+// hash and just makes alignment etc harder in other languages
+
+const [TREE, REPLICATE_INITIATOR, REPLICATE_RESPONDER] = crypto.namespace('hypercore', 3)
+
+exports.replicate = function (isInitiator, key, handshakeHash) {
+  const out = b4a.allocUnsafe(32)
+  sodium.crypto_generichash_batch(out, [isInitiator ? REPLICATE_INITIATOR : REPLICATE_RESPONDER, key], handshakeHash)
+  return out
+}
+
+exports.treeSignable = function (hash, length, fork) {
+  const state = { start: 0, end: 80, buffer: b4a.allocUnsafe(80) }
+  c.raw.encode(state, TREE)
+  c.raw.encode(state, hash)
+  c.uint64.encode(state, length)
+  c.uint64.encode(state, fork)
+  return state.buffer
+}
+
+exports.treeSignableLegacy = function (hash, length, fork) {
+  const state = { start: 0, end: 48, buffer: b4a.allocUnsafe(48) }
+  c.raw.encode(state, hash)
+  c.uint64.encode(state, length)
+  c.uint64.encode(state, fork)
+  return state.buffer
+}
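The treeSignable helper above builds the 80-byte payload that gets signed: the 32-byte 'hypercore' tree namespace, the 32-byte Merkle root hash, and two little-endian uint64s for length and fork. A hedged sketch of signing and verifying that payload (the key pair and root hash are placeholders; the require path assumes the package root):

const crypto = require('hypercore-crypto')
const b4a = require('b4a')
const caps = require('./lib/caps')

const { publicKey, secretKey } = crypto.keyPair()
const rootHash = b4a.alloc(32) // placeholder for the Merkle root hash

const signable = caps.treeSignable(rootHash, 5, 0) // tree length 5, fork 0
console.log(signable.byteLength) // 80

const signature = crypto.sign(signable, secretKey)
console.log(crypto.verify(signable, signature, publicKey)) // true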
package/lib/core.js
CHANGED

@@ -1,13 +1,15 @@
 const hypercoreCrypto = require('hypercore-crypto')
+const b4a = require('b4a')
 const Oplog = require('./oplog')
 const Mutex = require('./mutex')
 const MerkleTree = require('./merkle-tree')
 const BlockStore = require('./block-store')
 const Bitfield = require('./bitfield')
-const {
+const { BAD_ARGUMENT, STORAGE_EMPTY, STORAGE_CONFLICT, INVALID_SIGNATURE } = require('./errors')
+const m = require('./messages')
 
 module.exports = class Core {
-  constructor (header, crypto, oplog, tree, blocks, bitfield,
+  constructor (header, crypto, oplog, tree, blocks, bitfield, auth, legacy, onupdate) {
     this.onupdate = onupdate
     this.header = header
     this.crypto = crypto

@@ -15,7 +17,7 @@ module.exports = class Core {
     this.tree = tree
     this.blocks = blocks
     this.bitfield = bitfield
-    this.
+    this.defaultAuth = auth
     this.truncating = 0
 
     this._maxOplogSize = 65536

@@ -23,6 +25,9 @@ module.exports = class Core {
     this._verifies = null
     this._verifiesFlushed = null
     this._mutex = new Mutex()
+    this._legacy = legacy
+
+    this._updateContiguousLength(header.contiguousLength)
   }
 
   static async open (storage, opts = {}) {

@@ -49,27 +54,46 @@ module.exports = class Core {
     }
   }
 
-
-
-
-
+  static createAuth (crypto, { publicKey, secretKey }, opts = {}) {
+    if (secretKey && !crypto.validateKeyPair({ publicKey, secretKey })) {
+      throw BAD_ARGUMENT('Invalid key pair')
+    }
+
+    const sign = opts.sign
+      ? opts.sign
+      : secretKey
+        ? (signable) => crypto.sign(signable, secretKey)
+        : undefined
+
+    return {
+      sign,
+      verify (signable, signature) {
+        return crypto.verify(signable, signature, publicKey)
+      }
+    }
   }
 
   static async resume (oplogFile, treeFile, bitfieldFile, dataFile, opts) {
-
+    let overwrite = opts.overwrite === true
+
+    const force = opts.force === true
     const createIfMissing = opts.createIfMissing !== false
     const crypto = opts.crypto || hypercoreCrypto
 
     const oplog = new Oplog(oplogFile, {
-      headerEncoding:
-      entryEncoding:
+      headerEncoding: m.oplog.header,
+      entryEncoding: m.oplog.entry
    })
 
     let { header, entries } = await oplog.open()
 
-    if (
+    if (force && opts.keyPair && header && header.signer && !b4a.equals(header.signer.publicKey, opts.keyPair.publicKey)) {
+      overwrite = true
+    }
+
+    if (!header || overwrite) {
       if (!createIfMissing) {
-        throw
+        throw STORAGE_EMPTY('No Hypercore is stored here')
       }
 
       header = {

@@ -84,14 +108,15 @@
         signer: opts.keyPair || crypto.keyPair(),
         hints: {
           reorgs: []
-        }
+        },
+        contiguousLength: 0
       }
 
       await oplog.flush(header)
     }
 
-    if (opts.keyPair && !header.signer.publicKey
-      throw
+    if (opts.keyPair && !b4a.equals(header.signer.publicKey, opts.keyPair.publicKey)) {
+      throw STORAGE_CONFLICT('Another Hypercore is stored here')
     }
 
     const tree = await MerkleTree.open(treeFile, { crypto, ...header.tree })

@@ -102,9 +127,13 @@
       await tree.clear()
       await blocks.clear()
       await bitfield.clear()
+      entries = []
+    } else if (bitfield.resumed && header.tree.length === 0) {
+      // If this was an old bitfield, reset it since it loads based on disk size atm (TODO: change that)
+      await bitfield.clear()
     }
 
-    const
+    const auth = opts.auth || this.createAuth(crypto, header.signer)
 
     for (const e of entries) {
       if (e.userData) {

@@ -118,7 +147,7 @@
       }
 
       if (e.bitfield) {
-        bitfield.setRange(e.bitfield.start, e.bitfield.length)
+        bitfield.setRange(e.bitfield.start, e.bitfield.length, !e.bitfield.drop)
       }
 
       if (e.treeUpgrade) {

@@ -135,7 +164,7 @@
       }
     }
 
-    return new this(header, crypto, oplog, tree, blocks, bitfield,
+    return new this(header, crypto, oplog, tree, blocks, bitfield, auth, !!opts.legacy, opts.onupdate || noop)
   }
 
   _shouldFlush () {

@@ -166,6 +195,16 @@
     await this.blocks.put(index, value, byteOffset)
   }
 
+  _updateContiguousLength (index, length = 0) {
+    if (index === this.header.contiguousLength) {
+      let i = this.header.contiguousLength + length
+
+      while (this.bitfield.get(i)) i++
+
+      this.header.contiguousLength = i
+    }
+  }
+
   async userData (key, value) {
     // TODO: each oplog append can set user data, so we should have a way
     // to just hitch a ride on one of the other ongoing appends?

@@ -176,7 +215,7 @@
 
     for (const u of this.header.userData) {
       if (u.key !== key) continue
-      if (value &&
+      if (value && b4a.equals(u.value, value)) return
       empty = false
       break
     }

@@ -200,13 +239,13 @@
     }
   }
 
-  async truncate (length, fork,
+  async truncate (length, fork, auth = this.defaultAuth) {
     this.truncating++
     await this._mutex.lock()
 
     try {
       const batch = await this.tree.truncate(length, fork)
-      batch.signature = await sign(batch.signable())
+      batch.signature = await auth.sign(batch.signable())
       await this._truncate(batch, null)
     } finally {
       this.truncating--

@@ -214,17 +253,19 @@
     }
   }
 
-  async append (values,
+  async append (values, auth = this.defaultAuth, hooks = {}) {
     await this._mutex.lock()
 
     try {
+      if (hooks.preappend) await hooks.preappend(values)
+
       if (!values.length) return this.tree.length
 
       const batch = this.tree.batch()
       for (const val of values) batch.append(val)
 
       const hash = batch.hash()
-      batch.signature = await sign(batch.signable(hash))
+      batch.signature = await auth.sign(this._legacy ? batch.signableLegacy(hash) : batch.signable(hash))
 
       const entry = {
         userData: null,

@@ -243,6 +284,7 @@
       this.bitfield.setRange(batch.ancestors, batch.length - batch.ancestors, true)
       batch.commit()
 
+      this.header.contiguousLength = batch.length
       this.header.tree.length = batch.length
       this.header.tree.rootHash = hash
       this.header.tree.signature = batch.signature

@@ -256,11 +298,16 @@
     }
   }
 
+  _signed (batch, hash, auth = this.defaultAuth) {
+    const signable = this._legacy ? batch.signableLegacy(hash) : batch.signable(hash)
+    return auth.verify(signable, batch.signature)
+  }
+
   async _verifyExclusive ({ batch, bitfield, value, from }) {
     // TODO: move this to tree.js
     const hash = batch.hash()
-    if (!batch.signature || !this.
-      throw
+    if (!batch.signature || !this._signed(batch, hash)) {
+      throw INVALID_SIGNATURE('Proof contains an invalid signature')
     }
 
     await this._mutex.lock()

@@ -279,7 +326,11 @@
 
       await this.oplog.append([entry], false)
 
-      if (bitfield)
+      if (bitfield) {
+        this.bitfield.set(bitfield.start, true)
+        this._updateContiguousLength(bitfield.start, bitfield.length)
+      }
+
       batch.commit()
 
       this.header.tree.fork = batch.fork

@@ -333,8 +384,13 @@
         continue
       }
 
-      if (bitfield)
+      if (bitfield) {
+        this.bitfield.set(bitfield.start, true)
+        this._updateContiguousLength(bitfield.start, bitfield.length)
+      }
+
       batch.commit()
+
       this.onupdate(0, bitfield, value, from)
     }
 

@@ -415,6 +471,7 @@
 
     const appended = batch.length > batch.ancestors
 
+    this.header.contiguousLength = Math.min(batch.ancestors, this.header.contiguousLength)
     this.header.tree.fork = batch.fork
     this.header.tree.length = batch.length
     this.header.tree.rootHash = batch.hash()
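The core.js changes replace the old bare sign callback with an auth object ({ sign, verify }) built by Core.createAuth, which append() and truncate() now default to. A minimal sketch of both shapes (illustrative only; the require path assumes the package root):

const hypercoreCrypto = require('hypercore-crypto')
const b4a = require('b4a')
const Core = require('./lib/core')

// Local key pair: createAuth derives sign() from the secret key.
const keyPair = hypercoreCrypto.keyPair()
const auth = Core.createAuth(hypercoreCrypto, keyPair)

const signable = b4a.from('example payload') // placeholder signable
const signature = auth.sign(signable)
console.log(auth.verify(signable, signature)) // true

// Remote-signer style: no secretKey locally, opts.sign supplies the signature.
const remoteAuth = Core.createAuth(hypercoreCrypto, { publicKey: keyPair.publicKey }, {
  sign (data) {
    // Assumption: forward `data` to wherever the secret key actually lives.
    return hypercoreCrypto.sign(data, keyPair.secretKey)
  }
})
console.log(remoteAuth.verify(signable, remoteAuth.sign(signable))) // true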
package/lib/errors.js
ADDED

@@ -0,0 +1,42 @@
+module.exports = class HypercoreError extends Error {
+  constructor (msg, code) {
+    super(msg)
+    this.code = code
+  }
+
+  static BAD_ARGUMENT (msg) {
+    return new HypercoreError(msg, 'BAD_ARGUMENT')
+  }
+
+  static STORAGE_EMPTY (msg) {
+    return new HypercoreError(msg, 'STORAGE_EMPTY')
+  }
+
+  static STORAGE_CONFLICT (msg) {
+    return new HypercoreError(msg, 'STORAGE_CONFLICT')
+  }
+
+  static INVALID_SIGNATURE (msg) {
+    return new HypercoreError(msg, 'INVALID_SIGNATURE')
+  }
+
+  static INVALID_CAPABILITY (msg) {
+    return new HypercoreError(msg, 'INVALID_CAPABILITY')
+  }
+
+  static SNAPSHOT_NOT_AVAILABLE (msg = 'Snapshot is not available') {
+    return new HypercoreError(msg, 'SNAPSHOT_NOT_AVAILABLE')
+  }
+
+  static REQUEST_CANCELLED (msg = 'Request was cancelled') {
+    return new HypercoreError(msg, 'REQUEST_CANCELLED')
+  }
+
+  static SESSION_NOT_WRITABLE (msg = 'Session is not writable') {
+    return new HypercoreError(msg, 'SESSION_NOT_WRITABLE')
+  }
+
+  static SESSION_CLOSED (msg = 'Session is closed') {
+    return new HypercoreError(msg, 'SESSION_CLOSED')
+  }
+}
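A short sketch of how these error factories are consumed: core.js above destructures them straight off the class, and callers can branch on the machine-readable code property.

const HypercoreError = require('./lib/errors')
const { STORAGE_EMPTY } = HypercoreError

try {
  throw STORAGE_EMPTY('No Hypercore is stored here')
} catch (err) {
  console.log(err instanceof HypercoreError) // true
  console.log(err.code)                      // 'STORAGE_EMPTY'
  console.log(err.message)                   // 'No Hypercore is stored here'
}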