@bsv/sdk 2.0.10 → 2.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/package.json +1 -1
- package/dist/cjs/src/overlay-tools/HostReputationTracker.js +21 -13
- package/dist/cjs/src/overlay-tools/HostReputationTracker.js.map +1 -1
- package/dist/cjs/src/primitives/PrivateKey.js +3 -3
- package/dist/cjs/src/primitives/PrivateKey.js.map +1 -1
- package/dist/cjs/src/script/Spend.js +17 -9
- package/dist/cjs/src/script/Spend.js.map +1 -1
- package/dist/cjs/src/storage/StorageDownloader.js +6 -6
- package/dist/cjs/src/storage/StorageDownloader.js.map +1 -1
- package/dist/cjs/src/storage/StorageUtils.js +1 -1
- package/dist/cjs/src/storage/StorageUtils.js.map +1 -1
- package/dist/cjs/src/transaction/MerklePath.js +45 -31
- package/dist/cjs/src/transaction/MerklePath.js.map +1 -1
- package/dist/cjs/tsconfig.cjs.tsbuildinfo +1 -1
- package/dist/esm/src/overlay-tools/HostReputationTracker.js +21 -13
- package/dist/esm/src/overlay-tools/HostReputationTracker.js.map +1 -1
- package/dist/esm/src/primitives/PrivateKey.js +3 -3
- package/dist/esm/src/primitives/PrivateKey.js.map +1 -1
- package/dist/esm/src/script/Spend.js +17 -9
- package/dist/esm/src/script/Spend.js.map +1 -1
- package/dist/esm/src/storage/StorageDownloader.js +6 -6
- package/dist/esm/src/storage/StorageDownloader.js.map +1 -1
- package/dist/esm/src/storage/StorageUtils.js +1 -1
- package/dist/esm/src/storage/StorageUtils.js.map +1 -1
- package/dist/esm/src/transaction/MerklePath.js +45 -31
- package/dist/esm/src/transaction/MerklePath.js.map +1 -1
- package/dist/esm/tsconfig.esm.tsbuildinfo +1 -1
- package/dist/types/src/overlay-tools/HostReputationTracker.d.ts.map +1 -1
- package/dist/types/src/script/Spend.d.ts.map +1 -1
- package/dist/types/src/transaction/MerklePath.d.ts.map +1 -1
- package/dist/types/tsconfig.types.tsbuildinfo +1 -1
- package/dist/umd/bundle.js +3 -3
- package/dist/umd/bundle.js.map +1 -1
- package/package.json +1 -1
- package/src/auth/utils/__tests/validateCertificates.test.ts +12 -9
- package/src/kvstore/__tests/LocalKVStore.test.ts +4 -6
- package/src/overlay-tools/HostReputationTracker.ts +17 -14
- package/src/primitives/PrivateKey.ts +3 -3
- package/src/script/Spend.ts +19 -11
- package/src/storage/StorageDownloader.ts +6 -6
- package/src/storage/StorageUtils.ts +1 -1
- package/src/transaction/MerklePath.ts +51 -42
- package/src/transaction/__tests/MerklePath.test.ts +191 -22
|
@@ -1,8 +1,12 @@
|
|
|
1
1
|
import ChainTracker from '../ChainTracker'
|
|
2
2
|
import MerklePath from '../../transaction/MerklePath'
|
|
3
|
+
import { hash256 } from '../../primitives/Hash'
|
|
4
|
+
import { toHex, toArray } from '../../primitives/utils'
|
|
3
5
|
import invalidBumps from './bump.invalid.vectors'
|
|
4
6
|
import validBumps from './bump.valid.vectors'
|
|
5
7
|
|
|
8
|
+
const merkleHash = (m: string): string => toHex(hash256(toArray(m, 'hex').reverse()).reverse())
|
|
9
|
+
|
|
6
10
|
const BRC74Hex =
|
|
7
11
|
'fe8a6a0c000c04fde80b0011774f01d26412f0d16ea3f0447be0b5ebec67b0782e321a7a01cbdf7f734e30fde90b02004e53753e3fe4667073063a17987292cfdea278824e9888e52180581d7188d8fdea0b025e441996fc53f0191d649e68a200e752fb5f39e0d5617083408fa179ddc5c998fdeb0b0102fdf405000671394f72237d08a4277f4435e5b6edf7adc272f25effef27cdfe805ce71a81fdf50500262bccabec6c4af3ed00cc7a7414edea9c5efa92fb8623dd6160a001450a528201fdfb020101fd7c010093b3efca9b77ddec914f8effac691ecb54e2c81d0ab81cbc4c4b93befe418e8501bf01015e005881826eb6973c54003a02118fe270f03d46d02681c8bc71cd44c613e86302f8012e00e07a2bb8bb75e5accff266022e1e5e6e7b4d6d943a04faadcf2ab4a22f796ff30116008120cafa17309c0bb0e0ffce835286b3a2dcae48e4497ae2d2b7ced4f051507d010a00502e59ac92f46543c23006bff855d96f5e648043f0fb87a7a5949e6a9bebae430104001ccd9f8f64f4d0489b30cc815351cf425e0e78ad79a589350e4341ac165dbe45010301010000af8764ce7e1cc132ab5ed2229a005c87201c9a5ee15c0f91dd53eff31ab30cd4'
|
|
8
12
|
|
|
@@ -108,6 +112,24 @@ const BRC74JSONTrimmed = {
|
|
|
108
112
|
}
|
|
109
113
|
BRC74JSONTrimmed.path[1] = []
|
|
110
114
|
|
|
115
|
+
const BLOCK_125632 = {
|
|
116
|
+
height: 125632,
|
|
117
|
+
merkleroot: '205b2e27c58601fc1a8de04c83b6b0c46f89c16b2161c93441b7e9269cf6bc4a',
|
|
118
|
+
tx: [
|
|
119
|
+
'17cba98da71fe75862aac894392f2ff604356db386767fec364877a5a9ff200c',
|
|
120
|
+
'14ce64bd223ec9bb42662b74fdcf94f96a209a1aee72b7ba7639db503150ec2e',
|
|
121
|
+
'90a2de85351cfadd2326b9b0098e9c453af09b2980835f57a1429bbb44beb872',
|
|
122
|
+
'a31f2ddfea7ddd4581dca3007ee99e58ea6baa97a8ac3b32bb4610baac9f7206',
|
|
123
|
+
'c36eeed6fbc0259d30804f59f804dfcda35a54461157d6ac9c094f0ea378f35c',
|
|
124
|
+
'17752483868c52a98407a0e226d73b42e214e0fad548541619d858e1fd4a9549',
|
|
125
|
+
'3b8c4460412cfc55be0d50308ba704a859bd6f83bfed01b0828c9b067cd69246',
|
|
126
|
+
'a3f1b9d4b3ef3b061af352fdc2d02048417030fef9282c36da689cd899437cdb',
|
|
127
|
+
'66e2b022da877621ef197e02c3ef7d3f820d33a86ead2e72bf966432ea6776f1',
|
|
128
|
+
'e988b5d7a2cec8e0759ade2e151737d1cdfdde68accff42938583ad12eb98b99',
|
|
129
|
+
'5e7a8a8ec3f912ac1c4e90279c04263f170ed055c0411c8d490b846f01e6a99e'
|
|
130
|
+
]
|
|
131
|
+
}
|
|
132
|
+
|
|
111
133
|
const BRC74Root =
|
|
112
134
|
'57aab6e6fb1b697174ffb64e062c4728f2ffd33ddcfa02a43b64d8cd29b483b4'
|
|
113
135
|
const BRC74TXID1 =
|
|
@@ -131,6 +153,26 @@ class FakeChainTracker implements ChainTracker {
|
|
|
131
153
|
}
|
|
132
154
|
}
|
|
133
155
|
|
|
156
|
+
/** Splits BRC74JSON into two partial paths (A covers txid2, B covers txid3) ready to combine. */
|
|
157
|
+
function buildSplitPaths (): [MerklePath, MerklePath] {
|
|
158
|
+
const path0A = [...BRC74JSON.path[0]]
|
|
159
|
+
const path0B = [...BRC74JSON.path[0]]
|
|
160
|
+
const path1A = [...BRC74JSON.path[1]]
|
|
161
|
+
const path1B = [...BRC74JSON.path[1]]
|
|
162
|
+
const pathRest = [...BRC74JSON.path]
|
|
163
|
+
pathRest.shift()
|
|
164
|
+
pathRest.shift()
|
|
165
|
+
path0A.splice(2, 2)
|
|
166
|
+
path0B.shift()
|
|
167
|
+
path0B.shift()
|
|
168
|
+
path1A.shift()
|
|
169
|
+
path1B.pop()
|
|
170
|
+
return [
|
|
171
|
+
new MerklePath(BRC74JSON.blockHeight, [path0A, path1A, ...pathRest]),
|
|
172
|
+
new MerklePath(BRC74JSON.blockHeight, [path0B, path1B, ...pathRest])
|
|
173
|
+
]
|
|
174
|
+
}
|
|
175
|
+
|
|
134
176
|
describe('MerklePath', () => {
|
|
135
177
|
it('Parses from hex', () => {
|
|
136
178
|
const path = MerklePath.fromHex(BRC74Hex)
|
|
@@ -163,28 +205,7 @@ describe('MerklePath', () => {
|
|
|
163
205
|
)
|
|
164
206
|
})
|
|
165
207
|
it('Combines two paths', () => {
|
|
166
|
-
const path0A = [...BRC74JSON.path[0]]
|
|
167
|
-
const path0B = [...BRC74JSON.path[0]]
|
|
168
|
-
const path1A = [...BRC74JSON.path[1]]
|
|
169
|
-
const path1B = [...BRC74JSON.path[1]]
|
|
170
|
-
const pathRest = [...BRC74JSON.path]
|
|
171
|
-
pathRest.shift()
|
|
172
|
-
pathRest.shift()
|
|
173
|
-
path0A.splice(2, 2)
|
|
174
|
-
path0B.shift()
|
|
175
|
-
path0B.shift()
|
|
176
|
-
path1A.shift()
|
|
177
|
-
path1B.pop()
|
|
178
|
-
const pathAJSON = {
|
|
179
|
-
blockHeight: BRC74JSON.blockHeight,
|
|
180
|
-
path: [path0A, path1A, ...pathRest]
|
|
181
|
-
}
|
|
182
|
-
const pathBJSON = {
|
|
183
|
-
blockHeight: BRC74JSON.blockHeight,
|
|
184
|
-
path: [path0B, path1B, ...pathRest]
|
|
185
|
-
}
|
|
186
|
-
const pathA = new MerklePath(pathAJSON.blockHeight, pathAJSON.path)
|
|
187
|
-
const pathB = new MerklePath(pathBJSON.blockHeight, pathBJSON.path)
|
|
208
|
+
const [pathA, pathB] = buildSplitPaths()
|
|
188
209
|
expect(pathA.computeRoot(BRC74TXID2)).toEqual(BRC74Root)
|
|
189
210
|
expect(() => pathA.computeRoot(BRC74TXID3)).toThrow()
|
|
190
211
|
expect(() => pathB.computeRoot(BRC74TXID2)).toThrow()
|
|
@@ -194,6 +215,36 @@ describe('MerklePath', () => {
|
|
|
194
215
|
expect(pathA.computeRoot(BRC74TXID2)).toEqual(BRC74Root)
|
|
195
216
|
expect(pathA.computeRoot(BRC74TXID3)).toEqual(BRC74Root)
|
|
196
217
|
})
|
|
218
|
+
it('Serializes and deserializes a combined trimmed path', () => {
|
|
219
|
+
const [pathA, pathB] = buildSplitPaths()
|
|
220
|
+
pathA.combine(pathB)
|
|
221
|
+
const deserialized = MerklePath.fromHex(pathA.toHex())
|
|
222
|
+
expect(deserialized.computeRoot(BRC74TXID2)).toEqual(BRC74Root)
|
|
223
|
+
expect(deserialized.computeRoot(BRC74TXID3)).toEqual(BRC74Root)
|
|
224
|
+
})
|
|
225
|
+
it('Constructs a compound path from all txids at level 0 only', () => {
|
|
226
|
+
// A single-level compound path: all txids for a block given at level 0, no higher levels.
|
|
227
|
+
// The implementation should be able to compute the merkle root by calculating up from the leaves.
|
|
228
|
+
const tx0 = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
|
|
229
|
+
const tx1 = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
|
|
230
|
+
const tx2 = 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
|
|
231
|
+
const tx3 = 'dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
|
|
232
|
+
const root4 = merkleHash(merkleHash(tx3 + tx2) + merkleHash(tx1 + tx0))
|
|
233
|
+
const mp = new MerklePath(100, [[
|
|
234
|
+
{ offset: 0, txid: true, hash: tx0 },
|
|
235
|
+
{ offset: 1, txid: true, hash: tx1 },
|
|
236
|
+
{ offset: 2, txid: true, hash: tx2 },
|
|
237
|
+
{ offset: 3, txid: true, hash: tx3 }
|
|
238
|
+
]])
|
|
239
|
+
expect(mp.computeRoot(tx0)).toEqual(root4)
|
|
240
|
+
expect(mp.computeRoot(tx1)).toEqual(root4)
|
|
241
|
+
expect(mp.computeRoot(tx2)).toEqual(root4)
|
|
242
|
+
expect(mp.computeRoot(tx3)).toEqual(root4)
|
|
243
|
+
// Serializing and deserializing a single-level compound path should also work
|
|
244
|
+
const deserialized = MerklePath.fromHex(mp.toHex())
|
|
245
|
+
expect(deserialized.computeRoot(tx0)).toEqual(root4)
|
|
246
|
+
expect(deserialized.computeRoot(tx3)).toEqual(root4)
|
|
247
|
+
})
|
|
197
248
|
it('Rejects invalid bumps', () => {
|
|
198
249
|
for (const invalid of invalidBumps) {
|
|
199
250
|
expect(() => MerklePath.fromHex(invalid.bump)).toThrow(invalid.error)
|
|
@@ -246,4 +297,122 @@ describe('MerklePath', () => {
|
|
|
246
297
|
)
|
|
247
298
|
expect(isValid).toBe(false)
|
|
248
299
|
})
|
|
300
|
+
it('constructs a compound MerklePath from all txids in a block with odd tree levels', () => {
|
|
301
|
+
const { height, merkleroot, tx } = BLOCK_125632
|
|
302
|
+
const leafs = tx.map((hash, offset) => ({ hash, txid: true, offset }))
|
|
303
|
+
if (leafs.length % 2) leafs.push({ offset: leafs.length, duplicate: true } as any)
|
|
304
|
+
const mp = new MerklePath(height, [leafs])
|
|
305
|
+
expect(mp.computeRoot()).toBe(merkleroot)
|
|
306
|
+
})
|
|
307
|
+
it('compound path for 3 txids trims, round-trips through hex, and splits into per-txid proofs', () => {
|
|
308
|
+
const { height, merkleroot, tx } = BLOCK_125632
|
|
309
|
+
|
|
310
|
+
// Precompute the full Merkle tree for block 125632.
|
|
311
|
+
// merkleHash(right + left) matches the SDK's internal hash convention.
|
|
312
|
+
const L1 = [
|
|
313
|
+
merkleHash(tx[1] + tx[0]),
|
|
314
|
+
merkleHash(tx[3] + tx[2]),
|
|
315
|
+
merkleHash(tx[5] + tx[4]),
|
|
316
|
+
merkleHash(tx[7] + tx[6]),
|
|
317
|
+
merkleHash(tx[9] + tx[8]),
|
|
318
|
+
merkleHash(tx[10] + tx[10]) // tx[10] duplicated — odd count at level 0
|
|
319
|
+
]
|
|
320
|
+
const L2 = [
|
|
321
|
+
merkleHash(L1[1] + L1[0]),
|
|
322
|
+
merkleHash(L1[3] + L1[2]),
|
|
323
|
+
merkleHash(L1[5] + L1[4])
|
|
324
|
+
]
|
|
325
|
+
const L3 = [
|
|
326
|
+
merkleHash(L2[1] + L2[0]),
|
|
327
|
+
merkleHash(L2[2] + L2[2]) // L2 count = 3 (odd) — last node duplicated
|
|
328
|
+
]
|
|
329
|
+
expect(merkleHash(L3[1] + L3[0])).toBe(merkleroot)
|
|
330
|
+
|
|
331
|
+
// Build minimal per-txid MerklePaths for tx[2], tx[5], and tx[8].
|
|
332
|
+
// tx[8] exercises the odd-level duplication at level 2 ({offset:3, duplicate:true}).
|
|
333
|
+
const mpTx2 = new MerklePath(height, [
|
|
334
|
+
[{ offset: 2, txid: true, hash: tx[2] }, { offset: 3, hash: tx[3] }],
|
|
335
|
+
[{ offset: 0, hash: L1[0] }],
|
|
336
|
+
[{ offset: 1, hash: L2[1] }],
|
|
337
|
+
[{ offset: 1, hash: L3[1] }]
|
|
338
|
+
])
|
|
339
|
+
const mpTx5 = new MerklePath(height, [
|
|
340
|
+
[{ offset: 4, hash: tx[4] }, { offset: 5, txid: true, hash: tx[5] }],
|
|
341
|
+
[{ offset: 3, hash: L1[3] }],
|
|
342
|
+
[{ offset: 0, hash: L2[0] }],
|
|
343
|
+
[{ offset: 1, hash: L3[1] }]
|
|
344
|
+
])
|
|
345
|
+
const mpTx8 = new MerklePath(height, [
|
|
346
|
+
[{ offset: 8, txid: true, hash: tx[8] }, { offset: 9, hash: tx[9] }],
|
|
347
|
+
[{ offset: 5, hash: L1[5] }],
|
|
348
|
+
[{ offset: 3, duplicate: true }], // tx[8] is last odd node at level 2
|
|
349
|
+
[{ offset: 0, hash: L3[0] }]
|
|
350
|
+
])
|
|
351
|
+
expect(mpTx2.computeRoot(tx[2])).toBe(merkleroot)
|
|
352
|
+
expect(mpTx5.computeRoot(tx[5])).toBe(merkleroot)
|
|
353
|
+
expect(mpTx8.computeRoot(tx[8])).toBe(merkleroot)
|
|
354
|
+
|
|
355
|
+
// Combine into one compound path (combine() trims automatically)
|
|
356
|
+
const compound = new MerklePath(height, mpTx2.path.map(l => [...l]))
|
|
357
|
+
compound.combine(mpTx5)
|
|
358
|
+
compound.combine(mpTx8)
|
|
359
|
+
expect(compound.computeRoot(tx[2])).toBe(merkleroot)
|
|
360
|
+
expect(compound.computeRoot(tx[5])).toBe(merkleroot)
|
|
361
|
+
expect(compound.computeRoot(tx[8])).toBe(merkleroot)
|
|
362
|
+
|
|
363
|
+
// Serialize and deserialize
|
|
364
|
+
const deserialized = MerklePath.fromHex(compound.toHex())
|
|
365
|
+
expect(deserialized.computeRoot(tx[2])).toBe(merkleroot)
|
|
366
|
+
expect(deserialized.computeRoot(tx[5])).toBe(merkleroot)
|
|
367
|
+
expect(deserialized.computeRoot(tx[8])).toBe(merkleroot)
|
|
368
|
+
|
|
369
|
+
// Split the deserialized compound path into standalone per-txid proofs.
|
|
370
|
+
// findOrComputeLeaf reconstructs sibling hashes that were trimmed away.
|
|
371
|
+
const splitProof = (source: MerklePath, txOffset: number, txHash: string): MerklePath => {
|
|
372
|
+
const levels = source.path.map((_, h) => {
|
|
373
|
+
const sibOffset = (txOffset >> h) ^ 1
|
|
374
|
+
if (h === 0) {
|
|
375
|
+
const sib = source.findOrComputeLeaf(0, sibOffset)
|
|
376
|
+
if (sib == null) throw new Error('Missing sibling at level 0')
|
|
377
|
+
return [{ offset: txOffset, txid: true, hash: txHash }, sib].sort((a, b) => a.offset - b.offset)
|
|
378
|
+
}
|
|
379
|
+
const sib = source.findOrComputeLeaf(h, sibOffset)
|
|
380
|
+
return sib == null ? [] : [sib]
|
|
381
|
+
})
|
|
382
|
+
return new MerklePath(source.blockHeight, levels)
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
const splitTx2 = splitProof(deserialized, 2, tx[2])
|
|
386
|
+
const splitTx5 = splitProof(deserialized, 5, tx[5])
|
|
387
|
+
const splitTx8 = splitProof(deserialized, 8, tx[8])
|
|
388
|
+
|
|
389
|
+
// Each standalone proof computes the same root — no data was lost through the pipeline
|
|
390
|
+
expect(splitTx2.computeRoot(tx[2])).toBe(merkleroot)
|
|
391
|
+
expect(splitTx5.computeRoot(tx[5])).toBe(merkleroot)
|
|
392
|
+
expect(splitTx8.computeRoot(tx[8])).toBe(merkleroot)
|
|
393
|
+
})
|
|
394
|
+
it('findOrComputeLeaf duplicates leaf0 when leaf1 carries both a hash and duplicate=true', () => {
|
|
395
|
+
// Covers the leaf1.duplicate === true branch inside findOrComputeLeaf.
|
|
396
|
+
// That branch is reached when leaf1.hash is non-null (bypassing the null-check above it)
|
|
397
|
+
// but leaf1.duplicate is also true — an unusual but valid interface state.
|
|
398
|
+
const tx0 = 'aa'.repeat(32)
|
|
399
|
+
const tx1 = 'bb'.repeat(32)
|
|
400
|
+
|
|
401
|
+
// Build a minimal valid path so the constructor does not throw.
|
|
402
|
+
const mp = new MerklePath(1, [[
|
|
403
|
+
{ offset: 0, txid: true, hash: tx0 },
|
|
404
|
+
{ offset: 1, hash: tx1 }
|
|
405
|
+
]])
|
|
406
|
+
|
|
407
|
+
// Mutate: give the sibling leaf at offset 1 both a hash and duplicate=true.
|
|
408
|
+
// findOrComputeLeaf(1, 0) will:
|
|
409
|
+
// - not find offset 0 in path[1] (path.length === 1, no higher levels)
|
|
410
|
+
// - recurse to level 0: leaf0 = tx0 (offset 0), leaf1 = {hash:tx1, duplicate:true}
|
|
411
|
+
// - leaf1.hash is non-null → skips the null-branch
|
|
412
|
+
// - leaf1.duplicate === true → line 349: workinghash = hash(leaf0 + leaf0)
|
|
413
|
+
mp.path[0][1] = { offset: 1, hash: tx1, duplicate: true }
|
|
414
|
+
|
|
415
|
+
const result = mp.findOrComputeLeaf(1, 0)
|
|
416
|
+
expect(result?.hash).toBe(merkleHash(tx0 + tx0))
|
|
417
|
+
})
|
|
249
418
|
})
|