@bsv/wallet-toolbox 1.6.31 → 1.6.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/CHANGELOG.md +4 -0
  2. package/mobile/out/src/monitor/Monitor.d.ts +8 -0
  3. package/mobile/out/src/monitor/Monitor.d.ts.map +1 -1
  4. package/mobile/out/src/monitor/Monitor.js +8 -0
  5. package/mobile/out/src/monitor/Monitor.js.map +1 -1
  6. package/mobile/out/src/monitor/tasks/TaskReorg.d.ts +8 -12
  7. package/mobile/out/src/monitor/tasks/TaskReorg.d.ts.map +1 -1
  8. package/mobile/out/src/monitor/tasks/TaskReorg.js +16 -73
  9. package/mobile/out/src/monitor/tasks/TaskReorg.js.map +1 -1
  10. package/mobile/out/src/sdk/WalletStorage.interfaces.d.ts +49 -0
  11. package/mobile/out/src/sdk/WalletStorage.interfaces.d.ts.map +1 -1
  12. package/mobile/out/src/storage/StorageProvider.d.ts.map +1 -1
  13. package/mobile/out/src/storage/StorageProvider.js +4 -0
  14. package/mobile/out/src/storage/StorageProvider.js.map +1 -1
  15. package/mobile/out/src/storage/WalletStorageManager.d.ts +34 -2
  16. package/mobile/out/src/storage/WalletStorageManager.d.ts.map +1 -1
  17. package/mobile/out/src/storage/WalletStorageManager.js +146 -0
  18. package/mobile/out/src/storage/WalletStorageManager.js.map +1 -1
  19. package/mobile/package-lock.json +6 -6
  20. package/mobile/package.json +2 -2
  21. package/out/src/monitor/Monitor.d.ts +8 -0
  22. package/out/src/monitor/Monitor.d.ts.map +1 -1
  23. package/out/src/monitor/Monitor.js +8 -0
  24. package/out/src/monitor/Monitor.js.map +1 -1
  25. package/out/src/monitor/tasks/TaskReorg.d.ts +8 -12
  26. package/out/src/monitor/tasks/TaskReorg.d.ts.map +1 -1
  27. package/out/src/monitor/tasks/TaskReorg.js +16 -73
  28. package/out/src/monitor/tasks/TaskReorg.js.map +1 -1
  29. package/out/src/sdk/WalletStorage.interfaces.d.ts +49 -0
  30. package/out/src/sdk/WalletStorage.interfaces.d.ts.map +1 -1
  31. package/out/src/services/__tests/verifyBeef.test.js +7 -0
  32. package/out/src/services/__tests/verifyBeef.test.js.map +1 -1
  33. package/out/src/storage/StorageProvider.d.ts.map +1 -1
  34. package/out/src/storage/StorageProvider.js +4 -0
  35. package/out/src/storage/StorageProvider.js.map +1 -1
  36. package/out/src/storage/WalletStorageManager.d.ts +34 -2
  37. package/out/src/storage/WalletStorageManager.d.ts.map +1 -1
  38. package/out/src/storage/WalletStorageManager.js +146 -0
  39. package/out/src/storage/WalletStorageManager.js.map +1 -1
  40. package/out/src/storage/__test/getBeefForTransaction.test.js +10 -0
  41. package/out/src/storage/__test/getBeefForTransaction.test.js.map +1 -1
  42. package/out/src/storage/schema/KnexMigrations.d.ts.map +1 -1
  43. package/out/src/storage/schema/KnexMigrations.js +12 -0
  44. package/out/src/storage/schema/KnexMigrations.js.map +1 -1
  45. package/out/tsconfig.all.tsbuildinfo +1 -1
  46. package/package.json +2 -4
  47. package/src/monitor/Monitor.ts +8 -0
  48. package/src/monitor/tasks/TaskReorg.ts +16 -70
  49. package/src/sdk/WalletStorage.interfaces.ts +44 -0
  50. package/src/services/__tests/verifyBeef.test.ts +11 -1
  51. package/src/storage/StorageProvider.ts +4 -0
  52. package/src/storage/WalletStorageManager.ts +162 -1
  53. package/src/storage/__test/getBeefForTransaction.test.ts +11 -2
  54. package/src/storage/methods/internalizeAction.ts +2 -2
  55. package/src/storage/schema/KnexMigrations.ts +13 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bsv/wallet-toolbox",
- "version": "1.6.31",
+ "version": "1.6.33",
  "description": "BRC100 conforming wallet, wallet storage and wallet signer components",
  "main": "./out/src/index.js",
  "types": "./out/src/index.d.ts",
@@ -33,7 +33,7 @@
  "dependencies": {
  "@bsv/auth-express-middleware": "^1.2.3",
  "@bsv/payment-express-middleware": "^1.2.3",
- "@bsv/sdk": "^1.8.2",
+ "@bsv/sdk": "^1.8.6",
  "express": "^4.21.2",
  "idb": "^8.0.2",
  "knex": "^3.1.0",
@@ -54,8 +54,6 @@
  "fake-indexeddb": "^6.0.0",
  "fs-extra": "^11.2.0",
  "jest": "^29.7.0",
- "jest-diff": "^29.7.0",
- "jest-simple-summary": "^1.0.2",
  "prettier": "^3.4.2",
  "ts-jest": "^29.0.5",
  "ts-node": "^10.9.1",
package/src/monitor/Monitor.ts CHANGED
@@ -383,6 +383,14 @@ export class Monitor {
  }
  }

+ /**
+ * Handler for new header events from Chaintracks.
+ *
+ * To minimize reorg processing, new headers are aged before processing via TaskNewHeader.
+ * Therefore this handler is intentionally a no-op.
+ *
+ * @param header
+ */
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  processHeader(header: BlockHeader): void {}
  }
package/src/monitor/tasks/TaskReorg.ts CHANGED
@@ -14,7 +14,7 @@ import { WalletMonitorTask } from './WalletMonitorTask'
  * The current implementation ages deactivation notifications by 10 minutes with each retry.
  * If a successful proof update confirms original proof data after 3 retries, the original is retained.
  *
- * In normal operation there should never be any work for this task to perform.
+ * In normal operation there should rarely be any work for this task to perform.
  * The most common result is that there are no matching proven_txs records because
  * generating new proven_txs records intentionally lags new block generation to
  * minimize this disruption.
@@ -24,17 +24,8 @@ import { WalletMonitorTask } from './WalletMonitorTask'
  * - Generated beefs are impacted.
  * - Updated proof data may be unavailable at the time a reorg is first reported.
  *
- * Instead of reorg notification derived from new header notification, reorg repair to
- * the proven_txs table is more effectively driven by noticing that a beef generated for a new
- * createAction fails to verify against the chaintracker.
- *
- * An alternate approach to processing these events is to revert the proven_txs record to a proven_tx_reqs record.
- * Pros:
- * - The same multiple attempt logic that already exists is reused.
- * - Failing to obtain a new proof already has transaction failure handling in place.
- * - Generated beefs automatically become one generation deeper, potentially allowing transaction outputs to be spent.
- * Cons:
- * - Transactions must revert to un-proven / un-mined.
+ * Proper reorg handling also requires repairing invalid beefs for new transactions when
+ * createAction fails to verify a generated beef against the chaintracker.
  */
  export class TaskReorg extends WalletMonitorTask {
  static taskName = 'Reorg'
@@ -49,6 +40,11 @@ export class TaskReorg extends WalletMonitorTask {
  super(monitor, TaskReorg.taskName)
  }

+ /**
+ * Shift aged deactivated headers onto `process` array.
+ * @param nowMsecsSinceEpoch current time in milliseconds since epoch.
+ * @returns `run` true iff there are aged deactivated headers to process.
+ */
  trigger(nowMsecsSinceEpoch: number): { run: boolean } {
  const cutoff = nowMsecsSinceEpoch - this.agedMsecs
  const q = this.monitor.deactivatedHeaders
@@ -66,72 +62,22 @@ export class TaskReorg extends WalletMonitorTask {
  let log = ''

  for (;;) {
+ // Loop over deactivated headers to process
  const header = this.process.shift()
  if (!header) break

- let ptxs: TableProvenTx[] = []
-
- await this.storage.runAsStorageProvider(async sp => {
- // Lookup all the proven_txs records matching the deactivated headers
- ptxs = await sp.findProvenTxs({ partial: { blockHash: header.header.hash } })
- })
+ const r = await this.storage.reproveHeader(header.header.hash)

- log += ` block ${header.header.hash} orphaned with ${ptxs.length} impacted transactions\n`
+ log += r.log

- let retry = false
- for (const ptx of ptxs) {
- const mpr = await this.monitor.services.getMerklePath(ptx.txid)
- if (mpr.merklePath && mpr.header) {
- const mp = mpr.merklePath
- const h = mpr.header
- const leaf = mp.path[0].find(leaf => leaf.txid === true && leaf.hash === ptx.txid)
- if (leaf) {
- const update: Partial<TableProvenTx> = {
- height: mp.blockHeight,
- index: leaf.offset,
- merklePath: mp.toBinary(),
- merkleRoot: h.merkleRoot,
- blockHash: h.hash
- }
- if (update.blockHash === ptx.blockHash) {
- log += ` txid ${ptx.txid} merkle path update still based on deactivated header ${ptx.blockHash}\n`
- if (header.tries + 1 >= this.maxRetries) {
- log += ` maximum retries ${this.maxRetries} exceeded\n`
- } else {
- retry = true
- }
- } else {
- // Verify the new proof's validity.
- const merkleRoot = mp.computeRoot(ptx.txid)
- const chaintracker = await this.monitor.services.getChainTracker()
- const isValid = await chaintracker.isValidRootForHeight(merkleRoot, update.height!)
- const logUpdate = ` height ${ptx.height} ${ptx.height === update.height ? 'unchanged' : `-> ${update.height}`}\n`
- log += ` blockHash ${ptx.blockHash} -> ${update.blockHash}\n`
- log += ` merkleRoot ${ptx.merkleRoot} -> ${update.merkleRoot}\n`
- log += ` index ${ptx.index} -> ${update.index}\n`
- if (!isValid) {
- log +=
- ` txid ${ptx.txid} chaintracker fails to confirm updated merkle path update invalid\n` + logUpdate
- } else {
- await this.storage.runAsStorageProvider(async sp => {
- await sp.updateProvenTx(ptx.provenTxId, update)
- })
- log += ` txid ${ptx.txid} proof data updated\n` + logUpdate
- }
- }
- } else {
- log += ` txid ${ptx.txid} merkle path update doesn't include txid\n`
- retry = true
- }
+ if (r.unavailable.length > 0 || r.unchanged.length > 0) {
+ if (header.tries + 1 >= this.maxRetries) {
+ log += ` maximum retries ${this.maxRetries} exceeded\n`
  } else {
- log += ` txid ${ptx.txid} merkle path update unavailable\n`
- retry = true
+ log += ` retrying...\n`
+ this.monitor.deactivatedHeaders.push({ header: header.header, whenMsecs: Date.now(), tries: header.tries + 1 })
  }
  }
- if (retry) {
- log += ` retrying...\n`
- this.monitor.deactivatedHeaders.push({ header: header.header, whenMsecs: Date.now(), tries: header.tries + 1 })
- }
  }

  return log
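
The retry bookkeeping above can be hard to follow in diff form. Below is a minimal standalone sketch of the aging and retry-cap behaviour the TaskReorg comments describe; the entry shape matches what runTask pushes back onto monitor.deactivatedHeaders, but the helper names (AgedHeaderEntry, shiftAged, requeue) are hypothetical and the time-ordered-queue assumption is mine, not the toolbox's.

    // Sketch only: mirrors the aging/retry behaviour described above.
    interface AgedHeaderEntry<H> {
      header: H // the deactivated (orphaned) block header
      whenMsecs: number // when the deactivation was noticed or last retried
      tries: number // how many reprove attempts have already been made
    }

    // trigger(): only entries older than agedMsecs move onto the work list, so
    // short-lived reorg churn settles before any repair is attempted.
    // Assumes the queue is appended in time order (entries are pushed with Date.now()).
    function shiftAged<H>(queue: AgedHeaderEntry<H>[], nowMsecs: number, agedMsecs: number): AgedHeaderEntry<H>[] {
      const cutoff = nowMsecs - agedMsecs
      const aged: AgedHeaderEntry<H>[] = []
      while (queue.length > 0 && queue[0].whenMsecs <= cutoff) aged.push(queue.shift()!)
      return aged
    }

    // runTask(): re-enqueue with tries + 1 unless the retry budget is exhausted,
    // in which case the original proof data is retained.
    function requeue<H>(queue: AgedHeaderEntry<H>[], entry: AgedHeaderEntry<H>, maxRetries: number): boolean {
      if (entry.tries + 1 >= maxRetries) return false
      queue.push({ header: entry.header, whenMsecs: Date.now(), tries: entry.tries + 1 })
      return true
    }
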
package/src/sdk/WalletStorage.interfaces.ts CHANGED
@@ -560,3 +560,47 @@ export interface ProcessSyncChunkResult {
  inserts: number
  error?: WalletError
  }
+
+ /**
+ * Returned results from WalletStorageManager reproveHeader method.
+ */
+ export interface ReproveHeaderResult {
+ /**
+ * Human readable log of the reproveHeader process.
+ */
+ log: string
+ /**
+ * List of proven_txs records that were updated with new proof data.
+ */
+ updated: { was: TableProvenTx; update: Partial<TableProvenTx>, logUpdate: string }[]
+ /**
+ * List of proven_txs records that were checked but currently available proof is unchanged.
+ */
+ unchanged: TableProvenTx[]
+ /**
+ * List of proven_txs records that were checked but currently proof data is unavailable.
+ */
+ unavailable: TableProvenTx[]
+ }
+
+ /**
+ * Returned results from WalletStorageManager reproveProven method.
+ */
+ export interface ReproveProvenResult {
+ /**
+ * Human readable log of the reproveProven process.
+ */
+ log: string
+ /**
+ * Valid if proof data for proven_txs record is available and has changed.
+ */
+ updated?: { update: Partial<TableProvenTx>, logUpdate: string }
+ /**
+ * True if proof data for proven_txs record was found to be unchanged.
+ */
+ unchanged: boolean
+ /**
+ * True if proof data for proven_txs record is currently unavailable.
+ */
+ unavailable: boolean
+ }
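
As a rough illustration of how these result types might be consumed, a hedged sketch follows; the summarizeReprove helper is hypothetical, and the `sdk` namespace import path from the package root is an assumption.

    // Hypothetical helper, not part of the toolbox: condenses a ReproveHeaderResult
    // into a short summary plus the same retry decision TaskReorg.runTask makes.
    import { sdk } from '@bsv/wallet-toolbox' // assumed export path

    function summarizeReprove(r: sdk.ReproveHeaderResult): { summary: string; shouldRetry: boolean } {
      const summary = `updated=${r.updated.length} unchanged=${r.unchanged.length} unavailable=${r.unavailable.length}`
      // Records whose proofs are unchanged or unavailable are the ones worth retrying later.
      const shouldRetry = r.unchanged.length > 0 || r.unavailable.length > 0
      return { summary, shouldRetry }
    }
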
package/src/services/__tests/verifyBeef.test.ts CHANGED
@@ -1,4 +1,4 @@
- import { Beef } from '@bsv/sdk'
+ import { Beef, Utils } from '@bsv/sdk'
  import { Services } from '../Services'
  import { _tu, logger } from '../../../test/utils/TestUtilsWalletStorage'
  import { verifyTruthy } from '../../utility/utilityHelpers'
@@ -38,4 +38,14 @@ describe('verifyBeef tests', () => {
  expect(ok).toBe(true)
  }
  })
+
+ test.skip('2_ review beef verify root', async () => {
+ const bhex =
+ '0200beef01fe7c830d000a02a0021d4ca6c031db7f6334c08ddfda43cbde3800c7fa27892f8e80a5218ca8493918a10081788ac8d8267d409b6258a6a6f5d28317ee65b5b25892def4f6cbf44f92571d01510027c2382032711033d0a1e2724b9eefcf257e27bce28e37b7472877860570ee6e0129008e15879954392f322efdd32376077a3323db02501926a697f5db6b68862f67ce01150061dcb195186d564d754a056d9ad90d65ece5bfa5ddccebd24b64d25df3780b15010b00bcd8f2c9c62b4fbbefad9640f9f6dccf21246fa08a6e1cab2c052666dee4182001040018ad6a5739749e27c191a5ef7442d861e5b8d204d36c91e08bf8015811851dbe010300f47047d1c4582eb02349eabcdafc7f4573e93ed687718275475d6f528783d16201000039a5fa5dbbbcd4a1754c250a7879ae1ad2eeb189d87d3614c2a2d9519a7a47af0101001670fc6a8d40adbd3f8a84ae35f0a702695f19f19a8feddcfd1de6249cc164e901010092a689a4cda27aea3552a98a7441ffbaed8566ae31e0a1a67e67647e2f3b8fda05025a8b77e1c82cfcfda197fec3f805a6b7000737a583e45833df6721975fe8bad102448f38860c45d33c87041c0fda51befb1c90853d3141a0df3ac737ccb9b5e61b01000100000001f7ddf439a165bf63a7d6c144b4bd8882ff45dc35a3ca3e75517fa56482fed6bd000000006b4830450221008106bc7164333415bc485ae1d12acd72bbc536f1f03b25aa42d92971565b329902202d484d09935be7fa49bbd5806148dbfdb90cc86516537351acf20655c03fa656412102b53b5339d6241c4271a07e7b09035966defe37c1a3edd60b8a427d5a5b488cb5ffffffff021d00000000000000c421029664d9baa433b4ded47ce151d348fda7ed30df597b93bf5f321ec2fe742b0faaac2131546f446f44744b7265457a6248594b466a6d6f42756475466d53585855475a4734aba7171082ff009628f6d1abea57bc1ffcdb6c2b45a5e17219eaf6bc6b6e093b5243036565505084548f9715a440b6c03e73427d4730450221008e4964dc5e8f3cc6f41da7508cba05babb2ce211fa47fe91ae9c06903d95fde902206cb21d6c188f302fccedbbcd80459561dbabcabe3da16853371fede9f5d027d06d75c8030000000000001976a914f8a84c2bef6eed4eb3270c8605a8063202ed25cb88ac000000000001000000015a8b77e1c82cfcfda197fec3f805a6b7000737a583e45833df6721975fe8bad1010000006b483045022100fb62de36ac2930029b1397931c3f30bf6df5166f2e82bed6b2ef1d23491f8e450220730105461dc12236439ee568709ee72c345bb6748efe8656a0e96e4cc5eaecfb412102c6b33e96f3b635ebd71bcedd9bcb90b4c098b9b38730f58984e23615e0864833ffffffff042800000000000000c521029664d9baa433b4ded47ce151d348fda7ed30df597b93bf5f321ec2fe742b0faaac2131546f446f44744b7265457a6248594b466a6d6f42756475466d53585855475a47361c08d47822cb0806cd17af298948641db6bd36440da9a988af0f6600cba6dabfcfe5c7fe086b7a08e8feef3a9d21d8b0126c2f4a260b46304402204f418ece238fb0587f887c1e0ea6beb4ebcefa6749d1b523195bd65dc9971374022009d0b21c669a72a8a01808d394c55de730a3a4d287b3bb209697b2e79a9787ce6d7516050000000000001976a914803a2e1d2ca2373c21129a7075f1a42587f16c8188acec030000000000001976a91441cb6381a584c464df4b6dd75b91fb0ab6c4b7a688acd0040000000000001976a914e08fbd92ba37c1d84bba8439c55793ea60c0dd6b88ac00000000000100000001448f38860c45d33c87041c0fda51befb1c90853d3141a0df3ac737ccb9b5e61b020000006a4730440220411ab1f23f747899bf71185fbb4ab03defc6e215fb1ee3d24060b14256d2dc40022035669cd13b5c5fd399a402862b4e6bc001d0cbf56660bac37b1563eeaf49a700412103b20f91159733fd69817cc4d4f9ed0cf4340f63b482e0a0a7f233885c61d1b044ffffffff020a00000000000000c421029664d9baa433b4ded47ce151d348fda7ed30df597b93bf5f321ec2fe742b0faaac2131546f446f44744b7265457a6248594b466a6d6f42756475466d53585855475a47343c32fe905bb02e70c0a9779048c921b1e26a2684c498ab44759ac25bcdfafa95309c59d1c3ac12f056ad8d10dabe777d1d57dd934730450221009a64cdc81a0ada12d329463db24260a15ad56bdc3523613c0fae2fb64762d20e022021b942e859749fc23585fdb0395585d6ea52dcf0a310cc989a38ff0483c8717e6d75b7150000000000001976a91468cce1214ccbd14d9dfd813d8490daadaa96b39288ac00000000'
+ const beef = Beef.fromString(bhex)
+ logger(beef.toLogString())
+ logger(Utils.toHex(beef.txs[1].rawTx!))
+ logger(beef.bumps[0].computeRoot('e47df21819ed320a78392e62e963ddd77143c3c52ad5255a07ff55ba507df71d'))
+ })
+
  })
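
For context, the verification path this test pokes at can be exercised directly with the SDK. A small sketch, assuming WhatsOnChain from @bsv/sdk as the chain tracker (any ChainTracker implementation would do) and a caller-supplied BEEF hex string:

    // Sketch: verify a BEEF the way the reorg-repair comments describe, by asking
    // a chain tracker whether each BUMP's computed merkle root is valid at its height.
    import { Beef, WhatsOnChain } from '@bsv/sdk'

    async function checkBeef(beefHex: string): Promise<boolean> {
      const beef = Beef.fromString(beefHex) // hex is the default encoding
      const chainTracker = new WhatsOnChain('main')
      // Beef.verify checks structural validity and then validates every bump's
      // merkle root against the tracker; a reorged bump fails this check.
      return await beef.verify(chainTracker)
    }
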
package/src/storage/StorageProvider.ts CHANGED
@@ -230,7 +230,11 @@ export abstract class StorageProvider extends StorageReaderWriter implements Wal
  for (const txid of txids) {
  const d: GetReqsAndBeefDetail = {
  txid,
+ // status: 'readyToSend' | 'alreadySent' | 'error' | 'unknown'
  status: 'unknown'
+ // req?: TableProvenTxReq
+ // proven?: TableProvenTx
+ // error?: string
  }
  r.details.push(d)
  try {
package/src/storage/WalletStorageManager.ts CHANGED
@@ -1,6 +1,7 @@
  import {
  AbortActionArgs,
  AbortActionResult,
+ Beef,
  InternalizeActionArgs,
  InternalizeActionResult,
  ListActionsResult,
@@ -16,6 +17,7 @@ import {
  TableCertificateX,
  TableOutput,
  TableOutputBasket,
+ TableProvenTx,
  TableProvenTxReq,
  TableSettings,
  TableUser
@@ -519,6 +521,165 @@ export class WalletStorageManager implements sdk.WalletStorage {
  })
  }

+ /**
+ * For each proven_txs record currently sourcing its transaction merkle proof from the given deactivated header,
+ * attempt to reprove the transaction against the current chain,
+ * updating the proven_txs record if a new valid proof is found.
+ *
+ * @param deactivatedHash An orphaned header than may have served as a proof source for proven_txs records.
+ * @returns
+ */
+ async reproveHeader(deactivatedHash: string): Promise<sdk.ReproveHeaderResult> {
+ const r: sdk.ReproveHeaderResult = { log: '', updated: [], unchanged: [], unavailable: [] }
+ const services = this.getServices()
+ const chaintracker = await services.getChainTracker()
+
+ // Lookup all the proven_txs records matching the deactivated headers
+ let ptxs: TableProvenTx[] = []
+ await this.runAsStorageProvider(async sp => {
+ ptxs = await sp.findProvenTxs({ partial: { blockHash: deactivatedHash } })
+ })
+
+ r.log += ` block ${deactivatedHash} orphaned with ${ptxs.length} impacted transactions\n`
+
+ for (const ptx of ptxs) {
+ // Loop over proven_txs records matching the deactivated header
+ const rp = await this.reproveProven(ptx, true)
+
+ r.log += rp.log
+ if (rp.unavailable) r.unavailable.push(ptx);
+ if (rp.unchanged) r.unchanged.push(ptx);
+ if (rp.updated) r.updated.push({ was: ptx, update: rp.updated.update, logUpdate: rp.updated.logUpdate })
+ }
+
+ if (r.updated.length > 0) {
+ await this.runAsStorageProvider(async sp => {
+ for (const u of r.updated) {
+ await sp.updateProvenTx(u.was.provenTxId, u.update)
+ r.log += ` txid ${u.was.txid} proof data updated\n` + u.logUpdate
+ }
+ })
+ }
+
+ return r
+ }
+
+ /**
+ * Extends the Beef `verify` function to handle BUMPs that have become invalid due to a chain reorg.
+ *
+ * Any merkle root that fails `isValidRootForHeight` triggers a reprove attempt for that block header.
+ * This results in proven_txs with invalid proofs being updated with new valid proofs where possible.
+ * Finally, a new beef is generated and verified against the chaintracker.
+ *
+ * @param beef
+ * @param allowTxidOnly
+ * @returns
+ */
+ async verifyAndRepairBeef(beef: Beef, allowTxidOnly?: boolean): Promise<boolean> {
+ throw new sdk.WERR_NOT_IMPLEMENTED()
+
+ const services = this.getServices()
+ const chaintracker = await services.getChainTracker()
+ const verified = await beef.verify(chaintracker)
+
+ const r = beef.verifyValid(allowTxidOnly)
+ if (!r.valid) return false
+
+ const invalidRoots: Record<number, string> = {}
+ for (const height of Object.keys(r.roots)) {
+ const isValid = await chaintracker.isValidRootForHeight(
+ r.roots[height],
+ Number(height)
+ )
+ if (!isValid) {
+ invalidRoots[height] = r.roots[height]
+ }
+ }
+
+ if (Object.keys(invalidRoots).length === 0) {
+ // There are no invalid merkle roots and the beef is structurally valid,
+ // the beef is fully verified.
+ return true
+ }
+
+ for (const heightStr of Object.keys(invalidRoots)) {
+ const hash = invalidRoots[Number(heightStr)]
+ const r = await this.reproveHeader(hash)
+ }
+
+ // All invalid BUMPs must be removed from the beef
+ // and all txid's that were proven by those BUMPs need
+ // new beefs merged into the beef.
+ // In most cases, this will be a replacement BUMP,
+ // but it may also require a deeper proof.
+ }
+
+ /**
+ * Attempt to reprove the transaction against the current chain,
+ * If a new valid proof is found and noUpdate is not true,
+ * update the proven_txs record with new block and merkle proof data.
+ * If noUpdate is true, the update to be applied is available in the returned result.
+ *
+ * @param ptx proven_txs record to reprove
+ * @param noUpdate
+ * @returns
+ */
+ async reproveProven(ptx: TableProvenTx, noUpdate?: boolean): Promise<sdk.ReproveProvenResult> {
+ const r: sdk.ReproveProvenResult = { log: '', updated: undefined, unchanged: false, unavailable: false }
+ const services = this.getServices()
+ const chaintracker = await services.getChainTracker()
+
+ const mpr = await services.getMerklePath(ptx.txid)
+ if (mpr.merklePath && mpr.header) {
+ const mp = mpr.merklePath
+ const h = mpr.header
+ const leaf = mp.path[0].find(leaf => leaf.txid === true && leaf.hash === ptx.txid)
+ if (leaf) {
+ const update: Partial<TableProvenTx> = {
+ height: mp.blockHeight,
+ index: leaf.offset,
+ merklePath: mp.toBinary(),
+ merkleRoot: h.merkleRoot,
+ blockHash: h.hash
+ }
+ if (update.blockHash === ptx.blockHash) {
+ r.log += ` txid ${ptx.txid} merkle path update still based on deactivated header ${ptx.blockHash}\n`
+ r.unchanged = true
+ } else {
+ // Verify the new proof's validity.
+ const merkleRoot = mp.computeRoot(ptx.txid)
+ const isValid = await chaintracker.isValidRootForHeight(merkleRoot, update.height!)
+ const logUpdate = ` height ${ptx.height} ${ptx.height === update.height ? 'unchanged' : `-> ${update.height}`}\n`
+ r.log += ` blockHash ${ptx.blockHash} -> ${update.blockHash}\n`
+ r.log += ` merkleRoot ${ptx.merkleRoot} -> ${update.merkleRoot}\n`
+ r.log += ` index ${ptx.index} -> ${update.index}\n`
+ if (isValid) {
+ r.updated = { update, logUpdate }
+ } else {
+ r.log +=
+ ` txid ${ptx.txid} chaintracker fails to confirm updated merkle path update invalid\n` + logUpdate
+ r.unavailable = true
+ }
+ }
+ } else {
+ r.log += ` txid ${ptx.txid} merkle path update doesn't include txid\n`
+ r.unavailable = true
+ }
+ } else {
+ r.log += ` txid ${ptx.txid} merkle path update unavailable\n`
+ r.unavailable = true
+ }
+
+ if (r.updated && !noUpdate) {
+ await this.runAsStorageProvider(async sp => {
+ await sp.updateProvenTx(ptx.provenTxId, r.updated!.update)
+ r.log += ` txid ${ptx.txid} proof data updated\n` + r.updated!.logUpdate
+ })
+ }
+
+ return r
+ }
+
  async syncFromReader(
  identityKey: string,
  reader: sdk.WalletStorageSyncReader,
@@ -763,4 +924,4 @@ export class WalletStorageManager implements sdk.WalletStorage {
  }
  return stores
  }
- }
+ }
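
A hedged sketch of how the new methods might be driven from application code, using only the signatures visible in the diff above; the previewReorgRepair helper, the way the manager instance is obtained, and the package-root export of WalletStorageManager are illustrative assumptions, not toolbox API.

    // Sketch only: exercises reproveHeader / reproveProven as declared above.
    import { WalletStorageManager } from '@bsv/wallet-toolbox' // assumed export path

    async function previewReorgRepair(storage: WalletStorageManager, orphanedBlockHash: string): Promise<void> {
      // Re-proves every proven_txs record whose proof referenced the orphaned
      // header and applies any valid updates it finds.
      const r = await storage.reproveHeader(orphanedBlockHash)
      console.log(r.log)

      // For records that came back unchanged, ask again with noUpdate = true so
      // the candidate update is returned without being written to storage.
      for (const ptx of r.unchanged) {
        const rp = await storage.reproveProven(ptx, true)
        if (rp.updated) console.log(`would update provenTxId ${ptx.provenTxId}:`, rp.updated.logUpdate)
      }
    }
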
package/src/storage/__test/getBeefForTransaction.test.ts CHANGED
@@ -1,5 +1,4 @@
- import { Beef, ListActionsResult, ListOutputsResult } from '@bsv/sdk'
- import { WalletError } from '../../sdk/WalletError'
+ import { Beef, ListActionsResult, ListOutputsResult, Utils } from '@bsv/sdk'
  import { StorageAdminStats, StorageProvider } from '../StorageProvider'
  import { Chain } from '../../sdk/types'
  import { Services } from '../../services/Services'
@@ -60,6 +59,16 @@ describe('getBeefForTransaction tests', () => {
  expect(beef.bumps.length > 0)
  }
  })
+
+ test.skip('1 obtain atomic beef hex for txid', async () => {
+ const ps = new ProtoStorage('main')
+ const txid = '4cefbe79926d6ef2cc727d8faccac186d9bb141f170411dd75bc6329f428f5a4'
+ const beef = await ps.getBeefForTxid(txid)
+ expect(beef.bumps.length > 0)
+ console.log(beef.toLogString())
+ const hex = Utils.toHex(beef.toBinaryAtomic(txid))
+ console.log(hex)
+ })
  })

  class ProtoStorage extends StorageProvider {
package/src/storage/methods/internalizeAction.ts CHANGED
@@ -18,7 +18,7 @@ import { randomBytesBase64, verifyId, verifyOne, verifyOneOrNone } from '../../u
  import { TransactionStatus } from '../../sdk/types'
  import { EntityProvenTxReq } from '../schema/entities/EntityProvenTxReq'
  import { blockHash } from '../../services/chaintracker/chaintracks/util/blockHeaderUtilities'
- import { TableProvenTx } from '../index.client'
+ import { TableProvenTx } from '../schema/tables/TableProvenTx'

  /**
  * Internalize Action allows a wallet to take ownership of outputs in a pre-existing transaction.
@@ -559,4 +559,4 @@ class InternalizeActionContext {

  basket.eo = txOut
  }
- }
+ }
package/src/storage/schema/KnexMigrations.ts CHANGED
@@ -73,6 +73,19 @@ export class KnexMigrations implements MigrationSource<string> {
  }
  }

+ migrations['2025-10-13-001 add outputs spendable index'] = {
+ async up(knex) {
+ await knex.schema.alterTable('outputs', table => {
+ table.index('spendable')
+ })
+ },
+ async down(knex) {
+ await knex.schema.alterTable('outputs', table => {
+ table.dropIndex('spendable')
+ })
+ }
+ }
+
  migrations['2025-10-18-002 add proven_tx_reqs txid index'] = {
  async up(knex) {
  await knex.schema.alterTable('proven_tx_reqs', table => {
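
The new '2025-10-13-001' migration adds an index on outputs.spendable. A minimal sketch of the equivalent standalone knex call follows, with an assumed SQLite connection and placeholder file path; in the toolbox the change is applied through KnexMigrations and knex.migrate so the applied-migration bookkeeping stays consistent.

    // Sketch: the same schema change the migration performs, as a one-off knex call.
    import makeKnex from 'knex'

    async function addSpendableIndex(): Promise<void> {
      const db = makeKnex({
        client: 'sqlite3',
        connection: { filename: './wallet.sqlite' }, // placeholder path
        useNullAsDefault: true
      })
      try {
        await db.schema.alterTable('outputs', table => {
          table.index('spendable') // matches the migration's up() step
        })
      } finally {
        await db.destroy()
      }
    }
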