@relay-federation/bridge 0.3.14 → 0.3.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,811 +1,896 @@
1
- import { Level } from 'level'
2
- import { EventEmitter } from 'node:events'
3
- import { createHash } from 'node:crypto'
4
- import { join } from 'node:path'
5
- import { mkdir, writeFile, readFile } from 'node:fs/promises'
6
-
7
- /**
8
- * PersistentStore — LevelDB-backed storage for bridge state.
9
- *
10
- * Stores headers, transactions, and arbitrary metadata in sublevel
11
- * namespaces. Replaces the in-memory Maps used by HeaderRelay and
12
- * TxRelay with durable storage that survives restarts.
13
- *
14
- * Sublevels:
15
- * headers — height → { height, hash, prevHash }
16
- * txs — txid → rawHex
17
- * utxos — txid:vout → { txid, vout, satoshis, scriptHex, address, spent }
18
- * meta — key → value (bestHeight, bestHash, etc.)
19
- * watched — txid → { txid, address, direction, timestamp }
20
- *
21
- * Events:
22
- * 'open' — store ready
23
- * 'error' — LevelDB error
24
- */
25
- export class PersistentStore extends EventEmitter {
26
- /**
27
- * @param {string} dataDir — directory for the LevelDB database
28
- */
29
- constructor (dataDir) {
30
- super()
31
- this.dbPath = join(dataDir, 'bridge.db')
32
- this.db = null
33
- this._headers = null
34
- this._txs = null
35
- this._utxos = null
36
- this._meta = null
37
- this._watched = null
38
- this._hashIndex = null
39
- this._inscriptions = null
40
- this._inscriptionIdx = null
41
- this._txStatus = null
42
- this._txBlock = null
43
- this._content = null
44
- this._tokens = null
45
- this._sessions = null
46
- this._contentDir = join(dataDir, 'content')
47
- }
48
-
49
- /** Open the database and create sublevels. */
50
- async open () {
51
- this.db = new Level(this.dbPath, { valueEncoding: 'json' })
52
- await this.db.open()
53
- this._headers = this.db.sublevel('headers', { valueEncoding: 'json' })
54
- this._txs = this.db.sublevel('txs', { valueEncoding: 'utf8' })
55
- this._utxos = this.db.sublevel('utxos', { valueEncoding: 'json' })
56
- this._meta = this.db.sublevel('meta', { valueEncoding: 'json' })
57
- this._watched = this.db.sublevel('watched', { valueEncoding: 'json' })
58
- this._hashIndex = this.db.sublevel('hashIndex', { valueEncoding: 'json' })
59
- this._inscriptions = this.db.sublevel('inscriptions', { valueEncoding: 'json' })
60
- this._inscriptionIdx = this.db.sublevel('inscIdx', { valueEncoding: 'json' })
61
- this._txStatus = this.db.sublevel('txStatus', { valueEncoding: 'json' })
62
- this._txBlock = this.db.sublevel('txBlock', { valueEncoding: 'json' })
63
- this._content = this.db.sublevel('content', { valueEncoding: 'json' })
64
- this._tokens = this.db.sublevel('tokens', { valueEncoding: 'json' })
65
- this._sessions = this.db.sublevel('sessions', { valueEncoding: 'json' })
66
- await mkdir(this._contentDir, { recursive: true })
67
- this.emit('open')
68
- }
69
-
70
- /** Close the database. */
71
- async close () {
72
- if (this.db) await this.db.close()
73
- }
74
-
75
- // ── Headers ──────────────────────────────────────────────
76
-
77
- /**
78
- * Store a header by height (with hash index).
79
- * @param {{ height: number, hash: string, prevHash: string, merkleRoot?: string, timestamp?: number, bits?: number, nonce?: number, version?: number }} header
80
- */
81
- async putHeader (header) {
82
- await this._headers.put(String(header.height), header)
83
- if (header.hash) {
84
- await this._hashIndex.put(header.hash, header.height)
85
- }
86
- }
87
-
88
- /**
89
- * Store multiple headers in a batch (with hash index).
90
- * @param {Array<{ height: number, hash: string, prevHash: string, merkleRoot?: string, timestamp?: number, bits?: number, nonce?: number, version?: number }>} headers
91
- */
92
- async putHeaders (headers) {
93
- const headerOps = headers.map(h => ({
94
- type: 'put',
95
- key: String(h.height),
96
- value: h
97
- }))
98
- await this._headers.batch(headerOps)
99
- const hashOps = headers.filter(h => h.hash).map(h => ({
100
- type: 'put',
101
- key: h.hash,
102
- value: h.height
103
- }))
104
- if (hashOps.length > 0) {
105
- await this._hashIndex.batch(hashOps)
106
- }
107
- }
108
-
109
- /**
110
- * Get a header by height.
111
- * @param {number} height
112
- * @returns {Promise<{ height: number, hash: string, prevHash: string }|null>}
113
- */
114
- async getHeader (height) {
115
- const val = await this._headers.get(String(height))
116
- return val !== undefined ? val : null
117
- }
118
-
119
- /**
120
- * Get a header by block hash.
121
- * @param {string} hash
122
- * @returns {Promise<object|null>}
123
- */
124
- async getHeaderByHash (hash) {
125
- const height = await this._hashIndex.get(hash)
126
- if (height === undefined) return null
127
- return this.getHeader(height)
128
- }
129
-
130
- /**
131
- * Verify a merkle proof against a stored block header.
132
- * @param {string} txHash — transaction hash (hex, display order)
133
- * @param {string[]} merkleProof sibling hashes in the merkle path
134
- * @param {number} txIndex — transaction index in the block
135
- * @param {string} blockHashblock hash to verify against
136
- * @returns {Promise<{ verified: boolean, blockHeight: number, blockTimestamp: number }>}
137
- */
138
- async verifyMerkleProof (txHash, merkleProof, txIndex, blockHash) {
139
- const header = await this.getHeaderByHash(blockHash)
140
- if (!header) {
141
- throw new Error(`Block ${blockHash} not found in header chain`)
142
- }
143
- if (!header.merkleRoot) {
144
- throw new Error(`Header at height ${header.height} has no merkleRoot stored`)
145
- }
146
-
147
- // Compute merkle root from proof
148
- let hash = Buffer.from(txHash, 'hex').reverse()
149
- let index = txIndex
150
-
151
- for (const proofHash of merkleProof) {
152
- const sibling = Buffer.from(proofHash, 'hex').reverse()
153
- const combined = (index % 2 === 0)
154
- ? Buffer.concat([hash, sibling])
155
- : Buffer.concat([sibling, hash])
156
- hash = doubleSha256(combined)
157
- index = Math.floor(index / 2)
158
- }
159
-
160
- const calculatedRoot = hash.reverse().toString('hex')
161
-
162
- if (calculatedRoot !== header.merkleRoot) {
163
- throw new Error('Merkle proof verification failed')
164
- }
165
-
166
- return {
167
- verified: true,
168
- blockHash: header.hash,
169
- blockHeight: header.height,
170
- blockTimestamp: header.timestamp
171
- }
172
- }
173
-
174
- // ── Transactions ─────────────────────────────────────────
175
-
176
- /**
177
- * Store a raw transaction.
178
- * @param {string} txid
179
- * @param {string} rawHex
180
- */
181
- async putTx (txid, rawHex) {
182
- await this._txs.put(txid, rawHex)
183
- }
184
-
185
- /**
186
- * Get a raw transaction by txid.
187
- * @param {string} txid
188
- * @returns {Promise<string|null>} rawHex or null
189
- */
190
- async getTx (txid) {
191
- const val = await this._txs.get(txid)
192
- return val !== undefined ? val : null
193
- }
194
-
195
- /**
196
- * Check if a transaction exists.
197
- * @param {string} txid
198
- * @returns {Promise<boolean>}
199
- */
200
- async hasTx (txid) {
201
- return (await this.getTx(txid)) !== null
202
- }
203
-
204
- // ── UTXOs ────────────────────────────────────────────────
205
-
206
- /**
207
- * Store a UTXO.
208
- * @param {{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }} utxo
209
- */
210
- async putUtxo (utxo) {
211
- const key = `${utxo.txid}:${utxo.vout}`
212
- await this._utxos.put(key, { ...utxo, spent: false })
213
- }
214
-
215
- /**
216
- * Mark a UTXO as spent.
217
- * @param {string} txid
218
- * @param {number} vout
219
- */
220
- async spendUtxo (txid, vout) {
221
- const key = `${txid}:${vout}`
222
- const utxo = await this._utxos.get(key)
223
- if (utxo === undefined) return
224
- utxo.spent = true
225
- await this._utxos.put(key, utxo)
226
- }
227
-
228
- /**
229
- * Get all unspent UTXOs.
230
- * @returns {Promise<Array<{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }>>}
231
- */
232
- async getUnspentUtxos () {
233
- const utxos = []
234
- for await (const [, utxo] of this._utxos.iterator()) {
235
- if (!utxo.spent) utxos.push(utxo)
236
- }
237
- return utxos
238
- }
239
-
240
- /**
241
- * Get total unspent balance in satoshis.
242
- * @returns {Promise<number>}
243
- */
244
- async getBalance () {
245
- let total = 0
246
- for await (const [, utxo] of this._utxos.iterator()) {
247
- if (!utxo.spent) total += utxo.satoshis
248
- }
249
- return total
250
- }
251
-
252
- // ── Watched address matches ──────────────────────────────
253
-
254
- /**
255
- * Store a watched-address match (a tx that touched a watched address).
256
- * @param {{ txid: string, address: string, direction: 'in'|'out', timestamp: number }} match
257
- */
258
- async putWatchedTx (match) {
259
- const key = `${match.address}:${match.txid}`
260
- await this._watched.put(key, match)
261
- }
262
-
263
- /**
264
- * Get all watched-address matches for an address.
265
- * @param {string} address
266
- * @returns {Promise<Array>}
267
- */
268
- async getWatchedTxs (address) {
269
- const matches = []
270
- for await (const [key, value] of this._watched.iterator()) {
271
- if (key.startsWith(`${address}:`)) {
272
- matches.push(value)
273
- }
274
- }
275
- return matches
276
- }
277
-
278
- // ── Sessions (Indelible) ───────────────────────────────────
279
-
280
- /**
281
- * Store a session metadata record with sort index.
282
- * PK: s!{address}!{txId} SK: t!{address}!{revTs}!{txId}
283
- * @param {object} session — must have txId and address
284
- */
285
- async putSession (session) {
286
- const { txId, address } = session
287
- if (!txId || !address) throw new Error('txId and address required')
288
- const pk = `s!${address}!${txId}`
289
- const ts = session.timestamp || new Date().toISOString()
290
- const revTs = String(9999999999999 - new Date(ts).getTime()).padStart(13, '0')
291
- const sk = `t!${address}!${revTs}!${txId}`
292
- const record = {
293
- txId, address,
294
- session_id: session.session_id || null,
295
- prev_session_id: session.prev_session_id || null,
296
- summary: session.summary || '',
297
- message_count: session.message_count || 0,
298
- save_type: session.save_type || 'full',
299
- timestamp: ts,
300
- receivedAt: new Date().toISOString()
301
- }
302
- await this._sessions.batch([
303
- { type: 'put', key: pk, value: record },
304
- { type: 'put', key: sk, value: txId }
305
- ])
306
- return record
307
- }
308
-
309
- /**
310
- * Get sessions for an address, newest first.
311
- * @param {string} address
312
- * @param {number} [limit=200]
313
- * @returns {Promise<Array>}
314
- */
315
- async getSessions (address, limit = 200) {
316
- const prefix = `t!${address}!`
317
- const results = []
318
- for await (const [, txId] of this._sessions.iterator({
319
- gte: prefix, lt: prefix + '~', limit
320
- })) {
321
- const record = await this._safeGet(this._sessions, `s!${address}!${txId}`)
322
- if (record) results.push(record)
323
- }
324
- return results
325
- }
326
-
327
- /**
328
- * Batch import sessions (for backfill).
329
- * @param {Array} sessions — array of session objects
330
- * @returns {Promise<number>} count imported
331
- */
332
- async putSessionsBatch (sessions) {
333
- const ops = []
334
- for (const session of sessions) {
335
- const { txId, address } = session
336
- if (!txId || !address) continue
337
- const pk = `s!${address}!${txId}`
338
- const ts = session.timestamp || new Date().toISOString()
339
- const revTs = String(9999999999999 - new Date(ts).getTime()).padStart(13, '0')
340
- const sk = `t!${address}!${revTs}!${txId}`
341
- const record = {
342
- txId, address,
343
- session_id: session.session_id || null,
344
- prev_session_id: session.prev_session_id || null,
345
- summary: session.summary || '',
346
- message_count: session.message_count || 0,
347
- save_type: session.save_type || 'full',
348
- timestamp: ts,
349
- receivedAt: new Date().toISOString()
350
- }
351
- ops.push({ type: 'put', key: pk, value: record })
352
- ops.push({ type: 'put', key: sk, value: txId })
353
- }
354
- if (ops.length > 0) await this._sessions.batch(ops)
355
- return ops.length / 2
356
- }
357
-
358
- /**
359
- * Get summary of all addresses with sessions (for peer sync announce).
360
- * @returns {Promise<Array<{ address: string, count: number, latest: string }>>}
361
- */
362
- async getSessionAddresses () {
363
- const map = new Map() // address → { count, latest }
364
- for await (const [key, value] of this._sessions.iterator({
365
- gte: 's!', lt: 's!~'
366
- })) {
367
- const addr = key.split('!')[1]
368
- const entry = map.get(addr)
369
- if (!entry) {
370
- map.set(addr, { count: 1, latest: value.timestamp || '' })
371
- } else {
372
- entry.count++
373
- if (value.timestamp > entry.latest) entry.latest = value.timestamp
374
- }
375
- }
376
- return [...map].map(([address, { count, latest }]) => ({ address, count, latest }))
377
- }
378
-
379
- // ── Metadata ─────────────────────────────────────────────
380
-
381
- /**
382
- * Store a metadata value.
383
- * @param {string} key
384
- * @param {*} value — any JSON-serializable value
385
- */
386
- async putMeta (key, value) {
387
- await this._meta.put(key, value)
388
- }
389
-
390
- /**
391
- * Get a metadata value.
392
- * @param {string} key
393
- * @param {*} [defaultValue=null]
394
- * @returns {Promise<*>}
395
- */
396
- async getMeta (key, defaultValue = null) {
397
- const val = await this._meta.get(key)
398
- return val !== undefined ? val : defaultValue
399
- }
400
- // ── Tx Status + Block Mapping ───────────────────────────
401
-
402
- /**
403
- * Set or update tx lifecycle state.
404
- * @param {string} txid
405
- * @param {'mempool'|'confirmed'|'orphaned'|'dropped'} state
406
- * @param {object} [meta] — optional fields: blockHash, height, source
407
- */
408
- async updateTxStatus (txid, state, meta = {}) {
409
- const key = `s!${txid}`
410
- const now = Date.now()
411
- let existing = null
412
- try {
413
- const val = await this._txStatus.get(key)
414
- if (val !== undefined) existing = val
415
- } catch {}
416
-
417
- const record = existing || { firstSeen: now }
418
- record.state = state
419
- record.lastSeen = now
420
- record.updatedAt = now
421
- if (meta.blockHash) record.blockHash = meta.blockHash
422
- if (meta.height !== undefined) record.height = meta.height
423
- if (meta.source) record.source = meta.source
424
-
425
- const batch = [{ type: 'put', key, value: record }]
426
-
427
- // Maintain mempool secondary index
428
- if (state === 'mempool') {
429
- batch.push({ type: 'put', key: `mempool!${txid}`, value: 1 })
430
- } else if (existing?.state === 'mempool') {
431
- batch.push({ type: 'del', key: `mempool!${txid}` })
432
- }
433
-
434
- await this._txStatus.batch(batch)
435
- return record
436
- }
437
-
438
- /**
439
- * Get tx lifecycle state.
440
- * @param {string} txid
441
- * @returns {Promise<object|null>}
442
- */
443
- async getTxStatus (txid) {
444
- try {
445
- const val = await this._txStatus.get(`s!${txid}`)
446
- return val !== undefined ? val : null
447
- } catch { return null }
448
- }
449
-
450
- /**
451
- * Confirm a tx — atomic batch: txBlock + reverse index + txStatus update.
452
- * @param {string} txid
453
- * @param {string} blockHash
454
- * @param {number} height
455
- * @param {{ nodes: string[], index: number }|null} proof
456
- */
457
- async confirmTx (txid, blockHash, height, proof = null) {
458
- const now = Date.now()
459
- const blockRecord = { blockHash, height, confirmedAt: now, verified: !!proof }
460
- if (proof) blockRecord.proof = proof
461
-
462
- // Atomic batch across txBlock + txStatus
463
- const txBlockBatch = [
464
- { type: 'put', key: `tx!${txid}`, value: blockRecord },
465
- { type: 'put', key: `block!${blockHash}!tx!${txid}`, value: 1 }
466
- ]
467
- await this._txBlock.batch(txBlockBatch)
468
-
469
- await this.updateTxStatus(txid, 'confirmed', { blockHash, height })
470
- this.emit('tx:confirmed', { txid, blockHash, height })
471
- }
472
-
473
- /**
474
- * Get tx block placement.
475
- * @param {string} txid
476
- * @returns {Promise<object|null>}
477
- */
478
- async getTxBlock (txid) {
479
- try {
480
- const val = await this._txBlock.get(`tx!${txid}`)
481
- return val !== undefined ? val : null
482
- } catch { return null }
483
- }
484
-
485
- /**
486
- * Handle reorg — mark all txs in disconnected block as orphaned.
487
- * @param {string} blockHash — the disconnected block hash
488
- * @returns {Promise<string[]>} list of affected txids
489
- */
490
- async handleReorg (blockHash) {
491
- const affected = []
492
- const prefix = `block!${blockHash}!tx!`
493
-
494
- // Find all txids in this block via reverse index
495
- for await (const [key] of this._txBlock.iterator({ gte: prefix, lt: prefix + '~' })) {
496
- const txid = key.slice(prefix.length)
497
- affected.push(txid)
498
- }
499
-
500
- // Mark each as orphaned + clean up block associations
501
- for (const txid of affected) {
502
- await this.updateTxStatus(txid, 'orphaned', { blockHash })
503
- await this._txBlock.del(`tx!${txid}`)
504
- await this._txBlock.del(`block!${blockHash}!tx!${txid}`)
505
- }
506
-
507
- return affected
508
- }
509
-
510
- // ── Content-Addressed Storage ───────────────────────────
511
-
512
- static CAS_THRESHOLD = 4096 // 4KB — below this, inline in LevelDB
513
-
514
- /**
515
- * Store content bytes via CAS. Small content inline, large to filesystem.
516
- * @param {string} hexContent hex-encoded content bytes
517
- * @param {string} [mime] — content type
518
- * @returns {Promise<{ contentHash: string, contentLen: number, contentPath: string|null, inline: boolean }>}
519
- */
520
- async putContent (hexContent, mime) {
521
- const buf = Buffer.from(hexContent, 'hex')
522
- const contentHash = createHash('sha256').update(buf).digest('hex')
523
- const contentLen = buf.length
524
- const inline = contentLen < PersistentStore.CAS_THRESHOLD
525
-
526
- const record = { len: contentLen, mime: mime || null, createdAt: Date.now() }
527
-
528
- if (inline) {
529
- record.inline = hexContent
530
- record.path = null
531
- } else {
532
- const dir = join(this._contentDir, contentHash.slice(0, 2))
533
- const filePath = join(dir, contentHash)
534
- await mkdir(dir, { recursive: true })
535
- await writeFile(filePath, buf)
536
- record.path = filePath
537
- }
538
-
539
- await this._content.put(`c!${contentHash}`, record)
540
- return { contentHash, contentLen, contentPath: record.path, inline }
541
- }
542
-
543
- /**
544
- * Get content bytes by hash.
545
- * @param {string} contentHash
546
- * @returns {Promise<Buffer|null>}
547
- */
548
- async getContentBytes (contentHash) {
549
- let record
550
- try {
551
- const val = await this._content.get(`c!${contentHash}`)
552
- if (val === undefined) return null
553
- record = val
554
- } catch { return null }
555
-
556
- if (record.inline) {
557
- return Buffer.from(record.inline, 'hex')
558
- }
559
- if (record.path) {
560
- try { return await readFile(record.path) } catch { return null }
561
- }
562
- return null
563
- }
564
-
565
- /**
566
- * Get content metadata by hash.
567
- * @param {string} contentHash
568
- * @returns {Promise<object|null>}
569
- */
570
- async getContentMeta (contentHash) {
571
- try {
572
- const val = await this._content.get(`c!${contentHash}`)
573
- return val !== undefined ? val : null
574
- } catch { return null }
575
- }
576
-
577
- // ── Token Tracking (BSV-20) ─────────────────────────────
578
-
579
- /**
580
- * Process a BSV-20 token operation (confirmed-only).
581
- * Uses atomic batch() for all writes. Keyed by scriptHash for owner identity.
582
- * @param {{ op: string, tick: string, amt: string, ownerScriptHash: string, address: string|null, txid: string, height: number, blockHash: string }} params
583
- * @returns {Promise<{ valid: boolean, reason?: string }>}
584
- */
585
- async processTokenOp ({ op, tick, amt, ownerScriptHash, address, txid, height, blockHash }) {
586
- const tickNorm = tick.toLowerCase().trim()
587
-
588
- if (op === 'deploy') {
589
- // Only first deploy counts (chain-ordered by height)
590
- const existing = await this._safeGet(this._tokens, `tick!${tickNorm}`)
591
- if (existing) return { valid: false, reason: 'already deployed' }
592
-
593
- const parsed = typeof amt === 'object' ? amt : {}
594
- const batch = [
595
- { type: 'put', key: `tick!${tickNorm}`, value: {
596
- tick: tickNorm, max: parsed.max || '0', lim: parsed.lim || '0',
597
- dec: parsed.dec || '0', deployer: ownerScriptHash, deployerAddr: address,
598
- deployTxid: txid, deployHeight: height, totalMinted: '0'
599
- }},
600
- { type: 'put', key: `op!${String(height).padStart(10, '0')}!${txid}!deploy`, value: {
601
- tick: tickNorm, op: 'deploy', ownerScriptHash, valid: true
602
- }}
603
- ]
604
- await this._tokens.batch(batch)
605
- return { valid: true }
606
- }
607
-
608
- if (op === 'mint') {
609
- const deploy = await this._safeGet(this._tokens, `tick!${tickNorm}`)
610
- if (!deploy) return { valid: false, reason: 'token not deployed' }
611
-
612
- const mintAmt = BigInt(amt || '0')
613
- if (mintAmt <= 0n) return { valid: false, reason: 'invalid amount' }
614
- if (deploy.lim !== '0' && mintAmt > BigInt(deploy.lim)) return { valid: false, reason: 'exceeds mint limit' }
615
-
616
- const newTotal = BigInt(deploy.totalMinted) + mintAmt
617
- if (deploy.max !== '0' && newTotal > BigInt(deploy.max)) return { valid: false, reason: 'exceeds max supply' }
618
-
619
- // Credit owner balance
620
- const balKey = `bal!${tickNorm}!owner!${ownerScriptHash}`
621
- const existing = await this._safeGet(this._tokens, balKey) || { confirmed: '0' }
622
- const newBal = (BigInt(existing.confirmed) + mintAmt).toString()
623
-
624
- const batch = [
625
- { type: 'put', key: `tick!${tickNorm}`, value: { ...deploy, totalMinted: newTotal.toString() } },
626
- { type: 'put', key: balKey, value: { confirmed: newBal, updatedAt: Date.now() } },
627
- { type: 'put', key: `op!${String(height).padStart(10, '0')}!${txid}!mint`, value: {
628
- tick: tickNorm, op: 'mint', amt: amt, ownerScriptHash, valid: true
629
- }}
630
- ]
631
- await this._tokens.batch(batch)
632
- return { valid: true }
633
- }
634
-
635
- // Transfers deferred to Phase 2
636
- return { valid: false, reason: 'transfers not yet supported' }
637
- }
638
-
639
- /**
640
- * Get token deploy info.
641
- * @param {string} tick
642
- * @returns {Promise<object|null>}
643
- */
644
- async getToken (tick) {
645
- return this._safeGet(this._tokens, `tick!${tick.toLowerCase().trim()}`)
646
- }
647
-
648
- /**
649
- * Get token balance for an owner.
650
- * @param {string} tick
651
- * @param {string} ownerScriptHash
652
- * @returns {Promise<string>} balance as string
653
- */
654
- async getTokenBalance (tick, ownerScriptHash) {
655
- const record = await this._safeGet(this._tokens, `bal!${tick.toLowerCase().trim()}!owner!${ownerScriptHash}`)
656
- return record ? record.confirmed : '0'
657
- }
658
-
659
- /**
660
- * List all deployed tokens.
661
- * @returns {Promise<Array>}
662
- */
663
- async listTokens () {
664
- const tokens = []
665
- const prefix = 'tick!'
666
- for await (const [key, value] of this._tokens.iterator({ gte: prefix, lt: prefix + '~' })) {
667
- tokens.push(value)
668
- }
669
- return tokens
670
- }
671
-
672
- /** Safe get — returns null instead of throwing for missing keys. */
673
- async _safeGet (sublevel, key) {
674
- try {
675
- const val = await sublevel.get(key)
676
- return val !== undefined ? val : null
677
- } catch { return null }
678
- }
679
-
680
- // ── Inscriptions ─────────────────────────────────────────
681
-
682
- /**
683
- * Store an inscription record with secondary indexes.
684
- * @param {{ txid: string, vout: number, contentType: string, contentSize: number, isBsv20: boolean, bsv20: object|null, timestamp: number, address: string|null }} record
685
- */
686
- async putInscription (record) {
687
- const key = `${record.txid}:${record.vout}`
688
- const suffix = `${record.txid}:${record.vout}`
689
-
690
- // Purge ALL stale secondary index entries pointing to this key
691
- try {
692
- const delBatch = []
693
- for await (const [idxKey, val] of this._inscriptionIdx.iterator()) {
694
- if (val === key && idxKey.endsWith(suffix)) delBatch.push({ type: 'del', key: idxKey })
695
- }
696
- if (delBatch.length) await this._inscriptionIdx.batch(delBatch)
697
- } catch {}
698
-
699
- // Route content through CAS
700
- if (record.content) {
701
- try {
702
- const cas = await this.putContent(record.content, record.contentType)
703
- record.contentHash = cas.contentHash
704
- record.contentLen = cas.contentLen
705
- // Strip raw content from inscription record if large (stored on filesystem)
706
- if (!cas.inline) {
707
- delete record.content
708
- }
709
- } catch {}
710
- }
711
-
712
- await this._inscriptions.put(key, record)
713
-
714
- const ts = String(record.timestamp).padStart(15, '0')
715
- const batch = [{ type: 'put', key: `time:${ts}:${suffix}`, value: key }]
716
- if (record.contentType) {
717
- batch.push({ type: 'put', key: `mime:${record.contentType}:${ts}:${suffix}`, value: key })
718
- }
719
- if (record.address) {
720
- batch.push({ type: 'put', key: `addr:${record.address}:${ts}:${suffix}`, value: key })
721
- }
722
- await this._inscriptionIdx.batch(batch)
723
- }
724
-
725
- /**
726
- * Query inscriptions with optional filters.
727
- * @param {{ mime?: string, address?: string, limit?: number }} opts
728
- * @returns {Promise<Array>}
729
- */
730
- async getInscriptions ({ mime, address, limit = 50 } = {}) {
731
- const results = []
732
- let prefix
733
- if (address) {
734
- prefix = `addr:${address}:`
735
- } else if (mime) {
736
- prefix = `mime:${mime}:`
737
- } else {
738
- prefix = 'time:'
739
- }
740
-
741
- for await (const [, primaryKey] of this._inscriptionIdx.iterator({
742
- gte: prefix, lt: prefix + '~', reverse: true, limit
743
- })) {
744
- try {
745
- const record = await this._inscriptions.get(primaryKey)
746
- if (record) {
747
- // Strip content from list results (can be 400KB+ per image)
748
- const { content, ...meta } = record
749
- results.push(meta)
750
- }
751
- } catch {}
752
- }
753
- return results
754
- }
755
-
756
- /**
757
- * Rebuild inscription secondary indexes from primary records.
758
- * Clears all index entries and re-creates from source of truth.
759
- * @returns {Promise<number>} count of inscriptions re-indexed
760
- */
761
- async rebuildInscriptionIndex () {
762
- // Clear entire index
763
- for await (const [key] of this._inscriptionIdx.iterator()) {
764
- await this._inscriptionIdx.del(key)
765
- }
766
- // Re-create from primary records
767
- let count = 0
768
- for await (const [, record] of this._inscriptions.iterator()) {
769
- const ts = String(record.timestamp).padStart(15, '0')
770
- const suffix = `${record.txid}:${record.vout}`
771
- const key = suffix
772
- const batch = [{ type: 'put', key: `time:${ts}:${suffix}`, value: key }]
773
- if (record.contentType) batch.push({ type: 'put', key: `mime:${record.contentType}:${ts}:${suffix}`, value: key })
774
- if (record.address) batch.push({ type: 'put', key: `addr:${record.address}:${ts}:${suffix}`, value: key })
775
- await this._inscriptionIdx.batch(batch)
776
- count++
777
- }
778
- return count
779
- }
780
-
781
- /**
782
- * Get a single inscription record (with content) by txid:vout.
783
- * @param {string} txid
784
- * @param {number} vout
785
- * @returns {Promise<object|null>}
786
- */
787
- async getInscription (txid, vout) {
788
- try {
789
- return await this._inscriptions.get(`${txid}:${vout}`)
790
- } catch {
791
- return null
792
- }
793
- }
794
-
795
- /**
796
- * Get total inscription count.
797
- * @returns {Promise<number>}
798
- */
799
- async getInscriptionCount () {
800
- let count = 0
801
- for await (const _ of this._inscriptions.keys()) count++
802
- return count
803
- }
804
- }
805
-
806
- /** Double SHA-256 (Bitcoin standard) */
807
- function doubleSha256 (data) {
808
- return createHash('sha256').update(
809
- createHash('sha256').update(data).digest()
810
- ).digest()
811
- }
1
+ import { Level } from 'level'
2
+ import { EventEmitter } from 'node:events'
3
+ import { createHash } from 'node:crypto'
4
+ import { join } from 'node:path'
5
+ import { mkdir, writeFile, readFile } from 'node:fs/promises'
6
+
7
+ /**
8
+ * PersistentStore — LevelDB-backed storage for bridge state.
9
+ *
10
+ * Stores headers, transactions, and arbitrary metadata in sublevel
11
+ * namespaces. Replaces the in-memory Maps used by HeaderRelay and
12
+ * TxRelay with durable storage that survives restarts.
13
+ *
14
+ * Sublevels:
15
+ * headers — height → { height, hash, prevHash }
16
+ * txs — txid → rawHex
17
+ * utxos — txid:vout → { txid, vout, satoshis, scriptHex, address, spent }
18
+ * meta — key → value (bestHeight, bestHash, etc.)
19
+ * watched — txid → { txid, address, direction, timestamp }
20
+ *
21
+ * Events:
22
+ * 'open' — store ready
23
+ * 'error' — LevelDB error
24
+ */
25
+ export class PersistentStore extends EventEmitter {
26
+ /**
27
+ * @param {string} dataDir — directory for the LevelDB database
28
+ */
29
+ constructor (dataDir) {
30
+ super()
31
+ this.dbPath = join(dataDir, 'bridge.db')
32
+ this.db = null
33
+ this._headers = null
34
+ this._txs = null
35
+ this._utxos = null
36
+ this._meta = null
37
+ this._watched = null
38
+ this._hashIndex = null
39
+ this._inscriptions = null
40
+ this._inscriptionIdx = null
41
+ this._txStatus = null
42
+ this._txBlock = null
43
+ this._content = null
44
+ this._tokens = null
45
+ this._sessions = null
46
+ this._paymentReceipts = null
47
+ this._contentDir = join(dataDir, 'content')
48
+ }
49
+
50
+ /** Open the database and create sublevels. */
51
+ async open () {
52
+ this.db = new Level(this.dbPath, { valueEncoding: 'json' })
53
+ await this.db.open()
54
+ this._headers = this.db.sublevel('headers', { valueEncoding: 'json' })
55
+ this._txs = this.db.sublevel('txs', { valueEncoding: 'utf8' })
56
+ this._utxos = this.db.sublevel('utxos', { valueEncoding: 'json' })
57
+ this._meta = this.db.sublevel('meta', { valueEncoding: 'json' })
58
+ this._watched = this.db.sublevel('watched', { valueEncoding: 'json' })
59
+ this._hashIndex = this.db.sublevel('hashIndex', { valueEncoding: 'json' })
60
+ this._inscriptions = this.db.sublevel('inscriptions', { valueEncoding: 'json' })
61
+ this._inscriptionIdx = this.db.sublevel('inscIdx', { valueEncoding: 'json' })
62
+ this._txStatus = this.db.sublevel('txStatus', { valueEncoding: 'json' })
63
+ this._txBlock = this.db.sublevel('txBlock', { valueEncoding: 'json' })
64
+ this._content = this.db.sublevel('content', { valueEncoding: 'json' })
65
+ this._tokens = this.db.sublevel('tokens', { valueEncoding: 'json' })
66
+ this._sessions = this.db.sublevel('sessions', { valueEncoding: 'json' })
67
+ this._paymentReceipts = this.db.sublevel('payment_receipts', { valueEncoding: 'json' })
68
+ await mkdir(this._contentDir, { recursive: true })
69
+ this.emit('open')
70
+ }
71
+
72
+ /** Close the database. */
73
+ async close () {
74
+ if (this.db) await this.db.close()
75
+ }
76
+
77
+ // ── Headers ──────────────────────────────────────────────
78
+
79
+ /**
80
+ * Store a header by height (with hash index).
81
+ * @param {{ height: number, hash: string, prevHash: string, merkleRoot?: string, timestamp?: number, bits?: number, nonce?: number, version?: number }} header
82
+ */
83
+ async putHeader (header) {
84
+ await this._headers.put(String(header.height), header)
85
+ if (header.hash) {
86
+ await this._hashIndex.put(header.hash, header.height)
87
+ }
88
+ }
89
+
90
+ /**
91
+ * Store multiple headers in a batch (with hash index).
92
+ * @param {Array<{ height: number, hash: string, prevHash: string, merkleRoot?: string, timestamp?: number, bits?: number, nonce?: number, version?: number }>} headers
93
+ */
94
+ async putHeaders (headers) {
95
+ const headerOps = headers.map(h => ({
96
+ type: 'put',
97
+ key: String(h.height),
98
+ value: h
99
+ }))
100
+ await this._headers.batch(headerOps)
101
+ const hashOps = headers.filter(h => h.hash).map(h => ({
102
+ type: 'put',
103
+ key: h.hash,
104
+ value: h.height
105
+ }))
106
+ if (hashOps.length > 0) {
107
+ await this._hashIndex.batch(hashOps)
108
+ }
109
+ }
110
+
111
+ /**
112
+ * Get a header by height.
113
+ * @param {number} height
114
+ * @returns {Promise<{ height: number, hash: string, prevHash: string }|null>}
115
+ */
116
+ async getHeader (height) {
117
+ const val = await this._headers.get(String(height))
118
+ return val !== undefined ? val : null
119
+ }
120
+
121
+ /**
122
+ * Get a header by block hash.
123
+ * @param {string} hash
124
+ * @returns {Promise<object|null>}
125
+ */
126
+ async getHeaderByHash (hash) {
127
+ const height = await this._hashIndex.get(hash)
128
+ if (height === undefined) return null
129
+ return this.getHeader(height)
130
+ }
131
+
132
+ /**
133
+ * Verify a merkle proof against a stored block header.
134
+ * @param {string} txHash — transaction hash (hex, display order)
135
+ * @param {string[]} merkleProofsibling hashes in the merkle path
136
+ * @param {number} txIndex transaction index in the block
137
+ * @param {string} blockHash — block hash to verify against
138
+ * @returns {Promise<{ verified: boolean, blockHeight: number, blockTimestamp: number }>}
139
+ */
140
+ async verifyMerkleProof (txHash, merkleProof, txIndex, blockHash) {
141
+ const header = await this.getHeaderByHash(blockHash)
142
+ if (!header) {
143
+ throw new Error(`Block ${blockHash} not found in header chain`)
144
+ }
145
+ if (!header.merkleRoot) {
146
+ throw new Error(`Header at height ${header.height} has no merkleRoot stored`)
147
+ }
148
+
149
+ // Compute merkle root from proof
150
+ let hash = Buffer.from(txHash, 'hex').reverse()
151
+ let index = txIndex
152
+
153
+ for (const proofHash of merkleProof) {
154
+ const sibling = Buffer.from(proofHash, 'hex').reverse()
155
+ const combined = (index % 2 === 0)
156
+ ? Buffer.concat([hash, sibling])
157
+ : Buffer.concat([sibling, hash])
158
+ hash = doubleSha256(combined)
159
+ index = Math.floor(index / 2)
160
+ }
161
+
162
+ const calculatedRoot = hash.reverse().toString('hex')
163
+
164
+ if (calculatedRoot !== header.merkleRoot) {
165
+ throw new Error('Merkle proof verification failed')
166
+ }
167
+
168
+ return {
169
+ verified: true,
170
+ blockHash: header.hash,
171
+ blockHeight: header.height,
172
+ blockTimestamp: header.timestamp
173
+ }
174
+ }
175
+
176
+ // ── Transactions ─────────────────────────────────────────
177
+
178
+ /**
179
+ * Store a raw transaction.
180
+ * @param {string} txid
181
+ * @param {string} rawHex
182
+ */
183
+ async putTx (txid, rawHex) {
184
+ await this._txs.put(txid, rawHex)
185
+ }
186
+
187
+ /**
188
+ * Get a raw transaction by txid.
189
+ * @param {string} txid
190
+ * @returns {Promise<string|null>} rawHex or null
191
+ */
192
+ async getTx (txid) {
193
+ const val = await this._txs.get(txid)
194
+ return val !== undefined ? val : null
195
+ }
196
+
197
+ /**
198
+ * Check if a transaction exists.
199
+ * @param {string} txid
200
+ * @returns {Promise<boolean>}
201
+ */
202
+ async hasTx (txid) {
203
+ return (await this.getTx(txid)) !== null
204
+ }
205
+
206
+ // ── UTXOs ────────────────────────────────────────────────
207
+
208
+ /**
209
+ * Store a UTXO.
210
+ * @param {{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }} utxo
211
+ */
212
+ async putUtxo (utxo) {
213
+ const key = `${utxo.txid}:${utxo.vout}`
214
+ await this._utxos.put(key, { ...utxo, spent: false })
215
+ }
216
+
217
+ /**
218
+ * Mark a UTXO as spent.
219
+ * @param {string} txid
220
+ * @param {number} vout
221
+ */
222
+ async spendUtxo (txid, vout) {
223
+ const key = `${txid}:${vout}`
224
+ const utxo = await this._utxos.get(key)
225
+ if (utxo === undefined) return
226
+ utxo.spent = true
227
+ await this._utxos.put(key, utxo)
228
+ }
229
+
230
+ /**
231
+ * Get all unspent UTXOs.
232
+ * @returns {Promise<Array<{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }>>}
233
+ */
234
+ async getUnspentUtxos () {
235
+ const utxos = []
236
+ for await (const [, utxo] of this._utxos.iterator()) {
237
+ if (!utxo.spent) utxos.push(utxo)
238
+ }
239
+ return utxos
240
+ }
241
+
242
+ /**
243
+ * Get unspent UTXOs for a specific address.
244
+ * @param {string} address
245
+ * @returns {Promise<Array<{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }>>}
246
+ */
247
+ async getUnspentByAddress (address) {
248
+ const utxos = []
249
+ for await (const [, utxo] of this._utxos.iterator()) {
250
+ if (!utxo.spent && utxo.address === address) utxos.push(utxo)
251
+ }
252
+ return utxos
253
+ }
254
+
255
+ /**
256
+ * Get total unspent balance in satoshis.
257
+ * @returns {Promise<number>}
258
+ */
259
+ async getBalance () {
260
+ let total = 0
261
+ for await (const [, utxo] of this._utxos.iterator()) {
262
+ if (!utxo.spent) total += utxo.satoshis
263
+ }
264
+ return total
265
+ }
266
+
267
+ // ── Watched address matches ──────────────────────────────
268
+
269
+ /**
270
+ * Store a watched-address match (a tx that touched a watched address).
271
+ * @param {{ txid: string, address: string, direction: 'in'|'out', timestamp: number }} match
272
+ */
273
+ async putWatchedTx (match) {
274
+ const key = `${match.address}:${match.txid}`
275
+ await this._watched.put(key, match)
276
+ }
277
+
278
+ /**
279
+ * Get all watched-address matches for an address.
280
+ * @param {string} address
281
+ * @returns {Promise<Array>}
282
+ */
283
+ async getWatchedTxs (address) {
284
+ const matches = []
285
+ for await (const [key, value] of this._watched.iterator()) {
286
+ if (key.startsWith(`${address}:`)) {
287
+ matches.push(value)
288
+ }
289
+ }
290
+ return matches
291
+ }
292
+
293
+ // ── Sessions (Indelible) ───────────────────────────────────
294
+
295
+ /**
296
+ * Store a session metadata record with sort index.
297
+ * PK: s!{address}!{txId} SK: t!{address}!{revTs}!{txId}
298
+ * @param {object} session must have txId and address
299
+ */
300
+ async putSession (session) {
301
+ const { txId, address } = session
302
+ if (!txId || !address) throw new Error('txId and address required')
303
+ const pk = `s!${address}!${txId}`
304
+ const ts = session.timestamp || new Date().toISOString()
305
+ const revTs = String(9999999999999 - new Date(ts).getTime()).padStart(13, '0')
306
+ const sk = `t!${address}!${revTs}!${txId}`
307
+ const record = {
308
+ txId, address,
309
+ session_id: session.session_id || null,
310
+ prev_session_id: session.prev_session_id || null,
311
+ summary: session.summary || '',
312
+ message_count: session.message_count || 0,
313
+ save_type: session.save_type || 'full',
314
+ timestamp: ts,
315
+ receivedAt: new Date().toISOString()
316
+ }
317
+ await this._sessions.batch([
318
+ { type: 'put', key: pk, value: record },
319
+ { type: 'put', key: sk, value: txId }
320
+ ])
321
+ return record
322
+ }
323
+
324
+ /**
325
+ * Get sessions for an address, newest first.
326
+ * @param {string} address
327
+ * @param {number} [limit=200]
328
+ * @returns {Promise<Array>}
329
+ */
330
+ async getSessions (address, limit = 200) {
331
+ const prefix = `t!${address}!`
332
+ const results = []
333
+ for await (const [, txId] of this._sessions.iterator({
334
+ gte: prefix, lt: prefix + '~', limit
335
+ })) {
336
+ const record = await this._safeGet(this._sessions, `s!${address}!${txId}`)
337
+ if (record) results.push(record)
338
+ }
339
+ return results
340
+ }
341
+
342
+ /**
343
+ * Batch import sessions (for backfill).
344
+ * @param {Array} sessions — array of session objects
345
+ * @returns {Promise<number>} count imported
346
+ */
347
+ async putSessionsBatch (sessions) {
348
+ const ops = []
349
+ for (const session of sessions) {
350
+ const { txId, address } = session
351
+ if (!txId || !address) continue
352
+ const pk = `s!${address}!${txId}`
353
+ const ts = session.timestamp || new Date().toISOString()
354
+ const revTs = String(9999999999999 - new Date(ts).getTime()).padStart(13, '0')
355
+ const sk = `t!${address}!${revTs}!${txId}`
356
+ const record = {
357
+ txId, address,
358
+ session_id: session.session_id || null,
359
+ prev_session_id: session.prev_session_id || null,
360
+ summary: session.summary || '',
361
+ message_count: session.message_count || 0,
362
+ save_type: session.save_type || 'full',
363
+ timestamp: ts,
364
+ receivedAt: new Date().toISOString()
365
+ }
366
+ ops.push({ type: 'put', key: pk, value: record })
367
+ ops.push({ type: 'put', key: sk, value: txId })
368
+ }
369
+ if (ops.length > 0) await this._sessions.batch(ops)
370
+ return ops.length / 2
371
+ }
372
+
373
+ /**
374
+ * Get summary of all addresses with sessions (for peer sync announce).
375
+ * @returns {Promise<Array<{ address: string, count: number, latest: string }>>}
376
+ */
377
+ async getSessionAddresses () {
378
+ const map = new Map() // address → { count, latest }
379
+ for await (const [key, value] of this._sessions.iterator({
380
+ gte: 's!', lt: 's!~'
381
+ })) {
382
+ const addr = key.split('!')[1]
383
+ const entry = map.get(addr)
384
+ if (!entry) {
385
+ map.set(addr, { count: 1, latest: value.timestamp || '' })
386
+ } else {
387
+ entry.count++
388
+ if (value.timestamp > entry.latest) entry.latest = value.timestamp
389
+ }
390
+ }
391
+ return [...map].map(([address, { count, latest }]) => ({ address, count, latest }))
392
+ }
393
+
394
+ // ── Metadata ─────────────────────────────────────────────
395
+
396
+ /**
397
+ * Store a metadata value.
398
+ * @param {string} key
399
+ * @param {*} value — any JSON-serializable value
400
+ */
401
+ async putMeta (key, value) {
402
+ await this._meta.put(key, value)
403
+ }
404
+
405
+ /**
406
+ * Get a metadata value.
407
+ * @param {string} key
408
+ * @param {*} [defaultValue=null]
409
+ * @returns {Promise<*>}
410
+ */
411
+ async getMeta (key, defaultValue = null) {
412
+ const val = await this._meta.get(key)
413
+ return val !== undefined ? val : defaultValue
414
+ }
415
+ // ── Tx Status + Block Mapping ───────────────────────────
416
+
417
+ /**
418
+ * Set or update tx lifecycle state.
419
+ * @param {string} txid
420
+ * @param {'mempool'|'confirmed'|'orphaned'|'dropped'} state
421
+ * @param {object} [meta] — optional fields: blockHash, height, source
422
+ */
423
+ async updateTxStatus (txid, state, meta = {}) {
424
+ const key = `s!${txid}`
425
+ const now = Date.now()
426
+ let existing = null
427
+ try {
428
+ const val = await this._txStatus.get(key)
429
+ if (val !== undefined) existing = val
430
+ } catch {}
431
+
432
+ const record = existing || { firstSeen: now }
433
+ record.state = state
434
+ record.lastSeen = now
435
+ record.updatedAt = now
436
+ if (meta.blockHash) record.blockHash = meta.blockHash
437
+ if (meta.height !== undefined) record.height = meta.height
438
+ if (meta.source) record.source = meta.source
439
+
440
+ const batch = [{ type: 'put', key, value: record }]
441
+
442
+ // Maintain mempool secondary index
443
+ if (state === 'mempool') {
444
+ batch.push({ type: 'put', key: `mempool!${txid}`, value: 1 })
445
+ } else if (existing?.state === 'mempool') {
446
+ batch.push({ type: 'del', key: `mempool!${txid}` })
447
+ }
448
+
449
+ await this._txStatus.batch(batch)
450
+ return record
451
+ }
452
+
453
+ /**
454
+ * Get tx lifecycle state.
455
+ * @param {string} txid
456
+ * @returns {Promise<object|null>}
457
+ */
458
+ async getTxStatus (txid) {
459
+ try {
460
+ const val = await this._txStatus.get(`s!${txid}`)
461
+ return val !== undefined ? val : null
462
+ } catch { return null }
463
+ }
464
+
465
+ /**
466
+ * Confirm a tx — atomic batch: txBlock + reverse index + txStatus update.
467
+ * @param {string} txid
468
+ * @param {string} blockHash
469
+ * @param {number} height
470
+ * @param {{ nodes: string[], index: number }|null} proof
471
+ */
472
+ async confirmTx (txid, blockHash, height, proof = null) {
473
+ const now = Date.now()
474
+ const blockRecord = { blockHash, height, confirmedAt: now, verified: !!proof }
475
+ if (proof) blockRecord.proof = proof
476
+
477
+ // Atomic batch across txBlock + txStatus
478
+ const txBlockBatch = [
479
+ { type: 'put', key: `tx!${txid}`, value: blockRecord },
480
+ { type: 'put', key: `block!${blockHash}!tx!${txid}`, value: 1 }
481
+ ]
482
+ await this._txBlock.batch(txBlockBatch)
483
+
484
+ await this.updateTxStatus(txid, 'confirmed', { blockHash, height })
485
+ this.emit('tx:confirmed', { txid, blockHash, height })
486
+ }
487
+
488
+ /**
489
+ * Get tx block placement.
490
+ * @param {string} txid
491
+ * @returns {Promise<object|null>}
492
+ */
493
+ async getTxBlock (txid) {
494
+ try {
495
+ const val = await this._txBlock.get(`tx!${txid}`)
496
+ return val !== undefined ? val : null
497
+ } catch { return null }
498
+ }
499
+
500
+ /**
501
+ * Handle reorg mark all txs in disconnected block as orphaned.
502
+ * @param {string} blockHash — the disconnected block hash
503
+ * @returns {Promise<string[]>} list of affected txids
504
+ */
505
+ async handleReorg (blockHash) {
506
+ const affected = []
507
+ const prefix = `block!${blockHash}!tx!`
508
+
509
+ // Find all txids in this block via reverse index
510
+ for await (const [key] of this._txBlock.iterator({ gte: prefix, lt: prefix + '~' })) {
511
+ const txid = key.slice(prefix.length)
512
+ affected.push(txid)
513
+ }
514
+
515
+ // Mark each as orphaned + clean up block associations
516
+ for (const txid of affected) {
517
+ await this.updateTxStatus(txid, 'orphaned', { blockHash })
518
+ await this._txBlock.del(`tx!${txid}`)
519
+ await this._txBlock.del(`block!${blockHash}!tx!${txid}`)
520
+ }
521
+
522
+ return affected
523
+ }
524
+
525
+ // ── Content-Addressed Storage ───────────────────────────
526
+
527
+ static CAS_THRESHOLD = 4096 // 4KB — below this, inline in LevelDB
528
+
529
+ /**
530
+ * Store content bytes via CAS. Small content inline, large to filesystem.
531
+ * @param {string} hexContent — hex-encoded content bytes
532
+ * @param {string} [mime] content type
533
+ * @returns {Promise<{ contentHash: string, contentLen: number, contentPath: string|null, inline: boolean }>}
534
+ */
535
+ async putContent (hexContent, mime) {
536
+ const buf = Buffer.from(hexContent, 'hex')
537
+ const contentHash = createHash('sha256').update(buf).digest('hex')
538
+ const contentLen = buf.length
539
+ const inline = contentLen < PersistentStore.CAS_THRESHOLD
540
+
541
+ const record = { len: contentLen, mime: mime || null, createdAt: Date.now() }
542
+
543
+ if (inline) {
544
+ record.inline = hexContent
545
+ record.path = null
546
+ } else {
547
+ const dir = join(this._contentDir, contentHash.slice(0, 2))
548
+ const filePath = join(dir, contentHash)
549
+ await mkdir(dir, { recursive: true })
550
+ await writeFile(filePath, buf)
551
+ record.path = filePath
552
+ }
553
+
554
+ await this._content.put(`c!${contentHash}`, record)
555
+ return { contentHash, contentLen, contentPath: record.path, inline }
556
+ }
557
+
558
+ /**
559
+ * Get content bytes by hash.
560
+ * @param {string} contentHash
561
+ * @returns {Promise<Buffer|null>}
562
+ */
563
+ async getContentBytes (contentHash) {
564
+ let record
565
+ try {
566
+ const val = await this._content.get(`c!${contentHash}`)
567
+ if (val === undefined) return null
568
+ record = val
569
+ } catch { return null }
570
+
571
+ if (record.inline) {
572
+ return Buffer.from(record.inline, 'hex')
573
+ }
574
+ if (record.path) {
575
+ try { return await readFile(record.path) } catch { return null }
576
+ }
577
+ return null
578
+ }
579
+
580
+ /**
581
+ * Get content metadata by hash.
582
+ * @param {string} contentHash
583
+ * @returns {Promise<object|null>}
584
+ */
585
+ async getContentMeta (contentHash) {
586
+ try {
587
+ const val = await this._content.get(`c!${contentHash}`)
588
+ return val !== undefined ? val : null
589
+ } catch { return null }
590
+ }
591
+
592
+ // ── Token Tracking (BSV-20) ─────────────────────────────
593
+
594
+ /**
595
+ * Process a BSV-20 token operation (confirmed-only).
596
+ * Uses atomic batch() for all writes. Keyed by scriptHash for owner identity.
597
+ * @param {{ op: string, tick: string, amt: string, ownerScriptHash: string, address: string|null, txid: string, height: number, blockHash: string }} params
598
+ * @returns {Promise<{ valid: boolean, reason?: string }>}
599
+ */
600
+ async processTokenOp ({ op, tick, amt, ownerScriptHash, address, txid, height, blockHash }) {
601
+ const tickNorm = tick.toLowerCase().trim()
602
+
603
+ if (op === 'deploy') {
604
+ // Only first deploy counts (chain-ordered by height)
605
+ const existing = await this._safeGet(this._tokens, `tick!${tickNorm}`)
606
+ if (existing) return { valid: false, reason: 'already deployed' }
607
+
608
+ const parsed = typeof amt === 'object' ? amt : {}
609
+ const batch = [
610
+ { type: 'put', key: `tick!${tickNorm}`, value: {
611
+ tick: tickNorm, max: parsed.max || '0', lim: parsed.lim || '0',
612
+ dec: parsed.dec || '0', deployer: ownerScriptHash, deployerAddr: address,
613
+ deployTxid: txid, deployHeight: height, totalMinted: '0'
614
+ }},
615
+ { type: 'put', key: `op!${String(height).padStart(10, '0')}!${txid}!deploy`, value: {
616
+ tick: tickNorm, op: 'deploy', ownerScriptHash, valid: true
617
+ }}
618
+ ]
619
+ await this._tokens.batch(batch)
620
+ return { valid: true }
621
+ }
622
+
623
+ if (op === 'mint') {
624
+ const deploy = await this._safeGet(this._tokens, `tick!${tickNorm}`)
625
+ if (!deploy) return { valid: false, reason: 'token not deployed' }
626
+
627
+ const mintAmt = BigInt(amt || '0')
628
+ if (mintAmt <= 0n) return { valid: false, reason: 'invalid amount' }
629
+ if (deploy.lim !== '0' && mintAmt > BigInt(deploy.lim)) return { valid: false, reason: 'exceeds mint limit' }
630
+
631
+ const newTotal = BigInt(deploy.totalMinted) + mintAmt
632
+ if (deploy.max !== '0' && newTotal > BigInt(deploy.max)) return { valid: false, reason: 'exceeds max supply' }
633
+
634
+ // Credit owner balance
635
+ const balKey = `bal!${tickNorm}!owner!${ownerScriptHash}`
636
+ const existing = await this._safeGet(this._tokens, balKey) || { confirmed: '0' }
637
+ const newBal = (BigInt(existing.confirmed) + mintAmt).toString()
638
+
639
+ const batch = [
640
+ { type: 'put', key: `tick!${tickNorm}`, value: { ...deploy, totalMinted: newTotal.toString() } },
641
+ { type: 'put', key: balKey, value: { confirmed: newBal, updatedAt: Date.now() } },
642
+ { type: 'put', key: `op!${String(height).padStart(10, '0')}!${txid}!mint`, value: {
643
+ tick: tickNorm, op: 'mint', amt: amt, ownerScriptHash, valid: true
644
+ }}
645
+ ]
646
+ await this._tokens.batch(batch)
647
+ return { valid: true }
648
+ }
649
+
650
+ // Transfers deferred to Phase 2
651
+ return { valid: false, reason: 'transfers not yet supported' }
652
+ }
653
+
654
+ /**
655
+ * Get token deploy info.
656
+ * @param {string} tick
657
+ * @returns {Promise<object|null>}
658
+ */
659
+ async getToken (tick) {
660
+ return this._safeGet(this._tokens, `tick!${tick.toLowerCase().trim()}`)
661
+ }
662
+
663
+ /**
664
+ * Get token balance for an owner.
665
+ * @param {string} tick
666
+ * @param {string} ownerScriptHash
667
+ * @returns {Promise<string>} balance as string
668
+ */
669
+ async getTokenBalance (tick, ownerScriptHash) {
670
+ const record = await this._safeGet(this._tokens, `bal!${tick.toLowerCase().trim()}!owner!${ownerScriptHash}`)
671
+ return record ? record.confirmed : '0'
672
+ }
673
+
674
+ /**
675
+ * List all deployed tokens.
676
+ * @returns {Promise<Array>}
677
+ */
678
+ async listTokens () {
679
+ const tokens = []
680
+ const prefix = 'tick!'
681
+ for await (const [key, value] of this._tokens.iterator({ gte: prefix, lt: prefix + '~' })) {
682
+ tokens.push(value)
683
+ }
684
+ return tokens
685
+ }
686
+
687
+ /** Safe get — returns null instead of throwing for missing keys. */
688
+ async _safeGet (sublevel, key) {
689
+ try {
690
+ const val = await sublevel.get(key)
691
+ return val !== undefined ? val : null
692
+ } catch { return null }
693
+ }
694
+
695
+ // ── Inscriptions ─────────────────────────────────────────
696
+
697
+ /**
698
+ * Store an inscription record with secondary indexes.
699
+ * @param {{ txid: string, vout: number, contentType: string, contentSize: number, isBsv20: boolean, bsv20: object|null, timestamp: number, address: string|null }} record
700
+ */
701
+ async putInscription (record) {
702
+ const key = `${record.txid}:${record.vout}`
703
+ const suffix = `${record.txid}:${record.vout}`
704
+
705
+ // Purge ALL stale secondary index entries pointing to this key
706
+ try {
707
+ const delBatch = []
708
+ for await (const [idxKey, val] of this._inscriptionIdx.iterator()) {
709
+ if (val === key && idxKey.endsWith(suffix)) delBatch.push({ type: 'del', key: idxKey })
710
+ }
711
+ if (delBatch.length) await this._inscriptionIdx.batch(delBatch)
712
+ } catch {}
713
+
714
+ // Route content through CAS
715
+ if (record.content) {
716
+ try {
717
+ const cas = await this.putContent(record.content, record.contentType)
718
+ record.contentHash = cas.contentHash
719
+ record.contentLen = cas.contentLen
720
+ // Strip raw content from inscription record if large (stored on filesystem)
721
+ if (!cas.inline) {
722
+ delete record.content
723
+ }
724
+ } catch {}
725
+ }
726
+
727
+ await this._inscriptions.put(key, record)
728
+
729
+ const ts = String(record.timestamp).padStart(15, '0')
730
+ const batch = [{ type: 'put', key: `time:${ts}:${suffix}`, value: key }]
731
+ if (record.contentType) {
732
+ batch.push({ type: 'put', key: `mime:${record.contentType}:${ts}:${suffix}`, value: key })
733
+ }
734
+ if (record.address) {
735
+ batch.push({ type: 'put', key: `addr:${record.address}:${ts}:${suffix}`, value: key })
736
+ }
737
+ await this._inscriptionIdx.batch(batch)
738
+ }
739
+
740
+ /**
741
+ * Query inscriptions with optional filters.
742
+ * @param {{ mime?: string, address?: string, limit?: number }} opts
743
+ * @returns {Promise<Array>}
744
+ */
745
+ async getInscriptions ({ mime, address, limit = 50 } = {}) {
746
+ const results = []
747
+ let prefix
748
+ if (address) {
749
+ prefix = `addr:${address}:`
750
+ } else if (mime) {
751
+ prefix = `mime:${mime}:`
752
+ } else {
753
+ prefix = 'time:'
754
+ }
755
+
756
+ for await (const [, primaryKey] of this._inscriptionIdx.iterator({
757
+ gte: prefix, lt: prefix + '~', reverse: true, limit
758
+ })) {
759
+ try {
760
+ const record = await this._inscriptions.get(primaryKey)
761
+ if (record) {
762
+ // Strip content from list results (can be 400KB+ per image)
763
+ const { content, ...meta } = record
764
+ results.push(meta)
765
+ }
766
+ } catch {}
767
+ }
768
+ return results
769
+ }
770
+
771
+ /**
772
+ * Rebuild inscription secondary indexes from primary records.
773
+ * Clears all index entries and re-creates from source of truth.
774
+ * @returns {Promise<number>} count of inscriptions re-indexed
775
+ */
776
+ async rebuildInscriptionIndex () {
777
+ // Clear entire index
778
+ for await (const [key] of this._inscriptionIdx.iterator()) {
779
+ await this._inscriptionIdx.del(key)
780
+ }
781
+ // Re-create from primary records
782
+ let count = 0
783
+ for await (const [, record] of this._inscriptions.iterator()) {
784
+ const ts = String(record.timestamp).padStart(15, '0')
785
+ const suffix = `${record.txid}:${record.vout}`
786
+ const key = suffix
787
+ const batch = [{ type: 'put', key: `time:${ts}:${suffix}`, value: key }]
788
+ if (record.contentType) batch.push({ type: 'put', key: `mime:${record.contentType}:${ts}:${suffix}`, value: key })
789
+ if (record.address) batch.push({ type: 'put', key: `addr:${record.address}:${ts}:${suffix}`, value: key })
790
+ await this._inscriptionIdx.batch(batch)
791
+ count++
792
+ }
793
+ return count
794
+ }
795
+
796
+ /**
797
+ * Get a single inscription record (with content) by txid:vout.
798
+ * @param {string} txid
799
+ * @param {number} vout
800
+ * @returns {Promise<object|null>}
801
+ */
802
+ async getInscription (txid, vout) {
803
+ try {
804
+ return await this._inscriptions.get(`${txid}:${vout}`)
805
+ } catch {
806
+ return null
807
+ }
808
+ }
809
+
810
+ /**
811
+ * Get total inscription count.
812
+ * @returns {Promise<number>}
813
+ */
814
+ async getInscriptionCount () {
815
+ let count = 0
816
+ for await (const _ of this._inscriptions.keys()) count++
817
+ return count
818
+ }
819
+
820
+ // ── x402 Payment Receipts ──────────────────────────────
821
+
822
+ /**
823
+ * Atomic claim — put-if-absent. Returns { ok: true } if claimed,
824
+ * { ok: false } if txid already exists (replay blocked).
825
+ */
826
+ async claimTxid (txid, { routeKey, price, createdAt }) {
827
+ const key = `u!${txid}`
828
+ try {
829
+ await this._paymentReceipts.put(key,
830
+ { status: 'claimed', routeKey, price, createdAt },
831
+ { ifNotExists: true })
832
+ return { ok: true }
833
+ } catch (err) {
834
+ if (err.code !== 'LEVEL_KEY_EXISTS' && err?.cause?.code !== 'LEVEL_KEY_EXISTS')
835
+ console.error(`[x402] unexpected claimTxid error for ${txid}:`, err.message)
836
+ return { ok: false }
837
+ }
838
+ }
839
+
840
+ /**
841
+ * Release a claim (verification failed). Only deletes if status is 'claimed'.
842
+ * Never deletes receipts — finalized payments are permanent.
843
+ */
844
+ async releaseClaim (txid) {
845
+ const key = `u!${txid}`
846
+ try {
847
+ const val = await this._paymentReceipts.get(key)
848
+ if (val && val.status === 'claimed') await this._paymentReceipts.del(key)
849
+ } catch {}
850
+ }
851
+
852
+ /**
853
+ * Promote claim to permanent receipt. Overwrites in-place — key is
854
+ * NEVER deleted after this, blocking replay permanently.
855
+ */
856
+ async finalizePayment (txid, receipt) {
857
+ await this._paymentReceipts.put(`u!${txid}`, { ...receipt, status: 'receipt' })
858
+ }
859
+
860
+ /**
861
+ * Startup sweep — delete stale claims older than maxAgeMs (default 5 min).
862
+ * Only touches status === 'claimed' keys. Receipts are untouched.
863
+ */
864
+ async cleanupStaleClaims (maxAgeMs = 300000) {
865
+ const now = Date.now()
866
+ for await (const [key, val] of this._paymentReceipts.iterator({ gte: 'u!', lt: 'u~' })) {
867
+ if (val.status !== 'claimed') continue
868
+ if (!val.createdAt || (now - val.createdAt) > maxAgeMs)
869
+ await this._paymentReceipts.del(key)
870
+ }
871
+ }
872
+
873
+ /**
874
+ * Prune old receipts — chunked batch deletes for receipts older than N months.
875
+ */
876
+ async pruneOldReceipts (monthsToKeep = 6) {
877
+ const cutoffMs = Date.now() - (monthsToKeep * 30 * 24 * 60 * 60 * 1000)
878
+ const CHUNK = 500
879
+ let ops = []
880
+ for await (const [key, val] of this._paymentReceipts.iterator({ gte: 'u!', lt: 'u~' })) {
881
+ if (val.status !== 'receipt') continue
882
+ if (val.createdAt && val.createdAt < cutoffMs) {
883
+ ops.push({ type: 'del', key })
884
+ if (ops.length >= CHUNK) { await this._paymentReceipts.batch(ops); ops = [] }
885
+ }
886
+ }
887
+ if (ops.length > 0) await this._paymentReceipts.batch(ops)
888
+ }
889
+ }
890
+
891
/** Double SHA-256 (Bitcoin standard): sha256(sha256(data)). */
function doubleSha256 (data) {
  const inner = createHash('sha256').update(data).digest()
  return createHash('sha256').update(inner).digest()
}