@relay-federation/bridge 0.1.2 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.js +753 -73
- package/dashboard/index.html +2212 -0
- package/lib/actions.js +373 -0
- package/lib/address-scanner.js +169 -0
- package/lib/address-watcher.js +161 -0
- package/lib/bsv-node-client.js +311 -0
- package/lib/bsv-peer.js +791 -0
- package/lib/config.js +32 -4
- package/lib/data-validator.js +6 -0
- package/lib/endpoint-probe.js +45 -0
- package/lib/gossip.js +266 -0
- package/lib/header-relay.js +6 -1
- package/lib/ip-diversity.js +88 -0
- package/lib/output-parser.js +494 -0
- package/lib/peer-manager.js +81 -12
- package/lib/persistent-store.js +708 -0
- package/lib/status-server.js +965 -14
- package/package.json +4 -2
|
@@ -0,0 +1,708 @@
|
|
|
1
|
+
import { Level } from 'level'
|
|
2
|
+
import { EventEmitter } from 'node:events'
|
|
3
|
+
import { createHash } from 'node:crypto'
|
|
4
|
+
import { join } from 'node:path'
|
|
5
|
+
import { mkdir, writeFile, readFile } from 'node:fs/promises'
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* PersistentStore — LevelDB-backed storage for bridge state.
|
|
9
|
+
*
|
|
10
|
+
* Stores headers, transactions, and arbitrary metadata in sublevel
|
|
11
|
+
* namespaces. Replaces the in-memory Maps used by HeaderRelay and
|
|
12
|
+
* TxRelay with durable storage that survives restarts.
|
|
13
|
+
*
|
|
14
|
+
* Sublevels:
|
|
15
|
+
* headers — height → { height, hash, prevHash }
|
|
16
|
+
* txs — txid → rawHex
|
|
17
|
+
* utxos — txid:vout → { txid, vout, satoshis, scriptHex, address, spent }
|
|
18
|
+
* meta — key → value (bestHeight, bestHash, etc.)
|
|
19
|
+
* watched — txid → { txid, address, direction, timestamp }
|
|
20
|
+
*
|
|
21
|
+
* Events:
|
|
22
|
+
* 'open' — store ready
|
|
23
|
+
* 'error' — LevelDB error
|
|
24
|
+
*/
|
|
25
|
+
export class PersistentStore extends EventEmitter {
|
|
26
|
+
/**
|
|
27
|
+
* @param {string} dataDir — directory for the LevelDB database
|
|
28
|
+
*/
|
|
29
|
+
constructor (dataDir) {
|
|
30
|
+
super()
|
|
31
|
+
this.dbPath = join(dataDir, 'bridge.db')
|
|
32
|
+
this.db = null
|
|
33
|
+
this._headers = null
|
|
34
|
+
this._txs = null
|
|
35
|
+
this._utxos = null
|
|
36
|
+
this._meta = null
|
|
37
|
+
this._watched = null
|
|
38
|
+
this._hashIndex = null
|
|
39
|
+
this._inscriptions = null
|
|
40
|
+
this._inscriptionIdx = null
|
|
41
|
+
this._txStatus = null
|
|
42
|
+
this._txBlock = null
|
|
43
|
+
this._content = null
|
|
44
|
+
this._tokens = null
|
|
45
|
+
this._contentDir = join(dataDir, 'content')
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/** Open the database and create sublevels. */
|
|
49
|
+
async open () {
|
|
50
|
+
this.db = new Level(this.dbPath, { valueEncoding: 'json' })
|
|
51
|
+
await this.db.open()
|
|
52
|
+
this._headers = this.db.sublevel('headers', { valueEncoding: 'json' })
|
|
53
|
+
this._txs = this.db.sublevel('txs', { valueEncoding: 'utf8' })
|
|
54
|
+
this._utxos = this.db.sublevel('utxos', { valueEncoding: 'json' })
|
|
55
|
+
this._meta = this.db.sublevel('meta', { valueEncoding: 'json' })
|
|
56
|
+
this._watched = this.db.sublevel('watched', { valueEncoding: 'json' })
|
|
57
|
+
this._hashIndex = this.db.sublevel('hashIndex', { valueEncoding: 'json' })
|
|
58
|
+
this._inscriptions = this.db.sublevel('inscriptions', { valueEncoding: 'json' })
|
|
59
|
+
this._inscriptionIdx = this.db.sublevel('inscIdx', { valueEncoding: 'json' })
|
|
60
|
+
this._txStatus = this.db.sublevel('txStatus', { valueEncoding: 'json' })
|
|
61
|
+
this._txBlock = this.db.sublevel('txBlock', { valueEncoding: 'json' })
|
|
62
|
+
this._content = this.db.sublevel('content', { valueEncoding: 'json' })
|
|
63
|
+
this._tokens = this.db.sublevel('tokens', { valueEncoding: 'json' })
|
|
64
|
+
await mkdir(this._contentDir, { recursive: true })
|
|
65
|
+
this.emit('open')
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/** Close the database. */
|
|
69
|
+
async close () {
|
|
70
|
+
if (this.db) await this.db.close()
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// ── Headers ──────────────────────────────────────────────
|
|
74
|
+
|
|
75
|
+
/**
|
|
76
|
+
* Store a header by height (with hash index).
|
|
77
|
+
* @param {{ height: number, hash: string, prevHash: string, merkleRoot?: string, timestamp?: number, bits?: number, nonce?: number, version?: number }} header
|
|
78
|
+
*/
|
|
79
|
+
async putHeader (header) {
|
|
80
|
+
await this._headers.put(String(header.height), header)
|
|
81
|
+
if (header.hash) {
|
|
82
|
+
await this._hashIndex.put(header.hash, header.height)
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
/**
|
|
87
|
+
* Store multiple headers in a batch (with hash index).
|
|
88
|
+
* @param {Array<{ height: number, hash: string, prevHash: string, merkleRoot?: string, timestamp?: number, bits?: number, nonce?: number, version?: number }>} headers
|
|
89
|
+
*/
|
|
90
|
+
async putHeaders (headers) {
|
|
91
|
+
const headerOps = headers.map(h => ({
|
|
92
|
+
type: 'put',
|
|
93
|
+
key: String(h.height),
|
|
94
|
+
value: h
|
|
95
|
+
}))
|
|
96
|
+
await this._headers.batch(headerOps)
|
|
97
|
+
const hashOps = headers.filter(h => h.hash).map(h => ({
|
|
98
|
+
type: 'put',
|
|
99
|
+
key: h.hash,
|
|
100
|
+
value: h.height
|
|
101
|
+
}))
|
|
102
|
+
if (hashOps.length > 0) {
|
|
103
|
+
await this._hashIndex.batch(hashOps)
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Get a header by height.
|
|
109
|
+
* @param {number} height
|
|
110
|
+
* @returns {Promise<{ height: number, hash: string, prevHash: string }|null>}
|
|
111
|
+
*/
|
|
112
|
+
async getHeader (height) {
|
|
113
|
+
const val = await this._headers.get(String(height))
|
|
114
|
+
return val !== undefined ? val : null
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
/**
|
|
118
|
+
* Get a header by block hash.
|
|
119
|
+
* @param {string} hash
|
|
120
|
+
* @returns {Promise<object|null>}
|
|
121
|
+
*/
|
|
122
|
+
async getHeaderByHash (hash) {
|
|
123
|
+
const height = await this._hashIndex.get(hash)
|
|
124
|
+
if (height === undefined) return null
|
|
125
|
+
return this.getHeader(height)
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
  /**
   * Verify a merkle proof against a stored block header.
   * @param {string} txHash — transaction hash (hex, display order)
   * @param {string[]} merkleProof — sibling hashes in the merkle path (hex, display order)
   * @param {number} txIndex — transaction index in the block
   * @param {string} blockHash — block hash to verify against
   * @returns {Promise<{ verified: boolean, blockHash: string, blockHeight: number, blockTimestamp: number }>}
   * @throws if the block is unknown, the stored header has no merkleRoot,
   *         or the recomputed root does not match the header's
   */
  async verifyMerkleProof (txHash, merkleProof, txIndex, blockHash) {
    const header = await this.getHeaderByHash(blockHash)
    if (!header) {
      throw new Error(`Block ${blockHash} not found in header chain`)
    }
    if (!header.merkleRoot) {
      throw new Error(`Header at height ${header.height} has no merkleRoot stored`)
    }

    // Compute merkle root from proof. Inputs are display-order hex, so each
    // hash is reversed into internal byte order before hashing.
    let hash = Buffer.from(txHash, 'hex').reverse()
    let index = txIndex

    for (const proofHash of merkleProof) {
      const sibling = Buffer.from(proofHash, 'hex').reverse()
      // Even index → current node is the left child; odd → right child.
      const combined = (index % 2 === 0)
        ? Buffer.concat([hash, sibling])
        : Buffer.concat([sibling, hash])
      hash = doubleSha256(combined)
      index = Math.floor(index / 2)
    }

    // Reverse back to display order for comparison against the header.
    const calculatedRoot = hash.reverse().toString('hex')

    if (calculatedRoot !== header.merkleRoot) {
      throw new Error('Merkle proof verification failed')
    }

    return {
      verified: true,
      blockHash: header.hash,
      blockHeight: header.height,
      blockTimestamp: header.timestamp
    }
  }
|
|
171
|
+
|
|
172
|
+
// ── Transactions ─────────────────────────────────────────
|
|
173
|
+
|
|
174
|
+
/**
|
|
175
|
+
* Store a raw transaction.
|
|
176
|
+
* @param {string} txid
|
|
177
|
+
* @param {string} rawHex
|
|
178
|
+
*/
|
|
179
|
+
async putTx (txid, rawHex) {
|
|
180
|
+
await this._txs.put(txid, rawHex)
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
/**
|
|
184
|
+
* Get a raw transaction by txid.
|
|
185
|
+
* @param {string} txid
|
|
186
|
+
* @returns {Promise<string|null>} rawHex or null
|
|
187
|
+
*/
|
|
188
|
+
async getTx (txid) {
|
|
189
|
+
const val = await this._txs.get(txid)
|
|
190
|
+
return val !== undefined ? val : null
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
/**
|
|
194
|
+
* Check if a transaction exists.
|
|
195
|
+
* @param {string} txid
|
|
196
|
+
* @returns {Promise<boolean>}
|
|
197
|
+
*/
|
|
198
|
+
async hasTx (txid) {
|
|
199
|
+
return (await this.getTx(txid)) !== null
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
// ── UTXOs ────────────────────────────────────────────────
|
|
203
|
+
|
|
204
|
+
/**
|
|
205
|
+
* Store a UTXO.
|
|
206
|
+
* @param {{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }} utxo
|
|
207
|
+
*/
|
|
208
|
+
async putUtxo (utxo) {
|
|
209
|
+
const key = `${utxo.txid}:${utxo.vout}`
|
|
210
|
+
await this._utxos.put(key, { ...utxo, spent: false })
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
/**
|
|
214
|
+
* Mark a UTXO as spent.
|
|
215
|
+
* @param {string} txid
|
|
216
|
+
* @param {number} vout
|
|
217
|
+
*/
|
|
218
|
+
async spendUtxo (txid, vout) {
|
|
219
|
+
const key = `${txid}:${vout}`
|
|
220
|
+
const utxo = await this._utxos.get(key)
|
|
221
|
+
if (utxo === undefined) return
|
|
222
|
+
utxo.spent = true
|
|
223
|
+
await this._utxos.put(key, utxo)
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
/**
|
|
227
|
+
* Get all unspent UTXOs.
|
|
228
|
+
* @returns {Promise<Array<{ txid: string, vout: number, satoshis: number, scriptHex: string, address: string }>>}
|
|
229
|
+
*/
|
|
230
|
+
async getUnspentUtxos () {
|
|
231
|
+
const utxos = []
|
|
232
|
+
for await (const [, utxo] of this._utxos.iterator()) {
|
|
233
|
+
if (!utxo.spent) utxos.push(utxo)
|
|
234
|
+
}
|
|
235
|
+
return utxos
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
/**
|
|
239
|
+
* Get total unspent balance in satoshis.
|
|
240
|
+
* @returns {Promise<number>}
|
|
241
|
+
*/
|
|
242
|
+
async getBalance () {
|
|
243
|
+
let total = 0
|
|
244
|
+
for await (const [, utxo] of this._utxos.iterator()) {
|
|
245
|
+
if (!utxo.spent) total += utxo.satoshis
|
|
246
|
+
}
|
|
247
|
+
return total
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
// ── Watched address matches ──────────────────────────────
|
|
251
|
+
|
|
252
|
+
/**
|
|
253
|
+
* Store a watched-address match (a tx that touched a watched address).
|
|
254
|
+
* @param {{ txid: string, address: string, direction: 'in'|'out', timestamp: number }} match
|
|
255
|
+
*/
|
|
256
|
+
async putWatchedTx (match) {
|
|
257
|
+
const key = `${match.address}:${match.txid}`
|
|
258
|
+
await this._watched.put(key, match)
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
/**
|
|
262
|
+
* Get all watched-address matches for an address.
|
|
263
|
+
* @param {string} address
|
|
264
|
+
* @returns {Promise<Array>}
|
|
265
|
+
*/
|
|
266
|
+
async getWatchedTxs (address) {
|
|
267
|
+
const matches = []
|
|
268
|
+
for await (const [key, value] of this._watched.iterator()) {
|
|
269
|
+
if (key.startsWith(`${address}:`)) {
|
|
270
|
+
matches.push(value)
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
return matches
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// ── Metadata ─────────────────────────────────────────────
|
|
277
|
+
|
|
278
|
+
/**
|
|
279
|
+
* Store a metadata value.
|
|
280
|
+
* @param {string} key
|
|
281
|
+
* @param {*} value — any JSON-serializable value
|
|
282
|
+
*/
|
|
283
|
+
async putMeta (key, value) {
|
|
284
|
+
await this._meta.put(key, value)
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
/**
|
|
288
|
+
* Get a metadata value.
|
|
289
|
+
* @param {string} key
|
|
290
|
+
* @param {*} [defaultValue=null]
|
|
291
|
+
* @returns {Promise<*>}
|
|
292
|
+
*/
|
|
293
|
+
async getMeta (key, defaultValue = null) {
|
|
294
|
+
const val = await this._meta.get(key)
|
|
295
|
+
return val !== undefined ? val : defaultValue
|
|
296
|
+
}
|
|
297
|
+
// ── Tx Status + Block Mapping ───────────────────────────
|
|
298
|
+
|
|
299
|
+
  /**
   * Set or update tx lifecycle state.
   *
   * Merges into any existing status record (preserving its firstSeen) and
   * keeps a `mempool!<txid>` secondary index in sync so mempool members can
   * be listed without scanning every status record.
   *
   * @param {string} txid
   * @param {'mempool'|'confirmed'|'orphaned'|'dropped'} state
   * @param {object} [meta] — optional fields: blockHash, height, source
   * @returns {Promise<object>} the stored status record
   */
  async updateTxStatus (txid, state, meta = {}) {
    const key = `s!${txid}`
    const now = Date.now()
    let existing = null
    // Best-effort read: a missing key (resolved undefined or thrown
    // LEVEL_NOT_FOUND, depending on level version) leaves existing = null.
    try {
      const val = await this._txStatus.get(key)
      if (val !== undefined) existing = val
    } catch {}

    // Fresh records get firstSeen = now; existing records keep their original.
    const record = existing || { firstSeen: now }
    record.state = state
    record.lastSeen = now
    record.updatedAt = now
    if (meta.blockHash) record.blockHash = meta.blockHash
    if (meta.height !== undefined) record.height = meta.height
    if (meta.source) record.source = meta.source

    const batch = [{ type: 'put', key, value: record }]

    // Maintain mempool secondary index
    if (state === 'mempool') {
      batch.push({ type: 'put', key: `mempool!${txid}`, value: 1 })
    } else if (existing?.state === 'mempool') {
      // Transitioning out of the mempool — drop the index entry in the
      // same batch so record and index stay consistent.
      batch.push({ type: 'del', key: `mempool!${txid}` })
    }

    await this._txStatus.batch(batch)
    return record
  }
|
|
334
|
+
|
|
335
|
+
/**
|
|
336
|
+
* Get tx lifecycle state.
|
|
337
|
+
* @param {string} txid
|
|
338
|
+
* @returns {Promise<object|null>}
|
|
339
|
+
*/
|
|
340
|
+
async getTxStatus (txid) {
|
|
341
|
+
try {
|
|
342
|
+
const val = await this._txStatus.get(`s!${txid}`)
|
|
343
|
+
return val !== undefined ? val : null
|
|
344
|
+
} catch { return null }
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
  /**
   * Confirm a tx: record its block placement plus a block→tx reverse index
   * entry (used by handleReorg), then flip its status to 'confirmed' and
   * emit 'tx:confirmed'.
   *
   * NOTE(review): the two txBlock writes are batched together, but the
   * txStatus update is a separate write to a different sublevel — the
   * operation is NOT atomic across sublevels, so a crash in between can
   * leave a block record with a stale status.
   *
   * @param {string} txid
   * @param {string} blockHash
   * @param {number} height
   * @param {{ nodes: string[], index: number }|null} proof — when present, the
   *        record is marked verified and the proof is stored alongside it
   */
  async confirmTx (txid, blockHash, height, proof = null) {
    const now = Date.now()
    const blockRecord = { blockHash, height, confirmedAt: now, verified: !!proof }
    if (proof) blockRecord.proof = proof

    // Batched write within the txBlock sublevel: placement + reverse index.
    const txBlockBatch = [
      { type: 'put', key: `tx!${txid}`, value: blockRecord },
      { type: 'put', key: `block!${blockHash}!tx!${txid}`, value: 1 }
    ]
    await this._txBlock.batch(txBlockBatch)

    await this.updateTxStatus(txid, 'confirmed', { blockHash, height })
    this.emit('tx:confirmed', { txid, blockHash, height })
  }
|
|
369
|
+
|
|
370
|
+
/**
|
|
371
|
+
* Get tx block placement.
|
|
372
|
+
* @param {string} txid
|
|
373
|
+
* @returns {Promise<object|null>}
|
|
374
|
+
*/
|
|
375
|
+
async getTxBlock (txid) {
|
|
376
|
+
try {
|
|
377
|
+
const val = await this._txBlock.get(`tx!${txid}`)
|
|
378
|
+
return val !== undefined ? val : null
|
|
379
|
+
} catch { return null }
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
/**
|
|
383
|
+
* Handle reorg — mark all txs in disconnected block as orphaned.
|
|
384
|
+
* @param {string} blockHash — the disconnected block hash
|
|
385
|
+
* @returns {Promise<string[]>} list of affected txids
|
|
386
|
+
*/
|
|
387
|
+
async handleReorg (blockHash) {
|
|
388
|
+
const affected = []
|
|
389
|
+
const prefix = `block!${blockHash}!tx!`
|
|
390
|
+
|
|
391
|
+
// Find all txids in this block via reverse index
|
|
392
|
+
for await (const [key] of this._txBlock.iterator({ gte: prefix, lt: prefix + '~' })) {
|
|
393
|
+
const txid = key.slice(prefix.length)
|
|
394
|
+
affected.push(txid)
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
// Mark each as orphaned + clean up block associations
|
|
398
|
+
for (const txid of affected) {
|
|
399
|
+
await this.updateTxStatus(txid, 'orphaned', { blockHash })
|
|
400
|
+
await this._txBlock.del(`tx!${txid}`)
|
|
401
|
+
await this._txBlock.del(`block!${blockHash}!tx!${txid}`)
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
return affected
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
// ── Content-Addressed Storage ───────────────────────────
|
|
408
|
+
|
|
409
|
+
static CAS_THRESHOLD = 4096 // 4KB — below this, inline in LevelDB
|
|
410
|
+
|
|
411
|
+
/**
|
|
412
|
+
* Store content bytes via CAS. Small content inline, large to filesystem.
|
|
413
|
+
* @param {string} hexContent — hex-encoded content bytes
|
|
414
|
+
* @param {string} [mime] — content type
|
|
415
|
+
* @returns {Promise<{ contentHash: string, contentLen: number, contentPath: string|null, inline: boolean }>}
|
|
416
|
+
*/
|
|
417
|
+
async putContent (hexContent, mime) {
|
|
418
|
+
const buf = Buffer.from(hexContent, 'hex')
|
|
419
|
+
const contentHash = createHash('sha256').update(buf).digest('hex')
|
|
420
|
+
const contentLen = buf.length
|
|
421
|
+
const inline = contentLen < PersistentStore.CAS_THRESHOLD
|
|
422
|
+
|
|
423
|
+
const record = { len: contentLen, mime: mime || null, createdAt: Date.now() }
|
|
424
|
+
|
|
425
|
+
if (inline) {
|
|
426
|
+
record.inline = hexContent
|
|
427
|
+
record.path = null
|
|
428
|
+
} else {
|
|
429
|
+
const dir = join(this._contentDir, contentHash.slice(0, 2))
|
|
430
|
+
const filePath = join(dir, contentHash)
|
|
431
|
+
await mkdir(dir, { recursive: true })
|
|
432
|
+
await writeFile(filePath, buf)
|
|
433
|
+
record.path = filePath
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
await this._content.put(`c!${contentHash}`, record)
|
|
437
|
+
return { contentHash, contentLen, contentPath: record.path, inline }
|
|
438
|
+
}
|
|
439
|
+
|
|
440
|
+
/**
|
|
441
|
+
* Get content bytes by hash.
|
|
442
|
+
* @param {string} contentHash
|
|
443
|
+
* @returns {Promise<Buffer|null>}
|
|
444
|
+
*/
|
|
445
|
+
async getContentBytes (contentHash) {
|
|
446
|
+
let record
|
|
447
|
+
try {
|
|
448
|
+
const val = await this._content.get(`c!${contentHash}`)
|
|
449
|
+
if (val === undefined) return null
|
|
450
|
+
record = val
|
|
451
|
+
} catch { return null }
|
|
452
|
+
|
|
453
|
+
if (record.inline) {
|
|
454
|
+
return Buffer.from(record.inline, 'hex')
|
|
455
|
+
}
|
|
456
|
+
if (record.path) {
|
|
457
|
+
try { return await readFile(record.path) } catch { return null }
|
|
458
|
+
}
|
|
459
|
+
return null
|
|
460
|
+
}
|
|
461
|
+
|
|
462
|
+
/**
|
|
463
|
+
* Get content metadata by hash.
|
|
464
|
+
* @param {string} contentHash
|
|
465
|
+
* @returns {Promise<object|null>}
|
|
466
|
+
*/
|
|
467
|
+
async getContentMeta (contentHash) {
|
|
468
|
+
try {
|
|
469
|
+
const val = await this._content.get(`c!${contentHash}`)
|
|
470
|
+
return val !== undefined ? val : null
|
|
471
|
+
} catch { return null }
|
|
472
|
+
}
|
|
473
|
+
|
|
474
|
+
// ── Token Tracking (BSV-20) ─────────────────────────────
|
|
475
|
+
|
|
476
|
+
  /**
   * Process a BSV-20 token operation (confirmed-only).
   * Uses atomic batch() for all writes within the tokens sublevel. Keyed by
   * scriptHash for owner identity. Each accepted op also writes a
   * height-ordered `op!<height>!<txid>!<op>` audit record.
   *
   * NOTE(review): blockHash is accepted but not currently stored — presumably
   * reserved for reorg handling; confirm before removing.
   *
   * @param {{ op: string, tick: string, amt: string, ownerScriptHash: string, address: string|null, txid: string, height: number, blockHash: string }} params
   * @returns {Promise<{ valid: boolean, reason?: string }>}
   */
  async processTokenOp ({ op, tick, amt, ownerScriptHash, address, txid, height, blockHash }) {
    // Ticks are case-insensitive; normalize once for all key construction.
    const tickNorm = tick.toLowerCase().trim()

    if (op === 'deploy') {
      // Only first deploy counts (chain-ordered by height)
      const existing = await this._safeGet(this._tokens, `tick!${tickNorm}`)
      if (existing) return { valid: false, reason: 'already deployed' }

      // For deploys, amt carries the { max, lim, dec } object; anything else
      // falls back to all-zero limits ('0' = unlimited below).
      const parsed = typeof amt === 'object' ? amt : {}
      const batch = [
        { type: 'put', key: `tick!${tickNorm}`, value: {
          tick: tickNorm, max: parsed.max || '0', lim: parsed.lim || '0',
          dec: parsed.dec || '0', deployer: ownerScriptHash, deployerAddr: address,
          deployTxid: txid, deployHeight: height, totalMinted: '0'
        }},
        { type: 'put', key: `op!${String(height).padStart(10, '0')}!${txid}!deploy`, value: {
          tick: tickNorm, op: 'deploy', ownerScriptHash, valid: true
        }}
      ]
      await this._tokens.batch(batch)
      return { valid: true }
    }

    if (op === 'mint') {
      const deploy = await this._safeGet(this._tokens, `tick!${tickNorm}`)
      if (!deploy) return { valid: false, reason: 'token not deployed' }

      // All amounts are tracked as BigInt-backed decimal strings.
      const mintAmt = BigInt(amt || '0')
      if (mintAmt <= 0n) return { valid: false, reason: 'invalid amount' }
      if (deploy.lim !== '0' && mintAmt > BigInt(deploy.lim)) return { valid: false, reason: 'exceeds mint limit' }

      const newTotal = BigInt(deploy.totalMinted) + mintAmt
      if (deploy.max !== '0' && newTotal > BigInt(deploy.max)) return { valid: false, reason: 'exceeds max supply' }

      // Credit owner balance
      const balKey = `bal!${tickNorm}!owner!${ownerScriptHash}`
      const existing = await this._safeGet(this._tokens, balKey) || { confirmed: '0' }
      const newBal = (BigInt(existing.confirmed) + mintAmt).toString()

      // Supply counter, balance, and audit record all commit atomically.
      const batch = [
        { type: 'put', key: `tick!${tickNorm}`, value: { ...deploy, totalMinted: newTotal.toString() } },
        { type: 'put', key: balKey, value: { confirmed: newBal, updatedAt: Date.now() } },
        { type: 'put', key: `op!${String(height).padStart(10, '0')}!${txid}!mint`, value: {
          tick: tickNorm, op: 'mint', amt: amt, ownerScriptHash, valid: true
        }}
      ]
      await this._tokens.batch(batch)
      return { valid: true }
    }

    // Transfers deferred to Phase 2
    return { valid: false, reason: 'transfers not yet supported' }
  }
|
|
535
|
+
|
|
536
|
+
/**
|
|
537
|
+
* Get token deploy info.
|
|
538
|
+
* @param {string} tick
|
|
539
|
+
* @returns {Promise<object|null>}
|
|
540
|
+
*/
|
|
541
|
+
async getToken (tick) {
|
|
542
|
+
return this._safeGet(this._tokens, `tick!${tick.toLowerCase().trim()}`)
|
|
543
|
+
}
|
|
544
|
+
|
|
545
|
+
/**
|
|
546
|
+
* Get token balance for an owner.
|
|
547
|
+
* @param {string} tick
|
|
548
|
+
* @param {string} ownerScriptHash
|
|
549
|
+
* @returns {Promise<string>} balance as string
|
|
550
|
+
*/
|
|
551
|
+
async getTokenBalance (tick, ownerScriptHash) {
|
|
552
|
+
const record = await this._safeGet(this._tokens, `bal!${tick.toLowerCase().trim()}!owner!${ownerScriptHash}`)
|
|
553
|
+
return record ? record.confirmed : '0'
|
|
554
|
+
}
|
|
555
|
+
|
|
556
|
+
/**
|
|
557
|
+
* List all deployed tokens.
|
|
558
|
+
* @returns {Promise<Array>}
|
|
559
|
+
*/
|
|
560
|
+
async listTokens () {
|
|
561
|
+
const tokens = []
|
|
562
|
+
const prefix = 'tick!'
|
|
563
|
+
for await (const [key, value] of this._tokens.iterator({ gte: prefix, lt: prefix + '~' })) {
|
|
564
|
+
tokens.push(value)
|
|
565
|
+
}
|
|
566
|
+
return tokens
|
|
567
|
+
}
|
|
568
|
+
|
|
569
|
+
/** Safe get — returns null instead of throwing for missing keys. */
|
|
570
|
+
async _safeGet (sublevel, key) {
|
|
571
|
+
try {
|
|
572
|
+
const val = await sublevel.get(key)
|
|
573
|
+
return val !== undefined ? val : null
|
|
574
|
+
} catch { return null }
|
|
575
|
+
}
|
|
576
|
+
|
|
577
|
+
// ── Inscriptions ─────────────────────────────────────────
|
|
578
|
+
|
|
579
|
+
  /**
   * Store an inscription record with time/mime/address secondary indexes.
   *
   * NOTE(review): the stale-index purge below scans the ENTIRE index sublevel
   * on every put — O(total index entries) per inscription; acceptable only
   * while inscription counts stay modest. Also mutates the caller's `record`
   * (adds contentHash/contentLen, may delete content).
   *
   * @param {{ txid: string, vout: number, contentType: string, contentSize: number, isBsv20: boolean, bsv20: object|null, timestamp: number, address: string|null, content?: string }} record
   */
  async putInscription (record) {
    const key = `${record.txid}:${record.vout}`
    const suffix = `${record.txid}:${record.vout}`

    // Purge ALL stale secondary index entries pointing to this key, so a
    // re-put with a changed timestamp/mime/address leaves no orphans.
    try {
      const delBatch = []
      for await (const [idxKey, val] of this._inscriptionIdx.iterator()) {
        if (val === key && idxKey.endsWith(suffix)) delBatch.push({ type: 'del', key: idxKey })
      }
      if (delBatch.length) await this._inscriptionIdx.batch(delBatch)
    } catch {}

    // Route content through CAS; failures are best-effort (record is still
    // stored, just without CAS fields).
    if (record.content) {
      try {
        const cas = await this.putContent(record.content, record.contentType)
        record.contentHash = cas.contentHash
        record.contentLen = cas.contentLen
        // Strip raw content from inscription record if large (stored on filesystem)
        if (!cas.inline) {
          delete record.content
        }
      } catch {}
    }

    await this._inscriptions.put(key, record)

    // Zero-padded timestamp keeps index keys in lexicographic = time order.
    const ts = String(record.timestamp).padStart(15, '0')
    const batch = [{ type: 'put', key: `time:${ts}:${suffix}`, value: key }]
    if (record.contentType) {
      batch.push({ type: 'put', key: `mime:${record.contentType}:${ts}:${suffix}`, value: key })
    }
    if (record.address) {
      batch.push({ type: 'put', key: `addr:${record.address}:${ts}:${suffix}`, value: key })
    }
    await this._inscriptionIdx.batch(batch)
  }
|
|
621
|
+
|
|
622
|
+
  /**
   * Query inscriptions with optional filters, newest first.
   * Filter precedence: address beats mime; with neither, the time index is
   * scanned. Raw content is stripped from every result.
   * @param {{ mime?: string, address?: string, limit?: number }} opts
   * @returns {Promise<Array>}
   */
  async getInscriptions ({ mime, address, limit = 50 } = {}) {
    const results = []
    // Pick the narrowest secondary index available for the filter.
    let prefix
    if (address) {
      prefix = `addr:${address}:`
    } else if (mime) {
      prefix = `mime:${mime}:`
    } else {
      prefix = 'time:'
    }

    // reverse:true walks the zero-padded-timestamp keys newest-first.
    for await (const [, primaryKey] of this._inscriptionIdx.iterator({
      gte: prefix, lt: prefix + '~', reverse: true, limit
    })) {
      try {
        const record = await this._inscriptions.get(primaryKey)
        if (record) {
          // Strip content from list results (can be 400KB+ per image)
          const { content, ...meta } = record
          results.push(meta)
        }
      } catch {}
    }
    return results
  }
|
|
652
|
+
|
|
653
|
+
/**
|
|
654
|
+
* Rebuild inscription secondary indexes from primary records.
|
|
655
|
+
* Clears all index entries and re-creates from source of truth.
|
|
656
|
+
* @returns {Promise<number>} count of inscriptions re-indexed
|
|
657
|
+
*/
|
|
658
|
+
async rebuildInscriptionIndex () {
|
|
659
|
+
// Clear entire index
|
|
660
|
+
for await (const [key] of this._inscriptionIdx.iterator()) {
|
|
661
|
+
await this._inscriptionIdx.del(key)
|
|
662
|
+
}
|
|
663
|
+
// Re-create from primary records
|
|
664
|
+
let count = 0
|
|
665
|
+
for await (const [, record] of this._inscriptions.iterator()) {
|
|
666
|
+
const ts = String(record.timestamp).padStart(15, '0')
|
|
667
|
+
const suffix = `${record.txid}:${record.vout}`
|
|
668
|
+
const key = suffix
|
|
669
|
+
const batch = [{ type: 'put', key: `time:${ts}:${suffix}`, value: key }]
|
|
670
|
+
if (record.contentType) batch.push({ type: 'put', key: `mime:${record.contentType}:${ts}:${suffix}`, value: key })
|
|
671
|
+
if (record.address) batch.push({ type: 'put', key: `addr:${record.address}:${ts}:${suffix}`, value: key })
|
|
672
|
+
await this._inscriptionIdx.batch(batch)
|
|
673
|
+
count++
|
|
674
|
+
}
|
|
675
|
+
return count
|
|
676
|
+
}
|
|
677
|
+
|
|
678
|
+
/**
|
|
679
|
+
* Get a single inscription record (with content) by txid:vout.
|
|
680
|
+
* @param {string} txid
|
|
681
|
+
* @param {number} vout
|
|
682
|
+
* @returns {Promise<object|null>}
|
|
683
|
+
*/
|
|
684
|
+
async getInscription (txid, vout) {
|
|
685
|
+
try {
|
|
686
|
+
return await this._inscriptions.get(`${txid}:${vout}`)
|
|
687
|
+
} catch {
|
|
688
|
+
return null
|
|
689
|
+
}
|
|
690
|
+
}
|
|
691
|
+
|
|
692
|
+
/**
|
|
693
|
+
* Get total inscription count.
|
|
694
|
+
* @returns {Promise<number>}
|
|
695
|
+
*/
|
|
696
|
+
async getInscriptionCount () {
|
|
697
|
+
let count = 0
|
|
698
|
+
for await (const _ of this._inscriptions.keys()) count++
|
|
699
|
+
return count
|
|
700
|
+
}
|
|
701
|
+
}
|
|
702
|
+
|
|
703
|
+
/** Double SHA-256 (Bitcoin standard): sha256(sha256(data)). */
function doubleSha256 (data) {
  const firstPass = createHash('sha256').update(data).digest()
  return createHash('sha256').update(firstPass).digest()
}
|