@1sat/wallet-toolbox 0.0.5 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/OneSatWallet.d.ts +46 -17
- package/dist/OneSatWallet.js +956 -0
- package/dist/errors.js +11 -0
- package/dist/index.d.ts +0 -2
- package/dist/index.js +12 -93707
- package/dist/indexers/Bsv21Indexer.js +232 -0
- package/dist/indexers/CosignIndexer.js +25 -0
- package/dist/indexers/FundIndexer.js +64 -0
- package/dist/indexers/InscriptionIndexer.js +115 -0
- package/dist/indexers/LockIndexer.js +42 -0
- package/dist/indexers/MapIndexer.js +62 -0
- package/dist/indexers/OpNSIndexer.js +38 -0
- package/dist/indexers/OrdLockIndexer.js +63 -0
- package/dist/indexers/OriginIndexer.js +240 -0
- package/dist/indexers/Outpoint.js +53 -0
- package/dist/indexers/SigmaIndexer.js +133 -0
- package/dist/indexers/TransactionParser.d.ts +53 -0
- package/dist/indexers/index.js +13 -0
- package/dist/indexers/parseAddress.js +24 -0
- package/dist/indexers/types.js +18 -0
- package/dist/services/OneSatServices.d.ts +12 -4
- package/dist/services/OneSatServices.js +231 -0
- package/dist/services/client/ArcadeClient.js +107 -0
- package/dist/services/client/BaseClient.js +125 -0
- package/dist/services/client/BeefClient.js +33 -0
- package/dist/services/client/Bsv21Client.js +65 -0
- package/dist/services/client/ChaintracksClient.js +175 -0
- package/dist/services/client/OrdfsClient.js +122 -0
- package/dist/services/client/OwnerClient.js +123 -0
- package/dist/services/client/TxoClient.js +85 -0
- package/dist/services/client/index.js +8 -0
- package/dist/services/types.js +5 -0
- package/dist/signers/ReadOnlySigner.js +47 -0
- package/dist/sync/IndexedDbSyncQueue.js +355 -0
- package/dist/sync/SqliteSyncQueue.js +197 -0
- package/dist/sync/index.js +3 -0
- package/dist/sync/types.js +4 -0
- package/package.json +5 -5
|
@@ -0,0 +1,956 @@
|
|
|
1
|
+
import { Beef, Hash, Random, Transaction, Utils, } from "@bsv/sdk";
|
|
2
|
+
import { Bsv21Indexer } from "./indexers/Bsv21Indexer";
|
|
3
|
+
import { CosignIndexer } from "./indexers/CosignIndexer";
|
|
4
|
+
import { FundIndexer } from "./indexers/FundIndexer";
|
|
5
|
+
import { InscriptionIndexer } from "./indexers/InscriptionIndexer";
|
|
6
|
+
import { LockIndexer } from "./indexers/LockIndexer";
|
|
7
|
+
import { MapIndexer } from "./indexers/MapIndexer";
|
|
8
|
+
import { OpNSIndexer } from "./indexers/OpNSIndexer";
|
|
9
|
+
import { OrdLockIndexer } from "./indexers/OrdLockIndexer";
|
|
10
|
+
import { OriginIndexer } from "./indexers/OriginIndexer";
|
|
11
|
+
import { Outpoint } from "./indexers/Outpoint";
|
|
12
|
+
import { SigmaIndexer } from "./indexers/SigmaIndexer";
|
|
13
|
+
import { OneSatServices } from "./services/OneSatServices";
|
|
14
|
+
/**
 * Number of blocks an output's block height must be below the current tip
 * before its score is checkpointed as the sync resume point (see
 * handleSyncOutput). Keeps the checkpoint behind any plausible reorg.
 */
const REORG_SAFE_DEPTH = 6;
/** Default batch size for queue processing (max items claimed per loop iteration) */
const DEFAULT_BATCH_SIZE = 20;
|
|
18
|
+
/**
|
|
19
|
+
* OneSatWallet wraps a BRC-100 Wallet with 1Sat-specific indexing and services.
|
|
20
|
+
*
|
|
21
|
+
* The consumer is responsible for constructing the underlying Wallet with
|
|
22
|
+
* the appropriate storage and key derivation for their environment.
|
|
23
|
+
*/
|
|
24
|
+
export class OneSatWallet {
|
|
25
|
+
// The wrapped BRC-100 wallet; all WalletInterface calls delegate to it.
wallet;
// Wallet storage manager; used for direct reads/writes of transaction and output records.
storage;
// Indexers run against every output during parsing, in this order.
indexers;
// OneSat network services (beef fetching, owner SSE sync, beef posting, header/height lookups).
services;
// Addresses considered wallet-owned; shared by reference with the indexers.
owners;
// Event name -> Set of subscriber callbacks (see on/off/emit).
listeners = {};
// Queue-based sync
// Durable queue of outputs to ingest; null unless supplied in constructor args.
syncQueue = null;
syncBatchSize = DEFAULT_BATCH_SIZE;
// True while sync() is executing; guards against concurrent sync runs.
syncRunning = false;
// Cooperative stop flag polled by processQueueLoop.
syncStopRequested = false;
// Unsubscribe handle for the active SSE stream, or null when idle.
activeQueueSync = null;
// Separate stream/processor state for testing
// NOTE(review): these four flags are not referenced by the methods visible in
// this portion of the file — presumably used by test-only entry points; confirm.
sseStreamActive = false;
sseUnsubscribe = null;
processorActive = false;
processorStopRequested = false;
streamDone = false;
|
|
43
|
+
constructor(args) {
|
|
44
|
+
this.wallet = args.wallet;
|
|
45
|
+
this.storage = args.storage;
|
|
46
|
+
const services = new OneSatServices(args.chain, args.onesatUrl, args.storage);
|
|
47
|
+
const network = args.chain === "main" ? "mainnet" : "testnet";
|
|
48
|
+
const owners = args.owners || new Set();
|
|
49
|
+
this.services = services;
|
|
50
|
+
this.owners = owners;
|
|
51
|
+
// Use provided indexers or create defaults
|
|
52
|
+
this.indexers = args.indexers ?? [
|
|
53
|
+
new FundIndexer(owners, network),
|
|
54
|
+
new LockIndexer(owners, network),
|
|
55
|
+
new InscriptionIndexer(owners, network),
|
|
56
|
+
new SigmaIndexer(owners, network),
|
|
57
|
+
new MapIndexer(owners, network),
|
|
58
|
+
new OriginIndexer(owners, network, services),
|
|
59
|
+
new Bsv21Indexer(owners, network, services),
|
|
60
|
+
new OrdLockIndexer(owners, network),
|
|
61
|
+
new OpNSIndexer(owners, network),
|
|
62
|
+
new CosignIndexer(owners, network),
|
|
63
|
+
];
|
|
64
|
+
// Queue-based sync settings
|
|
65
|
+
this.syncQueue = args.syncQueue ?? null;
|
|
66
|
+
this.syncBatchSize = args.syncBatchSize ?? DEFAULT_BATCH_SIZE;
|
|
67
|
+
if (args.autoSync) {
|
|
68
|
+
this.sync();
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
// ===== WalletInterface Delegation =====
// OneSatWallet satisfies the BRC-100 WalletInterface by forwarding every call,
// with arguments unchanged, to the wrapped wallet instance. Arrow-function
// class fields are used so each delegate is bound to this instance.
getPublicKey = (args, originator) => this.wallet.getPublicKey(args, originator);
revealCounterpartyKeyLinkage = (args, originator) => this.wallet.revealCounterpartyKeyLinkage(args, originator);
revealSpecificKeyLinkage = (args, originator) => this.wallet.revealSpecificKeyLinkage(args, originator);
encrypt = (args, originator) => this.wallet.encrypt(args, originator);
decrypt = (args, originator) => this.wallet.decrypt(args, originator);
createHmac = (args, originator) => this.wallet.createHmac(args, originator);
verifyHmac = (args, originator) => this.wallet.verifyHmac(args, originator);
createSignature = (args, originator) => this.wallet.createSignature(args, originator);
verifySignature = (args, originator) => this.wallet.verifySignature(args, originator);
createAction = (args, originator) => this.wallet.createAction(args, originator);
signAction = (args, originator) => this.wallet.signAction(args, originator);
abortAction = (args, originator) => this.wallet.abortAction(args, originator);
listActions = (args, originator) => this.wallet.listActions(args, originator);
internalizeAction = (args, originator) => this.wallet.internalizeAction(args, originator);
listOutputs = (args, originator) => this.wallet.listOutputs(args, originator);
relinquishOutput = (args, originator) => this.wallet.relinquishOutput(args, originator);
acquireCertificate = (args, originator) => this.wallet.acquireCertificate(args, originator);
listCertificates = (args, originator) => this.wallet.listCertificates(args, originator);
proveCertificate = (args, originator) => this.wallet.proveCertificate(args, originator);
relinquishCertificate = (args, originator) => this.wallet.relinquishCertificate(args, originator);
discoverByIdentityKey = (args, originator) => this.wallet.discoverByIdentityKey(args, originator);
discoverByAttributes = (args, originator) => this.wallet.discoverByAttributes(args, originator);
isAuthenticated = (args, originator) => this.wallet.isAuthenticated(args, originator);
waitForAuthentication = (args, originator) => this.wallet.waitForAuthentication(args, originator);
getHeight = (args, originator) => this.wallet.getHeight(args, originator);
getHeaderForHeight = (args, originator) => this.wallet.getHeaderForHeight(args, originator);
getNetwork = (args, originator) => this.wallet.getNetwork(args, originator);
getVersion = (args, originator) => this.wallet.getVersion(args, originator);
|
|
100
|
+
// ===== Event Emitter =====
|
|
101
|
+
/**
|
|
102
|
+
* Subscribe to wallet events
|
|
103
|
+
*/
|
|
104
|
+
on(event, callback) {
|
|
105
|
+
if (!this.listeners[event]) {
|
|
106
|
+
this.listeners[event] = new Set();
|
|
107
|
+
}
|
|
108
|
+
this.listeners[event].add(callback);
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* Unsubscribe from wallet events
|
|
112
|
+
*/
|
|
113
|
+
off(event, callback) {
|
|
114
|
+
this.listeners[event]?.delete(callback);
|
|
115
|
+
}
|
|
116
|
+
/**
|
|
117
|
+
* Emit a wallet event
|
|
118
|
+
*/
|
|
119
|
+
emit(event, data) {
|
|
120
|
+
const callbacks = this.listeners[event];
|
|
121
|
+
if (callbacks) {
|
|
122
|
+
for (const cb of callbacks) {
|
|
123
|
+
cb(data);
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
/**
 * Add an address to the set of owned addresses.
 * Outputs to these addresses will be indexed.
 *
 * @param address - Address string to treat as wallet-owned. The indexers
 *   share this Set by reference (see constructor), so they observe the new
 *   owner immediately; previously parsed outputs are not re-indexed.
 */
addOwner(address) {
    this.owners.add(address);
}
|
|
134
|
+
/**
|
|
135
|
+
* Parse a transaction through indexers without internalizing.
|
|
136
|
+
*
|
|
137
|
+
* This is useful for debugging/testing to see what the indexers produce
|
|
138
|
+
* without actually storing the transaction in the wallet.
|
|
139
|
+
*
|
|
140
|
+
* @param tx - Transaction or txid to parse
|
|
141
|
+
* @param isBroadcasted - Whether this transaction has been broadcast
|
|
142
|
+
* @returns ParseContext with all indexer data
|
|
143
|
+
*/
|
|
144
|
+
async parseTransaction(txOrTxid, isBroadcasted = true) {
|
|
145
|
+
// Load transaction if needed
|
|
146
|
+
const tx = typeof txOrTxid === "string"
|
|
147
|
+
? await this.loadTransaction(txOrTxid)
|
|
148
|
+
: txOrTxid;
|
|
149
|
+
// Hydrate source transactions for inputs
|
|
150
|
+
await this.hydrateSourceTransactions(tx);
|
|
151
|
+
// Build context
|
|
152
|
+
const ctx = this.buildParseContext(tx);
|
|
153
|
+
// Parse all inputs (build ctx.spends)
|
|
154
|
+
await this.parseInputs(ctx);
|
|
155
|
+
// Run parse on each output with each indexer
|
|
156
|
+
for (const txo of ctx.txos) {
|
|
157
|
+
await this.runIndexersOnTxo(txo);
|
|
158
|
+
}
|
|
159
|
+
// Run summarize on each indexer
|
|
160
|
+
for (const indexer of this.indexers) {
|
|
161
|
+
const summary = await indexer.summarize(ctx, isBroadcasted);
|
|
162
|
+
if (summary) {
|
|
163
|
+
ctx.summary[indexer.tag] = summary;
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
return ctx;
|
|
167
|
+
}
|
|
168
|
+
/**
|
|
169
|
+
* Parse a single output without full transaction context.
|
|
170
|
+
* Runs all indexers' parse() methods but NOT summarize().
|
|
171
|
+
*
|
|
172
|
+
* @param output - The TransactionOutput to parse
|
|
173
|
+
* @param outpoint - The outpoint identifying this output
|
|
174
|
+
* @returns Txo with all indexer data populated
|
|
175
|
+
*/
|
|
176
|
+
async parseOutput(output, outpoint) {
|
|
177
|
+
const txo = {
|
|
178
|
+
output,
|
|
179
|
+
outpoint,
|
|
180
|
+
data: {},
|
|
181
|
+
};
|
|
182
|
+
await this.runIndexersOnTxo(txo);
|
|
183
|
+
return txo;
|
|
184
|
+
}
|
|
185
|
+
/**
|
|
186
|
+
* Load and parse a single output by outpoint.
|
|
187
|
+
* Loads the transaction, extracts the output, and runs indexers on it.
|
|
188
|
+
*
|
|
189
|
+
* @param outpoint - Outpoint string (txid_vout)
|
|
190
|
+
* @returns Txo with all indexer data populated
|
|
191
|
+
*/
|
|
192
|
+
async loadTxo(outpoint) {
|
|
193
|
+
const op = new Outpoint(outpoint);
|
|
194
|
+
const tx = await this.loadTransaction(op.txid);
|
|
195
|
+
const output = tx.outputs[op.vout];
|
|
196
|
+
if (!output) {
|
|
197
|
+
throw new Error(`Output ${op.vout} not found in transaction ${op.txid}`);
|
|
198
|
+
}
|
|
199
|
+
return this.parseOutput(output, op);
|
|
200
|
+
}
|
|
201
|
+
/**
|
|
202
|
+
* Run all indexers on a single Txo and populate its data/owner/basket
|
|
203
|
+
*/
|
|
204
|
+
async runIndexersOnTxo(txo) {
|
|
205
|
+
for (const indexer of this.indexers) {
|
|
206
|
+
const result = await indexer.parse(txo);
|
|
207
|
+
if (result) {
|
|
208
|
+
txo.data[indexer.tag] = {
|
|
209
|
+
data: result.data,
|
|
210
|
+
tags: result.tags,
|
|
211
|
+
content: result.content,
|
|
212
|
+
};
|
|
213
|
+
if (result.owner) {
|
|
214
|
+
txo.owner = result.owner;
|
|
215
|
+
}
|
|
216
|
+
if (result.basket) {
|
|
217
|
+
txo.basket = result.basket;
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
/**
|
|
223
|
+
* Parse all inputs - run indexers on source outputs to populate ctx.spends
|
|
224
|
+
*/
|
|
225
|
+
async parseInputs(ctx) {
|
|
226
|
+
for (const input of ctx.tx.inputs) {
|
|
227
|
+
if (!input.sourceTransaction)
|
|
228
|
+
continue;
|
|
229
|
+
const sourceOutput = input.sourceTransaction.outputs[input.sourceOutputIndex];
|
|
230
|
+
if (!sourceOutput)
|
|
231
|
+
continue;
|
|
232
|
+
const sourceTxid = input.sourceTransaction.id("hex");
|
|
233
|
+
const sourceVout = input.sourceOutputIndex;
|
|
234
|
+
// Create Txo for the spent output
|
|
235
|
+
const spendTxo = {
|
|
236
|
+
output: sourceOutput,
|
|
237
|
+
outpoint: new Outpoint(sourceTxid, sourceVout),
|
|
238
|
+
data: {},
|
|
239
|
+
};
|
|
240
|
+
// Run all indexers on the spent output
|
|
241
|
+
await this.runIndexersOnTxo(spendTxo);
|
|
242
|
+
// Add to spends
|
|
243
|
+
ctx.spends.push(spendTxo);
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
/**
|
|
247
|
+
* Load a transaction by txid.
|
|
248
|
+
* Checks storage first, falls back to beef service.
|
|
249
|
+
*
|
|
250
|
+
* @param txid - Transaction ID to load
|
|
251
|
+
* @returns Transaction (without source transactions hydrated)
|
|
252
|
+
*/
|
|
253
|
+
async loadTransaction(txid) {
|
|
254
|
+
// Check storage first
|
|
255
|
+
const userId = await this.storage.getUserId();
|
|
256
|
+
const existingTx = await this.storage.runAsStorageProvider(async (sp) => {
|
|
257
|
+
const txs = await sp.findTransactions({ partial: { userId, txid } });
|
|
258
|
+
return txs.length > 0 ? txs[0] : null;
|
|
259
|
+
});
|
|
260
|
+
if (existingTx?.rawTx) {
|
|
261
|
+
return Transaction.fromBinary(existingTx.rawTx);
|
|
262
|
+
}
|
|
263
|
+
// Fall back to network
|
|
264
|
+
const beefBytes = await this.services.beef.getBeef(txid);
|
|
265
|
+
const tx = Transaction.fromBEEF(Array.from(beefBytes));
|
|
266
|
+
return tx;
|
|
267
|
+
}
|
|
268
|
+
/**
|
|
269
|
+
* Load and attach source transactions for all inputs (1 level deep).
|
|
270
|
+
* Modifies the transaction in place.
|
|
271
|
+
*/
|
|
272
|
+
async hydrateSourceTransactions(tx) {
|
|
273
|
+
const loaded = new Map();
|
|
274
|
+
for (const input of tx.inputs) {
|
|
275
|
+
if (!input.sourceTransaction && input.sourceTXID) {
|
|
276
|
+
if (!loaded.has(input.sourceTXID)) {
|
|
277
|
+
loaded.set(input.sourceTXID, await this.loadTransaction(input.sourceTXID));
|
|
278
|
+
}
|
|
279
|
+
input.sourceTransaction = loaded.get(input.sourceTXID);
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
/**
|
|
284
|
+
* Build minimal parse context from transaction
|
|
285
|
+
*/
|
|
286
|
+
buildParseContext(tx) {
|
|
287
|
+
const txid = tx.id("hex");
|
|
288
|
+
return {
|
|
289
|
+
tx,
|
|
290
|
+
txid,
|
|
291
|
+
txos: tx.outputs.map((output, vout) => ({
|
|
292
|
+
output,
|
|
293
|
+
outpoint: new Outpoint(txid, vout),
|
|
294
|
+
data: {},
|
|
295
|
+
})),
|
|
296
|
+
spends: [],
|
|
297
|
+
summary: {},
|
|
298
|
+
indexers: this.indexers,
|
|
299
|
+
};
|
|
300
|
+
}
|
|
301
|
+
/**
 * Calculate the byte offset and length of each output's locking script
 * within the raw transaction binary. This is needed for wallet-toolbox's
 * listOutputs to extract locking scripts on demand.
 *
 * Walks the standard raw-transaction layout: 4-byte version, varint input
 * count, inputs (32-byte txid + 4-byte vout + varint-prefixed script +
 * 4-byte sequence), varint output count, then outputs (8-byte satoshis +
 * varint-prefixed locking script).
 *
 * @param tx - Transaction to scan
 * @returns Array of { offset, length }, parallel to tx.outputs (index == vout)
 */
calculateScriptOffsets(tx) {
    const rawTx = tx.toBinary();
    const reader = new Utils.Reader(rawTx);
    // Skip version (4 bytes)
    reader.pos = 4;
    // Read input count (varint)
    const inputCount = reader.readVarIntNum();
    // Skip all inputs
    for (let i = 0; i < inputCount; i++) {
        reader.pos += 32; // txid
        reader.pos += 4; // vout
        const scriptLen = reader.readVarIntNum();
        reader.pos += scriptLen; // unlocking script
        reader.pos += 4; // sequence
    }
    // Read output count (varint)
    const outputCount = reader.readVarIntNum();
    // Calculate offset for each output's locking script
    const offsets = [];
    for (let i = 0; i < outputCount; i++) {
        reader.pos += 8; // satoshis (8 bytes)
        const scriptLen = reader.readVarIntNum();
        // Offset is taken AFTER the length varint, i.e. at the script's first byte.
        const scriptOffset = reader.pos;
        reader.pos += scriptLen; // locking script
        offsets.push({ offset: scriptOffset, length: scriptLen });
    }
    return offsets;
}
|
|
334
|
+
/**
 * Ingest a transaction by running it through indexers and writing directly to storage.
 *
 * This is the main entry point for adding external transactions to the wallet.
 * The indexers extract basket, tags, and custom instructions which are then
 * written directly to the wallet's storage.
 *
 * Unlike internalizeAction, this method also marks any wallet outputs that are
 * consumed as inputs in the transaction as spent (spentBy, spendable: false).
 *
 * Idempotent: an already-known txid reuses the existing transaction record,
 * and already-known outputs are skipped.
 *
 * @param tx - Transaction to ingest
 * @param description - Human-readable description
 * @param labels - Optional labels for the transaction
 * @param isBroadcasted - Whether this transaction has been broadcast (affects validation)
 * @returns Result including parse details for all outputs ({ parseContext, internalizedCount })
 */
async ingestTransaction(tx, description, labels, isBroadcasted = true) {
    // Run through indexers (parseTransaction handles loading source txs)
    const ctx = await this.parseTransaction(tx, isBroadcasted);
    const txid = tx.id("hex");
    // Calculate script offsets for all outputs (needed for listOutputs with locking scripts)
    const scriptOffsets = this.calculateScriptOffsets(tx);
    // Collect owned outputs
    const ownedTxos = ctx.txos.filter((txo) => txo.owner && this.owners.has(txo.owner));
    // Get userId from storage manager
    const userId = await this.storage.getUserId();
    // Pre-fetch header for merkle proof BEFORE starting the IDB transaction
    // This avoids IDB auto-commit issue caused by network calls inside transaction
    let provenTxBlockHash;
    let provenTxMerkleRoot;
    if (tx.merklePath) {
        const mp = tx.merklePath;
        const header = await this.services.getHeaderForHeight(mp.blockHeight);
        provenTxBlockHash = Utils.toHex(Hash.hash256(header).reverse());
        provenTxMerkleRoot = mp.computeRoot();
    }
    // Write directly to storage within a transaction
    const internalizedCount = await this.storage.runAsStorageProvider(async (sp) => {
        return await sp.transaction(async (trx) => {
            // Check if transaction already exists
            const existingTxs = await sp.findTransactions({
                partial: { userId, txid },
                trx,
            });
            let transactionId;
            let isNewTransaction = false;
            if (existingTxs.length > 0) {
                // Transaction already exists, use its ID
                transactionId = existingTxs[0].transactionId;
            }
            else {
                // Determine if this is an outgoing transaction (we're spending our own outputs)
                // by checking if any inputs spend our outputs
                // NOTE(review): this scan issues one findOutputs query per input, and the
                // same queries are repeated below in the "mark inputs as spent" pass.
                let isOutgoing = false;
                let satoshisSpent = 0;
                for (const input of tx.inputs) {
                    // Get source txid from either sourceTXID or sourceTransaction
                    const sourceTxid = input.sourceTXID || input.sourceTransaction?.id("hex");
                    if (sourceTxid) {
                        const spentOutputs = await sp.findOutputs({
                            partial: {
                                userId,
                                txid: sourceTxid,
                                vout: input.sourceOutputIndex,
                            },
                            trx,
                        });
                        if (spentOutputs.length > 0) {
                            isOutgoing = true;
                            satoshisSpent += spentOutputs[0].satoshis;
                        }
                    }
                }
                // Calculate satoshis received
                const satoshisReceived = ownedTxos.reduce((sum, txo) => sum + (txo.output.satoshis || 0), 0);
                // Net satoshis: positive if receiving, negative if spending
                const satoshis = satoshisReceived - satoshisSpent;
                // Create transaction record
                const now = new Date();
                const reference = Utils.toBase64(Random(12));
                const newTx = {
                    created_at: now,
                    updated_at: now,
                    transactionId: 0,
                    userId,
                    status: isBroadcasted
                        ? "completed"
                        : "unproven",
                    reference,
                    isOutgoing,
                    satoshis,
                    description,
                    version: tx.version,
                    lockTime: tx.lockTime,
                    txid,
                    rawTx: Array.from(tx.toBinary()),
                };
                transactionId = await sp.insertTransaction(newTx, trx);
                isNewTransaction = true;
                // Store in ProvenTx or ProvenTxReq for listOutputs compatibility
                // This enables include: 'entire transactions' and 'locking scripts'
                // NOTE(review): if tx.merklePath is set but the header prefetch above
                // yielded a falsy hash/root, NEITHER branch below stores a record —
                // confirm this gap is intentional.
                if (tx.merklePath && provenTxBlockHash && provenTxMerkleRoot) {
                    // Transaction has merkle proof - store as ProvenTx
                    const existingProven = await sp.findProvenTxs({
                        partial: { txid },
                        trx,
                    });
                    if (existingProven.length === 0) {
                        const mp = tx.merklePath;
                        // Find this txid's position in the merkle path's leaf level.
                        const index = mp.path[0].find((l) => l.hash === txid)?.offset ?? 0;
                        const provenNow = new Date();
                        await sp.insertProvenTx({
                            created_at: provenNow,
                            updated_at: provenNow,
                            provenTxId: 0,
                            txid,
                            height: mp.blockHeight,
                            index,
                            merklePath: mp.toBinary(),
                            rawTx: Array.from(tx.toBinary()),
                            blockHash: provenTxBlockHash,
                            merkleRoot: provenTxMerkleRoot,
                        }, trx);
                    }
                }
                else if (!tx.merklePath) {
                    // No merkle proof - store as ProvenTxReq (pending confirmation)
                    const existingReq = await sp.findProvenTxReqs({
                        partial: { txid },
                        trx,
                    });
                    if (existingReq.length === 0) {
                        // Build inputBEEF from source transactions
                        const inputBeef = new Beef();
                        inputBeef.mergeTransaction(tx);
                        const reqNow = new Date();
                        await sp.insertProvenTxReq({
                            created_at: reqNow,
                            updated_at: reqNow,
                            provenTxReqId: 0,
                            status: isBroadcasted ? "unmined" : "unsent",
                            attempts: 0,
                            notified: false,
                            txid,
                            history: "[]",
                            notify: "{}",
                            rawTx: Array.from(tx.toBinary()),
                            inputBEEF: inputBeef.toBinary(),
                        }, trx);
                    }
                }
                // Add labels (only runs when the transaction record was just created)
                for (const label of labels || []) {
                    const txLabel = await sp.findOrInsertTxLabel(userId, label, trx);
                    if (txLabel.txLabelId) {
                        await sp.findOrInsertTxLabelMap(transactionId, txLabel.txLabelId, trx);
                    }
                }
            }
            // Mark inputs as spent (only for new transactions)
            if (isNewTransaction) {
                for (const input of tx.inputs) {
                    // Get source txid from either sourceTXID or sourceTransaction
                    const sourceTxid = input.sourceTXID || input.sourceTransaction?.id("hex");
                    if (sourceTxid) {
                        const spentOutputs = await sp.findOutputs({
                            partial: {
                                userId,
                                txid: sourceTxid,
                                vout: input.sourceOutputIndex,
                            },
                            trx,
                        });
                        if (spentOutputs.length > 0) {
                            const output = spentOutputs[0];
                            // Mark as spent
                            if (output.outputId) {
                                await sp.updateOutput(output.outputId, {
                                    spendable: false,
                                    spentBy: transactionId,
                                }, trx);
                            }
                        }
                    }
                }
            }
            // Create output records for owned outputs
            let outputsCreated = 0;
            for (const txo of ownedTxos) {
                // Check if output already exists
                const existingOutputs = await sp.findOutputs({
                    partial: { userId, txid, vout: txo.outpoint.vout },
                    trx,
                });
                if (existingOutputs.length > 0) {
                    // Output already exists, skip
                    continue;
                }
                // Collect tags and content from all indexer data
                const tags = [];
                let content;
                if (txo.owner) {
                    tags.push(`own:${txo.owner}`);
                }
                for (const indexData of Object.values(txo.data)) {
                    if (indexData.tags) {
                        tags.push(...indexData.tags);
                    }
                    // Use first non-empty content found
                    if (!content && indexData.content) {
                        content = indexData.content;
                    }
                }
                // Get or create basket
                const basketName = txo.basket || "default";
                const basket = await sp.findOrInsertOutputBasket(userId, basketName, trx);
                // Create output record
                const now = new Date();
                const scriptInfo = scriptOffsets[txo.outpoint.vout];
                const newOutput = {
                    created_at: now,
                    updated_at: now,
                    outputId: 0,
                    userId,
                    transactionId,
                    basketId: basket.basketId,
                    spendable: true,
                    change: basketName === "default",
                    outputDescription: "",
                    vout: txo.outpoint.vout,
                    satoshis: txo.output.satoshis || 0,
                    providedBy: "you",
                    purpose: basketName === "default" ? "change" : "",
                    type: "custom",
                    txid,
                    lockingScript: Array.from(txo.output.lockingScript.toBinary()),
                    scriptOffset: scriptInfo?.offset,
                    scriptLength: scriptInfo?.length,
                    spentBy: undefined,
                    // Indexer content doubles as customInstructions, capped at 1000 chars.
                    customInstructions: content?.substring(0, 1000),
                };
                const outputId = await sp.insertOutput(newOutput, trx);
                // Add tags to output
                for (const tag of tags) {
                    const outputTag = await sp.findOrInsertOutputTag(userId, tag, trx);
                    if (outputTag.outputTagId) {
                        await sp.findOrInsertOutputTagMap(outputId, outputTag.outputTagId, trx);
                    }
                }
                outputsCreated++;
            }
            return outputsCreated;
        });
    });
    return { parseContext: ctx, internalizedCount };
}
|
|
590
|
+
/**
|
|
591
|
+
* Broadcast a transaction and ingest it into the wallet if successful.
|
|
592
|
+
*
|
|
593
|
+
* @param tx - Transaction to broadcast
|
|
594
|
+
* @param description - Human-readable description for the transaction
|
|
595
|
+
* @param labels - Optional labels for the transaction
|
|
596
|
+
* @returns The ingest result if successful
|
|
597
|
+
* @throws Error if broadcast fails
|
|
598
|
+
*/
|
|
599
|
+
async broadcast(tx, description, labels) {
|
|
600
|
+
const txid = tx.id("hex");
|
|
601
|
+
const beef = new Beef();
|
|
602
|
+
beef.mergeTransaction(tx);
|
|
603
|
+
const results = await this.services.postBeef(beef, [txid]);
|
|
604
|
+
const result = results[0];
|
|
605
|
+
if (result.status !== "success") {
|
|
606
|
+
const errorMsg = result.error?.message || "Broadcast failed";
|
|
607
|
+
throw new Error(`Broadcast failed for ${txid}: ${errorMsg}`);
|
|
608
|
+
}
|
|
609
|
+
return this.ingestTransaction(tx, description, labels, true);
|
|
610
|
+
}
|
|
611
|
+
// ===== Queue-Based Sync =====
|
|
612
|
+
/**
|
|
613
|
+
* Start queue-based sync for all owner addresses.
|
|
614
|
+
* Requires syncQueue to be provided in constructor args.
|
|
615
|
+
*
|
|
616
|
+
* This method:
|
|
617
|
+
* 1. Opens SSE stream and enqueues outputs
|
|
618
|
+
* 2. Processes queue in batches using Promise.all()
|
|
619
|
+
* 3. Continues until queue is empty and stream is done
|
|
620
|
+
*/
|
|
621
|
+
async sync() {
|
|
622
|
+
if (!this.syncQueue) {
|
|
623
|
+
throw new Error("syncQueue not provided - provide syncQueue in constructor");
|
|
624
|
+
}
|
|
625
|
+
if (this.syncRunning) {
|
|
626
|
+
return;
|
|
627
|
+
}
|
|
628
|
+
const addresses = Array.from(this.owners);
|
|
629
|
+
if (addresses.length === 0) {
|
|
630
|
+
return;
|
|
631
|
+
}
|
|
632
|
+
this.syncRunning = true;
|
|
633
|
+
this.syncStopRequested = false;
|
|
634
|
+
// Reset any items stuck in "processing" from a previous crashed session
|
|
635
|
+
await this.syncQueue.resetProcessing();
|
|
636
|
+
// Get last queued score from queue state
|
|
637
|
+
const state = await this.syncQueue.getState();
|
|
638
|
+
const fromScore = state.lastQueuedScore;
|
|
639
|
+
// Fetch current height once for reorg protection checks
|
|
640
|
+
const currentHeight = await this.services.getHeight();
|
|
641
|
+
this.emit("sync:start", { addresses });
|
|
642
|
+
// Start SSE stream
|
|
643
|
+
let streamDone = false;
|
|
644
|
+
const unsubscribe = this.services.owner.sync(addresses, async (output) => {
|
|
645
|
+
await this.handleSyncOutput(output, currentHeight);
|
|
646
|
+
}, fromScore, () => {
|
|
647
|
+
streamDone = true;
|
|
648
|
+
}, (error) => {
|
|
649
|
+
streamDone = true;
|
|
650
|
+
this.emit("sync:error", { message: error.message });
|
|
651
|
+
});
|
|
652
|
+
this.activeQueueSync = unsubscribe;
|
|
653
|
+
// Start processing loop
|
|
654
|
+
await this.processQueueLoop(streamDone, () => streamDone);
|
|
655
|
+
this.syncRunning = false;
|
|
656
|
+
this.activeQueueSync = null;
|
|
657
|
+
}
|
|
658
|
+
/**
 * Handle a single output from the SSE stream.
 * Enqueues to the sync queue and updates lastQueuedScore with reorg protection.
 *
 * @param output - Stream item ({ outpoint, score, spendTxid })
 * @param currentHeight - Chain tip height fetched once at sync start
 */
async handleSyncOutput(output, currentHeight) {
    if (!this.syncQueue)
        return;
    // Enqueue the output
    await this.syncQueue.enqueue([
        {
            outpoint: output.outpoint,
            score: output.score,
            spendTxid: output.spendTxid,
        },
    ]);
    // Update lastQueuedScore with reorg protection:
    // only advance the resume checkpoint once the output is buried at least
    // REORG_SAFE_DEPTH blocks below the tip, so a reorg can only cause
    // re-delivery (re-enqueue) of recent outputs, never a missed one.
    // The integer part of score is treated as the block height here —
    // presumably score encodes height.fraction; confirm against the service.
    const blockHeight = Math.floor(output.score);
    if (blockHeight <= currentHeight - REORG_SAFE_DEPTH) {
        await this.syncQueue.setState({
            lastQueuedScore: output.score,
            lastSyncedAt: Date.now(),
        });
    }
}
|
|
682
|
+
/**
|
|
683
|
+
* Process queue in batches until empty or stopped.
|
|
684
|
+
*/
|
|
685
|
+
async processQueueLoop(_streamDone, isStreamDone) {
|
|
686
|
+
if (!this.syncQueue)
|
|
687
|
+
return;
|
|
688
|
+
while (!this.syncStopRequested) {
|
|
689
|
+
// claim() returns items already grouped by txid, all marked as "processing"
|
|
690
|
+
const byTxid = await this.syncQueue.claim(this.syncBatchSize);
|
|
691
|
+
if (byTxid.size === 0) {
|
|
692
|
+
if (isStreamDone()) {
|
|
693
|
+
// Stream done and queue empty - sync complete
|
|
694
|
+
this.emit("sync:complete", {});
|
|
695
|
+
break;
|
|
696
|
+
}
|
|
697
|
+
// Queue empty but stream still running - wait a bit
|
|
698
|
+
await new Promise((r) => setTimeout(r, 100));
|
|
699
|
+
continue;
|
|
700
|
+
}
|
|
701
|
+
// Process each txid in parallel
|
|
702
|
+
await Promise.all(Array.from(byTxid.entries()).map(([txid, txidItems]) => this.processTxid(txid, txidItems)));
|
|
703
|
+
// Emit progress
|
|
704
|
+
const stats = await this.syncQueue.getStats();
|
|
705
|
+
this.emit("sync:progress", {
|
|
706
|
+
pending: stats.pending,
|
|
707
|
+
done: stats.done,
|
|
708
|
+
failed: stats.failed,
|
|
709
|
+
});
|
|
710
|
+
}
|
|
711
|
+
}
|
|
712
|
+
/**
|
|
713
|
+
* Group queue items by txid.
|
|
714
|
+
* @deprecated - claim() now returns items already grouped
|
|
715
|
+
*/
|
|
716
|
+
groupItemsByTxid(items) {
|
|
717
|
+
const byTxid = new Map();
|
|
718
|
+
for (const item of items) {
|
|
719
|
+
const txid = item.outpoint.substring(0, 64);
|
|
720
|
+
const existing = byTxid.get(txid);
|
|
721
|
+
if (existing) {
|
|
722
|
+
existing.push(item);
|
|
723
|
+
}
|
|
724
|
+
else {
|
|
725
|
+
byTxid.set(txid, [item]);
|
|
726
|
+
}
|
|
727
|
+
}
|
|
728
|
+
return byTxid;
|
|
729
|
+
}
|
|
730
|
+
/**
|
|
731
|
+
* Process a single txid - ingest transaction and complete queue items.
|
|
732
|
+
* Items are already marked as "processing" by claim().
|
|
733
|
+
*/
|
|
734
|
+
async processTxid(txid, items) {
|
|
735
|
+
if (!this.syncQueue)
|
|
736
|
+
return;
|
|
737
|
+
try {
|
|
738
|
+
const itemIds = items.map((i) => i.id);
|
|
739
|
+
// Build spend map: vout -> spendTxid
|
|
740
|
+
const spendMap = new Map();
|
|
741
|
+
for (const item of items) {
|
|
742
|
+
if (item.spendTxid) {
|
|
743
|
+
const vout = Number.parseInt(item.outpoint.substring(65), 10);
|
|
744
|
+
spendMap.set(vout, item.spendTxid);
|
|
745
|
+
}
|
|
746
|
+
}
|
|
747
|
+
// Check if this is purely a spend-only batch (all items have spendTxid, no new outputs)
|
|
748
|
+
const hasUnspentCreation = items.some((item) => !item.spendTxid);
|
|
749
|
+
if (hasUnspentCreation) {
|
|
750
|
+
// Need to ingest the transaction
|
|
751
|
+
await this.ingestWithSpendInfo(txid, spendMap);
|
|
752
|
+
}
|
|
753
|
+
else {
|
|
754
|
+
// All items are spends - just mark outputs as spent
|
|
755
|
+
await this.markOutputsSpent(items);
|
|
756
|
+
}
|
|
757
|
+
// Complete all items
|
|
758
|
+
await this.syncQueue.completeMany(itemIds);
|
|
759
|
+
}
|
|
760
|
+
catch (error) {
|
|
761
|
+
this.emit("sync:error", {
|
|
762
|
+
message: error instanceof Error ? error.message : String(error),
|
|
763
|
+
});
|
|
764
|
+
// Mark items as failed
|
|
765
|
+
for (const item of items) {
|
|
766
|
+
await this.syncQueue.fail(item.id, String(error));
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
}
|
|
770
|
+
/**
|
|
771
|
+
* Ingest a transaction with knowledge of which outputs are already spent.
|
|
772
|
+
*/
|
|
773
|
+
async ingestWithSpendInfo(txid, spendMap) {
|
|
774
|
+
// Load and ingest the transaction
|
|
775
|
+
const tx = await this.loadTransaction(txid);
|
|
776
|
+
const result = await this.ingestTransaction(tx, "1sat-sync");
|
|
777
|
+
// Mark any outputs that we know are spent
|
|
778
|
+
if (spendMap.size > 0) {
|
|
779
|
+
const userId = await this.storage.getUserId();
|
|
780
|
+
await this.storage.runAsStorageProvider(async (sp) => {
|
|
781
|
+
await sp.transaction(async (trx) => {
|
|
782
|
+
for (const [vout] of spendMap) {
|
|
783
|
+
const outputs = await sp.findOutputs({
|
|
784
|
+
partial: { userId, txid, vout },
|
|
785
|
+
trx,
|
|
786
|
+
});
|
|
787
|
+
if (outputs.length > 0 && outputs[0].spendable) {
|
|
788
|
+
const output = outputs[0];
|
|
789
|
+
if (output.outputId) {
|
|
790
|
+
await sp.updateOutput(output.outputId, { spendable: false }, trx);
|
|
791
|
+
}
|
|
792
|
+
}
|
|
793
|
+
}
|
|
794
|
+
});
|
|
795
|
+
});
|
|
796
|
+
}
|
|
797
|
+
}
|
|
798
|
+
/**
|
|
799
|
+
* Mark outputs as spent for spend-only queue items.
|
|
800
|
+
*/
|
|
801
|
+
async markOutputsSpent(items) {
|
|
802
|
+
const userId = await this.storage.getUserId();
|
|
803
|
+
await this.storage.runAsStorageProvider(async (sp) => {
|
|
804
|
+
await sp.transaction(async (trx) => {
|
|
805
|
+
for (const item of items) {
|
|
806
|
+
if (!item.spendTxid)
|
|
807
|
+
continue;
|
|
808
|
+
const txid = item.outpoint.substring(0, 64);
|
|
809
|
+
const vout = Number.parseInt(item.outpoint.substring(65), 10);
|
|
810
|
+
const outputs = await sp.findOutputs({
|
|
811
|
+
partial: { userId, txid, vout },
|
|
812
|
+
trx,
|
|
813
|
+
});
|
|
814
|
+
if (outputs.length > 0 && outputs[0].spendable && outputs[0].outputId) {
|
|
815
|
+
await sp.updateOutput(outputs[0].outputId, { spendable: false }, trx);
|
|
816
|
+
}
|
|
817
|
+
}
|
|
818
|
+
});
|
|
819
|
+
});
|
|
820
|
+
}
|
|
821
|
+
/**
|
|
822
|
+
* Stop the sync.
|
|
823
|
+
*/
|
|
824
|
+
stopSync() {
|
|
825
|
+
this.syncStopRequested = true;
|
|
826
|
+
if (this.activeQueueSync) {
|
|
827
|
+
this.activeQueueSync();
|
|
828
|
+
this.activeQueueSync = null;
|
|
829
|
+
}
|
|
830
|
+
// Also stop individual components
|
|
831
|
+
this.stopStream();
|
|
832
|
+
this.stopProcessor();
|
|
833
|
+
}
|
|
834
|
+
/**
 * Close the wallet and cleanup all sync connections.
 * Stops any running sync (stream + processor) and closes the services
 * layer, releasing whatever connections it holds.
 */
close() {
    this.stopSync();
    this.services.close();
}
|
|
841
|
+
/**
 * Check if sync is currently running.
 * @returns {boolean} true while the combined sync loop is active
 */
isSyncing() {
    return this.syncRunning;
}
|
|
847
|
+
/**
 * Get the sync queue instance (if provided).
 * @returns the sync queue, or a nullish value when none was configured
 */
getQueue() {
    return this.syncQueue;
}
|
|
853
|
+
// ===== Separate Stream/Processor Controls (for testing) =====
|
|
854
|
+
/**
|
|
855
|
+
* Start only the SSE stream, enqueueing outputs without processing.
|
|
856
|
+
* Useful for testing to observe queue buildup.
|
|
857
|
+
*/
|
|
858
|
+
async startStream() {
|
|
859
|
+
if (!this.syncQueue) {
|
|
860
|
+
throw new Error("syncQueue not provided");
|
|
861
|
+
}
|
|
862
|
+
if (this.sseStreamActive) {
|
|
863
|
+
return;
|
|
864
|
+
}
|
|
865
|
+
const addresses = Array.from(this.owners);
|
|
866
|
+
if (addresses.length === 0) {
|
|
867
|
+
return;
|
|
868
|
+
}
|
|
869
|
+
const state = await this.syncQueue.getState();
|
|
870
|
+
const fromScore = state.lastQueuedScore;
|
|
871
|
+
// Fetch current height once for reorg protection checks
|
|
872
|
+
const currentHeight = await this.services.getHeight();
|
|
873
|
+
this.sseStreamActive = true;
|
|
874
|
+
this.streamDone = false;
|
|
875
|
+
this.emit("sync:start", { addresses });
|
|
876
|
+
this.sseUnsubscribe = this.services.owner.sync(addresses, async (output) => {
|
|
877
|
+
await this.handleSyncOutput(output, currentHeight);
|
|
878
|
+
}, fromScore, () => {
|
|
879
|
+
this.streamDone = true;
|
|
880
|
+
this.sseStreamActive = false;
|
|
881
|
+
}, (error) => {
|
|
882
|
+
this.streamDone = true;
|
|
883
|
+
this.sseStreamActive = false;
|
|
884
|
+
this.emit("sync:error", { message: error.message });
|
|
885
|
+
});
|
|
886
|
+
}
|
|
887
|
+
/**
|
|
888
|
+
* Stop the SSE stream.
|
|
889
|
+
*/
|
|
890
|
+
stopStream() {
|
|
891
|
+
if (this.sseUnsubscribe) {
|
|
892
|
+
this.sseUnsubscribe();
|
|
893
|
+
this.sseUnsubscribe = null;
|
|
894
|
+
}
|
|
895
|
+
this.sseStreamActive = false;
|
|
896
|
+
}
|
|
897
|
+
/**
 * Check if SSE stream is active.
 * @returns {boolean} true while the stream subscription is live
 */
isStreamActive() {
    return this.sseStreamActive;
}
|
|
903
|
+
/**
 * Check if SSE stream has completed.
 * @returns {boolean} true once the stream has finished or errored
 */
isStreamDone() {
    return this.streamDone;
}
|
|
909
|
+
/**
|
|
910
|
+
* Start only the queue processor, without starting a new SSE stream.
|
|
911
|
+
* Useful for testing to process queued items independently.
|
|
912
|
+
*/
|
|
913
|
+
async startProcessor() {
|
|
914
|
+
if (!this.syncQueue) {
|
|
915
|
+
throw new Error("syncQueue not provided");
|
|
916
|
+
}
|
|
917
|
+
if (this.processorActive) {
|
|
918
|
+
return;
|
|
919
|
+
}
|
|
920
|
+
this.processorActive = true;
|
|
921
|
+
this.processorStopRequested = false;
|
|
922
|
+
// Reset any items stuck in "processing" from a previous crashed session
|
|
923
|
+
await this.syncQueue.resetProcessing();
|
|
924
|
+
while (!this.processorStopRequested) {
|
|
925
|
+
// claim() returns items already grouped by txid, all marked as "processing"
|
|
926
|
+
const byTxid = await this.syncQueue.claim(this.syncBatchSize);
|
|
927
|
+
if (byTxid.size === 0) {
|
|
928
|
+
// Queue empty - wait a bit and check again
|
|
929
|
+
await new Promise((r) => setTimeout(r, 100));
|
|
930
|
+
continue;
|
|
931
|
+
}
|
|
932
|
+
// Process each txid in parallel
|
|
933
|
+
await Promise.all(Array.from(byTxid.entries()).map(([txid, txidItems]) => this.processTxid(txid, txidItems)));
|
|
934
|
+
// Emit progress
|
|
935
|
+
const stats = await this.syncQueue.getStats();
|
|
936
|
+
this.emit("sync:progress", {
|
|
937
|
+
pending: stats.pending,
|
|
938
|
+
done: stats.done,
|
|
939
|
+
failed: stats.failed,
|
|
940
|
+
});
|
|
941
|
+
}
|
|
942
|
+
this.processorActive = false;
|
|
943
|
+
}
|
|
944
|
+
/**
 * Stop the queue processor.
 * Sets a flag that the processor loop checks between batches; any batch
 * already in flight finishes before the loop exits.
 */
stopProcessor() {
    this.processorStopRequested = true;
}
|
|
950
|
+
/**
 * Check if queue processor is active.
 * @returns {boolean} true while the processor loop is running
 */
isProcessorActive() {
    return this.processorActive;
}
|
|
956
|
+
}
|