@sunnyln/lni 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,974 @@
1
+ import { Transaction as BtcTransaction } from '@scure/btc-signer';
2
+ import { mnemonicToSeedSync } from '@scure/bip39';
3
+ import { sha256 } from '@noble/hashes/sha2';
4
+ import { schnorr, secp256k1 } from '@noble/curves/secp256k1';
5
+ import { Identifier as FrostIdentifier, KeyPackage as FrostKeyPackage, Nonce as FrostNonce, NonceCommitment as FrostNonceCommitment, PublicKeyPackage as FrostPublicKeyPackage, Signature as FrostSignature, Secp256K1Sha256TR, SignatureShare as FrostSignatureShare, SigningCommitments as FrostSigningCommitments, SigningNonces as FrostSigningNonces, SigningPackageImpl as FrostSigningPackageImpl, SigningShare as FrostSigningShare, VerifyingKey as FrostVerifyingKey, VerifyingShare as FrostVerifyingShare, hasEvenYPublicKey, intoEvenYKeyPackage, tweakKeyPackage, tweakPublicKeyPackage, } from '../vendor/frosts-bridge.js';
6
+ import { decrypt as eciesDecrypt, encrypt as eciesEncrypt } from 'eciesjs';
7
+ import { decode as decodeBolt11 } from 'light-bolt11-decoder';
8
+ import { LniError } from '../errors.js';
9
+ import { bytesToHex, hexToBytes } from '../internal/encoding.js';
10
+ import { pollInvoiceEvents } from '../internal/polling.js';
11
+ import { emptyNodeInfo, emptyTransaction, matchesSearch, toUnixSeconds } from '../internal/transform.js';
12
+ import { InvoiceType } from '../types.js';
13
// Default cap on routing fees, in sats (presumably consumed by payment paths later in this file).
const DEFAULT_MAX_FEE_SATS = 20;
// Default page size when listing transactions.
const DEFAULT_PAGE_SIZE = 50;
// Upper bound on records scanned during listing/search.
const DEFAULT_SCAN_LIMIT = 1000;
// Module specifiers for the Spark SDK entry points, resolved dynamically in loadSdk().
const SPARK_SDK_DEFAULT_ENTRY = '@buildonspark/spark-sdk';
const SPARK_SDK_PACKAGED_BARE_ENTRY = '../vendor/spark-sdk-bare.js';
// NOTE(review): SPARK_SDK_BARE_ENTRY is not referenced in this chunk and is not
// handled by importSparkSdkCandidate — confirm it is used elsewhere or remove it.
const SPARK_SDK_BARE_ENTRY = '@buildonspark/spark-sdk/bare';
const SPARK_SDK_NATIVE_ENTRY = '@buildonspark/spark-sdk/native';
/**
 * Derives the deterministic FROST identifier used for the local ("user") signer.
 * Signing and aggregation paths must agree on this identifier.
 */
function createUserIdentifier() {
    const label = new TextEncoder().encode('user');
    return FrostIdentifier.derive(Secp256K1Sha256TR, label);
}
23
/**
 * Maps an LNI network name (case-insensitive) onto the Spark SDK network enum.
 * Unknown or missing values default to 'MAINNET'.
 */
function mapNetworkToSpark(network) {
    const table = new Map([
        ['mainnet', 'MAINNET'],
        ['regtest', 'REGTEST'],
        ['testnet', 'TESTNET'],
        ['signet', 'SIGNET'],
        ['local', 'LOCAL'],
    ]);
    const key = (network ?? 'mainnet').toLowerCase();
    return table.get(key) ?? 'MAINNET';
}
39
/**
 * Best-effort conversion of an unknown value to a finite number.
 * Numbers/strings that are not finite map to 0; BigInts outside the safe
 * integer range throw rather than silently losing precision.
 */
function numberFromUnknown(value) {
    switch (typeof value) {
        case 'number':
            return Number.isFinite(value) ? value : 0;
        case 'bigint': {
            const tooBig = value > BigInt(Number.MAX_SAFE_INTEGER);
            const tooSmall = value < BigInt(Number.MIN_SAFE_INTEGER);
            if (tooBig || tooSmall) {
                throw new Error(`BigInt value ${value} exceeds Number.MAX_SAFE_INTEGER and cannot be safely converted.`);
            }
            return Number(value);
        }
        case 'string': {
            const parsed = Number(value);
            return Number.isFinite(parsed) ? parsed : 0;
        }
        default:
            return 0;
    }
}
55
/**
 * Converts a Date or a parseable date string to unix seconds; any other value
 * is delegated to the shared toUnixSeconds() helper.
 */
function toUnixSecondsFromAny(value) {
    if (value instanceof Date) {
        return Math.floor(value.getTime() / 1000);
    }
    if (typeof value === 'string') {
        const epochMs = Date.parse(value);
        if (Number.isFinite(epochMs)) {
            return Math.floor(epochMs / 1000);
        }
    }
    return toUnixSeconds(value);
}
67
/**
 * Converts a Spark "currency amount" value to millisatoshis.
 * Bare primitives are interpreted as sats; objects are expected to carry
 * { originalValue, originalUnit } and are converted based on the unit string.
 * Unknown shapes and zero amounts map to 0; unknown units are passed through.
 */
function mapCurrencyAmountToMsats(value) {
    const kind = typeof value;
    if (kind === 'number' || kind === 'bigint' || kind === 'string') {
        return Math.floor(numberFromUnknown(value) * 1000);
    }
    if (!value || kind !== 'object') {
        return 0;
    }
    const record = value;
    const amount = numberFromUnknown(record.originalValue);
    if (!amount) {
        return 0;
    }
    const unit = typeof record.originalUnit === 'string' ? record.originalUnit.toLowerCase() : '';
    // Order matters: "millisatoshi"/"msat" must win before the bare "sat" check.
    if (unit.includes('millisatoshi') || unit.includes('msat')) {
        return Math.floor(amount);
    }
    if (unit.includes('sat') && !unit.includes('msat')) {
        return Math.floor(amount * 1000);
    }
    if (unit.includes('btc') || unit.includes('bitcoin')) {
        return Math.floor(amount * 100_000_000_000);
    }
    return Math.floor(amount);
}
91
/** Returns a shallow copy of `value` with all `undefined` entries dropped (null is kept). */
function removeUndefinedValues(value) {
    const kept = {};
    for (const [key, entry] of Object.entries(value)) {
        if (entry !== undefined) {
            kept[key] = entry;
        }
    }
    return kept;
}
94
/**
 * Resolves the wallet's spendable balance in sats.
 * Prefers the SDK-reported balance when positive; otherwise, best-effort sums
 * the individual leaf values via wallet.getLeaves(true). Any failure or empty
 * result falls back to the (clamped, non-negative) reported figure.
 */
async function resolveWalletBalanceSats(wallet, rawBalance) {
    const reported = numberFromUnknown(rawBalance?.balance);
    if (reported > 0) {
        return reported;
    }
    const fallback = Math.max(0, reported);
    if (typeof wallet.getLeaves !== 'function') {
        return fallback;
    }
    try {
        const leaves = await wallet.getLeaves(true);
        if (Array.isArray(leaves)) {
            let total = 0;
            for (const leaf of leaves) {
                total += numberFromUnknown(leaf?.value);
            }
            if (total > 0) {
                return total;
            }
        }
    }
    catch {
        // Leaf enumeration is best-effort; fall through to the reported balance.
    }
    return fallback;
}
119
/** Rehydrates a serialized (hiding, binding) nonce-commitment pair into a FROST signing commitment. */
function signingCommitmentFromBinding(commitment) {
    const revive = (bytes) => FrostNonceCommitment.deserialize(Secp256K1Sha256TR, bytes);
    return new FrostSigningCommitments(Secp256K1Sha256TR, revive(commitment.hiding), revive(commitment.binding));
}
124
/** Parses a hex string into a FROST participant identifier. */
function identifierFromHex(identifier) {
    const raw = hexToBytes(identifier);
    return FrostIdentifier.deserialize(Secp256K1Sha256TR, raw);
}
127
/** Serializes a FROST participant identifier to its hex representation. */
function identifierToHex(identifier) {
    const raw = identifier.serialize();
    return bytesToHex(raw);
}
130
/**
 * Builds the local signer's FROST key package from serialized share material
 * (params.secretKey / params.publicKey / params.verifyingKey).
 * The returned package combines the even-Y-normalized shares with the tweaked
 * group verifying key so share arithmetic and the group key agree on Taproot
 * parity conventions.
 * NOTE(review): both `tweaked` and `evenY` derive from `base`; only the tweaked
 * verifyingKey and the even-Y shares are recombined below — confirm this matches
 * the reference implementation.
 */
function buildUserKeyPackage(params, identifierOverride) {
    const userIdentifier = identifierOverride ?? createUserIdentifier();
    const signingShare = FrostSigningShare.deserialize(Secp256K1Sha256TR, params.secretKey);
    const verifyingShare = FrostVerifyingShare.deserialize(Secp256K1Sha256TR, params.publicKey);
    const verifyingKey = FrostVerifyingKey.deserialize(Secp256K1Sha256TR, params.verifyingKey).toElement();
    // min_signers = 1: the local user is a single mandatory signer.
    const base = new FrostKeyPackage(Secp256K1Sha256TR, userIdentifier, signingShare, verifyingShare, verifyingKey, 1);
    // Empty tweak input; semantics are defined by tweakKeyPackage.
    const tweaked = tweakKeyPackage(base, new Uint8Array());
    const evenY = intoEvenYKeyPackage(base, hasEvenYPublicKey(params.verifyingKey));
    return new FrostKeyPackage(Secp256K1Sha256TR, evenY.identifier, evenY.signingShare, evenY.verifyingShare, tweaked.verifyingKey, evenY.minSigners);
}
140
/**
 * Assembles a FROST signing package for `message` from our own commitment plus
 * the statechain operators' commitments (a map keyed by hex identifier).
 * Entries are inserted in sorted-hex-identifier order so the package is
 * deterministic regardless of the input object's property order.
 */
function buildSigningPackage(message, selfCommitment, selfIdentifier, statechainCommitments) {
    const commitments = new Map();
    const userIdHex = identifierToHex(selfIdentifier);
    const commitmentById = new Map();
    for (const [identifier, commitment] of Object.entries(statechainCommitments ?? {})) {
        commitmentById.set(identifier, commitment);
    }
    // Our own commitment lives in the same hex keyspace (and overrides any
    // statechain entry that happens to share our identifier).
    commitmentById.set(userIdHex, selfCommitment);
    const sortedIds = Array.from(commitmentById.keys()).sort();
    for (const identifier of sortedIds) {
        const commitment = commitmentById.get(identifier);
        if (!commitment) {
            continue;
        }
        // Reuse the caller's identifier object for our own entry so identity-based
        // Map lookups elsewhere keep working.
        commitments.set(identifier === userIdHex ? selfIdentifier : identifierFromHex(identifier), signingCommitmentFromBinding(commitment));
    }
    return new FrostSigningPackageImpl(Secp256K1Sha256TR, commitments, message);
}
158
/**
 * Normalizes an adaptor public key to 33-byte compressed form.
 * - empty/undefined -> undefined (no adaptor in use)
 * - 33 bytes -> returned as-is after validating the 0x02/0x03 prefix
 * - 32 bytes -> treated as x-only and prefixed with 0x02 (even Y)
 * @throws LniError on any other length or an invalid compression prefix.
 */
function normalizeAdaptorPublicKey(adaptorPubKey) {
    if (!adaptorPubKey || adaptorPubKey.length === 0) {
        return undefined;
    }
    switch (adaptorPubKey.length) {
        case 33: {
            const prefix = adaptorPubKey[0];
            if (prefix !== 0x02 && prefix !== 0x03) {
                throw new LniError('InvalidInput', 'Spark adaptor public key (33-byte form) must use compressed secp256k1 prefix 0x02/0x03.');
            }
            return adaptorPubKey;
        }
        case 32: {
            const compressed = new Uint8Array(33);
            compressed[0] = 0x02;
            compressed.set(adaptorPubKey, 1);
            return compressed;
        }
        default:
            throw new LniError('InvalidInput', 'Spark adaptor public key must be 32 or 33 bytes.');
    }
}
177
/**
 * Coerces a scalar-like value (a raw bigint, or any FROST wrapper object
 * exposing toScalar()) to a bigint scalar.
 * @throws LniError when the value has neither shape; `label` names it in the message.
 */
function scalarFromLike(value, label) {
    if (typeof value === 'bigint') {
        return value;
    }
    const convertible = value !== null &&
        typeof value === 'object' &&
        'toScalar' in value &&
        typeof value.toScalar === 'function';
    if (convertible) {
        return value.toScalar();
    }
    throw new LniError('Api', `Spark signer expected scalar-like value for ${label}.`);
}
189
/**
 * Coerces an element-like value (raw Uint8Array, or any object exposing
 * serialize()) to its byte representation.
 * @throws LniError when the value has neither shape; `label` names it in the message.
 */
function elementBytesFromLike(value, label) {
    if (value instanceof Uint8Array) {
        return value;
    }
    const serializable = value !== null &&
        typeof value === 'object' &&
        'serialize' in value &&
        typeof value.serialize === 'function';
    if (serializable) {
        return value.serialize();
    }
    throw new LniError('Api', `Spark signer expected element-like value for ${label}.`);
}
201
/** Interprets `bytes` as an unsigned big-endian integer. Empty input yields 0n. */
function bytesToBigInt(bytes) {
    return Array.from(bytes).reduce((acc, byte) => (acc << 8n) | BigInt(byte), 0n);
}
208
/**
 * Serializes a non-negative bigint as big-endian bytes into a fixed-size buffer.
 * Previously, values that did not fit in `size` bytes were silently truncated
 * (high bytes dropped) and negative values were mangled by BigInt two's-complement
 * masking — both corrupt scalars; such inputs now throw instead.
 * @param {bigint} value - non-negative integer to serialize
 * @param {number} size - output length in bytes
 * @returns {Uint8Array} big-endian encoding, left-padded with zeros
 * @throws {RangeError} when value is negative or needs more than `size` bytes
 */
function bigIntToFixedBytes(value, size) {
    if (value < 0n) {
        throw new RangeError(`bigIntToFixedBytes: value must be non-negative, got ${value}`);
    }
    if (value >> (8n * BigInt(size)) !== 0n) {
        throw new RangeError(`bigIntToFixedBytes: value does not fit in ${size} bytes`);
    }
    const bytes = new Uint8Array(size);
    let remainder = value;
    for (let index = size - 1; index >= 0; index -= 1) {
        bytes[index] = Number(remainder & 0xffn);
        remainder >>= 8n;
    }
    return bytes;
}
217
/**
 * Returns the 32-byte x-only form of a public key: x-only input is passed
 * through, a 33-byte compressed key has its parity prefix stripped.
 * @throws LniError for any other encoding.
 */
function toXOnlyPublicKey(pubkey) {
    if (pubkey.length === 32) {
        return pubkey;
    }
    const isCompressed = pubkey.length === 33 && (pubkey[0] === 0x02 || pubkey[0] === 0x03);
    if (isCompressed) {
        return pubkey.slice(1);
    }
    throw new LniError('InvalidInput', `Spark public key must be 32-byte x-only or 33-byte compressed, got ${pubkey.length} bytes.`);
}
226
/**
 * Locally validates a Schnorr adaptor pre-signature against the signer key and
 * adaptor point: recomputes base R = s*G - e*P, shifts it by the adaptor point,
 * and checks the adapted R has even Y and an x coordinate equal to the
 * signature's r. Returns { ok: true } or { ok: false, reason } — never throws.
 */
function validateOutboundAdaptorSignatureLocal(params) {
    try {
        const { signature, message, pubkey, adaptorPubkey } = params;
        if (message.length !== 32) {
            return { ok: false, reason: `invalid message length: ${message.length}` };
        }
        if (signature.length !== 64) {
            return { ok: false, reason: `invalid signature length: ${signature.length}` };
        }
        // Split the 64-byte signature into r (point x coordinate) and s (scalar).
        const r = signature.slice(0, 32);
        const s = signature.slice(32, 64);
        const rNum = bytesToBigInt(r);
        const sNum = bytesToBigInt(s);
        if (rNum >= secp256k1.CURVE.Fp.ORDER) {
            return { ok: false, reason: 'invalid signature r >= field order' };
        }
        if (sNum >= secp256k1.CURVE.n) {
            return { ok: false, reason: 'invalid signature s >= curve order' };
        }
        const xOnlyPubkey = toXOnlyPublicKey(pubkey);
        // BIP-340: lift the x-only key to the point with even Y.
        const signerPoint = schnorr.utils.lift_x(bytesToBigInt(xOnlyPubkey));
        signerPoint.assertValidity();
        // e = tagged_hash("BIP0340/challenge", r || P.x || m) mod n.
        const challengeBytes = schnorr.utils.taggedHash('BIP0340/challenge', r, signerPoint.toBytes().slice(1), message);
        const challengeScalar = bytesToBigInt(challengeBytes) % secp256k1.CURVE.n;
        const negChallenge = (secp256k1.CURVE.n - challengeScalar) % secp256k1.CURVE.n;
        // base R = s*G + (-e)*P. multiplyUnsafe is fine here: inputs are public.
        const sG = secp256k1.Point.BASE.multiplyUnsafe(sNum);
        const eP = signerPoint.multiplyUnsafe(negChallenge);
        const baseR = sG.add(eP);
        if (baseR.equals(secp256k1.Point.ZERO)) {
            return { ok: false, reason: 'calculated base R is zero' };
        }
        baseR.assertValidity();
        // Adapted R = base R + T must be even-Y and match the signature's r.
        const adaptorPoint = secp256k1.Point.fromHex(adaptorPubkey);
        const adaptedR = baseR.add(adaptorPoint);
        if (adaptedR.equals(secp256k1.Point.ZERO)) {
            return { ok: false, reason: 'calculated adapted R is infinity' };
        }
        adaptedR.assertValidity();
        if (adaptedR.toAffine().y % 2n !== 0n) {
            return { ok: false, reason: 'calculated adapted R y-value is odd' };
        }
        if (adaptedR.toAffine().x !== rNum) {
            return { ok: false, reason: 'calculated adapted R x does not match signature r' };
        }
        return { ok: true };
    }
    catch (error) {
        return { ok: false, reason: toDebugReason(error) };
    }
}
276
/**
 * Computes this participant's FROST signature share:
 *   z_i = (hiding + binding * rho) + lambda_i * share * challenge
 * with both nonces negated when the group commitment point has odd Y
 * (BIP-340 parity convention, mirroring the Rust implementation this is named for).
 * Fix: `signingShareScalar` was declared with `let` but never reassigned — now `const`.
 * @returns serialized signature share bytes
 */
function computeSignatureShareRustCompat(params) {
    // Negate nonces when the (possibly adaptor-shifted) group commitment has odd Y.
    const negateNonces = !hasEvenYPublicKey(params.groupCommitmentElement);
    const hiding = scalarFromLike(params.signerNonces.hiding, 'signerNonces.hiding');
    const binding = scalarFromLike(params.signerNonces.binding, 'signerNonces.binding');
    const adjustedHiding = negateNonces
        ? Secp256K1Sha256TR.scalarSub(Secp256K1Sha256TR.scalarZero(), hiding)
        : hiding;
    const adjustedBinding = negateNonces
        ? Secp256K1Sha256TR.scalarSub(Secp256K1Sha256TR.scalarZero(), binding)
        : binding;
    const bindingFactorScalar = scalarFromLike(params.bindingFactor, 'bindingFactor');
    const lambdaScalar = scalarFromLike(params.lambdaI, 'lambdaI');
    const signingShareScalar = scalarFromLike(params.keyPackage.signingShare, 'keyPackage.signingShare');
    const challengeScalar = scalarFromLike(params.challenge, 'challenge');
    const bindingTimesRho = Secp256K1Sha256TR.scalarMul(adjustedBinding, bindingFactorScalar);
    const lambdaTimesShare = Secp256K1Sha256TR.scalarMul(lambdaScalar, signingShareScalar);
    const lambdaShareChallenge = Secp256K1Sha256TR.scalarMul(lambdaTimesShare, challengeScalar);
    const hidingPlusBinding = Secp256K1Sha256TR.scalarAdd(adjustedHiding, bindingTimesRho);
    const zShare = Secp256K1Sha256TR.scalarAdd(hidingPlusBinding, lambdaShareChallenge);
    return FrostSignatureShare.fromScalar(Secp256K1Sha256TR, zShare).serialize();
}
297
/**
 * Ensures the group verifying key has even Y before aggregation: when it does
 * not, the verifying key and every participant verifying share are negated
 * (identity - P == -P in the group).
 * NOTE(review): the negated package is returned as a plain-object spread of the
 * original — downstream code must only read its properties, not invoke class
 * methods on it; confirm consumers treat it structurally.
 */
function normalizePublicKeyPackageForPreAggregate(publicKeyPackage) {
    if (hasEvenYPublicKey(publicKeyPackage.verifyingKey)) {
        return publicKeyPackage;
    }
    const negatedVerifyingShares = new Map();
    for (const [identifier, share] of publicKeyPackage.verifyingShares.entries()) {
        // Shares may be wrapper objects (exposing toElement) or raw elements.
        const shareElement = typeof share?.toElement === 'function'
            ? share.toElement()
            : share;
        negatedVerifyingShares.set(identifier, Secp256K1Sha256TR.elementSub(Secp256K1Sha256TR.identity(), shareElement));
    }
    return {
        ...publicKeyPackage,
        verifyingKey: Secp256K1Sha256TR.elementSub(Secp256K1Sha256TR.identity(), publicKeyPackage.verifyingKey),
        verifyingShares: negatedVerifyingShares,
    };
}
314
/** Renders an error as a debug string, truncated to 200 characters plus '...'. */
function toDebugReason(error) {
    const message = error instanceof Error ? error.message : String(error);
    return message.length > 200 ? `${message.slice(0, 200)}...` : message;
}
321
/**
 * Emits a debug checkpoint to the optional global __LNI_SPARK_DEBUG__ hook.
 * The hook may be a function, or an object with emit() (skipped when its
 * `enabled` flag is explicitly false). Hook errors are swallowed: debugging
 * must never break the signing paths.
 */
function emitSparkDebugCheckpoint(phase, meta = {}) {
    const hook = globalThis.__LNI_SPARK_DEBUG__;
    if (!hook) {
        return;
    }
    const checkpoint = { phase, ts: Date.now(), meta };
    try {
        if (typeof hook === 'function') {
            hook(checkpoint);
        }
        else if (hook.enabled !== false && typeof hook.emit === 'function') {
            hook.emit(checkpoint);
        }
    }
    catch {
        // Intentionally ignored.
    }
}
343
/**
 * Pure-TypeScript replacement for the native SparkFrost signFrost hook.
 * Builds the FROST signing package from our commitment plus the statechain
 * commitments, derives the binding factor and challenge (computed over R + T
 * when an adaptor point is present), and returns this participant's serialized
 * signature share. Debug checkpoints are emitted at each phase; errors are
 * checkpointed and rethrown.
 */
async function pureSignFrost(params) {
    const commitmentKeys = Object.keys(params.statechainCommitments ?? {});
    emitSparkDebugCheckpoint('sign_frost:start', {
        messageBytes: params.message.length,
        statechainCommitments: commitmentKeys.length,
        firstCommitmentKeyLen: commitmentKeys[0]?.length ?? 0,
        firstCommitmentKeyPrefix: commitmentKeys[0]?.slice(0, 8) ?? '',
        hasAdaptor: Boolean(params.adaptorPubKey && params.adaptorPubKey.length > 0),
        adaptorInputBytes: params.adaptorPubKey?.length ?? 0,
        adaptorInputPrefix: params.adaptorPubKey && params.adaptorPubKey.length > 0
            ? Number(params.adaptorPubKey[0]).toString(16).padStart(2, '0')
            : '',
    });
    try {
        const userIdentifier = createUserIdentifier();
        const userIdentifierHex = identifierToHex(userIdentifier);
        const keyPackage = buildUserKeyPackage(params.keyPackage, userIdentifier);
        // NOTE(review): called with one argument here, unlike the two-argument call
        // in buildUserKeyPackage — confirm the parity flag's default is intended.
        const preSignedKeyPackage = intoEvenYKeyPackage(keyPackage);
        const hiding = FrostNonce.deserialize(Secp256K1Sha256TR, params.nonce.hiding);
        const binding = FrostNonce.deserialize(Secp256K1Sha256TR, params.nonce.binding);
        const nonces = FrostSigningNonces.fromNonces(Secp256K1Sha256TR, hiding, binding);
        const signingPackage = buildSigningPackage(params.message, params.selfCommitment, userIdentifier, params.statechainCommitments);
        const adaptorPublicKey = normalizeAdaptorPublicKey(params.adaptorPubKey);
        const statechainIds = Object.keys(params.statechainCommitments ?? {}).sort();
        emitSparkDebugCheckpoint('sign_frost:package_ready', {
            adaptorCompressedBytes: adaptorPublicKey?.length ?? 0,
            userIdentifierHex,
            statechainIds,
        });
        const bindingFactorList = Secp256K1Sha256TR.computeBindingFactorList(signingPackage, preSignedKeyPackage.verifyingKey, new Uint8Array());
        const bindingFactor = bindingFactorList.get(preSignedKeyPackage.identifier);
        if (!bindingFactor) {
            throw new LniError('Api', 'Failed to compute Spark signing binding factor.');
        }
        emitSparkDebugCheckpoint('sign_frost:binding_factor_ready');
        const groupCommitment = Secp256K1Sha256TR.computeGroupCommitment(signingPackage, bindingFactorList);
        // With an adaptor, the challenge is computed over R + T rather than R.
        const challengeCommitment = adaptorPublicKey
            ? Secp256K1Sha256TR.elementAdd(groupCommitment.toElement(), adaptorPublicKey)
            : groupCommitment.toElement();
        // Single mandatory signer: the Lagrange coefficient is 1.
        const lambdaI = Secp256K1Sha256TR.scalarOne();
        const challenge = Secp256K1Sha256TR.challenge(challengeCommitment, preSignedKeyPackage.verifyingKey, signingPackage.message);
        emitSparkDebugCheckpoint('sign_frost:challenge_ready');
        emitSparkDebugCheckpoint('sign_frost:parity_adjusted');
        // The commitment whose Y-parity decides nonce negation in the share computation.
        const groupCommitmentElement = adaptorPublicKey
            ? challengeCommitment
            : groupCommitment.toElement();
        const serialized = computeSignatureShareRustCompat({
            groupCommitmentElement,
            signerNonces: nonces,
            bindingFactor,
            lambdaI,
            keyPackage: preSignedKeyPackage,
            challenge,
        });
        emitSparkDebugCheckpoint('sign_frost:complete');
        return serialized;
    }
    catch (error) {
        emitSparkDebugCheckpoint('sign_frost:error', {
            reason: toDebugReason(error),
        });
        throw error;
    }
}
407
/**
 * Pure-TypeScript replacement for the native SparkFrost aggregateFrost hook.
 * Sums all participants' signature shares into the final Schnorr signature.
 * Without an adaptor the sum is serialized directly; with an adaptor the group
 * commitment is shifted by the adaptor point and normalized to even Y, then
 * both z and -z are tried, returning the first pre-signature that passes local
 * adaptor validation.
 * @throws LniError when no adaptor z-candidate validates.
 */
async function pureAggregateFrost(params) {
    const signatureKeys = Object.keys(params.statechainSignatures ?? {});
    emitSparkDebugCheckpoint('aggregate_frost:start', {
        statechainSignatures: signatureKeys.length,
        firstSignatureKeyLen: signatureKeys[0]?.length ?? 0,
        firstSignatureKeyPrefix: signatureKeys[0]?.slice(0, 8) ?? '',
        hasAdaptor: Boolean(params.adaptorPubKey && params.adaptorPubKey.length > 0),
    });
    try {
        const signingPackage = buildSigningPackage(params.message, params.selfCommitment, createUserIdentifier(), params.statechainCommitments);
        // Signature shares, keyed by identifier object (statechain operators + ourselves).
        const signatureShares = new Map();
        for (const [identifier, shareBytes] of Object.entries(params.statechainSignatures ?? {})) {
            signatureShares.set(identifierFromHex(identifier), FrostSignatureShare.deserialize(Secp256K1Sha256TR, shareBytes));
        }
        const selfIdentifier = createUserIdentifier();
        signatureShares.set(selfIdentifier, FrostSignatureShare.deserialize(Secp256K1Sha256TR, params.selfSignature));
        // Verifying shares are keyed by hex identifier string (unlike signature shares).
        const verifyingShares = new Map();
        for (const [identifier, publicKey] of Object.entries(params.statechainPublicKeys ?? {})) {
            verifyingShares.set(identifier, FrostVerifyingShare.deserialize(Secp256K1Sha256TR, publicKey));
        }
        verifyingShares.set(identifierToHex(selfIdentifier), FrostVerifyingShare.deserialize(Secp256K1Sha256TR, params.selfPublicKey));
        const verifyingKey = FrostVerifyingKey.deserialize(Secp256K1Sha256TR, params.verifyingKey).toElement();
        const publicKeyPackage = new FrostPublicKeyPackage(Secp256K1Sha256TR, verifyingShares, verifyingKey, 1);
        const adaptorPublicKey = normalizeAdaptorPublicKey(params.adaptorPubKey);
        // Apply the (empty-input) tweak, then force even-Y parity before aggregation.
        const tweakedPublicKeyPackage = tweakPublicKeyPackage(publicKeyPackage, new Uint8Array());
        const preAggregatedPublicKeyPackage = normalizePublicKeyPackageForPreAggregate(tweakedPublicKeyPackage);
        const bindingFactorList = Secp256K1Sha256TR.computeBindingFactorList(signingPackage, preAggregatedPublicKeyPackage.verifyingKey, new Uint8Array());
        const groupCommitment = Secp256K1Sha256TR.computeGroupCommitment(signingPackage, bindingFactorList);
        if (!adaptorPublicKey) {
            // Standard aggregation: z = sum of shares, R = group commitment.
            let z = Secp256K1Sha256TR.scalarZero();
            for (const signatureShare of signatureShares.values()) {
                z = Secp256K1Sha256TR.scalarAdd(z, scalarFromLike(signatureShare, 'signatureShare'));
            }
            const signature = new FrostSignature(groupCommitment.toElement(), z);
            const serialized = signature.serialize(Secp256K1Sha256TR);
            emitSparkDebugCheckpoint('aggregate_frost:complete', {
                mode: 'standard',
            });
            return serialized;
        }
        // Adaptor path: shift R by the adaptor point and normalize to even Y.
        const challengeCommitment = Secp256K1Sha256TR.elementAdd(groupCommitment.toElement(), adaptorPublicKey);
        const adaptedGroupCommitment = hasEvenYPublicKey(challengeCommitment)
            ? challengeCommitment
            : Secp256K1Sha256TR.elementSub(Secp256K1Sha256TR.identity(), challengeCommitment);
        let z = Secp256K1Sha256TR.scalarZero();
        for (const signatureShare of signatureShares.values()) {
            z = Secp256K1Sha256TR.scalarAdd(z, scalarFromLike(signatureShare, 'signatureShare'));
        }
        // After the even-Y normalization the sign of z is ambiguous; try both
        // z and -z and keep the first candidate that validates locally.
        const zCandidates = [
            z,
            Secp256K1Sha256TR.scalarSub(Secp256K1Sha256TR.scalarZero(), z),
        ];
        let fallbackSerialized;
        const candidateDiagnostics = [];
        for (const [candidateIndex, candidateZ] of zCandidates.entries()) {
            const preSignature = new FrostSignature(adaptedGroupCommitment, candidateZ);
            const serialized = preSignature.serialize(Secp256K1Sha256TR);
            if (!fallbackSerialized) {
                fallbackSerialized = serialized;
            }
            const validation = validateOutboundAdaptorSignatureLocal({
                signature: serialized,
                message: params.message,
                pubkey: preAggregatedPublicKeyPackage.verifyingKey,
                adaptorPubkey: adaptorPublicKey,
            });
            candidateDiagnostics.push({
                candidateIndex,
                valid: validation.ok,
                reason: validation.ok ? 'ok' : validation.reason,
            });
            if (validation.ok) {
                emitSparkDebugCheckpoint('aggregate_frost:complete', {
                    mode: 'adaptor',
                    candidateIndex,
                });
                return serialized;
            }
        }
        // NOTE(review): fallbackSerialized is collected but never returned — the
        // throw below always wins when validation fails. Confirm whether a
        // best-effort fallback return was intended.
        emitSparkDebugCheckpoint('aggregate_frost:adaptor_validation_failed', {
            candidates: candidateDiagnostics,
        });
        throw new LniError('Api', `Adaptor signature validation failed: no z-candidate passed validation (${candidateDiagnostics.length} tried).`);
    }
    catch (error) {
        emitSparkDebugCheckpoint('aggregate_frost:error', {
            reason: toDebugReason(error),
        });
        throw error;
    }
}
498
/**
 * Builds a minimal version-3 transaction paying `amountSats` to `address`,
 * spending a single all-zero outpoint. Returns the raw bytes and txid.
 */
async function pureCreateDummyTx(address, amountSats) {
    const tx = new BtcTransaction({ version: 3 });
    const zeroOutpoint = {
        txid: new Uint8Array(32),
        index: 0,
        sequence: 0,
    };
    tx.addInput(zeroOutpoint);
    tx.addOutputAddress(address, amountSats);
    return { tx: tx.toBytes(), txid: tx.id };
}
511
/** Encrypts `msg` to `publicKey` via ECIES, normalizing the result to a Uint8Array. */
async function pureEncryptEcies(msg, publicKey) {
    const ciphertext = eciesEncrypt(publicKey, msg);
    return Uint8Array.from(ciphertext);
}
514
/** Decrypts an ECIES ciphertext with `privateKey`, normalizing the result to a Uint8Array. */
async function pureDecryptEcies(encryptedMsg, privateKey) {
    const plaintext = eciesDecrypt(privateKey, encryptedMsg);
    return Uint8Array.from(plaintext);
}
517
/**
 * Decodes a BOLT11 invoice and returns its payment hash, or '' when the
 * invoice is missing, undecodable, or carries no payment_hash section.
 */
function extractPaymentHashFromInvoice(invoice) {
    if (!invoice) {
        return '';
    }
    try {
        const { sections } = decodeBolt11(invoice);
        const hash = sections?.find((entry) => entry.name === 'payment_hash')?.value;
        return typeof hash === 'string' ? hash : '';
    }
    catch {
        return '';
    }
}
530
/**
 * Decodes a BOLT11 invoice and returns its expiry delta in seconds, or 0 when
 * the invoice is missing, undecodable, or carries no expiry.
 */
function extractExpiryFromInvoice(invoice) {
    if (!invoice) {
        return 0;
    }
    try {
        return numberFromUnknown(decodeBolt11(invoice).expiry);
    }
    catch {
        return 0;
    }
}
542
/**
 * Decodes a BOLT11 invoice and returns its amount in msats, or undefined for
 * amountless, missing, or undecodable invoices.
 */
function extractAmountMsatsFromInvoice(invoice) {
    if (!invoice) {
        return undefined;
    }
    try {
        const { sections } = decodeBolt11(invoice);
        const amountSection = sections?.find((entry) => entry.name === 'amount');
        const amountMsats = numberFromUnknown(amountSection?.value);
        return amountMsats > 0 ? Math.floor(amountMsats) : undefined;
    }
    catch {
        return undefined;
    }
}
559
/**
 * Returns the hex-encoded SHA-256 of a hex-encoded byte string.
 * Empty/undefined input short-circuits to ''.
 */
async function sha256HexOfHexString(hex) {
    if (!hex) {
        return '';
    }
    return bytesToHex(sha256(hexToBytes(hex)));
}
566
/** True when a Spark status string contains a terminal marker (COMPLETED or FINALIZED). */
function isSettledStatus(status) {
    if (typeof status !== 'string') {
        return false;
    }
    return ['COMPLETED', 'FINALIZED'].some((marker) => status.includes(marker));
}
572
/**
 * Maps a Spark transfer record (SDK shape, treated as untyped input) onto the
 * library's generic transaction shape. Every field is defensively typed; the
 * `||` fallback chains deliberately skip empty strings and zeros.
 */
function mapSparkTransferToTransaction(transfer) {
    const item = (transfer ?? {});
    const userRequest = (item.userRequest ?? {});
    const requestInvoice = (userRequest.invoice ?? {});
    // Invoice: request-level encoding, then nested invoice, then spark-native invoice.
    const invoice = (typeof userRequest.encodedInvoice === 'string' ? userRequest.encodedInvoice : '') ||
        (typeof requestInvoice.encodedInvoice === 'string' ? requestInvoice.encodedInvoice : '') ||
        (typeof item.sparkInvoice === 'string' ? item.sparkInvoice : '');
    const paymentPreimage = typeof userRequest.paymentPreimage === 'string' ? userRequest.paymentPreimage : '';
    // Payment hash: explicit fields first, else decoded from the invoice itself.
    const paymentHash = (typeof requestInvoice.paymentHash === 'string' ? requestInvoice.paymentHash : '') ||
        (typeof userRequest.paymentHash === 'string' ? userRequest.paymentHash : '') ||
        extractPaymentHashFromInvoice(invoice);
    const createdAt = toUnixSecondsFromAny(item.createdTime) ||
        toUnixSecondsFromAny(requestInvoice.createdAt);
    // Expiry: explicit timestamp, else createdAt plus the invoice's expiry delta.
    const expiresAt = toUnixSecondsFromAny(requestInvoice.expiresAt) ||
        (createdAt ? createdAt + extractExpiryFromInvoice(invoice) : 0);
    const feeMsats = mapCurrencyAmountToMsats(userRequest.fee);
    const transferDirection = typeof item.transferDirection === 'string' ? item.transferDirection : '';
    return emptyTransaction({
        type: transferDirection === 'INCOMING' ? 'incoming' : 'outgoing',
        invoice,
        description: (typeof requestInvoice.memo === 'string' ? requestInvoice.memo : '') ||
            (typeof userRequest.memo === 'string' ? userRequest.memo : ''),
        descriptionHash: '',
        preimage: paymentPreimage,
        paymentHash,
        amountMsats: mapCurrencyAmountToMsats(item.totalValue),
        feesPaid: feeMsats,
        createdAt,
        expiresAt,
        // The record carries no separate settlement timestamp, so terminal
        // statuses reuse createdAt as settledAt.
        settledAt: isSettledStatus(item.status) ? createdAt : 0,
        externalId: typeof item.id === 'string' ? item.id : '',
    });
}
605
/** Returns the configured Spark SDK entry selector; 'auto' lets loadSdk() decide. */
function resolveEntry(config) {
    const entry = config.sdkEntry;
    return entry ?? 'auto';
}
608
/** True when running under Node.js proper (and not React Native, which may polyfill process). */
function isNodeRuntime() {
    const { process, navigator } = globalThis;
    const hasNodeVersion = Boolean(process?.versions?.node);
    const isReactNative = navigator?.product === 'ReactNative';
    return hasNodeVersion && !isReactNative;
}
612
/**
 * Dynamically imports one of the supported Spark SDK entry points.
 * Each specifier is kept as a string literal (rather than a computed import)
 * so bundlers can statically resolve the candidates.
 * NOTE(review): SPARK_SDK_BARE_ENTRY ('@buildonspark/spark-sdk/bare') is not
 * handled here — confirm whether that npm entry should also be importable.
 * @throws LniError for unknown specifiers.
 */
async function importSparkSdkCandidate(specifier) {
    if (specifier === SPARK_SDK_PACKAGED_BARE_ENTRY) {
        return (await import('../vendor/spark-sdk-bare.js'));
    }
    if (specifier === SPARK_SDK_DEFAULT_ENTRY) {
        return (await import('@buildonspark/spark-sdk'));
    }
    if (specifier === SPARK_SDK_NATIVE_ENTRY) {
        return (await import('@buildonspark/spark-sdk/native'));
    }
    throw new LniError('InvalidInput', `Unsupported Spark SDK entry: ${specifier}`);
}
624
/**
 * Lightning node backend built on the Spark SDK with pure-TypeScript FROST
 * hooks. The SDK module and wallet are loaded lazily and memoized.
 */
export class SparkNode {
    config;
    // Memoized promise for the dynamically imported Spark SDK module.
    sdkPromise;
    // Memoized promise for the initialized wallet instance.
    walletPromise;
    // Set once the pure-TS SparkFrost hooks have been installed on the SDK.
    pureFrostInstalled = false;
629
+ constructor(config, _options = {}) {
630
+ this.config = config;
631
+ if (!config.mnemonic?.trim()) {
632
+ throw new LniError('InvalidInput', 'Spark mnemonic is required.');
633
+ }
634
+ }
635
/**
 * Lazily imports the Spark SDK, memoizing the promise. Candidate entries are
 * tried in order; the first that both imports and accepts the pure FROST hooks
 * wins. On total failure the memo is cleared so a later call can retry.
 */
async loadSdk() {
    if (this.sdkPromise) {
        return this.sdkPromise;
    }
    this.sdkPromise = (async () => {
        const entry = resolveEntry(this.config);
        // 'auto': under Node use the default entry; otherwise try the packaged
        // bare build first and fall back to the default entry.
        const candidates = entry === 'native'
            ? [SPARK_SDK_NATIVE_ENTRY]
            : entry === 'default'
                ? [SPARK_SDK_DEFAULT_ENTRY]
                : entry === 'bare'
                    ? [SPARK_SDK_PACKAGED_BARE_ENTRY]
                    : isNodeRuntime()
                        ? [SPARK_SDK_DEFAULT_ENTRY]
                        : [SPARK_SDK_PACKAGED_BARE_ENTRY, SPARK_SDK_DEFAULT_ENTRY];
        let lastError;
        for (const specifier of candidates) {
            try {
                const module = await importSparkSdkCandidate(specifier);
                await this.installPureSparkFrost(module);
                return module;
            }
            catch (error) {
                // Remember the failure and try the next candidate.
                lastError = error;
            }
        }
        // Clear the memo so the next call retries instead of caching the rejection.
        this.sdkPromise = undefined;
        throw new LniError('Api', `Failed to load Spark SDK entry (${candidates.join(', ')}): ${lastError?.message ?? 'unknown error'}`, { cause: lastError });
    })();
    return this.sdkPromise;
}
666
+ async installPureSparkFrost(module) {
667
+ if (this.pureFrostInstalled) {
668
+ return;
669
+ }
670
+ const applyPureMethods = (sparkFrost) => {
671
+ sparkFrost.signFrost = async (params) => pureSignFrost(params);
672
+ sparkFrost.aggregateFrost = async (params) => pureAggregateFrost(params);
673
+ sparkFrost.createDummyTx = pureCreateDummyTx;
674
+ sparkFrost.encryptEcies = pureEncryptEcies;
675
+ sparkFrost.decryptEcies = pureDecryptEcies;
676
+ return sparkFrost;
677
+ };
678
+ if (typeof module.setSparkFrostOnce === 'function' && typeof module.SparkFrostBase === 'function') {
679
+ const sparkFrost = applyPureMethods(new module.SparkFrostBase());
680
+ module.setSparkFrostOnce(sparkFrost);
681
+ this.pureFrostInstalled = true;
682
+ return;
683
+ }
684
+ if (typeof module.getSparkFrost !== 'function') {
685
+ throw new LniError('Api', 'Spark SDK entry does not expose SparkFrost hooks required for pure TypeScript mode.');
686
+ }
687
+ const sparkFrost = applyPureMethods(module.getSparkFrost());
688
+ this.pureFrostInstalled = true;
689
+ }
690
/**
 * Lazily initializes the Spark wallet, memoizing the promise. When a BIP-39
 * passphrase is configured, the seed is derived locally via mnemonicToSeedSync
 * and passed to the SDK instead of the raw mnemonic. A failed initialization
 * clears the memo so a later call can retry.
 */
async getWallet() {
    if (this.walletPromise) {
        return this.walletPromise;
    }
    this.walletPromise = (async () => {
        try {
            const sdk = await this.loadSdk();
            const mnemonic = this.config.mnemonic.trim();
            const mnemonicOrSeed = this.config.passphrase
                ? mnemonicToSeedSync(mnemonic, this.config.passphrase)
                : mnemonic;
            const init = await sdk.SparkWallet.initialize({
                mnemonicOrSeed,
                accountNumber: this.config.accountNumber,
                options: {
                    network: mapNetworkToSpark(this.config.network),
                    // Undefined entries are stripped so SDK defaults still apply;
                    // remaining user options can override the network by key.
                    ...removeUndefinedValues(this.config.sparkOptions ?? {}),
                },
            });
            return init.wallet;
        }
        catch (error) {
            // Clear the memo so the next call retries initialization.
            this.walletPromise = undefined;
            throw error;
        }
    })();
    return this.walletPromise;
}
718
+ async getInfo() {
719
+ const wallet = await this.getWallet();
720
+ const [balance, identityPublicKey] = await Promise.all([
721
+ wallet.getBalance(),
722
+ wallet.getIdentityPublicKey(),
723
+ ]);
724
+ const sendBalanceSats = await resolveWalletBalanceSats(wallet, balance);
725
+ return emptyNodeInfo({
726
+ alias: 'Spark Node',
727
+ pubkey: identityPublicKey,
728
+ network: this.config.network ?? 'mainnet',
729
+ sendBalanceMsat: sendBalanceSats * 1000,
730
+ });
731
+ }
732
/**
 * Creates a BOLT11 invoice via the Spark wallet.
 * Amounts are supplied in msats and converted to sats (floored, minimum 1 sat
 * when an amount is given; 0 requests an amountless invoice). Missing SDK
 * timestamps fall back to "now" and to the invoice's own expiry (default 3600s).
 * @throws LniError when a Bolt12 offer is requested (unsupported on Spark).
 */
async createInvoice(params) {
    const invoiceType = params.invoiceType ?? InvoiceType.Bolt11;
    if (invoiceType === InvoiceType.Bolt12) {
        throw new LniError('Api', 'Bolt12 offers are not implemented for SparkNode.');
    }
    const wallet = await this.getWallet();
    // msats -> sats; truthiness check means 0/undefined both yield an amountless invoice.
    const amountSats = params.amountMsats ? Math.max(1, Math.floor(params.amountMsats / 1000)) : 0;
    const now = Math.floor(Date.now() / 1000);
    const response = await wallet.createLightningInvoice({
        amountSats,
        memo: params.description,
        expirySeconds: params.expiry,
        descriptionHash: params.descriptionHash,
    });
    const invoiceObject = response.invoice;
    const invoice = invoiceObject?.encodedInvoice ?? '';
    // Prefer the SDK's payment hash; otherwise decode it from the invoice string.
    const paymentHash = invoiceObject?.paymentHash ?? extractPaymentHashFromInvoice(invoice);
    const createdAt = toUnixSecondsFromAny(response.createdAt) || now;
    const expirySeconds = params.expiry ?? (extractExpiryFromInvoice(invoice) || 3600);
    const expiresAt = toUnixSecondsFromAny(invoiceObject?.expiresAt) ||
        (createdAt + expirySeconds);
    return emptyTransaction({
        type: 'incoming',
        invoice,
        paymentHash,
        amountMsats: params.amountMsats ?? amountSats * 1000,
        createdAt,
        expiresAt,
        description: invoiceObject?.memo ?? params.description ?? '',
        descriptionHash: params.descriptionHash ?? '',
    });
}
764
    /**
     * Pay a Bolt11 invoice through the Spark wallet.
     *
     * Amountless invoices require a positive `params.amountMsats` (converted to
     * whole sats, minimum 1). If the SDK only *initiates* the payment (no
     * preimage in the immediate response), the request is polled for up to 60s
     * until a preimage appears or a terminal status is reached. Progress is
     * emitted via emitSparkDebugCheckpoint at each stage.
     *
     * @param {object} params - invoice (Bolt11 string), optional amountMsats,
     *   optional feeLimitMsat
     * @returns {Promise<object>} { paymentHash, preimage, feeMsats }
     * @throws {LniError} 'InvalidInput' for amountless invoices without an
     *   amount; 'Api' when a terminal failure status is observed; rethrows
     *   underlying wallet errors after emitting an error checkpoint.
     */
    async payInvoice(params) {
        const amountMsatsFromInvoice = extractAmountMsatsFromInvoice(params.invoice);
        const hasInvoiceAmount = amountMsatsFromInvoice !== undefined;
        const providedAmountMsats = params.amountMsats;
        const isAmountlessInvoice = !hasInvoiceAmount;
        emitSparkDebugCheckpoint('pay_invoice:start', {
            hasAmountMsats: providedAmountMsats !== undefined,
            hasAmountInInvoice: hasInvoiceAmount,
            isAmountlessInvoice,
            hasFeeLimitMsat: params.feeLimitMsat !== undefined,
            invoiceChars: params.invoice.length,
        });
        // An amountless invoice cannot be paid without an explicit caller amount.
        if (isAmountlessInvoice && (!providedAmountMsats || providedAmountMsats <= 0)) {
            throw new LniError('InvalidInput', 'Spark amountless invoice requires amountMsats.');
        }
        try {
            const wallet = await this.getWallet();
            emitSparkDebugCheckpoint('pay_invoice:wallet_ready');
            // Only amountless invoices get an explicit sat amount (min 1 sat);
            // otherwise the amount encoded in the invoice is used by the SDK.
            const amountSatsToSend = isAmountlessInvoice && providedAmountMsats
                ? Math.max(1, Math.floor(providedAmountMsats / 1000))
                : undefined;
            // Fee cap in sats: caller's feeLimitMsat rounded up, else the
            // configured default, else the module-level DEFAULT_MAX_FEE_SATS.
            const maxFeeSats = params.feeLimitMsat
                ? Math.max(1, Math.ceil(params.feeLimitMsat / 1000))
                : (this.config.defaultMaxFeeSats ?? DEFAULT_MAX_FEE_SATS);
            emitSparkDebugCheckpoint('pay_invoice:submit', {
                hasAmountSatsToSend: amountSatsToSend !== undefined,
                amountSource: providedAmountMsats !== undefined
                    ? 'params'
                    : 'none',
                maxFeeSats,
            });
            const response = await wallet.payLightningInvoice({
                invoice: params.invoice,
                maxFeeSats,
                amountSatsToSend,
                preferSpark: false,
            });
            const initResult = response;
            emitSparkDebugCheckpoint('pay_invoice:response_received', {
                status: initResult.status,
            });
            let preimage = typeof initResult.paymentPreimage === 'string' ? initResult.paymentPreimage : '';
            let fee = initResult.fee;
            // Poll for completion if payment was only initiated
            const requestId = initResult.id;
            if (!preimage && requestId && typeof wallet.getLightningSendRequest === 'function') {
                // Statuses after which polling stops; FAILED/RETURN variants are errors.
                const terminalStatuses = new Set([
                    'LIGHTNING_PAYMENT_SUCCEEDED',
                    'PREIMAGE_PROVIDED',
                    'TRANSFER_COMPLETED',
                    'LIGHTNING_PAYMENT_FAILED',
                    'TRANSFER_FAILED',
                    'USER_TRANSFER_VALIDATION_FAILED',
                    'PREIMAGE_PROVIDING_FAILED',
                    'USER_SWAP_RETURNED',
                    'USER_SWAP_RETURN_FAILED',
                ]);
                const maxPollMs = 60_000;
                const pollIntervalMs = 2_000;
                const startedAt = Date.now();
                while (Date.now() - startedAt < maxPollMs) {
                    await new Promise((r) => setTimeout(r, pollIntervalMs));
                    try {
                        const req = await wallet.getLightningSendRequest(requestId);
                        const status = req?.status ?? '';
                        emitSparkDebugCheckpoint('pay_invoice:poll', { status });
                        if (req?.paymentPreimage) {
                            // Success: capture preimage (and fee when reported) and stop.
                            preimage = req.paymentPreimage;
                            if (req.fee) {
                                fee = req.fee;
                            }
                            break;
                        }
                        if (terminalStatuses.has(status)) {
                            // Terminal failure aborts; terminal success without a
                            // preimage simply ends polling.
                            if (status.includes('FAILED') || status.includes('RETURN')) {
                                throw new LniError('Api', `Spark Lightning payment failed: ${status}`);
                            }
                            break;
                        }
                    }
                    catch (error) {
                        if (error instanceof LniError) {
                            throw error;
                        }
                        // Transient lookup failures are recorded and polling continues.
                        emitSparkDebugCheckpoint('pay_invoice:poll_error', {
                            reason: toDebugReason(error),
                        });
                    }
                }
            }
            let paymentHash = extractPaymentHashFromInvoice(params.invoice);
            if (!paymentHash && preimage) {
                // Fallback: derive the payment hash as sha256 of the preimage bytes.
                paymentHash = await sha256HexOfHexString(preimage);
            }
            emitSparkDebugCheckpoint('pay_invoice:complete', {
                hasPaymentHash: Boolean(paymentHash),
                hasPreimage: Boolean(preimage),
            });
            return {
                paymentHash,
                preimage,
                feeMsats: mapCurrencyAmountToMsats(fee),
            };
        }
        catch (error) {
            emitSparkDebugCheckpoint('pay_invoice:error', {
                reason: toDebugReason(error),
            });
            throw error;
        }
    }
875
    /**
     * Not supported: SparkNode has no Bolt12 offer support.
     * @throws {LniError} always ('Api')
     */
    async createOffer(_params) {
        throw new LniError('Api', 'Bolt12 offers are not implemented for SparkNode.');
    }
878
    /**
     * Not supported: SparkNode has no Bolt12 offer support.
     * @throws {LniError} always ('Api')
     */
    async getOffer(_search) {
        throw new LniError('Api', 'Bolt12 offers are not implemented for SparkNode.');
    }
881
    /**
     * Not supported: SparkNode has no Bolt12 offer support.
     * @throws {LniError} always ('Api')
     */
    async listOffers(_search) {
        throw new LniError('Api', 'Bolt12 offers are not implemented for SparkNode.');
    }
884
    /**
     * Not supported: SparkNode has no Bolt12 offer support.
     * @throws {LniError} always ('Api')
     */
    async payOffer(_offer, _amountMsats, _payerNote) {
        throw new LniError('Api', 'Bolt12 offers are not implemented for SparkNode.');
    }
887
+ async scanTransactions(params) {
888
+ const wallet = await this.getWallet();
889
+ const from = Math.max(0, params.from || 0);
890
+ const limit = params.limit > 0 ? params.limit : DEFAULT_SCAN_LIMIT;
891
+ const pageSize = Math.min(DEFAULT_PAGE_SIZE, Math.max(1, limit));
892
+ const results = [];
893
+ let offset = from;
894
+ let scanned = 0;
895
+ while (results.length < limit && scanned < DEFAULT_SCAN_LIMIT) {
896
+ const page = await wallet.getTransfers(pageSize, offset);
897
+ const transfers = Array.isArray(page.transfers) ? page.transfers : [];
898
+ if (!transfers.length) {
899
+ break;
900
+ }
901
+ for (const transfer of transfers) {
902
+ const tx = mapSparkTransferToTransaction(transfer);
903
+ if (params.paymentHash && tx.paymentHash !== params.paymentHash) {
904
+ continue;
905
+ }
906
+ if (!matchesSearch(tx, params.search)) {
907
+ continue;
908
+ }
909
+ results.push(tx);
910
+ if (results.length >= limit) {
911
+ break;
912
+ }
913
+ }
914
+ const nextOffset = numberFromUnknown(page.offset);
915
+ offset = nextOffset > offset ? nextOffset : offset + transfers.length;
916
+ scanned += transfers.length;
917
+ if (transfers.length < pageSize) {
918
+ break;
919
+ }
920
+ }
921
+ return results;
922
+ }
923
+ async lookupInvoice(params) {
924
+ if (!params.paymentHash && !params.search) {
925
+ throw new LniError('InvalidInput', 'lookupInvoice requires paymentHash or search for SparkNode.');
926
+ }
927
+ const txs = await this.scanTransactions({
928
+ from: 0,
929
+ limit: DEFAULT_SCAN_LIMIT,
930
+ paymentHash: params.paymentHash,
931
+ search: params.search,
932
+ });
933
+ const tx = txs[0];
934
+ if (!tx) {
935
+ throw new LniError('Api', `Invoice not found for SparkNode (paymentHash=${params.paymentHash ?? ''}, search=${params.search ?? ''}).`);
936
+ }
937
+ return tx;
938
+ }
939
+ async listTransactions(params) {
940
+ const txs = await this.scanTransactions({
941
+ from: params.from,
942
+ limit: params.limit > 0 ? params.limit : DEFAULT_SCAN_LIMIT,
943
+ paymentHash: params.paymentHash,
944
+ search: params.search,
945
+ });
946
+ return txs.sort((a, b) => b.createdAt - a.createdAt);
947
+ }
948
+ async decode(str) {
949
+ try {
950
+ return JSON.stringify(decodeBolt11(str));
951
+ }
952
+ catch {
953
+ return str;
954
+ }
955
+ }
956
+ async onInvoiceEvents(params, callback) {
957
+ await pollInvoiceEvents({
958
+ params,
959
+ callback,
960
+ lookup: () => this.lookupInvoice({
961
+ paymentHash: params.paymentHash,
962
+ search: params.search,
963
+ }),
964
+ });
965
+ }
966
+ async cleanupConnections() {
967
+ if (!this.walletPromise) {
968
+ return;
969
+ }
970
+ const wallet = await this.walletPromise;
971
+ await wallet.cleanupConnections?.();
972
+ }
973
+ }
974
+ //# sourceMappingURL=spark.js.map