bolt12-utils 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bech32.d.ts +31 -0
- package/dist/bech32.d.ts.map +1 -0
- package/dist/bech32.js +161 -0
- package/dist/bech32.js.map +1 -0
- package/dist/bigsize.d.ts +22 -0
- package/dist/bigsize.d.ts.map +1 -0
- package/dist/bigsize.js +87 -0
- package/dist/bigsize.js.map +1 -0
- package/dist/fields.d.ts +61 -0
- package/dist/fields.d.ts.map +1 -0
- package/dist/fields.js +99 -0
- package/dist/fields.js.map +1 -0
- package/dist/generated.d.ts +179 -0
- package/dist/generated.d.ts.map +1 -0
- package/dist/generated.js +565 -0
- package/dist/generated.js.map +1 -0
- package/dist/index.d.ts +47 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +125 -0
- package/dist/index.js.map +1 -0
- package/dist/merkle.d.ts +55 -0
- package/dist/merkle.d.ts.map +1 -0
- package/dist/merkle.js +144 -0
- package/dist/merkle.js.map +1 -0
- package/dist/offer.d.ts +45 -0
- package/dist/offer.d.ts.map +1 -0
- package/dist/offer.js +288 -0
- package/dist/offer.js.map +1 -0
- package/dist/payer_proof.d.ts +89 -0
- package/dist/payer_proof.d.ts.map +1 -0
- package/dist/payer_proof.js +576 -0
- package/dist/payer_proof.js.map +1 -0
- package/dist/tlv.d.ts +26 -0
- package/dist/tlv.d.ts.map +1 -0
- package/dist/tlv.js +65 -0
- package/dist/tlv.js.map +1 -0
- package/dist/utils.d.ts +12 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +52 -0
- package/dist/utils.js.map +1 -0
- package/package.json +47 -0
- package/src/bech32.ts +187 -0
- package/src/bigsize.ts +97 -0
- package/src/fields.ts +147 -0
- package/src/generated.ts +697 -0
- package/src/index.ts +132 -0
- package/src/merkle.ts +163 -0
- package/src/offer.ts +328 -0
- package/src/payer_proof.ts +727 -0
- package/src/tlv.ts +75 -0
- package/src/utils.ts +49 -0
|
@@ -0,0 +1,727 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BOLT12 Payer Proof (experimental, PR #1295).
|
|
3
|
+
*
|
|
4
|
+
* A payer proof is a proof of invoice payment, encoded with the "lnp" prefix.
|
|
5
|
+
* It contains a subset of the invoice's TLV fields, allowing the payer to
|
|
6
|
+
* prove they paid a specific invoice while selectively disclosing only
|
|
7
|
+
* certain fields for privacy.
|
|
8
|
+
*
|
|
9
|
+
* New TLV types:
|
|
10
|
+
* 242 - preimage (32-byte payment preimage)
|
|
11
|
+
* 244 - omitted_tlvs (array of bigsize marker numbers)
|
|
12
|
+
* 246 - missing_hashes (array of sha256 hashes for merkle reconstruction)
|
|
13
|
+
* 248 - leaf_hashes (array of sha256 nonce hashes for included TLVs)
|
|
14
|
+
* 250 - payer_signature (bip340sig + optional UTF-8 note)
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import { sha256 } from '@noble/hashes/sha2';
|
|
18
|
+
import { concatBytes } from '@noble/hashes/utils';
|
|
19
|
+
import { schnorr } from '@noble/curves/secp256k1';
|
|
20
|
+
import type { TlvRecord } from './tlv.js';
|
|
21
|
+
import { parseTlvStream, serializeTlvRecord } from './tlv.js';
|
|
22
|
+
import { readBigSize, writeBigSize } from './bigsize.js';
|
|
23
|
+
import {
|
|
24
|
+
taggedHash,
|
|
25
|
+
tlvToBytes,
|
|
26
|
+
branchHash,
|
|
27
|
+
computePerTlvBranches,
|
|
28
|
+
computeMerkleRoot,
|
|
29
|
+
} from './merkle.js';
|
|
30
|
+
import { encodeBolt12 } from './bech32.js';
|
|
31
|
+
import { compareBytes, isSignatureType, toHex } from './utils.js';
|
|
32
|
+
|
|
33
|
+
// Shared UTF-8 encoder for tag strings and payer notes.
const encoder = new TextEncoder();

// Payer proof specific TLV types (see header comment / PR #1295)
export const PP_PREIMAGE = 242n;
export const PP_OMITTED_TLVS = 244n;
export const PP_MISSING_HASHES = 246n;
export const PP_LEAF_HASHES = 248n;
export const PP_PAYER_SIGNATURE = 250n;

// Invoice field types needed for validation
const INVREQ_METADATA = 0n;
const INVREQ_PAYER_ID = 88n;
const INVOICE_PAYMENT_HASH = 168n;
const INVOICE_NODE_ID = 176n;
const SIGNATURE = 240n;

// Required TLV types that must always be included in a payer proof
const REQUIRED_TYPES = new Set([INVREQ_PAYER_ID, INVOICE_PAYMENT_HASH, INVOICE_NODE_ID]);
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Tagged hash using a pre-computed tag hash.
|
|
54
|
+
*/
|
|
55
|
+
function taggedHashWithPrecomputedTag(tagHash: Uint8Array, msg: Uint8Array): Uint8Array {
|
|
56
|
+
return sha256(concatBytes(tagHash, tagHash, msg));
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Parse an array of BigSize values from a byte buffer.
|
|
61
|
+
*/
|
|
62
|
+
function parseBigSizeArray(data: Uint8Array): bigint[] {
|
|
63
|
+
const result: bigint[] = [];
|
|
64
|
+
let offset = 0;
|
|
65
|
+
while (offset < data.length) {
|
|
66
|
+
const { value, bytesRead } = readBigSize(data, offset);
|
|
67
|
+
result.push(value);
|
|
68
|
+
offset += bytesRead;
|
|
69
|
+
}
|
|
70
|
+
return result;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* Parse an array of 32-byte SHA256 hashes from a byte buffer.
|
|
75
|
+
*/
|
|
76
|
+
function parseSha256Array(data: Uint8Array): Uint8Array[] {
|
|
77
|
+
if (data.length % 32 !== 0) {
|
|
78
|
+
throw new Error('Hash array length must be a multiple of 32');
|
|
79
|
+
}
|
|
80
|
+
const result: Uint8Array[] = [];
|
|
81
|
+
for (let i = 0; i < data.length; i += 32) {
|
|
82
|
+
result.push(data.slice(i, i + 32));
|
|
83
|
+
}
|
|
84
|
+
return result;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
/**
|
|
88
|
+
* Compute the leaf+nonce branch for a TLV record given its nonce hash.
|
|
89
|
+
*/
|
|
90
|
+
function leafBranch(tlvBytes: Uint8Array, nonceHash: Uint8Array): Uint8Array {
|
|
91
|
+
const leafTag = encoder.encode('LnLeaf');
|
|
92
|
+
const leaf = taggedHash(leafTag, tlvBytes);
|
|
93
|
+
return branchHash(leaf, nonceHash);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
/**
 * Decoded and validated fields of a payer proof TLV stream,
 * as produced by {@link parsePayerProof}.
 */
export interface PayerProofFields {
  /** Included TLV records (non-signature, non-payer-proof-specific) */
  includedRecords: TlvRecord[];
  /** The invoice signature (type 240) */
  signature: Uint8Array;
  /** Payment preimage (type 242, 32 bytes) */
  preimage: Uint8Array | undefined;
  /** Marker numbers for omitted TLVs (type 244) */
  omittedTlvs: bigint[];
  /** Missing merkle branch hashes (type 246) */
  missingHashes: Uint8Array[];
  /** Nonce hashes for included non-signature TLVs (type 248) */
  leafHashes: Uint8Array[];
  /** Payer signature (type 250) */
  payerSignature: Uint8Array;
  /** Optional note from payer_signature (type 250) */
  payerNote: string;
  /** invoice_payment_hash for preimage verification */
  invoicePaymentHash: Uint8Array;
  /** invoice_node_id for signature verification */
  invoiceNodeId: Uint8Array;
  /** invreq_payer_id for payer_signature verification */
  payerId: Uint8Array;
}
|
|
120
|
+
|
|
121
|
+
/**
|
|
122
|
+
* Parse and validate a payer proof's TLV records.
|
|
123
|
+
*/
|
|
124
|
+
export function parsePayerProof(records: TlvRecord[]): PayerProofFields {
|
|
125
|
+
const includedRecords: TlvRecord[] = [];
|
|
126
|
+
let signature: Uint8Array | null = null;
|
|
127
|
+
let preimage: Uint8Array | null = null;
|
|
128
|
+
let omittedTlvsRaw: Uint8Array | null = null;
|
|
129
|
+
let missingHashesRaw: Uint8Array | null = null;
|
|
130
|
+
let leafHashesRaw: Uint8Array | null = null;
|
|
131
|
+
let payerSignatureRaw: Uint8Array | null = null;
|
|
132
|
+
let invoicePaymentHash: Uint8Array | null = null;
|
|
133
|
+
let invoiceNodeId: Uint8Array | null = null;
|
|
134
|
+
let payerId: Uint8Array | null = null;
|
|
135
|
+
|
|
136
|
+
for (const record of records) {
|
|
137
|
+
const type = record.type;
|
|
138
|
+
|
|
139
|
+
// invreq_metadata (type 0) MUST NOT be included
|
|
140
|
+
if (type === INVREQ_METADATA) {
|
|
141
|
+
throw new Error('Payer proof MUST NOT include invreq_metadata (type 0)');
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
// Track required fields
|
|
145
|
+
if (type === INVREQ_PAYER_ID) {
|
|
146
|
+
payerId = record.value;
|
|
147
|
+
} else if (type === INVOICE_PAYMENT_HASH) {
|
|
148
|
+
invoicePaymentHash = record.value;
|
|
149
|
+
} else if (type === INVOICE_NODE_ID) {
|
|
150
|
+
invoiceNodeId = record.value;
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
// Handle payer-proof-specific fields
|
|
154
|
+
if (type === SIGNATURE) {
|
|
155
|
+
if (record.value.length !== 64) {
|
|
156
|
+
throw new Error('Invalid signature: expected 64 bytes');
|
|
157
|
+
}
|
|
158
|
+
signature = record.value;
|
|
159
|
+
} else if (type === PP_PREIMAGE) {
|
|
160
|
+
if (record.value.length !== 32) {
|
|
161
|
+
throw new Error('Invalid preimage: expected 32 bytes');
|
|
162
|
+
}
|
|
163
|
+
preimage = record.value;
|
|
164
|
+
} else if (type === PP_OMITTED_TLVS) {
|
|
165
|
+
omittedTlvsRaw = record.value;
|
|
166
|
+
} else if (type === PP_MISSING_HASHES) {
|
|
167
|
+
missingHashesRaw = record.value;
|
|
168
|
+
} else if (type === PP_LEAF_HASHES) {
|
|
169
|
+
leafHashesRaw = record.value;
|
|
170
|
+
} else if (type === PP_PAYER_SIGNATURE) {
|
|
171
|
+
if (record.value.length < 64) {
|
|
172
|
+
throw new Error('Invalid payer_signature: expected at least 64 bytes');
|
|
173
|
+
}
|
|
174
|
+
payerSignatureRaw = record.value;
|
|
175
|
+
} else if (!isSignatureType(type)) {
|
|
176
|
+
// Non-signature, non-payer-proof field -> included invoice record
|
|
177
|
+
includedRecords.push(record);
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
// Required fields check
|
|
182
|
+
if (!payerId) {
|
|
183
|
+
throw new Error('Missing invreq_payer_id');
|
|
184
|
+
}
|
|
185
|
+
if (!invoicePaymentHash) {
|
|
186
|
+
throw new Error('Missing invoice_payment_hash');
|
|
187
|
+
}
|
|
188
|
+
if (!invoiceNodeId) {
|
|
189
|
+
throw new Error('Missing invoice_node_id');
|
|
190
|
+
}
|
|
191
|
+
if (!signature) {
|
|
192
|
+
throw new Error('Missing signature');
|
|
193
|
+
}
|
|
194
|
+
if (!payerSignatureRaw) {
|
|
195
|
+
throw new Error('Missing payer_signature');
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
// Parse omitted_tlvs
|
|
199
|
+
const omittedTlvs = omittedTlvsRaw ? parseBigSizeArray(omittedTlvsRaw) : [];
|
|
200
|
+
|
|
201
|
+
// Validate omitted_tlvs
|
|
202
|
+
validateOmittedTlvs(omittedTlvs, includedRecords);
|
|
203
|
+
|
|
204
|
+
// Parse missing_hashes
|
|
205
|
+
const missingHashes = missingHashesRaw ? parseSha256Array(missingHashesRaw) : [];
|
|
206
|
+
|
|
207
|
+
// Parse leaf_hashes
|
|
208
|
+
const leafHashes = leafHashesRaw ? parseSha256Array(leafHashesRaw) : [];
|
|
209
|
+
|
|
210
|
+
// Validate leaf_hashes count matches included non-signature TLVs
|
|
211
|
+
if (leafHashes.length !== includedRecords.length) {
|
|
212
|
+
throw new Error(
|
|
213
|
+
`leaf_hashes count (${leafHashes.length}) must match included non-signature TLV count (${includedRecords.length})`
|
|
214
|
+
);
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
// Validate preimage matches payment hash
|
|
218
|
+
if (preimage) {
|
|
219
|
+
const computedHash = sha256(preimage);
|
|
220
|
+
if (toHex(computedHash) !== toHex(invoicePaymentHash)) {
|
|
221
|
+
throw new Error('SHA256(preimage) does not match invoice_payment_hash');
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
// Extract payer signature and optional note
|
|
226
|
+
const payerSignature = payerSignatureRaw.slice(0, 64);
|
|
227
|
+
const payerNoteBytes = payerSignatureRaw.slice(64);
|
|
228
|
+
let payerNote = '';
|
|
229
|
+
if (payerNoteBytes.length > 0) {
|
|
230
|
+
const decoder = new TextDecoder('utf-8', { fatal: true });
|
|
231
|
+
try {
|
|
232
|
+
payerNote = decoder.decode(payerNoteBytes);
|
|
233
|
+
} catch {
|
|
234
|
+
throw new Error('Invalid UTF-8 in payer_signature note');
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
return {
|
|
239
|
+
includedRecords,
|
|
240
|
+
signature,
|
|
241
|
+
preimage: preimage || undefined,
|
|
242
|
+
omittedTlvs,
|
|
243
|
+
missingHashes,
|
|
244
|
+
leafHashes,
|
|
245
|
+
payerSignature,
|
|
246
|
+
payerNote,
|
|
247
|
+
invoicePaymentHash,
|
|
248
|
+
invoiceNodeId,
|
|
249
|
+
payerId,
|
|
250
|
+
};
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
/**
|
|
254
|
+
* Validate the omitted_tlvs array.
|
|
255
|
+
*/
|
|
256
|
+
function validateOmittedTlvs(omittedTlvs: bigint[], includedRecords: TlvRecord[]): void {
|
|
257
|
+
// Must be in strict ascending order (no duplicates)
|
|
258
|
+
for (let i = 1; i < omittedTlvs.length; i++) {
|
|
259
|
+
if (omittedTlvs[i] <= omittedTlvs[i - 1]) {
|
|
260
|
+
throw new Error('omitted_tlvs must be in strict ascending order');
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
// Must not contain 0
|
|
265
|
+
if (omittedTlvs.includes(0n)) {
|
|
266
|
+
throw new Error('omitted_tlvs must not contain 0');
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
// Must not contain signature type numbers (240-1000)
|
|
270
|
+
for (const marker of omittedTlvs) {
|
|
271
|
+
if (isSignatureType(marker)) {
|
|
272
|
+
throw new Error(`omitted_tlvs must not contain signature type number ${marker}`);
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// Must not contain the type number of any included TLV field
|
|
277
|
+
const includedTypes = new Set(includedRecords.map(r => r.type));
|
|
278
|
+
for (const marker of omittedTlvs) {
|
|
279
|
+
if (includedTypes.has(marker)) {
|
|
280
|
+
throw new Error(`omitted_tlvs must not contain included TLV type ${marker}`);
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
// Must not contain more than one number larger than the largest included non-signature TLV
|
|
285
|
+
if (includedRecords.length > 0) {
|
|
286
|
+
const maxIncluded = includedRecords[includedRecords.length - 1].type;
|
|
287
|
+
const largerMarkers = omittedTlvs.filter(m => m > maxIncluded);
|
|
288
|
+
if (largerMarkers.length > 1) {
|
|
289
|
+
throw new Error('omitted_tlvs must not contain more than one marker larger than the largest included TLV');
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
/**
|
|
295
|
+
* Largest power of 2 less than n. Used for recursive tree splitting.
|
|
296
|
+
*/
|
|
297
|
+
function largestPow2LessThan(n: number): number {
|
|
298
|
+
let p = 1;
|
|
299
|
+
while (p * 2 < n) p *= 2;
|
|
300
|
+
return p;
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
/**
 * A merkle tree node tagged with whether its hash is available.
 * During verification, unknown nodes carry an empty placeholder hash;
 * during creation, `isKnown` marks whether the corresponding TLV is
 * included in the proof (all hashes are computable on the creator side).
 */
interface MerkleNode {
  hash: Uint8Array;
  isKnown: boolean;
}
|
|
307
|
+
|
|
308
|
+
/**
|
|
309
|
+
* Check if all nodes in a slice are unknown.
|
|
310
|
+
*/
|
|
311
|
+
function allUnknown(nodes: MerkleNode[]): boolean {
|
|
312
|
+
return nodes.every(n => !n.isKnown);
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
/**
 * Recursively build merkle tree (DFS top-down order), pulling missing hashes.
 *
 * Before recursing into a child, checks if it's entirely unknown. If so,
 * pulls its subtree hash from missing_hashes without recursing. This ensures
 * hashes are consumed in the same order they were produced.
 *
 * @param nodes - Leaf-level nodes of this subtree (known or placeholder).
 * @param missingHashes - Subtree hashes for fully-unknown regions, in DFS order.
 * @param missingIdx - Mutable cursor into missingHashes, shared across the
 *   whole recursion so consumption order is global.
 * @returns This subtree's root; isKnown=false only when every node beneath
 *   it was unknown (the caller then pulls one hash covering it).
 * @throws Error when missingHashes is exhausted before the tree completes.
 */
function rebuildTreeRecursive(
  nodes: MerkleNode[],
  missingHashes: Uint8Array[],
  missingIdx: { value: number },
): MerkleNode {
  if (nodes.length === 1) {
    return nodes[0];
  }

  // Split at the largest power of two below the node count.
  const split = largestPow2LessThan(nodes.length);
  const leftNodes = nodes.slice(0, split);
  const rightNodes = nodes.slice(split);
  const leftAllUnknown = allUnknown(leftNodes);
  const rightAllUnknown = allUnknown(rightNodes);

  // Whole subtree unknown: defer to the parent, which consumes one hash
  // covering this entire region.
  if (leftAllUnknown && rightAllUnknown) {
    return { hash: new Uint8Array(0), isKnown: false };
  }

  if (leftAllUnknown) {
    // Consume the left subtree's hash BEFORE recursing right — this matches
    // the emission order on the creator side (collectMissingRecursive).
    if (missingIdx.value >= missingHashes.length) {
      throw new Error('Not enough missing_hashes to reconstruct merkle tree');
    }
    const leftHash = missingHashes[missingIdx.value++];
    const right = rebuildTreeRecursive(rightNodes, missingHashes, missingIdx);
    return { hash: branchHash(leftHash, right.hash), isKnown: true };
  }

  if (rightAllUnknown) {
    // Recurse left first, THEN consume the right subtree's hash — again
    // mirroring the creator's emission order.
    const left = rebuildTreeRecursive(leftNodes, missingHashes, missingIdx);
    if (missingIdx.value >= missingHashes.length) {
      throw new Error('Not enough missing_hashes to reconstruct merkle tree');
    }
    const rightHash = missingHashes[missingIdx.value++];
    return { hash: branchHash(left.hash, rightHash), isKnown: true };
  }

  // Both sides contain known leaves: plain recursive descent.
  const left = rebuildTreeRecursive(leftNodes, missingHashes, missingIdx);
  const right = rebuildTreeRecursive(rightNodes, missingHashes, missingIdx);
  return { hash: branchHash(left.hash, right.hash), isKnown: true };
}
|
|
363
|
+
|
|
364
|
+
/**
|
|
365
|
+
* Reconstruct the Merkle root from a payer proof.
|
|
366
|
+
*
|
|
367
|
+
* The tree has N positions: 1 implicit (type 0) + omitted markers + included records.
|
|
368
|
+
* Uses recursive DFS tree building to consume missing_hashes in the correct order.
|
|
369
|
+
*/
|
|
370
|
+
export function reconstructMerkleRoot(proof: PayerProofFields): Uint8Array {
|
|
371
|
+
// Build ordered list: implicit type 0 + interleaved included/omitted
|
|
372
|
+
const allNodes: MerkleNode[] = [];
|
|
373
|
+
let includedIdx = 0;
|
|
374
|
+
let omittedIdx = 0;
|
|
375
|
+
|
|
376
|
+
// Position 0: implicit type 0 (always omitted)
|
|
377
|
+
allNodes.push({ hash: new Uint8Array(0), isKnown: false });
|
|
378
|
+
|
|
379
|
+
// Merge included records and omitted markers in ascending order
|
|
380
|
+
while (includedIdx < proof.includedRecords.length || omittedIdx < proof.omittedTlvs.length) {
|
|
381
|
+
const includedType = includedIdx < proof.includedRecords.length
|
|
382
|
+
? proof.includedRecords[includedIdx].type : BigInt(Number.MAX_SAFE_INTEGER);
|
|
383
|
+
const omittedMarker = omittedIdx < proof.omittedTlvs.length
|
|
384
|
+
? proof.omittedTlvs[omittedIdx] : BigInt(Number.MAX_SAFE_INTEGER);
|
|
385
|
+
|
|
386
|
+
if (includedType < omittedMarker) {
|
|
387
|
+
const record = proof.includedRecords[includedIdx];
|
|
388
|
+
const nonceHash = proof.leafHashes[includedIdx];
|
|
389
|
+
const hash = leafBranch(tlvToBytes(record), nonceHash);
|
|
390
|
+
allNodes.push({ hash, isKnown: true });
|
|
391
|
+
includedIdx++;
|
|
392
|
+
} else {
|
|
393
|
+
allNodes.push({ hash: new Uint8Array(0), isKnown: false });
|
|
394
|
+
omittedIdx++;
|
|
395
|
+
}
|
|
396
|
+
}
|
|
397
|
+
|
|
398
|
+
const missingIdx = { value: 0 };
|
|
399
|
+
const root = rebuildTreeRecursive(allNodes, proof.missingHashes, missingIdx);
|
|
400
|
+
|
|
401
|
+
if (missingIdx.value !== proof.missingHashes.length) {
|
|
402
|
+
throw new Error(
|
|
403
|
+
`Excess missing_hashes: used ${missingIdx.value} of ${proof.missingHashes.length}`
|
|
404
|
+
);
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
if (!root.isKnown) {
|
|
408
|
+
throw new Error('Failed to reconstruct merkle root');
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
return root.hash;
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
/**
|
|
415
|
+
* Verify a payer proof's signatures.
|
|
416
|
+
*/
|
|
417
|
+
export function verifyPayerProof(proof: PayerProofFields): {
|
|
418
|
+
valid: boolean;
|
|
419
|
+
merkleRoot: Uint8Array;
|
|
420
|
+
error?: string;
|
|
421
|
+
} {
|
|
422
|
+
try {
|
|
423
|
+
const merkleRoot = reconstructMerkleRoot(proof);
|
|
424
|
+
|
|
425
|
+
// Verify invoice signature: tag = "lightninginvoicesignature"
|
|
426
|
+
const invoiceSigTag = encoder.encode('lightninginvoicesignature');
|
|
427
|
+
const invoiceSigMsg = taggedHash(invoiceSigTag, merkleRoot);
|
|
428
|
+
const nodeIdX = proof.invoiceNodeId.length === 33
|
|
429
|
+
? proof.invoiceNodeId.slice(1)
|
|
430
|
+
: proof.invoiceNodeId;
|
|
431
|
+
|
|
432
|
+
const invoiceSigValid = schnorr.verify(proof.signature, invoiceSigMsg, nodeIdX);
|
|
433
|
+
if (!invoiceSigValid) {
|
|
434
|
+
return { valid: false, merkleRoot, error: 'Invalid invoice signature' };
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
// Verify payer_signature: SIG(tag, msg, key)
|
|
438
|
+
// tag = "lightningpayer_proofpayer_signature"
|
|
439
|
+
// msg = SHA256(note || merkle-root)
|
|
440
|
+
const noteBytes = encoder.encode(proof.payerNote);
|
|
441
|
+
const payerRawMsg = sha256(concatBytes(noteBytes, merkleRoot));
|
|
442
|
+
const payerSigTag = encoder.encode('lightningpayer_proofpayer_signature');
|
|
443
|
+
const payerMsg = taggedHash(payerSigTag, payerRawMsg);
|
|
444
|
+
const payerIdX = proof.payerId.length === 33
|
|
445
|
+
? proof.payerId.slice(1)
|
|
446
|
+
: proof.payerId;
|
|
447
|
+
|
|
448
|
+
const payerSigValid = schnorr.verify(proof.payerSignature, payerMsg, payerIdX);
|
|
449
|
+
if (!payerSigValid) {
|
|
450
|
+
return { valid: false, merkleRoot, error: 'Invalid payer signature' };
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
return { valid: true, merkleRoot };
|
|
454
|
+
} catch (e: unknown) {
|
|
455
|
+
const message = e instanceof Error ? e.message : String(e);
|
|
456
|
+
return { valid: false, merkleRoot: new Uint8Array(32), error: message };
|
|
457
|
+
}
|
|
458
|
+
}
|
|
459
|
+
|
|
460
|
+
// ---- Creation ----

/**
 * Inputs for {@link createPayerProof}.
 */
export interface CreatePayerProofParams {
  /** Hex-encoded invoice TLV stream */
  invoiceHex: string;
  /** Hex-encoded 32-byte payment preimage */
  preimageHex: string;
  /** Hex-encoded 32-byte payer secret key (for BIP-340 signing) */
  payerSecretKeyHex: string;
  /** Additional TLV types to include beyond the required ones */
  includedTlvTypes?: number[];
  /** Optional payer note (UTF-8) */
  note?: string;
}
|
|
474
|
+
|
|
475
|
+
/**
 * Output of {@link createPayerProof}.
 */
export interface CreatePayerProofResult {
  /** Hex-encoded proof TLV stream */
  proofHex: string;
  /** Bech32-encoded proof with "lnp" prefix */
  proofBech32: string;
  /** 32-byte merkle root */
  merkleRoot: Uint8Array;
}
|
|
483
|
+
|
|
484
|
+
/**
|
|
485
|
+
* Compute omitted TLV markers for the payer proof.
|
|
486
|
+
*
|
|
487
|
+
* Markers assign minimal values to omitted positions:
|
|
488
|
+
* - Type 0 is always implicit (no marker)
|
|
489
|
+
* - Before first included type: markers start at 1 and increment
|
|
490
|
+
* - After an included type: markers start at included_type+1 and increment
|
|
491
|
+
*/
|
|
492
|
+
function computeOmittedMarkers(
|
|
493
|
+
nonSigTypes: bigint[],
|
|
494
|
+
includedTypes: Set<bigint>,
|
|
495
|
+
): bigint[] {
|
|
496
|
+
const markers: bigint[] = [];
|
|
497
|
+
let nextMarker = 1n;
|
|
498
|
+
|
|
499
|
+
for (const type of nonSigTypes) {
|
|
500
|
+
if (type === 0n) {
|
|
501
|
+
continue; // implicit
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
if (includedTypes.has(type)) {
|
|
505
|
+
nextMarker = type + 1n;
|
|
506
|
+
} else {
|
|
507
|
+
markers.push(nextMarker);
|
|
508
|
+
nextMarker++;
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
return markers;
|
|
513
|
+
}
|
|
514
|
+
|
|
515
|
+
/**
|
|
516
|
+
* Compute subtree hash for a fully-unknown subtree (creator side).
|
|
517
|
+
*/
|
|
518
|
+
function computeSubtreeHash(nodes: MerkleNode[]): Uint8Array {
|
|
519
|
+
if (nodes.length === 1) {
|
|
520
|
+
return nodes[0].hash;
|
|
521
|
+
}
|
|
522
|
+
const split = largestPow2LessThan(nodes.length);
|
|
523
|
+
const left = computeSubtreeHash(nodes.slice(0, split));
|
|
524
|
+
const right = computeSubtreeHash(nodes.slice(split));
|
|
525
|
+
return branchHash(left, right);
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
/**
 * Recursively compute missing hashes in DFS top-down order.
 * Mirrors the verifier's recursive tree reconstruction: before recursing
 * into a child, checks if it's entirely unknown and pushes its hash directly.
 *
 * @param nodes - Leaf-level nodes; isKnown marks TLVs included in the proof.
 * @param missing - Output array; fully-unknown subtree hashes are appended
 *   in the exact order rebuildTreeRecursive will consume them.
 * @returns The subtree's root node (hash always computable on this side).
 */
function collectMissingRecursive(
  nodes: MerkleNode[],
  missing: Uint8Array[],
): MerkleNode {
  if (nodes.length === 1) {
    return nodes[0];
  }

  // Same power-of-two split as the verifier.
  const split = largestPow2LessThan(nodes.length);
  const leftNodes = nodes.slice(0, split);
  const rightNodes = nodes.slice(split);
  const leftAllUnknown = allUnknown(leftNodes);
  const rightAllUnknown = allUnknown(rightNodes);

  if (leftAllUnknown && rightAllUnknown) {
    // Entire subtree omitted: compute its hash but do NOT emit it here —
    // the parent emits a single hash covering the whole region.
    const hash = branchHash(computeSubtreeHash(leftNodes), computeSubtreeHash(rightNodes));
    return { hash, isKnown: false };
  }

  if (leftAllUnknown) {
    // Emit the left hash before descending right (verifier consumes it first).
    const leftHash = computeSubtreeHash(leftNodes);
    missing.push(leftHash);
    const right = collectMissingRecursive(rightNodes, missing);
    return { hash: branchHash(leftHash, right.hash), isKnown: true };
  }

  if (rightAllUnknown) {
    // Descend left first, then emit the right subtree's hash.
    const left = collectMissingRecursive(leftNodes, missing);
    const rightHash = computeSubtreeHash(rightNodes);
    missing.push(rightHash);
    return { hash: branchHash(left.hash, rightHash), isKnown: true };
  }

  // Both sides contain included leaves: plain recursive descent.
  const left = collectMissingRecursive(leftNodes, missing);
  const right = collectMissingRecursive(rightNodes, missing);
  return { hash: branchHash(left.hash, right.hash), isKnown: true };
}
|
|
570
|
+
|
|
571
|
+
/**
|
|
572
|
+
* Compute the missing hashes needed for merkle tree reconstruction.
|
|
573
|
+
* Uses recursive DFS to produce hashes in the same order the verifier consumes them.
|
|
574
|
+
*/
|
|
575
|
+
function computeMissingHashesForProof(
|
|
576
|
+
allBranches: Uint8Array[],
|
|
577
|
+
isIncluded: boolean[],
|
|
578
|
+
): Uint8Array[] {
|
|
579
|
+
const nodes: MerkleNode[] = allBranches.map((hash, i) => ({
|
|
580
|
+
hash,
|
|
581
|
+
isKnown: isIncluded[i],
|
|
582
|
+
}));
|
|
583
|
+
|
|
584
|
+
const missing: Uint8Array[] = [];
|
|
585
|
+
collectMissingRecursive(nodes, missing);
|
|
586
|
+
return missing;
|
|
587
|
+
}
|
|
588
|
+
|
|
589
|
+
function fromHex(hex: string): Uint8Array {
|
|
590
|
+
const bytes = new Uint8Array(hex.length / 2);
|
|
591
|
+
for (let i = 0; i < hex.length; i += 2) {
|
|
592
|
+
bytes[i / 2] = parseInt(hex.substring(i, i + 2), 16);
|
|
593
|
+
}
|
|
594
|
+
return bytes;
|
|
595
|
+
}
|
|
596
|
+
|
|
597
|
+
function makeTlv(type: bigint, value: Uint8Array): TlvRecord {
|
|
598
|
+
return { type, length: BigInt(value.length), value };
|
|
599
|
+
}
|
|
600
|
+
|
|
601
|
+
/**
 * Create a BOLT12 payer proof from an invoice, preimage, and payer secret key.
 *
 * Steps:
 *  1. Parse the invoice TLV stream; require a signature (240) and
 *     invoice_payment_hash (168), and verify SHA256(preimage) matches.
 *  2. Compute the invoice merkle root and per-TLV branch hashes.
 *  3. Select included fields: the required set, any caller-supplied
 *     `includedTlvTypes`, plus invoice_features (174) when present.
 *  4. Derive omitted markers, nonce (leaf) hashes for included TLVs, and
 *     the missing hashes for the omitted regions.
 *  5. Sign SHA256(note || merkle_root) under the
 *     "lightningpayer_proofpayer_signature" tag with the payer key.
 *  6. Assemble, sort, and serialize the proof TLV records.
 *
 * @param params - Invoice hex, preimage hex, payer secret key hex, optional
 *   extra TLV types to disclose, and an optional UTF-8 note.
 * @returns Hex and bech32 ("lnp") encodings of the proof plus the merkle root.
 * @throws Error when the invoice lacks a signature or payment hash, or the
 *   preimage does not hash to invoice_payment_hash.
 */
export function createPayerProof(params: CreatePayerProofParams): CreatePayerProofResult {
  const invoiceBytes = fromHex(params.invoiceHex);
  const preimage = fromHex(params.preimageHex);
  const payerSecretKey = fromHex(params.payerSecretKeyHex);

  // Parse invoice TLV records
  const invoiceRecords = parseTlvStream(invoiceBytes);

  // Find required fields
  const nonSigRecords = invoiceRecords.filter(r => !isSignatureType(r.type));
  const sigRecord = invoiceRecords.find(r => r.type === SIGNATURE);
  if (!sigRecord) {
    throw new Error('Invoice missing signature (type 240)');
  }

  const paymentHashRecord = nonSigRecords.find(r => r.type === INVOICE_PAYMENT_HASH);
  if (!paymentHashRecord) {
    throw new Error('Invoice missing payment_hash (type 168)');
  }

  // Verify preimage hashes to the invoice's payment hash (byte-wise compare)
  const computedHash = sha256(preimage);
  const paymentHash = paymentHashRecord.value;
  if (computedHash.length !== paymentHash.length ||
      !computedHash.every((b, i) => b === paymentHash[i])) {
    throw new Error('SHA256(preimage) does not match invoice_payment_hash');
  }

  // Compute merkle root from invoice
  const merkleRoot = computeMerkleRoot(invoiceRecords);

  // Determine included types: required set + caller extras + invoice_features
  // (174) when the invoice carries it
  const INVOICE_FEATURES = 174n;
  const additionalTypes = new Set((params.includedTlvTypes || []).map(BigInt));
  const hasInvoiceFeatures = nonSigRecords.some(r => r.type === INVOICE_FEATURES);
  const includedTypes = new Set([
    ...REQUIRED_TYPES,
    ...additionalTypes,
    ...(hasInvoiceFeatures ? [INVOICE_FEATURES] : []),
  ]);

  // All non-signature types present in the invoice, in stream order
  const nonSigTypes = nonSigRecords.map(r => r.type);

  // Compute per-TLV branches
  const { branches, nonceTagHash } = computePerTlvBranches(invoiceRecords);

  // Build isIncluded array (type 0 is always NOT included)
  const isIncluded = nonSigRecords.map(r => r.type !== 0n && includedTypes.has(r.type));

  // Compute omitted markers
  const omittedMarkers = computeOmittedMarkers(nonSigTypes, includedTypes);

  // Compute nonce hashes (leaf_hashes) for included TLVs — one per included
  // record, derived from the nonce tag hash and the record's BigSize type
  const includedNonceHashes: Uint8Array[] = [];
  for (let i = 0; i < nonSigRecords.length; i++) {
    if (isIncluded[i]) {
      const typeBytes = writeBigSize(nonSigRecords[i].type);
      const nonce = taggedHashWithPrecomputedTag(nonceTagHash, typeBytes);
      includedNonceHashes.push(nonce);
    }
  }

  // Compute missing hashes
  const missingHashes = computeMissingHashesForProof(branches, isIncluded);

  // Sign: SIG(tag, msg, key) where tag = "lightningpayer_proofpayer_signature"
  // msg = SHA256(note_bytes || merkle_root)
  const noteBytes = params.note ? encoder.encode(params.note) : new Uint8Array(0);
  const payerRawMsg = sha256(concatBytes(noteBytes, merkleRoot));
  const payerSigTag = encoder.encode('lightningpayer_proofpayer_signature');
  const payerMsg = taggedHash(payerSigTag, payerRawMsg);
  const payerSig = schnorr.sign(payerMsg, payerSecretKey);

  // Build proof TLV records in ascending type order
  const proofRecords: TlvRecord[] = [];

  // Add included invoice records (non-sig, non-type-0)
  for (const record of nonSigRecords) {
    if (record.type !== 0n && includedTypes.has(record.type)) {
      proofRecords.push(record);
    }
  }

  // Type 240: invoice signature
  proofRecords.push(makeTlv(SIGNATURE, sigRecord.value));

  // Type 242: preimage
  proofRecords.push(makeTlv(PP_PREIMAGE, preimage));

  // Type 244: omitted_tlvs
  if (omittedMarkers.length > 0) {
    const omittedValue = concatBytes(...omittedMarkers.map(m => writeBigSize(m)));
    proofRecords.push(makeTlv(PP_OMITTED_TLVS, omittedValue));
  }

  // Type 246: missing_hashes
  if (missingHashes.length > 0) {
    const missingValue = concatBytes(...missingHashes);
    proofRecords.push(makeTlv(PP_MISSING_HASHES, missingValue));
  }

  // Type 248: leaf_hashes
  if (includedNonceHashes.length > 0) {
    const leafHashesValue = concatBytes(...includedNonceHashes);
    proofRecords.push(makeTlv(PP_LEAF_HASHES, leafHashesValue));
  }

  // Type 250: payer_signature (64-byte sig + optional note)
  const payerSigValue = noteBytes.length > 0
    ? concatBytes(payerSig, noteBytes)
    : payerSig;
  proofRecords.push(makeTlv(PP_PAYER_SIGNATURE, payerSigValue));

  // Sort by type (Number() of a bigint difference keeps the sign)
  proofRecords.sort((a, b) => Number(a.type - b.type));

  // Serialize to bytes
  const proofBytes = concatBytes(...proofRecords.map(serializeTlvRecord));
  const proofHex = toHex(proofBytes);
  const proofBech32 = encodeBolt12('lnp', proofBytes);

  return { proofHex, proofBech32, merkleRoot };
}
|