@buildonspark/spark-sdk 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/LightningSendRequest-CNJFhLVc.d.cts +374 -0
- package/dist/LightningSendRequest-CNJFhLVc.d.ts +374 -0
- package/dist/auto-bind.d.ts +7 -0
- package/dist/auto-bind.js +41 -0
- package/dist/auto-bind.js.map +1 -0
- package/dist/chunk-5SAJ52IV.js +10309 -0
- package/dist/chunk-COXVABEU.js +1524 -0
- package/dist/chunk-F4JW24C4.js +78 -0
- package/dist/chunk-H4A2WXR3.js +331 -0
- package/dist/chunk-HTNOFUHX.js +1547 -0
- package/dist/chunk-JQFHUW4I.js +21 -0
- package/dist/chunk-K3Y7DVLD.js +19 -0
- package/dist/chunk-NDKNVHGP.js +127 -0
- package/dist/chunk-PMVJGQCP.js +627 -0
- package/dist/chunk-QX3ZJH2S.js +527 -0
- package/dist/chunk-SL2YOBVM.js +127 -0
- package/dist/chunk-SWCOMKD6.js +333 -0
- package/dist/chunk-SWFFNBSR.js +1244 -0
- package/dist/chunk-WLK5POBV.js +527 -0
- package/dist/chunk-WZ74TD7N.js +660 -0
- package/dist/chunk-WZYVI3M3.js +1244 -0
- package/dist/chunk-ZGU3XW7W.js +78 -0
- package/dist/connection-BgWj7Hnd.d.cts +77 -0
- package/dist/connection-BgbVJtzh.d.ts +77 -0
- package/dist/connection-DX-9yFl8.d.ts +77 -0
- package/dist/connection-hITj9Mgk.d.cts +77 -0
- package/dist/graphql/objects/index.cjs +626 -0
- package/dist/graphql/objects/index.d.cts +140 -0
- package/dist/index.cjs +17202 -0
- package/dist/index.d.cts +413 -0
- package/dist/index.d.ts +413 -0
- package/dist/index.js +3390 -0
- package/dist/proto/spark.cjs +10451 -0
- package/dist/proto/spark.d.cts +3 -0
- package/dist/services/config.d.ts +2 -0
- package/dist/services/config.js +5 -0
- package/dist/services/config.js.map +1 -1
- package/dist/services/connection.d.ts +1 -0
- package/dist/services/connection.js +19 -12
- package/dist/services/connection.js.map +1 -1
- package/dist/services/index.cjs +12503 -0
- package/dist/services/index.d.cts +23 -0
- package/dist/services/index.d.ts +23 -0
- package/dist/services/index.js +17 -0
- package/dist/services/lightning.js +16 -4
- package/dist/services/lightning.js.map +1 -1
- package/dist/services/lrc20.d.ts +5 -0
- package/dist/services/lrc20.js +27 -0
- package/dist/services/lrc20.js.map +1 -0
- package/dist/services/token-transactions.js +21 -3
- package/dist/services/token-transactions.js.map +1 -1
- package/dist/services/transfer.d.ts +1 -0
- package/dist/services/transfer.js +109 -3
- package/dist/services/transfer.js.map +1 -1
- package/dist/services/tree-creation.d.ts +0 -1
- package/dist/services/tree-creation.js +4 -9
- package/dist/services/tree-creation.js.map +1 -1
- package/dist/signer/signer.cjs +894 -0
- package/dist/signer/signer.d.cts +5 -0
- package/dist/signer/signer.d.ts +2 -0
- package/dist/signer/signer.js +9 -3
- package/dist/signer/signer.js.map +1 -1
- package/dist/signer-BaC_ZP1g.d.ts +138 -0
- package/dist/signer-C6h1OnSQ.d.cts +138 -0
- package/dist/signer-CO4owhHI.d.ts +154 -0
- package/dist/signer-DDkpXvNZ.d.cts +154 -0
- package/dist/spark-BUTdOtMz.d.cts +1170 -0
- package/dist/spark-BUTdOtMz.d.ts +1170 -0
- package/dist/spark-sdk.d.ts +25 -18
- package/dist/spark-sdk.js +257 -142
- package/dist/spark-sdk.js.map +1 -1
- package/dist/tests/adaptor-signature.test.js +1 -2
- package/dist/tests/adaptor-signature.test.js.map +1 -1
- package/dist/tests/coop-exit.test.js.map +1 -1
- package/dist/tests/lightning.test.js.map +1 -1
- package/dist/tests/test-util.cjs +12269 -0
- package/dist/tests/test-util.d.cts +90 -0
- package/dist/tests/test-util.d.ts +19 -0
- package/dist/tests/test-util.js +19 -0
- package/dist/tests/test-util.js.map +1 -1
- package/dist/tests/transfer.test.js.map +1 -1
- package/dist/types/grpc.d.ts +1 -1
- package/dist/utils/index.cjs +1825 -0
- package/dist/utils/index.d.cts +280 -0
- package/dist/utils/keys.d.ts +2 -0
- package/dist/utils/keys.js +4 -0
- package/dist/utils/keys.js.map +1 -1
- package/dist/utils/network.d.ts +17 -0
- package/dist/utils/network.js +16 -0
- package/dist/utils/network.js.map +1 -1
- package/dist/utils/response-validation.js.map +1 -1
- package/dist/utils/token-hashing.js +1 -2
- package/dist/utils/token-hashing.js.map +1 -1
- package/dist/utils/transaction.d.ts +3 -5
- package/dist/utils/transaction.js +14 -16
- package/dist/utils/transaction.js.map +1 -1
- package/dist/utils/wasm-wrapper.js +4 -4
- package/dist/utils/wasm-wrapper.js.map +1 -1
- package/dist/wasm/spark_bindings.js +7 -3
- package/dist/wasm/spark_bindings.js.map +1 -1
- package/dist/wasm/spark_bindings_bg.wasm +0 -0
- package/package.json +12 -1
- package/dist/tests/jest.setup.d.ts +0 -1
- package/dist/tests/jest.setup.js +0 -8
- package/dist/tests/jest.setup.js.map +0 -1
package/dist/index.js
ADDED
|
@@ -0,0 +1,3390 @@
|
|
|
1
|
+
import {
|
|
2
|
+
TokenTransactionService,
|
|
3
|
+
calculateAvailableTokenAmount,
|
|
4
|
+
checkIfSelectedLeavesAreAvailable
|
|
5
|
+
} from "./chunk-QX3ZJH2S.js";
|
|
6
|
+
import {
|
|
7
|
+
ConnectionManager,
|
|
8
|
+
DepositService,
|
|
9
|
+
WalletConfigService
|
|
10
|
+
} from "./chunk-SWFFNBSR.js";
|
|
11
|
+
import "./chunk-SWCOMKD6.js";
|
|
12
|
+
import {
|
|
13
|
+
createRefundTx,
|
|
14
|
+
getEphemeralAnchorOutput,
|
|
15
|
+
getNextTransactionSequence,
|
|
16
|
+
initWasm
|
|
17
|
+
} from "./chunk-F4JW24C4.js";
|
|
18
|
+
import "./chunk-JQFHUW4I.js";
|
|
19
|
+
import {
|
|
20
|
+
computeTaprootKeyNoScript,
|
|
21
|
+
getP2TRAddressFromPublicKey,
|
|
22
|
+
getP2TRScriptFromPublicKey,
|
|
23
|
+
getSigHashFromTx,
|
|
24
|
+
getTxFromRawTxBytes,
|
|
25
|
+
getTxFromRawTxHex,
|
|
26
|
+
getTxId
|
|
27
|
+
} from "./chunk-NDKNVHGP.js";
|
|
28
|
+
import {
|
|
29
|
+
applyAdaptorToSignature,
|
|
30
|
+
generateAdaptorFromSignature,
|
|
31
|
+
generateSignatureFromExistingAdaptor,
|
|
32
|
+
getCrypto,
|
|
33
|
+
getNetwork
|
|
34
|
+
} from "./chunk-COXVABEU.js";
|
|
35
|
+
import "./chunk-5SAJ52IV.js";
|
|
36
|
+
import {
|
|
37
|
+
BitcoinNetwork_default,
|
|
38
|
+
CoopExitFeeEstimateOutputFromJson,
|
|
39
|
+
CoopExitRequestFromJson,
|
|
40
|
+
FRAGMENT,
|
|
41
|
+
FRAGMENT2,
|
|
42
|
+
FRAGMENT3,
|
|
43
|
+
FRAGMENT4,
|
|
44
|
+
FRAGMENT5,
|
|
45
|
+
FRAGMENT6,
|
|
46
|
+
FRAGMENT7,
|
|
47
|
+
LeavesSwapRequestFromJson,
|
|
48
|
+
LightningReceiveFeeEstimateOutputFromJson,
|
|
49
|
+
LightningReceiveRequestFromJson,
|
|
50
|
+
LightningSendFeeEstimateOutputFromJson,
|
|
51
|
+
LightningSendRequestFromJson
|
|
52
|
+
} from "./chunk-WZ74TD7N.js";
|
|
53
|
+
|
|
54
|
+
// src/spark-sdk.ts
|
|
55
|
+
import { bytesToHex as bytesToHex2, hexToBytes as hexToBytes3 } from "@noble/curves/abstract/utils";
|
|
56
|
+
import { secp256k1 as secp256k13 } from "@noble/curves/secp256k1";
|
|
57
|
+
import { Address as Address2, OutScript as OutScript2 } from "@scure/btc-signer";
|
|
58
|
+
import { sha256 as sha2564 } from "@scure/btc-signer/utils";
|
|
59
|
+
import { decode as decode2 } from "light-bolt11-decoder";
|
|
60
|
+
|
|
61
|
+
// src/graphql/client.ts
|
|
62
|
+
import {
|
|
63
|
+
DefaultCrypto,
|
|
64
|
+
NodeKeyCache,
|
|
65
|
+
Requester
|
|
66
|
+
} from "@lightsparkdev/core";
|
|
67
|
+
|
|
68
|
+
// src/graphql/mutations/CompleteCoopExit.ts
// GraphQL mutation document: finalizes a cooperative exit by linking the
// user's outbound transfer to an existing coop-exit request.
// `FRAGMENT` (imported above) is interpolated below and must define
// `CoopExitRequestFragment`, which is spread into the response selection.
var CompleteCoopExit = `
  mutation CompleteCoopExit(
    $user_outbound_transfer_external_id: UUID!
    $coop_exit_request_id: ID!
  ) {
    complete_coop_exit(input: {
      user_outbound_transfer_external_id: $user_outbound_transfer_external_id
      coop_exit_request_id: $coop_exit_request_id
    }) {
      request {
        ...CoopExitRequestFragment
      }
    }
  }

  ${FRAGMENT}
`;
|
|
86
|
+
|
|
87
|
+
// src/graphql/mutations/CompleteLeavesSwap.ts
// GraphQL mutation document: reveals the adaptor secret key to the SSP to
// finalize a leaves swap. `FRAGMENT2` must define `LeavesSwapRequestFragment`.
var CompleteLeavesSwap = `
  mutation CompleteLeavesSwap(
    $adaptor_secret_key: String!
    $user_outbound_transfer_external_id: UUID!
    $leaves_swap_request_id: ID!
  ) {
    complete_leaves_swap(input: { adaptor_secret_key: $adaptor_secret_key, user_outbound_transfer_external_id: $user_outbound_transfer_external_id, leaves_swap_request_id: $leaves_swap_request_id }) {
      request {
        ...LeavesSwapRequestFragment
      }
    }
  }

  ${FRAGMENT2}
`;
|
|
103
|
+
|
|
104
|
+
// src/graphql/mutations/RequestCoopExit.ts
// GraphQL mutation document: asks the SSP to start a cooperative exit that
// withdraws the given leaves on-chain to `withdrawal_address`.
// `FRAGMENT` must define `CoopExitRequestFragment`.
var RequestCoopExit = `
  mutation RequestCoopExit(
    $leaf_external_ids: [UUID!]!
    $withdrawal_address: String!
  ) {
    request_coop_exit(
      input: {
        leaf_external_ids: $leaf_external_ids
        withdrawal_address: $withdrawal_address
      }
    ) {
      request {
        ...CoopExitRequestFragment
      }
    }
  }
  ${FRAGMENT}
`;
|
|
123
|
+
|
|
124
|
+
// src/graphql/mutations/RequestLightningReceive.ts
// GraphQL mutation document: asks the SSP to create a Lightning invoice for
// `amount_sats` on `network`, tied to a caller-supplied payment hash.
// `expiry_secs` and `memo` are optional. `FRAGMENT3` must define
// `LightningReceiveRequestFragment`.
var RequestLightningReceive = `
  mutation RequestLightningReceive(
    $network: BitcoinNetwork!
    $amount_sats: Long!
    $payment_hash: Hash32!
    $expiry_secs: Int
    $memo: String
  ) {
    request_lightning_receive(
      input: {
        network: $network
        amount_sats: $amount_sats
        payment_hash: $payment_hash
        expiry_secs: $expiry_secs
        memo: $memo
      }
    ) {
      request {
        ...LightningReceiveRequestFragment
      }
    }
  }
  ${FRAGMENT3}
`;
|
|
149
|
+
|
|
150
|
+
// src/graphql/mutations/RequestLightningSend.ts
// GraphQL mutation document: asks the SSP to pay a BOLT11 invoice. The
// idempotency key lets the caller safely retry the request. `FRAGMENT4`
// must define `LightningSendRequestFragment`.
var RequestLightningSend = `
  mutation RequestLightningSend(
    $encoded_invoice: String!
    $idempotency_key: String!
  ) {
    request_lightning_send(input: {
      encoded_invoice: $encoded_invoice
      idempotency_key: $idempotency_key
    }) {
      request {
        ...LightningSendRequestFragment
      }
    }
  }
  ${FRAGMENT4}
`;
|
|
167
|
+
|
|
168
|
+
// src/graphql/mutations/RequestSwapLeaves.ts
// GraphQL mutation document: initiates a leaves swap with the SSP, committing
// to an adaptor public key; the swap is later finalized by CompleteLeavesSwap
// revealing the matching secret. `FRAGMENT2` must define
// `LeavesSwapRequestFragment`.
var RequestSwapLeaves = `
  mutation RequestSwapLeaves(
    $adaptor_pubkey: String!
    $total_amount_sats: Long!
    $target_amount_sats: Long!
    $fee_sats: Long!
    $user_leaves: [UserLeafInput!]!
  ) {
    request_leaves_swap(input: {
      adaptor_pubkey: $adaptor_pubkey
      total_amount_sats: $total_amount_sats
      target_amount_sats: $target_amount_sats
      fee_sats: $fee_sats
      user_leaves: $user_leaves
    }) {
      request {
        ...LeavesSwapRequestFragment
      }
    }
  }
  ${FRAGMENT2}
`;
|
|
191
|
+
|
|
192
|
+
// src/graphql/queries/CoopExitFeeEstimate.ts
// GraphQL query document: estimates the SSP fee for cooperatively exiting the
// given leaves to an on-chain address. `FRAGMENT5` must define
// `CoopExitFeeEstimateOutputFragment`.
var CoopExitFeeEstimate = `
  query CoopExitFeeEstimate(
    $leaf_external_ids: [UUID!]!
    $withdrawal_address: String!
  ) {
    coop_exit_fee_estimate(
      input: {
        leaf_external_ids: $leaf_external_ids
        withdrawal_address: $withdrawal_address
      }
    ) {
      ...CoopExitFeeEstimateOutputFragment
    }
  }
  ${FRAGMENT5}
`;
|
|
209
|
+
|
|
210
|
+
// src/graphql/queries/LightningReceiveFeeEstimate.ts
// GraphQL query document: estimates the SSP fee for receiving `amount_sats`
// over Lightning on `network`. `FRAGMENT6` must define
// `LightningReceiveFeeEstimateOutputFragment`.
var LightningReceiveFeeEstimate = `
  query LightningReceiveFeeEstimate(
    $network: BitcoinNetwork!
    $amount_sats: Long!
  ) {
    lightning_receive_fee_estimate(
      input: {
        network: $network
        amount_sats: $amount_sats
      }
    ) {
      ...LightningReceiveFeeEstimateOutputFragment
    }
  }
  ${FRAGMENT6}
`;
|
|
227
|
+
|
|
228
|
+
// src/graphql/queries/LightningSendFeeEstimate.ts
// GraphQL query document: estimates the SSP fee for paying a BOLT11 invoice.
// `FRAGMENT7` must define `LightningSendFeeEstimateOutputFragment`.
var LightningSendFeeEstimate = `
  query LightningSendFeeEstimate(
    $encoded_invoice: String!
  ) {
    lightning_send_fee_estimate(
      input: {
        encoded_invoice: $encoded_invoice
      }
    ) {
      ...LightningSendFeeEstimateOutputFragment
    }
  }
  ${FRAGMENT7}
`;
|
|
243
|
+
|
|
244
|
+
// src/graphql/client.ts
// Thin GraphQL client for the Spark Service Provider (SSP). Each public
// method wraps one of the operation documents defined above and converts the
// raw JSON response into a typed object via the matching *FromJson helper.
var SspClient = class {
  // Requester (from @lightsparkdev/core) that performs the HTTP/GraphQL calls.
  requester;
  // Hex identity public key; forwarded to SparkAuthProvider for auth headers.
  identityPublicKey;
  // NOTE(review): declared but never assigned anywhere in this class, so the
  // constructor below always hands `undefined` to the Requester as the signing
  // key — confirm whether operation signing is intentionally disabled here.
  signingKey;
  constructor(identityPublicKey) {
    this.identityPublicKey = identityPublicKey;
    // Prefer the browser's fetch (bound to window so `this` is correct);
    // otherwise fall back to the global fetch (Node 18+).
    const fetchFunction = typeof window !== "undefined" ? window.fetch.bind(window) : fetch;
    this.requester = new Requester(
      new NodeKeyCache(DefaultCrypto),
      "graphql/spark/rc",
      `spark-sdk/0.0.0`,
      new SparkAuthProvider(identityPublicKey),
      // NOTE(review): hard-coded dev endpoint — verify this is meant to ship
      // in a published SDK build.
      "https://api.dev.dev.sparkinfra.net",
      DefaultCrypto,
      this.signingKey,
      fetchFunction
    );
  }
  // Executes an arbitrary prepared query object against the SSP endpoint.
  async executeRawQuery(query) {
    return await this.requester.executeQuery(query);
  }
  // Estimates the fee for receiving `amountSats` over Lightning on `network`.
  async getLightningReceiveFeeEstimate(amountSats, network) {
    return await this.executeRawQuery({
      queryPayload: LightningReceiveFeeEstimate,
      variables: {
        amount_sats: amountSats,
        network
      },
      constructObject: (response) => {
        return LightningReceiveFeeEstimateOutputFromJson(
          response.lightning_receive_fee_estimate
        );
      }
    });
  }
  // Estimates the fee for paying the given BOLT11 invoice.
  async getLightningSendFeeEstimate(encodedInvoice) {
    return await this.executeRawQuery({
      queryPayload: LightningSendFeeEstimate,
      variables: {
        encoded_invoice: encodedInvoice
      },
      constructObject: (response) => {
        return LightningSendFeeEstimateOutputFromJson(
          response.lightning_send_fee_estimate
        );
      }
    });
  }
  // Estimates the fee for cooperatively exiting `leafExternalIds` on-chain
  // to `withdrawalAddress`.
  async getCoopExitFeeEstimate({
    leafExternalIds,
    withdrawalAddress
  }) {
    return await this.executeRawQuery({
      queryPayload: CoopExitFeeEstimate,
      variables: {
        leaf_external_ids: leafExternalIds,
        withdrawal_address: withdrawalAddress
      },
      constructObject: (response) => {
        return CoopExitFeeEstimateOutputFromJson(
          response.coop_exit_fee_estimate
        );
      }
    });
  }
  // TODO: Might not need
  // Unimplemented placeholder; always throws.
  async getCurrentUser() {
    throw new Error("Not implemented");
  }
  // Finalizes a cooperative exit previously started via requestCoopExit.
  async completeCoopExit({
    userOutboundTransferExternalId,
    coopExitRequestId
  }) {
    return await this.executeRawQuery({
      queryPayload: CompleteCoopExit,
      variables: {
        user_outbound_transfer_external_id: userOutboundTransferExternalId,
        coop_exit_request_id: coopExitRequestId
      },
      constructObject: (response) => {
        return CoopExitRequestFromJson(response.complete_coop_exit.request);
      }
    });
  }
  // Starts a cooperative exit of the given leaves to an on-chain address.
  async requestCoopExit({
    leafExternalIds,
    withdrawalAddress
  }) {
    return await this.executeRawQuery({
      queryPayload: RequestCoopExit,
      variables: {
        leaf_external_ids: leafExternalIds,
        withdrawal_address: withdrawalAddress
      },
      constructObject: (response) => {
        return CoopExitRequestFromJson(response.request_coop_exit.request);
      }
    });
  }
  // TODO: Lets name this better
  // Asks the SSP to create a Lightning invoice for the given amount/hash.
  async requestLightningReceive({
    amountSats,
    network,
    paymentHash,
    expirySecs,
    memo
  }) {
    return await this.executeRawQuery({
      queryPayload: RequestLightningReceive,
      variables: {
        amount_sats: amountSats,
        network,
        payment_hash: paymentHash,
        expiry_secs: expirySecs,
        memo
      },
      constructObject: (response) => {
        return LightningReceiveRequestFromJson(
          response.request_lightning_receive.request
        );
      }
    });
  }
  // Asks the SSP to pay a BOLT11 invoice; `idempotencyKey` makes retries safe.
  async requestLightningSend({
    encodedInvoice,
    idempotencyKey
  }) {
    return await this.executeRawQuery({
      queryPayload: RequestLightningSend,
      variables: {
        encoded_invoice: encodedInvoice,
        idempotency_key: idempotencyKey
      },
      constructObject: (response) => {
        return LightningSendRequestFromJson(
          response.request_lightning_send.request
        );
      }
    });
  }
  // Starts a leaves swap with the SSP, committing to `adaptorPubkey`.
  // Returns null when the server response carries no request_leaves_swap.
  async requestLeaveSwap({
    adaptorPubkey,
    totalAmountSats,
    targetAmountSats,
    feeSats,
    userLeaves
  }) {
    const query = {
      queryPayload: RequestSwapLeaves,
      variables: {
        adaptor_pubkey: adaptorPubkey,
        total_amount_sats: totalAmountSats,
        target_amount_sats: targetAmountSats,
        fee_sats: feeSats,
        user_leaves: userLeaves
      },
      constructObject: (response) => {
        if (!response.request_leaves_swap) {
          return null;
        }
        return LeavesSwapRequestFromJson(response.request_leaves_swap.request);
      }
    };
    return await this.executeRawQuery(query);
  }
  // Finalizes a leaves swap by revealing the adaptor secret key to the SSP.
  async completeLeaveSwap({
    adaptorSecretKey,
    userOutboundTransferExternalId,
    leavesSwapRequestId
  }) {
    return await this.executeRawQuery({
      queryPayload: CompleteLeavesSwap,
      variables: {
        adaptor_secret_key: adaptorSecretKey,
        user_outbound_transfer_external_id: userOutboundTransferExternalId,
        leaves_swap_request_id: leavesSwapRequestId
      },
      constructObject: (response) => {
        return LeavesSwapRequestFromJson(response.complete_leaves_swap.request);
      }
    });
  }
};
|
|
428
|
+
// Auth provider that identifies the wallet to the SSP by its public key.
// All methods resolve immediately; no token exchange or signing is performed.
var SparkAuthProvider = class {
  publicKey;

  constructor(publicKey) {
    this.publicKey = publicKey;
  }

  // Returns the fixed set of headers sent with every GraphQL request.
  // Note: the incoming `headers` argument is not merged into the result.
  async addAuthHeaders(headers) {
    return {
      "Spark-Identity-Public-Key": this.publicKey,
      "Content-Type": "application/json"
    };
  }

  // This provider is always considered authorized.
  async isAuthorized() {
    return true;
  }

  // Augments websocket connection params with the identity public key.
  async addWsConnectionParams(params) {
    const augmented = {
      ...params,
      "Spark-Identity-Public-Key": this.publicKey
    };
    return augmented;
  }
};
|
|
450
|
+
|
|
451
|
+
// src/services/coop-exit.ts
|
|
452
|
+
import { Transaction as Transaction2 } from "@scure/btc-signer";
|
|
453
|
+
|
|
454
|
+
// src/services/transfer.ts
|
|
455
|
+
import {
|
|
456
|
+
bytesToHex,
|
|
457
|
+
equalBytes,
|
|
458
|
+
hexToBytes,
|
|
459
|
+
numberToBytesBE
|
|
460
|
+
} from "@noble/curves/abstract/utils";
|
|
461
|
+
import { secp256k1 } from "@noble/curves/secp256k1";
|
|
462
|
+
import { Transaction } from "@scure/btc-signer";
|
|
463
|
+
import { sha256 } from "@scure/btc-signer/utils";
|
|
464
|
+
import * as ecies from "eciesjs";
|
|
465
|
+
// Initial relative time lock (in lock units) applied to newly created
// refund transactions.
var INITIAL_TIME_LOCK = 2e3;

// Builds the starting nSequence value for a refund transaction: bit 30 set,
// combined with the initial time lock in the low bits.
function initialSequence() {
  const sequenceFlag = 1 << 30;
  return sequenceFlag | INITIAL_TIME_LOCK;
}
|
|
469
|
+
// Platform-appropriate crypto implementation resolved once at module load;
// used below for randomUUID generation (see sendSwapSignRefund).
var crypto = getCrypto();
|
|
470
|
+
// Shared logic for sending/receiving Spark transfers: key-tweak preparation,
// FROST refund signing, and fan-out of completion calls to every signing
// operator. Subclassed by TransferService below.
var BaseTransferService = class {
  // Wallet configuration service (operators, threshold, signer).
  config;
  // Factory for per-operator Spark gRPC clients.
  connectionManager;
  constructor(config, connectionManager) {
    this.config = config;
    this.connectionManager = connectionManager;
  }
  // Sends the per-operator key tweaks for `transfer` to every signing
  // operator in parallel and returns the (consistent) updated transfer.
  // Throws if any operator call fails or responses disagree.
  async sendTransferTweakKey(transfer, leaves, refundSignatureMap) {
    const keyTweakInputMap = await this.prepareSendTransferKeyTweaks(
      transfer,
      leaves,
      refundSignatureMap
    );
    let updatedTransfer;
    const errors = [];
    const promises = Object.entries(
      this.config.getConfig().signingOperators
    ).map(async ([identifier, operator]) => {
      const sparkClient = await this.connectionManager.createSparkClient(
        operator.address
      );
      const leavesToSend = keyTweakInputMap.get(identifier);
      if (!leavesToSend) {
        errors.push(new Error(`No leaves to send for operator ${identifier}`));
        return;
      }
      let transferResp;
      try {
        transferResp = await sparkClient.complete_send_transfer({
          transferId: transfer.id,
          ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
          leavesToSend
        });
      } catch (error) {
        errors.push(new Error(`Error completing send transfer: ${error}`));
        return;
      }
      // First operator to respond seeds `updatedTransfer`; later responses
      // are cross-checked against it for consistency.
      // NOTE(review): the first response is stored without checking that
      // transferResp.transfer is defined; if it is missing, the failure only
      // surfaces later as "No updated transfer found" — confirm intended.
      if (!updatedTransfer) {
        updatedTransfer = transferResp.transfer;
      } else {
        if (!transferResp.transfer) {
          errors.push(
            new Error(`No transfer response from operator ${identifier}`)
          );
          return;
        }
        if (!this.compareTransfers(updatedTransfer, transferResp.transfer)) {
          errors.push(
            new Error(`Inconsistent transfer response from operators`)
          );
        }
      }
    });
    await Promise.all(promises);
    // Only the first collected error is reported (interpolated into the
    // message); subsequent errors are dropped.
    if (errors.length > 0) {
      throw new Error(`Error completing send transfer: ${errors[0]}`);
    }
    if (!updatedTransfer) {
      throw new Error("No updated transfer found");
    }
    return updatedTransfer;
  }
  // Produces an aggregated FROST signature over each leaf's refund
  // transaction sighash, combining the user's signature share with the
  // statechain operators' shares. Returns one node-signature entry per
  // operator signing result; nodeTxSignature is intentionally left empty.
  async signRefunds(leafDataMap, operatorSigningResults, adaptorPubKey) {
    const nodeSignatures = [];
    for (const operatorSigningResult of operatorSigningResults) {
      const leafData = leafDataMap.get(operatorSigningResult.leafId);
      if (!leafData || !leafData.tx || leafData.vout === void 0 || !leafData.refundTx) {
        throw new Error(
          `Leaf data not found for leaf ${operatorSigningResult.leafId}`
        );
      }
      // The refund tx spends output 0 of the leaf's node tx, so that output
      // supplies the prevout for sighash computation.
      const txOutput = leafData.tx?.getOutput(0);
      if (!txOutput) {
        throw new Error(
          `Output not found for leaf ${operatorSigningResult.leafId}`
        );
      }
      const refundTxSighash = getSigHashFromTx(leafData.refundTx, 0, txOutput);
      // User's FROST signature share over the sighash.
      const userSignature = await this.config.signer.signFrost({
        message: refundTxSighash,
        publicKey: leafData.signingPubKey,
        privateAsPubKey: leafData.signingPubKey,
        selfCommitment: leafData.signingNonceCommitment,
        statechainCommitments: operatorSigningResult.refundTxSigningResult?.signingNonceCommitments,
        adaptorPubKey,
        verifyingKey: operatorSigningResult.verifyingKey
      });
      // Aggregate the user share with the operators' shares into the final
      // signature (optionally adaptor-encrypted via adaptorPubKey).
      const refundAggregate = await this.config.signer.aggregateFrost({
        message: refundTxSighash,
        statechainSignatures: operatorSigningResult.refundTxSigningResult?.signatureShares,
        statechainPublicKeys: operatorSigningResult.refundTxSigningResult?.publicKeys,
        verifyingKey: operatorSigningResult.verifyingKey,
        statechainCommitments: operatorSigningResult.refundTxSigningResult?.signingNonceCommitments,
        selfCommitment: leafData.signingNonceCommitment,
        publicKey: leafData.signingPubKey,
        selfSignature: userSignature,
        adaptorPubKey
      });
      nodeSignatures.push({
        nodeId: operatorSigningResult.leafId,
        refundTxSignature: refundAggregate,
        nodeTxSignature: new Uint8Array()
      });
    }
    return nodeSignatures;
  }
  // Builds, for every leaf, the per-operator key-tweak payloads and groups
  // them by operator identifier: Map<operatorIdentifier, leafTweak[]>.
  async prepareSendTransferKeyTweaks(transfer, leaves, refundSignatureMap) {
    const receiverEciesPubKey = ecies.PublicKey.fromHex(
      bytesToHex(transfer.receiverIdentityPublicKey)
    );
    const leavesTweaksMap = /* @__PURE__ */ new Map();
    for (const leaf of leaves) {
      const refundSignature = refundSignatureMap.get(leaf.leaf.id);
      const leafTweaksMap = await this.prepareSingleSendTransferKeyTweak(
        transfer.id,
        leaf,
        receiverEciesPubKey,
        refundSignature
      );
      // Append this leaf's tweak to each operator's accumulating list.
      for (const [identifier, leafTweak] of leafTweaksMap) {
        leavesTweaksMap.set(identifier, [
          ...leavesTweaksMap.get(identifier) || [],
          leafTweak
        ]);
      }
    }
    return leavesTweaksMap;
  }
  // Prepares the key tweak for a single leaf:
  //  1. tweak = (old signing key - new signing key), split into verifiable
  //     secret shares (one per operator, threshold from config);
  //  2. computes each share's public key for cross-verification;
  //  3. ECIES-encrypts the new key material to the receiver;
  //  4. signs sha256(leafId || transferId || cipher) with the identity key.
  // Returns Map<operatorIdentifier, tweakPayload>.
  async prepareSingleSendTransferKeyTweak(transferID, leaf, receiverEciesPubKey, refundSignature) {
    const pubKeyTweak = await this.config.signer.subtractPrivateKeysGivenPublicKeys(
      leaf.signingPubKey,
      leaf.newSigningPubKey
    );
    const shares = await this.config.signer.splitSecretWithProofs({
      secret: pubKeyTweak,
      curveOrder: secp256k1.CURVE.n,
      threshold: this.config.getConfig().threshold,
      numShares: Object.keys(this.config.getConfig().signingOperators).length,
      isSecretPubkey: true
    });
    const pubkeySharesTweak = /* @__PURE__ */ new Map();
    for (const [identifier, operator] of Object.entries(
      this.config.getConfig().signingOperators
    )) {
      const share = this.findShare(shares, operator.id);
      if (!share) {
        throw new Error(`Share not found for operator ${operator.id}`);
      }
      // Compressed public key of this operator's secret share.
      const pubkeyTweak = secp256k1.getPublicKey(
        numberToBytesBE(share.share, 32),
        true
      );
      pubkeySharesTweak.set(identifier, pubkeyTweak);
    }
    const secretCipher = await this.config.signer.encryptLeafPrivateKeyEcies(
      receiverEciesPubKey.toBytes(),
      leaf.newSigningPubKey
    );
    // Authenticate the ciphertext by signing sha256(leafId || transferId ||
    // cipher) with the sender's identity key (compact=true per second arg —
    // TODO confirm the flag's meaning against the signer implementation).
    const encoder = new TextEncoder();
    const payload = new Uint8Array([
      ...encoder.encode(leaf.leaf.id),
      ...encoder.encode(transferID),
      ...secretCipher
    ]);
    const payloadHash = sha256(payload);
    const signature = await this.config.signer.signMessageWithIdentityKey(
      payloadHash,
      true
    );
    const leafTweaksMap = /* @__PURE__ */ new Map();
    for (const [identifier, operator] of Object.entries(
      this.config.getConfig().signingOperators
    )) {
      const share = this.findShare(shares, operator.id);
      if (!share) {
        throw new Error(`Share not found for operator ${operator.id}`);
      }
      leafTweaksMap.set(identifier, {
        leafId: leaf.leaf.id,
        secretShareTweak: {
          secretShare: numberToBytesBE(share.share, 32),
          proofs: share.proofs
        },
        pubkeySharesTweak: Object.fromEntries(pubkeySharesTweak),
        secretCipher,
        signature,
        refundSignature: refundSignature ?? new Uint8Array()
      });
    }
    return leafTweaksMap;
  }
  // Linear search for the share whose index matches operatorID + 1 (share
  // indices are 1-based BigInts). Returns undefined when absent.
  // NOTE(review): assumes operator.id is a plain number — confirm.
  findShare(shares, operatorID) {
    const targetShareIndex = BigInt(operatorID + 1);
    for (const s of shares) {
      if (s.index === targetShareIndex) {
        return s;
      }
    }
    return void 0;
  }
  // Field-wise equality check used to detect inconsistent operator
  // responses. Compares id, sender key bytes, status, total value, expiry
  // timestamp, and leaf count (not leaf contents).
  compareTransfers(transfer1, transfer2) {
    return transfer1.id === transfer2.id && equalBytes(
      transfer1.senderIdentityPublicKey,
      transfer2.senderIdentityPublicKey
    ) && transfer1.status === transfer2.status && transfer1.totalValue === transfer2.totalValue && transfer1.expiryTime?.getTime() === transfer2.expiryTime?.getTime() && transfer1.leaves.length === transfer2.leaves.length;
  }
};
|
|
677
|
+
var TransferService = class extends BaseTransferService {
|
|
678
|
+
constructor(config, connectionManager) {
|
|
679
|
+
super(config, connectionManager);
|
|
680
|
+
}
|
|
681
|
+
async sendTransfer(leaves, receiverIdentityPubkey, expiryTime) {
|
|
682
|
+
const { transfer, signatureMap } = await this.sendTransferSignRefund(
|
|
683
|
+
leaves,
|
|
684
|
+
receiverIdentityPubkey,
|
|
685
|
+
expiryTime
|
|
686
|
+
);
|
|
687
|
+
const transferWithTweakedKeys = await this.sendTransferTweakKey(
|
|
688
|
+
transfer,
|
|
689
|
+
leaves,
|
|
690
|
+
signatureMap
|
|
691
|
+
);
|
|
692
|
+
return transferWithTweakedKeys;
|
|
693
|
+
}
|
|
694
|
+
async claimTransfer(transfer, leaves) {
|
|
695
|
+
if (transfer.status === 2 /* TRANSFER_STATUS_SENDER_KEY_TWEAKED */) {
|
|
696
|
+
await this.claimTransferTweakKeys(transfer, leaves);
|
|
697
|
+
}
|
|
698
|
+
const signatures = await this.claimTransferSignRefunds(transfer, leaves);
|
|
699
|
+
return await this.finalizeTransfer(signatures);
|
|
700
|
+
}
|
|
701
|
+
async queryPendingTransfers() {
|
|
702
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
703
|
+
this.config.getCoordinatorAddress()
|
|
704
|
+
);
|
|
705
|
+
let pendingTransfersResp;
|
|
706
|
+
try {
|
|
707
|
+
pendingTransfersResp = await sparkClient.query_pending_transfers({
|
|
708
|
+
participant: {
|
|
709
|
+
$case: "receiverIdentityPublicKey",
|
|
710
|
+
receiverIdentityPublicKey: await this.config.signer.getIdentityPublicKey()
|
|
711
|
+
}
|
|
712
|
+
});
|
|
713
|
+
} catch (error) {
|
|
714
|
+
throw new Error(`Error querying pending transfers: ${error}`);
|
|
715
|
+
}
|
|
716
|
+
return pendingTransfersResp;
|
|
717
|
+
}
|
|
718
|
+
async queryAllTransfers(limit, offset) {
|
|
719
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
720
|
+
this.config.getCoordinatorAddress()
|
|
721
|
+
);
|
|
722
|
+
let allTransfersResp;
|
|
723
|
+
try {
|
|
724
|
+
allTransfersResp = await sparkClient.query_all_transfers({
|
|
725
|
+
identityPublicKey: await this.config.signer.getIdentityPublicKey(),
|
|
726
|
+
limit,
|
|
727
|
+
offset
|
|
728
|
+
});
|
|
729
|
+
} catch (error) {
|
|
730
|
+
throw new Error(`Error querying all transfers: ${error}`);
|
|
731
|
+
}
|
|
732
|
+
return allTransfersResp;
|
|
733
|
+
}
|
|
734
|
+
// Verifies the sender's signature over each leaf of a pending transfer and
// decrypts the per-leaf secret. Returns a Map of leaf id -> decrypted secret.
// Throws if any leaf is missing or any signature fails to verify.
async verifyPendingTransfer(transfer) {
  const leafPubKeyMap = /* @__PURE__ */ new Map();
  for (const leaf of transfer.leaves) {
    if (!leaf.leaf) {
      throw new Error("Leaf is undefined");
    }
    // Signed payload is the concatenation leafId || transferId || secretCipher
    // (UTF-8 encoded ids) — order matters and must match the sender's side.
    const encoder = new TextEncoder();
    const leafIdBytes = encoder.encode(leaf.leaf.id);
    const transferIdBytes = encoder.encode(transfer.id);
    const payload = new Uint8Array([
      ...leafIdBytes,
      ...transferIdBytes,
      ...leaf.secretCipher
    ]);
    const payloadHash = sha256(payload);
    // The signature must come from the sender's identity key.
    if (!secp256k1.verify(
      leaf.signature,
      payloadHash,
      transfer.senderIdentityPublicKey
    )) {
      throw new Error("Signature verification failed");
    }
    // ECIES-decrypt the leaf secret with our identity key via the signer.
    const leafSecret = await this.config.signer.decryptEcies(
      leaf.secretCipher
    );
    leafPubKeyMap.set(leaf.leaf.id, leafSecret);
  }
  return leafPubKeyMap;
}
|
|
763
|
+
// Initiates a leaf swap with the coordinator and signs the refund transactions
// for the leaves being sent, optionally binding an adaptor public key so the
// resulting signatures are adaptor signatures.
// Returns { transfer, signatureMap (nodeId -> refund sig), leafDataMap, signingResults }.
async sendSwapSignRefund(leaves, receiverIdentityPubkey, expiryTime, adaptorPubKey) {
  const transferId = crypto.randomUUID();
  // Per-leaf signing context keyed by leaf id; prepareRefundSoSigningJobs and
  // signRefunds both read (and mutate) these entries.
  const leafDataMap = /* @__PURE__ */ new Map();
  for (const leaf of leaves) {
    const signingNonceCommitment = await this.config.signer.getRandomSigningCommitment();
    const tx = getTxFromRawTxBytes(leaf.leaf.nodeTx);
    const refundTx = getTxFromRawTxBytes(leaf.leaf.refundTx);
    leafDataMap.set(leaf.leaf.id, {
      signingPubKey: leaf.signingPubKey,
      receivingPubkey: receiverIdentityPubkey,
      signingNonceCommitment,
      tx,
      refundTx,
      vout: leaf.leaf.vout
    });
  }
  const signingJobs = this.prepareRefundSoSigningJobs(leaves, leafDataMap);
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  let response;
  try {
    response = await sparkClient.leaf_swap({
      transfer: {
        transferId,
        leavesToSend: signingJobs,
        ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
        receiverIdentityPublicKey: receiverIdentityPubkey,
        expiryTime
      },
      swapId: crypto.randomUUID(),
      // Empty adaptor key means a plain (non-adaptor) signing session.
      adaptorPublicKey: adaptorPubKey || new Uint8Array()
    });
  } catch (error) {
    throw new Error(`Error initiating leaf swap: ${error}`);
  }
  if (!response.transfer) {
    throw new Error("No transfer response from coordinator");
  }
  // Aggregate our FROST shares with the operators' signing results.
  const signatures = await this.signRefunds(
    leafDataMap,
    response.signingResults,
    adaptorPubKey
  );
  const signatureMap = /* @__PURE__ */ new Map();
  for (const signature of signatures) {
    signatureMap.set(signature.nodeId, signature.refundTxSignature);
  }
  return {
    transfer: response.transfer,
    signatureMap,
    leafDataMap,
    signingResults: response.signingResults
  };
}
|
|
818
|
+
// Starts an outgoing transfer with the coordinator and signs the refund
// transactions for the leaves being sent (no adaptor key; plain signatures).
// Returns { transfer, signatureMap (nodeId -> refund sig), leafDataMap }.
async sendTransferSignRefund(leaves, receiverIdentityPubkey, expiryTime) {
  const transferID = crypto.randomUUID();
  // Per-leaf signing context keyed by leaf id; shared with
  // prepareRefundSoSigningJobs (which mutates it) and signRefunds.
  const leafDataMap = /* @__PURE__ */ new Map();
  for (const leaf of leaves) {
    const signingNonceCommitment = await this.config.signer.getRandomSigningCommitment();
    const tx = getTxFromRawTxBytes(leaf.leaf.nodeTx);
    const refundTx = getTxFromRawTxBytes(leaf.leaf.refundTx);
    leafDataMap.set(leaf.leaf.id, {
      signingPubKey: leaf.signingPubKey,
      receivingPubkey: receiverIdentityPubkey,
      signingNonceCommitment,
      tx,
      refundTx,
      vout: leaf.leaf.vout
    });
  }
  const signingJobs = this.prepareRefundSoSigningJobs(leaves, leafDataMap);
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  let response;
  try {
    response = await sparkClient.start_send_transfer({
      transferId: transferID,
      leavesToSend: signingJobs,
      ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
      receiverIdentityPublicKey: receiverIdentityPubkey,
      expiryTime
    });
  } catch (error) {
    throw new Error(`Error starting send transfer: ${error}`);
  }
  // Aggregate our FROST shares with the operators' signing results.
  const signatures = await this.signRefunds(
    leafDataMap,
    response.signingResults
  );
  const signatureMap = /* @__PURE__ */ new Map();
  for (const signature of signatures) {
    signatureMap.set(signature.nodeId, signature.refundTxSignature);
  }
  if (!response.transfer) {
    throw new Error("No transfer response from coordinator");
  }
  return {
    transfer: response.transfer,
    signatureMap,
    leafDataMap
  };
}
|
|
867
|
+
// Builds the signing-operator jobs for new refund transactions, one per leaf.
// For each leaf: advances the refund tx's relative-timelock sequence, builds a
// fresh refund tx paying the leaf's receiving key, stores it back into the
// leaf's entry in leafDataMap (mutates the map entries), and emits a signing job.
// Throws if a leaf has no entry in leafDataMap or its refund output has no amount.
prepareRefundSoSigningJobs(leaves, leafDataMap) {
  const signingJobs = [];
  for (const leaf of leaves) {
    const refundSigningData = leafDataMap.get(leaf.leaf.id);
    if (!refundSigningData) {
      throw new Error(`Leaf data not found for leaf ${leaf.leaf.id}`);
    }
    // The new refund tx spends output 0 of the leaf's node tx.
    const nodeTx = getTxFromRawTxBytes(leaf.leaf.nodeTx);
    const nodeOutPoint = {
      txid: hexToBytes(getTxId(nodeTx)),
      index: 0
    };
    // Each re-signing must use the next (decremented) timelock sequence so the
    // newest refund can confirm before older ones.
    const currRefundTx = getTxFromRawTxBytes(leaf.leaf.refundTx);
    const nextSequence = getNextTransactionSequence(
      currRefundTx.getInput(0).sequence
    );
    const amountSats = currRefundTx.getOutput(0).amount;
    if (amountSats === void 0) {
      throw new Error("Amount not found in signRefunds");
    }
    const refundTx = createRefundTx(
      nextSequence,
      nodeOutPoint,
      amountSats,
      refundSigningData.receivingPubkey,
      this.config.getNetwork()
    );
    // Persist the rebuilt refund tx so later signing steps hash the same tx.
    refundSigningData.refundTx = refundTx;
    const refundNonceCommitmentProto = refundSigningData.signingNonceCommitment;
    signingJobs.push({
      leafId: leaf.leaf.id,
      refundTxSigningJob: {
        signingPublicKey: refundSigningData.signingPubKey,
        rawTx: refundTx.toBytes(),
        signingNonceCommitment: refundNonceCommitmentProto
      }
    });
  }
  return signingJobs;
}
|
|
907
|
+
// Sends each signing operator its share of the claim key tweaks, in parallel.
// Errors from individual operators are collected rather than failing fast;
// if any operator failed (or had no shares), the first error is re-thrown
// after all requests settle.
async claimTransferTweakKeys(transfer, leaves) {
  // Map of operator identifier -> array of per-leaf tweak shares.
  const leavesTweaksMap = await this.prepareClaimLeavesKeyTweaks(leaves);
  const errors = [];
  const promises = Object.entries(
    this.config.getConfig().signingOperators
  ).map(async ([identifier, operator]) => {
    const sparkClient = await this.connectionManager.createSparkClient(
      operator.address
    );
    const leavesToReceive = leavesTweaksMap.get(identifier);
    if (!leavesToReceive) {
      errors.push(
        new Error(`No leaves to receive for operator ${identifier}`)
      );
      return;
    }
    try {
      await sparkClient.claim_transfer_tweak_keys({
        transferId: transfer.id,
        ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
        leavesToReceive
      });
    } catch (error) {
      errors.push(new Error(`Error claiming transfer tweak keys: ${error}`));
      return;
    }
  });
  await Promise.all(promises);
  if (errors.length > 0) {
    // Only the first collected error is surfaced.
    throw new Error(`Error claiming transfer tweak keys: ${errors[0]}`);
  }
}
|
|
939
|
+
async prepareClaimLeavesKeyTweaks(leaves) {
|
|
940
|
+
const leafDataMap = /* @__PURE__ */ new Map();
|
|
941
|
+
for (const leaf of leaves) {
|
|
942
|
+
const leafData = await this.prepareClaimLeafKeyTweaks(leaf);
|
|
943
|
+
for (const [identifier, leafTweak] of leafData) {
|
|
944
|
+
leafDataMap.set(identifier, [
|
|
945
|
+
...leafDataMap.get(identifier) || [],
|
|
946
|
+
leafTweak
|
|
947
|
+
]);
|
|
948
|
+
}
|
|
949
|
+
}
|
|
950
|
+
return leafDataMap;
|
|
951
|
+
}
|
|
952
|
+
// Computes the key tweak for one leaf (difference between the old and new
// signing keys), secret-shares it across all signing operators with proofs
// (VSS-style), and returns a Map of operator identifier -> tweak package
// containing that operator's secret share plus every operator's public share.
async prepareClaimLeafKeyTweaks(leaf) {
  // Tweak = oldSigningKey - newSigningKey, computed inside the signer so the
  // private keys never leave it.
  const pubKeyTweak = await this.config.signer.subtractPrivateKeysGivenPublicKeys(
    leaf.signingPubKey,
    leaf.newSigningPubKey
  );
  const shares = await this.config.signer.splitSecretWithProofs({
    secret: pubKeyTweak,
    curveOrder: secp256k1.CURVE.n,
    threshold: this.config.getConfig().threshold,
    numShares: Object.keys(this.config.getConfig().signingOperators).length,
    isSecretPubkey: true
  });
  // First pass: derive each operator's public share so every tweak package
  // can carry the full set of public shares for cross-verification.
  const pubkeySharesTweak = /* @__PURE__ */ new Map();
  for (const [identifier, operator] of Object.entries(
    this.config.getConfig().signingOperators
  )) {
    const share = this.findShare(shares, operator.id);
    if (!share) {
      throw new Error(`Share not found for operator ${operator.id}`);
    }
    const pubkeyTweak = secp256k1.getPublicKey(
      numberToBytesBE(share.share, 32)
    );
    pubkeySharesTweak.set(identifier, pubkeyTweak);
  }
  // Second pass: assemble the per-operator tweak packages.
  const leafTweaksMap = /* @__PURE__ */ new Map();
  for (const [identifier, operator] of Object.entries(
    this.config.getConfig().signingOperators
  )) {
    const share = this.findShare(shares, operator.id);
    if (!share) {
      throw new Error(`Share not found for operator ${operator.id}`);
    }
    leafTweaksMap.set(identifier, {
      leafId: leaf.leaf.id,
      secretShareTweak: {
        secretShare: numberToBytesBE(share.share, 32),
        proofs: share.proofs
      },
      pubkeySharesTweak: Object.fromEntries(pubkeySharesTweak)
    });
  }
  return leafTweaksMap;
}
|
|
996
|
+
// Requests refund-signing sessions from the coordinator for a transfer being
// claimed, then completes the FROST signatures locally. Note the leaf data
// uses the leaf's NEW signing key for both signing and receiving, since the
// claimer now controls the leaf.
async claimTransferSignRefunds(transfer, leafKeys) {
  const leafDataMap = /* @__PURE__ */ new Map();
  for (const leafKey of leafKeys) {
    const tx = getTxFromRawTxBytes(leafKey.leaf.nodeTx);
    leafDataMap.set(leafKey.leaf.id, {
      signingPubKey: leafKey.newSigningPubKey,
      receivingPubkey: leafKey.newSigningPubKey,
      signingNonceCommitment: await this.config.signer.getRandomSigningCommitment(),
      tx,
      vout: leafKey.leaf.vout
    });
  }
  // Builds the refund txs and mutates leafDataMap entries with them.
  const signingJobs = this.prepareRefundSoSigningJobs(leafKeys, leafDataMap);
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  let resp;
  try {
    resp = await sparkClient.claim_transfer_sign_refunds({
      transferId: transfer.id,
      ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
      signingJobs
    });
  } catch (error) {
    throw new Error(`Error claiming transfer sign refunds: ${error}`);
  }
  // Returns the promise of aggregated node signatures.
  return this.signRefunds(leafDataMap, resp.signingResults);
}
|
|
1024
|
+
async finalizeTransfer(nodeSignatures) {
|
|
1025
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
1026
|
+
this.config.getCoordinatorAddress()
|
|
1027
|
+
);
|
|
1028
|
+
try {
|
|
1029
|
+
return await sparkClient.finalize_node_signatures({
|
|
1030
|
+
intent: 1 /* TRANSFER */,
|
|
1031
|
+
nodeSignatures
|
|
1032
|
+
});
|
|
1033
|
+
} catch (error) {
|
|
1034
|
+
throw new Error(`Error finalizing node signatures in transfer: ${error}`);
|
|
1035
|
+
}
|
|
1036
|
+
}
|
|
1037
|
+
async cancelSendTransfer(transfer) {
|
|
1038
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
1039
|
+
this.config.getCoordinatorAddress()
|
|
1040
|
+
);
|
|
1041
|
+
try {
|
|
1042
|
+
const response = await sparkClient.cancel_send_transfer({
|
|
1043
|
+
transferId: transfer.id,
|
|
1044
|
+
senderIdentityPublicKey: await this.config.signer.getIdentityPublicKey()
|
|
1045
|
+
});
|
|
1046
|
+
return response.transfer;
|
|
1047
|
+
} catch (error) {
|
|
1048
|
+
throw new Error(`Error canceling send transfer: ${error}`);
|
|
1049
|
+
}
|
|
1050
|
+
}
|
|
1051
|
+
async queryPendingTransfersBySender() {
|
|
1052
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
1053
|
+
this.config.getCoordinatorAddress()
|
|
1054
|
+
);
|
|
1055
|
+
try {
|
|
1056
|
+
return await sparkClient.query_pending_transfers({
|
|
1057
|
+
participant: {
|
|
1058
|
+
$case: "senderIdentityPublicKey",
|
|
1059
|
+
senderIdentityPublicKey: await this.config.signer.getIdentityPublicKey()
|
|
1060
|
+
}
|
|
1061
|
+
});
|
|
1062
|
+
} catch (error) {
|
|
1063
|
+
throw new Error(`Error querying pending transfers by sender: ${error}`);
|
|
1064
|
+
}
|
|
1065
|
+
}
|
|
1066
|
+
// Rebuilds a chain of node transactions with refreshed (decremented) timelocks
// plus a new refund tx for the final leaf, has the operators co-sign all of
// them, and finalizes the signatures with the coordinator.
//
// `nodes` is ordered root-to-leaf; the first node spends `parentNode`'s tx,
// each subsequent node spends the previous rebuilt tx, and the refund signing
// job is appended last (so signingResults[nodes.length] is the refund).
async refreshTimelockNodes(nodes, parentNode, signingPubKey) {
  if (nodes.length === 0) {
    throw Error("no nodes to refresh");
  }
  const signingJobs = [];
  const newNodeTxs = [];
  // Rebuild each node tx: same outputs, but the first node's input gets the
  // next timelock sequence while every later node restarts at the initial
  // sequence and points at the previous rebuilt tx.
  for (let i = 0; i < nodes.length; i++) {
    const node = nodes[i];
    if (!node) {
      throw Error("could not get node");
    }
    const nodeTx = getTxFromRawTxBytes(node?.nodeTx);
    const input = nodeTx.getInput(0);
    if (!input) {
      throw Error("Could not fetch tx input");
    }
    const newTx = new Transaction({ allowUnknownOutputs: true });
    for (let j = 0; j < nodeTx.outputsLength; j++) {
      newTx.addOutput(nodeTx.getOutput(j));
    }
    if (i === 0) {
      // Root of the refreshed chain: advance the relative timelock.
      const currSequence = input.sequence;
      newTx.addInput({
        ...input,
        sequence: getNextTransactionSequence(currSequence)
      });
    } else {
      // Descendants re-anchor onto the freshly rebuilt parent tx.
      newTx.addInput({
        ...input,
        sequence: initialSequence(),
        txid: newNodeTxs[i - 1]?.id
      });
    }
    signingJobs.push({
      signingPublicKey: signingPubKey,
      rawTx: newTx.toBytes(),
      signingNonceCommitment: await this.config.signer.getRandomSigningCommitment()
    });
    newNodeTxs[i] = newTx;
  }
  // The last node is the leaf; rebuild its refund tx on top of the new leaf tx.
  const leaf = nodes[nodes.length - 1];
  if (!leaf?.refundTx) {
    throw Error("leaf does not have refund tx");
  }
  const refundTx = getTxFromRawTxBytes(leaf?.refundTx);
  const newRefundTx = new Transaction({ allowUnknownOutputs: true });
  for (let j = 0; j < refundTx.outputsLength; j++) {
    newRefundTx.addOutput(refundTx.getOutput(j));
  }
  const refundTxInput = refundTx.getInput(0);
  if (!refundTxInput) {
    throw Error("refund tx doesn't have input");
  }
  if (!newNodeTxs[newNodeTxs.length - 1]) {
    throw Error("Could not get last node tx");
  }
  newRefundTx.addInput({
    ...refundTxInput,
    sequence: initialSequence(),
    txid: getTxId(newNodeTxs[newNodeTxs.length - 1])
  });
  const refundSigningJob = {
    signingPublicKey: signingPubKey,
    rawTx: newRefundTx.toBytes(),
    signingNonceCommitment: await this.config.signer.getRandomSigningCommitment()
  };
  signingJobs.push(refundSigningJob);
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  const response = await sparkClient.refresh_timelock({
    leafId: leaf.id,
    ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
    signingJobs
  });
  if (signingJobs.length !== response.signingResults.length) {
    throw Error(
      `number of signing jobs and signing results do not match: ${signingJobs.length} !== ${response.signingResults.length}`
    );
  }
  let nodeSignatures = [];
  let leafSignature;
  let refundSignature;
  let leafNodeId;
  // Sign and aggregate each result. Index mapping: i < nodes.length are node
  // txs (parent is parentNode for i === 0, otherwise newNodeTxs[i-1]);
  // i === nodes.length is the refund tx, which spends output 0 of the new leaf.
  for (let i = 0; i < response.signingResults.length; i++) {
    const signingResult = response.signingResults[i];
    const signingJob = signingJobs[i];
    if (!signingJob || !signingResult) {
      throw Error("Signing job does not exist");
    }
    if (!signingJob.signingNonceCommitment) {
      throw Error("nonce commitment does not exist");
    }
    const rawTx = getTxFromRawTxBytes(signingJob.rawTx);
    let parentTx;
    let nodeId;
    let vout;
    if (i === nodes.length) {
      // Refund job: attribute it to the leaf node, spending output 0.
      nodeId = nodes[i - 1]?.id;
      parentTx = newNodeTxs[i - 1];
      vout = 0;
    } else if (i === 0) {
      nodeId = nodes[i]?.id;
      parentTx = getTxFromRawTxBytes(parentNode.nodeTx);
      vout = nodes[i]?.vout;
    } else {
      nodeId = nodes[i]?.id;
      parentTx = newNodeTxs[i - 1];
      vout = nodes[i]?.vout;
    }
    if (!parentTx || !nodeId || vout === void 0) {
      throw Error("Could not parse signing results");
    }
    const txOut = parentTx.getOutput(vout);
    const rawTxSighash = getSigHashFromTx(rawTx, 0, txOut);
    // Our FROST share over the sighash...
    const userSignature = await this.config.signer.signFrost({
      message: rawTxSighash,
      privateAsPubKey: signingPubKey,
      publicKey: signingPubKey,
      verifyingKey: signingResult.verifyingKey,
      selfCommitment: signingJob.signingNonceCommitment,
      statechainCommitments: signingResult.signingResult?.signingNonceCommitments,
      adaptorPubKey: new Uint8Array()
    });
    // ...aggregated with the operators' shares into a full signature.
    const signature = await this.config.signer.aggregateFrost({
      message: rawTxSighash,
      statechainSignatures: signingResult.signingResult?.signatureShares,
      statechainPublicKeys: signingResult.signingResult?.publicKeys,
      verifyingKey: signingResult.verifyingKey,
      statechainCommitments: signingResult.signingResult?.signingNonceCommitments,
      selfCommitment: signingJob.signingNonceCommitment,
      publicKey: signingPubKey,
      selfSignature: userSignature,
      adaptorPubKey: new Uint8Array()
    });
    if (i !== nodes.length && i !== nodes.length - 1) {
      // Interior node: node-tx signature only.
      nodeSignatures.push({
        nodeId,
        nodeTxSignature: signature,
        refundTxSignature: new Uint8Array()
      });
    } else if (i === nodes.length) {
      refundSignature = signature;
    } else if (i === nodes.length - 1) {
      // Leaf node: held back so it can be paired with the refund signature.
      leafNodeId = nodeId;
      leafSignature = signature;
    }
  }
  if (!leafSignature || !refundSignature || !leafNodeId) {
    throw Error("leaf or refund signature does not exist");
  }
  nodeSignatures.push({
    nodeId: leafNodeId,
    nodeTxSignature: leafSignature,
    refundTxSignature: refundSignature
  });
  return await sparkClient.finalize_node_signatures({
    intent: 3 /* REFRESH */,
    nodeSignatures
  });
}
|
|
1227
|
+
// Extends a leaf's timelock by inserting a new node tx between the current
// node tx and a fresh refund tx: the new node tx spends the old node tx with
// the next sequence (derived from the current refund tx's sequence), and the
// new refund tx restarts at the initial sequence on top of it. Both txs are
// FROST co-signed with the operators and finalized with intent EXTEND.
async extendTimelock(node, signingPubKey) {
  const nodeTx = getTxFromRawTxBytes(node.nodeTx);
  const refundTx = getTxFromRawTxBytes(node.refundTx);
  const refundSequence = refundTx.getInput(0).sequence || 0;
  const newNodeOutPoint = {
    txid: hexToBytes(nodeTx.id),
    index: 0
  };
  const newNodeSequence = getNextTransactionSequence(refundSequence);
  const newNodeTx = new Transaction({ allowUnknownOutputs: true });
  newNodeTx.addInput({ ...newNodeOutPoint, sequence: newNodeSequence });
  newNodeTx.addOutput(nodeTx.getOutput(0));
  // Ephemeral anchor output for CPFP fee-bumping.
  newNodeTx.addOutput(getEphemeralAnchorOutput());
  const newRefundOutPoint = {
    txid: hexToBytes(getTxId(newNodeTx)),
    index: 0
  };
  const amountSats = refundTx.getOutput(0).amount;
  if (amountSats === void 0) {
    throw new Error("Amount not found in extendTimelock");
  }
  // The fresh refund restarts the relative timelock from its initial value.
  const newRefundTx = createRefundTx(
    initialSequence(),
    newRefundOutPoint,
    amountSats,
    signingPubKey,
    this.config.getNetwork()
  );
  // Both sighashes commit to output 0 of the existing node tx as the prevout.
  const nodeSighash = getSigHashFromTx(newNodeTx, 0, nodeTx.getOutput(0));
  const refundSighash = getSigHashFromTx(newRefundTx, 0, nodeTx.getOutput(0));
  const newNodeSigningJob = {
    signingPublicKey: signingPubKey,
    rawTx: newNodeTx.toBytes(),
    signingNonceCommitment: await this.config.signer.getRandomSigningCommitment()
  };
  const newRefundSigningJob = {
    signingPublicKey: signingPubKey,
    rawTx: newRefundTx.toBytes(),
    signingNonceCommitment: await this.config.signer.getRandomSigningCommitment()
  };
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  const response = await sparkClient.extend_leaf({
    leafId: node.id,
    ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
    nodeTxSigningJob: newNodeSigningJob,
    refundTxSigningJob: newRefundSigningJob
  });
  if (!response.nodeTxSigningResult || !response.refundTxSigningResult) {
    throw new Error("Signing result does not exist");
  }
  // Produce our FROST shares for both txs...
  const nodeUserSig = await this.config.signer.signFrost({
    message: nodeSighash,
    privateAsPubKey: signingPubKey,
    publicKey: signingPubKey,
    verifyingKey: response.nodeTxSigningResult.verifyingKey,
    selfCommitment: newNodeSigningJob.signingNonceCommitment,
    statechainCommitments: response.nodeTxSigningResult.signingResult?.signingNonceCommitments,
    adaptorPubKey: new Uint8Array()
  });
  const refundUserSig = await this.config.signer.signFrost({
    message: refundSighash,
    privateAsPubKey: signingPubKey,
    publicKey: signingPubKey,
    verifyingKey: response.refundTxSigningResult.verifyingKey,
    selfCommitment: newRefundSigningJob.signingNonceCommitment,
    statechainCommitments: response.refundTxSigningResult.signingResult?.signingNonceCommitments,
    adaptorPubKey: new Uint8Array()
  });
  // ...and aggregate them with the operators' shares into full signatures.
  const nodeSig = await this.config.signer.aggregateFrost({
    message: nodeSighash,
    statechainSignatures: response.nodeTxSigningResult.signingResult?.signatureShares,
    statechainPublicKeys: response.nodeTxSigningResult.signingResult?.publicKeys,
    verifyingKey: response.nodeTxSigningResult.verifyingKey,
    statechainCommitments: response.nodeTxSigningResult.signingResult?.signingNonceCommitments,
    selfCommitment: newNodeSigningJob.signingNonceCommitment,
    publicKey: signingPubKey,
    selfSignature: nodeUserSig,
    adaptorPubKey: new Uint8Array()
  });
  const refundSig = await this.config.signer.aggregateFrost({
    message: refundSighash,
    statechainSignatures: response.refundTxSigningResult.signingResult?.signatureShares,
    statechainPublicKeys: response.refundTxSigningResult.signingResult?.publicKeys,
    verifyingKey: response.refundTxSigningResult.verifyingKey,
    statechainCommitments: response.refundTxSigningResult.signingResult?.signingNonceCommitments,
    selfCommitment: newRefundSigningJob.signingNonceCommitment,
    publicKey: signingPubKey,
    selfSignature: refundUserSig,
    adaptorPubKey: new Uint8Array()
  });
  return await sparkClient.finalize_node_signatures({
    intent: 4 /* EXTEND */,
    nodeSignatures: [
      {
        nodeId: response.leafId,
        nodeTxSignature: nodeSig,
        refundTxSignature: refundSig
      }
    ]
  });
}
|
|
1330
|
+
};
|
|
1331
|
+
|
|
1332
|
+
// src/services/coop-exit.ts
|
|
1333
|
+
var crypto2 = getCrypto();
// Cooperative exit: moves leaf funds on-chain by swapping leaves to an exit
// service (SSP) via an exit tx plus per-leaf connector outputs.
var CoopExitService = class extends BaseTransferService {
  constructor(config, connectionManager) {
    super(config, connectionManager);
  }
  // Signs connector-bound refund txs for the given leaves, then tweaks the
  // sender keys over to the receiver. Returns the tweaked transfer and the
  // map of nodeId -> refund signature.
  async getConnectorRefundSignatures({
    leaves,
    exitTxId,
    connectorOutputs,
    receiverPubKey
  }) {
    const { transfer, signaturesMap } = await this.signCoopExitRefunds(
      leaves,
      exitTxId,
      connectorOutputs,
      receiverPubKey
    );
    const transferTweak = await this.sendTransferTweakKey(
      transfer,
      leaves,
      signaturesMap
    );
    return { transfer: transferTweak, signaturesMap };
  }
  // Builds a refund tx with two inputs — the leaf's node outpoint (carrying
  // the timelock sequence) and the connector outpoint — paying the full leaf
  // amount to the receiver's P2TR script.
  createConnectorRefundTransaction(sequence, nodeOutPoint, connectorOutput, amountSats, receiverPubKey) {
    const refundTx = new Transaction2();
    if (!nodeOutPoint.txid || nodeOutPoint.index === void 0) {
      throw new Error("Node outpoint txid or index is undefined");
    }
    refundTx.addInput({
      txid: nodeOutPoint.txid,
      index: nodeOutPoint.index,
      sequence
    });
    refundTx.addInput(connectorOutput);
    const receiverScript = getP2TRScriptFromPublicKey(
      receiverPubKey,
      this.config.getNetwork()
    );
    refundTx.addOutput({
      script: receiverScript,
      amount: amountSats
    });
    return refundTx;
  }
  // Initiates the cooperative exit with the coordinator (one connector output
  // per leaf required) and completes the FROST refund signatures.
  async signCoopExitRefunds(leaves, exitTxId, connectorOutputs, receiverPubKey) {
    if (leaves.length !== connectorOutputs.length) {
      throw new Error("Number of leaves and connector outputs must match");
    }
    const signingJobs = [];
    const leafDataMap = /* @__PURE__ */ new Map();
    for (let i = 0; i < leaves.length; i++) {
      const leaf = leaves[i];
      const connectorOutput = connectorOutputs[i];
      if (!leaf?.leaf) {
        throw new Error("Leaf not found");
      }
      if (!connectorOutput) {
        throw new Error("Connector output not found");
      }
      // Advance the refund timelock relative to the leaf's current refund tx.
      const currentRefundTx = getTxFromRawTxBytes(leaf.leaf.refundTx);
      const sequence = getNextTransactionSequence(
        currentRefundTx.getInput(0).sequence
      );
      const refundTx = this.createConnectorRefundTransaction(
        sequence,
        currentRefundTx.getInput(0),
        connectorOutput,
        BigInt(leaf.leaf.value),
        receiverPubKey
      );
      const signingNonceCommitment = await this.config.signer.getRandomSigningCommitment();
      const signingJob = {
        leafId: leaf.leaf.id,
        refundTxSigningJob: {
          signingPublicKey: leaf.signingPubKey,
          rawTx: refundTx.toBytes(),
          signingNonceCommitment
        }
      };
      signingJobs.push(signingJob);
      const tx = getTxFromRawTxBytes(leaf.leaf.nodeTx);
      leafDataMap.set(leaf.leaf.id, {
        signingPubKey: leaf.signingPubKey,
        refundTx,
        signingNonceCommitment,
        tx,
        vout: leaf.leaf.vout,
        receivingPubkey: receiverPubKey
      });
    }
    const sparkClient = await this.connectionManager.createSparkClient(
      this.config.getCoordinatorAddress()
    );
    let response;
    try {
      response = await sparkClient.cooperative_exit({
        transfer: {
          transferId: crypto2.randomUUID(),
          leavesToSend: signingJobs,
          ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
          receiverIdentityPublicKey: receiverPubKey,
          // NOTE(review): 24 * 60 * 1e3 ms is 24 minutes; if a 24-hour expiry
          // was intended this should be 24 * 60 * 60 * 1e3 — confirm.
          expiryTime: new Date(Date.now() + 24 * 60 * 1e3)
        },
        exitId: crypto2.randomUUID(),
        exitTxid: exitTxId
      });
    } catch (error) {
      throw new Error(`Error initiating cooperative exit: ${error}`);
    }
    if (!response.transfer) {
      throw new Error("Failed to initiate cooperative exit");
    }
    const signatures = await this.signRefunds(
      leafDataMap,
      response.signingResults
    );
    const signaturesMap = /* @__PURE__ */ new Map();
    for (const signature of signatures) {
      signaturesMap.set(signature.nodeId, signature.refundTxSignature);
    }
    return { transfer: response.transfer, signaturesMap };
  }
};
|
|
1457
|
+
|
|
1458
|
+
// src/services/lightning.ts
|
|
1459
|
+
import {
|
|
1460
|
+
bytesToNumberBE,
|
|
1461
|
+
hexToBytes as hexToBytes2,
|
|
1462
|
+
numberToBytesBE as numberToBytesBE2
|
|
1463
|
+
} from "@noble/curves/abstract/utils";
|
|
1464
|
+
import { secp256k1 as secp256k12 } from "@noble/curves/secp256k1";
|
|
1465
|
+
import { sha256 as sha2562 } from "@scure/btc-signer/utils";
|
|
1466
|
+
import { decode } from "light-bolt11-decoder";
|
|
1467
|
+
var crypto3 = getCrypto();
|
|
1468
|
+
var LightningService = class {
|
|
1469
|
+
config;
|
|
1470
|
+
connectionManager;
|
|
1471
|
+
constructor(config, connectionManager) {
|
|
1472
|
+
this.config = config;
|
|
1473
|
+
this.connectionManager = connectionManager;
|
|
1474
|
+
}
|
|
1475
|
+
async createLightningInvoice({
|
|
1476
|
+
invoiceCreator,
|
|
1477
|
+
amountSats,
|
|
1478
|
+
memo
|
|
1479
|
+
}) {
|
|
1480
|
+
const randBytes = crypto3.getRandomValues(new Uint8Array(32));
|
|
1481
|
+
const preimage = numberToBytesBE2(
|
|
1482
|
+
bytesToNumberBE(randBytes) % secp256k12.CURVE.n,
|
|
1483
|
+
32
|
|
1484
|
+
);
|
|
1485
|
+
return await this.createLightningInvoiceWithPreImage({
|
|
1486
|
+
invoiceCreator,
|
|
1487
|
+
amountSats,
|
|
1488
|
+
memo,
|
|
1489
|
+
preimage
|
|
1490
|
+
});
|
|
1491
|
+
}
|
|
1492
|
+
// Creates a Lightning invoice for a caller-supplied preimage: the invoice is
// generated from sha256(preimage), then the preimage is secret-shared with
// proofs and a share is stored at every signing operator (in parallel).
// Throws if invoice creation fails or if any operator rejects its share.
async createLightningInvoiceWithPreImage({
  invoiceCreator,
  amountSats,
  memo,
  preimage
}) {
  const paymentHash = sha2562(preimage);
  const invoice = await invoiceCreator(amountSats, paymentHash, memo);
  if (!invoice) {
    throw new Error("Error creating lightning invoice");
  }
  const shares = await this.config.signer.splitSecretWithProofs({
    secret: preimage,
    curveOrder: secp256k12.CURVE.n,
    threshold: this.config.getConfig().threshold,
    numShares: Object.keys(this.config.getConfig().signingOperators).length
  });
  // Errors are collected across all operators; the first one is re-thrown
  // after every request has settled.
  const errors = [];
  const promises = Object.entries(
    this.config.getConfig().signingOperators
  ).map(async ([_, operator]) => {
    // NOTE(review): shares are indexed here by operator.id directly, while
    // other code paths use this.findShare(shares, operator.id) — presumably
    // equivalent; confirm the share container's indexing.
    const share = shares[operator.id];
    if (!share) {
      throw new Error("Share not found");
    }
    const sparkClient = await this.connectionManager.createSparkClient(
      operator.address
    );
    try {
      await sparkClient.store_preimage_share({
        paymentHash,
        preimageShare: {
          secretShare: numberToBytesBE2(share.share, 32),
          proofs: share.proofs
        },
        threshold: this.config.getConfig().threshold,
        invoiceString: invoice,
        userIdentityPublicKey: await this.config.signer.getIdentityPublicKey()
      });
    } catch (e) {
      errors.push(e);
    }
  });
  await Promise.all(promises);
  if (errors.length > 0) {
    throw new Error(`Error creating lightning invoice: ${errors[0]}`);
  }
  return invoice;
}
|
|
1541
|
+
async swapNodesForPreimage({
|
|
1542
|
+
leaves,
|
|
1543
|
+
receiverIdentityPubkey,
|
|
1544
|
+
paymentHash,
|
|
1545
|
+
invoiceString,
|
|
1546
|
+
isInboundPayment
|
|
1547
|
+
}) {
|
|
1548
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
1549
|
+
this.config.getCoordinatorAddress()
|
|
1550
|
+
);
|
|
1551
|
+
let signingCommitments;
|
|
1552
|
+
try {
|
|
1553
|
+
signingCommitments = await sparkClient.get_signing_commitments({
|
|
1554
|
+
nodeIds: leaves.map((leaf) => leaf.leaf.id)
|
|
1555
|
+
});
|
|
1556
|
+
} catch (error) {
|
|
1557
|
+
throw new Error(`Error getting signing commitments: ${error}`);
|
|
1558
|
+
}
|
|
1559
|
+
const userSignedRefunds = await this.signRefunds(
|
|
1560
|
+
leaves,
|
|
1561
|
+
signingCommitments.signingCommitments,
|
|
1562
|
+
receiverIdentityPubkey
|
|
1563
|
+
);
|
|
1564
|
+
const transferId = crypto3.randomUUID();
|
|
1565
|
+
let bolt11String = "";
|
|
1566
|
+
let amountSats = 0;
|
|
1567
|
+
if (invoiceString) {
|
|
1568
|
+
const decodedInvoice = decode(invoiceString);
|
|
1569
|
+
let amountMsats = 0;
|
|
1570
|
+
try {
|
|
1571
|
+
amountMsats = Number(
|
|
1572
|
+
decodedInvoice.sections.find((section) => section.name === "amount")?.value
|
|
1573
|
+
);
|
|
1574
|
+
} catch (error) {
|
|
1575
|
+
console.error("Error decoding invoice", error);
|
|
1576
|
+
}
|
|
1577
|
+
amountSats = amountMsats / 1e3;
|
|
1578
|
+
bolt11String = invoiceString;
|
|
1579
|
+
}
|
|
1580
|
+
const reason = isInboundPayment ? 1 /* REASON_RECEIVE */ : 0 /* REASON_SEND */;
|
|
1581
|
+
let response;
|
|
1582
|
+
try {
|
|
1583
|
+
response = await sparkClient.initiate_preimage_swap({
|
|
1584
|
+
paymentHash,
|
|
1585
|
+
userSignedRefunds,
|
|
1586
|
+
reason,
|
|
1587
|
+
invoiceAmount: {
|
|
1588
|
+
invoiceAmountProof: {
|
|
1589
|
+
bolt11Invoice: bolt11String
|
|
1590
|
+
},
|
|
1591
|
+
valueSats: amountSats
|
|
1592
|
+
},
|
|
1593
|
+
transfer: {
|
|
1594
|
+
transferId,
|
|
1595
|
+
ownerIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
|
|
1596
|
+
receiverIdentityPublicKey: receiverIdentityPubkey
|
|
1597
|
+
},
|
|
1598
|
+
receiverIdentityPublicKey: receiverIdentityPubkey,
|
|
1599
|
+
feeSats: 0
|
|
1600
|
+
});
|
|
1601
|
+
} catch (error) {
|
|
1602
|
+
throw new Error(`Error initiating preimage swap: ${error}`);
|
|
1603
|
+
}
|
|
1604
|
+
return response;
|
|
1605
|
+
}
|
|
1606
|
+
async queryUserSignedRefunds(paymentHash) {
|
|
1607
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
1608
|
+
this.config.getCoordinatorAddress()
|
|
1609
|
+
);
|
|
1610
|
+
let response;
|
|
1611
|
+
try {
|
|
1612
|
+
response = await sparkClient.query_user_signed_refunds({
|
|
1613
|
+
paymentHash
|
|
1614
|
+
});
|
|
1615
|
+
} catch (error) {
|
|
1616
|
+
throw new Error(`Error querying user signed refunds: ${error}`);
|
|
1617
|
+
}
|
|
1618
|
+
return response.userSignedRefunds;
|
|
1619
|
+
}
|
|
1620
|
+
validateUserSignedRefund(userSignedRefund) {
|
|
1621
|
+
const refundTx = getTxFromRawTxBytes(userSignedRefund.refundTx);
|
|
1622
|
+
return refundTx.getOutput(0).amount || 0n;
|
|
1623
|
+
}
|
|
1624
|
+
async providePreimage(preimage) {
|
|
1625
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
1626
|
+
this.config.getCoordinatorAddress()
|
|
1627
|
+
);
|
|
1628
|
+
const paymentHash = sha2562(preimage);
|
|
1629
|
+
let response;
|
|
1630
|
+
try {
|
|
1631
|
+
response = await sparkClient.provide_preimage({
|
|
1632
|
+
preimage,
|
|
1633
|
+
paymentHash
|
|
1634
|
+
});
|
|
1635
|
+
} catch (error) {
|
|
1636
|
+
throw new Error(`Error providing preimage: ${error}`);
|
|
1637
|
+
}
|
|
1638
|
+
if (!response.transfer) {
|
|
1639
|
+
throw new Error("No transfer returned from coordinator");
|
|
1640
|
+
}
|
|
1641
|
+
return response.transfer;
|
|
1642
|
+
}
|
|
1643
|
+
async signRefunds(leaves, signingCommitments, receiverIdentityPubkey) {
|
|
1644
|
+
const userSignedRefunds = [];
|
|
1645
|
+
for (let i = 0; i < leaves.length; i++) {
|
|
1646
|
+
const leaf = leaves[i];
|
|
1647
|
+
if (!leaf?.leaf) {
|
|
1648
|
+
throw new Error("Leaf not found in signRefunds");
|
|
1649
|
+
}
|
|
1650
|
+
const nodeTx = getTxFromRawTxBytes(leaf.leaf.nodeTx);
|
|
1651
|
+
const nodeOutPoint = {
|
|
1652
|
+
txid: hexToBytes2(getTxId(nodeTx)),
|
|
1653
|
+
index: 0
|
|
1654
|
+
};
|
|
1655
|
+
const currRefundTx = getTxFromRawTxBytes(leaf.leaf.refundTx);
|
|
1656
|
+
const nextSequence = getNextTransactionSequence(
|
|
1657
|
+
currRefundTx.getInput(0).sequence
|
|
1658
|
+
);
|
|
1659
|
+
const amountSats = currRefundTx.getOutput(0).amount;
|
|
1660
|
+
if (amountSats === void 0) {
|
|
1661
|
+
throw new Error("Amount not found in signRefunds");
|
|
1662
|
+
}
|
|
1663
|
+
const refundTx = createRefundTx(
|
|
1664
|
+
nextSequence,
|
|
1665
|
+
nodeOutPoint,
|
|
1666
|
+
amountSats,
|
|
1667
|
+
receiverIdentityPubkey,
|
|
1668
|
+
this.config.getNetwork()
|
|
1669
|
+
);
|
|
1670
|
+
const sighash = getSigHashFromTx(refundTx, 0, nodeTx.getOutput(0));
|
|
1671
|
+
const signingCommitment = await this.config.signer.getRandomSigningCommitment();
|
|
1672
|
+
const signingNonceCommitments = signingCommitments[i]?.signingNonceCommitments;
|
|
1673
|
+
if (!signingNonceCommitments) {
|
|
1674
|
+
throw new Error("Signing nonce commitments not found in signRefunds");
|
|
1675
|
+
}
|
|
1676
|
+
const signingResult = await this.config.signer.signFrost({
|
|
1677
|
+
message: sighash,
|
|
1678
|
+
publicKey: leaf.signingPubKey,
|
|
1679
|
+
privateAsPubKey: leaf.signingPubKey,
|
|
1680
|
+
selfCommitment: signingCommitment,
|
|
1681
|
+
statechainCommitments: signingNonceCommitments,
|
|
1682
|
+
adaptorPubKey: new Uint8Array(),
|
|
1683
|
+
verifyingKey: leaf.leaf.verifyingPublicKey
|
|
1684
|
+
});
|
|
1685
|
+
userSignedRefunds.push({
|
|
1686
|
+
nodeId: leaf.leaf.id,
|
|
1687
|
+
refundTx: refundTx.toBytes(),
|
|
1688
|
+
userSignature: signingResult,
|
|
1689
|
+
userSignatureCommitment: signingCommitment,
|
|
1690
|
+
signingCommitments: {
|
|
1691
|
+
signingCommitments: signingNonceCommitments
|
|
1692
|
+
}
|
|
1693
|
+
});
|
|
1694
|
+
}
|
|
1695
|
+
return userSignedRefunds;
|
|
1696
|
+
}
|
|
1697
|
+
};
|
|
1698
|
+
|
|
1699
|
+
// src/spark-sdk.ts
|
|
1700
|
+
import { validateMnemonic } from "@scure/bip39";
|
|
1701
|
+
import { wordlist } from "@scure/bip39/wordlists/english";
|
|
1702
|
+
import { Mutex } from "async-mutex";
|
|
1703
|
+
import bitcoin from "bitcoinjs-lib";
|
|
1704
|
+
|
|
1705
|
+
// src/services/tree-creation.ts
|
|
1706
|
+
import { Address, OutScript, Transaction as Transaction3 } from "@scure/btc-signer";
|
|
1707
|
+
import { sha256 as sha2563 } from "@scure/btc-signer/utils";
|
|
1708
|
+
// Initial relative timelock (2000) applied to newly created tree node/refund
// transactions; decremented on each re-signing elsewhere in the SDK.
var INITIAL_TIME_LOCK2 = 2e3;
|
|
1709
|
+
/**
 * Builds Spark statechain trees: prepares deposit addresses for a two-child
 * tree, constructs the node/refund transaction chain, and runs the two-phase
 * FROST signing with the operators.
 *
 * NOTE(review): generated bundle output (src/services/tree-creation.ts);
 * transaction construction order and signing order are protocol-sensitive.
 */
var TreeCreationService = class {
  // Wallet configuration service (signer, network, coordinator address).
  config;
  // Shared gRPC connection manager.
  connectionManager;
  constructor(config, connectionManager) {
    this.config = config;
    this.connectionManager = connectionManager;
  }
  /**
   * Asks the coordinator to prepare deposit addresses for a new two-child
   * tree rooted at either an on-chain UTXO (parentTx) or an existing tree
   * node (parentNode).
   *
   * @returns the local root descriptor with addresses/verifying keys applied.
   * @throws if neither parent source is provided or the RPC fails.
   */
  async generateDepositAddressForTree(vout, parentSigningPublicKey, parentTx, parentNode) {
    if (!parentTx && !parentNode) {
      throw new Error("No parent tx or parent node provided");
    }
    // Tree id seeds child key derivation: node id when extending, txid otherwise.
    const id = parentNode?.id ?? getTxId(parentTx);
    const tree = await this.createDepositAddressTree(
      parentSigningPublicKey,
      id
    );
    const addressRequestNodes = this.createAddressRequestNodeFromTreeNodes(tree);
    const sparkClient = await this.connectionManager.createSparkClient(
      this.config.getCoordinatorAddress()
    );
    const request = {
      userIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
      node: void 0
    };
    if (parentNode) {
      if (!parentNode.parentNodeId) {
        throw new Error("Parent node ID is undefined");
      }
      // Extend from an existing tree node's output.
      request.source = {
        $case: "parentNodeOutput",
        parentNodeOutput: {
          nodeId: parentNode.parentNodeId,
          vout
        }
      };
    } else if (parentTx) {
      // Root the tree at an on-chain UTXO.
      request.source = {
        $case: "onChainUtxo",
        onChainUtxo: {
          vout,
          rawTx: parentTx.toBytes(),
          network: this.config.getNetworkProto()
        }
      };
    } else {
      throw new Error("No parent node or parent tx provided");
    }
    request.node = {
      userPublicKey: parentSigningPublicKey,
      children: addressRequestNodes
    };
    const root = {
      address: void 0,
      signingPublicKey: parentSigningPublicKey,
      children: tree
    };
    let response;
    try {
      response = await sparkClient.prepare_tree_address(request);
    } catch (error) {
      throw new Error(`Error preparing tree address: ${error}`);
    }
    if (!response.node) {
      throw new Error("No node found in response");
    }
    // Copy the coordinator-assigned addresses/verifying keys onto our tree.
    this.applyAddressNodesToTree([root], [response.node]);
    return root;
  }
  /**
   * Creates the tree with the coordinator: builds signing jobs for every
   * node/refund transaction, submits them, co-signs the operator results,
   * and finalizes the node signatures.
   *
   * @param root - address-annotated tree from generateDepositAddressForTree.
   * @param createLeaves - forwarded to buildCreationNodesFromTree (leaf flag).
   * @returns the finalize_node_signatures response.
   */
  async createTree(vout, root, createLeaves, parentTx, parentNode) {
    const request = {
      userIdentityPublicKey: await this.config.signer.getIdentityPublicKey(),
      node: void 0
    };
    let tx;
    if (parentTx) {
      tx = parentTx;
      request.source = {
        $case: "onChainUtxo",
        onChainUtxo: {
          vout,
          rawTx: parentTx.toBytes(),
          network: this.config.getNetworkProto()
        }
      };
    } else if (parentNode) {
      tx = getTxFromRawTxBytes(parentNode.nodeTx);
      if (!parentNode.parentNodeId) {
        throw new Error("Parent node ID is undefined");
      }
      request.source = {
        $case: "parentNodeOutput",
        parentNodeOutput: {
          nodeId: parentNode.parentNodeId,
          vout
        }
      };
    } else {
      throw new Error("No parent node or parent tx provided");
    }
    const rootCreationNode = await this.buildCreationNodesFromTree(
      vout,
      createLeaves,
      this.config.getNetwork(),
      root,
      tx
    );
    request.node = rootCreationNode;
    const sparkClient = await this.connectionManager.createSparkClient(
      this.config.getCoordinatorAddress()
    );
    let response;
    try {
      response = await sparkClient.create_tree(request);
    } catch (error) {
      throw new Error(`Error creating tree: ${error}`);
    }
    if (!response.node) {
      throw new Error("No node found in response");
    }
    const creationResultTreeRoot = response.node;
    // Co-sign every node/refund tx against the operators' signature shares.
    const nodeSignatures = await this.signTreeCreation(
      tx,
      vout,
      root,
      rootCreationNode,
      creationResultTreeRoot
    );
    let finalizeResp;
    try {
      finalizeResp = await sparkClient.finalize_node_signatures({
        nodeSignatures
      });
    } catch (error) {
      throw new Error(
        `Error finalizing node signatures in tree creation: ${error}`
      );
    }
    return finalizeResp;
  }
  /**
   * Derives the two child signing keys: the left key is derived from the node
   * id; the right key is chosen so that left + right = target (enforced via
   * subtractPrivateKeysGivenPublicKeys), so the parent key splits exactly.
   */
  async createDepositAddressTree(targetSigningPublicKey, nodeId) {
    const leftKey = await this.config.signer.generatePublicKey(sha2563(nodeId));
    const leftNode = {
      signingPublicKey: leftKey,
      children: []
    };
    const rightKey = await this.config.signer.subtractPrivateKeysGivenPublicKeys(
      targetSigningPublicKey,
      leftKey
    );
    const rightNode = {
      signingPublicKey: rightKey,
      children: []
    };
    return [leftNode, rightNode];
  }
  // Recursively converts local tree nodes into the AddressRequestNode wire shape.
  createAddressRequestNodeFromTreeNodes(treeNodes) {
    const results = [];
    for (const node of treeNodes) {
      const result = {
        userPublicKey: node.signingPublicKey,
        children: this.createAddressRequestNodeFromTreeNodes(node.children)
      };
      results.push(result);
    }
    return results;
  }
  // Recursively copies coordinator-assigned address + verifying key onto the
  // position-matched local tree (mutates `tree` in place).
  applyAddressNodesToTree(tree, addressNodes) {
    for (let i = 0; i < tree.length; i++) {
      if (!tree[i]) {
        throw new Error("Tree or address node is undefined");
      }
      if (!addressNodes[i]) {
        throw new Error("Address node is undefined");
      }
      tree[i].address = addressNodes[i].address?.address;
      tree[i].verificationKey = addressNodes[i].address?.verifyingKey;
      this.applyAddressNodesToTree(tree[i].children, addressNodes[i].children);
    }
  }
  /**
   * Builds the creation-node chain for one child: a node tx spending the
   * parent output, a timelocked child tx spending it, and a timelocked refund
   * tx paying the child's own P2TR address. Each tx gets a fresh signing
   * nonce commitment and signing job.
   */
  async buildChildCreationNode(node, parentTx, vout, network) {
    const internalCreationNode = {
      nodeTxSigningJob: void 0,
      refundTxSigningJob: void 0,
      children: []
    };
    const tx = new Transaction3();
    tx.addInput({
      txid: getTxId(parentTx),
      index: vout
    });
    const parentTxOut = parentTx.getOutput(vout);
    if (!parentTxOut?.script || !parentTxOut?.amount) {
      throw new Error("parentTxOut is undefined");
    }
    // Node tx forwards the full parent value to the same script.
    tx.addOutput({
      script: parentTxOut.script,
      amount: parentTxOut.amount
    });
    tx.addOutput(getEphemeralAnchorOutput());
    const signingNonceCommitment = await this.config.signer.getRandomSigningCommitment();
    const signingJob = {
      signingPublicKey: node.signingPublicKey,
      rawTx: tx.toBytes(),
      signingNonceCommitment
    };
    internalCreationNode.nodeTxSigningCommitment = signingNonceCommitment;
    internalCreationNode.nodeTxSigningJob = signingJob;
    // Relative-timelock sequence; bit 30 is OR'ed with the initial timelock —
    // NOTE(review): presumably a Spark-defined sequence flag, confirm against
    // the protocol spec (BIP-68 uses bit 31 as its disable flag).
    const sequence = 1 << 30 | INITIAL_TIME_LOCK2;
    const childCreationNode = {
      nodeTxSigningJob: void 0,
      refundTxSigningJob: void 0,
      children: []
    };
    const childTx = new Transaction3();
    childTx.addInput({
      txid: getTxId(tx),
      index: 0,
      sequence
    });
    childTx.addOutput({
      script: parentTxOut.script,
      amount: parentTxOut.amount
    });
    childTx.addOutput(getEphemeralAnchorOutput());
    const childSigningNonceCommitment = await this.config.signer.getRandomSigningCommitment();
    const childSigningJob = {
      signingPublicKey: node.signingPublicKey,
      rawTx: childTx.toBytes(),
      signingNonceCommitment: childSigningNonceCommitment
    };
    childCreationNode.nodeTxSigningCommitment = childSigningNonceCommitment;
    childCreationNode.nodeTxSigningJob = childSigningJob;
    // Refund tx pays the child's own key so the user can always exit.
    const refundTx = new Transaction3();
    refundTx.addInput({
      txid: getTxId(childTx),
      index: 0,
      sequence
    });
    const refundP2trAddress = getP2TRAddressFromPublicKey(
      node.signingPublicKey,
      network
    );
    const refundAddress = Address(getNetwork(network)).decode(
      refundP2trAddress
    );
    const refundPkScript = OutScript.encode(refundAddress);
    refundTx.addOutput({
      script: refundPkScript,
      amount: parentTxOut.amount
    });
    const refundSigningNonceCommitment = await this.config.signer.getRandomSigningCommitment();
    const refundSigningJob = {
      signingPublicKey: node.signingPublicKey,
      rawTx: refundTx.toBytes(),
      signingNonceCommitment: refundSigningNonceCommitment
    };
    childCreationNode.refundTxSigningCommitment = refundSigningNonceCommitment;
    childCreationNode.refundTxSigningJob = refundSigningJob;
    internalCreationNode.children.push(childCreationNode);
    return internalCreationNode;
  }
  /**
   * Builds the root creation node: a root tx spending the parent output with
   * one half-value output per child (plus an ephemeral anchor), then one
   * child creation chain per child via buildChildCreationNode.
   *
   * NOTE(review): `createLeaves` is accepted but not read here — confirm
   * whether that is intentional in the original source.
   */
  async buildCreationNodesFromTree(vout, createLeaves, network, root, parentTx) {
    const parentTxOutput = parentTx.getOutput(vout);
    if (!parentTxOutput?.script || !parentTxOutput?.amount) {
      throw new Error("parentTxOutput is undefined");
    }
    const rootNodeTx = new Transaction3();
    rootNodeTx.addInput({
      txid: getTxId(parentTx),
      index: vout
    });
    for (let i = 0; i < root.children.length; i++) {
      const child = root.children[i];
      if (!child || !child.address) {
        throw new Error("child address is undefined");
      }
      const childAddress = Address(getNetwork(network)).decode(child.address);
      const childPkScript = OutScript.encode(childAddress);
      // Each child receives half the parent value (bigint division).
      rootNodeTx.addOutput({
        script: childPkScript,
        amount: parentTxOutput.amount / 2n
      });
    }
    const anchor = getEphemeralAnchorOutput();
    rootNodeTx.addOutput(anchor);
    const rootNodeSigningCommitment = await this.config.signer.getRandomSigningCommitment();
    const rootNodeSigningJob = {
      signingPublicKey: root.signingPublicKey,
      rawTx: rootNodeTx.toBytes(),
      signingNonceCommitment: rootNodeSigningCommitment
    };
    const rootCreationNode = {
      nodeTxSigningJob: rootNodeSigningJob,
      refundTxSigningJob: void 0,
      children: []
    };
    rootCreationNode.nodeTxSigningCommitment = rootNodeSigningCommitment;
    const leftChild = root.children[0];
    const rightChild = root.children[1];
    if (!leftChild || !rightChild) {
      throw new Error("Root children are undefined");
    }
    const leftChildCreationNode = await this.buildChildCreationNode(
      leftChild,
      rootNodeTx,
      0,
      network
    );
    const rightChildCreationNode = await this.buildChildCreationNode(
      rightChild,
      rootNodeTx,
      1,
      network
    );
    rootCreationNode.children.push(leftChildCreationNode);
    rootCreationNode.children.push(rightChildCreationNode);
    return rootCreationNode;
  }
  /**
   * Co-signs one creation node: produces the user's FROST share for the node
   * tx (and refund tx if present) and aggregates it with the operators'
   * shares into final signatures. Skips either tx when its local nonce
   * commitment is absent, leaving an empty signature.
   *
   * @returns { tx, signature: { nodeId, nodeTxSignature, refundTxSignature } }.
   */
  async signNodeCreation(parentTx, vout, internalNode, creationNode, creationResponseNode) {
    if (!creationNode.nodeTxSigningJob?.signingPublicKey || !internalNode.verificationKey) {
      throw new Error("signingPublicKey or verificationKey is undefined");
    }
    const parentTxOutput = parentTx.getOutput(vout);
    if (!parentTxOutput) {
      throw new Error("parentTxOutput is undefined");
    }
    const tx = getTxFromRawTxBytes(creationNode.nodeTxSigningJob.rawTx);
    const txSighash = getSigHashFromTx(tx, 0, parentTxOutput);
    let nodeTxSignature = new Uint8Array();
    if (creationNode.nodeTxSigningCommitment) {
      // Round 1: our signature share over the node tx sighash.
      const userSignature = await this.config.signer.signFrost({
        message: txSighash,
        publicKey: creationNode.nodeTxSigningJob.signingPublicKey,
        privateAsPubKey: internalNode.signingPublicKey,
        selfCommitment: creationNode.nodeTxSigningCommitment,
        statechainCommitments: creationResponseNode.nodeTxSigningResult?.signingNonceCommitments,
        verifyingKey: internalNode.verificationKey
      });
      // Round 2: combine with the operators' shares into one signature.
      nodeTxSignature = await this.config.signer.aggregateFrost({
        message: txSighash,
        statechainSignatures: creationResponseNode.nodeTxSigningResult?.signatureShares,
        statechainPublicKeys: creationResponseNode.nodeTxSigningResult?.publicKeys,
        verifyingKey: internalNode.verificationKey,
        statechainCommitments: creationResponseNode.nodeTxSigningResult?.signingNonceCommitments,
        selfCommitment: creationNode.nodeTxSigningCommitment,
        selfSignature: userSignature,
        publicKey: internalNode.signingPublicKey
      });
    }
    let refundTxSignature = new Uint8Array();
    if (creationNode.refundTxSigningCommitment) {
      const rawTx = creationNode.refundTxSigningJob?.rawTx;
      if (!rawTx) {
        throw new Error("rawTx is undefined");
      }
      if (!creationNode.refundTxSigningJob?.signingPublicKey) {
        throw new Error("signingPublicKey is undefined");
      }
      const refundTx = getTxFromRawTxBytes(rawTx);
      const refundTxSighash = getSigHashFromTx(refundTx, 0, parentTxOutput);
      const refundSigningResponse = await this.config.signer.signFrost({
        message: refundTxSighash,
        publicKey: creationNode.refundTxSigningJob.signingPublicKey,
        privateAsPubKey: internalNode.signingPublicKey,
        selfCommitment: creationNode.refundTxSigningCommitment,
        statechainCommitments: creationResponseNode.refundTxSigningResult?.signingNonceCommitments,
        verifyingKey: internalNode.verificationKey
      });
      refundTxSignature = await this.config.signer.aggregateFrost({
        message: refundTxSighash,
        statechainSignatures: creationResponseNode.refundTxSigningResult?.signatureShares,
        statechainPublicKeys: creationResponseNode.refundTxSigningResult?.publicKeys,
        verifyingKey: internalNode.verificationKey,
        statechainCommitments: creationResponseNode.refundTxSigningResult?.signingNonceCommitments,
        selfCommitment: creationNode.refundTxSigningCommitment,
        selfSignature: refundSigningResponse,
        publicKey: internalNode.signingPublicKey
      });
    }
    return {
      tx,
      signature: {
        nodeId: creationResponseNode.nodeId,
        nodeTxSignature,
        refundTxSignature
      }
    };
  }
  /**
   * Signs the whole two-level tree: root first (its signed tx is the parent
   * of both children), then each child against root outputs 0 and 1.
   *
   * @returns the three node signatures [root, left, right] for finalization.
   */
  async signTreeCreation(tx, vout, root, rootCreationNode, creationResultTreeRoot) {
    const rootSignature = await this.signNodeCreation(
      tx,
      vout,
      root,
      rootCreationNode,
      creationResultTreeRoot
    );
    const firstRootChild = root.children[0];
    const secondRootChild = root.children[1];
    const firstRootChildCreationNode = rootCreationNode.children[0];
    const secondRootChildCreationNode = rootCreationNode.children[1];
    const firstRootChildCreationResult = creationResultTreeRoot.children[0];
    const secondRootChildCreationResult = creationResultTreeRoot.children[1];
    if (!firstRootChild || !secondRootChild) {
      throw new Error("Root children are undefined");
    }
    if (!firstRootChildCreationNode || !secondRootChildCreationNode) {
      throw new Error("Root child creation nodes are undefined");
    }
    if (!firstRootChildCreationResult || !secondRootChildCreationResult) {
      throw new Error("Root child creation results are undefined");
    }
    const leftChildSignature = await this.signNodeCreation(
      rootSignature.tx,
      0,
      firstRootChild,
      firstRootChildCreationNode,
      firstRootChildCreationResult
    );
    const rightChildSignature = await this.signNodeCreation(
      rootSignature.tx,
      1,
      secondRootChild,
      secondRootChildCreationNode,
      secondRootChildCreationResult
    );
    const signatures = [
      rootSignature.signature,
      leftChildSignature.signature,
      rightChildSignature.signature
    ];
    return signatures;
  }
};
|
|
2142
|
+
|
|
2143
|
+
// src/spark-sdk.ts
|
|
2144
|
+
// Upper bound on token leaves handled per operation elsewhere in the wallet.
var MAX_TOKEN_LEAVES = 100;
|
|
2145
|
+
var SparkWallet = class {
|
|
2146
|
+
// Wallet configuration service (signer, network, operator set).
config;
// Shared gRPC connection manager for all services below.
connectionManager;
// Handles on-chain deposits into the wallet.
depositService;
// Handles leaf transfers between Spark users.
transferService;
// Builds and signs new statechain trees.
treeCreationService;
// Lightning invoices / preimage swaps.
lightningService;
// Cooperative on-chain exits.
coopExitService;
// Token (non-BTC asset) transactions.
tokenTransactionService;
// Serializes concurrent transfer-claiming.
claimTransferMutex = new Mutex();
// Serializes all operations that read/replace `leaves`.
leavesMutex = new Mutex();
// Guard flag so only one leaf-optimization pass runs at a time.
optimizationInProgress = false;
// SSP client; null until initializeWallet runs.
sspClient = null;
// Wasm module handle; null until initWasm succeeds.
wasmModule = null;
// Cached AVAILABLE leaves owned by this wallet.
leaves = [];
// Token leaves keyed per token (populated by syncTokenLeaves).
tokenLeaves = /* @__PURE__ */ new Map();
|
|
2161
|
+
constructor(network, signer, config) {
|
|
2162
|
+
if (config) {
|
|
2163
|
+
this.config = WalletConfigService.withConfig(config, signer);
|
|
2164
|
+
} else {
|
|
2165
|
+
this.config = new WalletConfigService(network, signer);
|
|
2166
|
+
}
|
|
2167
|
+
this.connectionManager = new ConnectionManager(this.config);
|
|
2168
|
+
this.depositService = new DepositService(
|
|
2169
|
+
this.config,
|
|
2170
|
+
this.connectionManager
|
|
2171
|
+
);
|
|
2172
|
+
this.transferService = new TransferService(
|
|
2173
|
+
this.config,
|
|
2174
|
+
this.connectionManager
|
|
2175
|
+
);
|
|
2176
|
+
this.treeCreationService = new TreeCreationService(
|
|
2177
|
+
this.config,
|
|
2178
|
+
this.connectionManager
|
|
2179
|
+
);
|
|
2180
|
+
this.tokenTransactionService = new TokenTransactionService(
|
|
2181
|
+
this.config,
|
|
2182
|
+
this.connectionManager
|
|
2183
|
+
);
|
|
2184
|
+
this.lightningService = new LightningService(
|
|
2185
|
+
this.config,
|
|
2186
|
+
this.connectionManager
|
|
2187
|
+
);
|
|
2188
|
+
this.coopExitService = new CoopExitService(
|
|
2189
|
+
this.config,
|
|
2190
|
+
this.connectionManager
|
|
2191
|
+
);
|
|
2192
|
+
}
|
|
2193
|
+
async initWasm() {
|
|
2194
|
+
try {
|
|
2195
|
+
this.wasmModule = await initWasm();
|
|
2196
|
+
} catch (e) {
|
|
2197
|
+
console.error("Failed to initialize Wasm module", e);
|
|
2198
|
+
}
|
|
2199
|
+
}
|
|
2200
|
+
async initializeWallet(identityPublicKey) {
|
|
2201
|
+
await this.connectionManager.createClients();
|
|
2202
|
+
this.sspClient = new SspClient(identityPublicKey);
|
|
2203
|
+
await Promise.all([
|
|
2204
|
+
this.initWasm(),
|
|
2205
|
+
// Hacky but do this to store the deposit signing key in the signer
|
|
2206
|
+
this.config.signer.getDepositSigningKey()
|
|
2207
|
+
]);
|
|
2208
|
+
await this.syncWallet();
|
|
2209
|
+
}
|
|
2210
|
+
async getLeaves() {
|
|
2211
|
+
const sparkClient = await this.connectionManager.createSparkClient(
|
|
2212
|
+
this.config.getCoordinatorAddress()
|
|
2213
|
+
);
|
|
2214
|
+
const leaves = await sparkClient.query_nodes({
|
|
2215
|
+
source: {
|
|
2216
|
+
$case: "ownerIdentityPubkey",
|
|
2217
|
+
ownerIdentityPubkey: await this.config.signer.getIdentityPublicKey()
|
|
2218
|
+
},
|
|
2219
|
+
includeParents: false
|
|
2220
|
+
});
|
|
2221
|
+
return Object.entries(leaves.nodes).filter(([_, node]) => node.status === "AVAILABLE").map(([_, node]) => node);
|
|
2222
|
+
}
|
|
2223
|
+
async selectLeaves(targetAmount) {
|
|
2224
|
+
if (targetAmount <= 0) {
|
|
2225
|
+
throw new Error("Target amount must be positive");
|
|
2226
|
+
}
|
|
2227
|
+
const leaves = await this.getLeaves();
|
|
2228
|
+
if (leaves.length === 0) {
|
|
2229
|
+
return [];
|
|
2230
|
+
}
|
|
2231
|
+
leaves.sort((a, b) => b.value - a.value);
|
|
2232
|
+
let amount = 0;
|
|
2233
|
+
let nodes = [];
|
|
2234
|
+
for (const leaf of leaves) {
|
|
2235
|
+
if (targetAmount - amount >= leaf.value) {
|
|
2236
|
+
amount += leaf.value;
|
|
2237
|
+
nodes.push(leaf);
|
|
2238
|
+
}
|
|
2239
|
+
}
|
|
2240
|
+
if (amount !== targetAmount) {
|
|
2241
|
+
await this.requestLeavesSwap({ targetAmount });
|
|
2242
|
+
amount = 0;
|
|
2243
|
+
nodes = [];
|
|
2244
|
+
const newLeaves = await this.getLeaves();
|
|
2245
|
+
newLeaves.sort((a, b) => b.value - a.value);
|
|
2246
|
+
for (const leaf of newLeaves) {
|
|
2247
|
+
if (targetAmount - amount >= leaf.value) {
|
|
2248
|
+
amount += leaf.value;
|
|
2249
|
+
nodes.push(leaf);
|
|
2250
|
+
}
|
|
2251
|
+
}
|
|
2252
|
+
}
|
|
2253
|
+
return nodes;
|
|
2254
|
+
}
|
|
2255
|
+
async selectLeavesForSwap(targetAmount) {
|
|
2256
|
+
if (targetAmount == 0) {
|
|
2257
|
+
throw new Error("Target amount needs to > 0");
|
|
2258
|
+
}
|
|
2259
|
+
const leaves = await this.getLeaves();
|
|
2260
|
+
leaves.sort((a, b) => a.value - b.value);
|
|
2261
|
+
let amount = 0;
|
|
2262
|
+
const nodes = [];
|
|
2263
|
+
for (const leaf of leaves) {
|
|
2264
|
+
if (amount < targetAmount) {
|
|
2265
|
+
amount += leaf.value;
|
|
2266
|
+
nodes.push(leaf);
|
|
2267
|
+
}
|
|
2268
|
+
}
|
|
2269
|
+
if (amount < targetAmount) {
|
|
2270
|
+
throw new Error(
|
|
2271
|
+
"You don't have enough nodes to swap for the target amount"
|
|
2272
|
+
);
|
|
2273
|
+
}
|
|
2274
|
+
return nodes;
|
|
2275
|
+
}
|
|
2276
|
+
areLeavesInefficient() {
|
|
2277
|
+
const totalAmount = this.leaves.reduce((acc, leaf) => acc + leaf.value, 0);
|
|
2278
|
+
if (this.leaves.length <= 1) {
|
|
2279
|
+
return false;
|
|
2280
|
+
}
|
|
2281
|
+
const nextLowerPowerOfTwo = 31 - Math.clz32(totalAmount);
|
|
2282
|
+
let remainingAmount = totalAmount;
|
|
2283
|
+
let optimalLeavesLength = 0;
|
|
2284
|
+
for (let i = nextLowerPowerOfTwo; i >= 0; i--) {
|
|
2285
|
+
const denomination = 2 ** i;
|
|
2286
|
+
while (remainingAmount >= denomination) {
|
|
2287
|
+
remainingAmount -= denomination;
|
|
2288
|
+
optimalLeavesLength++;
|
|
2289
|
+
}
|
|
2290
|
+
}
|
|
2291
|
+
return this.leaves.length > optimalLeavesLength * 5;
|
|
2292
|
+
}
|
|
2293
|
+
async optimizeLeaves() {
|
|
2294
|
+
if (this.optimizationInProgress || !this.areLeavesInefficient()) {
|
|
2295
|
+
return;
|
|
2296
|
+
}
|
|
2297
|
+
await this.withLeaves(async () => {
|
|
2298
|
+
this.optimizationInProgress = true;
|
|
2299
|
+
try {
|
|
2300
|
+
if (this.leaves.length > 0) {
|
|
2301
|
+
await this.requestLeavesSwap({ leaves: this.leaves });
|
|
2302
|
+
}
|
|
2303
|
+
this.leaves = await this.getLeaves();
|
|
2304
|
+
} finally {
|
|
2305
|
+
this.optimizationInProgress = false;
|
|
2306
|
+
}
|
|
2307
|
+
});
|
|
2308
|
+
}
|
|
2309
|
+
async syncWallet() {
|
|
2310
|
+
await Promise.all([this.claimTransfers(), this.syncTokenLeaves()]);
|
|
2311
|
+
this.leaves = await this.getLeaves();
|
|
2312
|
+
await this.refreshTimelockNodes();
|
|
2313
|
+
await this.config.signer.restoreSigningKeysFromLeafs(this.leaves);
|
|
2314
|
+
this.optimizeLeaves().catch((e) => {
|
|
2315
|
+
console.error("Failed to optimize leaves", e);
|
|
2316
|
+
});
|
|
2317
|
+
}
|
|
2318
|
+
async withLeaves(operation) {
|
|
2319
|
+
const release = await this.leavesMutex.acquire();
|
|
2320
|
+
try {
|
|
2321
|
+
return await operation();
|
|
2322
|
+
} finally {
|
|
2323
|
+
release();
|
|
2324
|
+
}
|
|
2325
|
+
}
|
|
2326
|
+
isInitialized() {
|
|
2327
|
+
return this.sspClient !== null && this.wasmModule !== null;
|
|
2328
|
+
}
|
|
2329
|
+
/**
|
|
2330
|
+
* Gets the identity public key of the wallet.
|
|
2331
|
+
*
|
|
2332
|
+
* @returns {Promise<string>} The identity public key as a hex string.
|
|
2333
|
+
*/
|
|
2334
|
+
async getIdentityPublicKey() {
|
|
2335
|
+
return bytesToHex2(await this.config.signer.getIdentityPublicKey());
|
|
2336
|
+
}
|
|
2337
|
+
/**
|
|
2338
|
+
* Gets the Spark address of the wallet.
|
|
2339
|
+
*
|
|
2340
|
+
* @returns {Promise<string>} The Spark address as a hex string.
|
|
2341
|
+
*/
|
|
2342
|
+
async getSparkAddress() {
|
|
2343
|
+
return bytesToHex2(await this.config.signer.getIdentityPublicKey());
|
|
2344
|
+
}
|
|
2345
|
+
/**
 * Initializes the wallet using either a mnemonic phrase or a raw seed.
 * initWallet will also claim any pending incoming lightning payment, spark transfer,
 * or bitcoin deposit.
 *
 * @param {Uint8Array | string} [mnemonicOrSeed] - (Optional) Either:
 *   - A BIP-39 mnemonic phrase as string
 *   - A raw seed as Uint8Array or hex string
 *   If not provided, generates a new mnemonic and uses it to create a new wallet
 *
 * @returns {Promise<Object>} Object containing:
 *   - mnemonic: The mnemonic if one was generated (undefined for raw seed)
 *   - balance: The wallet's initial balance in satoshis
 *   - tokenBalance: Map of token balances and leaf counts
 */
async initWallet(mnemonicOrSeed) {
  // Remember whether the caller supplied input: only a freshly generated
  // mnemonic is echoed back in the result.
  const returnMnemonic = !mnemonicOrSeed;
  if (!mnemonicOrSeed) {
    mnemonicOrSeed = await this.config.signer.generateMnemonic();
  }
  // Normalize raw byte seeds to hex so both branches below take a string.
  if (typeof mnemonicOrSeed !== "string") {
    mnemonicOrSeed = bytesToHex2(mnemonicOrSeed);
  }
  let mnemonic;
  // A string that validates as BIP-39 is treated as a mnemonic; anything
  // else (e.g. a hex seed) falls through to the raw-seed path.
  if (validateMnemonic(mnemonicOrSeed, wordlist)) {
    mnemonic = mnemonicOrSeed;
    await this.initWalletFromMnemonic(mnemonicOrSeed);
  } else {
    await this.initWalletFromSeed(mnemonicOrSeed);
  }
  // BigInt accumulation: leaf values are satoshis and may be numerous.
  const balance = this.leaves.reduce(
    (acc, leaf) => acc + BigInt(leaf.value),
    0n
  );
  const tokenBalance = await this.getAllTokenBalances();
  if (returnMnemonic) {
    return {
      mnemonic,
      balance,
      tokenBalance
    };
  }
  return {
    balance,
    tokenBalance
  };
}
|
|
2392
|
+
async initWalletFromMnemonic(mnemonic) {
|
|
2393
|
+
const identityPublicKey = await this.config.signer.createSparkWalletFromMnemonic(
|
|
2394
|
+
mnemonic,
|
|
2395
|
+
this.config.getNetwork()
|
|
2396
|
+
);
|
|
2397
|
+
await this.initializeWallet(identityPublicKey);
|
|
2398
|
+
return identityPublicKey;
|
|
2399
|
+
}
|
|
2400
|
+
/**
|
|
2401
|
+
* Initializes a wallet from a seed.
|
|
2402
|
+
*
|
|
2403
|
+
* @param {Uint8Array | string} seed - The seed to initialize the wallet from
|
|
2404
|
+
* @returns {Promise<string>} The identity public key
|
|
2405
|
+
* @private
|
|
2406
|
+
*/
|
|
2407
|
+
async initWalletFromSeed(seed) {
|
|
2408
|
+
const identityPublicKey = await this.config.signer.createSparkWalletFromSeed(
|
|
2409
|
+
seed,
|
|
2410
|
+
this.config.getNetwork()
|
|
2411
|
+
);
|
|
2412
|
+
await this.initializeWallet(identityPublicKey);
|
|
2413
|
+
return identityPublicKey;
|
|
2414
|
+
}
|
|
2415
|
+
/**
 * Requests a swap of leaves to optimize wallet structure.
 *
 * Flow: sign refunds for the outgoing leaves, build adaptor signatures so
 * the SSP can only complete the swap by revealing the adaptor secret,
 * submit the swap request, verify the SSP's returned leaves, tweak keys to
 * finalize the outgoing transfer, then complete the swap and claim the
 * incoming leaves. On any failure, sender-initiated transfers are cancelled.
 *
 * @param {Object} params - Parameters for the leaves swap
 * @param {number} [params.targetAmount] - Target amount for the swap
 * @param {TreeNode[]} [params.leaves] - Specific leaves to swap
 * @returns {Promise<Object>} The completed swap response
 * @throws {Error} On invalid arguments, SSP failures, or verification failures.
 * @private
 */
async requestLeavesSwap({
  targetAmount,
  leaves
}) {
  if (targetAmount && targetAmount <= 0) {
    throw new Error("targetAmount must be positive");
  }
  // Pick up any pending inbound transfers first so selection sees all leaves.
  await this.claimTransfers();
  let leavesToSwap;
  if (targetAmount && leaves && leaves.length > 0) {
    // Caller pinned both: the provided leaves must not exceed the target.
    if (targetAmount < leaves.reduce((acc, leaf) => acc + leaf.value, 0)) {
      throw new Error("targetAmount is less than the sum of leaves");
    }
    leavesToSwap = leaves;
  } else if (targetAmount) {
    leavesToSwap = await this.selectLeavesForSwap(targetAmount);
  } else if (leaves && leaves.length > 0) {
    leavesToSwap = leaves;
  } else {
    throw new Error("targetAmount or leaves must be provided");
  }
  // Per-leaf key material: current signing key (derived from the leaf id)
  // and a fresh key to tweak toward.
  const leafKeyTweaks = await Promise.all(
    leavesToSwap.map(async (leaf) => ({
      leaf,
      signingPubKey: await this.config.signer.generatePublicKey(
        sha2564(leaf.id)
      ),
      newSigningPubKey: await this.config.signer.generatePublicKey()
    }))
  );
  // Sign refund transactions for the transfer to the SSP; expires in 10 min.
  const { transfer, signatureMap } = await this.transferService.sendTransferSignRefund(
    leafKeyTweaks,
    await this.config.signer.getSspIdentityPublicKey(
      this.config.getNetwork()
    ),
    new Date(Date.now() + 10 * 60 * 1e3)
  );
  try {
    if (!transfer.leaves[0]?.leaf) {
      throw new Error("Failed to get leaf");
    }
    const refundSignature = signatureMap.get(transfer.leaves[0].leaf.id);
    if (!refundSignature) {
      throw new Error("Failed to get refund signature");
    }
    // The first leaf's signature seeds the adaptor; all remaining leaves
    // reuse the same adaptor secret so one reveal completes everything.
    const { adaptorPrivateKey, adaptorSignature } = generateAdaptorFromSignature(refundSignature);
    if (!transfer.leaves[0].leaf) {
      throw new Error("Failed to get leaf");
    }
    const userLeaves = [];
    userLeaves.push({
      leaf_id: transfer.leaves[0].leaf.id,
      raw_unsigned_refund_transaction: bytesToHex2(
        transfer.leaves[0].intermediateRefundTx
      ),
      adaptor_added_signature: bytesToHex2(adaptorSignature)
    });
    // Remaining leaves: derive adaptor signatures from the shared secret.
    for (let i = 1; i < transfer.leaves.length; i++) {
      const leaf = transfer.leaves[i];
      if (!leaf?.leaf) {
        throw new Error("Failed to get leaf");
      }
      const refundSignature2 = signatureMap.get(leaf.leaf.id);
      if (!refundSignature2) {
        throw new Error("Failed to get refund signature");
      }
      const signature = generateSignatureFromExistingAdaptor(
        refundSignature2,
        adaptorPrivateKey
      );
      userLeaves.push({
        leaf_id: leaf.leaf.id,
        raw_unsigned_refund_transaction: bytesToHex2(
          leaf.intermediateRefundTx
        ),
        adaptor_added_signature: bytesToHex2(signature)
      });
    }
    const adaptorPubkey = bytesToHex2(
      secp256k13.getPublicKey(adaptorPrivateKey)
    );
    let request = null;
    // Optional chaining: sspClient may be null if not initialized; the
    // null-response check below surfaces that case.
    request = await this.sspClient?.requestLeaveSwap({
      userLeaves,
      adaptorPubkey,
      targetAmountSats: targetAmount || leavesToSwap.reduce((acc, leaf) => acc + leaf.value, 0),
      totalAmountSats: leavesToSwap.reduce(
        (acc, leaf) => acc + leaf.value,
        0
      ),
      // TODO: Request fee from SSP
      feeSats: 0
    });
    if (!request) {
      throw new Error("Failed to request leaves swap. No response returned.");
    }
    // Fetch the SSP's offered leaves so we can verify their adaptor
    // signatures before committing our side of the swap.
    const sparkClient = await this.connectionManager.createSparkClient(
      this.config.getCoordinatorAddress()
    );
    const nodes = await sparkClient.query_nodes({
      source: {
        $case: "nodeIds",
        nodeIds: {
          nodeIds: request.swapLeaves.map((leaf) => leaf.leafId)
        }
      },
      includeParents: false
    });
    if (Object.values(nodes.nodes).length !== request.swapLeaves.length) {
      throw new Error("Expected same number of nodes as swapLeaves");
    }
    // Verify each offered leaf: recompute the refund sighash and check the
    // SSP's adaptor signature against the node's taproot key.
    for (const [nodeId, node] of Object.entries(nodes.nodes)) {
      if (!node.nodeTx) {
        throw new Error(`Node tx not found for leaf ${nodeId}`);
      }
      if (!node.verifyingPublicKey) {
        throw new Error(`Node public key not found for leaf ${nodeId}`);
      }
      const leaf = request.swapLeaves.find((leaf2) => leaf2.leafId === nodeId);
      if (!leaf) {
        throw new Error(`Leaf not found for node ${nodeId}`);
      }
      const nodeTx = getTxFromRawTxBytes(node.nodeTx);
      const refundTxBytes = hexToBytes3(leaf.rawUnsignedRefundTransaction);
      const refundTx = getTxFromRawTxBytes(refundTxBytes);
      const sighash = getSigHashFromTx(refundTx, 0, nodeTx.getOutput(0));
      const nodePublicKey = node.verifyingPublicKey;
      // slice(1): drop the parity/prefix byte to get the 32-byte x-only key.
      const taprootKey = computeTaprootKeyNoScript(nodePublicKey.slice(1));
      const adaptorSignatureBytes = hexToBytes3(leaf.adaptorSignedSignature);
      applyAdaptorToSignature(
        taprootKey.slice(1),
        sighash,
        adaptorSignatureBytes,
        adaptorPrivateKey
      );
    }
    // Commit our side: tweak the leaf keys toward the SSP.
    await this.transferService.sendTransferTweakKey(
      transfer,
      leafKeyTweaks,
      signatureMap
    );
    // Reveal the adaptor secret so the SSP can finish, completing the swap.
    const completeResponse = await this.sspClient?.completeLeaveSwap({
      adaptorSecretKey: bytesToHex2(adaptorPrivateKey),
      userOutboundTransferExternalId: transfer.id,
      leavesSwapRequestId: request.id
    });
    if (!completeResponse) {
      throw new Error("Failed to complete leaves swap");
    }
    // Claim the inbound leaves the SSP sent us.
    await this.claimTransfers();
    return completeResponse;
  } catch (e) {
    // Best-effort rollback of our outbound transfer before rethrowing.
    // NOTE(review): wrapping via template string drops the original stack.
    await this.cancelAllSenderInitiatedTransfers();
    throw new Error(`Failed to request leaves swap: ${e}`);
  }
}
|
|
2580
|
+
/**
|
|
2581
|
+
* Gets all transfers for the wallet.
|
|
2582
|
+
*
|
|
2583
|
+
* @param {number} [limit=20] - Maximum number of transfers to return
|
|
2584
|
+
* @param {number} [offset=0] - Offset for pagination
|
|
2585
|
+
* @returns {Promise<QueryAllTransfersResponse>} Response containing the list of transfers
|
|
2586
|
+
*/
|
|
2587
|
+
async getAllTransfers(limit = 20, offset = 0) {
|
|
2588
|
+
return await this.transferService.queryAllTransfers(limit, offset);
|
|
2589
|
+
}
|
|
2590
|
+
/**
|
|
2591
|
+
* Gets the current balance of the wallet.
|
|
2592
|
+
* You can use the forceRefetch option to synchronize your wallet and claim any
|
|
2593
|
+
* pending incoming lightning payment, spark transfer, or bitcoin deposit before returning the balance.
|
|
2594
|
+
*
|
|
2595
|
+
* @param {boolean} [forceRefetch=true] - Synchronizes the wallet before returning the balance
|
|
2596
|
+
* @returns {Promise<Object>} Object containing:
|
|
2597
|
+
* - balance: The wallet's current balance in satoshis
|
|
2598
|
+
* - tokenBalances: Map of token balances and leaf counts
|
|
2599
|
+
*/
|
|
2600
|
+
async getBalance(forceRefetch = true) {
|
|
2601
|
+
if (forceRefetch) {
|
|
2602
|
+
await Promise.all([this.claimTransfers(), this.syncTokenLeaves()]);
|
|
2603
|
+
this.leaves = await this.getLeaves();
|
|
2604
|
+
}
|
|
2605
|
+
const tokenBalances = /* @__PURE__ */ new Map();
|
|
2606
|
+
for (const [tokenPublicKey, leaves] of this.tokenLeaves.entries()) {
|
|
2607
|
+
tokenBalances.set(tokenPublicKey, {
|
|
2608
|
+
balance: calculateAvailableTokenAmount(leaves)
|
|
2609
|
+
});
|
|
2610
|
+
}
|
|
2611
|
+
return {
|
|
2612
|
+
balance: this.leaves.reduce((acc, leaf) => acc + BigInt(leaf.value), 0n),
|
|
2613
|
+
tokenBalances
|
|
2614
|
+
};
|
|
2615
|
+
}
|
|
2616
|
+
// ***** Deposit Flow *****
|
|
2617
|
+
/**
|
|
2618
|
+
* Generates a new deposit address for receiving bitcoin funds.
|
|
2619
|
+
* Note that this function returns a bitcoin address, not a spark address.
|
|
2620
|
+
* For Layer 1 Bitcoin deposits, Spark generates Pay to Taproot (P2TR) addresses.
|
|
2621
|
+
* These addresses start with "bc1p" and can be used to receive Bitcoin from any wallet.
|
|
2622
|
+
*
|
|
2623
|
+
* @returns {Promise<string>} A Bitcoin address for depositing funds
|
|
2624
|
+
*/
|
|
2625
|
+
async getDepositAddress() {
|
|
2626
|
+
return await this.generateDepositAddress();
|
|
2627
|
+
}
|
|
2628
|
+
/**
|
|
2629
|
+
* Generates a deposit address for receiving funds.
|
|
2630
|
+
*
|
|
2631
|
+
* @returns {Promise<string>} A deposit address
|
|
2632
|
+
* @private
|
|
2633
|
+
*/
|
|
2634
|
+
async generateDepositAddress() {
|
|
2635
|
+
const signingPubkey = await this.config.signer.getDepositSigningKey();
|
|
2636
|
+
const address = await this.depositService.generateDepositAddress({
|
|
2637
|
+
signingPubkey
|
|
2638
|
+
});
|
|
2639
|
+
if (!address.depositAddress) {
|
|
2640
|
+
throw new Error("Failed to generate deposit address");
|
|
2641
|
+
}
|
|
2642
|
+
return address.depositAddress.address;
|
|
2643
|
+
}
|
|
2644
|
+
/**
|
|
2645
|
+
* Finalizes a deposit to the wallet.
|
|
2646
|
+
*
|
|
2647
|
+
* @param {DepositParams} params - Parameters for finalizing the deposit
|
|
2648
|
+
* @returns {Promise<TreeNode[] | undefined>} The nodes created from the deposit
|
|
2649
|
+
* @private
|
|
2650
|
+
*/
|
|
2651
|
+
async finalizeDeposit({
|
|
2652
|
+
signingPubKey,
|
|
2653
|
+
verifyingKey,
|
|
2654
|
+
depositTx,
|
|
2655
|
+
vout
|
|
2656
|
+
}) {
|
|
2657
|
+
const response = await this.depositService.createTreeRoot({
|
|
2658
|
+
signingPubKey,
|
|
2659
|
+
verifyingKey,
|
|
2660
|
+
depositTx,
|
|
2661
|
+
vout
|
|
2662
|
+
});
|
|
2663
|
+
return await this.transferDepositToSelf(response.nodes, signingPubKey);
|
|
2664
|
+
}
|
|
2665
|
+
/**
 * Claims an on-chain bitcoin deposit by txid: fetches the raw transaction
 * from a mempool API, matches one of its outputs against the wallet's unused
 * deposit addresses, and finalizes the deposit for that output.
 *
 * @param {string} txid - Transaction id of the deposit transaction
 * @returns {Promise<TreeNode[] | undefined>} Nodes created from the deposit
 * @throws {Error} If the transaction or a matching deposit address is not found.
 */
async claimDeposit(txid) {
  // Network 3 is REGTEST; anything else uses the public mempool.space API.
  const baseUrl = this.config.getNetwork() === 3 /* REGTEST */ ? "https://regtest-mempool.dev.dev.sparkinfra.net/api" : "https://mempool.space/api";
  // NOTE(review): hard-coded basic-auth credentials shipped in the bundle —
  // acceptable only if these are throwaway regtest credentials; verify.
  const auth = btoa("spark-sdk:mCMk1JqlBNtetUNy");
  const headers = {
    "Content-Type": "application/json"
  };
  if (this.config.getNetwork() === 3 /* REGTEST */) {
    headers["Authorization"] = `Basic ${auth}`;
  }
  const response = await fetch(`${baseUrl}/tx/${txid}/hex`, {
    headers
  });
  const txHex = await response.text();
  // Non-hex body (e.g. an HTML/plaintext error page) means the tx was not found.
  if (!/^[0-9A-Fa-f]+$/.test(txHex)) {
    throw new Error("Transaction not found");
  }
  const depositTx = getTxFromRawTxHex(txHex);
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  // Map address string -> deposit-address record for O(1) output matching.
  const unusedDepositAddresses = new Map(
    (await sparkClient.query_unused_deposit_addresses({
      identityPublicKey: await this.config.signer.getIdentityPublicKey()
    })).depositAddresses.map((addr) => [addr.depositAddress, addr])
  );
  let depositAddress;
  let vout = 0;
  // Scan outputs for the first one paying to an unused deposit address.
  for (let i = 0; i < depositTx.outputsLength; i++) {
    const output = depositTx.getOutput(i);
    if (!output) {
      continue;
    }
    const parsedScript = OutScript2.decode(output.script);
    const address = Address2(getNetwork(this.config.getNetwork())).encode(
      parsedScript
    );
    if (unusedDepositAddresses.has(address)) {
      vout = i;
      depositAddress = unusedDepositAddresses.get(address);
      break;
    }
  }
  if (!depositAddress) {
    throw new Error("Deposit address not found");
  }
  const nodes = await this.finalizeDeposit({
    signingPubKey: depositAddress.userSigningPublicKey,
    verifyingKey: depositAddress.verifyingPublicKey,
    depositTx,
    vout
  });
  return nodes;
}
|
|
2718
|
+
/**
|
|
2719
|
+
* Queries the mempool for transactions associated with an address.
|
|
2720
|
+
*
|
|
2721
|
+
* @param {string} address - The address to query
|
|
2722
|
+
* @returns {Promise<{depositTx: Transaction, vout: number} | null>} Transaction details or null if none found
|
|
2723
|
+
* @private
|
|
2724
|
+
*/
|
|
2725
|
+
async queryMempoolTxs(address) {
|
|
2726
|
+
const network = getNetworkFromAddress(address) || this.config.getNetwork();
|
|
2727
|
+
const baseUrl = network === BitcoinNetwork_default.REGTEST ? "https://regtest-mempool.dev.dev.sparkinfra.net/api" : "https://mempool.space/docs/api";
|
|
2728
|
+
const auth = btoa("spark-sdk:mCMk1JqlBNtetUNy");
|
|
2729
|
+
const headers = {
|
|
2730
|
+
"Content-Type": "application/json"
|
|
2731
|
+
};
|
|
2732
|
+
if (network === BitcoinNetwork_default.REGTEST) {
|
|
2733
|
+
headers["Authorization"] = `Basic ${auth}`;
|
|
2734
|
+
}
|
|
2735
|
+
const response = await fetch(`${baseUrl}/address/${address}/txs`, {
|
|
2736
|
+
headers
|
|
2737
|
+
});
|
|
2738
|
+
const addressTxs = await response.json();
|
|
2739
|
+
if (addressTxs && addressTxs.length > 0) {
|
|
2740
|
+
const latestTx = addressTxs[0];
|
|
2741
|
+
const outputIndex = latestTx.vout.findIndex(
|
|
2742
|
+
(output) => output.scriptpubkey_address === address
|
|
2743
|
+
);
|
|
2744
|
+
if (outputIndex === -1) {
|
|
2745
|
+
return null;
|
|
2746
|
+
}
|
|
2747
|
+
const txResponse = await fetch(`${baseUrl}/tx/${latestTx.txid}/hex`, {
|
|
2748
|
+
headers
|
|
2749
|
+
});
|
|
2750
|
+
const txHex = await txResponse.text();
|
|
2751
|
+
const depositTx = getTxFromRawTxHex(txHex);
|
|
2752
|
+
return {
|
|
2753
|
+
depositTx,
|
|
2754
|
+
vout: outputIndex
|
|
2755
|
+
};
|
|
2756
|
+
}
|
|
2757
|
+
return null;
|
|
2758
|
+
}
|
|
2759
|
+
/**
|
|
2760
|
+
* Transfers deposit to self to claim ownership.
|
|
2761
|
+
*
|
|
2762
|
+
* @param {TreeNode[]} leaves - The leaves to transfer
|
|
2763
|
+
* @param {Uint8Array} signingPubKey - The signing public key
|
|
2764
|
+
* @returns {Promise<TreeNode[] | undefined>} The nodes resulting from the transfer
|
|
2765
|
+
* @private
|
|
2766
|
+
*/
|
|
2767
|
+
async transferDepositToSelf(leaves, signingPubKey) {
|
|
2768
|
+
const leafKeyTweaks = await Promise.all(
|
|
2769
|
+
leaves.map(async (leaf) => ({
|
|
2770
|
+
leaf,
|
|
2771
|
+
signingPubKey,
|
|
2772
|
+
newSigningPubKey: await this.config.signer.generatePublicKey()
|
|
2773
|
+
}))
|
|
2774
|
+
);
|
|
2775
|
+
await this.transferService.sendTransfer(
|
|
2776
|
+
leafKeyTweaks,
|
|
2777
|
+
await this.config.signer.getIdentityPublicKey(),
|
|
2778
|
+
new Date(Date.now() + 10 * 60 * 1e3)
|
|
2779
|
+
);
|
|
2780
|
+
const pendingTransfers = await this.transferService.queryPendingTransfers();
|
|
2781
|
+
if (pendingTransfers.transfers.length > 0) {
|
|
2782
|
+
return (await this.claimTransfer(pendingTransfers.transfers[0])).nodes;
|
|
2783
|
+
}
|
|
2784
|
+
return;
|
|
2785
|
+
}
|
|
2786
|
+
// ***** Transfer Flow *****
|
|
2787
|
+
/**
|
|
2788
|
+
* Sends a transfer to another Spark user.
|
|
2789
|
+
*
|
|
2790
|
+
* @param {Object} params - Parameters for the transfer
|
|
2791
|
+
* @param {string} params.receiverSparkAddress - The recipient's Spark address
|
|
2792
|
+
* @param {number} params.amountSats - Amount to send in satoshis
|
|
2793
|
+
* @returns {Promise<Transfer>} The completed transfer details
|
|
2794
|
+
*/
|
|
2795
|
+
async sendSparkTransfer({
|
|
2796
|
+
receiverSparkAddress,
|
|
2797
|
+
amountSats
|
|
2798
|
+
}) {
|
|
2799
|
+
return await this.withLeaves(async () => {
|
|
2800
|
+
return await this._sendTransfer({
|
|
2801
|
+
receiverPubKey: receiverSparkAddress,
|
|
2802
|
+
amount: amountSats
|
|
2803
|
+
});
|
|
2804
|
+
});
|
|
2805
|
+
}
|
|
2806
|
+
/**
 * Internal method to send a transfer.
 *
 * Selects (or copies) the leaves to send, refreshes expiring timelock
 * nodes, tweaks each leaf toward a fresh key, submits the transfer, and
 * removes the sent leaves from the local cache.
 *
 * @param {SendTransferParams} params - Parameters for the transfer
 * @returns {Promise<Transfer>} The completed transfer details
 * @throws {Error} If neither amount nor leaves is provided.
 * @private
 */
async _sendTransfer({
  amount,
  receiverPubKey,
  leaves,
  expiryTime = new Date(Date.now() + 10 * 60 * 1e3)
}) {
  let leavesToSend = [];
  if (leaves) {
    // Explicit leaves win over amount; shallow-copy so callers' objects
    // are not mutated downstream.
    leavesToSend = leaves.map((leaf) => ({
      ...leaf
    }));
  } else if (amount) {
    leavesToSend = await this.selectLeaves(amount);
  } else {
    throw new Error("Must provide amount or leaves");
  }
  // Ensure refund-tx timelocks are still usable before signing the transfer.
  await this.refreshTimelockNodes();
  const leafKeyTweaks = await Promise.all(
    leavesToSend.map(async (leaf) => ({
      leaf,
      // Current signing key is derived deterministically from the leaf id.
      signingPubKey: await this.config.signer.generatePublicKey(
        sha2564(leaf.id)
      ),
      newSigningPubKey: await this.config.signer.generatePublicKey()
    }))
  );
  const transfer = await this.transferService.sendTransfer(
    leafKeyTweaks,
    hexToBytes3(receiverPubKey),
    expiryTime
  );
  // Drop the sent leaves from the local cache.
  const leavesToRemove = new Set(leavesToSend.map((leaf) => leaf.id));
  this.leaves = this.leaves.filter((leaf) => !leavesToRemove.has(leaf.id));
  return transfer;
}
|
|
2848
|
+
/**
 * Internal method to refresh timelock nodes.
 *
 * When a leaf's refund transaction has exhausted its relative-timelock
 * sequence, the node must be refreshed (re-signed against its parent) to
 * stay spendable. Refreshed nodes replace the originals in this.leaves.
 *
 * @param {string} nodeId - The optional ID of the node to refresh. If not provided, all nodes will be checked.
 * @returns {Promise<void>}
 * @throws {Error} If a requested node or a parent node cannot be found.
 */
async refreshTimelockNodes(nodeId) {
  const nodesToRefresh = [];
  const nodeIds = [];
  if (nodeId) {
    // Explicit target: refresh exactly that node, error if unknown.
    for (const node of this.leaves) {
      if (node.id === nodeId) {
        nodesToRefresh.push(node);
        nodeIds.push(node.id);
        break;
      }
    }
    if (nodesToRefresh.length === 0) {
      throw new Error(`node ${nodeId} not found`);
    }
  } else {
    // Scan all leaves: a node needs refresh when its refund tx has no
    // next sequence value left.
    for (const node of this.leaves) {
      const refundTx = getTxFromRawTxBytes(node.refundTx);
      const nextSequence = getNextTransactionSequence(
        refundTx.getInput(0).sequence
      );
      const needRefresh = nextSequence <= 0;
      if (needRefresh) {
        nodesToRefresh.push(node);
        nodeIds.push(node.id);
      }
    }
  }
  if (nodesToRefresh.length === 0) {
    return;
  }
  const sparkClient = await this.connectionManager.createSparkClient(
    this.config.getCoordinatorAddress()
  );
  // includeParents: refresh signs against the parent node's tx.
  const nodesResp = await sparkClient.query_nodes({
    source: {
      $case: "nodeIds",
      nodeIds: {
        nodeIds
      }
    },
    includeParents: true
  });
  const nodesMap = /* @__PURE__ */ new Map();
  for (const node of Object.values(nodesResp.nodes)) {
    nodesMap.set(node.id, node);
  }
  // Refresh one node at a time; each replaces its predecessor in the cache.
  for (const node of nodesToRefresh) {
    if (!node.parentNodeId) {
      throw new Error(`node ${node.id} has no parent`);
    }
    const parentNode = nodesMap.get(node.parentNodeId);
    if (!parentNode) {
      throw new Error(`parent node ${node.parentNodeId} not found`);
    }
    const { nodes } = await this.transferService.refreshTimelockNodes(
      [node],
      parentNode,
      await this.config.signer.generatePublicKey(sha2564(node.id))
    );
    if (nodes.length !== 1) {
      throw new Error(`expected 1 node, got ${nodes.length}`);
    }
    const newNode = nodes[0];
    if (!newNode) {
      throw new Error("Failed to refresh timelock node");
    }
    // Swap the stale node for the refreshed one.
    this.leaves = this.leaves.filter((leaf) => leaf.id !== node.id);
    this.leaves.push(newNode);
  }
}
|
|
2924
|
+
/**
 * Claims a specific transfer.
 *
 * Runs under the claim mutex: verifies the pending transfer, builds the
 * key-tweak set for each claimable leaf, claims the transfer, appends the
 * resulting nodes to the local leaf cache, and refreshes timelock nodes.
 *
 * @param {Transfer} transfer - The transfer to claim
 * @returns {Promise<Object>} The claimed nodes.
 * @private
 */
async claimTransfer(transfer) {
  // Mutex prevents concurrent claims from racing on this.leaves.
  return await this.claimTransferMutex.runExclusive(async () => {
    // Maps leaf id -> signing pubkey proven valid for this transfer.
    const leafPubKeyMap = await this.transferService.verifyPendingTransfer(transfer);
    let leavesToClaim = [];
    for (const leaf of transfer.leaves) {
      if (leaf.leaf) {
        const leafPubKey = leafPubKeyMap.get(leaf.leaf.id);
        // Leaves without a verified pubkey are silently skipped.
        if (leafPubKey) {
          leavesToClaim.push({
            leaf: leaf.leaf,
            signingPubKey: leafPubKey,
            // New key derived deterministically from the leaf id.
            newSigningPubKey: await this.config.signer.generatePublicKey(
              sha2564(leaf.leaf.id)
            )
          });
        }
      }
    }
    const response = await this.transferService.claimTransfer(
      transfer,
      leavesToClaim
    );
    this.leaves.push(...response.nodes);
    await this.refreshTimelockNodes();
    return response.nodes;
  });
}
|
|
2958
|
+
/**
|
|
2959
|
+
* Claims all pending transfers.
|
|
2960
|
+
*
|
|
2961
|
+
* @returns {Promise<boolean>} True if any transfers were claimed
|
|
2962
|
+
* @private
|
|
2963
|
+
*/
|
|
2964
|
+
async claimTransfers() {
|
|
2965
|
+
const transfers = await this.transferService.queryPendingTransfers();
|
|
2966
|
+
let claimed = false;
|
|
2967
|
+
for (const transfer of transfers.transfers) {
|
|
2968
|
+
if (transfer.status !== 2 /* TRANSFER_STATUS_SENDER_KEY_TWEAKED */ && transfer.status !== 3 /* TRANSFER_STATUS_RECEIVER_KEY_TWEAKED */ && transfer.status !== 4 /* TRANSFER_STATUSR_RECEIVER_REFUND_SIGNED */) {
|
|
2969
|
+
continue;
|
|
2970
|
+
}
|
|
2971
|
+
await this.claimTransfer(transfer);
|
|
2972
|
+
claimed = true;
|
|
2973
|
+
}
|
|
2974
|
+
return claimed;
|
|
2975
|
+
}
|
|
2976
|
+
/**
|
|
2977
|
+
* Cancels all sender-initiated transfers.
|
|
2978
|
+
*
|
|
2979
|
+
* @returns {Promise<void>}
|
|
2980
|
+
* @private
|
|
2981
|
+
*/
|
|
2982
|
+
async cancelAllSenderInitiatedTransfers() {
|
|
2983
|
+
const transfers = await this.transferService.queryPendingTransfersBySender();
|
|
2984
|
+
for (const transfer of transfers.transfers) {
|
|
2985
|
+
if (transfer.status === 0 /* TRANSFER_STATUS_SENDER_INITIATED */) {
|
|
2986
|
+
await this.transferService.cancelSendTransfer(transfer);
|
|
2987
|
+
}
|
|
2988
|
+
}
|
|
2989
|
+
}
|
|
2990
|
+
// ***** Lightning Flow *****
|
|
2991
|
+
/**
 * Creates a Lightning invoice for receiving payments.
 *
 * The invoice creator closure maps the Spark network to a BitcoinNetwork
 * value and asks the SSP for a BOLT11 invoice bound to our payment hash.
 *
 * @param {Object} params - Parameters for the lightning invoice
 * @param {number} params.amountSats - Amount in satoshis
 * @param {string} params.memo - Description for the invoice
 * @param {number} [params.expirySeconds] - Optional expiry time in seconds (default 30 days)
 * @returns {Promise<string>} BOLT11 encoded invoice
 * @throws {Error} If the SSP client is not initialized.
 */
async createLightningInvoice({
  amountSats,
  memo,
  expirySeconds = 60 * 60 * 24 * 30
}) {
  if (!this.sspClient) {
    throw new Error("SSP client not initialized");
  }
  // Passed to lightningService as the invoiceCreator callback.
  const requestLightningInvoice = async (amountSats2, paymentHash, memo2) => {
    const network = this.config.getNetwork();
    // Default to REGTEST; 0 = MAINNET, 3 = REGTEST.
    let bitcoinNetwork = BitcoinNetwork_default.REGTEST;
    if (network === 0 /* MAINNET */) {
      bitcoinNetwork = BitcoinNetwork_default.MAINNET;
    } else if (network === 3 /* REGTEST */) {
      bitcoinNetwork = BitcoinNetwork_default.REGTEST;
    }
    const invoice = await this.sspClient.requestLightningReceive({
      amountSats: amountSats2,
      network: bitcoinNetwork,
      paymentHash: bytesToHex2(paymentHash),
      expirySecs: expirySeconds,
      memo: memo2
    });
    // NOTE(review): "encodedEnvoice" looks like a typo of "encodedInvoice",
    // but it must match the SSP response schema's actual property name —
    // verify against the GraphQL schema before renaming.
    return invoice?.invoice.encodedEnvoice;
  };
  return this.lightningService.createLightningInvoice({
    amountSats,
    memo,
    invoiceCreator: requestLightningInvoice
  });
}
|
|
3031
|
+
/**
 * Pays a Lightning invoice.
 *
 * Decodes the invoice, selects leaves covering its amount, swaps them with
 * the SSP against the invoice's payment hash, and then asks the SSP to pay.
 *
 * @param {Object} params - Parameters for paying the invoice
 * @param {string} params.invoice - The BOLT11-encoded Lightning invoice to pay
 * @returns {Promise<LightningSendRequest>} The Lightning payment request details
 * @throws {Error} If the SSP client is not initialized, the invoice lacks a
 *   positive amount or a payment hash, or the swap/SSP request fails
 */
async payLightningInvoice({ invoice }) {
  return await this.withLeaves(async () => {
    if (!this.sspClient) {
      throw new Error("SSP client not initialized");
    }
    const decodedInvoice = decode2(invoice);
    // The BOLT11 "amount" section is in millisatoshis; convert to sats.
    const amountSats = Number(
      decodedInvoice.sections.find((section) => section.name === "amount")?.value
    ) / 1e3;
    if (isNaN(amountSats) || amountSats <= 0) {
      throw new Error("Invalid amount");
    }
    const paymentHash = decodedInvoice.sections.find(
      (section) => section.name === "payment_hash"
    )?.value;
    if (!paymentHash) {
      throw new Error("No payment hash found in invoice");
    }
    // Pick leaves covering the amount, then refresh timelocked nodes before
    // handing the leaves off.
    const leaves = await this.selectLeaves(amountSats);
    await this.refreshTimelockNodes();
    // Pair each leaf with its current signing key (derived from the leaf id)
    // and a fresh key for the post-transfer state.
    const leavesToSend = await Promise.all(
      leaves.map(async (leaf) => ({
        leaf,
        signingPubKey: await this.config.signer.generatePublicKey(
          sha2564(leaf.id)
        ),
        newSigningPubKey: await this.config.signer.generatePublicKey()
      }))
    );
    // Outbound payment: swap the leaves to the SSP's identity against the
    // invoice's payment hash.
    const swapResponse = await this.lightningService.swapNodesForPreimage({
      leaves: leavesToSend,
      receiverIdentityPubkey: await this.config.signer.getSspIdentityPublicKey(
        this.config.getNetwork()
      ),
      paymentHash: hexToBytes3(paymentHash),
      isInboundPayment: false,
      invoiceString: invoice
    });
    if (!swapResponse.transfer) {
      throw new Error("Failed to swap nodes for preimage");
    }
    // NOTE(review): the tweaked-transfer result is awaited for completion but
    // otherwise unused below.
    const transfer = await this.transferService.sendTransferTweakKey(
      swapResponse.transfer,
      leavesToSend,
      /* @__PURE__ */ new Map()
    );
    // Ask the SSP to actually pay; the payment hash doubles as the
    // idempotency key so a retried call cannot double-pay.
    const sspResponse = await this.sspClient.requestLightningSend({
      encodedInvoice: invoice,
      idempotencyKey: paymentHash
    });
    if (!sspResponse) {
      throw new Error("Failed to contact SSP");
    }
    // Drop the spent leaves from the local cache.
    const leavesToRemove = new Set(leavesToSend.map((leaf) => leaf.leaf.id));
    this.leaves = this.leaves.filter((leaf) => !leavesToRemove.has(leaf.id));
    return sspResponse;
  });
}
/**
|
|
3097
|
+
* Gets fee estimate for receiving Lightning payments.
|
|
3098
|
+
*
|
|
3099
|
+
* @param {LightningReceiveFeeEstimateInput} params - Input parameters for fee estimation
|
|
3100
|
+
* @returns {Promise<LightningReceiveFeeEstimateOutput | null>} Fee estimate for receiving Lightning payments
|
|
3101
|
+
* @private
|
|
3102
|
+
*/
|
|
3103
|
+
async getLightningReceiveFeeEstimate({
|
|
3104
|
+
amountSats,
|
|
3105
|
+
network
|
|
3106
|
+
}) {
|
|
3107
|
+
if (!this.sspClient) {
|
|
3108
|
+
throw new Error("SSP client not initialized");
|
|
3109
|
+
}
|
|
3110
|
+
return await this.sspClient.getLightningReceiveFeeEstimate(
|
|
3111
|
+
amountSats,
|
|
3112
|
+
network
|
|
3113
|
+
);
|
|
3114
|
+
}
|
|
3115
|
+
/**
|
|
3116
|
+
* Gets fee estimate for sending Lightning payments.
|
|
3117
|
+
*
|
|
3118
|
+
* @param {LightningSendFeeEstimateInput} params - Input parameters for fee estimation
|
|
3119
|
+
* @returns {Promise<LightningSendFeeEstimateOutput | null>} Fee estimate for sending Lightning payments
|
|
3120
|
+
* @private
|
|
3121
|
+
*/
|
|
3122
|
+
async getLightningSendFeeEstimate({
|
|
3123
|
+
encodedInvoice
|
|
3124
|
+
}) {
|
|
3125
|
+
if (!this.sspClient) {
|
|
3126
|
+
throw new Error("SSP client not initialized");
|
|
3127
|
+
}
|
|
3128
|
+
return await this.sspClient.getLightningSendFeeEstimate(encodedInvoice);
|
|
3129
|
+
}
|
|
3130
|
+
// ***** Tree Creation Flow *****
|
|
3131
|
+
/**
|
|
3132
|
+
* Generates a deposit address for a tree.
|
|
3133
|
+
*
|
|
3134
|
+
* @param {number} vout - The vout index
|
|
3135
|
+
* @param {Uint8Array} parentSigningPubKey - The parent signing public key
|
|
3136
|
+
* @param {Transaction} [parentTx] - Optional parent transaction
|
|
3137
|
+
* @param {TreeNode} [parentNode] - Optional parent node
|
|
3138
|
+
* @returns {Promise<Object>} Deposit address information
|
|
3139
|
+
* @private
|
|
3140
|
+
*/
|
|
3141
|
+
async generateDepositAddressForTree(vout, parentSigningPubKey, parentTx, parentNode) {
|
|
3142
|
+
return await this.treeCreationService.generateDepositAddressForTree(
|
|
3143
|
+
vout,
|
|
3144
|
+
parentSigningPubKey,
|
|
3145
|
+
parentTx,
|
|
3146
|
+
parentNode
|
|
3147
|
+
);
|
|
3148
|
+
}
|
|
3149
|
+
/**
|
|
3150
|
+
* Creates a tree structure.
|
|
3151
|
+
*
|
|
3152
|
+
* @param {number} vout - The vout index
|
|
3153
|
+
* @param {DepositAddressTree} root - The root of the tree
|
|
3154
|
+
* @param {boolean} createLeaves - Whether to create leaves
|
|
3155
|
+
* @param {Transaction} [parentTx] - Optional parent transaction
|
|
3156
|
+
* @param {TreeNode} [parentNode] - Optional parent node
|
|
3157
|
+
* @returns {Promise<Object>} The created tree
|
|
3158
|
+
* @private
|
|
3159
|
+
*/
|
|
3160
|
+
async createTree(vout, root, createLeaves, parentTx, parentNode) {
|
|
3161
|
+
return await this.treeCreationService.createTree(
|
|
3162
|
+
vout,
|
|
3163
|
+
root,
|
|
3164
|
+
createLeaves,
|
|
3165
|
+
parentTx,
|
|
3166
|
+
parentNode
|
|
3167
|
+
);
|
|
3168
|
+
}
|
|
3169
|
+
// ***** Cooperative Exit Flow *****
|
|
3170
|
+
/**
|
|
3171
|
+
* Initiates a withdrawal to move funds from the Spark network to an on-chain Bitcoin address.
|
|
3172
|
+
*
|
|
3173
|
+
* @param {Object} params - Parameters for the withdrawal
|
|
3174
|
+
* @param {string} params.onchainAddress - The Bitcoin address where the funds should be sent
|
|
3175
|
+
* @param {number} [params.targetAmountSats] - The amount in satoshis to withdraw. If not specified, attempts to withdraw all available funds
|
|
3176
|
+
* @returns {Promise<CoopExitRequest | null | undefined>} The withdrawal request details, or null/undefined if the request cannot be completed
|
|
3177
|
+
*/
|
|
3178
|
+
async withdraw({
|
|
3179
|
+
onchainAddress,
|
|
3180
|
+
targetAmountSats
|
|
3181
|
+
}) {
|
|
3182
|
+
return await this.withLeaves(async () => {
|
|
3183
|
+
return await this.coopExit(onchainAddress, targetAmountSats);
|
|
3184
|
+
});
|
|
3185
|
+
}
|
|
3186
|
+
/**
 * Internal method to perform a cooperative exit (withdrawal).
 *
 * Requests a connector transaction from the SSP, signs connector refunds
 * for the selected leaves, and completes the exit with the SSP.
 *
 * @param {string} onchainAddress - The Bitcoin address where the funds should be sent
 * @param {number} [targetAmountSats] - The amount in satoshis to withdraw
 * @returns {Promise<Object | null | undefined>} The exit request details
 * @throws {Error} If the SSP request fails or the connector tx is malformed
 * @private
 */
async coopExit(onchainAddress, targetAmountSats) {
  let leavesToSend = [];
  // NOTE(review): falsy check — targetAmountSats of 0 (or undefined) takes
  // the "withdraw everything" branch below.
  if (targetAmountSats) {
    leavesToSend = await this.selectLeaves(targetAmountSats);
  } else {
    // No target: exit with shallow copies of every cached leaf.
    leavesToSend = this.leaves.map((leaf) => ({
      ...leaf
    }));
  }
  // Pair each leaf with its current signing key (derived from the leaf id)
  // and a fresh key for the post-transfer state.
  const leafKeyTweaks = await Promise.all(
    leavesToSend.map(async (leaf) => ({
      leaf,
      signingPubKey: await this.config.signer.generatePublicKey(
        sha2564(leaf.id)
      ),
      newSigningPubKey: await this.config.signer.generatePublicKey()
    }))
  );
  const coopExitRequest = await this.sspClient?.requestCoopExit({
    leafExternalIds: leavesToSend.map((leaf) => leaf.id),
    withdrawalAddress: onchainAddress
  });
  // Also covers a missing sspClient (optional chain yields undefined).
  if (!coopExitRequest?.rawConnectorTransaction) {
    throw new Error("Failed to request coop exit");
  }
  const connectorTx = getTxFromRawTxHex(
    coopExitRequest.rawConnectorTransaction
  );
  // The connector tx spends the exit tx: its first input names the exit txid.
  const coopExitTxId = connectorTx.getInput(0).txid;
  const connectorTxId = getTxId(connectorTx);
  if (!coopExitTxId) {
    throw new Error("Failed to get coop exit tx id");
  }
  const connectorOutputs = [];
  // NOTE(review): the last output is intentionally skipped — presumably a
  // fee/change output rather than a connector; confirm against the SSP's
  // connector-tx layout.
  for (let i = 0; i < connectorTx.outputsLength - 1; i++) {
    connectorOutputs.push({
      txid: hexToBytes3(connectorTxId),
      index: i
    });
  }
  const sspPubIdentityKey = await this.config.signer.getSspIdentityPublicKey(
    this.config.getNetwork()
  );
  // Produce refund signatures binding the leaves to the connector outputs,
  // with the SSP as receiver.
  const transfer = await this.coopExitService.getConnectorRefundSignatures({
    leaves: leafKeyTweaks,
    exitTxId: coopExitTxId,
    connectorOutputs,
    receiverPubKey: sspPubIdentityKey
  });
  const completeResponse = await this.sspClient?.completeCoopExit({
    userOutboundTransferExternalId: transfer.transfer.id,
    coopExitRequestId: coopExitRequest.id
  });
  return completeResponse;
}
/**
|
|
3250
|
+
* Gets fee estimate for cooperative exit (on-chain withdrawal).
|
|
3251
|
+
*
|
|
3252
|
+
* @param {CoopExitFeeEstimateInput} params - Input parameters for fee estimation
|
|
3253
|
+
* @returns {Promise<CoopExitFeeEstimateOutput | null>} Fee estimate for the withdrawal
|
|
3254
|
+
* @private
|
|
3255
|
+
*/
|
|
3256
|
+
async getCoopExitFeeEstimate({
|
|
3257
|
+
leafExternalIds,
|
|
3258
|
+
withdrawalAddress
|
|
3259
|
+
}) {
|
|
3260
|
+
if (!this.sspClient) {
|
|
3261
|
+
throw new Error("SSP client not initialized");
|
|
3262
|
+
}
|
|
3263
|
+
return await this.sspClient.getCoopExitFeeEstimate({
|
|
3264
|
+
leafExternalIds,
|
|
3265
|
+
withdrawalAddress
|
|
3266
|
+
});
|
|
3267
|
+
}
|
|
3268
|
+
// ***** Token Flow *****
|
|
3269
|
+
/**
|
|
3270
|
+
* Synchronizes token leaves for the wallet.
|
|
3271
|
+
*
|
|
3272
|
+
* @returns {Promise<void>}
|
|
3273
|
+
* @private
|
|
3274
|
+
*/
|
|
3275
|
+
async syncTokenLeaves() {
|
|
3276
|
+
this.tokenLeaves.clear();
|
|
3277
|
+
const trackedPublicKeys = await this.config.signer.getTrackedPublicKeys();
|
|
3278
|
+
const unsortedTokenLeaves = await this.tokenTransactionService.fetchOwnedTokenLeaves(
|
|
3279
|
+
[...trackedPublicKeys, await this.config.signer.getIdentityPublicKey()],
|
|
3280
|
+
[]
|
|
3281
|
+
);
|
|
3282
|
+
const groupedLeaves = /* @__PURE__ */ new Map();
|
|
3283
|
+
unsortedTokenLeaves.forEach((leaf) => {
|
|
3284
|
+
const tokenKey = bytesToHex2(leaf.leaf.tokenPublicKey);
|
|
3285
|
+
const index = leaf.previousTransactionVout;
|
|
3286
|
+
if (!groupedLeaves.has(tokenKey)) {
|
|
3287
|
+
groupedLeaves.set(tokenKey, []);
|
|
3288
|
+
}
|
|
3289
|
+
groupedLeaves.get(tokenKey).push({
|
|
3290
|
+
...leaf,
|
|
3291
|
+
previousTransactionVout: index
|
|
3292
|
+
});
|
|
3293
|
+
});
|
|
3294
|
+
this.tokenLeaves = groupedLeaves;
|
|
3295
|
+
}
|
|
3296
|
+
/**
|
|
3297
|
+
* Gets all token balances.
|
|
3298
|
+
*
|
|
3299
|
+
* @returns {Promise<Map<string, { balance: bigint }>>} Map of token balances and leaf counts
|
|
3300
|
+
* @private
|
|
3301
|
+
*/
|
|
3302
|
+
async getAllTokenBalances() {
|
|
3303
|
+
await this.syncTokenLeaves();
|
|
3304
|
+
const balances = /* @__PURE__ */ new Map();
|
|
3305
|
+
for (const [tokenPublicKey, leaves] of this.tokenLeaves.entries()) {
|
|
3306
|
+
balances.set(tokenPublicKey, {
|
|
3307
|
+
balance: calculateAvailableTokenAmount(leaves)
|
|
3308
|
+
});
|
|
3309
|
+
}
|
|
3310
|
+
return balances;
|
|
3311
|
+
}
|
|
3312
|
+
/**
|
|
3313
|
+
* Transfers tokens to another user.
|
|
3314
|
+
*
|
|
3315
|
+
* @param {Object} params - Parameters for the token transfer
|
|
3316
|
+
* @param {string} params.tokenPublicKey - The public key of the token to transfer
|
|
3317
|
+
* @param {bigint} params.tokenAmount - The amount of tokens to transfer
|
|
3318
|
+
* @param {string} params.receiverSparkAddress - The recipient's public key
|
|
3319
|
+
* @param {LeafWithPreviousTransactionData[]} [params.selectedLeaves] - Optional specific leaves to use for the transfer
|
|
3320
|
+
* @returns {Promise<string>} The transaction ID of the token transfer
|
|
3321
|
+
*/
|
|
3322
|
+
async sendSparkTokenTransfer({
|
|
3323
|
+
tokenPublicKey,
|
|
3324
|
+
tokenAmount,
|
|
3325
|
+
receiverSparkAddress,
|
|
3326
|
+
selectedLeaves
|
|
3327
|
+
}) {
|
|
3328
|
+
await this.syncTokenLeaves();
|
|
3329
|
+
if (!this.tokenLeaves.has(tokenPublicKey)) {
|
|
3330
|
+
throw new Error("No token leaves with the given tokenPublicKey");
|
|
3331
|
+
}
|
|
3332
|
+
const tokenPublicKeyBytes = hexToBytes3(tokenPublicKey);
|
|
3333
|
+
const receiverSparkAddressBytes = hexToBytes3(receiverSparkAddress);
|
|
3334
|
+
if (selectedLeaves) {
|
|
3335
|
+
if (!checkIfSelectedLeavesAreAvailable(
|
|
3336
|
+
selectedLeaves,
|
|
3337
|
+
this.tokenLeaves,
|
|
3338
|
+
tokenPublicKeyBytes
|
|
3339
|
+
)) {
|
|
3340
|
+
throw new Error("One or more selected leaves are not available");
|
|
3341
|
+
}
|
|
3342
|
+
} else {
|
|
3343
|
+
selectedLeaves = this.selectTokenLeaves(tokenPublicKey, tokenAmount);
|
|
3344
|
+
}
|
|
3345
|
+
if (selectedLeaves.length > MAX_TOKEN_LEAVES) {
|
|
3346
|
+
throw new Error("Too many leaves selected");
|
|
3347
|
+
}
|
|
3348
|
+
const tokenTransaction = await this.tokenTransactionService.constructTransferTokenTransaction(
|
|
3349
|
+
selectedLeaves,
|
|
3350
|
+
receiverSparkAddressBytes,
|
|
3351
|
+
tokenPublicKeyBytes,
|
|
3352
|
+
tokenAmount
|
|
3353
|
+
);
|
|
3354
|
+
return await this.tokenTransactionService.broadcastTokenTransaction(
|
|
3355
|
+
tokenTransaction,
|
|
3356
|
+
selectedLeaves.map((leaf) => leaf.leaf.ownerPublicKey),
|
|
3357
|
+
selectedLeaves.map((leaf) => leaf.leaf.revocationPublicKey)
|
|
3358
|
+
);
|
|
3359
|
+
}
|
|
3360
|
+
/**
|
|
3361
|
+
* Selects token leaves for a transfer.
|
|
3362
|
+
*
|
|
3363
|
+
* @param {string} tokenPublicKey - The public key of the token
|
|
3364
|
+
* @param {bigint} tokenAmount - The amount of tokens to select leaves for
|
|
3365
|
+
* @returns {LeafWithPreviousTransactionData[]} The selected leaves
|
|
3366
|
+
* @private
|
|
3367
|
+
*/
|
|
3368
|
+
selectTokenLeaves(tokenPublicKey, tokenAmount) {
|
|
3369
|
+
return this.tokenTransactionService.selectTokenLeaves(
|
|
3370
|
+
this.tokenLeaves.get(tokenPublicKey),
|
|
3371
|
+
tokenAmount
|
|
3372
|
+
);
|
|
3373
|
+
}
|
|
3374
|
+
};
|
|
3375
|
+
/**
 * Maps a bech32 Bitcoin address to its network by human-readable prefix.
 *
 * @param {string} address - A bech32/bech32m Bitcoin address
 * @returns {BitcoinNetwork | null} MAINNET for "bc", REGTEST for "bcrt",
 *   or null for any other (e.g. testnet "tb") prefix
 * @throws {Error} If the address is not valid bech32 (original decode error
 *   attached as `cause`)
 */
function getNetworkFromAddress(address) {
  let decoded;
  try {
    decoded = bitcoin.address.fromBech32(address);
  } catch (err) {
    // Preserve the underlying decode failure instead of discarding it.
    throw new Error("Invalid Bitcoin address", { cause: err });
  }
  if (decoded.prefix === "bc") {
    return BitcoinNetwork_default.MAINNET;
  }
  if (decoded.prefix === "bcrt") {
    return BitcoinNetwork_default.REGTEST;
  }
  // Unrecognized prefix: callers treat null as "unsupported network".
  return null;
}
// Public API of this bundle: the wallet facade class.
export {
  SparkWallet
};