@buildonspark/spark-sdk 0.1.35 → 0.1.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/{RequestLightningSendInput-BtR12ZlP.d.cts → RequestLightningSendInput-39_zGri6.d.cts} +1 -1
- package/dist/{RequestLightningSendInput-Dfx7JYWT.d.ts → RequestLightningSendInput-B4JdzclX.d.ts} +1 -1
- package/dist/address/index.cjs +7 -2
- package/dist/address/index.d.cts +2 -2
- package/dist/address/index.d.ts +2 -2
- package/dist/address/index.js +3 -3
- package/dist/{chunk-5IXRSN47.js → chunk-2CDJZQN4.js} +1 -1
- package/dist/{chunk-YQBZR7YC.js → chunk-A74XSEW3.js} +1 -1
- package/dist/{chunk-PZRENZXV.js → chunk-C5LTJBI7.js} +215 -6
- package/dist/{chunk-B2CHXWG6.js → chunk-FWQPAPXK.js} +2 -2
- package/dist/{chunk-EVTP3LXL.js → chunk-I4JI6TYN.js} +2 -2
- package/dist/{chunk-74S7NOHT.js → chunk-MVRQ5US7.js} +7 -2
- package/dist/{chunk-UDT7KKB6.js → chunk-NS4UZRQ7.js} +1 -1
- package/dist/{chunk-W7N2ABBH.js → chunk-RAPBVYJY.js} +7 -12
- package/dist/{chunk-7TM6XZIQ.js → chunk-RULOY5WX.js} +2 -2
- package/dist/{chunk-3FT662ZE.js → chunk-RZDD6X5A.js} +143 -68
- package/dist/{chunk-C4URKX6J.js → chunk-YWFZ5ECA.js} +1 -1
- package/dist/graphql/objects/index.cjs +7 -2
- package/dist/graphql/objects/index.d.cts +4 -4
- package/dist/graphql/objects/index.d.ts +4 -4
- package/dist/graphql/objects/index.js +1 -1
- package/dist/{index-BBzdQhqB.d.ts → index-CxAi2L8y.d.ts} +10 -8
- package/dist/{index-D-We3A27.d.cts → index-Dm17Ggfe.d.cts} +10 -8
- package/dist/index.cjs +365 -85
- package/dist/index.d.cts +6 -6
- package/dist/index.d.ts +6 -6
- package/dist/index.js +11 -11
- package/dist/index.node.cjs +365 -85
- package/dist/index.node.d.cts +7 -7
- package/dist/index.node.d.ts +7 -7
- package/dist/index.node.js +11 -11
- package/dist/native/chunk-C3WN3D4O.js +19 -0
- package/dist/native/index.cjs +365 -85
- package/dist/native/index.d.cts +16 -4
- package/dist/native/index.d.ts +16 -4
- package/dist/native/index.js +363 -88
- package/dist/native/{wasm-A6KGFTNV.js → wasm-PZWVEGEE.js} +1 -1
- package/dist/native/{xhr-transport-TERT4PBA.js → xhr-transport-WHMS3FGG.js} +1 -1
- package/dist/{network-Bc-W9qLR.d.ts → network-DobHpaV6.d.ts} +1 -1
- package/dist/{network-Dbxfi7Tp.d.cts → network-GFGEHkS4.d.cts} +1 -1
- package/dist/proto/lrc20.cjs +7 -2
- package/dist/proto/lrc20.d.cts +1 -1
- package/dist/proto/lrc20.d.ts +1 -1
- package/dist/proto/lrc20.js +3 -3
- package/dist/proto/spark.cjs +221 -7
- package/dist/proto/spark.d.cts +1 -1
- package/dist/proto/spark.d.ts +1 -1
- package/dist/proto/spark.js +6 -2
- package/dist/{sdk-types-BZhxaXMN.d.ts → sdk-types-BeI6DM_M.d.ts} +1 -1
- package/dist/{sdk-types-BPwagWYa.d.cts → sdk-types-BuVMn2rX.d.cts} +1 -1
- package/dist/services/config.cjs +17 -17
- package/dist/services/config.d.cts +4 -4
- package/dist/services/config.d.ts +4 -4
- package/dist/services/config.js +4 -4
- package/dist/services/connection.cjs +217 -7
- package/dist/services/connection.d.cts +4 -4
- package/dist/services/connection.d.ts +4 -4
- package/dist/services/connection.js +3 -3
- package/dist/services/index.cjs +227 -22
- package/dist/services/index.d.cts +4 -4
- package/dist/services/index.d.ts +4 -4
- package/dist/services/index.js +7 -7
- package/dist/services/lrc-connection.cjs +7 -2
- package/dist/services/lrc-connection.d.cts +4 -4
- package/dist/services/lrc-connection.d.ts +4 -4
- package/dist/services/lrc-connection.js +4 -4
- package/dist/services/token-transactions.cjs +7 -2
- package/dist/services/token-transactions.d.cts +4 -4
- package/dist/services/token-transactions.d.ts +4 -4
- package/dist/services/token-transactions.js +4 -4
- package/dist/services/wallet-config.cjs +7 -2
- package/dist/services/wallet-config.d.cts +4 -4
- package/dist/services/wallet-config.d.ts +4 -4
- package/dist/services/wallet-config.js +1 -1
- package/dist/signer/signer.cjs +11 -11
- package/dist/signer/signer.d.cts +2 -2
- package/dist/signer/signer.d.ts +2 -2
- package/dist/signer/signer.js +2 -2
- package/dist/{signer-DQfFgVNA.d.cts → signer-C1t40Wus.d.cts} +1 -1
- package/dist/{signer-Db_TjQFj.d.ts → signer-DFGw9RRp.d.ts} +1 -1
- package/dist/{spark-FHwyinrG.d.cts → spark-DXYE9gMM.d.cts} +29 -3
- package/dist/{spark-FHwyinrG.d.ts → spark-DXYE9gMM.d.ts} +29 -3
- package/dist/types/index.cjs +219 -7
- package/dist/types/index.d.cts +4 -4
- package/dist/types/index.d.ts +4 -4
- package/dist/types/index.js +3 -3
- package/dist/utils/index.cjs +7 -2
- package/dist/utils/index.d.cts +5 -5
- package/dist/utils/index.d.ts +5 -5
- package/dist/utils/index.js +3 -3
- package/dist/{wasm-W3WBILBX.js → wasm-7OWFHDMS.js} +1 -1
- package/dist/{xhr-transport-DSTXEBFS.js → xhr-transport-RH6LDRXS.js} +1 -1
- package/package.json +3 -4
- package/src/proto/common.ts +1 -1
- package/src/proto/google/protobuf/descriptor.ts +1 -1
- package/src/proto/google/protobuf/duration.ts +1 -1
- package/src/proto/google/protobuf/empty.ts +1 -1
- package/src/proto/google/protobuf/timestamp.ts +1 -1
- package/src/proto/mock.ts +2 -2
- package/src/proto/spark.ts +271 -8
- package/src/proto/spark_authn.ts +2 -2
- package/src/proto/validate/validate.ts +1 -1
- package/src/services/transfer.ts +1 -0
- package/src/signer/signer.ts +6 -11
- package/src/spark-wallet/spark-wallet.ts +177 -73
- package/src/tests/integration/deposit.test.ts +5 -0
- package/src/tests/integration/ssp/static_deposit.test.ts +4 -0
- package/src/tests/spark-wallet/queryNodes.test.ts +89 -0
- package/dist/native/chunk-OVEXKBAZ.js +0 -14

package/src/spark-wallet/spark-wallet.ts

@@ -37,6 +37,8 @@ import {
 import {
   DepositAddressQueryResult,
   OutputWithPreviousTransactionData,
+  QueryNodesRequest,
+  QueryNodesResponse,
   SigningJob,
   SubscribeToEventsResponse,
   TokenTransactionWithStatus,

@@ -96,6 +98,7 @@ import {
   SparkAddressFormat,
 } from "../address/index.js";
 import { isReactNative } from "../constants.js";
+import { networkToJSON } from "../proto/spark.js";
 import {
   decodeInvoice,
   getNetworkFromInvoice,

@@ -121,7 +124,6 @@ import type {
   TokenInfo,
   TransferParams,
 } from "./types.js";
-import { networkToJSON } from "../proto/spark.js";

 /**
  * The SparkWallet class is the primary interface for interacting with the Spark network.

@@ -261,7 +263,11 @@ export class SparkWallet extends EventEmitter {
         event.transfer.transfer &&
         !equalBytes(senderIdentityPublicKey, receiverIdentityPublicKey)
       ) {
-        await this.claimTransfer(
+        await this.claimTransfer({
+          transfer: event.transfer.transfer,
+          emit: true,
+          optimize: true,
+        });
       }
     } else if (event?.$case === "deposit" && event.deposit.deposit) {
       const deposit = event.deposit.deposit;

@@ -427,10 +433,7 @@ export class SparkWallet extends EventEmitter {
   private async getLeaves(
     isBalanceCheck: boolean = false,
   ): Promise<TreeNode[]> {
-    const sparkClient = await this.connectionManager.createSparkClient(
-      this.config.getCoordinatorAddress(),
-    );
-    const leaves = await sparkClient.query_nodes({
+    const leaves = await this.queryNodes({
       source: {
         $case: "ownerIdentityPubkey",
         ownerIdentityPubkey: await this.config.signer.getIdentityPublicKey(),

@@ -448,18 +451,18 @@ export class SparkWallet extends EventEmitter {
       this.config.getSigningOperators(),
     )) {
       if (id !== this.config.getCoordinatorIdentifier()) {
-        const client = await this.connectionManager.createSparkClient(
+        const operatorLeaves = await this.queryNodes(
+          {
+            source: {
+              $case: "ownerIdentityPubkey",
+              ownerIdentityPubkey:
+                await this.config.signer.getIdentityPublicKey(),
+            },
+            includeParents: false,
+            network: NetworkToProto[this.config.getNetwork()],
+          },
           operator.address,
         );
-        const operatorLeaves = await client.query_nodes({
-          source: {
-            $case: "ownerIdentityPubkey",
-            ownerIdentityPubkey:
-              await this.config.signer.getIdentityPublicKey(),
-          },
-          includeParents: false,
-          network: NetworkToProto[this.config.getNetwork()],
-        });

         // Loop over leaves returned by coordinator.
         // If the leaf is not present in the operator's leaves, we'll ignore it.

@@ -635,10 +638,14 @@ export class SparkWallet extends EventEmitter {

   private async syncWallet() {
     await this.syncTokenOutputs();
-
-    await this.
-    await this.
-
+
+    let leaves = await this.getLeaves();
+    await this.config.signer.restoreSigningKeysFromLeafs(leaves);
+
+    leaves = await this.checkRefreshTimelockNodes(leaves);
+    leaves = await this.checkExtendTimeLockNodes(leaves);
+
+    this.leaves = leaves;
     this.optimizeLeaves().catch((e) => {
       console.error("Failed to optimize leaves", e);
     });
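
Both timelock helpers (`checkRefreshTimelockNodes`, `checkExtendTimeLockNodes`) now take the node list explicitly and return the surviving set (untouched nodes plus their refreshed replacements) instead of mutating `this.leaves` as a side effect, which is why `syncWallet` and every later call site in this diff reassign the result. A minimal sketch of that contract, using hypothetical stand-ins (`Node`, `needsRefresh`, `refresh`) rather than the SDK's real types:

```ts
// Hypothetical stand-ins for TreeNode and the SDK's refresh machinery;
// only the contract of the changed helpers is illustrated here.
interface Node {
  id: string;
  stale: boolean;
}

const needsRefresh = (n: Node): boolean => n.stale;
const refresh = async (n: Node): Promise<Node> => ({ ...n, stale: false });

// Same shape as checkRefreshTimelockNodes after this change: partition the
// input, refresh the stale nodes, and return the merged result instead of
// pushing into shared state as a side effect.
async function checkRefresh(nodes: Node[]): Promise<Node[]> {
  const valid = nodes.filter((n) => !needsRefresh(n));
  const stale = nodes.filter(needsRefresh);
  const refreshed = await Promise.all(stale.map(refresh));
  return [...valid, ...refreshed];
}

// Callers must reassign, mirroring
// `leaves = await this.checkRefreshTimelockNodes(leaves)` above.
async function demo(): Promise<void> {
  let leaves: Node[] = [
    { id: "a", stale: false },
    { id: "b", stale: true },
  ];
  leaves = await checkRefresh(leaves);
  console.log(leaves.every((n) => !n.stale)); // true
}

void demo();
```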

@@ -812,12 +819,6 @@ export class SparkWallet extends EventEmitter {
       });
     }

-    try {
-      await this.claimTransfers();
-    } catch (e) {
-      console.warn("Unabled to claim transfers.");
-    }
-
     let leavesToSwap: TreeNode[];
     if (targetAmount && leaves && leaves.length > 0) {
       if (targetAmount < leaves.reduce((acc, leaf) => acc + leaf.value, 0)) {

@@ -940,11 +941,7 @@ export class SparkWallet extends EventEmitter {
       throw new Error("Failed to request leaves swap. No response returned.");
     }

-    const sparkClient = await this.connectionManager.createSparkClient(
-      this.config.getCoordinatorAddress(),
-    );
-
-    const nodes = await sparkClient.query_nodes({
+    const nodes = await this.queryNodes({
       source: {
         $case: "nodeIds",
         nodeIds: {

@@ -973,7 +970,6 @@ export class SparkWallet extends EventEmitter {
         throw new Error(`Leaf not found for node ${nodeId}`);
       }

-      // @ts-ignore - We do a null check above
       const nodeTx = getTxFromRawTxBytes(node.nodeTx);
       const refundTxBytes = hexToBytes(leaf.rawUnsignedRefundTransaction);
       const refundTx = getTxFromRawTxBytes(refundTxBytes);

@@ -1163,7 +1159,21 @@ export class SparkWallet extends EventEmitter {
    * @returns {Promise<string>} A Bitcoin address for depositing funds
    */
   public async getStaticDepositAddress(): Promise<string> {
-
+    try {
+      return await this.generateDepositAddress(true);
+    } catch (error: any) {
+      if (error.message?.includes("static deposit address already exists")) {
+        // Query instead of checking error message in case error message changes.
+        const existingAddresses = await this.queryStaticDepositAddresses();
+        if (existingAddresses.length > 0 && existingAddresses[0]) {
+          return existingAddresses[0];
+        } else {
+          throw error;
+        }
+      } else {
+        throw error;
+      }
+    }
   }

   /**
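
With this change `getStaticDepositAddress()` becomes effectively idempotent: when the operator rejects a duplicate static address, the wallet falls back to `queryStaticDepositAddresses()` (made public in the next hunk) and returns the existing address. A hedged usage sketch; wallet construction is elided since it is unchanged by this diff, and the root export of `SparkWallet` is assumed:

```ts
import type { SparkWallet } from "@buildonspark/spark-sdk"; // assumed root export

async function demo(wallet: SparkWallet) {
  // First call creates the static deposit address.
  const first = await wallet.getStaticDepositAddress();

  // A second call no longer surfaces "static deposit address already exists";
  // it falls back to queryStaticDepositAddresses() and returns the same address.
  const second = await wallet.getStaticDepositAddress();
  console.log(first === second); // true (asserted in deposit.test.ts below)

  // queryStaticDepositAddresses() is now public and can be called directly.
  const existing = await wallet.queryStaticDepositAddresses();
  console.log(existing.includes(first)); // true
}
```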

@@ -1199,7 +1209,7 @@ export class SparkWallet extends EventEmitter {
     return address.depositAddress.address;
   }

-  private async queryStaticDepositAddresses(): Promise<string[]> {
+  public async queryStaticDepositAddresses(): Promise<string[]> {
     const sparkClient = await this.connectionManager.createSparkClient(
       this.config.getCoordinatorAddress(),
     );

@@ -1902,7 +1912,7 @@ export class SparkWallet extends EventEmitter {

     const resultNodes = !pendingTransfer
       ? []
-      : await this.claimTransfer(pendingTransfer);
+      : await this.claimTransfer({ transfer: pendingTransfer });

     const leavesToRemove = new Set(leaves.map((leaf) => leaf.id));
     this.leaves = [

@@ -1963,7 +1973,7 @@ export class SparkWallet extends EventEmitter {
     return await this.withLeaves(async () => {
       let leavesToSend = await this.selectLeaves(amountSats);

-      await this.checkRefreshTimelockNodes(leavesToSend);
+      leavesToSend = await this.checkRefreshTimelockNodes(leavesToSend);
       leavesToSend = await this.checkExtendTimeLockNodes(leavesToSend);

       const leafKeyTweaks = await Promise.all(

@@ -1990,7 +2000,10 @@ export class SparkWallet extends EventEmitter {
       const pendingTransfer =
         await this.transferService.queryTransfer(transactionId);
       if (pendingTransfer) {
-        await this.claimTransfer(
+        await this.claimTransfer({
+          transfer: pendingTransfer,
+          optimize: true,
+        });
       }
     }


@@ -2002,14 +2015,13 @@ export class SparkWallet extends EventEmitter {
   }

   private async checkExtendTimeLockNodes(
-    nodes?: TreeNode[],
+    nodes: TreeNode[],
   ): Promise<TreeNode[]> {
-    const nodesToCheck = nodes ?? this.leaves;
     const nodesToExtend: TreeNode[] = [];
     const nodeIds: string[] = [];
-
+    const validNodes: TreeNode[] = [];

-    for (const node of nodesToCheck) {
+    for (const node of nodes) {
       const nodeTx = getTxFromRawTxBytes(node.nodeTx);
       const { needRefresh } = getNextTransactionSequence(
         nodeTx.getInput(0).sequence,

@@ -2017,11 +2029,16 @@ export class SparkWallet extends EventEmitter {
       if (needRefresh) {
         nodesToExtend.push(node);
         nodeIds.push(node.id);
+      } else {
+        validNodes.push(node);
       }
     }

-
+    if (nodesToExtend.length === 0) {
+      return validNodes;
+    }

+    const nodesToAdd: TreeNode[] = [];
     for (const node of nodesToExtend) {
       const signingPubKey = await this.config.signer.generatePublicKey(
         sha256(node.id),

@@ -2032,10 +2049,13 @@ export class SparkWallet extends EventEmitter {
       );
       this.leaves = this.leaves.filter((leaf) => leaf.id !== node.id);
       const newNodes = await this.transferLeavesToSelf(nodes, signingPubKey);
-
+      nodesToAdd.push(...newNodes);
     }

-
+    this.updateLeaves(nodeIds, nodesToAdd);
+    validNodes.push(...nodesToAdd);
+
+    return validNodes;
   }

   /**

@@ -2045,11 +2065,14 @@ export class SparkWallet extends EventEmitter {
    * @returns {Promise<void>}
    * @private
    */
-  private async checkRefreshTimelockNodes(
+  private async checkRefreshTimelockNodes(
+    nodes: TreeNode[],
+  ): Promise<TreeNode[]> {
     const nodesToRefresh: TreeNode[] = [];
     const nodeIds: string[] = [];
+    const validNodes: TreeNode[] = [];

-    for (const node of nodes
+    for (const node of nodes) {
       const refundTx = getTxFromRawTxBytes(node.refundTx);
       const { needRefresh } = getNextTransactionSequence(
         refundTx.getInput(0).sequence,

@@ -2058,18 +2081,16 @@ export class SparkWallet extends EventEmitter {
       if (needRefresh) {
         nodesToRefresh.push(node);
         nodeIds.push(node.id);
+      } else {
+        validNodes.push(node);
       }
     }

     if (nodesToRefresh.length === 0) {
-      return;
+      return validNodes;
     }

-    const sparkClient = await this.connectionManager.createSparkClient(
-      this.config.getCoordinatorAddress(),
-    );
-
-    const nodesResp = await sparkClient.query_nodes({
+    const nodesResp = await this.queryNodes({
       source: {
         $case: "nodeIds",
         nodeIds: {

@@ -2085,6 +2106,7 @@ export class SparkWallet extends EventEmitter {
       nodesMap.set(node.id, node);
     }

+    const nodesToAdd: TreeNode[] = [];
     for (const node of nodesToRefresh) {
       if (!node.parentNodeId) {
         throw new Error(`node ${node.id} has no parent`);

@@ -2110,9 +2132,13 @@ export class SparkWallet extends EventEmitter {
         throw new Error("Failed to refresh timelock node");
       }

-
-      this.leaves.push(newNode);
+      nodesToAdd.push(newNode);
     }
+
+    this.updateLeaves(nodeIds, nodesToAdd);
+    validNodes.push(...nodesToAdd);
+
+    return validNodes;
   }

   /**

@@ -2121,16 +2147,22 @@ export class SparkWallet extends EventEmitter {
    * @param {Transfer} transfer - The transfer to claim
    * @returns {Promise<Object>} The claim result
    */
-  private async claimTransfer(
-    transfer
-    emit
-    retryCount
-
+  private async claimTransfer({
+    transfer,
+    emit,
+    retryCount,
+    optimize,
+  }: {
+    transfer: Transfer;
+    emit?: boolean;
+    retryCount?: number;
+    optimize?: boolean;
+  }) {
     const MAX_RETRIES = 5;
     const BASE_DELAY_MS = 1000;
     const MAX_DELAY_MS = 10000;

-    if (retryCount > 0) {
+    if (retryCount && retryCount > 0) {
       const delayMs = Math.min(
         BASE_DELAY_MS * Math.pow(2, retryCount - 1),
         MAX_DELAY_MS,
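
The positional `claimTransfer(transfer, emit, retryCount)` signature becomes a single options object with a new `optimize` flag, so call sites can set individual flags without positional placeholders. The parameter shape, restated outside the class for reference (the `Transfer` stand-in below is illustrative; the real type comes from the SDK's generated proto definitions in src/proto/spark.ts):

```ts
// Illustrative stand-in for the generated proto Transfer type.
type Transfer = { id: string; type: number };

// Shape of the new claimTransfer parameter object, per the hunk above.
interface ClaimTransferParams {
  transfer: Transfer; // the transfer to claim (required)
  emit?: boolean; // emit "transfer:claimed" after a successful claim
  retryCount?: number; // internal retry counter driving the backoff below
  optimize?: boolean; // run optimizeLeaves() after claiming (skipped for counter swaps)
}

// Old call shape: claimTransfer(transfer, true)
// New call shape:
declare function claimTransfer(params: ClaimTransferParams): Promise<unknown>;
void claimTransfer({ transfer: { id: "t1", type: 0 }, emit: true, optimize: true });
```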

@@ -2149,7 +2181,10 @@ export class SparkWallet extends EventEmitter {
       const leafPubKey = leafPubKeyMap.get(leaf.leaf.id);
       if (leafPubKey) {
         leavesToClaim.push({
-          leaf:
+          leaf: {
+            ...leaf.leaf,
+            refundTx: leaf.intermediateRefundTx,
+          },
           signingPubKey: leafPubKey,
           newSigningPubKey: await this.config.signer.generatePublicKey(
             sha256(leaf.leaf.id),

@@ -2164,8 +2199,6 @@ export class SparkWallet extends EventEmitter {
         leavesToClaim,
       );

-      this.leaves.push(...response.nodes);
-
       if (emit) {
         this.emit(
           "transfer:claimed",

@@ -2177,15 +2210,28 @@ export class SparkWallet extends EventEmitter {
         return response.nodes;
       });

-      await this.checkRefreshTimelockNodes(result);
+      result = await this.checkRefreshTimelockNodes(result);
       result = await this.checkExtendTimeLockNodes(result);

+      const existingIds = new Set(this.leaves.map((leaf) => leaf.id));
+      const uniqueResults = result.filter((node) => !existingIds.has(node.id));
+      this.leaves.push(...uniqueResults);
+
+      if (optimize && transfer.type !== TransferType.COUNTER_SWAP) {
+        await this.optimizeLeaves();
+      }
+
       return result;
     } catch (error) {
-      if (retryCount < MAX_RETRIES) {
-        this.claimTransfer(
+      if (retryCount && retryCount < MAX_RETRIES) {
+        this.claimTransfer({
+          transfer,
+          emit,
+          retryCount: retryCount + 1,
+          optimize,
+        });
         return [];
-      } else if (retryCount
+      } else if (retryCount) {
        console.warn(
          "Failed to claim transfer. Please try reinitializing your wallet in a few minutes. Transfer ID: " +
            transfer.id,
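
The retry branch keeps the existing exponential backoff: the delay before attempt `retryCount` is `BASE_DELAY_MS * 2^(retryCount - 1)`, capped at `MAX_DELAY_MS`, and both guards now also require `retryCount` to be defined. The resulting schedule, computed with the constants from this method:

```ts
const MAX_RETRIES = 5;
const BASE_DELAY_MS = 1000;
const MAX_DELAY_MS = 10000;

// Delay applied at the top of claimTransfer when retryCount is set.
function delayFor(retryCount: number): number {
  return Math.min(BASE_DELAY_MS * Math.pow(2, retryCount - 1), MAX_DELAY_MS);
}

for (let retry = 1; retry <= MAX_RETRIES; retry++) {
  console.log(`attempt ${retry}: wait ${delayFor(retry)}ms`);
}
// attempt 1: wait 1000ms
// attempt 2: wait 2000ms
// attempt 3: wait 4000ms
// attempt 4: wait 8000ms
// attempt 5: wait 10000ms (16000ms capped at MAX_DELAY_MS)
```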

@@ -2234,7 +2280,7 @@ export class SparkWallet extends EventEmitter {
         continue;
       }
       promises.push(
-        this.claimTransfer(transfer, emit)
+        this.claimTransfer({ transfer, emit, optimize: true })
           .then(() => transfer.id)
           .catch((error) => {
             console.warn(`Failed to claim transfer ${transfer.id}:`, error);

@@ -2280,11 +2326,11 @@ export class SparkWallet extends EventEmitter {
    *
    * @param {Object} params - Parameters for the lightning invoice
    * @param {number} params.amountSats - Amount in satoshis
-   * @param {string} params.memo - Description for the invoice
+   * @param {string} [params.memo] - Description for the invoice. Should not be provided if the descriptionHash is provided.
    * @param {number} [params.expirySeconds] - Optional expiry time in seconds
    * @param {boolean} [params.includeSparkAddress] - Optional boolean signalling whether or not to include the spark address in the invoice
    * @param {string} [params.receiverIdentityPubkey] - Optional public key of the wallet receiving the lightning invoice. If not present, the receiver will be the creator of this request.
-   * @param {string} [params.descriptionHash] - Optional h tag of the invoice. This is the hash of a longer description to include in the lightning invoice. It is used in LNURL and UMA as the hash of the metadata.
+   * @param {string} [params.descriptionHash] - Optional h tag of the invoice. This is the hash of a longer description to include in the lightning invoice. It is used in LNURL and UMA as the hash of the metadata. This field is mutually exclusive with the memo field. Only one or the other should be provided.
    * @returns {Promise<LightningReceiveRequest>} BOLT11 encoded invoice
    */
   public async createLightningInvoice({

@@ -2337,6 +2383,17 @@ export class SparkWallet extends EventEmitter {
       });
     }

+    if (memo && descriptionHash) {
+      throw new ValidationError(
+        "Memo and descriptionHash cannot be provided together. Please provide only one.",
+        {
+          field: "memo",
+          value: memo,
+          expected: "Memo or descriptionHash",
+        },
+      );
+    }
+
     const requestLightningInvoice = async (
       amountSats: number,
       paymentHash: Uint8Array,
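
The new guard makes `memo` and `descriptionHash` mutually exclusive, matching the updated JSDoc in the previous hunk. A hedged usage sketch (an initialized wallet is assumed, and the `descriptionHash` value is illustrative):

```ts
import type { SparkWallet } from "@buildonspark/spark-sdk"; // assumed root export

async function demo(wallet: SparkWallet) {
  // Fine: memo only.
  await wallet.createLightningInvoice({ amountSats: 1000, memo: "coffee" });

  // Fine: LNURL/UMA-style description hash instead of a memo.
  await wallet.createLightningInvoice({
    amountSats: 1000,
    descriptionHash: "a1b2c3...", // illustrative sha256 hex of the LNURL metadata
  });

  // Throws ValidationError as of 0.1.37: both fields provided together.
  await wallet.createLightningInvoice({
    amountSats: 1000,
    memo: "coffee",
    descriptionHash: "a1b2c3...",
  });
}
```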

@@ -2510,7 +2567,7 @@ export class SparkWallet extends EventEmitter {

     let leaves = await this.selectLeaves(totalAmount);

-    await this.checkRefreshTimelockNodes(leaves);
+    leaves = await this.checkRefreshTimelockNodes(leaves);
     leaves = await this.checkExtendTimeLockNodes(leaves);

     const leavesToSend = await Promise.all(

@@ -2741,7 +2798,7 @@ export class SparkWallet extends EventEmitter {
         );
       }
     }
-    await this.checkRefreshTimelockNodes(leavesToSend);
+    leavesToSend = await this.checkRefreshTimelockNodes(leavesToSend);
     leavesToSend = await this.checkExtendTimeLockNodes(leavesToSend);

     const leafKeyTweaks = await Promise.all(

@@ -2828,7 +2885,7 @@ export class SparkWallet extends EventEmitter {

     let leaves = await this.selectLeaves(amountSats);

-    await this.checkRefreshTimelockNodes(leaves);
+    leaves = await this.checkRefreshTimelockNodes(leaves);
     leaves = await this.checkExtendTimeLockNodes(leaves);

     const feeEstimate = await sspClient.getCoopExitFeeEstimate({

@@ -3128,4 +3185,51 @@ export class SparkWallet extends EventEmitter {
       }
     }, 10000);
   }
+
+  private async updateLeaves(
+    leavesToRemove: string[],
+    leavesToAdd: TreeNode[],
+  ) {
+    const leavesToRemoveSet = new Set(leavesToRemove);
+    this.leaves = this.leaves.filter((leaf) => !leavesToRemoveSet.has(leaf.id));
+    this.leaves.push(...leavesToAdd);
+  }
+
+  private async queryNodes(
+    baseRequest: Omit<QueryNodesRequest, "limit" | "offset">,
+    sparkClientAddress?: string,
+    pageSize: number = 100,
+  ): Promise<QueryNodesResponse> {
+    const address = sparkClientAddress ?? this.config.getCoordinatorAddress();
+    const aggregatedNodes: {
+      [key: string]: QueryNodesResponse["nodes"][string];
+    } = {};
+    let offset = 0;
+
+    while (true) {
+      const sparkClient =
+        await this.connectionManager.createSparkClient(address);
+
+      const response = await sparkClient.query_nodes({
+        ...baseRequest,
+        limit: pageSize,
+        offset,
+      });
+
+      /* Merge nodes from this page. If user is sending or receiving payments results can shift
+         accross pages, potentially causing duplicates. Dedupe by node id: */
+      Object.assign(aggregatedNodes, response.nodes ?? {});
+
+      /* If we received fewer nodes than requested, this was the last page. */
+      const received = Object.keys(response.nodes ?? {}).length;
+      if (received < pageSize) {
+        return {
+          nodes: aggregatedNodes,
+          offset: response.offset,
+        } as QueryNodesResponse;
+      }
+
+      offset += pageSize;
+    }
+  }
 }
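
The new private `queryNodes` helper pages through `query_nodes` with a fixed `pageSize`, merges each page into an id-keyed map (which also deduplicates nodes that shift across pages mid-sync), and stops at the first page shorter than `pageSize`. The same loop shape against a toy in-memory source, mirroring the fixture in the new test file below:

```ts
type Page = { nodes: Record<string, { id: string }>; offset: number };

// Toy paginated backend: 3 nodes served 2 per page.
const all = ["n1", "n2", "n3"];
async function fetchPage(offset: number, limit: number): Promise<Page> {
  const slice = all.slice(offset, offset + limit);
  return {
    nodes: Object.fromEntries(slice.map((id) => [id, { id }])),
    offset,
  };
}

// Same termination logic as the queryNodes helper above: merge pages keyed
// by node id, and stop as soon as a page comes back shorter than pageSize.
async function queryAll(pageSize = 2): Promise<Page["nodes"]> {
  const aggregated: Page["nodes"] = {};
  let offset = 0;
  while (true) {
    const page = await fetchPage(offset, pageSize);
    Object.assign(aggregated, page.nodes); // dedupes by id
    if (Object.keys(page.nodes).length < pageSize) {
      return aggregated;
    }
    offset += pageSize;
  }
}

queryAll().then((nodes) => console.log(Object.keys(nodes))); // ["n1","n2","n3"]
```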
@@ -33,6 +33,11 @@ describe("deposit", () => {
|
|
|
33
33
|
// Verify that static deposit addresses don't appear in unused deposit addresses
|
|
34
34
|
const unusedDepositAddresses = await sdk.getUnusedDepositAddresses();
|
|
35
35
|
expect(unusedDepositAddresses).toHaveLength(0);
|
|
36
|
+
|
|
37
|
+
// Check that the same static deposit address is returned a second time.
|
|
38
|
+
const secondDepositAddress = await sdk.getStaticDepositAddress();
|
|
39
|
+
expect(secondDepositAddress).toBeDefined();
|
|
40
|
+
expect(secondDepositAddress).toEqual(depositAddress);
|
|
36
41
|
}, 30000);
|
|
37
42
|
|
|
38
43
|
it("should create a tree root", async () => {
|
|
@@ -86,5 +86,9 @@ describe.skip("SSP static deposit address integration", () => {
|
|
|
86
86
|
|
|
87
87
|
const { balance: balance2 } = await userWallet.getBalance();
|
|
88
88
|
expect(balance2).toBe(BigInt(quoteAmount + quoteAmount2));
|
|
89
|
+
|
|
90
|
+
// Get transfers should include static deposit transfers.
|
|
91
|
+
const transfers = await userWallet.getTransfers();
|
|
92
|
+
expect(transfers.transfers.length).toBe(2);
|
|
89
93
|
}, 60000);
|
|
90
94
|
});
|
|

package/src/tests/spark-wallet/queryNodes.test.ts (new file)

@@ -0,0 +1,89 @@
+import { describe, it, expect, beforeEach, jest } from "@jest/globals";
+import { SparkWallet } from "../../spark-wallet/spark-wallet.js";
+import type { ConnectionManager } from "../../services/connection.js";
+
+/** Helper subclass to expose the private `queryNodes` method for testing. */
+class TestSparkWallet extends SparkWallet {
+  public constructor() {
+    super();
+  }
+
+  /** Expose the private `queryNodes` method as public for tests. */
+  public async queryNodesPublic(
+    ...args: Parameters<SparkWallet["queryNodes"]>
+  ) {
+    return (this as any).queryNodes(...args);
+  }
+
+  /** Replace the internally-created ConnectionManager with a mocked version. */
+  public setConnectionManager(manager: any) {
+    this.connectionManager = manager;
+  }
+}
+
+describe("queryNodes pagination", () => {
+  let wallet: TestSparkWallet;
+  let createSparkClientMock: jest.Mock;
+
+  beforeEach(() => {
+    const pageSize = 2;
+    const paginatedResponses: Record<number, unknown> = {
+      0: {
+        nodes: {
+          n1: { id: "n1" },
+          n2: { id: "n2" },
+        },
+        offset: 0,
+      },
+      2: {
+        nodes: {
+          n2: { id: "n2" },
+          n3: { id: "n3" },
+        },
+        offset: 2,
+      },
+      4: {
+        nodes: {},
+        offset: 4,
+      },
+    };
+
+    // `query_nodes` implementation returns the matching response for the current offset.
+    const queryNodesStub = jest.fn(async ({ offset }: { offset: number }) => {
+      return paginatedResponses[offset] ?? { nodes: {}, offset };
+    });
+
+    // Mock `createSparkClient` so that each call returns an object containing the stub.
+    createSparkClientMock = jest.fn(async () => ({
+      query_nodes: queryNodesStub,
+    }));
+
+    // Mock ConnectionManager housing the mocked factory.
+    const connectionManagerMock = {
+      createSparkClient: createSparkClientMock,
+    };
+
+    wallet = new TestSparkWallet();
+    wallet.setConnectionManager(connectionManagerMock);
+  });
+
+  it("aggregates all pages and removes duplicates", async () => {
+    const result = await wallet.queryNodesPublic(
+      { includeParents: false } as any,
+      undefined,
+      2,
+    );
+
+    // Expect three unique nodes in the final aggregation.
+    expect(Object.keys(result.nodes)).toHaveLength(3);
+    expect(Object.keys(result.nodes)).toEqual(
+      expect.arrayContaining(["n1", "n2", "n3"]),
+    );
+
+    // Ensure we kept the last offset from the mocked response.
+    expect(result.offset).toBe(4);
+
+    // `createSparkClient` must have been invoked once per page (3 times).
+    expect(createSparkClientMock).toHaveBeenCalledTimes(3);
+  });
+});

package/dist/native/chunk-OVEXKBAZ.js (deleted)

@@ -1,14 +0,0 @@
-require("react-native-get-random-values");
-
-// buffer.js
-import { Buffer } from "buffer";
-if (typeof globalThis.Buffer === "undefined") {
-  globalThis.Buffer = Buffer;
-}
-if (typeof global === "undefined") {
-  window.global = window.globalThis;
-}
-
-export {
-  Buffer
-};