@buildonspark/spark-sdk 0.4.1 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77):
  1. package/CHANGELOG.md +11 -0
  2. package/dist/bare/index.cjs +1038 -236
  3. package/dist/bare/index.d.cts +143 -14
  4. package/dist/bare/index.d.ts +143 -14
  5. package/dist/bare/index.js +1043 -240
  6. package/dist/{chunk-4E23VB7E.js → chunk-6UVL674M.js} +2 -2
  7. package/dist/{chunk-YEBEN7XD.js → chunk-BHFVMNXO.js} +623 -16
  8. package/dist/{chunk-EV2PTGGR.js → chunk-JZODGME5.js} +1 -1
  9. package/dist/{chunk-JLF6WJ7K.js → chunk-LUOP2ATD.js} +1 -1
  10. package/dist/{chunk-4YFT7DAE.js → chunk-Q537QTDY.js} +1 -1
  11. package/dist/{chunk-W2S4FPZ4.js → chunk-SUGTL4PD.js} +250 -27
  12. package/dist/{chunk-JR7ZU6LX.js → chunk-U7EELMDO.js} +1 -1
  13. package/dist/{client-BaQf-5gD.d.ts → client-3MdPdtnA.d.ts} +1 -1
  14. package/dist/{client-BIqiUNy4.d.cts → client-BZFWaZ1l.d.cts} +1 -1
  15. package/dist/debug.cjs +1038 -241
  16. package/dist/debug.d.cts +5 -5
  17. package/dist/debug.d.ts +5 -5
  18. package/dist/debug.js +5 -5
  19. package/dist/graphql/objects/index.d.cts +3 -3
  20. package/dist/graphql/objects/index.d.ts +3 -3
  21. package/dist/index.cjs +923 -119
  22. package/dist/index.d.cts +6 -6
  23. package/dist/index.d.ts +6 -6
  24. package/dist/index.js +6 -6
  25. package/dist/index.node.cjs +923 -119
  26. package/dist/index.node.d.cts +6 -6
  27. package/dist/index.node.d.ts +6 -6
  28. package/dist/index.node.js +5 -5
  29. package/dist/{logging-BLzoEEoP.d.ts → logging-g6Oib3Ua.d.ts} +3 -3
  30. package/dist/{logging-Cl-gGtZz.d.cts → logging-miPhbIyT.d.cts} +3 -3
  31. package/dist/native/index.react-native.cjs +923 -119
  32. package/dist/native/index.react-native.d.cts +141 -16
  33. package/dist/native/index.react-native.d.ts +141 -16
  34. package/dist/native/index.react-native.js +926 -121
  35. package/dist/proto/spark.cjs +623 -16
  36. package/dist/proto/spark.d.cts +1 -1
  37. package/dist/proto/spark.d.ts +1 -1
  38. package/dist/proto/spark.js +19 -1
  39. package/dist/proto/spark_token.d.cts +1 -1
  40. package/dist/proto/spark_token.d.ts +1 -1
  41. package/dist/proto/spark_token.js +2 -2
  42. package/dist/{spark-DOpheE8_.d.cts → spark-BWkABx3N.d.cts} +124 -3
  43. package/dist/{spark-DOpheE8_.d.ts → spark-BWkABx3N.d.ts} +124 -3
  44. package/dist/{spark-wallet.browser-XT25hMYM.d.ts → spark-wallet.browser-Cm-JMRQB.d.ts} +2 -2
  45. package/dist/{spark-wallet.browser-BGMpYML0.d.cts → spark-wallet.browser-M7zcmZYm.d.cts} +2 -2
  46. package/dist/{spark-wallet.node-CAfuCfrU.d.ts → spark-wallet.node-B4y08Ri5.d.ts} +2 -2
  47. package/dist/{spark-wallet.node-C8dbiFsD.d.cts → spark-wallet.node-BotQbNoI.d.cts} +2 -2
  48. package/dist/tests/test-utils.cjs +1030 -60
  49. package/dist/tests/test-utils.d.cts +4 -4
  50. package/dist/tests/test-utils.d.ts +4 -4
  51. package/dist/tests/test-utils.js +7 -7
  52. package/dist/{token-transactions-DhVLTxEY.d.cts → token-transactions-C95IPSyM.d.cts} +2 -2
  53. package/dist/{token-transactions-vcOZg63D.d.ts → token-transactions-SPf4ygYc.d.ts} +2 -2
  54. package/dist/types/index.cjs +614 -16
  55. package/dist/types/index.d.cts +2 -2
  56. package/dist/types/index.d.ts +2 -2
  57. package/dist/types/index.js +5 -5
  58. package/dist/{wallet-config-w9e62Plc.d.ts → wallet-config-CQW-mDqN.d.ts} +41 -17
  59. package/dist/{wallet-config-DVEoQg3w.d.cts → wallet-config-JYsN3izc.d.cts} +41 -17
  60. package/package.json +1 -1
  61. package/src/proto/spark.ts +842 -80
  62. package/src/services/lightning.ts +1 -1
  63. package/src/services/transfer.ts +0 -1
  64. package/src/spark-wallet/proto-descriptors.ts +1 -1
  65. package/src/spark-wallet/spark-wallet.ts +279 -16
  66. package/src/spark-wallet/types.ts +6 -7
  67. package/src/spark_descriptors.pb +0 -0
  68. package/src/tests/address.test.ts +10 -103
  69. package/src/tests/integration/address.test.ts +7 -17
  70. package/src/tests/integration/ssp/coop-exit.test.ts +0 -1
  71. package/src/tests/integration/ssp/lightning.test.ts +67 -2
  72. package/src/tests/integration/ssp/swap.test.ts +0 -1
  73. package/src/tests/integration/static_deposit.test.ts +2 -1
  74. package/src/tests/integration/transfer.test.ts +8 -28
  75. package/src/tests/token-hashing.test.ts +4 -4
  76. package/src/tests/xchain-address.test.ts +3 -3
  77. package/src/utils/address.ts +6 -4
@@ -2,12 +2,16 @@ import { describe, expect, it } from "@jest/globals";
2
2
  import { ConfigOptions } from "../../../services/wallet-config.js";
3
3
  import { SparkWallet } from "../../../spark-wallet/spark-wallet.node.js";
4
4
  import {
5
- BitcoinNetwork,
6
5
  CurrencyUnit,
7
6
  LightningReceiveRequestStatus,
8
7
  } from "../../../types/index.js";
9
- import { NetworkType } from "../../../utils/network.js";
10
8
  import { ValidationError } from "../../../errors/types.js";
9
+ import { BitcoinFaucet } from "../../utils/test-faucet.js";
10
+ import { SparkWalletTestingWithStream } from "../../utils/spark-testing-wallet.js";
11
+ import { waitForClaim } from "../../utils/utils.js";
12
+
13
+ const DEPOSIT_AMOUNT = 10000n;
14
+ const INVOICE_AMOUNT = 1000;
11
15
 
12
16
  const options: ConfigOptions = {
13
17
  network: "LOCAL",
@@ -55,6 +59,67 @@ describe("Lightning Network provider", () => {
55
59
  );
56
60
  });
57
61
 
62
+ describe("should pay lightning invoice", () => {
63
+ it("should pay lightning invoice created by another wallet", async () => {
64
+ const faucet = BitcoinFaucet.getInstance();
65
+
66
+ const { wallet: aliceWallet } =
67
+ await SparkWalletTestingWithStream.initialize({
68
+ options: {
69
+ network: "LOCAL",
70
+ },
71
+ });
72
+
73
+ const { wallet: bobWallet } =
74
+ await SparkWalletTestingWithStream.initialize({
75
+ options: {
76
+ network: "LOCAL",
77
+ },
78
+ });
79
+
80
+ const depositAddress = await aliceWallet.getSingleUseDepositAddress();
81
+ expect(depositAddress).toBeDefined();
82
+
83
+ const signedTx = await faucet.sendToAddress(
84
+ depositAddress,
85
+ DEPOSIT_AMOUNT,
86
+ );
87
+
88
+ // Wait for the transaction to be mined
89
+ await faucet.mineBlocksAndWaitForMiningToComplete(6);
90
+
91
+ await aliceWallet.claimDeposit(signedTx.id);
92
+
93
+ await waitForClaim({ wallet: aliceWallet });
94
+
95
+ const { balance } = await aliceWallet.getBalance();
96
+ expect(balance).toBe(DEPOSIT_AMOUNT);
97
+
98
+ const invoice = await bobWallet.createLightningInvoice({
99
+ amountSats: INVOICE_AMOUNT,
100
+ memo: "test",
101
+ expirySeconds: 10,
102
+ });
103
+
104
+ expect(invoice).toBeDefined();
105
+
106
+ await aliceWallet.payLightningInvoice({
107
+ invoice: invoice.invoice.encodedInvoice,
108
+ maxFeeSats: 100,
109
+ });
110
+
111
+ await waitForClaim({ wallet: bobWallet });
112
+
113
+ const { balance: bobBalance } = await bobWallet.getBalance();
114
+ expect(bobBalance).toBe(BigInt(INVOICE_AMOUNT));
115
+
116
+ const { balance: aliceBalance } = await aliceWallet.getBalance();
117
+ expect(aliceBalance).toBeLessThan(
118
+ DEPOSIT_AMOUNT - BigInt(INVOICE_AMOUNT),
119
+ );
120
+ }, 120000);
121
+ });
122
+
58
123
  describe("should fail to create lightning invoice", () => {
59
124
  it(`should fail to create lightning invoice with invalid amount`, async () => {
60
125
  await expect(
@@ -1,5 +1,4 @@
1
1
  import { describe, expect, it } from "@jest/globals";
2
- import { SparkWalletTesting } from "../../utils/spark-testing-wallet.js";
3
2
  import { BitcoinFaucet } from "../../utils/test-faucet.js";
4
3
  import { SparkWalletTestingWithStream } from "../../utils/spark-testing-wallet.js";
5
4
 
@@ -48,7 +48,8 @@ describe("SSP static deposit address integration", () => {
48
48
  ).rejects.toThrow();
49
49
  }, 600000);
50
50
 
51
- it("should refund and broadcast a static deposit refund transaction", async () => {
51
+ // Skipping because chainwatcher is not catching up to the transaction.
52
+ it.skip("should refund and broadcast a static deposit refund transaction", async () => {
52
53
  const {
53
54
  wallet: userWallet,
54
55
  depositAddress,
@@ -5,8 +5,6 @@ import { wordlist } from "@scure/bip39/wordlists/english";
5
5
  import { uuidv7 } from "uuidv7";
6
6
  import { RPCError } from "../../errors/types.js";
7
7
  import {
8
- decodeSparkAddress,
9
- isLegacySparkAddress,
10
8
  KeyDerivation,
11
9
  KeyDerivationType,
12
10
  SparkWalletEvent,
@@ -18,7 +16,7 @@ import { SigningService } from "../../services/signing.js";
18
16
  import type { LeafKeyTweak } from "../../services/transfer.js";
19
17
  import { TransferService } from "../../services/transfer.js";
20
18
  import {
21
- ConfigOptions,
19
+ type ConfigOptions,
22
20
  getLocalSigningOperators,
23
21
  WalletConfig,
24
22
  } from "../../services/wallet-config.js";
@@ -1277,26 +1275,9 @@ describe.each(walletTypes)("transfer v2", ({ name, Signer, createTree }) => {
1277
1275
  bobSigningService,
1278
1276
  );
1279
1277
 
1280
- const legacySparkAddress = await bob.getSparkAddress();
1281
- const newSparkAddress =
1282
- "sparkl1pgssxlp9dr9ypzqf2havm5weefu6470l062k3ujtw4uu6gjmfgl599rxucvvkr";
1283
-
1284
- // TODO: Remove this once we upgrade to the new spark address format
1285
- expect(isLegacySparkAddress(legacySparkAddress)).toBe(true);
1286
-
1287
- const decodedLegacySparkAddress = decodeSparkAddress(
1288
- legacySparkAddress,
1289
- "LOCAL",
1290
- );
1291
- const decodedNewSparkAddress = decodeSparkAddress(newSparkAddress, "LOCAL");
1292
-
1293
- expect(decodedLegacySparkAddress).toMatchObject({
1294
- ...decodedNewSparkAddress,
1295
- });
1296
-
1297
1278
  await alice.transfer({
1298
1279
  amountSats: 1000,
1299
- receiverSparkAddress: newSparkAddress,
1280
+ receiverSparkAddress: await bob.getSparkAddress(),
1300
1281
  });
1301
1282
 
1302
1283
  const pendingTransfers = await bob.queryPendingTransfers();
@@ -1319,13 +1300,12 @@ describe.each(walletTypes)("transfer v2", ({ name, Signer, createTree }) => {
1319
1300
  });
1320
1301
  });
1321
1302
 
1322
- // TODO: (CNT-493) Re-enable invoice functionality once spark address migration is complete
1323
- describe.skip.each(walletTypes)(
1303
+ describe.each(walletTypes)(
1324
1304
  "fulfill spark invoice",
1325
1305
  ({ name, Signer, createTree }) => {
1326
1306
  jest.setTimeout(25_000);
1327
1307
 
1328
- it.skip(`${name} - test multiple valid transfers with invoice and nil amount invoice`, async () => {
1308
+ it(`${name} - test multiple valid transfers with invoice and nil amount invoice`, async () => {
1329
1309
  const faucet = BitcoinFaucet.getInstance();
1330
1310
 
1331
1311
  const options: ConfigOptions = {
@@ -1512,7 +1492,7 @@ describe.skip.each(walletTypes)(
1512
1492
  amount: 1_000,
1513
1493
  memo: "Test invoice",
1514
1494
  expiryTime: tomorrow,
1515
- senderPublicKey: await sdk2.getIdentityPublicKey(), // invalid sender public key - receiver as sender
1495
+ senderSparkAddress: await sdk2.getSparkAddress(), // invalid sender public key - receiver as sender
1516
1496
  });
1517
1497
 
1518
1498
  const transferResults = await sdk.fulfillSparkInvoice([
@@ -1573,7 +1553,7 @@ describe.skip.each(walletTypes)(
1573
1553
  amount: 1_000,
1574
1554
  memo: "Test invoice",
1575
1555
  expiryTime: yesterday,
1576
- senderPublicKey: await sdk.getIdentityPublicKey(),
1556
+ senderSparkAddress: await sdk.getSparkAddress(),
1577
1557
  });
1578
1558
 
1579
1559
  const transferResults = await sdk.fulfillSparkInvoice([
@@ -1583,7 +1563,7 @@ describe.skip.each(walletTypes)(
1583
1563
  expect(invalidInvoices.length).toBe(1);
1584
1564
  });
1585
1565
 
1586
- it.skip(`${name} - should error when paying the same invoice twice`, async () => {
1566
+ it(`${name} - should error when paying the same invoice twice`, async () => {
1587
1567
  const faucet = BitcoinFaucet.getInstance();
1588
1568
 
1589
1569
  const options: ConfigOptions = {
@@ -1629,7 +1609,7 @@ describe.skip.each(walletTypes)(
1629
1609
  amount: 1_000,
1630
1610
  memo: "Test invoice",
1631
1611
  expiryTime: tomorrow,
1632
- senderPublicKey: await sdk.getIdentityPublicKey(),
1612
+ senderSparkAddress: await sdk.getSparkAddress(),
1633
1613
  });
1634
1614
 
1635
1615
  await sdk.fulfillSparkInvoice([{ invoice: invoice1000 }]);
@@ -30,11 +30,11 @@ const TEST_OPERATOR_PUB_KEY = new Uint8Array([
30
30
  const TEST_INVOICE_ATTACHMENTS = [
31
31
  {
32
32
  sparkInvoice:
33
- "sprt1pgssypkrjhrpzt2hw0ggrmndanmm035ley75nxu3gejaju4wx9nq86lwzfjqsqgjzqqe3zul2fm8a24y576t0ne2ehup5fg2yz4r6hxlhatyu9kpw09s2fk36ta5j0k85qascf6snpuy4sp0rp4ezyspvs4qgmt9d4hnyggzqmpet3s394th85ypaek7eaahc60uj02fnwg5vewew2hrzesra0hqflc0vn",
33
+ "sparkrt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnzfmssqgjzqqejtaxmwj8ms9rn58574nvlq4j5zr5v4ehgnt9d4hnyggr2wgghtqfpwn5rhnpg7j5pfn92dcqdyg4jrunyjdjsg7muxraxgfn5rqgandgr3sxzrqdmew8qydzvz3qpylysylkgcaw9vpm2jzspls0qtr5kfmlwz244rvuk25w5w2sgc2pyqsraqdyp8tf57a6cn2egttaas9ms3whssenmjqt8wag3lgyvdzjskfeupt8xwwdx4agxdm9f0wefzj28jmdxqeudwcwdj9vfl9sdr65x06r0tasf5fwz2",
34
34
  },
35
35
  {
36
36
  sparkInvoice:
37
- "sprt1pgssypkrjhrpzt2hw0ggrmndanmm035ley75nxu3gejaju4wx9nq86lwzf5ssqgjzqqe3zulcs6h42v0kqkdsv9utxyp5fs2yz4r6hxlhatyu9kpw09s2fk36ta5j0k85qascf6snpuy4sp0rp4ezyszq86z5zryd9nxvmt9d4hnyggzqmpet3s394th85ypaek7eaahc60uj02fnwg5vewew2hrzesra0hql7r5ne",
37
+ "sparkrt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnzfmqsqgjzqqejtavuhf8n5uh9a74zw66kqaz5zr5v4ehgnt9d4hnyggr2wgghtqfpwn5rhnpg7j5pfn92dcqdyg4jrunyjdjsg7muxraxgfn5zcglrwcr3sxzzqt3wrjrgnq5gqf8eyp8ajx8t3tqw65s5q0urczca9jwlmsj4dgm89j4r4rj5zxzsfqyqlgrfqw9ucldgmfzs5zmkekj90thwzmn6ps55gdjnz23aarjkf245608yg0v2x6xdpdrz6m8xjlhtru0kygcu4zhqwlth9duadfqpruuzx4tc7fdckn",
38
38
  },
39
39
  ];
40
40
 
@@ -202,8 +202,8 @@ describe("hash token transaction", () => {
202
202
  const hash = hashTokenTransactionV2(tokenTransaction, false);
203
203
 
204
204
  expect(Array.from(hash)).toEqual([
205
- 139, 4, 220, 112, 69, 32, 149, 81, 90, 67, 151, 101, 240, 182, 13, 123,
206
- 70, 4, 153, 159, 172, 225, 15, 120, 71, 219, 154, 27, 72, 167, 2, 149,
205
+ 19, 39, 37, 63, 91, 26, 243, 192, 252, 18, 74, 19, 59, 241, 142, 11, 20,
206
+ 6, 129, 246, 162, 133, 158, 123, 133, 98, 169, 100, 172, 163, 231, 32,
207
207
  ]);
208
208
  });
209
209
  });
@@ -5,17 +5,17 @@ describe("xchain-address", () => {
5
5
  [
6
6
  "mainnet",
7
7
  "bc1pvluhspufxmuus9wh3dshxhxfg3656c9mwfw85scaydyp7sk9800sl4h5ae",
8
- "sp1pgssyele0qrcjdheeq2a0zmpwdwvj3r4f4stkuju0fp36g6grapv2w7ltce4pm",
8
+ "spark1pgssyele0qrcjdheeq2a0zmpwdwvj3r4f4stkuju0fp36g6grapv2w7l8am2cp",
9
9
  ],
10
10
  [
11
11
  "testnet",
12
12
  "tb1psw24aa3jkdkndr68gy2gk3ws4hm7dnsccqqtd92fxxqv2jhv5fcqkfyu7v",
13
- "spt1pgss9qu4tmmr9vmdx685wsg53dzapt0hum8p3sqqk625jvvqc49wegns9pkyp6",
13
+ "sparkt1pgss9qu4tmmr9vmdx685wsg53dzapt0hum8p3sqqk625jvvqc49wegnst05852",
14
14
  ],
15
15
  [
16
16
  "regtest",
17
17
  "bcrt1p47kh0ff29d3rjw2n43vxqgmrgv9az562x37x6dp4ehyqq7ezyhcqlz5y42",
18
- "sprt1pgss9tadw7jj52mz8yu48tzcvq3kxsct69f55drud56rtnwgqpajyf0sp8jkjq",
18
+ "sparkrt1pgss9tadw7jj52mz8yu48tzcvq3kxsct69f55drud56rtnwgqpajyf0stj62w6",
19
19
  ],
20
20
  ])(
21
21
  "getSparkAddressFromTaproot success (%s)",
@@ -63,14 +63,14 @@ export interface DecodedSparkAddressData {
63
63
 
64
64
  export function encodeSparkAddress(
65
65
  payload: SparkAddressData,
66
- ): LegacySparkAddressFormat {
66
+ ): SparkAddressFormat {
67
67
  return encodeSparkAddressWithSignature(payload);
68
68
  }
69
69
 
70
70
  export function encodeSparkAddressWithSignature(
71
71
  payload: SparkAddressData,
72
72
  signature?: Uint8Array,
73
- ): LegacySparkAddressFormat {
73
+ ): SparkAddressFormat {
74
74
  try {
75
75
  isValidPublicKey(payload.identityPublicKey);
76
76
  const identityPublicKey = hexToBytes(payload.identityPublicKey);
@@ -99,9 +99,9 @@ export function encodeSparkAddressWithSignature(
99
99
  const words = bech32m.toWords(serializedPayload);
100
100
 
101
101
  return bech32mEncode(
102
- LegacyAddressNetwork[payload.network],
102
+ AddressNetwork[payload.network],
103
103
  words,
104
- ) as LegacySparkAddressFormat;
104
+ ) as SparkAddressFormat;
105
105
  } catch (error) {
106
106
  throw new ValidationError(
107
107
  "Failed to encode Spark address",
@@ -193,6 +193,7 @@ export function decodeSparkAddress(
193
193
  const PrefixToNetwork: Record<string, NetworkType> = Object.fromEntries(
194
194
  Object.entries(AddressNetwork).map(([k, v]) => [v, k as NetworkType]),
195
195
  ) as Record<string, NetworkType>;
196
+
196
197
  const LegacyPrefixToNetwork: Record<string, NetworkType> = Object.fromEntries(
197
198
  Object.entries(LegacyAddressNetwork).map(([k, v]) => [v, k as NetworkType]),
198
199
  ) as Record<string, NetworkType>;
@@ -210,6 +211,7 @@ export function getNetworkFromSparkAddress(address: string): NetworkType {
210
211
  }
211
212
  return network;
212
213
  }
214
+
213
215
  export function isLegacySparkAddress(
214
216
  address: string,
215
217
  ): address is LegacySparkAddressFormat {