@buildonspark/spark-sdk 0.3.3 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/CHANGELOG.md +15 -0
  2. package/dist/bare/index.cjs +945 -1401
  3. package/dist/bare/index.d.cts +128 -10
  4. package/dist/bare/index.d.ts +128 -10
  5. package/dist/bare/index.js +940 -1399
  6. package/dist/{chunk-55XNR6DM.js → chunk-DIXXHATX.js} +1 -1
  7. package/dist/{chunk-MGCUXELA.js → chunk-IC4IUEOS.js} +931 -125
  8. package/dist/{chunk-MH7BMOLL.js → chunk-J2P3KTQP.js} +1 -1
  9. package/dist/{chunk-SXXM52XH.js → chunk-JE73HB26.js} +409 -1656
  10. package/dist/{chunk-73GJOG5R.js → chunk-XWLR6G5C.js} +1 -1
  11. package/dist/{client-DrjQwET9.d.ts → client-DBZ43pJT.d.ts} +1 -1
  12. package/dist/{client-DUFejFfn.d.cts → client-DWml6sjL.d.cts} +1 -1
  13. package/dist/debug.cjs +949 -1403
  14. package/dist/debug.d.cts +8 -5
  15. package/dist/debug.d.ts +8 -5
  16. package/dist/debug.js +4 -4
  17. package/dist/graphql/objects/index.d.cts +3 -3
  18. package/dist/graphql/objects/index.d.ts +3 -3
  19. package/dist/index.cjs +905 -1362
  20. package/dist/index.d.cts +6 -6
  21. package/dist/index.d.ts +6 -6
  22. package/dist/index.js +9 -5
  23. package/dist/index.node.cjs +905 -1362
  24. package/dist/index.node.d.cts +6 -6
  25. package/dist/index.node.d.ts +6 -6
  26. package/dist/index.node.js +8 -4
  27. package/dist/{logging-CGeEoKYd.d.cts → logging-BUpzk4Z6.d.cts} +3 -3
  28. package/dist/{logging-DpSsvFVM.d.ts → logging-Dt2ooQiP.d.ts} +3 -3
  29. package/dist/native/index.cjs +905 -1362
  30. package/dist/native/index.d.cts +129 -25
  31. package/dist/native/index.d.ts +129 -25
  32. package/dist/native/index.js +904 -1363
  33. package/dist/proto/spark.cjs +931 -125
  34. package/dist/proto/spark.d.cts +1 -1
  35. package/dist/proto/spark.d.ts +1 -1
  36. package/dist/proto/spark.js +17 -1
  37. package/dist/proto/spark_token.d.cts +1 -1
  38. package/dist/proto/spark_token.d.ts +1 -1
  39. package/dist/proto/spark_token.js +2 -2
  40. package/dist/{spark-CLz4-Ln8.d.cts → spark-DasxuVfm.d.cts} +150 -5
  41. package/dist/{spark-CLz4-Ln8.d.ts → spark-DasxuVfm.d.ts} +150 -5
  42. package/dist/{spark-wallet-BVBrWYKL.d.cts → spark-wallet-BoMIOPWW.d.cts} +13 -9
  43. package/dist/{spark-wallet-CFPm6wZs.d.ts → spark-wallet-jlC0XN5f.d.ts} +13 -9
  44. package/dist/{spark-wallet.node-e1gncoIZ.d.ts → spark-wallet.node-07PksUHH.d.cts} +1 -1
  45. package/dist/{spark-wallet.node-B_00X-1j.d.cts → spark-wallet.node-CdWkKMSq.d.ts} +1 -1
  46. package/dist/tests/test-utils.cjs +947 -144
  47. package/dist/tests/test-utils.d.cts +4 -4
  48. package/dist/tests/test-utils.d.ts +4 -4
  49. package/dist/tests/test-utils.js +5 -5
  50. package/dist/{token-transactions-BkAqlmY6.d.ts → token-transactions-BDzCrQSk.d.cts} +5 -19
  51. package/dist/{token-transactions-BZGtwFFM.d.cts → token-transactions-DscJaJOE.d.ts} +5 -19
  52. package/dist/types/index.cjs +923 -125
  53. package/dist/types/index.d.cts +2 -2
  54. package/dist/types/index.d.ts +2 -2
  55. package/dist/types/index.js +2 -2
  56. package/package.json +1 -1
  57. package/src/proto/spark.ts +1167 -103
  58. package/src/services/config.ts +0 -4
  59. package/src/services/token-transactions.ts +11 -703
  60. package/src/services/wallet-config.ts +0 -2
  61. package/src/spark-wallet/proto-descriptors.ts +1 -1
  62. package/src/spark-wallet/spark-wallet.ts +58 -215
  63. package/src/spark_descriptors.pb +0 -0
  64. package/src/tests/address.test.ts +141 -0
  65. package/src/tests/integration/address.test.ts +4 -0
  66. package/src/tests/integration/lightning.test.ts +14 -9
  67. package/src/tests/integration/token-output.test.ts +0 -1
  68. package/src/tests/integration/transfer.test.ts +108 -2
  69. package/src/tests/token-hashing.test.ts +0 -247
  70. package/src/utils/address.ts +58 -35
  71. package/src/utils/token-hashing.ts +1 -420
  72. package/src/utils/token-transaction-validation.ts +0 -330
package/src/spark-wallet/spark-wallet.ts:

@@ -5,17 +5,17 @@ import {
   bytesToNumberBE,
   equalBytes,
   hexToBytes,
-  numberToVarBytesBE,
 } from "@noble/curves/utils";
 import { validateMnemonic } from "@scure/bip39";
 import { wordlist } from "@scure/bip39/wordlists/english";
 import { Address, OutScript, Transaction } from "@scure/btc-signer";
 import { TransactionInput } from "@scure/btc-signer/psbt";
 import { Mutex } from "async-mutex";
-import { uuidv7, uuidv7obj } from "uuidv7";
+import { uuidv7 } from "uuidv7";
 import {
   ConfigurationError,
   NetworkError,
+  NotImplementedError,
   RPCError,
   ValidationError,
 } from "../errors/types.js";
@@ -40,6 +40,7 @@ import {
   OutputWithPreviousTransactionData,
   QueryNodesRequest,
   QueryNodesResponse,
+  QuerySparkInvoicesResponse,
   SigningJob,
   SubscribeToEventsResponse,
   Transfer,
@@ -118,14 +119,11 @@ import {
 import {
   decodeSparkAddress,
   encodeSparkAddress,
-  encodeSparkAddressWithSignature,
   isSafeForNumber,
   SparkAddressFormat,
-  validateSparkInvoiceFields,
 } from "../utils/address.js";
 import { chunkArray } from "../utils/chunkArray.js";
 import { getFetch } from "../utils/fetch.js";
-import { HashSparkInvoice } from "../utils/invoice-hashing.js";
 import { addPublicKeys } from "../utils/keys.js";
 import { RetryContext, withRetry } from "../utils/retry.js";
 import {
@@ -141,6 +139,7 @@ import type {
   InitWalletResponse,
   InvalidInvoice,
   PayLightningInvoiceParams,
+  SparkWalletEvents,
   SparkWalletProps,
   TokenBalanceMap,
   TokenInvoice,
@@ -150,7 +149,6 @@ import type {
   TransferWithInvoiceOutcome,
   TransferWithInvoiceParams,
   UserTokenMetadata,
-  SparkWalletEvents,
 } from "./types.js";
 import { SparkWalletEvent } from "./types.js";
 
@@ -836,51 +834,8 @@ export class SparkWallet extends EventEmitter<SparkWalletEvents> {
     senderPublicKey?: string;
     expiryTime?: Date;
   }): Promise<SparkAddressFormat> {
-    const MAX_SATS_AMOUNT = 2_100_000_000_000_000; // 21_000_000 BTC * 100_000_000 sats/BTC
-    if (amount && (amount < 0 || amount > MAX_SATS_AMOUNT)) {
-      throw new ValidationError(
-        `Amount must be between 0 and ${MAX_SATS_AMOUNT} sats`,
-        {
-          field: "amount",
-          value: amount,
-          expected: `less than or equal to ${MAX_SATS_AMOUNT}`,
-        },
-      );
-    }
-    const protoPayment = {
-      $case: "satsPayment",
-      satsPayment: {
-        amount: amount,
-      },
-    } as const;
-    const invoiceFields = {
-      version: 1,
-      id: uuidv7obj().bytes,
-      paymentType: protoPayment,
-      memo: memo,
-      senderPublicKey: senderPublicKey
-        ? hexToBytes(senderPublicKey)
-        : undefined,
-      expiryTime: expiryTime ?? undefined,
-    };
-    validateSparkInvoiceFields(invoiceFields);
-
-    const identityPublicKey = await this.config.signer.getIdentityPublicKey();
-    const hash = HashSparkInvoice(
-      invoiceFields,
-      identityPublicKey,
-      this.config.getNetworkType(),
-    );
-    const signature = await this.config.signer.signSchnorrWithIdentityKey(hash);
-
-    return encodeSparkAddressWithSignature(
-      {
-        identityPublicKey: bytesToHex(identityPublicKey),
-        network: this.config.getNetworkType(),
-        sparkInvoiceFields: invoiceFields,
-      },
-      signature,
-    );
+    // TODO: (CNT-493) Re-enable invoice functionality once spark address migration is complete
+    throw new NotImplementedError("Invoice functionality is not enabled");
   }
 
   /**
@@ -907,57 +862,8 @@ export class SparkWallet extends EventEmitter<SparkWalletEvents> {
     senderPublicKey?: string;
     expiryTime?: Date;
   }): Promise<SparkAddressFormat> {
-    const MAX_UINT128 = BigInt(2 ** 128 - 1);
-    if (amount && (amount < 0 || amount > MAX_UINT128)) {
-      throw new ValidationError(`Amount must be between 0 and ${MAX_UINT128}`, {
-        field: "amount",
-        value: amount,
-        expected: `greater than or equal to 0 and less than or equal to ${MAX_UINT128}`,
-      });
-    }
-    let decodedTokenIdentifier: Uint8Array | undefined = undefined;
-    if (tokenIdentifier) {
-      decodedTokenIdentifier = decodeBech32mTokenIdentifier(
-        tokenIdentifier,
-        this.config.getNetworkType(),
-      ).tokenIdentifier;
-    }
-
-    const protoPayment = {
-      $case: "tokensPayment",
-      tokensPayment: {
-        tokenIdentifier: decodedTokenIdentifier ?? undefined,
-        amount: amount ? numberToVarBytesBE(amount) : undefined,
-      },
-    } as const;
-    const invoiceFields = {
-      version: 1,
-      id: uuidv7obj().bytes,
-      paymentType: protoPayment,
-      memo: memo ?? undefined,
-      senderPublicKey: senderPublicKey
-        ? hexToBytes(senderPublicKey)
-        : undefined,
-      expiryTime: expiryTime ?? undefined,
-    };
-    validateSparkInvoiceFields(invoiceFields);
-
-    const identityPublicKey = await this.config.signer.getIdentityPublicKey();
-    const hash = HashSparkInvoice(
-      invoiceFields,
-      identityPublicKey,
-      this.config.getNetworkType(),
-    );
-    const signature = await this.config.signer.signSchnorrWithIdentityKey(hash);
-
-    return encodeSparkAddressWithSignature(
-      {
-        identityPublicKey: bytesToHex(identityPublicKey),
-        network: this.config.getNetworkType(),
-        sparkInvoiceFields: invoiceFields,
-      },
-      signature,
-    );
+    // TODO: (CNT-493) Re-enable invoice functionality once spark address migration is complete
+    throw new NotImplementedError("Invoice functionality is not enabled");
   }
 
   /**
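Note: after this change every Spark-invoice entry point on SparkWallet throws until the address migration (CNT-493) lands. A minimal defensive-caller sketch, assuming only what the diff shows: NotImplementedError comes from the package's error types. The import path and the createSatsInvoice name below are illustrative assumptions, not confirmed API:

    import { NotImplementedError } from "@buildonspark/spark-sdk";

    async function tryCreateInvoice(
      wallet: { createSatsInvoice(args: { amount?: number }): Promise<string> },
    ): Promise<string | undefined> {
      try {
        return await wallet.createSatsInvoice({ amount: 1000 });
      } catch (e) {
        if (e instanceof NotImplementedError) {
          // Invoice creation is disabled for now (CNT-493); fall back gracefully.
          return undefined;
        }
        throw e; // Unrelated failures should still surface.
      }
    }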
@@ -3370,11 +3276,41 @@ export class SparkWallet extends EventEmitter<SparkWalletEvents> {
       throw new Error("Failed to create lightning invoice");
     }
 
+    const decodedInvoice = decodeInvoice(invoice.invoice.encodedInvoice);
+
+    if (
+      invoice.invoice.paymentHash !== bytesToHex(paymentHash) ||
+      decodedInvoice.paymentHash !== bytesToHex(paymentHash)
+    ) {
+      throw new ValidationError("Payment hash mismatch", {
+        field: "paymentHash",
+        value: invoice.invoice.paymentHash,
+        expected: bytesToHex(paymentHash),
+      });
+    }
+
+    if (decodedInvoice.amountMSats === null && amountSats !== 0) {
+      throw new ValidationError("Amount mismatch", {
+        field: "amountMSats",
+        value: "null",
+        expected: amountSats * 1000,
+      });
+    }
+
+    if (
+      decodedInvoice.amountMSats !== null &&
+      decodedInvoice.amountMSats !== BigInt(amountSats * 1000)
+    ) {
+      throw new ValidationError("Amount mismatch", {
+        field: "amountMSats",
+        value: decodedInvoice.amountMSats,
+        expected: amountSats * 1000,
+      });
+    }
+
     // Validate the spark address embedded in the lightning invoice
     if (includeSparkAddress) {
-      const sparkFallbackAddress = decodeInvoice(
-        invoice.invoice.encodedInvoice,
-      ).fallbackAddress;
+      const sparkFallbackAddress = decodedInvoice.fallbackAddress;
 
       if (!sparkFallbackAddress) {
         throw new ValidationError(
@@ -3400,6 +3336,14 @@ export class SparkWallet extends EventEmitter<SparkWalletEvents> {
         },
       );
       }
+    } else if (decodedInvoice.fallbackAddress !== undefined) {
+      throw new ValidationError(
+        "Spark fallback address found in lightning invoice but includeSparkAddress is false",
+        {
+          field: "sparkFallbackAddress",
+          value: decodedInvoice.fallbackAddress,
+        },
+      );
     }
 
     return invoice;
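Note: the added checks enforce two invariants before an invoice is returned: the payment hash reported by the SSP and the hash decoded from the BOLT11 string must both equal the locally generated payment hash, and the decoded amount (in millisatoshis) must equal amountSats * 1000, with an amountless invoice accepted only when amountSats is 0. A standalone sketch of the amount rule; the Decoded shape is a stand-in for whatever decodeInvoice actually returns:

    type Decoded = { paymentHash: string; amountMSats: bigint | null };

    function checkAmount(decoded: Decoded, amountSats: number): void {
      if (decoded.amountMSats === null) {
        // An amountless invoice only satisfies a zero-amount request.
        if (amountSats !== 0) throw new Error("Amount mismatch");
        return;
      }
      // 1 sat = 1000 msat; multiplying as bigint avoids float precision loss
      // for very large sat amounts.
      if (decoded.amountMSats !== BigInt(amountSats) * 1000n) {
        throw new Error("Amount mismatch");
      }
    }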
@@ -3613,116 +3557,8 @@ export class SparkWallet extends EventEmitter<SparkWalletEvents> {
       amount?: bigint;
     }[],
   ): Promise<FulfillSparkInvoiceResponse> {
-    if (!Array.isArray(sparkInvoices) || sparkInvoices.length === 0) {
-      throw new ValidationError("No Spark invoices provided", {
-        field: "sparkInvoices",
-        value: sparkInvoices,
-        expected: "Non-empty array",
-      });
-    }
-    const satsTransactionSuccess: {
-      invoice: SparkAddressFormat;
-      transferResponse: WalletTransfer;
-    }[] = [];
-    const satsTransactionErrors: {
-      invoice: string;
-      error: Error;
-    }[] = [];
-    const tokenTransactionSuccess: {
-      tokenIdentifier: Bech32mTokenIdentifier;
-      txid: string;
-    }[] = [];
-    const tokenTransactionErrors: {
-      tokenIdentifier: Bech32mTokenIdentifier;
-      error: Error;
-    }[] = [];
-
-    const { satsInvoices, tokenInvoices, invalidInvoices } =
-      await this.groupSparkInvoicesByPaymentType(sparkInvoices);
-    if (invalidInvoices.length > 0) {
-      return {
-        satsTransactionSuccess,
-        satsTransactionErrors,
-        tokenTransactionSuccess,
-        tokenTransactionErrors,
-        invalidInvoices,
-      };
-    }
-
-    if (tokenInvoices.size > 0) {
-      await this.syncTokenOutputs();
-      const tokenTransferTasks: Promise<
-        | { ok: true; tokenIdentifier: Bech32mTokenIdentifier; txid: string }
-        | { ok: false; tokenIdentifier: Bech32mTokenIdentifier; error: Error }
-      >[] = [];
-      for (const [identifierHex, decodedInvoices] of tokenInvoices.entries()) {
-        const tokenIdentifier = hexToBytes(identifierHex);
-        const tokenIdB32 = encodeBech32mTokenIdentifier({
-          tokenIdentifier,
-          network: this.config.getNetworkType(),
-        }) as Bech32mTokenIdentifier;
-
-        const receiverOutputs = decodedInvoices.map((d) => ({
-          tokenIdentifier: tokenIdB32,
-          tokenAmount: d.amount!,
-          receiverSparkAddress: d.invoice,
-        }));
-
-        tokenTransferTasks.push(
-          this.tokenTransactionService
-            .tokenTransfer({ tokenOutputs: this.tokenOutputs, receiverOutputs })
-            .then((txid) => ({
-              ok: true as const,
-              tokenIdentifier: tokenIdB32,
-              txid,
-            }))
-            .catch((e: any) => ({
-              ok: false as const,
-              tokenIdentifier: tokenIdB32,
-              error: e instanceof Error ? e : new Error(String(e)),
-            })),
-        );
-      }
-      const results = await Promise.all(tokenTransferTasks);
-      for (const r of results) {
-        if (r.ok) {
-          tokenTransactionSuccess.push({
-            tokenIdentifier: r.tokenIdentifier,
-            txid: r.txid,
-          });
-        } else {
-          tokenTransactionErrors.push({
-            tokenIdentifier: r.tokenIdentifier,
-            error: r.error,
-          });
-        }
-      }
-    }
-
-    if (satsInvoices.length > 0) {
-      const transfers = await this.transferWithInvoice(satsInvoices);
-      for (const transfer of transfers) {
-        if (transfer.ok) {
-          satsTransactionSuccess.push({
-            invoice: transfer.param.sparkInvoice ?? ("" as SparkAddressFormat),
-            transferResponse: transfer.transfer,
-          });
-        } else {
-          satsTransactionErrors.push({
-            invoice: transfer.param.sparkInvoice ?? ("" as SparkAddressFormat),
-            error: transfer.error,
-          });
-        }
-      }
-    }
-
-    return {
-      satsTransactionSuccess,
-      satsTransactionErrors,
-      tokenTransactionSuccess,
-      tokenTransactionErrors,
-      invalidInvoices,
-    };
+    // TODO: (CNT-493) Re-enable invoice functionality once spark address migration is complete
+    throw new NotImplementedError("Invoice functionality is not enabled");
   }
 
   private async groupSparkInvoicesByPaymentType(
@@ -3876,6 +3712,13 @@ export class SparkWallet extends EventEmitter<SparkWalletEvents> {
     return { satsInvoices, tokenInvoices, invalidInvoices };
   }
 
+  public async querySparkInvoices(
+    invoices: string[],
+  ): Promise<QuerySparkInvoicesResponse> {
+    // TODO: (CNT-493) Re-enable invoice functionality once spark address migration is complete
+    throw new NotImplementedError("Invoice functionality is not enabled");
+  }
+
   /**
    * Gets fee estimate for sending Lightning payments.
    *
package/src/spark_descriptors.pb: binary file changed (contents not shown).
package/src/tests/address.test.ts:

@@ -14,6 +14,7 @@ import {
   encodeSparkAddress,
   encodeSparkAddressWithSignature,
   getNetworkFromSparkAddress,
+  isLegacySparkAddress,
   SparkAddressData,
   SparkAddressFormat,
 } from "../utils/address.js";
@@ -226,12 +227,24 @@ describe("Spark Invoice Encode/Decode", () => {
 
 describe("getNetworkFromSparkAddress", () => {
   test("REGTEST", () => {
+    const network = getNetworkFromSparkAddress(
+      "sparkrt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnxxdy83",
+    );
+    expect(network).toBe("REGTEST");
+  });
+  test("Legacy REGTEST", () => {
     const network = getNetworkFromSparkAddress(
       "sprt1pgssx63fa5g6uyv450rajp5ndwy9laxzpsp9e37su58jddmcdsvhgm5n7y0ud6",
     );
     expect(network).toBe("REGTEST");
   });
   test("MAINNET", () => {
+    const network = getNetworkFromSparkAddress(
+      "spark1pgss9qg3vdslzmt2name9v550skuvlu6lj5xt9sly90k7p0gxughlqv023jqmc",
+    );
+    expect(network).toBe("MAINNET");
+  });
+  test("Legacy MAINNET", () => {
     const network = getNetworkFromSparkAddress(
       "sp1pgssxwh6hznfdc3c0cuqrhgttder539d52a0rqcf34amge69huh664gd2ew787",
@@ -240,7 +253,67 @@ describe("getNetworkFromSparkAddress", () => {
 });
 
 describe("knownSparkAddress", () => {
+  test("spark address encodes known identity public key", () => {
+    const address =
+      "sparkrt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnxxdy83";
+    const network = getNetworkFromSparkAddress(address);
+    expect(network).toBe("REGTEST");
+    const decoded = decodeSparkAddress(address, network);
+    expect(decoded.identityPublicKey).toBe(
+      "0353908bac090ba741de6147a540a665537006911590f93249b2823dbe187d3213",
+    );
+  });
+  test("legacy spark address encodes known identity public key", () => {
+    const address =
+      "sprt1pgssx63fa5g6uyv450rajp5ndwy9laxzpsp9e37su58jddmcdsvhgm5n7y0ud6";
+    const network = getNetworkFromSparkAddress(address);
+    expect(network).toBe("REGTEST");
+    const decoded = decodeSparkAddress(address, network);
+    expect(decoded.identityPublicKey).toBe(
+      "036a29ed11ae1195a3c7d906936b885ff4c20c025cc7d0e50f26b7786c19746e93",
+    );
+  });
   test("known spark address decodes and encodes to the same address", () => {
+    const address =
+      "sprt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnzffssqgjzqqejta89sa8su5f05g0vunfzzkj5zr5v4ehgnt9d4hnyggr2wgghtqfpwn5rhnpg7j5pfn92dcqdyg4jrunyjdjsg7muxraxgfn5zcgs8dcr3sxzrqdetshygps36q8rfqg49d0p0447trnpyxh9f76kt9cwrfx4342jym5emx049chkfsz6j9qc0z8cl7ymmsckx42k76c2qm5f5n5kfvyd26x78eyw0ygs502vgwauy8j";
+    const decoded = bech32mDecode(address as SparkAddressFormat);
+    const payload = SparkAddress.decode(bech32m.fromWords(decoded.words));
+
+    const { identityPublicKey, sparkInvoiceFields, signature } = payload;
+
+    const sparkAddressData: SparkAddressData = {
+      identityPublicKey: bytesToHex(identityPublicKey),
+      network: "REGTEST",
+      sparkInvoiceFields: sparkInvoiceFields,
+    };
+    const reEncoded = encodeSparkAddressWithSignature(
+      sparkAddressData,
+      signature,
+    );
+    expect(reEncoded).toBe(address);
+  });
+  test("new spark address decodes and encodes to the same legacy address", () => {
+    const address =
+      "sparkrt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnzffssqgjzqqejta89sa8su5f05g0vunfzzkj5zr5v4ehgnt9d4hnyggr2wgghtqfpwn5rhnpg7j5pfn92dcqdyg4jrunyjdjsg7muxraxgfn5zcgs8dcr3sxzrqdetshygps36q8rfqg49d0p0447trnpyxh9f76kt9cwrfx4342jym5emx049chkfsz6j9qc0z8cl7ymmsckx42k76c2qm5f5n5kfvyd26x78eyw0ygs502vg42n8ls";
+    const decoded = bech32mDecode(address as SparkAddressFormat);
+    const payload = SparkAddress.decode(bech32m.fromWords(decoded.words));
+
+    const { identityPublicKey, sparkInvoiceFields, signature } = payload;
+
+    const sparkAddressData: SparkAddressData = {
+      identityPublicKey: bytesToHex(identityPublicKey),
+      network: "REGTEST",
+      sparkInvoiceFields: sparkInvoiceFields,
+    };
+    const reEncoded = encodeSparkAddressWithSignature(
+      sparkAddressData,
+      signature,
+    );
+    expect(reEncoded).toBe(
+      "sprt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnzffssqgjzqqejta89sa8su5f05g0vunfzzkj5zr5v4ehgnt9d4hnyggr2wgghtqfpwn5rhnpg7j5pfn92dcqdyg4jrunyjdjsg7muxraxgfn5zcgs8dcr3sxzrqdetshygps36q8rfqg49d0p0447trnpyxh9f76kt9cwrfx4342jym5emx049chkfsz6j9qc0z8cl7ymmsckx42k76c2qm5f5n5kfvyd26x78eyw0ygs502vgwauy8j",
+    );
+  });
+  test("legacy known spark address decodes and encodes to the same address", () => {
     const address =
       "sprt1pgss8stv8nfkamyea7mtc8werley55anfnnpgtnglff0wmxwm52mkyk6zfeqsqgjzqqe3dvr6e48l2alnpagf7ny3vlj5pr5v4ehgv3pqwd7wxx3awkku9p3epk73na6hcf9220h8kue2tmlkqx8tcrfpsf5ywsvpzgd9px9qcgvpzy8ecp35fg2yq4r39r4njq3slgcul7laarh9sndex9uejz7vwrcrz4g7n4egvwt5yspvsdyped46sflczvrzh0jzksgqnvaqlk02cz4vkwjrkwuep9zsrz5vmjp7mqxq7762tfjczy07at2fvzd7cgk2sqsxrmqdxnpy464rmq2nzdqzpuhme";
     const decoded = bech32mDecode(address as SparkAddressFormat);
@@ -298,4 +371,72 @@ describe("knownSparkAddress", () => {
       "e5b5d413fc098315df215a0804d9d07ecf56055659d21d9dcc84a280c5466e41f6c0607bda52d32c088ff756a4b04df61165401030f6069a61257551ec0a989a",
     );
   });
+  test("legacy known spark address decodes to expected fields", () => {
+    const address =
+      "sparkrt1pgssx5us3wkqjza8g80xz3a9gznx25msq6g3ty8exfym9q3ahcv86vsnzfmssqgjzqqejtaxmwj8ms9rn58574nvlq4j5zr5v4ehgnt9d4hnyggr2wgghtqfpwn5rhnpg7j5pfn92dcqdyg4jrunyjdjsg7muxraxgfn5rqgandgr3sxzrqdmew8qydzvz3qpylysylkgcaw9vpm2jzspls0qtr5kfmlwz244rvuk25w5w2sgc2pyqsraqdyp8tf57a6cn2egttaas9ms3whssenmjqt8wag3lgyvdzjskfeupt8xwwdx4agxdm9f0wefzj28jmdxqeudwcwdj9vfl9sdr65x06r0tasf5fwz2";
+
+    const decoded = decodeSparkAddress(address, "REGTEST");
+
+    expect(decoded.network).toBe("REGTEST");
+    expect(decoded.identityPublicKey).toBe(
+      "0353908bac090ba741de6147a540a665537006911590f93249b2823dbe187d3213",
+    );
+
+    const f = decoded.sparkInvoiceFields!;
+    expect(f.version).toBe(1);
+    expect(f.id).toBe("01992fa6-dba4-7dc0-a39d-0f4f566cf82b");
+
+    expect(f.paymentType?.type).toBe("tokens");
+    expect(
+      f.paymentType && "tokenIdentifier" in f.paymentType
+        ? f.paymentType.tokenIdentifier
+        : undefined,
+    ).toBe("093e4813f6463ae2b03b548500fe0f02c74b277f70955a8d9cb2a8ea39504614");
+    expect(
+      f.paymentType && "amount" in f.paymentType
+        ? f.paymentType.amount
+        : undefined,
+    ).toBe(1000n);
+
+    expect(f.memo).toBe("testMemo");
+    expect(f.senderPublicKey).toBe(
+      "0353908bac090ba741de6147a540a665537006911590f93249b2823dbe187d3213",
+    );
+
+    expect(f.expiryTime?.toISOString()).toBe("2025-09-09T18:09:48.419Z");
+
+    expect(decoded.signature).toBe(
+      "9d69a7bbac4d5942d7dec0bb845d784333dc80b3bba88fd046345285939e0567339cd357a8337654bdd948a4a3cb6d3033c6bb0e6c8ac4fcb068f5433f437afb",
+    );
+  });
+  test("encoding and decoding address", () => {
+    const identityPublicKey =
+      "02833069ed82f5ef07e9afa374d749f86c88415e4edc0609d7b0fbeb6de5929c4e";
+    const newSparkAddressFormat =
+      "sparkl1pgss9qesd8kc9a00ql56lgm56aylsmygg90yahqxp8tmp7ltdhje98zwv6zmy8";
+
+    const legacySparkAddressFormat = encodeSparkAddress({
+      identityPublicKey: identityPublicKey,
+      network: "LOCAL",
+    });
+
+    expect(isLegacySparkAddress(legacySparkAddressFormat)).toBe(true);
+    expect(isLegacySparkAddress(newSparkAddressFormat)).toBe(false);
+
+    const decodedLegacySparkAddress = decodeSparkAddress(
+      legacySparkAddressFormat,
+      "LOCAL",
+    );
+    expect(decodedLegacySparkAddress.identityPublicKey).toBe(identityPublicKey);
+    expect(decodedLegacySparkAddress.network).toBe("LOCAL");
+
+    const decodedNewSparkAddress = decodeSparkAddress(
+      newSparkAddressFormat,
+      "LOCAL",
+    );
+    expect(decodedNewSparkAddress.identityPublicKey).toBe(identityPublicKey);
+    expect(decodedNewSparkAddress.network).toBe("LOCAL");
+
+    expect(decodedLegacySparkAddress).toMatchObject(decodedNewSparkAddress);
+  });
 });
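Note: taken together these tests pin down the migration contract: a new-format address and its legacy counterpart carry identical payloads (identity key, invoice fields, signature), so decoding either and re-encoding with the legacy codec reproduces the legacy string byte for byte; only the human-readable prefix differs. A usage sketch, assuming both helpers are importable from the package root as they are from src/utils/address.ts (that re-export, and the narrowed network union, are assumptions):

    import {
      decodeSparkAddress,
      isLegacySparkAddress,
    } from "@buildonspark/spark-sdk";

    // Accept either address generation and report whether the input was legacy,
    // so the caller can decide to re-encode it in the new format.
    function inspectAddress(
      address: string,
      network: "MAINNET" | "REGTEST" | "LOCAL",
    ) {
      const decoded = decodeSparkAddress(address, network);
      return { decoded, wasLegacy: isLegacySparkAddress(address) };
    }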
package/src/tests/integration/address.test.ts:

@@ -1,5 +1,6 @@
 import { describe, expect } from "@jest/globals";
 import { ConfigOptions } from "../../services/wallet-config.js";
+import { isLegacySparkAddress } from "../../utils/address.js";
 import { NetworkType } from "../../utils/network.js";
 import { SparkWalletTesting } from "../utils/spark-testing-wallet.js";
 
@@ -32,6 +33,9 @@ describe("address", () => {
     );
     expect(sparkAddress).toEqual(await wallet.getSparkAddress());
 
+    // TODO: Remove this once we upgrade to the new spark address format
+    expect(isLegacySparkAddress(sparkAddress)).toBe(true);
+
     const depositAddresses = await Promise.all([
       wallet.getSingleUseDepositAddress(),
     ]);
package/src/tests/integration/lightning.test.ts:

@@ -1,5 +1,5 @@
 import { afterEach, beforeAll, describe, expect, it } from "@jest/globals";
-import { hexToBytes } from "@noble/curves/utils";
+import { bytesToHex, hexToBytes } from "@noble/curves/utils";
 import { sha256 } from "@noble/hashes/sha2";
 import { equalBytes } from "@scure/btc-signer/utils";
 import { uuidv7 } from "uuidv7";
@@ -55,16 +55,21 @@ const fakeInvoiceCreator = async (): Promise<LightningReceiveRequest> => {
     typename: "LightningReceiveRequest",
     invoice: {
       encodedInvoice:
-        "lnbcrt123450n1pnj6uf4pp5l26hsdxssmr52vd4xmn5xran7puzx34hpr6uevaq7ta0ayzrp8esdqqcqzpgxqyz5vqrzjqtr2vd60g57hu63rdqk87u3clac6jlfhej4kldrrjvfcw3mphcw8sqqqqzp3jlj6zyqqqqqqqqqqqqqq9qsp5w22fd8aqn7sdum7hxdf59ptgk322fkv589ejxjltngvgehlcqcyq9qxpqysgqvykwsxdx64qrj0s5pgcgygmrpj8w25jsjgltwn09yp24l9nvghe3dl3y0ycy70ksrlqmcn42hxn24e0ucuy3g9fjltudvhv4lrhhamgq3stqgp",
+        "lnbcrt1u1p5vxn7cpp5l26hsdxssmr52vd4xmn5xran7puzx34hpr6uevaq7ta0ayzrp8essp5dlpmev9m3yxyak47ncnz9a0jyt2su2hulw4t97etewgkvrtjtl9sxq9z0rgqnp4qtlyk6hxw5h4hrdfdkd4nh2rv0mwyyqvdtakr3dv6m4vvsmfshvg6rzjqgp0s738klwqef7yr8yu54vv3wfuk4psv46x5laf6l6v5x4lwwahvqqqqrusum7gtyqqqqqqqqqqqqqq9qcqzpgdqq9qyyssq0evxvv962npnvsw8zxsghcty5j9du55yhkjm8qnlr760qdjvn0gsnr650wclqcvc90mpm6e493sy8ds4hxk2h0828nwlmdc64mtr87cqp9eq8w",
       bitcoinNetwork: BitcoinNetwork.REGTEST,
-      paymentHash:
-        "2d059c3ede82a107aa1452c0bea47759be3c5c6e5342be6a310f6c3a907d9f4c",
+      paymentHash: bytesToHex(
+        sha256(
+          hexToBytes(
+            "2d059c3ede82a107aa1452c0bea47759be3c5c6e5342be6a310f6c3a907d9f4c",
+          ),
+        ),
+      ),
       amount: {
-        originalValue: 10000,
-        originalUnit: CurrencyUnit.SATOSHI,
-        preferredCurrencyUnit: CurrencyUnit.SATOSHI,
-        preferredCurrencyValueRounded: 10000,
-        preferredCurrencyValueApprox: 10000,
+        originalValue: 100000,
+        originalUnit: CurrencyUnit.MILLISATOSHI,
+        preferredCurrencyUnit: CurrencyUnit.USD,
+        preferredCurrencyValueRounded: 11,
+        preferredCurrencyValueApprox: 11.45475372279496,
       },
       createdAt: new Date().toISOString(),
       expiresAt: new Date(Date.now() + 1000 * 60 * 60 * 24).toISOString(),
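Note: the fixture previously used this hex value directly as the payment hash; it now hashes it first, matching the BOLT11 rule that the payment hash is the SHA-256 of the payment preimage, which keeps the fixture consistent with the payment-hash check added in spark-wallet.ts above. The same derivation in isolation, using the imports already present in this test file:

    import { bytesToHex, hexToBytes } from "@noble/curves/utils";
    import { sha256 } from "@noble/hashes/sha2";

    // BOLT11: paymentHash = SHA-256(preimage); the preimage is revealed on settlement.
    const preimageHex =
      "2d059c3ede82a107aa1452c0bea47759be3c5c6e5342be6a310f6c3a907d9f4c";
    const paymentHash = bytesToHex(sha256(hexToBytes(preimageHex)));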
package/src/tests/integration/token-output.test.ts:

@@ -14,7 +14,6 @@ describe.each(walletTypes)(
     const userWallet = await SparkWalletTesting.initialize({
       options: {
         network: "LOCAL",
-        tokenTransactionVersion: "V1",
       },
       signer: new Signer(),
     });