@buildonspark/spark-sdk 0.3.3 → 0.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/bare/index.cjs +945 -1401
- package/dist/bare/index.d.cts +128 -10
- package/dist/bare/index.d.ts +128 -10
- package/dist/bare/index.js +940 -1399
- package/dist/{chunk-55XNR6DM.js → chunk-DIXXHATX.js} +1 -1
- package/dist/{chunk-MGCUXELA.js → chunk-IC4IUEOS.js} +931 -125
- package/dist/{chunk-MH7BMOLL.js → chunk-J2P3KTQP.js} +1 -1
- package/dist/{chunk-SXXM52XH.js → chunk-JE73HB26.js} +409 -1656
- package/dist/{chunk-73GJOG5R.js → chunk-XWLR6G5C.js} +1 -1
- package/dist/{client-DrjQwET9.d.ts → client-DBZ43pJT.d.ts} +1 -1
- package/dist/{client-DUFejFfn.d.cts → client-DWml6sjL.d.cts} +1 -1
- package/dist/debug.cjs +949 -1403
- package/dist/debug.d.cts +8 -5
- package/dist/debug.d.ts +8 -5
- package/dist/debug.js +4 -4
- package/dist/graphql/objects/index.d.cts +3 -3
- package/dist/graphql/objects/index.d.ts +3 -3
- package/dist/index.cjs +905 -1362
- package/dist/index.d.cts +6 -6
- package/dist/index.d.ts +6 -6
- package/dist/index.js +9 -5
- package/dist/index.node.cjs +905 -1362
- package/dist/index.node.d.cts +6 -6
- package/dist/index.node.d.ts +6 -6
- package/dist/index.node.js +8 -4
- package/dist/{logging-CGeEoKYd.d.cts → logging-BUpzk4Z6.d.cts} +3 -3
- package/dist/{logging-DpSsvFVM.d.ts → logging-Dt2ooQiP.d.ts} +3 -3
- package/dist/native/index.cjs +905 -1362
- package/dist/native/index.d.cts +129 -25
- package/dist/native/index.d.ts +129 -25
- package/dist/native/index.js +904 -1363
- package/dist/proto/spark.cjs +931 -125
- package/dist/proto/spark.d.cts +1 -1
- package/dist/proto/spark.d.ts +1 -1
- package/dist/proto/spark.js +17 -1
- package/dist/proto/spark_token.d.cts +1 -1
- package/dist/proto/spark_token.d.ts +1 -1
- package/dist/proto/spark_token.js +2 -2
- package/dist/{spark-CLz4-Ln8.d.cts → spark-DasxuVfm.d.cts} +150 -5
- package/dist/{spark-CLz4-Ln8.d.ts → spark-DasxuVfm.d.ts} +150 -5
- package/dist/{spark-wallet-BVBrWYKL.d.cts → spark-wallet-BoMIOPWW.d.cts} +13 -9
- package/dist/{spark-wallet-CFPm6wZs.d.ts → spark-wallet-jlC0XN5f.d.ts} +13 -9
- package/dist/{spark-wallet.node-e1gncoIZ.d.ts → spark-wallet.node-07PksUHH.d.cts} +1 -1
- package/dist/{spark-wallet.node-B_00X-1j.d.cts → spark-wallet.node-CdWkKMSq.d.ts} +1 -1
- package/dist/tests/test-utils.cjs +947 -144
- package/dist/tests/test-utils.d.cts +4 -4
- package/dist/tests/test-utils.d.ts +4 -4
- package/dist/tests/test-utils.js +5 -5
- package/dist/{token-transactions-BkAqlmY6.d.ts → token-transactions-BDzCrQSk.d.cts} +5 -19
- package/dist/{token-transactions-BZGtwFFM.d.cts → token-transactions-DscJaJOE.d.ts} +5 -19
- package/dist/types/index.cjs +923 -125
- package/dist/types/index.d.cts +2 -2
- package/dist/types/index.d.ts +2 -2
- package/dist/types/index.js +2 -2
- package/package.json +1 -1
- package/src/proto/spark.ts +1167 -103
- package/src/services/config.ts +0 -4
- package/src/services/token-transactions.ts +11 -703
- package/src/services/wallet-config.ts +0 -2
- package/src/spark-wallet/proto-descriptors.ts +1 -1
- package/src/spark-wallet/spark-wallet.ts +58 -215
- package/src/spark_descriptors.pb +0 -0
- package/src/tests/address.test.ts +141 -0
- package/src/tests/integration/address.test.ts +4 -0
- package/src/tests/integration/lightning.test.ts +14 -9
- package/src/tests/integration/token-output.test.ts +0 -1
- package/src/tests/integration/transfer.test.ts +108 -2
- package/src/tests/token-hashing.test.ts +0 -247
- package/src/utils/address.ts +58 -35
- package/src/utils/token-hashing.ts +1 -420
- package/src/utils/token-transaction-validation.ts +0 -330

package/src/tests/integration/transfer.test.ts
CHANGED

@@ -5,11 +5,13 @@ import { wordlist } from "@scure/bip39/wordlists/english";
 import { uuidv7 } from "uuidv7";
 import { RPCError } from "../../errors/types.js";
 import {
+  decodeSparkAddress,
+  isLegacySparkAddress,
   KeyDerivation,
   KeyDerivationType,
   SparkWalletEvent,
 } from "../../index.js";
-import { TransferStatus } from "../../proto/spark.js";
+import { InvoiceStatus, TransferStatus } from "../../proto/spark.js";
 import { WalletConfigService } from "../../services/config.js";
 import { ConnectionManager } from "../../services/connection.js";
 import { SigningService } from "../../services/signing.js";
@@ -1209,9 +1211,100 @@ describe.each(walletTypes)("transfer v2", ({ name, Signer, createTree }) => {

     await bobTransferService.claimTransfer(transfer!, claimingNodes);
   });
+
+  it(`${name} - test transfer with new spark address`, async () => {
+    const faucet = BitcoinFaucet.getInstance();
+
+    const localOperators = Object.values(getLocalSigningOperators());
+    const { wallet: alice } = await SparkWalletTesting.initialize({
+      options: {
+        network: "LOCAL",
+        coordinatorIdentifier: localOperators[0]!.identifier,
+      },
+      signer: new Signer(),
+    });
+    const depositResp = await alice.getSingleUseDepositAddress();
+
+    if (!depositResp) {
+      throw new RPCError("Deposit address not found", {
+        method: "getDepositAddress",
+      });
+    }
+
+    const signedTx = await faucet.sendToAddress(depositResp, 1_000n);
+
+    await faucet.mineBlocks(1);
+
+    await alice.claimDeposit(signedTx.id);
+
+    const balance = await alice.getBalance();
+    expect(balance.balance).toBe(1_000n);
+
+    const options: ConfigOptions = {
+      network: "LOCAL",
+      coordinatorIdentifier: localOperators[1]!.identifier,
+    };
+    const { wallet: bob } = await SparkWalletTesting.initialize({
+      options,
+      mnemonicOrSeed:
+        "vacant travel foot castle surprise another dress stem slam lemon open anxiety",
+      signer: new Signer(),
+    });
+
+    const bobConfigService = new WalletConfigService(options, bob.getSigner());
+    const bobConnectionManager = new ConnectionManager(bobConfigService);
+    const bobSigningService = new SigningService(bobConfigService);
+
+    const bobTransferService = new TransferService(
+      bobConfigService,
+      bobConnectionManager,
+      bobSigningService,
+    );
+
+    const legacySparkAddress = await bob.getSparkAddress();
+    const newSparkAddress =
+      "sparkl1pgssxlp9dr9ypzqf2havm5weefu6470l062k3ujtw4uu6gjmfgl599rxucvvkr";
+
+    // TODO: Remove this once we upgrade to the new spark address format
+    expect(isLegacySparkAddress(legacySparkAddress)).toBe(true);
+
+    const decodedLegacySparkAddress = decodeSparkAddress(
+      legacySparkAddress,
+      "LOCAL",
+    );
+    const decodedNewSparkAddress = decodeSparkAddress(newSparkAddress, "LOCAL");
+
+    expect(decodedLegacySparkAddress).toMatchObject({
+      ...decodedNewSparkAddress,
+    });
+
+    await alice.transfer({
+      amountSats: 1000,
+      receiverSparkAddress: newSparkAddress,
+    });
+
+    const pendingTransfers = await bob.queryPendingTransfers();
+    expect(pendingTransfers.transfers.length).toBe(1);
+    const transfer = pendingTransfers.transfers[0]!;
+
+    const claimingNodes: LeafKeyTweak[] = transfer!.leaves.map((leaf) => ({
+      leaf: leaf.leaf!,
+      keyDerivation: {
+        type: KeyDerivationType.ECIES,
+        path: leaf.secretCipher,
+      },
+      newKeyDerivation: {
+        type: KeyDerivationType.LEAF,
+        path: leaf.leaf!.id,
+      },
+    }));
+
+    await bobTransferService.claimTransfer(transfer!, claimingNodes);
+  });
 });

-describe.each(walletTypes)(
+// TODO: (CNT-493) Re-enable invoice functionality once spark address migration is complete
+describe.skip.each(walletTypes)(
   "fulfill spark invoice",
   ({ name, Signer, createTree }) => {
     jest.setTimeout(25_000);
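
The new test exercises the updated bech32m address format end to end: Bob's wallet still reports a legacy `spl1…` address, but the new-format `sparkl1…` address decodes to the same payload and can receive a transfer. As a rough consumer-side sketch (assuming the package's public `SparkWallet.initialize` entry point; the mnemonic and amount are placeholders, and the address is the one used in the test):

```ts
import { SparkWallet } from "@buildonspark/spark-sdk";

// Hypothetical usage sketch; wallet options and mnemonic are illustrative only.
const { wallet } = await SparkWallet.initialize({
  mnemonicOrSeed: "<sender mnemonic>",
  options: { network: "LOCAL" },
});

// Per the test above, the receiver address may now use the new "sparkl1…"
// prefix (LOCAL network) in addition to the legacy "spl1…" prefix.
await wallet.transfer({
  amountSats: 1000,
  receiverSparkAddress:
    "sparkl1pgssxlp9dr9ypzqf2havm5weefu6470l062k3ujtw4uu6gjmfgl599rxucvvkr",
});
```
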
@@ -1338,6 +1431,19 @@ describe.each(walletTypes)(
          beforeClaimBalance.balance + BigInt(transfer.totalValue),
        );
      }
+
+      const res = await (sdk2 as any).querySparkInvoices([
+        invoice1000,
+        invoice2000,
+        invoiceNilAmount,
+      ]);
+      expect(res.invoiceStatuses.length).toBe(3);
+      expect(res.invoiceStatuses[0].invoice).toBe(invoice1000);
+      expect(res.invoiceStatuses[1].invoice).toBe(invoice2000);
+      expect(res.invoiceStatuses[2].invoice).toBe(invoiceNilAmount);
+      expect(res.invoiceStatuses[0].status).toBe(InvoiceStatus.FINALIZED);
+      expect(res.invoiceStatuses[1].status).toBe(InvoiceStatus.FINALIZED);
+      expect(res.invoiceStatuses[2].status).toBe(InvoiceStatus.FINALIZED);
    });

    it(`${name} - should reject invalid invoice: mismatched sender`, async () => {

package/src/tests/token-hashing.test.ts
CHANGED

@@ -2,7 +2,6 @@ import { numberToBytesBE } from "@noble/curves/utils";
 import { sha256 } from "@noble/hashes/sha2";
 import { Network } from "../proto/spark.js";
 import {
-  hashTokenTransactionV0,
   hashTokenTransactionV1,
   hashTokenTransactionV2,
 } from "../utils/token-hashing.js";
@@ -62,145 +61,6 @@ const PREV_TX_HASH = Uint8Array.from(
 );

 describe("hash token transaction", () => {
-  it("should produce the exact same hash for mint v0 (legacy vector)", () => {
-    const tokenAmount: bigint = 1000n;
-
-    const tokenPublicKey = new Uint8Array([
-      242, 155, 208, 90, 72, 211, 120, 244, 69, 99, 28, 101, 149, 222, 123, 50,
-      252, 63, 99, 54, 137, 226, 7, 224, 163, 122, 93, 248, 42, 159, 173, 45,
-    ]);
-
-    const identityPubKey = new Uint8Array([
-      25, 155, 208, 90, 72, 211, 120, 244, 69, 99, 28, 101, 149, 222, 123, 50,
-      252, 63, 99, 54, 137, 226, 7, 224, 163, 122, 93, 248, 42, 159, 173, 46,
-    ]);
-
-    const tokenTransaction = {
-      tokenInputs: {
-        $case: "mintInput" as const,
-        mintInput: {
-          issuerPublicKey: tokenPublicKey,
-          issuerProvidedTimestamp: 100,
-        },
-      },
-      tokenOutputs: [
-        {
-          id: "db1a4e48-0fc5-4f6c-8a80-d9d6c561a436",
-          ownerPublicKey: identityPubKey,
-          withdrawBondSats: 10000,
-          withdrawRelativeBlockLocktime: 100,
-          tokenPublicKey: tokenPublicKey,
-          tokenAmount: numberToBytesBE(tokenAmount, 16),
-          revocationCommitment: identityPubKey,
-        },
-      ],
-      sparkOperatorIdentityPublicKeys: [],
-      network: Network.REGTEST,
-    };
-
-    const hash = hashTokenTransactionV0(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      66, 235, 134, 101, 172, 110, 147, 77, 122, 48, 86, 240, 239, 9, 163, 82,
-      120, 234, 246, 206, 245, 242, 186, 180, 154, 41, 207, 179, 194, 31, 211,
-      36,
-    ]);
-  });
-
-  it("should produce the exact same hash for mint v0", () => {
-    const tokenTransaction = {
-      tokenInputs: {
-        $case: "mintInput" as const,
-        mintInput: {
-          issuerPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          issuerProvidedTimestamp: TEST_ISSUER_TIMESTAMP,
-        },
-      },
-      tokenOutputs: [
-        {
-          id: TEST_LEAF_ID,
-          ownerPublicKey: TEST_IDENTITY_PUB_KEY,
-          tokenPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenAmount: numberToBytesBE(TEST_TOKEN_AMOUNT, 16),
-          revocationCommitment: TEST_REVOCATION_PUB_KEY,
-          withdrawBondSats: TEST_WITHDRAW_BOND_SATS,
-          withdrawRelativeBlockLocktime: TEST_WITHDRAW_RELATIVE_BLOCK_LOCKTIME,
-        },
-      ],
-      sparkOperatorIdentityPublicKeys: [TEST_OPERATOR_PUB_KEY],
-      network: Network.REGTEST,
-    };
-
-    const hash = hashTokenTransactionV0(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      56, 47, 132, 171, 2, 236, 10, 72, 214, 89, 28, 46, 171, 39, 221, 113, 162,
-      74, 170, 64, 160, 91, 11, 201, 45, 35, 67, 179, 199, 130, 116, 69,
-    ]);
-  });
-
-  it("should produce the exact same hash for create v0", () => {
-    const tokenTransaction = {
-      tokenInputs: {
-        $case: "createInput" as const,
-        createInput: {
-          issuerPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenName: TEST_TOKEN_NAME,
-          tokenTicker: TEST_TOKEN_TICKER,
-          decimals: TEST_DECIMALS,
-          maxSupply: TEST_MAX_SUPPLY,
-          isFreezable: false,
-        },
-      },
-      tokenOutputs: [],
-      sparkOperatorIdentityPublicKeys: [TEST_OPERATOR_PUB_KEY],
-      network: Network.REGTEST,
-    };
-
-    const hash = hashTokenTransactionV0(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      35, 118, 177, 53, 49, 47, 174, 59, 123, 2, 212, 38, 217, 133, 124, 232,
-      93, 185, 248, 87, 146, 123, 157, 10, 6, 111, 79, 183, 185, 175, 45, 224,
-    ]);
-  });
-
-  it("should produce the exact same hash for transfer v0", () => {
-    const tokenTransaction = {
-      tokenInputs: {
-        $case: "transferInput" as const,
-        transferInput: {
-          outputsToSpend: [
-            {
-              prevTokenTransactionHash: PREV_TX_HASH,
-              prevTokenTransactionVout: 0,
-            },
-          ],
-        },
-      },
-      tokenOutputs: [
-        {
-          id: TEST_LEAF_ID,
-          ownerPublicKey: TEST_IDENTITY_PUB_KEY,
-          tokenPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenAmount: numberToBytesBE(1000n, 16),
-          revocationCommitment: TEST_REVOCATION_PUB_KEY,
-          withdrawBondSats: TEST_BOND_SATS,
-          withdrawRelativeBlockLocktime: TEST_LOCKTIME,
-        },
-      ],
-      sparkOperatorIdentityPublicKeys: [TEST_OPERATOR_PUB_KEY],
-      network: Network.REGTEST,
-    };
-
-    const hash = hashTokenTransactionV0(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      68, 88, 168, 87, 42, 251, 11, 182, 69, 202, 46, 202, 39, 234, 196, 201,
-      24, 52, 213, 56, 151, 103, 99, 110, 211, 237, 148, 78, 216, 146, 143, 131,
-    ]);
-  });
-
   it("should produce the exact same hash for mint v1", () => {
     const tokenTransaction = {
       version: 1,
@@ -237,113 +97,6 @@ describe("hash token transaction", () => {
     ]);
   });

-  it("should produce the exact same hash for create v1", () => {
-    const tokenTransaction = {
-      version: 1,
-      tokenInputs: {
-        $case: "createInput" as const,
-        createInput: {
-          issuerPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenName: TEST_TOKEN_NAME,
-          tokenTicker: TEST_TOKEN_TICKER,
-          decimals: TEST_DECIMALS,
-          maxSupply: TEST_MAX_SUPPLY,
-          isFreezable: false,
-        },
-      },
-      tokenOutputs: [],
-      sparkOperatorIdentityPublicKeys: [TEST_OPERATOR_PUB_KEY],
-      network: Network.REGTEST,
-      expiryTime: new Date(TEST_EXPIRY_TIME),
-      clientCreatedTimestamp: new Date(TEST_CLIENT_TIMESTAMP),
-      invoiceAttachments: [],
-    };
-
-    const hash = hashTokenTransactionV1(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      201, 249, 88, 215, 6, 7, 221, 209, 103, 153, 36, 41, 19, 60, 80, 144, 153,
-      159, 185, 61, 20, 117, 87, 196, 102, 151, 76, 4, 191, 121, 221, 182,
-    ]);
-  });
-
-  it("should produce the exact same hash for transfer v1", () => {
-    const tokenTransaction = {
-      version: 1,
-      tokenInputs: {
-        $case: "transferInput" as const,
-        transferInput: {
-          outputsToSpend: [
-            {
-              prevTokenTransactionHash: PREV_TX_HASH,
-              prevTokenTransactionVout: 0,
-            },
-          ],
-        },
-      },
-      tokenOutputs: [
-        {
-          id: TEST_LEAF_ID,
-          ownerPublicKey: TEST_IDENTITY_PUB_KEY,
-          tokenPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenAmount: numberToBytesBE(TEST_TOKEN_AMOUNT, 16),
-          revocationCommitment: TEST_REVOCATION_PUB_KEY,
-          withdrawBondSats: TEST_BOND_SATS,
-          withdrawRelativeBlockLocktime: TEST_LOCKTIME,
-        },
-      ],
-      sparkOperatorIdentityPublicKeys: [TEST_OPERATOR_PUB_KEY],
-      network: Network.REGTEST,
-      expiryTime: new Date(TEST_EXPIRY_TIME),
-      clientCreatedTimestamp: new Date(TEST_CLIENT_TIMESTAMP),
-      invoiceAttachments: [],
-    };
-
-    const hash = hashTokenTransactionV1(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      86, 89, 220, 198, 197, 223, 236, 142, 73, 125, 112, 186, 29, 1, 26, 203,
-      126, 154, 255, 176, 237, 210, 171, 98, 211, 130, 138, 113, 128, 129, 227,
-      35,
-    ]);
-  });
-
-  it("should produce the exact same hash for mint v2", () => {
-    const tokenTransaction = {
-      version: 2,
-      tokenInputs: {
-        $case: "mintInput" as const,
-        mintInput: {
-          issuerPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenIdentifier: TEST_TOKEN_IDENTIFIER,
-        },
-      },
-      tokenOutputs: [
-        {
-          id: TEST_LEAF_ID,
-          ownerPublicKey: TEST_IDENTITY_PUB_KEY,
-          withdrawBondSats: TEST_WITHDRAW_BOND_SATS,
-          withdrawRelativeBlockLocktime: TEST_WITHDRAW_RELATIVE_BLOCK_LOCKTIME,
-          tokenPublicKey: TEST_TOKEN_PUBLIC_KEY,
-          tokenAmount: numberToBytesBE(TEST_TOKEN_AMOUNT, 16),
-          revocationCommitment: TEST_REVOCATION_PUB_KEY,
-        },
-      ],
-      sparkOperatorIdentityPublicKeys: [TEST_OPERATOR_PUB_KEY],
-      network: Network.REGTEST,
-      expiryTime: new Date(TEST_EXPIRY_TIME),
-      clientCreatedTimestamp: new Date(TEST_CLIENT_TIMESTAMP),
-      invoiceAttachments: [],
-    };
-
-    const hash = hashTokenTransactionV1(tokenTransaction, false);
-
-    expect(Array.from(hash)).toEqual([
-      2, 4, 36, 141, 246, 170, 160, 204, 181, 102, 122, 220, 56, 182, 138, 153,
-      199, 216, 80, 3, 35, 2, 146, 139, 209, 31, 195, 129, 121, 120, 236, 126,
-    ]);
-  });
-
   it("should produce the exact same hash for create v2", () => {
     const tokenTransaction = {
       version: 2,

package/src/utils/address.ts
CHANGED

@@ -18,6 +18,14 @@ import { NetworkType } from "./network.js";
 const BECH32M_LIMIT = 1024;

 const AddressNetwork: Record<NetworkType, string> = {
+  MAINNET: "spark",
+  TESTNET: "sparkt",
+  REGTEST: "sparkrt",
+  SIGNET: "sparks",
+  LOCAL: "sparkl",
+} as const;
+
+const LegacyAddressNetwork: Record<NetworkType, string> = {
   MAINNET: "sp",
   TESTNET: "spt",
   REGTEST: "sprt",
@@ -25,8 +33,11 @@ const AddressNetwork: Record<NetworkType, string> = {
   LOCAL: "spl",
 } as const;

+type Bech32String = `${string}1${string}`;
 export type SparkAddressFormat =
   `${(typeof AddressNetwork)[keyof typeof AddressNetwork]}1${string}`;
+export type LegacySparkAddressFormat =
+  `${(typeof LegacyAddressNetwork)[keyof typeof LegacyAddressNetwork]}1${string}`;

 export interface SparkAddressData {
   identityPublicKey: string;
@@ -52,14 +63,14 @@ export interface DecodedSparkAddressData {

 export function encodeSparkAddress(
   payload: SparkAddressData,
-): SparkAddressFormat {
+): LegacySparkAddressFormat {
   return encodeSparkAddressWithSignature(payload);
 }

 export function encodeSparkAddressWithSignature(
   payload: SparkAddressData,
   signature?: Uint8Array,
-): SparkAddressFormat {
+): LegacySparkAddressFormat {
   try {
     isValidPublicKey(payload.identityPublicKey);
     const identityPublicKey = hexToBytes(payload.identityPublicKey);
@@ -88,9 +99,9 @@ export function encodeSparkAddressWithSignature(
     const words = bech32m.toWords(serializedPayload);

     return bech32mEncode(
-      AddressNetwork[payload.network],
+      LegacyAddressNetwork[payload.network],
       words,
-    ) as SparkAddressFormat;
+    ) as LegacySparkAddressFormat;
   } catch (error) {
     throw new ValidationError(
       "Failed to encode Spark address",
@@ -108,16 +119,16 @@ export function decodeSparkAddress(
   network: NetworkType,
 ): DecodedSparkAddressData {
   try {
-    const decoded = bech32mDecode(address);
-
-    if (decoded.prefix !== AddressNetwork[network]) {
+    if (network !== getNetworkFromSparkAddress(address)) {
       throw new ValidationError("Invalid Spark address prefix", {
         field: "address",
         value: address,
-        expected: `prefix='${AddressNetwork[network]}'`,
+        expected: `prefix='${AddressNetwork[network]}' or '${LegacyAddressNetwork[network]}'`,
       });
     }

+    const decoded = bech32mDecode(address as Bech32String);
+
     const payload = SparkAddress.decode(bech32m.fromWords(decoded.words));

     const { identityPublicKey, sparkInvoiceFields, signature } = payload;
@@ -179,19 +190,40 @@
   }
 }

-
+const PrefixToNetwork: Record<string, NetworkType> = Object.fromEntries(
+  Object.entries(AddressNetwork).map(([k, v]) => [v, k as NetworkType]),
+) as Record<string, NetworkType>;
+const LegacyPrefixToNetwork: Record<string, NetworkType> = Object.fromEntries(
+  Object.entries(LegacyAddressNetwork).map(([k, v]) => [v, k as NetworkType]),
+) as Record<string, NetworkType>;
+
+export function getNetworkFromSparkAddress(address: string): NetworkType {
+  const { prefix } = bech32mDecode(address);
+  const network = PrefixToNetwork[prefix] ?? LegacyPrefixToNetwork[prefix];
+  if (!network) {
+    throw new ValidationError("Invalid Spark address prefix", {
+      field: "network",
+      value: address,
+      expected:
+        "prefix='spark1', 'sparkt1', 'sparkrt1', 'sparks1', 'sparkl1' or legacy ('sp1', 'spt1', 'sprt1', 'sps1', 'spl1')",
+    });
+  }
+  return network;
+}
+export function isLegacySparkAddress(
+  address: string,
+): address is LegacySparkAddressFormat {
   try {
-    const
-
-
+    const { prefix } = bech32mDecode(address);
+    return prefix in LegacyPrefixToNetwork;
+  } catch (error) {
+    return false;
+  }
+}

-
-
-
-      value: address,
-      expected: Object.values(AddressNetwork),
-      });
-    }
+export function isValidSparkAddress(address: string) {
+  try {
+    const network = getNetworkFromSparkAddress(address);

     decodeSparkAddress(address, network);
     return true;
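
A small sketch of how the new helpers relate the two prefix families, using the LOCAL address from the transfer test above (this assumes the helpers are re-exported from the package root, as `decodeSparkAddress` and `isLegacySparkAddress` are in that test):

```ts
import {
  decodeSparkAddress,
  getNetworkFromSparkAddress,
  isLegacySparkAddress,
  isValidSparkAddress,
} from "@buildonspark/spark-sdk";

const newAddr =
  "sparkl1pgssxlp9dr9ypzqf2havm5weefu6470l062k3ujtw4uu6gjmfgl599rxucvvkr";

// New prefixes ("spark", "sparkt", "sparkrt", "sparks", "sparkl") and legacy
// prefixes ("sp", "spt", "sprt", "sps", "spl") both resolve to a NetworkType.
getNetworkFromSparkAddress(newAddr); // "LOCAL"
isLegacySparkAddress(newAddr); // false: only legacy prefixes narrow to LegacySparkAddressFormat
isValidSparkAddress(newAddr); // true: prefix lookup plus a full decode

// decodeSparkAddress now derives the network from the prefix and throws a
// ValidationError when it disagrees with the network argument.
const decoded = decodeSparkAddress(newAddr, "LOCAL");
```
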
@@ -397,27 +429,18 @@ export function validateSparkInvoiceSignature(invoice: SparkAddressFormat) {
   }
 }

-export function getNetworkFromSparkAddress(address: SparkAddressFormat) {
-  const { prefix } = bech32mDecode(address as SparkAddressFormat);
-  const network = Object.entries(AddressNetwork).find(
-    ([, p]) => p === prefix,
-  )?.[0] as NetworkType | undefined;
-  if (!network) {
-    throw new ValidationError("Invalid Spark address network", {
-      field: "network",
-      value: address,
-      expected: Object.values(AddressNetwork),
-    });
-  }
-  return network;
-}
-
 export function toProtoTimestamp(date: Date) {
   const ms = date.getTime();
   return { seconds: Math.floor(ms / 1000), nanos: (ms % 1000) * 1_000_000 };
 }

-export function bech32mDecode(address: SparkAddressFormat) {
+export function assertBech32(s: string): asserts s is Bech32String {
+  const i = s.lastIndexOf("1");
+  if (i <= 0 || i >= s.length - 1) throw new Error("invalid bech32 string");
+}
+
+export function bech32mDecode(address: string) {
+  assertBech32(address);
   return bech32m.decode(address, BECH32M_LIMIT);
 }
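
For reference, the new `assertBech32` guard only checks that a `1` separator exists with characters on both sides before the string reaches `bech32m.decode`. A standalone sketch of the narrowing it provides (the type and function are inlined here because they are module-internal; the sample input is a placeholder, not a full address):

```ts
// Standalone illustration of the Bech32String narrowing used above.
type Bech32String = `${string}1${string}`;

function assertBech32(s: string): asserts s is Bech32String {
  const i = s.lastIndexOf("1");
  if (i <= 0 || i >= s.length - 1) throw new Error("invalid bech32 string");
}

const input: string = "sparkl1pgssx"; // would be a full address in practice
assertBech32(input); // throws for strings like "spark" or "1abc"
// After the assertion, `input` is typed as `${string}1${string}`.
```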