@buildonspark/issuer-sdk 0.0.80 → 0.0.82

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
1
1
  # @buildonspark/issuer-sdk
2
2
 
3
+ ## 0.0.82
4
+
5
+ ### Patch Changes
6
+
7
+ - Leaf key improvements
8
+ - Token improvements
9
+ - Updated dependencies
10
+ - @buildonspark/spark-sdk@0.2.3
11
+
12
+ ## 0.0.81
13
+
14
+ ### Patch Changes
15
+
16
+ - Updated dependencies
17
+ - @buildonspark/spark-sdk@0.2.2
18
+
3
19
  ## 0.0.80
4
20
 
5
21
  ### Patch Changes
package/dist/index.cjs CHANGED
@@ -44,6 +44,7 @@ var import_spark_sdk5 = require("@buildonspark/spark-sdk");
44
44
  var import_core = require("@lightsparkdev/core");
45
45
  var import_spark_sdk6 = require("@buildonspark/spark-sdk");
46
46
  var import_utils4 = require("@noble/curves/abstract/utils");
47
+ var import_spark_sdk7 = require("@buildonspark/spark-sdk");
47
48
 
48
49
  // src/services/freeze.ts
49
50
  var import_spark_sdk2 = require("@buildonspark/spark-sdk");
@@ -60,16 +61,26 @@ function hashFreezeTokensPayload(payload) {
60
61
  });
61
62
  }
62
63
  let allHashes = [];
64
+ const versionHashObj = import_utils.sha256.create();
65
+ const versionBytes = new Uint8Array(4);
66
+ new DataView(versionBytes.buffer).setUint32(
67
+ 0,
68
+ payload.version,
69
+ false
70
+ // false for big-endian
71
+ );
72
+ versionHashObj.update(versionBytes);
73
+ allHashes.push(versionHashObj.digest());
63
74
  const ownerPubKeyHash = import_utils.sha256.create();
64
75
  if (payload.ownerPublicKey) {
65
76
  ownerPubKeyHash.update(payload.ownerPublicKey);
66
77
  }
67
78
  allHashes.push(ownerPubKeyHash.digest());
68
- const tokenPubKeyHash = import_utils.sha256.create();
69
- if (payload.tokenPublicKey) {
70
- tokenPubKeyHash.update(payload.tokenPublicKey);
79
+ const tokenIdentifierHash = import_utils.sha256.create();
80
+ if (payload.tokenIdentifier) {
81
+ tokenIdentifierHash.update(payload.tokenIdentifier);
71
82
  }
72
- allHashes.push(tokenPubKeyHash.digest());
83
+ allHashes.push(tokenIdentifierHash.digest());
73
84
  const shouldUnfreezeHash = import_utils.sha256.create();
74
85
  shouldUnfreezeHash.update(new Uint8Array([payload.shouldUnfreeze ? 1 : 0]));
75
86
  allHashes.push(shouldUnfreezeHash.digest());
@@ -106,21 +117,28 @@ var TokenFreezeService = class {
106
117
  this.config = config;
107
118
  this.connectionManager = connectionManager;
108
119
  }
109
- async freezeTokens(ownerPublicKey, tokenPublicKey) {
110
- return this.freezeOperation(ownerPublicKey, tokenPublicKey, false);
120
+ async freezeTokens({
121
+ ownerPublicKey,
122
+ tokenIdentifier
123
+ }) {
124
+ return this.freezeOperation(ownerPublicKey, false, tokenIdentifier);
111
125
  }
112
- async unfreezeTokens(ownerPublicKey, tokenPublicKey) {
113
- return this.freezeOperation(ownerPublicKey, tokenPublicKey, true);
126
+ async unfreezeTokens({
127
+ ownerPublicKey,
128
+ tokenIdentifier
129
+ }) {
130
+ return this.freezeOperation(ownerPublicKey, true, tokenIdentifier);
114
131
  }
115
- async freezeOperation(ownerPublicKey, tokenPublicKey, shouldUnfreeze) {
132
+ async freezeOperation(ownerPublicKey, shouldUnfreeze, tokenIdentifier) {
116
133
  const signingOperators = this.config.getSigningOperators();
117
134
  const issuerProvidedTimestamp = Date.now();
118
135
  const freezeResponses = await Promise.allSettled(
119
136
  Object.entries(signingOperators).map(async ([identifier, operator]) => {
120
- const internalSparkClient = await this.connectionManager.createSparkClient(operator.address);
137
+ const sparkTokenClient = await this.connectionManager.createSparkTokenClient(operator.address);
121
138
  const freezeTokensPayload = {
139
+ version: 1,
122
140
  ownerPublicKey,
123
- tokenPublicKey,
141
+ tokenIdentifier,
124
142
  shouldUnfreeze,
125
143
  issuerProvidedTimestamp,
126
144
  operatorIdentityPublicKey: (0, import_utils2.hexToBytes)(operator.identityPublicKey)
@@ -128,7 +146,7 @@ var TokenFreezeService = class {
128
146
  const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
129
147
  const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
130
148
  try {
131
- const response = await internalSparkClient.freeze_tokens({
149
+ const response = await sparkTokenClient.freeze_tokens({
132
150
  freezeTokensPayload,
133
151
  issuerSignature
134
152
  });
@@ -138,7 +156,7 @@ var TokenFreezeService = class {
138
156
  };
139
157
  } catch (error) {
140
158
  throw new import_spark_sdk2.NetworkError(
141
- `Failed to send a freeze/unfreeze operation for token: ${tokenPublicKey.toString()} at operator: ${operator.address}`,
159
+ `Failed to send a freeze/unfreeze operation to operator: ${operator.address}`,
142
160
  {
143
161
  operation: "freeze_tokens",
144
162
  errorCount: 1,
@@ -228,7 +246,7 @@ var IssuerTokenTransactionService = class extends import_spark_sdk3.TokenTransac
228
246
  };
229
247
 
230
248
  // src/issuer-wallet/issuer-spark-wallet.ts
231
- var import_spark_sdk7 = require("@buildonspark/spark-sdk");
249
+ var import_spark_sdk8 = require("@buildonspark/spark-sdk");
232
250
 
233
251
  // src/utils/create-validation.ts
234
252
  var import_spark_sdk4 = require("@buildonspark/spark-sdk");
@@ -300,7 +318,7 @@ function validateTokenParameters(tokenName, tokenTicker, decimals, maxSupply) {
300
318
  }
301
319
 
302
320
  // src/issuer-wallet/issuer-spark-wallet.ts
303
- var import_spark_sdk8 = require("@buildonspark/spark-sdk");
321
+ var import_spark_sdk9 = require("@buildonspark/spark-sdk");
304
322
  var BURN_ADDRESS = "02".repeat(33);
305
323
  var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk5.SparkWallet {
306
324
  issuerTokenTransactionService;
@@ -440,7 +458,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk5.Spark
440
458
  );
441
459
  }
442
460
  const metadata = response.tokenMetadata[0];
443
- const tokenIdentifier = (0, import_spark_sdk8.encodeBech32mTokenIdentifier)({
461
+ const tokenIdentifier = (0, import_spark_sdk9.encodeBech32mTokenIdentifier)({
444
462
  tokenIdentifier: metadata.tokenIdentifier,
445
463
  network: this.config.getNetworkType()
446
464
  });
@@ -468,7 +486,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk5.Spark
468
486
  */
469
487
  async getIssuerTokenIdentifier() {
470
488
  const tokenMetadata = await this.getIssuerTokenMetadata();
471
- return (0, import_spark_sdk8.encodeBech32mTokenIdentifier)({
489
+ return (0, import_spark_sdk9.encodeBech32mTokenIdentifier)({
472
490
  tokenIdentifier: tokenMetadata.rawTokenIdentifier,
473
491
  network: this.config.getNetworkType()
474
492
  });
@@ -565,15 +583,26 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk5.Spark
565
583
  */
566
584
  async freezeTokens(sparkAddress) {
567
585
  await this.syncTokenOutputs();
568
- const tokenPublicKey = await super.getIdentityPublicKey();
569
586
  const decodedOwnerPubkey = (0, import_spark_sdk6.decodeSparkAddress)(
570
587
  sparkAddress,
571
588
  this.config.getNetworkType()
572
589
  );
573
- const response = await this.tokenFreezeService.freezeTokens(
574
- (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
575
- (0, import_utils4.hexToBytes)(tokenPublicKey)
576
- );
590
+ const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
591
+ if (issuerTokenIdentifier === null) {
592
+ throw new import_spark_sdk5.ValidationError("Issuer token identifier not found", {
593
+ field: "issuerTokenIdentifier",
594
+ value: issuerTokenIdentifier,
595
+ expected: "non-null token identifier"
596
+ });
597
+ }
598
+ const rawTokenIdentifier = (0, import_spark_sdk7.decodeBech32mTokenIdentifier)(
599
+ issuerTokenIdentifier,
600
+ this.config.getNetworkType()
601
+ ).tokenIdentifier;
602
+ const response = await this.tokenFreezeService.freezeTokens({
603
+ ownerPublicKey: (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
604
+ tokenIdentifier: rawTokenIdentifier
605
+ });
577
606
  const tokenAmount = (0, import_utils4.bytesToNumberBE)(response.impactedTokenAmount);
578
607
  return {
579
608
  impactedOutputIds: response.impactedOutputIds,
@@ -587,15 +616,26 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk5.Spark
587
616
  */
588
617
  async unfreezeTokens(sparkAddress) {
589
618
  await this.syncTokenOutputs();
590
- const tokenPublicKey = await super.getIdentityPublicKey();
591
619
  const decodedOwnerPubkey = (0, import_spark_sdk6.decodeSparkAddress)(
592
620
  sparkAddress,
593
621
  this.config.getNetworkType()
594
622
  );
595
- const response = await this.tokenFreezeService.unfreezeTokens(
596
- (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
597
- (0, import_utils4.hexToBytes)(tokenPublicKey)
598
- );
623
+ const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
624
+ if (issuerTokenIdentifier === null) {
625
+ throw new import_spark_sdk5.ValidationError("Issuer token identifier not found", {
626
+ field: "issuerTokenIdentifier",
627
+ value: issuerTokenIdentifier,
628
+ expected: "non-null token identifier"
629
+ });
630
+ }
631
+ const rawTokenIdentifier = (0, import_spark_sdk7.decodeBech32mTokenIdentifier)(
632
+ issuerTokenIdentifier,
633
+ this.config.getNetworkType()
634
+ ).tokenIdentifier;
635
+ const response = await this.tokenFreezeService.unfreezeTokens({
636
+ ownerPublicKey: (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
637
+ tokenIdentifier: rawTokenIdentifier
638
+ });
599
639
  const tokenAmount = (0, import_utils4.bytesToNumberBE)(response.impactedTokenAmount);
600
640
  return {
601
641
  impactedOutputIds: response.impactedOutputIds,
@@ -607,7 +647,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk5.Spark
607
647
  * @throws {NotImplementedError} This feature is not yet supported
608
648
  */
609
649
  async getIssuerTokenDistribution() {
610
- throw new import_spark_sdk7.NotImplementedError("Token distribution is not yet supported");
650
+ throw new import_spark_sdk8.NotImplementedError("Token distribution is not yet supported");
611
651
  }
612
652
  /**
613
653
  * Announces a new token on the L1 (Bitcoin) network.
package/dist/index.js CHANGED
@@ -19,6 +19,7 @@ import {
19
19
  bytesToNumberBE,
20
20
  hexToBytes as hexToBytes2
21
21
  } from "@noble/curves/abstract/utils";
22
+ import { decodeBech32mTokenIdentifier } from "@buildonspark/spark-sdk";
22
23
 
23
24
  // src/services/freeze.ts
24
25
  import {
@@ -38,16 +39,26 @@ function hashFreezeTokensPayload(payload) {
38
39
  });
39
40
  }
40
41
  let allHashes = [];
42
+ const versionHashObj = sha256.create();
43
+ const versionBytes = new Uint8Array(4);
44
+ new DataView(versionBytes.buffer).setUint32(
45
+ 0,
46
+ payload.version,
47
+ false
48
+ // false for big-endian
49
+ );
50
+ versionHashObj.update(versionBytes);
51
+ allHashes.push(versionHashObj.digest());
41
52
  const ownerPubKeyHash = sha256.create();
42
53
  if (payload.ownerPublicKey) {
43
54
  ownerPubKeyHash.update(payload.ownerPublicKey);
44
55
  }
45
56
  allHashes.push(ownerPubKeyHash.digest());
46
- const tokenPubKeyHash = sha256.create();
47
- if (payload.tokenPublicKey) {
48
- tokenPubKeyHash.update(payload.tokenPublicKey);
57
+ const tokenIdentifierHash = sha256.create();
58
+ if (payload.tokenIdentifier) {
59
+ tokenIdentifierHash.update(payload.tokenIdentifier);
49
60
  }
50
- allHashes.push(tokenPubKeyHash.digest());
61
+ allHashes.push(tokenIdentifierHash.digest());
51
62
  const shouldUnfreezeHash = sha256.create();
52
63
  shouldUnfreezeHash.update(new Uint8Array([payload.shouldUnfreeze ? 1 : 0]));
53
64
  allHashes.push(shouldUnfreezeHash.digest());
@@ -84,21 +95,28 @@ var TokenFreezeService = class {
84
95
  this.config = config;
85
96
  this.connectionManager = connectionManager;
86
97
  }
87
- async freezeTokens(ownerPublicKey, tokenPublicKey) {
88
- return this.freezeOperation(ownerPublicKey, tokenPublicKey, false);
98
+ async freezeTokens({
99
+ ownerPublicKey,
100
+ tokenIdentifier
101
+ }) {
102
+ return this.freezeOperation(ownerPublicKey, false, tokenIdentifier);
89
103
  }
90
- async unfreezeTokens(ownerPublicKey, tokenPublicKey) {
91
- return this.freezeOperation(ownerPublicKey, tokenPublicKey, true);
104
+ async unfreezeTokens({
105
+ ownerPublicKey,
106
+ tokenIdentifier
107
+ }) {
108
+ return this.freezeOperation(ownerPublicKey, true, tokenIdentifier);
92
109
  }
93
- async freezeOperation(ownerPublicKey, tokenPublicKey, shouldUnfreeze) {
110
+ async freezeOperation(ownerPublicKey, shouldUnfreeze, tokenIdentifier) {
94
111
  const signingOperators = this.config.getSigningOperators();
95
112
  const issuerProvidedTimestamp = Date.now();
96
113
  const freezeResponses = await Promise.allSettled(
97
114
  Object.entries(signingOperators).map(async ([identifier, operator]) => {
98
- const internalSparkClient = await this.connectionManager.createSparkClient(operator.address);
115
+ const sparkTokenClient = await this.connectionManager.createSparkTokenClient(operator.address);
99
116
  const freezeTokensPayload = {
117
+ version: 1,
100
118
  ownerPublicKey,
101
- tokenPublicKey,
119
+ tokenIdentifier,
102
120
  shouldUnfreeze,
103
121
  issuerProvidedTimestamp,
104
122
  operatorIdentityPublicKey: hexToBytes(operator.identityPublicKey)
@@ -106,7 +124,7 @@ var TokenFreezeService = class {
106
124
  const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
107
125
  const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
108
126
  try {
109
- const response = await internalSparkClient.freeze_tokens({
127
+ const response = await sparkTokenClient.freeze_tokens({
110
128
  freezeTokensPayload,
111
129
  issuerSignature
112
130
  });
@@ -116,7 +134,7 @@ var TokenFreezeService = class {
116
134
  };
117
135
  } catch (error) {
118
136
  throw new NetworkError(
119
- `Failed to send a freeze/unfreeze operation for token: ${tokenPublicKey.toString()} at operator: ${operator.address}`,
137
+ `Failed to send a freeze/unfreeze operation to operator: ${operator.address}`,
120
138
  {
121
139
  operation: "freeze_tokens",
122
140
  errorCount: 1,
@@ -545,15 +563,26 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
545
563
  */
546
564
  async freezeTokens(sparkAddress) {
547
565
  await this.syncTokenOutputs();
548
- const tokenPublicKey = await super.getIdentityPublicKey();
549
566
  const decodedOwnerPubkey = decodeSparkAddress(
550
567
  sparkAddress,
551
568
  this.config.getNetworkType()
552
569
  );
553
- const response = await this.tokenFreezeService.freezeTokens(
554
- hexToBytes2(decodedOwnerPubkey.identityPublicKey),
555
- hexToBytes2(tokenPublicKey)
556
- );
570
+ const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
571
+ if (issuerTokenIdentifier === null) {
572
+ throw new ValidationError3("Issuer token identifier not found", {
573
+ field: "issuerTokenIdentifier",
574
+ value: issuerTokenIdentifier,
575
+ expected: "non-null token identifier"
576
+ });
577
+ }
578
+ const rawTokenIdentifier = decodeBech32mTokenIdentifier(
579
+ issuerTokenIdentifier,
580
+ this.config.getNetworkType()
581
+ ).tokenIdentifier;
582
+ const response = await this.tokenFreezeService.freezeTokens({
583
+ ownerPublicKey: hexToBytes2(decodedOwnerPubkey.identityPublicKey),
584
+ tokenIdentifier: rawTokenIdentifier
585
+ });
557
586
  const tokenAmount = bytesToNumberBE(response.impactedTokenAmount);
558
587
  return {
559
588
  impactedOutputIds: response.impactedOutputIds,
@@ -567,15 +596,26 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
567
596
  */
568
597
  async unfreezeTokens(sparkAddress) {
569
598
  await this.syncTokenOutputs();
570
- const tokenPublicKey = await super.getIdentityPublicKey();
571
599
  const decodedOwnerPubkey = decodeSparkAddress(
572
600
  sparkAddress,
573
601
  this.config.getNetworkType()
574
602
  );
575
- const response = await this.tokenFreezeService.unfreezeTokens(
576
- hexToBytes2(decodedOwnerPubkey.identityPublicKey),
577
- hexToBytes2(tokenPublicKey)
578
- );
603
+ const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
604
+ if (issuerTokenIdentifier === null) {
605
+ throw new ValidationError3("Issuer token identifier not found", {
606
+ field: "issuerTokenIdentifier",
607
+ value: issuerTokenIdentifier,
608
+ expected: "non-null token identifier"
609
+ });
610
+ }
611
+ const rawTokenIdentifier = decodeBech32mTokenIdentifier(
612
+ issuerTokenIdentifier,
613
+ this.config.getNetworkType()
614
+ ).tokenIdentifier;
615
+ const response = await this.tokenFreezeService.unfreezeTokens({
616
+ ownerPublicKey: hexToBytes2(decodedOwnerPubkey.identityPublicKey),
617
+ tokenIdentifier: rawTokenIdentifier
618
+ });
579
619
  const tokenAmount = bytesToNumberBE(response.impactedTokenAmount);
580
620
  return {
581
621
  impactedOutputIds: response.impactedOutputIds,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@buildonspark/issuer-sdk",
3
- "version": "0.0.80",
3
+ "version": "0.0.82",
4
4
  "description": "Spark Issuer SDK for token issuance",
5
5
  "license": "Apache-2.0",
6
6
  "module": "./dist/index.js",
@@ -55,7 +55,7 @@
55
55
  },
56
56
  "dependencies": {
57
57
  "@buildonspark/lrc20-sdk": "0.0.60",
58
- "@buildonspark/spark-sdk": "0.2.1",
58
+ "@buildonspark/spark-sdk": "0.2.3",
59
59
  "@lightsparkdev/core": "^1.4.2",
60
60
  "@noble/curves": "^1.8.0",
61
61
  "@scure/btc-signer": "^1.5.0",
@@ -21,6 +21,7 @@ import {
21
21
  bytesToNumberBE,
22
22
  hexToBytes,
23
23
  } from "@noble/curves/abstract/utils";
24
+ import { decodeBech32mTokenIdentifier } from "@buildonspark/spark-sdk";
24
25
  import { TokenFreezeService } from "../services/freeze.js";
25
26
  import { IssuerTokenTransactionService } from "../services/token-transactions.js";
26
27
  import { TokenDistribution, IssuerTokenMetadata } from "./types.js";
@@ -347,15 +348,29 @@ export class IssuerSparkWallet extends SparkWallet {
347
348
  sparkAddress: string,
348
349
  ): Promise<{ impactedOutputIds: string[]; impactedTokenAmount: bigint }> {
349
350
  await this.syncTokenOutputs();
350
- const tokenPublicKey = await super.getIdentityPublicKey();
351
351
  const decodedOwnerPubkey = decodeSparkAddress(
352
352
  sparkAddress,
353
353
  this.config.getNetworkType(),
354
354
  );
355
- const response = await this.tokenFreezeService!.freezeTokens(
356
- hexToBytes(decodedOwnerPubkey.identityPublicKey),
357
- hexToBytes(tokenPublicKey),
358
- );
355
+
356
+ const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
357
+ if (issuerTokenIdentifier === null) {
358
+ throw new ValidationError("Issuer token identifier not found", {
359
+ field: "issuerTokenIdentifier",
360
+ value: issuerTokenIdentifier,
361
+ expected: "non-null token identifier",
362
+ });
363
+ }
364
+
365
+ const rawTokenIdentifier = decodeBech32mTokenIdentifier(
366
+ issuerTokenIdentifier,
367
+ this.config.getNetworkType(),
368
+ ).tokenIdentifier;
369
+
370
+ const response = await this.tokenFreezeService!.freezeTokens({
371
+ ownerPublicKey: hexToBytes(decodedOwnerPubkey.identityPublicKey),
372
+ tokenIdentifier: rawTokenIdentifier,
373
+ });
359
374
 
360
375
  // Convert the Uint8Array to a bigint
361
376
  const tokenAmount = bytesToNumberBE(response.impactedTokenAmount);
@@ -375,15 +390,29 @@ export class IssuerSparkWallet extends SparkWallet {
375
390
  sparkAddress: string,
376
391
  ): Promise<{ impactedOutputIds: string[]; impactedTokenAmount: bigint }> {
377
392
  await this.syncTokenOutputs();
378
- const tokenPublicKey = await super.getIdentityPublicKey();
379
393
  const decodedOwnerPubkey = decodeSparkAddress(
380
394
  sparkAddress,
381
395
  this.config.getNetworkType(),
382
396
  );
383
- const response = await this.tokenFreezeService!.unfreezeTokens(
384
- hexToBytes(decodedOwnerPubkey.identityPublicKey),
385
- hexToBytes(tokenPublicKey),
386
- );
397
+
398
+ const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
399
+ if (issuerTokenIdentifier === null) {
400
+ throw new ValidationError("Issuer token identifier not found", {
401
+ field: "issuerTokenIdentifier",
402
+ value: issuerTokenIdentifier,
403
+ expected: "non-null token identifier",
404
+ });
405
+ }
406
+
407
+ const rawTokenIdentifier = decodeBech32mTokenIdentifier(
408
+ issuerTokenIdentifier,
409
+ this.config.getNetworkType(),
410
+ ).tokenIdentifier;
411
+
412
+ const response = await this.tokenFreezeService!.unfreezeTokens({
413
+ ownerPublicKey: hexToBytes(decodedOwnerPubkey.identityPublicKey),
414
+ tokenIdentifier: rawTokenIdentifier,
415
+ });
387
416
  const tokenAmount = bytesToNumberBE(response.impactedTokenAmount);
388
417
 
389
418
  return {
@@ -0,0 +1 @@
1
+ export * from "@buildonspark/spark-sdk/proto/spark_token";
@@ -1,7 +1,7 @@
1
1
  import {
2
- FreezeTokensPayload,
3
2
  FreezeTokensResponse,
4
- } from "@buildonspark/spark-sdk/proto/spark";
3
+ FreezeTokensPayload,
4
+ } from "@buildonspark/spark-sdk/proto/spark_token";
5
5
  import {
6
6
  type ConnectionManager,
7
7
  WalletConfigService,
@@ -23,24 +23,30 @@ export class TokenFreezeService {
23
23
  this.connectionManager = connectionManager;
24
24
  }
25
25
 
26
- async freezeTokens(
27
- ownerPublicKey: Uint8Array,
28
- tokenPublicKey: Uint8Array,
29
- ): Promise<FreezeTokensResponse> {
30
- return this.freezeOperation(ownerPublicKey, tokenPublicKey, false);
26
+ async freezeTokens({
27
+ ownerPublicKey,
28
+ tokenIdentifier,
29
+ }: {
30
+ ownerPublicKey: Uint8Array;
31
+ tokenIdentifier?: Uint8Array;
32
+ }): Promise<FreezeTokensResponse> {
33
+ return this.freezeOperation(ownerPublicKey, false, tokenIdentifier!);
31
34
  }
32
35
 
33
- async unfreezeTokens(
34
- ownerPublicKey: Uint8Array,
35
- tokenPublicKey: Uint8Array,
36
- ): Promise<FreezeTokensResponse> {
37
- return this.freezeOperation(ownerPublicKey, tokenPublicKey, true);
36
+ async unfreezeTokens({
37
+ ownerPublicKey,
38
+ tokenIdentifier,
39
+ }: {
40
+ ownerPublicKey: Uint8Array;
41
+ tokenIdentifier?: Uint8Array;
42
+ }): Promise<FreezeTokensResponse> {
43
+ return this.freezeOperation(ownerPublicKey, true, tokenIdentifier!);
38
44
  }
39
45
 
40
46
  private async freezeOperation(
41
47
  ownerPublicKey: Uint8Array,
42
- tokenPublicKey: Uint8Array,
43
48
  shouldUnfreeze: boolean,
49
+ tokenIdentifier: Uint8Array,
44
50
  ): Promise<FreezeTokensResponse> {
45
51
  const signingOperators = this.config.getSigningOperators();
46
52
  const issuerProvidedTimestamp = Date.now();
@@ -48,12 +54,13 @@ export class TokenFreezeService {
48
54
  // Submit freeze_tokens to all SOs in parallel
49
55
  const freezeResponses = await Promise.allSettled(
50
56
  Object.entries(signingOperators).map(async ([identifier, operator]) => {
51
- const internalSparkClient =
52
- await this.connectionManager.createSparkClient(operator.address);
57
+ const sparkTokenClient =
58
+ await this.connectionManager.createSparkTokenClient(operator.address);
53
59
 
54
60
  const freezeTokensPayload: FreezeTokensPayload = {
61
+ version: 1,
55
62
  ownerPublicKey,
56
- tokenPublicKey,
63
+ tokenIdentifier,
57
64
  shouldUnfreeze,
58
65
  issuerProvidedTimestamp,
59
66
  operatorIdentityPublicKey: hexToBytes(operator.identityPublicKey),
@@ -66,7 +73,7 @@ export class TokenFreezeService {
66
73
  await this.config.signer.signMessageWithIdentityKey(hashedPayload);
67
74
 
68
75
  try {
69
- const response = await internalSparkClient.freeze_tokens({
76
+ const response = await sparkTokenClient.freeze_tokens({
70
77
  freezeTokensPayload,
71
78
  issuerSignature,
72
79
  });
@@ -77,7 +84,7 @@ export class TokenFreezeService {
77
84
  };
78
85
  } catch (error) {
79
86
  throw new NetworkError(
80
- `Failed to send a freeze/unfreeze operation for token: ${tokenPublicKey.toString()} at operator: ${operator.address}`,
87
+ `Failed to send a freeze/unfreeze operation to operator: ${operator.address}`,
81
88
  {
82
89
  operation: "freeze_tokens",
83
90
  errorCount: 1,
@@ -2,12 +2,9 @@ import {
2
2
  WalletConfig,
3
3
  ConfigOptions,
4
4
  filterTokenBalanceForTokenPublicKey,
5
- encodeSparkAddress,
6
5
  } from "@buildonspark/spark-sdk";
7
6
  import { jest } from "@jest/globals";
8
- import { BitcoinFaucet } from "@buildonspark/spark-sdk/test-utils";
9
7
  import { IssuerSparkWalletTesting } from "../utils/issuer-test-wallet.js";
10
- import { bytesToHex, hexToBytes } from "@noble/curves/abstract/utils";
11
8
  import { SparkWalletTesting } from "../utils/spark-testing-wallet.js";
12
9
  import { IssuerSparkWallet } from "../../index.js";
13
10
 
@@ -53,29 +50,11 @@ describe.each(TEST_CONFIGS)(
53
50
  let sharedUserWallet: any;
54
51
  let sharedTokenPublicKey: string;
55
52
 
56
- beforeAll(async () => {
57
- const { wallet: issuerWallet } =
58
- await IssuerSparkWalletTesting.initialize({
59
- options: config,
60
- });
61
-
62
- const { wallet: userWallet } = await SparkWalletTesting.initialize({
63
- options: config,
64
- });
65
-
66
- // Announce a shared token for this configuration
67
- await fundAndAnnounce(issuerWallet, 1000000n, 0, `${name}Shared`, "SHR");
68
-
69
- sharedIssuerWallet = issuerWallet;
70
- sharedUserWallet = userWallet;
71
- sharedTokenPublicKey = await issuerWallet.getIdentityPublicKey();
72
- });
73
-
74
53
  afterEach(async () => {
75
54
  await new Promise((resolve) => setTimeout(resolve, 1000));
76
55
  });
77
56
 
78
- it("should fail when minting tokens without announcement", async () => {
57
+ it("should fail when minting tokens without creation", async () => {
79
58
  const tokenAmount: bigint = 1000n;
80
59
  const { wallet } = await IssuerSparkWalletTesting.initialize({
81
60
  options: config,
@@ -84,21 +63,20 @@ describe.each(TEST_CONFIGS)(
84
63
  await expect(wallet.mintTokens(tokenAmount)).rejects.toThrow();
85
64
  });
86
65
 
87
- it("should fail when announce decimal is greater than js MAX_SAFE_INTEGER", async () => {
66
+ it("should fail when creation decimal is greater than js MAX_SAFE_INTEGER", async () => {
88
67
  const tokenAmount: bigint = 1000n;
89
68
  const { wallet } = await IssuerSparkWalletTesting.initialize({
90
69
  options: config,
91
70
  });
92
71
 
93
72
  await expect(
94
- fundAndAnnounce(
95
- wallet,
96
- tokenAmount,
97
- 2 ** 53,
98
- "2Pow53Decimal",
99
- "2P53D",
100
- false,
101
- ),
73
+ wallet.createToken({
74
+ tokenName: "2Pow53Decimal",
75
+ tokenTicker: "2P53D",
76
+ decimals: 2 ** 53,
77
+ isFreezable: false,
78
+ maxSupply: tokenAmount,
79
+ }),
102
80
  ).rejects.toThrow();
103
81
  });
104
82
 
@@ -108,19 +86,38 @@ describe.each(TEST_CONFIGS)(
108
86
  options: config,
109
87
  });
110
88
 
111
- await fundAndAnnounce(wallet, 2n, 0, "MST", "MST");
89
+ await wallet.createToken({
90
+ tokenName: "MST",
91
+ tokenTicker: "MST",
92
+ decimals: 0,
93
+ isFreezable: false,
94
+ maxSupply: 2n,
95
+ });
112
96
  await expect(wallet.mintTokens(tokenAmount)).rejects.toThrow();
113
97
  });
114
98
 
115
99
  it("should mint tokens successfully", async () => {
116
100
  const tokenAmount: bigint = 1000n;
117
101
 
102
+ const { wallet: issuerWallet } =
103
+ await IssuerSparkWalletTesting.initialize({
104
+ options: config,
105
+ });
106
+ await issuerWallet.createToken({
107
+ tokenName: `${name}M`,
108
+ tokenTicker: "MIN",
109
+ decimals: 0,
110
+ isFreezable: false,
111
+ maxSupply: 1_000_000n,
112
+ });
113
+ sharedIssuerWallet = issuerWallet;
114
+ sharedTokenPublicKey = await issuerWallet.getIdentityPublicKey();
115
+
118
116
  const tokenMetadata = await sharedIssuerWallet.getIssuerTokenMetadata();
119
117
 
120
- // Assert token public key metadata values
121
118
  const identityPublicKey = await sharedIssuerWallet.getIdentityPublicKey();
122
- expect(tokenMetadata?.tokenName).toEqual(`${name}Shared`);
123
- expect(tokenMetadata?.tokenTicker).toEqual("SHR");
119
+ expect(tokenMetadata?.tokenName).toEqual(`${name}M`);
120
+ expect(tokenMetadata?.tokenTicker).toEqual("MIN");
124
121
  expect(tokenMetadata?.decimals).toEqual(0);
125
122
  expect(tokenMetadata?.maxSupply).toEqual(1000000n);
126
123
  expect(tokenMetadata?.isFreezable).toEqual(false);
@@ -138,6 +135,24 @@ describe.each(TEST_CONFIGS)(
138
135
  it("should mint and transfer tokens", async () => {
139
136
  const tokenAmount: bigint = 1000n;
140
137
 
138
+ const { wallet: issuerWallet } =
139
+ await IssuerSparkWalletTesting.initialize({
140
+ options: config,
141
+ });
142
+ const { wallet: userWallet } = await SparkWalletTesting.initialize({
143
+ options: config,
144
+ });
145
+ await issuerWallet.createToken({
146
+ tokenName: `${name}MTR`,
147
+ tokenTicker: "MTR",
148
+ decimals: 0,
149
+ isFreezable: false,
150
+ maxSupply: 1_000_000n,
151
+ });
152
+ sharedIssuerWallet = issuerWallet;
153
+ sharedUserWallet = userWallet;
154
+ sharedTokenPublicKey = await issuerWallet.getIdentityPublicKey();
155
+
141
156
  await sharedIssuerWallet.mintTokens(tokenAmount);
142
157
 
143
158
  const sharedIssuerBalance =
@@ -296,6 +311,20 @@ describe.each(TEST_CONFIGS)(
296
311
  it("should mint and batchtransfer tokens", async () => {
297
312
  const tokenAmount: bigint = 999n;
298
313
 
314
+ const { wallet: issuerWallet } =
315
+ await IssuerSparkWalletTesting.initialize({
316
+ options: config,
317
+ });
318
+ await issuerWallet.createToken({
319
+ tokenName: `${name}MBN`,
320
+ tokenTicker: "MBN",
321
+ decimals: 0,
322
+ isFreezable: false,
323
+ maxSupply: 1_000_000n,
324
+ });
325
+ sharedIssuerWallet = issuerWallet;
326
+ sharedTokenPublicKey = await issuerWallet.getIdentityPublicKey();
327
+
299
328
  const { wallet: destinationWallet } = await SparkWalletTesting.initialize(
300
329
  {
301
330
  options: config,
@@ -409,117 +438,128 @@ describe.each(TEST_CONFIGS)(
409
438
  options: config,
410
439
  });
411
440
 
412
- await fundAndAnnounce(issuerWallet, 1n, 0, "MST", "MST");
441
+ await issuerWallet.createToken({
442
+ tokenName: "MST",
443
+ tokenTicker: "MST",
444
+ decimals: 0,
445
+ isFreezable: false,
446
+ maxSupply: 1n,
447
+ });
413
448
  await issuerWallet.mintTokens(tokenAmount);
414
449
 
415
450
  const tokenBalance = await issuerWallet.getIssuerTokenBalance();
416
451
  expect(tokenBalance.balance).toEqual(tokenAmount);
417
452
  });
418
453
 
419
- it("it should be able to announce a token with name of size equal to MAX_SYMBOL_SIZE", async () => {
454
+ it("it should be able to create a token with name of size equal to MAX_SYMBOL_SIZE", async () => {
420
455
  const { wallet: issuerWallet } =
421
456
  await IssuerSparkWalletTesting.initialize({
422
457
  options: config,
423
458
  });
424
459
 
425
- await fundAndAnnounce(issuerWallet, 1n, 0, "MST", "TESTAA");
460
+ await issuerWallet.createToken({
461
+ tokenName: "MST",
462
+ tokenTicker: "TESTAA",
463
+ decimals: 0,
464
+ isFreezable: false,
465
+ maxSupply: 1n,
466
+ });
426
467
  });
427
468
 
428
- it("it should be able to announce a token with symbol of size equal to MAX_NAME_SIZE", async () => {
469
+ it("it should be able to create a token with symbol of size equal to MAX_NAME_SIZE", async () => {
429
470
  const { wallet: issuerWallet } =
430
471
  await IssuerSparkWalletTesting.initialize({
431
472
  options: config,
432
473
  });
433
474
 
434
- await fundAndAnnounce(issuerWallet, 1n, 0, "ABCDEFGHIJKLMNOPQ", "MQS");
475
+ await issuerWallet.createToken({
476
+ tokenName: "ABCDEFGHIJKLMNOPQ",
477
+ tokenTicker: "MQS",
478
+ decimals: 0,
479
+ isFreezable: false,
480
+ maxSupply: 1n,
481
+ });
435
482
  });
436
483
 
437
- it("it should NOT be able to announce a token with ( symbol size + name size ) > MAX_NAME_AND_SYMBOL_SIZE", async () => {
484
+ it("should create, mint, freeze and unfreeze tokens", async () => {
485
+ const tokenAmount: bigint = 1000n;
438
486
  const { wallet: issuerWallet } =
439
487
  await IssuerSparkWalletTesting.initialize({
440
488
  options: config,
441
489
  });
442
490
 
443
- await expect(
444
- fundAndAnnounce(issuerWallet, 1n, 0, "ABCDEFGHIJKLMNOPQ", "TESTAB"),
445
- ).rejects.toThrow();
446
- });
447
-
448
- it("it should NOT be able to announce a token with ( symbol size + name size ) > MAX_NAME_AND_SYMBOL_SIZE, and size is calculated in bytes", async () => {
449
- const { wallet: issuerWallet } =
450
- await IssuerSparkWalletTesting.initialize({
451
- options: config,
452
- });
491
+ await issuerWallet.createToken({
492
+ tokenName: `${name}FRZ`,
493
+ tokenTicker: "FRZ",
494
+ decimals: 0,
495
+ isFreezable: true,
496
+ maxSupply: 100000n,
497
+ });
498
+ await issuerWallet.mintTokens(tokenAmount);
453
499
 
454
- await expect(
455
- fundAndAnnounce(issuerWallet, 1n, 0, "ABCDEFGHIJKLMNOPQ", "🥸🥸"),
456
- ).rejects.toThrow();
457
- });
500
+ // Check issuer balance after minting
501
+ const issuerBalanceObjAfterMint =
502
+ await issuerWallet.getIssuerTokenBalance();
503
+ expect(issuerBalanceObjAfterMint).toBeDefined();
504
+ expect(issuerBalanceObjAfterMint.tokenIdentifier).toBeDefined();
458
505
 
459
- // freeze is hardcoded to mainnet
460
- brokenTestFn(
461
- "should announce, mint, freeze and unfreeze tokens",
462
- async () => {
463
- const tokenAmount: bigint = 1000n;
464
- const { wallet: issuerWallet } =
465
- await IssuerSparkWalletTesting.initialize({
466
- options: config,
467
- });
506
+ const issuerBalanceAfterMint = issuerBalanceObjAfterMint.balance;
507
+ const tokenIdentifier = issuerBalanceObjAfterMint.tokenIdentifier!;
468
508
 
469
- await fundAndAnnounce(issuerWallet, 100000n, 0, `${name}FR`, "FRT");
470
- await issuerWallet.mintTokens(tokenAmount);
509
+ expect(issuerBalanceAfterMint).toEqual(tokenAmount);
471
510
 
472
- // Check issuer balance after minting
473
- const issuerBalanceObjAfterMint =
474
- await issuerWallet.getIssuerTokenBalance();
475
- expect(issuerBalanceObjAfterMint).toBeDefined();
476
- expect(issuerBalanceObjAfterMint.tokenIdentifier).toBeDefined();
511
+ const { wallet: userWallet } = await SparkWalletTesting.initialize({
512
+ options: config,
513
+ });
514
+ const userSparkAddress = await userWallet.getSparkAddress();
477
515
 
478
- const issuerBalanceAfterMint = issuerBalanceObjAfterMint.balance;
479
- const tokenIdentifier = issuerBalanceObjAfterMint.tokenIdentifier!;
516
+ await issuerWallet.transferTokens({
517
+ tokenAmount,
518
+ tokenIdentifier,
519
+ receiverSparkAddress: userSparkAddress,
520
+ });
521
+ const issuerBalanceAfterTransfer = (
522
+ await issuerWallet.getIssuerTokenBalance()
523
+ ).balance;
524
+ expect(issuerBalanceAfterTransfer).toEqual(0n);
480
525
 
481
- expect(issuerBalanceAfterMint).toEqual(tokenAmount);
526
+ const tokenPublicKey = await issuerWallet.getIdentityPublicKey();
527
+ const userBalanceObj = await userWallet.getBalance();
528
+ const userBalanceAfterTransfer = filterTokenBalanceForTokenPublicKey(
529
+ userBalanceObj?.tokenBalances,
530
+ tokenPublicKey,
531
+ );
532
+ expect(userBalanceAfterTransfer.balance).toEqual(tokenAmount);
482
533
 
483
- const { wallet: userWallet } = await SparkWalletTesting.initialize({
484
- options: config,
485
- });
486
- const userWalletPublicKey = await userWallet.getSparkAddress();
534
+ // Freeze tokens
535
+ const freezeResponse = await issuerWallet.freezeTokens(userSparkAddress);
536
+ expect(freezeResponse.impactedOutputIds.length).toBeGreaterThan(0);
537
+ expect(freezeResponse.impactedTokenAmount).toEqual(tokenAmount);
487
538
 
488
- await issuerWallet.transferTokens({
489
- tokenAmount,
490
- tokenIdentifier,
491
- receiverSparkAddress: userWalletPublicKey,
492
- });
493
- const issuerBalanceAfterTransfer = (
494
- await issuerWallet.getIssuerTokenBalance()
495
- ).balance;
496
- expect(issuerBalanceAfterTransfer).toEqual(0n);
497
-
498
- const tokenPublicKey = await issuerWallet.getIdentityPublicKey();
499
- const userBalanceObj = await userWallet.getBalance();
500
- const userBalanceAfterTransfer = filterTokenBalanceForTokenPublicKey(
501
- userBalanceObj?.tokenBalances,
502
- tokenPublicKey,
503
- );
504
- expect(userBalanceAfterTransfer.balance).toEqual(tokenAmount);
505
-
506
- // Freeze tokens
507
- const freezeResponse =
508
- await issuerWallet.freezeTokens(userWalletPublicKey);
509
- expect(freezeResponse.impactedOutputIds.length).toBeGreaterThan(0);
510
- expect(freezeResponse.impactedTokenAmount).toEqual(tokenAmount);
511
-
512
- // Unfreeze tokens
513
- const unfreezeResponse =
514
- await issuerWallet.unfreezeTokens(userWalletPublicKey);
515
- expect(unfreezeResponse.impactedOutputIds.length).toBeGreaterThan(0);
516
- expect(unfreezeResponse.impactedTokenAmount).toEqual(tokenAmount);
517
- },
518
- );
539
+ // Unfreeze tokens
540
+ const unfreezeResponse =
541
+ await issuerWallet.unfreezeTokens(userSparkAddress);
542
+ expect(unfreezeResponse.impactedOutputIds.length).toBeGreaterThan(0);
543
+ expect(unfreezeResponse.impactedTokenAmount).toEqual(tokenAmount);
544
+ });
519
545
 
520
546
  it("should mint and burn tokens", async () => {
521
547
  const tokenAmount: bigint = 200n;
522
548
 
549
+ const { wallet: issuerWallet } =
550
+ await IssuerSparkWalletTesting.initialize({
551
+ options: config,
552
+ });
553
+ await issuerWallet.createToken({
554
+ tokenName: `${name}MBN`,
555
+ tokenTicker: "MBN",
556
+ decimals: 0,
557
+ isFreezable: false,
558
+ maxSupply: 1_000_000n,
559
+ });
560
+ sharedIssuerWallet = issuerWallet;
561
+ sharedTokenPublicKey = await issuerWallet.getIdentityPublicKey();
562
+
523
563
  await sharedIssuerWallet.mintTokens(tokenAmount);
524
564
  const issuerTokenBalance = (
525
565
  await sharedIssuerWallet.getIssuerTokenBalance()
@@ -536,9 +576,23 @@ describe.each(TEST_CONFIGS)(
536
576
  );
537
577
  });
538
578
 
539
- it("should complete full token lifecycle: announce, mint, transfer, return, burn", async () => {
579
+ it("should complete full token lifecycle: create, mint, transfer, return, burn", async () => {
540
580
  const tokenAmount: bigint = 1000n;
541
581
 
582
+ const { wallet: issuerWallet } =
583
+ await IssuerSparkWalletTesting.initialize({
584
+ options: config,
585
+ });
586
+ await issuerWallet.createToken({
587
+ tokenName: `${name}LFC`,
588
+ tokenTicker: "LFC",
589
+ decimals: 0,
590
+ isFreezable: false,
591
+ maxSupply: 1_000_000n,
592
+ });
593
+ sharedIssuerWallet = issuerWallet;
594
+ sharedTokenPublicKey = await issuerWallet.getIdentityPublicKey();
595
+
542
596
  const { wallet: userWallet } = await SparkWalletTesting.initialize({
543
597
  options: config,
544
598
  });
@@ -554,12 +608,12 @@ describe.each(TEST_CONFIGS)(
554
608
  expect(issuerBalanceAfterMint).toEqual(initialBalance + tokenAmount);
555
609
  expect(issuerBalanceObjAfterMint.tokenIdentifier).toBeDefined();
556
610
  const tokenIdentifier = issuerBalanceObjAfterMint.tokenIdentifier!;
557
- const userWalletPublicKey = await userWallet.getSparkAddress();
611
+ const userSparkAddress = await userWallet.getSparkAddress();
558
612
 
559
613
  await sharedIssuerWallet.transferTokens({
560
614
  tokenAmount,
561
615
  tokenIdentifier,
562
- receiverSparkAddress: userWalletPublicKey,
616
+ receiverSparkAddress: userSparkAddress,
563
617
  });
564
618
 
565
619
  const issuerBalanceAfterTransfer = (
@@ -708,40 +762,3 @@ describe.each(TEST_CONFIGS)(
708
762
  );
709
763
  },
710
764
  );
711
-
712
- async function fundAndAnnounce(
713
- wallet: IssuerSparkWallet,
714
- maxSupply: bigint = 100000n,
715
- decimals: number = 0,
716
- tokenName: string = "TestToken1",
717
- tokenSymbol: string = "TT1",
718
- isFreezable: boolean = false,
719
- ) {
720
- // Faucet funds to the Issuer wallet because announcing a token
721
- // requires ownership of an L1 UTXO.
722
- const faucet = BitcoinFaucet.getInstance();
723
- const l1WalletPubKey = await wallet.getTokenL1Address();
724
- await faucet.sendToAddress(l1WalletPubKey, 100_000n);
725
- await faucet.mineBlocks(6);
726
-
727
- await new Promise((resolve) => setTimeout(resolve, 3000));
728
-
729
- try {
730
- const response = await wallet.announceTokenL1(
731
- tokenName,
732
- tokenSymbol,
733
- decimals,
734
- maxSupply,
735
- isFreezable,
736
- );
737
- console.log("Announce token response:", response);
738
- } catch (error: any) {
739
- console.error("Error when announcing token on L1:", error);
740
- throw error;
741
- }
742
- await faucet.mineBlocks(2);
743
-
744
- // Wait for LRC20 processing.
745
- const SECONDS = 1000;
746
- await new Promise((resolve) => setTimeout(resolve, 3 * SECONDS));
747
- }
@@ -1,8 +1,8 @@
1
1
  import {
2
+ ConfigOptions,
3
+ SparkSigner,
2
4
  SparkWallet,
3
5
  SparkWalletProps,
4
- SparkSigner,
5
- ConfigOptions,
6
6
  } from "@buildonspark/spark-sdk";
7
7
  import {
8
8
  type QueryTransfersResponse,
@@ -1,5 +1,5 @@
1
1
  import { sha256 } from "@scure/btc-signer/utils";
2
- import { FreezeTokensPayload } from "@buildonspark/spark-sdk/proto/spark";
2
+ import { FreezeTokensPayload } from "@buildonspark/spark-sdk/proto/spark_token";
3
3
  import { ValidationError } from "@buildonspark/spark-sdk";
4
4
 
5
5
  export function hashFreezeTokensPayload(
@@ -15,6 +15,17 @@ export function hashFreezeTokensPayload(
15
15
 
16
16
  let allHashes: Uint8Array[] = [];
17
17
 
18
+ // Hash version
19
+ const versionHashObj = sha256.create();
20
+ const versionBytes = new Uint8Array(4);
21
+ new DataView(versionBytes.buffer).setUint32(
22
+ 0,
23
+ payload.version,
24
+ false, // false for big-endian
25
+ );
26
+ versionHashObj.update(versionBytes);
27
+ allHashes.push(versionHashObj.digest());
28
+
18
29
  // Hash owner public key
19
30
  const ownerPubKeyHash = sha256.create();
20
31
  if (payload.ownerPublicKey) {
@@ -22,12 +33,12 @@ export function hashFreezeTokensPayload(
22
33
  }
23
34
  allHashes.push(ownerPubKeyHash.digest());
24
35
 
25
- // Hash token public key
26
- const tokenPubKeyHash = sha256.create();
27
- if (payload.tokenPublicKey) {
28
- tokenPubKeyHash.update(payload.tokenPublicKey);
36
+ // Hash token identifier
37
+ const tokenIdentifierHash = sha256.create();
38
+ if (payload.tokenIdentifier) {
39
+ tokenIdentifierHash.update(payload.tokenIdentifier);
29
40
  }
30
- allHashes.push(tokenPubKeyHash.digest());
41
+ allHashes.push(tokenIdentifierHash.digest());
31
42
 
32
43
  // Hash shouldUnfreeze
33
44
  const shouldUnfreezeHash = sha256.create();