@buildonspark/issuer-sdk 0.0.40 → 0.0.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +40 -39
- package/dist/index.js +11 -10
- package/package.json +3 -3
- package/src/issuer-spark-wallet.ts +1 -1
- package/src/services/freeze.ts +2 -1
package/dist/index.cjs
CHANGED
|
@@ -37,7 +37,7 @@ if (typeof global === "undefined") {
|
|
|
37
37
|
var import_lrc20_sdk2 = require("@buildonspark/lrc20-sdk");
|
|
38
38
|
var import_spark_sdk3 = require("@buildonspark/spark-sdk");
|
|
39
39
|
var import_address = require("@buildonspark/spark-sdk/address");
|
|
40
|
-
var
|
|
40
|
+
var import_utils6 = require("@noble/curves/abstract/utils");
|
|
41
41
|
|
|
42
42
|
// src/services/freeze.ts
|
|
43
43
|
var import_utils2 = require("@buildonspark/spark-sdk/utils");
|
|
@@ -93,6 +93,7 @@ function hashFreezeTokensPayload(payload) {
|
|
|
93
93
|
|
|
94
94
|
// src/services/freeze.ts
|
|
95
95
|
var import_spark_sdk2 = require("@buildonspark/spark-sdk");
|
|
96
|
+
var import_utils3 = require("@noble/curves/abstract/utils");
|
|
96
97
|
var TokenFreezeService = class {
|
|
97
98
|
config;
|
|
98
99
|
connectionManager;
|
|
@@ -117,7 +118,7 @@ var TokenFreezeService = class {
|
|
|
117
118
|
tokenPublicKey,
|
|
118
119
|
shouldUnfreeze,
|
|
119
120
|
issuerProvidedTimestamp,
|
|
120
|
-
operatorIdentityPublicKey: operator.identityPublicKey
|
|
121
|
+
operatorIdentityPublicKey: (0, import_utils3.hexToBytes)(operator.identityPublicKey)
|
|
121
122
|
};
|
|
122
123
|
const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
|
|
123
124
|
const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
|
|
@@ -150,7 +151,7 @@ var TokenFreezeService = class {
|
|
|
150
151
|
|
|
151
152
|
// src/services/token-transactions.ts
|
|
152
153
|
var import_token_transactions = require("@buildonspark/spark-sdk/token-transactions");
|
|
153
|
-
var
|
|
154
|
+
var import_utils4 = require("@noble/curves/abstract/utils");
|
|
154
155
|
var IssuerTokenTransactionService = class extends import_token_transactions.TokenTransactionService {
|
|
155
156
|
constructor(config, connectionManager) {
|
|
156
157
|
super(config, connectionManager);
|
|
@@ -169,7 +170,7 @@ var IssuerTokenTransactionService = class extends import_token_transactions.Toke
|
|
|
169
170
|
{
|
|
170
171
|
ownerPublicKey: tokenPublicKey,
|
|
171
172
|
tokenPublicKey,
|
|
172
|
-
tokenAmount: (0,
|
|
173
|
+
tokenAmount: (0, import_utils4.numberToBytesBE)(tokenAmount, 16)
|
|
173
174
|
}
|
|
174
175
|
],
|
|
175
176
|
sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys()
|
|
@@ -178,7 +179,7 @@ var IssuerTokenTransactionService = class extends import_token_transactions.Toke
|
|
|
178
179
|
};
|
|
179
180
|
|
|
180
181
|
// src/utils/type-mappers.ts
|
|
181
|
-
var
|
|
182
|
+
var import_utils5 = require("@noble/curves/abstract/utils");
|
|
182
183
|
|
|
183
184
|
// src/utils/enum-mappers.ts
|
|
184
185
|
var import_lrc20_sdk = require("@buildonspark/lrc20-sdk");
|
|
@@ -259,22 +260,22 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
|
|
|
259
260
|
$case: "onChain",
|
|
260
261
|
onChain: {
|
|
261
262
|
operationType: mapOperationType(onChain.operationType),
|
|
262
|
-
transactionHash: (0,
|
|
263
|
-
rawtx: (0,
|
|
263
|
+
transactionHash: (0, import_utils5.bytesToHex)(onChain.transactionHash),
|
|
264
|
+
rawtx: (0, import_utils5.bytesToHex)(onChain.rawtx),
|
|
264
265
|
status: mapOnChainTransactionStatus(onChain.status),
|
|
265
266
|
inputs: onChain.inputs.map((input) => ({
|
|
266
|
-
rawTx: (0,
|
|
267
|
+
rawTx: (0, import_utils5.bytesToHex)(input.rawTx),
|
|
267
268
|
vout: input.vout,
|
|
268
269
|
amountSats: input.amountSats,
|
|
269
270
|
tokenPublicKey: input.tokenPublicKey,
|
|
270
|
-
tokenAmount: input.tokenAmount ? (0,
|
|
271
|
+
tokenAmount: input.tokenAmount ? (0, import_utils5.bytesToNumberBE)(input.tokenAmount).toString() : void 0
|
|
271
272
|
})),
|
|
272
273
|
outputs: onChain.outputs.map((output) => ({
|
|
273
|
-
rawTx: (0,
|
|
274
|
+
rawTx: (0, import_utils5.bytesToHex)(output.rawTx),
|
|
274
275
|
vout: output.vout,
|
|
275
276
|
amountSats: output.amountSats,
|
|
276
277
|
tokenPublicKey: output.tokenPublicKey,
|
|
277
|
-
tokenAmount: output.tokenAmount ? (0,
|
|
278
|
+
tokenAmount: output.tokenAmount ? (0, import_utils5.bytesToNumberBE)(output.tokenAmount).toString() : void 0
|
|
278
279
|
})),
|
|
279
280
|
broadcastedAt: onChain.broadcastedAt,
|
|
280
281
|
confirmedAt: onChain.confirmedAt
|
|
@@ -288,39 +289,39 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
|
|
|
288
289
|
$case: "spark",
|
|
289
290
|
spark: {
|
|
290
291
|
operationType: mapOperationType(spark.operationType),
|
|
291
|
-
transactionHash: (0,
|
|
292
|
+
transactionHash: (0, import_utils5.bytesToHex)(spark.transactionHash),
|
|
292
293
|
status: mapSparkTransactionStatus(spark.status),
|
|
293
294
|
confirmedAt: spark.confirmedAt,
|
|
294
295
|
leavesToCreate: spark.leavesToCreate.map((leaf) => ({
|
|
295
|
-
tokenPublicKey: (0,
|
|
296
|
+
tokenPublicKey: (0, import_utils5.bytesToHex)(leaf.tokenPublicKey),
|
|
296
297
|
id: leaf.id,
|
|
297
|
-
ownerPublicKey: (0,
|
|
298
|
-
revocationPublicKey: (0,
|
|
298
|
+
ownerPublicKey: (0, import_utils5.bytesToHex)(leaf.ownerPublicKey),
|
|
299
|
+
revocationPublicKey: (0, import_utils5.bytesToHex)(leaf.revocationPublicKey),
|
|
299
300
|
withdrawalBondSats: leaf.withdrawalBondSats,
|
|
300
301
|
withdrawalLocktime: leaf.withdrawalLocktime,
|
|
301
|
-
tokenAmount: (0,
|
|
302
|
-
createTxHash: (0,
|
|
302
|
+
tokenAmount: (0, import_utils5.bytesToNumberBE)(leaf.tokenAmount).toString(),
|
|
303
|
+
createTxHash: (0, import_utils5.bytesToHex)(leaf.createTxHash),
|
|
303
304
|
createTxVoutIndex: leaf.createTxVoutIndex,
|
|
304
|
-
spendTxHash: leaf.spendTxHash ? (0,
|
|
305
|
+
spendTxHash: leaf.spendTxHash ? (0, import_utils5.bytesToHex)(leaf.spendTxHash) : void 0,
|
|
305
306
|
spendTxVoutIndex: leaf.spendTxVoutIndex,
|
|
306
307
|
isFrozen: leaf.isFrozen
|
|
307
308
|
})),
|
|
308
309
|
leavesToSpend: spark.leavesToSpend.map((leaf) => ({
|
|
309
|
-
tokenPublicKey: (0,
|
|
310
|
+
tokenPublicKey: (0, import_utils5.bytesToHex)(leaf.tokenPublicKey),
|
|
310
311
|
id: leaf.id,
|
|
311
|
-
ownerPublicKey: (0,
|
|
312
|
-
revocationPublicKey: (0,
|
|
312
|
+
ownerPublicKey: (0, import_utils5.bytesToHex)(leaf.ownerPublicKey),
|
|
313
|
+
revocationPublicKey: (0, import_utils5.bytesToHex)(leaf.revocationPublicKey),
|
|
313
314
|
withdrawalBondSats: leaf.withdrawalBondSats,
|
|
314
315
|
withdrawalLocktime: leaf.withdrawalLocktime,
|
|
315
|
-
tokenAmount: (0,
|
|
316
|
-
createTxHash: (0,
|
|
316
|
+
tokenAmount: (0, import_utils5.bytesToNumberBE)(leaf.tokenAmount).toString(),
|
|
317
|
+
createTxHash: (0, import_utils5.bytesToHex)(leaf.createTxHash),
|
|
317
318
|
createTxVoutIndex: leaf.createTxVoutIndex,
|
|
318
|
-
spendTxHash: leaf.spendTxHash ? (0,
|
|
319
|
+
spendTxHash: leaf.spendTxHash ? (0, import_utils5.bytesToHex)(leaf.spendTxHash) : void 0,
|
|
319
320
|
spendTxVoutIndex: leaf.spendTxVoutIndex,
|
|
320
321
|
isFrozen: leaf.isFrozen
|
|
321
322
|
})),
|
|
322
323
|
sparkOperatorIdentityPublicKeys: spark.sparkOperatorIdentityPublicKeys.map(
|
|
323
|
-
(key) => (0,
|
|
324
|
+
(key) => (0, import_utils5.bytesToHex)(key)
|
|
324
325
|
)
|
|
325
326
|
}
|
|
326
327
|
}
|
|
@@ -329,7 +330,7 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
|
|
|
329
330
|
return { transaction: void 0 };
|
|
330
331
|
}),
|
|
331
332
|
nextCursor: rawTransactions.nextCursor ? {
|
|
332
|
-
lastTransactionHash: (0,
|
|
333
|
+
lastTransactionHash: (0, import_utils5.bytesToHex)(
|
|
333
334
|
rawTransactions.nextCursor.lastTransactionHash
|
|
334
335
|
),
|
|
335
336
|
layer: mapLayer(rawTransactions.nextCursor.layer)
|
|
@@ -379,16 +380,16 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
|
|
|
379
380
|
const lrc20Client = await this.lrc20ConnectionManager.createLrc20Client();
|
|
380
381
|
try {
|
|
381
382
|
const tokenInfo = await lrc20Client.getTokenPubkeyInfo({
|
|
382
|
-
publicKeys: [(0,
|
|
383
|
+
publicKeys: [(0, import_utils6.hexToBytes)(await super.getIdentityPublicKey())]
|
|
383
384
|
});
|
|
384
385
|
const info = tokenInfo.tokenPubkeyInfos[0];
|
|
385
386
|
return {
|
|
386
|
-
tokenPublicKey: (0,
|
|
387
|
+
tokenPublicKey: (0, import_utils6.bytesToHex)(info.announcement.publicKey.publicKey),
|
|
387
388
|
tokenName: info.announcement.name,
|
|
388
389
|
tokenSymbol: info.announcement.symbol,
|
|
389
|
-
tokenDecimals: Number((0,
|
|
390
|
+
tokenDecimals: Number((0, import_utils6.bytesToNumberBE)(info.announcement.decimal)),
|
|
390
391
|
isFreezable: info.announcement.isFreezable,
|
|
391
|
-
maxSupply: (0,
|
|
392
|
+
maxSupply: (0, import_utils6.bytesToNumberBE)(info.announcement.maxSupply)
|
|
392
393
|
};
|
|
393
394
|
} catch (error) {
|
|
394
395
|
throw new import_spark_sdk3.NetworkError("Failed to get token info", {
|
|
@@ -401,7 +402,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
|
|
|
401
402
|
async mintTokens(tokenAmount) {
|
|
402
403
|
var tokenPublicKey = await super.getIdentityPublicKey();
|
|
403
404
|
const tokenTransaction = await this.issuerTokenTransactionService.constructMintTokenTransaction(
|
|
404
|
-
(0,
|
|
405
|
+
(0, import_utils6.hexToBytes)(tokenPublicKey),
|
|
405
406
|
tokenAmount
|
|
406
407
|
);
|
|
407
408
|
return await this.issuerTokenTransactionService.broadcastTokenTransaction(
|
|
@@ -428,10 +429,10 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
|
|
|
428
429
|
this.config.getNetworkType()
|
|
429
430
|
);
|
|
430
431
|
const response = await this.tokenFreezeService.freezeTokens(
|
|
431
|
-
(0,
|
|
432
|
-
(0,
|
|
432
|
+
(0, import_utils6.hexToBytes)(decodedOwnerPubkey),
|
|
433
|
+
(0, import_utils6.hexToBytes)(tokenPublicKey)
|
|
433
434
|
);
|
|
434
|
-
const tokenAmount = (0,
|
|
435
|
+
const tokenAmount = (0, import_utils6.bytesToNumberBE)(response.impactedTokenAmount);
|
|
435
436
|
return {
|
|
436
437
|
impactedOutputIds: response.impactedOutputIds,
|
|
437
438
|
impactedTokenAmount: tokenAmount
|
|
@@ -445,10 +446,10 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
|
|
|
445
446
|
this.config.getNetworkType()
|
|
446
447
|
);
|
|
447
448
|
const response = await this.tokenFreezeService.unfreezeTokens(
|
|
448
|
-
(0,
|
|
449
|
-
(0,
|
|
449
|
+
(0, import_utils6.hexToBytes)(decodedOwnerPubkey),
|
|
450
|
+
(0, import_utils6.hexToBytes)(tokenPublicKey)
|
|
450
451
|
);
|
|
451
|
-
const tokenAmount = (0,
|
|
452
|
+
const tokenAmount = (0, import_utils6.bytesToNumberBE)(response.impactedTokenAmount);
|
|
452
453
|
return {
|
|
453
454
|
impactedOutputIds: response.impactedOutputIds,
|
|
454
455
|
impactedTokenAmount: tokenAmount
|
|
@@ -458,7 +459,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
|
|
|
458
459
|
const lrc20Client = await this.lrc20ConnectionManager.createLrc20Client();
|
|
459
460
|
try {
|
|
460
461
|
const transactions = await lrc20Client.listTransactions({
|
|
461
|
-
tokenPublicKey: (0,
|
|
462
|
+
tokenPublicKey: (0, import_utils6.hexToBytes)(await super.getIdentityPublicKey()),
|
|
462
463
|
cursor,
|
|
463
464
|
pageSize,
|
|
464
465
|
beforeTimestamp,
|
|
@@ -477,7 +478,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
|
|
|
477
478
|
async getIssuerTokenDistribution() {
|
|
478
479
|
throw new import_spark_sdk4.NotImplementedError("Token distribution is not yet supported");
|
|
479
480
|
}
|
|
480
|
-
async announceTokenL1(tokenName, tokenTicker, decimals, maxSupply, isFreezable, feeRateSatsPerVb =
|
|
481
|
+
async announceTokenL1(tokenName, tokenTicker, decimals, maxSupply, isFreezable, feeRateSatsPerVb = 4) {
|
|
481
482
|
await this.lrc20Wallet.syncWallet();
|
|
482
483
|
const tokenPublicKey = new import_lrc20_sdk2.TokenPubkey(this.lrc20Wallet.pubkey);
|
|
483
484
|
const announcement = new import_lrc20_sdk2.TokenPubkeyAnnouncement(
|
package/dist/index.js
CHANGED
|
@@ -13,7 +13,7 @@ import {
|
|
|
13
13
|
import {
|
|
14
14
|
bytesToHex as bytesToHex2,
|
|
15
15
|
bytesToNumberBE as bytesToNumberBE2,
|
|
16
|
-
hexToBytes
|
|
16
|
+
hexToBytes as hexToBytes2
|
|
17
17
|
} from "@noble/curves/abstract/utils";
|
|
18
18
|
|
|
19
19
|
// src/services/freeze.ts
|
|
@@ -70,6 +70,7 @@ function hashFreezeTokensPayload(payload) {
|
|
|
70
70
|
|
|
71
71
|
// src/services/freeze.ts
|
|
72
72
|
import { NetworkError } from "@buildonspark/spark-sdk";
|
|
73
|
+
import { hexToBytes } from "@noble/curves/abstract/utils";
|
|
73
74
|
var TokenFreezeService = class {
|
|
74
75
|
config;
|
|
75
76
|
connectionManager;
|
|
@@ -94,7 +95,7 @@ var TokenFreezeService = class {
|
|
|
94
95
|
tokenPublicKey,
|
|
95
96
|
shouldUnfreeze,
|
|
96
97
|
issuerProvidedTimestamp,
|
|
97
|
-
operatorIdentityPublicKey: operator.identityPublicKey
|
|
98
|
+
operatorIdentityPublicKey: hexToBytes(operator.identityPublicKey)
|
|
98
99
|
};
|
|
99
100
|
const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
|
|
100
101
|
const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
|
|
@@ -356,7 +357,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
|
|
|
356
357
|
const lrc20Client = await this.lrc20ConnectionManager.createLrc20Client();
|
|
357
358
|
try {
|
|
358
359
|
const tokenInfo = await lrc20Client.getTokenPubkeyInfo({
|
|
359
|
-
publicKeys: [
|
|
360
|
+
publicKeys: [hexToBytes2(await super.getIdentityPublicKey())]
|
|
360
361
|
});
|
|
361
362
|
const info = tokenInfo.tokenPubkeyInfos[0];
|
|
362
363
|
return {
|
|
@@ -378,7 +379,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
|
|
|
378
379
|
async mintTokens(tokenAmount) {
|
|
379
380
|
var tokenPublicKey = await super.getIdentityPublicKey();
|
|
380
381
|
const tokenTransaction = await this.issuerTokenTransactionService.constructMintTokenTransaction(
|
|
381
|
-
|
|
382
|
+
hexToBytes2(tokenPublicKey),
|
|
382
383
|
tokenAmount
|
|
383
384
|
);
|
|
384
385
|
return await this.issuerTokenTransactionService.broadcastTokenTransaction(
|
|
@@ -405,8 +406,8 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
|
|
|
405
406
|
this.config.getNetworkType()
|
|
406
407
|
);
|
|
407
408
|
const response = await this.tokenFreezeService.freezeTokens(
|
|
408
|
-
|
|
409
|
-
|
|
409
|
+
hexToBytes2(decodedOwnerPubkey),
|
|
410
|
+
hexToBytes2(tokenPublicKey)
|
|
410
411
|
);
|
|
411
412
|
const tokenAmount = bytesToNumberBE2(response.impactedTokenAmount);
|
|
412
413
|
return {
|
|
@@ -422,8 +423,8 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
|
|
|
422
423
|
this.config.getNetworkType()
|
|
423
424
|
);
|
|
424
425
|
const response = await this.tokenFreezeService.unfreezeTokens(
|
|
425
|
-
|
|
426
|
-
|
|
426
|
+
hexToBytes2(decodedOwnerPubkey),
|
|
427
|
+
hexToBytes2(tokenPublicKey)
|
|
427
428
|
);
|
|
428
429
|
const tokenAmount = bytesToNumberBE2(response.impactedTokenAmount);
|
|
429
430
|
return {
|
|
@@ -435,7 +436,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
|
|
|
435
436
|
const lrc20Client = await this.lrc20ConnectionManager.createLrc20Client();
|
|
436
437
|
try {
|
|
437
438
|
const transactions = await lrc20Client.listTransactions({
|
|
438
|
-
tokenPublicKey:
|
|
439
|
+
tokenPublicKey: hexToBytes2(await super.getIdentityPublicKey()),
|
|
439
440
|
cursor,
|
|
440
441
|
pageSize,
|
|
441
442
|
beforeTimestamp,
|
|
@@ -454,7 +455,7 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
|
|
|
454
455
|
async getIssuerTokenDistribution() {
|
|
455
456
|
throw new NotImplementedError("Token distribution is not yet supported");
|
|
456
457
|
}
|
|
457
|
-
async announceTokenL1(tokenName, tokenTicker, decimals, maxSupply, isFreezable, feeRateSatsPerVb =
|
|
458
|
+
async announceTokenL1(tokenName, tokenTicker, decimals, maxSupply, isFreezable, feeRateSatsPerVb = 4) {
|
|
458
459
|
await this.lrc20Wallet.syncWallet();
|
|
459
460
|
const tokenPublicKey = new TokenPubkey(this.lrc20Wallet.pubkey);
|
|
460
461
|
const announcement = new TokenPubkeyAnnouncement(
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@buildonspark/issuer-sdk",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.41",
|
|
4
4
|
"description": "Spark Issuer SDK for token issuance",
|
|
5
5
|
"license": "Apache-2.0",
|
|
6
6
|
"module": "./dist/index.js",
|
|
@@ -54,8 +54,8 @@
|
|
|
54
54
|
},
|
|
55
55
|
"dependencies": {
|
|
56
56
|
"@bufbuild/protobuf": "^2.2.5",
|
|
57
|
-
"@buildonspark/lrc20-sdk": "0.0.
|
|
58
|
-
"@buildonspark/spark-sdk": "0.1.
|
|
57
|
+
"@buildonspark/lrc20-sdk": "0.0.38",
|
|
58
|
+
"@buildonspark/spark-sdk": "0.1.10",
|
|
59
59
|
"@noble/curves": "^1.8.0",
|
|
60
60
|
"@scure/btc-signer": "^1.5.0",
|
|
61
61
|
"bitcoinjs-lib": "^6.1.5",
|
|
package/src/issuer-spark-wallet.ts
CHANGED
|
@@ -217,7 +217,7 @@ export class IssuerSparkWallet extends SparkWallet {
|
|
|
217
217
|
decimals: number,
|
|
218
218
|
maxSupply: bigint,
|
|
219
219
|
isFreezable: boolean,
|
|
220
|
-
feeRateSatsPerVb: number =
|
|
220
|
+
feeRateSatsPerVb: number = 4.0,
|
|
221
221
|
): Promise<string> {
|
|
222
222
|
await this.lrc20Wallet!.syncWallet();
|
|
223
223
|
|
package/src/services/freeze.ts
CHANGED
|
@@ -7,6 +7,7 @@ import {
|
|
|
7
7
|
import { collectResponses } from "@buildonspark/spark-sdk/utils";
|
|
8
8
|
import { hashFreezeTokensPayload } from "../utils/token-hashing.js";
|
|
9
9
|
import { NetworkError } from "@buildonspark/spark-sdk";
|
|
10
|
+
import { hexToBytes } from "@noble/curves/abstract/utils";
|
|
10
11
|
|
|
11
12
|
export class TokenFreezeService {
|
|
12
13
|
private readonly config: WalletConfigService;
|
|
@@ -53,7 +54,7 @@ export class TokenFreezeService {
|
|
|
53
54
|
tokenPublicKey,
|
|
54
55
|
shouldUnfreeze,
|
|
55
56
|
issuerProvidedTimestamp,
|
|
56
|
-
operatorIdentityPublicKey: operator.identityPublicKey,
|
|
57
|
+
operatorIdentityPublicKey: hexToBytes(operator.identityPublicKey),
|
|
57
58
|
};
|
|
58
59
|
|
|
59
60
|
const hashedPayload: Uint8Array =
|