@buildonspark/issuer-sdk 0.1.6 → 0.1.7

This diff shows the contents of these publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
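One note on the new validation introduced in src/utils/create-validation.ts below: token names and tickers are rejected unless they are already NFC-normalized, which the code checks by comparing each string to its normalize("NFC") form. A minimal standalone TypeScript sketch of that check follows; the sample strings are illustrative and not taken from the package.

    // NFC check as used by create-validation.ts: a string passes only
    // if it is identical to its NFC-normalized form.
    function isNfcNormalized(value: string): boolean {
      return value.normalize("NFC") === value;
    }

    const composed = "caf\u00E9";    // "café" with precomposed é (U+00E9)
    const decomposed = "cafe\u0301"; // "café" as e + combining acute (U+0301)

    console.log(isNfcNormalized(composed));   // true: already in NFC
    console.log(isNfcNormalized(decomposed)); // false: would be rejected

Both strings render identically, but only the precomposed form is canonical NFC, so validateTokenParameters accepts the first and throws a SparkValidationError for the second.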
@@ -1,956 +1,675 @@
1
- "use strict";
2
- var __defProp = Object.defineProperty;
3
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
- var __getOwnPropNames = Object.getOwnPropertyNames;
5
- var __hasOwnProp = Object.prototype.hasOwnProperty;
6
- var __export = (target, all) => {
7
- for (var name in all)
8
- __defProp(target, name, { get: all[name], enumerable: true });
9
- };
10
- var __copyProps = (to, from, except, desc) => {
11
- if (from && typeof from === "object" || typeof from === "function") {
12
- for (let key of __getOwnPropNames(from))
13
- if (!__hasOwnProp.call(to, key) && key !== except)
14
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
- }
16
- return to;
17
- };
18
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
-
20
- // src/index.node.ts
21
- var index_node_exports = {};
22
- __export(index_node_exports, {
23
- DefaultSparkSigner: () => import_spark_sdk8.DefaultSparkSigner,
24
- IssuerSparkWallet: () => IssuerSparkWalletNodeJS,
25
- UnsafeStatelessSparkSigner: () => import_spark_sdk8.UnsafeStatelessSparkSigner,
26
- WalletConfig: () => import_spark_sdk9.WalletConfig
27
- });
28
- module.exports = __toCommonJS(index_node_exports);
1
+ let _buildonspark_spark_sdk = require("@buildonspark/spark-sdk");
2
+ let _noble_curves_utils = require("@noble/curves/utils");
3
+ let _scure_btc_signer_utils = require("@scure/btc-signer/utils");
29
4
 
30
- // buffer.js
31
- var import_buffer = require("buffer");
32
- if (typeof globalThis.Buffer === "undefined") {
33
- globalThis.Buffer = import_buffer.Buffer;
34
- }
35
- if (typeof window !== "undefined") {
36
- if (typeof window.global === "undefined") {
37
- window.global = window;
38
- }
39
- if (typeof window.globalThis === "undefined") {
40
- window.globalThis = window;
41
- }
42
- }
43
-
44
- // src/issuer-wallet/issuer-spark-wallet.ts
45
- var import_spark_sdk5 = require("@buildonspark/spark-sdk");
46
- var import_utils4 = require("@noble/curves/utils");
47
-
48
- // src/services/freeze.ts
49
- var import_spark_sdk2 = require("@buildonspark/spark-sdk");
50
- var import_utils2 = require("@noble/curves/utils");
51
-
52
- // src/utils/token-hashing.ts
53
- var import_utils = require("@scure/btc-signer/utils");
54
- var import_spark_sdk = require("@buildonspark/spark-sdk");
5
+ //#region src/utils/token-hashing.ts
55
6
  function hashFreezeTokensPayload(payload) {
56
- if (!payload) {
57
- throw new import_spark_sdk.SparkValidationError("Freeze tokens payload cannot be nil", {
58
- field: "payload",
59
- value: payload,
60
- expected: "valid freeze tokens payload"
61
- });
62
- }
63
- let allHashes = [];
64
- const versionHashObj = import_utils.sha256.create();
65
- const versionBytes = new Uint8Array(4);
66
- new DataView(versionBytes.buffer).setUint32(
67
- 0,
68
- payload.version,
69
- false
70
- // false for big-endian
71
- );
72
- versionHashObj.update(versionBytes);
73
- allHashes.push(versionHashObj.digest());
74
- const ownerPubKeyHash = import_utils.sha256.create();
75
- if (payload.ownerPublicKey) {
76
- ownerPubKeyHash.update(payload.ownerPublicKey);
77
- }
78
- allHashes.push(ownerPubKeyHash.digest());
79
- const tokenIdentifierHash = import_utils.sha256.create();
80
- if (payload.tokenIdentifier) {
81
- tokenIdentifierHash.update(payload.tokenIdentifier);
82
- }
83
- allHashes.push(tokenIdentifierHash.digest());
84
- const shouldUnfreezeHash = import_utils.sha256.create();
85
- shouldUnfreezeHash.update(new Uint8Array([payload.shouldUnfreeze ? 1 : 0]));
86
- allHashes.push(shouldUnfreezeHash.digest());
87
- const timestampHash = import_utils.sha256.create();
88
- if (payload.issuerProvidedTimestamp) {
89
- const timestampBytes = new Uint8Array(8);
90
- new DataView(timestampBytes.buffer).setBigUint64(
91
- 0,
92
- BigInt(payload.issuerProvidedTimestamp),
93
- true
94
- // true for little-endian
95
- );
96
- timestampHash.update(timestampBytes);
97
- }
98
- allHashes.push(timestampHash.digest());
99
- const operatorPubKeyHash = import_utils.sha256.create();
100
- if (payload.operatorIdentityPublicKey) {
101
- operatorPubKeyHash.update(payload.operatorIdentityPublicKey);
102
- }
103
- allHashes.push(operatorPubKeyHash.digest());
104
- const finalHash = import_utils.sha256.create();
105
- for (const hash of allHashes) {
106
- finalHash.update(hash);
107
- }
108
- return finalHash.digest();
7
+ if (!payload) throw new _buildonspark_spark_sdk.SparkValidationError("Freeze tokens payload cannot be nil", {
8
+ field: "payload",
9
+ value: payload,
10
+ expected: "valid freeze tokens payload"
11
+ });
12
+ let allHashes = [];
13
+ const versionHashObj = _scure_btc_signer_utils.sha256.create();
14
+ const versionBytes = new Uint8Array(4);
15
+ new DataView(versionBytes.buffer).setUint32(0, payload.version, false);
16
+ versionHashObj.update(versionBytes);
17
+ allHashes.push(versionHashObj.digest());
18
+ const ownerPubKeyHash = _scure_btc_signer_utils.sha256.create();
19
+ if (payload.ownerPublicKey) ownerPubKeyHash.update(payload.ownerPublicKey);
20
+ allHashes.push(ownerPubKeyHash.digest());
21
+ const tokenIdentifierHash = _scure_btc_signer_utils.sha256.create();
22
+ if (payload.tokenIdentifier) tokenIdentifierHash.update(payload.tokenIdentifier);
23
+ allHashes.push(tokenIdentifierHash.digest());
24
+ const shouldUnfreezeHash = _scure_btc_signer_utils.sha256.create();
25
+ shouldUnfreezeHash.update(new Uint8Array([payload.shouldUnfreeze ? 1 : 0]));
26
+ allHashes.push(shouldUnfreezeHash.digest());
27
+ const timestampHash = _scure_btc_signer_utils.sha256.create();
28
+ if (payload.issuerProvidedTimestamp) {
29
+ const timestampBytes = new Uint8Array(8);
30
+ new DataView(timestampBytes.buffer).setBigUint64(0, BigInt(payload.issuerProvidedTimestamp), true);
31
+ timestampHash.update(timestampBytes);
32
+ }
33
+ allHashes.push(timestampHash.digest());
34
+ const operatorPubKeyHash = _scure_btc_signer_utils.sha256.create();
35
+ if (payload.operatorIdentityPublicKey) operatorPubKeyHash.update(payload.operatorIdentityPublicKey);
36
+ allHashes.push(operatorPubKeyHash.digest());
37
+ const finalHash = _scure_btc_signer_utils.sha256.create();
38
+ for (const hash of allHashes) finalHash.update(hash);
39
+ return finalHash.digest();
109
40
  }
110
41
 
111
- // src/services/freeze.ts
42
+ //#endregion
43
+ //#region src/services/freeze.ts
112
44
  var TokenFreezeService = class {
113
- config;
114
- connectionManager;
115
- constructor(config, connectionManager) {
116
- this.config = config;
117
- this.connectionManager = connectionManager;
118
- }
119
- async freezeTokens({
120
- ownerPublicKey,
121
- tokenIdentifier
122
- }) {
123
- return this.freezeOperation(ownerPublicKey, false, tokenIdentifier);
124
- }
125
- async unfreezeTokens({
126
- ownerPublicKey,
127
- tokenIdentifier
128
- }) {
129
- return this.freezeOperation(ownerPublicKey, true, tokenIdentifier);
130
- }
131
- async freezeOperation(ownerPublicKey, shouldUnfreeze, tokenIdentifier) {
132
- const signingOperators = this.config.getSigningOperators();
133
- const issuerProvidedTimestamp = Date.now();
134
- const freezeResponses = await Promise.allSettled(
135
- Object.entries(signingOperators).map(async ([identifier, operator]) => {
136
- const sparkTokenClient = await this.connectionManager.createSparkTokenClient(operator.address);
137
- const freezeTokensPayload = {
138
- version: 1,
139
- ownerPublicKey,
140
- tokenIdentifier,
141
- shouldUnfreeze,
142
- issuerProvidedTimestamp,
143
- operatorIdentityPublicKey: (0, import_utils2.hexToBytes)(operator.identityPublicKey)
144
- };
145
- const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
146
- const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
147
- try {
148
- const response = await sparkTokenClient.freeze_tokens({
149
- freezeTokensPayload,
150
- issuerSignature
151
- });
152
- return {
153
- identifier,
154
- response
155
- };
156
- } catch (error) {
157
- throw new import_spark_sdk2.SparkRequestError(
158
- `Failed to send a freeze/unfreeze operation to operator: ${operator.address}`,
159
- { operation: "freeze_tokens", error }
160
- );
161
- }
162
- })
163
- );
164
- const successfulResponses = (0, import_spark_sdk2.collectResponses)(freezeResponses);
165
- return successfulResponses[0].response;
166
- }
45
+ config;
46
+ connectionManager;
47
+ constructor(config, connectionManager) {
48
+ this.config = config;
49
+ this.connectionManager = connectionManager;
50
+ }
51
+ async freezeTokens({ ownerPublicKey, tokenIdentifier }) {
52
+ return this.freezeOperation(ownerPublicKey, false, tokenIdentifier);
53
+ }
54
+ async unfreezeTokens({ ownerPublicKey, tokenIdentifier }) {
55
+ return this.freezeOperation(ownerPublicKey, true, tokenIdentifier);
56
+ }
57
+ async freezeOperation(ownerPublicKey, shouldUnfreeze, tokenIdentifier) {
58
+ const signingOperators = this.config.getSigningOperators();
59
+ const issuerProvidedTimestamp = Date.now();
60
+ return (0, _buildonspark_spark_sdk.collectResponses)(await Promise.allSettled(Object.entries(signingOperators).map(async ([identifier, operator]) => {
61
+ const sparkTokenClient = await this.connectionManager.createSparkTokenClient(operator.address);
62
+ const freezeTokensPayload = {
63
+ version: 1,
64
+ ownerPublicKey,
65
+ tokenIdentifier,
66
+ shouldUnfreeze,
67
+ issuerProvidedTimestamp,
68
+ operatorIdentityPublicKey: (0, _noble_curves_utils.hexToBytes)(operator.identityPublicKey)
69
+ };
70
+ const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
71
+ const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
72
+ try {
73
+ return {
74
+ identifier,
75
+ response: await sparkTokenClient.freeze_tokens({
76
+ freezeTokensPayload,
77
+ issuerSignature
78
+ })
79
+ };
80
+ } catch (error) {
81
+ throw new _buildonspark_spark_sdk.SparkRequestError(`Failed to send a freeze/unfreeze operation to operator: ${operator.address}`, {
82
+ operation: "freeze_tokens",
83
+ error
84
+ });
85
+ }
86
+ })))[0].response;
87
+ }
167
88
  };
168
89
 
169
- // src/services/token-transactions.ts
170
- var import_spark_sdk3 = require("@buildonspark/spark-sdk");
171
- var import_utils3 = require("@noble/curves/utils");
172
- var IssuerTokenTransactionService = class extends import_spark_sdk3.TokenTransactionService {
173
- constructor(config, connectionManager) {
174
- super(config, connectionManager);
175
- }
176
- async constructMintTokenTransaction(rawTokenIdentifierBytes, issuerTokenPublicKey, tokenAmount) {
177
- return {
178
- version: 2,
179
- network: this.config.getNetworkProto(),
180
- tokenInputs: {
181
- $case: "mintInput",
182
- mintInput: {
183
- issuerPublicKey: issuerTokenPublicKey,
184
- tokenIdentifier: rawTokenIdentifierBytes
185
- }
186
- },
187
- tokenOutputs: [
188
- {
189
- ownerPublicKey: issuerTokenPublicKey,
190
- tokenIdentifier: rawTokenIdentifierBytes,
191
- tokenAmount: (0, import_utils3.numberToBytesBE)(tokenAmount, 16)
192
- }
193
- ],
194
- clientCreatedTimestamp: /* @__PURE__ */ new Date(),
195
- sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
196
- expiryTime: void 0,
197
- invoiceAttachments: []
198
- };
199
- }
200
- async constructPartialMintTokenTransaction(rawTokenIdentifierBytes, issuerTokenPublicKey, tokenAmount) {
201
- return {
202
- version: 3,
203
- tokenTransactionMetadata: {
204
- network: this.config.getNetworkProto(),
205
- sparkOperatorIdentityPublicKeys: this.collectOperatorIdentityPublicKeys(),
206
- validityDurationSeconds: await this.config.getTokenValidityDurationSeconds(),
207
- clientCreatedTimestamp: this.connectionManager.getCurrentServerTime(),
208
- invoiceAttachments: []
209
- },
210
- tokenInputs: {
211
- $case: "mintInput",
212
- mintInput: {
213
- issuerPublicKey: issuerTokenPublicKey,
214
- tokenIdentifier: rawTokenIdentifierBytes
215
- }
216
- },
217
- partialTokenOutputs: [
218
- {
219
- ownerPublicKey: issuerTokenPublicKey,
220
- tokenIdentifier: rawTokenIdentifierBytes,
221
- withdrawBondSats: this.config.getExpectedWithdrawBondSats(),
222
- withdrawRelativeBlockLocktime: this.config.getExpectedWithdrawRelativeBlockLocktime(),
223
- tokenAmount: (0, import_utils3.numberToBytesBE)(tokenAmount, 16)
224
- }
225
- ]
226
- };
227
- }
228
- async constructCreateTokenTransaction(tokenPublicKey, tokenName, tokenTicker, decimals, maxSupply, isFreezable, extraMetadata) {
229
- return {
230
- version: 2,
231
- network: this.config.getNetworkProto(),
232
- tokenInputs: {
233
- $case: "createInput",
234
- createInput: {
235
- issuerPublicKey: tokenPublicKey,
236
- tokenName,
237
- tokenTicker,
238
- decimals,
239
- maxSupply: (0, import_utils3.numberToBytesBE)(maxSupply, 16),
240
- isFreezable,
241
- extraMetadata
242
- }
243
- },
244
- tokenOutputs: [],
245
- clientCreatedTimestamp: /* @__PURE__ */ new Date(),
246
- sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
247
- expiryTime: void 0,
248
- invoiceAttachments: []
249
- };
250
- }
251
- async constructPartialCreateTokenTransaction(tokenPublicKey, tokenName, tokenTicker, decimals, maxSupply, isFreezable, extraMetadata) {
252
- return {
253
- version: 3,
254
- tokenTransactionMetadata: {
255
- network: this.config.getNetworkProto(),
256
- sparkOperatorIdentityPublicKeys: this.collectOperatorIdentityPublicKeys(),
257
- validityDurationSeconds: await this.config.getTokenValidityDurationSeconds(),
258
- clientCreatedTimestamp: this.connectionManager.getCurrentServerTime(),
259
- invoiceAttachments: []
260
- },
261
- tokenInputs: {
262
- $case: "createInput",
263
- createInput: {
264
- issuerPublicKey: tokenPublicKey,
265
- tokenName,
266
- tokenTicker,
267
- decimals,
268
- maxSupply: (0, import_utils3.numberToBytesBE)(maxSupply, 16),
269
- isFreezable,
270
- extraMetadata
271
- }
272
- },
273
- partialTokenOutputs: []
274
- };
275
- }
90
+ //#endregion
91
+ //#region src/services/token-transactions.ts
92
+ var IssuerTokenTransactionService = class extends _buildonspark_spark_sdk.TokenTransactionService {
93
+ constructor(config, connectionManager) {
94
+ super(config, connectionManager);
95
+ }
96
+ async constructMintTokenTransaction(rawTokenIdentifierBytes, issuerTokenPublicKey, tokenAmount) {
97
+ return {
98
+ version: 2,
99
+ network: this.config.getNetworkProto(),
100
+ tokenInputs: {
101
+ $case: "mintInput",
102
+ mintInput: {
103
+ issuerPublicKey: issuerTokenPublicKey,
104
+ tokenIdentifier: rawTokenIdentifierBytes
105
+ }
106
+ },
107
+ tokenOutputs: [{
108
+ ownerPublicKey: issuerTokenPublicKey,
109
+ tokenIdentifier: rawTokenIdentifierBytes,
110
+ tokenAmount: (0, _noble_curves_utils.numberToBytesBE)(tokenAmount, 16)
111
+ }],
112
+ clientCreatedTimestamp: /* @__PURE__ */ new Date(),
113
+ sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
114
+ expiryTime: void 0,
115
+ invoiceAttachments: []
116
+ };
117
+ }
118
+ async constructPartialMintTokenTransaction(rawTokenIdentifierBytes, issuerTokenPublicKey, tokenAmount) {
119
+ return {
120
+ version: 3,
121
+ tokenTransactionMetadata: {
122
+ network: this.config.getNetworkProto(),
123
+ sparkOperatorIdentityPublicKeys: this.collectOperatorIdentityPublicKeys(),
124
+ validityDurationSeconds: await this.config.getTokenValidityDurationSeconds(),
125
+ clientCreatedTimestamp: this.connectionManager.getCurrentServerTime(),
126
+ invoiceAttachments: []
127
+ },
128
+ tokenInputs: {
129
+ $case: "mintInput",
130
+ mintInput: {
131
+ issuerPublicKey: issuerTokenPublicKey,
132
+ tokenIdentifier: rawTokenIdentifierBytes
133
+ }
134
+ },
135
+ partialTokenOutputs: [{
136
+ ownerPublicKey: issuerTokenPublicKey,
137
+ tokenIdentifier: rawTokenIdentifierBytes,
138
+ withdrawBondSats: this.config.getExpectedWithdrawBondSats(),
139
+ withdrawRelativeBlockLocktime: this.config.getExpectedWithdrawRelativeBlockLocktime(),
140
+ tokenAmount: (0, _noble_curves_utils.numberToBytesBE)(tokenAmount, 16)
141
+ }]
142
+ };
143
+ }
144
+ async constructCreateTokenTransaction(tokenPublicKey, tokenName, tokenTicker, decimals, maxSupply, isFreezable, extraMetadata) {
145
+ return {
146
+ version: 2,
147
+ network: this.config.getNetworkProto(),
148
+ tokenInputs: {
149
+ $case: "createInput",
150
+ createInput: {
151
+ issuerPublicKey: tokenPublicKey,
152
+ tokenName,
153
+ tokenTicker,
154
+ decimals,
155
+ maxSupply: (0, _noble_curves_utils.numberToBytesBE)(maxSupply, 16),
156
+ isFreezable,
157
+ extraMetadata
158
+ }
159
+ },
160
+ tokenOutputs: [],
161
+ clientCreatedTimestamp: /* @__PURE__ */ new Date(),
162
+ sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
163
+ expiryTime: void 0,
164
+ invoiceAttachments: []
165
+ };
166
+ }
167
+ async constructPartialCreateTokenTransaction(tokenPublicKey, tokenName, tokenTicker, decimals, maxSupply, isFreezable, extraMetadata) {
168
+ return {
169
+ version: 3,
170
+ tokenTransactionMetadata: {
171
+ network: this.config.getNetworkProto(),
172
+ sparkOperatorIdentityPublicKeys: this.collectOperatorIdentityPublicKeys(),
173
+ validityDurationSeconds: await this.config.getTokenValidityDurationSeconds(),
174
+ clientCreatedTimestamp: this.connectionManager.getCurrentServerTime(),
175
+ invoiceAttachments: []
176
+ },
177
+ tokenInputs: {
178
+ $case: "createInput",
179
+ createInput: {
180
+ issuerPublicKey: tokenPublicKey,
181
+ tokenName,
182
+ tokenTicker,
183
+ decimals,
184
+ maxSupply: (0, _noble_curves_utils.numberToBytesBE)(maxSupply, 16),
185
+ isFreezable,
186
+ extraMetadata
187
+ }
188
+ },
189
+ partialTokenOutputs: []
190
+ };
191
+ }
276
192
  };
277
193
 
278
- // src/issuer-wallet/issuer-spark-wallet.ts
279
- var import_spark_sdk6 = require("@buildonspark/spark-sdk");
280
-
281
- // src/utils/create-validation.ts
282
- var import_spark_sdk4 = require("@buildonspark/spark-sdk");
194
+ //#endregion
195
+ //#region src/utils/create-validation.ts
196
+ /**
197
+ * Returns true when the input is already in NFC normalisation form.
198
+ * JavaScript strings are UTF-16 encoded, so any JavaScript string is
199
+ * already valid Unicode. However, we still need to ensure canonical
200
+ * equivalence so that, for example, \u00E9 (é) and \u0065\u0301 (é)
201
+ * are treated identically. We do this by comparing the original
202
+ * string to its NFC-normalised representation.
203
+ */
283
204
  function isNfcNormalized(value) {
284
- return value.normalize("NFC") === value;
205
+ return value.normalize("NFC") === value;
285
206
  }
286
- var MIN_NAME_SIZE = 3;
287
- var MAX_NAME_SIZE = 20;
288
- var MIN_SYMBOL_SIZE = 3;
289
- var MAX_SYMBOL_SIZE = 6;
290
- var MAX_DECIMALS = 255;
291
- var MAXIMUM_MAX_SUPPLY = (1n << 128n) - 1n;
292
- var MAX_TOKEN_CONTENT_SIZE = 1024;
207
+ const MIN_NAME_SIZE = 3;
208
+ const MAX_NAME_SIZE = 20;
209
+ const MIN_SYMBOL_SIZE = 3;
210
+ const MAX_SYMBOL_SIZE = 6;
211
+ const MAX_DECIMALS = 255;
212
+ const MAXIMUM_MAX_SUPPLY = (1n << 128n) - 1n;
213
+ const MAX_TOKEN_CONTENT_SIZE = 1024;
293
214
  function validateTokenParameters(tokenName, tokenTicker, decimals, maxSupply, extraMetadata) {
294
- if (!isNfcNormalized(tokenName)) {
295
- throw new import_spark_sdk4.SparkValidationError("Token name must be NFC-normalised UTF-8", {
296
- field: "tokenName",
297
- value: tokenName,
298
- expected: "NFC normalised string"
299
- });
300
- }
301
- if (!isNfcNormalized(tokenTicker)) {
302
- throw new import_spark_sdk4.SparkValidationError(
303
- "Token ticker must be NFC-normalised UTF-8",
304
- {
305
- field: "tokenTicker",
306
- value: tokenTicker,
307
- expected: "NFC normalised string"
308
- }
309
- );
310
- }
311
- const nameBytes = import_buffer.Buffer.from(tokenName, "utf-8").length;
312
- if (nameBytes < MIN_NAME_SIZE || nameBytes > MAX_NAME_SIZE) {
313
- throw new import_spark_sdk4.SparkValidationError(
314
- `Token name must be between ${MIN_NAME_SIZE} and ${MAX_NAME_SIZE} bytes`,
315
- {
316
- field: "tokenName",
317
- value: tokenName,
318
- actualLength: nameBytes,
319
- expected: `>=${MIN_NAME_SIZE} and <=${MAX_NAME_SIZE}`
320
- }
321
- );
322
- }
323
- const tickerBytes = import_buffer.Buffer.from(tokenTicker, "utf-8").length;
324
- if (tickerBytes < MIN_SYMBOL_SIZE || tickerBytes > MAX_SYMBOL_SIZE) {
325
- throw new import_spark_sdk4.SparkValidationError(
326
- `Token ticker must be between ${MIN_SYMBOL_SIZE} and ${MAX_SYMBOL_SIZE} bytes`,
327
- {
328
- field: "tokenTicker",
329
- value: tokenTicker,
330
- actualLength: tickerBytes,
331
- expected: `>=${MIN_SYMBOL_SIZE} and <=${MAX_SYMBOL_SIZE}`
332
- }
333
- );
334
- }
335
- if (!Number.isSafeInteger(decimals) || decimals < 0 || decimals > MAX_DECIMALS) {
336
- throw new import_spark_sdk4.SparkValidationError(
337
- `Decimals must be an integer between 0 and ${MAX_DECIMALS}`,
338
- {
339
- field: "decimals",
340
- value: decimals,
341
- expected: `>=0 and <=${MAX_DECIMALS}`
342
- }
343
- );
344
- }
345
- if (maxSupply < 0n || maxSupply > MAXIMUM_MAX_SUPPLY) {
346
- throw new import_spark_sdk4.SparkValidationError(`maxSupply must be between 0 and 2^128-1`, {
347
- field: "maxSupply",
348
- value: maxSupply.toString(),
349
- expected: `>=0 and <=${MAXIMUM_MAX_SUPPLY.toString()}`
350
- });
351
- }
352
- if (extraMetadata && extraMetadata.length > MAX_TOKEN_CONTENT_SIZE) {
353
- throw new import_spark_sdk4.SparkValidationError(
354
- `Extra metadata must be less than ${MAX_TOKEN_CONTENT_SIZE} bytes`,
355
- {
356
- field: "extraMetadata",
357
- value: extraMetadata.length,
358
- expected: `<${MAX_TOKEN_CONTENT_SIZE}`
359
- }
360
- );
361
- }
215
+ if (!isNfcNormalized(tokenName)) throw new _buildonspark_spark_sdk.SparkValidationError("Token name must be NFC-normalised UTF-8", {
216
+ field: "tokenName",
217
+ value: tokenName,
218
+ expected: "NFC normalised string"
219
+ });
220
+ if (!isNfcNormalized(tokenTicker)) throw new _buildonspark_spark_sdk.SparkValidationError("Token ticker must be NFC-normalised UTF-8", {
221
+ field: "tokenTicker",
222
+ value: tokenTicker,
223
+ expected: "NFC normalised string"
224
+ });
225
+ const nameBytes = Buffer.from(tokenName, "utf-8").length;
226
+ if (nameBytes < MIN_NAME_SIZE || nameBytes > MAX_NAME_SIZE) throw new _buildonspark_spark_sdk.SparkValidationError(`Token name must be between ${MIN_NAME_SIZE} and ${MAX_NAME_SIZE} bytes`, {
227
+ field: "tokenName",
228
+ value: tokenName,
229
+ actualLength: nameBytes,
230
+ expected: `>=${MIN_NAME_SIZE} and <=${MAX_NAME_SIZE}`
231
+ });
232
+ const tickerBytes = Buffer.from(tokenTicker, "utf-8").length;
233
+ if (tickerBytes < MIN_SYMBOL_SIZE || tickerBytes > MAX_SYMBOL_SIZE) throw new _buildonspark_spark_sdk.SparkValidationError(`Token ticker must be between ${MIN_SYMBOL_SIZE} and ${MAX_SYMBOL_SIZE} bytes`, {
234
+ field: "tokenTicker",
235
+ value: tokenTicker,
236
+ actualLength: tickerBytes,
237
+ expected: `>=${MIN_SYMBOL_SIZE} and <=${MAX_SYMBOL_SIZE}`
238
+ });
239
+ if (!Number.isSafeInteger(decimals) || decimals < 0 || decimals > MAX_DECIMALS) throw new _buildonspark_spark_sdk.SparkValidationError(`Decimals must be an integer between 0 and ${MAX_DECIMALS}`, {
240
+ field: "decimals",
241
+ value: decimals,
242
+ expected: `>=0 and <=${MAX_DECIMALS}`
243
+ });
244
+ if (maxSupply < 0n || maxSupply > MAXIMUM_MAX_SUPPLY) throw new _buildonspark_spark_sdk.SparkValidationError(`maxSupply must be between 0 and 2^128-1`, {
245
+ field: "maxSupply",
246
+ value: maxSupply.toString(),
247
+ expected: `>=0 and <=${MAXIMUM_MAX_SUPPLY.toString()}`
248
+ });
249
+ if (extraMetadata && extraMetadata.length > MAX_TOKEN_CONTENT_SIZE) throw new _buildonspark_spark_sdk.SparkValidationError(`Extra metadata must be less than ${MAX_TOKEN_CONTENT_SIZE} bytes`, {
250
+ field: "extraMetadata",
251
+ value: extraMetadata.length,
252
+ expected: `<${MAX_TOKEN_CONTENT_SIZE}`
253
+ });
362
254
  }
363
255
 
364
- // src/issuer-wallet/issuer-spark-wallet.ts
365
- var BURN_ADDRESS = "02".repeat(33);
366
- var IssuerSparkWallet = class extends import_spark_sdk5.SparkWallet {
367
- issuerTokenTransactionService;
368
- tokenFreezeService;
369
- tracerId = "issuer-sdk";
370
- /**
371
- * Initializes a new IssuerSparkWallet instance.
372
- * Inherits the generic static initialize from the base class.
373
- */
374
- constructor(configOptions, signer) {
375
- super(configOptions, signer);
376
- this.issuerTokenTransactionService = new IssuerTokenTransactionService(
377
- this.config,
378
- this.connectionManager
379
- );
380
- this.tokenFreezeService = new TokenFreezeService(
381
- this.config,
382
- this.connectionManager
383
- );
384
- this.wrapIssuerSparkWalletMethods();
385
- }
386
- /**
387
- * Gets the token balance for the issuer's token.
388
- * @deprecated Use getIssuerTokenBalances() instead. This method will be removed in a future version.
389
- * @returns An object containing the token balance as a bigint
390
- *
391
- * @throws {SparkValidationError} If multiple tokens are found for this issuer
392
- */
393
- async getIssuerTokenBalance() {
394
- const publicKey = await super.getIdentityPublicKey();
395
- const balanceObj = await this.getBalance();
396
- const issuerBalance = [...balanceObj.tokenBalances.entries()].filter(
397
- ([, info]) => info.tokenMetadata.tokenPublicKey === publicKey
398
- );
399
- if (issuerBalance.length > 1) {
400
- throw new import_spark_sdk5.SparkValidationError(
401
- "Multiple tokens found for this issuer. Use getIssuerTokenBalances() instead.",
402
- {
403
- field: "issuerTokenBalance",
404
- expected: "single token",
405
- actual: `${issuerBalance.length} tokens`
406
- }
407
- );
408
- }
409
- if (issuerBalance.length === 0) {
410
- return {
411
- tokenIdentifier: void 0,
412
- balance: 0n
413
- };
414
- }
415
- return {
416
- tokenIdentifier: issuerBalance[0][0],
417
- balance: issuerBalance[0][1].balance
418
- };
419
- }
420
- /**
421
- * Gets the token balances for the tokens that were issued by this user.
422
- * @returns An array of objects containing the token identifier and balance
423
- */
424
- async getIssuerTokenBalances() {
425
- const publicKey = await super.getIdentityPublicKey();
426
- const balanceObj = await this.getBalance();
427
- const issuerBalance = [...balanceObj.tokenBalances.entries()].filter(
428
- ([, info]) => info.tokenMetadata.tokenPublicKey === publicKey
429
- );
430
- if (issuerBalance.length === 0) {
431
- return [
432
- {
433
- tokenIdentifier: void 0,
434
- balance: 0n
435
- }
436
- ];
437
- }
438
- return issuerBalance.map(([tokenIdentifier, { balance }]) => ({
439
- tokenIdentifier,
440
- balance
441
- }));
442
- }
443
- /**
444
- * Retrieves information about the issuer's token.
445
- * @deprecated Use getIssuerTokensMetadata() instead. This method will be removed in a future version.
446
- * @returns An object containing token information including public key, name, symbol, decimals, max supply, freeze status, and extra metadata
447
- * @throws {SparkRequestError} If the token metadata cannot be retrieved
448
- * @throws {SparkValidationError} If multiple tokens are found for this issuer
449
- */
450
- async getIssuerTokenMetadata() {
451
- const tokensMetadata = await this.getIssuerTokensMetadata();
452
- if (tokensMetadata.length === 0) {
453
- throw new import_spark_sdk5.SparkValidationError("No tokens found. Create a token first.");
454
- }
455
- if (tokensMetadata.length > 1) {
456
- throw new import_spark_sdk5.SparkValidationError(
457
- "Multiple tokens found for this issuer. Please migrate to getIssuerTokensMetadata() instead.",
458
- {
459
- field: "tokenMetadata",
460
- value: tokensMetadata
461
- }
462
- );
463
- }
464
- return tokensMetadata[0];
465
- }
466
- /**
467
- * Retrieves information about the tokens that were issued by this user.
468
- * @param tokenIdentifiers - Optional array of specific token identifiers to fetch.
469
- * If omitted, all tokens for this issuer are fetched.
470
- * @returns An array of objects containing token information including public key, name, symbol, decimals, max supply, freeze status, and extra metadata
471
- * @throws {SparkRequestError} If the token metadata cannot be retrieved
472
- */
473
- async getIssuerTokensMetadata(tokenIdentifiers) {
474
- const issuerPublicKey = await super.getIdentityPublicKey();
475
- const sparkTokenClient = await this.connectionManager.createSparkTokenClient(
476
- this.config.getCoordinatorAddress()
477
- );
478
- const filterByIdentifiers = Array.isArray(tokenIdentifiers) && tokenIdentifiers.length > 0;
479
- const tokenIdentifierSet = filterByIdentifiers ? new Set(tokenIdentifiers) : void 0;
480
- const request = {};
481
- if (filterByIdentifiers) {
482
- request.tokenIdentifiers = tokenIdentifiers.map(
483
- (id) => (0, import_spark_sdk5.decodeBech32mTokenIdentifier)(id, this.config.getNetworkType()).tokenIdentifier
484
- );
485
- } else {
486
- request.issuerPublicKeys = Array.of((0, import_utils4.hexToBytes)(issuerPublicKey));
487
- }
488
- try {
489
- const response = await sparkTokenClient.query_token_metadata(request);
490
- const tokenMetadata = [];
491
- for (const metadata of response.tokenMetadata) {
492
- const bech32mTokenIdentifier = (0, import_spark_sdk5.encodeBech32mTokenIdentifier)({
493
- tokenIdentifier: metadata.tokenIdentifier,
494
- network: this.config.getNetworkType()
495
- });
496
- if ((0, import_utils4.bytesToHex)(metadata.issuerPublicKey) !== issuerPublicKey) {
497
- continue;
498
- }
499
- if (filterByIdentifiers && !tokenIdentifierSet.has(bech32mTokenIdentifier)) {
500
- continue;
501
- }
502
- this.tokenMetadata.set(bech32mTokenIdentifier, metadata);
503
- tokenMetadata.push({
504
- tokenPublicKey: (0, import_utils4.bytesToHex)(metadata.issuerPublicKey),
505
- rawTokenIdentifier: metadata.tokenIdentifier,
506
- tokenName: metadata.tokenName,
507
- tokenTicker: metadata.tokenTicker,
508
- decimals: metadata.decimals,
509
- maxSupply: (0, import_utils4.bytesToNumberBE)(metadata.maxSupply),
510
- isFreezable: metadata.isFreezable,
511
- extraMetadata: metadata.extraMetadata ? new Uint8Array(metadata.extraMetadata) : void 0,
512
- bech32mTokenIdentifier
513
- });
514
- }
515
- return tokenMetadata;
516
- } catch (error) {
517
- if (error instanceof import_spark_sdk5.SparkError) {
518
- throw error;
519
- }
520
- throw new import_spark_sdk5.SparkRequestError("Failed to fetch token metadata", { error });
521
- }
522
- }
523
- /**
524
- * Retrieves the bech32m encoded token identifier for the issuer's token.
525
- * @deprecated Use getIssuerTokenIdentifiers() instead. This method will be removed in a future version.
526
- * @returns The bech32m encoded token identifier for the issuer's token
527
- * @throws {SparkRequestError} If the token identifier cannot be retrieved
528
- * @throws {SparkValidationError} If multiple tokens are found for this issuer
529
- */
530
- async getIssuerTokenIdentifier() {
531
- const tokensMetadata = await this.getIssuerTokensMetadata();
532
- if (tokensMetadata.length === 0) {
533
- throw new import_spark_sdk5.SparkValidationError("No tokens found. Create a token first.");
534
- }
535
- if (tokensMetadata.length > 1) {
536
- throw new import_spark_sdk5.SparkValidationError(
537
- "Multiple tokens found. Use getIssuerTokenIdentifiers() instead.",
538
- {
539
- method: "getIssuerTokenIdentifier",
540
- availableTokens: tokensMetadata.map((t) => ({
541
- tokenName: t.tokenName,
542
- tokenTicker: t.tokenTicker,
543
- bech32mTokenIdentifier: (0, import_spark_sdk5.encodeBech32mTokenIdentifier)({
544
- tokenIdentifier: t.rawTokenIdentifier,
545
- network: this.config.getNetworkType()
546
- })
547
- }))
548
- }
549
- );
550
- }
551
- return tokensMetadata[0].bech32mTokenIdentifier;
552
- }
553
- /**
554
- * Retrieves the bech32m encoded token identifier for the issuer's token.
555
- * @returns The bech32m encoded token identifier for the issuer's token
556
- * @throws {SparkRequestError} If the token identifier cannot be retrieved
557
- */
558
- async getIssuerTokenIdentifiers() {
559
- const tokensMetadata = await this.getIssuerTokensMetadata();
560
- return tokensMetadata.map((metadata) => metadata.bech32mTokenIdentifier);
561
- }
562
- async createToken({
563
- tokenName,
564
- tokenTicker,
565
- decimals,
566
- isFreezable,
567
- maxSupply = 0n,
568
- extraMetadata,
569
- returnIdentifierForCreate = false
570
- }) {
571
- validateTokenParameters(
572
- tokenName,
573
- tokenTicker,
574
- decimals,
575
- maxSupply,
576
- extraMetadata
577
- );
578
- const issuerPublicKey = await super.getIdentityPublicKey();
579
- if (this.config.getTokenTransactionVersion() === "V2") {
580
- const tokenTransaction = await this.issuerTokenTransactionService.constructCreateTokenTransaction(
581
- (0, import_utils4.hexToBytes)(issuerPublicKey),
582
- tokenName,
583
- tokenTicker,
584
- decimals,
585
- maxSupply,
586
- isFreezable,
587
- extraMetadata
588
- );
589
- const { finalTokenTransactionHash, tokenIdentifier } = await this.issuerTokenTransactionService.broadcastTokenTransactionDetailed(
590
- tokenTransaction
591
- );
592
- const txHash = (0, import_utils4.bytesToHex)(finalTokenTransactionHash);
593
- if (returnIdentifierForCreate) {
594
- if (!tokenIdentifier) {
595
- throw new import_spark_sdk5.SparkRequestError(
596
- "Server response missing expected field: tokenIdentifier",
597
- {
598
- operation: "broadcast_transaction",
599
- field: "tokenIdentifier"
600
- }
601
- );
602
- }
603
- const bech32mTokenIdentifier = (0, import_spark_sdk5.encodeBech32mTokenIdentifier)({
604
- tokenIdentifier,
605
- network: this.config.getNetworkType()
606
- });
607
- return {
608
- tokenIdentifier: bech32mTokenIdentifier,
609
- transactionHash: txHash
610
- };
611
- }
612
- return txHash;
613
- } else {
614
- const partialTokenTransaction = await this.issuerTokenTransactionService.constructPartialCreateTokenTransaction(
615
- (0, import_utils4.hexToBytes)(issuerPublicKey),
616
- tokenName,
617
- tokenTicker,
618
- decimals,
619
- maxSupply,
620
- isFreezable,
621
- extraMetadata
622
- );
623
- const broadcastResponse = await this.issuerTokenTransactionService.broadcastTokenTransactionV3Detailed(
624
- partialTokenTransaction
625
- );
626
- const finalHash = await (0, import_spark_sdk6.hashFinalTokenTransaction)(
627
- broadcastResponse.finalTokenTransaction
628
- );
629
- const finalTransactionHash = (0, import_utils4.bytesToHex)(finalHash);
630
- if (returnIdentifierForCreate) {
631
- if (!broadcastResponse.tokenIdentifier) {
632
- throw new import_spark_sdk5.SparkRequestError(
633
- "Server response missing expected field: tokenIdentifier",
634
- {
635
- operation: "broadcast_transaction",
636
- field: "tokenIdentifier"
637
- }
638
- );
639
- }
640
- const tokenIdentifier = (0, import_spark_sdk5.encodeBech32mTokenIdentifier)({
641
- tokenIdentifier: broadcastResponse.tokenIdentifier,
642
- network: this.config.getNetworkType()
643
- });
644
- return {
645
- tokenIdentifier,
646
- transactionHash: finalTransactionHash
647
- };
648
- }
649
- return finalTransactionHash;
650
- }
651
- }
652
- async mintTokens(tokenAmountOrParams) {
653
- let tokenAmount;
654
- let bech32mTokenIdentifier;
655
- if (typeof tokenAmountOrParams === "bigint") {
656
- tokenAmount = tokenAmountOrParams;
657
- bech32mTokenIdentifier = void 0;
658
- } else {
659
- tokenAmount = tokenAmountOrParams.tokenAmount;
660
- bech32mTokenIdentifier = tokenAmountOrParams.tokenIdentifier;
661
- }
662
- const issuerTokenPublicKey = await super.getIdentityPublicKey();
663
- const issuerTokenPublicKeyBytes = (0, import_utils4.hexToBytes)(issuerTokenPublicKey);
664
- if (bech32mTokenIdentifier === void 0) {
665
- const tokensMetadata = await this.getIssuerTokensMetadata();
666
- if (tokensMetadata.length === 0) {
667
- throw new import_spark_sdk5.SparkValidationError(
668
- "No tokens found. Create a token first."
669
- );
670
- }
671
- if (tokensMetadata.length > 1) {
672
- throw new import_spark_sdk5.SparkValidationError(
673
- "Multiple tokens found. Please use mintTokens({ tokenAmount, tokenIdentifier }) instead.",
674
- {
675
- field: "tokenIdentifier",
676
- availableTokens: tokensMetadata.map((t) => ({
677
- tokenName: t.tokenName,
678
- tokenTicker: t.tokenTicker,
679
- bech32mTokenIdentifier: t.bech32mTokenIdentifier
680
- }))
681
- }
682
- );
683
- }
684
- bech32mTokenIdentifier = tokensMetadata[0].bech32mTokenIdentifier;
685
- } else {
686
- await this.validateTokenIssuer(bech32mTokenIdentifier);
687
- }
688
- const rawTokenIdentifier = (0, import_spark_sdk5.decodeBech32mTokenIdentifier)(
689
- bech32mTokenIdentifier,
690
- this.config.getNetworkType()
691
- ).tokenIdentifier;
692
- if (this.config.getTokenTransactionVersion() === "V2") {
693
- const tokenTransaction = await this.issuerTokenTransactionService.constructMintTokenTransaction(
694
- rawTokenIdentifier,
695
- issuerTokenPublicKeyBytes,
696
- tokenAmount
697
- );
698
- return await this.issuerTokenTransactionService.broadcastTokenTransaction(
699
- tokenTransaction
700
- );
701
- } else {
702
- const partialTokenTransaction = await this.issuerTokenTransactionService.constructPartialMintTokenTransaction(
703
- rawTokenIdentifier,
704
- issuerTokenPublicKeyBytes,
705
- tokenAmount
706
- );
707
- return await this.issuerTokenTransactionService.broadcastTokenTransactionV3(
708
- partialTokenTransaction
709
- );
710
- }
711
- }
712
- async burnTokens(tokenAmountOrParams, selectedOutputs) {
713
- let burnTokenIdentifier;
714
- let tokenAmount;
715
- let outputs;
716
- if (typeof tokenAmountOrParams === "bigint") {
717
- tokenAmount = tokenAmountOrParams;
718
- outputs = selectedOutputs;
719
- const tokenIdentifiers = await this.getIssuerTokenIdentifiers();
720
- if (tokenIdentifiers.length === 0) {
721
- throw new import_spark_sdk5.SparkValidationError(
722
- "No tokens found. Create a token first."
723
- );
724
- }
725
- if (tokenIdentifiers.length > 1) {
726
- throw new import_spark_sdk5.SparkValidationError(
727
- "Multiple tokens found. Use burnTokens({ tokenIdentifier, tokenAmount, selectedOutputs }) to specify which token to burn.",
728
- {
729
- field: "tokenIdentifier",
730
- availableTokens: tokenIdentifiers
731
- }
732
- );
733
- }
734
- burnTokenIdentifier = tokenIdentifiers[0];
735
- } else {
736
- tokenAmount = tokenAmountOrParams.tokenAmount;
737
- outputs = tokenAmountOrParams.selectedOutputs;
738
- await this.validateTokenIssuer(tokenAmountOrParams.tokenIdentifier);
739
- burnTokenIdentifier = tokenAmountOrParams.tokenIdentifier;
740
- }
741
- const burnAddress = (0, import_spark_sdk5.encodeSparkAddress)({
742
- identityPublicKey: BURN_ADDRESS,
743
- network: this.config.getNetworkType()
744
- });
745
- return await this.transferTokens({
746
- tokenIdentifier: burnTokenIdentifier,
747
- tokenAmount,
748
- receiverSparkAddress: burnAddress,
749
- selectedOutputs: outputs
750
- });
751
- }
752
- async freezeTokens(sparkAddressOrParams) {
753
- let bech32mTokenIdentifier;
754
- let sparkAddress;
755
- if (typeof sparkAddressOrParams === "string") {
756
- sparkAddress = sparkAddressOrParams;
757
- bech32mTokenIdentifier = void 0;
758
- } else {
759
- sparkAddress = sparkAddressOrParams.sparkAddress;
760
- bech32mTokenIdentifier = sparkAddressOrParams.tokenIdentifier;
761
- }
762
- const decodedOwnerPubkey = (0, import_spark_sdk5.decodeSparkAddress)(
763
- sparkAddress,
764
- this.config.getNetworkType()
765
- );
766
- if (bech32mTokenIdentifier === void 0) {
767
- const tokenIdentifiers = await this.getIssuerTokenIdentifiers();
768
- if (tokenIdentifiers.length === 0) {
769
- throw new import_spark_sdk5.SparkValidationError(
770
- "No tokens found. Create a token first."
771
- );
772
- }
773
- if (tokenIdentifiers.length > 1) {
774
- throw new import_spark_sdk5.SparkValidationError(
775
- "Multiple tokens found. Use freezeTokens({ tokenIdentifier, sparkAddress }) instead.",
776
- {
777
- field: "tokenIdentifier",
778
- availableTokens: tokenIdentifiers
779
- }
780
- );
781
- }
782
- bech32mTokenIdentifier = tokenIdentifiers[0];
783
- } else {
784
- await this.validateTokenIssuer(bech32mTokenIdentifier);
785
- }
786
- const rawTokenIdentifier = (0, import_spark_sdk5.decodeBech32mTokenIdentifier)(
787
- bech32mTokenIdentifier,
788
- this.config.getNetworkType()
789
- ).tokenIdentifier;
790
- const response = await this.tokenFreezeService.freezeTokens({
791
- ownerPublicKey: (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
792
- tokenIdentifier: rawTokenIdentifier
793
- });
794
- const tokenAmount = (0, import_utils4.bytesToNumberBE)(response.impactedTokenAmount);
795
- return {
796
- impactedOutputIds: response.impactedOutputIds,
797
- impactedTokenAmount: tokenAmount
798
- };
799
- }
800
- async unfreezeTokens(sparkAddressOrParams) {
801
- let bech32mTokenIdentifier;
802
- let sparkAddress;
803
- if (typeof sparkAddressOrParams === "string") {
804
- sparkAddress = sparkAddressOrParams;
805
- bech32mTokenIdentifier = void 0;
806
- } else {
807
- sparkAddress = sparkAddressOrParams.sparkAddress;
808
- bech32mTokenIdentifier = sparkAddressOrParams.tokenIdentifier;
809
- }
810
- if (bech32mTokenIdentifier === void 0) {
811
- const tokenIdentifiers = await this.getIssuerTokenIdentifiers();
812
- if (tokenIdentifiers.length === 0) {
813
- throw new import_spark_sdk5.SparkValidationError(
814
- "No tokens found. Create a token first."
815
- );
816
- }
817
- if (tokenIdentifiers.length > 1) {
818
- throw new import_spark_sdk5.SparkValidationError(
819
- "Multiple tokens found. Use unfreezeTokens({ tokenIdentifier, sparkAddress }) instead.",
820
- {
821
- field: "tokenIdentifier",
822
- availableTokens: tokenIdentifiers
823
- }
824
- );
825
- }
826
- bech32mTokenIdentifier = tokenIdentifiers[0];
827
- } else {
828
- await this.validateTokenIssuer(bech32mTokenIdentifier);
829
- }
830
- const decodedOwnerPubkey = (0, import_spark_sdk5.decodeSparkAddress)(
831
- sparkAddress,
832
- this.config.getNetworkType()
833
- );
834
- const rawTokenIdentifier = (0, import_spark_sdk5.decodeBech32mTokenIdentifier)(
835
- bech32mTokenIdentifier,
836
- this.config.getNetworkType()
837
- ).tokenIdentifier;
838
- const response = await this.tokenFreezeService.unfreezeTokens({
839
- ownerPublicKey: (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
840
- tokenIdentifier: rawTokenIdentifier
841
- });
842
- const tokenAmount = (0, import_utils4.bytesToNumberBE)(response.impactedTokenAmount);
843
- return {
844
- impactedOutputIds: response.impactedOutputIds,
845
- impactedTokenAmount: tokenAmount
846
- };
847
- }
848
- /**
849
- * Retrieves the distribution information for the issuer's token.
850
- * @throws {SparkError} This feature is not yet supported
851
- */
852
- async getIssuerTokenDistribution() {
853
- throw new import_spark_sdk5.SparkError("Token distribution is not yet supported");
854
- }
855
- /**
856
- * This validates that the token belongs to this issuer.
857
- * If a token is in the cache, it must belong to this issuer.
858
- * @param tokenIdentifier - The bech32m encoded token identifier
859
- * @throws {SparkValidationError} If the token is not found for this issuer
860
- * @private
861
- */
862
- async validateTokenIssuer(tokenIdentifier) {
863
- const issuerPublicKey = await super.getIdentityPublicKey();
864
- const cachedMetadata = this.tokenMetadata.get(tokenIdentifier);
865
- if (cachedMetadata) {
866
- if ((0, import_utils4.bytesToHex)(cachedMetadata.issuerPublicKey) !== issuerPublicKey) {
867
- throw new import_spark_sdk5.SparkValidationError("Token was not issued by this issuer", {
868
- field: "issuerPublicKey",
869
- tokenIdentifier,
870
- expected: issuerPublicKey,
871
- actual: (0, import_utils4.bytesToHex)(cachedMetadata.issuerPublicKey)
872
- });
873
- }
874
- } else {
875
- const tokensMetadata = await this.getIssuerTokensMetadata([
876
- tokenIdentifier
877
- ]);
878
- if (tokensMetadata.length === 0) {
879
- throw new import_spark_sdk5.SparkValidationError("Token not found for this issuer", {
880
- field: "tokenIdentifier",
881
- value: tokenIdentifier
882
- });
883
- }
884
- if (tokensMetadata[0].tokenPublicKey !== issuerPublicKey) {
885
- throw new import_spark_sdk5.SparkValidationError("Token was not issued by this issuer", {
886
- field: "issuerPublicKey",
887
- tokenIdentifier,
888
- expected: issuerPublicKey,
889
- actual: tokensMetadata[0].tokenPublicKey
890
- });
891
- }
892
- }
893
- }
894
- getTraceName(methodName) {
895
- return `IssuerSparkWallet.${methodName}`;
896
- }
897
- wrapIssuerPublicMethod(methodName) {
898
- const original = this[methodName];
899
- if (typeof original !== "function") {
900
- throw new Error(
901
- `Method ${methodName} is not a function on IssuerSparkWallet.`
902
- );
903
- }
904
- const originalFn = original;
905
- const wrapped = import_spark_sdk5.SparkWallet.wrapMethod(
906
- String(methodName),
907
- originalFn,
908
- this
909
- );
910
- this[methodName] = wrapped;
911
- }
912
- wrapIssuerSparkWalletMethods() {
913
- PUBLIC_ISSUER_SPARK_WALLET_METHODS.forEach(
914
- (m) => this.wrapIssuerPublicMethod(m)
915
- );
916
- }
256
+ //#endregion
257
+ //#region src/issuer-wallet/issuer-spark-wallet.ts
258
+ const BURN_ADDRESS = "02".repeat(33);
259
+ /**
260
+ * Represents a Spark wallet with minting capabilities.
261
+ * This class extends the base SparkWallet with additional functionality for token minting,
262
+ * burning, and freezing operations.
263
+ */
264
+ var IssuerSparkWallet = class extends _buildonspark_spark_sdk.SparkWallet {
265
+ issuerTokenTransactionService;
266
+ tokenFreezeService;
267
+ tracerId = "issuer-sdk";
268
+ /**
269
+ * Initializes a new IssuerSparkWallet instance.
270
+ * Inherits the generic static initialize from the base class.
271
+ */
272
+ constructor(configOptions, signer) {
273
+ super(configOptions, signer);
274
+ this.issuerTokenTransactionService = new IssuerTokenTransactionService(this.config, this.connectionManager);
275
+ this.tokenFreezeService = new TokenFreezeService(this.config, this.connectionManager);
276
+ this.wrapIssuerSparkWalletMethods();
277
+ }
278
+ /**
279
+ * Gets the token balance for the issuer's token.
280
+ * @deprecated Use getIssuerTokenBalances() instead. This method will be removed in a future version.
281
+ * @returns An object containing the token balance as a bigint
282
+ *
283
+ * @throws {SparkValidationError} If multiple tokens are found for this issuer
284
+ */
285
+ async getIssuerTokenBalance() {
286
+ const publicKey = await super.getIdentityPublicKey();
287
+ const issuerBalance = [...(await this.getBalance()).tokenBalances.entries()].filter(([, info]) => info.tokenMetadata.tokenPublicKey === publicKey);
288
+ if (issuerBalance.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found for this issuer. Use getIssuerTokenBalances() instead.", {
289
+ field: "issuerTokenBalance",
290
+ expected: "single token",
291
+ actual: `${issuerBalance.length} tokens`
292
+ });
293
+ if (issuerBalance.length === 0) return {
294
+ tokenIdentifier: void 0,
295
+ balance: 0n
296
+ };
297
+ return {
298
+ tokenIdentifier: issuerBalance[0][0],
299
+ balance: issuerBalance[0][1].balance
300
+ };
301
+ }
302
+ /**
303
+ * Gets the token balances for the tokens that were issued by this user.
304
+ * @returns An array of objects containing the token identifier and balance
305
+ */
306
+ async getIssuerTokenBalances() {
307
+ const publicKey = await super.getIdentityPublicKey();
308
+ const issuerBalance = [...(await this.getBalance()).tokenBalances.entries()].filter(([, info]) => info.tokenMetadata.tokenPublicKey === publicKey);
309
+ if (issuerBalance.length === 0) return [{
310
+ tokenIdentifier: void 0,
311
+ balance: 0n
312
+ }];
313
+ return issuerBalance.map(([tokenIdentifier, { balance }]) => ({
314
+ tokenIdentifier,
315
+ balance
316
+ }));
317
+ }
318
+ /**
319
+ * Retrieves information about the issuer's token.
320
+ * @deprecated Use getIssuerTokensMetadata() instead. This method will be removed in a future version.
321
+ * @returns An object containing token information including public key, name, symbol, decimals, max supply, freeze status, and extra metadata
322
+ * @throws {SparkRequestError} If the token metadata cannot be retrieved
323
+ * @throws {SparkValidationError} If multiple tokens are found for this issuer
324
+ */
325
+ async getIssuerTokenMetadata() {
326
+ const tokensMetadata = await this.getIssuerTokensMetadata();
327
+ if (tokensMetadata.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("No tokens found. Create a token first.");
328
+ if (tokensMetadata.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found for this issuer. Please migrate to getIssuerTokensMetadata() instead.", {
329
+ field: "tokenMetadata",
330
+ value: tokensMetadata
331
+ });
332
+ return tokensMetadata[0];
333
+ }
334
+ /**
335
+ * Retrieves information about the tokens that were issued by this user.
336
+ * @param tokenIdentifiers - Optional array of specific token identifiers to fetch.
337
+ * If omitted, all tokens for this issuer are fetched.
338
+ * @returns An array of objects containing token information including public key, name, symbol, decimals, max supply, freeze status, and extra metadata
339
+ * @throws {SparkRequestError} If the token metadata cannot be retrieved
340
+ */
341
+ async getIssuerTokensMetadata(tokenIdentifiers) {
342
+ const issuerPublicKey = await super.getIdentityPublicKey();
343
+ const sparkTokenClient = await this.connectionManager.createSparkTokenClient(this.config.getCoordinatorAddress());
344
+ const filterByIdentifiers = Array.isArray(tokenIdentifiers) && tokenIdentifiers.length > 0;
345
+ const tokenIdentifierSet = filterByIdentifiers ? new Set(tokenIdentifiers) : void 0;
346
+ const request = {};
347
+ if (filterByIdentifiers) request.tokenIdentifiers = tokenIdentifiers.map((id) => (0, _buildonspark_spark_sdk.decodeBech32mTokenIdentifier)(id, this.config.getNetworkType()).tokenIdentifier);
348
+ else request.issuerPublicKeys = Array.of((0, _noble_curves_utils.hexToBytes)(issuerPublicKey));
349
+ try {
350
+ const response = await sparkTokenClient.query_token_metadata(request);
351
+ const tokenMetadata = [];
352
+ for (const metadata of response.tokenMetadata) {
353
+ const bech32mTokenIdentifier = (0, _buildonspark_spark_sdk.encodeBech32mTokenIdentifier)({
354
+ tokenIdentifier: metadata.tokenIdentifier,
355
+ network: this.config.getNetworkType()
356
+ });
357
+ if ((0, _noble_curves_utils.bytesToHex)(metadata.issuerPublicKey) !== issuerPublicKey) continue;
358
+ if (filterByIdentifiers && !tokenIdentifierSet.has(bech32mTokenIdentifier)) continue;
359
+ this.tokenMetadata.set(bech32mTokenIdentifier, metadata);
360
+ tokenMetadata.push({
361
+ tokenPublicKey: (0, _noble_curves_utils.bytesToHex)(metadata.issuerPublicKey),
362
+ rawTokenIdentifier: metadata.tokenIdentifier,
363
+ tokenName: metadata.tokenName,
364
+ tokenTicker: metadata.tokenTicker,
365
+ decimals: metadata.decimals,
366
+ maxSupply: (0, _noble_curves_utils.bytesToNumberBE)(metadata.maxSupply),
367
+ isFreezable: metadata.isFreezable,
368
+ extraMetadata: metadata.extraMetadata ? new Uint8Array(metadata.extraMetadata) : void 0,
369
+ bech32mTokenIdentifier
370
+ });
371
+ }
372
+ return tokenMetadata;
373
+ } catch (error) {
374
+ if (error instanceof _buildonspark_spark_sdk.SparkError) throw error;
375
+ throw new _buildonspark_spark_sdk.SparkRequestError("Failed to fetch token metadata", { error });
376
+ }
377
+ }
378
+ /**
379
+ * Retrieves the bech32m encoded token identifier for the issuer's token.
380
+ * @deprecated Use getIssuerTokenIdentifiers() instead. This method will be removed in a future version.
381
+ * @returns The bech32m encoded token identifier for the issuer's token
382
+ * @throws {SparkRequestError} If the token identifier cannot be retrieved
383
+ * @throws {SparkValidationError} If multiple tokens are found for this issuer
384
+ */
385
+ async getIssuerTokenIdentifier() {
386
+ const tokensMetadata = await this.getIssuerTokensMetadata();
387
+ if (tokensMetadata.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("No tokens found. Create a token first.");
388
+ if (tokensMetadata.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found. Use getIssuerTokenIdentifiers() instead.", {
389
+ method: "getIssuerTokenIdentifier",
390
+ availableTokens: tokensMetadata.map((t) => ({
391
+ tokenName: t.tokenName,
392
+ tokenTicker: t.tokenTicker,
393
+ bech32mTokenIdentifier: (0, _buildonspark_spark_sdk.encodeBech32mTokenIdentifier)({
394
+ tokenIdentifier: t.rawTokenIdentifier,
395
+ network: this.config.getNetworkType()
396
+ })
397
+ }))
398
+ });
399
+ return tokensMetadata[0].bech32mTokenIdentifier;
400
+ }
401
+ /**
402
+ * Retrieves the bech32m encoded token identifier for the issuer's token.
403
+ * @returns The bech32m encoded token identifier for the issuer's token
404
+ * @throws {SparkRequestError} If the token identifier cannot be retrieved
405
+ */
406
+ async getIssuerTokenIdentifiers() {
407
+ return (await this.getIssuerTokensMetadata()).map((metadata) => metadata.bech32mTokenIdentifier);
408
+ }
409
+ async createToken({ tokenName, tokenTicker, decimals, isFreezable, maxSupply = 0n, extraMetadata, returnIdentifierForCreate = false }) {
410
+ validateTokenParameters(tokenName, tokenTicker, decimals, maxSupply, extraMetadata);
411
+ const issuerPublicKey = await super.getIdentityPublicKey();
412
+ if (this.config.getTokenTransactionVersion() === "V2") {
413
+ const tokenTransaction = await this.issuerTokenTransactionService.constructCreateTokenTransaction((0, _noble_curves_utils.hexToBytes)(issuerPublicKey), tokenName, tokenTicker, decimals, maxSupply, isFreezable, extraMetadata);
414
+ const { finalTokenTransactionHash, tokenIdentifier } = await this.issuerTokenTransactionService.broadcastTokenTransactionDetailed(tokenTransaction);
415
+ const txHash = (0, _noble_curves_utils.bytesToHex)(finalTokenTransactionHash);
416
+ if (returnIdentifierForCreate) {
417
+ if (!tokenIdentifier) throw new _buildonspark_spark_sdk.SparkRequestError("Server response missing expected field: tokenIdentifier", {
418
+ operation: "broadcast_transaction",
419
+ field: "tokenIdentifier"
420
+ });
421
+ return {
422
+ tokenIdentifier: (0, _buildonspark_spark_sdk.encodeBech32mTokenIdentifier)({
423
+ tokenIdentifier,
424
+ network: this.config.getNetworkType()
425
+ }),
426
+ transactionHash: txHash
427
+ };
428
+ }
429
+ return txHash;
430
+ } else {
431
+ const partialTokenTransaction = await this.issuerTokenTransactionService.constructPartialCreateTokenTransaction((0, _noble_curves_utils.hexToBytes)(issuerPublicKey), tokenName, tokenTicker, decimals, maxSupply, isFreezable, extraMetadata);
432
+ const broadcastResponse = await this.issuerTokenTransactionService.broadcastTokenTransactionV3Detailed(partialTokenTransaction);
433
+ const finalTransactionHash = (0, _noble_curves_utils.bytesToHex)(await (0, _buildonspark_spark_sdk.hashFinalTokenTransaction)(broadcastResponse.finalTokenTransaction));
434
+ if (returnIdentifierForCreate) {
435
+ if (!broadcastResponse.tokenIdentifier) throw new _buildonspark_spark_sdk.SparkRequestError("Server response missing expected field: tokenIdentifier", {
436
+ operation: "broadcast_transaction",
437
+ field: "tokenIdentifier"
438
+ });
439
+ return {
440
+ tokenIdentifier: (0, _buildonspark_spark_sdk.encodeBech32mTokenIdentifier)({
441
+ tokenIdentifier: broadcastResponse.tokenIdentifier,
442
+ network: this.config.getNetworkType()
443
+ }),
444
+ transactionHash: finalTransactionHash
445
+ };
446
+ }
447
+ return finalTransactionHash;
448
+ }
449
+ }
450
+ async mintTokens(tokenAmountOrParams) {
+ let tokenAmount;
+ let bech32mTokenIdentifier;
+ if (typeof tokenAmountOrParams === "bigint") {
+ tokenAmount = tokenAmountOrParams;
+ bech32mTokenIdentifier = void 0;
+ } else {
+ tokenAmount = tokenAmountOrParams.tokenAmount;
+ bech32mTokenIdentifier = tokenAmountOrParams.tokenIdentifier;
+ }
+ const issuerTokenPublicKeyBytes = (0, _noble_curves_utils.hexToBytes)(await super.getIdentityPublicKey());
+ if (bech32mTokenIdentifier === void 0) {
+ const tokensMetadata = await this.getIssuerTokensMetadata();
+ if (tokensMetadata.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("No tokens found. Create a token first.");
+ if (tokensMetadata.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found. Please use mintTokens({ tokenAmount, tokenIdentifier }) instead.", {
+ field: "tokenIdentifier",
+ availableTokens: tokensMetadata.map((t) => ({
+ tokenName: t.tokenName,
+ tokenTicker: t.tokenTicker,
+ bech32mTokenIdentifier: t.bech32mTokenIdentifier
+ }))
+ });
+ bech32mTokenIdentifier = tokensMetadata[0].bech32mTokenIdentifier;
+ } else await this.validateTokenIssuer(bech32mTokenIdentifier);
+ const rawTokenIdentifier = (0, _buildonspark_spark_sdk.decodeBech32mTokenIdentifier)(bech32mTokenIdentifier, this.config.getNetworkType()).tokenIdentifier;
+ if (this.config.getTokenTransactionVersion() === "V2") {
+ const tokenTransaction = await this.issuerTokenTransactionService.constructMintTokenTransaction(rawTokenIdentifier, issuerTokenPublicKeyBytes, tokenAmount);
+ return await this.issuerTokenTransactionService.broadcastTokenTransaction(tokenTransaction);
+ } else {
+ const partialTokenTransaction = await this.issuerTokenTransactionService.constructPartialMintTokenTransaction(rawTokenIdentifier, issuerTokenPublicKeyBytes, tokenAmount);
+ return await this.issuerTokenTransactionService.broadcastTokenTransactionV3(partialTokenTransaction);
+ }
+ }
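mintTokens accepts either a bare bigint (only unambiguous when the issuer has exactly one token) or an object naming the token. A sketch of both forms, with illustrative amounts:

    // Single-token issuer: the bigint shorthand resolves the identifier itself.
    await wallet.mintTokens(1000n);

    // Multiple tokens: the object form is required, per the error above.
    await wallet.mintTokens({
      tokenAmount: 1000n,
      tokenIdentifier: identifiers[0], // bech32m identifier, e.g. from getIssuerTokenIdentifiers()
    });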
+ async burnTokens(tokenAmountOrParams, selectedOutputs) {
+ let burnTokenIdentifier;
+ let tokenAmount;
+ let outputs;
+ if (typeof tokenAmountOrParams === "bigint") {
+ tokenAmount = tokenAmountOrParams;
+ outputs = selectedOutputs;
+ const tokenIdentifiers = await this.getIssuerTokenIdentifiers();
+ if (tokenIdentifiers.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("No tokens found. Create a token first.");
+ if (tokenIdentifiers.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found. Use burnTokens({ tokenIdentifier, tokenAmount, selectedOutputs }) to specify which token to burn.", {
+ field: "tokenIdentifier",
+ availableTokens: tokenIdentifiers
+ });
+ burnTokenIdentifier = tokenIdentifiers[0];
+ } else {
+ tokenAmount = tokenAmountOrParams.tokenAmount;
+ outputs = tokenAmountOrParams.selectedOutputs;
+ await this.validateTokenIssuer(tokenAmountOrParams.tokenIdentifier);
+ burnTokenIdentifier = tokenAmountOrParams.tokenIdentifier;
+ }
+ const burnAddress = (0, _buildonspark_spark_sdk.encodeSparkAddress)({
+ identityPublicKey: BURN_ADDRESS,
+ network: this.config.getNetworkType()
+ });
+ return await this.transferTokens({
+ tokenIdentifier: burnTokenIdentifier,
+ tokenAmount,
+ receiverSparkAddress: burnAddress,
+ selectedOutputs: outputs
+ });
+ }
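As the body above shows, burning is implemented as a transferTokens call to the well-known burn address. A sketch of both call forms (selectedOutputs is optional in each):

    await wallet.burnTokens(250n); // single-token shorthand

    await wallet.burnTokens({
      tokenIdentifier: identifiers[0], // bech32m identifier (assumed in scope)
      tokenAmount: 250n,
    });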
+ async freezeTokens(sparkAddressOrParams) {
+ let bech32mTokenIdentifier;
+ let sparkAddress;
+ if (typeof sparkAddressOrParams === "string") {
+ sparkAddress = sparkAddressOrParams;
+ bech32mTokenIdentifier = void 0;
+ } else {
+ sparkAddress = sparkAddressOrParams.sparkAddress;
+ bech32mTokenIdentifier = sparkAddressOrParams.tokenIdentifier;
+ }
+ const decodedOwnerPubkey = (0, _buildonspark_spark_sdk.decodeSparkAddress)(sparkAddress, this.config.getNetworkType());
+ if (bech32mTokenIdentifier === void 0) {
+ const tokenIdentifiers = await this.getIssuerTokenIdentifiers();
+ if (tokenIdentifiers.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("No tokens found. Create a token first.");
+ if (tokenIdentifiers.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found. Use freezeTokens({ tokenIdentifier, sparkAddress }) instead.", {
+ field: "tokenIdentifier",
+ availableTokens: tokenIdentifiers
+ });
+ bech32mTokenIdentifier = tokenIdentifiers[0];
+ } else await this.validateTokenIssuer(bech32mTokenIdentifier);
+ const rawTokenIdentifier = (0, _buildonspark_spark_sdk.decodeBech32mTokenIdentifier)(bech32mTokenIdentifier, this.config.getNetworkType()).tokenIdentifier;
+ const response = await this.tokenFreezeService.freezeTokens({
+ ownerPublicKey: (0, _noble_curves_utils.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
+ tokenIdentifier: rawTokenIdentifier
+ });
+ const tokenAmount = (0, _noble_curves_utils.bytesToNumberBE)(response.impactedTokenAmount);
+ return {
+ impactedOutputIds: response.impactedOutputIds,
+ impactedTokenAmount: tokenAmount
+ };
+ }
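A sketch of freezing a holder's outputs. `holderAddress` is an illustrative Spark address string; the return shape matches the object built above:

    const frozen = await wallet.freezeTokens({
      sparkAddress: holderAddress,     // illustrative Spark address (assumed)
      tokenIdentifier: identifiers[0],
    });
    // frozen.impactedOutputIds: array of output ids; frozen.impactedTokenAmount: bigint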
+ async unfreezeTokens(sparkAddressOrParams) {
+ let bech32mTokenIdentifier;
+ let sparkAddress;
+ if (typeof sparkAddressOrParams === "string") {
+ sparkAddress = sparkAddressOrParams;
+ bech32mTokenIdentifier = void 0;
+ } else {
+ sparkAddress = sparkAddressOrParams.sparkAddress;
+ bech32mTokenIdentifier = sparkAddressOrParams.tokenIdentifier;
+ }
+ if (bech32mTokenIdentifier === void 0) {
+ const tokenIdentifiers = await this.getIssuerTokenIdentifiers();
+ if (tokenIdentifiers.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("No tokens found. Create a token first.");
+ if (tokenIdentifiers.length > 1) throw new _buildonspark_spark_sdk.SparkValidationError("Multiple tokens found. Use unfreezeTokens({ tokenIdentifier, sparkAddress }) instead.", {
+ field: "tokenIdentifier",
+ availableTokens: tokenIdentifiers
+ });
+ bech32mTokenIdentifier = tokenIdentifiers[0];
+ } else await this.validateTokenIssuer(bech32mTokenIdentifier);
+ const decodedOwnerPubkey = (0, _buildonspark_spark_sdk.decodeSparkAddress)(sparkAddress, this.config.getNetworkType());
+ const rawTokenIdentifier = (0, _buildonspark_spark_sdk.decodeBech32mTokenIdentifier)(bech32mTokenIdentifier, this.config.getNetworkType()).tokenIdentifier;
+ const response = await this.tokenFreezeService.unfreezeTokens({
+ ownerPublicKey: (0, _noble_curves_utils.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
+ tokenIdentifier: rawTokenIdentifier
+ });
+ const tokenAmount = (0, _noble_curves_utils.bytesToNumberBE)(response.impactedTokenAmount);
+ return {
+ impactedOutputIds: response.impactedOutputIds,
+ impactedTokenAmount: tokenAmount
+ };
+ }
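Unfreezing mirrors the call above and returns the same shape:

    const unfrozen = await wallet.unfreezeTokens({
      sparkAddress: holderAddress,     // same assumed holder as above
      tokenIdentifier: identifiers[0],
    });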
+ /**
+ * Retrieves the distribution information for the issuer's token.
+ * @throws {SparkError} This feature is not yet supported
+ */
+ async getIssuerTokenDistribution() {
+ throw new _buildonspark_spark_sdk.SparkError("Token distribution is not yet supported");
+ }
+ /**
+ * Validates that the token belongs to this issuer.
+ * If a token is in the cache, it must belong to this issuer.
+ * @param tokenIdentifier - The bech32m encoded token identifier
+ * @throws {SparkValidationError} If the token is not found for this issuer
+ * @private
+ */
+ async validateTokenIssuer(tokenIdentifier) {
+ const issuerPublicKey = await super.getIdentityPublicKey();
+ const cachedMetadata = this.tokenMetadata.get(tokenIdentifier);
+ if (cachedMetadata) {
+ if ((0, _noble_curves_utils.bytesToHex)(cachedMetadata.issuerPublicKey) !== issuerPublicKey) throw new _buildonspark_spark_sdk.SparkValidationError("Token was not issued by this issuer", {
+ field: "issuerPublicKey",
+ tokenIdentifier,
+ expected: issuerPublicKey,
+ actual: (0, _noble_curves_utils.bytesToHex)(cachedMetadata.issuerPublicKey)
+ });
+ } else {
+ const tokensMetadata = await this.getIssuerTokensMetadata([tokenIdentifier]);
+ if (tokensMetadata.length === 0) throw new _buildonspark_spark_sdk.SparkValidationError("Token not found for this issuer", {
+ field: "tokenIdentifier",
+ value: tokenIdentifier
+ });
+ if (tokensMetadata[0].tokenPublicKey !== issuerPublicKey) throw new _buildonspark_spark_sdk.SparkValidationError("Token was not issued by this issuer", {
+ field: "issuerPublicKey",
+ tokenIdentifier,
+ expected: issuerPublicKey,
+ actual: tokensMetadata[0].tokenPublicKey
+ });
+ }
+ }
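Several methods above round-trip identifiers through the spark-sdk bech32m helpers. A small sketch of that conversion, where `rawId` (a Uint8Array) and `network` are assumed inputs:

    const {
      encodeBech32mTokenIdentifier,
      decodeBech32mTokenIdentifier,
    } = require("@buildonspark/spark-sdk");

    // Encode a raw identifier for display/API use, then decode it back.
    const encoded = encodeBech32mTokenIdentifier({ tokenIdentifier: rawId, network });
    const { tokenIdentifier: raw } = decodeBech32mTokenIdentifier(encoded, network);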
+ getTraceName(methodName) {
+ return `IssuerSparkWallet.${methodName}`;
+ }
+ wrapIssuerPublicMethod(methodName) {
+ const original = this[methodName];
+ if (typeof original !== "function") throw new Error(`Method ${methodName} is not a function on IssuerSparkWallet.`);
+ const originalFn = original;
+ this[methodName] = _buildonspark_spark_sdk.SparkWallet.wrapMethod(String(methodName), originalFn, this);
+ }
+ wrapIssuerSparkWalletMethods() {
+ PUBLIC_ISSUER_SPARK_WALLET_METHODS.forEach((m) => this.wrapIssuerPublicMethod(m));
+ }
  };
- var PUBLIC_ISSUER_SPARK_WALLET_METHODS = [
- "getIssuerTokenBalance",
- "getIssuerTokenBalances",
- "getIssuerTokenMetadata",
- "getIssuerTokensMetadata",
- "getIssuerTokenIdentifier",
- "getIssuerTokenIdentifiers",
- "createToken",
- "mintTokens",
- "burnTokens",
- "freezeTokens",
- "unfreezeTokens",
- "getIssuerTokenDistribution"
+ const PUBLIC_ISSUER_SPARK_WALLET_METHODS = [
+ "getIssuerTokenBalance",
+ "getIssuerTokenBalances",
+ "getIssuerTokenMetadata",
+ "getIssuerTokensMetadata",
+ "getIssuerTokenIdentifier",
+ "getIssuerTokenIdentifiers",
+ "createToken",
+ "mintTokens",
+ "burnTokens",
+ "freezeTokens",
+ "unfreezeTokens",
+ "getIssuerTokenDistribution"
  ];

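Every name in this list is rebound through SparkWallet.wrapMethod at construction time (see wrapIssuerPublicMethod above), which is how tracing spans are attached. A generic sketch of the same rebinding pattern, using a hypothetical wrap() helper in place of SparkWallet.wrapMethod:

    // Hypothetical stand-in for SparkWallet.wrapMethod (not the SDK's implementation).
    function wrap(name, fn, self) {
      return async function (...args) {
        console.time(name);            // stand-in for opening a trace span
        try {
          return await fn.apply(self, args);
        } finally {
          console.timeEnd(name);       // stand-in for closing the span
        }
      };
    }
    // Mirrors wrapIssuerPublicMethod: this[methodName] = wrap(String(methodName), originalFn, this);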
- // src/issuer-wallet/issuer-spark-wallet.node.ts
- var import_spark_sdk7 = require("@buildonspark/spark-sdk");
+ //#endregion
+ //#region src/issuer-wallet/issuer-spark-wallet.node.ts
  var IssuerSparkWalletNodeJS = class extends IssuerSparkWallet {
- buildConnectionManager(config) {
- return new import_spark_sdk7.ConnectionManager(config);
- }
- initializeTracerEnv({
- spanProcessors,
- traceUrls
- }) {
- (0, import_spark_sdk7.initializeTracerEnv)({ spanProcessors, traceUrls });
- }
+ buildConnectionManager(config) {
+ return new _buildonspark_spark_sdk.ConnectionManager(config);
+ }
+ initializeTracerEnv({ spanProcessors, traceUrls }) {
+ (0, _buildonspark_spark_sdk.initializeTracerEnv)({
+ spanProcessors,
+ traceUrls
+ });
+ }
  };

- // src/index-shared.ts
- var import_spark_sdk8 = require("@buildonspark/spark-sdk");
- var import_spark_sdk9 = require("@buildonspark/spark-sdk");
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
- DefaultSparkSigner,
- IssuerSparkWallet,
- UnsafeStatelessSparkSigner,
- WalletConfig
+ //#endregion
+ Object.defineProperty(exports, 'DefaultSparkSigner', {
+ enumerable: true,
+ get: function () {
+ return _buildonspark_spark_sdk.DefaultSparkSigner;
+ }
  });
+ exports.IssuerSparkWallet = IssuerSparkWalletNodeJS;
+ Object.defineProperty(exports, 'UnsafeStatelessSparkSigner', {
+ enumerable: true,
+ get: function () {
+ return _buildonspark_spark_sdk.UnsafeStatelessSparkSigner;
+ }
+ });
+ Object.defineProperty(exports, 'WalletConfig', {
+ enumerable: true,
+ get: function () {
+ return _buildonspark_spark_sdk.WalletConfig;
+ }
+ });
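Given the getter-based export shape above, a CommonJS consumer sketch:

    const {
      IssuerSparkWallet,          // the Node.js subclass, re-exported under this name
      DefaultSparkSigner,
      UnsafeStatelessSparkSigner,
      WalletConfig,
    } = require("@buildonspark/issuer-sdk");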