@buildonspark/issuer-sdk 0.0.99 → 0.0.100
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +9 -0
- package/dist/native/index.react-native.cjs +588 -0
- package/dist/native/index.react-native.d.cts +149 -0
- package/dist/native/index.react-native.d.ts +149 -0
- package/dist/native/index.react-native.js +576 -0
- package/package.json +43 -4
- package/src/index.react-native.ts +4 -0
- package/src/issuer-wallet/issuer-spark-wallet.react-native.ts +8 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,14 @@
|
|
|
1
1
|
# @buildonspark/issuer-sdk
|
|
2
2
|
|
|
3
|
+
## 0.0.100
|
|
4
|
+
|
|
5
|
+
### Patch Changes
|
|
6
|
+
|
|
7
|
+
- - Fix: replaced a lossy comparison used when sorting token outputs
|
|
8
|
+
- Added React Native support and export directly from @buildonspark/issuer-sdk in RN
|
|
9
|
+
- Updated dependencies
|
|
10
|
+
- @buildonspark/spark-sdk@0.3.7
|
|
11
|
+
|
|
3
12
|
## 0.0.99
|
|
4
13
|
|
|
5
14
|
### Patch Changes
|
|
@@ -0,0 +1,588 @@
|
|
|
1
|
+
"use strict";
// Polyfill crypto.getRandomValues before any SDK code runs — key generation
// in React Native depends on it.
// FIX: "use strict" must be the first statement of the module (a directive
// prologue); previously it came after the require() call and was inert.
require("react-native-get-random-values");
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// esbuild helper: defines lazy enumerable getters on `target` for every
// key of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// esbuild helper: copies own properties of `from` onto `to` as getters,
// skipping `except` and anything `to` already owns.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// esbuild helper: marks a namespace object as an ES module for interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
20
|
+
|
|
21
|
+
// src/index.react-native.ts
// Module export wiring: the React Native entry point re-exports the RN
// wallet subclass under the public name `IssuerSparkWallet`, so RN apps
// import the same symbol as Node consumers.
var index_react_native_exports = {};
__export(index_react_native_exports, {
  IssuerSparkWallet: () => IssuerSparkWalletReactNative
});
module.exports = __toCommonJS(index_react_native_exports);
|
|
27
|
+
|
|
28
|
+
// buffer.js
// Environment shims for React Native / browser-like hosts.
var import_buffer = require("buffer");
// Install Node's Buffer globally when the host does not already provide
// one (React Native has no built-in Buffer).
if (globalThis.Buffer === undefined) {
  globalThis.Buffer = import_buffer.Buffer;
}
// When running under a `window` host, alias the Node-style `global` and
// `globalThis` names onto `window` for libraries that expect them.
if (typeof window !== "undefined") {
  if (window.global === undefined) {
    window.global = window;
  }
  if (window.globalThis === undefined) {
    window.globalThis = window;
  }
}
|
|
41
|
+
|
|
42
|
+
// src/issuer-wallet/issuer-spark-wallet.ts
var import_spark_sdk5 = require("@buildonspark/spark-sdk");
var import_utils4 = require("@noble/curves/utils");

// src/services/freeze.ts
var import_spark_sdk2 = require("@buildonspark/spark-sdk");
var import_utils2 = require("@noble/curves/utils");

// src/utils/token-hashing.ts
var import_utils = require("@scure/btc-signer/utils");
var import_spark_sdk = require("@buildonspark/spark-sdk");
/**
 * Computes the canonical SHA-256 digest of a freeze-tokens payload.
 *
 * Each field is hashed individually (an absent optional field contributes
 * the SHA-256 of the empty input), and the per-field digests are folded in
 * order into a final SHA-256. Field order and encodings are signed by the
 * issuer and checked operator-side — do not reorder or change them without
 * a matching change to the operator implementation.
 *
 * @param payload - Freeze/unfreeze payload: version, ownerPublicKey,
 *   tokenIdentifier, shouldUnfreeze, issuerProvidedTimestamp,
 *   operatorIdentityPublicKey.
 * @returns The final 32-byte digest as a Uint8Array.
 * @throws {ValidationError} If `payload` is null/undefined.
 */
function hashFreezeTokensPayload(payload) {
  if (!payload) {
    throw new import_spark_sdk.ValidationError("Freeze tokens payload cannot be nil", {
      field: "payload",
      value: payload,
      expected: "valid freeze tokens payload"
    });
  }
  let allHashes = [];
  // Field 1: version as a 4-byte big-endian unsigned integer.
  const versionHashObj = import_utils.sha256.create();
  const versionBytes = new Uint8Array(4);
  new DataView(versionBytes.buffer).setUint32(
    0,
    payload.version,
    false
    // false for big-endian
  );
  versionHashObj.update(versionBytes);
  allHashes.push(versionHashObj.digest());
  // Field 2: owner public key bytes (empty hash when absent).
  const ownerPubKeyHash = import_utils.sha256.create();
  if (payload.ownerPublicKey) {
    ownerPubKeyHash.update(payload.ownerPublicKey);
  }
  allHashes.push(ownerPubKeyHash.digest());
  // Field 3: raw token identifier bytes (empty hash when absent).
  const tokenIdentifierHash = import_utils.sha256.create();
  if (payload.tokenIdentifier) {
    tokenIdentifierHash.update(payload.tokenIdentifier);
  }
  allHashes.push(tokenIdentifierHash.digest());
  // Field 4: freeze/unfreeze flag as a single byte (1 = unfreeze).
  const shouldUnfreezeHash = import_utils.sha256.create();
  shouldUnfreezeHash.update(new Uint8Array([payload.shouldUnfreeze ? 1 : 0]));
  allHashes.push(shouldUnfreezeHash.digest());
  // Field 5: issuer-provided timestamp as an 8-byte LITTLE-endian u64.
  // NOTE(review): endianness differs from the version field above
  // (big-endian); presumably this mirrors the operator protocol — confirm
  // before "fixing".
  const timestampHash = import_utils.sha256.create();
  if (payload.issuerProvidedTimestamp) {
    const timestampBytes = new Uint8Array(8);
    new DataView(timestampBytes.buffer).setBigUint64(
      0,
      BigInt(payload.issuerProvidedTimestamp),
      true
      // true for little-endian
    );
    timestampHash.update(timestampBytes);
  }
  allHashes.push(timestampHash.digest());
  // Field 6: operator identity public key bytes (empty hash when absent).
  const operatorPubKeyHash = import_utils.sha256.create();
  if (payload.operatorIdentityPublicKey) {
    operatorPubKeyHash.update(payload.operatorIdentityPublicKey);
  }
  allHashes.push(operatorPubKeyHash.digest());
  // Fold all per-field digests, in order, into the final digest.
  const finalHash = import_utils.sha256.create();
  for (const hash of allHashes) {
    finalHash.update(hash);
  }
  return finalHash.digest();
}
|
|
108
|
+
|
|
109
|
+
// src/services/freeze.ts
/**
 * Sends signed freeze/unfreeze requests for a token owner to every signing
 * operator in parallel and surfaces the first successful response.
 */
var TokenFreezeService = class {
  config;
  connectionManager;
  constructor(config, connectionManager) {
    this.config = config;
    this.connectionManager = connectionManager;
  }
  /** Freezes the owner's outputs for `tokenIdentifier` (shouldUnfreeze = false). */
  async freezeTokens({
    ownerPublicKey,
    tokenIdentifier
  }) {
    return this.freezeOperation(ownerPublicKey, false, tokenIdentifier);
  }
  /** Unfreezes the owner's outputs for `tokenIdentifier` (shouldUnfreeze = true). */
  async unfreezeTokens({
    ownerPublicKey,
    tokenIdentifier
  }) {
    return this.freezeOperation(ownerPublicKey, true, tokenIdentifier);
  }
  /**
   * Builds, signs, and broadcasts the freeze/unfreeze payload to every
   * signing operator.
   * @throws {NetworkError} Wrapped per-operator failure (via the settled
   *   results collected below).
   */
  async freezeOperation(ownerPublicKey, shouldUnfreeze, tokenIdentifier) {
    const signingOperators = this.config.getSigningOperators();
    const issuerProvidedTimestamp = Date.now();
    // Fan out to all operators in parallel; allSettled so one operator's
    // failure does not abort the in-flight requests to the others.
    const freezeResponses = await Promise.allSettled(
      Object.entries(signingOperators).map(async ([identifier, operator]) => {
        const sparkTokenClient = await this.connectionManager.createSparkTokenClient(operator.address);
        // Payload fields must match hashFreezeTokensPayload's hashing order.
        const freezeTokensPayload = {
          version: 1,
          ownerPublicKey,
          tokenIdentifier,
          shouldUnfreeze,
          issuerProvidedTimestamp,
          operatorIdentityPublicKey: (0, import_utils2.hexToBytes)(operator.identityPublicKey)
        };
        const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
        const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
        try {
          const response = await sparkTokenClient.freeze_tokens({
            freezeTokensPayload,
            issuerSignature
          });
          return {
            identifier,
            response
          };
        } catch (error) {
          // Wrap transport errors with the operator address for diagnosis;
          // the original error is preserved as the cause argument.
          throw new import_spark_sdk2.NetworkError(
            `Failed to send a freeze/unfreeze operation to operator: ${operator.address}`,
            {
              operation: "freeze_tokens",
              errorCount: 1,
              errors: error instanceof Error ? error.message : String(error)
            },
            error instanceof Error ? error : void 0
          );
        }
      })
    );
    // collectResponses (spark-sdk) aggregates the settled results.
    // NOTE(review): only the first successful operator response is returned
    // to the caller — presumably all operators agree; confirm.
    const successfulResponses = (0, import_spark_sdk2.collectResponses)(freezeResponses);
    return successfulResponses[0].response;
  }
};
|
|
171
|
+
|
|
172
|
+
// src/services/token-transactions.ts
var import_spark_sdk3 = require("@buildonspark/spark-sdk");
var import_utils3 = require("@noble/curves/utils");
/**
 * Issuer-side token transaction builder. Extends the base
 * TokenTransactionService with constructors for mint and create (announce)
 * transactions. The returned object shapes are wire-format protos — do not
 * add, remove, or rename fields.
 */
var IssuerTokenTransactionService = class extends import_spark_sdk3.TokenTransactionService {
  constructor(config, connectionManager) {
    super(config, connectionManager);
  }
  /**
   * Builds a version-2 mint transaction that mints `tokenAmount` of the
   * token identified by `rawTokenIdentifierBytes` to the issuer itself.
   * @returns The unsigned token transaction proto.
   */
  async constructMintTokenTransaction(rawTokenIdentifierBytes, issuerTokenPublicKey, tokenAmount) {
    return {
      version: 2,
      network: this.config.getNetworkProto(),
      tokenInputs: {
        $case: "mintInput",
        mintInput: {
          issuerPublicKey: issuerTokenPublicKey,
          tokenIdentifier: rawTokenIdentifierBytes
        }
      },
      tokenOutputs: [
        {
          // Minted tokens are credited to the issuer's own key.
          ownerPublicKey: issuerTokenPublicKey,
          tokenIdentifier: rawTokenIdentifierBytes,
          // Amounts travel as 16-byte big-endian (u128) on the wire.
          tokenAmount: (0, import_utils3.numberToBytesBE)(tokenAmount, 16)
        }
      ],
      clientCreatedTimestamp: /* @__PURE__ */ new Date(),
      sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
      expiryTime: void 0,
      invoiceAttachments: []
    };
  }
  /**
   * Builds a version-2 create (token announcement) transaction with the
   * given token parameters; it produces no outputs.
   * @returns The unsigned token transaction proto.
   */
  async constructCreateTokenTransaction(tokenPublicKey, tokenName, tokenTicker, decimals, maxSupply, isFreezable) {
    return {
      version: 2,
      network: this.config.getNetworkProto(),
      tokenInputs: {
        $case: "createInput",
        createInput: {
          issuerPublicKey: tokenPublicKey,
          tokenName,
          tokenTicker,
          decimals,
          // Max supply travels as 16-byte big-endian (u128) on the wire.
          maxSupply: (0, import_utils3.numberToBytesBE)(maxSupply, 16),
          isFreezable
        }
      },
      tokenOutputs: [],
      clientCreatedTimestamp: /* @__PURE__ */ new Date(),
      sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
      expiryTime: void 0,
      invoiceAttachments: []
    };
  }
};
|
|
226
|
+
|
|
227
|
+
// src/utils/create-validation.ts
var import_spark_sdk4 = require("@buildonspark/spark-sdk");
// True when `value` is already in Unicode Normalization Form C.
function isNfcNormalized(value) {
  return value.normalize("NFC") === value;
}
// Size limits are expressed in UTF-8 BYTES, not characters.
var MIN_NAME_SIZE = 3;
var MAX_NAME_SIZE = 20;
var MIN_SYMBOL_SIZE = 3;
var MAX_SYMBOL_SIZE = 6;
var MAX_DECIMALS = 255;
// Largest value representable in the wire format's u128 amount fields.
var MAXIMUM_MAX_SUPPLY = (1n << 128n) - 1n;
/**
 * Validates token-creation parameters before building the announce
 * transaction.
 *
 * @param tokenName - NFC-normalized name, 3–20 UTF-8 bytes.
 * @param tokenTicker - NFC-normalized ticker, 3–6 UTF-8 bytes.
 * @param decimals - Integer in [0, 255].
 * @param maxSupply - BigInt in [0, 2^128 - 1].
 * @throws {ValidationError} On the first violated constraint.
 */
function validateTokenParameters(tokenName, tokenTicker, decimals, maxSupply) {
  if (!isNfcNormalized(tokenName)) {
    throw new import_spark_sdk4.ValidationError("Token name must be NFC-normalised UTF-8", {
      field: "tokenName",
      value: tokenName,
      expected: "NFC normalised string"
    });
  }
  if (!isNfcNormalized(tokenTicker)) {
    throw new import_spark_sdk4.ValidationError("Token ticker must be NFC-normalised UTF-8", {
      field: "tokenTicker",
      value: tokenTicker,
      expected: "NFC normalised string"
    });
  }
  // Buffer.byteLength measures the UTF-8 size directly, without allocating
  // a throwaway Buffer as Buffer.from(...).length did.
  const nameBytes = import_buffer.Buffer.byteLength(tokenName, "utf-8");
  if (nameBytes < MIN_NAME_SIZE || nameBytes > MAX_NAME_SIZE) {
    throw new import_spark_sdk4.ValidationError(
      `Token name must be between ${MIN_NAME_SIZE} and ${MAX_NAME_SIZE} bytes`,
      {
        field: "tokenName",
        value: tokenName,
        actualLength: nameBytes,
        expected: `>=${MIN_NAME_SIZE} and <=${MAX_NAME_SIZE}`
      }
    );
  }
  const tickerBytes = import_buffer.Buffer.byteLength(tokenTicker, "utf-8");
  if (tickerBytes < MIN_SYMBOL_SIZE || tickerBytes > MAX_SYMBOL_SIZE) {
    throw new import_spark_sdk4.ValidationError(
      `Token ticker must be between ${MIN_SYMBOL_SIZE} and ${MAX_SYMBOL_SIZE} bytes`,
      {
        field: "tokenTicker",
        value: tokenTicker,
        actualLength: tickerBytes,
        expected: `>=${MIN_SYMBOL_SIZE} and <=${MAX_SYMBOL_SIZE}`
      }
    );
  }
  if (!Number.isSafeInteger(decimals) || decimals < 0 || decimals > MAX_DECIMALS) {
    throw new import_spark_sdk4.ValidationError(
      `Decimals must be an integer between 0 and ${MAX_DECIMALS}`,
      {
        field: "decimals",
        value: decimals,
        expected: `>=0 and <=${MAX_DECIMALS}`
      }
    );
  }
  if (maxSupply < 0n || maxSupply > MAXIMUM_MAX_SUPPLY) {
    throw new import_spark_sdk4.ValidationError(`maxSupply must be between 0 and 2^128-1`, {
      field: "maxSupply",
      value: maxSupply.toString(),
      expected: `>=0 and <=${MAXIMUM_MAX_SUPPLY.toString()}`
    });
  }
}
|
|
295
|
+
|
|
296
|
+
// src/issuer-wallet/issuer-spark-wallet.ts
// Provably unspendable owner key used as the destination for burns.
var BURN_ADDRESS = "02".repeat(33);
var IssuerSparkWallet = class extends import_spark_sdk5.SparkWallet {
  issuerTokenTransactionService;
  tokenFreezeService;
  tracerId = "issuer-sdk";
  /**
   * Initializes a new IssuerSparkWallet instance.
   * Inherits the generic static initialize from the base class.
   */
  constructor(configOptions, signer) {
    super(configOptions, signer);
    this.issuerTokenTransactionService = new IssuerTokenTransactionService(
      this.config,
      this.connectionManager
    );
    this.tokenFreezeService = new TokenFreezeService(
      this.config,
      this.connectionManager
    );
    this.wrapIssuerSparkWalletMethodsWithTracing();
  }
  /**
   * Gets the token balance for the issuer's token.
   * @returns An object containing the token balance as a bigint
   */
  async getIssuerTokenBalance() {
    const publicKey = await super.getIdentityPublicKey();
    const balanceObj = await this.getBalance();
    // FIX: guard BEFORE dereferencing. Previously tokenBalances.entries()
    // was called first, so the `!balanceObj.tokenBalances` check could
    // never fire — a missing map threw instead of returning the zero
    // balance.
    if (!balanceObj.tokenBalances) {
      return {
        tokenIdentifier: void 0,
        balance: 0n
      };
    }
    const issuerBalance = [...balanceObj.tokenBalances.entries()].find(
      ([, info]) => info.tokenMetadata.tokenPublicKey === publicKey
    );
    if (issuerBalance === void 0) {
      return {
        tokenIdentifier: void 0,
        balance: 0n
      };
    }
    return {
      tokenIdentifier: issuerBalance[0] ?? void 0,
      balance: issuerBalance[1].balance
    };
  }
  /**
   * Retrieves information about the issuer's token.
   * @returns An object containing token information including public key, name, symbol, decimals, max supply, and freeze status
   * @throws {ValidationError} If no token has been created for this issuer yet
   * @throws {NetworkError} If the token metadata cannot be retrieved
   */
  async getIssuerTokenMetadata() {
    const issuerPublicKey = await super.getIdentityPublicKey();
    // Serve from the in-memory metadata cache when possible.
    const tokenMetadata = this.tokenMetadata;
    const cachedIssuerTokenMetadata = [...tokenMetadata.entries()].find(
      ([, metadata]) => (0, import_utils4.bytesToHex)(metadata.issuerPublicKey) === issuerPublicKey
    );
    if (cachedIssuerTokenMetadata !== void 0) {
      const metadata = cachedIssuerTokenMetadata[1];
      return {
        tokenPublicKey: (0, import_utils4.bytesToHex)(metadata.issuerPublicKey),
        rawTokenIdentifier: metadata.tokenIdentifier,
        tokenName: metadata.tokenName,
        tokenTicker: metadata.tokenTicker,
        decimals: metadata.decimals,
        maxSupply: (0, import_utils4.bytesToNumberBE)(metadata.maxSupply),
        isFreezable: metadata.isFreezable
      };
    }
    // Cache miss: query the coordinator.
    const sparkTokenClient = await this.connectionManager.createSparkTokenClient(
      this.config.getCoordinatorAddress()
    );
    try {
      const response = await sparkTokenClient.query_token_metadata({
        issuerPublicKeys: Array.of((0, import_utils4.hexToBytes)(issuerPublicKey))
      });
      if (response.tokenMetadata.length === 0) {
        throw new import_spark_sdk5.ValidationError(
          "Token metadata not found - If a token has not yet been created, please create it first. Try again in a few seconds.",
          {
            field: "tokenMetadata",
            value: response.tokenMetadata,
            expected: "non-empty array",
            actualLength: response.tokenMetadata.length,
            expectedLength: 1
          }
        );
      }
      const metadata = response.tokenMetadata[0];
      const tokenIdentifier = (0, import_spark_sdk5.encodeBech32mTokenIdentifier)({
        tokenIdentifier: metadata.tokenIdentifier,
        network: this.config.getNetworkType()
      });
      // Populate the cache for subsequent calls.
      this.tokenMetadata.set(tokenIdentifier, metadata);
      return {
        tokenPublicKey: (0, import_utils4.bytesToHex)(metadata.issuerPublicKey),
        rawTokenIdentifier: metadata.tokenIdentifier,
        tokenName: metadata.tokenName,
        tokenTicker: metadata.tokenTicker,
        decimals: metadata.decimals,
        maxSupply: (0, import_utils4.bytesToNumberBE)(metadata.maxSupply),
        isFreezable: metadata.isFreezable
      };
    } catch (error) {
      // FIX: rethrow the deliberate "token not created yet" ValidationError
      // instead of re-wrapping it — the broad catch previously swallowed it
      // into a generic NetworkError, hiding the actionable message.
      if (error instanceof import_spark_sdk5.ValidationError) {
        throw error;
      }
      throw new import_spark_sdk5.NetworkError("Failed to fetch token metadata", {
        errorCount: 1,
        errors: error instanceof Error ? error.message : String(error)
      });
    }
  }
  /**
   * Retrieves the bech32m encoded token identifier for the issuer's token.
   * @returns The bech32m encoded token identifier for the issuer's token
   * @throws {NetworkError} If the token identifier cannot be retrieved
   */
  async getIssuerTokenIdentifier() {
    const tokenMetadata = await this.getIssuerTokenMetadata();
    return (0, import_spark_sdk5.encodeBech32mTokenIdentifier)({
      tokenIdentifier: tokenMetadata.rawTokenIdentifier,
      network: this.config.getNetworkType()
    });
  }
  /**
   * Create a new token on Spark.
   *
   * @param params - Object containing token creation parameters.
   * @param params.tokenName - The name of the token.
   * @param params.tokenTicker - The ticker symbol for the token.
   * @param params.decimals - The number of decimal places for the token.
   * @param params.isFreezable - Whether the token can be frozen.
   * @param [params.maxSupply=0n] - (Optional) The maximum supply of the token. Defaults to <code>0n</code>.
   *
   * @returns The transaction ID of the announcement.
   *
   * @throws {ValidationError} If `decimals` is not a safe integer or other validation fails.
   * @throws {NetworkError} If the announcement transaction cannot be broadcast.
   */
  async createToken({
    tokenName,
    tokenTicker,
    decimals,
    isFreezable,
    maxSupply = 0n
  }) {
    validateTokenParameters(tokenName, tokenTicker, decimals, maxSupply);
    const issuerPublicKey = await super.getIdentityPublicKey();
    const tokenTransaction = await this.issuerTokenTransactionService.constructCreateTokenTransaction(
      (0, import_utils4.hexToBytes)(issuerPublicKey),
      tokenName,
      tokenTicker,
      decimals,
      maxSupply,
      isFreezable
    );
    return await this.issuerTokenTransactionService.broadcastTokenTransaction(
      tokenTransaction
    );
  }
  /**
   * Mints new tokens
   * @param tokenAmount - The amount of tokens to mint
   * @returns The transaction ID of the mint operation
   */
  async mintTokens(tokenAmount) {
    const issuerTokenPublicKey = await super.getIdentityPublicKey();
    const issuerTokenPublicKeyBytes = (0, import_utils4.hexToBytes)(issuerTokenPublicKey);
    const tokenMetadata = await this.getIssuerTokenMetadata();
    const rawTokenIdentifier = tokenMetadata.rawTokenIdentifier;
    const tokenTransaction = await this.issuerTokenTransactionService.constructMintTokenTransaction(
      rawTokenIdentifier,
      issuerTokenPublicKeyBytes,
      tokenAmount
    );
    return await this.issuerTokenTransactionService.broadcastTokenTransaction(
      tokenTransaction
    );
  }
  /**
   * Burns issuer's tokens
   * @param tokenAmount - The amount of tokens to burn
   * @param selectedOutputs - Optional array of outputs to use for the burn operation
   * @returns The transaction ID of the burn operation
   */
  async burnTokens(tokenAmount, selectedOutputs) {
    // Burning is a transfer to the unspendable burn address.
    const burnAddress = (0, import_spark_sdk5.encodeSparkAddress)({
      identityPublicKey: BURN_ADDRESS,
      network: this.config.getNetworkType()
    });
    const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
    return await this.transferTokens({
      tokenIdentifier: issuerTokenIdentifier,
      tokenAmount,
      receiverSparkAddress: burnAddress,
      selectedOutputs
    });
  }
  /**
   * Freezes tokens associated with a specific Spark address.
   * @param sparkAddress - The Spark address whose tokens should be frozen
   * @returns An object containing the IDs of impacted outputs and the total amount of frozen tokens
   */
  async freezeTokens(sparkAddress) {
    await this.syncTokenOutputs();
    const decodedOwnerPubkey = (0, import_spark_sdk5.decodeSparkAddress)(
      sparkAddress,
      this.config.getNetworkType()
    );
    const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
    const rawTokenIdentifier = (0, import_spark_sdk5.decodeBech32mTokenIdentifier)(
      issuerTokenIdentifier,
      this.config.getNetworkType()
    ).tokenIdentifier;
    const response = await this.tokenFreezeService.freezeTokens({
      ownerPublicKey: (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
      tokenIdentifier: rawTokenIdentifier
    });
    const tokenAmount = (0, import_utils4.bytesToNumberBE)(response.impactedTokenAmount);
    return {
      impactedOutputIds: response.impactedOutputIds,
      impactedTokenAmount: tokenAmount
    };
  }
  /**
   * Unfreezes previously frozen tokens associated with a specific Spark address.
   * @param sparkAddress - The Spark address whose tokens should be unfrozen
   * @returns An object containing the IDs of impacted outputs and the total amount of unfrozen tokens
   */
  async unfreezeTokens(sparkAddress) {
    await this.syncTokenOutputs();
    const decodedOwnerPubkey = (0, import_spark_sdk5.decodeSparkAddress)(
      sparkAddress,
      this.config.getNetworkType()
    );
    const issuerTokenIdentifier = await this.getIssuerTokenIdentifier();
    const rawTokenIdentifier = (0, import_spark_sdk5.decodeBech32mTokenIdentifier)(
      issuerTokenIdentifier,
      this.config.getNetworkType()
    ).tokenIdentifier;
    const response = await this.tokenFreezeService.unfreezeTokens({
      ownerPublicKey: (0, import_utils4.hexToBytes)(decodedOwnerPubkey.identityPublicKey),
      tokenIdentifier: rawTokenIdentifier
    });
    const tokenAmount = (0, import_utils4.bytesToNumberBE)(response.impactedTokenAmount);
    return {
      impactedOutputIds: response.impactedOutputIds,
      impactedTokenAmount: tokenAmount
    };
  }
  /**
   * Retrieves the distribution information for the issuer's token.
   * @throws {NotImplementedError} This feature is not yet supported
   */
  async getIssuerTokenDistribution() {
    throw new import_spark_sdk5.NotImplementedError("Token distribution is not yet supported");
  }
  // Builds the OpenTelemetry span name for a wrapped public method.
  getTraceName(methodName) {
    return `IssuerSparkWallet.${methodName}`;
  }
  // Replaces a public method with a tracing-wrapped version of itself.
  wrapPublicIssuerSparkWalletMethodWithOtelSpan(methodName) {
    const original = this[methodName];
    if (typeof original !== "function") {
      throw new Error(
        `Method ${methodName} is not a function on IssuerSparkWallet.`
      );
    }
    const wrapped = this.wrapWithOtelSpan(
      this.getTraceName(methodName),
      original.bind(this)
    );
    this[methodName] = wrapped;
  }
  // Wraps every public issuer method with a tracing span (called once from
  // the constructor).
  wrapIssuerSparkWalletMethodsWithTracing() {
    const methods = [
      "getIssuerTokenBalance",
      "getIssuerTokenMetadata",
      "getIssuerTokenIdentifier",
      "createToken",
      "mintTokens",
      "burnTokens",
      "freezeTokens",
      "unfreezeTokens",
      "getIssuerTokenDistribution"
    ];
    methods.forEach(
      (m) => this.wrapPublicIssuerSparkWalletMethodWithOtelSpan(m)
    );
  }
};
|
|
581
|
+
|
|
582
|
+
// src/issuer-wallet/issuer-spark-wallet.react-native.ts
// React Native variant of the wallet. Behavior is inherited unchanged; the
// RN entry point exists so the polyfills at the top of this bundle are
// loaded before any wallet code runs.
var IssuerSparkWalletReactNative = class extends IssuerSparkWallet {
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  IssuerSparkWallet
});
|