@buildonspark/issuer-sdk 0.1.6 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.browser.d.ts +244 -238
- package/dist/index.browser.js +629 -917
- package/dist/index.node.cjs +654 -935
- package/dist/index.node.d.cts +248 -239
- package/dist/index.node.d.ts +248 -239
- package/dist/index.node.js +635 -914
- package/dist/native/index.react-native.cjs +684 -925
- package/dist/native/index.react-native.d.cts +245 -239
- package/dist/native/index.react-native.d.ts +245 -239
- package/dist/native/index.react-native.js +665 -913
- package/dist/proto/spark.cjs +6 -37
- package/dist/proto/spark.d.cts +1 -4
- package/dist/proto/spark.d.ts +1 -4
- package/dist/proto/spark.js +2 -3
- package/dist/proto/spark_token.cjs +6 -37
- package/dist/proto/spark_token.d.cts +1 -1
- package/dist/proto/spark_token.d.ts +1 -1
- package/dist/proto/spark_token.js +2 -3
- package/dist/tests/test-utils.cjs +6 -37
- package/dist/tests/test-utils.d.cts +1 -1
- package/dist/tests/test-utils.d.ts +1 -1
- package/dist/tests/test-utils.js +2 -3
- package/dist/types/index.cjs +6 -37
- package/dist/types/index.d.cts +1 -1
- package/dist/types/index.d.ts +1 -1
- package/dist/types/index.js +2 -3
- package/package.json +4 -4
- package/dist/chunk-7B4B24XF.js +0 -17
package/dist/index.node.cjs
CHANGED

@@ -1,956 +1,675 @@
-     [ old lines 1-3 deleted; content not shown in this diff view ]
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
-   if (from && typeof from === "object" || typeof from === "function") {
-     for (let key of __getOwnPropNames(from))
-       if (!__hasOwnProp.call(to, key) && key !== except)
-         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-   }
-   return to;
- };
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
- // src/index.node.ts
- var index_node_exports = {};
- __export(index_node_exports, {
-   DefaultSparkSigner: () => import_spark_sdk8.DefaultSparkSigner,
-   IssuerSparkWallet: () => IssuerSparkWalletNodeJS,
-   UnsafeStatelessSparkSigner: () => import_spark_sdk8.UnsafeStatelessSparkSigner,
-   WalletConfig: () => import_spark_sdk9.WalletConfig
- });
- module.exports = __toCommonJS(index_node_exports);
+ let _buildonspark_spark_sdk = require("@buildonspark/spark-sdk");
+ let _noble_curves_utils = require("@noble/curves/utils");
+ let _scure_btc_signer_utils = require("@scure/btc-signer/utils");

-
- var import_buffer = require("buffer");
- if (typeof globalThis.Buffer === "undefined") {
-   globalThis.Buffer = import_buffer.Buffer;
- }
- if (typeof window !== "undefined") {
-   if (typeof window.global === "undefined") {
-     window.global = window;
-   }
-   if (typeof window.globalThis === "undefined") {
-     window.globalThis = window;
-   }
- }
-
- // src/issuer-wallet/issuer-spark-wallet.ts
- var import_spark_sdk5 = require("@buildonspark/spark-sdk");
- var import_utils4 = require("@noble/curves/utils");
-
- // src/services/freeze.ts
- var import_spark_sdk2 = require("@buildonspark/spark-sdk");
- var import_utils2 = require("@noble/curves/utils");
-
- // src/utils/token-hashing.ts
- var import_utils = require("@scure/btc-signer/utils");
- var import_spark_sdk = require("@buildonspark/spark-sdk");
+ //#region src/utils/token-hashing.ts
  function hashFreezeTokensPayload(payload) {
-     [ old lines 56-88 deleted; content not shown in this diff view ]
-   const timestampBytes = new Uint8Array(8);
-   new DataView(timestampBytes.buffer).setBigUint64(
-     0,
-     BigInt(payload.issuerProvidedTimestamp),
-     true
-     // true for little-endian
-   );
-   timestampHash.update(timestampBytes);
- }
- allHashes.push(timestampHash.digest());
- const operatorPubKeyHash = import_utils.sha256.create();
- if (payload.operatorIdentityPublicKey) {
-   operatorPubKeyHash.update(payload.operatorIdentityPublicKey);
- }
- allHashes.push(operatorPubKeyHash.digest());
- const finalHash = import_utils.sha256.create();
- for (const hash of allHashes) {
-   finalHash.update(hash);
- }
- return finalHash.digest();
+   if (!payload) throw new _buildonspark_spark_sdk.SparkValidationError("Freeze tokens payload cannot be nil", {
+     field: "payload",
+     value: payload,
+     expected: "valid freeze tokens payload"
+   });
+   let allHashes = [];
+   const versionHashObj = _scure_btc_signer_utils.sha256.create();
+   const versionBytes = new Uint8Array(4);
+   new DataView(versionBytes.buffer).setUint32(0, payload.version, false);
+   versionHashObj.update(versionBytes);
+   allHashes.push(versionHashObj.digest());
+   const ownerPubKeyHash = _scure_btc_signer_utils.sha256.create();
+   if (payload.ownerPublicKey) ownerPubKeyHash.update(payload.ownerPublicKey);
+   allHashes.push(ownerPubKeyHash.digest());
+   const tokenIdentifierHash = _scure_btc_signer_utils.sha256.create();
+   if (payload.tokenIdentifier) tokenIdentifierHash.update(payload.tokenIdentifier);
+   allHashes.push(tokenIdentifierHash.digest());
+   const shouldUnfreezeHash = _scure_btc_signer_utils.sha256.create();
+   shouldUnfreezeHash.update(new Uint8Array([payload.shouldUnfreeze ? 1 : 0]));
+   allHashes.push(shouldUnfreezeHash.digest());
+   const timestampHash = _scure_btc_signer_utils.sha256.create();
+   if (payload.issuerProvidedTimestamp) {
+     const timestampBytes = new Uint8Array(8);
+     new DataView(timestampBytes.buffer).setBigUint64(0, BigInt(payload.issuerProvidedTimestamp), true);
+     timestampHash.update(timestampBytes);
+   }
+   allHashes.push(timestampHash.digest());
+   const operatorPubKeyHash = _scure_btc_signer_utils.sha256.create();
+   if (payload.operatorIdentityPublicKey) operatorPubKeyHash.update(payload.operatorIdentityPublicKey);
+   allHashes.push(operatorPubKeyHash.digest());
+   const finalHash = _scure_btc_signer_utils.sha256.create();
+   for (const hash of allHashes) finalHash.update(hash);
+   return finalHash.digest();
  }

-
+ //#endregion
+ //#region src/services/freeze.ts
  var TokenFreezeService = class {
-     [ old lines 113-155 deleted; content not shown in this diff view ]
-     } catch (error) {
-       throw new import_spark_sdk2.SparkRequestError(
-         `Failed to send a freeze/unfreeze operation to operator: ${operator.address}`,
-         { operation: "freeze_tokens", error }
-       );
-     }
-   })
-   );
-   const successfulResponses = (0, import_spark_sdk2.collectResponses)(freezeResponses);
-   return successfulResponses[0].response;
- }
+   config;
+   connectionManager;
+   constructor(config, connectionManager) {
+     this.config = config;
+     this.connectionManager = connectionManager;
+   }
+   async freezeTokens({ ownerPublicKey, tokenIdentifier }) {
+     return this.freezeOperation(ownerPublicKey, false, tokenIdentifier);
+   }
+   async unfreezeTokens({ ownerPublicKey, tokenIdentifier }) {
+     return this.freezeOperation(ownerPublicKey, true, tokenIdentifier);
+   }
+   async freezeOperation(ownerPublicKey, shouldUnfreeze, tokenIdentifier) {
+     const signingOperators = this.config.getSigningOperators();
+     const issuerProvidedTimestamp = Date.now();
+     return (0, _buildonspark_spark_sdk.collectResponses)(await Promise.allSettled(Object.entries(signingOperators).map(async ([identifier, operator]) => {
+       const sparkTokenClient = await this.connectionManager.createSparkTokenClient(operator.address);
+       const freezeTokensPayload = {
+         version: 1,
+         ownerPublicKey,
+         tokenIdentifier,
+         shouldUnfreeze,
+         issuerProvidedTimestamp,
+         operatorIdentityPublicKey: (0, _noble_curves_utils.hexToBytes)(operator.identityPublicKey)
+       };
+       const hashedPayload = hashFreezeTokensPayload(freezeTokensPayload);
+       const issuerSignature = await this.config.signer.signMessageWithIdentityKey(hashedPayload);
+       try {
+         return {
+           identifier,
+           response: await sparkTokenClient.freeze_tokens({
+             freezeTokensPayload,
+             issuerSignature
+           })
+         };
+       } catch (error) {
+         throw new _buildonspark_spark_sdk.SparkRequestError(`Failed to send a freeze/unfreeze operation to operator: ${operator.address}`, {
+           operation: "freeze_tokens",
+           error
+         });
+       }
+     })))[0].response;
+   }
  };

-
-
- var
-     [ old lines 172-270 deleted; content not shown in this diff view ]
- }
- },
- partialTokenOutputs: []
- };
- }
+ //#endregion
+ //#region src/services/token-transactions.ts
+ var IssuerTokenTransactionService = class extends _buildonspark_spark_sdk.TokenTransactionService {
+   constructor(config, connectionManager) {
+     super(config, connectionManager);
+   }
+   async constructMintTokenTransaction(rawTokenIdentifierBytes, issuerTokenPublicKey, tokenAmount) {
+     return {
+       version: 2,
+       network: this.config.getNetworkProto(),
+       tokenInputs: {
+         $case: "mintInput",
+         mintInput: {
+           issuerPublicKey: issuerTokenPublicKey,
+           tokenIdentifier: rawTokenIdentifierBytes
+         }
+       },
+       tokenOutputs: [{
+         ownerPublicKey: issuerTokenPublicKey,
+         tokenIdentifier: rawTokenIdentifierBytes,
+         tokenAmount: (0, _noble_curves_utils.numberToBytesBE)(tokenAmount, 16)
+       }],
+       clientCreatedTimestamp: /* @__PURE__ */ new Date(),
+       sparkOperatorIdentityPublicKeys: super.collectOperatorIdentityPublicKeys(),
+       expiryTime: void 0,
+       invoiceAttachments: []
+     };
+   }
+     [ new lines 118-191: constructPartialMintTokenTransaction, constructCreateTokenTransaction, and constructPartialCreateTokenTransaction follow the same pattern, building version-2 createInput transactions and version-3 "partial" transactions whose tokenTransactionMetadata carries validityDurationSeconds, a server-provided clientCreatedTimestamp, and withdraw bond/locktime fields ]
  };

-     [ old lines 278-282 deleted; content not shown in this diff view ]
+ //#endregion
+ //#region src/utils/create-validation.ts
+ /**
+  * Returns true when the input is already in NFC normalisation form.
+  * JavaScript strings are UTF-16 encoded, so any JavaScript string is
+  * already valid Unicode. However, we still need to ensure canonical
+  * equivalence so that, for example, \u00E9 (é) and \u0065\u0301 (é)
+  * are treated identically. We do this by comparing the original
+  * string to its NFC-normalised representation.
+  */
  function isNfcNormalized(value) {
-
+   return value.normalize("NFC") === value;
  }
-     [ old lines 286-292 deleted; content not shown in this diff view ]
+ const MIN_NAME_SIZE = 3;
+ const MAX_NAME_SIZE = 20;
+ const MIN_SYMBOL_SIZE = 3;
+ const MAX_SYMBOL_SIZE = 6;
+ const MAX_DECIMALS = 255;
+ const MAXIMUM_MAX_SUPPLY = (1n << 128n) - 1n;
+ const MAX_TOKEN_CONTENT_SIZE = 1024;
  function validateTokenParameters(tokenName, tokenTicker, decimals, maxSupply, extraMetadata) {
-     [ old lines 294-332 deleted; content not shown in this diff view ]
-     [ old lines 333-361 deleted: the previous multi-line SparkValidationError throws for the decimals, maxSupply, and extraMetadata checks, built on import_spark_sdk4 ]
+   if (!isNfcNormalized(tokenName)) throw new _buildonspark_spark_sdk.SparkValidationError("Token name must be NFC-normalised UTF-8", {
+     field: "tokenName",
+     value: tokenName,
+     expected: "NFC normalised string"
+   });
+   if (!isNfcNormalized(tokenTicker)) throw new _buildonspark_spark_sdk.SparkValidationError("Token ticker must be NFC-normalised UTF-8", {
+     field: "tokenTicker",
+     value: tokenTicker,
+     expected: "NFC normalised string"
+   });
+   const nameBytes = Buffer.from(tokenName, "utf-8").length;
+   if (nameBytes < MIN_NAME_SIZE || nameBytes > MAX_NAME_SIZE) throw new _buildonspark_spark_sdk.SparkValidationError(`Token name must be between ${MIN_NAME_SIZE} and ${MAX_NAME_SIZE} bytes`, {
+     field: "tokenName",
+     value: tokenName,
+     actualLength: nameBytes,
+     expected: `>=${MIN_NAME_SIZE} and <=${MAX_NAME_SIZE}`
+   });
+   const tickerBytes = Buffer.from(tokenTicker, "utf-8").length;
+   if (tickerBytes < MIN_SYMBOL_SIZE || tickerBytes > MAX_SYMBOL_SIZE) throw new _buildonspark_spark_sdk.SparkValidationError(`Token ticker must be between ${MIN_SYMBOL_SIZE} and ${MAX_SYMBOL_SIZE} bytes`, {
+     field: "tokenTicker",
+     value: tokenTicker,
+     actualLength: tickerBytes,
+     expected: `>=${MIN_SYMBOL_SIZE} and <=${MAX_SYMBOL_SIZE}`
+   });
+   if (!Number.isSafeInteger(decimals) || decimals < 0 || decimals > MAX_DECIMALS) throw new _buildonspark_spark_sdk.SparkValidationError(`Decimals must be an integer between 0 and ${MAX_DECIMALS}`, {
+     field: "decimals",
+     value: decimals,
+     expected: `>=0 and <=${MAX_DECIMALS}`
+   });
+   if (maxSupply < 0n || maxSupply > MAXIMUM_MAX_SUPPLY) throw new _buildonspark_spark_sdk.SparkValidationError(`maxSupply must be between 0 and 2^128-1`, {
+     field: "maxSupply",
+     value: maxSupply.toString(),
+     expected: `>=0 and <=${MAXIMUM_MAX_SUPPLY.toString()}`
+   });
+   if (extraMetadata && extraMetadata.length > MAX_TOKEN_CONTENT_SIZE) throw new _buildonspark_spark_sdk.SparkValidationError(`Extra metadata must be less than ${MAX_TOKEN_CONTENT_SIZE} bytes`, {
+     field: "extraMetadata",
+     value: extraMetadata.length,
+     expected: `<${MAX_TOKEN_CONTENT_SIZE}`
+   });
  }

-     [ old lines 364-733 deleted; content not shown in this diff view ]
-     [ old lines 734-916 deleted: the previous implementations of the burnTokens tail, freezeTokens, unfreezeTokens, getIssuerTokenDistribution, validateTokenIssuer, getTraceName, wrapIssuerPublicMethod, and wrapIssuerSparkWalletMethods, built on import_spark_sdk5 and import_utils4 ]
+ //#endregion
+ //#region src/issuer-wallet/issuer-spark-wallet.ts
+ const BURN_ADDRESS = "02".repeat(33);
+ /**
+  * Represents a Spark wallet with minting capabilities.
+  * This class extends the base SparkWallet with additional functionality for token minting,
+  * burning, and freezing operations.
+  */
+ var IssuerSparkWallet = class extends _buildonspark_spark_sdk.SparkWallet {
+   issuerTokenTransactionService;
+   tokenFreezeService;
+   tracerId = "issuer-sdk";
+   /**
+    * Initializes a new IssuerSparkWallet instance.
+    * Inherits the generic static initialize from the base class.
+    */
+   constructor(configOptions, signer) {
+     super(configOptions, signer);
+     this.issuerTokenTransactionService = new IssuerTokenTransactionService(this.config, this.connectionManager);
+     this.tokenFreezeService = new TokenFreezeService(this.config, this.connectionManager);
+     this.wrapIssuerSparkWalletMethods();
+   }
+     [ new lines 278-625: the wallet methods getIssuerTokenBalance (deprecated), getIssuerTokenBalances, getIssuerTokenMetadata (deprecated), getIssuerTokensMetadata, getIssuerTokenIdentifier (deprecated), getIssuerTokenIdentifiers, createToken, mintTokens, burnTokens, freezeTokens, unfreezeTokens, getIssuerTokenDistribution, validateTokenIssuer, getTraceName, wrapIssuerPublicMethod, and wrapIssuerSparkWalletMethods, reworked against the new _buildonspark_spark_sdk and _noble_curves_utils bindings and supporting both the V2 and V3 token transaction versions ]
  };

-     [ old lines 918-930 deleted; content not shown in this diff view ]
+ const PUBLIC_ISSUER_SPARK_WALLET_METHODS = [
+   "getIssuerTokenBalance",
+   "getIssuerTokenBalances",
+   "getIssuerTokenMetadata",
+   "getIssuerTokensMetadata",
+   "getIssuerTokenIdentifier",
+   "getIssuerTokenIdentifiers",
+   "createToken",
+   "mintTokens",
+   "burnTokens",
+   "freezeTokens",
+   "unfreezeTokens",
+   "getIssuerTokenDistribution"
  ];

-
-
+ //#endregion
+ //#region src/issuer-wallet/issuer-spark-wallet.node.ts
  var IssuerSparkWalletNodeJS = class extends IssuerSparkWallet {
-     [ old lines 936-944 deleted; content not shown in this diff view ]
+   buildConnectionManager(config) {
+     return new _buildonspark_spark_sdk.ConnectionManager(config);
+   }
+   initializeTracerEnv({ spanProcessors, traceUrls }) {
+     (0, _buildonspark_spark_sdk.initializeTracerEnv)({
+       spanProcessors,
+       traceUrls
+     });
+   }
  };

-     [ old lines 947-952 deleted; content not shown in this diff view ]
-   IssuerSparkWallet,
-   UnsafeStatelessSparkSigner,
-   WalletConfig
+ //#endregion
+ Object.defineProperty(exports, 'DefaultSparkSigner', {
+   enumerable: true,
+   get: function () {
+     return _buildonspark_spark_sdk.DefaultSparkSigner;
+   }
  });
+ exports.IssuerSparkWallet = IssuerSparkWalletNodeJS;
+ Object.defineProperty(exports, 'UnsafeStatelessSparkSigner', {
+   enumerable: true,
+   get: function () {
+     return _buildonspark_spark_sdk.UnsafeStatelessSparkSigner;
+   }
+ });
+ Object.defineProperty(exports, 'WalletConfig', {
+   enumerable: true,
+   get: function () {
+     return _buildonspark_spark_sdk.WalletConfig;
+   }
+ });