@buildonspark/issuer-sdk 0.0.59 → 0.0.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-77FPZDAE.js +8 -0
- package/dist/index.cjs +72 -80
- package/dist/index.d.cts +7 -3
- package/dist/index.d.ts +7 -3
- package/dist/index.js +77 -78
- package/dist/proto/lrc20.cjs +35 -0
- package/dist/proto/lrc20.d.cts +2 -0
- package/dist/proto/lrc20.d.ts +2 -0
- package/dist/proto/lrc20.js +4 -0
- package/dist/proto/spark.cjs +35 -0
- package/dist/proto/spark.d.cts +1 -0
- package/dist/proto/spark.d.ts +1 -0
- package/dist/proto/spark.js +4 -0
- package/dist/types.cjs +0 -50
- package/dist/types.d.cts +7 -34
- package/dist/types.d.ts +7 -34
- package/dist/types.js +1 -12
- package/package.json +11 -5
- package/src/issuer-spark-wallet.ts +79 -12
- package/src/proto/lrc20.ts +1 -0
- package/src/proto/spark.ts +1 -18583
- package/src/tests/integration/spark.test.ts +251 -4
- package/src/types.ts +6 -38
- package/src/utils/type-mappers.ts +26 -18
- package/dist/chunk-GB7N6I5O.js +0 -48
- package/src/proto/common.ts +0 -484
- package/src/proto/google/protobuf/descriptor.ts +0 -7494
- package/src/proto/google/protobuf/duration.ts +0 -215
- package/src/proto/google/protobuf/empty.ts +0 -97
- package/src/proto/google/protobuf/timestamp.ts +0 -244
- package/src/proto/mock.ts +0 -431
- package/src/proto/spark_authn.ts +0 -778
- package/src/proto/validate/validate.ts +0 -6543
- package/src/utils/enum-mappers.ts +0 -80
package/dist/index.cjs
CHANGED

@@ -34,10 +34,11 @@ if (typeof global === "undefined") {
 }

 // src/issuer-spark-wallet.ts
-var
+var import_lrc20_sdk = require("@buildonspark/lrc20-sdk");
 var import_spark_sdk3 = require("@buildonspark/spark-sdk");
 var import_core = require("@lightsparkdev/core");
 var import_address = require("@buildonspark/spark-sdk/address");
+var import_lrc202 = require("@buildonspark/spark-sdk/proto/lrc20");
 var import_utils6 = require("@noble/curves/abstract/utils");

 // src/services/freeze.ts
@@ -181,74 +182,8 @@ var IssuerTokenTransactionService = class extends import_token_transactions.Toke

 // src/utils/type-mappers.ts
 var import_utils5 = require("@noble/curves/abstract/utils");
-
-
-var import_lrc20_sdk = require("@buildonspark/lrc20-sdk");
-function mapOperationType(type) {
-switch (type) {
-case import_lrc20_sdk.Lrc20Protos.OperationType.USER_TRANSFER:
-return "USER_TRANSFER" /* USER_TRANSFER */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.USER_BURN:
-return "USER_BURN" /* USER_BURN */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.ISSUER_ANNOUNCE:
-return "ISSUER_ANNOUNCE" /* ISSUER_ANNOUNCE */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.ISSUER_MINT:
-return "ISSUER_MINT" /* ISSUER_MINT */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.ISSUER_TRANSFER:
-return "ISSUER_TRANSFER" /* ISSUER_TRANSFER */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.ISSUER_FREEZE:
-return "ISSUER_FREEZE" /* ISSUER_FREEZE */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.ISSUER_UNFREEZE:
-return "ISSUER_UNFREEZE" /* ISSUER_UNFREEZE */;
-case import_lrc20_sdk.Lrc20Protos.OperationType.ISSUER_BURN:
-return "ISSUER_BURN" /* ISSUER_BURN */;
-default:
-return "USER_TRANSFER" /* USER_TRANSFER */;
-}
-}
-function mapOnChainTransactionStatus(status) {
-switch (status) {
-case import_lrc20_sdk.Lrc20Protos.OnChainTransactionStatus.PENDING:
-return "PENDING" /* PENDING */;
-case import_lrc20_sdk.Lrc20Protos.OnChainTransactionStatus.CONFIRMED:
-return "CONFIRMED" /* CONFIRMED */;
-case import_lrc20_sdk.Lrc20Protos.OnChainTransactionStatus.WAITING_MINED:
-return "WAITING_MINED" /* WAITING_MINED */;
-case import_lrc20_sdk.Lrc20Protos.OnChainTransactionStatus.MINED:
-return "MINED" /* MINED */;
-case import_lrc20_sdk.Lrc20Protos.OnChainTransactionStatus.ATTACHING:
-return "ATTACHING" /* ATTACHING */;
-case import_lrc20_sdk.Lrc20Protos.OnChainTransactionStatus.ATTACHED:
-return "ATTACHED" /* ATTACHED */;
-default:
-return "PENDING" /* PENDING */;
-}
-}
-function mapSparkTransactionStatus(status) {
-switch (status) {
-case import_lrc20_sdk.Lrc20Protos.SparkTransactionStatus.STARTED:
-return "STARTED" /* STARTED */;
-case import_lrc20_sdk.Lrc20Protos.SparkTransactionStatus.SIGNED:
-return "SIGNED" /* SIGNED */;
-case import_lrc20_sdk.Lrc20Protos.SparkTransactionStatus.FINALIZED:
-return "FINALIZED" /* FINALIZED */;
-default:
-return "STARTED" /* STARTED */;
-}
-}
-function mapLayer(layer) {
-switch (layer) {
-case 0:
-return "L1" /* L1 */;
-case 1:
-return "SPARK" /* SPARK */;
-default:
-return "L1" /* L1 */;
-}
-}
-
-// src/utils/type-mappers.ts
-function convertTokenActivityToHexEncoded(rawTransactions) {
+var import_lrc20 = require("@buildonspark/spark-sdk/proto/lrc20");
+function convertToTokenActivity(rawTransactions) {
 const response = {
 transactions: rawTransactions.transactions.map((transaction) => {
 if (!transaction.transaction) {
@@ -260,10 +195,10 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
 transaction: {
 $case: "onChain",
 onChain: {
-operationType:
+operationType: getEnumName(import_lrc20.OperationType, onChain.operationType),
 transactionHash: (0, import_utils5.bytesToHex)(onChain.transactionHash),
 rawtx: (0, import_utils5.bytesToHex)(onChain.rawtx),
-status:
+status: getEnumName(import_lrc20.OnChainTransactionStatus, onChain.status),
 inputs: onChain.inputs.map((input) => ({
 rawTx: (0, import_utils5.bytesToHex)(input.rawTx),
 vout: input.vout,
@@ -289,9 +224,9 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
 transaction: {
 $case: "spark",
 spark: {
-operationType:
+operationType: getEnumName(import_lrc20.OperationType, spark.operationType),
 transactionHash: (0, import_utils5.bytesToHex)(spark.transactionHash),
-status:
+status: getEnumName(import_lrc20.SparkTransactionStatus, spark.status),
 confirmedAt: spark.confirmedAt,
 leavesToCreate: spark.leavesToCreate.map((leaf) => ({
 tokenPublicKey: (0, import_utils5.bytesToHex)(leaf.tokenPublicKey),
@@ -334,14 +269,18 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
 lastTransactionHash: (0, import_utils5.bytesToHex)(
 rawTransactions.nextCursor.lastTransactionHash
 ),
-layer:
+layer: getEnumName(import_lrc20.Layer, rawTransactions.nextCursor.layer)
 } : void 0
 };
 return response;
 }
+function getEnumName(enumObj, value) {
+return enumObj[value];
+}

 // src/issuer-spark-wallet.ts
 var import_spark_sdk4 = require("@buildonspark/spark-sdk");
+var import_lrc203 = require("@buildonspark/spark-sdk/proto/lrc20");
 var BURN_ADDRESS = "02".repeat(33);
 var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.SparkWallet {
 issuerTokenTransactionService;
@@ -449,7 +388,8 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
 tokenSymbol: info.announcement.symbol,
 tokenDecimals: Number((0, import_utils6.bytesToNumberBE)(info.announcement.decimal)),
 isFreezable: info.announcement.isFreezable,
-maxSupply: (0, import_utils6.bytesToNumberBE)(info.announcement.maxSupply)
+maxSupply: (0, import_utils6.bytesToNumberBE)(info.announcement.maxSupply),
+totalSupply: (0, import_utils6.bytesToNumberBE)(info.totalSupply)
 };
 } catch (error) {
 throw new import_spark_sdk3.NetworkError("Failed to get token info", {
@@ -556,16 +496,68 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
 });
 }
 const lrc20Client = await this.lrc20ConnectionManager.createLrc20Client();
+let convertedCursor;
+if (cursor) {
+const lastTransactionHash = typeof cursor.lastTransactionHash === "string" ? (0, import_utils6.hexToBytes)(cursor.lastTransactionHash) : cursor.lastTransactionHash;
+let layer;
+if (typeof cursor.layer === "string") {
+switch (cursor.layer.toUpperCase()) {
+case "L1":
+layer = import_lrc202.Layer.L1;
+break;
+case "Spark":
+layer = import_lrc202.Layer.SPARK;
+break;
+default:
+layer = import_lrc202.Layer.UNRECOGNIZED;
+}
+} else {
+layer = cursor.layer;
+}
+convertedCursor = {
+lastTransactionHash,
+layer
+};
+}
+let convertedOperationTypes;
+if (operationTypes) {
+if (typeof operationTypes[0] === "string") {
+convertedOperationTypes = operationTypes.map((opType) => {
+switch (opType.toUpperCase()) {
+case "USER_TRANSFER":
+return import_lrc203.OperationType.USER_TRANSFER;
+case "USER_BURN":
+return import_lrc203.OperationType.USER_BURN;
+case "ISSUER_ANNOUNCE":
+return import_lrc203.OperationType.ISSUER_ANNOUNCE;
+case "ISSUER_MINT":
+return import_lrc203.OperationType.ISSUER_MINT;
+case "ISSUER_TRANSFER":
+return import_lrc203.OperationType.ISSUER_TRANSFER;
+case "ISSUER_FREEZE":
+return import_lrc203.OperationType.ISSUER_FREEZE;
+case "ISSUER_UNFREEZE":
+return import_lrc203.OperationType.ISSUER_UNFREEZE;
+case "ISSUER_BURN":
+return import_lrc203.OperationType.ISSUER_BURN;
+default:
+return import_lrc203.OperationType.UNRECOGNIZED;
+}
+});
+} else {
+convertedOperationTypes = operationTypes;
+}
+}
 try {
 const transactions = await lrc20Client.listTransactions({
 tokenPublicKey: (0, import_utils6.hexToBytes)(await super.getIdentityPublicKey()),
-cursor,
+cursor: convertedCursor,
 pageSize,
 beforeTimestamp,
 afterTimestamp,
-operationTypes
+operationTypes: convertedOperationTypes
 });
-return
+return convertToTokenActivity(transactions);
 } catch (error) {
 throw new import_spark_sdk3.NetworkError("Failed to get token activity", {
 operation: "listTransactions",
@@ -602,8 +594,8 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends import_spark_sdk3.Spark
 });
 }
 await this.lrc20Wallet.syncWallet();
-const tokenPublicKey = new
-const announcement = new
+const tokenPublicKey = new import_lrc20_sdk.TokenPubkey(this.lrc20Wallet.pubkey);
+const announcement = new import_lrc20_sdk.TokenPubkeyAnnouncement(
 tokenPublicKey,
 tokenName,
 tokenTicker,
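In the rewritten bundle above, the hand-written mapOperationType / mapOnChainTransactionStatus / mapSparkTransactionStatus / mapLayer switches are replaced by a single getEnumName helper that indexes the generated enum object with the numeric value. A minimal TypeScript sketch of the mechanism it relies on, using an illustrative enum (the member list below is an assumption; the real enums come from @buildonspark/spark-sdk/proto/lrc20):

// Illustrative ts-proto-style numeric enum, not the SDK's actual definition.
enum OperationType {
  USER_TRANSFER = 0,
  USER_BURN = 1,
  ISSUER_ANNOUNCE = 2,
}

// Numeric enums compile to objects that also carry a reverse mapping from
// value to member name, so enumObj[value] yields the name string -- which is
// all that getEnumName(enumObj, value) in the new bundle does.
const value: OperationType = OperationType.USER_BURN;
const name: string = OperationType[value];
console.log(name); // "USER_BURN"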
package/dist/index.d.cts
CHANGED

@@ -1,8 +1,8 @@
-import { ListAllTokenTransactionsCursor, OperationType } from '@buildonspark/lrc20-sdk/proto/rpc/v1/types';
 import { SparkWallet, SparkWalletProps } from '@buildonspark/spark-sdk';
 import { OutputWithPreviousTransactionData } from '@buildonspark/spark-sdk/proto/spark';
+import { Layer, OperationType } from '@buildonspark/spark-sdk/proto/lrc20';
 import { ConfigOptions } from '@buildonspark/spark-sdk/services/wallet-config';
-import {
+import { TokenActivityResponse, TokenDistribution } from './types.cjs';

 type IssuerTokenInfo = {
 tokenPublicKey: string;
@@ -11,6 +11,7 @@ type IssuerTokenInfo = {
 tokenDecimals: number;
 maxSupply: bigint;
 isFreezable: boolean;
+totalSupply: bigint;
 };
 /**
 * Represents a Spark wallet with minting capabilities.
@@ -87,7 +88,10 @@ declare class IssuerSparkWallet extends SparkWallet {
 * @throws {ValidationError} If pageSize is not a safe integer
 * @throws {NetworkError} If the activity data cannot be retrieved
 */
-getIssuerTokenActivity(pageSize?: number, cursor?:
+getIssuerTokenActivity(pageSize?: number, cursor?: {
+lastTransactionHash: string | Uint8Array;
+layer: string | Layer;
+}, operationTypes?: string[] | OperationType[], beforeTimestamp?: Date, afterTimestamp?: Date): Promise<TokenActivityResponse>;
 /**
 * Retrieves the distribution information for the issuer's token.
 * @throws {NotImplementedError} This feature is not yet supported
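Per the updated declaration above, getIssuerTokenActivity now takes a plain-object cursor and accepts strings as well as the proto enum values for the cursor layer and the operation-type filter. A hedged usage sketch (the initialized wallet instance and the exact shape of the returned transactions are assumptions based on this diff, not verified against the published types):

import { IssuerSparkWallet } from "@buildonspark/issuer-sdk";

// Assume `wallet` is an already-initialized IssuerSparkWallet instance.
declare const wallet: IssuerSparkWallet;

// Strings are converted to the proto enums inside the wallet before the
// underlying lrc20Client.listTransactions call.
const activity = await wallet.getIssuerTokenActivity(
  20,                                                     // pageSize
  { lastTransactionHash: "00".repeat(32), layer: "L1" },  // cursor with a hex-string hash
  ["ISSUER_MINT", "ISSUER_BURN"],                         // operationTypes as strings
);

for (const entry of activity.transactions) {
  console.log(entry.transaction?.$case); // "onChain" or "spark"
}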
package/dist/index.d.ts
CHANGED

@@ -1,8 +1,8 @@
-import { ListAllTokenTransactionsCursor, OperationType } from '@buildonspark/lrc20-sdk/proto/rpc/v1/types';
 import { SparkWallet, SparkWalletProps } from '@buildonspark/spark-sdk';
 import { OutputWithPreviousTransactionData } from '@buildonspark/spark-sdk/proto/spark';
+import { Layer, OperationType } from '@buildonspark/spark-sdk/proto/lrc20';
 import { ConfigOptions } from '@buildonspark/spark-sdk/services/wallet-config';
-import {
+import { TokenActivityResponse, TokenDistribution } from './types.js';

 type IssuerTokenInfo = {
 tokenPublicKey: string;
@@ -11,6 +11,7 @@ type IssuerTokenInfo = {
 tokenDecimals: number;
 maxSupply: bigint;
 isFreezable: boolean;
+totalSupply: bigint;
 };
 /**
 * Represents a Spark wallet with minting capabilities.
@@ -87,7 +88,10 @@ declare class IssuerSparkWallet extends SparkWallet {
 * @throws {ValidationError} If pageSize is not a safe integer
 * @throws {NetworkError} If the activity data cannot be retrieved
 */
-getIssuerTokenActivity(pageSize?: number, cursor?:
+getIssuerTokenActivity(pageSize?: number, cursor?: {
+lastTransactionHash: string | Uint8Array;
+layer: string | Layer;
+}, operationTypes?: string[] | OperationType[], beforeTimestamp?: Date, afterTimestamp?: Date): Promise<TokenActivityResponse>;
 /**
 * Retrieves the distribution information for the issuer's token.
 * @throws {NotImplementedError} This feature is not yet supported
package/dist/index.js
CHANGED

@@ -1,4 +1,4 @@
-import "./chunk-
+import "./chunk-77FPZDAE.js";

 // src/issuer-spark-wallet.ts
 import { TokenPubkey, TokenPubkeyAnnouncement } from "@buildonspark/lrc20-sdk";
@@ -12,6 +12,7 @@ import {
 decodeSparkAddress,
 encodeSparkAddress
 } from "@buildonspark/spark-sdk/address";
+import { Layer as Layer2 } from "@buildonspark/spark-sdk/proto/lrc20";
 import {
 bytesToHex as bytesToHex2,
 bytesToNumberBE as bytesToNumberBE2,
@@ -159,74 +160,13 @@ var IssuerTokenTransactionService = class extends TokenTransactionService {

 // src/utils/type-mappers.ts
 import { bytesToHex, bytesToNumberBE } from "@noble/curves/abstract/utils";
-
-
-
-
-
-
-
-case Lrc20Protos.OperationType.USER_BURN:
-return "USER_BURN" /* USER_BURN */;
-case Lrc20Protos.OperationType.ISSUER_ANNOUNCE:
-return "ISSUER_ANNOUNCE" /* ISSUER_ANNOUNCE */;
-case Lrc20Protos.OperationType.ISSUER_MINT:
-return "ISSUER_MINT" /* ISSUER_MINT */;
-case Lrc20Protos.OperationType.ISSUER_TRANSFER:
-return "ISSUER_TRANSFER" /* ISSUER_TRANSFER */;
-case Lrc20Protos.OperationType.ISSUER_FREEZE:
-return "ISSUER_FREEZE" /* ISSUER_FREEZE */;
-case Lrc20Protos.OperationType.ISSUER_UNFREEZE:
-return "ISSUER_UNFREEZE" /* ISSUER_UNFREEZE */;
-case Lrc20Protos.OperationType.ISSUER_BURN:
-return "ISSUER_BURN" /* ISSUER_BURN */;
-default:
-return "USER_TRANSFER" /* USER_TRANSFER */;
-}
-}
-function mapOnChainTransactionStatus(status) {
-switch (status) {
-case Lrc20Protos.OnChainTransactionStatus.PENDING:
-return "PENDING" /* PENDING */;
-case Lrc20Protos.OnChainTransactionStatus.CONFIRMED:
-return "CONFIRMED" /* CONFIRMED */;
-case Lrc20Protos.OnChainTransactionStatus.WAITING_MINED:
-return "WAITING_MINED" /* WAITING_MINED */;
-case Lrc20Protos.OnChainTransactionStatus.MINED:
-return "MINED" /* MINED */;
-case Lrc20Protos.OnChainTransactionStatus.ATTACHING:
-return "ATTACHING" /* ATTACHING */;
-case Lrc20Protos.OnChainTransactionStatus.ATTACHED:
-return "ATTACHED" /* ATTACHED */;
-default:
-return "PENDING" /* PENDING */;
-}
-}
-function mapSparkTransactionStatus(status) {
-switch (status) {
-case Lrc20Protos.SparkTransactionStatus.STARTED:
-return "STARTED" /* STARTED */;
-case Lrc20Protos.SparkTransactionStatus.SIGNED:
-return "SIGNED" /* SIGNED */;
-case Lrc20Protos.SparkTransactionStatus.FINALIZED:
-return "FINALIZED" /* FINALIZED */;
-default:
-return "STARTED" /* STARTED */;
-}
-}
-function mapLayer(layer) {
-switch (layer) {
-case 0:
-return "L1" /* L1 */;
-case 1:
-return "SPARK" /* SPARK */;
-default:
-return "L1" /* L1 */;
-}
-}
-
-// src/utils/type-mappers.ts
-function convertTokenActivityToHexEncoded(rawTransactions) {
+import {
+OperationType,
+OnChainTransactionStatus,
+SparkTransactionStatus,
+Layer
+} from "@buildonspark/spark-sdk/proto/lrc20";
+function convertToTokenActivity(rawTransactions) {
 const response = {
 transactions: rawTransactions.transactions.map((transaction) => {
 if (!transaction.transaction) {
@@ -238,10 +178,10 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
 transaction: {
 $case: "onChain",
 onChain: {
-operationType:
+operationType: getEnumName(OperationType, onChain.operationType),
 transactionHash: bytesToHex(onChain.transactionHash),
 rawtx: bytesToHex(onChain.rawtx),
-status:
+status: getEnumName(OnChainTransactionStatus, onChain.status),
 inputs: onChain.inputs.map((input) => ({
 rawTx: bytesToHex(input.rawTx),
 vout: input.vout,
@@ -267,9 +207,9 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
 transaction: {
 $case: "spark",
 spark: {
-operationType:
+operationType: getEnumName(OperationType, spark.operationType),
 transactionHash: bytesToHex(spark.transactionHash),
-status:
+status: getEnumName(SparkTransactionStatus, spark.status),
 confirmedAt: spark.confirmedAt,
 leavesToCreate: spark.leavesToCreate.map((leaf) => ({
 tokenPublicKey: bytesToHex(leaf.tokenPublicKey),
@@ -312,14 +252,20 @@ function convertTokenActivityToHexEncoded(rawTransactions) {
 lastTransactionHash: bytesToHex(
 rawTransactions.nextCursor.lastTransactionHash
 ),
-layer:
+layer: getEnumName(Layer, rawTransactions.nextCursor.layer)
 } : void 0
 };
 return response;
 }
+function getEnumName(enumObj, value) {
+return enumObj[value];
+}

 // src/issuer-spark-wallet.ts
 import { NotImplementedError } from "@buildonspark/spark-sdk";
+import {
+OperationType as OperationType2
+} from "@buildonspark/spark-sdk/proto/lrc20";
 var BURN_ADDRESS = "02".repeat(33);
 var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
 issuerTokenTransactionService;
@@ -427,7 +373,8 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
 tokenSymbol: info.announcement.symbol,
 tokenDecimals: Number(bytesToNumberBE2(info.announcement.decimal)),
 isFreezable: info.announcement.isFreezable,
-maxSupply: bytesToNumberBE2(info.announcement.maxSupply)
+maxSupply: bytesToNumberBE2(info.announcement.maxSupply),
+totalSupply: bytesToNumberBE2(info.totalSupply)
 };
 } catch (error) {
 throw new NetworkError2("Failed to get token info", {
@@ -534,16 +481,68 @@ var IssuerSparkWallet = class _IssuerSparkWallet extends SparkWallet {
 });
 }
 const lrc20Client = await this.lrc20ConnectionManager.createLrc20Client();
+let convertedCursor;
+if (cursor) {
+const lastTransactionHash = typeof cursor.lastTransactionHash === "string" ? hexToBytes2(cursor.lastTransactionHash) : cursor.lastTransactionHash;
+let layer;
+if (typeof cursor.layer === "string") {
+switch (cursor.layer.toUpperCase()) {
+case "L1":
+layer = Layer2.L1;
+break;
+case "Spark":
+layer = Layer2.SPARK;
+break;
+default:
+layer = Layer2.UNRECOGNIZED;
+}
+} else {
+layer = cursor.layer;
+}
+convertedCursor = {
+lastTransactionHash,
+layer
+};
+}
+let convertedOperationTypes;
+if (operationTypes) {
+if (typeof operationTypes[0] === "string") {
+convertedOperationTypes = operationTypes.map((opType) => {
+switch (opType.toUpperCase()) {
+case "USER_TRANSFER":
+return OperationType2.USER_TRANSFER;
+case "USER_BURN":
+return OperationType2.USER_BURN;
+case "ISSUER_ANNOUNCE":
+return OperationType2.ISSUER_ANNOUNCE;
+case "ISSUER_MINT":
+return OperationType2.ISSUER_MINT;
+case "ISSUER_TRANSFER":
+return OperationType2.ISSUER_TRANSFER;
+case "ISSUER_FREEZE":
+return OperationType2.ISSUER_FREEZE;
+case "ISSUER_UNFREEZE":
+return OperationType2.ISSUER_UNFREEZE;
+case "ISSUER_BURN":
+return OperationType2.ISSUER_BURN;
+default:
+return OperationType2.UNRECOGNIZED;
+}
+});
+} else {
+convertedOperationTypes = operationTypes;
+}
+}
 try {
 const transactions = await lrc20Client.listTransactions({
 tokenPublicKey: hexToBytes2(await super.getIdentityPublicKey()),
-cursor,
+cursor: convertedCursor,
 pageSize,
 beforeTimestamp,
 afterTimestamp,
-operationTypes
+operationTypes: convertedOperationTypes
 });
-return
+return convertToTokenActivity(transactions);
 } catch (error) {
 throw new NetworkError2("Failed to get token activity", {
 operation: "listTransactions",
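One detail worth noting in the cursor handling added above: a hex-string lastTransactionHash is converted to bytes with the same @noble/curves helper the bundle already imports, while a Uint8Array is passed through unchanged. A standalone sketch of that normalization (the cursor type is paraphrased from the new declaration files; the function name is illustrative, not part of the SDK):

import { hexToBytes } from "@noble/curves/abstract/utils";

type ActivityCursor = {
  lastTransactionHash: string | Uint8Array;
  layer: string | number; // "L1" / "Spark", or a proto Layer enum value
};

// Mirrors the conversion inside getIssuerTokenActivity: hex strings become
// byte arrays, byte arrays are used as-is.
function normalizeLastTransactionHash(cursor: ActivityCursor): Uint8Array {
  return typeof cursor.lastTransactionHash === "string"
    ? hexToBytes(cursor.lastTransactionHash)
    : cursor.lastTransactionHash;
}

console.log(normalizeLastTransactionHash({ lastTransactionHash: "00ff", layer: "L1" }));
// Uint8Array(2) [ 0, 255 ]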
package/dist/proto/lrc20.cjs
ADDED

@@ -0,0 +1,35 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+if (from && typeof from === "object" || typeof from === "function") {
+for (let key of __getOwnPropNames(from))
+if (!__hasOwnProp.call(to, key) && key !== except)
+__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+}
+return to;
+};
+var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/proto/lrc20.ts
+var lrc20_exports = {};
+module.exports = __toCommonJS(lrc20_exports);
+
+// buffer.js
+var import_buffer = require("buffer");
+if (typeof globalThis.Buffer === "undefined") {
+globalThis.Buffer = import_buffer.Buffer;
+}
+if (typeof global === "undefined") {
+window.global = window.globalThis;
+}
+
+// src/proto/lrc20.ts
+__reExport(lrc20_exports, require("@buildonspark/spark-sdk/proto/lrc20"), module.exports);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+...require("@buildonspark/spark-sdk/proto/lrc20")
+});
package/dist/proto/spark.cjs
ADDED

@@ -0,0 +1,35 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+if (from && typeof from === "object" || typeof from === "function") {
+for (let key of __getOwnPropNames(from))
+if (!__hasOwnProp.call(to, key) && key !== except)
+__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+}
+return to;
+};
+var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/proto/spark.ts
+var spark_exports = {};
+module.exports = __toCommonJS(spark_exports);
+
+// buffer.js
+var import_buffer = require("buffer");
+if (typeof globalThis.Buffer === "undefined") {
+globalThis.Buffer = import_buffer.Buffer;
+}
+if (typeof global === "undefined") {
+window.global = window.globalThis;
+}
+
+// src/proto/spark.ts
+__reExport(spark_exports, require("@buildonspark/spark-sdk/proto/spark"), module.exports);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+...require("@buildonspark/spark-sdk/proto/spark")
+});
package/dist/proto/spark.d.cts
ADDED

@@ -0,0 +1 @@
+export * from '@buildonspark/spark-sdk/proto/spark';
package/dist/proto/spark.d.ts
ADDED

@@ -0,0 +1 @@
+export * from '@buildonspark/spark-sdk/proto/spark';
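The new dist/proto files are thin wrappers that re-export the corresponding spark-sdk proto modules (the .cjs variants also repeat the Buffer shim used elsewhere in the bundle). Assuming the package.json change wires the ./proto/lrc20 and ./proto/spark subpaths to these files (package.json is listed as changed above but not expanded in this diff), consumers can resolve the proto enums and types through the issuer SDK itself; a hedged sketch:

// Assumption: the exports map exposes these subpaths, which re-export
// @buildonspark/spark-sdk/proto/lrc20 and @buildonspark/spark-sdk/proto/spark.
import { Layer, OperationType } from "@buildonspark/issuer-sdk/proto/lrc20";

// Because these are re-exports, the members are the spark-sdk enums, so values
// line up with what getIssuerTokenActivity and convertToTokenActivity expect.
const op: OperationType = OperationType.ISSUER_MINT;
const layer: Layer = Layer.SPARK;
console.log(op, layer);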