@buildonspark/spark-sdk 0.2.7 → 0.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/dist/bare/index.cjs +1657 -1879
- package/dist/bare/index.d.cts +65 -115
- package/dist/bare/index.d.ts +65 -115
- package/dist/bare/index.js +1578 -1807
- package/dist/{chunk-GIDAHHDB.js → chunk-6CMNEDBK.js} +217 -9
- package/dist/{chunk-J24LM4RO.js → chunk-76SYPHOC.js} +1 -1
- package/dist/{chunk-2HD3USKS.js → chunk-A5M55UR3.js} +0 -24
- package/dist/{chunk-7LY7PJQL.js → chunk-G2RIVSSI.js} +14 -5
- package/dist/{chunk-R5VUHUJR.js → chunk-HMJO7LPI.js} +4160 -3739
- package/dist/{client-DmjOifnt.d.ts → client-B5vlztPA.d.ts} +1 -1
- package/dist/{client-BmnZ1xDg.d.cts → client-TLqKGhj8.d.cts} +1 -1
- package/dist/debug.cjs +1664 -1945
- package/dist/debug.d.cts +12 -7
- package/dist/debug.d.ts +12 -7
- package/dist/debug.js +6 -8
- package/dist/graphql/objects/index.d.cts +3 -3
- package/dist/graphql/objects/index.d.ts +3 -3
- package/dist/index.cjs +1703 -1926
- package/dist/index.d.cts +18 -6
- package/dist/index.d.ts +18 -6
- package/dist/index.js +17 -8
- package/dist/index.node.cjs +1703 -1926
- package/dist/index.node.d.cts +7 -6
- package/dist/index.node.d.ts +7 -6
- package/dist/index.node.js +22 -6
- package/dist/native/index.cjs +1697 -1927
- package/dist/native/index.d.cts +78 -123
- package/dist/native/index.d.ts +78 -123
- package/dist/native/index.js +1626 -1862
- package/dist/proto/spark.cjs +0 -24
- package/dist/proto/spark.d.cts +1 -1
- package/dist/proto/spark.d.ts +1 -1
- package/dist/proto/spark.js +1 -1
- package/dist/proto/spark_token.cjs +221 -8
- package/dist/proto/spark_token.d.cts +25 -2
- package/dist/proto/spark_token.d.ts +25 -2
- package/dist/proto/spark_token.js +12 -2
- package/dist/{spark-B305mDNB.d.cts → spark-CBBjw5AY.d.cts} +2 -30
- package/dist/{spark-B305mDNB.d.ts → spark-CBBjw5AY.d.ts} +2 -30
- package/dist/{spark-wallet-BdwARy70.d.cts → spark-wallet-CIuHgbSR.d.cts} +38 -29
- package/dist/{spark-wallet-enp968Uc.d.ts → spark-wallet-CrNWPa9p.d.ts} +38 -29
- package/dist/{spark-wallet.node-CtpJlYBs.d.cts → spark-wallet.node-BvfiSwh_.d.cts} +1 -1
- package/dist/{spark-wallet.node-DqWcsNb6.d.ts → spark-wallet.node-pnKSb-aa.d.ts} +1 -1
- package/dist/tests/test-utils.cjs +483 -1120
- package/dist/tests/test-utils.d.cts +8 -4
- package/dist/tests/test-utils.d.ts +8 -4
- package/dist/tests/test-utils.js +5 -6
- package/dist/{token-transactions-3-pVToE0.d.cts → token-transactions-ChvD_wLe.d.cts} +15 -10
- package/dist/{token-transactions-84Hp0hGz.d.ts → token-transactions-e8UUOjFk.d.ts} +15 -10
- package/dist/types/index.cjs +0 -24
- package/dist/types/index.d.cts +2 -2
- package/dist/types/index.d.ts +2 -2
- package/dist/types/index.js +2 -2
- package/dist/{xchain-address-BtuJEbzG.d.cts → xchain-address-CDnNobbB.d.ts} +9 -3
- package/dist/{xchain-address-Q1BrcwID.d.ts → xchain-address-CtqVOuWz.d.cts} +9 -3
- package/package.json +1 -1
- package/src/constants.ts +7 -1
- package/src/debug.ts +1 -1
- package/src/proto/spark.ts +2 -48
- package/src/proto/spark_token.ts +255 -7
- package/src/services/token-transactions.ts +57 -14
- package/src/services/transfer.ts +20 -17
- package/src/services/wallet-config.ts +2 -0
- package/src/signer/signer.react-native.ts +0 -2
- package/src/spark-wallet/spark-wallet.browser.ts +9 -8
- package/src/spark-wallet/spark-wallet.node.ts +8 -4
- package/src/spark-wallet/spark-wallet.ts +424 -227
- package/src/tests/address.test.ts +87 -1
- package/src/tests/integration/retry.test.ts +78 -0
- package/src/tests/integration/transfer.test.ts +285 -1
- package/src/tests/integration/wallet.test.ts +160 -0
- package/src/tests/{tokens.test.ts → token-hashing.test.ts} +150 -162
- package/src/tests/token-outputs.test.ts +194 -0
- package/src/tests/utils/spark-testing-wallet.ts +16 -8
- package/src/utils/address.ts +152 -11
- package/src/utils/invoice-hashing.test.ts +235 -0
- package/src/utils/invoice-hashing.ts +227 -0
- package/src/utils/mempool.ts +6 -0
- package/src/utils/retry.ts +116 -0
- package/src/utils/token-hashing.ts +566 -0
- package/src/utils/token-transactions.ts +9 -5
- package/dist/chunk-7N6R7G3E.js +0 -7
- package/dist/spark-wallet.browser-BYlprQpX.d.ts +0 -12
- package/dist/spark-wallet.browser-CVI2Ss3u.d.cts +0 -12
- package/src/services/tree-creation.ts +0 -893
- package/src/tests/integration/tree-creation.test.ts +0 -46
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
/** Tuning knobs for the exponential-backoff retry loop. */
export interface RetryConfig {
  /** Total number of attempts, including the first try. */
  maxAttempts: number;
  /** Delay before the second attempt, in milliseconds. */
  baseDelayMs: number;
  /** Upper bound applied to every computed delay, in milliseconds. */
  maxDelayMs: number;
  /** Multiplier applied to the delay after each failed attempt. */
  backoffFactor: number;
}
|
|
7
|
+
|
|
8
|
+
/** Snapshot of the retry-loop state handed to every callback. */
export interface RetryContext<T, TData = any> {
  /** 1-based index of the current attempt. */
  attempt: number;
  /** Configured attempt limit (mirrors RetryConfig.maxAttempts). */
  maxAttempts: number;
  /** Error raised by the failing attempt (a placeholder before the first failure). */
  error: Error;
  /** Backoff delay computed for this attempt, in milliseconds. */
  delayMs: number;
  /** Operation result, when one is available. */
  result?: T;
  /** Caller-supplied auxiliary data for this attempt. */
  data?: TData;
}
|
|
16
|
+
|
|
17
|
+
/** Optional hooks invoked by the retry loop at each stage. */
export interface RetryCallbacks<T, TData = any> {
  /** Called before each retry (attempt > 1) to refresh the data passed to the operation. */
  fetchData?: (context: RetryContext<T, TData>) => Promise<TData | undefined>;
  /** Called after a failed (non-final) attempt, just before waiting and retrying. */
  onRetry?: (context: RetryContext<T, TData>) => Promise<void> | void;
  /** Called on every error; a non-undefined return value resolves the loop with it. */
  onError?: (
    context: RetryContext<T, TData>,
  ) => Promise<T | undefined> | T | undefined;
  /** Called when the final attempt fails; may supply a fallback result. */
  onMaxAttemptsReached?: (
    context: RetryContext<T, TData>,
  ) => Promise<T | undefined> | T | undefined;
  /** Called once before the first attempt. */
  onStart?: () => Promise<void> | void;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Default retry policy: 5 attempts with exponential backoff starting at
 * 1s, doubling each attempt, and capped at 10s per delay.
 */
export const DEFAULT_RETRY_CONFIG: RetryConfig = {
  maxAttempts: 5,
  baseDelayMs: 1000,
  maxDelayMs: 10000,
  backoffFactor: 2,
};
|
|
35
|
+
|
|
36
|
+
function calculateBackoffDelay(attempt: number, config: RetryConfig): number {
|
|
37
|
+
const delay =
|
|
38
|
+
config.baseDelayMs * Math.pow(config.backoffFactor, attempt - 1);
|
|
39
|
+
return Math.min(delay, config.maxDelayMs);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/** Options bag accepted by withRetry. */
export interface WithRetryOptions<T, TData = any> {
  /** Retry policy; DEFAULT_RETRY_CONFIG is used when omitted. */
  config?: RetryConfig;
  /** Lifecycle hooks; all optional. */
  callbacks?: RetryCallbacks<T, TData>;
}
|
|
46
|
+
|
|
47
|
+
export async function withRetry<T, TData = any>(
|
|
48
|
+
operation: (data?: TData) => Promise<T>,
|
|
49
|
+
options: WithRetryOptions<T, TData> = {},
|
|
50
|
+
): Promise<T> {
|
|
51
|
+
const config = options.config ?? DEFAULT_RETRY_CONFIG;
|
|
52
|
+
const callbacks = options.callbacks ?? {};
|
|
53
|
+
|
|
54
|
+
const { fetchData, onRetry, onError, onMaxAttemptsReached, onStart } =
|
|
55
|
+
callbacks;
|
|
56
|
+
|
|
57
|
+
if (onStart) {
|
|
58
|
+
await onStart();
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
let currentData: TData | undefined = undefined;
|
|
62
|
+
|
|
63
|
+
for (let attempt = 1; attempt <= config.maxAttempts; attempt++) {
|
|
64
|
+
try {
|
|
65
|
+
if (attempt > 1 && fetchData) {
|
|
66
|
+
const context: RetryContext<T, TData> = {
|
|
67
|
+
attempt,
|
|
68
|
+
maxAttempts: config.maxAttempts,
|
|
69
|
+
error: new Error("Placeholder"),
|
|
70
|
+
delayMs: calculateBackoffDelay(attempt, config),
|
|
71
|
+
data: currentData,
|
|
72
|
+
};
|
|
73
|
+
currentData = await fetchData(context);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
return await operation(currentData);
|
|
77
|
+
} catch (error) {
|
|
78
|
+
const lastError =
|
|
79
|
+
error instanceof Error ? error : new Error(String(error));
|
|
80
|
+
const delayMs = calculateBackoffDelay(attempt, config);
|
|
81
|
+
|
|
82
|
+
const context: RetryContext<T, TData> = {
|
|
83
|
+
attempt,
|
|
84
|
+
maxAttempts: config.maxAttempts,
|
|
85
|
+
error: lastError,
|
|
86
|
+
delayMs,
|
|
87
|
+
data: currentData,
|
|
88
|
+
};
|
|
89
|
+
|
|
90
|
+
if (onError) {
|
|
91
|
+
const result = await onError(context);
|
|
92
|
+
if (result) {
|
|
93
|
+
return result;
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
if (attempt === config.maxAttempts) {
|
|
98
|
+
if (onMaxAttemptsReached) {
|
|
99
|
+
const result = await onMaxAttemptsReached(context);
|
|
100
|
+
if (result) {
|
|
101
|
+
return result;
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
throw lastError;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
if (onRetry) {
|
|
108
|
+
await onRetry(context);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
throw new Error("Unexpected retry loop exit");
|
|
116
|
+
}
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
import { sha256 } from "@noble/hashes/sha2";
|
|
2
2
|
import { ValidationError } from "../errors/types.js";
|
|
3
|
+
import { bech32m } from "@scure/base";
|
|
4
|
+
import { SparkAddress } from "../proto/spark.js";
|
|
3
5
|
import {
|
|
4
6
|
OperatorSpecificTokenTransactionSignablePayload,
|
|
5
7
|
TokenTransaction as TokenTransactionV0,
|
|
@@ -19,6 +21,8 @@ export function hashTokenTransaction(
|
|
|
19
21
|
return hashTokenTransactionV0(tokenTransaction, partialHash);
|
|
20
22
|
case 1:
|
|
21
23
|
return hashTokenTransactionV1(tokenTransaction, partialHash);
|
|
24
|
+
case 2:
|
|
25
|
+
return hashTokenTransactionV2(tokenTransaction, partialHash);
|
|
22
26
|
default:
|
|
23
27
|
throw new ValidationError("invalid token transaction version", {
|
|
24
28
|
field: "tokenTransaction.version",
|
|
@@ -924,6 +928,568 @@ export function hashTokenTransactionV1(
|
|
|
924
928
|
return finalHashObj.digest();
|
|
925
929
|
}
|
|
926
930
|
|
|
931
|
+
export function hashTokenTransactionV2(
|
|
932
|
+
tokenTransaction: TokenTransaction,
|
|
933
|
+
partialHash: boolean = false,
|
|
934
|
+
): Uint8Array {
|
|
935
|
+
if (!tokenTransaction) {
|
|
936
|
+
throw new ValidationError("token transaction cannot be nil", {
|
|
937
|
+
field: "tokenTransaction",
|
|
938
|
+
});
|
|
939
|
+
}
|
|
940
|
+
|
|
941
|
+
let allHashes: Uint8Array[] = [];
|
|
942
|
+
|
|
943
|
+
// Hash version
|
|
944
|
+
const versionHashObj = sha256.create();
|
|
945
|
+
const versionBytes = new Uint8Array(4);
|
|
946
|
+
new DataView(versionBytes.buffer).setUint32(
|
|
947
|
+
0,
|
|
948
|
+
tokenTransaction.version,
|
|
949
|
+
false, // false for big-endian
|
|
950
|
+
);
|
|
951
|
+
versionHashObj.update(versionBytes);
|
|
952
|
+
allHashes.push(versionHashObj.digest());
|
|
953
|
+
|
|
954
|
+
// Hash transaction type
|
|
955
|
+
const typeHashObj = sha256.create();
|
|
956
|
+
const typeBytes = new Uint8Array(4);
|
|
957
|
+
let transactionType = 0;
|
|
958
|
+
|
|
959
|
+
if (tokenTransaction.tokenInputs?.$case === "mintInput") {
|
|
960
|
+
transactionType = TokenTransactionType.TOKEN_TRANSACTION_TYPE_MINT;
|
|
961
|
+
} else if (tokenTransaction.tokenInputs?.$case === "transferInput") {
|
|
962
|
+
transactionType = TokenTransactionType.TOKEN_TRANSACTION_TYPE_TRANSFER;
|
|
963
|
+
} else if (tokenTransaction.tokenInputs?.$case === "createInput") {
|
|
964
|
+
transactionType = TokenTransactionType.TOKEN_TRANSACTION_TYPE_CREATE;
|
|
965
|
+
} else {
|
|
966
|
+
throw new ValidationError(
|
|
967
|
+
"token transaction must have exactly one input type",
|
|
968
|
+
{
|
|
969
|
+
field: "tokenInputs",
|
|
970
|
+
},
|
|
971
|
+
);
|
|
972
|
+
}
|
|
973
|
+
|
|
974
|
+
new DataView(typeBytes.buffer).setUint32(0, transactionType, false);
|
|
975
|
+
typeHashObj.update(typeBytes);
|
|
976
|
+
allHashes.push(typeHashObj.digest());
|
|
977
|
+
|
|
978
|
+
// Hash token inputs based on type
|
|
979
|
+
if (tokenTransaction.tokenInputs?.$case === "transferInput") {
|
|
980
|
+
if (!tokenTransaction.tokenInputs.transferInput.outputsToSpend) {
|
|
981
|
+
throw new ValidationError("outputs to spend cannot be null", {
|
|
982
|
+
field: "tokenInputs.transferInput.outputsToSpend",
|
|
983
|
+
});
|
|
984
|
+
}
|
|
985
|
+
|
|
986
|
+
if (
|
|
987
|
+
tokenTransaction.tokenInputs.transferInput.outputsToSpend.length === 0
|
|
988
|
+
) {
|
|
989
|
+
throw new ValidationError("outputs to spend cannot be empty", {
|
|
990
|
+
field: "tokenInputs.transferInput.outputsToSpend",
|
|
991
|
+
});
|
|
992
|
+
}
|
|
993
|
+
|
|
994
|
+
// Hash outputs to spend length
|
|
995
|
+
const outputsLenHashObj = sha256.create();
|
|
996
|
+
const outputsLenBytes = new Uint8Array(4);
|
|
997
|
+
new DataView(outputsLenBytes.buffer).setUint32(
|
|
998
|
+
0,
|
|
999
|
+
tokenTransaction.tokenInputs.transferInput.outputsToSpend.length,
|
|
1000
|
+
false,
|
|
1001
|
+
);
|
|
1002
|
+
outputsLenHashObj.update(outputsLenBytes);
|
|
1003
|
+
allHashes.push(outputsLenHashObj.digest());
|
|
1004
|
+
|
|
1005
|
+
// Hash outputs to spend
|
|
1006
|
+
for (const [
|
|
1007
|
+
i,
|
|
1008
|
+
output,
|
|
1009
|
+
] of tokenTransaction.tokenInputs!.transferInput!.outputsToSpend.entries()) {
|
|
1010
|
+
if (!output) {
|
|
1011
|
+
throw new ValidationError(`output cannot be null at index ${i}`, {
|
|
1012
|
+
field: `tokenInputs.transferInput.outputsToSpend[${i}]`,
|
|
1013
|
+
index: i,
|
|
1014
|
+
});
|
|
1015
|
+
}
|
|
1016
|
+
|
|
1017
|
+
const hashObj = sha256.create();
|
|
1018
|
+
|
|
1019
|
+
if (output.prevTokenTransactionHash) {
|
|
1020
|
+
const prevHash = output.prevTokenTransactionHash;
|
|
1021
|
+
if (output.prevTokenTransactionHash.length !== 32) {
|
|
1022
|
+
throw new ValidationError(
|
|
1023
|
+
`invalid previous transaction hash length at index ${i}`,
|
|
1024
|
+
{
|
|
1025
|
+
field: `tokenInputs.transferInput.outputsToSpend[${i}].prevTokenTransactionHash`,
|
|
1026
|
+
value: prevHash,
|
|
1027
|
+
expectedLength: 32,
|
|
1028
|
+
actualLength: prevHash.length,
|
|
1029
|
+
index: i,
|
|
1030
|
+
},
|
|
1031
|
+
);
|
|
1032
|
+
}
|
|
1033
|
+
hashObj.update(output.prevTokenTransactionHash);
|
|
1034
|
+
}
|
|
1035
|
+
|
|
1036
|
+
const voutBytes = new Uint8Array(4);
|
|
1037
|
+
new DataView(voutBytes.buffer).setUint32(
|
|
1038
|
+
0,
|
|
1039
|
+
output.prevTokenTransactionVout,
|
|
1040
|
+
false,
|
|
1041
|
+
); // false for big-endian
|
|
1042
|
+
hashObj.update(voutBytes);
|
|
1043
|
+
|
|
1044
|
+
allHashes.push(hashObj.digest());
|
|
1045
|
+
}
|
|
1046
|
+
} else if (tokenTransaction.tokenInputs?.$case === "mintInput") {
|
|
1047
|
+
const hashObj = sha256.create();
|
|
1048
|
+
|
|
1049
|
+
if (tokenTransaction.tokenInputs.mintInput!.issuerPublicKey) {
|
|
1050
|
+
const issuerPubKey: Uint8Array =
|
|
1051
|
+
tokenTransaction.tokenInputs.mintInput.issuerPublicKey;
|
|
1052
|
+
if (issuerPubKey.length === 0) {
|
|
1053
|
+
throw new ValidationError("issuer public key cannot be empty", {
|
|
1054
|
+
field: "tokenInputs.mintInput.issuerPublicKey",
|
|
1055
|
+
value: issuerPubKey,
|
|
1056
|
+
expectedLength: 1,
|
|
1057
|
+
actualLength: 0,
|
|
1058
|
+
});
|
|
1059
|
+
}
|
|
1060
|
+
hashObj.update(issuerPubKey);
|
|
1061
|
+
allHashes.push(hashObj.digest());
|
|
1062
|
+
|
|
1063
|
+
const tokenIdentifierHashObj = sha256.create();
|
|
1064
|
+
if (tokenTransaction.tokenInputs.mintInput.tokenIdentifier) {
|
|
1065
|
+
tokenIdentifierHashObj.update(
|
|
1066
|
+
tokenTransaction.tokenInputs.mintInput.tokenIdentifier,
|
|
1067
|
+
);
|
|
1068
|
+
} else {
|
|
1069
|
+
tokenIdentifierHashObj.update(new Uint8Array(32));
|
|
1070
|
+
}
|
|
1071
|
+
allHashes.push(tokenIdentifierHashObj.digest());
|
|
1072
|
+
}
|
|
1073
|
+
} else if (tokenTransaction.tokenInputs?.$case === "createInput") {
|
|
1074
|
+
const createInput = tokenTransaction.tokenInputs.createInput!;
|
|
1075
|
+
|
|
1076
|
+
// Hash issuer public key
|
|
1077
|
+
const issuerPubKeyHashObj = sha256.create();
|
|
1078
|
+
if (
|
|
1079
|
+
!createInput.issuerPublicKey ||
|
|
1080
|
+
createInput.issuerPublicKey.length === 0
|
|
1081
|
+
) {
|
|
1082
|
+
throw new ValidationError("issuer public key cannot be nil or empty", {
|
|
1083
|
+
field: "tokenInputs.createInput.issuerPublicKey",
|
|
1084
|
+
});
|
|
1085
|
+
}
|
|
1086
|
+
issuerPubKeyHashObj.update(createInput.issuerPublicKey);
|
|
1087
|
+
allHashes.push(issuerPubKeyHashObj.digest());
|
|
1088
|
+
|
|
1089
|
+
// Hash token name
|
|
1090
|
+
const tokenNameHashObj = sha256.create();
|
|
1091
|
+
if (!createInput.tokenName || createInput.tokenName.length === 0) {
|
|
1092
|
+
throw new ValidationError("token name cannot be empty", {
|
|
1093
|
+
field: "tokenInputs.createInput.tokenName",
|
|
1094
|
+
});
|
|
1095
|
+
}
|
|
1096
|
+
if (createInput.tokenName.length > 20) {
|
|
1097
|
+
throw new ValidationError("token name cannot be longer than 20 bytes", {
|
|
1098
|
+
field: "tokenInputs.createInput.tokenName",
|
|
1099
|
+
value: createInput.tokenName,
|
|
1100
|
+
expectedLength: 20,
|
|
1101
|
+
actualLength: createInput.tokenName.length,
|
|
1102
|
+
});
|
|
1103
|
+
}
|
|
1104
|
+
const tokenNameEncoder = new TextEncoder();
|
|
1105
|
+
tokenNameHashObj.update(tokenNameEncoder.encode(createInput.tokenName));
|
|
1106
|
+
allHashes.push(tokenNameHashObj.digest());
|
|
1107
|
+
|
|
1108
|
+
// Hash token ticker
|
|
1109
|
+
const tokenTickerHashObj = sha256.create();
|
|
1110
|
+
if (!createInput.tokenTicker || createInput.tokenTicker.length === 0) {
|
|
1111
|
+
throw new ValidationError("token ticker cannot be empty", {
|
|
1112
|
+
field: "tokenInputs.createInput.tokenTicker",
|
|
1113
|
+
});
|
|
1114
|
+
}
|
|
1115
|
+
if (createInput.tokenTicker.length > 6) {
|
|
1116
|
+
throw new ValidationError("token ticker cannot be longer than 6 bytes", {
|
|
1117
|
+
field: "tokenInputs.createInput.tokenTicker",
|
|
1118
|
+
value: createInput.tokenTicker,
|
|
1119
|
+
expectedLength: 6,
|
|
1120
|
+
actualLength: createInput.tokenTicker.length,
|
|
1121
|
+
});
|
|
1122
|
+
}
|
|
1123
|
+
const tokenTickerEncoder = new TextEncoder();
|
|
1124
|
+
tokenTickerHashObj.update(
|
|
1125
|
+
tokenTickerEncoder.encode(createInput.tokenTicker),
|
|
1126
|
+
);
|
|
1127
|
+
allHashes.push(tokenTickerHashObj.digest());
|
|
1128
|
+
|
|
1129
|
+
// Hash decimals
|
|
1130
|
+
const decimalsHashObj = sha256.create();
|
|
1131
|
+
const decimalsBytes = new Uint8Array(4);
|
|
1132
|
+
new DataView(decimalsBytes.buffer).setUint32(
|
|
1133
|
+
0,
|
|
1134
|
+
createInput.decimals,
|
|
1135
|
+
false,
|
|
1136
|
+
);
|
|
1137
|
+
decimalsHashObj.update(decimalsBytes);
|
|
1138
|
+
allHashes.push(decimalsHashObj.digest());
|
|
1139
|
+
|
|
1140
|
+
// Hash max supply (fixed 16 bytes)
|
|
1141
|
+
const maxSupplyHashObj = sha256.create();
|
|
1142
|
+
if (!createInput.maxSupply) {
|
|
1143
|
+
throw new ValidationError("max supply cannot be nil", {
|
|
1144
|
+
field: "tokenInputs.createInput.maxSupply",
|
|
1145
|
+
});
|
|
1146
|
+
}
|
|
1147
|
+
if (createInput.maxSupply.length !== 16) {
|
|
1148
|
+
throw new ValidationError("max supply must be exactly 16 bytes", {
|
|
1149
|
+
field: "tokenInputs.createInput.maxSupply",
|
|
1150
|
+
value: createInput.maxSupply,
|
|
1151
|
+
expectedLength: 16,
|
|
1152
|
+
actualLength: createInput.maxSupply.length,
|
|
1153
|
+
});
|
|
1154
|
+
}
|
|
1155
|
+
maxSupplyHashObj.update(createInput.maxSupply);
|
|
1156
|
+
allHashes.push(maxSupplyHashObj.digest());
|
|
1157
|
+
|
|
1158
|
+
// Hash is freezable
|
|
1159
|
+
const isFreezableHashObj = sha256.create();
|
|
1160
|
+
isFreezableHashObj.update(
|
|
1161
|
+
new Uint8Array([createInput.isFreezable ? 1 : 0]),
|
|
1162
|
+
);
|
|
1163
|
+
allHashes.push(isFreezableHashObj.digest());
|
|
1164
|
+
|
|
1165
|
+
// Hash creation entity public key (only for final hash)
|
|
1166
|
+
const creationEntityHashObj = sha256.create();
|
|
1167
|
+
if (!partialHash && createInput.creationEntityPublicKey) {
|
|
1168
|
+
creationEntityHashObj.update(createInput.creationEntityPublicKey);
|
|
1169
|
+
}
|
|
1170
|
+
allHashes.push(creationEntityHashObj.digest());
|
|
1171
|
+
}
|
|
1172
|
+
|
|
1173
|
+
// Hash token outputs (length + contents)
|
|
1174
|
+
if (!tokenTransaction.tokenOutputs) {
|
|
1175
|
+
throw new ValidationError("token outputs cannot be null", {
|
|
1176
|
+
field: "tokenOutputs",
|
|
1177
|
+
});
|
|
1178
|
+
}
|
|
1179
|
+
|
|
1180
|
+
// Hash outputs length
|
|
1181
|
+
const outputsLenHashObj = sha256.create();
|
|
1182
|
+
const outputsLenBytes = new Uint8Array(4);
|
|
1183
|
+
new DataView(outputsLenBytes.buffer).setUint32(
|
|
1184
|
+
0,
|
|
1185
|
+
tokenTransaction.tokenOutputs.length,
|
|
1186
|
+
false,
|
|
1187
|
+
);
|
|
1188
|
+
outputsLenHashObj.update(outputsLenBytes);
|
|
1189
|
+
allHashes.push(outputsLenHashObj.digest());
|
|
1190
|
+
|
|
1191
|
+
for (const [i, output] of tokenTransaction.tokenOutputs.entries()) {
|
|
1192
|
+
if (!output) {
|
|
1193
|
+
throw new ValidationError(`output cannot be null at index ${i}`, {
|
|
1194
|
+
field: `tokenOutputs[${i}]`,
|
|
1195
|
+
index: i,
|
|
1196
|
+
});
|
|
1197
|
+
}
|
|
1198
|
+
|
|
1199
|
+
const hashObj = sha256.create();
|
|
1200
|
+
|
|
1201
|
+
// Only hash ID if it's not empty and not in partial hash mode
|
|
1202
|
+
if (output.id && !partialHash) {
|
|
1203
|
+
if (output.id.length === 0) {
|
|
1204
|
+
throw new ValidationError(`output ID at index ${i} cannot be empty`, {
|
|
1205
|
+
field: `tokenOutputs[${i}].id`,
|
|
1206
|
+
index: i,
|
|
1207
|
+
});
|
|
1208
|
+
}
|
|
1209
|
+
hashObj.update(new TextEncoder().encode(output.id));
|
|
1210
|
+
}
|
|
1211
|
+
if (output.ownerPublicKey) {
|
|
1212
|
+
if (output.ownerPublicKey.length === 0) {
|
|
1213
|
+
throw new ValidationError(
|
|
1214
|
+
`owner public key at index ${i} cannot be empty`,
|
|
1215
|
+
{
|
|
1216
|
+
field: `tokenOutputs[${i}].ownerPublicKey`,
|
|
1217
|
+
index: i,
|
|
1218
|
+
},
|
|
1219
|
+
);
|
|
1220
|
+
}
|
|
1221
|
+
hashObj.update(output.ownerPublicKey);
|
|
1222
|
+
}
|
|
1223
|
+
|
|
1224
|
+
if (!partialHash) {
|
|
1225
|
+
const revPubKey = output.revocationCommitment!!;
|
|
1226
|
+
if (revPubKey) {
|
|
1227
|
+
if (revPubKey.length === 0) {
|
|
1228
|
+
throw new ValidationError(
|
|
1229
|
+
`revocation commitment at index ${i} cannot be empty`,
|
|
1230
|
+
{
|
|
1231
|
+
field: `tokenOutputs[${i}].revocationCommitment`,
|
|
1232
|
+
index: i,
|
|
1233
|
+
},
|
|
1234
|
+
);
|
|
1235
|
+
}
|
|
1236
|
+
hashObj.update(revPubKey);
|
|
1237
|
+
}
|
|
1238
|
+
|
|
1239
|
+
const bondBytes = new Uint8Array(8);
|
|
1240
|
+
new DataView(bondBytes.buffer).setBigUint64(
|
|
1241
|
+
0,
|
|
1242
|
+
BigInt(output.withdrawBondSats!),
|
|
1243
|
+
false,
|
|
1244
|
+
);
|
|
1245
|
+
hashObj.update(bondBytes);
|
|
1246
|
+
|
|
1247
|
+
const locktimeBytes = new Uint8Array(8);
|
|
1248
|
+
new DataView(locktimeBytes.buffer).setBigUint64(
|
|
1249
|
+
0,
|
|
1250
|
+
BigInt(output.withdrawRelativeBlockLocktime!),
|
|
1251
|
+
false,
|
|
1252
|
+
);
|
|
1253
|
+
hashObj.update(locktimeBytes);
|
|
1254
|
+
}
|
|
1255
|
+
|
|
1256
|
+
// Hash token public key (33 bytes if present, otherwise 33 zero bytes)
|
|
1257
|
+
if (!output.tokenPublicKey || output.tokenPublicKey.length === 0) {
|
|
1258
|
+
hashObj.update(new Uint8Array(33));
|
|
1259
|
+
} else {
|
|
1260
|
+
hashObj.update(output.tokenPublicKey);
|
|
1261
|
+
}
|
|
1262
|
+
|
|
1263
|
+
// Hash token identifier (32 bytes if present, otherwise 32 zero bytes)
|
|
1264
|
+
if (!output.tokenIdentifier || output.tokenIdentifier.length === 0) {
|
|
1265
|
+
hashObj.update(new Uint8Array(32));
|
|
1266
|
+
} else {
|
|
1267
|
+
hashObj.update(output.tokenIdentifier);
|
|
1268
|
+
}
|
|
1269
|
+
|
|
1270
|
+
if (output.tokenAmount) {
|
|
1271
|
+
if (output.tokenAmount.length === 0) {
|
|
1272
|
+
throw new ValidationError(
|
|
1273
|
+
`token amount at index ${i} cannot be empty`,
|
|
1274
|
+
{
|
|
1275
|
+
field: `tokenOutputs[${i}].tokenAmount`,
|
|
1276
|
+
index: i,
|
|
1277
|
+
},
|
|
1278
|
+
);
|
|
1279
|
+
}
|
|
1280
|
+
if (output.tokenAmount.length > 16) {
|
|
1281
|
+
throw new ValidationError(
|
|
1282
|
+
`token amount at index ${i} exceeds maximum length`,
|
|
1283
|
+
{
|
|
1284
|
+
field: `tokenOutputs[${i}].tokenAmount`,
|
|
1285
|
+
value: output.tokenAmount,
|
|
1286
|
+
expectedLength: 16,
|
|
1287
|
+
actualLength: output.tokenAmount.length,
|
|
1288
|
+
index: i,
|
|
1289
|
+
},
|
|
1290
|
+
);
|
|
1291
|
+
}
|
|
1292
|
+
hashObj.update(output.tokenAmount);
|
|
1293
|
+
}
|
|
1294
|
+
|
|
1295
|
+
allHashes.push(hashObj.digest());
|
|
1296
|
+
}
|
|
1297
|
+
|
|
1298
|
+
if (!tokenTransaction.sparkOperatorIdentityPublicKeys) {
|
|
1299
|
+
throw new ValidationError(
|
|
1300
|
+
"spark operator identity public keys cannot be null",
|
|
1301
|
+
{},
|
|
1302
|
+
);
|
|
1303
|
+
}
|
|
1304
|
+
|
|
1305
|
+
// Sort operator public keys before hashing
|
|
1306
|
+
const sortedPubKeys = [
|
|
1307
|
+
...(tokenTransaction.sparkOperatorIdentityPublicKeys || []),
|
|
1308
|
+
].sort((a, b) => {
|
|
1309
|
+
for (let i = 0; i < a.length && i < b.length; i++) {
|
|
1310
|
+
// @ts-ignore - i < a and b length
|
|
1311
|
+
if (a[i] !== b[i]) return a[i] - b[i];
|
|
1312
|
+
}
|
|
1313
|
+
return a.length - b.length;
|
|
1314
|
+
});
|
|
1315
|
+
|
|
1316
|
+
// Hash spark operator identity public keys length
|
|
1317
|
+
const operatorLenHashObj = sha256.create();
|
|
1318
|
+
const operatorLenBytes = new Uint8Array(4);
|
|
1319
|
+
new DataView(operatorLenBytes.buffer).setUint32(
|
|
1320
|
+
0,
|
|
1321
|
+
sortedPubKeys.length,
|
|
1322
|
+
false,
|
|
1323
|
+
);
|
|
1324
|
+
operatorLenHashObj.update(operatorLenBytes);
|
|
1325
|
+
allHashes.push(operatorLenHashObj.digest());
|
|
1326
|
+
|
|
1327
|
+
// Hash spark operator identity public keys
|
|
1328
|
+
for (const [i, pubKey] of sortedPubKeys.entries()) {
|
|
1329
|
+
if (!pubKey) {
|
|
1330
|
+
throw new ValidationError(
|
|
1331
|
+
`operator public key at index ${i} cannot be null`,
|
|
1332
|
+
{
|
|
1333
|
+
field: `sparkOperatorIdentityPublicKeys[${i}]`,
|
|
1334
|
+
index: i,
|
|
1335
|
+
},
|
|
1336
|
+
);
|
|
1337
|
+
}
|
|
1338
|
+
if (pubKey.length === 0) {
|
|
1339
|
+
throw new ValidationError(
|
|
1340
|
+
`operator public key at index ${i} cannot be empty`,
|
|
1341
|
+
{
|
|
1342
|
+
field: `sparkOperatorIdentityPublicKeys[${i}]`,
|
|
1343
|
+
index: i,
|
|
1344
|
+
},
|
|
1345
|
+
);
|
|
1346
|
+
}
|
|
1347
|
+
const hashObj = sha256.create();
|
|
1348
|
+
hashObj.update(pubKey);
|
|
1349
|
+
allHashes.push(hashObj.digest());
|
|
1350
|
+
}
|
|
1351
|
+
|
|
1352
|
+
// Hash the network field
|
|
1353
|
+
const hashObj = sha256.create();
|
|
1354
|
+
let networkBytes = new Uint8Array(4);
|
|
1355
|
+
new DataView(networkBytes.buffer).setUint32(
|
|
1356
|
+
0,
|
|
1357
|
+
tokenTransaction.network.valueOf(),
|
|
1358
|
+
false, // false for big-endian
|
|
1359
|
+
);
|
|
1360
|
+
hashObj.update(networkBytes);
|
|
1361
|
+
allHashes.push(hashObj.digest());
|
|
1362
|
+
|
|
1363
|
+
// Hash client created timestamp
|
|
1364
|
+
const clientTimestampHashObj = sha256.create();
|
|
1365
|
+
const clientCreatedTs: Date | undefined = (tokenTransaction as any)
|
|
1366
|
+
.clientCreatedTimestamp;
|
|
1367
|
+
if (!clientCreatedTs) {
|
|
1368
|
+
throw new ValidationError(
|
|
1369
|
+
"client created timestamp cannot be null for V1 token transactions",
|
|
1370
|
+
{
|
|
1371
|
+
field: "clientCreatedTimestamp",
|
|
1372
|
+
},
|
|
1373
|
+
);
|
|
1374
|
+
}
|
|
1375
|
+
const clientUnixTime = clientCreatedTs.getTime();
|
|
1376
|
+
const clientTimestampBytes = new Uint8Array(8);
|
|
1377
|
+
new DataView(clientTimestampBytes.buffer).setBigUint64(
|
|
1378
|
+
0,
|
|
1379
|
+
BigInt(clientUnixTime),
|
|
1380
|
+
false,
|
|
1381
|
+
);
|
|
1382
|
+
clientTimestampHashObj.update(clientTimestampBytes);
|
|
1383
|
+
allHashes.push(clientTimestampHashObj.digest());
|
|
1384
|
+
|
|
1385
|
+
if (!partialHash) {
|
|
1386
|
+
// Hash expiry time
|
|
1387
|
+
const expiryHashObj = sha256.create();
|
|
1388
|
+
const expiryTimeBytes = new Uint8Array(8);
|
|
1389
|
+
const expiryUnixTime = tokenTransaction.expiryTime
|
|
1390
|
+
? Math.floor(tokenTransaction.expiryTime.getTime() / 1000)
|
|
1391
|
+
: 0;
|
|
1392
|
+
new DataView(expiryTimeBytes.buffer).setBigUint64(
|
|
1393
|
+
0,
|
|
1394
|
+
BigInt(expiryUnixTime),
|
|
1395
|
+
false, // false for big-endian
|
|
1396
|
+
);
|
|
1397
|
+
expiryHashObj.update(expiryTimeBytes);
|
|
1398
|
+
allHashes.push(expiryHashObj.digest());
|
|
1399
|
+
}
|
|
1400
|
+
|
|
1401
|
+
// Hash invoice attachments
|
|
1402
|
+
const attachments = tokenTransaction.invoiceAttachments;
|
|
1403
|
+
|
|
1404
|
+
// Hash attachments length (uint32 BE)
|
|
1405
|
+
const lenHash = sha256.create();
|
|
1406
|
+
const lenBytes = new Uint8Array(4);
|
|
1407
|
+
new DataView(lenBytes.buffer).setUint32(
|
|
1408
|
+
0,
|
|
1409
|
+
attachments ? attachments.length : 0,
|
|
1410
|
+
false,
|
|
1411
|
+
);
|
|
1412
|
+
lenHash.update(lenBytes);
|
|
1413
|
+
allHashes.push(lenHash.digest());
|
|
1414
|
+
|
|
1415
|
+
type Keyed = { id: Uint8Array; raw: string };
|
|
1416
|
+
const sortedInvoices: Keyed[] = [];
|
|
1417
|
+
|
|
1418
|
+
if (attachments) {
|
|
1419
|
+
for (let i = 0; i < attachments.length; i++) {
|
|
1420
|
+
const attachment = attachments[i];
|
|
1421
|
+
if (!attachment) {
|
|
1422
|
+
throw new ValidationError(
|
|
1423
|
+
`invoice attachment at index ${i} cannot be null`,
|
|
1424
|
+
{
|
|
1425
|
+
field: `invoiceAttachments[${i}]`,
|
|
1426
|
+
index: i,
|
|
1427
|
+
},
|
|
1428
|
+
);
|
|
1429
|
+
}
|
|
1430
|
+
const invoice = attachment.sparkInvoice;
|
|
1431
|
+
|
|
1432
|
+
let idBytes: Uint8Array | undefined;
|
|
1433
|
+
try {
|
|
1434
|
+
const decoded = bech32m.decode(invoice as any, 500);
|
|
1435
|
+
const payload = SparkAddress.decode(bech32m.fromWords(decoded.words));
|
|
1436
|
+
if (!payload.sparkInvoiceFields || !payload.sparkInvoiceFields.id) {
|
|
1437
|
+
throw new Error("missing spark invoice fields or id");
|
|
1438
|
+
}
|
|
1439
|
+
idBytes = payload.sparkInvoiceFields.id;
|
|
1440
|
+
} catch (err) {
|
|
1441
|
+
throw new ValidationError(
|
|
1442
|
+
`invalid invoice at ${i}`,
|
|
1443
|
+
{
|
|
1444
|
+
field: `invoiceAttachments[${i}].sparkInvoice`,
|
|
1445
|
+
index: i,
|
|
1446
|
+
value: invoice,
|
|
1447
|
+
},
|
|
1448
|
+
err as Error,
|
|
1449
|
+
);
|
|
1450
|
+
}
|
|
1451
|
+
if (!idBytes || idBytes.length !== 16) {
|
|
1452
|
+
throw new ValidationError(`invalid invoice id at ${i}`, {
|
|
1453
|
+
field: `invoiceAttachments[${i}].sparkInvoice`,
|
|
1454
|
+
index: i,
|
|
1455
|
+
});
|
|
1456
|
+
}
|
|
1457
|
+
sortedInvoices.push({ id: idBytes, raw: invoice });
|
|
1458
|
+
}
|
|
1459
|
+
}
|
|
1460
|
+
|
|
1461
|
+
// Sort by UUID bytes (lexicographically)
|
|
1462
|
+
sortedInvoices.sort((a, b) => {
|
|
1463
|
+
for (let j = 0; j < a.id.length && j < b.id.length; j++) {
|
|
1464
|
+
const av = a.id[j] as number;
|
|
1465
|
+
const bv = b.id[j] as number;
|
|
1466
|
+
if (av !== bv) return av - bv;
|
|
1467
|
+
}
|
|
1468
|
+
return a.id.length - b.id.length;
|
|
1469
|
+
});
|
|
1470
|
+
|
|
1471
|
+
// Hash raw invoice strings (UTF-8)
|
|
1472
|
+
const encoder = new TextEncoder();
|
|
1473
|
+
for (const k of sortedInvoices) {
|
|
1474
|
+
const h = sha256.create();
|
|
1475
|
+
h.update(encoder.encode(k.raw));
|
|
1476
|
+
allHashes.push(h.digest());
|
|
1477
|
+
}
|
|
1478
|
+
|
|
1479
|
+
// Final hash of all concatenated hashes
|
|
1480
|
+
const finalHashObj = sha256.create();
|
|
1481
|
+
const concatenatedHashes = new Uint8Array(
|
|
1482
|
+
allHashes.reduce((sum, hash) => sum + hash.length, 0),
|
|
1483
|
+
);
|
|
1484
|
+
let offset = 0;
|
|
1485
|
+
for (const hash of allHashes) {
|
|
1486
|
+
concatenatedHashes.set(hash, offset);
|
|
1487
|
+
offset += hash.length;
|
|
1488
|
+
}
|
|
1489
|
+
finalHashObj.update(concatenatedHashes);
|
|
1490
|
+
return finalHashObj.digest();
|
|
1491
|
+
}
|
|
1492
|
+
|
|
927
1493
|
export function hashOperatorSpecificTokenTransactionSignablePayload(
|
|
928
1494
|
payload: OperatorSpecificTokenTransactionSignablePayload,
|
|
929
1495
|
): Uint8Array {
|