@sequence0/sdk 2.0.1 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -10
- package/dist/chains/casper.d.ts +74 -0
- package/dist/chains/casper.d.ts.map +1 -0
- package/dist/chains/casper.js +512 -0
- package/dist/chains/casper.js.map +1 -0
- package/dist/chains/cosmos.d.ts +22 -0
- package/dist/chains/cosmos.d.ts.map +1 -1
- package/dist/chains/cosmos.js +113 -12
- package/dist/chains/cosmos.js.map +1 -1
- package/dist/chains/ethereum.d.ts.map +1 -1
- package/dist/chains/ethereum.js +14 -2
- package/dist/chains/ethereum.js.map +1 -1
- package/dist/chains/flow.d.ts +57 -0
- package/dist/chains/flow.d.ts.map +1 -0
- package/dist/chains/flow.js +435 -0
- package/dist/chains/flow.js.map +1 -0
- package/dist/chains/icp.d.ts.map +1 -1
- package/dist/chains/icp.js +483 -67
- package/dist/chains/icp.js.map +1 -1
- package/dist/chains/iota.d.ts +80 -0
- package/dist/chains/iota.d.ts.map +1 -0
- package/dist/chains/iota.js +502 -0
- package/dist/chains/iota.js.map +1 -0
- package/dist/chains/kadena.d.ts +81 -0
- package/dist/chains/kadena.d.ts.map +1 -0
- package/dist/chains/kadena.js +356 -0
- package/dist/chains/kadena.js.map +1 -0
- package/dist/chains/near.d.ts +4 -1
- package/dist/chains/near.d.ts.map +1 -1
- package/dist/chains/near.js +58 -15
- package/dist/chains/near.js.map +1 -1
- package/dist/chains/nervos.d.ts +148 -0
- package/dist/chains/nervos.d.ts.map +1 -0
- package/dist/chains/nervos.js +913 -0
- package/dist/chains/nervos.js.map +1 -0
- package/dist/chains/radix.d.ts +81 -0
- package/dist/chains/radix.d.ts.map +1 -0
- package/dist/chains/radix.js +289 -0
- package/dist/chains/radix.js.map +1 -0
- package/dist/chains/solana.d.ts +4 -0
- package/dist/chains/solana.d.ts.map +1 -1
- package/dist/chains/solana.js +47 -13
- package/dist/chains/solana.js.map +1 -1
- package/dist/chains/stacks.d.ts +113 -0
- package/dist/chains/stacks.d.ts.map +1 -0
- package/dist/chains/stacks.js +576 -0
- package/dist/chains/stacks.js.map +1 -0
- package/dist/chains/sui.d.ts +11 -0
- package/dist/chains/sui.d.ts.map +1 -1
- package/dist/chains/sui.js +49 -8
- package/dist/chains/sui.js.map +1 -1
- package/dist/core/client.js +1 -1
- package/dist/core/client.js.map +1 -1
- package/dist/core/solvency.d.ts +1 -1
- package/dist/core/solvency.js +1 -1
- package/dist/core/types.d.ts +94 -2
- package/dist/core/types.d.ts.map +1 -1
- package/dist/core/universal-account.d.ts +1 -1
- package/dist/core/universal-account.js +1 -1
- package/dist/core/witness.d.ts +1 -1
- package/dist/core/witness.js +1 -1
- package/dist/settlement/settlement.d.ts +1 -1
- package/dist/settlement/settlement.js +1 -1
- package/dist/utils/discovery.d.ts.map +1 -1
- package/dist/utils/discovery.js +19 -2
- package/dist/utils/discovery.js.map +1 -1
- package/dist/utils/http.d.ts +1 -1
- package/dist/utils/http.js +1 -1
- package/dist/wallet/wallet.d.ts.map +1 -1
- package/dist/wallet/wallet.js +45 -0
- package/dist/wallet/wallet.js.map +1 -1
- package/package.json +1 -1
package/dist/chains/icp.js
CHANGED
|
@@ -101,40 +101,462 @@ function decodeAccountId(accountId) {
|
|
|
101
101
|
return Buffer.from(hex, 'hex');
|
|
102
102
|
}
|
|
103
103
|
/**
 * Compute a Candid field hash (idlHash) for a record field name.
 *
 * Per the Candid specification, the hash of a field name is the fold:
 *   h = 0;  for each character c:  h = (h * 223 + charCode(c)) mod 2^32
 *
 * @param {string} name - The field name to hash.
 * @returns {number} Unsigned 32-bit field hash.
 */
function candidFieldHash(name) {
    let hash = 0;
    let i = 0;
    while (i < name.length) {
        // ">>> 0" coerces the intermediate product back to unsigned 32-bit.
        hash = (hash * 223 + name.charCodeAt(i)) >>> 0;
        i += 1;
    }
    return hash;
}
|
|
117
|
+
/**
 * Encode an unsigned 32-bit integer as unsigned LEB128.
 *
 * @param {number} value - Value to encode; coerced to unsigned 32-bit.
 * @returns {Buffer} The LEB128 byte sequence (1–5 bytes).
 */
function encodeLeb128U32(value) {
    let remaining = value >>> 0;
    const out = [];
    // Emit 7 payload bits per byte; set the continuation bit (0x80) on
    // every byte except the final one.
    do {
        let byte = remaining & 0x7f;
        remaining >>>= 7;
        if (remaining !== 0) {
            byte |= 0x80;
        }
        out.push(byte);
    } while (remaining !== 0);
    return Buffer.from(out);
}
|
|
130
|
+
/**
 * Write a little-endian unsigned 64-bit integer into a buffer.
 *
 * Thin wrapper over Node's Buffer.writeBigUInt64LE; kept as a named helper
 * so the Candid value-encoding code reads uniformly.
 *
 * @param {Buffer} buf - Destination buffer; must have at least 8 bytes available at `offset`.
 * @param {number} offset - Byte offset at which to write.
 * @param {bigint} value - Value in [0, 2^64); writeBigUInt64LE throws on non-BigInt input.
 */
function writeU64LE(buf, offset, value) {
    buf.writeBigUInt64LE(value, offset);
}
|
|
136
|
+
/**
 * Encode the ICP ledger "transfer" method arguments in Candid binary format.
 *
 * The ICP ledger canister's `transfer` method expects a single argument of type:
 *   record {
 *     memo: nat64;                    // field hash: idlHash("memo") = 1213809850 (0x485944BA)
 *     amount: Tokens;                 // field hash: idlHash("amount")
 *     fee: Tokens;                    // field hash: idlHash("fee")
 *     from_subaccount: opt blob;      // field hash: idlHash("from_subaccount")
 *     to: blob;                       // field hash: idlHash("to")
 *     created_at_time: opt TimeStamp; // field hash: idlHash("created_at_time")
 *   }
 * where Tokens = record { e8s: nat64 }
 * and TimeStamp = record { timestamp_nanos: nat64 }
 *
 * Candid binary format:
 *   "DIDL" magic (4 bytes)
 *   <type_table_length> (LEB128)
 *   <type_table entries...>
 *   <arg_count> (LEB128) — always 1
 *   <arg_type_index> (SLEB128) — index into type table for TransferArgs
 *   <values...>
 *
 * Field ordering in Candid records is by ascending field hash (unsigned).
 *
 * @param {Buffer} to - Recipient account identifier bytes (emitted as a blob value: LEB128 length + raw bytes).
 * @param {bigint} amount - Transfer amount in e8s.
 * @param {bigint} fee - Ledger fee in e8s.
 * @param {bigint} memo - 64-bit transfer memo.
 * @returns {Buffer} The Candid-encoded TransferArgs blob; decodeTransferArgs below reverses this exact layout.
 */
function encodeTransferArgs(to, amount, fee, memo) {
    // Pre-compute the field hashes used below ('e8s' hashes to 5035232).
    const hashE8s = candidFieldHash('e8s'); // 5035232
    const hashTimestampNanos = candidFieldHash('timestamp_nanos'); // computed
    const hashMemo = candidFieldHash('memo');
    const hashAmount = candidFieldHash('amount');
    const hashFee = candidFieldHash('fee');
    const hashFromSubaccount = candidFieldHash('from_subaccount');
    const hashTo = candidFieldHash('to');
    const hashCreatedAtTime = candidFieldHash('created_at_time');
    // Sort the main record fields by hash for Candid ordering (hashes are
    // < 2^32, so plain numeric subtraction is a safe comparator).
    const mainFields = [
        { hash: hashMemo, name: 'memo' },
        { hash: hashAmount, name: 'amount' },
        { hash: hashFee, name: 'fee' },
        { hash: hashFromSubaccount, name: 'from_subaccount' },
        { hash: hashTo, name: 'to' },
        { hash: hashCreatedAtTime, name: 'created_at_time' },
    ].sort((a, b) => a.hash - b.hash);
    const parts = [];
    // ── DIDL magic header ──
    parts.push(Buffer.from([0x44, 0x49, 0x44, 0x4C]));
    // ── Type table ──
    // We define 5 compound types:
    //   Type 0: record { e8s: nat64 }               (Tokens)
    //   Type 1: record { timestamp_nanos: nat64 }   (TimeStamp)
    //   Type 2: opt T1                              (opt TimeStamp)
    //   Type 3: opt 0x68                            (opt blob — see NOTE below on 0x68)
    //   Type 4: record { memo, amount, fee, from_subaccount, to, created_at_time } (TransferArgs)
    parts.push(encodeLeb128U32(5)); // 5 compound types
    // Type 0: record { e8s: nat64 }
    // Candid type-constructor codes are SLEB128-encoded: record = -20 = 0x6C.
    parts.push(Buffer.from([0x6C])); // record constructor (SLEB128 for -20 is 0x6C)
    parts.push(encodeLeb128U32(1)); // 1 field
    parts.push(encodeLeb128U32(hashE8s)); // field hash
    parts.push(Buffer.from([0x78])); // nat64 type code (SLEB128 for -8)
    // Type 1: record { timestamp_nanos: nat64 }
    parts.push(Buffer.from([0x6C])); // record
    parts.push(encodeLeb128U32(1)); // 1 field
    parts.push(encodeLeb128U32(hashTimestampNanos));
    parts.push(Buffer.from([0x78])); // nat64
    // Type 2: opt Type1 (opt TimeStamp)
    parts.push(Buffer.from([0x6E])); // opt constructor (SLEB128 for -18)
    parts.push(encodeLeb128U32(1)); // references type index 1
    // Type 3: opt blob
    parts.push(Buffer.from([0x6E])); // opt
    // NOTE(review): 0x68 is SLEB128 for -24, which the Candid spec assigns to
    // `principal`, not blob (`blob` is `vec nat8`, i.e. 0x6D 0x7B). The paired
    // decodeTransferArgs assumes this exact layout, so changing the byte here
    // requires a coordinated change there — confirm against what the ledger
    // actually accepts before altering.
    parts.push(Buffer.from([0x68]));
    // Type 4: TransferArgs record (6 fields, sorted by field hash)
    parts.push(Buffer.from([0x6C])); // record
    parts.push(encodeLeb128U32(6)); // 6 fields
    for (const field of mainFields) {
        parts.push(encodeLeb128U32(field.hash));
        switch (field.name) {
            case 'memo':
                parts.push(Buffer.from([0x78])); // nat64
                break;
            case 'amount':
                parts.push(encodeLeb128U32(0)); // type index 0 (Tokens)
                break;
            case 'fee':
                parts.push(encodeLeb128U32(0)); // type index 0 (Tokens)
                break;
            case 'from_subaccount':
                parts.push(encodeLeb128U32(3)); // type index 3 (opt blob)
                break;
            case 'to':
                parts.push(Buffer.from([0x68])); // same caveat as the 0x68 NOTE above
                break;
            case 'created_at_time':
                parts.push(encodeLeb128U32(2)); // type index 2 (opt TimeStamp)
                break;
        }
    }
    // ── Argument count and type ──
    parts.push(encodeLeb128U32(1)); // 1 argument
    parts.push(encodeLeb128U32(4)); // argument type = type index 4 (TransferArgs)
    // ── Values (in field-hash-sorted order) ──
    for (const field of mainFields) {
        switch (field.name) {
            case 'memo': {
                // nat64 as 8 bytes little-endian
                const memoBuf = Buffer.alloc(8);
                writeU64LE(memoBuf, 0, memo);
                parts.push(memoBuf);
                break;
            }
            case 'amount': {
                // Tokens record: just the e8s field as nat64 LE
                const amountBuf = Buffer.alloc(8);
                writeU64LE(amountBuf, 0, amount);
                parts.push(amountBuf);
                break;
            }
            case 'fee': {
                // Tokens record: just the e8s field as nat64 LE
                const feeBuf = Buffer.alloc(8);
                writeU64LE(feeBuf, 0, fee);
                parts.push(feeBuf);
                break;
            }
            case 'from_subaccount':
                // opt blob: None (0x00)
                parts.push(Buffer.from([0x00]));
                break;
            case 'to': {
                // blob: length (LEB128) + raw bytes
                parts.push(encodeLeb128U32(to.length));
                parts.push(to);
                break;
            }
            case 'created_at_time':
                // opt TimeStamp: None (0x00)
                parts.push(Buffer.from([0x00]));
                break;
        }
    }
    return Buffer.concat(parts);
}
|
|
127
280
|
/**
 * Decode transfer args from the Candid binary format back to components.
 *
 * This extracts the essential fields (to, amount, fee, memo) from the
 * Candid-encoded TransferArgs. It relies on the known field ordering
 * produced by encodeTransferArgs above — it is NOT a general Candid
 * decoder: the type table is skipped positionally rather than interpreted,
 * and only the record (0x6C) and opt (0x6E) constructors that our own
 * encoder emits are recognized.
 *
 * @param {Buffer} buf - Output of encodeTransferArgs.
 * @returns {{ to: Buffer, amount: bigint, fee: bigint, memo: bigint }}
 */
function decodeTransferArgs(buf) {
    // The field order in the encoded Candid is sorted by field hash.
    // Recompute field hashes and sort to determine the value layout.
    const fields = [
        { hash: candidFieldHash('memo'), name: 'memo' },
        { hash: candidFieldHash('amount'), name: 'amount' },
        { hash: candidFieldHash('fee'), name: 'fee' },
        { hash: candidFieldHash('from_subaccount'), name: 'from_subaccount' },
        { hash: candidFieldHash('to'), name: 'to' },
        { hash: candidFieldHash('created_at_time'), name: 'created_at_time' },
    ].sort((a, b) => a.hash - b.hash);
    // Skip the DIDL header and type table to find the values section.
    // We scan past "DIDL" (4 bytes), then the type table, then arg count + type.
    let offset = 4; // skip DIDL
    // Read type table length (LEB128)
    const { value: typeCount, bytesRead: tcBytes } = readLeb128U32(buf, offset);
    offset += tcBytes;
    // Skip each type table entry. NOTE(review): this walker handles only the
    // constructors encodeTransferArgs emits (record 0x6C, opt 0x6E); any other
    // constructor byte (e.g. vec 0x6D) would be silently consumed with no
    // payload and desynchronize the offset. Keep in lockstep with the encoder.
    for (let t = 0; t < typeCount; t++) {
        const typeByte = buf[offset++];
        if (typeByte === 0x6C) { // record
            const { value: fieldCount, bytesRead: fcBytes } = readLeb128U32(buf, offset);
            offset += fcBytes;
            for (let f = 0; f < fieldCount; f++) {
                // field hash (LEB128)
                const { bytesRead: fhBytes } = readLeb128U32(buf, offset);
                offset += fhBytes;
                // field type (single-byte primitive codes like 0x78/0x68 and small
                // type-table indices both occupy one LEB128-shaped byte here)
                const { bytesRead: ftBytes } = readLeb128U32(buf, offset);
                offset += ftBytes;
            }
        }
        else if (typeByte === 0x6E) { // opt
            // element type reference (one LEB128-shaped item)
            const { bytesRead: otBytes } = readLeb128U32(buf, offset);
            offset += otBytes;
        }
    }
    // Read arg count (always 1 for our encoding)
    const { bytesRead: acBytes } = readLeb128U32(buf, offset);
    offset += acBytes;
    // Read arg type index
    const { bytesRead: atBytes } = readLeb128U32(buf, offset);
    offset += atBytes;
    // Now read values in field-hash-sorted order
    let resultMemo = 0n;
    let resultAmount = 0n;
    let resultFee = 0n;
    let resultTo = Buffer.alloc(0);
    for (const field of fields) {
        switch (field.name) {
            case 'memo':
                // nat64 little-endian
                resultMemo = buf.readBigUInt64LE(offset);
                offset += 8;
                break;
            case 'amount':
                // Tokens record collapses to its single e8s nat64
                resultAmount = buf.readBigUInt64LE(offset);
                offset += 8;
                break;
            case 'fee':
                resultFee = buf.readBigUInt64LE(offset);
                offset += 8;
                break;
            case 'from_subaccount':
                // opt blob: read presence byte
                if (buf[offset] === 0x01) {
                    offset++;
                    const { value: blobLen, bytesRead: blBytes } = readLeb128U32(buf, offset);
                    offset += blBytes;
                    offset += blobLen; // skip the blob data (value is not surfaced)
                }
                else {
                    offset++; // skip 0x00
                }
                break;
            case 'to': {
                // blob: length + data
                const { value: toLen, bytesRead: tlBytes } = readLeb128U32(buf, offset);
                offset += tlBytes;
                resultTo = Buffer.from(buf.subarray(offset, offset + toLen));
                offset += toLen;
                break;
            }
            case 'created_at_time':
                // opt TimeStamp: read presence byte
                if (buf[offset] === 0x01) {
                    offset++;
                    offset += 8; // skip timestamp_nanos nat64
                }
                else {
                    offset++; // skip 0x00
                }
                break;
        }
    }
    return {
        to: resultTo,
        amount: resultAmount,
        fee: resultFee,
        memo: resultMemo,
    };
}
|
|
392
|
+
/**
 * Read a LEB128-encoded unsigned 32-bit integer from a buffer at the given offset.
 *
 * @param {Buffer} buf - Buffer to read from.
 * @param {number} offset - Starting byte offset.
 * @returns {{ value: number, bytesRead: number }} Decoded unsigned value and
 *   the number of bytes consumed (0 if offset is past the end of the buffer).
 */
function readLeb128U32(buf, offset) {
    let value = 0;
    let consumed = 0;
    for (let shift = 0; offset + consumed < buf.length; shift += 7) {
        const current = buf[offset + consumed];
        consumed += 1;
        // Accumulate 7 payload bits per byte; the final ">>> 0" below undoes
        // any signed-32-bit overflow from shifting into the top bits.
        value |= (current & 0x7f) << shift;
        if ((current & 0x80) === 0) {
            break;
        }
    }
    return { value: value >>> 0, bytesRead: consumed };
}
|
|
409
|
+
// ── Minimal CBOR Encoder for IC API ──
// The IC HTTP API v2 requires CBOR-encoded request envelopes.
// We implement only the subset needed: maps, byte strings, text strings, and unsigned integers.
/** CBOR major types (RFC 8949 §3.1), passed as the first argument to cborHeader. */
const CBOR_MAP = 5;   // major type 5: map of key/value pairs
const CBOR_BYTES = 2; // major type 2: byte string
const CBOR_TEXT = 3;  // major type 3: UTF-8 text string
const CBOR_UINT = 0;  // major type 0: unsigned integer (declared for completeness;
                      // cborUint below emits its headers inline rather than via cborHeader)
|
|
417
|
+
/**
 * Encode a CBOR item header: the 3-bit major type combined with the
 * length/value in the shortest additional-information form (RFC 8949 §3).
 *
 * @param {number} majorType - CBOR major type (0–7).
 * @param {number} length - Item length (or small value) to encode.
 * @returns {Buffer} Header bytes (1, 2, 3, or 5 bytes).
 */
function cborHeader(majorType, length) {
    const tag = majorType << 5;
    if (length < 24) {
        // Value fits directly in the additional-information bits.
        return Buffer.from([tag | length]);
    }
    if (length < 0x100) {
        // Additional info 24: one following length byte.
        return Buffer.from([tag | 24, length]);
    }
    if (length < 0x10000) {
        // Additional info 25: 2-byte big-endian length.
        const header = Buffer.alloc(3);
        header[0] = tag | 25;
        header.writeUInt16BE(length, 1);
        return header;
    }
    // Additional info 26: 4-byte big-endian length.
    const header = Buffer.alloc(5);
    header[0] = tag | 26;
    header.writeUInt32BE(length, 1);
    return header;
}
|
|
439
|
+
/** Encode a CBOR unsigned integer (up to 64 bits) */
|
|
440
|
+
function cborUint(value) {
|
|
441
|
+
const n = BigInt(value);
|
|
442
|
+
if (n < 24n) {
|
|
443
|
+
return Buffer.from([Number(n)]);
|
|
444
|
+
}
|
|
445
|
+
else if (n < 256n) {
|
|
446
|
+
return Buffer.from([24, Number(n)]);
|
|
447
|
+
}
|
|
448
|
+
else if (n < 65536n) {
|
|
449
|
+
const buf = Buffer.alloc(3);
|
|
450
|
+
buf[0] = 25;
|
|
451
|
+
buf.writeUInt16BE(Number(n), 1);
|
|
452
|
+
return buf;
|
|
453
|
+
}
|
|
454
|
+
else if (n < 4294967296n) {
|
|
455
|
+
const buf = Buffer.alloc(5);
|
|
456
|
+
buf[0] = 26;
|
|
457
|
+
buf.writeUInt32BE(Number(n), 1);
|
|
458
|
+
return buf;
|
|
459
|
+
}
|
|
460
|
+
else {
|
|
461
|
+
const buf = Buffer.alloc(9);
|
|
462
|
+
buf[0] = 27;
|
|
463
|
+
buf.writeBigUInt64BE(n, 1);
|
|
464
|
+
return buf;
|
|
465
|
+
}
|
|
466
|
+
}
|
|
467
|
+
/**
 * Encode a CBOR byte string (major type 2): length header followed by the raw bytes.
 *
 * @param {Buffer|Uint8Array|number[]} data - Bytes to encode (copied via Buffer.from).
 * @returns {Buffer}
 */
function cborBytes(data) {
    const raw = Buffer.from(data);
    const header = cborHeader(CBOR_BYTES, raw.length);
    return Buffer.concat([header, raw]);
}
|
|
472
|
+
/**
 * Encode a CBOR text string (major type 3): length header followed by the UTF-8 bytes.
 *
 * @param {string} str - Text to encode.
 * @returns {Buffer}
 */
function cborText(str) {
    const utf8 = Buffer.from(str, 'utf-8');
    const header = cborHeader(CBOR_TEXT, utf8.length);
    return Buffer.concat([header, utf8]);
}
|
|
477
|
+
/**
 * Encode an IC request envelope as CBOR.
 *
 * The IC expects:
 *   {
 *     "content": {
 *       "request_type": <text>,
 *       "canister_id": <bytes>,
 *       "method_name": <text>,
 *       "arg": <bytes>,
 *       "sender": <bytes>,
 *       "ingress_expiry": <uint>,
 *     },
 *     "sender_pubkey": <bytes>,
 *     "sender_sig": <bytes>,
 *   }
 * Optional fields (canister_id, method_name, arg, paths, and the two
 * signature fields) are emitted only when present on the input object.
 *
 * @param {object} envelope - Envelope with a `content` object and optional
 *   `sender_pubkey` / `sender_sig` byte arrays.
 * @returns {Buffer} CBOR-encoded envelope.
 */
function cborEncodeEnvelope(envelope) {
    const { content } = envelope;
    // Assemble [key, encodedValue] pairs for the content map, preserving the
    // same emission order as before: request_type first, then the optional
    // call fields, then sender / ingress_expiry / paths.
    const contentEntries = [['request_type', cborText(content.request_type)]];
    if (content.canister_id) {
        contentEntries.push(['canister_id', cborBytes(content.canister_id)]);
    }
    if (content.method_name) {
        contentEntries.push(['method_name', cborText(content.method_name)]);
    }
    if (content.arg) {
        contentEntries.push(['arg', cborBytes(content.arg)]);
    }
    contentEntries.push(['sender', cborBytes(content.sender)]);
    contentEntries.push(['ingress_expiry', cborUint(BigInt(content.ingress_expiry))]);
    if (content.paths) {
        // paths: CBOR array of paths, each path a CBOR array of byte-string
        // segments (major type 4 = array).
        const encodedPaths = content.paths.map((path) => {
            const segments = path.map((seg) => cborBytes(seg));
            return Buffer.concat([cborHeader(4, segments.length), ...segments]);
        });
        contentEntries.push([
            'paths',
            Buffer.concat([cborHeader(4, encodedPaths.length), ...encodedPaths]),
        ]);
    }
    // Serialize the content map: map header, then alternating key/value items.
    const contentMap = Buffer.concat([
        cborHeader(CBOR_MAP, contentEntries.length),
        ...contentEntries.flatMap(([key, encoded]) => [cborText(key), encoded]),
    ]);
    // Outer envelope map: "content" always, signature fields when present.
    const outerItems = [cborText('content'), contentMap];
    let outerCount = 1;
    if (envelope.sender_pubkey) {
        outerItems.push(cborText('sender_pubkey'), cborBytes(envelope.sender_pubkey));
        outerCount += 1;
    }
    if (envelope.sender_sig) {
        outerItems.push(cborText('sender_sig'), cborBytes(envelope.sender_sig));
        outerCount += 1;
    }
    return Buffer.concat([cborHeader(CBOR_MAP, outerCount), ...outerItems]);
}
|
|
554
|
+
/**
 * Encode an unsigned query/read_state envelope as CBOR: only the `content`
 * field is carried through; no sender_pubkey/sender_sig are attached.
 *
 * @param {object} envelope - Envelope whose `content` describes the query.
 * @returns {Buffer} CBOR-encoded envelope.
 */
function cborEncodeQueryEnvelope(envelope) {
    const { content } = envelope;
    return cborEncodeEnvelope({ content });
}
|
|
138
560
|
class IcpAdapter {
|
|
139
561
|
constructor(network = 'mainnet', apiUrl) {
|
|
140
562
|
this.network = network;
|
|
@@ -199,6 +621,7 @@ class IcpAdapter {
|
|
|
199
621
|
methodName: 'transfer',
|
|
200
622
|
arg: argBytes.toString('hex'),
|
|
201
623
|
ingressExpiry: ingressExpiryNs.toString(),
|
|
624
|
+
senderPubkey: tx.senderPubkey || '',
|
|
202
625
|
to: tx.to,
|
|
203
626
|
amount: tx.amount,
|
|
204
627
|
fee: fee.toString(),
|
|
@@ -284,12 +707,14 @@ class IcpAdapter {
|
|
|
284
707
|
sender: principalToBytes(payload.sender),
|
|
285
708
|
ingress_expiry: payload.ingressExpiry,
|
|
286
709
|
},
|
|
287
|
-
sender_pubkey:
|
|
710
|
+
sender_pubkey: payload.senderPubkey
|
|
711
|
+
? Array.from(Buffer.from(payload.senderPubkey, 'hex'))
|
|
712
|
+
: [],
|
|
288
713
|
sender_sig: Array.from(derSig),
|
|
289
714
|
};
|
|
290
715
|
// Submit to the IC
|
|
291
716
|
const url = `${this.apiUrl}/api/v2/canister/${payload.canisterId}/call`;
|
|
292
|
-
const body =
|
|
717
|
+
const body = new Uint8Array(cborEncodeEnvelope(envelope));
|
|
293
718
|
const response = await fetch(url, {
|
|
294
719
|
method: 'POST',
|
|
295
720
|
headers: {
|
|
@@ -322,51 +747,29 @@ class IcpAdapter {
|
|
|
322
747
|
*/
|
|
323
748
|
async getBalance(address) {
|
|
324
749
|
try {
|
|
325
|
-
// Use the
|
|
326
|
-
|
|
327
|
-
//
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
argBytes.writeUInt8(0x44, 0); // 'D'
|
|
333
|
-
argBytes.writeUInt8(0x49, 1); // 'I'
|
|
334
|
-
argBytes.writeUInt8(0x44, 2); // 'D'
|
|
335
|
-
argBytes.writeUInt8(0x4c, 3); // 'L'
|
|
336
|
-
accountId.copy(argBytes, 4);
|
|
337
|
-
const queryPayload = {
|
|
338
|
-
content: {
|
|
339
|
-
request_type: 'query',
|
|
340
|
-
canister_id: principalToBytes(LEDGER_CANISTER_ID),
|
|
341
|
-
method_name: 'account_balance',
|
|
342
|
-
arg: Array.from(argBytes),
|
|
343
|
-
sender: principalToBytes('2vxsx-fae'), // Anonymous principal
|
|
344
|
-
ingress_expiry: (BigInt(Date.now() * 1000000) + 300000000000n).toString(),
|
|
345
|
-
},
|
|
346
|
-
};
|
|
347
|
-
const response = await fetch(url, {
|
|
750
|
+
// Use the ICP Rosetta API for balance queries. The Rosetta API returns
|
|
751
|
+
// a well-structured JSON response, avoiding the need to parse complex
|
|
752
|
+
// CBOR-encoded certificates from the IC read_state endpoint.
|
|
753
|
+
const rosettaUrl = this.network === 'mainnet'
|
|
754
|
+
? 'https://rosetta-api.internetcomputer.org'
|
|
755
|
+
: this.apiUrl;
|
|
756
|
+
const response = await fetch(`${rosettaUrl}/account/balance`, {
|
|
348
757
|
method: 'POST',
|
|
349
|
-
headers: {
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
758
|
+
headers: { 'Content-Type': 'application/json' },
|
|
759
|
+
body: JSON.stringify({
|
|
760
|
+
network_identifier: {
|
|
761
|
+
blockchain: 'Internet Computer',
|
|
762
|
+
network: '00000000000000020101',
|
|
763
|
+
},
|
|
764
|
+
account_identifier: { address },
|
|
765
|
+
}),
|
|
766
|
+
signal: AbortSignal.timeout(10000),
|
|
353
767
|
});
|
|
354
|
-
if (!response.ok)
|
|
768
|
+
if (!response.ok)
|
|
355
769
|
return '0';
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
const resultBytes = Buffer.from(data);
|
|
360
|
-
// The balance is returned as a Candid-encoded nat64
|
|
361
|
-
// For simplicity, try to extract the e8s value from the response
|
|
362
|
-
if (resultBytes.length >= 8) {
|
|
363
|
-
// Look for the balance value in the reply
|
|
364
|
-
// The Candid reply contains the Tokens record with e8s field
|
|
365
|
-
const lastEight = resultBytes.subarray(resultBytes.length - 8);
|
|
366
|
-
const balance = lastEight.readBigUInt64LE(0);
|
|
367
|
-
return balance.toString();
|
|
368
|
-
}
|
|
369
|
-
return '0';
|
|
770
|
+
const data = await response.json();
|
|
771
|
+
const balance = data?.balances?.[0]?.value;
|
|
772
|
+
return balance || '0';
|
|
370
773
|
}
|
|
371
774
|
catch {
|
|
372
775
|
return '0';
|
|
@@ -404,12 +807,25 @@ class IcpAdapter {
|
|
|
404
807
|
headers: {
|
|
405
808
|
'Content-Type': 'application/cbor',
|
|
406
809
|
},
|
|
407
|
-
body:
|
|
810
|
+
body: new Uint8Array(cborEncodeQueryEnvelope(readStatePayload)),
|
|
408
811
|
});
|
|
409
812
|
if (!response.ok) {
|
|
410
813
|
return 'unknown';
|
|
411
814
|
}
|
|
412
|
-
// Parse the certificate to extract the status
|
|
815
|
+
// Parse the certificate to extract the status.
|
|
816
|
+
//
|
|
817
|
+
// The IC returns a CBOR-encoded certificate with the status value
|
|
818
|
+
// embedded as a text string inside a hash tree. While the "proper"
|
|
819
|
+
// approach would be to fully decode the CBOR certificate and walk
|
|
820
|
+
// the hash tree, the status values ("replied", "rejected", "done",
|
|
821
|
+
// "processing", "received") are short ASCII strings that appear
|
|
822
|
+
// verbatim in the binary response. Searching for them as substrings
|
|
823
|
+
// is a pragmatic approach that works reliably in practice because:
|
|
824
|
+
// 1. These status strings are embedded as CBOR text values.
|
|
825
|
+
// 2. The IC spec guarantees they are the only human-readable
|
|
826
|
+
// status keywords in the response.
|
|
827
|
+
// 3. False positives from random binary data matching these
|
|
828
|
+
// multi-character ASCII sequences are extremely unlikely.
|
|
413
829
|
const data = await response.arrayBuffer();
|
|
414
830
|
const resultBuf = Buffer.from(data);
|
|
415
831
|
const statusStr = resultBuf.toString('utf-8');
|