@typeberry/lib 0.1.0-eb00e84 → 0.1.1-127cc86
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +228 -136
- package/index.d.ts +225 -222
- package/index.js +228 -136
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -113,32 +113,21 @@ declare function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- */
-declare function check(condition: boolean, message?: string): asserts condition is true {
-  if (!condition) {
-    throw new Error(`Assertion failure: ${message ?? ""}`);
-  }
-}
-
-declare function cast<T, U extends T>(_a: T, condition: boolean): _a is U {
-  return condition;
-}
-
-/**
- * Yet another function to perform runtime assertions.
- * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
  *
- *
- *
- * should be replaced with:
- * const x = y as CheckedNumber;
+ * NOTE the function is intended to be used as tagged template string for the performance
+ * reasons.
  */
-declare function
-
-
+declare function check(
+  strings: TemplateStringsArray,
+  condition: boolean,
+  ...data: unknown[]
+): asserts condition is true {
+  if (!condition) {
+    // add an empty value so that `data.length === strings.length`
+    data.unshift("");
+    const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
+    throw new Error(`Assertion failure:${message.join("")}`);
   }
-
-    throw new Error(`Assertion failure: ${message ?? ""}`);
 }

 /**
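In the hunk above, the old `check(condition, message?)` helper is replaced by a tagged-template variant, so the failure message is only assembled when the assertion actually fails. A minimal, hypothetical call site (not part of the package) looks like this:

    // The first interpolated value is the condition; the remaining text and
    // interpolations become the failure message, joined only on failure.
    function takeByte(value: number): number {
      check`${(value & 0xff) === value} expected a single byte, got ${value}`;
      return value;
    }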
@@ -369,8 +358,8 @@ type Result$2<Ok, Error> = OkResult<Ok> | ErrorResult<Error>;
 /** An indication of two possible outcomes returned from a function. */
 declare const Result$2 = {
   /** Create new [`Result`] with `Ok` status. */
-  ok: <Ok
-    check
+  ok: <Ok>(ok: Ok): OkResult<Ok> => {
+    check`${ok !== undefined} 'ok' type cannot be undefined.`;
     return {
       isOk: true,
       isError: false,
@@ -379,8 +368,8 @@ declare const Result$2 = {
   },

   /** Create new [`Result`] with `Error` status. */
-  error: <
-    check
+  error: <Error>(error: Error, details = ""): ErrorResult<Error> => {
+    check`${error !== undefined} 'Error' type cannot be undefined.`;
     return {
       isOk: false,
       isError: true,
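Both `Result` constructors (declared as `Result$2` above, exported as `Result`) now guard against `undefined` payloads with the tagged-template `check`. An illustrative sketch, assuming the `OkResult`/`ErrorResult` shapes declared elsewhere in this file:

    const found = Result.ok(42);            // { isOk: true, isError: false, ... }
    const failed = Result.error("missing"); // { isOk: false, isError: true, ... }
    // Result.ok(undefined) now throws: Assertion failure: 'ok' type cannot be undefined.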
@@ -721,10 +710,8 @@ declare const index$s_asOpaqueType: typeof asOpaqueType;
 declare const index$s_assertEmpty: typeof assertEmpty;
 declare const index$s_assertNever: typeof assertNever;
 declare const index$s_callCompareFunction: typeof callCompareFunction;
-declare const index$s_cast: typeof cast;
 declare const index$s_check: typeof check;
 declare const index$s_deepEqual: typeof deepEqual;
-declare const index$s_ensure: typeof ensure;
 declare const index$s_env: typeof env;
 declare const index$s_getAllKeysSorted: typeof getAllKeysSorted;
 declare const index$s_inspect: typeof inspect;
@@ -740,7 +727,7 @@ declare const index$s_resultToString: typeof resultToString;
 declare const index$s_seeThrough: typeof seeThrough;
 declare const index$s_trimStack: typeof trimStack;
 declare namespace index$s {
-  export { index$s_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$s_CURRENT_SUITE as CURRENT_SUITE, index$s_CURRENT_VERSION as CURRENT_VERSION, index$s_Compatibility as Compatibility, index$s_DEFAULT_SUITE as DEFAULT_SUITE, index$s_DEFAULT_VERSION as DEFAULT_VERSION, index$s_ErrorsCollector as ErrorsCollector, index$s_GpVersion as GpVersion, Result$2 as Result, index$s_RichTaggedError as RichTaggedError, index$s_TEST_COMPARE_USING as TEST_COMPARE_USING, index$s_TestSuite as TestSuite, index$s_WithDebug as WithDebug, index$s___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$s_asOpaqueType as asOpaqueType, index$s_assertEmpty as assertEmpty, index$s_assertNever as assertNever, index$s_callCompareFunction as callCompareFunction, index$
+  export { index$s_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$s_CURRENT_SUITE as CURRENT_SUITE, index$s_CURRENT_VERSION as CURRENT_VERSION, index$s_Compatibility as Compatibility, index$s_DEFAULT_SUITE as DEFAULT_SUITE, index$s_DEFAULT_VERSION as DEFAULT_VERSION, index$s_ErrorsCollector as ErrorsCollector, index$s_GpVersion as GpVersion, Result$2 as Result, index$s_RichTaggedError as RichTaggedError, index$s_TEST_COMPARE_USING as TEST_COMPARE_USING, index$s_TestSuite as TestSuite, index$s_WithDebug as WithDebug, index$s___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$s_asOpaqueType as asOpaqueType, index$s_assertEmpty as assertEmpty, index$s_assertNever as assertNever, index$s_callCompareFunction as callCompareFunction, index$s_check as check, index$s_deepEqual as deepEqual, index$s_env as env, index$s_getAllKeysSorted as getAllKeysSorted, index$s_inspect as inspect, index$s_isBrowser as isBrowser, index$s_isResult as isResult, index$s_isTaggedError as isTaggedError, index$s_maybeTaggedErrorToString as maybeTaggedErrorToString, index$s_measure as measure, index$s_oomWarningPrinted as oomWarningPrinted, index$s_parseCurrentSuite as parseCurrentSuite, index$s_parseCurrentVersion as parseCurrentVersion, index$s_resultToString as resultToString, index$s_seeThrough as seeThrough, index$s_trimStack as trimStack };
   export type { index$s_DeepEqualOptions as DeepEqualOptions, index$s_EnumMapping as EnumMapping, index$s_ErrorResult as ErrorResult, index$s_OK as OK, index$s_OkResult as OkResult, index$s_Opaque as Opaque, index$s_StringLiteral as StringLiteral, index$s_TaggedError as TaggedError, index$s_TokenOf as TokenOf, index$s_Uninstantiable as Uninstantiable, index$s_WithOpaque as WithOpaque };
 }

@@ -925,7 +912,7 @@ declare class BytesBlob {

   /** Create a new [`BytesBlob`] from an array of bytes. */
   static blobFromNumbers(v: number[]): BytesBlob {
-    check
+    check`${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
     const arr = new Uint8Array(v);
     return new BytesBlob(arr);
   }
@@ -975,7 +962,7 @@ declare class Bytes<T extends number> extends BytesBlob {

   private constructor(raw: Uint8Array, len: T) {
     super(raw);
-    check
+    check`${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
     this.length = len;
   }

@@ -986,7 +973,7 @@ declare class Bytes<T extends number> extends BytesBlob {

   /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
   static fromNumbers<X extends number>(v: number[], len: X): Bytes<X> {
-    check
+    check`${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
     const x = new Uint8Array(v);
     return new Bytes(x, len);
   }
@@ -999,7 +986,7 @@ declare class Bytes<T extends number> extends BytesBlob {
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
   /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
   static fill<X extends number>(len: X, input: number): Bytes<X> {
-    check(
+    check`${(input & 0xff) === input} Input has to be a byte.`;
     const bytes = Bytes.zero(len);
     bytes.raw.fill(input, 0, len);
     return bytes;
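The three factories above share the same byte-range guard: `(x & 0xff) === x` holds exactly for integers in 0..255, for example:

    (200 & 0xff) === 200  // true  -> accepted
    (256 & 0xff) === 256  // false (256 & 0xff is 0)   -> check fails
    (-1  & 0xff) === -1   // false (-1 & 0xff is 255)  -> check fails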
@@ -1027,7 +1014,7 @@ declare class Bytes<T extends number> extends BytesBlob {

   /** Compare the sequence to another one. */
   isEqualTo(other: Bytes<T>): boolean {
-    check
+    check`${this.length === other.length} Comparing incorrectly typed bytes!`;
     return u8ArraySameLengthEqual(this.raw, other.raw);
   }

@@ -1038,7 +1025,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 }

 declare function byteFromString(s: string): number {
-  check
+  check`${s.length === 2} Two-character string expected`;
   const a = numberFromCharCode(s.charCodeAt(0));
   const b = numberFromCharCode(s.charCodeAt(1));
   return (a << 4) | b;
@@ -1124,10 +1111,10 @@ declare class BitVec {
     private readonly data: Uint8Array,
     public readonly bitLength: number,
   ) {
-    check
-      data.length * 8 >= bitLength
-
-
+    check`
+      ${data.length * 8 >= bitLength}
+      Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
+    `;

     this.byteLength = Math.ceil(bitLength / 8);
   }
@@ -1139,10 +1126,10 @@ declare class BitVec {

   /** Perform OR operation on all bits in place. */
   sumWith(other: BitVec) {
-    check
-      other.bitLength === this.bitLength
-
-
+    check`
+      ${other.bitLength === this.bitLength}
+      Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
+    `;

     const otherRaw = other.raw;
     for (let i = 0; i < this.byteLength; i++) {
@@ -1154,7 +1141,7 @@ declare class BitVec {
   * Set the bit at index `idx` to value `val`.
   */
   setBit(idx: number, val: boolean) {
-    check
+    check`${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;

     const byteIndex = Math.floor(idx / 8);
     const bitIndexInByte = idx % 8;
@@ -1170,7 +1157,7 @@ declare class BitVec {
   * Return `true` if the bit at index `idx` is set.
   */
   isSet(idx: number): boolean {
-    check
+    check`${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
     const byteIndex = Math.floor(idx / 8);
     const bitIndexInByte = idx % 8;
     const mask = 1 << bitIndexInByte;
@@ -1241,7 +1228,7 @@ declare const __REPRESENTATION_BYTES__: "REPRESENTATION_BYTES";
 type WithBytesRepresentation<Bytes extends number> = {
   readonly [__REPRESENTATION_BYTES__]: Bytes;
 };
-declare const
+declare const asTypedNumber = <T, N extends number>(v: T): T & WithBytesRepresentation<N> =>
   v as T & WithBytesRepresentation<N>;

 type FixedSizeNumber<Bytes extends number> = number & WithBytesRepresentation<Bytes>;
@@ -1260,20 +1247,27 @@ type U64 = bigint & WithBytesRepresentation<8>;
 declare const MAX_VALUE_U64 = 0xffff_ffff_ffff_ffffn;

 /** Attempt to cast an input number into U8. */
-declare const tryAsU8 = (v: number): U8 =>
-
+declare const tryAsU8 = (v: number): U8 => {
+  check`${isU8(v)} input must have one-byte representation, got ${v}`;
+  return asTypedNumber(v);
+};
 /** Check if given number is a valid U8 number. */
 declare const isU8 = (v: number): v is U8 => (v & MAX_VALUE_U8) === v;

 /** Attempt to cast an input number into U16. */
-declare const tryAsU16 = (v: number): U16 =>
-
+declare const tryAsU16 = (v: number): U16 => {
+  check`${isU16(v)} input must have two-byte representation, got ${v}`;
+  return asTypedNumber(v);
+};
+
 /** Check if given number is a valid U16 number. */
 declare const isU16 = (v: number): v is U16 => (v & MAX_VALUE_U16) === v;

 /** Attempt to cast an input number into U32. */
-declare const tryAsU32 = (v: number): U32 =>
-
+declare const tryAsU32 = (v: number): U32 => {
+  check`${isU32(v)} input must have four-byte representation, got ${v}`;
+  return asTypedNumber(v);
+};

 /** Check if given number is a valid U32 number. */
 declare const isU32 = (v: number): v is U32 => (v & MAX_VALUE_U32) >>> 0 === v;
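The `tryAsU8`/`tryAsU16`/`tryAsU32` helpers now validate before branding: the `isU*` predicates narrow via a bit mask, and `asTypedNumber` only attaches the `WithBytesRepresentation` brand at the type level, with no runtime conversion. Illustrative calls (not from the package):

    const a = tryAsU8(200);  // ok, since (200 & 0xff) === 200
    // tryAsU8(300) throws: Assertion failure: input must have one-byte representation, got 300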
@@ -1281,25 +1275,28 @@ declare const isU32 = (v: number): v is U32 => (v & MAX_VALUE_U32) >>> 0 === v;
 /** Attempt to cast an input number into U64. */
 declare const tryAsU64 = (x: number | bigint): U64 => {
   const v = BigInt(x);
-
+  check`${isU64(v)} input must have eight-byte representation, got ${x}`;
+  return asTypedNumber(v);
 };
+
 /** Check if given number is a valid U64 number. */
 declare const isU64 = (v: bigint): v is U64 => (v & MAX_VALUE_U64) === v;

 /** Collate two U32 parts into one U64. */
 declare const u64FromParts = ({ lower, upper }: { lower: U32; upper: U32 }): U64 => {
   const val = (BigInt(upper) << 32n) + BigInt(lower);
-  return
+  return asTypedNumber(val);
 };

 /** Split U64 into lower & upper parts. */
 declare const u64IntoParts = (v: U64): { lower: U32; upper: U32 } => {
-
-  const
+  // Number(...) safe: both parts are <= 0xffffffff
+  const lower = Number(v & (2n ** 32n - 1n));
+  const upper = Number(v >> 32n);

   return {
-    lower:
-    upper:
+    lower: asTypedNumber(lower),
+    upper: asTypedNumber(upper),
   };
 };

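`u64FromParts` and `u64IntoParts` now brand their results via `asTypedNumber`, and `u64IntoParts` spells out the `Number(...)` conversion of both 32-bit halves. A round-trip sketch with hypothetical values:

    const v = u64FromParts({ lower: tryAsU32(0x89abcdef), upper: tryAsU32(0x01234567) });
    // v === 0x0123456789abcdefn
    const { lower, upper } = u64IntoParts(v);
    // lower === 0x89abcdef, upper === 0x01234567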
@@ -1356,10 +1353,8 @@ declare function u32AsLeBytes(value: U32): Uint8Array {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
 declare function leBytesAsU32(uint8Array: Uint8Array): U32 {
-  check
-  return
-    uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24),
-  );
+  check`${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
+  return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
 }

 /** Get the smallest value between U64 a and values given as input parameters. */
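`leBytesAsU32` keeps the same little-endian bit arithmetic but now length-checks its input and brands the result. A worked example:

    leBytesAsU32(Uint8Array.of(0x78, 0x56, 0x34, 0x12))
    // = 0x78 | (0x56 << 8) | (0x34 << 16) | (0x12 << 24) = 0x12345678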
@@ -1379,7 +1374,7 @@ type index$p_U64 = U64;
 type index$p_U8 = U8;
 type index$p_WithBytesRepresentation<Bytes extends number> = WithBytesRepresentation<Bytes>;
 declare const index$p___REPRESENTATION_BYTES__: typeof __REPRESENTATION_BYTES__;
-declare const index$
+declare const index$p_asTypedNumber: typeof asTypedNumber;
 declare const index$p_isU16: typeof isU16;
 declare const index$p_isU32: typeof isU32;
 declare const index$p_isU64: typeof isU64;
@@ -1397,7 +1392,7 @@ declare const index$p_u32AsLeBytes: typeof u32AsLeBytes;
 declare const index$p_u64FromParts: typeof u64FromParts;
 declare const index$p_u64IntoParts: typeof u64IntoParts;
 declare namespace index$p {
-  export { index$p_MAX_VALUE_U16 as MAX_VALUE_U16, index$p_MAX_VALUE_U32 as MAX_VALUE_U32, index$p_MAX_VALUE_U64 as MAX_VALUE_U64, index$p_MAX_VALUE_U8 as MAX_VALUE_U8, index$p___REPRESENTATION_BYTES__ as __REPRESENTATION_BYTES__, index$
+  export { index$p_MAX_VALUE_U16 as MAX_VALUE_U16, index$p_MAX_VALUE_U32 as MAX_VALUE_U32, index$p_MAX_VALUE_U64 as MAX_VALUE_U64, index$p_MAX_VALUE_U8 as MAX_VALUE_U8, index$p___REPRESENTATION_BYTES__ as __REPRESENTATION_BYTES__, index$p_asTypedNumber as asTypedNumber, index$p_isU16 as isU16, index$p_isU32 as isU32, index$p_isU64 as isU64, index$p_isU8 as isU8, index$p_leBytesAsU32 as leBytesAsU32, index$p_maxU64 as maxU64, index$p_minU64 as minU64, index$p_sumU32 as sumU32, index$p_sumU64 as sumU64, index$p_tryAsU16 as tryAsU16, index$p_tryAsU32 as tryAsU32, index$p_tryAsU64 as tryAsU64, index$p_tryAsU8 as tryAsU8, index$p_u32AsLeBytes as u32AsLeBytes, index$p_u64FromParts as u64FromParts, index$p_u64IntoParts as u64IntoParts };
   export type { index$p_FixedSizeNumber as FixedSizeNumber, Result$1 as Result, index$p_U16 as U16, index$p_U32 as U32, index$p_U64 as U64, index$p_U8 as U8, index$p_WithBytesRepresentation as WithBytesRepresentation };
 }

@@ -1736,7 +1731,7 @@ declare class Decoder {
     if (this.offset < newOffset) {
       this.skip(newOffset - this.offset);
     } else {
-      check
+      check`${newOffset >= 0} The offset has to be positive`;
       this.offset = newOffset;
     }
   }
@@ -1768,7 +1763,7 @@ declare class Decoder {
   }

   private ensureHasBytes(bytes: number) {
-    check
+    check`${bytes >= 0} Negative number of bytes given.`;
     if (this.offset + bytes > this.source.length) {
       throw new Error(
         `Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`,
@@ -1779,7 +1774,7 @@ declare class Decoder {

 declare const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
 declare function decodeVariableLengthExtraBytes(firstByte: number) {
-  check
+  check`${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
   for (let i = 0; i < MASKS.length; i++) {
     if (firstByte >= MASKS[i]) {
       return 8 - i;
@@ -1798,7 +1793,7 @@ type SizeHint = {
 };

 declare function tryAsExactBytes(a: SizeHint): number {
-  check
+  check`${a.isExact} The value is not exact size estimation!`;
   return a.bytes;
 }

@@ -1957,8 +1952,8 @@ declare class Encoder {
     // we still allow positive numbers from `[maxNum / 2, maxNum)`.
     // So it does not matter if the argument is a negative value,
     // OR if someone just gave us two-complement already.
-    check
-    check
+    check`${num < maxNum} Only for numbers up to 2**64 - 1`;
+    check`${-num <= maxNum / 2n} Only for numbers down to -2**63`;
     this.ensureBigEnough(8);

     this.dataView.setBigInt64(this.offset, num, true);
@@ -2028,8 +2023,8 @@ declare class Encoder {
     // we still allow positive numbers from `[maxNum / 2, maxNum)`.
     // So it does not matter if the argument is a negative value,
     // OR if someone just gave us two-complement already.
-    check
-    check
+    check`${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
+    check`${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;

     this.ensureBigEnough(bytesToEncode);
   }
@@ -2042,8 +2037,8 @@ declare class Encoder {
   * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
   */
   varU32(num: U32) {
-    check
-    check
+    check`${num >= 0} Only for natural numbers.`;
+    check`${num < 2 ** 32} Only for numbers up to 2**32`;
     this.varU64(BigInt(num));
   }

@@ -2211,7 +2206,7 @@ declare class Encoder {
   * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
   */
   sequenceVarLen<T>(encode: Encode<T>, elements: readonly T[]) {
-    check
+    check`${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
     this.varU32(tryAsU32(elements.length));
     this.sequenceFixLen(encode, elements);
   }
@@ -2234,7 +2229,7 @@ declare class Encoder {
   * anyway, so if we really should throw we will.
   */
   private ensureBigEnough(length: number, options: { silent: boolean } = { silent: false }) {
-    check
+    check`${length >= 0} Negative length given`;

     const newLength = this.offset + length;
     if (newLength > MAX_LENGTH) {
@@ -2594,13 +2589,12 @@ declare abstract class ObjectView<T> {
   private decodeUpTo<K extends keyof T>(field: K): ViewField<T[K], unknown> {
     const index = this.descriptorsKeys.indexOf(field);
     const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
-    check
-      this.lastDecodedFieldIdx < index
-
+    check`
+      ${this.lastDecodedFieldIdx < index}
+      Unjustified call to 'decodeUpTo' -
       the index (${index}, ${String(field)})
       is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
-
-    );
+    `;

     let lastItem = this.cache.get(lastField);
     const skipper = new Skipper(this.decoder);
@@ -2623,12 +2617,11 @@ declare abstract class ObjectView<T> {
       this.lastDecodedFieldIdx = i;
     }

-
-
-
-
-
-    return last;
+    if (lastItem === undefined) {
+      throw new Error("Last item must be set, since the loop turns at least once.");
+    }
+
+    return lastItem as ViewField<T[K], unknown>;
   }
 }

@@ -2664,12 +2657,10 @@ declare class SequenceView<T, V = T> {
   *[Symbol.iterator]() {
     for (let i = 0; i < this.length; i++) {
       const val = this.get(i);
-
-
-
-
-      );
-      yield v;
+      if (val === undefined) {
+        throw new Error("We are within 0..this.length so all items are defined.");
+      }
+      yield val;
     }
   }

@@ -2719,10 +2710,10 @@ declare class SequenceView<T, V = T> {
   }

   private decodeUpTo(index: number): ViewField<T, V> {
-    check
-      this.lastDecodedIdx < index
-
-
+    check`
+      ${this.lastDecodedIdx < index}
+      Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
+    `;
     let lastItem = this.cache.get(this.lastDecodedIdx);
     const skipper = new Skipper(this.decoder);

@@ -2743,12 +2734,10 @@ declare class SequenceView<T, V = T> {
       this.lastDecodedIdx = i;
     }

-
-
-
-
-    );
-    return last;
+    if (lastItem === undefined) {
+      throw new Error("Last item must be set, since the loop turns at least once.");
+    }
+    return lastItem;
   }
 }

@@ -2774,7 +2763,10 @@ declare const TYPICAL_DICTIONARY_LENGTH = 32;
 declare function readonlyArray<T, V>(desc: Descriptor<T[], V>): Descriptor<readonly T[], V> {
   return desc.convert(
     (x) => {
-      check
+      check`
+        ${Array.isArray(x)}
+        Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
+      `;
       // NOTE [ToDr] This assumption is incorrect in general, but it's documented
       // in the general note. We avoid `.slice()` the array for performance reasons.
       return x as T[];
@@ -3447,6 +3439,8 @@ type KeccakHash = Bytes<HASH_SIZE>;
 /** Truncated hash. */
 type TruncatedHash = Bytes<TRUNCATED_HASH_SIZE>;

+declare const ZERO_HASH = Bytes.zero(HASH_SIZE);
+
 /**
  * Container for some object with a hash that is related to this object.
  *
@@ -3495,7 +3489,7 @@ declare class PageAllocator implements HashAllocator {

   // TODO [ToDr] Benchmark the performance!
   constructor(private readonly hashesPerPage: number) {
-    check
+    check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
     this.resetPage();
   }

@@ -3655,11 +3649,12 @@ type index$n_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
 declare const index$n_WithHash: typeof WithHash;
 type index$n_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$n_WithHashAndBytes: typeof WithHashAndBytes;
+declare const index$n_ZERO_HASH: typeof ZERO_HASH;
 declare const index$n_blake2b: typeof blake2b;
 declare const index$n_defaultAllocator: typeof defaultAllocator;
 declare const index$n_keccak: typeof keccak;
 declare namespace index$n {
-  export { index$n_PageAllocator as PageAllocator, index$n_SimpleAllocator as SimpleAllocator, index$n_WithHash as WithHash, index$n_WithHashAndBytes as WithHashAndBytes, index$n_blake2b as blake2b, index$n_defaultAllocator as defaultAllocator, index$n_keccak as keccak };
+  export { index$n_PageAllocator as PageAllocator, index$n_SimpleAllocator as SimpleAllocator, index$n_WithHash as WithHash, index$n_WithHashAndBytes as WithHashAndBytes, index$n_ZERO_HASH as ZERO_HASH, index$n_blake2b as blake2b, index$n_defaultAllocator as defaultAllocator, index$n_keccak as keccak };
   export type { index$n_Blake2bHash as Blake2bHash, index$n_HASH_SIZE as HASH_SIZE, index$n_HashAllocator as HashAllocator, index$n_KeccakHash as KeccakHash, index$n_OpaqueHash as OpaqueHash, index$n_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$n_TruncatedHash as TruncatedHash };
 }

@@ -3876,8 +3871,8 @@ declare class MultiMap<TKeys extends readonly unknown[], TValue> {
   * if needed.
   */
   constructor(keysLength: TKeys["length"], keyMappers?: KeyMappers<TKeys>) {
-    check
-    check
+    check`${keysLength > 0} Keys cannot be empty.`;
+    check`${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
     this.data = new Map() as NestedMaps<TKeys, TValue>;
     this.keyMappers = keyMappers === undefined ? (Array(keysLength).fill(null) as KeyMappers<TKeys>) : keyMappers;
   }
@@ -4002,7 +3997,7 @@ declare class FixedSizeArray<T, N extends number> extends Array<T> {
   }

   static new<T, N extends number>(data: readonly T[], len: N): FixedSizeArray<T, N> {
-    check
+    check`${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;

     const arr = new FixedSizeArray<T, N>(len);

@@ -4190,7 +4185,7 @@ declare class SortedArray<V> implements ImmutableSortedArray<V> {

   /** Create a new SortedSet from two sorted collections. */
   static fromTwoSortedCollections<V>(first: ImmutableSortedArray<V>, second: ImmutableSortedArray<V>) {
-    check
+    check`${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
     const comparator = first.comparator;
     const arr1 = first.array;
     const arr1Length = arr1.length;
@@ -4327,7 +4322,7 @@ declare class SortedSet<V> extends SortedArray<V> implements ImmutableSortedSet<

   /** Create a new SortedSet from two sorted collections. */
   static fromTwoSortedCollections<V>(first: ImmutableSortedArray<V>, second: ImmutableSortedArray<V>) {
-    check
+    check`${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
     const comparator = first.comparator;

     if (first.length === 0) {
@@ -4597,7 +4592,7 @@ type BlsKey = Opaque<Bytes<BLS_KEY_BYTES>, "BlsKey">;
 declare function publicKey(seed: Uint8Array): BandersnatchKey {
   const key = bandersnatch.derive_public_key(seed);

-  check
+  check`${key[0] === 0} Invalid Bandersnatch public key derived from seed`;

   return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
 }
@@ -4703,7 +4698,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     data.set(signature.raw, offset);
     offset += ED25519_SIGNATURE_BYTES;
     const messageLength = message.length;
-    check
+    check`${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
     data[offset] = messageLength;
     offset += 1;
     data.set(message.raw, offset);
@@ -5258,10 +5253,10 @@ declare const tryAsEpoch = (v: number): Epoch => asOpaqueType(tryAsU32(v));
 /** One entry of `T` per one validator. */
 type PerValidator<T> = KnownSizeArray<T, "ValidatorsCount">;
 declare function tryAsPerValidator<T>(array: T[], spec: ChainSpec): PerValidator<T> {
-  check
-    array.length === spec.validatorsCount
-
-
+  check`
+    ${array.length === spec.validatorsCount}
+    Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
+  `;
   return asKnownSize(array);
 }
 declare const codecPerValidator = <T, V>(val: Descriptor<T, V>): Descriptor<PerValidator<T>, SequenceView<T, V>> =>
@@ -5274,10 +5269,10 @@ declare const codecPerValidator = <T, V>(val: Descriptor<T, V>): Descriptor<PerV
 /** One entry of `T` per one block in epoch. */
 type PerEpochBlock<T> = KnownSizeArray<T, "EpochLength">;
 declare function tryAsPerEpochBlock<T>(array: T[], spec: ChainSpec): PerEpochBlock<T> {
-  check
-    array.length === spec.epochLength
-
-
+  check`
+    ${array.length === spec.epochLength}
+    Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
+  `;
   return asKnownSize(array);
 }
 declare const codecPerEpochBlock = <T, V>(val: Descriptor<T, V>): Descriptor<PerEpochBlock<T>, SequenceView<T, V>> =>
@@ -5892,11 +5887,11 @@ type WorkItemsCount = U8;

 /** Verify the value is within the `WorkItemsCount` bounds. */
 declare function tryAsWorkItemsCount(len: number): WorkItemsCount {
-
-    len
-
-
-  );
+  check`
+    ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= MAX_NUMBER_OF_WORK_ITEMS}
+    WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
+  `;
+  return tryAsU8(len);
 }

 /** Minimal number of work items in the work package or results in work report. */
@@ -8319,13 +8314,13 @@ declare class TrieNode {

   /** View this node as a branch node */
   asBranchNode(): BranchNode {
-    check
+    check`${this.getNodeType() === NodeType.Branch} not a branch!`;
     return new BranchNode(this);
   }

   /** View this node as a leaf node */
   asLeafNode(): LeafNode {
-    check
+    check`${this.getNodeType() !== NodeType.Branch} not a leaf!`;
     return new LeafNode(this);
   }

@@ -8866,7 +8861,7 @@ declare function createSubtreeForBothLeaves(
  * Return a single bit from `key` located at `bitIndex`.
  */
 declare function getBit(key: TruncatedStateKey, bitIndex: number): boolean {
-  check
+  check`${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
   const byte = bitIndex >>> 3;
   const bit = bitIndex - (byte << 3);
   const mask = 0b10_00_00_00 >>> bit;
@@ -9086,10 +9081,10 @@ declare class AvailabilityAssignment extends WithDebug {
 type PerCore<T> = KnownSizeArray<T, "number of cores">;
 /** Check if given array has correct length before casting to the opaque type. */
 declare function tryAsPerCore<T>(array: T[], spec: ChainSpec): PerCore<T> {
-  check
-    array.length === spec.coresCount
-
-
+  check`
+    ${array.length === spec.coresCount}
+    Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
+  `;
   return asOpaqueType(array);
 }
 declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T>, SequenceView<T, V>> =>
@@ -10792,7 +10787,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState

   private removeServices(servicesRemoved: ServiceId[] | undefined) {
     for (const serviceId of servicesRemoved ?? []) {
-      check
+      check`${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
       this.services.delete(serviceId);
     }
   }
@@ -10813,10 +10808,10 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
         service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
       } else if (kind === UpdateStorageKind.Remove) {
         const { key } = action;
-        check
-          service.data.storage.has(key.toString())
-
-
+        check`
+          ${service.data.storage.has(key.toString())}
+          Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+        `;
         service.data.storage.delete(key.toString());
       } else {
         assertNever(kind);
@@ -12450,7 +12445,6 @@ declare const N_CHUNKS_REDUNDANCY = 681;
 /** Total number of chunks generated by EC. */
 declare const N_CHUNKS_TOTAL = 1023;
 type N_CHUNKS_TOTAL = typeof N_CHUNKS_TOTAL;
-// check(N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY);

 /**
  * reed-solomon-simd requires point size to be multiple of 64 bytes but we need only 2 bytes.
@@ -12467,7 +12461,11 @@ type POINT_LENGTH = typeof POINT_LENGTH;

 declare const PIECE_SIZE = 684;
 type PIECE_SIZE = typeof PIECE_SIZE;
-
+
+declare function checkConsistency() {
+  check`${N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY} no of required and redundancy chunks need to match ${N_CHUNKS_TOTAL}`;
+  check`${PIECE_SIZE === N_CHUNKS_REQUIRED * POINT_LENGTH} piece size needs to match ${N_CHUNKS_REQUIRED} * ${POINT_LENGTH}`;
+}

 /**
  * Takes arbitrarily long input data, padds it to multiple of `PIECE_SIZE` and returns
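The commented-out assertion removed in the previous hunk returns here as a callable `checkConsistency()`. With the constants visible in this diff (N_CHUNKS_REDUNDANCY = 681, N_CHUNKS_TOTAL = 1023, PIECE_SIZE = 684), the two checks pin down the values that are not shown here:

    N_CHUNKS_TOTAL = N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY  ->  1023 = 342 + 681
    PIECE_SIZE     = N_CHUNKS_REQUIRED * POINT_LENGTH          ->   684 = 342 * 2

which matches the "we need only 2 bytes" remark in the comment above.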
@@ -12506,11 +12504,11 @@ declare function decodeDataAndTrim(
 declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_REQUIRED>): BytesBlob {
   const pointBytes = input[0][1].length;
   const points = Math.floor(pointBytes / POINT_LENGTH);
-  check
-  check
-    input.every(([_idx, point]) => point.length === pointBytes),
-
-
+  check`${points * POINT_LENGTH === pointBytes} Each point length needs to be a multiple of ${POINT_LENGTH}`;
+  check`
+    ${input.every(([_idx, point]) => point.length === pointBytes)},
+    Every piece must have the same length!
+  `;

   const pieces = FixedSizeArray.fill(() => Bytes.zero(PIECE_SIZE), points);

@@ -12601,7 +12599,7 @@ declare function decodePiece(
     throw new Error("indices array in decoded result must exist!");
   }

-  check
+  check`${resultData.length === resultIndices.length * POINT_ALIGNMENT} incorrect length of data or indices!`;

   for (let i = 0; i < resultIndices.length; i++) {
     // fill reconstructed shards in result
@@ -12629,7 +12627,7 @@ declare function decodePiece(
  * https://graypaper.fluffylabs.dev/#/9a08063/3eb4013eb401?v=0.6.6
  */
 declare function split<N extends number, K extends number>(input: BytesBlob, n: N, k: K): FixedSizeArray<Bytes<N>, K> {
-  check
+  check`${n * k === input.length}`;
   const result: Bytes<N>[] = [];
   for (let i = 0; i < k; i++) {
     const start = i * n;
@@ -12752,7 +12750,7 @@ declare function transpose<T, N extends number, K extends number>(
  */
 declare function chunkingFunction(input: BytesBlob): FixedSizeArray<BytesBlob, N_CHUNKS_TOTAL> {
   const k = Math.floor(input.length / PIECE_SIZE);
-  check
+  check`${k * PIECE_SIZE === input.length} Input length ${input.length} is not divisible by ${PIECE_SIZE}`;

   // we get a `k` pieces.
   const pieces = unzip<PIECE_SIZE, typeof k>(input, PIECE_SIZE, k);
@@ -12775,10 +12773,10 @@ declare function shardsToChunks(spec: ChainSpec, shards: PerValidator<BytesBlob>
   const result: [number, BytesBlob][][] = [];

   const shardSize = shards[0].length;
-  check
-    shards.every((s) => s.length === shardSize)
-
-
+  check`
+    ${shards.every((s) => s.length === shardSize)}
+    Each shard must be the same length!
+  `;

   const totalData = shards.map((s) => s.length).reduce((sum, sLength) => sum + sLength, 0);
   const chunkSize = Math.floor(totalData / N_CHUNKS_TOTAL);
@@ -12841,6 +12839,7 @@ type index$9_N_CHUNKS_TOTAL = N_CHUNKS_TOTAL;
 type index$9_PIECE_SIZE = PIECE_SIZE;
 declare const index$9_POINT_ALIGNMENT: typeof POINT_ALIGNMENT;
 type index$9_POINT_LENGTH = POINT_LENGTH;
+declare const index$9_checkConsistency: typeof checkConsistency;
 declare const index$9_chunkingFunction: typeof chunkingFunction;
 declare const index$9_chunksToShards: typeof chunksToShards;
 declare const index$9_decodeData: typeof decodeData;
@@ -12856,7 +12855,7 @@ declare const index$9_split: typeof split;
 declare const index$9_transpose: typeof transpose;
 declare const index$9_unzip: typeof unzip;
 declare namespace index$9 {
-  export { index$9_HALF_POINT_SIZE as HALF_POINT_SIZE, index$9_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$9_POINT_ALIGNMENT as POINT_ALIGNMENT, index$9_chunkingFunction as chunkingFunction, index$9_chunksToShards as chunksToShards, index$9_decodeData as decodeData, index$9_decodeDataAndTrim as decodeDataAndTrim, index$9_decodePiece as decodePiece, index$9_encodePoints as encodePoints, index$9_initEc as initEc, index$9_join as join, index$9_lace as lace, index$9_padAndEncodeData as padAndEncodeData, index$9_shardsToChunks as shardsToChunks, index$9_split as split, index$9_transpose as transpose, index$9_unzip as unzip };
+  export { index$9_HALF_POINT_SIZE as HALF_POINT_SIZE, index$9_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$9_POINT_ALIGNMENT as POINT_ALIGNMENT, index$9_checkConsistency as checkConsistency, index$9_chunkingFunction as chunkingFunction, index$9_chunksToShards as chunksToShards, index$9_decodeData as decodeData, index$9_decodeDataAndTrim as decodeDataAndTrim, index$9_decodePiece as decodePiece, index$9_encodePoints as encodePoints, index$9_initEc as initEc, index$9_join as join, index$9_lace as lace, index$9_padAndEncodeData as padAndEncodeData, index$9_shardsToChunks as shardsToChunks, index$9_split as split, index$9_transpose as transpose, index$9_unzip as unzip };
   export type { index$9_N_CHUNKS_REQUIRED as N_CHUNKS_REQUIRED, index$9_N_CHUNKS_TOTAL as N_CHUNKS_TOTAL, index$9_PIECE_SIZE as PIECE_SIZE, index$9_POINT_LENGTH as POINT_LENGTH };
 }

@@ -13228,7 +13227,7 @@ declare class Mask {
   }

   getNoOfBytesToNextInstruction(index: number) {
-    check
+    check`${index >= 0} index (${index}) cannot be a negative number`;
     return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
   }

@@ -13378,13 +13377,13 @@ declare class Registers {
   private asUnsigned: BigUint64Array;

   constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
-    check
+    check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
     this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
     this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
   }

   static fromBytes(bytes: Uint8Array) {
-    check
+    check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
     return new Registers(bytes);
   }

@@ -14270,13 +14269,17 @@ declare class InstructionResult {

 type MemoryIndex = Opaque<number, "memory index">;

-declare const tryAsMemoryIndex = (index: number): MemoryIndex =>
-
+declare const tryAsMemoryIndex = (index: number): MemoryIndex => {
+  check`${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
+  return asOpaqueType(index);
+};

 type SbrkIndex = Opaque<number, "sbrk index">;

-declare const tryAsSbrkIndex = (index: number): SbrkIndex =>
-
+declare const tryAsSbrkIndex = (index: number): SbrkIndex => {
+  check`${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
+  return asOpaqueType(index);
+};

 type PageIndex = Opaque<number, "memory page index">;
 type PageNumber = Opaque<number, "memory page number">;
@@ -14645,10 +14648,10 @@ declare class MemoryBuilder {
   */
   setReadablePages(start: MemoryIndex, end: MemoryIndex, data: Uint8Array = new Uint8Array()) {
     this.ensureNotFinalized();
-    check
-    check
-    check
-    check
+    check`${start < end} end has to be bigger than start`;
+    check`${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+    check`${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+    check`${data.length <= end - start} the initial data is longer than address range`;

     const length = end - start;
     const range = MemoryRange.fromStartAndLength(start, length);
@@ -14679,10 +14682,10 @@ declare class MemoryBuilder {
   */
   setWriteablePages(start: MemoryIndex, end: MemoryIndex, data: Uint8Array = new Uint8Array()) {
     this.ensureNotFinalized();
-    check
-    check
-    check
-    check
+    check`${start < end} end has to be bigger than start`;
+    check`${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+    check`${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+    check`${data.length <= end - start} the initial data is longer than address range`;

     const length = end - start;
     const range = MemoryRange.fromStartAndLength(start, length);
@@ -14710,7 +14713,7 @@ declare class MemoryBuilder {
     this.ensureNotFinalized();
     const pageOffset = start % PAGE_SIZE;
     const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
-    check
+    check`${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;

     const length = data.length;
     const range = MemoryRange.fromStartAndLength(start, length);
@@ -14731,10 +14734,10 @@ declare class MemoryBuilder {
   }

   finalize(startHeapIndex: MemoryIndex, endHeapIndex: SbrkIndex): Memory {
-    check
-      startHeapIndex <= endHeapIndex
-
-
+    check`
+      ${startHeapIndex <= endHeapIndex}
+      startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
+    `;
     this.ensureNotFinalized();

     const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
@@ -15067,10 +15070,10 @@ declare class JumpTable {
   private indices: Uint32Array;

   constructor(itemByteLength: number, bytes: Uint8Array) {
-    check
-      itemByteLength === 0 || bytes.length % itemByteLength === 0
-
-
+    check`
+      ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
+      Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
+    `;

     const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;

@@ -17278,8 +17281,8 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
     bytes: bigint,
     serviceInfo: ServiceAccountInfo,
   ): Result$2<OK, InsufficientFundsError> {
-    check
-    check
+    check`${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
+    check`${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;

     const overflowItems = !isU32(items);
     const overflowBytes = !isU64(bytes);
@@ -17484,7 +17487,7 @@ declare class HostCallsManager {
     this.missing = missing;

     for (const handler of handlers) {
-      check
+      check`${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
       this.hostCalls.set(handler.index, handler);
     }
   }
@@ -17556,10 +17559,10 @@ declare class ReturnValue {
     public status: Status | null,
     public memorySlice: Uint8Array | null,
   ) {
-    check
-      (status === null && memorySlice !== null) || (status !== null && memorySlice === null)
-
-
+    check`
+      ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
+      'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
+    `;
   }

   static fromStatus(consumedGas: Gas, status: Status) {
@@ -17617,10 +17620,10 @@ declare class HostCalls {
     if (status !== Status.HOST) {
       return this.getReturnValue(status, pvmInstance);
     }
-    check
-      pvmInstance.getExitParam() !== null
-      "We know that the exit param is not null, because the status is
-
+    check`
+      ${pvmInstance.getExitParam() !== null}
+      "We know that the exit param is not null, because the status is 'Status.HOST'
+    `;
     const hostCallIndex = pvmInstance.getExitParam() ?? -1;
     const gas = pvmInstance.getGasCounter();
     const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -17729,7 +17732,7 @@ declare function getServiceId(serviceId: U64): ServiceId | null {
 }

 declare function writeServiceIdAsLeBytes(serviceId: ServiceId, destination: Uint8Array) {
-  check
+  check`${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
   destination.set(u32AsLeBytes(serviceId));
 }

@@ -17805,20 +17808,6 @@ declare namespace index$6 {

 declare const NO_OF_REGISTERS = 13;

-/**
- * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
- *
- * E_n - little endian encoding, n - length
- * o - initial read only data
- * w - initial heap
- * z - heap pages filled with zeros
- * s - stack size
- * c - program code
- *
- * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
- */
-type InputLength = Opaque<number, "Number that is lower than 2 ** 24 (Z_I from GP)">;
-
 declare class MemorySegment extends WithDebug {
   static from({ start, end, data }: Omit<MemorySegment, never>) {
     return new MemorySegment(start, end, data);
@@ -17853,17 +17842,27 @@ declare class SpiProgram extends WithDebug {
   }
 }

+/**
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
+ *
+ * E_n - little endian encoding, n - length
+ * o - initial read only data
+ * w - initial heap
+ * z - heap pages filled with zeros
+ * s - stack size
+ * c - program code
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
+ */
 declare function decodeStandardProgram(program: Uint8Array, args: Uint8Array) {
   const decoder = Decoder.fromBlob(program);
   const oLength = decoder.u24();
   const wLength = decoder.u24();
-
-
-
-
-
-  );
-  const heapLength = ensure<number, InputLength>(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
+  check`${args.length <= DATA_LENGTH} Incorrect arguments length`;
+  check`${oLength <= DATA_LENGTH} Incorrect readonly segment length`;
+  const readOnlyLength = oLength;
+  check`${wLength <= DATA_LENGTH} Incorrect heap segment length`;
+  const heapLength = wLength;
   const noOfHeapZerosPages = decoder.u16();
   const stackSize = decoder.u24();
   const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
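The doc comment that used to sit next to the removed `InputLength` type now documents `decodeStandardProgram` directly, and the old `ensure<...>` bound checks become plain `check` calls against `DATA_LENGTH`. As a reading aid (not code from the package), the header layout maps onto the decoder calls in this hunk as follows:

    E_3(|o|) -> decoder.u24()   // oLength, read-only data length
    E_3(|w|) -> decoder.u24()   // wLength, initial heap length
    E_2(z)   -> decoder.u16()   // noOfHeapZerosPages
    E_3(s)   -> decoder.u24()   // stackSize
    o        -> decoder.bytes(readOnlyLength)
    // w, E_4(|c|) and c follow in the same order.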
@@ -17880,8 +17879,8 @@ declare function decodeStandardProgram(program: Uint8Array, args: Uint8Array) {
|
|
|
17880
17879
|
const stackStart = STACK_SEGMENT - alignToPageSize(stackSize);
|
|
17881
17880
|
const stackEnd = STACK_SEGMENT;
|
|
17882
17881
|
const argsStart = ARGS_SEGMENT;
|
|
17883
|
-
const argsEnd = argsStart + alignToPageSize(
|
|
17884
|
-
const argsZerosEnd = argsEnd + alignToPageSize(
|
|
17882
|
+
const argsEnd = argsStart + alignToPageSize(args.length);
|
|
17883
|
+
const argsZerosEnd = argsEnd + alignToPageSize(args.length);
|
|
17885
17884
|
|
|
17886
17885
|
function nonEmpty(s: MemorySegment | false): s is MemorySegment {
|
|
17887
17886
|
return s !== false;
|
|
@@ -17889,7 +17888,7 @@ declare function decodeStandardProgram(program: Uint8Array, args: Uint8Array) {
|
|
|
17889
17888
|
|
|
17890
17889
|
const readableMemory = [
|
|
17891
17890
|
readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
|
|
17892
|
-
|
|
17891
|
+
args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
|
|
17893
17892
|
argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
|
|
17894
17893
|
].filter(nonEmpty);
|
|
17895
17894
|
const writeableMemory = [
|
|
@@ -17921,7 +17920,6 @@ declare function getRegisters(argsLength: number) {
|
|
|
17921
17920
|
return regs;
|
|
17922
17921
|
}
|
|
17923
17922
|
|
|
17924
|
-
type index$5_InputLength = InputLength;
|
|
17925
17923
|
type index$5_MemorySegment = MemorySegment;
|
|
17926
17924
|
declare const index$5_MemorySegment: typeof MemorySegment;
|
|
17927
17925
|
declare const index$5_NO_OF_REGISTERS: typeof NO_OF_REGISTERS;
|
|
@@ -17933,8 +17931,15 @@ declare const index$5_decodeStandardProgram: typeof decodeStandardProgram;
|
|
|
17933
17931
|
declare const index$5_getMemorySegment: typeof getMemorySegment;
|
|
17934
17932
|
declare const index$5_getRegisters: typeof getRegisters;
|
|
17935
17933
|
declare namespace index$5 {
|
|
17936
|
-
export {
|
|
17937
|
-
|
|
17934
|
+
export {
|
|
17935
|
+
index$5_MemorySegment as MemorySegment,
|
|
17936
|
+
index$5_NO_OF_REGISTERS as NO_OF_REGISTERS,
|
|
17937
|
+
index$5_SpiMemory as SpiMemory,
|
|
17938
|
+
index$5_SpiProgram as SpiProgram,
|
|
17939
|
+
index$5_decodeStandardProgram as decodeStandardProgram,
|
|
17940
|
+
index$5_getMemorySegment as getMemorySegment,
|
|
17941
|
+
index$5_getRegisters as getRegisters,
|
|
17942
|
+
};
|
|
17938
17943
|
}
|
|
17939
17944
|
|
|
17940
17945
|
declare class Program {
|
|
@@ -18057,7 +18062,7 @@ declare class DebuggerAdapter {
|
|
|
18057
18062
|
}
|
|
18058
18063
|
|
|
18059
18064
|
nSteps(steps: number): boolean {
|
|
18060
|
-
check
|
|
18065
|
+
check`${steps >>> 0 > 0} Expected a positive integer got ${steps}`;
|
|
18061
18066
|
for (let i = 0; i < steps; i++) {
|
|
18062
18067
|
const isOk = this.nextStep();
|
|
18063
18068
|
if (!isOk) {
|
|
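The rewritten nSteps guard relies on unsigned 32-bit coercion. A standalone illustration of how `steps >>> 0 > 0` evaluates for a few inputs; this snippet is not library code:

// `>>>` binds tighter than `>`, so the guard reads `(steps >>> 0) > 0`,
// where `x >>> 0` coerces x to an unsigned 32-bit integer.
const passesGuard = (steps: number): boolean => steps >>> 0 > 0;

console.log(passesGuard(5));          // true
console.log(passesGuard(0));          // false (0 >>> 0 === 0)
console.log(passesGuard(0.75));       // false (fraction truncates to 0)
console.log(passesGuard(Number.NaN)); // false (NaN coerces to 0)
console.log(passesGuard(-1));         // true  (-1 >>> 0 === 4294967295)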
@@ -18194,12 +18199,10 @@ declare const index$3___OPAQUE_TYPE__: typeof __OPAQUE_TYPE__;
|
|
|
18194
18199
|
declare const index$3_asOpaqueType: typeof asOpaqueType;
|
|
18195
18200
|
declare const index$3_assertEmpty: typeof assertEmpty;
|
|
18196
18201
|
declare const index$3_assertNever: typeof assertNever;
|
|
18197
|
-
declare const index$3_cast: typeof cast;
|
|
18198
18202
|
declare const index$3_check: typeof check;
|
|
18199
18203
|
declare const index$3_clampU64ToU32: typeof clampU64ToU32;
|
|
18200
18204
|
declare const index$3_createResults: typeof createResults;
|
|
18201
18205
|
declare const index$3_decodeStandardProgram: typeof decodeStandardProgram;
|
|
18202
|
-
declare const index$3_ensure: typeof ensure;
|
|
18203
18206
|
declare const index$3_extractCodeAndMetadata: typeof extractCodeAndMetadata;
|
|
18204
18207
|
declare const index$3_getServiceId: typeof getServiceId;
|
|
18205
18208
|
declare const index$3_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
|
|
@@ -18218,7 +18221,7 @@ declare const index$3_tryAsMachineId: typeof tryAsMachineId;
|
|
|
18218
18221
|
declare const index$3_tryAsProgramCounter: typeof tryAsProgramCounter;
|
|
18219
18222
|
declare const index$3_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
|
|
18220
18223
|
declare namespace index$3 {
|
|
18221
|
-
export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$
|
|
18224
|
+
export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
|
|
18222
18225
|
export type { index$3_Args as Args, index$3_EnumMapping as EnumMapping, index$3_ErrorResult as ErrorResult, index$3_IHostCallMemory as IHostCallMemory, index$3_IHostCallRegisters as IHostCallRegisters, index$3_InsufficientFundsError as InsufficientFundsError, index$3_MachineId as MachineId, index$3_MachineResult as MachineResult, index$3_MachineStatus as MachineStatus, index$3_NoMachineError as NoMachineError, index$3_OK as OK, index$3_OkResult as OkResult, index$3_Opaque as Opaque, index$3_PartialState as PartialState, index$3_PreimageStatus as PreimageStatus, index$3_ProgramCounter as ProgramCounter, index$3_RefineExternalities as RefineExternalities, index$3_SegmentExportError as SegmentExportError, index$3_ServiceStateUpdate as ServiceStateUpdate, index$3_StateSlice as StateSlice, index$3_StringLiteral as StringLiteral, index$3_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$3_TaggedError as TaggedError, index$3_TokenOf as TokenOf, index$3_Uninstantiable as Uninstantiable, index$3_UnprivilegedError as UnprivilegedError, index$3_WithOpaque as WithOpaque };
|
|
18223
18226
|
}
|
|
18224
18227
|
|
|
@@ -18231,7 +18234,7 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
|
|
|
18231
18234
|
* https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
|
|
18232
18235
|
*/
|
|
18233
18236
|
declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
|
|
18234
|
-
check
|
|
18237
|
+
check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
|
|
18235
18238
|
const n = arr.length;
|
|
18236
18239
|
const randomNumbers = hashToNumberSequence(entropy, arr.length);
|
|
18237
18240
|
const result: T[] = new Array<T>(n);
|
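fisherYatesShuffle consumes one precomputed random number per output position. A minimal sketch assuming the numbers are supplied directly; shuffleWithNumbers is a hypothetical helper, and the package derives its sequence from entropy via hashToNumberSequence and may use a different index formula:

// Deterministic Fisher-Yates-style shuffle driven by a supplied number sequence.
function shuffleWithNumbers<T>(arr: readonly T[], numbers: readonly number[]): T[] {
  const source = [...arr];
  const result: T[] = new Array<T>(arr.length);
  for (let i = 0; i < result.length; i++) {
    // Pick one of the remaining elements using the i-th number.
    const index = numbers[i] % source.length;
    result[i] = source[index];
    // Remove the chosen element by swapping in the last remaining one.
    source[index] = source[source.length - 1];
    source.pop();
  }
  return result;
}

// Example: a fixed number sequence makes the shuffle fully reproducible.
console.log(shuffleWithNumbers(["a", "b", "c", "d"], [7, 2, 5, 1]));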