@typeberry/lib 0.1.0-b2d0b72 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +136 -228
- package/index.d.ts +222 -225
- package/index.js +136 -228
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -113,21 +113,32 @@ declare function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- *
- * NOTE the function is intended to be used as tagged template string for the performance
- * reasons.
  */
-declare function check(
-  strings: TemplateStringsArray,
-  condition: boolean,
-  ...data: unknown[]
-): asserts condition is true {
+declare function check(condition: boolean, message?: string): asserts condition is true {
   if (!condition) {
-
-
-
-
+    throw new Error(`Assertion failure: ${message ?? ""}`);
+  }
+}
+
+declare function cast<T, U extends T>(_a: T, condition: boolean): _a is U {
+  return condition;
+}
+
+/**
+ * Yet another function to perform runtime assertions.
+ * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
+ *
+ * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
+ *   const x = checkAndType<number, CheckedNumber>(y);
+ * should be replaced with:
+ *   const x = y as CheckedNumber;
+ */
+declare function ensure<T, U extends T>(a: T, condition: boolean, message?: string): U {
+  if (cast<T, U>(a, condition)) {
+    return a;
   }
+
+  throw new Error(`Assertion failure: ${message ?? ""}`);
 }

 /**
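The hunk above replaces the old tagged-template assertion with a plain `check(condition, message)` and adds the `cast`/`ensure` helpers. A minimal usage sketch, re-declaring the helpers locally with the signatures shown in the diff (the package's actual import path and the branded `CheckedPort` type below are assumptions made only for illustration):

function check(condition: boolean, message?: string): asserts condition is true {
  if (!condition) {
    throw new Error(`Assertion failure: ${message ?? ""}`);
  }
}

function cast<T, U extends T>(_a: T, condition: boolean): _a is U {
  return condition;
}

function ensure<T, U extends T>(a: T, condition: boolean, message?: string): U {
  if (cast<T, U>(a, condition)) {
    return a;
  }
  throw new Error(`Assertion failure: ${message ?? ""}`);
}

// Hypothetical branded type, only for the example.
type CheckedPort = number & { readonly __brand: "CheckedPort" };

check(8080 > 0, "port must be positive");                                    // plain boolean + message
const port = ensure<number, CheckedPort>(8080, 8080 < 65536, "not a port"); // narrows to CheckedPort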
@@ -358,8 +369,8 @@ type Result$2<Ok, Error> = OkResult<Ok> | ErrorResult<Error>;
 /** An indication of two possible outcomes returned from a function. */
 declare const Result$2 = {
   /** Create new [`Result`] with `Ok` status. */
-  ok: <Ok>(ok: Ok):
-    check
+  ok: <Ok, Error>(ok: Ok): Result<Ok, Error> => {
+    check(ok !== undefined, "`Ok` type cannot be undefined.");
     return {
       isOk: true,
       isError: false,
@@ -368,8 +379,8 @@ declare const Result$2 = {
   },

   /** Create new [`Result`] with `Error` status. */
-  error: <Error>(error: Error, details = ""):
-    check
+  error: <Ok, Error>(error: Error, details = ""): Result<Ok, Error> => {
+    check(error !== undefined, "`Error` type cannot be undefined.");
     return {
       isOk: false,
       isError: true,
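The `Result$2.ok` / `error` constructors now take both generic parameters and assert their payload is defined. A hedged sketch of the shape this implies; only `isOk`/`isError` are visible in these hunks, so the payload field names and the standalone constructors below are assumptions:

type Result<Ok, Err> =
  | { isOk: true; isError: false; ok: Ok }
  | { isOk: false; isError: true; error: Err };

const ok = <Ok, Err>(value: Ok): Result<Ok, Err> => ({ isOk: true, isError: false, ok: value });
const err = <Ok, Err>(e: Err): Result<Ok, Err> => ({ isOk: false, isError: true, error: e });

// Both outcomes share the same Result<Ok, Err> type, so callers can branch on `isOk`.
function parseByte(s: string): Result<number, string> {
  const n = Number.parseInt(s, 10);
  return Number.isNaN(n) || n < 0 || n > 255 ? err("not a byte") : ok(n);
}

const r = parseByte("42");
if (r.isOk) {
  console.log(r.ok); // 42
}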
@@ -710,8 +721,10 @@ declare const index$s_asOpaqueType: typeof asOpaqueType;
 declare const index$s_assertEmpty: typeof assertEmpty;
 declare const index$s_assertNever: typeof assertNever;
 declare const index$s_callCompareFunction: typeof callCompareFunction;
+declare const index$s_cast: typeof cast;
 declare const index$s_check: typeof check;
 declare const index$s_deepEqual: typeof deepEqual;
+declare const index$s_ensure: typeof ensure;
 declare const index$s_env: typeof env;
 declare const index$s_getAllKeysSorted: typeof getAllKeysSorted;
 declare const index$s_inspect: typeof inspect;
@@ -727,7 +740,7 @@ declare const index$s_resultToString: typeof resultToString;
 declare const index$s_seeThrough: typeof seeThrough;
 declare const index$s_trimStack: typeof trimStack;
 declare namespace index$s {
-  export { index$s_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$s_CURRENT_SUITE as CURRENT_SUITE, index$s_CURRENT_VERSION as CURRENT_VERSION, index$s_Compatibility as Compatibility, index$s_DEFAULT_SUITE as DEFAULT_SUITE, index$s_DEFAULT_VERSION as DEFAULT_VERSION, index$s_ErrorsCollector as ErrorsCollector, index$s_GpVersion as GpVersion, Result$2 as Result, index$s_RichTaggedError as RichTaggedError, index$s_TEST_COMPARE_USING as TEST_COMPARE_USING, index$s_TestSuite as TestSuite, index$s_WithDebug as WithDebug, index$s___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$s_asOpaqueType as asOpaqueType, index$s_assertEmpty as assertEmpty, index$s_assertNever as assertNever, index$s_callCompareFunction as callCompareFunction, index$s_check as check, index$s_deepEqual as deepEqual, index$s_env as env, index$s_getAllKeysSorted as getAllKeysSorted, index$s_inspect as inspect, index$s_isBrowser as isBrowser, index$s_isResult as isResult, index$s_isTaggedError as isTaggedError, index$s_maybeTaggedErrorToString as maybeTaggedErrorToString, index$s_measure as measure, index$s_oomWarningPrinted as oomWarningPrinted, index$s_parseCurrentSuite as parseCurrentSuite, index$s_parseCurrentVersion as parseCurrentVersion, index$s_resultToString as resultToString, index$s_seeThrough as seeThrough, index$s_trimStack as trimStack };
+  export { index$s_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$s_CURRENT_SUITE as CURRENT_SUITE, index$s_CURRENT_VERSION as CURRENT_VERSION, index$s_Compatibility as Compatibility, index$s_DEFAULT_SUITE as DEFAULT_SUITE, index$s_DEFAULT_VERSION as DEFAULT_VERSION, index$s_ErrorsCollector as ErrorsCollector, index$s_GpVersion as GpVersion, Result$2 as Result, index$s_RichTaggedError as RichTaggedError, index$s_TEST_COMPARE_USING as TEST_COMPARE_USING, index$s_TestSuite as TestSuite, index$s_WithDebug as WithDebug, index$s___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$s_asOpaqueType as asOpaqueType, index$s_assertEmpty as assertEmpty, index$s_assertNever as assertNever, index$s_callCompareFunction as callCompareFunction, index$s_cast as cast, index$s_check as check, index$s_deepEqual as deepEqual, index$s_ensure as ensure, index$s_env as env, index$s_getAllKeysSorted as getAllKeysSorted, index$s_inspect as inspect, index$s_isBrowser as isBrowser, index$s_isResult as isResult, index$s_isTaggedError as isTaggedError, index$s_maybeTaggedErrorToString as maybeTaggedErrorToString, index$s_measure as measure, index$s_oomWarningPrinted as oomWarningPrinted, index$s_parseCurrentSuite as parseCurrentSuite, index$s_parseCurrentVersion as parseCurrentVersion, index$s_resultToString as resultToString, index$s_seeThrough as seeThrough, index$s_trimStack as trimStack };
   export type { index$s_DeepEqualOptions as DeepEqualOptions, index$s_EnumMapping as EnumMapping, index$s_ErrorResult as ErrorResult, index$s_OK as OK, index$s_OkResult as OkResult, index$s_Opaque as Opaque, index$s_StringLiteral as StringLiteral, index$s_TaggedError as TaggedError, index$s_TokenOf as TokenOf, index$s_Uninstantiable as Uninstantiable, index$s_WithOpaque as WithOpaque };
 }

@@ -912,7 +925,7 @@ declare class BytesBlob {

   /** Create a new [`BytesBlob`] from an array of bytes. */
   static blobFromNumbers(v: number[]): BytesBlob {
-    check
+    check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
     const arr = new Uint8Array(v);
     return new BytesBlob(arr);
   }
@@ -962,7 +975,7 @@ declare class Bytes<T extends number> extends BytesBlob {

   private constructor(raw: Uint8Array, len: T) {
     super(raw);
-    check
+    check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
     this.length = len;
   }

@@ -973,7 +986,7 @@ declare class Bytes<T extends number> extends BytesBlob {

   /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
   static fromNumbers<X extends number>(v: number[], len: X): Bytes<X> {
-    check
+    check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
     const x = new Uint8Array(v);
     return new Bytes(x, len);
   }
@@ -986,7 +999,7 @@ declare class Bytes<T extends number> extends BytesBlob {
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
   /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
   static fill<X extends number>(len: X, input: number): Bytes<X> {
-    check
+    check((input & 0xff) === input, "Input has to be a byte.");
     const bytes = Bytes.zero(len);
     bytes.raw.fill(input, 0, len);
     return bytes;
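Each of the `Bytes` / `BytesBlob` constructors above now spells out the same guard: every element of the input array must already fit in a single byte. A small standalone illustration of that `(x & 0xff) === x` test (plain Uint8Array, not the package's classes):

function toBytes(v: number[]): Uint8Array {
  const offending = v.find((x) => (x & 0xff) !== x);
  if (offending !== undefined) {
    throw new Error(`non-byte value in array: ${offending}`);
  }
  return new Uint8Array(v);
}

toBytes([0x00, 0x7f, 0xff]); // ok
// toBytes([256]);           // throws: 256 & 0xff === 0, which is !== 256
// toBytes([-1]);            // throws: -1 & 0xff === 255, which is !== -1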
@@ -1014,7 +1027,7 @@ declare class Bytes<T extends number> extends BytesBlob {

   /** Compare the sequence to another one. */
   isEqualTo(other: Bytes<T>): boolean {
-    check
+    check(this.length === other.length, "Comparing incorrectly typed bytes!");
     return u8ArraySameLengthEqual(this.raw, other.raw);
   }

@@ -1025,7 +1038,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 }

 declare function byteFromString(s: string): number {
-  check
+  check(s.length === 2, "Two-character string expected");
   const a = numberFromCharCode(s.charCodeAt(0));
   const b = numberFromCharCode(s.charCodeAt(1));
   return (a << 4) | b;
@@ -1111,10 +1124,10 @@ declare class BitVec {
     private readonly data: Uint8Array,
     public readonly bitLength: number,
   ) {
-    check
-
-      Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}
-
+    check(
+      data.length * 8 >= bitLength,
+      `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`,
+    );

     this.byteLength = Math.ceil(bitLength / 8);
   }
@@ -1126,10 +1139,10 @@ declare class BitVec {

   /** Perform OR operation on all bits in place. */
   sumWith(other: BitVec) {
-    check
-
-      Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
-
+    check(
+      other.bitLength === this.bitLength,
+      `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`,
+    );

     const otherRaw = other.raw;
     for (let i = 0; i < this.byteLength; i++) {
@@ -1141,7 +1154,7 @@ declare class BitVec {
    * Set the bit at index `idx` to value `val`.
    */
   setBit(idx: number, val: boolean) {
-    check
+    check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);

     const byteIndex = Math.floor(idx / 8);
     const bitIndexInByte = idx % 8;
@@ -1157,7 +1170,7 @@ declare class BitVec {
    * Return `true` if the bit at index `idx` is set.
    */
   isSet(idx: number): boolean {
-    check
+    check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
     const byteIndex = Math.floor(idx / 8);
     const bitIndexInByte = idx % 8;
     const mask = 1 << bitIndexInByte;
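The `setBit` / `isSet` context above shows how a bit index is mapped onto the backing `Uint8Array`; a tiny worked example of that arithmetic:

const idx = 11;                          // bit we want to address
const byteIndex = Math.floor(idx / 8);   // 1  -> second byte of the array
const bitIndexInByte = idx % 8;          // 3  -> fourth bit within that byte
const mask = 1 << bitIndexInByte;        // 0b0000_1000 === 8
console.log(byteIndex, bitIndexInByte, mask); // 1 3 8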
@@ -1228,7 +1241,7 @@ declare const __REPRESENTATION_BYTES__: "REPRESENTATION_BYTES";
 type WithBytesRepresentation<Bytes extends number> = {
   readonly [__REPRESENTATION_BYTES__]: Bytes;
 };
-declare const
+declare const asWithBytesRepresentation = <T, N extends number>(v: T): T & WithBytesRepresentation<N> =>
   v as T & WithBytesRepresentation<N>;

 type FixedSizeNumber<Bytes extends number> = number & WithBytesRepresentation<Bytes>;
@@ -1247,27 +1260,20 @@ type U64 = bigint & WithBytesRepresentation<8>;
 declare const MAX_VALUE_U64 = 0xffff_ffff_ffff_ffffn;

 /** Attempt to cast an input number into U8. */
-declare const tryAsU8 = (v: number): U8 =>
-
-  return asTypedNumber(v);
-};
+declare const tryAsU8 = (v: number): U8 =>
+  ensure<number, U8>(v, isU8(v), `input must have one-byte representation, got ${v}`);
 /** Check if given number is a valid U8 number. */
 declare const isU8 = (v: number): v is U8 => (v & MAX_VALUE_U8) === v;

 /** Attempt to cast an input number into U16. */
-declare const tryAsU16 = (v: number): U16 =>
-
-  return asTypedNumber(v);
-};
-
+declare const tryAsU16 = (v: number): U16 =>
+  ensure<number, U16>(v, isU16(v), `input must have two-byte representation, got ${v}`);
 /** Check if given number is a valid U16 number. */
 declare const isU16 = (v: number): v is U16 => (v & MAX_VALUE_U16) === v;

 /** Attempt to cast an input number into U32. */
-declare const tryAsU32 = (v: number): U32 =>
-
-  return asTypedNumber(v);
-};
+declare const tryAsU32 = (v: number): U32 =>
+  ensure<number, U32>(v, isU32(v), `input must have four-byte representation, got ${v}`);

 /** Check if given number is a valid U32 number. */
 declare const isU32 = (v: number): v is U32 => (v & MAX_VALUE_U32) >>> 0 === v;
@@ -1275,28 +1281,25 @@ declare const isU32 = (v: number): v is U32 => (v & MAX_VALUE_U32) >>> 0 === v;
 /** Attempt to cast an input number into U64. */
 declare const tryAsU64 = (x: number | bigint): U64 => {
   const v = BigInt(x);
-
-  return asTypedNumber(v);
+  return ensure<bigint, U64>(v, isU64(v), `input must have eight-byte representation, got ${x}`);
 };
-
 /** Check if given number is a valid U64 number. */
 declare const isU64 = (v: bigint): v is U64 => (v & MAX_VALUE_U64) === v;

 /** Collate two U32 parts into one U64. */
 declare const u64FromParts = ({ lower, upper }: { lower: U32; upper: U32 }): U64 => {
   const val = (BigInt(upper) << 32n) + BigInt(lower);
-  return
+  return asWithBytesRepresentation(val);
 };

 /** Split U64 into lower & upper parts. */
 declare const u64IntoParts = (v: U64): { lower: U32; upper: U32 } => {
-
-  const
-  const upper = Number(v >> 32n);
+  const lower = v & (2n ** 32n - 1n);
+  const upper = v >> 32n;

   return {
-    lower:
-    upper:
+    lower: asWithBytesRepresentation(Number(lower)),
+    upper: asWithBytesRepresentation(Number(upper)),
   };
 };

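A worked example of the U64 <-> two-U32 split that `u64FromParts` / `u64IntoParts` implement, using plain bigint arithmetic (the branded `U32`/`U64` wrappers are omitted):

const value = 0x0000_0001_8000_0002n;       // upper = 1, lower = 0x8000_0002
const lower = value & (2n ** 32n - 1n);     // 2147483650n
const upper = value >> 32n;                 // 1n
const roundTrip = (upper << 32n) + lower;
console.log(roundTrip === value);           // true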
@@ -1353,8 +1356,10 @@ declare function u32AsLeBytes(value: U32): Uint8Array {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
 declare function leBytesAsU32(uint8Array: Uint8Array): U32 {
-  check
-  return
+  check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
+  return asWithBytesRepresentation(
+    uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24),
+  );
 }

 /** Get the smallest value between U64 a and values given as input parameters. */
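And the little-endian reconstruction now inlined in `leBytesAsU32`, shown on a concrete 4-byte buffer (note a top byte >= 0x80 would make the `<< 24` term negative; the branded-type wrapper is again left out):

const bytes = Uint8Array.from([0x78, 0x56, 0x34, 0x12]);
const u32 = bytes[0] | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24);
console.log(u32.toString(16)); // "12345678"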
@@ -1374,7 +1379,7 @@ type index$p_U64 = U64;
 type index$p_U8 = U8;
 type index$p_WithBytesRepresentation<Bytes extends number> = WithBytesRepresentation<Bytes>;
 declare const index$p___REPRESENTATION_BYTES__: typeof __REPRESENTATION_BYTES__;
-declare const index$
+declare const index$p_asWithBytesRepresentation: typeof asWithBytesRepresentation;
 declare const index$p_isU16: typeof isU16;
 declare const index$p_isU32: typeof isU32;
 declare const index$p_isU64: typeof isU64;
@@ -1392,7 +1397,7 @@ declare const index$p_u32AsLeBytes: typeof u32AsLeBytes;
 declare const index$p_u64FromParts: typeof u64FromParts;
 declare const index$p_u64IntoParts: typeof u64IntoParts;
 declare namespace index$p {
-  export { index$p_MAX_VALUE_U16 as MAX_VALUE_U16, index$p_MAX_VALUE_U32 as MAX_VALUE_U32, index$p_MAX_VALUE_U64 as MAX_VALUE_U64, index$p_MAX_VALUE_U8 as MAX_VALUE_U8, index$p___REPRESENTATION_BYTES__ as __REPRESENTATION_BYTES__, index$
+  export { index$p_MAX_VALUE_U16 as MAX_VALUE_U16, index$p_MAX_VALUE_U32 as MAX_VALUE_U32, index$p_MAX_VALUE_U64 as MAX_VALUE_U64, index$p_MAX_VALUE_U8 as MAX_VALUE_U8, index$p___REPRESENTATION_BYTES__ as __REPRESENTATION_BYTES__, index$p_asWithBytesRepresentation as asWithBytesRepresentation, index$p_isU16 as isU16, index$p_isU32 as isU32, index$p_isU64 as isU64, index$p_isU8 as isU8, index$p_leBytesAsU32 as leBytesAsU32, index$p_maxU64 as maxU64, index$p_minU64 as minU64, index$p_sumU32 as sumU32, index$p_sumU64 as sumU64, index$p_tryAsU16 as tryAsU16, index$p_tryAsU32 as tryAsU32, index$p_tryAsU64 as tryAsU64, index$p_tryAsU8 as tryAsU8, index$p_u32AsLeBytes as u32AsLeBytes, index$p_u64FromParts as u64FromParts, index$p_u64IntoParts as u64IntoParts };
   export type { index$p_FixedSizeNumber as FixedSizeNumber, Result$1 as Result, index$p_U16 as U16, index$p_U32 as U32, index$p_U64 as U64, index$p_U8 as U8, index$p_WithBytesRepresentation as WithBytesRepresentation };
 }

@@ -1731,7 +1736,7 @@ declare class Decoder {
     if (this.offset < newOffset) {
       this.skip(newOffset - this.offset);
     } else {
-      check
+      check(newOffset >= 0, "The offset has to be positive");
       this.offset = newOffset;
     }
   }
@@ -1763,7 +1768,7 @@ declare class Decoder {
   }

   private ensureHasBytes(bytes: number) {
-    check
+    check(bytes >= 0, "Negative number of bytes given.");
     if (this.offset + bytes > this.source.length) {
       throw new Error(
         `Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`,
@@ -1774,7 +1779,7 @@ declare class Decoder {

 declare const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
 declare function decodeVariableLengthExtraBytes(firstByte: number) {
-  check
+  check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
   for (let i = 0; i < MASKS.length; i++) {
     if (firstByte >= MASKS[i]) {
       return 8 - i;
@@ -1793,7 +1798,7 @@ type SizeHint = {
 };

 declare function tryAsExactBytes(a: SizeHint): number {
-  check
+  check(a.isExact, "The value is not exact size estimation!");
   return a.bytes;
 }

@@ -1952,8 +1957,8 @@ declare class Encoder {
     // we still allow positive numbers from `[maxNum / 2, maxNum)`.
     // So it does not matter if the argument is a negative value,
     // OR if someone just gave us two-complement already.
-    check
-    check
+    check(num < maxNum, "Only for numbers up to 2**64 - 1");
+    check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
     this.ensureBigEnough(8);

     this.dataView.setBigInt64(this.offset, num, true);
@@ -2023,8 +2028,8 @@ declare class Encoder {
     // we still allow positive numbers from `[maxNum / 2, maxNum)`.
     // So it does not matter if the argument is a negative value,
     // OR if someone just gave us two-complement already.
-    check
-    check
+    check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
+    check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);

     this.ensureBigEnough(bytesToEncode);
   }
@@ -2037,8 +2042,8 @@ declare class Encoder {
    * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
    */
   varU32(num: U32) {
-    check
-    check
+    check(num >= 0, "Only for natural numbers.");
+    check(num < 2 ** 32, "Only for numbers up to 2**32");
     this.varU64(BigInt(num));
   }

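The pair of range checks added to the signed encoders follows the comment above them: for an n-byte encoding with maxNum = 2 ** (8 * n), any value in [-maxNum / 2, maxNum) is accepted, so a negative number and its two's-complement image both pass. A one-byte illustration:

const maxNum = 2 ** 8;                   // 256, one-byte encoding
const signed = -3;
const twosComplement = maxNum + signed;  // 253 (0xfd), the same bit pattern
console.log(signed < maxNum && -signed <= maxNum / 2);                 // true
console.log(twosComplement < maxNum && -twosComplement <= maxNum / 2); // true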
@@ -2206,7 +2211,7 @@ declare class Encoder {
    * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
    */
   sequenceVarLen<T>(encode: Encode<T>, elements: readonly T[]) {
-    check
+    check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
     this.varU32(tryAsU32(elements.length));
     this.sequenceFixLen(encode, elements);
   }
@@ -2229,7 +2234,7 @@ declare class Encoder {
    * anyway, so if we really should throw we will.
    */
   private ensureBigEnough(length: number, options: { silent: boolean } = { silent: false }) {
-    check
+    check(length >= 0, "Negative length given");

     const newLength = this.offset + length;
     if (newLength > MAX_LENGTH) {
@@ -2589,12 +2594,13 @@ declare abstract class ObjectView<T> {
   private decodeUpTo<K extends keyof T>(field: K): ViewField<T[K], unknown> {
     const index = this.descriptorsKeys.indexOf(field);
     const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
-    check
-
-      Unjustified call to 'decodeUpTo' -
+    check(
+      this.lastDecodedFieldIdx < index,
+      `Unjustified call to 'decodeUpTo' -
       the index ($Blobindex}, ${String(field)})
       is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
-
+      `,
+    );

     let lastItem = this.cache.get(lastField);
     const skipper = new Skipper(this.decoder);
@@ -2617,11 +2623,12 @@ declare abstract class ObjectView<T> {
       this.lastDecodedFieldIdx = i;
     }

-
-
-
-
-
+    const last: ViewField<T[K], unknown> = ensure(
+      lastItem,
+      lastItem !== undefined,
+      "Last item must be set, since the loop turns at least once.",
+    );
+    return last;
   }
 }

@@ -2657,10 +2664,12 @@ declare class SequenceView<T, V = T> {
   *[Symbol.iterator]() {
     for (let i = 0; i < this.length; i++) {
       const val = this.get(i);
-
-
-
-
+      const v: ViewField<T, V> = ensure(
+        val,
+        val !== undefined,
+        "We are within 0..this.length so all items are defined.",
+      );
+      yield v;
     }
   }

@@ -2710,10 +2719,10 @@ declare class SequenceView<T, V = T> {
   }

   private decodeUpTo(index: number): ViewField<T, V> {
-    check
-
-      Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx})
-
+    check(
+      this.lastDecodedIdx < index,
+      `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`,
+    );
     let lastItem = this.cache.get(this.lastDecodedIdx);
     const skipper = new Skipper(this.decoder);

@@ -2734,10 +2743,12 @@ declare class SequenceView<T, V = T> {
       this.lastDecodedIdx = i;
     }

-
-
-
-
+    const last: ViewField<T, V> = ensure(
+      lastItem,
+      lastItem !== undefined,
+      "Last item must be set, since the loop turns at least once.",
+    );
+    return last;
   }
 }

@@ -2763,10 +2774,7 @@ declare const TYPICAL_DICTIONARY_LENGTH = 32;
 declare function readonlyArray<T, V>(desc: Descriptor<T[], V>): Descriptor<readonly T[], V> {
   return desc.convert(
     (x) => {
-      check`
-        ${Array.isArray(x)}
-        Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
-      `;
+      check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
       // NOTE [ToDr] This assumption is incorrect in general, but it's documented
       // in the general note. We avoid `.slice()` the array for performance reasons.
       return x as T[];
@@ -3439,8 +3447,6 @@ type KeccakHash = Bytes<HASH_SIZE>;
 /** Truncated hash. */
 type TruncatedHash = Bytes<TRUNCATED_HASH_SIZE>;

-declare const ZERO_HASH = Bytes.zero(HASH_SIZE);
-
 /**
  * Container for some object with a hash that is related to this object.
  *
@@ -3489,7 +3495,7 @@ declare class PageAllocator implements HashAllocator {

   // TODO [ToDr] Benchmark the performance!
   constructor(private readonly hashesPerPage: number) {
-    check
+    check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
     this.resetPage();
   }

@@ -3649,12 +3655,11 @@ type index$n_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
 declare const index$n_WithHash: typeof WithHash;
 type index$n_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$n_WithHashAndBytes: typeof WithHashAndBytes;
-declare const index$n_ZERO_HASH: typeof ZERO_HASH;
 declare const index$n_blake2b: typeof blake2b;
 declare const index$n_defaultAllocator: typeof defaultAllocator;
 declare const index$n_keccak: typeof keccak;
 declare namespace index$n {
-  export { index$n_PageAllocator as PageAllocator, index$n_SimpleAllocator as SimpleAllocator, index$n_WithHash as WithHash, index$n_WithHashAndBytes as WithHashAndBytes, index$
+  export { index$n_PageAllocator as PageAllocator, index$n_SimpleAllocator as SimpleAllocator, index$n_WithHash as WithHash, index$n_WithHashAndBytes as WithHashAndBytes, index$n_blake2b as blake2b, index$n_defaultAllocator as defaultAllocator, index$n_keccak as keccak };
   export type { index$n_Blake2bHash as Blake2bHash, index$n_HASH_SIZE as HASH_SIZE, index$n_HashAllocator as HashAllocator, index$n_KeccakHash as KeccakHash, index$n_OpaqueHash as OpaqueHash, index$n_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$n_TruncatedHash as TruncatedHash };
 }

@@ -3871,8 +3876,8 @@ declare class MultiMap<TKeys extends readonly unknown[], TValue> {
    * if needed.
    */
   constructor(keysLength: TKeys["length"], keyMappers?: KeyMappers<TKeys>) {
-    check
-    check
+    check(keysLength > 0, "Keys cannot be empty.");
+    check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
     this.data = new Map() as NestedMaps<TKeys, TValue>;
     this.keyMappers = keyMappers === undefined ? (Array(keysLength).fill(null) as KeyMappers<TKeys>) : keyMappers;
   }
@@ -3997,7 +4002,7 @@ declare class FixedSizeArray<T, N extends number> extends Array<T> {
   }

   static new<T, N extends number>(data: readonly T[], len: N): FixedSizeArray<T, N> {
-    check
+    check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);

     const arr = new FixedSizeArray<T, N>(len);

@@ -4185,7 +4190,7 @@ declare class SortedArray<V> implements ImmutableSortedArray<V> {

   /** Create a new SortedSet from two sorted collections. */
   static fromTwoSortedCollections<V>(first: ImmutableSortedArray<V>, second: ImmutableSortedArray<V>) {
-    check
+    check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
     const comparator = first.comparator;
     const arr1 = first.array;
     const arr1Length = arr1.length;
@@ -4322,7 +4327,7 @@ declare class SortedSet<V> extends SortedArray<V> implements ImmutableSortedSet<

   /** Create a new SortedSet from two sorted collections. */
   static fromTwoSortedCollections<V>(first: ImmutableSortedArray<V>, second: ImmutableSortedArray<V>) {
-    check
+    check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
     const comparator = first.comparator;

     if (first.length === 0) {
@@ -4592,7 +4597,7 @@ type BlsKey = Opaque<Bytes<BLS_KEY_BYTES>, "BlsKey">;
 declare function publicKey(seed: Uint8Array): BandersnatchKey {
   const key = bandersnatch.derive_public_key(seed);

-  check
+  check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");

   return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
 }
@@ -4698,7 +4703,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     data.set(signature.raw, offset);
     offset += ED25519_SIGNATURE_BYTES;
     const messageLength = message.length;
-    check
+    check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
     data[offset] = messageLength;
     offset += 1;
     data.set(message.raw, offset);
@@ -5253,10 +5258,10 @@ declare const tryAsEpoch = (v: number): Epoch => asOpaqueType(tryAsU32(v));
 /** One entry of `T` per one validator. */
 type PerValidator<T> = KnownSizeArray<T, "ValidatorsCount">;
 declare function tryAsPerValidator<T>(array: T[], spec: ChainSpec): PerValidator<T> {
-  check
-
-    Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
-
+  check(
+    array.length === spec.validatorsCount,
+    `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`,
+  );
   return asKnownSize(array);
 }
 declare const codecPerValidator = <T, V>(val: Descriptor<T, V>): Descriptor<PerValidator<T>, SequenceView<T, V>> =>
@@ -5269,10 +5274,10 @@ declare const codecPerValidator = <T, V>(val: Descriptor<T, V>): Descriptor<PerV
 /** One entry of `T` per one block in epoch. */
 type PerEpochBlock<T> = KnownSizeArray<T, "EpochLength">;
 declare function tryAsPerEpochBlock<T>(array: T[], spec: ChainSpec): PerEpochBlock<T> {
-  check
-
-    Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
-
+  check(
+    array.length === spec.epochLength,
+    `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`,
+  );
   return asKnownSize(array);
 }
 declare const codecPerEpochBlock = <T, V>(val: Descriptor<T, V>): Descriptor<PerEpochBlock<T>, SequenceView<T, V>> =>
@@ -5887,11 +5892,11 @@ type WorkItemsCount = U8;

 /** Verify the value is within the `WorkItemsCount` bounds. */
 declare function tryAsWorkItemsCount(len: number): WorkItemsCount {
-
-
-
-
-
+  return ensure<number, WorkItemsCount>(
+    len,
+    len >= MIN_NUMBER_OF_WORK_ITEMS && len <= MAX_NUMBER_OF_WORK_ITEMS,
+    `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`,
+  );
 }

 /** Minimal number of work items in the work package or results in work report. */
@@ -8314,13 +8319,13 @@ declare class TrieNode {

   /** View this node as a branch node */
   asBranchNode(): BranchNode {
-    check
+    check(this.getNodeType() === NodeType.Branch);
     return new BranchNode(this);
   }

   /** View this node as a leaf node */
   asLeafNode(): LeafNode {
-    check
+    check(this.getNodeType() !== NodeType.Branch);
     return new LeafNode(this);
   }

@@ -8861,7 +8866,7 @@ declare function createSubtreeForBothLeaves(
  * Return a single bit from `key` located at `bitIndex`.
  */
 declare function getBit(key: TruncatedStateKey, bitIndex: number): boolean {
-  check
+  check(bitIndex < TRUNCATED_KEY_BITS);
   const byte = bitIndex >>> 3;
   const bit = bitIndex - (byte << 3);
   const mask = 0b10_00_00_00 >>> bit;
@@ -9081,10 +9086,10 @@ declare class AvailabilityAssignment extends WithDebug {
 type PerCore<T> = KnownSizeArray<T, "number of cores">;
 /** Check if given array has correct length before casting to the opaque type. */
 declare function tryAsPerCore<T>(array: T[], spec: ChainSpec): PerCore<T> {
-  check
-
-    Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
-
+  check(
+    array.length === spec.coresCount,
+    `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`,
+  );
   return asOpaqueType(array);
 }
 declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T>, SequenceView<T, V>> =>
@@ -10787,7 +10792,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState

   private removeServices(servicesRemoved: ServiceId[] | undefined) {
     for (const serviceId of servicesRemoved ?? []) {
-      check
+      check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
       this.services.delete(serviceId);
     }
   }
@@ -10808,10 +10813,10 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
       service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
     } else if (kind === UpdateStorageKind.Remove) {
       const { key } = action;
-      check
-
-        Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
-
+      check(
+        service.data.storage.has(key.toString()),
+        `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`,
+      );
       service.data.storage.delete(key.toString());
     } else {
       assertNever(kind);
@@ -12445,6 +12450,7 @@ declare const N_CHUNKS_REDUNDANCY = 681;
 /** Total number of chunks generated by EC. */
 declare const N_CHUNKS_TOTAL = 1023;
 type N_CHUNKS_TOTAL = typeof N_CHUNKS_TOTAL;
+// check(N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY);

 /**
  * reed-solomon-simd requires point size to be multiple of 64 bytes but we need only 2 bytes.
@@ -12461,11 +12467,7 @@ type POINT_LENGTH = typeof POINT_LENGTH;

 declare const PIECE_SIZE = 684;
 type PIECE_SIZE = typeof PIECE_SIZE;
-
-declare function checkConsistency() {
-  check`${N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY} no of required and redundancy chunks need to match ${N_CHUNKS_TOTAL}`;
-  check`${PIECE_SIZE === N_CHUNKS_REQUIRED * POINT_LENGTH} piece size needs to match ${N_CHUNKS_REQUIRED} * ${POINT_LENGTH}`;
-}
+// check(PIECE_SIZE === N_CHUNKS_REQUIRED * POINT_LENGTH);

 /**
  * Takes arbitrarily long input data, padds it to multiple of `PIECE_SIZE` and returns
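The two commented-out consistency checks above hold for the constants visible in this diff; N_CHUNKS_REQUIRED itself is not shown in these hunks, but 342 is the only value consistent with both equalities:

const N_CHUNKS_REQUIRED = 342;    // assumed: 1023 - 681, also 684 / 2
const N_CHUNKS_REDUNDANCY = 681;
const N_CHUNKS_TOTAL = 1023;
const POINT_LENGTH = 2;
const PIECE_SIZE = 684;
console.log(N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY); // true
console.log(PIECE_SIZE === N_CHUNKS_REQUIRED * POINT_LENGTH);            // true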
@@ -12504,11 +12506,11 @@ declare function decodeDataAndTrim(
 declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_REQUIRED>): BytesBlob {
   const pointBytes = input[0][1].length;
   const points = Math.floor(pointBytes / POINT_LENGTH);
-  check
-  check
-
-    Every piece must have the same length!
-
+  check(points * POINT_LENGTH === pointBytes, "Each point length needs to be a multiple of `POINT_LENGTH`");
+  check(
+    input.every(([_idx, point]) => point.length === pointBytes),
+    "Every piece must have the same length!",
+  );

   const pieces = FixedSizeArray.fill(() => Bytes.zero(PIECE_SIZE), points);

@@ -12599,7 +12601,7 @@ declare function decodePiece(
     throw new Error("indices array in decoded result must exist!");
   }

-  check
+  check(resultData.length === resultIndices.length * POINT_ALIGNMENT, "incorrect length of data or indices!");

   for (let i = 0; i < resultIndices.length; i++) {
     // fill reconstructed shards in result
@@ -12627,7 +12629,7 @@ declare function decodePiece(
  * https://graypaper.fluffylabs.dev/#/9a08063/3eb4013eb401?v=0.6.6
  */
 declare function split<N extends number, K extends number>(input: BytesBlob, n: N, k: K): FixedSizeArray<Bytes<N>, K> {
-  check
+  check(n * k === input.length);
   const result: Bytes<N>[] = [];
   for (let i = 0; i < k; i++) {
     const start = i * n;
@@ -12750,7 +12752,7 @@ declare function transpose<T, N extends number, K extends number>(
  */
 declare function chunkingFunction(input: BytesBlob): FixedSizeArray<BytesBlob, N_CHUNKS_TOTAL> {
   const k = Math.floor(input.length / PIECE_SIZE);
-  check
+  check(k * PIECE_SIZE === input.length, `Input length ${input.length} is not divisible by ${PIECE_SIZE}`);

   // we get a `k` pieces.
   const pieces = unzip<PIECE_SIZE, typeof k>(input, PIECE_SIZE, k);
@@ -12773,10 +12775,10 @@ declare function shardsToChunks(spec: ChainSpec, shards: PerValidator<BytesBlob>
   const result: [number, BytesBlob][][] = [];

   const shardSize = shards[0].length;
-  check
-
-    Each shard must be the same length!
-
+  check(
+    shards.every((s) => s.length === shardSize),
+    "Each shard must be the same length!",
+  );

   const totalData = shards.map((s) => s.length).reduce((sum, sLength) => sum + sLength, 0);
   const chunkSize = Math.floor(totalData / N_CHUNKS_TOTAL);
@@ -12839,7 +12841,6 @@ type index$9_N_CHUNKS_TOTAL = N_CHUNKS_TOTAL;
 type index$9_PIECE_SIZE = PIECE_SIZE;
 declare const index$9_POINT_ALIGNMENT: typeof POINT_ALIGNMENT;
 type index$9_POINT_LENGTH = POINT_LENGTH;
-declare const index$9_checkConsistency: typeof checkConsistency;
 declare const index$9_chunkingFunction: typeof chunkingFunction;
 declare const index$9_chunksToShards: typeof chunksToShards;
 declare const index$9_decodeData: typeof decodeData;
@@ -12855,7 +12856,7 @@ declare const index$9_split: typeof split;
 declare const index$9_transpose: typeof transpose;
 declare const index$9_unzip: typeof unzip;
 declare namespace index$9 {
-  export { index$9_HALF_POINT_SIZE as HALF_POINT_SIZE, index$9_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$9_POINT_ALIGNMENT as POINT_ALIGNMENT, index$
+  export { index$9_HALF_POINT_SIZE as HALF_POINT_SIZE, index$9_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$9_POINT_ALIGNMENT as POINT_ALIGNMENT, index$9_chunkingFunction as chunkingFunction, index$9_chunksToShards as chunksToShards, index$9_decodeData as decodeData, index$9_decodeDataAndTrim as decodeDataAndTrim, index$9_decodePiece as decodePiece, index$9_encodePoints as encodePoints, index$9_initEc as initEc, index$9_join as join, index$9_lace as lace, index$9_padAndEncodeData as padAndEncodeData, index$9_shardsToChunks as shardsToChunks, index$9_split as split, index$9_transpose as transpose, index$9_unzip as unzip };
   export type { index$9_N_CHUNKS_REQUIRED as N_CHUNKS_REQUIRED, index$9_N_CHUNKS_TOTAL as N_CHUNKS_TOTAL, index$9_PIECE_SIZE as PIECE_SIZE, index$9_POINT_LENGTH as POINT_LENGTH };
 }

@@ -13227,7 +13228,7 @@ declare class Mask {
   }

   getNoOfBytesToNextInstruction(index: number) {
-    check
+    check(index >= 0, `index (${index}) cannot be a negative number`);
     return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
   }

@@ -13377,13 +13378,13 @@ declare class Registers {
   private asUnsigned: BigUint64Array;

   constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
-    check
+    check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
     this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
     this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
   }

   static fromBytes(bytes: Uint8Array) {
-    check
+    check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
     return new Registers(bytes);
   }

@@ -14269,17 +14270,13 @@ declare class InstructionResult {

 type MemoryIndex = Opaque<number, "memory index">;

-declare const tryAsMemoryIndex = (index: number): MemoryIndex =>
-
-  return asOpaqueType(index);
-};
+declare const tryAsMemoryIndex = (index: number): MemoryIndex =>
+  ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);

 type SbrkIndex = Opaque<number, "sbrk index">;

-declare const tryAsSbrkIndex = (index: number): SbrkIndex =>
-
-  return asOpaqueType(index);
-};
+declare const tryAsSbrkIndex = (index: number): SbrkIndex =>
+  ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);

 type PageIndex = Opaque<number, "memory page index">;
 type PageNumber = Opaque<number, "memory page number">;
@@ -14648,10 +14645,10 @@ declare class MemoryBuilder {
    */
   setReadablePages(start: MemoryIndex, end: MemoryIndex, data: Uint8Array = new Uint8Array()) {
     this.ensureNotFinalized();
-    check
-    check
-    check
-    check
+    check(start < end, "end has to be bigger than start");
+    check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+    check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+    check(data.length <= end - start, "the initial data is longer than address range");

     const length = end - start;
     const range = MemoryRange.fromStartAndLength(start, length);
@@ -14682,10 +14679,10 @@ declare class MemoryBuilder {
    */
   setWriteablePages(start: MemoryIndex, end: MemoryIndex, data: Uint8Array = new Uint8Array()) {
     this.ensureNotFinalized();
-    check
-    check
-    check
-    check
+    check(start < end, "end has to be bigger than start");
+    check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+    check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+    check(data.length <= end - start, "the initial data is longer than address range");

     const length = end - start;
     const range = MemoryRange.fromStartAndLength(start, length);
@@ -14713,7 +14710,7 @@ declare class MemoryBuilder {
     this.ensureNotFinalized();
     const pageOffset = start % PAGE_SIZE;
     const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
-    check
+    check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");

     const length = data.length;
     const range = MemoryRange.fromStartAndLength(start, length);
@@ -14734,10 +14731,10 @@ declare class MemoryBuilder {
   }

   finalize(startHeapIndex: MemoryIndex, endHeapIndex: SbrkIndex): Memory {
-    check
-
-      startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
-
+    check(
+      startHeapIndex <= endHeapIndex,
+      `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`,
+    );
     this.ensureNotFinalized();

     const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
@@ -15070,10 +15067,10 @@ declare class JumpTable {
   private indices: Uint32Array;

   constructor(itemByteLength: number, bytes: Uint8Array) {
-    check
-
-      Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})
-
+    check(
+      itemByteLength === 0 || bytes.length % itemByteLength === 0,
+      `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`,
+    );

     const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;

@@ -17281,8 +17278,8 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
     bytes: bigint,
     serviceInfo: ServiceAccountInfo,
   ): Result$2<OK, InsufficientFundsError> {
-    check
-    check
+    check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
+    check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);

     const overflowItems = !isU32(items);
     const overflowBytes = !isU64(bytes);
@@ -17487,7 +17484,7 @@ declare class HostCallsManager {
     this.missing = missing;

     for (const handler of handlers) {
-      check
+      check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
       this.hostCalls.set(handler.index, handler);
     }
   }
@@ -17559,10 +17556,10 @@ declare class ReturnValue {
     public status: Status | null,
     public memorySlice: Uint8Array | null,
   ) {
-    check
-
-
-
+    check(
+      (status === null && memorySlice !== null) || (status !== null && memorySlice === null),
+      "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided",
+    );
   }

   static fromStatus(consumedGas: Gas, status: Status) {
@@ -17620,10 +17617,10 @@ declare class HostCalls {
     if (status !== Status.HOST) {
       return this.getReturnValue(status, pvmInstance);
     }
-    check
-
-      "We know that the exit param is not null, because the status is
-
+    check(
+      pvmInstance.getExitParam() !== null,
+      "We know that the exit param is not null, because the status is `Status.HOST`",
+    );
     const hostCallIndex = pvmInstance.getExitParam() ?? -1;
     const gas = pvmInstance.getGasCounter();
     const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -17732,7 +17729,7 @@ declare function getServiceId(serviceId: U64): ServiceId | null {
 }

 declare function writeServiceIdAsLeBytes(serviceId: ServiceId, destination: Uint8Array) {
-  check
+  check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
   destination.set(u32AsLeBytes(serviceId));
 }

@@ -17808,6 +17805,20 @@ declare namespace index$6 {

 declare const NO_OF_REGISTERS = 13;

+/**
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
+ *
+ * E_n - little endian encoding, n - length
+ * o - initial read only data
+ * w - initial heap
+ * z - heap pages filled with zeros
+ * s - stack size
+ * c - program code
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
+ */
+type InputLength = Opaque<number, "Number that is lower than 2 ** 24 (Z_I from GP)">;
+
 declare class MemorySegment extends WithDebug {
   static from({ start, end, data }: Omit<MemorySegment, never>) {
     return new MemorySegment(start, end, data);
@@ -17842,27 +17853,17 @@ declare class SpiProgram extends WithDebug {
   }
 }

-/**
- * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
- *
- * E_n - little endian encoding, n - length
- * o - initial read only data
- * w - initial heap
- * z - heap pages filled with zeros
- * s - stack size
- * c - program code
- *
- * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
- */
 declare function decodeStandardProgram(program: Uint8Array, args: Uint8Array) {
   const decoder = Decoder.fromBlob(program);
   const oLength = decoder.u24();
   const wLength = decoder.u24();
-
-
-
-
-
+  const argsLength = ensure<number, InputLength>(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
+  const readOnlyLength = ensure<number, InputLength>(
+    oLength,
+    oLength <= DATA_LEGNTH,
+    "Incorrect readonly segment length",
+  );
+  const heapLength = ensure<number, InputLength>(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
   const noOfHeapZerosPages = decoder.u16();
   const stackSize = decoder.u24();
   const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
@@ -17879,8 +17880,8 @@ declare function decodeStandardProgram(program: Uint8Array, args: Uint8Array) {
   const stackStart = STACK_SEGMENT - alignToPageSize(stackSize);
   const stackEnd = STACK_SEGMENT;
   const argsStart = ARGS_SEGMENT;
-  const argsEnd = argsStart + alignToPageSize(
-  const argsZerosEnd = argsEnd + alignToPageSize(
+  const argsEnd = argsStart + alignToPageSize(argsLength);
+  const argsZerosEnd = argsEnd + alignToPageSize(argsLength);

   function nonEmpty(s: MemorySegment | false): s is MemorySegment {
     return s !== false;
@@ -17888,7 +17889,7 @@ declare function decodeStandardProgram(program: Uint8Array, args: Uint8Array) {

   const readableMemory = [
     readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
-
+    argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
     argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
   ].filter(nonEmpty);
   const writeableMemory = [
@@ -17920,6 +17921,7 @@ declare function getRegisters(argsLength: number) {
17920 17921 |   return regs;
17921 17922 |   }
17922 17923 |
      17924 | + type index$5_InputLength = InputLength;
17923 17925 |   type index$5_MemorySegment = MemorySegment;
17924 17926 |   declare const index$5_MemorySegment: typeof MemorySegment;
17925 17927 |   declare const index$5_NO_OF_REGISTERS: typeof NO_OF_REGISTERS;
@@ -17931,15 +17933,8 @@ declare const index$5_decodeStandardProgram: typeof decodeStandardProgram;
17931 17933 |   declare const index$5_getMemorySegment: typeof getMemorySegment;
17932 17934 |   declare const index$5_getRegisters: typeof getRegisters;
17933 17935 |   declare namespace index$5 {
17934       | - export {
17935       | -
17936       | - index$5_NO_OF_REGISTERS as NO_OF_REGISTERS,
17937       | - index$5_SpiMemory as SpiMemory,
17938       | - index$5_SpiProgram as SpiProgram,
17939       | - index$5_decodeStandardProgram as decodeStandardProgram,
17940       | - index$5_getMemorySegment as getMemorySegment,
17941       | - index$5_getRegisters as getRegisters,
17942       | - };
      17936 | + export { index$5_MemorySegment as MemorySegment, index$5_NO_OF_REGISTERS as NO_OF_REGISTERS, index$5_SpiMemory as SpiMemory, index$5_SpiProgram as SpiProgram, index$5_decodeStandardProgram as decodeStandardProgram, index$5_getMemorySegment as getMemorySegment, index$5_getRegisters as getRegisters };
      17937 | + export type { index$5_InputLength as InputLength };
17943 17938 |   }
17944 17939 |
17945 17940 |   declare class Program {
@@ -18062,7 +18057,7 @@ declare class DebuggerAdapter {
18062 18057 |   }
18063 18058 |
18064 18059 |   nSteps(steps: number): boolean {
18065       | - check
      18060 | + check(steps >>> 0 > 0, `Expected a positive integer got ${steps}`);
18066 18061 |   for (let i = 0; i < steps; i++) {
18067 18062 |   const isOk = this.nextStep();
18068 18063 |   if (!isOk) {
@@ -18199,10 +18194,12 @@ declare const index$3___OPAQUE_TYPE__: typeof __OPAQUE_TYPE__;
18199 18194 |   declare const index$3_asOpaqueType: typeof asOpaqueType;
18200 18195 |   declare const index$3_assertEmpty: typeof assertEmpty;
18201 18196 |   declare const index$3_assertNever: typeof assertNever;
      18197 | + declare const index$3_cast: typeof cast;
18202 18198 |   declare const index$3_check: typeof check;
18203 18199 |   declare const index$3_clampU64ToU32: typeof clampU64ToU32;
18204 18200 |   declare const index$3_createResults: typeof createResults;
18205 18201 |   declare const index$3_decodeStandardProgram: typeof decodeStandardProgram;
      18202 | + declare const index$3_ensure: typeof ensure;
18206 18203 |   declare const index$3_extractCodeAndMetadata: typeof extractCodeAndMetadata;
18207 18204 |   declare const index$3_getServiceId: typeof getServiceId;
18208 18205 |   declare const index$3_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
@@ -18221,7 +18218,7 @@ declare const index$3_tryAsMachineId: typeof tryAsMachineId;
18221 18218 |   declare const index$3_tryAsProgramCounter: typeof tryAsProgramCounter;
18222 18219 |   declare const index$3_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
18223 18220 |   declare namespace index$3 {
18224       | - export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
      18221 | + export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18225 18222 |   export type { index$3_Args as Args, index$3_EnumMapping as EnumMapping, index$3_ErrorResult as ErrorResult, index$3_IHostCallMemory as IHostCallMemory, index$3_IHostCallRegisters as IHostCallRegisters, index$3_InsufficientFundsError as InsufficientFundsError, index$3_MachineId as MachineId, index$3_MachineResult as MachineResult, index$3_MachineStatus as MachineStatus, index$3_NoMachineError as NoMachineError, index$3_OK as OK, index$3_OkResult as OkResult, index$3_Opaque as Opaque, index$3_PartialState as PartialState, index$3_PreimageStatus as PreimageStatus, index$3_ProgramCounter as ProgramCounter, index$3_RefineExternalities as RefineExternalities, index$3_SegmentExportError as SegmentExportError, index$3_ServiceStateUpdate as ServiceStateUpdate, index$3_StateSlice as StateSlice, index$3_StringLiteral as StringLiteral, index$3_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$3_TaggedError as TaggedError, index$3_TokenOf as TokenOf, index$3_Uninstantiable as Uninstantiable, index$3_UnprivilegedError as UnprivilegedError, index$3_WithOpaque as WithOpaque };
18226 18223 |   }
18227 18224 |
@@ -18234,7 +18231,7 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
18234 18231 |   * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
18235 18232 |   */
18236 18233 |   declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18237       | - check
      18234 | + check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
18238 18235 |   const n = arr.length;
18239 18236 |   const randomNumbers = hashToNumberSequence(entropy, arr.length);
18240 18237 |   const result: T[] = new Array<T>(n);