@typeberry/lib 0.1.3-135961b → 0.1.3-462ca77
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +34 -19
- package/index.d.ts +34 -19
- package/index.js +34 -19
- package/package.json +1 -1
package/index.cjs
CHANGED
@@ -332,6 +332,19 @@ const Result$1 = {
     },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+const MAX_LENGTH$2 = 2145386496;
+function safeAllocUint8Array(length) {
+    if (length > MAX_LENGTH$2) {
+        // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH$2}.`);
+    }
+    return new Uint8Array(Math.min(MAX_LENGTH$2, length));
+}
+
 /**
  * Utilities for tests.
  */
@@ -572,6 +585,7 @@ var index$u = /*#__PURE__*/Object.freeze({
     DEFAULT_VERSION: DEFAULT_VERSION,
     ErrorsCollector: ErrorsCollector,
     get GpVersion () { return GpVersion; },
+    MAX_LENGTH: MAX_LENGTH$2,
     OK: OK,
     Result: Result$1,
     TEST_COMPARE_USING: TEST_COMPARE_USING,
@@ -586,6 +600,7 @@ var index$u = /*#__PURE__*/Object.freeze({
     isBrowser: isBrowser,
     measure: measure,
     resultToString: resultToString,
+    safeAllocUint8Array: safeAllocUint8Array,
     seeThrough: seeThrough,
     workspacePathFix: workspacePathFix
 });
@@ -609,7 +624,7 @@ class BitVec {
      * Create new [`BitVec`] with all values set to `false`.
      */
     static empty(bitLength) {
-        const data =
+        const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
         return new BitVec(data, bitLength);
     }
     byteLength;
@@ -810,7 +825,7 @@ class BytesBlob {
     static blobFromParts(v, ...rest) {
         const vArr = v instanceof Uint8Array ? [v] : v;
         const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-        const buffer =
+        const buffer = safeAllocUint8Array(totalLength);
         let offset = 0;
         for (const r of vArr) {
             buffer.set(r, offset);
@@ -883,7 +898,7 @@ class Bytes extends BytesBlob {
     }
     /** Create an empty [`Bytes<X>`] of given length. */
     static zero(len) {
-        return new Bytes(
+        return new Bytes(safeAllocUint8Array(len), len);
     }
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
     /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
@@ -3592,7 +3607,7 @@ async function verify(input) {
         return Promise.resolve([]);
     }
     const dataLength = input.reduce((acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1, 0);
-    const data =
+    const data = safeAllocUint8Array(dataLength);
     let offset = 0;
     for (const { key, message, signature } of input) {
         data.set(key.raw, offset);
@@ -3684,7 +3699,7 @@ class SimpleAllocator {
 /** An allocator that works by allocating larger (continuous) pages of memory. */
 class PageAllocator {
     hashesPerPage;
-    page =
+    page = safeAllocUint8Array(0);
     currentHash = 0;
     // TODO [ToDr] Benchmark the performance!
     constructor(hashesPerPage) {
@@ -3695,7 +3710,7 @@ class PageAllocator {
     resetPage() {
         const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
         this.currentHash = 0;
-        this.page =
+        this.page = safeAllocUint8Array(pageSizeBytes);
     }
     emptyHash() {
         const startIdx = this.currentHash * HASH_SIZE;
@@ -10463,7 +10478,7 @@ class SerializedService {
     getStorage(rawKey) {
         if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
             const SERVICE_ID_BYTES = 4;
-            const serviceIdAndKey =
+            const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
             serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
             serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
             const key = asOpaqueType(BytesBlob.blobFrom(hashBytes(serviceIdAndKey).raw));
@@ -10548,7 +10563,7 @@ class TrieNode {
     raw;
     constructor(
     /** Exactly 512 bits / 64 bytes */
-    raw =
+    raw = safeAllocUint8Array(TRIE_NODE_BYTES)) {
         this.raw = raw;
     }
     /** Returns the type of the node */
@@ -11799,7 +11814,7 @@ function padAndEncodeData(input) {
     const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
     let padded = input;
     if (input.length !== paddedLength) {
-        padded = BytesBlob.blobFrom(
+        padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
        padded.raw.set(input.raw, 0);
    }
    return chunkingFunction(padded);
@@ -11845,7 +11860,7 @@ function decodeData(input) {
  */
 function encodePoints(input) {
     const result = [];
-    const data =
+    const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
     // add original shards to the result
     for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
         const pointStart = POINT_LENGTH * i;
@@ -11861,7 +11876,7 @@ function encodePoints(input) {
     const encodedData = encodedResult.take_data();
     for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
         const pointIndex = i * POINT_ALIGNMENT;
-        const redundancyPoint =
+        const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
         for (let j = 0; j < POINT_LENGTH; j++) {
             redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
         }
@@ -11876,7 +11891,7 @@ function encodePoints(input) {
  */
 function decodePiece(input) {
     const result = Bytes.zero(PIECE_SIZE);
-    const data =
+    const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
     const indices = new Uint16Array(input.length);
     for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
         const [index, points] = input[i];
@@ -11992,7 +12007,7 @@ function lace(input) {
         return BytesBlob.empty();
     }
     const n = input[0].length;
-    const result = BytesBlob.blobFrom(
+    const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
     for (let i = 0; i < k; i++) {
         const entry = input[i].raw;
         for (let j = 0; j < n; j++) {
@@ -13271,7 +13286,7 @@ class Registers {
     bytes;
     asSigned;
     asUnsigned;
-    constructor(bytes =
+    constructor(bytes = safeAllocUint8Array(NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT)) {
         this.bytes = bytes;
         check `${bytes.length === NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
@@ -13424,7 +13439,7 @@ class Mask {
         return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
     }
     buildLookupTableForward(mask) {
-        const table =
+        const table = safeAllocUint8Array(mask.bitLength);
         let lastInstructionOffset = 0;
         for (let i = mask.bitLength - 1; i >= 0; i--) {
             if (mask.isSet(i)) {
@@ -16959,7 +16974,7 @@ class HostCalls {
         const regs = pvmInstance.getRegisters();
         const maybeAddress = regs.getLowerU32(7);
         const maybeLength = regs.getLowerU32(8);
-        const result =
+        const result = safeAllocUint8Array(maybeLength);
         const startAddress = tryAsMemoryIndex(maybeAddress);
         const loadResult = memory.loadInto(result, startAddress);
         if (loadResult.isError) {
@@ -17308,14 +17323,14 @@ class DebuggerAdapter {
         const page = this.pvm.getMemoryPage(pageNumber);
         if (page === null) {
             // page wasn't allocated so we return an empty page
-            return
+            return safeAllocUint8Array(PAGE_SIZE$1);
         }
         if (page.length === PAGE_SIZE$1) {
             // page was allocated and has a proper size so we can simply return it
             return page;
         }
         // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-        const fullPage =
+        const fullPage = safeAllocUint8Array(PAGE_SIZE$1);
         fullPage.set(page);
         return fullPage;
     }
@@ -17459,7 +17474,7 @@ function fisherYatesShuffle(arr, entropy) {
 }
 function hashToNumberSequence(entropy, length) {
     const result = new Array(length);
-    const randomBytes =
+    const randomBytes = safeAllocUint8Array(ENTROPY_BYTES + 4);
     randomBytes.set(entropy.raw);
     for (let i = 0; i < length; i++) {
         randomBytes.set(u32AsLeBytes(tryAsU32(Math.floor(i / 8))), ENTROPY_BYTES);
package/index.d.ts
CHANGED
@@ -420,6 +420,20 @@ declare const Result$2 = {
     },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+declare const MAX_LENGTH$1 = 2145386496;
+
+declare function safeAllocUint8Array(length: number) {
+    if (length > MAX_LENGTH) {
+        // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+    }
+    return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 /**
  * Utilities for tests.
  */
@@ -755,11 +769,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
 declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
 declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
 declare const index$u_resultToString: typeof resultToString;
+declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
 declare const index$u_seeThrough: typeof seeThrough;
 declare const index$u_trimStack: typeof trimStack;
 declare const index$u_workspacePathFix: typeof workspacePathFix;
 declare namespace index$u {
-    export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+    export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
     export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
 }
 
@@ -929,7 +944,7 @@ declare class BytesBlob {
     static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
         const vArr = v instanceof Uint8Array ? [v] : v;
         const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-        const buffer =
+        const buffer = safeAllocUint8Array(totalLength);
         let offset = 0;
         for (const r of vArr) {
             buffer.set(r, offset);
@@ -1012,7 +1027,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
     /** Create an empty [`Bytes<X>`] of given length. */
     static zero<X extends number>(len: X): Bytes<X> {
-        return new Bytes(
+        return new Bytes(safeAllocUint8Array(len), len);
     }
 
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1148,7 @@ declare class BitVec {
      * Create new [`BitVec`] with all values set to `false`.
      */
     static empty(bitLength: number) {
-        const data =
+        const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
         return new BitVec(data, bitLength);
     }
 
@@ -3531,7 +3546,7 @@ declare class SimpleAllocator implements HashAllocator {
 
 /** An allocator that works by allocating larger (continuous) pages of memory. */
 declare class PageAllocator implements HashAllocator {
-    private page: Uint8Array =
+    private page: Uint8Array = safeAllocUint8Array(0);
     private currentHash = 0;
 
     // TODO [ToDr] Benchmark the performance!
@@ -3543,7 +3558,7 @@ declare class PageAllocator implements HashAllocator {
     private resetPage() {
         const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
         this.currentHash = 0;
-        this.page =
+        this.page = safeAllocUint8Array(pageSizeBytes);
     }
 
     emptyHash(): OpaqueHash {
@@ -4735,7 +4750,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
         (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
         0,
     );
-    const data =
+    const data = safeAllocUint8Array(dataLength);
 
     let offset = 0;
 
@@ -8373,7 +8388,7 @@ declare enum NodeType {
 declare class TrieNode {
     constructor(
         /** Exactly 512 bits / 64 bytes */
-        public readonly raw: Uint8Array =
+        public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
     ) {}
 
     /** Returns the type of the node */
@@ -12172,7 +12187,7 @@ declare class SerializedService implements Service {
     getStorage(rawKey: StorageKey): BytesBlob | null {
         if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
             const SERVICE_ID_BYTES = 4;
-            const serviceIdAndKey =
+            const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
             serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
             serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
             const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
@@ -12573,7 +12588,7 @@ declare function padAndEncodeData(input: BytesBlob) {
     const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
     let padded = input;
     if (input.length !== paddedLength) {
-        padded = BytesBlob.blobFrom(
+        padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
        padded.raw.set(input.raw, 0);
    }
    return chunkingFunction(padded);
@@ -12629,7 +12644,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
 declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
     const result: Bytes<POINT_LENGTH>[] = [];
-    const data =
+    const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
 
     // add original shards to the result
     for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12649,7 +12664,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
     for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
         const pointIndex = i * POINT_ALIGNMENT;
 
-        const redundancyPoint =
+        const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
         for (let j = 0; j < POINT_LENGTH; j++) {
             redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
         }
@@ -12669,7 +12684,7 @@ declare function decodePiece(
 ): Bytes<PIECE_SIZE> {
     const result = Bytes.zero(PIECE_SIZE);
 
-    const data =
+    const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
     const indices = new Uint16Array(input.length);
 
     for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12796,7 +12811,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
         return BytesBlob.empty();
     }
     const n = input[0].length;
-    const result = BytesBlob.blobFrom(
+    const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
     for (let i = 0; i < k; i++) {
         const entry = input[i].raw;
         for (let j = 0; j < n; j++) {
@@ -13868,7 +13883,7 @@ declare class Mask {
     }
 
     private buildLookupTableForward(mask: BitVec) {
-        const table =
+        const table = safeAllocUint8Array(mask.bitLength);
         let lastInstructionOffset = 0;
         for (let i = mask.bitLength - 1; i >= 0; i--) {
             if (mask.isSet(i)) {
@@ -14012,7 +14027,7 @@ declare class Registers {
     private asSigned: BigInt64Array;
     private asUnsigned: BigUint64Array;
 
-    constructor(private readonly bytes =
+    constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
         check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
         this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -18239,7 +18254,7 @@ declare class HostCalls {
     const maybeAddress = regs.getLowerU32(7);
     const maybeLength = regs.getLowerU32(8);
 
-    const result =
+    const result = safeAllocUint8Array(maybeLength);
     const startAddress = tryAsMemoryIndex(maybeAddress);
     const loadResult = memory.loadInto(result, startAddress);
 
@@ -18678,7 +18693,7 @@ declare class DebuggerAdapter {
 
     if (page === null) {
         // page wasn't allocated so we return an empty page
-        return
+        return safeAllocUint8Array(PAGE_SIZE);
     }
 
     if (page.length === PAGE_SIZE) {
@@ -18687,7 +18702,7 @@ declare class DebuggerAdapter {
     }
 
     // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-    const fullPage =
+    const fullPage = safeAllocUint8Array(PAGE_SIZE);
     fullPage.set(page);
     return fullPage;
 }
package/index.js
CHANGED
@@ -329,6 +329,19 @@ const Result$1 = {
     },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+const MAX_LENGTH$2 = 2145386496;
+function safeAllocUint8Array(length) {
+    if (length > MAX_LENGTH$2) {
+        // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH$2}.`);
+    }
+    return new Uint8Array(Math.min(MAX_LENGTH$2, length));
+}
+
 /**
  * Utilities for tests.
  */
@@ -569,6 +582,7 @@ var index$u = /*#__PURE__*/Object.freeze({
    DEFAULT_VERSION: DEFAULT_VERSION,
    ErrorsCollector: ErrorsCollector,
    get GpVersion () { return GpVersion; },
+    MAX_LENGTH: MAX_LENGTH$2,
    OK: OK,
    Result: Result$1,
    TEST_COMPARE_USING: TEST_COMPARE_USING,
@@ -583,6 +597,7 @@ var index$u = /*#__PURE__*/Object.freeze({
    isBrowser: isBrowser,
    measure: measure,
    resultToString: resultToString,
+    safeAllocUint8Array: safeAllocUint8Array,
    seeThrough: seeThrough,
    workspacePathFix: workspacePathFix
 });
@@ -606,7 +621,7 @@ class BitVec {
      * Create new [`BitVec`] with all values set to `false`.
      */
     static empty(bitLength) {
-        const data =
+        const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
         return new BitVec(data, bitLength);
     }
     byteLength;
@@ -807,7 +822,7 @@ class BytesBlob {
     static blobFromParts(v, ...rest) {
         const vArr = v instanceof Uint8Array ? [v] : v;
         const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-        const buffer =
+        const buffer = safeAllocUint8Array(totalLength);
         let offset = 0;
         for (const r of vArr) {
             buffer.set(r, offset);
@@ -880,7 +895,7 @@ class Bytes extends BytesBlob {
     }
     /** Create an empty [`Bytes<X>`] of given length. */
     static zero(len) {
-        return new Bytes(
+        return new Bytes(safeAllocUint8Array(len), len);
     }
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
     /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
@@ -3589,7 +3604,7 @@ async function verify(input) {
         return Promise.resolve([]);
     }
     const dataLength = input.reduce((acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1, 0);
-    const data =
+    const data = safeAllocUint8Array(dataLength);
     let offset = 0;
     for (const { key, message, signature } of input) {
         data.set(key.raw, offset);
@@ -3681,7 +3696,7 @@ class SimpleAllocator {
 /** An allocator that works by allocating larger (continuous) pages of memory. */
 class PageAllocator {
     hashesPerPage;
-    page =
+    page = safeAllocUint8Array(0);
     currentHash = 0;
     // TODO [ToDr] Benchmark the performance!
     constructor(hashesPerPage) {
@@ -3692,7 +3707,7 @@ class PageAllocator {
     resetPage() {
         const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
         this.currentHash = 0;
-        this.page =
+        this.page = safeAllocUint8Array(pageSizeBytes);
     }
     emptyHash() {
         const startIdx = this.currentHash * HASH_SIZE;
@@ -10460,7 +10475,7 @@ class SerializedService {
     getStorage(rawKey) {
         if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
             const SERVICE_ID_BYTES = 4;
-            const serviceIdAndKey =
+            const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
             serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
             serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
             const key = asOpaqueType(BytesBlob.blobFrom(hashBytes(serviceIdAndKey).raw));
@@ -10545,7 +10560,7 @@ class TrieNode {
     raw;
     constructor(
     /** Exactly 512 bits / 64 bytes */
-    raw =
+    raw = safeAllocUint8Array(TRIE_NODE_BYTES)) {
         this.raw = raw;
     }
     /** Returns the type of the node */
@@ -11796,7 +11811,7 @@ function padAndEncodeData(input) {
     const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
     let padded = input;
     if (input.length !== paddedLength) {
-        padded = BytesBlob.blobFrom(
+        padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
        padded.raw.set(input.raw, 0);
    }
    return chunkingFunction(padded);
@@ -11842,7 +11857,7 @@ function decodeData(input) {
  */
 function encodePoints(input) {
     const result = [];
-    const data =
+    const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
     // add original shards to the result
     for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
         const pointStart = POINT_LENGTH * i;
@@ -11858,7 +11873,7 @@ function encodePoints(input) {
     const encodedData = encodedResult.take_data();
     for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
         const pointIndex = i * POINT_ALIGNMENT;
-        const redundancyPoint =
+        const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
         for (let j = 0; j < POINT_LENGTH; j++) {
             redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
         }
@@ -11873,7 +11888,7 @@ function encodePoints(input) {
  */
 function decodePiece(input) {
     const result = Bytes.zero(PIECE_SIZE);
-    const data =
+    const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
     const indices = new Uint16Array(input.length);
     for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
         const [index, points] = input[i];
@@ -11989,7 +12004,7 @@ function lace(input) {
         return BytesBlob.empty();
     }
     const n = input[0].length;
-    const result = BytesBlob.blobFrom(
+    const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
     for (let i = 0; i < k; i++) {
         const entry = input[i].raw;
         for (let j = 0; j < n; j++) {
@@ -13268,7 +13283,7 @@ class Registers {
     bytes;
     asSigned;
     asUnsigned;
-    constructor(bytes =
+    constructor(bytes = safeAllocUint8Array(NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT)) {
         this.bytes = bytes;
         check `${bytes.length === NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
@@ -13421,7 +13436,7 @@ class Mask {
         return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
     }
     buildLookupTableForward(mask) {
-        const table =
+        const table = safeAllocUint8Array(mask.bitLength);
         let lastInstructionOffset = 0;
         for (let i = mask.bitLength - 1; i >= 0; i--) {
             if (mask.isSet(i)) {
@@ -16956,7 +16971,7 @@ class HostCalls {
         const regs = pvmInstance.getRegisters();
         const maybeAddress = regs.getLowerU32(7);
         const maybeLength = regs.getLowerU32(8);
-        const result =
+        const result = safeAllocUint8Array(maybeLength);
         const startAddress = tryAsMemoryIndex(maybeAddress);
         const loadResult = memory.loadInto(result, startAddress);
         if (loadResult.isError) {
@@ -17305,14 +17320,14 @@ class DebuggerAdapter {
         const page = this.pvm.getMemoryPage(pageNumber);
         if (page === null) {
             // page wasn't allocated so we return an empty page
-            return
+            return safeAllocUint8Array(PAGE_SIZE$1);
         }
         if (page.length === PAGE_SIZE$1) {
             // page was allocated and has a proper size so we can simply return it
             return page;
         }
         // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-        const fullPage =
+        const fullPage = safeAllocUint8Array(PAGE_SIZE$1);
         fullPage.set(page);
         return fullPage;
     }
@@ -17456,7 +17471,7 @@ function fisherYatesShuffle(arr, entropy) {
 }
 function hashToNumberSequence(entropy, length) {
     const result = new Array(length);
-    const randomBytes =
+    const randomBytes = safeAllocUint8Array(ENTROPY_BYTES + 4);
     randomBytes.set(entropy.raw);
     for (let i = 0; i < length; i++) {
         randomBytes.set(u32AsLeBytes(tryAsU32(Math.floor(i / 8))), ENTROPY_BYTES);
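
The functional change common to all three bundles is the new safeAllocUint8Array helper: it caps Uint8Array allocations at MAX_LENGTH (2145386496 bytes, about 2GB) and, instead of throwing, logs a warning and returns a buffer capped at that size. A minimal usage sketch follows, assuming the frozen index$u namespace is exposed from the package root as a `utils` namespace; the import path and namespace name are assumptions, not confirmed by this diff.

    import { utils } from "@typeberry/lib";

    // Small requests behave like `new Uint8Array(length)`.
    const buf = utils.safeAllocUint8Array(1024);

    // Requests above MAX_LENGTH only print a console warning and return a buffer
    // capped at MAX_LENGTH, so callers should verify the returned length.
    const huge = utils.safeAllocUint8Array(utils.MAX_LENGTH + 1);
    if (huge.length < utils.MAX_LENGTH + 1) {
        // handle the truncated allocation here
    }

Because the oversize case returns a shorter buffer rather than failing, code that relies on an exact length should check it after allocation, matching the bundled implementation shown above.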