@aztec/blob-lib 4.0.0-nightly.20250907 → 4.0.0-nightly.20260108
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/batched_blob.d.ts +26 -0
- package/dest/batched_blob.d.ts.map +1 -0
- package/dest/batched_blob.js +20 -0
- package/dest/blob.d.ts +50 -99
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob.js +78 -169
- package/dest/blob_batching.d.ts +41 -123
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +129 -203
- package/dest/blob_utils.d.ts +40 -0
- package/dest/blob_utils.d.ts.map +1 -0
- package/dest/blob_utils.js +69 -0
- package/dest/circuit_types/blob_accumulator.d.ts +23 -0
- package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
- package/dest/circuit_types/blob_accumulator.js +62 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts +23 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
- package/dest/circuit_types/final_blob_accumulator.js +66 -0
- package/dest/circuit_types/final_blob_batching_challenges.d.ts +16 -0
- package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
- package/dest/circuit_types/final_blob_batching_challenges.js +26 -0
- package/dest/circuit_types/index.d.ts +4 -0
- package/dest/circuit_types/index.d.ts.map +1 -0
- package/dest/circuit_types/index.js +4 -0
- package/dest/encoding/block_blob_data.d.ts +22 -0
- package/dest/encoding/block_blob_data.d.ts.map +1 -0
- package/dest/encoding/block_blob_data.js +65 -0
- package/dest/encoding/block_end_marker.d.ts +11 -0
- package/dest/encoding/block_end_marker.d.ts.map +1 -0
- package/dest/encoding/block_end_marker.js +41 -0
- package/dest/encoding/block_end_state_field.d.ts +12 -0
- package/dest/encoding/block_end_state_field.d.ts.map +1 -0
- package/dest/encoding/block_end_state_field.js +39 -0
- package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
- package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
- package/dest/encoding/checkpoint_blob_data.js +67 -0
- package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
- package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
- package/dest/encoding/checkpoint_end_marker.js +28 -0
- package/dest/encoding/fixtures.d.ts +41 -0
- package/dest/encoding/fixtures.d.ts.map +1 -0
- package/dest/encoding/fixtures.js +140 -0
- package/dest/encoding/index.d.ts +10 -0
- package/dest/encoding/index.d.ts.map +1 -0
- package/dest/encoding/index.js +9 -0
- package/dest/encoding/tx_blob_data.d.ts +19 -0
- package/dest/encoding/tx_blob_data.d.ts.map +1 -0
- package/dest/encoding/tx_blob_data.js +79 -0
- package/dest/encoding/tx_start_marker.d.ts +16 -0
- package/dest/encoding/tx_start_marker.d.ts.map +1 -0
- package/dest/encoding/tx_start_marker.js +77 -0
- package/dest/errors.d.ts +1 -1
- package/dest/errors.d.ts.map +1 -1
- package/dest/hash.d.ts +43 -0
- package/dest/hash.d.ts.map +1 -0
- package/dest/hash.js +80 -0
- package/dest/index.d.ts +7 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +6 -16
- package/dest/interface.d.ts +1 -2
- package/dest/interface.d.ts.map +1 -1
- package/dest/kzg_context.d.ts +8 -0
- package/dest/kzg_context.d.ts.map +1 -0
- package/dest/kzg_context.js +14 -0
- package/dest/sponge_blob.d.ts +12 -14
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +26 -30
- package/dest/testing.d.ts +10 -23
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +37 -53
- package/dest/types.d.ts +17 -0
- package/dest/types.d.ts.map +1 -0
- package/dest/types.js +4 -0
- package/package.json +10 -7
- package/src/batched_blob.ts +26 -0
- package/src/blob.ts +81 -195
- package/src/blob_batching.ts +168 -231
- package/src/blob_utils.ts +82 -0
- package/src/circuit_types/blob_accumulator.ts +96 -0
- package/src/circuit_types/final_blob_accumulator.ts +76 -0
- package/src/circuit_types/final_blob_batching_challenges.ts +30 -0
- package/src/circuit_types/index.ts +4 -0
- package/src/encoding/block_blob_data.ts +102 -0
- package/src/encoding/block_end_marker.ts +55 -0
- package/src/encoding/block_end_state_field.ts +59 -0
- package/src/encoding/checkpoint_blob_data.ts +95 -0
- package/src/encoding/checkpoint_end_marker.ts +40 -0
- package/src/encoding/fixtures.ts +210 -0
- package/src/encoding/index.ts +9 -0
- package/src/encoding/tx_blob_data.ts +116 -0
- package/src/encoding/tx_start_marker.ts +97 -0
- package/src/hash.ts +89 -0
- package/src/index.ts +6 -19
- package/src/interface.ts +0 -1
- package/src/kzg_context.ts +16 -0
- package/src/sponge_blob.ts +28 -31
- package/src/testing.ts +48 -59
- package/src/types.ts +17 -0
- package/dest/blob_batching_public_inputs.d.ts +0 -71
- package/dest/blob_batching_public_inputs.d.ts.map +0 -1
- package/dest/blob_batching_public_inputs.js +0 -168
- package/dest/encoding.d.ts +0 -66
- package/dest/encoding.d.ts.map +0 -1
- package/dest/encoding.js +0 -113
- package/src/blob_batching_public_inputs.ts +0 -252
- package/src/encoding.ts +0 -138
package/src/circuit_types/final_blob_accumulator.ts
@@ -0,0 +1,76 @@
+import { BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+import { inspect } from 'util';
+
+/**
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
+ */
+export class FinalBlobAccumulator {
+  constructor(
+    public blobCommitmentsHash: Fr,
+    public z: Fr,
+    public y: BLS12Fr,
+    public c: BLS12Point,
+  ) {}
+
+  static empty(): FinalBlobAccumulator {
+    return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator {
+    const reader = BufferReader.asReader(buffer);
+    return new FinalBlobAccumulator(
+      Fr.fromBuffer(reader),
+      Fr.fromBuffer(reader),
+      BLS12Fr.fromBuffer(reader),
+      BLS12Point.fromBuffer(reader),
+    );
+  }
+
+  toBuffer() {
+    return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
+  }
+
+  toFields() {
+    return [
+      this.blobCommitmentsHash,
+      this.z,
+      ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
+      ...this.c.toBN254Fields(),
+    ];
+  }
+
+  // The below is used to send to L1 for proof verification
+  toString() {
+    // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
+    // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
+    // we use calldata rather than a slice in memory:
+    const buf = Buffer.concat([Buffer.alloc(32), this.z.toBuffer(), this.y.toBuffer(), this.c.compress()]);
+    return buf.toString('hex');
+  }
+
+  equals(other: FinalBlobAccumulator) {
+    return (
+      this.blobCommitmentsHash.equals(other.blobCommitmentsHash) &&
+      this.z.equals(other.z) &&
+      this.y.equals(other.y) &&
+      this.c.equals(other.c)
+    );
+  }
+
+  // Creates a random instance. Used for testing only - will not prove/verify.
+  static random() {
+    return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
+  }
+
+  [inspect.custom]() {
+    return `FinalBlobAccumulator {
+  blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
+  z: ${inspect(this.z)},
+  y: ${inspect(this.y)},
+  c: ${inspect(this.c)},
+}`;
+  }
+}
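Usage sketch (editor's addition, not part of the package diff): the accumulator round-trips through its buffer serialization, and `toString()` produces the hex payload sent to L1. The `@aztec/blob-lib` import path assumes the class is re-exported from the package root, which this excerpt does not show.

```ts
// Minimal sketch, assuming FinalBlobAccumulator is re-exported from the package root.
import { FinalBlobAccumulator } from '@aztec/blob-lib';

const accumulator = FinalBlobAccumulator.random(); // test-only helper, see above
const restored = FinalBlobAccumulator.fromBuffer(accumulator.toBuffer());

console.log(restored.equals(accumulator)); // true: buffer serialization round-trips
console.log(accumulator.toString()); // hex of (32 zero bytes, z, y, compressed c) for L1 verification
```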
package/src/circuit_types/final_blob_batching_challenges.ts
@@ -0,0 +1,30 @@
+import { BLS12Fr } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+/**
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
+ */
+export class FinalBlobBatchingChallenges {
+  constructor(
+    public readonly z: Fr,
+    public readonly gamma: BLS12Fr,
+  ) {}
+
+  equals(other: FinalBlobBatchingChallenges) {
+    return this.z.equals(other.z) && this.gamma.equals(other.gamma);
+  }
+
+  static empty(): FinalBlobBatchingChallenges {
+    return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges {
+    const reader = BufferReader.asReader(buffer);
+    return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
+  }
+
+  toBuffer() {
+    return serializeToBuffer(this.z, this.gamma);
+  }
+}
package/src/encoding/block_blob_data.ts
@@ -0,0 +1,102 @@
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { FieldReader } from '@aztec/foundation/serialize';
+
+import { BlobDeserializationError } from '../errors.js';
+import {
+  type BlockEndMarker,
+  decodeBlockEndMarker,
+  encodeBlockEndMarker,
+  isBlockEndMarker,
+} from './block_end_marker.js';
+import {
+  type BlockEndStateField,
+  decodeBlockEndStateField,
+  encodeBlockEndStateField,
+} from './block_end_state_field.js';
+import { type TxBlobData, decodeTxBlobData, encodeTxBlobData } from './tx_blob_data.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
+
+export interface BlockEndBlobData {
+  blockEndMarker: BlockEndMarker;
+  blockEndStateField: BlockEndStateField;
+  lastArchiveRoot: Fr;
+  noteHashRoot: Fr;
+  nullifierRoot: Fr;
+  publicDataRoot: Fr;
+  l1ToL2MessageRoot: Fr | undefined;
+}
+
+export interface BlockBlobData extends BlockEndBlobData {
+  txs: TxBlobData[];
+}
+
+export function encodeBlockEndBlobData(blockEndBlobData: BlockEndBlobData): Fr[] {
+  return [
+    encodeBlockEndMarker(blockEndBlobData.blockEndMarker),
+    encodeBlockEndStateField(blockEndBlobData.blockEndStateField),
+    blockEndBlobData.lastArchiveRoot,
+    blockEndBlobData.noteHashRoot,
+    blockEndBlobData.nullifierRoot,
+    blockEndBlobData.publicDataRoot,
+    ...(blockEndBlobData.l1ToL2MessageRoot ? [blockEndBlobData.l1ToL2MessageRoot] : []),
+  ];
+}
+
+export function decodeBlockEndBlobData(fields: Fr[] | FieldReader, isFirstBlock: boolean): BlockEndBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  const numBlockEndData = isFirstBlock ? 7 : 6;
+  if (numBlockEndData > reader.remainingFields()) {
+    throw new BlobDeserializationError(
+      `Incorrect encoding of blob fields: not enough fields for block end data. Expected ${numBlockEndData} fields, only ${reader.remainingFields()} remaining.`,
+    );
+  }
+
+  return {
+    blockEndMarker: decodeBlockEndMarker(reader.readField()),
+    blockEndStateField: decodeBlockEndStateField(reader.readField()),
+    lastArchiveRoot: reader.readField(),
+    noteHashRoot: reader.readField(),
+    nullifierRoot: reader.readField(),
+    publicDataRoot: reader.readField(),
+    l1ToL2MessageRoot: isFirstBlock ? reader.readField() : undefined,
+  };
+}
+
+export function encodeBlockBlobData(blockBlobData: BlockBlobData): Fr[] {
+  return [...blockBlobData.txs.map(tx => encodeTxBlobData(tx)).flat(), ...encodeBlockEndBlobData(blockBlobData)];
+}
+
+export function decodeBlockBlobData(fields: Fr[] | FieldReader, isFirstBlock: boolean): BlockBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  const txs: TxBlobData[] = [];
+  let hasReachedBlockEnd = false;
+  while (!hasReachedBlockEnd) {
+    if (reader.isFinished()) {
+      throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for block end marker.`);
+    }
+
+    const currentField = reader.peekField();
+    if (isBlockEndMarker(currentField)) {
+      hasReachedBlockEnd = true;
+    } else {
+      txs.push(decodeTxBlobData(reader));
+    }
+  }
+
+  const blockEndBlobData = decodeBlockEndBlobData(reader, isFirstBlock);
+
+  const blockEndMarker = blockEndBlobData.blockEndMarker;
+  if (blockEndMarker.numTxs !== txs.length) {
+    throw new BlobDeserializationError(
+      `Incorrect encoding of blob fields: expected ${blockEndMarker.numTxs} txs, but got ${txs.length}.`,
+    );
+  }
+
+  return {
+    txs,
+    ...blockEndBlobData,
+  };
+}
package/src/encoding/block_end_marker.ts
@@ -0,0 +1,55 @@
+import { BLOCK_END_PREFIX } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
+
+const BLOCK_NUMBER_BIT_SIZE = 32n;
+const TIMESTAMP_BIT_SIZE = 64n;
+const NUM_TXS_BIT_SIZE = 16n;
+
+export interface BlockEndMarker {
+  timestamp: bigint;
+  blockNumber: BlockNumber;
+  numTxs: number;
+}
+
+export function encodeBlockEndMarker(blockEndMarker: BlockEndMarker) {
+  let value = BLOCK_END_PREFIX;
+  value <<= TIMESTAMP_BIT_SIZE;
+  value += blockEndMarker.timestamp;
+  value <<= BLOCK_NUMBER_BIT_SIZE;
+  value += BigInt(blockEndMarker.blockNumber);
+  value <<= NUM_TXS_BIT_SIZE;
+  value += BigInt(blockEndMarker.numTxs);
+  return new Fr(value);
+}
+
+export function decodeBlockEndMarker(field: Fr): BlockEndMarker {
+  let value = field.toBigInt();
+  const numTxs = Number(value & (2n ** NUM_TXS_BIT_SIZE - 1n));
+  value >>= NUM_TXS_BIT_SIZE;
+  const blockNumber = BlockNumber(Number(value & (2n ** BLOCK_NUMBER_BIT_SIZE - 1n)));
+  value >>= BLOCK_NUMBER_BIT_SIZE;
+  const timestamp = value & (2n ** TIMESTAMP_BIT_SIZE - 1n);
+  value >>= TIMESTAMP_BIT_SIZE;
+
+  const prefix = value;
+  if (prefix !== BLOCK_END_PREFIX) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid block end marker.`);
+  }
+
+  return {
+    blockNumber,
+    timestamp,
+    numTxs,
+  };
+}
+
+// Check if a field is a block end marker. Used before decoding to check if it has reached the end of the block.
+export function isBlockEndMarker(field: Fr): boolean {
+  const prefix = field.toBigInt() >> (NUM_TXS_BIT_SIZE + BLOCK_NUMBER_BIT_SIZE + TIMESTAMP_BIT_SIZE);
+  return prefix === BLOCK_END_PREFIX;
+}
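Worked example (editor's addition, not part of the package diff): a block end marker packs, from most to least significant bits, the `BLOCK_END_PREFIX`, a 64-bit timestamp, a 32-bit block number and a 16-bit tx count into a single field element. The sketch below round-trips one marker; the `@aztec/blob-lib` re-exports are an assumption.

```ts
// Minimal sketch, assuming these helpers are re-exported from the package root.
import { BlockNumber } from '@aztec/foundation/branded-types';
import { decodeBlockEndMarker, encodeBlockEndMarker, isBlockEndMarker } from '@aztec/blob-lib';

// Layout (high to low bits): BLOCK_END_PREFIX | timestamp (64) | blockNumber (32) | numTxs (16).
const marker = { timestamp: 1736300000n, blockNumber: BlockNumber(42), numTxs: 3 };
const field = encodeBlockEndMarker(marker);

console.log(isBlockEndMarker(field)); // true: the prefix bits survive the packing
console.log(decodeBlockEndMarker(field)); // { blockNumber: 42, timestamp: 1736300000n, numTxs: 3 }
```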
package/src/encoding/block_end_state_field.ts
@@ -0,0 +1,59 @@
+import {
+  L1_TO_L2_MSG_TREE_HEIGHT,
+  NOTE_HASH_TREE_HEIGHT,
+  NULLIFIER_TREE_HEIGHT,
+  PUBLIC_DATA_TREE_HEIGHT,
+} from '@aztec/constants';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
+
+export const TOTAL_MANA_USED_BIT_SIZE = 48n;
+
+export interface BlockEndStateField {
+  l1ToL2MessageNextAvailableLeafIndex: number;
+  noteHashNextAvailableLeafIndex: number;
+  nullifierNextAvailableLeafIndex: number;
+  publicDataNextAvailableLeafIndex: number;
+  totalManaUsed: bigint;
+}
+
+export function encodeBlockEndStateField(blockEndStateField: BlockEndStateField) {
+  let value = BigInt(blockEndStateField.l1ToL2MessageNextAvailableLeafIndex);
+  value <<= BigInt(NOTE_HASH_TREE_HEIGHT);
+  value += BigInt(blockEndStateField.noteHashNextAvailableLeafIndex);
+  value <<= BigInt(NULLIFIER_TREE_HEIGHT);
+  value += BigInt(blockEndStateField.nullifierNextAvailableLeafIndex);
+  value <<= BigInt(PUBLIC_DATA_TREE_HEIGHT);
+  value += BigInt(blockEndStateField.publicDataNextAvailableLeafIndex);
+  value <<= BigInt(TOTAL_MANA_USED_BIT_SIZE);
+  value += BigInt(blockEndStateField.totalManaUsed);
+  return new Fr(value);
+}
+
+export function decodeBlockEndStateField(field: Fr): BlockEndStateField {
+  let value = field.toBigInt();
+  const totalManaUsed = value & (2n ** TOTAL_MANA_USED_BIT_SIZE - 1n);
+  value >>= TOTAL_MANA_USED_BIT_SIZE;
+  const publicDataNextAvailableLeafIndex = Number(value & (2n ** BigInt(PUBLIC_DATA_TREE_HEIGHT) - 1n));
+  value >>= BigInt(PUBLIC_DATA_TREE_HEIGHT);
+  const nullifierNextAvailableLeafIndex = Number(value & (2n ** BigInt(NULLIFIER_TREE_HEIGHT) - 1n));
+  value >>= BigInt(NULLIFIER_TREE_HEIGHT);
+  const noteHashNextAvailableLeafIndex = Number(value & (2n ** BigInt(NOTE_HASH_TREE_HEIGHT) - 1n));
+  value >>= BigInt(NOTE_HASH_TREE_HEIGHT);
+
+  if (value > 2n ** BigInt(L1_TO_L2_MSG_TREE_HEIGHT) - 1n) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid block end state field.`);
+  }
+  const l1ToL2MessageNextAvailableLeafIndex = Number(value);
+
+  return {
+    l1ToL2MessageNextAvailableLeafIndex,
+    noteHashNextAvailableLeafIndex,
+    nullifierNextAvailableLeafIndex,
+    publicDataNextAvailableLeafIndex,
+    totalManaUsed,
+  };
+}
package/src/encoding/checkpoint_blob_data.ts
@@ -0,0 +1,95 @@
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
+
+import { BlobDeserializationError } from '../errors.js';
+import { type BlockBlobData, decodeBlockBlobData, encodeBlockBlobData } from './block_blob_data.js';
+import {
+  type CheckpointEndMarker,
+  decodeCheckpointEndMarker,
+  encodeCheckpointEndMarker,
+  isCheckpointEndMarker,
+} from './checkpoint_end_marker.js';
+import type { TxStartMarker } from './tx_start_marker.js';
+
+export interface CheckpointBlobData {
+  checkpointEndMarker: CheckpointEndMarker;
+  blocks: BlockBlobData[];
+}
+
+export function encodeCheckpointBlobData(checkpointBlobData: CheckpointBlobData): Fr[] {
+  return [
+    ...checkpointBlobData.blocks.map(block => encodeBlockBlobData(block)).flat(),
+    encodeCheckpointEndMarker(checkpointBlobData.checkpointEndMarker),
+  ];
+}
+
+export function encodeCheckpointBlobDataFromBlocks(blocks: BlockBlobData[]): Fr[] {
+  const blocksBlobFields = blocks.map(block => encodeBlockBlobData(block)).flat();
+  const numBlobFields = blocksBlobFields.length + 1; // +1 for the checkpoint end marker.
+  return blocksBlobFields.concat(encodeCheckpointEndMarker({ numBlobFields }));
+}
+
+export function decodeCheckpointBlobData(fields: Fr[] | FieldReader): CheckpointBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  if (reader.isFinished()) {
+    throw new BlobDeserializationError(`Cannot decode empty blob data.`);
+  }
+
+  const blocks = [];
+  let checkpointEndMarker: CheckpointEndMarker | undefined;
+  while (!reader.isFinished() && !checkpointEndMarker) {
+    blocks.push(decodeBlockBlobData(reader, blocks.length === 0 /* isFirstBlock */));
+
+    // After reading a block, the next item must be either a checkpoint end marker or another block.
+    // The first field of a block is always a tx start marker. So if the provided fields are valid, it's not possible to
+    // misinterpret a tx start marker as checkpoint end marker, or vice versa.
+    const nextField = reader.peekField();
+    if (isCheckpointEndMarker(nextField)) {
+      checkpointEndMarker = decodeCheckpointEndMarker(reader.readField());
+      const numFieldsRead = reader.cursor;
+      if (numFieldsRead !== checkpointEndMarker.numBlobFields) {
+        throw new BlobDeserializationError(
+          `Incorrect encoding of blob fields: mismatch number of blob fields. Expected ${checkpointEndMarker.numBlobFields} fields, got ${numFieldsRead}.`,
+        );
+      }
+    }
+  }
+
+  if (!checkpointEndMarker) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: checkpoint end marker does not exist.`);
+  }
+
+  const remainingFields = reader.readFieldArray(reader.remainingFields());
+  if (!remainingFields.every(f => f.isZero())) {
+    throw new BlobDeserializationError(
+      `Incorrect encoding of blob fields: unexpected non-zero field after checkpoint end marker.`,
+    );
+  }
+
+  return {
+    checkpointEndMarker,
+    blocks,
+  };
+}
+
+export function decodeCheckpointBlobDataFromBuffer(buf: Buffer): CheckpointBlobData {
+  const reader = BufferReader.asReader(buf);
+  const totalFieldsInBuffer = Math.floor(buf.length / Fr.SIZE_IN_BYTES);
+  const blobFields = reader.readArray(totalFieldsInBuffer, Fr);
+  return decodeCheckpointBlobData(blobFields);
+}
+
+export function getTotalNumBlobFieldsFromTxs(txsPerBlock: TxStartMarker[][]): number {
+  const numBlocks = txsPerBlock.length;
+  if (!numBlocks) {
+    return 0;
+  }
+
+  return (
+    (numBlocks ? 1 : 0) + // l1ToL2Messages root in the first block
+    numBlocks * 6 + // 6 fields for each block end blob data.
+    txsPerBlock.reduce((total, txs) => total + txs.reduce((total, tx) => total + tx.numBlobFields, 0), 0) +
+    1 // checkpoint end marker
+  );
+}
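Usage sketch (editor's addition, not part of the package diff): a checkpoint encodes to a flat list of blob fields terminated by a checkpoint end marker, and decoding validates the field count recorded in that marker. `makeCheckpointBlobData` comes from the fixtures file added further down in this diff; the `@aztec/blob-lib` re-exports are an assumption.

```ts
// Minimal sketch, assuming these are re-exported from the package root.
import { decodeCheckpointBlobData, encodeCheckpointBlobData, makeCheckpointBlobData } from '@aztec/blob-lib';

const checkpoint = makeCheckpointBlobData({ numBlocks: 2, numTxsPerBlock: 3 });

// Blocks are flattened in order, then the checkpoint end marker is appended.
const fields = encodeCheckpointBlobData(checkpoint);

// Decoding re-reads blocks until it hits the end marker and throws BlobDeserializationError
// if the recorded field count does not match what was actually read.
const decoded = decodeCheckpointBlobData(fields);
console.log(decoded.blocks.length, decoded.blocks[0].txs.length); // 2 3
```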
package/src/encoding/checkpoint_end_marker.ts
@@ -0,0 +1,40 @@
+import { CHECKPOINT_END_PREFIX } from '@aztec/constants';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/checkpoint_blob_data.nr`.
+
+const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
+
+export interface CheckpointEndMarker {
+  numBlobFields: number;
+}
+
+export function encodeCheckpointEndMarker(checkpointEndMarker: CheckpointEndMarker) {
+  let value = CHECKPOINT_END_PREFIX;
+  value <<= NUM_BLOB_FIELDS_BIT_SIZE;
+  value += BigInt(checkpointEndMarker.numBlobFields);
+  return new Fr(value);
+}
+
+export function decodeCheckpointEndMarker(field: Fr): CheckpointEndMarker {
+  let value = field.toBigInt();
+  const numBlobFields = Number(value & (2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n));
+  value >>= NUM_BLOB_FIELDS_BIT_SIZE;
+
+  const prefix = value;
+  if (prefix !== CHECKPOINT_END_PREFIX) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid checkpoint end marker.`);
+  }
+
+  return {
+    numBlobFields,
+  };
+}
+
+// Check if a field is a checkpoint end marker. Used to check if it has reached the end of the blob fields.
+export function isCheckpointEndMarker(field: Fr): boolean {
+  const prefix = field.toBigInt() >> NUM_BLOB_FIELDS_BIT_SIZE;
+  return prefix === CHECKPOINT_END_PREFIX;
+}
package/src/encoding/fixtures.ts
@@ -0,0 +1,210 @@
+import {
+  FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH,
+  MAX_CONTRACT_CLASS_LOGS_PER_TX,
+  MAX_L2_TO_L1_MSGS_PER_TX,
+  MAX_NOTE_HASHES_PER_TX,
+  MAX_NULLIFIERS_PER_TX,
+  MAX_PRIVATE_LOGS_PER_TX,
+  MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+  PRIVATE_LOG_SIZE_IN_FIELDS,
+} from '@aztec/constants';
+import { makeTuple } from '@aztec/foundation/array';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import type { BlockBlobData, BlockEndBlobData } from './block_blob_data.js';
+import type { BlockEndMarker } from './block_end_marker.js';
+import type { BlockEndStateField } from './block_end_state_field.js';
+import { type CheckpointBlobData, getTotalNumBlobFieldsFromTxs } from './checkpoint_blob_data.js';
+import { type TxBlobData, getNumTxBlobFields } from './tx_blob_data.js';
+import type { TxStartMarker } from './tx_start_marker.js';
+
+const fr = (seed: number) => new Fr(BigInt(seed));
+
+export function makeTxStartMarker({
+  isFullTx = false,
+  ...overrides
+}: { isFullTx?: boolean } & Partial<TxStartMarker> = {}): TxStartMarker {
+  const partialTxStartMarker = {
+    revertCode: 0,
+    numNoteHashes: isFullTx ? MAX_NOTE_HASHES_PER_TX : 1,
+    numNullifiers: isFullTx ? MAX_NULLIFIERS_PER_TX : 1,
+    numL2ToL1Msgs: isFullTx ? MAX_L2_TO_L1_MSGS_PER_TX : 1,
+    numPublicDataWrites: isFullTx ? MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX : 1,
+    numPrivateLogs: isFullTx ? MAX_PRIVATE_LOGS_PER_TX : 1,
+    privateLogsLength: isFullTx ? PRIVATE_LOG_SIZE_IN_FIELDS * MAX_PRIVATE_LOGS_PER_TX : 1,
+    publicLogsLength: isFullTx ? FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH : 1,
+    contractClassLogLength: isFullTx ? MAX_CONTRACT_CLASS_LOGS_PER_TX : 1,
+    ...overrides,
+  };
+
+  const numBlobFields = overrides.numBlobFields ?? getNumTxBlobFields(partialTxStartMarker);
+  return {
+    ...partialTxStartMarker,
+    numBlobFields,
+  };
+}
+
+export function makeTxBlobData({
+  isFullTx = false,
+  seed = 1,
+  ...overrides
+}: { isFullTx?: boolean; seed?: number } & Partial<
+  Omit<TxBlobData, 'txStartMarker'> & { txStartMarker?: Partial<TxStartMarker> }
+> = {}): TxBlobData {
+  const { txStartMarker: txStartMarkerOverrides, ...txBlobDataOverrides } = overrides;
+  const txStartMarker = makeTxStartMarker({ isFullTx, ...txStartMarkerOverrides });
+
+  const noteHashes = makeTuple(txStartMarker.numNoteHashes, fr, seed);
+  const nullifiers = makeTuple(txStartMarker.numNullifiers, fr, seed + 0x100);
+  const l2ToL1Msgs = makeTuple(txStartMarker.numL2ToL1Msgs, fr, seed + 0x200);
+  const publicDataWrites = makeTuple(
+    txStartMarker.numPublicDataWrites,
+    i => [fr(seed + i * 2), fr(seed + i * 2 + 1)] satisfies [Fr, Fr],
+    seed + 0x300,
+  );
+
+  const privateLogs = [];
+  if (txStartMarker.privateLogsLength > txStartMarker.numPrivateLogs * PRIVATE_LOG_SIZE_IN_FIELDS) {
+    throw new Error('Private logs length is too large');
+  }
+  if (txStartMarker.privateLogsLength < txStartMarker.numPrivateLogs) {
+    throw new Error('Private logs length is too small');
+  }
+  let remainingNumPrivateLogs = txStartMarker.numPrivateLogs;
+  let remainingPrivateLogsLength = txStartMarker.privateLogsLength;
+  for (let i = 0; i < txStartMarker.numPrivateLogs; i++) {
+    const minLength = Math.max(
+      1,
+      remainingPrivateLogsLength - (remainingNumPrivateLogs - 1) * PRIVATE_LOG_SIZE_IN_FIELDS,
+    );
+    const length = Math.max(minLength, Math.floor(remainingPrivateLogsLength / remainingNumPrivateLogs));
+    privateLogs.push(makeTuple(length, fr, seed + 0x400 + i * PRIVATE_LOG_SIZE_IN_FIELDS));
+    remainingNumPrivateLogs -= 1;
+    remainingPrivateLogsLength -= length;
+  }
+
+  const publicLogs = makeTuple(txStartMarker.publicLogsLength, fr, seed + 0x500);
+  const contractClassLogBlobDataLength =
+    txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
+  const contractClassLog = makeTuple(contractClassLogBlobDataLength, fr, seed + 0x600);
+
+  return {
+    txStartMarker,
+    txHash: fr(seed + 0x700),
+    transactionFee: fr(seed + 0x800),
+    noteHashes,
+    nullifiers,
+    l2ToL1Msgs,
+    publicDataWrites,
+    privateLogs,
+    publicLogs,
+    contractClassLog,
+    ...txBlobDataOverrides,
+  };
+}
+
+export function makeBlockEndMarker({
+  seed = 1,
+  ...overrides
+}: { seed?: number } & Partial<BlockEndMarker> = {}): BlockEndMarker {
+  return {
+    numTxs: seed,
+    blockNumber: BlockNumber(seed + 1),
+    timestamp: BigInt(seed + 2),
+    ...overrides,
+  };
+}
+
+export function makeBlockEndStateField({
+  seed = 1,
+  ...overrides
+}: { seed?: number } & Partial<BlockEndStateField> = {}): BlockEndStateField {
+  return {
+    l1ToL2MessageNextAvailableLeafIndex: seed,
+    noteHashNextAvailableLeafIndex: seed + 0x10,
+    nullifierNextAvailableLeafIndex: seed + 0x20,
+    publicDataNextAvailableLeafIndex: seed + 0x30,
+    totalManaUsed: BigInt(seed + 0x40),
+    ...overrides,
+  };
+}
+
+export function makeBlockEndBlobData({
+  isFirstBlock = true,
+  seed = 1,
+  ...overrides
+}: { seed?: number; isFirstBlock?: boolean } & Partial<
+  Omit<BlockEndBlobData, 'blockEndMarker' | 'blockEndStateField'>
+> & {
+  blockEndMarker?: Partial<BlockEndMarker>;
+  blockEndStateField?: Partial<BlockEndStateField>;
+} = {}): BlockEndBlobData {
+  const {
+    blockEndMarker: blockEndMarkerOverrides,
+    blockEndStateField: blockEndStateFieldOverrides,
+    ...blockEndBlobDataOverrides
+  } = overrides;
+  return {
+    blockEndMarker: makeBlockEndMarker({ seed, ...blockEndMarkerOverrides }),
+    blockEndStateField: makeBlockEndStateField({ seed: seed + 0x100, ...blockEndStateFieldOverrides }),
+    lastArchiveRoot: fr(seed + 0x200),
+    noteHashRoot: fr(seed + 0x300),
+    nullifierRoot: fr(seed + 0x400),
+    publicDataRoot: fr(seed + 0x500),
+    l1ToL2MessageRoot: isFirstBlock ? fr(seed + 0x600) : undefined,
+    ...blockEndBlobDataOverrides,
+  };
+}
+
+export function makeBlockBlobData({
+  numTxs = 1,
+  isFirstBlock = true,
+  isFullTx = false,
+  seed = 1,
+  ...overrides
+}: { numTxs?: number; isFirstBlock?: boolean; isFullTx?: boolean; seed?: number } & Partial<
+  Parameters<typeof makeBlockEndBlobData>[0]
+> = {}): BlockBlobData {
+  return {
+    txs: makeTuple(numTxs, i => makeTxBlobData({ isFullTx, seed: seed + i * 0x100 }), seed),
+    ...makeBlockEndBlobData({
+      seed: seed + 0x1000 * numTxs,
+      blockEndMarker: {
+        numTxs,
+      },
+      isFirstBlock,
+      ...overrides,
+    }),
+  };
+}
+
+export function makeCheckpointBlobData({
+  numBlocks = 1,
+  numTxsPerBlock = 1,
+  isFullTx = false,
+  seed = 1,
+  ...overrides
+}: {
+  numBlocks?: number;
+  numTxsPerBlock?: number;
+  isFullTx?: boolean;
+  seed?: number;
+} & Partial<CheckpointBlobData> = {}): CheckpointBlobData {
+  const blocks =
+    overrides.blocks ??
+    makeTuple(
+      numBlocks,
+      i => makeBlockBlobData({ numTxs: numTxsPerBlock, isFirstBlock: i === seed, isFullTx, seed: seed + i * 0x1000 }),
+      seed,
+    );
+
+  const numBlobFields =
+    overrides.checkpointEndMarker?.numBlobFields ??
+    getTotalNumBlobFieldsFromTxs(blocks.map(block => block.txs.map(tx => tx.txStartMarker)));
+
+  return {
+    blocks,
+    checkpointEndMarker: { numBlobFields },
+  };
+}
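Usage sketch (editor's addition, not part of the package diff): the fixture makers compose, so tests can build consistent blob data without hand-assembling every field. The `@aztec/blob-lib` re-exports below are assumed; `getNumTxBlobFields` and the tx fixture shapes come from `tx_blob_data.ts`/`tx_start_marker.ts`, which are listed above but not shown in this excerpt.

```ts
// Minimal sketch, assuming these helpers are re-exported from the package root.
import { getTotalNumBlobFieldsFromTxs, makeTxBlobData, makeTxStartMarker } from '@aztec/blob-lib';

const smallTx = makeTxStartMarker();                  // one of each side effect
const fullTx = makeTxStartMarker({ isFullTx: true }); // maxed-out counts from @aztec/constants
console.log(smallTx.numBlobFields, fullTx.numBlobFields);

// Deep overrides: pin only the values a test cares about; derived values stay consistent.
const tx = makeTxBlobData({ seed: 0x20, txStartMarker: { numNoteHashes: 4 } });
console.log(tx.noteHashes.length); // 4

// Field budget for a one-block checkpoint holding both txs: per-tx fields, plus 7 block end
// fields (the first block carries the l1-to-l2 message root), plus 1 checkpoint end marker.
console.log(getTotalNumBlobFieldsFromTxs([[smallTx, fullTx]]));
```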
package/src/encoding/index.ts
@@ -0,0 +1,9 @@
+export * from './block_blob_data.js';
+export * from './block_end_marker.js';
+export * from './block_end_state_field.js';
+export * from './checkpoint_blob_data.js';
+export * from './checkpoint_end_marker.js';
+export * from './fixtures.js';
+export * from './tx_blob_data.js';
+export * from './tx_start_marker.js';
+export { BlobDeserializationError } from '../errors.js';