@aztec/archiver 3.0.0-nightly.20251113 → 3.0.0-nightly.20251115
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -1,3 +1,4 @@
+import { type CheckpointBlobData } from '@aztec/blob-lib';
 import type { BlobSinkClientInterface } from '@aztec/blob-sink/client';
 import type { ViemClient, ViemPublicClient } from '@aztec/ethereum';
 import type { EthAddress } from '@aztec/foundation/eth-address';
@@ -13,17 +14,16 @@ import type { DataRetrieval } from './structs/data_retrieval.js';
 import type { InboxMessage } from './structs/inbox_message.js';
 import type { L1PublishedData } from './structs/published.js';
 export type RetrievedL2Block = {
-    l2BlockNumber: number;
     archiveRoot: Fr;
     stateReference: StateReference;
     header: CheckpointHeader;
-
+    checkpointBlobData: CheckpointBlobData;
     l1: L1PublishedData;
     chainId: Fr;
     version: Fr;
     attestations: CommitteeAttestation[];
 };
-export declare function retrievedBlockToPublishedL2Block(
+export declare function retrievedBlockToPublishedL2Block({ archiveRoot, stateReference, header: checkpointHeader, checkpointBlobData, l1, chainId, version, attestations, }: RetrievedL2Block): Promise<PublishedL2Block>;
 /**
  * Fetches new L2 blocks.
  * @param publicClient - The viem public client to use for transaction retrieval.
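The declaration change above reworks the calling convention: `RetrievedL2Block` drops `l2BlockNumber`, gains a `checkpointBlobData: CheckpointBlobData` field, and `retrievedBlockToPublishedL2Block` now destructures that object directly. The following is a minimal sketch of how a caller might adapt; the `@aztec/archiver` import path and the wrapper function are assumptions for illustration only, while `decodeCheckpointBlobDataFromBlobs` and the field names come from this diff.

// Sketch only: adapting a caller to the new RetrievedL2Block shape.
// The archiver import path below is an assumption; adjust it to wherever the
// package actually re-exports these helpers.
import { decodeCheckpointBlobDataFromBlobs } from '@aztec/blob-lib';
import { retrievedBlockToPublishedL2Block, type RetrievedL2Block } from '@aztec/archiver';

// Reuse the decoder's own parameter type so the sketch does not have to assume
// where the Blob class is exported from.
type Blobs = Parameters<typeof decodeCheckpointBlobDataFromBlobs>[0];

export async function toPublishedBlock(blobs: Blobs, rest: Omit<RetrievedL2Block, 'checkpointBlobData'>) {
  // Decode the checkpoint blob data once up front; the removed l2BlockNumber is
  // no longer supplied, since block numbers now come from the blob data itself.
  const checkpointBlobData = decodeCheckpointBlobDataFromBlobs(blobs);
  return retrievedBlockToPublishedL2Block({ ...rest, checkpointBlobData });
}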
@@ -1 +1 @@
-{"version":3,"file":"data_retrieval.d.ts","sourceRoot":"","sources":["../../src/archiver/data_retrieval.ts"],"names":[],"mappings":"
+
{"version":3,"file":"data_retrieval.d.ts","sourceRoot":"","sources":["../../src/archiver/data_retrieval.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,KAAK,kBAAkB,EAIxB,MAAM,iBAAiB,CAAC;AACzB,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AACvE,OAAO,KAAK,EAEV,UAAU,EAGV,gBAAgB,EAEjB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAEhE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAClE,OAAO,EAAE,KAAK,QAAQ,EAAE,SAAS,EAAE,MAAM,qBAAqB,CAAC;AAC/D,OAAO,EAAQ,oBAAoB,EAA0B,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAC3G,OAAO,EAAE,KAAK,EAAE,MAAM,sBAAsB,CAAC;AAC7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AAExD,OAAO,EAA0C,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAE1F,OAAO,EAEL,KAAK,qBAAqB,EAC1B,KAAK,GAAG,EAKT,MAAM,MAAM,CAAC;AAGd,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AACjE,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAC/D,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAE9D,MAAM,MAAM,gBAAgB,GAAG;IAC7B,WAAW,EAAE,EAAE,CAAC;IAChB,cAAc,EAAE,cAAc,CAAC;IAC/B,MAAM,EAAE,gBAAgB,CAAC;IACzB,kBAAkB,EAAE,kBAAkB,CAAC;IACvC,EAAE,EAAE,eAAe,CAAC;IACpB,OAAO,EAAE,EAAE,CAAC;IACZ,OAAO,EAAE,EAAE,CAAC;IACZ,YAAY,EAAE,oBAAoB,EAAE,CAAC;CACtC,CAAC;AAEF,wBAAsB,gCAAgC,CAAC,EACrD,WAAW,EACX,cAAc,EACd,MAAM,EAAE,gBAAgB,EACxB,kBAAkB,EAClB,EAAE,EACF,OAAO,EACP,OAAO,EACP,YAAY,GACb,EAAE,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC,CA8E9C;AAED;;;;;;;;GAQG;AACH,wBAAsB,wBAAwB,CAC5C,MAAM,EAAE,qBAAqB,CAAC,OAAO,SAAS,EAAE,gBAAgB,CAAC,EACjE,YAAY,EAAE,gBAAgB,EAC9B,cAAc,EAAE,uBAAuB,EACvC,gBAAgB,EAAE,MAAM,EACxB,cAAc,EAAE,MAAM,EACtB,MAAM,GAAE,MAAiC,GACxC,OAAO,CAAC,gBAAgB,EAAE,CAAC,CAuD7B;AA6DD,wBAAsB,cAAc,CAAC,YAAY,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAGzG;AAkJD,iHAAiH;AACjH,wBAAsB,qBAAqB,CACzC,KAAK,EAAE,qBAAqB,CAAC,OAAO,QAAQ,EAAE,UAAU,CAAC,EACzD,IAAI,EAAE,EAAE,EACR,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,GACd,OAAO,CAAC,YAAY,GAAG,SAAS,CAAC,CAKnC;AAED;;;;;;;;GAQG;AACH,wBAAsB,sBAAsB,CAC1C,KAAK,EAAE,qBAAqB,CAAC,OAAO,QAAQ,EAAE,UAAU,CAAC,EACzD,gBAAgB,EAAE,MAAM,EACxB,cAAc,EAAE,MAAM,GACrB,OAAO,CAAC,YAAY,EAAE,CAAC,CAgBzB;AAgBD,iEAAiE;AACjE,wBAAsB,6BAA6B,CACjD,YAAY,EAAE,gBAAgB,EAC9B,aAAa,EAAE,UAAU,EACzB,gBAAgB,EAAE,MAAM,EACxB,cAAc,CAAC,EAAE,MAAM,GACtB,OAAO,CAAC;IAAE,aAAa,EAAE,MAAM,CAAC;IAAC,aAAa,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,EAAE,CAAC;IAAC,MAAM,EAAE,GAAG,CAAA;CAAE,EAAE,CAAC,CAexF;AAED,yDAAyD;AACzD,wBAAsB,0BAA0B,CAC9C,YAAY,EAAE,gBAAgB,EAC9B,aAAa,EAAE,UAAU,EACzB,gBAAgB,EAAE,MAAM,EACxB,cAAc,CAAC,EAAE,MAAM,GACtB,OAAO,CAAC,aAAa,CAAC;IAAE,KAAK,EAAE,KAAK,CAAC;IAAC,QAAQ,EAAE,EAAE,CAAC;IAAC,aAAa,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,KAAK,MAAM,EAAE,CAAA;CAAE,CAAC,CAAC,CAatG;AAED,MAAM,MAAM,gBAAgB,GAAG;IAC7B,WAAW,EAAE,EAAE,CAAC;IAChB,QAAQ,EAAE,EAAE,CAAC;IACb,KAAK,EAAE,KAAK,CAAC;CACd,CAAC;AAEF;;;;;;;;GAQG;AACH,wBAAsB,yBAAyB,CAC7C,YAAY,EAAE,gBAAgB,EAC9B,MAAM,EAAE,KAAK,MAAM,EAAE,EACrB,gBAAgB,EAAE,EAAE,GACnB,OAAO,CAAC,gBAAgB,CAAC,CAmC3B"}
@@ -1,4 +1,4 @@
-import { BlobDeserializationError, SpongeBlob,
+import { BlobDeserializationError, SpongeBlob, decodeCheckpointBlobDataFromBlobs, encodeBlockBlobData } from '@aztec/blob-lib';
 import { asyncPool } from '@aztec/foundation/async-pool';
 import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
 import { Fr } from '@aztec/foundation/fields';
@@ -8,43 +8,67 @@ import { Body, CommitteeAttestation, L2Block, L2BlockHeader, PublishedL2Block }
 import { Proof } from '@aztec/stdlib/proofs';
 import { CheckpointHeader } from '@aztec/stdlib/rollup';
 import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
-import { GlobalVariables, StateReference } from '@aztec/stdlib/tx';
+import { GlobalVariables, PartialStateReference, StateReference } from '@aztec/stdlib/tx';
 import { decodeFunctionData, getAbiItem, hexToBytes, multicall3Abi } from 'viem';
 import { NoBlobBodiesFoundError } from './errors.js';
-export async function retrievedBlockToPublishedL2Block(
-    const {
-
-    const
-
-
-
-
-
-
-
-
-
+export async function retrievedBlockToPublishedL2Block({ archiveRoot, stateReference, header: checkpointHeader, checkpointBlobData, l1, chainId, version, attestations }) {
+    const { totalNumBlobFields, blocks: blocksBlobData } = checkpointBlobData;
+    // The lastArchiveRoot of a block is the new archive for the previous block.
+    const newArchiveRoots = blocksBlobData.map((b)=>b.lastArchiveRoot).slice(1).concat([
+        archiveRoot
+    ]);
+    // `blocksBlobData` is created from `decodeCheckpointBlobDataFromBlobs`. An error will be thrown if it can't read a
+    // field for the `l1ToL2MessageRoot` of the first block. So below we can safely assume it exists:
+    const l1toL2MessageTreeRoot = blocksBlobData[0].l1ToL2MessageRoot;
+    const spongeBlob = await SpongeBlob.init(totalNumBlobFields);
+    const l2Blocks = [];
+    for(let i = 0; i < blocksBlobData.length; i++){
+        const blockBlobData = blocksBlobData[i];
+        const { blockEndMarker, blockEndStateField, lastArchiveRoot, noteHashRoot, nullifierRoot, publicDataRoot } = blockBlobData;
+        const l2BlockNumber = blockEndMarker.blockNumber;
+        const globalVariables = GlobalVariables.from({
+            chainId,
+            version,
+            blockNumber: l2BlockNumber,
+            slotNumber: checkpointHeader.slotNumber,
+            timestamp: blockEndMarker.timestamp,
+            coinbase: checkpointHeader.coinbase,
+            feeRecipient: checkpointHeader.feeRecipient,
+            gasFees: checkpointHeader.gasFees
+        });
+        const state = StateReference.from({
+            l1ToL2MessageTree: new AppendOnlyTreeSnapshot(l1toL2MessageTreeRoot, blockEndStateField.l1ToL2MessageNextAvailableLeafIndex),
+            partial: PartialStateReference.from({
+                noteHashTree: new AppendOnlyTreeSnapshot(noteHashRoot, blockEndStateField.noteHashNextAvailableLeafIndex),
+                nullifierTree: new AppendOnlyTreeSnapshot(nullifierRoot, blockEndStateField.nullifierNextAvailableLeafIndex),
+                publicDataTree: new AppendOnlyTreeSnapshot(publicDataRoot, blockEndStateField.publicDataNextAvailableLeafIndex)
+            })
+        });
+        const body = Body.fromTxBlobData(checkpointBlobData.blocks[0].txs);
+        const blobFields = encodeBlockBlobData(blockBlobData);
+        await spongeBlob.absorb(blobFields);
+        const clonedSpongeBlob = spongeBlob.clone();
+        const spongeBlobHash = await clonedSpongeBlob.squeeze();
+        const header = L2BlockHeader.from({
+            lastArchive: new AppendOnlyTreeSnapshot(lastArchiveRoot, l2BlockNumber),
+            contentCommitment: checkpointHeader.contentCommitment,
+            state,
+            globalVariables,
+            totalFees: body.txEffects.reduce((accum, txEffect)=>accum.add(txEffect.transactionFee), Fr.ZERO),
+            totalManaUsed: new Fr(blockEndStateField.totalManaUsed),
+            spongeBlobHash
+        });
+        const newArchive = new AppendOnlyTreeSnapshot(newArchiveRoots[i], l2BlockNumber + 1);
+        l2Blocks.push(new L2Block(newArchive, header, body));
+    }
+    const lastBlock = l2Blocks[l2Blocks.length - 1];
+    if (!lastBlock.header.state.equals(stateReference)) {
+        throw new Error('The claimed state reference submitted to L1 does not match the state reference of the last block.');
+    }
     // TODO(#17027)
-    //
-    // If there's more than one block, we need to build the spongeBlob from the endSpongeBlob of the previous block.
-    const spongeBlob = await SpongeBlob.init(blobFields.length);
-    // Skip the first field which is the checkpoint prefix indicating the number of total blob fields in a checkpoint.
-    const blockBlobFields = blobFields.slice(1);
-    await spongeBlob.absorb(blockBlobFields);
-    const spongeBlobHash = await spongeBlob.squeeze();
-    const body = Body.fromBlobFields(blockBlobFields);
-    const header = L2BlockHeader.from({
-        lastArchive: new AppendOnlyTreeSnapshot(checkpointHeader.lastArchiveRoot, l2BlockNumber),
-        contentCommitment: checkpointHeader.contentCommitment,
-        state: stateReference,
-        globalVariables,
-        totalFees: body.txEffects.reduce((accum, txEffect)=>accum.add(txEffect.transactionFee), Fr.ZERO),
-        totalManaUsed: checkpointHeader.totalManaUsed,
-        spongeBlobHash
-    });
-    const block = new L2Block(archive, header, body);
+    // There's only one block per checkpoint at the moment.
     return PublishedL2Block.fromFields({
-        block,
+        block: l2Blocks[0],
         l1,
         attestations
     });
@@ -221,10 +245,10 @@ export async function getL1BlockTime(publicClient, blockNumber) {
     if (blobBodies.length === 0) {
         throw new NoBlobBodiesFoundError(l2BlockNumber);
     }
-    let
+    let checkpointBlobData;
     try {
-        //
-
+        // Attempt to decode the checkpoint blob data.
+        checkpointBlobData = decodeCheckpointBlobDataFromBlobs(blobBodies.map((b)=>b.blob));
     } catch (err) {
         if (err instanceof BlobDeserializationError) {
             logger.fatal(err.message);
@@ -236,11 +260,10 @@ export async function getL1BlockTime(publicClient, blockNumber) {
     const archiveRoot = new Fr(Buffer.from(hexToBytes(decodedArgs.archive)));
     const stateReference = StateReference.fromViem(decodedArgs.stateReference);
     return {
-        l2BlockNumber,
         archiveRoot,
         stateReference,
         header,
-
+        checkpointBlobData,
         attestations
     };
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aztec/archiver",
-  "version": "3.0.0-nightly.
+  "version": "3.0.0-nightly.20251115",
   "type": "module",
   "exports": {
     ".": "./dest/index.js",
@@ -66,18 +66,18 @@
     ]
   },
   "dependencies": {
-    "@aztec/blob-lib": "3.0.0-nightly.
-    "@aztec/blob-sink": "3.0.0-nightly.
-    "@aztec/constants": "3.0.0-nightly.
-    "@aztec/epoch-cache": "3.0.0-nightly.
-    "@aztec/ethereum": "3.0.0-nightly.
-    "@aztec/foundation": "3.0.0-nightly.
-    "@aztec/kv-store": "3.0.0-nightly.
-    "@aztec/l1-artifacts": "3.0.0-nightly.
-    "@aztec/noir-protocol-circuits-types": "3.0.0-nightly.
-    "@aztec/protocol-contracts": "3.0.0-nightly.
-    "@aztec/stdlib": "3.0.0-nightly.
-    "@aztec/telemetry-client": "3.0.0-nightly.
+    "@aztec/blob-lib": "3.0.0-nightly.20251115",
+    "@aztec/blob-sink": "3.0.0-nightly.20251115",
+    "@aztec/constants": "3.0.0-nightly.20251115",
+    "@aztec/epoch-cache": "3.0.0-nightly.20251115",
+    "@aztec/ethereum": "3.0.0-nightly.20251115",
+    "@aztec/foundation": "3.0.0-nightly.20251115",
+    "@aztec/kv-store": "3.0.0-nightly.20251115",
+    "@aztec/l1-artifacts": "3.0.0-nightly.20251115",
+    "@aztec/noir-protocol-circuits-types": "3.0.0-nightly.20251115",
+    "@aztec/protocol-contracts": "3.0.0-nightly.20251115",
+    "@aztec/stdlib": "3.0.0-nightly.20251115",
+    "@aztec/telemetry-client": "3.0.0-nightly.20251115",
     "lodash.groupby": "^4.6.0",
     "lodash.omit": "^4.5.0",
     "tsc-watch": "^6.0.0",
@@ -1,4 +1,10 @@
-import {
+import {
+  BlobDeserializationError,
+  type CheckpointBlobData,
+  SpongeBlob,
+  decodeCheckpointBlobDataFromBlobs,
+  encodeBlockBlobData,
+} from '@aztec/blob-lib';
 import type { BlobSinkClientInterface } from '@aztec/blob-sink/client';
 import type {
   EpochProofPublicInputArgs,
@@ -19,7 +25,7 @@ import { Body, CommitteeAttestation, L2Block, L2BlockHeader, PublishedL2Block }
 import { Proof } from '@aztec/stdlib/proofs';
 import { CheckpointHeader } from '@aztec/stdlib/rollup';
 import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
-import { GlobalVariables, StateReference } from '@aztec/stdlib/tx';
+import { GlobalVariables, PartialStateReference, StateReference } from '@aztec/stdlib/tx';

 import {
   type GetContractEventsReturnType,
@@ -37,70 +43,103 @@ import type { InboxMessage } from './structs/inbox_message.js';
 import type { L1PublishedData } from './structs/published.js';

 export type RetrievedL2Block = {
-  l2BlockNumber: number;
   archiveRoot: Fr;
   stateReference: StateReference;
   header: CheckpointHeader;
-
+  checkpointBlobData: CheckpointBlobData;
   l1: L1PublishedData;
   chainId: Fr;
   version: Fr;
   attestations: CommitteeAttestation[];
 };

-export async function retrievedBlockToPublishedL2Block(
-
-
-
-
-
-
-
-
-
-
-
+export async function retrievedBlockToPublishedL2Block({
+  archiveRoot,
+  stateReference,
+  header: checkpointHeader,
+  checkpointBlobData,
+  l1,
+  chainId,
+  version,
+  attestations,
+}: RetrievedL2Block): Promise<PublishedL2Block> {
+  const { totalNumBlobFields, blocks: blocksBlobData } = checkpointBlobData;
+
+  // The lastArchiveRoot of a block is the new archive for the previous block.
+  const newArchiveRoots = blocksBlobData
+    .map(b => b.lastArchiveRoot)
+    .slice(1)
+    .concat([archiveRoot]);
+
+  // `blocksBlobData` is created from `decodeCheckpointBlobDataFromBlobs`. An error will be thrown if it can't read a
+  // field for the `l1ToL2MessageRoot` of the first block. So below we can safely assume it exists:
+  const l1toL2MessageTreeRoot = blocksBlobData[0].l1ToL2MessageRoot!;
+
+  const spongeBlob = await SpongeBlob.init(totalNumBlobFields);
+  const l2Blocks: L2Block[] = [];
+  for (let i = 0; i < blocksBlobData.length; i++) {
+    const blockBlobData = blocksBlobData[i];
+    const { blockEndMarker, blockEndStateField, lastArchiveRoot, noteHashRoot, nullifierRoot, publicDataRoot } =
+      blockBlobData;
+
+    const l2BlockNumber = blockEndMarker.blockNumber;
+
+    const globalVariables = GlobalVariables.from({
+      chainId,
+      version,
+      blockNumber: l2BlockNumber,
+      slotNumber: checkpointHeader.slotNumber,
+      timestamp: blockEndMarker.timestamp,
+      coinbase: checkpointHeader.coinbase,
+      feeRecipient: checkpointHeader.feeRecipient,
+      gasFees: checkpointHeader.gasFees,
+    });
+
+    const state = StateReference.from({
+      l1ToL2MessageTree: new AppendOnlyTreeSnapshot(
+        l1toL2MessageTreeRoot,
+        blockEndStateField.l1ToL2MessageNextAvailableLeafIndex,
+      ),
+      partial: PartialStateReference.from({
+        noteHashTree: new AppendOnlyTreeSnapshot(noteHashRoot, blockEndStateField.noteHashNextAvailableLeafIndex),
+        nullifierTree: new AppendOnlyTreeSnapshot(nullifierRoot, blockEndStateField.nullifierNextAvailableLeafIndex),
+        publicDataTree: new AppendOnlyTreeSnapshot(publicDataRoot, blockEndStateField.publicDataNextAvailableLeafIndex),
+      }),
+    });
+
+    const body = Body.fromTxBlobData(checkpointBlobData.blocks[0].txs);
+
+    const blobFields = encodeBlockBlobData(blockBlobData);
+    await spongeBlob.absorb(blobFields);
+
+    const clonedSpongeBlob = spongeBlob.clone();
+    const spongeBlobHash = await clonedSpongeBlob.squeeze();
+
+    const header = L2BlockHeader.from({
+      lastArchive: new AppendOnlyTreeSnapshot(lastArchiveRoot, l2BlockNumber),
+      contentCommitment: checkpointHeader.contentCommitment,
+      state,
+      globalVariables,
+      totalFees: body.txEffects.reduce((accum, txEffect) => accum.add(txEffect.transactionFee), Fr.ZERO),
+      totalManaUsed: new Fr(blockEndStateField.totalManaUsed),
+      spongeBlobHash,
+    });
+
+    const newArchive = new AppendOnlyTreeSnapshot(newArchiveRoots[i], l2BlockNumber + 1);
+
+    l2Blocks.push(new L2Block(newArchive, header, body));
+  }

-  const
-
-
-
-
-
-    chainId,
-    version,
-    blockNumber: l2BlockNumber,
-    slotNumber: checkpointHeader.slotNumber,
-    timestamp: checkpointHeader.timestamp,
-    coinbase: checkpointHeader.coinbase,
-    feeRecipient: checkpointHeader.feeRecipient,
-    gasFees: checkpointHeader.gasFees,
-  });
+  const lastBlock = l2Blocks[l2Blocks.length - 1];
+  if (!lastBlock.header.state.equals(stateReference)) {
+    throw new Error(
+      'The claimed state reference submitted to L1 does not match the state reference of the last block.',
+    );
+  }

   // TODO(#17027)
-  //
-
-  const spongeBlob = await SpongeBlob.init(blobFields.length);
-  // Skip the first field which is the checkpoint prefix indicating the number of total blob fields in a checkpoint.
-  const blockBlobFields = blobFields.slice(1);
-  await spongeBlob.absorb(blockBlobFields);
-  const spongeBlobHash = await spongeBlob.squeeze();
-
-  const body = Body.fromBlobFields(blockBlobFields);
-
-  const header = L2BlockHeader.from({
-    lastArchive: new AppendOnlyTreeSnapshot(checkpointHeader.lastArchiveRoot, l2BlockNumber),
-    contentCommitment: checkpointHeader.contentCommitment,
-    state: stateReference,
-    globalVariables,
-    totalFees: body.txEffects.reduce((accum, txEffect) => accum.add(txEffect.transactionFee), Fr.ZERO),
-    totalManaUsed: checkpointHeader.totalManaUsed,
-    spongeBlobHash,
-  });
-
-  const block = new L2Block(archive, header, body);
-
-  return PublishedL2Block.fromFields({ block, l1, attestations });
+  // There's only one block per checkpoint at the moment.
+  return PublishedL2Block.fromFields({ block: l2Blocks[0], l1, attestations });
 }

 /**
@@ -358,13 +397,10 @@ async function getBlockFromRollupTx(
     throw new NoBlobBodiesFoundError(l2BlockNumber);
   }

-  let
+  let checkpointBlobData: CheckpointBlobData;
   try {
-    //
-
-      blobBodies.map(b => b.blob),
-      true /* checkEncoding */,
-    );
+    // Attempt to decode the checkpoint blob data.
+    checkpointBlobData = decodeCheckpointBlobDataFromBlobs(blobBodies.map(b => b.blob));
   } catch (err: any) {
     if (err instanceof BlobDeserializationError) {
       logger.fatal(err.message);
@@ -379,11 +415,10 @@ async function getBlockFromRollupTx(
   const stateReference = StateReference.fromViem(decodedArgs.stateReference);

   return {
-    l2BlockNumber,
     archiveRoot,
     stateReference,
     header,
-
+    checkpointBlobData,
     attestations,
   };
 }