@aztec/archiver 3.0.0-nightly.20250908 → 3.0.0-nightly.20250911
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +4 -3
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +32 -19
- package/dest/archiver/archiver_store.d.ts +5 -1
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +103 -5
- package/dest/archiver/data_retrieval.d.ts +2 -2
- package/dest/archiver/data_retrieval.d.ts.map +1 -1
- package/dest/archiver/data_retrieval.js +3 -3
- package/dest/archiver/kv_archiver_store/block_store.d.ts +11 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +27 -3
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +3 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +6 -0
- package/dest/archiver/validation.d.ts.map +1 -1
- package/dest/archiver/validation.js +7 -4
- package/dest/test/mock_l2_block_source.d.ts +2 -10
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +2 -2
- package/package.json +13 -13
- package/src/archiver/archiver.ts +35 -20
- package/src/archiver/archiver_store.ts +7 -1
- package/src/archiver/archiver_store_test_suite.ts +120 -18
- package/src/archiver/data_retrieval.ts +5 -7
- package/src/archiver/kv_archiver_store/block_store.ts +44 -4
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +9 -1
- package/src/archiver/validation.ts +22 -2
- package/src/test/mock_l2_block_source.ts +19 -10
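
Taken together, the changes below move the archiver's pending-chain validation status (a `ValidateBlockResult`) out of `Archiver` memory and into the archiver store, and replace ad-hoc published-block object literals with `PublishedL2Block` instances. The sketch below illustrates the read-through pattern with an in-memory stand-in; the `ValidationStatusStore` interface, `MemoryStore` class, and the simplified `ValidateBlockResult` shape are assumptions for illustration only, not the package's actual types.

```ts
// Simplified stand-ins for illustration; the real ValidateBlockResult and store
// interfaces are defined in '@aztec/stdlib/block' and the archiver package.
type ValidateBlockResult = { valid: true } | { valid: false; reason: string };

interface ValidationStatusStore {
  getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined>;
  setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void>;
}

// In-memory store used only for this sketch.
class MemoryStore implements ValidationStatusStore {
  private status: ValidateBlockResult | undefined;
  getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined> {
    return Promise.resolve(this.status);
  }
  setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
    this.status = status;
    return Promise.resolve();
  }
}

// Mirrors the archiver's new read path: an unset status defaults to { valid: true }.
async function getPendingChainValidationStatus(store: ValidationStatusStore): Promise<ValidateBlockResult> {
  return (await store.getPendingChainValidationStatus()) ?? { valid: true };
}

async function demo(): Promise<void> {
  const store = new MemoryStore();
  console.log(await getPendingChainValidationStatus(store)); // { valid: true }
  await store.setPendingChainValidationStatus({ valid: false, reason: 'insufficient-attestations' });
  console.log(await getPendingChainValidationStatus(store)); // { valid: false, reason: ... }
}

void demo();
```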

package/dest/archiver/validation.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"validation.d.ts","sourceRoot":"","sources":["../../src/archiver/validation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,KAAK,gBAAgB,EACrB,KAAK,mBAAmB,EAEzB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,KAAK,iBAAiB,EAAkB,MAAM,6BAA6B,CAAC;AAErF,YAAY,EAAE,mBAAmB,EAAE,CAAC;AAEpC;;;GAGG;AACH,wBAAsB,yBAAyB,CAC7C,cAAc,EAAE,gBAAgB,EAChC,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,EACnD,MAAM,CAAC,EAAE,MAAM,GACd,OAAO,CAAC,mBAAmB,CAAC,
+{"version":3,"file":"validation.d.ts","sourceRoot":"","sources":["../../src/archiver/validation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,KAAK,gBAAgB,EACrB,KAAK,mBAAmB,EAEzB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,KAAK,iBAAiB,EAAkB,MAAM,6BAA6B,CAAC;AAErF,YAAY,EAAE,mBAAmB,EAAE,CAAC;AAEpC;;;GAGG;AACH,wBAAsB,yBAAyB,CAC7C,cAAc,EAAE,gBAAgB,EAChC,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,EACnD,MAAM,CAAC,EAAE,MAAM,GACd,OAAO,CAAC,mBAAmB,CAAC,CAqE9B"}

package/dest/archiver/validation.js
CHANGED
@@ -5,6 +5,7 @@ import { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
 * Returns true if the attestations are valid and sufficient, false otherwise.
 */ export async function validateBlockAttestations(publishedBlock, epochCache, constants, logger) {
 const attestations = getAttestationsFromPublishedL2Block(publishedBlock);
+const attestors = attestations.map((a)=>a.getSender());
 const { block } = publishedBlock;
 const blockHash = await block.hash().then((hash)=>hash.toString());
 const archiveRoot = block.archive.root.toString();
@@ -44,11 +45,12 @@ import { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
 valid: false,
 reason,
 invalidIndex: i,
-block: publishedBlock,
+block: publishedBlock.block.toBlockInfo(),
 committee,
 seed,
 epoch,
-
+attestors,
+attestations: publishedBlock.attestations
 };
 }
 }
@@ -62,11 +64,12 @@ import { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
 return {
 valid: false,
 reason,
-block: publishedBlock,
+block: publishedBlock.block.toBlockInfo(),
 committee,
 seed,
 epoch,
-
+attestors,
+attestations: publishedBlock.attestations
 };
 }
 logger?.debug(`Block attestations validated successfully for block ${block.number} at slot ${slot}`, logData);
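
In the failure branches above, the result now carries a compact `block.toBlockInfo()` summary plus the attestor addresses derived from the attestations, rather than the whole published block. A minimal sketch of that derivation, assuming simplified stand-ins for `CommitteeAttestation` and `EthAddress` (the real classes live in the Aztec stdlib):

```ts
// Simplified stand-ins; the real CommitteeAttestation and EthAddress come from
// '@aztec/stdlib/block' and '@aztec/foundation/eth-address'.
type EthAddress = string;

class CommitteeAttestation {
  constructor(private readonly sender: EthAddress) {}
  getSender(): EthAddress {
    return this.sender;
  }
}

// Equivalent of the `attestations.map((a) => a.getSender())` line added above:
// the failure payload reports who attested, alongside the attestations themselves.
function getAttestors(attestations: CommitteeAttestation[]): EthAddress[] {
  return attestations.map(a => a.getSender());
}

const attestations = [new CommitteeAttestation('0xaaaa'), new CommitteeAttestation('0xbbbb')];
console.log(getAttestors(attestations)); // ['0xaaaa', '0xbbbb']
```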

package/dest/test/mock_l2_block_source.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { EthAddress } from '@aztec/foundation/eth-address';
 import type { Fr } from '@aztec/foundation/fields';
 import type { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
-import { L2Block, L2BlockHash, type L2BlockSource, type L2Tips, type ValidateBlockResult } from '@aztec/stdlib/block';
+import { L2Block, L2BlockHash, type L2BlockSource, type L2Tips, PublishedL2Block, type ValidateBlockResult } from '@aztec/stdlib/block';
 import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract';
 import { type L1RollupConstants } from '@aztec/stdlib/epoch-helpers';
 import { type BlockHeader, TxHash, TxReceipt } from '@aztec/stdlib/tx';
@@ -49,15 +49,7 @@ export declare class MockL2BlockSource implements L2BlockSource, ContractDataSou
 * @returns The requested mocked L2 blocks.
 */
 getBlocks(from: number, limit: number, proven?: boolean): Promise<L2Block[]>;
-getPublishedBlocks(from: number, limit: number, proven?: boolean): Promise<
-block: L2Block;
-l1: {
-blockNumber: bigint;
-blockHash: `0x${string}`;
-timestamp: bigint;
-};
-attestations: never[];
-}[]>;
+getPublishedBlocks(from: number, limit: number, proven?: boolean): Promise<PublishedL2Block[]>;
 getBlockHeader(number: number | 'latest'): Promise<BlockHeader | undefined>;
 getBlocksForEpoch(epochNumber: bigint): Promise<L2Block[]>;
 getBlockHeadersForEpoch(epochNumber: bigint): Promise<BlockHeader[]>;

package/dest/test/mock_l2_block_source.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"mock_l2_block_source.d.ts","sourceRoot":"","sources":["../../src/test/mock_l2_block_source.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAC3D,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAEnD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AAC1D,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAChE,OAAO,
+{"version":3,"file":"mock_l2_block_source.d.ts","sourceRoot":"","sources":["../../src/test/mock_l2_block_source.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAC3D,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAEnD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AAC1D,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAChE,OAAO,EACL,OAAO,EACP,WAAW,EACX,KAAK,aAAa,EAClB,KAAK,MAAM,EACX,gBAAgB,EAChB,KAAK,mBAAmB,EACzB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EAAE,mBAAmB,EAAE,kBAAkB,EAAE,2BAA2B,EAAE,MAAM,wBAAwB,CAAC;AACnH,OAAO,EAA0B,KAAK,iBAAiB,EAAwB,MAAM,6BAA6B,CAAC;AACnH,OAAO,EAAE,KAAK,WAAW,EAAE,MAAM,EAAE,SAAS,EAAY,MAAM,kBAAkB,CAAC;AACjF,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAElD;;GAEG;AACH,qBAAa,iBAAkB,YAAW,aAAa,EAAE,kBAAkB;IACzE,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE,CAAM;IAEnC,OAAO,CAAC,iBAAiB,CAAa;IACtC,OAAO,CAAC,oBAAoB,CAAa;IAEzC,OAAO,CAAC,GAAG,CAAiD;IAE/C,YAAY,CAAC,SAAS,EAAE,MAAM;IAUpC,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE;IAK3B,YAAY,CAAC,SAAS,EAAE,MAAM;IAK9B,oBAAoB,CAAC,iBAAiB,EAAE,MAAM;IAI9C,uBAAuB,CAAC,oBAAoB,EAAE,MAAM;IAO3D;;;OAGG;IACH,gBAAgB,IAAI,OAAO,CAAC,UAAU,CAAC;IAIvC;;;OAGG;IACH,kBAAkB,IAAI,OAAO,CAAC,UAAU,CAAC;IAIzC;;;OAGG;IACI,cAAc;IAId,oBAAoB,IAAI,OAAO,CAAC,MAAM,CAAC;IAI9C;;;;OAIG;IACI,QAAQ,CAAC,MAAM,EAAE,MAAM;IAI9B;;;;;OAKG;IACI,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO;IAQjD,kBAAkB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO;IAe7E,cAAc,CAAC,MAAM,EAAE,MAAM,GAAG,QAAQ,GAAG,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;IAI3E,iBAAiB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;IAU1D,uBAAuB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAIpE;;;;OAIG;IACU,WAAW,CAAC,MAAM,EAAE,MAAM;;;;;;IAgBvC;;;;OAIG;IACU,mBAAmB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC;IAkB1E,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC;IA2BlC,gBAAgB,IAAI,OAAO,CAAC,MAAM,CAAC;IAInC,eAAe,IAAI,OAAO,CAAC,MAAM,CAAC;IAIlC,eAAe,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAIvD,cAAc,IAAI,OAAO,CAAC,iBAAiB,CAAC;IAI5C,cAAc,IAAI,OAAO,CAAC,MAAM,CAAC;IAIjC;;;OAGG;IACI,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAK7B;;;OAGG;IACI,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAK5B,gBAAgB,CAAC,GAAG,EAAE,EAAE,GAAG,OAAO,CAAC,mBAAmB,GAAG,SAAS,CAAC;IAInE,qBAAqB,CAAC,GAAG,EAAE,EAAE,GAAG,OAAO,CAAC,EAAE,GAAG,SAAS,CAAC;IAIvD,WAAW,CAAC,QAAQ,EAAE,YAAY,EAAE,UAAU,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,2BAA2B,GAAG,SAAS,CAAC;IAI1G,mBAAmB,IAAI,OAAO,CAAC,EAAE,EAAE,CAAC;IAIpC,oBAAoB,CAAC,QAAQ,EAAE,YAAY,EAAE,SAAS,EAAE,gBAAgB,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;IAItG,kCAAkC,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxE,aAAa,IAAI,OAAO,CAAC,IAAI,CAAC;IAI9B,qBAAqB,IAAI,OAAO,CAAC,OAAO,CAAC;IAIzC,+BAA+B,IAAI,OAAO,CAAC,mBAAmB,CAAC;CAGhE"}

package/dest/test/mock_l2_block_source.js
CHANGED
@@ -2,7 +2,7 @@ import { DefaultL1ContractsConfig } from '@aztec/ethereum';
 import { Buffer32 } from '@aztec/foundation/buffer';
 import { EthAddress } from '@aztec/foundation/eth-address';
 import { createLogger } from '@aztec/foundation/log';
-import { L2Block, L2BlockHash } from '@aztec/stdlib/block';
+import { L2Block, L2BlockHash, PublishedL2Block } from '@aztec/stdlib/block';
 import { EmptyL1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
 import { TxReceipt, TxStatus } from '@aztec/stdlib/tx';
 /**
@@ -75,7 +75,7 @@ import { TxReceipt, TxStatus } from '@aztec/stdlib/tx';
 }
 async getPublishedBlocks(from, limit, proven) {
 const blocks = await this.getBlocks(from, limit, proven);
-return blocks.map((block)=>({
+return blocks.map((block)=>PublishedL2Block.fromFields({
 block,
 l1: {
 blockNumber: BigInt(block.number),
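
Several files in this diff (the mock block source above, plus `data_retrieval.ts` and `block_store.ts` below) switch from returning a plain `{ block, l1, attestations }` literal to `PublishedL2Block.fromFields(...)`. A rough sketch of that static-factory pattern, using a simplified hypothetical `PublishedL2Block` rather than the real class from `@aztec/stdlib/block`:

```ts
// Hypothetical, simplified PublishedL2Block for illustration; the real class in
// '@aztec/stdlib/block' carries an L2Block, attestations, and serialization logic.
type L1PublishedData = { blockNumber: bigint; blockHash: string; timestamp: bigint };

class PublishedL2Block {
  private constructor(
    public readonly block: unknown,
    public readonly l1: L1PublishedData,
    public readonly attestations: unknown[],
  ) {}

  // Static factory matching the fromFields({ block, l1, attestations }) call sites in the diff.
  static fromFields(fields: { block: unknown; l1: L1PublishedData; attestations: unknown[] }): PublishedL2Block {
    return new PublishedL2Block(fields.block, fields.l1, fields.attestations);
  }
}

// Before this diff the mock returned a bare object literal; now callers get a
// class instance with a stable shape.
const published = PublishedL2Block.fromFields({
  block: { number: 1 },
  l1: { blockNumber: 10n, blockHash: `0x${'0'.repeat(64)}`, timestamp: 1_000n },
  attestations: [],
});
console.log(published.l1.blockNumber); // 10n
```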

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@aztec/archiver",
-"version": "3.0.0-nightly.20250908",
+"version": "3.0.0-nightly.20250911",
 "type": "module",
 "exports": {
 ".": "./dest/index.js",
@@ -66,18 +66,18 @@
 ]
 },
 "dependencies": {
-"@aztec/blob-lib": "3.0.0-nightly.
-"@aztec/blob-sink": "3.0.0-nightly.
-"@aztec/constants": "3.0.0-nightly.
-"@aztec/epoch-cache": "3.0.0-nightly.
-"@aztec/ethereum": "3.0.0-nightly.
-"@aztec/foundation": "3.0.0-nightly.
-"@aztec/kv-store": "3.0.0-nightly.
-"@aztec/l1-artifacts": "3.0.0-nightly.
-"@aztec/noir-protocol-circuits-types": "3.0.0-nightly.
-"@aztec/protocol-contracts": "3.0.0-nightly.
-"@aztec/stdlib": "3.0.0-nightly.
-"@aztec/telemetry-client": "3.0.0-nightly.
+"@aztec/blob-lib": "3.0.0-nightly.20250911",
+"@aztec/blob-sink": "3.0.0-nightly.20250911",
+"@aztec/constants": "3.0.0-nightly.20250911",
+"@aztec/epoch-cache": "3.0.0-nightly.20250911",
+"@aztec/ethereum": "3.0.0-nightly.20250911",
+"@aztec/foundation": "3.0.0-nightly.20250911",
+"@aztec/kv-store": "3.0.0-nightly.20250911",
+"@aztec/l1-artifacts": "3.0.0-nightly.20250911",
+"@aztec/noir-protocol-circuits-types": "3.0.0-nightly.20250911",
+"@aztec/protocol-contracts": "3.0.0-nightly.20250911",
+"@aztec/stdlib": "3.0.0-nightly.20250911",
+"@aztec/telemetry-client": "3.0.0-nightly.20250911",
 "lodash.groupby": "^4.6.0",
 "lodash.omit": "^4.5.0",
 "tsc-watch": "^6.0.0",

package/src/archiver/archiver.ts
CHANGED
@@ -120,7 +120,6 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
 private l1BlockNumber: bigint | undefined;
 private l1Timestamp: bigint | undefined;
-private pendingChainValidationStatus: ValidateBlockResult = { valid: true };
 private initialSyncComplete: boolean = false;
 
 public readonly tracer: Tracer;
@@ -342,7 +341,8 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
 // ********** Events that are processed per L2 block **********
 if (currentL1BlockNumber > blocksSynchedTo) {
-// First we retrieve new L2 blocks
+// First we retrieve new L2 blocks and store them in the DB. This will also update the
+// pending chain validation status, proven block number, and synched L1 block number.
 const rollupStatus = await this.handleL2blocks(blocksSynchedTo, currentL1BlockNumber);
 // Then we prune the current epoch if it'd reorg on next submission.
 // Note that we don't do this before retrieving L2 blocks because we may need to retrieve
@@ -355,21 +355,11 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 currentL1Timestamp,
 );
 
-// Update the pending chain validation status with the last block validation result.
-// Again, we only update if validation status changed, so in a sequence of invalid blocks
-// we keep track of the first invalid block so we can invalidate that one if needed.
-if (
-rollupStatus.validationResult &&
-rollupStatus.validationResult?.valid !== this.pendingChainValidationStatus.valid
-) {
-this.pendingChainValidationStatus = rollupStatus.validationResult;
-}
-
 // And lastly we check if we are missing any L2 blocks behind us due to a possible L1 reorg.
 // We only do this if rollup cant prune on the next submission. Otherwise we will end up
 // re-syncing the blocks we have just unwound above. We also dont do this if the last block is invalid,
 // since the archiver will rightfully refuse to sync up to it.
-if (!rollupCanPrune &&
+if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
 await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
 }
 
@@ -623,6 +613,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
 private async handleL2blocks(blocksSynchedTo: bigint, currentL1BlockNumber: bigint) {
 const localPendingBlockNumber = await this.getBlockNumber();
+const initialValidationResult: ValidateBlockResult | undefined = await this.store.getPendingChainValidationStatus();
 const [provenBlockNumber, provenArchive, pendingBlockNumber, pendingArchive, archiveForLocalPendingBlockNumber] =
 await this.rollup.status(BigInt(localPendingBlockNumber), { blockNumber: currentL1BlockNumber });
 const rollupStatus = {
@@ -630,7 +621,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 provenArchive,
 pendingBlockNumber: Number(pendingBlockNumber),
 pendingArchive,
-validationResult:
+validationResult: initialValidationResult,
 };
 this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
 localPendingBlockNumber,
@@ -809,7 +800,15 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
 // Only update the validation result if it has changed, so we can keep track of the first invalid block
 // in case there is a sequence of more than one invalid block, as we need to invalidate the first one.
-if
+// There is an exception though: if an invalid block is invalidated and replaced with another invalid block,
+// we need to update the validation result, since we need to be able to invalidate the new one.
+// See test 'chain progresses if an invalid block is invalidated with an invalid one' for more info.
+if (
+rollupStatus.validationResult?.valid !== validationResult.valid ||
+(!rollupStatus.validationResult.valid &&
+!validationResult.valid &&
+rollupStatus.validationResult.block.blockNumber === validationResult.block.blockNumber)
+) {
 rollupStatus.validationResult = validationResult;
 }
 
@@ -828,6 +827,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
 // We keep consuming blocks if we find an invalid one, since we do not listen for BlockInvalidated events
 // We just pretend the invalid ones are not there and keep consuming the next blocks
+// Note that this breaks if the committee ever attests to a descendant of an invalid block
 continue;
 }
 
@@ -841,7 +841,9 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 }
 
 try {
-const
+const updatedValidationResult =
+rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
+const [processDuration] = await elapsed(() => this.store.addBlocks(validBlocks, updatedValidationResult));
 this.instrumentation.processNewBlocks(
 processDuration / validBlocks.length,
 validBlocks.map(b => b.block),
@@ -1228,12 +1230,12 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 return this.store.getDebugFunctionName(address, selector);
 }
 
-getPendingChainValidationStatus(): Promise<ValidateBlockResult> {
-return
+async getPendingChainValidationStatus(): Promise<ValidateBlockResult> {
+return (await this.store.getPendingChainValidationStatus()) ?? { valid: true };
 }
 
 isPendingChainInvalid(): Promise<boolean> {
-return
+return this.getPendingChainValidationStatus().then(status => !status.valid);
 }
 
 async getL2Tips(): Promise<L2Tips> {
@@ -1351,6 +1353,7 @@ export class ArchiverStoreHelper
 | 'backupTo'
 | 'close'
 | 'transactionAsync'
+| 'addBlocks'
 >
 {
 #log = createLogger('archiver:block-helper');
@@ -1493,13 +1496,16 @@ export class ArchiverStoreHelper
 return true;
 }
 
-public addBlocks(blocks: PublishedL2Block[]): Promise<boolean> {
+public addBlocks(blocks: PublishedL2Block[], pendingChainValidationStatus?: ValidateBlockResult): Promise<boolean> {
 // Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
 // or if the previous block is not in the store.
 return this.store.transactionAsync(async () => {
 await this.store.addBlocks(blocks);
 
 const opResults = await Promise.all([
+// Update the pending chain validation status if provided
+pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
+// Add any logs emitted during the retrieved blocks
 this.store.addLogs(blocks.map(block => block.block)),
 // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
 ...blocks.map(async block => {
@@ -1539,6 +1545,8 @@ export class ArchiverStoreHelper
 const blocks = await this.getPublishedBlocks(from - blocksToUnwind + 1, blocksToUnwind);
 
 const opResults = await Promise.all([
+// Prune rolls back to the last proven block, which is by definition valid
+this.store.setPendingChainValidationStatus({ valid: true }),
 // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
 ...blocks.map(async block => {
 const contractClassLogs = block.block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
@@ -1656,4 +1664,11 @@ export class ArchiverStoreHelper
 getLastL1ToL2Message(): Promise<InboxMessage | undefined> {
 return this.store.getLastL1ToL2Message();
 }
+getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined> {
+return this.store.getPendingChainValidationStatus();
+}
+setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
+this.#log.debug(`Setting pending chain validation status to valid ${status?.valid}`, status);
+return this.store.setPendingChainValidationStatus(status);
+}
 }
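
The update rule in `handleL2blocks` above keeps the first invalid block of a sequence, except when an invalid block is invalidated and replaced by another invalid block at the same height. A hedged sketch of that condition as a standalone predicate, assuming a pared-down `ValidateBlockResult` and ignoring the undefined-status case the real code handles with optional chaining:

```ts
// Pared-down ValidateBlockResult; the real type also carries the committee,
// seed, epoch, attestors, attestations, and failure reason.
type ValidateBlockResult = { valid: true } | { valid: false; block: { blockNumber: number } };

// True when the stored status should be replaced by the latest block's result:
// either validity flipped, or an invalid block at the same height was
// invalidated and re-proposed as another invalid block.
function shouldUpdateValidationResult(current: ValidateBlockResult, latest: ValidateBlockResult): boolean {
  return (
    current.valid !== latest.valid ||
    (!current.valid && !latest.valid && current.block.blockNumber === latest.block.blockNumber)
  );
}

const firstInvalid: ValidateBlockResult = { valid: false, block: { blockNumber: 5 } };
console.log(shouldUpdateValidationResult({ valid: true }, firstInvalid)); // true: chain just went invalid
console.log(shouldUpdateValidationResult(firstInvalid, { valid: false, block: { blockNumber: 6 } })); // false: keep the first invalid block
console.log(shouldUpdateValidationResult(firstInvalid, { valid: false, block: { blockNumber: 5 } })); // true: same height replaced by another invalid block
```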

package/src/archiver/archiver_store.ts
CHANGED
@@ -3,7 +3,7 @@ import type { Fr } from '@aztec/foundation/fields';
 import type { CustomRange } from '@aztec/kv-store';
 import type { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
-import type { L2Block } from '@aztec/stdlib/block';
+import type { L2Block, ValidateBlockResult } from '@aztec/stdlib/block';
 import type {
 ContractClassPublic,
 ContractInstanceUpdateWithAddress,
@@ -272,4 +272,10 @@ export interface ArchiverDataStore {
 
 /** Returns the last L1 to L2 message stored. */
 getLastL1ToL2Message(): Promise<InboxMessage | undefined>;
+
+/** Returns the last synced validation status of the pending chain. */
+getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined>;
+
+/** Sets the last synced validation status of the pending chain. */
+setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void>;
 }

package/src/archiver/archiver_store_test_suite.ts
CHANGED
@@ -12,7 +12,16 @@ import { Fr } from '@aztec/foundation/fields';
 import { toArray } from '@aztec/foundation/iterable';
 import { sleep } from '@aztec/foundation/sleep';
 import { AztecAddress } from '@aztec/stdlib/aztec-address';
-import {
+import {
+CommitteeAttestation,
+EthAddress,
+L2Block,
+L2BlockHash,
+PublishedL2Block,
+type ValidateBlockResult,
+randomBlockInfo,
+wrapInBlock,
+} from '@aztec/stdlib/block';
 import {
 type ContractClassPublic,
 type ContractInstanceWithAddress,
@@ -34,7 +43,6 @@ import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.j
 import { BlockNumberNotSequentialError, InitialBlockNumberNotSequentialError } from './errors.js';
 import { MessageStoreError } from './kv_archiver_store/message_store.js';
 import type { InboxMessage } from './structs/inbox_message.js';
-import type { PublishedL2Block } from './structs/published.js';
 
 /**
 * @param testName - The name of the test suite.
@@ -58,15 +66,16 @@ export function describeArchiverDataStore(
 
 const makeBlockHash = (blockNumber: number) => `0x${blockNumber.toString(16).padStart(64, '0')}`;
 
-const makePublished = (block: L2Block, l1BlockNumber: number): PublishedL2Block =>
-
-
-
-
-
-
-
-
+const makePublished = (block: L2Block, l1BlockNumber: number): PublishedL2Block =>
+PublishedL2Block.fromFields({
+block: block,
+l1: {
+blockNumber: BigInt(l1BlockNumber),
+blockHash: makeBlockHash(l1BlockNumber),
+timestamp: BigInt(l1BlockNumber * 1000),
+},
+attestations: times(3, CommitteeAttestation.random),
+});
 
 const expectBlocksEqual = (actual: PublishedL2Block[], expected: PublishedL2Block[]) => {
 expect(actual.length).toEqual(expected.length);
@@ -757,7 +766,7 @@ export function describeArchiverDataStore(
 return txEffect;
 });
 
-return {
+return PublishedL2Block.fromFields({
 block: block,
 attestations: times(3, CommitteeAttestation.random),
 l1: {
@@ -765,7 +774,7 @@ export function describeArchiverDataStore(
 blockHash: makeBlockHash(blockNumber),
 timestamp: BigInt(blockNumber),
 },
-};
+});
 };
 
 beforeEach(async () => {
@@ -878,11 +887,13 @@ export function describeArchiverDataStore(
 let blocks: PublishedL2Block[];
 
 beforeEach(async () => {
-blocks = await timesParallel(numBlocks, async (index: number) =>
-
-
-
-
+blocks = await timesParallel(numBlocks, async (index: number) =>
+PublishedL2Block.fromFields({
+block: await L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numPublicLogs),
+l1: { blockNumber: BigInt(index), blockHash: makeBlockHash(index), timestamp: BigInt(index) },
+attestations: times(3, CommitteeAttestation.random),
+}),
+);
 
 await store.addBlocks(blocks);
 await store.addLogs(blocks.map(b => b.block));
@@ -1056,5 +1067,96 @@ export function describeArchiverDataStore(
 }
 });
 });
+
+describe('pendingChainValidationStatus', () => {
+it('should return undefined when no status is set', async () => {
+const status = await store.getPendingChainValidationStatus();
+expect(status).toBeUndefined();
+});
+
+it('should store and retrieve a valid validation status', async () => {
+const validStatus: ValidateBlockResult = { valid: true };
+
+await store.setPendingChainValidationStatus(validStatus);
+const retrievedStatus = await store.getPendingChainValidationStatus();
+
+expect(retrievedStatus).toEqual(validStatus);
+});
+
+it('should store and retrieve an invalid validation status with insufficient attestations', async () => {
+const invalidStatus: ValidateBlockResult = {
+valid: false,
+block: randomBlockInfo(1),
+committee: [EthAddress.random(), EthAddress.random()],
+epoch: 123n,
+seed: 456n,
+attestors: [EthAddress.random()],
+attestations: [CommitteeAttestation.random()],
+reason: 'insufficient-attestations',
+};
+
+await store.setPendingChainValidationStatus(invalidStatus);
+const retrievedStatus = await store.getPendingChainValidationStatus();
+
+expect(retrievedStatus).toEqual(invalidStatus);
+});
+
+it('should store and retrieve an invalid validation status with invalid attestation', async () => {
+const invalidStatus: ValidateBlockResult = {
+valid: false,
+block: randomBlockInfo(2),
+committee: [EthAddress.random()],
+attestors: [EthAddress.random()],
+epoch: 789n,
+seed: 101n,
+attestations: [CommitteeAttestation.random()],
+reason: 'invalid-attestation',
+invalidIndex: 5,
+};
+
+await store.setPendingChainValidationStatus(invalidStatus);
+const retrievedStatus = await store.getPendingChainValidationStatus();
+
+expect(retrievedStatus).toEqual(invalidStatus);
+});
+
+it('should overwrite existing status when setting a new one', async () => {
+const firstStatus: ValidateBlockResult = { valid: true };
+const secondStatus: ValidateBlockResult = {
+valid: false,
+block: randomBlockInfo(3),
+committee: [EthAddress.random()],
+epoch: 999n,
+seed: 888n,
+attestors: [EthAddress.random()],
+attestations: [CommitteeAttestation.random()],
+reason: 'insufficient-attestations',
+};
+
+await store.setPendingChainValidationStatus(firstStatus);
+await store.setPendingChainValidationStatus(secondStatus);
+const retrievedStatus = await store.getPendingChainValidationStatus();
+
+expect(retrievedStatus).toEqual(secondStatus);
+});
+
+it('should handle empty committee and attestations arrays', async () => {
+const statusWithEmptyArrays: ValidateBlockResult = {
+valid: false,
+block: randomBlockInfo(4),
+committee: [],
+epoch: 0n,
+seed: 0n,
+attestors: [],
+attestations: [],
+reason: 'insufficient-attestations',
+};
+
+await store.setPendingChainValidationStatus(statusWithEmptyArrays);
+const retrievedStatus = await store.getPendingChainValidationStatus();
+
+expect(retrievedStatus).toEqual(statusWithEmptyArrays);
+});
+});
 });
 }

package/src/archiver/data_retrieval.ts
CHANGED
@@ -11,10 +11,11 @@ import type {
 import { asyncPool } from '@aztec/foundation/async-pool';
 import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
 import type { EthAddress } from '@aztec/foundation/eth-address';
+import type { ViemSignature } from '@aztec/foundation/eth-signature';
 import { Fr } from '@aztec/foundation/fields';
 import { type Logger, createLogger } from '@aztec/foundation/log';
 import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
-import { Body, CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
+import { Body, CommitteeAttestation, L2Block, PublishedL2Block } from '@aztec/stdlib/block';
 import { Proof } from '@aztec/stdlib/proofs';
 import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
 import { BlockHeader, GlobalVariables, ProposedBlockHeader, StateReference } from '@aztec/stdlib/tx';
@@ -32,7 +33,7 @@ import {
 import { NoBlobBodiesFoundError } from './errors.js';
 import type { DataRetrieval } from './structs/data_retrieval.js';
 import type { InboxMessage } from './structs/inbox_message.js';
-import type { L1PublishedData
+import type { L1PublishedData } from './structs/published.js';
 
 export type RetrievedL2Block = {
 l2BlockNumber: number;
@@ -86,11 +87,7 @@ export function retrievedBlockToPublishedL2Block(retrievedBlock: RetrievedL2Bloc
 
 const block = new L2Block(archive, header, body);
 
-return {
-block,
-l1,
-attestations,
-};
+return PublishedL2Block.fromFields({ block, l1, attestations });
 }
 
 /**
@@ -323,6 +320,7 @@ async function getBlockFromRollupTx(
 },
 ViemCommitteeAttestations,
 Hex[],
+ViemSignature,
 Hex,
 ];
 

package/src/archiver/kv_archiver_store/block_store.ts
CHANGED
@@ -6,7 +6,15 @@ import { BufferReader } from '@aztec/foundation/serialize';
 import { bufferToHex } from '@aztec/foundation/string';
 import type { AztecAsyncKVStore, AztecAsyncMap, AztecAsyncSingleton, Range } from '@aztec/kv-store';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
-import {
+import {
+Body,
+CommitteeAttestation,
+L2Block,
+L2BlockHash,
+PublishedL2Block,
+type ValidateBlockResult,
+} from '@aztec/stdlib/block';
+import { deserializeValidateBlockResult, serializeValidateBlockResult } from '@aztec/stdlib/block';
 import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
 import {
 BlockHeader,
@@ -19,7 +27,7 @@ import {
 } from '@aztec/stdlib/tx';
 
 import { BlockNumberNotSequentialError, InitialBlockNumberNotSequentialError } from '../errors.js';
-import type { L1PublishedData
+import type { L1PublishedData } from '../structs/published.js';
 
 export { TxReceipt, type TxEffect, type TxHash } from '@aztec/stdlib/tx';
 
@@ -52,6 +60,9 @@ export class BlockStore {
 /** Stores l2 block number of the last proven block */
 #lastProvenL2Block: AztecAsyncSingleton<number>;
 
+/** Stores the pending chain validation status */
+#pendingChainValidationStatus: AztecAsyncSingleton<Buffer>;
+
 /** Index mapping a contract's address (as a string) to its location in a block */
 #contractIndex: AztecAsyncMap<string, BlockIndexValue>;
 
@@ -64,6 +75,7 @@ export class BlockStore {
 this.#contractIndex = db.openMap('archiver_contract_index');
 this.#lastSynchedL1Block = db.openSingleton('archiver_last_synched_l1_block');
 this.#lastProvenL2Block = db.openSingleton('archiver_last_proven_l2_block');
+this.#pendingChainValidationStatus = db.openSingleton('archiver_pending_chain_validation_status');
 }
 
 /**
@@ -224,7 +236,10 @@ export class BlockStore {
 }
 }
 
-private async getBlockFromBlockStorage(
+private async getBlockFromBlockStorage(
+blockNumber: number,
+blockStorage: BlockStorage,
+): Promise<PublishedL2Block | undefined> {
 const header = BlockHeader.fromBuffer(blockStorage.header);
 const archive = AppendOnlyTreeSnapshot.fromBuffer(blockStorage.archive);
 const blockHash = blockStorage.blockHash;
@@ -257,7 +272,7 @@ export class BlockStore {
 );
 }
 const attestations = blockStorage.attestations.map(CommitteeAttestation.fromBuffer);
-return { block, l1: blockStorage.l1, attestations };
+return PublishedL2Block.fromFields({ block, l1: blockStorage.l1, attestations });
 }
 
 /**
@@ -361,4 +376,29 @@ export class BlockStore {
 
 return { start, limit };
 }
+
+/**
+* Gets the pending chain validation status.
+* @returns The validation status or undefined if not set.
+*/
+async getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined> {
+const buffer = await this.#pendingChainValidationStatus.getAsync();
+if (!buffer) {
+return undefined;
+}
+return deserializeValidateBlockResult(buffer);
+}
+
+/**
+* Sets the pending chain validation status.
+* @param status - The validation status to store.
+*/
+async setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
+if (status) {
+const buffer = serializeValidateBlockResult(status);
+await this.#pendingChainValidationStatus.set(buffer);
+} else {
+await this.#pendingChainValidationStatus.delete();
+}
+}
 }
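
The `BlockStore` changes above persist the status in a key-value singleton as a serialized `Buffer`, deleting the entry when the status is cleared. A sketch of that get/set-or-delete pattern with an in-memory stand-in for `AztecAsyncSingleton`; JSON is used here purely as a placeholder encoding (it would not round-trip the `bigint` fields), whereas the real code uses `serializeValidateBlockResult` / `deserializeValidateBlockResult`:

```ts
// In-memory stand-in for AztecAsyncSingleton<Buffer>; illustration only.
class MemorySingleton<T> {
  private value: T | undefined;
  getAsync(): Promise<T | undefined> {
    return Promise.resolve(this.value);
  }
  set(value: T): Promise<void> {
    this.value = value;
    return Promise.resolve();
  }
  delete(): Promise<void> {
    this.value = undefined;
    return Promise.resolve();
  }
}

type ValidateBlockResult = { valid: true } | { valid: false; reason: string };

const pendingChainValidationStatus = new MemorySingleton<Buffer>();

// JSON stands in for the real serializer and would not handle epoch/seed bigints.
async function setStatus(status: ValidateBlockResult | undefined): Promise<void> {
  if (status) {
    await pendingChainValidationStatus.set(Buffer.from(JSON.stringify(status)));
  } else {
    // Clearing the status deletes the singleton, so later reads return undefined.
    await pendingChainValidationStatus.delete();
  }
}

async function getStatus(): Promise<ValidateBlockResult | undefined> {
  const buffer = await pendingChainValidationStatus.getAsync();
  return buffer ? (JSON.parse(buffer.toString()) as ValidateBlockResult) : undefined;
}

void setStatus({ valid: false, reason: 'insufficient-attestations' }).then(getStatus).then(console.log);
```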

package/src/archiver/kv_archiver_store/kv_archiver_store.ts
CHANGED
@@ -5,7 +5,7 @@ import { createLogger } from '@aztec/foundation/log';
 import type { AztecAsyncKVStore, CustomRange, StoreSize } from '@aztec/kv-store';
 import { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
-import type { L2Block } from '@aztec/stdlib/block';
+import type { L2Block, ValidateBlockResult } from '@aztec/stdlib/block';
 import type {
 ContractClassPublic,
 ContractDataSource,
@@ -395,4 +395,12 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
 public removeL1ToL2Messages(startIndex: bigint): Promise<void> {
 return this.#messageStore.removeL1ToL2Messages(startIndex);
 }
+
+public getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined> {
+return this.#blockStore.getPendingChainValidationStatus();
+}
+
+public setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
+return this.#blockStore.setPendingChainValidationStatus(status);
+}
 }