@aztec/archiver 3.0.0-nightly.20251214 → 3.0.0-nightly.20251217
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +60 -36
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +366 -180
- package/dest/archiver/archiver_store.d.ts +79 -23
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +1666 -244
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +49 -17
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +320 -83
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +30 -28
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +51 -27
- package/dest/archiver/kv_archiver_store/log_store.d.ts +5 -5
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +39 -15
- package/dest/archiver/l1/bin/retrieve-calldata.js +2 -2
- package/dest/archiver/l1/calldata_retriever.d.ts +17 -3
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -1
- package/dest/archiver/l1/calldata_retriever.js +75 -7
- package/dest/archiver/l1/data_retrieval.d.ts +11 -8
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -1
- package/dest/archiver/l1/data_retrieval.js +30 -17
- package/dest/archiver/structs/published.d.ts +1 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.d.ts +3 -2
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +8 -15
- package/package.json +13 -13
- package/src/archiver/archiver.ts +464 -222
- package/src/archiver/archiver_store.ts +88 -22
- package/src/archiver/archiver_store_test_suite.ts +1689 -226
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/kv_archiver_store/block_store.ts +435 -94
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +63 -39
- package/src/archiver/kv_archiver_store/log_store.ts +62 -25
- package/src/archiver/l1/bin/retrieve-calldata.ts +2 -2
- package/src/archiver/l1/calldata_retriever.ts +116 -6
- package/src/archiver/l1/data_retrieval.ts +34 -13
- package/src/archiver/structs/published.ts +0 -1
- package/src/test/mock_l2_block_source.ts +9 -16
@@ -6,7 +6,8 @@ import { createLogger } from '@aztec/foundation/log';
 import type { AztecAsyncKVStore, CustomRange, StoreSize } from '@aztec/kv-store';
 import { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
-import {
+import { CheckpointedL2Block, L2BlockHash, L2BlockNew, type ValidateBlockResult } from '@aztec/stdlib/block';
+import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
 import type {
   ContractClassPublic,
   ContractDataSource,
@@ -24,14 +25,13 @@ import { join } from 'path';

 import type { ArchiverDataStore, ArchiverL1SynchPoint } from '../archiver_store.js';
 import type { InboxMessage } from '../structs/inbox_message.js';
-import type
-import { BlockStore } from './block_store.js';
+import { BlockStore, type CheckpointData } from './block_store.js';
 import { ContractClassStore } from './contract_class_store.js';
 import { ContractInstanceStore } from './contract_instance_store.js';
 import { LogStore } from './log_store.js';
 import { MessageStore } from './message_store.js';

-export const ARCHIVER_DB_VERSION =
+export const ARCHIVER_DB_VERSION = 5;
 export const MAX_FUNCTION_SIGNATURES = 1000;
 export const MAX_FUNCTION_NAME_LEN = 256;

@@ -67,7 +67,7 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
   }

   public getBlockNumber(): Promise<BlockNumber> {
-    return this.
+    return this.#blockStore.getLatestL2BlockNumber();
   }

   public async getContract(
@@ -186,42 +186,52 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
    * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks:
+  addBlocks(blocks: L2BlockNew[], opts: { force?: boolean; checkpointNumber?: number } = {}): Promise<boolean> {
     return this.#blockStore.addBlocks(blocks, opts);
   }

+  getRangeOfCheckpoints(from: CheckpointNumber, limit: number): Promise<CheckpointData[]> {
+    return this.#blockStore.getRangeOfCheckpoints(from, limit);
+  }
+  getLatestBlockNumber(): Promise<BlockNumber> {
+    return this.#blockStore.getLatestBlockNumber();
+  }
+
   /**
-   * Unwinds
+   * Unwinds checkpoints from the database
    * @param from - The tip of the chain, passed for verification purposes,
    * ensuring that we don't end up deleting something we did not intend
-   * @param
+   * @param checkpointsToUnwind - The number of checkpoints we are to unwind
    * @returns True if the operation is successful
    */
-
-    return this.#blockStore.
+  unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
+    return this.#blockStore.unwindCheckpoints(from, checkpointsToUnwind);
   }

-
-    return this.#blockStore.
+  addCheckpoints(checkpoints: PublishedCheckpoint[]): Promise<boolean> {
+    return this.#blockStore.addCheckpoints(checkpoints);
   }

-
+  getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
+    return this.#blockStore.getCheckpointedBlock(number);
+  }
+  getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
+    return this.#blockStore.getCheckpointedBlockByHash(blockHash);
+  }
+  getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
+    return this.#blockStore.getCheckpointedBlockByArchive(archive);
+  }
+  getBlock(number: BlockNumber): Promise<L2BlockNew | undefined> {
+    return this.#blockStore.getBlock(number);
+  }
+  getBlockByHash(blockHash: Fr): Promise<L2BlockNew | undefined> {
     return this.#blockStore.getBlockByHash(L2BlockHash.fromField(blockHash));
   }
-
-  getPublishedBlockByArchive(archive: Fr): Promise<PublishedL2Block | undefined> {
+  getBlockByArchive(archive: Fr): Promise<L2BlockNew | undefined> {
     return this.#blockStore.getBlockByArchive(archive);
   }
-
-
-   * Gets up to `limit` amount of L2 blocks starting from `from`.
-   *
-   * @param start - Number of the first block to return (inclusive).
-   * @param limit - The number of blocks to return.
-   * @returns The requested L2 blocks
-   */
-  getPublishedBlocks(start: BlockNumber, limit: number): Promise<PublishedL2Block[]> {
-    return toArray(this.#blockStore.getBlocks(start, limit));
+  getBlocks(from: BlockNumber, limit: BlockNumber): Promise<L2BlockNew[]> {
+    return toArray(this.#blockStore.getBlocks(from, limit));
   }

   /**
@@ -266,11 +276,11 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
    * @param blocks - The blocks for which to add the logs.
    * @returns True if the operation is successful.
    */
-  addLogs(blocks:
+  addLogs(blocks: L2BlockNew[]): Promise<boolean> {
     return this.#logStore.addLogs(blocks);
   }

-  deleteLogs(blocks:
+  deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
     return this.#logStore.deleteLogs(blocks);
   }

@@ -349,20 +359,12 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
     }
   }

-
-
-   * @returns The number of the latest L2 block processed.
-   */
-  getSynchedL2BlockNumber(): Promise<BlockNumber> {
-    return this.#blockStore.getSynchedL2BlockNumber();
-  }
-
-  getProvenL2BlockNumber(): Promise<BlockNumber> {
-    return this.#blockStore.getProvenL2BlockNumber();
+  getProvenCheckpointNumber(): Promise<CheckpointNumber> {
+    return this.#blockStore.getProvenCheckpointNumber();
   }

-  async
-    await this.#blockStore.
+  async setProvenCheckpointNumber(checkpointNumber: CheckpointNumber) {
+    await this.#blockStore.setProvenCheckpointNumber(checkpointNumber);
   }

   async setBlockSynchedL1BlockNumber(l1BlockNumber: bigint) {
@@ -373,6 +375,10 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
     await this.#messageStore.setSynchedL1Block(l1Block);
   }

+  getProvenBlockNumber(): Promise<BlockNumber> {
+    return this.#blockStore.getProvenBlockNumber();
+  }
+
   /**
    * Gets the last L1 block number processed by the archiver
    */
@@ -410,4 +416,22 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc
   public setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
     return this.#blockStore.setPendingChainValidationStatus(status);
   }
+
+  public getCheckpointedL2BlockNumber(): Promise<BlockNumber> {
+    return this.#blockStore.getCheckpointedL2BlockNumber();
+  }
+  public getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
+    return this.#blockStore.getLatestCheckpointNumber();
+  }
+  async setCheckpointSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
+    await this.#blockStore.setSynchedL1BlockNumber(l1BlockNumber);
+  }
+
+  getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise<L2BlockNew[] | undefined> {
+    return this.#blockStore.getBlocksForCheckpoint(checkpointNumber);
+  }
+
+  getCheckpointData(checkpointNumber: CheckpointNumber): Promise<CheckpointData | undefined> {
+    return this.#blockStore.getCheckpointData(checkpointNumber);
+  }
 }
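The KVArchiverDataStore hunks above replace the block-centric accessors (getPublishedBlocks, getSynchedL2BlockNumber, getProvenL2BlockNumber) with a checkpoint-first surface. A minimal sketch of how a consumer might read the new tips; the store is structurally typed to just the methods shown in this diff, and plain numbers stand in for the branded BlockNumber/CheckpointNumber types:

// Sketch only: `CheckpointReads` mirrors a subset of the methods added above; the real
// store is a KVArchiverDataStore and uses branded CheckpointNumber/BlockNumber types.
interface CheckpointReads {
  getProvenCheckpointNumber(): Promise<number>;
  getSynchedCheckpointNumber(): Promise<number>;
  getLatestBlockNumber(): Promise<number>;
  getBlocksForCheckpoint(checkpointNumber: number): Promise<unknown[] | undefined>;
}

// Read the proven and synched checkpoint tips, then count the blocks belonging to the
// proven checkpoint (undefined means that checkpoint is not in the store).
async function summarizeCheckpointTips(store: CheckpointReads) {
  const proven = await store.getProvenCheckpointNumber();
  const synched = await store.getSynchedCheckpointNumber();
  const latestBlock = await store.getLatestBlockNumber();
  const provenBlocks = (await store.getBlocksForCheckpoint(proven)) ?? [];
  return { proven, synched, latestBlock, provenBlockCount: provenBlocks.length };
}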
@@ -1,10 +1,10 @@
 import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/constants';
 import { BlockNumber } from '@aztec/foundation/branded-types';
-import
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
 import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
 import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
-import
+import { L2BlockHash, L2BlockNew } from '@aztec/stdlib/block';
 import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
 import {
   ContractClassLog,
@@ -42,7 +42,8 @@ export class LogStore {
     this.#logsMaxPageSize = logsMaxPageSize;
   }

-  #extractTaggedLogs(block:
+  async #extractTaggedLogs(block: L2BlockNew) {
+    const blockHash = L2BlockHash.fromField(await block.hash());
     const taggedLogs = new Map<string, Buffer[]>();
     const dataStartIndexForBlock =
       block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
@@ -56,7 +57,9 @@
         this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);

         const currentLogs = taggedLogs.get(tag.toString()) ?? [];
-        currentLogs.push(
+        currentLogs.push(
+          new TxScopedL2Log(txHash, dataStartIndexForTx, logIndex, block.number, blockHash, log).toBuffer(),
+        );
         taggedLogs.set(tag.toString(), currentLogs);
       });

@@ -65,7 +68,9 @@
         this.#log.debug(`Found public log with tag ${tag.toString()} in block ${block.number}`);

         const currentLogs = taggedLogs.get(tag.toString()) ?? [];
-        currentLogs.push(
+        currentLogs.push(
+          new TxScopedL2Log(txHash, dataStartIndexForTx, logIndex, block.number, blockHash, log).toBuffer(),
+        );
         taggedLogs.set(tag.toString(), currentLogs);
       });
     });
@@ -77,16 +82,15 @@
    * @param blocks - The blocks for which to add the logs.
    * @returns True if the operation is successful.
    */
-  addLogs(blocks:
-    const
-
-
-
-
-
-
-
-    }, new Map());
+  async addLogs(blocks: L2BlockNew[]): Promise<boolean> {
+    const taggedLogsInBlocks = await Promise.all(blocks.map(block => this.#extractTaggedLogs(block)));
+    const taggedLogsToAdd = taggedLogsInBlocks.reduce((acc, taggedLogs) => {
+      for (const [tag, logs] of taggedLogs.entries()) {
+        const currentLogs = acc.get(tag) ?? [];
+        acc.set(tag, currentLogs.concat(logs));
+      }
+      return acc;
+    }, new Map<string, Buffer[]>());
     const tagsToUpdate = Array.from(taggedLogsToAdd.keys());

     return this.db.transactionAsync(async () => {
@@ -102,6 +106,8 @@
         }
       });
       for (const block of blocks) {
+        const blockHash = await block.hash();
+
         const tagsInBlock = [];
         for (const [tag, logs] of taggedLogsToAdd.entries()) {
           await this.#logsByTag.set(tag, logs);
@@ -129,15 +135,32 @@
           )
           .flat();

-        await this.#publicLogsByBlock.set(block.number,
-        await this.#contractClassLogsByBlock.set(
+        await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
+        await this.#contractClassLogsByBlock.set(
+          block.number,
+          this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
+        );
       }

       return true;
     });
   }

-
+  #packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
+    return Buffer.concat([blockHash.toBuffer(), ...data]);
+  }
+
+  #unpackBlockHash(reader: BufferReader): L2BlockHash {
+    const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
+
+    if (!blockHash) {
+      throw new Error('Failed to read block hash from log entry buffer');
+    }
+
+    return L2BlockHash.fromField(blockHash);
+  }
+
+  deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
     return this.db.transactionAsync(async () => {
       const tagsToDelete = (
         await Promise.all(
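The LogStore changes above alter the per-block log buffer layout: #packWithBlockHash prepends the block hash when writing #publicLogsByBlock and #contractClassLogsByBlock, and every read path calls #unpackBlockHash before walking the indexOfTx/numLogsInTx entries. A small sketch of that framing with plain Node Buffers, assuming the 32-byte field encoding used by Fr/L2BlockHash (the helper names here are illustrative, not the package's own):

// Illustration of the new per-block log buffer layout (not the package's own helpers):
// [ 32-byte block hash | indexOfTx/numLogsInTx framed entries as before ]
function packWithBlockHash(blockHash32: Buffer, entries: Buffer[]): Buffer {
  return Buffer.concat([blockHash32, ...entries]);
}

function unpackBlockHash(packed: Buffer): { blockHash: Buffer; rest: Buffer } {
  if (packed.length < 32) {
    // Mirrors the error thrown by #unpackBlockHash when the prefix is missing.
    throw new Error('Failed to read block hash from log entry buffer');
  }
  return { blockHash: packed.subarray(0, 32), rest: packed.subarray(32) };
}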
@@ -207,6 +230,9 @@
     const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
     const publicLogsInBlock: [PublicLog[]] = [[]];
     const reader = new BufferReader(buffer);
+
+    const blockHash = this.#unpackBlockHash(reader);
+
     while (reader.remainingBytes() > 0) {
       const indexOfTx = reader.readNumber();
       const numLogsInTx = reader.readNumber();
@@ -219,7 +245,7 @@
     const txLogs = publicLogsInBlock[txIndex];

     const logs: ExtendedPublicLog[] = [];
-    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);

     return { logs, maxLogsHit };
   }
@@ -242,6 +268,9 @@
     loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
       const publicLogsInBlock: [PublicLog[]] = [[]];
       const reader = new BufferReader(logBuffer);
+
+      const blockHash = this.#unpackBlockHash(reader);
+
       while (reader.remainingBytes() > 0) {
         const indexOfTx = reader.readNumber();
         const numLogsInTx = reader.readNumber();
@@ -252,7 +281,7 @@
       }
       for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
         const txLogs = publicLogsInBlock[txIndex];
-        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
         if (maxLogsHit) {
           this.#log.debug(`Max logs hit at block ${blockNumber}`);
           break loopOverBlocks;
@@ -291,6 +320,8 @@
     const contractClassLogsInBlock: [ContractClassLog[]] = [[]];

     const reader = new BufferReader(contractClassLogsBuffer);
+    const blockHash = this.#unpackBlockHash(reader);
+
     while (reader.remainingBytes() > 0) {
       const indexOfTx = reader.readNumber();
       const numLogsInTx = reader.readNumber();
@@ -303,7 +334,7 @@
     const txLogs = contractClassLogsInBlock[txIndex];

     const logs: ExtendedContractClassLog[] = [];
-    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);

     return { logs, maxLogsHit };
   }
@@ -329,6 +360,7 @@
     })) {
       const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
       const reader = new BufferReader(logBuffer);
+      const blockHash = this.#unpackBlockHash(reader);
       while (reader.remainingBytes() > 0) {
         const indexOfTx = reader.readNumber();
         const numLogsInTx = reader.readNumber();
@@ -339,7 +371,7 @@
       }
       for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
         const txLogs = contractClassLogsInBlock[txIndex];
-        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
         if (maxLogsHit) {
           this.#log.debug(`Max logs hit at block ${blockNumber}`);
           break loopOverBlocks;
@@ -353,9 +385,10 @@
   #accumulateLogs(
     results: (ExtendedContractClassLog | ExtendedPublicLog)[],
     blockNumber: number,
+    blockHash: L2BlockHash,
     txIndex: number,
     txLogs: (ContractClassLog | PublicLog)[],
-    filter: LogFilter,
+    filter: LogFilter = {},
   ): boolean {
     let maxLogsHit = false;
     let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
@@ -363,9 +396,13 @@
       const log = txLogs[logIndex];
       if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
         if (log instanceof ContractClassLog) {
-          results.push(
+          results.push(
+            new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log),
+          );
+        } else if (log instanceof PublicLog) {
+          results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
         } else {
-
+          throw new Error('Unknown log type');
         }

         if (results.length >= this.#logsMaxPageSize) {
@@ -141,8 +141,8 @@ async function main() {
   logger.info('Retrieving block header from rollup transaction...');
   logger.info('');

-  // For this script, we don't have blob hashes, so pass empty
-  const result = await retriever.getCheckpointFromRollupTx(txHash, [], CheckpointNumber(l2BlockNumber));
+  // For this script, we don't have blob hashes or expected hashes, so pass empty arrays/objects
+  const result = await retriever.getCheckpointFromRollupTx(txHash, [], CheckpointNumber(l2BlockNumber), {});

   logger.info(' Successfully retrieved block header!');
   logger.info('');
@@ -13,9 +13,20 @@ import {
   TallySlashingProposerAbi,
 } from '@aztec/l1-artifacts';
 import { CommitteeAttestation } from '@aztec/stdlib/block';
+import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p';
 import { CheckpointHeader } from '@aztec/stdlib/rollup';

-import {
+import {
+  type AbiParameter,
+  type Hex,
+  type Transaction,
+  decodeFunctionData,
+  encodeAbiParameters,
+  hexToBytes,
+  keccak256,
+  multicall3Abi,
+  toFunctionSelector,
+} from 'viem';

 import type { ArchiverInstrumentation } from '../instrumentation.js';
 import { getSuccessfulCallsFromDebug } from './debug_tx.js';
@@ -56,12 +67,17 @@ export class CalldataRetriever {
    * @param txHash - Hash of the tx that published it.
    * @param blobHashes - Blob hashes for the checkpoint.
    * @param checkpointNumber - Checkpoint number.
+   * @param expectedHashes - Optional expected hashes from the CheckpointProposed event for validation
    * @returns Checkpoint header and metadata from the calldata, deserialized
    */
   async getCheckpointFromRollupTx(
     txHash: `0x${string}`,
     blobHashes: Buffer[],
     checkpointNumber: CheckpointNumber,
+    expectedHashes: {
+      attestationsHash?: Hex;
+      payloadDigest?: Hex;
+    },
   ): Promise<{
     checkpointNumber: CheckpointNumber;
     archiveRoot: Fr;
@@ -69,10 +85,14 @@
     attestations: CommitteeAttestation[];
     blockHash: string;
   }> {
-    this.logger.trace(`Fetching checkpoint ${checkpointNumber} from rollup tx ${txHash}
+    this.logger.trace(`Fetching checkpoint ${checkpointNumber} from rollup tx ${txHash}`, {
+      willValidateHashes: !!expectedHashes.attestationsHash || !!expectedHashes.payloadDigest,
+      hasAttestationsHash: !!expectedHashes.attestationsHash,
+      hasPayloadDigest: !!expectedHashes.payloadDigest,
+    });
     const tx = await this.publicClient.getTransaction({ hash: txHash });
     const proposeCalldata = await this.getProposeCallData(tx, checkpointNumber);
-    return this.decodeAndBuildCheckpoint(proposeCalldata, tx.blockHash!, checkpointNumber);
+    return this.decodeAndBuildCheckpoint(proposeCalldata, tx.blockHash!, checkpointNumber, expectedHashes);
   }

   /** Gets rollup propose calldata from a transaction */
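getCheckpointFromRollupTx and decodeAndBuildCheckpoint now accept an expectedHashes object so decoded calldata can be cross-checked against the CheckpointProposed event; both fields are optional, and the retrieve-calldata script above passes {} to skip validation. A hedged sketch of the two call shapes; only the retriever signature comes from this diff, everything else (names, the plain-number checkpoint type) is illustrative:

// Hedged illustration of the call shapes. `retriever` is assumed to be a
// CalldataRetriever; the structural type lists only the signature shown in this diff,
// with plain numbers standing in for the branded CheckpointNumber type.
type Hex = `0x${string}`;

interface CheckpointCalldataReader {
  getCheckpointFromRollupTx(
    txHash: Hex,
    blobHashes: Buffer[],
    checkpointNumber: number,
    expectedHashes: { attestationsHash?: Hex; payloadDigest?: Hex },
  ): Promise<unknown>;
}

async function fetchCheckpoint(
  retriever: CheckpointCalldataReader,
  txHash: Hex,
  checkpointNumber: number,
  eventHashes?: { attestationsHash?: Hex; payloadDigest?: Hex },
) {
  // Without event hashes (e.g. the retrieve-calldata script), pass {} and skip validation.
  // With hashes from the CheckpointProposed event, the retriever recomputes both and
  // throws on a mismatch before returning the decoded checkpoint.
  return retriever.getCheckpointFromRollupTx(txHash, [], checkpointNumber, eventHashes ?? {});
}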
@@ -324,17 +344,59 @@
     return calls[0].input;
   }

+  /**
+   * Extracts the CommitteeAttestations struct definition from RollupAbi.
+   * Finds the _attestations parameter by name in the propose function.
+   * Lazy-loaded to avoid issues during module initialization.
+   */
+  private getCommitteeAttestationsStructDef(): AbiParameter {
+    const proposeFunction = RollupAbi.find(item => item.type === 'function' && item.name === 'propose') as
+      | { type: 'function'; name: string; inputs: readonly AbiParameter[] }
+      | undefined;
+
+    if (!proposeFunction) {
+      throw new Error('propose function not found in RollupAbi');
+    }
+
+    // Find the _attestations parameter by name, not by index
+    const attestationsParam = proposeFunction.inputs.find(param => param.name === '_attestations');
+
+    if (!attestationsParam) {
+      throw new Error('_attestations parameter not found in propose function');
+    }
+
+    if (attestationsParam.type !== 'tuple') {
+      throw new Error(`Expected _attestations parameter to be a tuple, got ${attestationsParam.type}`);
+    }
+
+    // Extract the tuple components (struct fields)
+    const tupleParam = attestationsParam as unknown as {
+      type: 'tuple';
+      components?: readonly AbiParameter[];
+    };
+
+    return {
+      type: 'tuple',
+      components: tupleParam.components || [],
+    } as AbiParameter;
+  }
+
   /**
    * Decodes propose calldata and builds the checkpoint header structure.
    * @param proposeCalldata - The propose function calldata
    * @param blockHash - The L1 block hash containing this transaction
    * @param checkpointNumber - The checkpoint number
+   * @param expectedHashes - Optional expected hashes from the CheckpointProposed event for validation
    * @returns The decoded checkpoint header and metadata
    */
   protected decodeAndBuildCheckpoint(
     proposeCalldata: Hex,
     blockHash: Hex,
     checkpointNumber: CheckpointNumber,
+    expectedHashes: {
+      attestationsHash?: Hex;
+      payloadDigest?: Hex;
+    },
   ): {
     checkpointNumber: CheckpointNumber;
     archiveRoot: Fr;
@@ -365,6 +427,57 @@
     ];

     const attestations = CommitteeAttestation.fromPacked(packedAttestations, this.targetCommitteeSize);
+    const header = CheckpointHeader.fromViem(decodedArgs.header);
+    const archiveRoot = new Fr(Buffer.from(hexToBytes(decodedArgs.archive)));
+
+    // Validate attestationsHash if provided (skip for backwards compatibility with older events)
+    if (expectedHashes.attestationsHash) {
+      // Compute attestationsHash: keccak256(abi.encode(CommitteeAttestations))
+      const computedAttestationsHash = keccak256(
+        encodeAbiParameters([this.getCommitteeAttestationsStructDef()], [packedAttestations]),
+      );
+
+      // Compare as buffers to avoid case-sensitivity and string comparison issues
+      const computedBuffer = Buffer.from(hexToBytes(computedAttestationsHash));
+      const expectedBuffer = Buffer.from(hexToBytes(expectedHashes.attestationsHash));
+
+      if (!computedBuffer.equals(expectedBuffer)) {
+        throw new Error(
+          `Attestations hash mismatch for checkpoint ${checkpointNumber}: ` +
+            `computed=${computedAttestationsHash}, expected=${expectedHashes.attestationsHash}`,
+        );
+      }
+
+      this.logger.trace(`Validated attestationsHash for checkpoint ${checkpointNumber}`, {
+        computedAttestationsHash,
+        expectedAttestationsHash: expectedHashes.attestationsHash,
+      });
+    }
+
+    // Validate payloadDigest if provided (skip for backwards compatibility with older events)
+    if (expectedHashes.payloadDigest) {
+      // Use ConsensusPayload to compute the digest - this ensures we match the exact logic
+      // used by the network for signing and verification
+      const consensusPayload = new ConsensusPayload(header, archiveRoot);
+      const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.blockAttestation);
+      const computedPayloadDigest = keccak256(payloadToSign);
+
+      // Compare as buffers to avoid case-sensitivity and string comparison issues
+      const computedBuffer = Buffer.from(hexToBytes(computedPayloadDigest));
+      const expectedBuffer = Buffer.from(hexToBytes(expectedHashes.payloadDigest));
+
+      if (!computedBuffer.equals(expectedBuffer)) {
+        throw new Error(
+          `Payload digest mismatch for checkpoint ${checkpointNumber}: ` +
+            `computed=${computedPayloadDigest}, expected=${expectedHashes.payloadDigest}`,
+        );
+      }
+
+      this.logger.trace(`Validated payloadDigest for checkpoint ${checkpointNumber}`, {
+        computedPayloadDigest,
+        expectedPayloadDigest: expectedHashes.payloadDigest,
+      });
+    }

     this.logger.trace(`Decoded propose calldata`, {
       checkpointNumber,
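The validation block above recomputes attestationsHash as keccak256 over the ABI-encoded CommitteeAttestations tuple (definition extracted from RollupAbi) and payloadDigest via ConsensusPayload.getPayloadToSign, comparing each against the event value as raw bytes. A generic sketch of that compare-as-buffers pattern using viem; the struct definition and value passed in are placeholders, not the rollup's actual types:

import { encodeAbiParameters, hexToBytes, keccak256, type AbiParameter, type Hex } from 'viem';

// Re-encode a decoded calldata value, hash it, and compare against the hash carried by
// the event. Byte-wise comparison avoids 0xAB vs 0xab casing mismatches in hex strings.
function matchesExpectedHash(structDef: AbiParameter, value: unknown, expected: Hex): boolean {
  const computed = keccak256(encodeAbiParameters([structDef], [value]));
  return Buffer.from(hexToBytes(computed)).equals(Buffer.from(hexToBytes(expected)));
}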
@@ -376,9 +489,6 @@
       targetCommitteeSize: this.targetCommitteeSize,
     });

-    const header = CheckpointHeader.fromViem(decodedArgs.header);
-    const archiveRoot = new Fr(Buffer.from(hexToBytes(decodedArgs.archive)));
-
     return {
       checkpointNumber,
       archiveRoot,