@aztec/archiver 3.0.0-rc.5 → 4.0.0-nightly.20260107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +69 -49
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +777 -214
- package/dest/archiver/archiver_store.d.ts +89 -30
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +1785 -288
- package/dest/archiver/config.d.ts +3 -3
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +2 -2
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +49 -17
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +320 -84
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +33 -37
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +60 -35
- package/dest/archiver/kv_archiver_store/log_store.d.ts +14 -11
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +149 -62
- package/dest/archiver/l1/bin/retrieve-calldata.js +5 -3
- package/dest/archiver/l1/calldata_retriever.d.ts +17 -3
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -1
- package/dest/archiver/l1/calldata_retriever.js +75 -7
- package/dest/archiver/l1/data_retrieval.d.ts +13 -10
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -1
- package/dest/archiver/l1/data_retrieval.js +31 -18
- package/dest/archiver/structs/published.d.ts +1 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/factory.d.ts +1 -1
- package/dest/factory.js +1 -1
- package/dest/test/mock_l2_block_source.d.ts +10 -3
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +16 -15
- package/package.json +13 -13
- package/src/archiver/archiver.ts +509 -260
- package/src/archiver/archiver_store.ts +99 -29
- package/src/archiver/archiver_store_test_suite.ts +1831 -274
- package/src/archiver/config.ts +7 -3
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/kv_archiver_store/block_store.ts +434 -94
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +74 -49
- package/src/archiver/kv_archiver_store/log_store.ts +213 -77
- package/src/archiver/l1/bin/retrieve-calldata.ts +3 -3
- package/src/archiver/l1/calldata_retriever.ts +116 -6
- package/src/archiver/l1/data_retrieval.ts +41 -20
- package/src/archiver/structs/published.ts +0 -1
- package/src/factory.ts +1 -1
- package/src/test/mock_l2_block_source.ts +20 -16
@@ -1,15 +1,21 @@
-import { INITIAL_L2_BLOCK_NUM
+import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
 import { BlockNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
 import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
+import { L2BlockHash } from '@aztec/stdlib/block';
 import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, PublicLog, TxScopedL2Log } from '@aztec/stdlib/logs';
 /**
  * A store for logs
  */ export class LogStore {
     db;
     blockStore;
-
-    #
+    // `tag` --> private logs
+    #privateLogsByTag;
+    // `{contractAddress}_${tag}` --> public logs
+    #publicLogsByContractAndTag;
+    #privateLogKeysByBlock;
+    #publicLogKeysByBlock;
     #publicLogsByBlock;
     #contractClassLogsByBlock;
     #logsMaxPageSize;
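The fields added above key private logs by the siloed tag alone and public logs by contract address plus tag. As a rough illustration of that key scheme (the helper names below are hypothetical, not part of the package):

// Illustrative only: mirrors the key formats used by #privateLogsByTag and
// #publicLogsByContractAndTag in the hunk above.
function privateLogKey(siloedTag: string): string {
  return siloedTag; // `tag` --> private logs
}

function publicLogKey(contractAddress: string, tag: string): string {
  return `${contractAddress}_${tag}`; // `{contractAddress}_${tag}` --> public logs
}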
@@ -18,65 +24,118 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
         this.db = db;
         this.blockStore = blockStore;
         this.#log = createLogger('archiver:log_store');
-        this.#
-        this.#
+        this.#privateLogsByTag = db.openMap('archiver_private_tagged_logs_by_tag');
+        this.#publicLogsByContractAndTag = db.openMap('archiver_public_tagged_logs_by_tag');
+        this.#privateLogKeysByBlock = db.openMap('archiver_private_log_keys_by_block');
+        this.#publicLogKeysByBlock = db.openMap('archiver_public_log_keys_by_block');
         this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
         this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
         this.#logsMaxPageSize = logsMaxPageSize;
     }
-
-
-
-
+    /**
+     * Extracts tagged logs from a single block, grouping them into private and public maps.
+     *
+     * @param block - The L2 block to extract logs from.
+     * @returns An object containing the private and public tagged logs for the block.
+     */ #extractTaggedLogsFromBlock(block) {
+        // SiloedTag (as string) -> array of log buffers.
+        const privateTaggedLogs = new Map();
+        // "{contractAddress}_{tag}" (as string) -> array of log buffers.
+        const publicTaggedLogs = new Map();
+        block.body.txEffects.forEach((txEffect)=>{
             const txHash = txEffect.txHash;
-
-
+            txEffect.privateLogs.forEach((log)=>{
+                // Private logs use SiloedTag (already siloed by kernel)
                 const tag = log.fields[0];
                 this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);
-                const currentLogs =
-                currentLogs.push(new TxScopedL2Log(txHash,
-
+                const currentLogs = privateTaggedLogs.get(tag.toString()) ?? [];
+                currentLogs.push(new TxScopedL2Log(txHash, block.number, block.timestamp, log.getEmittedFields(), txEffect.noteHashes, txEffect.nullifiers[0]).toBuffer());
+                privateTaggedLogs.set(tag.toString(), currentLogs);
             });
-            txEffect.publicLogs.forEach((log
+            txEffect.publicLogs.forEach((log)=>{
+                // Public logs use Tag directly (not siloed) and are stored with contract address
                 const tag = log.fields[0];
-
-                const
-
-
+                const contractAddress = log.contractAddress;
+                const key = `${contractAddress.toString()}_${tag.toString()}`;
+                this.#log.debug(`Found public log with tag ${tag.toString()} from contract ${contractAddress.toString()} in block ${block.number}`);
+                const currentLogs = publicTaggedLogs.get(key) ?? [];
+                currentLogs.push(new TxScopedL2Log(txHash, block.number, block.timestamp, log.getEmittedFields(), txEffect.noteHashes, txEffect.nullifiers[0]).toBuffer());
+                publicTaggedLogs.set(key, currentLogs);
             });
         });
-        return
+        return {
+            privateTaggedLogs,
+            publicTaggedLogs
+        };
     }
     /**
-     *
-     * @param blocks - The blocks
-     * @returns
-
-
-
+     * Extracts and aggregates tagged logs from a list of blocks.
+     * @param blocks - The blocks to extract logs from.
+     * @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
+     * "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
+     */ #extractTaggedLogs(blocks) {
+        const taggedLogsInBlocks = blocks.map((block)=>this.#extractTaggedLogsFromBlock(block));
+        // Now we merge the maps from each block into a single map.
+        const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs })=>{
+            for (const [tag, logs] of privateTaggedLogs.entries()){
                 const currentLogs = acc.get(tag) ?? [];
                 acc.set(tag, currentLogs.concat(logs));
             }
             return acc;
         }, new Map());
-        const
+        const publicTaggedLogs = taggedLogsInBlocks.reduce((acc, { publicTaggedLogs })=>{
+            for (const [key, logs] of publicTaggedLogs.entries()){
+                const currentLogs = acc.get(key) ?? [];
+                acc.set(key, currentLogs.concat(logs));
+            }
+            return acc;
+        }, new Map());
+        return {
+            privateTaggedLogs,
+            publicTaggedLogs
+        };
+    }
+    /**
+     * Append new logs to the store's list.
+     * @param blocks - The blocks for which to add the logs.
+     * @returns True if the operation is successful.
+     */ addLogs(blocks) {
+        const { privateTaggedLogs, publicTaggedLogs } = this.#extractTaggedLogs(blocks);
+        const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());
+        const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
         return this.db.transactionAsync(async ()=>{
-            const
-                tag,
-                logBuffers: await this.#
+            const currentPrivateTaggedLogs = await Promise.all(keysOfPrivateLogsToUpdate.map(async (key)=>({
+                    tag: key,
+                    logBuffers: await this.#privateLogsByTag.getAsync(key)
+                })));
+            currentPrivateTaggedLogs.forEach((taggedLogBuffer)=>{
+                if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
+                    privateTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(privateTaggedLogs.get(taggedLogBuffer.tag)));
+                }
+            });
+            const currentPublicTaggedLogs = await Promise.all(keysOfPublicLogsToUpdate.map(async (key)=>({
+                    key,
+                    logBuffers: await this.#publicLogsByContractAndTag.getAsync(key)
                 })));
-
+            currentPublicTaggedLogs.forEach((taggedLogBuffer)=>{
                 if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
-
+                    publicTaggedLogs.set(taggedLogBuffer.key, taggedLogBuffer.logBuffers.concat(publicTaggedLogs.get(taggedLogBuffer.key)));
                 }
             });
             for (const block of blocks){
-                const
-
-
-
+                const blockHash = await block.hash();
+                const privateTagsInBlock = [];
+                for (const [tag, logs] of privateTaggedLogs.entries()){
+                    await this.#privateLogsByTag.set(tag, logs);
+                    privateTagsInBlock.push(tag);
                 }
-                await this.#
+                await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
+                const publicKeysInBlock = [];
+                for (const [key, logs] of publicTaggedLogs.entries()){
+                    await this.#publicLogsByContractAndTag.set(key, logs);
+                    publicKeysInBlock.push(key);
+                }
+                await this.#publicLogKeysByBlock.set(block.number, publicKeysInBlock);
                 const publicLogsInBlock = block.body.txEffects.map((txEffect, txIndex)=>[
                         numToUInt32BE(txIndex),
                         numToUInt32BE(txEffect.publicLogs.length),
@@ -87,38 +146,60 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
                         numToUInt32BE(txEffect.contractClassLogs.length),
                         txEffect.contractClassLogs.map((log)=>log.toBuffer())
                     ].flat()).flat();
-                await this.#publicLogsByBlock.set(block.number,
-                await this.#contractClassLogsByBlock.set(block.number,
+                await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
+                await this.#contractClassLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, contractClassLogsInBlock));
             }
             return true;
         });
     }
+    #packWithBlockHash(blockHash, data) {
+        return Buffer.concat([
+            blockHash.toBuffer(),
+            ...data
+        ]);
+    }
+    #unpackBlockHash(reader) {
+        const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
+        if (!blockHash) {
+            throw new Error('Failed to read block hash from log entry buffer');
+        }
+        return L2BlockHash.fromField(blockHash);
+    }
     deleteLogs(blocks) {
         return this.db.transactionAsync(async ()=>{
-
-
-
-
+            await Promise.all(blocks.map(async (block)=>{
+                // Delete private logs
+                const privateKeys = await this.#privateLogKeysByBlock.getAsync(block.number) ?? [];
+                await Promise.all(privateKeys.map((tag)=>this.#privateLogsByTag.delete(tag)));
+                // Delete public logs
+                const publicKeys = await this.#publicLogKeysByBlock.getAsync(block.number) ?? [];
+                await Promise.all(publicKeys.map((key)=>this.#publicLogsByContractAndTag.delete(key)));
+            }));
             await Promise.all(blocks.map((block)=>Promise.all([
                     this.#publicLogsByBlock.delete(block.number),
-                    this.#
+                    this.#privateLogKeysByBlock.delete(block.number),
+                    this.#publicLogKeysByBlock.delete(block.number),
                     this.#contractClassLogsByBlock.delete(block.number)
                 ])));
-            await Promise.all(tagsToDelete.map((tag)=>this.#logsByTag.delete(tag.toString())));
             return true;
         });
     }
     /**
-     * Gets all logs that match any of the
-     *
-
-
-
-
-
-
-
-
+     * Gets all private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
+     * array implies no logs match that tag.
+     */ async getPrivateLogsByTags(tags) {
+        const logs = await Promise.all(tags.map((tag)=>this.#privateLogsByTag.getAsync(tag.toString())));
+        return logs.map((logBuffers)=>logBuffers?.map((logBuffer)=>TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
+    }
+    /**
+     * Gets all public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
+     * logs is returned. An empty array implies no logs match that tag.
+     */ async getPublicLogsByTagsFromContract(contractAddress, tags) {
+        const logs = await Promise.all(tags.map((tag)=>{
+            const key = `${contractAddress.toString()}_${tag.value.toString()}`;
+            return this.#publicLogsByContractAndTag.getAsync(key);
+        }));
+        return logs.map((logBuffers)=>logBuffers?.map((logBuffer)=>TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
     }
     /**
      * Gets public logs based on the provided filter.
@@ -149,6 +230,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
             []
         ];
         const reader = new BufferReader(buffer);
+        const blockHash = this.#unpackBlockHash(reader);
         while(reader.remainingBytes() > 0){
             const indexOfTx = reader.readNumber();
             const numLogsInTx = reader.readNumber();
@@ -159,7 +241,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
         }
         const txLogs = publicLogsInBlock[txIndex];
         const logs = [];
-        const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
         return {
             logs,
             maxLogsHit
@@ -184,6 +266,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
                 []
             ];
             const reader = new BufferReader(logBuffer);
+            const blockHash = this.#unpackBlockHash(reader);
             while(reader.remainingBytes() > 0){
                 const indexOfTx = reader.readNumber();
                 const numLogsInTx = reader.readNumber();
@@ -194,7 +277,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
             }
             for(let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++){
                 const txLogs = publicLogsInBlock[txIndex];
-                maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+                maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
                 if (maxLogsHit) {
                     this.#log.debug(`Max logs hit at block ${blockNumber}`);
                     break loopOverBlocks;
@@ -235,6 +318,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
             []
         ];
         const reader = new BufferReader(contractClassLogsBuffer);
+        const blockHash = this.#unpackBlockHash(reader);
         while(reader.remainingBytes() > 0){
             const indexOfTx = reader.readNumber();
             const numLogsInTx = reader.readNumber();
@@ -245,7 +329,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
         }
         const txLogs = contractClassLogsInBlock[txIndex];
         const logs = [];
-        const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
         return {
             logs,
             maxLogsHit
@@ -270,6 +354,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
                 []
             ];
             const reader = new BufferReader(logBuffer);
+            const blockHash = this.#unpackBlockHash(reader);
             while(reader.remainingBytes() > 0){
                 const indexOfTx = reader.readNumber();
                 const numLogsInTx = reader.readNumber();
@@ -280,7 +365,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
             }
             for(let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++){
                 const txLogs = contractClassLogsInBlock[txIndex];
-                maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+                maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
                 if (maxLogsHit) {
                     this.#log.debug(`Max logs hit at block ${blockNumber}`);
                     break loopOverBlocks;
@@ -292,16 +377,18 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
             maxLogsHit
         };
     }
-    #accumulateLogs(results, blockNumber, txIndex, txLogs, filter) {
+    #accumulateLogs(results, blockNumber, blockHash, txIndex, txLogs, filter = {}) {
         let maxLogsHit = false;
         let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
         for(; logIndex < txLogs.length; logIndex++){
             const log = txLogs[logIndex];
             if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
                 if (log instanceof ContractClassLog) {
-                    results.push(new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), txIndex, logIndex), log));
+                    results.push(new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
+                } else if (log instanceof PublicLog) {
+                    results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
                 } else {
-
+                    throw new Error('Unknown log type');
                 }
                 if (results.length >= this.#logsMaxPageSize) {
                     maxLogsHit = true;
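Within log_store.js, the new #packWithBlockHash / #unpackBlockHash helpers shown above prefix every per-block log buffer with the block hash before it is written, and every reader consumes that hash first. A minimal standalone sketch of the framing, using plain Node Buffers in place of the package's Fr and L2BlockHash types (names are illustrative):

// Hypothetical helpers mirroring the pack/unpack pattern above:
// each stored entry is the 32-byte block hash followed by the serialized log data.
function packWithBlockHash(blockHash: Buffer, data: Buffer[]): Buffer {
  return Buffer.concat([blockHash, ...data]);
}

function unpackBlockHash(entry: Buffer): { blockHash: Buffer; rest: Buffer } {
  if (entry.length < 32) {
    throw new Error('Failed to read block hash from log entry buffer');
  }
  return { blockHash: entry.subarray(0, 32), rest: entry.subarray(32) };
}

// Round trip: readers take the block hash off the front, then parse the per-tx log payload.
const entry = packWithBlockHash(Buffer.alloc(32, 1), [Buffer.from('log-bytes')]);
const { blockHash, rest } = unpackBlockHash(entry);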
@@ -61,7 +61,9 @@ async function main() {
     // Create viem public client
     const publicClient = createPublicClient({
         chain: mainnet,
-        transport: http(rpcUrl
+        transport: http(rpcUrl, {
+            batch: false
+        })
     });
     logger.info('Fetching transaction...');
     const tx = await publicClient.getTransaction({
@@ -111,8 +113,8 @@ async function main() {
     logger.info('');
     logger.info('Retrieving block header from rollup transaction...');
     logger.info('');
-    // For this script, we don't have blob hashes, so pass empty
-    const result = await retriever.getCheckpointFromRollupTx(txHash, [], CheckpointNumber(l2BlockNumber));
+    // For this script, we don't have blob hashes or expected hashes, so pass empty arrays/objects
+    const result = await retriever.getCheckpointFromRollupTx(txHash, [], CheckpointNumber(l2BlockNumber), {});
     logger.info(' Successfully retrieved block header!');
     logger.info('');
     logger.info('Block Header Details:');
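The retrieve-calldata script above now builds its viem client with request batching disabled. A self-contained sketch of that configuration, assuming standard viem usage and a placeholder RPC URL:

import { createPublicClient, http } from 'viem';
import { mainnet } from 'viem/chains';

// Request batching is turned off on the HTTP transport, matching the diff above.
const publicClient = createPublicClient({
  chain: mainnet,
  transport: http('https://example-rpc.invalid', { batch: false }),
});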
@@ -32,9 +32,13 @@ export declare class CalldataRetriever {
      * @param txHash - Hash of the tx that published it.
      * @param blobHashes - Blob hashes for the checkpoint.
      * @param checkpointNumber - Checkpoint number.
+     * @param expectedHashes - Optional expected hashes from the CheckpointProposed event for validation
      * @returns Checkpoint header and metadata from the calldata, deserialized
      */
-    getCheckpointFromRollupTx(txHash: `0x${string}`, blobHashes: Buffer[], checkpointNumber: CheckpointNumber
+    getCheckpointFromRollupTx(txHash: `0x${string}`, blobHashes: Buffer[], checkpointNumber: CheckpointNumber, expectedHashes: {
+        attestationsHash?: Hex;
+        payloadDigest?: Hex;
+    }): Promise<{
         checkpointNumber: CheckpointNumber;
         archiveRoot: Fr;
         header: CheckpointHeader;
@@ -80,14 +84,24 @@ export declare class CalldataRetriever {
      * @returns The propose calldata from the successful call
      */
     protected extractCalldataViaTrace(txHash: Hex): Promise<Hex>;
+    /**
+     * Extracts the CommitteeAttestations struct definition from RollupAbi.
+     * Finds the _attestations parameter by name in the propose function.
+     * Lazy-loaded to avoid issues during module initialization.
+     */
+    private getCommitteeAttestationsStructDef;
     /**
      * Decodes propose calldata and builds the checkpoint header structure.
      * @param proposeCalldata - The propose function calldata
      * @param blockHash - The L1 block hash containing this transaction
      * @param checkpointNumber - The checkpoint number
+     * @param expectedHashes - Optional expected hashes from the CheckpointProposed event for validation
      * @returns The decoded checkpoint header and metadata
      */
-    protected decodeAndBuildCheckpoint(proposeCalldata: Hex, blockHash: Hex, checkpointNumber: CheckpointNumber
+    protected decodeAndBuildCheckpoint(proposeCalldata: Hex, blockHash: Hex, checkpointNumber: CheckpointNumber, expectedHashes: {
+        attestationsHash?: Hex;
+        payloadDigest?: Hex;
+    }): {
         checkpointNumber: CheckpointNumber;
         archiveRoot: Fr;
         header: CheckpointHeader;
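Per the widened declaration above, getCheckpointFromRollupTx now takes a fourth expectedHashes argument, and passing an empty object skips validation (as the retrieve-calldata script does). A hedged usage sketch, with the retriever reduced to a structural type and CheckpointNumber simplified to a plain number:

type Hex = `0x${string}`;

interface HasGetCheckpoint {
  getCheckpointFromRollupTx(
    txHash: Hex,
    blobHashes: Buffer[],
    checkpointNumber: number, // simplified stand-in for CheckpointNumber
    expectedHashes: { attestationsHash?: Hex; payloadDigest?: Hex },
  ): Promise<unknown>;
}

async function fetchCheckpoint(
  retriever: HasGetCheckpoint,
  txHash: Hex,
  blobHashes: Buffer[],
  checkpointNumber: number,
  event?: { attestationsHash?: Hex; payloadDigest?: Hex }, // hypothetical decoded CheckpointProposed event fields
) {
  // Pass {} when the proposing event predates these fields; validation is then skipped.
  return retriever.getCheckpointFromRollupTx(txHash, blobHashes, checkpointNumber, event ?? {});
}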
@@ -95,4 +109,4 @@
         blockHash: string;
     };
 }
-//# sourceMappingURL=data:application/json;base64,
+
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2FsbGRhdGFfcmV0cmlldmVyLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvYXJjaGl2ZXIvbDEvY2FsbGRhdGFfcmV0cmlldmVyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sS0FBSyxFQUFFLGdCQUFnQixFQUFFLHFCQUFxQixFQUFFLE1BQU0sdUJBQXVCLENBQUM7QUFDckYsT0FBTyxFQUFFLGdCQUFnQixFQUFFLE1BQU0saUNBQWlDLENBQUM7QUFDbkUsT0FBTyxFQUFFLEVBQUUsRUFBRSxNQUFNLGdDQUFnQyxDQUFDO0FBQ3BELE9BQU8sRUFBRSxVQUFVLEVBQUUsTUFBTSwrQkFBK0IsQ0FBQztBQUUzRCxPQUFPLEtBQUssRUFBRSxNQUFNLEVBQUUsTUFBTSx1QkFBdUIsQ0FBQztBQVFwRCxPQUFPLEVBQUUsb0JBQW9CLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQztBQUUzRCxPQUFPLEVBQUUsZ0JBQWdCLEVBQUUsTUFBTSxzQkFBc0IsQ0FBQztBQUV4RCxPQUFPLEVBRUwsS0FBSyxHQUFHLEVBQ1IsS0FBSyxXQUFXLEVBT2pCLE1BQU0sTUFBTSxDQUFDO0FBRWQsT0FBTyxLQUFLLEVBQUUsdUJBQXVCLEVBQUUsTUFBTSx1QkFBdUIsQ0FBQztBQU1yRTs7O0dBR0c7QUFDSCxxQkFBYSxpQkFBaUI7SUFPMUIsT0FBTyxDQUFDLFFBQVEsQ0FBQyxZQUFZO0lBQzdCLE9BQU8sQ0FBQyxRQUFRLENBQUMsV0FBVztJQUM1QixPQUFPLENBQUMsUUFBUSxDQUFDLG1CQUFtQjtJQUNwQyxPQUFPLENBQUMsUUFBUSxDQUFDLGVBQWU7SUFDaEMsT0FBTyxDQUFDLFFBQVEsQ0FBQyxNQUFNO0lBVnpCLHVEQUF1RDtJQUN2RCxPQUFPLENBQUMsUUFBUSxDQUFDLGtCQUFrQixDQUFzQjtJQUV6RCxPQUFPLENBQUMsUUFBUSxDQUFDLGFBQWEsQ0FBYTtJQUUzQyxZQUNtQixZQUFZLEVBQUUsZ0JBQWdCLEVBQzlCLFdBQVcsRUFBRSxxQkFBcUIsRUFDbEMsbUJBQW1CLEVBQUUsTUFBTSxFQUMzQixlQUFlLEVBQUUsdUJBQXVCLEdBQUcsU0FBUyxFQUNwRCxNQUFNLEVBQUUsTUFBTSxFQUMvQixpQkFBaUIsRUFBRTtRQUNqQixhQUFhLEVBQUUsVUFBVSxDQUFDO1FBQzFCLHlCQUF5QixFQUFFLFVBQVUsQ0FBQztRQUN0Qyx1QkFBdUIsRUFBRSxVQUFVLENBQUM7UUFDcEMsbUJBQW1CLENBQUMsRUFBRSxVQUFVLENBQUM7S0FDbEMsRUFJRjtJQUVEOzs7Ozs7OztPQVFHO0lBQ0cseUJBQXlCLENBQzdCLE1BQU0sRUFBRSxLQUFLLE1BQU0sRUFBRSxFQUNyQixVQUFVLEVBQUUsTUFBTSxFQUFFLEVBQ3BCLGdCQUFnQixFQUFFLGdCQUFnQixFQUNsQyxjQUFjLEVBQUU7UUFDZCxnQkFBZ0IsQ0FBQyxFQUFFLEdBQUcsQ0FBQztRQUN2QixhQUFhLENBQUMsRUFBRSxHQUFHLENBQUM7S0FDckIsR0FDQSxPQUFPLENBQUM7UUFDVCxnQkFBZ0IsRUFBRSxnQkFBZ0IsQ0FBQztRQUNuQyxXQUFXLEVBQUUsRUFBRSxDQUFDO1FBQ2hCLE1BQU0sRUFBRSxnQkFBZ0IsQ0FBQztRQUN6QixZQUFZLEVBQUUsb0JBQW9CLEVBQUUsQ0FBQztRQUNyQyxTQUFTLEVBQUUsTUFBTSxDQUFDO0tBQ25CLENBQUMsQ0FTRDtJQUVELHNEQUFzRDtJQUN0RCxVQUFnQixrQkFBa0IsQ0FBQyxFQUFFLEVBQUUsV0FBVyxFQUFFLGdCQUFnQixFQUFFLGdCQUFnQixHQUFHLE9BQU8sQ0FBQyxHQUFHLENBQUMsQ0ErQnBHO0lBRUQ7Ozs7O09BS0c7SUFDSCxVQUFnQixzQkFBc0IsQ0FBQyxFQUFFLEVBQUUsV0FBVyxHQUFHLE9BQU8sQ0FBQyxHQUFHLEdBQUcsU0FBUyxDQUFDLENBNEJoRjtJQUVEOzs7OztPQUtHO0lBQ0gsU0FBUyxDQUFDLG1CQUFtQixDQUFDLEVBQUUsRUFBRTtRQUFFLEVBQUUsRUFBRSxHQUFHLEdBQUcsSUFBSSxHQUFHLFNBQVMsQ0FBQztRQUFDLEtBQUssRUFBRSxHQUFHLENBQUM7UUFBQyxJQUFJLEVBQUUsR0FBRyxDQUFBO0tBQUUsR0FBRyxHQUFHLEdBQUcsU0FBUyxDQXVGeEc7SUFFRDs7Ozs7T0FLRztJQUNILFNBQVMsQ0FBQyxzQkFBc0IsQ0FBQyxFQUFFLEVBQUU7UUFBRSxFQUFFLEVBQUUsR0FBRyxHQUFHLElBQUksR0FBRyxTQUFTLENBQUM7UUFBQyxLQUFLLEVBQUUsR0FBRyxDQUFDO1FBQUMsSUFBSSxFQUFFLEdBQUcsQ0FBQTtLQUFFLEdBQUcsR0FBRyxHQUFHLFNBQVMsQ0EwQjNHO0lBRUQ7Ozs7OztPQU1HO0lBQ0gsVUFBZ0IsdUJBQXVCLENBQUMsTUFBTSxFQUFFLEdBQUcsR0FBRyxPQUFPLENBQUMsR0FBRyxDQUFDLENBeUNqRTtJQUVEOzs7O09BSUc7SUFDSCxPQUFPLENBQUMsaUNBQWlDO0lBZ0N6Qzs7Ozs7OztPQU9HO0lBQ0gsU0FBUyxDQUFDLHdCQUF3QixDQUNoQyxlQUFlLEVBQUUsR0FBRyxFQUNwQixTQUFTLEVBQUUsR0FBRyxFQUNkLGdCQUFnQixFQUFFLGdCQUFnQixFQUNsQyxjQUFjLEVBQUU7UUFDZCxnQkFBZ0IsQ0FBQyxFQUFFLEdBQUcsQ0FBQztRQUN2QixhQUFhLENBQUMsRUFBRSxHQUFHLENBQUM7S0FDckIsR0FDQTtRQUNELGdCQUFnQixFQUFFLGdCQUFnQixDQUFDO1FBQ25DLFdBQVcsRUFBRSxFQUFFLENBQUM7UUFDaEIsTUFBTSxFQUFFLGdCQUFnQixDQUFDO1FBQ3pCLFlBQVksRUFBRSxvQkFBb0IsRUFBRSxDQUFDO1FBQ3JDLFNBQVMsRUFBRSxNQUFNLENBQUM7S0FDbkIsQ0E2RkE7Q0FDRiJ9
@@ -1 +1 @@
-
{"version":3,"file":"calldata_retriever.d.ts","sourceRoot":"","sources":["../../../src/archiver/l1/calldata_retriever.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,qBAAqB,EAAE,MAAM,uBAAuB,CAAC;AACrF,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AACnE,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AACpD,OAAO,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAE3D,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AAQpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,qBAAqB,CAAC;
+
{"version":3,"file":"calldata_retriever.d.ts","sourceRoot":"","sources":["../../../src/archiver/l1/calldata_retriever.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,qBAAqB,EAAE,MAAM,uBAAuB,CAAC;AACrF,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AACnE,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AACpD,OAAO,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAE3D,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AAQpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,qBAAqB,CAAC;AAE3D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AAExD,OAAO,EAEL,KAAK,GAAG,EACR,KAAK,WAAW,EAOjB,MAAM,MAAM,CAAC;AAEd,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,uBAAuB,CAAC;AAMrE;;;GAGG;AACH,qBAAa,iBAAiB;IAO1B,OAAO,CAAC,QAAQ,CAAC,YAAY;IAC7B,OAAO,CAAC,QAAQ,CAAC,WAAW;IAC5B,OAAO,CAAC,QAAQ,CAAC,mBAAmB;IACpC,OAAO,CAAC,QAAQ,CAAC,eAAe;IAChC,OAAO,CAAC,QAAQ,CAAC,MAAM;IAVzB,uDAAuD;IACvD,OAAO,CAAC,QAAQ,CAAC,kBAAkB,CAAsB;IAEzD,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAa;IAE3C,YACmB,YAAY,EAAE,gBAAgB,EAC9B,WAAW,EAAE,qBAAqB,EAClC,mBAAmB,EAAE,MAAM,EAC3B,eAAe,EAAE,uBAAuB,GAAG,SAAS,EACpD,MAAM,EAAE,MAAM,EAC/B,iBAAiB,EAAE;QACjB,aAAa,EAAE,UAAU,CAAC;QAC1B,yBAAyB,EAAE,UAAU,CAAC;QACtC,uBAAuB,EAAE,UAAU,CAAC;QACpC,mBAAmB,CAAC,EAAE,UAAU,CAAC;KAClC,EAIF;IAED;;;;;;;;OAQG;IACG,yBAAyB,CAC7B,MAAM,EAAE,KAAK,MAAM,EAAE,EACrB,UAAU,EAAE,MAAM,EAAE,EACpB,gBAAgB,EAAE,gBAAgB,EAClC,cAAc,EAAE;QACd,gBAAgB,CAAC,EAAE,GAAG,CAAC;QACvB,aAAa,CAAC,EAAE,GAAG,CAAC;KACrB,GACA,OAAO,CAAC;QACT,gBAAgB,EAAE,gBAAgB,CAAC;QACnC,WAAW,EAAE,EAAE,CAAC;QAChB,MAAM,EAAE,gBAAgB,CAAC;QACzB,YAAY,EAAE,oBAAoB,EAAE,CAAC;QACrC,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC,CASD;IAED,sDAAsD;IACtD,UAAgB,kBAAkB,CAAC,EAAE,EAAE,WAAW,EAAE,gBAAgB,EAAE,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,CA+BpG;IAED;;;;;OAKG;IACH,UAAgB,sBAAsB,CAAC,EAAE,EAAE,WAAW,GAAG,OAAO,CAAC,GAAG,GAAG,SAAS,CAAC,CA4BhF;IAED;;;;;OAKG;IACH,SAAS,CAAC,mBAAmB,CAAC,EAAE,EAAE;QAAE,EAAE,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,CAAC;QAAC,KAAK,EAAE,GAAG,CAAC;QAAC,IAAI,EAAE,GAAG,CAAA;KAAE,GAAG,GAAG,GAAG,SAAS,CAuFxG;IAED;;;;;OAKG;IACH,SAAS,CAAC,sBAAsB,CAAC,EAAE,EAAE;QAAE,EAAE,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,CAAC;QAAC,KAAK,EAAE,GAAG,CAAC;QAAC,IAAI,EAAE,GAAG,CAAA;KAAE,GAAG,GAAG,GAAG,SAAS,CA0B3G;IAED;;;;;;OAMG;IACH,UAAgB,uBAAuB,CAAC,MAAM,EAAE,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,CAyCjE;IAED;;;;OAIG;IACH,OAAO,CAAC,iCAAiC;IAgCzC;;;;;;;OAOG;IACH,SAAS,CAAC,wBAAwB,CAChC,eAAe,EAAE,GAAG,EACpB,SAAS,EAAE,GAAG,EACd,gBAAgB,EAAE,gBAAgB,EAClC,cAAc,EAAE;QACd,gBAAgB,CAAC,EAAE,GAAG,CAAC;QACvB,aAAa,CAAC,EAAE,GAAG,CAAC;KACrB,GACA;QACD,gBAAgB,EAAE,gBAAgB,CAAC;QACnC,WAAW,EAAE,EAAE,CAAC;QAChB,MAAM,EAAE,gBAAgB,CAAC;QACzB,YAAY,EAAE,oBAAoB,EAAE,CAAC;QACrC,SAAS,EAAE,MAAM,CAAC;KACnB,CA6FA;CACF"}
@@ -3,8 +3,9 @@ import { Fr } from '@aztec/foundation/curves/bn254';
 import { EthAddress } from '@aztec/foundation/eth-address';
 import { EmpireSlashingProposerAbi, GovernanceProposerAbi, RollupAbi, SlashFactoryAbi, TallySlashingProposerAbi } from '@aztec/l1-artifacts';
 import { CommitteeAttestation } from '@aztec/stdlib/block';
+import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p';
 import { CheckpointHeader } from '@aztec/stdlib/rollup';
-import { decodeFunctionData, hexToBytes, multicall3Abi, toFunctionSelector } from 'viem';
+import { decodeFunctionData, encodeAbiParameters, hexToBytes, keccak256, multicall3Abi, toFunctionSelector } from 'viem';
 import { getSuccessfulCallsFromDebug } from './debug_tx.js';
 import { getCallFromSpireProposer } from './spire_proposer.js';
 import { getSuccessfulCallsFromTrace } from './trace_tx.js';
@@ -34,14 +35,19 @@ import { getSuccessfulCallsFromTrace } from './trace_tx.js';
      * @param txHash - Hash of the tx that published it.
      * @param blobHashes - Blob hashes for the checkpoint.
      * @param checkpointNumber - Checkpoint number.
+     * @param expectedHashes - Optional expected hashes from the CheckpointProposed event for validation
      * @returns Checkpoint header and metadata from the calldata, deserialized
-     */ async getCheckpointFromRollupTx(txHash, blobHashes, checkpointNumber) {
-        this.logger.trace(`Fetching checkpoint ${checkpointNumber} from rollup tx ${txHash}
+     */ async getCheckpointFromRollupTx(txHash, blobHashes, checkpointNumber, expectedHashes) {
+        this.logger.trace(`Fetching checkpoint ${checkpointNumber} from rollup tx ${txHash}`, {
+            willValidateHashes: !!expectedHashes.attestationsHash || !!expectedHashes.payloadDigest,
+            hasAttestationsHash: !!expectedHashes.attestationsHash,
+            hasPayloadDigest: !!expectedHashes.payloadDigest
+        });
         const tx = await this.publicClient.getTransaction({
             hash: txHash
         });
         const proposeCalldata = await this.getProposeCallData(tx, checkpointNumber);
-        return this.decodeAndBuildCheckpoint(proposeCalldata, tx.blockHash, checkpointNumber);
+        return this.decodeAndBuildCheckpoint(proposeCalldata, tx.blockHash, checkpointNumber, expectedHashes);
     }
     /** Gets rollup propose calldata from a transaction */ async getProposeCallData(tx, checkpointNumber) {
         // Try to decode as multicall3 with validation
@@ -283,12 +289,37 @@ import { getSuccessfulCallsFromTrace } from './trace_tx.js';
         return calls[0].input;
     }
     /**
+     * Extracts the CommitteeAttestations struct definition from RollupAbi.
+     * Finds the _attestations parameter by name in the propose function.
+     * Lazy-loaded to avoid issues during module initialization.
+     */ getCommitteeAttestationsStructDef() {
+        const proposeFunction = RollupAbi.find((item)=>item.type === 'function' && item.name === 'propose');
+        if (!proposeFunction) {
+            throw new Error('propose function not found in RollupAbi');
+        }
+        // Find the _attestations parameter by name, not by index
+        const attestationsParam = proposeFunction.inputs.find((param)=>param.name === '_attestations');
+        if (!attestationsParam) {
+            throw new Error('_attestations parameter not found in propose function');
+        }
+        if (attestationsParam.type !== 'tuple') {
+            throw new Error(`Expected _attestations parameter to be a tuple, got ${attestationsParam.type}`);
+        }
+        // Extract the tuple components (struct fields)
+        const tupleParam = attestationsParam;
+        return {
+            type: 'tuple',
+            components: tupleParam.components || []
+        };
+    }
+    /**
      * Decodes propose calldata and builds the checkpoint header structure.
      * @param proposeCalldata - The propose function calldata
      * @param blockHash - The L1 block hash containing this transaction
      * @param checkpointNumber - The checkpoint number
+     * @param expectedHashes - Optional expected hashes from the CheckpointProposed event for validation
      * @returns The decoded checkpoint header and metadata
-     */ decodeAndBuildCheckpoint(proposeCalldata, blockHash, checkpointNumber) {
+     */ decodeAndBuildCheckpoint(proposeCalldata, blockHash, checkpointNumber, expectedHashes) {
         const { functionName: rollupFunctionName, args: rollupArgs } = decodeFunctionData({
             abi: RollupAbi,
             data: proposeCalldata
@@ -298,6 +329,45 @@ import { getSuccessfulCallsFromTrace } from './trace_tx.js';
         }
         const [decodedArgs, packedAttestations, _signers, _attestationsAndSignersSignature, _blobInput] = rollupArgs;
         const attestations = CommitteeAttestation.fromPacked(packedAttestations, this.targetCommitteeSize);
+        const header = CheckpointHeader.fromViem(decodedArgs.header);
+        const archiveRoot = new Fr(Buffer.from(hexToBytes(decodedArgs.archive)));
+        // Validate attestationsHash if provided (skip for backwards compatibility with older events)
+        if (expectedHashes.attestationsHash) {
+            // Compute attestationsHash: keccak256(abi.encode(CommitteeAttestations))
+            const computedAttestationsHash = keccak256(encodeAbiParameters([
+                this.getCommitteeAttestationsStructDef()
+            ], [
+                packedAttestations
+            ]));
+            // Compare as buffers to avoid case-sensitivity and string comparison issues
+            const computedBuffer = Buffer.from(hexToBytes(computedAttestationsHash));
+            const expectedBuffer = Buffer.from(hexToBytes(expectedHashes.attestationsHash));
+            if (!computedBuffer.equals(expectedBuffer)) {
+                throw new Error(`Attestations hash mismatch for checkpoint ${checkpointNumber}: ` + `computed=${computedAttestationsHash}, expected=${expectedHashes.attestationsHash}`);
+            }
+            this.logger.trace(`Validated attestationsHash for checkpoint ${checkpointNumber}`, {
+                computedAttestationsHash,
+                expectedAttestationsHash: expectedHashes.attestationsHash
+            });
+        }
+        // Validate payloadDigest if provided (skip for backwards compatibility with older events)
+        if (expectedHashes.payloadDigest) {
+            // Use ConsensusPayload to compute the digest - this ensures we match the exact logic
+            // used by the network for signing and verification
+            const consensusPayload = new ConsensusPayload(header, archiveRoot);
+            const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.blockAttestation);
+            const computedPayloadDigest = keccak256(payloadToSign);
+            // Compare as buffers to avoid case-sensitivity and string comparison issues
+            const computedBuffer = Buffer.from(hexToBytes(computedPayloadDigest));
+            const expectedBuffer = Buffer.from(hexToBytes(expectedHashes.payloadDigest));
+            if (!computedBuffer.equals(expectedBuffer)) {
+                throw new Error(`Payload digest mismatch for checkpoint ${checkpointNumber}: ` + `computed=${computedPayloadDigest}, expected=${expectedHashes.payloadDigest}`);
+            }
+            this.logger.trace(`Validated payloadDigest for checkpoint ${checkpointNumber}`, {
+                computedPayloadDigest,
+                expectedPayloadDigest: expectedHashes.payloadDigest
+            });
+        }
         this.logger.trace(`Decoded propose calldata`, {
             checkpointNumber,
             archive: decodedArgs.archive,
@@ -307,8 +377,6 @@ import { getSuccessfulCallsFromTrace } from './trace_tx.js';
             packedAttestations,
             targetCommitteeSize: this.targetCommitteeSize
         });
-        const header = CheckpointHeader.fromViem(decodedArgs.header);
-        const archiveRoot = new Fr(Buffer.from(hexToBytes(decodedArgs.archive)));
         return {
             checkpointNumber,
             archiveRoot,
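The attestations-hash validation added to calldata_retriever.js above can be reproduced with viem's encodeAbiParameters and keccak256. A standalone sketch with an illustrative tuple definition only; the package derives the real struct from RollupAbi's _attestations parameter:

import { encodeAbiParameters, hexToBytes, keccak256, type Hex } from 'viem';

// Illustrative struct shape only; the real components come from RollupAbi.
const committeeAttestationsDef = {
  type: 'tuple',
  components: [
    { name: 'signatureIndices', type: 'bytes' },
    { name: 'signaturesOrAddresses', type: 'bytes' },
  ],
} as const;

function checkAttestationsHash(
  packedAttestations: { signatureIndices: Hex; signaturesOrAddresses: Hex },
  expectedHash: Hex,
): void {
  // keccak256(abi.encode(CommitteeAttestations)) must equal the hash carried by the event.
  const computed = keccak256(encodeAbiParameters([committeeAttestationsDef], [packedAttestations]));
  // Compare as bytes so hex casing cannot cause a spurious mismatch.
  if (!Buffer.from(hexToBytes(computed)).equals(Buffer.from(hexToBytes(expectedHash)))) {
    throw new Error(`attestations hash mismatch: computed=${computed}, expected=${expectedHash}`);
  }
}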