@aztec/archiver 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -6
- package/dest/archiver/archiver.d.ts +127 -84
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +1128 -380
- package/dest/archiver/archiver_store.d.ts +122 -45
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +2013 -343
- package/dest/archiver/config.d.ts +7 -20
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +21 -5
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/instrumentation.d.ts +5 -3
- package/dest/archiver/instrumentation.d.ts.map +1 -1
- package/dest/archiver/instrumentation.js +14 -0
- package/dest/archiver/kv_archiver_store/block_store.d.ts +83 -15
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +396 -73
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.js +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +51 -55
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +82 -46
- package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +149 -84
- package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -5
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/message_store.js +15 -14
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
- package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/archiver/l1/calldata_retriever.js +471 -0
- package/dest/archiver/l1/data_retrieval.d.ts +90 -0
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
- package/dest/archiver/l1/data_retrieval.js +331 -0
- package/dest/archiver/l1/debug_tx.d.ts +19 -0
- package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
- package/dest/archiver/l1/debug_tx.js +73 -0
- package/dest/archiver/l1/spire_proposer.d.ts +70 -0
- package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
- package/dest/archiver/l1/spire_proposer.js +157 -0
- package/dest/archiver/l1/trace_tx.d.ts +97 -0
- package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
- package/dest/archiver/l1/trace_tx.js +91 -0
- package/dest/archiver/l1/types.d.ts +12 -0
- package/dest/archiver/l1/types.d.ts.map +1 -0
- package/dest/archiver/l1/types.js +3 -0
- package/dest/archiver/l1/validate_trace.d.ts +29 -0
- package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
- package/dest/archiver/l1/validate_trace.js +150 -0
- package/dest/archiver/structs/data_retrieval.d.ts +1 -1
- package/dest/archiver/structs/inbox_message.d.ts +4 -4
- package/dest/archiver/structs/inbox_message.d.ts.map +1 -1
- package/dest/archiver/structs/inbox_message.js +6 -5
- package/dest/archiver/structs/published.d.ts +2 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/archiver/validation.d.ts +10 -4
- package/dest/archiver/validation.d.ts.map +1 -1
- package/dest/archiver/validation.js +66 -44
- package/dest/factory.d.ts +4 -6
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +5 -4
- package/dest/index.d.ts +2 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -1
- package/dest/rpc/index.d.ts +2 -2
- package/dest/test/index.d.ts +1 -1
- package/dest/test/mock_archiver.d.ts +16 -8
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +19 -14
- package/dest/test/mock_l1_to_l2_message_source.d.ts +7 -6
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +10 -9
- package/dest/test/mock_l2_block_source.d.ts +31 -20
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +85 -18
- package/dest/test/mock_structs.d.ts +3 -2
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +9 -8
- package/package.json +18 -17
- package/src/archiver/archiver.ts +971 -475
- package/src/archiver/archiver_store.ts +141 -44
- package/src/archiver/archiver_store_test_suite.ts +2114 -331
- package/src/archiver/config.ts +30 -35
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/instrumentation.ts +19 -2
- package/src/archiver/kv_archiver_store/block_store.ts +541 -83
- package/src/archiver/kv_archiver_store/contract_class_store.ts +1 -1
- package/src/archiver/kv_archiver_store/contract_instance_store.ts +1 -1
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +107 -67
- package/src/archiver/kv_archiver_store/log_store.ts +209 -99
- package/src/archiver/kv_archiver_store/message_store.ts +21 -18
- package/src/archiver/l1/README.md +98 -0
- package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
- package/src/archiver/l1/calldata_retriever.ts +641 -0
- package/src/archiver/l1/data_retrieval.ts +512 -0
- package/src/archiver/l1/debug_tx.ts +99 -0
- package/src/archiver/l1/spire_proposer.ts +160 -0
- package/src/archiver/l1/trace_tx.ts +128 -0
- package/src/archiver/l1/types.ts +13 -0
- package/src/archiver/l1/validate_trace.ts +211 -0
- package/src/archiver/structs/inbox_message.ts +8 -8
- package/src/archiver/structs/published.ts +1 -1
- package/src/archiver/validation.ts +86 -32
- package/src/factory.ts +6 -7
- package/src/index.ts +1 -1
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/mock_archiver.ts +22 -16
- package/src/test/mock_l1_to_l2_message_source.ts +10 -9
- package/src/test/mock_l2_block_source.ts +114 -27
- package/src/test/mock_structs.ts +10 -9
- package/dest/archiver/data_retrieval.d.ts +0 -78
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/data_retrieval.js +0 -354
- package/src/archiver/data_retrieval.ts +0 -535
|
@@ -1,15 +1,21 @@
|
|
|
1
|
-
import { INITIAL_L2_BLOCK_NUM
|
|
1
|
+
import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
|
|
2
|
+
import { BlockNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
2
4
|
import { createLogger } from '@aztec/foundation/log';
|
|
3
5
|
import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
|
|
4
|
-
import {
|
|
6
|
+
import { L2BlockHash } from '@aztec/stdlib/block';
|
|
7
|
+
import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, PublicLog, TxScopedL2Log } from '@aztec/stdlib/logs';
|
|
5
8
|
/**
|
|
6
9
|
* A store for logs
|
|
7
10
|
*/ export class LogStore {
|
|
8
11
|
db;
|
|
9
12
|
blockStore;
|
|
10
|
-
|
|
11
|
-
#
|
|
12
|
-
|
|
13
|
+
// `tag` --> private logs
|
|
14
|
+
#privateLogsByTag;
|
|
15
|
+
// `{contractAddress}_${tag}` --> public logs
|
|
16
|
+
#publicLogsByContractAndTag;
|
|
17
|
+
#privateLogKeysByBlock;
|
|
18
|
+
#publicLogKeysByBlock;
|
|
13
19
|
#publicLogsByBlock;
|
|
14
20
|
#contractClassLogsByBlock;
|
|
15
21
|
#logsMaxPageSize;
|
|
@@ -18,68 +24,118 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
18
24
|
this.db = db;
|
|
19
25
|
this.blockStore = blockStore;
|
|
20
26
|
this.#log = createLogger('archiver:log_store');
|
|
21
|
-
this.#
|
|
22
|
-
this.#
|
|
23
|
-
this.#
|
|
27
|
+
this.#privateLogsByTag = db.openMap('archiver_private_tagged_logs_by_tag');
|
|
28
|
+
this.#publicLogsByContractAndTag = db.openMap('archiver_public_tagged_logs_by_tag');
|
|
29
|
+
this.#privateLogKeysByBlock = db.openMap('archiver_private_log_keys_by_block');
|
|
30
|
+
this.#publicLogKeysByBlock = db.openMap('archiver_public_log_keys_by_block');
|
|
24
31
|
this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
|
|
25
32
|
this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
|
|
26
33
|
this.#logsMaxPageSize = logsMaxPageSize;
|
|
27
34
|
}
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
35
|
+
/**
|
|
36
|
+
* Extracts tagged logs from a single block, grouping them into private and public maps.
|
|
37
|
+
*
|
|
38
|
+
* @param block - The L2 block to extract logs from.
|
|
39
|
+
* @returns An object containing the private and public tagged logs for the block.
|
|
40
|
+
*/ #extractTaggedLogsFromBlock(block) {
|
|
41
|
+
// SiloedTag (as string) -> array of log buffers.
|
|
42
|
+
const privateTaggedLogs = new Map();
|
|
43
|
+
// "{contractAddress}_{tag}" (as string) -> array of log buffers.
|
|
44
|
+
const publicTaggedLogs = new Map();
|
|
45
|
+
block.body.txEffects.forEach((txEffect)=>{
|
|
32
46
|
const txHash = txEffect.txHash;
|
|
33
|
-
|
|
34
|
-
|
|
47
|
+
txEffect.privateLogs.forEach((log)=>{
|
|
48
|
+
// Private logs use SiloedTag (already siloed by kernel)
|
|
35
49
|
const tag = log.fields[0];
|
|
36
50
|
this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);
|
|
37
|
-
const currentLogs =
|
|
38
|
-
currentLogs.push(new TxScopedL2Log(txHash,
|
|
39
|
-
|
|
51
|
+
const currentLogs = privateTaggedLogs.get(tag.toString()) ?? [];
|
|
52
|
+
currentLogs.push(new TxScopedL2Log(txHash, block.number, block.timestamp, log.getEmittedFields(), txEffect.noteHashes, txEffect.nullifiers[0]).toBuffer());
|
|
53
|
+
privateTaggedLogs.set(tag.toString(), currentLogs);
|
|
40
54
|
});
|
|
41
|
-
txEffect.publicLogs.forEach((log
|
|
55
|
+
txEffect.publicLogs.forEach((log)=>{
|
|
56
|
+
// Public logs use Tag directly (not siloed) and are stored with contract address
|
|
42
57
|
const tag = log.fields[0];
|
|
43
|
-
|
|
44
|
-
const
|
|
45
|
-
|
|
46
|
-
|
|
58
|
+
const contractAddress = log.contractAddress;
|
|
59
|
+
const key = `${contractAddress.toString()}_${tag.toString()}`;
|
|
60
|
+
this.#log.debug(`Found public log with tag ${tag.toString()} from contract ${contractAddress.toString()} in block ${block.number}`);
|
|
61
|
+
const currentLogs = publicTaggedLogs.get(key) ?? [];
|
|
62
|
+
currentLogs.push(new TxScopedL2Log(txHash, block.number, block.timestamp, log.getEmittedFields(), txEffect.noteHashes, txEffect.nullifiers[0]).toBuffer());
|
|
63
|
+
publicTaggedLogs.set(key, currentLogs);
|
|
47
64
|
});
|
|
48
65
|
});
|
|
49
|
-
return
|
|
66
|
+
return {
|
|
67
|
+
privateTaggedLogs,
|
|
68
|
+
publicTaggedLogs
|
|
69
|
+
};
|
|
50
70
|
}
|
|
51
71
|
/**
|
|
52
|
-
*
|
|
53
|
-
* @param blocks - The blocks
|
|
54
|
-
* @returns
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
72
|
+
* Extracts and aggregates tagged logs from a list of blocks.
|
|
73
|
+
* @param blocks - The blocks to extract logs from.
|
|
74
|
+
* @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
|
|
75
|
+
* "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
|
|
76
|
+
*/ #extractTaggedLogs(blocks) {
|
|
77
|
+
const taggedLogsInBlocks = blocks.map((block)=>this.#extractTaggedLogsFromBlock(block));
|
|
78
|
+
// Now we merge the maps from each block into a single map.
|
|
79
|
+
const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs })=>{
|
|
80
|
+
for (const [tag, logs] of privateTaggedLogs.entries()){
|
|
58
81
|
const currentLogs = acc.get(tag) ?? [];
|
|
59
82
|
acc.set(tag, currentLogs.concat(logs));
|
|
60
83
|
}
|
|
61
84
|
return acc;
|
|
62
85
|
}, new Map());
|
|
63
|
-
const
|
|
86
|
+
const publicTaggedLogs = taggedLogsInBlocks.reduce((acc, { publicTaggedLogs })=>{
|
|
87
|
+
for (const [key, logs] of publicTaggedLogs.entries()){
|
|
88
|
+
const currentLogs = acc.get(key) ?? [];
|
|
89
|
+
acc.set(key, currentLogs.concat(logs));
|
|
90
|
+
}
|
|
91
|
+
return acc;
|
|
92
|
+
}, new Map());
|
|
93
|
+
return {
|
|
94
|
+
privateTaggedLogs,
|
|
95
|
+
publicTaggedLogs
|
|
96
|
+
};
|
|
97
|
+
}
|
|
98
|
+
/**
|
|
99
|
+
* Append new logs to the store's list.
|
|
100
|
+
* @param blocks - The blocks for which to add the logs.
|
|
101
|
+
* @returns True if the operation is successful.
|
|
102
|
+
*/ addLogs(blocks) {
|
|
103
|
+
const { privateTaggedLogs, publicTaggedLogs } = this.#extractTaggedLogs(blocks);
|
|
104
|
+
const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());
|
|
105
|
+
const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
|
|
64
106
|
return this.db.transactionAsync(async ()=>{
|
|
65
|
-
const
|
|
66
|
-
tag,
|
|
67
|
-
logBuffers: await this.#
|
|
107
|
+
const currentPrivateTaggedLogs = await Promise.all(keysOfPrivateLogsToUpdate.map(async (key)=>({
|
|
108
|
+
tag: key,
|
|
109
|
+
logBuffers: await this.#privateLogsByTag.getAsync(key)
|
|
110
|
+
})));
|
|
111
|
+
currentPrivateTaggedLogs.forEach((taggedLogBuffer)=>{
|
|
112
|
+
if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
|
|
113
|
+
privateTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(privateTaggedLogs.get(taggedLogBuffer.tag)));
|
|
114
|
+
}
|
|
115
|
+
});
|
|
116
|
+
const currentPublicTaggedLogs = await Promise.all(keysOfPublicLogsToUpdate.map(async (key)=>({
|
|
117
|
+
key,
|
|
118
|
+
logBuffers: await this.#publicLogsByContractAndTag.getAsync(key)
|
|
68
119
|
})));
|
|
69
|
-
|
|
120
|
+
currentPublicTaggedLogs.forEach((taggedLogBuffer)=>{
|
|
70
121
|
if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
|
|
71
|
-
|
|
122
|
+
publicTaggedLogs.set(taggedLogBuffer.key, taggedLogBuffer.logBuffers.concat(publicTaggedLogs.get(taggedLogBuffer.key)));
|
|
72
123
|
}
|
|
73
124
|
});
|
|
74
125
|
for (const block of blocks){
|
|
75
|
-
const
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
126
|
+
const blockHash = await block.hash();
|
|
127
|
+
const privateTagsInBlock = [];
|
|
128
|
+
for (const [tag, logs] of privateTaggedLogs.entries()){
|
|
129
|
+
await this.#privateLogsByTag.set(tag, logs);
|
|
130
|
+
privateTagsInBlock.push(tag);
|
|
131
|
+
}
|
|
132
|
+
await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
|
|
133
|
+
const publicKeysInBlock = [];
|
|
134
|
+
for (const [key, logs] of publicTaggedLogs.entries()){
|
|
135
|
+
await this.#publicLogsByContractAndTag.set(key, logs);
|
|
136
|
+
publicKeysInBlock.push(key);
|
|
79
137
|
}
|
|
80
|
-
await this.#
|
|
81
|
-
const privateLogsInBlock = block.body.txEffects.map((txEffect)=>txEffect.privateLogs).flat().map((log)=>log.toBuffer());
|
|
82
|
-
await this.#privateLogsByBlock.set(block.number, Buffer.concat(privateLogsInBlock));
|
|
138
|
+
await this.#publicLogKeysByBlock.set(block.number, publicKeysInBlock);
|
|
83
139
|
const publicLogsInBlock = block.body.txEffects.map((txEffect, txIndex)=>[
|
|
84
140
|
numToUInt32BE(txIndex),
|
|
85
141
|
numToUInt32BE(txEffect.publicLogs.length),
|
|
@@ -90,57 +146,60 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
90
146
|
numToUInt32BE(txEffect.contractClassLogs.length),
|
|
91
147
|
txEffect.contractClassLogs.map((log)=>log.toBuffer())
|
|
92
148
|
].flat()).flat();
|
|
93
|
-
await this.#publicLogsByBlock.set(block.number,
|
|
94
|
-
await this.#contractClassLogsByBlock.set(block.number,
|
|
149
|
+
await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
|
|
150
|
+
await this.#contractClassLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, contractClassLogsInBlock));
|
|
95
151
|
}
|
|
96
152
|
return true;
|
|
97
153
|
});
|
|
98
154
|
}
|
|
155
|
+
#packWithBlockHash(blockHash, data) {
|
|
156
|
+
return Buffer.concat([
|
|
157
|
+
blockHash.toBuffer(),
|
|
158
|
+
...data
|
|
159
|
+
]);
|
|
160
|
+
}
|
|
161
|
+
#unpackBlockHash(reader) {
|
|
162
|
+
const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
|
|
163
|
+
if (!blockHash) {
|
|
164
|
+
throw new Error('Failed to read block hash from log entry buffer');
|
|
165
|
+
}
|
|
166
|
+
return L2BlockHash.fromField(blockHash);
|
|
167
|
+
}
|
|
99
168
|
deleteLogs(blocks) {
|
|
100
169
|
return this.db.transactionAsync(async ()=>{
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
170
|
+
await Promise.all(blocks.map(async (block)=>{
|
|
171
|
+
// Delete private logs
|
|
172
|
+
const privateKeys = await this.#privateLogKeysByBlock.getAsync(block.number) ?? [];
|
|
173
|
+
await Promise.all(privateKeys.map((tag)=>this.#privateLogsByTag.delete(tag)));
|
|
174
|
+
// Delete public logs
|
|
175
|
+
const publicKeys = await this.#publicLogKeysByBlock.getAsync(block.number) ?? [];
|
|
176
|
+
await Promise.all(publicKeys.map((key)=>this.#publicLogsByContractAndTag.delete(key)));
|
|
177
|
+
}));
|
|
105
178
|
await Promise.all(blocks.map((block)=>Promise.all([
|
|
106
|
-
this.#privateLogsByBlock.delete(block.number),
|
|
107
179
|
this.#publicLogsByBlock.delete(block.number),
|
|
108
|
-
this.#
|
|
180
|
+
this.#privateLogKeysByBlock.delete(block.number),
|
|
181
|
+
this.#publicLogKeysByBlock.delete(block.number),
|
|
109
182
|
this.#contractClassLogsByBlock.delete(block.number)
|
|
110
183
|
])));
|
|
111
|
-
await Promise.all(tagsToDelete.map((tag)=>this.#logsByTag.delete(tag.toString())));
|
|
112
184
|
return true;
|
|
113
185
|
});
|
|
114
186
|
}
|
|
115
187
|
/**
|
|
116
|
-
*
|
|
117
|
-
*
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
const logs = [];
|
|
122
|
-
for await (const buffer of this.#privateLogsByBlock.valuesAsync({
|
|
123
|
-
start,
|
|
124
|
-
limit
|
|
125
|
-
})){
|
|
126
|
-
const reader = new BufferReader(buffer);
|
|
127
|
-
while(reader.remainingBytes() > 0){
|
|
128
|
-
logs.push(reader.readObject(PrivateLog));
|
|
129
|
-
}
|
|
130
|
-
}
|
|
131
|
-
return logs;
|
|
188
|
+
* Gets all private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
|
|
189
|
+
* array implies no logs match that tag.
|
|
190
|
+
*/ async getPrivateLogsByTags(tags) {
|
|
191
|
+
const logs = await Promise.all(tags.map((tag)=>this.#privateLogsByTag.getAsync(tag.toString())));
|
|
192
|
+
return logs.map((logBuffers)=>logBuffers?.map((logBuffer)=>TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
|
|
132
193
|
}
|
|
133
194
|
/**
|
|
134
|
-
* Gets all logs that match any of the
|
|
135
|
-
*
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
const logs = await Promise.all(tags.map((tag)=>this.#logsByTag.getAsync(tag.toString())));
|
|
143
|
-
return logs.map((logBuffers)=>logBuffers?.slice(0, limitPerTag).map((logBuffer)=>TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
|
|
195
|
+
* Gets all public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
|
|
196
|
+
* logs is returned. An empty array implies no logs match that tag.
|
|
197
|
+
*/ async getPublicLogsByTagsFromContract(contractAddress, tags) {
|
|
198
|
+
const logs = await Promise.all(tags.map((tag)=>{
|
|
199
|
+
const key = `${contractAddress.toString()}_${tag.value.toString()}`;
|
|
200
|
+
return this.#publicLogsByContractAndTag.getAsync(key);
|
|
201
|
+
}));
|
|
202
|
+
return logs.map((logBuffers)=>logBuffers?.map((logBuffer)=>TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
|
|
144
203
|
}
|
|
145
204
|
/**
|
|
146
205
|
* Gets public logs based on the provided filter.
|
|
@@ -171,6 +230,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
171
230
|
[]
|
|
172
231
|
];
|
|
173
232
|
const reader = new BufferReader(buffer);
|
|
233
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
174
234
|
while(reader.remainingBytes() > 0){
|
|
175
235
|
const indexOfTx = reader.readNumber();
|
|
176
236
|
const numLogsInTx = reader.readNumber();
|
|
@@ -181,7 +241,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
181
241
|
}
|
|
182
242
|
const txLogs = publicLogsInBlock[txIndex];
|
|
183
243
|
const logs = [];
|
|
184
|
-
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
|
|
244
|
+
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
|
|
185
245
|
return {
|
|
186
246
|
logs,
|
|
187
247
|
maxLogsHit
|
|
@@ -206,6 +266,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
206
266
|
[]
|
|
207
267
|
];
|
|
208
268
|
const reader = new BufferReader(logBuffer);
|
|
269
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
209
270
|
while(reader.remainingBytes() > 0){
|
|
210
271
|
const indexOfTx = reader.readNumber();
|
|
211
272
|
const numLogsInTx = reader.readNumber();
|
|
@@ -216,7 +277,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
216
277
|
}
|
|
217
278
|
for(let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++){
|
|
218
279
|
const txLogs = publicLogsInBlock[txIndex];
|
|
219
|
-
maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
|
|
280
|
+
maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
|
|
220
281
|
if (maxLogsHit) {
|
|
221
282
|
this.#log.debug(`Max logs hit at block ${blockNumber}`);
|
|
222
283
|
break loopOverBlocks;
|
|
@@ -257,6 +318,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
257
318
|
[]
|
|
258
319
|
];
|
|
259
320
|
const reader = new BufferReader(contractClassLogsBuffer);
|
|
321
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
260
322
|
while(reader.remainingBytes() > 0){
|
|
261
323
|
const indexOfTx = reader.readNumber();
|
|
262
324
|
const numLogsInTx = reader.readNumber();
|
|
@@ -267,7 +329,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
267
329
|
}
|
|
268
330
|
const txLogs = contractClassLogsInBlock[txIndex];
|
|
269
331
|
const logs = [];
|
|
270
|
-
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
|
|
332
|
+
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
|
|
271
333
|
return {
|
|
272
334
|
logs,
|
|
273
335
|
maxLogsHit
|
|
@@ -292,6 +354,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
292
354
|
[]
|
|
293
355
|
];
|
|
294
356
|
const reader = new BufferReader(logBuffer);
|
|
357
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
295
358
|
while(reader.remainingBytes() > 0){
|
|
296
359
|
const indexOfTx = reader.readNumber();
|
|
297
360
|
const numLogsInTx = reader.readNumber();
|
|
@@ -302,7 +365,7 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
302
365
|
}
|
|
303
366
|
for(let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++){
|
|
304
367
|
const txLogs = contractClassLogsInBlock[txIndex];
|
|
305
|
-
maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
|
|
368
|
+
maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
|
|
306
369
|
if (maxLogsHit) {
|
|
307
370
|
this.#log.debug(`Max logs hit at block ${blockNumber}`);
|
|
308
371
|
break loopOverBlocks;
|
|
@@ -314,16 +377,18 @@ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, P
|
|
|
314
377
|
maxLogsHit
|
|
315
378
|
};
|
|
316
379
|
}
|
|
317
|
-
#accumulateLogs(results, blockNumber, txIndex, txLogs, filter) {
|
|
380
|
+
#accumulateLogs(results, blockNumber, blockHash, txIndex, txLogs, filter = {}) {
|
|
318
381
|
let maxLogsHit = false;
|
|
319
382
|
let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
|
|
320
383
|
for(; logIndex < txLogs.length; logIndex++){
|
|
321
384
|
const log = txLogs[logIndex];
|
|
322
385
|
if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
|
|
323
386
|
if (log instanceof ContractClassLog) {
|
|
324
|
-
results.push(new ExtendedContractClassLog(new LogId(blockNumber, txIndex, logIndex), log));
|
|
387
|
+
results.push(new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
|
|
388
|
+
} else if (log instanceof PublicLog) {
|
|
389
|
+
results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
|
|
325
390
|
} else {
|
|
326
|
-
|
|
391
|
+
throw new Error('Unknown log type');
|
|
327
392
|
}
|
|
328
393
|
if (results.length >= this.#logsMaxPageSize) {
|
|
329
394
|
maxLogsHit = true;
|
|
@@ -1,5 +1,6 @@
|
|
|
1
|
-
import type { L1BlockId } from '@aztec/ethereum';
|
|
2
|
-
import {
|
|
1
|
+
import type { L1BlockId } from '@aztec/ethereum/l1-types';
|
|
2
|
+
import { CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
3
4
|
import { type AztecAsyncKVStore, type CustomRange } from '@aztec/kv-store';
|
|
4
5
|
import { type InboxMessage } from '../structs/inbox_message.js';
|
|
5
6
|
export declare class MessageStoreError extends Error {
|
|
@@ -28,12 +29,12 @@ export declare class MessageStore {
|
|
|
28
29
|
*/
|
|
29
30
|
getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined>;
|
|
30
31
|
getLastMessage(): Promise<InboxMessage | undefined>;
|
|
31
|
-
getL1ToL2Messages(
|
|
32
|
+
getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]>;
|
|
32
33
|
iterateL1ToL2Messages(range?: CustomRange<bigint>): AsyncIterableIterator<InboxMessage>;
|
|
33
34
|
removeL1ToL2Messages(startIndex: bigint): Promise<void>;
|
|
34
|
-
|
|
35
|
+
rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber: CheckpointNumber): Promise<void>;
|
|
35
36
|
private indexToKey;
|
|
36
37
|
private leafToIndexKey;
|
|
37
38
|
private increaseTotalMessageCount;
|
|
38
39
|
}
|
|
39
|
-
//# sourceMappingURL=
|
|
40
|
+
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWVzc2FnZV9zdG9yZS5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL2FyY2hpdmVyL2t2X2FyY2hpdmVyX3N0b3JlL21lc3NhZ2Vfc3RvcmUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxLQUFLLEVBQUUsU0FBUyxFQUFFLE1BQU0sMEJBQTBCLENBQUM7QUFDMUQsT0FBTyxFQUFFLGdCQUFnQixFQUFFLE1BQU0saUNBQWlDLENBQUM7QUFFbkUsT0FBTyxFQUFFLEVBQUUsRUFBRSxNQUFNLGdDQUFnQyxDQUFDO0FBSXBELE9BQU8sRUFDTCxLQUFLLGlCQUFpQixFQUd0QixLQUFLLFdBQVcsRUFFakIsTUFBTSxpQkFBaUIsQ0FBQztBQUd6QixPQUFPLEVBQ0wsS0FBSyxZQUFZLEVBSWxCLE1BQU0sNkJBQTZCLENBQUM7QUFFckMscUJBQWEsaUJBQWtCLFNBQVEsS0FBSzthQUd4QixZQUFZLEVBQUUsWUFBWTtJQUY1QyxZQUNFLE9BQU8sRUFBRSxNQUFNLEVBQ0MsWUFBWSxFQUFFLFlBQVksRUFJM0M7Q0FDRjtBQUVELHFCQUFhLFlBQVk7O0lBWVgsT0FBTyxDQUFDLEVBQUU7SUFBdEIsWUFBb0IsRUFBRSxFQUFFLGlCQUFpQixFQUt4QztJQUVZLDBCQUEwQixJQUFJLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FFekQ7SUFFRCxxQ0FBcUM7SUFDeEIsaUJBQWlCLElBQUksT0FBTyxDQUFDLFNBQVMsR0FBRyxTQUFTLENBQUMsQ0FRL0Q7SUFFRCxvQ0FBb0M7SUFDdkIsaUJBQWlCLENBQUMsT0FBTyxFQUFFLFNBQVMsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBR2hFO0lBRUQ7Ozs7T0FJRztJQUNJLGlCQUFpQixDQUFDLFFBQVEsRUFBRSxZQUFZLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBNkZoRTtJQUVEOzs7O09BSUc7SUFDSSxxQkFBcUIsQ0FBQyxhQUFhLEVBQUUsRUFBRSxHQUFHLE9BQU8sQ0FBQyxNQUFNLEdBQUcsU0FBUyxDQUFDLENBRTNFO0lBRVksY0FBYyxJQUFJLE9BQU8sQ0FBQyxZQUFZLEdBQUcsU0FBUyxDQUFDLENBRy9EO0lBRVksaUJBQWlCLENBQUMsZ0JBQWdCLEVBQUUsZ0JBQWdCLEdBQUcsT0FBTyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBdUJoRjtJQUVhLHFCQUFxQixDQUFDLEtBQUssR0FBRSxXQUFXLENBQUMsTUFBTSxDQUFNLEdBQUcscUJBQXFCLENBQUMsWUFBWSxDQUFDLENBS3hHO0lBRU0sb0JBQW9CLENBQUMsVUFBVSxFQUFFLE1BQU0sR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBZ0I3RDtJQUVNLGtDQUFrQyxDQUFDLHNCQUFzQixFQUFFLGdCQUFnQixHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FJakc7SUFFRCxPQUFPLENBQUMsVUFBVTtJQUlsQixPQUFPLENBQUMsY0FBYztZQUlSLHlCQUF5QjtDQVN4QyJ9
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"message_store.d.ts","sourceRoot":"","sources":["../../../src/archiver/kv_archiver_store/message_store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,
|
|
1
|
+
{"version":3,"file":"message_store.d.ts","sourceRoot":"","sources":["../../../src/archiver/kv_archiver_store/message_store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AAEnE,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAIpD,OAAO,EACL,KAAK,iBAAiB,EAGtB,KAAK,WAAW,EAEjB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EACL,KAAK,YAAY,EAIlB,MAAM,6BAA6B,CAAC;AAErC,qBAAa,iBAAkB,SAAQ,KAAK;aAGxB,YAAY,EAAE,YAAY;IAF5C,YACE,OAAO,EAAE,MAAM,EACC,YAAY,EAAE,YAAY,EAI3C;CACF;AAED,qBAAa,YAAY;;IAYX,OAAO,CAAC,EAAE;IAAtB,YAAoB,EAAE,EAAE,iBAAiB,EAKxC;IAEY,0BAA0B,IAAI,OAAO,CAAC,MAAM,CAAC,CAEzD;IAED,qCAAqC;IACxB,iBAAiB,IAAI,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,CAQ/D;IAED,oCAAoC;IACvB,iBAAiB,CAAC,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CAGhE;IAED;;;;OAIG;IACI,iBAAiB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CA6FhE;IAED;;;;OAIG;IACI,qBAAqB,CAAC,aAAa,EAAE,EAAE,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAE3E;IAEY,cAAc,IAAI,OAAO,CAAC,YAAY,GAAG,SAAS,CAAC,CAG/D;IAEY,iBAAiB,CAAC,gBAAgB,EAAE,gBAAgB,GAAG,OAAO,CAAC,EAAE,EAAE,CAAC,CAuBhF;IAEa,qBAAqB,CAAC,KAAK,GAAE,WAAW,CAAC,MAAM,CAAM,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAKxG;IAEM,oBAAoB,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAgB7D;IAEM,kCAAkC,CAAC,sBAAsB,EAAE,gBAAgB,GAAG,OAAO,CAAC,IAAI,CAAC,CAIjG;IAED,OAAO,CAAC,UAAU;IAIlB,OAAO,CAAC,cAAc;YAIR,yBAAyB;CASxC"}
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
1
2
|
import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
|
|
2
3
|
import { toArray } from '@aztec/foundation/iterable';
|
|
3
4
|
import { createLogger } from '@aztec/foundation/log';
|
|
@@ -77,18 +78,18 @@ export class MessageStore {
|
|
|
77
78
|
if (!expectedRollingHash.equals(message.rollingHash)) {
|
|
78
79
|
throw new MessageStoreError(`Invalid rolling hash for incoming L1 to L2 message ${message.leaf.toString()} ` + `with index ${message.index} ` + `(expected ${expectedRollingHash.toString()} from previous hash ${previousRollingHash} but got ${message.rollingHash.toString()})`, message);
|
|
79
80
|
}
|
|
80
|
-
// Check index corresponds to the
|
|
81
|
-
const [expectedStart, expectedEnd] = InboxLeaf.
|
|
81
|
+
// Check index corresponds to the checkpoint number.
|
|
82
|
+
const [expectedStart, expectedEnd] = InboxLeaf.indexRangeForCheckpoint(message.checkpointNumber);
|
|
82
83
|
if (message.index < expectedStart || message.index >= expectedEnd) {
|
|
83
|
-
throw new MessageStoreError(`Invalid index ${message.index} for incoming L1 to L2 message ${message.leaf.toString()} ` + `at
|
|
84
|
+
throw new MessageStoreError(`Invalid index ${message.index} for incoming L1 to L2 message ${message.leaf.toString()} ` + `at checkpoint ${message.checkpointNumber} (expected value in range [${expectedStart}, ${expectedEnd}))`, message);
|
|
84
85
|
}
|
|
85
|
-
// Check there are no gaps in the indices within the same
|
|
86
|
-
if (lastMessage && message.
|
|
86
|
+
// Check there are no gaps in the indices within the same checkpoint.
|
|
87
|
+
if (lastMessage && message.checkpointNumber === lastMessage.checkpointNumber && message.index !== lastMessage.index + 1n) {
|
|
87
88
|
throw new MessageStoreError(`Missing prior message for incoming L1 to L2 message ${message.leaf.toString()} ` + `with index ${message.index}`, message);
|
|
88
89
|
}
|
|
89
90
|
// Check the first message in a block has the correct index.
|
|
90
|
-
if ((!lastMessage || message.
|
|
91
|
-
throw new MessageStoreError(`Message ${message.leaf.toString()} for
|
|
91
|
+
if ((!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) && message.index !== expectedStart) {
|
|
92
|
+
throw new MessageStoreError(`Message ${message.leaf.toString()} for checkpoint ${message.checkpointNumber} has wrong index ` + `${message.index} (expected ${expectedStart})`, message);
|
|
92
93
|
}
|
|
93
94
|
// Perform the insertions.
|
|
94
95
|
await this.#l1ToL2Messages.set(this.indexToKey(message.index), serializeInboxMessage(message));
|
|
@@ -123,17 +124,17 @@ export class MessageStore {
|
|
|
123
124
|
}));
|
|
124
125
|
return msg ? deserializeInboxMessage(msg) : undefined;
|
|
125
126
|
}
|
|
126
|
-
async getL1ToL2Messages(
|
|
127
|
+
async getL1ToL2Messages(checkpointNumber) {
|
|
127
128
|
const messages = [];
|
|
128
|
-
const [startIndex, endIndex] = InboxLeaf.
|
|
129
|
+
const [startIndex, endIndex] = InboxLeaf.indexRangeForCheckpoint(checkpointNumber);
|
|
129
130
|
let lastIndex = startIndex - 1n;
|
|
130
131
|
for await (const msgBuffer of this.#l1ToL2Messages.valuesAsync({
|
|
131
132
|
start: this.indexToKey(startIndex),
|
|
132
133
|
end: this.indexToKey(endIndex)
|
|
133
134
|
})){
|
|
134
135
|
const msg = deserializeInboxMessage(msgBuffer);
|
|
135
|
-
if (msg.
|
|
136
|
-
throw new Error(`L1 to L2 message with index ${msg.index} has invalid
|
|
136
|
+
if (msg.checkpointNumber !== checkpointNumber) {
|
|
137
|
+
throw new Error(`L1 to L2 message with index ${msg.index} has invalid checkpoint number ${msg.checkpointNumber}`);
|
|
137
138
|
} else if (msg.index !== lastIndex + 1n) {
|
|
138
139
|
throw new Error(`Expected L1 to L2 message with index ${lastIndex + 1n} but got ${msg.index}`);
|
|
139
140
|
}
|
|
@@ -164,9 +165,9 @@ export class MessageStore {
|
|
|
164
165
|
this.#log.warn(`Deleted ${deleteCount} L1 to L2 messages from index ${startIndex} from the store`);
|
|
165
166
|
});
|
|
166
167
|
}
|
|
167
|
-
|
|
168
|
-
this.#log.debug(`Deleting L1 to L2 messages up to target
|
|
169
|
-
const startIndex = InboxLeaf.
|
|
168
|
+
rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber) {
|
|
169
|
+
this.#log.debug(`Deleting L1 to L2 messages up to target checkpoint ${targetCheckpointNumber}`);
|
|
170
|
+
const startIndex = InboxLeaf.smallestIndexForCheckpoint(CheckpointNumber(targetCheckpointNumber + 1));
|
|
170
171
|
return this.removeL1ToL2Messages(startIndex);
|
|
171
172
|
}
|
|
172
173
|
indexToKey(index) {
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
export {};
|
|
3
|
+
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmV0cmlldmUtY2FsbGRhdGEuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uLy4uL3NyYy9hcmNoaXZlci9sMS9iaW4vcmV0cmlldmUtY2FsbGRhdGEudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IiJ9
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"retrieve-calldata.d.ts","sourceRoot":"","sources":["../../../../src/archiver/l1/bin/retrieve-calldata.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { EthAddress } from '@aztec/foundation/eth-address';
|
|
4
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
5
|
+
import { createPublicClient, http } from 'viem';
|
|
6
|
+
import { mainnet } from 'viem/chains';
|
|
7
|
+
import { CalldataRetriever } from '../calldata_retriever.js';
|
|
8
|
+
const logger = createLogger('archiver:calldata-test');
|
|
9
|
+
/**
 * Parses CLI arguments and environment for the calldata test script.
 *
 * Positional args: <rollup-address> <tx-hash> [target-committee-size]
 * Env: ETHEREUM_HOST or RPC_URL must point at an Ethereum RPC endpoint.
 *
 * Exits the process with code 1 on any invalid input.
 *
 * @returns {{ rollupAddress: EthAddress, txHash: string, rpcUrl: string, targetCommitteeSize: number }}
 */
function parseArgs() {
  const args = process.argv.slice(2);
  if (args.length < 2) {
    // Single call keeps the eslint suppression in one place; output text is unchanged.
    // eslint-disable-next-line no-console
    console.error([
      'Usage: node index.js <rollup-address> <tx-hash> [target-committee-size]',
      '',
      'Environment variables:',
      '  ETHEREUM_HOST or RPC_URL - Ethereum RPC endpoint',
      '',
      'Example:',
      '  RPC_URL=https://eth-mainnet.g.alchemy.com/v2/YOUR-API-KEY \\',
      '  node index.js 0x1234... 0xabcd... 32',
    ].join('\n'));
    process.exit(1);
  }
  const rollupAddress = EthAddress.fromString(args[0]);
  const txHash = args[1];
  // Default committee size is 24 when the third argument is omitted.
  const targetCommitteeSize = args[2] ? Number.parseInt(args[2], 10) : 24;
  const rpcUrl = process.env.ETHEREUM_HOST || process.env.RPC_URL;
  if (!rpcUrl) {
    // eslint-disable-next-line no-console
    console.error('Error: ETHEREUM_HOST or RPC_URL environment variable must be set');
    process.exit(1);
  }
  // Number.isInteger also rejects NaN from non-numeric input; the previous
  // `<= 0 || > 256` check silently let NaN through (NaN fails both comparisons).
  if (!Number.isInteger(targetCommitteeSize) || targetCommitteeSize <= 0 || targetCommitteeSize > 256) {
    // eslint-disable-next-line no-console
    console.error('Error: target-committee-size must be between 1 and 256');
    process.exit(1);
  }
  return {
    rollupAddress,
    txHash,
    rpcUrl,
    targetCommitteeSize
  };
}
|
|
51
|
+
/**
 * Entry point: fetches a rollup transaction from L1, locates the
 * L2BlockProposed event in its receipt, and uses CalldataRetriever to
 * decode and print the checkpoint/block header carried in the calldata.
 *
 * Logs all results via `logger`; exits 0 on success, 1 on any failure.
 */
async function main() {
  const { rollupAddress, txHash, rpcUrl, targetCommitteeSize } = parseArgs();
  logger.info('Calldata Retriever Test Script');
  logger.info('===============================');
  logger.info(`Rollup Address: ${rollupAddress.toString()}`);
  logger.info(`Transaction Hash: ${txHash}`);
  logger.info(`RPC URL: ${rpcUrl}`);
  logger.info(`Target Committee Size: ${targetCommitteeSize}`);
  logger.info('');
  try {
    // Create viem public client
    // NOTE(review): chain is hard-coded to mainnet regardless of the RPC URL —
    // confirm this script is never pointed at a testnet endpoint.
    const publicClient = createPublicClient({
      chain: mainnet,
      transport: http(rpcUrl, {
        batch: false
      })
    });
    logger.info('Fetching transaction...');
    const tx = await publicClient.getTransaction({
      hash: txHash
    });
    if (!tx) {
      throw new Error(`Transaction ${txHash} not found`);
    }
    logger.info(`Transaction found in block ${tx.blockNumber}`);
    // For simplicity, use zero addresses for optional contract addresses
    // In production, these would be fetched from the rollup contract or configuration
    const slashingProposerAddress = EthAddress.ZERO;
    const governanceProposerAddress = EthAddress.ZERO;
    const slashFactoryAddress = undefined;
    logger.info('Using zero addresses for governance/slashing (can be configured if needed)');
    // Create CalldataRetriever
    // The same public client is passed twice (presumably as separate read
    // clients); fourth argument (undefined) is an optional dependency.
    const retriever = new CalldataRetriever(publicClient, publicClient, targetCommitteeSize, undefined, logger, {
      rollupAddress,
      governanceProposerAddress,
      slashingProposerAddress,
      slashFactoryAddress
    });
    // Extract L2 block number from transaction logs
    logger.info('Decoding transaction to extract L2 block number...');
    const receipt = await publicClient.getTransactionReceipt({
      hash: txHash
    });
    // Match logs emitted by the rollup contract whose topic[0] equals the
    // hard-coded L2BlockProposed signature hash.
    // NOTE(review): the event signature hash below is assumed correct for the
    // deployed rollup ABI — verify it against the contract artifacts.
    const l2BlockProposedEvent = receipt.logs.find((log)=>{
      try {
        // Try to match the L2BlockProposed event
        return log.address.toLowerCase() === rollupAddress.toString().toLowerCase() && log.topics[0] === '0x2f1d0e696fa5186494a2f2f89a0e0bcbb15d607f6c5eac4637e07e1e5e7d3c00' // L2BlockProposed event signature
        ;
      } catch {
        return false;
      }
    });
    let l2BlockNumber;
    if (l2BlockProposedEvent && l2BlockProposedEvent.topics[1]) {
      // L2 block number is typically the first indexed parameter
      l2BlockNumber = Number(BigInt(l2BlockProposedEvent.topics[1]));
      logger.info(`L2 Block Number (from event): ${l2BlockNumber}`);
    } else {
      // Fallback: try to extract from transaction data or use a default
      // NOTE(review): this falls back to the *L1* block number, which is not
      // an L2 block number — likely only useful as a debugging aid; confirm.
      logger.warn('Could not extract L2 block number from event, using block number as fallback');
      l2BlockNumber = Number(tx.blockNumber);
    }
    logger.info('');
    logger.info('Retrieving block header from rollup transaction...');
    logger.info('');
    // For this script, we don't have blob hashes or expected hashes, so pass empty arrays/objects
    const result = await retriever.getCheckpointFromRollupTx(txHash, [], CheckpointNumber(l2BlockNumber), {});
    logger.info(' Successfully retrieved block header!');
    logger.info('');
    logger.info('Block Header Details:');
    logger.info('====================');
    logger.info(`Checkpoint Number: ${result.checkpointNumber}`);
    logger.info(`Block Hash: ${result.blockHash}`);
    logger.info(`Archive Root: ${result.archiveRoot.toString()}`);
    logger.info('');
    logger.info('Header:');
    logger.info(`  Slot Number: ${result.header.slotNumber.toString()}`);
    logger.info(`  Timestamp: ${result.header.timestamp.toString()}`);
    logger.info(`  Coinbase: ${result.header.coinbase.toString()}`);
    logger.info(`  Fee Recipient: ${result.header.feeRecipient.toString()}`);
    logger.info(`  Total Mana Used: ${result.header.totalManaUsed.toString()}`);
    logger.info('');
    logger.info('Attestations:');
    logger.info(`  Count: ${result.attestations.length}`);
    logger.info(`  Non-empty attestations: ${result.attestations.filter((a)=>!a.signature.isEmpty()).length}`);
    process.exit(0);
  } catch (error) {
    logger.error('Error retrieving block header:');
    logger.error(error instanceof Error ? error.message : String(error));
    if (error instanceof Error && error.stack) {
      logger.debug(error.stack);
    }
    process.exit(1);
  }
}
|
|
146
|
+
// Run only when this file is executed directly (not when imported as a module).
const isDirectInvocation = import.meta.url === `file://${process.argv[1]}`;
if (isDirectInvocation) {
  // Fire-and-forget: main() handles its own errors and calls process.exit.
  void main();
}
|