@aztec/archiver 0.0.1-commit.d3ec352c → 0.0.1-commit.f295ac2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +147 -22
- package/dest/archiver.d.ts +135 -0
- package/dest/archiver.d.ts.map +1 -0
- package/dest/archiver.js +769 -0
- package/dest/config.d.ts +30 -0
- package/dest/config.d.ts.map +1 -0
- package/dest/{archiver/config.js → config.js} +21 -5
- package/dest/errors.d.ts +36 -0
- package/dest/errors.d.ts.map +1 -0
- package/dest/errors.js +54 -0
- package/dest/factory.d.ts +5 -6
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +82 -5
- package/dest/index.d.ts +10 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +8 -3
- package/dest/interfaces.d.ts +9 -0
- package/dest/interfaces.d.ts.map +1 -0
- package/dest/interfaces.js +3 -0
- package/dest/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/l1/bin/retrieve-calldata.js +149 -0
- package/dest/l1/calldata_retriever.d.ts +112 -0
- package/dest/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/l1/calldata_retriever.js +471 -0
- package/dest/l1/data_retrieval.d.ts +88 -0
- package/dest/l1/data_retrieval.d.ts.map +1 -0
- package/dest/{archiver → l1}/data_retrieval.js +75 -150
- package/dest/l1/debug_tx.d.ts +19 -0
- package/dest/l1/debug_tx.d.ts.map +1 -0
- package/dest/l1/debug_tx.js +73 -0
- package/dest/l1/spire_proposer.d.ts +70 -0
- package/dest/l1/spire_proposer.d.ts.map +1 -0
- package/dest/l1/spire_proposer.js +157 -0
- package/dest/l1/trace_tx.d.ts +97 -0
- package/dest/l1/trace_tx.d.ts.map +1 -0
- package/dest/l1/trace_tx.js +91 -0
- package/dest/l1/types.d.ts +12 -0
- package/dest/l1/types.d.ts.map +1 -0
- package/dest/l1/types.js +3 -0
- package/dest/l1/validate_trace.d.ts +29 -0
- package/dest/l1/validate_trace.d.ts.map +1 -0
- package/dest/l1/validate_trace.js +150 -0
- package/dest/modules/data_source_base.d.ts +85 -0
- package/dest/modules/data_source_base.d.ts.map +1 -0
- package/dest/modules/data_source_base.js +291 -0
- package/dest/modules/data_store_updater.d.ts +69 -0
- package/dest/modules/data_store_updater.d.ts.map +1 -0
- package/dest/modules/data_store_updater.js +304 -0
- package/dest/modules/instrumentation.d.ts +37 -0
- package/dest/modules/instrumentation.d.ts.map +1 -0
- package/dest/{archiver → modules}/instrumentation.js +22 -59
- package/dest/modules/l1_synchronizer.d.ts +75 -0
- package/dest/modules/l1_synchronizer.d.ts.map +1 -0
- package/dest/modules/l1_synchronizer.js +1113 -0
- package/dest/modules/validation.d.ts +17 -0
- package/dest/modules/validation.d.ts.map +1 -0
- package/dest/{archiver → modules}/validation.js +7 -1
- package/dest/store/block_store.d.ts +178 -0
- package/dest/store/block_store.d.ts.map +1 -0
- package/dest/store/block_store.js +680 -0
- package/dest/store/contract_class_store.d.ts +18 -0
- package/dest/store/contract_class_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +2 -2
- package/dest/store/contract_instance_store.d.ts +24 -0
- package/dest/store/contract_instance_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +1 -1
- package/dest/store/kv_archiver_store.d.ts +331 -0
- package/dest/store/kv_archiver_store.d.ts.map +1 -0
- package/dest/store/kv_archiver_store.js +438 -0
- package/dest/store/log_store.d.ts +45 -0
- package/dest/store/log_store.d.ts.map +1 -0
- package/dest/store/log_store.js +422 -0
- package/dest/store/message_store.d.ts +40 -0
- package/dest/store/message_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/message_store.js +15 -14
- package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
- package/dest/structs/data_retrieval.d.ts.map +1 -0
- package/dest/structs/inbox_message.d.ts +15 -0
- package/dest/structs/inbox_message.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/inbox_message.js +6 -6
- package/dest/structs/published.d.ts +2 -0
- package/dest/structs/published.d.ts.map +1 -0
- package/dest/test/fake_l1_state.d.ts +190 -0
- package/dest/test/fake_l1_state.d.ts.map +1 -0
- package/dest/test/fake_l1_state.js +383 -0
- package/dest/test/index.d.ts +2 -1
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +1 -0
- package/dest/test/mock_archiver.d.ts +5 -6
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +6 -11
- package/dest/test/mock_l1_to_l2_message_source.d.ts +6 -7
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +19 -14
- package/dest/test/mock_l2_block_source.d.ts +28 -14
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +160 -52
- package/dest/test/mock_structs.d.ts +78 -3
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +141 -10
- package/package.json +17 -18
- package/src/archiver.ts +525 -0
- package/src/{archiver/config.ts → config.ts} +28 -12
- package/src/errors.ts +90 -0
- package/src/factory.ts +118 -6
- package/src/index.ts +10 -3
- package/src/interfaces.ts +9 -0
- package/src/l1/README.md +98 -0
- package/src/l1/bin/retrieve-calldata.ts +182 -0
- package/src/l1/calldata_retriever.ts +641 -0
- package/src/{archiver → l1}/data_retrieval.ts +136 -218
- package/src/l1/debug_tx.ts +99 -0
- package/src/l1/spire_proposer.ts +160 -0
- package/src/l1/trace_tx.ts +128 -0
- package/src/l1/types.ts +13 -0
- package/src/l1/validate_trace.ts +211 -0
- package/src/modules/data_source_base.ts +414 -0
- package/src/modules/data_store_updater.ts +419 -0
- package/src/{archiver → modules}/instrumentation.ts +24 -59
- package/src/modules/l1_synchronizer.ts +931 -0
- package/src/{archiver → modules}/validation.ts +11 -6
- package/src/store/block_store.ts +919 -0
- package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +2 -2
- package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +2 -2
- package/src/store/kv_archiver_store.ts +622 -0
- package/src/store/log_store.ts +552 -0
- package/src/{archiver/kv_archiver_store → store}/message_store.ts +21 -18
- package/src/{archiver/structs → structs}/inbox_message.ts +7 -8
- package/src/{archiver/structs → structs}/published.ts +0 -1
- package/src/test/fake_l1_state.ts +599 -0
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/index.ts +1 -0
- package/src/test/mock_archiver.ts +8 -13
- package/src/test/mock_l1_to_l2_message_source.ts +16 -15
- package/src/test/mock_l2_block_source.ts +184 -64
- package/src/test/mock_structs.ts +256 -11
- package/dest/archiver/archiver.d.ts +0 -290
- package/dest/archiver/archiver.d.ts.map +0 -1
- package/dest/archiver/archiver.js +0 -1434
- package/dest/archiver/archiver_store.d.ts +0 -256
- package/dest/archiver/archiver_store.d.ts.map +0 -1
- package/dest/archiver/archiver_store.js +0 -4
- package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
- package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
- package/dest/archiver/archiver_store_test_suite.js +0 -1289
- package/dest/archiver/config.d.ts +0 -21
- package/dest/archiver/config.d.ts.map +0 -1
- package/dest/archiver/data_retrieval.d.ts +0 -80
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/errors.d.ts +0 -12
- package/dest/archiver/errors.d.ts.map +0 -1
- package/dest/archiver/errors.js +0 -17
- package/dest/archiver/index.d.ts +0 -7
- package/dest/archiver/index.d.ts.map +0 -1
- package/dest/archiver/index.js +0 -4
- package/dest/archiver/instrumentation.d.ts +0 -35
- package/dest/archiver/instrumentation.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +0 -125
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.js +0 -371
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -169
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +0 -296
- package/dest/archiver/kv_archiver_store/log_store.d.ts +0 -49
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/log_store.js +0 -337
- package/dest/archiver/kv_archiver_store/message_store.d.ts +0 -39
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
- package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/structs/inbox_message.d.ts +0 -15
- package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
- package/dest/archiver/structs/published.d.ts +0 -3
- package/dest/archiver/structs/published.d.ts.map +0 -1
- package/dest/archiver/validation.d.ts +0 -17
- package/dest/archiver/validation.d.ts.map +0 -1
- package/dest/rpc/index.d.ts +0 -9
- package/dest/rpc/index.d.ts.map +0 -1
- package/dest/rpc/index.js +0 -15
- package/src/archiver/archiver.ts +0 -1880
- package/src/archiver/archiver_store.ts +0 -310
- package/src/archiver/archiver_store_test_suite.ts +0 -1295
- package/src/archiver/errors.ts +0 -26
- package/src/archiver/index.ts +0 -6
- package/src/archiver/kv_archiver_store/block_store.ts +0 -482
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +0 -423
- package/src/archiver/kv_archiver_store/log_store.ts +0 -407
- package/src/rpc/index.ts +0 -16
- /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
- /package/dest/{archiver/structs → structs}/published.js +0 -0
- /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
|
@@ -0,0 +1,552 @@
|
|
|
1
|
+
import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
|
|
2
|
+
import { BlockNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { filterAsync } from '@aztec/foundation/collection';
|
|
4
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
5
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
6
|
+
import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
|
|
7
|
+
import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
|
|
8
|
+
import type { AztecAddress } from '@aztec/stdlib/aztec-address';
|
|
9
|
+
import { L2BlockHash, L2BlockNew } from '@aztec/stdlib/block';
|
|
10
|
+
import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
|
|
11
|
+
import {
|
|
12
|
+
ContractClassLog,
|
|
13
|
+
ExtendedContractClassLog,
|
|
14
|
+
ExtendedPublicLog,
|
|
15
|
+
type LogFilter,
|
|
16
|
+
LogId,
|
|
17
|
+
PublicLog,
|
|
18
|
+
type SiloedTag,
|
|
19
|
+
Tag,
|
|
20
|
+
TxScopedL2Log,
|
|
21
|
+
} from '@aztec/stdlib/logs';
|
|
22
|
+
|
|
23
|
+
import type { BlockStore } from './block_store.js';
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* A store for logs
|
|
27
|
+
*/
|
|
28
|
+
export class LogStore {
|
|
29
|
+
// `tag` --> private logs
#privateLogsByTag: AztecAsyncMap<string, Buffer[]>;
// `{contractAddress}_${tag}` --> public logs
#publicLogsByContractAndTag: AztecAsyncMap<string, Buffer[]>;
// block number --> list of tag keys written while adding that block (consumed by deleteLogs)
#privateLogKeysByBlock: AztecAsyncMap<number, string[]>;
// block number --> list of `{contractAddress}_${tag}` keys written while adding that block
#publicLogKeysByBlock: AztecAsyncMap<number, string[]>;
// block number --> packed buffer of all public logs in the block, prefixed with the block hash
#publicLogsByBlock: AztecAsyncMap<number, Buffer>;
// block number --> packed buffer of all contract class logs in the block, prefixed with the block hash
#contractClassLogsByBlock: AztecAsyncMap<number, Buffer>;
// Page-size cap for filter queries — presumably enforced by #accumulateLogs (not fully visible here); TODO confirm
#logsMaxPageSize: number;
#log = createLogger('archiver:log_store');

/**
 * Creates a LogStore backed by the given KV store.
 *
 * @param db - The async KV store that holds all log maps.
 * @param blockStore - Used to resolve a tx hash into its (block number, tx index) location.
 * @param logsMaxPageSize - Maximum number of logs returned per filter query page. Defaults to 1000.
 */
constructor(
  private db: AztecAsyncKVStore,
  private blockStore: BlockStore,
  logsMaxPageSize: number = 1000,
) {
  this.#privateLogsByTag = db.openMap('archiver_private_tagged_logs_by_tag');
  this.#publicLogsByContractAndTag = db.openMap('archiver_public_tagged_logs_by_tag');
  this.#privateLogKeysByBlock = db.openMap('archiver_private_log_keys_by_block');
  this.#publicLogKeysByBlock = db.openMap('archiver_public_log_keys_by_block');
  this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
  this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');

  this.#logsMaxPageSize = logsMaxPageSize;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Collects the tagged logs of a single block into two maps: one for private
 * logs (keyed by tag) and one for public logs (keyed by contract address plus tag).
 *
 * @param block - The L2 block to scan.
 * @returns The private and public tagged-log maps for the block.
 */
#extractTaggedLogsFromBlock(block: L2BlockNew) {
  // SiloedTag (as string) -> serialized logs.
  const privateTaggedLogs = new Map<string, Buffer[]>();
  // "{contractAddress}_{tag}" (as string) -> serialized logs.
  const publicTaggedLogs = new Map<string, Buffer[]>();

  for (const txEffect of block.body.txEffects) {
    const { txHash } = txEffect;

    for (const log of txEffect.privateLogs) {
      // Private logs carry a SiloedTag in their first field (siloing already done by the kernel).
      const tag = log.fields[0];
      this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);

      const serialized = new TxScopedL2Log(
        txHash,
        block.number,
        block.timestamp,
        log.getEmittedFields(),
        txEffect.noteHashes,
        txEffect.nullifiers[0],
      ).toBuffer();

      const bucket = privateTaggedLogs.get(tag.toString()) ?? [];
      bucket.push(serialized);
      privateTaggedLogs.set(tag.toString(), bucket);
    }

    for (const log of txEffect.publicLogs) {
      // Public logs carry an unsiloed Tag, so the emitting contract address is part of the key.
      const tag = log.fields[0];
      const contractAddress = log.contractAddress;
      const key = `${contractAddress.toString()}_${tag.toString()}`;
      this.#log.debug(
        `Found public log with tag ${tag.toString()} from contract ${contractAddress.toString()} in block ${block.number}`,
      );

      const serialized = new TxScopedL2Log(
        txHash,
        block.number,
        block.timestamp,
        log.getEmittedFields(),
        txEffect.noteHashes,
        txEffect.nullifiers[0],
      ).toBuffer();

      const bucket = publicTaggedLogs.get(key) ?? [];
      bucket.push(serialized);
      publicTaggedLogs.set(key, bucket);
    }
  }

  return { privateTaggedLogs, publicTaggedLogs };
}
|
|
115
|
+
|
|
116
|
+
/**
 * Extracts and aggregates tagged logs from a list of blocks.
 *
 * @param blocks - The blocks to extract logs from.
 * @returns A map from tag (as string) to serialized private logs, and a map from
 * "{contractAddress}_{tag}" (as string) to serialized public logs, merged across all blocks.
 */
#extractTaggedLogs(blocks: L2BlockNew[]): {
  privateTaggedLogs: Map<string, Buffer[]>;
  publicTaggedLogs: Map<string, Buffer[]>;
} {
  const privateTaggedLogs = new Map<string, Buffer[]>();
  const publicTaggedLogs = new Map<string, Buffer[]>();

  // Merge each block's per-block maps into a single pair of maps, concatenating
  // the log arrays of keys that appear in more than one block (block order preserved).
  for (const block of blocks) {
    const perBlock = this.#extractTaggedLogsFromBlock(block);

    for (const [tag, logs] of perBlock.privateTaggedLogs.entries()) {
      privateTaggedLogs.set(tag, (privateTaggedLogs.get(tag) ?? []).concat(logs));
    }

    for (const [key, logs] of perBlock.publicTaggedLogs.entries()) {
      publicTaggedLogs.set(key, (publicTaggedLogs.get(key) ?? []).concat(logs));
    }
  }

  return { privateTaggedLogs, publicTaggedLogs };
}
|
|
147
|
+
|
|
148
|
+
/**
 * Stores the private logs of the given blocks, indexed by tag.
 * Blocks that already have an entry in #privateLogKeysByBlock are skipped, making re-adds idempotent.
 */
async #addPrivateLogs(blocks: L2BlockNew[]): Promise<void> {
  // Skip blocks whose private logs were already stored.
  const newBlocks = await filterAsync(
    blocks,
    async block => !(await this.#privateLogKeysByBlock.hasAsync(block.number)),
  );

  const { privateTaggedLogs } = this.#extractTaggedLogs(newBlocks);
  const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());

  // Fetch logs already stored under the affected tags so the new logs can be appended to them.
  const currentPrivateTaggedLogs = await Promise.all(
    keysOfPrivateLogsToUpdate.map(async key => ({
      tag: key,
      logBuffers: await this.#privateLogsByTag.getAsync(key),
    })),
  );

  // Prepend the previously stored logs to the newly extracted ones for each tag.
  for (const taggedLogBuffer of currentPrivateTaggedLogs) {
    if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
      privateTaggedLogs.set(
        taggedLogBuffer.tag,
        taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
      );
    }
  }

  // NOTE(review): the loop below writes EVERY aggregated tag for EVERY new block, and records the
  // full tag set in every block's key list — a tag seen only in block A is also listed under
  // block B, so deleteLogs for B would remove A's logs. Confirm whether per-block tag lists were
  // intended here.
  for (const block of newBlocks) {
    const privateTagsInBlock: string[] = [];
    for (const [tag, logs] of privateTaggedLogs.entries()) {
      await this.#privateLogsByTag.set(tag, logs);
      privateTagsInBlock.push(tag);
    }
    await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
  }
}
|
|
182
|
+
|
|
183
|
+
/**
 * Stores the public logs of the given blocks, both indexed by "{contractAddress}_{tag}" key and
 * as one packed per-block buffer. Blocks already present in #publicLogKeysByBlock are skipped.
 */
async #addPublicLogs(blocks: L2BlockNew[]): Promise<void> {
  // Skip blocks whose public logs were already stored.
  const newBlocks = await filterAsync(
    blocks,
    async block => !(await this.#publicLogKeysByBlock.hasAsync(block.number)),
  );

  const { publicTaggedLogs } = this.#extractTaggedLogs(newBlocks);
  const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());

  // Fetch logs already stored under the affected keys so the new logs can be appended to them.
  const currentPublicTaggedLogs = await Promise.all(
    keysOfPublicLogsToUpdate.map(async key => ({
      tag: key,
      logBuffers: await this.#publicLogsByContractAndTag.getAsync(key),
    })),
  );

  // Prepend the previously stored logs to the newly extracted ones for each key.
  for (const taggedLogBuffer of currentPublicTaggedLogs) {
    if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
      publicTaggedLogs.set(
        taggedLogBuffer.tag,
        taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.tag)!),
      );
    }
  }

  // NOTE(review): as in #addPrivateLogs, every aggregated key is written and recorded for every
  // new block, so each block's key list contains keys from all blocks in this batch — confirm
  // whether per-block key lists were intended.
  for (const block of newBlocks) {
    const blockHash = await block.hash();
    const publicTagsInBlock: string[] = [];
    for (const [tag, logs] of publicTaggedLogs.entries()) {
      await this.#publicLogsByContractAndTag.set(tag, logs);
      publicTagsInBlock.push(tag);
    }
    await this.#publicLogKeysByBlock.set(block.number, publicTagsInBlock);

    // Serialize the block's public logs as [txIndex, logCount, ...logs] records per tx effect.
    const publicLogsInBlock = block.body.txEffects
      .map((txEffect, txIndex) =>
        [
          numToUInt32BE(txIndex),
          numToUInt32BE(txEffect.publicLogs.length),
          txEffect.publicLogs.map(log => log.toBuffer()),
        ].flat(),
      )
      .flat();

    await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
  }
}
|
|
230
|
+
|
|
231
|
+
async #addContractClassLogs(blocks: L2BlockNew[]): Promise<void> {
|
|
232
|
+
const newBlocks = await filterAsync(
|
|
233
|
+
blocks,
|
|
234
|
+
async block => !(await this.#contractClassLogsByBlock.hasAsync(block.number)),
|
|
235
|
+
);
|
|
236
|
+
|
|
237
|
+
for (const block of newBlocks) {
|
|
238
|
+
const blockHash = await block.hash();
|
|
239
|
+
|
|
240
|
+
const contractClassLogsInBlock = block.body.txEffects
|
|
241
|
+
.map((txEffect, txIndex) =>
|
|
242
|
+
[
|
|
243
|
+
numToUInt32BE(txIndex),
|
|
244
|
+
numToUInt32BE(txEffect.contractClassLogs.length),
|
|
245
|
+
txEffect.contractClassLogs.map(log => log.toBuffer()),
|
|
246
|
+
].flat(),
|
|
247
|
+
)
|
|
248
|
+
.flat();
|
|
249
|
+
|
|
250
|
+
await this.#contractClassLogsByBlock.set(
|
|
251
|
+
block.number,
|
|
252
|
+
this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
|
|
253
|
+
);
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
/**
|
|
258
|
+
* Append new logs to the store's list.
|
|
259
|
+
* @param blocks - The blocks for which to add the logs.
|
|
260
|
+
* @returns True if the operation is successful.
|
|
261
|
+
*/
|
|
262
|
+
addLogs(blocks: L2BlockNew[]): Promise<boolean> {
|
|
263
|
+
return this.db.transactionAsync(async () => {
|
|
264
|
+
await Promise.all([
|
|
265
|
+
this.#addPrivateLogs(blocks),
|
|
266
|
+
this.#addPublicLogs(blocks),
|
|
267
|
+
this.#addContractClassLogs(blocks),
|
|
268
|
+
]);
|
|
269
|
+
return true;
|
|
270
|
+
});
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
#packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
|
|
274
|
+
return Buffer.concat([blockHash.toBuffer(), ...data]);
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
/**
 * Reads the block-hash prefix written by #packWithBlockHash from the reader.
 * @throws If the buffer is empty, i.e. no hash prefix is present.
 */
#unpackBlockHash(reader: BufferReader): L2BlockHash {
  if (reader.remainingBytes() === 0) {
    throw new Error('Failed to read block hash from log entry buffer');
  }

  const hashField = reader.readObject(Fr);
  return L2BlockHash.fromField(hashField);
}
|
|
286
|
+
|
|
287
|
+
/**
 * Deletes all logs stored for the given blocks (e.g. on a chain reorg/rollback).
 * @param blocks - The blocks whose logs should be removed.
 * @returns True if the operation is successful.
 */
deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
  return this.db.transactionAsync(async () => {
    await Promise.all(
      blocks.map(async block => {
        // Delete private logs
        // NOTE(review): this removes the ENTIRE tag entry; if a tag also holds logs from blocks
        // that are not being deleted, those are lost too. Confirm against how #addPrivateLogs
        // records per-block tag lists.
        const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? [];
        await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag)));

        // Delete public logs
        const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? [];
        await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key)));
      }),
    );

    // Drop the per-block key lists and packed per-block buffers.
    await Promise.all(
      blocks.map(block =>
        Promise.all([
          this.#publicLogsByBlock.delete(block.number),
          this.#privateLogKeysByBlock.delete(block.number),
          this.#publicLogKeysByBlock.delete(block.number),
          this.#contractClassLogsByBlock.delete(block.number),
        ]),
      ),
    );

    return true;
  });
}
|
|
315
|
+
|
|
316
|
+
/**
|
|
317
|
+
* Gets all private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
|
|
318
|
+
* array implies no logs match that tag.
|
|
319
|
+
*/
|
|
320
|
+
async getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
|
|
321
|
+
const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
|
|
322
|
+
|
|
323
|
+
return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
/**
|
|
327
|
+
* Gets all public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
|
|
328
|
+
* logs is returned. An empty array implies no logs match that tag.
|
|
329
|
+
*/
|
|
330
|
+
async getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
|
|
331
|
+
const logs = await Promise.all(
|
|
332
|
+
tags.map(tag => {
|
|
333
|
+
const key = `${contractAddress.toString()}_${tag.value.toString()}`;
|
|
334
|
+
return this.#publicLogsByContractAndTag.getAsync(key);
|
|
335
|
+
}),
|
|
336
|
+
);
|
|
337
|
+
return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
/**
|
|
341
|
+
* Gets public logs based on the provided filter.
|
|
342
|
+
* @param filter - The filter to apply to the logs.
|
|
343
|
+
* @returns The requested logs.
|
|
344
|
+
*/
|
|
345
|
+
getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
|
|
346
|
+
if (filter.afterLog) {
|
|
347
|
+
return this.#filterPublicLogsBetweenBlocks(filter);
|
|
348
|
+
} else if (filter.txHash) {
|
|
349
|
+
return this.#filterPublicLogsOfTx(filter);
|
|
350
|
+
} else {
|
|
351
|
+
return this.#filterPublicLogsBetweenBlocks(filter);
|
|
352
|
+
}
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
async #filterPublicLogsOfTx(filter: LogFilter): Promise<GetPublicLogsResponse> {
|
|
356
|
+
if (!filter.txHash) {
|
|
357
|
+
throw new Error('Missing txHash');
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
const [blockNumber, txIndex] = (await this.blockStore.getTxLocation(filter.txHash)) ?? [];
|
|
361
|
+
if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
|
|
362
|
+
return { logs: [], maxLogsHit: false };
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
|
|
366
|
+
const publicLogsInBlock: [PublicLog[]] = [[]];
|
|
367
|
+
const reader = new BufferReader(buffer);
|
|
368
|
+
|
|
369
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
370
|
+
|
|
371
|
+
while (reader.remainingBytes() > 0) {
|
|
372
|
+
const indexOfTx = reader.readNumber();
|
|
373
|
+
const numLogsInTx = reader.readNumber();
|
|
374
|
+
publicLogsInBlock[indexOfTx] = [];
|
|
375
|
+
for (let i = 0; i < numLogsInTx; i++) {
|
|
376
|
+
publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
|
|
377
|
+
}
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
const txLogs = publicLogsInBlock[txIndex];
|
|
381
|
+
|
|
382
|
+
const logs: ExtendedPublicLog[] = [];
|
|
383
|
+
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
|
|
384
|
+
|
|
385
|
+
return { logs, maxLogsHit };
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
/**
 * Returns public logs over a block range, honoring fromBlock/toBlock and the afterLog cursor.
 * Scanning stops as soon as #accumulateLogs reports the page limit was reached.
 */
async #filterPublicLogsBetweenBlocks(filter: LogFilter): Promise<GetPublicLogsResponse> {
  // Resume from the cursor block if present, otherwise from fromBlock, never below the first L2 block.
  const start =
    filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
  const end = filter.toBlock;

  // Inverted range: nothing to scan. (Returns maxLogsHit=true here — presumably to stop callers
  // from paging further; confirm this is the intended signal.)
  if (typeof end === 'number' && end < start) {
    return {
      logs: [],
      maxLogsHit: true,
    };
  }

  const logs: ExtendedPublicLog[] = [];

  let maxLogsHit = false;
  loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
    // Decode the per-block buffer: block-hash prefix, then [txIndex, logCount, ...logs] records.
    const publicLogsInBlock: [PublicLog[]] = [[]];
    const reader = new BufferReader(logBuffer);

    const blockHash = this.#unpackBlockHash(reader);

    while (reader.remainingBytes() > 0) {
      const indexOfTx = reader.readNumber();
      const numLogsInTx = reader.readNumber();
      publicLogsInBlock[indexOfTx] = [];
      for (let i = 0; i < numLogsInTx; i++) {
        publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
      }
    }
    // NOTE(review): afterLog.txIndex is applied to EVERY block in the range, not only the cursor's
    // block — txs with a lower index in later blocks are skipped. Confirm whether it should reset
    // to 0 once blockNumber exceeds afterLog.blockNumber.
    for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
      const txLogs = publicLogsInBlock[txIndex];
      maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
      if (maxLogsHit) {
        this.#log.debug(`Max logs hit at block ${blockNumber}`);
        break loopOverBlocks;
      }
    }
  }

  return { logs, maxLogsHit };
}
|
|
429
|
+
|
|
430
|
+
/**
|
|
431
|
+
* Gets contract class logs based on the provided filter.
|
|
432
|
+
* @param filter - The filter to apply to the logs.
|
|
433
|
+
* @returns The requested logs.
|
|
434
|
+
*/
|
|
435
|
+
getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
|
|
436
|
+
if (filter.afterLog) {
|
|
437
|
+
return this.#filterContractClassLogsBetweenBlocks(filter);
|
|
438
|
+
} else if (filter.txHash) {
|
|
439
|
+
return this.#filterContractClassLogsOfTx(filter);
|
|
440
|
+
} else {
|
|
441
|
+
return this.#filterContractClassLogsBetweenBlocks(filter);
|
|
442
|
+
}
|
|
443
|
+
}
|
|
444
|
+
|
|
445
|
+
async #filterContractClassLogsOfTx(filter: LogFilter): Promise<GetContractClassLogsResponse> {
|
|
446
|
+
if (!filter.txHash) {
|
|
447
|
+
throw new Error('Missing txHash');
|
|
448
|
+
}
|
|
449
|
+
|
|
450
|
+
const [blockNumber, txIndex] = (await this.blockStore.getTxLocation(filter.txHash)) ?? [];
|
|
451
|
+
if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
|
|
452
|
+
return { logs: [], maxLogsHit: false };
|
|
453
|
+
}
|
|
454
|
+
const contractClassLogsBuffer = (await this.#contractClassLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
|
|
455
|
+
const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
|
|
456
|
+
|
|
457
|
+
const reader = new BufferReader(contractClassLogsBuffer);
|
|
458
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
459
|
+
|
|
460
|
+
while (reader.remainingBytes() > 0) {
|
|
461
|
+
const indexOfTx = reader.readNumber();
|
|
462
|
+
const numLogsInTx = reader.readNumber();
|
|
463
|
+
contractClassLogsInBlock[indexOfTx] = [];
|
|
464
|
+
for (let i = 0; i < numLogsInTx; i++) {
|
|
465
|
+
contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
|
|
469
|
+
const txLogs = contractClassLogsInBlock[txIndex];
|
|
470
|
+
|
|
471
|
+
const logs: ExtendedContractClassLog[] = [];
|
|
472
|
+
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
|
|
473
|
+
|
|
474
|
+
return { logs, maxLogsHit };
|
|
475
|
+
}
|
|
476
|
+
|
|
477
|
+
/**
 * Returns contract class logs over a block range, honoring fromBlock/toBlock and the afterLog
 * cursor. Scanning stops as soon as #accumulateLogs reports the page limit was reached.
 */
async #filterContractClassLogsBetweenBlocks(filter: LogFilter): Promise<GetContractClassLogsResponse> {
  // Resume from the cursor block if present, otherwise from fromBlock, never below the first L2 block.
  const start =
    filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
  const end = filter.toBlock;

  // Inverted range: nothing to scan. (Returns maxLogsHit=true here — presumably to stop callers
  // from paging further; confirm this is the intended signal.)
  if (typeof end === 'number' && end < start) {
    return {
      logs: [],
      maxLogsHit: true,
    };
  }

  const logs: ExtendedContractClassLog[] = [];

  let maxLogsHit = false;
  loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#contractClassLogsByBlock.entriesAsync({
    start,
    end,
  })) {
    // Decode the per-block buffer: block-hash prefix, then [txIndex, logCount, ...logs] records.
    const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
    const reader = new BufferReader(logBuffer);
    const blockHash = this.#unpackBlockHash(reader);
    while (reader.remainingBytes() > 0) {
      const indexOfTx = reader.readNumber();
      const numLogsInTx = reader.readNumber();
      contractClassLogsInBlock[indexOfTx] = [];
      for (let i = 0; i < numLogsInTx; i++) {
        contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
      }
    }
    // NOTE(review): as in #filterPublicLogsBetweenBlocks, afterLog.txIndex is applied to EVERY
    // block in the range, not only the cursor's block — confirm whether it should reset to 0 for
    // blocks after afterLog.blockNumber.
    for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
      const txLogs = contractClassLogsInBlock[txIndex];
      maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
      if (maxLogsHit) {
        this.#log.debug(`Max logs hit at block ${blockNumber}`);
        break loopOverBlocks;
      }
    }
  }

  return { logs, maxLogsHit };
}
|
|
519
|
+
|
|
520
|
+
/**
 * Appends to `results` the logs of one tx that pass `filter`, wrapping each in
 * its extended form with a `LogId` locating it (block, tx, log index).
 *
 * Honors `filter.afterLog.logIndex` as an exclusive lower bound on the log index
 * and `filter.contractAddress` as an emitter filter.
 *
 * @returns true when `results` reached `#logsMaxPageSize` (the log that filled
 *   the page is included); false when the whole tx was consumed.
 * @throws Error on a log that is neither a ContractClassLog nor a PublicLog.
 */
#accumulateLogs(
  results: (ExtendedContractClassLog | ExtendedPublicLog)[],
  blockNumber: number,
  blockHash: L2BlockHash,
  txIndex: number,
  txLogs: (ContractClassLog | PublicLog)[],
  filter: LogFilter = {},
): boolean {
  // Resume just past the cursor's log index when one is provided.
  const firstLogIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;

  for (let logIndex = firstLogIndex; logIndex < txLogs.length; logIndex++) {
    const log = txLogs[logIndex];

    // Guard clause: skip logs emitted by a different contract than requested.
    if (filter.contractAddress && !log.contractAddress.equals(filter.contractAddress)) {
      continue;
    }

    const id = new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex);
    if (log instanceof ContractClassLog) {
      results.push(new ExtendedContractClassLog(id, log));
    } else if (log instanceof PublicLog) {
      results.push(new ExtendedPublicLog(id, log));
    } else {
      throw new Error('Unknown log type');
    }

    // Page is full — report the cap was hit, keeping the log just pushed.
    if (results.length >= this.#logsMaxPageSize) {
      return true;
    }
  }

  return false;
}
|
|
552
|
+
}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
|
-
import type { L1BlockId } from '@aztec/ethereum';
|
|
1
|
+
import type { L1BlockId } from '@aztec/ethereum/l1-types';
|
|
2
|
+
import { CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
2
3
|
import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
|
|
3
|
-
import { Fr } from '@aztec/foundation/
|
|
4
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
4
5
|
import { toArray } from '@aztec/foundation/iterable';
|
|
5
6
|
import { createLogger } from '@aztec/foundation/log';
|
|
6
7
|
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
@@ -113,20 +114,20 @@ export class MessageStore {
|
|
|
113
114
|
);
|
|
114
115
|
}
|
|
115
116
|
|
|
116
|
-
// Check index corresponds to the
|
|
117
|
-
const [expectedStart, expectedEnd] = InboxLeaf.
|
|
117
|
+
// Check index corresponds to the checkpoint number.
|
|
118
|
+
const [expectedStart, expectedEnd] = InboxLeaf.indexRangeForCheckpoint(message.checkpointNumber);
|
|
118
119
|
if (message.index < expectedStart || message.index >= expectedEnd) {
|
|
119
120
|
throw new MessageStoreError(
|
|
120
121
|
`Invalid index ${message.index} for incoming L1 to L2 message ${message.leaf.toString()} ` +
|
|
121
|
-
`at
|
|
122
|
+
`at checkpoint ${message.checkpointNumber} (expected value in range [${expectedStart}, ${expectedEnd}))`,
|
|
122
123
|
message,
|
|
123
124
|
);
|
|
124
125
|
}
|
|
125
126
|
|
|
126
|
-
// Check there are no gaps in the indices within the same
|
|
127
|
+
// Check there are no gaps in the indices within the same checkpoint.
|
|
127
128
|
if (
|
|
128
129
|
lastMessage &&
|
|
129
|
-
message.
|
|
130
|
+
message.checkpointNumber === lastMessage.checkpointNumber &&
|
|
130
131
|
message.index !== lastMessage.index + 1n
|
|
131
132
|
) {
|
|
132
133
|
throw new MessageStoreError(
|
|
@@ -138,12 +139,12 @@ export class MessageStore {
|
|
|
138
139
|
|
|
139
140
|
// Check the first message in a block has the correct index.
|
|
140
141
|
if (
|
|
141
|
-
(!lastMessage || message.
|
|
142
|
-
message.index !==
|
|
142
|
+
(!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) &&
|
|
143
|
+
message.index !== expectedStart
|
|
143
144
|
) {
|
|
144
145
|
throw new MessageStoreError(
|
|
145
|
-
`Message ${message.leaf.toString()} for
|
|
146
|
-
`${message.index} (expected ${
|
|
146
|
+
`Message ${message.leaf.toString()} for checkpoint ${message.checkpointNumber} has wrong index ` +
|
|
147
|
+
`${message.index} (expected ${expectedStart})`,
|
|
147
148
|
message,
|
|
148
149
|
);
|
|
149
150
|
}
|
|
@@ -184,10 +185,10 @@ export class MessageStore {
|
|
|
184
185
|
return msg ? deserializeInboxMessage(msg) : undefined;
|
|
185
186
|
}
|
|
186
187
|
|
|
187
|
-
public async getL1ToL2Messages(
|
|
188
|
+
public async getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
|
|
188
189
|
const messages: Fr[] = [];
|
|
189
190
|
|
|
190
|
-
const [startIndex, endIndex] = InboxLeaf.
|
|
191
|
+
const [startIndex, endIndex] = InboxLeaf.indexRangeForCheckpoint(checkpointNumber);
|
|
191
192
|
let lastIndex = startIndex - 1n;
|
|
192
193
|
|
|
193
194
|
for await (const msgBuffer of this.#l1ToL2Messages.valuesAsync({
|
|
@@ -195,8 +196,10 @@ export class MessageStore {
|
|
|
195
196
|
end: this.indexToKey(endIndex),
|
|
196
197
|
})) {
|
|
197
198
|
const msg = deserializeInboxMessage(msgBuffer);
|
|
198
|
-
if (msg.
|
|
199
|
-
throw new Error(
|
|
199
|
+
if (msg.checkpointNumber !== checkpointNumber) {
|
|
200
|
+
throw new Error(
|
|
201
|
+
`L1 to L2 message with index ${msg.index} has invalid checkpoint number ${msg.checkpointNumber}`,
|
|
202
|
+
);
|
|
200
203
|
} else if (msg.index !== lastIndex + 1n) {
|
|
201
204
|
throw new Error(`Expected L1 to L2 message with index ${lastIndex + 1n} but got ${msg.index}`);
|
|
202
205
|
}
|
|
@@ -232,9 +235,9 @@ export class MessageStore {
|
|
|
232
235
|
});
|
|
233
236
|
}
|
|
234
237
|
|
|
235
|
-
public
|
|
236
|
-
this.#log.debug(`Deleting L1 to L2 messages up to target
|
|
237
|
-
const startIndex = InboxLeaf.
|
|
238
|
+
public rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber: CheckpointNumber): Promise<void> {
|
|
239
|
+
this.#log.debug(`Deleting L1 to L2 messages up to target checkpoint ${targetCheckpointNumber}`);
|
|
240
|
+
const startIndex = InboxLeaf.smallestIndexForCheckpoint(CheckpointNumber(targetCheckpointNumber + 1));
|
|
238
241
|
return this.removeL1ToL2Messages(startIndex);
|
|
239
242
|
}
|
|
240
243
|
|
|
@@ -1,14 +1,13 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
2
2
|
import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
|
|
3
|
-
import { keccak256 } from '@aztec/foundation/crypto';
|
|
4
|
-
import { Fr } from '@aztec/foundation/
|
|
3
|
+
import { keccak256 } from '@aztec/foundation/crypto/keccak';
|
|
4
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
5
5
|
import { BufferReader, bigintToUInt64BE, numToUInt32BE, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
6
6
|
|
|
7
7
|
export type InboxMessage = {
|
|
8
8
|
index: bigint;
|
|
9
9
|
leaf: Fr;
|
|
10
|
-
|
|
11
|
-
l2BlockNumber: BlockNumber;
|
|
10
|
+
checkpointNumber: CheckpointNumber;
|
|
12
11
|
l1BlockNumber: bigint; // L1 block number - NOT Aztec L2
|
|
13
12
|
l1BlockHash: Buffer32;
|
|
14
13
|
rollingHash: Buffer16;
|
|
@@ -25,7 +24,7 @@ export function serializeInboxMessage(message: InboxMessage): Buffer {
|
|
|
25
24
|
message.leaf,
|
|
26
25
|
message.l1BlockHash,
|
|
27
26
|
numToUInt32BE(Number(message.l1BlockNumber)),
|
|
28
|
-
numToUInt32BE(message.
|
|
27
|
+
numToUInt32BE(message.checkpointNumber),
|
|
29
28
|
message.rollingHash,
|
|
30
29
|
]);
|
|
31
30
|
}
|
|
@@ -36,7 +35,7 @@ export function deserializeInboxMessage(buffer: Buffer): InboxMessage {
|
|
|
36
35
|
const leaf = reader.readObject(Fr);
|
|
37
36
|
const l1BlockHash = reader.readObject(Buffer32);
|
|
38
37
|
const l1BlockNumber = BigInt(reader.readNumber());
|
|
39
|
-
const
|
|
38
|
+
const checkpointNumber = CheckpointNumber(reader.readNumber());
|
|
40
39
|
const rollingHash = reader.readObject(Buffer16);
|
|
41
|
-
return { index, leaf, l1BlockHash, l1BlockNumber,
|
|
40
|
+
return { index, leaf, l1BlockHash, l1BlockNumber, checkpointNumber, rollingHash };
|
|
42
41
|
}
|