@aztec/archiver 0.0.1-commit.9b94fc1 → 0.0.1-commit.9ee6fcc6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +156 -22
- package/dest/archiver.d.ts +138 -0
- package/dest/archiver.d.ts.map +1 -0
- package/dest/archiver.js +743 -0
- package/dest/config.d.ts +30 -0
- package/dest/config.d.ts.map +1 -0
- package/dest/{archiver/config.js → config.js} +23 -6
- package/dest/errors.d.ts +53 -0
- package/dest/errors.d.ts.map +1 -0
- package/dest/errors.js +75 -0
- package/dest/factory.d.ts +8 -7
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +99 -15
- package/dest/index.d.ts +11 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +9 -3
- package/dest/interfaces.d.ts +9 -0
- package/dest/interfaces.d.ts.map +1 -0
- package/dest/interfaces.js +3 -0
- package/dest/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/l1/bin/retrieve-calldata.js +152 -0
- package/dest/l1/calldata_retriever.d.ts +135 -0
- package/dest/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/l1/calldata_retriever.js +402 -0
- package/dest/l1/data_retrieval.d.ts +88 -0
- package/dest/l1/data_retrieval.d.ts.map +1 -0
- package/dest/{archiver → l1}/data_retrieval.js +83 -155
- package/dest/l1/debug_tx.d.ts +19 -0
- package/dest/l1/debug_tx.d.ts.map +1 -0
- package/dest/l1/debug_tx.js +73 -0
- package/dest/l1/spire_proposer.d.ts +70 -0
- package/dest/l1/spire_proposer.d.ts.map +1 -0
- package/dest/l1/spire_proposer.js +149 -0
- package/dest/l1/trace_tx.d.ts +97 -0
- package/dest/l1/trace_tx.d.ts.map +1 -0
- package/dest/l1/trace_tx.js +91 -0
- package/dest/l1/types.d.ts +12 -0
- package/dest/l1/types.d.ts.map +1 -0
- package/dest/l1/types.js +3 -0
- package/dest/l1/validate_trace.d.ts +32 -0
- package/dest/l1/validate_trace.d.ts.map +1 -0
- package/dest/l1/validate_trace.js +154 -0
- package/dest/modules/data_source_base.d.ts +89 -0
- package/dest/modules/data_source_base.d.ts.map +1 -0
- package/dest/modules/data_source_base.js +216 -0
- package/dest/modules/data_store_updater.d.ts +85 -0
- package/dest/modules/data_store_updater.d.ts.map +1 -0
- package/dest/modules/data_store_updater.js +324 -0
- package/dest/modules/instrumentation.d.ts +50 -0
- package/dest/modules/instrumentation.d.ts.map +1 -0
- package/dest/{archiver → modules}/instrumentation.js +49 -62
- package/dest/modules/l1_synchronizer.d.ts +72 -0
- package/dest/modules/l1_synchronizer.d.ts.map +1 -0
- package/dest/modules/l1_synchronizer.js +1147 -0
- package/dest/modules/validation.d.ts +17 -0
- package/dest/modules/validation.d.ts.map +1 -0
- package/dest/{archiver → modules}/validation.js +7 -1
- package/dest/store/block_store.d.ts +195 -0
- package/dest/store/block_store.d.ts.map +1 -0
- package/dest/store/block_store.js +773 -0
- package/dest/store/contract_class_store.d.ts +17 -0
- package/dest/store/contract_class_store.d.ts.map +1 -0
- package/dest/store/contract_class_store.js +64 -0
- package/dest/store/contract_instance_store.d.ts +24 -0
- package/dest/store/contract_instance_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +7 -3
- package/dest/store/kv_archiver_store.d.ts +364 -0
- package/dest/store/kv_archiver_store.d.ts.map +1 -0
- package/dest/store/kv_archiver_store.js +477 -0
- package/dest/store/l2_tips_cache.d.ts +19 -0
- package/dest/store/l2_tips_cache.d.ts.map +1 -0
- package/dest/store/l2_tips_cache.js +89 -0
- package/dest/store/log_store.d.ts +57 -0
- package/dest/store/log_store.d.ts.map +1 -0
- package/dest/store/log_store.js +533 -0
- package/dest/store/message_store.d.ts +44 -0
- package/dest/store/message_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/message_store.js +29 -15
- package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
- package/dest/structs/data_retrieval.d.ts.map +1 -0
- package/dest/structs/inbox_message.d.ts +15 -0
- package/dest/structs/inbox_message.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/inbox_message.js +6 -5
- package/dest/structs/published.d.ts +2 -0
- package/dest/structs/published.d.ts.map +1 -0
- package/dest/test/fake_l1_state.d.ts +202 -0
- package/dest/test/fake_l1_state.d.ts.map +1 -0
- package/dest/test/fake_l1_state.js +455 -0
- package/dest/test/index.d.ts +2 -1
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +4 -1
- package/dest/test/mock_archiver.d.ts +16 -8
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +19 -14
- package/dest/test/mock_l1_to_l2_message_source.d.ts +7 -6
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +21 -11
- package/dest/test/mock_l2_block_source.d.ts +54 -20
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +251 -85
- package/dest/test/mock_structs.d.ts +83 -4
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +157 -11
- package/dest/test/noop_l1_archiver.d.ts +26 -0
- package/dest/test/noop_l1_archiver.d.ts.map +1 -0
- package/dest/test/noop_l1_archiver.js +72 -0
- package/package.json +20 -20
- package/src/archiver.ts +500 -0
- package/src/{archiver/config.ts → config.ts} +36 -13
- package/src/errors.ts +118 -0
- package/src/factory.ts +154 -17
- package/src/index.ts +11 -3
- package/src/interfaces.ts +9 -0
- package/src/l1/README.md +55 -0
- package/src/l1/bin/retrieve-calldata.ts +194 -0
- package/src/l1/calldata_retriever.ts +511 -0
- package/src/{archiver → l1}/data_retrieval.ts +143 -226
- package/src/l1/debug_tx.ts +99 -0
- package/src/l1/spire_proposer.ts +152 -0
- package/src/l1/trace_tx.ts +128 -0
- package/src/l1/types.ts +13 -0
- package/src/l1/validate_trace.ts +229 -0
- package/src/modules/data_source_base.ts +333 -0
- package/src/modules/data_store_updater.ts +424 -0
- package/src/{archiver → modules}/instrumentation.ts +63 -66
- package/src/modules/l1_synchronizer.ts +967 -0
- package/src/{archiver → modules}/validation.ts +11 -6
- package/src/store/block_store.ts +1018 -0
- package/src/store/contract_class_store.ts +82 -0
- package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +10 -7
- package/src/store/kv_archiver_store.ts +682 -0
- package/src/store/l2_tips_cache.ts +89 -0
- package/src/store/log_store.ts +736 -0
- package/src/{archiver/kv_archiver_store → store}/message_store.ts +41 -19
- package/src/{archiver/structs → structs}/inbox_message.ts +8 -8
- package/src/{archiver/structs → structs}/published.ts +0 -1
- package/src/test/fake_l1_state.ts +698 -0
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/index.ts +4 -0
- package/src/test/mock_archiver.ts +23 -16
- package/src/test/mock_l1_to_l2_message_source.ts +18 -11
- package/src/test/mock_l2_block_source.ts +311 -93
- package/src/test/mock_structs.ts +289 -13
- package/src/test/noop_l1_archiver.ts +115 -0
- package/dest/archiver/archiver.d.ts +0 -287
- package/dest/archiver/archiver.d.ts.map +0 -1
- package/dest/archiver/archiver.js +0 -1408
- package/dest/archiver/archiver_store.d.ts +0 -255
- package/dest/archiver/archiver_store.d.ts.map +0 -1
- package/dest/archiver/archiver_store.js +0 -4
- package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
- package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
- package/dest/archiver/archiver_store_test_suite.js +0 -1289
- package/dest/archiver/config.d.ts +0 -21
- package/dest/archiver/config.d.ts.map +0 -1
- package/dest/archiver/data_retrieval.d.ts +0 -79
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/errors.d.ts +0 -12
- package/dest/archiver/errors.d.ts.map +0 -1
- package/dest/archiver/errors.js +0 -17
- package/dest/archiver/index.d.ts +0 -7
- package/dest/archiver/index.d.ts.map +0 -1
- package/dest/archiver/index.js +0 -4
- package/dest/archiver/instrumentation.d.ts +0 -35
- package/dest/archiver/instrumentation.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +0 -124
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.js +0 -370
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.js +0 -120
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -168
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +0 -296
- package/dest/archiver/kv_archiver_store/log_store.d.ts +0 -49
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/log_store.js +0 -336
- package/dest/archiver/kv_archiver_store/message_store.d.ts +0 -39
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
- package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/structs/inbox_message.d.ts +0 -15
- package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
- package/dest/archiver/structs/published.d.ts +0 -3
- package/dest/archiver/structs/published.d.ts.map +0 -1
- package/dest/archiver/validation.d.ts +0 -17
- package/dest/archiver/validation.d.ts.map +0 -1
- package/dest/rpc/index.d.ts +0 -9
- package/dest/rpc/index.d.ts.map +0 -1
- package/dest/rpc/index.js +0 -15
- package/src/archiver/archiver.ts +0 -1858
- package/src/archiver/archiver_store.ts +0 -305
- package/src/archiver/archiver_store_test_suite.ts +0 -1264
- package/src/archiver/errors.ts +0 -26
- package/src/archiver/index.ts +0 -6
- package/src/archiver/kv_archiver_store/block_store.ts +0 -481
- package/src/archiver/kv_archiver_store/contract_class_store.ts +0 -176
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +0 -422
- package/src/archiver/kv_archiver_store/log_store.ts +0 -406
- package/src/rpc/index.ts +0 -16
- /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
- /package/dest/{archiver/structs → structs}/published.js +0 -0
- /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
|
@@ -0,0 +1,533 @@
|
|
|
1
|
+
import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
|
|
2
|
+
import { BlockNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { compactArray, filterAsync } from '@aztec/foundation/collection';
|
|
4
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
5
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
6
|
+
import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
|
|
7
|
+
import { BlockHash } from '@aztec/stdlib/block';
|
|
8
|
+
import { MAX_LOGS_PER_TAG } from '@aztec/stdlib/interfaces/api-limit';
|
|
9
|
+
import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, PublicLog, TxScopedL2Log } from '@aztec/stdlib/logs';
|
|
10
|
+
import { TxHash } from '@aztec/stdlib/tx';
|
|
11
|
+
import { OutOfOrderLogInsertionError } from '../errors.js';
|
|
12
|
+
/**
|
|
13
|
+
* A store for logs
|
|
14
|
+
*/ export class LogStore {
|
|
15
|
+
// Underlying key-value database; all maps below are opened in it and
// mutations go through its transactionAsync (see addLogs / deleteLogs).
db;
// Block store used to resolve a tx hash to its (blockNumber, txIndex) location.
blockStore;
// `tag` --> private logs (array of serialized TxScopedL2Log buffers, in block order)
#privateLogsByTag;
// `{contractAddress}_${tag}` --> public logs (array of serialized TxScopedL2Log buffers, in block order)
#publicLogsByContractAndTag;
// block number --> list of private log tags seen in that block (used to prune tags on delete)
#privateLogKeysByBlock;
// block number --> list of `{contractAddress}_{tag}` keys seen in that block (used to prune on delete)
#publicLogKeysByBlock;
// block number --> packed buffer of all public logs in the block, prefixed with the block hash
#publicLogsByBlock;
// block number --> packed buffer of all contract class logs in the block, prefixed with the block hash
#contractClassLogsByBlock;
// Page size cap for paginated queries. NOTE(review): set but not read in the
// visible portion of this file — confirm it is used by other methods.
#logsMaxPageSize;
// Scoped logger for this store.
#log;
/**
 * @param db - The key-value store in which the log maps are opened.
 * @param blockStore - Block store used for tx-location lookups in the filters.
 * @param logsMaxPageSize - Maximum page size for log queries (defaults to 1000).
 */
constructor(db, blockStore, logsMaxPageSize = 1000){
    this.db = db;
    this.blockStore = blockStore;
    this.#log = createLogger('archiver:log_store');
    this.#privateLogsByTag = db.openMap('archiver_private_tagged_logs_by_tag');
    this.#publicLogsByContractAndTag = db.openMap('archiver_public_tagged_logs_by_tag');
    this.#privateLogKeysByBlock = db.openMap('archiver_private_log_keys_by_block');
    this.#publicLogKeysByBlock = db.openMap('archiver_public_log_keys_by_block');
    this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
    this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
    this.#logsMaxPageSize = logsMaxPageSize;
}
|
|
39
|
+
/**
|
|
40
|
+
* Extracts tagged logs from a single block, grouping them into private and public maps.
|
|
41
|
+
*
|
|
42
|
+
* @param block - The L2 block to extract logs from.
|
|
43
|
+
* @returns An object containing the private and public tagged logs for the block.
|
|
44
|
+
*/ #extractTaggedLogsFromBlock(block) {
|
|
45
|
+
// SiloedTag (as string) -> array of log buffers.
|
|
46
|
+
const privateTaggedLogs = new Map();
|
|
47
|
+
// "{contractAddress}_{tag}" (as string) -> array of log buffers.
|
|
48
|
+
const publicTaggedLogs = new Map();
|
|
49
|
+
block.body.txEffects.forEach((txEffect)=>{
|
|
50
|
+
const txHash = txEffect.txHash;
|
|
51
|
+
txEffect.privateLogs.forEach((log)=>{
|
|
52
|
+
// Private logs use SiloedTag (already siloed by kernel)
|
|
53
|
+
const tag = log.fields[0];
|
|
54
|
+
this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);
|
|
55
|
+
const currentLogs = privateTaggedLogs.get(tag.toString()) ?? [];
|
|
56
|
+
currentLogs.push(new TxScopedL2Log(txHash, block.number, block.timestamp, log.getEmittedFields(), txEffect.noteHashes, txEffect.nullifiers[0]).toBuffer());
|
|
57
|
+
privateTaggedLogs.set(tag.toString(), currentLogs);
|
|
58
|
+
});
|
|
59
|
+
txEffect.publicLogs.forEach((log)=>{
|
|
60
|
+
// Public logs use Tag directly (not siloed) and are stored with contract address
|
|
61
|
+
const tag = log.fields[0];
|
|
62
|
+
const contractAddress = log.contractAddress;
|
|
63
|
+
const key = `${contractAddress.toString()}_${tag.toString()}`;
|
|
64
|
+
this.#log.debug(`Found public log with tag ${tag.toString()} from contract ${contractAddress.toString()} in block ${block.number}`);
|
|
65
|
+
const currentLogs = publicTaggedLogs.get(key) ?? [];
|
|
66
|
+
currentLogs.push(new TxScopedL2Log(txHash, block.number, block.timestamp, log.getEmittedFields(), txEffect.noteHashes, txEffect.nullifiers[0]).toBuffer());
|
|
67
|
+
publicTaggedLogs.set(key, currentLogs);
|
|
68
|
+
});
|
|
69
|
+
});
|
|
70
|
+
return {
|
|
71
|
+
privateTaggedLogs,
|
|
72
|
+
publicTaggedLogs
|
|
73
|
+
};
|
|
74
|
+
}
|
|
75
|
+
/**
|
|
76
|
+
* Extracts and aggregates tagged logs from a list of blocks.
|
|
77
|
+
* @param blocks - The blocks to extract logs from.
|
|
78
|
+
* @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
|
|
79
|
+
* "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
|
|
80
|
+
*/ #extractTaggedLogs(blocks) {
|
|
81
|
+
const taggedLogsInBlocks = blocks.map((block)=>this.#extractTaggedLogsFromBlock(block));
|
|
82
|
+
// Now we merge the maps from each block into a single map.
|
|
83
|
+
const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs })=>{
|
|
84
|
+
for (const [tag, logs] of privateTaggedLogs.entries()){
|
|
85
|
+
const currentLogs = acc.get(tag) ?? [];
|
|
86
|
+
acc.set(tag, currentLogs.concat(logs));
|
|
87
|
+
}
|
|
88
|
+
return acc;
|
|
89
|
+
}, new Map());
|
|
90
|
+
const publicTaggedLogs = taggedLogsInBlocks.reduce((acc, { publicTaggedLogs })=>{
|
|
91
|
+
for (const [key, logs] of publicTaggedLogs.entries()){
|
|
92
|
+
const currentLogs = acc.get(key) ?? [];
|
|
93
|
+
acc.set(key, currentLogs.concat(logs));
|
|
94
|
+
}
|
|
95
|
+
return acc;
|
|
96
|
+
}, new Map());
|
|
97
|
+
return {
|
|
98
|
+
privateTaggedLogs,
|
|
99
|
+
publicTaggedLogs
|
|
100
|
+
};
|
|
101
|
+
}
|
|
102
|
+
/**
 * Stores the private tagged logs of the given blocks.
 *
 * For each tag touched by the new blocks, the existing buffers are loaded, the
 * insertion order is validated (a new log may never belong to an earlier block
 * than the last stored one — see deleteLogs, which relies on this invariant to
 * prune by block number), and the concatenated list is written back.
 *
 * @param blocks - Blocks whose private logs should be ingested.
 * @throws OutOfOrderLogInsertionError if a tag would receive a log from an
 *   earlier block than its last stored log.
 */
async #addPrivateLogs(blocks) {
    // Skip blocks already ingested (presence in #privateLogKeysByBlock marks completion).
    const newBlocks = await filterAsync(blocks, async (block)=>!await this.#privateLogKeysByBlock.hasAsync(block.number));
    const { privateTaggedLogs } = this.#extractTaggedLogs(newBlocks);
    const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());
    // Fetch the currently stored buffers for every touched tag in parallel.
    const currentPrivateTaggedLogs = await Promise.all(keysOfPrivateLogsToUpdate.map(async (key)=>({
        tag: key,
        logBuffers: await this.#privateLogsByTag.getAsync(key)
    })));
    for (const taggedLogBuffer of currentPrivateTaggedLogs){
        if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
            const newLogs = privateTaggedLogs.get(taggedLogBuffer.tag);
            if (newLogs.length === 0) {
                continue;
            }
            // Only the boundary pair needs checking: stored buffers and new logs
            // are each already in block order.
            const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1));
            const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
            if (lastExisting.blockNumber > firstNew.blockNumber) {
                throw new OutOfOrderLogInsertionError('private', taggedLogBuffer.tag, lastExisting.blockNumber, firstNew.blockNumber);
            }
            // Prepend the existing buffers so the map now holds the full list to persist.
            privateTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
        }
    }
    // NOTE(review): this loop writes EVERY tag of the whole batch once per new
    // block, and records every batch tag under every block in
    // #privateLogKeysByBlock (not just the tags that occur in that block).
    // That makes deleteLogs' per-block tag lookup over-inclusive and repeats
    // identical writes — presumably the tag writes were meant to happen once
    // outside this loop; verify against the TS source.
    for (const block of newBlocks){
        const privateTagsInBlock = [];
        for (const [tag, logs] of privateTaggedLogs.entries()){
            await this.#privateLogsByTag.set(tag, logs);
            privateTagsInBlock.push(tag);
        }
        await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
    }
}
|
|
133
|
+
/**
 * Stores the public tagged logs of the given blocks, and the per-block packed
 * public-log buffers used by the block-range filters.
 *
 * Mirrors #addPrivateLogs for the tag index, then additionally serializes all
 * public logs of each block as repeated [txIndex, txHash, numLogs, ...logs]
 * records prefixed with the block hash (see #packWithBlockHash /
 * #filterPublicLogsOfTx for the matching reader).
 *
 * @param blocks - Blocks whose public logs should be ingested.
 * @throws OutOfOrderLogInsertionError if a key would receive a log from an
 *   earlier block than its last stored log.
 */
async #addPublicLogs(blocks) {
    // Skip blocks already ingested (presence in #publicLogKeysByBlock marks completion).
    const newBlocks = await filterAsync(blocks, async (block)=>!await this.#publicLogKeysByBlock.hasAsync(block.number));
    const { publicTaggedLogs } = this.#extractTaggedLogs(newBlocks);
    const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
    // Fetch the currently stored buffers for every touched key in parallel.
    const currentPublicTaggedLogs = await Promise.all(keysOfPublicLogsToUpdate.map(async (key)=>({
        tag: key,
        logBuffers: await this.#publicLogsByContractAndTag.getAsync(key)
    })));
    for (const taggedLogBuffer of currentPublicTaggedLogs){
        if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
            const newLogs = publicTaggedLogs.get(taggedLogBuffer.tag);
            if (newLogs.length === 0) {
                continue;
            }
            // Boundary check only — both sides are already in block order.
            const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1));
            const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
            if (lastExisting.blockNumber > firstNew.blockNumber) {
                throw new OutOfOrderLogInsertionError('public', taggedLogBuffer.tag, lastExisting.blockNumber, firstNew.blockNumber);
            }
            publicTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
        }
    }
    for (const block of newBlocks){
        const blockHash = await block.hash();
        // NOTE(review): as in #addPrivateLogs, every key of the whole batch is
        // re-written for each block and credited to each block's key list —
        // over-inclusive for multi-block batches; verify against the TS source.
        const publicTagsInBlock = [];
        for (const [tag, logs] of publicTaggedLogs.entries()){
            await this.#publicLogsByContractAndTag.set(tag, logs);
            publicTagsInBlock.push(tag);
        }
        await this.#publicLogKeysByBlock.set(block.number, publicTagsInBlock);
        // Pack [txIndex, txHash, count, ...log buffers] per tx effect, flattened.
        const publicLogsInBlock = block.body.txEffects.map((txEffect, txIndex)=>[
            numToUInt32BE(txIndex),
            txEffect.txHash.toBuffer(),
            numToUInt32BE(txEffect.publicLogs.length),
            txEffect.publicLogs.map((log)=>log.toBuffer())
        ].flat()).flat();
        await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
    }
}
|
|
172
|
+
async #addContractClassLogs(blocks) {
|
|
173
|
+
const newBlocks = await filterAsync(blocks, async (block)=>!await this.#contractClassLogsByBlock.hasAsync(block.number));
|
|
174
|
+
for (const block of newBlocks){
|
|
175
|
+
const blockHash = await block.hash();
|
|
176
|
+
const contractClassLogsInBlock = block.body.txEffects.map((txEffect, txIndex)=>[
|
|
177
|
+
numToUInt32BE(txIndex),
|
|
178
|
+
txEffect.txHash.toBuffer(),
|
|
179
|
+
numToUInt32BE(txEffect.contractClassLogs.length),
|
|
180
|
+
txEffect.contractClassLogs.map((log)=>log.toBuffer())
|
|
181
|
+
].flat()).flat();
|
|
182
|
+
await this.#contractClassLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, contractClassLogsInBlock));
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
/**
|
|
186
|
+
* Append new logs to the store's list.
|
|
187
|
+
* @param blocks - The blocks for which to add the logs.
|
|
188
|
+
* @returns True if the operation is successful.
|
|
189
|
+
*/ addLogs(blocks) {
|
|
190
|
+
return this.db.transactionAsync(async ()=>{
|
|
191
|
+
await Promise.all([
|
|
192
|
+
this.#addPrivateLogs(blocks),
|
|
193
|
+
this.#addPublicLogs(blocks),
|
|
194
|
+
this.#addContractClassLogs(blocks)
|
|
195
|
+
]);
|
|
196
|
+
return true;
|
|
197
|
+
});
|
|
198
|
+
}
|
|
199
|
+
#packWithBlockHash(blockHash, data) {
|
|
200
|
+
return Buffer.concat([
|
|
201
|
+
blockHash.toBuffer(),
|
|
202
|
+
...data
|
|
203
|
+
]);
|
|
204
|
+
}
|
|
205
|
+
#unpackBlockHash(reader) {
|
|
206
|
+
const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
|
|
207
|
+
if (!blockHash) {
|
|
208
|
+
throw new Error('Failed to read block hash from log entry buffer');
|
|
209
|
+
}
|
|
210
|
+
return new BlockHash(blockHash);
|
|
211
|
+
}
|
|
212
|
+
/**
 * Deletes all logs belonging to the given blocks (used when unwinding a reorg).
 *
 * Tagged logs are trimmed per tag/key: every entry at or after the lowest
 * block being deleted is dropped, which is safe because #addPrivateLogs /
 * #addPublicLogs enforce that entries are appended in block-number order.
 * Per-block indexes and packed buffers are then removed outright.
 *
 * @param blocks - The blocks whose logs should be removed. Assumed to be a
 *   contiguous tail of the chain — only the minimum block number is used as
 *   the cutoff.
 * @returns True when the transaction completes.
 */
deleteLogs(blocks) {
    return this.db.transactionAsync(async ()=>{
        const blockNumbers = new Set(blocks.map((block)=>block.number));
        // Everything from this block number onwards is discarded.
        const firstBlockToDelete = Math.min(...blockNumbers);
        // Collect all unique private tags across all blocks being deleted
        const allPrivateTags = new Set(compactArray(await Promise.all(blocks.map((block)=>this.#privateLogKeysByBlock.getAsync(block.number)))).flat());
        // Trim private logs: for each tag, delete all instances including and after the first block being deleted.
        // This hinges on the invariant that logs for a given tag are always inserted in order of block number, which is enforced in #addPrivateLogs.
        for (const tag of allPrivateTags){
            const existing = await this.#privateLogsByTag.getAsync(tag);
            if (existing === undefined || existing.length === 0) {
                continue;
            }
            // Last entry strictly older than the cutoff; -1 means nothing survives.
            const lastIndexToKeep = existing.findLastIndex((buf)=>TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete);
            const remaining = existing.slice(0, lastIndexToKeep + 1);
            // Drop the key entirely when no entries survive the trim.
            await (remaining.length > 0 ? this.#privateLogsByTag.set(tag, remaining) : this.#privateLogsByTag.delete(tag));
        }
        // Collect all unique public keys across all blocks being deleted
        const allPublicKeys = new Set(compactArray(await Promise.all(blocks.map((block)=>this.#publicLogKeysByBlock.getAsync(block.number)))).flat());
        // And do the same as we did with private logs
        for (const key of allPublicKeys){
            const existing = await this.#publicLogsByContractAndTag.getAsync(key);
            if (existing === undefined || existing.length === 0) {
                continue;
            }
            const lastIndexToKeep = existing.findLastIndex((buf)=>TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete);
            const remaining = existing.slice(0, lastIndexToKeep + 1);
            await (remaining.length > 0 ? this.#publicLogsByContractAndTag.set(key, remaining) : this.#publicLogsByContractAndTag.delete(key));
        }
        // After trimming the tagged logs, we can delete the block-level keys that track which tags are in which blocks.
        await Promise.all(blocks.map((block)=>Promise.all([
            this.#publicLogsByBlock.delete(block.number),
            this.#privateLogKeysByBlock.delete(block.number),
            this.#publicLogKeysByBlock.delete(block.number),
            this.#contractClassLogsByBlock.delete(block.number)
        ])));
        return true;
    });
}
|
|
251
|
+
/**
|
|
252
|
+
* Gets private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
|
|
253
|
+
* array implies no logs match that tag.
|
|
254
|
+
* @param tags - The tags to search for.
|
|
255
|
+
* @param page - The page number (0-indexed) for pagination.
|
|
256
|
+
* @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
|
|
257
|
+
* @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
|
|
258
|
+
* MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
|
|
259
|
+
*/ async getPrivateLogsByTags(tags, page = 0, upToBlockNumber) {
|
|
260
|
+
const logs = await Promise.all(tags.map((tag)=>this.#privateLogsByTag.getAsync(tag.toString())));
|
|
261
|
+
const start = page * MAX_LOGS_PER_TAG;
|
|
262
|
+
const end = start + MAX_LOGS_PER_TAG;
|
|
263
|
+
return logs.map((logBuffers)=>{
|
|
264
|
+
const deserialized = logBuffers?.slice(start, end).map((buf)=>TxScopedL2Log.fromBuffer(buf)) ?? [];
|
|
265
|
+
if (upToBlockNumber !== undefined) {
|
|
266
|
+
const cutoff = deserialized.findIndex((log)=>log.blockNumber > upToBlockNumber);
|
|
267
|
+
if (cutoff !== -1) {
|
|
268
|
+
return deserialized.slice(0, cutoff);
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
return deserialized;
|
|
272
|
+
});
|
|
273
|
+
}
|
|
274
|
+
/**
|
|
275
|
+
* Gets public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
|
|
276
|
+
* logs is returned. An empty array implies no logs match that tag.
|
|
277
|
+
* @param contractAddress - The contract address to search logs for.
|
|
278
|
+
* @param tags - The tags to search for.
|
|
279
|
+
* @param page - The page number (0-indexed) for pagination.
|
|
280
|
+
* @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
|
|
281
|
+
* @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
|
|
282
|
+
* MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
|
|
283
|
+
*/ async getPublicLogsByTagsFromContract(contractAddress, tags, page = 0, upToBlockNumber) {
|
|
284
|
+
const logs = await Promise.all(tags.map((tag)=>{
|
|
285
|
+
const key = `${contractAddress.toString()}_${tag.value.toString()}`;
|
|
286
|
+
return this.#publicLogsByContractAndTag.getAsync(key);
|
|
287
|
+
}));
|
|
288
|
+
const start = page * MAX_LOGS_PER_TAG;
|
|
289
|
+
const end = start + MAX_LOGS_PER_TAG;
|
|
290
|
+
return logs.map((logBuffers)=>{
|
|
291
|
+
const deserialized = logBuffers?.slice(start, end).map((buf)=>TxScopedL2Log.fromBuffer(buf)) ?? [];
|
|
292
|
+
if (upToBlockNumber !== undefined) {
|
|
293
|
+
const cutoff = deserialized.findIndex((log)=>log.blockNumber > upToBlockNumber);
|
|
294
|
+
if (cutoff !== -1) {
|
|
295
|
+
return deserialized.slice(0, cutoff);
|
|
296
|
+
}
|
|
297
|
+
}
|
|
298
|
+
return deserialized;
|
|
299
|
+
});
|
|
300
|
+
}
|
|
301
|
+
/**
|
|
302
|
+
* Gets public logs based on the provided filter.
|
|
303
|
+
* @param filter - The filter to apply to the logs.
|
|
304
|
+
* @returns The requested logs.
|
|
305
|
+
*/ getPublicLogs(filter) {
|
|
306
|
+
if (filter.afterLog) {
|
|
307
|
+
return this.#filterPublicLogsBetweenBlocks(filter);
|
|
308
|
+
} else if (filter.txHash) {
|
|
309
|
+
return this.#filterPublicLogsOfTx(filter);
|
|
310
|
+
} else {
|
|
311
|
+
return this.#filterPublicLogsBetweenBlocks(filter);
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
/**
 * Returns the public logs of the single tx identified by `filter.txHash`.
 *
 * Unpacks the block's packed public-log buffer (written by #addPublicLogs:
 * block hash, then repeated [txIndex, txHash, numLogs, ...logs] records) and
 * delegates matching/accumulation to #accumulatePublicLogs.
 *
 * @param filter - Log filter; `txHash` is required here.
 * @returns The matching logs plus a flag indicating the page limit was hit.
 * @throws Error if `filter.txHash` is missing.
 */
async #filterPublicLogsOfTx(filter) {
    if (!filter.txHash) {
        throw new Error('Missing txHash');
    }
    // Resolve the tx to its block and index; unknown txs yield an empty result.
    const [blockNumber, txIndex] = await this.blockStore.getTxLocation(filter.txHash) ?? [];
    if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
        return {
            logs: [],
            maxLogsHit: false
        };
    }
    // NOTE(review): a missing entry falls back to an empty buffer, which then
    // fails #unpackBlockHash with a generic error — confirm a block known to
    // blockStore always has a packed entry here.
    const buffer = await this.#publicLogsByBlock.getAsync(blockNumber) ?? Buffer.alloc(0);
    const publicLogsInBlock = [];
    const reader = new BufferReader(buffer);
    const blockHash = this.#unpackBlockHash(reader);
    // Decode every tx record; indexOfTx positions entries sparsely by tx index.
    while(reader.remainingBytes() > 0){
        const indexOfTx = reader.readNumber();
        const txHash = reader.readObject(TxHash);
        const numLogsInTx = reader.readNumber();
        publicLogsInBlock[indexOfTx] = {
            txHash,
            logs: []
        };
        for(let i = 0; i < numLogsInTx; i++){
            publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
        }
    }
    const txData = publicLogsInBlock[txIndex];
    const logs = [];
    // #accumulatePublicLogs (defined elsewhere in this class) applies the
    // remaining filter criteria and reports whether the page limit was reached.
    const maxLogsHit = this.#accumulatePublicLogs(logs, blockNumber, blockHash, txIndex, txData.txHash, txData.logs, filter);
    return {
        logs,
        maxLogsHit
    };
}
|
|
349
|
+
/**
 * Returns public logs over a block range, optionally resuming from an
 * `afterLog` cursor, stopping as soon as the page limit is reached.
 *
 * Iterates the per-block packed buffers written by #addPublicLogs, decodes
 * each block's tx records, and lets #accumulatePublicLogs apply the filter.
 *
 * @param filter - Log filter; may carry fromBlock/toBlock and/or an afterLog cursor.
 * @returns The matching logs plus a flag indicating the page limit was hit.
 */
async #filterPublicLogsBetweenBlocks(filter) {
    // Resume from the cursor block if present; otherwise clamp fromBlock to the genesis L2 block.
    const start = filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
    const end = filter.toBlock;
    // Empty/inverted range: nothing to scan. NOTE(review): maxLogsHit=true here
    // looks intentional (signals "no further pages") — confirm callers expect it.
    if (typeof end === 'number' && end < start) {
        return {
            logs: [],
            maxLogsHit: true
        };
    }
    const logs = [];
    let maxLogsHit = false;
    // Labelled so the inner tx loop can abort the whole scan once the page fills up.
    loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({
        start,
        end
    })){
        const publicLogsInBlock = [];
        const reader = new BufferReader(logBuffer);
        const blockHash = this.#unpackBlockHash(reader);
        // Decode every [txIndex, txHash, numLogs, ...logs] record of the block.
        while(reader.remainingBytes() > 0){
            const indexOfTx = reader.readNumber();
            const txHash = reader.readObject(TxHash);
            const numLogsInTx = reader.readNumber();
            publicLogsInBlock[indexOfTx] = {
                txHash,
                logs: []
            };
            for(let i = 0; i < numLogsInTx; i++){
                publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
            }
        }
        // NOTE(review): the afterLog txIndex offset is applied to EVERY block in
        // the range, not just the cursor's block — presumably it should only
        // skip txs in the first block; verify against #accumulatePublicLogs.
        for(let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++){
            const txData = publicLogsInBlock[txIndex];
            maxLogsHit = this.#accumulatePublicLogs(logs, blockNumber, blockHash, txIndex, txData.txHash, txData.logs, filter);
            if (maxLogsHit) {
                this.#log.debug(`Max logs hit at block ${blockNumber}`);
                break loopOverBlocks;
            }
        }
    }
    return {
        logs,
        maxLogsHit
    };
}
|
|
393
|
+
/**
|
|
394
|
+
* Gets contract class logs based on the provided filter.
|
|
395
|
+
* @param filter - The filter to apply to the logs.
|
|
396
|
+
* @returns The requested logs.
|
|
397
|
+
*/ getContractClassLogs(filter) {
|
|
398
|
+
if (filter.afterLog) {
|
|
399
|
+
return this.#filterContractClassLogsBetweenBlocks(filter);
|
|
400
|
+
} else if (filter.txHash) {
|
|
401
|
+
return this.#filterContractClassLogsOfTx(filter);
|
|
402
|
+
} else {
|
|
403
|
+
return this.#filterContractClassLogsBetweenBlocks(filter);
|
|
404
|
+
}
|
|
405
|
+
}
|
|
406
|
+
async #filterContractClassLogsOfTx(filter) {
|
|
407
|
+
if (!filter.txHash) {
|
|
408
|
+
throw new Error('Missing txHash');
|
|
409
|
+
}
|
|
410
|
+
const [blockNumber, txIndex] = await this.blockStore.getTxLocation(filter.txHash) ?? [];
|
|
411
|
+
if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
|
|
412
|
+
return {
|
|
413
|
+
logs: [],
|
|
414
|
+
maxLogsHit: false
|
|
415
|
+
};
|
|
416
|
+
}
|
|
417
|
+
const contractClassLogsBuffer = await this.#contractClassLogsByBlock.getAsync(blockNumber) ?? Buffer.alloc(0);
|
|
418
|
+
const contractClassLogsInBlock = [];
|
|
419
|
+
const reader = new BufferReader(contractClassLogsBuffer);
|
|
420
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
421
|
+
while(reader.remainingBytes() > 0){
|
|
422
|
+
const indexOfTx = reader.readNumber();
|
|
423
|
+
const txHash = reader.readObject(TxHash);
|
|
424
|
+
const numLogsInTx = reader.readNumber();
|
|
425
|
+
contractClassLogsInBlock[indexOfTx] = {
|
|
426
|
+
txHash,
|
|
427
|
+
logs: []
|
|
428
|
+
};
|
|
429
|
+
for(let i = 0; i < numLogsInTx; i++){
|
|
430
|
+
contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
const txData = contractClassLogsInBlock[txIndex];
|
|
434
|
+
const logs = [];
|
|
435
|
+
const maxLogsHit = this.#accumulateContractClassLogs(logs, blockNumber, blockHash, txIndex, txData.txHash, txData.logs, filter);
|
|
436
|
+
return {
|
|
437
|
+
logs,
|
|
438
|
+
maxLogsHit
|
|
439
|
+
};
|
|
440
|
+
}
|
|
441
|
+
async #filterContractClassLogsBetweenBlocks(filter) {
|
|
442
|
+
const start = filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
|
|
443
|
+
const end = filter.toBlock;
|
|
444
|
+
if (typeof end === 'number' && end < start) {
|
|
445
|
+
return {
|
|
446
|
+
logs: [],
|
|
447
|
+
maxLogsHit: true
|
|
448
|
+
};
|
|
449
|
+
}
|
|
450
|
+
const logs = [];
|
|
451
|
+
let maxLogsHit = false;
|
|
452
|
+
loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#contractClassLogsByBlock.entriesAsync({
|
|
453
|
+
start,
|
|
454
|
+
end
|
|
455
|
+
})){
|
|
456
|
+
const contractClassLogsInBlock = [];
|
|
457
|
+
const reader = new BufferReader(logBuffer);
|
|
458
|
+
const blockHash = this.#unpackBlockHash(reader);
|
|
459
|
+
while(reader.remainingBytes() > 0){
|
|
460
|
+
const indexOfTx = reader.readNumber();
|
|
461
|
+
const txHash = reader.readObject(TxHash);
|
|
462
|
+
const numLogsInTx = reader.readNumber();
|
|
463
|
+
contractClassLogsInBlock[indexOfTx] = {
|
|
464
|
+
txHash,
|
|
465
|
+
logs: []
|
|
466
|
+
};
|
|
467
|
+
for(let i = 0; i < numLogsInTx; i++){
|
|
468
|
+
contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
for(let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++){
|
|
472
|
+
const txData = contractClassLogsInBlock[txIndex];
|
|
473
|
+
maxLogsHit = this.#accumulateContractClassLogs(logs, blockNumber, blockHash, txIndex, txData.txHash, txData.logs, filter);
|
|
474
|
+
if (maxLogsHit) {
|
|
475
|
+
this.#log.debug(`Max logs hit at block ${blockNumber}`);
|
|
476
|
+
break loopOverBlocks;
|
|
477
|
+
}
|
|
478
|
+
}
|
|
479
|
+
}
|
|
480
|
+
return {
|
|
481
|
+
logs,
|
|
482
|
+
maxLogsHit
|
|
483
|
+
};
|
|
484
|
+
}
|
|
485
|
+
#accumulatePublicLogs(results, blockNumber, blockHash, txIndex, txHash, txLogs, filter = {}) {
|
|
486
|
+
if (filter.fromBlock && blockNumber < filter.fromBlock) {
|
|
487
|
+
return false;
|
|
488
|
+
}
|
|
489
|
+
if (filter.toBlock && blockNumber >= filter.toBlock) {
|
|
490
|
+
return false;
|
|
491
|
+
}
|
|
492
|
+
if (filter.txHash && !txHash.equals(filter.txHash)) {
|
|
493
|
+
return false;
|
|
494
|
+
}
|
|
495
|
+
let maxLogsHit = false;
|
|
496
|
+
let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
|
|
497
|
+
for(; logIndex < txLogs.length; logIndex++){
|
|
498
|
+
const log = txLogs[logIndex];
|
|
499
|
+
if ((!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) && (!filter.tag || log.fields[0]?.equals(filter.tag))) {
|
|
500
|
+
results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log));
|
|
501
|
+
if (results.length >= this.#logsMaxPageSize) {
|
|
502
|
+
maxLogsHit = true;
|
|
503
|
+
break;
|
|
504
|
+
}
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
return maxLogsHit;
|
|
508
|
+
}
|
|
509
|
+
#accumulateContractClassLogs(results, blockNumber, blockHash, txIndex, txHash, txLogs, filter = {}) {
|
|
510
|
+
if (filter.fromBlock && blockNumber < filter.fromBlock) {
|
|
511
|
+
return false;
|
|
512
|
+
}
|
|
513
|
+
if (filter.toBlock && blockNumber >= filter.toBlock) {
|
|
514
|
+
return false;
|
|
515
|
+
}
|
|
516
|
+
if (filter.txHash && !txHash.equals(filter.txHash)) {
|
|
517
|
+
return false;
|
|
518
|
+
}
|
|
519
|
+
let maxLogsHit = false;
|
|
520
|
+
let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
|
|
521
|
+
for(; logIndex < txLogs.length; logIndex++){
|
|
522
|
+
const log = txLogs[logIndex];
|
|
523
|
+
if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
|
|
524
|
+
results.push(new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log));
|
|
525
|
+
if (results.length >= this.#logsMaxPageSize) {
|
|
526
|
+
maxLogsHit = true;
|
|
527
|
+
break;
|
|
528
|
+
}
|
|
529
|
+
}
|
|
530
|
+
}
|
|
531
|
+
return maxLogsHit;
|
|
532
|
+
}
|
|
533
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import type { L1BlockId } from '@aztec/ethereum/l1-types';
|
|
2
|
+
import { CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
4
|
+
import { type AztecAsyncKVStore, type CustomRange } from '@aztec/kv-store';
|
|
5
|
+
import { type InboxMessage } from '../structs/inbox_message.js';
|
|
6
|
+
/** Error thrown by MessageStore operations; carries the inbox message that caused the failure. */
export declare class MessageStoreError extends Error {
    readonly inboxMessage: InboxMessage;
    constructor(message: string, inboxMessage: InboxMessage);
}
/** Store for L1-to-L2 inbox messages, backed by an AztecAsyncKVStore. */
export declare class MessageStore {
    #private;
    private db;
    constructor(db: AztecAsyncKVStore);
    /** Returns the total number of L1 to L2 messages stored. */
    getTotalL1ToL2MessageCount(): Promise<bigint>;
    /** Gets the last L1 block synced. */
    getSynchedL1Block(): Promise<L1BlockId | undefined>;
    /** Sets the last L1 block synced */
    setSynchedL1Block(l1Block: L1BlockId): Promise<void>;
    /**
     * Append L1 to L2 messages to the store.
     * Requires new messages to be in order and strictly after the last message added.
     * Throws if out of order messages are added or if the rolling hash is invalid.
     */
    addL1ToL2Messages(messages: InboxMessage[]): Promise<void>;
    /**
     * Gets the L1 to L2 message index in the L1 to L2 message tree.
     * @param l1ToL2Message - The L1 to L2 message.
     * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
     */
    getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined>;
    /** Returns the most recently stored inbox message, or undefined when the store is empty. */
    getLastMessage(): Promise<InboxMessage | undefined>;
    /** Returns the inbox tree-in-progress checkpoint number from L1, or undefined if not yet set. */
    getInboxTreeInProgress(): Promise<bigint | undefined>;
    /** Persists the inbox tree-in-progress checkpoint number from L1 state. */
    setInboxTreeInProgress(value: bigint): Promise<void>;
    /** Returns the L1 to L2 messages recorded for the given checkpoint. */
    getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]>;
    /** Iterates stored inbox messages, optionally restricted to a message-index range. */
    iterateL1ToL2Messages(range?: CustomRange<bigint>): AsyncIterableIterator<InboxMessage>;
    /** Removes stored messages starting at the given index — presumably for L1 reorg handling; confirm against callers. */
    removeL1ToL2Messages(startIndex: bigint): Promise<void>;
    /** Rolls stored messages back to the given checkpoint — presumably for L1 reorg handling; confirm against callers. */
    rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber: CheckpointNumber): Promise<void>;
    private indexToKey;
    private leafToIndexKey;
    private increaseTotalMessageCount;
}
|
|
44
|
+
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWVzc2FnZV9zdG9yZS5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL3N0b3JlL21lc3NhZ2Vfc3RvcmUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxLQUFLLEVBQUUsU0FBUyxFQUFFLE1BQU0sMEJBQTBCLENBQUM7QUFDMUQsT0FBTyxFQUFFLGdCQUFnQixFQUFFLE1BQU0saUNBQWlDLENBQUM7QUFFbkUsT0FBTyxFQUFFLEVBQUUsRUFBRSxNQUFNLGdDQUFnQyxDQUFDO0FBSXBELE9BQU8sRUFDTCxLQUFLLGlCQUFpQixFQUd0QixLQUFLLFdBQVcsRUFFakIsTUFBTSxpQkFBaUIsQ0FBQztBQUl6QixPQUFPLEVBQ0wsS0FBSyxZQUFZLEVBSWxCLE1BQU0sNkJBQTZCLENBQUM7QUFFckMscUJBQWEsaUJBQWtCLFNBQVEsS0FBSzthQUd4QixZQUFZLEVBQUUsWUFBWTtJQUY1QyxZQUNFLE9BQU8sRUFBRSxNQUFNLEVBQ0MsWUFBWSxFQUFFLFlBQVksRUFJM0M7Q0FDRjtBQUVELHFCQUFhLFlBQVk7O0lBY1gsT0FBTyxDQUFDLEVBQUU7SUFBdEIsWUFBb0IsRUFBRSxFQUFFLGlCQUFpQixFQU14QztJQUVZLDBCQUEwQixJQUFJLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FFekQ7SUFFRCxxQ0FBcUM7SUFDeEIsaUJBQWlCLElBQUksT0FBTyxDQUFDLFNBQVMsR0FBRyxTQUFTLENBQUMsQ0FRL0Q7SUFFRCxvQ0FBb0M7SUFDdkIsaUJBQWlCLENBQUMsT0FBTyxFQUFFLFNBQVMsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBR2hFO0lBRUQ7Ozs7T0FJRztJQUNJLGlCQUFpQixDQUFDLFFBQVEsRUFBRSxZQUFZLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBNkZoRTtJQUVEOzs7O09BSUc7SUFDSSxxQkFBcUIsQ0FBQyxhQUFhLEVBQUUsRUFBRSxHQUFHLE9BQU8sQ0FBQyxNQUFNLEdBQUcsU0FBUyxDQUFDLENBRTNFO0lBRVksY0FBYyxJQUFJLE9BQU8sQ0FBQyxZQUFZLEdBQUcsU0FBUyxDQUFDLENBRy9EO0lBRUQsaUdBQWlHO0lBQzFGLHNCQUFzQixJQUFJLE9BQU8sQ0FBQyxNQUFNLEdBQUcsU0FBUyxDQUFDLENBRTNEO0lBRUQsMkVBQTJFO0lBQzlELHNCQUFzQixDQUFDLEtBQUssRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUVoRTtJQUVZLGlCQUFpQixDQUFDLGdCQUFnQixFQUFFLGdCQUFnQixHQUFHLE9BQU8sQ0FBQyxFQUFFLEVBQUUsQ0FBQyxDQTRCaEY7SUFFYSxxQkFBcUIsQ0FBQyxLQUFLLEdBQUUsV0FBVyxDQUFDLE1BQU0sQ0FBTSxHQUFHLHFCQUFxQixDQUFDLFlBQVksQ0FBQyxDQUt4RztJQUVNLG9CQUFvQixDQUFDLFVBQVUsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLElBQUksQ0FBQyxDQWdCN0Q7SUFFTSxrQ0FBa0MsQ0FBQyxzQkFBc0IsRUFBRSxnQkFBZ0IsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBSWpHO0lBRUQsT0FBTyxDQUFDLFVBQVU7SUFJbEIsT0FBTyxDQUFDLGNBQWM7WUFJUix5QkFBeUI7Q0FTeEMifQ==
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"message_store.d.ts","sourceRoot":"","sources":["../../src/store/message_store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAC1D,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AAEnE,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAIpD,OAAO,EACL,KAAK,iBAAiB,EAGtB,KAAK,WAAW,EAEjB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EACL,KAAK,YAAY,EAIlB,MAAM,6BAA6B,CAAC;AAErC,qBAAa,iBAAkB,SAAQ,KAAK;aAGxB,YAAY,EAAE,YAAY;IAF5C,YACE,OAAO,EAAE,MAAM,EACC,YAAY,EAAE,YAAY,EAI3C;CACF;AAED,qBAAa,YAAY;;IAcX,OAAO,CAAC,EAAE;IAAtB,YAAoB,EAAE,EAAE,iBAAiB,EAMxC;IAEY,0BAA0B,IAAI,OAAO,CAAC,MAAM,CAAC,CAEzD;IAED,qCAAqC;IACxB,iBAAiB,IAAI,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,CAQ/D;IAED,oCAAoC;IACvB,iBAAiB,CAAC,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CAGhE;IAED;;;;OAIG;IACI,iBAAiB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CA6FhE;IAED;;;;OAIG;IACI,qBAAqB,CAAC,aAAa,EAAE,EAAE,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAE3E;IAEY,cAAc,IAAI,OAAO,CAAC,YAAY,GAAG,SAAS,CAAC,CAG/D;IAED,iGAAiG;IAC1F,sBAAsB,IAAI,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAE3D;IAED,2EAA2E;IAC9D,sBAAsB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAEhE;IAEY,iBAAiB,CAAC,gBAAgB,EAAE,gBAAgB,GAAG,OAAO,CAAC,EAAE,EAAE,CAAC,CA4BhF;IAEa,qBAAqB,CAAC,KAAK,GAAE,WAAW,CAAC,MAAM,CAAM,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAKxG;IAEM,oBAAoB,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAgB7D;IAEM,kCAAkC,CAAC,sBAAsB,EAAE,gBAAgB,GAAG,OAAO,CAAC,IAAI,CAAC,CAIjG;IAED,OAAO,CAAC,UAAU;IAIlB,OAAO,CAAC,cAAc;YAIR,yBAAyB;CASxC"}
|