@aztec/archiver 0.0.1-commit.fcb71a6 → 0.0.1-commit.ff7989d6c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +156 -22
- package/dest/archiver.d.ts +139 -0
- package/dest/archiver.d.ts.map +1 -0
- package/dest/archiver.js +699 -0
- package/dest/{archiver/config.d.ts → config.d.ts} +9 -1
- package/dest/config.d.ts.map +1 -0
- package/dest/{archiver/config.js → config.js} +11 -2
- package/dest/errors.d.ts +41 -0
- package/dest/errors.d.ts.map +1 -0
- package/dest/{archiver/errors.js → errors.js} +8 -0
- package/dest/factory.d.ts +9 -7
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +94 -11
- package/dest/index.d.ts +11 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +9 -3
- package/dest/interfaces.d.ts +9 -0
- package/dest/interfaces.d.ts.map +1 -0
- package/dest/interfaces.js +3 -0
- package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.d.ts +1 -1
- package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.js +17 -18
- package/dest/{archiver/l1 → l1}/calldata_retriever.d.ts +9 -3
- package/dest/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/calldata_retriever.js +19 -6
- package/dest/l1/data_retrieval.d.ts +89 -0
- package/dest/l1/data_retrieval.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/data_retrieval.js +39 -57
- package/dest/{archiver/l1 → l1}/debug_tx.d.ts +1 -1
- package/dest/l1/debug_tx.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/spire_proposer.d.ts +1 -1
- package/dest/l1/spire_proposer.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/trace_tx.d.ts +1 -1
- package/dest/l1/trace_tx.d.ts.map +1 -0
- package/dest/l1/types.d.ts +12 -0
- package/dest/l1/types.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/validate_trace.d.ts +6 -3
- package/dest/l1/validate_trace.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/validate_trace.js +14 -10
- package/dest/modules/data_source_base.d.ts +89 -0
- package/dest/modules/data_source_base.d.ts.map +1 -0
- package/dest/modules/data_source_base.js +216 -0
- package/dest/modules/data_store_updater.d.ts +80 -0
- package/dest/modules/data_store_updater.d.ts.map +1 -0
- package/dest/modules/data_store_updater.js +323 -0
- package/dest/modules/instrumentation.d.ts +39 -0
- package/dest/modules/instrumentation.d.ts.map +1 -0
- package/dest/{archiver → modules}/instrumentation.js +33 -67
- package/dest/modules/l1_synchronizer.d.ts +76 -0
- package/dest/modules/l1_synchronizer.d.ts.map +1 -0
- package/dest/modules/l1_synchronizer.js +1112 -0
- package/dest/modules/validation.d.ts +17 -0
- package/dest/modules/validation.d.ts.map +1 -0
- package/dest/{archiver → modules}/validation.js +7 -1
- package/dest/store/block_store.d.ts +196 -0
- package/dest/store/block_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/block_store.js +228 -62
- package/dest/store/contract_class_store.d.ts +18 -0
- package/dest/store/contract_class_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +12 -8
- package/dest/store/contract_instance_store.d.ts +24 -0
- package/dest/store/contract_instance_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +1 -1
- package/dest/store/kv_archiver_store.d.ts +354 -0
- package/dest/store/kv_archiver_store.d.ts.map +1 -0
- package/dest/store/kv_archiver_store.js +464 -0
- package/dest/store/l2_tips_cache.d.ts +19 -0
- package/dest/store/l2_tips_cache.d.ts.map +1 -0
- package/dest/store/l2_tips_cache.js +89 -0
- package/dest/store/log_store.d.ts +54 -0
- package/dest/store/log_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/log_store.js +146 -91
- package/dest/{archiver/kv_archiver_store → store}/message_store.d.ts +1 -1
- package/dest/store/message_store.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
- package/dest/structs/data_retrieval.d.ts.map +1 -0
- package/dest/structs/inbox_message.d.ts +15 -0
- package/dest/structs/inbox_message.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/published.d.ts +1 -1
- package/dest/structs/published.d.ts.map +1 -0
- package/dest/test/fake_l1_state.d.ts +193 -0
- package/dest/test/fake_l1_state.d.ts.map +1 -0
- package/dest/test/fake_l1_state.js +389 -0
- package/dest/test/index.d.ts +2 -1
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +4 -1
- package/dest/test/mock_archiver.d.ts +2 -2
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +3 -3
- package/dest/test/mock_l1_to_l2_message_source.d.ts +2 -2
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +12 -3
- package/dest/test/mock_l2_block_source.d.ts +39 -17
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +232 -83
- package/dest/test/mock_structs.d.ts +78 -3
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +140 -7
- package/dest/test/noop_l1_archiver.d.ts +23 -0
- package/dest/test/noop_l1_archiver.d.ts.map +1 -0
- package/dest/test/noop_l1_archiver.js +68 -0
- package/package.json +16 -17
- package/src/archiver.ts +443 -0
- package/src/{archiver/config.ts → config.ts} +13 -2
- package/src/{archiver/errors.ts → errors.ts} +12 -0
- package/src/factory.ts +140 -11
- package/src/index.ts +11 -3
- package/src/interfaces.ts +9 -0
- package/src/{archiver/l1 → l1}/bin/retrieve-calldata.ts +16 -17
- package/src/{archiver/l1 → l1}/calldata_retriever.ts +28 -6
- package/src/{archiver/l1 → l1}/data_retrieval.ts +60 -74
- package/src/{archiver/l1 → l1}/validate_trace.ts +25 -7
- package/src/modules/data_source_base.ts +328 -0
- package/src/modules/data_store_updater.ts +448 -0
- package/src/{archiver → modules}/instrumentation.ts +33 -70
- package/src/modules/l1_synchronizer.ts +932 -0
- package/src/{archiver → modules}/validation.ts +11 -6
- package/src/{archiver/kv_archiver_store → store}/block_store.ts +293 -100
- package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +12 -8
- package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +1 -1
- package/src/{archiver/kv_archiver_store → store}/kv_archiver_store.ts +273 -40
- package/src/store/l2_tips_cache.ts +89 -0
- package/src/{archiver/kv_archiver_store → store}/log_store.ts +242 -121
- package/src/test/fake_l1_state.ts +607 -0
- package/src/test/index.ts +4 -0
- package/src/test/mock_archiver.ts +4 -3
- package/src/test/mock_l1_to_l2_message_source.ts +10 -4
- package/src/test/mock_l2_block_source.ts +276 -90
- package/src/test/mock_structs.ts +269 -8
- package/src/test/noop_l1_archiver.ts +109 -0
- package/dest/archiver/archiver.d.ts +0 -304
- package/dest/archiver/archiver.d.ts.map +0 -1
- package/dest/archiver/archiver.js +0 -1645
- package/dest/archiver/archiver_store.d.ts +0 -308
- package/dest/archiver/archiver_store.d.ts.map +0 -1
- package/dest/archiver/archiver_store.js +0 -4
- package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
- package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
- package/dest/archiver/archiver_store_test_suite.js +0 -2770
- package/dest/archiver/config.d.ts.map +0 -1
- package/dest/archiver/errors.d.ts +0 -36
- package/dest/archiver/errors.d.ts.map +0 -1
- package/dest/archiver/index.d.ts +0 -7
- package/dest/archiver/index.d.ts.map +0 -1
- package/dest/archiver/index.js +0 -4
- package/dest/archiver/instrumentation.d.ts +0 -37
- package/dest/archiver/instrumentation.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +0 -157
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -158
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +0 -313
- package/dest/archiver/kv_archiver_store/log_store.d.ts +0 -45
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +0 -1
- package/dest/archiver/l1/calldata_retriever.d.ts.map +0 -1
- package/dest/archiver/l1/data_retrieval.d.ts +0 -90
- package/dest/archiver/l1/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/l1/debug_tx.d.ts.map +0 -1
- package/dest/archiver/l1/spire_proposer.d.ts.map +0 -1
- package/dest/archiver/l1/trace_tx.d.ts.map +0 -1
- package/dest/archiver/l1/types.d.ts +0 -12
- package/dest/archiver/l1/types.d.ts.map +0 -1
- package/dest/archiver/l1/validate_trace.d.ts.map +0 -1
- package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/structs/inbox_message.d.ts +0 -15
- package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
- package/dest/archiver/structs/published.d.ts.map +0 -1
- package/dest/archiver/validation.d.ts +0 -17
- package/dest/archiver/validation.d.ts.map +0 -1
- package/dest/rpc/index.d.ts +0 -9
- package/dest/rpc/index.d.ts.map +0 -1
- package/dest/rpc/index.js +0 -15
- package/src/archiver/archiver.ts +0 -2157
- package/src/archiver/archiver_store.ts +0 -372
- package/src/archiver/archiver_store_test_suite.ts +0 -2843
- package/src/archiver/index.ts +0 -6
- package/src/rpc/index.ts +0 -16
- /package/dest/{archiver/l1 → l1}/debug_tx.js +0 -0
- /package/dest/{archiver/l1 → l1}/spire_proposer.js +0 -0
- /package/dest/{archiver/l1 → l1}/trace_tx.js +0 -0
- /package/dest/{archiver/l1 → l1}/types.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/message_store.js +0 -0
- /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
- /package/dest/{archiver/structs → structs}/inbox_message.js +0 -0
- /package/dest/{archiver/structs → structs}/published.js +0 -0
- /package/src/{archiver/l1 → l1}/README.md +0 -0
- /package/src/{archiver/l1 → l1}/debug_tx.ts +0 -0
- /package/src/{archiver/l1 → l1}/spire_proposer.ts +0 -0
- /package/src/{archiver/l1 → l1}/trace_tx.ts +0 -0
- /package/src/{archiver/l1 → l1}/types.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/message_store.ts +0 -0
- /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
- /package/src/{archiver/structs → structs}/inbox_message.ts +0 -0
- /package/src/{archiver/structs → structs}/published.ts +0 -0
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
|
|
2
|
+
import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { type BlockData, type CheckpointId, GENESIS_CHECKPOINT_HEADER_HASH, type L2Tips } from '@aztec/stdlib/block';
|
|
4
|
+
|
|
5
|
+
import type { BlockStore } from './block_store.js';
|
|
6
|
+
|
|
7
|
+
/**
 * In-memory cache for L2 chain tips (proposed, checkpointed, proven, finalized).
 * Populated from the BlockStore on first access, then kept up-to-date by the ArchiverDataStoreUpdater.
 * Refresh calls should happen within the store transaction that mutates block data to ensure consistency.
 */
export class L2TipsCache {
  // Cached promise of the tips; undefined until first access. Caching the
  // promise (rather than the resolved value) lets concurrent callers share a
  // single in-flight load from the store.
  #tipsPromise: Promise<L2Tips> | undefined;

  constructor(private blockStore: BlockStore) {}

  /** Returns the cached L2 tips. Loads from the block store on first call. */
  public getL2Tips(): Promise<L2Tips> {
    return (this.#tipsPromise ??= this.loadFromStore());
  }

  /** Reloads the L2 tips from the block store. Should be called within the store transaction that mutates data. */
  public async refresh(): Promise<void> {
    // Swap in the new load first so subsequent getL2Tips calls observe it,
    // then await so errors surface to the caller.
    this.#tipsPromise = this.loadFromStore();
    await this.#tipsPromise;
  }

  // Loads all four tip block numbers and their block data from the store and
  // assembles the L2Tips structure.
  private async loadFromStore(): Promise<L2Tips> {
    const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber] = await Promise.all([
      this.blockStore.getLatestBlockNumber(),
      this.blockStore.getProvenBlockNumber(),
      this.blockStore.getCheckpointedL2BlockNumber(),
      this.blockStore.getFinalizedL2BlockNumber(),
    ]);

    // Synthetic data returned for tips that point before the first real L2
    // block (e.g. a fresh chain where nothing has been proven yet).
    const genesisBlockHeader = {
      blockHash: GENESIS_BLOCK_HEADER_HASH,
      checkpointNumber: CheckpointNumber.ZERO,
    } as const;
    const beforeInitialBlockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);

    const getBlockData = (blockNumber: BlockNumber) =>
      blockNumber > beforeInitialBlockNumber ? this.blockStore.getBlockData(blockNumber) : genesisBlockHeader;

    const [latestBlockData, provenBlockData, checkpointedBlockData, finalizedBlockData] = await Promise.all(
      [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber].map(getBlockData),
    );

    // A missing entry here means the store is inconsistent with the tip
    // numbers it just reported.
    if (!latestBlockData || !provenBlockData || !finalizedBlockData || !checkpointedBlockData) {
      throw new Error('Failed to load block data for L2 tips');
    }

    const [provenCheckpointId, finalizedCheckpointId, checkpointedCheckpointId] = await Promise.all([
      this.getCheckpointIdForBlock(provenBlockData),
      this.getCheckpointIdForBlock(finalizedBlockData),
      this.getCheckpointIdForBlock(checkpointedBlockData),
    ]);

    return {
      proposed: { number: latestBlockNumber, hash: latestBlockData.blockHash.toString() },
      proven: {
        block: { number: provenBlockNumber, hash: provenBlockData.blockHash.toString() },
        checkpoint: provenCheckpointId,
      },
      finalized: {
        block: { number: finalizedBlockNumber, hash: finalizedBlockData.blockHash.toString() },
        checkpoint: finalizedCheckpointId,
      },
      checkpointed: {
        block: { number: checkpointedBlockNumber, hash: checkpointedBlockData.blockHash.toString() },
        checkpoint: checkpointedCheckpointId,
      },
    };
  }

  // Resolves the checkpoint id (number + header hash) the given block belongs
  // to. Falls back to the genesis checkpoint when the store has no data for
  // that checkpoint number (e.g. the synthetic genesis header above).
  private async getCheckpointIdForBlock(blockData: Pick<BlockData, 'checkpointNumber'>): Promise<CheckpointId> {
    const checkpointData = await this.blockStore.getCheckpointData(blockData.checkpointNumber);
    if (!checkpointData) {
      return {
        number: CheckpointNumber.ZERO,
        hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
      };
    }
    return {
      number: checkpointData.checkpointNumber,
      hash: checkpointData.header.hash().toString(),
    };
  }
}
|
@@ -1,11 +1,13 @@
|
|
|
1
1
|
import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
|
|
2
2
|
import { BlockNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { filterAsync } from '@aztec/foundation/collection';
|
|
3
4
|
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
4
5
|
import { createLogger } from '@aztec/foundation/log';
|
|
5
6
|
import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
|
|
6
7
|
import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
|
|
7
8
|
import type { AztecAddress } from '@aztec/stdlib/aztec-address';
|
|
8
|
-
import {
|
|
9
|
+
import { BlockHash, L2Block } from '@aztec/stdlib/block';
|
|
10
|
+
import { MAX_LOGS_PER_TAG } from '@aztec/stdlib/interfaces/api-limit';
|
|
9
11
|
import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
|
|
10
12
|
import {
|
|
11
13
|
ContractClassLog,
|
|
@@ -18,6 +20,7 @@ import {
|
|
|
18
20
|
Tag,
|
|
19
21
|
TxScopedL2Log,
|
|
20
22
|
} from '@aztec/stdlib/logs';
|
|
23
|
+
import { TxHash } from '@aztec/stdlib/tx';
|
|
21
24
|
|
|
22
25
|
import type { BlockStore } from './block_store.js';
|
|
23
26
|
|
|
@@ -57,7 +60,7 @@ export class LogStore {
|
|
|
57
60
|
* @param block - The L2 block to extract logs from.
|
|
58
61
|
* @returns An object containing the private and public tagged logs for the block.
|
|
59
62
|
*/
|
|
60
|
-
#extractTaggedLogsFromBlock(block:
|
|
63
|
+
#extractTaggedLogsFromBlock(block: L2Block) {
|
|
61
64
|
// SiloedTag (as string) -> array of log buffers.
|
|
62
65
|
const privateTaggedLogs = new Map<string, Buffer[]>();
|
|
63
66
|
// "{contractAddress}_{tag}" (as string) -> array of log buffers.
|
|
@@ -118,7 +121,7 @@ export class LogStore {
|
|
|
118
121
|
* @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
|
|
119
122
|
* "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
|
|
120
123
|
*/
|
|
121
|
-
#extractTaggedLogs(blocks:
|
|
124
|
+
#extractTaggedLogs(blocks: L2Block[]): {
|
|
122
125
|
privateTaggedLogs: Map<string, Buffer[]>;
|
|
123
126
|
publicTaggedLogs: Map<string, Buffer[]>;
|
|
124
127
|
} {
|
|
@@ -144,111 +147,148 @@ export class LogStore {
|
|
|
144
147
|
return { privateTaggedLogs, publicTaggedLogs };
|
|
145
148
|
}
|
|
146
149
|
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
addLogs(blocks: L2BlockNew[]): Promise<boolean> {
|
|
153
|
-
const { privateTaggedLogs, publicTaggedLogs } = this.#extractTaggedLogs(blocks);
|
|
150
|
+
/**
 * Indexes the private tagged logs of the given blocks by tag.
 * Blocks whose number already has an entry in #privateLogKeysByBlock are
 * skipped, making the operation idempotent across repeated insertions.
 */
async #addPrivateLogs(blocks: L2Block[]): Promise<void> {
  // Only process blocks we have not indexed before.
  const newBlocks = await filterAsync(
    blocks,
    async block => !(await this.#privateLogKeysByBlock.hasAsync(block.number)),
  );

  const { privateTaggedLogs } = this.#extractTaggedLogs(newBlocks);
  const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());

  // Fetch logs already stored for these tags so the new logs are appended
  // after the existing ones rather than replacing them.
  const currentPrivateTaggedLogs = await Promise.all(
    keysOfPrivateLogsToUpdate.map(async key => ({
      tag: key,
      logBuffers: await this.#privateLogsByTag.getAsync(key),
    })),
  );

  // Prepend existing logs to the freshly extracted ones for each tag.
  for (const taggedLogBuffer of currentPrivateTaggedLogs) {
    if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
      privateTaggedLogs.set(
        taggedLogBuffer.tag,
        taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
      );
    }
  }

  // NOTE(review): every new block is associated with ALL tags extracted from
  // the whole batch (not only the tags found in that particular block), and
  // each tag's logs are re-written once per block — confirm this is intended.
  for (const block of newBlocks) {
    const privateTagsInBlock: string[] = [];
    for (const [tag, logs] of privateTaggedLogs.entries()) {
      await this.#privateLogsByTag.set(tag, logs);
      privateTagsInBlock.push(tag);
    }
    await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
  }
}
|
|
191
184
|
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
|
|
185
|
+
/**
 * Indexes the public logs of the given blocks: by contract+tag for tagged
 * lookups, and packed per block (prefixed with the block hash) for range
 * queries. Blocks already present in #publicLogKeysByBlock are skipped,
 * making the operation idempotent across repeated insertions.
 */
async #addPublicLogs(blocks: L2Block[]): Promise<void> {
  // Only process blocks we have not indexed before.
  const newBlocks = await filterAsync(
    blocks,
    async block => !(await this.#publicLogKeysByBlock.hasAsync(block.number)),
  );

  const { publicTaggedLogs } = this.#extractTaggedLogs(newBlocks);
  const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());

  // Fetch logs already stored for these keys so new logs are appended after
  // the existing ones rather than replacing them.
  const currentPublicTaggedLogs = await Promise.all(
    keysOfPublicLogsToUpdate.map(async key => ({
      tag: key,
      logBuffers: await this.#publicLogsByContractAndTag.getAsync(key),
    })),
  );

  // Prepend existing logs to the freshly extracted ones for each key.
  for (const taggedLogBuffer of currentPublicTaggedLogs) {
    if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
      publicTaggedLogs.set(
        taggedLogBuffer.tag,
        taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.tag)!),
      );
    }
  }

  // NOTE(review): as with #addPrivateLogs, every new block is associated with
  // ALL keys extracted from the whole batch, and each key's logs are
  // re-written once per block — confirm this is intended.
  for (const block of newBlocks) {
    const blockHash = await block.hash();
    const publicTagsInBlock: string[] = [];
    for (const [tag, logs] of publicTaggedLogs.entries()) {
      await this.#publicLogsByContractAndTag.set(tag, logs);
      publicTagsInBlock.push(tag);
    }
    await this.#publicLogKeysByBlock.set(block.number, publicTagsInBlock);

    // Serialize all public logs in the block as:
    // [txIndex, txHash, numLogs, ...logs] per tx effect, flattened.
    const publicLogsInBlock = block.body.txEffects
      .map((txEffect, txIndex) =>
        [
          numToUInt32BE(txIndex),
          txEffect.txHash.toBuffer(),
          numToUInt32BE(txEffect.publicLogs.length),
          txEffect.publicLogs.map(log => log.toBuffer()),
        ].flat(),
      )
      .flat();

    await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
  }
}
|
|
233
|
+
|
|
234
|
+
/**
 * Indexes the contract class logs of the given blocks, packed per block and
 * prefixed with the block hash. Blocks already present in
 * #contractClassLogsByBlock are skipped, making the operation idempotent.
 */
async #addContractClassLogs(blocks: L2Block[]): Promise<void> {
  // Only process blocks we have not indexed before.
  const newBlocks = await filterAsync(
    blocks,
    async block => !(await this.#contractClassLogsByBlock.hasAsync(block.number)),
  );

  for (const block of newBlocks) {
    const blockHash = await block.hash();

    // Serialize all contract class logs in the block as:
    // [txIndex, txHash, numLogs, ...logs] per tx effect, flattened.
    const contractClassLogsInBlock = block.body.txEffects
      .map((txEffect, txIndex) =>
        [
          numToUInt32BE(txIndex),
          txEffect.txHash.toBuffer(),
          numToUInt32BE(txEffect.contractClassLogs.length),
          txEffect.contractClassLogs.map(log => log.toBuffer()),
        ].flat(),
      )
      .flat();

    await this.#contractClassLogsByBlock.set(
      block.number,
      this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
    );
  }
}
|
|
232
260
|
|
|
261
|
+
/**
 * Append new logs to the store's list.
 * @param blocks - The blocks for which to add the logs.
 * @returns True if the operation is successful.
 */
addLogs(blocks: L2Block[]): Promise<boolean> {
  // All three indexes (private, public, contract class) are updated
  // concurrently inside a single store transaction so they stay consistent.
  return this.db.transactionAsync(async () => {
    await Promise.all([
      this.#addPrivateLogs(blocks),
      this.#addPublicLogs(blocks),
      this.#addContractClassLogs(blocks),
    ]);
    return true;
  });
}
|
|
236
276
|
|
|
237
|
-
#packWithBlockHash(blockHash:
|
|
277
|
+
#packWithBlockHash(blockHash: BlockHash, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
|
|
238
278
|
return Buffer.concat([blockHash.toBuffer(), ...data]);
|
|
239
279
|
}
|
|
240
280
|
|
|
241
|
-
#unpackBlockHash(reader: BufferReader):
|
|
281
|
+
/**
 * Reads the leading block hash from a packed per-block log buffer (the
 * counterpart of #packWithBlockHash).
 * @throws If the buffer is empty, i.e. no block hash is present.
 */
#unpackBlockHash(reader: BufferReader): BlockHash {
  // The hash is stored as a field element at the start of the buffer.
  const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;

  if (!blockHash) {
    throw new Error('Failed to read block hash from log entry buffer');
  }

  return new BlockHash(blockHash);
}
|
|
250
290
|
|
|
251
|
-
deleteLogs(blocks:
|
|
291
|
+
deleteLogs(blocks: L2Block[]): Promise<boolean> {
|
|
252
292
|
return this.db.transactionAsync(async () => {
|
|
253
293
|
await Promise.all(
|
|
254
294
|
blocks.map(async block => {
|
|
@@ -278,27 +318,49 @@ export class LogStore {
|
|
|
278
318
|
}
|
|
279
319
|
|
|
280
320
|
/**
|
|
281
|
-
* Gets
|
|
321
|
+
* Gets private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
|
|
282
322
|
* array implies no logs match that tag.
|
|
323
|
+
* @param tags - The tags to search for.
|
|
324
|
+
* @param page - The page number (0-indexed) for pagination.
|
|
325
|
+
* @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
|
|
326
|
+
* MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
|
|
283
327
|
*/
|
|
284
|
-
async getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
|
|
328
|
+
async getPrivateLogsByTags(tags: SiloedTag[], page: number = 0): Promise<TxScopedL2Log[][]> {
|
|
285
329
|
const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
|
|
330
|
+
const start = page * MAX_LOGS_PER_TAG;
|
|
331
|
+
const end = start + MAX_LOGS_PER_TAG;
|
|
286
332
|
|
|
287
|
-
return logs.map(
|
|
333
|
+
return logs.map(
|
|
334
|
+
logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
|
|
335
|
+
);
|
|
288
336
|
}
|
|
289
337
|
|
|
290
338
|
/**
|
|
291
|
-
* Gets
|
|
339
|
+
* Gets public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
|
|
292
340
|
* logs is returned. An empty array implies no logs match that tag.
|
|
341
|
+
* @param contractAddress - The contract address to search logs for.
|
|
342
|
+
* @param tags - The tags to search for.
|
|
343
|
+
* @param page - The page number (0-indexed) for pagination.
|
|
344
|
+
* @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
|
|
345
|
+
* MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
|
|
293
346
|
*/
|
|
294
|
-
async getPublicLogsByTagsFromContract(
|
|
347
|
+
async getPublicLogsByTagsFromContract(
|
|
348
|
+
contractAddress: AztecAddress,
|
|
349
|
+
tags: Tag[],
|
|
350
|
+
page: number = 0,
|
|
351
|
+
): Promise<TxScopedL2Log[][]> {
|
|
295
352
|
const logs = await Promise.all(
|
|
296
353
|
tags.map(tag => {
|
|
297
354
|
const key = `${contractAddress.toString()}_${tag.value.toString()}`;
|
|
298
355
|
return this.#publicLogsByContractAndTag.getAsync(key);
|
|
299
356
|
}),
|
|
300
357
|
);
|
|
301
|
-
|
|
358
|
+
const start = page * MAX_LOGS_PER_TAG;
|
|
359
|
+
const end = start + MAX_LOGS_PER_TAG;
|
|
360
|
+
|
|
361
|
+
return logs.map(
|
|
362
|
+
logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
|
|
363
|
+
);
|
|
302
364
|
}
|
|
303
365
|
|
|
304
366
|
/**
|
|
@@ -327,24 +389,33 @@ export class LogStore {
|
|
|
327
389
|
}
|
|
328
390
|
|
|
329
391
|
const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
|
|
330
|
-
const publicLogsInBlock:
|
|
392
|
+
const publicLogsInBlock: { txHash: TxHash; logs: PublicLog[] }[] = [];
|
|
331
393
|
const reader = new BufferReader(buffer);
|
|
332
394
|
|
|
333
395
|
const blockHash = this.#unpackBlockHash(reader);
|
|
334
396
|
|
|
335
397
|
while (reader.remainingBytes() > 0) {
|
|
336
398
|
const indexOfTx = reader.readNumber();
|
|
399
|
+
const txHash = reader.readObject(TxHash);
|
|
337
400
|
const numLogsInTx = reader.readNumber();
|
|
338
|
-
publicLogsInBlock[indexOfTx] = [];
|
|
401
|
+
publicLogsInBlock[indexOfTx] = { txHash, logs: [] };
|
|
339
402
|
for (let i = 0; i < numLogsInTx; i++) {
|
|
340
|
-
publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
|
|
403
|
+
publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
|
|
341
404
|
}
|
|
342
405
|
}
|
|
343
406
|
|
|
344
|
-
const
|
|
407
|
+
const txData = publicLogsInBlock[txIndex];
|
|
345
408
|
|
|
346
409
|
const logs: ExtendedPublicLog[] = [];
|
|
347
|
-
const maxLogsHit = this.#
|
|
410
|
+
const maxLogsHit = this.#accumulatePublicLogs(
|
|
411
|
+
logs,
|
|
412
|
+
blockNumber,
|
|
413
|
+
blockHash,
|
|
414
|
+
txIndex,
|
|
415
|
+
txData.txHash,
|
|
416
|
+
txData.logs,
|
|
417
|
+
filter,
|
|
418
|
+
);
|
|
348
419
|
|
|
349
420
|
return { logs, maxLogsHit };
|
|
350
421
|
}
|
|
@@ -365,22 +436,31 @@ export class LogStore {
|
|
|
365
436
|
|
|
366
437
|
let maxLogsHit = false;
|
|
367
438
|
loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
|
|
368
|
-
const publicLogsInBlock:
|
|
439
|
+
const publicLogsInBlock: { txHash: TxHash; logs: PublicLog[] }[] = [];
|
|
369
440
|
const reader = new BufferReader(logBuffer);
|
|
370
441
|
|
|
371
442
|
const blockHash = this.#unpackBlockHash(reader);
|
|
372
443
|
|
|
373
444
|
while (reader.remainingBytes() > 0) {
|
|
374
445
|
const indexOfTx = reader.readNumber();
|
|
446
|
+
const txHash = reader.readObject(TxHash);
|
|
375
447
|
const numLogsInTx = reader.readNumber();
|
|
376
|
-
publicLogsInBlock[indexOfTx] = [];
|
|
448
|
+
publicLogsInBlock[indexOfTx] = { txHash, logs: [] };
|
|
377
449
|
for (let i = 0; i < numLogsInTx; i++) {
|
|
378
|
-
publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
|
|
450
|
+
publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
|
|
379
451
|
}
|
|
380
452
|
}
|
|
381
453
|
for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
|
|
382
|
-
const
|
|
383
|
-
maxLogsHit = this.#
|
|
454
|
+
const txData = publicLogsInBlock[txIndex];
|
|
455
|
+
maxLogsHit = this.#accumulatePublicLogs(
|
|
456
|
+
logs,
|
|
457
|
+
blockNumber,
|
|
458
|
+
blockHash,
|
|
459
|
+
txIndex,
|
|
460
|
+
txData.txHash,
|
|
461
|
+
txData.logs,
|
|
462
|
+
filter,
|
|
463
|
+
);
|
|
384
464
|
if (maxLogsHit) {
|
|
385
465
|
this.#log.debug(`Max logs hit at block ${blockNumber}`);
|
|
386
466
|
break loopOverBlocks;
|
|
@@ -416,24 +496,33 @@ export class LogStore {
|
|
|
416
496
|
return { logs: [], maxLogsHit: false };
|
|
417
497
|
}
|
|
418
498
|
const contractClassLogsBuffer = (await this.#contractClassLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
|
|
419
|
-
const contractClassLogsInBlock:
|
|
499
|
+
const contractClassLogsInBlock: { txHash: TxHash; logs: ContractClassLog[] }[] = [];
|
|
420
500
|
|
|
421
501
|
const reader = new BufferReader(contractClassLogsBuffer);
|
|
422
502
|
const blockHash = this.#unpackBlockHash(reader);
|
|
423
503
|
|
|
424
504
|
while (reader.remainingBytes() > 0) {
|
|
425
505
|
const indexOfTx = reader.readNumber();
|
|
506
|
+
const txHash = reader.readObject(TxHash);
|
|
426
507
|
const numLogsInTx = reader.readNumber();
|
|
427
|
-
contractClassLogsInBlock[indexOfTx] = [];
|
|
508
|
+
contractClassLogsInBlock[indexOfTx] = { txHash, logs: [] };
|
|
428
509
|
for (let i = 0; i < numLogsInTx; i++) {
|
|
429
|
-
contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
|
|
510
|
+
contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
|
|
430
511
|
}
|
|
431
512
|
}
|
|
432
513
|
|
|
433
|
-
const
|
|
514
|
+
const txData = contractClassLogsInBlock[txIndex];
|
|
434
515
|
|
|
435
516
|
const logs: ExtendedContractClassLog[] = [];
|
|
436
|
-
const maxLogsHit = this.#
|
|
517
|
+
const maxLogsHit = this.#accumulateContractClassLogs(
|
|
518
|
+
logs,
|
|
519
|
+
blockNumber,
|
|
520
|
+
blockHash,
|
|
521
|
+
txIndex,
|
|
522
|
+
txData.txHash,
|
|
523
|
+
txData.logs,
|
|
524
|
+
filter,
|
|
525
|
+
);
|
|
437
526
|
|
|
438
527
|
return { logs, maxLogsHit };
|
|
439
528
|
}
|
|
@@ -457,20 +546,29 @@ export class LogStore {
|
|
|
457
546
|
start,
|
|
458
547
|
end,
|
|
459
548
|
})) {
|
|
460
|
-
const contractClassLogsInBlock:
|
|
549
|
+
const contractClassLogsInBlock: { txHash: TxHash; logs: ContractClassLog[] }[] = [];
|
|
461
550
|
const reader = new BufferReader(logBuffer);
|
|
462
551
|
const blockHash = this.#unpackBlockHash(reader);
|
|
463
552
|
while (reader.remainingBytes() > 0) {
|
|
464
553
|
const indexOfTx = reader.readNumber();
|
|
554
|
+
const txHash = reader.readObject(TxHash);
|
|
465
555
|
const numLogsInTx = reader.readNumber();
|
|
466
|
-
contractClassLogsInBlock[indexOfTx] = [];
|
|
556
|
+
contractClassLogsInBlock[indexOfTx] = { txHash, logs: [] };
|
|
467
557
|
for (let i = 0; i < numLogsInTx; i++) {
|
|
468
|
-
contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
|
|
558
|
+
contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
|
|
469
559
|
}
|
|
470
560
|
}
|
|
471
561
|
for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
|
|
472
|
-
const
|
|
473
|
-
maxLogsHit = this.#
|
|
562
|
+
const txData = contractClassLogsInBlock[txIndex];
|
|
563
|
+
maxLogsHit = this.#accumulateContractClassLogs(
|
|
564
|
+
logs,
|
|
565
|
+
blockNumber,
|
|
566
|
+
blockHash,
|
|
567
|
+
txIndex,
|
|
568
|
+
txData.txHash,
|
|
569
|
+
txData.logs,
|
|
570
|
+
filter,
|
|
571
|
+
);
|
|
474
572
|
if (maxLogsHit) {
|
|
475
573
|
this.#log.debug(`Max logs hit at block ${blockNumber}`);
|
|
476
574
|
break loopOverBlocks;
|
|
@@ -481,12 +579,13 @@ export class LogStore {
|
|
|
481
579
|
return { logs, maxLogsHit };
|
|
482
580
|
}
|
|
483
581
|
|
|
484
|
-
#
|
|
485
|
-
results:
|
|
582
|
+
#accumulatePublicLogs(
|
|
583
|
+
results: ExtendedPublicLog[],
|
|
486
584
|
blockNumber: number,
|
|
487
|
-
blockHash:
|
|
585
|
+
blockHash: BlockHash,
|
|
488
586
|
txIndex: number,
|
|
489
|
-
|
|
587
|
+
txHash: TxHash,
|
|
588
|
+
txLogs: PublicLog[],
|
|
490
589
|
filter: LogFilter = {},
|
|
491
590
|
): boolean {
|
|
492
591
|
let maxLogsHit = false;
|
|
@@ -494,15 +593,37 @@ export class LogStore {
|
|
|
494
593
|
for (; logIndex < txLogs.length; logIndex++) {
|
|
495
594
|
const log = txLogs[logIndex];
|
|
496
595
|
if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
throw new Error('Unknown log type');
|
|
596
|
+
results.push(
|
|
597
|
+
new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
|
|
598
|
+
);
|
|
599
|
+
|
|
600
|
+
if (results.length >= this.#logsMaxPageSize) {
|
|
601
|
+
maxLogsHit = true;
|
|
602
|
+
break;
|
|
505
603
|
}
|
|
604
|
+
}
|
|
605
|
+
}
|
|
606
|
+
|
|
607
|
+
return maxLogsHit;
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
#accumulateContractClassLogs(
|
|
611
|
+
results: ExtendedContractClassLog[],
|
|
612
|
+
blockNumber: number,
|
|
613
|
+
blockHash: BlockHash,
|
|
614
|
+
txIndex: number,
|
|
615
|
+
txHash: TxHash,
|
|
616
|
+
txLogs: ContractClassLog[],
|
|
617
|
+
filter: LogFilter = {},
|
|
618
|
+
): boolean {
|
|
619
|
+
let maxLogsHit = false;
|
|
620
|
+
let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
|
|
621
|
+
for (; logIndex < txLogs.length; logIndex++) {
|
|
622
|
+
const log = txLogs[logIndex];
|
|
623
|
+
if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
|
|
624
|
+
results.push(
|
|
625
|
+
new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
|
|
626
|
+
);
|
|
506
627
|
|
|
507
628
|
if (results.length >= this.#logsMaxPageSize) {
|
|
508
629
|
maxLogsHit = true;
|