@aztec/archiver 3.0.0-rc.5 → 4.0.0-nightly.20260107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +69 -49
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +777 -214
- package/dest/archiver/archiver_store.d.ts +89 -30
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +1785 -288
- package/dest/archiver/config.d.ts +3 -3
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +2 -2
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +49 -17
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +320 -84
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +33 -37
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +60 -35
- package/dest/archiver/kv_archiver_store/log_store.d.ts +14 -11
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +149 -62
- package/dest/archiver/l1/bin/retrieve-calldata.js +5 -3
- package/dest/archiver/l1/calldata_retriever.d.ts +17 -3
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -1
- package/dest/archiver/l1/calldata_retriever.js +75 -7
- package/dest/archiver/l1/data_retrieval.d.ts +13 -10
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -1
- package/dest/archiver/l1/data_retrieval.js +31 -18
- package/dest/archiver/structs/published.d.ts +1 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/factory.d.ts +1 -1
- package/dest/factory.js +1 -1
- package/dest/test/mock_l2_block_source.d.ts +10 -3
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +16 -15
- package/package.json +13 -13
- package/src/archiver/archiver.ts +509 -260
- package/src/archiver/archiver_store.ts +99 -29
- package/src/archiver/archiver_store_test_suite.ts +1831 -274
- package/src/archiver/config.ts +7 -3
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/kv_archiver_store/block_store.ts +434 -94
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +74 -49
- package/src/archiver/kv_archiver_store/log_store.ts +213 -77
- package/src/archiver/l1/bin/retrieve-calldata.ts +3 -3
- package/src/archiver/l1/calldata_retriever.ts +116 -6
- package/src/archiver/l1/data_retrieval.ts +41 -20
- package/src/archiver/structs/published.ts +0 -1
- package/src/factory.ts +1 -1
- package/src/test/mock_l2_block_source.ts +20 -16

--- a/package/src/archiver/kv_archiver_store/kv_archiver_store.ts
+++ b/package/src/archiver/kv_archiver_store/kv_archiver_store.ts
@@ -6,7 +6,8 @@ import { createLogger } from '@aztec/foundation/log';
 import type { AztecAsyncKVStore, CustomRange, StoreSize } from '@aztec/kv-store';
 import { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
-import {
+import { CheckpointedL2Block, L2BlockHash, L2BlockNew, type ValidateBlockResult } from '@aztec/stdlib/block';
+import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
 import type {
   ContractClassPublic,
   ContractDataSource,
@@ -16,7 +17,7 @@ import type {
   UtilityFunctionWithMembershipProof,
 } from '@aztec/stdlib/contract';
 import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
-import type { LogFilter, TxScopedL2Log } from '@aztec/stdlib/logs';
+import type { LogFilter, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs';
 import type { BlockHeader, TxHash, TxReceipt } from '@aztec/stdlib/tx';
 import type { UInt64 } from '@aztec/stdlib/types';
 
@@ -24,14 +25,13 @@ import { join } from 'path';
 
 import type { ArchiverDataStore, ArchiverL1SynchPoint } from '../archiver_store.js';
 import type { InboxMessage } from '../structs/inbox_message.js';
-import type
-import { BlockStore } from './block_store.js';
+import { BlockStore, type CheckpointData } from './block_store.js';
 import { ContractClassStore } from './contract_class_store.js';
 import { ContractInstanceStore } from './contract_instance_store.js';
 import { LogStore } from './log_store.js';
 import { MessageStore } from './message_store.js';
 
-export const ARCHIVER_DB_VERSION =
+export const ARCHIVER_DB_VERSION = 5;
 export const MAX_FUNCTION_SIGNATURES = 1000;
 export const MAX_FUNCTION_NAME_LEN = 256;
 
@@ -67,7 +67,7 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
   }
 
   public getBlockNumber(): Promise<BlockNumber> {
-    return this.
+    return this.#blockStore.getLatestL2BlockNumber();
   }
 
   public async getContract(
@@ -186,42 +186,52 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
    * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks:
+  addBlocks(blocks: L2BlockNew[], opts: { force?: boolean; checkpointNumber?: number } = {}): Promise<boolean> {
     return this.#blockStore.addBlocks(blocks, opts);
   }
 
+  getRangeOfCheckpoints(from: CheckpointNumber, limit: number): Promise<CheckpointData[]> {
+    return this.#blockStore.getRangeOfCheckpoints(from, limit);
+  }
+  getLatestBlockNumber(): Promise<BlockNumber> {
+    return this.#blockStore.getLatestBlockNumber();
+  }
+
   /**
-   * Unwinds
+   * Unwinds checkpoints from the database
    * @param from - The tip of the chain, passed for verification purposes,
   * ensuring that we don't end up deleting something we did not intend
-   * @param
+   * @param checkpointsToUnwind - The number of checkpoints we are to unwind
    * @returns True if the operation is successful
    */
-
-    return this.#blockStore.
+  unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
+    return this.#blockStore.unwindCheckpoints(from, checkpointsToUnwind);
   }
 
-
-    return this.#blockStore.
+  addCheckpoints(checkpoints: PublishedCheckpoint[]): Promise<boolean> {
+    return this.#blockStore.addCheckpoints(checkpoints);
  }
 
-
+  getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
+    return this.#blockStore.getCheckpointedBlock(number);
+  }
+  getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
+    return this.#blockStore.getCheckpointedBlockByHash(blockHash);
+  }
+  getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
+    return this.#blockStore.getCheckpointedBlockByArchive(archive);
+  }
+  getBlock(number: BlockNumber): Promise<L2BlockNew | undefined> {
+    return this.#blockStore.getBlock(number);
+  }
+  getBlockByHash(blockHash: Fr): Promise<L2BlockNew | undefined> {
     return this.#blockStore.getBlockByHash(L2BlockHash.fromField(blockHash));
   }
-
-  getPublishedBlockByArchive(archive: Fr): Promise<PublishedL2Block | undefined> {
+  getBlockByArchive(archive: Fr): Promise<L2BlockNew | undefined> {
     return this.#blockStore.getBlockByArchive(archive);
   }
-
-
-   * Gets up to `limit` amount of L2 blocks starting from `from`.
-   *
-   * @param start - Number of the first block to return (inclusive).
-   * @param limit - The number of blocks to return.
-   * @returns The requested L2 blocks
-   */
-  getPublishedBlocks(start: BlockNumber, limit: number): Promise<PublishedL2Block[]> {
-    return toArray(this.#blockStore.getBlocks(start, limit));
+  getBlocks(from: BlockNumber, limit: BlockNumber): Promise<L2BlockNew[]> {
+    return toArray(this.#blockStore.getBlocks(from, limit));
   }
 
   /**
@@ -266,11 +276,11 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
    * @param blocks - The blocks for which to add the logs.
    * @returns True if the operation is successful.
    */
-  addLogs(blocks:
+  addLogs(blocks: L2BlockNew[]): Promise<boolean> {
     return this.#logStore.addLogs(blocks);
   }
 
-  deleteLogs(blocks:
+  deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
     return this.#logStore.deleteLogs(blocks);
   }
 
@@ -308,16 +318,17 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
     return this.#messageStore.getL1ToL2Messages(checkpointNumber);
   }
 
-
-
-
-
-
-
-
-
+  getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
+    try {
+      return this.#logStore.getPrivateLogsByTags(tags);
+    } catch (err) {
+      return Promise.reject(err);
+    }
+  }
+
+  getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
     try {
-      return this.#logStore.
+      return this.#logStore.getPublicLogsByTagsFromContract(contractAddress, tags);
     } catch (err) {
       return Promise.reject(err);
     }
@@ -349,20 +360,12 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
     }
   }
 
-
-
-   * @returns The number of the latest L2 block processed.
-   */
-  getSynchedL2BlockNumber(): Promise<BlockNumber> {
-    return this.#blockStore.getSynchedL2BlockNumber();
+  getProvenCheckpointNumber(): Promise<CheckpointNumber> {
+    return this.#blockStore.getProvenCheckpointNumber();
   }
 
-
-
-  }
-
-  async setProvenL2BlockNumber(blockNumber: BlockNumber) {
-    await this.#blockStore.setProvenL2BlockNumber(blockNumber);
+  async setProvenCheckpointNumber(checkpointNumber: CheckpointNumber) {
+    await this.#blockStore.setProvenCheckpointNumber(checkpointNumber);
   }
 
   async setBlockSynchedL1BlockNumber(l1BlockNumber: bigint) {
@@ -373,6 +376,10 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
     await this.#messageStore.setSynchedL1Block(l1Block);
   }
 
+  getProvenBlockNumber(): Promise<BlockNumber> {
+    return this.#blockStore.getProvenBlockNumber();
+  }
+
   /**
    * Gets the last L1 block number processed by the archiver
    */
@@ -410,4 +417,22 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSource
   public setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
     return this.#blockStore.setPendingChainValidationStatus(status);
   }
+
+  public getCheckpointedL2BlockNumber(): Promise<BlockNumber> {
+    return this.#blockStore.getCheckpointedL2BlockNumber();
+  }
+  public getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
+    return this.#blockStore.getLatestCheckpointNumber();
+  }
+  async setCheckpointSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
+    await this.#blockStore.setSynchedL1BlockNumber(l1BlockNumber);
+  }
+
+  getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise<L2BlockNew[] | undefined> {
+    return this.#blockStore.getBlocksForCheckpoint(checkpointNumber);
+  }
+
+  getCheckpointData(checkpointNumber: CheckpointNumber): Promise<CheckpointData | undefined> {
+    return this.#blockStore.getCheckpointData(checkpointNumber);
+  }
 }
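
Taken together, the kv_archiver_store.ts hunks above swap the published-block oriented surface (getPublishedBlocks, getSynchedL2BlockNumber, setProvenL2BlockNumber) for a checkpoint-oriented one. The sketch below is only a reading aid: it collects the new method signatures from the diff into a single interface, with the referenced types reduced to stand-in aliases so the snippet type-checks on its own. The interface name and the simplified aliases are ours, not part of the package.

```ts
// Reading aid only: method names and shapes are copied from the diff above,
// but this interface and the simplified stand-in types are assumptions.
type BlockNumber = number;
type CheckpointNumber = number;
type Fr = bigint; // stand-in for the field element type
type L2BlockNew = unknown; // stand-ins for '@aztec/stdlib/block' and friends
type CheckpointedL2Block = unknown;
type PublishedCheckpoint = unknown;
type CheckpointData = unknown;

interface CheckpointOrientedStoreSketch {
  // Writes
  addBlocks(blocks: L2BlockNew[], opts?: { force?: boolean; checkpointNumber?: number }): Promise<boolean>;
  addCheckpoints(checkpoints: PublishedCheckpoint[]): Promise<boolean>;
  unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean>;

  // Block-level reads
  getBlock(number: BlockNumber): Promise<L2BlockNew | undefined>;
  getBlockByHash(blockHash: Fr): Promise<L2BlockNew | undefined>;
  getBlockByArchive(archive: Fr): Promise<L2BlockNew | undefined>;
  getBlocks(from: BlockNumber, limit: BlockNumber): Promise<L2BlockNew[]>;

  // Checkpoint-level reads
  getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined>;
  getRangeOfCheckpoints(from: CheckpointNumber, limit: number): Promise<CheckpointData[]>;
  getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise<L2BlockNew[] | undefined>;

  // Sync and proof tracking, now expressed in checkpoints
  getProvenCheckpointNumber(): Promise<CheckpointNumber>;
  setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void>;
  getSynchedCheckpointNumber(): Promise<CheckpointNumber>;
  setCheckpointSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void>;
}
```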

--- a/package/src/archiver/kv_archiver_store/log_store.ts
+++ b/package/src/archiver/kv_archiver_store/log_store.ts
@@ -1,10 +1,11 @@
-import { INITIAL_L2_BLOCK_NUM
+import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
 import { BlockNumber } from '@aztec/foundation/branded-types';
-import
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
 import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
 import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
-import type {
+import type { AztecAddress } from '@aztec/stdlib/aztec-address';
+import { L2BlockHash, L2BlockNew } from '@aztec/stdlib/block';
 import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
 import {
   ContractClassLog,
@@ -13,6 +14,8 @@ import {
   type LogFilter,
   LogId,
   PublicLog,
+  type SiloedTag,
+  Tag,
   TxScopedL2Log,
 } from '@aztec/stdlib/logs';
 
@@ -22,8 +25,12 @@ import type { BlockStore } from './block_store.js';
  * A store for logs
  */
 export class LogStore {
-
-  #
+  // `tag` --> private logs
+  #privateLogsByTag: AztecAsyncMap<string, Buffer[]>;
+  // `{contractAddress}_${tag}` --> public logs
+  #publicLogsByContractAndTag: AztecAsyncMap<string, Buffer[]>;
+  #privateLogKeysByBlock: AztecAsyncMap<number, string[]>;
+  #publicLogKeysByBlock: AztecAsyncMap<number, string[]>;
   #publicLogsByBlock: AztecAsyncMap<number, Buffer>;
   #contractClassLogsByBlock: AztecAsyncMap<number, Buffer>;
   #logsMaxPageSize: number;
@@ -34,42 +41,107 @@ export class LogStore {
     private blockStore: BlockStore,
     logsMaxPageSize: number = 1000,
   ) {
-    this.#
-    this.#
+    this.#privateLogsByTag = db.openMap('archiver_private_tagged_logs_by_tag');
+    this.#publicLogsByContractAndTag = db.openMap('archiver_public_tagged_logs_by_tag');
+    this.#privateLogKeysByBlock = db.openMap('archiver_private_log_keys_by_block');
+    this.#publicLogKeysByBlock = db.openMap('archiver_public_log_keys_by_block');
     this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
     this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
 
     this.#logsMaxPageSize = logsMaxPageSize;
   }
 
-
-
-
-
-
-
+  /**
+   * Extracts tagged logs from a single block, grouping them into private and public maps.
+   *
+   * @param block - The L2 block to extract logs from.
+   * @returns An object containing the private and public tagged logs for the block.
+   */
+  #extractTaggedLogsFromBlock(block: L2BlockNew) {
+    // SiloedTag (as string) -> array of log buffers.
+    const privateTaggedLogs = new Map<string, Buffer[]>();
+    // "{contractAddress}_{tag}" (as string) -> array of log buffers.
+    const publicTaggedLogs = new Map<string, Buffer[]>();
+
+    block.body.txEffects.forEach(txEffect => {
       const txHash = txEffect.txHash;
-      const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
 
-      txEffect.privateLogs.forEach(
+      txEffect.privateLogs.forEach(log => {
+        // Private logs use SiloedTag (already siloed by kernel)
         const tag = log.fields[0];
         this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);
 
-        const currentLogs =
-        currentLogs.push(
-
+        const currentLogs = privateTaggedLogs.get(tag.toString()) ?? [];
+        currentLogs.push(
+          new TxScopedL2Log(
+            txHash,
+            block.number,
+            block.timestamp,
+            log.getEmittedFields(),
+            txEffect.noteHashes,
+            txEffect.nullifiers[0],
+          ).toBuffer(),
+        );
+        privateTaggedLogs.set(tag.toString(), currentLogs);
       });
 
-      txEffect.publicLogs.forEach(
+      txEffect.publicLogs.forEach(log => {
+        // Public logs use Tag directly (not siloed) and are stored with contract address
        const tag = log.fields[0];
-
-
-
-
-
+        const contractAddress = log.contractAddress;
+        const key = `${contractAddress.toString()}_${tag.toString()}`;
+        this.#log.debug(
+          `Found public log with tag ${tag.toString()} from contract ${contractAddress.toString()} in block ${block.number}`,
+        );
+
+        const currentLogs = publicTaggedLogs.get(key) ?? [];
+        currentLogs.push(
+          new TxScopedL2Log(
+            txHash,
+            block.number,
+            block.timestamp,
+            log.getEmittedFields(),
+            txEffect.noteHashes,
+            txEffect.nullifiers[0],
+          ).toBuffer(),
+        );
+        publicTaggedLogs.set(key, currentLogs);
       });
     });
-
+
+    return { privateTaggedLogs, publicTaggedLogs };
+  }
+
+  /**
+   * Extracts and aggregates tagged logs from a list of blocks.
+   * @param blocks - The blocks to extract logs from.
+   * @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
+   * "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
+   */
+  #extractTaggedLogs(blocks: L2BlockNew[]): {
+    privateTaggedLogs: Map<string, Buffer[]>;
+    publicTaggedLogs: Map<string, Buffer[]>;
+  } {
+    const taggedLogsInBlocks = blocks.map(block => this.#extractTaggedLogsFromBlock(block));
+
+    // Now we merge the maps from each block into a single map.
+    const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs }) => {
+      for (const [tag, logs] of privateTaggedLogs.entries()) {
+        const currentLogs = acc.get(tag) ?? [];
+        acc.set(tag, currentLogs.concat(logs));
+      }
+      return acc;
+    }, new Map<string, Buffer[]>());
+
+    const publicTaggedLogs = taggedLogsInBlocks.reduce((acc, { publicTaggedLogs }) => {
+      for (const [key, logs] of publicTaggedLogs.entries()) {
+        const currentLogs = acc.get(key) ?? [];
+        acc.set(key, currentLogs.concat(logs));
+      }
+      return acc;
+    }, new Map<string, Buffer[]>());
+
+    return { privateTaggedLogs, publicTaggedLogs };
   }
 
   /**
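
The new #extractTaggedLogsFromBlock helper above splits each transaction's logs into two lookup structures: private logs keyed by their (already siloed) tag, and public logs keyed by "{contractAddress}_{tag}", since public tags are not siloed and need the emitting contract to disambiguate them. The standalone sketch below models just that grouping step; TxEffectLike and the plain string/Buffer payloads are simplified stand-ins for illustration, not the package's real types.

```ts
// Simplified model of the grouping performed by #extractTaggedLogsFromBlock.
// TxEffectLike and the string/Buffer payloads are assumptions for illustration.
interface TxEffectLike {
  txHash: string;
  privateLogs: { tag: string; payload: Buffer }[];
  publicLogs: { tag: string; contractAddress: string; payload: Buffer }[];
}

function groupTaggedLogs(txEffects: TxEffectLike[]) {
  const privateByTag = new Map<string, Buffer[]>(); // tag -> serialized logs
  const publicByContractAndTag = new Map<string, Buffer[]>(); // "{contract}_{tag}" -> serialized logs

  for (const tx of txEffects) {
    for (const log of tx.privateLogs) {
      // Private tags are assumed unique enough on their own (siloed upstream).
      const bucket = privateByTag.get(log.tag) ?? [];
      bucket.push(log.payload);
      privateByTag.set(log.tag, bucket);
    }
    for (const log of tx.publicLogs) {
      // Public tags are scoped by the emitting contract's address.
      const key = `${log.contractAddress}_${log.tag}`;
      const bucket = publicByContractAndTag.get(key) ?? [];
      bucket.push(log.payload);
      publicByContractAndTag.set(key, bucket);
    }
  }
  return { privateByTag, publicByContractAndTag };
}
```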

@@ -77,37 +149,59 @@
    * @param blocks - The blocks for which to add the logs.
    * @returns True if the operation is successful.
    */
-  addLogs(blocks:
-    const
-
-
-
-        const currentLogs = acc.get(tag) ?? [];
-        acc.set(tag, currentLogs.concat(logs));
-      }
-      return acc;
-    }, new Map());
-    const tagsToUpdate = Array.from(taggedLogsToAdd.keys());
+  addLogs(blocks: L2BlockNew[]): Promise<boolean> {
+    const { privateTaggedLogs, publicTaggedLogs } = this.#extractTaggedLogs(blocks);
+
+    const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());
+    const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
 
     return this.db.transactionAsync(async () => {
-      const
-
+      const currentPrivateTaggedLogs = await Promise.all(
+        keysOfPrivateLogsToUpdate.map(async key => ({
+          tag: key,
+          logBuffers: await this.#privateLogsByTag.getAsync(key),
+        })),
       );
-
+      currentPrivateTaggedLogs.forEach(taggedLogBuffer => {
        if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
-
+          privateTaggedLogs.set(
            taggedLogBuffer.tag,
-            taggedLogBuffer.logBuffers!.concat(
+            taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
          );
        }
      });
+
+      const currentPublicTaggedLogs = await Promise.all(
+        keysOfPublicLogsToUpdate.map(async key => ({
+          key,
+          logBuffers: await this.#publicLogsByContractAndTag.getAsync(key),
+        })),
+      );
+      currentPublicTaggedLogs.forEach(taggedLogBuffer => {
+        if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
+          publicTaggedLogs.set(
+            taggedLogBuffer.key,
+            taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.key)!),
+          );
+        }
+      });
+
       for (const block of blocks) {
-        const
-
-
-
+        const blockHash = await block.hash();
+
+        const privateTagsInBlock: string[] = [];
+        for (const [tag, logs] of privateTaggedLogs.entries()) {
+          await this.#privateLogsByTag.set(tag, logs);
+          privateTagsInBlock.push(tag);
        }
-        await this.#
+        await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
+
+        const publicKeysInBlock: string[] = [];
+        for (const [key, logs] of publicTaggedLogs.entries()) {
+          await this.#publicLogsByContractAndTag.set(key, logs);
+          publicKeysInBlock.push(key);
+        }
+        await this.#publicLogKeysByBlock.set(block.number, publicKeysInBlock);
 
         const publicLogsInBlock = block.body.txEffects
           .map((txEffect, txIndex) =>
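
Inside addLogs, the per-tag writes follow a read-merge-write pattern within a single KV transaction: for every key about to be touched, the currently stored buffers are fetched, the new buffers are appended after them, and the merged array is written back. A minimal standalone model of that step is sketched below; AsyncMapLike is an assumed stand-in for the store's async map interface, and the per-block key bookkeeping from the diff is omitted.

```ts
// Standalone model of the read-merge-write step in addLogs. AsyncMapLike is an
// assumed stand-in; the real code runs this inside db.transactionAsync.
interface AsyncMapLike<V> {
  getAsync(key: string): Promise<V | undefined>;
  set(key: string, value: V): Promise<void>;
}

async function upsertConcat(map: AsyncMapLike<Buffer[]>, updates: Map<string, Buffer[]>): Promise<void> {
  // Read phase: fetch whatever is already stored under each key to be updated.
  const existing = await Promise.all(
    [...updates.keys()].map(async key => ({ key, stored: await map.getAsync(key) })),
  );

  // Merge phase: previously stored entries stay ahead of the newly added ones.
  for (const { key, stored } of existing) {
    if (stored && stored.length > 0) {
      updates.set(key, stored.concat(updates.get(key)!));
    }
  }

  // Write phase: persist the merged arrays.
  for (const [key, value] of updates.entries()) {
    await map.set(key, value);
  }
}
```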

@@ -129,54 +223,82 @@
         )
         .flat();
 
-        await this.#publicLogsByBlock.set(block.number,
-        await this.#contractClassLogsByBlock.set(
+        await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
+        await this.#contractClassLogsByBlock.set(
+          block.number,
+          this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
+        );
       }
 
       return true;
     });
   }
 
-
+  #packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
+    return Buffer.concat([blockHash.toBuffer(), ...data]);
+  }
+
+  #unpackBlockHash(reader: BufferReader): L2BlockHash {
+    const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
+
+    if (!blockHash) {
+      throw new Error('Failed to read block hash from log entry buffer');
+    }
+
+    return L2BlockHash.fromField(blockHash);
+  }
+
+  deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
     return this.db.transactionAsync(async () => {
-
-
-
-
-
-
-
-
+      await Promise.all(
+        blocks.map(async block => {
+          // Delete private logs
+          const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? [];
+          await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag)));
+
+          // Delete public logs
+          const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? [];
+          await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key)));
+        }),
+      );
 
       await Promise.all(
         blocks.map(block =>
           Promise.all([
             this.#publicLogsByBlock.delete(block.number),
-            this.#
+            this.#privateLogKeysByBlock.delete(block.number),
+            this.#publicLogKeysByBlock.delete(block.number),
             this.#contractClassLogsByBlock.delete(block.number),
           ]),
         ),
       );
 
-      await Promise.all(tagsToDelete.map(tag => this.#logsByTag.delete(tag.toString())));
       return true;
     });
   }
 
   /**
-   * Gets all logs that match any of the
-   *
-   * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
-   * that tag.
+   * Gets all private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
+   * array implies no logs match that tag.
    */
-  async
-
-
-
-
-
-
+  async getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
+    const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
+
+    return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
+  }
+
+  /**
+   * Gets all public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
+   * logs is returned. An empty array implies no logs match that tag.
+   */
+  async getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
+    const logs = await Promise.all(
+      tags.map(tag => {
+        const key = `${contractAddress.toString()}_${tag.value.toString()}`;
+        return this.#publicLogsByContractAndTag.getAsync(key);
+      }),
    );
+    return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
   }
 
   /**
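
The new #packWithBlockHash and #unpackBlockHash helpers above add a framing layer to the per-block log buffers: the serialized entries are now prefixed with the block hash, and every reader must consume that prefix before parsing. A simplified standalone model is sketched below; the 32-byte hash width is an assumption based on the field-element hash used in the diff.

```ts
// Standalone model of the block-hash framing used by #packWithBlockHash/#unpackBlockHash.
// The 32-byte width is an assumption for this sketch.
const BLOCK_HASH_SIZE = 32;

function packWithBlockHash(blockHash: Buffer, entries: Buffer[]): Buffer {
  // Prefix the serialized log entries with the block hash.
  return Buffer.concat([blockHash, ...entries]);
}

function unpackBlockHash(packed: Buffer): { blockHash: Buffer; rest: Buffer } {
  if (packed.length < BLOCK_HASH_SIZE) {
    throw new Error('Failed to read block hash from log entry buffer');
  }
  // The remainder is parsed exactly as before the framing change.
  return {
    blockHash: packed.subarray(0, BLOCK_HASH_SIZE),
    rest: packed.subarray(BLOCK_HASH_SIZE),
  };
}
```

This is also why the #accumulateLogs call sites in the hunks that follow gain a blockHash argument: the hash recovered from the buffer ends up inside the LogId of every returned log.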

@@ -207,6 +329,9 @@
     const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
     const publicLogsInBlock: [PublicLog[]] = [[]];
     const reader = new BufferReader(buffer);
+
+    const blockHash = this.#unpackBlockHash(reader);
+
     while (reader.remainingBytes() > 0) {
       const indexOfTx = reader.readNumber();
       const numLogsInTx = reader.readNumber();
@@ -219,7 +344,7 @@
     const txLogs = publicLogsInBlock[txIndex];
 
     const logs: ExtendedPublicLog[] = [];
-    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
 
     return { logs, maxLogsHit };
   }
@@ -242,6 +367,9 @@
     loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
       const publicLogsInBlock: [PublicLog[]] = [[]];
       const reader = new BufferReader(logBuffer);
+
+      const blockHash = this.#unpackBlockHash(reader);
+
       while (reader.remainingBytes() > 0) {
         const indexOfTx = reader.readNumber();
         const numLogsInTx = reader.readNumber();
@@ -252,7 +380,7 @@
       }
       for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
         const txLogs = publicLogsInBlock[txIndex];
-        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
         if (maxLogsHit) {
           this.#log.debug(`Max logs hit at block ${blockNumber}`);
           break loopOverBlocks;
@@ -291,6 +419,8 @@
     const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
 
     const reader = new BufferReader(contractClassLogsBuffer);
+    const blockHash = this.#unpackBlockHash(reader);
+
     while (reader.remainingBytes() > 0) {
       const indexOfTx = reader.readNumber();
       const numLogsInTx = reader.readNumber();
@@ -303,7 +433,7 @@
     const txLogs = contractClassLogsInBlock[txIndex];
 
     const logs: ExtendedContractClassLog[] = [];
-    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
 
     return { logs, maxLogsHit };
   }
@@ -329,6 +459,7 @@
     })) {
       const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
       const reader = new BufferReader(logBuffer);
+      const blockHash = this.#unpackBlockHash(reader);
       while (reader.remainingBytes() > 0) {
         const indexOfTx = reader.readNumber();
         const numLogsInTx = reader.readNumber();
@@ -339,7 +470,7 @@
       }
       for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
         const txLogs = contractClassLogsInBlock[txIndex];
-        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
         if (maxLogsHit) {
           this.#log.debug(`Max logs hit at block ${blockNumber}`);
           break loopOverBlocks;
@@ -353,9 +484,10 @@
   #accumulateLogs(
     results: (ExtendedContractClassLog | ExtendedPublicLog)[],
     blockNumber: number,
+    blockHash: L2BlockHash,
     txIndex: number,
     txLogs: (ContractClassLog | PublicLog)[],
-    filter: LogFilter,
+    filter: LogFilter = {},
   ): boolean {
     let maxLogsHit = false;
     let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
@@ -363,9 +495,13 @@
       const log = txLogs[logIndex];
       if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
         if (log instanceof ContractClassLog) {
-          results.push(
+          results.push(
+            new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log),
+          );
+        } else if (log instanceof PublicLog) {
+          results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
         } else {
-
+          throw new Error('Unknown log type');
         }
 
         if (results.length >= this.#logsMaxPageSize) {