@aztec/archiver 0.62.0 → 0.63.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +11 -2
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +43 -19
- package/dest/archiver/archiver_store.d.ts +8 -2
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +64 -25
- package/dest/archiver/config.d.ts +2 -6
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +3 -6
- package/dest/archiver/epoch_helpers.d.ts +10 -10
- package/dest/archiver/epoch_helpers.d.ts.map +1 -1
- package/dest/archiver/epoch_helpers.js +9 -10
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +8 -2
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +14 -1
- package/dest/archiver/kv_archiver_store/log_store.d.ts +8 -2
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +144 -57
- package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +13 -4
- package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
- package/dest/archiver/memory_archiver_store/memory_archiver_store.js +136 -31
- package/dest/factory.d.ts +5 -2
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +2 -2
- package/dest/rpc/index.d.ts +3 -2
- package/dest/rpc/index.d.ts.map +1 -1
- package/dest/rpc/index.js +10 -3
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +4 -2
- package/package.json +10 -10
- package/src/archiver/archiver.ts +54 -24
- package/src/archiver/archiver_store.ts +9 -2
- package/src/archiver/archiver_store_test_suite.ts +85 -26
- package/src/archiver/config.ts +11 -12
- package/src/archiver/epoch_helpers.ts +16 -12
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +15 -2
- package/src/archiver/kv_archiver_store/log_store.ts +176 -58
- package/src/archiver/memory_archiver_store/memory_archiver_store.ts +152 -27
- package/src/factory.ts +6 -3
- package/src/rpc/index.ts +11 -2
- package/src/test/mock_l2_block_source.ts +3 -1
- package/dest/rpc/archiver_client.d.ts +0 -3
- package/dest/rpc/archiver_client.d.ts.map +0 -1
- package/dest/rpc/archiver_client.js +0 -12
- package/dest/rpc/archiver_server.d.ts +0 -9
- package/dest/rpc/archiver_server.d.ts.map +0 -1
- package/dest/rpc/archiver_server.js +0 -20
- package/src/rpc/archiver_client.ts +0 -29
- package/src/rpc/archiver_server.ts +0 -35

package/src/archiver/kv_archiver_store/log_store.ts
CHANGED
@@ -1,6 +1,7 @@
 import {
+  type Body,
+  ContractClass2BlockL2Logs,
   EncryptedL2BlockL2Logs,
-  EncryptedL2NoteLog,
   EncryptedNoteL2BlockL2Logs,
   ExtendedUnencryptedL2Log,
   type FromLogType,
@@ -10,13 +11,14 @@ import {
   type LogFilter,
   LogId,
   LogType,
+  TxScopedL2Log,
   UnencryptedL2BlockL2Logs,
   type UnencryptedL2Log,
 } from '@aztec/circuit-types';
 import { Fr } from '@aztec/circuits.js';
-import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants';
+import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/circuits.js/constants';
 import { createDebugLogger } from '@aztec/foundation/log';
-import { type AztecKVStore, type AztecMap
+import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
 
 import { type BlockStore } from './block_store.js';
 
@@ -25,58 +27,123 @@ import { type BlockStore } from './block_store.js';
  */
 export class LogStore {
   #noteEncryptedLogsByBlock: AztecMap<number, Buffer>;
-  #
-  #
-  #noteEncryptedLogTagsByBlock: AztecMultiMap<number, string>;
+  #logsByTag: AztecMap<string, Buffer[]>;
+  #logTagsByBlock: AztecMap<number, string[]>;
   #encryptedLogsByBlock: AztecMap<number, Buffer>;
   #unencryptedLogsByBlock: AztecMap<number, Buffer>;
+  #contractClassLogsByBlock: AztecMap<number, Buffer>;
   #logsMaxPageSize: number;
   #log = createDebugLogger('aztec:archiver:log_store');
 
   constructor(private db: AztecKVStore, private blockStore: BlockStore, logsMaxPageSize: number = 1000) {
     this.#noteEncryptedLogsByBlock = db.openMap('archiver_note_encrypted_logs_by_block');
-    this.#
-    this.#
-    this.#noteEncryptedLogTagsByBlock = db.openMultiMap('archiver_note_encrypted_log_tags_by_block');
+    this.#logsByTag = db.openMap('archiver_tagged_logs_by_tag');
+    this.#logTagsByBlock = db.openMap('archiver_log_tags_by_block');
     this.#encryptedLogsByBlock = db.openMap('archiver_encrypted_logs_by_block');
     this.#unencryptedLogsByBlock = db.openMap('archiver_unencrypted_logs_by_block');
+    this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
 
     this.#logsMaxPageSize = logsMaxPageSize;
   }
 
+  #extractTaggedLogs(block: L2Block, logType: keyof Pick<Body, 'noteEncryptedLogs' | 'unencryptedLogs'>) {
+    const taggedLogs = new Map<string, Buffer[]>();
+    const dataStartIndexForBlock =
+      block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
+      block.body.numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX;
+    block.body[logType].txLogs.forEach((txLogs, txIndex) => {
+      const txHash = block.body.txEffects[txIndex].txHash;
+      const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
+      const logs = txLogs.unrollLogs();
+      logs.forEach(log => {
+        if (
+          (logType == 'noteEncryptedLogs' && log.data.length < 32) ||
+          // TODO remove when #9835 and #9836 are fixed
+          (logType === 'unencryptedLogs' && log.data.length < 32 * 33)
+        ) {
+          this.#log.warn(`Skipping log (${logType}) with invalid data length: ${log.data.length}`);
+          return;
+        }
+        try {
+          let tag = Fr.ZERO;
+          // TODO remove when #9835 and #9836 are fixed. The partial note logs are emitted as bytes, but encoded as Fields.
+          // This means that for every 32 bytes of payload, we only have 1 byte of data.
+          // Also, the tag is not stored in the first 32 bytes of the log, (that's the length of public fields now) but in the next 32.
+          if (logType === 'unencryptedLogs') {
+            const correctedBuffer = Buffer.alloc(32);
+            const initialOffset = 32;
+            for (let i = 0; i < 32; i++) {
+              const byte = Fr.fromBuffer(
+                log.data.subarray(i * 32 + initialOffset, i * 32 + 32 + initialOffset),
+              ).toNumber();
+              correctedBuffer.writeUInt8(byte, i);
+            }
+            tag = new Fr(correctedBuffer);
+          } else {
+            tag = new Fr(log.data.subarray(0, 32));
+          }
+          this.#log.verbose(`Found tagged (${logType}) log with tag ${tag.toString()} in block ${block.number}`);
+          const currentLogs = taggedLogs.get(tag.toString()) ?? [];
+          currentLogs.push(
+            new TxScopedL2Log(
+              txHash,
+              dataStartIndexForTx,
+              block.number,
+              logType === 'unencryptedLogs',
+              log.data,
+            ).toBuffer(),
+          );
+          taggedLogs.set(tag.toString(), currentLogs);
+        } catch (err) {
+          this.#log.warn(`Failed to add tagged log to store: ${err}`);
+        }
+      });
+    });
+    return taggedLogs;
+  }
+
   /**
    * Append new logs to the store's list.
    * @param blocks - The blocks for which to add the logs.
    * @returns True if the operation is successful.
    */
-  addLogs(blocks: L2Block[]): Promise<boolean> {
+  async addLogs(blocks: L2Block[]): Promise<boolean> {
+    const taggedLogsToAdd = blocks
+      .flatMap(block => [
+        this.#extractTaggedLogs(block, 'noteEncryptedLogs'),
+        this.#extractTaggedLogs(block, 'unencryptedLogs'),
+      ])
+      .reduce((acc, val) => {
+        for (const [tag, logs] of val.entries()) {
+          const currentLogs = acc.get(tag) ?? [];
+          acc.set(tag, currentLogs.concat(logs));
+        }
+        return acc;
+      });
+    const tagsToUpdate = Array.from(taggedLogsToAdd.keys());
+    const currentTaggedLogs = await this.db.transaction(() =>
+      tagsToUpdate.map(tag => ({ tag, logBuffers: this.#logsByTag.get(tag) })),
+    );
+    currentTaggedLogs.forEach(taggedLogBuffer => {
+      if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
+        taggedLogsToAdd.set(
+          taggedLogBuffer.tag,
+          taggedLogBuffer.logBuffers!.concat(taggedLogsToAdd.get(taggedLogBuffer.tag)!),
+        );
+      }
+    });
     return this.db.transaction(() => {
       blocks.forEach(block => {
+        const tagsInBlock = [];
+        for (const [tag, logs] of taggedLogsToAdd.entries()) {
+          void this.#logsByTag.set(tag, logs);
+          tagsInBlock.push(tag);
+        }
+        void this.#logTagsByBlock.set(block.number, tagsInBlock);
         void this.#noteEncryptedLogsByBlock.set(block.number, block.body.noteEncryptedLogs.toBuffer());
-        block.body.noteEncryptedLogs.txLogs.forEach(txLogs => {
-          const noteLogs = txLogs.unrollLogs();
-          noteLogs.forEach(noteLog => {
-            if (noteLog.data.length < 32) {
-              this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`);
-              return;
-            }
-            try {
-              const tag = new Fr(noteLog.data.subarray(0, 32));
-              const hexHash = noteLog.hash().toString('hex');
-              // Ideally we'd store all of the logs for a matching tag in an AztecMultiMap, but this type doesn't doesn't
-              // handle storing buffers well. The 'ordered-binary' encoding returns an error trying to decode buffers
-              // ('the number <> cannot be converted to a BigInt because it is not an integer'). We therefore store
-              // instead the hashes of the logs.
-              void this.#noteEncryptedLogHashesByTag.set(tag.toString(), hexHash);
-              void this.#noteEncryptedLogsByHash.set(hexHash, noteLog.toBuffer());
-              void this.#noteEncryptedLogTagsByBlock.set(block.number, tag.toString());
-            } catch (err) {
-              this.#log.warn(`Failed to add tagged note log to store: ${err}`);
-            }
-          });
-        });
         void this.#encryptedLogsByBlock.set(block.number, block.body.encryptedLogs.toBuffer());
         void this.#unencryptedLogsByBlock.set(block.number, block.body.unencryptedLogs.toBuffer());
+        void this.#contractClassLogsByBlock.set(block.number, block.body.contractClassLogs.toBuffer());
       });
 
       return true;
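A minimal sketch of the tag recovery that #extractTaggedLogs performs, written as a standalone helper for clarity (readLogTag is a hypothetical name, not part of the package): note encrypted logs carry the tag in the first 32 bytes of the payload, while partial-note unencrypted logs (pending #9835 and #9836) encode one tag byte per 32-byte field starting at offset 32.

import { Fr } from '@aztec/circuits.js';

// Hypothetical helper mirroring the logic in the hunk above.
function readLogTag(data: Buffer, isPartialNoteLog: boolean): Fr {
  if (!isPartialNoteLog) {
    // Note encrypted logs: the tag is simply the first 32 bytes of the payload.
    return new Fr(data.subarray(0, 32));
  }
  // Partial-note logs are byte-encoded as fields: each tag byte occupies a full
  // 32-byte field, and the tag starts after the first 32-byte word.
  const tagBytes = Buffer.alloc(32);
  const initialOffset = 32;
  for (let i = 0; i < 32; i++) {
    tagBytes[i] = Fr.fromBuffer(data.subarray(i * 32 + initialOffset, i * 32 + 32 + initialOffset)).toNumber();
  }
  return new Fr(tagBytes);
}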
@@ -84,26 +151,19 @@ export class LogStore {
   }
 
   async deleteLogs(blocks: L2Block[]): Promise<boolean> {
-    const
-    return blocks.flatMap(block =>
-    });
-    const noteLogHashesToDelete = await this.db.transaction(() => {
-      return noteTagsToDelete.flatMap(tag => Array.from(this.#noteEncryptedLogHashesByTag.getValues(tag)));
+    const tagsToDelete = await this.db.transaction(() => {
+      return blocks.flatMap(block => this.#logTagsByBlock.get(block.number)?.map(tag => tag.toString()) ?? []);
     });
     return this.db.transaction(() => {
       blocks.forEach(block => {
         void this.#noteEncryptedLogsByBlock.delete(block.number);
         void this.#encryptedLogsByBlock.delete(block.number);
         void this.#unencryptedLogsByBlock.delete(block.number);
-        void this.#
-      });
-
-      noteTagsToDelete.forEach(tag => {
-        void this.#noteEncryptedLogHashesByTag.delete(tag.toString());
+        void this.#logTagsByBlock.delete(block.number);
       });
 
-
-      void this.#
+      tagsToDelete.forEach(tag => {
+        void this.#logsByTag.delete(tag.toString());
       });
 
       return true;
@@ -156,20 +216,12 @@ export class LogStore {
    * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
    * that tag.
    */
-  getLogsByTags(tags: Fr[]): Promise<
-    return this.db.transaction(() =>
-
-
-
-
-            .map(hash => this.#noteEncryptedLogsByHash.get(hash))
-            // addLogs should ensure that we never have undefined logs, but we filter them out regardless to protect
-            // ourselves from database corruption
-            .filter(noteLogBuffer => noteLogBuffer != undefined)
-            .map(noteLogBuffer => EncryptedL2NoteLog.fromBuffer(noteLogBuffer!))
-        );
-      });
-    });
+  getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
+    return this.db.transaction(() =>
+      tags
+        .map(tag => this.#logsByTag.get(tag.toString()))
+        .map(noteLogBuffers => noteLogBuffers?.map(noteLogBuffer => TxScopedL2Log.fromBuffer(noteLogBuffer)) ?? []),
+    );
   }
 
   /**
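A hedged usage sketch of the new getLogsByTags shape (logStore and the tags are assumed to exist; this is not code from the package): the result is positional, one entry per requested tag, and an empty entry means no stored log carries that tag.

declare const logStore: LogStore;
declare const tagA: Fr, tagB: Fr;

const logsPerTag = await logStore.getLogsByTags([tagA, tagB]); // TxScopedL2Log[][]
logsPerTag.forEach((logs, i) => {
  // Each entry lines up with the tag at the same index in the request.
  console.log(`tag #${i} matched ${logs.length} scoped log(s)`);
});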
@@ -236,6 +288,72 @@ export class LogStore {
     return { logs, maxLogsHit };
   }
 
+  /**
+   * Gets contract class logs based on the provided filter.
+   * @param filter - The filter to apply to the logs.
+   * @returns The requested logs.
+   */
+  getContractClassLogs(filter: LogFilter): GetUnencryptedLogsResponse {
+    if (filter.afterLog) {
+      return this.#filterContractClassLogsBetweenBlocks(filter);
+    } else if (filter.txHash) {
+      return this.#filterContractClassLogsOfTx(filter);
+    } else {
+      return this.#filterContractClassLogsBetweenBlocks(filter);
+    }
+  }
+
+  #filterContractClassLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse {
+    if (!filter.txHash) {
+      throw new Error('Missing txHash');
+    }
+
+    const [blockNumber, txIndex] = this.blockStore.getTxLocation(filter.txHash) ?? [];
+    if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
+      return { logs: [], maxLogsHit: false };
+    }
+    const contractClassLogsBuffer = this.#contractClassLogsByBlock.get(blockNumber);
+    const contractClassLogsInBlock = contractClassLogsBuffer
+      ? ContractClass2BlockL2Logs.fromBuffer(contractClassLogsBuffer)
+      : new ContractClass2BlockL2Logs([]);
+    const txLogs = contractClassLogsInBlock.txLogs[txIndex].unrollLogs();
+
+    const logs: ExtendedUnencryptedL2Log[] = [];
+    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+
+    return { logs, maxLogsHit };
+  }
+
+  #filterContractClassLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse {
+    const start =
+      filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
+    const end = filter.toBlock;
+
+    if (typeof end === 'number' && end < start) {
+      return {
+        logs: [],
+        maxLogsHit: true,
+      };
+    }
+
+    const logs: ExtendedUnencryptedL2Log[] = [];
+
+    let maxLogsHit = false;
+    loopOverBlocks: for (const [blockNumber, logBuffer] of this.#contractClassLogsByBlock.entries({ start, end })) {
+      const contractClassLogsInBlock = ContractClass2BlockL2Logs.fromBuffer(logBuffer);
+      for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.txLogs.length; txIndex++) {
+        const txLogs = contractClassLogsInBlock.txLogs[txIndex].unrollLogs();
+        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
+        if (maxLogsHit) {
+          this.#log.debug(`Max logs hit at block ${blockNumber}`);
+          break loopOverBlocks;
+        }
+      }
+    }
+
+    return { logs, maxLogsHit };
+  }
+
   #accumulateLogs(
     results: ExtendedUnencryptedL2Log[],
     blockNumber: number,
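A sketch of paging through getContractClassLogs with the afterLog cursor, under the assumption that each returned ExtendedUnencryptedL2Log exposes its LogId as `.id` (that property name is an assumption, not confirmed by this diff):

import { type LogFilter } from '@aztec/circuit-types';

declare const logStore: LogStore;

function collectAllContractClassLogs() {
  let filter: LogFilter = { fromBlock: 1 };
  let page = logStore.getContractClassLogs(filter);
  const all = [...page.logs];
  while (page.maxLogsHit) {
    const last = page.logs[page.logs.length - 1];
    filter = { ...filter, afterLog: last.id }; // assumed property holding the LogId
    page = logStore.getContractClassLogs(filter);
    all.push(...page.logs);
  }
  return all;
}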

package/src/archiver/memory_archiver_store/memory_archiver_store.ts
CHANGED
@@ -1,6 +1,7 @@
 import {
+  type Body,
+  type ContractClass2BlockL2Logs,
   type EncryptedL2BlockL2Logs,
-  type EncryptedL2NoteLog,
   type EncryptedNoteL2BlockL2Logs,
   ExtendedUnencryptedL2Log,
   type FromLogType,
@@ -14,6 +15,7 @@ import {
   type TxEffect,
   type TxHash,
   TxReceipt,
+  TxScopedL2Log,
   type UnencryptedL2BlockL2Logs,
 } from '@aztec/circuit-types';
 import {
@@ -24,6 +26,7 @@ import {
   Fr,
   type Header,
   INITIAL_L2_BLOCK_NUM,
+  MAX_NOTE_HASHES_PER_TX,
   type UnconstrainedFunctionWithMembershipProof,
 } from '@aztec/circuits.js';
 import { type ContractArtifact } from '@aztec/foundation/abi';
@@ -51,14 +54,16 @@ export class MemoryArchiverStore implements ArchiverDataStore {
 
   private noteEncryptedLogsPerBlock: Map<number, EncryptedNoteL2BlockL2Logs> = new Map();
 
-  private
+  private taggedLogs: Map<string, TxScopedL2Log[]> = new Map();
 
-  private
+  private logTagsPerBlock: Map<number, Fr[]> = new Map();
 
   private encryptedLogsPerBlock: Map<number, EncryptedL2BlockL2Logs> = new Map();
 
   private unencryptedLogsPerBlock: Map<number, UnencryptedL2BlockL2Logs> = new Map();
 
+  private contractClassLogsPerBlock: Map<number, ContractClass2BlockL2Logs> = new Map();
+
   /**
    * Contains all L1 to L2 messages.
    */
@@ -206,6 +211,56 @@ export class MemoryArchiverStore implements ArchiverDataStore {
     return Promise.resolve(true);
   }
 
+  #storeTaggedLogs(block: L2Block, logType: keyof Pick<Body, 'noteEncryptedLogs' | 'unencryptedLogs'>): void {
+    const dataStartIndexForBlock =
+      block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
+      block.body.numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX;
+    block.body[logType].txLogs.forEach((txLogs, txIndex) => {
+      const txHash = block.body.txEffects[txIndex].txHash;
+      const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
+      const logs = txLogs.unrollLogs();
+      logs.forEach(log => {
+        if (
+          (logType == 'noteEncryptedLogs' && log.data.length < 32) ||
+          // TODO remove when #9835 and #9836 are fixed
+          (logType === 'unencryptedLogs' && log.data.length < 32 * 33)
+        ) {
+          this.#log.warn(`Skipping log (${logType}) with invalid data length: ${log.data.length}`);
+          return;
+        }
+        try {
+          let tag = Fr.ZERO;
+          // TODO remove when #9835 and #9836 are fixed. The partial note logs are emitted as bytes, but encoded as Fields.
+          // This means that for every 32 bytes of payload, we only have 1 byte of data.
+          // Also, the tag is not stored in the first 32 bytes of the log, (that's the length of public fields now) but in the next 32.
+          if (logType === 'unencryptedLogs') {
+            const correctedBuffer = Buffer.alloc(32);
+            const initialOffset = 32;
+            for (let i = 0; i < 32; i++) {
+              const byte = Fr.fromBuffer(
+                log.data.subarray(i * 32 + initialOffset, i * 32 + 32 + initialOffset),
+              ).toNumber();
+              correctedBuffer.writeUInt8(byte, i);
+            }
+            tag = new Fr(correctedBuffer);
+          } else {
+            tag = new Fr(log.data.subarray(0, 32));
+          }
+          this.#log.verbose(`Storing tagged (${logType}) log with tag ${tag.toString()} in block ${block.number}`);
+          const currentLogs = this.taggedLogs.get(tag.toString()) || [];
+          this.taggedLogs.set(tag.toString(), [
+            ...currentLogs,
+            new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, logType === 'unencryptedLogs', log.data),
+          ]);
+          const currentTagsInBlock = this.logTagsPerBlock.get(block.number) || [];
+          this.logTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]);
+        } catch (err) {
+          this.#log.warn(`Failed to add tagged log to store: ${err}`);
+        }
+      });
+    });
+  }
+
   /**
    * Append new logs to the store's list.
    * @param block - The block for which to add the logs.
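Both stores derive, for every transaction, the note hash tree leaf index at which that transaction's notes begin. Worked through with illustrative numbers (the real MAX_NOTE_HASHES_PER_TX comes from '@aztec/circuits.js'; 64 is only an example value):

const MAX_NOTE_HASHES_PER_TX = 64;    // illustrative value only
const nextAvailableLeafIndex = 1024;  // note hash tree size after the block is applied
const numberOfTxsIncludingPadded = 4;

const dataStartIndexForBlock = nextAvailableLeafIndex - numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX; // 768
const dataStartIndexForTx = (txIndex: number) => dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
// dataStartIndexForTx(0) === 768, dataStartIndexForTx(2) === 896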
@@ -213,44 +268,30 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    */
   addLogs(blocks: L2Block[]): Promise<boolean> {
     blocks.forEach(block => {
+      void this.#storeTaggedLogs(block, 'noteEncryptedLogs');
+      void this.#storeTaggedLogs(block, 'unencryptedLogs');
       this.noteEncryptedLogsPerBlock.set(block.number, block.body.noteEncryptedLogs);
-      block.body.noteEncryptedLogs.txLogs.forEach(txLogs => {
-        const noteLogs = txLogs.unrollLogs();
-        noteLogs.forEach(noteLog => {
-          if (noteLog.data.length < 32) {
-            this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`);
-            return;
-          }
-          try {
-            const tag = new Fr(noteLog.data.subarray(0, 32));
-            const currentNoteLogs = this.taggedNoteEncryptedLogs.get(tag.toString()) || [];
-            this.taggedNoteEncryptedLogs.set(tag.toString(), [...currentNoteLogs, noteLog]);
-            const currentTagsInBlock = this.noteEncryptedLogTagsPerBlock.get(block.number) || [];
-            this.noteEncryptedLogTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]);
-          } catch (err) {
-            this.#log.warn(`Failed to add tagged note log to store: ${err}`);
-          }
-        });
-      });
       this.encryptedLogsPerBlock.set(block.number, block.body.encryptedLogs);
       this.unencryptedLogsPerBlock.set(block.number, block.body.unencryptedLogs);
+      this.contractClassLogsPerBlock.set(block.number, block.body.contractClassLogs);
     });
     return Promise.resolve(true);
   }
 
   deleteLogs(blocks: L2Block[]): Promise<boolean> {
-    const
-
+    const tagsToDelete = blocks.flatMap(block => this.logTagsPerBlock.get(block.number));
+    tagsToDelete
       .filter(tag => tag != undefined)
       .forEach(tag => {
-        this.
+        this.taggedLogs.delete(tag!.toString());
       });
 
     blocks.forEach(block => {
       this.encryptedLogsPerBlock.delete(block.number);
       this.noteEncryptedLogsPerBlock.delete(block.number);
       this.unencryptedLogsPerBlock.delete(block.number);
-      this.
+      this.logTagsPerBlock.delete(block.number);
+      this.contractClassLogsPerBlock.delete(block.number);
     });
 
     return Promise.resolve(true);
@@ -419,8 +460,8 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
    * that tag.
    */
-  getLogsByTags(tags: Fr[]): Promise<
-    const noteLogs = tags.map(tag => this.
+  getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
+    const noteLogs = tags.map(tag => this.taggedLogs.get(tag.toString()) || []);
     return Promise.resolve(noteLogs);
   }
 
@@ -507,6 +548,90 @@ export class MemoryArchiverStore implements ArchiverDataStore {
     });
   }
 
+  /**
+   * Gets contract class logs based on the provided filter.
+   * NB: clone of the above fn, but for contract class logs
+   * @param filter - The filter to apply to the logs.
+   * @returns The requested logs.
+   * @remarks Works by doing an intersection of all params in the filter.
+   */
+  getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
+    let txHash: TxHash | undefined;
+    let fromBlock = 0;
+    let toBlock = this.l2Blocks.length + INITIAL_L2_BLOCK_NUM;
+    let txIndexInBlock = 0;
+    let logIndexInTx = 0;
+
+    if (filter.afterLog) {
+      // Continuation parameter is set --> tx hash is ignored
+      if (filter.fromBlock == undefined || filter.fromBlock <= filter.afterLog.blockNumber) {
+        fromBlock = filter.afterLog.blockNumber;
+        txIndexInBlock = filter.afterLog.txIndex;
+        logIndexInTx = filter.afterLog.logIndex + 1; // We want to start from the next log
+      } else {
+        fromBlock = filter.fromBlock;
+      }
+    } else {
+      txHash = filter.txHash;
+
+      if (filter.fromBlock !== undefined) {
+        fromBlock = filter.fromBlock;
+      }
+    }
+
+    if (filter.toBlock !== undefined) {
+      toBlock = filter.toBlock;
+    }
+
+    // Ensure the indices are within block array bounds
+    fromBlock = Math.max(fromBlock, INITIAL_L2_BLOCK_NUM);
+    toBlock = Math.min(toBlock, this.l2Blocks.length + INITIAL_L2_BLOCK_NUM);
+
+    if (fromBlock > this.l2Blocks.length || toBlock < fromBlock || toBlock <= 0) {
+      return Promise.resolve({
+        logs: [],
+        maxLogsHit: false,
+      });
+    }
+
+    const contractAddress = filter.contractAddress;
+
+    const logs: ExtendedUnencryptedL2Log[] = [];
+
+    for (; fromBlock < toBlock; fromBlock++) {
+      const block = this.l2Blocks[fromBlock - INITIAL_L2_BLOCK_NUM];
+      const blockLogs = this.contractClassLogsPerBlock.get(fromBlock);
+
+      if (blockLogs) {
+        for (; txIndexInBlock < blockLogs.txLogs.length; txIndexInBlock++) {
+          const txLogs = blockLogs.txLogs[txIndexInBlock].unrollLogs();
+          for (; logIndexInTx < txLogs.length; logIndexInTx++) {
+            const log = txLogs[logIndexInTx];
+            if (
+              (!txHash || block.data.body.txEffects[txIndexInBlock].txHash.equals(txHash)) &&
+              (!contractAddress || log.contractAddress.equals(contractAddress))
+            ) {
+              logs.push(new ExtendedUnencryptedL2Log(new LogId(block.data.number, txIndexInBlock, logIndexInTx), log));
+              if (logs.length === this.maxLogs) {
+                return Promise.resolve({
+                  logs,
+                  maxLogsHit: true,
+                });
+              }
+            }
+          }
+          logIndexInTx = 0;
+        }
+      }
+      txIndexInBlock = 0;
+    }
+
+    return Promise.resolve({
+      logs,
+      maxLogsHit: false,
+    });
+  }
+
   /**
    * Gets the number of the latest L2 block processed.
    * @returns The number of the latest L2 block processed.

package/src/factory.ts
CHANGED
@@ -1,5 +1,8 @@
+import { type ArchiverApi, type Service } from '@aztec/circuit-types';
 import { type ContractClassPublic } from '@aztec/circuits.js';
 import { createDebugLogger } from '@aztec/foundation/log';
+import { type Maybe } from '@aztec/foundation/types';
+import { type DataStoreConfig } from '@aztec/kv-store/config';
 import { createStore } from '@aztec/kv-store/utils';
 import { getCanonicalProtocolContract, protocolContractNames } from '@aztec/protocol-contracts';
 import { type TelemetryClient } from '@aztec/telemetry-client';
@@ -8,13 +11,13 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
 import { Archiver } from './archiver/archiver.js';
 import { type ArchiverConfig } from './archiver/config.js';
 import { KVArchiverDataStore } from './archiver/index.js';
-import { createArchiverClient } from './rpc/
+import { createArchiverClient } from './rpc/index.js';
 
 export async function createArchiver(
-  config: ArchiverConfig,
+  config: ArchiverConfig & DataStoreConfig,
   telemetry: TelemetryClient = new NoopTelemetryClient(),
   opts: { blockUntilSync: boolean } = { blockUntilSync: true },
-) {
+): Promise<ArchiverApi & Maybe<Service>> {
   if (!config.archiverUrl) {
     const store = await createStore('archiver', config, createDebugLogger('aztec:archiver:lmdb'));
     const archiverStore = new KVArchiverDataStore(store, config.maxLogs);
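A hedged sketch of calling the reworked factory (the config value is assumed to satisfy ArchiverConfig & DataStoreConfig; when archiverUrl is set the factory presumably returns the JSON-RPC client from createArchiverClient rather than a local Archiver):

import { createArchiver } from './factory.js';
import { type ArchiverConfig } from './archiver/config.js';
import { type DataStoreConfig } from '@aztec/kv-store/config';
import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

declare const config: ArchiverConfig & DataStoreConfig; // assumed to be populated elsewhere

// No archiverUrl -> builds a local LMDB-backed Archiver; blockUntilSync waits for the initial sync.
const archiver = await createArchiver(config, new NoopTelemetryClient(), { blockUntilSync: true });
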
package/src/rpc/index.ts
CHANGED
@@ -1,2 +1,11 @@
-
-
+import { type ArchiverApi, ArchiverApiSchema } from '@aztec/circuit-types';
+import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client';
+import { createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server';
+
+export function createArchiverClient(url: string, fetch = makeFetch([1, 2, 3], true)): ArchiverApi {
+  return createSafeJsonRpcClient<ArchiverApi>(url, ArchiverApiSchema, false, 'archiver', fetch);
+}
+
+export function createArchiverRpcServer(handler: ArchiverApi) {
+  return createSafeJsonRpcServer(handler, ArchiverApiSchema);
+}
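A sketch pairing the two helpers above (only the function signatures come from the diff; the relative import, the port, and how the returned server is bound to HTTP are assumptions):

import { type ArchiverApi } from '@aztec/circuit-types';
import { createArchiverClient, createArchiverRpcServer } from './index.js';

declare const archiver: ArchiverApi; // some local implementation, e.g. an Archiver instance

const server = createArchiverRpcServer(archiver); // safe JSON-RPC server wrapping the handler
// ...expose `server` on an HTTP port using whatever the SafeJsonRpcServer API offers...

// In another process, talk to it through a typed client validated against ArchiverApiSchema.
const client = createArchiverClient('http://localhost:8080');
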
package/src/test/mock_l2_block_source.ts
CHANGED
@@ -1,5 +1,6 @@
 import { L2Block, type L2BlockSource, type L2Tips, type TxHash, TxReceipt, TxStatus } from '@aztec/circuit-types';
 import { EthAddress, type Header } from '@aztec/circuits.js';
+import { DefaultL1ContractsConfig } from '@aztec/ethereum';
 import { createDebugLogger } from '@aztec/foundation/log';
 
 import { getSlotRangeForEpoch } from '../archiver/epoch_helpers.js';
@@ -103,7 +104,8 @@ export class MockL2BlockSource implements L2BlockSource {
   }
 
   getBlocksForEpoch(epochNumber: bigint): Promise<L2Block[]> {
-    const
+    const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration;
+    const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration });
     const blocks = this.l2Blocks.filter(b => {
       const slot = b.header.globalVariables.slotNumber.toBigInt();
       return slot >= start && slot <= end;
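A hedged sketch of the epoch-to-slot translation the mock now relies on (the contiguous-epoch assumption and the concrete numbers are illustrative; the actual mapping lives in epoch_helpers.ts):

import { DefaultL1ContractsConfig } from '@aztec/ethereum';
import { getSlotRangeForEpoch } from '../archiver/epoch_helpers.js';

const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration;
// If epochs are numbered from 0 and cover contiguous slot ranges, epoch 2 with a
// duration of 32 slots would span slots 64..95.
const [start, end] = getSlotRangeForEpoch(2n, { epochDuration });
// A block belongs to the epoch iff start <= slot && slot <= end, compared as bigints.
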
package/dest/rpc/archiver_client.d.ts
DELETED
@@ -1,3 +0,0 @@
-import { type ArchiveSource } from '../archiver/archiver.js';
-export declare const createArchiverClient: (url: string, fetch?: (host: string, rpcMethod: string, body: any, useApiEndpoints: boolean) => Promise<any>) => ArchiveSource;
-//# sourceMappingURL=archiver_client.d.ts.map
package/dest/rpc/archiver_client.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"archiver_client.d.ts","sourceRoot":"","sources":["../../src/rpc/archiver_client.ts"],"names":[],"mappings":"AAWA,OAAO,EAAE,KAAK,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAE7D,eAAO,MAAM,oBAAoB,QAAS,MAAM,qGAAuC,aAenE,CAAC"}
package/dest/rpc/archiver_client.js
DELETED
@@ -1,12 +0,0 @@
-import { EncryptedNoteL2BlockL2Logs, ExtendedUnencryptedL2Log, L2Block, NullifierMembershipWitness, TxReceipt, UnencryptedL2BlockL2Logs, } from '@aztec/circuit-types';
-import { EthAddress, Fr } from '@aztec/circuits.js';
-import { createJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client';
-export const createArchiverClient = (url, fetch = makeFetch([1, 2, 3], true)) => createJsonRpcClient(url, {
-    EthAddress,
-    ExtendedUnencryptedL2Log,
-    Fr,
-    L2Block,
-    EncryptedNoteL2BlockL2Logs,
-    UnencryptedL2BlockL2Logs,
-}, { TxReceipt, NullifierMembershipWitness }, false, 'archiver', fetch);
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYXJjaGl2ZXJfY2xpZW50LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL3JwYy9hcmNoaXZlcl9jbGllbnQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUNMLDBCQUEwQixFQUMxQix3QkFBd0IsRUFDeEIsT0FBTyxFQUNQLDBCQUEwQixFQUMxQixTQUFTLEVBQ1Qsd0JBQXdCLEdBQ3pCLE1BQU0sc0JBQXNCLENBQUM7QUFDOUIsT0FBTyxFQUFFLFVBQVUsRUFBRSxFQUFFLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQztBQUNwRCxPQUFPLEVBQUUsbUJBQW1CLEVBQUUsU0FBUyxFQUFFLE1BQU0sbUNBQW1DLENBQUM7QUFJbkYsTUFBTSxDQUFDLE1BQU0sb0JBQW9CLEdBQUcsQ0FBQyxHQUFXLEVBQUUsS0FBSyxHQUFHLFNBQVMsQ0FBQyxDQUFDLENBQUMsRUFBRSxDQUFDLEVBQUUsQ0FBQyxDQUFDLEVBQUUsSUFBSSxDQUFDLEVBQWlCLEVBQUUsQ0FDckcsbUJBQW1CLENBQ2pCLEdBQUcsRUFDSDtJQUNFLFVBQVU7SUFDVix3QkFBd0I7SUFDeEIsRUFBRTtJQUNGLE9BQU87SUFDUCwwQkFBMEI7SUFDMUIsd0JBQXdCO0NBQ3pCLEVBQ0QsRUFBRSxTQUFTLEVBQUUsMEJBQTBCLEVBQUUsRUFDekMsS0FBSyxFQUNMLFVBQVUsRUFDVixLQUFLLENBQ1csQ0FBQyJ9
package/dest/rpc/archiver_server.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { JsonRpcServer } from '@aztec/foundation/json-rpc/server';
-import { type Archiver } from '../archiver/archiver.js';
-/**
- * Wrap an Archiver instance with a JSON RPC HTTP server.
- * @param archiverService - The Archiver instance
- * @returns An JSON-RPC HTTP server
- */
-export declare function createArchiverRpcServer(archiverService: Archiver): JsonRpcServer;
-//# sourceMappingURL=archiver_server.d.ts.map
package/dest/rpc/archiver_server.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"archiver_server.d.ts","sourceRoot":"","sources":["../../src/rpc/archiver_server.ts"],"names":[],"mappings":"AAUA,OAAO,EAAE,aAAa,EAAE,MAAM,mCAAmC,CAAC;AAElE,OAAO,EAAE,KAAK,QAAQ,EAAE,MAAM,yBAAyB,CAAC;AAExD;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,eAAe,EAAE,QAAQ,GAAG,aAAa,CAehF"}