@aztec/aztec-node 0.82.2 → 0.82.3-nightly.20250403
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/aztec-node/config.d.ts +7 -7
- package/dest/aztec-node/config.d.ts.map +1 -1
- package/dest/aztec-node/config.js +6 -6
- package/dest/aztec-node/server.d.ts +7 -1
- package/dest/aztec-node/server.d.ts.map +1 -1
- package/dest/aztec-node/server.js +78 -7
- package/dest/sentinel/config.d.ts +7 -0
- package/dest/sentinel/config.d.ts.map +1 -0
- package/dest/sentinel/config.js +13 -0
- package/dest/sentinel/factory.d.ts +8 -0
- package/dest/sentinel/factory.d.ts.map +1 -0
- package/dest/sentinel/factory.js +15 -0
- package/dest/sentinel/index.d.ts +3 -0
- package/dest/sentinel/index.d.ts.map +1 -0
- package/dest/sentinel/index.js +1 -0
- package/dest/sentinel/sentinel.d.ts +64 -0
- package/dest/sentinel/sentinel.d.ts.map +1 -0
- package/dest/sentinel/sentinel.js +246 -0
- package/dest/sentinel/store.d.ts +21 -0
- package/dest/sentinel/store.d.ts.map +1 -0
- package/dest/sentinel/store.js +100 -0
- package/package.json +25 -21
- package/src/aztec-node/config.ts +21 -14
- package/src/aztec-node/server.ts +107 -15
- package/src/sentinel/config.ts +19 -0
- package/src/sentinel/factory.ts +31 -0
- package/src/sentinel/index.ts +8 -0
- package/src/sentinel/sentinel.ts +280 -0
- package/src/sentinel/store.ts +103 -0
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
import { countWhile } from '@aztec/foundation/collection';
|
|
2
|
+
import { EthAddress } from '@aztec/foundation/eth-address';
|
|
3
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
4
|
+
import { RunningPromise } from '@aztec/foundation/running-promise';
|
|
5
|
+
import { L2TipsMemoryStore } from '@aztec/kv-store/stores';
|
|
6
|
+
import { L2BlockStream } from '@aztec/stdlib/block';
|
|
7
|
+
import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers';
|
|
8
|
+
/**
 * Observes each L2 slot and records, per committee validator, whether the proposer
 * produced a block and whether each committee member sent an attestation. Results
 * are persisted via the injected store and aggregated into rolling-window stats.
 */
export class Sentinel {
  epochCache;
  archiver;
  p2p;
  store;
  logger;
  runningPromise;
  blockStream;
  l2TipsStore;
  // Slot at startup; slots at or before this one are never processed (set in init()).
  initialSlot;
  lastProcessedSlot;
  // L2 slot number -> archive root of the block mined at that slot; pruned to the store's history length.
  slotNumberToArchive;

  constructor(epochCache, archiver, p2p, store, logger = createLogger('node:sentinel')) {
    this.epochCache = epochCache;
    this.archiver = archiver;
    this.p2p = p2p;
    this.store = store;
    this.logger = logger;
    this.slotNumberToArchive = new Map();
    this.l2TipsStore = new L2TipsMemoryStore();
    // Tick four times per L1 slot so processing keeps pace with the chain.
    const interval = epochCache.getL1Constants().ethereumSlotDuration * 1000 / 4;
    this.runningPromise = new RunningPromise(this.work.bind(this), logger, interval);
  }

  /** Initializes state and starts the periodic work loop. */
  async start() {
    await this.init();
    this.runningPromise.start();
  }

  /** Loads initial slot and initializes blockstream. We will not process anything at or before the initial slot. */
  async init() {
    this.initialSlot = this.epochCache.getEpochAndSlotNow().slot;
    const startingBlock = await this.archiver.getBlockNumber();
    this.blockStream = new L2BlockStream(this.archiver, this.l2TipsStore, this, this.logger, {
      startingBlock,
    });
  }

  /** Stops the periodic work loop. */
  stop() {
    return this.runningPromise.stop();
  }

  /**
   * Block-stream callback: forwards the event to the tips store and, on new blocks,
   * records the slot -> archive-root mapping (pruned to the configured history length).
   */
  async handleBlockStreamEvent(event) {
    await this.l2TipsStore.handleBlockStreamEvent(event);
    if (event.type === 'blocks-added') {
      // Store mapping from slot to archive
      for (const block of event.blocks) {
        this.slotNumberToArchive.set(block.block.header.getSlot(), block.block.archive.root.toString());
      }
      // Prune the archive map to only keep at most N entries
      const historyLength = this.store.getHistoryLength();
      if (this.slotNumberToArchive.size > historyLength) {
        const toDelete = Array.from(this.slotNumberToArchive.keys())
          .sort((a, b) => Number(a - b))
          .slice(0, this.slotNumberToArchive.size - historyLength);
        for (const key of toDelete) {
          this.slotNumberToArchive.delete(key);
        }
      }
    }
  }

  /**
   * Process data for two L2 slots ago.
   * Note that we do not process historical data, since we rely on p2p data for processing,
   * and we don't have that data if we were offline during the period.
   */
  async work() {
    const { slot: currentSlot } = this.epochCache.getEpochAndSlotNow();
    try {
      // Manually sync the block stream to ensure we have the latest data.
      // Note we never `start` the blockstream, so it loops at the same pace as we do.
      await this.blockStream.sync();
      // Check if we are ready to process data for two L2 slots ago.
      const targetSlot = await this.isReadyToProcess(currentSlot);
      // And process it if we are.
      if (targetSlot !== false) {
        await this.processSlot(targetSlot);
      }
    } catch (err) {
      this.logger.error(`Failed to process slot ${currentSlot}`, err);
    }
  }

  /**
   * Check if we are ready to process data for two L2 slots ago, so we allow plenty of time for p2p to process all in-flight attestations.
   * We also don't move past the archiver last synced L2 slot, as we don't want to process data that is not yet available.
   * Last, we check the p2p is synced with the archiver, so it has pulled all attestations from it.
   * @returns the target slot to process, or `false` when not ready.
   */
  async isReadyToProcess(currentSlot) {
    const targetSlot = currentSlot - 2n;
    if (this.lastProcessedSlot && this.lastProcessedSlot >= targetSlot) {
      this.logger.trace(`Already processed slot ${targetSlot}`, {
        lastProcessedSlot: this.lastProcessedSlot,
      });
      return false;
    }
    if (this.initialSlot === undefined) {
      this.logger.error(`Initial slot not loaded.`);
      return false;
    }
    if (targetSlot <= this.initialSlot) {
      this.logger.debug(`Refusing to process slot ${targetSlot} given initial slot ${this.initialSlot}`);
      return false;
    }
    const archiverSlot = await this.archiver.getL2SlotNumber();
    if (archiverSlot < targetSlot) {
      this.logger.debug(`Waiting for archiver to sync with L2 slot ${targetSlot}`, {
        archiverSlot,
        targetSlot,
      });
      return false;
    }
    const archiverLastBlockHash = await this.l2TipsStore.getL2Tips().then(tip => tip.latest.hash);
    const p2pLastBlockHash = await this.p2p.getL2Tips().then(tips => tips.latest.hash);
    const isP2pSynced = archiverLastBlockHash === p2pLastBlockHash;
    if (!isP2pSynced) {
      this.logger.debug(`Waiting for P2P client to sync with archiver`, {
        archiverLastBlockHash,
        p2pLastBlockHash,
      });
      return false;
    }
    return targetSlot;
  }

  /**
   * Gathers committee and proposer data for a given slot, computes slot stats,
   * and updates overall stats.
   */
  async processSlot(slot) {
    const { epoch, seed, committee } = await this.epochCache.getCommittee(slot);
    if (committee.length === 0) {
      this.logger.warn(`No committee found for slot ${slot} at epoch ${epoch}`);
      this.lastProcessedSlot = slot;
      return;
    }
    const proposerIndex = this.epochCache.computeProposerIndex(slot, epoch, seed, BigInt(committee.length));
    const proposer = committee[Number(proposerIndex)];
    const stats = await this.getSlotActivity(slot, epoch, proposer, committee);
    this.logger.verbose(`Updating L2 slot ${slot} observed activity`, stats);
    await this.updateValidators(slot, stats);
    this.lastProcessedSlot = slot;
  }

  /** Computes activity for a given slot. */
  async getSlotActivity(slot, epoch, proposer, committee) {
    this.logger.debug(`Computing stats for slot ${slot} at epoch ${epoch}`, {
      slot,
      epoch,
      proposer,
      committee,
    });
    // Check if there is an L2 block in L1 for this L2 slot
    // Here we get all attestations for the block mined at the given slot,
    // or all attestations for all proposals in the slot if no block was mined.
    const archive = this.slotNumberToArchive.get(slot);
    const attested = await this.p2p.getAttestationsForSlot(slot, archive);
    const attestors = new Set(await Promise.all(attested.map(a => a.getSender().then(sender => sender.toString()))));
    // We assume that there was a block proposal if at least one of the validators attested to it.
    // It could be the case that every single validator failed, and we could differentiate it by having
    // this node re-execute every block proposal it sees and storing it in the attestation pool.
    // But we'll leave that corner case out to reduce pressure on the node.
    const blockStatus = archive ? 'mined' : attestors.size > 0 ? 'proposed' : 'missed';
    this.logger.debug(`Block for slot ${slot} was ${blockStatus}`, {
      archive,
      slot,
    });
    // Get attestors that failed their duties for this block, but only if there was a block proposed
    const missedAttestors = new Set(
      blockStatus === 'missed'
        ? []
        : committee.filter(v => !attestors.has(v.toString()) && !proposer.equals(v)).map(v => v.toString()),
    );
    this.logger.debug(`Retrieved ${attestors.size} attestors out of ${committee.length} for slot ${slot}`, {
      blockStatus,
      proposer: proposer.toString(),
      archive,
      slot,
      attestors: [...attestors],
      missedAttestors: [...missedAttestors],
      committee: committee.map(c => c.toString()),
    });
    // Compute the status for each validator in the committee
    const statusFor = who => {
      if (who === proposer.toString()) {
        return `block-${blockStatus}`;
      } else if (attestors.has(who)) {
        return 'attestation-sent';
      } else if (missedAttestors.has(who)) {
        return 'attestation-missed';
      } else {
        return undefined;
      }
    };
    return Object.fromEntries(committee.map(v => v.toString()).map(who => [who, statusFor(who)]));
  }

  /** Push the status for each slot for each validator. */
  updateValidators(slot, stats) {
    return this.store.updateValidators(slot, stats);
  }

  /** Computes stats to be returned based on stored data. */
  async computeStats() {
    const histories = await this.store.getHistories();
    const slotNow = this.epochCache.getEpochAndSlotNow().slot;
    const fromSlot = (this.lastProcessedSlot ?? slotNow) - BigInt(this.store.getHistoryLength());
    const result = {};
    for (const [address, history] of Object.entries(histories)) {
      const validatorAddress = address;
      result[validatorAddress] = this.computeStatsForValidator(validatorAddress, history, fromSlot);
    }
    return {
      stats: result,
      lastProcessedSlot: this.lastProcessedSlot,
      initialSlot: this.initialSlot,
      slotWindow: this.store.getHistoryLength(),
    };
  }

  /** Aggregates one validator's history (restricted to slots >= fromSlot when given) into summary stats. */
  computeStatsForValidator(address, allHistory, fromSlot) {
    const history = fromSlot ? allHistory.filter(h => h.slot >= fromSlot) : allHistory;
    return {
      address: EthAddress.fromString(address),
      lastProposal: this.computeFromSlot(
        history.filter(h => h.status === 'block-proposed' || h.status === 'block-mined').at(-1)?.slot,
      ),
      lastAttestation: this.computeFromSlot(history.filter(h => h.status === 'attestation-sent').at(-1)?.slot),
      totalSlots: history.length,
      missedProposals: this.computeMissed(history, 'block', 'block-missed'),
      missedAttestations: this.computeMissed(history, 'attestation', 'attestation-missed'),
      history,
    };
  }

  /**
   * Counts misses among the history entries whose status starts with the given prefix.
   * Returns the current consecutive-miss streak, the miss rate, and the miss count.
   */
  computeMissed(history, computeOverPrefix, filter) {
    const relevantHistory = history.filter(h => h.status.startsWith(computeOverPrefix));
    const filteredHistory = relevantHistory.filter(h => h.status === filter);
    return {
      currentStreak: countWhile([...relevantHistory].reverse(), h => h.status === filter),
      // Guard against 0/0 === NaN when the validator has no entries with this prefix
      // (e.g. it never proposed within the window); report a 0 rate instead.
      rate: relevantHistory.length === 0 ? 0 : filteredHistory.length / relevantHistory.length,
      count: filteredHistory.length,
    };
  }

  /** Maps a slot number to its timestamp and ISO date; undefined when no slot is given. */
  computeFromSlot(slot) {
    if (slot === undefined) {
      return undefined;
    }
    const timestamp = getTimestampForSlot(slot, this.epochCache.getL1Constants());
    return {
      timestamp,
      slot,
      date: new Date(Number(timestamp) * 1000).toISOString(),
    };
  }
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { AztecAsyncKVStore } from '@aztec/kv-store';
|
|
2
|
+
import type { ValidatorStatusHistory, ValidatorStatusInSlot } from '@aztec/stdlib/validators';
|
|
3
|
+
/**
 * Key-value-backed store of per-validator status history used by the sentinel.
 * Histories are capped at `config.historyLength` entries per validator.
 */
export declare class SentinelStore {
    private store;
    private config;
    /** Version of the serialized history layout; bump when the on-disk format changes. */
    static readonly SCHEMA_VERSION = 1;
    /** Underlying async map of validator address to serialized history buffer. */
    private readonly map;
    constructor(store: AztecAsyncKVStore, config: {
        historyLength: number;
    });
    /** Returns the configured maximum number of history entries kept per validator. */
    getHistoryLength(): number;
    /**
     * Records the given slot's status for each validator; entries with an
     * undefined status are skipped.
     */
    updateValidators(slot: bigint, statuses: Record<`0x${string}`, ValidatorStatusInSlot | undefined>): Promise<void>;
    private pushValidatorStatusForSlot;
    /** Returns the stored status history for every known validator. */
    getHistories(): Promise<Record<`0x${string}`, ValidatorStatusHistory>>;
    private getHistory;
    private serializeHistory;
    private deserializeHistory;
    private statusToNumber;
    private statusFromNumber;
}
|
|
21
|
+
//# sourceMappingURL=store.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"store.d.ts","sourceRoot":"","sources":["../../src/sentinel/store.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,iBAAiB,EAAiB,MAAM,iBAAiB,CAAC;AACxE,OAAO,KAAK,EAAE,sBAAsB,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAE9F,qBAAa,aAAa;IAKZ,OAAO,CAAC,KAAK;IAAqB,OAAO,CAAC,MAAM;IAJ5D,gBAAuB,cAAc,KAAK;IAE1C,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAuC;gBAEvC,KAAK,EAAE,iBAAiB,EAAU,MAAM,EAAE;QAAE,aAAa,EAAE,MAAM,CAAA;KAAE;IAIhF,gBAAgB;IAIV,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,KAAK,MAAM,EAAE,EAAE,qBAAqB,GAAG,SAAS,CAAC;YAUhG,0BAA0B;IAU3B,YAAY,IAAI,OAAO,CAAC,MAAM,CAAC,KAAK,MAAM,EAAE,EAAE,sBAAsB,CAAC,CAAC;YAQrE,UAAU;IAKxB,OAAO,CAAC,gBAAgB;IAMxB,OAAO,CAAC,kBAAkB;IAW1B,OAAO,CAAC,cAAc;IAmBtB,OAAO,CAAC,gBAAgB;CAgBzB"}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { BufferReader, numToUInt8, numToUInt32BE, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
2
|
+
/**
 * Persists per-validator slot-status histories in a key-value store.
 * Each validator's history is serialized as a sequence of (uint32 slot, uint8 status)
 * records and trimmed to at most `config.historyLength` entries.
 */
export class SentinelStore {
  store;
  config;
  static SCHEMA_VERSION = 1;
  map;

  constructor(store, config) {
    this.store = store;
    this.config = config;
    this.map = store.openMap('sentinel-validator-status');
  }

  /** Maximum number of history entries retained per validator. */
  getHistoryLength() {
    return this.config.historyLength;
  }

  /** Appends the given slot's status to each validator's history, skipping undefined statuses. */
  async updateValidators(slot, statuses) {
    await this.store.transactionAsync(async () => {
      for (const [address, status] of Object.entries(statuses)) {
        if (!status) {
          continue;
        }
        await this.pushValidatorStatusForSlot(address, slot, status);
      }
    });
  }

  /** Appends one entry for a single validator and trims the history to the configured length. */
  async pushValidatorStatusForSlot(who, slot, status) {
    const existing = (await this.getHistory(who)) ?? [];
    const trimmed = existing.concat([{ slot, status }]).slice(-this.config.historyLength);
    await this.map.set(who, this.serializeHistory(trimmed));
  }

  /** Returns the deserialized history for every validator in the map. */
  async getHistories() {
    const result = {};
    for await (const [address, raw] of this.map.entriesAsync()) {
      result[address] = this.deserializeHistory(raw);
    }
    return result;
  }

  /** Returns one validator's history, or undefined when none is stored. */
  async getHistory(address) {
    const raw = await this.map.getAsync(address);
    return raw && this.deserializeHistory(raw);
  }

  /** Encodes a history as consecutive (uint32 BE slot, uint8 status) records. */
  serializeHistory(history) {
    const records = history.map(entry => [
      numToUInt32BE(Number(entry.slot)),
      numToUInt8(this.statusToNumber(entry.status)),
    ]);
    return serializeToBuffer(records);
  }

  /** Decodes a buffer produced by serializeHistory back into history entries. */
  deserializeHistory(buffer) {
    const reader = new BufferReader(buffer);
    const entries = [];
    while (!reader.isEmpty()) {
      entries.push({
        slot: BigInt(reader.readNumber()),
        status: this.statusFromNumber(reader.readUInt8()),
      });
    }
    return entries;
  }

  /** Maps a status name to its wire code; throws on unknown statuses. */
  statusToNumber(status) {
    const code = {
      'block-mined': 1,
      'block-proposed': 2,
      'block-missed': 3,
      'attestation-sent': 4,
      'attestation-missed': 5,
    }[status];
    if (code === undefined) {
      throw new Error(`Unknown status: ${status}`);
    }
    return code;
  }

  /** Maps a wire code back to its status name; throws on unknown codes. */
  statusFromNumber(status) {
    // Index 0 is unused so that codes line up with statusToNumber.
    const name = [
      undefined,
      'block-mined',
      'block-proposed',
      'block-missed',
      'attestation-sent',
      'attestation-missed',
    ][status];
    if (name === undefined) {
      throw new Error(`Unknown status: ${status}`);
    }
    return name;
  }
}
|
package/package.json
CHANGED
|
@@ -1,11 +1,12 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aztec/aztec-node",
|
|
3
|
-
"version": "0.82.
|
|
3
|
+
"version": "0.82.3-nightly.20250403",
|
|
4
4
|
"main": "dest/index.js",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"exports": {
|
|
7
7
|
".": "./dest/index.js",
|
|
8
|
-
"./config": "./dest/aztec-node/config.js"
|
|
8
|
+
"./config": "./dest/aztec-node/config.js",
|
|
9
|
+
"./sentinel": "./dest/aztec-node/sentinel.js"
|
|
9
10
|
},
|
|
10
11
|
"bin": "./dest/bin/index.js",
|
|
11
12
|
"typedocOptions": {
|
|
@@ -62,27 +63,30 @@
|
|
|
62
63
|
]
|
|
63
64
|
},
|
|
64
65
|
"dependencies": {
|
|
65
|
-
"@aztec/archiver": "0.82.
|
|
66
|
-
"@aztec/bb-prover": "0.82.
|
|
67
|
-
"@aztec/blob-sink": "0.82.
|
|
68
|
-
"@aztec/constants": "0.82.
|
|
69
|
-
"@aztec/epoch-cache": "0.82.
|
|
70
|
-
"@aztec/ethereum": "0.82.
|
|
71
|
-
"@aztec/foundation": "0.82.
|
|
72
|
-
"@aztec/kv-store": "0.82.
|
|
73
|
-
"@aztec/
|
|
74
|
-
"@aztec/
|
|
75
|
-
"@aztec/
|
|
76
|
-
"@aztec/
|
|
77
|
-
"@aztec/
|
|
78
|
-
"@aztec/
|
|
79
|
-
"@aztec/
|
|
80
|
-
"@aztec/
|
|
81
|
-
"@aztec/
|
|
82
|
-
"@aztec/
|
|
66
|
+
"@aztec/archiver": "0.82.3-nightly.20250403",
|
|
67
|
+
"@aztec/bb-prover": "0.82.3-nightly.20250403",
|
|
68
|
+
"@aztec/blob-sink": "0.82.3-nightly.20250403",
|
|
69
|
+
"@aztec/constants": "0.82.3-nightly.20250403",
|
|
70
|
+
"@aztec/epoch-cache": "0.82.3-nightly.20250403",
|
|
71
|
+
"@aztec/ethereum": "0.82.3-nightly.20250403",
|
|
72
|
+
"@aztec/foundation": "0.82.3-nightly.20250403",
|
|
73
|
+
"@aztec/kv-store": "0.82.3-nightly.20250403",
|
|
74
|
+
"@aztec/l1-artifacts": "0.82.3-nightly.20250403",
|
|
75
|
+
"@aztec/merkle-tree": "0.82.3-nightly.20250403",
|
|
76
|
+
"@aztec/node-lib": "0.82.3-nightly.20250403",
|
|
77
|
+
"@aztec/p2p": "0.82.3-nightly.20250403",
|
|
78
|
+
"@aztec/protocol-contracts": "0.82.3-nightly.20250403",
|
|
79
|
+
"@aztec/prover-client": "0.82.3-nightly.20250403",
|
|
80
|
+
"@aztec/sequencer-client": "0.82.3-nightly.20250403",
|
|
81
|
+
"@aztec/simulator": "0.82.3-nightly.20250403",
|
|
82
|
+
"@aztec/stdlib": "0.82.3-nightly.20250403",
|
|
83
|
+
"@aztec/telemetry-client": "0.82.3-nightly.20250403",
|
|
84
|
+
"@aztec/validator-client": "0.82.3-nightly.20250403",
|
|
85
|
+
"@aztec/world-state": "0.82.3-nightly.20250403",
|
|
83
86
|
"koa": "^2.14.2",
|
|
84
87
|
"koa-router": "^12.0.0",
|
|
85
|
-
"tslib": "^2.4.0"
|
|
88
|
+
"tslib": "^2.4.0",
|
|
89
|
+
"viem": "2.23.7"
|
|
86
90
|
},
|
|
87
91
|
"devDependencies": {
|
|
88
92
|
"@jest/globals": "^29.5.0",
|
package/src/aztec-node/config.ts
CHANGED
|
@@ -1,9 +1,15 @@
|
|
|
1
1
|
import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config';
|
|
2
|
-
import {
|
|
2
|
+
import {
|
|
3
|
+
type GenesisStateConfig,
|
|
4
|
+
type L1ContractAddresses,
|
|
5
|
+
genesisStateConfigMappings,
|
|
6
|
+
l1ContractAddressesMapping,
|
|
7
|
+
} from '@aztec/ethereum';
|
|
3
8
|
import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config';
|
|
4
9
|
import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
|
|
10
|
+
import { type SharedNodeConfig, sharedNodeConfigMappings } from '@aztec/node-lib/config';
|
|
5
11
|
import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config';
|
|
6
|
-
import { type
|
|
12
|
+
import { type ProverClientUserConfig, proverClientConfigMappings } from '@aztec/prover-client/config';
|
|
7
13
|
import { type SequencerClientConfig, sequencerClientConfigMappings } from '@aztec/sequencer-client/config';
|
|
8
14
|
import { type ValidatorClientConfig, validatorClientConfigMappings } from '@aztec/validator-client/config';
|
|
9
15
|
import { type WorldStateConfig, worldStateConfigMappings } from '@aztec/world-state/config';
|
|
@@ -12,6 +18,8 @@ import { readFileSync } from 'fs';
|
|
|
12
18
|
import { dirname, resolve } from 'path';
|
|
13
19
|
import { fileURLToPath } from 'url';
|
|
14
20
|
|
|
21
|
+
import { type SentinelConfig, sentinelConfigMappings } from '../sentinel/config.js';
|
|
22
|
+
|
|
15
23
|
export { sequencerClientConfigMappings, type SequencerClientConfig };
|
|
16
24
|
|
|
17
25
|
/**
|
|
@@ -20,17 +28,18 @@ export { sequencerClientConfigMappings, type SequencerClientConfig };
|
|
|
20
28
|
export type AztecNodeConfig = ArchiverConfig &
|
|
21
29
|
SequencerClientConfig &
|
|
22
30
|
ValidatorClientConfig &
|
|
23
|
-
|
|
31
|
+
ProverClientUserConfig &
|
|
24
32
|
WorldStateConfig &
|
|
25
|
-
Pick<
|
|
33
|
+
Pick<ProverClientUserConfig, 'bbBinaryPath' | 'bbWorkingDirectory' | 'realProofs'> &
|
|
26
34
|
P2PConfig &
|
|
27
|
-
DataStoreConfig &
|
|
35
|
+
DataStoreConfig &
|
|
36
|
+
SentinelConfig &
|
|
37
|
+
SharedNodeConfig &
|
|
38
|
+
GenesisStateConfig & {
|
|
39
|
+
/** L1 contracts addresses */
|
|
40
|
+
l1Contracts: L1ContractAddresses;
|
|
28
41
|
/** Whether the validator is disabled for this node */
|
|
29
42
|
disableValidator: boolean;
|
|
30
|
-
/** Whether to populate the genesis state with initial fee juice for the test accounts */
|
|
31
|
-
testAccounts: boolean;
|
|
32
|
-
} & {
|
|
33
|
-
l1Contracts: L1ContractAddresses;
|
|
34
43
|
};
|
|
35
44
|
|
|
36
45
|
export const aztecNodeConfigMappings: ConfigMappingsType<AztecNodeConfig> = {
|
|
@@ -41,6 +50,9 @@ export const aztecNodeConfigMappings: ConfigMappingsType<AztecNodeConfig> = {
|
|
|
41
50
|
...proverClientConfigMappings,
|
|
42
51
|
...worldStateConfigMappings,
|
|
43
52
|
...p2pConfigMappings,
|
|
53
|
+
...sentinelConfigMappings,
|
|
54
|
+
...sharedNodeConfigMappings,
|
|
55
|
+
...genesisStateConfigMappings,
|
|
44
56
|
l1Contracts: {
|
|
45
57
|
description: 'The deployed L1 contract addresses',
|
|
46
58
|
nested: l1ContractAddressesMapping,
|
|
@@ -50,11 +62,6 @@ export const aztecNodeConfigMappings: ConfigMappingsType<AztecNodeConfig> = {
|
|
|
50
62
|
description: 'Whether the validator is disabled for this node.',
|
|
51
63
|
...booleanConfigHelper(),
|
|
52
64
|
},
|
|
53
|
-
testAccounts: {
|
|
54
|
-
env: 'TEST_ACCOUNTS',
|
|
55
|
-
description: 'Whether to populate the genesis state with initial fee juice for the test accounts.',
|
|
56
|
-
...booleanConfigHelper(),
|
|
57
|
-
},
|
|
58
65
|
};
|
|
59
66
|
|
|
60
67
|
/**
|