@aztec/prover-node 0.85.0-alpha-testnet.2 → 0.85.0-alpha-testnet.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/actions/download-epoch-proving-job.d.ts +15 -0
- package/dest/actions/download-epoch-proving-job.d.ts.map +1 -0
- package/dest/actions/download-epoch-proving-job.js +35 -0
- package/dest/actions/index.d.ts +3 -0
- package/dest/actions/index.d.ts.map +1 -0
- package/dest/actions/index.js +2 -0
- package/dest/actions/rerun-epoch-proving-job.d.ts +11 -0
- package/dest/actions/rerun-epoch-proving-job.d.ts.map +1 -0
- package/dest/actions/rerun-epoch-proving-job.js +37 -0
- package/dest/actions/upload-epoch-proof-failure.d.ts +15 -0
- package/dest/actions/upload-epoch-proof-failure.d.ts.map +1 -0
- package/dest/actions/upload-epoch-proof-failure.js +78 -0
- package/dest/config.d.ts +4 -4
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +12 -7
- package/dest/factory.d.ts +3 -2
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +6 -8
- package/dest/index.d.ts +1 -0
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -0
- package/dest/job/epoch-proving-job-data.d.ts +17 -0
- package/dest/job/epoch-proving-job-data.d.ts.map +1 -0
- package/dest/job/epoch-proving-job-data.js +45 -0
- package/dest/job/epoch-proving-job.d.ts +9 -9
- package/dest/job/epoch-proving-job.d.ts.map +1 -1
- package/dest/job/epoch-proving-job.js +38 -21
- package/dest/metrics.d.ts +27 -4
- package/dest/metrics.d.ts.map +1 -1
- package/dest/metrics.js +104 -36
- package/dest/monitors/epoch-monitor.d.ts +1 -1
- package/dest/monitors/epoch-monitor.d.ts.map +1 -1
- package/dest/monitors/epoch-monitor.js +7 -2
- package/dest/prover-coordination/combined-prover-coordination.d.ts +22 -0
- package/dest/prover-coordination/combined-prover-coordination.d.ts.map +1 -0
- package/dest/prover-coordination/combined-prover-coordination.js +136 -0
- package/dest/prover-coordination/config.d.ts +1 -1
- package/dest/prover-coordination/config.d.ts.map +1 -1
- package/dest/prover-coordination/config.js +5 -4
- package/dest/prover-coordination/factory.d.ts +5 -4
- package/dest/prover-coordination/factory.d.ts.map +1 -1
- package/dest/prover-coordination/factory.js +23 -14
- package/dest/prover-node-publisher.d.ts +1 -0
- package/dest/prover-node-publisher.d.ts.map +1 -1
- package/dest/prover-node-publisher.js +5 -2
- package/dest/prover-node.d.ts +26 -24
- package/dest/prover-node.d.ts.map +1 -1
- package/dest/prover-node.js +102 -50
- package/dest/test/index.d.ts +4 -2
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +1 -1
- package/package.json +20 -20
- package/src/actions/download-epoch-proving-job.ts +46 -0
- package/src/actions/index.ts +2 -0
- package/src/actions/rerun-epoch-proving-job.ts +59 -0
- package/src/actions/upload-epoch-proof-failure.ts +88 -0
- package/src/config.ts +16 -10
- package/src/factory.ts +25 -12
- package/src/index.ts +1 -0
- package/src/job/epoch-proving-job-data.ts +68 -0
- package/src/job/epoch-proving-job.ts +53 -23
- package/src/metrics.ts +111 -38
- package/src/monitors/epoch-monitor.ts +5 -3
- package/src/prover-coordination/combined-prover-coordination.ts +160 -0
- package/src/prover-coordination/config.ts +6 -5
- package/src/prover-coordination/factory.ts +37 -25
- package/src/prover-node-publisher.ts +7 -3
- package/src/prover-node.ts +144 -70
- package/src/test/index.ts +5 -2
|
@@ -11,20 +11,18 @@ import { Timer } from '@aztec/foundation/timer';
|
|
|
11
11
|
import { EpochProvingJobTerminalState } from '@aztec/stdlib/interfaces/server';
|
|
12
12
|
import { Attributes, trackSpan } from '@aztec/telemetry-client';
|
|
13
13
|
import * as crypto from 'node:crypto';
|
|
14
|
+
import { validateEpochProvingJobData } from './epoch-proving-job-data.js';
|
|
14
15
|
/**
|
|
15
16
|
* Job that grabs a range of blocks from the unfinalised chain from L1, gets their txs given their hashes,
|
|
16
17
|
* re-executes their public calls, generates a rollup proof, and submits it to L1. This job will update the
|
|
17
18
|
* world state as part of public call execution via the public processor.
|
|
18
19
|
*/ export class EpochProvingJob {
|
|
20
|
+
data;
|
|
19
21
|
dbProvider;
|
|
20
|
-
epochNumber;
|
|
21
|
-
blocks;
|
|
22
|
-
txs;
|
|
23
22
|
prover;
|
|
24
23
|
publicProcessorFactory;
|
|
25
24
|
publisher;
|
|
26
25
|
l2BlockSource;
|
|
27
|
-
l1ToL2MessageSource;
|
|
28
26
|
metrics;
|
|
29
27
|
deadline;
|
|
30
28
|
config;
|
|
@@ -35,25 +33,23 @@ import * as crypto from 'node:crypto';
|
|
|
35
33
|
epochCheckPromise;
|
|
36
34
|
deadlineTimeoutHandler;
|
|
37
35
|
tracer;
|
|
38
|
-
constructor(
|
|
36
|
+
constructor(data, dbProvider, prover, publicProcessorFactory, publisher, l2BlockSource, metrics, deadline, config = {
|
|
39
37
|
parallelBlockLimit: 32
|
|
40
38
|
}){
|
|
39
|
+
this.data = data;
|
|
41
40
|
this.dbProvider = dbProvider;
|
|
42
|
-
this.epochNumber = epochNumber;
|
|
43
|
-
this.blocks = blocks;
|
|
44
|
-
this.txs = txs;
|
|
45
41
|
this.prover = prover;
|
|
46
42
|
this.publicProcessorFactory = publicProcessorFactory;
|
|
47
43
|
this.publisher = publisher;
|
|
48
44
|
this.l2BlockSource = l2BlockSource;
|
|
49
|
-
this.l1ToL2MessageSource = l1ToL2MessageSource;
|
|
50
45
|
this.metrics = metrics;
|
|
51
46
|
this.deadline = deadline;
|
|
52
47
|
this.config = config;
|
|
53
48
|
this.state = 'initialized';
|
|
54
49
|
this.log = createLogger('prover-node:epoch-proving-job');
|
|
50
|
+
validateEpochProvingJobData(data);
|
|
55
51
|
this.uuid = crypto.randomUUID();
|
|
56
|
-
this.tracer = metrics.
|
|
52
|
+
this.tracer = metrics.tracer;
|
|
57
53
|
}
|
|
58
54
|
getId() {
|
|
59
55
|
return this.uuid;
|
|
@@ -62,11 +58,23 @@ import * as crypto from 'node:crypto';
|
|
|
62
58
|
return this.state;
|
|
63
59
|
}
|
|
64
60
|
getEpochNumber() {
|
|
65
|
-
return this.epochNumber;
|
|
61
|
+
return this.data.epochNumber;
|
|
66
62
|
}
|
|
67
63
|
getDeadline() {
|
|
68
64
|
return this.deadline;
|
|
69
65
|
}
|
|
66
|
+
getProvingData() {
|
|
67
|
+
return this.data;
|
|
68
|
+
}
|
|
69
|
+
get epochNumber() {
|
|
70
|
+
return this.data.epochNumber;
|
|
71
|
+
}
|
|
72
|
+
get blocks() {
|
|
73
|
+
return this.data.blocks;
|
|
74
|
+
}
|
|
75
|
+
get txs() {
|
|
76
|
+
return this.data.txs;
|
|
77
|
+
}
|
|
70
78
|
/**
|
|
71
79
|
* Proves the given epoch and submits the proof to L1.
|
|
72
80
|
*/ async run() {
|
|
@@ -97,8 +105,8 @@ import * as crypto from 'node:crypto';
|
|
|
97
105
|
this.checkState();
|
|
98
106
|
const globalVariables = block.header.globalVariables;
|
|
99
107
|
const txs = await this.getTxs(block);
|
|
100
|
-
const l1ToL2Messages =
|
|
101
|
-
const previousHeader =
|
|
108
|
+
const l1ToL2Messages = this.getL1ToL2Messages(block);
|
|
109
|
+
const previousHeader = this.getBlockHeader(block.number - 1);
|
|
102
110
|
this.log.verbose(`Starting processing block ${block.number}`, {
|
|
103
111
|
number: block.number,
|
|
104
112
|
blockHash: (await block.hash()).toString(),
|
|
@@ -217,9 +225,14 @@ import * as crypto from 'node:crypto';
|
|
|
217
225
|
* Kicks off a running promise that queries the archiver for the set of L2 blocks of the current epoch.
|
|
218
226
|
* If those change, stops the proving job with a `rerun` state, so the node re-enqueues it.
|
|
219
227
|
*/ async scheduleEpochCheck() {
|
|
220
|
-
const
|
|
228
|
+
const l2BlockSource = this.l2BlockSource;
|
|
229
|
+
if (!l2BlockSource) {
|
|
230
|
+
this.log.warn(`No L2 block source available, skipping epoch check`);
|
|
231
|
+
return;
|
|
232
|
+
}
|
|
233
|
+
const intervalMs = Math.ceil((await l2BlockSource.getL1Constants()).ethereumSlotDuration / 2) * 1000;
|
|
221
234
|
this.epochCheckPromise = new RunningPromise(async ()=>{
|
|
222
|
-
const blocks = await
|
|
235
|
+
const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
|
|
223
236
|
const blockHashes = await Promise.all(blocks.map((block)=>block.hash()));
|
|
224
237
|
const thisBlockHashes = await Promise.all(this.blocks.map((block)=>block.hash()));
|
|
225
238
|
if (blocks.length !== this.blocks.length || !blockHashes.every((block, i)=>block.equals(thisBlockHashes[i]))) {
|
|
@@ -234,11 +247,15 @@ import * as crypto from 'node:crypto';
|
|
|
234
247
|
}, this.log, intervalMs).start();
|
|
235
248
|
this.log.verbose(`Scheduled epoch check for epoch ${this.epochNumber} every ${intervalMs}ms`);
|
|
236
249
|
}
|
|
237
|
-
/* Returns the header for the given block number
|
|
238
|
-
|
|
239
|
-
|
|
250
|
+
/* Returns the header for the given block number based on the epoch proving job data. */ getBlockHeader(blockNumber) {
|
|
251
|
+
const block = this.blocks.find((b)=>b.number === blockNumber);
|
|
252
|
+
if (block) {
|
|
253
|
+
return block.header;
|
|
254
|
+
}
|
|
255
|
+
if (blockNumber === Number(this.data.previousBlockHeader.getBlockNumber())) {
|
|
256
|
+
return this.data.previousBlockHeader;
|
|
240
257
|
}
|
|
241
|
-
|
|
258
|
+
throw new Error(`Block header not found for block number ${blockNumber} (got ${this.blocks.map((b)=>b.number).join(', ')} and previous header ${this.data.previousBlockHeader.getBlockNumber()})`);
|
|
242
259
|
}
|
|
243
260
|
async getTxs(block) {
|
|
244
261
|
const txHashes = block.body.txEffects.map((tx)=>tx.txHash.toBigInt());
|
|
@@ -249,7 +266,7 @@ import * as crypto from 'node:crypto';
|
|
|
249
266
|
return txsAndHashes.filter((txAndHash)=>txHashes.includes(txAndHash.hash.toBigInt())).map((txAndHash)=>txAndHash.tx);
|
|
250
267
|
}
|
|
251
268
|
getL1ToL2Messages(block) {
|
|
252
|
-
return this.
|
|
269
|
+
return this.data.l1ToL2Messages[block.number];
|
|
253
270
|
}
|
|
254
271
|
async processTxs(publicProcessor, txs) {
|
|
255
272
|
const { deadline } = this;
|
|
@@ -269,7 +286,7 @@ import * as crypto from 'node:crypto';
|
|
|
269
286
|
_ts_decorate([
|
|
270
287
|
trackSpan('EpochProvingJob.run', function() {
|
|
271
288
|
return {
|
|
272
|
-
[Attributes.EPOCH_NUMBER]: Number(this.epochNumber)
|
|
289
|
+
[Attributes.EPOCH_NUMBER]: Number(this.data.epochNumber)
|
|
273
290
|
};
|
|
274
291
|
})
|
|
275
292
|
], EpochProvingJob.prototype, "run", null);
|
package/dest/metrics.d.ts
CHANGED
|
@@ -1,12 +1,35 @@
|
|
|
1
|
+
import type { RollupContract } from '@aztec/ethereum';
|
|
2
|
+
import type { EthAddress } from '@aztec/foundation/eth-address';
|
|
1
3
|
import type { L1PublishProofStats } from '@aztec/stdlib/stats';
|
|
2
|
-
import { type Gauge, type Histogram, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client';
|
|
3
|
-
export declare class
|
|
4
|
-
|
|
4
|
+
import { type Gauge, type Histogram, type Meter, type TelemetryClient, type Tracer, type UpDownCounter } from '@aztec/telemetry-client';
|
|
5
|
+
export declare class ProverNodeJobMetrics {
|
|
6
|
+
private meter;
|
|
7
|
+
readonly tracer: Tracer;
|
|
5
8
|
private logger;
|
|
6
9
|
proverEpochExecutionDuration: Histogram;
|
|
7
10
|
provingJobDuration: Histogram;
|
|
8
11
|
provingJobBlocks: Gauge;
|
|
9
12
|
provingJobTransactions: Gauge;
|
|
13
|
+
constructor(meter: Meter, tracer: Tracer, logger?: import("@aztec/foundation/log").Logger);
|
|
14
|
+
recordProvingJob(executionTimeMs: number, totalTimeMs: number, numBlocks: number, numTxs: number): void;
|
|
15
|
+
}
|
|
16
|
+
export declare class ProverNodeRewardsMetrics {
|
|
17
|
+
private meter;
|
|
18
|
+
private coinbase;
|
|
19
|
+
private rollup;
|
|
20
|
+
private logger;
|
|
21
|
+
private rewards;
|
|
22
|
+
private accumulatedRewards;
|
|
23
|
+
private prevEpoch;
|
|
24
|
+
private proofSubmissionWindow;
|
|
25
|
+
constructor(meter: Meter, coinbase: EthAddress, rollup: RollupContract, logger?: import("@aztec/foundation/log").Logger);
|
|
26
|
+
start(): Promise<void>;
|
|
27
|
+
stop(): void;
|
|
28
|
+
private observe;
|
|
29
|
+
}
|
|
30
|
+
export declare class ProverNodePublisherMetrics {
|
|
31
|
+
readonly client: TelemetryClient;
|
|
32
|
+
private logger;
|
|
10
33
|
gasPrice: Histogram;
|
|
11
34
|
txCount: UpDownCounter;
|
|
12
35
|
txDuration: Histogram;
|
|
@@ -17,10 +40,10 @@ export declare class ProverNodeMetrics {
|
|
|
17
40
|
txBlobDataGasCost: Histogram;
|
|
18
41
|
txTotalFee: Histogram;
|
|
19
42
|
private senderBalance;
|
|
43
|
+
private meter;
|
|
20
44
|
constructor(client: TelemetryClient, name?: string, logger?: import("@aztec/foundation/log").Logger);
|
|
21
45
|
recordFailedTx(): void;
|
|
22
46
|
recordSubmitProof(durationMs: number, stats: L1PublishProofStats): void;
|
|
23
|
-
recordProvingJob(executionTimeMs: number, totalTimeMs: number, numBlocks: number, numTxs: number): void;
|
|
24
47
|
recordSenderBalance(wei: bigint, senderAddress: string): void;
|
|
25
48
|
private recordTx;
|
|
26
49
|
}
|
package/dest/metrics.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"metrics.d.ts","sourceRoot":"","sources":["../src/metrics.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"metrics.d.ts","sourceRoot":"","sources":["../src/metrics.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACtD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAEhE,OAAO,KAAK,EAAE,mBAAmB,EAAkB,MAAM,qBAAqB,CAAC;AAC/E,OAAO,EAGL,KAAK,KAAK,EACV,KAAK,SAAS,EACd,KAAK,KAAK,EAGV,KAAK,eAAe,EACpB,KAAK,MAAM,EACX,KAAK,aAAa,EAEnB,MAAM,yBAAyB,CAAC;AAIjC,qBAAa,oBAAoB;IAO7B,OAAO,CAAC,KAAK;aACG,MAAM,EAAE,MAAM;IAC9B,OAAO,CAAC,MAAM;IARhB,4BAA4B,EAAE,SAAS,CAAC;IACxC,kBAAkB,EAAE,SAAS,CAAC;IAC9B,gBAAgB,EAAE,KAAK,CAAC;IACxB,sBAAsB,EAAE,KAAK,CAAC;gBAGpB,KAAK,EAAE,KAAK,EACJ,MAAM,EAAE,MAAM,EACtB,MAAM,yCAAgD;IAsBzD,gBAAgB,CAAC,eAAe,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAMxG;AAED,qBAAa,wBAAwB;IAOjC,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,MAAM;IAThB,OAAO,CAAC,OAAO,CAAkB;IACjC,OAAO,CAAC,kBAAkB,CAAgB;IAC1C,OAAO,CAAC,SAAS,CAAO;IACxB,OAAO,CAAC,qBAAqB,CAAM;gBAGzB,KAAK,EAAE,KAAK,EACZ,QAAQ,EAAE,UAAU,EACpB,MAAM,EAAE,cAAc,EACtB,MAAM,yCAAgD;IAanD,KAAK;IAKX,IAAI;IAIX,OAAO,CAAC,OAAO,CAsBb;CACH;AAED,qBAAa,0BAA0B;aAenB,MAAM,EAAE,eAAe;IAEvC,OAAO,CAAC,MAAM;IAhBhB,QAAQ,EAAE,SAAS,CAAC;IACpB,OAAO,EAAE,aAAa,CAAC;IACvB,UAAU,EAAE,SAAS,CAAC;IACtB,KAAK,EAAE,SAAS,CAAC;IACjB,cAAc,EAAE,SAAS,CAAC;IAC1B,aAAa,EAAE,SAAS,CAAC;IACzB,iBAAiB,EAAE,SAAS,CAAC;IAC7B,iBAAiB,EAAE,SAAS,CAAC;IAC7B,UAAU,EAAE,SAAS,CAAC;IAEtB,OAAO,CAAC,aAAa,CAAQ;IAC7B,OAAO,CAAC,KAAK,CAAQ;gBAGH,MAAM,EAAE,eAAe,EACvC,IAAI,SAAe,EACX,MAAM,yCAAgD;IAoEhE,cAAc;IAOd,iBAAiB,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,mBAAmB;IAIzD,mBAAmB,CAAC,GAAG,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM;IAO7D,OAAO,CAAC,QAAQ;CAuCjB"}
|
package/dest/metrics.js
CHANGED
|
@@ -1,84 +1,158 @@
|
|
|
1
1
|
import { createLogger } from '@aztec/foundation/log';
|
|
2
2
|
import { Attributes, Metrics, ValueType } from '@aztec/telemetry-client';
|
|
3
|
-
import { formatEther } from 'viem';
|
|
4
|
-
export class
|
|
5
|
-
|
|
3
|
+
import { formatEther, formatUnits } from 'viem';
|
|
4
|
+
export class ProverNodeJobMetrics {
|
|
5
|
+
meter;
|
|
6
|
+
tracer;
|
|
6
7
|
logger;
|
|
7
8
|
proverEpochExecutionDuration;
|
|
8
9
|
provingJobDuration;
|
|
9
10
|
provingJobBlocks;
|
|
10
11
|
provingJobTransactions;
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
txGas;
|
|
15
|
-
txCalldataSize;
|
|
16
|
-
txCalldataGas;
|
|
17
|
-
txBlobDataGasUsed;
|
|
18
|
-
txBlobDataGasCost;
|
|
19
|
-
txTotalFee;
|
|
20
|
-
senderBalance;
|
|
21
|
-
constructor(client, name = 'ProverNode', logger = createLogger('prover-node:publisher:metrics')){
|
|
22
|
-
this.client = client;
|
|
12
|
+
constructor(meter, tracer, logger = createLogger('prover-node:publisher:metrics')){
|
|
13
|
+
this.meter = meter;
|
|
14
|
+
this.tracer = tracer;
|
|
23
15
|
this.logger = logger;
|
|
24
|
-
|
|
25
|
-
this.proverEpochExecutionDuration = meter.createHistogram(Metrics.PROVER_NODE_EXECUTION_DURATION, {
|
|
16
|
+
this.proverEpochExecutionDuration = this.meter.createHistogram(Metrics.PROVER_NODE_EXECUTION_DURATION, {
|
|
26
17
|
description: 'Duration of execution of an epoch by the prover',
|
|
27
18
|
unit: 'ms',
|
|
28
19
|
valueType: ValueType.INT
|
|
29
20
|
});
|
|
30
|
-
this.provingJobDuration = meter.createHistogram(Metrics.PROVER_NODE_JOB_DURATION, {
|
|
21
|
+
this.provingJobDuration = this.meter.createHistogram(Metrics.PROVER_NODE_JOB_DURATION, {
|
|
31
22
|
description: 'Duration of proving job',
|
|
32
23
|
unit: 's',
|
|
33
24
|
valueType: ValueType.DOUBLE
|
|
34
25
|
});
|
|
35
|
-
this.provingJobBlocks = meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS, {
|
|
26
|
+
this.provingJobBlocks = this.meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS, {
|
|
36
27
|
description: 'Number of blocks in a proven epoch',
|
|
37
28
|
valueType: ValueType.INT
|
|
38
29
|
});
|
|
39
|
-
this.provingJobTransactions = meter.createGauge(Metrics.PROVER_NODE_JOB_TRANSACTIONS, {
|
|
30
|
+
this.provingJobTransactions = this.meter.createGauge(Metrics.PROVER_NODE_JOB_TRANSACTIONS, {
|
|
40
31
|
description: 'Number of transactions in a proven epoch',
|
|
41
32
|
valueType: ValueType.INT
|
|
42
33
|
});
|
|
43
|
-
|
|
34
|
+
}
|
|
35
|
+
recordProvingJob(executionTimeMs, totalTimeMs, numBlocks, numTxs) {
|
|
36
|
+
this.proverEpochExecutionDuration.record(Math.ceil(executionTimeMs));
|
|
37
|
+
this.provingJobDuration.record(totalTimeMs / 1000);
|
|
38
|
+
this.provingJobBlocks.record(Math.floor(numBlocks));
|
|
39
|
+
this.provingJobTransactions.record(Math.floor(numTxs));
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
export class ProverNodeRewardsMetrics {
|
|
43
|
+
meter;
|
|
44
|
+
coinbase;
|
|
45
|
+
rollup;
|
|
46
|
+
logger;
|
|
47
|
+
rewards;
|
|
48
|
+
accumulatedRewards;
|
|
49
|
+
prevEpoch;
|
|
50
|
+
proofSubmissionWindow;
|
|
51
|
+
constructor(meter, coinbase, rollup, logger = createLogger('prover-node:publisher:metrics')){
|
|
52
|
+
this.meter = meter;
|
|
53
|
+
this.coinbase = coinbase;
|
|
54
|
+
this.rollup = rollup;
|
|
55
|
+
this.logger = logger;
|
|
56
|
+
this.prevEpoch = -1n;
|
|
57
|
+
this.proofSubmissionWindow = 0n;
|
|
58
|
+
this.observe = async (observer)=>{
|
|
59
|
+
const slot = await this.rollup.getSlotNumber();
|
|
60
|
+
// look at the prev epoch so that we get an accurate value, after proof submission window has closed
|
|
61
|
+
if (slot > this.proofSubmissionWindow) {
|
|
62
|
+
const closedEpoch = await this.rollup.getEpochNumberForSlotNumber(slot - this.proofSubmissionWindow);
|
|
63
|
+
const rewards = await this.rollup.getSpecificProverRewardsForEpoch(closedEpoch, this.coinbase);
|
|
64
|
+
const fmt = parseFloat(formatUnits(rewards, 18));
|
|
65
|
+
observer.observe(this.rewards, fmt, {
|
|
66
|
+
[Attributes.COINBASE]: this.coinbase.toString()
|
|
67
|
+
});
|
|
68
|
+
// only accumulate once per epoch
|
|
69
|
+
if (closedEpoch > this.prevEpoch) {
|
|
70
|
+
this.prevEpoch = closedEpoch;
|
|
71
|
+
this.accumulatedRewards.add(fmt, {
|
|
72
|
+
[Attributes.COINBASE]: this.coinbase.toString()
|
|
73
|
+
});
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
};
|
|
77
|
+
this.rewards = this.meter.createObservableGauge(Metrics.PROVER_NODE_REWARDS_PER_EPOCH, {
|
|
78
|
+
valueType: ValueType.DOUBLE,
|
|
79
|
+
description: 'The rewards earned'
|
|
80
|
+
});
|
|
81
|
+
this.accumulatedRewards = this.meter.createUpDownCounter(Metrics.PROVER_NODE_REWARDS_TOTAL, {
|
|
82
|
+
valueType: ValueType.DOUBLE,
|
|
83
|
+
description: 'The rewards earned (total)'
|
|
84
|
+
});
|
|
85
|
+
}
|
|
86
|
+
async start() {
|
|
87
|
+
this.proofSubmissionWindow = await this.rollup.getProofSubmissionWindow();
|
|
88
|
+
this.meter.addBatchObservableCallback(this.observe, [
|
|
89
|
+
this.rewards
|
|
90
|
+
]);
|
|
91
|
+
}
|
|
92
|
+
stop() {
|
|
93
|
+
this.meter.removeBatchObservableCallback(this.observe, [
|
|
94
|
+
this.rewards
|
|
95
|
+
]);
|
|
96
|
+
}
|
|
97
|
+
observe;
|
|
98
|
+
}
|
|
99
|
+
export class ProverNodePublisherMetrics {
|
|
100
|
+
client;
|
|
101
|
+
logger;
|
|
102
|
+
gasPrice;
|
|
103
|
+
txCount;
|
|
104
|
+
txDuration;
|
|
105
|
+
txGas;
|
|
106
|
+
txCalldataSize;
|
|
107
|
+
txCalldataGas;
|
|
108
|
+
txBlobDataGasUsed;
|
|
109
|
+
txBlobDataGasCost;
|
|
110
|
+
txTotalFee;
|
|
111
|
+
senderBalance;
|
|
112
|
+
meter;
|
|
113
|
+
constructor(client, name = 'ProverNode', logger = createLogger('prover-node:publisher:metrics')){
|
|
114
|
+
this.client = client;
|
|
115
|
+
this.logger = logger;
|
|
116
|
+
this.meter = client.getMeter(name);
|
|
117
|
+
this.gasPrice = this.meter.createHistogram(Metrics.L1_PUBLISHER_GAS_PRICE, {
|
|
44
118
|
description: 'The gas price used for transactions',
|
|
45
119
|
unit: 'gwei',
|
|
46
120
|
valueType: ValueType.DOUBLE
|
|
47
121
|
});
|
|
48
|
-
this.txCount = meter.createUpDownCounter(Metrics.L1_PUBLISHER_TX_COUNT, {
|
|
122
|
+
this.txCount = this.meter.createUpDownCounter(Metrics.L1_PUBLISHER_TX_COUNT, {
|
|
49
123
|
description: 'The number of transactions processed'
|
|
50
124
|
});
|
|
51
|
-
this.txDuration = meter.createHistogram(Metrics.L1_PUBLISHER_TX_DURATION, {
|
|
125
|
+
this.txDuration = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_DURATION, {
|
|
52
126
|
description: 'The duration of transaction processing',
|
|
53
127
|
unit: 'ms',
|
|
54
128
|
valueType: ValueType.INT
|
|
55
129
|
});
|
|
56
|
-
this.txGas = meter.createHistogram(Metrics.L1_PUBLISHER_TX_GAS, {
|
|
130
|
+
this.txGas = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_GAS, {
|
|
57
131
|
description: 'The gas consumed by transactions',
|
|
58
132
|
unit: 'gas',
|
|
59
133
|
valueType: ValueType.INT
|
|
60
134
|
});
|
|
61
|
-
this.txCalldataSize = meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_SIZE, {
|
|
135
|
+
this.txCalldataSize = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_SIZE, {
|
|
62
136
|
description: 'The size of the calldata in transactions',
|
|
63
137
|
unit: 'By',
|
|
64
138
|
valueType: ValueType.INT
|
|
65
139
|
});
|
|
66
|
-
this.txCalldataGas = meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_GAS, {
|
|
140
|
+
this.txCalldataGas = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_GAS, {
|
|
67
141
|
description: 'The gas consumed by the calldata in transactions',
|
|
68
142
|
unit: 'gas',
|
|
69
143
|
valueType: ValueType.INT
|
|
70
144
|
});
|
|
71
|
-
this.txBlobDataGasUsed = meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_USED, {
|
|
145
|
+
this.txBlobDataGasUsed = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_USED, {
|
|
72
146
|
description: 'The amount of blob gas used in transactions',
|
|
73
147
|
unit: 'gas',
|
|
74
148
|
valueType: ValueType.INT
|
|
75
149
|
});
|
|
76
|
-
this.txBlobDataGasCost = meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_COST, {
|
|
150
|
+
this.txBlobDataGasCost = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_COST, {
|
|
77
151
|
description: 'The gas cost of blobs in transactions',
|
|
78
152
|
unit: 'gwei',
|
|
79
153
|
valueType: ValueType.INT
|
|
80
154
|
});
|
|
81
|
-
this.txTotalFee = meter.createHistogram(Metrics.L1_PUBLISHER_TX_TOTAL_FEE, {
|
|
155
|
+
this.txTotalFee = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_TOTAL_FEE, {
|
|
82
156
|
description: 'How much L1 tx costs',
|
|
83
157
|
unit: 'gwei',
|
|
84
158
|
valueType: ValueType.DOUBLE,
|
|
@@ -104,7 +178,7 @@ export class ProverNodeMetrics {
|
|
|
104
178
|
]
|
|
105
179
|
}
|
|
106
180
|
});
|
|
107
|
-
this.senderBalance = meter.createGauge(Metrics.L1_PUBLISHER_BALANCE, {
|
|
181
|
+
this.senderBalance = this.meter.createGauge(Metrics.L1_PUBLISHER_BALANCE, {
|
|
108
182
|
unit: 'eth',
|
|
109
183
|
description: 'The balance of the sender address',
|
|
110
184
|
valueType: ValueType.DOUBLE
|
|
@@ -119,12 +193,6 @@ export class ProverNodeMetrics {
|
|
|
119
193
|
recordSubmitProof(durationMs, stats) {
|
|
120
194
|
this.recordTx(durationMs, stats);
|
|
121
195
|
}
|
|
122
|
-
recordProvingJob(executionTimeMs, totalTimeMs, numBlocks, numTxs) {
|
|
123
|
-
this.proverEpochExecutionDuration.record(Math.ceil(executionTimeMs));
|
|
124
|
-
this.provingJobDuration.record(totalTimeMs / 1000);
|
|
125
|
-
this.provingJobBlocks.record(Math.floor(numBlocks));
|
|
126
|
-
this.provingJobTransactions.record(Math.floor(numTxs));
|
|
127
|
-
}
|
|
128
196
|
recordSenderBalance(wei, senderAddress) {
|
|
129
197
|
const eth = parseFloat(formatEther(wei, 'wei'));
|
|
130
198
|
this.senderBalance.record(eth, {
|
|
@@ -2,7 +2,7 @@ import type { L2BlockSource } from '@aztec/stdlib/block';
|
|
|
2
2
|
import { type L1RollupConstants } from '@aztec/stdlib/epoch-helpers';
|
|
3
3
|
import { type TelemetryClient, type Traceable, type Tracer } from '@aztec/telemetry-client';
|
|
4
4
|
export interface EpochMonitorHandler {
|
|
5
|
-
handleEpochReadyToProve(epochNumber: bigint): Promise<
|
|
5
|
+
handleEpochReadyToProve(epochNumber: bigint): Promise<boolean>;
|
|
6
6
|
}
|
|
7
7
|
/**
|
|
8
8
|
* Fires an event when a new epoch ready to prove is detected.
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"epoch-monitor.d.ts","sourceRoot":"","sources":["../../src/monitors/epoch-monitor.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,KAAK,iBAAiB,EAAkB,MAAM,6BAA6B,CAAC;AACrF,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,SAAS,EACd,KAAK,MAAM,EAGZ,MAAM,yBAAyB,CAAC;AAEjC,MAAM,WAAW,mBAAmB;IAClC,uBAAuB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,
|
|
1
|
+
{"version":3,"file":"epoch-monitor.d.ts","sourceRoot":"","sources":["../../src/monitors/epoch-monitor.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,KAAK,iBAAiB,EAAkB,MAAM,6BAA6B,CAAC;AACrF,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,SAAS,EACd,KAAK,MAAM,EAGZ,MAAM,yBAAyB,CAAC;AAEjC,MAAM,WAAW,mBAAmB;IAClC,uBAAuB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CAChE;AAED;;;;;;;;;;GAUG;AACH,qBAAa,YAAa,YAAW,SAAS;IAS1C,OAAO,CAAC,QAAQ,CAAC,aAAa;IAC9B,OAAO,CAAC,QAAQ,CAAC,WAAW;IAC5B,OAAO,CAAC,OAAO;IAVjB,OAAO,CAAC,cAAc,CAAiB;IACvC,OAAO,CAAC,GAAG,CAA6C;IACxD,SAAgB,MAAM,EAAE,MAAM,CAAC;IAE/B,OAAO,CAAC,OAAO,CAAkC;IACjD,OAAO,CAAC,iBAAiB,CAAqB;gBAG3B,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,EAC9D,OAAO,EAAE;QAAE,iBAAiB,EAAE,MAAM,CAAA;KAAE,EAC9C,SAAS,GAAE,eAAsC;WAM/B,MAAM,CACxB,aAAa,EAAE,aAAa,EAC5B,OAAO,EAAE;QAAE,iBAAiB,EAAE,MAAM,CAAA;KAAE,EACtC,SAAS,GAAE,eAAsC,GAChD,OAAO,CAAC,YAAY,CAAC;IAKjB,KAAK,CAAC,OAAO,EAAE,mBAAmB;IAMzC,0BAA0B;IACnB,UAAU,CAAC,OAAO,EAAE,mBAAmB;IAIjC,IAAI;IAMJ,IAAI;YAwBH,qBAAqB;CAYpC"}
|
|
@@ -53,6 +53,10 @@ import { getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
|
|
|
53
53
|
}
|
|
54
54
|
async work() {
|
|
55
55
|
const { epochToProve, blockNumber, slotNumber } = await this.getEpochNumberToProve();
|
|
56
|
+
this.log.debug(`Epoch to prove: ${epochToProve}`, {
|
|
57
|
+
blockNumber,
|
|
58
|
+
slotNumber
|
|
59
|
+
});
|
|
56
60
|
if (epochToProve === undefined) {
|
|
57
61
|
this.log.trace(`Next block to prove ${blockNumber} not yet mined`, {
|
|
58
62
|
blockNumber
|
|
@@ -77,8 +81,9 @@ import { getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
|
|
|
77
81
|
return;
|
|
78
82
|
}
|
|
79
83
|
this.log.debug(`Epoch ${epochToProve} is ready to be proven`);
|
|
80
|
-
await this.handler?.handleEpochReadyToProve(epochToProve)
|
|
81
|
-
|
|
84
|
+
if (await this.handler?.handleEpochReadyToProve(epochToProve)) {
|
|
85
|
+
this.latestEpochNumber = epochToProve;
|
|
86
|
+
}
|
|
82
87
|
}
|
|
83
88
|
async getEpochNumberToProve() {
|
|
84
89
|
const lastBlockProven = await this.l2BlockSource.getProvenBlockNumber();
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import type { P2P } from '@aztec/p2p';
|
|
2
|
+
import type { P2PClient, ProverCoordination } from '@aztec/stdlib/interfaces/server';
|
|
3
|
+
import { type Tx, TxHash } from '@aztec/stdlib/tx';
|
|
4
|
+
export type CombinedCoordinationOptions = {
|
|
5
|
+
txGatheringBatchSize: number;
|
|
6
|
+
txGatheringMaxParallelRequestsPerNode: number;
|
|
7
|
+
};
|
|
8
|
+
export interface TxSource {
|
|
9
|
+
getTxsByHash(txHashes: TxHash[]): Promise<(Tx | undefined)[]>;
|
|
10
|
+
}
|
|
11
|
+
export declare class CombinedProverCoordination implements ProverCoordination {
|
|
12
|
+
#private;
|
|
13
|
+
readonly p2p: P2P | undefined;
|
|
14
|
+
readonly aztecNodes: TxSource[];
|
|
15
|
+
private readonly options;
|
|
16
|
+
private readonly log;
|
|
17
|
+
constructor(p2p: P2P | undefined, aztecNodes: TxSource[], options?: CombinedCoordinationOptions, log?: import("@aztec/foundation/log").Logger);
|
|
18
|
+
getP2PClient(): P2PClient | undefined;
|
|
19
|
+
getTxsByHash(txHashes: TxHash[]): Promise<Tx[]>;
|
|
20
|
+
gatherTxs(txHashes: TxHash[]): Promise<void>;
|
|
21
|
+
}
|
|
22
|
+
//# sourceMappingURL=combined-prover-coordination.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"combined-prover-coordination.d.ts","sourceRoot":"","sources":["../../src/prover-coordination/combined-prover-coordination.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,iCAAiC,CAAC;AACrF,OAAO,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAEnD,MAAM,MAAM,2BAA2B,GAAG;IAExC,oBAAoB,EAAE,MAAM,CAAC;IAC7B,qCAAqC,EAAE,MAAM,CAAC;CAC/C,CAAC;AASF,MAAM,WAAW,QAAQ;IACvB,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,CAAC,EAAE,GAAG,SAAS,CAAC,EAAE,CAAC,CAAC;CAC/D;AAsCD,qBAAa,0BAA2B,YAAW,kBAAkB;;aAEjD,GAAG,EAAE,GAAG,GAAG,SAAS;aACpB,UAAU,EAAE,QAAQ,EAAE;IACtC,OAAO,CAAC,QAAQ,CAAC,OAAO;IAIxB,OAAO,CAAC,QAAQ,CAAC,GAAG;gBANJ,GAAG,EAAE,GAAG,GAAG,SAAS,EACpB,UAAU,EAAE,QAAQ,EAAE,EACrB,OAAO,GAAE,2BAGzB,EACgB,GAAG,yCAA2D;IAG1E,YAAY,IAAI,SAAS,GAAG,SAAS;IAI/B,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,EAAE,CAAC;IAY/C,SAAS,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CAyE1D"}
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import { asyncPool } from '@aztec/foundation/async-pool';
|
|
2
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
3
|
+
import { TxHash } from '@aztec/stdlib/tx';
|
|
4
|
+
// Wraps the p2p client into a coordination pool
|
|
5
|
+
class P2PCoordinationPool {
|
|
6
|
+
p2p;
|
|
7
|
+
constructor(p2p){
|
|
8
|
+
this.p2p = p2p;
|
|
9
|
+
}
|
|
10
|
+
getTxsByHash(txHashes) {
|
|
11
|
+
return this.p2p.getTxsByHash(txHashes);
|
|
12
|
+
}
|
|
13
|
+
hasTxsInPool(txHashes) {
|
|
14
|
+
return this.p2p.hasTxsInPool(txHashes);
|
|
15
|
+
}
|
|
16
|
+
getTxsByHashFromPool(txHashes) {
|
|
17
|
+
return this.p2p.getTxsByHashFromPool(txHashes);
|
|
18
|
+
}
|
|
19
|
+
addTxs(txs) {
|
|
20
|
+
return this.p2p.addTxs(txs);
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
// Wraps an in memory tx pool into a coordination pool. Used for testing when no p2p/tx pool is available.
|
|
24
|
+
class InMemoryCoordinationPool {
|
|
25
|
+
txs = new Map();
|
|
26
|
+
getTxsByHash(txHashes) {
|
|
27
|
+
return Promise.resolve(txHashes.map((hash)=>this.txs.get(hash.toString())));
|
|
28
|
+
}
|
|
29
|
+
hasTxsInPool(txHashes) {
|
|
30
|
+
return Promise.resolve(txHashes.map((hash)=>this.txs.has(hash.toString())));
|
|
31
|
+
}
|
|
32
|
+
getTxsByHashFromPool(txHashes) {
|
|
33
|
+
return this.getTxsByHash(txHashes);
|
|
34
|
+
}
|
|
35
|
+
async addTxs(txs) {
|
|
36
|
+
const hashes = await Promise.all(txs.map((tx)=>tx.getTxHash()));
|
|
37
|
+
txs.forEach((tx, index)=>this.txs.set(hashes[index].toString(), tx));
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
// Class to implement combined transaction retrieval from p2p and any available nodes
export class CombinedProverCoordination {
    // Optional p2p client. When absent, a throwaway in-memory pool is used per call.
    p2p;
    // List of aztec node clients queried directly for txs before falling back to p2p.
    aztecNodes;
    // Batching/concurrency knobs for node queries.
    options;
    log;
    /**
     * @param p2p - p2p client, or undefined/null to run node-only.
     * @param aztecNodes - node clients to query directly; may be empty.
     * @param options - txGatheringBatchSize: hashes per request to a node;
     *   txGatheringMaxParallelRequestsPerNode: concurrent batches per node.
     * @param log - logger (createLogger is imported elsewhere in this file).
     */
    constructor(p2p, aztecNodes, options = {
        txGatheringBatchSize: 10,
        txGatheringMaxParallelRequestsPerNode: 10
    }, log = createLogger('prover-node:combined-prover-coordination')){
        this.p2p = p2p;
        this.aztecNodes = aztecNodes;
        this.options = options;
        this.log = log;
    }
    // May return undefined when constructed without a p2p client.
    getP2PClient() {
        return this.p2p;
    }
    /**
     * Gathers the requested txs from nodes and p2p, then returns them.
     * Throws if any requested tx could not be found anywhere.
     * NOTE(review): without a p2p client, the pool is a fresh in-memory one created
     * per call, so only txs fetched from nodes during THIS call can be returned.
     */
    async getTxsByHash(txHashes) {
        const pool = this.p2p ? new P2PCoordinationPool(this.p2p) : new InMemoryCoordinationPool();
        await this.#gatherTxs(txHashes, pool);
        // Re-check availability after gathering; fail loudly on anything still missing.
        const availability = await pool.hasTxsInPool(txHashes);
        const notFound = txHashes.filter((_, index)=>!availability[index]);
        if (notFound.length > 0) {
            throw new Error(`Could not find txs: ${notFound.map((tx)=>tx.toString())}`);
        }
        const txs = await pool.getTxsByHashFromPool(txHashes);
        // Defensive filter; after the availability check above, none should be undefined.
        return txs.filter((tx)=>tx !== undefined);
    }
    /** Best-effort prefetch into the pool; does not throw on missing txs. */
    async gatherTxs(txHashes) {
        const pool = this.p2p ? new P2PCoordinationPool(this.p2p) : new InMemoryCoordinationPool();
        await this.#gatherTxs(txHashes, pool);
    }
    /**
     * Core gathering: determine which hashes are missing from the pool, pull them
     * from the configured nodes first, then ask the pool (i.e. the p2p network,
     * when available) for whatever is still missing.
     */
    async #gatherTxs(txHashes, pool) {
        const availability = await pool.hasTxsInPool(txHashes);
        const notFound = txHashes.filter((_, index)=>!availability[index]);
        // Work with string keys so set membership/deletion is by value, not identity.
        const txsToFind = new Set(notFound.map((tx)=>tx.toString()));
        if (txsToFind.size === 0) {
            this.log.info(`Check for ${txHashes.length} txs found all in the pool`);
            return;
        }
        this.log.info(`Check for ${txHashes.length} txs found ${txsToFind.size} missing. Will gather from nodes and p2p`);
        const originalToFind = txsToFind.size;
        // Mutates txsToFind: hashes found on nodes are removed from the set.
        await this.#gatherTxsFromAllNodes(txsToFind, pool);
        if (txsToFind.size === 0) {
            this.log.info(`Found all ${originalToFind} txs directly from nodes`);
            return;
        }
        const toFindFromP2P = txsToFind.size;
        this.log.verbose(`Gathering ${toFindFromP2P} txs from p2p network`);
        const foundFromP2P = await pool.getTxsByHash([
            ...txsToFind
        ].map((tx)=>TxHash.fromString(tx)));
        const numFoundFromNodes = originalToFind - toFindFromP2P;
        // NOTE(review): this assumes getTxsByHash returns only found txs. The
        // InMemoryCoordinationPool variant returns a positionally-aligned array that
        // can contain undefined entries, in which case foundFromP2P.length equals
        // toFindFromP2P and numNotFound undercounts — confirm the p2p client's contract.
        const numNotFound = toFindFromP2P - foundFromP2P.length;
        if (numNotFound === 0) {
            this.log.info(`Found all ${originalToFind} txs. ${numFoundFromNodes} from nodes, ${foundFromP2P.length} from p2p`);
            return;
        }
        this.log.warn(`Failed to find ${numNotFound} txs from any source. Found ${foundFromP2P.length} from p2p and ${numFoundFromNodes} from nodes`);
    }
    /**
     * Queries every configured node in parallel for the missing hashes.
     * All node queries share (and concurrently mutate) the same txsToFind set,
     * so later-finishing nodes naturally skip hashes already found.
     */
    async #gatherTxsFromAllNodes(txsToFind, pool) {
        if (txsToFind.size === 0 || this.aztecNodes.length === 0) {
            return;
        }
        await Promise.all(this.aztecNodes.map((aztecNode)=>this.#gatherTxsFromNode(txsToFind, aztecNode, pool)));
    }
    /**
     * Fetches the missing txs from a single node in batches of
     * txGatheringBatchSize, with at most txGatheringMaxParallelRequestsPerNode
     * batches in flight (via asyncPool, imported elsewhere in this file).
     * Errors per batch are logged and swallowed — gathering is best-effort.
     */
    async #gatherTxsFromNode(txsToFind, aztecNode, pool) {
        const totalTxsRequired = txsToFind.size;
        // It's possible that the set is empty as we already found the txs
        if (totalTxsRequired === 0) {
            return;
        }
        let totalTxsGathered = 0;
        const batches = [];
        // Snapshot the set; other concurrent node fetches may shrink it while we run.
        const allTxs = [
            ...txsToFind
        ];
        while(allTxs.length){
            const batch = allTxs.splice(0, this.options.txGatheringBatchSize);
            batches.push(batch);
        }
        await asyncPool(this.options.txGatheringMaxParallelRequestsPerNode, batches, async (batch)=>{
            try {
                const txs = (await aztecNode.getTxsByHash(batch.map((b)=>TxHash.fromString(b)))).filter((tx)=>!!tx);
                const hashes = await Promise.all(txs.map((tx)=>tx.getTxHash()));
                await pool.addTxs(txs);
                // Remove found hashes so other nodes / the p2p fallback skip them.
                hashes.forEach((hash)=>txsToFind.delete(hash.toString()));
                totalTxsGathered += txs.length;
            } catch (err) {
                this.log.error(`Error gathering txs from aztec node: ${err}`);
                return;
            }
        });
        this.log.verbose(`Gathered ${totalTxsGathered} of ${totalTxsRequired} txs from a node`);
    }
}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { type ConfigMappingsType } from '@aztec/foundation/config';
/** Configuration for prover coordination: which nodes to pull txs from. */
export type ProverCoordinationConfig = {
    // URLs of aztec nodes used as direct tx providers.
    proverCoordinationNodeUrls: string[];
};
/** Env-var mappings for ProverCoordinationConfig (see config.js for parsing). */
export declare const proverCoordinationConfigMappings: ConfigMappingsType<ProverCoordinationConfig>;
/** Builds a ProverCoordinationConfig from the current process environment. */
export declare function getTxProviderConfigFromEnv(): ProverCoordinationConfig;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/prover-coordination/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,kBAAkB,EAAyB,MAAM,0BAA0B,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG;IACrC,
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/prover-coordination/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,kBAAkB,EAAyB,MAAM,0BAA0B,CAAC;AAE1F,MAAM,MAAM,wBAAwB,GAAG;IACrC,0BAA0B,EAAE,MAAM,EAAE,CAAC;CACtC,CAAC;AAEF,eAAO,MAAM,gCAAgC,EAAE,kBAAkB,CAAC,wBAAwB,CAOzF,CAAC;AAEF,wBAAgB,0BAA0B,IAAI,wBAAwB,CAErE"}
|
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
import { getConfigFromMappings } from '@aztec/foundation/config';
|
|
2
2
|
/**
 * Env-var mappings for the prover coordination config.
 * proverCoordinationNodeUrls: comma-separated list of node URLs.
 */
export const proverCoordinationConfigMappings = {
    proverCoordinationNodeUrls: {
        env: 'PROVER_COORDINATION_NODE_URLS',
        description: 'The URLs of the tx provider nodes',
        // Trim whitespace and strip a single trailing slash from each entry, then
        // drop empty entries so an empty or trailing-comma env value (e.g. '' or
        // 'http://a,') never yields '' as a node URL downstream.
        parseEnv: (val)=>val.split(',').map((url)=>url.trim().replace(/\/$/, '')).filter((url)=>url.length > 0),
        defaultValue: []
    }
};
|
|
9
10
|
export function getTxProviderConfigFromEnv() {
|