@aztec/prover-client 0.0.1-commit.b655e406 → 0.0.1-commit.c2595eba
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +1 -1
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +45 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +197 -0
- package/dest/mocks/fixtures.d.ts +1 -4
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +9 -18
- package/dest/mocks/test_context.d.ts +29 -46
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +116 -116
- package/dest/orchestrator/block-building-helpers.d.ts +17 -19
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +88 -113
- package/dest/orchestrator/block-proving-state.d.ts +17 -11
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +81 -20
- package/dest/orchestrator/checkpoint-proving-state.d.ts +22 -9
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +49 -17
- package/dest/orchestrator/epoch-proving-state.d.ts +12 -10
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +38 -4
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +26 -11
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +556 -161
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -15
- package/dest/orchestrator/tx-proving-state.d.ts +6 -5
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +7 -16
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +7 -4
- package/dest/prover-client/server-epoch-prover.d.ts +8 -7
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +2 -2
- package/dest/proving_broker/broker_prover_facade.d.ts +25 -23
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +9 -15
- package/dest/proving_broker/config.d.ts +16 -8
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +14 -2
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +5 -9
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +4 -19
- package/dest/proving_broker/proving_broker.d.ts +2 -2
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +6 -11
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +5 -3
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +392 -3
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +15 -35
- package/dest/proving_broker/proving_job_controller.d.ts +5 -3
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +8 -6
- package/dest/proving_broker/rpc.d.ts +4 -4
- package/dest/test/mock_proof_store.d.ts +1 -1
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +3 -4
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +4 -4
- package/package.json +20 -18
- package/src/config.ts +1 -1
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +284 -0
- package/src/mocks/fixtures.ts +9 -31
- package/src/mocks/test_context.ts +158 -177
- package/src/orchestrator/block-building-helpers.ts +127 -207
- package/src/orchestrator/block-proving-state.ts +103 -25
- package/src/orchestrator/checkpoint-proving-state.ts +71 -21
- package/src/orchestrator/epoch-proving-state.ts +64 -14
- package/src/orchestrator/orchestrator.ts +158 -149
- package/src/orchestrator/orchestrator_metrics.ts +2 -25
- package/src/orchestrator/tx-proving-state.ts +10 -27
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +25 -15
- package/src/prover-client/server-epoch-prover.ts +6 -7
- package/src/proving_broker/broker_prover_facade.ts +31 -37
- package/src/proving_broker/config.ts +17 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proving_agent.ts +6 -19
- package/src/proving_broker/proving_broker.ts +6 -9
- package/src/proving_broker/proving_broker_database/memory.ts +2 -1
- package/src/proving_broker/proving_broker_database/persisted.ts +20 -5
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_broker_instrumentation.ts +14 -35
- package/src/proving_broker/proving_job_controller.ts +13 -7
- package/src/test/mock_prover.ts +2 -14
- package/dest/block-factory/index.d.ts +0 -2
- package/dest/block-factory/index.d.ts.map +0 -1
- package/dest/block-factory/index.js +0 -1
- package/dest/block-factory/light.d.ts +0 -38
- package/dest/block-factory/light.d.ts.map +0 -1
- package/dest/block-factory/light.js +0 -94
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/block-factory/index.ts +0 -1
- package/src/block-factory/light.ts +0 -140
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21

package/src/proving_broker/proving_broker.ts
CHANGED
@@ -1,3 +1,4 @@
+import { EpochNumber } from '@aztec/foundation/branded-types';
 import { createLogger } from '@aztec/foundation/log';
 import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
 import { PriorityMemoryQueue } from '@aztec/foundation/queue';
@@ -15,13 +16,7 @@ import {
   tryStop,
 } from '@aztec/stdlib/interfaces/server';
 import { ProvingRequestType } from '@aztec/stdlib/proofs';
-import {
-  type TelemetryClient,
-  type Traceable,
-  type Tracer,
-  getTelemetryClient,
-  trackSpan,
-} from '@aztec/telemetry-client';
+import { type TelemetryClient, type Traceable, type Tracer, getTelemetryClient } from '@aztec/telemetry-client';

 import assert from 'assert';

@@ -301,6 +296,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Tr
       this.resultsCache.delete(id);
       this.inProgress.delete(id);
       this.retries.delete(id);
+      this.enqueuedAt.delete(id);
     }
   }

@@ -353,6 +349,8 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Tr
     const enqueuedAt = this.enqueuedAt.get(job.id);
     if (enqueuedAt) {
       this.instrumentation.recordJobWait(job.type, enqueuedAt);
+      // we can clear this flag now.
+      this.enqueuedAt.delete(job.id);
     }

     return { job, time };
@@ -561,13 +559,12 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Tr
     return this.#getProvingJob(filter);
   }

-  @trackSpan('ProvingBroker.cleanupPass')
   private async cleanupPass() {
     this.cleanupStaleJobs();
     this.reEnqueueExpiredJobs();
     const oldestEpochToKeep = this.oldestEpochToKeep();
     if (oldestEpochToKeep > 0) {
-      await this.database.deleteAllProvingJobsOlderThanEpoch(oldestEpochToKeep);
+      await this.database.deleteAllProvingJobsOlderThanEpoch(EpochNumber(oldestEpochToKeep));
       this.logger.trace(`Deleted all epochs older than ${oldestEpochToKeep}`);
     }
   }
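
The EpochNumber(oldestEpochToKeep) call above relies on the new @aztec/foundation/branded-types import. That module's definition is not part of this diff; the following is only a hedged sketch of the usual branded-number pattern such an export could follow, so the new call sites read naturally. Everything except the EpochNumber name is illustrative.

// Sketch of a branded numeric type, assuming the common TypeScript pattern;
// not the actual @aztec/foundation/branded-types implementation.
// The brand exists only at the type level: an EpochNumber is a plain number at
// runtime, but the compiler rejects unbranded numbers where one is required.
type EpochNumber = number & { readonly __brand: 'EpochNumber' };

// Constructor-style helper matching call sites such as EpochNumber(oldestEpochToKeep).
function EpochNumber(value: number): EpochNumber {
  return value as EpochNumber;
}

declare function deleteAllProvingJobsOlderThanEpoch(epoch: EpochNumber): Promise<void>;

// deleteAllProvingJobsOlderThanEpoch(3);                // rejected: plain number
void deleteAllProvingJobsOlderThanEpoch(EpochNumber(3)); // accepted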

package/src/proving_broker/proving_broker_database/memory.ts
CHANGED
@@ -1,3 +1,4 @@
+import { EpochNumber } from '@aztec/foundation/branded-types';
 import {
   type ProofUri,
   type ProvingJob,
@@ -43,7 +44,7 @@ export class InMemoryBrokerDatabase implements ProvingBrokerDatabase {
     return Promise.resolve();
   }

-  deleteAllProvingJobsOlderThanEpoch(epochNumber:
+  deleteAllProvingJobsOlderThanEpoch(epochNumber: EpochNumber): Promise<void> {
     const toDelete = [
       ...Array.from(this.jobs.keys()).filter(x => getEpochFromProvingJobId(x) < epochNumber),
       ...Array.from(this.results.keys()).filter(x => getEpochFromProvingJobId(x) < epochNumber),

package/src/proving_broker/proving_broker_database/persisted.ts
CHANGED
@@ -1,3 +1,4 @@
+import { EpochNumber } from '@aztec/foundation/branded-types';
 import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc';
 import { type Logger, createLogger } from '@aztec/foundation/log';
 import { BatchQueue } from '@aztec/foundation/queue';
@@ -10,7 +11,14 @@ import {
   ProvingJobSettledResult,
   getEpochFromProvingJobId,
 } from '@aztec/stdlib/interfaces/server';
-import {
+import {
+  Attributes,
+  LmdbMetrics,
+  type TelemetryClient,
+  type Tracer,
+  getTelemetryClient,
+  trackSpan,
+} from '@aztec/telemetry-client';

 import { mkdir, readdir } from 'fs/promises';
 import { join } from 'path';
@@ -77,6 +85,8 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {

   private batchQueue: BatchQueue<ProvingJob | [ProvingJobId, ProvingJobSettledResult], number>;

+  public readonly tracer: Tracer;
+
   private constructor(
     private epochs: Map<number, SingleEpochDatabase>,
     private config: ProverBrokerConfig,
@@ -91,6 +101,8 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {
       () => this.estimateSize(),
     );

+    this.tracer = client.getTracer('KVBrokerDatabase');
+
     this.batchQueue = new BatchQueue(
       (items, key) => this.commitWrites(items, key),
       config.proverBrokerBatchSize,
@@ -104,7 +116,7 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {
     const jobsToAdd = items.filter((item): item is ProvingJob => 'id' in item);
     const resultsToAdd = items.filter((item): item is [ProvingJobId, ProvingJobSettledResult] => Array.isArray(item));

-    const db = await this.getEpochDatabase(epochNumber);
+    const db = await this.getEpochDatabase(EpochNumber(epochNumber));
     await db.batchWrite(jobsToAdd, resultsToAdd);
   }

@@ -164,8 +176,11 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {
     }
   }

-
-
+  @trackSpan('KVBrokerDatabase.deleteAllProvingJobsOlderThanEpoch', epochNumber => ({
+    [Attributes.EPOCH_NUMBER]: epochNumber,
+  }))
+  async deleteAllProvingJobsOlderThanEpoch(epochNumber: EpochNumber): Promise<void> {
+    const oldEpochs = Array.from(this.epochs.keys()).filter(e => e < Number(epochNumber));
     for (const old of oldEpochs) {
       const db = this.epochs.get(old);
       if (!db) {
@@ -196,7 +211,7 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {
     return this.batchQueue.put([id, { status: 'fulfilled', value }], getEpochFromProvingJobId(id));
   }

-  private async getEpochDatabase(epochNumber:
+  private async getEpochDatabase(epochNumber: EpochNumber): Promise<SingleEpochDatabase> {
     let epochDb = this.epochs.get(epochNumber);
     if (!epochDb) {
       const newEpochDirectory = join(this.config.dataDirectory!, epochNumber.toString());

package/src/proving_broker/proving_broker_database.ts
CHANGED
@@ -1,3 +1,4 @@
+import { EpochNumber } from '@aztec/foundation/branded-types';
 import type { ProofUri, ProvingJob, ProvingJobId, ProvingJobSettledResult } from '@aztec/stdlib/interfaces/server';

 /**
@@ -14,7 +15,7 @@ export interface ProvingBrokerDatabase {
    * Deletes all proving jobs belonging to epochs older than the given epoch
    * @param epochNumber - The epoch number beyond which jobs should be deleted
    */
-  deleteAllProvingJobsOlderThanEpoch(epochNumber:
+  deleteAllProvingJobsOlderThanEpoch(epochNumber: EpochNumber): Promise<void>;

   /**
    * Returns an iterator over all saved proving jobs

package/src/proving_broker/proving_broker_instrumentation.ts
CHANGED
@@ -8,7 +8,7 @@ import {
   type ObservableResult,
   type TelemetryClient,
   type UpDownCounter,
-
+  createUpDownCounterWithDefault,
 } from '@aztec/telemetry-client';

 export type MonitorCallback = (proofType: ProvingRequestType) => number;
@@ -28,49 +28,28 @@ export class ProvingBrokerInstrumentation {
   constructor(client: TelemetryClient, name = 'ProvingBroker') {
     const meter = client.getMeter(name);

-    this.queueSize = meter.createObservableGauge(Metrics.PROVING_QUEUE_SIZE
-      valueType: ValueType.INT,
-    });
+    this.queueSize = meter.createObservableGauge(Metrics.PROVING_QUEUE_SIZE);

-    this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS
-      valueType: ValueType.INT,
-    });
+    this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS);

-
-
-    });
+    const provingJobTypes = Object.values(ProvingRequestType).filter(v => typeof v === 'string');
+    const provingJobAttrs = { [Attributes.PROVING_JOB_TYPE]: provingJobTypes };

-    this.
-      valueType: ValueType.INT,
-    });
+    this.resolvedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_RESOLVED_JOBS, provingJobAttrs);

-    this.
-      valueType: ValueType.INT,
-    });
+    this.rejectedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_REJECTED_JOBS, provingJobAttrs);

-    this.
-      valueType: ValueType.INT,
-    });
+    this.retriedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_RETRIED_JOBS, provingJobAttrs);

-    this.
-      valueType: ValueType.INT,
-    });
+    this.timedOutJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_TIMED_OUT_JOBS, provingJobAttrs);

-    this.
-      valueType: ValueType.INT,
-    });
+    this.cachedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_CACHED_JOBS, provingJobAttrs);

-    this.
-      description: 'Records how long a job sits in the queue',
-      unit: 'ms',
-      valueType: ValueType.INT,
-    });
+    this.totalJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_TOTAL_JOBS, provingJobAttrs);

-    this.
-
-
-      valueType: ValueType.INT,
-    });
+    this.jobWait = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_WAIT);
+
+    this.jobDuration = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_DURATION);
   }

   monitorQueueDepth(fn: MonitorCallback) {
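
Two details of the rewritten constructor above are easy to miss. Object.values on a numeric TypeScript enum returns both the names and the numeric values (the reverse mapping), so the typeof v === 'string' filter keeps only the proving-job-type names. And createUpDownCounterWithDefault is a new export of @aztec/telemetry-client whose implementation is not shown in this diff; judging only from these call sites, it plausibly creates the counter and seeds a zero per listed attribute value so that every per-job-type series exists from startup. A hypothetical sketch over an OpenTelemetry-style meter:

// Hypothetical sketch only; the real helper lives in @aztec/telemetry-client and may differ.
import type { Attributes, Meter, UpDownCounter } from '@opentelemetry/api';

function createUpDownCounterWithDefault(
  meter: Meter,
  name: string,
  defaults: Record<string, (string | number)[]>,
): UpDownCounter {
  const counter = meter.createUpDownCounter(name);
  // Record an initial 0 for every default attribute value so the series show up
  // on dashboards before any job has been processed.
  for (const [attribute, values] of Object.entries(defaults)) {
    for (const value of values) {
      counter.add(0, { [attribute]: value } as Attributes);
    }
  }
  return counter;
}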

package/src/proving_broker/proving_job_controller.ts
CHANGED
@@ -1,6 +1,7 @@
-import {
+import { EpochNumber } from '@aztec/foundation/branded-types';
+import { randomBytes } from '@aztec/foundation/crypto/random';
 import { AbortError } from '@aztec/foundation/error';
-import { createLogger } from '@aztec/foundation/log';
+import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
 import type {
   ProvingJobId,
   ProvingJobInputs,
@@ -20,16 +21,22 @@ export class ProvingJobController {
   private promise?: Promise<void>;
   private abortController = new AbortController();
   private result?: ProvingJobResultsMap[ProvingRequestType] | Error;
+  private log: Logger;

   constructor(
     private jobId: ProvingJobId,
     private inputs: ProvingJobInputs,
-    private epochNumber:
+    private epochNumber: EpochNumber,
     private startedAt: number,
     private circuitProver: ServerCircuitProver,
     private onComplete: () => void,
-
-  ) {
+    bindings?: LoggerBindings,
+  ) {
+    this.log = createLogger('prover-client:proving-agent:job-controller', {
+      instanceId: randomBytes(4).toString('hex'),
+      ...bindings,
+    });
+  }

   public start(): void {
     if (this.status !== ProvingJobControllerStatus.IDLE) {
@@ -124,8 +131,7 @@
     const signal = this.abortController.signal;
     switch (type) {
       case ProvingRequestType.PUBLIC_VM: {
-
-        return await this.circuitProver.getAvmProof(inputs, undefined, signal, this.epochNumber);
+        return await this.circuitProver.getAvmProof(inputs, signal, this.epochNumber);
       }

       case ProvingRequestType.PUBLIC_CHONK_VERIFIER: {
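
For orientation, this is roughly how the new constructor shape could be exercised. The parameter order follows the diff; the placeholder values, the import paths, and the exact shape accepted by LoggerBindings are assumptions, not code from the repository.

// Hedged usage sketch, not taken from the package's tests.
import { EpochNumber } from '@aztec/foundation/branded-types';
import type { ProvingJobId, ProvingJobInputs, ServerCircuitProver } from '@aztec/stdlib/interfaces/server';
import { ProvingJobController } from './proving_job_controller.js'; // assumed path

declare const jobId: ProvingJobId;
declare const inputs: ProvingJobInputs;
declare const prover: ServerCircuitProver;

const controller = new ProvingJobController(
  jobId,
  inputs,
  EpochNumber(42),
  Date.now(),
  prover,
  () => {},
  { agentId: 'agent-1' }, // assumed LoggerBindings shape; merged into the controller's child logger
);
controller.start();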

package/src/test/mock_prover.ts
CHANGED
@@ -1,6 +1,5 @@
 import {
   AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED,
-  AVM_V2_VERIFICATION_KEY_LENGTH_IN_FIELDS_PADDED,
   NESTED_RECURSIVE_PROOF_LENGTH,
   NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   RECURSIVE_PROOF_LENGTH,
@@ -14,7 +13,6 @@ import {
   type ProvingJobStatus,
   type PublicInputsAndRecursiveProof,
   type ServerCircuitProver,
-  makeProofAndVerificationKey,
   makePublicInputsAndRecursiveProof,
 } from '@aztec/stdlib/interfaces/server';
 import type { ParityBasePrivateInputs, ParityRootPrivateInputs } from '@aztec/stdlib/parity';
@@ -105,18 +103,8 @@ export class TestBroker implements ProvingJobProducer {
 export class MockProver implements ServerCircuitProver {
   constructor() {}

-  getAvmProof(
-
-    _skipPublicInputsValidation?: boolean, // TODO(#14234)[Unconditional PIs validation]: Remove.
-    _signal?: AbortSignal,
-    _epochNumber?: number,
-  ) {
-    return Promise.resolve(
-      makeProofAndVerificationKey(
-        makeEmptyRecursiveProof(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED),
-        VerificationKeyData.makeFake(AVM_V2_VERIFICATION_KEY_LENGTH_IN_FIELDS_PADDED),
-      ),
-    );
+  getAvmProof(_inputs: AvmCircuitInputs, _signal?: AbortSignal, _epochNumber?: number) {
+    return Promise.resolve(makeEmptyRecursiveProof(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED));
   }

   getBaseParityProof(_inputs: ParityBasePrivateInputs, _signal?: AbortSignal, _epochNumber?: number) {

package/dest/block-factory/index.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/block-factory/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"}

package/dest/block-factory/index.js
DELETED
@@ -1 +0,0 @@
-export * from './light.js';

package/dest/block-factory/light.d.ts
DELETED
@@ -1,38 +0,0 @@
-import { SpongeBlob } from '@aztec/blob-lib';
-import { Fr } from '@aztec/foundation/fields';
-import { L2Block } from '@aztec/stdlib/block';
-import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
-import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
-import { type TelemetryClient } from '@aztec/telemetry-client';
-/**
- * Builds a block and its header from a set of processed tx without running any circuits.
- *
- * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
- * PRIOR to calling `buildBlock`.
- *
- * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
- * PublicProcessor which will do this for you as it processes transactions.
- *
- * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
- * function `buildBlockWithCleanDB`, which calls `insertSideEffectsAndBuildBaseRollupHints` for you.
- */
-export declare class LightweightBlockFactory implements IBlockFactory {
-    private db;
-    private telemetry;
-    private globalVariables?;
-    private l1ToL2Messages?;
-    private startSpongeBlob?;
-    private txs;
-    private readonly logger;
-    constructor(db: MerkleTreeWriteOperations, telemetry?: TelemetryClient);
-    startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[], startSpongeBlob?: SpongeBlob, isFirstBlock?: boolean): Promise<void>;
-    addTxs(txs: ProcessedTx[]): Promise<void>;
-    setBlockCompleted(): Promise<L2Block>;
-    private buildBlock;
-}
-/**
- * Inserts the processed transactions into the DB, then creates a block.
- * @param db - A db fork to use for block building which WILL BE MODIFIED.
- */
-export declare function buildBlockWithCleanDB(txs: ProcessedTx[], globalVariables: GlobalVariables, l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations, startSpongeBlob?: SpongeBlob, isFirstBlock?: boolean, telemetry?: TelemetryClient): Promise<L2Block>;
-//# sourceMappingURL=light.d.ts.map

package/dest/block-factory/light.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"light.d.ts","sourceRoot":"","sources":["../../src/block-factory/light.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAG7C,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAE9C,OAAO,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAC9C,OAAO,KAAK,EAAE,aAAa,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAEhG,OAAO,KAAK,EAAE,eAAe,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AACrE,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,yBAAyB,CAAC;AAQnF;;;;;;;;;;;GAWG;AACH,qBAAa,uBAAwB,YAAW,aAAa;IASzD,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,SAAS;IATnB,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,cAAc,CAAC,CAAO;IAC9B,OAAO,CAAC,eAAe,CAAC,CAAa;IACrC,OAAO,CAAC,GAAG,CAA4B;IAEvC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA6C;gBAG1D,EAAE,EAAE,yBAAyB,EAC7B,SAAS,GAAE,eAAsC;IAGrD,aAAa,CACjB,eAAe,EAAE,eAAe,EAChC,cAAc,EAAE,EAAE,EAAE,EAEpB,eAAe,CAAC,EAAE,UAAU,EAE5B,YAAY,UAAO,GAClB,OAAO,CAAC,IAAI,CAAC;IAYhB,MAAM,CAAC,GAAG,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAWzC,iBAAiB,IAAI,OAAO,CAAC,OAAO,CAAC;YAIvB,UAAU;CA2BzB;AAED;;;GAGG;AACH,wBAAsB,qBAAqB,CACzC,GAAG,EAAE,WAAW,EAAE,EAClB,eAAe,EAAE,eAAe,EAChC,cAAc,EAAE,EAAE,EAAE,EACpB,EAAE,EAAE,yBAAyB,EAC7B,eAAe,CAAC,EAAE,UAAU,EAC5B,YAAY,UAAO,EACnB,SAAS,GAAE,eAAsC,oBAuBlD"}

package/dest/block-factory/light.js
DELETED
@@ -1,94 +0,0 @@
-import { SpongeBlob } from '@aztec/blob-lib';
-import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
-import { padArrayEnd } from '@aztec/foundation/collection';
-import { Fr } from '@aztec/foundation/fields';
-import { createLogger } from '@aztec/foundation/log';
-import { L2Block } from '@aztec/stdlib/block';
-import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { getTelemetryClient } from '@aztec/telemetry-client';
-import { buildHeaderAndBodyFromTxs, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints } from '../orchestrator/block-building-helpers.js';
-/**
- * Builds a block and its header from a set of processed tx without running any circuits.
- *
- * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
- * PRIOR to calling `buildBlock`.
- *
- * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
- * PublicProcessor which will do this for you as it processes transactions.
- *
- * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
- * function `buildBlockWithCleanDB`, which calls `insertSideEffectsAndBuildBaseRollupHints` for you.
- */ export class LightweightBlockFactory {
-    db;
-    telemetry;
-    globalVariables;
-    l1ToL2Messages;
-    startSpongeBlob;
-    txs;
-    logger;
-    constructor(db, telemetry = getTelemetryClient()){
-        this.db = db;
-        this.telemetry = telemetry;
-        this.logger = createLogger('lightweight-block-factory');
-    }
-    async startNewBlock(globalVariables, l1ToL2Messages, // Must be provided to generate the correct spongeBlobHash for the block header if there's more than one block in the checkpoint.
-    startSpongeBlob, // Only insert l1 to l2 messages for the first block in a checkpoint.
-    isFirstBlock = true) {
-        this.logger.debug('Starting new block', {
-            globalVariables: globalVariables.toInspect(),
-            l1ToL2Messages
-        });
-        this.globalVariables = globalVariables;
-        this.l1ToL2Messages = isFirstBlock ? padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP) : [];
-        this.startSpongeBlob = startSpongeBlob;
-        this.txs = undefined;
-        // Update L1 to L2 tree
-        await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages);
-    }
-    addTxs(txs) {
-        // Most times, `addTxs` is only called once per block.
-        // So avoid copies.
-        if (this.txs === undefined) {
-            this.txs = txs;
-        } else {
-            this.txs.push(...txs);
-        }
-        return Promise.resolve();
-    }
-    setBlockCompleted() {
-        return this.buildBlock();
-    }
-    async buildBlock() {
-        const { header, body } = await buildHeaderAndBodyFromTxs(this.txs ?? [], this.globalVariables, this.l1ToL2Messages, this.db, this.startSpongeBlob);
-        header.state.validate();
-        const blockHeader = header.toBlockHeader();
-        await this.db.updateArchive(blockHeader);
-        const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-        const block = new L2Block(newArchive, header, body);
-        this.logger.debug(`Built block ${block.number}`, {
-            globalVariables: this.globalVariables?.toInspect(),
-            archiveRoot: newArchive.root.toString(),
-            stateReference: header.state.toInspect(),
-            blockHash: (await block.hash()).toString(),
-            txs: block.body.txEffects.map((tx)=>tx.txHash.toString())
-        });
-        return block;
-    }
-}
-/**
- * Inserts the processed transactions into the DB, then creates a block.
- * @param db - A db fork to use for block building which WILL BE MODIFIED.
- */ export async function buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, db, startSpongeBlob, isFirstBlock = true, telemetry = getTelemetryClient()) {
-    const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const builder = new LightweightBlockFactory(db, telemetry);
-    await builder.startNewBlock(globalVariables, l1ToL2Messages, startSpongeBlob, isFirstBlock);
-    const l1ToL2MessageTree = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
-    for (const tx of txs){
-        // startSpongeBlob and proverId are only used for constructing private inputs of the base rollup.
-        // Their values don't matter here because we are not using the return private inputs to build the block.
-        const proverId = Fr.ZERO;
-        await insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, l1ToL2MessageTree, startSpongeBlob?.clone() ?? SpongeBlob.empty(), proverId, db);
-    }
-    await builder.addTxs(txs);
-    return await builder.setBlockCompleted();
-}

package/dest/proving_broker/proving_agent_instrumentation.d.ts
DELETED
@@ -1,8 +0,0 @@
-import type { Timer } from '@aztec/foundation/timer';
-import { type TelemetryClient } from '@aztec/telemetry-client';
-export declare class ProvingAgentInstrumentation {
-    private idleTime;
-    constructor(client: TelemetryClient, name?: string);
-    recordIdleTime(msOrTimer: Timer | number): void;
-}
-//# sourceMappingURL=proving_agent_instrumentation.d.ts.map

package/dest/proving_broker/proving_agent_instrumentation.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"proving_agent_instrumentation.d.ts","sourceRoot":"","sources":["../../src/proving_broker/proving_agent_instrumentation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,EAA2B,KAAK,eAAe,EAAa,MAAM,yBAAyB,CAAC;AAEnG,qBAAa,2BAA2B;IACtC,OAAO,CAAC,QAAQ,CAAY;gBAEhB,MAAM,EAAE,eAAe,EAAE,IAAI,SAAiB;IAU1D,cAAc,CAAC,SAAS,EAAE,KAAK,GAAG,MAAM;CAIzC"}

package/dest/proving_broker/proving_agent_instrumentation.js
DELETED
@@ -1,16 +0,0 @@
-import { Metrics, ValueType } from '@aztec/telemetry-client';
-export class ProvingAgentInstrumentation {
-    idleTime;
-    constructor(client, name = 'ProvingAgent'){
-        const meter = client.getMeter(name);
-        this.idleTime = meter.createHistogram(Metrics.PROVING_AGENT_IDLE, {
-            description: 'Records how long an agent was idle',
-            unit: 's',
-            valueType: ValueType.DOUBLE
-        });
-    }
-    recordIdleTime(msOrTimer) {
-        const duration = typeof msOrTimer === 'number' ? msOrTimer : msOrTimer.ms();
-        this.idleTime.record(duration / 1000);
-    }
-}

package/src/block-factory/index.ts
DELETED
@@ -1 +0,0 @@
-export * from './light.js';

package/src/block-factory/light.ts
DELETED
@@ -1,140 +0,0 @@
-import { SpongeBlob } from '@aztec/blob-lib';
-import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
-import { padArrayEnd } from '@aztec/foundation/collection';
-import { Fr } from '@aztec/foundation/fields';
-import { createLogger } from '@aztec/foundation/log';
-import { L2Block } from '@aztec/stdlib/block';
-import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
-import { MerkleTreeId } from '@aztec/stdlib/trees';
-import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
-import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
-
-import {
-  buildHeaderAndBodyFromTxs,
-  getTreeSnapshot,
-  insertSideEffectsAndBuildBaseRollupHints,
-} from '../orchestrator/block-building-helpers.js';
-
-/**
- * Builds a block and its header from a set of processed tx without running any circuits.
- *
- * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
- * PRIOR to calling `buildBlock`.
- *
- * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
- * PublicProcessor which will do this for you as it processes transactions.
- *
- * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
- * function `buildBlockWithCleanDB`, which calls `insertSideEffectsAndBuildBaseRollupHints` for you.
- */
-export class LightweightBlockFactory implements IBlockFactory {
-  private globalVariables?: GlobalVariables;
-  private l1ToL2Messages?: Fr[];
-  private startSpongeBlob?: SpongeBlob;
-  private txs: ProcessedTx[] | undefined;
-
-  private readonly logger = createLogger('lightweight-block-factory');
-
-  constructor(
-    private db: MerkleTreeWriteOperations,
-    private telemetry: TelemetryClient = getTelemetryClient(),
-  ) {}
-
-  async startNewBlock(
-    globalVariables: GlobalVariables,
-    l1ToL2Messages: Fr[],
-    // Must be provided to generate the correct spongeBlobHash for the block header if there's more than one block in the checkpoint.
-    startSpongeBlob?: SpongeBlob,
-    // Only insert l1 to l2 messages for the first block in a checkpoint.
-    isFirstBlock = true,
-  ): Promise<void> {
-    this.logger.debug('Starting new block', { globalVariables: globalVariables.toInspect(), l1ToL2Messages });
-    this.globalVariables = globalVariables;
-    this.l1ToL2Messages = isFirstBlock
-      ? padArrayEnd<Fr, number>(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP)
-      : [];
-    this.startSpongeBlob = startSpongeBlob;
-    this.txs = undefined;
-    // Update L1 to L2 tree
-    await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!);
-  }
-
-  addTxs(txs: ProcessedTx[]): Promise<void> {
-    // Most times, `addTxs` is only called once per block.
-    // So avoid copies.
-    if (this.txs === undefined) {
-      this.txs = txs;
-    } else {
-      this.txs.push(...txs);
-    }
-    return Promise.resolve();
-  }
-
-  setBlockCompleted(): Promise<L2Block> {
-    return this.buildBlock();
-  }
-
-  private async buildBlock(): Promise<L2Block> {
-    const { header, body } = await buildHeaderAndBodyFromTxs(
-      this.txs ?? [],
-      this.globalVariables!,
-      this.l1ToL2Messages!,
-      this.db,
-      this.startSpongeBlob,
-    );
-
-    header.state.validate();
-
-    const blockHeader = header.toBlockHeader();
-    await this.db.updateArchive(blockHeader);
-    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-
-    const block = new L2Block(newArchive, header, body);
-
-    this.logger.debug(`Built block ${block.number}`, {
-      globalVariables: this.globalVariables?.toInspect(),
-      archiveRoot: newArchive.root.toString(),
-      stateReference: header.state.toInspect(),
-      blockHash: (await block.hash()).toString(),
-      txs: block.body.txEffects.map(tx => tx.txHash.toString()),
-    });
-
-    return block;
-  }
-}
-
-/**
- * Inserts the processed transactions into the DB, then creates a block.
- * @param db - A db fork to use for block building which WILL BE MODIFIED.
- */
-export async function buildBlockWithCleanDB(
-  txs: ProcessedTx[],
-  globalVariables: GlobalVariables,
-  l1ToL2Messages: Fr[],
-  db: MerkleTreeWriteOperations,
-  startSpongeBlob?: SpongeBlob,
-  isFirstBlock = true,
-  telemetry: TelemetryClient = getTelemetryClient(),
-) {
-  const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-  const builder = new LightweightBlockFactory(db, telemetry);
-  await builder.startNewBlock(globalVariables, l1ToL2Messages, startSpongeBlob, isFirstBlock);
-  const l1ToL2MessageTree = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
-
-  for (const tx of txs) {
-    // startSpongeBlob and proverId are only used for constructing private inputs of the base rollup.
-    // Their values don't matter here because we are not using the return private inputs to build the block.
-    const proverId = Fr.ZERO;
-    await insertSideEffectsAndBuildBaseRollupHints(
-      tx,
-      lastArchive,
-      l1ToL2MessageTree,
-      startSpongeBlob?.clone() ?? SpongeBlob.empty(),
-      proverId,
-      db,
-    );
-  }
-  await builder.addTxs(txs);
-
-  return await builder.setBlockCompleted();
-}

package/src/proving_broker/proving_agent_instrumentation.ts
DELETED
@@ -1,21 +0,0 @@
-import type { Timer } from '@aztec/foundation/timer';
-import { type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client';
-
-export class ProvingAgentInstrumentation {
-  private idleTime: Histogram;
-
-  constructor(client: TelemetryClient, name = 'ProvingAgent') {
-    const meter = client.getMeter(name);
-
-    this.idleTime = meter.createHistogram(Metrics.PROVING_AGENT_IDLE, {
-      description: 'Records how long an agent was idle',
-      unit: 's',
-      valueType: ValueType.DOUBLE,
-    });
-  }
-
-  recordIdleTime(msOrTimer: Timer | number) {
-    const duration = typeof msOrTimer === 'number' ? msOrTimer : msOrTimer.ms();
-    this.idleTime.record(duration / 1000);
-  }
-}