@aztec/prover-client 0.69.0-devnet → 0.69.1-devnet
This diff shows the changes between two publicly released versions of the package, as published to its public registry. It is provided for informational purposes only.
- package/dest/block_builder/light.d.ts +0 -1
- package/dest/block_builder/light.d.ts.map +1 -1
- package/dest/block_builder/light.js +4 -14
- package/dest/config.d.ts +2 -1
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +3 -2
- package/dest/mocks/test_context.d.ts +2 -2
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +6 -6
- package/dest/orchestrator/block-building-helpers.d.ts +10 -25
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +33 -44
- package/dest/orchestrator/block-proving-state.d.ts +40 -44
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +149 -85
- package/dest/orchestrator/epoch-proving-state.d.ts +23 -30
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +92 -65
- package/dest/orchestrator/orchestrator.d.ts +16 -47
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +204 -341
- package/dest/orchestrator/tx-proving-state.d.ts +10 -6
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +57 -46
- package/dest/prover-agent/memory-proving-queue.d.ts +3 -3
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -1
- package/dest/prover-agent/memory-proving-queue.js +4 -4
- package/dest/prover-agent/prover-agent.js +4 -4
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +5 -2
- package/dest/prover-client/server-epoch-prover.d.ts +25 -0
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -0
- package/dest/prover-client/server-epoch-prover.js +40 -0
- package/dest/proving_broker/broker_prover_facade.d.ts +15 -4
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +247 -44
- package/dest/proving_broker/config.d.ts +61 -0
- package/dest/proving_broker/config.d.ts.map +1 -0
- package/dest/proving_broker/config.js +83 -0
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +2 -5
- package/dest/proving_broker/fixtures.d.ts +5 -0
- package/dest/proving_broker/fixtures.d.ts.map +1 -0
- package/dest/proving_broker/fixtures.js +12 -0
- package/dest/proving_broker/index.d.ts +1 -0
- package/dest/proving_broker/index.d.ts.map +1 -1
- package/dest/proving_broker/index.js +2 -1
- package/dest/proving_broker/proving_broker.d.ts +16 -12
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +306 -273
- package/dest/proving_broker/proving_broker_database/memory.d.ts +4 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +17 -4
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +10 -6
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +105 -13
- package/dest/proving_broker/proving_broker_database.d.ts +7 -3
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +4 -4
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +4 -4
- package/dest/test/mock_prover.d.ts +6 -6
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +6 -6
- package/package.json +11 -11
- package/src/block_builder/light.ts +3 -21
- package/src/config.ts +4 -4
- package/src/mocks/test_context.ts +3 -6
- package/src/orchestrator/block-building-helpers.ts +44 -118
- package/src/orchestrator/block-proving-state.ts +251 -121
- package/src/orchestrator/epoch-proving-state.ts +159 -88
- package/src/orchestrator/orchestrator.ts +251 -527
- package/src/orchestrator/tx-proving-state.ts +35 -19
- package/src/prover-agent/memory-proving-queue.ts +11 -12
- package/src/prover-agent/prover-agent.ts +4 -4
- package/src/prover-client/prover-client.ts +4 -6
- package/src/prover-client/server-epoch-prover.ts +44 -0
- package/src/proving_broker/broker_prover_facade.ts +321 -61
- package/src/proving_broker/config.ts +93 -0
- package/src/proving_broker/factory.ts +2 -5
- package/src/proving_broker/fixtures.ts +14 -0
- package/src/proving_broker/index.ts +1 -0
- package/src/proving_broker/proving_broker.ts +114 -71
- package/src/proving_broker/proving_broker_database/memory.ts +24 -4
- package/src/proving_broker/proving_broker_database/persisted.ts +141 -19
- package/src/proving_broker/proving_broker_database.ts +8 -3
- package/src/proving_broker/proving_job_controller.ts +5 -5
- package/src/proving_broker/rpc.ts +2 -3
- package/src/test/mock_prover.ts +9 -11

package/src/proving_broker/broker_prover_facade.ts

@@ -4,19 +4,19 @@ import {
   type ProvingJobInputsMap,
   type ProvingJobProducer,
   type ProvingJobResultsMap,
+  type ProvingJobStatus,
   ProvingRequestType,
   type PublicInputsAndRecursiveProof,
   type ServerCircuitProver,
+  makeProvingJobId,
 } from '@aztec/circuit-types';
 import {
   type AVM_PROOF_LENGTH_IN_FIELDS,
   type AvmCircuitInputs,
   type BaseParityInputs,
-  type KernelCircuitPublicInputs,
   type NESTED_RECURSIVE_PROOF_LENGTH,
   type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   type ParityPublicInputs,
-  type PrivateKernelEmptyInputData,
   type RECURSIVE_PROOF_LENGTH,
   type RootParityInputs,
   type TUBE_PROOF_LENGTH,
@@ -32,11 +32,13 @@ import {
   type PublicBaseRollupInputs,
   type RootRollupInputs,
   type RootRollupPublicInputs,
+  type SingleTxBlockRootRollupInputs,
   type TubeInputs,
 } from '@aztec/circuits.js/rollup';
 import { sha256 } from '@aztec/foundation/crypto';
 import { createLogger } from '@aztec/foundation/log';
-import {
+import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
+import { SerialQueue } from '@aztec/foundation/queue';
 import { truncate } from '@aztec/foundation/string';

 import { InlineProofStore, type ProofStore } from './proof_store.js';
@@ -44,7 +46,27 @@ import { InlineProofStore, type ProofStore } from './proof_store.js';
 // 20 minutes, roughly the length of an Aztec epoch. If a proof isn't ready in this amount of time then we've failed to prove the whole epoch
 const MAX_WAIT_MS = 1_200_000;

+// Perform a snapshot sync every 30 seconds
+const SNAPSHOT_SYNC_INTERVAL_MS = 30_000;
+
+const MAX_CONCURRENT_JOB_SETTLED_REQUESTS = 10;
+const SNAPSHOT_SYNC_CHECK_MAX_REQUEST_SIZE = 1000;
+
+type ProvingJob = {
+  id: ProvingJobId;
+  type: ProvingRequestType;
+  promise: PromiseWithResolvers<any>;
+  abortFn?: () => Promise<void>;
+  signal?: AbortSignal;
+};
+
 export class BrokerCircuitProverFacade implements ServerCircuitProver {
+  private jobs: Map<ProvingJobId, ProvingJob> = new Map();
+  private runningPromise?: RunningPromise;
+  private timeOfLastSnapshotSync = Date.now();
+  private queue: SerialQueue = new SerialQueue();
+  private jobsToRetrieve: Set<ProvingJobId> = new Set();
+
   constructor(
     private broker: ProvingJobProducer,
     private proofStore: ProofStore = new InlineProofStore(),
@@ -53,77 +75,315 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
     private log = createLogger('prover-client:broker-circuit-prover-facade'),
   ) {}

-  private
+  private enqueueJob<T extends ProvingRequestType>(
     id: ProvingJobId,
     type: T,
     inputs: ProvingJobInputsMap[T],
     epochNumber = 0,
     signal?: AbortSignal,
   ): Promise<ProvingJobResultsMap[T]> {
+    if (!this.queue) {
+      throw new Error('BrokerCircuitProverFacade not started');
+    }
+    return this.queue!.put(() => this._enqueueJob(id, type, inputs, epochNumber, signal)).then(
+      ({ enqueuedPromise }) => enqueuedPromise,
+    );
+  }
+
+  private async _enqueueJob<T extends ProvingRequestType>(
+    id: ProvingJobId,
+    type: T,
+    inputs: ProvingJobInputsMap[T],
+    epochNumber = 0,
+    signal?: AbortSignal,
+  ): Promise<{ enqueuedPromise: Promise<ProvingJobResultsMap[T]> }> {
+    // Check if there is already a promise for this job
+    const existingPromise = this.jobs.get(id);
+    if (existingPromise) {
+      this.log.verbose(`Job already found in facade id=${id} type=${ProvingRequestType[type]}`, {
+        provingJobId: id,
+        provingJobType: ProvingRequestType[type],
+        epochNumber,
+      });
+      return { enqueuedPromise: existingPromise.promise.promise as Promise<ProvingJobResultsMap[T]> };
+    }
     const inputsUri = await this.proofStore.saveProofInput(id, type, inputs);
-    await this.broker.enqueueProvingJob({
+    const jobStatus = await this.broker.enqueueProvingJob({
       id,
       type,
       inputsUri,
       epochNumber,
     });

-    this
-
-
-      provingJobId: id,
-      provingJobType: ProvingRequestType[type],
-      epochNumber,
-      inputsUri: truncate(inputsUri),
-    },
-  );
-
-    // notify broker of cancelled job
+    // Create a promise for this job id, regardless of whether it was enqueued at the broker
+    // The running promise will monitor for the job to be completed and resolve it either way
+    const promise = promiseWithResolvers<ProvingJobResultsMap[T]>();
     const abortFn = async () => {
       signal?.removeEventListener('abort', abortFn);
       await this.broker.cancelProvingJob(id);
     };
+    const job: ProvingJob = {
+      id,
+      type,
+      promise,
+      abortFn,
+      signal,
+    };
+    this.jobs.set(id, job);
+
+    // If we are here then the job was successfully accepted by the broker
+    // the returned status is for before any action was performed
+    if (jobStatus.status === 'not-found') {
+      // Job added for the first time
+      // notify the broker if job is aborted
+      signal?.addEventListener('abort', abortFn);
+
+      this.log.verbose(
+        `Job enqueued with broker id=${id} type=${ProvingRequestType[type]} epochNumber=${epochNumber}`,
+        {
+          provingJobId: id,
+          provingJobType: ProvingRequestType[type],
+          epochNumber,
+          inputsUri: truncate(inputsUri),
+          numOutstandingJobs: this.jobs.size,
+        },
+      );
+    } else if (jobStatus.status === 'fulfilled' || jobStatus.status === 'rejected') {
+      // Job was already completed by the broker
+      // No need to notify the broker on aborted job
+      job.abortFn = undefined;
+      this.log.verbose(
+        `Job already completed when sent to broker id=${id} type=${ProvingRequestType[type]} epochNumber=${epochNumber}`,
+        {
+          provingJobId: id,
+          provingJobType: ProvingRequestType[type],
+          epochNumber,
+          inputsUri: truncate(inputsUri),
+        },
+      );

-
-
-
-      //
-      //
-
-
-
-
-
-
-
-
+      // Job was not enqueued. It must be completed already, add to our set of already completed jobs
+      this.jobsToRetrieve.add(id);
+    } else {
+      // Job was previously sent to the broker but is not completed
+      // notify the broker if job is aborted
+      signal?.addEventListener('abort', abortFn);
+      this.log.verbose(
+        `Job already in queue or in progress when sent to broker id=${id} type=${ProvingRequestType[type]} epochNumber=${epochNumber}`,
+        {
+          provingJobId: id,
+          provingJobType: ProvingRequestType[type],
+          epochNumber,
+          inputsUri: truncate(inputsUri),
         },
-      `Proving job=${id} type=${ProvingRequestType[type]}`,
-      this.waitTimeoutMs / 1000,
-      this.pollIntervalMs / 1000,
       );
+    }
+    const typedPromise = promise.promise as Promise<ProvingJobResultsMap[T]>;
+    return { enqueuedPromise: typedPromise };
+  }
+
+  public start() {
+    if (this.runningPromise) {
+      throw new Error('BrokerCircuitProverFacade already started');
+    }
+
+    this.log.verbose('Starting BrokerCircuitProverFacade');
+
+    this.runningPromise = new RunningPromise(() => this.monitorForCompletedJobs(), this.log, this.pollIntervalMs);
+    this.runningPromise.start();
+
+    this.queue = new SerialQueue();
+    this.queue.start();
+  }
+
+  public async stop(): Promise<void> {
+    if (!this.runningPromise) {
+      throw new Error('BrokerCircuitProverFacade not started');
+    }
+    this.log.verbose('Stopping BrokerCircuitProverFacade');
+    await this.runningPromise.stop();
+
+    if (this.queue) {
+      await this.queue.cancel();
+      await this.queue.end();
+    }
+
+    // Reject any outstanding promises as stopped
+    for (const [_, v] of this.jobs) {
+      v.promise.reject(new Error('Broker facade stopped'));
+    }
+    this.jobs.clear();
+  }
+
+  private async updateCompletedJobs() {
+    // Here we check for completed jobs. If everything works well (there are no service restarts etc) then all we need to do
+    // to maintain correct job state is to check for incrementally completed jobs. i.e. call getCompletedJobs with an empty array
+    // However, if there are any problems then we may lose sync with the broker's actual set of completed jobs.
+    // In this case we need to perform a full snapshot sync. This involves sending all of our outstanding job Ids to the broker
+    // and have the broker report on whether they are completed or not.
+    // We perform an incremental sync on every call of this function with a full snapshot sync periodically.
+    // This should keep us in sync without over-burdening the broker with snapshot sync requests
+
+    const getAllCompletedJobs = async (ids: ProvingJobId[]) => {
+      // In this function we take whatever set of snapshot ids and we ask the broker for completed job notifications
+      // We collect all returned notifications and return them
+      const allCompleted = new Set<ProvingJobId>();
+      try {
+        let numRequests = 0;
+        while (ids.length > 0) {
+          const slice = ids.splice(0, SNAPSHOT_SYNC_CHECK_MAX_REQUEST_SIZE);
+          const completed = await this.broker.getCompletedJobs(slice);
+          completed.forEach(id => allCompleted.add(id));
+          ++numRequests;
+        }
+        if (numRequests === 0) {
+          const final = await this.broker.getCompletedJobs([]);
+          final.forEach(id => allCompleted.add(id));
+        }
+      } catch (err) {
+        this.log.error(`Error thrown when requesting completed job notifications from the broker`, err);
+      }
+      return allCompleted;
+    };
+
+    const snapshotSyncIds = [];
+    const currentTime = Date.now();
+    const secondsSinceLastSnapshotSync = currentTime - this.timeOfLastSnapshotSync;
+    if (secondsSinceLastSnapshotSync > SNAPSHOT_SYNC_INTERVAL_MS) {
+      this.timeOfLastSnapshotSync = currentTime;
+      snapshotSyncIds.push(...this.jobs.keys());
+      this.log.trace(`Performing full snapshot sync of completed jobs with ${snapshotSyncIds.length} job(s)`);
+    } else {
+      this.log.trace(`Performing incremental sync of completed jobs`);
+    }
+
+    // Now request the notifications from the broker
+    const snapshotIdsLength = snapshotSyncIds.length;
+    const completedJobs = await getAllCompletedJobs(snapshotSyncIds);
+
+    // We now have an additional set of completed job notifications to add to our cached set giving us the full set of jobs that we have been told are ready
+    // We filter this list to what we actually need, in case for any reason it is different and store in our cache
+    const allJobsReady = [...completedJobs, ...this.jobsToRetrieve];
+    this.jobsToRetrieve = new Set(allJobsReady.filter(id => this.jobs.has(id)));
+
+    if (completedJobs.size > 0) {
+      this.log.verbose(
+        `Check for job completion notifications returned ${completedJobs.size} job(s), snapshot ids length: ${snapshotIdsLength}, num outstanding jobs: ${this.jobs.size}, total jobs ready: ${this.jobsToRetrieve.size}`,
+      );
+    } else {
+      this.log.trace(
+        `Check for job completion notifications returned 0 jobs, snapshot ids length: ${snapshotIdsLength}, num outstanding jobs: ${this.jobs.size}, total jobs ready: ${this.jobsToRetrieve.size}`,
+      );
+    }
+  }

+  private async retrieveJobsThatShouldBeReady() {
+    const convertJobResult = async <T extends ProvingRequestType>(
+      result: ProvingJobStatus,
+      jobType: ProvingRequestType,
+    ): Promise<{
+      success: boolean;
+      reason?: string;
+      result?: ProvingJobResultsMap[T];
+    }> => {
       if (result.status === 'fulfilled') {
         const output = await this.proofStore.getProofOutput(result.value);
-        if (output.type ===
-          return output.result as ProvingJobResultsMap[T];
+        if (output.type === jobType) {
+          return { result: output.result as ProvingJobResultsMap[T], success: true };
         } else {
-
+          return { success: false, reason: `Unexpected proof type: ${output.type}. Expected: ${jobType}` };
         }
+      } else if (result.status === 'rejected') {
+        return { success: false, reason: result.reason };
       } else {
-        throw new Error(result.
+        throw new Error(`Unexpected proving job status ${result.status}`);
       }
-  }
-
+    };
+
+    const processJob = async (job: ProvingJob) => {
+      // First retrieve the settled job from the broker
+      this.log.debug(`Received notification of completed job id=${job.id} type=${ProvingRequestType[job.type]}`);
+      let settledResult;
+      try {
+        settledResult = await this.broker.getProvingJobStatus(job.id);
+      } catch (err) {
+        // If an error occurs retrieving the job result then just log it and move on.
+        // We will try again on the next iteration
+        this.log.error(
+          `Error retrieving job result from broker job id=${job.id} type=${ProvingRequestType[job.type]}`,
+          err,
+        );
+        return false;
+      }
+
+      // Then convert the result and resolve/reject the promise
+      let result;
+      try {
+        result = await convertJobResult(settledResult, job.type);
+      } catch (err) {
+        // If an error occurs retrieving the job result then just log it and move on.
+        // We will try again on the next iteration
+        this.log.error(`Error processing job result job id=${job.id} type=${ProvingRequestType[job.type]}`, err);
+        return false;
+      }
+
+      if (result.success) {
+        this.log.verbose(`Resolved proving job id=${job.id} type=${ProvingRequestType[job.type]}`);
+        job.promise.resolve(result.result);
+      } else {
+        this.log.error(
+          `Resolving proving job with error id=${job.id} type=${ProvingRequestType[job.type]}`,
+          result.reason,
+        );
+        job.promise.reject(new Error(result.reason));
+      }
+
+      if (job.abortFn && job.signal) {
+        job.signal?.removeEventListener('abort', job.abortFn);
+      }
+
+      // Job is now processed removed from our cache
+      this.jobs.delete(job.id);
+      this.jobsToRetrieve.delete(job.id);
+      return true;
+    };
+
+    const toBeRetrieved = Array.from(this.jobsToRetrieve.values())
+      .map(id => this.jobs.get(id)!)
+      .filter(x => x !== undefined);
+    const totalJobsToRetrieve = toBeRetrieved.length;
+    let totalJobsRetrieved = 0;
+    while (toBeRetrieved.length > 0) {
+      const slice = toBeRetrieved.splice(0, MAX_CONCURRENT_JOB_SETTLED_REQUESTS);
+      const results = await Promise.all(slice.map(job => processJob(job!)));
+      totalJobsRetrieved += results.filter(x => x).length;
+    }
+    if (totalJobsToRetrieve > 0) {
+      this.log.verbose(
+        `Successfully retrieved ${totalJobsRetrieved} of ${totalJobsToRetrieve} jobs that should be ready, total ready jobs is now: ${this.jobsToRetrieve.size}`,
+      );
     }
   }

+  private async monitorForCompletedJobs() {
+    // Monitoring for completed jobs involves 2 stages.
+
+    // 1. Update our list of completed jobs.
+    // We poll the broker for any new job completion notifications and after filtering/deduplication add them to our cached
+    // list of jobs that we have been told are ready.
+    await this.updateCompletedJobs();
+
+    // 2. Retrieve the jobs that should be ready.
+    // We have a list of jobs that we have been told are ready, so we go ahead and ask for their results
+    await this.retrieveJobsThatShouldBeReady();
+  }
+
   getAvmProof(
     inputs: AvmCircuitInputs,
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<ProofAndVerificationKey<typeof AVM_PROOF_LENGTH_IN_FIELDS>> {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.PUBLIC_VM, inputs, epochNumber),
       ProvingRequestType.PUBLIC_VM,
       inputs,
@@ -137,7 +397,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<PublicInputsAndRecursiveProof<ParityPublicInputs, typeof RECURSIVE_PROOF_LENGTH>> {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.BASE_PARITY, inputs, epochNumber),
       ProvingRequestType.BASE_PARITY,
       inputs,
@@ -153,7 +413,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
   ): Promise<
     PublicInputsAndRecursiveProof<BlockRootOrBlockMergePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.BLOCK_MERGE_ROLLUP, input, epochNumber),
       ProvingRequestType.BLOCK_MERGE_ROLLUP,
       input,
@@ -169,7 +429,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
   ): Promise<
     PublicInputsAndRecursiveProof<BlockRootOrBlockMergePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.BLOCK_ROOT_ROLLUP, input, epochNumber),
       ProvingRequestType.BLOCK_ROOT_ROLLUP,
       input,
@@ -178,33 +438,33 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
     );
   }

-
-    input:
+  getSingleTxBlockRootRollupProof(
+    input: SingleTxBlockRootRollupInputs,
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<
     PublicInputsAndRecursiveProof<BlockRootOrBlockMergePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
-      this.generateId(ProvingRequestType.
-      ProvingRequestType.
+    return this.enqueueJob(
+      this.generateId(ProvingRequestType.BLOCK_ROOT_ROLLUP, input, epochNumber),
+      ProvingRequestType.SINGLE_TX_BLOCK_ROOT_ROLLUP,
       input,
       epochNumber,
       signal,
     );
   }

-
-
+  getEmptyBlockRootRollupProof(
+    input: EmptyBlockRootRollupInputs,
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<
-    PublicInputsAndRecursiveProof<
+    PublicInputsAndRecursiveProof<BlockRootOrBlockMergePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
-      this.generateId(ProvingRequestType.
-      ProvingRequestType.
-
+    return this.enqueueJob(
+      this.generateId(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, input, epochNumber),
+      ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP,
+      input,
       epochNumber,
       signal,
     );
@@ -217,7 +477,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
   ): Promise<
     PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.MERGE_ROLLUP, input, epochNumber),
       ProvingRequestType.MERGE_ROLLUP,
       input,
@@ -232,7 +492,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
   ): Promise<
     PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.PRIVATE_BASE_ROLLUP, baseRollupInput, epochNumber),
       ProvingRequestType.PRIVATE_BASE_ROLLUP,
       baseRollupInput,
@@ -248,7 +508,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
   ): Promise<
     PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs, epochNumber),
       ProvingRequestType.PUBLIC_BASE_ROLLUP,
       inputs,
@@ -262,7 +522,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<PublicInputsAndRecursiveProof<ParityPublicInputs, typeof NESTED_RECURSIVE_PROOF_LENGTH>> {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.ROOT_PARITY, inputs, epochNumber),
       ProvingRequestType.ROOT_PARITY,
       inputs,
@@ -276,7 +536,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<PublicInputsAndRecursiveProof<RootRollupPublicInputs, typeof RECURSIVE_PROOF_LENGTH>> {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.ROOT_ROLLUP, input, epochNumber),
       ProvingRequestType.ROOT_ROLLUP,
       input,
@@ -290,7 +550,7 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
     signal?: AbortSignal,
     epochNumber?: number,
   ): Promise<ProofAndVerificationKey<typeof TUBE_PROOF_LENGTH>> {
-    return this.
+    return this.enqueueJob(
       this.generateId(ProvingRequestType.TUBE_PROOF, tubeInput, epochNumber),
       ProvingRequestType.TUBE_PROOF,
       tubeInput,
@@ -301,6 +561,6 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {

   private generateId(type: ProvingRequestType, inputs: { toBuffer(): Buffer }, epochNumber = 0) {
     const inputsHash = sha256(inputs.toBuffer());
-    return
+    return makeProvingJobId(epochNumber, type, inputsHash.toString('hex'));
   }
 }
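
The hunks above rework BrokerCircuitProverFacade into a start/stop service: enqueueJob serializes requests through a SerialQueue, and a RunningPromise polls monitorForCompletedJobs to resolve outstanding job promises. The following minimal sketch is not part of the package; the helper name is hypothetical and it assumes the constructor parameters not shown in the diff keep their defaults.

import { type ProvingJobProducer } from '@aztec/circuit-types';

import { BrokerCircuitProverFacade } from './broker_prover_facade.js';

// Derive the input type from the facade itself rather than guessing at a re-export path.
type AvmInputs = Parameters<BrokerCircuitProverFacade['getAvmProof']>[0];

// Hypothetical wiring: `broker` is any ProvingJobProducer (for example an RPC client pointing at the proving broker).
async function proveAvmCircuit(broker: ProvingJobProducer, inputs: AvmInputs, epochNumber: number) {
  // Assumption: the remaining constructor parameters (proof store, timeouts, poll interval) have defaults.
  const facade = new BrokerCircuitProverFacade(broker);
  facade.start(); // starts the SerialQueue and the RunningPromise that drives monitorForCompletedJobs()

  try {
    // Resolves once the broker reports the job fulfilled; rejects if the job is rejected or the facade is stopped.
    return await facade.getAvmProof(inputs, undefined, epochNumber);
  } finally {
    await facade.stop(); // outstanding job promises are rejected with 'Broker facade stopped'
  }
}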

package/src/proving_broker/config.ts

@@ -0,0 +1,93 @@
+import { ProvingRequestType } from '@aztec/circuit-types';
+import { type ConfigMappingsType, booleanConfigHelper, numberConfigHelper } from '@aztec/foundation/config';
+import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
+
+import { z } from 'zod';
+
+export const ProverBrokerConfig = z.object({
+  /** If starting a prover broker locally, the max number of retries per proving job */
+  proverBrokerJobMaxRetries: z.number(),
+  /** If starting a prover broker locally, the time after which a job times out and gets assigned to a different agent */
+  proverBrokerJobTimeoutMs: z.number(),
+  /** If starting a prover broker locally, the interval the broker checks for timed out jobs */
+  proverBrokerPollIntervalMs: z.number(),
+  /** If starting a prover broker locally, the directory to store broker data */
+  dataDirectory: z.string().optional(),
+  /** The size of the data store map */
+  dataStoreMapSizeKB: z.number(),
+});
+
+export type ProverBrokerConfig = z.infer<typeof ProverBrokerConfig> &
+  Pick<DataStoreConfig, 'dataStoreMapSizeKB' | 'dataDirectory'>;
+
+export const proverBrokerConfigMappings: ConfigMappingsType<ProverBrokerConfig> = {
+  proverBrokerJobTimeoutMs: {
+    env: 'PROVER_BROKER_JOB_TIMEOUT_MS',
+    description: 'Jobs are retried if not kept alive for this long',
+    ...numberConfigHelper(30_000),
+  },
+  proverBrokerPollIntervalMs: {
+    env: 'PROVER_BROKER_POLL_INTERVAL_MS',
+    description: 'The interval to check job health status',
+    ...numberConfigHelper(1_000),
+  },
+  proverBrokerJobMaxRetries: {
+    env: 'PROVER_BROKER_JOB_MAX_RETRIES',
+    description: 'If starting a prover broker locally, the max number of retries per proving job',
+    ...numberConfigHelper(3),
+  },
+  ...dataConfigMappings,
+};
+
+export const ProverAgentConfig = z.object({
+  /** The number of prover agents to start */
+  proverAgentCount: z.number(),
+  /** The types of proofs the prover agent can generate */
+  proverAgentProofTypes: z.array(z.nativeEnum(ProvingRequestType)),
+  /** How often the prover agents poll for jobs */
+  proverAgentPollIntervalMs: z.number(),
+  /** The URL where this agent takes jobs from */
+  proverBrokerUrl: z.string().optional(),
+  /** Whether to construct real proofs */
+  realProofs: z.boolean(),
+  /** Artificial delay to introduce to all operations to the test prover. */
+  proverTestDelayMs: z.number(),
+});
+
+export type ProverAgentConfig = z.infer<typeof ProverAgentConfig>;
+
+export const proverAgentConfigMappings: ConfigMappingsType<ProverAgentConfig> = {
+  proverAgentCount: {
+    env: 'PROVER_AGENT_COUNT',
+    description: 'Whether this prover has a local prover agent',
+    ...numberConfigHelper(1),
+  },
+  proverAgentPollIntervalMs: {
+    env: 'PROVER_AGENT_POLL_INTERVAL_MS',
+    description: 'The interval agents poll for jobs at',
+    ...numberConfigHelper(100),
+  },
+  proverAgentProofTypes: {
+    env: 'PROVER_AGENT_PROOF_TYPES',
+    description: 'The types of proofs the prover agent can generate',
+    parseEnv: (val: string) =>
+      val
+        .split(',')
+        .map(v => ProvingRequestType[v as any])
+        .filter(v => typeof v === 'number'),
+  },
+  proverBrokerUrl: {
+    env: 'PROVER_BROKER_HOST',
+    description: 'The URL where this agent takes jobs from',
+  },
+  realProofs: {
+    env: 'PROVER_REAL_PROOFS',
+    description: 'Whether to construct real proofs',
+    ...booleanConfigHelper(false),
+  },
+  proverTestDelayMs: {
+    env: 'PROVER_TEST_DELAY_MS',
+    description: 'Artificial delay to introduce to all operations to the test prover.',
+    ...numberConfigHelper(0),
+  },
+};
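
The new config module above is a plain zod schema plus env-var mappings, so it can also be exercised directly with parse. The sketch below is not part of the package; the values are illustrative only and simply mirror the defaults declared in proverAgentConfigMappings, and the broker URL is hypothetical.

import { ProvingRequestType } from '@aztec/circuit-types';

import { ProverAgentConfig } from './config.js';

// Illustrative values; each field corresponds to one of the env vars mapped above.
const agentConfig = ProverAgentConfig.parse({
  proverAgentCount: 1, // PROVER_AGENT_COUNT
  proverAgentProofTypes: [ProvingRequestType.BASE_PARITY, ProvingRequestType.ROOT_ROLLUP], // PROVER_AGENT_PROOF_TYPES
  proverAgentPollIntervalMs: 100, // PROVER_AGENT_POLL_INTERVAL_MS
  proverBrokerUrl: 'http://localhost:8080', // PROVER_BROKER_HOST (hypothetical URL)
  realProofs: false, // PROVER_REAL_PROOFS
  proverTestDelayMs: 0, // PROVER_TEST_DELAY_MS
});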

package/src/proving_broker/factory.ts

@@ -1,7 +1,6 @@
-import { type ProverBrokerConfig } from '@aztec/circuit-types';
-import { AztecLmdbStore } from '@aztec/kv-store/lmdb';
 import { type TelemetryClient } from '@aztec/telemetry-client';

+import { type ProverBrokerConfig } from './config.js';
 import { ProvingBroker } from './proving_broker.js';
 import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js';
 import { KVBrokerDatabase } from './proving_broker_database/persisted.js';
@@ -10,9 +9,7 @@ export async function createAndStartProvingBroker(
   config: ProverBrokerConfig,
   client: TelemetryClient,
 ): Promise<ProvingBroker> {
-  const database = config.
-    ? new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory), client)
-    : new InMemoryBrokerDatabase();
+  const database = config.dataDirectory ? await KVBrokerDatabase.new(config, client) : new InMemoryBrokerDatabase();

   const broker = new ProvingBroker(database, client, {
     jobTimeoutMs: config.proverBrokerJobTimeoutMs,
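
A small sketch, not part of the package, of how the updated factory could be called. As the replaced lines show, persistence now hinges solely on the shared dataDirectory setting; the wrapper name below is hypothetical.

import { type TelemetryClient } from '@aztec/telemetry-client';

import { type ProverBrokerConfig } from './config.js';
import { createAndStartProvingBroker } from './factory.js';

// Hypothetical helper: with dataDirectory set the broker persists via KVBrokerDatabase.new(config, client),
// otherwise it falls back to the InMemoryBrokerDatabase.
export async function startBroker(config: ProverBrokerConfig, client: TelemetryClient) {
  const broker = await createAndStartProvingBroker(config, client);
  return broker;
}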

package/src/proving_broker/fixtures.ts

@@ -0,0 +1,14 @@
+import { type ProofUri, type ProvingJobId, ProvingRequestType, makeProvingJobId } from '@aztec/circuit-types';
+import { randomBytes } from '@aztec/foundation/crypto';
+
+export function makeRandomProvingJobId(epochNumber?: number): ProvingJobId {
+  return makeProvingJobId(epochNumber ?? 1, ProvingRequestType.BASE_PARITY, randomBytes(8).toString('hex'));
+}
+
+export function makeInputsUri(): ProofUri {
+  return randomBytes(8).toString('hex') as ProofUri;
+}
+
+export function makeOutputsUri(): ProofUri {
+  return randomBytes(8).toString('hex') as ProofUri;
+}
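
The fixtures above generate random job ids and proof URIs for tests. As a hedged illustration that is not part of the package, the object below mirrors the { id, type, inputsUri, epochNumber } shape the facade passes to broker.enqueueProvingJob earlier in this diff; the epoch number is arbitrary.

import { ProvingRequestType } from '@aztec/circuit-types';

import { makeInputsUri, makeRandomProvingJobId } from './fixtures.js';

// Illustrative test payload assembled from the new fixture helpers.
const epochNumber = 42; // arbitrary
const job = {
  id: makeRandomProvingJobId(epochNumber),
  type: ProvingRequestType.BASE_PARITY,
  inputsUri: makeInputsUri(),
  epochNumber,
};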