@aztec/prover-client 0.0.0-test.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dest/bin/get-proof-inputs.d.ts +2 -0
- package/dest/bin/get-proof-inputs.d.ts.map +1 -0
- package/dest/bin/get-proof-inputs.js +51 -0
- package/dest/block_builder/index.d.ts +6 -0
- package/dest/block_builder/index.d.ts.map +1 -0
- package/dest/block_builder/index.js +1 -0
- package/dest/block_builder/light.d.ts +33 -0
- package/dest/block_builder/light.d.ts.map +1 -0
- package/dest/block_builder/light.js +82 -0
- package/dest/config.d.ts +17 -0
- package/dest/config.d.ts.map +1 -0
- package/dest/config.js +39 -0
- package/dest/index.d.ts +4 -0
- package/dest/index.d.ts.map +1 -0
- package/dest/index.js +2 -0
- package/dest/mocks/fixtures.d.ts +20 -0
- package/dest/mocks/fixtures.d.ts.map +1 -0
- package/dest/mocks/fixtures.js +77 -0
- package/dest/mocks/test_context.d.ts +55 -0
- package/dest/mocks/test_context.d.ts.map +1 -0
- package/dest/mocks/test_context.js +193 -0
- package/dest/orchestrator/block-building-helpers.d.ts +55 -0
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -0
- package/dest/orchestrator/block-building-helpers.js +285 -0
- package/dest/orchestrator/block-proving-state.d.ts +76 -0
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/block-proving-state.js +269 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +60 -0
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/epoch-proving-state.js +163 -0
- package/dest/orchestrator/index.d.ts +2 -0
- package/dest/orchestrator/index.d.ts.map +1 -0
- package/dest/orchestrator/index.js +1 -0
- package/dest/orchestrator/orchestrator.d.ts +110 -0
- package/dest/orchestrator/orchestrator.d.ts.map +1 -0
- package/dest/orchestrator/orchestrator.js +690 -0
- package/dest/orchestrator/orchestrator_metrics.d.ts +8 -0
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -0
- package/dest/orchestrator/orchestrator_metrics.js +17 -0
- package/dest/orchestrator/tx-proving-state.d.ts +34 -0
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/tx-proving-state.js +94 -0
- package/dest/prover-client/factory.d.ts +6 -0
- package/dest/prover-client/factory.d.ts.map +1 -0
- package/dest/prover-client/factory.js +5 -0
- package/dest/prover-client/index.d.ts +3 -0
- package/dest/prover-client/index.d.ts.map +1 -0
- package/dest/prover-client/index.js +2 -0
- package/dest/prover-client/prover-client.d.ts +42 -0
- package/dest/prover-client/prover-client.d.ts.map +1 -0
- package/dest/prover-client/prover-client.js +110 -0
- package/dest/prover-client/server-epoch-prover.d.ts +28 -0
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -0
- package/dest/prover-client/server-epoch-prover.js +40 -0
- package/dest/proving_broker/broker_prover_facade.d.ts +46 -0
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -0
- package/dest/proving_broker/broker_prover_facade.js +344 -0
- package/dest/proving_broker/config.d.ts +83 -0
- package/dest/proving_broker/config.d.ts.map +1 -0
- package/dest/proving_broker/config.js +104 -0
- package/dest/proving_broker/factory.d.ts +5 -0
- package/dest/proving_broker/factory.d.ts.map +1 -0
- package/dest/proving_broker/factory.js +9 -0
- package/dest/proving_broker/fixtures.d.ts +5 -0
- package/dest/proving_broker/fixtures.d.ts.map +1 -0
- package/dest/proving_broker/fixtures.js +12 -0
- package/dest/proving_broker/index.d.ts +10 -0
- package/dest/proving_broker/index.d.ts.map +1 -0
- package/dest/proving_broker/index.js +9 -0
- package/dest/proving_broker/proof_store/factory.d.ts +6 -0
- package/dest/proving_broker/proof_store/factory.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/factory.js +36 -0
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +14 -0
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/gcs_proof_store.js +51 -0
- package/dest/proving_broker/proof_store/index.d.ts +4 -0
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/index.js +3 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +15 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.js +41 -0
- package/dest/proving_broker/proof_store/proof_store.d.ts +36 -0
- package/dest/proving_broker/proof_store/proof_store.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/proof_store.js +3 -0
- package/dest/proving_broker/proving_agent.d.ts +46 -0
- package/dest/proving_broker/proving_agent.d.ts.map +1 -0
- package/dest/proving_broker/proving_agent.js +134 -0
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +8 -0
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +1 -0
- package/dest/proving_broker/proving_agent_instrumentation.js +16 -0
- package/dest/proving_broker/proving_broker.d.ts +64 -0
- package/dest/proving_broker/proving_broker.d.ts.map +1 -0
- package/dest/proving_broker/proving_broker.js +570 -0
- package/dest/proving_broker/proving_broker_database/memory.d.ts +16 -0
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -0
- package/dest/proving_broker/proving_broker_database/memory.js +54 -0
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +25 -0
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -0
- package/dest/proving_broker/proving_broker_database/persisted.js +182 -0
- package/dest/proving_broker/proving_broker_database.d.ts +39 -0
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -0
- package/dest/proving_broker/proving_broker_database.js +3 -0
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +29 -0
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -0
- package/dest/proving_broker/proving_broker_instrumentation.js +110 -0
- package/dest/proving_broker/proving_job_controller.d.ts +33 -0
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -0
- package/dest/proving_broker/proving_job_controller.js +166 -0
- package/dest/proving_broker/rpc.d.ts +27 -0
- package/dest/proving_broker/rpc.d.ts.map +1 -0
- package/dest/proving_broker/rpc.js +66 -0
- package/dest/test/mock_prover.d.ts +35 -0
- package/dest/test/mock_prover.d.ts.map +1 -0
- package/dest/test/mock_prover.js +82 -0
- package/package.json +112 -0
- package/src/bin/get-proof-inputs.ts +59 -0
- package/src/block_builder/index.ts +6 -0
- package/src/block_builder/light.ts +101 -0
- package/src/config.ts +55 -0
- package/src/index.ts +4 -0
- package/src/mocks/fixtures.ts +117 -0
- package/src/mocks/test_context.ts +257 -0
- package/src/orchestrator/block-building-helpers.ts +553 -0
- package/src/orchestrator/block-proving-state.ts +379 -0
- package/src/orchestrator/epoch-proving-state.ts +252 -0
- package/src/orchestrator/index.ts +1 -0
- package/src/orchestrator/orchestrator.ts +971 -0
- package/src/orchestrator/orchestrator_metrics.ts +22 -0
- package/src/orchestrator/tx-proving-state.ts +139 -0
- package/src/prover-client/factory.ts +14 -0
- package/src/prover-client/index.ts +2 -0
- package/src/prover-client/prover-client.ts +162 -0
- package/src/prover-client/server-epoch-prover.ts +51 -0
- package/src/proving_broker/broker_prover_facade.ts +585 -0
- package/src/proving_broker/config.ts +138 -0
- package/src/proving_broker/factory.ts +18 -0
- package/src/proving_broker/fixtures.ts +15 -0
- package/src/proving_broker/index.ts +9 -0
- package/src/proving_broker/proof_store/factory.ts +42 -0
- package/src/proving_broker/proof_store/gcs_proof_store.ts +72 -0
- package/src/proving_broker/proof_store/index.ts +3 -0
- package/src/proving_broker/proof_store/inline_proof_store.ts +63 -0
- package/src/proving_broker/proof_store/proof_store.ts +54 -0
- package/src/proving_broker/proving_agent.ts +181 -0
- package/src/proving_broker/proving_agent_instrumentation.ts +21 -0
- package/src/proving_broker/proving_broker.ts +687 -0
- package/src/proving_broker/proving_broker_database/memory.ts +63 -0
- package/src/proving_broker/proving_broker_database/persisted.ts +218 -0
- package/src/proving_broker/proving_broker_database.ts +44 -0
- package/src/proving_broker/proving_broker_instrumentation.ts +145 -0
- package/src/proving_broker/proving_job_controller.ts +194 -0
- package/src/proving_broker/rpc.ts +95 -0
- package/src/test/mock_prover.ts +253 -0
|
@@ -0,0 +1,344 @@
|
|
|
1
|
+
import { sha256 } from '@aztec/foundation/crypto';
|
|
2
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
3
|
+
import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
|
|
4
|
+
import { truncate } from '@aztec/foundation/string';
|
|
5
|
+
import { makeProvingJobId } from '@aztec/stdlib/interfaces/server';
|
|
6
|
+
import { ProvingRequestType } from '@aztec/stdlib/proofs';
|
|
7
|
+
import { InlineProofStore } from './proof_store/index.js';
|
|
8
|
+
// Perform a snapshot sync every 30 seconds
const SNAPSHOT_SYNC_INTERVAL_MS = 30_000;
// Max number of settled-job results fetched from the broker concurrently (used as the
// Promise.all batch size in retrieveJobsThatShouldBeReady)
const MAX_CONCURRENT_JOB_SETTLED_REQUESTS = 10;
// Max number of job ids sent in a single getCompletedJobs request during a snapshot sync
const SNAPSHOT_SYNC_CHECK_MAX_REQUEST_SIZE = 1000;
|
|
12
|
+
/**
 * Facade over a proving-job broker. Callers enqueue circuit proving jobs and receive a
 * promise; a background poll loop (started via start(), see monitorForCompletedJobs())
 * watches the broker for settled jobs, fetches their results from the proof store and
 * resolves or rejects the corresponding promises.
 */
export class BrokerCircuitProverFacade {
  /** The broker that queues jobs and reports their status. */
  broker;
  /** Store used to save proof inputs and read proof outputs. */
  proofStore;
  /** Optional store where inputs of failed jobs are backed up for later inspection. */
  failedProofStore;
  /** Interval in ms between polls of the broker for completed jobs. */
  pollIntervalMs;
  log;
  /** Outstanding jobs keyed by job id. Doubles as a mutex: a job is only ever created once. */
  jobs;
  /** Poll loop created by start(); undefined until then. */
  runningPromise;
  /** Epoch-ms timestamp of the last full snapshot sync with the broker. */
  timeOfLastSnapshotSync;
  /** Ids the broker reported as settled whose results we still need to retrieve. */
  jobsToRetrieve;

  constructor(broker, proofStore = new InlineProofStore(), failedProofStore, pollIntervalMs = 1000, log = createLogger('prover-client:broker-circuit-prover-facade')){
    this.broker = broker;
    this.proofStore = proofStore;
    this.failedProofStore = failedProofStore;
    this.pollIntervalMs = pollIntervalMs;
    this.log = log;
    this.jobs = new Map();
    this.timeOfLastSnapshotSync = Date.now();
    this.jobsToRetrieve = new Set();
  }

  /**
   * This is a critical section. This function can not be async since it writes
   * to the jobs map which acts as a mutex, ensuring a job is only ever created once.
   *
   * This could be called in a SerialQueue if it needs to become async.
   *
   * @returns The (new or existing) job record plus whether it already existed.
   */
  getOrCreateProvingJob(id, type, signal) {
    // Check if there is already a promise for this job
    const existingJob = this.jobs.get(id);
    if (existingJob) {
      this.log.verbose(`Job already found in facade id=${id} type=${ProvingRequestType[type]}`, {
        provingJobId: id,
        provingJobType: ProvingRequestType[type],
      });
      return { job: existingJob, isEnqueued: true };
    }
    // Create a promise for this job id, regardless of whether it was enqueued at the broker
    // The running promise will monitor for the job to be completed and resolve it either way
    const promise = promiseWithResolvers();
    const abortFn = () => {
      signal?.removeEventListener('abort', abortFn);
      void this.broker.cancelProvingJob(id).catch((err) => this.log.warn(`Error cancelling job id=${id}`, err));
    };
    const job = { id, type, deferred: promise, abortFn, signal };
    this.jobs.set(id, job);
    return { job, isEnqueued: false };
  }

  /**
   * Saves the proof inputs, enqueues the job at the broker and returns a promise that
   * settles once the job completes (resolved/rejected by the monitor loop).
   *
   * @param id - The deterministic job id (see generateId).
   * @param type - The ProvingRequestType of the job.
   * @param inputs - The proof inputs (must support toBuffer()).
   * @param epochNumber - The epoch the job belongs to.
   * @param signal - Optional abort signal; aborting cancels the job at the broker.
   */
  async enqueueJob(id, type, inputs, epochNumber = 0, signal) {
    const { job, isEnqueued } = this.getOrCreateProvingJob(id, type, signal);
    if (isEnqueued) {
      // Another caller already enqueued this exact job; share its promise.
      return job.deferred.promise;
    }
    try {
      const inputsUri = await this.proofStore.saveProofInput(id, type, inputs);
      // Remember the inputs location so failed jobs can be backed up later.
      job.inputsUri = inputsUri;
      const jobStatus = await this.broker.enqueueProvingJob({ id, type, inputsUri, epochNumber });
      // If we are here then the job was successfully accepted by the broker
      // the returned status is for before any action was performed
      if (jobStatus.status === 'fulfilled' || jobStatus.status === 'rejected') {
        // Job was already completed by the broker
        // No need to notify the broker on aborted job
        this.log.verbose(
          `Job already completed when sent to broker id=${id} type=${ProvingRequestType[type]} epochNumber=${epochNumber}`,
          {
            provingJobId: id,
            provingJobType: ProvingRequestType[type],
            epochNumber,
            inputsUri: truncate(inputsUri),
          },
        );
        // Job was not enqueued. It must be completed already, add to our set of already completed jobs
        this.jobsToRetrieve.add(id);
      } else {
        // notify the broker if job is aborted
        signal?.addEventListener('abort', job.abortFn);
        if (jobStatus.status === 'not-found') {
          // Job added for the first time
          this.log.verbose(
            `Job enqueued with broker id=${id} type=${ProvingRequestType[type]} epochNumber=${epochNumber}`,
            {
              provingJobId: id,
              provingJobType: ProvingRequestType[type],
              epochNumber,
              inputsUri: truncate(inputsUri),
              numOutstandingJobs: this.jobs.size,
            },
          );
        } else {
          // Job was previously sent to the broker but is not completed
          this.log.verbose(
            `Job already in queue or in progress when sent to broker id=${id} type=${ProvingRequestType[type]} epochNumber=${epochNumber}`,
            {
              provingJobId: id,
              provingJobType: ProvingRequestType[type],
              epochNumber,
              inputsUri: truncate(inputsUri),
            },
          );
        }
      }
    } catch (err) {
      // Saving inputs or enqueueing failed: drop the job so a retry can recreate it.
      this.jobs.delete(job.id);
      job.deferred.reject(err);
    }
    return job.deferred.promise;
  }

  /** Starts the background loop that polls the broker for completed jobs. */
  start() {
    if (this.runningPromise) {
      throw new Error('BrokerCircuitProverFacade already started');
    }
    this.log.verbose('Starting BrokerCircuitProverFacade');
    this.runningPromise = new RunningPromise(() => this.monitorForCompletedJobs(), this.log, this.pollIntervalMs);
    this.runningPromise.start();
  }

  /** Stops the poll loop and rejects every outstanding job promise. */
  async stop() {
    if (!this.runningPromise) {
      throw new Error('BrokerCircuitProverFacade not started');
    }
    this.log.verbose('Stopping BrokerCircuitProverFacade');
    await this.runningPromise.stop();
    // Reject any outstanding promises as stopped
    for (const [_, v] of this.jobs) {
      v.deferred.reject(new Error('Broker facade stopped'));
    }
    this.jobs.clear();
  }

  /**
   * Refreshes jobsToRetrieve with completion notifications from the broker.
   *
   * Here we check for completed jobs. If everything works well (there are no service restarts etc) then all we need to do
   * to maintain correct job state is to check for incrementally completed jobs. i.e. call getCompletedJobs with an empty array
   * However, if there are any problems then we may lose sync with the broker's actual set of completed jobs.
   * In this case we need to perform a full snapshot sync. This involves sending all of our outstanding job Ids to the broker
   * and have the broker report on whether they are completed or not.
   * We perform an incremental sync on every call of this function with a full snapshot sync periodically.
   * This should keep us in sync without over-burdening the broker with snapshot sync requests
   */
  async updateCompletedJobs() {
    const getAllCompletedJobs = async (ids) => {
      // In this function we take whatever set of snapshot ids and we ask the broker for completed job notifications
      // We collect all returned notifications and return them. Note: consumes (splices) `ids`.
      const allCompleted = new Set();
      try {
        let numRequests = 0;
        while (ids.length > 0) {
          const slice = ids.splice(0, SNAPSHOT_SYNC_CHECK_MAX_REQUEST_SIZE);
          const completed = await this.broker.getCompletedJobs(slice);
          completed.forEach((id) => allCompleted.add(id));
          ++numRequests;
        }
        if (numRequests === 0) {
          // Incremental sync: ask for notifications without supplying any ids.
          const final = await this.broker.getCompletedJobs([]);
          final.forEach((id) => allCompleted.add(id));
        }
      } catch (err) {
        this.log.error(`Error thrown when requesting completed job notifications from the broker`, err);
      }
      return allCompleted;
    };
    const snapshotSyncIds = [];
    const currentTime = Date.now();
    // Duration in milliseconds (the previous name, secondsSinceLastSnapshotSync, was misleading).
    const msSinceLastSnapshotSync = currentTime - this.timeOfLastSnapshotSync;
    if (msSinceLastSnapshotSync > SNAPSHOT_SYNC_INTERVAL_MS) {
      this.timeOfLastSnapshotSync = currentTime;
      snapshotSyncIds.push(...this.jobs.keys());
      this.log.trace(`Performing full snapshot sync of completed jobs with ${snapshotSyncIds.length} job(s)`);
    } else {
      this.log.trace(`Performing incremental sync of completed jobs`, { snapshotSyncIds });
    }
    // Now request the notifications from the broker
    const snapshotIdsLength = snapshotSyncIds.length;
    const completedJobs = await getAllCompletedJobs(snapshotSyncIds);
    // We now have an additional set of completed job notifications to add to our cached set giving us the full set of jobs that we have been told are ready
    // We filter this list to what we actually need, in case for any reason it is different and store in our cache
    const allJobsReady = [...completedJobs, ...this.jobsToRetrieve];
    this.jobsToRetrieve = new Set(allJobsReady.filter((id) => this.jobs.has(id)));
    if (completedJobs.size > 0) {
      this.log.verbose(`Check for job completion notifications returned ${completedJobs.size} job(s), snapshot ids length: ${snapshotIdsLength}, num outstanding jobs: ${this.jobs.size}, total jobs ready: ${this.jobsToRetrieve.size}`);
    } else {
      this.log.trace(`Check for job completion notifications returned 0 jobs, snapshot ids length: ${snapshotIdsLength}, num outstanding jobs: ${this.jobs.size}, total jobs ready: ${this.jobsToRetrieve.size}`);
    }
  }

  /**
   * For every job in jobsToRetrieve, fetches the settled result from the broker, converts
   * it and resolves/rejects the caller's promise. Failures to retrieve are logged and
   * retried on the next iteration.
   */
  async retrieveJobsThatShouldBeReady() {
    // Turns a settled broker status into { success, result | reason }.
    const convertJobResult = async (result, jobType) => {
      if (result.status === 'fulfilled') {
        const output = await this.proofStore.getProofOutput(result.value);
        if (output.type === jobType) {
          return { result: output.result, success: true };
        } else {
          return { success: false, reason: `Unexpected proof type: ${output.type}. Expected: ${jobType}` };
        }
      } else if (result.status === 'rejected') {
        return { success: false, reason: result.reason };
      } else {
        // Jobs in this set should only ever be fulfilled or rejected.
        throw new Error(`Unexpected proving job status ${result.status}`);
      }
    };
    // Retrieves and settles a single job; returns true on success so callers can count.
    const processJob = async (job) => {
      // First retrieve the settled job from the broker
      this.log.debug(`Received notification of completed job id=${job.id} type=${ProvingRequestType[job.type]}`);
      let settledResult;
      try {
        settledResult = await this.broker.getProvingJobStatus(job.id);
      } catch (err) {
        // If an error occurs retrieving the job result then just log it and move on.
        // We will try again on the next iteration
        this.log.error(`Error retrieving job result from broker job id=${job.id} type=${ProvingRequestType[job.type]}`, err);
        return false;
      }
      // Then convert the result and resolve/reject the promise
      let result;
      try {
        result = await convertJobResult(settledResult, job.type);
      } catch (err) {
        // If an error occurs retrieving the job result then just log it and move on.
        // We will try again on the next iteration
        this.log.error(`Error processing job result job id=${job.id} type=${ProvingRequestType[job.type]}`, err);
        return false;
      }
      if (result.success) {
        this.log.verbose(`Resolved proving job id=${job.id} type=${ProvingRequestType[job.type]}`);
        job.deferred.resolve(result.result);
      } else {
        this.log.error(`Resolving proving job with error id=${job.id} type=${ProvingRequestType[job.type]}`, result.reason);
        if (result.reason !== 'Aborted') {
          // Best-effort backup of the failed job's inputs; errors are logged inside.
          void this.backupFailedProofInputs(job);
        }
        job.deferred.reject(new Error(result.reason));
      }
      if (job.abortFn && job.signal) {
        job.signal?.removeEventListener('abort', job.abortFn);
      }
      // Job is now processed removed from our cache
      this.jobs.delete(job.id);
      this.jobsToRetrieve.delete(job.id);
      return true;
    };
    const toBeRetrieved = Array.from(this.jobsToRetrieve.values())
      .map((id) => this.jobs.get(id))
      .filter((x) => x !== undefined);
    const totalJobsToRetrieve = toBeRetrieved.length;
    let totalJobsRetrieved = 0;
    // Drain in bounded batches to cap concurrent requests against the broker.
    while (toBeRetrieved.length > 0) {
      const slice = toBeRetrieved.splice(0, MAX_CONCURRENT_JOB_SETTLED_REQUESTS);
      const results = await Promise.all(slice.map((job) => processJob(job)));
      totalJobsRetrieved += results.filter((x) => x).length;
    }
    if (totalJobsToRetrieve > 0) {
      this.log.verbose(`Successfully retrieved ${totalJobsRetrieved} of ${totalJobsToRetrieve} jobs that should be ready, total ready jobs is now: ${this.jobsToRetrieve.size}`);
    }
  }

  /**
   * Copies the inputs of a failed job into the failedProofStore (if configured) so they
   * can be inspected later. Never throws; errors are logged.
   */
  async backupFailedProofInputs(job) {
    try {
      if (!this.failedProofStore || !job.inputsUri) {
        return;
      }
      const inputs = await this.proofStore.getProofInput(job.inputsUri);
      const uri = await this.failedProofStore.saveProofInput(job.id, inputs.type, inputs.inputs);
      this.log.info(`Stored proof inputs for failed job id=${job.id} type=${ProvingRequestType[job.type]} at ${uri}`, {
        id: job.id,
        type: job.type,
        uri,
      });
    } catch (err) {
      this.log.error(`Error backing up proof inputs for failed job id=${job.id} type=${ProvingRequestType[job.type]}`, err);
    }
  }

  /** Single poll-loop iteration. */
  async monitorForCompletedJobs() {
    // Monitoring for completed jobs involves 2 stages.
    // 1. Update our list of completed jobs.
    // We poll the broker for any new job completion notifications and after filtering/deduplication add them to our cached
    // list of jobs that we have been told are ready.
    await this.updateCompletedJobs();
    // 2. Retrieve the jobs that should be ready.
    // We have a list of jobs that we have been told are ready, so we go ahead and ask for their results
    await this.retrieveJobsThatShouldBeReady();
  }

  getAvmProof(inputs, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.PUBLIC_VM, inputs, epochNumber), ProvingRequestType.PUBLIC_VM, inputs, epochNumber, signal);
  }

  getBaseParityProof(inputs, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.BASE_PARITY, inputs, epochNumber), ProvingRequestType.BASE_PARITY, inputs, epochNumber, signal);
  }

  getBlockMergeRollupProof(input, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.BLOCK_MERGE_ROLLUP, input, epochNumber), ProvingRequestType.BLOCK_MERGE_ROLLUP, input, epochNumber, signal);
  }

  getBlockRootRollupProof(input, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.BLOCK_ROOT_ROLLUP, input, epochNumber), ProvingRequestType.BLOCK_ROOT_ROLLUP, input, epochNumber, signal);
  }

  getSingleTxBlockRootRollupProof(input, signal, epochNumber) {
    // Fix: generate the id with SINGLE_TX_BLOCK_ROOT_ROLLUP (previously BLOCK_ROOT_ROLLUP),
    // so the type embedded in the job id matches the type the job is enqueued with,
    // consistent with every other method in this class.
    return this.enqueueJob(this.generateId(ProvingRequestType.SINGLE_TX_BLOCK_ROOT_ROLLUP, input, epochNumber), ProvingRequestType.SINGLE_TX_BLOCK_ROOT_ROLLUP, input, epochNumber, signal);
  }

  getEmptyBlockRootRollupProof(input, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, input, epochNumber), ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, input, epochNumber, signal);
  }

  getMergeRollupProof(input, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.MERGE_ROLLUP, input, epochNumber), ProvingRequestType.MERGE_ROLLUP, input, epochNumber, signal);
  }

  getPrivateBaseRollupProof(baseRollupInput, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.PRIVATE_BASE_ROLLUP, baseRollupInput, epochNumber), ProvingRequestType.PRIVATE_BASE_ROLLUP, baseRollupInput, epochNumber, signal);
  }

  getPublicBaseRollupProof(inputs, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs, epochNumber), ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs, epochNumber, signal);
  }

  getRootParityProof(inputs, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.ROOT_PARITY, inputs, epochNumber), ProvingRequestType.ROOT_PARITY, inputs, epochNumber, signal);
  }

  getRootRollupProof(input, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.ROOT_ROLLUP, input, epochNumber), ProvingRequestType.ROOT_ROLLUP, input, epochNumber, signal);
  }

  getTubeProof(tubeInput, signal, epochNumber) {
    return this.enqueueJob(this.generateId(ProvingRequestType.TUBE_PROOF, tubeInput, epochNumber), ProvingRequestType.TUBE_PROOF, tubeInput, epochNumber, signal);
  }

  /**
   * Builds a deterministic job id from the epoch, job type and a sha256 hash of the
   * serialized inputs, so identical requests map to the same broker job.
   */
  generateId(type, inputs, epochNumber = 0) {
    const inputsHash = sha256(inputs.toBuffer());
    return makeProvingJobId(epochNumber, type, inputsHash.toString('hex'));
  }
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import { type L1ReaderConfig } from '@aztec/ethereum';
|
|
2
|
+
import { type ConfigMappingsType } from '@aztec/foundation/config';
|
|
3
|
+
import { type DataStoreConfig } from '@aztec/kv-store/config';
|
|
4
|
+
import { ProvingRequestType } from '@aztec/stdlib/proofs';
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
export declare const ProverBrokerConfig: z.ZodObject<{
|
|
7
|
+
/** If starting a prover broker locally, the max number of retries per proving job */
|
|
8
|
+
proverBrokerJobMaxRetries: z.ZodNumber;
|
|
9
|
+
/** If starting a prover broker locally, the time after which a job times out and gets assigned to a different agent */
|
|
10
|
+
proverBrokerJobTimeoutMs: z.ZodNumber;
|
|
11
|
+
/** If starting a prover broker locally, the interval the broker checks for timed out jobs */
|
|
12
|
+
proverBrokerPollIntervalMs: z.ZodNumber;
|
|
13
|
+
/** If starting a prover broker locally, the directory to store broker data */
|
|
14
|
+
dataDirectory: z.ZodOptional<z.ZodString>;
|
|
15
|
+
/** The size of the data store map */
|
|
16
|
+
dataStoreMapSizeKB: z.ZodNumber;
|
|
17
|
+
/** The prover broker may batch jobs together before writing to the database */
|
|
18
|
+
proverBrokerBatchSize: z.ZodNumber;
|
|
19
|
+
/** How often the job batches get flushed */
|
|
20
|
+
proverBrokerBatchIntervalMs: z.ZodNumber;
|
|
21
|
+
/** The maximum number of epochs to keep results for */
|
|
22
|
+
proverBrokerMaxEpochsToKeepResultsFor: z.ZodNumber;
|
|
23
|
+
}, "strip", z.ZodTypeAny, {
|
|
24
|
+
proverBrokerJobMaxRetries: number;
|
|
25
|
+
proverBrokerJobTimeoutMs: number;
|
|
26
|
+
proverBrokerPollIntervalMs: number;
|
|
27
|
+
dataStoreMapSizeKB: number;
|
|
28
|
+
proverBrokerBatchSize: number;
|
|
29
|
+
proverBrokerBatchIntervalMs: number;
|
|
30
|
+
proverBrokerMaxEpochsToKeepResultsFor: number;
|
|
31
|
+
dataDirectory?: string | undefined;
|
|
32
|
+
}, {
|
|
33
|
+
proverBrokerJobMaxRetries: number;
|
|
34
|
+
proverBrokerJobTimeoutMs: number;
|
|
35
|
+
proverBrokerPollIntervalMs: number;
|
|
36
|
+
dataStoreMapSizeKB: number;
|
|
37
|
+
proverBrokerBatchSize: number;
|
|
38
|
+
proverBrokerBatchIntervalMs: number;
|
|
39
|
+
proverBrokerMaxEpochsToKeepResultsFor: number;
|
|
40
|
+
dataDirectory?: string | undefined;
|
|
41
|
+
}>;
|
|
42
|
+
export type ProverBrokerConfig = z.infer<typeof ProverBrokerConfig> & Pick<DataStoreConfig, 'dataStoreMapSizeKB' | 'dataDirectory'> & L1ReaderConfig;
|
|
43
|
+
export declare const proverBrokerConfigMappings: ConfigMappingsType<ProverBrokerConfig>;
|
|
44
|
+
export declare const defaultProverBrokerConfig: ProverBrokerConfig;
|
|
45
|
+
export declare const ProverAgentConfig: z.ZodObject<{
|
|
46
|
+
/** The number of prover agents to start */
|
|
47
|
+
proverAgentCount: z.ZodNumber;
|
|
48
|
+
/** The types of proofs the prover agent can generate */
|
|
49
|
+
proverAgentProofTypes: z.ZodArray<z.ZodNativeEnum<typeof ProvingRequestType>, "many">;
|
|
50
|
+
/** How often the prover agents poll for jobs */
|
|
51
|
+
proverAgentPollIntervalMs: z.ZodNumber;
|
|
52
|
+
/** The URL where this agent takes jobs from */
|
|
53
|
+
proverBrokerUrl: z.ZodOptional<z.ZodString>;
|
|
54
|
+
/** Whether to construct real proofs */
|
|
55
|
+
realProofs: z.ZodBoolean;
|
|
56
|
+
/** The type of artificial delay to introduce */
|
|
57
|
+
proverTestDelayType: z.ZodEnum<["fixed", "realistic"]>;
|
|
58
|
+
/** If using fixed delay, the time each operation takes. */
|
|
59
|
+
proverTestDelayMs: z.ZodNumber;
|
|
60
|
+
/** If using realistic delays, what percentage of realistic times to apply. */
|
|
61
|
+
proverTestDelayFactor: z.ZodNumber;
|
|
62
|
+
}, "strip", z.ZodTypeAny, {
|
|
63
|
+
proverAgentCount: number;
|
|
64
|
+
proverAgentProofTypes: ProvingRequestType[];
|
|
65
|
+
proverAgentPollIntervalMs: number;
|
|
66
|
+
realProofs: boolean;
|
|
67
|
+
proverTestDelayType: "fixed" | "realistic";
|
|
68
|
+
proverTestDelayMs: number;
|
|
69
|
+
proverTestDelayFactor: number;
|
|
70
|
+
proverBrokerUrl?: string | undefined;
|
|
71
|
+
}, {
|
|
72
|
+
proverAgentCount: number;
|
|
73
|
+
proverAgentProofTypes: ProvingRequestType[];
|
|
74
|
+
proverAgentPollIntervalMs: number;
|
|
75
|
+
realProofs: boolean;
|
|
76
|
+
proverTestDelayType: "fixed" | "realistic";
|
|
77
|
+
proverTestDelayMs: number;
|
|
78
|
+
proverTestDelayFactor: number;
|
|
79
|
+
proverBrokerUrl?: string | undefined;
|
|
80
|
+
}>;
|
|
81
|
+
export type ProverAgentConfig = z.infer<typeof ProverAgentConfig>;
|
|
82
|
+
export declare const proverAgentConfigMappings: ConfigMappingsType<ProverAgentConfig>;
|
|
83
|
+
//# sourceMappingURL=config.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/proving_broker/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAA0B,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EACL,KAAK,kBAAkB,EAIxB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,wBAAwB,CAAC;AAClF,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,kBAAkB;IAC7B,qFAAqF;;IAErF,uHAAuH;;IAEvH,6FAA6F;;IAE7F,8EAA8E;;IAE9E,qCAAqC;;IAErC,+EAA+E;;IAE/E,4CAA4C;;IAE5C,uDAAuD;;;;;;;;;;;;;;;;;;;;EAEvD,CAAC;AAEH,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB,CAAC,GACjE,IAAI,CAAC,eAAe,EAAE,oBAAoB,GAAG,eAAe,CAAC,GAC7D,cAAc,CAAC;AAEjB,eAAO,MAAM,0BAA0B,EAAE,kBAAkB,CAAC,kBAAkB,CAiC7E,CAAC;AAEF,eAAO,MAAM,yBAAyB,EAAE,kBAAiE,CAAC;AAE1G,eAAO,MAAM,iBAAiB;IAC5B,2CAA2C;;IAE3C,wDAAwD;;IAExD,gDAAgD;;IAEhD,+CAA+C;;IAE/C,uCAAuC;;IAEvC,gDAAgD;;IAEhD,2DAA2D;;IAE3D,8EAA8E;;;;;;;;;;;;;;;;;;;;EAE9E,CAAC;AAEH,MAAM,MAAM,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,iBAAiB,CAAC,CAAC;AAElE,eAAO,MAAM,yBAAyB,EAAE,kBAAkB,CAAC,iBAAiB,CA4C3E,CAAC"}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { l1ReaderConfigMappings } from '@aztec/ethereum';
|
|
2
|
+
import { booleanConfigHelper, getDefaultConfig, numberConfigHelper } from '@aztec/foundation/config';
|
|
3
|
+
import { dataConfigMappings } from '@aztec/kv-store/config';
|
|
4
|
+
import { ProvingRequestType } from '@aztec/stdlib/proofs';
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
/**
 * Runtime-validated (zod) schema for the configuration of a locally started prover broker.
 * All numeric settings are non-negative integers; `dataDirectory` is optional — when absent
 * the broker falls back to an in-memory database (see createAndStartProvingBroker).
 */
export const ProverBrokerConfig = z.object({
    /** If starting a prover broker locally, the max number of retries per proving job */
    proverBrokerJobMaxRetries: z.number().int().nonnegative(),
    /** If starting a prover broker locally, the time after which a job times out and gets assigned to a different agent */
    proverBrokerJobTimeoutMs: z.number().int().nonnegative(),
    /** If starting a prover broker locally, the interval the broker checks for timed out jobs */
    proverBrokerPollIntervalMs: z.number().int().nonnegative(),
    /** If starting a prover broker locally, the directory to store broker data */
    dataDirectory: z.string().optional(),
    /** The size of the data store map */
    dataStoreMapSizeKB: z.number().int().nonnegative(),
    /** The prover broker may batch jobs together before writing to the database */
    proverBrokerBatchSize: z.number().int().nonnegative(),
    /** How often the job batches get flushed */
    proverBrokerBatchIntervalMs: z.number().int().nonnegative(),
    /** The maximum number of epochs to keep results for */
    proverBrokerMaxEpochsToKeepResultsFor: z.number().int().nonnegative()
});
|
|
16
|
+
/**
 * Maps each ProverBrokerConfig field to its environment variable, help description and
 * default-value/parse helper. Also spreads in the shared L1-reader and data-store mappings
 * (which cover `dataDirectory` / `dataStoreMapSizeKB`).
 */
export const proverBrokerConfigMappings = {
    proverBrokerJobTimeoutMs: {
        env: 'PROVER_BROKER_JOB_TIMEOUT_MS',
        description: 'Jobs are retried if not kept alive for this long',
        ...numberConfigHelper(30_000)
    },
    proverBrokerPollIntervalMs: {
        env: 'PROVER_BROKER_POLL_INTERVAL_MS',
        description: 'The interval to check job health status',
        ...numberConfigHelper(1_000)
    },
    proverBrokerJobMaxRetries: {
        env: 'PROVER_BROKER_JOB_MAX_RETRIES',
        description: 'If starting a prover broker locally, the max number of retries per proving job',
        ...numberConfigHelper(3)
    },
    proverBrokerBatchSize: {
        env: 'PROVER_BROKER_BATCH_SIZE',
        description: 'The prover broker writes jobs to disk in batches',
        ...numberConfigHelper(100)
    },
    proverBrokerBatchIntervalMs: {
        env: 'PROVER_BROKER_BATCH_INTERVAL_MS',
        description: 'How often to flush batches to disk',
        ...numberConfigHelper(50)
    },
    proverBrokerMaxEpochsToKeepResultsFor: {
        env: 'PROVER_BROKER_MAX_EPOCHS_TO_KEEP_RESULTS_FOR',
        description: 'The maximum number of epochs to keep results for',
        ...numberConfigHelper(1)
    },
    // Shared mappings imported from @aztec/ethereum and @aztec/kv-store.
    ...l1ReaderConfigMappings,
    ...dataConfigMappings
};
// Default broker config values derived from the mappings' default-value helpers above.
export const defaultProverBrokerConfig = getDefaultConfig(proverBrokerConfigMappings);
|
|
51
|
+
/**
 * Runtime-validated (zod) schema for prover agent configuration.
 */
export const ProverAgentConfig = z.object({
    /** The number of prover agents to start */
    proverAgentCount: z.number(),
    /** The types of proofs the prover agent can generate */
    proverAgentProofTypes: z.array(z.nativeEnum(ProvingRequestType)),
    /** How often the prover agents poll for jobs */
    proverAgentPollIntervalMs: z.number(),
    /** The URL where this agent takes jobs from */
    proverBrokerUrl: z.string().optional(),
    /** Whether to construct real proofs */
    realProofs: z.boolean(),
    /** The type of artificial delay to introduce */
    proverTestDelayType: z.enum([
        'fixed',
        'realistic'
    ]),
    /** If using fixed delay, the time each operation takes. */
    proverTestDelayMs: z.number(),
    /** If using realistic delays, what percentage of realistic times to apply. */
    proverTestDelayFactor: z.number()
});
|
|
64
|
+
/**
 * Maps each ProverAgentConfig field to its environment variable, help description and
 * default-value/parse helper.
 */
export const proverAgentConfigMappings = {
    proverAgentCount: {
        env: 'PROVER_AGENT_COUNT',
        // Fixed stale description: the previous text ("Whether this prover has a local
        // prover agent") described a boolean, but this field is a count (see schema).
        description: 'The number of prover agents to start',
        ...numberConfigHelper(1)
    },
    proverAgentPollIntervalMs: {
        env: 'PROVER_AGENT_POLL_INTERVAL_MS',
        description: 'The interval agents poll for jobs at',
        ...numberConfigHelper(100)
    },
    proverAgentProofTypes: {
        env: 'PROVER_AGENT_PROOF_TYPES',
        description: 'The types of proofs the prover agent can generate',
        // Parses a comma-separated list of ProvingRequestType enum names. NOTE(review):
        // names that don't resolve to a numeric enum value are silently dropped by the
        // filter rather than reported — confirm this is the intended behavior.
        parseEnv: (val)=>val.split(',').map((v)=>ProvingRequestType[v]).filter((v)=>typeof v === 'number')
    },
    proverBrokerUrl: {
        env: 'PROVER_BROKER_HOST',
        description: 'The URL where this agent takes jobs from'
    },
    realProofs: {
        env: 'PROVER_REAL_PROOFS',
        description: 'Whether to construct real proofs',
        ...booleanConfigHelper(false)
    },
    proverTestDelayType: {
        env: 'PROVER_TEST_DELAY_TYPE',
        description: 'The type of artificial delay to introduce',
        defaultValue: 'fixed'
    },
    proverTestDelayMs: {
        env: 'PROVER_TEST_DELAY_MS',
        description: 'Artificial delay to introduce to all operations to the test prover.',
        ...numberConfigHelper(0)
    },
    proverTestDelayFactor: {
        env: 'PROVER_TEST_DELAY_FACTOR',
        description: 'If using realistic delays, what percentage of realistic times to apply.',
        ...numberConfigHelper(1)
    }
};
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import type { TelemetryClient } from '@aztec/telemetry-client';
|
|
2
|
+
import type { ProverBrokerConfig } from './config.js';
|
|
3
|
+
import { ProvingBroker } from './proving_broker.js';
|
|
4
|
+
export declare function createAndStartProvingBroker(config: ProverBrokerConfig, client: TelemetryClient): Promise<ProvingBroker>;
|
|
5
|
+
//# sourceMappingURL=factory.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../../src/proving_broker/factory.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAE/D,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,aAAa,CAAC;AACtD,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AAIpD,wBAAsB,2BAA2B,CAC/C,MAAM,EAAE,kBAAkB,EAC1B,MAAM,EAAE,eAAe,GACtB,OAAO,CAAC,aAAa,CAAC,CAOxB"}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { ProvingBroker } from './proving_broker.js';
|
|
2
|
+
import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js';
|
|
3
|
+
import { KVBrokerDatabase } from './proving_broker_database/persisted.js';
|
|
4
|
+
/**
 * Instantiates a ProvingBroker backed by either a persisted KV database (when
 * `config.dataDirectory` is set) or an in-memory database, starts it, and returns it.
 *
 * @param config - Broker configuration (see ProverBrokerConfig).
 * @param client - Telemetry client forwarded to the database and the broker.
 * @returns The started ProvingBroker instance.
 */
export async function createAndStartProvingBroker(config, client) {
    // Persist broker state on disk only when a data directory was configured.
    let database;
    if (config.dataDirectory) {
        database = await KVBrokerDatabase.new(config, client);
    } else {
        database = new InMemoryBrokerDatabase();
    }
    const broker = new ProvingBroker(database, config, client);
    await broker.start();
    return broker;
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import { type ProofUri, type ProvingJobId } from '@aztec/stdlib/interfaces/server';
|
|
2
|
+
export declare function makeRandomProvingJobId(epochNumber?: number): ProvingJobId;
|
|
3
|
+
export declare function makeInputsUri(): ProofUri;
|
|
4
|
+
export declare function makeOutputsUri(): ProofUri;
|
|
5
|
+
//# sourceMappingURL=fixtures.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fixtures.d.ts","sourceRoot":"","sources":["../../src/proving_broker/fixtures.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,QAAQ,EAAE,KAAK,YAAY,EAAoB,MAAM,iCAAiC,CAAC;AAGrG,wBAAgB,sBAAsB,CAAC,WAAW,CAAC,EAAE,MAAM,GAAG,YAAY,CAEzE;AAED,wBAAgB,aAAa,IAAI,QAAQ,CAExC;AAED,wBAAgB,cAAc,IAAI,QAAQ,CAEzC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { randomBytes } from '@aztec/foundation/crypto';
|
|
2
|
+
import { makeProvingJobId } from '@aztec/stdlib/interfaces/server';
|
|
3
|
+
import { ProvingRequestType } from '@aztec/stdlib/proofs';
|
|
4
|
+
/**
 * Builds a proving-job id for test fixtures: BASE_PARITY proof type with a
 * random 8-byte hex suffix.
 *
 * @param epochNumber - Epoch number to embed in the id; defaults to 1 when omitted.
 * @returns A ProvingJobId.
 */
export function makeRandomProvingJobId(epochNumber) {
    const epoch = epochNumber ?? 1;
    const suffix = randomBytes(8).toString('hex');
    return makeProvingJobId(epoch, ProvingRequestType.BASE_PARITY, suffix);
}
|
|
7
|
+
/**
 * Returns a random 16-character hex string usable as a proof-inputs URI in tests.
 * @returns A ProofUri.
 */
export function makeInputsUri() {
    const bytes = randomBytes(8);
    return bytes.toString('hex');
}
|
|
10
|
+
/**
 * Returns a random 16-character hex string usable as a proof-outputs URI in tests.
 * @returns A ProofUri.
 */
export function makeOutputsUri() {
    const bytes = randomBytes(8);
    return bytes.toString('hex');
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export * from './proving_agent.js';
|
|
2
|
+
export * from './proving_broker.js';
|
|
3
|
+
export * from './rpc.js';
|
|
4
|
+
export * from './proving_broker_database.js';
|
|
5
|
+
export * from './proving_broker_database/memory.js';
|
|
6
|
+
export * from './proving_broker_database/persisted.js';
|
|
7
|
+
export * from './proof_store/index.js';
|
|
8
|
+
export * from './factory.js';
|
|
9
|
+
export * from './config.js';
|
|
10
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/proving_broker/index.ts"],"names":[],"mappings":"AAAA,cAAc,oBAAoB,CAAC;AACnC,cAAc,qBAAqB,CAAC;AACpC,cAAc,UAAU,CAAC;AACzB,cAAc,8BAA8B,CAAC;AAC7C,cAAc,qCAAqC,CAAC;AACpD,cAAc,wCAAwC,CAAC;AACvD,cAAc,wBAAwB,CAAC;AACvC,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC"}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
// Barrel file re-exporting the public surface of the proving-broker subsystem:
// agent, broker, RPC bindings, database implementations (in-memory and persisted),
// proof stores, factory and config.
export * from './proving_agent.js';
export * from './proving_broker.js';
export * from './rpc.js';
export * from './proving_broker_database.js';
export * from './proving_broker_database/memory.js';
export * from './proving_broker_database/persisted.js';
export * from './proof_store/index.js';
export * from './factory.js';
export * from './config.js';
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { GoogleCloudStorageProofStore } from './gcs_proof_store.js';
|
|
2
|
+
import { InlineProofStore } from './inline_proof_store.js';
|
|
3
|
+
import type { ProofStore } from './proof_store.js';
|
|
4
|
+
export declare function createProofStore(config: string | undefined, logger?: import("@aztec/foundation/log").Logger): InlineProofStore | GoogleCloudStorageProofStore;
|
|
5
|
+
export declare function createProofStoreForUri(uri: string, logger?: import("@aztec/foundation/log").Logger): Pick<ProofStore, 'getProofInput' | 'getProofOutput'>;
|
|
6
|
+
//# sourceMappingURL=factory.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../../../src/proving_broker/proof_store/factory.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,4BAA4B,EAAE,MAAM,sBAAsB,CAAC;AACpE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAC3D,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAEnD,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,EAAE,MAAM,yCAA4C,mDAmB9G;AAED,wBAAgB,sBAAsB,CACpC,GAAG,EAAE,MAAM,EACX,MAAM,yCAA4C,GACjD,IAAI,CAAC,UAAU,EAAE,eAAe,GAAG,gBAAgB,CAAC,CAWtD"}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
2
|
+
import { GoogleCloudStorageProofStore } from './gcs_proof_store.js';
|
|
3
|
+
import { InlineProofStore } from './inline_proof_store.js';
|
|
4
|
+
/**
 * Creates a proof store from a config string.
 *
 * @param config - `undefined` for an inline (data-in-URI) store, or a
 *                 `gs://bucket-name/path/to/store` URL for a Google Cloud Storage store.
 * @param logger - Optional logger; defaults to a 'prover-client:proof-store' logger.
 * @returns An InlineProofStore or a GoogleCloudStorageProofStore.
 * @throws Error if the config is a malformed gs:// URL or uses an unknown scheme.
 */
export function createProofStore(config, logger = createLogger('prover-client:proof-store')) {
    if (config === undefined) {
        logger.info('Creating inline proof store');
        return new InlineProofStore();
    } else if (config.startsWith('gs://')) {
        try {
            const url = new URL(config);
            const bucket = url.host;
            // Strip leading slashes so the path is relative to the bucket root.
            const path = url.pathname.replace(/^\/+/, '');
            logger.info(`Creating google cloud proof store at ${bucket}`, {
                bucket,
                path
            });
            return new GoogleCloudStorageProofStore(bucket, path);
        } catch (err) {
            // Preserve the underlying error (previously discarded) so callers can see
            // why the URL failed to parse.
            throw new Error(`Invalid google cloud proof store definition: '${config}'. Supported values are 'gs://bucket-name/path/to/store'.`, { cause: err });
        }
    } else {
        throw new Error(`Unknown proof store config: '${config}'. Supported values are 'gs://bucket-name/path/to/store'.`);
    }
}
|
|
25
|
+
/**
 * Creates a read-only proof store capable of resolving a specific proof input/output URI.
 *
 * For `gs://` URIs the last three path segments (the per-job suffix appended when the
 * proof was stored) are stripped to recover the store's base path.
 *
 * @param uri - A `data://` (inline) or `gs://` proof URI.
 * @param logger - Optional logger; defaults to a 'prover-client:proof-store' logger.
 * @returns A store exposing getProofInput / getProofOutput.
 * @throws Error on unsupported URI schemes.
 */
export function createProofStoreForUri(uri, logger = createLogger('prover-client:proof-store')) {
    if (uri.startsWith('data://')) {
        return createProofStore(undefined, logger);
    } else if (uri.startsWith('gs://')) {
        const url = new URL(uri);
        const basePath = url.pathname.replace(/^\/+/, '').split('/').slice(0, -3);
        url.pathname = basePath.join('/');
        // Bug fix: the original computed the trimmed base URL above but then passed the
        // raw `uri` to createProofStore, leaving the computation dead and pointing the
        // store at the job-specific path instead of the store root.
        return createProofStore(url.toString(), logger);
    } else {
        throw new Error(`Unknown proof store config: '${uri}'. Supported protocols are 'data://' and 'gs://'.`);
    }
}
|