@aztec/prover-node 0.86.0 → 0.87.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/actions/download-epoch-proving-job.d.ts +18 -0
- package/dest/actions/download-epoch-proving-job.d.ts.map +1 -0
- package/dest/actions/download-epoch-proving-job.js +37 -0
- package/dest/actions/index.d.ts +3 -0
- package/dest/actions/index.d.ts.map +1 -0
- package/dest/actions/index.js +2 -0
- package/dest/actions/rerun-epoch-proving-job.d.ts +11 -0
- package/dest/actions/rerun-epoch-proving-job.d.ts.map +1 -0
- package/dest/actions/rerun-epoch-proving-job.js +40 -0
- package/dest/actions/upload-epoch-proof-failure.d.ts +15 -0
- package/dest/actions/upload-epoch-proof-failure.d.ts.map +1 -0
- package/dest/actions/upload-epoch-proof-failure.js +78 -0
- package/dest/bin/run-failed-epoch.d.ts +2 -0
- package/dest/bin/run-failed-epoch.d.ts.map +1 -0
- package/dest/bin/run-failed-epoch.js +67 -0
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +5 -0
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +5 -5
- package/dest/index.d.ts +1 -0
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -0
- package/dest/job/epoch-proving-job-data.d.ts +15 -0
- package/dest/job/epoch-proving-job-data.d.ts.map +1 -0
- package/dest/job/epoch-proving-job-data.js +45 -0
- package/dest/job/epoch-proving-job.d.ts +10 -9
- package/dest/job/epoch-proving-job.d.ts.map +1 -1
- package/dest/job/epoch-proving-job.js +41 -24
- package/dest/metrics.js +2 -2
- package/dest/prover-coordination/combined-prover-coordination.d.ts.map +1 -1
- package/dest/prover-coordination/combined-prover-coordination.js +7 -4
- package/dest/prover-coordination/config.d.ts.map +1 -1
- package/dest/prover-coordination/config.js +2 -1
- package/dest/prover-coordination/factory.d.ts.map +1 -1
- package/dest/prover-coordination/factory.js +8 -4
- package/dest/prover-node.d.ts +23 -18
- package/dest/prover-node.d.ts.map +1 -1
- package/dest/prover-node.js +88 -37
- package/dest/test/index.d.ts +4 -2
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +1 -1
- package/package.json +25 -24
- package/src/actions/download-epoch-proving-job.ts +44 -0
- package/src/actions/index.ts +2 -0
- package/src/actions/rerun-epoch-proving-job.ts +61 -0
- package/src/actions/upload-epoch-proof-failure.ts +88 -0
- package/src/bin/run-failed-epoch.ts +77 -0
- package/src/config.ts +7 -1
- package/src/factory.ts +21 -7
- package/src/index.ts +1 -0
- package/src/job/epoch-proving-job-data.ts +68 -0
- package/src/job/epoch-proving-job.ts +55 -23
- package/src/metrics.ts +2 -2
- package/src/prover-coordination/combined-prover-coordination.ts +9 -6
- package/src/prover-coordination/config.ts +1 -0
- package/src/prover-coordination/factory.ts +7 -4
- package/src/prover-node.ts +120 -53
- package/src/test/index.ts +7 -4
package/dest/actions/download-epoch-proving-job.d.ts
ADDED
@@ -0,0 +1,18 @@
+import type { Logger } from '@aztec/foundation/log';
+/**
+ * Given a location returned by `uploadEpochProofFailure`, downloads the world state and archiver snapshots
+ * and the proving job data, so we can re-run the job later using `rerunEpochProvingJob`. This is decoupled
+ * from actually proving so we can download once and run multiple times.
+ */
+export declare function downloadEpochProvingJob(location: string, log: Logger, config: {
+    dataDirectory: string;
+    jobDataDownloadPath: string;
+}): Promise<{
+    l2BlockNumber: number;
+    l2BlockHash: string;
+    rollupAddress: import("@aztec/foundation/schemas").EthAddress;
+    l1ChainId: number;
+    rollupVersion: number;
+    l1BlockNumber: number;
+}>;
+//# sourceMappingURL=download-epoch-proving-job.d.ts.map
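This declaration is the download half of a two-step recovery flow (upload on failure, download and re-run later). A minimal usage sketch; the location URL and local paths are illustrative, not part of the package:

import { createLogger } from '@aztec/foundation/log';
import { downloadEpochProvingJob } from '@aztec/prover-node';

// Hypothetical failure-upload URL, as returned by uploadEpochProofFailure.
const location = 'https://example-store/failed-epochs/42-2025-01-01-job1';
const log = createLogger('example:download');
const metadata = await downloadEpochProvingJob(location, log, {
    dataDirectory: './data/state', // receives the world state and archiver snapshots
    jobDataDownloadPath: './data/data.bin', // receives the serialized proving job
});
log.info(`Downloaded job for rollup ${metadata.rollupAddress.toString()} at L2 block ${metadata.l2BlockNumber}`);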
package/dest/actions/download-epoch-proving-job.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"download-epoch-proving-job.d.ts","sourceRoot":"","sources":["../../src/actions/download-epoch-proving-job.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AAUpD;;;;GAIG;AACH,wBAAsB,uBAAuB,CAC3C,QAAQ,EAAE,MAAM,EAChB,GAAG,EAAE,MAAM,EACX,MAAM,EAAE;IACN,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;CAC7B;;;;;;;GAqBF"}
package/dest/actions/download-epoch-proving-job.js
ADDED
@@ -0,0 +1,37 @@
+import { jsonParseWithSchemaSync } from '@aztec/foundation/json-rpc';
+import { urlJoin } from '@aztec/foundation/string';
+import { snapshotSync } from '@aztec/node-lib/actions';
+import { createReadOnlyFileStore } from '@aztec/stdlib/file-store';
+import { UploadSnapshotMetadataSchema, makeSnapshotPaths } from '@aztec/stdlib/snapshots';
+import { readFileSync } from 'fs';
+import { deserializeEpochProvingJobData } from '../job/epoch-proving-job-data.js';
+/**
+ * Given a location returned by `uploadEpochProofFailure`, downloads the world state and archiver snapshots
+ * and the proving job data, so we can re-run the job later using `rerunEpochProvingJob`. This is decoupled
+ * from actually proving so we can download once and run multiple times.
+ */ export async function downloadEpochProvingJob(location, log, config) {
+    log.info(`Downloading epoch proving job data from ${location}`);
+    const fileStore = await createReadOnlyFileStore(location);
+    const metadataUrl = urlJoin(location, 'metadata.json');
+    const metadataRaw = await fileStore.read(metadataUrl);
+    const metadata = jsonParseWithSchemaSync(metadataRaw.toString(), UploadSnapshotMetadataSchema);
+    const dataUrls = makeSnapshotPaths(location);
+    log.info(`Downloading state snapshot from ${location} to local data directory`, {
+        metadata,
+        dataUrls
+    });
+    await snapshotSync({
+        dataUrls
+    }, log, {
+        ...config,
+        ...metadata,
+        snapshotsUrl: location
+    });
+    const dataPath = urlJoin(location, 'data.bin');
+    const localPath = config.jobDataDownloadPath;
+    log.info(`Downloading epoch proving job data from ${dataPath} to ${localPath}`);
+    await fileStore.download(dataPath, localPath);
+    const jobData = deserializeEpochProvingJobData(readFileSync(localPath));
+    log.info(`Epoch proving job data for epoch ${jobData.epochNumber} downloaded successfully`);
+    return metadata;
+}
package/dest/actions/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/actions/index.ts"],"names":[],"mappings":"AAAA,cAAc,iCAAiC,CAAC;AAChD,cAAc,8BAA8B,CAAC"}
package/dest/actions/rerun-epoch-proving-job.d.ts
ADDED
@@ -0,0 +1,11 @@
+import type { Logger } from '@aztec/foundation/log';
+import type { DataStoreConfig } from '@aztec/kv-store/config';
+import { type ProverClientConfig } from '@aztec/prover-client';
+import { ProverBrokerConfig } from '@aztec/prover-client/broker';
+/**
+ * Given a local folder where `downloadEpochProvingJob` was called, creates a new archiver and world state
+ * using the state snapshots, and creates a new epoch proving job to prove the downloaded proving job.
+ * Proving is done with a local proving broker and agents as specified by the config.
+ */
+export declare function rerunEpochProvingJob(localPath: string, log: Logger, config: DataStoreConfig & ProverBrokerConfig & ProverClientConfig): Promise<"initialized" | "processing" | "awaiting-prover" | "publishing-proof" | "completed" | "failed" | "stopped" | "timed-out" | "reorg">;
+//# sourceMappingURL=rerun-epoch-proving-job.d.ts.map
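A hedged sketch of re-running a downloaded job, mirroring what the `run-failed-epoch` binary further down does; spreading `getProverNodeConfigFromEnv()` to satisfy the broker and prover-client config intersection is an assumption carried over from that script:

import { createLogger } from '@aztec/foundation/log';
import { getProverNodeConfigFromEnv, rerunEpochProvingJob } from '@aztec/prover-node';

const log = createLogger('example:rerun');
// Points at the data.bin written by downloadEpochProvingJob; state snapshots live next to it.
const state = await rerunEpochProvingJob('./data/data.bin', log, {
    ...getProverNodeConfigFromEnv(),
    dataDirectory: './data/state',
});
if (state !== 'completed') {
    log.error(`Re-run finished in terminal state ${state}`);
}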
package/dest/actions/rerun-epoch-proving-job.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"rerun-epoch-proving-job.d.ts","sourceRoot":"","sources":["../../src/actions/rerun-epoch-proving-job.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,KAAK,kBAAkB,EAAsB,MAAM,sBAAsB,CAAC;AACnF,OAAO,EAAE,kBAAkB,EAA+B,MAAM,6BAA6B,CAAC;AAW9F;;;;GAIG;AACH,wBAAsB,oBAAoB,CACxC,SAAS,EAAE,MAAM,EACjB,GAAG,EAAE,MAAM,EACX,MAAM,EAAE,eAAe,GAAG,kBAAkB,GAAG,kBAAkB,+IAqClE"}
package/dest/actions/rerun-epoch-proving-job.js
ADDED
@@ -0,0 +1,40 @@
+import { createArchiverStore } from '@aztec/archiver';
+import { createProverClient } from '@aztec/prover-client';
+import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
+import { PublicProcessorFactory } from '@aztec/simulator/server';
+import { getTelemetryClient } from '@aztec/telemetry-client';
+import { createWorldState } from '@aztec/world-state';
+import { readFileSync } from 'fs';
+import { deserializeEpochProvingJobData } from '../job/epoch-proving-job-data.js';
+import { EpochProvingJob } from '../job/epoch-proving-job.js';
+import { ProverNodeJobMetrics } from '../metrics.js';
+/**
+ * Given a local folder where `downloadEpochProvingJob` was called, creates a new archiver and world state
+ * using the state snapshots, and creates a new epoch proving job to prove the downloaded proving job.
+ * Proving is done with a local proving broker and agents as specified by the config.
+ */ export async function rerunEpochProvingJob(localPath, log, config) {
+    const jobData = deserializeEpochProvingJobData(readFileSync(localPath));
+    log.info(`Loaded proving job data for epoch ${jobData.epochNumber}`);
+    const telemetry = getTelemetryClient();
+    const metrics = new ProverNodeJobMetrics(telemetry.getMeter('prover-job'), telemetry.getTracer('prover-job'));
+    const worldState = await createWorldState(config);
+    const archiver = await createArchiverStore(config);
+    const publicProcessorFactory = new PublicProcessorFactory(archiver);
+    const publisher = {
+        submitEpochProof: ()=>Promise.resolve(true)
+    };
+    const l2BlockSourceForReorgDetection = undefined;
+    const deadline = undefined;
+    // This starts a local proving broker that does not get exposed as a service. This should be good enough for
+    // smallish epochs to be proven if we run on a large machine, but as epochs grow larger, we may want to switch
+    // this out for a live proving broker with multiple agents that we can connect to.
+    const broker = await createAndStartProvingBroker(config, telemetry);
+    const prover = await createProverClient(config, worldState, broker, telemetry);
+    const provingJob = new EpochProvingJob(jobData, worldState, prover.createEpochProver(), publicProcessorFactory, publisher, l2BlockSourceForReorgDetection, metrics, deadline, {
+        skipEpochCheck: true
+    });
+    log.info(`Rerunning epoch proving job for epoch ${jobData.epochNumber}`);
+    await provingJob.run();
+    log.info(`Completed job for epoch ${jobData.epochNumber} with status ${provingJob.getState()}`);
+    return provingJob.getState();
+}
package/dest/actions/upload-epoch-proof-failure.d.ts
ADDED
@@ -0,0 +1,15 @@
+import { type Archiver } from '@aztec/archiver';
+import type { Logger } from '@aztec/foundation/log';
+import type { DataStoreConfig } from '@aztec/kv-store/config';
+import type { ChainConfig } from '@aztec/stdlib/config';
+import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server';
+import { type EpochProvingJobData } from '../job/epoch-proving-job-data.js';
+type UploadEpochProofConfig = Pick<ChainConfig, 'l1ChainId' | 'rollupVersion'> & Pick<DataStoreConfig, 'dataDirectory'>;
+/**
+ * Uploads a snapshot of world state and archiver (requires pausing them) along with the proving job data,
+ * so we can download and re-run the job later under the same conditions.
+ * @param location The location to upload the data to (used to create the `FileStore`).
+ */
+export declare function uploadEpochProofFailure(location: string, jobId: string, jobData: EpochProvingJobData, archiver: Archiver, worldState: WorldStateSynchronizer, config: UploadEpochProofConfig, log: Logger): Promise<string>;
+export {};
+//# sourceMappingURL=upload-epoch-proof-failure.d.ts.map
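A sketch of the failure-upload call site; the surrounding prover-node wiring is assumed (declared as stand-ins so the sketch type-checks in isolation), and the root re-export of `uploadEpochProofFailure` is inferred from the new actions barrel:

import { createLogger } from '@aztec/foundation/log';
import { uploadEpochProofFailure } from '@aztec/prover-node';

// Stand-ins for values a live prover node would have in scope.
declare const archiver: any;
declare const worldState: any;
declare const job: { getId(): string; getProvingData(): any };
declare const config: {
    proverNodeFailedEpochStore: string | undefined;
    l1ChainId: number;
    rollupVersion: number;
    dataDirectory: string | undefined;
};

const log = createLogger('example:upload-failure');
if (config.proverNodeFailedEpochStore) {
    const baseUrl = await uploadEpochProofFailure(
        config.proverNodeFailedEpochStore,
        job.getId(),
        job.getProvingData(),
        archiver,
        worldState,
        config,
        log,
    );
    log.warn(`Epoch proof failure data uploaded to ${baseUrl}`);
}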
package/dest/actions/upload-epoch-proof-failure.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"upload-epoch-proof-failure.d.ts","sourceRoot":"","sources":["../../src/actions/upload-epoch-proof-failure.ts"],"names":[],"mappings":"AAAA,OAAO,EAAuB,KAAK,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAGrE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AAEpD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAE9D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAExD,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAQ9E,OAAO,EAAE,KAAK,mBAAmB,EAAgC,MAAM,kCAAkC,CAAC;AAE1G,KAAK,sBAAsB,GAAG,IAAI,CAAC,WAAW,EAAE,WAAW,GAAG,eAAe,CAAC,GAAG,IAAI,CAAC,eAAe,EAAE,eAAe,CAAC,CAAC;AAKxH;;;;GAIG;AACH,wBAAsB,uBAAuB,CAC3C,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,mBAAmB,EAC5B,QAAQ,EAAE,QAAQ,EAClB,UAAU,EAAE,sBAAsB,EAClC,MAAM,EAAE,sBAAsB,EAC9B,GAAG,EAAE,MAAM,mBAuCZ"}
package/dest/actions/upload-epoch-proof-failure.js
ADDED
@@ -0,0 +1,78 @@
+import { ARCHIVER_DB_VERSION } from '@aztec/archiver';
+import { tryRmDir } from '@aztec/foundation/fs';
+import { jsonStringify } from '@aztec/foundation/json-rpc';
+import { isoDate } from '@aztec/foundation/string';
+import { buildSnapshotMetadata, createBackups } from '@aztec/node-lib/actions';
+import { createFileStore } from '@aztec/stdlib/file-store';
+import { getBasePath, uploadSnapshotData } from '@aztec/stdlib/snapshots';
+import { WORLD_STATE_DB_VERSION } from '@aztec/world-state';
+import { mkdtemp } from 'fs/promises';
+import { tmpdir } from 'os';
+import { dirname, join } from 'path';
+import { serializeEpochProvingJobData } from '../job/epoch-proving-job-data.js';
+/** Whether uploaded data to the file store should be of public access. */ const PUBLIC_UPLOADS = true;
+/**
+ * Uploads a snapshot of world state and archiver (requires pausing them) along with the proving job data,
+ * so we can download and re-run the job later under the same conditions.
+ * @param location The location to upload the data to (used to create the `FileStore`).
+ */ export async function uploadEpochProofFailure(location, jobId, jobData, archiver, worldState, config, log) {
+    const epochNumber = jobData.epochNumber;
+    log.warn(`Uploading epoch proof failure for ${epochNumber} to ${location}`, {
+        epochNumber,
+        jobId,
+        location
+    });
+    const backupDir = await mkdtemp(join(config.dataDirectory ?? tmpdir(), 'epoch-proof-data-'));
+    const store = await createFileStore(location);
+    if (!store) {
+        throw new Error(`Failed to create file store for epoch proof failure upload for location ${location}.`);
+    }
+    try {
+        const versions = {
+            archiver: ARCHIVER_DB_VERSION,
+            worldState: WORLD_STATE_DB_VERSION
+        };
+        const uploadMetadata = await buildSnapshotMetadata(archiver, config);
+        const paths = await createBackups(backupDir, archiver, worldState, log);
+        const basePath = `${getBasePath(uploadMetadata)}/${epochNumber}-${isoDate()}-${jobId}`;
+        const pathFor = (key)=>`${basePath}/${key}.db`;
+        const [metadata, dataUrl, metadataUrl] = await Promise.all([
+            uploadSnapshotData(paths, versions, uploadMetadata, store, {
+                pathFor,
+                private: !PUBLIC_UPLOADS
+            }),
+            uploadJobData(jobData, store, basePath),
+            uploadSnapshotMetadata(uploadMetadata, store, basePath)
+        ]);
+        const baseUrl = dirname(metadataUrl);
+        log.warn(`Uploaded epoch ${epochNumber} proof failure data to ${baseUrl}`, {
+            epochNumber,
+            location,
+            basePath,
+            metadataUrl,
+            dataUrl,
+            metadata,
+            jobId
+        });
+        return baseUrl;
+    } finally{
+        log.info(`Cleaning up backup dir ${backupDir}`);
+        await tryRmDir(backupDir, log);
+    }
+}
+async function uploadJobData(jobData, store, basePath) {
+    const data = serializeEpochProvingJobData(jobData);
+    const path = `${basePath}/data.bin`;
+    return await store.save(path, data, {
+        compress: true,
+        public: PUBLIC_UPLOADS
+    });
+}
+async function uploadSnapshotMetadata(metadata, store, basePath) {
+    const data = Buffer.from(jsonStringify(metadata), 'utf-8');
+    const path = `${basePath}/metadata.json`;
+    return await store.save(path, data, {
+        compress: false,
+        public: PUBLIC_UPLOADS
+    });
+}
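From the paths above, a single failure upload lays the store out roughly as follows; the snapshot keys come from `createBackups` and are not visible in this diff, so they are shown generically:

// <getBasePath(metadata)>/<epochNumber>-<isoDate()>-<jobId>/
//     <key>.db       one backup file per snapshot key (named via pathFor)
//     data.bin       serializeEpochProvingJobData output, saved compressed
//     metadata.json  the UploadSnapshotMetadata, saved uncompressed
// downloadEpochProvingJob later reads metadata.json and data.bin from this same
// base URL, which uploadEpochProofFailure returns as dirname(metadataUrl).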
package/dest/bin/run-failed-epoch.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"run-failed-epoch.d.ts","sourceRoot":"","sources":["../../src/bin/run-failed-epoch.ts"],"names":[],"mappings":""}
package/dest/bin/run-failed-epoch.js
ADDED
@@ -0,0 +1,67 @@
+/* eslint-disable no-console */ import { Fr } from '@aztec/foundation/fields';
+import { jsonParseWithSchemaSync, jsonStringify } from '@aztec/foundation/json-rpc';
+import { createLogger } from '@aztec/foundation/log';
+import { downloadEpochProvingJob, getProverNodeConfigFromEnv, rerunEpochProvingJob } from '@aztec/prover-node';
+import { UploadSnapshotMetadataSchema } from '@aztec/stdlib/snapshots';
+import { existsSync, mkdirSync } from 'fs';
+import { readFile, writeFile } from 'fs/promises';
+import { basename, join } from 'path';
+const logger = createLogger('prover-node:run-failed-epoch');
+function printUsage() {
+    console.error('Usage: run-failed-epoch <proof-uri> [out-dir=./data]');
+}
+async function rerunFailedEpoch(provingJobUrl, baseLocalDir) {
+    const localDir = join(baseLocalDir, basename(provingJobUrl));
+    const jobPath = join(localDir, 'data.bin');
+    const dataDir = join(localDir, 'state');
+    const env = getProverNodeConfigFromEnv();
+    const config = {
+        ...getProverNodeConfigFromEnv(),
+        dataDirectory: dataDir,
+        dataStoreMapSizeKB: env.dataStoreMapSizeKB ?? 1024 * 1024,
+        proverId: env.proverId ?? Fr.random()
+    };
+    let metadata;
+    const metadataPath = join(localDir, 'metadata.json');
+    if (existsSync(metadataPath)) {
+        logger.info(`Using downloaded data`);
+        metadata = jsonParseWithSchemaSync(await readFile(metadataPath, 'utf-8'), UploadSnapshotMetadataSchema);
+    } else {
+        logger.info(`Downloading epoch proving job data and state from ${provingJobUrl} to ${localDir}`);
+        metadata = await downloadEpochProvingJob(provingJobUrl, logger, {
+            jobDataDownloadPath: jobPath,
+            dataDirectory: dataDir
+        });
+        await writeFile(metadataPath, jsonStringify(metadata, true));
+        logger.info(`Download to ${localDir} complete`);
+    }
+    logger.info(`Rerunning proving job from ${jobPath} with state from ${dataDir}`, metadata);
+    const result = await rerunEpochProvingJob(jobPath, logger, {
+        ...config,
+        l1Contracts: {
+            rollupAddress: metadata.rollupAddress
+        },
+        rollupVersion: metadata.rollupVersion
+    });
+    console.error(`Epoch proving job complete with result ${result}`);
+}
+async function main() {
+    if (process.argv[2] === '--help') {
+        printUsage();
+        return;
+    }
+    const uri = process.argv[2];
+    const outDir = process.argv[3] || './data';
+    if (!uri) {
+        printUsage();
+        throw new Error('Missing URL to epoch proving job');
+    }
+    mkdirSync(outDir, {
+        recursive: true
+    });
+    await rerunFailedEpoch(uri, outDir);
+}
+main().catch((err)=>{
+    console.error(err);
+    process.exit(1);
+});
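The binary ties the two actions together. Given the base URL returned by `uploadEpochProofFailure`, an invocation along the lines of `node dest/bin/run-failed-epoch.js <proof-uri> ./data` (the exact entry-point path is an assumption) downloads into `./data/<basename of the URI>` on the first run and skips straight to proving on later runs, since the cached `metadata.json` short-circuits the download step.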
package/dest/config.d.ts
CHANGED
@@ -11,10 +11,11 @@ import { type PublisherConfig, type TxSenderConfig } from '@aztec/sequencer-clie
 import { type WorldStateConfig } from '@aztec/world-state/config';
 import { type ProverCoordinationConfig } from './prover-coordination/config.js';
 export type ProverNodeConfig = ArchiverConfig & ProverClientUserConfig & P2PConfig & WorldStateConfig & PublisherConfig & TxSenderConfig & DataStoreConfig & ProverCoordinationConfig & SharedNodeConfig & SpecificProverNodeConfig & GenesisStateConfig;
-type SpecificProverNodeConfig = {
+export type SpecificProverNodeConfig = {
     proverNodeMaxPendingJobs: number;
     proverNodePollingIntervalMs: number;
     proverNodeMaxParallelBlocksPerEpoch: number;
+    proverNodeFailedEpochStore: string | undefined;
     txGatheringIntervalMs: number;
     txGatheringBatchSize: number;
     txGatheringMaxParallelRequestsPerNode: number;
@@ -24,5 +25,4 @@ export declare function getProverNodeConfigFromEnv(): ProverNodeConfig;
 export declare function getProverNodeBrokerConfigFromEnv(): ProverBrokerConfig;
 export declare function getProverNodeAgentConfigFromEnv(): ProverAgentConfig & BBConfig & ACVMConfig;
 export declare function resolveConfig(userConfig: ProverNodeConfig): ProverNodeConfig & ProverClientConfig;
-export {};
 //# sourceMappingURL=config.d.ts.map
package/dest/config.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAA0B,MAAM,wBAAwB,CAAC;AACrF,OAAO,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,yBAAyB,CAAC;AACpE,OAAO,EAAE,KAAK,kBAAkB,EAAwD,MAAM,iBAAiB,CAAC;AAChH,OAAO,EAAE,KAAK,kBAAkB,EAA6C,MAAM,0BAA0B,CAAC;AAE9G,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,wBAAwB,CAAC;AAClF,OAAO,EAAE,KAAK,gBAAgB,EAA4B,MAAM,wBAAwB,CAAC;AACzF,OAAO,EAAE,KAAK,SAAS,EAAqB,MAAM,mBAAmB,CAAC;AACtE,OAAO,EACL,KAAK,iBAAiB,EACtB,KAAK,kBAAkB,EAGxB,MAAM,6BAA6B,CAAC;AACrC,OAAO,EACL,KAAK,kBAAkB,EACvB,KAAK,sBAAsB,EAG5B,MAAM,6BAA6B,CAAC;AACrC,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,cAAc,EAGpB,MAAM,gCAAgC,CAAC;AACxC,OAAO,EAAE,KAAK,gBAAgB,EAA4B,MAAM,2BAA2B,CAAC;AAE5F,OAAO,EAAE,KAAK,wBAAwB,EAAoC,MAAM,iCAAiC,CAAC;AAElH,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAC3C,sBAAsB,GACtB,SAAS,GACT,gBAAgB,GAChB,eAAe,GACf,cAAc,GACd,eAAe,GACf,wBAAwB,GACxB,gBAAgB,GAChB,wBAAwB,GACxB,kBAAkB,CAAC;AAErB,
+{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAA0B,MAAM,wBAAwB,CAAC;AACrF,OAAO,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,yBAAyB,CAAC;AACpE,OAAO,EAAE,KAAK,kBAAkB,EAAwD,MAAM,iBAAiB,CAAC;AAChH,OAAO,EAAE,KAAK,kBAAkB,EAA6C,MAAM,0BAA0B,CAAC;AAE9G,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,wBAAwB,CAAC;AAClF,OAAO,EAAE,KAAK,gBAAgB,EAA4B,MAAM,wBAAwB,CAAC;AACzF,OAAO,EAAE,KAAK,SAAS,EAAqB,MAAM,mBAAmB,CAAC;AACtE,OAAO,EACL,KAAK,iBAAiB,EACtB,KAAK,kBAAkB,EAGxB,MAAM,6BAA6B,CAAC;AACrC,OAAO,EACL,KAAK,kBAAkB,EACvB,KAAK,sBAAsB,EAG5B,MAAM,6BAA6B,CAAC;AACrC,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,cAAc,EAGpB,MAAM,gCAAgC,CAAC;AACxC,OAAO,EAAE,KAAK,gBAAgB,EAA4B,MAAM,2BAA2B,CAAC;AAE5F,OAAO,EAAE,KAAK,wBAAwB,EAAoC,MAAM,iCAAiC,CAAC;AAElH,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAC3C,sBAAsB,GACtB,SAAS,GACT,gBAAgB,GAChB,eAAe,GACf,cAAc,GACd,eAAe,GACf,wBAAwB,GACxB,gBAAgB,GAChB,wBAAwB,GACxB,kBAAkB,CAAC;AAErB,MAAM,MAAM,wBAAwB,GAAG;IACrC,wBAAwB,EAAE,MAAM,CAAC;IACjC,2BAA2B,EAAE,MAAM,CAAC;IACpC,mCAAmC,EAAE,MAAM,CAAC;IAC5C,0BAA0B,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/C,qBAAqB,EAAE,MAAM,CAAC;IAC9B,oBAAoB,EAAE,MAAM,CAAC;IAC7B,qCAAqC,EAAE,MAAM,CAAC;CAC/C,CAAC;AAwCF,eAAO,MAAM,wBAAwB,EAAE,kBAAkB,CAAC,gBAAgB,CAYzE,CAAC;AAEF,wBAAgB,0BAA0B,IAAI,gBAAgB,CAE7D;AAED,wBAAgB,gCAAgC,IAAI,kBAAkB,CAIrE;AAED,wBAAgB,+BAA+B,IAAI,iBAAiB,GAAG,QAAQ,GAAG,UAAU,CAK3F;AAED,wBAAgB,aAAa,CAAC,UAAU,EAAE,gBAAgB,GAAG,gBAAgB,GAAG,kBAAkB,CAMjG"}
package/dest/config.js
CHANGED
@@ -26,6 +26,11 @@ const specificProverNodeConfigMappings = {
         description: 'The Maximum number of blocks to process in parallel while proving an epoch',
         ...numberConfigHelper(32)
     },
+    proverNodeFailedEpochStore: {
+        env: 'PROVER_NODE_FAILED_EPOCH_STORE',
+        description: 'File store where to upload node state when an epoch fails to be proven',
+        defaultValue: undefined
+    },
     txGatheringIntervalMs: {
         env: 'PROVER_NODE_TX_GATHERING_INTERVAL_MS',
         description: 'How often to check that tx data is available',
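The new mapping keeps the feature off unless the environment variable is set. A minimal sketch; the bucket URL is a placeholder, and the assumption is that any location accepted by `createFileStore` works here:

import { getProverNodeConfigFromEnv } from '@aztec/prover-node';

// Placeholder store URL for illustration.
process.env.PROVER_NODE_FAILED_EPOCH_STORE = 'gs://example-bucket/failed-epochs';

const config = getProverNodeConfigFromEnv();
console.log(config.proverNodeFailedEpochStore); // 'gs://example-bucket/failed-epochs'
// Left unset, the value stays undefined and the prover node has nowhere to upload.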
package/dest/factory.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../src/factory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,QAAQ,EAAkB,MAAM,iBAAiB,CAAC;AAChE,OAAO,EAAE,KAAK,uBAAuB,EAAwB,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EAAE,SAAS,EAA+D,MAAM,iBAAiB,CAAC;
+{"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../src/factory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,QAAQ,EAAkB,MAAM,iBAAiB,CAAC;AAChE,OAAO,EAAE,KAAK,uBAAuB,EAAwB,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EAAE,SAAS,EAA+D,MAAM,iBAAiB,CAAC;AAEzG,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAClE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAI9D,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AACxE,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAC9D,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,yBAAyB,CAAC;AAGnF,OAAO,EAAE,KAAK,gBAAgB,EAAiB,MAAM,aAAa,CAAC;AAEnE,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,uDAAuD,CAAC;AAEtF,OAAO,EAAE,mBAAmB,EAAE,MAAM,4BAA4B,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAE9C,gDAAgD;AAChD,wBAAsB,gBAAgB,CACpC,UAAU,EAAE,gBAAgB,GAAG,eAAe,EAC9C,IAAI,GAAE;IACJ,SAAS,CAAC,EAAE,eAAe,CAAC;IAC5B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,mBAAmB,CAAC,EAAE,QAAQ,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,SAAS,CAAC,EAAE,mBAAmB,CAAC;IAChC,cAAc,CAAC,EAAE,uBAAuB,CAAC;IACzC,MAAM,CAAC,EAAE,gBAAgB,CAAC;IAC1B,SAAS,CAAC,EAAE,SAAS,CAAC;CAClB,EACN,OAAO,GAAE;IACP,mBAAmB,CAAC,EAAE,kBAAkB,EAAE,CAAC;CACvC,uBAgFP"}
package/dest/factory.js
CHANGED
@@ -2,6 +2,7 @@ import { createArchiver } from '@aztec/archiver';
 import { createBlobSinkClient } from '@aztec/blob-sink/client';
 import { EpochCache } from '@aztec/epoch-cache';
 import { L1TxUtils, RollupContract, createEthereumChain, createExtendedL1Client } from '@aztec/ethereum';
+import { pick } from '@aztec/foundation/collection';
 import { createLogger } from '@aztec/foundation/log';
 import { trySnapshotSync } from '@aztec/node-lib/actions';
 import { createProverClient } from '@aztec/prover-client';
@@ -54,11 +55,10 @@ import { ProverNode } from './prover-node.js';
         telemetry
     });
     const proverNodeConfig = {
-
-        pollingIntervalMs: config.proverNodePollingIntervalMs,
-        maxParallelBlocksPerEpoch: config.proverNodeMaxParallelBlocksPerEpoch,
-        txGatheringIntervalMs: config.txGatheringIntervalMs
+        ...pick(config, 'proverNodeMaxPendingJobs', 'proverNodeMaxParallelBlocksPerEpoch', 'proverNodePollingIntervalMs', 'txGatheringMaxParallelRequests', 'txGatheringIntervalMs', 'txGatheringTimeoutMs', 'proverNodeFailedEpochStore', 'dataDirectory', 'l1ChainId', 'rollupVersion')
     };
-    const epochMonitor = await EpochMonitor.create(archiver,
+    const epochMonitor = await EpochMonitor.create(archiver, {
+        pollingIntervalMs: config.proverNodePollingIntervalMs
+    }, telemetry);
     return new ProverNode(prover, publisher, archiver, archiver, archiver, worldStateSynchronizer, proverCoordination, epochMonitor, proverNodeConfig, telemetry);
 }
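The refactor swaps hand-copied fields for a key projection. Assuming `pick` from `@aztec/foundation/collection` behaves like the usual lodash-style helper, it is roughly:

// Hedged sketch of the assumed pick semantics, not the foundation implementation.
function pick<T extends object, K extends keyof T>(obj: T, ...keys: K[]): Pick<T, K> {
    return Object.fromEntries(keys.map(key => [key, obj[key]])) as Pick<T, K>;
}

Note that the projected keys now include `proverNodeFailedEpochStore` plus the `dataDirectory`, `l1ChainId` and `rollupVersion` that `uploadEpochProofFailure` needs, so the prover node carries everything required to upload a failed epoch.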
package/dest/index.d.ts
CHANGED
package/dest/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,cAAc,CAAC;AAC7B,cAAc,WAAW,CAAC;AAC1B,cAAc,4BAA4B,CAAC;AAC3C,cAAc,kBAAkB,CAAC"}
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,oBAAoB,CAAC;AACnC,cAAc,aAAa,CAAC;AAC5B,cAAc,cAAc,CAAC;AAC7B,cAAc,WAAW,CAAC;AAC1B,cAAc,4BAA4B,CAAC;AAC3C,cAAc,kBAAkB,CAAC"}
package/dest/index.js
CHANGED
package/dest/job/epoch-proving-job-data.d.ts
ADDED
@@ -0,0 +1,15 @@
+import { Fr } from '@aztec/foundation/fields';
+import { L2Block } from '@aztec/stdlib/block';
+import { BlockHeader, Tx } from '@aztec/stdlib/tx';
+/** All data from an epoch used in proving. */
+export type EpochProvingJobData = {
+    epochNumber: bigint;
+    blocks: L2Block[];
+    txs: Tx[];
+    l1ToL2Messages: Record<number, Fr[]>;
+    previousBlockHeader: BlockHeader;
+};
+export declare function validateEpochProvingJobData(data: EpochProvingJobData): void;
+export declare function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer;
+export declare function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData;
+//# sourceMappingURL=epoch-proving-job-data.d.ts.map
package/dest/job/epoch-proving-job-data.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"epoch-proving-job-data.d.ts","sourceRoot":"","sources":["../../src/job/epoch-proving-job-data.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAE9C,OAAO,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,EAAE,EAAE,MAAM,kBAAkB,CAAC;AAEnD,8CAA8C;AAC9C,MAAM,MAAM,mBAAmB,GAAG;IAChC,WAAW,EAAE,MAAM,CAAC;IACpB,MAAM,EAAE,OAAO,EAAE,CAAC;IAClB,GAAG,EAAE,EAAE,EAAE,CAAC;IACV,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,EAAE,CAAC,CAAC;IACrC,mBAAmB,EAAE,WAAW,CAAC;CAClC,CAAC;AAEF,wBAAgB,2BAA2B,CAAC,IAAI,EAAE,mBAAmB,QAcpE;AAED,wBAAgB,4BAA4B,CAAC,IAAI,EAAE,mBAAmB,GAAG,MAAM,CAmB9E;AAED,wBAAgB,8BAA8B,CAAC,GAAG,EAAE,MAAM,GAAG,mBAAmB,CAgB/E"}
package/dest/job/epoch-proving-job-data.js
ADDED
@@ -0,0 +1,45 @@
+import { Fr } from '@aztec/foundation/fields';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+import { L2Block } from '@aztec/stdlib/block';
+import { BlockHeader, Tx } from '@aztec/stdlib/tx';
+export function validateEpochProvingJobData(data) {
+    if (data.blocks.length > 0 && data.previousBlockHeader.getBlockNumber() + 1 !== data.blocks[0].number) {
+        throw new Error(`Initial block number ${data.blocks[0].number} does not match previous block header ${data.previousBlockHeader.getBlockNumber()}`);
+    }
+    for (const blockNumber of data.blocks.map((block)=>block.number)){
+        if (!(blockNumber in data.l1ToL2Messages)) {
+            throw new Error(`Missing L1 to L2 messages for block number ${blockNumber}`);
+        }
+    }
+}
+export function serializeEpochProvingJobData(data) {
+    const blocks = data.blocks.map((block)=>block.toBuffer());
+    const txs = data.txs.map((tx)=>tx.toBuffer());
+    const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages])=>[
+        Number(blockNumber),
+        messages.length,
+        ...messages
+    ]);
+    return serializeToBuffer(Number(data.epochNumber), data.previousBlockHeader, blocks.length, ...blocks, txs.length, ...txs, l1ToL2Messages.length, ...l1ToL2Messages);
+}
+export function deserializeEpochProvingJobData(buf) {
+    const reader = BufferReader.asReader(buf);
+    const epochNumber = BigInt(reader.readNumber());
+    const previousBlockHeader = reader.readObject(BlockHeader);
+    const blocks = reader.readVector(L2Block);
+    const txs = reader.readVector(Tx);
+    const l1ToL2MessageBlockCount = reader.readNumber();
+    const l1ToL2Messages = {};
+    for(let i = 0; i < l1ToL2MessageBlockCount; i++){
+        const blockNumber = reader.readNumber();
+        const messages = reader.readVector(Fr);
+        l1ToL2Messages[blockNumber] = messages;
+    }
+    return {
+        epochNumber,
+        previousBlockHeader,
+        blocks,
+        txs,
+        l1ToL2Messages
+    };
+}
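The format is length-prefixed throughout, so serialization should round-trip losslessly (with `epochNumber` surviving as a bigint even though it is written as a plain number). A small sketch; importing from the package root is an assumption, the dist module itself is `dest/job/epoch-proving-job-data.js`:

import {
    deserializeEpochProvingJobData,
    serializeEpochProvingJobData,
    type EpochProvingJobData,
} from '@aztec/prover-node';

// Round-trips a job payload through the wire format.
function roundTrip(data: EpochProvingJobData): EpochProvingJobData {
    return deserializeEpochProvingJobData(serializeEpochProvingJobData(data));
}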
package/dest/job/epoch-proving-job.d.ts
CHANGED
@@ -1,26 +1,22 @@
 import type { PublicProcessorFactory } from '@aztec/simulator/server';
-import type {
+import type { L2BlockSource } from '@aztec/stdlib/block';
 import { type EpochProver, type EpochProvingJobState, EpochProvingJobTerminalState, type ForkMerkleTreeOperations } from '@aztec/stdlib/interfaces/server';
-import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging';
-import type { Tx } from '@aztec/stdlib/tx';
 import { type Traceable, type Tracer } from '@aztec/telemetry-client';
 import type { ProverNodeJobMetrics } from '../metrics.js';
 import type { ProverNodePublisher } from '../prover-node-publisher.js';
+import { type EpochProvingJobData } from './epoch-proving-job-data.js';
 /**
  * Job that grabs a range of blocks from the unfinalised chain from L1, gets their txs given their hashes,
  * re-executes their public calls, generates a rollup proof, and submits it to L1. This job will update the
  * world state as part of public call execution via the public processor.
  */
 export declare class EpochProvingJob implements Traceable {
+    private data;
     private dbProvider;
-    private epochNumber;
-    private blocks;
-    private txs;
     private prover;
     private publicProcessorFactory;
     private publisher;
     private l2BlockSource;
-    private l1ToL2MessageSource;
     private metrics;
     private deadline;
     private config;
@@ -31,13 +27,18 @@ export declare class EpochProvingJob implements Traceable {
     private epochCheckPromise;
     private deadlineTimeoutHandler;
     readonly tracer: Tracer;
-    constructor(
-    parallelBlockLimit
+    constructor(data: EpochProvingJobData, dbProvider: Pick<ForkMerkleTreeOperations, 'fork'>, prover: EpochProver, publicProcessorFactory: PublicProcessorFactory, publisher: Pick<ProverNodePublisher, 'submitEpochProof'>, l2BlockSource: L2BlockSource | undefined, metrics: ProverNodeJobMetrics, deadline: Date | undefined, config: {
+        parallelBlockLimit?: number;
+        skipEpochCheck?: boolean;
     });
     getId(): string;
     getState(): EpochProvingJobState;
     getEpochNumber(): bigint;
     getDeadline(): Date | undefined;
+    getProvingData(): EpochProvingJobData;
+    private get epochNumber();
+    private get blocks();
+    private get txs();
     /**
      * Proves the given epoch and submits the proof to L1.
      */
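The class now stores a single `data` field and derives the old per-field state from it, which is what lets the node hand `getProvingData()` to `uploadEpochProofFailure`. Hypothetical bodies for the new accessors, inferred from the declaration only (not from the compiled implementation); the import path is likewise an assumption:

import { type EpochProvingJobData } from '@aztec/prover-node';

// Sketch of the likely accessor shape; the real class takes many more constructor args.
class EpochProvingJobSketch {
    constructor(private data: EpochProvingJobData) {}
    getProvingData(): EpochProvingJobData { return this.data; }
    private get epochNumber() { return this.data.epochNumber; }
    private get blocks() { return this.data.blocks; }
    private get txs() { return this.data.txs; }
}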
package/dest/job/epoch-proving-job.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"epoch-proving-job.d.ts","sourceRoot":"","sources":["../../src/job/epoch-proving-job.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAmB,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACvF,OAAO,KAAK,
+{"version":3,"file":"epoch-proving-job.d.ts","sourceRoot":"","sources":["../../src/job/epoch-proving-job.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAmB,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACvF,OAAO,KAAK,EAAW,aAAa,EAAE,MAAM,qBAAqB,CAAC;AAClE,OAAO,EACL,KAAK,WAAW,EAChB,KAAK,oBAAoB,EACzB,4BAA4B,EAC5B,KAAK,wBAAwB,EAC9B,MAAM,iCAAiC,CAAC;AAEzC,OAAO,EAAc,KAAK,SAAS,EAAE,KAAK,MAAM,EAAa,MAAM,yBAAyB,CAAC;AAI7F,OAAO,KAAK,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AAC1D,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,6BAA6B,CAAC;AACvE,OAAO,EAAE,KAAK,mBAAmB,EAA+B,MAAM,6BAA6B,CAAC;AAEpG;;;;GAIG;AACH,qBAAa,eAAgB,YAAW,SAAS;IAY7C,OAAO,CAAC,IAAI;IACZ,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,sBAAsB;IAC9B,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,aAAa;IACrB,OAAO,CAAC,OAAO;IACf,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,MAAM;IAnBhB,OAAO,CAAC,KAAK,CAAuC;IACpD,OAAO,CAAC,GAAG,CAAiD;IAC5D,OAAO,CAAC,IAAI,CAAS;IAErB,OAAO,CAAC,UAAU,CAA4B;IAC9C,OAAO,CAAC,iBAAiB,CAA6B;IACtD,OAAO,CAAC,sBAAsB,CAA6B;IAE3D,SAAgB,MAAM,EAAE,MAAM,CAAC;gBAGrB,IAAI,EAAE,mBAAmB,EACzB,UAAU,EAAE,IAAI,CAAC,wBAAwB,EAAE,MAAM,CAAC,EAClD,MAAM,EAAE,WAAW,EACnB,sBAAsB,EAAE,sBAAsB,EAC9C,SAAS,EAAE,IAAI,CAAC,mBAAmB,EAAE,kBAAkB,CAAC,EACxD,aAAa,EAAE,aAAa,GAAG,SAAS,EACxC,OAAO,EAAE,oBAAoB,EAC7B,QAAQ,EAAE,IAAI,GAAG,SAAS,EAC1B,MAAM,EAAE;QAAE,kBAAkB,CAAC,EAAE,MAAM,CAAC;QAAC,cAAc,CAAC,EAAE,OAAO,CAAA;KAAE;IAOpE,KAAK,IAAI,MAAM;IAIf,QAAQ,IAAI,oBAAoB;IAIhC,cAAc,IAAI,MAAM;IAIxB,WAAW,IAAI,IAAI,GAAG,SAAS;IAI/B,cAAc,IAAI,mBAAmB;IAI5C,OAAO,KAAK,WAAW,GAEtB;IAED,OAAO,KAAK,MAAM,GAEjB;IAED,OAAO,KAAK,GAAG,GAEd;IAED;;OAEG;IAIU,GAAG;IAuGhB,OAAO,CAAC,aAAa;IAKrB,OAAO,CAAC,UAAU;IAML,IAAI,CAAC,KAAK,GAAE,4BAAwC;IASjE,OAAO,CAAC,oBAAoB;IAoB5B;;;OAGG;YACW,kBAAkB;IAiChC,OAAO,CAAC,cAAc;YAiBR,MAAM;IAQpB,OAAO,CAAC,iBAAiB;YAIX,UAAU;CAmBzB;AASD,OAAO,EAAE,KAAK,oBAAoB,EAAE,CAAC"}