@aztec/prover-client 0.56.0 → 0.58.0
- package/dest/config.d.ts +4 -5
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +11 -2
- package/dest/index.d.ts +1 -1
- package/dest/index.d.ts.map +1 -1
- package/dest/mocks/fixtures.d.ts +6 -6
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +8 -4
- package/dest/mocks/test_context.d.ts +7 -7
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +15 -18
- package/dest/orchestrator/block-building-helpers.d.ts +12 -12
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +4 -2
- package/dest/orchestrator/block-proving-state.d.ts +7 -7
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +11 -33
- package/dest/orchestrator/epoch-proving-state.d.ts +8 -2
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +8 -16
- package/dest/orchestrator/orchestrator.d.ts +15 -19
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +161 -106
- package/dest/prover-agent/memory-proving-queue.d.ts +5 -8
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -1
- package/dest/prover-agent/memory-proving-queue.js +3 -3
- package/dest/prover-agent/prover-agent.js +4 -4
- package/dest/prover-agent/rpc.js +4 -4
- package/dest/test/mock_prover.d.ts +10 -16
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +18 -24
- package/dest/tx-prover/tx-prover.d.ts +3 -4
- package/dest/tx-prover/tx-prover.d.ts.map +1 -1
- package/dest/tx-prover/tx-prover.js +2 -2
- package/package.json +14 -10
- package/src/config.ts +20 -11
- package/src/index.ts +1 -1
- package/src/mocks/fixtures.ts +21 -7
- package/src/mocks/test_context.ts +26 -22
- package/src/orchestrator/block-building-helpers.ts +21 -12
- package/src/orchestrator/block-proving-state.ts +11 -35
- package/src/orchestrator/epoch-proving-state.ts +9 -20
- package/src/orchestrator/orchestrator.ts +205 -140
- package/src/prover-agent/memory-proving-queue.ts +8 -7
- package/src/prover-agent/prover-agent.ts +4 -4
- package/src/prover-agent/rpc.ts +3 -3
- package/src/test/mock_prover.ts +26 -28
- package/src/tx-prover/tx-prover.ts +5 -5
package/src/mocks/fixtures.ts
CHANGED
@@ -1,5 +1,7 @@
 import {
   MerkleTreeId,
+  type MerkleTreeReadOperations,
+  type MerkleTreeWriteOperations,
   type ProcessedTx,
   makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots,
 } from '@aztec/circuit-types';
@@ -21,8 +23,8 @@ import { randomBytes } from '@aztec/foundation/crypto';
 import { type DebugLogger } from '@aztec/foundation/log';
 import { fileURLToPath } from '@aztec/foundation/url';
 import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types';
+import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
 import { NativeACVMSimulator, type SimulationProvider, WASMSimulator } from '@aztec/simulator';
-import { type MerkleTreeOperations } from '@aztec/world-state';
 
 import * as fs from 'fs/promises';
 import path from 'path';
@@ -32,6 +34,7 @@ const {
   TEMP_DIR = '/tmp',
   BB_BINARY_PATH = '',
   BB_WORKING_DIRECTORY = '',
+  BB_SKIP_CLEANUP = '',
   NOIR_RELEASE_DIR = 'noir-repo/target/release',
   ACVM_BINARY_PATH = '',
   ACVM_WORKING_DIRECTORY = '',
@@ -56,12 +59,17 @@ export const getEnvironmentConfig = async (logger: DebugLogger) => {
     const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`;
     await fs.mkdir(acvmWorkingDirectory, { recursive: true });
     logger.verbose(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`);
+
+    const bbSkipCleanup = ['1', 'true'].includes(BB_SKIP_CLEANUP);
+    bbSkipCleanup && logger.verbose(`Not going to clean up BB working directory ${bbWorkingDirectory} after run`);
+
     return {
       acvmWorkingDirectory,
       bbWorkingDirectory,
       expectedAcvmPath,
       expectedBBPath,
       directoryToCleanup: ACVM_WORKING_DIRECTORY && BB_WORKING_DIRECTORY ? undefined : tempWorkingDirectory,
+      bbSkipCleanup,
     };
   } catch (err) {
     logger.verbose(`Native BB not available, error: ${err}`);
@@ -89,16 +97,22 @@ export async function getSimulationProvider(
   return new WASMSimulator();
 }
 
-export const makeBloatedProcessedTx = (builderDb:
-  makeBloatedProcessedTxWithVKRoot(builderDb, getVKTreeRoot(), seed);
+export const makeBloatedProcessedTx = (builderDb: MerkleTreeReadOperations, seed = 0x1) =>
+  makeBloatedProcessedTxWithVKRoot(builderDb, getVKTreeRoot(), protocolContractTreeRoot, seed);
 
-export const makeEmptyProcessedTx = (builderDb:
+export const makeEmptyProcessedTx = (builderDb: MerkleTreeReadOperations, chainId: Fr, version: Fr) => {
   const header = builderDb.getInitialHeader();
-  return makeEmptyProcessedTxFromHistoricalTreeRoots(
+  return makeEmptyProcessedTxFromHistoricalTreeRoots(
+    header,
+    chainId,
+    version,
+    getVKTreeRoot(),
+    protocolContractTreeRoot,
+  );
 };
 
 // Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs
-export const updateExpectedTreesFromTxs = async (db:
+export const updateExpectedTreesFromTxs = async (db: MerkleTreeWriteOperations, txs: ProcessedTx[]) => {
   await db.appendLeaves(
     MerkleTreeId.NOTE_HASH_TREE,
     txs.flatMap(tx =>
@@ -144,5 +158,5 @@ export const makeGlobals = (blockNumber: number) => {
   );
 };
 
-export const makeEmptyProcessedTestTx = (builderDb:
+export const makeEmptyProcessedTestTx = (builderDb: MerkleTreeReadOperations): ProcessedTx =>
   makeEmptyProcessedTx(builderDb, Fr.ZERO, Fr.ZERO);
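The new `BB_SKIP_CLEANUP` flag above is only treated as enabled for the exact strings `'1'` or `'true'`; anything else (including an unset variable) leaves cleanup on, and the resulting `bbSkipCleanup` is what consumers of `directoryToCleanup` check before deleting the working directory. A minimal standalone sketch of that parsing; the `parseBooleanEnv` helper name is illustrative, not part of the package:

```ts
// Illustrative sketch of the BB_SKIP_CLEANUP parsing shown in the hunk above.
// Only the exact strings '1' and 'true' enable the flag; '' or undefined leave cleanup on.
function parseBooleanEnv(value: string | undefined): boolean {
  return ['1', 'true'].includes(value ?? '');
}

// e.g. running tests with BB_SKIP_CLEANUP=true keeps the BB working directory for inspection.
console.log(parseBooleanEnv('true')); // true
console.log(parseBooleanEnv(''));     // false
```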
package/src/mocks/test_context.ts
CHANGED
@@ -1,14 +1,21 @@
 import { type BBProverConfig } from '@aztec/bb-prover';
 import {
-  type
-  type MerkleTreeAdminOperations,
+  type MerkleTreeWriteOperations,
   type ProcessedTx,
+  type ProcessedTxHandler,
   type PublicExecutionRequest,
   type ServerCircuitProver,
   type Tx,
   type TxValidator,
 } from '@aztec/circuit-types';
-import {
+import {
+  type CombinedConstantData,
+  type Gas,
+  type GlobalVariables,
+  Header,
+  type Nullifier,
+  type TxContext,
+} from '@aztec/circuits.js';
 import { type Fr } from '@aztec/foundation/fields';
 import { type DebugLogger } from '@aztec/foundation/log';
 import { openTmpStore } from '@aztec/kv-store/utils';
@@ -28,8 +35,6 @@ import { NativeWorldStateService } from '@aztec/world-state/native';
 
 import * as fs from 'fs/promises';
 import { type MockProxy, mock } from 'jest-mock-extended';
-import { tmpdir } from 'os';
-import { join } from 'path';
 
 import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js';
 import { ProvingOrchestrator } from '../orchestrator/index.js';
@@ -44,7 +49,7 @@ export class TestContext {
     public publicProcessor: PublicProcessor,
     public simulationProvider: SimulationProvider,
     public globalVariables: GlobalVariables,
-    public actualDb:
+    public actualDb: MerkleTreeWriteOperations,
     public prover: ServerCircuitProver,
     public proverAgent: ProverAgent,
     public orchestrator: ProvingOrchestrator,
@@ -53,13 +58,13 @@ export class TestContext {
     public logger: DebugLogger,
   ) {}
 
-  public get
+  public get epochProver() {
     return this.orchestrator;
   }
 
   static async new(
     logger: DebugLogger,
-    worldState: 'native' | 'legacy' = '
+    worldState: 'native' | 'legacy' = 'native',
     proverCount = 4,
     createProver: (bbConfig: BBProverConfig) => Promise<ServerCircuitProver> = _ =>
       Promise.resolve(new TestCircuitProver(new NoopTelemetryClient(), new WASMSimulator())),
@@ -73,23 +78,21 @@ export class TestContext {
     const publicKernel = new RealPublicKernelCircuitSimulator(new WASMSimulator());
     const telemetry = new NoopTelemetryClient();
 
-    let actualDb:
+    let actualDb: MerkleTreeWriteOperations;
 
     if (worldState === 'native') {
-      const
-
-      const ws = await NativeWorldStateService.create(dir);
-      actualDb = ws.asLatest();
+      const ws = await NativeWorldStateService.tmp();
+      actualDb = await ws.fork();
     } else {
       const ws = await MerkleTrees.new(openTmpStore(), telemetry);
-      actualDb = ws.
+      actualDb = await ws.getLatest();
     }
 
     const processor = PublicProcessor.create(
       actualDb,
       publicExecutor,
       publicKernel,
-
+      globalVariables,
      Header.empty(),
      worldStateDB,
      telemetry,
@@ -109,11 +112,12 @@ export class TestContext {
         acvmWorkingDirectory: config.acvmWorkingDirectory,
         bbBinaryPath: config.expectedBBPath,
         bbWorkingDirectory: config.bbWorkingDirectory,
+        bbSkipCleanup: config.bbSkipCleanup,
       };
       localProver = await createProver(bbConfig);
     }
 
-    if (config?.directoryToCleanup) {
+    if (config?.directoryToCleanup && !config.bbSkipCleanup) {
       directoriesToCleanup.push(config.directoryToCleanup);
     }
 
@@ -150,12 +154,12 @@ export class TestContext {
   public async processPublicFunctions(
     txs: Tx[],
     maxTransactions: number,
-
+    txHandler?: ProcessedTxHandler,
     txValidator?: TxValidator<ProcessedTx>,
   ) {
     const defaultExecutorImplementation = (
       execution: PublicExecutionRequest,
-
+      _constants: CombinedConstantData,
       availableGas: Gas,
       _txContext: TxContext,
      _pendingNullifiers: Nullifier[],
@@ -182,7 +186,7 @@ export class TestContext {
     return await this.processPublicFunctionsWithMockExecutorImplementation(
       txs,
       maxTransactions,
-
+      txHandler,
       txValidator,
       defaultExecutorImplementation,
     );
@@ -191,11 +195,11 @@
   public async processPublicFunctionsWithMockExecutorImplementation(
     txs: Tx[],
     maxTransactions: number,
-
+    txHandler?: ProcessedTxHandler,
     txValidator?: TxValidator<ProcessedTx>,
     executorMock?: (
       execution: PublicExecutionRequest,
-
+      constants: CombinedConstantData,
       availableGas: Gas,
       txContext: TxContext,
       pendingNullifiers: Nullifier[],
@@ -206,6 +210,6 @@
     if (executorMock) {
       this.publicExecutor.simulate.mockImplementation(executorMock);
     }
-    return await this.publicProcessor.process(txs, maxTransactions,
+    return await this.publicProcessor.process(txs, maxTransactions, txHandler, txValidator);
   }
 }
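The native world-state branch above replaces `create(dir)` plus `asLatest()` with `tmp()` plus `fork()`, so the test harness no longer builds its own temp directory (hence the dropped `os`/`path` imports). A rough sketch of the new call shape, using only the calls visible in the diff; the surrounding TestContext wiring is omitted and the function name is illustrative:

```ts
// Sketch only: how a test can now obtain a writable merkle-tree handle,
// mirroring the native branch of TestContext.new shown above.
import { NativeWorldStateService } from '@aztec/world-state/native';

async function openWritableTrees() {
  const ws = await NativeWorldStateService.tmp(); // service manages its own temporary store
  const actualDb = await ws.fork();               // a MerkleTreeWriteOperations fork for this test
  return actualDb;
}
```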
package/src/orchestrator/block-building-helpers.ts
CHANGED
@@ -1,4 +1,11 @@
-import {
+import {
+  type Body,
+  MerkleTreeId,
+  type MerkleTreeWriteOperations,
+  type ProcessedTx,
+  TxEffect,
+  getTreeHeight,
+} from '@aztec/circuit-types';
 import {
   ARCHIVE_HEIGHT,
   AppendOnlyTreeSnapshot,
@@ -52,8 +59,9 @@ import { type DebugLogger } from '@aztec/foundation/log';
 import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
 import { computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
 import { getVKIndex, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types';
+import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
 import { HintsBuilder, computeFeePayerBalanceLeafSlot } from '@aztec/simulator';
-import { type
+import { type MerkleTreeReadOperations } from '@aztec/world-state';
 
 import { inspect } from 'util';
 
@@ -71,7 +79,7 @@ export async function buildBaseRollupInput(
   tx: ProcessedTx,
   proof: RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>,
   globalVariables: GlobalVariables,
-  db:
+  db: MerkleTreeWriteOperations,
   kernelVk: VerificationKeyData,
 ) {
   // Get trees info before any changes hit
@@ -251,7 +259,7 @@ export async function buildHeaderFromTxEffects(
   body: Body,
   globalVariables: GlobalVariables,
   l1ToL2Messages: Fr[],
-  db:
+  db: MerkleTreeReadOperations,
 ) {
   const stateReference = new StateReference(
     await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
@@ -291,7 +299,7 @@ export async function buildHeaderFromTxEffects(
 export async function validateBlockRootOutput(
   blockRootOutput: BlockRootOrBlockMergePublicInputs,
   blockHeader: Header,
-  db:
+  db: MerkleTreeReadOperations,
 ) {
   await Promise.all([
     validateState(blockHeader.state, db),
@@ -299,7 +307,7 @@ export async function validateBlockRootOutput(
   ]);
 }
 
-export async function validateState(state: StateReference, db:
+export async function validateState(state: StateReference, db: MerkleTreeReadOperations) {
   const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map(
     async (id: MerkleTreeId) => {
       return { key: id, value: await getTreeSnapshot(id, db) };
@@ -316,7 +324,7 @@ export async function validateState(state: StateReference, db: MerkleTreeOperati
   );
 }
 
-export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db:
+export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
   const { size } = await db.getTreeInfo(treeId);
   const path = await db.getSiblingPath(treeId, size);
   return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
@@ -375,16 +383,17 @@ export function getPreviousRollupBlockDataFromPublicInputs(
 
 export async function getConstantRollupData(
   globalVariables: GlobalVariables,
-  db:
+  db: MerkleTreeReadOperations,
 ): Promise<ConstantRollupData> {
   return ConstantRollupData.from({
     vkTreeRoot: getVKTreeRoot(),
+    protocolContractTreeRoot,
     lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
     globalVariables,
   });
 }
 
-export async function getTreeSnapshot(id: MerkleTreeId, db:
+export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot> {
   const treeInfo = await db.getTreeInfo(id);
   return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
 }
@@ -414,7 +423,7 @@ export function makeEmptyMembershipWitness<N extends number>(height: N) {
   );
 }
 
-export async function processPublicDataUpdateRequests(tx: ProcessedTx, db:
+export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: MerkleTreeWriteOperations) {
   const allPublicDataUpdateRequests = padArrayEnd(
     tx.finalPublicDataUpdateRequests,
     PublicDataUpdateRequest.empty(),
@@ -482,7 +491,7 @@ export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: Merkl
 export async function getSubtreeSiblingPath(
   treeId: MerkleTreeId,
   subtreeHeight: number,
-  db:
+  db: MerkleTreeReadOperations,
 ): Promise<Fr[]> {
   const nextAvailableLeafIndex = await db.getTreeInfo(treeId).then(t => t.size);
   const fullSiblingPath = await db.getSiblingPath(treeId, nextAvailableLeafIndex);
@@ -496,7 +505,7 @@ export async function getMembershipWitnessFor<N extends number>(
   value: Fr,
   treeId: MerkleTreeId,
   height: N,
-  db:
+  db: MerkleTreeReadOperations,
 ): Promise<MembershipWitness<N>> {
   // If this is an empty tx, then just return zeroes
   if (value.isZero()) {
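Most of the helpers above now accept `MerkleTreeReadOperations` in place of the old combined world-state handle; the two helpers shown taking `MerkleTreeWriteOperations` (`buildBaseRollupInput`, `processPublicDataUpdateRequests`) are the ones that mutate trees. A small sketch of the read-only pattern, reusing the calls shown in `getTreeSnapshot`; the `snapshotArchive` name is illustrative and the `Fr` import path is an assumption:

```ts
// Sketch only: a read-side helper in the style of getTreeSnapshot above.
import { MerkleTreeId } from '@aztec/circuit-types';
import { AppendOnlyTreeSnapshot } from '@aztec/circuits.js';
import { Fr } from '@aztec/foundation/fields';
import { type MerkleTreeReadOperations } from '@aztec/world-state';

async function snapshotArchive(db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot> {
  // A read handle is enough here: only getTreeInfo is needed to snapshot a tree.
  const treeInfo = await db.getTreeInfo(MerkleTreeId.ARCHIVE);
  return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
}
```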
package/src/orchestrator/block-proving-state.ts
CHANGED
@@ -1,4 +1,4 @@
-import { type L2Block, type MerkleTreeId
+import { type L2Block, type MerkleTreeId } from '@aztec/circuit-types';
 import {
   type ARCHIVE_HEIGHT,
   type AppendOnlyTreeSnapshot,
@@ -18,14 +18,9 @@ import {
 } from '@aztec/circuits.js';
 import { type Tuple } from '@aztec/foundation/serialize';
 
+import { type EpochProvingState } from './epoch-proving-state.js';
 import { type TxProvingState } from './tx-proving-state.js';
 
-enum PROVING_STATE_LIFECYCLE {
-  PROVING_STATE_CREATED,
-  PROVING_STATE_RESOLVED,
-  PROVING_STATE_REJECTED,
-}
-
 export type MergeRollupInputData = {
   inputs: [BaseOrMergeRollupPublicInputs | undefined, BaseOrMergeRollupPublicInputs | undefined];
   proofs: [
@@ -46,11 +41,11 @@ export class BlockProvingState {
   private rootParityInputs: Array<RootParityInput<typeof RECURSIVE_PROOF_LENGTH> | undefined> = [];
   private finalRootParityInputs: RootParityInput<typeof NESTED_RECURSIVE_PROOF_LENGTH> | undefined;
   public blockRootRollupPublicInputs: BlockRootOrBlockMergePublicInputs | undefined;
+  public blockRootRollupStarted: boolean = false;
   public finalProof: Proof | undefined;
   public block: L2Block | undefined;
   private txs: TxProvingState[] = [];
-
-  private provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED;
+  public error: string | undefined;
 
   constructor(
     public readonly index: number,
@@ -63,8 +58,7 @@ export class BlockProvingState {
     public readonly archiveTreeSnapshot: AppendOnlyTreeSnapshot,
     public readonly archiveTreeRootSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
     public readonly previousBlockHash: Fr,
-    private
-    private rejectionCallback?: (reason: string) => void,
+    private readonly parentEpoch: EpochProvingState,
   ) {
     this.rootParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }).map(_ => undefined);
   }
@@ -188,6 +182,7 @@ export class BlockProvingState {
   // Returns true if we have sufficient inputs to execute the block root rollup
   public isReadyForBlockRootRollup() {
     return !(
+      this.block === undefined ||
       this.mergeRollupInputs[0] === undefined ||
       this.finalRootParityInput === undefined ||
       this.mergeRollupInputs[0].inputs.findIndex(p => !p) !== -1
@@ -206,35 +201,16 @@ export class BlockProvingState {
 
   // Returns true if we are still able to accept transactions, false otherwise
   public isAcceptingTransactions() {
-    return
-      this.provingStateLifecycle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED && this.totalNumTxs > this.txs.length
-    );
+    return this.totalNumTxs > this.txs.length;
   }
 
-  // Returns
+  // Returns whether the proving state is still valid
   public verifyState() {
-    return this.
+    return this.parentEpoch.verifyState();
   }
 
-  // Attempts to reject the proving state promise with the given reason
   public reject(reason: string) {
-
-
-    }
-    this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_REJECTED;
-    if (this.rejectionCallback) {
-      this.rejectionCallback(reason);
-    }
-  }
-
-  // Attempts to resolve the proving state promise with the given result
-  public resolve(result: ProvingResult) {
-    if (!this.verifyState()) {
-      return;
-    }
-    this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_RESOLVED;
-    if (this.completionCallback) {
-      this.completionCallback(result);
-    }
+    this.error = reason;
+    this.parentEpoch.reject(reason);
   }
 }
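In short, `BlockProvingState` no longer tracks its own lifecycle enum or resolve/reject callbacks; it records an `error` string and defers both validity checks and rejection to its parent `EpochProvingState`. A minimal sketch of that delegation shape with stand-in types; names other than `verifyState`, `reject`, `error`, and `parentEpoch` are illustrative:

```ts
// Sketch only: the parent-epoch delegation pattern introduced above.
interface EpochLike {
  verifyState(): boolean;
  reject(reason: string): void;
}

class BlockStateSketch {
  public error: string | undefined;

  constructor(private readonly parentEpoch: EpochLike) {}

  // A block is only as valid as the epoch that owns it.
  public verifyState(): boolean {
    return this.parentEpoch.verifyState();
  }

  // Record the failure locally, then escalate; the epoch owns the overall proving result.
  public reject(reason: string): void {
    this.error = reason;
    this.parentEpoch.reject(reason);
  }
}
```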
package/src/orchestrator/epoch-proving-state.ts
CHANGED
@@ -1,4 +1,4 @@
-import { type MerkleTreeId
+import { type MerkleTreeId } from '@aztec/circuit-types';
 import {
   type ARCHIVE_HEIGHT,
   type AppendOnlyTreeSnapshot,
@@ -36,6 +36,8 @@ export type BlockMergeRollupInputData = {
   verificationKeys: [VerificationKeyAsFields | undefined, VerificationKeyAsFields | undefined];
 };
 
+export type ProvingResult = { status: 'success' } | { status: 'failure'; reason: string };
+
 /**
  * The current state of the proving schedule for an epoch.
  * Contains the raw inputs and intermediate state to generate every constituent proof in the tree.
@@ -59,18 +61,20 @@ export class EpochProvingState {
 
   /** Returns the current block proving state */
   public get currentBlock(): BlockProvingState | undefined {
-    return this.blocks
+    return this.blocks.at(-1);
   }
 
   // Returns the number of levels of merge rollups
   public get numMergeLevels() {
-
+    const totalLeaves = Math.max(2, this.totalNumBlocks);
+    return BigInt(Math.ceil(Math.log2(totalLeaves)) - 1);
   }
 
   // Calculates the index and level of the parent rollup circuit
   // Based on tree implementation in unbalanced_tree.ts -> batchInsert()
   // REFACTOR: This is repeated from the block orchestrator
   public findMergeLevel(currentLevel: bigint, currentIndex: bigint) {
+    const totalLeaves = Math.max(2, this.totalNumBlocks);
     const moveUpMergeLevel = (levelSize: number, index: bigint, nodeToShift: boolean) => {
       levelSize /= 2;
       if (levelSize & 1) {
@@ -79,8 +83,7 @@ export class EpochProvingState {
       index >>= 1n;
       return { thisLevelSize: levelSize, thisIndex: index, shiftUp: nodeToShift };
     };
-    let [thisLevelSize, shiftUp] =
-      this.totalNumBlocks & 1 ? [this.totalNumBlocks - 1, true] : [this.totalNumBlocks, false];
+    let [thisLevelSize, shiftUp] = totalLeaves & 1 ? [totalLeaves - 1, true] : [totalLeaves, false];
     const maxLevel = this.numMergeLevels + 1n;
     let placeholder = currentIndex;
     for (let i = 0; i < maxLevel - currentLevel; i++) {
@@ -107,8 +110,6 @@ export class EpochProvingState {
     archiveTreeSnapshot: AppendOnlyTreeSnapshot,
     archiveTreeRootSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
     previousBlockHash: Fr,
-    completionCallback?: (result: ProvingResult) => void,
-    rejectionCallback?: (reason: string) => void,
   ) {
     const block = new BlockProvingState(
       this.blocks.length,
@@ -121,15 +122,7 @@ export class EpochProvingState {
       archiveTreeSnapshot,
       archiveTreeRootSiblingPath,
       previousBlockHash,
-
-      reason => {
-        // Reject the block
-        if (rejectionCallback) {
-          rejectionCallback(reason);
-        }
-        // An error on any block rejects this whole epoch
-        this.reject(reason);
-      },
+      this,
     );
     this.blocks.push(block);
     if (this.blocks.length === this.totalNumBlocks) {
@@ -214,10 +207,6 @@ export class EpochProvingState {
     }
     this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_REJECTED;
     this.rejectionCallback(reason);
-
-    for (const block of this.blocks) {
-      block.reject('Proving cancelled');
-    }
   }
 
   // Attempts to resolve the proving state promise with the given result
// Attempts to resolve the proving state promise with the given result
|