@aztec/prover-client 0.0.0-test.1 → 0.0.1-fake-ceab37513c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dest/bin/get-proof-inputs.js +1 -1
  2. package/dest/block-factory/index.d.ts +2 -0
  3. package/dest/block-factory/index.d.ts.map +1 -0
  4. package/dest/block-factory/light.d.ts +36 -0
  5. package/dest/block-factory/light.d.ts.map +1 -0
  6. package/dest/{block_builder → block-factory}/light.js +35 -30
  7. package/dest/config.d.ts +6 -6
  8. package/dest/config.d.ts.map +1 -1
  9. package/dest/config.js +11 -1
  10. package/dest/mocks/fixtures.d.ts +3 -3
  11. package/dest/mocks/fixtures.d.ts.map +1 -1
  12. package/dest/mocks/fixtures.js +2 -2
  13. package/dest/mocks/test_context.d.ts +18 -13
  14. package/dest/mocks/test_context.d.ts.map +1 -1
  15. package/dest/mocks/test_context.js +44 -38
  16. package/dest/orchestrator/block-building-helpers.d.ts +18 -11
  17. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  18. package/dest/orchestrator/block-building-helpers.js +80 -63
  19. package/dest/orchestrator/block-proving-state.d.ts +19 -10
  20. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  21. package/dest/orchestrator/block-proving-state.js +63 -46
  22. package/dest/orchestrator/epoch-proving-state.d.ts +13 -6
  23. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  24. package/dest/orchestrator/epoch-proving-state.js +40 -14
  25. package/dest/orchestrator/orchestrator.d.ts +7 -5
  26. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  27. package/dest/orchestrator/orchestrator.js +78 -52
  28. package/dest/orchestrator/orchestrator_metrics.d.ts +2 -0
  29. package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
  30. package/dest/orchestrator/orchestrator_metrics.js +9 -0
  31. package/dest/orchestrator/tx-proving-state.d.ts +2 -2
  32. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  33. package/dest/orchestrator/tx-proving-state.js +9 -20
  34. package/dest/prover-client/prover-client.d.ts +3 -3
  35. package/dest/prover-client/prover-client.d.ts.map +1 -1
  36. package/dest/prover-client/prover-client.js +5 -4
  37. package/dest/prover-client/server-epoch-prover.d.ts +6 -4
  38. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  39. package/dest/prover-client/server-epoch-prover.js +4 -4
  40. package/dest/proving_broker/broker_prover_facade.d.ts +5 -3
  41. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  42. package/dest/proving_broker/broker_prover_facade.js +31 -21
  43. package/dest/proving_broker/config.d.ts +9 -4
  44. package/dest/proving_broker/config.d.ts.map +1 -1
  45. package/dest/proving_broker/config.js +15 -4
  46. package/dest/proving_broker/factory.d.ts +1 -1
  47. package/dest/proving_broker/factory.d.ts.map +1 -1
  48. package/dest/proving_broker/factory.js +5 -1
  49. package/dest/proving_broker/proof_store/factory.js +1 -1
  50. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
  51. package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
  52. package/dest/proving_broker/proving_agent.d.ts +3 -3
  53. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  54. package/dest/proving_broker/proving_agent.js +83 -47
  55. package/dest/proving_broker/proving_broker.d.ts +11 -2
  56. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  57. package/dest/proving_broker/proving_broker.js +6 -5
  58. package/dest/proving_broker/proving_broker_database/memory.js +1 -1
  59. package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
  60. package/dest/proving_broker/proving_broker_database/persisted.js +9 -8
  61. package/dest/proving_broker/proving_job_controller.d.ts +7 -8
  62. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  63. package/dest/proving_broker/proving_job_controller.js +53 -45
  64. package/dest/proving_broker/rpc.d.ts +3 -5
  65. package/dest/proving_broker/rpc.d.ts.map +1 -1
  66. package/dest/proving_broker/rpc.js +1 -4
  67. package/dest/test/mock_proof_store.d.ts +9 -0
  68. package/dest/test/mock_proof_store.d.ts.map +1 -0
  69. package/dest/test/mock_proof_store.js +10 -0
  70. package/dest/test/mock_prover.d.ts +7 -5
  71. package/dest/test/mock_prover.d.ts.map +1 -1
  72. package/dest/test/mock_prover.js +6 -3
  73. package/package.json +28 -27
  74. package/src/bin/get-proof-inputs.ts +1 -1
  75. package/src/block-factory/index.ts +1 -0
  76. package/src/{block_builder → block-factory}/light.ts +42 -29
  77. package/src/config.ts +24 -8
  78. package/src/mocks/fixtures.ts +5 -5
  79. package/src/mocks/test_context.ts +79 -59
  80. package/src/orchestrator/block-building-helpers.ts +96 -92
  81. package/src/orchestrator/block-proving-state.ts +78 -52
  82. package/src/orchestrator/epoch-proving-state.ts +51 -20
  83. package/src/orchestrator/orchestrator.ts +119 -60
  84. package/src/orchestrator/orchestrator_metrics.ts +20 -1
  85. package/src/orchestrator/tx-proving-state.ts +17 -24
  86. package/src/prover-client/prover-client.ts +16 -14
  87. package/src/prover-client/server-epoch-prover.ts +16 -7
  88. package/src/proving_broker/broker_prover_facade.ts +52 -36
  89. package/src/proving_broker/config.ts +17 -6
  90. package/src/proving_broker/factory.ts +2 -1
  91. package/src/proving_broker/proof_store/factory.ts +1 -1
  92. package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
  93. package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
  94. package/src/proving_broker/proving_agent.ts +89 -47
  95. package/src/proving_broker/proving_broker.ts +16 -15
  96. package/src/proving_broker/proving_broker_database/memory.ts +1 -1
  97. package/src/proving_broker/proving_broker_database/persisted.ts +9 -8
  98. package/src/proving_broker/proving_job_controller.ts +56 -65
  99. package/src/proving_broker/rpc.ts +1 -6
  100. package/src/test/mock_proof_store.ts +14 -0
  101. package/src/test/mock_prover.ts +27 -5
  102. package/dest/block_builder/index.d.ts +0 -6
  103. package/dest/block_builder/index.d.ts.map +0 -1
  104. package/dest/block_builder/light.d.ts +0 -33
  105. package/dest/block_builder/light.d.ts.map +0 -1
  106. package/src/block_builder/index.ts +0 -6
  107. /package/dest/{block_builder → block-factory}/index.js +0 -0
package/src/mocks/test_context.ts
@@ -1,45 +1,42 @@
  import type { BBProverConfig } from '@aztec/bb-prover';
- import { times, timesParallel } from '@aztec/foundation/collection';
+ import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
+ import { padArrayEnd, times, timesParallel } from '@aztec/foundation/collection';
  import { Fr } from '@aztec/foundation/fields';
  import type { Logger } from '@aztec/foundation/log';
  import { TestDateProvider } from '@aztec/foundation/timer';
  import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
  import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
- import {
- PublicProcessor,
- PublicTxSimulationTester,
- PublicTxSimulator,
- SimpleContractDataSource,
- WorldStateDB,
- } from '@aztec/simulator/server';
+ import { SimpleContractDataSource } from '@aztec/simulator/public/fixtures';
+ import { PublicProcessorFactory } from '@aztec/simulator/server';
  import { PublicDataWrite } from '@aztec/stdlib/avm';
  import { AztecAddress } from '@aztec/stdlib/aztec-address';
- import type { L2Block } from '@aztec/stdlib/block';
+ import { EthAddress, type L2Block } from '@aztec/stdlib/block';
  import type { ServerCircuitProver } from '@aztec/stdlib/interfaces/server';
  import { makeBloatedProcessedTx } from '@aztec/stdlib/testing';
- import { type AppendOnlyTreeSnapshot, PublicDataTreeLeaf } from '@aztec/stdlib/trees';
+ import { type AppendOnlyTreeSnapshot, MerkleTreeId, PublicDataTreeLeaf } from '@aztec/stdlib/trees';
  import { type BlockHeader, type GlobalVariables, type ProcessedTx, TreeSnapshots, type Tx } from '@aztec/stdlib/tx';
  import type { MerkleTreeAdminDatabase } from '@aztec/world-state';
  import { NativeWorldStateService } from '@aztec/world-state/native';

  import { promises as fs } from 'fs';

+ // TODO(#12613) This means of sharing test code is not ideal.
+ // eslint-disable-next-line import/no-relative-packages
  import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js';
- import { buildBlock } from '../block_builder/light.js';
+ import { buildBlockWithCleanDB } from '../block-factory/light.js';
+ import { getTreeSnapshot } from '../orchestrator/block-building-helpers.js';
  import { ProvingOrchestrator } from '../orchestrator/index.js';
  import { BrokerCircuitProverFacade } from '../proving_broker/broker_prover_facade.js';
  import { TestBroker } from '../test/mock_prover.js';
- import { getEnvironmentConfig, getSimulationProvider, makeGlobals, updateExpectedTreesFromTxs } from './fixtures.js';
+ import { getEnvironmentConfig, getSimulator, makeGlobals, updateExpectedTreesFromTxs } from './fixtures.js';

  export class TestContext {
  private headers: Map<number, BlockHeader> = new Map();
  private feePayerBalance: Fr;

  constructor(
- public publicTxSimulator: PublicTxSimulator,
  public worldState: MerkleTreeAdminDatabase,
- public publicProcessor: PublicProcessor,
  public globalVariables: GlobalVariables,
  public prover: ServerCircuitProver,
  public broker: TestBroker,
@@ -49,7 +46,6 @@ export class TestContext {
  public feePayer: AztecAddress,
  initialFeePayerBalance: Fr,
  public directoriesToCleanup: string[],
- public tester: PublicTxSimulationTester,
  public logger: Logger,
  ) {
  this.feePayerBalance = initialFeePayerBalance;
@@ -61,10 +57,15 @@ export class TestContext {

  static async new(
  logger: Logger,
- proverCount = 4,
- createProver: (bbConfig: BBProverConfig) => Promise<ServerCircuitProver> = async (bbConfig: BBProverConfig) =>
- new TestCircuitProver(await getSimulationProvider(bbConfig, logger)),
- blockNumber = 1,
+ {
+ proverCount = 4,
+ createProver = async (bbConfig: BBProverConfig) => new TestCircuitProver(await getSimulator(bbConfig, logger)),
+ blockNumber = 1,
+ }: {
+ proverCount?: number;
+ createProver?: (bbConfig: BBProverConfig) => Promise<ServerCircuitProver>;
+ blockNumber?: number;
+ } = {},
  ) {
  const directoriesToCleanup: string[] = [];
  const globalVariables = makeGlobals(blockNumber);
@@ -76,25 +77,10 @@ export class TestContext {

  // Separated dbs for public processor and prover - see public_processor for context
  const ws = await NativeWorldStateService.tmp(
- undefined /* rollupAddress */,
- true /* cleanupTmpDir */,
+ /*rollupAddress=*/ undefined,
+ /*cleanupTmpDir=*/ true,
  prefilledPublicData,
  );
- const publicDb = await ws.fork();
-
- const contractDataSource = new SimpleContractDataSource();
- const worldStateDB = new WorldStateDB(publicDb, contractDataSource);
-
- const tester = new PublicTxSimulationTester(worldStateDB, contractDataSource, publicDb);
-
- const publicTxSimulator = new PublicTxSimulator(publicDb, worldStateDB, globalVariables, true);
- const processor = new PublicProcessor(
- publicDb,
- globalVariables,
- worldStateDB,
- publicTxSimulator,
- new TestDateProvider(),
- );

  let localProver: ServerCircuitProver;
  const config = await getEnvironmentConfig(logger);
@@ -107,6 +93,8 @@ export class TestContext {
  bbBinaryPath: config.expectedBBPath,
  bbWorkingDirectory: config.bbWorkingDirectory,
  bbSkipCleanup: config.bbSkipCleanup,
+ numConcurrentIVCVerifiers: 2,
+ bbIVCConcurrency: 1,
  };
  localProver = await createProver(bbConfig);
  }
@@ -117,15 +105,13 @@ export class TestContext {

  const broker = new TestBroker(proverCount, localProver);
  const facade = new BrokerCircuitProverFacade(broker);
- const orchestrator = new TestProvingOrchestrator(ws, facade, Fr.ZERO);
+ const orchestrator = new TestProvingOrchestrator(ws, facade, EthAddress.ZERO);

  await broker.start();
  facade.start();

  return new this(
- publicTxSimulator,
  ws,
- processor,
  globalVariables,
  localProver,
  broker,
@@ -135,7 +121,6 @@ export class TestContext {
  feePayer,
  initialFeePayerBalance,
  directoriesToCleanup,
- tester,
  logger,
  );
  }
@@ -150,6 +135,10 @@ export class TestContext {
  return blockNumber === 0 ? this.worldState.getCommitted().getInitialHeader() : this.headers.get(blockNumber);
  }

+ public setBlockHeader(header: BlockHeader, blockNumber: number) {
+ this.headers.set(blockNumber, header);
+ }
+
  public getPreviousBlockHeader(currentBlockNumber = this.blockNumber): BlockHeader {
  return this.getBlockHeader(currentBlockNumber - 1)!;
  }
@@ -166,13 +155,8 @@ export class TestContext {
  }
  }

- public async makeProcessedTx(opts?: Parameters<typeof makeBloatedProcessedTx>[0]): Promise<ProcessedTx>;
- public async makeProcessedTx(seed?: number): Promise<ProcessedTx>;
- public async makeProcessedTx(
- seedOrOpts?: Parameters<typeof makeBloatedProcessedTx>[0] | number,
- ): Promise<ProcessedTx> {
- const opts = typeof seedOrOpts === 'number' ? { seed: seedOrOpts } : seedOrOpts;
- const blockNum = (opts?.globalVariables ?? this.globalVariables).blockNumber.toNumber();
+ private async makeProcessedTx(opts?: Parameters<typeof makeBloatedProcessedTx>[0]): Promise<ProcessedTx> {
+ const blockNum = (opts?.globalVariables ?? this.globalVariables).blockNumber;
  const header = this.getBlockHeader(blockNum - 1);
  const tx = await makeBloatedProcessedTx({
  header,
@@ -193,44 +177,80 @@ export class TestContext {
  /** Creates a block with the given number of txs and adds it to world-state */
  public async makePendingBlock(
  numTxs: number,
- numMsgs: number = 0,
+ numL1ToL2Messages: number = 0,
  blockNumOrGlobals: GlobalVariables | number = this.globalVariables,
  makeProcessedTxOpts: (index: number) => Partial<Parameters<typeof makeBloatedProcessedTx>[0]> = () => ({}),
  ) {
  const globalVariables = typeof blockNumOrGlobals === 'number' ? makeGlobals(blockNumOrGlobals) : blockNumOrGlobals;
- const blockNum = globalVariables.blockNumber.toNumber();
+ const blockNum = globalVariables.blockNumber;
  const db = await this.worldState.fork();
- const msgs = times(numMsgs, i => new Fr(blockNum * 100 + i));
+ const l1ToL2Messages = times(numL1ToL2Messages, i => new Fr(blockNum * 100 + i));
+ const merkleTrees = await this.worldState.fork();
+ await merkleTrees.appendLeaves(
+ MerkleTreeId.L1_TO_L2_MESSAGE_TREE,
+ padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
+ );
+ const newL1ToL2Snapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, merkleTrees);
  const txs = await timesParallel(numTxs, i =>
- this.makeProcessedTx({ seed: i + blockNum * 1000, globalVariables, ...makeProcessedTxOpts(i) }),
+ this.makeProcessedTx({
+ seed: i + blockNum * 1000,
+ globalVariables,
+ newL1ToL2Snapshot,
+ ...makeProcessedTxOpts(i),
+ }),
  );
  await this.setTreeRoots(txs);

- const block = await buildBlock(txs, globalVariables, msgs, db);
+ const block = await buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, db);
  this.headers.set(blockNum, block.header);
- await this.worldState.handleL2BlockAndMessages(block, msgs);
- return { block, txs, msgs };
+ await this.worldState.handleL2BlockAndMessages(block, l1ToL2Messages);
+ return { block, txs, l1ToL2Messages };
  }

- public async processPublicFunctions(txs: Tx[], maxTransactions: number) {
- return await this.publicProcessor.process(txs, { maxTransactions });
+ public async processPublicFunctions(
+ txs: Tx[],
+ {
+ maxTransactions = txs.length,
+ numL1ToL2Messages = 0,
+ contractDataSource,
+ }: {
+ maxTransactions?: number;
+ numL1ToL2Messages?: number;
+ contractDataSource?: SimpleContractDataSource;
+ } = {},
+ ) {
+ const l1ToL2Messages = times(numL1ToL2Messages, i => new Fr(this.blockNumber * 100 + i));
+ const merkleTrees = await this.worldState.fork();
+ await merkleTrees.appendLeaves(
+ MerkleTreeId.L1_TO_L2_MESSAGE_TREE,
+ padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
+ );
+
+ const processorFactory = new PublicProcessorFactory(
+ contractDataSource ?? new SimpleContractDataSource(),
+ new TestDateProvider(),
+ );
+ const publicProcessor = processorFactory.create(merkleTrees, this.globalVariables, /*skipFeeEnforcement=*/ false);
+
+ return await publicProcessor.process(txs, { maxTransactions });
  }

- public async setTreeRoots(txs: ProcessedTx[]) {
+ private async setTreeRoots(txs: ProcessedTx[]) {
  const db = await this.worldState.fork();
  for (const tx of txs) {
  const startStateReference = await db.getStateReference();
  await updateExpectedTreesFromTxs(db, [tx]);
  const endStateReference = await db.getStateReference();
  if (tx.avmProvingRequest) {
+ const l1ToL2MessageTree = tx.avmProvingRequest.inputs.publicInputs.startTreeSnapshots.l1ToL2MessageTree;
  tx.avmProvingRequest.inputs.publicInputs.startTreeSnapshots = new TreeSnapshots(
- startStateReference.l1ToL2MessageTree,
+ l1ToL2MessageTree,
  startStateReference.partial.noteHashTree,
  startStateReference.partial.nullifierTree,
  startStateReference.partial.publicDataTree,
  );
  tx.avmProvingRequest.inputs.publicInputs.endTreeSnapshots = new TreeSnapshots(
- endStateReference.l1ToL2MessageTree,
+ l1ToL2MessageTree,
  endStateReference.partial.noteHashTree,
  endStateReference.partial.nullifierTree,
  endStateReference.partial.publicDataTree,
package/src/orchestrator/block-building-helpers.ts
@@ -1,4 +1,4 @@
- import { Blob, type SpongeBlob } from '@aztec/blob-lib';
+ import { BatchedBlobAccumulator, Blob, type SpongeBlob } from '@aztec/blob-lib';
  import {
  ARCHIVE_HEIGHT,
  MAX_CONTRACT_CLASS_LOGS_PER_TX,
@@ -14,23 +14,22 @@ import {
  } from '@aztec/constants';
  import { makeTuple } from '@aztec/foundation/array';
  import { padArrayEnd } from '@aztec/foundation/collection';
- import { sha256Trunc } from '@aztec/foundation/crypto';
- import { Fr } from '@aztec/foundation/fields';
- import type { Logger } from '@aztec/foundation/log';
- import { type Tuple, assertLength, serializeToBuffer, toFriendlyJSON } from '@aztec/foundation/serialize';
- import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
+ import { sha256ToField, sha256Trunc } from '@aztec/foundation/crypto';
+ import { BLS12Point, Fr } from '@aztec/foundation/fields';
+ import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+ import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleTreeRoot } from '@aztec/foundation/trees';
  import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
  import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
  import { PublicDataHint } from '@aztec/stdlib/avm';
  import { Body } from '@aztec/stdlib/block';
  import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
- import { ContractClassLog } from '@aztec/stdlib/logs';
+ import { ContractClassLogFields } from '@aztec/stdlib/logs';
  import type { ParityPublicInputs } from '@aztec/stdlib/parity';
  import {
  type BaseOrMergeRollupPublicInputs,
+ BlockConstantData,
  type BlockRootOrBlockMergePublicInputs,
- ConstantRollupData,
  PrivateBaseRollupHints,
  PrivateBaseStateDiffHints,
  PublicBaseRollupHints,
@@ -55,8 +54,6 @@ import {
  import { Attributes, type Span, runInSpan } from '@aztec/telemetry-client';
  import type { MerkleTreeReadOperations } from '@aztec/world-state';

- import { inspect } from 'util';
-
  /**
  * Type representing the names of the trees for the base rollup.
  */
@@ -67,19 +64,20 @@ type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'Public
  export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive';

  // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
- export const buildBaseRollupHints = runInSpan(
+ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
  'BlockBuilderHelpers',
  'buildBaseRollupHints',
  async (
  span: Span,
  tx: ProcessedTx,
  globalVariables: GlobalVariables,
+ newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
  db: MerkleTreeWriteOperations,
  startSpongeBlob: SpongeBlob,
  ) => {
  span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
  // Get trees info before any changes hit
- const constants = await getConstantRollupData(globalVariables, db);
+ const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
  const start = new PartialStateReference(
  await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
  await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
@@ -141,13 +139,13 @@ export const buildBaseRollupHints = runInSpan(
  const inputSpongeBlob = startSpongeBlob.clone();
  await startSpongeBlob.absorb(tx.txEffect.toBlobFields());

- const contractClassLogsPreimages = makeTuple(
+ const contractClassLogsFields = makeTuple(
  MAX_CONTRACT_CLASS_LOGS_PER_TX,
- i => tx.txEffect.contractClassLogs[i]?.toUnsiloed() || ContractClassLog.empty(),
+ i => tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty(),
  );

  if (tx.avmProvingRequest) {
- const blockHash = await tx.constants.historicalHeader.hash();
+ const blockHash = await tx.data.constants.historicalHeader.hash();
  const archiveRootMembershipWitness = await getMembershipWitnessFor(
  blockHash,
  MerkleTreeId.ARCHIVE,
@@ -157,9 +155,9 @@ export const buildBaseRollupHints = runInSpan(
  );

  return PublicBaseRollupHints.from({
  startSpongeBlob: inputSpongeBlob,
+ lastArchive,
  archiveRootMembershipWitness,
- contractClassLogsPreimages,
- constants,
+ contractClassLogsFields,
  });
  } else {
@@ -199,7 +197,7 @@ export const buildBaseRollupHints = runInSpan(
  feeWriteSiblingPath,
  });

- const blockHash = await tx.constants.historicalHeader.hash();
+ const blockHash = await tx.data.constants.historicalHeader.hash();
  const archiveRootMembershipWitness = await getMembershipWitnessFor(
  blockHash,
  MerkleTreeId.ARCHIVE,
@@ -207,13 +205,21 @@ export const buildBaseRollupHints = runInSpan(
  db,
  );

+ const constants = BlockConstantData.from({
+ lastArchive,
+ newL1ToL2: newL1ToL2MessageTreeSnapshot,
+ vkTreeRoot: getVKTreeRoot(),
+ protocolContractTreeRoot,
+ globalVariables,
+ });
+
  return PrivateBaseRollupHints.from({
  start,
  startSpongeBlob: inputSpongeBlob,
  stateDiffHints,
  feePayerFeeJuiceBalanceReadHint,
  archiveRootMembershipWitness,
- contractClassLogsPreimages,
+ contractClassLogsFields,
  constants,
  });
  }
@@ -226,7 +232,7 @@ export async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot:
  throw new Error(`Cannot find the previous value index for public data ${leafSlot}.`);
  }

- const siblingPath = await db.getSiblingPath<typeof PUBLIC_DATA_TREE_HEIGHT>(MerkleTreeId.PUBLIC_DATA_TREE, index);
+ const siblingPath = await db.getSiblingPath(MerkleTreeId.PUBLIC_DATA_TREE, index);
  const membershipWitness = new MembershipWitness(PUBLIC_DATA_TREE_HEIGHT, index, siblingPath.toTuple());

  const leafPreimage = (await db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, index)) as PublicDataTreeLeafPreimage;
@@ -234,8 +240,8 @@ export async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot:
  throw new Error(`Cannot find the leaf preimage for public data tree at index ${index}.`);
  }

- const exists = leafPreimage.slot.toBigInt() === leafSlot;
- const value = exists ? leafPreimage.value : Fr.ZERO;
+ const exists = leafPreimage.leaf.slot.toBigInt() === leafSlot;
+ const value = exists ? leafPreimage.leaf.value : Fr.ZERO;

  return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage);
  }
@@ -245,48 +251,56 @@ export const buildBlobHints = runInSpan(
  'buildBlobHints',
  async (_span: Span, txEffects: TxEffect[]) => {
  const blobFields = txEffects.flatMap(tx => tx.toBlobFields());
- const blobs = await Blob.getBlobs(blobFields);
- const blobCommitments = blobs.map(b => b.commitmentToFields());
+ const blobs = await Blob.getBlobsPerBlock(blobFields);
+ // TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
+ // - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
+ // - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
+ // exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
+ // We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
+ const blobCommitments = blobs.map(b => BLS12Point.decompress(b.commitment));
  const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
  return { blobFields, blobCommitments, blobs, blobsHash };
  },
  );

+ export const accumulateBlobs = runInSpan(
+ 'BlockBuilderHelpers',
+ 'accumulateBlobs',
+ async (_span: Span, txs: ProcessedTx[], startBlobAccumulator: BatchedBlobAccumulator) => {
+ const blobFields = txs.flatMap(tx => tx.txEffect.toBlobFields());
+ const blobs = await Blob.getBlobsPerBlock(blobFields);
+ const endBlobAccumulator = startBlobAccumulator.accumulateBlobs(blobs);
+ return endBlobAccumulator;
+ },
+ );
+
  export const buildHeaderFromCircuitOutputs = runInSpan(
  'BlockBuilderHelpers',
  'buildHeaderFromCircuitOutputs',
- async (
+ (
  _span,
  previousRollupData: BaseOrMergeRollupPublicInputs[],
  parityPublicInputs: ParityPublicInputs,
  rootRollupOutputs: BlockRootOrBlockMergePublicInputs,
+ blobsHash: Fr,
  endState: StateReference,
- logger?: Logger,
  ) => {
  if (previousRollupData.length > 2) {
  throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
  }

- const blobsHash = rootRollupOutputs.blobPublicInputs[0].getBlobsHash();
- const numTxs = previousRollupData.reduce((sum, d) => sum + d.numTxs, 0);
  const outHash =
  previousRollupData.length === 0
- ? Fr.ZERO.toBuffer()
+ ? Fr.ZERO
  : previousRollupData.length === 1
- ? previousRollupData[0].outHash.toBuffer()
- : sha256Trunc(
- Buffer.concat([previousRollupData[0].outHash.toBuffer(), previousRollupData[1].outHash.toBuffer()]),
- );
- const contentCommitment = new ContentCommitment(
- new Fr(numTxs),
- blobsHash,
- parityPublicInputs.shaRoot.toBuffer(),
- outHash,
- );
+ ? previousRollupData[0].outHash
+ : sha256ToField([previousRollupData[0].outHash, previousRollupData[1].outHash]);
+ const contentCommitment = new ContentCommitment(blobsHash, parityPublicInputs.shaRoot, outHash);

  const accumulatedFees = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedFees), Fr.ZERO);
  const accumulatedManaUsed = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedManaUsed), Fr.ZERO);
- const header = new BlockHeader(
+
+ return new BlockHeader(
  rootRollupOutputs.previousArchive,
  contentCommitment,
  endState,
@@ -294,15 +308,6 @@ export const buildHeaderFromCircuitOutputs = runInSpan(
  accumulatedFees,
  accumulatedManaUsed,
  );
- if (!(await header.hash()).equals(rootRollupOutputs.endBlockHash)) {
- logger?.error(
- `Block header mismatch when building header from circuit outputs.` +
- `\n\nHeader: ${inspect(header)}` +
- `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`,
- );
- throw new Error(`Block header mismatch when building from circuit outputs`);
- }
- return header;
  },
  );

@@ -316,7 +321,7 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
  l1ToL2Messages: Fr[],
  db: MerkleTreeReadOperations,
  ) => {
- span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber.toNumber());
+ span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
  const stateReference = new StateReference(
  await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
  new PartialStateReference(
@@ -331,27 +336,15 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
  const txEffects = txs.map(tx => tx.txEffect);
  const body = new Body(txEffects);

- const numTxs = body.txEffects.length;
- const outHash =
- numTxs === 0
- ? Fr.ZERO.toBuffer()
- : numTxs === 1
- ? body.txEffects[0].txOutHash()
- : computeUnbalancedMerkleRoot(
- body.txEffects.map(tx => tx.txOutHash()),
- TxEffect.empty().txOutHash(),
- );
-
- l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
- const hasher = (left: Buffer, right: Buffer) => Promise.resolve(sha256Trunc(Buffer.concat([left, right])));
- const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
- const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
- const parityShaRoot = await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer()));
- const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobs(body.toBlobFields()));
-
- const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityShaRoot, outHash);
-
- const fees = body.txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+ const txOutHashes = txEffects.map(tx => tx.txOutHash());
+ const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeUnbalancedMerkleTreeRoot(txOutHashes));
+
+ const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
+ const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobsPerBlock(body.toBlobFields()));
+
+ const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
+
+ const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
  const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);

  const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);
@@ -360,12 +353,30 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
  },
  );

- export function getBlobsHashFromBlobs(inputs: Blob[]): Buffer {
- const blobHashes = serializeToBuffer(inputs.map(b => b.getEthVersionedBlobHash()));
- return sha256Trunc(serializeToBuffer(blobHashes));
+ /** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
+ export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Promise<Fr> {
+ const l1ToL2Messages = padArrayEnd(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
+ const hasher = (left: Buffer, right: Buffer) =>
+ Promise.resolve(sha256Trunc(Buffer.concat([left, right])) as Buffer<ArrayBuffer>);
+ const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
+ const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
+ return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
+ }
+
+ export function getBlobsHashFromBlobs(inputs: Blob[]): Fr {
+ return sha256ToField(inputs.map(b => b.getEthVersionedBlobHash()));
+ }
+
+ // Note: tested against the constant values in block_root/empty_block_root_rollup_inputs.nr, set by block_building_helpers.test.ts.
+ // Having this separate fn hopefully makes it clear how we treat empty blocks and their blobs, and won't break if we decide to change how
+ // getBlobsPerBlock() works on empty input.
+ export async function getEmptyBlockBlobsHash(): Promise<Fr> {
+ const blobHash = (await Blob.getBlobsPerBlock([])).map(b => b.getEthVersionedBlobHash());
+ return sha256ToField(blobHash);
  }

  // Validate that the roots of all local trees match the output of the root circuit simulation
+ // TODO: does this get called?
  export async function validateBlockRootOutput(
  blockRootOutput: BlockRootOrBlockMergePublicInputs,
  blockHeader: BlockHeader,
@@ -398,25 +409,18 @@ export const validateState = runInSpan(
  },
  );

+ export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
+ const { size } = await db.getTreeInfo(treeId);
+ const path = await db.getSiblingPath(treeId, size - 1n);
+ return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
+ }
+
  export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
  const { size } = await db.getTreeInfo(treeId);
  const path = await db.getSiblingPath(treeId, size);
  return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
  }

- export const getConstantRollupData = runInSpan(
- 'BlockBuilderHelpers',
- 'getConstantRollupData',
- async (_span, globalVariables: GlobalVariables, db: MerkleTreeReadOperations): Promise<ConstantRollupData> => {
- return ConstantRollupData.from({
- vkTreeRoot: getVKTreeRoot(),
- protocolContractTreeRoot,
- lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
- globalVariables,
- });
- },
- );
-
  export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot> {
  const treeInfo = await db.getTreeInfo(id);
  return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
@@ -537,17 +541,17 @@ function validateSimulatedTree(
  }

  export function validateTx(tx: ProcessedTx) {
- const txHeader = tx.constants.historicalHeader;
- if (txHeader.state.l1ToL2MessageTree.isZero()) {
+ const txHeader = tx.data.constants.historicalHeader;
+ if (txHeader.state.l1ToL2MessageTree.isEmpty()) {
  throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
  }
- if (txHeader.state.partial.noteHashTree.isZero()) {
+ if (txHeader.state.partial.noteHashTree.isEmpty()) {
  throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`);
  }
- if (txHeader.state.partial.nullifierTree.isZero()) {
+ if (txHeader.state.partial.nullifierTree.isEmpty()) {
  throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`);
  }
- if (txHeader.state.partial.publicDataTree.isZero()) {
+ if (txHeader.state.partial.publicDataTree.isEmpty()) {
  throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`);
  }
  }