@aztec/prover-client 0.0.1-commit.b655e406 → 0.0.1-commit.c2595eba

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. package/dest/config.d.ts +2 -2
  2. package/dest/config.d.ts.map +1 -1
  3. package/dest/config.js +1 -1
  4. package/dest/index.d.ts +1 -1
  5. package/dest/light/index.d.ts +2 -0
  6. package/dest/light/index.d.ts.map +1 -0
  7. package/dest/light/index.js +1 -0
  8. package/dest/light/lightweight_checkpoint_builder.d.ts +45 -0
  9. package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
  10. package/dest/light/lightweight_checkpoint_builder.js +197 -0
  11. package/dest/mocks/fixtures.d.ts +1 -4
  12. package/dest/mocks/fixtures.d.ts.map +1 -1
  13. package/dest/mocks/fixtures.js +9 -18
  14. package/dest/mocks/test_context.d.ts +29 -46
  15. package/dest/mocks/test_context.d.ts.map +1 -1
  16. package/dest/mocks/test_context.js +116 -116
  17. package/dest/orchestrator/block-building-helpers.d.ts +17 -19
  18. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  19. package/dest/orchestrator/block-building-helpers.js +88 -113
  20. package/dest/orchestrator/block-proving-state.d.ts +17 -11
  21. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  22. package/dest/orchestrator/block-proving-state.js +81 -20
  23. package/dest/orchestrator/checkpoint-proving-state.d.ts +22 -9
  24. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
  25. package/dest/orchestrator/checkpoint-proving-state.js +49 -17
  26. package/dest/orchestrator/epoch-proving-state.d.ts +12 -10
  27. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  28. package/dest/orchestrator/epoch-proving-state.js +38 -4
  29. package/dest/orchestrator/index.d.ts +1 -1
  30. package/dest/orchestrator/orchestrator.d.ts +26 -11
  31. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  32. package/dest/orchestrator/orchestrator.js +556 -161
  33. package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
  34. package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
  35. package/dest/orchestrator/orchestrator_metrics.js +2 -15
  36. package/dest/orchestrator/tx-proving-state.d.ts +6 -5
  37. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  38. package/dest/orchestrator/tx-proving-state.js +7 -16
  39. package/dest/prover-client/factory.d.ts +3 -3
  40. package/dest/prover-client/factory.d.ts.map +1 -1
  41. package/dest/prover-client/index.d.ts +1 -1
  42. package/dest/prover-client/prover-client.d.ts +3 -3
  43. package/dest/prover-client/prover-client.d.ts.map +1 -1
  44. package/dest/prover-client/prover-client.js +7 -4
  45. package/dest/prover-client/server-epoch-prover.d.ts +8 -7
  46. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  47. package/dest/prover-client/server-epoch-prover.js +2 -2
  48. package/dest/proving_broker/broker_prover_facade.d.ts +25 -23
  49. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  50. package/dest/proving_broker/broker_prover_facade.js +9 -15
  51. package/dest/proving_broker/config.d.ts +16 -8
  52. package/dest/proving_broker/config.d.ts.map +1 -1
  53. package/dest/proving_broker/config.js +14 -2
  54. package/dest/proving_broker/factory.d.ts +1 -1
  55. package/dest/proving_broker/fixtures.d.ts +3 -2
  56. package/dest/proving_broker/fixtures.d.ts.map +1 -1
  57. package/dest/proving_broker/fixtures.js +3 -2
  58. package/dest/proving_broker/index.d.ts +1 -1
  59. package/dest/proving_broker/proof_store/factory.d.ts +2 -2
  60. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
  61. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
  62. package/dest/proving_broker/proof_store/index.d.ts +1 -1
  63. package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
  64. package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
  65. package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
  66. package/dest/proving_broker/proving_agent.d.ts +5 -9
  67. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  68. package/dest/proving_broker/proving_agent.js +4 -19
  69. package/dest/proving_broker/proving_broker.d.ts +2 -2
  70. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  71. package/dest/proving_broker/proving_broker.js +6 -11
  72. package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
  73. package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
  74. package/dest/proving_broker/proving_broker_database/persisted.d.ts +5 -3
  75. package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
  76. package/dest/proving_broker/proving_broker_database/persisted.js +392 -3
  77. package/dest/proving_broker/proving_broker_database.d.ts +3 -2
  78. package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
  79. package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
  80. package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
  81. package/dest/proving_broker/proving_broker_instrumentation.js +15 -35
  82. package/dest/proving_broker/proving_job_controller.d.ts +5 -3
  83. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  84. package/dest/proving_broker/proving_job_controller.js +8 -6
  85. package/dest/proving_broker/rpc.d.ts +4 -4
  86. package/dest/test/mock_proof_store.d.ts +1 -1
  87. package/dest/test/mock_proof_store.d.ts.map +1 -1
  88. package/dest/test/mock_prover.d.ts +3 -4
  89. package/dest/test/mock_prover.d.ts.map +1 -1
  90. package/dest/test/mock_prover.js +4 -4
  91. package/package.json +20 -18
  92. package/src/config.ts +1 -1
  93. package/src/light/index.ts +1 -0
  94. package/src/light/lightweight_checkpoint_builder.ts +284 -0
  95. package/src/mocks/fixtures.ts +9 -31
  96. package/src/mocks/test_context.ts +158 -177
  97. package/src/orchestrator/block-building-helpers.ts +127 -207
  98. package/src/orchestrator/block-proving-state.ts +103 -25
  99. package/src/orchestrator/checkpoint-proving-state.ts +71 -21
  100. package/src/orchestrator/epoch-proving-state.ts +64 -14
  101. package/src/orchestrator/orchestrator.ts +158 -149
  102. package/src/orchestrator/orchestrator_metrics.ts +2 -25
  103. package/src/orchestrator/tx-proving-state.ts +10 -27
  104. package/src/prover-client/factory.ts +6 -2
  105. package/src/prover-client/prover-client.ts +25 -15
  106. package/src/prover-client/server-epoch-prover.ts +6 -7
  107. package/src/proving_broker/broker_prover_facade.ts +31 -37
  108. package/src/proving_broker/config.ts +17 -1
  109. package/src/proving_broker/fixtures.ts +8 -3
  110. package/src/proving_broker/proving_agent.ts +6 -19
  111. package/src/proving_broker/proving_broker.ts +6 -9
  112. package/src/proving_broker/proving_broker_database/memory.ts +2 -1
  113. package/src/proving_broker/proving_broker_database/persisted.ts +20 -5
  114. package/src/proving_broker/proving_broker_database.ts +2 -1
  115. package/src/proving_broker/proving_broker_instrumentation.ts +14 -35
  116. package/src/proving_broker/proving_job_controller.ts +13 -7
  117. package/src/test/mock_prover.ts +2 -14
  118. package/dest/block-factory/index.d.ts +0 -2
  119. package/dest/block-factory/index.d.ts.map +0 -1
  120. package/dest/block-factory/index.js +0 -1
  121. package/dest/block-factory/light.d.ts +0 -38
  122. package/dest/block-factory/light.d.ts.map +0 -1
  123. package/dest/block-factory/light.js +0 -94
  124. package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
  125. package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
  126. package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
  127. package/src/block-factory/index.ts +0 -1
  128. package/src/block-factory/light.ts +0 -140
  129. package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
@@ -1,8 +1,8 @@
  import {
- BatchedBlob,
  BatchedBlobAccumulator,
  SpongeBlob,
  computeBlobsHashFromBlobs,
+ encodeBlockBlobData,
  getBlobCommitmentsFromBlobs,
  getBlobsPerL1Block,
  } from '@aztec/blob-lib';
@@ -17,24 +17,18 @@ import {
  NULLIFIER_SUBTREE_HEIGHT,
  NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
  NULLIFIER_TREE_HEIGHT,
- NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
  PUBLIC_DATA_TREE_HEIGHT,
  } from '@aztec/constants';
  import { makeTuple } from '@aztec/foundation/array';
+ import { BlockNumber } from '@aztec/foundation/branded-types';
  import { padArrayEnd } from '@aztec/foundation/collection';
- import { sha256Trunc } from '@aztec/foundation/crypto';
- import { Fr } from '@aztec/foundation/fields';
- import { type Bufferable, type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
- import {
- MembershipWitness,
- MerkleTreeCalculator,
- computeCompressedUnbalancedMerkleTreeRoot,
- } from '@aztec/foundation/trees';
+ import { Fr } from '@aztec/foundation/curves/bn254';
+ import { type Bufferable, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+ import { MembershipWitness } from '@aztec/foundation/trees';
  import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
  import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
- import { Body, L2BlockHeader, getBlockBlobFields } from '@aztec/stdlib/block';
- import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
+ import { Body } from '@aztec/stdlib/block';
  import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
  import { ContractClassLogFields } from '@aztec/stdlib/logs';
  import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
@@ -50,13 +44,11 @@ import {
  AppendOnlyTreeSnapshot,
  MerkleTreeId,
  NullifierLeafPreimage,
- PublicDataTreeLeaf,
  PublicDataTreeLeafPreimage,
  getTreeHeight,
  } from '@aztec/stdlib/trees';
  import {
  BlockHeader,
- ContentCommitment,
  GlobalVariables,
  PartialStateReference,
  type ProcessedTx,
@@ -103,34 +95,11 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
  NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
  );

- // Update the note hash trees with the new items being inserted to get the new roots
- // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
- const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
- await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
-
- // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
- // All reads that refer to writes in the same tx are transient and can be simplified out.
- const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
-
- // Update the nullifier tree, capturing the low nullifier info for each individual operation
- const {
- lowLeavesWitnessData: nullifierWitnessLeaves,
- newSubtreeSiblingPath: nullifiersSubtreeRootSiblingPath,
- sortedNewLeaves: sortedNullifiers,
- sortedNewLeavesIndexes,
- } = await db.batchInsert(
- MerkleTreeId.NULLIFIER_TREE,
- padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
- NULLIFIER_SUBTREE_HEIGHT,
- );
-
- if (nullifierWitnessLeaves === undefined) {
- throw new Error(`Could not craft nullifier batch insertion proofs`);
- }
+ const { nullifierInsertionResult, publicDataInsertionResult } = await insertSideEffects(tx, db);

  const blockHash = await tx.data.constants.anchorBlockHeader.hash();
  const anchorBlockArchiveSiblingPath = (
- await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)
+ await getMembershipWitnessFor(blockHash.toFr(), MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)
  ).siblingPath;

  const contractClassLogsFields = makeTuple(
@@ -146,52 +115,59 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
  contractClassLogsFields,
  });
  } else {
- if (
- txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 ||
- txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 ||
- txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1
- ) {
+ if (tx.txEffect.publicDataWrites.length > 1) {
  throw new Error(`More than one public data write in a private only tx`);
  }

  // Get hints for reading fee payer's balance in the public data tree.
- const feePayerBalanceMembershipWitness = txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0];
- const feePayerBalanceLeafPreimage = txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0];
+ const feePayerBalanceLeafWitnessData = publicDataInsertionResult.lowLeavesWitnessData[0];
+ const feePayerBalanceMembershipWitness = MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
+ feePayerBalanceLeafWitnessData.index,
+ assertLength(feePayerBalanceLeafWitnessData.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
+ );
+ const feePayerBalanceLeafPreimage = feePayerBalanceLeafWitnessData.leafPreimage as PublicDataTreeLeafPreimage;
  const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
- if (!feePayerBalanceMembershipWitness || !leafSlot.equals(feePayerBalanceLeafPreimage?.leaf.slot)) {
+ if (!leafSlot.equals(feePayerBalanceLeafPreimage.leaf.slot)) {
  throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
  }

- // Extract witness objects from returned data
- const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
- nullifierWitnessLeaves.map(l =>
- MembershipWitness.fromBufferArray(
+ // Get hints for inserting the nullifiers.
+ const nullifierLowLeavesWitnessData = nullifierInsertionResult.lowLeavesWitnessData!;
+ const nullifierPredecessorPreimages = padArrayEnd(
+ nullifierLowLeavesWitnessData.map(l => l.leafPreimage as NullifierLeafPreimage),
+ NullifierLeafPreimage.empty(),
+ MAX_NULLIFIERS_PER_TX,
+ );
+ const nullifierPredecessorMembershipWitnesses = padArrayEnd(
+ nullifierLowLeavesWitnessData.map(l =>
+ MembershipWitness.fromBufferArray<typeof NULLIFIER_TREE_HEIGHT>(
  l.index,
  assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT),
  ),
- );
+ ),
+ makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+ MAX_NULLIFIERS_PER_TX,
+ );
+ const sortedNullifiers = assertLength(
+ nullifierInsertionResult.sortedNewLeaves.map(n => Fr.fromBuffer(n)),
+ MAX_NULLIFIERS_PER_TX,
+ );
+ const sortedNullifierIndexes = assertLength(
+ nullifierInsertionResult.sortedNewLeavesIndexes,
+ MAX_NULLIFIERS_PER_TX,
+ );
+ const nullifierSubtreeRootSiblingPath = assertLength(
+ nullifierInsertionResult.newSubtreeSiblingPath.toFields(),
+ NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+ );

  const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
  noteHashSubtreeRootSiblingPath,
- nullifierPredecessorPreimages: padArrayEnd(
- nullifierWitnessLeaves.map(l => l.leafPreimage as NullifierLeafPreimage),
- NullifierLeafPreimage.empty(),
- MAX_NULLIFIERS_PER_TX,
- ),
- nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
- i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
- ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
- : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
- ),
- sortedNullifiers: assertLength(
- sortedNullifiers.map(n => Fr.fromBuffer(n)),
- MAX_NULLIFIERS_PER_TX,
- ),
- sortedNullifierIndexes: assertLength(sortedNewLeavesIndexes, MAX_NULLIFIERS_PER_TX),
- nullifierSubtreeRootSiblingPath: assertLength(
- nullifiersSubtreeRootSiblingPath.toFields(),
- NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
- ),
+ nullifierPredecessorPreimages,
+ nullifierPredecessorMembershipWitnesses,
+ sortedNullifiers,
+ sortedNullifierIndexes,
+ nullifierSubtreeRootSiblingPath,
  feePayerBalanceMembershipWitness,
  });

@@ -217,6 +193,44 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
  },
  );

+ export const insertSideEffects = runInSpan(
+ 'BlockBuilderHelpers',
+ 'buildBaseRollupHints',
+ async (span: Span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
+ span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
+
+ // Insert the note hashes. Padded with zeros to the max number of note hashes per tx.
+ const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
+ await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
+
+ // Insert the nullifiers. Padded with zeros to the max number of nullifiers per tx.
+ // Capturing the low nullifier info for each individual operation.
+ const nullifierInsertionResult = await db.batchInsert(
+ MerkleTreeId.NULLIFIER_TREE,
+ padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
+ NULLIFIER_SUBTREE_HEIGHT,
+ );
+ if (nullifierInsertionResult.lowLeavesWitnessData === undefined) {
+ throw new Error(`Failed to batch insert nullifiers.`);
+ }
+
+ if (tx.txEffect.publicDataWrites.some(write => write.isEmpty())) {
+ throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}.`);
+ }
+ // Insert the public data writes sequentially. No need to pad them to the max array size.
+ // Capturing the low leaf info for each individual operation.
+ const publicDataInsertionResult = await db.sequentialInsert(
+ MerkleTreeId.PUBLIC_DATA_TREE,
+ tx.txEffect.publicDataWrites.map(write => write.toBuffer()),
+ );
+
+ return {
+ nullifierInsertionResult,
+ publicDataInsertionResult,
+ };
+ },
+ );
+
  export function getChonkProofFromTx(tx: Tx | ProcessedTx) {
  const publicInputs = tx.data.publicInputs().toFields();

@@ -246,24 +260,15 @@ export const buildBlobHints = (blobFields: Fr[]) => {
  return { blobCommitments, blobs, blobsHash };
  };

- // Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
- export const buildBlobDataFromTxs = async (txsPerCheckpoint: ProcessedTx[][]) => {
- const blobFields = txsPerCheckpoint.map(txs => getCheckpointBlobFields([txs.map(tx => tx.txEffect)]));
- const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
- return { blobFieldsLengths: blobFields.map(fields => fields.length), finalBlobChallenges };
- };
-
  export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
- const blobs = blobFieldsPerCheckpoint.map(blobFields => getBlobsPerL1Block(blobFields));
- return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
+ return await BatchedBlobAccumulator.precomputeBatchedBlobChallenges(blobFieldsPerCheckpoint);
  };

  export const accumulateBlobs = runInSpan(
  'BlockBuilderHelpers',
  'accumulateBlobs',
  async (_span: Span, blobFields: Fr[], startBlobAccumulator: BatchedBlobAccumulator) => {
- const blobs = getBlobsPerL1Block(blobFields);
- const endBlobAccumulator = await startBlobAccumulator.accumulateBlobs(blobs);
+ const endBlobAccumulator = await startBlobAccumulator.accumulateFields(blobFields);
  return endBlobAccumulator;
  },
  );
@@ -276,8 +281,8 @@ export const buildHeaderFromCircuitOutputs = runInSpan(
  const globalVariables = GlobalVariables.from({
  chainId: constants.chainId,
  version: constants.version,
- blockNumber: blockRootRollupOutput.previousArchive.nextAvailableLeafIndex,
- timestamp: blockRootRollupOutput.endTimestamp,
+ blockNumber: BlockNumber(blockRootRollupOutput.previousArchive.nextAvailableLeafIndex),
+ timestamp: blockRootRollupOutput.timestamp,
  slotNumber: constants.slotNumber,
  coinbase: constants.coinbase,
  feeRecipient: constants.feeRecipient,
@@ -303,103 +308,60 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
  async (
  span,
  txs: ProcessedTx[],
+ lastArchive: AppendOnlyTreeSnapshot,
+ endState: StateReference,
  globalVariables: GlobalVariables,
- l1ToL2Messages: Fr[],
- db: MerkleTreeReadOperations,
- startSpongeBlob?: SpongeBlob,
+ startSpongeBlob: SpongeBlob,
+ isFirstBlock: boolean,
  ) => {
  span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
- const stateReference = new StateReference(
- await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
- new PartialStateReference(
- await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
- await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
- await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
- ),
- );
-
- const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);

  const txEffects = txs.map(tx => tx.txEffect);
  const body = new Body(txEffects);

- const txOutHashes = txEffects.map(tx => tx.txOutHash());
- const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeCompressedUnbalancedMerkleTreeRoot(txOutHashes));
-
- const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
- const blockBlobFields = body.toBlobFields();
- // TODO(#17027): This only works when there's one block per checkpoint.
- const blobFields = [new Fr(blockBlobFields.length + 1)].concat(blockBlobFields);
- const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
-
- const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
-
- const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
- const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+ const totalFees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+ const totalManaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+
+ const { l1ToL2MessageTree, partial } = endState;
+
+ const blockBlobFields = encodeBlockBlobData({
+ blockEndMarker: {
+ timestamp: globalVariables.timestamp,
+ blockNumber: globalVariables.blockNumber,
+ numTxs: txs.length,
+ },
+ blockEndStateField: {
+ l1ToL2MessageNextAvailableLeafIndex: l1ToL2MessageTree.nextAvailableLeafIndex,
+ noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+ nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+ publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+ totalManaUsed: totalManaUsed.toBigInt(),
+ },
+ lastArchiveRoot: lastArchive.root,
+ noteHashRoot: partial.noteHashTree.root,
+ nullifierRoot: partial.nullifierTree.root,
+ publicDataRoot: partial.publicDataTree.root,
+ l1ToL2MessageRoot: isFirstBlock ? l1ToL2MessageTree.root : undefined,
+ txs: body.toTxBlobData(),
+ });

- const endSpongeBlob = startSpongeBlob?.clone() ?? (await SpongeBlob.init(blobFields.length));
+ const endSpongeBlob = startSpongeBlob.clone();
  await endSpongeBlob.absorb(blockBlobFields);
  const spongeBlobHash = await endSpongeBlob.squeeze();

- const header = new L2BlockHeader(
- previousArchive,
- contentCommitment,
- stateReference,
- globalVariables,
- fees,
- manaUsed,
+ const header = BlockHeader.from({
+ lastArchive,
+ state: endState,
  spongeBlobHash,
- );
-
- return { header, body };
- },
- );
-
- export const buildBlockHeaderFromTxs = runInSpan(
- 'BlockBuilderHelpers',
- 'buildBlockHeaderFromTxs',
- async (
- span,
- txs: ProcessedTx[],
- globalVariables: GlobalVariables,
- startSpongeBlob: SpongeBlob,
- db: MerkleTreeReadOperations,
- ) => {
- span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
- const stateReference = new StateReference(
- await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
- new PartialStateReference(
- await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
- await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
- await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
- ),
- );
-
- const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-
- const blobFields = getBlockBlobFields(txs.map(tx => tx.txEffect));
- const endSpongeBlob = startSpongeBlob.clone();
- await endSpongeBlob.absorb(blobFields);
- const spongeBlobHash = await endSpongeBlob.squeeze();
-
- const txEffects = txs.map(tx => tx.txEffect);
- const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
- const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+ globalVariables,
+ totalFees,
+ totalManaUsed,
+ });

- return new BlockHeader(previousArchive, stateReference, spongeBlobHash, globalVariables, fees, manaUsed);
+ return { header, body, blockBlobFields };
  },
  );

- /** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
- export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Promise<Fr> {
- const l1ToL2Messages = padArrayEnd<Fr, number>(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
- const hasher = (left: Buffer, right: Buffer) =>
- Promise.resolve(sha256Trunc(Buffer.concat([left, right])) as Buffer<ArrayBuffer>);
- const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
- const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
- return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
- }
-
  export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
  const { size } = await db.getTreeInfo(treeId);
  const path = await db.getSiblingPath(treeId, size - 1n);
@@ -425,48 +387,6 @@ export function makeEmptyMembershipWitness<N extends number>(height: N) {
  );
  }

- const processPublicDataUpdateRequests = runInSpan(
- 'BlockBuilderHelpers',
- 'processPublicDataUpdateRequests',
- async (span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
- span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
- const allPublicDataWrites = tx.txEffect.publicDataWrites.map(
- ({ leafSlot, value }) => new PublicDataTreeLeaf(leafSlot, value),
- );
-
- const { lowLeavesWitnessData, insertionWitnessData } = await db.sequentialInsert(
- MerkleTreeId.PUBLIC_DATA_TREE,
- allPublicDataWrites.map(write => {
- if (write.isEmpty()) {
- throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}`);
- }
- return write.toBuffer();
- }),
- );
-
- const lowPublicDataWritesPreimages = lowLeavesWitnessData.map(
- lowLeafWitness => lowLeafWitness.leafPreimage as PublicDataTreeLeafPreimage,
- );
- const lowPublicDataWritesMembershipWitnesses = lowLeavesWitnessData.map(lowLeafWitness =>
- MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
- lowLeafWitness.index,
- assertLength(lowLeafWitness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
- ),
- );
- const publicDataWritesSiblingPaths = insertionWitnessData.map(w => {
- const insertionSiblingPath = w.siblingPath.toFields();
- assertLength(insertionSiblingPath, PUBLIC_DATA_TREE_HEIGHT);
- return insertionSiblingPath as Tuple<Fr, typeof PUBLIC_DATA_TREE_HEIGHT>;
- });
-
- return {
- lowPublicDataWritesPreimages,
- lowPublicDataWritesMembershipWitnesses,
- publicDataWritesSiblingPaths,
- };
- },
- );
-
  export async function getSubtreeSiblingPath(
  treeId: MerkleTreeId,
  subtreeHeight: number,
@@ -1,4 +1,4 @@
- import type { SpongeBlob } from '@aztec/blob-lib';
+ import { type BlockBlobData, type BlockEndBlobData, type SpongeBlob, encodeBlockEndBlobData } from '@aztec/blob-lib';
  import {
  type ARCHIVE_HEIGHT,
  type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
@@ -6,7 +6,8 @@ import {
  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
  NUM_BASE_PARITY_PER_ROOT_PARITY,
  } from '@aztec/constants';
- import { Fr } from '@aztec/foundation/fields';
+ import { BlockNumber } from '@aztec/foundation/branded-types';
+ import { Fr } from '@aztec/foundation/curves/bn254';
  import { type Tuple, assertLength } from '@aztec/foundation/serialize';
  import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
@@ -25,7 +26,7 @@ import {
  } from '@aztec/stdlib/rollup';
  import type { CircuitName } from '@aztec/stdlib/stats';
  import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
- import { type BlockHeader, GlobalVariables } from '@aztec/stdlib/tx';
+ import { BlockHeader, GlobalVariables, StateReference } from '@aztec/stdlib/tx';
  import type { UInt64 } from '@aztec/stdlib/types';

  import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-helpers.js';
@@ -54,6 +55,7 @@ export class BlockProvingState {
  | ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
  | undefined;
  private builtBlockHeader: BlockHeader | undefined;
+ private endState: StateReference | undefined;
  private endSpongeBlob: SpongeBlob | undefined;
  private txs: TxProvingState[] = [];
  private isFirstBlock: boolean;
@@ -61,7 +63,7 @@ export class BlockProvingState {

  constructor(
  public readonly index: number,
- public readonly blockNumber: number,
+ public readonly blockNumber: BlockNumber,
  public readonly totalNumTxs: number,
  private readonly constants: CheckpointConstantData,
  private readonly timestamp: UInt64,
@@ -200,30 +202,34 @@ export class BlockProvingState {
  return this.blockRootProof?.provingOutput?.inputs;
  }

- public setBuiltBlockHeader(blockHeader: BlockHeader) {
- this.builtBlockHeader = blockHeader;
- }
+ public async buildBlockHeader() {
+ if (this.isAcceptingTxs()) {
+ throw new Error('All txs must be added to the block before building the header.');
+ }
+ if (!this.endState) {
+ throw new Error('Call `setEndState` first.');
+ }
+ if (!this.endSpongeBlob) {
+ throw new Error('Call `setEndSpongeBlob` first.');
+ }
+
+ const endSpongeBlob = this.endSpongeBlob.clone();
+ const endSpongeBlobHash = await endSpongeBlob.squeeze();
+
+ this.builtBlockHeader = new BlockHeader(
+ this.lastArchiveTreeSnapshot,
+ this.endState,
+ endSpongeBlobHash,
+ this.#getGlobalVariables(),
+ this.#getTotalFees(),
+ new Fr(this.#getTotalManaUsed()),
+ );

- public getBuiltBlockHeader() {
  return this.builtBlockHeader;
  }

- public getGlobalVariables() {
- if (this.txs.length) {
- return this.txs[0].processedTx.globalVariables;
- }
-
- const constants = this.constants;
- return GlobalVariables.from({
- chainId: constants.chainId,
- version: constants.version,
- blockNumber: this.blockNumber,
- slotNumber: constants.slotNumber,
- timestamp: this.timestamp,
- coinbase: constants.coinbase,
- feeRecipient: constants.feeRecipient,
- gasFees: constants.gasFees,
- });
+ public getBuiltBlockHeader() {
+ return this.builtBlockHeader;
  }

  public getStartSpongeBlob() {
@@ -238,6 +244,52 @@ export class BlockProvingState {
  return this.endSpongeBlob;
  }

+ public setEndState(endState: StateReference) {
+ this.endState = endState;
+ }
+
+ public hasEndState() {
+ return !!this.endState;
+ }
+
+ public getBlockEndBlobFields(): Fr[] {
+ return encodeBlockEndBlobData(this.getBlockEndBlobData());
+ }
+
+ getBlockEndBlobData(): BlockEndBlobData {
+ if (!this.endState) {
+ throw new Error('Call `setEndState` first.');
+ }
+
+ const partial = this.endState.partial;
+ return {
+ blockEndMarker: {
+ numTxs: this.totalNumTxs,
+ timestamp: this.timestamp,
+ blockNumber: this.blockNumber,
+ },
+ blockEndStateField: {
+ l1ToL2MessageNextAvailableLeafIndex: this.newL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex,
+ noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+ nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+ publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+ totalManaUsed: this.#getTotalManaUsed(),
+ },
+ lastArchiveRoot: this.lastArchiveTreeSnapshot.root,
+ noteHashRoot: partial.noteHashTree.root,
+ nullifierRoot: partial.nullifierTree.root,
+ publicDataRoot: partial.publicDataTree.root,
+ l1ToL2MessageRoot: this.isFirstBlock ? this.newL1ToL2MessageTreeSnapshot.root : undefined,
+ };
+ }
+
+ public getBlockBlobData(): BlockBlobData {
+ return {
+ ...this.getBlockEndBlobData(),
+ txs: this.getTxEffects().map(t => t.toTxBlobData()),
+ };
+ }
+
  public getTxEffects() {
  return this.txs.map(t => t.processedTx.txEffect);
  }
@@ -295,7 +347,6 @@ export class BlockProvingState {
  this.lastArchiveTreeSnapshot,
  this.headerOfLastBlockInPreviousCheckpoint.state,
  this.constants,
- this.startSpongeBlob,
  this.timestamp,
  this.lastL1ToL2MessageSubtreeRootSiblingPath,
  this.lastArchiveSiblingPath,
@@ -392,4 +443,31 @@ export class BlockProvingState {
  ? [this.baseOrMergeProofs.getNode(rootLocation)?.provingOutput]
  : this.baseOrMergeProofs.getChildren(rootLocation).map(c => c?.provingOutput);
  }
+
+ #getGlobalVariables() {
+ if (this.txs.length) {
+ return this.txs[0].processedTx.globalVariables;
+ }
+
+ const constants = this.constants;
+
+ return GlobalVariables.from({
+ chainId: constants.chainId,
+ version: constants.version,
+ blockNumber: this.blockNumber,
+ slotNumber: constants.slotNumber,
+ timestamp: this.timestamp,
+ coinbase: constants.coinbase,
+ feeRecipient: constants.feeRecipient,
+ gasFees: constants.gasFees,
+ });
+ }
+
+ #getTotalFees() {
+ return this.txs.reduce((acc, tx) => acc.add(tx.processedTx.txEffect.transactionFee), Fr.ZERO);
+ }
+
+ #getTotalManaUsed() {
+ return this.txs.reduce((acc, tx) => acc + BigInt(tx.processedTx.gasUsed.billedGas.l2Gas), 0n);
+ }
  }