@aztec/prover-client 0.0.0-test.1 → 0.0.1-commit.b655e406

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. package/dest/block-factory/index.d.ts +2 -0
  2. package/dest/block-factory/index.d.ts.map +1 -0
  3. package/dest/block-factory/light.d.ts +38 -0
  4. package/dest/block-factory/light.d.ts.map +1 -0
  5. package/dest/block-factory/light.js +94 -0
  6. package/dest/config.d.ts +6 -6
  7. package/dest/config.d.ts.map +1 -1
  8. package/dest/config.js +11 -1
  9. package/dest/mocks/fixtures.d.ts +7 -4
  10. package/dest/mocks/fixtures.d.ts.map +1 -1
  11. package/dest/mocks/fixtures.js +32 -4
  12. package/dest/mocks/test_context.d.ts +43 -15
  13. package/dest/mocks/test_context.d.ts.map +1 -1
  14. package/dest/mocks/test_context.js +110 -48
  15. package/dest/orchestrator/block-building-helpers.d.ts +37 -28
  16. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  17. package/dest/orchestrator/block-building-helpers.js +156 -150
  18. package/dest/orchestrator/block-proving-state.d.ts +62 -46
  19. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  20. package/dest/orchestrator/block-proving-state.js +223 -179
  21. package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
  22. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
  23. package/dest/orchestrator/checkpoint-proving-state.js +211 -0
  24. package/dest/orchestrator/epoch-proving-state.d.ts +37 -24
  25. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  26. package/dest/orchestrator/epoch-proving-state.js +143 -73
  27. package/dest/orchestrator/orchestrator.d.ts +34 -31
  28. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  29. package/dest/orchestrator/orchestrator.js +392 -234
  30. package/dest/orchestrator/orchestrator_metrics.d.ts +2 -0
  31. package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
  32. package/dest/orchestrator/orchestrator_metrics.js +9 -0
  33. package/dest/orchestrator/tx-proving-state.d.ts +12 -10
  34. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  35. package/dest/orchestrator/tx-proving-state.js +30 -38
  36. package/dest/prover-client/prover-client.d.ts +3 -3
  37. package/dest/prover-client/prover-client.d.ts.map +1 -1
  38. package/dest/prover-client/prover-client.js +5 -4
  39. package/dest/prover-client/server-epoch-prover.d.ts +13 -10
  40. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  41. package/dest/prover-client/server-epoch-prover.js +11 -11
  42. package/dest/proving_broker/broker_prover_facade.d.ts +22 -15
  43. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  44. package/dest/proving_broker/broker_prover_facade.js +64 -39
  45. package/dest/proving_broker/config.d.ts +9 -4
  46. package/dest/proving_broker/config.d.ts.map +1 -1
  47. package/dest/proving_broker/config.js +15 -4
  48. package/dest/proving_broker/factory.d.ts +1 -1
  49. package/dest/proving_broker/factory.d.ts.map +1 -1
  50. package/dest/proving_broker/factory.js +5 -1
  51. package/dest/proving_broker/fixtures.js +1 -1
  52. package/dest/proving_broker/proof_store/factory.js +1 -1
  53. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
  54. package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
  55. package/dest/proving_broker/proof_store/index.d.ts +1 -0
  56. package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
  57. package/dest/proving_broker/proof_store/index.js +1 -0
  58. package/dest/proving_broker/proving_agent.d.ts +3 -3
  59. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  60. package/dest/proving_broker/proving_agent.js +83 -47
  61. package/dest/proving_broker/proving_broker.d.ts +11 -2
  62. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  63. package/dest/proving_broker/proving_broker.js +34 -22
  64. package/dest/proving_broker/proving_broker_database/memory.js +1 -1
  65. package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
  66. package/dest/proving_broker/proving_broker_database/persisted.js +9 -8
  67. package/dest/proving_broker/proving_job_controller.d.ts +7 -8
  68. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  69. package/dest/proving_broker/proving_job_controller.js +89 -61
  70. package/dest/proving_broker/rpc.d.ts +3 -5
  71. package/dest/proving_broker/rpc.d.ts.map +1 -1
  72. package/dest/proving_broker/rpc.js +1 -4
  73. package/dest/test/mock_proof_store.d.ts +9 -0
  74. package/dest/test/mock_proof_store.d.ts.map +1 -0
  75. package/dest/test/mock_proof_store.js +10 -0
  76. package/dest/test/mock_prover.d.ts +23 -16
  77. package/dest/test/mock_prover.d.ts.map +1 -1
  78. package/dest/test/mock_prover.js +38 -20
  79. package/package.json +29 -29
  80. package/src/block-factory/index.ts +1 -0
  81. package/src/block-factory/light.ts +140 -0
  82. package/src/config.ts +24 -8
  83. package/src/mocks/fixtures.ts +43 -15
  84. package/src/mocks/test_context.ts +201 -75
  85. package/src/orchestrator/block-building-helpers.ts +247 -243
  86. package/src/orchestrator/block-proving-state.ts +247 -231
  87. package/src/orchestrator/checkpoint-proving-state.ts +299 -0
  88. package/src/orchestrator/epoch-proving-state.ts +187 -111
  89. package/src/orchestrator/orchestrator.ts +590 -289
  90. package/src/orchestrator/orchestrator_metrics.ts +20 -1
  91. package/src/orchestrator/tx-proving-state.ts +60 -61
  92. package/src/prover-client/prover-client.ts +16 -14
  93. package/src/prover-client/server-epoch-prover.ts +40 -21
  94. package/src/proving_broker/broker_prover_facade.ts +200 -113
  95. package/src/proving_broker/config.ts +17 -6
  96. package/src/proving_broker/factory.ts +2 -1
  97. package/src/proving_broker/fixtures.ts +1 -1
  98. package/src/proving_broker/proof_store/factory.ts +1 -1
  99. package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
  100. package/src/proving_broker/proof_store/index.ts +1 -0
  101. package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
  102. package/src/proving_broker/proving_agent.ts +89 -47
  103. package/src/proving_broker/proving_broker.ts +51 -32
  104. package/src/proving_broker/proving_broker_database/memory.ts +1 -1
  105. package/src/proving_broker/proving_broker_database/persisted.ts +9 -8
  106. package/src/proving_broker/proving_job_controller.ts +92 -81
  107. package/src/proving_broker/rpc.ts +1 -6
  108. package/src/test/mock_proof_store.ts +14 -0
  109. package/src/test/mock_prover.ts +164 -60
  110. package/dest/bin/get-proof-inputs.d.ts +0 -2
  111. package/dest/bin/get-proof-inputs.d.ts.map +0 -1
  112. package/dest/bin/get-proof-inputs.js +0 -51
  113. package/dest/block_builder/index.d.ts +0 -6
  114. package/dest/block_builder/index.d.ts.map +0 -1
  115. package/dest/block_builder/light.d.ts +0 -33
  116. package/dest/block_builder/light.d.ts.map +0 -1
  117. package/dest/block_builder/light.js +0 -82
  118. package/src/bin/get-proof-inputs.ts +0 -59
  119. package/src/block_builder/index.ts +0 -6
  120. package/src/block_builder/light.ts +0 -101
  121. package/dest/{block_builder → block-factory}/index.js +0 -0
package/src/orchestrator/block-building-helpers.ts
@@ -1,13 +1,21 @@
-import { Blob, type SpongeBlob } from '@aztec/blob-lib';
+import {
+  BatchedBlob,
+  BatchedBlobAccumulator,
+  SpongeBlob,
+  computeBlobsHashFromBlobs,
+  getBlobCommitmentsFromBlobs,
+  getBlobsPerL1Block,
+} from '@aztec/blob-lib';
 import {
   ARCHIVE_HEIGHT,
+  CHONK_PROOF_LENGTH,
   MAX_CONTRACT_CLASS_LOGS_PER_TX,
   MAX_NOTE_HASHES_PER_TX,
   MAX_NULLIFIERS_PER_TX,
   NOTE_HASH_SUBTREE_HEIGHT,
-  NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH,
+  NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
   NULLIFIER_SUBTREE_HEIGHT,
-  NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH,
+  NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
   NULLIFIER_TREE_HEIGHT,
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   PUBLIC_DATA_TREE_HEIGHT,
@@ -16,24 +24,27 @@ import { makeTuple } from '@aztec/foundation/array';
 import { padArrayEnd } from '@aztec/foundation/collection';
 import { sha256Trunc } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
-import type { Logger } from '@aztec/foundation/log';
-import { type Tuple, assertLength, serializeToBuffer, toFriendlyJSON } from '@aztec/foundation/serialize';
-import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
-import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
-import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
+import { type Bufferable, type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+import {
+  MembershipWitness,
+  MerkleTreeCalculator,
+  computeCompressedUnbalancedMerkleTreeRoot,
+} from '@aztec/foundation/trees';
+import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
+import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
-import { PublicDataHint } from '@aztec/stdlib/avm';
-import { Body } from '@aztec/stdlib/block';
-import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
-import { ContractClassLog } from '@aztec/stdlib/logs';
-import type { ParityPublicInputs } from '@aztec/stdlib/parity';
+import { Body, L2BlockHeader, getBlockBlobFields } from '@aztec/stdlib/block';
+import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
+import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+import { ContractClassLogFields } from '@aztec/stdlib/logs';
+import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
 import {
-  type BaseOrMergeRollupPublicInputs,
-  type BlockRootOrBlockMergePublicInputs,
-  ConstantRollupData,
+  BlockConstantData,
+  BlockRollupPublicInputs,
   PrivateBaseRollupHints,
-  PrivateBaseStateDiffHints,
   PublicBaseRollupHints,
+  PublicChonkVerifierPrivateInputs,
+  TreeSnapshotDiffHints,
 } from '@aztec/stdlib/rollup';
 import {
   AppendOnlyTreeSnapshot,
@@ -46,17 +57,16 @@ import {
 import {
   BlockHeader,
   ContentCommitment,
-  type GlobalVariables,
+  GlobalVariables,
   PartialStateReference,
   type ProcessedTx,
   StateReference,
-  TxEffect,
+  Tx,
 } from '@aztec/stdlib/tx';
+import { VkData } from '@aztec/stdlib/vks';
 import { Attributes, type Span, runInSpan } from '@aztec/telemetry-client';
 import type { MerkleTreeReadOperations } from '@aztec/world-state';
 
-import { inspect } from 'util';
-
 /**
  * Type representing the names of the trees for the base rollup.
  */
@@ -67,33 +77,30 @@ type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'Public
 export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive';
 
 // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
-export const buildBaseRollupHints = runInSpan(
+export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
   'BlockBuilderHelpers',
   'buildBaseRollupHints',
   async (
     span: Span,
     tx: ProcessedTx,
-    globalVariables: GlobalVariables,
-    db: MerkleTreeWriteOperations,
+    lastArchive: AppendOnlyTreeSnapshot,
+    newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
     startSpongeBlob: SpongeBlob,
+    proverId: Fr,
+    db: MerkleTreeWriteOperations,
   ) => {
     span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
     // Get trees info before any changes hit
-    const constants = await getConstantRollupData(globalVariables, db);
     const start = new PartialStateReference(
       await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
       await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
       await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
     );
-    // Get the subtree sibling paths for the circuit
-    const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath(
-      MerkleTreeId.NOTE_HASH_TREE,
-      NOTE_HASH_SUBTREE_HEIGHT,
-      db,
-    );
 
-    const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, i =>
-      i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO,
+    // Get the note hash subtree root sibling path for insertion.
+    const noteHashSubtreeRootSiblingPath = assertLength(
+      await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db),
+      NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
     );
 
     // Update the note hash trees with the new items being inserted to get the new roots
@@ -101,10 +108,6 @@ export const buildBaseRollupHints = runInSpan(
     const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
     await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
 
-    // Create data hint for reading fee payer initial balance in Fee Juice
-    const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
-    const feePayerFeeJuiceBalanceReadHint = await getPublicDataHint(db, leafSlot.toBigInt());
-
     // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
     // All reads that refer to writes in the same tx are transient and can be simplified out.
     const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
@@ -112,8 +115,8 @@ export const buildBaseRollupHints = runInSpan(
     // Update the nullifier tree, capturing the low nullifier info for each individual operation
     const {
       lowLeavesWitnessData: nullifierWitnessLeaves,
-      newSubtreeSiblingPath: nullifiersSubtreeSiblingPath,
-      sortedNewLeaves: sortednullifiers,
+      newSubtreeSiblingPath: nullifiersSubtreeRootSiblingPath,
+      sortedNewLeaves: sortedNullifiers,
       sortedNewLeavesIndexes,
     } = await db.batchInsert(
       MerkleTreeId.NULLIFIER_TREE,
@@ -125,41 +128,22 @@ export const buildBaseRollupHints = runInSpan(
       throw new Error(`Could not craft nullifier batch insertion proofs`);
     }
 
-    // Extract witness objects from returned data
-    const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
-      nullifierWitnessLeaves.map(l =>
-        MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)),
-      );
-
-    const nullifierSubtreeSiblingPathArray = nullifiersSubtreeSiblingPath.toFields();
-
-    const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, i =>
-      i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
-    );
-
-    // Append new data to startSpongeBlob
-    const inputSpongeBlob = startSpongeBlob.clone();
-    await startSpongeBlob.absorb(tx.txEffect.toBlobFields());
+    const blockHash = await tx.data.constants.anchorBlockHeader.hash();
+    const anchorBlockArchiveSiblingPath = (
+      await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)
+    ).siblingPath;
 
-    const contractClassLogsPreimages = makeTuple(
+    const contractClassLogsFields = makeTuple(
       MAX_CONTRACT_CLASS_LOGS_PER_TX,
-      i => tx.txEffect.contractClassLogs[i]?.toUnsiloed() || ContractClassLog.empty(),
+      i => tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty(),
     );
 
     if (tx.avmProvingRequest) {
-      const blockHash = await tx.constants.historicalHeader.hash();
-      const archiveRootMembershipWitness = await getMembershipWitnessFor(
-        blockHash,
-        MerkleTreeId.ARCHIVE,
-        ARCHIVE_HEIGHT,
-        db,
-      );
-
       return PublicBaseRollupHints.from({
-        startSpongeBlob: inputSpongeBlob,
-        archiveRootMembershipWitness,
-        contractClassLogsPreimages,
-        constants,
+        startSpongeBlob,
+        lastArchive,
+        anchorBlockArchiveSiblingPath,
+        contractClassLogsFields,
      });
    } else {
      if (
@@ -170,139 +154,146 @@ export const buildBaseRollupHints = runInSpan(
         throw new Error(`More than one public data write in a private only tx`);
       }
 
-      const feeWriteLowLeafPreimage =
-        txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0] || PublicDataTreeLeafPreimage.empty();
-      const feeWriteLowLeafMembershipWitness =
-        txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0] ||
-        MembershipWitness.empty<typeof PUBLIC_DATA_TREE_HEIGHT>(PUBLIC_DATA_TREE_HEIGHT);
-      const feeWriteSiblingPath =
-        txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths[0] ||
-        makeTuple(PUBLIC_DATA_TREE_HEIGHT, () => Fr.ZERO);
-
-      const stateDiffHints = PrivateBaseStateDiffHints.from({
-        nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-          i < nullifierWitnessLeaves.length
-            ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
-            : NullifierLeafPreimage.empty(),
+      // Get hints for reading fee payer's balance in the public data tree.
+      const feePayerBalanceMembershipWitness = txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0];
+      const feePayerBalanceLeafPreimage = txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0];
+      const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
+      if (!feePayerBalanceMembershipWitness || !leafSlot.equals(feePayerBalanceLeafPreimage?.leaf.slot)) {
+        throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
+      }
+
+      // Extract witness objects from returned data
+      const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
+        nullifierWitnessLeaves.map(l =>
+          MembershipWitness.fromBufferArray(
+            l.index,
+            assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT),
+          ),
+        );
+
+      const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
+        noteHashSubtreeRootSiblingPath,
+        nullifierPredecessorPreimages: padArrayEnd(
+          nullifierWitnessLeaves.map(l => l.leafPreimage as NullifierLeafPreimage),
+          NullifierLeafPreimage.empty(),
+          MAX_NULLIFIERS_PER_TX,
        ),
        nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
          i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
            ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
            : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
        ),
-        sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])),
-        sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
-        noteHashSubtreeSiblingPath,
-        nullifierSubtreeSiblingPath,
-        feeWriteLowLeafPreimage,
-        feeWriteLowLeafMembershipWitness,
-        feeWriteSiblingPath,
+        sortedNullifiers: assertLength(
+          sortedNullifiers.map(n => Fr.fromBuffer(n)),
+          MAX_NULLIFIERS_PER_TX,
+        ),
+        sortedNullifierIndexes: assertLength(sortedNewLeavesIndexes, MAX_NULLIFIERS_PER_TX),
+        nullifierSubtreeRootSiblingPath: assertLength(
+          nullifiersSubtreeRootSiblingPath.toFields(),
+          NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+        ),
+        feePayerBalanceMembershipWitness,
      });
 
-      const blockHash = await tx.constants.historicalHeader.hash();
-      const archiveRootMembershipWitness = await getMembershipWitnessFor(
-        blockHash,
-        MerkleTreeId.ARCHIVE,
-        ARCHIVE_HEIGHT,
-        db,
-      );
+      const constants = BlockConstantData.from({
+        lastArchive,
+        l1ToL2TreeSnapshot: newL1ToL2MessageTreeSnapshot,
+        vkTreeRoot: tx.data.constants.vkTreeRoot,
+        protocolContractsHash: tx.data.constants.protocolContractsHash,
+        globalVariables: tx.globalVariables,
+        proverId,
+      });
 
       return PrivateBaseRollupHints.from({
         start,
-        startSpongeBlob: inputSpongeBlob,
-        stateDiffHints,
-        feePayerFeeJuiceBalanceReadHint,
-        archiveRootMembershipWitness,
-        contractClassLogsPreimages,
+        startSpongeBlob,
+        treeSnapshotDiffHints,
+        feePayerBalanceLeafPreimage,
+        anchorBlockArchiveSiblingPath,
+        contractClassLogsFields,
         constants,
       });
     }
   },
 );
 
-export async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint) {
-  const { index } = (await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot)) ?? {};
-  if (index === undefined) {
-    throw new Error(`Cannot find the previous value index for public data ${leafSlot}.`);
-  }
-
-  const siblingPath = await db.getSiblingPath<typeof PUBLIC_DATA_TREE_HEIGHT>(MerkleTreeId.PUBLIC_DATA_TREE, index);
-  const membershipWitness = new MembershipWitness(PUBLIC_DATA_TREE_HEIGHT, index, siblingPath.toTuple());
+export function getChonkProofFromTx(tx: Tx | ProcessedTx) {
+  const publicInputs = tx.data.publicInputs().toFields();
 
-  const leafPreimage = (await db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, index)) as PublicDataTreeLeafPreimage;
-  if (!leafPreimage) {
-    throw new Error(`Cannot find the leaf preimage for public data tree at index ${index}.`);
-  }
-
-  const exists = leafPreimage.slot.toBigInt() === leafSlot;
-  const value = exists ? leafPreimage.value : Fr.ZERO;
+  const binaryProof = new Proof(
    Buffer.concat(tx.chonkProof.attachPublicInputs(publicInputs).fieldsWithPublicInputs.map(field => field.toBuffer())),
    publicInputs.length,
  );
  return new RecursiveProof(tx.chonkProof.fields, binaryProof, true, CHONK_PROOF_LENGTH);
}
 
-  return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage);
+export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx, proverId: Fr) {
+  const proofData = new ProofData(
+    tx.data.toPrivateToPublicKernelCircuitPublicInputs(),
+    getChonkProofFromTx(tx),
+    getVkData('HidingKernelToPublic'),
+  );
+  return new PublicChonkVerifierPrivateInputs(proofData, proverId);
 }
 
-export const buildBlobHints = runInSpan(
+// Build "hints" as the private inputs for the checkpoint root rollup circuit.
+// The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
+// The `blobsHash` will be validated on L1 against the submitted blob data.
+export const buildBlobHints = (blobFields: Fr[]) => {
+  const blobs = getBlobsPerL1Block(blobFields);
+  const blobCommitments = getBlobCommitmentsFromBlobs(blobs);
+  const blobsHash = computeBlobsHashFromBlobs(blobs);
+  return { blobCommitments, blobs, blobsHash };
+};
+
+// Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
+export const buildBlobDataFromTxs = async (txsPerCheckpoint: ProcessedTx[][]) => {
+  const blobFields = txsPerCheckpoint.map(txs => getCheckpointBlobFields([txs.map(tx => tx.txEffect)]));
+  const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
+  return { blobFieldsLengths: blobFields.map(fields => fields.length), finalBlobChallenges };
+};
+
+export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
+  const blobs = blobFieldsPerCheckpoint.map(blobFields => getBlobsPerL1Block(blobFields));
+  return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
+};
+
+export const accumulateBlobs = runInSpan(
   'BlockBuilderHelpers',
-  'buildBlobHints',
-  async (_span: Span, txEffects: TxEffect[]) => {
-    const blobFields = txEffects.flatMap(tx => tx.toBlobFields());
-    const blobs = await Blob.getBlobs(blobFields);
-    const blobCommitments = blobs.map(b => b.commitmentToFields());
-    const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
-    return { blobFields, blobCommitments, blobs, blobsHash };
+  'accumulateBlobs',
+  async (_span: Span, blobFields: Fr[], startBlobAccumulator: BatchedBlobAccumulator) => {
+    const blobs = getBlobsPerL1Block(blobFields);
+    const endBlobAccumulator = await startBlobAccumulator.accumulateBlobs(blobs);
+    return endBlobAccumulator;
   },
 );
 
 export const buildHeaderFromCircuitOutputs = runInSpan(
   'BlockBuilderHelpers',
   'buildHeaderFromCircuitOutputs',
-  async (
-    _span,
-    previousRollupData: BaseOrMergeRollupPublicInputs[],
-    parityPublicInputs: ParityPublicInputs,
-    rootRollupOutputs: BlockRootOrBlockMergePublicInputs,
-    endState: StateReference,
-    logger?: Logger,
-  ) => {
-    if (previousRollupData.length > 2) {
-      throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
-    }
+  async (_span, blockRootRollupOutput: BlockRollupPublicInputs) => {
+    const constants = blockRootRollupOutput.constants;
+    const globalVariables = GlobalVariables.from({
+      chainId: constants.chainId,
+      version: constants.version,
+      blockNumber: blockRootRollupOutput.previousArchive.nextAvailableLeafIndex,
+      timestamp: blockRootRollupOutput.endTimestamp,
+      slotNumber: constants.slotNumber,
+      coinbase: constants.coinbase,
+      feeRecipient: constants.feeRecipient,
+      gasFees: constants.gasFees,
+    });
 
-    const blobsHash = rootRollupOutputs.blobPublicInputs[0].getBlobsHash();
-    const numTxs = previousRollupData.reduce((sum, d) => sum + d.numTxs, 0);
-    const outHash =
-      previousRollupData.length === 0
-        ? Fr.ZERO.toBuffer()
-        : previousRollupData.length === 1
-          ? previousRollupData[0].outHash.toBuffer()
-          : sha256Trunc(
-              Buffer.concat([previousRollupData[0].outHash.toBuffer(), previousRollupData[1].outHash.toBuffer()]),
-            );
-    const contentCommitment = new ContentCommitment(
-      new Fr(numTxs),
-      blobsHash,
-      parityPublicInputs.shaRoot.toBuffer(),
-      outHash,
-    );
+    const spongeBlobHash = await blockRootRollupOutput.endSpongeBlob.clone().squeeze();
 
-    const accumulatedFees = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedFees), Fr.ZERO);
-    const accumulatedManaUsed = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedManaUsed), Fr.ZERO);
-    const header = new BlockHeader(
-      rootRollupOutputs.previousArchive,
-      contentCommitment,
-      endState,
-      rootRollupOutputs.endGlobalVariables,
-      accumulatedFees,
-      accumulatedManaUsed,
+    return new BlockHeader(
+      blockRootRollupOutput.previousArchive,
+      blockRootRollupOutput.endState,
+      spongeBlobHash,
+      globalVariables,
+      blockRootRollupOutput.accumulatedFees,
+      blockRootRollupOutput.accumulatedManaUsed,
     );
-    if (!(await header.hash()).equals(rootRollupOutputs.endBlockHash)) {
-      logger?.error(
-        `Block header mismatch when building header from circuit outputs.` +
-          `\n\nHeader: ${inspect(header)}` +
-          `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`,
-      );
-      throw new Error(`Block header mismatch when building from circuit outputs`);
-    }
-    return header;
   },
 );
 
@@ -315,8 +306,9 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
     globalVariables: GlobalVariables,
     l1ToL2Messages: Fr[],
     db: MerkleTreeReadOperations,
+    startSpongeBlob?: SpongeBlob,
   ) => {
-    span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber.toNumber());
+    span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
     const stateReference = new StateReference(
       await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
       new PartialStateReference(
@@ -331,92 +323,95 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
     const txEffects = txs.map(tx => tx.txEffect);
     const body = new Body(txEffects);
 
-    const numTxs = body.txEffects.length;
-    const outHash =
-      numTxs === 0
-        ? Fr.ZERO.toBuffer()
-        : numTxs === 1
-          ? body.txEffects[0].txOutHash()
-          : computeUnbalancedMerkleRoot(
-              body.txEffects.map(tx => tx.txOutHash()),
-              TxEffect.empty().txOutHash(),
-            );
-
-    l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-    const hasher = (left: Buffer, right: Buffer) => Promise.resolve(sha256Trunc(Buffer.concat([left, right])));
-    const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
-    const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
-    const parityShaRoot = await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer()));
-    const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobs(body.toBlobFields()));
-
-    const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityShaRoot, outHash);
-
-    const fees = body.txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+    const txOutHashes = txEffects.map(tx => tx.txOutHash());
+    const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeCompressedUnbalancedMerkleTreeRoot(txOutHashes));
+
+    const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
+    const blockBlobFields = body.toBlobFields();
+    // TODO(#17027): This only works when there's one block per checkpoint.
+    const blobFields = [new Fr(blockBlobFields.length + 1)].concat(blockBlobFields);
+    const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
+
+    const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
+
+    const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
     const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
 
-    const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);
+    const endSpongeBlob = startSpongeBlob?.clone() ?? (await SpongeBlob.init(blobFields.length));
+    await endSpongeBlob.absorb(blockBlobFields);
+    const spongeBlobHash = await endSpongeBlob.squeeze();
+
+    const header = new L2BlockHeader(
+      previousArchive,
+      contentCommitment,
+      stateReference,
+      globalVariables,
+      fees,
+      manaUsed,
+      spongeBlobHash,
+    );
 
     return { header, body };
   },
 );
 
-export function getBlobsHashFromBlobs(inputs: Blob[]): Buffer {
-  const blobHashes = serializeToBuffer(inputs.map(b => b.getEthVersionedBlobHash()));
-  return sha256Trunc(serializeToBuffer(blobHashes));
-}
-
-// Validate that the roots of all local trees match the output of the root circuit simulation
-export async function validateBlockRootOutput(
-  blockRootOutput: BlockRootOrBlockMergePublicInputs,
-  blockHeader: BlockHeader,
-  db: MerkleTreeReadOperations,
-) {
-  await Promise.all([
-    validateState(blockHeader.state, db),
-    validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), blockRootOutput.newArchive, 'Archive'),
-  ]);
-}
-
-export const validateState = runInSpan(
+export const buildBlockHeaderFromTxs = runInSpan(
   'BlockBuilderHelpers',
-  'validateState',
-  async (_span, state: StateReference, db: MerkleTreeReadOperations) => {
-    const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map(
-      async (id: MerkleTreeId) => {
-        return { key: id, value: await getTreeSnapshot(id, db) };
-      },
-    );
-    const snapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot> = new Map(
-      (await Promise.all(promises)).map(obj => [obj.key, obj.value]),
-    );
-    validatePartialState(state.partial, snapshots);
-    validateSimulatedTree(
+  'buildBlockHeaderFromTxs',
+  async (
+    span,
+    txs: ProcessedTx[],
+    globalVariables: GlobalVariables,
+    startSpongeBlob: SpongeBlob,
+    db: MerkleTreeReadOperations,
+  ) => {
+    span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
+    const stateReference = new StateReference(
       await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
-      state.l1ToL2MessageTree,
-      'L1ToL2MessageTree',
+      new PartialStateReference(
+        await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
+        await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
+        await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
+      ),
     );
+
+    const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+
+    const blobFields = getBlockBlobFields(txs.map(tx => tx.txEffect));
+    const endSpongeBlob = startSpongeBlob.clone();
+    await endSpongeBlob.absorb(blobFields);
+    const spongeBlobHash = await endSpongeBlob.squeeze();
+
+    const txEffects = txs.map(tx => tx.txEffect);
+    const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+    const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+
+    return new BlockHeader(previousArchive, stateReference, spongeBlobHash, globalVariables, fees, manaUsed);
   },
 );
 
+/** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
+export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Promise<Fr> {
+  const l1ToL2Messages = padArrayEnd<Fr, number>(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
+  const hasher = (left: Buffer, right: Buffer) =>
+    Promise.resolve(sha256Trunc(Buffer.concat([left, right])) as Buffer<ArrayBuffer>);
+  const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
+  const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
+  return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
+}
+
+export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
+  const { size } = await db.getTreeInfo(treeId);
+  const path = await db.getSiblingPath(treeId, size - 1n);
+  return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
+}
+
 export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
   const { size } = await db.getTreeInfo(treeId);
   const path = await db.getSiblingPath(treeId, size);
   return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
 }
 
-export const getConstantRollupData = runInSpan(
-  'BlockBuilderHelpers',
-  'getConstantRollupData',
-  async (_span, globalVariables: GlobalVariables, db: MerkleTreeReadOperations): Promise<ConstantRollupData> => {
-    return ConstantRollupData.from({
-      vkTreeRoot: getVKTreeRoot(),
-      protocolContractTreeRoot,
-      lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
-      globalVariables,
-    });
-  },
-);
-
 export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot> {
   const treeInfo = await db.getTreeInfo(id);
   return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
@@ -537,17 +532,26 @@ function validateSimulatedTree(
 }
 
 export function validateTx(tx: ProcessedTx) {
-  const txHeader = tx.constants.historicalHeader;
-  if (txHeader.state.l1ToL2MessageTree.isZero()) {
+  const txHeader = tx.data.constants.anchorBlockHeader;
+  if (txHeader.state.l1ToL2MessageTree.isEmpty()) {
     throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
   }
-  if (txHeader.state.partial.noteHashTree.isZero()) {
+  if (txHeader.state.partial.noteHashTree.isEmpty()) {
     throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`);
   }
-  if (txHeader.state.partial.nullifierTree.isZero()) {
+  if (txHeader.state.partial.nullifierTree.isEmpty()) {
     throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`);
  }
-  if (txHeader.state.partial.publicDataTree.isZero()) {
+  if (txHeader.state.partial.publicDataTree.isEmpty()) {
     throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`);
   }
 }
+
+export function toProofData<T extends Bufferable, PROOF_LENGTH extends number>(
+  { inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<T, PROOF_LENGTH>,
+  vkIndex?: number,
+) {
+  const leafIndex = vkIndex || getVKIndex(verificationKey.keyAsFields);
+  const vkData = new VkData(verificationKey, leafIndex, getVKSiblingPath(leafIndex));
+  return new ProofData(inputs, proof, vkData);
+}
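
For orientation, here is a minimal sketch (not part of the package) of how the blob helpers introduced in this diff compose across an epoch's checkpoints. It relies only on the signatures visible above: buildFinalBlobChallenges precomputes the epoch-wide challenges, buildBlobHints derives per-checkpoint commitments and blobsHash, and accumulateBlobs folds each checkpoint's blobs into a running BatchedBlobAccumulator. The relative import path, the construction of the starting accumulator, and the assumption that the runInSpan wrapper injects the span argument are all assumptions here, not confirmed by the diff.

// Sketch only: composing the new blob helpers over an epoch's checkpoints.
// `startBlobAccumulator` is assumed to be supplied by the caller; only the
// helper signatures used below appear in this diff.
import type { BatchedBlobAccumulator } from '@aztec/blob-lib';
import type { Fr } from '@aztec/foundation/fields';

import { accumulateBlobs, buildBlobHints, buildFinalBlobChallenges } from './block-building-helpers.js';

async function sketchEpochBlobFlow(blobFieldsPerCheckpoint: Fr[][], startBlobAccumulator: BatchedBlobAccumulator) {
  // Challenges are precomputed once over every checkpoint's blob fields; the
  // root rollup later checks the accumulated blobs against them.
  const finalBlobChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);

  let accumulator = startBlobAccumulator;
  const checkpointHints: ReturnType<typeof buildBlobHints>[] = [];
  for (const blobFields of blobFieldsPerCheckpoint) {
    // Each checkpoint root rollup receives its commitments and blobsHash as hints...
    checkpointHints.push(buildBlobHints(blobFields));
    // ...while its blobs are folded into the running accumulator.
    accumulator = await accumulateBlobs(blobFields, accumulator);
  }
  return { finalBlobChallenges, checkpointHints, endBlobAccumulator: accumulator };
}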
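
Similarly, a small sketch contrasting the new getLastSiblingPath with the pre-existing getRootTreeSiblingPath near the end of the diff: the former opens the path at index size - 1 (the most recently inserted leaf), the latter at index size (the next free slot). The MerkleTreeId import path is assumed; it is not shown in the diff hunks.

// Sketch only: the two helpers differ in which index they open a path for.
import type { MerkleTreeId } from '@aztec/stdlib/trees'; // import path assumed, not shown in this diff
import type { MerkleTreeReadOperations } from '@aztec/world-state';

import { getLastSiblingPath, getRootTreeSiblingPath } from './block-building-helpers.js';

async function sketchSiblingPaths(treeId: MerkleTreeId, db: MerkleTreeReadOperations) {
  // Path to the last leaf actually inserted (index size - 1).
  const lastLeafPath = await getLastSiblingPath(treeId, db);
  // Path to the next empty slot (index size), e.g. for appending a new root.
  const nextFreeSlotPath = await getRootTreeSiblingPath(treeId, db);
  return { lastLeafPath, nextFreeSlotPath };
}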