@aztec/prover-client 0.0.0-test.1 → 0.0.1-commit.21caa21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (147)
  1. package/dest/block-factory/index.d.ts +2 -0
  2. package/dest/block-factory/index.d.ts.map +1 -0
  3. package/dest/block-factory/light.d.ts +38 -0
  4. package/dest/block-factory/light.d.ts.map +1 -0
  5. package/dest/block-factory/light.js +108 -0
  6. package/dest/config.d.ts +7 -7
  7. package/dest/config.d.ts.map +1 -1
  8. package/dest/config.js +11 -1
  9. package/dest/index.d.ts +1 -1
  10. package/dest/light/lightweight_checkpoint_builder.d.ts +28 -0
  11. package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
  12. package/dest/light/lightweight_checkpoint_builder.js +107 -0
  13. package/dest/mocks/fixtures.d.ts +8 -8
  14. package/dest/mocks/fixtures.d.ts.map +1 -1
  15. package/dest/mocks/fixtures.js +32 -14
  16. package/dest/mocks/test_context.d.ts +40 -31
  17. package/dest/mocks/test_context.d.ts.map +1 -1
  18. package/dest/mocks/test_context.js +134 -86
  19. package/dest/orchestrator/block-building-helpers.d.ts +36 -29
  20. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  21. package/dest/orchestrator/block-building-helpers.js +168 -188
  22. package/dest/orchestrator/block-proving-state.d.ts +68 -47
  23. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  24. package/dest/orchestrator/block-proving-state.js +281 -176
  25. package/dest/orchestrator/checkpoint-proving-state.d.ts +62 -0
  26. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
  27. package/dest/orchestrator/checkpoint-proving-state.js +208 -0
  28. package/dest/orchestrator/epoch-proving-state.d.ts +40 -26
  29. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  30. package/dest/orchestrator/epoch-proving-state.js +143 -73
  31. package/dest/orchestrator/index.d.ts +1 -1
  32. package/dest/orchestrator/orchestrator.d.ts +35 -32
  33. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  34. package/dest/orchestrator/orchestrator.js +389 -239
  35. package/dest/orchestrator/orchestrator_metrics.d.ts +3 -1
  36. package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
  37. package/dest/orchestrator/orchestrator_metrics.js +9 -0
  38. package/dest/orchestrator/tx-proving-state.d.ts +13 -11
  39. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  40. package/dest/orchestrator/tx-proving-state.js +23 -40
  41. package/dest/prover-client/factory.d.ts +1 -1
  42. package/dest/prover-client/index.d.ts +1 -1
  43. package/dest/prover-client/prover-client.d.ts +4 -4
  44. package/dest/prover-client/prover-client.d.ts.map +1 -1
  45. package/dest/prover-client/prover-client.js +5 -4
  46. package/dest/prover-client/server-epoch-prover.d.ts +15 -11
  47. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  48. package/dest/prover-client/server-epoch-prover.js +11 -11
  49. package/dest/proving_broker/broker_prover_facade.d.ts +23 -16
  50. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  51. package/dest/proving_broker/broker_prover_facade.js +67 -41
  52. package/dest/proving_broker/config.d.ts +18 -9
  53. package/dest/proving_broker/config.d.ts.map +1 -1
  54. package/dest/proving_broker/config.js +22 -5
  55. package/dest/proving_broker/factory.d.ts +2 -2
  56. package/dest/proving_broker/factory.d.ts.map +1 -1
  57. package/dest/proving_broker/factory.js +5 -1
  58. package/dest/proving_broker/fixtures.d.ts +3 -2
  59. package/dest/proving_broker/fixtures.d.ts.map +1 -1
  60. package/dest/proving_broker/fixtures.js +2 -1
  61. package/dest/proving_broker/index.d.ts +1 -1
  62. package/dest/proving_broker/proof_store/factory.d.ts +2 -2
  63. package/dest/proving_broker/proof_store/factory.js +1 -1
  64. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
  65. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
  66. package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
  67. package/dest/proving_broker/proof_store/index.d.ts +2 -1
  68. package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
  69. package/dest/proving_broker/proof_store/index.js +1 -0
  70. package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
  71. package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
  72. package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
  73. package/dest/proving_broker/proving_agent.d.ts +4 -4
  74. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  75. package/dest/proving_broker/proving_agent.js +83 -47
  76. package/dest/proving_broker/proving_agent_instrumentation.d.ts +1 -1
  77. package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +1 -1
  78. package/dest/proving_broker/proving_broker.d.ts +13 -4
  79. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  80. package/dest/proving_broker/proving_broker.js +36 -23
  81. package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
  82. package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
  83. package/dest/proving_broker/proving_broker_database/memory.js +1 -1
  84. package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
  85. package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
  86. package/dest/proving_broker/proving_broker_database/persisted.js +12 -10
  87. package/dest/proving_broker/proving_broker_database.d.ts +3 -2
  88. package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
  89. package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
  90. package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
  91. package/dest/proving_broker/proving_job_controller.d.ts +9 -9
  92. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  93. package/dest/proving_broker/proving_job_controller.js +89 -61
  94. package/dest/proving_broker/rpc.d.ts +4 -6
  95. package/dest/proving_broker/rpc.d.ts.map +1 -1
  96. package/dest/proving_broker/rpc.js +1 -4
  97. package/dest/test/mock_proof_store.d.ts +9 -0
  98. package/dest/test/mock_proof_store.d.ts.map +1 -0
  99. package/dest/test/mock_proof_store.js +10 -0
  100. package/dest/test/mock_prover.d.ts +23 -17
  101. package/dest/test/mock_prover.d.ts.map +1 -1
  102. package/dest/test/mock_prover.js +38 -20
  103. package/package.json +32 -31
  104. package/src/block-factory/index.ts +1 -0
  105. package/src/block-factory/light.ts +137 -0
  106. package/src/config.ts +24 -8
  107. package/src/light/lightweight_checkpoint_builder.ts +142 -0
  108. package/src/mocks/fixtures.ts +42 -37
  109. package/src/mocks/test_context.ts +207 -115
  110. package/src/orchestrator/block-building-helpers.ts +256 -333
  111. package/src/orchestrator/block-proving-state.ts +323 -230
  112. package/src/orchestrator/checkpoint-proving-state.ts +301 -0
  113. package/src/orchestrator/epoch-proving-state.ts +187 -112
  114. package/src/orchestrator/orchestrator.ts +592 -299
  115. package/src/orchestrator/orchestrator_metrics.ts +20 -1
  116. package/src/orchestrator/tx-proving-state.ts +50 -64
  117. package/src/prover-client/prover-client.ts +16 -14
  118. package/src/prover-client/server-epoch-prover.ts +39 -21
  119. package/src/proving_broker/broker_prover_facade.ts +214 -126
  120. package/src/proving_broker/config.ts +24 -6
  121. package/src/proving_broker/factory.ts +2 -1
  122. package/src/proving_broker/fixtures.ts +7 -2
  123. package/src/proving_broker/proof_store/factory.ts +1 -1
  124. package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
  125. package/src/proving_broker/proof_store/index.ts +1 -0
  126. package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
  127. package/src/proving_broker/proving_agent.ts +89 -47
  128. package/src/proving_broker/proving_broker.ts +53 -33
  129. package/src/proving_broker/proving_broker_database/memory.ts +3 -2
  130. package/src/proving_broker/proving_broker_database/persisted.ts +14 -12
  131. package/src/proving_broker/proving_broker_database.ts +2 -1
  132. package/src/proving_broker/proving_job_controller.ts +94 -82
  133. package/src/proving_broker/rpc.ts +1 -6
  134. package/src/test/mock_proof_store.ts +14 -0
  135. package/src/test/mock_prover.ts +164 -60
  136. package/dest/bin/get-proof-inputs.d.ts +0 -2
  137. package/dest/bin/get-proof-inputs.d.ts.map +0 -1
  138. package/dest/bin/get-proof-inputs.js +0 -51
  139. package/dest/block_builder/index.d.ts +0 -6
  140. package/dest/block_builder/index.d.ts.map +0 -1
  141. package/dest/block_builder/light.d.ts +0 -33
  142. package/dest/block_builder/light.d.ts.map +0 -1
  143. package/dest/block_builder/light.js +0 -82
  144. package/src/bin/get-proof-inputs.ts +0 -59
  145. package/src/block_builder/index.ts +0 -6
  146. package/src/block_builder/light.ts +0 -101
  147. package/dest/{block_builder → block-factory}/index.js +0 -0
package/src/orchestrator/block-building-helpers.ts

@@ -1,62 +1,63 @@
- import { Blob, type SpongeBlob } from '@aztec/blob-lib';
+ import {
+   BatchedBlobAccumulator,
+   SpongeBlob,
+   computeBlobsHashFromBlobs,
+   encodeBlockBlobData,
+   getBlobCommitmentsFromBlobs,
+   getBlobsPerL1Block,
+ } from '@aztec/blob-lib';
  import {
    ARCHIVE_HEIGHT,
+   CHONK_PROOF_LENGTH,
    MAX_CONTRACT_CLASS_LOGS_PER_TX,
    MAX_NOTE_HASHES_PER_TX,
    MAX_NULLIFIERS_PER_TX,
    NOTE_HASH_SUBTREE_HEIGHT,
-   NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH,
+   NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
    NULLIFIER_SUBTREE_HEIGHT,
-   NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH,
+   NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
    NULLIFIER_TREE_HEIGHT,
-   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
    PUBLIC_DATA_TREE_HEIGHT,
  } from '@aztec/constants';
  import { makeTuple } from '@aztec/foundation/array';
  import { padArrayEnd } from '@aztec/foundation/collection';
- import { sha256Trunc } from '@aztec/foundation/crypto';
  import { Fr } from '@aztec/foundation/fields';
- import type { Logger } from '@aztec/foundation/log';
- import { type Tuple, assertLength, serializeToBuffer, toFriendlyJSON } from '@aztec/foundation/serialize';
- import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
- import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
- import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
+ import { type Bufferable, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+ import { MembershipWitness } from '@aztec/foundation/trees';
+ import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
+ import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
- import { PublicDataHint } from '@aztec/stdlib/avm';
  import { Body } from '@aztec/stdlib/block';
- import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
- import { ContractClassLog } from '@aztec/stdlib/logs';
- import type { ParityPublicInputs } from '@aztec/stdlib/parity';
+ import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+ import { ContractClassLogFields } from '@aztec/stdlib/logs';
+ import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
  import {
-   type BaseOrMergeRollupPublicInputs,
-   type BlockRootOrBlockMergePublicInputs,
-   ConstantRollupData,
+   BlockConstantData,
+   BlockRollupPublicInputs,
    PrivateBaseRollupHints,
-   PrivateBaseStateDiffHints,
    PublicBaseRollupHints,
+   PublicChonkVerifierPrivateInputs,
+   TreeSnapshotDiffHints,
  } from '@aztec/stdlib/rollup';
  import {
    AppendOnlyTreeSnapshot,
    MerkleTreeId,
    NullifierLeafPreimage,
-   PublicDataTreeLeaf,
    PublicDataTreeLeafPreimage,
    getTreeHeight,
  } from '@aztec/stdlib/trees';
  import {
    BlockHeader,
-   ContentCommitment,
-   type GlobalVariables,
+   GlobalVariables,
    PartialStateReference,
    type ProcessedTx,
    StateReference,
-   TxEffect,
+   Tx,
  } from '@aztec/stdlib/tx';
+ import { VkData } from '@aztec/stdlib/vks';
  import { Attributes, type Span, runInSpan } from '@aztec/telemetry-client';
  import type { MerkleTreeReadOperations } from '@aztec/world-state';

- import { inspect } from 'util';
-
  /**
   * Type representing the names of the trees for the base rollup.
   */
@@ -67,242 +68,236 @@ type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'Public
  export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive';

  // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
- export const buildBaseRollupHints = runInSpan(
+ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
    'BlockBuilderHelpers',
    'buildBaseRollupHints',
    async (
      span: Span,
      tx: ProcessedTx,
-     globalVariables: GlobalVariables,
-     db: MerkleTreeWriteOperations,
+     lastArchive: AppendOnlyTreeSnapshot,
+     newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
      startSpongeBlob: SpongeBlob,
+     proverId: Fr,
+     db: MerkleTreeWriteOperations,
    ) => {
      span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
      // Get trees info before any changes hit
-     const constants = await getConstantRollupData(globalVariables, db);
      const start = new PartialStateReference(
        await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
        await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
        await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
      );
-     // Get the subtree sibling paths for the circuit
-     const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath(
-       MerkleTreeId.NOTE_HASH_TREE,
-       NOTE_HASH_SUBTREE_HEIGHT,
-       db,
-     );

-     const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, i =>
-       i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO,
+     // Get the note hash subtree root sibling path for insertion.
+     const noteHashSubtreeRootSiblingPath = assertLength(
+       await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db),
+       NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
      );

-     // Update the note hash trees with the new items being inserted to get the new roots
-     // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
-     const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
-     await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
-
-     // Create data hint for reading fee payer initial balance in Fee Juice
-     const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
-     const feePayerFeeJuiceBalanceReadHint = await getPublicDataHint(db, leafSlot.toBigInt());
-
-     // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
-     // All reads that refer to writes in the same tx are transient and can be simplified out.
-     const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
-
-     // Update the nullifier tree, capturing the low nullifier info for each individual operation
-     const {
-       lowLeavesWitnessData: nullifierWitnessLeaves,
-       newSubtreeSiblingPath: nullifiersSubtreeSiblingPath,
-       sortedNewLeaves: sortednullifiers,
-       sortedNewLeavesIndexes,
-     } = await db.batchInsert(
-       MerkleTreeId.NULLIFIER_TREE,
-       padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
-       NULLIFIER_SUBTREE_HEIGHT,
-     );
-
-     if (nullifierWitnessLeaves === undefined) {
-       throw new Error(`Could not craft nullifier batch insertion proofs`);
-     }
-
-     // Extract witness objects from returned data
-     const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
-       nullifierWitnessLeaves.map(l =>
-         MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)),
-       );
-
-     const nullifierSubtreeSiblingPathArray = nullifiersSubtreeSiblingPath.toFields();
-
-     const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, i =>
-       i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
-     );
+     const { nullifierInsertionResult, publicDataInsertionResult } = await insertSideEffects(tx, db);

-     // Append new data to startSpongeBlob
-     const inputSpongeBlob = startSpongeBlob.clone();
-     await startSpongeBlob.absorb(tx.txEffect.toBlobFields());
+     const blockHash = await tx.data.constants.anchorBlockHeader.hash();
+     const anchorBlockArchiveSiblingPath = (
+       await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)
+     ).siblingPath;

-     const contractClassLogsPreimages = makeTuple(
+     const contractClassLogsFields = makeTuple(
        MAX_CONTRACT_CLASS_LOGS_PER_TX,
-       i => tx.txEffect.contractClassLogs[i]?.toUnsiloed() || ContractClassLog.empty(),
+       i => tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty(),
      );

      if (tx.avmProvingRequest) {
-       const blockHash = await tx.constants.historicalHeader.hash();
-       const archiveRootMembershipWitness = await getMembershipWitnessFor(
-         blockHash,
-         MerkleTreeId.ARCHIVE,
-         ARCHIVE_HEIGHT,
-         db,
-       );
-
        return PublicBaseRollupHints.from({
-         startSpongeBlob: inputSpongeBlob,
-         archiveRootMembershipWitness,
-         contractClassLogsPreimages,
-         constants,
+         startSpongeBlob,
+         lastArchive,
+         anchorBlockArchiveSiblingPath,
+         contractClassLogsFields,
        });
      } else {
-       if (
-         txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 ||
-         txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 ||
-         txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1
-       ) {
+       if (tx.txEffect.publicDataWrites.length > 1) {
          throw new Error(`More than one public data write in a private only tx`);
        }

-       const feeWriteLowLeafPreimage =
-         txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0] || PublicDataTreeLeafPreimage.empty();
-       const feeWriteLowLeafMembershipWitness =
-         txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0] ||
-         MembershipWitness.empty<typeof PUBLIC_DATA_TREE_HEIGHT>(PUBLIC_DATA_TREE_HEIGHT);
-       const feeWriteSiblingPath =
-         txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths[0] ||
-         makeTuple(PUBLIC_DATA_TREE_HEIGHT, () => Fr.ZERO);
-
-       const stateDiffHints = PrivateBaseStateDiffHints.from({
-         nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-           i < nullifierWitnessLeaves.length
-             ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
-             : NullifierLeafPreimage.empty(),
-         ),
-         nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-           i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
-             ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
-             : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+       // Get hints for reading fee payer's balance in the public data tree.
+       const feePayerBalanceLeafWitnessData = publicDataInsertionResult.lowLeavesWitnessData[0];
+       const feePayerBalanceMembershipWitness = MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
+         feePayerBalanceLeafWitnessData.index,
+         assertLength(feePayerBalanceLeafWitnessData.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
+       );
+       const feePayerBalanceLeafPreimage = feePayerBalanceLeafWitnessData.leafPreimage as PublicDataTreeLeafPreimage;
+       const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
+       if (!leafSlot.equals(feePayerBalanceLeafPreimage.leaf.slot)) {
+         throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
+       }
+
+       // Get hints for inserting the nullifiers.
+       const nullifierLowLeavesWitnessData = nullifierInsertionResult.lowLeavesWitnessData!;
+       const nullifierPredecessorPreimages = padArrayEnd(
+         nullifierLowLeavesWitnessData.map(l => l.leafPreimage as NullifierLeafPreimage),
+         NullifierLeafPreimage.empty(),
+         MAX_NULLIFIERS_PER_TX,
+       );
+       const nullifierPredecessorMembershipWitnesses = padArrayEnd(
+         nullifierLowLeavesWitnessData.map(l =>
+           MembershipWitness.fromBufferArray<typeof NULLIFIER_TREE_HEIGHT>(
+             l.index,
+             assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT),
+           ),
        ),
-         sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])),
-         sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
-         noteHashSubtreeSiblingPath,
-         nullifierSubtreeSiblingPath,
-         feeWriteLowLeafPreimage,
-         feeWriteLowLeafMembershipWitness,
-         feeWriteSiblingPath,
+         makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+         MAX_NULLIFIERS_PER_TX,
+       );
+       const sortedNullifiers = assertLength(
+         nullifierInsertionResult.sortedNewLeaves.map(n => Fr.fromBuffer(n)),
+         MAX_NULLIFIERS_PER_TX,
+       );
+       const sortedNullifierIndexes = assertLength(
+         nullifierInsertionResult.sortedNewLeavesIndexes,
+         MAX_NULLIFIERS_PER_TX,
+       );
+       const nullifierSubtreeRootSiblingPath = assertLength(
+         nullifierInsertionResult.newSubtreeSiblingPath.toFields(),
+         NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+       );
+
+       const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
+         noteHashSubtreeRootSiblingPath,
+         nullifierPredecessorPreimages,
+         nullifierPredecessorMembershipWitnesses,
+         sortedNullifiers,
+         sortedNullifierIndexes,
+         nullifierSubtreeRootSiblingPath,
+         feePayerBalanceMembershipWitness,
        });

-       const blockHash = await tx.constants.historicalHeader.hash();
-       const archiveRootMembershipWitness = await getMembershipWitnessFor(
-         blockHash,
-         MerkleTreeId.ARCHIVE,
-         ARCHIVE_HEIGHT,
-         db,
-       );
+       const constants = BlockConstantData.from({
+         lastArchive,
+         l1ToL2TreeSnapshot: newL1ToL2MessageTreeSnapshot,
+         vkTreeRoot: tx.data.constants.vkTreeRoot,
+         protocolContractsHash: tx.data.constants.protocolContractsHash,
+         globalVariables: tx.globalVariables,
+         proverId,
+       });

        return PrivateBaseRollupHints.from({
          start,
-         startSpongeBlob: inputSpongeBlob,
-         stateDiffHints,
-         feePayerFeeJuiceBalanceReadHint,
-         archiveRootMembershipWitness,
-         contractClassLogsPreimages,
+         startSpongeBlob,
+         treeSnapshotDiffHints,
+         feePayerBalanceLeafPreimage,
+         anchorBlockArchiveSiblingPath,
+         contractClassLogsFields,
          constants,
        });
      }
    },
  );

- export async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint) {
-   const { index } = (await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot)) ?? {};
-   if (index === undefined) {
-     throw new Error(`Cannot find the previous value index for public data ${leafSlot}.`);
-   }
+ export const insertSideEffects = runInSpan(
+   'BlockBuilderHelpers',
+   'buildBaseRollupHints',
+   async (span: Span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
+     span.setAttribute(Attributes.TX_HASH, tx.hash.toString());

-   const siblingPath = await db.getSiblingPath<typeof PUBLIC_DATA_TREE_HEIGHT>(MerkleTreeId.PUBLIC_DATA_TREE, index);
-   const membershipWitness = new MembershipWitness(PUBLIC_DATA_TREE_HEIGHT, index, siblingPath.toTuple());
+     // Insert the note hashes. Padded with zeros to the max number of note hashes per tx.
+     const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
+     await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);

-   const leafPreimage = (await db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, index)) as PublicDataTreeLeafPreimage;
-   if (!leafPreimage) {
-     throw new Error(`Cannot find the leaf preimage for public data tree at index ${index}.`);
-   }
+     // Insert the nullifiers. Padded with zeros to the max number of nullifiers per tx.
+     // Capturing the low nullifier info for each individual operation.
+     const nullifierInsertionResult = await db.batchInsert(
+       MerkleTreeId.NULLIFIER_TREE,
+       padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
+       NULLIFIER_SUBTREE_HEIGHT,
+     );
+     if (nullifierInsertionResult.lowLeavesWitnessData === undefined) {
+       throw new Error(`Failed to batch insert nullifiers.`);
+     }
+
+     if (tx.txEffect.publicDataWrites.some(write => write.isEmpty())) {
+       throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}.`);
+     }
+     // Insert the public data writes sequentially. No need to pad them to the max array size.
+     // Capturing the low leaf info for each individual operation.
+     const publicDataInsertionResult = await db.sequentialInsert(
+       MerkleTreeId.PUBLIC_DATA_TREE,
+       tx.txEffect.publicDataWrites.map(write => write.toBuffer()),
+     );

-   const exists = leafPreimage.slot.toBigInt() === leafSlot;
-   const value = exists ? leafPreimage.value : Fr.ZERO;
+     return {
+       nullifierInsertionResult,
+       publicDataInsertionResult,
+     };
+   },
+ );
+
+ export function getChonkProofFromTx(tx: Tx | ProcessedTx) {
+   const publicInputs = tx.data.publicInputs().toFields();
+
+   const binaryProof = new Proof(
+     Buffer.concat(tx.chonkProof.attachPublicInputs(publicInputs).fieldsWithPublicInputs.map(field => field.toBuffer())),
+     publicInputs.length,
+   );
+   return new RecursiveProof(tx.chonkProof.fields, binaryProof, true, CHONK_PROOF_LENGTH);
+ }

-   return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage);
+ export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx, proverId: Fr) {
+   const proofData = new ProofData(
+     tx.data.toPrivateToPublicKernelCircuitPublicInputs(),
+     getChonkProofFromTx(tx),
+     getVkData('HidingKernelToPublic'),
+   );
+   return new PublicChonkVerifierPrivateInputs(proofData, proverId);
  }

- export const buildBlobHints = runInSpan(
+ // Build "hints" as the private inputs for the checkpoint root rollup circuit.
+ // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
+ // The `blobsHash` will be validated on L1 against the submitted blob data.
+ export const buildBlobHints = (blobFields: Fr[]) => {
+   const blobs = getBlobsPerL1Block(blobFields);
+   const blobCommitments = getBlobCommitmentsFromBlobs(blobs);
+   const blobsHash = computeBlobsHashFromBlobs(blobs);
+   return { blobCommitments, blobs, blobsHash };
+ };
+
+ export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
+   return await BatchedBlobAccumulator.precomputeBatchedBlobChallenges(blobFieldsPerCheckpoint);
+ };
+
+ export const accumulateBlobs = runInSpan(
    'BlockBuilderHelpers',
-   'buildBlobHints',
-   async (_span: Span, txEffects: TxEffect[]) => {
-     const blobFields = txEffects.flatMap(tx => tx.toBlobFields());
-     const blobs = await Blob.getBlobs(blobFields);
-     const blobCommitments = blobs.map(b => b.commitmentToFields());
-     const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
-     return { blobFields, blobCommitments, blobs, blobsHash };
+   'accumulateBlobs',
+   async (_span: Span, blobFields: Fr[], startBlobAccumulator: BatchedBlobAccumulator) => {
+     const endBlobAccumulator = await startBlobAccumulator.accumulateFields(blobFields);
+     return endBlobAccumulator;
    },
  );

  export const buildHeaderFromCircuitOutputs = runInSpan(
    'BlockBuilderHelpers',
    'buildHeaderFromCircuitOutputs',
-   async (
-     _span,
-     previousRollupData: BaseOrMergeRollupPublicInputs[],
-     parityPublicInputs: ParityPublicInputs,
-     rootRollupOutputs: BlockRootOrBlockMergePublicInputs,
-     endState: StateReference,
-     logger?: Logger,
-   ) => {
-     if (previousRollupData.length > 2) {
-       throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
-     }
+   async (_span, blockRootRollupOutput: BlockRollupPublicInputs) => {
+     const constants = blockRootRollupOutput.constants;
+     const globalVariables = GlobalVariables.from({
+       chainId: constants.chainId,
+       version: constants.version,
+       blockNumber: blockRootRollupOutput.previousArchive.nextAvailableLeafIndex,
+       timestamp: blockRootRollupOutput.endTimestamp,
+       slotNumber: constants.slotNumber,
+       coinbase: constants.coinbase,
+       feeRecipient: constants.feeRecipient,
+       gasFees: constants.gasFees,
+     });

-     const blobsHash = rootRollupOutputs.blobPublicInputs[0].getBlobsHash();
-     const numTxs = previousRollupData.reduce((sum, d) => sum + d.numTxs, 0);
-     const outHash =
-       previousRollupData.length === 0
-         ? Fr.ZERO.toBuffer()
-         : previousRollupData.length === 1
-           ? previousRollupData[0].outHash.toBuffer()
-           : sha256Trunc(
-               Buffer.concat([previousRollupData[0].outHash.toBuffer(), previousRollupData[1].outHash.toBuffer()]),
-             );
-     const contentCommitment = new ContentCommitment(
-       new Fr(numTxs),
-       blobsHash,
-       parityPublicInputs.shaRoot.toBuffer(),
-       outHash,
-     );
+     const spongeBlobHash = await blockRootRollupOutput.endSpongeBlob.clone().squeeze();

-     const accumulatedFees = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedFees), Fr.ZERO);
-     const accumulatedManaUsed = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedManaUsed), Fr.ZERO);
-     const header = new BlockHeader(
-       rootRollupOutputs.previousArchive,
-       contentCommitment,
-       endState,
-       rootRollupOutputs.endGlobalVariables,
-       accumulatedFees,
-       accumulatedManaUsed,
+     return new BlockHeader(
+       blockRootRollupOutput.previousArchive,
+       blockRootRollupOutput.endState,
+       spongeBlobHash,
+       globalVariables,
+       blockRootRollupOutput.accumulatedFees,
+       blockRootRollupOutput.accumulatedManaUsed,
      );
-     if (!(await header.hash()).equals(rootRollupOutputs.endBlockHash)) {
-       logger?.error(
-         `Block header mismatch when building header from circuit outputs.` +
-           `\n\nHeader: ${inspect(header)}` +
-           `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`,
-       );
-       throw new Error(`Block header mismatch when building from circuit outputs`);
-     }
-     return header;
    },
  );

@@ -312,111 +307,72 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
    async (
      span,
      txs: ProcessedTx[],
+     lastArchive: AppendOnlyTreeSnapshot,
+     endState: StateReference,
      globalVariables: GlobalVariables,
-     l1ToL2Messages: Fr[],
-     db: MerkleTreeReadOperations,
+     startSpongeBlob: SpongeBlob,
+     isFirstBlock: boolean,
    ) => {
-     span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber.toNumber());
-     const stateReference = new StateReference(
-       await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
-       new PartialStateReference(
-         await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
-         await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
-         await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
-       ),
-     );
-
-     const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+     span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);

      const txEffects = txs.map(tx => tx.txEffect);
      const body = new Body(txEffects);

-     const numTxs = body.txEffects.length;
-     const outHash =
-       numTxs === 0
-         ? Fr.ZERO.toBuffer()
-         : numTxs === 1
-           ? body.txEffects[0].txOutHash()
-           : computeUnbalancedMerkleRoot(
-               body.txEffects.map(tx => tx.txOutHash()),
-               TxEffect.empty().txOutHash(),
-             );
+     const totalFees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+     const totalManaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);

-     l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-     const hasher = (left: Buffer, right: Buffer) => Promise.resolve(sha256Trunc(Buffer.concat([left, right])));
-     const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
-     const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
-     const parityShaRoot = await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer()));
-     const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobs(body.toBlobFields()));
+     const { l1ToL2MessageTree, partial } = endState;

-     const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityShaRoot, outHash);
+     const blockBlobFields = encodeBlockBlobData({
+       blockEndMarker: {
+         timestamp: globalVariables.timestamp,
+         blockNumber: globalVariables.blockNumber,
+         numTxs: txs.length,
+       },
+       blockEndStateField: {
+         l1ToL2MessageNextAvailableLeafIndex: l1ToL2MessageTree.nextAvailableLeafIndex,
+         noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+         nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+         publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+         totalManaUsed: totalManaUsed.toBigInt(),
+       },
+       lastArchiveRoot: lastArchive.root,
+       noteHashRoot: partial.noteHashTree.root,
+       nullifierRoot: partial.nullifierTree.root,
+       publicDataRoot: partial.publicDataTree.root,
+       l1ToL2MessageRoot: isFirstBlock ? l1ToL2MessageTree.root : undefined,
+       txs: body.toTxBlobData(),
+     });

-     const fees = body.txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
-     const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+     const endSpongeBlob = startSpongeBlob.clone();
+     await endSpongeBlob.absorb(blockBlobFields);
+     const spongeBlobHash = await endSpongeBlob.squeeze();

-     const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);
+     const header = BlockHeader.from({
+       lastArchive,
+       state: endState,
+       spongeBlobHash,
+       globalVariables,
+       totalFees,
+       totalManaUsed,
+     });

-     return { header, body };
+     return { header, body, blockBlobFields };
    },
  );

- export function getBlobsHashFromBlobs(inputs: Blob[]): Buffer {
-   const blobHashes = serializeToBuffer(inputs.map(b => b.getEthVersionedBlobHash()));
-   return sha256Trunc(serializeToBuffer(blobHashes));
- }
-
- // Validate that the roots of all local trees match the output of the root circuit simulation
- export async function validateBlockRootOutput(
-   blockRootOutput: BlockRootOrBlockMergePublicInputs,
-   blockHeader: BlockHeader,
-   db: MerkleTreeReadOperations,
- ) {
-   await Promise.all([
-     validateState(blockHeader.state, db),
-     validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), blockRootOutput.newArchive, 'Archive'),
-   ]);
+ export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
+   const { size } = await db.getTreeInfo(treeId);
+   const path = await db.getSiblingPath(treeId, size - 1n);
+   return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
  }

- export const validateState = runInSpan(
-   'BlockBuilderHelpers',
-   'validateState',
-   async (_span, state: StateReference, db: MerkleTreeReadOperations) => {
-     const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map(
-       async (id: MerkleTreeId) => {
-         return { key: id, value: await getTreeSnapshot(id, db) };
-       },
-     );
-     const snapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot> = new Map(
-       (await Promise.all(promises)).map(obj => [obj.key, obj.value]),
-     );
-     validatePartialState(state.partial, snapshots);
-     validateSimulatedTree(
-       await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
-       state.l1ToL2MessageTree,
-       'L1ToL2MessageTree',
-     );
-   },
- );
-
  export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
    const { size } = await db.getTreeInfo(treeId);
    const path = await db.getSiblingPath(treeId, size);
    return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
  }

- export const getConstantRollupData = runInSpan(
-   'BlockBuilderHelpers',
-   'getConstantRollupData',
-   async (_span, globalVariables: GlobalVariables, db: MerkleTreeReadOperations): Promise<ConstantRollupData> => {
-     return ConstantRollupData.from({
-       vkTreeRoot: getVKTreeRoot(),
-       protocolContractTreeRoot,
-       lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
-       globalVariables,
-     });
-   },
- );
-
  export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot> {
    const treeInfo = await db.getTreeInfo(id);
    return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
@@ -430,48 +386,6 @@ export function makeEmptyMembershipWitness<N extends number>(height: N)
    );
  }

- const processPublicDataUpdateRequests = runInSpan(
-   'BlockBuilderHelpers',
-   'processPublicDataUpdateRequests',
-   async (span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
-     span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
-     const allPublicDataWrites = tx.txEffect.publicDataWrites.map(
-       ({ leafSlot, value }) => new PublicDataTreeLeaf(leafSlot, value),
-     );
-
-     const { lowLeavesWitnessData, insertionWitnessData } = await db.sequentialInsert(
-       MerkleTreeId.PUBLIC_DATA_TREE,
-       allPublicDataWrites.map(write => {
-         if (write.isEmpty()) {
-           throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}`);
-         }
-         return write.toBuffer();
-       }),
-     );
-
-     const lowPublicDataWritesPreimages = lowLeavesWitnessData.map(
-       lowLeafWitness => lowLeafWitness.leafPreimage as PublicDataTreeLeafPreimage,
-     );
-     const lowPublicDataWritesMembershipWitnesses = lowLeavesWitnessData.map(lowLeafWitness =>
-       MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
-         lowLeafWitness.index,
-         assertLength(lowLeafWitness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
-       ),
-     );
-     const publicDataWritesSiblingPaths = insertionWitnessData.map(w => {
-       const insertionSiblingPath = w.siblingPath.toFields();
-       assertLength(insertionSiblingPath, PUBLIC_DATA_TREE_HEIGHT);
-       return insertionSiblingPath as Tuple<Fr, typeof PUBLIC_DATA_TREE_HEIGHT>;
-     });
-
-     return {
-       lowPublicDataWritesPreimages,
-       lowPublicDataWritesMembershipWitnesses,
-       publicDataWritesSiblingPaths,
-     };
-   },
- );
-
  export async function getSubtreeSiblingPath(
    treeId: MerkleTreeId,
    subtreeHeight: number,
@@ -537,17 +451,26 @@ function validateSimulatedTree(
  }

  export function validateTx(tx: ProcessedTx) {
-   const txHeader = tx.constants.historicalHeader;
-   if (txHeader.state.l1ToL2MessageTree.isZero()) {
+   const txHeader = tx.data.constants.anchorBlockHeader;
+   if (txHeader.state.l1ToL2MessageTree.isEmpty()) {
      throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
    }
-   if (txHeader.state.partial.noteHashTree.isZero()) {
+   if (txHeader.state.partial.noteHashTree.isEmpty()) {
      throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`);
    }
-   if (txHeader.state.partial.nullifierTree.isZero()) {
+   if (txHeader.state.partial.nullifierTree.isEmpty()) {
      throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`);
    }
-   if (txHeader.state.partial.publicDataTree.isZero()) {
+   if (txHeader.state.partial.publicDataTree.isEmpty()) {
      throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`);
    }
  }
+
+ export function toProofData<T extends Bufferable, PROOF_LENGTH extends number>(
+   { inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<T, PROOF_LENGTH>,
+   vkIndex?: number,
+ ) {
+   const leafIndex = vkIndex || getVKIndex(verificationKey.keyAsFields);
+   const vkData = new VkData(verificationKey, leafIndex, getVKSiblingPath(leafIndex));
+   return new ProofData(inputs, proof, vkData);
+ }
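
Two usage sketches follow. Neither appears in the package; they only illustrate the APIs visible in the diff above.

First, the renamed hint builder. Compared with the old buildBaseRollupHints, insertSideEffectsAndBuildBaseRollupHints additionally takes the last archive snapshot, the new L1-to-L2 message tree snapshot, and the prover id, and the world-state handle moves to the last position. A minimal sketch, assuming the runInSpan wrapper hides the leading `span` parameter from callers and that the surrounding orchestrator already holds all six inputs:

    // Hedged sketch, not from the package; every input is assumed to come from the orchestrator.
    import type { SpongeBlob } from '@aztec/blob-lib';
    import { Fr } from '@aztec/foundation/fields';
    import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
    import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
    import type { ProcessedTx } from '@aztec/stdlib/tx';
    import { insertSideEffectsAndBuildBaseRollupHints } from './block-building-helpers.js';

    async function buildHintsForTx(
      tx: ProcessedTx,
      lastArchive: AppendOnlyTreeSnapshot,
      newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
      startSpongeBlob: SpongeBlob,
      proverId: Fr,
      db: MerkleTreeWriteOperations,
    ) {
      // Inserts the tx's note hashes, nullifiers, and public data writes into `db`,
      // then returns PublicBaseRollupHints when tx.avmProvingRequest is set,
      // and PrivateBaseRollupHints otherwise.
      return await insertSideEffectsAndBuildBaseRollupHints(
        tx,
        lastArchive,
        newL1ToL2MessageTreeSnapshot,
        startSpongeBlob,
        proverId,
        db,
      );
    }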
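Second, the new blob helpers. buildBlobHints, buildFinalBlobChallenges, and accumulateBlobs replace the old buildBlobHints/getBlobsHashFromBlobs pair: challenges are precomputed once over every checkpoint's blob fields, and an accumulator is then folded over each checkpoint. A hedged composition sketch, assuming only the signatures shown above and a startBlobAccumulator supplied by the caller (the diff does not show how the initial accumulator is constructed):

    // Hedged sketch, not from the package.
    import type { BatchedBlobAccumulator } from '@aztec/blob-lib';
    import { Fr } from '@aztec/foundation/fields';
    import { accumulateBlobs, buildBlobHints, buildFinalBlobChallenges } from './block-building-helpers.js';

    async function foldCheckpointBlobs(
      blobFieldsPerCheckpoint: Fr[][],
      startBlobAccumulator: BatchedBlobAccumulator,
    ) {
      // Precompute the final challenges over all checkpoints at once.
      const finalBlobChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);

      let accumulator = startBlobAccumulator;
      const hintsPerCheckpoint = [];
      for (const blobFields of blobFieldsPerCheckpoint) {
        // Per-checkpoint hints: blobCommitments are checked in the root rollup
        // against finalBlobChallenges; blobsHash is validated on L1.
        hintsPerCheckpoint.push(buildBlobHints(blobFields));
        // Assumes the runInSpan wrapper hides the leading `span` parameter.
        accumulator = await accumulateBlobs(blobFields, accumulator);
      }
      return { finalBlobChallenges, hintsPerCheckpoint, endBlobAccumulator: accumulator };
    }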