@aztec/prover-client 0.0.0-test.0 → 0.0.1-fake-c83136db25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. package/dest/block-factory/index.d.ts +2 -0
  2. package/dest/block-factory/index.d.ts.map +1 -0
  3. package/dest/block-factory/light.d.ts +38 -0
  4. package/dest/block-factory/light.d.ts.map +1 -0
  5. package/dest/block-factory/light.js +94 -0
  6. package/dest/config.d.ts +6 -6
  7. package/dest/config.d.ts.map +1 -1
  8. package/dest/config.js +11 -1
  9. package/dest/mocks/fixtures.d.ts +7 -4
  10. package/dest/mocks/fixtures.d.ts.map +1 -1
  11. package/dest/mocks/fixtures.js +32 -4
  12. package/dest/mocks/test_context.d.ts +43 -15
  13. package/dest/mocks/test_context.d.ts.map +1 -1
  14. package/dest/mocks/test_context.js +110 -48
  15. package/dest/orchestrator/block-building-helpers.d.ts +37 -28
  16. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  17. package/dest/orchestrator/block-building-helpers.js +156 -150
  18. package/dest/orchestrator/block-proving-state.d.ts +62 -46
  19. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  20. package/dest/orchestrator/block-proving-state.js +223 -179
  21. package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
  22. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
  23. package/dest/orchestrator/checkpoint-proving-state.js +211 -0
  24. package/dest/orchestrator/epoch-proving-state.d.ts +37 -24
  25. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  26. package/dest/orchestrator/epoch-proving-state.js +143 -73
  27. package/dest/orchestrator/orchestrator.d.ts +34 -31
  28. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  29. package/dest/orchestrator/orchestrator.js +392 -234
  30. package/dest/orchestrator/orchestrator_metrics.d.ts +2 -0
  31. package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
  32. package/dest/orchestrator/orchestrator_metrics.js +9 -0
  33. package/dest/orchestrator/tx-proving-state.d.ts +12 -10
  34. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  35. package/dest/orchestrator/tx-proving-state.js +30 -38
  36. package/dest/prover-client/prover-client.d.ts +3 -3
  37. package/dest/prover-client/prover-client.d.ts.map +1 -1
  38. package/dest/prover-client/prover-client.js +5 -4
  39. package/dest/prover-client/server-epoch-prover.d.ts +13 -10
  40. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  41. package/dest/prover-client/server-epoch-prover.js +11 -11
  42. package/dest/proving_broker/broker_prover_facade.d.ts +22 -15
  43. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  44. package/dest/proving_broker/broker_prover_facade.js +64 -39
  45. package/dest/proving_broker/config.d.ts +9 -4
  46. package/dest/proving_broker/config.d.ts.map +1 -1
  47. package/dest/proving_broker/config.js +15 -4
  48. package/dest/proving_broker/factory.d.ts +1 -1
  49. package/dest/proving_broker/factory.d.ts.map +1 -1
  50. package/dest/proving_broker/factory.js +5 -1
  51. package/dest/proving_broker/fixtures.js +1 -1
  52. package/dest/proving_broker/proof_store/factory.js +1 -1
  53. package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
  54. package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
  55. package/dest/proving_broker/proof_store/index.d.ts +1 -0
  56. package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
  57. package/dest/proving_broker/proof_store/index.js +1 -0
  58. package/dest/proving_broker/proving_agent.d.ts +3 -3
  59. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  60. package/dest/proving_broker/proving_agent.js +83 -47
  61. package/dest/proving_broker/proving_broker.d.ts +11 -2
  62. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  63. package/dest/proving_broker/proving_broker.js +34 -22
  64. package/dest/proving_broker/proving_broker_database/memory.js +1 -1
  65. package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
  66. package/dest/proving_broker/proving_broker_database/persisted.js +9 -8
  67. package/dest/proving_broker/proving_job_controller.d.ts +7 -8
  68. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  69. package/dest/proving_broker/proving_job_controller.js +89 -61
  70. package/dest/proving_broker/rpc.d.ts +3 -5
  71. package/dest/proving_broker/rpc.d.ts.map +1 -1
  72. package/dest/proving_broker/rpc.js +1 -4
  73. package/dest/test/mock_proof_store.d.ts +9 -0
  74. package/dest/test/mock_proof_store.d.ts.map +1 -0
  75. package/dest/test/mock_proof_store.js +10 -0
  76. package/dest/test/mock_prover.d.ts +23 -16
  77. package/dest/test/mock_prover.d.ts.map +1 -1
  78. package/dest/test/mock_prover.js +38 -20
  79. package/package.json +29 -29
  80. package/src/block-factory/index.ts +1 -0
  81. package/src/block-factory/light.ts +140 -0
  82. package/src/config.ts +24 -8
  83. package/src/mocks/fixtures.ts +43 -15
  84. package/src/mocks/test_context.ts +201 -75
  85. package/src/orchestrator/block-building-helpers.ts +247 -243
  86. package/src/orchestrator/block-proving-state.ts +247 -231
  87. package/src/orchestrator/checkpoint-proving-state.ts +299 -0
  88. package/src/orchestrator/epoch-proving-state.ts +187 -111
  89. package/src/orchestrator/orchestrator.ts +590 -289
  90. package/src/orchestrator/orchestrator_metrics.ts +20 -1
  91. package/src/orchestrator/tx-proving-state.ts +60 -61
  92. package/src/prover-client/prover-client.ts +16 -14
  93. package/src/prover-client/server-epoch-prover.ts +40 -21
  94. package/src/proving_broker/broker_prover_facade.ts +200 -113
  95. package/src/proving_broker/config.ts +17 -6
  96. package/src/proving_broker/factory.ts +2 -1
  97. package/src/proving_broker/fixtures.ts +1 -1
  98. package/src/proving_broker/proof_store/factory.ts +1 -1
  99. package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
  100. package/src/proving_broker/proof_store/index.ts +1 -0
  101. package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
  102. package/src/proving_broker/proving_agent.ts +89 -47
  103. package/src/proving_broker/proving_broker.ts +51 -32
  104. package/src/proving_broker/proving_broker_database/memory.ts +1 -1
  105. package/src/proving_broker/proving_broker_database/persisted.ts +9 -8
  106. package/src/proving_broker/proving_job_controller.ts +92 -81
  107. package/src/proving_broker/rpc.ts +1 -6
  108. package/src/test/mock_proof_store.ts +14 -0
  109. package/src/test/mock_prover.ts +164 -60
  110. package/dest/bin/get-proof-inputs.d.ts +0 -2
  111. package/dest/bin/get-proof-inputs.d.ts.map +0 -1
  112. package/dest/bin/get-proof-inputs.js +0 -51
  113. package/dest/block_builder/index.d.ts +0 -6
  114. package/dest/block_builder/index.d.ts.map +0 -1
  115. package/dest/block_builder/light.d.ts +0 -33
  116. package/dest/block_builder/light.d.ts.map +0 -1
  117. package/dest/block_builder/light.js +0 -82
  118. package/src/bin/get-proof-inputs.ts +0 -59
  119. package/src/block_builder/index.ts +0 -6
  120. package/src/block_builder/light.ts +0 -101
  121. /package/dest/{block_builder → block-factory}/index.js +0 -0
@@ -1,209 +1,210 @@
1
- import { Blob } from '@aztec/blob-lib';
2
- import { ARCHIVE_HEIGHT, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PUBLIC_DATA_TREE_HEIGHT } from '@aztec/constants';
1
+ import { BatchedBlob, SpongeBlob, computeBlobsHashFromBlobs, getBlobCommitmentsFromBlobs, getBlobsPerL1Block } from '@aztec/blob-lib';
2
+ import { ARCHIVE_HEIGHT, CHONK_PROOF_LENGTH, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PUBLIC_DATA_TREE_HEIGHT } from '@aztec/constants';
3
3
  import { makeTuple } from '@aztec/foundation/array';
4
4
  import { padArrayEnd } from '@aztec/foundation/collection';
5
5
  import { sha256Trunc } from '@aztec/foundation/crypto';
6
6
  import { Fr } from '@aztec/foundation/fields';
7
- import { assertLength, serializeToBuffer, toFriendlyJSON } from '@aztec/foundation/serialize';
8
- import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
9
- import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
10
- import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
7
+ import { assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
8
+ import { MembershipWitness, MerkleTreeCalculator, computeCompressedUnbalancedMerkleTreeRoot } from '@aztec/foundation/trees';
9
+ import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
10
+ import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
11
11
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
12
- import { PublicDataHint } from '@aztec/stdlib/avm';
13
- import { Body } from '@aztec/stdlib/block';
14
- import { ContractClassLog } from '@aztec/stdlib/logs';
15
- import { ConstantRollupData, PrivateBaseRollupHints, PrivateBaseStateDiffHints, PublicBaseRollupHints } from '@aztec/stdlib/rollup';
16
- import { AppendOnlyTreeSnapshot, MerkleTreeId, NullifierLeafPreimage, PublicDataTreeLeaf, PublicDataTreeLeafPreimage, getTreeHeight } from '@aztec/stdlib/trees';
17
- import { BlockHeader, ContentCommitment, PartialStateReference, StateReference, TxEffect } from '@aztec/stdlib/tx';
12
+ import { Body, L2BlockHeader, getBlockBlobFields } from '@aztec/stdlib/block';
13
+ import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
14
+ import { ContractClassLogFields } from '@aztec/stdlib/logs';
15
+ import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
16
+ import { BlockConstantData, PrivateBaseRollupHints, PublicBaseRollupHints, PublicChonkVerifierPrivateInputs, TreeSnapshotDiffHints } from '@aztec/stdlib/rollup';
17
+ import { AppendOnlyTreeSnapshot, MerkleTreeId, NullifierLeafPreimage, PublicDataTreeLeaf, getTreeHeight } from '@aztec/stdlib/trees';
18
+ import { BlockHeader, ContentCommitment, GlobalVariables, PartialStateReference, StateReference } from '@aztec/stdlib/tx';
19
+ import { VkData } from '@aztec/stdlib/vks';
18
20
  import { Attributes, runInSpan } from '@aztec/telemetry-client';
19
- import { inspect } from 'util';
20
21
  // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
21
- export const buildBaseRollupHints = runInSpan('BlockBuilderHelpers', 'buildBaseRollupHints', async (span, tx, globalVariables, db, startSpongeBlob)=>{
22
+ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan('BlockBuilderHelpers', 'buildBaseRollupHints', async (span, tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, proverId, db)=>{
22
23
  span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
23
24
  // Get trees info before any changes hit
24
- const constants = await getConstantRollupData(globalVariables, db);
25
25
  const start = new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db));
26
- // Get the subtree sibling paths for the circuit
27
- const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db);
28
- const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, (i)=>i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO);
26
+ // Get the note hash subtree root sibling path for insertion.
27
+ const noteHashSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db), NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
29
28
  // Update the note hash trees with the new items being inserted to get the new roots
30
29
  // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
31
30
  const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
32
31
  await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
33
- // Create data hint for reading fee payer initial balance in Fee Juice
34
- const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
35
- const feePayerFeeJuiceBalanceReadHint = await getPublicDataHint(db, leafSlot.toBigInt());
36
32
  // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
37
33
  // All reads that refer to writes in the same tx are transient and can be simplified out.
38
34
  const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
39
35
  // Update the nullifier tree, capturing the low nullifier info for each individual operation
40
- const { lowLeavesWitnessData: nullifierWitnessLeaves, newSubtreeSiblingPath: nullifiersSubtreeSiblingPath, sortedNewLeaves: sortednullifiers, sortedNewLeavesIndexes } = await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map((n)=>n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT);
36
+ const { lowLeavesWitnessData: nullifierWitnessLeaves, newSubtreeSiblingPath: nullifiersSubtreeRootSiblingPath, sortedNewLeaves: sortedNullifiers, sortedNewLeavesIndexes } = await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map((n)=>n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT);
41
37
  if (nullifierWitnessLeaves === undefined) {
42
38
  throw new Error(`Could not craft nullifier batch insertion proofs`);
43
39
  }
44
- // Extract witness objects from returned data
45
- const nullifierPredecessorMembershipWitnessesWithoutPadding = nullifierWitnessLeaves.map((l)=>MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)));
46
- const nullifierSubtreeSiblingPathArray = nullifiersSubtreeSiblingPath.toFields();
47
- const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, (i)=>i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO);
48
- // Append new data to startSpongeBlob
49
- const inputSpongeBlob = startSpongeBlob.clone();
50
- await startSpongeBlob.absorb(tx.txEffect.toBlobFields());
51
- const contractClassLogsPreimages = makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, (i)=>tx.txEffect.contractClassLogs[i]?.toUnsiloed() || ContractClassLog.empty());
40
+ const blockHash = await tx.data.constants.anchorBlockHeader.hash();
41
+ const anchorBlockArchiveSiblingPath = (await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)).siblingPath;
42
+ const contractClassLogsFields = makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, (i)=>tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty());
52
43
  if (tx.avmProvingRequest) {
53
- const blockHash = await tx.constants.historicalHeader.hash();
54
- const archiveRootMembershipWitness = await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db);
55
44
  return PublicBaseRollupHints.from({
56
- startSpongeBlob: inputSpongeBlob,
57
- archiveRootMembershipWitness,
58
- contractClassLogsPreimages,
59
- constants
45
+ startSpongeBlob,
46
+ lastArchive,
47
+ anchorBlockArchiveSiblingPath,
48
+ contractClassLogsFields
60
49
  });
61
50
  } else {
62
51
  if (txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 || txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 || txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1) {
63
52
  throw new Error(`More than one public data write in a private only tx`);
64
53
  }
65
- const feeWriteLowLeafPreimage = txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0] || PublicDataTreeLeafPreimage.empty();
66
- const feeWriteLowLeafMembershipWitness = txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0] || MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT);
67
- const feeWriteSiblingPath = txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths[0] || makeTuple(PUBLIC_DATA_TREE_HEIGHT, ()=>Fr.ZERO);
68
- const stateDiffHints = PrivateBaseStateDiffHints.from({
69
- nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>i < nullifierWitnessLeaves.length ? nullifierWitnessLeaves[i].leafPreimage : NullifierLeafPreimage.empty()),
54
+ // Get hints for reading fee payer's balance in the public data tree.
55
+ const feePayerBalanceMembershipWitness = txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0];
56
+ const feePayerBalanceLeafPreimage = txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0];
57
+ const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
58
+ if (!feePayerBalanceMembershipWitness || !leafSlot.equals(feePayerBalanceLeafPreimage?.leaf.slot)) {
59
+ throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
60
+ }
61
+ // Extract witness objects from returned data
62
+ const nullifierPredecessorMembershipWitnessesWithoutPadding = nullifierWitnessLeaves.map((l)=>MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)));
63
+ const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
64
+ noteHashSubtreeRootSiblingPath,
65
+ nullifierPredecessorPreimages: padArrayEnd(nullifierWitnessLeaves.map((l)=>l.leafPreimage), NullifierLeafPreimage.empty(), MAX_NULLIFIERS_PER_TX),
70
66
  nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>i < nullifierPredecessorMembershipWitnessesWithoutPadding.length ? nullifierPredecessorMembershipWitnessesWithoutPadding[i] : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT)),
71
- sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>Fr.fromBuffer(sortednullifiers[i])),
72
- sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>sortedNewLeavesIndexes[i]),
73
- noteHashSubtreeSiblingPath,
74
- nullifierSubtreeSiblingPath,
75
- feeWriteLowLeafPreimage,
76
- feeWriteLowLeafMembershipWitness,
77
- feeWriteSiblingPath
67
+ sortedNullifiers: assertLength(sortedNullifiers.map((n)=>Fr.fromBuffer(n)), MAX_NULLIFIERS_PER_TX),
68
+ sortedNullifierIndexes: assertLength(sortedNewLeavesIndexes, MAX_NULLIFIERS_PER_TX),
69
+ nullifierSubtreeRootSiblingPath: assertLength(nullifiersSubtreeRootSiblingPath.toFields(), NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH),
70
+ feePayerBalanceMembershipWitness
71
+ });
72
+ const constants = BlockConstantData.from({
73
+ lastArchive,
74
+ l1ToL2TreeSnapshot: newL1ToL2MessageTreeSnapshot,
75
+ vkTreeRoot: tx.data.constants.vkTreeRoot,
76
+ protocolContractsHash: tx.data.constants.protocolContractsHash,
77
+ globalVariables: tx.globalVariables,
78
+ proverId
78
79
  });
79
- const blockHash = await tx.constants.historicalHeader.hash();
80
- const archiveRootMembershipWitness = await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db);
81
80
  return PrivateBaseRollupHints.from({
82
81
  start,
83
- startSpongeBlob: inputSpongeBlob,
84
- stateDiffHints,
85
- feePayerFeeJuiceBalanceReadHint,
86
- archiveRootMembershipWitness,
87
- contractClassLogsPreimages,
82
+ startSpongeBlob,
83
+ treeSnapshotDiffHints,
84
+ feePayerBalanceLeafPreimage,
85
+ anchorBlockArchiveSiblingPath,
86
+ contractClassLogsFields,
88
87
  constants
89
88
  });
90
89
  }
91
90
  });
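
Both base rollup variants above rely on the nullifier tree's batch insertion returning, for each new nullifier, its "low leaf": the current predecessor whose preimage and membership witness show the value is not already in the tree. Below is a minimal, self-contained TypeScript sketch of that predecessor lookup over a simplified in-memory leaf set; the names and flat structure are illustrative assumptions, not the @aztec indexed-tree implementation.

// Illustrative only: a simplified in-memory "indexed tree" leaf set.
// The real tree (db.batchInsert on MerkleTreeId.NULLIFIER_TREE) also returns
// sibling paths; here we only show how the low leaf (predecessor) is chosen.
type IndexedLeaf = { value: bigint; nextValue: bigint; nextIndex: number };

function findLowLeaf(leaves: IndexedLeaf[], newValue: bigint): { index: number; leaf: IndexedLeaf } {
  // The low leaf is the largest existing value strictly below the new value;
  // its (value, nextValue) range shows the new value is not yet present.
  let best = -1;
  for (let i = 0; i < leaves.length; i++) {
    if (leaves[i].value < newValue && (best === -1 || leaves[i].value > leaves[best].value)) {
      best = i;
    }
  }
  if (best === -1) {
    throw new Error('No predecessor leaf found');
  }
  return { index: best, leaf: leaves[best] };
}

// Example: inserting 25 into a tree holding 0, 10, 30 picks the leaf holding 10.
const exampleLeaves: IndexedLeaf[] = [
  { value: 0n, nextValue: 10n, nextIndex: 1 },
  { value: 10n, nextValue: 30n, nextIndex: 2 },
  { value: 30n, nextValue: 0n, nextIndex: 0 },
];
console.log(findLowLeaf(exampleLeaves, 25n)); // -> { index: 1, leaf: { value: 10n, ... } }
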
92
- export async function getPublicDataHint(db, leafSlot) {
93
- const { index } = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot) ?? {};
94
- if (index === undefined) {
95
- throw new Error(`Cannot find the previous value index for public data ${leafSlot}.`);
96
- }
97
- const siblingPath = await db.getSiblingPath(MerkleTreeId.PUBLIC_DATA_TREE, index);
98
- const membershipWitness = new MembershipWitness(PUBLIC_DATA_TREE_HEIGHT, index, siblingPath.toTuple());
99
- const leafPreimage = await db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, index);
100
- if (!leafPreimage) {
101
- throw new Error(`Cannot find the leaf preimage for public data tree at index ${index}.`);
102
- }
103
- const exists = leafPreimage.slot.toBigInt() === leafSlot;
104
- const value = exists ? leafPreimage.value : Fr.ZERO;
105
- return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage);
91
+ export function getChonkProofFromTx(tx) {
92
+ const publicInputs = tx.data.publicInputs().toFields();
93
+ const binaryProof = new Proof(Buffer.concat(tx.chonkProof.attachPublicInputs(publicInputs).fieldsWithPublicInputs.map((field)=>field.toBuffer())), publicInputs.length);
94
+ return new RecursiveProof(tx.chonkProof.fields, binaryProof, true, CHONK_PROOF_LENGTH);
95
+ }
96
+ export function getPublicChonkVerifierPrivateInputsFromTx(tx, proverId) {
97
+ const proofData = new ProofData(tx.data.toPrivateToPublicKernelCircuitPublicInputs(), getChonkProofFromTx(tx), getVkData('HidingKernelToPublic'));
98
+ return new PublicChonkVerifierPrivateInputs(proofData, proverId);
106
99
  }
107
- export const buildBlobHints = runInSpan('BlockBuilderHelpers', 'buildBlobHints', async (_span, txEffects)=>{
108
- const blobFields = txEffects.flatMap((tx)=>tx.toBlobFields());
109
- const blobs = await Blob.getBlobs(blobFields);
110
- const blobCommitments = blobs.map((b)=>b.commitmentToFields());
111
- const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
100
+ // Build "hints" as the private inputs for the checkpoint root rollup circuit.
101
+ // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
102
+ // The `blobsHash` will be validated on L1 against the submitted blob data.
103
+ export const buildBlobHints = (blobFields)=>{
104
+ const blobs = getBlobsPerL1Block(blobFields);
105
+ const blobCommitments = getBlobCommitmentsFromBlobs(blobs);
106
+ const blobsHash = computeBlobsHashFromBlobs(blobs);
112
107
  return {
113
- blobFields,
114
108
  blobCommitments,
115
109
  blobs,
116
110
  blobsHash
117
111
  };
112
+ };
113
+ // Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
114
+ export const buildBlobDataFromTxs = async (txsPerCheckpoint)=>{
115
+ const blobFields = txsPerCheckpoint.map((txs)=>getCheckpointBlobFields([
116
+ txs.map((tx)=>tx.txEffect)
117
+ ]));
118
+ const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
119
+ return {
120
+ blobFieldsLengths: blobFields.map((fields)=>fields.length),
121
+ finalBlobChallenges
122
+ };
123
+ };
124
+ export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint)=>{
125
+ const blobs = blobFieldsPerCheckpoint.map((blobFields)=>getBlobsPerL1Block(blobFields));
126
+ return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
127
+ };
128
+ export const accumulateBlobs = runInSpan('BlockBuilderHelpers', 'accumulateBlobs', async (_span, blobFields, startBlobAccumulator)=>{
129
+ const blobs = getBlobsPerL1Block(blobFields);
130
+ const endBlobAccumulator = await startBlobAccumulator.accumulateBlobs(blobs);
131
+ return endBlobAccumulator;
118
132
  });
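
The comments above describe the flow: blob fields are grouped per checkpoint, turned into blobs per L1 block, the final challenges are precomputed up front from all checkpoints' blobs, and an accumulator is then advanced one batch at a time. A rough TypeScript sketch of that fold follows, using hypothetical placeholder types rather than the real @aztec/blob-lib classes.

// Hypothetical placeholder types; the real ones (BatchedBlob, accumulators) live in @aztec/blob-lib.
type BlobLike = { bytes: Uint8Array };
interface BlobAccumulator {
  accumulateBlobs(blobs: BlobLike[]): Promise<BlobAccumulator>;
}

// Fold a start accumulator over each checkpoint's blobs in order, mirroring how
// accumulateBlobs above advances a start accumulator by one batch of blob fields.
async function accumulateEpoch(start: BlobAccumulator, blobsPerCheckpoint: BlobLike[][]): Promise<BlobAccumulator> {
  let acc = start;
  for (const blobs of blobsPerCheckpoint) {
    acc = await acc.accumulateBlobs(blobs);
  }
  return acc;
}
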
119
- export const buildHeaderFromCircuitOutputs = runInSpan('BlockBuilderHelpers', 'buildHeaderFromCircuitOutputs', async (_span, previousRollupData, parityPublicInputs, rootRollupOutputs, endState, logger)=>{
120
- if (previousRollupData.length > 2) {
121
- throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
122
- }
123
- const blobsHash = rootRollupOutputs.blobPublicInputs[0].getBlobsHash();
124
- const numTxs = previousRollupData.reduce((sum, d)=>sum + d.numTxs, 0);
125
- const outHash = previousRollupData.length === 0 ? Fr.ZERO.toBuffer() : previousRollupData.length === 1 ? previousRollupData[0].outHash.toBuffer() : sha256Trunc(Buffer.concat([
126
- previousRollupData[0].outHash.toBuffer(),
127
- previousRollupData[1].outHash.toBuffer()
128
- ]));
129
- const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityPublicInputs.shaRoot.toBuffer(), outHash);
130
- const accumulatedFees = previousRollupData.reduce((sum, d)=>sum.add(d.accumulatedFees), Fr.ZERO);
131
- const accumulatedManaUsed = previousRollupData.reduce((sum, d)=>sum.add(d.accumulatedManaUsed), Fr.ZERO);
132
- const header = new BlockHeader(rootRollupOutputs.previousArchive, contentCommitment, endState, rootRollupOutputs.endGlobalVariables, accumulatedFees, accumulatedManaUsed);
133
- if (!(await header.hash()).equals(rootRollupOutputs.endBlockHash)) {
134
- logger?.error(`Block header mismatch when building header from circuit outputs.` + `\n\nHeader: ${inspect(header)}` + `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`);
135
- throw new Error(`Block header mismatch when building from circuit outputs`);
136
- }
137
- return header;
133
+ export const buildHeaderFromCircuitOutputs = runInSpan('BlockBuilderHelpers', 'buildHeaderFromCircuitOutputs', async (_span, blockRootRollupOutput)=>{
134
+ const constants = blockRootRollupOutput.constants;
135
+ const globalVariables = GlobalVariables.from({
136
+ chainId: constants.chainId,
137
+ version: constants.version,
138
+ blockNumber: blockRootRollupOutput.previousArchive.nextAvailableLeafIndex,
139
+ timestamp: blockRootRollupOutput.endTimestamp,
140
+ slotNumber: constants.slotNumber,
141
+ coinbase: constants.coinbase,
142
+ feeRecipient: constants.feeRecipient,
143
+ gasFees: constants.gasFees
144
+ });
145
+ const spongeBlobHash = await blockRootRollupOutput.endSpongeBlob.clone().squeeze();
146
+ return new BlockHeader(blockRootRollupOutput.previousArchive, blockRootRollupOutput.endState, spongeBlobHash, globalVariables, blockRootRollupOutput.accumulatedFees, blockRootRollupOutput.accumulatedManaUsed);
138
147
  });
139
- export const buildHeaderAndBodyFromTxs = runInSpan('BlockBuilderHelpers', 'buildHeaderAndBodyFromTxs', async (span, txs, globalVariables, l1ToL2Messages, db)=>{
140
- span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber.toNumber());
148
+ export const buildHeaderAndBodyFromTxs = runInSpan('BlockBuilderHelpers', 'buildHeaderAndBodyFromTxs', async (span, txs, globalVariables, l1ToL2Messages, db, startSpongeBlob)=>{
149
+ span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
141
150
  const stateReference = new StateReference(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db)));
142
151
  const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
143
152
  const txEffects = txs.map((tx)=>tx.txEffect);
144
153
  const body = new Body(txEffects);
145
- const numTxs = body.txEffects.length;
146
- const outHash = numTxs === 0 ? Fr.ZERO.toBuffer() : numTxs === 1 ? body.txEffects[0].txOutHash() : computeUnbalancedMerkleRoot(body.txEffects.map((tx)=>tx.txOutHash()), TxEffect.empty().txOutHash());
147
- l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
148
- const hasher = (left, right)=>Promise.resolve(sha256Trunc(Buffer.concat([
149
- left,
150
- right
151
- ])));
152
- const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
153
- const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
154
- const parityShaRoot = await parityCalculator.computeTreeRoot(l1ToL2Messages.map((msg)=>msg.toBuffer()));
155
- const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobs(body.toBlobFields()));
156
- const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityShaRoot, outHash);
157
- const fees = body.txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
154
+ const txOutHashes = txEffects.map((tx)=>tx.txOutHash());
155
+ const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeCompressedUnbalancedMerkleTreeRoot(txOutHashes));
156
+ const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
157
+ const blockBlobFields = body.toBlobFields();
158
+ // TODO(#17027): This only works when there's one block per checkpoint.
159
+ const blobFields = [
160
+ new Fr(blockBlobFields.length + 1)
161
+ ].concat(blockBlobFields);
162
+ const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
163
+ const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
164
+ const fees = txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
158
165
  const manaUsed = txs.reduce((acc, tx)=>acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
159
- const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);
166
+ const endSpongeBlob = startSpongeBlob?.clone() ?? await SpongeBlob.init(blobFields.length);
167
+ await endSpongeBlob.absorb(blockBlobFields);
168
+ const spongeBlobHash = await endSpongeBlob.squeeze();
169
+ const header = new L2BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed, spongeBlobHash);
160
170
  return {
161
171
  header,
162
172
  body
163
173
  };
164
174
  });
165
- export function getBlobsHashFromBlobs(inputs) {
166
- const blobHashes = serializeToBuffer(inputs.map((b)=>b.getEthVersionedBlobHash()));
167
- return sha256Trunc(serializeToBuffer(blobHashes));
175
+ export const buildBlockHeaderFromTxs = runInSpan('BlockBuilderHelpers', 'buildBlockHeaderFromTxs', async (span, txs, globalVariables, startSpongeBlob, db)=>{
176
+ span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
177
+ const stateReference = new StateReference(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db)));
178
+ const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
179
+ const blobFields = getBlockBlobFields(txs.map((tx)=>tx.txEffect));
180
+ const endSpongeBlob = startSpongeBlob.clone();
181
+ await endSpongeBlob.absorb(blobFields);
182
+ const spongeBlobHash = await endSpongeBlob.squeeze();
183
+ const txEffects = txs.map((tx)=>tx.txEffect);
184
+ const fees = txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
185
+ const manaUsed = txs.reduce((acc, tx)=>acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
186
+ return new BlockHeader(previousArchive, stateReference, spongeBlobHash, globalVariables, fees, manaUsed);
187
+ });
188
+ /** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */ export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages) {
189
+ const l1ToL2Messages = padArrayEnd(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
190
+ const hasher = (left, right)=>Promise.resolve(sha256Trunc(Buffer.concat([
191
+ left,
192
+ right
193
+ ])));
194
+ const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
195
+ const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
196
+ return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map((msg)=>msg.toBuffer())));
168
197
  }
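
For context, here is a minimal TypeScript sketch of the parity inHash shape shown above: pad the messages to a fixed power-of-two leaf count, then fold them into a binary Merkle root. It uses plain sha256 from Node's crypto and an illustrative leaf count; the real code uses the field-truncated sha256Trunc and NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP.

import { createHash } from 'node:crypto';

// Illustrative constant; not the protocol's NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP.
const LEAF_COUNT = 16;

const sha256 = (data: Buffer): Buffer => createHash('sha256').update(data).digest();

function merkleRoot(leaves: Buffer[]): Buffer {
  // Pad with zero leaves up to LEAF_COUNT, then hash pairwise level by level.
  const zeroLeaf = Buffer.alloc(32);
  let level = Array.from({ length: LEAF_COUNT }, (_, i) => leaves[i] ?? zeroLeaf);
  while (level.length > 1) {
    const next: Buffer[] = [];
    for (let i = 0; i < level.length; i += 2) {
      next.push(sha256(Buffer.concat([level[i], level[i + 1]])));
    }
    level = next;
  }
  return level[0];
}

// Example: root over two non-zero messages plus zero padding up to LEAF_COUNT leaves.
const root = merkleRoot([Buffer.alloc(32, 1), Buffer.alloc(32, 2)]);
console.log(root.toString('hex'));
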
169
- // Validate that the roots of all local trees match the output of the root circuit simulation
170
- export async function validateBlockRootOutput(blockRootOutput, blockHeader, db) {
171
- await Promise.all([
172
- validateState(blockHeader.state, db),
173
- validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), blockRootOutput.newArchive, 'Archive')
174
- ]);
198
+ export async function getLastSiblingPath(treeId, db) {
199
+ const { size } = await db.getTreeInfo(treeId);
200
+ const path = await db.getSiblingPath(treeId, size - 1n);
201
+ return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
175
202
  }
176
- export const validateState = runInSpan('BlockBuilderHelpers', 'validateState', async (_span, state, db)=>{
177
- const promises = [
178
- MerkleTreeId.NOTE_HASH_TREE,
179
- MerkleTreeId.NULLIFIER_TREE,
180
- MerkleTreeId.PUBLIC_DATA_TREE
181
- ].map(async (id)=>{
182
- return {
183
- key: id,
184
- value: await getTreeSnapshot(id, db)
185
- };
186
- });
187
- const snapshots = new Map((await Promise.all(promises)).map((obj)=>[
188
- obj.key,
189
- obj.value
190
- ]));
191
- validatePartialState(state.partial, snapshots);
192
- validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), state.l1ToL2MessageTree, 'L1ToL2MessageTree');
193
- });
194
203
  export async function getRootTreeSiblingPath(treeId, db) {
195
204
  const { size } = await db.getTreeInfo(treeId);
196
205
  const path = await db.getSiblingPath(treeId, size);
197
206
  return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
198
207
  }
199
- export const getConstantRollupData = runInSpan('BlockBuilderHelpers', 'getConstantRollupData', async (_span, globalVariables, db)=>{
200
- return ConstantRollupData.from({
201
- vkTreeRoot: getVKTreeRoot(),
202
- protocolContractTreeRoot,
203
- lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
204
- globalVariables
205
- });
206
- });
207
208
  export async function getTreeSnapshot(id, db) {
208
209
  const treeInfo = await db.getTreeInfo(id);
209
210
  return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
@@ -269,17 +270,22 @@ function validateSimulatedTree(localTree, simulatedTree, name, label) {
269
270
  }
270
271
  }
271
272
  export function validateTx(tx) {
272
- const txHeader = tx.constants.historicalHeader;
273
- if (txHeader.state.l1ToL2MessageTree.isZero()) {
273
+ const txHeader = tx.data.constants.anchorBlockHeader;
274
+ if (txHeader.state.l1ToL2MessageTree.isEmpty()) {
274
275
  throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
275
276
  }
276
- if (txHeader.state.partial.noteHashTree.isZero()) {
277
+ if (txHeader.state.partial.noteHashTree.isEmpty()) {
277
278
  throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`);
278
279
  }
279
- if (txHeader.state.partial.nullifierTree.isZero()) {
280
+ if (txHeader.state.partial.nullifierTree.isEmpty()) {
280
281
  throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`);
281
282
  }
282
- if (txHeader.state.partial.publicDataTree.isZero()) {
283
+ if (txHeader.state.partial.publicDataTree.isEmpty()) {
283
284
  throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`);
284
285
  }
285
286
  }
287
+ export function toProofData({ inputs, proof, verificationKey }, vkIndex) {
288
+ const leafIndex = vkIndex || getVKIndex(verificationKey.keyAsFields);
289
+ const vkData = new VkData(verificationKey, leafIndex, getVKSiblingPath(leafIndex));
290
+ return new ProofData(inputs, proof, vkData);
291
+ }
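
The new toProofData helper bundles a circuit output with the verification key that proved it, plus that key's position and sibling path in the protocol VK tree. The TypeScript shapes below are a rough, hypothetical rendering of that bundle, not the actual ProofData/VkData classes from @aztec/stdlib.

// Rough shapes only; the real classes are ProofData / VkData in @aztec/stdlib.
type Fr = bigint;

interface VkDataShape<VK> {
  vk: VK;              // the verification key itself
  leafIndex: number;   // its position in the protocol VK tree
  siblingPath: Fr[];   // membership path so a parent circuit can check the VK against the VK tree root
}

interface ProofDataShape<PI, VK> {
  inputs: PI;           // the circuit's public inputs
  proof: Uint8Array;    // the recursive proof over those inputs
  vkData: VkDataShape<VK>; // which key verified it, plus its tree membership witness
}

// toProofData above fills this bundle: it looks up the VK's leaf index (unless one
// is passed in) and fetches the matching sibling path before wrapping everything.
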
@@ -1,18 +1,20 @@
1
- import { SpongeBlob } from '@aztec/blob-lib';
2
- import { type ARCHIVE_HEIGHT, type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants';
1
+ import type { SpongeBlob } from '@aztec/blob-lib';
2
+ import { type ARCHIVE_HEIGHT, type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants';
3
3
  import { Fr } from '@aztec/foundation/fields';
4
- import type { Logger } from '@aztec/foundation/log';
5
- import type { Tuple } from '@aztec/foundation/serialize';
4
+ import { type Tuple } from '@aztec/foundation/serialize';
6
5
  import { type TreeNodeLocation } from '@aztec/foundation/trees';
7
- import type { L2Block } from '@aztec/stdlib/block';
8
6
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
9
- import { type ParityPublicInputs, RootParityInputs } from '@aztec/stdlib/parity';
10
- import { type BaseOrMergeRollupPublicInputs, type BlockRootOrBlockMergePublicInputs, BlockRootRollupInputs, EmptyBlockRootRollupInputs, MergeRollupInputs, SingleTxBlockRootRollupInputs } from '@aztec/stdlib/rollup';
11
- import type { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
12
- import { type BlockHeader, type GlobalVariables } from '@aztec/stdlib/tx';
13
- import type { EpochProvingState } from './epoch-proving-state.js';
7
+ import { type ParityPublicInputs, ParityRootPrivateInputs } from '@aztec/stdlib/parity';
8
+ import { BlockRollupPublicInputs, BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointConstantData, TxMergeRollupPrivateInputs, type TxRollupPublicInputs } from '@aztec/stdlib/rollup';
9
+ import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
10
+ import { type BlockHeader, GlobalVariables } from '@aztec/stdlib/tx';
11
+ import type { UInt64 } from '@aztec/stdlib/types';
12
+ import type { CheckpointProvingState } from './checkpoint-proving-state.js';
14
13
  import type { TxProvingState } from './tx-proving-state.js';
15
- export type TreeSnapshots = Map<MerkleTreeId, AppendOnlyTreeSnapshot>;
14
+ export type ProofState<T, PROOF_LENGTH extends number> = {
15
+ provingOutput?: PublicInputsAndRecursiveProof<T, PROOF_LENGTH>;
16
+ isProving?: boolean;
17
+ };
16
18
  /**
17
19
  * The current state of the proving schedule for a given block. Managed by ProvingState.
18
20
  * Contains the raw inputs and intermediate state to generate every constituent proof in the tree.
@@ -20,57 +22,71 @@ export type TreeSnapshots = Map<MerkleTreeId, AppendOnlyTreeSnapshot>;
20
22
  export declare class BlockProvingState {
21
23
  #private;
22
24
  readonly index: number;
23
- readonly globalVariables: GlobalVariables;
24
- readonly newL1ToL2Messages: Fr[];
25
- private readonly l1ToL2MessageSubtreeSiblingPath;
26
- private readonly l1ToL2MessageTreeSnapshotAfterInsertion;
27
- private readonly lastArchiveSnapshot;
28
- private readonly newArchiveSiblingPath;
29
- private readonly previousBlockHeader;
30
- private readonly parentEpoch;
31
- private baseOrMergeProvingOutputs;
32
- private baseParityProvingOutputs;
33
- private rootParityProvingOutput;
34
- private blockRootProvingOutput;
35
- blockRootRollupStarted: boolean;
36
- block: L2Block | undefined;
37
- spongeBlobState: SpongeBlob | undefined;
38
- totalNumTxs: number;
25
+ readonly blockNumber: number;
26
+ readonly totalNumTxs: number;
27
+ private readonly constants;
28
+ private readonly timestamp;
29
+ readonly lastArchiveTreeSnapshot: AppendOnlyTreeSnapshot;
30
+ private readonly lastArchiveSiblingPath;
31
+ private readonly lastL1ToL2MessageTreeSnapshot;
32
+ private readonly lastL1ToL2MessageSubtreeRootSiblingPath;
33
+ readonly newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot;
34
+ private readonly headerOfLastBlockInPreviousCheckpoint;
35
+ private readonly startSpongeBlob;
36
+ parentCheckpoint: CheckpointProvingState;
37
+ private baseOrMergeProofs;
38
+ private baseParityProofs;
39
+ private rootParityProof;
40
+ private blockRootProof;
41
+ private builtBlockHeader;
42
+ private endSpongeBlob;
39
43
  private txs;
40
- error: string | undefined;
41
- constructor(index: number, globalVariables: GlobalVariables, newL1ToL2Messages: Fr[], l1ToL2MessageSubtreeSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>, l1ToL2MessageTreeSnapshotAfterInsertion: AppendOnlyTreeSnapshot, lastArchiveSnapshot: AppendOnlyTreeSnapshot, newArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>, previousBlockHeader: BlockHeader, parentEpoch: EpochProvingState);
42
- get blockNumber(): number;
43
- startNewBlock(numTxs: number, numBlobFields: number): void;
44
+ private isFirstBlock;
45
+ private error;
46
+ constructor(index: number, blockNumber: number, totalNumTxs: number, constants: CheckpointConstantData, timestamp: UInt64, lastArchiveTreeSnapshot: AppendOnlyTreeSnapshot, lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>, lastL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH>, newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot, headerOfLastBlockInPreviousCheckpoint: BlockHeader, startSpongeBlob: SpongeBlob, parentCheckpoint: CheckpointProvingState);
47
+ get epochNumber(): number;
44
48
  addNewTx(tx: TxProvingState): number;
45
- setBaseRollupProof(txIndex: number, provingOutput: PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): TreeNodeLocation;
46
- setMergeRollupProof(location: TreeNodeLocation, provingOutput: PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): void;
49
+ isAcceptingTxs(): boolean;
50
+ getProcessedTxs(): import("@aztec/stdlib/tx").ProcessedTx[];
51
+ tryStartProvingBase(txIndex: number): boolean;
52
+ setBaseRollupProof(txIndex: number, provingOutput: PublicInputsAndRecursiveProof<TxRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): TreeNodeLocation;
53
+ tryStartProvingMerge(location: TreeNodeLocation): boolean;
54
+ setMergeRollupProof(location: TreeNodeLocation, provingOutput: PublicInputsAndRecursiveProof<TxRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): void;
55
+ tryStartProvingBaseParity(index: number): boolean;
47
56
  setBaseParityProof(index: number, provingOutput: PublicInputsAndRecursiveProof<ParityPublicInputs>): void;
57
+ tryStartProvingRootParity(): boolean;
48
58
  setRootParityProof(provingOutput: PublicInputsAndRecursiveProof<ParityPublicInputs>): void;
49
- setBlockRootRollupProof(provingOutput: PublicInputsAndRecursiveProof<BlockRootOrBlockMergePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): void;
50
- get allTxs(): TxProvingState[];
51
- /** Returns the block number as an epoch number. Used for prioritizing proof requests. */
52
- get epochNumber(): number;
59
+ tryStartProvingBlockRoot(): boolean;
60
+ setBlockRootRollupProof(provingOutput: PublicInputsAndRecursiveProof<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): TreeNodeLocation;
61
+ getBlockRootRollupOutput(): BlockRollupPublicInputs | undefined;
62
+ setBuiltBlockHeader(blockHeader: BlockHeader): void;
63
+ getBuiltBlockHeader(): BlockHeader | undefined;
64
+ getGlobalVariables(): GlobalVariables;
65
+ getStartSpongeBlob(): SpongeBlob;
66
+ setEndSpongeBlob(endSpongeBlob: SpongeBlob): void;
67
+ getEndSpongeBlob(): SpongeBlob | undefined;
68
+ getTxEffects(): import("@aztec/stdlib/tx").TxEffect[];
53
69
  getParentLocation(location: TreeNodeLocation): TreeNodeLocation;
54
- getMergeRollupInputs(mergeLocation: TreeNodeLocation): MergeRollupInputs;
55
- getBlockRootRollupTypeAndInputs(proverId: Fr): Promise<{
70
+ getMergeRollupInputs(mergeLocation: TreeNodeLocation): TxMergeRollupPrivateInputs;
71
+ getBlockRootRollupTypeAndInputs(): {
56
72
  rollupType: string;
57
- inputs: EmptyBlockRootRollupInputs;
73
+ inputs: BlockRootEmptyTxFirstRollupPrivateInputs;
58
74
  } | {
59
75
  rollupType: string;
60
- inputs: SingleTxBlockRootRollupInputs;
76
+ inputs: BlockRootSingleTxRollupPrivateInputs;
61
77
  } | {
62
78
  rollupType: string;
63
- inputs: BlockRootRollupInputs;
64
- }>;
65
- getPaddingBlockRootInputs(proverId: Fr): Promise<EmptyBlockRootRollupInputs>;
66
- getRootParityInputs(): RootParityInputs;
79
+ inputs: BlockRootRollupPrivateInputs;
80
+ };
81
+ getParityRootInputs(): ParityRootPrivateInputs;
67
82
  getTxProvingState(txIndex: number): TxProvingState;
68
- buildHeaderFromProvingOutputs(logger?: Logger): Promise<BlockHeader>;
83
+ buildHeaderFromProvingOutputs(): Promise<BlockHeader>;
69
84
  isReadyForMergeRollup(location: TreeNodeLocation): boolean;
70
85
  isReadyForBlockRootRollup(): boolean;
71
86
  isReadyForRootParity(): boolean;
72
87
  isComplete(): boolean;
73
88
  verifyState(): boolean;
89
+ getError(): string | undefined;
74
90
  reject(reason: string): void;
75
91
  }
76
92
  //# sourceMappingURL=block-proving-state.d.ts.map
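
The new declarations pair each setXProof method with a tryStartProvingX guard, and the ProofState type tracks an in-flight flag alongside the eventual output. A minimal TypeScript sketch of that pattern is shown below; the generic slot type is an assumption standing in for ProofState<T, PROOF_LENGTH>.

// Each proof slot records whether a proof is in flight and, later, its output.
type ProofSlot<T> = { isProving?: boolean; provingOutput?: T };

function tryStart<T>(slot: ProofSlot<T>): boolean {
  // Only the first caller gets to start this proof; later callers back off.
  if (slot.isProving || slot.provingOutput !== undefined) {
    return false;
  }
  slot.isProving = true;
  return true;
}

function setOutput<T>(slot: ProofSlot<T>, output: T): void {
  slot.provingOutput = output;
  slot.isProving = false;
}

// Usage sketch: guard before enqueueing a proving job, record the result when it returns.
const blockRoot: ProofSlot<string> = {};
if (tryStart(blockRoot)) {
  setOutput(blockRoot, 'proof-bytes');
}
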
@@ -1 +1 @@
1
- {"version":3,"file":"block-proving-state.d.ts","sourceRoot":"","sources":["../../src/orchestrator/block-proving-state.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EACL,KAAK,cAAc,EAGnB,KAAK,wCAAwC,EAC7C,KAAK,yCAAyC,EAI/C,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,6BAA6B,CAAC;AACzD,OAAO,EAAqB,KAAK,gBAAgB,EAAuB,MAAM,yBAAyB,CAAC;AAGxG,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AACnD,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,iCAAiC,CAAC;AACrF,OAAO,EAAE,KAAK,kBAAkB,EAAmB,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AAClG,OAAO,EACL,KAAK,6BAA6B,EAClC,KAAK,iCAAiC,EAGtC,qBAAqB,EAErB,0BAA0B,EAC1B,iBAAiB,EAEjB,6BAA6B,EAC9B,MAAM,sBAAsB,CAAC;AAE9B,OAAO,KAAK,EAAE,sBAAsB,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAChF,OAAO,EAAE,KAAK,WAAW,EAAE,KAAK,eAAe,EAAkB,MAAM,kBAAkB,CAAC;AAG1F,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAE5D,MAAM,MAAM,aAAa,GAAG,GAAG,CAAC,YAAY,EAAE,sBAAsB,CAAC,CAAC;AAEtE;;;GAGG;AACH,qBAAa,iBAAiB;;aAiBV,KAAK,EAAE,MAAM;aACb,eAAe,EAAE,eAAe;aAChC,iBAAiB,EAAE,EAAE,EAAE;IACvC,OAAO,CAAC,QAAQ,CAAC,+BAA+B;IAChD,OAAO,CAAC,QAAQ,CAAC,uCAAuC;IACxD,OAAO,CAAC,QAAQ,CAAC,mBAAmB;IACpC,OAAO,CAAC,QAAQ,CAAC,qBAAqB;IACtC,OAAO,CAAC,QAAQ,CAAC,mBAAmB;IACpC,OAAO,CAAC,QAAQ,CAAC,WAAW;IAxB9B,OAAO,CAAC,yBAAyB,CAEF;IAC/B,OAAO,CAAC,wBAAwB,CAAoE;IACpG,OAAO,CAAC,uBAAuB,CAAgE;IAC/F,OAAO,CAAC,sBAAsB,CAEhB;IACP,sBAAsB,EAAE,OAAO,CAAS;IACxC,KAAK,EAAE,OAAO,GAAG,SAAS,CAAC;IAC3B,eAAe,EAAE,UAAU,GAAG,SAAS,CAAC;IACxC,WAAW,EAAE,MAAM,CAAC;IAC3B,OAAO,CAAC,GAAG,CAAwB;IAC5B,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;gBAGf,KAAK,EAAE,MAAM,EACb,eAAe,EAAE,eAAe,EAChC,iBAAiB,EAAE,EAAE,EAAE,EACtB,+BAA+B,EAAE,KAAK,CAAC,EAAE,EAAE,OAAO,wCAAwC,CAAC,EAC3F,uCAAuC,EAAE,sBAAsB,EAC/D,mBAAmB,EAAE,sBAAsB,EAC3C,qBAAqB,EAAE,KAAK,CAAC,EAAE,EAAE,OAAO,cAAc,CAAC,EACvD,mBAAmB,EAAE,WAAW,EAChC,WAAW,EAAE,iBAAiB;IAMjD,IAAW,WAAW,WAErB;IAEM,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM;IAanD,QAAQ,CAAC,EAAE,EAAE,cAAc;IAU3B,kBAAkB,CACvB,OAAO,EAAE,MAAM,EACf,aAAa,EAAE,6BAA6B,CAC1C,6BAA6B,EAC7B,OAAO,yCAAyC,CACjD,GACA,gBAAgB;IAIZ,mBAAmB,CACxB,QAAQ,EAAE,gBAAgB,EAC1B,aAAa,EAAE,6BAA6B,CAC1C,6BAA6B,EAC7B,OAAO,yCAAyC,CACjD;IAMI,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,EAAE,6BAA6B,CAAC,kBAAkB,CAAC;IASlG,kBAAkB,CAAC,aAAa,EAAE,6BAA6B,CAAC,kBAAkB,CAAC;IAInF,uBAAuB,CAC5B,aAAa,EAAE,6BAA6B,CAC1C,iCAAiC,EACjC,OAAO,yCAAyC,CACjD;IAMH,IAAW,MAAM,qBAEhB;IAED,yFAAyF;IACzF,IAAW,WAAW,IAAI,MAAM,CAE/B;IAEM,iBAAiB,CAAC,QAAQ,EAAE,gBAAgB;IAI5C,oBAAoB,CAAC,aAAa,EAAE,gBAAgB;IAS9C,+BAA+B,CAAC,QAAQ,EAAE,EAAE;;;;;;;;;;IAmD5C,yBAAyB,CAAC,QAAQ,EAAE,EAAE;IA+B5C,mBAAmB;IAYnB,iBAAiB,CAAC,OAAO,EAAE,MAAM;IAI3B,6BAA6B,CAAC,MAAM,CAAC,EAAE,MAAM;IA0BnD,qBAAqB,CAAC,QAAQ,EAAE,gBAAgB;IAKhD,yBAAyB;IAMzB,oBAAoB;IAIpB,UAAU;IAKV,WAAW;IAIX,MAAM,CAAC,MAAM,EAAE,MAAM;CA2D7B"}
1
+ {"version":3,"file":"block-proving-state.d.ts","sourceRoot":"","sources":["../../src/orchestrator/block-proving-state.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAClD,OAAO,EACL,KAAK,cAAc,EACnB,KAAK,6CAA6C,EAElD,KAAK,yCAAyC,EAE/C,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EAAE,KAAK,KAAK,EAAgB,MAAM,6BAA6B,CAAC;AACvE,OAAO,EAAE,KAAK,gBAAgB,EAAuB,MAAM,yBAAyB,CAAC;AACrF,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,iCAAiC,CAAC;AACrF,OAAO,EAAE,KAAK,kBAAkB,EAAE,uBAAuB,EAAE,MAAM,sBAAsB,CAAC;AAExF,OAAO,EACL,uBAAuB,EACvB,wCAAwC,EAExC,4BAA4B,EAE5B,oCAAoC,EACpC,sBAAsB,EACtB,0BAA0B,EAC1B,KAAK,oBAAoB,EAC1B,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,sBAAsB,EAAE,MAAM,qBAAqB,CAAC;AAC7D,OAAO,EAAE,KAAK,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACrE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAGlD,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AAC5E,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAE5D,MAAM,MAAM,UAAU,CAAC,CAAC,EAAE,YAAY,SAAS,MAAM,IAAI;IACvD,aAAa,CAAC,EAAE,6BAA6B,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC/D,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB,CAAC;AAEF;;;GAGG;AACH,qBAAa,iBAAiB;;aAmBV,KAAK,EAAE,MAAM;aACb,WAAW,EAAE,MAAM;aACnB,WAAW,EAAE,MAAM;IACnC,OAAO,CAAC,QAAQ,CAAC,SAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,SAAS;aACV,uBAAuB,EAAE,sBAAsB;IAC/D,OAAO,CAAC,QAAQ,CAAC,sBAAsB;IACvC,OAAO,CAAC,QAAQ,CAAC,6BAA6B;IAC9C,OAAO,CAAC,QAAQ,CAAC,uCAAuC;aAIxC,4BAA4B,EAAE,sBAAsB;IACpE,OAAO,CAAC,QAAQ,CAAC,qCAAqC;IACtD,OAAO,CAAC,QAAQ,CAAC,eAAe;IACzB,gBAAgB,EAAE,sBAAsB;IAjCjD,OAAO,CAAC,iBAAiB,CAEM;IAC/B,OAAO,CAAC,gBAAgB,CAGC;IACzB,OAAO,CAAC,eAAe,CAAmF;IAC1G,OAAO,CAAC,cAAc,CAER;IACd,OAAO,CAAC,gBAAgB,CAA0B;IAClD,OAAO,CAAC,aAAa,CAAyB;IAC9C,OAAO,CAAC,GAAG,CAAwB;IACnC,OAAO,CAAC,YAAY,CAAU;IAC9B,OAAO,CAAC,KAAK,CAAqB;gBAGhB,KAAK,EAAE,MAAM,EACb,WAAW,EAAE,MAAM,EACnB,WAAW,EAAE,MAAM,EAClB,SAAS,EAAE,sBAAsB,EACjC,SAAS,EAAE,MAAM,EAClB,uBAAuB,EAAE,sBAAsB,EAC9C,sBAAsB,EAAE,KAAK,CAAC,EAAE,EAAE,OAAO,cAAc,CAAC,EACxD,6BAA6B,EAAE,sBAAsB,EACrD,uCAAuC,EAAE,KAAK,CAC7D,EAAE,EACF,OAAO,6CAA6C,CACrD,EACe,4BAA4B,EAAE,sBAAsB,EACnD,qCAAqC,EAAE,WAAW,EAClD,eAAe,EAAE,UAAU,EACrC,gBAAgB,EAAE,sBAAsB;IAUjD,IAAW,WAAW,IAAI,MAAM,CAE/B;IAGM,QAAQ,CAAC,EAAE,EAAE,cAAc;IAS3B,cAAc;IAId,eAAe;IAIf,mBAAmB,CAAC,OAAO,EAAE,MAAM;IASnC,kBAAkB,CACvB,OAAO,EAAE,MAAM,EACf,aAAa,EAAE,6BAA6B,CAC1C,oBAAoB,EACpB,OAAO,yCAAyC,CACjD,GACA,gBAAgB;IAIZ,oBAAoB,CAAC,QAAQ,EAAE,gBAAgB;IAS/C,mBAAmB,CACxB,QAAQ,EAAE,gBAAgB,EAC1B,aAAa,EAAE,6BAA6B,CAC1C,oBAAoB,EACpB,OAAO,yCAAyC,CACjD;IAKI,yBAAyB,CAAC,KAAK,EAAE,MAAM;IAUvC,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,EAAE,6BAA6B,CAAC,kBAAkB,CAAC;IASlG,yBAAyB;IASzB,kBAAkB,CAAC,aAAa,EAAE,6BAA6B,CAAC,kBAAkB,CAAC;IAInF,wBAAwB;IASxB,uBAAuB,CAC5B,aAAa,EAAE,6BAA6B,CAC1C,uBAAuB,EACvB,OAAO,yCAAyC,CACjD,GACA,gBAAgB;IAKZ,wBAAwB;IAIxB,mBAAmB,CAAC,WAAW,EAAE,WAAW;IAI5C,mBAAmB;IAInB,kBAAkB;IAkBlB,kBAAkB;IAIlB,gBAAgB,CAAC,aAAa,EAAE,UAAU;IAI1C,gBAAgB;IAIhB,YAAY;IAIZ,iBAAiB,CAAC,QAAQ,EAAE,gBAAgB;IAI5C,oBAAoB,CAAC,aAAa,EAAE,gBAAgB;IASpD,+BAA+B;;;;;;;;;;IAuE/B,mBAAmB;IAWnB,iBAAiB,CAAC,OAAO,EAAE,MAAM;IAI3B,6BAA6B;IAQnC,qBAAqB,CAAC,QAAQ,EAAE,gBAAgB;IAKhD,yBAAyB;IAMzB,oBAAoB;IAIpB,UAAU;IAIV,WAAW;IAIX,QAAQ;IAIR,MAAM,CAAC,MAAM,EAAE,MAAM;CAgB7B"}