@aztec/prover-client 0.0.1-fake-c83136db25 → 0.0.1-fake-ceab37513c
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- package/dest/bin/get-proof-inputs.d.ts +2 -0
- package/dest/bin/get-proof-inputs.d.ts.map +1 -0
- package/dest/bin/get-proof-inputs.js +51 -0
- package/dest/block-factory/light.d.ts +3 -5
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +9 -16
- package/dest/config.js +1 -1
- package/dest/mocks/fixtures.d.ts +1 -4
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +3 -31
- package/dest/mocks/test_context.d.ts +9 -32
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +22 -78
- package/dest/orchestrator/block-building-helpers.d.ts +31 -33
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +137 -126
- package/dest/orchestrator/block-proving-state.d.ts +53 -60
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +187 -214
- package/dest/orchestrator/epoch-proving-state.d.ts +28 -34
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +84 -128
- package/dest/orchestrator/orchestrator.d.ts +30 -31
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +236 -368
- package/dest/orchestrator/tx-proving-state.d.ts +9 -11
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +23 -26
- package/dest/prover-client/server-epoch-prover.d.ts +8 -9
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +9 -9
- package/dest/proving_broker/broker_prover_facade.d.ts +15 -20
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +21 -36
- package/dest/proving_broker/fixtures.js +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +0 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +0 -1
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +18 -29
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +18 -38
- package/dest/test/mock_prover.d.ts +17 -22
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +20 -35
- package/package.json +17 -16
- package/src/bin/get-proof-inputs.ts +59 -0
- package/src/block-factory/light.ts +9 -35
- package/src/config.ts +1 -1
- package/src/mocks/fixtures.ts +11 -39
- package/src/mocks/test_context.ts +31 -137
- package/src/orchestrator/block-building-helpers.ts +211 -211
- package/src/orchestrator/block-proving-state.ts +245 -235
- package/src/orchestrator/epoch-proving-state.ts +127 -172
- package/src/orchestrator/orchestrator.ts +303 -545
- package/src/orchestrator/tx-proving-state.ts +43 -49
- package/src/prover-client/server-epoch-prover.ts +18 -28
- package/src/proving_broker/broker_prover_facade.ts +86 -157
- package/src/proving_broker/fixtures.ts +1 -1
- package/src/proving_broker/proof_store/index.ts +0 -1
- package/src/proving_broker/proving_broker.ts +18 -36
- package/src/proving_broker/proving_job_controller.ts +18 -38
- package/src/test/mock_prover.ts +60 -142
- package/dest/orchestrator/checkpoint-proving-state.d.ts +0 -63
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +0 -1
- package/dest/orchestrator/checkpoint-proving-state.js +0 -211
- package/src/orchestrator/checkpoint-proving-state.ts +0 -299
--- package/dest/orchestrator/block-building-helpers.d.ts
+++ package/dest/orchestrator/block-building-helpers.d.ts
@@ -1,13 +1,14 @@
-import { BatchedBlobAccumulator, SpongeBlob } from '@aztec/blob-lib';
-import { Fr } from '@aztec/foundation/fields';
-import { type
+import { BatchedBlobAccumulator, Blob, type SpongeBlob } from '@aztec/blob-lib';
+import { BLS12Point, Fr } from '@aztec/foundation/fields';
+import { type Tuple } from '@aztec/foundation/serialize';
 import { MembershipWitness } from '@aztec/foundation/trees';
-import {
-import
-import {
-import {
+import { PublicDataHint } from '@aztec/stdlib/avm';
+import { Body } from '@aztec/stdlib/block';
+import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
+import type { ParityPublicInputs } from '@aztec/stdlib/parity';
+import { type BaseOrMergeRollupPublicInputs, type BlockRootOrBlockMergePublicInputs, PrivateBaseRollupHints, PublicBaseRollupHints } from '@aztec/stdlib/rollup';
 import { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
-import { BlockHeader, GlobalVariables, PartialStateReference, type ProcessedTx,
+import { BlockHeader, type GlobalVariables, PartialStateReference, type ProcessedTx, StateReference, TxEffect } from '@aztec/stdlib/tx';
 import type { MerkleTreeReadOperations } from '@aztec/world-state';
 /**
  * Type representing the names of the trees for the base rollup.
@@ -17,40 +18,38 @@ type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'Public
  * Type representing the names of the trees.
  */
 export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive';
-export declare const insertSideEffectsAndBuildBaseRollupHints: (tx: ProcessedTx,
-export declare function
-export declare
-
-    blobCommitments:
-    blobs:
+export declare const insertSideEffectsAndBuildBaseRollupHints: (tx: ProcessedTx, globalVariables: GlobalVariables, newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot, db: MerkleTreeWriteOperations, startSpongeBlob: SpongeBlob) => Promise<PublicBaseRollupHints | PrivateBaseRollupHints>;
+export declare function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint): Promise<PublicDataHint>;
+export declare const buildBlobHints: (txEffects: TxEffect[]) => Promise<{
+    blobFields: Fr[];
+    blobCommitments: BLS12Point[];
+    blobs: Blob[];
     blobsHash: Fr;
-};
-export declare const buildBlobDataFromTxs: (txsPerCheckpoint: ProcessedTx[][]) => Promise<{
-    blobFieldsLengths: number[];
-    finalBlobChallenges: import("@aztec/blob-lib").FinalBlobBatchingChallenges;
 }>;
-export declare const
-export declare const
-export declare const
-
-    header: L2BlockHeader;
+export declare const accumulateBlobs: (txs: ProcessedTx[], startBlobAccumulator: BatchedBlobAccumulator) => Promise<BatchedBlobAccumulator>;
+export declare const buildHeaderFromCircuitOutputs: (previousRollupData: BaseOrMergeRollupPublicInputs[], parityPublicInputs: ParityPublicInputs, rootRollupOutputs: BlockRootOrBlockMergePublicInputs, blobsHash: Fr, endState: StateReference) => BlockHeader;
+export declare const buildHeaderAndBodyFromTxs: (txs: ProcessedTx[], globalVariables: GlobalVariables, l1ToL2Messages: Fr[], db: MerkleTreeReadOperations) => Promise<{
+    header: BlockHeader;
     body: Body;
 }>;
-export declare const buildBlockHeaderFromTxs: (txs: ProcessedTx[], globalVariables: GlobalVariables, startSpongeBlob: SpongeBlob, db: MerkleTreeReadOperations) => Promise<BlockHeader>;
 /** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
 export declare function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Promise<Fr>;
+export declare function getBlobsHashFromBlobs(inputs: Blob[]): Fr;
+export declare function getEmptyBlockBlobsHash(): Promise<Fr>;
+export declare function validateBlockRootOutput(blockRootOutput: BlockRootOrBlockMergePublicInputs, blockHeader: BlockHeader, db: MerkleTreeReadOperations): Promise<void>;
+export declare const validateState: (state: StateReference, db: MerkleTreeReadOperations) => Promise<void>;
 export declare function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations): Promise<Tuple<Fr, {
-    readonly 1:
-    readonly 4:
-    readonly 3:
-    readonly 0:
+    readonly 1: 40;
+    readonly 4: 29;
+    readonly 3: 39;
+    readonly 0: 40;
     readonly 2: 40;
 }[TID]>>;
 export declare function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations): Promise<Tuple<Fr, {
-    readonly 1:
-    readonly 4:
-    readonly 3:
-    readonly 0:
+    readonly 1: 40;
+    readonly 4: 29;
+    readonly 3: 39;
+    readonly 0: 40;
     readonly 2: 40;
 }[TID]>>;
 export declare function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot>;
@@ -59,6 +58,5 @@ export declare function getSubtreeSiblingPath(treeId: MerkleTreeId, subtreeHeigh
 export declare function getMembershipWitnessFor<N extends number>(value: Fr, treeId: MerkleTreeId, height: N, db: MerkleTreeReadOperations): Promise<MembershipWitness<N>>;
 export declare function validatePartialState(partialState: PartialStateReference, treeSnapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot>): void;
 export declare function validateTx(tx: ProcessedTx): void;
-export declare function toProofData<T extends Bufferable, PROOF_LENGTH extends number>({ inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<T, PROOF_LENGTH>, vkIndex?: number): ProofData<T, PROOF_LENGTH>;
 export {};
 //# sourceMappingURL=block-building-helpers.d.ts.map
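For orientation, here is a minimal sketch of how the reworked blob helpers declared above fit together. The names and types come from the added declarations; the import path and the surrounding function are assumptions for illustration, not code from this package:

```ts
import { BatchedBlobAccumulator } from '@aztec/blob-lib';
import type { ProcessedTx } from '@aztec/stdlib/tx';
// Hypothetical path: the helpers live in dest/orchestrator/block-building-helpers.js,
// but the package is not guaranteed to re-export them.
import { accumulateBlobs, buildBlobHints } from './orchestrator/block-building-helpers.js';

async function blobPipelineSketch(txs: ProcessedTx[], startAccumulator: BatchedBlobAccumulator) {
  // buildBlobHints now takes a block's flat TxEffect[] and returns the fields,
  // commitments, blobs, and blobsHash directly, replacing the removed
  // buildBlobDataFromTxs, which took ProcessedTx[][] grouped per checkpoint.
  const { blobFields, blobCommitments, blobs, blobsHash } = await buildBlobHints(txs.map(tx => tx.txEffect));
  // accumulateBlobs likewise now takes ProcessedTx[] rather than raw blob fields.
  const endAccumulator = await accumulateBlobs(txs, startAccumulator);
  return { blobFields, blobCommitments, blobs, blobsHash, endAccumulator };
}
```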
--- package/dest/orchestrator/block-building-helpers.d.ts.map
+++ package/dest/orchestrator/block-building-helpers.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"block-building-helpers.d.ts","sourceRoot":"","sources":["../../src/orchestrator/block-building-helpers.ts"],"names":[],"mappings":"AAAA,OAAO,
+{"version":3,"file":"block-building-helpers.d.ts","sourceRoot":"","sources":["../../src/orchestrator/block-building-helpers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,IAAI,EAAE,KAAK,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAiBhF,OAAO,EAAE,UAAU,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC1D,OAAO,EAAE,KAAK,KAAK,EAAgC,MAAM,6BAA6B,CAAC;AACvF,OAAO,EAAE,iBAAiB,EAAyD,MAAM,yBAAyB,CAAC;AAInH,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,IAAI,EAAE,MAAM,qBAAqB,CAAC;AAC3C,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAEjF,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC/D,OAAO,EACL,KAAK,6BAA6B,EAElC,KAAK,iCAAiC,EACtC,sBAAsB,EAEtB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EACL,sBAAsB,EACtB,YAAY,EAKb,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EACL,WAAW,EAEX,KAAK,eAAe,EACpB,qBAAqB,EACrB,KAAK,WAAW,EAChB,cAAc,EACd,QAAQ,EACT,MAAM,kBAAkB,CAAC;AAE1B,OAAO,KAAK,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAEnE;;GAEG;AACH,KAAK,aAAa,GAAG,cAAc,GAAG,cAAc,GAAG,eAAe,GAAG,gBAAgB,CAAC;AAC1F;;GAEG;AACH,MAAM,MAAM,SAAS,GAAG,aAAa,GAAG,mBAAmB,GAAG,SAAS,CAAC;AAGxE,eAAO,MAAM,wCAAwC,kOAgKpD,CAAC;AAEF,wBAAsB,iBAAiB,CAAC,EAAE,EAAE,yBAAyB,EAAE,QAAQ,EAAE,MAAM,2BAkBtF;AAED,eAAO,MAAM,cAAc;;;;;EAe1B,CAAC;AAEF,eAAO,MAAM,eAAe,uGAS3B,CAAC;AAEF,eAAO,MAAM,6BAA6B,6MAmCzC,CAAC;AAEF,eAAO,MAAM,yBAAyB;;;EAwCrC,CAAC;AAEF,uFAAuF;AACvF,wBAAsB,+BAA+B,CAAC,sBAAsB,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAO/F;AAED,wBAAgB,qBAAqB,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,CAExD;AAKD,wBAAsB,sBAAsB,IAAI,OAAO,CAAC,EAAE,CAAC,CAG1D;AAID,wBAAsB,uBAAuB,CAC3C,eAAe,EAAE,iCAAiC,EAClD,WAAW,EAAE,WAAW,EACxB,EAAE,EAAE,wBAAwB,iBAM7B;AAED,eAAO,MAAM,aAAa,wEAmBzB,CAAC;AAEF,wBAAsB,kBAAkB,CAAC,GAAG,SAAS,YAAY,EAAE,MAAM,EAAE,GAAG,EAAE,EAAE,EAAE,wBAAwB;;;;;;SAI3G;AAED,wBAAsB,sBAAsB,CAAC,GAAG,SAAS,YAAY,EAAE,MAAM,EAAE,GAAG,EAAE,EAAE,EAAE,wBAAwB;;;;;;SAI/G;AAED,wBAAsB,eAAe,CAAC,EAAE,EAAE,YAAY,EAAE,EAAE,EAAE,wBAAwB,GAAG,OAAO,CAAC,sBAAsB,CAAC,CAGrH;AAED,wBAAgB,0BAA0B,CAAC,CAAC,SAAS,MAAM,EAAE,MAAM,EAAE,CAAC,wBAMrE;AA4CD,wBAAsB,qBAAqB,CACzC,MAAM,EAAE,YAAY,EACpB,aAAa,EAAE,MAAM,EACrB,EAAE,EAAE,wBAAwB,GAC3B,OAAO,CAAC,EAAE,EAAE,CAAC,CAMf;AAGD,wBAAsB,uBAAuB,CAAC,CAAC,SAAS,MAAM,EAC5D,KAAK,EAAE,EAAE,EACT,MAAM,EAAE,YAAY,EACpB,MAAM,EAAE,CAAC,EACT,EAAE,EAAE,wBAAwB,GAC3B,OAAO,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAY/B;AAED,wBAAgB,oBAAoB,CAClC,YAAY,EAAE,qBAAqB,EACnC,aAAa,EAAE,GAAG,CAAC,YAAY,EAAE,sBAAsB,CAAC,QASzD;AAqBD,wBAAgB,UAAU,CAAC,EAAE,EAAE,WAAW,QAczC"}
--- package/dest/orchestrator/block-building-helpers.js
+++ package/dest/orchestrator/block-building-helpers.js
@@ -1,190 +1,170 @@
-import {
-import { ARCHIVE_HEIGHT,
+import { Blob } from '@aztec/blob-lib';
+import { ARCHIVE_HEIGHT, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PUBLIC_DATA_TREE_HEIGHT } from '@aztec/constants';
 import { makeTuple } from '@aztec/foundation/array';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { sha256Trunc } from '@aztec/foundation/crypto';
-import { Fr } from '@aztec/foundation/fields';
+import { sha256ToField, sha256Trunc } from '@aztec/foundation/crypto';
+import { BLS12Point, Fr } from '@aztec/foundation/fields';
 import { assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
-import { MembershipWitness, MerkleTreeCalculator,
-import {
-import {
+import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleTreeRoot } from '@aztec/foundation/trees';
+import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
+import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
 import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
-import {
-import {
+import { PublicDataHint } from '@aztec/stdlib/avm';
+import { Body } from '@aztec/stdlib/block';
 import { ContractClassLogFields } from '@aztec/stdlib/logs';
-import {
-import {
-import {
-import { BlockHeader, ContentCommitment, GlobalVariables, PartialStateReference, StateReference } from '@aztec/stdlib/tx';
-import { VkData } from '@aztec/stdlib/vks';
+import { BlockConstantData, PrivateBaseRollupHints, PrivateBaseStateDiffHints, PublicBaseRollupHints } from '@aztec/stdlib/rollup';
+import { AppendOnlyTreeSnapshot, MerkleTreeId, NullifierLeafPreimage, PublicDataTreeLeaf, PublicDataTreeLeafPreimage, getTreeHeight } from '@aztec/stdlib/trees';
+import { BlockHeader, ContentCommitment, PartialStateReference, StateReference } from '@aztec/stdlib/tx';
 import { Attributes, runInSpan } from '@aztec/telemetry-client';
 // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
-export const insertSideEffectsAndBuildBaseRollupHints = runInSpan('BlockBuilderHelpers', 'buildBaseRollupHints', async (span, tx,
+export const insertSideEffectsAndBuildBaseRollupHints = runInSpan('BlockBuilderHelpers', 'buildBaseRollupHints', async (span, tx, globalVariables, newL1ToL2MessageTreeSnapshot, db, startSpongeBlob)=>{
     span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
     // Get trees info before any changes hit
+    const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const start = new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db));
-    // Get the
-    const
+    // Get the subtree sibling paths for the circuit
+    const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db);
+    const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, (i)=>i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO);
     // Update the note hash trees with the new items being inserted to get the new roots
     // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
     const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
     await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
+    // Create data hint for reading fee payer initial balance in Fee Juice
+    const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
+    const feePayerFeeJuiceBalanceReadHint = await getPublicDataHint(db, leafSlot.toBigInt());
     // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
     // All reads that refer to writes in the same tx are transient and can be simplified out.
     const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
     // Update the nullifier tree, capturing the low nullifier info for each individual operation
-    const { lowLeavesWitnessData: nullifierWitnessLeaves, newSubtreeSiblingPath:
+    const { lowLeavesWitnessData: nullifierWitnessLeaves, newSubtreeSiblingPath: nullifiersSubtreeSiblingPath, sortedNewLeaves: sortednullifiers, sortedNewLeavesIndexes } = await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map((n)=>n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT);
     if (nullifierWitnessLeaves === undefined) {
         throw new Error(`Could not craft nullifier batch insertion proofs`);
     }
-
-    const
+    // Extract witness objects from returned data
+    const nullifierPredecessorMembershipWitnessesWithoutPadding = nullifierWitnessLeaves.map((l)=>MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)));
+    const nullifierSubtreeSiblingPathArray = nullifiersSubtreeSiblingPath.toFields();
+    const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, (i)=>i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO);
+    // Append new data to startSpongeBlob
+    const inputSpongeBlob = startSpongeBlob.clone();
+    await startSpongeBlob.absorb(tx.txEffect.toBlobFields());
     const contractClassLogsFields = makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, (i)=>tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty());
     if (tx.avmProvingRequest) {
+        const blockHash = await tx.data.constants.historicalHeader.hash();
+        const archiveRootMembershipWitness = await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db);
         return PublicBaseRollupHints.from({
-            startSpongeBlob,
+            startSpongeBlob: inputSpongeBlob,
             lastArchive,
-
+            archiveRootMembershipWitness,
             contractClassLogsFields
         });
     } else {
         if (txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 || txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 || txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1) {
             throw new Error(`More than one public data write in a private only tx`);
         }
-
-        const
-        const
-        const
-
-            throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
-        }
-        // Extract witness objects from returned data
-        const nullifierPredecessorMembershipWitnessesWithoutPadding = nullifierWitnessLeaves.map((l)=>MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)));
-        const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
-            noteHashSubtreeRootSiblingPath,
-            nullifierPredecessorPreimages: padArrayEnd(nullifierWitnessLeaves.map((l)=>l.leafPreimage), NullifierLeafPreimage.empty(), MAX_NULLIFIERS_PER_TX),
+        const feeWriteLowLeafPreimage = txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0] || PublicDataTreeLeafPreimage.empty();
+        const feeWriteLowLeafMembershipWitness = txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0] || MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT);
+        const feeWriteSiblingPath = txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths[0] || makeTuple(PUBLIC_DATA_TREE_HEIGHT, ()=>Fr.ZERO);
+        const stateDiffHints = PrivateBaseStateDiffHints.from({
+            nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>i < nullifierWitnessLeaves.length ? nullifierWitnessLeaves[i].leafPreimage : NullifierLeafPreimage.empty()),
             nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>i < nullifierPredecessorMembershipWitnessesWithoutPadding.length ? nullifierPredecessorMembershipWitnessesWithoutPadding[i] : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT)),
-            sortedNullifiers:
-            sortedNullifierIndexes:
-
-
+            sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>Fr.fromBuffer(sortednullifiers[i])),
+            sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, (i)=>sortedNewLeavesIndexes[i]),
+            noteHashSubtreeSiblingPath,
+            nullifierSubtreeSiblingPath,
+            feeWriteLowLeafPreimage,
+            feeWriteLowLeafMembershipWitness,
+            feeWriteSiblingPath
         });
+        const blockHash = await tx.data.constants.historicalHeader.hash();
+        const archiveRootMembershipWitness = await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db);
         const constants = BlockConstantData.from({
             lastArchive,
-
-            vkTreeRoot:
-
-            globalVariables
-            proverId
+            newL1ToL2: newL1ToL2MessageTreeSnapshot,
+            vkTreeRoot: getVKTreeRoot(),
+            protocolContractTreeRoot,
+            globalVariables
         });
         return PrivateBaseRollupHints.from({
             start,
-            startSpongeBlob,
-
-
-
+            startSpongeBlob: inputSpongeBlob,
+            stateDiffHints,
+            feePayerFeeJuiceBalanceReadHint,
+            archiveRootMembershipWitness,
             contractClassLogsFields,
             constants
         });
     }
 });
-export function
-const
-
-
-}
-
-const
-
+export async function getPublicDataHint(db, leafSlot) {
+    const { index } = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot) ?? {};
+    if (index === undefined) {
+        throw new Error(`Cannot find the previous value index for public data ${leafSlot}.`);
+    }
+    const siblingPath = await db.getSiblingPath(MerkleTreeId.PUBLIC_DATA_TREE, index);
+    const membershipWitness = new MembershipWitness(PUBLIC_DATA_TREE_HEIGHT, index, siblingPath.toTuple());
+    const leafPreimage = await db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, index);
+    if (!leafPreimage) {
+        throw new Error(`Cannot find the leaf preimage for public data tree at index ${index}.`);
+    }
+    const exists = leafPreimage.leaf.slot.toBigInt() === leafSlot;
+    const value = exists ? leafPreimage.leaf.value : Fr.ZERO;
+    return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage);
 }
-
-
-
-
-
-
-
+export const buildBlobHints = runInSpan('BlockBuilderHelpers', 'buildBlobHints', async (_span, txEffects)=>{
+    const blobFields = txEffects.flatMap((tx)=>tx.toBlobFields());
+    const blobs = await Blob.getBlobsPerBlock(blobFields);
+    // TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
+    // - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
+    // - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
+    // exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
+    // We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
+    const blobCommitments = blobs.map((b)=>BLS12Point.decompress(b.commitment));
+    const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
     return {
+        blobFields,
         blobCommitments,
         blobs,
        blobsHash
     };
-};
-
-
-    const
-
-    ]));
-    const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
-    return {
-        blobFieldsLengths: blobFields.map((fields)=>fields.length),
-        finalBlobChallenges
-    };
-};
-export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint)=>{
-    const blobs = blobFieldsPerCheckpoint.map((blobFields)=>getBlobsPerL1Block(blobFields));
-    return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
-};
-export const accumulateBlobs = runInSpan('BlockBuilderHelpers', 'accumulateBlobs', async (_span, blobFields, startBlobAccumulator)=>{
-    const blobs = getBlobsPerL1Block(blobFields);
-    const endBlobAccumulator = await startBlobAccumulator.accumulateBlobs(blobs);
+});
+export const accumulateBlobs = runInSpan('BlockBuilderHelpers', 'accumulateBlobs', async (_span, txs, startBlobAccumulator)=>{
+    const blobFields = txs.flatMap((tx)=>tx.txEffect.toBlobFields());
+    const blobs = await Blob.getBlobsPerBlock(blobFields);
+    const endBlobAccumulator = startBlobAccumulator.accumulateBlobs(blobs);
     return endBlobAccumulator;
 });
-export const buildHeaderFromCircuitOutputs = runInSpan('BlockBuilderHelpers', 'buildHeaderFromCircuitOutputs',
-
-
-
-
-
-
-
-
-
-
-
-    const spongeBlobHash = await blockRootRollupOutput.endSpongeBlob.clone().squeeze();
-    return new BlockHeader(blockRootRollupOutput.previousArchive, blockRootRollupOutput.endState, spongeBlobHash, globalVariables, blockRootRollupOutput.accumulatedFees, blockRootRollupOutput.accumulatedManaUsed);
+export const buildHeaderFromCircuitOutputs = runInSpan('BlockBuilderHelpers', 'buildHeaderFromCircuitOutputs', (_span, previousRollupData, parityPublicInputs, rootRollupOutputs, blobsHash, endState)=>{
+    if (previousRollupData.length > 2) {
+        throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
+    }
+    const outHash = previousRollupData.length === 0 ? Fr.ZERO : previousRollupData.length === 1 ? previousRollupData[0].outHash : sha256ToField([
+        previousRollupData[0].outHash,
+        previousRollupData[1].outHash
+    ]);
+    const contentCommitment = new ContentCommitment(blobsHash, parityPublicInputs.shaRoot, outHash);
+    const accumulatedFees = previousRollupData.reduce((sum, d)=>sum.add(d.accumulatedFees), Fr.ZERO);
+    const accumulatedManaUsed = previousRollupData.reduce((sum, d)=>sum.add(d.accumulatedManaUsed), Fr.ZERO);
+    return new BlockHeader(rootRollupOutputs.previousArchive, contentCommitment, endState, rootRollupOutputs.endGlobalVariables, accumulatedFees, accumulatedManaUsed);
 });
-export const buildHeaderAndBodyFromTxs = runInSpan('BlockBuilderHelpers', 'buildHeaderAndBodyFromTxs', async (span, txs, globalVariables, l1ToL2Messages, db
+export const buildHeaderAndBodyFromTxs = runInSpan('BlockBuilderHelpers', 'buildHeaderAndBodyFromTxs', async (span, txs, globalVariables, l1ToL2Messages, db)=>{
     span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
     const stateReference = new StateReference(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db)));
     const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const txEffects = txs.map((tx)=>tx.txEffect);
     const body = new Body(txEffects);
     const txOutHashes = txEffects.map((tx)=>tx.txOutHash());
-    const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(
+    const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeUnbalancedMerkleTreeRoot(txOutHashes));
     const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
-    const
-    // TODO(#17027): This only works when there's one block per checkpoint.
-    const blobFields = [
-        new Fr(blockBlobFields.length + 1)
-    ].concat(blockBlobFields);
-    const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
+    const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobsPerBlock(body.toBlobFields()));
     const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
     const fees = txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
     const manaUsed = txs.reduce((acc, tx)=>acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
-    const
-    await endSpongeBlob.absorb(blockBlobFields);
-    const spongeBlobHash = await endSpongeBlob.squeeze();
-    const header = new L2BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed, spongeBlobHash);
+    const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);
     return {
         header,
         body
     };
 });
-export const buildBlockHeaderFromTxs = runInSpan('BlockBuilderHelpers', 'buildBlockHeaderFromTxs', async (span, txs, globalVariables, startSpongeBlob, db)=>{
-    span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
-    const stateReference = new StateReference(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db)));
-    const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const blobFields = getBlockBlobFields(txs.map((tx)=>tx.txEffect));
-    const endSpongeBlob = startSpongeBlob.clone();
-    await endSpongeBlob.absorb(blobFields);
-    const spongeBlobHash = await endSpongeBlob.squeeze();
-    const txEffects = txs.map((tx)=>tx.txEffect);
-    const fees = txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
-    const manaUsed = txs.reduce((acc, tx)=>acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
-    return new BlockHeader(previousArchive, stateReference, spongeBlobHash, globalVariables, fees, manaUsed);
-});
 /** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */ export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages) {
     const l1ToL2Messages = padArrayEnd(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
     const hasher = (left, right)=>Promise.resolve(sha256Trunc(Buffer.concat([
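Worth noting in the hunk above: the new getPublicDataHint performs a low-leaf lookup, so the hint it returns is valid whether or not a leaf exists exactly at leafSlot, and its value degrades to zero when the slot is absent. A hedged sketch of the fee-payer balance read it backs, condensed from the added lines (assume `db` and `tx` are in scope as inside insertSideEffectsAndBuildBaseRollupHints):

```ts
// Not an exact excerpt; mirrors the added fee-payer read-hint flow above.
const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
const hint = await getPublicDataHint(db, leafSlot.toBigInt());
// hint.value is Fr.ZERO when the fee payer has no Fee Juice balance leaf yet
// (the preimage then belongs to the preceding low leaf); otherwise it is the
// balance currently stored at leafSlot.
```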
@@ -195,6 +175,42 @@ export const buildBlockHeaderFromTxs = runInSpan('BlockBuilderHelpers', 'buildBl
     const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
     return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map((msg)=>msg.toBuffer())));
 }
+export function getBlobsHashFromBlobs(inputs) {
+    return sha256ToField(inputs.map((b)=>b.getEthVersionedBlobHash()));
+}
+// Note: tested against the constant values in block_root/empty_block_root_rollup_inputs.nr, set by block_building_helpers.test.ts.
+// Having this separate fn hopefully makes it clear how we treat empty blocks and their blobs, and won't break if we decide to change how
+// getBlobsPerBlock() works on empty input.
+export async function getEmptyBlockBlobsHash() {
+    const blobHash = (await Blob.getBlobsPerBlock([])).map((b)=>b.getEthVersionedBlobHash());
+    return sha256ToField(blobHash);
+}
+// Validate that the roots of all local trees match the output of the root circuit simulation
+// TODO: does this get called?
+export async function validateBlockRootOutput(blockRootOutput, blockHeader, db) {
+    await Promise.all([
+        validateState(blockHeader.state, db),
+        validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), blockRootOutput.newArchive, 'Archive')
+    ]);
+}
+export const validateState = runInSpan('BlockBuilderHelpers', 'validateState', async (_span, state, db)=>{
+    const promises = [
+        MerkleTreeId.NOTE_HASH_TREE,
+        MerkleTreeId.NULLIFIER_TREE,
+        MerkleTreeId.PUBLIC_DATA_TREE
+    ].map(async (id)=>{
+        return {
+            key: id,
+            value: await getTreeSnapshot(id, db)
+        };
+    });
+    const snapshots = new Map((await Promise.all(promises)).map((obj)=>[
+        obj.key,
+        obj.value
+    ]));
+    validatePartialState(state.partial, snapshots);
+    validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), state.l1ToL2MessageTree, 'L1ToL2MessageTree');
+});
 export async function getLastSiblingPath(treeId, db) {
     const { size } = await db.getTreeInfo(treeId);
     const path = await db.getSiblingPath(treeId, size - 1n);
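getBlobsHashFromBlobs above is the block-level hash that the TODO(#13430) comment in the first hunk contrasts with the epoch-level blobCommitmentsHash. A dependency-free sketch of the iterated shape that comment describes, with plain Buffers standing in for the compressed commitments (the real logic lives in calculateBlobCommitmentsHash(), which is not part of this diff):

```ts
import { createHash } from 'crypto';

const sha256 = (b: Buffer) => createHash('sha256').update(b).digest();

// blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n)
function blobCommitmentsHashSketch(commitments: Buffer[]): Buffer {
  if (commitments.length === 0) {
    throw new Error('Expected at least one blob commitment.');
  }
  return commitments
    .slice(1)
    .reduce((acc, c) => sha256(Buffer.concat([acc, c])), sha256(commitments[0]));
}
```

By contrast, blobsHash is a single sha256 over all of a block's versioned blob hashes, which is why the comment suggests the two constructions could eventually be merged into one iterated hash over per-block blobsHash values.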
@@ -270,7 +286,7 @@ function validateSimulatedTree(localTree, simulatedTree, name, label) {
     }
 }
 export function validateTx(tx) {
-    const txHeader = tx.data.constants.
+    const txHeader = tx.data.constants.historicalHeader;
     if (txHeader.state.l1ToL2MessageTree.isEmpty()) {
         throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
     }
@@ -284,8 +300,3 @@ export function validateTx(tx) {
         throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`);
     }
 }
-export function toProofData({ inputs, proof, verificationKey }, vkIndex) {
-    const leafIndex = vkIndex || getVKIndex(verificationKey.keyAsFields);
-    const vkData = new VkData(verificationKey, leafIndex, getVKSiblingPath(leafIndex));
-    return new ProofData(inputs, proof, vkData);
-}