@aztec/blob-lib 3.0.0-devnet.2 → 3.0.0-devnet.2-patch.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/batched_blob.d.ts +26 -0
- package/dest/batched_blob.d.ts.map +1 -0
- package/dest/batched_blob.js +20 -0
- package/dest/blob.d.ts +6 -11
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob.js +5 -2
- package/dest/blob_batching.d.ts +35 -84
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +73 -108
- package/dest/blob_utils.d.ts +21 -11
- package/dest/blob_utils.d.ts.map +1 -1
- package/dest/blob_utils.js +29 -20
- package/dest/circuit_types/blob_accumulator.d.ts +4 -2
- package/dest/circuit_types/blob_accumulator.d.ts.map +1 -1
- package/dest/circuit_types/blob_accumulator.js +5 -1
- package/dest/circuit_types/final_blob_accumulator.d.ts +3 -2
- package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -1
- package/dest/circuit_types/final_blob_accumulator.js +2 -1
- package/dest/circuit_types/final_blob_batching_challenges.d.ts +3 -2
- package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -1
- package/dest/circuit_types/final_blob_batching_challenges.js +2 -1
- package/dest/circuit_types/index.d.ts +1 -1
- package/dest/encoding/block_blob_data.d.ts +22 -0
- package/dest/encoding/block_blob_data.d.ts.map +1 -0
- package/dest/encoding/block_blob_data.js +65 -0
- package/dest/encoding/block_end_marker.d.ts +11 -0
- package/dest/encoding/block_end_marker.d.ts.map +1 -0
- package/dest/encoding/block_end_marker.js +41 -0
- package/dest/encoding/block_end_state_field.d.ts +12 -0
- package/dest/encoding/block_end_state_field.d.ts.map +1 -0
- package/dest/encoding/block_end_state_field.js +39 -0
- package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
- package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
- package/dest/encoding/checkpoint_blob_data.js +67 -0
- package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
- package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
- package/dest/encoding/checkpoint_end_marker.js +28 -0
- package/dest/encoding/fixtures.d.ts +41 -0
- package/dest/encoding/fixtures.d.ts.map +1 -0
- package/dest/encoding/fixtures.js +140 -0
- package/dest/encoding/index.d.ts +10 -0
- package/dest/encoding/index.d.ts.map +1 -0
- package/dest/encoding/index.js +9 -0
- package/dest/encoding/tx_blob_data.d.ts +19 -0
- package/dest/encoding/tx_blob_data.d.ts.map +1 -0
- package/dest/encoding/tx_blob_data.js +79 -0
- package/dest/encoding/tx_start_marker.d.ts +16 -0
- package/dest/encoding/tx_start_marker.d.ts.map +1 -0
- package/dest/{encoding.js → encoding/tx_start_marker.js} +13 -59
- package/dest/errors.d.ts +1 -1
- package/dest/errors.d.ts.map +1 -1
- package/dest/hash.d.ts +13 -5
- package/dest/hash.d.ts.map +1 -1
- package/dest/hash.js +19 -8
- package/dest/index.d.ts +4 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +3 -3
- package/dest/interface.d.ts +1 -1
- package/dest/kzg_context.d.ts +6 -2
- package/dest/kzg_context.d.ts.map +1 -1
- package/dest/kzg_context.js +12 -3
- package/dest/sponge_blob.d.ts +9 -15
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +21 -36
- package/dest/testing.d.ts +8 -16
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +35 -64
- package/dest/types.d.ts +2 -1
- package/dest/types.d.ts.map +1 -1
- package/dest/types.js +1 -0
- package/package.json +8 -7
- package/src/batched_blob.ts +26 -0
- package/src/blob.ts +5 -2
- package/src/blob_batching.ts +91 -126
- package/src/blob_utils.ts +33 -22
- package/src/circuit_types/blob_accumulator.ts +13 -1
- package/src/circuit_types/final_blob_accumulator.ts +2 -1
- package/src/circuit_types/final_blob_batching_challenges.ts +2 -1
- package/src/encoding/block_blob_data.ts +102 -0
- package/src/encoding/block_end_marker.ts +55 -0
- package/src/encoding/block_end_state_field.ts +59 -0
- package/src/encoding/checkpoint_blob_data.ts +95 -0
- package/src/encoding/checkpoint_end_marker.ts +40 -0
- package/src/encoding/fixtures.ts +210 -0
- package/src/encoding/index.ts +9 -0
- package/src/encoding/tx_blob_data.ts +116 -0
- package/src/{encoding.ts → encoding/tx_start_marker.ts} +19 -76
- package/src/hash.ts +20 -8
- package/src/index.ts +3 -3
- package/src/kzg_context.ts +12 -1
- package/src/sponge_blob.ts +23 -36
- package/src/testing.ts +47 -73
- package/src/types.ts +1 -0
- package/dest/deserialize.d.ts +0 -14
- package/dest/deserialize.d.ts.map +0 -1
- package/dest/deserialize.js +0 -33
- package/dest/encoding.d.ts +0 -26
- package/dest/encoding.d.ts.map +0 -1
- package/src/deserialize.ts +0 -38
package/src/blob_batching.ts
CHANGED

@@ -1,30 +1,71 @@
-import { AZTEC_MAX_EPOCH_DURATION,
-import { poseidon2Hash
-import {
+import { AZTEC_MAX_EPOCH_DURATION, BLOBS_PER_CHECKPOINT } from '@aztec/constants';
+import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
+import { sha256ToField } from '@aztec/foundation/crypto/sha256';
+import { BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 
+import { BatchedBlob } from './batched_blob.js';
 import { Blob } from './blob.js';
-import {
+import { getBlobsPerL1Block } from './blob_utils.js';
 import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
-import {
-import {
+import { computeBlobFieldsHash, hashNoirBigNumLimbs } from './hash.js';
+import { getKzg } from './kzg_context.js';
 
 /**
  * A class to create, manage, and prove batched EVM blobs.
+ * See noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr
  */
-export class
+export class BatchedBlobAccumulator {
   constructor(
     /** Hash of Cs (to link to L1 blob hashes). */
-    public readonly
-    /** Challenge point
-    public readonly
-    /** Evaluation
-    public readonly
-    /** Commitment
-    public readonly
-    /** KZG opening
-    public readonly
+    public readonly blobCommitmentsHashAcc: Fr,
+    /** Challenge point z_acc. Final value used such that p_i(z) = y_i. */
+    public readonly zAcc: Fr,
+    /** Evaluation y_acc. Final value is is linear combination of all evaluations y_i = p_i(z) with gamma. */
+    public readonly yAcc: BLS12Fr,
+    /** Commitment c_acc. Final value is linear combination of all commitments C_i = [p_i] with gamma. */
+    public readonly cAcc: BLS12Point,
+    /** KZG opening q_acc. Final value is linear combination of all blob kzg 'proofs' Q_i with gamma. */
+    public readonly qAcc: BLS12Point,
+    /**
+     * Challenge point gamma_acc for multi opening. Used with y, C, and kzg 'proof' Q above.
+     * TODO(#13608): We calculate this by hashing natively in the circuit (hence Fr representation), but it's actually used
+     * as a BLS12Fr field elt. Is this safe? Is there a skew?
+     */
+    public readonly gammaAcc: Fr,
+    /** Simply gamma^(i + 1) at blob i. Used for calculating the i'th element of the above linear comb.s */
+    public readonly gammaPow: BLS12Fr,
+    /** Final challenge values used in evaluation. Optimistically input and checked in the final acc. */
+    public readonly finalBlobChallenges: FinalBlobBatchingChallenges,
   ) {}
 
+  /**
+   * Create the empty accumulation state of the epoch.
+   * @returns An empty blob accumulator with challenges.
+   */
+  static newWithChallenges(finalBlobChallenges: FinalBlobBatchingChallenges): BatchedBlobAccumulator {
+    return new BatchedBlobAccumulator(
+      Fr.ZERO,
+      Fr.ZERO,
+      BLS12Fr.ZERO,
+      BLS12Point.ZERO,
+      BLS12Point.ZERO,
+      Fr.ZERO,
+      BLS12Fr.ZERO,
+      finalBlobChallenges,
+    );
+  }
+
+  /**
+   * Returns an empty BatchedBlobAccumulator with precomputed challenges from all blobs in the epoch.
+   * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
+   * beforehand from ALL blobs.
+   */
+  static async fromBlobFields(blobFieldsPerCheckpoint: Fr[][]): Promise<BatchedBlobAccumulator> {
+    const finalBlobChallenges = await this.precomputeBatchedBlobChallenges(blobFieldsPerCheckpoint);
+    return BatchedBlobAccumulator.newWithChallenges(finalBlobChallenges);
+  }
+
   /**
    * Get the final batched opening proof from multiple blobs.
    * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
@@ -32,30 +73,21 @@ export class BatchedBlob {
    *
    * @returns A batched blob.
    */
-  static async batch(
-
+  static async batch(blobFieldsPerCheckpoint: Fr[][], verifyProof = false): Promise<BatchedBlob> {
+    const numCheckpoints = blobFieldsPerCheckpoint.length;
+    if (numCheckpoints > AZTEC_MAX_EPOCH_DURATION) {
       throw new Error(
-        `Too many
+        `Too many checkpoints sent to batch(). The maximum is ${AZTEC_MAX_EPOCH_DURATION}. Got ${numCheckpoints}.`,
      );
    }
 
    // Precalculate the values (z and gamma) and initialize the accumulator:
-    let acc = await this.
+    let acc = await this.fromBlobFields(blobFieldsPerCheckpoint);
    // Now we can create a multi opening proof of all input blobs:
-    for (const
-      acc = await acc.
+    for (const blobFields of blobFieldsPerCheckpoint) {
+      acc = await acc.accumulateFields(blobFields);
    }
-    return await acc.finalize();
-  }
-
-  /**
-   * Returns an empty BatchedBlobAccumulator with precomputed challenges from all blobs in the epoch.
-   * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
-   * beforehand from ALL blobs.
-   */
-  static async newAccumulator(blobs: Blob[][]): Promise<BatchedBlobAccumulator> {
-    const finalBlobChallenges = await this.precomputeBatchedBlobChallenges(blobs);
-    return BatchedBlobAccumulator.newWithChallenges(finalBlobChallenges);
+    return await acc.finalize(verifyProof);
  }
 
  /**
@@ -70,13 +102,15 @@ export class BatchedBlob {
    * @param blobs - The blobs to precompute the challenges for. Each sub-array is the blobs for an L1 block.
    * @returns Challenges z and gamma.
    */
-  static async precomputeBatchedBlobChallenges(
+  static async precomputeBatchedBlobChallenges(blobFieldsPerCheckpoint: Fr[][]): Promise<FinalBlobBatchingChallenges> {
    // Compute the final challenge z to evaluate the blobs.
    let z: Fr | undefined;
-
+    const allBlobs = [];
+    for (const blobFields of blobFieldsPerCheckpoint) {
      // Compute the hash of all the fields in the block.
-      const blobFieldsHash = await
-
+      const blobFieldsHash = await computeBlobFieldsHash(blobFields);
+      const blobs = getBlobsPerL1Block(blobFields);
+      for (const blob of blobs) {
        // Compute the challenge z for each blob and accumulate it.
        const challengeZ = await blob.computeChallengeZ(blobFieldsHash);
        if (!z) {
@@ -85,13 +119,13 @@ export class BatchedBlob {
          z = await poseidon2Hash([z, challengeZ]);
        }
      }
+      allBlobs.push(...blobs);
    }
    if (!z) {
      throw new Error('No blobs to precompute challenges for.');
    }
 
    // Now we have a shared challenge for all blobs, evaluate them...
-    const allBlobs = blobs.flat();
    const proofObjects = allBlobs.map(b => b.evaluate(z));
    const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y)));
    // ...and find the challenge for the linear combination of blobs.
@@ -105,92 +139,12 @@ export class BatchedBlob {
    return new FinalBlobBatchingChallenges(z, BLS12Fr.fromBN254Fr(gamma));
  }
 
-  verify() {
-    return kzg.verifyKzgProof(this.commitment.compress(), this.z.toBuffer(), this.y.toBuffer(), this.q.compress());
-  }
-
-  // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
-  getEthVersionedBlobHash(): Buffer {
-    return computeEthVersionedBlobHash(this.commitment.compress());
-  }
-
-  /**
-   * Returns a proof of opening of the blobs to verify on L1 using the point evaluation precompile:
-   *
-   * input[:32] - versioned_hash
-   * input[32:64] - z
-   * input[64:96] - y
-   * input[96:144] - commitment C
-   * input[144:192] - commitment Q (a 'proof' committing to the quotient polynomial q(X))
-   *
-   * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
-   */
-  getEthBlobEvaluationInputs(): `0x${string}` {
-    const buf = Buffer.concat([
-      this.getEthVersionedBlobHash(),
-      this.z.toBuffer(),
-      this.y.toBuffer(),
-      this.commitment.compress(),
-      this.q.compress(),
-    ]);
-    return `0x${buf.toString('hex')}`;
-  }
-
-  toFinalBlobAccumulator() {
-    return new FinalBlobAccumulator(this.blobCommitmentsHash, this.z, this.y, this.commitment);
-  }
-}
-
-/**
- * See noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr
- */
-export class BatchedBlobAccumulator {
-  constructor(
-    /** Hash of Cs (to link to L1 blob hashes). */
-    public readonly blobCommitmentsHashAcc: Fr,
-    /** Challenge point z_acc. Final value used such that p_i(z) = y_i. */
-    public readonly zAcc: Fr,
-    /** Evaluation y_acc. Final value is is linear combination of all evaluations y_i = p_i(z) with gamma. */
-    public readonly yAcc: BLS12Fr,
-    /** Commitment c_acc. Final value is linear combination of all commitments C_i = [p_i] with gamma. */
-    public readonly cAcc: BLS12Point,
-    /** KZG opening q_acc. Final value is linear combination of all blob kzg 'proofs' Q_i with gamma. */
-    public readonly qAcc: BLS12Point,
-    /**
-     * Challenge point gamma_acc for multi opening. Used with y, C, and kzg 'proof' Q above.
-     * TODO(#13608): We calculate this by hashing natively in the circuit (hence Fr representation), but it's actually used
-     * as a BLS12Fr field elt. Is this safe? Is there a skew?
-     */
-    public readonly gammaAcc: Fr,
-    /** Simply gamma^(i + 1) at blob i. Used for calculating the i'th element of the above linear comb.s */
-    public readonly gammaPow: BLS12Fr,
-    /** Final challenge values used in evaluation. Optimistically input and checked in the final acc. */
-    public readonly finalBlobChallenges: FinalBlobBatchingChallenges,
-  ) {}
-
-  /**
-   * Create the empty accumulation state of the epoch.
-   * @returns An empty blob accumulator with challenges.
-   */
-  static newWithChallenges(finalBlobChallenges: FinalBlobBatchingChallenges): BatchedBlobAccumulator {
-    return new BatchedBlobAccumulator(
-      Fr.ZERO,
-      Fr.ZERO,
-      BLS12Fr.ZERO,
-      BLS12Point.ZERO,
-      BLS12Point.ZERO,
-      Fr.ZERO,
-      BLS12Fr.ZERO,
-      finalBlobChallenges,
-    );
-  }
-
-
  /**
   * Given blob i, accumulate all state.
   * We assume the input blob has not been evaluated at z.
   * @returns An updated blob accumulator.
   */
-
+  async accumulateBlob(blob: Blob, blobFieldsHash: Fr) {
    const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z);
    const thisC = BLS12Point.decompress(blob.commitment);
    const thisQ = BLS12Point.decompress(proof);
@@ -234,23 +188,25 @@ export class BatchedBlobAccumulator {
  /**
   * Given blobs, accumulate all state.
   * We assume the input blobs have not been evaluated at z.
-   * @param
+   * @param blobFields - The blob fields of a checkpoint to accumulate.
   * @returns An updated blob accumulator.
   */
-  async
-
+  async accumulateFields(blobFields: Fr[]) {
+    const blobs = getBlobsPerL1Block(blobFields);
+
+    if (blobs.length > BLOBS_PER_CHECKPOINT) {
      throw new Error(
-        `Too many blobs to accumulate. The maximum is ${
+        `Too many blobs to accumulate. The maximum is ${BLOBS_PER_CHECKPOINT} per checkpoint. Got ${blobs.length}.`,
      );
    }
 
    // Compute the hash of all the fields in the block.
-    const blobFieldsHash = await
+    const blobFieldsHash = await computeBlobFieldsHash(blobFields);
 
    // Initialize the acc to iterate over:
    let acc: BatchedBlobAccumulator = this.clone();
    for (const blob of blobs) {
-      acc = await acc.
+      acc = await acc.accumulateBlob(blob, blobFieldsHash);
    }
    return acc;
  }
@@ -286,13 +242,22 @@ export class BatchedBlobAccumulator {
 
    const batchedBlob = new BatchedBlob(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.qAcc);
 
-    if (verifyProof && !
+    if (verifyProof && !this.verify()) {
      throw new Error(`KZG proof did not verify.`);
    }
 
    return batchedBlob;
  }
 
+  verify() {
+    return getKzg().verifyKzgProof(
+      this.cAcc.compress(),
+      this.zAcc.toBuffer(),
+      this.yAcc.toBuffer(),
+      this.qAcc.compress(),
+    );
+  }
+
  isEmptyState() {
    return (
      this.blobCommitmentsHashAcc.isZero() &&
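Taken together, this rewrite moves the `BatchedBlob` container out to `batched_blob.ts` and leaves `BatchedBlobAccumulator` as the single class in this file, with batching now driven by raw blob fields grouped per checkpoint (`Fr[][]`) instead of pre-built `Blob[][]`. A minimal usage sketch of the new entry points, assuming the signatures shown in the diff and that the class is re-exported from the package root (the input data is hypothetical):

```ts
import { BatchedBlobAccumulator } from '@aztec/blob-lib';
import type { Fr } from '@aztec/foundation/curves/bn254';

// Hypothetical input: one array of encoded blob fields per checkpoint in the epoch.
declare const blobFieldsPerCheckpoint: Fr[][];

// One-shot batching: precomputes z and gamma from ALL fields, accumulates every
// checkpoint, then finalizes (optionally verifying the batched KZG proof).
const batched = await BatchedBlobAccumulator.batch(blobFieldsPerCheckpoint, /* verifyProof */ true);

// Incremental equivalent, mirroring what batch() does internally:
let acc = await BatchedBlobAccumulator.fromBlobFields(blobFieldsPerCheckpoint);
for (const blobFields of blobFieldsPerCheckpoint) {
  acc = await acc.accumulateFields(blobFields);
}
const alsoBatched = await acc.finalize(/* verifyProof */ true);
```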
package/src/blob_utils.ts
CHANGED

@@ -1,9 +1,11 @@
 import { FIELDS_PER_BLOB } from '@aztec/constants';
-import { BLS12Point
+import { BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 
+import type { BatchedBlob } from './batched_blob.js';
 import { Blob } from './blob.js';
-import {
-import {
+import { type CheckpointBlobData, decodeCheckpointBlobDataFromBuffer } from './encoding/index.js';
+import { computeBlobsHash, computeEthVersionedBlobHash } from './hash.js';
 
 /**
  * @param blobs - The blobs to emit.
@@ -40,26 +42,13 @@ export function getBlobsPerL1Block(fields: Fr[]): Blob[] {
 }
 
 /**
- * Get the
- *
- *
- * @param blobs - The blobs to read fields from. Should be all the blobs in the L1 block proposing the checkpoint.
- * @param checkEncoding - Whether to check if the entire encoded blob fields are valid. If false, it will still check
- * the checkpoint prefix and throw if there's not enough fields.
- * @returns The fields added throughout the checkpoint.
+ * Get the encoded data from all blobs in the checkpoint.
+ * @param blobs - The blobs to read data from. Should be all the blobs for the L1 block proposing the checkpoint.
+ * @returns The encoded data of the checkpoint.
 */
-export function
-
-
-
-export async function computeBlobFieldsHashFromBlobs(blobs: Blob[]): Promise<Fr> {
-  const fields = blobs.map(b => b.toFields()).flat();
-  const numBlobFields = fields[0].toNumber();
-  if (numBlobFields > fields.length) {
-    throw new Error(`The prefix indicates ${numBlobFields} fields. Got ${fields.length}.`);
-  }
-
-  return await computeBlobFieldsHash(fields.slice(0, numBlobFields));
+export function decodeCheckpointBlobDataFromBlobs(blobs: Blob[]): CheckpointBlobData {
+  const buf = Buffer.concat(blobs.map(b => b.data));
+  return decodeCheckpointBlobDataFromBuffer(buf);
 }
 
 export function computeBlobsHashFromBlobs(blobs: Blob[]): Fr {
@@ -69,3 +58,25 @@ export function computeBlobsHashFromBlobs(blobs: Blob[]): Fr {
 export function getBlobCommitmentsFromBlobs(blobs: Blob[]): BLS12Point[] {
   return blobs.map(b => BLS12Point.decompress(b.commitment));
 }
+
+/**
+ * Returns a proof of opening of the blobs to verify on L1 using the point evaluation precompile:
+ *
+ * input[:32] - versioned_hash
+ * input[32:64] - z
+ * input[64:96] - y
+ * input[96:144] - commitment C
+ * input[144:192] - commitment Q (a 'proof' committing to the quotient polynomial q(X))
+ *
+ * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
+ */
+export function getEthBlobEvaluationInputs(batchedBlob: BatchedBlob): `0x${string}` {
+  const buf = Buffer.concat([
+    computeEthVersionedBlobHash(batchedBlob.commitment.compress()),
+    batchedBlob.z.toBuffer(),
+    batchedBlob.y.toBuffer(),
+    batchedBlob.commitment.compress(),
+    batchedBlob.q.compress(),
+  ]);
+  return `0x${buf.toString('hex')}`;
+}
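`getEthBlobEvaluationInputs` is now a free function taking the `BatchedBlob` explicitly rather than a method on it. Per the doc comment above, the precompile input is a fixed 192 bytes: 32 for the versioned hash, 32 each for z and y, and 48 each for the compressed commitments C and Q. A hedged sanity-check sketch (export paths assumed):

```ts
import { getEthBlobEvaluationInputs } from '@aztec/blob-lib';
import type { BatchedBlob } from '@aztec/blob-lib';

declare const batchedBlob: BatchedBlob;

const inputs = getEthBlobEvaluationInputs(batchedBlob);
// '0x' prefix plus 192 bytes (32 + 32 + 32 + 48 + 48), hex-encoded at 2 chars per byte.
console.assert(inputs.length === 2 + 192 * 2);
```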
package/src/circuit_types/blob_accumulator.ts
CHANGED

@@ -1,5 +1,6 @@
 import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
-import { BLS12Fq, BLS12Fr, BLS12Point
+import { BLS12Fq, BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
 
 /**
@@ -81,4 +82,15 @@ export class BlobAccumulator {
       BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
     );
   }
+
+  static random() {
+    return new BlobAccumulator(
+      Fr.random(),
+      Fr.random(),
+      BLS12Fr.random(),
+      BLS12Point.random(),
+      Fr.random(),
+      BLS12Fr.random(),
+    );
+  }
 }
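The new `random()` helper fills every accumulator field with random values, which is mainly useful for serialization round-trip tests. A sketch under the assumption that the class keeps the `toBuffer`/`fromBuffer` pair implied by its `serializeToBuffer` and `BufferReader` imports (method names assumed, not confirmed by the diff):

```ts
import { BlobAccumulator } from '@aztec/blob-lib';

const acc = BlobAccumulator.random();
// Round-trip through the buffer serialization; fromBuffer/toBuffer are assumed names.
const restored = BlobAccumulator.fromBuffer(acc.toBuffer());
console.assert(restored.toBuffer().equals(acc.toBuffer()));
```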
package/src/circuit_types/final_blob_accumulator.ts
CHANGED

@@ -1,4 +1,5 @@
-import { BLS12Fr, BLS12Point
+import { BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
 
 import { inspect } from 'util';
package/src/encoding/block_blob_data.ts
ADDED

@@ -0,0 +1,102 @@
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { FieldReader } from '@aztec/foundation/serialize';
+
+import { BlobDeserializationError } from '../errors.js';
+import {
+  type BlockEndMarker,
+  decodeBlockEndMarker,
+  encodeBlockEndMarker,
+  isBlockEndMarker,
+} from './block_end_marker.js';
+import {
+  type BlockEndStateField,
+  decodeBlockEndStateField,
+  encodeBlockEndStateField,
+} from './block_end_state_field.js';
+import { type TxBlobData, decodeTxBlobData, encodeTxBlobData } from './tx_blob_data.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
+
+export interface BlockEndBlobData {
+  blockEndMarker: BlockEndMarker;
+  blockEndStateField: BlockEndStateField;
+  lastArchiveRoot: Fr;
+  noteHashRoot: Fr;
+  nullifierRoot: Fr;
+  publicDataRoot: Fr;
+  l1ToL2MessageRoot: Fr | undefined;
+}
+
+export interface BlockBlobData extends BlockEndBlobData {
+  txs: TxBlobData[];
+}
+
+export function encodeBlockEndBlobData(blockEndBlobData: BlockEndBlobData): Fr[] {
+  return [
+    encodeBlockEndMarker(blockEndBlobData.blockEndMarker),
+    encodeBlockEndStateField(blockEndBlobData.blockEndStateField),
+    blockEndBlobData.lastArchiveRoot,
+    blockEndBlobData.noteHashRoot,
+    blockEndBlobData.nullifierRoot,
+    blockEndBlobData.publicDataRoot,
+    ...(blockEndBlobData.l1ToL2MessageRoot ? [blockEndBlobData.l1ToL2MessageRoot] : []),
+  ];
+}
+
+export function decodeBlockEndBlobData(fields: Fr[] | FieldReader, isFirstBlock: boolean): BlockEndBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  const numBlockEndData = isFirstBlock ? 7 : 6;
+  if (numBlockEndData > reader.remainingFields()) {
+    throw new BlobDeserializationError(
+      `Incorrect encoding of blob fields: not enough fields for block end data. Expected ${numBlockEndData} fields, only ${reader.remainingFields()} remaining.`,
+    );
+  }
+
+  return {
+    blockEndMarker: decodeBlockEndMarker(reader.readField()),
+    blockEndStateField: decodeBlockEndStateField(reader.readField()),
+    lastArchiveRoot: reader.readField(),
+    noteHashRoot: reader.readField(),
+    nullifierRoot: reader.readField(),
+    publicDataRoot: reader.readField(),
+    l1ToL2MessageRoot: isFirstBlock ? reader.readField() : undefined,
+  };
+}
+
+export function encodeBlockBlobData(blockBlobData: BlockBlobData): Fr[] {
+  return [...blockBlobData.txs.map(tx => encodeTxBlobData(tx)).flat(), ...encodeBlockEndBlobData(blockBlobData)];
+}
+
+export function decodeBlockBlobData(fields: Fr[] | FieldReader, isFirstBlock: boolean): BlockBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  const txs: TxBlobData[] = [];
+  let hasReachedBlockEnd = false;
+  while (!hasReachedBlockEnd) {
+    if (reader.isFinished()) {
+      throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for block end marker.`);
+    }
+
+    const currentField = reader.peekField();
+    if (isBlockEndMarker(currentField)) {
+      hasReachedBlockEnd = true;
+    } else {
+      txs.push(decodeTxBlobData(reader));
+    }
+  }
+
+  const blockEndBlobData = decodeBlockEndBlobData(reader, isFirstBlock);
+
+  const blockEndMarker = blockEndBlobData.blockEndMarker;
+  if (blockEndMarker.numTxs !== txs.length) {
+    throw new BlobDeserializationError(
+      `Incorrect encoding of blob fields: expected ${blockEndMarker.numTxs} txs, but got ${txs.length}.`,
+    );
+  }
+
+  return {
+    txs,
+    ...blockEndBlobData,
+  };
+}
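The decoder works marker-first: it keeps decoding `TxBlobData` entries until the next field's high bits carry the block-end prefix, then reads the fixed block-end section (7 fields for the first block in a checkpoint, which carries the `l1ToL2MessageRoot`, otherwise 6) and cross-checks the transaction count packed into the marker. A hedged round-trip sketch using the functions added above (import path assumed; the sample data is hypothetical):

```ts
import type { Fr } from '@aztec/foundation/curves/bn254';
import { type BlockBlobData, decodeBlockBlobData, encodeBlockBlobData } from '@aztec/blob-lib';

// Hypothetical block data; blockEndMarker.numTxs must equal txs.length for the
// decoder's consistency check to pass.
declare const blockBlobData: BlockBlobData;

const fields: Fr[] = encodeBlockBlobData(blockBlobData);
// isFirstBlock = true expects the extra l1ToL2MessageRoot field (7 block-end fields instead of 6).
const decoded = decodeBlockBlobData(fields, /* isFirstBlock */ true);
console.assert(decoded.txs.length === decoded.blockEndMarker.numTxs);
```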
package/src/encoding/block_end_marker.ts
ADDED

@@ -0,0 +1,55 @@
+import { BLOCK_END_PREFIX } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
+
+const BLOCK_NUMBER_BIT_SIZE = 32n;
+const TIMESTAMP_BIT_SIZE = 64n;
+const NUM_TXS_BIT_SIZE = 16n;
+
+export interface BlockEndMarker {
+  timestamp: bigint;
+  blockNumber: BlockNumber;
+  numTxs: number;
+}
+
+export function encodeBlockEndMarker(blockEndMarker: BlockEndMarker) {
+  let value = BLOCK_END_PREFIX;
+  value <<= TIMESTAMP_BIT_SIZE;
+  value += blockEndMarker.timestamp;
+  value <<= BLOCK_NUMBER_BIT_SIZE;
+  value += BigInt(blockEndMarker.blockNumber);
+  value <<= NUM_TXS_BIT_SIZE;
+  value += BigInt(blockEndMarker.numTxs);
+  return new Fr(value);
+}
+
+export function decodeBlockEndMarker(field: Fr): BlockEndMarker {
+  let value = field.toBigInt();
+  const numTxs = Number(value & (2n ** NUM_TXS_BIT_SIZE - 1n));
+  value >>= NUM_TXS_BIT_SIZE;
+  const blockNumber = BlockNumber(Number(value & (2n ** BLOCK_NUMBER_BIT_SIZE - 1n)));
+  value >>= BLOCK_NUMBER_BIT_SIZE;
+  const timestamp = value & (2n ** TIMESTAMP_BIT_SIZE - 1n);
+  value >>= TIMESTAMP_BIT_SIZE;
+
+  const prefix = value;
+  if (prefix !== BLOCK_END_PREFIX) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid block end marker.`);
+  }
+
+  return {
+    blockNumber,
+    timestamp,
+    numTxs,
+  };
+}
+
+// Check if a field is a block end marker. Used before decoding to check if it has reached the end of the block.
+export function isBlockEndMarker(field: Fr): boolean {
+  const prefix = field.toBigInt() >> (NUM_TXS_BIT_SIZE + BLOCK_NUMBER_BIT_SIZE + TIMESTAMP_BIT_SIZE);
+  return prefix === BLOCK_END_PREFIX;
+}
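A worked example of the marker's bit layout, `[prefix | timestamp:64 | blockNumber:32 | numTxs:16]` from most to least significant bits. The sample values are arbitrary; only the shift widths come from the code above:

```ts
import { BLOCK_END_PREFIX } from '@aztec/constants';

const timestamp = 1_700_000_000n;
const blockNumber = 42n;
const numTxs = 3n;

// Pack exactly as encodeBlockEndMarker does:
let value = BLOCK_END_PREFIX;
value = (value << 64n) + timestamp;
value = (value << 32n) + blockNumber;
value = (value << 16n) + numTxs;

// Unpack in reverse, as decodeBlockEndMarker does:
console.assert((value & 0xffffn) === numTxs); // low 16 bits
console.assert(((value >> 16n) & 0xffff_ffffn) === blockNumber); // next 32 bits
console.assert(((value >> 48n) & ((1n << 64n) - 1n)) === timestamp); // next 64 bits
console.assert(value >> 112n === BLOCK_END_PREFIX); // isBlockEndMarker's check (16 + 32 + 64 = 112)
```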
package/src/encoding/block_end_state_field.ts
ADDED

@@ -0,0 +1,59 @@
+import {
+  L1_TO_L2_MSG_TREE_HEIGHT,
+  NOTE_HASH_TREE_HEIGHT,
+  NULLIFIER_TREE_HEIGHT,
+  PUBLIC_DATA_TREE_HEIGHT,
+} from '@aztec/constants';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
+
+export const TOTAL_MANA_USED_BIT_SIZE = 48n;
+
+export interface BlockEndStateField {
+  l1ToL2MessageNextAvailableLeafIndex: number;
+  noteHashNextAvailableLeafIndex: number;
+  nullifierNextAvailableLeafIndex: number;
+  publicDataNextAvailableLeafIndex: number;
+  totalManaUsed: bigint;
+}
+
+export function encodeBlockEndStateField(blockEndStateField: BlockEndStateField) {
+  let value = BigInt(blockEndStateField.l1ToL2MessageNextAvailableLeafIndex);
+  value <<= BigInt(NOTE_HASH_TREE_HEIGHT);
+  value += BigInt(blockEndStateField.noteHashNextAvailableLeafIndex);
+  value <<= BigInt(NULLIFIER_TREE_HEIGHT);
+  value += BigInt(blockEndStateField.nullifierNextAvailableLeafIndex);
+  value <<= BigInt(PUBLIC_DATA_TREE_HEIGHT);
+  value += BigInt(blockEndStateField.publicDataNextAvailableLeafIndex);
+  value <<= BigInt(TOTAL_MANA_USED_BIT_SIZE);
+  value += BigInt(blockEndStateField.totalManaUsed);
+  return new Fr(value);
+}
+
+export function decodeBlockEndStateField(field: Fr): BlockEndStateField {
+  let value = field.toBigInt();
+  const totalManaUsed = value & (2n ** TOTAL_MANA_USED_BIT_SIZE - 1n);
+  value >>= TOTAL_MANA_USED_BIT_SIZE;
+  const publicDataNextAvailableLeafIndex = Number(value & (2n ** BigInt(PUBLIC_DATA_TREE_HEIGHT) - 1n));
+  value >>= BigInt(PUBLIC_DATA_TREE_HEIGHT);
+  const nullifierNextAvailableLeafIndex = Number(value & (2n ** BigInt(NULLIFIER_TREE_HEIGHT) - 1n));
+  value >>= BigInt(NULLIFIER_TREE_HEIGHT);
+  const noteHashNextAvailableLeafIndex = Number(value & (2n ** BigInt(NOTE_HASH_TREE_HEIGHT) - 1n));
+  value >>= BigInt(NOTE_HASH_TREE_HEIGHT);
+
+  if (value > 2n ** BigInt(L1_TO_L2_MSG_TREE_HEIGHT) - 1n) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid block end state field.`);
+  }
+  const l1ToL2MessageNextAvailableLeafIndex = Number(value);
+
+  return {
+    l1ToL2MessageNextAvailableLeafIndex,
+    noteHashNextAvailableLeafIndex,
+    nullifierNextAvailableLeafIndex,
+    publicDataNextAvailableLeafIndex,
+    totalManaUsed,
+  };
+}
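Here the bit widths are the tree heights themselves: each "next available leaf index" needs at most `TREE_HEIGHT` bits, so the packed layout is `[l1ToL2 | noteHash | nullifier | publicData | totalManaUsed:48]` from most to least significant bits. For everything to fit in one field element, the summed widths must stay below the ~254-bit capacity of a BN254 `Fr`, which is also what the decoder's range check on the top segment relies on. A small sketch of that invariant:

```ts
import {
  L1_TO_L2_MSG_TREE_HEIGHT,
  NOTE_HASH_TREE_HEIGHT,
  NULLIFIER_TREE_HEIGHT,
  PUBLIC_DATA_TREE_HEIGHT,
} from '@aztec/constants';

// Total packed width: sum of the tree heights plus the 48-bit mana counter.
const totalBits =
  L1_TO_L2_MSG_TREE_HEIGHT + NOTE_HASH_TREE_HEIGHT + NULLIFIER_TREE_HEIGHT + PUBLIC_DATA_TREE_HEIGHT + 48;

// Must fit in a BN254 field element (< 254 bits) for encodeBlockEndStateField to be lossless.
console.assert(totalBits < 254);
```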