@aztec/blob-lib 3.0.0-canary.a9708bd → 3.0.0-devnet.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/blob.d.ts +52 -95
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob.js +73 -165
- package/dest/blob_batching.d.ts +15 -48
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +81 -120
- package/dest/blob_utils.d.ts +30 -0
- package/dest/blob_utils.d.ts.map +1 -0
- package/dest/blob_utils.js +60 -0
- package/dest/circuit_types/blob_accumulator.d.ts +21 -0
- package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
- package/dest/circuit_types/blob_accumulator.js +58 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
- package/dest/circuit_types/final_blob_accumulator.js +63 -0
- package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
- package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
- package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
- package/dest/circuit_types/index.d.ts +4 -0
- package/dest/circuit_types/index.d.ts.map +1 -0
- package/dest/circuit_types/index.js +4 -0
- package/dest/deserialize.d.ts +14 -0
- package/dest/deserialize.d.ts.map +1 -0
- package/dest/deserialize.js +33 -0
- package/dest/encoding.d.ts +22 -62
- package/dest/encoding.d.ts.map +1 -1
- package/dest/encoding.js +114 -104
- package/dest/hash.d.ts +35 -0
- package/dest/hash.d.ts.map +1 -0
- package/dest/hash.js +69 -0
- package/dest/index.d.ts +5 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +5 -15
- package/dest/kzg_context.d.ts +4 -0
- package/dest/kzg_context.d.ts.map +1 -0
- package/dest/kzg_context.js +5 -0
- package/dest/sponge_blob.d.ts +13 -9
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +28 -17
- package/dest/testing.d.ts +7 -12
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +54 -41
- package/dest/types.d.ts +16 -0
- package/dest/types.d.ts.map +1 -0
- package/dest/types.js +3 -0
- package/package.json +6 -4
- package/src/blob.ts +76 -191
- package/src/blob_batching.ts +109 -137
- package/src/blob_utils.ts +71 -0
- package/src/circuit_types/blob_accumulator.ts +84 -0
- package/src/circuit_types/final_blob_accumulator.ts +75 -0
- package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
- package/src/circuit_types/index.ts +4 -0
- package/src/deserialize.ts +38 -0
- package/src/encoding.ts +136 -120
- package/src/hash.ts +77 -0
- package/src/index.ts +5 -18
- package/src/kzg_context.ts +5 -0
- package/src/sponge_blob.ts +24 -14
- package/src/testing.ts +55 -40
- package/src/types.ts +16 -0
- package/dest/blob_batching_public_inputs.d.ts +0 -71
- package/dest/blob_batching_public_inputs.d.ts.map +0 -1
- package/dest/blob_batching_public_inputs.js +0 -168
- package/src/blob_batching_public_inputs.ts +0 -252
package/dest/blob_batching.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
|
|
2
|
-
import { BufferReader } from '@aztec/foundation/serialize';
|
|
3
2
|
import { Blob } from './blob.js';
|
|
3
|
+
import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
|
|
4
4
|
/**
|
|
5
5
|
* A class to create, manage, and prove batched EVM blobs.
|
|
6
6
|
*/
|
|
@@ -33,13 +33,13 @@ export declare class BatchedBlob {
|
|
|
33
33
|
*
|
|
34
34
|
* @returns A batched blob.
|
|
35
35
|
*/
|
|
36
|
-
static batch(blobs: Blob[]): Promise<BatchedBlob>;
|
|
36
|
+
static batch(blobs: Blob[][]): Promise<BatchedBlob>;
|
|
37
37
|
/**
|
|
38
38
|
* Returns an empty BatchedBlobAccumulator with precomputed challenges from all blobs in the epoch.
|
|
39
39
|
* @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
|
|
40
40
|
* beforehand from ALL blobs.
|
|
41
41
|
*/
|
|
42
|
-
static newAccumulator(blobs: Blob[]): Promise<BatchedBlobAccumulator>;
|
|
42
|
+
static newAccumulator(blobs: Blob[][]): Promise<BatchedBlobAccumulator>;
|
|
43
43
|
/**
|
|
44
44
|
* Gets the final challenges based on all blobs and their elements to perform a multi opening proof.
|
|
45
45
|
* Used in BatchedBlobAccumulator as 'finalZ' and finalGamma':
|
|
@@ -48,12 +48,13 @@ export declare class BatchedBlob {
|
|
|
48
48
|
* - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
|
|
49
49
|
* - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
|
|
50
50
|
* - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }, for all blob evaluations y_i (see above) and commitments C_i.
|
|
51
|
+
*
|
|
52
|
+
* @param blobs - The blobs to precompute the challenges for. Each sub-array is the blobs for an L1 block.
|
|
51
53
|
* @returns Challenges z and gamma.
|
|
52
54
|
*/
|
|
53
|
-
static precomputeBatchedBlobChallenges(blobs: Blob[]): Promise<FinalBlobBatchingChallenges>;
|
|
54
|
-
|
|
55
|
+
static precomputeBatchedBlobChallenges(blobs: Blob[][]): Promise<FinalBlobBatchingChallenges>;
|
|
56
|
+
verify(): boolean;
|
|
55
57
|
getEthVersionedBlobHash(): Buffer;
|
|
56
|
-
static getEthVersionedBlobHash(commitment: Buffer): Buffer;
|
|
57
58
|
/**
|
|
58
59
|
* Returns a proof of opening of the blobs to verify on L1 using the point evaluation precompile:
|
|
59
60
|
*
|
|
@@ -66,33 +67,10 @@ export declare class BatchedBlob {
|
|
|
66
67
|
* See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
|
|
67
68
|
*/
|
|
68
69
|
getEthBlobEvaluationInputs(): `0x${string}`;
|
|
70
|
+
toFinalBlobAccumulator(): FinalBlobAccumulator;
|
|
69
71
|
}
|
|
70
72
|
/**
|
|
71
|
-
*
|
|
72
|
-
* - Checking equality in each block merge circuit and propagating up
|
|
73
|
-
* - Checking final z_acc == z in root circuit
|
|
74
|
-
* - Checking final gamma_acc == gamma in root circuit
|
|
75
|
-
*
|
|
76
|
-
* - z = H(...H(H(z_0, z_1) z_2)..z_n)
|
|
77
|
-
* - where z_i = H(H(fields of blob_i), C_i),
|
|
78
|
-
* - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
|
|
79
|
-
* - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
|
|
80
|
-
* - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }
|
|
81
|
-
* for all blob evaluations y_i (see above) and commitments C_i.
|
|
82
|
-
*
|
|
83
|
-
* Iteratively calculated by BlobAccumulatorPublicInputs.accumulate() in nr. See also precomputeBatchedBlobChallenges() above.
|
|
84
|
-
*/
|
|
85
|
-
export declare class FinalBlobBatchingChallenges {
|
|
86
|
-
readonly z: Fr;
|
|
87
|
-
readonly gamma: BLS12Fr;
|
|
88
|
-
constructor(z: Fr, gamma: BLS12Fr);
|
|
89
|
-
equals(other: FinalBlobBatchingChallenges): boolean;
|
|
90
|
-
static empty(): FinalBlobBatchingChallenges;
|
|
91
|
-
static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges;
|
|
92
|
-
toBuffer(): Buffer<ArrayBufferLike>;
|
|
93
|
-
}
|
|
94
|
-
/**
|
|
95
|
-
* See noir-projects/noir-protocol-circuits/crates/blob/src/blob_batching_public_inputs.nr -> BlobAccumulatorPublicInputs
|
|
73
|
+
* See noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr
|
|
96
74
|
*/
|
|
97
75
|
export declare class BatchedBlobAccumulator {
|
|
98
76
|
/** Hash of Cs (to link to L1 blob hashes). */
|
|
@@ -136,21 +114,6 @@ export declare class BatchedBlobAccumulator {
|
|
|
136
114
|
gammaPow: BLS12Fr,
|
|
137
115
|
/** Final challenge values used in evaluation. Optimistically input and checked in the final acc. */
|
|
138
116
|
finalBlobChallenges: FinalBlobBatchingChallenges);
|
|
139
|
-
/**
|
|
140
|
-
* Init the first accumulation state of the epoch.
|
|
141
|
-
* We assume the input blob has not been evaluated at z.
|
|
142
|
-
*
|
|
143
|
-
* First state of the accumulator:
|
|
144
|
-
* - v_acc := sha256(C_0)
|
|
145
|
-
* - z_acc := z_0
|
|
146
|
-
* - y_acc := gamma^0 * y_0 = y_0
|
|
147
|
-
* - c_acc := gamma^0 * c_0 = c_0
|
|
148
|
-
* - gamma_acc := poseidon2(y_0.limbs)
|
|
149
|
-
* - gamma^(i + 1) = gamma^1 = gamma // denoted gamma_pow_acc
|
|
150
|
-
*
|
|
151
|
-
* @returns An initial blob accumulator.
|
|
152
|
-
*/
|
|
153
|
-
static initialize(blob: Blob, finalBlobChallenges: FinalBlobBatchingChallenges): Promise<BatchedBlobAccumulator>;
|
|
154
117
|
/**
|
|
155
118
|
* Create the empty accumulation state of the epoch.
|
|
156
119
|
* @returns An empty blob accumulator with challenges.
|
|
@@ -161,10 +124,11 @@ export declare class BatchedBlobAccumulator {
|
|
|
161
124
|
* We assume the input blob has not been evaluated at z.
|
|
162
125
|
* @returns An updated blob accumulator.
|
|
163
126
|
*/
|
|
164
|
-
accumulate
|
|
127
|
+
private accumulate;
|
|
165
128
|
/**
|
|
166
129
|
* Given blobs, accumulate all state.
|
|
167
130
|
* We assume the input blobs have not been evaluated at z.
|
|
131
|
+
* @param blobs - The blobs to accumulate. They should be in the same L1 block.
|
|
168
132
|
* @returns An updated blob accumulator.
|
|
169
133
|
*/
|
|
170
134
|
accumulateBlobs(blobs: Blob[]): Promise<BatchedBlobAccumulator>;
|
|
@@ -179,10 +143,13 @@ export declare class BatchedBlobAccumulator {
|
|
|
179
143
|
* - c := c_acc (final commitment to be checked on L1)
|
|
180
144
|
* - gamma := poseidon2(gamma_acc, z) (challenge for linear combination of y and C, above)
|
|
181
145
|
*
|
|
146
|
+
* @param verifyProof - Whether to verify the KZG proof.
|
|
182
147
|
* @returns A batched blob.
|
|
183
148
|
*/
|
|
184
|
-
finalize(): Promise<BatchedBlob>;
|
|
149
|
+
finalize(verifyProof?: boolean): Promise<BatchedBlob>;
|
|
185
150
|
isEmptyState(): boolean;
|
|
186
151
|
clone(): BatchedBlobAccumulator;
|
|
152
|
+
toBlobAccumulator(): BlobAccumulator;
|
|
153
|
+
toFinalBlobAccumulator(): FinalBlobAccumulator;
|
|
187
154
|
}
|
|
188
155
|
//# sourceMappingURL=blob_batching.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"blob_batching.d.ts","sourceRoot":"","sources":["../src/blob_batching.ts"],"names":[],"mappings":"AAEA,OAAO,
|
|
1
|
+
{"version":3,"file":"blob_batching.d.ts","sourceRoot":"","sources":["../src/blob_batching.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAEnE,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAEjC,OAAO,EAAE,eAAe,EAAE,oBAAoB,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AAI9G;;GAEG;AACH,qBAAa,WAAW;IAEpB,8CAA8C;aAC9B,mBAAmB,EAAE,EAAE;IACvC,gDAAgD;aAChC,CAAC,EAAE,EAAE;IACrB,mFAAmF;aACnE,CAAC,EAAE,OAAO;IAC1B,kFAAkF;aAClE,UAAU,EAAE,UAAU;IACtC,4HAA4H;aAC5G,CAAC,EAAE,UAAU;;IAT7B,8CAA8C;IAC9B,mBAAmB,EAAE,EAAE;IACvC,gDAAgD;IAChC,CAAC,EAAE,EAAE;IACrB,mFAAmF;IACnE,CAAC,EAAE,OAAO;IAC1B,kFAAkF;IAClE,UAAU,EAAE,UAAU;IACtC,4HAA4H;IAC5G,CAAC,EAAE,UAAU;IAG/B;;;;;;OAMG;WACU,KAAK,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,GAAG,OAAO,CAAC,WAAW,CAAC;IAgBzD;;;;OAIG;WACU,cAAc,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,GAAG,OAAO,CAAC,sBAAsB,CAAC;IAK7E;;;;;;;;;;;OAWG;WACU,+BAA+B,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,GAAG,OAAO,CAAC,2BAA2B,CAAC;IAmCnG,MAAM;IAKN,uBAAuB,IAAI,MAAM;IAIjC;;;;;;;;;;OAUG;IACH,0BAA0B,IAAI,KAAK,MAAM,EAAE;IAW3C,sBAAsB;CAGvB;AAED;;GAEG;AACH,qBAAa,sBAAsB;IAE/B,8CAA8C;aAC9B,sBAAsB,EAAE,EAAE;IAC1C,sEAAsE;aACtD,IAAI,EAAE,EAAE;IACxB,yGAAyG;aACzF,IAAI,EAAE,OAAO;IAC7B,qGAAqG;aACrF,IAAI,EAAE,UAAU;IAChC,oGAAoG;aACpF,IAAI,EAAE,UAAU;IAChC;;;;OAIG;aACa,QAAQ,EAAE,EAAE;IAC5B,uGAAuG;aACvF,QAAQ,EAAE,OAAO;IACjC,oGAAoG;aACpF,mBAAmB,EAAE,2BAA2B;;IAnBhE,8CAA8C;IAC9B,sBAAsB,EAAE,EAAE;IAC1C,sEAAsE;IACtD,IAAI,EAAE,EAAE;IACxB,yGAAyG;IACzF,IAAI,EAAE,OAAO;IAC7B,qGAAqG;IACrF,IAAI,EAAE,UAAU;IAChC,oGAAoG;IACpF,IAAI,EAAE,UAAU;IAChC;;;;OAIG;IACa,QAAQ,EAAE,EAAE;IAC5B,uGAAuG;IACvF,QAAQ,EAAE,OAAO;IACjC,oGAAoG;IACpF,mBAAmB,EAAE,2BAA2B;IAGlE;;;OAGG;IACH,MAAM,CAAC,iBAAiB,CAAC,mBAAmB,EAAE,2BAA2B,GAAG,sBAAsB;IAalG;;;;OAIG;YACW,UAAU;IAyCxB;;;;;OAKG;IACG,eAAe,CAAC,KAAK,EAAE,IAAI,EAAE;IAkBnC;;;;;;;;;;;;;OAaG;IACG,QAAQ,CAAC,WAAW,UAAQ,GAAG,OAAO,CAAC,WAAW,CAAC;IAwBzD,YAAY;IAYZ,KAAK;IAaL,iBAAiB;IAWjB,sBAAsB;CAGvB"}
|
package/dest/blob_batching.js
CHANGED
|
@@ -1,11 +1,10 @@
|
|
|
1
1
|
import { AZTEC_MAX_EPOCH_DURATION, BLOBS_PER_BLOCK } from '@aztec/constants';
|
|
2
|
-
import { poseidon2Hash,
|
|
2
|
+
import { poseidon2Hash, sha256ToField } from '@aztec/foundation/crypto';
|
|
3
3
|
import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
|
|
4
|
-
import {
|
|
5
|
-
|
|
6
|
-
import
|
|
7
|
-
import {
|
|
8
|
-
const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
4
|
+
import { computeBlobFieldsHashFromBlobs } from './blob_utils.js';
|
|
5
|
+
import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
|
|
6
|
+
import { computeEthVersionedBlobHash, hashNoirBigNumLimbs } from './hash.js';
|
|
7
|
+
import { kzg } from './kzg_context.js';
|
|
9
8
|
/**
|
|
10
9
|
* A class to create, manage, and prove batched EVM blobs.
|
|
11
10
|
*/ export class BatchedBlob {
|
|
@@ -28,14 +27,15 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
28
27
|
*
|
|
29
28
|
* @returns A batched blob.
|
|
30
29
|
*/ static async batch(blobs) {
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
throw new Error(`Too many blobs (${numBlobs}) sent to batch(). The maximum is ${BLOBS_PER_BLOCK * AZTEC_MAX_EPOCH_DURATION}.`);
|
|
30
|
+
if (blobs.length > AZTEC_MAX_EPOCH_DURATION) {
|
|
31
|
+
throw new Error(`Too many blocks sent to batch(). The maximum is ${AZTEC_MAX_EPOCH_DURATION}. Got ${blobs.length}.`);
|
|
34
32
|
}
|
|
35
33
|
// Precalculate the values (z and gamma) and initialize the accumulator:
|
|
36
34
|
let acc = await this.newAccumulator(blobs);
|
|
37
35
|
// Now we can create a multi opening proof of all input blobs:
|
|
38
|
-
|
|
36
|
+
for (const blockBlobs of blobs){
|
|
37
|
+
acc = await acc.accumulateBlobs(blockBlobs);
|
|
38
|
+
}
|
|
39
39
|
return await acc.finalize();
|
|
40
40
|
}
|
|
41
41
|
/**
|
|
@@ -54,27 +54,42 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
54
54
|
* - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
|
|
55
55
|
* - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
|
|
56
56
|
* - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }, for all blob evaluations y_i (see above) and commitments C_i.
|
|
57
|
+
*
|
|
58
|
+
* @param blobs - The blobs to precompute the challenges for. Each sub-array is the blobs for an L1 block.
|
|
57
59
|
* @returns Challenges z and gamma.
|
|
58
60
|
*/ static async precomputeBatchedBlobChallenges(blobs) {
|
|
59
|
-
//
|
|
60
|
-
let z
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
61
|
+
// Compute the final challenge z to evaluate the blobs.
|
|
62
|
+
let z;
|
|
63
|
+
for (const blockBlobs of blobs){
|
|
64
|
+
// Compute the hash of all the fields in the block.
|
|
65
|
+
const blobFieldsHash = await computeBlobFieldsHashFromBlobs(blockBlobs);
|
|
66
|
+
for (const blob of blockBlobs){
|
|
67
|
+
// Compute the challenge z for each blob and accumulate it.
|
|
68
|
+
const challengeZ = await blob.computeChallengeZ(blobFieldsHash);
|
|
69
|
+
if (!z) {
|
|
70
|
+
z = challengeZ;
|
|
71
|
+
} else {
|
|
72
|
+
z = await poseidon2Hash([
|
|
73
|
+
z,
|
|
74
|
+
challengeZ
|
|
75
|
+
]);
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
if (!z) {
|
|
80
|
+
throw new Error('No blobs to precompute challenges for.');
|
|
67
81
|
}
|
|
68
82
|
// Now we have a shared challenge for all blobs, evaluate them...
|
|
69
|
-
const
|
|
70
|
-
const
|
|
83
|
+
const allBlobs = blobs.flat();
|
|
84
|
+
const proofObjects = allBlobs.map((b)=>b.evaluate(z));
|
|
85
|
+
const evaluations = await Promise.all(proofObjects.map(({ y })=>hashNoirBigNumLimbs(y)));
|
|
71
86
|
// ...and find the challenge for the linear combination of blobs.
|
|
72
|
-
let gamma =
|
|
87
|
+
let gamma = evaluations[0];
|
|
73
88
|
// We start at i = 1, because gamma is initialized as the first blob's evaluation.
|
|
74
|
-
for(let i = 1; i <
|
|
89
|
+
for(let i = 1; i < allBlobs.length; i++){
|
|
75
90
|
gamma = await poseidon2Hash([
|
|
76
91
|
gamma,
|
|
77
|
-
|
|
92
|
+
evaluations[i]
|
|
78
93
|
]);
|
|
79
94
|
}
|
|
80
95
|
gamma = await poseidon2Hash([
|
|
@@ -83,33 +98,12 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
83
98
|
]);
|
|
84
99
|
return new FinalBlobBatchingChallenges(z, BLS12Fr.fromBN254Fr(gamma));
|
|
85
100
|
}
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
await Blob.fromFields([])
|
|
89
|
-
];
|
|
90
|
-
// We need to precompute the final challenge values to evaluate the blobs.
|
|
91
|
-
const z = blobs[0].challengeZ;
|
|
92
|
-
// Now we have a shared challenge for all blobs, evaluate them...
|
|
93
|
-
const proofObjects = blobs.map((b)=>computeKzgProof(b.data, z.toBuffer()));
|
|
94
|
-
const evaluations = proofObjects.map(([_, evaluation])=>BLS12Fr.fromBuffer(Buffer.from(evaluation)));
|
|
95
|
-
// ...and find the challenge for the linear combination of blobs.
|
|
96
|
-
let gamma = await hashNoirBigNumLimbs(evaluations[0]);
|
|
97
|
-
gamma = await poseidon2Hash([
|
|
98
|
-
gamma,
|
|
99
|
-
z
|
|
100
|
-
]);
|
|
101
|
-
return new FinalBlobBatchingChallenges(z, BLS12Fr.fromBN254Fr(gamma));
|
|
101
|
+
verify() {
|
|
102
|
+
return kzg.verifyKzgProof(this.commitment.compress(), this.z.toBuffer(), this.y.toBuffer(), this.q.compress());
|
|
102
103
|
}
|
|
103
104
|
// Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
|
|
104
105
|
getEthVersionedBlobHash() {
|
|
105
|
-
|
|
106
|
-
hash[0] = VERSIONED_HASH_VERSION_KZG;
|
|
107
|
-
return hash;
|
|
108
|
-
}
|
|
109
|
-
static getEthVersionedBlobHash(commitment) {
|
|
110
|
-
const hash = sha256(commitment);
|
|
111
|
-
hash[0] = VERSIONED_HASH_VERSION_KZG;
|
|
112
|
-
return hash;
|
|
106
|
+
return computeEthVersionedBlobHash(this.commitment.compress());
|
|
113
107
|
}
|
|
114
108
|
/**
|
|
115
109
|
* Returns a proof of opening of the blobs to verify on L1 using the point evaluation precompile:
|
|
@@ -131,44 +125,12 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
131
125
|
]);
|
|
132
126
|
return `0x${buf.toString('hex')}`;
|
|
133
127
|
}
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
* Final values z and gamma are injected into each block root circuit. We ensure they are correct by:
|
|
137
|
-
* - Checking equality in each block merge circuit and propagating up
|
|
138
|
-
* - Checking final z_acc == z in root circuit
|
|
139
|
-
* - Checking final gamma_acc == gamma in root circuit
|
|
140
|
-
*
|
|
141
|
-
* - z = H(...H(H(z_0, z_1) z_2)..z_n)
|
|
142
|
-
* - where z_i = H(H(fields of blob_i), C_i),
|
|
143
|
-
* - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
|
|
144
|
-
* - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
|
|
145
|
-
* - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }
|
|
146
|
-
* for all blob evaluations y_i (see above) and commitments C_i.
|
|
147
|
-
*
|
|
148
|
-
* Iteratively calculated by BlobAccumulatorPublicInputs.accumulate() in nr. See also precomputeBatchedBlobChallenges() above.
|
|
149
|
-
*/ export class FinalBlobBatchingChallenges {
|
|
150
|
-
z;
|
|
151
|
-
gamma;
|
|
152
|
-
constructor(z, gamma){
|
|
153
|
-
this.z = z;
|
|
154
|
-
this.gamma = gamma;
|
|
155
|
-
}
|
|
156
|
-
equals(other) {
|
|
157
|
-
return this.z.equals(other.z) && this.gamma.equals(other.gamma);
|
|
158
|
-
}
|
|
159
|
-
static empty() {
|
|
160
|
-
return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
|
|
161
|
-
}
|
|
162
|
-
static fromBuffer(buffer) {
|
|
163
|
-
const reader = BufferReader.asReader(buffer);
|
|
164
|
-
return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
|
|
165
|
-
}
|
|
166
|
-
toBuffer() {
|
|
167
|
-
return serializeToBuffer(this.z, this.gamma);
|
|
128
|
+
toFinalBlobAccumulator() {
|
|
129
|
+
return new FinalBlobAccumulator(this.blobCommitmentsHash, this.z, this.y, this.commitment);
|
|
168
130
|
}
|
|
169
131
|
}
|
|
170
132
|
/**
|
|
171
|
-
* See noir-projects/noir-protocol-circuits/crates/blob/src/
|
|
133
|
+
* See noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr
|
|
172
134
|
*/ export class BatchedBlobAccumulator {
|
|
173
135
|
blobCommitmentsHashAcc;
|
|
174
136
|
zAcc;
|
|
@@ -193,27 +155,6 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
193
155
|
this.finalBlobChallenges = finalBlobChallenges;
|
|
194
156
|
}
|
|
195
157
|
/**
|
|
196
|
-
* Init the first accumulation state of the epoch.
|
|
197
|
-
* We assume the input blob has not been evaluated at z.
|
|
198
|
-
*
|
|
199
|
-
* First state of the accumulator:
|
|
200
|
-
* - v_acc := sha256(C_0)
|
|
201
|
-
* - z_acc := z_0
|
|
202
|
-
* - y_acc := gamma^0 * y_0 = y_0
|
|
203
|
-
* - c_acc := gamma^0 * c_0 = c_0
|
|
204
|
-
* - gamma_acc := poseidon2(y_0.limbs)
|
|
205
|
-
* - gamma^(i + 1) = gamma^1 = gamma // denoted gamma_pow_acc
|
|
206
|
-
*
|
|
207
|
-
* @returns An initial blob accumulator.
|
|
208
|
-
*/ static async initialize(blob, finalBlobChallenges) {
|
|
209
|
-
const [q, evaluation] = computeKzgProof(blob.data, finalBlobChallenges.z.toBuffer());
|
|
210
|
-
const firstY = BLS12Fr.fromBuffer(Buffer.from(evaluation));
|
|
211
|
-
// Here, i = 0, so:
|
|
212
|
-
return new BatchedBlobAccumulator(sha256ToField([
|
|
213
|
-
blob.commitment
|
|
214
|
-
]), blob.challengeZ, firstY, BLS12Point.decompress(blob.commitment), BLS12Point.decompress(Buffer.from(q)), await hashNoirBigNumLimbs(firstY), finalBlobChallenges.gamma, finalBlobChallenges);
|
|
215
|
-
}
|
|
216
|
-
/**
|
|
217
158
|
* Create the empty accumulation state of the epoch.
|
|
218
159
|
* @returns An empty blob accumulator with challenges.
|
|
219
160
|
*/ static newWithChallenges(finalBlobChallenges) {
|
|
@@ -223,20 +164,32 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
223
164
|
* Given blob i, accumulate all state.
|
|
224
165
|
* We assume the input blob has not been evaluated at z.
|
|
225
166
|
* @returns An updated blob accumulator.
|
|
226
|
-
*/ async accumulate(blob) {
|
|
167
|
+
*/ async accumulate(blob, blobFieldsHash) {
|
|
168
|
+
const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z);
|
|
169
|
+
const thisC = BLS12Point.decompress(blob.commitment);
|
|
170
|
+
const thisQ = BLS12Point.decompress(proof);
|
|
171
|
+
const blobChallengeZ = await blob.computeChallengeZ(blobFieldsHash);
|
|
227
172
|
if (this.isEmptyState()) {
|
|
228
|
-
|
|
173
|
+
/**
|
|
174
|
+
* Init the first accumulation state of the epoch.
|
|
175
|
+
* - v_acc := sha256(C_0)
|
|
176
|
+
* - z_acc := z_0
|
|
177
|
+
* - y_acc := gamma^0 * y_0 = y_0
|
|
178
|
+
* - c_acc := gamma^0 * c_0 = c_0
|
|
179
|
+
* - gamma_acc := poseidon2(y_0.limbs)
|
|
180
|
+
* - gamma^(i + 1) = gamma^1 = gamma // denoted gamma_pow_acc
|
|
181
|
+
*/ return new BatchedBlobAccumulator(sha256ToField([
|
|
182
|
+
blob.commitment
|
|
183
|
+
]), blobChallengeZ, thisY, thisC, thisQ, await hashNoirBigNumLimbs(thisY), this.finalBlobChallenges.gamma, this.finalBlobChallenges);
|
|
229
184
|
} else {
|
|
230
|
-
const [q, evaluation] = computeKzgProof(blob.data, this.finalBlobChallenges.z.toBuffer());
|
|
231
|
-
const thisY = BLS12Fr.fromBuffer(Buffer.from(evaluation));
|
|
232
185
|
// Moving from i - 1 to i, so:
|
|
233
186
|
return new BatchedBlobAccumulator(sha256ToField([
|
|
234
187
|
this.blobCommitmentsHashAcc,
|
|
235
188
|
blob.commitment
|
|
236
189
|
]), await poseidon2Hash([
|
|
237
190
|
this.zAcc,
|
|
238
|
-
|
|
239
|
-
]), this.yAcc.add(thisY.mul(this.gammaPow)), this.cAcc.add(
|
|
191
|
+
blobChallengeZ
|
|
192
|
+
]), this.yAcc.add(thisY.mul(this.gammaPow)), this.cAcc.add(thisC.mul(this.gammaPow)), this.qAcc.add(thisQ.mul(this.gammaPow)), await poseidon2Hash([
|
|
240
193
|
this.gammaAcc,
|
|
241
194
|
await hashNoirBigNumLimbs(thisY)
|
|
242
195
|
]), this.gammaPow.mul(this.finalBlobChallenges.gamma), this.finalBlobChallenges);
|
|
@@ -245,12 +198,18 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
245
198
|
/**
|
|
246
199
|
* Given blobs, accumulate all state.
|
|
247
200
|
* We assume the input blobs have not been evaluated at z.
|
|
201
|
+
* @param blobs - The blobs to accumulate. They should be in the same L1 block.
|
|
248
202
|
* @returns An updated blob accumulator.
|
|
249
203
|
*/ async accumulateBlobs(blobs) {
|
|
204
|
+
if (blobs.length > BLOBS_PER_BLOCK) {
|
|
205
|
+
throw new Error(`Too many blobs to accumulate. The maximum is ${BLOBS_PER_BLOCK} per block. Got ${blobs.length}.`);
|
|
206
|
+
}
|
|
207
|
+
// Compute the hash of all the fields in the block.
|
|
208
|
+
const blobFieldsHash = await computeBlobFieldsHashFromBlobs(blobs);
|
|
250
209
|
// Initialize the acc to iterate over:
|
|
251
210
|
let acc = this.clone();
|
|
252
|
-
for(
|
|
253
|
-
acc = await acc.accumulate(
|
|
211
|
+
for (const blob of blobs){
|
|
212
|
+
acc = await acc.accumulate(blob, blobFieldsHash);
|
|
254
213
|
}
|
|
255
214
|
return acc;
|
|
256
215
|
}
|
|
@@ -265,8 +224,9 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
265
224
|
* - c := c_acc (final commitment to be checked on L1)
|
|
266
225
|
* - gamma := poseidon2(gamma_acc, z) (challenge for linear combination of y and C, above)
|
|
267
226
|
*
|
|
227
|
+
* @param verifyProof - Whether to verify the KZG proof.
|
|
268
228
|
* @returns A batched blob.
|
|
269
|
-
*/ async finalize() {
|
|
229
|
+
*/ async finalize(verifyProof = false) {
|
|
270
230
|
// All values in acc are final, apart from gamma := poseidon2(gammaAcc, z):
|
|
271
231
|
const calculatedGamma = await poseidon2Hash([
|
|
272
232
|
this.gammaAcc,
|
|
@@ -279,10 +239,11 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
279
239
|
if (!calculatedGamma.equals(this.finalBlobChallenges.gamma.toBN254Fr())) {
|
|
280
240
|
throw new Error(`Blob batching mismatch: accumulated gamma ${calculatedGamma} does not equal injected gamma ${this.finalBlobChallenges.gamma.toBN254Fr()}`);
|
|
281
241
|
}
|
|
282
|
-
|
|
242
|
+
const batchedBlob = new BatchedBlob(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.qAcc);
|
|
243
|
+
if (verifyProof && !batchedBlob.verify()) {
|
|
283
244
|
throw new Error(`KZG proof did not verify.`);
|
|
284
245
|
}
|
|
285
|
-
return
|
|
246
|
+
return batchedBlob;
|
|
286
247
|
}
|
|
287
248
|
isEmptyState() {
|
|
288
249
|
return this.blobCommitmentsHashAcc.isZero() && this.zAcc.isZero() && this.yAcc.isZero() && this.cAcc.isZero() && this.qAcc.isZero() && this.gammaAcc.isZero() && this.gammaPow.isZero();
|
|
@@ -290,10 +251,10 @@ const { computeKzgProof, verifyKzgProof } = cKzg;
|
|
|
290
251
|
clone() {
|
|
291
252
|
return new BatchedBlobAccumulator(Fr.fromBuffer(this.blobCommitmentsHashAcc.toBuffer()), Fr.fromBuffer(this.zAcc.toBuffer()), BLS12Fr.fromBuffer(this.yAcc.toBuffer()), BLS12Point.fromBuffer(this.cAcc.toBuffer()), BLS12Point.fromBuffer(this.qAcc.toBuffer()), Fr.fromBuffer(this.gammaAcc.toBuffer()), BLS12Fr.fromBuffer(this.gammaPow.toBuffer()), FinalBlobBatchingChallenges.fromBuffer(this.finalBlobChallenges.toBuffer()));
|
|
292
253
|
}
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
254
|
+
toBlobAccumulator() {
|
|
255
|
+
return new BlobAccumulator(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.gammaAcc, this.gammaPow);
|
|
256
|
+
}
|
|
257
|
+
toFinalBlobAccumulator() {
|
|
258
|
+
return new FinalBlobAccumulator(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc);
|
|
259
|
+
}
|
|
299
260
|
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { BLS12Point, Fr } from '@aztec/foundation/fields';
|
|
2
|
+
import { Blob } from './blob.js';
|
|
3
|
+
/**
|
|
4
|
+
* @param blobs - The blobs to emit.
|
|
5
|
+
* @returns The blobs' compressed commitments in hex prefixed by the number of blobs. 1 byte for the prefix, 48 bytes
|
|
6
|
+
* per blob commitment.
|
|
7
|
+
* @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs.
|
|
8
|
+
*/
|
|
9
|
+
export declare function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}`;
|
|
10
|
+
/**
|
|
11
|
+
* @param fields - Fields to broadcast in the blob(s)
|
|
12
|
+
* @returns As many blobs as required to broadcast the given fields to an L1 block.
|
|
13
|
+
*
|
|
14
|
+
* @throws If the number of fields does not match what's indicated by the checkpoint prefix.
|
|
15
|
+
*/
|
|
16
|
+
export declare function getBlobsPerL1Block(fields: Fr[]): Blob[];
|
|
17
|
+
/**
|
|
18
|
+
* Get the fields from all blobs in the checkpoint. Ignoring the fields beyond the length specified by the
|
|
19
|
+
* checkpoint prefix (the first field).
|
|
20
|
+
*
|
|
21
|
+
* @param blobs - The blobs to read fields from. Should be all the blobs in the L1 block proposing the checkpoint.
|
|
22
|
+
* @param checkEncoding - Whether to check if the entire encoded blob fields are valid. If false, it will still check
|
|
23
|
+
* the checkpoint prefix and throw if there's not enough fields.
|
|
24
|
+
* @returns The fields added throughout the checkpoint.
|
|
25
|
+
*/
|
|
26
|
+
export declare function getBlobFieldsInCheckpoint(blobs: Blob[], checkEncoding?: boolean): Fr[];
|
|
27
|
+
export declare function computeBlobFieldsHashFromBlobs(blobs: Blob[]): Promise<Fr>;
|
|
28
|
+
export declare function computeBlobsHashFromBlobs(blobs: Blob[]): Fr;
|
|
29
|
+
export declare function getBlobCommitmentsFromBlobs(blobs: Blob[]): BLS12Point[];
|
|
30
|
+
//# sourceMappingURL=blob_utils.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"blob_utils.d.ts","sourceRoot":"","sources":["../src/blob_utils.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAE1D,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAIjC;;;;;GAKG;AACH,wBAAgB,6BAA6B,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,KAAK,MAAM,EAAE,CAS1E;AAED;;;;;GAKG;AACH,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,CASvD;AAED;;;;;;;;GAQG;AACH,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,aAAa,UAAQ,GAAG,EAAE,EAAE,CAEpF;AAED,wBAAsB,8BAA8B,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAQ/E;AAED,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,EAAE,CAE3D;AAED,wBAAgB,2BAA2B,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,UAAU,EAAE,CAEvE"}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { FIELDS_PER_BLOB } from '@aztec/constants';
|
|
2
|
+
import { BLS12Point } from '@aztec/foundation/fields';
|
|
3
|
+
import { Blob } from './blob.js';
|
|
4
|
+
import { deserializeEncodedBlobToFields } from './deserialize.js';
|
|
5
|
+
import { computeBlobFieldsHash, computeBlobsHash } from './hash.js';
|
|
6
|
+
/**
|
|
7
|
+
* @param blobs - The blobs to emit.
|
|
8
|
+
* @returns The blobs' compressed commitments in hex prefixed by the number of blobs. 1 byte for the prefix, 48 bytes
|
|
9
|
+
* per blob commitment.
|
|
10
|
+
* @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs.
|
|
11
|
+
*/ export function getPrefixedEthBlobCommitments(blobs) {
|
|
12
|
+
// Prefix the number of blobs.
|
|
13
|
+
const lenBuf = Buffer.alloc(1);
|
|
14
|
+
lenBuf.writeUint8(blobs.length);
|
|
15
|
+
const blobBuf = Buffer.concat(blobs.map((blob)=>blob.commitment));
|
|
16
|
+
const buf = Buffer.concat([
|
|
17
|
+
lenBuf,
|
|
18
|
+
blobBuf
|
|
19
|
+
]);
|
|
20
|
+
return `0x${buf.toString('hex')}`;
|
|
21
|
+
}
|
|
22
|
+
/**
 * @param fields - Fields to broadcast in the blob(s)
 * @returns As many blobs as required to broadcast the given fields to an L1 block.
 *
 * @throws If the number of fields does not match what's indicated by the checkpoint prefix.
 */ export function getBlobsPerL1Block(fields) {
    if (fields.length === 0) {
        throw new Error('Cannot create blobs from empty fields.');
    }
    // Chunk the fields into FIELDS_PER_BLOB-sized slices, building one blob per slice.
    const blobs = [];
    for (let start = 0; start < fields.length; start += FIELDS_PER_BLOB) {
        blobs.push(Blob.fromFields(fields.slice(start, start + FIELDS_PER_BLOB)));
    }
    return blobs;
}
|
|
36
|
+
/**
 * Get the fields from all blobs in the checkpoint. Ignoring the fields beyond the length specified by the
 * checkpoint prefix (the first field).
 *
 * @param blobs - The blobs to read fields from. Should be all the blobs in the L1 block proposing the checkpoint.
 * @param checkEncoding - Whether to check if the entire encoded blob fields are valid. If false, it will still check
 * the checkpoint prefix and throw if there's not enough fields.
 * @returns The fields added throughout the checkpoint.
 */ export function getBlobFieldsInCheckpoint(blobs, checkEncoding = false) {
    // Concatenate the raw data of every blob, then decode it as one field stream.
    const concatenated = Buffer.concat(blobs.map((blob) => blob.data));
    return deserializeEncodedBlobToFields(concatenated, checkEncoding);
}
|
|
47
|
+
/**
 * Computes the hash over the checkpoint's effective blob fields.
 *
 * The first blob field is a prefix encoding how many fields are meaningful; fields
 * beyond that count are excluded from the hash.
 *
 * @param blobs - The blobs whose fields are hashed, in order.
 * @returns The hash of the first `numBlobFields` fields.
 * @throws If no fields are present, or the prefix claims more fields than exist.
 */ export async function computeBlobFieldsHashFromBlobs(blobs) {
    const fields = blobs.flatMap((b) => b.toFields());
    // Guard explicitly so an empty input fails with a clear message instead of an
    // opaque TypeError when reading the prefix field below.
    if (fields.length === 0) {
        throw new Error('Cannot compute blob fields hash: no blob fields provided.');
    }
    const numBlobFields = fields[0].toNumber();
    if (numBlobFields > fields.length) {
        throw new Error(`The prefix indicates ${numBlobFields} fields. Got ${fields.length}.`);
    }
    return await computeBlobFieldsHash(fields.slice(0, numBlobFields));
}
|
|
55
|
+
/**
 * Combines the EVM versioned blob hashes of all given blobs into a single hash.
 *
 * @param blobs - The blobs whose versioned hashes are combined.
 * @returns The hash over every blob's versioned blob hash, in order.
 */ export function computeBlobsHashFromBlobs(blobs) {
    const versionedHashes = blobs.map((blob) => blob.getEthVersionedBlobHash());
    return computeBlobsHash(versionedHashes);
}
|
|
58
|
+
/**
 * Decompresses each blob's compressed commitment into a BLS12 point.
 *
 * @param blobs - The blobs whose commitments are decompressed.
 * @returns One decompressed point per blob, in order.
 */ export function getBlobCommitmentsFromBlobs(blobs) {
    return blobs.map(({ commitment }) => BLS12Point.decompress(commitment));
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
|
|
2
|
+
import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
|
|
3
|
+
/**
 * Running accumulator state threaded through the blob-batching protocol circuits.
 *
 * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr` for documentation.
 */
export declare class BlobAccumulator {
    // NOTE(review): field semantics follow the `*Acc` naming and the Noir ABI above;
    // presumably running accumulations of commitment hash, challenge z, evaluation y,
    // commitment point c, and gamma/its running power — confirm against the .nr source.
    blobCommitmentsHashAcc: Fr;
    zAcc: Fr;
    yAcc: BLS12Fr;
    cAcc: BLS12Point;
    gammaAcc: Fr;
    gammaPowAcc: BLS12Fr;
    constructor(blobCommitmentsHashAcc: Fr, zAcc: Fr, yAcc: BLS12Fr, cAcc: BLS12Point, gammaAcc: Fr, gammaPowAcc: BLS12Fr);
    // All-zero accumulator (zero fields, zero point) — see implementation.
    static empty(): BlobAccumulator;
    // Field-by-field structural equality.
    equals(other: BlobAccumulator): boolean;
    // Buffer round-trip: fromBuffer(toBuffer()) reconstructs an equal instance.
    static fromBuffer(buffer: Buffer | BufferReader): BlobAccumulator;
    toBuffer(): Buffer<ArrayBufferLike>;
    // Field round-trip mirroring the circuit's flattened limb layout.
    toFields(): Fr[];
    static fromFields(fields: Fr[] | FieldReader): BlobAccumulator;
}
|
|
21
|
+
//# sourceMappingURL=blob_accumulator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"blob_accumulator.d.ts","sourceRoot":"","sources":["../../src/circuit_types/blob_accumulator.ts"],"names":[],"mappings":"AACA,OAAO,EAAW,OAAO,EAAE,UAAU,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC5E,OAAO,EAAE,YAAY,EAAE,WAAW,EAAqB,MAAM,6BAA6B,CAAC;AAE3F;;GAEG;AACH,qBAAa,eAAe;IAEjB,sBAAsB,EAAE,EAAE;IAC1B,IAAI,EAAE,EAAE;IACR,IAAI,EAAE,OAAO;IACb,IAAI,EAAE,UAAU;IAChB,QAAQ,EAAE,EAAE;IACZ,WAAW,EAAE,OAAO;gBALpB,sBAAsB,EAAE,EAAE,EAC1B,IAAI,EAAE,EAAE,EACR,IAAI,EAAE,OAAO,EACb,IAAI,EAAE,UAAU,EAChB,QAAQ,EAAE,EAAE,EACZ,WAAW,EAAE,OAAO;IAG7B,MAAM,CAAC,KAAK,IAAI,eAAe;IAI/B,MAAM,CAAC,KAAK,EAAE,eAAe;IAW7B,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,eAAe;IAYjE,QAAQ;IAWR,QAAQ;IAaR,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,WAAW,GAAG,eAAe;CAe/D"}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
|
|
2
|
+
import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
|
|
3
|
+
import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
4
|
+
/**
 * Running accumulator state threaded through the blob-batching protocol circuits.
 *
 * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr` for documentation.
 */ export class BlobAccumulator {
    blobCommitmentsHashAcc;
    zAcc;
    yAcc;
    cAcc;
    gammaAcc;
    gammaPowAcc;
    constructor(blobCommitmentsHashAcc, zAcc, yAcc, cAcc, gammaAcc, gammaPowAcc){
        this.blobCommitmentsHashAcc = blobCommitmentsHashAcc;
        this.zAcc = zAcc;
        this.yAcc = yAcc;
        this.cAcc = cAcc;
        this.gammaAcc = gammaAcc;
        this.gammaPowAcc = gammaPowAcc;
    }
    /** All-zero accumulator (zero scalars, point at zero). */
    static empty() {
        return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
    }
    /** Field-by-field structural equality. */
    equals(other) {
        return this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) && this.zAcc.equals(other.zAcc) && this.yAcc.equals(other.yAcc) && this.cAcc.equals(other.cAcc) && this.gammaAcc.equals(other.gammaAcc) && this.gammaPowAcc.equals(other.gammaPowAcc);
    }
    /** Deserializes in the same order `toBuffer` writes. */
    static fromBuffer(buffer) {
        const reader = BufferReader.asReader(buffer);
        return new BlobAccumulator(Fr.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader), BLS12Point.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader));
    }
    toBuffer() {
        return serializeToBuffer(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.gammaAcc, this.gammaPowAcc);
    }
    /**
     * Flattens the accumulator into Fr fields, expanding BLS12 values into their
     * NoirBigNum limbs. Layout must stay in lockstep with `fromFields`.
     */
    toFields() {
        // Use an explicit single-argument lambda rather than the bare `Fr.fromString`
        // reference: Array.prototype.map also forwards (index, array) to its callback,
        // and an unbound static reference loses its receiver — both latent hazards.
        const limbToFr = (limb)=>Fr.fromString(limb);
        return [
            this.blobCommitmentsHashAcc,
            this.zAcc,
            ...this.yAcc.toNoirBigNum().limbs.map(limbToFr),
            ...this.cAcc.x.toNoirBigNum().limbs.map(limbToFr),
            ...this.cAcc.y.toNoirBigNum().limbs.map(limbToFr),
            new Fr(this.cAcc.isInfinite),
            this.gammaAcc,
            ...this.gammaPowAcc.toNoirBigNum().limbs.map(limbToFr)
        ];
    }
    /** Inverse of `toFields`; reads limbs in the exact order they were written. */
    static fromFields(fields) {
        const reader = FieldReader.asReader(fields);
        // Local readers mirror the limb layout emitted by `toFields`.
        const readBLSFr = ()=>BLS12Fr.fromNoirBigNum({
            limbs: reader.readFieldArray(BLS12_FR_LIMBS).map((f)=>f.toString())
        });
        const readBLSFq = ()=>BLS12Fq.fromNoirBigNum({
            limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map((f)=>f.toString())
        });
        const blobCommitmentsHashAcc = reader.readField();
        const zAcc = reader.readField();
        const yAcc = readBLSFr();
        const cAcc = new BLS12Point(readBLSFq(), readBLSFq(), reader.readBoolean());
        const gammaAcc = reader.readField();
        const gammaPowAcc = readBLSFr();
        return new BlobAccumulator(blobCommitmentsHashAcc, zAcc, yAcc, cAcc, gammaAcc, gammaPowAcc);
    }
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
|
|
2
|
+
import { BufferReader } from '@aztec/foundation/serialize';
|
|
3
|
+
import { inspect } from 'util';
|
|
4
|
+
/**
 * Final, settled accumulator output of the blob-batching protocol circuits.
 *
 * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
 */
export declare class FinalBlobAccumulator {
    // NOTE(review): field semantics follow the Noir ABI above — presumably the
    // finalized commitments hash, challenge z, evaluation y, and commitment point c;
    // confirm against the .nr source.
    blobCommitmentsHash: Fr;
    z: Fr;
    y: BLS12Fr;
    c: BLS12Point;
    constructor(blobCommitmentsHash: Fr, z: Fr, y: BLS12Fr, c: BLS12Point);
    // All-zero instance — see implementation.
    static empty(): FinalBlobAccumulator;
    // Buffer round-trip: fromBuffer(toBuffer()) reconstructs an equal instance.
    static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator;
    toBuffer(): Buffer<ArrayBufferLike>;
    toFields(): Fr[];
    toString(): string;
    // Field-by-field structural equality.
    equals(other: FinalBlobAccumulator): boolean;
    // Randomized instance, presumably for testing — confirm usage before relying on distribution.
    static random(): FinalBlobAccumulator;
    // Custom node `util.inspect` rendering.
    [inspect.custom](): string;
}
|
|
22
|
+
//# sourceMappingURL=final_blob_accumulator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"final_blob_accumulator.d.ts","sourceRoot":"","sources":["../../src/circuit_types/final_blob_accumulator.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AACnE,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAE9E,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAE/B;;GAEG;AACH,qBAAa,oBAAoB;IAEtB,mBAAmB,EAAE,EAAE;IACvB,CAAC,EAAE,EAAE;IACL,CAAC,EAAE,OAAO;IACV,CAAC,EAAE,UAAU;gBAHb,mBAAmB,EAAE,EAAE,EACvB,CAAC,EAAE,EAAE,EACL,CAAC,EAAE,OAAO,EACV,CAAC,EAAE,UAAU;IAGtB,MAAM,CAAC,KAAK,IAAI,oBAAoB;IAIpC,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,oBAAoB;IAUtE,QAAQ;IAIR,QAAQ;IAUR,QAAQ;IAQR,MAAM,CAAC,KAAK,EAAE,oBAAoB;IAUlC,MAAM,CAAC,MAAM;IAIb,CAAC,OAAO,CAAC,MAAM,CAAC;CAQjB"}
|