@aztec/blob-lib 3.0.0-nightly.20251026 → 3.0.0-nightly.20251031

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dest/blob.d.ts +47 -89
  2. package/dest/blob.d.ts.map +1 -1
  3. package/dest/blob.js +62 -160
  4. package/dest/blob_batching.d.ts +14 -46
  5. package/dest/blob_batching.d.ts.map +1 -1
  6. package/dest/blob_batching.js +80 -100
  7. package/dest/blob_utils.d.ts +30 -0
  8. package/dest/blob_utils.d.ts.map +1 -0
  9. package/dest/blob_utils.js +60 -0
  10. package/dest/circuit_types/blob_accumulator.d.ts +21 -0
  11. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  12. package/dest/circuit_types/blob_accumulator.js +58 -0
  13. package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
  14. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/final_blob_accumulator.js +63 -0
  16. package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
  17. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
  19. package/dest/circuit_types/index.d.ts +4 -0
  20. package/dest/circuit_types/index.d.ts.map +1 -0
  21. package/dest/circuit_types/index.js +4 -0
  22. package/dest/deserialize.d.ts +7 -41
  23. package/dest/deserialize.d.ts.map +1 -1
  24. package/dest/deserialize.js +25 -73
  25. package/dest/encoding.d.ts +5 -0
  26. package/dest/encoding.d.ts.map +1 -1
  27. package/dest/encoding.js +35 -0
  28. package/dest/hash.d.ts +35 -0
  29. package/dest/hash.d.ts.map +1 -0
  30. package/dest/hash.js +69 -0
  31. package/dest/index.d.ts +4 -2
  32. package/dest/index.d.ts.map +1 -1
  33. package/dest/index.js +4 -2
  34. package/dest/sponge_blob.d.ts +13 -9
  35. package/dest/sponge_blob.d.ts.map +1 -1
  36. package/dest/sponge_blob.js +28 -17
  37. package/dest/testing.d.ts +7 -2
  38. package/dest/testing.d.ts.map +1 -1
  39. package/dest/testing.js +47 -14
  40. package/dest/types.d.ts +2 -0
  41. package/dest/types.d.ts.map +1 -1
  42. package/dest/types.js +2 -0
  43. package/package.json +4 -4
  44. package/src/blob.ts +67 -180
  45. package/src/blob_batching.ts +109 -119
  46. package/src/blob_utils.ts +71 -0
  47. package/src/circuit_types/blob_accumulator.ts +84 -0
  48. package/src/circuit_types/final_blob_accumulator.ts +75 -0
  49. package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
  50. package/src/circuit_types/index.ts +4 -0
  51. package/src/deserialize.ts +24 -79
  52. package/src/encoding.ts +45 -0
  53. package/src/hash.ts +77 -0
  54. package/src/index.ts +4 -2
  55. package/src/sponge_blob.ts +24 -14
  56. package/src/testing.ts +53 -16
  57. package/src/types.ts +2 -2
  58. package/dest/blob_batching_public_inputs.d.ts +0 -57
  59. package/dest/blob_batching_public_inputs.d.ts.map +0 -1
  60. package/dest/blob_batching_public_inputs.js +0 -144
  61. package/src/blob_batching_public_inputs.ts +0 -211
package/src/blob_batching.ts
@@ -1,9 +1,11 @@
 import { AZTEC_MAX_EPOCH_DURATION, BLOBS_PER_BLOCK } from '@aztec/constants';
-import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
-import { BLS12Field, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+import { poseidon2Hash, sha256ToField } from '@aztec/foundation/crypto';
+import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
 
-import { Blob, VERSIONED_HASH_VERSION_KZG } from './blob.js';
+import { Blob } from './blob.js';
+import { computeBlobFieldsHashFromBlobs } from './blob_utils.js';
+import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
+import { computeEthVersionedBlobHash, hashNoirBigNumLimbs } from './hash.js';
 import { kzg } from './kzg_context.js';
 
 /**
@@ -30,17 +32,19 @@ export class BatchedBlob
    *
    * @returns A batched blob.
    */
-  static async batch(blobs: Blob[]): Promise<BatchedBlob> {
-    const numBlobs = blobs.length;
-    if (numBlobs > BLOBS_PER_BLOCK * AZTEC_MAX_EPOCH_DURATION) {
+  static async batch(blobs: Blob[][]): Promise<BatchedBlob> {
+    if (blobs.length > AZTEC_MAX_EPOCH_DURATION) {
       throw new Error(
-        `Too many blobs (${numBlobs}) sent to batch(). The maximum is ${BLOBS_PER_BLOCK * AZTEC_MAX_EPOCH_DURATION}.`,
+        `Too many blocks sent to batch(). The maximum is ${AZTEC_MAX_EPOCH_DURATION}. Got ${blobs.length}.`,
       );
     }
+
     // Precalculate the values (z and gamma) and initialize the accumulator:
     let acc = await this.newAccumulator(blobs);
     // Now we can create a multi opening proof of all input blobs:
-    acc = await acc.accumulateBlobs(blobs);
+    for (const blockBlobs of blobs) {
+      acc = await acc.accumulateBlobs(blockBlobs);
+    }
     return await acc.finalize();
   }
 
@@ -49,7 +53,7 @@ export class BatchedBlob
    * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
    * beforehand from ALL blobs.
    */
-  static async newAccumulator(blobs: Blob[]): Promise<BatchedBlobAccumulator> {
+  static async newAccumulator(blobs: Blob[][]): Promise<BatchedBlobAccumulator> {
     const finalBlobChallenges = await this.precomputeBatchedBlobChallenges(blobs);
     return BatchedBlobAccumulator.newWithChallenges(finalBlobChallenges);
   }
@@ -62,40 +66,52 @@ export class BatchedBlob
    * - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
    * - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
    * - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }, for all blob evaluations y_i (see above) and commitments C_i.
+   *
+   * @param blobs - The blobs to precompute the challenges for. Each sub-array is the blobs for an L1 block.
    * @returns Challenges z and gamma.
    */
-  static async precomputeBatchedBlobChallenges(blobs: Blob[]): Promise<FinalBlobBatchingChallenges> {
-    // We need to precompute the final challenge values to evaluate the blobs.
-    let z = blobs[0].challengeZ;
-    // We start at i = 1, because z is initialized as the first blob's challenge.
-    for (let i = 1; i < blobs.length; i++) {
-      z = await poseidon2Hash([z, blobs[i].challengeZ]);
+  static async precomputeBatchedBlobChallenges(blobs: Blob[][]): Promise<FinalBlobBatchingChallenges> {
+    // Compute the final challenge z to evaluate the blobs.
+    let z: Fr | undefined;
+    for (const blockBlobs of blobs) {
+      // Compute the hash of all the fields in the block.
+      const blobFieldsHash = await computeBlobFieldsHashFromBlobs(blockBlobs);
+      for (const blob of blockBlobs) {
+        // Compute the challenge z for each blob and accumulate it.
+        const challengeZ = await blob.computeChallengeZ(blobFieldsHash);
+        if (!z) {
+          z = challengeZ;
+        } else {
+          z = await poseidon2Hash([z, challengeZ]);
+        }
+      }
+    }
+    if (!z) {
+      throw new Error('No blobs to precompute challenges for.');
     }
+
     // Now we have a shared challenge for all blobs, evaluate them...
-    const proofObjects = blobs.map(b => kzg.computeKzgProof(b.data, z.toBuffer()));
-    const evaluations = proofObjects.map(([_, evaluation]) => BLS12Fr.fromBuffer(Buffer.from(evaluation)));
+    const allBlobs = blobs.flat();
+    const proofObjects = allBlobs.map(b => b.evaluate(z));
+    const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y)));
     // ...and find the challenge for the linear combination of blobs.
-    let gamma = await hashNoirBigNumLimbs(evaluations[0]);
+    let gamma = evaluations[0];
     // We start at i = 1, because gamma is initialized as the first blob's evaluation.
-    for (let i = 1; i < blobs.length; i++) {
-      gamma = await poseidon2Hash([gamma, await hashNoirBigNumLimbs(evaluations[i])]);
+    for (let i = 1; i < allBlobs.length; i++) {
+      gamma = await poseidon2Hash([gamma, evaluations[i]]);
     }
     gamma = await poseidon2Hash([gamma, z]);
 
     return new FinalBlobBatchingChallenges(z, BLS12Fr.fromBN254Fr(gamma));
   }
 
-  // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
-  getEthVersionedBlobHash(): Buffer {
-    const hash = sha256(this.commitment.compress());
-    hash[0] = VERSIONED_HASH_VERSION_KZG;
-    return hash;
+  verify() {
+    return kzg.verifyKzgProof(this.commitment.compress(), this.z.toBuffer(), this.y.toBuffer(), this.q.compress());
   }
 
-  static getEthVersionedBlobHash(commitment: Buffer): Buffer {
-    const hash = sha256(commitment);
-    hash[0] = VERSIONED_HASH_VERSION_KZG;
-    return hash;
+  // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
+  getEthVersionedBlobHash(): Buffer {
+    return computeEthVersionedBlobHash(this.commitment.compress());
   }
 
   /**
@@ -119,44 +135,9 @@ export class BatchedBlob
     ]);
     return `0x${buf.toString('hex')}`;
   }
-}
 
-/**
- * Final values z and gamma are injected into each block root circuit. We ensure they are correct by:
- * - Checking equality in each block merge circuit and propagating up
- * - Checking final z_acc == z in root circuit
- * - Checking final gamma_acc == gamma in root circuit
- *
- * - z = H(...H(H(z_0, z_1) z_2)..z_n)
- * - where z_i = H(H(fields of blob_i), C_i),
- * - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
- * - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
- * - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }
- * for all blob evaluations y_i (see above) and commitments C_i.
- *
- * Iteratively calculated by BlobAccumulator.accumulate() in nr. See also precomputeBatchedBlobChallenges() above.
- */
-export class FinalBlobBatchingChallenges {
-  constructor(
-    public readonly z: Fr,
-    public readonly gamma: BLS12Fr,
-  ) {}
-
-  equals(other: FinalBlobBatchingChallenges) {
-    return this.z.equals(other.z) && this.gamma.equals(other.gamma);
-  }
-
-  static empty(): FinalBlobBatchingChallenges {
-    return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
-  }
-
-  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges {
-    const reader = BufferReader.asReader(buffer);
-    return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
-  }
-
-  toBuffer() {
-    return serializeToBuffer(this.z, this.gamma);
+  toFinalBlobAccumulator() {
+    return new FinalBlobAccumulator(this.blobCommitmentsHash, this.z, this.y, this.commitment);
   }
 }
 
@@ -187,39 +168,6 @@ export class BatchedBlobAccumulator
     public readonly finalBlobChallenges: FinalBlobBatchingChallenges,
   ) {}
 
-  /**
-   * Init the first accumulation state of the epoch.
-   * We assume the input blob has not been evaluated at z.
-   *
-   * First state of the accumulator:
-   * - v_acc := sha256(C_0)
-   * - z_acc := z_0
-   * - y_acc := gamma^0 * y_0 = y_0
-   * - c_acc := gamma^0 * c_0 = c_0
-   * - gamma_acc := poseidon2(y_0.limbs)
-   * - gamma^(i + 1) = gamma^1 = gamma // denoted gamma_pow_acc
-   *
-   * @returns An initial blob accumulator.
-   */
-  static async initialize(
-    blob: Blob,
-    finalBlobChallenges: FinalBlobBatchingChallenges,
-  ): Promise<BatchedBlobAccumulator> {
-    const [q, evaluation] = kzg.computeKzgProof(blob.data, finalBlobChallenges.z.toBuffer());
-    const firstY = BLS12Fr.fromBuffer(Buffer.from(evaluation));
-    // Here, i = 0, so:
-    return new BatchedBlobAccumulator(
-      sha256ToField([blob.commitment]), // blobCommitmentsHashAcc = sha256(C_0)
-      blob.challengeZ, // zAcc = z_0
-      firstY, // yAcc = gamma^0 * y_0 = 1 * y_0
-      BLS12Point.decompress(blob.commitment), // cAcc = gamma^0 * C_0 = 1 * C_0
-      BLS12Point.decompress(Buffer.from(q)), // qAcc = gamma^0 * Q_0 = 1 * Q_0
-      await hashNoirBigNumLimbs(firstY), // gammaAcc = poseidon2(y_0.limbs)
-      finalBlobChallenges.gamma, // gammaPow = gamma^(i + 1) = gamma^1 = gamma
-      finalBlobChallenges,
-    );
-  }
-
   /**
    * Create the empty accumulation state of the epoch.
    * @returns An empty blob accumulator with challenges.
@@ -242,20 +190,40 @@ export class BatchedBlobAccumulator
    * We assume the input blob has not been evaluated at z.
    * @returns An updated blob accumulator.
    */
-  async accumulate(blob: Blob) {
+  private async accumulate(blob: Blob, blobFieldsHash: Fr) {
+    const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z);
+    const thisC = BLS12Point.decompress(blob.commitment);
+    const thisQ = BLS12Point.decompress(proof);
+    const blobChallengeZ = await blob.computeChallengeZ(blobFieldsHash);
+
     if (this.isEmptyState()) {
-      return BatchedBlobAccumulator.initialize(blob, this.finalBlobChallenges);
+      /**
+       * Init the first accumulation state of the epoch.
+       * - v_acc := sha256(C_0)
+       * - z_acc := z_0
+       * - y_acc := gamma^0 * y_0 = y_0
+       * - c_acc := gamma^0 * c_0 = c_0
+       * - gamma_acc := poseidon2(y_0.limbs)
+       * - gamma^(i + 1) = gamma^1 = gamma // denoted gamma_pow_acc
+       */
+      return new BatchedBlobAccumulator(
+        sha256ToField([blob.commitment]), // blobCommitmentsHashAcc = sha256(C_0)
+        blobChallengeZ, // zAcc = z_0
+        thisY, // yAcc = gamma^0 * y_0 = 1 * y_0
+        thisC, // cAcc = gamma^0 * C_0 = 1 * C_0
+        thisQ, // qAcc = gamma^0 * Q_0 = 1 * Q_0
+        await hashNoirBigNumLimbs(thisY), // gammaAcc = poseidon2(y_0.limbs)
+        this.finalBlobChallenges.gamma, // gammaPow = gamma^(i + 1) = gamma^1 = gamma
+        this.finalBlobChallenges,
+      );
     } else {
-      const [q, evaluation] = kzg.computeKzgProof(blob.data, this.finalBlobChallenges.z.toBuffer());
-      const thisY = BLS12Fr.fromBuffer(Buffer.from(evaluation));
-
       // Moving from i - 1 to i, so:
      return new BatchedBlobAccumulator(
         sha256ToField([this.blobCommitmentsHashAcc, blob.commitment]), // blobCommitmentsHashAcc := sha256(blobCommitmentsHashAcc, C_i)
-        await poseidon2Hash([this.zAcc, blob.challengeZ]), // zAcc := poseidon2(zAcc, z_i)
+        await poseidon2Hash([this.zAcc, blobChallengeZ]), // zAcc := poseidon2(zAcc, z_i)
         this.yAcc.add(thisY.mul(this.gammaPow)), // yAcc := yAcc + (gamma^i * y_i)
-        this.cAcc.add(BLS12Point.decompress(blob.commitment).mul(this.gammaPow)), // cAcc := cAcc + (gamma^i * C_i)
-        this.qAcc.add(BLS12Point.decompress(Buffer.from(q)).mul(this.gammaPow)), // qAcc := qAcc + (gamma^i * C_i)
+        this.cAcc.add(thisC.mul(this.gammaPow)), // cAcc := cAcc + (gamma^i * C_i)
+        this.qAcc.add(thisQ.mul(this.gammaPow)), // qAcc := qAcc + (gamma^i * C_i)
         await poseidon2Hash([this.gammaAcc, await hashNoirBigNumLimbs(thisY)]), // gammaAcc := poseidon2(gammaAcc, poseidon2(y_i.limbs))
         this.gammaPow.mul(this.finalBlobChallenges.gamma), // gammaPow = gamma^(i + 1) = gamma^i * final_gamma
         this.finalBlobChallenges,
@@ -266,13 +234,23 @@ export class BatchedBlobAccumulator
   /**
    * Given blobs, accumulate all state.
    * We assume the input blobs have not been evaluated at z.
+   * @param blobs - The blobs to accumulate. They should be in the same L1 block.
    * @returns An updated blob accumulator.
    */
   async accumulateBlobs(blobs: Blob[]) {
+    if (blobs.length > BLOBS_PER_BLOCK) {
+      throw new Error(
+        `Too many blobs to accumulate. The maximum is ${BLOBS_PER_BLOCK} per block. Got ${blobs.length}.`,
+      );
+    }
+
+    // Compute the hash of all the fields in the block.
+    const blobFieldsHash = await computeBlobFieldsHashFromBlobs(blobs);
+
     // Initialize the acc to iterate over:
     let acc: BatchedBlobAccumulator = this.clone();
-    for (let i = 0; i < blobs.length; i++) {
-      acc = await acc.accumulate(blobs[i]);
+    for (const blob of blobs) {
+      acc = await acc.accumulate(blob, blobFieldsHash);
     }
     return acc;
   }
@@ -288,9 +266,10 @@ export class BatchedBlobAccumulator
   * - c := c_acc (final commitment to be checked on L1)
   * - gamma := poseidon2(gamma_acc, z) (challenge for linear combination of y and C, above)
   *
+  * @param verifyProof - Whether to verify the KZG proof.
   * @returns A batched blob.
   */
-  async finalize(): Promise<BatchedBlob> {
+  async finalize(verifyProof = false): Promise<BatchedBlob> {
     // All values in acc are final, apart from gamma := poseidon2(gammaAcc, z):
     const calculatedGamma = await poseidon2Hash([this.gammaAcc, this.zAcc]);
     // Check final values:
@@ -304,11 +283,14 @@ export class BatchedBlobAccumulator
         `Blob batching mismatch: accumulated gamma ${calculatedGamma} does not equal injected gamma ${this.finalBlobChallenges.gamma.toBN254Fr()}`,
       );
     }
-    if (!kzg.verifyKzgProof(this.cAcc.compress(), this.zAcc.toBuffer(), this.yAcc.toBuffer(), this.qAcc.compress())) {
+
+    const batchedBlob = new BatchedBlob(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.qAcc);
+
+    if (verifyProof && !batchedBlob.verify()) {
       throw new Error(`KZG proof did not verify.`);
     }
 
-    return new BatchedBlob(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.qAcc);
+    return batchedBlob;
   }
 
   isEmptyState() {
@@ -335,11 +317,19 @@ export class BatchedBlobAccumulator
       FinalBlobBatchingChallenges.fromBuffer(this.finalBlobChallenges.toBuffer()),
     );
   }
-}
 
-// To mimic the hash accumulation in the rollup circuits, here we hash
-// each u128 limb of the noir bignum struct representing the BLS field.
-async function hashNoirBigNumLimbs(field: BLS12Field): Promise<Fr> {
-  const num = field.toNoirBigNum();
-  return await poseidon2Hash(num.limbs.map(Fr.fromHexString));
+  toBlobAccumulator() {
+    return new BlobAccumulator(
+      this.blobCommitmentsHashAcc,
+      this.zAcc,
+      this.yAcc,
+      this.cAcc,
+      this.gammaAcc,
+      this.gammaPow,
+    );
+  }
+
+  toFinalBlobAccumulator() {
+    return new FinalBlobAccumulator(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc);
+  }
 }
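
Note: the following usage sketch is not part of the package diff. It shows how the reworked Blob[][] API above composes, and assumes BatchedBlob and Blob are re-exported from the @aztec/blob-lib root (the index.ts changes suggest this, but the re-exports are not shown here).

// Sketch only: batch an epoch's blobs, grouped as one sub-array per L1 block.
import { BatchedBlob, type Blob } from '@aztec/blob-lib';

async function batchEpoch(blobsPerBlock: Blob[][]): Promise<BatchedBlob> {
  // One-shot path: batch() precomputes z and gamma over ALL blobs up front,
  // then folds each block's blobs into the accumulator.
  return await BatchedBlob.batch(blobsPerBlock);
}

async function batchEpochIncrementally(blobsPerBlock: Blob[][]): Promise<BatchedBlob> {
  // Equivalent incremental path, mirroring what batch() does internally.
  let acc = await BatchedBlob.newAccumulator(blobsPerBlock); // challenges come from ALL blobs
  for (const blockBlobs of blobsPerBlock) {
    acc = await acc.accumulateBlobs(blockBlobs); // at most BLOBS_PER_BLOCK blobs per call
  }
  // finalize() now skips the KZG pairing check unless explicitly requested.
  return await acc.finalize(/* verifyProof */ true);
}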
package/src/blob_utils.ts
@@ -0,0 +1,71 @@
+import { FIELDS_PER_BLOB } from '@aztec/constants';
+import { BLS12Point, Fr } from '@aztec/foundation/fields';
+
+import { Blob } from './blob.js';
+import { deserializeEncodedBlobToFields } from './deserialize.js';
+import { computeBlobFieldsHash, computeBlobsHash } from './hash.js';
+
+/**
+ * @param blobs - The blobs to emit.
+ * @returns The blobs' compressed commitments in hex prefixed by the number of blobs. 1 byte for the prefix, 48 bytes
+ * per blob commitment.
+ * @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs.
+ */
+export function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` {
+  // Prefix the number of blobs.
+  const lenBuf = Buffer.alloc(1);
+  lenBuf.writeUint8(blobs.length);
+
+  const blobBuf = Buffer.concat(blobs.map(blob => blob.commitment));
+
+  const buf = Buffer.concat([lenBuf, blobBuf]);
+  return `0x${buf.toString('hex')}`;
+}
+
+/**
+ * @param fields - Fields to broadcast in the blob(s)
+ * @returns As many blobs as required to broadcast the given fields to an L1 block.
+ *
+ * @throws If the number of fields does not match what's indicated by the checkpoint prefix.
+ */
+export function getBlobsPerL1Block(fields: Fr[]): Blob[] {
+  if (!fields.length) {
+    throw new Error('Cannot create blobs from empty fields.');
+  }
+
+  const numBlobs = Math.ceil(fields.length / FIELDS_PER_BLOB);
+  return Array.from({ length: numBlobs }, (_, i) =>
+    Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)),
+  );
+}
+
+/**
+ * Get the fields from all blobs in the checkpoint. Ignoring the fields beyond the length specified by the
+ * checkpoint prefix (the first field).
+ *
+ * @param blobs - The blobs to read fields from. Should be all the blobs in the L1 block proposing the checkpoint.
+ * @param checkEncoding - Whether to check if the entire encoded blob fields are valid. If false, it will still check
+ * the checkpoint prefix and throw if there's not enough fields.
+ * @returns The fields added throughout the checkpoint.
+ */
+export function getBlobFieldsInCheckpoint(blobs: Blob[], checkEncoding = false): Fr[] {
+  return deserializeEncodedBlobToFields(Buffer.concat(blobs.map(b => b.data)), checkEncoding);
+}
+
+export async function computeBlobFieldsHashFromBlobs(blobs: Blob[]): Promise<Fr> {
+  const fields = blobs.map(b => b.toFields()).flat();
+  const numBlobFields = fields[0].toNumber();
+  if (numBlobFields > fields.length) {
+    throw new Error(`The prefix indicates ${numBlobFields} fields. Got ${fields.length}.`);
+  }
+
+  return await computeBlobFieldsHash(fields.slice(0, numBlobFields));
+}
+
+export function computeBlobsHashFromBlobs(blobs: Blob[]): Fr {
+  return computeBlobsHash(blobs.map(b => b.getEthVersionedBlobHash()));
+}
+
+export function getBlobCommitmentsFromBlobs(blobs: Blob[]): BLS12Point[] {
+  return blobs.map(b => BLS12Point.decompress(b.commitment));
+}
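
Note: a sketch (not from the package) of how these helpers compose for a checkpoint round trip; it assumes the helpers are re-exported from the @aztec/blob-lib root, and the field source is a placeholder.

// Sketch only: split checkpoint fields into blobs, then read them back.
import type { Fr } from '@aztec/foundation/fields';
import { getBlobFieldsInCheckpoint, getBlobsPerL1Block, getPrefixedEthBlobCommitments } from '@aztec/blob-lib';

function roundTripCheckpoint(checkpointFields: Fr[]) {
  // ceil(fields / FIELDS_PER_BLOB) blobs; throws on empty input.
  const blobs = getBlobsPerL1Block(checkpointFields);
  // 1-byte blob count + 48 bytes per compressed commitment, hex encoded.
  const commitments = getPrefixedEthBlobCommitments(blobs);
  // Reads back only the fields covered by the checkpoint prefix; passing true
  // also validates the full blob encoding.
  const fields = getBlobFieldsInCheckpoint(blobs, true);
  return { blobs, commitments, fields };
}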
package/src/circuit_types/blob_accumulator.ts
@@ -0,0 +1,84 @@
+import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
+import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
+import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+/**
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr` for documentation.
+ */
+export class BlobAccumulator {
+  constructor(
+    public blobCommitmentsHashAcc: Fr,
+    public zAcc: Fr,
+    public yAcc: BLS12Fr,
+    public cAcc: BLS12Point,
+    public gammaAcc: Fr,
+    public gammaPowAcc: BLS12Fr,
+  ) {}
+
+  static empty(): BlobAccumulator {
+    return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
+  }
+
+  equals(other: BlobAccumulator) {
+    return (
+      this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) &&
+      this.zAcc.equals(other.zAcc) &&
+      this.yAcc.equals(other.yAcc) &&
+      this.cAcc.equals(other.cAcc) &&
+      this.gammaAcc.equals(other.gammaAcc) &&
+      this.gammaPowAcc.equals(other.gammaPowAcc)
+    );
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): BlobAccumulator {
+    const reader = BufferReader.asReader(buffer);
+    return new BlobAccumulator(
+      Fr.fromBuffer(reader),
+      Fr.fromBuffer(reader),
+      BLS12Fr.fromBuffer(reader),
+      BLS12Point.fromBuffer(reader),
+      Fr.fromBuffer(reader),
+      BLS12Fr.fromBuffer(reader),
+    );
+  }
+
+  toBuffer() {
+    return serializeToBuffer(
+      this.blobCommitmentsHashAcc,
+      this.zAcc,
+      this.yAcc,
+      this.cAcc,
+      this.gammaAcc,
+      this.gammaPowAcc,
+    );
+  }
+
+  toFields() {
+    return [
+      this.blobCommitmentsHashAcc,
+      this.zAcc,
+      ...this.yAcc.toNoirBigNum().limbs.map(Fr.fromString),
+      ...this.cAcc.x.toNoirBigNum().limbs.map(Fr.fromString),
+      ...this.cAcc.y.toNoirBigNum().limbs.map(Fr.fromString),
+      new Fr(this.cAcc.isInfinite),
+      this.gammaAcc,
+      ...this.gammaPowAcc.toNoirBigNum().limbs.map(Fr.fromString),
+    ];
+  }
+
+  static fromFields(fields: Fr[] | FieldReader): BlobAccumulator {
+    const reader = FieldReader.asReader(fields);
+    return new BlobAccumulator(
+      reader.readField(),
+      reader.readField(),
+      BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
+      new BLS12Point(
+        BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
+        BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
+        reader.readBoolean(),
+      ),
+      reader.readField(),
+      BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
+    );
+  }
+}
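
Note: toFields() and fromFields() above must agree on the exact layout (two BN254 fields, the limb-decomposed BLS12 values, and the infinity flag). A minimal round-trip check, assuming BlobAccumulator is re-exported from the package root:

import assert from 'assert';
import { BlobAccumulator } from '@aztec/blob-lib';

const acc = BlobAccumulator.empty();
const restored = BlobAccumulator.fromFields(acc.toFields());
assert(restored.equals(acc)); // layout drift between the two methods would fail here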
package/src/circuit_types/final_blob_accumulator.ts
@@ -0,0 +1,75 @@
+import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+import { inspect } from 'util';
+
+/**
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
+ */
+export class FinalBlobAccumulator {
+  constructor(
+    public blobCommitmentsHash: Fr,
+    public z: Fr,
+    public y: BLS12Fr,
+    public c: BLS12Point,
+  ) {}
+
+  static empty(): FinalBlobAccumulator {
+    return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator {
+    const reader = BufferReader.asReader(buffer);
+    return new FinalBlobAccumulator(
+      Fr.fromBuffer(reader),
+      Fr.fromBuffer(reader),
+      BLS12Fr.fromBuffer(reader),
+      BLS12Point.fromBuffer(reader),
+    );
+  }
+
+  toBuffer() {
+    return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
+  }
+
+  toFields() {
+    return [
+      this.blobCommitmentsHash,
+      this.z,
+      ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
+      ...this.c.toBN254Fields(),
+    ];
+  }
+
+  // The below is used to send to L1 for proof verification
+  toString() {
+    // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
+    // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
+    // we use calldata rather than a slice in memory:
+    const buf = Buffer.concat([Buffer.alloc(32), this.z.toBuffer(), this.y.toBuffer(), this.c.compress()]);
+    return buf.toString('hex');
+  }
+
+  equals(other: FinalBlobAccumulator) {
+    return (
+      this.blobCommitmentsHash.equals(other.blobCommitmentsHash) &&
+      this.z.equals(other.z) &&
+      this.y.equals(other.y) &&
+      this.c.equals(other.c)
+    );
+  }
+
+  // Creates a random instance. Used for testing only - will not prove/verify.
+  static random() {
+    return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
+  }
+
+  [inspect.custom]() {
+    return `FinalBlobAccumulator {
+      blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
+      z: ${inspect(this.z)},
+      y: ${inspect(this.y)},
+      c: ${inspect(this.c)},
+    }`;
+  }
+}
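
Note: the toString() layout above is 32 zero bytes (the unused 'blobHash' slot) + z + y + the 48-byte compressed commitment; assuming z and y each serialize to 32 bytes, that is 144 bytes, i.e. 288 hex characters with no 0x prefix. A consumer-side sanity-check sketch (export path assumed):

import assert from 'assert';
import { FinalBlobAccumulator } from '@aztec/blob-lib';

const hex = FinalBlobAccumulator.random().toString();
assert(hex.length === 2 * (32 + 32 + 32 + 48)); // 288 hex chars, no 0x prefix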
package/src/circuit_types/final_blob_batching_challenges.ts
@@ -0,0 +1,29 @@
+import { BLS12Fr, Fr } from '@aztec/foundation/fields';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+/**
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
+ */
+export class FinalBlobBatchingChallenges {
+  constructor(
+    public readonly z: Fr,
+    public readonly gamma: BLS12Fr,
+  ) {}
+
+  equals(other: FinalBlobBatchingChallenges) {
+    return this.z.equals(other.z) && this.gamma.equals(other.gamma);
+  }
+
+  static empty(): FinalBlobBatchingChallenges {
+    return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges {
+    const reader = BufferReader.asReader(buffer);
+    return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
+  }
+
+  toBuffer() {
+    return serializeToBuffer(this.z, this.gamma);
+  }
+}
package/src/circuit_types/index.ts
@@ -0,0 +1,4 @@
+/// Types used in the protocol circuits.
+export * from './blob_accumulator.js';
+export * from './final_blob_accumulator.js';
+export * from './final_blob_batching_challenges.js';