@aztec/blob-lib 0.0.0-test.1 → 0.0.1-fake-c83136db25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. package/dest/blob.d.ts +58 -99
  2. package/dest/blob.d.ts.map +1 -1
  3. package/dest/blob.js +83 -183
  4. package/dest/blob_batching.d.ts +155 -0
  5. package/dest/blob_batching.d.ts.map +1 -0
  6. package/dest/blob_batching.js +260 -0
  7. package/dest/blob_utils.d.ts +30 -0
  8. package/dest/blob_utils.d.ts.map +1 -0
  9. package/dest/blob_utils.js +60 -0
  10. package/dest/circuit_types/blob_accumulator.d.ts +21 -0
  11. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  12. package/dest/circuit_types/blob_accumulator.js +58 -0
  13. package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
  14. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/final_blob_accumulator.js +63 -0
  16. package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
  17. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
  19. package/dest/circuit_types/index.d.ts +4 -0
  20. package/dest/circuit_types/index.d.ts.map +1 -0
  21. package/dest/circuit_types/index.js +4 -0
  22. package/dest/deserialize.d.ts +14 -0
  23. package/dest/deserialize.d.ts.map +1 -0
  24. package/dest/deserialize.js +33 -0
  25. package/dest/encoding.d.ts +22 -62
  26. package/dest/encoding.d.ts.map +1 -1
  27. package/dest/encoding.js +114 -104
  28. package/dest/hash.d.ts +35 -0
  29. package/dest/hash.d.ts.map +1 -0
  30. package/dest/hash.js +69 -0
  31. package/dest/index.d.ts +6 -2
  32. package/dest/index.d.ts.map +1 -1
  33. package/dest/index.js +6 -15
  34. package/dest/interface.d.ts +1 -2
  35. package/dest/interface.d.ts.map +1 -1
  36. package/dest/kzg_context.d.ts +4 -0
  37. package/dest/kzg_context.d.ts.map +1 -0
  38. package/dest/kzg_context.js +5 -0
  39. package/dest/sponge_blob.d.ts +15 -13
  40. package/dest/sponge_blob.d.ts.map +1 -1
  41. package/dest/sponge_blob.js +28 -17
  42. package/dest/testing.d.ts +12 -16
  43. package/dest/testing.d.ts.map +1 -1
  44. package/dest/testing.js +60 -46
  45. package/dest/types.d.ts +16 -0
  46. package/dest/types.d.ts.map +1 -0
  47. package/dest/types.js +3 -0
  48. package/package.json +16 -12
  49. package/src/blob.ts +82 -221
  50. package/src/blob_batching.ts +335 -0
  51. package/src/blob_utils.ts +71 -0
  52. package/src/circuit_types/blob_accumulator.ts +84 -0
  53. package/src/circuit_types/final_blob_accumulator.ts +75 -0
  54. package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
  55. package/src/circuit_types/index.ts +4 -0
  56. package/src/deserialize.ts +38 -0
  57. package/src/encoding.ts +136 -120
  58. package/src/hash.ts +77 -0
  59. package/src/index.ts +6 -19
  60. package/src/interface.ts +1 -4
  61. package/src/kzg_context.ts +5 -0
  62. package/src/sponge_blob.ts +24 -14
  63. package/src/testing.ts +68 -43
  64. package/src/trusted_setup_bit_reversed.json +4100 -0
  65. package/src/types.ts +16 -0
  66. package/dest/blob_public_inputs.d.ts +0 -50
  67. package/dest/blob_public_inputs.d.ts.map +0 -1
  68. package/dest/blob_public_inputs.js +0 -146
  69. package/src/blob_public_inputs.ts +0 -157
package/src/blob_batching.ts
@@ -0,0 +1,335 @@
+ import { AZTEC_MAX_EPOCH_DURATION, BLOBS_PER_BLOCK } from '@aztec/constants';
+ import { poseidon2Hash, sha256ToField } from '@aztec/foundation/crypto';
+ import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
+
+ import { Blob } from './blob.js';
+ import { computeBlobFieldsHashFromBlobs } from './blob_utils.js';
+ import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
+ import { computeEthVersionedBlobHash, hashNoirBigNumLimbs } from './hash.js';
+ import { kzg } from './kzg_context.js';
+
+ /**
+  * A class to create, manage, and prove batched EVM blobs.
+  */
+ export class BatchedBlob {
+   constructor(
+     /** Hash of Cs (to link to L1 blob hashes). */
+     public readonly blobCommitmentsHash: Fr,
+     /** Challenge point z such that p_i(z) = y_i. */
+     public readonly z: Fr,
+     /** Evaluation y, linear combination of all evaluations y_i = p_i(z) with gamma. */
+     public readonly y: BLS12Fr,
+     /** Commitment C, linear combination of all commitments C_i = [p_i] with gamma. */
+     public readonly commitment: BLS12Point,
+     /** KZG opening 'proof' Q (commitment to the quotient poly.), linear combination of all blob kzg 'proofs' Q_i with gamma. */
+     public readonly q: BLS12Point,
+   ) {}
+
+   /**
+    * Get the final batched opening proof from multiple blobs.
+    * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
+    * beforehand from ALL blobs.
+    *
+    * @returns A batched blob.
+    */
+   static async batch(blobs: Blob[][]): Promise<BatchedBlob> {
+     if (blobs.length > AZTEC_MAX_EPOCH_DURATION) {
+       throw new Error(
+         `Too many blocks sent to batch(). The maximum is ${AZTEC_MAX_EPOCH_DURATION}. Got ${blobs.length}.`,
+       );
+     }
+
+     // Precalculate the values (z and gamma) and initialize the accumulator:
+     let acc = await this.newAccumulator(blobs);
+     // Now we can create a multi opening proof of all input blobs:
+     for (const blockBlobs of blobs) {
+       acc = await acc.accumulateBlobs(blockBlobs);
+     }
+     return await acc.finalize();
+   }
+
+   /**
+    * Returns an empty BatchedBlobAccumulator with precomputed challenges from all blobs in the epoch.
+    * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
+    * beforehand from ALL blobs.
+    */
+   static async newAccumulator(blobs: Blob[][]): Promise<BatchedBlobAccumulator> {
+     const finalBlobChallenges = await this.precomputeBatchedBlobChallenges(blobs);
+     return BatchedBlobAccumulator.newWithChallenges(finalBlobChallenges);
+   }
+
+   /**
+    * Gets the final challenges based on all blobs and their elements to perform a multi opening proof.
+    * Used in BatchedBlobAccumulator as 'finalZ' and 'finalGamma':
+    * - z = H(...H(H(z_0, z_1) z_2)..z_n)
+    *   - where z_i = H(H(fields of blob_i), C_i) = Blob.challengeZ,
+    *   - used such that p_i(z) = y_i = Blob.evaluationY for all n blob polynomials p_i().
+    * - gamma = H(H(...H(H(y_0, y_1) y_2)..y_n), z)
+    *   - used such that y = sum_i { gamma^i * y_i }, and C = sum_i { gamma^i * C_i }, for all blob evaluations y_i (see above) and commitments C_i.
+    *
+    * @param blobs - The blobs to precompute the challenges for. Each sub-array is the blobs for an L1 block.
+    * @returns Challenges z and gamma.
+    */
+   static async precomputeBatchedBlobChallenges(blobs: Blob[][]): Promise<FinalBlobBatchingChallenges> {
+     // Compute the final challenge z to evaluate the blobs.
+     let z: Fr | undefined;
+     for (const blockBlobs of blobs) {
+       // Compute the hash of all the fields in the block.
+       const blobFieldsHash = await computeBlobFieldsHashFromBlobs(blockBlobs);
+       for (const blob of blockBlobs) {
+         // Compute the challenge z for each blob and accumulate it.
+         const challengeZ = await blob.computeChallengeZ(blobFieldsHash);
+         if (!z) {
+           z = challengeZ;
+         } else {
+           z = await poseidon2Hash([z, challengeZ]);
+         }
+       }
+     }
+     if (!z) {
+       throw new Error('No blobs to precompute challenges for.');
+     }
+
+     // Now we have a shared challenge for all blobs, evaluate them...
+     const allBlobs = blobs.flat();
+     const proofObjects = allBlobs.map(b => b.evaluate(z));
+     const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y)));
+     // ...and find the challenge for the linear combination of blobs.
+     let gamma = evaluations[0];
+     // We start at i = 1, because gamma is initialized as the first blob's evaluation.
+     for (let i = 1; i < allBlobs.length; i++) {
+       gamma = await poseidon2Hash([gamma, evaluations[i]]);
+     }
+     gamma = await poseidon2Hash([gamma, z]);
+
+     return new FinalBlobBatchingChallenges(z, BLS12Fr.fromBN254Fr(gamma));
+   }
+
+   verify() {
+     return kzg.verifyKzgProof(this.commitment.compress(), this.z.toBuffer(), this.y.toBuffer(), this.q.compress());
+   }
+
+   // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
+   getEthVersionedBlobHash(): Buffer {
+     return computeEthVersionedBlobHash(this.commitment.compress());
+   }
+
+   /**
+    * Returns a proof of opening of the blobs to verify on L1 using the point evaluation precompile:
+    *
+    * input[:32] - versioned_hash
+    * input[32:64] - z
+    * input[64:96] - y
+    * input[96:144] - commitment C
+    * input[144:192] - commitment Q (a 'proof' committing to the quotient polynomial q(X))
+    *
+    * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
+    */
+   getEthBlobEvaluationInputs(): `0x${string}` {
+     const buf = Buffer.concat([
+       this.getEthVersionedBlobHash(),
+       this.z.toBuffer(),
+       this.y.toBuffer(),
+       this.commitment.compress(),
+       this.q.compress(),
+     ]);
+     return `0x${buf.toString('hex')}`;
+   }
+
+   toFinalBlobAccumulator() {
+     return new FinalBlobAccumulator(this.blobCommitmentsHash, this.z, this.y, this.commitment);
+   }
+ }
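
A minimal usage sketch of the class above (not part of the package source), written as if it lived inside this package. It assumes the encoded fields for each L1 block of the epoch are already available, and uses getBlobsPerL1Block from blob_utils.ts, which appears later in this diff. The returned hex string is the 192-byte input (versioned_hash | z | y | C | Q) expected by the EIP-4844 point evaluation precompile.

import { Fr } from '@aztec/foundation/fields';

import { BatchedBlob } from './blob_batching.js';
import { getBlobsPerL1Block } from './blob_utils.js';

// `epochBlockFields` holds the encoded blob fields for each L1 block of the epoch.
async function batchEpoch(epochBlockFields: Fr[][]): Promise<`0x${string}`> {
  const blobsPerBlock = epochBlockFields.map(fields => getBlobsPerL1Block(fields));
  // Precomputes z and gamma from ALL blobs, accumulates them, and finalizes in one call.
  const batched = await BatchedBlob.batch(blobsPerBlock);
  // 32 (versioned hash) + 32 (z) + 32 (y) + 48 (C) + 48 (Q) = 192 bytes, hex encoded.
  return batched.getEthBlobEvaluationInputs();
}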
+
+ /**
+  * See noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr
+  */
+ export class BatchedBlobAccumulator {
+   constructor(
+     /** Hash of Cs (to link to L1 blob hashes). */
+     public readonly blobCommitmentsHashAcc: Fr,
+     /** Challenge point z_acc. Final value used such that p_i(z) = y_i. */
+     public readonly zAcc: Fr,
+     /** Evaluation y_acc. Final value is the linear combination of all evaluations y_i = p_i(z) with gamma. */
+     public readonly yAcc: BLS12Fr,
+     /** Commitment c_acc. Final value is the linear combination of all commitments C_i = [p_i] with gamma. */
+     public readonly cAcc: BLS12Point,
+     /** KZG opening q_acc. Final value is the linear combination of all blob kzg 'proofs' Q_i with gamma. */
+     public readonly qAcc: BLS12Point,
+     /**
+      * Challenge point gamma_acc for multi opening. Used with y, C, and kzg 'proof' Q above.
+      * TODO(#13608): We calculate this by hashing natively in the circuit (hence Fr representation), but it's actually used
+      * as a BLS12Fr field elt. Is this safe? Is there a skew?
+      */
+     public readonly gammaAcc: Fr,
+     /** Simply gamma^(i + 1) at blob i. Used for calculating the i'th element of the above linear combinations. */
+     public readonly gammaPow: BLS12Fr,
+     /** Final challenge values used in evaluation. Optimistically input and checked in the final acc. */
+     public readonly finalBlobChallenges: FinalBlobBatchingChallenges,
+   ) {}
+
+   /**
+    * Create the empty accumulation state of the epoch.
+    * @returns An empty blob accumulator with challenges.
+    */
+   static newWithChallenges(finalBlobChallenges: FinalBlobBatchingChallenges): BatchedBlobAccumulator {
+     return new BatchedBlobAccumulator(
+       Fr.ZERO,
+       Fr.ZERO,
+       BLS12Fr.ZERO,
+       BLS12Point.ZERO,
+       BLS12Point.ZERO,
+       Fr.ZERO,
+       BLS12Fr.ZERO,
+       finalBlobChallenges,
+     );
+   }
+
+   /**
+    * Given blob i, accumulate all state.
+    * We assume the input blob has not been evaluated at z.
+    * @returns An updated blob accumulator.
+    */
+   private async accumulate(blob: Blob, blobFieldsHash: Fr) {
+     const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z);
+     const thisC = BLS12Point.decompress(blob.commitment);
+     const thisQ = BLS12Point.decompress(proof);
+     const blobChallengeZ = await blob.computeChallengeZ(blobFieldsHash);
+
+     if (this.isEmptyState()) {
+       /**
+        * Init the first accumulation state of the epoch.
+        * - v_acc := sha256(C_0)
+        * - z_acc := z_0
+        * - y_acc := gamma^0 * y_0 = y_0
+        * - c_acc := gamma^0 * c_0 = c_0
+        * - gamma_acc := poseidon2(y_0.limbs)
+        * - gamma^(i + 1) = gamma^1 = gamma // denoted gamma_pow_acc
+        */
+       return new BatchedBlobAccumulator(
+         sha256ToField([blob.commitment]), // blobCommitmentsHashAcc = sha256(C_0)
+         blobChallengeZ, // zAcc = z_0
+         thisY, // yAcc = gamma^0 * y_0 = 1 * y_0
+         thisC, // cAcc = gamma^0 * C_0 = 1 * C_0
+         thisQ, // qAcc = gamma^0 * Q_0 = 1 * Q_0
+         await hashNoirBigNumLimbs(thisY), // gammaAcc = poseidon2(y_0.limbs)
+         this.finalBlobChallenges.gamma, // gammaPow = gamma^(i + 1) = gamma^1 = gamma
+         this.finalBlobChallenges,
+       );
+     } else {
+       // Moving from i - 1 to i, so:
+       return new BatchedBlobAccumulator(
+         sha256ToField([this.blobCommitmentsHashAcc, blob.commitment]), // blobCommitmentsHashAcc := sha256(blobCommitmentsHashAcc, C_i)
+         await poseidon2Hash([this.zAcc, blobChallengeZ]), // zAcc := poseidon2(zAcc, z_i)
+         this.yAcc.add(thisY.mul(this.gammaPow)), // yAcc := yAcc + (gamma^i * y_i)
+         this.cAcc.add(thisC.mul(this.gammaPow)), // cAcc := cAcc + (gamma^i * C_i)
+         this.qAcc.add(thisQ.mul(this.gammaPow)), // qAcc := qAcc + (gamma^i * Q_i)
+         await poseidon2Hash([this.gammaAcc, await hashNoirBigNumLimbs(thisY)]), // gammaAcc := poseidon2(gammaAcc, poseidon2(y_i.limbs))
+         this.gammaPow.mul(this.finalBlobChallenges.gamma), // gammaPow = gamma^(i + 1) = gamma^i * final_gamma
+         this.finalBlobChallenges,
+       );
+     }
+   }
+
+   /**
+    * Given blobs, accumulate all state.
+    * We assume the input blobs have not been evaluated at z.
+    * @param blobs - The blobs to accumulate. They should be in the same L1 block.
+    * @returns An updated blob accumulator.
+    */
+   async accumulateBlobs(blobs: Blob[]) {
+     if (blobs.length > BLOBS_PER_BLOCK) {
+       throw new Error(
+         `Too many blobs to accumulate. The maximum is ${BLOBS_PER_BLOCK} per block. Got ${blobs.length}.`,
+       );
+     }
+
+     // Compute the hash of all the fields in the block.
+     const blobFieldsHash = await computeBlobFieldsHashFromBlobs(blobs);
+
+     // Initialize the acc to iterate over:
+     let acc: BatchedBlobAccumulator = this.clone();
+     for (const blob of blobs) {
+       acc = await acc.accumulate(blob, blobFieldsHash);
+     }
+     return acc;
+   }
+
+   /**
+    * Finalize accumulation state of the epoch.
+    * We assume ALL blobs in the epoch have been accumulated.
+    *
+    * Final accumulated values:
+    * - v := v_acc (hash of all commitments (C_i s) to be checked on L1)
+    * - z := z_acc (final challenge, at which all blobs are evaluated)
+    * - y := y_acc (final opening to be checked on L1)
+    * - c := c_acc (final commitment to be checked on L1)
+    * - gamma := poseidon2(gamma_acc, z) (challenge for linear combination of y and C, above)
+    *
+    * @param verifyProof - Whether to verify the KZG proof.
+    * @returns A batched blob.
+    */
+   async finalize(verifyProof = false): Promise<BatchedBlob> {
+     // All values in acc are final, apart from gamma := poseidon2(gammaAcc, z):
+     const calculatedGamma = await poseidon2Hash([this.gammaAcc, this.zAcc]);
+     // Check final values:
+     if (!this.zAcc.equals(this.finalBlobChallenges.z)) {
+       throw new Error(
+         `Blob batching mismatch: accumulated z ${this.zAcc} does not equal injected z ${this.finalBlobChallenges.z}`,
+       );
+     }
+     if (!calculatedGamma.equals(this.finalBlobChallenges.gamma.toBN254Fr())) {
+       throw new Error(
+         `Blob batching mismatch: accumulated gamma ${calculatedGamma} does not equal injected gamma ${this.finalBlobChallenges.gamma.toBN254Fr()}`,
+       );
+     }
+
+     const batchedBlob = new BatchedBlob(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.qAcc);
+
+     if (verifyProof && !batchedBlob.verify()) {
+       throw new Error(`KZG proof did not verify.`);
+     }
+
+     return batchedBlob;
+   }
+
+   isEmptyState() {
+     return (
+       this.blobCommitmentsHashAcc.isZero() &&
+       this.zAcc.isZero() &&
+       this.yAcc.isZero() &&
+       this.cAcc.isZero() &&
+       this.qAcc.isZero() &&
+       this.gammaAcc.isZero() &&
+       this.gammaPow.isZero()
+     );
+   }
+
+   clone() {
+     return new BatchedBlobAccumulator(
+       Fr.fromBuffer(this.blobCommitmentsHashAcc.toBuffer()),
+       Fr.fromBuffer(this.zAcc.toBuffer()),
+       BLS12Fr.fromBuffer(this.yAcc.toBuffer()),
+       BLS12Point.fromBuffer(this.cAcc.toBuffer()),
+       BLS12Point.fromBuffer(this.qAcc.toBuffer()),
+       Fr.fromBuffer(this.gammaAcc.toBuffer()),
+       BLS12Fr.fromBuffer(this.gammaPow.toBuffer()),
+       FinalBlobBatchingChallenges.fromBuffer(this.finalBlobChallenges.toBuffer()),
+     );
+   }
+
+   toBlobAccumulator() {
+     return new BlobAccumulator(
+       this.blobCommitmentsHashAcc,
+       this.zAcc,
+       this.yAcc,
+       this.cAcc,
+       this.gammaAcc,
+       this.gammaPow,
+     );
+   }
+
+   toFinalBlobAccumulator() {
+     return new FinalBlobAccumulator(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc);
+   }
+ }
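
BatchedBlob.batch() wraps the accumulator above; a sketch (not part of the package source) of the same flow done incrementally, one L1 block at a time, under the stated constraint that every blob of the epoch must be known up front so z and gamma can be precomputed:

import { Blob } from './blob.js';
import { BatchedBlob, BatchedBlobAccumulator } from './blob_batching.js';

async function accumulateEpoch(blobsPerBlock: Blob[][]) {
  // Challenges are derived from ALL blobs before any accumulation happens.
  let acc: BatchedBlobAccumulator = await BatchedBlob.newAccumulator(blobsPerBlock);
  for (const blockBlobs of blobsPerBlock) {
    acc = await acc.accumulateBlobs(blockBlobs);
    // acc.toBlobAccumulator() exposes the intermediate, circuit-facing state at this point.
  }
  // finalize() checks the accumulated z and gamma against the precomputed challenges.
  return await acc.finalize(/* verifyProof */ true);
}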
package/src/blob_utils.ts
@@ -0,0 +1,71 @@
+ import { FIELDS_PER_BLOB } from '@aztec/constants';
+ import { BLS12Point, Fr } from '@aztec/foundation/fields';
+
+ import { Blob } from './blob.js';
+ import { deserializeEncodedBlobToFields } from './deserialize.js';
+ import { computeBlobFieldsHash, computeBlobsHash } from './hash.js';
+
+ /**
+  * @param blobs - The blobs to emit.
+  * @returns The blobs' compressed commitments in hex prefixed by the number of blobs. 1 byte for the prefix, 48 bytes
+  * per blob commitment.
+  * @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs.
+  */
+ export function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` {
+   // Prefix the number of blobs.
+   const lenBuf = Buffer.alloc(1);
+   lenBuf.writeUint8(blobs.length);
+
+   const blobBuf = Buffer.concat(blobs.map(blob => blob.commitment));
+
+   const buf = Buffer.concat([lenBuf, blobBuf]);
+   return `0x${buf.toString('hex')}`;
+ }
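
A sketch of the layout produced by getPrefixedEthBlobCommitments above (not part of the package source), assuming two hypothetical blobs; the hex string is 2 + 2 * (1 + 48 * blobs.length) characters long.

// 1 length byte followed by one 48-byte compressed commitment per blob.
const prefixed = getPrefixedEthBlobCommitments([blobA, blobB]); // blobA, blobB: hypothetical Blobs
// prefixed.slice(0, 4) === '0x02'            -- two blobs
// prefixed.length === 2 + 2 * (1 + 48 * 2)   -- 196 characters in total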
+
+ /**
+  * @param fields - Fields to broadcast in the blob(s)
+  * @returns As many blobs as required to broadcast the given fields to an L1 block.
+  *
+  * @throws If the number of fields does not match what's indicated by the checkpoint prefix.
+  */
+ export function getBlobsPerL1Block(fields: Fr[]): Blob[] {
+   if (!fields.length) {
+     throw new Error('Cannot create blobs from empty fields.');
+   }
+
+   const numBlobs = Math.ceil(fields.length / FIELDS_PER_BLOB);
+   return Array.from({ length: numBlobs }, (_, i) =>
+     Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)),
+   );
+ }
+
+ /**
+  * Get the fields from all blobs in the checkpoint, ignoring the fields beyond the length specified by the
+  * checkpoint prefix (the first field).
+  *
+  * @param blobs - The blobs to read fields from. Should be all the blobs in the L1 block proposing the checkpoint.
+  * @param checkEncoding - Whether to check that the entire encoded blob fields are valid. If false, it will still check
+  * the checkpoint prefix and throw if there are not enough fields.
+  * @returns The fields added throughout the checkpoint.
+  */
+ export function getBlobFieldsInCheckpoint(blobs: Blob[], checkEncoding = false): Fr[] {
+   return deserializeEncodedBlobToFields(Buffer.concat(blobs.map(b => b.data)), checkEncoding);
+ }
+
+ export async function computeBlobFieldsHashFromBlobs(blobs: Blob[]): Promise<Fr> {
+   const fields = blobs.map(b => b.toFields()).flat();
+   const numBlobFields = fields[0].toNumber();
+   if (numBlobFields > fields.length) {
+     throw new Error(`The prefix indicates ${numBlobFields} fields. Got ${fields.length}.`);
+   }
+
+   return await computeBlobFieldsHash(fields.slice(0, numBlobFields));
+ }
+
+ export function computeBlobsHashFromBlobs(blobs: Blob[]): Fr {
+   return computeBlobsHash(blobs.map(b => b.getEthVersionedBlobHash()));
+ }
+
+ export function getBlobCommitmentsFromBlobs(blobs: Blob[]): BLS12Point[] {
+   return blobs.map(b => BLS12Point.decompress(b.commitment));
+ }
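
A round-trip sketch tying the helpers above together (not part of the package source). It assumes `encodedFields` already starts with the checkpoint length prefix expected by the deserializer, as produced by the encoding helpers elsewhere in this package:

import { Fr } from '@aztec/foundation/fields';

import { getBlobFieldsInCheckpoint, getBlobsPerL1Block } from './blob_utils.js';

function roundTrip(encodedFields: Fr[]): Fr[] {
  const blobs = getBlobsPerL1Block(encodedFields);
  // Returns only the fields covered by the checkpoint prefix; any zero-padding used to
  // fill the last blob is dropped.
  return getBlobFieldsInCheckpoint(blobs, /* checkEncoding */ true);
}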
package/src/circuit_types/blob_accumulator.ts
@@ -0,0 +1,84 @@
+ import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
+ import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
+ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+ /**
+  * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr` for documentation.
+  */
+ export class BlobAccumulator {
+   constructor(
+     public blobCommitmentsHashAcc: Fr,
+     public zAcc: Fr,
+     public yAcc: BLS12Fr,
+     public cAcc: BLS12Point,
+     public gammaAcc: Fr,
+     public gammaPowAcc: BLS12Fr,
+   ) {}
+
+   static empty(): BlobAccumulator {
+     return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
+   }
+
+   equals(other: BlobAccumulator) {
+     return (
+       this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) &&
+       this.zAcc.equals(other.zAcc) &&
+       this.yAcc.equals(other.yAcc) &&
+       this.cAcc.equals(other.cAcc) &&
+       this.gammaAcc.equals(other.gammaAcc) &&
+       this.gammaPowAcc.equals(other.gammaPowAcc)
+     );
+   }
+
+   static fromBuffer(buffer: Buffer | BufferReader): BlobAccumulator {
+     const reader = BufferReader.asReader(buffer);
+     return new BlobAccumulator(
+       Fr.fromBuffer(reader),
+       Fr.fromBuffer(reader),
+       BLS12Fr.fromBuffer(reader),
+       BLS12Point.fromBuffer(reader),
+       Fr.fromBuffer(reader),
+       BLS12Fr.fromBuffer(reader),
+     );
+   }
+
+   toBuffer() {
+     return serializeToBuffer(
+       this.blobCommitmentsHashAcc,
+       this.zAcc,
+       this.yAcc,
+       this.cAcc,
+       this.gammaAcc,
+       this.gammaPowAcc,
+     );
+   }
+
+   toFields() {
+     return [
+       this.blobCommitmentsHashAcc,
+       this.zAcc,
+       ...this.yAcc.toNoirBigNum().limbs.map(Fr.fromString),
+       ...this.cAcc.x.toNoirBigNum().limbs.map(Fr.fromString),
+       ...this.cAcc.y.toNoirBigNum().limbs.map(Fr.fromString),
+       new Fr(this.cAcc.isInfinite),
+       this.gammaAcc,
+       ...this.gammaPowAcc.toNoirBigNum().limbs.map(Fr.fromString),
+     ];
+   }
+
+   static fromFields(fields: Fr[] | FieldReader): BlobAccumulator {
+     const reader = FieldReader.asReader(fields);
+     return new BlobAccumulator(
+       reader.readField(),
+       reader.readField(),
+       BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
+       new BLS12Point(
+         BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
+         BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
+         reader.readBoolean(),
+       ),
+       reader.readField(),
+       BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
+     );
+   }
+ }
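
Since toFields()/fromFields() above mirror the Noir struct layout (limb counts come from BLS12_FR_LIMBS and BLS12_FQ_LIMBS), a simple round-trip sketch (not part of the package source):

const acc = BlobAccumulator.empty();
const restored = BlobAccumulator.fromFields(acc.toFields());
// restored.equals(acc) === true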
package/src/circuit_types/final_blob_accumulator.ts
@@ -0,0 +1,75 @@
+ import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
+ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+ import { inspect } from 'util';
+
+ /**
+  * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
+  */
+ export class FinalBlobAccumulator {
+   constructor(
+     public blobCommitmentsHash: Fr,
+     public z: Fr,
+     public y: BLS12Fr,
+     public c: BLS12Point,
+   ) {}
+
+   static empty(): FinalBlobAccumulator {
+     return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
+   }
+
+   static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator {
+     const reader = BufferReader.asReader(buffer);
+     return new FinalBlobAccumulator(
+       Fr.fromBuffer(reader),
+       Fr.fromBuffer(reader),
+       BLS12Fr.fromBuffer(reader),
+       BLS12Point.fromBuffer(reader),
+     );
+   }
+
+   toBuffer() {
+     return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
+   }
+
+   toFields() {
+     return [
+       this.blobCommitmentsHash,
+       this.z,
+       ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
+       ...this.c.toBN254Fields(),
+     ];
+   }
+
+   // The below is used to send to L1 for proof verification
+   toString() {
+     // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
+     // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
+     // we use calldata rather than a slice in memory:
+     const buf = Buffer.concat([Buffer.alloc(32), this.z.toBuffer(), this.y.toBuffer(), this.c.compress()]);
+     return buf.toString('hex');
+   }
+
+   equals(other: FinalBlobAccumulator) {
+     return (
+       this.blobCommitmentsHash.equals(other.blobCommitmentsHash) &&
+       this.z.equals(other.z) &&
+       this.y.equals(other.y) &&
+       this.c.equals(other.c)
+     );
+   }
+
+   // Creates a random instance. Used for testing only - will not prove/verify.
+   static random() {
+     return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
+   }
+
+   [inspect.custom]() {
+     return `FinalBlobAccumulator {
+       blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
+       z: ${inspect(this.z)},
+       y: ${inspect(this.y)},
+       c: ${inspect(this.c)},
+     }`;
+   }
+ }
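
The toString() layout above is 32 zero bytes (the unused 'blobHash' slot) + 32 bytes z + 32 bytes y + 48 bytes compressed C = 144 bytes, so the returned hex string carries no 0x prefix and is 288 characters. A sketch (not part of the package source):

const finalAcc = FinalBlobAccumulator.random();
const hex = finalAcc.toString();
// hex.length === 288 && hex.startsWith('00'.repeat(32))  -- 64 hex chars of zero padding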
package/src/circuit_types/final_blob_batching_challenges.ts
@@ -0,0 +1,29 @@
+ import { BLS12Fr, Fr } from '@aztec/foundation/fields';
+ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+ /**
+  * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
+  */
+ export class FinalBlobBatchingChallenges {
+   constructor(
+     public readonly z: Fr,
+     public readonly gamma: BLS12Fr,
+   ) {}
+
+   equals(other: FinalBlobBatchingChallenges) {
+     return this.z.equals(other.z) && this.gamma.equals(other.gamma);
+   }
+
+   static empty(): FinalBlobBatchingChallenges {
+     return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
+   }
+
+   static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges {
+     const reader = BufferReader.asReader(buffer);
+     return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
+   }
+
+   toBuffer() {
+     return serializeToBuffer(this.z, this.gamma);
+   }
+ }
package/src/circuit_types/index.ts
@@ -0,0 +1,4 @@
+ /// Types used in the protocol circuits.
+ export * from './blob_accumulator.js';
+ export * from './final_blob_accumulator.js';
+ export * from './final_blob_batching_challenges.js';
package/src/deserialize.ts
@@ -0,0 +1,38 @@
+ import { Fr } from '@aztec/foundation/fields';
+ import { BufferReader } from '@aztec/foundation/serialize';
+
+ import { checkBlobFieldsEncoding } from './encoding.js';
+ import { BlobDeserializationError } from './errors.js';
+
+ /**
+  * Deserializes a buffer into an array of field elements.
+  *
+  * This function returns the fields that were actually added in a checkpoint. The number of fields is specified by the
+  * first field.
+  *
+  * @param buf - The buffer to deserialize.
+  * @param checkEncoding - Whether to check if the encoding is correct. If false, it will still check the checkpoint
+  * prefix and throw if there are not enough fields.
+  * @returns An array of field elements.
+  */
+ export function deserializeEncodedBlobToFields(buf: Uint8Array, checkEncoding = false): Fr[] {
+   const reader = BufferReader.asReader(buf);
+   const firstField = reader.readObject(Fr);
+
+   // Use toBigInt instead of toNumber so that we can catch it and throw a more descriptive error below if the first
+   // field is larger than a javascript integer.
+   const numFields = firstField.toBigInt();
+   const totalFieldsInBuffer = BigInt(buf.length / Fr.SIZE_IN_BYTES);
+   if (numFields > totalFieldsInBuffer) {
+     throw new BlobDeserializationError(`Failed to deserialize blob fields, this blob was likely not created by us`);
+   }
+
+   const numFieldsWithoutPrefix = Number(numFields) - 1;
+   const blobFields = [firstField].concat(reader.readArray(numFieldsWithoutPrefix, Fr));
+
+   if (checkEncoding && !checkBlobFieldsEncoding(blobFields)) {
+     throw new BlobDeserializationError(`Incorrect encoding of blob fields, this blob was likely not created by us`);
+   }
+
+   return blobFields;
+ }
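
A hypothetical sketch of the length-prefix semantics above (not part of the package source). The first field is the total field count including itself, so a prefix of 3 means two more fields are read and anything after that (e.g. blob zero-padding) is ignored. It assumes Fr can be constructed from a small number, as elsewhere in the Aztec codebase:

import { Fr } from '@aztec/foundation/fields';

import { deserializeEncodedBlobToFields } from './deserialize.js';

const [a, b] = [Fr.random(), Fr.random()];
const buf = Buffer.concat([new Fr(3), a, b, Fr.ZERO].map(f => f.toBuffer()));
const fields = deserializeEncodedBlobToFields(buf);
// fields.length === 3; fields[1].equals(a) && fields[2].equals(b); the trailing padding field is ignored.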