@aztec/blob-lib 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. package/dest/batched_blob.d.ts +26 -0
  2. package/dest/batched_blob.d.ts.map +1 -0
  3. package/dest/batched_blob.js +20 -0
  4. package/dest/blob.d.ts +50 -99
  5. package/dest/blob.d.ts.map +1 -1
  6. package/dest/blob.js +78 -169
  7. package/dest/blob_batching.d.ts +41 -123
  8. package/dest/blob_batching.d.ts.map +1 -1
  9. package/dest/blob_batching.js +129 -203
  10. package/dest/blob_utils.d.ts +40 -0
  11. package/dest/blob_utils.d.ts.map +1 -0
  12. package/dest/blob_utils.js +69 -0
  13. package/dest/circuit_types/blob_accumulator.d.ts +23 -0
  14. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/blob_accumulator.js +62 -0
  16. package/dest/circuit_types/final_blob_accumulator.d.ts +23 -0
  17. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_accumulator.js +66 -0
  19. package/dest/circuit_types/final_blob_batching_challenges.d.ts +16 -0
  20. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  21. package/dest/circuit_types/final_blob_batching_challenges.js +26 -0
  22. package/dest/circuit_types/index.d.ts +4 -0
  23. package/dest/circuit_types/index.d.ts.map +1 -0
  24. package/dest/circuit_types/index.js +4 -0
  25. package/dest/encoding/block_blob_data.d.ts +22 -0
  26. package/dest/encoding/block_blob_data.d.ts.map +1 -0
  27. package/dest/encoding/block_blob_data.js +65 -0
  28. package/dest/encoding/block_end_marker.d.ts +11 -0
  29. package/dest/encoding/block_end_marker.d.ts.map +1 -0
  30. package/dest/encoding/block_end_marker.js +41 -0
  31. package/dest/encoding/block_end_state_field.d.ts +12 -0
  32. package/dest/encoding/block_end_state_field.d.ts.map +1 -0
  33. package/dest/encoding/block_end_state_field.js +39 -0
  34. package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
  35. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
  36. package/dest/encoding/checkpoint_blob_data.js +67 -0
  37. package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
  38. package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
  39. package/dest/encoding/checkpoint_end_marker.js +28 -0
  40. package/dest/encoding/fixtures.d.ts +41 -0
  41. package/dest/encoding/fixtures.d.ts.map +1 -0
  42. package/dest/encoding/fixtures.js +140 -0
  43. package/dest/encoding/index.d.ts +10 -0
  44. package/dest/encoding/index.d.ts.map +1 -0
  45. package/dest/encoding/index.js +9 -0
  46. package/dest/encoding/tx_blob_data.d.ts +19 -0
  47. package/dest/encoding/tx_blob_data.d.ts.map +1 -0
  48. package/dest/encoding/tx_blob_data.js +79 -0
  49. package/dest/encoding/tx_start_marker.d.ts +16 -0
  50. package/dest/encoding/tx_start_marker.d.ts.map +1 -0
  51. package/dest/encoding/tx_start_marker.js +77 -0
  52. package/dest/errors.d.ts +1 -1
  53. package/dest/errors.d.ts.map +1 -1
  54. package/dest/hash.d.ts +43 -0
  55. package/dest/hash.d.ts.map +1 -0
  56. package/dest/hash.js +80 -0
  57. package/dest/index.d.ts +7 -4
  58. package/dest/index.d.ts.map +1 -1
  59. package/dest/index.js +6 -16
  60. package/dest/interface.d.ts +1 -2
  61. package/dest/interface.d.ts.map +1 -1
  62. package/dest/kzg_context.d.ts +8 -0
  63. package/dest/kzg_context.d.ts.map +1 -0
  64. package/dest/kzg_context.js +14 -0
  65. package/dest/sponge_blob.d.ts +12 -14
  66. package/dest/sponge_blob.d.ts.map +1 -1
  67. package/dest/sponge_blob.js +26 -30
  68. package/dest/testing.d.ts +10 -23
  69. package/dest/testing.d.ts.map +1 -1
  70. package/dest/testing.js +37 -53
  71. package/dest/types.d.ts +17 -0
  72. package/dest/types.d.ts.map +1 -0
  73. package/dest/types.js +4 -0
  74. package/package.json +10 -7
  75. package/src/batched_blob.ts +26 -0
  76. package/src/blob.ts +81 -195
  77. package/src/blob_batching.ts +168 -231
  78. package/src/blob_utils.ts +82 -0
  79. package/src/circuit_types/blob_accumulator.ts +96 -0
  80. package/src/circuit_types/final_blob_accumulator.ts +76 -0
  81. package/src/circuit_types/final_blob_batching_challenges.ts +30 -0
  82. package/src/circuit_types/index.ts +4 -0
  83. package/src/encoding/block_blob_data.ts +102 -0
  84. package/src/encoding/block_end_marker.ts +55 -0
  85. package/src/encoding/block_end_state_field.ts +59 -0
  86. package/src/encoding/checkpoint_blob_data.ts +95 -0
  87. package/src/encoding/checkpoint_end_marker.ts +40 -0
  88. package/src/encoding/fixtures.ts +210 -0
  89. package/src/encoding/index.ts +9 -0
  90. package/src/encoding/tx_blob_data.ts +116 -0
  91. package/src/encoding/tx_start_marker.ts +97 -0
  92. package/src/hash.ts +89 -0
  93. package/src/index.ts +6 -19
  94. package/src/interface.ts +0 -1
  95. package/src/kzg_context.ts +16 -0
  96. package/src/sponge_blob.ts +28 -31
  97. package/src/testing.ts +48 -59
  98. package/src/types.ts +17 -0
  99. package/dest/blob_batching_public_inputs.d.ts +0 -71
  100. package/dest/blob_batching_public_inputs.d.ts.map +0 -1
  101. package/dest/blob_batching_public_inputs.js +0 -168
  102. package/dest/encoding.d.ts +0 -66
  103. package/dest/encoding.d.ts.map +0 -1
  104. package/dest/encoding.js +0 -113
  105. package/src/blob_batching_public_inputs.ts +0 -252
  106. package/src/encoding.ts +0 -138
package/src/blob.ts CHANGED
@@ -1,253 +1,173 @@
-import { poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
-import { Fr } from '@aztec/foundation/fields';
+import { FIELDS_PER_BLOB } from '@aztec/constants';
+import { BLS12Fr } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
 
-// Importing directly from 'c-kzg' does not work:
-import cKzg from 'c-kzg';
-import type { Blob as BlobBuffer } from 'c-kzg';
-
-import { deserializeEncodedBlobToFields, extractBlobFieldsFromBuffer } from './encoding.js';
-import { BlobDeserializationError } from './errors.js';
+import { computeBlobCommitment, computeChallengeZ, computeEthVersionedBlobHash } from './hash.js';
 import type { BlobJson } from './interface.js';
+import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, getKzg } from './kzg_context.js';
 
-const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;
-
-// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
-export const VERSIONED_HASH_VERSION_KZG = 0x01;
+export { FIELDS_PER_BLOB };
 
 /**
  * A class to create, manage, and prove EVM blobs.
+ *
+ * @dev Note: All methods in this class do not check the encoding of the given data. It's the responsibility of other
+ * components to ensure that the blob data (which might spread across multiple blobs) was created following the protocol
+ * and is correctly encoded.
  */
 export class Blob {
   constructor(
-    /** The blob to be broadcast on L1 in bytes form. */
-    public readonly data: BlobBuffer,
-    /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
-    public readonly fieldsHash: Fr,
-    /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y for a single blob, used as z_i in batching (see ./blob_batching.ts). */
-    public readonly challengeZ: Fr,
-    /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
+    /**
+     * The data to be broadcast on L1 in bytes form.
+     */
+    public readonly data: Uint8Array,
+    /**
+     * Commitment to the blob data. Used in compressed BLS12 point format (48 bytes).
+     */
     public readonly commitment: Buffer,
-  ) {}
+  ) {
+    if (data.length !== BYTES_PER_BLOB) {
+      throw new Error(`Blob data must be ${BYTES_PER_BLOB} bytes. Got ${data.length}.`);
+    }
+    if (commitment.length !== BYTES_PER_COMMITMENT) {
+      throw new Error(`Blob commitment must be ${BYTES_PER_COMMITMENT} bytes. Got ${commitment.length}.`);
+    }
+  }
 
   /**
-   * The encoded version of the blob will determine the end of the blob based on the transaction encoding.
-   * This is required when the fieldsHash of a blob will contain trailing zeros.
-   *
-   * See `./encoding.ts` for more details.
-   *
-   * This method is used to create a Blob from a buffer.
-   * @param blob - The buffer to create the Blob from.
-   * @param multiBlobFieldsHash - The fields hash to use for the Blob.
+   * Create a Blob from a buffer.
+   * @param data - The buffer of the Blob.
    * @returns A Blob created from the buffer.
    *
-   * @throws If unable to deserialize the blob.
+   * @throws If data does not match the expected length (BYTES_PER_BLOB).
    */
-  static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Promise<Blob> {
-    try {
-      const fields: Fr[] = deserializeEncodedBlobToFields(blob);
-      return Blob.fromFields(fields, multiBlobFieldsHash);
-    } catch {
-      throw new BlobDeserializationError(
-        `Failed to create Blob from encoded blob buffer, this blob was likely not created by us`,
-      );
-    }
+  static fromBlobBuffer(data: Uint8Array): Blob {
+    const commitment = computeBlobCommitment(data);
+    return new Blob(data, commitment);
   }
 
   /**
    * Create a Blob from an array of fields.
    *
+   * @dev This method pads 0s to the data, extending it to the size of a full blob.
+   *
    * @param fields - The array of fields to create the Blob from.
-   * @param multiBlobFieldsHash - The fields hash to use for the Blob.
    * @returns A Blob created from the array of fields.
    */
-  static async fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Promise<Blob> {
-    if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
-      throw new Error(
-        `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
-      );
+  static fromFields(fields: Fr[]): Blob {
+    if (fields.length > FIELDS_PER_BLOB) {
+      throw new Error(`Attempted to overfill blob with ${fields.length} fields. The maximum is ${FIELDS_PER_BLOB}.`);
     }
 
     const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
+    const commitment = computeBlobCommitment(data);
+    return new Blob(data, commitment);
+  }
 
-    // This matches the output of SpongeBlob.squeeze() in the blob circuit
-    const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : await poseidon2Hash(fields);
-    const commitment = Buffer.from(blobToKzgCommitment(data));
-    const challengeZ = await poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
-
-    return new Blob(data, fieldsHash, challengeZ, commitment);
+  /**
+   * Get the fields from the blob data.
+   *
+   * @dev WARNING: this method returns all fields
+   *
+   * @returns The fields from the blob.
+   */
+  toFields(): Fr[] {
+    const reader = BufferReader.asReader(this.data);
+    const numTotalFields = this.data.length / Fr.SIZE_IN_BYTES;
+    return reader.readArray(numTotalFields, Fr);
   }
 
   /**
    * Create a Blob from a JSON object.
    *
-   * Blobs will be in this form when requested from the blob sink, or from
+   * Blobs will be in this form when requested from the blob client, or from
    * the beacon chain via `getBlobSidecars`
    * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
    *
-   * @dev WARNING: by default json deals with encoded buffers
-   *
    * @param json - The JSON object to create the Blob from.
    * @returns A Blob created from the JSON object.
    */
-  static async fromJson(json: BlobJson): Promise<Blob> {
+  static fromJson(json: BlobJson): Blob {
     const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');
-
-    const blob = await Blob.fromEncodedBlobBuffer(blobBuffer);
+    const blob = Blob.fromBlobBuffer(blobBuffer);
 
     if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
       throw new Error('KZG commitment does not match');
     }
 
-    // We do not check the proof, as it will be different if the challenge is shared
-    // across multiple blobs
-
     return blob;
   }
 
   /**
    * Get the JSON representation of the blob.
    *
-   * @dev WARNING: by default json deals with encoded buffers
-   * @param index - optional - The index of the blob in the block.
    * @returns The JSON representation of the blob.
    */
-  toJson(index: number): BlobJson {
+  toJSON(): BlobJson {
     return {
       blob: `0x${Buffer.from(this.data).toString('hex')}`,
-      index: index.toString(),
       // eslint-disable-next-line camelcase
       kzg_commitment: `0x${this.commitment.toString('hex')}`,
     };
   }
 
-  /**
-   * Get the fields from the blob.
-   *
-   * @dev WARNING: this method does not take into account trailing zeros
-   *
-   * @returns The fields from the blob.
-   */
-  toFields(): Fr[] {
-    return extractBlobFieldsFromBuffer(this.data);
-  }
-
-  /**
-   * Get the encoded fields from the blob.
-   *
-   * @dev This method takes into account trailing zeros
-   *
-   * @returns The encoded fields from the blob.
-   *
-   * @throws If unable to deserialize the blob.
-   */
-  toEncodedFields(): Fr[] {
-    try {
-      return deserializeEncodedBlobToFields(this.data);
-    } catch {
-      throw new BlobDeserializationError(
-        `Failed to deserialize encoded blob fields, this blob was likely not created by us`,
-      );
-    }
-  }
-
-  /**
-   * Get the encoded fields from multiple blobs.
-   *
-   * @dev This method takes into account trailing zeros
-   *
-   * @returns The encoded fields from the blobs.
-   */
-  static toEncodedFields(blobs: Blob[]): Fr[] {
-    try {
-      return deserializeEncodedBlobToFields(Buffer.concat(blobs.map(b => b.data)));
-    } catch {
-      throw new BlobDeserializationError(
-        `Failed to deserialize encoded blob fields, this blob was likely not created by us`,
-      );
-    }
+  getEthVersionedBlobHash(): Buffer {
+    return computeEthVersionedBlobHash(this.commitment);
   }
 
   /**
-   * Get the commitment fields from the blob.
-   *
-   * The 48-byte commitment is encoded into two field elements:
-   * +------------------+------------------+
-   * | Field Element 1  | Field Element 2  |
-   * | [bytes 0-31]     | [bytes 32-47]    |
-   * +------------------+------------------+
-   * | 32 bytes         | 16 bytes         |
-   * +------------------+------------------+
-   * @returns The commitment fields from the blob.
+   * Challenge point z (= H(H(tx_effects), kzgCommitment)).
+   * Used such that p(z) = y for a single blob, used as z_i in batching (see ./blob_batching.ts).
    */
-  commitmentToFields(): [Fr, Fr] {
-    return commitmentToFields(this.commitment);
-  }
-
-  // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
-  getEthVersionedBlobHash(): Buffer {
-    const hash = sha256(this.commitment);
-    hash[0] = VERSIONED_HASH_VERSION_KZG;
-    return hash;
-  }
-
-  static getEthVersionedBlobHash(commitment: Buffer): Buffer {
-    const hash = sha256(commitment);
-    hash[0] = VERSIONED_HASH_VERSION_KZG;
-    return hash;
+  async computeChallengeZ(blobFieldsHash: Fr): Promise<Fr> {
+    return await computeChallengeZ(blobFieldsHash, this.commitment);
   }
 
   /**
    * Evaluate the blob at a given challenge and return the evaluation and KZG proof.
    *
-   * @param challengeZ - The challenge z at which to evaluate the blob. If not given, assume we want to evaluate at the individual blob's z.
+   * @param challengeZ - The challenge z at which to evaluate the blob.
+   * @param verifyProof - Whether to verify the KZG proof.
    *
-   * @returns -
-   * y: Buffer - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts
+   * @returns
+   * y: BLS12Fr - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts.
    * proof: Buffer - KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes).
    */
-  evaluate(challengeZ?: Fr) {
-    const z = challengeZ || this.challengeZ;
-    const res = computeKzgProof(this.data, z.toBuffer());
-    if (!verifyKzgProof(this.commitment, z.toBuffer(), res[1], res[0])) {
+  evaluate(challengeZ: Fr, verifyProof = false) {
+    const kzg = getKzg();
+    const res = kzg.computeKzgProof(this.data, challengeZ.toBuffer());
+    if (verifyProof && !kzg.verifyKzgProof(this.commitment, challengeZ.toBuffer(), res[1], res[0])) {
       throw new Error(`KZG proof did not verify.`);
     }
+
     const proof = Buffer.from(res[0]);
-    const y = Buffer.from(res[1]);
+    const y = BLS12Fr.fromBuffer(Buffer.from(res[1]));
     return { y, proof };
   }
 
   /**
    * Get the buffer representation of the ENTIRE blob.
    *
-   * @dev WARNING: this buffer contains all metadata aswell as the data itself
+   * @dev WARNING: this buffer contains all metadata as well as the data itself.
    *
    * @returns The buffer representation of the blob.
    */
   toBuffer(): Buffer {
-    return Buffer.from(
-      serializeToBuffer(
-        this.data.length,
-        this.data,
-        this.fieldsHash,
-        this.challengeZ,
-        this.commitment.length,
-        this.commitment,
-      ),
-    );
+    return Buffer.from(serializeToBuffer(this.data.length, this.data, this.commitment.length, this.commitment));
   }
 
   /**
    * Create a Blob from a buffer.
    *
-   * @dev WARNING: this method contains all metadata aswell as the data itself
+   * @dev WARNING: this method contains all metadata as well as the data itself.
    *
    * @param buf - The buffer to create the Blob from.
    * @returns A Blob created from the buffer.
   */
   static fromBuffer(buf: Buffer | BufferReader): Blob {
     const reader = BufferReader.asReader(buf);
-    return new Blob(reader.readUint8Array(), reader.readObject(Fr), reader.readObject(Fr), reader.readBuffer());
+    return new Blob(reader.readUint8Array(), reader.readBuffer());
   }
 
   /**
@@ -257,49 +177,15 @@ export class Blob {
     return this.data.length;
   }
 
-  /**
-   * @param blobs - The blobs to emit
-   * @returns The blobs' compressed commitments in hex prefixed by the number of blobs
-   * @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs:
-   * One byte for the number blobs + 48 bytes per blob commitment
-   */
-  static getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` {
-    let buf = Buffer.alloc(0);
-    blobs.forEach(blob => {
-      buf = Buffer.concat([buf, blob.commitment]);
-    });
-    // We prefix the number of blobs:
-    const lenBuf = Buffer.alloc(1);
-    lenBuf.writeUint8(blobs.length);
-    buf = Buffer.concat([lenBuf, buf]);
-    return `0x${buf.toString('hex')}`;
-  }
-
   static getViemKzgInstance() {
+    const kzg = getKzg();
     return {
-      blobToKzgCommitment: cKzg.blobToKzgCommitment,
-      computeBlobKzgProof: cKzg.computeBlobKzgProof,
+      blobToKzgCommitment: kzg.blobToKzgCommitment.bind(kzg),
+      computeBlobKzgProof: kzg.computeBlobKzgProof.bind(kzg),
+      computeCellsAndKzgProofs: (b: Uint8Array): [Uint8Array[], Uint8Array[]] => {
+        const result = kzg.computeCellsAndKzgProofs(b);
+        return [result.cells, result.proofs];
+      },
     };
   }
-
-  /**
-   * @param fields - Fields to broadcast in the blob(s)
-   * @returns As many blobs as we require to broadcast the given fields for a block
-   * @dev Assumes we share the fields hash between all blobs which can only be done for ONE BLOCK because the hash is calculated in block root.
-   */
-  static async getBlobsPerBlock(fields: Fr[]): Promise<Blob[]> {
-    const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
-    const multiBlobFieldsHash = await poseidon2Hash(fields);
-    const res = [];
-    for (let i = 0; i < numBlobs; i++) {
-      const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
-      res.push(await Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
-    }
-    return res;
-  }
-}
-
-// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
-function commitmentToFields(commitment: Buffer): [Fr, Fr] {
-  return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
 }
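
Note: the TypeScript sketch below is not part of the diff; it is a minimal illustration of how the reshaped Blob API above might be exercised. The class, method names, and signatures (fromFields, computeChallengeZ, evaluate, getEthVersionedBlobHash, toJSON) are taken from the new blob.ts; the import of Blob from the package root and the helper name proveBlobExample are assumptions for the example only.

import { Fr } from '@aztec/foundation/curves/bn254';
import { Blob } from '@aztec/blob-lib'; // assumed root re-export of src/blob.ts

async function proveBlobExample(fields: Fr[], blobFieldsHash: Fr) {
  // Pad the fields to a full blob and compute its KZG commitment.
  const blob = Blob.fromFields(fields);

  // The challenge z is no longer stored on the instance; it is derived on demand
  // from a caller-supplied hash of the blob fields.
  const z = await blob.computeChallengeZ(blobFieldsHash);

  // Evaluate p(z) and compute the KZG opening proof; proof verification is now opt-in.
  const { y, proof } = blob.evaluate(z, /* verifyProof */ true);

  return {
    versionedHash: blob.getEthVersionedBlobHash(), // EIP-4844 versioned hash of the commitment
    evaluation: y, // BLS12Fr evaluation y = p(z)
    proof, // 48-byte compressed commitment to the quotient polynomial
    sidecarJson: blob.toJSON(),
  };
}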