@aztec/blob-lib 3.0.0-nightly.20251026 → 3.0.0-nightly.20251031

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dest/blob.d.ts +47 -89
  2. package/dest/blob.d.ts.map +1 -1
  3. package/dest/blob.js +62 -160
  4. package/dest/blob_batching.d.ts +14 -46
  5. package/dest/blob_batching.d.ts.map +1 -1
  6. package/dest/blob_batching.js +80 -100
  7. package/dest/blob_utils.d.ts +30 -0
  8. package/dest/blob_utils.d.ts.map +1 -0
  9. package/dest/blob_utils.js +60 -0
  10. package/dest/circuit_types/blob_accumulator.d.ts +21 -0
  11. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  12. package/dest/circuit_types/blob_accumulator.js +58 -0
  13. package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
  14. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/final_blob_accumulator.js +63 -0
  16. package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
  17. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
  19. package/dest/circuit_types/index.d.ts +4 -0
  20. package/dest/circuit_types/index.d.ts.map +1 -0
  21. package/dest/circuit_types/index.js +4 -0
  22. package/dest/deserialize.d.ts +7 -41
  23. package/dest/deserialize.d.ts.map +1 -1
  24. package/dest/deserialize.js +25 -73
  25. package/dest/encoding.d.ts +5 -0
  26. package/dest/encoding.d.ts.map +1 -1
  27. package/dest/encoding.js +35 -0
  28. package/dest/hash.d.ts +35 -0
  29. package/dest/hash.d.ts.map +1 -0
  30. package/dest/hash.js +69 -0
  31. package/dest/index.d.ts +4 -2
  32. package/dest/index.d.ts.map +1 -1
  33. package/dest/index.js +4 -2
  34. package/dest/sponge_blob.d.ts +13 -9
  35. package/dest/sponge_blob.d.ts.map +1 -1
  36. package/dest/sponge_blob.js +28 -17
  37. package/dest/testing.d.ts +7 -2
  38. package/dest/testing.d.ts.map +1 -1
  39. package/dest/testing.js +47 -14
  40. package/dest/types.d.ts +2 -0
  41. package/dest/types.d.ts.map +1 -1
  42. package/dest/types.js +2 -0
  43. package/package.json +4 -4
  44. package/src/blob.ts +67 -180
  45. package/src/blob_batching.ts +109 -119
  46. package/src/blob_utils.ts +71 -0
  47. package/src/circuit_types/blob_accumulator.ts +84 -0
  48. package/src/circuit_types/final_blob_accumulator.ts +75 -0
  49. package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
  50. package/src/circuit_types/index.ts +4 -0
  51. package/src/deserialize.ts +24 -79
  52. package/src/encoding.ts +45 -0
  53. package/src/hash.ts +77 -0
  54. package/src/index.ts +4 -2
  55. package/src/sponge_blob.ts +24 -14
  56. package/src/testing.ts +53 -16
  57. package/src/types.ts +2 -2
  58. package/dest/blob_batching_public_inputs.d.ts +0 -57
  59. package/dest/blob_batching_public_inputs.d.ts.map +0 -1
  60. package/dest/blob_batching_public_inputs.js +0 -144
  61. package/src/blob_batching_public_inputs.ts +0 -211
package/dest/testing.js CHANGED
@@ -1,8 +1,12 @@
1
+ import { FIELDS_PER_BLOB } from '@aztec/constants';
1
2
  import { makeTuple } from '@aztec/foundation/array';
3
+ import { randomInt } from '@aztec/foundation/crypto';
2
4
  import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
3
5
  import { Blob } from './blob.js';
4
- import { BatchedBlobAccumulator, FinalBlobBatchingChallenges } from './blob_batching.js';
5
- import { encodeTxStartMarker } from './encoding.js';
6
+ import { BatchedBlobAccumulator } from './blob_batching.js';
7
+ import { getBlobsPerL1Block } from './blob_utils.js';
8
+ import { FinalBlobBatchingChallenges } from './circuit_types/index.js';
9
+ import { createBlockEndMarker, encodeTxStartMarker } from './encoding.js';
6
10
  import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';
7
11
  /**
8
12
  * Makes arbitrary poseidon sponge for blob inputs.
@@ -20,17 +24,12 @@ import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';
20
24
  */ export function makeBatchedBlobAccumulator(seed = 1) {
21
25
  return new BatchedBlobAccumulator(new Fr(seed), new Fr(seed + 1), new BLS12Fr(seed + 2), BLS12Point.random(), BLS12Point.random(), new Fr(seed + 3), new BLS12Fr(seed + 4), new FinalBlobBatchingChallenges(new Fr(seed + 5), new BLS12Fr(seed + 6)));
22
26
  }
23
- /**
24
- * Make an encoded blob with the given length
25
- *
26
- * This will deserialise correctly in the archiver
27
- * @param length
28
- * @returns
29
- */ export function makeEncodedBlob(length) {
27
+ export function makeEncodedTxBlobFields(length) {
30
28
  const txStartMarker = {
31
29
  numBlobFields: length,
32
- // The rest of the values don't matter. The test components using it only look at `numBlobFields` to split the blobs
33
- // into fields for tx effects.
30
+ // The rest of the values don't matter. The test components using it do not try to deserialize everything.
31
+ // Only `checkBlobFieldsEncoding` is used and it only looks at `numBlobFields`. This might change in the future
32
+ // when we add more thorough checks to `checkBlobFieldsEncoding`.
34
33
  revertCode: 0,
35
34
  numNoteHashes: 0,
36
35
  numNullifiers: 0,
@@ -40,12 +39,46 @@ import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';
40
39
  publicLogsLength: 0,
41
40
  contractClassLogLength: 0
42
41
  };
43
- return Blob.fromFields([
42
+ return [
44
43
  encodeTxStartMarker(txStartMarker),
45
44
  ...Array.from({
46
45
  length: length - 1
47
- }, ()=>Fr.random())
48
- ]);
46
+ }, ()=>new Fr(randomInt(Number.MAX_SAFE_INTEGER)))
47
+ ];
48
+ }
49
+ export function makeEncodedBlockBlobFields(...lengths) {
50
+ return [
51
+ ...lengths.length > 0 ? makeEncodedTxBlobFields(lengths[0] - 1) : [],
52
+ ...lengths.slice(1).flatMap((length)=>makeEncodedTxBlobFields(length)),
53
+ createBlockEndMarker(lengths.length)
54
+ ];
55
+ }
56
+ // Create blob fields for a checkpoint with a single block.
57
+ export function makeEncodedBlobFields(length) {
58
+ if (length <= 2) {
59
+ throw new Error('Encoded blob fields length must be greater than 2');
60
+ }
61
+ const checkpointPrefix = new Fr(length);
62
+ return [
63
+ checkpointPrefix,
64
+ ...makeEncodedBlockBlobFields(length - 1)
65
+ ]; // -1 to account for the checkpoint prefix.
66
+ }
67
+ /**
68
+ * Make an encoded blob with the given length
69
+ *
70
+ * This will deserialise correctly in the archiver
71
+ * @param length
72
+ * @returns
73
+ */ export function makeEncodedBlob(length) {
74
+ if (length > FIELDS_PER_BLOB) {
75
+ throw new Error(`A single encoded blob must be less than ${FIELDS_PER_BLOB} fields`);
76
+ }
77
+ return Blob.fromFields(makeEncodedBlobFields(length));
78
+ }
79
+ export function makeEncodedBlobs(length) {
80
+ const fields = makeEncodedBlobFields(length);
81
+ return getBlobsPerL1Block(fields);
49
82
  }
50
83
  /**
51
84
  * Make a blob with random fields.
package/dest/types.d.ts CHANGED
@@ -1,3 +1,5 @@
1
+ export * from './circuit_types/index.js';
2
+ export * from './interface.js';
1
3
  export * from './sponge_blob.js';
2
4
  /**
3
5
  * Type definition for the KZG instance returned by Blob.getViemKzgInstance().
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AAIjC;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,wDAAwD;IACxD,mBAAmB,CAAC,IAAI,EAAE,UAAU,GAAG,UAAU,CAAC;IAClD,kDAAkD;IAClD,mBAAmB,CAAC,IAAI,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,GAAG,UAAU,CAAC;IAC1E,8FAA8F;IAC9F,wBAAwB,CAAC,IAAI,EAAE,UAAU,GAAG,CAAC,UAAU,EAAE,EAAE,UAAU,EAAE,CAAC,CAAC;CAC1E"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,cAAc,0BAA0B,CAAC;AACzC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,kBAAkB,CAAC;AAEjC;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,wDAAwD;IACxD,mBAAmB,CAAC,IAAI,EAAE,UAAU,GAAG,UAAU,CAAC;IAClD,kDAAkD;IAClD,mBAAmB,CAAC,IAAI,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,GAAG,UAAU,CAAC;IAC1E,8FAA8F;IAC9F,wBAAwB,CAAC,IAAI,EAAE,UAAU,GAAG,CAAC,UAAU,EAAE,EAAE,UAAU,EAAE,CAAC,CAAC;CAC1E"}
package/dest/types.js CHANGED
@@ -1 +1,3 @@
1
+ export * from './circuit_types/index.js';
2
+ export * from './interface.js';
1
3
  export * from './sponge_blob.js';
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aztec/blob-lib",
3
- "version": "3.0.0-nightly.20251026",
3
+ "version": "3.0.0-nightly.20251031",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
@@ -27,9 +27,9 @@
27
27
  "../package.common.json"
28
28
  ],
29
29
  "dependencies": {
30
- "@aztec/constants": "3.0.0-nightly.20251026",
31
- "@aztec/foundation": "3.0.0-nightly.20251026",
32
- "@crate-crypto/node-eth-kzg": "^0.9.1",
30
+ "@aztec/constants": "3.0.0-nightly.20251031",
31
+ "@aztec/foundation": "3.0.0-nightly.20251031",
32
+ "@crate-crypto/node-eth-kzg": "^0.10.0",
33
33
  "tslib": "^2.4.0"
34
34
  },
35
35
  "devDependencies": {
package/src/blob.ts CHANGED
@@ -1,75 +1,80 @@
1
1
  import { FIELDS_PER_BLOB } from '@aztec/constants';
2
- import { poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
3
- import { Fr } from '@aztec/foundation/fields';
2
+ import { BLS12Fr, Fr } from '@aztec/foundation/fields';
4
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
5
4
 
6
- import { deserializeEncodedBlobToFields, extractBlobFieldsFromBuffer } from './deserialize.js';
7
- import { BlobDeserializationError } from './errors.js';
5
+ import { computeBlobCommitment, computeChallengeZ, computeEthVersionedBlobHash } from './hash.js';
8
6
  import type { BlobJson } from './interface.js';
9
- import { BYTES_PER_BLOB, kzg } from './kzg_context.js';
7
+ import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
10
8
 
11
- // The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
12
- export const VERSIONED_HASH_VERSION_KZG = 0x01;
9
+ export { FIELDS_PER_BLOB };
13
10
 
14
11
  /**
15
12
  * A class to create, manage, and prove EVM blobs.
13
+ *
14
+ * @dev Note: All methods in this class do not check the encoding of the given data. It's the responsibility of other
15
+ * components to ensure that the blob data (which might spread across multiple blobs) was created following the protocol
16
+ * and is correctly encoded.
16
17
  */
17
18
  export class Blob {
18
19
  constructor(
19
- /** The blob to be broadcast on L1 in bytes form. */
20
+ /**
21
+ * The data to be broadcast on L1 in bytes form.
22
+ */
20
23
  public readonly data: Uint8Array,
21
- /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
22
- public readonly fieldsHash: Fr,
23
- /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y for a single blob, used as z_i in batching (see ./blob_batching.ts). */
24
- public readonly challengeZ: Fr,
25
- /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
24
+ /**
25
+ * Commitment to the blob data. Used in compressed BLS12 point format (48 bytes).
26
+ */
26
27
  public readonly commitment: Buffer,
27
- ) {}
28
+ ) {
29
+ if (data.length !== BYTES_PER_BLOB) {
30
+ throw new Error(`Blob data must be ${BYTES_PER_BLOB} bytes. Got ${data.length}.`);
31
+ }
32
+ if (commitment.length !== BYTES_PER_COMMITMENT) {
33
+ throw new Error(`Blob commitment must be ${BYTES_PER_COMMITMENT} bytes. Got ${commitment.length}.`);
34
+ }
35
+ }
28
36
 
29
37
  /**
30
- * The encoded version of the blob will determine the end of the blob based on the transaction encoding.
31
- * This is required when the fieldsHash of a blob will contain trailing zeros.
32
- *
33
- * See `./encoding.ts` for more details.
34
- *
35
- * This method is used to create a Blob from a buffer.
36
- * @param blob - The buffer to create the Blob from.
37
- * @param multiBlobFieldsHash - The fields hash to use for the Blob.
38
+ * Create a Blob from a buffer.
39
+ * @param data - The buffer of the Blob.
38
40
  * @returns A Blob created from the buffer.
39
41
  *
40
- * @throws If unable to deserialize the blob.
42
+ * @throws If data does not match the expected length (BYTES_PER_BLOB).
41
43
  */
42
- static fromEncodedBlobBuffer(blob: Uint8Array, multiBlobFieldsHash?: Fr): Promise<Blob> {
43
- try {
44
- const fields: Fr[] = deserializeEncodedBlobToFields(blob);
45
- return Blob.fromFields(fields, multiBlobFieldsHash);
46
- } catch {
47
- throw new BlobDeserializationError(
48
- `Failed to create Blob from encoded blob buffer, this blob was likely not created by us`,
49
- );
50
- }
44
+ static fromBlobBuffer(data: Uint8Array): Blob {
45
+ const commitment = computeBlobCommitment(data);
46
+ return new Blob(data, commitment);
51
47
  }
52
48
 
53
49
  /**
54
50
  * Create a Blob from an array of fields.
55
51
  *
52
+ * @dev This method pads 0s to the data, extending it to the size of a full blob.
53
+ *
56
54
  * @param fields - The array of fields to create the Blob from.
57
- * @param multiBlobFieldsHash - The fields hash to use for the Blob.
58
55
  * @returns A Blob created from the array of fields.
59
56
  */
60
- static async fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Promise<Blob> {
57
+ static fromFields(fields: Fr[]): Blob {
61
58
  if (fields.length > FIELDS_PER_BLOB) {
62
- throw new Error(`Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELDS_PER_BLOB}`);
59
+ throw new Error(`Attempted to overfill blob with ${fields.length} fields. The maximum is ${FIELDS_PER_BLOB}.`);
63
60
  }
64
61
 
65
62
  const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
63
+ const commitment = computeBlobCommitment(data);
64
+ return new Blob(data, commitment);
65
+ }
66
66
 
67
- // This matches the output of SpongeBlob.squeeze() in the blob circuit
68
- const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : await poseidon2Hash(fields);
69
- const commitment = Buffer.from(kzg.blobToKzgCommitment(data));
70
- const challengeZ = await poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
71
-
72
- return new Blob(data, fieldsHash, challengeZ, commitment);
67
+ /**
68
+ * Get the fields from the blob data.
69
+ *
70
+ * @dev WARNING: this method returns all fields
71
+ *
72
+ * @returns The fields from the blob.
73
+ */
74
+ toFields(): Fr[] {
75
+ const reader = BufferReader.asReader(this.data);
76
+ const numTotalFields = this.data.length / Fr.SIZE_IN_BYTES;
77
+ return reader.readArray(numTotalFields, Fr);
73
78
  }
74
79
 
75
80
  /**
@@ -79,30 +84,23 @@ export class Blob {
79
84
  * the beacon chain via `getBlobSidecars`
80
85
  * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
81
86
  *
82
- * @dev WARNING: by default json deals with encoded buffers
83
- *
84
87
  * @param json - The JSON object to create the Blob from.
85
88
  * @returns A Blob created from the JSON object.
86
89
  */
87
- static async fromJson(json: BlobJson): Promise<Blob> {
90
+ static fromJson(json: BlobJson): Blob {
88
91
  const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');
89
-
90
- const blob = await Blob.fromEncodedBlobBuffer(blobBuffer);
92
+ const blob = Blob.fromBlobBuffer(blobBuffer);
91
93
 
92
94
  if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
93
95
  throw new Error('KZG commitment does not match');
94
96
  }
95
97
 
96
- // We do not check the proof, as it will be different if the challenge is shared
97
- // across multiple blobs
98
-
99
98
  return blob;
100
99
  }
101
100
 
102
101
  /**
103
102
  * Get the JSON representation of the blob.
104
103
  *
105
- * @dev WARNING: by default json deals with encoded buffers
106
104
  * @param index - optional - The index of the blob in the block.
107
105
  * @returns The JSON representation of the blob.
108
106
  */
@@ -115,133 +113,61 @@ export class Blob {
115
113
  };
116
114
  }
117
115
 
118
- /**
119
- * Get the fields from the blob.
120
- *
121
- * @dev WARNING: this method does not take into account trailing zeros
122
- *
123
- * @returns The fields from the blob.
124
- */
125
- toFields(): Fr[] {
126
- return extractBlobFieldsFromBuffer(this.data);
127
- }
128
-
129
- /**
130
- * Get the encoded fields from the blob.
131
- *
132
- * @dev This method takes into account trailing zeros
133
- *
134
- * @returns The encoded fields from the blob.
135
- *
136
- * @throws If unable to deserialize the blob.
137
- */
138
- toEncodedFields(): Fr[] {
139
- try {
140
- return deserializeEncodedBlobToFields(this.data);
141
- } catch {
142
- throw new BlobDeserializationError(
143
- `Failed to deserialize encoded blob fields, this blob was likely not created by us`,
144
- );
145
- }
116
+ getEthVersionedBlobHash(): Buffer {
117
+ return computeEthVersionedBlobHash(this.commitment);
146
118
  }
147
119
 
148
120
  /**
149
- * Get the encoded fields from multiple blobs.
150
- *
151
- * @dev This method takes into account trailing zeros
152
- *
153
- * @returns The encoded fields from the blobs.
121
+ * Challenge point z (= H(H(tx_effects), kzgCommitment)).
122
+ * Used such that p(z) = y for a single blob, used as z_i in batching (see ./blob_batching.ts).
154
123
  */
155
- static toEncodedFields(blobs: Blob[]): Fr[] {
156
- try {
157
- return deserializeEncodedBlobToFields(Buffer.concat(blobs.map(b => b.data)));
158
- } catch {
159
- throw new BlobDeserializationError(
160
- `Failed to deserialize encoded blob fields, this blob was likely not created by us`,
161
- );
162
- }
163
- }
164
-
165
- /**
166
- * Get the commitment fields from the blob.
167
- *
168
- * The 48-byte commitment is encoded into two field elements:
169
- * +------------------+------------------+
170
- * | Field Element 1 | Field Element 2 |
171
- * | [bytes 0-31] | [bytes 32-47] |
172
- * +------------------+------------------+
173
- * | 32 bytes | 16 bytes |
174
- * +------------------+------------------+
175
- * @returns The commitment fields from the blob.
176
- */
177
- commitmentToFields(): [Fr, Fr] {
178
- return commitmentToFields(this.commitment);
179
- }
180
-
181
- // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
182
- getEthVersionedBlobHash(): Buffer {
183
- const hash = sha256(this.commitment);
184
- hash[0] = VERSIONED_HASH_VERSION_KZG;
185
- return hash;
186
- }
187
-
188
- static getEthVersionedBlobHash(commitment: Buffer): Buffer {
189
- const hash = sha256(commitment);
190
- hash[0] = VERSIONED_HASH_VERSION_KZG;
191
- return hash;
124
+ async computeChallengeZ(blobFieldsHash: Fr): Promise<Fr> {
125
+ return await computeChallengeZ(blobFieldsHash, this.commitment);
192
126
  }
193
127
 
194
128
  /**
195
129
  * Evaluate the blob at a given challenge and return the evaluation and KZG proof.
196
130
  *
197
- * @param challengeZ - The challenge z at which to evaluate the blob. If not given, assume we want to evaluate at the individual blob's z.
131
+ * @param challengeZ - The challenge z at which to evaluate the blob.
132
+ * @param verifyProof - Whether to verify the KZG proof.
198
133
  *
199
- * @returns -
200
- * y: Buffer - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts
134
+ * @returns
135
+ * y: BLS12Fr - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts.
201
136
  * proof: Buffer - KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes).
202
137
  */
203
- evaluate(challengeZ?: Fr) {
204
- const z = challengeZ || this.challengeZ;
205
- const res = kzg.computeKzgProof(this.data, z.toBuffer());
206
- if (!kzg.verifyKzgProof(this.commitment, z.toBuffer(), res[1], res[0])) {
138
+ evaluate(challengeZ: Fr, verifyProof = false) {
139
+ const res = kzg.computeKzgProof(this.data, challengeZ.toBuffer());
140
+ if (verifyProof && !kzg.verifyKzgProof(this.commitment, challengeZ.toBuffer(), res[1], res[0])) {
207
141
  throw new Error(`KZG proof did not verify.`);
208
142
  }
143
+
209
144
  const proof = Buffer.from(res[0]);
210
- const y = Buffer.from(res[1]);
145
+ const y = BLS12Fr.fromBuffer(Buffer.from(res[1]));
211
146
  return { y, proof };
212
147
  }
213
148
 
214
149
  /**
215
150
  * Get the buffer representation of the ENTIRE blob.
216
151
  *
217
- * @dev WARNING: this buffer contains all metadata aswell as the data itself
152
+ * @dev WARNING: this buffer contains all metadata as well as the data itself.
218
153
  *
219
154
  * @returns The buffer representation of the blob.
220
155
  */
221
156
  toBuffer(): Buffer {
222
- return Buffer.from(
223
- serializeToBuffer(
224
- this.data.length,
225
- this.data,
226
- this.fieldsHash,
227
- this.challengeZ,
228
- this.commitment.length,
229
- this.commitment,
230
- ),
231
- );
157
+ return Buffer.from(serializeToBuffer(this.data.length, this.data, this.commitment.length, this.commitment));
232
158
  }
233
159
 
234
160
  /**
235
161
  * Create a Blob from a buffer.
236
162
  *
237
- * @dev WARNING: this method contains all metadata aswell as the data itself
163
+ * @dev WARNING: this method contains all metadata as well as the data itself.
238
164
  *
239
165
  * @param buf - The buffer to create the Blob from.
240
166
  * @returns A Blob created from the buffer.
241
167
  */
242
168
  static fromBuffer(buf: Buffer | BufferReader): Blob {
243
169
  const reader = BufferReader.asReader(buf);
244
- return new Blob(reader.readUint8Array(), reader.readObject(Fr), reader.readObject(Fr), reader.readBuffer());
170
+ return new Blob(reader.readUint8Array(), reader.readBuffer());
245
171
  }
246
172
 
247
173
  /**
@@ -251,24 +177,6 @@ export class Blob {
251
177
  return this.data.length;
252
178
  }
253
179
 
254
- /**
255
- * @param blobs - The blobs to emit
256
- * @returns The blobs' compressed commitments in hex prefixed by the number of blobs
257
- * @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs:
258
- * One byte for the number blobs + 48 bytes per blob commitment
259
- */
260
- static getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` {
261
- let buf = Buffer.alloc(0);
262
- blobs.forEach(blob => {
263
- buf = Buffer.concat([buf, blob.commitment]);
264
- });
265
- // We prefix the number of blobs:
266
- const lenBuf = Buffer.alloc(1);
267
- lenBuf.writeUint8(blobs.length);
268
- buf = Buffer.concat([lenBuf, buf]);
269
- return `0x${buf.toString('hex')}`;
270
- }
271
-
272
180
  static getViemKzgInstance() {
273
181
  return {
274
182
  blobToKzgCommitment: kzg.blobToKzgCommitment.bind(kzg),
@@ -279,25 +187,4 @@ export class Blob {
279
187
  },
280
188
  };
281
189
  }
282
-
283
- /**
284
- * @param fields - Fields to broadcast in the blob(s)
285
- * @returns As many blobs as we require to broadcast the given fields for a block
286
- * @dev Assumes we share the fields hash between all blobs which can only be done for ONE BLOCK because the hash is calculated in block root.
287
- */
288
- static async getBlobsPerBlock(fields: Fr[]): Promise<Blob[]> {
289
- const numBlobs = Math.max(Math.ceil(fields.length / FIELDS_PER_BLOB), 1);
290
- const multiBlobFieldsHash = await poseidon2Hash(fields);
291
- const res = [];
292
- for (let i = 0; i < numBlobs; i++) {
293
- const end = fields.length < (i + 1) * FIELDS_PER_BLOB ? fields.length : (i + 1) * FIELDS_PER_BLOB;
294
- res.push(await Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, end), multiBlobFieldsHash));
295
- }
296
- return res;
297
- }
298
- }
299
-
300
- // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
301
- function commitmentToFields(commitment: Buffer): [Fr, Fr] {
302
- return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
303
190
  }