@aztec/blob-lib 0.0.0-test.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dest/blob.d.ts ADDED
@@ -0,0 +1,162 @@
1
+ /// <reference types="node" resolution-mode="require"/>
2
+ /// <reference types="node" resolution-mode="require"/>
3
+ import { Fr } from '@aztec/foundation/fields';
4
+ import { BufferReader } from '@aztec/foundation/serialize';
5
+ import cKzg from 'c-kzg';
6
+ import type { Blob as BlobBuffer } from 'c-kzg';
7
+ import type { BlobJson } from './interface.js';
8
+ export declare const VERSIONED_HASH_VERSION_KZG = 1;
9
+ /**
10
+ * A class to create, manage, and prove EVM blobs.
11
+ */
12
+ export declare class Blob {
13
+ /** The blob to be broadcast on L1 in bytes form. */
14
+ readonly data: BlobBuffer;
15
+ /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
16
+ readonly fieldsHash: Fr;
17
+ /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */
18
+ readonly challengeZ: Fr;
19
+ /** Evaluation y = p(z), where p() is the blob polynomial. A BLS12 field element, represented as a BigNum in Noir and a bigint in TypeScript. */
20
+ readonly evaluationY: Buffer;
21
+ /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
22
+ readonly commitment: Buffer;
23
+ /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
24
+ readonly proof: Buffer;
25
+ constructor(
26
+ /** The blob to be broadcast on L1 in bytes form. */
27
+ data: BlobBuffer,
28
+ /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
29
+ fieldsHash: Fr,
30
+ /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */
31
+ challengeZ: Fr,
32
+ /** Evaluation y = p(z), where p() is the blob polynomial. A BLS12 field element, represented as a BigNum in Noir and a bigint in TypeScript. */
33
+ evaluationY: Buffer,
34
+ /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
35
+ commitment: Buffer,
36
+ /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
37
+ proof: Buffer);
38
+ /**
39
+ * The encoded version of the blob will determine the end of the blob based on the transaction encoding.
40
+ * This is required when the fields of a blob contain trailing zeros, which are otherwise indistinguishable from zero padding.
41
+ *
42
+ * See `./encoding.ts` for more details.
43
+ *
44
+ * This method is used to create a Blob from a buffer.
45
+ * @param blob - The buffer to create the Blob from.
46
+ * @param multiBlobFieldsHash - The fields hash to use for the Blob.
47
+ * @returns A Blob created from the buffer.
48
+ *
49
+ * @throws If unable to deserialize the blob.
50
+ */
51
+ static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Promise<Blob>;
52
+ /**
53
+ * Create a Blob from an array of fields.
54
+ *
55
+ * @param fields - The array of fields to create the Blob from.
56
+ * @param multiBlobFieldsHash - The fields hash to use for the Blob.
57
+ * @returns A Blob created from the array of fields.
58
+ */
59
+ static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Promise<Blob>;
60
+ /**
61
+ * Create a Blob from a JSON object.
62
+ *
63
+ * Blobs will be in this form when requested from the blob sink, or from
64
+ * the beacon chain via `getBlobSidecars`
65
+ * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
66
+ *
67
+ * @dev WARNING: by default json deals with encoded buffers
68
+ *
69
+ * @param json - The JSON object to create the Blob from.
70
+ * @returns A Blob created from the JSON object.
71
+ */
72
+ static fromJson(json: BlobJson): Promise<Blob>;
73
+ /**
74
+ * Get the JSON representation of the blob.
75
+ *
76
+ * @dev WARNING: by default json deals with encoded buffers
77
+ * @param index - optional - The index of the blob in the block.
78
+ * @returns The JSON representation of the blob.
79
+ */
80
+ toJson(index?: number): BlobJson;
81
+ /**
82
+ * Get the fields from the blob.
83
+ *
84
+ * @dev WARNING: this method does not take into account trailing zeros
85
+ *
86
+ * @returns The fields from the blob.
87
+ */
88
+ toFields(): Fr[];
89
+ /**
90
+ * Get the encoded fields from the blob.
91
+ *
92
+ * @dev This method takes into account trailing zeros
93
+ *
94
+ * @returns The encoded fields from the blob.
95
+ *
96
+ * @throws If unable to deserialize the blob.
97
+ */
98
+ toEncodedFields(): Fr[];
99
+ /**
100
+ * Get the encoded fields from multiple blobs.
101
+ *
102
+ * @dev This method takes into account trailing zeros
103
+ *
104
+ * @returns The encoded fields from the blobs.
105
+ */
106
+ static toEncodedFields(blobs: Blob[]): Fr[];
107
+ /**
108
+ * Get the commitment fields from the blob.
109
+ *
110
+ * The 48-byte commitment is encoded into two field elements:
111
+ * +------------------+------------------+
112
+ * | Field Element 1 | Field Element 2 |
113
+ * |   [bytes 0-30]   |   [bytes 31-47]  |
114
+ * +------------------+------------------+
115
+ * |     31 bytes     |     17 bytes     |
116
+ * +------------------+------------------+
117
+ * @returns The commitment fields from the blob.
118
+ */
119
+ commitmentToFields(): [Fr, Fr];
120
+ getEthVersionedBlobHash(): Buffer;
121
+ static getEthVersionedBlobHash(commitment: Buffer): Buffer;
122
+ /**
123
+ * Get the buffer representation of the ENTIRE blob.
124
+ *
125
+ * @dev WARNING: this buffer contains all metadata as well as the data itself
126
+ *
127
+ * @returns The buffer representation of the blob.
128
+ */
129
+ toBuffer(): Buffer;
130
+ /**
131
+ * Create a Blob from a buffer.
132
+ *
133
+ * @dev WARNING: the buffer must contain all metadata as well as the data itself
134
+ *
135
+ * @param buf - The buffer to create the Blob from.
136
+ * @returns A Blob created from the buffer.
137
+ */
138
+ static fromBuffer(buf: Buffer | BufferReader): Blob;
139
+ /**
140
+ * Get the size of the blob in bytes
141
+ */
142
+ getSize(): number;
143
+ /**
144
+ * Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
145
+ *
146
+ * input[:32] - versioned_hash
147
+ * input[32:64] - z
148
+ * input[64:96] - y
149
+ * input[96:144] - commitment C
150
+ * input[144:192] - proof (a commitment to the quotient polynomial q(X))
151
+ *
152
+ * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
153
+ */
154
+ getEthBlobEvaluationInputs(): `0x${string}`;
155
+ static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}`;
156
+ static getViemKzgInstance(): {
157
+ blobToKzgCommitment: typeof cKzg.blobToKzgCommitment;
158
+ computeBlobKzgProof: typeof cKzg.computeBlobKzgProof;
159
+ };
160
+ static getBlobs(fields: Fr[]): Promise<Blob[]>;
161
+ }
162
+ //# sourceMappingURL=blob.d.ts.map
package/dest/blob.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"blob.d.ts","sourceRoot":"","sources":["../src/blob.ts"],"names":[],"mappings":";;AACA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAG9E,OAAO,IAAI,MAAM,OAAO,CAAC;AACzB,OAAO,KAAK,EAAE,IAAI,IAAI,UAAU,EAAE,MAAM,OAAO,CAAC;AAIhD,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAM/C,eAAO,MAAM,0BAA0B,IAAO,CAAC;AAE/C;;GAEG;AACH,qBAAa,IAAI;IAEb,oDAAoD;aACpC,IAAI,EAAE,UAAU;IAChC,6IAA6I;aAC7H,UAAU,EAAE,EAAE;IAC9B,sFAAsF;aACtE,UAAU,EAAE,EAAE;IAC9B,sHAAsH;aACtG,WAAW,EAAE,MAAM;IACnC,kFAAkF;aAClE,UAAU,EAAE,MAAM;IAClC,iIAAiI;aACjH,KAAK,EAAE,MAAM;;IAX7B,oDAAoD;IACpC,IAAI,EAAE,UAAU;IAChC,6IAA6I;IAC7H,UAAU,EAAE,EAAE;IAC9B,sFAAsF;IACtE,UAAU,EAAE,EAAE;IAC9B,sHAAsH;IACtG,WAAW,EAAE,MAAM;IACnC,kFAAkF;IAClE,UAAU,EAAE,MAAM;IAClC,iIAAiI;IACjH,KAAK,EAAE,MAAM;IAG/B;;;;;;;;;;;;OAYG;IACH,MAAM,CAAC,qBAAqB,CAAC,IAAI,EAAE,UAAU,EAAE,mBAAmB,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAWvF;;;;;;OAMG;WACU,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE,EAAE,mBAAmB,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAuB9E;;;;;;;;;;;OAWG;WACU,QAAQ,CAAC,IAAI,EAAE,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC;IAepD;;;;;;OAMG;IACH,MAAM,CAAC,KAAK,CAAC,EAAE,MAAM,GAAG,QAAQ;IAWhC;;;;;;OAMG;IACH,QAAQ,IAAI,EAAE,EAAE;IAIhB;;;;;;;;OAQG;IACH,eAAe,IAAI,EAAE,EAAE;IAUvB;;;;;;OAMG;IACH,MAAM,CAAC,eAAe,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE;IAU3C;;;;;;;;;;;OAWG;IACH,kBAAkB,IAAI,CAAC,EAAE,EAAE,EAAE,CAAC;IAK9B,uBAAuB,IAAI,MAAM;IAMjC,MAAM,CAAC,uBAAuB,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAM1D;;;;;;OAMG;IACH,QAAQ,IAAI,MAAM;IAiBlB;;;;;;;OAOG;IACH,MAAM,CAAC,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,YAAY,GAAG,IAAI;IAYnD;;OAEG;IACH,OAAO;IAIP;;;;;;;;;;OAUG;IACH,0BAA0B,IAAI,KAAK,MAAM,EAAE;IAW3C,MAAM,CAAC,0BAA0B,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,KAAK,MAAM,EAAE;IAmB/D,MAAM,CAAC,kBAAkB;;;;WASZ,QAAQ,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;CAUrD"}
package/dest/blob.js ADDED
@@ -0,0 +1,266 @@
1
+ import { poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
2
+ import { Fr } from '@aztec/foundation/fields';
3
+ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
4
+ // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
5
+ import cKzg from 'c-kzg';
6
+ import { deserializeEncodedBlobToFields, extractBlobFieldsFromBuffer } from './encoding.js';
7
+ import { BlobDeserializationError } from './errors.js';
8
+ /* eslint-disable import/no-named-as-default-member */ const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;
9
+ // The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
10
+ export const VERSIONED_HASH_VERSION_KZG = 0x01;
11
+ /**
12
+ * A class to create, manage, and prove EVM blobs.
13
+ */ export class Blob {
14
+ data;
15
+ fieldsHash;
16
+ challengeZ;
17
+ evaluationY;
18
+ commitment;
19
+ proof;
20
+ constructor(/** The blob to be broadcast on L1 in bytes form. */ data, /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ fieldsHash, /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */ challengeZ, /** Evaluation y = p(z), where p() is the blob polynomial. A BLS12 field element, represented as a BigNum in Noir and a bigint in TypeScript. */ evaluationY, /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ commitment, /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ proof){
21
+ this.data = data;
22
+ this.fieldsHash = fieldsHash;
23
+ this.challengeZ = challengeZ;
24
+ this.evaluationY = evaluationY;
25
+ this.commitment = commitment;
26
+ this.proof = proof;
27
+ }
28
+ /**
29
+ * The encoded version of the blob will determine the end of the blob based on the transaction encoding.
30
+ * This is required when the fields of a blob contain trailing zeros, which are otherwise indistinguishable from zero padding.
31
+ *
32
+ * See `./encoding.ts` for more details.
33
+ *
34
+ * This method is used to create a Blob from a buffer.
35
+ * @param blob - The buffer to create the Blob from.
36
+ * @param multiBlobFieldsHash - The fields hash to use for the Blob.
37
+ * @returns A Blob created from the buffer.
38
+ *
39
+ * @throws If unable to deserialize the blob.
40
+ */ static fromEncodedBlobBuffer(blob, multiBlobFieldsHash) {
41
+ try {
42
+ const fields = deserializeEncodedBlobToFields(blob);
43
+ return Blob.fromFields(fields, multiBlobFieldsHash);
44
+ } catch (err) {
45
+ throw new BlobDeserializationError(`Failed to create Blob from encoded blob buffer, this blob was likely not created by us`);
46
+ }
47
+ }
48
+ /**
49
+ * Create a Blob from an array of fields.
50
+ *
51
+ * @param fields - The array of fields to create the Blob from.
52
+ * @param multiBlobFieldsHash - The fields hash to use for the Blob.
53
+ * @returns A Blob created from the array of fields.
54
+ */ static async fromFields(fields, multiBlobFieldsHash) {
55
+ if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
56
+ throw new Error(`Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`);
57
+ }
58
+ const data = Buffer.concat([
59
+ serializeToBuffer(fields)
60
+ ], BYTES_PER_BLOB);
61
+ // This matches the output of SpongeBlob.squeeze() in the blob circuit
62
+ const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : await poseidon2Hash(fields);
63
+ const commitment = Buffer.from(blobToKzgCommitment(data));
64
+ const challengeZ = await poseidon2Hash([
65
+ fieldsHash,
66
+ ...commitmentToFields(commitment)
67
+ ]);
68
+ const res = computeKzgProof(data, challengeZ.toBuffer());
69
+ if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
70
+ throw new Error(`KZG proof did not verify.`);
71
+ }
72
+ const proof = Buffer.from(res[0]);
73
+ const evaluationY = Buffer.from(res[1]);
74
+ return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof);
75
+ }
76
+ /**
77
+ * Create a Blob from a JSON object.
78
+ *
79
+ * Blobs will be in this form when requested from the blob sink, or from
80
+ * the beacon chain via `getBlobSidecars`
81
+ * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
82
+ *
83
+ * @dev WARNING: by default json deals with encoded buffers
84
+ *
85
+ * @param json - The JSON object to create the Blob from.
86
+ * @returns A Blob created from the JSON object.
87
+ */ static async fromJson(json) {
88
+ const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');
89
+ const blob = await Blob.fromEncodedBlobBuffer(blobBuffer);
90
+ if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
91
+ throw new Error('KZG commitment does not match');
92
+ }
93
+ // We do not check the proof, as it will be different if the challenge is shared
94
+ // across multiple blobs
95
+ return blob;
96
+ }
97
+ /**
98
+ * Get the JSON representation of the blob.
99
+ *
100
+ * @dev WARNING: by default json deals with encoded buffers
101
+ * @param index - optional - The index of the blob in the block.
102
+ * @returns The JSON representation of the blob.
103
+ */ toJson(index) {
104
+ return {
105
+ blob: `0x${Buffer.from(this.data).toString('hex')}`,
106
+ index,
107
+ // eslint-disable-next-line camelcase
108
+ kzg_commitment: `0x${this.commitment.toString('hex')}`,
109
+ // eslint-disable-next-line camelcase
110
+ kzg_proof: `0x${this.proof.toString('hex')}`
111
+ };
112
+ }
113
+ /**
114
+ * Get the fields from the blob.
115
+ *
116
+ * @dev WARNING: this method does not take into account trailing zeros
117
+ *
118
+ * @returns The fields from the blob.
119
+ */ toFields() {
120
+ return extractBlobFieldsFromBuffer(this.data);
121
+ }
122
+ /**
123
+ * Get the encoded fields from the blob.
124
+ *
125
+ * @dev This method takes into account trailing zeros
126
+ *
127
+ * @returns The encoded fields from the blob.
128
+ *
129
+ * @throws If unable to deserialize the blob.
130
+ */ toEncodedFields() {
131
+ try {
132
+ return deserializeEncodedBlobToFields(this.data);
133
+ } catch (err) {
134
+ throw new BlobDeserializationError(`Failed to deserialize encoded blob fields, this blob was likely not created by us`);
135
+ }
136
+ }
137
+ /**
138
+ * Get the encoded fields from multiple blobs.
139
+ *
140
+ * @dev This method takes into account trailing zeros
141
+ *
142
+ * @returns The encoded fields from the blobs.
143
+ */ static toEncodedFields(blobs) {
144
+ try {
145
+ return deserializeEncodedBlobToFields(Buffer.concat(blobs.map((b)=>b.data)));
146
+ } catch (err) {
147
+ throw new BlobDeserializationError(`Failed to deserialize encoded blob fields, this blob was likely not created by us`);
148
+ }
149
+ }
150
+ /**
151
+ * Get the commitment fields from the blob.
152
+ *
153
+ * The 48-byte commitment is encoded into two field elements:
154
+ * +------------------+------------------+
155
+ * | Field Element 1 | Field Element 2 |
156
+ * | [bytes 0-31] | [bytes 32-47] |
157
+ * +------------------+------------------+
158
+ * | 32 bytes | 16 bytes |
159
+ * +------------------+------------------+
160
+ * @returns The commitment fields from the blob.
161
+ */ commitmentToFields() {
162
+ return commitmentToFields(this.commitment);
163
+ }
164
+ // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
165
+ getEthVersionedBlobHash() {
166
+ const hash = sha256(this.commitment);
167
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
168
+ return hash;
169
+ }
170
+ static getEthVersionedBlobHash(commitment) {
171
+ const hash = sha256(commitment);
172
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
173
+ return hash;
174
+ }
175
+ /**
176
+ * Get the buffer representation of the ENTIRE blob.
177
+ *
178
+ * @dev WARNING: this buffer contains all metadata as well as the data itself
179
+ *
180
+ * @returns The buffer representation of the blob.
181
+ */ toBuffer() {
182
+ return Buffer.from(serializeToBuffer(this.data.length, this.data, this.fieldsHash, this.challengeZ, this.evaluationY.length, this.evaluationY, this.commitment.length, this.commitment, this.proof.length, this.proof));
183
+ }
184
+ /**
185
+ * Create a Blob from a buffer.
186
+ *
187
+ * @dev WARNING: the buffer must contain all metadata as well as the data itself
188
+ *
189
+ * @param buf - The buffer to create the Blob from.
190
+ * @returns A Blob created from the buffer.
191
+ */ static fromBuffer(buf) {
192
+ const reader = BufferReader.asReader(buf);
193
+ return new Blob(reader.readUint8Array(), reader.readObject(Fr), reader.readObject(Fr), reader.readBuffer(), reader.readBuffer(), reader.readBuffer());
194
+ }
195
+ /**
196
+ * Get the size of the blob in bytes
197
+ */ getSize() {
198
+ return this.data.length;
199
+ }
200
+ /**
201
+ * Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
202
+ *
203
+ * input[:32] - versioned_hash
204
+ * input[32:64] - z
205
+ * input[64:96] - y
206
+ * input[96:144] - commitment C
207
+ * input[144:192] - proof (a commitment to the quotient polynomial q(X))
208
+ *
209
+ * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
210
+ */ getEthBlobEvaluationInputs() {
211
+ const buf = Buffer.concat([
212
+ this.getEthVersionedBlobHash(),
213
+ this.challengeZ.toBuffer(),
214
+ this.evaluationY,
215
+ this.commitment,
216
+ this.proof
217
+ ]);
218
+ return `0x${buf.toString('hex')}`;
219
+ }
220
+ static getEthBlobEvaluationInputs(blobs) {
221
+ let buf = Buffer.alloc(0);
222
+ blobs.forEach((blob)=>{
223
+ buf = Buffer.concat([
224
+ buf,
225
+ blob.getEthVersionedBlobHash(),
226
+ blob.challengeZ.toBuffer(),
227
+ blob.evaluationY,
228
+ blob.commitment,
229
+ blob.proof
230
+ ]);
231
+ });
232
+ // For multiple blobs, we prefix the number of blobs:
233
+ const lenBuf = Buffer.alloc(1);
234
+ lenBuf.writeUint8(blobs.length);
235
+ buf = Buffer.concat([
236
+ lenBuf,
237
+ buf
238
+ ]);
239
+ return `0x${buf.toString('hex')}`;
240
+ }
241
+ static getViemKzgInstance() {
242
+ return {
243
+ blobToKzgCommitment: cKzg.blobToKzgCommitment,
244
+ computeBlobKzgProof: cKzg.computeBlobKzgProof
245
+ };
246
+ }
247
+ // Returns as many blobs as we require to broadcast the given fields
248
+ // Assumes we share the fields hash between all blobs
249
+ static async getBlobs(fields) {
250
+ const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
251
+ const multiBlobFieldsHash = await poseidon2Hash(fields);
252
+ const res = [];
253
+ for(let i = 0; i < numBlobs; i++){
254
+ const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
255
+ res.push(await Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
256
+ }
257
+ return res;
258
+ }
259
+ }
260
+ // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
261
+ function commitmentToFields(commitment) {
262
+ return [
263
+ new Fr(commitment.subarray(0, 31)),
264
+ new Fr(commitment.subarray(31, 48))
265
+ ];
266
+ }
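A small standalone sketch (not part of the package) of the 31 + 17 byte split performed by the local commitmentToFields helper above, and of how BlobPublicInputs.commitmentToBuffer (further below) reverses it; the commitment bytes here are dummy values.

import { Fr } from '@aztec/foundation/fields';

// Dummy 48-byte "commitment", purely to illustrate the byte layout.
const commitment = Buffer.alloc(48, 0xab);

// Forward: 31 bytes + 17 bytes, so each slice fits below the BN254 modulus.
const fields = [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];

// Reverse: drop the leading pad byte of the first 32-byte field buffer and
// keep the last 17 bytes of the second, as commitmentToBuffer does.
const roundTrip = Buffer.concat([fields[0].toBuffer().subarray(1), fields[1].toBuffer().subarray(-17)]);

// roundTrip.equals(commitment) === true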
package/dest/blob_public_inputs.d.ts ADDED
@@ -0,0 +1,50 @@
1
+ /// <reference types="node" resolution-mode="require"/>
2
+ /// <reference types="node" resolution-mode="require"/>
3
+ import { BLOBS_PER_BLOCK } from '@aztec/constants';
4
+ import { Fr } from '@aztec/foundation/fields';
5
+ import { BufferReader, FieldReader, type Tuple } from '@aztec/foundation/serialize';
6
+ import type { FieldsOf } from '@aztec/foundation/types';
7
+ import { type Blob } from './blob.js';
8
+ /**
9
+ * Public inputs required to be passed from our rollup circuits to verify a blob.
10
+ */
11
+ export declare class BlobPublicInputs {
12
+ /** Challenge point z (= H(H(tx_effects), kzgCommitment)). */
13
+ z: Fr;
14
+ /** Evaluation y = p(z), where p() is the blob polynomial. */
15
+ y: bigint;
16
+ /** Commitment to the blob C. */
17
+ kzgCommitment: Tuple<Fr, 2>;
18
+ constructor(
19
+ /** Challenge point z (= H(H(tx_effects), kzgCommitment)). */
20
+ z: Fr,
21
+ /** Evaluation y = p(z), where p() is the blob polynomial. */
22
+ y: bigint,
23
+ /** Commitment to the blob C. */
24
+ kzgCommitment: Tuple<Fr, 2>);
25
+ static empty(): BlobPublicInputs;
26
+ isEmpty(): boolean;
27
+ static fromBuffer(buffer: Buffer | BufferReader): BlobPublicInputs;
28
+ toBuffer(): Buffer;
29
+ static fromFields(fields: Fr[] | FieldReader): BlobPublicInputs;
30
+ toFields(): Fr[];
31
+ static getFields(fields: FieldsOf<BlobPublicInputs>): readonly [Fr, bigint, [Fr, Fr]];
32
+ static fromBlob(input: Blob): BlobPublicInputs;
33
+ getBlobHash(): Buffer;
34
+ commitmentToBuffer(): Buffer;
35
+ equals(other: BlobPublicInputs): boolean;
36
+ }
37
+ export declare class BlockBlobPublicInputs {
38
+ inner: Tuple<BlobPublicInputs, typeof BLOBS_PER_BLOCK>;
39
+ constructor(inner: Tuple<BlobPublicInputs, typeof BLOBS_PER_BLOCK>);
40
+ static empty(): BlockBlobPublicInputs;
41
+ static fromBuffer(buffer: Buffer | BufferReader): BlockBlobPublicInputs;
42
+ toBuffer(): Buffer;
43
+ static fromFields(fields: Fr[] | FieldReader): BlockBlobPublicInputs;
44
+ toFields(): Fr[];
45
+ static getFields(fields: FieldsOf<BlockBlobPublicInputs>): readonly [[BlobPublicInputs, BlobPublicInputs, BlobPublicInputs]];
46
+ static fromBlobs(inputs: Blob[]): BlockBlobPublicInputs;
47
+ getBlobsHash(): Buffer;
48
+ toString(): string;
49
+ }
50
+ //# sourceMappingURL=blob_public_inputs.d.ts.map
package/dest/blob_public_inputs.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"blob_public_inputs.d.ts","sourceRoot":"","sources":["../src/blob_public_inputs.ts"],"names":[],"mappings":";;AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAInD,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,KAAK,EAAqB,MAAM,6BAA6B,CAAC;AACvG,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,yBAAyB,CAAC;AAExD,OAAO,EAAE,KAAK,IAAI,EAA8B,MAAM,WAAW,CAAC;AAElE;;GAEG;AACH,qBAAa,gBAAgB;IAEzB,6DAA6D;IACtD,CAAC,EAAE,EAAE;IACZ,6DAA6D;IACtD,CAAC,EAAE,MAAM;IAChB,gCAAgC;IACzB,aAAa,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC;;IALlC,6DAA6D;IACtD,CAAC,EAAE,EAAE;IACZ,6DAA6D;IACtD,CAAC,EAAE,MAAM;IAChB,gCAAgC;IACzB,aAAa,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC;IAGpC,MAAM,CAAC,KAAK,IAAI,gBAAgB;IAIhC,OAAO,IAAI,OAAO;IAIlB,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,gBAAgB;IAKlE,QAAQ;IAIR,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,WAAW,GAAG,gBAAgB;IAa/D,QAAQ;IAUR,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,gBAAgB,CAAC;IAInD,MAAM,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,GAAG,gBAAgB;IAI9C,WAAW,IAAI,MAAM;IAQrB,kBAAkB,IAAI,MAAM;IAO5B,MAAM,CAAC,KAAK,EAAE,gBAAgB;CAQ/B;AAID,qBAAa,qBAAqB;IACb,KAAK,EAAE,KAAK,CAAC,gBAAgB,EAAE,OAAO,eAAe,CAAC;gBAAtD,KAAK,EAAE,KAAK,CAAC,gBAAgB,EAAE,OAAO,eAAe,CAAC;IAEzE,MAAM,CAAC,KAAK,IAAI,qBAAqB;IAIrC,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,qBAAqB;IAKvE,QAAQ;IAIR,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,WAAW,GAAG,qBAAqB;IAKpE,QAAQ;IAIR,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,qBAAqB,CAAC;IAIxD,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,qBAAqB;IAWvD,YAAY;IAMZ,QAAQ;CAYT"}
package/dest/blob_public_inputs.js ADDED
@@ -0,0 +1,146 @@
1
+ import { BLOBS_PER_BLOCK } from '@aztec/constants';
2
+ import { makeTuple } from '@aztec/foundation/array';
3
+ import { toBigIntBE, toBufferBE, toHex } from '@aztec/foundation/bigint-buffer';
4
+ import { sha256, sha256Trunc } from '@aztec/foundation/crypto';
5
+ import { Fr } from '@aztec/foundation/fields';
6
+ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
7
+ import { VERSIONED_HASH_VERSION_KZG } from './blob.js';
8
+ /**
9
+ * Public inputs required to be passed from our rollup circuits to verify a blob.
10
+ */ export class BlobPublicInputs {
11
+ z;
12
+ y;
13
+ kzgCommitment;
14
+ constructor(/** Challenge point z (= H(H(tx_effects), kzgCommitment)). */ z, /** Evaluation y = p(z), where p() is the blob polynomial. */ y, /** Commitment to the blob C. */ kzgCommitment){
15
+ this.z = z;
16
+ this.y = y;
17
+ this.kzgCommitment = kzgCommitment;
18
+ }
19
+ static empty() {
20
+ return new BlobPublicInputs(Fr.ZERO, 0n, [
21
+ Fr.ZERO,
22
+ Fr.ZERO
23
+ ]);
24
+ }
25
+ isEmpty() {
26
+ return this.z.isZero() && this.y == 0n && this.kzgCommitment[0].isZero() && this.kzgCommitment[1].isZero();
27
+ }
28
+ static fromBuffer(buffer) {
29
+ const reader = BufferReader.asReader(buffer);
30
+ return new BlobPublicInputs(Fr.fromBuffer(reader), toBigIntBE(reader.readBytes(32)), reader.readArray(2, Fr));
31
+ }
32
+ toBuffer() {
33
+ return serializeToBuffer(...BlobPublicInputs.getFields(this));
34
+ }
35
+ static fromFields(fields) {
36
+ const reader = FieldReader.asReader(fields);
37
+ // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
38
+ const fromBigNum = (fieldArr)=>{
39
+ return BigInt(fieldArr[2].toString().concat(fieldArr[1].toString().substring(2), fieldArr[0].toString().substring(2)));
40
+ };
41
+ return new BlobPublicInputs(reader.readField(), fromBigNum(reader.readFieldArray(3)), reader.readFieldArray(2));
42
+ }
43
+ // NB: y is NOT a BN254 field; it's a larger BLS12 field element, so we cannot use serializeToFields here, as it assumes bigints fit in a single field
44
+ // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
45
+ toFields() {
46
+ const hex = toHex(this.y, true);
47
+ const bigNum = [
48
+ Fr.fromString('0x' + hex.substring(36)),
49
+ Fr.fromString('0x' + hex.substring(6, 36)),
50
+ Fr.fromString(hex.substring(0, 6))
51
+ ];
52
+ return [
53
+ this.z,
54
+ ...bigNum,
55
+ ...this.kzgCommitment
56
+ ];
57
+ }
58
+ static getFields(fields) {
59
+ return [
60
+ fields.z,
61
+ fields.y,
62
+ fields.kzgCommitment
63
+ ];
64
+ }
65
+ static fromBlob(input) {
66
+ return new BlobPublicInputs(input.challengeZ, toBigIntBE(input.evaluationY), input.commitmentToFields());
67
+ }
68
+ getBlobHash() {
69
+ const hash = sha256(this.commitmentToBuffer());
70
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
71
+ return hash;
72
+ }
73
+ // Performs the reverse conversion of blob.commitmentToFields()
74
+ // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
75
+ commitmentToBuffer() {
76
+ return Buffer.concat([
77
+ this.kzgCommitment[0].toBuffer().subarray(1),
78
+ this.kzgCommitment[1].toBuffer().subarray(-17)
79
+ ]);
80
+ }
81
+ equals(other) {
82
+ return this.z.equals(other.z) && this.y == other.y && this.kzgCommitment[0].equals(other.kzgCommitment[0]) && this.kzgCommitment[1].equals(other.kzgCommitment[1]);
83
+ }
84
+ }
85
+ // NB: it is much cleaner throughout the protocol circuits to define this struct rather than use a nested array.
86
+ // Once we accumulate blob inputs, it should be removed, and we just use BlobPublicInputs::accumulate everywhere.
87
+ export class BlockBlobPublicInputs {
88
+ inner;
89
+ constructor(inner){
90
+ this.inner = inner;
91
+ }
92
+ static empty() {
93
+ return new BlockBlobPublicInputs(makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty));
94
+ }
95
+ static fromBuffer(buffer) {
96
+ const reader = BufferReader.asReader(buffer);
97
+ return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
98
+ }
99
+ toBuffer() {
100
+ return serializeToBuffer(...BlockBlobPublicInputs.getFields(this));
101
+ }
102
+ static fromFields(fields) {
103
+ const reader = FieldReader.asReader(fields);
104
+ return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
105
+ }
106
+ toFields() {
107
+ return this.inner.map((i)=>i.toFields()).flat();
108
+ }
109
+ static getFields(fields) {
110
+ return [
111
+ fields.inner
112
+ ];
113
+ }
114
+ static fromBlobs(inputs) {
115
+ const inner = makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty);
116
+ if (inputs.length > BLOBS_PER_BLOCK) {
117
+ throw new Error(`Can only fit ${BLOBS_PER_BLOCK} blobs in one BlockBlobPublicInputs instance (given ${inputs.length})`);
118
+ }
119
+ inputs.forEach((input, i)=>{
120
+ inner[i] = BlobPublicInputs.fromBlob(input);
121
+ });
122
+ return new BlockBlobPublicInputs(inner);
123
+ }
124
+ getBlobsHash() {
125
+ const blobHashes = this.inner.map((item)=>item.isEmpty() ? Buffer.alloc(0) : item.getBlobHash());
126
+ return sha256Trunc(serializeToBuffer(blobHashes));
127
+ }
128
+ // The below is used to send to L1 for proof verification
129
+ toString() {
130
+ const nonEmptyBlobs = this.inner.filter((item)=>!item.isEmpty());
131
+ // Write the number of blobs for L1 to verify
132
+ let buf = Buffer.alloc(1);
133
+ buf.writeUInt8(nonEmptyBlobs.length);
134
+ // Using standard toBuffer() does not correctly encode the commitment
135
+ // On L1, it's a 48 byte number, which we convert to 2 fields for use in the circuits
136
+ nonEmptyBlobs.forEach((blob)=>{
137
+ buf = Buffer.concat([
138
+ buf,
139
+ blob.z.toBuffer(),
140
+ toBufferBE(blob.y, 32),
141
+ blob.commitmentToBuffer()
142
+ ]);
143
+ });
144
+ return buf.toString('hex');
145
+ }
146
+ }
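A hedged sketch (not part of the package) of how an L1-side consumer could parse the hex string produced by BlockBlobPublicInputs.toString() above: a 1-byte count of non-empty blobs, followed by 112 bytes per blob (32-byte z, 32-byte y, 48-byte commitment). The helper name is hypothetical.

function parseBlockBlobInputs(hex: string): { z: Buffer; y: Buffer; commitment: Buffer }[] {
  const buf = Buffer.from(hex, 'hex');
  const count = buf.readUInt8(0); // number of non-empty blobs
  const out: { z: Buffer; y: Buffer; commitment: Buffer }[] = [];
  for (let i = 0; i < count; i++) {
    const offset = 1 + i * 112; // 32 (z) + 32 (y) + 48 (commitment) bytes per blob
    out.push({
      z: buf.subarray(offset, offset + 32),
      y: buf.subarray(offset + 32, offset + 64),
      commitment: buf.subarray(offset + 64, offset + 112),
    });
  }
  return out;
}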