@aztec/blob-lib 3.0.0-canary.a9708bd → 3.0.0-manual.20251030

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65) hide show
  1. package/dest/blob.d.ts +52 -95
  2. package/dest/blob.d.ts.map +1 -1
  3. package/dest/blob.js +73 -165
  4. package/dest/blob_batching.d.ts +15 -48
  5. package/dest/blob_batching.d.ts.map +1 -1
  6. package/dest/blob_batching.js +81 -120
  7. package/dest/blob_utils.d.ts +30 -0
  8. package/dest/blob_utils.d.ts.map +1 -0
  9. package/dest/blob_utils.js +60 -0
  10. package/dest/circuit_types/blob_accumulator.d.ts +21 -0
  11. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  12. package/dest/circuit_types/blob_accumulator.js +58 -0
  13. package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
  14. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/final_blob_accumulator.js +63 -0
  16. package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
  17. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
  19. package/dest/circuit_types/index.d.ts +4 -0
  20. package/dest/circuit_types/index.d.ts.map +1 -0
  21. package/dest/circuit_types/index.js +4 -0
  22. package/dest/deserialize.d.ts +14 -0
  23. package/dest/deserialize.d.ts.map +1 -0
  24. package/dest/deserialize.js +33 -0
  25. package/dest/encoding.d.ts +22 -62
  26. package/dest/encoding.d.ts.map +1 -1
  27. package/dest/encoding.js +114 -104
  28. package/dest/hash.d.ts +35 -0
  29. package/dest/hash.d.ts.map +1 -0
  30. package/dest/hash.js +69 -0
  31. package/dest/index.d.ts +5 -2
  32. package/dest/index.d.ts.map +1 -1
  33. package/dest/index.js +5 -15
  34. package/dest/kzg_context.d.ts +4 -0
  35. package/dest/kzg_context.d.ts.map +1 -0
  36. package/dest/kzg_context.js +5 -0
  37. package/dest/sponge_blob.d.ts +13 -9
  38. package/dest/sponge_blob.d.ts.map +1 -1
  39. package/dest/sponge_blob.js +28 -17
  40. package/dest/testing.d.ts +7 -12
  41. package/dest/testing.d.ts.map +1 -1
  42. package/dest/testing.js +54 -41
  43. package/dest/types.d.ts +16 -0
  44. package/dest/types.d.ts.map +1 -0
  45. package/dest/types.js +3 -0
  46. package/package.json +6 -4
  47. package/src/blob.ts +76 -191
  48. package/src/blob_batching.ts +109 -137
  49. package/src/blob_utils.ts +71 -0
  50. package/src/circuit_types/blob_accumulator.ts +84 -0
  51. package/src/circuit_types/final_blob_accumulator.ts +75 -0
  52. package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
  53. package/src/circuit_types/index.ts +4 -0
  54. package/src/deserialize.ts +38 -0
  55. package/src/encoding.ts +136 -120
  56. package/src/hash.ts +77 -0
  57. package/src/index.ts +5 -18
  58. package/src/kzg_context.ts +5 -0
  59. package/src/sponge_blob.ts +24 -14
  60. package/src/testing.ts +55 -40
  61. package/src/types.ts +16 -0
  62. package/dest/blob_batching_public_inputs.d.ts +0 -71
  63. package/dest/blob_batching_public_inputs.d.ts.map +0 -1
  64. package/dest/blob_batching_public_inputs.js +0 -168
  65. package/src/blob_batching_public_inputs.ts +0 -252
@@ -0,0 +1,63 @@
1
+ import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
2
+ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
3
+ import { inspect } from 'util';
4
+ /**
5
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
6
+ */ export class FinalBlobAccumulator {
7
+ blobCommitmentsHash;
8
+ z;
9
+ y;
10
+ c;
11
+ constructor(blobCommitmentsHash, z, y, c){
12
+ this.blobCommitmentsHash = blobCommitmentsHash;
13
+ this.z = z;
14
+ this.y = y;
15
+ this.c = c;
16
+ }
17
+ static empty() {
18
+ return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
19
+ }
20
+ static fromBuffer(buffer) {
21
+ const reader = BufferReader.asReader(buffer);
22
+ return new FinalBlobAccumulator(Fr.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader), BLS12Point.fromBuffer(reader));
23
+ }
24
+ toBuffer() {
25
+ return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
26
+ }
27
+ toFields() {
28
+ return [
29
+ this.blobCommitmentsHash,
30
+ this.z,
31
+ ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
32
+ ...this.c.toBN254Fields()
33
+ ];
34
+ }
35
+ // The below is used to send to L1 for proof verification
36
+ toString() {
37
+ // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
38
+ // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
39
+ // we use calldata rather than a slice in memory:
40
+ const buf = Buffer.concat([
41
+ Buffer.alloc(32),
42
+ this.z.toBuffer(),
43
+ this.y.toBuffer(),
44
+ this.c.compress()
45
+ ]);
46
+ return buf.toString('hex');
47
+ }
48
+ equals(other) {
49
+ return this.blobCommitmentsHash.equals(other.blobCommitmentsHash) && this.z.equals(other.z) && this.y.equals(other.y) && this.c.equals(other.c);
50
+ }
51
+ // Creates a random instance. Used for testing only - will not prove/verify.
52
+ static random() {
53
+ return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
54
+ }
55
+ [inspect.custom]() {
56
+ return `FinalBlobAccumulator {
57
+ blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
58
+ z: ${inspect(this.z)},
59
+ y: ${inspect(this.y)},
60
+ c: ${inspect(this.c)},
61
+ }`;
62
+ }
63
+ }
@@ -0,0 +1,15 @@
1
+ import { BLS12Fr, Fr } from '@aztec/foundation/fields';
2
+ import { BufferReader } from '@aztec/foundation/serialize';
3
+ /**
4
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
5
+ */
6
+ export declare class FinalBlobBatchingChallenges {
7
+ readonly z: Fr;
8
+ readonly gamma: BLS12Fr;
9
+ constructor(z: Fr, gamma: BLS12Fr);
10
+ equals(other: FinalBlobBatchingChallenges): boolean;
11
+ static empty(): FinalBlobBatchingChallenges;
12
+ static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges;
13
+ toBuffer(): Buffer<ArrayBufferLike>;
14
+ }
15
+ //# sourceMappingURL=final_blob_batching_challenges.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"final_blob_batching_challenges.d.ts","sourceRoot":"","sources":["../../src/circuit_types/final_blob_batching_challenges.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AACvD,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAE9E;;GAEG;AACH,qBAAa,2BAA2B;aAEpB,CAAC,EAAE,EAAE;aACL,KAAK,EAAE,OAAO;gBADd,CAAC,EAAE,EAAE,EACL,KAAK,EAAE,OAAO;IAGhC,MAAM,CAAC,KAAK,EAAE,2BAA2B;IAIzC,MAAM,CAAC,KAAK,IAAI,2BAA2B;IAI3C,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,2BAA2B;IAK7E,QAAQ;CAGT"}
@@ -0,0 +1,25 @@
1
+ import { BLS12Fr, Fr } from '@aztec/foundation/fields';
2
+ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
3
+ /**
4
+ * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
5
+ */ export class FinalBlobBatchingChallenges {
6
+ z;
7
+ gamma;
8
+ constructor(z, gamma){
9
+ this.z = z;
10
+ this.gamma = gamma;
11
+ }
12
+ equals(other) {
13
+ return this.z.equals(other.z) && this.gamma.equals(other.gamma);
14
+ }
15
+ static empty() {
16
+ return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
17
+ }
18
+ static fromBuffer(buffer) {
19
+ const reader = BufferReader.asReader(buffer);
20
+ return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
21
+ }
22
+ toBuffer() {
23
+ return serializeToBuffer(this.z, this.gamma);
24
+ }
25
+ }
@@ -0,0 +1,4 @@
1
+ export * from './blob_accumulator.js';
2
+ export * from './final_blob_accumulator.js';
3
+ export * from './final_blob_batching_challenges.js';
4
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/circuit_types/index.ts"],"names":[],"mappings":"AACA,cAAc,uBAAuB,CAAC;AACtC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,qCAAqC,CAAC"}
@@ -0,0 +1,4 @@
1
+ /// Types used in the protocol circuits.
2
+ export * from './blob_accumulator.js';
3
+ export * from './final_blob_accumulator.js';
4
+ export * from './final_blob_batching_challenges.js';
@@ -0,0 +1,14 @@
1
+ import { Fr } from '@aztec/foundation/fields';
2
+ /**
3
+ * Deserializes a buffer into an array of field elements.
4
+ *
5
+ * This function returns the fields that were actually added in a checkpoint. The number of fields is specified by the
6
+ * first field.
7
+ *
8
+ * @param buf - The buffer to deserialize.
9
+ * @param checkEncoding - Whether to check if the encoding is correct. If false, it will still check the checkpoint
10
+ * prefix and throw if there's not enough fields.
11
+ * @returns An array of field elements.
12
+ */
13
+ export declare function deserializeEncodedBlobToFields(buf: Uint8Array, checkEncoding?: boolean): Fr[];
14
+ //# sourceMappingURL=deserialize.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"deserialize.d.ts","sourceRoot":"","sources":["../src/deserialize.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAM9C;;;;;;;;;;GAUG;AACH,wBAAgB,8BAA8B,CAAC,GAAG,EAAE,UAAU,EAAE,aAAa,UAAQ,GAAG,EAAE,EAAE,CAoB3F"}
@@ -0,0 +1,33 @@
1
+ import { Fr } from '@aztec/foundation/fields';
2
+ import { BufferReader } from '@aztec/foundation/serialize';
3
+ import { checkBlobFieldsEncoding } from './encoding.js';
4
+ import { BlobDeserializationError } from './errors.js';
5
+ /**
6
+ * Deserializes a buffer into an array of field elements.
7
+ *
8
+ * This function returns the fields that were actually added in a checkpoint. The number of fields is specified by the
9
+ * first field.
10
+ *
11
+ * @param buf - The buffer to deserialize.
12
+ * @param checkEncoding - Whether to check if the encoding is correct. If false, it will still check the checkpoint
13
+ * prefix and throw if there's not enough fields.
14
+ * @returns An array of field elements.
15
+ */ export function deserializeEncodedBlobToFields(buf, checkEncoding = false) {
16
+ const reader = BufferReader.asReader(buf);
17
+ const firstField = reader.readObject(Fr);
18
+ // Use toBigInt instead of toNumber so that we can catch it and throw a more descriptive error below if the first
19
+ // field is larger than a javascript integer.
20
+ const numFields = firstField.toBigInt();
21
+ const totalFieldsInBuffer = BigInt(buf.length / Fr.SIZE_IN_BYTES);
22
+ if (numFields > totalFieldsInBuffer) {
23
+ throw new BlobDeserializationError(`Failed to deserialize blob fields, this blob was likely not created by us`);
24
+ }
25
+ const numFieldsWithoutPrefix = Number(numFields) - 1;
26
+ const blobFields = [
27
+ firstField
28
+ ].concat(reader.readArray(numFieldsWithoutPrefix, Fr));
29
+ if (checkEncoding && !checkBlobFieldsEncoding(blobFields)) {
30
+ throw new BlobDeserializationError(`Incorrect encoding of blob fields, this blob was likely not created by us`);
31
+ }
32
+ return blobFields;
33
+ }
@@ -1,66 +1,26 @@
1
1
  import { Fr } from '@aztec/foundation/fields';
2
- import type { Blob as BlobBuffer } from 'c-kzg';
3
- export declare const TX_START_PREFIX = 8392562855083340404n;
4
- export declare const TX_START_PREFIX_BYTES_LENGTH: number;
5
- export declare const TX_EFFECT_PREFIX_BYTE_LENGTH: number;
6
- export declare const REVERT_CODE_PREFIX = 1;
2
+ export interface TxStartMarker {
3
+ prefix: bigint;
4
+ numBlobFields: number;
5
+ revertCode: number;
6
+ numNoteHashes: number;
7
+ numNullifiers: number;
8
+ numL2ToL1Msgs: number;
9
+ numPublicDataWrites: number;
10
+ numPrivateLogs: number;
11
+ publicLogsLength: number;
12
+ contractClassLogLength: number;
13
+ }
14
+ export declare function encodeTxStartMarker(txStartMarker: Omit<TxStartMarker, 'prefix'>): Fr;
15
+ export declare function decodeTxStartMarker(field: Fr): TxStartMarker;
16
+ export declare function getNumBlobFieldsFromTxStartMarker(field: Fr): number;
17
+ export declare function isValidTxStartMarker(txStartMarker: TxStartMarker): boolean;
18
+ export declare function createBlockEndMarker(numTxs: number): Fr;
19
+ export declare function getNumTxsFromBlockEndMarker(field: Fr): number;
20
+ export declare function isBlockEndMarker(field: Fr): boolean;
7
21
  /**
8
- * Deserializes a blob buffer into an array of field elements.
9
- *
10
- * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
11
- * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
12
- *
13
- * However, we cannot simply trim the zero's from the end of the blob, as some logs may include zero's
14
- * within them.
15
- * If we end on a set of zeros, such as the log below:
16
- * length 7: [ a, b, c, d, e, 0, 0]
17
- *
18
- * we will end up with the incorrect hash if we trim the zeros from the end.
19
- *
20
- * Each transactions logs contains a TX start prefix, which includes a string followed
21
- * by the length ( in field elements ) of the transaction's log.
22
- *
23
- * This function finds the end of the last transaction's logs, and returns the array up to this point.
24
- *
25
- * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
26
- * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
27
- *
28
- * +------------------+------------------+------------------+------------------+
29
- * | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros |
30
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
31
- * +------------------+------------------+------------------+------------------+
32
- * ^
33
- * |
34
- * Function reads until here --------------------------------
35
- *
36
- * @param blob - The blob buffer to deserialize.
37
- * @returns An array of field elements.
22
+ * Check that the fields are emitted from the circuits and conform to the encoding.
23
+ * @param blobFields - The concatenated fields from all blobs of an L1 block.
38
24
  */
39
- export declare function deserializeEncodedBlobToFields(blob: BlobBuffer): Fr[];
40
- /**
41
- * Get the length of the transaction from the first field.
42
- *
43
- * @param firstField - The first field of the transaction.
44
- * @returns The length of the transaction.
45
- *
46
- * @throws If the first field does not include the correct prefix - encoding invalid.
47
- */
48
- export declare function getLengthFromFirstField(firstField: Fr): number;
49
- /**
50
- * Determines whether a field is the first field of a tx effect
51
- */
52
- export declare function isValidFirstField(field: Fr): boolean;
53
- /**
54
- * Extract the fields from a blob buffer, but do not take into account encoding
55
- * that will include trailing zeros.
56
- *
57
- * +------------------+------------------+------------------+------------------+
58
- * | | | | Padded zeros |
59
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
60
- * +------------------+------------------+------------------+------------------+
61
- * ^
62
- * |
63
- * Function reads until here ----------------------
64
- */
65
- export declare function extractBlobFieldsFromBuffer(blob: BlobBuffer): Fr[];
25
+ export declare function checkBlobFieldsEncoding(blobFields: Fr[]): boolean;
66
26
  //# sourceMappingURL=encoding.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"encoding.d.ts","sourceRoot":"","sources":["../src/encoding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAG9C,OAAO,KAAK,EAAE,IAAI,IAAI,UAAU,EAAE,MAAM,OAAO,CAAC;AAIhD,eAAO,MAAM,eAAe,uBAAuB,CAAC;AAEpD,eAAO,MAAM,4BAA4B,QAA0C,CAAC;AAEpF,eAAO,MAAM,4BAA4B,QAAmC,CAAC;AAC7E,eAAO,MAAM,kBAAkB,IAAI,CAAC;AAEpC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AACH,wBAAgB,8BAA8B,CAAC,IAAI,EAAE,UAAU,GAAG,EAAE,EAAE,CAsBrE;AAED;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CAAC,UAAU,EAAE,EAAE,GAAG,MAAM,CAO9D;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,EAAE,GAAG,OAAO,CAqBpD;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,2BAA2B,CAAC,IAAI,EAAE,UAAU,GAAG,EAAE,EAAE,CAYlE"}
1
+ {"version":3,"file":"encoding.d.ts","sourceRoot":"","sources":["../src/encoding.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAa9C,MAAM,WAAW,aAAa;IAC5B,MAAM,EAAE,MAAM,CAAC;IACf,aAAa,EAAE,MAAM,CAAC;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,cAAc,EAAE,MAAM,CAAC;IACvB,gBAAgB,EAAE,MAAM,CAAC;IACzB,sBAAsB,EAAE,MAAM,CAAC;CAChC;AAGD,wBAAgB,mBAAmB,CAAC,aAAa,EAAE,IAAI,CAAC,aAAa,EAAE,QAAQ,CAAC,MAqB/E;AAED,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,EAAE,GAAG,aAAa,CAmC5D;AAED,wBAAgB,iCAAiC,CAAC,KAAK,EAAE,EAAE,UAE1D;AAED,wBAAgB,oBAAoB,CAAC,aAAa,EAAE,aAAa,WAEhE;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,MAAM,MAGlD;AAED,wBAAgB,2BAA2B,CAAC,KAAK,EAAE,EAAE,UAEpD;AAED,wBAAgB,gBAAgB,CAAC,KAAK,EAAE,EAAE,WAIzC;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,UAAU,EAAE,EAAE,EAAE,WAsCvD"}
package/dest/encoding.js CHANGED
@@ -1,113 +1,123 @@
1
+ import { BLOCK_END_PREFIX, TX_START_PREFIX } from '@aztec/constants';
1
2
  import { Fr } from '@aztec/foundation/fields';
2
- import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
3
- // Note duplicated from stdlib !
4
- // This will appear as 0x74785f7374617274 in logs
5
- export const TX_START_PREFIX = 8392562855083340404n;
6
- // These are helper constants to decode tx effects from blob encoded fields
7
- export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
8
- // 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
9
- export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;
10
- export const REVERT_CODE_PREFIX = 1;
11
- /**
12
- * Deserializes a blob buffer into an array of field elements.
13
- *
14
- * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
15
- * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
16
- *
17
- * However, we cannot simply trim the zero's from the end of the blob, as some logs may include zero's
18
- * within them.
19
- * If we end on a set of zeros, such as the log below:
20
- * length 7: [ a, b, c, d, e, 0, 0]
21
- *
22
- * we will end up with the incorrect hash if we trim the zeros from the end.
23
- *
24
- * Each transactions logs contains a TX start prefix, which includes a string followed
25
- * by the length ( in field elements ) of the transaction's log.
26
- *
27
- * This function finds the end of the last transaction's logs, and returns the array up to this point.
28
- *
29
- * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
30
- * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
31
- *
32
- * +------------------+------------------+------------------+------------------+
33
- * | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros |
34
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
35
- * +------------------+------------------+------------------+------------------+
36
- * ^
37
- * |
38
- * Function reads until here --------------------------------
39
- *
40
- * @param blob - The blob buffer to deserialize.
41
- * @returns An array of field elements.
42
- */ export function deserializeEncodedBlobToFields(blob) {
43
- // Convert blob buffer to array of field elements
44
- const reader = BufferReader.asReader(blob);
45
- const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
46
- const fieldReader = FieldReader.asReader(array);
47
- // Read fields until we hit zeros at the end
48
- while(!fieldReader.isFinished()){
49
- const currentField = fieldReader.peekField();
50
- // Stop when we hit a zero field
51
- if (!currentField || currentField.isZero()) {
52
- break;
53
- }
54
- // Skip the remaining fields in this transaction
55
- const len = getLengthFromFirstField(currentField);
56
- fieldReader.skip(len);
57
- }
58
- // Return array up to last non-zero field
59
- return array.slice(0, fieldReader.cursor);
3
+ import { FieldReader } from '@aztec/foundation/serialize';
4
+ const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
5
+ const REVERT_CODE_BIT_SIZE = 8n;
6
+ const NUM_NOTE_HASH_BIT_SIZE = 16n;
7
+ const NUM_NULLIFIER_BIT_SIZE = 16n;
8
+ const NUM_L2_TO_L1_MSG_BIT_SIZE = 16n;
9
+ const NUM_PUBLIC_DATA_WRITE_BIT_SIZE = 16n;
10
+ const NUM_PRIVATE_LOG_BIT_SIZE = 16n;
11
+ const PUBLIC_LOGS_LENGTH_BIT_SIZE = 32n;
12
+ const CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE = 16n;
13
+ // Must match the implementation in `noir-protocol-circuits/crates/rollup-lib/src/tx_base/components/tx_blob_data.nr`.
14
+ export function encodeTxStartMarker(txStartMarker) {
15
+ let value = TX_START_PREFIX;
16
+ value <<= NUM_NOTE_HASH_BIT_SIZE;
17
+ value += BigInt(txStartMarker.numNoteHashes);
18
+ value <<= NUM_NULLIFIER_BIT_SIZE;
19
+ value += BigInt(txStartMarker.numNullifiers);
20
+ value <<= NUM_L2_TO_L1_MSG_BIT_SIZE;
21
+ value += BigInt(txStartMarker.numL2ToL1Msgs);
22
+ value <<= NUM_PUBLIC_DATA_WRITE_BIT_SIZE;
23
+ value += BigInt(txStartMarker.numPublicDataWrites);
24
+ value <<= NUM_PRIVATE_LOG_BIT_SIZE;
25
+ value += BigInt(txStartMarker.numPrivateLogs);
26
+ value <<= PUBLIC_LOGS_LENGTH_BIT_SIZE;
27
+ value += BigInt(txStartMarker.publicLogsLength);
28
+ value <<= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
29
+ value += BigInt(txStartMarker.contractClassLogLength);
30
+ value <<= REVERT_CODE_BIT_SIZE;
31
+ value += BigInt(txStartMarker.revertCode);
32
+ value <<= NUM_BLOB_FIELDS_BIT_SIZE;
33
+ value += BigInt(txStartMarker.numBlobFields);
34
+ return new Fr(value);
60
35
  }
61
- /**
62
- * Get the length of the transaction from the first field.
63
- *
64
- * @param firstField - The first field of the transaction.
65
- * @returns The length of the transaction.
66
- *
67
- * @throws If the first field does not include the correct prefix - encoding invalid.
68
- */ export function getLengthFromFirstField(firstField) {
69
- // Check that the first field includes the correct prefix
70
- if (!isValidFirstField(firstField)) {
71
- throw new Error('Invalid prefix');
72
- }
73
- const buf = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
74
- return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber();
36
+ export function decodeTxStartMarker(field) {
37
+ let value = field.toBigInt();
38
+ const numBlobFields = Number(value & 2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n);
39
+ value >>= NUM_BLOB_FIELDS_BIT_SIZE;
40
+ const revertCode = Number(value & 2n ** REVERT_CODE_BIT_SIZE - 1n);
41
+ value >>= REVERT_CODE_BIT_SIZE;
42
+ const contractClassLogLength = Number(value & 2n ** CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE - 1n);
43
+ value >>= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
44
+ const publicLogsLength = Number(value & 2n ** PUBLIC_LOGS_LENGTH_BIT_SIZE - 1n);
45
+ value >>= PUBLIC_LOGS_LENGTH_BIT_SIZE;
46
+ const numPrivateLogs = Number(value & 2n ** NUM_PRIVATE_LOG_BIT_SIZE - 1n);
47
+ value >>= NUM_PRIVATE_LOG_BIT_SIZE;
48
+ const numPublicDataWrites = Number(value & 2n ** NUM_PUBLIC_DATA_WRITE_BIT_SIZE - 1n);
49
+ value >>= NUM_PUBLIC_DATA_WRITE_BIT_SIZE;
50
+ const numL2ToL1Msgs = Number(value & 2n ** NUM_L2_TO_L1_MSG_BIT_SIZE - 1n);
51
+ value >>= NUM_L2_TO_L1_MSG_BIT_SIZE;
52
+ const numNullifiers = Number(value & 2n ** NUM_NULLIFIER_BIT_SIZE - 1n);
53
+ value >>= NUM_NULLIFIER_BIT_SIZE;
54
+ const numNoteHashes = Number(value & 2n ** NUM_NOTE_HASH_BIT_SIZE - 1n);
55
+ value >>= NUM_NOTE_HASH_BIT_SIZE;
56
+ // Do not throw if the prefix doesn't match.
57
+ // The caller function can check it by calling `isValidTxStartMarker`, and decide what to do if it's incorrect.
58
+ const prefix = value;
59
+ return {
60
+ prefix,
61
+ numBlobFields,
62
+ revertCode,
63
+ numNoteHashes,
64
+ numNullifiers,
65
+ numL2ToL1Msgs,
66
+ numPublicDataWrites,
67
+ numPrivateLogs,
68
+ publicLogsLength,
69
+ contractClassLogLength
70
+ };
71
+ }
72
+ export function getNumBlobFieldsFromTxStartMarker(field) {
73
+ return Number(field.toBigInt() & 2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n);
74
+ }
75
+ export function isValidTxStartMarker(txStartMarker) {
76
+ return txStartMarker.prefix === TX_START_PREFIX;
77
+ }
78
+ export function createBlockEndMarker(numTxs) {
79
+ // Must match the implementation in `block_rollup_public_inputs_composer.nr > create_block_end_marker`.
80
+ return new Fr(BLOCK_END_PREFIX * 256n * 256n + BigInt(numTxs));
81
+ }
82
+ export function getNumTxsFromBlockEndMarker(field) {
83
+ return Number(field.toBigInt() & 0xffffn);
84
+ }
85
+ export function isBlockEndMarker(field) {
86
+ const value = field.toBigInt();
87
+ const numTxs = value & 0xffffn;
88
+ return value - numTxs === BLOCK_END_PREFIX * 256n * 256n;
75
89
  }
76
90
  /**
77
- * Determines whether a field is the first field of a tx effect
78
- */ export function isValidFirstField(field) {
79
- const buf = field.toBuffer();
80
- if (!buf.subarray(0, field.size - TX_EFFECT_PREFIX_BYTE_LENGTH).equals(Buffer.alloc(field.size - TX_EFFECT_PREFIX_BYTE_LENGTH))) {
91
+ * Check that the fields are emitted from the circuits and conform to the encoding.
92
+ * @param blobFields - The concatenated fields from all blobs of an L1 block.
93
+ */ export function checkBlobFieldsEncoding(blobFields) {
94
+ const reader = FieldReader.asReader(blobFields);
95
+ const checkpointPrefix = reader.readField();
96
+ if (checkpointPrefix.toBigInt() !== BigInt(blobFields.length)) {
81
97
  return false;
82
98
  }
83
- const sliced = buf.subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
84
- if (// Checking we start with the correct prefix...
85
- !new Fr(sliced.subarray(0, TX_START_PREFIX_BYTES_LENGTH)).equals(new Fr(TX_START_PREFIX)) || // ...and include the revert code prefix..
86
- sliced[sliced.length - 3] !== REVERT_CODE_PREFIX || // ...and the following revert code is valid.
87
- sliced[sliced.length - 1] > 4) {
88
- return false;
99
+ const numFieldsInCheckpoint = checkpointPrefix.toNumber();
100
+ let seenNumTxs = 0;
101
+ while(reader.cursor < numFieldsInCheckpoint){
102
+ const currentField = reader.readField();
103
+ if (isBlockEndMarker(currentField)) {
104
+ // Found a block end marker. Confirm that the number of txs in this block is correct.
105
+ const numTxs = getNumTxsFromBlockEndMarker(currentField);
106
+ if (numTxs !== seenNumTxs) {
107
+ return false;
108
+ }
109
+ seenNumTxs = 0;
110
+ continue;
111
+ }
112
+ // If the field is not a block end marker, it must be a tx start marker.
113
+ const txStartMarker = decodeTxStartMarker(currentField);
114
+ if (!isValidTxStartMarker(txStartMarker)) {
115
+ return false;
116
+ }
117
+ seenNumTxs += 1;
118
+ // Skip the remaining fields in this tx. -1 because we already read the tx start marker.
119
+ reader.skip(txStartMarker.numBlobFields - 1);
120
+ // TODO: Check the encoding of the tx if we want to be more strict.
89
121
  }
90
122
  return true;
91
123
  }
92
- /**
93
- * Extract the fields from a blob buffer, but do not take into account encoding
94
- * that will include trailing zeros.
95
- *
96
- * +------------------+------------------+------------------+------------------+
97
- * | | | | Padded zeros |
98
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
99
- * +------------------+------------------+------------------+------------------+
100
- * ^
101
- * |
102
- * Function reads until here ----------------------
103
- */ export function extractBlobFieldsFromBuffer(blob) {
104
- const reader = BufferReader.asReader(blob);
105
- const array = reader.readArray(blob.length >> 5, Fr);
106
- // Find the index of the last non-zero field
107
- let lastNonZeroIndex = array.length - 1;
108
- while(lastNonZeroIndex >= 0 && array[lastNonZeroIndex].isZero()){
109
- lastNonZeroIndex--;
110
- }
111
- // Return the trimmed array
112
- return array.slice(0, lastNonZeroIndex + 1);
113
- }
package/dest/hash.d.ts ADDED
@@ -0,0 +1,35 @@
1
+ import { BLS12Fr, Fr } from '@aztec/foundation/fields';
2
+ /**
3
+ * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
4
+ */
5
+ export declare function computeEthVersionedBlobHash(commitment: Buffer): Buffer;
6
+ export declare function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr;
7
+ /**
8
+ * The hash of the fields added throughout the checkpoint. The exact number of fields is specified by the checkpoint
9
+ * prefix (the first field). It's verified in the circuit against the fields absorbed into the sponge blob.
10
+ * This hash is used in generating the challenge z for all blobs in the same checkpoint.
11
+ */
12
+ export declare function computeBlobFieldsHash(fields: Fr[]): Promise<Fr>;
13
+ export declare function computeBlobCommitment(data: Uint8Array): Buffer;
14
+ /**
15
+ * Get the commitment fields of the blob, to compute the challenge z.
16
+ *
17
+ * The 48-byte commitment is encoded into two field elements:
18
+ * +-------------------+------------------------+
19
+ * | 31 bytes | 17 bytes |
20
+ * +-------------------+------------------------+
21
+ * | Field Element 1 | Field Element 2 |
22
+ * | [0][bytes 0-30] | [0...0][bytes 31-47] |
23
+ * +-------------------+------------------------+
24
+ *
25
+ * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
26
+ * @returns The fields representing the commitment buffer.
27
+ */
28
+ export declare function commitmentToFields(commitment: Buffer): [Fr, Fr];
29
+ export declare function computeChallengeZ(blobFieldsHash: Fr, commitment: Buffer): Promise<Fr>;
30
+ /**
31
+ * Hash each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash accumulation in the
32
+ * rollup circuits.
33
+ */
34
+ export declare function hashNoirBigNumLimbs(field: BLS12Fr): Promise<Fr>;
35
+ //# sourceMappingURL=hash.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAMvD;;GAEG;AACH,wBAAgB,2BAA2B,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAItE;AAOD,wBAAgB,gBAAgB,CAAC,sBAAsB,EAAE,MAAM,EAAE,GAAG,EAAE,CAErE;AAED;;;;GAIG;AACH,wBAAsB,qBAAqB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAErE;AAED,wBAAgB,qBAAqB,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,CAM9D;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,kBAAkB,CAAC,UAAU,EAAE,MAAM,GAAG,CAAC,EAAE,EAAE,EAAE,CAAC,CAM/D;AAED,wBAAsB,iBAAiB,CAAC,cAAc,EAAE,EAAE,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC,CAG3F;AAED;;;GAGG;AACH,wBAAsB,mBAAmB,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC,CAGrE"}
package/dest/hash.js ADDED
@@ -0,0 +1,69 @@
1
+ import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
2
+ import { Fr } from '@aztec/foundation/fields';
3
+ import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
4
+ const VERSIONED_HASH_VERSION_KZG = 0x01;
5
/**
 * Computes Ethereum's versioned blob hash for a KZG commitment, following the
 * kzg_to_versioned_hash helper from EIP-4844: https://eips.ethereum.org/EIPS/eip-4844#helpers
 * (sha256 of the commitment, with the first digest byte overwritten by the version tag).
 */ export function computeEthVersionedBlobHash(commitment) {
    const versionedHash = sha256(commitment);
    // Stamp the KZG version byte over the first byte of the digest, per the EIP-4844 helper.
    versionedHash[0] = VERSIONED_HASH_VERSION_KZG;
    return versionedHash;
}
12
// TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, which is built from the blob commitments:
// - blobsHash := sha256([blobhash_0, ..., blobhash_m]) — a hash over every blob hash in a block with m+1 blobs,
//   inserted into the header so a user can cross check blobs.
// - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) — an iteratively accumulated hash of all
//   blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()), used to validate that
//   commitments injected into the rollup circuits correspond to the correct real blobs.
// These may be combinable, e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l)
// for an epoch with l+1 blocks.
export function computeBlobsHash(evmVersionedBlobHashes) {
    // Single sha256 over the concatenated versioned blob hashes, reduced to a field element.
    const blobsHash = sha256ToField(evmVersionedBlobHashes);
    return blobsHash;
}
20
/**
 * Hashes the fields added throughout the checkpoint. The exact number of fields is specified by the checkpoint
 * prefix (the first field), and the hash is verified in the circuit against the fields absorbed into the sponge
 * blob. It is used when generating the challenge z shared by all blobs in the same checkpoint.
 */ export async function computeBlobFieldsHash(fields) {
    const fieldsHash = await poseidon2Hash(fields);
    return fieldsHash;
}
27
/**
 * Computes the KZG commitment (48 bytes) for a full blob of data.
 * @param data - The raw blob bytes; must be exactly BYTES_PER_BLOB long.
 * @returns The KZG commitment as a Buffer.
 * @throws If the input is not exactly BYTES_PER_BLOB bytes.
 */ export function computeBlobCommitment(data) {
    const { length } = data;
    if (length !== BYTES_PER_BLOB) {
        throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. Got ${length}.`);
    }
    const commitment = kzg.blobToKzgCommitment(data);
    return Buffer.from(commitment);
}
33
/**
 * Get the commitment fields of the blob, to compute the challenge z.
 *
 * The 48-byte commitment is encoded into two field elements:
 * +-------------------+------------------------+
 * |      31 bytes     |        17 bytes        |
 * +-------------------+------------------------+
 * |  Field Element 1  |    Field Element 2     |
 * |  [0][bytes 0-30]  |  [0...0][bytes 31-47]  |
 * +-------------------+------------------------+
 *
 * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
 * @returns The fields representing the commitment buffer.
 */ export function commitmentToFields(commitment) {
    if (commitment.length !== BYTES_PER_COMMITMENT) {
        throw new Error(`Expected ${BYTES_PER_COMMITMENT} bytes for blob commitment. Got ${commitment.length}.`);
    }
    // Split at byte 31 so each half fits within a BN254 field element.
    const upperBytes = commitment.subarray(0, 31);
    const lowerBytes = commitment.subarray(31, BYTES_PER_COMMITMENT);
    return [new Fr(upperBytes), new Fr(lowerBytes)];
}
55
/**
 * Computes the challenge z for a blob by poseidon-hashing the checkpoint's blob-fields hash together
 * with the two field elements encoding the blob's KZG commitment (see `commitmentToFields`).
 * @param blobFieldsHash - Hash of the checkpoint's blob fields (from `computeBlobFieldsHash`).
 * @param commitment - The 48-byte KZG commitment buffer.
 * @returns The challenge z as a field element.
 */ export async function computeChallengeZ(blobFieldsHash, commitment) {
    const [commitmentHi, commitmentLo] = commitmentToFields(commitment);
    const challenge = await poseidon2Hash([blobFieldsHash, commitmentHi, commitmentLo]);
    return challenge;
}
63
/**
 * Hashes each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash
 * accumulation in the rollup circuits.
 * @param field - The BLS12 field element whose limbs are hashed.
 * @returns Poseidon2 hash of the limbs as field elements.
 */ export async function hashNoirBigNumLimbs(field) {
    const num = field.toNoirBigNum();
    // Wrap in an arrow function: passing `Fr.fromHexString` directly to `map` would forward map's
    // extra (index, array) arguments into the parser and invoke the static method unbound,
    // detaching it from `Fr` and losing its `this` binding.
    const limbFields = num.limbs.map((limb) => Fr.fromHexString(limb));
    return await poseidon2Hash(limbFields);
}
package/dest/index.d.ts CHANGED
@@ -1,8 +1,11 @@
1
1
  export * from './blob.js';
2
2
  export * from './blob_batching.js';
3
+ export * from './blob_utils.js';
4
+ export * from './circuit_types/index.js';
5
+ export * from './deserialize.js';
3
6
  export * from './encoding.js';
4
- export * from './interface.js';
5
7
  export * from './errors.js';
6
- export * from './blob_batching_public_inputs.js';
8
+ export * from './hash.js';
9
+ export * from './interface.js';
7
10
  export * from './sponge_blob.js';
8
11
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAIA,cAAc,WAAW,CAAC;AAC1B,cAAc,oBAAoB,CAAC;AACnC,cAAc,eAAe,CAAC;AAC9B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,aAAa,CAAC;AAC5B,cAAc,kCAAkC,CAAC;AACjD,cAAc,kBAAkB,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAC;AAC1B,cAAc,oBAAoB,CAAC;AACnC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,kBAAkB,CAAC;AACjC,cAAc,eAAe,CAAC;AAC9B,cAAc,aAAa,CAAC;AAC5B,cAAc,WAAW,CAAC;AAC1B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,kBAAkB,CAAC"}