@aztec/blob-lib 0.0.0-test.1 → 0.0.1-commit.21caa21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dest/batched_blob.d.ts +25 -0
  2. package/dest/batched_blob.d.ts.map +1 -0
  3. package/dest/batched_blob.js +20 -0
  4. package/dest/blob.d.ts +53 -100
  5. package/dest/blob.d.ts.map +1 -1
  6. package/dest/blob.js +83 -183
  7. package/dest/blob_batching.d.ts +105 -0
  8. package/dest/blob_batching.d.ts.map +1 -0
  9. package/dest/blob_batching.js +223 -0
  10. package/dest/blob_utils.d.ts +39 -0
  11. package/dest/blob_utils.d.ts.map +1 -0
  12. package/dest/blob_utils.js +69 -0
  13. package/dest/circuit_types/blob_accumulator.d.ts +22 -0
  14. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/blob_accumulator.js +61 -0
  16. package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
  17. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_accumulator.js +63 -0
  19. package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
  20. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  21. package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
  22. package/dest/circuit_types/index.d.ts +4 -0
  23. package/dest/circuit_types/index.d.ts.map +1 -0
  24. package/dest/circuit_types/index.js +4 -0
  25. package/dest/encoding/block_blob_data.d.ts +22 -0
  26. package/dest/encoding/block_blob_data.d.ts.map +1 -0
  27. package/dest/encoding/block_blob_data.js +65 -0
  28. package/dest/encoding/block_end_marker.d.ts +10 -0
  29. package/dest/encoding/block_end_marker.d.ts.map +1 -0
  30. package/dest/encoding/block_end_marker.js +40 -0
  31. package/dest/encoding/block_end_state_field.d.ts +12 -0
  32. package/dest/encoding/block_end_state_field.d.ts.map +1 -0
  33. package/dest/encoding/block_end_state_field.js +39 -0
  34. package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
  35. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
  36. package/dest/encoding/checkpoint_blob_data.js +67 -0
  37. package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
  38. package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
  39. package/dest/encoding/checkpoint_end_marker.js +28 -0
  40. package/dest/encoding/fixtures.d.ts +41 -0
  41. package/dest/encoding/fixtures.d.ts.map +1 -0
  42. package/dest/encoding/fixtures.js +139 -0
  43. package/dest/encoding/index.d.ts +10 -0
  44. package/dest/encoding/index.d.ts.map +1 -0
  45. package/dest/encoding/index.js +9 -0
  46. package/dest/encoding/tx_blob_data.d.ts +19 -0
  47. package/dest/encoding/tx_blob_data.d.ts.map +1 -0
  48. package/dest/encoding/tx_blob_data.js +79 -0
  49. package/dest/encoding/tx_start_marker.d.ts +16 -0
  50. package/dest/encoding/tx_start_marker.d.ts.map +1 -0
  51. package/dest/encoding/tx_start_marker.js +77 -0
  52. package/dest/errors.d.ts +1 -1
  53. package/dest/errors.d.ts.map +1 -1
  54. package/dest/hash.d.ts +42 -0
  55. package/dest/hash.d.ts.map +1 -0
  56. package/dest/hash.js +79 -0
  57. package/dest/index.d.ts +7 -4
  58. package/dest/index.d.ts.map +1 -1
  59. package/dest/index.js +6 -16
  60. package/dest/interface.d.ts +2 -3
  61. package/dest/interface.d.ts.map +1 -1
  62. package/dest/kzg_context.d.ts +4 -0
  63. package/dest/kzg_context.d.ts.map +1 -0
  64. package/dest/kzg_context.js +5 -0
  65. package/dest/sponge_blob.d.ts +13 -17
  66. package/dest/sponge_blob.d.ts.map +1 -1
  67. package/dest/sponge_blob.js +24 -28
  68. package/dest/testing.d.ts +13 -25
  69. package/dest/testing.d.ts.map +1 -1
  70. package/dest/testing.js +37 -53
  71. package/dest/types.d.ts +17 -0
  72. package/dest/types.d.ts.map +1 -0
  73. package/dest/types.js +4 -0
  74. package/package.json +20 -15
  75. package/src/batched_blob.ts +25 -0
  76. package/src/blob.ts +82 -221
  77. package/src/blob_batching.ts +293 -0
  78. package/src/blob_utils.ts +81 -0
  79. package/src/circuit_types/blob_accumulator.ts +95 -0
  80. package/src/circuit_types/final_blob_accumulator.ts +75 -0
  81. package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
  82. package/src/circuit_types/index.ts +4 -0
  83. package/src/encoding/block_blob_data.ts +102 -0
  84. package/src/encoding/block_end_marker.ts +54 -0
  85. package/src/encoding/block_end_state_field.ts +59 -0
  86. package/src/encoding/checkpoint_blob_data.ts +95 -0
  87. package/src/encoding/checkpoint_end_marker.ts +40 -0
  88. package/src/encoding/fixtures.ts +209 -0
  89. package/src/encoding/index.ts +9 -0
  90. package/src/encoding/tx_blob_data.ts +116 -0
  91. package/src/encoding/tx_start_marker.ts +97 -0
  92. package/src/hash.ts +87 -0
  93. package/src/index.ts +6 -20
  94. package/src/interface.ts +1 -4
  95. package/src/kzg_context.ts +5 -0
  96. package/src/sponge_blob.ts +26 -29
  97. package/src/testing.ts +48 -50
  98. package/src/trusted_setup_bit_reversed.json +4100 -0
  99. package/src/types.ts +17 -0
  100. package/dest/blob_public_inputs.d.ts +0 -50
  101. package/dest/blob_public_inputs.d.ts.map +0 -1
  102. package/dest/blob_public_inputs.js +0 -146
  103. package/dest/encoding.d.ts +0 -66
  104. package/dest/encoding.d.ts.map +0 -1
  105. package/dest/encoding.js +0 -113
  106. package/src/blob_public_inputs.ts +0 -157
  107. package/src/encoding.ts +0 -138
package/src/types.ts ADDED
@@ -0,0 +1,17 @@
+ export * from './batched_blob.js';
+ export * from './circuit_types/index.js';
+ export * from './interface.js';
+ export * from './sponge_blob.js';
+
+ /**
+ * Type definition for the KZG instance returned by Blob.getViemKzgInstance().
+ * Contains the cryptographic functions needed for blob commitment and proof generation.
+ */
+ export interface BlobKzgInstance {
+ /** Function to compute KZG commitment from blob data */
+ blobToKzgCommitment(blob: Uint8Array): Uint8Array;
+ /** Function to compute KZG proof for blob data */
+ computeBlobKzgProof(blob: Uint8Array, commitment: Uint8Array): Uint8Array;
+ /** Function to compute both blob data cells and their corresponding KZG proofs for EIP7594 */
+ computeCellsAndKzgProofs(blob: Uint8Array): [Uint8Array[], Uint8Array[]];
+ }
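For context, here is a minimal sketch (not part of the package) of how an object satisfying BlobKzgInstance might be used to derive an EIP-4844 versioned blob hash: sha256 of the 48-byte commitment with the first byte overwritten by the KZG version tag, mirroring the getBlobHash logic in the deleted blob_public_inputs.js further down. The root re-export of BlobKzgInstance is an assumption.

```ts
// Hedged sketch, not from the package: derive an EIP-4844 versioned blob hash
// from any object satisfying BlobKzgInstance. Assumes the interface is
// re-exported from the package root (it is declared in src/types.ts).
import { createHash } from 'node:crypto';
import type { BlobKzgInstance } from '@aztec/blob-lib';

const VERSIONED_HASH_VERSION_KZG = 0x01; // EIP-4844 version byte

function versionedBlobHash(kzg: BlobKzgInstance, blob: Uint8Array): Uint8Array {
  const commitment = kzg.blobToKzgCommitment(blob); // 48-byte G1 commitment
  const hash = createHash('sha256').update(commitment).digest();
  hash[0] = VERSIONED_HASH_VERSION_KZG; // tag the hash with the version byte
  return new Uint8Array(hash);
}
```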
package/dest/blob_public_inputs.d.ts DELETED
@@ -1,50 +0,0 @@
- /// <reference types="node" resolution-mode="require"/>
- /// <reference types="node" resolution-mode="require"/>
- import { BLOBS_PER_BLOCK } from '@aztec/constants';
- import { Fr } from '@aztec/foundation/fields';
- import { BufferReader, FieldReader, type Tuple } from '@aztec/foundation/serialize';
- import type { FieldsOf } from '@aztec/foundation/types';
- import { type Blob } from './blob.js';
- /**
- * Public inputs required to be passed from our rollup circuits to verify a blob.
- */
- export declare class BlobPublicInputs {
- /** Challenge point z (= H(H(tx_effects), kzgCommmitment). */
- z: Fr;
- /** Evaluation y = p(z), where p() is the blob polynomial. */
- y: bigint;
- /** Commitment to the blob C. */
- kzgCommitment: Tuple<Fr, 2>;
- constructor(
- /** Challenge point z (= H(H(tx_effects), kzgCommmitment). */
- z: Fr,
- /** Evaluation y = p(z), where p() is the blob polynomial. */
- y: bigint,
- /** Commitment to the blob C. */
- kzgCommitment: Tuple<Fr, 2>);
- static empty(): BlobPublicInputs;
- isEmpty(): boolean;
- static fromBuffer(buffer: Buffer | BufferReader): BlobPublicInputs;
- toBuffer(): Buffer;
- static fromFields(fields: Fr[] | FieldReader): BlobPublicInputs;
- toFields(): Fr[];
- static getFields(fields: FieldsOf<BlobPublicInputs>): readonly [Fr, bigint, [Fr, Fr]];
- static fromBlob(input: Blob): BlobPublicInputs;
- getBlobHash(): Buffer;
- commitmentToBuffer(): Buffer;
- equals(other: BlobPublicInputs): boolean;
- }
- export declare class BlockBlobPublicInputs {
- inner: Tuple<BlobPublicInputs, typeof BLOBS_PER_BLOCK>;
- constructor(inner: Tuple<BlobPublicInputs, typeof BLOBS_PER_BLOCK>);
- static empty(): BlockBlobPublicInputs;
- static fromBuffer(buffer: Buffer | BufferReader): BlockBlobPublicInputs;
- toBuffer(): Buffer;
- static fromFields(fields: Fr[] | FieldReader): BlockBlobPublicInputs;
- toFields(): Fr[];
- static getFields(fields: FieldsOf<BlockBlobPublicInputs>): readonly [[BlobPublicInputs, BlobPublicInputs, BlobPublicInputs]];
- static fromBlobs(inputs: Blob[]): BlockBlobPublicInputs;
- getBlobsHash(): Buffer;
- toString(): string;
- }
- //# sourceMappingURL=blob_public_inputs.d.ts.map
package/dest/blob_public_inputs.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"blob_public_inputs.d.ts","sourceRoot":"","sources":["../src/blob_public_inputs.ts"],"names":[],"mappings":";;AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAInD,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,KAAK,EAAqB,MAAM,6BAA6B,CAAC;AACvG,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,yBAAyB,CAAC;AAExD,OAAO,EAAE,KAAK,IAAI,EAA8B,MAAM,WAAW,CAAC;AAElE;;GAEG;AACH,qBAAa,gBAAgB;IAEzB,6DAA6D;IACtD,CAAC,EAAE,EAAE;IACZ,6DAA6D;IACtD,CAAC,EAAE,MAAM;IAChB,gCAAgC;IACzB,aAAa,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC;;IALlC,6DAA6D;IACtD,CAAC,EAAE,EAAE;IACZ,6DAA6D;IACtD,CAAC,EAAE,MAAM;IAChB,gCAAgC;IACzB,aAAa,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC;IAGpC,MAAM,CAAC,KAAK,IAAI,gBAAgB;IAIhC,OAAO,IAAI,OAAO;IAIlB,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,gBAAgB;IAKlE,QAAQ;IAIR,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,WAAW,GAAG,gBAAgB;IAa/D,QAAQ;IAUR,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,gBAAgB,CAAC;IAInD,MAAM,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,GAAG,gBAAgB;IAI9C,WAAW,IAAI,MAAM;IAQrB,kBAAkB,IAAI,MAAM;IAO5B,MAAM,CAAC,KAAK,EAAE,gBAAgB;CAQ/B;AAID,qBAAa,qBAAqB;IACb,KAAK,EAAE,KAAK,CAAC,gBAAgB,EAAE,OAAO,eAAe,CAAC;gBAAtD,KAAK,EAAE,KAAK,CAAC,gBAAgB,EAAE,OAAO,eAAe,CAAC;IAEzE,MAAM,CAAC,KAAK,IAAI,qBAAqB;IAIrC,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,qBAAqB;IAKvE,QAAQ;IAIR,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,WAAW,GAAG,qBAAqB;IAKpE,QAAQ;IAIR,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,qBAAqB,CAAC;IAIxD,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,qBAAqB;IAWvD,YAAY;IAMZ,QAAQ;CAYT"}
package/dest/blob_public_inputs.js DELETED
@@ -1,146 +0,0 @@
- import { BLOBS_PER_BLOCK } from '@aztec/constants';
- import { makeTuple } from '@aztec/foundation/array';
- import { toBigIntBE, toBufferBE, toHex } from '@aztec/foundation/bigint-buffer';
- import { sha256, sha256Trunc } from '@aztec/foundation/crypto';
- import { Fr } from '@aztec/foundation/fields';
- import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
- import { VERSIONED_HASH_VERSION_KZG } from './blob.js';
- /**
- * Public inputs required to be passed from our rollup circuits to verify a blob.
- */ export class BlobPublicInputs {
- z;
- y;
- kzgCommitment;
- constructor(/** Challenge point z (= H(H(tx_effects), kzgCommmitment). */ z, /** Evaluation y = p(z), where p() is the blob polynomial. */ y, /** Commitment to the blob C. */ kzgCommitment){
- this.z = z;
- this.y = y;
- this.kzgCommitment = kzgCommitment;
- }
- static empty() {
- return new BlobPublicInputs(Fr.ZERO, 0n, [
- Fr.ZERO,
- Fr.ZERO
- ]);
- }
- isEmpty() {
- return this.z.isZero() && this.y == 0n && this.kzgCommitment[0].isZero() && this.kzgCommitment[1].isZero();
- }
- static fromBuffer(buffer) {
- const reader = BufferReader.asReader(buffer);
- return new BlobPublicInputs(Fr.fromBuffer(reader), toBigIntBE(reader.readBytes(32)), reader.readArray(2, Fr));
- }
- toBuffer() {
- return serializeToBuffer(...BlobPublicInputs.getFields(this));
- }
- static fromFields(fields) {
- const reader = FieldReader.asReader(fields);
- // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
- const fromBigNum = (fieldArr)=>{
- return BigInt(fieldArr[2].toString().concat(fieldArr[1].toString().substring(2), fieldArr[0].toString().substring(2)));
- };
- return new BlobPublicInputs(reader.readField(), fromBigNum(reader.readFieldArray(3)), reader.readFieldArray(2));
- }
- // NB: y is NOT a BN254 field, it's a larger BLS field, we cannot use serialiseToFields here as it assumes bigints will fit
- // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
- toFields() {
- const hex = toHex(this.y, true);
- const bigNum = [
- Fr.fromString('0x' + hex.substring(36)),
- Fr.fromString('0x' + hex.substring(6, 36)),
- Fr.fromString(hex.substring(0, 6))
- ];
- return [
- this.z,
- ...bigNum,
- ...this.kzgCommitment
- ];
- }
- static getFields(fields) {
- return [
- fields.z,
- fields.y,
- fields.kzgCommitment
- ];
- }
- static fromBlob(input) {
- return new BlobPublicInputs(input.challengeZ, toBigIntBE(input.evaluationY), input.commitmentToFields());
- }
- getBlobHash() {
- const hash = sha256(this.commitmentToBuffer());
- hash[0] = VERSIONED_HASH_VERSION_KZG;
- return hash;
- }
- // Performs the reverse conversion of blob.commitmentToFields()
- // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
- commitmentToBuffer() {
- return Buffer.concat([
- this.kzgCommitment[0].toBuffer().subarray(1),
- this.kzgCommitment[1].toBuffer().subarray(-17)
- ]);
- }
- equals(other) {
- return this.z.equals(other.z) && this.y == other.y && this.kzgCommitment[0].equals(other.kzgCommitment[0]) && this.kzgCommitment[1].equals(other.kzgCommitment[1]);
- }
- }
- // NB: it is much cleaner throughout the protocol circuits to define this struct rather than use a nested array.
- // Once we accumulate blob inputs, it should be removed, and we just use BlobPublicInputs::accumulate everywhere.
- export class BlockBlobPublicInputs {
- inner;
- constructor(inner){
- this.inner = inner;
- }
- static empty() {
- return new BlockBlobPublicInputs(makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty));
- }
- static fromBuffer(buffer) {
- const reader = BufferReader.asReader(buffer);
- return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
- }
- toBuffer() {
- return serializeToBuffer(...BlockBlobPublicInputs.getFields(this));
- }
- static fromFields(fields) {
- const reader = FieldReader.asReader(fields);
- return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
- }
- toFields() {
- return this.inner.map((i)=>i.toFields()).flat();
- }
- static getFields(fields) {
- return [
- fields.inner
- ];
- }
- static fromBlobs(inputs) {
- const inner = makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty);
- if (inputs.length > BLOBS_PER_BLOCK) {
- throw new Error(`Can only fit ${BLOBS_PER_BLOCK} in one BlockBlobPublicInputs instance (given ${inputs.length})`);
- }
- inputs.forEach((input, i)=>{
- inner[i] = BlobPublicInputs.fromBlob(input);
- });
- return new BlockBlobPublicInputs(inner);
- }
- getBlobsHash() {
- const blobHashes = this.inner.map((item)=>item.isEmpty() ? Buffer.alloc(0) : item.getBlobHash());
- return sha256Trunc(serializeToBuffer(blobHashes));
- }
- // The below is used to send to L1 for proof verification
- toString() {
- const nonEmptyBlobs = this.inner.filter((item)=>!item.isEmpty());
- // Write the number of blobs for L1 to verify
- let buf = Buffer.alloc(1);
- buf.writeUInt8(nonEmptyBlobs.length);
- // Using standard toBuffer() does not correctly encode the commitment
- // On L1, it's a 48 byte number, which we convert to 2 fields for use in the circuits
- nonEmptyBlobs.forEach((blob)=>{
- buf = Buffer.concat([
- buf,
- blob.z.toBuffer(),
- toBufferBE(blob.y, 32),
- blob.commitmentToBuffer()
- ]);
- });
- return buf.toString('hex');
- }
- }
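The hex slicing in the deleted toFields() above is easier to see as limb arithmetic: y is a 32-byte BLS12-381 scalar that does not fit in a single BN254 field, so it is split into [low 120 bits, mid 120 bits, high 16 bits]. A self-contained sketch of that split (helper names are illustrative, not the package's API):

```ts
// Hedged sketch of the limb split performed by toFields() via hex substrings:
// y -> [low 120 bits, mid 120 bits, high 16 bits], and back.
const LIMB_BITS = 120n;
const LIMB_MASK = (1n << LIMB_BITS) - 1n;

function yToLimbs(y: bigint): [bigint, bigint, bigint] {
  return [y & LIMB_MASK, (y >> LIMB_BITS) & LIMB_MASK, y >> (2n * LIMB_BITS)];
}

function limbsToY([lo, mid, hi]: [bigint, bigint, bigint]): bigint {
  return (hi << (2n * LIMB_BITS)) | (mid << LIMB_BITS) | lo;
}

// Round trip over an arbitrary 256-bit value:
const y = 0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdefn;
console.assert(limbsToY(yToLimbs(y)) === y);
```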
package/dest/encoding.d.ts DELETED
@@ -1,66 +0,0 @@
- import { Fr } from '@aztec/foundation/fields';
- import type { Blob as BlobBuffer } from 'c-kzg';
- export declare const TX_START_PREFIX = 8392562855083340404n;
- export declare const TX_START_PREFIX_BYTES_LENGTH: number;
- export declare const TX_EFFECT_PREFIX_BYTE_LENGTH: number;
- export declare const REVERT_CODE_PREFIX = 1;
- /**
- * Deserializes a blob buffer into an array of field elements.
- *
- * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
- * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
- *
- * However, we cannot simply trim the zero's from the end of the blob, as some logs may include zero's
- * within them.
- * If we end on a set of zeros, such as the log below:
- * length 7: [ a, b, c, d, e, 0, 0]
- *
- * we will end up with the incorrect hash if we trim the zeros from the end.
- *
- * Each transactions logs contains a TX start prefix, which includes a string followed
- * by the length ( in field elements ) of the transaction's log.
- *
- * This function finds the end of the last transaction's logs, and returns the array up to this point.
- *
- * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
- * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
- *
- * +------------------+------------------+------------------+------------------+
- * | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros |
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
- * +------------------+------------------+------------------+------------------+
- * ^
- * |
- * Function reads until here --------------------------------
- *
- * @param blob - The blob buffer to deserialize.
- * @returns An array of field elements.
- */
- export declare function deserializeEncodedBlobToFields(blob: BlobBuffer): Fr[];
- /**
- * Get the length of the transaction from the first field.
- *
- * @param firstField - The first field of the transaction.
- * @returns The length of the transaction.
- *
- * @throws If the first field does not include the correct prefix - encoding invalid.
- */
- export declare function getLengthFromFirstField(firstField: Fr): number;
- /**
- * Determines whether a field is the first field of a tx effect
- */
- export declare function isValidFirstField(field: Fr): boolean;
- /**
- * Extract the fields from a blob buffer, but do not take into account encoding
- * that will include trailing zeros.
- *
- * +------------------+------------------+------------------+------------------+
- * | | | | Padded zeros |
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
- * +------------------+------------------+------------------+------------------+
- * ^
- * |
- * Function reads until here ----------------------
- */
- export declare function extractBlobFieldsFromBuffer(blob: BlobBuffer): Fr[];
- //# sourceMappingURL=encoding.d.ts.map
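The hazard the doc comment above describes — a transaction's logs legitimately ending in zeros — is easy to demonstrate. The sketch below is illustrative only: it models fields as bigints and a tx as [lengthPrefix, ...payload] where, as in the deleted implementation, the prefix counts the whole tx including itself.

```ts
// Illustrative contrast between prefix-length walking (the approach of
// deserializeEncodedBlobToFields) and naive zero-trimming (the approach of
// extractBlobFieldsFromBuffer). Not the package's API; fields modeled as bigints.
type Field = bigint;

// One tx of length 3 (prefix + two payload fields, ending in a zero), then padding.
const blobFields: Field[] = [3n, 10n, 0n, 0n, 0n, 0n];

function walkByPrefix(fields: Field[]): Field[] {
  let cursor = 0;
  while (cursor < fields.length && fields[cursor] !== 0n) {
    cursor += Number(fields[cursor]); // jump over this tx's fields
  }
  return fields.slice(0, cursor);
}

console.log(walkByPrefix(blobFields).length); // 3 — the tx's trailing zero is kept
// Naive trailing-zero trimming would keep only 2 fields and hash the wrong data.
```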
package/dest/encoding.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"encoding.d.ts","sourceRoot":"","sources":["../src/encoding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAG9C,OAAO,KAAK,EAAE,IAAI,IAAI,UAAU,EAAE,MAAM,OAAO,CAAC;AAIhD,eAAO,MAAM,eAAe,uBAAuB,CAAC;AAEpD,eAAO,MAAM,4BAA4B,QAA0C,CAAC;AAEpF,eAAO,MAAM,4BAA4B,QAAmC,CAAC;AAC7E,eAAO,MAAM,kBAAkB,IAAI,CAAC;AAEpC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AACH,wBAAgB,8BAA8B,CAAC,IAAI,EAAE,UAAU,GAAG,EAAE,EAAE,CAsBrE;AAED;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CAAC,UAAU,EAAE,EAAE,GAAG,MAAM,CAO9D;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,EAAE,GAAG,OAAO,CAqBpD;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,2BAA2B,CAAC,IAAI,EAAE,UAAU,GAAG,EAAE,EAAE,CAYlE"}
package/dest/encoding.js DELETED
@@ -1,113 +0,0 @@
- import { Fr } from '@aztec/foundation/fields';
- import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
- // Note duplicated from stdlib !
- // This will appear as 0x74785f7374617274 in logs
- export const TX_START_PREFIX = 8392562855083340404n;
- // These are helper constants to decode tx effects from blob encoded fields
- export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
- // 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
- export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;
- export const REVERT_CODE_PREFIX = 1;
- /**
- * Deserializes a blob buffer into an array of field elements.
- *
- * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
- * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
- *
- * However, we cannot simply trim the zero's from the end of the blob, as some logs may include zero's
- * within them.
- * If we end on a set of zeros, such as the log below:
- * length 7: [ a, b, c, d, e, 0, 0]
- *
- * we will end up with the incorrect hash if we trim the zeros from the end.
- *
- * Each transactions logs contains a TX start prefix, which includes a string followed
- * by the length ( in field elements ) of the transaction's log.
- *
- * This function finds the end of the last transaction's logs, and returns the array up to this point.
- *
- * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
- * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
- *
- * +------------------+------------------+------------------+------------------+
- * | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros |
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
- * +------------------+------------------+------------------+------------------+
- * ^
- * |
- * Function reads until here --------------------------------
- *
- * @param blob - The blob buffer to deserialize.
- * @returns An array of field elements.
- */ export function deserializeEncodedBlobToFields(blob) {
- // Convert blob buffer to array of field elements
- const reader = BufferReader.asReader(blob);
- const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
- const fieldReader = FieldReader.asReader(array);
- // Read fields until we hit zeros at the end
- while(!fieldReader.isFinished()){
- const currentField = fieldReader.peekField();
- // Stop when we hit a zero field
- if (!currentField || currentField.isZero()) {
- break;
- }
- // Skip the remaining fields in this transaction
- const len = getLengthFromFirstField(currentField);
- fieldReader.skip(len);
- }
- // Return array up to last non-zero field
- return array.slice(0, fieldReader.cursor);
- }
- /**
- * Get the length of the transaction from the first field.
- *
- * @param firstField - The first field of the transaction.
- * @returns The length of the transaction.
- *
- * @throws If the first field does not include the correct prefix - encoding invalid.
- */ export function getLengthFromFirstField(firstField) {
- // Check that the first field includes the correct prefix
- if (!isValidFirstField(firstField)) {
- throw new Error('Invalid prefix');
- }
- const buf = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
- return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber();
- }
- /**
- * Determines whether a field is the first field of a tx effect
- */ export function isValidFirstField(field) {
- const buf = field.toBuffer();
- if (!buf.subarray(0, field.size - TX_EFFECT_PREFIX_BYTE_LENGTH).equals(Buffer.alloc(field.size - TX_EFFECT_PREFIX_BYTE_LENGTH))) {
- return false;
- }
- const sliced = buf.subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
- if (// Checking we start with the correct prefix...
- !new Fr(sliced.subarray(0, TX_START_PREFIX_BYTES_LENGTH)).equals(new Fr(TX_START_PREFIX)) || // ...and include the revert code prefix..
- sliced[sliced.length - 3] !== REVERT_CODE_PREFIX || // ...and the following revert code is valid.
- sliced[sliced.length - 1] > 4) {
- return false;
- }
- return true;
- }
- /**
- * Extract the fields from a blob buffer, but do not take into account encoding
- * that will include trailing zeros.
- *
- * +------------------+------------------+------------------+------------------+
- * | | | | Padded zeros |
- * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
- * +------------------+------------------+------------------+------------------+
- * ^
- * |
- * Function reads until here ----------------------
- */ export function extractBlobFieldsFromBuffer(blob) {
- const reader = BufferReader.asReader(blob);
- const array = reader.readArray(blob.length >> 5, Fr);
- // Find the index of the last non-zero field
- let lastNonZeroIndex = array.length - 1;
- while(lastNonZeroIndex >= 0 && array[lastNonZeroIndex].isZero()){
- lastNonZeroIndex--;
- }
- // Return the trimmed array
- return array.slice(0, lastNonZeroIndex + 1);
- }
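As the comment in the deleted file notes, TX_START_PREFIX is the ASCII string "tx_start" packed into a bigint, which is why it appears as 0x74785f7374617274 in logs. A quick check:

```ts
// Quick check (illustrative): TX_START_PREFIX decodes to the ASCII marker "tx_start".
const TX_START_PREFIX = 8392562855083340404n;

const hex = TX_START_PREFIX.toString(16); // '74785f7374617274'
console.log(Buffer.from(hex, 'hex').toString('ascii')); // 'tx_start'
console.log(hex.length / 2); // 8 — the value of TX_START_PREFIX_BYTES_LENGTH
```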
package/src/blob_public_inputs.ts DELETED
@@ -1,157 +0,0 @@
- import { BLOBS_PER_BLOCK } from '@aztec/constants';
- import { makeTuple } from '@aztec/foundation/array';
- import { toBigIntBE, toBufferBE, toHex } from '@aztec/foundation/bigint-buffer';
- import { sha256, sha256Trunc } from '@aztec/foundation/crypto';
- import { Fr } from '@aztec/foundation/fields';
- import { BufferReader, FieldReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize';
- import type { FieldsOf } from '@aztec/foundation/types';
-
- import { type Blob, VERSIONED_HASH_VERSION_KZG } from './blob.js';
-
- /**
- * Public inputs required to be passed from our rollup circuits to verify a blob.
- */
- export class BlobPublicInputs {
- constructor(
- /** Challenge point z (= H(H(tx_effects), kzgCommmitment). */
- public z: Fr,
- /** Evaluation y = p(z), where p() is the blob polynomial. */
- public y: bigint,
- /** Commitment to the blob C. */
- public kzgCommitment: Tuple<Fr, 2>,
- ) {}
-
- static empty(): BlobPublicInputs {
- return new BlobPublicInputs(Fr.ZERO, 0n, [Fr.ZERO, Fr.ZERO]);
- }
-
- isEmpty(): boolean {
- return this.z.isZero() && this.y == 0n && this.kzgCommitment[0].isZero() && this.kzgCommitment[1].isZero();
- }
-
- static fromBuffer(buffer: Buffer | BufferReader): BlobPublicInputs {
- const reader = BufferReader.asReader(buffer);
- return new BlobPublicInputs(Fr.fromBuffer(reader), toBigIntBE(reader.readBytes(32)), reader.readArray(2, Fr));
- }
-
- toBuffer() {
- return serializeToBuffer(...BlobPublicInputs.getFields(this));
- }
-
- static fromFields(fields: Fr[] | FieldReader): BlobPublicInputs {
- const reader = FieldReader.asReader(fields);
- // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
- const fromBigNum = (fieldArr: Fr[]) => {
- return BigInt(
- fieldArr[2].toString().concat(fieldArr[1].toString().substring(2), fieldArr[0].toString().substring(2)),
- );
- };
- return new BlobPublicInputs(reader.readField(), fromBigNum(reader.readFieldArray(3)), reader.readFieldArray(2));
- }
-
- // NB: y is NOT a BN254 field, it's a larger BLS field, we cannot use serialiseToFields here as it assumes bigints will fit
- // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
- toFields() {
- const hex = toHex(this.y, true);
- const bigNum = [
- Fr.fromString('0x' + hex.substring(36)),
- Fr.fromString('0x' + hex.substring(6, 36)),
- Fr.fromString(hex.substring(0, 6)),
- ];
- return [this.z, ...bigNum, ...this.kzgCommitment];
- }
-
- static getFields(fields: FieldsOf<BlobPublicInputs>) {
- return [fields.z, fields.y, fields.kzgCommitment] as const;
- }
-
- static fromBlob(input: Blob): BlobPublicInputs {
- return new BlobPublicInputs(input.challengeZ, toBigIntBE(input.evaluationY), input.commitmentToFields());
- }
-
- getBlobHash(): Buffer {
- const hash = sha256(this.commitmentToBuffer());
- hash[0] = VERSIONED_HASH_VERSION_KZG;
- return hash;
- }
-
- // Performs the reverse conversion of blob.commitmentToFields()
- // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
- commitmentToBuffer(): Buffer {
- return Buffer.concat([
- this.kzgCommitment[0].toBuffer().subarray(1),
- this.kzgCommitment[1].toBuffer().subarray(-17),
- ]);
- }
-
- equals(other: BlobPublicInputs) {
- return (
- this.z.equals(other.z) &&
- this.y == other.y &&
- this.kzgCommitment[0].equals(other.kzgCommitment[0]) &&
- this.kzgCommitment[1].equals(other.kzgCommitment[1])
- );
- }
- }
-
- // NB: it is much cleaner throughout the protocol circuits to define this struct rather than use a nested array.
- // Once we accumulate blob inputs, it should be removed, and we just use BlobPublicInputs::accumulate everywhere.
- export class BlockBlobPublicInputs {
- constructor(public inner: Tuple<BlobPublicInputs, typeof BLOBS_PER_BLOCK>) {}
-
- static empty(): BlockBlobPublicInputs {
- return new BlockBlobPublicInputs(makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty));
- }
-
- static fromBuffer(buffer: Buffer | BufferReader): BlockBlobPublicInputs {
- const reader = BufferReader.asReader(buffer);
- return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
- }
-
- toBuffer() {
- return serializeToBuffer(...BlockBlobPublicInputs.getFields(this));
- }
-
- static fromFields(fields: Fr[] | FieldReader): BlockBlobPublicInputs {
- const reader = FieldReader.asReader(fields);
- return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
- }
-
- toFields() {
- return this.inner.map(i => i.toFields()).flat();
- }
-
- static getFields(fields: FieldsOf<BlockBlobPublicInputs>) {
- return [fields.inner] as const;
- }
-
- static fromBlobs(inputs: Blob[]): BlockBlobPublicInputs {
- const inner = makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty);
- if (inputs.length > BLOBS_PER_BLOCK) {
- throw new Error(`Can only fit ${BLOBS_PER_BLOCK} in one BlockBlobPublicInputs instance (given ${inputs.length})`);
- }
- inputs.forEach((input, i) => {
- inner[i] = BlobPublicInputs.fromBlob(input);
- });
- return new BlockBlobPublicInputs(inner);
- }
-
- getBlobsHash() {
- const blobHashes = this.inner.map(item => (item.isEmpty() ? Buffer.alloc(0) : item.getBlobHash()));
- return sha256Trunc(serializeToBuffer(blobHashes));
- }
-
- // The below is used to send to L1 for proof verification
- toString() {
- const nonEmptyBlobs = this.inner.filter(item => !item.isEmpty());
- // Write the number of blobs for L1 to verify
- let buf = Buffer.alloc(1);
- buf.writeUInt8(nonEmptyBlobs.length);
- // Using standard toBuffer() does not correctly encode the commitment
- // On L1, it's a 48 byte number, which we convert to 2 fields for use in the circuits
- nonEmptyBlobs.forEach(blob => {
- buf = Buffer.concat([buf, blob.z.toBuffer(), toBufferBE(blob.y, 32), blob.commitmentToBuffer()]);
- });
- return buf.toString('hex');
- }
- }
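The deleted commitmentToBuffer() undoes the 48-byte-to-two-fields packing performed by blob.commitmentToFields(): bytes 0..31 of the commitment sit right-aligned in the first field and bytes 31..48 in the second. A hedged sketch of the round trip over plain 32-byte big-endian buffers (the Fr wire format; helper names are illustrative):

```ts
// Hedged sketch of the [Fr, Fr] = [0->31, 31->48] commitment packing and its
// inverse, using raw buffers in place of Fr. Not the package's API.
function commitmentToFieldBuffers(commitment: Buffer): [Buffer, Buffer] {
  const f0 = Buffer.concat([Buffer.alloc(1), commitment.subarray(0, 31)]);  // bytes 0..31, right-aligned
  const f1 = Buffer.concat([Buffer.alloc(15), commitment.subarray(31, 48)]); // bytes 31..48, right-aligned
  return [f0, f1];
}

function fieldBuffersToCommitment([f0, f1]: [Buffer, Buffer]): Buffer {
  // Mirrors commitmentToBuffer(): drop f0's leading byte, take f1's last 17 bytes.
  return Buffer.concat([f0.subarray(1), f1.subarray(-17)]);
}

const commitment = Buffer.alloc(48, 0xab);
console.assert(fieldBuffersToCommitment(commitmentToFieldBuffers(commitment)).equals(commitment));
```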