@aztec/blob-lib 0.0.0-test.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,138 @@
1
+ import { Fr } from '@aztec/foundation/fields';
2
+ import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
3
+
4
+ import type { Blob as BlobBuffer } from 'c-kzg';
5
+
6
+ // Note: duplicated from stdlib!
7
+ // This will appear as 0x74785f7374617274 in logs
8
+ export const TX_START_PREFIX = 8392562855083340404n;
9
+ // These are helper constants to decode tx effects from blob encoded fields
10
+ export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
11
+ // 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
12
+ export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;
13
+ export const REVERT_CODE_PREFIX = 1;
14
+
15
+ /**
16
+ * Deserializes a blob buffer into an array of field elements.
17
+ *
18
+ * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
19
+ * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
20
+ *
21
+ * However, we cannot simply trim the zeros from the end of the blob, as some logs may include zeros
22
+ * within them.
23
+ * If we end on a set of zeros, such as the log below:
24
+ * length 7: [ a, b, c, d, e, 0, 0]
25
+ *
26
+ * we will end up with the incorrect hash if we trim the zeros from the end.
27
+ *
28
+ * Each transaction's logs contain a TX start prefix, which includes a string followed
29
+ * by the length ( in field elements ) of the transaction's log.
30
+ *
31
+ * This function finds the end of the last transaction's logs, and returns the array up to this point.
32
+ *
33
+ * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
34
+ * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
35
+ *
36
+ * +------------------+------------------+------------------+------------------+
37
+ * | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros |
38
+ * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
39
+ * +------------------+------------------+------------------+------------------+
40
+ * ^
41
+ * |
42
+ * Function reads until here --------------------------------
43
+ *
44
+ * @param blob - The blob buffer to deserialize.
45
+ * @returns An array of field elements.
46
+ */
47
+ export function deserializeEncodedBlobToFields(blob: BlobBuffer): Fr[] {
48
+ // Convert blob buffer to array of field elements
49
+ const reader = BufferReader.asReader(blob);
50
+ const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
51
+ const fieldReader = FieldReader.asReader(array);
52
+
53
+ // Read fields until we hit zeros at the end
54
+ while (!fieldReader.isFinished()) {
55
+ const currentField = fieldReader.peekField();
56
+
57
+ // Stop when we hit a zero field
58
+ if (!currentField || currentField.isZero()) {
59
+ break;
60
+ }
61
+
62
+ // Skip the remaining fields in this transaction
63
+ const len = getLengthFromFirstField(currentField);
64
+ fieldReader.skip(len);
65
+ }
66
+
67
+ // Return array up to last non-zero field
68
+ return array.slice(0, fieldReader.cursor);
69
+ }
70
+
71
+ /**
72
+ * Get the length of the transaction from the first field.
73
+ *
74
+ * @param firstField - The first field of the transaction.
75
+ * @returns The length of the transaction.
76
+ *
77
+ * @throws If the first field does not include the correct prefix - encoding invalid.
78
+ */
79
+ export function getLengthFromFirstField(firstField: Fr): number {
80
+ // Check that the first field includes the correct prefix
81
+ if (!isValidFirstField(firstField)) {
82
+ throw new Error('Invalid prefix');
83
+ }
84
+ const buf = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
85
+ return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber();
86
+ }
87
+
88
+ /**
89
+ * Determines whether a field is the first field of a tx effect
90
+ */
91
+ export function isValidFirstField(field: Fr): boolean {
92
+ const buf = field.toBuffer();
93
+ if (
94
+ !buf
95
+ .subarray(0, field.size - TX_EFFECT_PREFIX_BYTE_LENGTH)
96
+ .equals(Buffer.alloc(field.size - TX_EFFECT_PREFIX_BYTE_LENGTH))
97
+ ) {
98
+ return false;
99
+ }
100
+ const sliced = buf.subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
101
+ if (
102
+ // Checking we start with the correct prefix...
103
+ !new Fr(sliced.subarray(0, TX_START_PREFIX_BYTES_LENGTH)).equals(new Fr(TX_START_PREFIX)) ||
104
+ // ...and include the revert code prefix..
105
+ sliced[sliced.length - 3] !== REVERT_CODE_PREFIX ||
106
+ // ...and the following revert code is valid.
107
+ sliced[sliced.length - 1] > 4
108
+ ) {
109
+ return false;
110
+ }
111
+ return true;
112
+ }
113
+
114
+ /**
115
+ * Extract the fields from a blob buffer, but do not take into account encoding
116
+ * that will include trailing zeros.
117
+ *
118
+ * +------------------+------------------+------------------+------------------+
119
+ * | | | | Padded zeros |
120
+ * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
121
+ * +------------------+------------------+------------------+------------------+
122
+ * ^
123
+ * |
124
+ * Function reads until here ----------------------
125
+ */
126
+ export function extractBlobFieldsFromBuffer(blob: BlobBuffer): Fr[] {
127
+ const reader = BufferReader.asReader(blob);
128
+ const array = reader.readArray(blob.length >> 5, Fr);
129
+
130
+ // Find the index of the last non-zero field
131
+ let lastNonZeroIndex = array.length - 1;
132
+ while (lastNonZeroIndex >= 0 && array[lastNonZeroIndex].isZero()) {
133
+ lastNonZeroIndex--;
134
+ }
135
+
136
+ // Return the trimmed array
137
+ return array.slice(0, lastNonZeroIndex + 1);
138
+ }
package/src/errors.ts ADDED
@@ -0,0 +1,6 @@
1
+ export class BlobDeserializationError extends Error {
2
+ constructor(message: string) {
3
+ super(message);
4
+ this.name = 'BlobDeserializationError';
5
+ }
6
+ }
package/src/index.ts ADDED
@@ -0,0 +1,23 @@
1
+ import cKzg from 'c-kzg';
2
+
3
+ /* eslint-disable import/no-named-as-default-member */
4
+ const { loadTrustedSetup } = cKzg;
5
+
6
+ export * from './blob.js';
7
+ export * from './encoding.js';
8
+ export * from './interface.js';
9
+ export * from './errors.js';
10
+ export * from './blob_public_inputs.js';
11
+ export * from './sponge_blob.js';
12
+
13
+ try {
14
+ loadTrustedSetup();
15
+ } catch (error: any) {
16
+ if (error.message.includes('trusted setup is already loaded')) {
17
+ // NB: The c-kzg lib has no way of checking whether the setup is loaded or not,
18
+ // and it throws an error if it's already loaded, even though nothing is wrong.
19
+ // This is a rudimentary way of ensuring we load the trusted setup if we need it.
20
+ } else {
21
+ throw new Error(error);
22
+ }
23
+ }
@@ -0,0 +1,11 @@
1
+ /**
2
+ * The relevant parts of a response from https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
3
+ */
4
+ export interface BlobJson {
5
+ blob: string;
6
+ index?: number;
7
+ // eslint-disable-next-line camelcase
8
+ kzg_commitment: string;
9
+ // eslint-disable-next-line camelcase
10
+ kzg_proof: string;
11
+ }
@@ -0,0 +1,178 @@
1
+ import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
2
+ import { poseidon2Permutation } from '@aztec/foundation/crypto';
3
+ import { Fr } from '@aztec/foundation/fields';
4
+ import {
5
+ BufferReader,
6
+ FieldReader,
7
+ type Tuple,
8
+ serializeToBuffer,
9
+ serializeToFields,
10
+ } from '@aztec/foundation/serialize';
11
+
12
+ /**
13
+ * A Poseidon2 sponge used to accumulate data that will be added to a blob.
14
+ * See noir-projects/noir-protocol-circuits/crates/types/src/abis/sponge_blob.nr.
15
+ */
16
+ export class SpongeBlob {
17
+ constructor(
18
+ /** Sponge with absorbed tx effects that will go into a blob. */
19
+ public readonly sponge: Poseidon2Sponge,
20
+ /** Number of effects absorbed so far. */
21
+ public fields: number,
22
+ /** Number of effects that will be absorbed. */
23
+ public readonly expectedFields: number,
24
+ ) {}
25
+
26
+ static fromBuffer(buffer: Buffer | BufferReader): SpongeBlob {
27
+ const reader = BufferReader.asReader(buffer);
28
+ return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber(), reader.readNumber());
29
+ }
30
+
31
+ toBuffer() {
32
+ return serializeToBuffer(this.sponge, this.fields, this.expectedFields);
33
+ }
34
+
35
+ static getFields(fields: FieldsOf<SpongeBlob>) {
36
+ return [fields.sponge, fields.fields, fields.expectedFields];
37
+ }
38
+
39
+ toFields(): Fr[] {
40
+ return serializeToFields(...SpongeBlob.getFields(this));
41
+ }
42
+
43
+ static fromFields(fields: Fr[] | FieldReader): SpongeBlob {
44
+ const reader = FieldReader.asReader(fields);
45
+ return new SpongeBlob(
46
+ reader.readObject(Poseidon2Sponge),
47
+ reader.readField().toNumber(),
48
+ reader.readField().toNumber(),
49
+ );
50
+ }
51
+
52
+ clone() {
53
+ return SpongeBlob.fromBuffer(this.toBuffer());
54
+ }
55
+
56
+ async absorb(fields: Fr[]) {
57
+ if (this.fields + fields.length > this.expectedFields) {
58
+ throw new Error(
59
+ `Attempted to fill spongeblob with ${this.fields + fields.length}, but it has a max of ${this.expectedFields}`,
60
+ );
61
+ }
62
+ await this.sponge.absorb(fields);
63
+ this.fields += fields.length;
64
+ }
65
+
66
+ async squeeze(): Promise<Fr> {
67
+ // If the blob sponge is not 'full', we append 1 to match Poseidon2::hash_internal()
68
+ // NB: There is currently no use case in which we don't 'fill' a blob sponge, but adding for completeness
69
+ if (this.fields != this.expectedFields) {
70
+ await this.sponge.absorb([Fr.ONE]);
71
+ }
72
+ return this.sponge.squeeze();
73
+ }
74
+
75
+ static empty(): SpongeBlob {
76
+ return new SpongeBlob(Poseidon2Sponge.empty(), 0, 0);
77
+ }
78
+
79
+ static init(expectedFields: number): SpongeBlob {
80
+ return new SpongeBlob(Poseidon2Sponge.init(expectedFields), 0, expectedFields);
81
+ }
82
+ }
83
+
84
+ // This is just noir's stdlib version of the poseidon2 sponge. We use it for a blob-specific implementation of the hasher.
85
+ export class Poseidon2Sponge {
86
+ constructor(
87
+ public cache: Tuple<Fr, 3>,
88
+ public state: Tuple<Fr, 4>,
89
+ public cacheSize: number,
90
+ public squeezeMode: boolean,
91
+ ) {}
92
+
93
+ static fromBuffer(buffer: Buffer | BufferReader): Poseidon2Sponge {
94
+ const reader = BufferReader.asReader(buffer);
95
+ return new Poseidon2Sponge(
96
+ reader.readArray(3, Fr),
97
+ reader.readArray(4, Fr),
98
+ reader.readNumber(),
99
+ reader.readBoolean(),
100
+ );
101
+ }
102
+
103
+ toBuffer() {
104
+ return serializeToBuffer(this.cache, this.state, this.cacheSize, this.squeezeMode);
105
+ }
106
+
107
+ static getFields(fields: FieldsOf<Poseidon2Sponge>) {
108
+ return [fields.cache, fields.state, fields.cacheSize, fields.squeezeMode];
109
+ }
110
+
111
+ toFields(): Fr[] {
112
+ return serializeToFields(...Poseidon2Sponge.getFields(this));
113
+ }
114
+
115
+ static fromFields(fields: Fr[] | FieldReader): Poseidon2Sponge {
116
+ const reader = FieldReader.asReader(fields);
117
+ return new Poseidon2Sponge(
118
+ reader.readFieldArray(3),
119
+ reader.readFieldArray(4),
120
+ reader.readField().toNumber(),
121
+ reader.readBoolean(),
122
+ );
123
+ }
124
+
125
+ static empty(): Poseidon2Sponge {
126
+ return new Poseidon2Sponge(
127
+ makeTuple(3, () => Fr.ZERO),
128
+ makeTuple(4, () => Fr.ZERO),
129
+ 0,
130
+ false,
131
+ );
132
+ }
133
+
134
+ static init(expectedFields: number): Poseidon2Sponge {
135
+ const iv = new Fr(expectedFields).mul(new Fr(BigInt('18446744073709551616')));
136
+ const sponge = Poseidon2Sponge.empty();
137
+ sponge.state[3] = iv;
138
+ return sponge;
139
+ }
140
+
141
+ // Note: there isn't currently an impl in ts that allows for a custom absorption via an
142
+ // existing sponge.
143
+ // A custom blob-based impl of noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr
144
+ async performDuplex() {
145
+ for (let i = 0; i < this.cache.length; i++) {
146
+ if (i < this.cacheSize) {
147
+ this.state[i] = this.state[i].add(this.cache[i]);
148
+ }
149
+ }
150
+ const perm = await poseidon2Permutation(this.state);
151
+ // ts doesn't understand that the above always gives 4
152
+ this.state = [perm[0], perm[1], perm[2], perm[3]];
153
+ }
154
+
155
+ async absorb(fields: Fr[]) {
156
+ if (this.squeezeMode) {
157
+ throw new Error(`Poseidon sponge is not able to absorb more inputs.`);
158
+ }
159
+ for (const field of fields) {
160
+ if (this.cacheSize == this.cache.length) {
161
+ await this.performDuplex();
162
+ this.cache[0] = field;
163
+ this.cacheSize = 1;
164
+ } else {
165
+ this.cache[this.cacheSize++] = field;
166
+ }
167
+ }
168
+ }
169
+
170
+ async squeeze(): Promise<Fr> {
171
+ if (this.squeezeMode) {
172
+ throw new Error(`Poseidon sponge has already been squeezed.`);
173
+ }
174
+ await this.performDuplex();
175
+ this.squeezeMode = true;
176
+ return this.state[0];
177
+ }
178
+ }
package/src/testing.ts ADDED
@@ -0,0 +1,95 @@
1
+ import { BLOBS_PER_BLOCK } from '@aztec/constants';
2
+ import { makeTuple } from '@aztec/foundation/array';
3
+ import { toBufferBE } from '@aztec/foundation/bigint-buffer';
4
+ import { Fr } from '@aztec/foundation/fields';
5
+
6
+ import { Blob } from './blob.js';
7
+ import { BlobPublicInputs, BlockBlobPublicInputs } from './blob_public_inputs.js';
8
+ import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
9
+ import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';
10
+
11
+ /**
12
+ * Makes arbitrary poseidon sponge for blob inputs.
13
+ * Note: will not verify inside the circuit.
14
+ * @param seed - The seed to use for generating the sponge.
15
+ * @returns A sponge blob instance.
16
+ */
17
+ export function makeSpongeBlob(seed = 1): SpongeBlob {
18
+ return new SpongeBlob(
19
+ new Poseidon2Sponge(
20
+ makeTuple(3, i => new Fr(i)),
21
+ makeTuple(4, i => new Fr(i)),
22
+ 1,
23
+ false,
24
+ ),
25
+ seed,
26
+ seed + 1,
27
+ );
28
+ }
29
+
30
+ /**
31
+ * Makes arbitrary blob public inputs.
32
+ * Note: will not verify inside the circuit.
33
+ * @param seed - The seed to use for generating the blob inputs.
34
+ * @returns A blob public inputs instance.
35
+ */
36
+ export function makeBlobPublicInputs(seed = 1): BlobPublicInputs {
37
+ return new BlobPublicInputs(
38
+ new Fr(seed),
39
+ BigInt(seed + 1),
40
+ makeTuple(2, i => new Fr(i)),
41
+ );
42
+ }
43
+
44
+ /**
45
+ * Makes arbitrary block blob public inputs.
46
+ * Note: will not verify inside the circuit.
47
+ * @param seed - The seed to use for generating the blob inputs.
48
+ * @returns A block blob public inputs instance.
49
+ */
50
+ export function makeBlockBlobPublicInputs(seed = 1): BlockBlobPublicInputs {
51
+ return new BlockBlobPublicInputs(makeTuple(BLOBS_PER_BLOCK, () => makeBlobPublicInputs(seed)));
52
+ }
53
+
54
+ // TODO: copied from stdlib tx effect
55
+ function encodeFirstField(length: number): Fr {
56
+ const lengthBuf = Buffer.alloc(2);
57
+ lengthBuf.writeUInt16BE(length, 0);
58
+ return new Fr(
59
+ Buffer.concat([
60
+ toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
61
+ Buffer.alloc(1),
62
+ lengthBuf,
63
+ Buffer.alloc(1),
64
+ Buffer.from([1]),
65
+ Buffer.alloc(1),
66
+ Buffer.alloc(1),
67
+ ]),
68
+ );
69
+ }
70
+
71
+ /**
72
+ * Make an encoded blob with the given length
73
+ *
74
+ * This will deserialise correctly in the archiver
75
+ * @param length
76
+ * @returns
77
+ */
78
+ export function makeEncodedBlob(length: number): Promise<Blob> {
79
+ return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]);
80
+ }
81
+
82
+ /**
83
+ * Make an unencoded blob with the given length
84
+ *
85
+ * This will fail deserialisation in the archiver
86
+ * @param length
87
+ * @returns
88
+ */
89
+ export function makeUnencodedBlob(length: number): Promise<Blob> {
90
+ return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
91
+ }
92
+
93
+ export function makeEncodedBlobFields(fields: Fr[]): Promise<Blob> {
94
+ return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]);
95
+ }