@aztec/blob-lib 0.75.0-commit.c03ba01a2a4122e43e90d5133ba017e54b90e9d2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/blob.js +253 -0
- package/dest/encoding.js +114 -0
- package/dest/errors.js +6 -0
- package/dest/index.js +18 -0
- package/dest/interface.js +3 -0
- package/dest/mocks.js +53 -0
- package/package.json +84 -0
- package/src/blob.ts +312 -0
- package/src/encoding.ts +139 -0
- package/src/errors.ts +6 -0
- package/src/index.ts +22 -0
- package/src/interface.ts +11 -0
- package/src/mocks.ts +48 -0
package/dest/blob.js
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
// Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
|
|
2
|
+
import { poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
|
|
3
|
+
import { Fr } from '@aztec/foundation/fields';
|
|
4
|
+
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
5
|
+
import cKzg from 'c-kzg';
|
|
6
|
+
import { deserializeEncodedBlobToFields, extractBlobFieldsFromBuffer } from './encoding.js';
|
|
7
|
+
import { BlobDeserializationError } from './errors.js';
|
|
8
|
+
/* eslint-disable import/no-named-as-default-member */
const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;
// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
export const VERSIONED_HASH_VERSION_KZG = 0x01;
/**
 * A class to create, manage, and prove EVM blobs.
 *
 * Bundles the raw EIP-4844 blob bytes with the KZG commitment C, the opening
 * proof Q, the Poseidon2-derived challenge point z, and the evaluation
 * y = p(z) — everything needed both by the rollup circuits and by L1's
 * point evaluation precompile.
 */
export class Blob {
    // Raw blob bytes (BYTES_PER_BLOB long) broadcast on L1.
    data;
    // Poseidon2 hash of the blob's fields; matches SpongeBlob.squeeze() in the blob circuit.
    fieldsHash;
    // Challenge point z = poseidon2(fieldsHash, commitment-as-fields), so that p(z) = y.
    challengeZ;
    // Evaluation y = p(z) as a buffer (BLS12 field element).
    evaluationY;
    // Commitment to the blob C, compressed BLS12 point format (48 bytes).
    commitment;
    // KZG opening proof for y = p(z) (commitment to quotient polynomial Q), 48 bytes.
    proof;
    constructor(/** The blob to be broadcast on L1 in bytes form. */ data, /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ fieldsHash, /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */ challengeZ, /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ evaluationY, /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ commitment, /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ proof){
        this.data = data;
        this.fieldsHash = fieldsHash;
        this.challengeZ = challengeZ;
        this.evaluationY = evaluationY;
        this.commitment = commitment;
        this.proof = proof;
    }
    /**
     * The encoded version of the blob will determine the end of the blob based on the transaction encoding.
     * This is required when the fieldsHash of a blob will contain trailing zeros.
     *
     * See `./encoding.ts` for more details.
     *
     * This method is used to create a Blob from a buffer.
     * @param blob - The buffer to create the Blob from.
     * @param multiBlobFieldsHash - The fields hash to use for the Blob.
     * @returns A Blob created from the buffer.
     *
     * @throws If unable to deserialize the blob.
     *
     * NOTE(review): only synchronous throws from deserializeEncodedBlobToFields are
     * wrapped here — Blob.fromFields is async and is returned without await, so its
     * rejections propagate to the caller unwrapped. Confirm this is intentional.
     */
    static fromEncodedBlobBuffer(blob, multiBlobFieldsHash) {
        try {
            const fields = deserializeEncodedBlobToFields(blob);
            return Blob.fromFields(fields, multiBlobFieldsHash);
        } catch (err) {
            throw new BlobDeserializationError(`Failed to create Blob from encoded blob buffer, this blob was likely not created by us`);
        }
    }
    /**
     * Create a Blob from an array of fields.
     *
     * Pads the serialized fields up to BYTES_PER_BLOB, computes the KZG
     * commitment, challenge z, evaluation y and opening proof, and verifies
     * the proof before constructing the Blob.
     *
     * @param fields - The array of fields to create the Blob from.
     * @param multiBlobFieldsHash - The fields hash to use for the Blob (shared across blobs when fields span several).
     * @returns A Blob created from the array of fields.
     * @throws If more than FIELD_ELEMENTS_PER_BLOB fields are given, or if the KZG proof fails to verify.
     */
    static async fromFields(fields, multiBlobFieldsHash) {
        if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
            throw new Error(`Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`);
        }
        // Zero-pad the serialized fields to the fixed blob size.
        const data = Buffer.concat([
            serializeToBuffer(fields)
        ], BYTES_PER_BLOB);
        // This matches the output of SpongeBlob.squeeze() in the blob circuit
        const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : await poseidon2Hash(fields);
        const commitment = Buffer.from(blobToKzgCommitment(data));
        const challengeZ = await poseidon2Hash([
            fieldsHash,
            ...commitmentToFields(commitment)
        ]);
        // computeKzgProof returns [proof, evaluation].
        const res = computeKzgProof(data, challengeZ.toBuffer());
        if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
            throw new Error(`KZG proof did not verify.`);
        }
        const proof = Buffer.from(res[0]);
        const evaluationY = Buffer.from(res[1]);
        return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof);
    }
    /**
     * Create a Blob from a JSON object.
     *
     * Blobs will be in this form when requested from the blob sink, or from
     * the beacon chain via `getBlobSidecars`
     * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
     *
     * @dev WARNING: by default json deals with encoded buffers
     *
     * @param json - The JSON object to create the Blob from.
     * @returns A Blob created from the JSON object.
     * @throws If the recomputed KZG commitment does not match `json.kzg_commitment`.
     */
    static async fromJson(json) {
        // Strip the '0x' prefix before decoding the hex payload.
        const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');
        const blob = await Blob.fromEncodedBlobBuffer(blobBuffer);
        if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
            throw new Error('KZG commitment does not match');
        }
        // We do not check the proof, as it will be different if the challenge is shared
        // across multiple blobs
        return blob;
    }
    /**
     * Get the JSON representation of the blob.
     *
     * @dev WARNING: by default json deals with encoded buffers
     * @param index - optional - The index of the blob in the block.
     * @returns The JSON representation of the blob.
     */
    toJson(index) {
        return {
            blob: `0x${Buffer.from(this.data).toString('hex')}`,
            index,
            // eslint-disable-next-line camelcase
            kzg_commitment: `0x${this.commitment.toString('hex')}`,
            // eslint-disable-next-line camelcase
            kzg_proof: `0x${this.proof.toString('hex')}`
        };
    }
    /**
     * Get the fields from the blob.
     *
     * @dev WARNING: this method does not take into account trailing zeros
     *
     * @returns The fields from the blob.
     */
    toFields() {
        return extractBlobFieldsFromBuffer(this.data);
    }
    /**
     * Get the encoded fields from the blob.
     *
     * @dev This method takes into account trailing zeros
     *
     * @returns The encoded fields from the blob.
     *
     * @throws If unable to deserialize the blob.
     */
    toEncodedFields() {
        try {
            return deserializeEncodedBlobToFields(this.data);
        } catch (err) {
            throw new BlobDeserializationError(`Failed to deserialize encoded blob fields, this blob was likely not created by us`);
        }
    }
    /**
     * Get the commitment fields from the blob.
     *
     * The 48-byte commitment is encoded into two field elements:
     * +------------------+------------------+
     * | Field Element 1  | Field Element 2  |
     * | [bytes 0-31)     | [bytes 31-48)    |
     * +------------------+------------------+
     * @returns The commitment fields from the blob.
     */
    commitmentToFields() {
        return commitmentToFields(this.commitment);
    }
    // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
    // i.e. sha256(commitment) with the first byte replaced by the version tag 0x01.
    getEthVersionedBlobHash() {
        const hash = sha256(this.commitment);
        hash[0] = VERSIONED_HASH_VERSION_KZG;
        return hash;
    }
    /**
     * Static variant of getEthVersionedBlobHash for a raw 48-byte commitment,
     * without needing a full Blob instance.
     * @param commitment - Compressed KZG commitment buffer.
     * @returns The versioned blob hash (32 bytes).
     */
    static getEthVersionedBlobHash(commitment) {
        const hash = sha256(commitment);
        hash[0] = VERSIONED_HASH_VERSION_KZG;
        return hash;
    }
    /**
     * Get the buffer representation of the ENTIRE blob.
     *
     * @dev WARNING: this buffer contains all metadata aswell as the data itself
     * (each variable-length member is preceded by its length, mirroring fromBuffer).
     *
     * @returns The buffer representation of the blob.
     */
    toBuffer() {
        return Buffer.from(serializeToBuffer(this.data.length, this.data, this.fieldsHash, this.challengeZ, this.evaluationY.length, this.evaluationY, this.commitment.length, this.commitment, this.proof.length, this.proof));
    }
    /**
     * Create a Blob from a buffer.
     *
     * @dev WARNING: this method contains all metadata aswell as the data itself
     * (must be the exact layout produced by toBuffer).
     *
     * @param buf - The buffer to create the Blob from.
     * @returns A Blob created from the buffer.
     */
    static fromBuffer(buf) {
        const reader = BufferReader.asReader(buf);
        return new Blob(reader.readUint8Array(), reader.readObject(Fr), reader.readObject(Fr), reader.readBuffer(), reader.readBuffer(), reader.readBuffer());
    }
    /**
     * Get the size of the blob in bytes
     */
    getSize() {
        return this.data.length;
    }
    /**
     * Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
     *
     * input[:32]     - versioned_hash
     * input[32:64]   - z
     * input[64:96]   - y
     * input[96:144]  - commitment C
     * input[144:192] - proof (a commitment to the quotient polynomial q(X))
     *
     * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
     */
    getEthBlobEvaluationInputs() {
        const buf = Buffer.concat([
            this.getEthVersionedBlobHash(),
            this.challengeZ.toBuffer(),
            this.evaluationY,
            this.commitment,
            this.proof
        ]);
        return `0x${buf.toString('hex')}`;
    }
    /**
     * Static variant building the concatenated evaluation inputs for several blobs,
     * prefixed with a single byte holding the blob count.
     * @param blobs - The blobs to build inputs for.
     * @returns 0x-prefixed hex string of [count | per-blob inputs...].
     */
    static getEthBlobEvaluationInputs(blobs) {
        let buf = Buffer.alloc(0);
        blobs.forEach((blob)=>{
            buf = Buffer.concat([
                buf,
                blob.getEthVersionedBlobHash(),
                blob.challengeZ.toBuffer(),
                blob.evaluationY,
                blob.commitment,
                blob.proof
            ]);
        });
        // For multiple blobs, we prefix the number of blobs:
        const lenBuf = Buffer.alloc(1);
        lenBuf.writeUint8(blobs.length);
        buf = Buffer.concat([
            lenBuf,
            buf
        ]);
        return `0x${buf.toString('hex')}`;
    }
    /**
     * Returns the two c-kzg callbacks needed to build blob transactions
     * (presumably for use as viem's `kzg` option — TODO confirm against viem docs).
     */
    static getViemKzgInstance() {
        return {
            blobToKzgCommitment: cKzg.blobToKzgCommitment,
            computeBlobKzgProof: cKzg.computeBlobKzgProof
        };
    }
    // Returns as many blobs as we require to broadcast the given fields
    // Assumes we share the fields hash between all blobs
    static async getBlobs(fields) {
        // Always produce at least one (possibly empty) blob.
        const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
        // One hash over ALL fields, shared by every blob in the set.
        const multiBlobFieldsHash = await poseidon2Hash(fields);
        const res = [];
        for(let i = 0; i < numBlobs; i++){
            // end = min(fields.length, (i + 1) * FIELD_ELEMENTS_PER_BLOB)
            const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
            res.push(await Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
        }
        return res;
    }
}
|
|
247
|
+
/**
 * Encode a 48-byte compressed KZG commitment as two BN254 field elements:
 * bytes [0, 31) in the first field and bytes [31, 48) in the second.
 */
function commitmentToFields(commitment) {
    const head = commitment.subarray(0, 31);
    const tail = commitment.subarray(31, 48);
    return [new Fr(head), new Fr(tail)];
}
|
package/dest/encoding.js
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { Fr } from '@aztec/foundation/fields';
|
|
2
|
+
import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
|
|
3
|
+
// Note duplicated from circuit-types !
// This will appear as 0x74785f7374617274 in logs, which is ASCII "tx_start".
export const TX_START_PREFIX = 8392562855083340404n;
// These are helper constants to decode tx effects from blob encoded fields
// Number of bytes in the prefix above (hex digits / 2, i.e. 8).
export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;
// Marker byte that precedes the revert code inside a tx-effect first field.
export const REVERT_CODE_PREFIX = 1;
|
|
11
|
+
/**
 * Deserializes a blob buffer into an array of field elements.
 *
 * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
 * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
 *
 * However, we cannot simply trim the zero's from the end of the blob, as some logs may include zero's
 * within them.
 * If we end on a set of zeros, such as the log below:
 * length 7: [ a, b, c, d, e, 0, 0]
 *
 * we will end up with the incorrect hash if we trim the zeros from the end.
 *
 * Each transactions logs contains a TX start prefix, which includes a string followed
 * by the length ( in field elements ) of the transaction's log.
 *
 * This function finds the end of the last transaction's logs, and returns the array up to this point.
 *
 * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
 * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
 *
 * +------------------+------------------+------------------+------------------+
 * | TX1 Start Prefix |  TX1 Log Fields  | TX2 Start Prefix |   Padded zeros   |
 * |    [3 a,b,c]     |   [3, a, b, c]   |  [5 d,e,f,0,0]   | [0, 0, 0, .., 0] |
 * +------------------+------------------+------------------+------------------+
 *                                                          ^
 *                                                          |
 *                       Function reads until here --------------------------------
 *
 * @param blob - The blob buffer to deserialize.
 * @returns An array of field elements.
 */
export function deserializeEncodedBlobToFields(blob) {
    // Convert blob buffer to array of field elements
    const reader = BufferReader.asReader(blob);
    const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
    const fieldReader = FieldReader.asReader(array);
    // Read fields until we hit zeros at the end
    while(!fieldReader.isFinished()){
        // peekField inspects the next field without advancing the cursor.
        const currentField = fieldReader.peekField();
        // Stop when we hit a zero field
        if (!currentField || currentField.isZero()) {
            break;
        }
        // Skip the remaining fields in this transaction.
        // The encoded length counts the prefix field itself (mocks encode
        // `length + 1`), so skipping `len` from the un-advanced cursor lands
        // exactly on the next tx's prefix field.
        const len = getLengthFromFirstField(currentField);
        fieldReader.skip(len);
    }
    // Return array up to last non-zero field
    return array.slice(0, fieldReader.cursor);
}
|
|
61
|
+
/**
 * Get the length of the transaction from the first field.
 *
 * The length lives in the two `txlen` bytes of the trailing
 * TX_EFFECT_PREFIX_BYTE_LENGTH bytes of the field (see the layout comment on
 * TX_EFFECT_PREFIX_BYTE_LENGTH).
 *
 * @param firstField - The first field of the transaction.
 * @returns The length of the transaction.
 *
 * @throws If the first field does not include the correct prefix - encoding invalid.
 */
export function getLengthFromFirstField(firstField) {
    // Reject fields that do not carry a valid tx-start prefix.
    if (!isValidFirstField(firstField)) {
        throw new Error('Invalid prefix');
    }
    const prefix = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
    const lengthOffset = TX_START_PREFIX_BYTES_LENGTH + 1;
    const lengthBytes = prefix.subarray(lengthOffset, lengthOffset + 2);
    return new Fr(lengthBytes).toNumber();
}
|
|
76
|
+
// NOTE: duplicated from circuit-types tx effect!
/**
 * Determines whether a field is the first field of a tx effect.
 *
 * A valid first field is all zeros except for its trailing
 * TX_EFFECT_PREFIX_BYTE_LENGTH bytes, which must start with TX_START_PREFIX,
 * carry REVERT_CODE_PREFIX in the third-to-last byte, and end with a revert
 * code no greater than 4.
 */ export function isValidFirstField(field) {
    const buf = field.toBuffer();
    // Everything before the trailing prefix bytes must be zero.
    if (!buf.subarray(0, field.size - TX_EFFECT_PREFIX_BYTE_LENGTH).equals(Buffer.alloc(field.size - TX_EFFECT_PREFIX_BYTE_LENGTH))) {
        return false;
    }
    const sliced = buf.subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
    if (// Checking we start with the correct prefix...
    !new Fr(sliced.subarray(0, TX_START_PREFIX_BYTES_LENGTH)).equals(new Fr(TX_START_PREFIX)) || // ...and include the revert code prefix..
    sliced[sliced.length - 3] !== REVERT_CODE_PREFIX || // ...and the following revert code is valid (4 is the max accepted code here).
    sliced[sliced.length - 1] > 4) {
        return false;
    }
    return true;
}
|
|
93
|
+
/**
 * Extract the fields from a blob buffer, but do not take into account encoding
 * that will include trailing zeros.
 *
 * Simply strips the run of zero fields at the end of the blob — unlike
 * deserializeEncodedBlobToFields, zeros that belong to a tx's own logs but sit
 * at the very end are also dropped.
 *
 * +------------------+------------------+------------------+------------------+
 * |                  |                  |                  |   Padded zeros   |
 * |    [3 a,b,c]     |   [3, a, b, c]   |  [5 d,e,f,0,0]   | [0, 0, 0, .., 0] |
 * +------------------+------------------+------------------+------------------+
 *                                                  ^
 *                                                  |
 *           Function reads until here ----------------------
 */
export function extractBlobFieldsFromBuffer(blob) {
    // 32 bytes per field, hence blob.length >> 5 fields in the buffer.
    const fields = BufferReader.asReader(blob).readArray(blob.length >> 5, Fr);
    // Walk backwards past the trailing zero fields.
    let end = fields.length;
    while (end > 0 && fields[end - 1].isZero()) {
        end--;
    }
    return fields.slice(0, end);
}
|
package/dest/errors.js
ADDED
package/dest/index.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import cKzg from 'c-kzg';
/* eslint-disable import/no-named-as-default-member */ const { loadTrustedSetup } = cKzg;
export * from './blob.js';
export * from './mocks.js';
export * from './encoding.js';
export * from './interface.js';
export * from './errors.js';
// Load the KZG trusted setup once at module load; the commitment/proof
// functions re-exported from './blob.js' require it.
try {
    loadTrustedSetup();
} catch (error) {
    // NB: The c-kzg lib has no way of checking whether the setup is loaded or not,
    // and it throws an error if it's already loaded, even though nothing is wrong.
    // This is a rudimentary way of ensuring we load the trusted setup if we need it.
    // Guard against non-Error throwables before reading `.message` (previously
    // `error.message.includes(...)` would itself throw on a non-Error value).
    const message = error instanceof Error ? error.message : String(error);
    if (!message.includes('trusted setup is already loaded')) {
        // Rethrow the original error unchanged. The previous `throw new Error(error)`
        // stringified it, discarding its type, properties and stack trace.
        throw error;
    }
}
|
package/dest/mocks.js
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import { toBufferBE } from '@aztec/foundation/bigint-buffer';
|
|
2
|
+
import { Fr } from '@aztec/foundation/fields';
|
|
3
|
+
import { Blob } from './blob.js';
|
|
4
|
+
import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
|
|
5
|
+
// TODO: copied from circuit-types tx effect
/**
 * Build the tx-effect "first field" marker used by the mock blobs:
 * | tx_start prefix | 0 | length (u16 BE) | 0 | 1 | 0 | 0 |
 * where the `1` matches REVERT_CODE_PREFIX and the final byte is the revert code.
 */
function encodeFirstField(length) {
    const encodedLength = Buffer.alloc(2);
    encodedLength.writeUInt16BE(length, 0);
    const zeroByte = Buffer.alloc(1);
    const parts = [
        toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
        zeroByte,
        encodedLength,
        zeroByte,
        Buffer.from([1]),
        zeroByte,
        zeroByte
    ];
    return new Fr(Buffer.concat(parts));
}
|
|
21
|
+
/**
 * Make an encoded blob with the given length
 *
 * This will deserialise correctly in the archiver
 * @param length - Number of random fields to place after the prefix field.
 * @returns A promise of the constructed Blob.
 */
export function makeEncodedBlob(length) {
    const randomFields = Array.from({ length }, () => Fr.random());
    // +1: the encoded tx length counts the prefix field itself.
    return Blob.fromFields([encodeFirstField(length + 1), ...randomFields]);
}
|
|
35
|
+
/**
 * Make an unencoded blob with the given length
 *
 * This will fail deserialisation in the archiver (no tx-start prefix field).
 * @param length - Number of random fields in the blob.
 * @returns A promise of the constructed Blob.
 */
export function makeUnencodedBlob(length) {
    const randomFields = Array.from({ length }, () => Fr.random());
    return Blob.fromFields(randomFields);
}
|
|
48
|
+
/**
 * Make an encoded blob from the given fields, prefixing them with a valid
 * tx-start first field so the archiver can deserialize it.
 */
export function makeEncodedBlobFields(fields) {
    // +1: the encoded tx length counts the prefix field itself.
    const prefixed = [encodeFirstField(fields.length + 1)].concat(fields);
    return Blob.fromFields(prefixed);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@aztec/blob-lib",
|
|
3
|
+
"version": "0.75.0-commit.c03ba01a2a4122e43e90d5133ba017e54b90e9d2",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"exports": {
|
|
6
|
+
".": "./dest/index.js"
|
|
7
|
+
},
|
|
8
|
+
"typedocOptions": {
|
|
9
|
+
"entryPoints": [
|
|
10
|
+
"./src/index.ts"
|
|
11
|
+
],
|
|
12
|
+
"name": "Blob Lib",
|
|
13
|
+
"tsconfig": "./tsconfig.json"
|
|
14
|
+
},
|
|
15
|
+
"scripts": {
|
|
16
|
+
"build": "yarn clean && tsc -b",
|
|
17
|
+
"build:dev": "tsc -b --watch",
|
|
18
|
+
"clean": "rm -rf ./dest .tsbuildinfo",
|
|
19
|
+
"formatting": "run -T prettier --check ./src && run -T eslint ./src",
|
|
20
|
+
"formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src",
|
|
21
|
+
"start:dev": "tsc-watch -p tsconfig.json --onSuccess 'yarn start'",
|
|
22
|
+
"start": "node ./dest/index.js",
|
|
23
|
+
"test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --maxWorkers=${JEST_MAX_WORKERS:-8}"
|
|
24
|
+
},
|
|
25
|
+
"inherits": [
|
|
26
|
+
"../package.common.json"
|
|
27
|
+
],
|
|
28
|
+
"dependencies": {
|
|
29
|
+
"@aztec/foundation": "0.75.0-commit.c03ba01a2a4122e43e90d5133ba017e54b90e9d2",
|
|
30
|
+
"c-kzg": "4.0.0-alpha.1",
|
|
31
|
+
"tslib": "^2.4.0"
|
|
32
|
+
},
|
|
33
|
+
"devDependencies": {
|
|
34
|
+
"@jest/globals": "^29.5.0",
|
|
35
|
+
"@types/jest": "^29.5.0",
|
|
36
|
+
"@types/node": "^18.14.6",
|
|
37
|
+
"get-port": "^7.1.0",
|
|
38
|
+
"jest": "^29.5.0",
|
|
39
|
+
"ts-node": "^10.9.1",
|
|
40
|
+
"typescript": "^5.0.4"
|
|
41
|
+
},
|
|
42
|
+
"files": [
|
|
43
|
+
"dest",
|
|
44
|
+
"src",
|
|
45
|
+
"!*.test.*"
|
|
46
|
+
],
|
|
47
|
+
"types": "./dest/index.d.ts",
|
|
48
|
+
"jest": {
|
|
49
|
+
"moduleNameMapper": {
|
|
50
|
+
"^(\\.{1,2}/.*)\\.[cm]?js$": "$1"
|
|
51
|
+
},
|
|
52
|
+
"testRegex": "./src/.*\\.test\\.(js|mjs|ts)$",
|
|
53
|
+
"rootDir": "./src",
|
|
54
|
+
"transform": {
|
|
55
|
+
"^.+\\.tsx?$": [
|
|
56
|
+
"@swc/jest",
|
|
57
|
+
{
|
|
58
|
+
"jsc": {
|
|
59
|
+
"parser": {
|
|
60
|
+
"syntax": "typescript",
|
|
61
|
+
"decorators": true
|
|
62
|
+
},
|
|
63
|
+
"transform": {
|
|
64
|
+
"decoratorVersion": "2022-03"
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
]
|
|
69
|
+
},
|
|
70
|
+
"extensionsToTreatAsEsm": [
|
|
71
|
+
".ts"
|
|
72
|
+
],
|
|
73
|
+
"reporters": [
|
|
74
|
+
"default"
|
|
75
|
+
],
|
|
76
|
+
"testTimeout": 30000,
|
|
77
|
+
"setupFiles": [
|
|
78
|
+
"../../foundation/src/jest/setup.mjs"
|
|
79
|
+
]
|
|
80
|
+
},
|
|
81
|
+
"engines": {
|
|
82
|
+
"node": ">=18"
|
|
83
|
+
}
|
|
84
|
+
}
|
package/src/blob.ts
ADDED
|
@@ -0,0 +1,312 @@
|
|
|
1
|
+
// Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
|
|
2
|
+
import { poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
|
|
3
|
+
import { Fr } from '@aztec/foundation/fields';
|
|
4
|
+
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
5
|
+
|
|
6
|
+
import cKzg from 'c-kzg';
|
|
7
|
+
import type { Blob as BlobBuffer } from 'c-kzg';
|
|
8
|
+
|
|
9
|
+
import { deserializeEncodedBlobToFields, extractBlobFieldsFromBuffer } from './encoding.js';
|
|
10
|
+
import { BlobDeserializationError } from './errors.js';
|
|
11
|
+
import { type BlobJson } from './interface.js';
|
|
12
|
+
|
|
13
|
+
/* eslint-disable import/no-named-as-default-member */
|
|
14
|
+
const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;
|
|
15
|
+
|
|
16
|
+
// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
|
|
17
|
+
export const VERSIONED_HASH_VERSION_KZG = 0x01;
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* A class to create, manage, and prove EVM blobs.
|
|
21
|
+
*/
|
|
22
|
+
export class Blob {
|
|
23
|
+
constructor(
|
|
24
|
+
/** The blob to be broadcast on L1 in bytes form. */
|
|
25
|
+
public readonly data: BlobBuffer,
|
|
26
|
+
/** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
|
|
27
|
+
public readonly fieldsHash: Fr,
|
|
28
|
+
/** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */
|
|
29
|
+
public readonly challengeZ: Fr,
|
|
30
|
+
/** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
|
|
31
|
+
public readonly evaluationY: Buffer,
|
|
32
|
+
/** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
|
|
33
|
+
public readonly commitment: Buffer,
|
|
34
|
+
/** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
|
|
35
|
+
public readonly proof: Buffer,
|
|
36
|
+
) {}
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* The encoded version of the blob will determine the end of the blob based on the transaction encoding.
|
|
40
|
+
* This is required when the fieldsHash of a blob will contain trailing zeros.
|
|
41
|
+
*
|
|
42
|
+
* See `./encoding.ts` for more details.
|
|
43
|
+
*
|
|
44
|
+
* This method is used to create a Blob from a buffer.
|
|
45
|
+
* @param blob - The buffer to create the Blob from.
|
|
46
|
+
* @param multiBlobFieldsHash - The fields hash to use for the Blob.
|
|
47
|
+
* @returns A Blob created from the buffer.
|
|
48
|
+
*
|
|
49
|
+
* @throws If unable to deserialize the blob.
|
|
50
|
+
*/
|
|
51
|
+
static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Promise<Blob> {
|
|
52
|
+
try {
|
|
53
|
+
const fields: Fr[] = deserializeEncodedBlobToFields(blob);
|
|
54
|
+
return Blob.fromFields(fields, multiBlobFieldsHash);
|
|
55
|
+
} catch (err) {
|
|
56
|
+
throw new BlobDeserializationError(
|
|
57
|
+
`Failed to create Blob from encoded blob buffer, this blob was likely not created by us`,
|
|
58
|
+
);
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
/**
|
|
63
|
+
* Create a Blob from an array of fields.
|
|
64
|
+
*
|
|
65
|
+
* @param fields - The array of fields to create the Blob from.
|
|
66
|
+
* @param multiBlobFieldsHash - The fields hash to use for the Blob.
|
|
67
|
+
* @returns A Blob created from the array of fields.
|
|
68
|
+
*/
|
|
69
|
+
static async fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Promise<Blob> {
|
|
70
|
+
if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
|
|
71
|
+
throw new Error(
|
|
72
|
+
`Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
|
|
73
|
+
);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
|
|
77
|
+
|
|
78
|
+
// This matches the output of SpongeBlob.squeeze() in the blob circuit
|
|
79
|
+
const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : await poseidon2Hash(fields);
|
|
80
|
+
const commitment = Buffer.from(blobToKzgCommitment(data));
|
|
81
|
+
const challengeZ = await poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
|
|
82
|
+
const res = computeKzgProof(data, challengeZ.toBuffer());
|
|
83
|
+
if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
|
|
84
|
+
throw new Error(`KZG proof did not verify.`);
|
|
85
|
+
}
|
|
86
|
+
const proof = Buffer.from(res[0]);
|
|
87
|
+
const evaluationY = Buffer.from(res[1]);
|
|
88
|
+
|
|
89
|
+
return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
/**
|
|
93
|
+
* Create a Blob from a JSON object.
|
|
94
|
+
*
|
|
95
|
+
* Blobs will be in this form when requested from the blob sink, or from
|
|
96
|
+
* the beacon chain via `getBlobSidecars`
|
|
97
|
+
* https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
|
|
98
|
+
*
|
|
99
|
+
* @dev WARNING: by default json deals with encoded buffers
|
|
100
|
+
*
|
|
101
|
+
* @param json - The JSON object to create the Blob from.
|
|
102
|
+
* @returns A Blob created from the JSON object.
|
|
103
|
+
*/
|
|
104
|
+
static async fromJson(json: BlobJson): Promise<Blob> {
|
|
105
|
+
const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');
|
|
106
|
+
|
|
107
|
+
const blob = await Blob.fromEncodedBlobBuffer(blobBuffer);
|
|
108
|
+
|
|
109
|
+
if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
|
|
110
|
+
throw new Error('KZG commitment does not match');
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
// We do not check the proof, as it will be different if the challenge is shared
|
|
114
|
+
// across multiple blobs
|
|
115
|
+
|
|
116
|
+
return blob;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/**
 * Get the JSON representation of the blob.
 *
 * @dev WARNING: by default json deals with encoded buffers
 * @param index - optional - The index of the blob in the block.
 * @returns The JSON representation of the blob.
 */
toJson(index?: number): BlobJson {
  // Helper to render a byte array as 0x-prefixed hex.
  const toHex = (bytes: Uint8Array): string => `0x${Buffer.from(bytes).toString('hex')}`;
  return {
    blob: toHex(this.data),
    index,
    // eslint-disable-next-line camelcase
    kzg_commitment: toHex(this.commitment),
    // eslint-disable-next-line camelcase
    kzg_proof: toHex(this.proof),
  };
}
|
|
136
|
+
|
|
137
|
+
/**
 * Get the fields from the blob.
 *
 * @dev WARNING: this method does not take into account trailing zeros
 *
 * @returns The fields from the blob.
 */
toFields(): Fr[] {
  const { data } = this;
  return extractBlobFieldsFromBuffer(data);
}
|
|
147
|
+
|
|
148
|
+
/**
 * Get the encoded fields from the blob.
 *
 * @dev This method takes into account trailing zeros
 *
 * @returns The encoded fields from the blob.
 *
 * @throws If unable to deserialize the blob.
 */
toEncodedFields(): Fr[] {
  try {
    return deserializeEncodedBlobToFields(this.data);
  } catch {
    // Any failure here means the blob does not follow our tx-effect encoding
    // (e.g. it was posted by a third party); the underlying error is not surfaced.
    throw new BlobDeserializationError(
      `Failed to deserialize encoded blob fields, this blob was likely not created by us`,
    );
  }
}
|
|
166
|
+
|
|
167
|
+
/**
 * Get the commitment fields from the blob.
 *
 * The 48-byte commitment is encoded into two field elements
 * (split at byte 31 so each chunk fits in a BN254 field):
 * +------------------+------------------+
 * | Field Element 1  | Field Element 2  |
 * |  [bytes 0-30]    |  [bytes 31-47]   |
 * +------------------+------------------+
 * |     31 bytes     |     17 bytes     |
 * +------------------+------------------+
 * @returns The commitment fields from the blob.
 */
commitmentToFields(): [Fr, Fr] {
  // Delegates to the module-level helper that performs the split.
  return commitmentToFields(this.commitment);
}
|
|
182
|
+
|
|
183
|
+
// Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
getEthVersionedBlobHash(): Buffer {
  // Delegate to the static helper so the hashing logic lives in one place.
  return Blob.getEthVersionedBlobHash(this.commitment);
}
|
|
189
|
+
|
|
190
|
+
/**
 * Returns ethereum's versioned blob hash for a given commitment, following
 * kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
 * @param commitment - The KZG commitment to hash.
 * @returns sha256(commitment) with byte 0 overwritten by VERSIONED_HASH_VERSION_KZG.
 */
static getEthVersionedBlobHash(commitment: Buffer): Buffer {
  const versionedHash = sha256(commitment);
  versionedHash[0] = VERSIONED_HASH_VERSION_KZG;
  return versionedHash;
}
|
|
195
|
+
|
|
196
|
+
/**
 * Get the buffer representation of the ENTIRE blob.
 *
 * @dev WARNING: this buffer contains all metadata aswell as the data itself
 *
 * @returns The buffer representation of the blob.
 */
toBuffer(): Buffer {
  const { data, fieldsHash, challengeZ, evaluationY, commitment, proof } = this;
  // Variable-length members are written with a length prefix so fromBuffer can read them back.
  const serialized = serializeToBuffer(
    data.length,
    data,
    fieldsHash,
    challengeZ,
    evaluationY.length,
    evaluationY,
    commitment.length,
    commitment,
    proof.length,
    proof,
  );
  return Buffer.from(serialized);
}
|
|
219
|
+
|
|
220
|
+
/**
|
|
221
|
+
* Create a Blob from a buffer.
|
|
222
|
+
*
|
|
223
|
+
* @dev WARNING: this method contains all metadata aswell as the data itself
|
|
224
|
+
*
|
|
225
|
+
* @param buf - The buffer to create the Blob from.
|
|
226
|
+
* @returns A Blob created from the buffer.
|
|
227
|
+
*/
|
|
228
|
+
static fromBuffer(buf: Buffer | BufferReader): Blob {
|
|
229
|
+
const reader = BufferReader.asReader(buf);
|
|
230
|
+
return new Blob(
|
|
231
|
+
reader.readUint8Array(),
|
|
232
|
+
reader.readObject(Fr),
|
|
233
|
+
reader.readObject(Fr),
|
|
234
|
+
reader.readBuffer(),
|
|
235
|
+
reader.readBuffer(),
|
|
236
|
+
reader.readBuffer(),
|
|
237
|
+
);
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
/**
|
|
241
|
+
* Get the size of the blob in bytes
|
|
242
|
+
*/
|
|
243
|
+
getSize() {
|
|
244
|
+
return this.data.length;
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
/**
 * Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
 *
 *  * input[:32]     - versioned_hash
 *  * input[32:64]   - z
 *  * input[64:96]   - y
 *  * input[96:144]  - commitment C
 *  * input[144:192] - proof (a commitment to the quotient polynomial q(X))
 *
 * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
 */
getEthBlobEvaluationInputs(): `0x${string}` {
  const parts = [
    this.getEthVersionedBlobHash(),
    this.challengeZ.toBuffer(),
    this.evaluationY,
    this.commitment,
    this.proof,
  ];
  return `0x${Buffer.concat(parts).toString('hex')}`;
}
|
|
268
|
+
|
|
269
|
+
/**
 * Batched form of getEthBlobEvaluationInputs: concatenates the evaluation inputs
 * of several blobs, prefixed with a single byte holding the number of blobs.
 *
 * @param blobs - The blobs to build evaluation inputs for (at most 255, enforced by the u8 prefix).
 * @returns 0x-prefixed hex of: numBlobs || (versionedHash || z || y || C || proof) per blob.
 */
static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` {
  // For multiple blobs, we prefix the number of blobs:
  const lenBuf = Buffer.alloc(1);
  lenBuf.writeUint8(blobs.length);
  // Collect all parts and concatenate once, instead of re-concatenating
  // the accumulator on every iteration (accidental O(n^2) copying).
  const parts = blobs.flatMap(blob => [
    blob.getEthVersionedBlobHash(),
    blob.challengeZ.toBuffer(),
    blob.evaluationY,
    blob.commitment,
    blob.proof,
  ]);
  const buf = Buffer.concat([lenBuf, ...parts]);
  return `0x${buf.toString('hex')}`;
}
|
|
287
|
+
|
|
288
|
+
/**
 * Returns the subset of the c-kzg API that viem expects for blob transactions.
 */
static getViemKzgInstance() {
  const { blobToKzgCommitment, computeBlobKzgProof } = cKzg;
  return { blobToKzgCommitment, computeBlobKzgProof };
}
|
|
294
|
+
|
|
295
|
+
// Returns as many blobs as we require to broadcast the given fields
|
|
296
|
+
// Assumes we share the fields hash between all blobs
|
|
297
|
+
static async getBlobs(fields: Fr[]): Promise<Blob[]> {
|
|
298
|
+
const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
|
|
299
|
+
const multiBlobFieldsHash = await poseidon2Hash(fields);
|
|
300
|
+
const res = [];
|
|
301
|
+
for (let i = 0; i < numBlobs; i++) {
|
|
302
|
+
const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
|
|
303
|
+
res.push(await Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
|
|
304
|
+
}
|
|
305
|
+
return res;
|
|
306
|
+
}
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
|
|
310
|
+
function commitmentToFields(commitment: Buffer): [Fr, Fr] {
|
|
311
|
+
return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
|
|
312
|
+
}
|
package/src/encoding.ts
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import { Fr } from '@aztec/foundation/fields';
|
|
2
|
+
import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
|
|
3
|
+
|
|
4
|
+
import type { Blob as BlobBuffer } from 'c-kzg';
|
|
5
|
+
|
|
6
|
+
// Note duplicated from circuit-types !
// This will appear as 0x74785f7374617274 in logs
export const TX_START_PREFIX = 8392562855083340404n;
// These are helper constants to decode tx effects from blob encoded fields
// Byte length of the prefix above: one byte per pair of hex digits.
export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;
// Marker byte preceding the revert code within a tx effect's first field (see layout above).
export const REVERT_CODE_PREFIX = 1;
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Deserializes a blob buffer into an array of field elements.
|
|
17
|
+
*
|
|
18
|
+
* Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
|
|
19
|
+
* This method is sparse, meaning it does not include trailing zeros at the end of the blob.
|
|
20
|
+
*
|
|
21
|
+
* However, we cannot simply trim the zero's from the end of the blob, as some logs may include zero's
|
|
22
|
+
* within them.
|
|
23
|
+
* If we end on a set of zeros, such as the log below:
|
|
24
|
+
* length 7: [ a, b, c, d, e, 0, 0]
|
|
25
|
+
*
|
|
26
|
+
* we will end up with the incorrect hash if we trim the zeros from the end.
|
|
27
|
+
*
|
|
28
|
+
* Each transactions logs contains a TX start prefix, which includes a string followed
|
|
29
|
+
* by the length ( in field elements ) of the transaction's log.
|
|
30
|
+
*
|
|
31
|
+
* This function finds the end of the last transaction's logs, and returns the array up to this point.
|
|
32
|
+
*
|
|
33
|
+
* We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
|
|
34
|
+
* a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
|
|
35
|
+
*
|
|
36
|
+
* +------------------+------------------+------------------+------------------+
|
|
37
|
+
* | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros |
|
|
38
|
+
* | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
|
|
39
|
+
* +------------------+------------------+------------------+------------------+
|
|
40
|
+
* ^
|
|
41
|
+
* |
|
|
42
|
+
* Function reads until here --------------------------------
|
|
43
|
+
*
|
|
44
|
+
* @param blob - The blob buffer to deserialize.
|
|
45
|
+
* @returns An array of field elements.
|
|
46
|
+
*/
|
|
47
|
+
export function deserializeEncodedBlobToFields(blob: BlobBuffer): Fr[] {
|
|
48
|
+
// Convert blob buffer to array of field elements
|
|
49
|
+
const reader = BufferReader.asReader(blob);
|
|
50
|
+
const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
|
|
51
|
+
const fieldReader = FieldReader.asReader(array);
|
|
52
|
+
|
|
53
|
+
// Read fields until we hit zeros at the end
|
|
54
|
+
while (!fieldReader.isFinished()) {
|
|
55
|
+
const currentField = fieldReader.peekField();
|
|
56
|
+
|
|
57
|
+
// Stop when we hit a zero field
|
|
58
|
+
if (!currentField || currentField.isZero()) {
|
|
59
|
+
break;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
// Skip the remaining fields in this transaction
|
|
63
|
+
const len = getLengthFromFirstField(currentField);
|
|
64
|
+
fieldReader.skip(len);
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
// Return array up to last non-zero field
|
|
68
|
+
return array.slice(0, fieldReader.cursor);
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Get the length of the transaction from the first field.
|
|
73
|
+
*
|
|
74
|
+
* @param firstField - The first field of the transaction.
|
|
75
|
+
* @returns The length of the transaction.
|
|
76
|
+
*
|
|
77
|
+
* @throws If the first field does not include the correct prefix - encoding invalid.
|
|
78
|
+
*/
|
|
79
|
+
export function getLengthFromFirstField(firstField: Fr): number {
|
|
80
|
+
// Check that the first field includes the correct prefix
|
|
81
|
+
if (!isValidFirstField(firstField)) {
|
|
82
|
+
throw new Error('Invalid prefix');
|
|
83
|
+
}
|
|
84
|
+
const buf = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
|
|
85
|
+
return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber();
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
// NOTE: duplicated from circuit-types tx effect!
|
|
89
|
+
/**
|
|
90
|
+
* Determines whether a field is the first field of a tx effect
|
|
91
|
+
*/
|
|
92
|
+
export function isValidFirstField(field: Fr): boolean {
|
|
93
|
+
const buf = field.toBuffer();
|
|
94
|
+
if (
|
|
95
|
+
!buf
|
|
96
|
+
.subarray(0, field.size - TX_EFFECT_PREFIX_BYTE_LENGTH)
|
|
97
|
+
.equals(Buffer.alloc(field.size - TX_EFFECT_PREFIX_BYTE_LENGTH))
|
|
98
|
+
) {
|
|
99
|
+
return false;
|
|
100
|
+
}
|
|
101
|
+
const sliced = buf.subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
|
|
102
|
+
if (
|
|
103
|
+
// Checking we start with the correct prefix...
|
|
104
|
+
!new Fr(sliced.subarray(0, TX_START_PREFIX_BYTES_LENGTH)).equals(new Fr(TX_START_PREFIX)) ||
|
|
105
|
+
// ...and include the revert code prefix..
|
|
106
|
+
sliced[sliced.length - 3] !== REVERT_CODE_PREFIX ||
|
|
107
|
+
// ...and the following revert code is valid.
|
|
108
|
+
sliced[sliced.length - 1] > 4
|
|
109
|
+
) {
|
|
110
|
+
return false;
|
|
111
|
+
}
|
|
112
|
+
return true;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Extract the fields from a blob buffer, but do not take into account encoding
|
|
117
|
+
* that will include trailing zeros.
|
|
118
|
+
*
|
|
119
|
+
* +------------------+------------------+------------------+------------------+
|
|
120
|
+
* | | | | Padded zeros |
|
|
121
|
+
* | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] |
|
|
122
|
+
* +------------------+------------------+------------------+------------------+
|
|
123
|
+
* ^
|
|
124
|
+
* |
|
|
125
|
+
* Function reads until here ----------------------
|
|
126
|
+
*/
|
|
127
|
+
export function extractBlobFieldsFromBuffer(blob: BlobBuffer): Fr[] {
|
|
128
|
+
const reader = BufferReader.asReader(blob);
|
|
129
|
+
const array = reader.readArray(blob.length >> 5, Fr);
|
|
130
|
+
|
|
131
|
+
// Find the index of the last non-zero field
|
|
132
|
+
let lastNonZeroIndex = array.length - 1;
|
|
133
|
+
while (lastNonZeroIndex >= 0 && array[lastNonZeroIndex].isZero()) {
|
|
134
|
+
lastNonZeroIndex--;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
// Return the trimmed array
|
|
138
|
+
return array.slice(0, lastNonZeroIndex + 1);
|
|
139
|
+
}
|
package/src/errors.ts
ADDED
package/src/index.ts
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import cKzg from 'c-kzg';
|
|
2
|
+
|
|
3
|
+
/* eslint-disable import/no-named-as-default-member */
|
|
4
|
+
const { loadTrustedSetup } = cKzg;
|
|
5
|
+
|
|
6
|
+
export * from './blob.js';
|
|
7
|
+
export * from './mocks.js';
|
|
8
|
+
export * from './encoding.js';
|
|
9
|
+
export * from './interface.js';
|
|
10
|
+
export * from './errors.js';
|
|
11
|
+
|
|
12
|
+
try {
|
|
13
|
+
loadTrustedSetup();
|
|
14
|
+
} catch (error: any) {
|
|
15
|
+
if (error.message.includes('trusted setup is already loaded')) {
|
|
16
|
+
// NB: The c-kzg lib has no way of checking whether the setup is loaded or not,
|
|
17
|
+
// and it throws an error if it's already loaded, even though nothing is wrong.
|
|
18
|
+
// This is a rudimentary way of ensuring we load the trusted setup if we need it.
|
|
19
|
+
} else {
|
|
20
|
+
throw new Error(error);
|
|
21
|
+
}
|
|
22
|
+
}
|
package/src/interface.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * The relevant parts of a response from https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
 */
export interface BlobJson {
  /** 0x-prefixed hex string of the full blob bytes. */
  blob: string;
  /** Index of the blob within its block, when known. */
  index?: number;
  /** 0x-prefixed hex string of the 48-byte KZG commitment. */
  // eslint-disable-next-line camelcase
  kzg_commitment: string;
  /** 0x-prefixed hex string of the KZG proof. */
  // eslint-disable-next-line camelcase
  kzg_proof: string;
}
|
package/src/mocks.ts
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { toBufferBE } from '@aztec/foundation/bigint-buffer';
|
|
2
|
+
import { Fr } from '@aztec/foundation/fields';
|
|
3
|
+
|
|
4
|
+
import { Blob } from './blob.js';
|
|
5
|
+
import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
|
|
6
|
+
|
|
7
|
+
// TODO: copied form circuit-types tx effect
|
|
8
|
+
function encodeFirstField(length: number): Fr {
|
|
9
|
+
const lengthBuf = Buffer.alloc(2);
|
|
10
|
+
lengthBuf.writeUInt16BE(length, 0);
|
|
11
|
+
return new Fr(
|
|
12
|
+
Buffer.concat([
|
|
13
|
+
toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
|
|
14
|
+
Buffer.alloc(1),
|
|
15
|
+
lengthBuf,
|
|
16
|
+
Buffer.alloc(1),
|
|
17
|
+
Buffer.from([1]),
|
|
18
|
+
Buffer.alloc(1),
|
|
19
|
+
Buffer.alloc(1),
|
|
20
|
+
]),
|
|
21
|
+
);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Make an encoded blob with the given length
|
|
26
|
+
*
|
|
27
|
+
* This will deserialise correctly in the archiver
|
|
28
|
+
* @param length
|
|
29
|
+
* @returns
|
|
30
|
+
*/
|
|
31
|
+
export function makeEncodedBlob(length: number): Promise<Blob> {
|
|
32
|
+
return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Make an unencoded blob with the given length
|
|
37
|
+
*
|
|
38
|
+
* This will fail deserialisation in the archiver
|
|
39
|
+
* @param length
|
|
40
|
+
* @returns
|
|
41
|
+
*/
|
|
42
|
+
export function makeUnencodedBlob(length: number): Promise<Blob> {
|
|
43
|
+
return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export function makeEncodedBlobFields(fields: Fr[]): Promise<Blob> {
|
|
47
|
+
return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]);
|
|
48
|
+
}
|