@aztec/blob-lib 3.0.0-nightly.20251121 → 3.0.0-nightly.20251122
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/blob_batching.d.ts +6 -6
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +24 -20
- package/dest/blob_utils.d.ts +0 -1
- package/dest/blob_utils.d.ts.map +1 -1
- package/dest/blob_utils.js +1 -9
- package/dest/encoding/block_end_marker.js +1 -1
- package/dest/encoding/block_end_state_field.js +1 -1
- package/dest/encoding/checkpoint_blob_data.d.ts +4 -2
- package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -1
- package/dest/encoding/checkpoint_blob_data.js +42 -29
- package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
- package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
- package/dest/encoding/checkpoint_end_marker.js +28 -0
- package/dest/encoding/fixtures.js +5 -3
- package/dest/encoding/index.d.ts +1 -0
- package/dest/encoding/index.d.ts.map +1 -1
- package/dest/encoding/index.js +1 -0
- package/dest/encoding/tx_start_marker.js +1 -1
- package/dest/hash.d.ts +10 -3
- package/dest/hash.d.ts.map +1 -1
- package/dest/hash.js +14 -4
- package/dest/sponge_blob.d.ts +7 -11
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +19 -34
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +1 -1
- package/package.json +3 -3
- package/src/blob_batching.ts +25 -20
- package/src/blob_utils.ts +1 -11
- package/src/encoding/block_blob_data.ts +1 -1
- package/src/encoding/block_end_marker.ts +1 -1
- package/src/encoding/block_end_state_field.ts +1 -1
- package/src/encoding/checkpoint_blob_data.ts +54 -34
- package/src/encoding/checkpoint_end_marker.ts +40 -0
- package/src/encoding/fixtures.ts +3 -3
- package/src/encoding/index.ts +1 -0
- package/src/encoding/tx_blob_data.ts +1 -1
- package/src/encoding/tx_start_marker.ts +1 -1
- package/src/hash.ts +14 -4
- package/src/sponge_blob.ts +21 -34
- package/src/testing.ts +0 -1
package/dest/sponge_blob.js
CHANGED

@@ -1,4 +1,4 @@
-import { TWO_POW_64 } from '@aztec/constants';
+import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, TWO_POW_64 } from '@aztec/constants';
 import { makeTuple } from '@aztec/foundation/array';
 import { poseidon2Permutation } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
@@ -9,15 +9,22 @@ import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from
  */ export class SpongeBlob {
     sponge;
     numAbsorbedFields;
-
-    constructor(/** Sponge with absorbed fields that will go into one or more blobs. */ sponge, /** Number of effects absorbed so far. */ numAbsorbedFields
+    static MAX_FIELDS = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB;
+    constructor(/** Sponge with absorbed fields that will go into one or more blobs. */ sponge, /** Number of effects absorbed so far. */ numAbsorbedFields){
         this.sponge = sponge;
         this.numAbsorbedFields = numAbsorbedFields;
-
+    }
+    /**
+     * Initialize the sponge blob to absorb data for a checkpoint.
+     */ static init() {
+        // This must match the implementation in noir-projects/noir-protocol-circuits/types/src/abis/sponge_blob.nr
+        const iv = new Fr(BigInt(SpongeBlob.MAX_FIELDS) * TWO_POW_64);
+        const sponge = Poseidon2Sponge.init(iv);
+        return new SpongeBlob(sponge, 0);
     }
     static fromBuffer(buffer) {
         const reader = BufferReader.asReader(buffer);
-        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber()
+        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber());
     }
     toBuffer() {
         return serializeToBuffer(...SpongeBlob.getFields(this));
@@ -25,8 +32,7 @@ import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from
     static getFields(fields) {
         return [
             fields.sponge,
-            fields.numAbsorbedFields
-            fields.numExpectedFields
+            fields.numAbsorbedFields
         ];
     }
     toFields() {
@@ -34,43 +40,23 @@ import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from
     }
     static fromFields(fields) {
         const reader = FieldReader.asReader(fields);
-        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readField().toNumber()
+        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readField().toNumber());
     }
     clone() {
         return SpongeBlob.fromBuffer(this.toBuffer());
     }
     async absorb(fields) {
-        if (this.numAbsorbedFields + fields.length >
-            throw new Error(`Attempted to fill spongeBlob with ${this.numAbsorbedFields + fields.length}, but it has a max of ${
+        if (this.numAbsorbedFields + fields.length > SpongeBlob.MAX_FIELDS) {
+            throw new Error(`Attempted to fill spongeBlob with ${this.numAbsorbedFields + fields.length}, but it has a max of ${SpongeBlob.MAX_FIELDS}`);
         }
         await this.sponge.absorb(fields);
         this.numAbsorbedFields += fields.length;
     }
     async squeeze() {
-
-        // NB: There is currently no use case in which we don't 'fill' a blob sponge, but adding for completeness
-        if (this.numAbsorbedFields != this.numExpectedFields) {
-            await this.sponge.absorb([
-                Fr.ONE
-            ]);
-        }
-        return this.sponge.squeeze();
+        return await this.sponge.squeeze();
     }
     static empty() {
-        return new SpongeBlob(Poseidon2Sponge.empty(), 0
-    }
-    /**
-     * Initialize the sponge blob with the number of expected fields in the checkpoint and absorb it as the first field.
-     * Note: `numExpectedFields` includes the first field absorbed in this method.
-     */ static async init(numExpectedFields) {
-        // This must match what the checkpoint root rollup circuit expects.
-        // See noir-projects/noir-protocol-circuits/types/src/abis/sponge_blob.nr -> init_for_checkpoint.
-        const sponge = Poseidon2Sponge.init(numExpectedFields);
-        await sponge.absorb([
-            new Fr(numExpectedFields)
-        ]);
-        const numAbsorbedFields = 1;
-        return new SpongeBlob(sponge, numAbsorbedFields, numExpectedFields);
+        return new SpongeBlob(Poseidon2Sponge.empty(), 0);
     }
 }
 // This is just noir's stdlib version of the poseidon2 sponge. We use it for a blob-specific implmentation of the hasher.
@@ -110,8 +96,7 @@ export class Poseidon2Sponge {
     static empty() {
         return new Poseidon2Sponge(makeTuple(3, ()=>Fr.ZERO), makeTuple(4, ()=>Fr.ZERO), 0, false);
     }
-    static init(
-        const iv = new Fr(numExpectedFields).mul(new Fr(TWO_POW_64));
+    static init(iv) {
         const sponge = Poseidon2Sponge.empty();
         sponge.state[3] = iv;
         return sponge;
package/dest/testing.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"testing.d.ts","sourceRoot":"","sources":["../src/testing.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAE5D,OAAO,EAAmB,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAE/D,cAAc,wBAAwB,CAAC;AAEvC;;;;;GAKG;AACH,wBAAgB,cAAc,CAAC,IAAI,SAAI,GAAG,UAAU,
+{"version":3,"file":"testing.d.ts","sourceRoot":"","sources":["../src/testing.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAE5D,OAAO,EAAmB,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAE/D,cAAc,wBAAwB,CAAC;AAEvC;;;;;GAKG;AACH,wBAAgB,cAAc,CAAC,IAAI,SAAI,GAAG,UAAU,CAUnD;AAED;;;;;GAKG;AACH,wBAAgB,0BAA0B,CAAC,IAAI,SAAI,GAAG,sBAAsB,CAW3E;AAED;;;;;;GAMG;AACH,wBAAgB,cAAc,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAEnD"}
package/dest/testing.js
CHANGED

@@ -11,7 +11,7 @@ export * from './encoding/fixtures.js';
  * @param seed - The seed to use for generating the sponge.
  * @returns A sponge blob instance.
  */ export function makeSpongeBlob(seed = 1) {
-    return new SpongeBlob(new Poseidon2Sponge(makeTuple(3, (i)=>new Fr(i)), makeTuple(4, (i)=>new Fr(i)), 1, false), seed
+    return new SpongeBlob(new Poseidon2Sponge(makeTuple(3, (i)=>new Fr(i)), makeTuple(4, (i)=>new Fr(i)), 1, false), seed);
 }
 /**
  * Makes arbitrary blob public accumulator.
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@aztec/blob-lib",
-  "version": "3.0.0-nightly.
+  "version": "3.0.0-nightly.20251122",
   "type": "module",
   "exports": {
     ".": "./dest/index.js",
@@ -27,8 +27,8 @@
     "../package.common.json"
   ],
   "dependencies": {
-    "@aztec/constants": "3.0.0-nightly.
-    "@aztec/foundation": "3.0.0-nightly.
+    "@aztec/constants": "3.0.0-nightly.20251122",
+    "@aztec/foundation": "3.0.0-nightly.20251122",
     "@crate-crypto/node-eth-kzg": "^0.10.0",
     "tslib": "^2.4.0"
   },
package/src/blob_batching.ts
CHANGED

@@ -3,9 +3,9 @@ import { poseidon2Hash, sha256ToField } from '@aztec/foundation/crypto';
 import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
 
 import { Blob } from './blob.js';
-import {
+import { getBlobsPerL1Block } from './blob_utils.js';
 import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
-import { computeEthVersionedBlobHash, hashNoirBigNumLimbs } from './hash.js';
+import { computeBlobFieldsHash, computeEthVersionedBlobHash, hashNoirBigNumLimbs } from './hash.js';
 import { kzg } from './kzg_context.js';
 
 /**
@@ -32,18 +32,19 @@ export class BatchedBlob {
   *
   * @returns A batched blob.
   */
-  static async batch(
-
+  static async batch(blobFieldsPerCheckpoint: Fr[][]): Promise<BatchedBlob> {
+    const numCheckpoints = blobFieldsPerCheckpoint.length;
+    if (numCheckpoints > AZTEC_MAX_EPOCH_DURATION) {
       throw new Error(
-        `Too many
+        `Too many checkpoints sent to batch(). The maximum is ${AZTEC_MAX_EPOCH_DURATION}. Got ${numCheckpoints}.`,
       );
     }
 
     // Precalculate the values (z and gamma) and initialize the accumulator:
-    let acc = await this.newAccumulator(
+    let acc = await this.newAccumulator(blobFieldsPerCheckpoint);
     // Now we can create a multi opening proof of all input blobs:
-    for (const
-      acc = await acc.
+    for (const blobFields of blobFieldsPerCheckpoint) {
+      acc = await acc.accumulateFields(blobFields);
     }
     return await acc.finalize();
   }
@@ -53,8 +54,8 @@ export class BatchedBlob {
   * @dev MUST input all blobs to be broadcast. Does not work in multiple calls because z and gamma are calculated
   * beforehand from ALL blobs.
   */
-  static async newAccumulator(
-    const finalBlobChallenges = await this.precomputeBatchedBlobChallenges(
+  static async newAccumulator(blobFieldsPerCheckpoint: Fr[][]): Promise<BatchedBlobAccumulator> {
+    const finalBlobChallenges = await this.precomputeBatchedBlobChallenges(blobFieldsPerCheckpoint);
     return BatchedBlobAccumulator.newWithChallenges(finalBlobChallenges);
   }
 
@@ -70,13 +71,15 @@
   * @param blobs - The blobs to precompute the challenges for. Each sub-array is the blobs for an L1 block.
   * @returns Challenges z and gamma.
   */
-  static async precomputeBatchedBlobChallenges(
+  static async precomputeBatchedBlobChallenges(blobFieldsPerCheckpoint: Fr[][]): Promise<FinalBlobBatchingChallenges> {
     // Compute the final challenge z to evaluate the blobs.
     let z: Fr | undefined;
-
+    const allBlobs = [];
+    for (const blobFields of blobFieldsPerCheckpoint) {
       // Compute the hash of all the fields in the block.
-      const blobFieldsHash = await
-
+      const blobFieldsHash = await computeBlobFieldsHash(blobFields);
+      const blobs = getBlobsPerL1Block(blobFields);
+      for (const blob of blobs) {
         // Compute the challenge z for each blob and accumulate it.
         const challengeZ = await blob.computeChallengeZ(blobFieldsHash);
         if (!z) {
@@ -85,13 +88,13 @@
           z = await poseidon2Hash([z, challengeZ]);
         }
       }
+      allBlobs.push(...blobs);
     }
     if (!z) {
       throw new Error('No blobs to precompute challenges for.');
     }
 
     // Now we have a shared challenge for all blobs, evaluate them...
-    const allBlobs = blobs.flat();
     const proofObjects = allBlobs.map(b => b.evaluate(z));
     const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y)));
     // ...and find the challenge for the linear combination of blobs.
@@ -190,7 +193,7 @@ export class BatchedBlobAccumulator {
   * We assume the input blob has not been evaluated at z.
   * @returns An updated blob accumulator.
   */
-
+  async accumulateBlob(blob: Blob, blobFieldsHash: Fr) {
     const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z);
     const thisC = BLS12Point.decompress(blob.commitment);
     const thisQ = BLS12Point.decompress(proof);
@@ -234,10 +237,12 @@ export class BatchedBlobAccumulator {
  /**
   * Given blobs, accumulate all state.
   * We assume the input blobs have not been evaluated at z.
-   * @param
+   * @param blobFields - The blob fields of a checkpoint to accumulate.
   * @returns An updated blob accumulator.
   */
-  async
+  async accumulateFields(blobFields: Fr[]) {
+    const blobs = getBlobsPerL1Block(blobFields);
+
     if (blobs.length > BLOBS_PER_CHECKPOINT) {
       throw new Error(
         `Too many blobs to accumulate. The maximum is ${BLOBS_PER_CHECKPOINT} per checkpoint. Got ${blobs.length}.`,
@@ -245,12 +250,12 @@ export class BatchedBlobAccumulator {
     }
 
     // Compute the hash of all the fields in the block.
-    const blobFieldsHash = await
+    const blobFieldsHash = await computeBlobFieldsHash(blobFields);
 
     // Initialize the acc to iterate over:
     let acc: BatchedBlobAccumulator = this.clone();
     for (const blob of blobs) {
-      acc = await acc.
+      acc = await acc.accumulateBlob(blob, blobFieldsHash);
     }
     return acc;
   }
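
The net effect of this file's changes is that the batching entry points now take raw blob fields grouped per checkpoint (Fr[][]) instead of pre-built Blob objects, constructing the blobs internally via getBlobsPerL1Block(). A hedged usage sketch follows; it assumes BatchedBlob is exported from the package root, and the function and variable names around it are illustrative.

import { Fr } from '@aztec/foundation/fields';
import { BatchedBlob } from '@aztec/blob-lib'; // assumed re-export from the package root

// blobFieldsPerCheckpoint: one Fr[] per checkpoint in the epoch,
// with at most AZTEC_MAX_EPOCH_DURATION entries.
async function proveEpochBlobs(blobFieldsPerCheckpoint: Fr[][]) {
  // batch() precomputes the shared challenges (z and gamma) from ALL fields,
  // then accumulates each checkpoint's blobs and finalizes the multi-opening proof.
  return await BatchedBlob.batch(blobFieldsPerCheckpoint);
}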
package/src/blob_utils.ts
CHANGED

@@ -3,7 +3,7 @@ import { BLS12Point, Fr } from '@aztec/foundation/fields';
 
 import { Blob } from './blob.js';
 import { type CheckpointBlobData, decodeCheckpointBlobDataFromBuffer } from './encoding/index.js';
-import {
+import { computeBlobsHash } from './hash.js';
 
 /**
  * @param blobs - The blobs to emit.
@@ -49,16 +49,6 @@ export function decodeCheckpointBlobDataFromBlobs(blobs: Blob[]): CheckpointBlob
   return decodeCheckpointBlobDataFromBuffer(buf);
 }
 
-export async function computeBlobFieldsHashFromBlobs(blobs: Blob[]): Promise<Fr> {
-  const fields = blobs.map(b => b.toFields()).flat();
-  const numBlobFields = fields[0].toNumber();
-  if (numBlobFields > fields.length) {
-    throw new Error(`The prefix indicates ${numBlobFields} fields. Got ${fields.length}.`);
-  }
-
-  return await computeBlobFieldsHash(fields.slice(0, numBlobFields));
-}
-
 export function computeBlobsHashFromBlobs(blobs: Blob[]): Fr {
   return computeBlobsHash(blobs.map(b => b.getEthVersionedBlobHash()));
 }
package/src/encoding/block_blob_data.ts
CHANGED

@@ -15,7 +15,7 @@ import {
 } from './block_end_state_field.js';
 import { type TxBlobData, decodeTxBlobData, encodeTxBlobData } from './tx_blob_data.js';
 
-// Must match the implementation in `noir-protocol-circuits/crates/
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
 
 export interface BlockEndBlobData {
   blockEndMarker: BlockEndMarker;
package/src/encoding/block_end_marker.ts
CHANGED

@@ -3,7 +3,7 @@ import { Fr } from '@aztec/foundation/fields';
 
 import { BlobDeserializationError } from '../errors.js';
 
-// Must match the implementation in `noir-protocol-circuits/crates/
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
 
 const BLOCK_NUMBER_BIT_SIZE = 32n;
 const TIMESTAMP_BIT_SIZE = 64n;
package/src/encoding/block_end_state_field.ts
CHANGED

@@ -8,7 +8,7 @@ import { Fr } from '@aztec/foundation/fields';
 
 import { BlobDeserializationError } from '../errors.js';
 
-// Must match the implementation in `noir-protocol-circuits/crates/
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
 
 export const TOTAL_MANA_USED_BIT_SIZE = 48n;
 
package/src/encoding/checkpoint_blob_data.ts
CHANGED

@@ -3,20 +3,32 @@ import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
 
 import { BlobDeserializationError } from '../errors.js';
 import { type BlockBlobData, decodeBlockBlobData, encodeBlockBlobData } from './block_blob_data.js';
+import {
+  type CheckpointEndMarker,
+  decodeCheckpointEndMarker,
+  encodeCheckpointEndMarker,
+  isCheckpointEndMarker,
+} from './checkpoint_end_marker.js';
 import type { TxStartMarker } from './tx_start_marker.js';
 
 export interface CheckpointBlobData {
-
+  checkpointEndMarker: CheckpointEndMarker;
   blocks: BlockBlobData[];
 }
 
 export function encodeCheckpointBlobData(checkpointBlobData: CheckpointBlobData): Fr[] {
   return [
-    new Fr(checkpointBlobData.totalNumBlobFields),
     ...checkpointBlobData.blocks.map(block => encodeBlockBlobData(block)).flat(),
+    encodeCheckpointEndMarker(checkpointBlobData.checkpointEndMarker),
   ];
 }
 
+export function encodeCheckpointBlobDataFromBlocks(blocks: BlockBlobData[]): Fr[] {
+  const blocksBlobFields = blocks.map(block => encodeBlockBlobData(block)).flat();
+  const numBlobFields = blocksBlobFields.length + 1; // +1 for the checkpoint end marker.
+  return blocksBlobFields.concat(encodeCheckpointEndMarker({ numBlobFields }));
+}
+
 export function decodeCheckpointBlobData(fields: Fr[] | FieldReader): CheckpointBlobData {
   const reader = FieldReader.asReader(fields);
 
@@ -24,52 +36,60 @@ export function decodeCheckpointBlobData(fields: Fr[] | FieldReader): Checkpoint
     throw new BlobDeserializationError(`Cannot decode empty blob data.`);
   }
 
-  const
-
-
-
-
-  //
+  const blocks = [];
+  let checkpointEndMarker: CheckpointEndMarker | undefined;
+  while (!reader.isFinished() && !checkpointEndMarker) {
+    blocks.push(decodeBlockBlobData(reader, blocks.length === 0 /* isFirstBlock */));
+
+    // After reading a block, the next item must be either a checkpoint end marker or another block.
+    // The first field of a block is always a tx start marker. So if the provided fields are valid, it's not possible to
+    // misinterpret a tx start marker as checkpoint end marker, or vice versa.
+    const nextField = reader.peekField();
+    if (isCheckpointEndMarker(nextField)) {
+      checkpointEndMarker = decodeCheckpointEndMarker(reader.readField());
+      const numFieldsRead = reader.cursor;
+      if (numFieldsRead !== checkpointEndMarker.numBlobFields) {
+        throw new BlobDeserializationError(
+          `Incorrect encoding of blob fields: mismatch number of blob fields. Expected ${checkpointEndMarker.numBlobFields} fields, got ${numFieldsRead}.`,
+        );
+      }
+    }
+  }
+
+  if (!checkpointEndMarker) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: checkpoint end marker does not exist.`);
+  }
+
+  const remainingFields = reader.readFieldArray(reader.remainingFields());
+  if (!remainingFields.every(f => f.isZero())) {
     throw new BlobDeserializationError(
-      `Incorrect encoding of blob fields:
+      `Incorrect encoding of blob fields: unexpected non-zero field after checkpoint end marker.`,
     );
   }
 
-  const blocks = [];
-  while (reader.cursor < totalNumBlobFields) {
-    blocks.push(decodeBlockBlobData(reader, blocks.length === 0 /* isFirstBlock */));
-  }
   return {
-
+    checkpointEndMarker,
     blocks,
   };
 }
 
 export function decodeCheckpointBlobDataFromBuffer(buf: Buffer): CheckpointBlobData {
   const reader = BufferReader.asReader(buf);
-  const
-
-  // Use toBigInt instead of toNumber so that we can catch it and throw a more descriptive error if the first field is
-  // larger than a javascript integer.
-  const numFields = firstField.toBigInt();
-  const totalFieldsInBuffer = BigInt(buf.length / Fr.SIZE_IN_BYTES);
-  if (numFields > totalFieldsInBuffer) {
-    throw new BlobDeserializationError(
-      `Failed to deserialize blob buffer: not enough fields for checkpoint blob data. Expected ${numFields} fields, got ${totalFieldsInBuffer}.`,
-    );
-  }
-
-  const numFieldsWithoutPrefix = Number(numFields) - 1;
-  const blobFields = [firstField].concat(reader.readArray(numFieldsWithoutPrefix, Fr));
-
+  const totalFieldsInBuffer = Math.floor(buf.length / Fr.SIZE_IN_BYTES);
+  const blobFields = reader.readArray(totalFieldsInBuffer, Fr);
   return decodeCheckpointBlobData(blobFields);
 }
 
-export function getTotalNumBlobFieldsFromTxs(
+export function getTotalNumBlobFieldsFromTxs(txsPerBlock: TxStartMarker[][]): number {
+  const numBlocks = txsPerBlock.length;
+  if (!numBlocks) {
+    return 0;
+  }
+
   return (
-    1 + //
-
-
-
+    (numBlocks ? 1 : 0) + // l1ToL2Messages root in the first block
+    numBlocks * 6 + // 6 fields for each block end blob data.
+    txsPerBlock.reduce((total, txs) => total + txs.reduce((total, tx) => total + tx.numBlobFields, 0), 0) +
+    1 // checkpoint end marker
   );
 }
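
As a worked check of the new getTotalNumBlobFieldsFromTxs counting (using only the terms visible in the code above, with made-up tx sizes for illustration): a checkpoint with two blocks whose txs carry 5 + 7 and 3 blob fields respectively totals 1 (l1ToL2Messages root in the first block) + 2 * 6 (block end blob data) + 15 (tx fields) + 1 (checkpoint end marker) = 29 fields.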
package/src/encoding/checkpoint_end_marker.ts
ADDED

@@ -0,0 +1,40 @@
+import { CHECKPOINT_END_PREFIX } from '@aztec/constants';
+import { Fr } from '@aztec/foundation/fields';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/checkpoint_blob_data.nr`.
+
+const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
+
+export interface CheckpointEndMarker {
+  numBlobFields: number;
+}
+
+export function encodeCheckpointEndMarker(checkpointEndMarker: CheckpointEndMarker) {
+  let value = CHECKPOINT_END_PREFIX;
+  value <<= NUM_BLOB_FIELDS_BIT_SIZE;
+  value += BigInt(checkpointEndMarker.numBlobFields);
+  return new Fr(value);
+}
+
+export function decodeCheckpointEndMarker(field: Fr): CheckpointEndMarker {
+  let value = field.toBigInt();
+  const numBlobFields = Number(value & (2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n));
+  value >>= NUM_BLOB_FIELDS_BIT_SIZE;
+
+  const prefix = value;
+  if (prefix !== CHECKPOINT_END_PREFIX) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid checkpoint end marker.`);
+  }
+
+  return {
+    numBlobFields,
+  };
+}
+
+// Check if a field is a checkpoint end marker. Used to check if it has reached the end of the blob fields.
+export function isCheckpointEndMarker(field: Fr): boolean {
+  const prefix = field.toBigInt() >> NUM_BLOB_FIELDS_BIT_SIZE;
+  return prefix === CHECKPOINT_END_PREFIX;
+}
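
For reference, the marker added above packs a prefix over a 32-bit field count. A round-trip sketch using only the functions shown here: the concrete value of CHECKPOINT_END_PREFIX comes from @aztec/constants and is not shown in this diff, and the import path assumes these helpers are re-exported from the package root as src/encoding/index.ts suggests.

import {
  decodeCheckpointEndMarker,
  encodeCheckpointEndMarker,
  isCheckpointEndMarker,
} from '@aztec/blob-lib'; // assumed re-export path

// Encodes a single field: (CHECKPOINT_END_PREFIX << 32n) + BigInt(numBlobFields).
const marker = encodeCheckpointEndMarker({ numBlobFields: 29 });

isCheckpointEndMarker(marker);     // true: the bits above the low 32 match CHECKPOINT_END_PREFIX
decodeCheckpointEndMarker(marker); // { numBlobFields: 29 }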
package/src/encoding/fixtures.ts
CHANGED

@@ -198,12 +198,12 @@ export function makeCheckpointBlobData({
     seed,
   );
 
-  const
-    overrides.
+  const numBlobFields =
+    overrides.checkpointEndMarker?.numBlobFields ??
     getTotalNumBlobFieldsFromTxs(blocks.map(block => block.txs.map(tx => tx.txStartMarker)));
 
   return {
-    totalNumBlobFields,
     blocks,
+    checkpointEndMarker: { numBlobFields },
   };
 }
package/src/encoding/index.ts
CHANGED

@@ -2,6 +2,7 @@ export * from './block_blob_data.js';
 export * from './block_end_marker.js';
 export * from './block_end_state_field.js';
 export * from './checkpoint_blob_data.js';
+export * from './checkpoint_end_marker.js';
 export * from './fixtures.js';
 export * from './tx_blob_data.js';
 export * from './tx_start_marker.js';
package/src/encoding/tx_blob_data.ts
CHANGED

@@ -5,7 +5,7 @@ import { FieldReader } from '@aztec/foundation/serialize';
 import { BlobDeserializationError } from '../errors.js';
 import { type TxStartMarker, decodeTxStartMarker, encodeTxStartMarker } from './tx_start_marker.js';
 
-// Must match the implementation in noir-protocol-circuits/crates/
+// Must match the implementation in noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr.
 
 export interface TxBlobData {
   txStartMarker: TxStartMarker;
package/src/encoding/tx_start_marker.ts
CHANGED

@@ -3,7 +3,7 @@ import { Fr } from '@aztec/foundation/fields';
 
 import { BlobDeserializationError } from '../errors.js';
 
-// Must match the implementation in `noir-protocol-circuits/crates/
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr`.
 
 const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
 const REVERT_CODE_BIT_SIZE = 8n;
package/src/hash.ts
CHANGED

@@ -2,6 +2,7 @@ import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
 import { BLS12Fr, Fr } from '@aztec/foundation/fields';
 
 import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
+import { SpongeBlob } from './sponge_blob.js';
 
 const VERSIONED_HASH_VERSION_KZG = 0x01;
 
@@ -24,12 +25,21 @@ export function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr {
 }
 
 /**
- *
- *
- * This
+ * Computes a non-standard Poseidon2 hash over the provided fields.
+ *
+ * This function is used to compute:
+ * - `blobFieldsHash` of a checkpoint:
+ *   Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
+ *   The exact number of fields is encoded in the checkpoint end marker (the last field).
+ *   This hash is used when generating the challenge `z` for all blobs in the checkpoint.
+ * - `spongeBlobHash` of a block:
+ *   Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
+ *   This hash is included in the block header.
  */
 export async function computeBlobFieldsHash(fields: Fr[]): Promise<Fr> {
-
+  const sponge = SpongeBlob.init();
+  await sponge.absorb(fields);
+  return sponge.squeeze();
 }
 
 export function computeBlobCommitment(data: Uint8Array): Buffer {