@aztec/blob-lib 3.0.0-nightly.20251025 → 3.0.0-nightly.20251030-2
This diff reflects the publicly released contents of the two package versions as they appear in their public registries and is provided for informational purposes only.
- package/dest/blob.d.ts +47 -89
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob.js +62 -160
- package/dest/blob_batching.d.ts +14 -46
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +80 -100
- package/dest/blob_utils.d.ts +30 -0
- package/dest/blob_utils.d.ts.map +1 -0
- package/dest/blob_utils.js +60 -0
- package/dest/circuit_types/blob_accumulator.d.ts +21 -0
- package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
- package/dest/circuit_types/blob_accumulator.js +58 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts +22 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
- package/dest/circuit_types/final_blob_accumulator.js +63 -0
- package/dest/circuit_types/final_blob_batching_challenges.d.ts +15 -0
- package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
- package/dest/circuit_types/final_blob_batching_challenges.js +25 -0
- package/dest/circuit_types/index.d.ts +4 -0
- package/dest/circuit_types/index.d.ts.map +1 -0
- package/dest/circuit_types/index.js +4 -0
- package/dest/deserialize.d.ts +7 -41
- package/dest/deserialize.d.ts.map +1 -1
- package/dest/deserialize.js +25 -73
- package/dest/encoding.d.ts +5 -0
- package/dest/encoding.d.ts.map +1 -1
- package/dest/encoding.js +35 -0
- package/dest/hash.d.ts +35 -0
- package/dest/hash.d.ts.map +1 -0
- package/dest/hash.js +69 -0
- package/dest/index.d.ts +4 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +4 -2
- package/dest/sponge_blob.d.ts +13 -9
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +28 -17
- package/dest/testing.d.ts +7 -2
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +47 -14
- package/dest/types.d.ts +2 -0
- package/dest/types.d.ts.map +1 -1
- package/dest/types.js +2 -0
- package/package.json +4 -4
- package/src/blob.ts +67 -180
- package/src/blob_batching.ts +109 -119
- package/src/blob_utils.ts +71 -0
- package/src/circuit_types/blob_accumulator.ts +84 -0
- package/src/circuit_types/final_blob_accumulator.ts +75 -0
- package/src/circuit_types/final_blob_batching_challenges.ts +29 -0
- package/src/circuit_types/index.ts +4 -0
- package/src/deserialize.ts +24 -79
- package/src/encoding.ts +45 -0
- package/src/hash.ts +77 -0
- package/src/index.ts +4 -2
- package/src/sponge_blob.ts +24 -14
- package/src/testing.ts +53 -16
- package/src/types.ts +2 -2
- package/dest/blob_batching_public_inputs.d.ts +0 -57
- package/dest/blob_batching_public_inputs.d.ts.map +0 -1
- package/dest/blob_batching_public_inputs.js +0 -144
- package/src/blob_batching_public_inputs.ts +0 -211
package/dest/blob_batching_public_inputs.js
@@ -1,144 +0,0 @@
-import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
-import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
-import { inspect } from 'util';
-import { BatchedBlobAccumulator } from './blob_batching.js';
-/**
- * See nr BlobAccumulator and ts BatchedBlobAccumulator for documentation.
- */ export class BlobAccumulator {
-    blobCommitmentsHashAcc;
-    zAcc;
-    yAcc;
-    cAcc;
-    gammaAcc;
-    gammaPowAcc;
-    constructor(blobCommitmentsHashAcc, zAcc, yAcc, cAcc, gammaAcc, gammaPowAcc){
-        this.blobCommitmentsHashAcc = blobCommitmentsHashAcc;
-        this.zAcc = zAcc;
-        this.yAcc = yAcc;
-        this.cAcc = cAcc;
-        this.gammaAcc = gammaAcc;
-        this.gammaPowAcc = gammaPowAcc;
-    }
-    static empty() {
-        return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
-    }
-    equals(other) {
-        return this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) && this.zAcc.equals(other.zAcc) && this.yAcc.equals(other.yAcc) && this.cAcc.equals(other.cAcc) && this.gammaAcc.equals(other.gammaAcc) && this.gammaPowAcc.equals(other.gammaPowAcc);
-    }
-    static fromBuffer(buffer) {
-        const reader = BufferReader.asReader(buffer);
-        return new BlobAccumulator(Fr.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader), BLS12Point.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader));
-    }
-    toBuffer() {
-        return serializeToBuffer(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.gammaAcc, this.gammaPowAcc);
-    }
-    /**
-     * Given blobs, accumulate all public inputs state.
-     * We assume the input blobs have not been evaluated at z.
-     * NOTE: Does NOT accumulate non circuit values including Q. This exists to simulate/check exactly what the circuit is doing
-     * and is unsafe for other use. For that reason, a toBatchedBlobAccumulator does not exist. See evaluateBlobs() oracle for usage.
-     * @returns An updated blob accumulator.
-     */ async accumulateBlobs(blobs, finalBlobChallenges) {
-        let acc = new BatchedBlobAccumulator(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, BLS12Point.ZERO, this.gammaAcc, this.gammaPowAcc, finalBlobChallenges);
-        acc = await acc.accumulateBlobs(blobs);
-        return new BlobAccumulator(acc.blobCommitmentsHashAcc, acc.zAcc, acc.yAcc, acc.cAcc, acc.gammaAcc, acc.gammaPow);
-    }
-    toFields() {
-        return [
-            this.blobCommitmentsHashAcc,
-            this.zAcc,
-            ...this.yAcc.toNoirBigNum().limbs.map(Fr.fromString),
-            ...this.cAcc.x.toNoirBigNum().limbs.map(Fr.fromString),
-            ...this.cAcc.y.toNoirBigNum().limbs.map(Fr.fromString),
-            new Fr(this.cAcc.isInfinite),
-            this.gammaAcc,
-            ...this.gammaPowAcc.toNoirBigNum().limbs.map(Fr.fromString)
-        ];
-    }
-    static fromFields(fields) {
-        const reader = FieldReader.asReader(fields);
-        return new BlobAccumulator(reader.readField(), reader.readField(), BLS12Fr.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FR_LIMBS).map((f)=>f.toString())
-        }), new BLS12Point(BLS12Fq.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map((f)=>f.toString())
-        }), BLS12Fq.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map((f)=>f.toString())
-        }), reader.readBoolean()), reader.readField(), BLS12Fr.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FR_LIMBS).map((f)=>f.toString())
-        }));
-    }
-    /**
-     * Converts from an accumulator to a struct for the public inputs of our rollup circuits.
-     * @returns A BlobAccumulator instance.
-     */ static fromBatchedBlobAccumulator(accumulator) {
-        return new BlobAccumulator(accumulator.blobCommitmentsHashAcc, accumulator.zAcc, accumulator.yAcc, accumulator.cAcc, accumulator.gammaAcc, accumulator.gammaPow);
-    }
-}
-/**
- * See nr FinalBlobAccumulator and ts BatchedBlobAccumulator for documentation.
- */ export class FinalBlobAccumulator {
-    blobCommitmentsHash;
-    z;
-    y;
-    c;
-    constructor(blobCommitmentsHash, z, y, c){
-        this.blobCommitmentsHash = blobCommitmentsHash;
-        this.z = z;
-        this.y = y;
-        this.c = c;
-    }
-    static empty() {
-        return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
-    }
-    static fromBuffer(buffer) {
-        const reader = BufferReader.asReader(buffer);
-        return new FinalBlobAccumulator(Fr.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader), BLS12Point.fromBuffer(reader));
-    }
-    toBuffer() {
-        return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
-    }
-    static fromBatchedBlob(blob) {
-        return new FinalBlobAccumulator(blob.blobCommitmentsHash, blob.z, blob.y, blob.commitment);
-    }
-    toFields() {
-        return [
-            this.blobCommitmentsHash,
-            this.z,
-            ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
-            ...this.c.toBN254Fields()
-        ];
-    }
-    // The below is used to send to L1 for proof verification
-    toString() {
-        // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
-        // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
-        // we use calldata rather than a slice in memory:
-        const buf = Buffer.concat([
-            Buffer.alloc(32),
-            this.z.toBuffer(),
-            this.y.toBuffer(),
-            this.c.compress()
-        ]);
-        return buf.toString('hex');
-    }
-    equals(other) {
-        return this.blobCommitmentsHash.equals(other.blobCommitmentsHash) && this.z.equals(other.z) && this.y.equals(other.y) && this.c.equals(other.c);
-    }
-    // Creates a random instance. Used for testing only - will not prove/verify.
-    static random() {
-        return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
-    }
-    // Warning: MUST be final accumulator state.
-    static fromBatchedBlobAccumulator(accumulator) {
-        return new FinalBlobAccumulator(accumulator.blobCommitmentsHashAcc, accumulator.zAcc, accumulator.yAcc, accumulator.cAcc);
-    }
-    [inspect.custom]() {
-        return `FinalBlobAccumulator {
-      blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
-      z: ${inspect(this.z)},
-      y: ${inspect(this.y)},
-      c: ${inspect(this.c)},
-    }`;
-    }
-}
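The removed compiled module above defines the two classes previously exposed for blob public inputs. The snippet below is a minimal, hedged sketch of the `FinalBlobAccumulator` serialization behaviour documented in that file; the `@aztec/blob-lib` root import is an assumption (in the old nightly the class lived in `blob_batching_public_inputs.js`, and per the file list it appears to move under `circuit_types/` in the new one), and only methods visible in the deleted source are used.

```ts
// Sketch only: assumes FinalBlobAccumulator is re-exported from the package root.
import { FinalBlobAccumulator } from '@aztec/blob-lib';

// random() is test-only per the source comment - it will not prove/verify.
const acc = FinalBlobAccumulator.random();

// Round-trip through the toBuffer()/fromBuffer() pair defined in the deleted file.
const restored = FinalBlobAccumulator.fromBuffer(acc.toBuffer());
if (!acc.equals(restored)) {
  throw new Error('FinalBlobAccumulator did not round-trip through a buffer');
}

// toString() prepends 32 zero bytes for the unused 'blobHash' slot, then appends
// z, y and the compressed commitment c, hex-encoded, for L1 proof verification.
const l1PublicInputsHex = acc.toString();
console.log(l1PublicInputsHex);
```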
package/src/blob_batching_public_inputs.ts
@@ -1,211 +0,0 @@
-import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
-import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
-
-import { inspect } from 'util';
-
-import { Blob } from './blob.js';
-import { BatchedBlob, BatchedBlobAccumulator, FinalBlobBatchingChallenges } from './blob_batching.js';
-
-/**
- * See nr BlobAccumulator and ts BatchedBlobAccumulator for documentation.
- */
-export class BlobAccumulator {
-  constructor(
-    public blobCommitmentsHashAcc: Fr,
-    public zAcc: Fr,
-    public yAcc: BLS12Fr,
-    public cAcc: BLS12Point,
-    public gammaAcc: Fr,
-    public gammaPowAcc: BLS12Fr,
-  ) {}
-
-  static empty(): BlobAccumulator {
-    return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
-  }
-
-  equals(other: BlobAccumulator) {
-    return (
-      this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) &&
-      this.zAcc.equals(other.zAcc) &&
-      this.yAcc.equals(other.yAcc) &&
-      this.cAcc.equals(other.cAcc) &&
-      this.gammaAcc.equals(other.gammaAcc) &&
-      this.gammaPowAcc.equals(other.gammaPowAcc)
-    );
-  }
-
-  static fromBuffer(buffer: Buffer | BufferReader): BlobAccumulator {
-    const reader = BufferReader.asReader(buffer);
-    return new BlobAccumulator(
-      Fr.fromBuffer(reader),
-      Fr.fromBuffer(reader),
-      BLS12Fr.fromBuffer(reader),
-      BLS12Point.fromBuffer(reader),
-      Fr.fromBuffer(reader),
-      BLS12Fr.fromBuffer(reader),
-    );
-  }
-
-  toBuffer() {
-    return serializeToBuffer(
-      this.blobCommitmentsHashAcc,
-      this.zAcc,
-      this.yAcc,
-      this.cAcc,
-      this.gammaAcc,
-      this.gammaPowAcc,
-    );
-  }
-
-  /**
-   * Given blobs, accumulate all public inputs state.
-   * We assume the input blobs have not been evaluated at z.
-   * NOTE: Does NOT accumulate non circuit values including Q. This exists to simulate/check exactly what the circuit is doing
-   * and is unsafe for other use. For that reason, a toBatchedBlobAccumulator does not exist. See evaluateBlobs() oracle for usage.
-   * @returns An updated blob accumulator.
-   */
-  async accumulateBlobs(blobs: Blob[], finalBlobChallenges: FinalBlobBatchingChallenges) {
-    let acc = new BatchedBlobAccumulator(
-      this.blobCommitmentsHashAcc,
-      this.zAcc,
-      this.yAcc,
-      this.cAcc,
-      BLS12Point.ZERO,
-      this.gammaAcc,
-      this.gammaPowAcc,
-      finalBlobChallenges,
-    );
-    acc = await acc.accumulateBlobs(blobs);
-    return new BlobAccumulator(acc.blobCommitmentsHashAcc, acc.zAcc, acc.yAcc, acc.cAcc, acc.gammaAcc, acc.gammaPow);
-  }
-
-  toFields() {
-    return [
-      this.blobCommitmentsHashAcc,
-      this.zAcc,
-      ...this.yAcc.toNoirBigNum().limbs.map(Fr.fromString),
-      ...this.cAcc.x.toNoirBigNum().limbs.map(Fr.fromString),
-      ...this.cAcc.y.toNoirBigNum().limbs.map(Fr.fromString),
-      new Fr(this.cAcc.isInfinite),
-      this.gammaAcc,
-      ...this.gammaPowAcc.toNoirBigNum().limbs.map(Fr.fromString),
-    ];
-  }
-
-  static fromFields(fields: Fr[] | FieldReader): BlobAccumulator {
-    const reader = FieldReader.asReader(fields);
-    return new BlobAccumulator(
-      reader.readField(),
-      reader.readField(),
-      BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
-      new BLS12Point(
-        BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
-        BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
-        reader.readBoolean(),
-      ),
-      reader.readField(),
-      BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
-    );
-  }
-
-  /**
-   * Converts from an accumulator to a struct for the public inputs of our rollup circuits.
-   * @returns A BlobAccumulator instance.
-   */
-  static fromBatchedBlobAccumulator(accumulator: BatchedBlobAccumulator) {
-    return new BlobAccumulator(
-      accumulator.blobCommitmentsHashAcc,
-      accumulator.zAcc,
-      accumulator.yAcc,
-      accumulator.cAcc,
-      accumulator.gammaAcc,
-      accumulator.gammaPow,
-    );
-  }
-}
-
-/**
- * See nr FinalBlobAccumulator and ts BatchedBlobAccumulator for documentation.
- */
-export class FinalBlobAccumulator {
-  constructor(
-    public blobCommitmentsHash: Fr,
-    public z: Fr,
-    public y: BLS12Fr,
-    public c: BLS12Point,
-  ) {}
-
-  static empty(): FinalBlobAccumulator {
-    return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
-  }
-
-  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator {
-    const reader = BufferReader.asReader(buffer);
-    return new FinalBlobAccumulator(
-      Fr.fromBuffer(reader),
-      Fr.fromBuffer(reader),
-      BLS12Fr.fromBuffer(reader),
-      BLS12Point.fromBuffer(reader),
-    );
-  }
-
-  toBuffer() {
-    return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
-  }
-
-  static fromBatchedBlob(blob: BatchedBlob) {
-    return new FinalBlobAccumulator(blob.blobCommitmentsHash, blob.z, blob.y, blob.commitment);
-  }
-
-  toFields() {
-    return [
-      this.blobCommitmentsHash,
-      this.z,
-      ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
-      ...this.c.toBN254Fields(),
-    ];
-  }
-
-  // The below is used to send to L1 for proof verification
-  toString() {
-    // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
-    // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
-    // we use calldata rather than a slice in memory:
-    const buf = Buffer.concat([Buffer.alloc(32), this.z.toBuffer(), this.y.toBuffer(), this.c.compress()]);
-    return buf.toString('hex');
-  }
-
-  equals(other: FinalBlobAccumulator) {
-    return (
-      this.blobCommitmentsHash.equals(other.blobCommitmentsHash) &&
-      this.z.equals(other.z) &&
-      this.y.equals(other.y) &&
-      this.c.equals(other.c)
-    );
-  }
-
-  // Creates a random instance. Used for testing only - will not prove/verify.
-  static random() {
-    return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
-  }
-
-  // Warning: MUST be final accumulator state.
-  static fromBatchedBlobAccumulator(accumulator: BatchedBlobAccumulator) {
-    return new FinalBlobAccumulator(
-      accumulator.blobCommitmentsHashAcc,
-      accumulator.zAcc,
-      accumulator.yAcc,
-      accumulator.cAcc,
-    );
-  }
-
-  [inspect.custom]() {
-    return `FinalBlobAccumulator {
-      blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
-      z: ${inspect(this.z)},
-      y: ${inspect(this.y)},
-      c: ${inspect(this.c)},
-    }`;
-  }
-}
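The doc comment on `accumulateBlobs()` carries the key caveat in the deleted source: the method mirrors exactly what the rollup circuit does with its public-inputs state, does not accumulate non-circuit values such as Q, and is therefore only suitable for simulating or checking the circuit (see the `evaluateBlobs()` oracle it references). A minimal sketch of how a caller would drive it, assuming `Blob[]` and `FinalBlobBatchingChallenges` values obtained elsewhere and assuming these classes are reachable from the package root:

```ts
// Sketch only: import paths are assumptions; construction of Blob and
// FinalBlobBatchingChallenges values is not shown in this diff.
import type { Blob, FinalBlobBatchingChallenges } from '@aztec/blob-lib';
import { BlobAccumulator } from '@aztec/blob-lib';

/**
 * Folds the given blobs into a fresh accumulator, mirroring the circuit's
 * public-inputs state. Per the source comment this does NOT accumulate
 * non-circuit values such as Q, so treat it purely as a simulation/check.
 */
async function simulateCircuitAccumulation(
  blobs: Blob[],
  finalBlobChallenges: FinalBlobBatchingChallenges,
): Promise<BlobAccumulator> {
  const start = BlobAccumulator.empty(); // all-zero accumulator state
  // accumulateBlobs() internally builds a temporary BatchedBlobAccumulator,
  // folds each blob in, and returns the updated circuit-facing accumulator.
  return await start.accumulateBlobs(blobs, finalBlobChallenges);
}
```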