@aztec/blob-lib 0.0.1-fake-c83136db25 → 0.0.1-fake-ceab37513c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/blob.d.ts +98 -52
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob.js +167 -73
- package/dest/blob_batching.d.ts +48 -15
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +120 -81
- package/dest/blob_batching_public_inputs.d.ts +71 -0
- package/dest/blob_batching_public_inputs.d.ts.map +1 -0
- package/dest/blob_batching_public_inputs.js +168 -0
- package/dest/encoding.d.ts +62 -22
- package/dest/encoding.d.ts.map +1 -1
- package/dest/encoding.js +104 -114
- package/dest/index.d.ts +2 -5
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +15 -5
- package/dest/sponge_blob.d.ts +9 -13
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +17 -28
- package/dest/testing.d.ts +12 -7
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +41 -54
- package/dest/types.d.ts +0 -2
- package/dest/types.d.ts.map +1 -1
- package/dest/types.js +0 -2
- package/package.json +4 -5
- package/src/blob.ts +198 -76
- package/src/blob_batching.ts +137 -109
- package/src/blob_batching_public_inputs.ts +252 -0
- package/src/encoding.ts +120 -136
- package/src/index.ts +18 -5
- package/src/sponge_blob.ts +14 -24
- package/src/testing.ts +40 -55
- package/src/types.ts +2 -2
- package/dest/blob_utils.d.ts +0 -30
- package/dest/blob_utils.d.ts.map +0 -1
- package/dest/blob_utils.js +0 -60
- package/dest/circuit_types/blob_accumulator.d.ts +0 -21
- package/dest/circuit_types/blob_accumulator.d.ts.map +0 -1
- package/dest/circuit_types/blob_accumulator.js +0 -58
- package/dest/circuit_types/final_blob_accumulator.d.ts +0 -22
- package/dest/circuit_types/final_blob_accumulator.d.ts.map +0 -1
- package/dest/circuit_types/final_blob_accumulator.js +0 -63
- package/dest/circuit_types/final_blob_batching_challenges.d.ts +0 -15
- package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +0 -1
- package/dest/circuit_types/final_blob_batching_challenges.js +0 -25
- package/dest/circuit_types/index.d.ts +0 -4
- package/dest/circuit_types/index.d.ts.map +0 -1
- package/dest/circuit_types/index.js +0 -4
- package/dest/deserialize.d.ts +0 -14
- package/dest/deserialize.d.ts.map +0 -1
- package/dest/deserialize.js +0 -33
- package/dest/hash.d.ts +0 -35
- package/dest/hash.d.ts.map +0 -1
- package/dest/hash.js +0 -69
- package/dest/kzg_context.d.ts +0 -4
- package/dest/kzg_context.d.ts.map +0 -1
- package/dest/kzg_context.js +0 -5
- package/src/blob_utils.ts +0 -71
- package/src/circuit_types/blob_accumulator.ts +0 -84
- package/src/circuit_types/final_blob_accumulator.ts +0 -75
- package/src/circuit_types/final_blob_batching_challenges.ts +0 -29
- package/src/circuit_types/index.ts +0 -4
- package/src/deserialize.ts +0 -38
- package/src/hash.ts +0 -77
- package/src/kzg_context.ts +0 -5
package/dest/circuit_types/blob_accumulator.js
DELETED

@@ -1,58 +0,0 @@
-import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
-import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr` for documentation.
- */ export class BlobAccumulator {
-    blobCommitmentsHashAcc;
-    zAcc;
-    yAcc;
-    cAcc;
-    gammaAcc;
-    gammaPowAcc;
-    constructor(blobCommitmentsHashAcc, zAcc, yAcc, cAcc, gammaAcc, gammaPowAcc){
-        this.blobCommitmentsHashAcc = blobCommitmentsHashAcc;
-        this.zAcc = zAcc;
-        this.yAcc = yAcc;
-        this.cAcc = cAcc;
-        this.gammaAcc = gammaAcc;
-        this.gammaPowAcc = gammaPowAcc;
-    }
-    static empty() {
-        return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
-    }
-    equals(other) {
-        return this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) && this.zAcc.equals(other.zAcc) && this.yAcc.equals(other.yAcc) && this.cAcc.equals(other.cAcc) && this.gammaAcc.equals(other.gammaAcc) && this.gammaPowAcc.equals(other.gammaPowAcc);
-    }
-    static fromBuffer(buffer) {
-        const reader = BufferReader.asReader(buffer);
-        return new BlobAccumulator(Fr.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader), BLS12Point.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader));
-    }
-    toBuffer() {
-        return serializeToBuffer(this.blobCommitmentsHashAcc, this.zAcc, this.yAcc, this.cAcc, this.gammaAcc, this.gammaPowAcc);
-    }
-    toFields() {
-        return [
-            this.blobCommitmentsHashAcc,
-            this.zAcc,
-            ...this.yAcc.toNoirBigNum().limbs.map(Fr.fromString),
-            ...this.cAcc.x.toNoirBigNum().limbs.map(Fr.fromString),
-            ...this.cAcc.y.toNoirBigNum().limbs.map(Fr.fromString),
-            new Fr(this.cAcc.isInfinite),
-            this.gammaAcc,
-            ...this.gammaPowAcc.toNoirBigNum().limbs.map(Fr.fromString)
-        ];
-    }
-    static fromFields(fields) {
-        const reader = FieldReader.asReader(fields);
-        return new BlobAccumulator(reader.readField(), reader.readField(), BLS12Fr.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FR_LIMBS).map((f)=>f.toString())
-        }), new BLS12Point(BLS12Fq.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map((f)=>f.toString())
-        }), BLS12Fq.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map((f)=>f.toString())
-        }), reader.readBoolean()), reader.readField(), BLS12Fr.fromNoirBigNum({
-            limbs: reader.readFieldArray(BLS12_FR_LIMBS).map((f)=>f.toString())
-        }));
-    }
-}
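For context, a minimal round-trip sketch of the removed BlobAccumulator field encoding (not part of the package; it assumes the class above plus the BLS12_FR_LIMBS / BLS12_FQ_LIMBS limb counts it imports from '@aztec/constants'):

    // Field layout per toFields()/fromFields():
    //   1 field                 blobCommitmentsHashAcc
    //   1 field                 zAcc
    //   BLS12_FR_LIMBS fields   yAcc (noir bignum limbs)
    //   2 * BLS12_FQ_LIMBS + 1  cAcc (x limbs, y limbs, isInfinite flag)
    //   1 field                 gammaAcc
    //   BLS12_FR_LIMBS fields   gammaPowAcc
    const acc = BlobAccumulator.empty();
    const roundTripped = BlobAccumulator.fromFields(acc.toFields());
    console.assert(roundTripped.equals(acc));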
package/dest/circuit_types/final_blob_accumulator.d.ts
DELETED

@@ -1,22 +0,0 @@
-import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader } from '@aztec/foundation/serialize';
-import { inspect } from 'util';
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
- */
-export declare class FinalBlobAccumulator {
-    blobCommitmentsHash: Fr;
-    z: Fr;
-    y: BLS12Fr;
-    c: BLS12Point;
-    constructor(blobCommitmentsHash: Fr, z: Fr, y: BLS12Fr, c: BLS12Point);
-    static empty(): FinalBlobAccumulator;
-    static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator;
-    toBuffer(): Buffer<ArrayBufferLike>;
-    toFields(): Fr[];
-    toString(): string;
-    equals(other: FinalBlobAccumulator): boolean;
-    static random(): FinalBlobAccumulator;
-    [inspect.custom](): string;
-}
-//# sourceMappingURL=final_blob_accumulator.d.ts.map
package/dest/circuit_types/final_blob_accumulator.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"final_blob_accumulator.d.ts","sourceRoot":"","sources":["../../src/circuit_types/final_blob_accumulator.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AACnE,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAE9E,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAE/B;;GAEG;AACH,qBAAa,oBAAoB;IAEtB,mBAAmB,EAAE,EAAE;IACvB,CAAC,EAAE,EAAE;IACL,CAAC,EAAE,OAAO;IACV,CAAC,EAAE,UAAU;gBAHb,mBAAmB,EAAE,EAAE,EACvB,CAAC,EAAE,EAAE,EACL,CAAC,EAAE,OAAO,EACV,CAAC,EAAE,UAAU;IAGtB,MAAM,CAAC,KAAK,IAAI,oBAAoB;IAIpC,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,oBAAoB;IAUtE,QAAQ;IAIR,QAAQ;IAUR,QAAQ;IAQR,MAAM,CAAC,KAAK,EAAE,oBAAoB;IAUlC,MAAM,CAAC,MAAM;IAIb,CAAC,OAAO,CAAC,MAAM,CAAC;CAQjB"}
package/dest/circuit_types/final_blob_accumulator.js
DELETED

@@ -1,63 +0,0 @@
-import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
-import { inspect } from 'util';
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
- */ export class FinalBlobAccumulator {
-    blobCommitmentsHash;
-    z;
-    y;
-    c;
-    constructor(blobCommitmentsHash, z, y, c){
-        this.blobCommitmentsHash = blobCommitmentsHash;
-        this.z = z;
-        this.y = y;
-        this.c = c;
-    }
-    static empty() {
-        return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
-    }
-    static fromBuffer(buffer) {
-        const reader = BufferReader.asReader(buffer);
-        return new FinalBlobAccumulator(Fr.fromBuffer(reader), Fr.fromBuffer(reader), BLS12Fr.fromBuffer(reader), BLS12Point.fromBuffer(reader));
-    }
-    toBuffer() {
-        return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
-    }
-    toFields() {
-        return [
-            this.blobCommitmentsHash,
-            this.z,
-            ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
-            ...this.c.toBN254Fields()
-        ];
-    }
-    // The below is used to send to L1 for proof verification
-    toString() {
-        // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
-        // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
-        // we use calldata rather than a slice in memory:
-        const buf = Buffer.concat([
-            Buffer.alloc(32),
-            this.z.toBuffer(),
-            this.y.toBuffer(),
-            this.c.compress()
-        ]);
-        return buf.toString('hex');
-    }
-    equals(other) {
-        return this.blobCommitmentsHash.equals(other.blobCommitmentsHash) && this.z.equals(other.z) && this.y.equals(other.y) && this.c.equals(other.c);
-    }
-    // Creates a random instance. Used for testing only - will not prove/verify.
-    static random() {
-        return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
-    }
-    [inspect.custom]() {
-        return `FinalBlobAccumulator {
-      blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
-      z: ${inspect(this.z)},
-      y: ${inspect(this.y)},
-      c: ${inspect(this.c)},
-    }`;
-    }
-}
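A minimal sketch of the L1 hex layout produced by the removed FinalBlobAccumulator.toString() (not part of the package; assumes the usual BLS12-381 sizes of 32 bytes per serialized field and 48 bytes per compressed G1 point):

    // [ 32 zero bytes ('blobHash' slot) | z: 32 bytes | y: 32 bytes | C compressed: 48 bytes ]
    const acc = FinalBlobAccumulator.random(); // testing only, per the class comment
    const hex = acc.toString();
    console.assert(hex.length === (32 + 32 + 32 + 48) * 2); // 144 bytes, hex-encoded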
package/dest/circuit_types/final_blob_batching_challenges.d.ts
DELETED

@@ -1,15 +0,0 @@
-import { BLS12Fr, Fr } from '@aztec/foundation/fields';
-import { BufferReader } from '@aztec/foundation/serialize';
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
- */
-export declare class FinalBlobBatchingChallenges {
-    readonly z: Fr;
-    readonly gamma: BLS12Fr;
-    constructor(z: Fr, gamma: BLS12Fr);
-    equals(other: FinalBlobBatchingChallenges): boolean;
-    static empty(): FinalBlobBatchingChallenges;
-    static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges;
-    toBuffer(): Buffer<ArrayBufferLike>;
-}
-//# sourceMappingURL=final_blob_batching_challenges.d.ts.map
package/dest/circuit_types/final_blob_batching_challenges.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"final_blob_batching_challenges.d.ts","sourceRoot":"","sources":["../../src/circuit_types/final_blob_batching_challenges.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AACvD,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAE9E;;GAEG;AACH,qBAAa,2BAA2B;aAEpB,CAAC,EAAE,EAAE;aACL,KAAK,EAAE,OAAO;gBADd,CAAC,EAAE,EAAE,EACL,KAAK,EAAE,OAAO;IAGhC,MAAM,CAAC,KAAK,EAAE,2BAA2B;IAIzC,MAAM,CAAC,KAAK,IAAI,2BAA2B;IAI3C,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,2BAA2B;IAK7E,QAAQ;CAGT"}
package/dest/circuit_types/final_blob_batching_challenges.js
DELETED

@@ -1,25 +0,0 @@
-import { BLS12Fr, Fr } from '@aztec/foundation/fields';
-import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
- */ export class FinalBlobBatchingChallenges {
-    z;
-    gamma;
-    constructor(z, gamma){
-        this.z = z;
-        this.gamma = gamma;
-    }
-    equals(other) {
-        return this.z.equals(other.z) && this.gamma.equals(other.gamma);
-    }
-    static empty() {
-        return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
-    }
-    static fromBuffer(buffer) {
-        const reader = BufferReader.asReader(buffer);
-        return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
-    }
-    toBuffer() {
-        return serializeToBuffer(this.z, this.gamma);
-    }
-}
package/dest/circuit_types/index.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/circuit_types/index.ts"],"names":[],"mappings":"AACA,cAAc,uBAAuB,CAAC;AACtC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,qCAAqC,CAAC"}
package/dest/deserialize.d.ts
DELETED
@@ -1,14 +0,0 @@
-import { Fr } from '@aztec/foundation/fields';
-/**
- * Deserializes a buffer into an array of field elements.
- *
- * This function returns the fields that were actually added in a checkpoint. The number of fields is specified by the
- * first field.
- *
- * @param buf - The buffer to deserialize.
- * @param checkEncoding - Whether to check if the encoding is correct. If false, it will still check the checkpoint
- * prefix and throw if there's not enough fields.
- * @returns An array of field elements.
- */
-export declare function deserializeEncodedBlobToFields(buf: Uint8Array, checkEncoding?: boolean): Fr[];
-//# sourceMappingURL=deserialize.d.ts.map
package/dest/deserialize.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"deserialize.d.ts","sourceRoot":"","sources":["../src/deserialize.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAM9C;;;;;;;;;;GAUG;AACH,wBAAgB,8BAA8B,CAAC,GAAG,EAAE,UAAU,EAAE,aAAa,UAAQ,GAAG,EAAE,EAAE,CAoB3F"}
package/dest/deserialize.js
DELETED
@@ -1,33 +0,0 @@
-import { Fr } from '@aztec/foundation/fields';
-import { BufferReader } from '@aztec/foundation/serialize';
-import { checkBlobFieldsEncoding } from './encoding.js';
-import { BlobDeserializationError } from './errors.js';
-/**
- * Deserializes a buffer into an array of field elements.
- *
- * This function returns the fields that were actually added in a checkpoint. The number of fields is specified by the
- * first field.
- *
- * @param buf - The buffer to deserialize.
- * @param checkEncoding - Whether to check if the encoding is correct. If false, it will still check the checkpoint
- * prefix and throw if there's not enough fields.
- * @returns An array of field elements.
- */ export function deserializeEncodedBlobToFields(buf, checkEncoding = false) {
-    const reader = BufferReader.asReader(buf);
-    const firstField = reader.readObject(Fr);
-    // Use toBigInt instead of toNumber so that we can catch it and throw a more descriptive error below if the first
-    // field is larger than a javascript integer.
-    const numFields = firstField.toBigInt();
-    const totalFieldsInBuffer = BigInt(buf.length / Fr.SIZE_IN_BYTES);
-    if (numFields > totalFieldsInBuffer) {
-        throw new BlobDeserializationError(`Failed to deserialize blob fields, this blob was likely not created by us`);
-    }
-    const numFieldsWithoutPrefix = Number(numFields) - 1;
-    const blobFields = [
-        firstField
-    ].concat(reader.readArray(numFieldsWithoutPrefix, Fr));
-    if (checkEncoding && !checkBlobFieldsEncoding(blobFields)) {
-        throw new BlobDeserializationError(`Incorrect encoding of blob fields, this blob was likely not created by us`);
-    }
-    return blobFields;
-}
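The checkpoint-prefix framing this decoder expects can be sketched as follows (serializeWithCheckpointPrefix is a hypothetical helper, not part of the package):

    import { Fr } from '@aztec/foundation/fields';

    // Hypothetical encoder matching deserializeEncodedBlobToFields: the first
    // field counts all fields including itself, so N payload fields are
    // prefixed with N + 1.
    function serializeWithCheckpointPrefix(fields: Fr[]): Buffer {
      const prefixed = [new Fr(fields.length + 1), ...fields];
      return Buffer.concat(prefixed.map(f => f.toBuffer()));
    }

    // deserializeEncodedBlobToFields(serializeWithCheckpointPrefix(fields))
    // returns [prefix, ...fields], ignoring any zero padding after the
    // prefixed length.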
package/dest/hash.d.ts
DELETED
@@ -1,35 +0,0 @@
-import { BLS12Fr, Fr } from '@aztec/foundation/fields';
-/**
- * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
- */
-export declare function computeEthVersionedBlobHash(commitment: Buffer): Buffer;
-export declare function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr;
-/**
- * The hash of the fields added throughout the checkpoint. The exact number of fields is specified by the checkpoint
- * prefix (the first field). It's verified in the circuit against the fields absorbed into the sponge blob.
- * This hash is used in generating the challenge z for all blobs in the same checkpoint.
- */
-export declare function computeBlobFieldsHash(fields: Fr[]): Promise<Fr>;
-export declare function computeBlobCommitment(data: Uint8Array): Buffer;
-/**
- * Get the commitment fields of the blob, to compute the challenge z.
- *
- * The 48-byte commitment is encoded into two field elements:
- * +-------------------+------------------------+
- * |     31 bytes      |        17 bytes        |
- * +-------------------+------------------------+
- * |  Field Element 1  |    Field Element 2     |
- * |  [0][bytes 0-30]  |  [0...0][bytes 31-47]  |
- * +-------------------+------------------------+
- *
- * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
- * @returns The fields representing the commitment buffer.
- */
-export declare function commitmentToFields(commitment: Buffer): [Fr, Fr];
-export declare function computeChallengeZ(blobFieldsHash: Fr, commitment: Buffer): Promise<Fr>;
-/**
- * Hash each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash accumulation in the
- * rollup circuits.
- */
-export declare function hashNoirBigNumLimbs(field: BLS12Fr): Promise<Fr>;
-//# sourceMappingURL=hash.d.ts.map
package/dest/hash.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAMvD;;GAEG;AACH,wBAAgB,2BAA2B,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAItE;AAOD,wBAAgB,gBAAgB,CAAC,sBAAsB,EAAE,MAAM,EAAE,GAAG,EAAE,CAErE;AAED;;;;GAIG;AACH,wBAAsB,qBAAqB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAErE;AAED,wBAAgB,qBAAqB,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,CAM9D;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,kBAAkB,CAAC,UAAU,EAAE,MAAM,GAAG,CAAC,EAAE,EAAE,EAAE,CAAC,CAM/D;AAED,wBAAsB,iBAAiB,CAAC,cAAc,EAAE,EAAE,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC,CAG3F;AAED;;;GAGG;AACH,wBAAsB,mBAAmB,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC,CAGrE"}
package/dest/hash.js
DELETED
@@ -1,69 +0,0 @@
-import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
-import { Fr } from '@aztec/foundation/fields';
-import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
-const VERSIONED_HASH_VERSION_KZG = 0x01;
-/**
- * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
- */ export function computeEthVersionedBlobHash(commitment) {
-    const hash = sha256(commitment);
-    hash[0] = VERSIONED_HASH_VERSION_KZG;
-    return hash;
-}
-// TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
-// - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
-// - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
-// exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
-// We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
-export function computeBlobsHash(evmVersionedBlobHashes) {
-    return sha256ToField(evmVersionedBlobHashes);
-}
-/**
- * The hash of the fields added throughout the checkpoint. The exact number of fields is specified by the checkpoint
- * prefix (the first field). It's verified in the circuit against the fields absorbed into the sponge blob.
- * This hash is used in generating the challenge z for all blobs in the same checkpoint.
- */ export async function computeBlobFieldsHash(fields) {
-    return await poseidon2Hash(fields);
-}
-export function computeBlobCommitment(data) {
-    if (data.length !== BYTES_PER_BLOB) {
-        throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. Got ${data.length}.`);
-    }
-    return Buffer.from(kzg.blobToKzgCommitment(data));
-}
-/**
- * Get the commitment fields of the blob, to compute the challenge z.
- *
- * The 48-byte commitment is encoded into two field elements:
- * +-------------------+------------------------+
- * |     31 bytes      |        17 bytes        |
- * +-------------------+------------------------+
- * |  Field Element 1  |    Field Element 2     |
- * |  [0][bytes 0-30]  |  [0...0][bytes 31-47]  |
- * +-------------------+------------------------+
- *
- * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
- * @returns The fields representing the commitment buffer.
- */ export function commitmentToFields(commitment) {
-    if (commitment.length !== BYTES_PER_COMMITMENT) {
-        throw new Error(`Expected ${BYTES_PER_COMMITMENT} bytes for blob commitment. Got ${commitment.length}.`);
-    }
-    return [
-        new Fr(commitment.subarray(0, 31)),
-        new Fr(commitment.subarray(31, BYTES_PER_COMMITMENT))
-    ];
-}
-export async function computeChallengeZ(blobFieldsHash, commitment) {
-    const commitmentFields = commitmentToFields(commitment);
-    return await poseidon2Hash([
-        blobFieldsHash,
-        commitmentFields[0],
-        commitmentFields[1]
-    ]);
-}
-/**
- * Hash each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash accumulation in the
- * rollup circuits.
- */ export async function hashNoirBigNumLimbs(field) {
-    const num = field.toNoirBigNum();
-    return await poseidon2Hash(num.limbs.map(Fr.fromHexString));
-}
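A usage sketch of the removed commitment helpers (not part of the package; blobData and blobFieldsHash are placeholders for a BYTES_PER_BLOB-sized buffer and a precomputed checkpoint fields hash):

    import { Fr } from '@aztec/foundation/fields';

    declare const blobData: Uint8Array; // placeholder blob bytes
    declare const blobFieldsHash: Fr;   // placeholder, see computeBlobFieldsHash above

    const commitment = computeBlobCommitment(blobData); // 48-byte KZG commitment
    const [hi, lo] = commitmentToFields(commitment);
    // hi <- bytes 0..30 (31 bytes), lo <- bytes 31..47 (17 bytes); both fit below the BN254 modulus.
    const z = await computeChallengeZ(blobFieldsHash, commitment);
    // Equivalent to: await poseidon2Hash([blobFieldsHash, hi, lo])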
package/dest/kzg_context.d.ts
DELETED
package/dest/kzg_context.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"kzg_context.d.ts","sourceRoot":"","sources":["../src/kzg_context.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,cAAc,4BAA4B,CAAC;AAE3C,eAAO,MAAM,GAAG,cAA4C,CAAC"}
package/dest/kzg_context.js
DELETED
package/src/blob_utils.ts
DELETED
@@ -1,71 +0,0 @@
-import { FIELDS_PER_BLOB } from '@aztec/constants';
-import { BLS12Point, Fr } from '@aztec/foundation/fields';
-
-import { Blob } from './blob.js';
-import { deserializeEncodedBlobToFields } from './deserialize.js';
-import { computeBlobFieldsHash, computeBlobsHash } from './hash.js';
-
-/**
- * @param blobs - The blobs to emit.
- * @returns The blobs' compressed commitments in hex prefixed by the number of blobs. 1 byte for the prefix, 48 bytes
- * per blob commitment.
- * @dev Used for proposing blocks to validate injected blob commitments match real broadcast blobs.
- */
-export function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` {
-  // Prefix the number of blobs.
-  const lenBuf = Buffer.alloc(1);
-  lenBuf.writeUint8(blobs.length);
-
-  const blobBuf = Buffer.concat(blobs.map(blob => blob.commitment));
-
-  const buf = Buffer.concat([lenBuf, blobBuf]);
-  return `0x${buf.toString('hex')}`;
-}
-
-/**
- * @param fields - Fields to broadcast in the blob(s)
- * @returns As many blobs as required to broadcast the given fields to an L1 block.
- *
- * @throws If the number of fields does not match what's indicated by the checkpoint prefix.
- */
-export function getBlobsPerL1Block(fields: Fr[]): Blob[] {
-  if (!fields.length) {
-    throw new Error('Cannot create blobs from empty fields.');
-  }
-
-  const numBlobs = Math.ceil(fields.length / FIELDS_PER_BLOB);
-  return Array.from({ length: numBlobs }, (_, i) =>
-    Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)),
-  );
-}
-
-/**
- * Get the fields from all blobs in the checkpoint. Ignoring the fields beyond the length specified by the
- * checkpoint prefix (the first field).
- *
- * @param blobs - The blobs to read fields from. Should be all the blobs in the L1 block proposing the checkpoint.
- * @param checkEncoding - Whether to check if the entire encoded blob fields are valid. If false, it will still check
- * the checkpoint prefix and throw if there's not enough fields.
- * @returns The fields added throughout the checkpoint.
- */
-export function getBlobFieldsInCheckpoint(blobs: Blob[], checkEncoding = false): Fr[] {
-  return deserializeEncodedBlobToFields(Buffer.concat(blobs.map(b => b.data)), checkEncoding);
-}
-
-export async function computeBlobFieldsHashFromBlobs(blobs: Blob[]): Promise<Fr> {
-  const fields = blobs.map(b => b.toFields()).flat();
-  const numBlobFields = fields[0].toNumber();
-  if (numBlobFields > fields.length) {
-    throw new Error(`The prefix indicates ${numBlobFields} fields. Got ${fields.length}.`);
-  }
-
-  return await computeBlobFieldsHash(fields.slice(0, numBlobFields));
-}
-
-export function computeBlobsHashFromBlobs(blobs: Blob[]): Fr {
-  return computeBlobsHash(blobs.map(b => b.getEthVersionedBlobHash()));
-}
-
-export function getBlobCommitmentsFromBlobs(blobs: Blob[]): BLS12Point[] {
-  return blobs.map(b => BLS12Point.decompress(b.commitment));
-}
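A sketch of the calldata layout produced by the removed getPrefixedEthBlobCommitments (not part of the package; blobs is a placeholder Blob[], with 48 bytes per compressed commitment as the doc comment above states):

    declare const blobs: Blob[]; // placeholder

    // Layout: [ 1 byte: blob count | 48 bytes per compressed commitment ], hex-encoded.
    const prefixed = getPrefixedEthBlobCommitments(blobs);
    console.assert(prefixed.length === 2 + 2 * (1 + 48 * blobs.length)); // '0x' + hex chars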
package/src/circuit_types/blob_accumulator.ts
DELETED

@@ -1,84 +0,0 @@
-import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
-import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
-
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/blob_accumulator.nr` for documentation.
- */
-export class BlobAccumulator {
-  constructor(
-    public blobCommitmentsHashAcc: Fr,
-    public zAcc: Fr,
-    public yAcc: BLS12Fr,
-    public cAcc: BLS12Point,
-    public gammaAcc: Fr,
-    public gammaPowAcc: BLS12Fr,
-  ) {}
-
-  static empty(): BlobAccumulator {
-    return new BlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO, Fr.ZERO, BLS12Fr.ZERO);
-  }
-
-  equals(other: BlobAccumulator) {
-    return (
-      this.blobCommitmentsHashAcc.equals(other.blobCommitmentsHashAcc) &&
-      this.zAcc.equals(other.zAcc) &&
-      this.yAcc.equals(other.yAcc) &&
-      this.cAcc.equals(other.cAcc) &&
-      this.gammaAcc.equals(other.gammaAcc) &&
-      this.gammaPowAcc.equals(other.gammaPowAcc)
-    );
-  }
-
-  static fromBuffer(buffer: Buffer | BufferReader): BlobAccumulator {
-    const reader = BufferReader.asReader(buffer);
-    return new BlobAccumulator(
-      Fr.fromBuffer(reader),
-      Fr.fromBuffer(reader),
-      BLS12Fr.fromBuffer(reader),
-      BLS12Point.fromBuffer(reader),
-      Fr.fromBuffer(reader),
-      BLS12Fr.fromBuffer(reader),
-    );
-  }
-
-  toBuffer() {
-    return serializeToBuffer(
-      this.blobCommitmentsHashAcc,
-      this.zAcc,
-      this.yAcc,
-      this.cAcc,
-      this.gammaAcc,
-      this.gammaPowAcc,
-    );
-  }
-
-  toFields() {
-    return [
-      this.blobCommitmentsHashAcc,
-      this.zAcc,
-      ...this.yAcc.toNoirBigNum().limbs.map(Fr.fromString),
-      ...this.cAcc.x.toNoirBigNum().limbs.map(Fr.fromString),
-      ...this.cAcc.y.toNoirBigNum().limbs.map(Fr.fromString),
-      new Fr(this.cAcc.isInfinite),
-      this.gammaAcc,
-      ...this.gammaPowAcc.toNoirBigNum().limbs.map(Fr.fromString),
-    ];
-  }
-
-  static fromFields(fields: Fr[] | FieldReader): BlobAccumulator {
-    const reader = FieldReader.asReader(fields);
-    return new BlobAccumulator(
-      reader.readField(),
-      reader.readField(),
-      BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
-      new BLS12Point(
-        BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
-        BLS12Fq.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FQ_LIMBS).map(f => f.toString()) }),
-        reader.readBoolean(),
-      ),
-      reader.readField(),
-      BLS12Fr.fromNoirBigNum({ limbs: reader.readFieldArray(BLS12_FR_LIMBS).map(f => f.toString()) }),
-    );
-  }
-}
package/src/circuit_types/final_blob_accumulator.ts
DELETED

@@ -1,75 +0,0 @@
-import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
-import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
-
-import { inspect } from 'util';
-
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_accumulator.nr` for documentation.
- */
-export class FinalBlobAccumulator {
-  constructor(
-    public blobCommitmentsHash: Fr,
-    public z: Fr,
-    public y: BLS12Fr,
-    public c: BLS12Point,
-  ) {}
-
-  static empty(): FinalBlobAccumulator {
-    return new FinalBlobAccumulator(Fr.ZERO, Fr.ZERO, BLS12Fr.ZERO, BLS12Point.ZERO);
-  }
-
-  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobAccumulator {
-    const reader = BufferReader.asReader(buffer);
-    return new FinalBlobAccumulator(
-      Fr.fromBuffer(reader),
-      Fr.fromBuffer(reader),
-      BLS12Fr.fromBuffer(reader),
-      BLS12Point.fromBuffer(reader),
-    );
-  }
-
-  toBuffer() {
-    return serializeToBuffer(this.blobCommitmentsHash, this.z, this.y, this.c);
-  }
-
-  toFields() {
-    return [
-      this.blobCommitmentsHash,
-      this.z,
-      ...this.y.toNoirBigNum().limbs.map(Fr.fromString),
-      ...this.c.toBN254Fields(),
-    ];
-  }
-
-  // The below is used to send to L1 for proof verification
-  toString() {
-    // We prepend 32 bytes for the (unused) 'blobHash' slot. This is not read or required by getEpochProofPublicInputs() on L1, but
-    // is expected since we usually pass the full precompile inputs via verifyEpochRootProof() to getEpochProofPublicInputs() to ensure
-    // we use calldata rather than a slice in memory:
-    const buf = Buffer.concat([Buffer.alloc(32), this.z.toBuffer(), this.y.toBuffer(), this.c.compress()]);
-    return buf.toString('hex');
-  }
-
-  equals(other: FinalBlobAccumulator) {
-    return (
-      this.blobCommitmentsHash.equals(other.blobCommitmentsHash) &&
-      this.z.equals(other.z) &&
-      this.y.equals(other.y) &&
-      this.c.equals(other.c)
-    );
-  }
-
-  // Creates a random instance. Used for testing only - will not prove/verify.
-  static random() {
-    return new FinalBlobAccumulator(Fr.random(), Fr.random(), BLS12Fr.random(), BLS12Point.random());
-  }
-
-  [inspect.custom]() {
-    return `FinalBlobAccumulator {
-      blobCommitmentsHash: ${inspect(this.blobCommitmentsHash)},
-      z: ${inspect(this.z)},
-      y: ${inspect(this.y)},
-      c: ${inspect(this.c)},
-    }`;
-  }
-}
package/src/circuit_types/final_blob_batching_challenges.ts
DELETED

@@ -1,29 +0,0 @@
-import { BLS12Fr, Fr } from '@aztec/foundation/fields';
-import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
-
-/**
- * See `noir-projects/noir-protocol-circuits/crates/blob/src/abis/final_blob_batching_challenges.nr` for documentation.
- */
-export class FinalBlobBatchingChallenges {
-  constructor(
-    public readonly z: Fr,
-    public readonly gamma: BLS12Fr,
-  ) {}
-
-  equals(other: FinalBlobBatchingChallenges) {
-    return this.z.equals(other.z) && this.gamma.equals(other.gamma);
-  }
-
-  static empty(): FinalBlobBatchingChallenges {
-    return new FinalBlobBatchingChallenges(Fr.ZERO, BLS12Fr.ZERO);
-  }
-
-  static fromBuffer(buffer: Buffer | BufferReader): FinalBlobBatchingChallenges {
-    const reader = BufferReader.asReader(buffer);
-    return new FinalBlobBatchingChallenges(Fr.fromBuffer(reader), reader.readObject(BLS12Fr));
-  }
-
-  toBuffer() {
-    return serializeToBuffer(this.z, this.gamma);
-  }
-}