@aztec/blob-lib 0.76.4 → 0.77.0-testnet-ignition.17
This diff covers the published contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
- package/dest/blob.d.ts +1 -1
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob.js +125 -138
- package/dest/blob_public_inputs.d.ts +50 -0
- package/dest/blob_public_inputs.d.ts.map +1 -0
- package/dest/blob_public_inputs.js +146 -0
- package/dest/encoding.d.ts.map +1 -1
- package/dest/encoding.js +12 -23
- package/dest/errors.js +1 -2
- package/dest/index.d.ts +2 -1
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +8 -11
- package/dest/interface.js +3 -2
- package/dest/sponge_blob.d.ts +52 -0
- package/dest/sponge_blob.d.ts.map +1 -0
- package/dest/sponge_blob.js +148 -0
- package/dest/testing.d.ts +43 -0
- package/dest/testing.d.ts.map +1 -0
- package/dest/testing.js +81 -0
- package/package.json +7 -5
- package/src/blob.ts +1 -1
- package/src/blob_public_inputs.ts +157 -0
- package/src/encoding.ts +1 -2
- package/src/index.ts +2 -1
- package/src/sponge_blob.ts +178 -0
- package/src/testing.ts +95 -0
- package/dest/mocks.d.ts +0 -20
- package/dest/mocks.d.ts.map +0 -1
- package/dest/mocks.js +0 -42
- package/src/mocks.ts +0 -48
package/src/blob_public_inputs.ts ADDED
@@ -0,0 +1,157 @@
+import { BLOBS_PER_BLOCK } from '@aztec/constants';
+import { makeTuple } from '@aztec/foundation/array';
+import { toBigIntBE, toBufferBE, toHex } from '@aztec/foundation/bigint-buffer';
+import { sha256, sha256Trunc } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
+import { BufferReader, FieldReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize';
+import type { FieldsOf } from '@aztec/foundation/types';
+
+import { type Blob, VERSIONED_HASH_VERSION_KZG } from './blob.js';
+
+/**
+ * Public inputs required to be passed from our rollup circuits to verify a blob.
+ */
+export class BlobPublicInputs {
+  constructor(
+    /** Challenge point z (= H(H(tx_effects), kzgCommitment)). */
+    public z: Fr,
+    /** Evaluation y = p(z), where p() is the blob polynomial. */
+    public y: bigint,
+    /** Commitment to the blob C. */
+    public kzgCommitment: Tuple<Fr, 2>,
+  ) {}
+
+  static empty(): BlobPublicInputs {
+    return new BlobPublicInputs(Fr.ZERO, 0n, [Fr.ZERO, Fr.ZERO]);
+  }
+
+  isEmpty(): boolean {
+    return this.z.isZero() && this.y == 0n && this.kzgCommitment[0].isZero() && this.kzgCommitment[1].isZero();
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): BlobPublicInputs {
+    const reader = BufferReader.asReader(buffer);
+    return new BlobPublicInputs(Fr.fromBuffer(reader), toBigIntBE(reader.readBytes(32)), reader.readArray(2, Fr));
+  }
+
+  toBuffer() {
+    return serializeToBuffer(...BlobPublicInputs.getFields(this));
+  }
+
+  static fromFields(fields: Fr[] | FieldReader): BlobPublicInputs {
+    const reader = FieldReader.asReader(fields);
+    // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
+    const fromBigNum = (fieldArr: Fr[]) => {
+      return BigInt(
+        fieldArr[2].toString().concat(fieldArr[1].toString().substring(2), fieldArr[0].toString().substring(2)),
+      );
+    };
+    return new BlobPublicInputs(reader.readField(), fromBigNum(reader.readFieldArray(3)), reader.readFieldArray(2));
+  }
+
+  // NB: y is NOT a BN254 field, it's a larger BLS field, so we cannot use serializeToFields here as it assumes bigints will fit
+  // TODO: Create a BigNum to fields conversion we can use here and in type_conversion.ts
+  toFields() {
+    const hex = toHex(this.y, true);
+    const bigNum = [
+      Fr.fromString('0x' + hex.substring(36)),
+      Fr.fromString('0x' + hex.substring(6, 36)),
+      Fr.fromString(hex.substring(0, 6)),
+    ];
+    return [this.z, ...bigNum, ...this.kzgCommitment];
+  }
+
+  static getFields(fields: FieldsOf<BlobPublicInputs>) {
+    return [fields.z, fields.y, fields.kzgCommitment] as const;
+  }
+
+  static fromBlob(input: Blob): BlobPublicInputs {
+    return new BlobPublicInputs(input.challengeZ, toBigIntBE(input.evaluationY), input.commitmentToFields());
+  }
+
+  getBlobHash(): Buffer {
+    const hash = sha256(this.commitmentToBuffer());
+    hash[0] = VERSIONED_HASH_VERSION_KZG;
+    return hash;
+  }
+
+  // Performs the reverse conversion of blob.commitmentToFields()
+  // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
+  commitmentToBuffer(): Buffer {
+    return Buffer.concat([
+      this.kzgCommitment[0].toBuffer().subarray(1),
+      this.kzgCommitment[1].toBuffer().subarray(-17),
+    ]);
+  }
+
+  equals(other: BlobPublicInputs) {
+    return (
+      this.z.equals(other.z) &&
+      this.y == other.y &&
+      this.kzgCommitment[0].equals(other.kzgCommitment[0]) &&
+      this.kzgCommitment[1].equals(other.kzgCommitment[1])
+    );
+  }
+}
+
+// NB: it is much cleaner throughout the protocol circuits to define this struct rather than use a nested array.
+// Once we accumulate blob inputs, it should be removed, and we just use BlobPublicInputs::accumulate everywhere.
+export class BlockBlobPublicInputs {
+  constructor(public inner: Tuple<BlobPublicInputs, typeof BLOBS_PER_BLOCK>) {}
+
+  static empty(): BlockBlobPublicInputs {
+    return new BlockBlobPublicInputs(makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty));
+  }
+
+  static fromBuffer(buffer: Buffer | BufferReader): BlockBlobPublicInputs {
+    const reader = BufferReader.asReader(buffer);
+    return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
+  }
+
+  toBuffer() {
+    return serializeToBuffer(...BlockBlobPublicInputs.getFields(this));
+  }
+
+  static fromFields(fields: Fr[] | FieldReader): BlockBlobPublicInputs {
+    const reader = FieldReader.asReader(fields);
+    return new BlockBlobPublicInputs(reader.readArray(BLOBS_PER_BLOCK, BlobPublicInputs));
+  }
+
+  toFields() {
+    return this.inner.map(i => i.toFields()).flat();
+  }
+
+  static getFields(fields: FieldsOf<BlockBlobPublicInputs>) {
+    return [fields.inner] as const;
+  }
+
+  static fromBlobs(inputs: Blob[]): BlockBlobPublicInputs {
+    const inner = makeTuple(BLOBS_PER_BLOCK, BlobPublicInputs.empty);
+    if (inputs.length > BLOBS_PER_BLOCK) {
+      throw new Error(`Can only fit ${BLOBS_PER_BLOCK} in one BlockBlobPublicInputs instance (given ${inputs.length})`);
+    }
+    inputs.forEach((input, i) => {
+      inner[i] = BlobPublicInputs.fromBlob(input);
+    });
+    return new BlockBlobPublicInputs(inner);
+  }
+
+  getBlobsHash() {
+    const blobHashes = this.inner.map(item => (item.isEmpty() ? Buffer.alloc(0) : item.getBlobHash()));
+    return sha256Trunc(serializeToBuffer(blobHashes));
+  }
+
+  // The below is used to send to L1 for proof verification
+  toString() {
+    const nonEmptyBlobs = this.inner.filter(item => !item.isEmpty());
+    // Write the number of blobs for L1 to verify
+    let buf = Buffer.alloc(1);
+    buf.writeUInt8(nonEmptyBlobs.length);
+    // Using standard toBuffer() does not correctly encode the commitment
+    // On L1, it's a 48 byte number, which we convert to 2 fields for use in the circuits
+    nonEmptyBlobs.forEach(blob => {
+      buf = Buffer.concat([buf, blob.z.toBuffer(), toBufferBE(blob.y, 32), blob.commitmentToBuffer()]);
+    });
+    return buf.toString('hex');
+  }
+}
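Note on the bignum encoding above: y is a BLS12-381 scalar too large for a single BN254 field element, so toFields() slices its zero-padded 32-byte hex into three limbs of 120, 120 and 16 bits, and fromFields() concatenates them back. A minimal sketch of the same split over plain bigints (the helper names are illustrative, not part of the package):

// Decompose y into the [low 120, mid 120, high 16] bit limbs used by
// BlobPublicInputs.toFields(), then recompose as fromFields() does.
const LIMB_BITS = 120n;
const LIMB_MASK = (1n << LIMB_BITS) - 1n;

function yToLimbs(y: bigint): [bigint, bigint, bigint] {
  return [y & LIMB_MASK, (y >> LIMB_BITS) & LIMB_MASK, y >> (2n * LIMB_BITS)];
}

function limbsToY([lo, mid, hi]: [bigint, bigint, bigint]): bigint {
  return lo | (mid << LIMB_BITS) | (hi << (2n * LIMB_BITS));
}

limbsToY(yToLimbs(y)) round-trips any y below 2^256, matching the hex-string concatenation in fromBigNum.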
package/src/encoding.ts CHANGED
@@ -3,7 +3,7 @@ import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
 
 import type { Blob as BlobBuffer } from 'c-kzg';
 
-// Note duplicated from
+// Note duplicated from stdlib !
 // This will appear as 0x74785f7374617274 in logs
 export const TX_START_PREFIX = 8392562855083340404n;
 // These are helper constants to decode tx effects from blob encoded fields
@@ -85,7 +85,6 @@ export function getLengthFromFirstField(firstField: Fr): number {
   return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber();
 }
 
-// NOTE: duplicated from circuit-types tx effect!
 /**
  * Determines whether a field is the first field of a tx effect
  */
package/src/index.ts CHANGED
@@ -4,10 +4,11 @@ import cKzg from 'c-kzg';
 const { loadTrustedSetup } = cKzg;
 
 export * from './blob.js';
-export * from './mocks.js';
 export * from './encoding.js';
 export * from './interface.js';
 export * from './errors.js';
+export * from './blob_public_inputs.js';
+export * from './sponge_blob.js';
 
 try {
   loadTrustedSetup();
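The root entrypoint no longer re-exports the mock helpers: mocks.ts is deleted and its functions move to the new testing.ts, which index.ts does not re-export. Downstream code presumably switches to a dedicated testing entrypoint; the subpath below is an assumption based on the changed package.json exports map, whose contents this diff does not show:

// 0.76.4: mock helpers were re-exported from the package root.
// import { makeEncodedBlob } from '@aztec/blob-lib';

// 0.77.0: assumed testing subpath; verify against the published exports map.
import { makeEncodedBlob } from '@aztec/blob-lib/testing';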
package/src/sponge_blob.ts ADDED
@@ -0,0 +1,178 @@
+import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
+import { poseidon2Permutation } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
+import {
+  BufferReader,
+  FieldReader,
+  type Tuple,
+  serializeToBuffer,
+  serializeToFields,
+} from '@aztec/foundation/serialize';
+
+/**
+ * A Poseidon2 sponge used to accumulate data that will be added to a blob.
+ * See noir-projects/noir-protocol-circuits/crates/types/src/abis/sponge_blob.nr.
+ */
+export class SpongeBlob {
+  constructor(
+    /** Sponge with absorbed tx effects that will go into a blob. */
+    public readonly sponge: Poseidon2Sponge,
+    /** Number of effects absorbed so far. */
+    public fields: number,
+    /** Number of effects that will be absorbed. */
+    public readonly expectedFields: number,
+  ) {}
+
+  static fromBuffer(buffer: Buffer | BufferReader): SpongeBlob {
+    const reader = BufferReader.asReader(buffer);
+    return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber(), reader.readNumber());
+  }
+
+  toBuffer() {
+    return serializeToBuffer(this.sponge, this.fields, this.expectedFields);
+  }
+
+  static getFields(fields: FieldsOf<SpongeBlob>) {
+    return [fields.sponge, fields.fields, fields.expectedFields];
+  }
+
+  toFields(): Fr[] {
+    return serializeToFields(...SpongeBlob.getFields(this));
+  }
+
+  static fromFields(fields: Fr[] | FieldReader): SpongeBlob {
+    const reader = FieldReader.asReader(fields);
+    return new SpongeBlob(
+      reader.readObject(Poseidon2Sponge),
+      reader.readField().toNumber(),
+      reader.readField().toNumber(),
+    );
+  }
+
+  clone() {
+    return SpongeBlob.fromBuffer(this.toBuffer());
+  }
+
+  async absorb(fields: Fr[]) {
+    if (this.fields + fields.length > this.expectedFields) {
+      throw new Error(
+        `Attempted to fill spongeblob with ${this.fields + fields.length}, but it has a max of ${this.expectedFields}`,
+      );
+    }
+    await this.sponge.absorb(fields);
+    this.fields += fields.length;
+  }
+
+  async squeeze(): Promise<Fr> {
+    // If the blob sponge is not 'full', we append 1 to match Poseidon2::hash_internal()
+    // NB: There is currently no use case in which we don't 'fill' a blob sponge, but adding for completeness
+    if (this.fields != this.expectedFields) {
+      await this.sponge.absorb([Fr.ONE]);
+    }
+    return this.sponge.squeeze();
+  }
+
+  static empty(): SpongeBlob {
+    return new SpongeBlob(Poseidon2Sponge.empty(), 0, 0);
+  }
+
+  static init(expectedFields: number): SpongeBlob {
+    return new SpongeBlob(Poseidon2Sponge.init(expectedFields), 0, expectedFields);
+  }
+}
+
+// This is just noir's stdlib version of the poseidon2 sponge. We use it for a blob-specific implementation of the hasher.
+export class Poseidon2Sponge {
+  constructor(
+    public cache: Tuple<Fr, 3>,
+    public state: Tuple<Fr, 4>,
+    public cacheSize: number,
+    public squeezeMode: boolean,
+  ) {}
+
+  static fromBuffer(buffer: Buffer | BufferReader): Poseidon2Sponge {
+    const reader = BufferReader.asReader(buffer);
+    return new Poseidon2Sponge(
+      reader.readArray(3, Fr),
+      reader.readArray(4, Fr),
+      reader.readNumber(),
+      reader.readBoolean(),
+    );
+  }
+
+  toBuffer() {
+    return serializeToBuffer(this.cache, this.state, this.cacheSize, this.squeezeMode);
+  }
+
+  static getFields(fields: FieldsOf<Poseidon2Sponge>) {
+    return [fields.cache, fields.state, fields.cacheSize, fields.squeezeMode];
+  }
+
+  toFields(): Fr[] {
+    return serializeToFields(...Poseidon2Sponge.getFields(this));
+  }
+
+  static fromFields(fields: Fr[] | FieldReader): Poseidon2Sponge {
+    const reader = FieldReader.asReader(fields);
+    return new Poseidon2Sponge(
+      reader.readFieldArray(3),
+      reader.readFieldArray(4),
+      reader.readField().toNumber(),
+      reader.readBoolean(),
+    );
+  }
+
+  static empty(): Poseidon2Sponge {
+    return new Poseidon2Sponge(
+      makeTuple(3, () => Fr.ZERO),
+      makeTuple(4, () => Fr.ZERO),
+      0,
+      false,
+    );
+  }
+
+  static init(expectedFields: number): Poseidon2Sponge {
+    const iv = new Fr(expectedFields).mul(new Fr(BigInt('18446744073709551616')));
+    const sponge = Poseidon2Sponge.empty();
+    sponge.state[3] = iv;
+    return sponge;
+  }
+
+  // Note: there isn't currently an impl in ts that allows for a custom absorption via an
+  // existing sponge.
+  // A custom blob-based impl of noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr
+  async performDuplex() {
+    for (let i = 0; i < this.cache.length; i++) {
+      if (i < this.cacheSize) {
+        this.state[i] = this.state[i].add(this.cache[i]);
+      }
+    }
+    const perm = await poseidon2Permutation(this.state);
+    // ts doesn't understand that the above always gives 4
+    this.state = [perm[0], perm[1], perm[2], perm[3]];
+  }
+
+  async absorb(fields: Fr[]) {
+    if (this.squeezeMode) {
+      throw new Error(`Poseidon sponge is not able to absorb more inputs.`);
+    }
+    for (const field of fields) {
+      if (this.cacheSize == this.cache.length) {
+        await this.performDuplex();
+        this.cache[0] = field;
+        this.cacheSize = 1;
+      } else {
+        this.cache[this.cacheSize++] = field;
+      }
+    }
+  }
+
+  async squeeze(): Promise<Fr> {
+    if (this.squeezeMode) {
+      throw new Error(`Poseidon sponge has already been squeezed.`);
+    }
+    await this.performDuplex();
+    this.squeezeMode = true;
+    return this.state[0];
+  }
+}
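As a usage sketch (not part of the diff): a SpongeBlob initialised for a known field count, filled exactly, and squeezed should reproduce the in-circuit Poseidon2 hash of the same inputs, per the comment in squeeze() above:

import { Fr } from '@aztec/foundation/fields';
import { SpongeBlob } from '@aztec/blob-lib';

async function exampleChallenge(): Promise<Fr> {
  const sponge = SpongeBlob.init(3); // expect exactly 3 fields
  await sponge.absorb([new Fr(1), new Fr(2), new Fr(3)]);
  return sponge.squeeze(); // sponge is full, so no padding 1 is appended
}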
package/src/testing.ts ADDED
@@ -0,0 +1,95 @@
+import { BLOBS_PER_BLOCK } from '@aztec/constants';
+import { makeTuple } from '@aztec/foundation/array';
+import { toBufferBE } from '@aztec/foundation/bigint-buffer';
+import { Fr } from '@aztec/foundation/fields';
+
+import { Blob } from './blob.js';
+import { BlobPublicInputs, BlockBlobPublicInputs } from './blob_public_inputs.js';
+import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
+import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';
+
+/**
+ * Makes an arbitrary poseidon sponge for blob inputs.
+ * Note: will not verify inside the circuit.
+ * @param seed - The seed to use for generating the sponge.
+ * @returns A sponge blob instance.
+ */
+export function makeSpongeBlob(seed = 1): SpongeBlob {
+  return new SpongeBlob(
+    new Poseidon2Sponge(
+      makeTuple(3, i => new Fr(i)),
+      makeTuple(4, i => new Fr(i)),
+      1,
+      false,
+    ),
+    seed,
+    seed + 1,
+  );
+}
+
+/**
+ * Makes arbitrary blob public inputs.
+ * Note: will not verify inside the circuit.
+ * @param seed - The seed to use for generating the blob inputs.
+ * @returns A blob public inputs instance.
+ */
+export function makeBlobPublicInputs(seed = 1): BlobPublicInputs {
+  return new BlobPublicInputs(
+    new Fr(seed),
+    BigInt(seed + 1),
+    makeTuple(2, i => new Fr(i)),
+  );
+}
+
+/**
+ * Makes arbitrary block blob public inputs.
+ * Note: will not verify inside the circuit.
+ * @param seed - The seed to use for generating the blob inputs.
+ * @returns A block blob public inputs instance.
+ */
+export function makeBlockBlobPublicInputs(seed = 1): BlockBlobPublicInputs {
+  return new BlockBlobPublicInputs(makeTuple(BLOBS_PER_BLOCK, () => makeBlobPublicInputs(seed)));
+}
+
+// TODO: copied from stdlib tx effect
+function encodeFirstField(length: number): Fr {
+  const lengthBuf = Buffer.alloc(2);
+  lengthBuf.writeUInt16BE(length, 0);
+  return new Fr(
+    Buffer.concat([
+      toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
+      Buffer.alloc(1),
+      lengthBuf,
+      Buffer.alloc(1),
+      Buffer.from([1]),
+      Buffer.alloc(1),
+      Buffer.alloc(1),
+    ]),
+  );
+}
+
+/**
+ * Make an encoded blob with the given length
+ *
+ * This will deserialise correctly in the archiver
+ * @param length
+ * @returns
+ */
+export function makeEncodedBlob(length: number): Promise<Blob> {
+  return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]);
+}
+
+/**
+ * Make an unencoded blob with the given length
+ *
+ * This will fail deserialisation in the archiver
+ * @param length
+ * @returns
+ */
+export function makeUnencodedBlob(length: number): Promise<Blob> {
+  return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
+}
+
+export function makeEncodedBlobFields(fields: Fr[]): Promise<Blob> {
+  return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]);
+}
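encodeFirstField() above packs a 15-byte marker into the blob's first field, assuming TX_START_PREFIX_BYTES_LENGTH is 8 (the prefix 0x74785f7374617274 spells 'tx_start', per the comment in encoding.ts). As a worked example of the layout, not taken from the package:

// [ tx_start prefix (8) | 0x00 | length as u16 BE (2) | 0x00 | 0x01 | 0x00 | 0x00 ]
import { Fr } from '@aztec/foundation/fields';

// For length = 5, the resulting field value under the assumption above:
const firstField = new Fr(0x74785f737461727400000500010000n);
// encodeFirstField(5) should equal firstField.

Note that makeEncodedBlob(length) encodes length + 1, apparently because the encoded count includes the first field itself.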
package/dest/mocks.d.ts DELETED
@@ -1,20 +0,0 @@
-import { Fr } from '@aztec/foundation/fields';
-import { Blob } from './blob.js';
-/**
- * Make an encoded blob with the given length
- *
- * This will deserialise correctly in the archiver
- * @param length
- * @returns
- */
-export declare function makeEncodedBlob(length: number): Promise<Blob>;
-/**
- * Make an unencoded blob with the given length
- *
- * This will fail deserialisation in the archiver
- * @param length
- * @returns
- */
-export declare function makeUnencodedBlob(length: number): Promise<Blob>;
-export declare function makeEncodedBlobFields(fields: Fr[]): Promise<Blob>;
-//# sourceMappingURL=mocks.d.ts.map
package/dest/mocks.d.ts.map DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"mocks.d.ts","sourceRoot":"","sources":["../src/mocks.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAE9C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAoBjC;;;;;;GAMG;AACH,wBAAgB,eAAe,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAE7D;AAED;;;;;;GAMG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAE/D;AAED,wBAAgB,qCAFqB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAEjE"}
package/dest/mocks.js DELETED
@@ -1,42 +0,0 @@
-import { toBufferBE } from '@aztec/foundation/bigint-buffer';
-import { Fr } from '@aztec/foundation/fields';
-import { Blob } from './blob.js';
-import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
-// TODO: copied form circuit-types tx effect
-function encodeFirstField(length) {
-    const lengthBuf = Buffer.alloc(2);
-    lengthBuf.writeUInt16BE(length, 0);
-    return new Fr(Buffer.concat([
-        toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
-        Buffer.alloc(1),
-        lengthBuf,
-        Buffer.alloc(1),
-        Buffer.from([1]),
-        Buffer.alloc(1),
-        Buffer.alloc(1),
-    ]));
-}
-/**
- * Make an encoded blob with the given length
- *
- * This will deserialise correctly in the archiver
- * @param length
- * @returns
- */
-export function makeEncodedBlob(length) {
-    return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]);
-}
-/**
- * Make an unencoded blob with the given length
- *
- * This will fail deserialisation in the archiver
- * @param length
- * @returns
- */
-export function makeUnencodedBlob(length) {
-    return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
-}
-export function makeEncodedBlobFields(fields) {
-    return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]);
-}
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibW9ja3MuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvbW9ja3MudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFVBQVUsRUFBRSxNQUFNLGlDQUFpQyxDQUFDO0FBQzdELE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQUU5QyxPQUFPLEVBQUUsSUFBSSxFQUFFLE1BQU0sV0FBVyxDQUFDO0FBQ2pDLE9BQU8sRUFBRSxlQUFlLEVBQUUsNEJBQTRCLEVBQUUsTUFBTSxlQUFlLENBQUM7QUFFOUUsNENBQTRDO0FBQzVDLFNBQVMsZ0JBQWdCLENBQUMsTUFBYztJQUN0QyxNQUFNLFNBQVMsR0FBRyxNQUFNLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxDQUFDO0lBQ2xDLFNBQVMsQ0FBQyxhQUFhLENBQUMsTUFBTSxFQUFFLENBQUMsQ0FBQyxDQUFDO0lBQ25DLE9BQU8sSUFBSSxFQUFFLENBQ1gsTUFBTSxDQUFDLE1BQU0sQ0FBQztRQUNaLFVBQVUsQ0FBQyxlQUFlLEVBQUUsNEJBQTRCLENBQUM7UUFDekQsTUFBTSxDQUFDLEtBQUssQ0FBQyxDQUFDLENBQUM7UUFDZixTQUFTO1FBQ1QsTUFBTSxDQUFDLEtBQUssQ0FBQyxDQUFDLENBQUM7UUFDZixNQUFNLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUM7UUFDaEIsTUFBTSxDQUFDLEtBQUssQ0FBQyxDQUFDLENBQUM7UUFDZixNQUFNLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQztLQUNoQixDQUFDLENBQ0gsQ0FBQztBQUNKLENBQUM7QUFFRDs7Ozs7O0dBTUc7QUFDSCxNQUFNLFVBQVUsZUFBZSxDQUFDLE1BQWM7SUFDNUMsT0FBTyxJQUFJLENBQUMsVUFBVSxDQUFDLENBQUMsZ0JBQWdCLENBQUMsTUFBTSxHQUFHLENBQUMsQ0FBQyxFQUFFLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQyxFQUFFLE1BQU0sRUFBRSxNQUFNLEVBQUUsRUFBRSxHQUFHLEVBQUUsQ0FBQyxFQUFFLENBQUMsTUFBTSxFQUFFLENBQUMsQ0FBQyxDQUFDLENBQUM7QUFDL0csQ0FBQztBQUVEOzs7Ozs7R0FNRztBQUNILE1BQU0sVUFBVSxpQkFBaUIsQ0FBQyxNQUFjO0lBQzlDLE9BQU8sSUFBSSxDQUFDLFVBQVUsQ0FBQyxDQUFDLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQyxFQUFFLE1BQU0sRUFBRSxNQUFNLEVBQUUsRUFBRSxHQUFHLEVBQUUsQ0FBQyxFQUFFLENBQUMsTUFBTSxFQUFFLENBQUMsQ0FBQyxDQUFDLENBQUM7QUFDakYsQ0FBQztBQUVELE1BQU0sVUFBVSxxQkFBcUIsQ0FBQyxNQUFZO0lBQ2hELE9BQU8sSUFBSSxDQUFDLFVBQVUsQ0FBQyxDQUFDLGdCQUFnQixDQUFDLE1BQU0sQ0FBQyxNQUFNLEdBQUcsQ0FBQyxDQUFDLEVBQUUsR0FBRyxNQUFNLENBQUMsQ0FBQyxDQUFDO0FBQzNFLENBQUMifQ==
package/src/mocks.ts DELETED
@@ -1,48 +0,0 @@
-import { toBufferBE } from '@aztec/foundation/bigint-buffer';
-import { Fr } from '@aztec/foundation/fields';
-
-import { Blob } from './blob.js';
-import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
-
-// TODO: copied form circuit-types tx effect
-function encodeFirstField(length: number): Fr {
-  const lengthBuf = Buffer.alloc(2);
-  lengthBuf.writeUInt16BE(length, 0);
-  return new Fr(
-    Buffer.concat([
-      toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
-      Buffer.alloc(1),
-      lengthBuf,
-      Buffer.alloc(1),
-      Buffer.from([1]),
-      Buffer.alloc(1),
-      Buffer.alloc(1),
-    ]),
-  );
-}
-
-/**
- * Make an encoded blob with the given length
- *
- * This will deserialise correctly in the archiver
- * @param length
- * @returns
- */
-export function makeEncodedBlob(length: number): Promise<Blob> {
-  return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]);
-}
-
-/**
- * Make an unencoded blob with the given length
- *
- * This will fail deserialisation in the archiver
- * @param length
- * @returns
- */
-export function makeUnencodedBlob(length: number): Promise<Blob> {
-  return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
-}
-
-export function makeEncodedBlobFields(fields: Fr[]): Promise<Blob> {
-  return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]);
-}