@aztec/stdlib 3.0.0-nightly.20250925 → 3.0.0-nightly.20250926
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/avm/avm_accumulated_data.d.ts +1 -0
- package/dest/avm/avm_accumulated_data.d.ts.map +1 -1
- package/dest/avm/avm_accumulated_data.js +4 -0
- package/dest/avm/index.d.ts +0 -1
- package/dest/avm/index.d.ts.map +1 -1
- package/dest/avm/index.js +0 -1
- package/dest/file-store/factory.d.ts.map +1 -1
- package/dest/file-store/factory.js +18 -0
- package/dest/file-store/s3.d.ts +25 -0
- package/dest/file-store/s3.d.ts.map +1 -0
- package/dest/file-store/s3.js +187 -0
- package/dest/interfaces/aztec-node-admin.d.ts +6 -0
- package/dest/interfaces/aztec-node-admin.d.ts.map +1 -1
- package/dest/interfaces/slasher.d.ts +4 -0
- package/dest/interfaces/slasher.d.ts.map +1 -1
- package/dest/interfaces/slasher.js +1 -0
- package/dest/interfaces/validator.d.ts +6 -1
- package/dest/interfaces/validator.d.ts.map +1 -1
- package/dest/interfaces/validator.js +2 -1
- package/dest/rollup/avm_proof_data.d.ts +2 -12
- package/dest/rollup/avm_proof_data.d.ts.map +1 -1
- package/dest/rollup/avm_proof_data.js +0 -24
- package/dest/rollup/base_rollup_hints.d.ts +15 -16
- package/dest/rollup/base_rollup_hints.d.ts.map +1 -1
- package/dest/rollup/base_rollup_hints.js +24 -25
- package/dest/rollup/block_root_rollup_private_inputs.d.ts +13 -13
- package/dest/rollup/block_root_rollup_private_inputs.d.ts.map +1 -1
- package/dest/rollup/block_root_rollup_private_inputs.js +20 -20
- package/dest/rollup/index.d.ts +1 -1
- package/dest/rollup/index.d.ts.map +1 -1
- package/dest/rollup/index.js +1 -1
- package/dest/rollup/public_tx_base_rollup_private_inputs.d.ts +1 -1
- package/dest/rollup/public_tx_base_rollup_private_inputs.d.ts.map +1 -1
- package/dest/rollup/public_tx_base_rollup_private_inputs.js +2 -2
- package/dest/rollup/{state_diff_hints.d.ts → tree_snapshot_diff_hints.d.ts} +22 -38
- package/dest/rollup/tree_snapshot_diff_hints.d.ts.map +1 -0
- package/dest/rollup/tree_snapshot_diff_hints.js +75 -0
- package/dest/rollup/tx_rollup_public_inputs.d.ts +4 -4
- package/dest/rollup/tx_rollup_public_inputs.d.ts.map +1 -1
- package/dest/rollup/tx_rollup_public_inputs.js +7 -7
- package/dest/tests/factories.d.ts +4 -4
- package/dest/tests/factories.d.ts.map +1 -1
- package/dest/tests/factories.js +20 -25
- package/package.json +9 -8
- package/src/avm/avm_accumulated_data.ts +10 -0
- package/src/avm/index.ts +0 -1
- package/src/file-store/factory.ts +15 -0
- package/src/file-store/s3.ts +186 -0
- package/src/interfaces/slasher.ts +2 -0
- package/src/interfaces/validator.ts +5 -1
- package/src/rollup/avm_proof_data.ts +2 -31
- package/src/rollup/base_rollup_hints.ts +20 -21
- package/src/rollup/block_root_rollup_private_inputs.ts +14 -14
- package/src/rollup/index.ts +1 -1
- package/src/rollup/public_tx_base_rollup_private_inputs.ts +3 -2
- package/src/rollup/{state_diff_hints.ts → tree_snapshot_diff_hints.ts} +26 -41
- package/src/rollup/tx_rollup_public_inputs.ts +4 -4
- package/src/tests/factories.ts +25 -37
- package/dest/avm/public_data_hint.d.ts +0 -16
- package/dest/avm/public_data_hint.d.ts.map +0 -1
- package/dest/avm/public_data_hint.js +0 -27
- package/dest/rollup/state_diff_hints.d.ts.map +0 -1
- package/dest/rollup/state_diff_hints.js +0 -85
- package/src/avm/public_data_hint.ts +0 -38
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
import { type Logger, createLogger } from '@aztec/foundation/log';
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
GetObjectCommand,
|
|
5
|
+
type GetObjectCommandOutput,
|
|
6
|
+
HeadObjectCommand,
|
|
7
|
+
PutObjectCommand,
|
|
8
|
+
S3Client,
|
|
9
|
+
} from '@aws-sdk/client-s3';
|
|
10
|
+
import { createReadStream, createWriteStream } from 'fs';
|
|
11
|
+
import { mkdir } from 'fs/promises';
|
|
12
|
+
import { dirname, join } from 'path';
|
|
13
|
+
import { Readable } from 'stream';
|
|
14
|
+
import { finished } from 'stream/promises';
|
|
15
|
+
import { createGzip } from 'zlib';
|
|
16
|
+
|
|
17
|
+
import type { FileStore, FileStoreSaveOptions } from './interface.js';
|
|
18
|
+
|
|
19
|
+
function normalizeBasePath(path: string): string {
|
|
20
|
+
return path?.replace(/^\/+|\/+$/g, '') ?? '';
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export class S3FileStore implements FileStore {
|
|
24
|
+
private readonly s3: S3Client;
|
|
25
|
+
private readonly region: string;
|
|
26
|
+
private readonly endpoint?: string;
|
|
27
|
+
private readonly publicBaseUrl?: string;
|
|
28
|
+
|
|
29
|
+
constructor(
|
|
30
|
+
private readonly bucketName: string,
|
|
31
|
+
private readonly basePath: string,
|
|
32
|
+
opts: { endpoint?: string; publicBaseUrl?: string },
|
|
33
|
+
private readonly log: Logger = createLogger('stdlib:s3-file-store'),
|
|
34
|
+
) {
|
|
35
|
+
this.endpoint = opts.endpoint;
|
|
36
|
+
this.region = this.endpoint ? 'auto' : (process.env.AWS_REGION ?? process.env.AWS_DEFAULT_REGION ?? 'us-east-1');
|
|
37
|
+
this.publicBaseUrl = opts.publicBaseUrl;
|
|
38
|
+
|
|
39
|
+
const clientOptions: any = {};
|
|
40
|
+
if (this.endpoint) {
|
|
41
|
+
clientOptions.region = 'auto';
|
|
42
|
+
clientOptions.endpoint = this.endpoint;
|
|
43
|
+
clientOptions.forcePathStyle = true;
|
|
44
|
+
} else {
|
|
45
|
+
clientOptions.region = this.region;
|
|
46
|
+
}
|
|
47
|
+
this.s3 = new S3Client(clientOptions);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
public async save(path: string, data: Buffer, opts: FileStoreSaveOptions = {}): Promise<string> {
|
|
51
|
+
const key = this.getFullPath(path);
|
|
52
|
+
const shouldCompress = !opts.compress;
|
|
53
|
+
const body = shouldCompress ? (await import('zlib')).gzipSync(data) : data;
|
|
54
|
+
const put = new PutObjectCommand({
|
|
55
|
+
Bucket: this.bucketName,
|
|
56
|
+
Key: key,
|
|
57
|
+
Body: body,
|
|
58
|
+
ContentEncoding: shouldCompress ? 'gzip' : undefined,
|
|
59
|
+
CacheControl: opts.metadata?.['Cache-control'],
|
|
60
|
+
Metadata: this.extractUserMetadata(opts.metadata),
|
|
61
|
+
});
|
|
62
|
+
await this.s3.send(put);
|
|
63
|
+
return this.buildReturnedUrl(key, !!opts.public);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
public async upload(destPath: string, srcPath: string, opts: FileStoreSaveOptions = {}): Promise<string> {
|
|
67
|
+
const key = this.getFullPath(destPath);
|
|
68
|
+
const shouldCompress = opts.compress !== false; // default true like GCS impl
|
|
69
|
+
|
|
70
|
+
await mkdir(dirname(srcPath), { recursive: true }).catch(() => undefined);
|
|
71
|
+
|
|
72
|
+
const source = createReadStream(srcPath);
|
|
73
|
+
const bodyStream = shouldCompress ? source.pipe(createGzip()) : source;
|
|
74
|
+
const put = new PutObjectCommand({
|
|
75
|
+
Bucket: this.bucketName,
|
|
76
|
+
Key: key,
|
|
77
|
+
Body: bodyStream as any,
|
|
78
|
+
ContentEncoding: shouldCompress ? 'gzip' : undefined,
|
|
79
|
+
CacheControl: opts.metadata?.['Cache-control'],
|
|
80
|
+
Metadata: this.extractUserMetadata(opts.metadata),
|
|
81
|
+
});
|
|
82
|
+
await this.s3.send(put);
|
|
83
|
+
return this.buildReturnedUrl(key, !!opts.public);
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
public async read(pathOrUrlStr: string): Promise<Buffer> {
|
|
87
|
+
const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
|
|
88
|
+
const out: GetObjectCommandOutput = await this.s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
|
|
89
|
+
const stream = out.Body as Readable;
|
|
90
|
+
const chunks: Buffer[] = [];
|
|
91
|
+
for await (const chunk of stream) {
|
|
92
|
+
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
|
93
|
+
}
|
|
94
|
+
return Buffer.concat(chunks);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
public async download(pathOrUrlStr: string, destPath: string): Promise<void> {
|
|
98
|
+
const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
|
|
99
|
+
const out: GetObjectCommandOutput = await this.s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
|
|
100
|
+
await mkdir(dirname(destPath), { recursive: true });
|
|
101
|
+
const write = createWriteStream(destPath);
|
|
102
|
+
await finished((out.Body as Readable).pipe(write));
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
public async exists(pathOrUrlStr: string): Promise<boolean> {
|
|
106
|
+
try {
|
|
107
|
+
const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
|
|
108
|
+
await this.s3.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
|
|
109
|
+
return true;
|
|
110
|
+
} catch (err: any) {
|
|
111
|
+
const code = err?.$metadata?.httpStatusCode ?? err?.name ?? err?.Code;
|
|
112
|
+
if (code === 404 || code === 'NotFound' || code === 'NoSuchKey') {
|
|
113
|
+
return false;
|
|
114
|
+
}
|
|
115
|
+
this.log.warn(`Error checking existence for ${pathOrUrlStr}: ${err?.message ?? String(err)}`);
|
|
116
|
+
return false;
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
private extractUserMetadata(meta?: Record<string, string>): Record<string, string> | undefined {
|
|
121
|
+
if (!meta) {
|
|
122
|
+
return undefined;
|
|
123
|
+
}
|
|
124
|
+
const { ['Cache-control']: _ignored, ...rest } = meta;
|
|
125
|
+
return Object.keys(rest).length ? rest : undefined;
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
private buildReturnedUrl(key: string, makePublic: boolean): string {
|
|
129
|
+
if (!makePublic) {
|
|
130
|
+
return `s3://${this.bucketName}/${key}`;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
if (this.publicBaseUrl) {
|
|
134
|
+
const base = this.publicBaseUrl.replace(/\/$/, '');
|
|
135
|
+
// key already includes basePath via getFullPath, so do not prefix basePath again
|
|
136
|
+
const full = key.replace(/^\/+/, '');
|
|
137
|
+
return `${base}/${full}`;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// Try to synthesize a URL from endpoint if available (works for public R2 buckets)
|
|
141
|
+
if (this.endpoint) {
|
|
142
|
+
try {
|
|
143
|
+
const url = new URL(this.endpoint);
|
|
144
|
+
return `https://${this.bucketName}.${url.host}/${key}`;
|
|
145
|
+
} catch {
|
|
146
|
+
// fallthrough
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
// Fallback to AWS style URL if region looks valid
|
|
151
|
+
return `https://${this.bucketName}.s3.${this.region}.amazonaws.com/${key}`;
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
private getBucketAndKey(pathOrUrlStr: string): { bucket: string; key: string } {
|
|
155
|
+
if (URL.canParse(pathOrUrlStr)) {
|
|
156
|
+
const url = new URL(pathOrUrlStr);
|
|
157
|
+
if (url.protocol === 's3:') {
|
|
158
|
+
return { bucket: url.host, key: url.pathname.replace(/^\/+/, '') };
|
|
159
|
+
}
|
|
160
|
+
// For https URLs, try to infer virtual-hosted or path-style
|
|
161
|
+
if (url.protocol === 'https:' || url.protocol === 'http:') {
|
|
162
|
+
// If the URL matches the configured publicBaseUrl host, map back to our bucket and key
|
|
163
|
+
if (this.publicBaseUrl && url.host === new URL(this.publicBaseUrl).host) {
|
|
164
|
+
return { bucket: this.bucketName, key: url.pathname.replace(/^\/+/, '') };
|
|
165
|
+
}
|
|
166
|
+
const hostParts = url.host.split('.');
|
|
167
|
+
if (hostParts.length > 3 && (hostParts[1] === 's3' || hostParts[hostParts.length - 2] === 'r2')) {
|
|
168
|
+
// virtual hosted
|
|
169
|
+
return { bucket: hostParts[0], key: url.pathname.replace(/^\/+/, '') };
|
|
170
|
+
} else if (this.endpoint && url.host === new URL(this.endpoint).host) {
|
|
171
|
+
// path-style at custom endpoint
|
|
172
|
+
const [bucket, ...rest] = url.pathname.replace(/^\/+/, '').split('/');
|
|
173
|
+
return { bucket, key: rest.join('/') };
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
// Treat as path
|
|
178
|
+
return { bucket: this.bucketName, key: this.getFullPath(pathOrUrlStr) };
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
private getFullPath(path: string): string {
|
|
182
|
+
const base = normalizeBasePath(this.basePath);
|
|
183
|
+
const rel = path.replace(/^\/+/, '');
|
|
184
|
+
return base ? join(base, rel) : rel;
|
|
185
|
+
}
|
|
186
|
+
}
|
|
@@ -24,6 +24,7 @@ export interface SlasherConfig {
|
|
|
24
24
|
slashOffenseExpirationRounds: number; // Number of rounds after which pending offenses expire
|
|
25
25
|
slashMaxPayloadSize: number; // Maximum number of offenses to include in a single slash payload
|
|
26
26
|
slashGracePeriodL2Slots: number; // Number of L2 slots to wait after genesis before slashing for most offenses
|
|
27
|
+
slashExecuteRoundsLookBack: number; // How many rounds to look back when searching for a round to execute
|
|
27
28
|
}
|
|
28
29
|
|
|
29
30
|
export const SlasherConfigSchema = z.object({
|
|
@@ -44,5 +45,6 @@ export const SlasherConfigSchema = z.object({
|
|
|
44
45
|
slashMaxPayloadSize: z.number(),
|
|
45
46
|
slashGracePeriodL2Slots: z.number(),
|
|
46
47
|
slashBroadcastedInvalidBlockPenalty: schemas.BigInt,
|
|
48
|
+
slashExecuteRoundsLookBack: z.number(),
|
|
47
49
|
slashSelfAllowed: z.boolean().optional(),
|
|
48
50
|
}) satisfies ZodFor<SlasherConfig>;
|
|
@@ -32,11 +32,14 @@ export interface ValidatorClientConfig {
|
|
|
32
32
|
/** Interval between polling for new attestations from peers */
|
|
33
33
|
attestationPollingIntervalMs: number;
|
|
34
34
|
|
|
35
|
-
/**
|
|
35
|
+
/** Whether to re-execute transactions in a block proposal before attesting */
|
|
36
36
|
validatorReexecute: boolean;
|
|
37
37
|
|
|
38
38
|
/** Will re-execute until this many milliseconds are left in the slot */
|
|
39
39
|
validatorReexecuteDeadlineMs: number;
|
|
40
|
+
|
|
41
|
+
/** Whether to always reexecute block proposals, even for non-validator nodes or when out of the current committee */
|
|
42
|
+
alwaysReexecuteBlockProposals?: boolean;
|
|
40
43
|
}
|
|
41
44
|
|
|
42
45
|
export type ValidatorClientFullConfig = ValidatorClientConfig &
|
|
@@ -50,6 +53,7 @@ export const ValidatorClientConfigSchema = z.object({
|
|
|
50
53
|
attestationPollingIntervalMs: z.number().min(0),
|
|
51
54
|
validatorReexecute: z.boolean(),
|
|
52
55
|
validatorReexecuteDeadlineMs: z.number().min(0),
|
|
56
|
+
alwaysReexecuteBlockProposals: z.boolean().optional(),
|
|
53
57
|
}) satisfies ZodFor<Omit<ValidatorClientConfig, 'validatorPrivateKeys'>>;
|
|
54
58
|
|
|
55
59
|
export interface Validator {
|
|
@@ -1,39 +1,10 @@
|
|
|
1
1
|
import { AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED } from '@aztec/constants';
|
|
2
2
|
import { Fr } from '@aztec/foundation/fields';
|
|
3
|
-
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
4
3
|
|
|
5
4
|
import { AvmCircuitPublicInputs } from '../avm/avm_circuit_public_inputs.js';
|
|
6
|
-
import {
|
|
7
|
-
import { VkData } from '../vks/vk_data.js';
|
|
5
|
+
import type { ProofData } from '../proofs/proof_data.js';
|
|
8
6
|
|
|
9
|
-
export
|
|
10
|
-
constructor(
|
|
11
|
-
public publicInputs: AvmCircuitPublicInputs,
|
|
12
|
-
public proof: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>,
|
|
13
|
-
public vkData: VkData,
|
|
14
|
-
) {}
|
|
15
|
-
|
|
16
|
-
static fromBuffer(buffer: Buffer | BufferReader) {
|
|
17
|
-
const reader = BufferReader.asReader(buffer);
|
|
18
|
-
return new AvmProofData(
|
|
19
|
-
reader.readObject(AvmCircuitPublicInputs),
|
|
20
|
-
RecursiveProof.fromBuffer(reader),
|
|
21
|
-
reader.readObject(VkData),
|
|
22
|
-
);
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
toBuffer() {
|
|
26
|
-
return serializeToBuffer(this.publicInputs, this.proof, this.vkData);
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
static empty() {
|
|
30
|
-
return new AvmProofData(
|
|
31
|
-
AvmCircuitPublicInputs.empty(),
|
|
32
|
-
makeEmptyRecursiveProof(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED),
|
|
33
|
-
VkData.empty(),
|
|
34
|
-
);
|
|
35
|
-
}
|
|
36
|
-
}
|
|
7
|
+
export type AvmProofData = ProofData<AvmCircuitPublicInputs, typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>;
|
|
37
8
|
|
|
38
9
|
// TODO(#14234)[Unconditional PIs validation]: remove this function.
|
|
39
10
|
export function enhanceProofWithPiValidationFlag(proof: Fr[], skipPublicInputsValidation: boolean): Fr[] {
|
|
@@ -4,15 +4,14 @@ import { makeTuple } from '@aztec/foundation/array';
|
|
|
4
4
|
import { Fr } from '@aztec/foundation/fields';
|
|
5
5
|
import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
6
6
|
import { bufferToHex, hexToBuffer } from '@aztec/foundation/string';
|
|
7
|
-
import { MembershipWitness } from '@aztec/foundation/trees';
|
|
8
7
|
import type { FieldsOf } from '@aztec/foundation/types';
|
|
9
8
|
|
|
10
|
-
import { PublicDataHint } from '../avm/public_data_hint.js';
|
|
11
9
|
import { ContractClassLogFields } from '../logs/index.js';
|
|
12
|
-
import { AppendOnlyTreeSnapshot } from '../trees/
|
|
10
|
+
import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js';
|
|
11
|
+
import { PublicDataTreeLeafPreimage } from '../trees/public_data_leaf.js';
|
|
13
12
|
import { PartialStateReference } from '../tx/partial_state_reference.js';
|
|
14
13
|
import { BlockConstantData } from './block_constant_data.js';
|
|
15
|
-
import {
|
|
14
|
+
import { TreeSnapshotDiffHints } from './tree_snapshot_diff_hints.js';
|
|
16
15
|
|
|
17
16
|
export type BaseRollupHints = PrivateBaseRollupHints | PublicBaseRollupHints;
|
|
18
17
|
|
|
@@ -29,15 +28,15 @@ export class PrivateBaseRollupHints {
|
|
|
29
28
|
/**
|
|
30
29
|
* Hints used while proving state diff validity.
|
|
31
30
|
*/
|
|
32
|
-
public
|
|
31
|
+
public treeSnapshotDiffHints: TreeSnapshotDiffHints,
|
|
33
32
|
/**
|
|
34
|
-
* Public data
|
|
33
|
+
* Public data tree leaf preimage for accessing the balance of the fee payer.
|
|
35
34
|
*/
|
|
36
|
-
public
|
|
35
|
+
public feePayerBalanceLeafPreimage: PublicDataTreeLeafPreimage,
|
|
37
36
|
/**
|
|
38
37
|
* Membership witnesses of blocks referred by each of the 2 kernels.
|
|
39
38
|
*/
|
|
40
|
-
public
|
|
39
|
+
public anchorBlockArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
|
|
41
40
|
/**
|
|
42
41
|
* Preimages to the kernel's contractClassLogsHashes.
|
|
43
42
|
*/
|
|
@@ -56,9 +55,9 @@ export class PrivateBaseRollupHints {
|
|
|
56
55
|
return [
|
|
57
56
|
fields.start,
|
|
58
57
|
fields.startSpongeBlob,
|
|
59
|
-
fields.
|
|
60
|
-
fields.
|
|
61
|
-
fields.
|
|
58
|
+
fields.treeSnapshotDiffHints,
|
|
59
|
+
fields.feePayerBalanceLeafPreimage,
|
|
60
|
+
fields.anchorBlockArchiveSiblingPath,
|
|
62
61
|
fields.contractClassLogsFields,
|
|
63
62
|
fields.constants,
|
|
64
63
|
] as const;
|
|
@@ -85,9 +84,9 @@ export class PrivateBaseRollupHints {
|
|
|
85
84
|
return new PrivateBaseRollupHints(
|
|
86
85
|
reader.readObject(PartialStateReference),
|
|
87
86
|
reader.readObject(SpongeBlob),
|
|
88
|
-
reader.readObject(
|
|
89
|
-
reader.readObject(
|
|
90
|
-
|
|
87
|
+
reader.readObject(TreeSnapshotDiffHints),
|
|
88
|
+
reader.readObject(PublicDataTreeLeafPreimage),
|
|
89
|
+
reader.readArray(ARCHIVE_HEIGHT, Fr),
|
|
91
90
|
makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, () => reader.readObject(ContractClassLogFields)),
|
|
92
91
|
reader.readObject(BlockConstantData),
|
|
93
92
|
);
|
|
@@ -101,9 +100,9 @@ export class PrivateBaseRollupHints {
|
|
|
101
100
|
return new PrivateBaseRollupHints(
|
|
102
101
|
PartialStateReference.empty(),
|
|
103
102
|
SpongeBlob.empty(),
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
103
|
+
TreeSnapshotDiffHints.empty(),
|
|
104
|
+
PublicDataTreeLeafPreimage.empty(),
|
|
105
|
+
makeTuple(ARCHIVE_HEIGHT, Fr.zero),
|
|
107
106
|
makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, ContractClassLogFields.empty),
|
|
108
107
|
BlockConstantData.empty(),
|
|
109
108
|
);
|
|
@@ -123,7 +122,7 @@ export class PublicBaseRollupHints {
|
|
|
123
122
|
/**
|
|
124
123
|
* Membership witnesses of blocks referred by each of the 2 kernels.
|
|
125
124
|
*/
|
|
126
|
-
public
|
|
125
|
+
public anchorBlockArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
|
|
127
126
|
/**
|
|
128
127
|
* Preimages to the kernel's contractClassLogsHashes.
|
|
129
128
|
*/
|
|
@@ -142,7 +141,7 @@ export class PublicBaseRollupHints {
|
|
|
142
141
|
return [
|
|
143
142
|
fields.startSpongeBlob,
|
|
144
143
|
fields.lastArchive,
|
|
145
|
-
fields.
|
|
144
|
+
fields.anchorBlockArchiveSiblingPath,
|
|
146
145
|
fields.contractClassLogsFields,
|
|
147
146
|
fields.proverId,
|
|
148
147
|
] as const;
|
|
@@ -169,7 +168,7 @@ export class PublicBaseRollupHints {
|
|
|
169
168
|
return new PublicBaseRollupHints(
|
|
170
169
|
reader.readObject(SpongeBlob),
|
|
171
170
|
reader.readObject(AppendOnlyTreeSnapshot),
|
|
172
|
-
|
|
171
|
+
reader.readArray(ARCHIVE_HEIGHT, Fr),
|
|
173
172
|
makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, () => reader.readObject(ContractClassLogFields)),
|
|
174
173
|
reader.readObject(Fr),
|
|
175
174
|
);
|
|
@@ -183,7 +182,7 @@ export class PublicBaseRollupHints {
|
|
|
183
182
|
return new PublicBaseRollupHints(
|
|
184
183
|
SpongeBlob.empty(),
|
|
185
184
|
AppendOnlyTreeSnapshot.empty(),
|
|
186
|
-
|
|
185
|
+
makeTuple(ARCHIVE_HEIGHT, Fr.zero),
|
|
187
186
|
makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, ContractClassLogFields.empty),
|
|
188
187
|
Fr.ZERO,
|
|
189
188
|
);
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { SpongeBlob } from '@aztec/blob-lib/types';
|
|
2
|
-
import { ARCHIVE_HEIGHT,
|
|
2
|
+
import { ARCHIVE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH } from '@aztec/constants';
|
|
3
3
|
import { Fr } from '@aztec/foundation/fields';
|
|
4
4
|
import { bufferSchemaFor } from '@aztec/foundation/schemas';
|
|
5
5
|
import { BufferReader, type Tuple, bigintToUInt64BE, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
@@ -28,9 +28,9 @@ export class BlockRootFirstRollupPrivateInputs {
|
|
|
28
28
|
*/
|
|
29
29
|
public previousL1ToL2: AppendOnlyTreeSnapshot,
|
|
30
30
|
/**
|
|
31
|
-
* Hint for inserting the new l1 to l2 message subtree into `previousL1ToL2`.
|
|
31
|
+
* Hint for inserting the new l1 to l2 message subtree root into `previousL1ToL2`.
|
|
32
32
|
*/
|
|
33
|
-
public
|
|
33
|
+
public newL1ToL2MessageSubtreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH>,
|
|
34
34
|
/**
|
|
35
35
|
* Hint for inserting the new block hash to the last archive.
|
|
36
36
|
*/
|
|
@@ -46,7 +46,7 @@ export class BlockRootFirstRollupPrivateInputs {
|
|
|
46
46
|
fields.l1ToL2Roots,
|
|
47
47
|
fields.previousRollups,
|
|
48
48
|
fields.previousL1ToL2,
|
|
49
|
-
fields.
|
|
49
|
+
fields.newL1ToL2MessageSubtreeRootSiblingPath,
|
|
50
50
|
fields.newArchiveSiblingPath,
|
|
51
51
|
] as const;
|
|
52
52
|
}
|
|
@@ -61,7 +61,7 @@ export class BlockRootFirstRollupPrivateInputs {
|
|
|
61
61
|
ProofData.fromBuffer(reader, ParityPublicInputs),
|
|
62
62
|
[ProofData.fromBuffer(reader, TxRollupPublicInputs), ProofData.fromBuffer(reader, TxRollupPublicInputs)],
|
|
63
63
|
AppendOnlyTreeSnapshot.fromBuffer(reader),
|
|
64
|
-
reader.readArray(
|
|
64
|
+
reader.readArray(L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr),
|
|
65
65
|
reader.readArray(ARCHIVE_HEIGHT, Fr),
|
|
66
66
|
);
|
|
67
67
|
}
|
|
@@ -90,9 +90,9 @@ export class BlockRootSingleTxFirstRollupPrivateInputs {
|
|
|
90
90
|
*/
|
|
91
91
|
public previousL1ToL2: AppendOnlyTreeSnapshot,
|
|
92
92
|
/**
|
|
93
|
-
* Hint for inserting the new l1 to l2 message subtree.
|
|
93
|
+
* Hint for inserting the new l1 to l2 message subtree root.
|
|
94
94
|
*/
|
|
95
|
-
public
|
|
95
|
+
public newL1ToL2MessageSubtreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH>,
|
|
96
96
|
/**
|
|
97
97
|
* Hint for inserting the new block hash to the last archive.
|
|
98
98
|
*/
|
|
@@ -110,7 +110,7 @@ export class BlockRootSingleTxFirstRollupPrivateInputs {
|
|
|
110
110
|
fields.l1ToL2Roots,
|
|
111
111
|
fields.previousRollup,
|
|
112
112
|
fields.previousL1ToL2,
|
|
113
|
-
fields.
|
|
113
|
+
fields.newL1ToL2MessageSubtreeRootSiblingPath,
|
|
114
114
|
fields.newArchiveSiblingPath,
|
|
115
115
|
] as const;
|
|
116
116
|
}
|
|
@@ -125,7 +125,7 @@ export class BlockRootSingleTxFirstRollupPrivateInputs {
|
|
|
125
125
|
ProofData.fromBuffer(reader, ParityPublicInputs),
|
|
126
126
|
ProofData.fromBuffer(reader, TxRollupPublicInputs),
|
|
127
127
|
AppendOnlyTreeSnapshot.fromBuffer(reader),
|
|
128
|
-
reader.readArray(
|
|
128
|
+
reader.readArray(L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr),
|
|
129
129
|
reader.readArray(ARCHIVE_HEIGHT, Fr),
|
|
130
130
|
);
|
|
131
131
|
}
|
|
@@ -167,9 +167,9 @@ export class BlockRootEmptyTxFirstRollupPrivateInputs {
|
|
|
167
167
|
*/
|
|
168
168
|
public timestamp: UInt64,
|
|
169
169
|
/**
|
|
170
|
-
* Hint for inserting the new l1 to l2 message subtree.
|
|
170
|
+
* Hint for inserting the new l1 to l2 message subtree root.
|
|
171
171
|
*/
|
|
172
|
-
public
|
|
172
|
+
public newL1ToL2MessageSubtreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH>,
|
|
173
173
|
/**
|
|
174
174
|
* Hint for inserting the new block hash to the last archive.
|
|
175
175
|
*/
|
|
@@ -188,7 +188,7 @@ export class BlockRootEmptyTxFirstRollupPrivateInputs {
|
|
|
188
188
|
fields.constants,
|
|
189
189
|
fields.startSpongeBlob,
|
|
190
190
|
fields.timestamp,
|
|
191
|
-
fields.
|
|
191
|
+
fields.newL1ToL2MessageSubtreeRootSiblingPath,
|
|
192
192
|
fields.newArchiveSiblingPath,
|
|
193
193
|
] as const;
|
|
194
194
|
}
|
|
@@ -201,7 +201,7 @@ export class BlockRootEmptyTxFirstRollupPrivateInputs {
|
|
|
201
201
|
this.constants,
|
|
202
202
|
this.startSpongeBlob,
|
|
203
203
|
bigintToUInt64BE(this.timestamp),
|
|
204
|
-
this.
|
|
204
|
+
this.newL1ToL2MessageSubtreeRootSiblingPath,
|
|
205
205
|
this.newArchiveSiblingPath,
|
|
206
206
|
]);
|
|
207
207
|
}
|
|
@@ -215,7 +215,7 @@ export class BlockRootEmptyTxFirstRollupPrivateInputs {
|
|
|
215
215
|
CheckpointConstantData.fromBuffer(reader),
|
|
216
216
|
SpongeBlob.fromBuffer(reader),
|
|
217
217
|
reader.readUInt64(),
|
|
218
|
-
reader.readArray(
|
|
218
|
+
reader.readArray(L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr),
|
|
219
219
|
reader.readArray(ARCHIVE_HEIGHT, Fr),
|
|
220
220
|
);
|
|
221
221
|
}
|
package/src/rollup/index.ts
CHANGED
|
@@ -15,6 +15,6 @@ export * from './public_tx_base_rollup_private_inputs.js';
|
|
|
15
15
|
export * from './public_tube_private_inputs.js';
|
|
16
16
|
export * from './root_rollup_private_inputs.js';
|
|
17
17
|
export * from './root_rollup_public_inputs.js';
|
|
18
|
-
export * from './
|
|
18
|
+
export * from './tree_snapshot_diff_hints.js';
|
|
19
19
|
export * from './tx_merge_rollup_private_inputs.js';
|
|
20
20
|
export * from './tx_rollup_public_inputs.js';
|
|
@@ -3,9 +3,10 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
|
|
|
3
3
|
import { bufferToHex, hexToBuffer } from '@aztec/foundation/string';
|
|
4
4
|
import type { FieldsOf } from '@aztec/foundation/types';
|
|
5
5
|
|
|
6
|
+
import { AvmCircuitPublicInputs } from '../avm/avm_circuit_public_inputs.js';
|
|
6
7
|
import { PrivateToPublicKernelCircuitPublicInputs } from '../kernel/private_to_public_kernel_circuit_public_inputs.js';
|
|
7
8
|
import { ProofData, type RollupHonkProofData } from '../proofs/proof_data.js';
|
|
8
|
-
import { AvmProofData } from './avm_proof_data.js';
|
|
9
|
+
import type { AvmProofData } from './avm_proof_data.js';
|
|
9
10
|
import { PublicBaseRollupHints } from './base_rollup_hints.js';
|
|
10
11
|
|
|
11
12
|
export class PublicTxBaseRollupPrivateInputs {
|
|
@@ -27,7 +28,7 @@ export class PublicTxBaseRollupPrivateInputs {
|
|
|
27
28
|
const reader = BufferReader.asReader(buffer);
|
|
28
29
|
return new PublicTxBaseRollupPrivateInputs(
|
|
29
30
|
ProofData.fromBuffer(reader, PrivateToPublicKernelCircuitPublicInputs),
|
|
30
|
-
|
|
31
|
+
ProofData.fromBuffer(reader, AvmCircuitPublicInputs),
|
|
31
32
|
reader.readObject(PublicBaseRollupHints),
|
|
32
33
|
);
|
|
33
34
|
}
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import {
|
|
2
2
|
MAX_NULLIFIERS_PER_TX,
|
|
3
|
-
|
|
4
|
-
|
|
3
|
+
NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
|
|
4
|
+
NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
|
|
5
5
|
NULLIFIER_TREE_HEIGHT,
|
|
6
6
|
PUBLIC_DATA_TREE_HEIGHT,
|
|
7
7
|
} from '@aztec/constants';
|
|
@@ -11,13 +11,17 @@ import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/s
|
|
|
11
11
|
import { MembershipWitness } from '@aztec/foundation/trees';
|
|
12
12
|
import type { FieldsOf } from '@aztec/foundation/types';
|
|
13
13
|
|
|
14
|
-
import { NullifierLeafPreimage
|
|
14
|
+
import { NullifierLeafPreimage } from '../trees/index.js';
|
|
15
15
|
|
|
16
16
|
/**
|
|
17
17
|
* Hints used while proving state diff validity for the private base rollup.
|
|
18
18
|
*/
|
|
19
|
-
export class
|
|
19
|
+
export class TreeSnapshotDiffHints {
|
|
20
20
|
constructor(
|
|
21
|
+
/**
|
|
22
|
+
* Sibling path "pointing to" where the new note hash subtree should be inserted into the note hash tree.
|
|
23
|
+
*/
|
|
24
|
+
public noteHashSubtreeRootSiblingPath: Tuple<Fr, typeof NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH>,
|
|
21
25
|
/**
|
|
22
26
|
* The nullifiers which need to be updated to perform the batch insertion of the new nullifiers.
|
|
23
27
|
* See `StandardIndexedTree.batchInsert` function for more details.
|
|
@@ -39,44 +43,29 @@ export class PrivateBaseStateDiffHints {
|
|
|
39
43
|
* The indexes of the sorted nullifiers to the original ones.
|
|
40
44
|
*/
|
|
41
45
|
public sortedNullifierIndexes: Tuple<number, typeof MAX_NULLIFIERS_PER_TX>,
|
|
42
|
-
/**
|
|
43
|
-
* Sibling path "pointing to" where the new note hash subtree should be inserted into the note hash tree.
|
|
44
|
-
*/
|
|
45
|
-
public noteHashSubtreeSiblingPath: Tuple<Fr, typeof NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH>,
|
|
46
46
|
/**
|
|
47
47
|
* Sibling path "pointing to" where the new nullifiers subtree should be inserted into the nullifier tree.
|
|
48
48
|
*/
|
|
49
|
-
public
|
|
50
|
-
|
|
51
|
-
/**
|
|
52
|
-
* Low leaf for the fee write in the public data tree.
|
|
53
|
-
*/
|
|
54
|
-
public feeWriteLowLeafPreimage: PublicDataTreeLeafPreimage,
|
|
55
|
-
/**
|
|
56
|
-
* Membership witness for the low leaf for the fee write in the public data tree.
|
|
57
|
-
*/
|
|
58
|
-
public feeWriteLowLeafMembershipWitness: MembershipWitness<typeof PUBLIC_DATA_TREE_HEIGHT>,
|
|
49
|
+
public nullifierSubtreeRootSiblingPath: Tuple<Fr, typeof NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH>,
|
|
59
50
|
/**
|
|
60
|
-
*
|
|
51
|
+
* Membership witness for the fee payer's balance leaf in the public data tree.
|
|
61
52
|
*/
|
|
62
|
-
public
|
|
53
|
+
public feePayerBalanceMembershipWitness: MembershipWitness<typeof PUBLIC_DATA_TREE_HEIGHT>,
|
|
63
54
|
) {}
|
|
64
55
|
|
|
65
|
-
static from(fields: FieldsOf<
|
|
66
|
-
return new
|
|
56
|
+
static from(fields: FieldsOf<TreeSnapshotDiffHints>): TreeSnapshotDiffHints {
|
|
57
|
+
return new TreeSnapshotDiffHints(...TreeSnapshotDiffHints.getFields(fields));
|
|
67
58
|
}
|
|
68
59
|
|
|
69
|
-
static getFields(fields: FieldsOf<
|
|
60
|
+
static getFields(fields: FieldsOf<TreeSnapshotDiffHints>) {
|
|
70
61
|
return [
|
|
62
|
+
fields.noteHashSubtreeRootSiblingPath,
|
|
71
63
|
fields.nullifierPredecessorPreimages,
|
|
72
64
|
fields.nullifierPredecessorMembershipWitnesses,
|
|
73
65
|
fields.sortedNullifiers,
|
|
74
66
|
fields.sortedNullifierIndexes,
|
|
75
|
-
fields.
|
|
76
|
-
fields.
|
|
77
|
-
fields.feeWriteLowLeafPreimage,
|
|
78
|
-
fields.feeWriteLowLeafMembershipWitness,
|
|
79
|
-
fields.feeWriteSiblingPath,
|
|
67
|
+
fields.nullifierSubtreeRootSiblingPath,
|
|
68
|
+
fields.feePayerBalanceMembershipWitness,
|
|
80
69
|
] as const;
|
|
81
70
|
}
|
|
82
71
|
|
|
@@ -85,42 +74,38 @@ export class PrivateBaseStateDiffHints {
|
|
|
85
74
|
* @returns A buffer of the serialized state diff hints.
|
|
86
75
|
*/
|
|
87
76
|
toBuffer(): Buffer {
|
|
88
|
-
return serializeToBuffer(...
|
|
77
|
+
return serializeToBuffer(...TreeSnapshotDiffHints.getFields(this));
|
|
89
78
|
}
|
|
90
79
|
|
|
91
80
|
/**
|
|
92
81
|
* Deserializes the state diff hints from a buffer.
|
|
93
82
|
* @param buffer - A buffer to deserialize from.
|
|
94
|
-
* @returns A new
|
|
83
|
+
* @returns A new TreeSnapshotDiffHints instance.
|
|
95
84
|
*/
|
|
96
|
-
static fromBuffer(buffer: Buffer | BufferReader):
|
|
85
|
+
static fromBuffer(buffer: Buffer | BufferReader): TreeSnapshotDiffHints {
|
|
97
86
|
const reader = BufferReader.asReader(buffer);
|
|
98
|
-
return new
|
|
87
|
+
return new TreeSnapshotDiffHints(
|
|
88
|
+
reader.readArray(NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr),
|
|
99
89
|
reader.readArray(MAX_NULLIFIERS_PER_TX, NullifierLeafPreimage),
|
|
100
90
|
reader.readArray(MAX_NULLIFIERS_PER_TX, {
|
|
101
91
|
fromBuffer: buffer => MembershipWitness.fromBuffer(buffer, NULLIFIER_TREE_HEIGHT),
|
|
102
92
|
}),
|
|
103
93
|
reader.readArray(MAX_NULLIFIERS_PER_TX, Fr),
|
|
104
94
|
reader.readNumbers(MAX_NULLIFIERS_PER_TX),
|
|
105
|
-
reader.readArray(
|
|
106
|
-
reader.readArray(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, Fr),
|
|
107
|
-
reader.readObject(PublicDataTreeLeafPreimage),
|
|
95
|
+
reader.readArray(NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr),
|
|
108
96
|
MembershipWitness.fromBuffer(reader, PUBLIC_DATA_TREE_HEIGHT),
|
|
109
|
-
reader.readArray(PUBLIC_DATA_TREE_HEIGHT, Fr),
|
|
110
97
|
);
|
|
111
98
|
}
|
|
112
99
|
|
|
113
100
|
static empty() {
|
|
114
|
-
return new
|
|
101
|
+
return new TreeSnapshotDiffHints(
|
|
102
|
+
makeTuple(NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr.zero),
|
|
115
103
|
makeTuple(MAX_NULLIFIERS_PER_TX, NullifierLeafPreimage.empty),
|
|
116
104
|
makeTuple(MAX_NULLIFIERS_PER_TX, () => MembershipWitness.empty(NULLIFIER_TREE_HEIGHT)),
|
|
117
105
|
makeTuple(MAX_NULLIFIERS_PER_TX, Fr.zero),
|
|
118
106
|
makeTuple(MAX_NULLIFIERS_PER_TX, () => 0),
|
|
119
|
-
makeTuple(
|
|
120
|
-
makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, Fr.zero),
|
|
121
|
-
PublicDataTreeLeafPreimage.empty(),
|
|
107
|
+
makeTuple(NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH, Fr.zero),
|
|
122
108
|
MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT),
|
|
123
|
-
makeTuple(PUBLIC_DATA_TREE_HEIGHT, Fr.zero),
|
|
124
109
|
);
|
|
125
110
|
}
|
|
126
111
|
}
|