@aztec/end-to-end 3.0.0-rc.5 → 4.0.0-nightly.20260107

This diff shows the contents of publicly released versions of this package as they appear in their public registries. It is provided for informational purposes only and reflects the changes between the two published versions.
Files changed (41)
  1. package/dest/bench/client_flows/data_extractor.js +3 -1
  2. package/dest/bench/utils.d.ts +2 -2
  3. package/dest/bench/utils.d.ts.map +1 -1
  4. package/dest/bench/utils.js +10 -6
  5. package/dest/e2e_epochs/epochs_test.d.ts +2 -1
  6. package/dest/e2e_epochs/epochs_test.d.ts.map +1 -1
  7. package/dest/e2e_epochs/epochs_test.js +8 -5
  8. package/dest/e2e_fees/fees_test.d.ts +3 -1
  9. package/dest/e2e_fees/fees_test.d.ts.map +1 -1
  10. package/dest/e2e_fees/fees_test.js +4 -2
  11. package/dest/e2e_p2p/p2p_network.js +1 -1
  12. package/dest/e2e_p2p/shared.d.ts +1 -1
  13. package/dest/e2e_p2p/shared.d.ts.map +1 -1
  14. package/dest/e2e_p2p/shared.js +4 -4
  15. package/dest/fixtures/e2e_prover_test.d.ts +1 -3
  16. package/dest/fixtures/e2e_prover_test.d.ts.map +1 -1
  17. package/dest/fixtures/e2e_prover_test.js +6 -9
  18. package/dest/fixtures/fixtures.d.ts +2 -3
  19. package/dest/fixtures/fixtures.d.ts.map +1 -1
  20. package/dest/fixtures/fixtures.js +1 -2
  21. package/dest/fixtures/snapshot_manager.d.ts +1 -3
  22. package/dest/fixtures/snapshot_manager.d.ts.map +1 -1
  23. package/dest/fixtures/snapshot_manager.js +5 -32
  24. package/dest/fixtures/utils.d.ts +7 -4
  25. package/dest/fixtures/utils.d.ts.map +1 -1
  26. package/dest/fixtures/utils.js +16 -24
  27. package/dest/spartan/utils.d.ts +37 -6
  28. package/dest/spartan/utils.d.ts.map +1 -1
  29. package/dest/spartan/utils.js +137 -27
  30. package/package.json +38 -38
  31. package/src/bench/client_flows/data_extractor.ts +1 -1
  32. package/src/bench/utils.ts +11 -7
  33. package/src/e2e_epochs/epochs_test.ts +20 -7
  34. package/src/e2e_fees/fees_test.ts +5 -4
  35. package/src/e2e_p2p/p2p_network.ts +1 -1
  36. package/src/e2e_p2p/shared.ts +6 -5
  37. package/src/fixtures/e2e_prover_test.ts +4 -7
  38. package/src/fixtures/fixtures.ts +1 -3
  39. package/src/fixtures/snapshot_manager.ts +7 -41
  40. package/src/fixtures/utils.ts +15 -28
  41. package/src/spartan/utils.ts +192 -18
@@ -18,6 +18,7 @@ const testConfigSchema = z.object({
  L1_RPC_URLS_JSON: z.string().optional(),
  L1_ACCOUNT_MNEMONIC: z.string().optional(),
  AZTEC_SLOT_DURATION: z.coerce.number().optional().default(24),
+ AZTEC_EPOCH_DURATION: z.coerce.number().optional().default(32),
  AZTEC_PROOF_SUBMISSION_WINDOW: z.coerce.number().optional().default(5)
  });
  export function setupEnvironment(env) {
@@ -185,9 +186,11 @@ export async function deleteResourceByName({ resource, namespace, name, force =
  return stdout;
  }
  export async function deleteResourceByLabel({ resource, namespace, label, timeout = '5m', force = false }) {
- // Check if the resource type exists before attempting to delete
  try {
- await execAsync(`kubectl api-resources --api-group="" --no-headers -o name | grep -q "^${resource}$" || kubectl api-resources --no-headers -o name | grep -q "^${resource}$"`);
+ // Match both plain and group-qualified names (e.g., "podchaos" or "podchaos.chaos-mesh.org")
+ const escaped = resource.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&');
+ const regex = `(^|\\.)${escaped}(\\.|$)`;
+ await execAsync(`kubectl api-resources --no-headers -o name | grep -Eq '${regex}'`);
  } catch (error) {
  logger.warn(`Resource type '${resource}' not found in cluster, skipping deletion ${error}`);
  return '';
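The replacement check above builds a regular expression from the resource name before grepping the `kubectl api-resources` output. A minimal standalone sketch of how that pattern behaves (the `escapeForRegex` helper and the sample names are illustrative, not part of the package):

```ts
// Mirrors the inline replace() above: escape regex metacharacters in the resource name.
const escapeForRegex = (s: string) => s.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&');

const resource = 'podchaos';
const pattern = new RegExp(`(^|\\.)${escapeForRegex(resource)}(\\.|$)`);

console.log(pattern.test('podchaos'));                  // true: plain name
console.log(pattern.test('podchaos.chaos-mesh.org'));   // true: group-qualified name
console.log(pattern.test('iopodchaos.chaos-mesh.org')); // false: no partial matches
```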
@@ -203,6 +206,31 @@ export async function waitForResourceByLabel({ resource, label, namespace, condi
  const { stdout } = await execAsync(command);
  return stdout;
  }
+ export async function waitForResourceByName({ resource, name, namespace, condition = 'Ready', timeout = '10m' }) {
+ const command = `kubectl wait ${resource}/${name} --for=condition=${condition} -n ${namespace} --timeout=${timeout}`;
+ logger.info(`command: ${command}`);
+ const { stdout } = await execAsync(command);
+ return stdout;
+ }
+ export async function waitForResourcesByName({ resource, names, namespace, condition = 'Ready', timeout = '10m' }) {
+ if (!names.length) {
+ throw new Error(`No ${resource} names provided to waitForResourcesByName`);
+ }
+ // Wait all in parallel; if any fails, surface which one.
+ await Promise.all(names.map(async (name)=>{
+ try {
+ await waitForResourceByName({
+ resource,
+ name,
+ namespace,
+ condition,
+ timeout
+ });
+ } catch (err) {
+ throw new Error(`Failed waiting for ${resource}/${name} condition=${condition} timeout=${timeout} namespace=${namespace}: ${String(err)}`);
+ }
+ }));
+ }
  export function getChartDir(spartanDir, chartName) {
  return path.join(spartanDir.trim(), chartName);
  }
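A hedged usage sketch of the new helper (the import path, pod names and namespace are assumptions for illustration; only the function and its options come from the diff):

```ts
import { waitForResourcesByName } from '@aztec/end-to-end/dest/spartan/utils.js'; // assumed path

// Wait for several named pods in parallel; any failure is rethrown with the pod name attached.
await waitForResourcesByName({
  resource: 'pod',
  names: ['smoke-validator-0', 'smoke-validator-1'],
  namespace: 'smoke',
  condition: 'Ready',
  timeout: '10m',
});
```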
@@ -224,6 +252,30 @@ async function execHelmCommand(args) {
  const { stdout } = await execAsync(helmCommand);
  return stdout;
  }
+ async function getHelmReleaseStatus(instanceName, namespace) {
+ try {
+ const { stdout } = await execAsync(`helm list --namespace ${namespace} --all --filter '^${instanceName}$' --output json | cat`);
+ const parsed = JSON.parse(stdout);
+ const row = parsed.find((r)=>r.name === instanceName);
+ return row?.status;
+ } catch {
+ return undefined;
+ }
+ }
+ async function forceDeleteHelmReleaseRecord(instanceName, namespace, logger) {
+ const labelSelector = `owner=helm,name=${instanceName}`;
+ const cmd = `kubectl delete secret -n ${namespace} -l ${labelSelector} --ignore-not-found=true`;
+ logger.warn(`Force deleting Helm release record: ${cmd}`);
+ await execAsync(cmd).catch(()=>undefined);
+ }
+ async function hasDeployedHelmRelease(instanceName, namespace) {
+ try {
+ const status = await getHelmReleaseStatus(instanceName, namespace);
+ return status?.toLowerCase() === 'deployed';
+ } catch {
+ return false;
+ }
+ }
  export async function uninstallChaosMesh(instanceName, namespace, logger) {
  // uninstall the helm chart if it exists
  logger.info(`Uninstalling helm chart ${instanceName}`);
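For context, `getHelmReleaseStatus` parses the JSON emitted by `helm list --output json`, which is an array of release records with `name` and `status` fields. A minimal sketch of that shape and the resulting check (the type and function names here are illustrative, not the package's):

```ts
// Abridged shape of one `helm list --output json` entry.
type HelmListRow = { name: string; namespace: string; status: string };

// --reuse-values is only safe when the release has actually deployed before.
function canReuseValues(rows: HelmListRow[], release: string): boolean {
  const row = rows.find(r => r.name === release);
  return row?.status.toLowerCase() === 'deployed';
}

// e.g. canReuseValues([{ name: 'transfer-bot', namespace: 'smoke', status: 'failed' }], 'transfer-bot') === false
```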
@@ -295,51 +347,66 @@ export function applyProverFailure({ namespace, spartanDir, durationSeconds, log
  logger
  });
  }
- export function applyProverKill({ namespace, spartanDir, logger }) {
+ export function applyValidatorFailure({ namespace, spartanDir, logger, values, instanceName }) {
+ return installChaosMeshChart({
+ instanceName: instanceName ?? 'validator-failure',
+ targetNamespace: namespace,
+ valuesFile: 'validator-failure.yaml',
+ helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
+ values,
+ logger
+ });
+ }
+ export function applyProverKill({ namespace, spartanDir, logger, values }) {
  return installChaosMeshChart({
  instanceName: 'prover-kill',
  targetNamespace: namespace,
  valuesFile: 'prover-kill.yaml',
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
+ chaosMeshNamespace: namespace,
  clean: true,
- logger
+ logger,
+ values
  });
  }
- export function applyProverBrokerKill({ namespace, spartanDir, logger }) {
+ export function applyProverBrokerKill({ namespace, spartanDir, logger, values }) {
  return installChaosMeshChart({
  instanceName: 'prover-broker-kill',
  targetNamespace: namespace,
  valuesFile: 'prover-broker-kill.yaml',
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
  clean: true,
- logger
+ logger,
+ values
  });
  }
- export function applyBootNodeFailure({ namespace, spartanDir, durationSeconds, logger }) {
+ export function applyBootNodeFailure({ instanceName = 'boot-node-failure', namespace, spartanDir, durationSeconds, logger, values }) {
  return installChaosMeshChart({
- instanceName: 'boot-node-failure',
+ instanceName,
  targetNamespace: namespace,
  valuesFile: 'boot-node-failure.yaml',
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
  values: {
- 'bootNodeFailure.duration': `${durationSeconds}s`
+ 'bootNodeFailure.duration': `${durationSeconds}s`,
+ ...values ?? {}
  },
  logger
  });
  }
- export function applyValidatorKill({ namespace, spartanDir, logger, values }) {
+ export function applyValidatorKill({ instanceName = 'validator-kill', namespace, spartanDir, logger, values, clean = true }) {
  return installChaosMeshChart({
- instanceName: 'validator-kill',
+ instanceName: instanceName ?? 'validator-kill',
  targetNamespace: namespace,
  valuesFile: 'validator-kill.yaml',
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
+ clean,
  logger,
  values
  });
  }
- export function applyNetworkShaping({ valuesFile, namespace, spartanDir, logger }) {
+ export function applyNetworkShaping({ instanceName = 'network-shaping', valuesFile, namespace, spartanDir, logger }) {
  return installChaosMeshChart({
- instanceName: 'network-shaping',
+ instanceName,
  targetNamespace: namespace,
  valuesFile,
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
@@ -402,7 +469,12 @@ export async function restartBot(namespace, logger) {
  // Provide L1 execution RPC for bridging fee juice
  'bot.node.env.ETHEREUM_HOSTS': `http://${namespace}-eth-execution.${namespace}.svc.cluster.local:8545`,
  // Provide L1 mnemonic for bridging (falls back to labs mnemonic)
- 'bot.node.env.BOT_L1_MNEMONIC': mnemonic
+ 'bot.node.env.BOT_L1_MNEMONIC': mnemonic,
+ // The bot does not need Kubernetes API access. Disable RBAC + ServiceAccount creation so the chart
+ // can be installed by users without cluster-scoped RBAC permissions.
+ 'bot.rbac.create': false,
+ 'bot.serviceAccount.create': false,
+ 'bot.serviceAccount.name': 'default'
  };
  // Ensure we derive a funded L1 key (index 0 is funded on anvil default mnemonic)
  if (mnemonicStartIndex === undefined) {
@@ -425,7 +497,7 @@ export async function restartBot(namespace, logger) {
  let tag = tagFromEnv;
  if (!repository || !tag) {
  try {
- const { stdout } = await execAsync(`kubectl get pods -l app.kubernetes.io/component=validator -n ${namespace} -o jsonpath='{.items[0].spec.containers[?(@.name=="aztec")].image}' | cat`);
+ const { stdout } = await execAsync(`kubectl get pods -l app.kubernetes.io/name=validator -n ${namespace} -o jsonpath='{.items[0].spec.containers[?(@.name=="aztec")].image}' | cat`);
  const image = stdout.trim().replace(/^'|'$/g, '');
  if (image && image.includes(':')) {
  const lastColon = image.lastIndexOf(':');
@@ -443,6 +515,22 @@ export async function restartBot(namespace, logger) {
  if (mnemonicStartIndex !== undefined) {
  values['bot.mnemonicStartIndex'] = typeof mnemonicStartIndex === 'string' ? mnemonicStartIndex : Number(mnemonicStartIndex);
  }
+ // If a previous install attempt left the release in a non-deployed state (e.g. FAILED),
+ // `helm upgrade --install` can error with "has no deployed releases".
+ // In that case, clear the release record and do a clean install.
+ const existingStatus = await getHelmReleaseStatus(instanceName, namespace);
+ if (existingStatus && existingStatus.toLowerCase() !== 'deployed') {
+ logger.warn(`Transfer bot release ${instanceName} is in status '${existingStatus}'. Reinstalling cleanly.`);
+ await execAsync(`helm uninstall ${instanceName} --namespace ${namespace} --wait --ignore-not-found`).catch(()=>undefined);
+ // If helm left the release in `uninstalling`, force-delete the record so we can reinstall.
+ const afterUninstallStatus = await getHelmReleaseStatus(instanceName, namespace);
+ if (afterUninstallStatus?.toLowerCase() === 'uninstalling') {
+ await forceDeleteHelmReleaseRecord(instanceName, namespace, logger);
+ }
+ }
+ // `--reuse-values` fails if the release has never successfully deployed (e.g. first install, or a previous failed install).
+ // Only reuse values when we have a deployed release to reuse from.
+ const effectiveReuseValues = reuseValues && await hasDeployedHelmRelease(instanceName, namespace);
  await execHelmCommand({
  instanceName,
  helmChartDir,
@@ -450,7 +538,7 @@ export async function restartBot(namespace, logger) {
  valuesFile: undefined,
  timeout,
  values: values,
- reuseValues
+ reuseValues: effectiveReuseValues
  });
  if (replicas > 0) {
  await waitForResourceByLabel({
@@ -482,8 +570,9 @@ export async function restartBot(namespace, logger) {
  const drop = enabled ? 'true' : 'false';
  const prob = String(probability);
  const selectors = [
- 'app=validator',
- 'app.kubernetes.io/component=validator'
+ 'app.kubernetes.io/name=validator',
+ 'app.kubernetes.io/component=validator',
+ 'app=validator'
  ];
  let updated = false;
  for (const selector of selectors){
@@ -510,8 +599,9 @@ export async function restartBot(namespace, logger) {
  }
  export async function restartValidators(namespace, logger) {
  const selectors = [
- 'app=validator',
- 'app.kubernetes.io/component=validator'
+ 'app.kubernetes.io/name=validator',
+ 'app.kubernetes.io/component=validator',
+ 'app=validator'
  ];
  let any = false;
  for (const selector of selectors){
@@ -565,11 +655,27 @@ export async function enableValidatorDynamicBootNode(instanceName, namespace, sp
  logger.info(`Validator dynamic boot node enabled`);
  }
  export async function getSequencers(namespace) {
- const command = `kubectl get pods -l app.kubernetes.io/component=validator -n ${namespace} -o jsonpath='{.items[*].metadata.name}'`;
- const { stdout } = await execAsync(command);
- const sequencers = stdout.split(' ');
- logger.verbose(`Found sequencer pods ${sequencers.join(', ')}`);
- return sequencers;
+ const selectors = [
+ 'app.kubernetes.io/name=validator',
+ 'app.kubernetes.io/component=validator',
+ 'app.kubernetes.io/component=sequencer-node',
+ 'app=validator'
+ ];
+ for (const selector of selectors){
+ try {
+ const command = `kubectl get pods -l ${selector} -n ${namespace} -o jsonpath='{.items[*].metadata.name}'`;
+ const { stdout } = await execAsync(command);
+ const sequencers = stdout.split(' ').map((s)=>s.trim()).filter(Boolean);
+ if (sequencers.length > 0) {
+ logger.verbose(`Found sequencer pods ${sequencers.join(', ')} (selector=${selector})`);
+ return sequencers;
+ }
+ } catch {
+ // try next selector
+ }
+ }
+ // Fail fast instead of returning [''] which leads to attempts to port-forward `pod/`.
+ throw new Error(`No sequencer/validator pods found in namespace ${namespace}. Tried selectors: ${selectors.join(', ')}`);
  }
  export function updateSequencersConfig(env, config) {
  return withSequencersAdmin(env, async (client)=>{
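Since `getSequencers` now throws instead of returning `['']` when no selector matches, callers can fail a scenario early with a clear message. A hypothetical caller sketch (the import path and namespace are illustrative):

```ts
import { getSequencers } from '@aztec/end-to-end/dest/spartan/utils.js'; // assumed path

const namespace = 'smoke';
// Throws with the list of selectors tried if no validator/sequencer pods exist,
// instead of silently producing an empty pod name to port-forward to.
const sequencers = await getSequencers(namespace);
console.log(`Port-forwarding admin APIs for: ${sequencers.join(', ')}`);
```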
@@ -619,7 +725,9 @@ export async function withSequencersAdmin(env, fn) {
  const url = `http://127.0.0.1:${port}`;
  const client = createPublicClient({
  transport: fallback([
- http(url)
+ http(url, {
+ batch: false
+ })
  ])
  });
  if (processes) {
@@ -637,7 +745,9 @@ export async function withSequencersAdmin(env, fn) {
  }
  const client = createPublicClient({
  transport: fallback([
- http(L1_RPC_URLS_JSON)
+ http(L1_RPC_URLS_JSON, {
+ batch: false
+ })
  ])
  });
  return {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aztec/end-to-end",
- "version": "3.0.0-rc.5",
+ "version": "4.0.0-nightly.20260107",
  "type": "module",
  "exports": "./dest/index.js",
  "inherits": [
@@ -25,43 +25,43 @@
  "formatting": "run -T prettier --check ./src && run -T eslint ./src"
  },
  "dependencies": {
- "@aztec/accounts": "3.0.0-rc.5",
- "@aztec/archiver": "3.0.0-rc.5",
- "@aztec/aztec": "3.0.0-rc.5",
- "@aztec/aztec-node": "3.0.0-rc.5",
- "@aztec/aztec.js": "3.0.0-rc.5",
- "@aztec/bb-prover": "3.0.0-rc.5",
- "@aztec/bb.js": "3.0.0-rc.5",
- "@aztec/blob-lib": "3.0.0-rc.5",
- "@aztec/blob-sink": "3.0.0-rc.5",
- "@aztec/bot": "3.0.0-rc.5",
- "@aztec/cli": "3.0.0-rc.5",
- "@aztec/constants": "3.0.0-rc.5",
- "@aztec/entrypoints": "3.0.0-rc.5",
- "@aztec/epoch-cache": "3.0.0-rc.5",
- "@aztec/ethereum": "3.0.0-rc.5",
- "@aztec/foundation": "3.0.0-rc.5",
- "@aztec/kv-store": "3.0.0-rc.5",
- "@aztec/l1-artifacts": "3.0.0-rc.5",
- "@aztec/merkle-tree": "3.0.0-rc.5",
- "@aztec/node-keystore": "3.0.0-rc.5",
- "@aztec/noir-contracts.js": "3.0.0-rc.5",
- "@aztec/noir-noirc_abi": "3.0.0-rc.5",
- "@aztec/noir-protocol-circuits-types": "3.0.0-rc.5",
- "@aztec/noir-test-contracts.js": "3.0.0-rc.5",
- "@aztec/p2p": "3.0.0-rc.5",
- "@aztec/protocol-contracts": "3.0.0-rc.5",
- "@aztec/prover-client": "3.0.0-rc.5",
- "@aztec/prover-node": "3.0.0-rc.5",
- "@aztec/pxe": "3.0.0-rc.5",
- "@aztec/sequencer-client": "3.0.0-rc.5",
- "@aztec/simulator": "3.0.0-rc.5",
- "@aztec/slasher": "3.0.0-rc.5",
- "@aztec/stdlib": "3.0.0-rc.5",
- "@aztec/telemetry-client": "3.0.0-rc.5",
- "@aztec/test-wallet": "3.0.0-rc.5",
- "@aztec/validator-client": "3.0.0-rc.5",
- "@aztec/world-state": "3.0.0-rc.5",
+ "@aztec/accounts": "4.0.0-nightly.20260107",
+ "@aztec/archiver": "4.0.0-nightly.20260107",
+ "@aztec/aztec": "4.0.0-nightly.20260107",
+ "@aztec/aztec-node": "4.0.0-nightly.20260107",
+ "@aztec/aztec.js": "4.0.0-nightly.20260107",
+ "@aztec/bb-prover": "4.0.0-nightly.20260107",
+ "@aztec/bb.js": "4.0.0-nightly.20260107",
+ "@aztec/blob-client": "4.0.0-nightly.20260107",
+ "@aztec/blob-lib": "4.0.0-nightly.20260107",
+ "@aztec/bot": "4.0.0-nightly.20260107",
+ "@aztec/cli": "4.0.0-nightly.20260107",
+ "@aztec/constants": "4.0.0-nightly.20260107",
+ "@aztec/entrypoints": "4.0.0-nightly.20260107",
+ "@aztec/epoch-cache": "4.0.0-nightly.20260107",
+ "@aztec/ethereum": "4.0.0-nightly.20260107",
+ "@aztec/foundation": "4.0.0-nightly.20260107",
+ "@aztec/kv-store": "4.0.0-nightly.20260107",
+ "@aztec/l1-artifacts": "4.0.0-nightly.20260107",
+ "@aztec/merkle-tree": "4.0.0-nightly.20260107",
+ "@aztec/node-keystore": "4.0.0-nightly.20260107",
+ "@aztec/noir-contracts.js": "4.0.0-nightly.20260107",
+ "@aztec/noir-noirc_abi": "4.0.0-nightly.20260107",
+ "@aztec/noir-protocol-circuits-types": "4.0.0-nightly.20260107",
+ "@aztec/noir-test-contracts.js": "4.0.0-nightly.20260107",
+ "@aztec/p2p": "4.0.0-nightly.20260107",
+ "@aztec/protocol-contracts": "4.0.0-nightly.20260107",
+ "@aztec/prover-client": "4.0.0-nightly.20260107",
+ "@aztec/prover-node": "4.0.0-nightly.20260107",
+ "@aztec/pxe": "4.0.0-nightly.20260107",
+ "@aztec/sequencer-client": "4.0.0-nightly.20260107",
+ "@aztec/simulator": "4.0.0-nightly.20260107",
+ "@aztec/slasher": "4.0.0-nightly.20260107",
+ "@aztec/stdlib": "4.0.0-nightly.20260107",
+ "@aztec/telemetry-client": "4.0.0-nightly.20260107",
+ "@aztec/test-wallet": "4.0.0-nightly.20260107",
+ "@aztec/validator-client": "4.0.0-nightly.20260107",
+ "@aztec/world-state": "4.0.0-nightly.20260107",
  "@iarna/toml": "^2.2.5",
  "@jest/globals": "^30.0.0",
  "@noble/curves": "=1.0.0",
@@ -22,7 +22,7 @@ async function main() {
  logger.info(`Flows in ${ivcFolder}: \n${flows.map(flowName => `\t- ${flowName}`).join('\n')}`);
  const simulator = new WASMSimulator();
  const log = proxyLogger.createLogger('bb:prover');
- const prover = new BBBundlePrivateKernelProver(simulator, log);
+ const prover = new BBBundlePrivateKernelProver(simulator, { logger: log });

  const userLog = createLogger('chonk_flows:data_processor');

@@ -1,4 +1,5 @@
  import type { AztecNodeService } from '@aztec/aztec-node';
+ import { AztecAddress } from '@aztec/aztec.js/addresses';
  import { BatchCall, type SentTx, type WaitOpts } from '@aztec/aztec.js/contracts';
  import { mean, stdDev, times } from '@aztec/foundation/collection';
  import { BenchmarkingContract } from '@aztec/noir-test-contracts.js/Benchmarking';
@@ -103,19 +104,22 @@ function getMetricValues(points: BenchmarkDataPoint[]) {
  * @param heavyPublicCompute - Whether the transactions include heavy public compute (like a big sha256).
  * @returns A BatchCall instance.
  */
- function makeCall(
+ async function makeCall(
  index: number,
  context: EndToEndContext,
  contract: BenchmarkingContract,
  heavyPublicCompute: boolean,
  ) {
- const [owner] = context.accounts;
  if (heavyPublicCompute) {
  return new BatchCall(context.wallet, [contract.methods.sha256_hash_1024(randomBytesAsBigInts(1024))]);
  } else {
+ // We use random address for the new note owner because we can emit at most UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN
+ // logs for a given sender-recipient-contract tuple.
+ const ownerOfNewNote = await AztecAddress.random();
+ const [ownerOfBalance] = context.accounts;
  return new BatchCall(context.wallet, [
- contract.methods.create_note(owner, index + 1),
- contract.methods.increment_balance(owner, index + 1),
+ contract.methods.create_note(ownerOfNewNote, index + 1),
+ contract.methods.increment_balance(ownerOfBalance, index + 1),
  ]);
  }
  }
@@ -129,13 +133,13 @@ function makeCall(
  * @param heavyPublicCompute - Whether the transactions include heavy public compute (like a big sha256).
  * @returns Array of sent txs.
  */
- export function sendTxs(
+ export async function sendTxs(
  txCount: number,
  context: EndToEndContext,
  contract: BenchmarkingContract,
  heavyPublicCompute: boolean = false,
- ): SentTx[] {
- const calls = times(txCount, index => makeCall(index, context, contract, heavyPublicCompute));
+ ): Promise<SentTx[]> {
+ const calls = await Promise.all(times(txCount, index => makeCall(index, context, contract, heavyPublicCompute)));
  context.logger.info(`Creating ${txCount} txs`);
  const [from] = context.accounts;
  context.logger.info(`Sending ${txCount} txs`);
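Because `sendTxs` now returns `Promise<SentTx[]>`, benchmark callers need an extra `await`. A hypothetical caller sketch (the import path and variable names are illustrative; `SentTx.wait()` is the usual way these tests await inclusion):

```ts
import { sendTxs } from '@aztec/end-to-end/dest/bench/utils.js'; // assumed path

// Before: const txs = sendTxs(8, context, contract);
// After: the calls are now built asynchronously, so the result must be awaited.
const txs = await sendTxs(8, context, contract, /* heavyPublicCompute */ false);
await Promise.all(txs.map(tx => tx.wait()));
```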
@@ -97,12 +97,24 @@ export class EpochsTestContext {
  const aztecSlotDuration = opts.aztecSlotDuration ?? ethereumSlotDuration * 2;
  const aztecEpochDuration = opts.aztecEpochDuration ?? 6;
  const aztecProofSubmissionEpochs = opts.aztecProofSubmissionEpochs ?? 1;
- return { ethereumSlotDuration, aztecSlotDuration, aztecEpochDuration, aztecProofSubmissionEpochs };
+ const l1PublishingTime = opts.l1PublishingTime ?? 1;
+ return {
+ l1PublishingTime,
+ ethereumSlotDuration,
+ aztecSlotDuration,
+ aztecEpochDuration,
+ aztecProofSubmissionEpochs,
+ };
  }

  public async setup(opts: EpochsTestOpts = {}) {
- const { ethereumSlotDuration, aztecSlotDuration, aztecEpochDuration, aztecProofSubmissionEpochs } =
- EpochsTestContext.getSlotDurations(opts);
+ const {
+ ethereumSlotDuration,
+ aztecSlotDuration,
+ aztecEpochDuration,
+ aztecProofSubmissionEpochs,
+ l1PublishingTime,
+ } = EpochsTestContext.getSlotDurations(opts);

  this.L1_BLOCK_TIME_IN_S = ethereumSlotDuration;
  this.L2_SLOT_DURATION_IN_S = aztecSlotDuration;
@@ -130,6 +142,7 @@ export class EpochsTestContext {
  worldStateBlockHistory: WORLD_STATE_BLOCK_HISTORY,
  exitDelaySeconds: DefaultL1ContractsConfig.exitDelaySeconds,
  slasherFlavor: 'none',
+ l1PublishingTime,
  ...opts,
  });

@@ -292,7 +305,7 @@ export class EpochsTestContext {
  }

  /** Waits until the given checkpoint number is mined. */
- public async waitUntilCheckpointNumber(target: CheckpointNumber, timeout = 60) {
+ public async waitUntilCheckpointNumber(target: CheckpointNumber, timeout = 120) {
  await retryUntil(
  () => Promise.resolve(target <= this.monitor.checkpointNumber),
  `Wait until checkpoint ${target}`,
@@ -302,7 +315,7 @@ export class EpochsTestContext {
  }

  /** Waits until the given checkpoint number is marked as proven. */
- public async waitUntilProvenCheckpointNumber(target: CheckpointNumber, timeout = 60) {
+ public async waitUntilProvenCheckpointNumber(target: CheckpointNumber, timeout = 120) {
  await retryUntil(
  () => Promise.resolve(target <= this.monitor.provenCheckpointNumber),
  `Wait proven checkpoint ${target}`,
@@ -391,11 +404,11 @@ export class EpochsTestContext {
  const stateChanges: TrackedSequencerEvent[] = [];
  const failEvents: TrackedSequencerEvent[] = [];

- // Note we do not include the 'tx-count-check-failed' event here, since it is fine if we dont build
+ // Note we do not include the 'block-tx-count-check-failed' event here, since it is fine if we dont build
  // due to lack of txs available.
  const failEventsKeys: (keyof SequencerEvents)[] = [
  'block-build-failed',
- 'block-publish-failed',
+ 'checkpoint-publish-failed',
  'proposer-rollup-check-failed',
  ];

@@ -19,6 +19,7 @@ import { CounterContract } from '@aztec/noir-test-contracts.js/Counter';
  import { ProtocolContractAddress } from '@aztec/protocol-contracts';
  import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice';
  import { GasSettings } from '@aztec/stdlib/gas';
+ import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client';
  import { TestWallet } from '@aztec/test-wallet/server';

  import { getContract } from 'viem';
@@ -58,6 +59,7 @@ export class FeesTest {

  public logger: Logger;
  public aztecNode!: AztecNode;
+ public aztecNodeAdmin!: AztecNodeAdmin;
  public cheatCodes!: CheatCodes;

  public wallet!: TestWallet;
@@ -142,9 +144,7 @@ export class FeesTest {
  const blockReward = await this.rollupContract.getCheckpointReward();
  const rewardConfig = await this.rollupContract.getRewardConfig();

- const balance = await this.feeJuiceBridgeTestHarness.getL1FeeJuiceBalance(
- EthAddress.fromString(rewardConfig.rewardDistributor),
- );
+ const balance = await this.feeJuiceBridgeTestHarness.getL1FeeJuiceBalance(rewardConfig.rewardDistributor);

  const toDistribute = balance > blockReward ? blockReward : balance;
  const sequencerBlockRewards = (toDistribute * BigInt(rewardConfig.sequencerBps)) / 10000n;
@@ -190,6 +190,7 @@ export class FeesTest {
  async ({ deployedAccounts }, { wallet, aztecNode, cheatCodes }) => {
  this.wallet = wallet;
  this.aztecNode = aztecNode;
+ this.aztecNodeAdmin = aztecNode;
  this.gasSettings = GasSettings.default({ maxFeesPerGas: (await this.aztecNode.getCurrentBaseFees()).mul(2) });
  this.cheatCodes = cheatCodes;
  this.accounts = deployedAccounts.map(a => a.address);
@@ -322,7 +323,7 @@ export class FeesTest {
  const { baseFee } = await this.rollupContract.getL1FeesAt(block!.header.globalVariables.timestamp);
  const proverCost =
  mulDiv(
- mulDiv(L1_GAS_PER_EPOCH_VERIFIED, baseFee, await this.rollupContract.getEpochDuration()),
+ mulDiv(L1_GAS_PER_EPOCH_VERIFIED, baseFee, BigInt(await this.rollupContract.getEpochDuration())),
  1n,
  await this.rollupContract.getManaTarget(),
  ) + (await this.rollupContract.getProvingCostPerMana());
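The `BigInt(...)` wrapper above suggests `getEpochDuration()` now resolves to a plain `number`, while the surrounding fee math stays in `bigint`. A minimal illustration of why the coercion is needed (the local `mulDiv` here is a stand-in for the sketch, not the package's helper):

```ts
// Mixing bigint and number in arithmetic fails type-checking and throws a TypeError at runtime,
// so the number must be coerced before entering bigint math.
const mulDiv = (a: bigint, b: bigint, denominator: bigint): bigint => (a * b) / denominator;

const l1GasPerEpochVerified = 1_000_000n;
const baseFee = 25n;
const epochDuration = 32; // plain number

// mulDiv(l1GasPerEpochVerified, baseFee, epochDuration)  -> rejected: number is not bigint
const perEpochCost = mulDiv(l1GasPerEpochVerified, baseFee, BigInt(epochDuration)); // ok
```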
@@ -451,7 +451,7 @@ export class P2PNetworkTest {
  );

  const slasherContract = getContract({
- address: getAddress(await rollup.getSlasherAddress()),
+ address: getAddress((await rollup.getSlasherAddress()).toString()),
  abi: SlasherAbi,
  client: this.ctx.deployL1ContractsValues.l1Client,
  });
@@ -13,6 +13,7 @@ import type {
  } from '@aztec/ethereum/contracts';
  import { EpochNumber } from '@aztec/foundation/branded-types';
  import { timesAsync, unique } from '@aztec/foundation/collection';
+ import { EthAddress } from '@aztec/foundation/eth-address';
  import { retryUntil } from '@aztec/foundation/retry';
  import { pluralize } from '@aztec/foundation/string';
  import type { SpamContract } from '@aztec/noir-test-contracts.js/Spam';
@@ -137,7 +138,7 @@ export async function awaitCommitteeExists({
  logger: Logger;
  }): Promise<readonly `0x${string}`[]> {
  logger.info(`Waiting for committee to be set`);
- let committee: readonly `0x${string}`[] | undefined;
+ let committee: EthAddress[] | undefined;
  await retryUntil(
  async () => {
  committee = await rollup.getCurrentEpochCommittee();
@@ -146,7 +147,7 @@ export async function awaitCommitteeExists({
  'non-empty committee',
  60,
  );
- return committee!;
+ return committee!.map(c => c.toString() as `0x${string}`);
  }

  export async function awaitOffenseDetected({
@@ -218,9 +219,9 @@ export async function awaitCommitteeKicked({

  if (slashingProposer.type === 'empire') {
  // Await for the slash payload to be created if empire (no payload is created on tally until execution time)
- const targetEpoch = BigInt(await cheatCodes.getEpoch()) + (await rollup.getLagInEpochsForValidatorSet()) + 1n;
+ const targetEpoch = EpochNumber((await cheatCodes.getEpoch()) + (await rollup.getLagInEpochsForValidatorSet()) + 1);
  logger.info(`Advancing to epoch ${targetEpoch} so we start slashing`);
- await cheatCodes.advanceToEpoch(EpochNumber.fromBigInt(targetEpoch));
+ await cheatCodes.advanceToEpoch(targetEpoch);

  const slashPayloadEvents = await retryUntil(
  async () => {
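The slashing helpers now build the target epoch with a plain `EpochNumber(...)` call over numbers instead of `EpochNumber.fromBigInt(...)`. A hedged sketch of the branded-number pattern this implies (not the actual `@aztec/foundation/branded-types` implementation):

```ts
// A branded number: structurally a number, nominally distinct at the type level.
type Branded<T, B extends string> = T & { readonly __brand: B };
type EpochNumber = Branded<number, 'EpochNumber'>;
const EpochNumber = (n: number): EpochNumber => n as EpochNumber;

const currentEpoch = 4;
const lagInEpochs = 2;
// Arithmetic stays in plain numbers; the brand is applied once at the boundary.
const targetEpoch = EpochNumber(currentEpoch + lagInEpochs + 1);
```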
@@ -275,7 +276,7 @@ export async function awaitCommitteeKicked({
  logger.info(`Advancing to check current committee`);
  await cheatCodes.debugRollup();
  await cheatCodes.advanceToEpoch(
- EpochNumber.fromBigInt(BigInt(await cheatCodes.getEpoch()) + (await rollup.getLagInEpochsForValidatorSet()) + 1n),
+ EpochNumber((await cheatCodes.getEpoch()) + (await rollup.getLagInEpochsForValidatorSet()) + 1),
  );
  await cheatCodes.debugRollup();